databricks-sdk 0.33.0__tar.gz → 0.35.0__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of databricks-sdk might be problematic.
- {databricks_sdk-0.33.0 → databricks_sdk-0.35.0}/NOTICE +14 -0
- {databricks_sdk-0.33.0/databricks_sdk.egg-info → databricks_sdk-0.35.0}/PKG-INFO +8 -1
- {databricks_sdk-0.33.0 → databricks_sdk-0.35.0}/databricks/sdk/__init__.py +4 -2
- databricks_sdk-0.33.0/databricks/sdk/core.py → databricks_sdk-0.35.0/databricks/sdk/_base_client.py +66 -109
- databricks_sdk-0.35.0/databricks/sdk/core.py +99 -0
- {databricks_sdk-0.33.0 → databricks_sdk-0.35.0}/databricks/sdk/mixins/files.py +9 -9
- databricks_sdk-0.35.0/databricks/sdk/mixins/open_ai_client.py +52 -0
- {databricks_sdk-0.33.0 → databricks_sdk-0.35.0}/databricks/sdk/service/apps.py +213 -8
- {databricks_sdk-0.33.0 → databricks_sdk-0.35.0}/databricks/sdk/service/catalog.py +17 -5
- {databricks_sdk-0.33.0 → databricks_sdk-0.35.0}/databricks/sdk/service/dashboards.py +10 -3
- {databricks_sdk-0.33.0 → databricks_sdk-0.35.0}/databricks/sdk/service/jobs.py +58 -1
- {databricks_sdk-0.33.0 → databricks_sdk-0.35.0}/databricks/sdk/service/pipelines.py +53 -3
- {databricks_sdk-0.33.0 → databricks_sdk-0.35.0}/databricks/sdk/service/settings.py +179 -0
- {databricks_sdk-0.33.0 → databricks_sdk-0.35.0}/databricks/sdk/service/sharing.py +1 -1
- {databricks_sdk-0.33.0 → databricks_sdk-0.35.0}/databricks/sdk/service/sql.py +27 -3
- {databricks_sdk-0.33.0 → databricks_sdk-0.35.0}/databricks/sdk/service/workspace.py +2 -2
- databricks_sdk-0.35.0/databricks/sdk/version.py +1 -0
- {databricks_sdk-0.33.0 → databricks_sdk-0.35.0/databricks_sdk.egg-info}/PKG-INFO +8 -1
- {databricks_sdk-0.33.0 → databricks_sdk-0.35.0}/databricks_sdk.egg-info/SOURCES.txt +4 -0
- {databricks_sdk-0.33.0 → databricks_sdk-0.35.0}/databricks_sdk.egg-info/requires.txt +12 -0
- {databricks_sdk-0.33.0 → databricks_sdk-0.35.0}/setup.py +4 -2
- databricks_sdk-0.35.0/tests/test_base_client.py +278 -0
- {databricks_sdk-0.33.0 → databricks_sdk-0.35.0}/tests/test_core.py +14 -284
- {databricks_sdk-0.33.0 → databricks_sdk-0.35.0}/tests/test_dbfs_mixins.py +5 -8
- {databricks_sdk-0.33.0 → databricks_sdk-0.35.0}/tests/test_model_serving_auth.py +20 -12
- databricks_sdk-0.35.0/tests/test_open_ai_mixin.py +30 -0
- databricks_sdk-0.33.0/databricks/sdk/version.py +0 -1
- {databricks_sdk-0.33.0 → databricks_sdk-0.35.0}/LICENSE +0 -0
- {databricks_sdk-0.33.0 → databricks_sdk-0.35.0}/README.md +0 -0
- {databricks_sdk-0.33.0 → databricks_sdk-0.35.0}/databricks/__init__.py +0 -0
- {databricks_sdk-0.33.0 → databricks_sdk-0.35.0}/databricks/sdk/_property.py +0 -0
- {databricks_sdk-0.33.0 → databricks_sdk-0.35.0}/databricks/sdk/_widgets/__init__.py +0 -0
- {databricks_sdk-0.33.0 → databricks_sdk-0.35.0}/databricks/sdk/_widgets/default_widgets_utils.py +0 -0
- {databricks_sdk-0.33.0 → databricks_sdk-0.35.0}/databricks/sdk/_widgets/ipywidgets_utils.py +0 -0
- {databricks_sdk-0.33.0 → databricks_sdk-0.35.0}/databricks/sdk/azure.py +0 -0
- {databricks_sdk-0.33.0 → databricks_sdk-0.35.0}/databricks/sdk/casing.py +0 -0
- {databricks_sdk-0.33.0 → databricks_sdk-0.35.0}/databricks/sdk/clock.py +0 -0
- {databricks_sdk-0.33.0 → databricks_sdk-0.35.0}/databricks/sdk/config.py +0 -0
- {databricks_sdk-0.33.0 → databricks_sdk-0.35.0}/databricks/sdk/credentials_provider.py +0 -0
- {databricks_sdk-0.33.0 → databricks_sdk-0.35.0}/databricks/sdk/data_plane.py +0 -0
- {databricks_sdk-0.33.0 → databricks_sdk-0.35.0}/databricks/sdk/dbutils.py +0 -0
- {databricks_sdk-0.33.0 → databricks_sdk-0.35.0}/databricks/sdk/environments.py +0 -0
- {databricks_sdk-0.33.0 → databricks_sdk-0.35.0}/databricks/sdk/errors/__init__.py +0 -0
- {databricks_sdk-0.33.0 → databricks_sdk-0.35.0}/databricks/sdk/errors/base.py +0 -0
- {databricks_sdk-0.33.0 → databricks_sdk-0.35.0}/databricks/sdk/errors/customizer.py +0 -0
- {databricks_sdk-0.33.0 → databricks_sdk-0.35.0}/databricks/sdk/errors/deserializer.py +0 -0
- {databricks_sdk-0.33.0 → databricks_sdk-0.35.0}/databricks/sdk/errors/mapper.py +0 -0
- {databricks_sdk-0.33.0 → databricks_sdk-0.35.0}/databricks/sdk/errors/overrides.py +0 -0
- {databricks_sdk-0.33.0 → databricks_sdk-0.35.0}/databricks/sdk/errors/parser.py +0 -0
- {databricks_sdk-0.33.0 → databricks_sdk-0.35.0}/databricks/sdk/errors/platform.py +0 -0
- {databricks_sdk-0.33.0 → databricks_sdk-0.35.0}/databricks/sdk/errors/private_link.py +0 -0
- {databricks_sdk-0.33.0 → databricks_sdk-0.35.0}/databricks/sdk/errors/sdk.py +0 -0
- {databricks_sdk-0.33.0 → databricks_sdk-0.35.0}/databricks/sdk/logger/__init__.py +0 -0
- {databricks_sdk-0.33.0 → databricks_sdk-0.35.0}/databricks/sdk/logger/round_trip_logger.py +0 -0
- {databricks_sdk-0.33.0 → databricks_sdk-0.35.0}/databricks/sdk/mixins/__init__.py +0 -0
- {databricks_sdk-0.33.0 → databricks_sdk-0.35.0}/databricks/sdk/mixins/compute.py +0 -0
- {databricks_sdk-0.33.0 → databricks_sdk-0.35.0}/databricks/sdk/mixins/workspace.py +0 -0
- {databricks_sdk-0.33.0 → databricks_sdk-0.35.0}/databricks/sdk/oauth.py +0 -0
- {databricks_sdk-0.33.0 → databricks_sdk-0.35.0}/databricks/sdk/py.typed +0 -0
- {databricks_sdk-0.33.0 → databricks_sdk-0.35.0}/databricks/sdk/retries.py +0 -0
- {databricks_sdk-0.33.0 → databricks_sdk-0.35.0}/databricks/sdk/runtime/__init__.py +0 -0
- {databricks_sdk-0.33.0 → databricks_sdk-0.35.0}/databricks/sdk/runtime/dbutils_stub.py +0 -0
- {databricks_sdk-0.33.0 → databricks_sdk-0.35.0}/databricks/sdk/service/__init__.py +0 -0
- {databricks_sdk-0.33.0 → databricks_sdk-0.35.0}/databricks/sdk/service/_internal.py +0 -0
- {databricks_sdk-0.33.0 → databricks_sdk-0.35.0}/databricks/sdk/service/billing.py +0 -0
- {databricks_sdk-0.33.0 → databricks_sdk-0.35.0}/databricks/sdk/service/compute.py +0 -0
- {databricks_sdk-0.33.0 → databricks_sdk-0.35.0}/databricks/sdk/service/files.py +0 -0
- {databricks_sdk-0.33.0 → databricks_sdk-0.35.0}/databricks/sdk/service/iam.py +0 -0
- {databricks_sdk-0.33.0 → databricks_sdk-0.35.0}/databricks/sdk/service/marketplace.py +0 -0
- {databricks_sdk-0.33.0 → databricks_sdk-0.35.0}/databricks/sdk/service/ml.py +0 -0
- {databricks_sdk-0.33.0 → databricks_sdk-0.35.0}/databricks/sdk/service/oauth2.py +0 -0
- {databricks_sdk-0.33.0 → databricks_sdk-0.35.0}/databricks/sdk/service/provisioning.py +0 -0
- {databricks_sdk-0.33.0 → databricks_sdk-0.35.0}/databricks/sdk/service/serving.py +0 -0
- {databricks_sdk-0.33.0 → databricks_sdk-0.35.0}/databricks/sdk/service/vectorsearch.py +0 -0
- {databricks_sdk-0.33.0 → databricks_sdk-0.35.0}/databricks/sdk/useragent.py +0 -0
- {databricks_sdk-0.33.0 → databricks_sdk-0.35.0}/databricks_sdk.egg-info/dependency_links.txt +0 -0
- {databricks_sdk-0.33.0 → databricks_sdk-0.35.0}/databricks_sdk.egg-info/top_level.txt +0 -0
- {databricks_sdk-0.33.0 → databricks_sdk-0.35.0}/setup.cfg +0 -0
- {databricks_sdk-0.33.0 → databricks_sdk-0.35.0}/tests/test_auth.py +0 -0
- {databricks_sdk-0.33.0 → databricks_sdk-0.35.0}/tests/test_auth_manual_tests.py +0 -0
- {databricks_sdk-0.33.0 → databricks_sdk-0.35.0}/tests/test_client.py +0 -0
- {databricks_sdk-0.33.0 → databricks_sdk-0.35.0}/tests/test_compute_mixins.py +0 -0
- {databricks_sdk-0.33.0 → databricks_sdk-0.35.0}/tests/test_config.py +0 -0
- {databricks_sdk-0.33.0 → databricks_sdk-0.35.0}/tests/test_data_plane.py +0 -0
- {databricks_sdk-0.33.0 → databricks_sdk-0.35.0}/tests/test_dbutils.py +0 -0
- {databricks_sdk-0.33.0 → databricks_sdk-0.35.0}/tests/test_environments.py +0 -0
- {databricks_sdk-0.33.0 → databricks_sdk-0.35.0}/tests/test_errors.py +0 -0
- {databricks_sdk-0.33.0 → databricks_sdk-0.35.0}/tests/test_init_file.py +0 -0
- {databricks_sdk-0.33.0 → databricks_sdk-0.35.0}/tests/test_internal.py +0 -0
- {databricks_sdk-0.33.0 → databricks_sdk-0.35.0}/tests/test_jobs.py +0 -0
- {databricks_sdk-0.33.0 → databricks_sdk-0.35.0}/tests/test_metadata_service_auth.py +0 -0
- {databricks_sdk-0.33.0 → databricks_sdk-0.35.0}/tests/test_misc.py +0 -0
- {databricks_sdk-0.33.0 → databricks_sdk-0.35.0}/tests/test_oauth.py +0 -0
- {databricks_sdk-0.33.0 → databricks_sdk-0.35.0}/tests/test_retries.py +0 -0
- {databricks_sdk-0.33.0 → databricks_sdk-0.35.0}/tests/test_user_agent.py +0 -0
{databricks_sdk-0.33.0 → databricks_sdk-0.35.0}/NOTICE
@@ -12,8 +12,22 @@ googleapis/google-auth-library-python - https://github.com/googleapis/google-auth-library-python
 Copyright google-auth-library-python authors
 License - https://github.com/googleapis/google-auth-library-python/blob/main/LICENSE

+openai/openai-python - https://github.com/openai/openai-python
+Copyright 2024 OpenAI
+License - https://github.com/openai/openai-python/blob/main/LICENSE
+
 This software contains code from the following open source projects, licensed under the BSD (3-clause) license.

 x/oauth2 - https://cs.opensource.google/go/x/oauth2/+/master:oauth2.go
 Copyright 2014 The Go Authors. All rights reserved.
 License - https://cs.opensource.google/go/x/oauth2/+/master:LICENSE
+
+encode/httpx - https://github.com/encode/httpx
+Copyright 2019, Encode OSS Ltd
+License - https://github.com/encode/httpx/blob/master/LICENSE.md
+
+This software contains code from the following open source projects, licensed under the MIT license:
+
+langchain-ai/langchain - https://github.com/langchain-ai/langchain/blob/master/libs/partners/openai
+Copyright 2023 LangChain, Inc.
+License - https://github.com/langchain-ai/langchain/blob/master/libs/partners/openai/LICENSE
{databricks_sdk-0.33.0/databricks_sdk.egg-info → databricks_sdk-0.35.0}/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: databricks-sdk
-Version: 0.33.0
+Version: 0.35.0
 Summary: Databricks SDK for Python (Beta)
 Home-page: https://databricks-sdk-py.readthedocs.io
 Author: Serge Smertin
@@ -40,9 +40,16 @@ Requires-Dist: requests-mock; extra == "dev"
 Requires-Dist: pyfakefs; extra == "dev"
 Requires-Dist: databricks-connect; extra == "dev"
 Requires-Dist: pytest-rerunfailures; extra == "dev"
+Requires-Dist: openai; extra == "dev"
+Requires-Dist: langchain-openai; python_version > "3.7" and extra == "dev"
+Requires-Dist: httpx; extra == "dev"
 Provides-Extra: notebook
 Requires-Dist: ipython<9,>=8; extra == "notebook"
 Requires-Dist: ipywidgets<9,>=8; extra == "notebook"
+Provides-Extra: openai
+Requires-Dist: openai; extra == "openai"
+Requires-Dist: langchain-openai; python_version > "3.7" and extra == "openai"
+Requires-Dist: httpx; extra == "openai"

 # Databricks SDK for Python (Beta)

{databricks_sdk-0.33.0 → databricks_sdk-0.35.0}/databricks/sdk/__init__.py
@@ -6,6 +6,7 @@ from databricks.sdk import azure
 from databricks.sdk.credentials_provider import CredentialsStrategy
 from databricks.sdk.mixins.compute import ClustersExt
 from databricks.sdk.mixins.files import DbfsExt
+from databricks.sdk.mixins.open_ai_client import ServingEndpointsExt
 from databricks.sdk.mixins.workspace import WorkspaceExt
 from databricks.sdk.service.apps import AppsAPI
 from databricks.sdk.service.billing import (BillableUsageAPI, BudgetsAPI,
@@ -71,6 +72,7 @@ from databricks.sdk.service.settings import (AccountIpAccessListsAPI,
                                              CspEnablementAccountAPI,
                                              DefaultNamespaceAPI,
                                              DisableLegacyAccessAPI,
+                                             DisableLegacyDbfsAPI,
                                              DisableLegacyFeaturesAPI,
                                              EnhancedSecurityMonitoringAPI,
                                              EsmEnablementAccountAPI,
@@ -174,7 +176,7 @@ class WorkspaceClient:
         self._config = config.copy()
         self._dbutils = _make_dbutils(self._config)
         self._api_client = client.ApiClient(self._config)
-        serving_endpoints = ServingEndpointsAPI(self._api_client)
+        serving_endpoints = ServingEndpointsExt(self._api_client)
         self._account_access_control_proxy = AccountAccessControlProxyAPI(self._api_client)
         self._alerts = AlertsAPI(self._api_client)
         self._alerts_legacy = AlertsLegacyAPI(self._api_client)
@@ -636,7 +638,7 @@ class WorkspaceClient:
         return self._service_principals

     @property
-    def serving_endpoints(self) -> ServingEndpointsAPI:
+    def serving_endpoints(self) -> ServingEndpointsExt:
         """The Serving Endpoints API allows you to create, update, and delete model serving endpoints."""
         return self._serving_endpoints

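A minimal sketch (not part of the package) of what this change means for callers: WorkspaceClient.serving_endpoints is now typed as the ServingEndpointsExt mixin rather than the plain generated API class, so the OpenAI helpers shown further below are reachable from an ordinary client. Credentials are assumed to come from the environment or ~/.databrickscfg.

from databricks.sdk import WorkspaceClient
from databricks.sdk.mixins.open_ai_client import ServingEndpointsExt

w = WorkspaceClient()  # resolves auth from the environment, as usual

# In 0.35.0 the property returns the extended mixin; it still exposes the generated
# Serving Endpoints API surface, plus the OpenAI client helpers added in this release.
assert isinstance(w.serving_endpoints, ServingEndpointsExt)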
databricks_sdk-0.33.0/databricks/sdk/core.py → databricks_sdk-0.35.0/databricks/sdk/_base_client.py
RENAMED
@@ -1,90 +1,82 @@
-import
+import logging
 from datetime import timedelta
 from types import TracebackType
-from typing import Any, BinaryIO, Iterator,
-from urllib.parse import urlencode
+from typing import (Any, BinaryIO, Callable, Dict, Iterable, Iterator, List,
+                    Optional, Type, Union)

-[import line not captured in this view]
+import requests
+import requests.adapters

+from . import useragent
 from .casing import Casing
-from .config import *
-# To preserve backwards compatibility (as these definitions were previously in this module)
-from .credentials_provider import *
+from .clock import Clock, RealClock
 from .errors import DatabricksError, _ErrorCustomizer, _Parser
 from .logger import RoundTrip
-from .oauth import retrieve_token
 from .retries import retried

-__all__ = ['Config', 'DatabricksError']
-
 logger = logging.getLogger('databricks.sdk')

-URL_ENCODED_CONTENT_TYPE = "application/x-www-form-urlencoded"
-JWT_BEARER_GRANT_TYPE = "urn:ietf:params:oauth:grant-type:jwt-bearer"
-OIDC_TOKEN_PATH = "/oidc/v1/token"
-
-
-class ApiClient:
-    _cfg: Config
-    _RETRY_AFTER_DEFAULT: int = 1
-
-    def __init__(self, cfg: Config = None):

-[8 lines of the old __init__ body not captured in this view]
+class _BaseClient:
+
+    def __init__(self,
+                 debug_truncate_bytes: int = None,
+                 retry_timeout_seconds: int = None,
+                 user_agent_base: str = None,
+                 header_factory: Callable[[], dict] = None,
+                 max_connection_pools: int = None,
+                 max_connections_per_pool: int = None,
+                 pool_block: bool = True,
+                 http_timeout_seconds: float = None,
+                 extra_error_customizers: List[_ErrorCustomizer] = None,
+                 debug_headers: bool = False,
+                 clock: Clock = None):
+        """
+        :param debug_truncate_bytes:
+        :param retry_timeout_seconds:
+        :param user_agent_base:
+        :param header_factory: A function that returns a dictionary of headers to include in the request.
+        :param max_connection_pools: Number of urllib3 connection pools to cache before discarding the least
+            recently used pool. Python requests default value is 10.
+        :param max_connections_per_pool: The maximum number of connections to save in the pool. Improves performance
+            in multithreaded situations. For now, we're setting it to the same value as connection_pool_size.
+        :param pool_block: If pool_block is False, then more connections will are created, but not saved after the
+            first use. Blocks when no free connections are available. urllib3 ensures that no more than
+            pool_maxsize connections are used at a time. Prevents platform from flooding. By default, requests library
+            doesn't block.
+        :param http_timeout_seconds:
+        :param extra_error_customizers:
+        :param debug_headers: Whether to include debug headers in the request log.
+        :param clock: Clock object to use for time-related operations.
+        """
+
+        self._debug_truncate_bytes = debug_truncate_bytes or 96
+        self._debug_headers = debug_headers
+        self._retry_timeout_seconds = retry_timeout_seconds or 300
+        self._user_agent_base = user_agent_base or useragent.to_string()
+        self._header_factory = header_factory
+        self._clock = clock or RealClock()
         self._session = requests.Session()
         self._session.auth = self._authenticate

-        # Number of urllib3 connection pools to cache before discarding the least
-        # recently used pool. Python requests default value is 10.
-        pool_connections = cfg.max_connection_pools
-        if pool_connections is None:
-            pool_connections = 20
-
-        # The maximum number of connections to save in the pool. Improves performance
-        # in multithreaded situations. For now, we're setting it to the same value
-        # as connection_pool_size.
-        pool_maxsize = cfg.max_connections_per_pool
-        if cfg.max_connections_per_pool is None:
-            pool_maxsize = pool_connections
-
-        # If pool_block is False, then more connections will are created,
-        # but not saved after the first use. Blocks when no free connections are available.
-        # urllib3 ensures that no more than pool_maxsize connections are used at a time.
-        # Prevents platform from flooding. By default, requests library doesn't block.
-        pool_block = True
-
         # We don't use `max_retries` from HTTPAdapter to align with a more production-ready
         # retry strategy established in the Databricks SDK for Go. See _is_retryable and
         # @retried for more details.
-        http_adapter = HTTPAdapter(pool_connections=pool_connections,
-                                   pool_maxsize=pool_maxsize,
-                                   pool_block=pool_block)
+        http_adapter = requests.adapters.HTTPAdapter(pool_connections=max_connections_per_pool or 20,
+                                                     pool_maxsize=max_connection_pools or 20,
+                                                     pool_block=pool_block)
         self._session.mount("https://", http_adapter)

         # Default to 60 seconds
-        self._http_timeout_seconds =
-
-        self._error_parser = _Parser(extra_error_customizers=[_AddDebugErrorCustomizer(cfg)])
+        self._http_timeout_seconds = http_timeout_seconds or 60

-    @property
-    def account_id(self) -> str:
-        return self._cfg.account_id
-
-    @property
-    def is_account_client(self) -> bool:
-        return self._cfg.is_account_client
+        self._error_parser = _Parser(extra_error_customizers=extra_error_customizers)

     def _authenticate(self, r: requests.PreparedRequest) -> requests.PreparedRequest:
-        headers = self._cfg.authenticate()
-        for k, v in headers.items():
-            r.headers[k] = v
+        if self._header_factory:
+            headers = self._header_factory()
+            for k, v in headers.items():
+                r.headers[k] = v
         return r

     @staticmethod
@@ -115,26 +107,9 @@ class ApiClient:
         flattened = dict(flatten_dict(with_fixed_bools))
         return flattened

-    def get_oauth_token(self, auth_details: str) -> Token:
-        if not self._cfg.auth_type:
-            self._cfg.authenticate()
-        original_token = self._cfg.oauth_token()
-        headers = {"Content-Type": URL_ENCODED_CONTENT_TYPE}
-        params = urlencode({
-            "grant_type": JWT_BEARER_GRANT_TYPE,
-            "authorization_details": auth_details,
-            "assertion": original_token.access_token
-        })
-        return retrieve_token(client_id=self._cfg.client_id,
-                              client_secret=self._cfg.client_secret,
-                              token_url=self._cfg.host + OIDC_TOKEN_PATH,
-                              params=params,
-                              headers=headers)
-
     def do(self,
            method: str,
-           path: str = None,
-           url: str = None,
+           url: str,
           query: dict = None,
           headers: dict = None,
           body: dict = None,
@@ -142,18 +117,13 @@
           files=None,
           data=None,
           auth: Callable[[requests.PreparedRequest], requests.PreparedRequest] = None,
-           response_headers: List[str] = None) -> Union[dict, BinaryIO]:
+           response_headers: List[str] = None) -> Union[dict, list, BinaryIO]:
         if headers is None:
             headers = {}
-        if url is None:
-            # Remove extra `/` from path for Files API
-            # Once we've fixed the OpenAPI spec, we can remove this
-            path = re.sub('^/api/2.0/fs/files//', '/api/2.0/fs/files/', path)
-            url = f"{self._cfg.host}{path}"
         headers['User-Agent'] = self._user_agent_base
         retryable = retried(timeout=timedelta(seconds=self._retry_timeout_seconds),
                             is_retryable=self._is_retryable,
-                            clock=self._cfg.clock)
+                            clock=self._clock)
         response = retryable(self._perform)(method,
                                             url,
                                             query=query,
@@ -168,19 +138,19 @@
         for header in response_headers if response_headers else []:
             resp[header] = response.headers.get(Casing.to_header_case(header))
         if raw:
-            resp["contents"] = StreamingResponse(response)
+            resp["contents"] = _StreamingResponse(response)
             return resp
         if not len(response.content):
             return resp

-        [line not captured in this view]
-        if
+        json_response = response.json()
+        if json_response is None:
             return resp

-        if isinstance(
-            return
+        if isinstance(json_response, list):
+            return json_response

-        return {**resp, **
+        return {**resp, **json_response}

     @staticmethod
     def _is_retryable(err: BaseException) -> Optional[str]:
@@ -250,23 +220,10 @@
     def _record_request_log(self, response: requests.Response, raw: bool = False) -> None:
         if not logger.isEnabledFor(logging.DEBUG):
             return
-        logger.debug(RoundTrip(response, self.
-
-
-class _AddDebugErrorCustomizer(_ErrorCustomizer):
-    """An error customizer that adds debug information about the configuration to unauthenticated and
-    unauthorized errors."""
-
-    def __init__(self, cfg: Config):
-        self._cfg = cfg
-
-    def customize_error(self, response: requests.Response, kwargs: dict):
-        if response.status_code in (401, 403):
-            message = kwargs.get('message', 'request failed')
-            kwargs['message'] = self._cfg.wrap_debug_info(message)
+        logger.debug(RoundTrip(response, self._debug_headers, self._debug_truncate_bytes, raw).generate())


-class StreamingResponse(BinaryIO):
+class _StreamingResponse(BinaryIO):
     _response: requests.Response
     _buffer: bytes
     _content: Union[Iterator[bytes], None]
databricks_sdk-0.35.0/databricks/sdk/core.py
ADDED
@@ -0,0 +1,99 @@
+import re
+from typing import BinaryIO
+from urllib.parse import urlencode
+
+from ._base_client import _BaseClient
+from .config import *
+# To preserve backwards compatibility (as these definitions were previously in this module)
+from .credentials_provider import *
+from .errors import DatabricksError, _ErrorCustomizer
+from .oauth import retrieve_token
+
+__all__ = ['Config', 'DatabricksError']
+
+logger = logging.getLogger('databricks.sdk')
+
+URL_ENCODED_CONTENT_TYPE = "application/x-www-form-urlencoded"
+JWT_BEARER_GRANT_TYPE = "urn:ietf:params:oauth:grant-type:jwt-bearer"
+OIDC_TOKEN_PATH = "/oidc/v1/token"
+
+
+class ApiClient:
+
+    def __init__(self, cfg: Config):
+        self._cfg = cfg
+        self._api_client = _BaseClient(debug_truncate_bytes=cfg.debug_truncate_bytes,
+                                       retry_timeout_seconds=cfg.retry_timeout_seconds,
+                                       user_agent_base=cfg.user_agent,
+                                       header_factory=cfg.authenticate,
+                                       max_connection_pools=cfg.max_connection_pools,
+                                       max_connections_per_pool=cfg.max_connections_per_pool,
+                                       pool_block=True,
+                                       http_timeout_seconds=cfg.http_timeout_seconds,
+                                       extra_error_customizers=[_AddDebugErrorCustomizer(cfg)],
+                                       clock=cfg.clock)
+
+    @property
+    def account_id(self) -> str:
+        return self._cfg.account_id
+
+    @property
+    def is_account_client(self) -> bool:
+        return self._cfg.is_account_client
+
+    def get_oauth_token(self, auth_details: str) -> Token:
+        if not self._cfg.auth_type:
+            self._cfg.authenticate()
+        original_token = self._cfg.oauth_token()
+        headers = {"Content-Type": URL_ENCODED_CONTENT_TYPE}
+        params = urlencode({
+            "grant_type": JWT_BEARER_GRANT_TYPE,
+            "authorization_details": auth_details,
+            "assertion": original_token.access_token
+        })
+        return retrieve_token(client_id=self._cfg.client_id,
+                              client_secret=self._cfg.client_secret,
+                              token_url=self._cfg.host + OIDC_TOKEN_PATH,
+                              params=params,
+                              headers=headers)
+
+    def do(self,
+           method: str,
+           path: str = None,
+           url: str = None,
+           query: dict = None,
+           headers: dict = None,
+           body: dict = None,
+           raw: bool = False,
+           files=None,
+           data=None,
+           auth: Callable[[requests.PreparedRequest], requests.PreparedRequest] = None,
+           response_headers: List[str] = None) -> Union[dict, list, BinaryIO]:
+        if url is None:
+            # Remove extra `/` from path for Files API
+            # Once we've fixed the OpenAPI spec, we can remove this
+            path = re.sub('^/api/2.0/fs/files//', '/api/2.0/fs/files/', path)
+            url = f"{self._cfg.host}{path}"
+        return self._api_client.do(method=method,
+                                   url=url,
+                                   query=query,
+                                   headers=headers,
+                                   body=body,
+                                   raw=raw,
+                                   files=files,
+                                   data=data,
+                                   auth=auth,
+                                   response_headers=response_headers)
+
+
+class _AddDebugErrorCustomizer(_ErrorCustomizer):
+    """An error customizer that adds debug information about the configuration to unauthenticated and
+    unauthorized errors."""
+
+    def __init__(self, cfg: Config):
+        self._cfg = cfg
+
+    def customize_error(self, response: requests.Response, kwargs: dict):
+        if response.status_code in (401, 403):
+            message = kwargs.get('message', 'request failed')
+            kwargs['message'] = self._cfg.wrap_debug_info(message)
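For existing callers the public surface of ApiClient is unchanged: it still takes a Config and exposes do(), but internally it now builds a _BaseClient and forwards every request to it. A minimal sketch, with placeholder host and token values:

from databricks.sdk.core import ApiClient, Config

cfg = Config(host="https://example.cloud.databricks.com", token="<personal-access-token>")  # placeholders
client = ApiClient(cfg)

# Path-based calls are still accepted; ApiClient expands them to a full URL and
# delegates to _BaseClient.do(), which applies retries, connection pooling and error parsing.
me = client.do("GET", "/api/2.0/preview/scim/v2/Me")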
{databricks_sdk-0.33.0 → databricks_sdk-0.35.0}/databricks/sdk/mixins/files.py
@@ -167,7 +167,7 @@ class _DbfsIO(BinaryIO):
         return f"<_DbfsIO {self._path} {'read' if self.readable() else 'write'}=True>"


-class _FilesIO(BinaryIO):
+class _VolumesIO(BinaryIO):

     def __init__(self, api: files.FilesAPI, path: str, *, read: bool, write: bool, overwrite: bool):
         self._buffer = []
@@ -262,7 +262,7 @@ class _FilesIO(BinaryIO):
         self.close()

     def __repr__(self) -> str:
-        return f"<_FilesIO {self._path} {'read' if self.readable() else 'write'}=True>"
+        return f"<_VolumesIO {self._path} {'read' if self.readable() else 'write'}=True>"


 class _Path(ABC):
@@ -398,7 +398,7 @@ class _LocalPath(_Path):
         return f'<_LocalPath {self._path}>'


-class _FilesPath(_Path):
+class _VolumesPath(_Path):

     def __init__(self, api: files.FilesAPI, src: Union[str, pathlib.Path]):
         self._path = pathlib.PurePosixPath(str(src).replace('dbfs:', '').replace('file:', ''))
@@ -411,7 +411,7 @@ class _FilesPath(_Path):
         return False

     def child(self, path: str) -> Self:
-        return _FilesPath(self._api, str(self._path / path))
+        return _VolumesPath(self._api, str(self._path / path))

     def _is_dir(self) -> bool:
         try:
@@ -431,7 +431,7 @@ class _FilesPath(_Path):
         return self.is_dir

     def open(self, *, read=False, write=False, overwrite=False) -> BinaryIO:
-        return _FilesIO(self._api, self.as_string, read=read, write=write, overwrite=overwrite)
+        return _VolumesIO(self._api, self.as_string, read=read, write=write, overwrite=overwrite)

     def list(self, *, recursive=False) -> Generator[files.FileInfo, None, None]:
         if not self.is_dir:
@@ -458,13 +458,13 @@ class _FilesPath(_Path):
     def delete(self, *, recursive=False):
         if self.is_dir:
             for entry in self.list(recursive=False):
-                _FilesPath(self._api, entry.path).delete(recursive=True)
+                _VolumesPath(self._api, entry.path).delete(recursive=True)
             self._api.delete_directory(self.as_string)
         else:
             self._api.delete(self.as_string)

     def __repr__(self) -> str:
-        return f'<_FilesPath {self._path}>'
+        return f'<_VolumesPath {self._path}>'


 class _DbfsPath(_Path):
@@ -589,8 +589,8 @@ class DbfsExt(files.DbfsAPI):
                             'UC Volumes paths, not external locations or DBFS mount points.')
         if src.scheme == 'file':
             return _LocalPath(src.geturl())
-        if src.path.startswith(
-            return _FilesPath(self._files_api, src.geturl())
+        if src.path.startswith('/Volumes'):
+            return _VolumesPath(self._files_api, src.geturl())
         return _DbfsPath(self._dbfs_api, src.geturl())

     def copy(self, src: str, dst: str, *, recursive=False, overwrite=False):
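The renames above are internal; the DbfsExt entry points keep their signatures. A minimal sketch (paths are placeholders) of the dispatch this hunk touches: file: sources resolve to _LocalPath, paths under /Volumes now resolve to the renamed _VolumesPath/_VolumesIO helpers backed by the Files API, and everything else stays on DBFS.

from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

# Copy a local file into a Unity Catalog volume; the '/Volumes' prefix routes the
# destination through the Files API helpers renamed in this release.
w.dbfs.copy("file:/tmp/report.csv", "/Volumes/main/default/my_volume/report.csv", overwrite=True)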
databricks_sdk-0.35.0/databricks/sdk/mixins/open_ai_client.py
ADDED
@@ -0,0 +1,52 @@
+from databricks.sdk.service.serving import ServingEndpointsAPI
+
+
+class ServingEndpointsExt(ServingEndpointsAPI):
+
+    # Using the HTTP Client to pass in the databricks authorization
+    # This method will be called on every invocation, so when using with model serving will always get the refreshed token
+    def _get_authorized_http_client(self):
+        import httpx
+
+        class BearerAuth(httpx.Auth):
+
+            def __init__(self, get_headers_func):
+                self.get_headers_func = get_headers_func
+
+            def auth_flow(self, request: httpx.Request) -> httpx.Request:
+                auth_headers = self.get_headers_func()
+                request.headers["Authorization"] = auth_headers["Authorization"]
+                yield request
+
+        databricks_token_auth = BearerAuth(self._api._cfg.authenticate)
+
+        # Create an HTTP client with Bearer Token authentication
+        http_client = httpx.Client(auth=databricks_token_auth)
+        return http_client
+
+    def get_open_ai_client(self):
+        try:
+            from openai import OpenAI
+        except Exception:
+            raise ImportError(
+                "Open AI is not installed. Please install the Databricks SDK with the following command `pip isntall databricks-sdk[openai]`"
+            )
+
+        return OpenAI(
+            base_url=self._api._cfg.host + "/serving-endpoints",
+            api_key="no-token",  # Passing in a placeholder to pass validations, this will not be used
+            http_client=self._get_authorized_http_client())
+
+    def get_langchain_chat_open_ai_client(self, model):
+        try:
+            from langchain_openai import ChatOpenAI
+        except Exception:
+            raise ImportError(
+                "Langchain Open AI is not installed. Please install the Databricks SDK with the following command `pip isntall databricks-sdk[openai]` and ensure you are using python>3.7"
+            )
+
+        return ChatOpenAI(
+            model=model,
+            openai_api_base=self._api._cfg.host + "/serving-endpoints",
+            api_key="no-token",  # Passing in a placeholder to pass validations, this will not be used
+            http_client=self._get_authorized_http_client())