databricks-sdk 0.32.1__py3-none-any.whl → 0.32.2__py3-none-any.whl
This diff shows the changes between two publicly released versions of the package, as published to their public registry. It is provided for informational purposes only.
- databricks/sdk/__init__.py +48 -46
- databricks/sdk/config.py +3 -3
- databricks/sdk/core.py +17 -30
- databricks/sdk/credentials_provider.py +39 -5
- databricks/sdk/errors/__init__.py +2 -2
- databricks/sdk/errors/customizer.py +50 -0
- databricks/sdk/errors/deserializer.py +106 -0
- databricks/sdk/errors/parser.py +55 -119
- databricks/sdk/logger/round_trip_logger.py +2 -1
- databricks/sdk/mixins/files.py +9 -9
- databricks/sdk/version.py +1 -1
- {databricks_sdk-0.32.1.dist-info → databricks_sdk-0.32.2.dist-info}/METADATA +1 -1
- {databricks_sdk-0.32.1.dist-info → databricks_sdk-0.32.2.dist-info}/RECORD +17 -15
- {databricks_sdk-0.32.1.dist-info → databricks_sdk-0.32.2.dist-info}/WHEEL +1 -1
- {databricks_sdk-0.32.1.dist-info → databricks_sdk-0.32.2.dist-info}/LICENSE +0 -0
- {databricks_sdk-0.32.1.dist-info → databricks_sdk-0.32.2.dist-info}/NOTICE +0 -0
- {databricks_sdk-0.32.1.dist-info → databricks_sdk-0.32.2.dist-info}/top_level.txt +0 -0
databricks/sdk/__init__.py
CHANGED

@@ -1,3 +1,5 @@
+from typing import Optional
+
 import databricks.sdk.core as client
 import databricks.sdk.dbutils as dbutils
 from databricks.sdk import azure

@@ -116,31 +118,31 @@ class WorkspaceClient:
 
     def __init__(self,
                  *,
-                 host: str = None,
-                 account_id: str = None,
-                 username: str = None,
-                 password: str = None,
-                 client_id: str = None,
-                 client_secret: str = None,
-                 token: str = None,
-                 profile: str = None,
-                 config_file: str = None,
-                 azure_workspace_resource_id: str = None,
-                 azure_client_secret: str = None,
-                 azure_client_id: str = None,
-                 azure_tenant_id: str = None,
-                 azure_environment: str = None,
-                 auth_type: str = None,
-                 cluster_id: str = None,
-                 google_credentials: str = None,
-                 google_service_account: str = None,
-                 debug_truncate_bytes: int = None,
-                 debug_headers: bool = None,
+                 host: Optional[str] = None,
+                 account_id: Optional[str] = None,
+                 username: Optional[str] = None,
+                 password: Optional[str] = None,
+                 client_id: Optional[str] = None,
+                 client_secret: Optional[str] = None,
+                 token: Optional[str] = None,
+                 profile: Optional[str] = None,
+                 config_file: Optional[str] = None,
+                 azure_workspace_resource_id: Optional[str] = None,
+                 azure_client_secret: Optional[str] = None,
+                 azure_client_id: Optional[str] = None,
+                 azure_tenant_id: Optional[str] = None,
+                 azure_environment: Optional[str] = None,
+                 auth_type: Optional[str] = None,
+                 cluster_id: Optional[str] = None,
+                 google_credentials: Optional[str] = None,
+                 google_service_account: Optional[str] = None,
+                 debug_truncate_bytes: Optional[int] = None,
+                 debug_headers: Optional[bool] = None,
                  product="unknown",
                  product_version="0.0.0",
-                 credentials_strategy: CredentialsStrategy = None,
-                 credentials_provider: CredentialsStrategy = None,
-                 config: client.Config = None):
+                 credentials_strategy: Optional[CredentialsStrategy] = None,
+                 credentials_provider: Optional[CredentialsStrategy] = None,
+                 config: Optional[client.Config] = None):
         if not config:
             config = client.Config(host=host,
                                    account_id=account_id,

@@ -742,31 +744,31 @@ class AccountClient:
 
     def __init__(self,
                  *,
-                 host: str = None,
-                 account_id: str = None,
-                 username: str = None,
-                 password: str = None,
-                 client_id: str = None,
-                 client_secret: str = None,
-                 token: str = None,
-                 profile: str = None,
-                 config_file: str = None,
-                 azure_workspace_resource_id: str = None,
-                 azure_client_secret: str = None,
-                 azure_client_id: str = None,
-                 azure_tenant_id: str = None,
-                 azure_environment: str = None,
-                 auth_type: str = None,
-                 cluster_id: str = None,
-                 google_credentials: str = None,
-                 google_service_account: str = None,
-                 debug_truncate_bytes: int = None,
-                 debug_headers: bool = None,
+                 host: Optional[str] = None,
+                 account_id: Optional[str] = None,
+                 username: Optional[str] = None,
+                 password: Optional[str] = None,
+                 client_id: Optional[str] = None,
+                 client_secret: Optional[str] = None,
+                 token: Optional[str] = None,
+                 profile: Optional[str] = None,
+                 config_file: Optional[str] = None,
+                 azure_workspace_resource_id: Optional[str] = None,
+                 azure_client_secret: Optional[str] = None,
+                 azure_client_id: Optional[str] = None,
+                 azure_tenant_id: Optional[str] = None,
+                 azure_environment: Optional[str] = None,
+                 auth_type: Optional[str] = None,
+                 cluster_id: Optional[str] = None,
+                 google_credentials: Optional[str] = None,
+                 google_service_account: Optional[str] = None,
+                 debug_truncate_bytes: Optional[int] = None,
+                 debug_headers: Optional[bool] = None,
                  product="unknown",
                  product_version="0.0.0",
-                 credentials_strategy: CredentialsStrategy = None,
-                 credentials_provider: CredentialsStrategy = None,
-                 config: client.Config = None):
+                 credentials_strategy: Optional[CredentialsStrategy] = None,
+                 credentials_provider: Optional[CredentialsStrategy] = None,
+                 config: Optional[client.Config] = None):
         if not config:
             config = client.Config(host=host,
                                    account_id=account_id,
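
The only change in this file is typing: every keyword argument that previously relied on the implicit-Optional form `host: str = None` now spells out `Optional[...]`. A minimal sketch of why this matters for type checkers; the `connect` helper and fallback URL below are hypothetical, not part of the SDK:

from typing import Optional

# Implicit Optional (`host: str = None`) is rejected by strict checkers such as
# `mypy --no-implicit-optional`; the explicit form below passes unchanged at runtime.
def connect(host: Optional[str] = None, token: Optional[str] = None) -> str:
    # hypothetical fallback, for illustration only
    return host or "https://example.cloud.databricks.com"

print(connect())
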
databricks/sdk/config.py
CHANGED

@@ -92,11 +92,11 @@ class Config:
     def __init__(self,
                  *,
                  # Deprecated. Use credentials_strategy instead.
-                 credentials_provider: CredentialsStrategy = None,
-                 credentials_strategy: CredentialsStrategy = None,
+                 credentials_provider: Optional[CredentialsStrategy] = None,
+                 credentials_strategy: Optional[CredentialsStrategy] = None,
                  product=None,
                  product_version=None,
-                 clock: Clock = None,
+                 clock: Optional[Clock] = None,
                  **kwargs):
         self._header_factory = None
         self._inner = {}
databricks/sdk/core.py
CHANGED

@@ -10,7 +10,7 @@ from .casing import Casing
 from .config import *
 # To preserve backwards compatibility (as these definitions were previously in this module)
 from .credentials_provider import *
-from .errors import DatabricksError, get_api_error
+from .errors import DatabricksError, _ErrorCustomizer, _Parser
 from .logger import RoundTrip
 from .oauth import retrieve_token
 from .retries import retried

@@ -71,6 +71,8 @@ class ApiClient:
         # Default to 60 seconds
         self._http_timeout_seconds = cfg.http_timeout_seconds if cfg.http_timeout_seconds else 60
 
+        self._error_parser = _Parser(extra_error_customizers=[_AddDebugErrorCustomizer(cfg)])
+
     @property
     def account_id(self) -> str:
         return self._cfg.account_id

@@ -219,27 +221,6 @@ class ApiClient:
                 return f'matched {substring}'
         return None
 
-    @classmethod
-    def _parse_retry_after(cls, response: requests.Response) -> Optional[int]:
-        retry_after = response.headers.get("Retry-After")
-        if retry_after is None:
-            # 429 requests should include a `Retry-After` header, but if it's missing,
-            # we default to 1 second.
-            return cls._RETRY_AFTER_DEFAULT
-        # If the request is throttled, try parse the `Retry-After` header and sleep
-        # for the specified number of seconds. Note that this header can contain either
-        # an integer or a RFC1123 datetime string.
-        # See https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Retry-After
-        #
-        # For simplicity, we only try to parse it as an integer, as this is what Databricks
-        # platform returns. Otherwise, we fall back and don't sleep.
-        try:
-            return int(retry_after)
-        except ValueError:
-            logger.debug(f'Invalid Retry-After header received: {retry_after}. Defaulting to 1')
-            # defaulting to 1 sleep second to make self._is_retryable() simpler
-            return cls._RETRY_AFTER_DEFAULT
-
     def _perform(self,
                  method: str,
                  url: str,

@@ -261,15 +242,8 @@ class ApiClient:
                                      stream=raw,
                                      timeout=self._http_timeout_seconds)
         self._record_request_log(response, raw=raw or data is not None or files is not None)
-        error = get_api_error(response)
+        error = self._error_parser.get_api_error(response)
         if error is not None:
-            status_code = response.status_code
-            is_http_unauthorized_or_forbidden = status_code in (401, 403)
-            is_too_many_requests_or_unavailable = status_code in (429, 503)
-            if is_http_unauthorized_or_forbidden:
-                error.message = self._cfg.wrap_debug_info(error.message)
-            if is_too_many_requests_or_unavailable:
-                error.retry_after_secs = self._parse_retry_after(response)
             raise error from None
         return response
 

@@ -279,6 +253,19 @@ class ApiClient:
         logger.debug(RoundTrip(response, self._cfg.debug_headers, self._debug_truncate_bytes, raw).generate())
 
 
+class _AddDebugErrorCustomizer(_ErrorCustomizer):
+    """An error customizer that adds debug information about the configuration to unauthenticated and
+    unauthorized errors."""
+
+    def __init__(self, cfg: Config):
+        self._cfg = cfg
+
+    def customize_error(self, response: requests.Response, kwargs: dict):
+        if response.status_code in (401, 403):
+            message = kwargs.get('message', 'request failed')
+            kwargs['message'] = self._cfg.wrap_debug_info(message)
+
+
 class StreamingResponse(BinaryIO):
     _response: requests.Response
     _buffer: bytes

@@ -411,10 +411,7 @@ class CliTokenSource(Refreshable):
 
     def refresh(self) -> Token:
         try:
-
-            # windows requires shell=True to be able to execute 'az login' or other commands
-            # cannot use shell=True all the time, as it breaks macOS
-            out = subprocess.run(self._cmd, capture_output=True, check=True, shell=is_windows)
+            out = _run_subprocess(self._cmd, capture_output=True, check=True)
             it = json.loads(out.stdout.decode())
             expires_on = self._parse_expiry(it[self._expiry_field])
             return Token(access_token=it[self._access_token_field],

@@ -429,6 +426,26 @@ class CliTokenSource(Refreshable):
             raise IOError(f'cannot get access token: {message}') from e
 
 
+def _run_subprocess(popenargs,
+                    input=None,
+                    capture_output=True,
+                    timeout=None,
+                    check=False,
+                    **kwargs) -> subprocess.CompletedProcess:
+    """Runs subprocess with given arguments.
+    This handles OS-specific modifications that need to be made to the invocation of subprocess.run."""
+    kwargs['shell'] = sys.platform.startswith('win')
+    # windows requires shell=True to be able to execute 'az login' or other commands
+    # cannot use shell=True all the time, as it breaks macOS
+    logging.debug(f'Running command: {" ".join(popenargs)}')
+    return subprocess.run(popenargs,
+                          input=input,
+                          capture_output=capture_output,
+                          timeout=timeout,
+                          check=check,
+                          **kwargs)
+
+
 class AzureCliTokenSource(CliTokenSource):
     """ Obtain the token granted by `az login` CLI command """
 

@@ -437,13 +454,30 @@ class AzureCliTokenSource(CliTokenSource):
         if subscription is not None:
             cmd.append("--subscription")
             cmd.append(subscription)
-        if tenant:
+        if tenant and not self.__is_cli_using_managed_identity():
             cmd.extend(["--tenant", tenant])
         super().__init__(cmd=cmd,
                          token_type_field='tokenType',
                          access_token_field='accessToken',
                          expiry_field='expiresOn')
 
+    @staticmethod
+    def __is_cli_using_managed_identity() -> bool:
+        """Checks whether the current CLI session is authenticated using managed identity."""
+        try:
+            cmd = ["az", "account", "show", "--output", "json"]
+            out = _run_subprocess(cmd, capture_output=True, check=True)
+            account = json.loads(out.stdout.decode())
+            user = account.get("user")
+            if user is None:
+                return False
+            return user.get("type") == "servicePrincipal" and user.get("name") in [
+                'systemAssignedIdentity', 'userAssignedIdentity'
+            ]
+        except subprocess.CalledProcessError as e:
+            logger.debug("Failed to get account information from Azure CLI", exc_info=e)
+            return False
+
     def is_human_user(self) -> bool:
         """The UPN claim is the username of the user, but not the Service Principal.
 
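
Two behavioural changes sit in this file: the per-status tweaks formerly inlined in `_perform` move into the pluggable `_Parser`/`_ErrorCustomizer` machinery (see the `errors/` files below), and CLI invocations are centralized in `_run_subprocess`, which decides the `shell=` flag per platform and also backs the new Azure CLI managed-identity probe. A rough standalone sketch of that platform rule, using only the standard library; the `run_cli` name and the example command are mine, not the SDK's:

import subprocess
import sys

def run_cli(cmd: list) -> subprocess.CompletedProcess:
    # Windows needs shell=True to resolve shims such as 'az' (a .cmd wrapper);
    # macOS/Linux must not use the shell when passing a list-style command.
    return subprocess.run(cmd, capture_output=True, check=True, shell=sys.platform.startswith('win'))

print(run_cli([sys.executable, "--version"]).stdout.decode())
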

databricks/sdk/errors/__init__.py
CHANGED

@@ -1,6 +1,6 @@
 from .base import DatabricksError, ErrorDetail
-from .
-from .parser import get_api_error
+from .customizer import _ErrorCustomizer
+from .parser import _Parser
 from .platform import *
 from .private_link import PrivateLinkValidationError
 from .sdk import *
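
The package's public surface changes accordingly: the module-level `get_api_error` helper is gone, and the new plumbing is importable instead. This is exactly what `core.py` in this same release imports:

from databricks.sdk.errors import DatabricksError, _ErrorCustomizer, _Parser
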

databricks/sdk/errors/customizer.py
ADDED

@@ -0,0 +1,50 @@
+import abc
+import logging
+
+import requests
+
+
+class _ErrorCustomizer(abc.ABC):
+    """A customizer for errors from the Databricks REST API."""
+
+    @abc.abstractmethod
+    def customize_error(self, response: requests.Response, kwargs: dict):
+        """Customize the error constructor parameters."""
+
+
+class _RetryAfterCustomizer(_ErrorCustomizer):
+    """An error customizer that sets the retry_after_secs parameter based on the Retry-After header."""
+
+    _DEFAULT_RETRY_AFTER_SECONDS = 1
+    """The default number of seconds to wait before retrying a request if the Retry-After header is missing or is not
+    a valid integer."""
+
+    @classmethod
+    def _parse_retry_after(cls, response: requests.Response) -> int:
+        retry_after = response.headers.get("Retry-After")
+        if retry_after is None:
+            logging.debug(
+                f'No Retry-After header received in response with status code 429 or 503. Defaulting to {cls._DEFAULT_RETRY_AFTER_SECONDS}'
+            )
+            # 429 requests should include a `Retry-After` header, but if it's missing,
+            # we default to 1 second.
+            return cls._DEFAULT_RETRY_AFTER_SECONDS
+        # If the request is throttled, try parse the `Retry-After` header and sleep
+        # for the specified number of seconds. Note that this header can contain either
+        # an integer or a RFC1123 datetime string.
+        # See https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Retry-After
+        #
+        # For simplicity, we only try to parse it as an integer, as this is what Databricks
+        # platform returns. Otherwise, we fall back and don't sleep.
+        try:
+            return int(retry_after)
+        except ValueError:
+            logging.debug(
+                f'Invalid Retry-After header received: {retry_after}. Defaulting to {cls._DEFAULT_RETRY_AFTER_SECONDS}'
+            )
+            # defaulting to 1 sleep second to make self._is_retryable() simpler
+            return cls._DEFAULT_RETRY_AFTER_SECONDS
+
+    def customize_error(self, response: requests.Response, kwargs: dict):
+        if response.status_code in (429, 503):
+            kwargs['retry_after_secs'] = self._parse_retry_after(response)
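
Customizers receive the raw keyword arguments before the error class is constructed, so a subclass only has to mutate the dict. A hedged sketch of writing one; the class name and the `x-request-id` header are hypothetical, and `_ErrorCustomizer` is a private API that may change:

import requests

from databricks.sdk.errors.customizer import _ErrorCustomizer


class _RequestIdCustomizer(_ErrorCustomizer):
    """Hypothetical: append the server-side request id, when present, to the error message."""

    def customize_error(self, response: requests.Response, kwargs: dict):
        request_id = response.headers.get('x-request-id')
        if request_id:
            kwargs['message'] = f"{kwargs.get('message', 'request failed')} (request id: {request_id})"
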

databricks/sdk/errors/deserializer.py
ADDED

@@ -0,0 +1,106 @@
+import abc
+import json
+import logging
+import re
+from typing import Optional
+
+import requests
+
+
+class _ErrorDeserializer(abc.ABC):
+    """A parser for errors from the Databricks REST API."""
+
+    @abc.abstractmethod
+    def deserialize_error(self, response: requests.Response, response_body: bytes) -> Optional[dict]:
+        """Parses an error from the Databricks REST API. If the error cannot be parsed, returns None."""
+
+
+class _EmptyDeserializer(_ErrorDeserializer):
+    """A parser that handles empty responses."""
+
+    def deserialize_error(self, response: requests.Response, response_body: bytes) -> Optional[dict]:
+        if len(response_body) == 0:
+            return {'message': response.reason}
+        return None
+
+
+class _StandardErrorDeserializer(_ErrorDeserializer):
+    """
+    Parses errors from the Databricks REST API using the standard error format.
+    """
+
+    def deserialize_error(self, response: requests.Response, response_body: bytes) -> Optional[dict]:
+        try:
+            payload_str = response_body.decode('utf-8')
+            resp = json.loads(payload_str)
+        except UnicodeDecodeError as e:
+            logging.debug('_StandardErrorParser: unable to decode response using utf-8', exc_info=e)
+            return None
+        except json.JSONDecodeError as e:
+            logging.debug('_StandardErrorParser: unable to deserialize response as json', exc_info=e)
+            return None
+        if not isinstance(resp, dict):
+            logging.debug('_StandardErrorParser: response is valid JSON but not a dictionary')
+            return None
+
+        error_args = {
+            'message': resp.get('message', 'request failed'),
+            'error_code': resp.get('error_code'),
+            'details': resp.get('details'),
+        }
+
+        # Handle API 1.2-style errors
+        if 'error' in resp:
+            error_args['message'] = resp['error']
+
+        # Handle SCIM Errors
+        detail = resp.get('detail')
+        status = resp.get('status')
+        scim_type = resp.get('scimType')
+        if detail:
+            # Handle SCIM error message details
+            # @see https://tools.ietf.org/html/rfc7644#section-3.7.3
+            if detail == "null":
+                detail = "SCIM API Internal Error"
+            error_args['message'] = f"{scim_type} {detail}".strip(" ")
+            error_args['error_code'] = f"SCIM_{status}"
+        return error_args
+
+
+class _StringErrorDeserializer(_ErrorDeserializer):
+    """
+    Parses errors from the Databricks REST API in the format "ERROR_CODE: MESSAGE".
+    """
+
+    __STRING_ERROR_REGEX = re.compile(r'([A-Z_]+): (.*)')
+
+    def deserialize_error(self, response: requests.Response, response_body: bytes) -> Optional[dict]:
+        payload_str = response_body.decode('utf-8')
+        match = self.__STRING_ERROR_REGEX.match(payload_str)
+        if not match:
+            logging.debug('_StringErrorParser: unable to parse response as string')
+            return None
+        error_code, message = match.groups()
+        return {'error_code': error_code, 'message': message, 'status': response.status_code, }
+
+
+class _HtmlErrorDeserializer(_ErrorDeserializer):
+    """
+    Parses errors from the Databricks REST API in HTML format.
+    """
+
+    __HTML_ERROR_REGEXES = [re.compile(r'<pre>(.*)</pre>'), re.compile(r'<title>(.*)</title>'), ]
+
+    def deserialize_error(self, response: requests.Response, response_body: bytes) -> Optional[dict]:
+        payload_str = response_body.decode('utf-8')
+        for regex in self.__HTML_ERROR_REGEXES:
+            match = regex.search(payload_str)
+            if match:
+                message = match.group(1) if match.group(1) else response.reason
+                return {
+                    'status': response.status_code,
+                    'message': message,
+                    'error_code': response.reason.upper().replace(' ', '_')
+                }
+        logging.debug('_HtmlErrorParser: no <pre> tag found in error response')
+        return None
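
Deserializers turn a response body into the keyword arguments later fed to `_error_mapper`. A quick check of the standard JSON path, assuming the module layout in this release; the error body is fabricated for illustration, and `None` is passed because `_StandardErrorDeserializer` never reads the `response` argument:

import json

from databricks.sdk.errors.deserializer import _StandardErrorDeserializer

body = json.dumps({'error_code': 'RESOURCE_DOES_NOT_EXIST', 'message': 'Cluster 123 does not exist'}).encode('utf-8')
print(_StandardErrorDeserializer().deserialize_error(None, body))
# {'message': 'Cluster 123 does not exist', 'error_code': 'RESOURCE_DOES_NOT_EXIST', 'details': None}
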
databricks/sdk/errors/parser.py
CHANGED

@@ -1,115 +1,32 @@
-import abc
-import json
 import logging
-import re
-from typing import Optional
+from typing import List, Optional
 
 import requests
 
 from ..logger import RoundTrip
 from .base import DatabricksError
+from .customizer import _ErrorCustomizer, _RetryAfterCustomizer
+from .deserializer import (_EmptyDeserializer, _ErrorDeserializer,
+                           _HtmlErrorDeserializer, _StandardErrorDeserializer,
+                           _StringErrorDeserializer)
 from .mapper import _error_mapper
 from .private_link import (_get_private_link_validation_error,
                            _is_private_link_redirect)
 
+# A list of _ErrorDeserializers that are tried in order to parse an API error from a response body. Most errors should
+# be parsable by the _StandardErrorDeserializer, but additional parsers can be added here for specific error formats.
+# The order of the parsers is not important, as the set of errors that can be parsed by each parser should be disjoint.
+_error_deserializers = [
+    _EmptyDeserializer(),
+    _StandardErrorDeserializer(),
+    _StringErrorDeserializer(),
+    _HtmlErrorDeserializer(),
+]
 
-class _ErrorParser(abc.ABC):
-    """A parser for errors from the Databricks REST API."""
-
-    @abc.abstractmethod
-    def parse_error(self, response: requests.Response, response_body: bytes) -> Optional[dict]:
-        """Parses an error from the Databricks REST API. If the error cannot be parsed, returns None."""
-
-
-class _EmptyParser(_ErrorParser):
-    """A parser that handles empty responses."""
-
-    def parse_error(self, response: requests.Response, response_body: bytes) -> Optional[dict]:
-        if len(response_body) == 0:
-            return {'message': response.reason}
-        return None
-
-
-class _StandardErrorParser(_ErrorParser):
-    """
-    Parses errors from the Databricks REST API using the standard error format.
-    """
-
-    def parse_error(self, response: requests.Response, response_body: bytes) -> Optional[dict]:
-        try:
-            payload_str = response_body.decode('utf-8')
-            resp: dict = json.loads(payload_str)
-        except json.JSONDecodeError as e:
-            logging.debug('_StandardErrorParser: unable to deserialize response as json', exc_info=e)
-            return None
-
-        error_args = {
-            'message': resp.get('message', 'request failed'),
-            'error_code': resp.get('error_code'),
-            'details': resp.get('details'),
-        }
-
-        # Handle API 1.2-style errors
-        if 'error' in resp:
-            error_args['message'] = resp['error']
-
-        # Handle SCIM Errors
-        detail = resp.get('detail')
-        status = resp.get('status')
-        scim_type = resp.get('scimType')
-        if detail:
-            # Handle SCIM error message details
-            # @see https://tools.ietf.org/html/rfc7644#section-3.7.3
-            if detail == "null":
-                detail = "SCIM API Internal Error"
-            error_args['message'] = f"{scim_type} {detail}".strip(" ")
-            error_args['error_code'] = f"SCIM_{status}"
-        return error_args
-
-
-class _StringErrorParser(_ErrorParser):
-    """
-    Parses errors from the Databricks REST API in the format "ERROR_CODE: MESSAGE".
-    """
-
-    __STRING_ERROR_REGEX = re.compile(r'([A-Z_]+): (.*)')
-
-    def parse_error(self, response: requests.Response, response_body: bytes) -> Optional[dict]:
-        payload_str = response_body.decode('utf-8')
-        match = self.__STRING_ERROR_REGEX.match(payload_str)
-        if not match:
-            logging.debug('_StringErrorParser: unable to parse response as string')
-            return None
-        error_code, message = match.groups()
-        return {'error_code': error_code, 'message': message, 'status': response.status_code, }
-
-
-class _HtmlErrorParser(_ErrorParser):
-    """
-    Parses errors from the Databricks REST API in HTML format.
-    """
-
-    __HTML_ERROR_REGEXES = [re.compile(r'<pre>(.*)</pre>'), re.compile(r'<title>(.*)</title>'), ]
-
-    def parse_error(self, response: requests.Response, response_body: bytes) -> Optional[dict]:
-        payload_str = response_body.decode('utf-8')
-        for regex in self.__HTML_ERROR_REGEXES:
-            match = regex.search(payload_str)
-            if match:
-                message = match.group(1) if match.group(1) else response.reason
-                return {
-                    'status': response.status_code,
-                    'message': message,
-                    'error_code': response.reason.upper().replace(' ', '_')
-                }
-        logging.debug('_HtmlErrorParser: no <pre> tag found in error response')
-        return None
-
-
-# A list of ErrorParsers that are tried in order to parse an API error from a response body. Most errors should be
-# parsable by the _StandardErrorParser, but additional parsers can be added here for specific error formats. The order
-# of the parsers is not important, as the set of errors that can be parsed by each parser should be disjoint.
-_error_parsers = [_EmptyParser(), _StandardErrorParser(), _StringErrorParser(), _HtmlErrorParser(), ]
+# A list of _ErrorCustomizers that are applied to the error arguments after they are parsed. Customizers can modify the
+# error arguments in any way, including adding or removing fields. Customizers are applied in order, so later
+# customizers can override the changes made by earlier customizers.
+_error_customizers = [_RetryAfterCustomizer(), ]
 
 
 def _unknown_error(response: requests.Response) -> str:

@@ -124,24 +41,43 @@ def _unknown_error(response: requests.Response) -> str:
            f'https://github.com/databricks/databricks-sdk-go/issues. Request log:```{request_log}```')
 
 
-def get_api_error(response: requests.Response) -> Optional[DatabricksError]:
+class _Parser:
     """
-
-
-
+    A parser for errors from the Databricks REST API. It attempts to deserialize an error using a sequence of
+    deserializers, and then customizes the deserialized error using a sequence of customizers. If the error cannot be
+    deserialized, it returns a generic error with debugging information and instructions to report the issue to the SDK
+    issue tracker.
     """
-    if not response.ok:
-        content = response.content
-        for parser in _error_parsers:
-            try:
-                error_args = parser.parse_error(response, content)
-                if error_args:
-                    return _error_mapper(response, error_args)
-            except Exception as e:
-                logging.debug(f'Error parsing response with {parser}, continuing', exc_info=e)
-        return _error_mapper(response, {'message': 'unable to parse response. ' + _unknown_error(response)})
 
-    # Private link failures happen via a redirect to the login page. From a requests-perspective, the request
-    # is successful, but the response is not what we expect. We need to handle this case separately.
-    if _is_private_link_redirect(response):
-        return _get_private_link_validation_error(response.url)
+    def __init__(self,
+                 extra_error_parsers: List[_ErrorDeserializer] = [],
+                 extra_error_customizers: List[_ErrorCustomizer] = []):
+        self._error_parsers = _error_deserializers + (extra_error_parsers
+                                                      if extra_error_parsers is not None else [])
+        self._error_customizers = _error_customizers + (extra_error_customizers
+                                                        if extra_error_customizers is not None else [])
+
+    def get_api_error(self, response: requests.Response) -> Optional[DatabricksError]:
+        """
+        Handles responses from the REST API and returns a DatabricksError if the response indicates an error.
+        :param response: The response from the REST API.
+        :return: A DatabricksError if the response indicates an error, otherwise None.
+        """
+        if not response.ok:
+            content = response.content
+            for parser in self._error_parsers:
+                try:
+                    error_args = parser.deserialize_error(response, content)
+                    if error_args:
+                        for customizer in self._error_customizers:
+                            customizer.customize_error(response, error_args)
+                        return _error_mapper(response, error_args)
+                except Exception as e:
+                    logging.debug(f'Error parsing response with {parser}, continuing', exc_info=e)
+            return _error_mapper(response,
                                 {'message': 'unable to parse response. ' + _unknown_error(response)})
+
+        # Private link failures happen via a redirect to the login page. From a requests-perspective, the request
+        # is successful, but the response is not what we expect. We need to handle this case separately.
+        if _is_private_link_redirect(response):
+            return _get_private_link_validation_error(response.url)
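
`_Parser` ties the two lists together: each deserializer is tried until one yields constructor arguments, every customizer then adjusts them, and the result goes through `_error_mapper`. A sketch of extending it with the constructor shown above; the plain-text deserializer is hypothetical and these are private APIs:

import requests

from databricks.sdk.errors import _Parser
from databricks.sdk.errors.deserializer import _ErrorDeserializer


class _PlainTextDeserializer(_ErrorDeserializer):
    """Hypothetical: treat any non-empty body as the error message."""

    def deserialize_error(self, response: requests.Response, response_body: bytes):
        text = response_body.decode('utf-8', errors='replace').strip()
        return {'message': text} if text else None


parser = _Parser(extra_error_parsers=[_PlainTextDeserializer()])
# error = parser.get_api_error(response)  # DatabricksError subclass for failed responses, else None
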

databricks/sdk/logger/round_trip_logger.py
CHANGED

@@ -48,7 +48,8 @@ class RoundTrip:
                 # Raw streams with `Transfer-Encoding: chunked` do not have `Content-Type` header
                 sb.append("< [raw stream]")
             elif self._response.content:
-                sb.append(self._redacted_dump("< ", self._response.content.decode('utf-8')))
+                decoded = self._response.content.decode('utf-8', errors='replace')
+                sb.append(self._redacted_dump("< ", decoded))
         return '\n'.join(sb)
 
     @staticmethod
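
The only change here: response bodies are decoded with `errors='replace'` before redaction, so a binary or mis-encoded payload no longer blows up debug logging. The standard-library behaviour it relies on:

raw = b'partial \xff\xfe binary payload'
print(raw.decode('utf-8', errors='replace'))  # undecodable bytes become U+FFFD instead of raising
# raw.decode('utf-8') would raise UnicodeDecodeError here
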
databricks/sdk/mixins/files.py
CHANGED

@@ -167,7 +167,7 @@ class _DbfsIO(BinaryIO):
         return f"<_DbfsIO {self._path} {'read' if self.readable() else 'write'}=True>"
 
 
-class _VolumesIO(BinaryIO):
+class _FilesIO(BinaryIO):
 
     def __init__(self, api: files.FilesAPI, path: str, *, read: bool, write: bool, overwrite: bool):
         self._buffer = []

@@ -262,7 +262,7 @@ class _VolumesIO(BinaryIO):
         self.close()
 
     def __repr__(self) -> str:
-        return f"<_VolumesIO {self._path} {'read' if self.readable() else 'write'}=True>"
+        return f"<_FilesIO {self._path} {'read' if self.readable() else 'write'}=True>"
 
 
 class _Path(ABC):

@@ -398,7 +398,7 @@ class _LocalPath(_Path):
         return f'<_LocalPath {self._path}>'
 
 
-class _VolumesPath(_Path):
+class _FilesPath(_Path):
 
     def __init__(self, api: files.FilesAPI, src: Union[str, pathlib.Path]):
         self._path = pathlib.PurePosixPath(str(src).replace('dbfs:', '').replace('file:', ''))

@@ -411,7 +411,7 @@ class _VolumesPath(_Path):
         return False
 
     def child(self, path: str) -> Self:
-        return _VolumesPath(self._api, str(self._path / path))
+        return _FilesPath(self._api, str(self._path / path))
 
     def _is_dir(self) -> bool:
         try:

@@ -431,7 +431,7 @@ class _VolumesPath(_Path):
         return self.is_dir
 
     def open(self, *, read=False, write=False, overwrite=False) -> BinaryIO:
-        return _VolumesIO(self._api, self.as_string, read=read, write=write, overwrite=overwrite)
+        return _FilesIO(self._api, self.as_string, read=read, write=write, overwrite=overwrite)
 
     def list(self, *, recursive=False) -> Generator[files.FileInfo, None, None]:
         if not self.is_dir:

@@ -458,13 +458,13 @@ class _VolumesPath(_Path):
     def delete(self, *, recursive=False):
         if self.is_dir:
             for entry in self.list(recursive=False):
-                _VolumesPath(self._api, entry.path).delete(recursive=True)
+                _FilesPath(self._api, entry.path).delete(recursive=True)
             self._api.delete_directory(self.as_string)
         else:
             self._api.delete(self.as_string)
 
     def __repr__(self) -> str:
-        return f'<_VolumesPath {self._path}>'
+        return f'<_FilesPath {self._path}>'
 
 
 class _DbfsPath(_Path):

@@ -589,8 +589,8 @@ class DbfsExt(files.DbfsAPI):
                              'UC Volumes paths, not external locations or DBFS mount points.')
         if src.scheme == 'file':
             return _LocalPath(src.geturl())
-        if src.path.startswith('/Volumes'):
-            return _VolumesPath(self._files_api, src.geturl())
+        if src.path.startswith(('/Volumes', '/Models')):
+            return _FilesPath(self._files_api, src.geturl())
         return _DbfsPath(self._dbfs_api, src.geturl())
 
     def copy(self, src: str, dst: str, *, recursive=False, overwrite=False):
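
`_VolumesIO`/`_VolumesPath` are renamed to `_FilesIO`/`_FilesPath`, and `DbfsExt` now routes `/Models` paths through the Files API exactly like `/Volumes` paths. A hedged usage sketch; the workspace paths and local files are made up, and whether a copy succeeds still depends on the target workspace:

from databricks.sdk import WorkspaceClient

w = WorkspaceClient()
# Both destinations now resolve to _FilesPath and use the Files API under the hood.
w.dbfs.copy('file:/tmp/model.bin', '/Models/main/default/my_model/model.bin', overwrite=True)
w.dbfs.copy('file:/tmp/data.csv', '/Volumes/main/default/my_volume/data.csv', overwrite=True)
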
databricks/sdk/version.py
CHANGED

@@ -1 +1 @@
-__version__ = '0.32.1'
+__version__ = '0.32.2'

{databricks_sdk-0.32.1.dist-info → databricks_sdk-0.32.2.dist-info}/RECORD
CHANGED

@@ -1,12 +1,12 @@
 databricks/__init__.py,sha256=CF2MJcZFwbpn9TwQER8qnCDhkPooBGQNVkX4v7g6p3g,537
-databricks/sdk/__init__.py,sha256=
+databricks/sdk/__init__.py,sha256=5RHQoyAUIQoZ4SVHO95eW79WxzkQ3m-N7yX7LhnlRlw,48310
 databricks/sdk/_property.py,sha256=sGjsipeFrjMBSVPjtIb0HNCRcMIhFpVx6wq4BkC3LWs,1636
 databricks/sdk/azure.py,sha256=8P7nEdun0hbQCap9Ojo7yZse_JHxnhYsE6ApojnPz7Q,1009
 databricks/sdk/casing.py,sha256=NKYPrfPbQjM7lU4hhNQK3z1jb_VEA29BfH4FEdby2tg,1137
 databricks/sdk/clock.py,sha256=Ivlow0r_TkXcTJ8UXkxSA0czKrY0GvwHAeOvjPkJnAQ,1360
-databricks/sdk/config.py,sha256=
-databricks/sdk/core.py,sha256=
-databricks/sdk/credentials_provider.py,sha256=
+databricks/sdk/config.py,sha256=UaD-UcgvvohbrDmvbQgUt-KFd8FP1w3iWvaocsoIz9k,21169
+databricks/sdk/core.py,sha256=PL3RdTMEOtcy4KPt6TKuL6ylm8RaHBXiqGbYwjRu5Gk,14679
+databricks/sdk/credentials_provider.py,sha256=8AEDLUgQfCLA5ODVGYsS_70N61a9yYv7137G8wDilsY,30781
 databricks/sdk/data_plane.py,sha256=Er2z2fT-KVupJKzGozGGZ-jCQ3AmDWq-DZppahIK6tU,2591
 databricks/sdk/dbutils.py,sha256=HFCuB-el6SFKhF8qRfJxYANtyLTm-VG9GtQuQgZXFkM,15741
 databricks/sdk/environments.py,sha256=5KoVuVfF-ZX17rua1sH3EJCCtniVrREXBXsMNDEV-UU,4293

@@ -14,23 +14,25 @@ databricks/sdk/oauth.py,sha256=KzcJPYLL3JL6RDvf_Q8SDAaF9xSaoYNCRD4rYInZDuo,18319
 databricks/sdk/py.typed,sha256=pSvaHpbY1UPNEXyVFUjlgBhjPFZMmVC_UNrPC7eMOHI,74
 databricks/sdk/retries.py,sha256=WgLh12bwdBc6fCQlaig3kKu18cVhPzFDGsspvq629Ew,2454
 databricks/sdk/useragent.py,sha256=I2-VnJSE6cg9QV4GXkoQSkHsEB3bDvRGgkawbBNl4G0,5540
-databricks/sdk/version.py,sha256=
+databricks/sdk/version.py,sha256=mteW834a5f5Gxn1SNzRvKxq9Y4nN1qsJoM5ubib79tI,23
 databricks/sdk/_widgets/__init__.py,sha256=Qm3JB8LmdPgEn_-VgxKkodTO4gn6OdaDPwsYcDmeIRI,2667
 databricks/sdk/_widgets/default_widgets_utils.py,sha256=Rk59AFzVYVpOektB_yC_7j-vSt5OdtZA85IlG0kw0xA,1202
 databricks/sdk/_widgets/ipywidgets_utils.py,sha256=P-AyGeahPiX3S59mxpAMgffi4gyJ0irEOY7Ekkn9nQ0,2850
-databricks/sdk/errors/__init__.py,sha256=
+databricks/sdk/errors/__init__.py,sha256=WBEGgTRWM41A8VPGzIg59Ph5EVIH7n3rR3Jq3wa88A8,212
 databricks/sdk/errors/base.py,sha256=eSOKUZ5t8e_S6OFrsEyzx-vraQ0PYemsP98H9Md53M4,4893
+databricks/sdk/errors/customizer.py,sha256=rHMJr04-v11u6mLVu07uwKOXDuBWCuXfVfXSndqWOi4,2211
+databricks/sdk/errors/deserializer.py,sha256=pQ7G0HMiDw_QXaLQ4xKk10GAsISBlHrmqCOLewnRNWE,3999
 databricks/sdk/errors/mapper.py,sha256=G52KAcRfDFUOjgS-gvh8_X_3FXqN1P5Mmgi6F0VAb5k,1162
 databricks/sdk/errors/overrides.py,sha256=u1fZ1X2gPRv_zf1u_4EqVzbWHiFsPzm_X0sMNOCMwAE,1649
-databricks/sdk/errors/parser.py,sha256=
+databricks/sdk/errors/parser.py,sha256=JEPNGEyFA75CBOXnLgLUviy4IHmpbN7sGzRYux3-SME,4438
 databricks/sdk/errors/platform.py,sha256=0EwGUTcmoobAK41KsFAnRkT6AlOY_umzr4jWEgd-6hY,3113
 databricks/sdk/errors/private_link.py,sha256=-cDxHSm7MBpdaEFgDGvbrW4dxCRVQwSunGhwe5Ay80g,2314
 databricks/sdk/errors/sdk.py,sha256=_euMruhvquB0v_SKtgqxJUiyXHWuTb4Jl7ji6_h0E_A,109
 databricks/sdk/logger/__init__.py,sha256=0_sSQfDkaFGqMHZUVw-g_Ax-RFmOv0Z6NjxCVAeUSO0,41
-databricks/sdk/logger/round_trip_logger.py,sha256=
+databricks/sdk/logger/round_trip_logger.py,sha256=SMtHDfdqy5Noge2iZO-LpuEm92rz3A5ANfzRzPe6qEU,4794
 databricks/sdk/mixins/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 databricks/sdk/mixins/compute.py,sha256=khb00BzBckc4RLUF4-GnNMCSO5lXKt_XYMM3IhiUxlA,11237
-databricks/sdk/mixins/files.py,sha256=
+databricks/sdk/mixins/files.py,sha256=dUVheOusKIy6idj7E4g4vsi4q7NZw-_Q6sBoNw_MVw4,20539
 databricks/sdk/mixins/workspace.py,sha256=dWMNvuEi8jJ5wMhrDt1LiqxNdWSsmEuDTzrcZR-eJzY,4896
 databricks/sdk/runtime/__init__.py,sha256=9NnZkBzeZXZRQxcE1qKzAszQEzcpIgpL7lQzW3_kxEU,7266
 databricks/sdk/runtime/dbutils_stub.py,sha256=UFbRZF-bBcwxjbv_pxma00bjNtktLLaYpo8oHRc4-9g,11421

@@ -55,9 +57,9 @@ databricks/sdk/service/sharing.py,sha256=kalJYd0v1SwuGhlCaq4l2ZhzNlev9OwNbCXFIOK
 databricks/sdk/service/sql.py,sha256=RaXIYMDtHbhvB7gtSMyvQsqiO_E0cMz5NXeTsrqtPVk,334558
 databricks/sdk/service/vectorsearch.py,sha256=a5Y4vrS_oAJJqa69XwKMANhGuZi5glS0PSXBXz1bKGU,62961
 databricks/sdk/service/workspace.py,sha256=FKLf5esRmfFstIXo7HQg6HQCzQ2svrb6ulr8yzZ7-8U,101182
-databricks_sdk-0.32.
-databricks_sdk-0.32.
-databricks_sdk-0.32.
-databricks_sdk-0.32.
-databricks_sdk-0.32.
-databricks_sdk-0.32.
+databricks_sdk-0.32.2.dist-info/LICENSE,sha256=afBgTZo-JsYqj4VOjnejBetMuHKcFR30YobDdpVFkqY,11411
+databricks_sdk-0.32.2.dist-info/METADATA,sha256=MDIMnh2mylfaS-wyBerMRQvfRuBPl08p7mWbKu8stPk,37967
+databricks_sdk-0.32.2.dist-info/NOTICE,sha256=Qnc0m8JjZNTDV80y0h1aJGvsr4GqM63m1nr2VTypg6E,963
+databricks_sdk-0.32.2.dist-info/WHEEL,sha256=GV9aMThwP_4oNCtvEC2ec3qUYutgWeAzklro_0m4WJQ,91
+databricks_sdk-0.32.2.dist-info/top_level.txt,sha256=7kRdatoSgU0EUurRQJ_3F1Nv4EOSHWAr6ng25tJOJKU,11
+databricks_sdk-0.32.2.dist-info/RECORD,,

{databricks_sdk-0.32.1.dist-info → databricks_sdk-0.32.2.dist-info}/LICENSE
File without changes

{databricks_sdk-0.32.1.dist-info → databricks_sdk-0.32.2.dist-info}/NOTICE
File without changes

{databricks_sdk-0.32.1.dist-info → databricks_sdk-0.32.2.dist-info}/top_level.txt
File without changes