databricks-sdk 0.31.1__tar.gz → 0.32.1__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of databricks-sdk might be problematic. Click here for more details.
- {databricks_sdk-0.31.1/databricks_sdk.egg-info → databricks_sdk-0.32.1}/PKG-INFO +1 -1
- {databricks_sdk-0.31.1 → databricks_sdk-0.32.1}/databricks/sdk/core.py +17 -130
- {databricks_sdk-0.31.1 → databricks_sdk-0.32.1}/databricks/sdk/errors/__init__.py +2 -1
- {databricks_sdk-0.31.1 → databricks_sdk-0.32.1}/databricks/sdk/errors/base.py +32 -2
- {databricks_sdk-0.31.1 → databricks_sdk-0.32.1}/databricks/sdk/errors/mapper.py +1 -5
- databricks_sdk-0.32.1/databricks/sdk/errors/parser.py +147 -0
- {databricks_sdk-0.31.1 → databricks_sdk-0.32.1}/databricks/sdk/errors/private_link.py +1 -1
- databricks_sdk-0.32.1/databricks/sdk/logger/__init__.py +1 -0
- databricks_sdk-0.32.1/databricks/sdk/logger/round_trip_logger.py +118 -0
- {databricks_sdk-0.31.1 → databricks_sdk-0.32.1}/databricks/sdk/service/catalog.py +85 -4
- {databricks_sdk-0.31.1 → databricks_sdk-0.32.1}/databricks/sdk/service/iam.py +8 -8
- {databricks_sdk-0.31.1 → databricks_sdk-0.32.1}/databricks/sdk/service/jobs.py +246 -4
- {databricks_sdk-0.31.1 → databricks_sdk-0.32.1}/databricks/sdk/service/ml.py +8 -2
- {databricks_sdk-0.31.1 → databricks_sdk-0.32.1}/databricks/sdk/service/serving.py +20 -8
- {databricks_sdk-0.31.1 → databricks_sdk-0.32.1}/databricks/sdk/service/vectorsearch.py +8 -1
- databricks_sdk-0.32.1/databricks/sdk/version.py +1 -0
- {databricks_sdk-0.31.1 → databricks_sdk-0.32.1/databricks_sdk.egg-info}/PKG-INFO +1 -1
- {databricks_sdk-0.31.1 → databricks_sdk-0.32.1}/databricks_sdk.egg-info/SOURCES.txt +3 -0
- databricks_sdk-0.32.1/tests/test_errors.py +126 -0
- databricks_sdk-0.31.1/databricks/sdk/version.py +0 -1
- databricks_sdk-0.31.1/tests/test_errors.py +0 -97
- {databricks_sdk-0.31.1 → databricks_sdk-0.32.1}/LICENSE +0 -0
- {databricks_sdk-0.31.1 → databricks_sdk-0.32.1}/NOTICE +0 -0
- {databricks_sdk-0.31.1 → databricks_sdk-0.32.1}/README.md +0 -0
- {databricks_sdk-0.31.1 → databricks_sdk-0.32.1}/databricks/__init__.py +0 -0
- {databricks_sdk-0.31.1 → databricks_sdk-0.32.1}/databricks/sdk/__init__.py +0 -0
- {databricks_sdk-0.31.1 → databricks_sdk-0.32.1}/databricks/sdk/_property.py +0 -0
- {databricks_sdk-0.31.1 → databricks_sdk-0.32.1}/databricks/sdk/_widgets/__init__.py +0 -0
- {databricks_sdk-0.31.1 → databricks_sdk-0.32.1}/databricks/sdk/_widgets/default_widgets_utils.py +0 -0
- {databricks_sdk-0.31.1 → databricks_sdk-0.32.1}/databricks/sdk/_widgets/ipywidgets_utils.py +0 -0
- {databricks_sdk-0.31.1 → databricks_sdk-0.32.1}/databricks/sdk/azure.py +0 -0
- {databricks_sdk-0.31.1 → databricks_sdk-0.32.1}/databricks/sdk/casing.py +0 -0
- {databricks_sdk-0.31.1 → databricks_sdk-0.32.1}/databricks/sdk/clock.py +0 -0
- {databricks_sdk-0.31.1 → databricks_sdk-0.32.1}/databricks/sdk/config.py +0 -0
- {databricks_sdk-0.31.1 → databricks_sdk-0.32.1}/databricks/sdk/credentials_provider.py +0 -0
- {databricks_sdk-0.31.1 → databricks_sdk-0.32.1}/databricks/sdk/data_plane.py +0 -0
- {databricks_sdk-0.31.1 → databricks_sdk-0.32.1}/databricks/sdk/dbutils.py +0 -0
- {databricks_sdk-0.31.1 → databricks_sdk-0.32.1}/databricks/sdk/environments.py +0 -0
- {databricks_sdk-0.31.1 → databricks_sdk-0.32.1}/databricks/sdk/errors/overrides.py +0 -0
- {databricks_sdk-0.31.1 → databricks_sdk-0.32.1}/databricks/sdk/errors/platform.py +0 -0
- {databricks_sdk-0.31.1 → databricks_sdk-0.32.1}/databricks/sdk/errors/sdk.py +0 -0
- {databricks_sdk-0.31.1 → databricks_sdk-0.32.1}/databricks/sdk/mixins/__init__.py +0 -0
- {databricks_sdk-0.31.1 → databricks_sdk-0.32.1}/databricks/sdk/mixins/compute.py +0 -0
- {databricks_sdk-0.31.1 → databricks_sdk-0.32.1}/databricks/sdk/mixins/files.py +0 -0
- {databricks_sdk-0.31.1 → databricks_sdk-0.32.1}/databricks/sdk/mixins/workspace.py +0 -0
- {databricks_sdk-0.31.1 → databricks_sdk-0.32.1}/databricks/sdk/oauth.py +0 -0
- {databricks_sdk-0.31.1 → databricks_sdk-0.32.1}/databricks/sdk/py.typed +0 -0
- {databricks_sdk-0.31.1 → databricks_sdk-0.32.1}/databricks/sdk/retries.py +0 -0
- {databricks_sdk-0.31.1 → databricks_sdk-0.32.1}/databricks/sdk/runtime/__init__.py +0 -0
- {databricks_sdk-0.31.1 → databricks_sdk-0.32.1}/databricks/sdk/runtime/dbutils_stub.py +0 -0
- {databricks_sdk-0.31.1 → databricks_sdk-0.32.1}/databricks/sdk/service/__init__.py +0 -0
- {databricks_sdk-0.31.1 → databricks_sdk-0.32.1}/databricks/sdk/service/_internal.py +0 -0
- {databricks_sdk-0.31.1 → databricks_sdk-0.32.1}/databricks/sdk/service/apps.py +0 -0
- {databricks_sdk-0.31.1 → databricks_sdk-0.32.1}/databricks/sdk/service/billing.py +0 -0
- {databricks_sdk-0.31.1 → databricks_sdk-0.32.1}/databricks/sdk/service/compute.py +0 -0
- {databricks_sdk-0.31.1 → databricks_sdk-0.32.1}/databricks/sdk/service/dashboards.py +0 -0
- {databricks_sdk-0.31.1 → databricks_sdk-0.32.1}/databricks/sdk/service/files.py +0 -0
- {databricks_sdk-0.31.1 → databricks_sdk-0.32.1}/databricks/sdk/service/marketplace.py +0 -0
- {databricks_sdk-0.31.1 → databricks_sdk-0.32.1}/databricks/sdk/service/oauth2.py +0 -0
- {databricks_sdk-0.31.1 → databricks_sdk-0.32.1}/databricks/sdk/service/pipelines.py +0 -0
- {databricks_sdk-0.31.1 → databricks_sdk-0.32.1}/databricks/sdk/service/provisioning.py +0 -0
- {databricks_sdk-0.31.1 → databricks_sdk-0.32.1}/databricks/sdk/service/settings.py +0 -0
- {databricks_sdk-0.31.1 → databricks_sdk-0.32.1}/databricks/sdk/service/sharing.py +0 -0
- {databricks_sdk-0.31.1 → databricks_sdk-0.32.1}/databricks/sdk/service/sql.py +0 -0
- {databricks_sdk-0.31.1 → databricks_sdk-0.32.1}/databricks/sdk/service/workspace.py +0 -0
- {databricks_sdk-0.31.1 → databricks_sdk-0.32.1}/databricks/sdk/useragent.py +0 -0
- {databricks_sdk-0.31.1 → databricks_sdk-0.32.1}/databricks_sdk.egg-info/dependency_links.txt +0 -0
- {databricks_sdk-0.31.1 → databricks_sdk-0.32.1}/databricks_sdk.egg-info/requires.txt +0 -0
- {databricks_sdk-0.31.1 → databricks_sdk-0.32.1}/databricks_sdk.egg-info/top_level.txt +0 -0
- {databricks_sdk-0.31.1 → databricks_sdk-0.32.1}/setup.cfg +0 -0
- {databricks_sdk-0.31.1 → databricks_sdk-0.32.1}/setup.py +0 -0
- {databricks_sdk-0.31.1 → databricks_sdk-0.32.1}/tests/test_auth.py +0 -0
- {databricks_sdk-0.31.1 → databricks_sdk-0.32.1}/tests/test_auth_manual_tests.py +0 -0
- {databricks_sdk-0.31.1 → databricks_sdk-0.32.1}/tests/test_client.py +0 -0
- {databricks_sdk-0.31.1 → databricks_sdk-0.32.1}/tests/test_compute_mixins.py +0 -0
- {databricks_sdk-0.31.1 → databricks_sdk-0.32.1}/tests/test_config.py +0 -0
- {databricks_sdk-0.31.1 → databricks_sdk-0.32.1}/tests/test_core.py +0 -0
- {databricks_sdk-0.31.1 → databricks_sdk-0.32.1}/tests/test_data_plane.py +0 -0
- {databricks_sdk-0.31.1 → databricks_sdk-0.32.1}/tests/test_dbfs_mixins.py +0 -0
- {databricks_sdk-0.31.1 → databricks_sdk-0.32.1}/tests/test_dbutils.py +0 -0
- {databricks_sdk-0.31.1 → databricks_sdk-0.32.1}/tests/test_environments.py +0 -0
- {databricks_sdk-0.31.1 → databricks_sdk-0.32.1}/tests/test_init_file.py +0 -0
- {databricks_sdk-0.31.1 → databricks_sdk-0.32.1}/tests/test_internal.py +0 -0
- {databricks_sdk-0.31.1 → databricks_sdk-0.32.1}/tests/test_jobs.py +0 -0
- {databricks_sdk-0.31.1 → databricks_sdk-0.32.1}/tests/test_metadata_service_auth.py +0 -0
- {databricks_sdk-0.31.1 → databricks_sdk-0.32.1}/tests/test_misc.py +0 -0
- {databricks_sdk-0.31.1 → databricks_sdk-0.32.1}/tests/test_oauth.py +0 -0
- {databricks_sdk-0.31.1 → databricks_sdk-0.32.1}/tests/test_retries.py +0 -0
- {databricks_sdk-0.31.1 → databricks_sdk-0.32.1}/tests/test_user_agent.py +0 -0
|
@@ -1,7 +1,5 @@
|
|
|
1
1
|
import re
|
|
2
|
-
import urllib.parse
|
|
3
2
|
from datetime import timedelta
|
|
4
|
-
from json import JSONDecodeError
|
|
5
3
|
from types import TracebackType
|
|
6
4
|
from typing import Any, BinaryIO, Iterator, Type
|
|
7
5
|
from urllib.parse import urlencode
|
|
@@ -12,8 +10,8 @@ from .casing import Casing
|
|
|
12
10
|
from .config import *
|
|
13
11
|
# To preserve backwards compatibility (as these definitions were previously in this module)
|
|
14
12
|
from .credentials_provider import *
|
|
15
|
-
from .errors import DatabricksError,
|
|
16
|
-
from .
|
|
13
|
+
from .errors import DatabricksError, get_api_error
|
|
14
|
+
from .logger import RoundTrip
|
|
17
15
|
from .oauth import retrieve_token
|
|
18
16
|
from .retries import retried
|
|
19
17
|
|
|
@@ -262,134 +260,23 @@ class ApiClient:
|
|
|
262
260
|
auth=auth,
|
|
263
261
|
stream=raw,
|
|
264
262
|
timeout=self._http_timeout_seconds)
|
|
265
|
-
|
|
266
|
-
|
|
267
|
-
|
|
268
|
-
|
|
269
|
-
|
|
270
|
-
|
|
271
|
-
|
|
272
|
-
|
|
273
|
-
|
|
274
|
-
|
|
275
|
-
|
|
276
|
-
|
|
277
|
-
|
|
278
|
-
|
|
279
|
-
if not message:
|
|
280
|
-
message = response.reason
|
|
281
|
-
raise self._make_nicer_error(response=response, message=message) from None
|
|
282
|
-
|
|
283
|
-
@staticmethod
|
|
284
|
-
def _make_sense_from_html(txt: str) -> str:
|
|
285
|
-
matchers = [r'<pre>(.*)</pre>', r'<title>(.*)</title>']
|
|
286
|
-
for attempt in matchers:
|
|
287
|
-
expr = re.compile(attempt, re.MULTILINE)
|
|
288
|
-
match = expr.search(txt)
|
|
289
|
-
if not match:
|
|
290
|
-
continue
|
|
291
|
-
return match.group(1).strip()
|
|
292
|
-
return txt
|
|
293
|
-
|
|
294
|
-
def _make_nicer_error(self, *, response: requests.Response, **kwargs) -> DatabricksError:
|
|
295
|
-
status_code = response.status_code
|
|
296
|
-
message = kwargs.get('message', 'request failed')
|
|
297
|
-
is_http_unauthorized_or_forbidden = status_code in (401, 403)
|
|
298
|
-
is_too_many_requests_or_unavailable = status_code in (429, 503)
|
|
299
|
-
if is_http_unauthorized_or_forbidden:
|
|
300
|
-
message = self._cfg.wrap_debug_info(message)
|
|
301
|
-
if is_too_many_requests_or_unavailable:
|
|
302
|
-
kwargs['retry_after_secs'] = self._parse_retry_after(response)
|
|
303
|
-
kwargs['message'] = message
|
|
304
|
-
return error_mapper(response, kwargs)
|
|
305
|
-
|
|
306
|
-
def _record_request_log(self, response: requests.Response, raw=False):
|
|
263
|
+
self._record_request_log(response, raw=raw or data is not None or files is not None)
|
|
264
|
+
error = get_api_error(response)
|
|
265
|
+
if error is not None:
|
|
266
|
+
status_code = response.status_code
|
|
267
|
+
is_http_unauthorized_or_forbidden = status_code in (401, 403)
|
|
268
|
+
is_too_many_requests_or_unavailable = status_code in (429, 503)
|
|
269
|
+
if is_http_unauthorized_or_forbidden:
|
|
270
|
+
error.message = self._cfg.wrap_debug_info(error.message)
|
|
271
|
+
if is_too_many_requests_or_unavailable:
|
|
272
|
+
error.retry_after_secs = self._parse_retry_after(response)
|
|
273
|
+
raise error from None
|
|
274
|
+
return response
|
|
275
|
+
|
|
276
|
+
def _record_request_log(self, response: requests.Response, raw: bool = False) -> None:
|
|
307
277
|
if not logger.isEnabledFor(logging.DEBUG):
|
|
308
278
|
return
|
|
309
|
-
|
|
310
|
-
url = urllib.parse.urlparse(request.url)
|
|
311
|
-
query = ''
|
|
312
|
-
if url.query:
|
|
313
|
-
query = f'?{urllib.parse.unquote(url.query)}'
|
|
314
|
-
sb = [f'{request.method} {urllib.parse.unquote(url.path)}{query}']
|
|
315
|
-
if self._cfg.debug_headers:
|
|
316
|
-
if self._cfg.host:
|
|
317
|
-
sb.append(f'> * Host: {self._cfg.host}')
|
|
318
|
-
for k, v in request.headers.items():
|
|
319
|
-
sb.append(f'> * {k}: {self._only_n_bytes(v, self._debug_truncate_bytes)}')
|
|
320
|
-
if request.body:
|
|
321
|
-
sb.append("> [raw stream]" if raw else self._redacted_dump("> ", request.body))
|
|
322
|
-
sb.append(f'< {response.status_code} {response.reason}')
|
|
323
|
-
if raw and response.headers.get('Content-Type', None) != 'application/json':
|
|
324
|
-
# Raw streams with `Transfer-Encoding: chunked` do not have `Content-Type` header
|
|
325
|
-
sb.append("< [raw stream]")
|
|
326
|
-
elif response.content:
|
|
327
|
-
sb.append(self._redacted_dump("< ", response.content))
|
|
328
|
-
logger.debug("\n".join(sb))
|
|
329
|
-
|
|
330
|
-
@staticmethod
|
|
331
|
-
def _mask(m: Dict[str, any]):
|
|
332
|
-
for k in m:
|
|
333
|
-
if k in {'bytes_value', 'string_value', 'token_value', 'value', 'content'}:
|
|
334
|
-
m[k] = "**REDACTED**"
|
|
335
|
-
|
|
336
|
-
@staticmethod
|
|
337
|
-
def _map_keys(m: Dict[str, any]) -> List[str]:
|
|
338
|
-
keys = list(m.keys())
|
|
339
|
-
keys.sort()
|
|
340
|
-
return keys
|
|
341
|
-
|
|
342
|
-
@staticmethod
|
|
343
|
-
def _only_n_bytes(j: str, num_bytes: int = 96) -> str:
|
|
344
|
-
diff = len(j.encode('utf-8')) - num_bytes
|
|
345
|
-
if diff > 0:
|
|
346
|
-
return f"{j[:num_bytes]}... ({diff} more bytes)"
|
|
347
|
-
return j
|
|
348
|
-
|
|
349
|
-
def _recursive_marshal_dict(self, m, budget) -> dict:
|
|
350
|
-
out = {}
|
|
351
|
-
self._mask(m)
|
|
352
|
-
for k in sorted(m.keys()):
|
|
353
|
-
raw = self._recursive_marshal(m[k], budget)
|
|
354
|
-
out[k] = raw
|
|
355
|
-
budget -= len(str(raw))
|
|
356
|
-
return out
|
|
357
|
-
|
|
358
|
-
def _recursive_marshal_list(self, s, budget) -> list:
|
|
359
|
-
out = []
|
|
360
|
-
for i in range(len(s)):
|
|
361
|
-
if i > 0 >= budget:
|
|
362
|
-
out.append("... (%d additional elements)" % (len(s) - len(out)))
|
|
363
|
-
break
|
|
364
|
-
raw = self._recursive_marshal(s[i], budget)
|
|
365
|
-
out.append(raw)
|
|
366
|
-
budget -= len(str(raw))
|
|
367
|
-
return out
|
|
368
|
-
|
|
369
|
-
def _recursive_marshal(self, v: any, budget: int) -> any:
|
|
370
|
-
if isinstance(v, dict):
|
|
371
|
-
return self._recursive_marshal_dict(v, budget)
|
|
372
|
-
elif isinstance(v, list):
|
|
373
|
-
return self._recursive_marshal_list(v, budget)
|
|
374
|
-
elif isinstance(v, str):
|
|
375
|
-
return self._only_n_bytes(v, self._debug_truncate_bytes)
|
|
376
|
-
else:
|
|
377
|
-
return v
|
|
378
|
-
|
|
379
|
-
def _redacted_dump(self, prefix: str, body: str) -> str:
|
|
380
|
-
if len(body) == 0:
|
|
381
|
-
return ""
|
|
382
|
-
try:
|
|
383
|
-
# Unmarshal body into primitive types.
|
|
384
|
-
tmp = json.loads(body)
|
|
385
|
-
max_bytes = 96
|
|
386
|
-
if self._debug_truncate_bytes > max_bytes:
|
|
387
|
-
max_bytes = self._debug_truncate_bytes
|
|
388
|
-
# Re-marshal body taking redaction and character limit into account.
|
|
389
|
-
raw = self._recursive_marshal(tmp, max_bytes)
|
|
390
|
-
return "\n".join([f'{prefix}{line}' for line in json.dumps(raw, indent=2).split("\n")])
|
|
391
|
-
except JSONDecodeError:
|
|
392
|
-
return f'{prefix}[non-JSON document of {len(body)} bytes]'
|
|
279
|
+
logger.debug(RoundTrip(response, self._cfg.debug_headers, self._debug_truncate_bytes, raw).generate())
|
|
393
280
|
|
|
394
281
|
|
|
395
282
|
class StreamingResponse(BinaryIO):
|
|
@@ -1,4 +1,5 @@
|
|
|
1
1
|
import re
|
|
2
|
+
import warnings
|
|
2
3
|
from dataclasses import dataclass
|
|
3
4
|
from typing import Dict, List, Optional
|
|
4
5
|
|
|
@@ -41,9 +42,38 @@ class DatabricksError(IOError):
|
|
|
41
42
|
retry_after_secs: int = None,
|
|
42
43
|
details: List[Dict[str, any]] = None,
|
|
43
44
|
**kwargs):
|
|
45
|
+
"""
|
|
46
|
+
|
|
47
|
+
:param message:
|
|
48
|
+
:param error_code:
|
|
49
|
+
:param detail: [Deprecated]
|
|
50
|
+
:param status: [Deprecated]
|
|
51
|
+
:param scimType: [Deprecated]
|
|
52
|
+
:param error: [Deprecated]
|
|
53
|
+
:param retry_after_secs:
|
|
54
|
+
:param details:
|
|
55
|
+
:param kwargs:
|
|
56
|
+
"""
|
|
57
|
+
# SCIM-specific parameters are deprecated
|
|
58
|
+
if detail:
|
|
59
|
+
warnings.warn(
|
|
60
|
+
"The 'detail' parameter of DatabricksError is deprecated and will be removed in a future version."
|
|
61
|
+
)
|
|
62
|
+
if scimType:
|
|
63
|
+
warnings.warn(
|
|
64
|
+
"The 'scimType' parameter of DatabricksError is deprecated and will be removed in a future version."
|
|
65
|
+
)
|
|
66
|
+
if status:
|
|
67
|
+
warnings.warn(
|
|
68
|
+
"The 'status' parameter of DatabricksError is deprecated and will be removed in a future version."
|
|
69
|
+
)
|
|
70
|
+
|
|
71
|
+
# API 1.2-specific parameters are deprecated
|
|
44
72
|
if error:
|
|
45
|
-
|
|
46
|
-
|
|
73
|
+
warnings.warn(
|
|
74
|
+
"The 'error' parameter of DatabricksError is deprecated and will be removed in a future version."
|
|
75
|
+
)
|
|
76
|
+
|
|
47
77
|
if detail:
|
|
48
78
|
# Handle SCIM error message details
|
|
49
79
|
# @see https://tools.ietf.org/html/rfc7644#section-3.7.3
|
|
@@ -4,11 +4,9 @@ from databricks.sdk.errors import platform
|
|
|
4
4
|
from databricks.sdk.errors.base import DatabricksError
|
|
5
5
|
|
|
6
6
|
from .overrides import _ALL_OVERRIDES
|
|
7
|
-
from .private_link import (_get_private_link_validation_error,
|
|
8
|
-
_is_private_link_redirect)
|
|
9
7
|
|
|
10
8
|
|
|
11
|
-
def
|
|
9
|
+
def _error_mapper(response: requests.Response, raw: dict) -> DatabricksError:
|
|
12
10
|
for override in _ALL_OVERRIDES:
|
|
13
11
|
if override.matches(response, raw):
|
|
14
12
|
return override.custom_error(**raw)
|
|
@@ -23,8 +21,6 @@ def error_mapper(response: requests.Response, raw: dict) -> DatabricksError:
|
|
|
23
21
|
# where there's a default exception class per HTTP status code, and we do
|
|
24
22
|
# rely on Databricks platform exception mapper to do the right thing.
|
|
25
23
|
return platform.STATUS_CODE_MAPPING[status_code](**raw)
|
|
26
|
-
if _is_private_link_redirect(response):
|
|
27
|
-
return _get_private_link_validation_error(response.url)
|
|
28
24
|
|
|
29
25
|
# backwards-compatible error creation for cases like using older versions of
|
|
30
26
|
# the SDK on way never releases of the platform.
|
|
@@ -0,0 +1,147 @@
|
|
|
1
|
+
import abc
|
|
2
|
+
import json
|
|
3
|
+
import logging
|
|
4
|
+
import re
|
|
5
|
+
from typing import Optional
|
|
6
|
+
|
|
7
|
+
import requests
|
|
8
|
+
|
|
9
|
+
from ..logger import RoundTrip
|
|
10
|
+
from .base import DatabricksError
|
|
11
|
+
from .mapper import _error_mapper
|
|
12
|
+
from .private_link import (_get_private_link_validation_error,
|
|
13
|
+
_is_private_link_redirect)
|
|
14
|
+
|
|
15
|
+
|
|
16
|
+
class _ErrorParser(abc.ABC):
|
|
17
|
+
"""A parser for errors from the Databricks REST API."""
|
|
18
|
+
|
|
19
|
+
@abc.abstractmethod
|
|
20
|
+
def parse_error(self, response: requests.Response, response_body: bytes) -> Optional[dict]:
|
|
21
|
+
"""Parses an error from the Databricks REST API. If the error cannot be parsed, returns None."""
|
|
22
|
+
|
|
23
|
+
|
|
24
|
+
class _EmptyParser(_ErrorParser):
|
|
25
|
+
"""A parser that handles empty responses."""
|
|
26
|
+
|
|
27
|
+
def parse_error(self, response: requests.Response, response_body: bytes) -> Optional[dict]:
|
|
28
|
+
if len(response_body) == 0:
|
|
29
|
+
return {'message': response.reason}
|
|
30
|
+
return None
|
|
31
|
+
|
|
32
|
+
|
|
33
|
+
class _StandardErrorParser(_ErrorParser):
|
|
34
|
+
"""
|
|
35
|
+
Parses errors from the Databricks REST API using the standard error format.
|
|
36
|
+
"""
|
|
37
|
+
|
|
38
|
+
def parse_error(self, response: requests.Response, response_body: bytes) -> Optional[dict]:
|
|
39
|
+
try:
|
|
40
|
+
payload_str = response_body.decode('utf-8')
|
|
41
|
+
resp: dict = json.loads(payload_str)
|
|
42
|
+
except json.JSONDecodeError as e:
|
|
43
|
+
logging.debug('_StandardErrorParser: unable to deserialize response as json', exc_info=e)
|
|
44
|
+
return None
|
|
45
|
+
|
|
46
|
+
error_args = {
|
|
47
|
+
'message': resp.get('message', 'request failed'),
|
|
48
|
+
'error_code': resp.get('error_code'),
|
|
49
|
+
'details': resp.get('details'),
|
|
50
|
+
}
|
|
51
|
+
|
|
52
|
+
# Handle API 1.2-style errors
|
|
53
|
+
if 'error' in resp:
|
|
54
|
+
error_args['message'] = resp['error']
|
|
55
|
+
|
|
56
|
+
# Handle SCIM Errors
|
|
57
|
+
detail = resp.get('detail')
|
|
58
|
+
status = resp.get('status')
|
|
59
|
+
scim_type = resp.get('scimType')
|
|
60
|
+
if detail:
|
|
61
|
+
# Handle SCIM error message details
|
|
62
|
+
# @see https://tools.ietf.org/html/rfc7644#section-3.7.3
|
|
63
|
+
if detail == "null":
|
|
64
|
+
detail = "SCIM API Internal Error"
|
|
65
|
+
error_args['message'] = f"{scim_type} {detail}".strip(" ")
|
|
66
|
+
error_args['error_code'] = f"SCIM_{status}"
|
|
67
|
+
return error_args
|
|
68
|
+
|
|
69
|
+
|
|
70
|
+
class _StringErrorParser(_ErrorParser):
|
|
71
|
+
"""
|
|
72
|
+
Parses errors from the Databricks REST API in the format "ERROR_CODE: MESSAGE".
|
|
73
|
+
"""
|
|
74
|
+
|
|
75
|
+
__STRING_ERROR_REGEX = re.compile(r'([A-Z_]+): (.*)')
|
|
76
|
+
|
|
77
|
+
def parse_error(self, response: requests.Response, response_body: bytes) -> Optional[dict]:
|
|
78
|
+
payload_str = response_body.decode('utf-8')
|
|
79
|
+
match = self.__STRING_ERROR_REGEX.match(payload_str)
|
|
80
|
+
if not match:
|
|
81
|
+
logging.debug('_StringErrorParser: unable to parse response as string')
|
|
82
|
+
return None
|
|
83
|
+
error_code, message = match.groups()
|
|
84
|
+
return {'error_code': error_code, 'message': message, 'status': response.status_code, }
|
|
85
|
+
|
|
86
|
+
|
|
87
|
+
class _HtmlErrorParser(_ErrorParser):
|
|
88
|
+
"""
|
|
89
|
+
Parses errors from the Databricks REST API in HTML format.
|
|
90
|
+
"""
|
|
91
|
+
|
|
92
|
+
__HTML_ERROR_REGEXES = [re.compile(r'<pre>(.*)</pre>'), re.compile(r'<title>(.*)</title>'), ]
|
|
93
|
+
|
|
94
|
+
def parse_error(self, response: requests.Response, response_body: bytes) -> Optional[dict]:
|
|
95
|
+
payload_str = response_body.decode('utf-8')
|
|
96
|
+
for regex in self.__HTML_ERROR_REGEXES:
|
|
97
|
+
match = regex.search(payload_str)
|
|
98
|
+
if match:
|
|
99
|
+
message = match.group(1) if match.group(1) else response.reason
|
|
100
|
+
return {
|
|
101
|
+
'status': response.status_code,
|
|
102
|
+
'message': message,
|
|
103
|
+
'error_code': response.reason.upper().replace(' ', '_')
|
|
104
|
+
}
|
|
105
|
+
logging.debug('_HtmlErrorParser: no <pre> tag found in error response')
|
|
106
|
+
return None
|
|
107
|
+
|
|
108
|
+
|
|
109
|
+
# A list of ErrorParsers that are tried in order to parse an API error from a response body. Most errors should be
|
|
110
|
+
# parsable by the _StandardErrorParser, but additional parsers can be added here for specific error formats. The order
|
|
111
|
+
# of the parsers is not important, as the set of errors that can be parsed by each parser should be disjoint.
|
|
112
|
+
_error_parsers = [_EmptyParser(), _StandardErrorParser(), _StringErrorParser(), _HtmlErrorParser(), ]
|
|
113
|
+
|
|
114
|
+
|
|
115
|
+
def _unknown_error(response: requests.Response) -> str:
|
|
116
|
+
"""A standard error message that can be shown when an API response cannot be parsed.
|
|
117
|
+
|
|
118
|
+
This error message includes a link to the issue tracker for the SDK for users to report the issue to us.
|
|
119
|
+
"""
|
|
120
|
+
request_log = RoundTrip(response, debug_headers=True, debug_truncate_bytes=10 * 1024).generate()
|
|
121
|
+
return (
|
|
122
|
+
'This is likely a bug in the Databricks SDK for Python or the underlying '
|
|
123
|
+
'API. Please report this issue with the following debugging information to the SDK issue tracker at '
|
|
124
|
+
f'https://github.com/databricks/databricks-sdk-go/issues. Request log:```{request_log}```')
|
|
125
|
+
|
|
126
|
+
|
|
127
|
+
def get_api_error(response: requests.Response) -> Optional[DatabricksError]:
|
|
128
|
+
"""
|
|
129
|
+
Handles responses from the REST API and returns a DatabricksError if the response indicates an error.
|
|
130
|
+
:param response: The response from the REST API.
|
|
131
|
+
:return: A DatabricksError if the response indicates an error, otherwise None.
|
|
132
|
+
"""
|
|
133
|
+
if not response.ok:
|
|
134
|
+
content = response.content
|
|
135
|
+
for parser in _error_parsers:
|
|
136
|
+
try:
|
|
137
|
+
error_args = parser.parse_error(response, content)
|
|
138
|
+
if error_args:
|
|
139
|
+
return _error_mapper(response, error_args)
|
|
140
|
+
except Exception as e:
|
|
141
|
+
logging.debug(f'Error parsing response with {parser}, continuing', exc_info=e)
|
|
142
|
+
return _error_mapper(response, {'message': 'unable to parse response. ' + _unknown_error(response)})
|
|
143
|
+
|
|
144
|
+
# Private link failures happen via a redirect to the login page. From a requests-perspective, the request
|
|
145
|
+
# is successful, but the response is not what we expect. We need to handle this case separately.
|
|
146
|
+
if _is_private_link_redirect(response):
|
|
147
|
+
return _get_private_link_validation_error(response.url)
|
|
@@ -51,7 +51,7 @@ def _is_private_link_redirect(resp: requests.Response) -> bool:
|
|
|
51
51
|
return parsed.path == '/login.html' and 'error=private-link-validation-error' in parsed.query
|
|
52
52
|
|
|
53
53
|
|
|
54
|
-
def _get_private_link_validation_error(url: str) ->
|
|
54
|
+
def _get_private_link_validation_error(url: str) -> PrivateLinkValidationError:
|
|
55
55
|
parsed = parse.urlparse(url)
|
|
56
56
|
env = get_environment_for_hostname(parsed.hostname)
|
|
57
57
|
return PrivateLinkValidationError(message=_private_link_info_map[env.cloud].error_message(),
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
from .round_trip_logger import RoundTrip
|
|
@@ -0,0 +1,118 @@
|
|
|
1
|
+
import json
|
|
2
|
+
import urllib.parse
|
|
3
|
+
from typing import Dict, List
|
|
4
|
+
|
|
5
|
+
import requests
|
|
6
|
+
|
|
7
|
+
|
|
8
|
+
class RoundTrip:
|
|
9
|
+
"""
|
|
10
|
+
A utility class for converting HTTP requests and responses to strings.
|
|
11
|
+
|
|
12
|
+
:param response: The response object to stringify.
|
|
13
|
+
:param debug_headers: Whether to include headers in the generated string.
|
|
14
|
+
:param debug_truncate_bytes: The maximum number of bytes to include in the generated string.
|
|
15
|
+
:param raw: Whether the response is a stream or not. If True, the response will not be logged directly.
|
|
16
|
+
"""
|
|
17
|
+
|
|
18
|
+
def __init__(self,
|
|
19
|
+
response: requests.Response,
|
|
20
|
+
debug_headers: bool,
|
|
21
|
+
debug_truncate_bytes: int,
|
|
22
|
+
raw=False):
|
|
23
|
+
self._debug_headers = debug_headers
|
|
24
|
+
self._debug_truncate_bytes = max(debug_truncate_bytes, 96)
|
|
25
|
+
self._raw = raw
|
|
26
|
+
self._response = response
|
|
27
|
+
|
|
28
|
+
def generate(self) -> str:
|
|
29
|
+
"""
|
|
30
|
+
Generate a string representation of the request and response. The string will include the request method, URL,
|
|
31
|
+
headers, and body, as well as the response status code, reason, headers, and body. Outgoing information
|
|
32
|
+
will be prefixed with `>`, and incoming information will be prefixed with `<`.
|
|
33
|
+
:return: A string representation of the request.
|
|
34
|
+
"""
|
|
35
|
+
request = self._response.request
|
|
36
|
+
url = urllib.parse.urlparse(request.url)
|
|
37
|
+
query = ''
|
|
38
|
+
if url.query:
|
|
39
|
+
query = f'?{urllib.parse.unquote(url.query)}'
|
|
40
|
+
sb = [f'{request.method} {urllib.parse.unquote(url.path)}{query}']
|
|
41
|
+
if self._debug_headers:
|
|
42
|
+
for k, v in request.headers.items():
|
|
43
|
+
sb.append(f'> * {k}: {self._only_n_bytes(v, self._debug_truncate_bytes)}')
|
|
44
|
+
if request.body:
|
|
45
|
+
sb.append("> [raw stream]" if self._raw else self._redacted_dump("> ", request.body))
|
|
46
|
+
sb.append(f'< {self._response.status_code} {self._response.reason}')
|
|
47
|
+
if self._raw and self._response.headers.get('Content-Type', None) != 'application/json':
|
|
48
|
+
# Raw streams with `Transfer-Encoding: chunked` do not have `Content-Type` header
|
|
49
|
+
sb.append("< [raw stream]")
|
|
50
|
+
elif self._response.content:
|
|
51
|
+
sb.append(self._redacted_dump("< ", self._response.content.decode('utf-8')))
|
|
52
|
+
return '\n'.join(sb)
|
|
53
|
+
|
|
54
|
+
@staticmethod
|
|
55
|
+
def _mask(m: Dict[str, any]):
|
|
56
|
+
for k in m:
|
|
57
|
+
if k in {'bytes_value', 'string_value', 'token_value', 'value', 'content'}:
|
|
58
|
+
m[k] = "**REDACTED**"
|
|
59
|
+
|
|
60
|
+
@staticmethod
|
|
61
|
+
def _map_keys(m: Dict[str, any]) -> List[str]:
|
|
62
|
+
keys = list(m.keys())
|
|
63
|
+
keys.sort()
|
|
64
|
+
return keys
|
|
65
|
+
|
|
66
|
+
@staticmethod
|
|
67
|
+
def _only_n_bytes(j: str, num_bytes: int = 96) -> str:
|
|
68
|
+
diff = len(j.encode('utf-8')) - num_bytes
|
|
69
|
+
if diff > 0:
|
|
70
|
+
return f"{j[:num_bytes]}... ({diff} more bytes)"
|
|
71
|
+
return j
|
|
72
|
+
|
|
73
|
+
def _recursive_marshal_dict(self, m, budget) -> dict:
|
|
74
|
+
out = {}
|
|
75
|
+
self._mask(m)
|
|
76
|
+
for k in sorted(m.keys()):
|
|
77
|
+
raw = self._recursive_marshal(m[k], budget)
|
|
78
|
+
out[k] = raw
|
|
79
|
+
budget -= len(str(raw))
|
|
80
|
+
return out
|
|
81
|
+
|
|
82
|
+
def _recursive_marshal_list(self, s, budget) -> list:
|
|
83
|
+
out = []
|
|
84
|
+
for i in range(len(s)):
|
|
85
|
+
if i > 0 >= budget:
|
|
86
|
+
out.append("... (%d additional elements)" % (len(s) - len(out)))
|
|
87
|
+
break
|
|
88
|
+
raw = self._recursive_marshal(s[i], budget)
|
|
89
|
+
out.append(raw)
|
|
90
|
+
budget -= len(str(raw))
|
|
91
|
+
return out
|
|
92
|
+
|
|
93
|
+
def _recursive_marshal(self, v: any, budget: int) -> any:
|
|
94
|
+
if isinstance(v, dict):
|
|
95
|
+
return self._recursive_marshal_dict(v, budget)
|
|
96
|
+
elif isinstance(v, list):
|
|
97
|
+
return self._recursive_marshal_list(v, budget)
|
|
98
|
+
elif isinstance(v, str):
|
|
99
|
+
return self._only_n_bytes(v, self._debug_truncate_bytes)
|
|
100
|
+
else:
|
|
101
|
+
return v
|
|
102
|
+
|
|
103
|
+
def _redacted_dump(self, prefix: str, body: str) -> str:
|
|
104
|
+
if len(body) == 0:
|
|
105
|
+
return ""
|
|
106
|
+
try:
|
|
107
|
+
# Unmarshal body into primitive types.
|
|
108
|
+
tmp = json.loads(body)
|
|
109
|
+
max_bytes = 96
|
|
110
|
+
if self._debug_truncate_bytes > max_bytes:
|
|
111
|
+
max_bytes = self._debug_truncate_bytes
|
|
112
|
+
# Re-marshal body taking redaction and character limit into account.
|
|
113
|
+
raw = self._recursive_marshal(tmp, max_bytes)
|
|
114
|
+
return "\n".join([f'{prefix}{line}' for line in json.dumps(raw, indent=2).split("\n")])
|
|
115
|
+
except json.JSONDecodeError:
|
|
116
|
+
to_log = self._only_n_bytes(body, self._debug_truncate_bytes)
|
|
117
|
+
log_lines = [prefix + x.strip('\r') for x in to_log.split("\n")]
|
|
118
|
+
return '\n'.join(log_lines)
|
|
@@ -1268,7 +1268,8 @@ class CreateMetastoreAssignment:
|
|
|
1268
1268
|
"""The unique ID of the metastore."""
|
|
1269
1269
|
|
|
1270
1270
|
default_catalog_name: str
|
|
1271
|
-
"""The name of the default catalog in the metastore."
|
|
1271
|
+
"""The name of the default catalog in the metastore. This field is depracted. Please use "Default
|
|
1272
|
+
Namespace API" to configure the default catalog for a Databricks workspace."""
|
|
1272
1273
|
|
|
1273
1274
|
workspace_id: Optional[int] = None
|
|
1274
1275
|
"""A workspace ID."""
|
|
@@ -4150,6 +4151,49 @@ class QuotaInfo:
|
|
|
4150
4151
|
quota_name=d.get('quota_name', None))
|
|
4151
4152
|
|
|
4152
4153
|
|
|
4154
|
+
@dataclass
|
|
4155
|
+
class RegenerateDashboardRequest:
|
|
4156
|
+
table_name: Optional[str] = None
|
|
4157
|
+
"""Full name of the table."""
|
|
4158
|
+
|
|
4159
|
+
warehouse_id: Optional[str] = None
|
|
4160
|
+
"""Optional argument to specify the warehouse for dashboard regeneration. If not specified, the
|
|
4161
|
+
first running warehouse will be used."""
|
|
4162
|
+
|
|
4163
|
+
def as_dict(self) -> dict:
|
|
4164
|
+
"""Serializes the RegenerateDashboardRequest into a dictionary suitable for use as a JSON request body."""
|
|
4165
|
+
body = {}
|
|
4166
|
+
if self.table_name is not None: body['table_name'] = self.table_name
|
|
4167
|
+
if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id
|
|
4168
|
+
return body
|
|
4169
|
+
|
|
4170
|
+
@classmethod
|
|
4171
|
+
def from_dict(cls, d: Dict[str, any]) -> RegenerateDashboardRequest:
|
|
4172
|
+
"""Deserializes the RegenerateDashboardRequest from a dictionary."""
|
|
4173
|
+
return cls(table_name=d.get('table_name', None), warehouse_id=d.get('warehouse_id', None))
|
|
4174
|
+
|
|
4175
|
+
|
|
4176
|
+
@dataclass
|
|
4177
|
+
class RegenerateDashboardResponse:
|
|
4178
|
+
dashboard_id: Optional[str] = None
|
|
4179
|
+
"""Id of the regenerated monitoring dashboard."""
|
|
4180
|
+
|
|
4181
|
+
parent_folder: Optional[str] = None
|
|
4182
|
+
"""The directory where the regenerated dashboard is stored."""
|
|
4183
|
+
|
|
4184
|
+
def as_dict(self) -> dict:
|
|
4185
|
+
"""Serializes the RegenerateDashboardResponse into a dictionary suitable for use as a JSON request body."""
|
|
4186
|
+
body = {}
|
|
4187
|
+
if self.dashboard_id is not None: body['dashboard_id'] = self.dashboard_id
|
|
4188
|
+
if self.parent_folder is not None: body['parent_folder'] = self.parent_folder
|
|
4189
|
+
return body
|
|
4190
|
+
|
|
4191
|
+
@classmethod
|
|
4192
|
+
def from_dict(cls, d: Dict[str, any]) -> RegenerateDashboardResponse:
|
|
4193
|
+
"""Deserializes the RegenerateDashboardResponse from a dictionary."""
|
|
4194
|
+
return cls(dashboard_id=d.get('dashboard_id', None), parent_folder=d.get('parent_folder', None))
|
|
4195
|
+
|
|
4196
|
+
|
|
4153
4197
|
@dataclass
|
|
4154
4198
|
class RegisteredModelAlias:
|
|
4155
4199
|
"""Registered model alias."""
|
|
@@ -5220,7 +5264,8 @@ class UpdateMetastore:
|
|
|
5220
5264
|
@dataclass
|
|
5221
5265
|
class UpdateMetastoreAssignment:
|
|
5222
5266
|
default_catalog_name: Optional[str] = None
|
|
5223
|
-
"""The name of the default catalog
|
|
5267
|
+
"""The name of the default catalog in the metastore. This field is deprecated. Please use "Default
|
|
5268
|
+
Namespace API" to configure the default catalog for a Databricks workspace."""
|
|
5224
5269
|
|
|
5225
5270
|
metastore_id: Optional[str] = None
|
|
5226
5271
|
"""The unique ID of the metastore."""
|
|
@@ -7208,7 +7253,8 @@ class MetastoresAPI:
|
|
|
7208
7253
|
:param metastore_id: str
|
|
7209
7254
|
The unique ID of the metastore.
|
|
7210
7255
|
:param default_catalog_name: str
|
|
7211
|
-
The name of the default catalog in the metastore.
|
|
7256
|
+
The name of the default catalog in the metastore. This field is deprecated. Please use "Default
|
|
7257
|
+
Namespace API" to configure the default catalog for a Databricks workspace.
|
|
7212
7258
|
|
|
7213
7259
|
|
|
7214
7260
|
"""
|
|
@@ -7421,7 +7467,8 @@ class MetastoresAPI:
|
|
|
7421
7467
|
:param workspace_id: int
|
|
7422
7468
|
A workspace ID.
|
|
7423
7469
|
:param default_catalog_name: str (optional)
|
|
7424
|
-
The name of the default catalog
|
|
7470
|
+
The name of the default catalog in the metastore. This field is deprecated. Please use "Default
|
|
7471
|
+
Namespace API" to configure the default catalog for a Databricks workspace.
|
|
7425
7472
|
:param metastore_id: str (optional)
|
|
7426
7473
|
The unique ID of the metastore.
|
|
7427
7474
|
|
|
@@ -7916,6 +7963,40 @@ class QualityMonitorsAPI:
|
|
|
7916
7963
|
headers=headers)
|
|
7917
7964
|
return MonitorRefreshListResponse.from_dict(res)
|
|
7918
7965
|
|
|
7966
|
+
def regenerate_dashboard(self,
|
|
7967
|
+
table_name: str,
|
|
7968
|
+
*,
|
|
7969
|
+
warehouse_id: Optional[str] = None) -> RegenerateDashboardResponse:
|
|
7970
|
+
"""Regenerate a monitoring dashboard.
|
|
7971
|
+
|
|
7972
|
+
Regenerates the monitoring dashboard for the specified table.
|
|
7973
|
+
|
|
7974
|
+
The caller must either: 1. be an owner of the table's parent catalog 2. have **USE_CATALOG** on the
|
|
7975
|
+
table's parent catalog and be an owner of the table's parent schema 3. have the following permissions:
|
|
7976
|
+
- **USE_CATALOG** on the table's parent catalog - **USE_SCHEMA** on the table's parent schema - be an
|
|
7977
|
+
owner of the table
|
|
7978
|
+
|
|
7979
|
+
The call must be made from the workspace where the monitor was created. The dashboard will be
|
|
7980
|
+
regenerated in the assets directory that was specified when the monitor was created.
|
|
7981
|
+
|
|
7982
|
+
:param table_name: str
|
|
7983
|
+
Full name of the table.
|
|
7984
|
+
:param warehouse_id: str (optional)
|
|
7985
|
+
Optional argument to specify the warehouse for dashboard regeneration. If not specified, the first
|
|
7986
|
+
running warehouse will be used.
|
|
7987
|
+
|
|
7988
|
+
:returns: :class:`RegenerateDashboardResponse`
|
|
7989
|
+
"""
|
|
7990
|
+
body = {}
|
|
7991
|
+
if warehouse_id is not None: body['warehouse_id'] = warehouse_id
|
|
7992
|
+
headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
|
|
7993
|
+
|
|
7994
|
+
res = self._api.do('POST',
|
|
7995
|
+
f'/api/2.1/quality-monitoring/tables/{table_name}/monitor/dashboard',
|
|
7996
|
+
body=body,
|
|
7997
|
+
headers=headers)
|
|
7998
|
+
return RegenerateDashboardResponse.from_dict(res)
|
|
7999
|
+
|
|
7919
8000
|
def run_refresh(self, table_name: str) -> MonitorRefreshInfo:
|
|
7920
8001
|
"""Queue a metric refresh for a monitor.
|
|
7921
8002
|
|