databricks-sdk 0.31.0__tar.gz → 0.32.0__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of databricks-sdk has been flagged as possibly problematic by the registry diff tool.

Files changed (89)
  1. {databricks_sdk-0.31.0/databricks_sdk.egg-info → databricks_sdk-0.32.0}/PKG-INFO +1 -1
  2. {databricks_sdk-0.31.0 → databricks_sdk-0.32.0}/databricks/sdk/core.py +17 -130
  3. {databricks_sdk-0.31.0 → databricks_sdk-0.32.0}/databricks/sdk/credentials_provider.py +4 -1
  4. {databricks_sdk-0.31.0 → databricks_sdk-0.32.0}/databricks/sdk/errors/__init__.py +2 -1
  5. {databricks_sdk-0.31.0 → databricks_sdk-0.32.0}/databricks/sdk/errors/base.py +32 -2
  6. {databricks_sdk-0.31.0 → databricks_sdk-0.32.0}/databricks/sdk/errors/mapper.py +1 -5
  7. databricks_sdk-0.32.0/databricks/sdk/errors/parser.py +146 -0
  8. {databricks_sdk-0.31.0 → databricks_sdk-0.32.0}/databricks/sdk/errors/private_link.py +1 -1
  9. databricks_sdk-0.32.0/databricks/sdk/logger/__init__.py +1 -0
  10. databricks_sdk-0.32.0/databricks/sdk/logger/round_trip_logger.py +118 -0
  11. {databricks_sdk-0.31.0 → databricks_sdk-0.32.0}/databricks/sdk/service/catalog.py +85 -4
  12. {databricks_sdk-0.31.0 → databricks_sdk-0.32.0}/databricks/sdk/service/iam.py +8 -8
  13. {databricks_sdk-0.31.0 → databricks_sdk-0.32.0}/databricks/sdk/service/jobs.py +246 -4
  14. {databricks_sdk-0.31.0 → databricks_sdk-0.32.0}/databricks/sdk/service/ml.py +8 -2
  15. {databricks_sdk-0.31.0 → databricks_sdk-0.32.0}/databricks/sdk/service/serving.py +20 -8
  16. {databricks_sdk-0.31.0 → databricks_sdk-0.32.0}/databricks/sdk/service/vectorsearch.py +8 -1
  17. databricks_sdk-0.32.0/databricks/sdk/version.py +1 -0
  18. {databricks_sdk-0.31.0 → databricks_sdk-0.32.0/databricks_sdk.egg-info}/PKG-INFO +1 -1
  19. {databricks_sdk-0.31.0 → databricks_sdk-0.32.0}/databricks_sdk.egg-info/SOURCES.txt +3 -0
  20. {databricks_sdk-0.31.0 → databricks_sdk-0.32.0}/tests/test_config.py +39 -0
  21. databricks_sdk-0.32.0/tests/test_errors.py +117 -0
  22. databricks_sdk-0.31.0/databricks/sdk/version.py +0 -1
  23. databricks_sdk-0.31.0/tests/test_errors.py +0 -97
  24. {databricks_sdk-0.31.0 → databricks_sdk-0.32.0}/LICENSE +0 -0
  25. {databricks_sdk-0.31.0 → databricks_sdk-0.32.0}/NOTICE +0 -0
  26. {databricks_sdk-0.31.0 → databricks_sdk-0.32.0}/README.md +0 -0
  27. {databricks_sdk-0.31.0 → databricks_sdk-0.32.0}/databricks/__init__.py +0 -0
  28. {databricks_sdk-0.31.0 → databricks_sdk-0.32.0}/databricks/sdk/__init__.py +0 -0
  29. {databricks_sdk-0.31.0 → databricks_sdk-0.32.0}/databricks/sdk/_property.py +0 -0
  30. {databricks_sdk-0.31.0 → databricks_sdk-0.32.0}/databricks/sdk/_widgets/__init__.py +0 -0
  31. {databricks_sdk-0.31.0 → databricks_sdk-0.32.0}/databricks/sdk/_widgets/default_widgets_utils.py +0 -0
  32. {databricks_sdk-0.31.0 → databricks_sdk-0.32.0}/databricks/sdk/_widgets/ipywidgets_utils.py +0 -0
  33. {databricks_sdk-0.31.0 → databricks_sdk-0.32.0}/databricks/sdk/azure.py +0 -0
  34. {databricks_sdk-0.31.0 → databricks_sdk-0.32.0}/databricks/sdk/casing.py +0 -0
  35. {databricks_sdk-0.31.0 → databricks_sdk-0.32.0}/databricks/sdk/clock.py +0 -0
  36. {databricks_sdk-0.31.0 → databricks_sdk-0.32.0}/databricks/sdk/config.py +0 -0
  37. {databricks_sdk-0.31.0 → databricks_sdk-0.32.0}/databricks/sdk/data_plane.py +0 -0
  38. {databricks_sdk-0.31.0 → databricks_sdk-0.32.0}/databricks/sdk/dbutils.py +0 -0
  39. {databricks_sdk-0.31.0 → databricks_sdk-0.32.0}/databricks/sdk/environments.py +0 -0
  40. {databricks_sdk-0.31.0 → databricks_sdk-0.32.0}/databricks/sdk/errors/overrides.py +0 -0
  41. {databricks_sdk-0.31.0 → databricks_sdk-0.32.0}/databricks/sdk/errors/platform.py +0 -0
  42. {databricks_sdk-0.31.0 → databricks_sdk-0.32.0}/databricks/sdk/errors/sdk.py +0 -0
  43. {databricks_sdk-0.31.0 → databricks_sdk-0.32.0}/databricks/sdk/mixins/__init__.py +0 -0
  44. {databricks_sdk-0.31.0 → databricks_sdk-0.32.0}/databricks/sdk/mixins/compute.py +0 -0
  45. {databricks_sdk-0.31.0 → databricks_sdk-0.32.0}/databricks/sdk/mixins/files.py +0 -0
  46. {databricks_sdk-0.31.0 → databricks_sdk-0.32.0}/databricks/sdk/mixins/workspace.py +0 -0
  47. {databricks_sdk-0.31.0 → databricks_sdk-0.32.0}/databricks/sdk/oauth.py +0 -0
  48. {databricks_sdk-0.31.0 → databricks_sdk-0.32.0}/databricks/sdk/py.typed +0 -0
  49. {databricks_sdk-0.31.0 → databricks_sdk-0.32.0}/databricks/sdk/retries.py +0 -0
  50. {databricks_sdk-0.31.0 → databricks_sdk-0.32.0}/databricks/sdk/runtime/__init__.py +0 -0
  51. {databricks_sdk-0.31.0 → databricks_sdk-0.32.0}/databricks/sdk/runtime/dbutils_stub.py +0 -0
  52. {databricks_sdk-0.31.0 → databricks_sdk-0.32.0}/databricks/sdk/service/__init__.py +0 -0
  53. {databricks_sdk-0.31.0 → databricks_sdk-0.32.0}/databricks/sdk/service/_internal.py +0 -0
  54. {databricks_sdk-0.31.0 → databricks_sdk-0.32.0}/databricks/sdk/service/apps.py +0 -0
  55. {databricks_sdk-0.31.0 → databricks_sdk-0.32.0}/databricks/sdk/service/billing.py +0 -0
  56. {databricks_sdk-0.31.0 → databricks_sdk-0.32.0}/databricks/sdk/service/compute.py +0 -0
  57. {databricks_sdk-0.31.0 → databricks_sdk-0.32.0}/databricks/sdk/service/dashboards.py +0 -0
  58. {databricks_sdk-0.31.0 → databricks_sdk-0.32.0}/databricks/sdk/service/files.py +0 -0
  59. {databricks_sdk-0.31.0 → databricks_sdk-0.32.0}/databricks/sdk/service/marketplace.py +0 -0
  60. {databricks_sdk-0.31.0 → databricks_sdk-0.32.0}/databricks/sdk/service/oauth2.py +0 -0
  61. {databricks_sdk-0.31.0 → databricks_sdk-0.32.0}/databricks/sdk/service/pipelines.py +0 -0
  62. {databricks_sdk-0.31.0 → databricks_sdk-0.32.0}/databricks/sdk/service/provisioning.py +0 -0
  63. {databricks_sdk-0.31.0 → databricks_sdk-0.32.0}/databricks/sdk/service/settings.py +0 -0
  64. {databricks_sdk-0.31.0 → databricks_sdk-0.32.0}/databricks/sdk/service/sharing.py +0 -0
  65. {databricks_sdk-0.31.0 → databricks_sdk-0.32.0}/databricks/sdk/service/sql.py +0 -0
  66. {databricks_sdk-0.31.0 → databricks_sdk-0.32.0}/databricks/sdk/service/workspace.py +0 -0
  67. {databricks_sdk-0.31.0 → databricks_sdk-0.32.0}/databricks/sdk/useragent.py +0 -0
  68. {databricks_sdk-0.31.0 → databricks_sdk-0.32.0}/databricks_sdk.egg-info/dependency_links.txt +0 -0
  69. {databricks_sdk-0.31.0 → databricks_sdk-0.32.0}/databricks_sdk.egg-info/requires.txt +0 -0
  70. {databricks_sdk-0.31.0 → databricks_sdk-0.32.0}/databricks_sdk.egg-info/top_level.txt +0 -0
  71. {databricks_sdk-0.31.0 → databricks_sdk-0.32.0}/setup.cfg +0 -0
  72. {databricks_sdk-0.31.0 → databricks_sdk-0.32.0}/setup.py +0 -0
  73. {databricks_sdk-0.31.0 → databricks_sdk-0.32.0}/tests/test_auth.py +0 -0
  74. {databricks_sdk-0.31.0 → databricks_sdk-0.32.0}/tests/test_auth_manual_tests.py +0 -0
  75. {databricks_sdk-0.31.0 → databricks_sdk-0.32.0}/tests/test_client.py +0 -0
  76. {databricks_sdk-0.31.0 → databricks_sdk-0.32.0}/tests/test_compute_mixins.py +0 -0
  77. {databricks_sdk-0.31.0 → databricks_sdk-0.32.0}/tests/test_core.py +0 -0
  78. {databricks_sdk-0.31.0 → databricks_sdk-0.32.0}/tests/test_data_plane.py +0 -0
  79. {databricks_sdk-0.31.0 → databricks_sdk-0.32.0}/tests/test_dbfs_mixins.py +0 -0
  80. {databricks_sdk-0.31.0 → databricks_sdk-0.32.0}/tests/test_dbutils.py +0 -0
  81. {databricks_sdk-0.31.0 → databricks_sdk-0.32.0}/tests/test_environments.py +0 -0
  82. {databricks_sdk-0.31.0 → databricks_sdk-0.32.0}/tests/test_init_file.py +0 -0
  83. {databricks_sdk-0.31.0 → databricks_sdk-0.32.0}/tests/test_internal.py +0 -0
  84. {databricks_sdk-0.31.0 → databricks_sdk-0.32.0}/tests/test_jobs.py +0 -0
  85. {databricks_sdk-0.31.0 → databricks_sdk-0.32.0}/tests/test_metadata_service_auth.py +0 -0
  86. {databricks_sdk-0.31.0 → databricks_sdk-0.32.0}/tests/test_misc.py +0 -0
  87. {databricks_sdk-0.31.0 → databricks_sdk-0.32.0}/tests/test_oauth.py +0 -0
  88. {databricks_sdk-0.31.0 → databricks_sdk-0.32.0}/tests/test_retries.py +0 -0
  89. {databricks_sdk-0.31.0 → databricks_sdk-0.32.0}/tests/test_user_agent.py +0 -0
{databricks_sdk-0.31.0/databricks_sdk.egg-info → databricks_sdk-0.32.0}/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: databricks-sdk
-Version: 0.31.0
+Version: 0.32.0
 Summary: Databricks SDK for Python (Beta)
 Home-page: https://databricks-sdk-py.readthedocs.io
 Author: Serge Smertin
{databricks_sdk-0.31.0 → databricks_sdk-0.32.0}/databricks/sdk/core.py
@@ -1,7 +1,5 @@
 import re
-import urllib.parse
 from datetime import timedelta
-from json import JSONDecodeError
 from types import TracebackType
 from typing import Any, BinaryIO, Iterator, Type
 from urllib.parse import urlencode
@@ -12,8 +10,8 @@ from .casing import Casing
 from .config import *
 # To preserve backwards compatibility (as these definitions were previously in this module)
 from .credentials_provider import *
-from .errors import DatabricksError, error_mapper
-from .errors.private_link import _is_private_link_redirect
+from .errors import DatabricksError, get_api_error
+from .logger import RoundTrip
 from .oauth import retrieve_token
 from .retries import retried
 
@@ -262,134 +260,23 @@ class ApiClient:
                                         auth=auth,
                                         stream=raw,
                                         timeout=self._http_timeout_seconds)
-        try:
-            self._record_request_log(response, raw=raw or data is not None or files is not None)
-            if not response.ok: # internally calls response.raise_for_status()
-                # TODO: experiment with traceback pruning for better readability
-                # See https://stackoverflow.com/a/58821552/277035
-                payload = response.json()
-                raise self._make_nicer_error(response=response, **payload) from None
-            # Private link failures happen via a redirect to the login page. From a requests-perspective, the request
-            # is successful, but the response is not what we expect. We need to handle this case separately.
-            if _is_private_link_redirect(response):
-                raise self._make_nicer_error(response=response) from None
-            return response
-        except requests.exceptions.JSONDecodeError:
-            message = self._make_sense_from_html(response.text)
-            if not message:
-                message = response.reason
-            raise self._make_nicer_error(response=response, message=message) from None
-
-    @staticmethod
-    def _make_sense_from_html(txt: str) -> str:
-        matchers = [r'<pre>(.*)</pre>', r'<title>(.*)</title>']
-        for attempt in matchers:
-            expr = re.compile(attempt, re.MULTILINE)
-            match = expr.search(txt)
-            if not match:
-                continue
-            return match.group(1).strip()
-        return txt
-
-    def _make_nicer_error(self, *, response: requests.Response, **kwargs) -> DatabricksError:
-        status_code = response.status_code
-        message = kwargs.get('message', 'request failed')
-        is_http_unauthorized_or_forbidden = status_code in (401, 403)
-        is_too_many_requests_or_unavailable = status_code in (429, 503)
-        if is_http_unauthorized_or_forbidden:
-            message = self._cfg.wrap_debug_info(message)
-        if is_too_many_requests_or_unavailable:
-            kwargs['retry_after_secs'] = self._parse_retry_after(response)
-        kwargs['message'] = message
-        return error_mapper(response, kwargs)
-
-    def _record_request_log(self, response: requests.Response, raw=False):
+        self._record_request_log(response, raw=raw or data is not None or files is not None)
+        error = get_api_error(response)
+        if error is not None:
+            status_code = response.status_code
+            is_http_unauthorized_or_forbidden = status_code in (401, 403)
+            is_too_many_requests_or_unavailable = status_code in (429, 503)
+            if is_http_unauthorized_or_forbidden:
+                error.message = self._cfg.wrap_debug_info(error.message)
+            if is_too_many_requests_or_unavailable:
+                error.retry_after_secs = self._parse_retry_after(response)
+            raise error from None
+        return response
+
+    def _record_request_log(self, response: requests.Response, raw: bool = False) -> None:
         if not logger.isEnabledFor(logging.DEBUG):
             return
-        request = response.request
-        url = urllib.parse.urlparse(request.url)
-        query = ''
-        if url.query:
-            query = f'?{urllib.parse.unquote(url.query)}'
-        sb = [f'{request.method} {urllib.parse.unquote(url.path)}{query}']
-        if self._cfg.debug_headers:
-            if self._cfg.host:
-                sb.append(f'> * Host: {self._cfg.host}')
-            for k, v in request.headers.items():
-                sb.append(f'> * {k}: {self._only_n_bytes(v, self._debug_truncate_bytes)}')
-        if request.body:
-            sb.append("> [raw stream]" if raw else self._redacted_dump("> ", request.body))
-        sb.append(f'< {response.status_code} {response.reason}')
-        if raw and response.headers.get('Content-Type', None) != 'application/json':
-            # Raw streams with `Transfer-Encoding: chunked` do not have `Content-Type` header
-            sb.append("< [raw stream]")
-        elif response.content:
-            sb.append(self._redacted_dump("< ", response.content))
-        logger.debug("\n".join(sb))
-
-    @staticmethod
-    def _mask(m: Dict[str, any]):
-        for k in m:
-            if k in {'bytes_value', 'string_value', 'token_value', 'value', 'content'}:
-                m[k] = "**REDACTED**"
-
-    @staticmethod
-    def _map_keys(m: Dict[str, any]) -> List[str]:
-        keys = list(m.keys())
-        keys.sort()
-        return keys
-
-    @staticmethod
-    def _only_n_bytes(j: str, num_bytes: int = 96) -> str:
-        diff = len(j.encode('utf-8')) - num_bytes
-        if diff > 0:
-            return f"{j[:num_bytes]}... ({diff} more bytes)"
-        return j
-
-    def _recursive_marshal_dict(self, m, budget) -> dict:
-        out = {}
-        self._mask(m)
-        for k in sorted(m.keys()):
-            raw = self._recursive_marshal(m[k], budget)
-            out[k] = raw
-            budget -= len(str(raw))
-        return out
-
-    def _recursive_marshal_list(self, s, budget) -> list:
-        out = []
-        for i in range(len(s)):
-            if i > 0 >= budget:
-                out.append("... (%d additional elements)" % (len(s) - len(out)))
-                break
-            raw = self._recursive_marshal(s[i], budget)
-            out.append(raw)
-            budget -= len(str(raw))
-        return out
-
-    def _recursive_marshal(self, v: any, budget: int) -> any:
-        if isinstance(v, dict):
-            return self._recursive_marshal_dict(v, budget)
-        elif isinstance(v, list):
-            return self._recursive_marshal_list(v, budget)
-        elif isinstance(v, str):
-            return self._only_n_bytes(v, self._debug_truncate_bytes)
-        else:
-            return v
-
-    def _redacted_dump(self, prefix: str, body: str) -> str:
-        if len(body) == 0:
-            return ""
-        try:
-            # Unmarshal body into primitive types.
-            tmp = json.loads(body)
-            max_bytes = 96
-            if self._debug_truncate_bytes > max_bytes:
-                max_bytes = self._debug_truncate_bytes
-            # Re-marshal body taking redaction and character limit into account.
-            raw = self._recursive_marshal(tmp, max_bytes)
-            return "\n".join([f'{prefix}{line}' for line in json.dumps(raw, indent=2).split("\n")])
-        except JSONDecodeError:
-            return f'{prefix}[non-JSON document of {len(body)} bytes]'
+        logger.debug(RoundTrip(response, self._cfg.debug_headers, self._debug_truncate_bytes, raw).generate())
 
 
 class StreamingResponse(BinaryIO):
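
With this refactor, ApiClient no longer builds exceptions itself: it logs the round trip, asks get_api_error() to parse and map the failure, and only decorates the result with auth debug info (401/403) or a Retry-After hint (429/503) before raising. A minimal caller-side sketch, assuming a configured workspace and that the platform mapper picks the NotFound subclass for a 404 (illustrative, not taken from the package):

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.errors import DatabricksError, NotFound

    w = WorkspaceClient()  # credentials assumed to come from the environment
    try:
        w.clusters.get(cluster_id="0000-000000-doesnotexist")
    except NotFound as e:
        # mapped from the parsed error_code / 404 status by the error mapper
        print("not found:", e)
    except DatabricksError as e:
        # any other API failure raised by ApiClient
        print("API error:", e.error_code, e)
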
{databricks_sdk-0.31.0 → databricks_sdk-0.32.0}/databricks/sdk/credentials_provider.py
@@ -607,7 +607,10 @@ def databricks_cli(cfg: 'Config') -> Optional[CredentialsProvider]:
         token = token_source.token()
         return {'Authorization': f'{token.token_type} {token.access_token}'}
 
-    return OAuthCredentialsProvider(inner, token_source.token)
+    def token() -> Token:
+        return token_source.token()
+
+    return OAuthCredentialsProvider(inner, token)
 
 
 class MetadataServiceTokenSource(Refreshable):
{databricks_sdk-0.31.0 → databricks_sdk-0.32.0}/databricks/sdk/errors/__init__.py
@@ -1,5 +1,6 @@
 from .base import DatabricksError, ErrorDetail
-from .mapper import error_mapper
+from .mapper import _error_mapper
+from .parser import get_api_error
 from .platform import *
 from .private_link import PrivateLinkValidationError
 from .sdk import *
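
Note that the public name error_mapper is no longer exported here: the mapper is now private (_error_mapper) and get_api_error(response) is the entry point that core.py uses. A hedged migration hint (not taken from the package docs) for code that imported the removed name:

    # before (0.31.0): from databricks.sdk.errors import error_mapper
    # after  (0.32.0):
    from databricks.sdk.errors import get_api_error
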
{databricks_sdk-0.31.0 → databricks_sdk-0.32.0}/databricks/sdk/errors/base.py
@@ -1,4 +1,5 @@
 import re
+import warnings
 from dataclasses import dataclass
 from typing import Dict, List, Optional
 
@@ -41,9 +42,38 @@ class DatabricksError(IOError):
                  retry_after_secs: int = None,
                  details: List[Dict[str, any]] = None,
                  **kwargs):
+        """
+
+        :param message:
+        :param error_code:
+        :param detail: [Deprecated]
+        :param status: [Deprecated]
+        :param scimType: [Deprecated]
+        :param error: [Deprecated]
+        :param retry_after_secs:
+        :param details:
+        :param kwargs:
+        """
+        # SCIM-specific parameters are deprecated
+        if detail:
+            warnings.warn(
+                "The 'detail' parameter of DatabricksError is deprecated and will be removed in a future version."
+            )
+        if scimType:
+            warnings.warn(
+                "The 'scimType' parameter of DatabricksError is deprecated and will be removed in a future version."
+            )
+        if status:
+            warnings.warn(
+                "The 'status' parameter of DatabricksError is deprecated and will be removed in a future version."
+            )
+
+        # API 1.2-specific parameters are deprecated
         if error:
-            # API 1.2 has different response format, let's adapt
-            message = error
+            warnings.warn(
+                "The 'error' parameter of DatabricksError is deprecated and will be removed in a future version."
+            )
+
         if detail:
             # Handle SCIM error message details
             # @see https://tools.ietf.org/html/rfc7644#section-3.7.3
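
The SCIM- and API 1.2-specific constructor parameters still work but now emit warnings when passed. A small sketch of how the deprecation surfaces (illustrative; warnings.warn is called without an explicit category, so the default UserWarning is used):

    import warnings

    from databricks.sdk.errors import DatabricksError

    with warnings.catch_warnings(record=True) as caught:
        warnings.simplefilter("always")
        DatabricksError("user not found", detail="Resource not found", status="404", scimType="invalidValue")
        assert any("deprecated" in str(w.message) for w in caught)
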
{databricks_sdk-0.31.0 → databricks_sdk-0.32.0}/databricks/sdk/errors/mapper.py
@@ -4,11 +4,9 @@ from databricks.sdk.errors import platform
 from databricks.sdk.errors.base import DatabricksError
 
 from .overrides import _ALL_OVERRIDES
-from .private_link import (_get_private_link_validation_error,
-                           _is_private_link_redirect)
 
 
-def error_mapper(response: requests.Response, raw: dict) -> DatabricksError:
+def _error_mapper(response: requests.Response, raw: dict) -> DatabricksError:
     for override in _ALL_OVERRIDES:
         if override.matches(response, raw):
             return override.custom_error(**raw)
@@ -23,8 +21,6 @@ def error_mapper(response: requests.Response, raw: dict) -> DatabricksError:
         # where there's a default exception class per HTTP status code, and we do
         # rely on Databricks platform exception mapper to do the right thing.
         return platform.STATUS_CODE_MAPPING[status_code](**raw)
-    if _is_private_link_redirect(response):
-        return _get_private_link_validation_error(response.url)
 
     # backwards-compatible error creation for cases like using older versions of
     # the SDK on way never releases of the platform.
databricks_sdk-0.32.0/databricks/sdk/errors/parser.py
@@ -0,0 +1,146 @@
+import abc
+import json
+import logging
+import re
+from typing import Optional
+
+import requests
+
+from ..logger import RoundTrip
+from .base import DatabricksError
+from .mapper import _error_mapper
+from .private_link import (_get_private_link_validation_error,
+                           _is_private_link_redirect)
+
+
+class _ErrorParser(abc.ABC):
+    """A parser for errors from the Databricks REST API."""
+
+    @abc.abstractmethod
+    def parse_error(self, response: requests.Response, response_body: bytes) -> Optional[dict]:
+        """Parses an error from the Databricks REST API. If the error cannot be parsed, returns None."""
+
+
+class _EmptyParser(_ErrorParser):
+    """A parser that handles empty responses."""
+
+    def parse_error(self, response: requests.Response, response_body: bytes) -> Optional[dict]:
+        if len(response_body) == 0:
+            return {'message': response.reason}
+        return None
+
+
+class _StandardErrorParser(_ErrorParser):
+    """
+    Parses errors from the Databricks REST API using the standard error format.
+    """
+
+    def parse_error(self, response: requests.Response, response_body: bytes) -> Optional[dict]:
+        try:
+            payload_str = response_body.decode('utf-8')
+            resp: dict = json.loads(payload_str)
+        except json.JSONDecodeError as e:
+            logging.debug('_StandardErrorParser: unable to deserialize response as json', exc_info=e)
+            return None
+
+        error_args = {
+            'message': resp.get('message', 'request failed'),
+            'error_code': resp.get('error_code'),
+            'details': resp.get('details'),
+        }
+
+        # Handle API 1.2-style errors
+        if 'error' in resp:
+            error_args['message'] = resp['error']
+
+        # Handle SCIM Errors
+        detail = resp.get('detail')
+        status = resp.get('status')
+        scim_type = resp.get('scimType')
+        if detail:
+            # Handle SCIM error message details
+            # @see https://tools.ietf.org/html/rfc7644#section-3.7.3
+            error_args[
+                'message'] = f"{scim_type} {error_args.get('message', 'SCIM API Internal Error')}".strip(" ")
+            error_args['error_code'] = f"SCIM_{status}"
+        return error_args
+
+
+class _StringErrorParser(_ErrorParser):
+    """
+    Parses errors from the Databricks REST API in the format "ERROR_CODE: MESSAGE".
+    """
+
+    __STRING_ERROR_REGEX = re.compile(r'([A-Z_]+): (.*)')
+
+    def parse_error(self, response: requests.Response, response_body: bytes) -> Optional[dict]:
+        payload_str = response_body.decode('utf-8')
+        match = self.__STRING_ERROR_REGEX.match(payload_str)
+        if not match:
+            logging.debug('_StringErrorParser: unable to parse response as string')
+            return None
+        error_code, message = match.groups()
+        return {'error_code': error_code, 'message': message, 'status': response.status_code, }
+
+
+class _HtmlErrorParser(_ErrorParser):
+    """
+    Parses errors from the Databricks REST API in HTML format.
+    """
+
+    __HTML_ERROR_REGEXES = [re.compile(r'<pre>(.*)</pre>'), re.compile(r'<title>(.*)</title>'), ]
+
+    def parse_error(self, response: requests.Response, response_body: bytes) -> Optional[dict]:
+        payload_str = response_body.decode('utf-8')
+        for regex in self.__HTML_ERROR_REGEXES:
+            match = regex.search(payload_str)
+            if match:
+                message = match.group(1) if match.group(1) else response.reason
+                return {
+                    'status': response.status_code,
+                    'message': message,
+                    'error_code': response.reason.upper().replace(' ', '_')
+                }
+        logging.debug('_HtmlErrorParser: no <pre> tag found in error response')
+        return None
+
+
+# A list of ErrorParsers that are tried in order to parse an API error from a response body. Most errors should be
+# parsable by the _StandardErrorParser, but additional parsers can be added here for specific error formats. The order
+# of the parsers is not important, as the set of errors that can be parsed by each parser should be disjoint.
+_error_parsers = [_EmptyParser(), _StandardErrorParser(), _StringErrorParser(), _HtmlErrorParser(), ]
+
+
+def _unknown_error(response: requests.Response) -> str:
+    """A standard error message that can be shown when an API response cannot be parsed.
+
+    This error message includes a link to the issue tracker for the SDK for users to report the issue to us.
+    """
+    request_log = RoundTrip(response, debug_headers=True, debug_truncate_bytes=10 * 1024).generate()
+    return (
+        'This is likely a bug in the Databricks SDK for Python or the underlying '
+        'API. Please report this issue with the following debugging information to the SDK issue tracker at '
+        f'https://github.com/databricks/databricks-sdk-go/issues. Request log:```{request_log}```')
+
+
+def get_api_error(response: requests.Response) -> Optional[DatabricksError]:
+    """
+    Handles responses from the REST API and returns a DatabricksError if the response indicates an error.
+    :param response: The response from the REST API.
+    :return: A DatabricksError if the response indicates an error, otherwise None.
+    """
+    if not response.ok:
+        content = response.content
+        for parser in _error_parsers:
+            try:
+                error_args = parser.parse_error(response, content)
+                if error_args:
+                    return _error_mapper(response, error_args)
+            except Exception as e:
+                logging.debug(f'Error parsing response with {parser}, continuing', exc_info=e)
+        return _error_mapper(response, {'message': 'unable to parse response. ' + _unknown_error(response)})
+
+    # Private link failures happen via a redirect to the login page. From a requests-perspective, the request
+    # is successful, but the response is not what we expect. We need to handle this case separately.
+    if _is_private_link_redirect(response):
+        return _get_private_link_validation_error(response.url)
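
A quick way to see the parser chain in action is to feed get_api_error a hand-built requests.Response. The sketch below is illustrative (not from the package) and assumes the standard JSON error body, which _StandardErrorParser handles before the string and HTML fallbacks are tried:

    import requests

    from databricks.sdk.errors import get_api_error

    resp = requests.Response()
    resp.status_code = 404
    resp.reason = 'Not Found'
    resp._content = b'{"error_code": "RESOURCE_DOES_NOT_EXIST", "message": "Cluster abc does not exist"}'
    resp.request = requests.Request('GET', 'https://example.cloud.databricks.com/api/2.1/clusters/get').prepare()

    error = get_api_error(resp)
    # _StandardErrorParser extracts message/error_code, _error_mapper picks the exception class
    print(type(error).__name__, error)
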
{databricks_sdk-0.31.0 → databricks_sdk-0.32.0}/databricks/sdk/errors/private_link.py
@@ -51,7 +51,7 @@ def _is_private_link_redirect(resp: requests.Response) -> bool:
     return parsed.path == '/login.html' and 'error=private-link-validation-error' in parsed.query
 
 
-def _get_private_link_validation_error(url: str) -> _PrivateLinkInfo:
+def _get_private_link_validation_error(url: str) -> PrivateLinkValidationError:
     parsed = parse.urlparse(url)
     env = get_environment_for_hostname(parsed.hostname)
     return PrivateLinkValidationError(message=_private_link_info_map[env.cloud].error_message(),
databricks_sdk-0.32.0/databricks/sdk/logger/__init__.py
@@ -0,0 +1 @@
+from .round_trip_logger import RoundTrip
databricks_sdk-0.32.0/databricks/sdk/logger/round_trip_logger.py
@@ -0,0 +1,118 @@
+import json
+import urllib.parse
+from typing import Dict, List
+
+import requests
+
+
+class RoundTrip:
+    """
+    A utility class for converting HTTP requests and responses to strings.
+
+    :param response: The response object to stringify.
+    :param debug_headers: Whether to include headers in the generated string.
+    :param debug_truncate_bytes: The maximum number of bytes to include in the generated string.
+    :param raw: Whether the response is a stream or not. If True, the response will not be logged directly.
+    """
+
+    def __init__(self,
+                 response: requests.Response,
+                 debug_headers: bool,
+                 debug_truncate_bytes: int,
+                 raw=False):
+        self._debug_headers = debug_headers
+        self._debug_truncate_bytes = max(debug_truncate_bytes, 96)
+        self._raw = raw
+        self._response = response
+
+    def generate(self) -> str:
+        """
+        Generate a string representation of the request and response. The string will include the request method, URL,
+        headers, and body, as well as the response status code, reason, headers, and body. Outgoing information
+        will be prefixed with `>`, and incoming information will be prefixed with `<`.
+        :return: A string representation of the request.
+        """
+        request = self._response.request
+        url = urllib.parse.urlparse(request.url)
+        query = ''
+        if url.query:
+            query = f'?{urllib.parse.unquote(url.query)}'
+        sb = [f'{request.method} {urllib.parse.unquote(url.path)}{query}']
+        if self._debug_headers:
+            for k, v in request.headers.items():
+                sb.append(f'> * {k}: {self._only_n_bytes(v, self._debug_truncate_bytes)}')
+        if request.body:
+            sb.append("> [raw stream]" if self._raw else self._redacted_dump("> ", request.body))
+        sb.append(f'< {self._response.status_code} {self._response.reason}')
+        if self._raw and self._response.headers.get('Content-Type', None) != 'application/json':
+            # Raw streams with `Transfer-Encoding: chunked` do not have `Content-Type` header
+            sb.append("< [raw stream]")
+        elif self._response.content:
+            sb.append(self._redacted_dump("< ", self._response.content.decode('utf-8')))
+        return '\n'.join(sb)
+
+    @staticmethod
+    def _mask(m: Dict[str, any]):
+        for k in m:
+            if k in {'bytes_value', 'string_value', 'token_value', 'value', 'content'}:
+                m[k] = "**REDACTED**"
+
+    @staticmethod
+    def _map_keys(m: Dict[str, any]) -> List[str]:
+        keys = list(m.keys())
+        keys.sort()
+        return keys
+
+    @staticmethod
+    def _only_n_bytes(j: str, num_bytes: int = 96) -> str:
+        diff = len(j.encode('utf-8')) - num_bytes
+        if diff > 0:
+            return f"{j[:num_bytes]}... ({diff} more bytes)"
+        return j
+
+    def _recursive_marshal_dict(self, m, budget) -> dict:
+        out = {}
+        self._mask(m)
+        for k in sorted(m.keys()):
+            raw = self._recursive_marshal(m[k], budget)
+            out[k] = raw
+            budget -= len(str(raw))
+        return out
+
+    def _recursive_marshal_list(self, s, budget) -> list:
+        out = []
+        for i in range(len(s)):
+            if i > 0 >= budget:
+                out.append("... (%d additional elements)" % (len(s) - len(out)))
+                break
+            raw = self._recursive_marshal(s[i], budget)
+            out.append(raw)
+            budget -= len(str(raw))
+        return out
+
+    def _recursive_marshal(self, v: any, budget: int) -> any:
+        if isinstance(v, dict):
+            return self._recursive_marshal_dict(v, budget)
+        elif isinstance(v, list):
+            return self._recursive_marshal_list(v, budget)
+        elif isinstance(v, str):
+            return self._only_n_bytes(v, self._debug_truncate_bytes)
+        else:
+            return v
+
+    def _redacted_dump(self, prefix: str, body: str) -> str:
+        if len(body) == 0:
+            return ""
+        try:
+            # Unmarshal body into primitive types.
+            tmp = json.loads(body)
+            max_bytes = 96
+            if self._debug_truncate_bytes > max_bytes:
+                max_bytes = self._debug_truncate_bytes
+            # Re-marshal body taking redaction and character limit into account.
+            raw = self._recursive_marshal(tmp, max_bytes)
+            return "\n".join([f'{prefix}{line}' for line in json.dumps(raw, indent=2).split("\n")])
+        except json.JSONDecodeError:
+            to_log = self._only_n_bytes(body, self._debug_truncate_bytes)
+            log_lines = [prefix + x.strip('\r') for x in to_log.split("\n")]
+            return '\n'.join(log_lines)
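
The new RoundTrip class carries over the redaction and truncation behaviour that previously lived on ApiClient, so it can also be used on its own when debugging requests traffic. A minimal, illustrative sketch (the URL and logging setup are assumptions, not part of the package):

    import logging

    import requests

    from databricks.sdk.logger import RoundTrip

    logging.basicConfig(level=logging.DEBUG)
    response = requests.get('https://example.com/api/2.0/clusters/list')
    line = RoundTrip(response,
                     debug_headers=False,        # set True to include request headers ('> * Header: value')
                     debug_truncate_bytes=1024,  # bodies longer than this are truncated; values below 96 are raised to 96
                     raw=False).generate()
    logging.debug(line)
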