databricks-sdk 0.44.1__py3-none-any.whl → 0.45.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of databricks-sdk might be problematic; see the registry's advisory page for more details.

Files changed (63)
  1. databricks/sdk/__init__.py +123 -115
  2. databricks/sdk/_base_client.py +112 -88
  3. databricks/sdk/_property.py +12 -7
  4. databricks/sdk/_widgets/__init__.py +13 -2
  5. databricks/sdk/_widgets/default_widgets_utils.py +21 -15
  6. databricks/sdk/_widgets/ipywidgets_utils.py +47 -24
  7. databricks/sdk/azure.py +8 -6
  8. databricks/sdk/casing.py +5 -5
  9. databricks/sdk/config.py +152 -99
  10. databricks/sdk/core.py +57 -47
  11. databricks/sdk/credentials_provider.py +300 -205
  12. databricks/sdk/data_plane.py +86 -3
  13. databricks/sdk/dbutils.py +123 -87
  14. databricks/sdk/environments.py +52 -35
  15. databricks/sdk/errors/base.py +61 -35
  16. databricks/sdk/errors/customizer.py +3 -3
  17. databricks/sdk/errors/deserializer.py +38 -25
  18. databricks/sdk/errors/details.py +417 -0
  19. databricks/sdk/errors/mapper.py +1 -1
  20. databricks/sdk/errors/overrides.py +27 -24
  21. databricks/sdk/errors/parser.py +26 -14
  22. databricks/sdk/errors/platform.py +10 -10
  23. databricks/sdk/errors/private_link.py +24 -24
  24. databricks/sdk/logger/round_trip_logger.py +28 -20
  25. databricks/sdk/mixins/compute.py +90 -60
  26. databricks/sdk/mixins/files.py +815 -145
  27. databricks/sdk/mixins/jobs.py +191 -16
  28. databricks/sdk/mixins/open_ai_client.py +26 -20
  29. databricks/sdk/mixins/workspace.py +45 -34
  30. databricks/sdk/oauth.py +372 -196
  31. databricks/sdk/retries.py +14 -12
  32. databricks/sdk/runtime/__init__.py +34 -17
  33. databricks/sdk/runtime/dbutils_stub.py +52 -39
  34. databricks/sdk/service/_internal.py +12 -7
  35. databricks/sdk/service/apps.py +618 -418
  36. databricks/sdk/service/billing.py +827 -604
  37. databricks/sdk/service/catalog.py +6552 -4474
  38. databricks/sdk/service/cleanrooms.py +550 -388
  39. databricks/sdk/service/compute.py +5241 -3531
  40. databricks/sdk/service/dashboards.py +1313 -923
  41. databricks/sdk/service/files.py +442 -309
  42. databricks/sdk/service/iam.py +2115 -1483
  43. databricks/sdk/service/jobs.py +4151 -2588
  44. databricks/sdk/service/marketplace.py +2210 -1517
  45. databricks/sdk/service/ml.py +3364 -2255
  46. databricks/sdk/service/oauth2.py +922 -584
  47. databricks/sdk/service/pipelines.py +1865 -1203
  48. databricks/sdk/service/provisioning.py +1435 -1029
  49. databricks/sdk/service/serving.py +2040 -1278
  50. databricks/sdk/service/settings.py +2846 -1929
  51. databricks/sdk/service/sharing.py +2201 -877
  52. databricks/sdk/service/sql.py +4650 -3103
  53. databricks/sdk/service/vectorsearch.py +816 -550
  54. databricks/sdk/service/workspace.py +1330 -906
  55. databricks/sdk/useragent.py +36 -22
  56. databricks/sdk/version.py +1 -1
  57. {databricks_sdk-0.44.1.dist-info → databricks_sdk-0.45.0.dist-info}/METADATA +31 -31
  58. databricks_sdk-0.45.0.dist-info/RECORD +70 -0
  59. {databricks_sdk-0.44.1.dist-info → databricks_sdk-0.45.0.dist-info}/WHEEL +1 -1
  60. databricks_sdk-0.44.1.dist-info/RECORD +0 -69
  61. {databricks_sdk-0.44.1.dist-info → databricks_sdk-0.45.0.dist-info}/LICENSE +0 -0
  62. {databricks_sdk-0.44.1.dist-info → databricks_sdk-0.45.0.dist-info}/NOTICE +0 -0
  63. {databricks_sdk-0.44.1.dist-info → databricks_sdk-0.45.0.dist-info}/top_level.txt +0 -0
databricks/sdk/retries.py CHANGED
@@ -9,16 +9,18 @@ from .clock import Clock, RealClock
9
9
  logger = logging.getLogger(__name__)
10
10
 
11
11
 
12
- def retried(*,
13
- on: Sequence[Type[BaseException]] = None,
14
- is_retryable: Callable[[BaseException], Optional[str]] = None,
15
- timeout=timedelta(minutes=20),
16
- clock: Clock = None,
17
- before_retry: Callable = None):
12
+ def retried(
13
+ *,
14
+ on: Optional[Sequence[Type[BaseException]]] = None,
15
+ is_retryable: Optional[Callable[[BaseException], Optional[str]]] = None,
16
+ timeout=timedelta(minutes=20),
17
+ clock: Optional[Clock] = None,
18
+ before_retry: Optional[Callable] = None,
19
+ ):
18
20
  has_allowlist = on is not None
19
21
  has_callback = is_retryable is not None
20
22
  if not (has_allowlist or has_callback) or (has_allowlist and has_callback):
21
- raise SyntaxError('either on=[Exception] or callback=lambda x: .. is required')
23
+ raise SyntaxError("either on=[Exception] or callback=lambda x: .. is required")
22
24
  if clock is None:
23
25
  clock = RealClock()
24
26
 
@@ -37,30 +39,30 @@ def retried(*,
37
39
  retry_reason = None
38
40
  # sleep 10s max per attempt, unless it's HTTP 429 or 503
39
41
  sleep = min(10, attempt)
40
- retry_after_secs = getattr(err, 'retry_after_secs', None)
42
+ retry_after_secs = getattr(err, "retry_after_secs", None)
41
43
  if retry_after_secs is not None:
42
44
  # cannot depend on DatabricksError directly because of circular dependency
43
45
  sleep = retry_after_secs
44
- retry_reason = 'throttled by platform'
46
+ retry_reason = "throttled by platform"
45
47
  elif is_retryable is not None:
46
48
  retry_reason = is_retryable(err)
47
49
  elif on is not None:
48
50
  for err_type in on:
49
51
  if not isinstance(err, err_type):
50
52
  continue
51
- retry_reason = f'{type(err).__name__} is allowed to retry'
53
+ retry_reason = f"{type(err).__name__} is allowed to retry"
52
54
 
53
55
  if retry_reason is None:
54
56
  # raise if exception is not retryable
55
57
  raise err
56
58
 
57
- logger.debug(f'Retrying: {retry_reason} (sleeping ~{sleep}s)')
59
+ logger.debug(f"Retrying: {retry_reason} (sleeping ~{sleep}s)")
58
60
  if before_retry:
59
61
  before_retry()
60
62
 
61
63
  clock.sleep(sleep + random())
62
64
  attempt += 1
63
- raise TimeoutError(f'Timed out after {timeout}') from last_err
65
+ raise TimeoutError(f"Timed out after {timeout}") from last_err
64
66
 
65
67
  return wrapper
66
68
 
@@ -3,14 +3,23 @@ from __future__ import annotations
3
3
  import logging
4
4
  from typing import Dict, Optional, Union, cast
5
5
 
6
- logger = logging.getLogger('databricks.sdk')
6
+ logger = logging.getLogger("databricks.sdk")
7
7
  is_local_implementation = True
8
8
 
9
9
  # All objects that are injected into the Notebook's user namespace should also be made
10
10
  # available to be imported from databricks.sdk.runtime.globals. This import can be used
11
11
  # in Python modules so users can access these objects from Files more easily.
12
12
  dbruntime_objects = [
13
- "display", "displayHTML", "dbutils", "table", "sql", "udf", "getArgument", "sc", "sqlContext", "spark"
13
+ "display",
14
+ "displayHTML",
15
+ "dbutils",
16
+ "table",
17
+ "sql",
18
+ "udf",
19
+ "getArgument",
20
+ "sc",
21
+ "sqlContext",
22
+ "spark",
14
23
  ]
15
24
 
16
25
  # DO NOT MOVE THE TRY-CATCH BLOCK BELOW AND DO NOT ADD THINGS BEFORE IT! WILL MAKE TEST FAIL.
@@ -18,7 +27,8 @@ try:
18
27
  # We don't want to expose additional entity to user namespace, so
19
28
  # a workaround here for exposing required information in notebook environment
20
29
  from dbruntime.sdk_credential_provider import init_runtime_native_auth
21
- logger.debug('runtime SDK credential provider available')
30
+
31
+ logger.debug("runtime SDK credential provider available")
22
32
  dbruntime_objects.append("init_runtime_native_auth")
23
33
  except ImportError:
24
34
  init_runtime_native_auth = None
@@ -29,18 +39,19 @@ globals()["init_runtime_native_auth"] = init_runtime_native_auth
29
39
  def init_runtime_repl_auth():
30
40
  try:
31
41
  from dbruntime.databricks_repl_context import get_context
42
+
32
43
  ctx = get_context()
33
44
  if ctx is None:
34
- logger.debug('Empty REPL context returned, skipping runtime auth')
45
+ logger.debug("Empty REPL context returned, skipping runtime auth")
35
46
  return None, None
36
47
  if ctx.workspaceUrl is None:
37
- logger.debug('Workspace URL is not available, skipping runtime auth')
48
+ logger.debug("Workspace URL is not available, skipping runtime auth")
38
49
  return None, None
39
- host = f'https://{ctx.workspaceUrl}'
50
+ host = f"https://{ctx.workspaceUrl}"
40
51
 
41
52
  def inner() -> Dict[str, str]:
42
53
  ctx = get_context()
43
- return {'Authorization': f'Bearer {ctx.apiToken}'}
54
+ return {"Authorization": f"Bearer {ctx.apiToken}"}
44
55
 
45
56
  return host, inner
46
57
  except ImportError:
@@ -50,11 +61,12 @@ def init_runtime_repl_auth():
50
61
  def init_runtime_legacy_auth():
51
62
  try:
52
63
  import IPython
64
+
53
65
  ip_shell = IPython.get_ipython()
54
66
  if ip_shell is None:
55
67
  return None, None
56
68
  global_ns = ip_shell.ns_table["user_global"]
57
- if 'dbutils' not in global_ns:
69
+ if "dbutils" not in global_ns:
58
70
  return None, None
59
71
  dbutils = global_ns["dbutils"].notebook.entry_point.getDbutils()
60
72
  if dbutils is None:
@@ -62,11 +74,11 @@ def init_runtime_legacy_auth():
62
74
  ctx = dbutils.notebook().getContext()
63
75
  if ctx is None:
64
76
  return None, None
65
- host = getattr(ctx, 'apiUrl')().get()
77
+ host = getattr(ctx, "apiUrl")().get()
66
78
 
67
79
  def inner() -> Dict[str, str]:
68
80
  ctx = dbutils.notebook().getContext()
69
- return {'Authorization': f'Bearer {getattr(ctx, "apiToken")().get()}'}
81
+ return {"Authorization": f'Bearer {getattr(ctx, "apiToken")().get()}'}
70
82
 
71
83
  return host, inner
72
84
  except ImportError:
@@ -97,7 +109,8 @@ except ImportError:
97
109
  try:
98
110
  # We expect this to fail and only do this for providing types
99
111
  from pyspark.sql.context import SQLContext
100
- sqlContext: SQLContext = None # type: ignore
112
+
113
+ sqlContext: SQLContext = None # type: ignore
101
114
  table = sqlContext.table
102
115
  except Exception as e:
103
116
  logging.debug(f"Failed to initialize globals 'sqlContext' and 'table', continuing. Cause: {e}")
@@ -109,8 +122,9 @@ except ImportError:
109
122
 
110
123
  try:
111
124
  from databricks.connect import DatabricksSession # type: ignore
125
+
112
126
  spark = DatabricksSession.builder.getOrCreate()
113
- sql = spark.sql # type: ignore
127
+ sql = spark.sql # type: ignore
114
128
  except Exception as e:
115
129
  # We are ignoring all failures here because user might want to initialize
116
130
  # spark session themselves and we don't want to interfere with that
@@ -118,11 +132,11 @@ except ImportError:
118
132
 
119
133
  try:
120
134
  # We expect this to fail locally since dbconnect does not support sparkcontext. This is just for typing
121
- sc = spark.sparkContext
135
+ sc = spark.sparkContext # type: ignore
122
136
  except Exception as e:
123
137
  logging.debug(f"Failed to initialize global 'sc', continuing. Cause: {e}")
124
138
 
125
- def display(input=None, *args, **kwargs) -> None: # type: ignore
139
+ def display(input=None, *args, **kwargs) -> None: # type: ignore
126
140
  """
127
141
  Display plots or data.
128
142
  Display plot:
@@ -144,9 +158,10 @@ except ImportError:
144
158
  """
145
159
  # Import inside the function so that imports are only triggered on usage.
146
160
  from IPython import display as IPDisplay
147
- return IPDisplay.display(input, *args, **kwargs) # type: ignore
148
161
 
149
- def displayHTML(html) -> None: # type: ignore
162
+ return IPDisplay.display(input, *args, **kwargs) # type: ignore
163
+
164
+ def displayHTML(html) -> None: # type: ignore
150
165
  """
151
166
  Display HTML data.
152
167
  Parameters
@@ -160,13 +175,15 @@ except ImportError:
160
175
  """
161
176
  # Import inside the function so that imports are only triggered on usage.
162
177
  from IPython import display as IPDisplay
163
- return IPDisplay.display_html(html, raw=True) # type: ignore
178
+
179
+ return IPDisplay.display_html(html, raw=True) # type: ignore
164
180
 
165
181
  # We want to propagate the error in initialising dbutils because this is a core
166
182
  # functionality of the sdk
167
183
  from databricks.sdk.dbutils import RemoteDbUtils
168
184
 
169
185
  from . import dbutils_stub
186
+
170
187
  dbutils_type = Union[dbutils_stub.dbutils, RemoteDbUtils]
171
188
 
172
189
  dbutils = RemoteDbUtils()
@@ -2,21 +2,21 @@ import typing
2
2
  from collections import namedtuple
3
3
 
4
4
 
5
- class FileInfo(namedtuple('FileInfo', ['path', 'name', 'size', "modificationTime"])):
5
+ class FileInfo(namedtuple("FileInfo", ["path", "name", "size", "modificationTime"])):
6
6
  pass
7
7
 
8
8
 
9
- class MountInfo(namedtuple('MountInfo', ['mountPoint', 'source', 'encryptionType'])):
9
+ class MountInfo(namedtuple("MountInfo", ["mountPoint", "source", "encryptionType"])):
10
10
  pass
11
11
 
12
12
 
13
- class SecretScope(namedtuple('SecretScope', ['name'])):
13
+ class SecretScope(namedtuple("SecretScope", ["name"])):
14
14
 
15
15
  def getName(self):
16
16
  return self.name
17
17
 
18
18
 
19
- class SecretMetadata(namedtuple('SecretMetadata', ['key'])):
19
+ class SecretMetadata(namedtuple("SecretMetadata", ["key"])):
20
20
  pass
21
21
 
22
22
 
@@ -49,8 +49,7 @@ class dbutils:
49
49
  ...
50
50
 
51
51
  @staticmethod
52
- def getCurrentCredentials() -> typing.Mapping[str, str]:
53
- ...
52
+ def getCurrentCredentials() -> typing.Mapping[str, str]: ...
54
53
 
55
54
  class data:
56
55
  """
@@ -129,40 +128,38 @@ class dbutils:
129
128
  ...
130
129
 
131
130
  @staticmethod
132
- def cacheFiles(*files):
133
- ...
131
+ def cacheFiles(*files): ...
134
132
 
135
133
  @staticmethod
136
- def cacheTable(name: str):
137
- ...
134
+ def cacheTable(name: str): ...
138
135
 
139
136
  @staticmethod
140
- def uncacheFiles(*files):
141
- ...
137
+ def uncacheFiles(*files): ...
142
138
 
143
139
  @staticmethod
144
- def uncacheTable(name: str):
145
- ...
140
+ def uncacheTable(name: str): ...
146
141
 
147
142
  @staticmethod
148
- def mount(source: str,
149
- mount_point: str,
150
- encryption_type: str = "",
151
- owner: typing.Optional[str] = None,
152
- extra_configs: typing.Mapping[str, str] = {},
153
- ) -> bool:
143
+ def mount(
144
+ source: str,
145
+ mount_point: str,
146
+ encryption_type: str = "",
147
+ owner: typing.Optional[str] = None,
148
+ extra_configs: typing.Mapping[str, str] = {},
149
+ ) -> bool:
154
150
  """
155
151
  Mounts the given source directory into DBFS at the given mount point
156
152
  """
157
153
  ...
158
154
 
159
155
  @staticmethod
160
- def updateMount(source: str,
161
- mount_point: str,
162
- encryption_type: str = "",
163
- owner: typing.Optional[str] = None,
164
- extra_configs: typing.Mapping[str, str] = {},
165
- ) -> bool:
156
+ def updateMount(
157
+ source: str,
158
+ mount_point: str,
159
+ encryption_type: str = "",
160
+ owner: typing.Optional[str] = None,
161
+ extra_configs: typing.Mapping[str, str] = {},
162
+ ) -> bool:
166
163
  """
167
164
  Similar to mount(), but updates an existing mount point (if present) instead of creating a new one
168
165
  """
@@ -200,7 +197,12 @@ class dbutils:
200
197
  """
201
198
 
202
199
  @staticmethod
203
- def get(taskKey: str, key: str, default: any = None, debugValue: any = None) -> None:
200
+ def get(
201
+ taskKey: str,
202
+ key: str,
203
+ default: any = None,
204
+ debugValue: any = None,
205
+ ) -> None:
204
206
  """
205
207
  Returns the latest task value that belongs to the current job run
206
208
  """
@@ -238,7 +240,11 @@ class dbutils:
238
240
  ...
239
241
 
240
242
  @staticmethod
241
- def run(path: str, timeout_seconds: int, arguments: typing.Mapping[str, str]) -> str:
243
+ def run(
244
+ path: str,
245
+ timeout_seconds: int,
246
+ arguments: typing.Mapping[str, str],
247
+ ) -> str:
242
248
  """
243
249
  This method runs a notebook and returns its exit value
244
250
  """
@@ -307,7 +313,12 @@ class dbutils:
307
313
  ...
308
314
 
309
315
  @staticmethod
310
- def dropdown(name: str, defaultValue: str, choices: typing.List[str], label: str = None):
316
+ def dropdown(
317
+ name: str,
318
+ defaultValue: str,
319
+ choices: typing.List[str],
320
+ label: str = None,
321
+ ):
311
322
  """Creates a dropdown input widget with given specification.
312
323
  :param name: Name of argument associated with the new input widget
313
324
  :param defaultValue: Default value of the input widget (must be one of choices)
@@ -317,11 +328,12 @@ class dbutils:
317
328
  ...
318
329
 
319
330
  @staticmethod
320
- def combobox(name: str,
321
- defaultValue: str,
322
- choices: typing.List[str],
323
- label: typing.Optional[str] = None,
324
- ):
331
+ def combobox(
332
+ name: str,
333
+ defaultValue: str,
334
+ choices: typing.List[str],
335
+ label: typing.Optional[str] = None,
336
+ ):
325
337
  """Creates a combobox input widget with given specification.
326
338
  :param name: Name of argument associated with the new input widget
327
339
  :param defaultValue: Default value of the input widget
@@ -331,11 +343,12 @@ class dbutils:
331
343
  ...
332
344
 
333
345
  @staticmethod
334
- def multiselect(name: str,
335
- defaultValue: str,
336
- choices: typing.List[str],
337
- label: typing.Optional[str] = None,
338
- ):
346
+ def multiselect(
347
+ name: str,
348
+ defaultValue: str,
349
+ choices: typing.List[str],
350
+ label: typing.Optional[str] = None,
351
+ ):
339
352
  """Creates a multiselect input widget with given specification.
340
353
  :param name: Name of argument associated with the new input widget
341
354
  :param defaultValue: Default value of the input widget (must be one of choices)
@@ -6,18 +6,21 @@ from typing import Callable, Dict, Generic, Optional, Type, TypeVar
6
6
  def _from_dict(d: Dict[str, any], field: str, cls: Type) -> any:
7
7
  if field not in d or d[field] is None:
8
8
  return None
9
- return getattr(cls, 'from_dict')(d[field])
9
+ return getattr(cls, "from_dict")(d[field])
10
10
 
11
11
 
12
12
  def _repeated_dict(d: Dict[str, any], field: str, cls: Type) -> any:
13
13
  if field not in d or not d[field]:
14
14
  return []
15
- from_dict = getattr(cls, 'from_dict')
15
+ from_dict = getattr(cls, "from_dict")
16
16
  return [from_dict(v) for v in d[field]]
17
17
 
18
18
 
19
19
  def _get_enum_value(cls: Type, value: str) -> Optional[Type]:
20
- return next((v for v in getattr(cls, '__members__').values() if v.value == value), None)
20
+ return next(
21
+ (v for v in getattr(cls, "__members__").values() if v.value == value),
22
+ None,
23
+ )
21
24
 
22
25
 
23
26
  def _enum(d: Dict[str, any], field: str, cls: Type) -> any:
@@ -43,7 +46,7 @@ def _escape_multi_segment_path_parameter(param: str) -> str:
43
46
  return urllib.parse.quote(param)
44
47
 
45
48
 
46
- ReturnType = TypeVar('ReturnType')
49
+ ReturnType = TypeVar("ReturnType")
47
50
 
48
51
 
49
52
  class Wait(Generic[ReturnType]):
@@ -60,8 +63,10 @@ class Wait(Generic[ReturnType]):
60
63
  def bind(self) -> dict:
61
64
  return self._bind
62
65
 
63
- def result(self,
64
- timeout: datetime.timedelta = datetime.timedelta(minutes=20),
65
- callback: Callable[[ReturnType], None] = None) -> ReturnType:
66
+ def result(
67
+ self,
68
+ timeout: datetime.timedelta = datetime.timedelta(minutes=20),
69
+ callback: Callable[[ReturnType], None] = None,
70
+ ) -> ReturnType:
66
71
  kwargs = self._bind.copy()
67
72
  return self._waiter(callback=callback, timeout=timeout, **kwargs)