databricks-sdk 0.44.1__py3-none-any.whl → 0.46.0__py3-none-any.whl
This diff compares the contents of two publicly released versions of the package, as published to their public registries, and is provided for informational purposes only.
- databricks/sdk/__init__.py +135 -116
- databricks/sdk/_base_client.py +112 -88
- databricks/sdk/_property.py +12 -7
- databricks/sdk/_widgets/__init__.py +13 -2
- databricks/sdk/_widgets/default_widgets_utils.py +21 -15
- databricks/sdk/_widgets/ipywidgets_utils.py +47 -24
- databricks/sdk/azure.py +8 -6
- databricks/sdk/casing.py +5 -5
- databricks/sdk/config.py +156 -99
- databricks/sdk/core.py +57 -47
- databricks/sdk/credentials_provider.py +306 -206
- databricks/sdk/data_plane.py +75 -50
- databricks/sdk/dbutils.py +123 -87
- databricks/sdk/environments.py +52 -35
- databricks/sdk/errors/base.py +61 -35
- databricks/sdk/errors/customizer.py +3 -3
- databricks/sdk/errors/deserializer.py +38 -25
- databricks/sdk/errors/details.py +417 -0
- databricks/sdk/errors/mapper.py +1 -1
- databricks/sdk/errors/overrides.py +27 -24
- databricks/sdk/errors/parser.py +26 -14
- databricks/sdk/errors/platform.py +10 -10
- databricks/sdk/errors/private_link.py +24 -24
- databricks/sdk/logger/round_trip_logger.py +28 -20
- databricks/sdk/mixins/compute.py +90 -60
- databricks/sdk/mixins/files.py +815 -145
- databricks/sdk/mixins/jobs.py +191 -16
- databricks/sdk/mixins/open_ai_client.py +26 -20
- databricks/sdk/mixins/workspace.py +45 -34
- databricks/sdk/oauth.py +379 -198
- databricks/sdk/retries.py +14 -12
- databricks/sdk/runtime/__init__.py +34 -17
- databricks/sdk/runtime/dbutils_stub.py +52 -39
- databricks/sdk/service/_internal.py +12 -7
- databricks/sdk/service/apps.py +618 -418
- databricks/sdk/service/billing.py +827 -604
- databricks/sdk/service/catalog.py +6552 -4474
- databricks/sdk/service/cleanrooms.py +550 -388
- databricks/sdk/service/compute.py +5263 -3536
- databricks/sdk/service/dashboards.py +1331 -924
- databricks/sdk/service/files.py +446 -309
- databricks/sdk/service/iam.py +2115 -1483
- databricks/sdk/service/jobs.py +4151 -2588
- databricks/sdk/service/marketplace.py +2210 -1517
- databricks/sdk/service/ml.py +3839 -2256
- databricks/sdk/service/oauth2.py +910 -584
- databricks/sdk/service/pipelines.py +1865 -1203
- databricks/sdk/service/provisioning.py +1435 -1029
- databricks/sdk/service/serving.py +2060 -1290
- databricks/sdk/service/settings.py +2846 -1929
- databricks/sdk/service/sharing.py +2201 -877
- databricks/sdk/service/sql.py +4650 -3103
- databricks/sdk/service/vectorsearch.py +816 -550
- databricks/sdk/service/workspace.py +1330 -906
- databricks/sdk/useragent.py +36 -22
- databricks/sdk/version.py +1 -1
- {databricks_sdk-0.44.1.dist-info → databricks_sdk-0.46.0.dist-info}/METADATA +31 -31
- databricks_sdk-0.46.0.dist-info/RECORD +70 -0
- {databricks_sdk-0.44.1.dist-info → databricks_sdk-0.46.0.dist-info}/WHEEL +1 -1
- databricks_sdk-0.44.1.dist-info/RECORD +0 -69
- {databricks_sdk-0.44.1.dist-info → databricks_sdk-0.46.0.dist-info}/LICENSE +0 -0
- {databricks_sdk-0.44.1.dist-info → databricks_sdk-0.46.0.dist-info}/NOTICE +0 -0
- {databricks_sdk-0.44.1.dist-info → databricks_sdk-0.46.0.dist-info}/top_level.txt +0 -0
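
The databricks/sdk/version.py change is the usual one-line version bump. A quick sanity check after upgrading (a sketch; it assumes the new wheel is installed in the active environment):

    from databricks.sdk.version import __version__

    # Prints "0.46.0" once the new wheel is installed.
    print(__version__)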
databricks/sdk/_base_client.py
CHANGED

@@ -17,7 +17,7 @@ from .errors import DatabricksError, _ErrorCustomizer, _Parser
 from .logger import RoundTrip
 from .retries import retried
 
-logger = logging.getLogger('databricks.sdk')
+logger = logging.getLogger("databricks.sdk")
 
 
 def _fix_host_if_needed(host: Optional[str]) -> Optional[str]:
@@ -25,35 +25,37 @@ def _fix_host_if_needed(host: Optional[str]) -> Optional[str]:
         return host
 
     # Add a default scheme if it's missing
-    if '://' not in host:
-        host = 'https://' + host
+    if "://" not in host:
+        host = "https://" + host
 
     o = urllib.parse.urlparse(host)
     # remove trailing slash
-    path = o.path.rstrip('/')
+    path = o.path.rstrip("/")
     # remove port if 443
     netloc = o.netloc
    if o.port == 443:
-        netloc = netloc.split(':')[0]
+        netloc = netloc.split(":")[0]
 
     return urllib.parse.urlunparse((o.scheme, netloc, path, o.params, o.query, o.fragment))
 
 
 class _BaseClient:
 
-    def __init__(self,
-                 debug_truncate_bytes: int = None,
-                 retry_timeout_seconds: int = None,
-                 user_agent_base: str = None,
-                 header_factory: Callable[[], dict] = None,
-                 max_connection_pools: int = None,
-                 max_connections_per_pool: int = None,
-                 pool_block: bool = True,
-                 http_timeout_seconds: float = None,
-                 extra_error_customizers: List[_ErrorCustomizer] = None,
-                 debug_headers: bool = False,
-                 clock: Clock = None,
-                 streaming_buffer_size: int = 1024 * 1024):  # 1MB
+    def __init__(
+        self,
+        debug_truncate_bytes: Optional[int] = None,
+        retry_timeout_seconds: Optional[int] = None,
+        user_agent_base: Optional[str] = None,
+        header_factory: Optional[Callable[[], dict]] = None,
+        max_connection_pools: Optional[int] = None,
+        max_connections_per_pool: Optional[int] = None,
+        pool_block: Optional[bool] = True,
+        http_timeout_seconds: Optional[float] = None,
+        extra_error_customizers: Optional[List[_ErrorCustomizer]] = None,
+        debug_headers: Optional[bool] = False,
+        clock: Optional[Clock] = None,
+        streaming_buffer_size: int = 1024 * 1024,
+    ):  # 1MB
         """
         :param debug_truncate_bytes:
         :param retry_timeout_seconds:
@@ -87,9 +89,11 @@ class _BaseClient:
         # We don't use `max_retries` from HTTPAdapter to align with a more production-ready
         # retry strategy established in the Databricks SDK for Go. See _is_retryable and
         # @retried for more details.
-        http_adapter = requests.adapters.HTTPAdapter(pool_connections=max_connections_per_pool or 20,
-                                                     pool_maxsize=max_connection_pools or 20,
-                                                     pool_block=pool_block)
+        http_adapter = requests.adapters.HTTPAdapter(
+            pool_connections=max_connections_per_pool or 20,
+            pool_maxsize=max_connection_pools or 20,
+            pool_block=pool_block,
+        )
         self._session.mount("https://", http_adapter)
 
         # Default to 60 seconds
@@ -110,7 +114,7 @@ class _BaseClient:
         # See: https://github.com/databricks/databricks-sdk-py/issues/142
         if query is None:
             return None
-        with_fixed_bools = {k: v if type(v) != bool else ('true' if v else 'false') for k, v in query.items()}
+        with_fixed_bools = {k: v if type(v) != bool else ("true" if v else "false") for k, v in query.items()}
 
         # Query parameters may be nested, e.g.
         # {'filter_by': {'user_ids': [123, 456]}}
@@ -140,30 +144,34 @@ class _BaseClient:
             return False
         return data.seekable()
 
-    def do(self,
-           method: str,
-           url: str,
-           query: dict = None,
-           headers: dict = None,
-           body: dict = None,
-           raw: bool = False,
-           files=None,
-           data=None,
-           auth: Callable[[requests.PreparedRequest], requests.PreparedRequest] = None,
-           response_headers: List[str] = None) -> Union[dict, list, BinaryIO]:
+    def do(
+        self,
+        method: str,
+        url: str,
+        query: Optional[dict] = None,
+        headers: Optional[dict] = None,
+        body: Optional[dict] = None,
+        raw: bool = False,
+        files=None,
+        data=None,
+        auth: Optional[Callable[[requests.PreparedRequest], requests.PreparedRequest]] = None,
+        response_headers: Optional[List[str]] = None,
+    ) -> Union[dict, list, BinaryIO]:
         if headers is None:
             headers = {}
-        headers['User-Agent'] = self._user_agent_base
+        headers["User-Agent"] = self._user_agent_base
 
         # Wrap strings and bytes in a seekable stream so that we can rewind them.
         if isinstance(data, (str, bytes)):
-            data = io.BytesIO(data.encode('utf-8') if isinstance(data, str) else data)
+            data = io.BytesIO(data.encode("utf-8") if isinstance(data, str) else data)
 
         if not data:
             # The request is not a stream.
-            call = retried(timeout=timedelta(seconds=self._retry_timeout_seconds),
-                           is_retryable=self._is_retryable,
-                           clock=self._clock)(self._perform)
+            call = retried(
+                timeout=timedelta(seconds=self._retry_timeout_seconds),
+                is_retryable=self._is_retryable,
+                clock=self._clock,
+            )(self._perform)
         elif self._is_seekable_stream(data):
             # Keep track of the initial position of the stream so that we can rewind to it
             # if we need to retry the request.
@@ -173,25 +181,29 @@ class _BaseClient:
                 logger.debug(f"Rewinding input data to offset {initial_data_position} before retry")
                 data.seek(initial_data_position)
 
-            call = retried(timeout=timedelta(seconds=self._retry_timeout_seconds),
-                           is_retryable=self._is_retryable,
-                           clock=self._clock,
-                           before_retry=rewind)(self._perform)
+            call = retried(
+                timeout=timedelta(seconds=self._retry_timeout_seconds),
+                is_retryable=self._is_retryable,
+                clock=self._clock,
+                before_retry=rewind,
+            )(self._perform)
         else:
             # Do not retry if the stream is not seekable. This is necessary to avoid bugs
             # where the retry doesn't re-read already read data from the stream.
             logger.debug(f"Retry disabled for non-seekable stream: type={type(data)}")
             call = self._perform
 
-        response = call(method,
-                        url,
-                        query=query,
-                        headers=headers,
-                        body=body,
-                        raw=raw,
-                        files=files,
-                        data=data,
-                        auth=auth)
+        response = call(
+            method,
+            url,
+            query=query,
+            headers=headers,
+            body=body,
+            raw=raw,
+            files=files,
+            data=data,
+            auth=auth,
+        )
 
         resp = dict()
         for header in response_headers if response_headers else []:
@@ -220,6 +232,7 @@ class _BaseClient:
         # and Databricks SDK for Go retries
         # (see https://github.com/databricks/databricks-sdk-go/blob/main/apierr/errors.go)
         from urllib3.exceptions import ProxyError
+
         if isinstance(err, ProxyError):
             err = err.original_error
         if isinstance(err, requests.ConnectionError):
@@ -230,48 +243,55 @@ class _BaseClient:
             #
             # return a simple string for debug log readability, as `raise TimeoutError(...) from err`
             # will bubble up the original exception in case we reach max retries.
-            return f'cannot connect'
+            return f"cannot connect"
         if isinstance(err, requests.Timeout):
             # corresponds to `TLS handshake timeout` and `i/o timeout` in Go.
             #
             # return a simple string for debug log readability, as `raise TimeoutError(...) from err`
             # will bubble up the original exception in case we reach max retries.
-            return f'timeout'
+            return f"timeout"
         if isinstance(err, DatabricksError):
             message = str(err)
             transient_error_string_matches = [
                 "com.databricks.backend.manager.util.UnknownWorkerEnvironmentException",
-                "does not have any associated worker environments", "There is no worker environment with id",
-                "Unknown worker environment", "ClusterNotReadyException", "Unexpected error",
+                "does not have any associated worker environments",
+                "There is no worker environment with id",
+                "Unknown worker environment",
+                "ClusterNotReadyException",
+                "Unexpected error",
                 "Please try again later or try a faster operation.",
                 "RPC token bucket limit has been exceeded",
             ]
             for substring in transient_error_string_matches:
                 if substring not in message:
                     continue
-                return f'matched {substring}'
+                return f"matched {substring}"
             return None
 
-    def _perform(self,
-                 method: str,
-                 url: str,
-                 query: dict = None,
-                 headers: dict = None,
-                 body: dict = None,
-                 raw: bool = False,
-                 files=None,
-                 data=None,
-                 auth: Callable[[requests.PreparedRequest], requests.PreparedRequest] = None):
-        response = self._session.request(method,
-                                         url,
-                                         params=self._fix_query_string(query),
-                                         json=body,
-                                         headers=headers,
-                                         files=files,
-                                         data=data,
-                                         auth=auth,
-                                         stream=raw,
-                                         timeout=self._http_timeout_seconds)
+    def _perform(
+        self,
+        method: str,
+        url: str,
+        query: Optional[dict] = None,
+        headers: Optional[dict] = None,
+        body: Optional[dict] = None,
+        raw: bool = False,
+        files=None,
+        data=None,
+        auth: Callable[[requests.PreparedRequest], requests.PreparedRequest] = None,
+    ):
+        response = self._session.request(
+            method,
+            url,
+            params=self._fix_query_string(query),
+            json=body,
+            headers=headers,
+            files=files,
+            data=data,
+            auth=auth,
+            stream=raw,
+            timeout=self._http_timeout_seconds,
+        )
         self._record_request_log(response, raw=raw or data is not None or files is not None)
         error = self._error_parser.get_api_error(response)
         if error is not None:
@@ -305,14 +325,14 @@ class _StreamingResponse(BinaryIO):
     _closed: bool = False
 
     def fileno(self) -> int:
-        pass
+        return 0
 
-    def flush(self) -> int:
-        pass
+    def flush(self) -> int:  # type: ignore
+        return 0
 
     def __init__(self, response: _RawResponse, chunk_size: Union[int, None] = None):
         self._response = response
-        self._buffer = b''
+        self._buffer = b""
         self._content = None
         self._chunk_size = chunk_size
@@ -338,14 +358,14 @@ class _StreamingResponse(BinaryIO):
 
     def read(self, n: int = -1) -> bytes:
         """
-        Read up to n bytes from the response stream. If n is negative, read
-        until the end of the stream.
+        Read up to n bytes from the response stream. If n is negative, read
+        until the end of the stream.
         """
 
         self._open()
         read_everything = n < 0
         remaining_bytes = n
-        res = b''
+        res = b""
         while remaining_bytes > 0 or read_everything:
             if len(self._buffer) == 0:
                 try:
@@ -383,10 +403,10 @@ class _StreamingResponse(BinaryIO):
     def writable(self) -> bool:
         return False
 
-    def write(self, s: Union[bytes, bytearray]) -> int:
+    def write(self, s: Union[bytes, bytearray]) -> int:  # type: ignore
         raise NotImplementedError()
 
-    def writelines(self, lines: Iterable[bytes]) -> None:
+    def writelines(self, lines: Iterable[bytes]) -> None:  # type: ignore
         raise NotImplementedError()
 
     def __next__(self) -> bytes:
@@ -395,8 +415,12 @@ class _StreamingResponse(BinaryIO):
     def __iter__(self) -> Iterator[bytes]:
         return self._content
 
-    def __exit__(self, t: Union[Type[BaseException], None], value: Union[BaseException, None],
-                 traceback: Union[TracebackType, None]) -> None:
+    def __exit__(
+        self,
+        t: Union[Type[BaseException], None],
+        value: Union[BaseException, None],
+        traceback: Union[TracebackType, None],
+    ) -> None:
         self._content = None
-        self._buffer = b''
+        self._buffer = b""
         self.close()
databricks/sdk/_property.py
CHANGED

@@ -16,8 +16,9 @@ class _cached_property:
         if self.attrname is None:
             self.attrname = name
         elif name != self.attrname:
-            raise TypeError("Cannot assign the same cached_property to two different names "
-                            f"({self.attrname!r} and {name!r}).")
+            raise TypeError(
+                "Cannot assign the same cached_property to two different names " f"({self.attrname!r} and {name!r})."
+            )
 
     def __get__(self, instance, owner=None):
         if instance is None:
@@ -26,9 +27,11 @@ class _cached_property:
             raise TypeError("Cannot use cached_property instance without calling __set_name__ on it.")
         try:
             cache = instance.__dict__
-        except AttributeError:
-            msg = (f"No '__dict__' attribute on {type(instance).__name__!r} "
-                   f"instance to cache {self.attrname!r} property.")
+        except AttributeError:  # not all objects have __dict__ (e.g. class defines slots)
+            msg = (
+                f"No '__dict__' attribute on {type(instance).__name__!r} "
+                f"instance to cache {self.attrname!r} property."
+            )
             raise TypeError(msg) from None
         val = cache.get(self.attrname, _NOT_FOUND)
         if val is _NOT_FOUND:
@@ -36,7 +39,9 @@ class _cached_property:
             try:
                 cache[self.attrname] = val
             except TypeError:
-                msg = (f"The '__dict__' attribute on {type(instance).__name__!r} instance "
-                       f"does not support item assignment for caching {self.attrname!r} property.")
+                msg = (
+                    f"The '__dict__' attribute on {type(instance).__name__!r} instance "
+                    f"does not support item assignment for caching {self.attrname!r} property."
+                )
                 raise TypeError(msg) from None
         return val
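
_cached_property mirrors the stdlib functools.cached_property: __set_name__ records the attribute name, and __get__ stores the computed value in the instance __dict__ under that name, which is why objects without a __dict__ (for example, classes that define __slots__) hit the TypeError paths above. A short demonstration of the caching behavior, using the stdlib equivalent:

    from functools import cached_property

    class Connection:
        @cached_property
        def token(self) -> str:
            print("computed once")
            return "secret"

    c = Connection()
    c.token  # prints "computed once" and caches the value in c.__dict__["token"]
    c.token  # served from c.__dict__; the property body does not run again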
databricks/sdk/_widgets/__init__.py
CHANGED

@@ -43,7 +43,17 @@ try:
     # Detect if we are in an interactive notebook by iterating over the mro of the current ipython instance,
     # to find ZMQInteractiveShell (jupyter). When used from REPL or file, this check will fail, since the
     # mro only contains TerminalInteractiveShell.
-    if len(list(filter(lambda i: i.__name__ == 'ZMQInteractiveShell', get_ipython().__class__.__mro__))) == 0:
+    if (
+        len(
+            list(
+                filter(
+                    lambda i: i.__name__ == "ZMQInteractiveShell",
+                    get_ipython().__class__.__mro__,
+                )
+            )
+        )
+        == 0
+    ):
         logging.debug("Not in an interactive notebook. Skipping ipywidgets implementation for dbutils.")
         raise EnvironmentError("Not in an interactive notebook.")
 
@@ -61,7 +71,8 @@ try:
         warnings.warn(
             "\nTo use databricks widgets interactively in your notebook, please install databricks sdk using:\n"
             "\tpip install 'databricks-sdk[notebook]'\n"
-            "Falling back to default_value_only implementation for databricks widgets.")
+            "Falling back to default_value_only implementation for databricks widgets."
+        )
         logging.debug(f"{e.msg}. Skipping ipywidgets implementation for dbutils.")
         raise e
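
The reformatted condition is the same check as before: walk the MRO of the active IPython shell class looking for ZMQInteractiveShell, which backs Jupyter kernels; a terminal REPL only has TerminalInteractiveShell in its MRO, so the scan comes up empty there. A standalone sketch of the same detection (assuming IPython is installed):

    from IPython import get_ipython

    def in_interactive_notebook() -> bool:
        shell = get_ipython()
        if shell is None:  # plain `python` process, no IPython shell at all
            return False
        return any(k.__name__ == "ZMQInteractiveShell" for k in type(shell).__mro__)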
databricks/sdk/_widgets/default_widgets_utils.py
CHANGED

@@ -11,25 +11,31 @@ class DefaultValueOnlyWidgetUtils(WidgetUtils):
     def text(self, name: str, defaultValue: str, label: typing.Optional[str] = None):
         self._widgets[name] = defaultValue
 
-    def dropdown(self,
-                 name: str,
-                 defaultValue: str,
-                 choices: typing.List[str],
-                 label: typing.Optional[str] = None):
+    def dropdown(
+        self,
+        name: str,
+        defaultValue: str,
+        choices: typing.List[str],
+        label: typing.Optional[str] = None,
+    ):
         self._widgets[name] = defaultValue
 
-    def combobox(self,
-                 name: str,
-                 defaultValue: str,
-                 choices: typing.List[str],
-                 label: typing.Optional[str] = None):
+    def combobox(
+        self,
+        name: str,
+        defaultValue: str,
+        choices: typing.List[str],
+        label: typing.Optional[str] = None,
+    ):
         self._widgets[name] = defaultValue
 
-    def multiselect(self,
-                    name: str,
-                    defaultValue: str,
-                    choices: typing.List[str],
-                    label: typing.Optional[str] = None):
+    def multiselect(
+        self,
+        name: str,
+        defaultValue: str,
+        choices: typing.List[str],
+        label: typing.Optional[str] = None,
+    ):
         self._widgets[name] = defaultValue
 
     def _get(self, name: str) -> str:
databricks/sdk/_widgets/ipywidgets_utils.py
CHANGED

@@ -28,9 +28,9 @@ class DbUtilsWidget:
         if type(value) == str or value is None:
             return value
         if type(value) == list or type(value) == tuple:
-            return ','.join(value)
+            return ",".join(value)
 
-        raise ValueError("The returned value has invalid type (" + str(type(value)) + ").")
+        raise ValueError(f"The returned value has invalid type ({type(value)}).")
 
 
 class IPyWidgetUtil(WidgetUtils):
@@ -38,7 +38,12 @@ class IPyWidgetUtil(WidgetUtils):
     def __init__(self) -> None:
         self._widgets: typing.Dict[str, DbUtilsWidget] = {}
 
-    def _register(self, name: str, widget: ValueWidget, label: typing.Optional[str] = None):
+    def _register(
+        self,
+        name: str,
+        widget: ValueWidget,
+        label: typing.Optional[str] = None,
+    ):
         label = label if label is not None else name
         w = DbUtilsWidget(label, widget)
@@ -51,29 +56,47 @@ class IPyWidgetUtil(WidgetUtils):
     def text(self, name: str, defaultValue: str, label: typing.Optional[str] = None):
         self._register(name, widget_string.Text(defaultValue), label)
 
-    def dropdown(self,
-                 name: str,
-                 defaultValue: str,
-                 choices: typing.List[str],
-                 label: typing.Optional[str] = None):
-        self._register(name, widget_selection.Dropdown(value=defaultValue, options=choices), label)
-
-    def combobox(self,
-                 name: str,
-                 defaultValue: str,
-                 choices: typing.List[str],
-                 label: typing.Optional[str] = None):
-        self._register(name, widget_string.Combobox(value=defaultValue, options=choices), label)
-
-    def multiselect(self,
-                    name: str,
-                    defaultValue: str,
-                    choices: typing.List[str],
-                    label: typing.Optional[str] = None):
-        self._register(
-            name,
-            widget_selection.SelectMultiple(value=(defaultValue, ),
-                                            options=[('__EMPTY__', ''), *list(zip(choices, choices))]), label)
+    def dropdown(
+        self,
+        name: str,
+        defaultValue: str,
+        choices: typing.List[str],
+        label: typing.Optional[str] = None,
+    ):
+        self._register(
+            name,
+            widget_selection.Dropdown(value=defaultValue, options=choices),
+            label,
+        )
+
+    def combobox(
+        self,
+        name: str,
+        defaultValue: str,
+        choices: typing.List[str],
+        label: typing.Optional[str] = None,
+    ):
+        self._register(
+            name,
+            widget_string.Combobox(value=defaultValue, options=choices),
+            label,
+        )
+
+    def multiselect(
+        self,
+        name: str,
+        defaultValue: str,
+        choices: typing.List[str],
+        label: typing.Optional[str] = None,
+    ):
+        self._register(
+            name,
+            widget_selection.SelectMultiple(
+                value=(defaultValue,),
+                options=[("__EMPTY__", ""), *list(zip(choices, choices))],
+            ),
+            label,
+        )
 
     def _get(self, name: str) -> str:
         return self._widgets[name].value
databricks/sdk/azure.py
CHANGED

@@ -4,14 +4,14 @@ from .oauth import TokenSource
 from .service.provisioning import Workspace
 
 
-def add_workspace_id_header(cfg: 'Config', headers: Dict[str, str]):
+def add_workspace_id_header(cfg: "Config", headers: Dict[str, str]):
     if cfg.azure_workspace_resource_id:
         headers["X-Databricks-Azure-Workspace-Resource-Id"] = cfg.azure_workspace_resource_id
 
 
-def add_sp_management_token(token_source: 'TokenSource', headers: Dict[str, str]):
+def add_sp_management_token(token_source: "TokenSource", headers: Dict[str, str]):
     mgmt_token = token_source.token()
-    headers['X-Databricks-Azure-SP-Management-Token'] = mgmt_token.access_token
+    headers["X-Databricks-Azure-SP-Management-Token"] = mgmt_token.access_token
 
 
 def get_azure_resource_id(workspace: Workspace):
@@ -22,6 +22,8 @@ def get_azure_resource_id(workspace: Workspace):
     """
     if workspace.azure_workspace_info is None:
         return None
-    return (f'/subscriptions/{workspace.azure_workspace_info.subscription_id}'
-            f'/resourceGroups/{workspace.azure_workspace_info.resource_group}'
-            f'/providers/Microsoft.Databricks/workspaces/{workspace.workspace_name}')
+    return (
+        f"/subscriptions/{workspace.azure_workspace_info.subscription_id}"
+        f"/resourceGroups/{workspace.azure_workspace_info.resource_group}"
+        f"/providers/Microsoft.Databricks/workspaces/{workspace.workspace_name}"
+    )
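
get_azure_resource_id assembles the ARM resource ID for a workspace from its subscription, resource group, and name. The resulting shape, with hypothetical values:

    subscription_id = "00000000-0000-0000-0000-000000000000"  # hypothetical
    resource_group = "my-rg"  # hypothetical
    workspace_name = "my-workspace"  # hypothetical

    resource_id = (
        f"/subscriptions/{subscription_id}"
        f"/resourceGroups/{resource_group}"
        f"/providers/Microsoft.Databricks/workspaces/{workspace_name}"
    )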
databricks/sdk/casing.py
CHANGED

@@ -8,22 +8,22 @@ class _Name(object):
         for ch in raw_name:
             if ch.isupper():
                 if segment:
-                    self._segments.append(''.join(segment))
+                    self._segments.append("".join(segment))
                 segment = [ch.lower()]
             elif ch.islower():
                 segment.append(ch)
             else:
                 if segment:
-                    self._segments.append(''.join(segment))
+                    self._segments.append("".join(segment))
                 segment = []
         if segment:
-            self._segments.append(''.join(segment))
+            self._segments.append("".join(segment))
 
     def to_snake_case(self) -> str:
-        return '_'.join(self._segments)
+        return "_".join(self._segments)
 
     def to_header_case(self) -> str:
-        return '-'.join([s.capitalize() for s in self._segments])
+        return "-".join([s.capitalize() for s in self._segments])
 
 
 class Casing(object):
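
_Name splits an identifier into lowercase segments: an uppercase letter starts a new segment and any non-letter character ends the current one; to_snake_case joins the segments with underscores, and to_header_case capitalizes each segment and joins with hyphens. The segmentation rule re-stated outside the class (a sketch mirroring the logic above, since _Name itself is private):

    def to_snake_case(raw: str) -> str:
        segments, segment = [], []
        for ch in raw:
            if ch.isupper():
                if segment:
                    segments.append("".join(segment))
                segment = [ch.lower()]
            elif ch.islower():
                segment.append(ch)
            else:  # digits and punctuation terminate the current segment
                if segment:
                    segments.append("".join(segment))
                segment = []
        if segment:
            segments.append("".join(segment))
        return "_".join(segments)

    assert to_snake_case("maxConnectionsPerPool") == "max_connections_per_pool"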
|