databricks-sdk 0.26.0__py3-none-any.whl → 0.27.1__py3-none-any.whl

This diff shows the changes between two publicly released versions of the package as published to a supported registry. The information is provided for informational purposes only.

Potentially problematic release.


This version of databricks-sdk might be problematic; review the changes below and the project's release notes for more details.

@@ -59,12 +59,12 @@ from databricks.sdk.service.serving import AppsAPI, ServingEndpointsAPI
59
59
  from databricks.sdk.service.settings import (AccountIpAccessListsAPI,
60
60
  AccountSettingsAPI,
61
61
  AutomaticClusterUpdateAPI,
62
+ ComplianceSecurityProfileAPI,
62
63
  CredentialsManagerAPI,
63
64
  CspEnablementAccountAPI,
64
- CspEnablementAPI,
65
65
  DefaultNamespaceAPI,
66
+ EnhancedSecurityMonitoringAPI,
66
67
  EsmEnablementAccountAPI,
67
- EsmEnablementAPI,
68
68
  IpAccessListsAPI,
69
69
  NetworkConnectivityAPI,
70
70
  PersonalComputeAPI,
@@ -267,7 +267,7 @@ class WorkspaceClient:
267
267
 
268
268
  @property
269
269
  def apps(self) -> AppsAPI:
270
- """Lakehouse Apps run directly on a customer’s Databricks instance, integrate with their data, use and extend Databricks services, and enable users to interact through single sign-on."""
270
+ """Apps run directly on a customer’s Databricks instance, integrate with their data, use and extend Databricks services, and enable users to interact through single sign-on."""
271
271
  return self._apps
272
272
 
273
273
  @property
@@ -0,0 +1,42 @@
1
+ # Copied from functools.py
2
+ # Remove when Python 3.8 is the minimum supported version.
3
+
4
+ _NOT_FOUND = object()
5
+
6
+
7
+ class _cached_property:
8
+
9
+ def __init__(self, func):
10
+ self.func = func
11
+ self.attrname = None
12
+ self.__doc__ = func.__doc__
13
+ self.__module__ = func.__module__
14
+
15
+ def __set_name__(self, owner, name):
16
+ if self.attrname is None:
17
+ self.attrname = name
18
+ elif name != self.attrname:
19
+ raise TypeError("Cannot assign the same cached_property to two different names "
20
+ f"({self.attrname!r} and {name!r}).")
21
+
22
+ def __get__(self, instance, owner=None):
23
+ if instance is None:
24
+ return self
25
+ if self.attrname is None:
26
+ raise TypeError("Cannot use cached_property instance without calling __set_name__ on it.")
27
+ try:
28
+ cache = instance.__dict__
29
+ except AttributeError: # not all objects have __dict__ (e.g. class defines slots)
30
+ msg = (f"No '__dict__' attribute on {type(instance).__name__!r} "
31
+ f"instance to cache {self.attrname!r} property.")
32
+ raise TypeError(msg) from None
33
+ val = cache.get(self.attrname, _NOT_FOUND)
34
+ if val is _NOT_FOUND:
35
+ val = self.func(instance)
36
+ try:
37
+ cache[self.attrname] = val
38
+ except TypeError:
39
+ msg = (f"The '__dict__' attribute on {type(instance).__name__!r} instance "
40
+ f"does not support item assignment for caching {self.attrname!r} property.")
41
+ raise TypeError(msg) from None
42
+ return val
databricks/sdk/core.py CHANGED
@@ -145,11 +145,14 @@ class ApiClient:
145
145
  if not len(response.content):
146
146
  return resp
147
147
 
148
- json = response.json()
149
- if isinstance(json, list):
150
- return json
148
+ jsonResponse = response.json()
149
+ if jsonResponse is None:
150
+ return resp
151
+
152
+ if isinstance(jsonResponse, list):
153
+ return jsonResponse
151
154
 
152
- return {**resp, **json}
155
+ return {**resp, **jsonResponse}
153
156
 
154
157
  @staticmethod
155
158
  def _is_retryable(err: BaseException) -> Optional[str]:
databricks/sdk/dbutils.py CHANGED
@@ -1,9 +1,10 @@
1
1
  import base64
2
2
  import json
3
3
  import logging
4
+ import os.path
4
5
  import threading
5
- import typing
6
6
  from collections import namedtuple
7
+ from typing import Callable, Dict, List
7
8
 
8
9
  from .core import ApiClient, Config, DatabricksError
9
10
  from .mixins import compute as compute_ext
@@ -34,7 +35,7 @@ class SecretMetadata(namedtuple('SecretMetadata', ['key'])):
34
35
  class _FsUtil:
35
36
  """ Manipulates the Databricks filesystem (DBFS) """
36
37
 
37
- def __init__(self, dbfs_ext: dbfs_ext.DbfsExt, proxy_factory: typing.Callable[[str], '_ProxyUtil']):
38
+ def __init__(self, dbfs_ext: dbfs_ext.DbfsExt, proxy_factory: Callable[[str], '_ProxyUtil']):
38
39
  self._dbfs = dbfs_ext
39
40
  self._proxy_factory = proxy_factory
40
41
 
@@ -45,17 +46,15 @@ class _FsUtil:
45
46
 
46
47
  def head(self, file: str, maxBytes: int = 65536) -> str:
47
48
  """Returns up to the first 'maxBytes' bytes of the given file as a String encoded in UTF-8 """
48
- res = self._dbfs.read(file, length=maxBytes, offset=0)
49
- raw = base64.b64decode(res.data)
50
- return raw.decode('utf8')
49
+ with self._dbfs.download(file) as f:
50
+ return f.read(maxBytes).decode('utf8')
51
51
 
52
- def ls(self, dir: str) -> typing.List[FileInfo]:
52
+ def ls(self, dir: str) -> List[FileInfo]:
53
53
  """Lists the contents of a directory """
54
- result = []
55
- for f in self._dbfs.list(dir):
56
- name = f.path.split('/')[-1]
57
- result.append(FileInfo(f'dbfs:{f.path}', name, f.file_size, f.modification_time))
58
- return result
54
+ return [
55
+ FileInfo(f.path, os.path.basename(f.path), f.file_size, f.modification_time)
56
+ for f in self._dbfs.list(dir)
57
+ ]
59
58
 
60
59
  def mkdirs(self, dir: str) -> bool:
61
60
  """Creates the given directory if it does not exist, also creating any necessary parent directories """
@@ -83,7 +82,7 @@ class _FsUtil:
83
82
  mount_point: str,
84
83
  encryption_type: str = None,
85
84
  owner: str = None,
86
- extra_configs: 'typing.Dict[str, str]' = None) -> bool:
85
+ extra_configs: Dict[str, str] = None) -> bool:
87
86
  """Mounts the given source directory into DBFS at the given mount point"""
88
87
  fs = self._proxy_factory('fs')
89
88
  kwargs = {}
@@ -105,7 +104,7 @@ class _FsUtil:
105
104
  mount_point: str,
106
105
  encryption_type: str = None,
107
106
  owner: str = None,
108
- extra_configs: 'typing.Dict[str, str]' = None) -> bool:
107
+ extra_configs: Dict[str, str] = None) -> bool:
109
108
  """ Similar to mount(), but updates an existing mount point (if present) instead of creating a new one """
110
109
  fs = self._proxy_factory('fs')
111
110
  kwargs = {}
@@ -117,7 +116,7 @@ class _FsUtil:
117
116
  kwargs['extra_configs'] = extra_configs
118
117
  return fs.updateMount(source, mount_point, **kwargs)
119
118
 
120
- def mounts(self) -> typing.List[MountInfo]:
119
+ def mounts(self) -> List[MountInfo]:
121
120
  """ Displays information about what is mounted within DBFS """
122
121
  result = []
123
122
  fs = self._proxy_factory('fs')
@@ -150,13 +149,13 @@ class _SecretsUtil:
150
149
  string_value = val.decode()
151
150
  return string_value
152
151
 
153
- def list(self, scope) -> typing.List[SecretMetadata]:
152
+ def list(self, scope) -> List[SecretMetadata]:
154
153
  """Lists the metadata for secrets within the specified scope."""
155
154
 
156
155
  # transform from SDK dataclass to dbutils-compatible namedtuple
157
156
  return [SecretMetadata(v.key) for v in self._api.list_secrets(scope)]
158
157
 
159
- def listScopes(self) -> typing.List[SecretScope]:
158
+ def listScopes(self) -> List[SecretScope]:
160
159
  """Lists the available scopes."""
161
160
 
162
161
  # transform from SDK dataclass to dbutils-compatible namedtuple
@@ -245,8 +244,7 @@ class _ProxyUtil:
245
244
  """Enables temporary workaround to call remote in-REPL dbutils without having to re-implement them"""
246
245
 
247
246
  def __init__(self, *, command_execution: compute.CommandExecutionAPI,
248
- context_factory: typing.Callable[[],
249
- compute.ContextStatusResponse], cluster_id: str, name: str):
247
+ context_factory: Callable[[], compute.ContextStatusResponse], cluster_id: str, name: str):
250
248
  self._commands = command_execution
251
249
  self._cluster_id = cluster_id
252
250
  self._context_factory = context_factory
@@ -267,8 +265,8 @@ import re
267
265
  class _ProxyCall:
268
266
 
269
267
  def __init__(self, *, command_execution: compute.CommandExecutionAPI,
270
- context_factory: typing.Callable[[], compute.ContextStatusResponse], cluster_id: str,
271
- util: str, method: str):
268
+ context_factory: Callable[[], compute.ContextStatusResponse], cluster_id: str, util: str,
269
+ method: str):
272
270
  self._commands = command_execution
273
271
  self._cluster_id = cluster_id
274
272
  self._context_factory = context_factory