databricks-sdk 0.68.0__py3-none-any.whl → 0.70.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
- databricks/sdk/__init__.py +6 -10
- databricks/sdk/config.py +62 -14
- databricks/sdk/dbutils.py +17 -0
- databricks/sdk/mixins/files.py +1155 -111
- databricks/sdk/mixins/files_utils.py +293 -0
- databricks/sdk/service/catalog.py +5 -4
- databricks/sdk/service/compute.py +10 -0
- databricks/sdk/service/dataquality.py +197 -52
- databricks/sdk/service/jobs.py +73 -38
- databricks/sdk/service/ml.py +297 -0
- databricks/sdk/service/oauth2.py +27 -1
- databricks/sdk/service/pipelines.py +2 -2
- databricks/sdk/service/provisioning.py +10 -1
- databricks/sdk/service/settings.py +2 -0
- databricks/sdk/service/sharing.py +0 -38
- databricks/sdk/service/sql.py +37 -38
- databricks/sdk/version.py +1 -1
- {databricks_sdk-0.68.0.dist-info → databricks_sdk-0.70.0.dist-info}/METADATA +1 -1
- {databricks_sdk-0.68.0.dist-info → databricks_sdk-0.70.0.dist-info}/RECORD +23 -22
- {databricks_sdk-0.68.0.dist-info → databricks_sdk-0.70.0.dist-info}/WHEEL +0 -0
- {databricks_sdk-0.68.0.dist-info → databricks_sdk-0.70.0.dist-info}/licenses/LICENSE +0 -0
- {databricks_sdk-0.68.0.dist-info → databricks_sdk-0.70.0.dist-info}/licenses/NOTICE +0 -0
- {databricks_sdk-0.68.0.dist-info → databricks_sdk-0.70.0.dist-info}/top_level.txt +0 -0
databricks/sdk/mixins/files_utils.py (new file)
@@ -0,0 +1,293 @@
+from __future__ import annotations
+
+import os
+import threading
+from dataclasses import dataclass
+from typing import Any, BinaryIO, Callable, Iterable, Optional
+
+
+@dataclass
+class CreateDownloadUrlResponse:
+    """Response from the download URL API call."""
+
+    url: str
+    """The presigned URL to download the file."""
+    headers: dict[str, str]
+    """Headers to use when making the download request."""
+
+    @classmethod
+    def from_dict(cls, data: dict[str, Any]) -> CreateDownloadUrlResponse:
+        """Create an instance from a dictionary."""
+        if "url" not in data:
+            raise ValueError("Missing 'url' in response data")
+        headers = data["headers"] if "headers" in data else {}
+        parsed_headers = {x["name"]: x["value"] for x in headers}
+        return cls(url=data["url"], headers=parsed_headers)
+
+
+class _ConcatenatedInputStream(BinaryIO):
+    """This class joins two input streams into one."""
+
+    def __init__(self, head_stream: BinaryIO, tail_stream: BinaryIO):
+        if not head_stream.readable():
+            raise ValueError("head_stream is not readable")
+        if not tail_stream.readable():
+            raise ValueError("tail_stream is not readable")
+
+        self._head_stream = head_stream
+        self._tail_stream = tail_stream
+        self._head_size = None
+        self._tail_size = None
+
+    def close(self) -> None:
+        try:
+            self._head_stream.close()
+        finally:
+            self._tail_stream.close()
+
+    def fileno(self) -> int:
+        raise AttributeError()
+
+    def flush(self) -> None:
+        raise NotImplementedError("Stream is not writable")
+
+    def isatty(self) -> bool:
+        raise NotImplementedError()
+
+    def read(self, __n: int = -1) -> bytes:
+        head = self._head_stream.read(__n)
+        remaining_bytes = __n - len(head) if __n >= 0 else __n
+        tail = self._tail_stream.read(remaining_bytes)
+        return head + tail
+
+    def readable(self) -> bool:
+        return True
+
+    def readline(self, __limit: int = -1) -> bytes:
+        # Read and return one line from the stream.
+        # If __limit is specified, at most __limit bytes will be read.
+        # The line terminator is always b'\n' for binary files.
+        head = self._head_stream.readline(__limit)
+        if len(head) > 0 and head[-1:] == b"\n":
+            # end of line happened before (or at) the limit
+            return head
+
+        # if __limit >= 0, len(head) can't exceed limit
+        remaining_bytes = __limit - len(head) if __limit >= 0 else __limit
+        tail = self._tail_stream.readline(remaining_bytes)
+        return head + tail
+
+    def readlines(self, __hint: int = -1) -> list[bytes]:
+        # Read and return a list of lines from the stream.
+        # Hint can be specified to control the number of lines read: no more lines will be read
+        # If the total size (in bytes/characters) of all lines so far exceeds hint.
+
+        # In fact, BytesIO(bytes) will not read next line if total size of all lines
+        # *equals or* exceeds hint.
+
+        head_result = self._head_stream.readlines(__hint)
+        head_total_bytes = sum(len(line) for line in head_result)
+
+        if 0 < __hint <= head_total_bytes and head_total_bytes > 0:
+            # We reached (or passed) the hint by reading from head_stream, or exhausted head_stream.
+
+            if head_result[-1][-1:] == b"\n":
+                # If we reached/passed the hint and also stopped at the line break, return.
+                return head_result
+
+            # Reading from head_stream could have stopped only because the stream was exhausted
+            if len(self._head_stream.read(1)) > 0:
+                raise ValueError(
+                    f"Stream reading finished prematurely after reading {head_total_bytes} bytes, reaching or exceeding hint {__hint}"
+                )
+
+            # We need to finish reading the current line, now from tail_stream.
+
+            tail_result = self._tail_stream.readlines(1)  # We will only read the first line from tail_stream.
+            assert len(tail_result) <= 1
+            if len(tail_result) > 0:
+                # We will then append the tail as the last line of the result.
+                return head_result[:-1] + [head_result[-1] + tail_result[0]]
+            else:
+                return head_result
+
+        # We did not reach the hint by reading head_stream but exhausted it, continue reading from tail_stream
+        # with an adjusted hint
+        if __hint >= 0:
+            remaining_bytes = __hint - head_total_bytes
+        else:
+            remaining_bytes = __hint
+
+        tail_result = self._tail_stream.readlines(remaining_bytes)
+
+        if head_total_bytes > 0 and head_result[-1][-1:] != b"\n" and len(tail_result) > 0:
+            # If head stream does not end with the line break, we need to concatenate
+            # the last line of the head result and the first line of tail result
+            return head_result[:-1] + [head_result[-1] + tail_result[0]] + tail_result[1:]
+        else:
+            # Otherwise, just append two lists of lines.
+            return head_result + tail_result
+
+    def _get_stream_size(self, stream: BinaryIO) -> int:
+        prev_offset = stream.tell()
+        try:
+            stream.seek(0, os.SEEK_END)
+            return stream.tell()
+        finally:
+            stream.seek(prev_offset, os.SEEK_SET)
+
+    def _get_head_size(self) -> int:
+        if self._head_size is None:
+            self._head_size = self._get_stream_size(self._head_stream)
+        return self._head_size
+
+    def _get_tail_size(self) -> int:
+        if self._tail_size is None:
+            self._tail_size = self._get_stream_size(self._tail_stream)
+        return self._tail_size
+
+    def seek(self, __offset: int, __whence: int = os.SEEK_SET) -> int:
+        if not self.seekable():
+            raise NotImplementedError("Stream is not seekable")
+
+        if __whence == os.SEEK_SET:
+            if __offset < 0:
+                # Follow native buffer behavior
+                raise ValueError(f"Negative seek value: {__offset}")
+
+            head_size = self._get_head_size()
+
+            if __offset <= head_size:
+                self._head_stream.seek(__offset, os.SEEK_SET)
+                self._tail_stream.seek(0, os.SEEK_SET)
+            else:
+                self._head_stream.seek(0, os.SEEK_END)  # move head stream to the end
+                self._tail_stream.seek(__offset - head_size, os.SEEK_SET)
+
+        elif __whence == os.SEEK_CUR:
+            current_offset = self.tell()
+            new_offset = current_offset + __offset
+            if new_offset < 0:
+                # gracefully don't seek before start
+                new_offset = 0
+            self.seek(new_offset, os.SEEK_SET)
+
+        elif __whence == os.SEEK_END:
+            if __offset > 0:
+                # Python allows to seek beyond the end of stream.
+
+                # Move head to EOF and tail to (EOF + offset), so subsequent tell()
+                # returns len(head) + len(tail) + offset, same as for native buffer
+                self._head_stream.seek(0, os.SEEK_END)
+                self._tail_stream.seek(__offset, os.SEEK_END)
+            else:
+                self._tail_stream.seek(__offset, os.SEEK_END)
+                tail_pos = self._tail_stream.tell()
+                if tail_pos > 0:
+                    # target position lies within the tail, move head to EOF
+                    self._head_stream.seek(0, os.SEEK_END)
+                else:
+                    tail_size = self._get_tail_size()
+                    self._head_stream.seek(__offset + tail_size, os.SEEK_END)
+        else:
+            raise ValueError(__whence)
+        return self.tell()
+
+    def seekable(self) -> bool:
+        return self._head_stream.seekable() and self._tail_stream.seekable()
+
+    def __getattribute__(self, name: str) -> Any:
+        if name == "fileno":
+            raise AttributeError()
+        elif name in ["tell", "seek"] and not self.seekable():
+            raise AttributeError()
+
+        return super().__getattribute__(name)
+
+    def tell(self) -> int:
+        if not self.seekable():
+            raise NotImplementedError()
+
+        # Assuming that tail stream stays at 0 until head stream is exhausted
+        return self._head_stream.tell() + self._tail_stream.tell()
+
+    def truncate(self, __size: Optional[int] = None) -> int:
+        raise NotImplementedError("Stream is not writable")
+
+    def writable(self) -> bool:
+        return False
+
+    def write(self, __s: bytes) -> int:
+        raise NotImplementedError("Stream is not writable")
+
+    def writelines(self, __lines: Iterable[bytes]) -> None:
+        raise NotImplementedError("Stream is not writable")
+
+    def __next__(self) -> bytes:
+        # IOBase [...] supports the iterator protocol, meaning that an IOBase object can be
+        # iterated over yielding the lines in a stream. [...] See readline().
+        result = self.readline()
+        if len(result) == 0:
+            raise StopIteration
+        return result
+
+    def __iter__(self) -> "BinaryIO":
+        return self
+
+    def __enter__(self) -> "BinaryIO":
+        self._head_stream.__enter__()
+        self._tail_stream.__enter__()
+        return self
+
+    def __exit__(self, __type, __value, __traceback) -> None:
+        self._head_stream.__exit__(__type, __value, __traceback)
+        self._tail_stream.__exit__(__type, __value, __traceback)
+
+    def __str__(self) -> str:
+        return f"Concat: {self._head_stream}, {self._tail_stream}]"
+
+
+class _PresignedUrlDistributor:
+    """
+    Distributes and manages presigned URLs for downloading files.
+
+    This class ensures thread-safe access to a presigned URL, allowing retrieval and invalidation.
+    When the URL is invalidated, a new one will be fetched using the provided function.
+    """
+
+    def __init__(self, get_new_url_func: Callable[[], CreateDownloadUrlResponse]):
+        """
+        Initialize the distributor.
+
+        Args:
+            get_new_url_func: A callable that returns a new presigned URL response.
+        """
+        self._get_new_url_func = get_new_url_func
+        self._current_url = None
+        self.current_version = 0
+        self.lock = threading.RLock()
+
+    def get_url(self) -> tuple[CreateDownloadUrlResponse, int]:
+        """
+        Get the current presigned URL and its version.
+
+        Returns:
+            A tuple containing the current presigned URL response and its version.
+        """
+        with self.lock:
+            if self._current_url is None:
+                self._current_url = self._get_new_url_func()
+            return self._current_url, self.current_version
+
+    def invalidate_url(self, version: int) -> None:
+        """
+        Invalidate the current presigned URL if the version matches. If the version does not match,
+        the URL remains unchanged. This ensures that only the most recent version can invalidate the URL.
+
+        Args:
+            version: The version to check before invalidating the URL.
+        """
+        with self.lock:
+            if version == self.current_version:
+                self._current_url = None
+                self.current_version += 1
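Both helpers above are private to the SDK, but a minimal usage sketch (illustrative only, not part of the released diff) may make their behavior easier to follow. It assumes the module path shown in the file list, in-memory BytesIO streams, and a hypothetical fetch_url callable standing in for the download-URL API call:

import io

from databricks.sdk.mixins.files_utils import (
    CreateDownloadUrlResponse,
    _ConcatenatedInputStream,
    _PresignedUrlDistributor,
)

# Join two readable streams; read(), readline() and seek() behave as if they were one buffer.
stream = _ConcatenatedInputStream(io.BytesIO(b"hello "), io.BytesIO(b"world\n"))
assert stream.read() == b"hello world\n"
stream.seek(0)
assert stream.readline() == b"hello world\n"


def fetch_url() -> CreateDownloadUrlResponse:
    # Hypothetical fetcher; in the SDK this would wrap the files download-URL API call.
    return CreateDownloadUrlResponse.from_dict(
        {"url": "https://example.com/presigned", "headers": [{"name": "x-key", "value": "v"}]}
    )


# The distributor caches one URL and hands out a version alongside it; invalidate_url() only
# clears the cache when called with the version it issued, so a stale reader cannot discard
# a URL that another thread has already refreshed.
distributor = _PresignedUrlDistributor(fetch_url)
url, version = distributor.get_url()
distributor.invalidate_url(version)  # the next get_url() call fetches a fresh URL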
databricks/sdk/service/catalog.py
@@ -1740,7 +1740,7 @@ class ConnectionInfo:
 
 
 class ConnectionType(Enum):
-    """Next Id:
+    """Next Id: 46"""
 
     BIGQUERY = "BIGQUERY"
     DATABRICKS = "DATABRICKS"
@@ -2542,12 +2542,13 @@ class CredentialPurpose(Enum):
 
 
 class CredentialType(Enum):
-    """Next Id:
+    """Next Id: 14"""
 
     ANY_STATIC_CREDENTIAL = "ANY_STATIC_CREDENTIAL"
     BEARER_TOKEN = "BEARER_TOKEN"
     OAUTH_ACCESS_TOKEN = "OAUTH_ACCESS_TOKEN"
     OAUTH_M2M = "OAUTH_M2M"
+    OAUTH_MTLS = "OAUTH_MTLS"
     OAUTH_REFRESH_TOKEN = "OAUTH_REFRESH_TOKEN"
     OAUTH_RESOURCE_OWNER_PASSWORD = "OAUTH_RESOURCE_OWNER_PASSWORD"
     OAUTH_U2M = "OAUTH_U2M"
@@ -8549,7 +8550,7 @@ class RowFilterOptions:
 
 @dataclass
 class SchemaInfo:
-    """Next ID:
+    """Next ID: 42"""
 
     browse_only: Optional[bool] = None
     """Indicates whether the principal is limited to retrieving metadata for the associated object
@@ -8762,7 +8763,7 @@ class Securable:
 
 
 class SecurableKind(Enum):
-    """Latest kind:
+    """Latest kind: CONNECTION_SALESFORCE_OAUTH_MTLS = 268; Next id:269"""
 
     TABLE_DB_STORAGE = "TABLE_DB_STORAGE"
     TABLE_DELTA = "TABLE_DELTA"
databricks/sdk/service/compute.py
@@ -7155,11 +7155,21 @@ class TerminationReasonCode(Enum):
     NEPHOS_RESOURCE_MANAGEMENT = "NEPHOS_RESOURCE_MANAGEMENT"
     NETVISOR_SETUP_TIMEOUT = "NETVISOR_SETUP_TIMEOUT"
     NETWORK_CHECK_CONTROL_PLANE_FAILURE = "NETWORK_CHECK_CONTROL_PLANE_FAILURE"
+    NETWORK_CHECK_CONTROL_PLANE_FAILURE_DUE_TO_MISCONFIG = "NETWORK_CHECK_CONTROL_PLANE_FAILURE_DUE_TO_MISCONFIG"
     NETWORK_CHECK_DNS_SERVER_FAILURE = "NETWORK_CHECK_DNS_SERVER_FAILURE"
+    NETWORK_CHECK_DNS_SERVER_FAILURE_DUE_TO_MISCONFIG = "NETWORK_CHECK_DNS_SERVER_FAILURE_DUE_TO_MISCONFIG"
     NETWORK_CHECK_METADATA_ENDPOINT_FAILURE = "NETWORK_CHECK_METADATA_ENDPOINT_FAILURE"
+    NETWORK_CHECK_METADATA_ENDPOINT_FAILURE_DUE_TO_MISCONFIG = (
+        "NETWORK_CHECK_METADATA_ENDPOINT_FAILURE_DUE_TO_MISCONFIG"
+    )
     NETWORK_CHECK_MULTIPLE_COMPONENTS_FAILURE = "NETWORK_CHECK_MULTIPLE_COMPONENTS_FAILURE"
+    NETWORK_CHECK_MULTIPLE_COMPONENTS_FAILURE_DUE_TO_MISCONFIG = (
+        "NETWORK_CHECK_MULTIPLE_COMPONENTS_FAILURE_DUE_TO_MISCONFIG"
+    )
     NETWORK_CHECK_NIC_FAILURE = "NETWORK_CHECK_NIC_FAILURE"
+    NETWORK_CHECK_NIC_FAILURE_DUE_TO_MISCONFIG = "NETWORK_CHECK_NIC_FAILURE_DUE_TO_MISCONFIG"
     NETWORK_CHECK_STORAGE_FAILURE = "NETWORK_CHECK_STORAGE_FAILURE"
+    NETWORK_CHECK_STORAGE_FAILURE_DUE_TO_MISCONFIG = "NETWORK_CHECK_STORAGE_FAILURE_DUE_TO_MISCONFIG"
     NETWORK_CONFIGURATION_FAILURE = "NETWORK_CONFIGURATION_FAILURE"
     NFS_MOUNT_FAILURE = "NFS_MOUNT_FAILURE"
     NO_ACTIVATED_K8S = "NO_ACTIVATED_K8S"