databricks-sdk 0.21.0__py3-none-any.whl → 0.23.0__py3-none-any.whl
This diff compares the contents of two publicly released versions of the package as they appear in their public registry, and is provided for informational purposes only.
Potentially problematic release.
- databricks/sdk/mixins/compute.py +2 -2
- databricks/sdk/service/catalog.py +237 -88
- databricks/sdk/service/dashboards.py +304 -10
- databricks/sdk/service/iam.py +1 -0
- databricks/sdk/service/serving.py +19 -14
- databricks/sdk/service/settings.py +4 -4
- databricks/sdk/service/sharing.py +22 -3
- databricks/sdk/service/sql.py +3 -3
- databricks/sdk/service/vectorsearch.py +8 -1
- databricks/sdk/version.py +1 -1
- {databricks_sdk-0.21.0.dist-info → databricks_sdk-0.23.0.dist-info}/METADATA +1 -1
- {databricks_sdk-0.21.0.dist-info → databricks_sdk-0.23.0.dist-info}/RECORD +16 -16
- {databricks_sdk-0.21.0.dist-info → databricks_sdk-0.23.0.dist-info}/LICENSE +0 -0
- {databricks_sdk-0.21.0.dist-info → databricks_sdk-0.23.0.dist-info}/NOTICE +0 -0
- {databricks_sdk-0.21.0.dist-info → databricks_sdk-0.23.0.dist-info}/WHEEL +0 -0
- {databricks_sdk-0.21.0.dist-info → databricks_sdk-0.23.0.dist-info}/top_level.txt +0 -0
databricks/sdk/service/dashboards.py CHANGED

@@ -4,13 +4,119 @@ from __future__ import annotations
 
 import logging
 from dataclasses import dataclass
+from enum import Enum
 from typing import Dict, Optional
 
+from ._internal import _enum
+
 _LOG = logging.getLogger('databricks.sdk')
 
 # all definitions in this file are in alphabetical order
 
 
+@dataclass
+class CreateDashboardRequest:
+    display_name: str
+    """The display name of the dashboard."""
+
+    parent_path: Optional[str] = None
+    """The workspace path of the folder containing the dashboard. Includes leading slash and no
+    trailing slash."""
+
+    serialized_dashboard: Optional[str] = None
+    """The contents of the dashboard in serialized string form."""
+
+    warehouse_id: Optional[str] = None
+    """The warehouse ID used to run the dashboard."""
+
+    def as_dict(self) -> dict:
+        """Serializes the CreateDashboardRequest into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.display_name is not None: body['display_name'] = self.display_name
+        if self.parent_path is not None: body['parent_path'] = self.parent_path
+        if self.serialized_dashboard is not None: body['serialized_dashboard'] = self.serialized_dashboard
+        if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> CreateDashboardRequest:
+        """Deserializes the CreateDashboardRequest from a dictionary."""
+        return cls(display_name=d.get('display_name', None),
+                   parent_path=d.get('parent_path', None),
+                   serialized_dashboard=d.get('serialized_dashboard', None),
+                   warehouse_id=d.get('warehouse_id', None))
+
+
+@dataclass
+class Dashboard:
+    create_time: Optional[str] = None
+    """The timestamp of when the dashboard was created."""
+
+    dashboard_id: Optional[str] = None
+    """UUID identifying the dashboard."""
+
+    display_name: Optional[str] = None
+    """The display name of the dashboard."""
+
+    etag: Optional[str] = None
+    """The etag for the dashboard. Can be optionally provided on updates to ensure that the dashboard
+    has not been modified since the last read."""
+
+    lifecycle_state: Optional[LifecycleState] = None
+    """The state of the dashboard resource. Used for tracking trashed status."""
+
+    parent_path: Optional[str] = None
+    """The workspace path of the folder containing the dashboard. Includes leading slash and no
+    trailing slash."""
+
+    path: Optional[str] = None
+    """The workspace path of the dashboard asset, including the file name."""
+
+    serialized_dashboard: Optional[str] = None
+    """The contents of the dashboard in serialized string form."""
+
+    update_time: Optional[str] = None
+    """The timestamp of when the dashboard was last updated by the user."""
+
+    warehouse_id: Optional[str] = None
+    """The warehouse ID used to run the dashboard."""
+
+    def as_dict(self) -> dict:
+        """Serializes the Dashboard into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.create_time is not None: body['create_time'] = self.create_time
+        if self.dashboard_id is not None: body['dashboard_id'] = self.dashboard_id
+        if self.display_name is not None: body['display_name'] = self.display_name
+        if self.etag is not None: body['etag'] = self.etag
+        if self.lifecycle_state is not None: body['lifecycle_state'] = self.lifecycle_state.value
+        if self.parent_path is not None: body['parent_path'] = self.parent_path
+        if self.path is not None: body['path'] = self.path
+        if self.serialized_dashboard is not None: body['serialized_dashboard'] = self.serialized_dashboard
+        if self.update_time is not None: body['update_time'] = self.update_time
+        if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> Dashboard:
+        """Deserializes the Dashboard from a dictionary."""
+        return cls(create_time=d.get('create_time', None),
+                   dashboard_id=d.get('dashboard_id', None),
+                   display_name=d.get('display_name', None),
+                   etag=d.get('etag', None),
+                   lifecycle_state=_enum(d, 'lifecycle_state', LifecycleState),
+                   parent_path=d.get('parent_path', None),
+                   path=d.get('path', None),
+                   serialized_dashboard=d.get('serialized_dashboard', None),
+                   update_time=d.get('update_time', None),
+                   warehouse_id=d.get('warehouse_id', None))
+
+
+class LifecycleState(Enum):
+
+    ACTIVE = 'ACTIVE'
+    TRASHED = 'TRASHED'
+
+
 @dataclass
 class PublishRequest:
     dashboard_id: Optional[str] = None
@@ -40,19 +146,89 @@ class PublishRequest:
 
 
 @dataclass
-class
+class PublishedDashboard:
+    display_name: Optional[str] = None
+    """The display name of the published dashboard."""
+
+    embed_credentials: Optional[bool] = None
+    """Indicates whether credentials are embedded in the published dashboard."""
+
+    revision_create_time: Optional[str] = None
+    """The timestamp of when the published dashboard was last revised."""
+
+    warehouse_id: Optional[str] = None
+    """The warehouse ID used to run the published dashboard."""
+
+    def as_dict(self) -> dict:
+        """Serializes the PublishedDashboard into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.display_name is not None: body['display_name'] = self.display_name
+        if self.embed_credentials is not None: body['embed_credentials'] = self.embed_credentials
+        if self.revision_create_time is not None: body['revision_create_time'] = self.revision_create_time
+        if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> PublishedDashboard:
+        """Deserializes the PublishedDashboard from a dictionary."""
+        return cls(display_name=d.get('display_name', None),
+                   embed_credentials=d.get('embed_credentials', None),
+                   revision_create_time=d.get('revision_create_time', None),
+                   warehouse_id=d.get('warehouse_id', None))
+
+
+@dataclass
+class TrashDashboardResponse:
 
     def as_dict(self) -> dict:
-        """Serializes the
+        """Serializes the TrashDashboardResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
         return body
 
     @classmethod
-    def from_dict(cls, d: Dict[str, any]) ->
-        """Deserializes the
+    def from_dict(cls, d: Dict[str, any]) -> TrashDashboardResponse:
+        """Deserializes the TrashDashboardResponse from a dictionary."""
         return cls()
 
 
+@dataclass
+class UpdateDashboardRequest:
+    dashboard_id: Optional[str] = None
+    """UUID identifying the dashboard."""
+
+    display_name: Optional[str] = None
+    """The display name of the dashboard."""
+
+    etag: Optional[str] = None
+    """The etag for the dashboard. Can be optionally provided on updates to ensure that the dashboard
+    has not been modified since the last read."""
+
+    serialized_dashboard: Optional[str] = None
+    """The contents of the dashboard in serialized string form."""
+
+    warehouse_id: Optional[str] = None
+    """The warehouse ID used to run the dashboard."""
+
+    def as_dict(self) -> dict:
+        """Serializes the UpdateDashboardRequest into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.dashboard_id is not None: body['dashboard_id'] = self.dashboard_id
+        if self.display_name is not None: body['display_name'] = self.display_name
+        if self.etag is not None: body['etag'] = self.etag
+        if self.serialized_dashboard is not None: body['serialized_dashboard'] = self.serialized_dashboard
+        if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> UpdateDashboardRequest:
+        """Deserializes the UpdateDashboardRequest from a dictionary."""
+        return cls(dashboard_id=d.get('dashboard_id', None),
+                   display_name=d.get('display_name', None),
+                   etag=d.get('etag', None),
+                   serialized_dashboard=d.get('serialized_dashboard', None),
+                   warehouse_id=d.get('warehouse_id', None))
+
+
 class LakeviewAPI:
     """These APIs provide specific management operations for Lakeview dashboards. Generic resource management can
     be done with Workspace API (import, export, get-status, list, delete)."""
@@ -60,11 +236,75 @@ class LakeviewAPI:
     def __init__(self, api_client):
         self._api = api_client
 
+    def create(self,
+               display_name: str,
+               *,
+               parent_path: Optional[str] = None,
+               serialized_dashboard: Optional[str] = None,
+               warehouse_id: Optional[str] = None) -> Dashboard:
+        """Create dashboard.
+
+        Create a draft dashboard.
+
+        :param display_name: str
+          The display name of the dashboard.
+        :param parent_path: str (optional)
+          The workspace path of the folder containing the dashboard. Includes leading slash and no trailing
+          slash.
+        :param serialized_dashboard: str (optional)
+          The contents of the dashboard in serialized string form.
+        :param warehouse_id: str (optional)
+          The warehouse ID used to run the dashboard.
+
+        :returns: :class:`Dashboard`
+        """
+        body = {}
+        if display_name is not None: body['display_name'] = display_name
+        if parent_path is not None: body['parent_path'] = parent_path
+        if serialized_dashboard is not None: body['serialized_dashboard'] = serialized_dashboard
+        if warehouse_id is not None: body['warehouse_id'] = warehouse_id
+        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+
+        res = self._api.do('POST', '/api/2.0/lakeview/dashboards', body=body, headers=headers)
+        return Dashboard.from_dict(res)
+
+    def get(self, dashboard_id: str) -> Dashboard:
+        """Get dashboard.
+
+        Get a draft dashboard.
+
+        :param dashboard_id: str
+          UUID identifying the dashboard.
+
+        :returns: :class:`Dashboard`
+        """
+
+        headers = {'Accept': 'application/json', }
+
+        res = self._api.do('GET', f'/api/2.0/lakeview/dashboards/{dashboard_id}', headers=headers)
+        return Dashboard.from_dict(res)
+
+    def get_published(self, dashboard_id: str) -> PublishedDashboard:
+        """Get published dashboard.
+
+        Get the current published dashboard.
+
+        :param dashboard_id: str
+          UUID identifying the dashboard to be published.
+
+        :returns: :class:`PublishedDashboard`
+        """
+
+        headers = {'Accept': 'application/json', }
+
+        res = self._api.do('GET', f'/api/2.0/lakeview/dashboards/{dashboard_id}/published', headers=headers)
+        return PublishedDashboard.from_dict(res)
+
     def publish(self,
                 dashboard_id: str,
                 *,
                 embed_credentials: Optional[bool] = None,
-                warehouse_id: Optional[str] = None):
+                warehouse_id: Optional[str] = None) -> PublishedDashboard:
         """Publish dashboard.
 
         Publish the current draft dashboard.
@@ -77,14 +317,68 @@ class LakeviewAPI:
         :param warehouse_id: str (optional)
           The ID of the warehouse that can be used to override the warehouse which was set in the draft.
 
-
+        :returns: :class:`PublishedDashboard`
         """
         body = {}
         if embed_credentials is not None: body['embed_credentials'] = embed_credentials
         if warehouse_id is not None: body['warehouse_id'] = warehouse_id
         headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
 
-        self._api.do('POST',
-
-
-
+        res = self._api.do('POST',
+                           f'/api/2.0/lakeview/dashboards/{dashboard_id}/published',
+                           body=body,
+                           headers=headers)
+        return PublishedDashboard.from_dict(res)
+
+    def trash(self, dashboard_id: str):
+        """Trash dashboard.
+
+        Trash a dashboard.
+
+        :param dashboard_id: str
+          UUID identifying the dashboard.
+
+
+        """
+
+        headers = {'Accept': 'application/json', }
+
+        self._api.do('DELETE', f'/api/2.0/lakeview/dashboards/{dashboard_id}', headers=headers)
+
+    def update(self,
+               dashboard_id: str,
+               *,
+               display_name: Optional[str] = None,
+               etag: Optional[str] = None,
+               serialized_dashboard: Optional[str] = None,
+               warehouse_id: Optional[str] = None) -> Dashboard:
+        """Update dashboard.
+
+        Update a draft dashboard.
+
+        :param dashboard_id: str
+          UUID identifying the dashboard.
+        :param display_name: str (optional)
+          The display name of the dashboard.
+        :param etag: str (optional)
+          The etag for the dashboard. Can be optionally provided on updates to ensure that the dashboard has
+          not been modified since the last read.
+        :param serialized_dashboard: str (optional)
+          The contents of the dashboard in serialized string form.
+        :param warehouse_id: str (optional)
+          The warehouse ID used to run the dashboard.
+
+        :returns: :class:`Dashboard`
+        """
+        body = {}
+        if display_name is not None: body['display_name'] = display_name
+        if etag is not None: body['etag'] = etag
+        if serialized_dashboard is not None: body['serialized_dashboard'] = serialized_dashboard
+        if warehouse_id is not None: body['warehouse_id'] = warehouse_id
+        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+
+        res = self._api.do('PATCH',
+                           f'/api/2.0/lakeview/dashboards/{dashboard_id}',
+                           body=body,
+                           headers=headers)
+        return Dashboard.from_dict(res)
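The hunks above give `LakeviewAPI` a full draft-dashboard lifecycle: `create`, `get`, `get_published`, `publish` (which now returns a `PublishedDashboard`), `trash`, and `update`. A minimal usage sketch based on the signatures shown in this diff; the `w.lakeview` accessor, the warehouse ID, the workspace path, and the serialized payload are assumptions rather than part of the diff:

```python
# Sketch only: exercises the LakeviewAPI methods added in this release.
# Assumes WorkspaceClient reads credentials from the environment and exposes
# this service as `w.lakeview`; IDs, paths, and the JSON payload are placeholders.
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

# Create a draft dashboard from a serialized definition.
draft = w.lakeview.create(display_name='sales-overview',
                          parent_path='/Workspace/Users/someone@example.com',
                          serialized_dashboard='{"pages": []}',
                          warehouse_id='<warehouse-id>')

# Read it back and patch the display name, passing the etag for optimistic concurrency.
current = w.lakeview.get(draft.dashboard_id)
updated = w.lakeview.update(current.dashboard_id,
                            display_name='sales-overview-v2',
                            etag=current.etag)

# Publish the draft (now returns a PublishedDashboard), then trash it when done.
published = w.lakeview.publish(updated.dashboard_id, embed_credentials=False)
print(published.revision_create_time)
w.lakeview.trash(updated.dashboard_id)
```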
databricks/sdk/service/iam.py CHANGED

@@ -777,6 +777,7 @@ class PermissionLevel(Enum):
     CAN_MANAGE_PRODUCTION_VERSIONS = 'CAN_MANAGE_PRODUCTION_VERSIONS'
     CAN_MANAGE_RUN = 'CAN_MANAGE_RUN'
     CAN_MANAGE_STAGING_VERSIONS = 'CAN_MANAGE_STAGING_VERSIONS'
+    CAN_QUERY = 'CAN_QUERY'
     CAN_READ = 'CAN_READ'
     CAN_RESTART = 'CAN_RESTART'
     CAN_RUN = 'CAN_RUN'
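The only change to `iam.py` is the new `CAN_QUERY` permission level. A hedged sketch of granting it through the generic permissions API; the `w.permissions.update` call, the `'serving-endpoints'` object type, and the endpoint ID are assumptions about the existing SDK surface, not something this diff shows:

```python
# Sketch only: grants the new CAN_QUERY level on a serving endpoint.
# The request_object_type string and the endpoint ID are assumptions.
from databricks.sdk import WorkspaceClient
from databricks.sdk.service import iam

w = WorkspaceClient()
w.permissions.update(request_object_type='serving-endpoints',
                     request_object_id='<endpoint-id>',
                     access_control_list=[
                         iam.AccessControlRequest(
                             user_name='someone@example.com',
                             permission_level=iam.PermissionLevel.CAN_QUERY)
                     ])
```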
databricks/sdk/service/serving.py CHANGED

@@ -567,12 +567,12 @@ class EndpointCoreConfigInput:
     """The name of the serving endpoint to update. This field is required."""
 
     served_entities: Optional[List[ServedEntityInput]] = None
-    """A list of served entities for the endpoint to serve. A serving endpoint can have up to
+    """A list of served entities for the endpoint to serve. A serving endpoint can have up to 15 served
     entities."""
 
     served_models: Optional[List[ServedModelInput]] = None
     """(Deprecated, use served_entities instead) A list of served models for the endpoint to serve. A
-    serving endpoint can have up to
+    serving endpoint can have up to 15 served models."""
 
     traffic_config: Optional[TrafficConfig] = None
     """The traffic config defining how invocations to the serving endpoint should be routed."""
@@ -661,6 +661,10 @@ class EndpointCoreConfigSummary:
 
 @dataclass
 class EndpointPendingConfig:
+    auto_capture_config: Optional[AutoCaptureConfigOutput] = None
+    """Configuration for Inference Tables which automatically logs requests and responses to Unity
+    Catalog."""
+
     config_version: Optional[int] = None
     """The config version that the serving endpoint is currently serving."""
 
@@ -680,6 +684,7 @@ class EndpointPendingConfig:
     def as_dict(self) -> dict:
         """Serializes the EndpointPendingConfig into a dictionary suitable for use as a JSON request body."""
         body = {}
+        if self.auto_capture_config: body['auto_capture_config'] = self.auto_capture_config.as_dict()
         if self.config_version is not None: body['config_version'] = self.config_version
         if self.served_entities: body['served_entities'] = [v.as_dict() for v in self.served_entities]
         if self.served_models: body['served_models'] = [v.as_dict() for v in self.served_models]
@@ -690,7 +695,8 @@ class EndpointPendingConfig:
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> EndpointPendingConfig:
         """Deserializes the EndpointPendingConfig from a dictionary."""
-        return cls(
+        return cls(auto_capture_config=_from_dict(d, 'auto_capture_config', AutoCaptureConfigOutput),
+                   config_version=d.get('config_version', None),
                    served_entities=_repeated_dict(d, 'served_entities', ServedEntityOutput),
                    served_models=_repeated_dict(d, 'served_models', ServedModelOutput),
                    start_time=d.get('start_time', None),
@@ -2438,8 +2444,7 @@ class ServingEndpointsAPI:
             raise TimeoutError(f'timed out after {timeout}: {status_message}')
 
     def build_logs(self, name: str, served_model_name: str) -> BuildLogsResponse:
-        """
-        served model.
+        """Get build logs for a served model.
 
         Retrieves the build logs associated with the provided served model.
 
@@ -2518,7 +2523,7 @@ class ServingEndpointsAPI:
         self._api.do('DELETE', f'/api/2.0/serving-endpoints/{name}', headers=headers)
 
     def export_metrics(self, name: str):
-        """
+        """Get metrics of a serving endpoint.
 
         Retrieves the metrics associated with the provided serving endpoint in either Prometheus or
         OpenMetrics exposition format.
@@ -2587,7 +2592,7 @@ class ServingEndpointsAPI:
         return ServingEndpointPermissions.from_dict(res)
 
     def list(self) -> Iterator[ServingEndpoint]:
-        """
+        """Get all serving endpoints.
 
         :returns: Iterator over :class:`ServingEndpoint`
         """
@@ -2599,7 +2604,7 @@ class ServingEndpointsAPI:
         return parsed if parsed is not None else []
 
     def logs(self, name: str, served_model_name: str) -> ServerLogsResponse:
-        """
+        """Get the latest logs for a served model.
 
         Retrieves the service logs associated with the provided served model.
 
@@ -2623,7 +2628,7 @@ class ServingEndpointsAPI:
               *,
              add_tags: Optional[List[EndpointTag]] = None,
              delete_tags: Optional[List[str]] = None) -> Iterator[EndpointTag]:
-        """
+        """Update tags of a serving endpoint.
 
         Used to batch add and delete tags from a serving endpoint with a single API call.
 
@@ -2645,7 +2650,7 @@ class ServingEndpointsAPI:
         return [EndpointTag.from_dict(v) for v in res]
 
     def put(self, name: str, *, rate_limits: Optional[List[RateLimit]] = None) -> PutResponse:
-        """Update
+        """Update rate limits of a serving endpoint.
 
         Used to update the rate limits of a serving endpoint. NOTE: only external and foundation model
         endpoints are supported as of now.
@@ -2683,7 +2688,7 @@ class ServingEndpointsAPI:
               stop: Optional[List[str]] = None,
              stream: Optional[bool] = None,
              temperature: Optional[float] = None) -> QueryEndpointResponse:
-        """Query a serving endpoint
+        """Query a serving endpoint.
 
         :param name: str
           The name of the serving endpoint. This field is required.
@@ -2789,7 +2794,7 @@ class ServingEndpointsAPI:
                       served_entities: Optional[List[ServedEntityInput]] = None,
                      served_models: Optional[List[ServedModelInput]] = None,
                      traffic_config: Optional[TrafficConfig] = None) -> Wait[ServingEndpointDetailed]:
-        """Update a serving endpoint
+        """Update config of a serving endpoint.
 
         Updates any combination of the serving endpoint's served entities, the compute configuration of those
         served entities, and the endpoint's traffic config. An endpoint that already has an update in progress
@@ -2800,11 +2805,11 @@ class ServingEndpointsAPI:
         :param auto_capture_config: :class:`AutoCaptureConfigInput` (optional)
          Configuration for Inference Tables which automatically logs requests and responses to Unity Catalog.
         :param served_entities: List[:class:`ServedEntityInput`] (optional)
-          A list of served entities for the endpoint to serve. A serving endpoint can have up to
+          A list of served entities for the endpoint to serve. A serving endpoint can have up to 15 served
          entities.
         :param served_models: List[:class:`ServedModelInput`] (optional)
          (Deprecated, use served_entities instead) A list of served models for the endpoint to serve. A
-          serving endpoint can have up to
+          serving endpoint can have up to 15 served models.
         :param traffic_config: :class:`TrafficConfig` (optional)
          The traffic config defining how invocations to the serving endpoint should be routed.
 
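Beyond the new `auto_capture_config` field on `EndpointPendingConfig`, the serving changes are docstring completions for existing methods (`build_logs`, `export_metrics`, `list`, `logs`, `put`, `query`, `update_config`). A small sketch exercising two of the read-only calls named above; the endpoint and served-model names are placeholders:

```python
# Sketch only: lists serving endpoints and pulls logs for one served model.
# The endpoint and served-model names are placeholders.
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

# "Get all serving endpoints."
for endpoint in w.serving_endpoints.list():
    print(endpoint.name, endpoint.state)

# "Get build logs for a served model." and "Get the latest logs for a served model."
build = w.serving_endpoints.build_logs(name='my-endpoint', served_model_name='my-model')
served = w.serving_endpoints.logs(name='my-endpoint', served_model_name='my-model')
print(build.logs)
print(served.logs)
```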
databricks/sdk/service/settings.py CHANGED

@@ -3271,10 +3271,10 @@ class NetworkConnectivityAPI:
                                      private_endpoint_rule_id: str) -> NccAzurePrivateEndpointRule:
         """Delete a private endpoint rule.
 
-        Initiates deleting a private endpoint rule.
-
-
-        compute resources.
+        Initiates deleting a private endpoint rule. If the connection state is PENDING or EXPIRED, the private
+        endpoint is immediately deleted. Otherwise, the private endpoint is deactivated and will be deleted
+        after seven days of deactivation. When a private endpoint is deactivated, the `deactivated` field is
+        set to `true` and the private endpoint is not available to your serverless compute resources.
 
         :param network_connectivity_config_id: str
           Your Network Connectvity Configuration ID.
databricks/sdk/service/sharing.py CHANGED

@@ -1287,7 +1287,12 @@ class SharedDataObject:
     comment: Optional[str] = None
     """A user-provided comment when adding the data object to the share. [Update:OPT]"""
 
-
+    content: Optional[str] = None
+    """The content of the notebook file when the data object type is NOTEBOOK_FILE. This should be
+    base64 encoded. Required for adding a NOTEBOOK_FILE, optional for updating, ignored for other
+    types."""
+
+    data_object_type: Optional[SharedDataObjectDataObjectType] = None
     """The type of the data object."""
 
     history_data_sharing_status: Optional[SharedDataObjectHistoryDataSharingStatus] = None
@@ -1326,7 +1331,8 @@ class SharedDataObject:
         if self.added_by is not None: body['added_by'] = self.added_by
         if self.cdf_enabled is not None: body['cdf_enabled'] = self.cdf_enabled
         if self.comment is not None: body['comment'] = self.comment
-        if self.
+        if self.content is not None: body['content'] = self.content
+        if self.data_object_type is not None: body['data_object_type'] = self.data_object_type.value
         if self.history_data_sharing_status is not None:
             body['history_data_sharing_status'] = self.history_data_sharing_status.value
         if self.name is not None: body['name'] = self.name
@@ -1344,7 +1350,8 @@ class SharedDataObject:
                    added_by=d.get('added_by', None),
                    cdf_enabled=d.get('cdf_enabled', None),
                    comment=d.get('comment', None),
-
+                   content=d.get('content', None),
+                   data_object_type=_enum(d, 'data_object_type', SharedDataObjectDataObjectType),
                    history_data_sharing_status=_enum(d, 'history_data_sharing_status',
                                                      SharedDataObjectHistoryDataSharingStatus),
                    name=d.get('name', None),
@@ -1355,6 +1362,18 @@ class SharedDataObject:
                    string_shared_as=d.get('string_shared_as', None))
 
 
+class SharedDataObjectDataObjectType(Enum):
+    """The type of the data object."""
+
+    MATERIALIZED_VIEW = 'MATERIALIZED_VIEW'
+    MODEL = 'MODEL'
+    NOTEBOOK_FILE = 'NOTEBOOK_FILE'
+    SCHEMA = 'SCHEMA'
+    STREAMING_TABLE = 'STREAMING_TABLE'
+    TABLE = 'TABLE'
+    VIEW = 'VIEW'
+
+
 class SharedDataObjectHistoryDataSharingStatus(Enum):
     """Whether to enable or disable sharing of data history. If not specified, the default is
     **DISABLED**."""
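`SharedDataObject` gains a `content` field and a `SharedDataObjectDataObjectType` enum, so a notebook file can now be described as a shared object. A sketch that builds such an object locally and round-trips it through `as_dict`/`from_dict`; the object name and notebook source are placeholders:

```python
# Sketch only: describes a notebook file as a shared data object using the new
# content field and data-object-type enum; the name and source are placeholders.
import base64

from databricks.sdk.service.sharing import (SharedDataObject,
                                            SharedDataObjectDataObjectType)

notebook_source = b"print('hello from a shared notebook')"
obj = SharedDataObject(name='shared_notebooks/hello',
                       comment='example shared notebook',
                       data_object_type=SharedDataObjectDataObjectType.NOTEBOOK_FILE,
                       content=base64.b64encode(notebook_source).decode('ascii'))

body = obj.as_dict()  # JSON-ready payload for a create/update share request
assert body['data_object_type'] == 'NOTEBOOK_FILE'
assert SharedDataObject.from_dict(body).content == obj.content
```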
databricks/sdk/service/sql.py CHANGED

@@ -220,7 +220,7 @@ class AlertQuery:
     """The timestamp at which this query was last updated."""
 
     user_id: Optional[int] = None
-    """The ID of the user who
+    """The ID of the user who owns the query."""
 
     def as_dict(self) -> dict:
         """Serializes the AlertQuery into a dictionary suitable for use as a JSON request body."""
@@ -709,7 +709,7 @@ class Dashboard:
     user: Optional[User] = None
 
     user_id: Optional[int] = None
-    """The ID of the user
+    """The ID of the user who owns the dashboard."""
 
     widgets: Optional[List[Widget]] = None
 
@@ -2304,7 +2304,7 @@ class Query:
     user: Optional[User] = None
 
     user_id: Optional[int] = None
-    """The ID of the user who
+    """The ID of the user who owns the query."""
 
     visualizations: Optional[List[Visualization]] = None
 
databricks/sdk/service/vectorsearch.py CHANGED

@@ -320,6 +320,9 @@ class DeltaSyncVectorIndexSpecResponse:
 
 @dataclass
 class DirectAccessVectorIndexSpec:
+    embedding_source_columns: Optional[List[EmbeddingSourceColumn]] = None
+    """Contains the optional model endpoint to use during query time."""
+
     embedding_vector_columns: Optional[List[EmbeddingVectorColumn]] = None
 
     schema_json: Optional[str] = None
@@ -333,6 +336,8 @@ class DirectAccessVectorIndexSpec:
     def as_dict(self) -> dict:
         """Serializes the DirectAccessVectorIndexSpec into a dictionary suitable for use as a JSON request body."""
         body = {}
+        if self.embedding_source_columns:
+            body['embedding_source_columns'] = [v.as_dict() for v in self.embedding_source_columns]
         if self.embedding_vector_columns:
             body['embedding_vector_columns'] = [v.as_dict() for v in self.embedding_vector_columns]
         if self.schema_json is not None: body['schema_json'] = self.schema_json
@@ -341,7 +346,9 @@ class DirectAccessVectorIndexSpec:
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DirectAccessVectorIndexSpec:
         """Deserializes the DirectAccessVectorIndexSpec from a dictionary."""
-        return cls(
+        return cls(embedding_source_columns=_repeated_dict(d, 'embedding_source_columns',
+                                                           EmbeddingSourceColumn),
+                   embedding_vector_columns=_repeated_dict(d, 'embedding_vector_columns',
                                                            EmbeddingVectorColumn),
                    schema_json=d.get('schema_json', None))
 
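`DirectAccessVectorIndexSpec` now carries `embedding_source_columns` alongside `embedding_vector_columns` and serializes them the same way. A sketch that deserializes such a spec from a plain dictionary; the keys inside each column entry are assumptions about `EmbeddingSourceColumn`, whose definition this diff does not show:

```python
# Sketch only: round-trips a DirectAccessVectorIndexSpec that includes the new
# embedding_source_columns field. The nested column keys are assumptions.
from databricks.sdk.service.vectorsearch import DirectAccessVectorIndexSpec

spec = DirectAccessVectorIndexSpec.from_dict({
    'embedding_source_columns': [{
        'name': 'text',                                  # assumed field name
        'embedding_model_endpoint_name': 'e5-small-v2',  # assumed field name
    }],
    'schema_json': '{"id": "string", "text": "string"}',
})

assert spec.embedding_source_columns  # the new field is populated
print(spec.as_dict())
```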
databricks/sdk/version.py CHANGED

@@ -1 +1 @@
-__version__ = '0.21.0'
+__version__ = '0.23.0'