databricks-sdk 0.27.1__py3-none-any.whl → 0.28.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of databricks-sdk might be problematic.
- databricks/sdk/__init__.py +9 -9
- databricks/sdk/azure.py +0 -27
- databricks/sdk/config.py +6 -9
- databricks/sdk/core.py +5 -0
- databricks/sdk/environments.py +34 -1
- databricks/sdk/errors/__init__.py +1 -0
- databricks/sdk/errors/mapper.py +4 -0
- databricks/sdk/errors/private_link.py +60 -0
- databricks/sdk/service/catalog.py +666 -628
- databricks/sdk/service/compute.py +72 -105
- databricks/sdk/service/jobs.py +1 -12
- databricks/sdk/service/marketplace.py +9 -31
- databricks/sdk/service/pipelines.py +118 -3
- databricks/sdk/service/serving.py +78 -10
- databricks/sdk/service/sharing.py +37 -2
- databricks/sdk/service/sql.py +0 -1
- databricks/sdk/service/vectorsearch.py +188 -1
- databricks/sdk/service/workspace.py +8 -4
- databricks/sdk/version.py +1 -1
- {databricks_sdk-0.27.1.dist-info → databricks_sdk-0.28.0.dist-info}/METADATA +1 -1
- {databricks_sdk-0.27.1.dist-info → databricks_sdk-0.28.0.dist-info}/RECORD +25 -24
- {databricks_sdk-0.27.1.dist-info → databricks_sdk-0.28.0.dist-info}/LICENSE +0 -0
- {databricks_sdk-0.27.1.dist-info → databricks_sdk-0.28.0.dist-info}/NOTICE +0 -0
- {databricks_sdk-0.27.1.dist-info → databricks_sdk-0.28.0.dist-info}/WHEEL +0 -0
- {databricks_sdk-0.27.1.dist-info → databricks_sdk-0.28.0.dist-info}/top_level.txt +0 -0
databricks/sdk/service/sharing.py
CHANGED

@@ -531,17 +531,23 @@ class CreateShare:
     comment: Optional[str] = None
     """User-provided free-form text description."""

+    storage_root: Optional[str] = None
+    """Storage root URL for the share."""
+
     def as_dict(self) -> dict:
         """Serializes the CreateShare into a dictionary suitable for use as a JSON request body."""
         body = {}
         if self.comment is not None: body['comment'] = self.comment
         if self.name is not None: body['name'] = self.name
+        if self.storage_root is not None: body['storage_root'] = self.storage_root
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateShare:
         """Deserializes the CreateShare from a dictionary."""
-        return cls(comment=d.get('comment', None), name=d.get('name', None))
+        return cls(comment=d.get('comment', None),
+                   name=d.get('name', None),
+                   storage_root=d.get('storage_root', None))


 @dataclass

@@ -790,6 +796,7 @@ class Privilege(Enum):
     REFRESH = 'REFRESH'
     SELECT = 'SELECT'
     SET_SHARE_PERMISSION = 'SET_SHARE_PERMISSION'
+    SINGLE_USER_ACCESS = 'SINGLE_USER_ACCESS'
     USAGE = 'USAGE'
     USE_CATALOG = 'USE_CATALOG'
     USE_CONNECTION = 'USE_CONNECTION'

@@ -1215,6 +1222,12 @@ class ShareInfo:
     owner: Optional[str] = None
     """Username of current owner of share."""

+    storage_location: Optional[str] = None
+    """Storage Location URL (full path) for the share."""
+
+    storage_root: Optional[str] = None
+    """Storage root URL for the share."""
+
     updated_at: Optional[int] = None
     """Time at which this share was updated, in epoch milliseconds."""


@@ -1230,6 +1243,8 @@ class ShareInfo:
         if self.name is not None: body['name'] = self.name
         if self.objects: body['objects'] = [v.as_dict() for v in self.objects]
         if self.owner is not None: body['owner'] = self.owner
+        if self.storage_location is not None: body['storage_location'] = self.storage_location
+        if self.storage_root is not None: body['storage_root'] = self.storage_root
         if self.updated_at is not None: body['updated_at'] = self.updated_at
         if self.updated_by is not None: body['updated_by'] = self.updated_by
         return body

@@ -1243,6 +1258,8 @@ class ShareInfo:
                    name=d.get('name', None),
                    objects=_repeated_dict(d, 'objects', SharedDataObject),
                    owner=d.get('owner', None),
+                   storage_location=d.get('storage_location', None),
+                   storage_root=d.get('storage_root', None),
                    updated_at=d.get('updated_at', None),
                    updated_by=d.get('updated_by', None))

@@ -1576,6 +1593,9 @@ class UpdateShare:
     owner: Optional[str] = None
     """Username of current owner of share."""

+    storage_root: Optional[str] = None
+    """Storage root URL for the share."""
+
     updates: Optional[List[SharedDataObjectUpdate]] = None
     """Array of shared data object updates."""


@@ -1586,6 +1606,7 @@ class UpdateShare:
         if self.name is not None: body['name'] = self.name
         if self.new_name is not None: body['new_name'] = self.new_name
         if self.owner is not None: body['owner'] = self.owner
+        if self.storage_root is not None: body['storage_root'] = self.storage_root
         if self.updates: body['updates'] = [v.as_dict() for v in self.updates]
         return body

@@ -1596,6 +1617,7 @@ class UpdateShare:
                    name=d.get('name', None),
                    new_name=d.get('new_name', None),
                    owner=d.get('owner', None),
+                   storage_root=d.get('storage_root', None),
                    updates=_repeated_dict(d, 'updates', SharedDataObjectUpdate))


@@ -2193,7 +2215,11 @@ class SharesAPI:
     def __init__(self, api_client):
         self._api = api_client

-    def create(self, name: str, *, comment: Optional[str] = None) -> ShareInfo:
+    def create(self,
+               name: str,
+               *,
+               comment: Optional[str] = None,
+               storage_root: Optional[str] = None) -> ShareInfo:
         """Create a share.

         Creates a new share for data objects. Data objects can be added after creation with **update**. The

@@ -2203,12 +2229,15 @@ class SharesAPI:
           Name of the share.
         :param comment: str (optional)
           User-provided free-form text description.
+        :param storage_root: str (optional)
+          Storage root URL for the share.

         :returns: :class:`ShareInfo`
         """
         body = {}
         if comment is not None: body['comment'] = comment
         if name is not None: body['name'] = name
+        if storage_root is not None: body['storage_root'] = storage_root
         headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }

         res = self._api.do('POST', '/api/2.1/unity-catalog/shares', body=body, headers=headers)
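The new `storage_root` parameter flows straight through to the request body above. A minimal usage sketch (credentials, share name, and bucket are hypothetical):

from databricks.sdk import WorkspaceClient

w = WorkspaceClient()  # picks up credentials from the environment

# Create a share with its own storage root; omit storage_root to keep the
# pre-0.28.0 behaviour.
share = w.shares.create(name='sales_share',
                        comment='Quarterly sales tables',
                        storage_root='s3://my-bucket/shares/sales_share')
print(share.storage_location)  # full path resolved by the server, if populated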
@@ -2288,6 +2317,7 @@ class SharesAPI:
               comment: Optional[str] = None,
               new_name: Optional[str] = None,
               owner: Optional[str] = None,
+              storage_root: Optional[str] = None,
               updates: Optional[List[SharedDataObjectUpdate]] = None) -> ShareInfo:
         """Update a share.


@@ -2299,6 +2329,8 @@ class SharesAPI:
         In the case that the share name is changed, **updateShare** requires that the caller is both the share
         owner and a metastore admin.

+        If there are notebook files in the share, the __storage_root__ field cannot be updated.
+
         For each table that is added through this method, the share owner must also have **SELECT** privilege
         on the table. This privilege must be maintained indefinitely for recipients to be able to access the
         table. Typically, you should use a group as the share owner.

@@ -2313,6 +2345,8 @@ class SharesAPI:
           New name for the share.
         :param owner: str (optional)
           Username of current owner of share.
+        :param storage_root: str (optional)
+          Storage root URL for the share.
         :param updates: List[:class:`SharedDataObjectUpdate`] (optional)
           Array of shared data object updates.


@@ -2322,6 +2356,7 @@ class SharesAPI:
         if comment is not None: body['comment'] = comment
         if new_name is not None: body['new_name'] = new_name
         if owner is not None: body['owner'] = owner
+        if storage_root is not None: body['storage_root'] = storage_root
         if updates is not None: body['updates'] = [v.as_dict() for v in updates]
         headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }

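The same field is settable on `update`, subject to the notebook-files caveat documented above. A sketch, continuing the hypothetical share from the previous example:

from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

# Fails if the share already contains notebook files (see the docstring above).
share = w.shares.update(name='sales_share',
                        storage_root='s3://my-bucket/shares/sales_share_v2')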
databricks/sdk/service/vectorsearch.py
CHANGED
@@ -235,7 +235,11 @@ class DeltaSyncVectorIndexSpecRequest:
     """The columns that contain the embedding source."""

     embedding_vector_columns: Optional[List[EmbeddingVectorColumn]] = None
-    """The columns that contain the embedding vectors."""
+    """The columns that contain the embedding vectors. The format should be array[double]."""
+
+    embedding_writeback_table: Optional[str] = None
+    """[Optional] Automatically sync the vector index contents and computed embeddings to the specified
+    Delta table. The only supported table name is the index name with the suffix `_writeback_table`."""

     pipeline_type: Optional[PipelineType] = None
     """Pipeline execution mode.

@@ -256,6 +260,8 @@ class DeltaSyncVectorIndexSpecRequest:
             body['embedding_source_columns'] = [v.as_dict() for v in self.embedding_source_columns]
         if self.embedding_vector_columns:
             body['embedding_vector_columns'] = [v.as_dict() for v in self.embedding_vector_columns]
+        if self.embedding_writeback_table is not None:
+            body['embedding_writeback_table'] = self.embedding_writeback_table
         if self.pipeline_type is not None: body['pipeline_type'] = self.pipeline_type.value
         if self.source_table is not None: body['source_table'] = self.source_table
         return body

@@ -267,6 +273,7 @@ class DeltaSyncVectorIndexSpecRequest:
                                                            EmbeddingSourceColumn),
                    embedding_vector_columns=_repeated_dict(d, 'embedding_vector_columns',
                                                            EmbeddingVectorColumn),
+                   embedding_writeback_table=d.get('embedding_writeback_table', None),
                    pipeline_type=_enum(d, 'pipeline_type', PipelineType),
                    source_table=d.get('source_table', None))


@@ -279,6 +286,9 @@ class DeltaSyncVectorIndexSpecResponse:
     embedding_vector_columns: Optional[List[EmbeddingVectorColumn]] = None
     """The columns that contain the embedding vectors."""

+    embedding_writeback_table: Optional[str] = None
+    """[Optional] Name of the Delta table to sync the vector index contents and computed embeddings to."""
+
     pipeline_id: Optional[str] = None
     """The ID of the pipeline that is used to sync the index."""


@@ -301,6 +311,8 @@ class DeltaSyncVectorIndexSpecResponse:
             body['embedding_source_columns'] = [v.as_dict() for v in self.embedding_source_columns]
         if self.embedding_vector_columns:
             body['embedding_vector_columns'] = [v.as_dict() for v in self.embedding_vector_columns]
+        if self.embedding_writeback_table is not None:
+            body['embedding_writeback_table'] = self.embedding_writeback_table
         if self.pipeline_id is not None: body['pipeline_id'] = self.pipeline_id
         if self.pipeline_type is not None: body['pipeline_type'] = self.pipeline_type.value
         if self.source_table is not None: body['source_table'] = self.source_table

@@ -313,6 +325,7 @@ class DeltaSyncVectorIndexSpecResponse:
                                                            EmbeddingSourceColumn),
                    embedding_vector_columns=_repeated_dict(d, 'embedding_vector_columns',
                                                            EmbeddingVectorColumn),
+                   embedding_writeback_table=d.get('embedding_writeback_table', None),
                    pipeline_id=d.get('pipeline_id', None),
                    pipeline_type=_enum(d, 'pipeline_type', PipelineType),
                    source_table=d.get('source_table', None))
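To show how `embedding_writeback_table` is supplied in practice, here is a sketch of building the request spec; the catalog, table, index, and endpoint names are hypothetical:

from databricks.sdk.service.vectorsearch import (DeltaSyncVectorIndexSpecRequest,
                                                 EmbeddingSourceColumn, PipelineType)

# For an index named main.docs.pages_index, the only accepted writeback table
# name is the index name plus the `_writeback_table` suffix.
spec = DeltaSyncVectorIndexSpecRequest(
    source_table='main.docs.pages',
    pipeline_type=PipelineType.TRIGGERED,
    embedding_source_columns=[
        EmbeddingSourceColumn(name='text',
                              embedding_model_endpoint_name='e5-small-v2')
    ],
    embedding_writeback_table='main.docs.pages_index_writeback_table')

print(spec.as_dict())  # the JSON request body produced by as_dict() above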
@@ -515,6 +528,22 @@ class ListEndpointResponse:
                    next_page_token=d.get('next_page_token', None))


+@dataclass
+class ListValue:
+    values: Optional[List[Value]] = None
+
+    def as_dict(self) -> dict:
+        """Serializes the ListValue into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.values: body['values'] = [v.as_dict() for v in self.values]
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> ListValue:
+        """Deserializes the ListValue from a dictionary."""
+        return cls(values=_repeated_dict(d, 'values', Value))
+
+
 @dataclass
 class ListVectorIndexesResponse:
     next_page_token: Optional[str] = None

@@ -537,6 +566,29 @@ class ListVectorIndexesResponse:
                    vector_indexes=_repeated_dict(d, 'vector_indexes', MiniVectorIndex))


+@dataclass
+class MapStringValueEntry:
+    """Key-value pair."""
+
+    key: Optional[str] = None
+    """Column name."""
+
+    value: Optional[Value] = None
+    """Column value, nullable."""
+
+    def as_dict(self) -> dict:
+        """Serializes the MapStringValueEntry into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.key is not None: body['key'] = self.key
+        if self.value: body['value'] = self.value.as_dict()
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> MapStringValueEntry:
+        """Deserializes the MapStringValueEntry from a dictionary."""
+        return cls(key=d.get('key', None), value=_from_dict(d, 'value', Value))
+
+
 @dataclass
 class MiniVectorIndex:
     creator: Optional[str] = None

@@ -712,6 +764,75 @@ class ResultManifest:
         return cls(column_count=d.get('column_count', None), columns=_repeated_dict(d, 'columns', ColumnInfo))


+@dataclass
+class ScanVectorIndexRequest:
+    """Request payload for scanning data from a vector index."""
+
+    index_name: Optional[str] = None
+    """Name of the vector index to scan."""
+
+    last_primary_key: Optional[str] = None
+    """Primary key of the last entry returned in the previous scan."""
+
+    num_results: Optional[int] = None
+    """Number of results to return. Defaults to 10."""
+
+    def as_dict(self) -> dict:
+        """Serializes the ScanVectorIndexRequest into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.index_name is not None: body['index_name'] = self.index_name
+        if self.last_primary_key is not None: body['last_primary_key'] = self.last_primary_key
+        if self.num_results is not None: body['num_results'] = self.num_results
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> ScanVectorIndexRequest:
+        """Deserializes the ScanVectorIndexRequest from a dictionary."""
+        return cls(index_name=d.get('index_name', None),
+                   last_primary_key=d.get('last_primary_key', None),
+                   num_results=d.get('num_results', None))
+
+
+@dataclass
+class ScanVectorIndexResponse:
+    """Response to a scan vector index request."""
+
+    data: Optional[List[Struct]] = None
+    """List of data entries"""
+
+    last_primary_key: Optional[str] = None
+    """Primary key of the last entry."""
+
+    def as_dict(self) -> dict:
+        """Serializes the ScanVectorIndexResponse into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.data: body['data'] = [v.as_dict() for v in self.data]
+        if self.last_primary_key is not None: body['last_primary_key'] = self.last_primary_key
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> ScanVectorIndexResponse:
+        """Deserializes the ScanVectorIndexResponse from a dictionary."""
+        return cls(data=_repeated_dict(d, 'data', Struct), last_primary_key=d.get('last_primary_key', None))
+
+
+@dataclass
+class Struct:
+    fields: Optional[List[MapStringValueEntry]] = None
+    """Data entry, corresponding to a row in a vector index."""
+
+    def as_dict(self) -> dict:
+        """Serializes the Struct into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.fields: body['fields'] = [v.as_dict() for v in self.fields]
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> Struct:
+        """Deserializes the Struct from a dictionary."""
+        return cls(fields=_repeated_dict(d, 'fields', MapStringValueEntry))
+
+
 @dataclass
 class SyncIndexResponse:

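Like every generated model in the SDK, these classes round-trip through `as_dict`/`from_dict`, which is convenient in tests:

from databricks.sdk.service.vectorsearch import ScanVectorIndexRequest

req = ScanVectorIndexRequest(index_name='main.docs.pages_index',  # hypothetical name
                             num_results=50)
assert ScanVectorIndexRequest.from_dict(req.as_dict()) == req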
@@ -805,6 +926,42 @@ class UpsertDataVectorIndexResponse:
                    status=_enum(d, 'status', UpsertDataStatus))


+@dataclass
+class Value:
+    bool_value: Optional[bool] = None
+
+    list_value: Optional[ListValue] = None
+
+    null_value: Optional[str] = None
+
+    number_value: Optional[float] = None
+
+    string_value: Optional[str] = None
+
+    struct_value: Optional[Struct] = None
+
+    def as_dict(self) -> dict:
+        """Serializes the Value into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.bool_value is not None: body['bool_value'] = self.bool_value
+        if self.list_value: body['list_value'] = self.list_value.as_dict()
+        if self.null_value is not None: body['null_value'] = self.null_value
+        if self.number_value is not None: body['number_value'] = self.number_value
+        if self.string_value is not None: body['string_value'] = self.string_value
+        if self.struct_value: body['struct_value'] = self.struct_value.as_dict()
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> Value:
+        """Deserializes the Value from a dictionary."""
+        return cls(bool_value=d.get('bool_value', None),
+                   list_value=_from_dict(d, 'list_value', ListValue),
+                   null_value=d.get('null_value', None),
+                   number_value=d.get('number_value', None),
+                   string_value=d.get('string_value', None),
+                   struct_value=_from_dict(d, 'struct_value', Struct))
+
+
 @dataclass
 class VectorIndex:
     creator: Optional[str] = None
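Scan results come back as this protobuf-style `Value` tree rather than plain JSON. A small helper (a sketch, not part of the SDK) to flatten a value into native Python objects:

from databricks.sdk.service.vectorsearch import Value

def to_python(v: Value):
    """Recursively convert a Value into the equivalent native Python object."""
    if v.struct_value is not None:
        return {f.key: (to_python(f.value) if f.value else None)
                for f in v.struct_value.fields or []}
    if v.list_value is not None:
        return [to_python(x) for x in v.list_value.values or []]
    if v.bool_value is not None:
        return v.bool_value
    if v.number_value is not None:
        return v.number_value
    if v.string_value is not None:
        return v.string_value
    return None  # null_value set, or no field set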
@@ -1216,6 +1373,36 @@ class VectorSearchIndexesAPI:
                            headers=headers)
         return QueryVectorIndexResponse.from_dict(res)

+    def scan_index(self,
+                   index_name: str,
+                   *,
+                   last_primary_key: Optional[str] = None,
+                   num_results: Optional[int] = None) -> ScanVectorIndexResponse:
+        """Scan an index.
+
+        Scan the specified vector index and return the first `num_results` entries after the exclusive
+        `primary_key`.
+
+        :param index_name: str
+          Name of the vector index to scan.
+        :param last_primary_key: str (optional)
+          Primary key of the last entry returned in the previous scan.
+        :param num_results: int (optional)
+          Number of results to return. Defaults to 10.
+
+        :returns: :class:`ScanVectorIndexResponse`
+        """
+        body = {}
+        if last_primary_key is not None: body['last_primary_key'] = last_primary_key
+        if num_results is not None: body['num_results'] = num_results
+        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+
+        res = self._api.do('POST',
+                           f'/api/2.0/vector-search/indexes/{index_name}/scan',
+                           body=body,
+                           headers=headers)
+        return ScanVectorIndexResponse.from_dict(res)
+
     def sync_index(self, index_name: str):
         """Synchronize an index.

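Because pagination is keyed off the last primary key, exporting an entire index with `scan_index` is a short loop. A sketch (index name hypothetical; each row can be flattened with a helper like `to_python` above):

from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

last_key = None
while True:
    page = w.vector_search_indexes.scan_index('main.docs.pages_index',
                                              last_primary_key=last_key,
                                              num_results=100)
    if not page.data:
        break
    for row in page.data:  # each row is a Struct of MapStringValueEntry
        print({f.key: f.value for f in row.fields or []})
    last_key = page.last_primary_key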
databricks/sdk/service/workspace.py
CHANGED

@@ -144,7 +144,8 @@ class CreateRepo:
     gitLabEnterpriseEdition and awsCodeCommit."""

     path: Optional[str] = None
-    """Desired path for the repo in the workspace. Must be in the format /Repos/{folder}/{repo-name}."""
+    """Desired path for the repo in the workspace. Almost any path in the workspace can be chosen. If
+    repo is created in /Repos, path must be in the format /Repos/{folder}/{repo-name}."""

     sparse_checkout: Optional[SparseCheckout] = None
     """If specified, the repo will be created with sparse checkout enabled. You cannot enable/disable

@@ -949,7 +950,8 @@ class RepoInfo:
     """ID of the repo object in the workspace."""

     path: Optional[str] = None
-    """Desired path for the repo in the workspace. Must be in the format /Repos/{folder}/{repo-name}."""
+    """Desired path for the repo in the workspace. Almost any path in the workspace can be chosen. If
+    repo is created in /Repos, path must be in the format /Repos/{folder}/{repo-name}."""

     provider: Optional[str] = None
     """Git provider. This field is case-insensitive. The available Git providers are gitHub,
@@ -1613,7 +1615,8 @@ class ReposAPI:
           bitbucketCloud, gitLab, azureDevOpsServices, gitHubEnterprise, bitbucketServer,
           gitLabEnterpriseEdition and awsCodeCommit.
         :param path: str (optional)
-          Desired path for the repo in the workspace. Must be in the format /Repos/{folder}/{repo-name}.
+          Desired path for the repo in the workspace. Almost any path in the workspace can be chosen. If repo
+          is created in /Repos, path must be in the format /Repos/{folder}/{repo-name}.
         :param sparse_checkout: :class:`SparseCheckout` (optional)
           If specified, the repo will be created with sparse checkout enabled. You cannot enable/disable
           sparse checkout after the repo is created.

@@ -1706,7 +1709,8 @@ class ReposAPI:
          Token used to get the next page of results. If not specified, returns the first page of results as
          well as a next page token if there are more results.
        :param path_prefix: str (optional)
-         Filters repos that have paths starting with the given path prefix.
+         Filters repos that have paths starting with the given path prefix. If not provided repos from /Repos
+         will be served.

        :returns: Iterator over :class:`RepoInfo`
        """
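With the relaxed path rules, a repo can now be created outside /Repos, for example in a user's home folder. A sketch (URL and paths hypothetical):

from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

# Under /Repos the path must still match /Repos/{folder}/{repo-name};
# other workspace paths, such as a user home folder, are now accepted.
repo = w.repos.create(url='https://github.com/example/my-repo.git',
                      provider='gitHub',
                      path='/Users/someone@example.com/my-repo')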
databricks/sdk/version.py
CHANGED
@@ -1 +1 @@
-__version__ = '0.27.1'
+__version__ = '0.28.0'
{databricks_sdk-0.27.1.dist-info → databricks_sdk-0.28.0.dist-info}/RECORD
CHANGED

@@ -1,26 +1,27 @@
 databricks/__init__.py,sha256=CF2MJcZFwbpn9TwQER8qnCDhkPooBGQNVkX4v7g6p3g,537
-databricks/sdk/__init__.py,sha256=
+databricks/sdk/__init__.py,sha256=XlfPt-_tfDGubwEhhpc8QowEU24QX4gY7dhHIO6moHk,43850
 databricks/sdk/_property.py,sha256=sGjsipeFrjMBSVPjtIb0HNCRcMIhFpVx6wq4BkC3LWs,1636
-databricks/sdk/azure.py,sha256=
+databricks/sdk/azure.py,sha256=8P7nEdun0hbQCap9Ojo7yZse_JHxnhYsE6ApojnPz7Q,1009
 databricks/sdk/casing.py,sha256=NKYPrfPbQjM7lU4hhNQK3z1jb_VEA29BfH4FEdby2tg,1137
 databricks/sdk/clock.py,sha256=Ivlow0r_TkXcTJ8UXkxSA0czKrY0GvwHAeOvjPkJnAQ,1360
-databricks/sdk/config.py,sha256=
-databricks/sdk/core.py,sha256=
+databricks/sdk/config.py,sha256=ZPoO-o3U4PtlcPdKY-eOQxAVRsUrE9omeaEVzbuUen8,18906
+databricks/sdk/core.py,sha256=e3RlEv7CDiVd-1i0XJUIx8iJ6bY__i39OCcwoniUfx8,19048
 databricks/sdk/credentials_provider.py,sha256=zLmXLbt6zDS-P4jRBiS9if6QQGOea2CZn3fUrmJuJLY,26255
 databricks/sdk/dbutils.py,sha256=JUoT5hJVe_fi95g_BqX08iDzsoYfneybXRub42VC-Bw,12771
-databricks/sdk/environments.py,sha256=
+databricks/sdk/environments.py,sha256=5KoVuVfF-ZX17rua1sH3EJCCtniVrREXBXsMNDEV-UU,4293
 databricks/sdk/oauth.py,sha256=jqe0yrrTUfRL8kpR21Odwn4R_X6Ns-hTLu3dKYDI1EM,18313
 databricks/sdk/py.typed,sha256=pSvaHpbY1UPNEXyVFUjlgBhjPFZMmVC_UNrPC7eMOHI,74
 databricks/sdk/retries.py,sha256=WgLh12bwdBc6fCQlaig3kKu18cVhPzFDGsspvq629Ew,2454
-databricks/sdk/version.py,sha256=
+databricks/sdk/version.py,sha256=eKEnUCzRu6EJ3qyBcomfuYJe60IGd89zgi1KXWEOG4o,23
 databricks/sdk/_widgets/__init__.py,sha256=Qm3JB8LmdPgEn_-VgxKkodTO4gn6OdaDPwsYcDmeIRI,2667
 databricks/sdk/_widgets/default_widgets_utils.py,sha256=Rk59AFzVYVpOektB_yC_7j-vSt5OdtZA85IlG0kw0xA,1202
 databricks/sdk/_widgets/ipywidgets_utils.py,sha256=P-AyGeahPiX3S59mxpAMgffi4gyJ0irEOY7Ekkn9nQ0,2850
-databricks/sdk/errors/__init__.py,sha256=
+databricks/sdk/errors/__init__.py,sha256=3l_wHB0S9Y6mDc_c5mUHb-TndDQxa-tdPeWmTbnBNAo,176
 databricks/sdk/errors/base.py,sha256=oawBxpuoyImsLu29ntpAgOc6RQ7kD-UcuFFER9jB3iI,3880
-databricks/sdk/errors/mapper.py,sha256=
+databricks/sdk/errors/mapper.py,sha256=sK4aoloV-F8h1J4YHFrcNVAUBLLQQFti-ceXVmm6HpU,1386
 databricks/sdk/errors/overrides.py,sha256=YswUrm5-FshXtUZ7q53f9DvnjnLoDUUoZ-YTpAyy5F0,1136
 databricks/sdk/errors/platform.py,sha256=dpD97-YcjXTqOwWg2XulFzdyb8qufN14PyU1FdukpF8,3017
+databricks/sdk/errors/private_link.py,sha256=6wVRJQqousGQC7qfT0pV8LqujqfR3XLbSix_XjqVC8s,2304
 databricks/sdk/errors/sdk.py,sha256=_euMruhvquB0v_SKtgqxJUiyXHWuTb4Jl7ji6_h0E_A,109
 databricks/sdk/mixins/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 databricks/sdk/mixins/compute.py,sha256=khb00BzBckc4RLUF4-GnNMCSO5lXKt_XYMM3IhiUxlA,11237

@@ -31,26 +32,26 @@ databricks/sdk/runtime/dbutils_stub.py,sha256=UFbRZF-bBcwxjbv_pxma00bjNtktLLaYpo
 databricks/sdk/service/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 databricks/sdk/service/_internal.py,sha256=nWbJfW5eJCQgAZ3TmA26xoWb6SNZ5N76ZA8bO1N4AsU,1961
 databricks/sdk/service/billing.py,sha256=Hbe5bMsBrpebuAl8yj-GwVRGktrzKwiZJj3gq1wUMaI,50625
-databricks/sdk/service/catalog.py,sha256=
-databricks/sdk/service/compute.py,sha256=
+databricks/sdk/service/catalog.py,sha256=hGYFAIr80CmBxeASIvEnlYdkXGXcC_8d_4H0yW-SXGE,410757
+databricks/sdk/service/compute.py,sha256=YdaGAH1YfNsiKzEu6ASglDvoj9YJCbPSKvJ-xQzQ6rA,397674
 databricks/sdk/service/dashboards.py,sha256=PwhX73El3POXdblc7ZOm2PAkhf5TcSZ5Na73_ne2Zb4,18801
 databricks/sdk/service/files.py,sha256=VCt83YSI9rhQexmxaQdrUXHq2UCYfZcDMLvJx5X6n1M,38162
 databricks/sdk/service/iam.py,sha256=11L45bjOYwzxMVlAXpKrFMOxrZzgZy75JSIOkeAXuFg,147645
-databricks/sdk/service/jobs.py,sha256=
-databricks/sdk/service/marketplace.py,sha256=
+databricks/sdk/service/jobs.py,sha256=vpOT9WCDOkTO2uA_DqU-3s2wdplaZ4gvOs-Hicp2y38,304830
+databricks/sdk/service/marketplace.py,sha256=-TE_cZ05oOw-immazvAbJZrzG7bjVuFP3D68ny9FHj0,135895
 databricks/sdk/service/ml.py,sha256=vohBdESClI3EOpO-ZZ44W-CMz1alq5Tw4oJnWa99Z2M,236128
 databricks/sdk/service/oauth2.py,sha256=zpEA7glY_EsPvMgkk-hmt4eVgrmtcSGgduI7XlShNUo,36215
-databricks/sdk/service/pipelines.py,sha256=
+databricks/sdk/service/pipelines.py,sha256=yY_C6MN3yNtp7Kemp6RkJPRgAu-HQf-ywrVontT0mb4,118755
 databricks/sdk/service/provisioning.py,sha256=DP4Df4X-p0JEUk4zAJQhjX_wxpMi673OKLXFhxl6YSE,142678
-databricks/sdk/service/serving.py,sha256=
+databricks/sdk/service/serving.py,sha256=0Kuy_W4UOQ-7tRkxgQiqJ_1L2ttwhIJdL_6LEd2kO1o,146399
 databricks/sdk/service/settings.py,sha256=bhbYqlLj4gpy_GhCifa_0sLvoDBRNTJzU9H5TerFU4E,177359
-databricks/sdk/service/sharing.py,sha256=
-databricks/sdk/service/sql.py,sha256=
-databricks/sdk/service/vectorsearch.py,sha256=
-databricks/sdk/service/workspace.py,sha256=
-databricks_sdk-0.27.1.dist-info/LICENSE,sha256=
-databricks_sdk-0.27.1.dist-info/METADATA,sha256=
-databricks_sdk-0.27.1.dist-info/NOTICE,sha256=
-databricks_sdk-0.27.1.dist-info/WHEEL,sha256=
-databricks_sdk-0.27.1.dist-info/top_level.txt,sha256=
-databricks_sdk-0.27.1.dist-info/RECORD,,
+databricks/sdk/service/sharing.py,sha256=v6MMjz7n-gfqwcALKSWspRGa7LahNGTwb1oArjigBZA,100181
+databricks/sdk/service/sql.py,sha256=BurJRqsYl4kACpPp2StXEBDL1jjI_-BzydGhOOgwhwU,257882
+databricks/sdk/service/vectorsearch.py,sha256=Z3fW8ZRB6Lwob_U-TIzstXyKSTLJGfVtfXWG6rUxDh4,59038
+databricks/sdk/service/workspace.py,sha256=FKLf5esRmfFstIXo7HQg6HQCzQ2svrb6ulr8yzZ7-8U,101182
+databricks_sdk-0.28.0.dist-info/LICENSE,sha256=afBgTZo-JsYqj4VOjnejBetMuHKcFR30YobDdpVFkqY,11411
+databricks_sdk-0.28.0.dist-info/METADATA,sha256=n3jNER2VjZIXRlcclyLqxqJCTtsbXb8gYD8jXdP0yT4,35766
+databricks_sdk-0.28.0.dist-info/NOTICE,sha256=Qnc0m8JjZNTDV80y0h1aJGvsr4GqM63m1nr2VTypg6E,963
+databricks_sdk-0.28.0.dist-info/WHEEL,sha256=G16H4A3IeoQmnOrYV4ueZGKSjhipXx8zc8nu9FGlvMA,92
+databricks_sdk-0.28.0.dist-info/top_level.txt,sha256=7kRdatoSgU0EUurRQJ_3F1Nv4EOSHWAr6ng25tJOJKU,11
+databricks_sdk-0.28.0.dist-info/RECORD,,
{databricks_sdk-0.27.1.dist-info → databricks_sdk-0.28.0.dist-info}/LICENSE
File without changes

{databricks_sdk-0.27.1.dist-info → databricks_sdk-0.28.0.dist-info}/NOTICE
File without changes

{databricks_sdk-0.27.1.dist-info → databricks_sdk-0.28.0.dist-info}/WHEEL
File without changes

{databricks_sdk-0.27.1.dist-info → databricks_sdk-0.28.0.dist-info}/top_level.txt
File without changes