databricks-sdk 0.43.0__py3-none-any.whl → 0.44.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- databricks/sdk/__init__.py +250 -243
- databricks/sdk/service/apps.py +6 -0
- databricks/sdk/service/billing.py +31 -1
- databricks/sdk/service/catalog.py +8 -0
- databricks/sdk/service/cleanrooms.py +1 -71
- databricks/sdk/service/compute.py +50 -42
- databricks/sdk/service/dashboards.py +28 -0
- databricks/sdk/service/serving.py +1 -1
- databricks/sdk/service/sharing.py +15 -6
- databricks/sdk/version.py +1 -1
- {databricks_sdk-0.43.0.dist-info → databricks_sdk-0.44.0.dist-info}/METADATA +1 -1
- {databricks_sdk-0.43.0.dist-info → databricks_sdk-0.44.0.dist-info}/RECORD +16 -16
- {databricks_sdk-0.43.0.dist-info → databricks_sdk-0.44.0.dist-info}/LICENSE +0 -0
- {databricks_sdk-0.43.0.dist-info → databricks_sdk-0.44.0.dist-info}/NOTICE +0 -0
- {databricks_sdk-0.43.0.dist-info → databricks_sdk-0.44.0.dist-info}/WHEEL +0 -0
- {databricks_sdk-0.43.0.dist-info → databricks_sdk-0.44.0.dist-info}/top_level.txt +0 -0
databricks/sdk/service/apps.py
CHANGED
@@ -45,6 +45,9 @@ class App:
     description: Optional[str] = None
     """The description of the app."""

+    id: Optional[str] = None
+    """The unique identifier of the app."""
+
     pending_deployment: Optional[AppDeployment] = None
     """The pending deployment of the app. A deployment is considered pending when it is being prepared
     for deployment to the app compute."""
@@ -78,6 +81,7 @@ class App:
         if self.default_source_code_path is not None:
             body['default_source_code_path'] = self.default_source_code_path
         if self.description is not None: body['description'] = self.description
+        if self.id is not None: body['id'] = self.id
         if self.name is not None: body['name'] = self.name
         if self.pending_deployment: body['pending_deployment'] = self.pending_deployment.as_dict()
         if self.resources: body['resources'] = [v.as_dict() for v in self.resources]
@@ -102,6 +106,7 @@ class App:
         if self.default_source_code_path is not None:
             body['default_source_code_path'] = self.default_source_code_path
         if self.description is not None: body['description'] = self.description
+        if self.id is not None: body['id'] = self.id
         if self.name is not None: body['name'] = self.name
         if self.pending_deployment: body['pending_deployment'] = self.pending_deployment
         if self.resources: body['resources'] = self.resources
@@ -125,6 +130,7 @@ class App:
                    creator=d.get('creator', None),
                    default_source_code_path=d.get('default_source_code_path', None),
                    description=d.get('description', None),
+                   id=d.get('id', None),
                    name=d.get('name', None),
                    pending_deployment=_from_dict(d, 'pending_deployment', AppDeployment),
                    resources=_repeated_dict(d, 'resources', AppResource),
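The practical effect: `App` now carries a server-generated `id` that round-trips through `as_dict`/`as_shallow_dict`/`from_dict` like the other optional fields. A minimal sketch of reading it, assuming a configured `WorkspaceClient` and a placeholder app name:

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()
    app = w.apps.get(name='my-app')  # 'my-app' is a placeholder
    # Populated by the server on 0.44.0; stays None for payloads that predate the field.
    print(app.id)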
databricks/sdk/service/billing.py
CHANGED

@@ -894,6 +894,27 @@ class GetBudgetConfigurationResponse:
         return cls(budget=_from_dict(d, 'budget', BudgetConfiguration))


+@dataclass
+class LimitConfig:
+    """The limit configuration of the policy. Limit configuration provide a budget policy level cost
+    control by enforcing the limit."""
+
+    def as_dict(self) -> dict:
+        """Serializes the LimitConfig into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the LimitConfig into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> LimitConfig:
+        """Deserializes the LimitConfig from a dictionary."""
+        return cls()
+
+
 @dataclass
 class ListBudgetConfigurationsResponse:
     budgets: Optional[List[BudgetConfiguration]] = None
@@ -1641,23 +1662,32 @@ class BudgetPolicyAPI:
                 return
             query['page_token'] = json['next_page_token']

-    def update(self,
+    def update(self,
+               policy_id: str,
+               *,
+               limit_config: Optional[LimitConfig] = None,
+               policy: Optional[BudgetPolicy] = None) -> BudgetPolicy:
         """Update a budget policy.

         Updates a policy

         :param policy_id: str
           The Id of the policy. This field is generated by Databricks and globally unique.
+        :param limit_config: :class:`LimitConfig` (optional)
+          DEPRECATED. This is redundant field as LimitConfig is part of the BudgetPolicy
         :param policy: :class:`BudgetPolicy` (optional)
           Contains the BudgetPolicy details.

         :returns: :class:`BudgetPolicy`
         """
         body = policy.as_dict()
+        query = {}
+        if limit_config is not None: query['limit_config'] = limit_config.as_dict()
         headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }

         res = self._api.do('PATCH',
                            f'/api/2.1/accounts/{self._api.account_id}/budget-policies/{policy_id}',
+                           query=query,
                            body=body,
                            headers=headers)
         return BudgetPolicy.from_dict(res)
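The signature is now keyword-only after `policy_id`, and the new `limit_config` travels as a query parameter that is already flagged deprecated because the limit belongs inside `BudgetPolicy` itself. A hedged sketch against the account-level client (ids and names are placeholders):

    from databricks.sdk import AccountClient
    from databricks.sdk.service.billing import BudgetPolicy

    a = AccountClient()
    # Prefer carrying the limit inside BudgetPolicy rather than the
    # deprecated limit_config query parameter.
    updated = a.budget_policy.update(
        policy_id='abc-123',  # placeholder policy id
        policy=BudgetPolicy(policy_id='abc-123', policy_name='eng-budget'))
    print(updated.policy_name)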
databricks/sdk/service/catalog.py
CHANGED

@@ -8983,6 +8983,7 @@ class CatalogsAPI:
         if page_token is not None: query['page_token'] = page_token
         headers = {'Accept': 'application/json', }

+        if "max_results" not in query: query['max_results'] = 0
         while True:
             json = self._api.do('GET', '/api/2.1/unity-catalog/catalogs', query=query, headers=headers)
             if 'catalogs' in json:
@@ -9151,6 +9152,7 @@ class ConnectionsAPI:
         if page_token is not None: query['page_token'] = page_token
         headers = {'Accept': 'application/json', }

+        if "max_results" not in query: query['max_results'] = 0
         while True:
             json = self._api.do('GET', '/api/2.1/unity-catalog/connections', query=query, headers=headers)
             if 'connections' in json:
@@ -9656,6 +9658,7 @@ class ExternalLocationsAPI:
         if page_token is not None: query['page_token'] = page_token
         headers = {'Accept': 'application/json', }

+        if "max_results" not in query: query['max_results'] = 0
         while True:
             json = self._api.do('GET',
                                 '/api/2.1/unity-catalog/external-locations',
@@ -11389,6 +11392,7 @@ class SchemasAPI:
         if page_token is not None: query['page_token'] = page_token
         headers = {'Accept': 'application/json', }

+        if "max_results" not in query: query['max_results'] = 0
         while True:
             json = self._api.do('GET', '/api/2.1/unity-catalog/schemas', query=query, headers=headers)
             if 'schemas' in json:
@@ -11578,6 +11582,7 @@ class StorageCredentialsAPI:
         if page_token is not None: query['page_token'] = page_token
         headers = {'Accept': 'application/json', }

+        if "max_results" not in query: query['max_results'] = 0
         while True:
             json = self._api.do('GET',
                                 '/api/2.1/unity-catalog/storage-credentials',
@@ -11802,6 +11807,7 @@ class SystemSchemasAPI:
         if page_token is not None: query['page_token'] = page_token
         headers = {'Accept': 'application/json', }

+        if "max_results" not in query: query['max_results'] = 0
         while True:
             json = self._api.do('GET',
                                 f'/api/2.1/unity-catalog/metastores/{metastore_id}/systemschemas',
@@ -12044,6 +12050,7 @@ class TablesAPI:
         if schema_name is not None: query['schema_name'] = schema_name
         headers = {'Accept': 'application/json', }

+        if "max_results" not in query: query['max_results'] = 0
         while True:
             json = self._api.do('GET', '/api/2.1/unity-catalog/tables', query=query, headers=headers)
             if 'tables' in json:
@@ -12104,6 +12111,7 @@ class TablesAPI:
         if table_name_pattern is not None: query['table_name_pattern'] = table_name_pattern
         headers = {'Accept': 'application/json', }

+        if "max_results" not in query: query['max_results'] = 0
         while True:
             json = self._api.do('GET', '/api/2.1/unity-catalog/table-summaries', query=query, headers=headers)
             if 'tables' in json:
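Each of these list methods now pins `max_results` to `0` when the caller did not supply one; the paging loop that follows `next_page_token` is unchanged, so iteration looks the same from the caller's side. A sketch, assuming a configured `WorkspaceClient`:

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()
    # The returned iterator follows next_page_token across pages internally.
    for catalog in w.catalogs.list():
        print(catalog.name)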
databricks/sdk/service/cleanrooms.py
CHANGED

@@ -289,24 +289,11 @@ class CleanRoomAssetNotebook:
     """Base 64 representation of the notebook contents. This is the same format as returned by
     :method:workspace/export with the format of **HTML**."""

-    review_state: Optional[CleanRoomNotebookReviewNotebookReviewState] = None
-    """top-level status derived from all reviews"""
-
-    reviews: Optional[List[CleanRoomNotebookReview]] = None
-    """All existing approvals or rejections"""
-
-    runner_collaborators: Optional[List[CleanRoomCollaborator]] = None
-    """collaborators that can run the notebook"""
-
     def as_dict(self) -> dict:
         """Serializes the CleanRoomAssetNotebook into a dictionary suitable for use as a JSON request body."""
         body = {}
         if self.etag is not None: body['etag'] = self.etag
         if self.notebook_content is not None: body['notebook_content'] = self.notebook_content
-        if self.review_state is not None: body['review_state'] = self.review_state.value
-        if self.reviews: body['reviews'] = [v.as_dict() for v in self.reviews]
-        if self.runner_collaborators:
-            body['runner_collaborators'] = [v.as_dict() for v in self.runner_collaborators]
         return body

     def as_shallow_dict(self) -> dict:
@@ -314,19 +301,12 @@ class CleanRoomAssetNotebook:
         body = {}
         if self.etag is not None: body['etag'] = self.etag
         if self.notebook_content is not None: body['notebook_content'] = self.notebook_content
-        if self.review_state is not None: body['review_state'] = self.review_state
-        if self.reviews: body['reviews'] = self.reviews
-        if self.runner_collaborators: body['runner_collaborators'] = self.runner_collaborators
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CleanRoomAssetNotebook:
         """Deserializes the CleanRoomAssetNotebook from a dictionary."""
-        return cls(etag=d.get('etag', None),
-                   notebook_content=d.get('notebook_content', None),
-                   review_state=_enum(d, 'review_state', CleanRoomNotebookReviewNotebookReviewState),
-                   reviews=_repeated_dict(d, 'reviews', CleanRoomNotebookReview),
-                   runner_collaborators=_repeated_dict(d, 'runner_collaborators', CleanRoomCollaborator))
+        return cls(etag=d.get('etag', None), notebook_content=d.get('notebook_content', None))


 class CleanRoomAssetStatusEnum(Enum):
@@ -531,56 +511,6 @@ class CleanRoomCollaborator:
                    organization_name=d.get('organization_name', None))


-@dataclass
-class CleanRoomNotebookReview:
-    comment: Optional[str] = None
-    """review comment"""
-
-    created_at_millis: Optional[int] = None
-    """timestamp of when the review was submitted"""
-
-    review_state: Optional[CleanRoomNotebookReviewNotebookReviewState] = None
-    """review outcome"""
-
-    reviewer_collaborator_alias: Optional[str] = None
-    """collaborator alias of the reviewer"""
-
-    def as_dict(self) -> dict:
-        """Serializes the CleanRoomNotebookReview into a dictionary suitable for use as a JSON request body."""
-        body = {}
-        if self.comment is not None: body['comment'] = self.comment
-        if self.created_at_millis is not None: body['created_at_millis'] = self.created_at_millis
-        if self.review_state is not None: body['review_state'] = self.review_state.value
-        if self.reviewer_collaborator_alias is not None:
-            body['reviewer_collaborator_alias'] = self.reviewer_collaborator_alias
-        return body
-
-    def as_shallow_dict(self) -> dict:
-        """Serializes the CleanRoomNotebookReview into a shallow dictionary of its immediate attributes."""
-        body = {}
-        if self.comment is not None: body['comment'] = self.comment
-        if self.created_at_millis is not None: body['created_at_millis'] = self.created_at_millis
-        if self.review_state is not None: body['review_state'] = self.review_state
-        if self.reviewer_collaborator_alias is not None:
-            body['reviewer_collaborator_alias'] = self.reviewer_collaborator_alias
-        return body
-
-    @classmethod
-    def from_dict(cls, d: Dict[str, any]) -> CleanRoomNotebookReview:
-        """Deserializes the CleanRoomNotebookReview from a dictionary."""
-        return cls(comment=d.get('comment', None),
-                   created_at_millis=d.get('created_at_millis', None),
-                   review_state=_enum(d, 'review_state', CleanRoomNotebookReviewNotebookReviewState),
-                   reviewer_collaborator_alias=d.get('reviewer_collaborator_alias', None))
-
-
-class CleanRoomNotebookReviewNotebookReviewState(Enum):
-
-    APPROVED = 'APPROVED'
-    PENDING = 'PENDING'
-    REJECTED = 'REJECTED'
-
-
 @dataclass
 class CleanRoomNotebookTaskRun:
     """Stores information about a single task run."""
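Because `from_dict` now keeps only `etag` and `notebook_content`, review metadata in an API payload is silently dropped, and access to the removed attributes raises. A small sketch of the 0.44.0 behavior (the payload is hypothetical):

    from databricks.sdk.service.cleanrooms import CleanRoomAssetNotebook

    payload = {'etag': 'abc123', 'reviews': [{'comment': 'lgtm'}]}  # hypothetical response body
    nb = CleanRoomAssetNotebook.from_dict(payload)
    print(nb.etag)                 # 'abc123'
    print(hasattr(nb, 'reviews'))  # False in 0.44.0 (this was a field in 0.43.0)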
databricks/sdk/service/compute.py
CHANGED

@@ -637,11 +637,11 @@ class ClusterAttributes:
     a set of default values will be used."""

     cluster_log_conf: Optional[ClusterLogConf] = None
-    """The configuration for delivering spark logs to a long-term storage destination.
-    destinations (
-    If the conf is given, the logs will be delivered to the destination
-    destination of driver logs is `$destination/$clusterId/driver`, while the
-    executor logs is `$destination/$clusterId/executor`."""
+    """The configuration for delivering spark logs to a long-term storage destination. Three kinds of
+    destinations (DBFS, S3 and Unity Catalog volumes) are supported. Only one destination can be
+    specified for one cluster. If the conf is given, the logs will be delivered to the destination
+    every `5 mins`. The destination of driver logs is `$destination/$clusterId/driver`, while the
+    destination of executor logs is `$destination/$clusterId/executor`."""

     cluster_name: Optional[str] = None
     """Cluster name requested by the user. This doesn't have to be unique. If not specified at
@@ -947,11 +947,11 @@ class ClusterDetails:
     while each new cluster has a globally unique id."""

     cluster_log_conf: Optional[ClusterLogConf] = None
-    """The configuration for delivering spark logs to a long-term storage destination.
-    destinations (
-    If the conf is given, the logs will be delivered to the destination
-    destination of driver logs is `$destination/$clusterId/driver`, while the
-    executor logs is `$destination/$clusterId/executor`."""
+    """The configuration for delivering spark logs to a long-term storage destination. Three kinds of
+    destinations (DBFS, S3 and Unity Catalog volumes) are supported. Only one destination can be
+    specified for one cluster. If the conf is given, the logs will be delivered to the destination
+    every `5 mins`. The destination of driver logs is `$destination/$clusterId/driver`, while the
+    destination of executor logs is `$destination/$clusterId/executor`."""

     cluster_log_status: Optional[LogSyncStatus] = None
     """Cluster log delivery status."""
@@ -1428,11 +1428,16 @@ class ClusterLogConf:
     access s3, please make sure the cluster iam role in `instance_profile_arn` has permission to
     write data to the s3 destination."""

+    volumes: Optional[VolumesStorageInfo] = None
+    """destination needs to be provided. e.g. `{ "volumes" : { "destination" :
+    "/Volumes/catalog/schema/volume/cluster_log" } }`"""
+
     def as_dict(self) -> dict:
         """Serializes the ClusterLogConf into a dictionary suitable for use as a JSON request body."""
         body = {}
         if self.dbfs: body['dbfs'] = self.dbfs.as_dict()
         if self.s3: body['s3'] = self.s3.as_dict()
+        if self.volumes: body['volumes'] = self.volumes.as_dict()
         return body

     def as_shallow_dict(self) -> dict:
@@ -1440,12 +1445,15 @@ class ClusterLogConf:
         body = {}
         if self.dbfs: body['dbfs'] = self.dbfs
         if self.s3: body['s3'] = self.s3
+        if self.volumes: body['volumes'] = self.volumes
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ClusterLogConf:
         """Deserializes the ClusterLogConf from a dictionary."""
-        return cls(dbfs=_from_dict(d, 'dbfs', DbfsStorageInfo),
+        return cls(dbfs=_from_dict(d, 'dbfs', DbfsStorageInfo),
+                   s3=_from_dict(d, 's3', S3StorageInfo),
+                   volumes=_from_dict(d, 'volumes', VolumesStorageInfo))


 @dataclass
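With the new `volumes` member, a `ClusterLogConf` can target a Unity Catalog volume; per the docstring only one destination should be set per cluster. A construction sketch (the volume path is a placeholder):

    from databricks.sdk.service.compute import ClusterLogConf, VolumesStorageInfo

    log_conf = ClusterLogConf(
        volumes=VolumesStorageInfo(destination='/Volumes/main/default/logs/cluster_log'))
    # Serializes to the shape shown in the docstring above.
    assert log_conf.as_dict() == {
        'volumes': {'destination': '/Volumes/main/default/logs/cluster_log'}}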
@@ -1918,11 +1926,11 @@ class ClusterSpec:
     a set of default values will be used."""

     cluster_log_conf: Optional[ClusterLogConf] = None
-    """The configuration for delivering spark logs to a long-term storage destination.
-    destinations (
-    If the conf is given, the logs will be delivered to the destination
-    destination of driver logs is `$destination/$clusterId/driver`, while the
-    executor logs is `$destination/$clusterId/executor`."""
+    """The configuration for delivering spark logs to a long-term storage destination. Three kinds of
+    destinations (DBFS, S3 and Unity Catalog volumes) are supported. Only one destination can be
+    specified for one cluster. If the conf is given, the logs will be delivered to the destination
+    every `5 mins`. The destination of driver logs is `$destination/$clusterId/driver`, while the
+    destination of executor logs is `$destination/$clusterId/executor`."""

     cluster_name: Optional[str] = None
     """Cluster name requested by the user. This doesn't have to be unique. If not specified at
@@ -2334,11 +2342,11 @@ class CreateCluster:
     cluster."""

     cluster_log_conf: Optional[ClusterLogConf] = None
-    """The configuration for delivering spark logs to a long-term storage destination.
-    destinations (
-    If the conf is given, the logs will be delivered to the destination
-    destination of driver logs is `$destination/$clusterId/driver`, while the
-    executor logs is `$destination/$clusterId/executor`."""
+    """The configuration for delivering spark logs to a long-term storage destination. Three kinds of
+    destinations (DBFS, S3 and Unity Catalog volumes) are supported. Only one destination can be
+    specified for one cluster. If the conf is given, the logs will be delivered to the destination
+    every `5 mins`. The destination of driver logs is `$destination/$clusterId/driver`, while the
+    destination of executor logs is `$destination/$clusterId/executor`."""

     cluster_name: Optional[str] = None
     """Cluster name requested by the user. This doesn't have to be unique. If not specified at
@@ -3469,11 +3477,11 @@ class EditCluster:
     a set of default values will be used."""

     cluster_log_conf: Optional[ClusterLogConf] = None
-    """The configuration for delivering spark logs to a long-term storage destination.
-    destinations (
-    If the conf is given, the logs will be delivered to the destination
-    destination of driver logs is `$destination/$clusterId/driver`, while the
-    executor logs is `$destination/$clusterId/executor`."""
+    """The configuration for delivering spark logs to a long-term storage destination. Three kinds of
+    destinations (DBFS, S3 and Unity Catalog volumes) are supported. Only one destination can be
+    specified for one cluster. If the conf is given, the logs will be delivered to the destination
+    every `5 mins`. The destination of driver logs is `$destination/$clusterId/driver`, while the
+    destination of executor logs is `$destination/$clusterId/executor`."""

     cluster_name: Optional[str] = None
     """Cluster name requested by the user. This doesn't have to be unique. If not specified at
@@ -7773,11 +7781,11 @@ class UpdateClusterResource:
     a set of default values will be used."""

     cluster_log_conf: Optional[ClusterLogConf] = None
-    """The configuration for delivering spark logs to a long-term storage destination.
-    destinations (
-    If the conf is given, the logs will be delivered to the destination
-    destination of driver logs is `$destination/$clusterId/driver`, while the
-    executor logs is `$destination/$clusterId/executor`."""
+    """The configuration for delivering spark logs to a long-term storage destination. Three kinds of
+    destinations (DBFS, S3 and Unity Catalog volumes) are supported. Only one destination can be
+    specified for one cluster. If the conf is given, the logs will be delivered to the destination
+    every `5 mins`. The destination of driver logs is `$destination/$clusterId/driver`, while the
+    destination of executor logs is `$destination/$clusterId/executor`."""

     cluster_name: Optional[str] = None
     """Cluster name requested by the user. This doesn't have to be unique. If not specified at
@@ -8077,7 +8085,7 @@ class UpdateResponse:
 @dataclass
 class VolumesStorageInfo:
     destination: str
-    """Unity Catalog
+    """Unity Catalog volumes file destination, e.g. `/Volumes/catalog/schema/volume/dir/file`"""

     def as_dict(self) -> dict:
         """Serializes the VolumesStorageInfo into a dictionary suitable for use as a JSON request body."""
@@ -8619,11 +8627,11 @@ class ClustersAPI:
         :param clone_from: :class:`CloneCluster` (optional)
           When specified, this clones libraries from a source cluster during the creation of a new cluster.
         :param cluster_log_conf: :class:`ClusterLogConf` (optional)
-          The configuration for delivering spark logs to a long-term storage destination.
-          destinations (
-          the conf is given, the logs will be delivered to the destination every
-          driver logs is `$destination/$clusterId/driver`, while the destination
-          `$destination/$clusterId/executor`.
+          The configuration for delivering spark logs to a long-term storage destination. Three kinds of
+          destinations (DBFS, S3 and Unity Catalog volumes) are supported. Only one destination can be
+          specified for one cluster. If the conf is given, the logs will be delivered to the destination every
+          `5 mins`. The destination of driver logs is `$destination/$clusterId/driver`, while the destination
+          of executor logs is `$destination/$clusterId/executor`.
         :param cluster_name: str (optional)
           Cluster name requested by the user. This doesn't have to be unique. If not specified at creation,
           the cluster name will be an empty string.
@@ -8952,11 +8960,11 @@ class ClustersAPI:
          Attributes related to clusters running on Microsoft Azure. If not specified at cluster creation, a
          set of default values will be used.
         :param cluster_log_conf: :class:`ClusterLogConf` (optional)
-          The configuration for delivering spark logs to a long-term storage destination.
-          destinations (
-          the conf is given, the logs will be delivered to the destination every
-          driver logs is `$destination/$clusterId/driver`, while the destination
-          `$destination/$clusterId/executor`.
+          The configuration for delivering spark logs to a long-term storage destination. Three kinds of
+          destinations (DBFS, S3 and Unity Catalog volumes) are supported. Only one destination can be
+          specified for one cluster. If the conf is given, the logs will be delivered to the destination every
+          `5 mins`. The destination of driver logs is `$destination/$clusterId/driver`, while the destination
+          of executor logs is `$destination/$clusterId/executor`.
         :param cluster_name: str (optional)
          Cluster name requested by the user. This doesn't have to be unique. If not specified at creation,
          the cluster name will be an empty string.
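End to end, the volume destination plugs into `ClustersAPI.create` through the same `cluster_log_conf` parameter documented above. A hedged sketch, assuming a configured `WorkspaceClient`; the selector helpers come from the SDK's clusters extension, and the volume path is a placeholder:

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service.compute import ClusterLogConf, VolumesStorageInfo

    w = WorkspaceClient()
    cluster = w.clusters.create(
        cluster_name='logs-to-volumes',
        spark_version=w.clusters.select_spark_version(long_term_support=True),
        node_type_id=w.clusters.select_node_type(local_disk=True),
        num_workers=1,
        autotermination_minutes=30,
        cluster_log_conf=ClusterLogConf(
            volumes=VolumesStorageInfo(destination='/Volumes/main/default/logs'))
    ).result()  # create() returns a waiter; result() blocks until the cluster is running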
databricks/sdk/service/dashboards.py
CHANGED

@@ -827,6 +827,7 @@ class MessageErrorType(Enum):
     REPLY_PROCESS_TIMEOUT_EXCEPTION = 'REPLY_PROCESS_TIMEOUT_EXCEPTION'
     RETRYABLE_PROCESSING_EXCEPTION = 'RETRYABLE_PROCESSING_EXCEPTION'
     SQL_EXECUTION_EXCEPTION = 'SQL_EXECUTION_EXCEPTION'
+    STOP_PROCESS_DUE_TO_AUTO_REGENERATE = 'STOP_PROCESS_DUE_TO_AUTO_REGENERATE'
     TABLES_MISSING_EXCEPTION = 'TABLES_MISSING_EXCEPTION'
     TOO_MANY_CERTIFIED_ANSWERS_EXCEPTION = 'TOO_MANY_CERTIFIED_ANSWERS_EXCEPTION'
     TOO_MANY_TABLES_EXCEPTION = 'TOO_MANY_TABLES_EXCEPTION'
@@ -1743,6 +1744,33 @@ class GenieAPI:
                            headers=headers)
         return GenieGetMessageQueryResultResponse.from_dict(res)

+    def get_message_query_result_by_attachment(self, space_id: str, conversation_id: str, message_id: str,
+                                               attachment_id: str) -> GenieGetMessageQueryResultResponse:
+        """Get conversation message SQL query result by attachment id.
+
+        Get the result of SQL query by attachment id This is only available if a message has a query
+        attachment and the message status is `EXECUTING_QUERY`.
+
+        :param space_id: str
+          Genie space ID
+        :param conversation_id: str
+          Conversation ID
+        :param message_id: str
+          Message ID
+        :param attachment_id: str
+          Attachment ID
+
+        :returns: :class:`GenieGetMessageQueryResultResponse`
+        """
+
+        headers = {'Accept': 'application/json', }
+
+        res = self._api.do(
+            'GET',
+            f'/api/2.0/genie/spaces/{space_id}/conversations/{conversation_id}/messages/{message_id}/query-result/{attachment_id}',
+            headers=headers)
+        return GenieGetMessageQueryResultResponse.from_dict(res)
+
     def start_conversation(self, space_id: str, content: str) -> Wait[GenieMessage]:
         """Start conversation.

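A sketch of calling the new attachment-scoped endpoint (all four ids are placeholders; per the docstring the result is only available while the message has a query attachment and is in `EXECUTING_QUERY`):

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()
    result = w.genie.get_message_query_result_by_attachment(
        space_id='space-id',                # placeholder ids
        conversation_id='conversation-id',
        message_id='message-id',
        attachment_id='attachment-id')
    print(result.as_dict())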
databricks/sdk/service/serving.py
CHANGED

@@ -1247,7 +1247,7 @@ class ExternalModel:
     provider: ExternalModelProvider
     """The name of the provider for the external model. Currently, the supported providers are
     'ai21labs', 'anthropic', 'amazon-bedrock', 'cohere', 'databricks-model-serving',
-    'google-cloud-vertex-ai', 'openai', and '
+    'google-cloud-vertex-ai', 'openai', 'palm', and 'custom'."""

     name: str
     """The name of the external model."""
databricks/sdk/service/sharing.py
CHANGED

@@ -1658,6 +1658,7 @@ class ProvidersAPI:
         if page_token is not None: query['page_token'] = page_token
         headers = {'Accept': 'application/json', }

+        if "max_results" not in query: query['max_results'] = 0
         while True:
             json = self._api.do('GET', '/api/2.1/unity-catalog/providers', query=query, headers=headers)
             if 'providers' in json:
@@ -1699,12 +1700,18 @@ class ProvidersAPI:
         if page_token is not None: query['page_token'] = page_token
         headers = {'Accept': 'application/json', }

-
-
-
-
-
-
+        if "max_results" not in query: query['max_results'] = 0
+        while True:
+            json = self._api.do('GET',
+                                f'/api/2.1/unity-catalog/providers/{name}/shares',
+                                query=query,
+                                headers=headers)
+            if 'shares' in json:
+                for v in json['shares']:
+                    yield ProviderShare.from_dict(v)
+            if 'next_page_token' not in json or not json['next_page_token']:
+                return
+            query['page_token'] = json['next_page_token']

     def update(self,
                name: str,
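`list_shares` now pages through results with the same `max_results`/`next_page_token` pattern as the other listings, yielding `ProviderShare` items from a generator rather than returning a single response in one shot. Callers that want a list can materialize it; a sketch (the provider name is a placeholder):

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()
    shares = list(w.providers.list_shares(name='my-provider'))  # drain the generator
    for share in shares:
        print(share.name)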
@@ -1937,6 +1944,7 @@ class RecipientsAPI:
         if page_token is not None: query['page_token'] = page_token
         headers = {'Accept': 'application/json', }

+        if "max_results" not in query: query['max_results'] = 0
         while True:
             json = self._api.do('GET', '/api/2.1/unity-catalog/recipients', query=query, headers=headers)
             if 'recipients' in json:
@@ -2157,6 +2165,7 @@ class SharesAPI:
         if page_token is not None: query['page_token'] = page_token
         headers = {'Accept': 'application/json', }

+        if "max_results" not in query: query['max_results'] = 0
         while True:
             json = self._api.do('GET', '/api/2.1/unity-catalog/shares', query=query, headers=headers)
             if 'shares' in json:
databricks/sdk/version.py
CHANGED
@@ -1 +1 @@
-__version__ = '0.43.0'
+__version__ = '0.44.0'
{databricks_sdk-0.43.0.dist-info → databricks_sdk-0.44.0.dist-info}/RECORD
CHANGED

@@ -1,5 +1,5 @@
 databricks/__init__.py,sha256=CF2MJcZFwbpn9TwQER8qnCDhkPooBGQNVkX4v7g6p3g,537
-databricks/sdk/__init__.py,sha256=
+databricks/sdk/__init__.py,sha256=PEAdNX4VkzmzgAqhh89lIy42WmW-JM2oAnj69XThCUM,56239
 databricks/sdk/_base_client.py,sha256=FwKMk4pN0AXt8S8RML2OHFYV4yxyhgd9NMjxJKiSIUs,16071
 databricks/sdk/_property.py,sha256=sGjsipeFrjMBSVPjtIb0HNCRcMIhFpVx6wq4BkC3LWs,1636
 databricks/sdk/azure.py,sha256=8P7nEdun0hbQCap9Ojo7yZse_JHxnhYsE6ApojnPz7Q,1009
@@ -15,7 +15,7 @@ databricks/sdk/oauth.py,sha256=ZlIzEGlKTUgGGgLfv5NQJr3Y_mWpKgTr8-hUEwwqfEE,23861
 databricks/sdk/py.typed,sha256=pSvaHpbY1UPNEXyVFUjlgBhjPFZMmVC_UNrPC7eMOHI,74
 databricks/sdk/retries.py,sha256=kdCKGIJjSkGLZYmQ0oI_hiGj7FP7MIqK9-nIr7WbykU,2574
 databricks/sdk/useragent.py,sha256=o9cojoaVwI7C6tbIZy6jcQ8QiYuUmdL5_zATu6IZSaw,7373
-databricks/sdk/version.py,sha256=
+databricks/sdk/version.py,sha256=MMPeQ2-kYJhgLEqI-j8q_6GpHFGW12eIl3b2Nt7TutU,23
 databricks/sdk/_widgets/__init__.py,sha256=Qm3JB8LmdPgEn_-VgxKkodTO4gn6OdaDPwsYcDmeIRI,2667
 databricks/sdk/_widgets/default_widgets_utils.py,sha256=Rk59AFzVYVpOektB_yC_7j-vSt5OdtZA85IlG0kw0xA,1202
 databricks/sdk/_widgets/ipywidgets_utils.py,sha256=P-AyGeahPiX3S59mxpAMgffi4gyJ0irEOY7Ekkn9nQ0,2850
@@ -41,12 +41,12 @@ databricks/sdk/runtime/__init__.py,sha256=9NnZkBzeZXZRQxcE1qKzAszQEzcpIgpL7lQzW3
 databricks/sdk/runtime/dbutils_stub.py,sha256=UFbRZF-bBcwxjbv_pxma00bjNtktLLaYpo8oHRc4-9g,11421
 databricks/sdk/service/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 databricks/sdk/service/_internal.py,sha256=nWbJfW5eJCQgAZ3TmA26xoWb6SNZ5N76ZA8bO1N4AsU,1961
-databricks/sdk/service/apps.py,sha256=
-databricks/sdk/service/billing.py,sha256=
-databricks/sdk/service/catalog.py,sha256=
-databricks/sdk/service/cleanrooms.py,sha256=
-databricks/sdk/service/compute.py,sha256=
-databricks/sdk/service/dashboards.py,sha256=
+databricks/sdk/service/apps.py,sha256=TVEqapzmon-QbWwgxWtC50qYt6BLDa1hKNYz4v2BeXU,52756
+databricks/sdk/service/billing.py,sha256=I7c6xdysk8HU9PD7JCO-foJrGr8P208wXv2A5qq8XJw,98437
+databricks/sdk/service/catalog.py,sha256=jB3WyXFw6ox7_C6jiXhl1tWWsO-dGmdRSovR1DWpe2U,590416
+databricks/sdk/service/cleanrooms.py,sha256=pfCxkGNI6XHgcHPNab7EqkcRHzemK8CKShCi1bl2r9M,57959
+databricks/sdk/service/compute.py,sha256=Q5hmR9vVlbJtd-CwL_IEHj4cw6TtmNEuwxKI1TOarWY,536472
+databricks/sdk/service/dashboards.py,sha256=KrVP9mo3cubJNom2mKdemhq3_MPU-TeEGP5cKgh7QKI,101393
 databricks/sdk/service/files.py,sha256=KewI3yw9HsqHKTlJlAkeO0CBszvaMrdBeyxTKORK9rk,45392
 databricks/sdk/service/iam.py,sha256=ez1G4m8AihZtip2On0o5aADTba25SWBcpYGf1SssJu0,172829
 databricks/sdk/service/jobs.py,sha256=q0NY-SBp1Amo4M8CmxUQ-wwNIOfi9TJrqYDWyupgFZI,426794
@@ -55,15 +55,15 @@ databricks/sdk/service/ml.py,sha256=wvheyoVzDUczufsWOjrUvBkK3KKwV1ZSJ6kXWQN4y_M,
 databricks/sdk/service/oauth2.py,sha256=pXU8MnsZFxmLCr3lLoLO1bCCJld7925jSjT6L4xxKKw,75768
 databricks/sdk/service/pipelines.py,sha256=ZAtYNEaVqkMpa4uWAH1pbXH0Rx2DDxNVdKqsADcQeS8,161720
 databricks/sdk/service/provisioning.py,sha256=QAFKTjRP6rh9gPIP17ownqhAFY2XE0HvqNfTsf3D27w,168727
-databricks/sdk/service/serving.py,sha256=
+databricks/sdk/service/serving.py,sha256=kH9Y8fRcwkKg1wx5m3Gih2CcOsdbAnDXgkCeSOB4QY8,196934
 databricks/sdk/service/settings.py,sha256=Y21HbrWLwaJw60tS2oghbd28Z2W77zfCzFZ0vs4uhVs,319026
-databricks/sdk/service/sharing.py,sha256=
+databricks/sdk/service/sharing.py,sha256=vs69Y2FE28A4XnLxDGKL2HvV-LKgB-DV7nSnbYc_y60,105419
 databricks/sdk/service/sql.py,sha256=GU_8ALx1r3lgu_FCmEJs8zTcWFKtJhKQ2CJcIcJgxzo,399590
 databricks/sdk/service/vectorsearch.py,sha256=5p5pW94Bv_Q2tw4j8kFb35nAoFa9GUG5FIHTdfAHWps,77997
 databricks/sdk/service/workspace.py,sha256=BCoi43R1L2eJI9DYq9vwCVdjbMsdLuzDebN6AZvT4kg,128751
-databricks_sdk-0.
-databricks_sdk-0.
-databricks_sdk-0.
-databricks_sdk-0.
-databricks_sdk-0.
-databricks_sdk-0.
+databricks_sdk-0.44.0.dist-info/LICENSE,sha256=afBgTZo-JsYqj4VOjnejBetMuHKcFR30YobDdpVFkqY,11411
+databricks_sdk-0.44.0.dist-info/METADATA,sha256=2Y3c4mHOJ-s54GaO-Up6IP8WToQdcTnC59B2v3F6Ah4,38301
+databricks_sdk-0.44.0.dist-info/NOTICE,sha256=tkRcQYA1k68wDLcnOWbg2xJDsUOJw8G8DGBhb8dnI3w,1588
+databricks_sdk-0.44.0.dist-info/WHEEL,sha256=oiQVh_5PnQM0E3gPdiz09WCNmwiHDMaGer_elqB3coM,92
+databricks_sdk-0.44.0.dist-info/top_level.txt,sha256=7kRdatoSgU0EUurRQJ_3F1Nv4EOSHWAr6ng25tJOJKU,11
+databricks_sdk-0.44.0.dist-info/RECORD,,
{databricks_sdk-0.43.0.dist-info → databricks_sdk-0.44.0.dist-info}/LICENSE
File without changes

{databricks_sdk-0.43.0.dist-info → databricks_sdk-0.44.0.dist-info}/NOTICE
File without changes

{databricks_sdk-0.43.0.dist-info → databricks_sdk-0.44.0.dist-info}/WHEEL
File without changes

{databricks_sdk-0.43.0.dist-info → databricks_sdk-0.44.0.dist-info}/top_level.txt
File without changes