databricks-sdk 0.23.0__py3-none-any.whl → 0.25.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.

@@ -388,6 +388,23 @@ class ClientsTypes:
         return cls(jobs=d.get('jobs', None), notebooks=d.get('notebooks', None))


+@dataclass
+class CloneCluster:
+    source_cluster_id: str
+    """The cluster that is being cloned."""
+
+    def as_dict(self) -> dict:
+        """Serializes the CloneCluster into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.source_cluster_id is not None: body['source_cluster_id'] = self.source_cluster_id
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> CloneCluster:
+        """Deserializes the CloneCluster from a dictionary."""
+        return cls(source_cluster_id=d.get('source_cluster_id', None))
+
+
 @dataclass
 class CloudProviderNodeInfo:
     status: Optional[List[CloudProviderNodeStatus]] = None
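
The new `CloneCluster` model follows the same `as_dict`/`from_dict` convention as the other generated dataclasses. A minimal round-trip sketch, assuming the class is importable from `databricks.sdk.service.compute` like its neighbours (the cluster ID is a made-up placeholder):

```python
from databricks.sdk.service.compute import CloneCluster

# Serialize to the JSON request-body shape expected by the Clusters API.
clone = CloneCluster(source_cluster_id="0123-456789-abcdefgh")  # placeholder ID
assert clone.as_dict() == {"source_cluster_id": "0123-456789-abcdefgh"}

# Deserialize back from a plain dictionary.
restored = CloneCluster.from_dict({"source_cluster_id": "0123-456789-abcdefgh"})
assert restored.source_cluster_id == "0123-456789-abcdefgh"
```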
@@ -1420,6 +1437,10 @@ class ClusterSpec:
     """Attributes related to clusters running on Microsoft Azure. If not specified at cluster creation,
     a set of default values will be used."""

+    clone_from: Optional[CloneCluster] = None
+    """When specified, this clones libraries from a source cluster during the creation of a new
+    cluster."""
+
     cluster_log_conf: Optional[ClusterLogConf] = None
     """The configuration for delivering spark logs to a long-term storage destination. Two kinds of
     destinations (dbfs and s3) are supported. Only one destination can be specified for one cluster.
@@ -1554,6 +1575,7 @@ class ClusterSpec:
             body['autotermination_minutes'] = self.autotermination_minutes
         if self.aws_attributes: body['aws_attributes'] = self.aws_attributes.as_dict()
         if self.azure_attributes: body['azure_attributes'] = self.azure_attributes.as_dict()
+        if self.clone_from: body['clone_from'] = self.clone_from.as_dict()
         if self.cluster_log_conf: body['cluster_log_conf'] = self.cluster_log_conf.as_dict()
         if self.cluster_name is not None: body['cluster_name'] = self.cluster_name
         if self.cluster_source is not None: body['cluster_source'] = self.cluster_source.value
@@ -1589,6 +1611,7 @@ class ClusterSpec:
                    autotermination_minutes=d.get('autotermination_minutes', None),
                    aws_attributes=_from_dict(d, 'aws_attributes', AwsAttributes),
                    azure_attributes=_from_dict(d, 'azure_attributes', AzureAttributes),
+                   clone_from=_from_dict(d, 'clone_from', CloneCluster),
                    cluster_log_conf=_from_dict(d, 'cluster_log_conf', ClusterLogConf),
                    cluster_name=d.get('cluster_name', None),
                    cluster_source=_enum(d, 'cluster_source', ClusterSource),
@@ -1679,29 +1702,6 @@ class CommandStatusResponse:
                    status=_enum(d, 'status', CommandStatus))


-@dataclass
-class ComputeSpec:
-    kind: Optional[ComputeSpecKind] = None
-    """The kind of compute described by this compute specification."""
-
-    def as_dict(self) -> dict:
-        """Serializes the ComputeSpec into a dictionary suitable for use as a JSON request body."""
-        body = {}
-        if self.kind is not None: body['kind'] = self.kind.value
-        return body
-
-    @classmethod
-    def from_dict(cls, d: Dict[str, any]) -> ComputeSpec:
-        """Deserializes the ComputeSpec from a dictionary."""
-        return cls(kind=_enum(d, 'kind', ComputeSpecKind))
-
-
-class ComputeSpecKind(Enum):
-    """The kind of compute described by this compute specification."""
-
-    SERVERLESS_PREVIEW = 'SERVERLESS_PREVIEW'
-
-
 class ContextStatus(Enum):

     ERROR = 'Error'
@@ -1754,6 +1754,10 @@ class CreateCluster:
     """Attributes related to clusters running on Microsoft Azure. If not specified at cluster creation,
     a set of default values will be used."""

+    clone_from: Optional[CloneCluster] = None
+    """When specified, this clones libraries from a source cluster during the creation of a new
+    cluster."""
+
     cluster_log_conf: Optional[ClusterLogConf] = None
     """The configuration for delivering spark logs to a long-term storage destination. Two kinds of
     destinations (dbfs and s3) are supported. Only one destination can be specified for one cluster.
@@ -1884,6 +1888,7 @@ class CreateCluster:
             body['autotermination_minutes'] = self.autotermination_minutes
         if self.aws_attributes: body['aws_attributes'] = self.aws_attributes.as_dict()
         if self.azure_attributes: body['azure_attributes'] = self.azure_attributes.as_dict()
+        if self.clone_from: body['clone_from'] = self.clone_from.as_dict()
         if self.cluster_log_conf: body['cluster_log_conf'] = self.cluster_log_conf.as_dict()
         if self.cluster_name is not None: body['cluster_name'] = self.cluster_name
         if self.cluster_source is not None: body['cluster_source'] = self.cluster_source.value
@@ -1919,6 +1924,7 @@ class CreateCluster:
                    autotermination_minutes=d.get('autotermination_minutes', None),
                    aws_attributes=_from_dict(d, 'aws_attributes', AwsAttributes),
                    azure_attributes=_from_dict(d, 'azure_attributes', AzureAttributes),
+                   clone_from=_from_dict(d, 'clone_from', CloneCluster),
                    cluster_log_conf=_from_dict(d, 'cluster_log_conf', ClusterLogConf),
                    cluster_name=d.get('cluster_name', None),
                    cluster_source=_enum(d, 'cluster_source', ClusterSource),
@@ -2592,6 +2598,10 @@ class EditCluster:
     """Attributes related to clusters running on Microsoft Azure. If not specified at cluster creation,
     a set of default values will be used."""

+    clone_from: Optional[CloneCluster] = None
+    """When specified, this clones libraries from a source cluster during the creation of a new
+    cluster."""
+
     cluster_log_conf: Optional[ClusterLogConf] = None
     """The configuration for delivering spark logs to a long-term storage destination. Two kinds of
     destinations (dbfs and s3) are supported. Only one destination can be specified for one cluster.
@@ -2722,6 +2732,7 @@ class EditCluster:
             body['autotermination_minutes'] = self.autotermination_minutes
         if self.aws_attributes: body['aws_attributes'] = self.aws_attributes.as_dict()
         if self.azure_attributes: body['azure_attributes'] = self.azure_attributes.as_dict()
+        if self.clone_from: body['clone_from'] = self.clone_from.as_dict()
         if self.cluster_id is not None: body['cluster_id'] = self.cluster_id
         if self.cluster_log_conf: body['cluster_log_conf'] = self.cluster_log_conf.as_dict()
         if self.cluster_name is not None: body['cluster_name'] = self.cluster_name
@@ -2758,6 +2769,7 @@ class EditCluster:
                    autotermination_minutes=d.get('autotermination_minutes', None),
                    aws_attributes=_from_dict(d, 'aws_attributes', AwsAttributes),
                    azure_attributes=_from_dict(d, 'azure_attributes', AzureAttributes),
+                   clone_from=_from_dict(d, 'clone_from', CloneCluster),
                    cluster_id=d.get('cluster_id', None),
                    cluster_log_conf=_from_dict(d, 'cluster_log_conf', ClusterLogConf),
                    cluster_name=d.get('cluster_name', None),
@@ -2969,6 +2981,37 @@ class EditResponse:
         return cls()


+@dataclass
+class Environment:
+    """The a environment entity used to preserve serverless environment side panel and jobs'
+    environment for non-notebook task. In this minimal environment spec, only pip dependencies are
+    supported. Next ID: 5"""
+
+    client: str
+    """* User-friendly name for the client version: “client”: “1” The version is a string,
+    consisting of the major client version"""
+
+    dependencies: Optional[List[str]] = None
+    """List of pip dependencies, as supported by the version of pip in this environment. Each
+    dependency is a pip requirement file line
+    https://pip.pypa.io/en/stable/reference/requirements-file-format/ Allowed dependency could be
+    <requirement specifier>, <archive url/path>, <local project path>(WSFS or Volumes in
+    Databricks), <vcs project url> E.g. dependencies: ["foo==0.0.1", "-r
+    /Workspace/test/requirements.txt"]"""
+
+    def as_dict(self) -> dict:
+        """Serializes the Environment into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.client is not None: body['client'] = self.client
+        if self.dependencies: body['dependencies'] = [v for v in self.dependencies]
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> Environment:
+        """Deserializes the Environment from a dictionary."""
+        return cls(client=d.get('client', None), dependencies=d.get('dependencies', None))
+
+
 @dataclass
 class EventDetails:
     attributes: Optional[ClusterAttributes] = None
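
The new `Environment` model captures the minimal environment spec described in its docstring: a client version plus pip-style dependencies. A small sketch of building one, again assuming it is imported from the compute service module (the dependency values are the illustrative ones from the docstring):

```python
from databricks.sdk.service.compute import Environment

# One pinned package plus a workspace-hosted requirements file (illustrative values).
env = Environment(client="1",
                  dependencies=["foo==0.0.1", "-r /Workspace/test/requirements.txt"])

# as_dict() yields the JSON body shape used in requests.
print(env.as_dict())
# {'client': '1', 'dependencies': ['foo==0.0.1', '-r /Workspace/test/requirements.txt']}
```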
@@ -6208,6 +6251,7 @@ class ClustersAPI:
                autotermination_minutes: Optional[int] = None,
                aws_attributes: Optional[AwsAttributes] = None,
                azure_attributes: Optional[AzureAttributes] = None,
+               clone_from: Optional[CloneCluster] = None,
                cluster_log_conf: Optional[ClusterLogConf] = None,
                cluster_name: Optional[str] = None,
                cluster_source: Optional[ClusterSource] = None,
@@ -6256,6 +6300,8 @@ class ClustersAPI:
         :param azure_attributes: :class:`AzureAttributes` (optional)
           Attributes related to clusters running on Microsoft Azure. If not specified at cluster creation, a
           set of default values will be used.
+        :param clone_from: :class:`CloneCluster` (optional)
+          When specified, this clones libraries from a source cluster during the creation of a new cluster.
         :param cluster_log_conf: :class:`ClusterLogConf` (optional)
           The configuration for delivering spark logs to a long-term storage destination. Two kinds of
           destinations (dbfs and s3) are supported. Only one destination can be specified for one cluster. If
@@ -6364,6 +6410,7 @@ class ClustersAPI:
         if autotermination_minutes is not None: body['autotermination_minutes'] = autotermination_minutes
         if aws_attributes is not None: body['aws_attributes'] = aws_attributes.as_dict()
         if azure_attributes is not None: body['azure_attributes'] = azure_attributes.as_dict()
+        if clone_from is not None: body['clone_from'] = clone_from.as_dict()
         if cluster_log_conf is not None: body['cluster_log_conf'] = cluster_log_conf.as_dict()
         if cluster_name is not None: body['cluster_name'] = cluster_name
         if cluster_source is not None: body['cluster_source'] = cluster_source.value
@@ -6404,6 +6451,7 @@ class ClustersAPI:
                autotermination_minutes: Optional[int] = None,
                aws_attributes: Optional[AwsAttributes] = None,
                azure_attributes: Optional[AzureAttributes] = None,
+               clone_from: Optional[CloneCluster] = None,
                cluster_log_conf: Optional[ClusterLogConf] = None,
                cluster_name: Optional[str] = None,
                cluster_source: Optional[ClusterSource] = None,
@@ -6432,6 +6480,7 @@ class ClustersAPI:
             autotermination_minutes=autotermination_minutes,
             aws_attributes=aws_attributes,
             azure_attributes=azure_attributes,
+            clone_from=clone_from,
             cluster_log_conf=cluster_log_conf,
             cluster_name=cluster_name,
             cluster_source=cluster_source,
@@ -6491,6 +6540,7 @@ class ClustersAPI:
              autotermination_minutes: Optional[int] = None,
              aws_attributes: Optional[AwsAttributes] = None,
              azure_attributes: Optional[AzureAttributes] = None,
+             clone_from: Optional[CloneCluster] = None,
              cluster_log_conf: Optional[ClusterLogConf] = None,
              cluster_name: Optional[str] = None,
              cluster_source: Optional[ClusterSource] = None,
@@ -6546,6 +6596,8 @@ class ClustersAPI:
         :param azure_attributes: :class:`AzureAttributes` (optional)
           Attributes related to clusters running on Microsoft Azure. If not specified at cluster creation, a
           set of default values will be used.
+        :param clone_from: :class:`CloneCluster` (optional)
+          When specified, this clones libraries from a source cluster during the creation of a new cluster.
         :param cluster_log_conf: :class:`ClusterLogConf` (optional)
           The configuration for delivering spark logs to a long-term storage destination. Two kinds of
           destinations (dbfs and s3) are supported. Only one destination can be specified for one cluster. If
@@ -6654,6 +6706,7 @@ class ClustersAPI:
         if autotermination_minutes is not None: body['autotermination_minutes'] = autotermination_minutes
         if aws_attributes is not None: body['aws_attributes'] = aws_attributes.as_dict()
         if azure_attributes is not None: body['azure_attributes'] = azure_attributes.as_dict()
+        if clone_from is not None: body['clone_from'] = clone_from.as_dict()
         if cluster_id is not None: body['cluster_id'] = cluster_id
         if cluster_log_conf is not None: body['cluster_log_conf'] = cluster_log_conf.as_dict()
         if cluster_name is not None: body['cluster_name'] = cluster_name
@@ -6696,6 +6749,7 @@ class ClustersAPI:
              autotermination_minutes: Optional[int] = None,
              aws_attributes: Optional[AwsAttributes] = None,
              azure_attributes: Optional[AzureAttributes] = None,
+             clone_from: Optional[CloneCluster] = None,
              cluster_log_conf: Optional[ClusterLogConf] = None,
              cluster_name: Optional[str] = None,
              cluster_source: Optional[ClusterSource] = None,
@@ -6724,6 +6778,7 @@ class ClustersAPI:
             autotermination_minutes=autotermination_minutes,
             aws_attributes=aws_attributes,
             azure_attributes=azure_attributes,
+            clone_from=clone_from,
             cluster_id=cluster_id,
             cluster_log_conf=cluster_log_conf,
             cluster_name=cluster_name,
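
Taken together, these hunks thread `clone_from` through `ClustersAPI.create`, `edit`, and their `_and_wait` wrappers. A hedged usage sketch; the node-type and Spark-version helpers shown here already exist in earlier releases, and the source cluster ID is a placeholder:

```python
from databricks.sdk import WorkspaceClient
from databricks.sdk.service.compute import CloneCluster

w = WorkspaceClient()

# Create a new cluster and copy the library set from an existing cluster.
created = w.clusters.create(cluster_name="cloned-libs-cluster",
                            spark_version=w.clusters.select_spark_version(long_term_support=True),
                            node_type_id=w.clusters.select_node_type(local_disk=True),
                            autotermination_minutes=30,
                            num_workers=1,
                            clone_from=CloneCluster(source_cluster_id="0123-456789-abcdefgh"))

# create() returns a waiter; block until the cluster reaches RUNNING.
cluster = created.result()
```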
@@ -117,6 +117,33 @@ class LifecycleState(Enum):
     TRASHED = 'TRASHED'


+@dataclass
+class MigrateDashboardRequest:
+    source_dashboard_id: str
+    """UUID of the dashboard to be migrated."""
+
+    display_name: Optional[str] = None
+    """Display name for the new Lakeview dashboard."""
+
+    parent_path: Optional[str] = None
+    """The workspace path of the folder to contain the migrated Lakeview dashboard."""
+
+    def as_dict(self) -> dict:
+        """Serializes the MigrateDashboardRequest into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.display_name is not None: body['display_name'] = self.display_name
+        if self.parent_path is not None: body['parent_path'] = self.parent_path
+        if self.source_dashboard_id is not None: body['source_dashboard_id'] = self.source_dashboard_id
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> MigrateDashboardRequest:
+        """Deserializes the MigrateDashboardRequest from a dictionary."""
+        return cls(display_name=d.get('display_name', None),
+                   parent_path=d.get('parent_path', None),
+                   source_dashboard_id=d.get('source_dashboard_id', None))
+
+
 @dataclass
 class PublishRequest:
     dashboard_id: Optional[str] = None
@@ -191,6 +218,20 @@ class TrashDashboardResponse:
         return cls()


+@dataclass
+class UnpublishDashboardResponse:
+
+    def as_dict(self) -> dict:
+        """Serializes the UnpublishDashboardResponse into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> UnpublishDashboardResponse:
+        """Deserializes the UnpublishDashboardResponse from a dictionary."""
+        return cls()
+
+
 @dataclass
 class UpdateDashboardRequest:
     dashboard_id: Optional[str] = None
@@ -300,6 +341,33 @@ class LakeviewAPI:
         res = self._api.do('GET', f'/api/2.0/lakeview/dashboards/{dashboard_id}/published', headers=headers)
         return PublishedDashboard.from_dict(res)

+    def migrate(self,
+                source_dashboard_id: str,
+                *,
+                display_name: Optional[str] = None,
+                parent_path: Optional[str] = None) -> Dashboard:
+        """Migrate dashboard.
+
+        Migrates a classic SQL dashboard to Lakeview.
+
+        :param source_dashboard_id: str
+          UUID of the dashboard to be migrated.
+        :param display_name: str (optional)
+          Display name for the new Lakeview dashboard.
+        :param parent_path: str (optional)
+          The workspace path of the folder to contain the migrated Lakeview dashboard.
+
+        :returns: :class:`Dashboard`
+        """
+        body = {}
+        if display_name is not None: body['display_name'] = display_name
+        if parent_path is not None: body['parent_path'] = parent_path
+        if source_dashboard_id is not None: body['source_dashboard_id'] = source_dashboard_id
+        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+
+        res = self._api.do('POST', '/api/2.0/lakeview/dashboards/migrate', body=body, headers=headers)
+        return Dashboard.from_dict(res)
+
     def publish(self,
                 dashboard_id: str,
                 *,
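
A hedged sketch of the new migration call; the dashboard UUID and destination folder are placeholders, and the service is assumed to be exposed as `w.lakeview` as in previous releases:

```python
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

# Convert a classic SQL dashboard into a Lakeview dashboard.
dashboard = w.lakeview.migrate(source_dashboard_id="11111111-2222-3333-4444-555555555555",
                               display_name="Sales overview (Lakeview)",
                               parent_path="/Workspace/Shared/dashboards")
print(dashboard.dashboard_id)
```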
@@ -345,6 +413,21 @@ class LakeviewAPI:

         self._api.do('DELETE', f'/api/2.0/lakeview/dashboards/{dashboard_id}', headers=headers)

+    def unpublish(self, dashboard_id: str):
+        """Unpublish dashboard.
+
+        Unpublish the dashboard.
+
+        :param dashboard_id: str
+          UUID identifying the dashboard to be published.
+
+
+        """
+
+        headers = {'Accept': 'application/json', }
+
+        self._api.do('DELETE', f'/api/2.0/lakeview/dashboards/{dashboard_id}/published', headers=headers)
+
     def update(self,
                dashboard_id: str,
                *,
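
The companion `unpublish` call issues a DELETE against the `/published` sub-resource; a minimal sketch with a placeholder UUID:

```python
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

# Remove the published copy of a dashboard (DELETE .../dashboards/{id}/published).
w.lakeview.unpublish(dashboard_id="11111111-2222-3333-4444-555555555555")
```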
@@ -6,7 +6,7 @@ import logging
 from dataclasses import dataclass
 from typing import BinaryIO, Dict, Iterator, List, Optional

-from ._internal import _repeated_dict
+from ._internal import _escape_multi_segment_path_parameter, _repeated_dict

 _LOG = logging.getLogger('databricks.sdk')

@@ -789,7 +789,9 @@ class FilesAPI:

         headers = {}

-        self._api.do('PUT', f'/api/2.0/fs/directories{directory_path}', headers=headers)
+        self._api.do('PUT',
+                     f'/api/2.0/fs/directories{_escape_multi_segment_path_parameter(directory_path)}',
+                     headers=headers)

     def delete(self, file_path: str):
         """Delete a file.
@@ -804,7 +806,9 @@ class FilesAPI:

         headers = {}

-        self._api.do('DELETE', f'/api/2.0/fs/files{file_path}', headers=headers)
+        self._api.do('DELETE',
+                     f'/api/2.0/fs/files{_escape_multi_segment_path_parameter(file_path)}',
+                     headers=headers)

     def delete_directory(self, directory_path: str):
         """Delete a directory.
@@ -822,7 +826,9 @@ class FilesAPI:

         headers = {}

-        self._api.do('DELETE', f'/api/2.0/fs/directories{directory_path}', headers=headers)
+        self._api.do('DELETE',
+                     f'/api/2.0/fs/directories{_escape_multi_segment_path_parameter(directory_path)}',
+                     headers=headers)

     def download(self, file_path: str) -> DownloadResponse:
         """Download a file.
@@ -839,7 +845,7 @@ class FilesAPI:
         headers = {'Accept': 'application/octet-stream', }
         response_headers = ['content-length', 'content-type', 'last-modified', ]
         res = self._api.do('GET',
-                           f'/api/2.0/fs/files{file_path}',
+                           f'/api/2.0/fs/files{_escape_multi_segment_path_parameter(file_path)}',
                            headers=headers,
                            response_headers=response_headers,
                            raw=True)
@@ -864,7 +870,9 @@ class FilesAPI:

         headers = {}

-        self._api.do('HEAD', f'/api/2.0/fs/directories{directory_path}', headers=headers)
+        self._api.do('HEAD',
+                     f'/api/2.0/fs/directories{_escape_multi_segment_path_parameter(directory_path)}',
+                     headers=headers)

     def get_metadata(self, file_path: str) -> GetMetadataResponse:
         """Get file metadata.
@@ -880,7 +888,7 @@ class FilesAPI:
         headers = {}
         response_headers = ['content-length', 'content-type', 'last-modified', ]
         res = self._api.do('HEAD',
-                           f'/api/2.0/fs/files{file_path}',
+                           f'/api/2.0/fs/files{_escape_multi_segment_path_parameter(file_path)}',
                            headers=headers,
                            response_headers=response_headers)
         return GetMetadataResponse.from_dict(res)
@@ -924,10 +932,11 @@ class FilesAPI:
         headers = {'Accept': 'application/json', }

         while True:
-            json = self._api.do('GET',
-                                f'/api/2.0/fs/directories{directory_path}',
-                                query=query,
-                                headers=headers)
+            json = self._api.do(
+                'GET',
+                f'/api/2.0/fs/directories{_escape_multi_segment_path_parameter(directory_path)}',
+                query=query,
+                headers=headers)
             if 'contents' in json:
                 for v in json['contents']:
                     yield DirectoryEntry.from_dict(v)
@@ -956,4 +965,8 @@ class FilesAPI:
         if overwrite is not None: query['overwrite'] = overwrite
         headers = {'Content-Type': 'application/octet-stream', }

-        self._api.do('PUT', f'/api/2.0/fs/files{file_path}', query=query, headers=headers, data=contents)
+        self._api.do('PUT',
+                     f'/api/2.0/fs/files{_escape_multi_segment_path_parameter(file_path)}',
+                     query=query,
+                     headers=headers,
+                     data=contents)
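
Every Files API route now runs the caller-supplied path through `_escape_multi_segment_path_parameter` before interpolating it into the URL, so segments containing characters that need percent-encoding (spaces, `#`, and the like) should no longer produce malformed request paths. A hedged round-trip sketch with an illustrative Unity Catalog volume path:

```python
import io

from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

# A volume path with a space in the file name (illustrative).
path = "/Volumes/main/default/my_volume/reports/Q1 report.csv"

# Upload, read back, then delete; each call escapes the path segments internally.
w.files.upload(path, io.BytesIO(b"region,revenue\nEMEA,42\n"), overwrite=True)
data = w.files.download(path).contents.read()
w.files.delete(path)
```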
@@ -890,7 +890,7 @@ class PermissionsRequest:
     request_object_type: Optional[str] = None
     """The type of the request object. Can be one of the following: authorization, clusters,
     cluster-policies, directories, experiments, files, instance-pools, jobs, notebooks, pipelines,
-    registered-models, repos, serving-endpoints, or sql-warehouses."""
+    registered-models, repos, serving-endpoints, or warehouses."""

     def as_dict(self) -> dict:
         """Serializes the PermissionsRequest into a dictionary suitable for use as a JSON request body."""
@@ -915,7 +915,7 @@ class PrincipalOutput:
     """The display name of the principal."""

     group_name: Optional[str] = None
-    """The group name of the groupl. Present only if the principal is a group."""
+    """The group name of the group. Present only if the principal is a group."""

     principal_id: Optional[int] = None
     """The unique, opaque id of the principal."""
@@ -1135,7 +1135,9 @@ class UpdateRuleSetRequest:
 @dataclass
 class UpdateWorkspaceAssignments:
     permissions: List[WorkspacePermission]
-    """Array of permissions assignments to update on the workspace."""
+    """Array of permissions assignments to update on the workspace. Note that excluding this field will
+    have the same effect as providing an empty list which will result in the deletion of all
+    permissions for the principal."""

     principal_id: Optional[int] = None
     """The ID of the user, service principal, or group."""
@@ -1238,20 +1240,6 @@ class UserSchema(Enum):
     URN_IETF_PARAMS_SCIM_SCHEMAS_EXTENSION_WORKSPACE_2_0_USER = 'urn:ietf:params:scim:schemas:extension:workspace:2.0:User'


-@dataclass
-class WorkspaceAssignmentsUpdated:
-
-    def as_dict(self) -> dict:
-        """Serializes the WorkspaceAssignmentsUpdated into a dictionary suitable for use as a JSON request body."""
-        body = {}
-        return body
-
-    @classmethod
-    def from_dict(cls, d: Dict[str, any]) -> WorkspaceAssignmentsUpdated:
-        """Deserializes the WorkspaceAssignmentsUpdated from a dictionary."""
-        return cls()
-
-
 class WorkspacePermission(Enum):

     ADMIN = 'ADMIN'
@@ -2590,6 +2578,9 @@ class PermissionsAPI:
     For the mapping of the required permissions for specific actions or abilities and other important
     information, see [Access Control].

+    Note that to manage access control on service principals, use **[Account Access Control
+    Proxy](:service:accountaccesscontrolproxy)**.
+
     [Access Control]: https://docs.databricks.com/security/auth-authz/access-control/index.html"""

     def __init__(self, api_client):
@@ -2604,7 +2595,7 @@ class PermissionsAPI:
         :param request_object_type: str
           The type of the request object. Can be one of the following: authorization, clusters,
           cluster-policies, directories, experiments, files, instance-pools, jobs, notebooks, pipelines,
-          registered-models, repos, serving-endpoints, or sql-warehouses.
+          registered-models, repos, serving-endpoints, or warehouses.
         :param request_object_id: str
           The id of the request object.

@@ -2652,7 +2643,7 @@ class PermissionsAPI:
         :param request_object_type: str
           The type of the request object. Can be one of the following: authorization, clusters,
           cluster-policies, directories, experiments, files, instance-pools, jobs, notebooks, pipelines,
-          registered-models, repos, serving-endpoints, or sql-warehouses.
+          registered-models, repos, serving-endpoints, or warehouses.
         :param request_object_id: str
           The id of the request object.
         :param access_control_list: List[:class:`AccessControlRequest`] (optional)
@@ -2683,7 +2674,7 @@ class PermissionsAPI:
         :param request_object_type: str
           The type of the request object. Can be one of the following: authorization, clusters,
           cluster-policies, directories, experiments, files, instance-pools, jobs, notebooks, pipelines,
-          registered-models, repos, serving-endpoints, or sql-warehouses.
+          registered-models, repos, serving-endpoints, or warehouses.
         :param request_object_id: str
           The id of the request object.
         :param access_control_list: List[:class:`AccessControlRequest`] (optional)
@@ -3375,7 +3366,8 @@ class WorkspaceAssignmentAPI:
         parsed = PermissionAssignments.from_dict(json).permission_assignments
         return parsed if parsed is not None else []

-    def update(self, workspace_id: int, principal_id: int, permissions: List[WorkspacePermission]):
+    def update(self, workspace_id: int, principal_id: int,
+               permissions: List[WorkspacePermission]) -> PermissionAssignment:
         """Create or update permissions assignment.

         Creates or updates the workspace permissions assignment in a given account and workspace for the
@@ -3386,16 +3378,19 @@ class WorkspaceAssignmentAPI:
         :param principal_id: int
           The ID of the user, service principal, or group.
         :param permissions: List[:class:`WorkspacePermission`]
-          Array of permissions assignments to update on the workspace.
-
+          Array of permissions assignments to update on the workspace. Note that excluding this field will
+          have the same effect as providing an empty list which will result in the deletion of all permissions
+          for the principal.

+        :returns: :class:`PermissionAssignment`
         """
         body = {}
         if permissions is not None: body['permissions'] = [v.value for v in permissions]
         headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }

-        self._api.do(
+        res = self._api.do(
             'PUT',
             f'/api/2.0/accounts/{self._api.account_id}/workspaces/{workspace_id}/permissionassignments/principals/{principal_id}',
             body=body,
             headers=headers)
+        return PermissionAssignment.from_dict(res)
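
Because `update` now deserializes the response, callers get the resulting assignment back instead of `None`. A hedged sketch using the account-level client; the workspace and principal IDs are placeholders:

```python
from databricks.sdk import AccountClient
from databricks.sdk.service.iam import WorkspacePermission

a = AccountClient()

# Grant a principal USER-level access to a workspace and inspect the result.
assignment = a.workspace_assignment.update(workspace_id=1234567890,
                                           principal_id=987654321,
                                           permissions=[WorkspacePermission.USER])
print(assignment.permissions)
```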