databricks-sdk 0.18.0__py3-none-any.whl → 0.19.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- databricks/sdk/__init__.py +30 -1
- databricks/sdk/azure.py +14 -0
- databricks/sdk/clock.py +49 -0
- databricks/sdk/config.py +7 -0
- databricks/sdk/core.py +2 -1
- databricks/sdk/credentials_provider.py +14 -3
- databricks/sdk/environments.py +1 -1
- databricks/sdk/errors/__init__.py +1 -1
- databricks/sdk/errors/mapper.py +5 -5
- databricks/sdk/mixins/workspace.py +3 -3
- databricks/sdk/retries.py +9 -5
- databricks/sdk/service/catalog.py +173 -78
- databricks/sdk/service/compute.py +86 -25
- databricks/sdk/service/files.py +136 -22
- databricks/sdk/service/iam.py +42 -36
- databricks/sdk/service/jobs.py +192 -14
- databricks/sdk/service/ml.py +27 -36
- databricks/sdk/service/oauth2.py +3 -4
- databricks/sdk/service/pipelines.py +50 -29
- databricks/sdk/service/settings.py +338 -57
- databricks/sdk/service/sharing.py +3 -4
- databricks/sdk/service/sql.py +24 -17
- databricks/sdk/service/vectorsearch.py +13 -17
- databricks/sdk/service/workspace.py +18 -7
- databricks/sdk/version.py +1 -1
- {databricks_sdk-0.18.0.dist-info → databricks_sdk-0.19.0.dist-info}/METADATA +1 -1
- databricks_sdk-0.19.0.dist-info/RECORD +53 -0
- databricks_sdk-0.18.0.dist-info/RECORD +0 -52
- /databricks/sdk/errors/{mapping.py → platform.py} +0 -0
- {databricks_sdk-0.18.0.dist-info → databricks_sdk-0.19.0.dist-info}/LICENSE +0 -0
- {databricks_sdk-0.18.0.dist-info → databricks_sdk-0.19.0.dist-info}/NOTICE +0 -0
- {databricks_sdk-0.18.0.dist-info → databricks_sdk-0.19.0.dist-info}/WHEEL +0 -0
- {databricks_sdk-0.18.0.dist-info → databricks_sdk-0.19.0.dist-info}/top_level.txt +0 -0
databricks/sdk/service/compute.py
CHANGED

@@ -65,15 +65,33 @@ class AddInstanceProfile:
 
 
 @dataclass
-class AutoScale:
-    min_workers: int
-    """The minimum number of workers to which the cluster can scale down when underutilized. It is also
-    the initial number of workers the cluster will have after creation."""
+class Adlsgen2Info:
+    destination: str
+    """abfss destination, e.g.
+    `abfss://<container-name>@<storage-account-name>.dfs.core.windows.net/<directory-name>`."""
+
+    def as_dict(self) -> dict:
+        """Serializes the Adlsgen2Info into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.destination is not None: body['destination'] = self.destination
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> Adlsgen2Info:
+        """Deserializes the Adlsgen2Info from a dictionary."""
+        return cls(destination=d.get('destination', None))
+
 
-    max_workers: int
+@dataclass
+class AutoScale:
+    max_workers: Optional[int] = None
     """The maximum number of workers to which the cluster can scale up when overloaded. Note that
     `max_workers` must be strictly greater than `min_workers`."""
 
+    min_workers: Optional[int] = None
+    """The minimum number of workers to which the cluster can scale down when underutilized. It is also
+    the initial number of workers the cluster will have after creation."""
+
     def as_dict(self) -> dict:
         """Serializes the AutoScale into a dictionary suitable for use as a JSON request body."""
         body = {}
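Both `AutoScale` fields are now optional keywords rather than required constructor arguments. A minimal round-trip sketch, assuming the 0.19.0 wheel above is installed:

    from databricks.sdk.service.compute import AutoScale

    # In 0.18.0 both fields were required; in 0.19.0 they default to None.
    scale = AutoScale(min_workers=2, max_workers=8)
    assert scale.as_dict() == {'max_workers': 8, 'min_workers': 2}
    assert AutoScale.from_dict(scale.as_dict()) == scale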
@@ -2213,7 +2231,7 @@ class DataSecurityMode(Enum):
 
 @dataclass
 class DbfsStorageInfo:
-    destination: Optional[str] = None
+    destination: str
     """dbfs destination, e.g. `dbfs:/my/path`"""
 
     def as_dict(self) -> dict:
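`destination` flips from optional to required here, and the same change repeats for `LocalFileInfo`, `S3StorageInfo`, `VolumesStorageInfo`, `WorkspaceStorageInfo`, and (for `clients`) `WorkloadType` below. A hedged sketch of the caller-visible effect:

    from databricks.sdk.service.compute import DbfsStorageInfo

    info = DbfsStorageInfo(destination='dbfs:/my/path')  # required in 0.19.0
    # DbfsStorageInfo()  # TypeError in 0.19.0; 0.18.0 silently defaulted to None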
@@ -2957,6 +2975,18 @@ class GcpAttributes:
 
     [GCP documentation]: https://cloud.google.com/compute/docs/disks/local-ssd#choose_number_local_ssds"""
 
+    use_preemptible_executors: Optional[bool] = None
+    """This field determines whether the spark executors will be scheduled to run on preemptible VMs
+    (when set to true) versus standard compute engine VMs (when set to false; default). Note: Soon
+    to be deprecated, use the availability field instead."""
+
+    zone_id: Optional[str] = None
+    """Identifier for the availability zone in which the cluster resides. This can be one of the
+    following: - "HA" => High availability, spread nodes across availability zones for a Databricks
+    deployment region [default] - "AUTO" => Databricks picks an availability zone to schedule the
+    cluster on. - A GCP availability zone => Pick One of the available zones for (machine type +
+    region) from https://cloud.google.com/compute/docs/regions-zones."""
+
     def as_dict(self) -> dict:
         """Serializes the GcpAttributes into a dictionary suitable for use as a JSON request body."""
         body = {}
@@ -2965,6 +2995,9 @@ class GcpAttributes:
         if self.google_service_account is not None:
             body['google_service_account'] = self.google_service_account
         if self.local_ssd_count is not None: body['local_ssd_count'] = self.local_ssd_count
+        if self.use_preemptible_executors is not None:
+            body['use_preemptible_executors'] = self.use_preemptible_executors
+        if self.zone_id is not None: body['zone_id'] = self.zone_id
         return body
 
     @classmethod
@@ -2973,7 +3006,9 @@ class GcpAttributes:
         return cls(availability=_enum(d, 'availability', GcpAvailability),
                    boot_disk_size=d.get('boot_disk_size', None),
                    google_service_account=d.get('google_service_account', None),
-                   local_ssd_count=d.get('local_ssd_count', None))
+                   local_ssd_count=d.get('local_ssd_count', None),
+                   use_preemptible_executors=d.get('use_preemptible_executors', None),
+                   zone_id=d.get('zone_id', None))
 
 
 class GcpAvailability(Enum):
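A quick sketch of the two new GCP fields passing through the serializers just shown; note the `is not None` guards mean an explicit False is still emitted:

    from databricks.sdk.service.compute import GcpAttributes

    attrs = GcpAttributes(use_preemptible_executors=False, zone_id='AUTO')
    body = attrs.as_dict()
    assert body == {'use_preemptible_executors': False, 'zone_id': 'AUTO'}
    assert GcpAttributes.from_dict(body) == attrs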
@@ -2985,6 +3020,23 @@ class GcpAvailability(Enum):
     PREEMPTIBLE_WITH_FALLBACK_GCP = 'PREEMPTIBLE_WITH_FALLBACK_GCP'
 
 
+@dataclass
+class GcsStorageInfo:
+    destination: str
+    """GCS destination/URI, e.g. `gs://my-bucket/some-prefix`"""
+
+    def as_dict(self) -> dict:
+        """Serializes the GcsStorageInfo into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.destination is not None: body['destination'] = self.destination
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> GcsStorageInfo:
+        """Deserializes the GcsStorageInfo from a dictionary."""
+        return cls(destination=d.get('destination', None))
+
+
 @dataclass
 class GetClusterPermissionLevelsResponse:
     permission_levels: Optional[List[ClusterPermissionsDescription]] = None
@@ -3538,6 +3590,10 @@ class InitScriptExecutionDetailsStatus(Enum):
 
 @dataclass
 class InitScriptInfo:
+    abfss: Optional[Adlsgen2Info] = None
+    """destination needs to be provided. e.g. `{ "abfss" : { "destination" :
+    "abfss://<container-name>@<storage-account-name>.dfs.core.windows.net/<directory-name>" } }"""
+
     dbfs: Optional[DbfsStorageInfo] = None
     """destination needs to be provided. e.g. `{ "dbfs" : { "destination" : "dbfs:/home/cluster_log" }
     }`"""
@@ -3546,6 +3602,9 @@ class InitScriptInfo:
     """destination needs to be provided. e.g. `{ "file" : { "destination" : "file:/my/local/file.sh" }
     }`"""
 
+    gcs: Optional[GcsStorageInfo] = None
+    """destination needs to be provided. e.g. `{ "gcs": { "destination": "gs://my-bucket/file.sh" } }`"""
+
     s3: Optional[S3StorageInfo] = None
     """destination and either the region or endpoint need to be provided. e.g. `{ "s3": { "destination"
     : "s3://cluster_log_bucket/prefix", "region" : "us-west-2" } }` Cluster iam role is used to
@@ -3563,8 +3622,10 @@ class InitScriptInfo:
     def as_dict(self) -> dict:
         """Serializes the InitScriptInfo into a dictionary suitable for use as a JSON request body."""
         body = {}
+        if self.abfss: body['abfss'] = self.abfss.as_dict()
         if self.dbfs: body['dbfs'] = self.dbfs.as_dict()
         if self.file: body['file'] = self.file.as_dict()
+        if self.gcs: body['gcs'] = self.gcs.as_dict()
         if self.s3: body['s3'] = self.s3.as_dict()
         if self.volumes: body['volumes'] = self.volumes.as_dict()
         if self.workspace: body['workspace'] = self.workspace.as_dict()
@@ -3573,8 +3634,10 @@ class InitScriptInfo:
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> InitScriptInfo:
         """Deserializes the InitScriptInfo from a dictionary."""
-        return cls(dbfs=_from_dict(d, 'dbfs', DbfsStorageInfo),
+        return cls(abfss=_from_dict(d, 'abfss', Adlsgen2Info),
+                   dbfs=_from_dict(d, 'dbfs', DbfsStorageInfo),
                    file=_from_dict(d, 'file', LocalFileInfo),
+                   gcs=_from_dict(d, 'gcs', GcsStorageInfo),
                    s3=_from_dict(d, 's3', S3StorageInfo),
                    volumes=_from_dict(d, 'volumes', VolumesStorageInfo),
                    workspace=_from_dict(d, 'workspace', WorkspaceStorageInfo))
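With `Adlsgen2Info` and `GcsStorageInfo` wired into `InitScriptInfo`, init scripts can now be sourced from ADLS Gen2 and GCS. A minimal sketch with an illustrative destination:

    from databricks.sdk.service.compute import GcsStorageInfo, InitScriptInfo

    script = InitScriptInfo(gcs=GcsStorageInfo(destination='gs://my-bucket/init.sh'))
    assert script.as_dict() == {'gcs': {'destination': 'gs://my-bucket/init.sh'}}
    assert InitScriptInfo.from_dict(script.as_dict()).gcs.destination == 'gs://my-bucket/init.sh'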
@@ -4428,7 +4491,7 @@ class ListSortOrder(Enum):
 
 @dataclass
 class LocalFileInfo:
-    destination: Optional[str] = None
+    destination: str
     """local file destination, e.g. `file:/my/local/file.sh`"""
 
     def as_dict(self) -> dict:
@@ -5027,6 +5090,11 @@ class RuntimeEngine(Enum):
 
 @dataclass
 class S3StorageInfo:
+    destination: str
+    """S3 destination, e.g. `s3://my-bucket/some-prefix` Note that logs will be delivered using cluster
+    iam role, please make sure you set cluster iam role and the role has write access to the
+    destination. Please also note that you cannot use AWS keys to deliver logs."""
+
     canned_acl: Optional[str] = None
     """(Optional) Set canned access control list for the logs, e.g. `bucket-owner-full-control`. If
     `canned_cal` is set, please make sure the cluster iam role has `s3:PutObjectAcl` permission on
@@ -5036,11 +5104,6 @@ class S3StorageInfo:
     for writing data, you may want to set `bucket-owner-full-control` to make bucket owner able to
     read the logs."""
 
-    destination: Optional[str] = None
-    """S3 destination, e.g. `s3://my-bucket/some-prefix` Note that logs will be delivered using cluster
-    iam role, please make sure you set cluster iam role and the role has write access to the
-    destination. Please also note that you cannot use AWS keys to deliver logs."""
-
     enable_encryption: Optional[bool] = None
     """(Optional) Flag to enable server side encryption, `false` by default."""
 
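Since `destination` moves to the front of `S3StorageInfo` (it previously sat after `canned_acl` in the optional group), positional construction changes meaning between versions; keyword arguments are the safe upgrade path. A sketch:

    from databricks.sdk.service.compute import S3StorageInfo

    logs = S3StorageInfo(destination='s3://cluster-log-bucket/prefix', region='us-west-2')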
@@ -5371,7 +5434,7 @@ class UnpinCluster:
 
 @dataclass
 class VolumesStorageInfo:
-    destination: Optional[str] = None
+    destination: str
     """Unity Catalog Volumes file destination, e.g. `/Volumes/my-init.sh`"""
 
     def as_dict(self) -> dict:
@@ -5388,7 +5451,7 @@ class VolumesStorageInfo:
 
 @dataclass
 class WorkloadType:
-    clients: Optional[ClientsTypes] = None
+    clients: ClientsTypes
     """defined what type of clients can use the cluster. E.g. Notebooks, Jobs"""
 
     def as_dict(self) -> dict:
@@ -5405,7 +5468,7 @@ class WorkloadType:
 
 @dataclass
 class WorkspaceStorageInfo:
-    destination: Optional[str] = None
+    destination: str
     """workspace files destination, e.g. `/Users/user1@databricks.com/my-init.sh`"""
 
     def as_dict(self) -> dict:
@@ -6405,10 +6468,9 @@ class ClustersAPI:
 
         while True:
             json = self._api.do('POST', '/api/2.0/clusters/events', body=body, headers=headers)
-            if 'events' not in json or not json['events']:
-                return
-            for v in json['events']:
-                yield ClusterEvent.from_dict(v)
+            if 'events' in json:
+                for v in json['events']:
+                    yield ClusterEvent.from_dict(v)
             if 'next_page' not in json or not json['next_page']:
                 return
             body = json['next_page']
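The rewritten loop yields whatever events a page carries and only stops when no `next_page` cursor remains, instead of returning as soon as a page lacks an `events` key. Consumption is unchanged; a sketch assuming a configured client and an illustrative cluster id:

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()
    for event in w.clusters.events(cluster_id='1234-567890-abcdef12'):
        print(event.timestamp, event.type)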
@@ -7733,10 +7795,9 @@ class PolicyFamiliesAPI:
 
         while True:
             json = self._api.do('GET', '/api/2.0/policy-families', query=query, headers=headers)
-            if 'policy_families' not in json or not json['policy_families']:
-                return
-            for v in json['policy_families']:
-                yield PolicyFamily.from_dict(v)
+            if 'policy_families' in json:
+                for v in json['policy_families']:
+                    yield PolicyFamily.from_dict(v)
             if 'next_page_token' not in json or not json['next_page_token']:
                 return
             query['page_token'] = json['next_page_token']
databricks/sdk/service/files.py
CHANGED

@@ -112,6 +112,43 @@ class Delete:
         return cls(path=d.get('path', None), recursive=d.get('recursive', None))
 
 
+@dataclass
+class DirectoryEntry:
+    file_size: Optional[int] = None
+    """The length of the file in bytes. This field is omitted for directories."""
+
+    is_directory: Optional[bool] = None
+    """True if the path is a directory."""
+
+    last_modified: Optional[int] = None
+    """Last modification time of given file in milliseconds since unix epoch."""
+
+    name: Optional[str] = None
+    """The name of the file or directory."""
+
+    path: Optional[str] = None
+    """The absolute path of the file or directory."""
+
+    def as_dict(self) -> dict:
+        """Serializes the DirectoryEntry into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.file_size is not None: body['file_size'] = self.file_size
+        if self.is_directory is not None: body['is_directory'] = self.is_directory
+        if self.last_modified is not None: body['last_modified'] = self.last_modified
+        if self.name is not None: body['name'] = self.name
+        if self.path is not None: body['path'] = self.path
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> DirectoryEntry:
+        """Deserializes the DirectoryEntry from a dictionary."""
+        return cls(file_size=d.get('file_size', None),
+                   is_directory=d.get('is_directory', None),
+                   last_modified=d.get('last_modified', None),
+                   name=d.get('name', None),
+                   path=d.get('path', None))
+
+
 @dataclass
 class DownloadResponse:
     contents: Optional[BinaryIO] = None
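A sketch of deserializing a single entry, with illustrative values:

    from databricks.sdk.service.files import DirectoryEntry

    entry = DirectoryEntry.from_dict({
        'path': '/Volumes/main/default/my-volume/data.csv',
        'name': 'data.csv',
        'file_size': 2048,
        'is_directory': False,
        'last_modified': 1704067200000,  # milliseconds since the unix epoch
    })
    assert entry.name == 'data.csv' and not entry.is_directory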
@@ -149,6 +186,28 @@ class FileInfo:
                    path=d.get('path', None))
 
 
+@dataclass
+class ListDirectoryResponse:
+    contents: Optional[List[DirectoryEntry]] = None
+    """Array of DirectoryEntry."""
+
+    next_page_token: Optional[str] = None
+    """A token, which can be sent as `page_token` to retrieve the next page."""
+
+    def as_dict(self) -> dict:
+        """Serializes the ListDirectoryResponse into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.contents: body['contents'] = [v.as_dict() for v in self.contents]
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> ListDirectoryResponse:
+        """Deserializes the ListDirectoryResponse from a dictionary."""
+        return cls(contents=_repeated_dict(d, 'contents', DirectoryEntry),
+                   next_page_token=d.get('next_page_token', None))
+
+
 @dataclass
 class ListStatusResponse:
     files: Optional[List[FileInfo]] = None
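`ListDirectoryResponse` is the raw page shape; the `list_directory_contents` paginator further down consumes it and yields `DirectoryEntry` values directly. A small deserialization sketch with illustrative values:

    from databricks.sdk.service.files import ListDirectoryResponse

    page = ListDirectoryResponse.from_dict({
        'contents': [{'path': '/Volumes/main/default/my-volume/a.csv', 'name': 'a.csv'}],
        'next_page_token': 'tok-2',  # illustrative
    })
    assert page.contents[0].name == 'a.csv' and page.next_page_token == 'tok-2'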
@@ -505,59 +564,114 @@ class FilesAPI:
     def __init__(self, api_client):
         self._api = api_client
 
+    def create_directory(self, directory_path: str):
+        """Create a directory.
+
+        Creates an empty directory. If called on an existing directory, the API returns a success response.
+
+        :param directory_path: str
+          The absolute path of a directory.
+
+
+        """
+
+        headers = {}
+        self._api.do('PUT', f'/api/2.0/fs/directories{directory_path}', headers=headers)
+
     def delete(self, file_path: str):
-        """Delete a file
+        """Delete a file.
 
-        Deletes a file
+        Deletes a file.
 
         :param file_path: str
-          The absolute path of the file
+          The absolute path of the file.
 
 
         """
 
         headers = {}
-        self._api.do('DELETE', f'/api/2.0/fs/files
+        self._api.do('DELETE', f'/api/2.0/fs/files{file_path}', headers=headers)
+
+    def delete_directory(self, directory_path: str):
+        """Delete a directory.
+
+        Deletes an empty directory. If the directory is not empty, the API returns a HTTP 400 error.
+
+        :param directory_path: str
+          The absolute path of a directory.
+
+
+        """
+
+        headers = {}
+        self._api.do('DELETE', f'/api/2.0/fs/directories{directory_path}', headers=headers)
 
     def download(self, file_path: str) -> DownloadResponse:
         """Download a file.
 
-        Downloads a file of up to
+        Downloads a file of up to 5 GiB.
 
         :param file_path: str
-          The absolute path of the file
+          The absolute path of the file.
 
         :returns: :class:`DownloadResponse`
         """
 
         headers = {'Accept': 'application/octet-stream', }
-        res = self._api.do('GET', f'/api/2.0/fs/files
+        res = self._api.do('GET', f'/api/2.0/fs/files{file_path}', headers=headers, raw=True)
         return DownloadResponse(contents=res)
 
-    def
-
-
-
-
-
-
-
-
+    def list_directory_contents(self,
+                                directory_path: str,
+                                *,
+                                page_size: Optional[int] = None,
+                                page_token: Optional[str] = None) -> Iterator[DirectoryEntry]:
+        """List directory contents.
+
+        Returns the contents of a directory. If there is no directory at the specified path, the API returns a
+        HTTP 404 error.
+
+        :param directory_path: str
+          The absolute path of a directory.
+        :param page_size: int (optional)
+          The maximum number of directory entries to return. The API may return fewer than this value.
+          Receiving fewer results does not imply there are no more results. As long as the response contains a
+          next_page_token, there may be more results.
+
+          If unspecified, at most 1000 directory entries will be returned. The maximum value is 1000. Values
+          above 1000 will be coerced to 1000.
+        :param page_token: str (optional)
+          A page token, received from a previous `list` call. Provide this to retrieve the subsequent page.
+          When paginating, all other parameters provided to `list` must match the call that provided the page
+          token.
+
+        :returns: Iterator over :class:`DirectoryEntry`
         """
 
         query = {}
-        if
+        if page_size is not None: query['page_size'] = page_size
+        if page_token is not None: query['page_token'] = page_token
         headers = {'Accept': 'application/json', }
-
-
+
+        while True:
+            json = self._api.do('GET',
+                                f'/api/2.0/fs/directories{directory_path}',
+                                query=query,
+                                headers=headers)
+            if 'contents' in json:
+                for v in json['contents']:
+                    yield DirectoryEntry.from_dict(v)
+            if 'next_page_token' not in json or not json['next_page_token']:
+                return
+            query['page_token'] = json['next_page_token']
 
     def upload(self, file_path: str, contents: BinaryIO, *, overwrite: Optional[bool] = None):
         """Upload a file.
 
-        Uploads a file of up to
+        Uploads a file of up to 5 GiB.
 
         :param file_path: str
-          The absolute path of the file
+          The absolute path of the file.
         :param contents: BinaryIO
         :param overwrite: bool (optional)
           If true, an existing file will be overwritten.
@@ -568,4 +682,4 @@ class FilesAPI:
         query = {}
         if overwrite is not None: query['overwrite'] = overwrite
         headers = {'Content-Type': 'application/octet-stream', }
-        self._api.do('PUT', f'/api/2.0/fs/files
+        self._api.do('PUT', f'/api/2.0/fs/files{file_path}', query=query, headers=headers, data=contents)
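Putting the new `FilesAPI` surface together: `create_directory`, `upload`, the paginated `list_directory_contents`, and `delete_directory`. An end-to-end sketch; the volume path is illustrative and a configured `WorkspaceClient` is assumed:

    import io
    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()
    base = '/Volumes/main/default/my-volume/reports'

    w.files.create_directory(base)  # succeeds even if it already exists
    w.files.upload(f'{base}/a.csv', io.BytesIO(b'id,x\n1,2\n'), overwrite=True)
    for entry in w.files.list_directory_contents(base):
        print(entry.name, entry.file_size)
    w.files.delete(f'{base}/a.csv')
    w.files.delete_directory(base)  # the directory must be empty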
databricks/sdk/service/iam.py
CHANGED

@@ -1465,14 +1465,15 @@ class AccountGroupsAPI:
                                 f'/api/2.0/accounts/{self._api.account_id}/scim/v2/Groups',
                                 query=query,
                                 headers=headers)
+            if 'Resources' in json:
+                for v in json['Resources']:
+                    i = v['id']
+                    if i in seen:
+                        continue
+                    seen.add(i)
+                    yield Group.from_dict(v)
             if 'Resources' not in json or not json['Resources']:
                 return
-            for v in json['Resources']:
-                i = v['id']
-                if i in seen:
-                    continue
-                seen.add(i)
-                yield Group.from_dict(v)
             query['startIndex'] += len(json['Resources'])
 
     def patch(self,
@@ -1705,14 +1706,15 @@ class AccountServicePrincipalsAPI:
                                 f'/api/2.0/accounts/{self._api.account_id}/scim/v2/ServicePrincipals',
                                 query=query,
                                 headers=headers)
+            if 'Resources' in json:
+                for v in json['Resources']:
+                    i = v['id']
+                    if i in seen:
+                        continue
+                    seen.add(i)
+                    yield ServicePrincipal.from_dict(v)
             if 'Resources' not in json or not json['Resources']:
                 return
-            for v in json['Resources']:
-                i = v['id']
-                if i in seen:
-                    continue
-                seen.add(i)
-                yield ServicePrincipal.from_dict(v)
             query['startIndex'] += len(json['Resources'])
 
     def patch(self,
@@ -2005,14 +2007,15 @@ class AccountUsersAPI:
                                 f'/api/2.0/accounts/{self._api.account_id}/scim/v2/Users',
                                 query=query,
                                 headers=headers)
+            if 'Resources' in json:
+                for v in json['Resources']:
+                    i = v['id']
+                    if i in seen:
+                        continue
+                    seen.add(i)
+                    yield User.from_dict(v)
             if 'Resources' not in json or not json['Resources']:
                 return
-            for v in json['Resources']:
-                i = v['id']
-                if i in seen:
-                    continue
-                seen.add(i)
-                yield User.from_dict(v)
             query['startIndex'] += len(json['Resources'])
 
     def patch(self,
@@ -2267,14 +2270,15 @@ class GroupsAPI:
         if "count" not in query: query['count'] = 100
         while True:
             json = self._api.do('GET', '/api/2.0/preview/scim/v2/Groups', query=query, headers=headers)
+            if 'Resources' in json:
+                for v in json['Resources']:
+                    i = v['id']
+                    if i in seen:
+                        continue
+                    seen.add(i)
+                    yield Group.from_dict(v)
             if 'Resources' not in json or not json['Resources']:
                 return
-            for v in json['Resources']:
-                i = v['id']
-                if i in seen:
-                    continue
-                seen.add(i)
-                yield Group.from_dict(v)
             query['startIndex'] += len(json['Resources'])
 
     def patch(self,
@@ -2646,14 +2650,15 @@ class ServicePrincipalsAPI:
                                 '/api/2.0/preview/scim/v2/ServicePrincipals',
                                 query=query,
                                 headers=headers)
+            if 'Resources' in json:
+                for v in json['Resources']:
+                    i = v['id']
+                    if i in seen:
+                        continue
+                    seen.add(i)
+                    yield ServicePrincipal.from_dict(v)
             if 'Resources' not in json or not json['Resources']:
                 return
-            for v in json['Resources']:
-                i = v['id']
-                if i in seen:
-                    continue
-                seen.add(i)
-                yield ServicePrincipal.from_dict(v)
             query['startIndex'] += len(json['Resources'])
 
     def patch(self,
@@ -2955,14 +2960,15 @@ class UsersAPI:
         if "count" not in query: query['count'] = 100
         while True:
             json = self._api.do('GET', '/api/2.0/preview/scim/v2/Users', query=query, headers=headers)
+            if 'Resources' in json:
+                for v in json['Resources']:
+                    i = v['id']
+                    if i in seen:
+                        continue
+                    seen.add(i)
+                    yield User.from_dict(v)
             if 'Resources' not in json or not json['Resources']:
                 return
-            for v in json['Resources']:
-                i = v['id']
-                if i in seen:
-                    continue
-                seen.add(i)
-                yield User.from_dict(v)
             query['startIndex'] += len(json['Resources'])
 
     def patch(self,
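All six SCIM paginators get the same reshaping as the compute paginators above: results are yielded (de-duplicated by `id` through the `seen` set) before the empty-page check instead of after it. Iteration is unchanged for callers; a sketch assuming a configured client:

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()
    # the paginator keys its seen-set on each record's 'id'
    for user in w.users.list(attributes='id,userName'):
        print(user.user_name)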