databricks-sdk 0.0.7__py3-none-any.whl → 0.1.1__py3-none-any.whl

This diff shows the changes between publicly released package versions as they appear in their respective public registries. It is provided for informational purposes only.

Potentially problematic release: this version of databricks-sdk might be problematic.

Files changed (41)
  1. databricks/sdk/__init__.py +121 -104
  2. databricks/sdk/core.py +76 -16
  3. databricks/sdk/dbutils.py +18 -17
  4. databricks/sdk/mixins/compute.py +6 -6
  5. databricks/sdk/mixins/dbfs.py +6 -6
  6. databricks/sdk/oauth.py +28 -14
  7. databricks/sdk/service/{unitycatalog.py → catalog.py} +375 -1146
  8. databricks/sdk/service/{clusters.py → compute.py} +2176 -61
  9. databricks/sdk/service/{dbfs.py → files.py} +6 -6
  10. databricks/sdk/service/{scim.py → iam.py} +567 -27
  11. databricks/sdk/service/jobs.py +44 -34
  12. databricks/sdk/service/{mlflow.py → ml.py} +976 -1071
  13. databricks/sdk/service/oauth2.py +3 -3
  14. databricks/sdk/service/pipelines.py +46 -30
  15. databricks/sdk/service/{deployment.py → provisioning.py} +47 -29
  16. databricks/sdk/service/settings.py +849 -0
  17. databricks/sdk/service/sharing.py +1176 -0
  18. databricks/sdk/service/sql.py +15 -15
  19. databricks/sdk/service/workspace.py +917 -22
  20. databricks/sdk/version.py +1 -1
  21. {databricks_sdk-0.0.7.dist-info → databricks_sdk-0.1.1.dist-info}/METADATA +3 -1
  22. databricks_sdk-0.1.1.dist-info/RECORD +37 -0
  23. databricks/sdk/service/clusterpolicies.py +0 -399
  24. databricks/sdk/service/commands.py +0 -478
  25. databricks/sdk/service/gitcredentials.py +0 -202
  26. databricks/sdk/service/globalinitscripts.py +0 -262
  27. databricks/sdk/service/instancepools.py +0 -757
  28. databricks/sdk/service/ipaccesslists.py +0 -340
  29. databricks/sdk/service/libraries.py +0 -282
  30. databricks/sdk/service/permissions.py +0 -470
  31. databricks/sdk/service/repos.py +0 -250
  32. databricks/sdk/service/secrets.py +0 -472
  33. databricks/sdk/service/tokenmanagement.py +0 -182
  34. databricks/sdk/service/tokens.py +0 -137
  35. databricks/sdk/service/workspaceconf.py +0 -50
  36. databricks_sdk-0.0.7.dist-info/RECORD +0 -48
  37. /databricks/sdk/service/{endpoints.py → serving.py} +0 -0
  38. {databricks_sdk-0.0.7.dist-info → databricks_sdk-0.1.1.dist-info}/LICENSE +0 -0
  39. {databricks_sdk-0.0.7.dist-info → databricks_sdk-0.1.1.dist-info}/NOTICE +0 -0
  40. {databricks_sdk-0.0.7.dist-info → databricks_sdk-0.1.1.dist-info}/WHEEL +0 -0
  41. {databricks_sdk-0.0.7.dist-info → databricks_sdk-0.1.1.dist-info}/top_level.txt +0 -0
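
The renames in this list change import paths between the two versions: service modules now follow the new API package names (unitycatalog → catalog, clusters → compute, dbfs → files, scim → iam, mlflow → ml, deployment → provisioning, endpoints → serving). A minimal before/after sketch; the class names below are illustrative assumptions about what the renamed modules export, not confirmed by this diff:

```python
# databricks-sdk 0.0.7 import paths (old module names):
# from databricks.sdk.service.clusters import ClustersAPI
# from databricks.sdk.service.scim import UsersAPI

# databricks-sdk 0.1.1 import paths after the renames listed above,
# assuming the API classes kept their names when the modules moved:
from databricks.sdk.service.compute import ClustersAPI
from databricks.sdk.service.iam import UsersAPI
```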
The only hunk shown in this diff is the full deletion of databricks/sdk/service/instancepools.py (entry 27 in the list above):

@@ -1,757 +0,0 @@
- # Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
-
- import logging
- from dataclasses import dataclass
- from enum import Enum
- from typing import Dict, Iterator, List
-
- from ._internal import _enum, _from_dict, _repeated
-
- _LOG = logging.getLogger('databricks.sdk')
-
- # all definitions in this file are in alphabetical order
-
-
- @dataclass
- class CreateInstancePool:
-     instance_pool_name: str
-     node_type_id: str
-     aws_attributes: 'InstancePoolAwsAttributes' = None
-     azure_attributes: 'InstancePoolAzureAttributes' = None
-     custom_tags: 'Dict[str,str]' = None
-     disk_spec: 'DiskSpec' = None
-     enable_elastic_disk: bool = None
-     idle_instance_autotermination_minutes: int = None
-     instance_pool_fleet_attributes: 'InstancePoolFleetAttributes' = None
-     max_capacity: int = None
-     min_idle_instances: int = None
-     preloaded_docker_images: 'List[DockerImage]' = None
-     preloaded_spark_versions: 'List[str]' = None
-
-     def as_dict(self) -> dict:
-         body = {}
-         if self.aws_attributes: body['aws_attributes'] = self.aws_attributes.as_dict()
-         if self.azure_attributes: body['azure_attributes'] = self.azure_attributes.as_dict()
-         if self.custom_tags: body['custom_tags'] = self.custom_tags
-         if self.disk_spec: body['disk_spec'] = self.disk_spec.as_dict()
-         if self.enable_elastic_disk: body['enable_elastic_disk'] = self.enable_elastic_disk
-         if self.idle_instance_autotermination_minutes:
-             body['idle_instance_autotermination_minutes'] = self.idle_instance_autotermination_minutes
-         if self.instance_pool_fleet_attributes:
-             body['instance_pool_fleet_attributes'] = self.instance_pool_fleet_attributes.as_dict()
-         if self.instance_pool_name: body['instance_pool_name'] = self.instance_pool_name
-         if self.max_capacity: body['max_capacity'] = self.max_capacity
-         if self.min_idle_instances: body['min_idle_instances'] = self.min_idle_instances
-         if self.node_type_id: body['node_type_id'] = self.node_type_id
-         if self.preloaded_docker_images:
-             body['preloaded_docker_images'] = [v.as_dict() for v in self.preloaded_docker_images]
-         if self.preloaded_spark_versions:
-             body['preloaded_spark_versions'] = [v for v in self.preloaded_spark_versions]
-         return body
-
-     @classmethod
-     def from_dict(cls, d: Dict[str, any]) -> 'CreateInstancePool':
-         return cls(aws_attributes=_from_dict(d, 'aws_attributes', InstancePoolAwsAttributes),
-                    azure_attributes=_from_dict(d, 'azure_attributes', InstancePoolAzureAttributes),
-                    custom_tags=d.get('custom_tags', None),
-                    disk_spec=_from_dict(d, 'disk_spec', DiskSpec),
-                    enable_elastic_disk=d.get('enable_elastic_disk', None),
-                    idle_instance_autotermination_minutes=d.get('idle_instance_autotermination_minutes', None),
-                    instance_pool_fleet_attributes=_from_dict(d, 'instance_pool_fleet_attributes',
-                                                              InstancePoolFleetAttributes),
-                    instance_pool_name=d.get('instance_pool_name', None),
-                    max_capacity=d.get('max_capacity', None),
-                    min_idle_instances=d.get('min_idle_instances', None),
-                    node_type_id=d.get('node_type_id', None),
-                    preloaded_docker_images=_repeated(d, 'preloaded_docker_images', DockerImage),
-                    preloaded_spark_versions=d.get('preloaded_spark_versions', None))
-
-
- @dataclass
- class CreateInstancePoolResponse:
-     instance_pool_id: str = None
-
-     def as_dict(self) -> dict:
-         body = {}
-         if self.instance_pool_id: body['instance_pool_id'] = self.instance_pool_id
-         return body
-
-     @classmethod
-     def from_dict(cls, d: Dict[str, any]) -> 'CreateInstancePoolResponse':
-         return cls(instance_pool_id=d.get('instance_pool_id', None))
-
-
- @dataclass
- class DeleteInstancePool:
-     instance_pool_id: str
-
-     def as_dict(self) -> dict:
-         body = {}
-         if self.instance_pool_id: body['instance_pool_id'] = self.instance_pool_id
-         return body
-
-     @classmethod
-     def from_dict(cls, d: Dict[str, any]) -> 'DeleteInstancePool':
-         return cls(instance_pool_id=d.get('instance_pool_id', None))
-
-
- @dataclass
- class DiskSpec:
-     disk_count: int = None
-     disk_iops: int = None
-     disk_size: int = None
-     disk_throughput: int = None
-     disk_type: 'DiskType' = None
-
-     def as_dict(self) -> dict:
-         body = {}
-         if self.disk_count: body['disk_count'] = self.disk_count
-         if self.disk_iops: body['disk_iops'] = self.disk_iops
-         if self.disk_size: body['disk_size'] = self.disk_size
-         if self.disk_throughput: body['disk_throughput'] = self.disk_throughput
-         if self.disk_type: body['disk_type'] = self.disk_type.as_dict()
-         return body
-
-     @classmethod
-     def from_dict(cls, d: Dict[str, any]) -> 'DiskSpec':
-         return cls(disk_count=d.get('disk_count', None),
-                    disk_iops=d.get('disk_iops', None),
-                    disk_size=d.get('disk_size', None),
-                    disk_throughput=d.get('disk_throughput', None),
-                    disk_type=_from_dict(d, 'disk_type', DiskType))
-
-
- @dataclass
- class DiskType:
-     azure_disk_volume_type: 'DiskTypeAzureDiskVolumeType' = None
-     ebs_volume_type: 'DiskTypeEbsVolumeType' = None
-
-     def as_dict(self) -> dict:
-         body = {}
-         if self.azure_disk_volume_type: body['azure_disk_volume_type'] = self.azure_disk_volume_type.value
-         if self.ebs_volume_type: body['ebs_volume_type'] = self.ebs_volume_type.value
-         return body
-
-     @classmethod
-     def from_dict(cls, d: Dict[str, any]) -> 'DiskType':
-         return cls(azure_disk_volume_type=_enum(d, 'azure_disk_volume_type', DiskTypeAzureDiskVolumeType),
-                    ebs_volume_type=_enum(d, 'ebs_volume_type', DiskTypeEbsVolumeType))
-
-
- class DiskTypeAzureDiskVolumeType(Enum):
-
-     PREMIUM_LRS = 'PREMIUM_LRS'
-     STANDARD_LRS = 'STANDARD_LRS'
-
-
- class DiskTypeEbsVolumeType(Enum):
-
-     GENERAL_PURPOSE_SSD = 'GENERAL_PURPOSE_SSD'
-     THROUGHPUT_OPTIMIZED_HDD = 'THROUGHPUT_OPTIMIZED_HDD'
-
-
- @dataclass
- class DockerBasicAuth:
-     password: str = None
-     username: str = None
-
-     def as_dict(self) -> dict:
-         body = {}
-         if self.password: body['password'] = self.password
-         if self.username: body['username'] = self.username
-         return body
-
-     @classmethod
-     def from_dict(cls, d: Dict[str, any]) -> 'DockerBasicAuth':
-         return cls(password=d.get('password', None), username=d.get('username', None))
-
-
- @dataclass
- class DockerImage:
-     basic_auth: 'DockerBasicAuth' = None
-     url: str = None
-
-     def as_dict(self) -> dict:
-         body = {}
-         if self.basic_auth: body['basic_auth'] = self.basic_auth.as_dict()
-         if self.url: body['url'] = self.url
-         return body
-
-     @classmethod
-     def from_dict(cls, d: Dict[str, any]) -> 'DockerImage':
-         return cls(basic_auth=_from_dict(d, 'basic_auth', DockerBasicAuth), url=d.get('url', None))
-
-
- @dataclass
- class EditInstancePool:
-     instance_pool_id: str
-     instance_pool_name: str
-     node_type_id: str
-     aws_attributes: 'InstancePoolAwsAttributes' = None
-     azure_attributes: 'InstancePoolAzureAttributes' = None
-     custom_tags: 'Dict[str,str]' = None
-     disk_spec: 'DiskSpec' = None
-     enable_elastic_disk: bool = None
-     idle_instance_autotermination_minutes: int = None
-     instance_pool_fleet_attributes: 'InstancePoolFleetAttributes' = None
-     max_capacity: int = None
-     min_idle_instances: int = None
-     preloaded_docker_images: 'List[DockerImage]' = None
-     preloaded_spark_versions: 'List[str]' = None
-
-     def as_dict(self) -> dict:
-         body = {}
-         if self.aws_attributes: body['aws_attributes'] = self.aws_attributes.as_dict()
-         if self.azure_attributes: body['azure_attributes'] = self.azure_attributes.as_dict()
-         if self.custom_tags: body['custom_tags'] = self.custom_tags
-         if self.disk_spec: body['disk_spec'] = self.disk_spec.as_dict()
-         if self.enable_elastic_disk: body['enable_elastic_disk'] = self.enable_elastic_disk
-         if self.idle_instance_autotermination_minutes:
-             body['idle_instance_autotermination_minutes'] = self.idle_instance_autotermination_minutes
-         if self.instance_pool_fleet_attributes:
-             body['instance_pool_fleet_attributes'] = self.instance_pool_fleet_attributes.as_dict()
-         if self.instance_pool_id: body['instance_pool_id'] = self.instance_pool_id
-         if self.instance_pool_name: body['instance_pool_name'] = self.instance_pool_name
-         if self.max_capacity: body['max_capacity'] = self.max_capacity
-         if self.min_idle_instances: body['min_idle_instances'] = self.min_idle_instances
-         if self.node_type_id: body['node_type_id'] = self.node_type_id
-         if self.preloaded_docker_images:
-             body['preloaded_docker_images'] = [v.as_dict() for v in self.preloaded_docker_images]
-         if self.preloaded_spark_versions:
-             body['preloaded_spark_versions'] = [v for v in self.preloaded_spark_versions]
-         return body
-
-     @classmethod
-     def from_dict(cls, d: Dict[str, any]) -> 'EditInstancePool':
-         return cls(aws_attributes=_from_dict(d, 'aws_attributes', InstancePoolAwsAttributes),
-                    azure_attributes=_from_dict(d, 'azure_attributes', InstancePoolAzureAttributes),
-                    custom_tags=d.get('custom_tags', None),
-                    disk_spec=_from_dict(d, 'disk_spec', DiskSpec),
-                    enable_elastic_disk=d.get('enable_elastic_disk', None),
-                    idle_instance_autotermination_minutes=d.get('idle_instance_autotermination_minutes', None),
-                    instance_pool_fleet_attributes=_from_dict(d, 'instance_pool_fleet_attributes',
-                                                              InstancePoolFleetAttributes),
-                    instance_pool_id=d.get('instance_pool_id', None),
-                    instance_pool_name=d.get('instance_pool_name', None),
-                    max_capacity=d.get('max_capacity', None),
-                    min_idle_instances=d.get('min_idle_instances', None),
-                    node_type_id=d.get('node_type_id', None),
-                    preloaded_docker_images=_repeated(d, 'preloaded_docker_images', DockerImage),
-                    preloaded_spark_versions=d.get('preloaded_spark_versions', None))
-
-
- @dataclass
- class FleetLaunchTemplateOverride:
-     availability_zone: str
-     instance_type: str
-     max_price: float = None
-     priority: float = None
-
-     def as_dict(self) -> dict:
-         body = {}
-         if self.availability_zone: body['availability_zone'] = self.availability_zone
-         if self.instance_type: body['instance_type'] = self.instance_type
-         if self.max_price: body['max_price'] = self.max_price
-         if self.priority: body['priority'] = self.priority
-         return body
-
-     @classmethod
-     def from_dict(cls, d: Dict[str, any]) -> 'FleetLaunchTemplateOverride':
-         return cls(availability_zone=d.get('availability_zone', None),
-                    instance_type=d.get('instance_type', None),
-                    max_price=d.get('max_price', None),
-                    priority=d.get('priority', None))
-
-
- @dataclass
- class FleetOnDemandOption:
-     allocation_strategy: 'FleetOnDemandOptionAllocationStrategy' = None
-     max_total_price: float = None
-     use_capacity_reservations_first: bool = None
-
-     def as_dict(self) -> dict:
-         body = {}
-         if self.allocation_strategy: body['allocation_strategy'] = self.allocation_strategy.value
-         if self.max_total_price: body['max_total_price'] = self.max_total_price
-         if self.use_capacity_reservations_first:
-             body['use_capacity_reservations_first'] = self.use_capacity_reservations_first
-         return body
-
-     @classmethod
-     def from_dict(cls, d: Dict[str, any]) -> 'FleetOnDemandOption':
-         return cls(allocation_strategy=_enum(d, 'allocation_strategy', FleetOnDemandOptionAllocationStrategy),
-                    max_total_price=d.get('max_total_price', None),
-                    use_capacity_reservations_first=d.get('use_capacity_reservations_first', None))
-
-
- class FleetOnDemandOptionAllocationStrategy(Enum):
-     """Only lowest-price and prioritized are allowed"""
-
-     CAPACITY_OPTIMIZED = 'CAPACITY_OPTIMIZED'
-     DIVERSIFIED = 'DIVERSIFIED'
-     LOWEST_PRICE = 'LOWEST_PRICE'
-     PRIORITIZED = 'PRIORITIZED'
-
-
- @dataclass
- class FleetSpotOption:
-     allocation_strategy: 'FleetSpotOptionAllocationStrategy' = None
-     instance_pools_to_use_count: int = None
-     max_total_price: float = None
-
-     def as_dict(self) -> dict:
-         body = {}
-         if self.allocation_strategy: body['allocation_strategy'] = self.allocation_strategy.value
-         if self.instance_pools_to_use_count:
-             body['instance_pools_to_use_count'] = self.instance_pools_to_use_count
-         if self.max_total_price: body['max_total_price'] = self.max_total_price
-         return body
-
-     @classmethod
-     def from_dict(cls, d: Dict[str, any]) -> 'FleetSpotOption':
-         return cls(allocation_strategy=_enum(d, 'allocation_strategy', FleetSpotOptionAllocationStrategy),
-                    instance_pools_to_use_count=d.get('instance_pools_to_use_count', None),
-                    max_total_price=d.get('max_total_price', None))
-
-
- class FleetSpotOptionAllocationStrategy(Enum):
-     """lowest-price | diversified | capacity-optimized"""
-
-     CAPACITY_OPTIMIZED = 'CAPACITY_OPTIMIZED'
-     DIVERSIFIED = 'DIVERSIFIED'
-     LOWEST_PRICE = 'LOWEST_PRICE'
-     PRIORITIZED = 'PRIORITIZED'
-
-
- @dataclass
- class Get:
-     """Get instance pool information"""
-
-     instance_pool_id: str
-
-
- @dataclass
- class GetInstancePool:
-     instance_pool_id: str
-     aws_attributes: 'InstancePoolAwsAttributes' = None
-     azure_attributes: 'InstancePoolAzureAttributes' = None
-     custom_tags: 'Dict[str,str]' = None
-     default_tags: 'Dict[str,str]' = None
-     disk_spec: 'DiskSpec' = None
-     enable_elastic_disk: bool = None
-     idle_instance_autotermination_minutes: int = None
-     instance_pool_fleet_attributes: 'InstancePoolFleetAttributes' = None
-     instance_pool_name: str = None
-     max_capacity: int = None
-     min_idle_instances: int = None
-     node_type_id: str = None
-     preloaded_docker_images: 'List[DockerImage]' = None
-     preloaded_spark_versions: 'List[str]' = None
-     state: 'InstancePoolState' = None
-     stats: 'InstancePoolStats' = None
-     status: 'InstancePoolStatus' = None
-
-     def as_dict(self) -> dict:
-         body = {}
-         if self.aws_attributes: body['aws_attributes'] = self.aws_attributes.as_dict()
-         if self.azure_attributes: body['azure_attributes'] = self.azure_attributes.as_dict()
-         if self.custom_tags: body['custom_tags'] = self.custom_tags
-         if self.default_tags: body['default_tags'] = self.default_tags
-         if self.disk_spec: body['disk_spec'] = self.disk_spec.as_dict()
-         if self.enable_elastic_disk: body['enable_elastic_disk'] = self.enable_elastic_disk
-         if self.idle_instance_autotermination_minutes:
-             body['idle_instance_autotermination_minutes'] = self.idle_instance_autotermination_minutes
-         if self.instance_pool_fleet_attributes:
-             body['instance_pool_fleet_attributes'] = self.instance_pool_fleet_attributes.as_dict()
-         if self.instance_pool_id: body['instance_pool_id'] = self.instance_pool_id
-         if self.instance_pool_name: body['instance_pool_name'] = self.instance_pool_name
-         if self.max_capacity: body['max_capacity'] = self.max_capacity
-         if self.min_idle_instances: body['min_idle_instances'] = self.min_idle_instances
-         if self.node_type_id: body['node_type_id'] = self.node_type_id
-         if self.preloaded_docker_images:
-             body['preloaded_docker_images'] = [v.as_dict() for v in self.preloaded_docker_images]
-         if self.preloaded_spark_versions:
-             body['preloaded_spark_versions'] = [v for v in self.preloaded_spark_versions]
-         if self.state: body['state'] = self.state.value
-         if self.stats: body['stats'] = self.stats.as_dict()
-         if self.status: body['status'] = self.status.as_dict()
-         return body
-
-     @classmethod
-     def from_dict(cls, d: Dict[str, any]) -> 'GetInstancePool':
-         return cls(aws_attributes=_from_dict(d, 'aws_attributes', InstancePoolAwsAttributes),
-                    azure_attributes=_from_dict(d, 'azure_attributes', InstancePoolAzureAttributes),
-                    custom_tags=d.get('custom_tags', None),
-                    default_tags=d.get('default_tags', None),
-                    disk_spec=_from_dict(d, 'disk_spec', DiskSpec),
-                    enable_elastic_disk=d.get('enable_elastic_disk', None),
-                    idle_instance_autotermination_minutes=d.get('idle_instance_autotermination_minutes', None),
-                    instance_pool_fleet_attributes=_from_dict(d, 'instance_pool_fleet_attributes',
-                                                              InstancePoolFleetAttributes),
-                    instance_pool_id=d.get('instance_pool_id', None),
-                    instance_pool_name=d.get('instance_pool_name', None),
-                    max_capacity=d.get('max_capacity', None),
-                    min_idle_instances=d.get('min_idle_instances', None),
-                    node_type_id=d.get('node_type_id', None),
-                    preloaded_docker_images=_repeated(d, 'preloaded_docker_images', DockerImage),
-                    preloaded_spark_versions=d.get('preloaded_spark_versions', None),
-                    state=_enum(d, 'state', InstancePoolState),
-                    stats=_from_dict(d, 'stats', InstancePoolStats),
-                    status=_from_dict(d, 'status', InstancePoolStatus))
-
-
- @dataclass
- class InstancePoolAndStats:
-     aws_attributes: 'InstancePoolAwsAttributes' = None
-     azure_attributes: 'InstancePoolAzureAttributes' = None
-     custom_tags: 'Dict[str,str]' = None
-     default_tags: 'Dict[str,str]' = None
-     disk_spec: 'DiskSpec' = None
-     enable_elastic_disk: bool = None
-     idle_instance_autotermination_minutes: int = None
-     instance_pool_fleet_attributes: 'InstancePoolFleetAttributes' = None
-     instance_pool_id: str = None
-     instance_pool_name: str = None
-     max_capacity: int = None
-     min_idle_instances: int = None
-     node_type_id: str = None
-     preloaded_docker_images: 'List[DockerImage]' = None
-     preloaded_spark_versions: 'List[str]' = None
-     state: 'InstancePoolState' = None
-     stats: 'InstancePoolStats' = None
-     status: 'InstancePoolStatus' = None
-
-     def as_dict(self) -> dict:
-         body = {}
-         if self.aws_attributes: body['aws_attributes'] = self.aws_attributes.as_dict()
-         if self.azure_attributes: body['azure_attributes'] = self.azure_attributes.as_dict()
-         if self.custom_tags: body['custom_tags'] = self.custom_tags
-         if self.default_tags: body['default_tags'] = self.default_tags
-         if self.disk_spec: body['disk_spec'] = self.disk_spec.as_dict()
-         if self.enable_elastic_disk: body['enable_elastic_disk'] = self.enable_elastic_disk
-         if self.idle_instance_autotermination_minutes:
-             body['idle_instance_autotermination_minutes'] = self.idle_instance_autotermination_minutes
-         if self.instance_pool_fleet_attributes:
-             body['instance_pool_fleet_attributes'] = self.instance_pool_fleet_attributes.as_dict()
-         if self.instance_pool_id: body['instance_pool_id'] = self.instance_pool_id
-         if self.instance_pool_name: body['instance_pool_name'] = self.instance_pool_name
-         if self.max_capacity: body['max_capacity'] = self.max_capacity
-         if self.min_idle_instances: body['min_idle_instances'] = self.min_idle_instances
-         if self.node_type_id: body['node_type_id'] = self.node_type_id
-         if self.preloaded_docker_images:
-             body['preloaded_docker_images'] = [v.as_dict() for v in self.preloaded_docker_images]
-         if self.preloaded_spark_versions:
-             body['preloaded_spark_versions'] = [v for v in self.preloaded_spark_versions]
-         if self.state: body['state'] = self.state.value
-         if self.stats: body['stats'] = self.stats.as_dict()
-         if self.status: body['status'] = self.status.as_dict()
-         return body
-
-     @classmethod
-     def from_dict(cls, d: Dict[str, any]) -> 'InstancePoolAndStats':
-         return cls(aws_attributes=_from_dict(d, 'aws_attributes', InstancePoolAwsAttributes),
-                    azure_attributes=_from_dict(d, 'azure_attributes', InstancePoolAzureAttributes),
-                    custom_tags=d.get('custom_tags', None),
-                    default_tags=d.get('default_tags', None),
-                    disk_spec=_from_dict(d, 'disk_spec', DiskSpec),
-                    enable_elastic_disk=d.get('enable_elastic_disk', None),
-                    idle_instance_autotermination_minutes=d.get('idle_instance_autotermination_minutes', None),
-                    instance_pool_fleet_attributes=_from_dict(d, 'instance_pool_fleet_attributes',
-                                                              InstancePoolFleetAttributes),
-                    instance_pool_id=d.get('instance_pool_id', None),
-                    instance_pool_name=d.get('instance_pool_name', None),
-                    max_capacity=d.get('max_capacity', None),
-                    min_idle_instances=d.get('min_idle_instances', None),
-                    node_type_id=d.get('node_type_id', None),
-                    preloaded_docker_images=_repeated(d, 'preloaded_docker_images', DockerImage),
-                    preloaded_spark_versions=d.get('preloaded_spark_versions', None),
-                    state=_enum(d, 'state', InstancePoolState),
-                    stats=_from_dict(d, 'stats', InstancePoolStats),
-                    status=_from_dict(d, 'status', InstancePoolStatus))
-
-
- @dataclass
- class InstancePoolAwsAttributes:
-     availability: 'InstancePoolAwsAttributesAvailability' = None
-     spot_bid_price_percent: int = None
-     zone_id: str = None
-
-     def as_dict(self) -> dict:
-         body = {}
-         if self.availability: body['availability'] = self.availability.value
-         if self.spot_bid_price_percent: body['spot_bid_price_percent'] = self.spot_bid_price_percent
-         if self.zone_id: body['zone_id'] = self.zone_id
-         return body
-
-     @classmethod
-     def from_dict(cls, d: Dict[str, any]) -> 'InstancePoolAwsAttributes':
-         return cls(availability=_enum(d, 'availability', InstancePoolAwsAttributesAvailability),
-                    spot_bid_price_percent=d.get('spot_bid_price_percent', None),
-                    zone_id=d.get('zone_id', None))
-
-
- class InstancePoolAwsAttributesAvailability(Enum):
-     """Availability type used for the spot nodes.
-
-     The default value is defined by InstancePoolConf.instancePoolDefaultAwsAvailability"""
-
-     ON_DEMAND = 'ON_DEMAND'
-     SPOT = 'SPOT'
-     SPOT_WITH_FALLBACK = 'SPOT_WITH_FALLBACK'
-
-
- @dataclass
- class InstancePoolAzureAttributes:
-     availability: 'InstancePoolAzureAttributesAvailability' = None
-     spot_bid_max_price: float = None
-
-     def as_dict(self) -> dict:
-         body = {}
-         if self.availability: body['availability'] = self.availability.value
-         if self.spot_bid_max_price: body['spot_bid_max_price'] = self.spot_bid_max_price
-         return body
-
-     @classmethod
-     def from_dict(cls, d: Dict[str, any]) -> 'InstancePoolAzureAttributes':
-         return cls(availability=_enum(d, 'availability', InstancePoolAzureAttributesAvailability),
-                    spot_bid_max_price=d.get('spot_bid_max_price', None))
-
-
- class InstancePoolAzureAttributesAvailability(Enum):
-     """Shows the Availability type used for the spot nodes.
-
-     The default value is defined by InstancePoolConf.instancePoolDefaultAzureAvailability"""
-
-     ON_DEMAND_AZURE = 'ON_DEMAND_AZURE'
-     SPOT_AZURE = 'SPOT_AZURE'
-     SPOT_WITH_FALLBACK_AZURE = 'SPOT_WITH_FALLBACK_AZURE'
-
-
- @dataclass
- class InstancePoolFleetAttributes:
-     fleet_on_demand_option: 'FleetOnDemandOption' = None
-     fleet_spot_option: 'FleetSpotOption' = None
-     launch_template_overrides: 'List[FleetLaunchTemplateOverride]' = None
-
-     def as_dict(self) -> dict:
-         body = {}
-         if self.fleet_on_demand_option: body['fleet_on_demand_option'] = self.fleet_on_demand_option.as_dict()
-         if self.fleet_spot_option: body['fleet_spot_option'] = self.fleet_spot_option.as_dict()
-         if self.launch_template_overrides:
-             body['launch_template_overrides'] = [v.as_dict() for v in self.launch_template_overrides]
-         return body
-
-     @classmethod
-     def from_dict(cls, d: Dict[str, any]) -> 'InstancePoolFleetAttributes':
-         return cls(fleet_on_demand_option=_from_dict(d, 'fleet_on_demand_option', FleetOnDemandOption),
-                    fleet_spot_option=_from_dict(d, 'fleet_spot_option', FleetSpotOption),
-                    launch_template_overrides=_repeated(d, 'launch_template_overrides',
-                                                        FleetLaunchTemplateOverride))
-
-
- class InstancePoolState(Enum):
-     """Current state of the instance pool."""
-
-     ACTIVE = 'ACTIVE'
-     DELETED = 'DELETED'
-     STOPPED = 'STOPPED'
-
-
- @dataclass
- class InstancePoolStats:
-     idle_count: int = None
-     pending_idle_count: int = None
-     pending_used_count: int = None
-     used_count: int = None
-
-     def as_dict(self) -> dict:
-         body = {}
-         if self.idle_count: body['idle_count'] = self.idle_count
-         if self.pending_idle_count: body['pending_idle_count'] = self.pending_idle_count
-         if self.pending_used_count: body['pending_used_count'] = self.pending_used_count
-         if self.used_count: body['used_count'] = self.used_count
-         return body
-
-     @classmethod
-     def from_dict(cls, d: Dict[str, any]) -> 'InstancePoolStats':
-         return cls(idle_count=d.get('idle_count', None),
-                    pending_idle_count=d.get('pending_idle_count', None),
-                    pending_used_count=d.get('pending_used_count', None),
-                    used_count=d.get('used_count', None))
-
-
- @dataclass
- class InstancePoolStatus:
-     pending_instance_errors: 'List[PendingInstanceError]' = None
-
-     def as_dict(self) -> dict:
-         body = {}
-         if self.pending_instance_errors:
-             body['pending_instance_errors'] = [v.as_dict() for v in self.pending_instance_errors]
-         return body
-
-     @classmethod
-     def from_dict(cls, d: Dict[str, any]) -> 'InstancePoolStatus':
-         return cls(pending_instance_errors=_repeated(d, 'pending_instance_errors', PendingInstanceError))
-
-
- @dataclass
- class ListInstancePools:
-     instance_pools: 'List[InstancePoolAndStats]' = None
-
-     def as_dict(self) -> dict:
-         body = {}
-         if self.instance_pools: body['instance_pools'] = [v.as_dict() for v in self.instance_pools]
-         return body
-
-     @classmethod
-     def from_dict(cls, d: Dict[str, any]) -> 'ListInstancePools':
-         return cls(instance_pools=_repeated(d, 'instance_pools', InstancePoolAndStats))
-
-
- @dataclass
- class PendingInstanceError:
-     instance_id: str = None
-     message: str = None
-
-     def as_dict(self) -> dict:
-         body = {}
-         if self.instance_id: body['instance_id'] = self.instance_id
-         if self.message: body['message'] = self.message
-         return body
-
-     @classmethod
-     def from_dict(cls, d: Dict[str, any]) -> 'PendingInstanceError':
-         return cls(instance_id=d.get('instance_id', None), message=d.get('message', None))
-
-
- class InstancePoolsAPI:
-     """Instance Pools API are used to create, edit, delete and list instance pools by using ready-to-use cloud
-     instances which reduces a cluster start and auto-scaling times.
-
-     Databricks pools reduce cluster start and auto-scaling times by maintaining a set of idle, ready-to-use
-     instances. When a cluster is attached to a pool, cluster nodes are created using the pool’s idle
-     instances. If the pool has no idle instances, the pool expands by allocating a new instance from the
-     instance provider in order to accommodate the cluster’s request. When a cluster releases an instance, it
-     returns to the pool and is free for another cluster to use. Only clusters attached to a pool can use that
-     pool’s idle instances.
-
-     You can specify a different pool for the driver node and worker nodes, or use the same pool for both.
-
-     Databricks does not charge DBUs while instances are idle in the pool. Instance provider billing does
-     apply. See pricing."""
-
-     def __init__(self, api_client):
-         self._api = api_client
-
-     def create(self,
-                instance_pool_name: str,
-                node_type_id: str,
-                *,
-                aws_attributes: InstancePoolAwsAttributes = None,
-                azure_attributes: InstancePoolAzureAttributes = None,
-                custom_tags: Dict[str, str] = None,
-                disk_spec: DiskSpec = None,
-                enable_elastic_disk: bool = None,
-                idle_instance_autotermination_minutes: int = None,
-                instance_pool_fleet_attributes: InstancePoolFleetAttributes = None,
-                max_capacity: int = None,
-                min_idle_instances: int = None,
-                preloaded_docker_images: List[DockerImage] = None,
-                preloaded_spark_versions: List[str] = None,
-                **kwargs) -> CreateInstancePoolResponse:
-         """Create a new instance pool.
-
-         Creates a new instance pool using idle and ready-to-use cloud instances."""
-         request = kwargs.get('request', None)
-         if not request: # request is not given through keyed args
-             request = CreateInstancePool(
-                 aws_attributes=aws_attributes,
-                 azure_attributes=azure_attributes,
-                 custom_tags=custom_tags,
-                 disk_spec=disk_spec,
-                 enable_elastic_disk=enable_elastic_disk,
-                 idle_instance_autotermination_minutes=idle_instance_autotermination_minutes,
-                 instance_pool_fleet_attributes=instance_pool_fleet_attributes,
-                 instance_pool_name=instance_pool_name,
-                 max_capacity=max_capacity,
-                 min_idle_instances=min_idle_instances,
-                 node_type_id=node_type_id,
-                 preloaded_docker_images=preloaded_docker_images,
-                 preloaded_spark_versions=preloaded_spark_versions)
-         body = request.as_dict()
-
-         json = self._api.do('POST', '/api/2.0/instance-pools/create', body=body)
-         return CreateInstancePoolResponse.from_dict(json)
-
-     def delete(self, instance_pool_id: str, **kwargs):
-         """Delete an instance pool.
-
-         Deletes the instance pool permanently. The idle instances in the pool are terminated asynchronously."""
-         request = kwargs.get('request', None)
-         if not request: # request is not given through keyed args
-             request = DeleteInstancePool(instance_pool_id=instance_pool_id)
-         body = request.as_dict()
-         self._api.do('POST', '/api/2.0/instance-pools/delete', body=body)
-
-     def edit(self,
-              instance_pool_id: str,
-              instance_pool_name: str,
-              node_type_id: str,
-              *,
-              aws_attributes: InstancePoolAwsAttributes = None,
-              azure_attributes: InstancePoolAzureAttributes = None,
-              custom_tags: Dict[str, str] = None,
-              disk_spec: DiskSpec = None,
-              enable_elastic_disk: bool = None,
-              idle_instance_autotermination_minutes: int = None,
-              instance_pool_fleet_attributes: InstancePoolFleetAttributes = None,
-              max_capacity: int = None,
-              min_idle_instances: int = None,
-              preloaded_docker_images: List[DockerImage] = None,
-              preloaded_spark_versions: List[str] = None,
-              **kwargs):
-         """Edit an existing instance pool.
-
-         Modifies the configuration of an existing instance pool."""
-         request = kwargs.get('request', None)
-         if not request: # request is not given through keyed args
-             request = EditInstancePool(
-                 aws_attributes=aws_attributes,
-                 azure_attributes=azure_attributes,
-                 custom_tags=custom_tags,
-                 disk_spec=disk_spec,
-                 enable_elastic_disk=enable_elastic_disk,
-                 idle_instance_autotermination_minutes=idle_instance_autotermination_minutes,
-                 instance_pool_fleet_attributes=instance_pool_fleet_attributes,
-                 instance_pool_id=instance_pool_id,
-                 instance_pool_name=instance_pool_name,
-                 max_capacity=max_capacity,
-                 min_idle_instances=min_idle_instances,
-                 node_type_id=node_type_id,
-                 preloaded_docker_images=preloaded_docker_images,
-                 preloaded_spark_versions=preloaded_spark_versions)
-         body = request.as_dict()
-         self._api.do('POST', '/api/2.0/instance-pools/edit', body=body)
-
-     def get(self, instance_pool_id: str, **kwargs) -> GetInstancePool:
-         """Get instance pool information.
-
-         Retrieve the information for an instance pool based on its identifier."""
-         request = kwargs.get('request', None)
-         if not request: # request is not given through keyed args
-             request = Get(instance_pool_id=instance_pool_id)
-
-         query = {}
-         if instance_pool_id: query['instance_pool_id'] = request.instance_pool_id
-
-         json = self._api.do('GET', '/api/2.0/instance-pools/get', query=query)
-         return GetInstancePool.from_dict(json)
-
-     def list(self) -> Iterator[InstancePoolAndStats]:
-         """List instance pool info.
-
-         Gets a list of instance pools with their statistics."""
-
-         json = self._api.do('GET', '/api/2.0/instance-pools/list')
-         return [InstancePoolAndStats.from_dict(v) for v in json.get('instance_pools', [])]
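
Entry 27 in the files-changed list shows instancepools.py deleted while compute.py gained over 2,000 lines (alongside the deletion of clusterpolicies.py, commands.py, globalinitscripts.py, and libraries.py), suggesting the instance-pool types and InstancePoolsAPI were consolidated into databricks.sdk.service.compute in 0.1.1. A hedged migration sketch, assuming the 0.1.1 WorkspaceClient exposes this service as instance_pools; the attribute, parameter values, and return shape below are assumptions, not confirmed by this diff:

```python
from databricks.sdk import WorkspaceClient

# Credentials are resolved from the environment or ~/.databrickscfg.
w = WorkspaceClient()

# Rough equivalent of the removed InstancePoolsAPI.create() shown above,
# assumed to be reachable through the consolidated compute service in 0.1.1.
pool = w.instance_pools.create(instance_pool_name='sdk-example-pool',
                               node_type_id='i3.xlarge',
                               min_idle_instances=1)
print(pool.instance_pool_id)
```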