databricks-sdk 0.0.7__py3-none-any.whl → 0.1.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of databricks-sdk has been flagged as potentially problematic; see the package registry's advisory page for details.

Files changed (41):
  1. databricks/sdk/__init__.py +121 -104
  2. databricks/sdk/core.py +76 -16
  3. databricks/sdk/dbutils.py +18 -17
  4. databricks/sdk/mixins/compute.py +6 -6
  5. databricks/sdk/mixins/dbfs.py +6 -6
  6. databricks/sdk/oauth.py +28 -14
  7. databricks/sdk/service/{unitycatalog.py → catalog.py} +375 -1146
  8. databricks/sdk/service/{clusters.py → compute.py} +2176 -61
  9. databricks/sdk/service/{dbfs.py → files.py} +6 -6
  10. databricks/sdk/service/{scim.py → iam.py} +567 -27
  11. databricks/sdk/service/jobs.py +44 -34
  12. databricks/sdk/service/{mlflow.py → ml.py} +976 -1071
  13. databricks/sdk/service/oauth2.py +3 -3
  14. databricks/sdk/service/pipelines.py +46 -30
  15. databricks/sdk/service/{deployment.py → provisioning.py} +47 -29
  16. databricks/sdk/service/settings.py +849 -0
  17. databricks/sdk/service/sharing.py +1176 -0
  18. databricks/sdk/service/sql.py +15 -15
  19. databricks/sdk/service/workspace.py +917 -22
  20. databricks/sdk/version.py +1 -1
  21. {databricks_sdk-0.0.7.dist-info → databricks_sdk-0.1.1.dist-info}/METADATA +3 -1
  22. databricks_sdk-0.1.1.dist-info/RECORD +37 -0
  23. databricks/sdk/service/clusterpolicies.py +0 -399
  24. databricks/sdk/service/commands.py +0 -478
  25. databricks/sdk/service/gitcredentials.py +0 -202
  26. databricks/sdk/service/globalinitscripts.py +0 -262
  27. databricks/sdk/service/instancepools.py +0 -757
  28. databricks/sdk/service/ipaccesslists.py +0 -340
  29. databricks/sdk/service/libraries.py +0 -282
  30. databricks/sdk/service/permissions.py +0 -470
  31. databricks/sdk/service/repos.py +0 -250
  32. databricks/sdk/service/secrets.py +0 -472
  33. databricks/sdk/service/tokenmanagement.py +0 -182
  34. databricks/sdk/service/tokens.py +0 -137
  35. databricks/sdk/service/workspaceconf.py +0 -50
  36. databricks_sdk-0.0.7.dist-info/RECORD +0 -48
  37. /databricks/sdk/service/{endpoints.py → serving.py} +0 -0
  38. {databricks_sdk-0.0.7.dist-info → databricks_sdk-0.1.1.dist-info}/LICENSE +0 -0
  39. {databricks_sdk-0.0.7.dist-info → databricks_sdk-0.1.1.dist-info}/NOTICE +0 -0
  40. {databricks_sdk-0.0.7.dist-info → databricks_sdk-0.1.1.dist-info}/WHEEL +0 -0
  41. {databricks_sdk-0.0.7.dist-info → databricks_sdk-0.1.1.dist-info}/top_level.txt +0 -0
@@ -2,7 +2,7 @@
2
2
 
3
3
  import logging
4
4
  from dataclasses import dataclass
5
- from typing import Any, Dict, Iterator, List
5
+ from typing import Dict, Iterator, List
6
6
 
7
7
  from ._internal import _from_dict, _repeated
8
8
 
@@ -223,8 +223,8 @@ class OAuthEnrollmentStatus:
223
223
 
224
224
  @dataclass
225
225
  class TokenAccessPolicy:
226
- access_token_ttl_in_minutes: Any = None
227
- refresh_token_ttl_in_minutes: Any = None
226
+ access_token_ttl_in_minutes: int = None
227
+ refresh_token_ttl_in_minutes: int = None
228
228
 
229
229
  def as_dict(self) -> dict:
230
230
  body = {}
@@ -13,9 +13,8 @@ from ._internal import Wait, _enum, _from_dict, _repeated
13
13
 
14
14
  _LOG = logging.getLogger('databricks.sdk')
15
15
 
16
- from .clusters import (AutoScale, AwsAttributes, AzureAttributes,
17
- ClusterLogConf, GcpAttributes)
18
- from .libraries import MavenLibrary
16
+ from .compute import (AutoScale, AwsAttributes, AzureAttributes,
17
+ ClusterLogConf, GcpAttributes, MavenLibrary)
19
18
 
20
19
  # all definitions in this file are in alphabetical order
21
20
 
@@ -133,7 +132,7 @@ class DataPlaneId:
133
132
 
134
133
 
135
134
  @dataclass
136
- class Delete:
135
+ class DeletePipelineRequest:
137
136
  """Delete a pipeline"""
138
137
 
139
138
  pipeline_id: str
@@ -229,6 +228,20 @@ class EventLevel(Enum):
229
228
  WARN = 'WARN'
230
229
 
231
230
 
231
+ @dataclass
232
+ class FileLibrary:
233
+ path: str = None
234
+
235
+ def as_dict(self) -> dict:
236
+ body = {}
237
+ if self.path: body['path'] = self.path
238
+ return body
239
+
240
+ @classmethod
241
+ def from_dict(cls, d: Dict[str, any]) -> 'FileLibrary':
242
+ return cls(path=d.get('path', None))
243
+
244
+
232
245
  @dataclass
233
246
  class Filters:
234
247
  exclude: 'List[str]' = None
@@ -246,7 +259,7 @@ class Filters:
246
259
 
247
260
 
248
261
  @dataclass
249
- class Get:
262
+ class GetPipelineRequest:
250
263
  """Get a pipeline"""
251
264
 
252
265
  pipeline_id: str
@@ -304,7 +317,7 @@ class GetPipelineResponseHealth(Enum):
304
317
 
305
318
 
306
319
  @dataclass
307
- class GetUpdate:
320
+ class GetUpdateRequest:
308
321
  """Get a pipeline update"""
309
322
 
310
323
  pipeline_id: str
@@ -326,7 +339,7 @@ class GetUpdateResponse:
326
339
 
327
340
 
328
341
  @dataclass
329
- class ListPipelineEvents:
342
+ class ListPipelineEventsRequest:
330
343
  """List pipeline events"""
331
344
 
332
345
  pipeline_id: str
@@ -357,7 +370,7 @@ class ListPipelineEventsResponse:
357
370
 
358
371
 
359
372
  @dataclass
360
- class ListPipelines:
373
+ class ListPipelinesRequest:
361
374
  """List pipelines"""
362
375
 
363
376
  filter: str = None
@@ -384,7 +397,7 @@ class ListPipelinesResponse:
384
397
 
385
398
 
386
399
  @dataclass
387
- class ListUpdates:
400
+ class ListUpdatesRequest:
388
401
  """List pipeline updates"""
389
402
 
390
403
  pipeline_id: str
@@ -600,6 +613,7 @@ class PipelineEvent:
600
613
 
601
614
  @dataclass
602
615
  class PipelineLibrary:
616
+ file: 'FileLibrary' = None
603
617
  jar: str = None
604
618
  maven: 'MavenLibrary' = None
605
619
  notebook: 'NotebookLibrary' = None
@@ -607,6 +621,7 @@ class PipelineLibrary:
607
621
 
608
622
  def as_dict(self) -> dict:
609
623
  body = {}
624
+ if self.file: body['file'] = self.file.as_dict()
610
625
  if self.jar: body['jar'] = self.jar
611
626
  if self.maven: body['maven'] = self.maven.as_dict()
612
627
  if self.notebook: body['notebook'] = self.notebook.as_dict()
@@ -615,7 +630,8 @@ class PipelineLibrary:
615
630
 
616
631
  @classmethod
617
632
  def from_dict(cls, d: Dict[str, any]) -> 'PipelineLibrary':
618
- return cls(jar=d.get('jar', None),
633
+ return cls(file=_from_dict(d, 'file', FileLibrary),
634
+ jar=d.get('jar', None),
619
635
  maven=_from_dict(d, 'maven', MavenLibrary),
620
636
  notebook=_from_dict(d, 'notebook', NotebookLibrary),
621
637
  whl=d.get('whl', None))
@@ -740,7 +756,7 @@ class PipelineTrigger:
740
756
 
741
757
 
742
758
  @dataclass
743
- class Reset:
759
+ class ResetRequest:
744
760
  """Reset a pipeline"""
745
761
 
746
762
  pipeline_id: str
@@ -858,7 +874,7 @@ class StartUpdateResponse:
858
874
 
859
875
 
860
876
  @dataclass
861
- class Stop:
877
+ class StopRequest:
862
878
  """Stop a pipeline"""
863
879
 
864
880
  pipeline_id: str
@@ -1098,7 +1114,7 @@ class PipelinesAPI:
1098
1114
  Deletes a pipeline."""
1099
1115
  request = kwargs.get('request', None)
1100
1116
  if not request: # request is not given through keyed args
1101
- request = Delete(pipeline_id=pipeline_id)
1117
+ request = DeletePipelineRequest(pipeline_id=pipeline_id)
1102
1118
 
1103
1119
  self._api.do('DELETE', f'/api/2.0/pipelines/{request.pipeline_id}')
1104
1120
 
@@ -1106,7 +1122,7 @@ class PipelinesAPI:
1106
1122
  """Get a pipeline."""
1107
1123
  request = kwargs.get('request', None)
1108
1124
  if not request: # request is not given through keyed args
1109
- request = Get(pipeline_id=pipeline_id)
1125
+ request = GetPipelineRequest(pipeline_id=pipeline_id)
1110
1126
 
1111
1127
  json = self._api.do('GET', f'/api/2.0/pipelines/{request.pipeline_id}')
1112
1128
  return GetPipelineResponse.from_dict(json)
@@ -1117,7 +1133,7 @@ class PipelinesAPI:
1117
1133
  Gets an update from an active pipeline."""
1118
1134
  request = kwargs.get('request', None)
1119
1135
  if not request: # request is not given through keyed args
1120
- request = GetUpdate(pipeline_id=pipeline_id, update_id=update_id)
1136
+ request = GetUpdateRequest(pipeline_id=pipeline_id, update_id=update_id)
1121
1137
 
1122
1138
  json = self._api.do('GET', f'/api/2.0/pipelines/{request.pipeline_id}/updates/{request.update_id}')
1123
1139
  return GetUpdateResponse.from_dict(json)
@@ -1135,11 +1151,11 @@ class PipelinesAPI:
1135
1151
  Retrieves events for a pipeline."""
1136
1152
  request = kwargs.get('request', None)
1137
1153
  if not request: # request is not given through keyed args
1138
- request = ListPipelineEvents(filter=filter,
1139
- max_results=max_results,
1140
- order_by=order_by,
1141
- page_token=page_token,
1142
- pipeline_id=pipeline_id)
1154
+ request = ListPipelineEventsRequest(filter=filter,
1155
+ max_results=max_results,
1156
+ order_by=order_by,
1157
+ page_token=page_token,
1158
+ pipeline_id=pipeline_id)
1143
1159
 
1144
1160
  query = {}
1145
1161
  if filter: query['filter'] = request.filter
@@ -1169,10 +1185,10 @@ class PipelinesAPI:
1169
1185
  Lists pipelines defined in the Delta Live Tables system."""
1170
1186
  request = kwargs.get('request', None)
1171
1187
  if not request: # request is not given through keyed args
1172
- request = ListPipelines(filter=filter,
1173
- max_results=max_results,
1174
- order_by=order_by,
1175
- page_token=page_token)
1188
+ request = ListPipelinesRequest(filter=filter,
1189
+ max_results=max_results,
1190
+ order_by=order_by,
1191
+ page_token=page_token)
1176
1192
 
1177
1193
  query = {}
1178
1194
  if filter: query['filter'] = request.filter
@@ -1202,10 +1218,10 @@ class PipelinesAPI:
1202
1218
  List updates for an active pipeline."""
1203
1219
  request = kwargs.get('request', None)
1204
1220
  if not request: # request is not given through keyed args
1205
- request = ListUpdates(max_results=max_results,
1206
- page_token=page_token,
1207
- pipeline_id=pipeline_id,
1208
- until_update_id=until_update_id)
1221
+ request = ListUpdatesRequest(max_results=max_results,
1222
+ page_token=page_token,
1223
+ pipeline_id=pipeline_id,
1224
+ until_update_id=until_update_id)
1209
1225
 
1210
1226
  query = {}
1211
1227
  if max_results: query['max_results'] = request.max_results
@@ -1221,7 +1237,7 @@ class PipelinesAPI:
1221
1237
  Resets a pipeline."""
1222
1238
  request = kwargs.get('request', None)
1223
1239
  if not request: # request is not given through keyed args
1224
- request = Reset(pipeline_id=pipeline_id)
1240
+ request = ResetRequest(pipeline_id=pipeline_id)
1225
1241
 
1226
1242
  self._api.do('POST', f'/api/2.0/pipelines/{request.pipeline_id}/reset')
1227
1243
  return Wait(self.wait_get_pipeline_running, pipeline_id=request.pipeline_id)
@@ -1258,7 +1274,7 @@ class PipelinesAPI:
1258
1274
  Stops a pipeline."""
1259
1275
  request = kwargs.get('request', None)
1260
1276
  if not request: # request is not given through keyed args
1261
- request = Stop(pipeline_id=pipeline_id)
1277
+ request = StopRequest(pipeline_id=pipeline_id)
1262
1278
 
1263
1279
  self._api.do('POST', f'/api/2.0/pipelines/{request.pipeline_id}/stop')
1264
1280
  return Wait(self.wait_get_pipeline_idle, pipeline_id=request.pipeline_id)
@@ -203,12 +203,14 @@ class CreateStorageConfigurationRequest:
203
203
  @dataclass
204
204
  class CreateVpcEndpointRequest:
205
205
  vpc_endpoint_name: str
206
- aws_vpc_endpoint_id: str
207
- region: str
206
+ aws_vpc_endpoint_id: str = None
207
+ gcp_vpc_endpoint_info: 'GcpVpcEndpointInfo' = None
208
+ region: str = None
208
209
 
209
210
  def as_dict(self) -> dict:
210
211
  body = {}
211
212
  if self.aws_vpc_endpoint_id: body['aws_vpc_endpoint_id'] = self.aws_vpc_endpoint_id
213
+ if self.gcp_vpc_endpoint_info: body['gcp_vpc_endpoint_info'] = self.gcp_vpc_endpoint_info.as_dict()
212
214
  if self.region: body['region'] = self.region
213
215
  if self.vpc_endpoint_name: body['vpc_endpoint_name'] = self.vpc_endpoint_name
214
216
  return body
@@ -216,6 +218,7 @@ class CreateVpcEndpointRequest:
216
218
  @classmethod
217
219
  def from_dict(cls, d: Dict[str, any]) -> 'CreateVpcEndpointRequest':
218
220
  return cls(aws_vpc_endpoint_id=d.get('aws_vpc_endpoint_id', None),
221
+ gcp_vpc_endpoint_info=_from_dict(d, 'gcp_vpc_endpoint_info', GcpVpcEndpointInfo),
219
222
  region=d.get('region', None),
220
223
  vpc_endpoint_name=d.get('vpc_endpoint_name', None))
221
224
 
@@ -396,14 +399,7 @@ class EndpointUseCase(Enum):
396
399
  """This enumeration represents the type of Databricks VPC [endpoint service] that was used when
397
400
  creating this VPC endpoint.
398
401
 
399
- If the VPC endpoint connects to the Databricks control plane for either the front-end connection
400
- or the back-end REST API connection, the value is `WORKSPACE_ACCESS`.
401
-
402
- If the VPC endpoint connects to the Databricks workspace for the back-end [secure cluster
403
- connectivity] relay, the value is `DATAPLANE_RELAY_ACCESS`.
404
-
405
- [endpoint service]: https://docs.aws.amazon.com/vpc/latest/privatelink/endpoint-service.html
406
- [secure cluster connectivity]: https://docs.databricks.com/security/secure-cluster-connectivity.html"""
402
+ [endpoint service]: https://docs.aws.amazon.com/vpc/latest/privatelink/endpoint-service.html"""
407
403
 
408
404
  DATAPLANE_RELAY_ACCESS = 'DATAPLANE_RELAY_ACCESS'
409
405
  WORKSPACE_ACCESS = 'WORKSPACE_ACCESS'
@@ -492,6 +488,34 @@ class GcpNetworkInfo:
492
488
  vpc_id=d.get('vpc_id', None))
493
489
 
494
490
 
491
+ @dataclass
492
+ class GcpVpcEndpointInfo:
493
+ """The Google Cloud specific information for this Private Service Connect endpoint."""
494
+
495
+ project_id: str
496
+ psc_endpoint_name: str
497
+ endpoint_region: str
498
+ psc_connection_id: str = None
499
+ service_attachment_id: str = None
500
+
501
+ def as_dict(self) -> dict:
502
+ body = {}
503
+ if self.endpoint_region: body['endpoint_region'] = self.endpoint_region
504
+ if self.project_id: body['project_id'] = self.project_id
505
+ if self.psc_connection_id: body['psc_connection_id'] = self.psc_connection_id
506
+ if self.psc_endpoint_name: body['psc_endpoint_name'] = self.psc_endpoint_name
507
+ if self.service_attachment_id: body['service_attachment_id'] = self.service_attachment_id
508
+ return body
509
+
510
+ @classmethod
511
+ def from_dict(cls, d: Dict[str, any]) -> 'GcpVpcEndpointInfo':
512
+ return cls(endpoint_region=d.get('endpoint_region', None),
513
+ project_id=d.get('project_id', None),
514
+ psc_connection_id=d.get('psc_connection_id', None),
515
+ psc_endpoint_name=d.get('psc_endpoint_name', None),
516
+ service_attachment_id=d.get('service_attachment_id', None))
517
+
518
+
495
519
  @dataclass
496
520
  class GetCredentialRequest:
497
521
  """Get credential configuration"""
@@ -875,6 +899,7 @@ class VpcEndpoint:
875
899
  aws_account_id: str = None
876
900
  aws_endpoint_service_id: str = None
877
901
  aws_vpc_endpoint_id: str = None
902
+ gcp_vpc_endpoint_info: 'GcpVpcEndpointInfo' = None
878
903
  region: str = None
879
904
  state: str = None
880
905
  use_case: 'EndpointUseCase' = None
@@ -887,6 +912,7 @@ class VpcEndpoint:
887
912
  if self.aws_account_id: body['aws_account_id'] = self.aws_account_id
888
913
  if self.aws_endpoint_service_id: body['aws_endpoint_service_id'] = self.aws_endpoint_service_id
889
914
  if self.aws_vpc_endpoint_id: body['aws_vpc_endpoint_id'] = self.aws_vpc_endpoint_id
915
+ if self.gcp_vpc_endpoint_info: body['gcp_vpc_endpoint_info'] = self.gcp_vpc_endpoint_info.as_dict()
890
916
  if self.region: body['region'] = self.region
891
917
  if self.state: body['state'] = self.state
892
918
  if self.use_case: body['use_case'] = self.use_case.value
@@ -900,6 +926,7 @@ class VpcEndpoint:
900
926
  aws_account_id=d.get('aws_account_id', None),
901
927
  aws_endpoint_service_id=d.get('aws_endpoint_service_id', None),
902
928
  aws_vpc_endpoint_id=d.get('aws_vpc_endpoint_id', None),
929
+ gcp_vpc_endpoint_info=_from_dict(d, 'gcp_vpc_endpoint_info', GcpVpcEndpointInfo),
903
930
  region=d.get('region', None),
904
931
  state=d.get('state', None),
905
932
  use_case=_enum(d, 'use_case', EndpointUseCase),
@@ -1251,13 +1278,7 @@ class NetworksAPI:
1251
1278
 
1252
1279
 
1253
1280
  class PrivateAccessAPI:
1254
- """These APIs manage private access settings for this account. A private access settings object specifies how
1255
- your workspace is accessed using AWS PrivateLink. Each workspace that has any PrivateLink connections must
1256
- include the ID for a private access settings object is in its workspace configuration object. Your account
1257
- must be enabled for PrivateLink to use these APIs. Before configuring PrivateLink, it is important to read
1258
- the [Databricks article about PrivateLink].
1259
-
1260
- [Databricks article about PrivateLink]: https://docs.databricks.com/administration-guide/cloud-configurations/aws/privatelink.html"""
1281
+ """These APIs manage private access settings for this account."""
1261
1282
 
1262
1283
  def __init__(self, api_client):
1263
1284
  self._api = api_client
@@ -1463,19 +1484,18 @@ class StorageAPI:
1463
1484
 
1464
1485
 
1465
1486
  class VpcEndpointsAPI:
1466
- """These APIs manage VPC endpoint configurations for this account. This object registers an AWS VPC endpoint
1467
- in your Databricks account so your workspace can use it with AWS PrivateLink. Your VPC endpoint connects
1468
- to one of two VPC endpoint services -- one for workspace (both for front-end connection and for back-end
1469
- connection to REST APIs) and one for the back-end secure cluster connectivity relay from the data plane.
1470
- Your account must be enabled for PrivateLink to use these APIs. Before configuring PrivateLink, it is
1471
- important to read the [Databricks article about PrivateLink].
1472
-
1473
- [Databricks article about PrivateLink]: https://docs.databricks.com/administration-guide/cloud-configurations/aws/privatelink.html"""
1487
+ """These APIs manage VPC endpoint configurations for this account."""
1474
1488
 
1475
1489
  def __init__(self, api_client):
1476
1490
  self._api = api_client
1477
1491
 
1478
- def create(self, vpc_endpoint_name: str, aws_vpc_endpoint_id: str, region: str, **kwargs) -> VpcEndpoint:
1492
+ def create(self,
1493
+ vpc_endpoint_name: str,
1494
+ *,
1495
+ aws_vpc_endpoint_id: str = None,
1496
+ gcp_vpc_endpoint_info: GcpVpcEndpointInfo = None,
1497
+ region: str = None,
1498
+ **kwargs) -> VpcEndpoint:
1479
1499
  """Create VPC endpoint configuration.
1480
1500
 
1481
1501
  Creates a VPC endpoint configuration, which represents a [VPC endpoint] object in AWS used to
@@ -1493,6 +1513,7 @@ class VpcEndpointsAPI:
1493
1513
  request = kwargs.get('request', None)
1494
1514
  if not request: # request is not given through keyed args
1495
1515
  request = CreateVpcEndpointRequest(aws_vpc_endpoint_id=aws_vpc_endpoint_id,
1516
+ gcp_vpc_endpoint_info=gcp_vpc_endpoint_info,
1496
1517
  region=region,
1497
1518
  vpc_endpoint_name=vpc_endpoint_name)
1498
1519
  body = request.as_dict()
@@ -1506,9 +1527,6 @@ class VpcEndpointsAPI:
1506
1527
  Deletes a VPC endpoint configuration, which represents an [AWS VPC endpoint] that can communicate
1507
1528
  privately with Databricks over [AWS PrivateLink].
1508
1529
 
1509
- Upon deleting a VPC endpoint configuration, the VPC endpoint in AWS changes its state from `accepted`
1510
- to `rejected`, which means that it is no longer usable from your VPC.
1511
-
1512
1530
  Before configuring PrivateLink, read the [Databricks article about PrivateLink].
1513
1531
 
1514
1532
  [AWS PrivateLink]: https://aws.amazon.com/privatelink