databricks-sdk 0.56.0__py3-none-any.whl → 0.58.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of databricks-sdk might be problematic; see the advisory on the registry page for more details.

Files changed (31)
  1. databricks/sdk/__init__.py +38 -11
  2. databricks/sdk/service/aibuilder.py +122 -17
  3. databricks/sdk/service/apps.py +15 -45
  4. databricks/sdk/service/billing.py +70 -74
  5. databricks/sdk/service/catalog.py +1898 -557
  6. databricks/sdk/service/cleanrooms.py +14 -55
  7. databricks/sdk/service/compute.py +305 -508
  8. databricks/sdk/service/dashboards.py +148 -223
  9. databricks/sdk/service/database.py +657 -127
  10. databricks/sdk/service/files.py +18 -54
  11. databricks/sdk/service/iam.py +55 -165
  12. databricks/sdk/service/jobs.py +238 -214
  13. databricks/sdk/service/marketplace.py +47 -146
  14. databricks/sdk/service/ml.py +1137 -447
  15. databricks/sdk/service/oauth2.py +17 -46
  16. databricks/sdk/service/pipelines.py +93 -69
  17. databricks/sdk/service/provisioning.py +34 -212
  18. databricks/sdk/service/qualitymonitorv2.py +5 -33
  19. databricks/sdk/service/serving.py +69 -55
  20. databricks/sdk/service/settings.py +106 -434
  21. databricks/sdk/service/sharing.py +33 -95
  22. databricks/sdk/service/sql.py +164 -254
  23. databricks/sdk/service/vectorsearch.py +13 -62
  24. databricks/sdk/service/workspace.py +36 -110
  25. databricks/sdk/version.py +1 -1
  26. {databricks_sdk-0.56.0.dist-info → databricks_sdk-0.58.0.dist-info}/METADATA +1 -1
  27. {databricks_sdk-0.56.0.dist-info → databricks_sdk-0.58.0.dist-info}/RECORD +31 -31
  28. {databricks_sdk-0.56.0.dist-info → databricks_sdk-0.58.0.dist-info}/WHEEL +0 -0
  29. {databricks_sdk-0.56.0.dist-info → databricks_sdk-0.58.0.dist-info}/licenses/LICENSE +0 -0
  30. {databricks_sdk-0.56.0.dist-info → databricks_sdk-0.58.0.dist-info}/licenses/NOTICE +0 -0
  31. {databricks_sdk-0.56.0.dist-info → databricks_sdk-0.58.0.dist-info}/top_level.txt +0 -0
@@ -133,7 +133,6 @@ class CloudResourceContainer:
133
133
  """The general workspace configurations that are specific to cloud providers."""
134
134
 
135
135
  gcp: Optional[CustomerFacingGcpCloudResourceContainer] = None
136
- """The general workspace configurations that are specific to Google Cloud."""
137
136
 
138
137
  def as_dict(self) -> dict:
139
138
  """Serializes the CloudResourceContainer into a dictionary suitable for use as a JSON request body."""
@@ -356,8 +355,6 @@ class CreateNetworkRequest:
356
355
  """The human-readable name of the network configuration."""
357
356
 
358
357
  gcp_network_info: Optional[GcpNetworkInfo] = None
359
- """The Google Cloud specific information for this network (for example, the VPC ID, subnet ID, and
360
- secondary IP ranges)."""
361
358
 
362
359
  security_group_ids: Optional[List[str]] = None
363
360
  """IDs of one to five security groups associated with this network. Security group IDs **cannot**
@@ -368,10 +365,6 @@ class CreateNetworkRequest:
368
365
  multiple network configurations."""
369
366
 
370
367
  vpc_endpoints: Optional[NetworkVpcEndpoints] = None
371
- """If specified, contains the VPC endpoints used to allow cluster communication from this VPC over
372
- [AWS PrivateLink].
373
-
374
- [AWS PrivateLink]: https://aws.amazon.com/privatelink/"""
375
368
 
376
369
  vpc_id: Optional[str] = None
377
370
  """The ID of the VPC associated with this network. VPC IDs can be used in multiple network
@@ -430,7 +423,6 @@ class CreateStorageConfigurationRequest:
430
423
  """The human-readable name of the storage configuration."""
431
424
 
432
425
  root_bucket_info: RootBucketInfo
433
- """Root S3 bucket information."""
434
426
 
435
427
  def as_dict(self) -> dict:
436
428
  """Serializes the CreateStorageConfigurationRequest into a dictionary suitable for use as a JSON request body."""
@@ -468,7 +460,6 @@ class CreateVpcEndpointRequest:
468
460
  """The ID of the VPC endpoint object in AWS."""
469
461
 
470
462
  gcp_vpc_endpoint_info: Optional[GcpVpcEndpointInfo] = None
471
- """The Google Cloud specific information for this Private Service Connect endpoint."""
472
463
 
473
464
  region: Optional[str] = None
474
465
  """The AWS region in which this VPC endpoint object exists."""
@@ -523,7 +514,6 @@ class CreateWorkspaceRequest:
523
514
  to `gcp`."""
524
515
 
525
516
  cloud_resource_container: Optional[CloudResourceContainer] = None
526
- """The general workspace configurations that are specific to cloud providers."""
527
517
 
528
518
  credentials_id: Optional[str] = None
529
519
  """ID of the workspace's credential configuration object."""
@@ -559,27 +549,8 @@ class CreateWorkspaceRequest:
559
549
  with the pattern `dbc-xxxxxxxx-xxxx`."""
560
550
 
561
551
  gcp_managed_network_config: Optional[GcpManagedNetworkConfig] = None
562
- """The network settings for the workspace. The configurations are only for Databricks-managed VPCs.
563
- It is ignored if you specify a customer-managed VPC in the `network_id` field.", All the IP
564
- range configurations must be mutually exclusive. An attempt to create a workspace fails if
565
- Databricks detects an IP range overlap.
566
-
567
- Specify custom IP ranges in CIDR format. The IP ranges for these fields must not overlap, and
568
- all IP addresses must be entirely within the following ranges: `10.0.0.0/8`, `100.64.0.0/10`,
569
- `172.16.0.0/12`, `192.168.0.0/16`, and `240.0.0.0/4`.
570
-
571
- The sizes of these IP ranges affect the maximum number of nodes for the workspace.
572
-
573
- **Important**: Confirm the IP ranges used by your Databricks workspace before creating the
574
- workspace. You cannot change them after your workspace is deployed. If the IP address ranges for
575
- your Databricks are too small, IP exhaustion can occur, causing your Databricks jobs to fail. To
576
- determine the address range sizes that you need, Databricks provides a calculator as a Microsoft
577
- Excel spreadsheet. See [calculate subnet sizes for a new workspace].
578
-
579
- [calculate subnet sizes for a new workspace]: https://docs.gcp.databricks.com/administration-guide/cloud-configurations/gcp/network-sizing.html"""
580
552
 
581
553
  gke_config: Optional[GkeConfig] = None
582
- """The configurations for the GKE cluster of a Databricks workspace."""
583
554
 
584
555
  is_no_public_ip_enabled: Optional[bool] = None
585
556
  """Whether no public IP is enabled for the workspace."""
@@ -597,9 +568,6 @@ class CreateWorkspaceRequest:
597
568
  network_id: Optional[str] = None
598
569
 
599
570
  pricing_tier: Optional[PricingTier] = None
600
- """The pricing tier of the workspace. For pricing tier information, see [AWS Pricing].
601
-
602
- [AWS Pricing]: https://databricks.com/product/aws-pricing"""
603
571
 
604
572
  private_access_settings_id: Optional[str] = None
605
573
  """ID of the workspace's private access settings object. Only used for PrivateLink. This ID must be
@@ -1259,8 +1227,6 @@ class Network:
1259
1227
  """Array of error messages about the network configuration."""
1260
1228
 
1261
1229
  gcp_network_info: Optional[GcpNetworkInfo] = None
1262
- """The Google Cloud specific information for this network (for example, the VPC ID, subnet ID, and
1263
- secondary IP ranges)."""
1264
1230
 
1265
1231
  network_id: Optional[str] = None
1266
1232
  """The Databricks network configuration ID."""
@@ -1273,18 +1239,12 @@ class Network:
1273
1239
  subnet_ids: Optional[List[str]] = None
1274
1240
 
1275
1241
  vpc_endpoints: Optional[NetworkVpcEndpoints] = None
1276
- """If specified, contains the VPC endpoints used to allow cluster communication from this VPC over
1277
- [AWS PrivateLink].
1278
-
1279
- [AWS PrivateLink]: https://aws.amazon.com/privatelink/"""
1280
1242
 
1281
1243
  vpc_id: Optional[str] = None
1282
1244
  """The ID of the VPC associated with this network configuration. VPC IDs can be used in multiple
1283
1245
  networks."""
1284
1246
 
1285
1247
  vpc_status: Optional[VpcStatus] = None
1286
- """The status of this network configuration object in terms of its use in a workspace: *
1287
- `UNATTACHED`: Unattached. * `VALID`: Valid. * `BROKEN`: Broken. * `WARNED`: Warned."""
1288
1248
 
1289
1249
  warning_messages: Optional[List[NetworkWarning]] = None
1290
1250
  """Array of warning messages about the network configuration."""
@@ -1380,8 +1340,6 @@ class NetworkHealth:
1380
1340
  """Details of the error."""
1381
1341
 
1382
1342
  error_type: Optional[ErrorType] = None
1383
- """The AWS resource associated with this error: credentials, VPC, subnet, security group, or
1384
- network ACL."""
1385
1343
 
1386
1344
  def as_dict(self) -> dict:
1387
1345
  """Serializes the NetworkHealth into a dictionary suitable for use as a JSON request body."""
@@ -1451,7 +1409,6 @@ class NetworkWarning:
1451
1409
  """Details of the warning."""
1452
1410
 
1453
1411
  warning_type: Optional[WarningType] = None
1454
- """The AWS resource associated with this warning: a subnet or a security group."""
1455
1412
 
1456
1413
  def as_dict(self) -> dict:
1457
1414
  """Serializes the NetworkWarning into a dictionary suitable for use as a JSON request body."""
@@ -1510,11 +1467,6 @@ class PrivateAccessSettings:
1510
1467
  """An array of Databricks VPC endpoint IDs."""
1511
1468
 
1512
1469
  private_access_level: Optional[PrivateAccessLevel] = None
1513
- """The private access level controls which VPC endpoints can connect to the UI or API of any
1514
- workspace that attaches this private access settings object. * `ACCOUNT` level access (the
1515
- default) allows only VPC endpoints that are registered in your Databricks account connect to
1516
- your workspace. * `ENDPOINT` level access allows only specified VPC endpoints connect to your
1517
- workspace. For details, see `allowed_vpc_endpoint_ids`."""
1518
1470
 
1519
1471
  private_access_settings_id: Optional[str] = None
1520
1472
  """Databricks private access settings ID."""
@@ -1637,7 +1589,6 @@ class StorageConfiguration:
1637
1589
  """Time in epoch milliseconds when the storage configuration was created."""
1638
1590
 
1639
1591
  root_bucket_info: Optional[RootBucketInfo] = None
1640
- """Root S3 bucket information."""
1641
1592
 
1642
1593
  storage_configuration_id: Optional[str] = None
1643
1594
  """Databricks storage configuration ID."""
@@ -1869,11 +1820,6 @@ class UpsertPrivateAccessSettingsRequest:
1869
1820
  [IP access lists]: https://docs.databricks.com/security/network/ip-access-list.html"""
1870
1821
 
1871
1822
  private_access_level: Optional[PrivateAccessLevel] = None
1872
- """The private access level controls which VPC endpoints can connect to the UI or API of any
1873
- workspace that attaches this private access settings object. * `ACCOUNT` level access (the
1874
- default) allows only VPC endpoints that are registered in your Databricks account connect to
1875
- your workspace. * `ENDPOINT` level access allows only specified VPC endpoints connect to your
1876
- workspace. For details, see `allowed_vpc_endpoint_ids`."""
1877
1823
 
1878
1824
  private_access_settings_id: Optional[str] = None
1879
1825
  """Databricks Account API private access settings ID."""
@@ -1951,7 +1897,6 @@ class VpcEndpoint:
1951
1897
  """The ID of the VPC endpoint object in AWS."""
1952
1898
 
1953
1899
  gcp_vpc_endpoint_info: Optional[GcpVpcEndpointInfo] = None
1954
- """The Google Cloud specific information for this Private Service Connect endpoint."""
1955
1900
 
1956
1901
  region: Optional[str] = None
1957
1902
  """The AWS region in which this VPC endpoint object exists."""
@@ -1963,10 +1908,6 @@ class VpcEndpoint:
1963
1908
  [AWS DescribeVpcEndpoint documentation]: https://docs.aws.amazon.com/cli/latest/reference/ec2/describe-vpc-endpoints.html"""
1964
1909
 
1965
1910
  use_case: Optional[EndpointUseCase] = None
1966
- """This enumeration represents the type of Databricks VPC [endpoint service] that was used when
1967
- creating this VPC endpoint.
1968
-
1969
- [endpoint service]: https://docs.aws.amazon.com/vpc/latest/privatelink/endpoint-service.html"""
1970
1911
 
1971
1912
  vpc_endpoint_id: Optional[str] = None
1972
1913
  """Databricks VPC endpoint ID. This is the Databricks-specific name of the VPC endpoint. Do not
@@ -2073,7 +2014,6 @@ class Workspace:
2073
2014
  """The cloud name. This field always has the value `gcp`."""
2074
2015
 
2075
2016
  cloud_resource_container: Optional[CloudResourceContainer] = None
2076
- """The general workspace configurations that are specific to cloud providers."""
2077
2017
 
2078
2018
  creation_time: Optional[int] = None
2079
2019
  """Time in epoch milliseconds when the workspace was created."""
@@ -2097,27 +2037,8 @@ class Workspace:
2097
2037
  workspace is not for a external customer, then external_customer_info is empty."""
2098
2038
 
2099
2039
  gcp_managed_network_config: Optional[GcpManagedNetworkConfig] = None
2100
- """The network settings for the workspace. The configurations are only for Databricks-managed VPCs.
2101
- It is ignored if you specify a customer-managed VPC in the `network_id` field.", All the IP
2102
- range configurations must be mutually exclusive. An attempt to create a workspace fails if
2103
- Databricks detects an IP range overlap.
2104
-
2105
- Specify custom IP ranges in CIDR format. The IP ranges for these fields must not overlap, and
2106
- all IP addresses must be entirely within the following ranges: `10.0.0.0/8`, `100.64.0.0/10`,
2107
- `172.16.0.0/12`, `192.168.0.0/16`, and `240.0.0.0/4`.
2108
-
2109
- The sizes of these IP ranges affect the maximum number of nodes for the workspace.
2110
-
2111
- **Important**: Confirm the IP ranges used by your Databricks workspace before creating the
2112
- workspace. You cannot change them after your workspace is deployed. If the IP address ranges for
2113
- your Databricks are too small, IP exhaustion can occur, causing your Databricks jobs to fail. To
2114
- determine the address range sizes that you need, Databricks provides a calculator as a Microsoft
2115
- Excel spreadsheet. See [calculate subnet sizes for a new workspace].
2116
-
2117
- [calculate subnet sizes for a new workspace]: https://docs.gcp.databricks.com/administration-guide/cloud-configurations/gcp/network-sizing.html"""
2118
2040
 
2119
2041
  gke_config: Optional[GkeConfig] = None
2120
- """The configurations for the GKE cluster of a Databricks workspace."""
2121
2042
 
2122
2043
  is_no_public_ip_enabled: Optional[bool] = None
2123
2044
  """Whether no public IP is enabled for the workspace."""
@@ -2134,9 +2055,6 @@ class Workspace:
2134
2055
  the network is a customer-managed network."""
2135
2056
 
2136
2057
  pricing_tier: Optional[PricingTier] = None
2137
- """The pricing tier of the workspace. For pricing tier information, see [AWS Pricing].
2138
-
2139
- [AWS Pricing]: https://databricks.com/product/aws-pricing"""
2140
2058
 
2141
2059
  private_access_settings_id: Optional[str] = None
2142
2060
  """ID of the workspace's private access settings object. Only used for PrivateLink. You must
@@ -2161,8 +2079,6 @@ class Workspace:
2161
2079
  """The human-readable name of the workspace."""
2162
2080
 
2163
2081
  workspace_status: Optional[WorkspaceStatus] = None
2164
- """The status of the workspace. For workspace creation, usually it is set to `PROVISIONING`
2165
- initially. Continue to check the status until the status is `RUNNING`."""
2166
2082
 
2167
2083
  workspace_status_message: Optional[str] = None
2168
2084
  """Message describing the current workspace status."""
@@ -2326,9 +2242,7 @@ class CredentialsAPI:
2326
2242
  self._api = api_client
2327
2243
 
2328
2244
  def create(self, credentials_name: str, aws_credentials: CreateCredentialAwsCredentials) -> Credential:
2329
- """Create credential configuration.
2330
-
2331
- Creates a Databricks credential configuration that represents cloud cross-account credentials for a
2245
+ """Creates a Databricks credential configuration that represents cloud cross-account credentials for a
2332
2246
  specified account. Databricks uses this to set up network infrastructure properly to host Databricks
2333
2247
  clusters. For your AWS IAM role, you need to trust the External ID (the Databricks Account API account
2334
2248
  ID) in the returned credential object, and configure the required access policy.
@@ -2361,9 +2275,7 @@ class CredentialsAPI:
2361
2275
  return Credential.from_dict(res)
2362
2276
 
2363
2277
  def delete(self, credentials_id: str):
2364
- """Delete credential configuration.
2365
-
2366
- Deletes a Databricks credential configuration object for an account, both specified by ID. You cannot
2278
+ """Deletes a Databricks credential configuration object for an account, both specified by ID. You cannot
2367
2279
  delete a credential that is associated with any workspace.
2368
2280
 
2369
2281
  :param credentials_id: str
@@ -2381,9 +2293,7 @@ class CredentialsAPI:
2381
2293
  )
2382
2294
 
2383
2295
  def get(self, credentials_id: str) -> Credential:
2384
- """Get credential configuration.
2385
-
2386
- Gets a Databricks credential configuration object for an account, both specified by ID.
2296
+ """Gets a Databricks credential configuration object for an account, both specified by ID.
2387
2297
 
2388
2298
  :param credentials_id: str
2389
2299
  Databricks Account API credential configuration ID
@@ -2401,9 +2311,8 @@ class CredentialsAPI:
2401
2311
  return Credential.from_dict(res)
2402
2312
 
2403
2313
  def list(self) -> Iterator[Credential]:
2404
- """Get all credential configurations.
2314
+ """Gets all Databricks credential configurations associated with an account specified by ID.
2405
2315
 
2406
- Gets all Databricks credential configurations associated with an account specified by ID.
2407
2316
 
2408
2317
  :returns: Iterator over :class:`Credential`
2409
2318
  """
@@ -2441,9 +2350,7 @@ class EncryptionKeysAPI:
2441
2350
  aws_key_info: Optional[CreateAwsKeyInfo] = None,
2442
2351
  gcp_key_info: Optional[CreateGcpKeyInfo] = None,
2443
2352
  ) -> CustomerManagedKey:
2444
- """Create encryption key configuration.
2445
-
2446
- Creates a customer-managed key configuration object for an account, specified by ID. This operation
2353
+ """Creates a customer-managed key configuration object for an account, specified by ID. This operation
2447
2354
  uploads a reference to a customer-managed key to Databricks. If the key is assigned as a workspace's
2448
2355
  customer-managed key for managed services, Databricks uses the key to encrypt the workspaces notebooks
2449
2356
  and secrets in the control plane, in addition to Databricks SQL queries and query history. If it is
@@ -2482,9 +2389,7 @@ class EncryptionKeysAPI:
2482
2389
  return CustomerManagedKey.from_dict(res)
2483
2390
 
2484
2391
  def delete(self, customer_managed_key_id: str):
2485
- """Delete encryption key configuration.
2486
-
2487
- Deletes a customer-managed key configuration object for an account. You cannot delete a configuration
2392
+ """Deletes a customer-managed key configuration object for an account. You cannot delete a configuration
2488
2393
  that is associated with a running workspace.
2489
2394
 
2490
2395
  :param customer_managed_key_id: str
@@ -2504,9 +2409,7 @@ class EncryptionKeysAPI:
2504
2409
  )
2505
2410
 
2506
2411
  def get(self, customer_managed_key_id: str) -> CustomerManagedKey:
2507
- """Get encryption key configuration.
2508
-
2509
- Gets a customer-managed key configuration object for an account, specified by ID. This operation
2412
+ """Gets a customer-managed key configuration object for an account, specified by ID. This operation
2510
2413
  uploads a reference to a customer-managed key to Databricks. If assigned as a workspace's
2511
2414
  customer-managed key for managed services, Databricks uses the key to encrypt the workspaces notebooks
2512
2415
  and secrets in the control plane, in addition to Databricks SQL queries and query history. If it is
@@ -2537,9 +2440,7 @@ class EncryptionKeysAPI:
2537
2440
  return CustomerManagedKey.from_dict(res)
2538
2441
 
2539
2442
  def list(self) -> Iterator[CustomerManagedKey]:
2540
- """Get all encryption key configurations.
2541
-
2542
- Gets all customer-managed key configuration objects for an account. If the key is specified as a
2443
+ """Gets all customer-managed key configuration objects for an account. If the key is specified as a
2543
2444
  workspace's managed services customer-managed key, Databricks uses the key to encrypt the workspace's
2544
2445
  notebooks and secrets in the control plane, in addition to Databricks SQL queries and query history.
2545
2446
  If the key is specified as a workspace's storage customer-managed key, the key is used to encrypt the
@@ -2550,6 +2451,7 @@ class EncryptionKeysAPI:
2550
2451
 
2551
2452
  This operation is available only if your account is on the E2 version of the platform.
2552
2453
 
2454
+
2553
2455
  :returns: Iterator over :class:`CustomerManagedKey`
2554
2456
  """
2555
2457
 
@@ -2578,16 +2480,12 @@ class NetworksAPI:
2578
2480
  vpc_endpoints: Optional[NetworkVpcEndpoints] = None,
2579
2481
  vpc_id: Optional[str] = None,
2580
2482
  ) -> Network:
2581
- """Create network configuration.
2582
-
2583
- Creates a Databricks network configuration that represents an VPC and its resources. The VPC will be
2483
+ """Creates a Databricks network configuration that represents an VPC and its resources. The VPC will be
2584
2484
  used for new Databricks clusters. This requires a pre-existing VPC and subnets.
2585
2485
 
2586
2486
  :param network_name: str
2587
2487
  The human-readable name of the network configuration.
2588
2488
  :param gcp_network_info: :class:`GcpNetworkInfo` (optional)
2589
- The Google Cloud specific information for this network (for example, the VPC ID, subnet ID, and
2590
- secondary IP ranges).
2591
2489
  :param security_group_ids: List[str] (optional)
2592
2490
  IDs of one to five security groups associated with this network. Security group IDs **cannot** be
2593
2491
  used in multiple network configurations.
@@ -2595,10 +2493,6 @@ class NetworksAPI:
2595
2493
  IDs of at least two subnets associated with this network. Subnet IDs **cannot** be used in multiple
2596
2494
  network configurations.
2597
2495
  :param vpc_endpoints: :class:`NetworkVpcEndpoints` (optional)
2598
- If specified, contains the VPC endpoints used to allow cluster communication from this VPC over [AWS
2599
- PrivateLink].
2600
-
2601
- [AWS PrivateLink]: https://aws.amazon.com/privatelink/
2602
2496
  :param vpc_id: str (optional)
2603
2497
  The ID of the VPC associated with this network. VPC IDs can be used in multiple network
2604
2498
  configurations.
@@ -2627,9 +2521,7 @@ class NetworksAPI:
2627
2521
  return Network.from_dict(res)
2628
2522
 
2629
2523
  def delete(self, network_id: str):
2630
- """Delete a network configuration.
2631
-
2632
- Deletes a Databricks network configuration, which represents a cloud VPC and its resources. You cannot
2524
+ """Deletes a Databricks network configuration, which represents a cloud VPC and its resources. You cannot
2633
2525
  delete a network that is associated with a workspace.
2634
2526
 
2635
2527
  This operation is available only if your account is on the E2 version of the platform.
@@ -2647,9 +2539,7 @@ class NetworksAPI:
2647
2539
  self._api.do("DELETE", f"/api/2.0/accounts/{self._api.account_id}/networks/{network_id}", headers=headers)
2648
2540
 
2649
2541
  def get(self, network_id: str) -> Network:
2650
- """Get a network configuration.
2651
-
2652
- Gets a Databricks network configuration, which represents a cloud VPC and its resources.
2542
+ """Gets a Databricks network configuration, which represents a cloud VPC and its resources.
2653
2543
 
2654
2544
  :param network_id: str
2655
2545
  Databricks Account API network configuration ID.
@@ -2665,12 +2555,11 @@ class NetworksAPI:
2665
2555
  return Network.from_dict(res)
2666
2556
 
2667
2557
  def list(self) -> Iterator[Network]:
2668
- """Get all network configurations.
2669
-
2670
- Gets a list of all Databricks network configurations for an account, specified by ID.
2558
+ """Gets a list of all Databricks network configurations for an account, specified by ID.
2671
2559
 
2672
2560
  This operation is available only if your account is on the E2 version of the platform.
2673
2561
 
2562
+
2674
2563
  :returns: Iterator over :class:`Network`
2675
2564
  """
2676
2565
 
@@ -2697,9 +2586,7 @@ class PrivateAccessAPI:
2697
2586
  private_access_level: Optional[PrivateAccessLevel] = None,
2698
2587
  public_access_enabled: Optional[bool] = None,
2699
2588
  ) -> PrivateAccessSettings:
2700
- """Create private access settings.
2701
-
2702
- Creates a private access settings object, which specifies how your workspace is accessed over [AWS
2589
+ """Creates a private access settings object, which specifies how your workspace is accessed over [AWS
2703
2590
  PrivateLink]. To use AWS PrivateLink, a workspace must have a private access settings object
2704
2591
  referenced by ID in the workspace's `private_access_settings_id` property.
2705
2592
 
@@ -2730,11 +2617,6 @@ class PrivateAccessAPI:
2730
2617
 
2731
2618
  [IP access lists]: https://docs.databricks.com/security/network/ip-access-list.html
2732
2619
  :param private_access_level: :class:`PrivateAccessLevel` (optional)
2733
- The private access level controls which VPC endpoints can connect to the UI or API of any workspace
2734
- that attaches this private access settings object. * `ACCOUNT` level access (the default) allows
2735
- only VPC endpoints that are registered in your Databricks account connect to your workspace. *
2736
- `ENDPOINT` level access allows only specified VPC endpoints connect to your workspace. For details,
2737
- see `allowed_vpc_endpoint_ids`.
2738
2620
  :param public_access_enabled: bool (optional)
2739
2621
  Determines if the workspace can be accessed over public internet. For fully private workspaces, you
2740
2622
  can optionally specify `false`, but only if you implement both the front-end and the back-end
@@ -2764,9 +2646,7 @@ class PrivateAccessAPI:
2764
2646
  return PrivateAccessSettings.from_dict(res)
2765
2647
 
2766
2648
  def delete(self, private_access_settings_id: str):
2767
- """Delete a private access settings object.
2768
-
2769
- Deletes a private access settings object, which determines how your workspace is accessed over [AWS
2649
+ """Deletes a private access settings object, which determines how your workspace is accessed over [AWS
2770
2650
  PrivateLink].
2771
2651
 
2772
2652
  Before configuring PrivateLink, read the [Databricks article about PrivateLink].",
@@ -2791,9 +2671,7 @@ class PrivateAccessAPI:
2791
2671
  )
2792
2672
 
2793
2673
  def get(self, private_access_settings_id: str) -> PrivateAccessSettings:
2794
- """Get a private access settings object.
2795
-
2796
- Gets a private access settings object, which specifies how your workspace is accessed over [AWS
2674
+ """Gets a private access settings object, which specifies how your workspace is accessed over [AWS
2797
2675
  PrivateLink].
2798
2676
 
2799
2677
  Before configuring PrivateLink, read the [Databricks article about PrivateLink].",
@@ -2819,9 +2697,8 @@ class PrivateAccessAPI:
2819
2697
  return PrivateAccessSettings.from_dict(res)
2820
2698
 
2821
2699
  def list(self) -> Iterator[PrivateAccessSettings]:
2822
- """Get all private access settings objects.
2700
+ """Gets a list of all private access settings objects for an account, specified by ID.
2823
2701
 
2824
- Gets a list of all private access settings objects for an account, specified by ID.
2825
2702
 
2826
2703
  :returns: Iterator over :class:`PrivateAccessSettings`
2827
2704
  """
@@ -2843,9 +2720,7 @@ class PrivateAccessAPI:
2843
2720
  private_access_level: Optional[PrivateAccessLevel] = None,
2844
2721
  public_access_enabled: Optional[bool] = None,
2845
2722
  ):
2846
- """Replace private access settings.
2847
-
2848
- Updates an existing private access settings object, which specifies how your workspace is accessed
2723
+ """Updates an existing private access settings object, which specifies how your workspace is accessed
2849
2724
  over [AWS PrivateLink]. To use AWS PrivateLink, a workspace must have a private access settings object
2850
2725
  referenced by ID in the workspace's `private_access_settings_id` property.
2851
2726
 
@@ -2883,11 +2758,6 @@ class PrivateAccessAPI:
2883
2758
 
2884
2759
  [IP access lists]: https://docs.databricks.com/security/network/ip-access-list.html
2885
2760
  :param private_access_level: :class:`PrivateAccessLevel` (optional)
2886
- The private access level controls which VPC endpoints can connect to the UI or API of any workspace
2887
- that attaches this private access settings object. * `ACCOUNT` level access (the default) allows
2888
- only VPC endpoints that are registered in your Databricks account connect to your workspace. *
2889
- `ENDPOINT` level access allows only specified VPC endpoints connect to your workspace. For details,
2890
- see `allowed_vpc_endpoint_ids`.
2891
2761
  :param public_access_enabled: bool (optional)
2892
2762
  Determines if the workspace can be accessed over public internet. For fully private workspaces, you
2893
2763
  can optionally specify `false`, but only if you implement both the front-end and the back-end
@@ -2929,9 +2799,7 @@ class StorageAPI:
2929
2799
  self._api = api_client
2930
2800
 
2931
2801
  def create(self, storage_configuration_name: str, root_bucket_info: RootBucketInfo) -> StorageConfiguration:
2932
- """Create new storage configuration.
2933
-
2934
- Creates new storage configuration for an account, specified by ID. Uploads a storage configuration
2802
+ """Creates new storage configuration for an account, specified by ID. Uploads a storage configuration
2935
2803
  object that represents the root AWS S3 bucket in your account. Databricks stores related workspace
2936
2804
  assets including DBFS, cluster logs, and job results. For the AWS S3 bucket, you need to configure the
2937
2805
  required bucket policy.
@@ -2944,7 +2812,6 @@ class StorageAPI:
2944
2812
  :param storage_configuration_name: str
2945
2813
  The human-readable name of the storage configuration.
2946
2814
  :param root_bucket_info: :class:`RootBucketInfo`
2947
- Root S3 bucket information.
2948
2815
 
2949
2816
  :returns: :class:`StorageConfiguration`
2950
2817
  """
@@ -2964,9 +2831,7 @@ class StorageAPI:
2964
2831
  return StorageConfiguration.from_dict(res)
2965
2832
 
2966
2833
  def delete(self, storage_configuration_id: str):
2967
- """Delete storage configuration.
2968
-
2969
- Deletes a Databricks storage configuration. You cannot delete a storage configuration that is
2834
+ """Deletes a Databricks storage configuration. You cannot delete a storage configuration that is
2970
2835
  associated with any workspace.
2971
2836
 
2972
2837
  :param storage_configuration_id: str
@@ -2986,9 +2851,7 @@ class StorageAPI:
2986
2851
  )
2987
2852
 
2988
2853
  def get(self, storage_configuration_id: str) -> StorageConfiguration:
2989
- """Get storage configuration.
2990
-
2991
- Gets a Databricks storage configuration for an account, both specified by ID.
2854
+ """Gets a Databricks storage configuration for an account, both specified by ID.
2992
2855
 
2993
2856
  :param storage_configuration_id: str
2994
2857
  Databricks Account API storage configuration ID.
@@ -3008,9 +2871,8 @@ class StorageAPI:
3008
2871
  return StorageConfiguration.from_dict(res)
3009
2872
 
3010
2873
  def list(self) -> Iterator[StorageConfiguration]:
3011
- """Get all storage configurations.
2874
+ """Gets a list of all Databricks storage configurations for your account, specified by ID.
3012
2875
 
3013
- Gets a list of all Databricks storage configurations for your account, specified by ID.
3014
2876
 
3015
2877
  :returns: Iterator over :class:`StorageConfiguration`
3016
2878
  """
@@ -3037,9 +2899,7 @@ class VpcEndpointsAPI:
3037
2899
  gcp_vpc_endpoint_info: Optional[GcpVpcEndpointInfo] = None,
3038
2900
  region: Optional[str] = None,
3039
2901
  ) -> VpcEndpoint:
3040
- """Create VPC endpoint configuration.
3041
-
3042
- Creates a VPC endpoint configuration, which represents a [VPC endpoint] object in AWS used to
2902
+ """Creates a VPC endpoint configuration, which represents a [VPC endpoint] object in AWS used to
3043
2903
  communicate privately with Databricks over [AWS PrivateLink].
3044
2904
 
3045
2905
  After you create the VPC endpoint configuration, the Databricks [endpoint service] automatically
@@ -3057,7 +2917,6 @@ class VpcEndpointsAPI:
3057
2917
  :param aws_vpc_endpoint_id: str (optional)
3058
2918
  The ID of the VPC endpoint object in AWS.
3059
2919
  :param gcp_vpc_endpoint_info: :class:`GcpVpcEndpointInfo` (optional)
3060
- The Google Cloud specific information for this Private Service Connect endpoint.
3061
2920
  :param region: str (optional)
3062
2921
  The AWS region in which this VPC endpoint object exists.
3063
2922
 
@@ -3083,9 +2942,7 @@ class VpcEndpointsAPI:
3083
2942
  return VpcEndpoint.from_dict(res)
3084
2943
 
3085
2944
  def delete(self, vpc_endpoint_id: str):
3086
- """Delete VPC endpoint configuration.
3087
-
3088
- Deletes a VPC endpoint configuration, which represents an [AWS VPC endpoint] that can communicate
2945
+ """Deletes a VPC endpoint configuration, which represents an [AWS VPC endpoint] that can communicate
3089
2946
  privately with Databricks over [AWS PrivateLink].
3090
2947
 
3091
2948
  Before configuring PrivateLink, read the [Databricks article about PrivateLink].
@@ -3109,9 +2966,7 @@ class VpcEndpointsAPI:
3109
2966
  )
3110
2967
 
3111
2968
  def get(self, vpc_endpoint_id: str) -> VpcEndpoint:
3112
- """Get a VPC endpoint configuration.
3113
-
3114
- Gets a VPC endpoint configuration, which represents a [VPC endpoint] object in AWS used to communicate
2969
+ """Gets a VPC endpoint configuration, which represents a [VPC endpoint] object in AWS used to communicate
3115
2970
  privately with Databricks over [AWS PrivateLink].
3116
2971
 
3117
2972
  [AWS PrivateLink]: https://aws.amazon.com/privatelink
@@ -3133,14 +2988,13 @@ class VpcEndpointsAPI:
3133
2988
  return VpcEndpoint.from_dict(res)
3134
2989
 
3135
2990
  def list(self) -> Iterator[VpcEndpoint]:
3136
- """Get all VPC endpoint configurations.
3137
-
3138
- Gets a list of all VPC endpoints for an account, specified by ID.
2991
+ """Gets a list of all VPC endpoints for an account, specified by ID.
3139
2992
 
3140
2993
  Before configuring PrivateLink, read the [Databricks article about PrivateLink].
3141
2994
 
3142
2995
  [Databricks article about PrivateLink]: https://docs.databricks.com/administration-guide/cloud-configurations/aws/privatelink.html
3143
2996
 
2997
+
3144
2998
  :returns: Iterator over :class:`VpcEndpoint`
3145
2999
  """
3146
3000
 
@@ -3216,9 +3070,7 @@ class WorkspacesAPI:
3216
3070
  storage_configuration_id: Optional[str] = None,
3217
3071
  storage_customer_managed_key_id: Optional[str] = None,
3218
3072
  ) -> Wait[Workspace]:
3219
- """Create a new workspace.
3220
-
3221
- Creates a new workspace.
3073
+ """Creates a new workspace.
3222
3074
 
3223
3075
  **Important**: This operation is asynchronous. A response with HTTP status code 200 means the request
3224
3076
  has been accepted and is in progress, but does not mean that the workspace deployed successfully and
@@ -3235,7 +3087,6 @@ class WorkspacesAPI:
3235
3087
  The cloud provider which the workspace uses. For Google Cloud workspaces, always set this field to
3236
3088
  `gcp`.
3237
3089
  :param cloud_resource_container: :class:`CloudResourceContainer` (optional)
3238
- The general workspace configurations that are specific to cloud providers.
3239
3090
  :param credentials_id: str (optional)
3240
3091
  ID of the workspace's credential configuration object.
3241
3092
  :param custom_tags: Dict[str,str] (optional)
@@ -3267,26 +3118,7 @@ class WorkspacesAPI:
3267
3118
  If a new workspace omits this property, the server generates a unique deployment name for you with
3268
3119
  the pattern `dbc-xxxxxxxx-xxxx`.
3269
3120
  :param gcp_managed_network_config: :class:`GcpManagedNetworkConfig` (optional)
3270
- The network settings for the workspace. The configurations are only for Databricks-managed VPCs. It
3271
- is ignored if you specify a customer-managed VPC in the `network_id` field.", All the IP range
3272
- configurations must be mutually exclusive. An attempt to create a workspace fails if Databricks
3273
- detects an IP range overlap.
3274
-
3275
- Specify custom IP ranges in CIDR format. The IP ranges for these fields must not overlap, and all IP
3276
- addresses must be entirely within the following ranges: `10.0.0.0/8`, `100.64.0.0/10`,
3277
- `172.16.0.0/12`, `192.168.0.0/16`, and `240.0.0.0/4`.
3278
-
3279
- The sizes of these IP ranges affect the maximum number of nodes for the workspace.
3280
-
3281
- **Important**: Confirm the IP ranges used by your Databricks workspace before creating the
3282
- workspace. You cannot change them after your workspace is deployed. If the IP address ranges for
3283
- your Databricks are too small, IP exhaustion can occur, causing your Databricks jobs to fail. To
3284
- determine the address range sizes that you need, Databricks provides a calculator as a Microsoft
3285
- Excel spreadsheet. See [calculate subnet sizes for a new workspace].
3286
-
3287
- [calculate subnet sizes for a new workspace]: https://docs.gcp.databricks.com/administration-guide/cloud-configurations/gcp/network-sizing.html
3288
3121
  :param gke_config: :class:`GkeConfig` (optional)
3289
- The configurations for the GKE cluster of a Databricks workspace.
3290
3122
  :param is_no_public_ip_enabled: bool (optional)
3291
3123
  Whether no public IP is enabled for the workspace.
3292
3124
  :param location: str (optional)
@@ -3297,9 +3129,6 @@ class WorkspacesAPI:
3297
3129
  history. The provided key configuration object property `use_cases` must contain `MANAGED_SERVICES`.
3298
3130
  :param network_id: str (optional)
3299
3131
  :param pricing_tier: :class:`PricingTier` (optional)
3300
- The pricing tier of the workspace. For pricing tier information, see [AWS Pricing].
3301
-
3302
- [AWS Pricing]: https://databricks.com/product/aws-pricing
3303
3132
  :param private_access_settings_id: str (optional)
3304
3133
  ID of the workspace's private access settings object. Only used for PrivateLink. This ID must be
3305
3134
  specified for customers using [AWS PrivateLink] for either front-end (user-to-workspace connection),
@@ -3412,9 +3241,7 @@ class WorkspacesAPI:
3412
3241
  ).result(timeout=timeout)
3413
3242
 
3414
3243
  def delete(self, workspace_id: int):
3415
- """Delete a workspace.
3416
-
3417
- Terminates and deletes a Databricks workspace. From an API perspective, deletion is immediate.
3244
+ """Terminates and deletes a Databricks workspace. From an API perspective, deletion is immediate.
3418
3245
  However, it might take a few minutes for all workspaces resources to be deleted, depending on the size
3419
3246
  and number of workspace resources.
3420
3247
 
@@ -3434,9 +3261,7 @@ class WorkspacesAPI:
3434
3261
  self._api.do("DELETE", f"/api/2.0/accounts/{self._api.account_id}/workspaces/{workspace_id}", headers=headers)
3435
3262
 
3436
3263
  def get(self, workspace_id: int) -> Workspace:
3437
- """Get a workspace.
3438
-
3439
- Gets information including status for a Databricks workspace, specified by ID. In the response, the
3264
+ """Gets information including status for a Databricks workspace, specified by ID. In the response, the
3440
3265
  `workspace_status` field indicates the current status. After initial workspace creation (which is
3441
3266
  asynchronous), make repeated `GET` requests with the workspace ID and check its status. The workspace
3442
3267
  becomes available when the status changes to `RUNNING`.
@@ -3465,13 +3290,12 @@ class WorkspacesAPI:
3465
3290
  return Workspace.from_dict(res)
3466
3291
 
3467
3292
  def list(self) -> Iterator[Workspace]:
3468
- """Get all workspaces.
3469
-
3470
- Gets a list of all workspaces associated with an account, specified by ID.
3293
+ """Gets a list of all workspaces associated with an account, specified by ID.
3471
3294
 
3472
3295
  This operation is available only if your account is on the E2 version of the platform or on a select
3473
3296
  custom plan that allows multiple workspaces per account.
3474
3297
 
3298
+
3475
3299
  :returns: Iterator over :class:`Workspace`
3476
3300
  """
3477
3301
 
@@ -3496,9 +3320,7 @@ class WorkspacesAPI:
3496
3320
  storage_configuration_id: Optional[str] = None,
3497
3321
  storage_customer_managed_key_id: Optional[str] = None,
3498
3322
  ) -> Wait[Workspace]:
3499
- """Update workspace configuration.
3500
-
3501
- Updates a workspace configuration for either a running workspace or a failed workspace. The elements
3323
+ """Updates a workspace configuration for either a running workspace or a failed workspace. The elements
3502
3324
  that can be updated varies between these two use cases.
3503
3325
 
3504
3326
  ### Update a failed workspace You can update a Databricks workspace configuration for failed workspace