databricks-sdk 0.57.0__py3-none-any.whl → 0.58.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of databricks-sdk might be problematic. See the details below.

Files changed (30)
  1. databricks/sdk/__init__.py +25 -4
  2. databricks/sdk/service/aibuilder.py +0 -36
  3. databricks/sdk/service/apps.py +1 -3
  4. databricks/sdk/service/billing.py +53 -23
  5. databricks/sdk/service/catalog.py +1692 -150
  6. databricks/sdk/service/cleanrooms.py +3 -22
  7. databricks/sdk/service/compute.py +245 -322
  8. databricks/sdk/service/dashboards.py +129 -162
  9. databricks/sdk/service/database.py +612 -97
  10. databricks/sdk/service/iam.py +3 -3
  11. databricks/sdk/service/jobs.py +6 -129
  12. databricks/sdk/service/marketplace.py +3 -2
  13. databricks/sdk/service/ml.py +713 -262
  14. databricks/sdk/service/oauth2.py +0 -1
  15. databricks/sdk/service/pipelines.py +12 -29
  16. databricks/sdk/service/provisioning.py +7 -125
  17. databricks/sdk/service/qualitymonitorv2.py +0 -18
  18. databricks/sdk/service/serving.py +39 -13
  19. databricks/sdk/service/settings.py +11 -128
  20. databricks/sdk/service/sharing.py +3 -9
  21. databricks/sdk/service/sql.py +94 -74
  22. databricks/sdk/service/vectorsearch.py +0 -19
  23. databricks/sdk/service/workspace.py +2 -6
  24. databricks/sdk/version.py +1 -1
  25. {databricks_sdk-0.57.0.dist-info → databricks_sdk-0.58.0.dist-info}/METADATA +1 -1
  26. {databricks_sdk-0.57.0.dist-info → databricks_sdk-0.58.0.dist-info}/RECORD +30 -30
  27. {databricks_sdk-0.57.0.dist-info → databricks_sdk-0.58.0.dist-info}/WHEEL +0 -0
  28. {databricks_sdk-0.57.0.dist-info → databricks_sdk-0.58.0.dist-info}/licenses/LICENSE +0 -0
  29. {databricks_sdk-0.57.0.dist-info → databricks_sdk-0.58.0.dist-info}/licenses/NOTICE +0 -0
  30. {databricks_sdk-0.57.0.dist-info → databricks_sdk-0.58.0.dist-info}/top_level.txt +0 -0
@@ -366,7 +366,6 @@ class FederationPolicy:
366
366
  the request URL."""
367
367
 
368
368
  oidc_policy: Optional[OidcFederationPolicy] = None
369
- """Specifies the policy to use for validating OIDC claims in your federated tokens."""
370
369
 
371
370
  policy_id: Optional[str] = None
372
371
  """The ID of the federation policy."""
@@ -98,12 +98,6 @@ class CreatePipeline:
98
98
  pipeline execution."""
99
99
 
100
100
  run_as: Optional[RunAs] = None
101
- """Write-only setting, available only in Create/Update calls. Specifies the user or service
102
- principal that the pipeline runs as. If not specified, the pipeline runs as the user who created
103
- the pipeline.
104
-
105
- Only `user_name` or `service_principal_name` can be specified. If both are specified, an error
106
- is thrown."""
107
101
 
108
102
  schema: Optional[str] = None
109
103
  """The default schema (database) where tables are read from or published to."""
@@ -510,12 +504,6 @@ class EditPipeline:
510
504
  pipeline execution."""
511
505
 
512
506
  run_as: Optional[RunAs] = None
513
- """Write-only setting, available only in Create/Update calls. Specifies the user or service
514
- principal that the pipeline runs as. If not specified, the pipeline runs as the user who created
515
- the pipeline.
516
-
517
- Only `user_name` or `service_principal_name` can be specified. If both are specified, an error
518
- is thrown."""
519
507
 
520
508
  schema: Optional[str] = None
521
509
  """The default schema (database) where tables are read from or published to."""
@@ -922,6 +910,11 @@ class GetPipelineResponse:
922
910
  pipeline_id: Optional[str] = None
923
911
  """The ID of the pipeline."""
924
912
 
913
+ run_as: Optional[RunAs] = None
914
+ """The user or service principal that the pipeline runs as, if specified in the request. This field
915
+ indicates the explicit configuration of `run_as` for the pipeline. To find the value in all
916
+ cases, explicit or implicit, use `run_as_user_name`."""
917
+
925
918
  run_as_user_name: Optional[str] = None
926
919
  """Username of the user that the pipeline will run on behalf of."""
927
920
 
@@ -952,6 +945,8 @@ class GetPipelineResponse:
952
945
  body["name"] = self.name
953
946
  if self.pipeline_id is not None:
954
947
  body["pipeline_id"] = self.pipeline_id
948
+ if self.run_as:
949
+ body["run_as"] = self.run_as.as_dict()
955
950
  if self.run_as_user_name is not None:
956
951
  body["run_as_user_name"] = self.run_as_user_name
957
952
  if self.spec:
@@ -981,6 +976,8 @@ class GetPipelineResponse:
981
976
  body["name"] = self.name
982
977
  if self.pipeline_id is not None:
983
978
  body["pipeline_id"] = self.pipeline_id
979
+ if self.run_as:
980
+ body["run_as"] = self.run_as
984
981
  if self.run_as_user_name is not None:
985
982
  body["run_as_user_name"] = self.run_as_user_name
986
983
  if self.spec:
@@ -1002,6 +999,7 @@ class GetPipelineResponse:
1002
999
  latest_updates=_repeated_dict(d, "latest_updates", UpdateStateInfo),
1003
1000
  name=d.get("name", None),
1004
1001
  pipeline_id=d.get("pipeline_id", None),
1002
+ run_as=_from_dict(d, "run_as", RunAs),
1005
1003
  run_as_user_name=d.get("run_as_user_name", None),
1006
1004
  spec=_from_dict(d, "spec", PipelineSpec),
1007
1005
  state=_enum(d, "state", PipelineState),
@@ -1211,6 +1209,7 @@ class IngestionPipelineDefinition:
1211
1209
 
1212
1210
  class IngestionSourceType(Enum):
1213
1211
 
1212
+ BIGQUERY = "BIGQUERY"
1214
1213
  DYNAMICS365 = "DYNAMICS365"
1215
1214
  GA4_RAW_DATA = "GA4_RAW_DATA"
1216
1215
  MANAGED_POSTGRESQL = "MANAGED_POSTGRESQL"
@@ -1621,7 +1620,6 @@ class PipelineAccessControlRequest:
1621
1620
  """name of the group"""
1622
1621
 
1623
1622
  permission_level: Optional[PipelinePermissionLevel] = None
1624
- """Permission level"""
1625
1623
 
1626
1624
  service_principal_name: Optional[str] = None
1627
1625
  """application ID of a service principal"""
@@ -2195,7 +2193,6 @@ class PipelinePermission:
2195
2193
  inherited_from_object: Optional[List[str]] = None
2196
2194
 
2197
2195
  permission_level: Optional[PipelinePermissionLevel] = None
2198
- """Permission level"""
2199
2196
 
2200
2197
  def as_dict(self) -> dict:
2201
2198
  """Serializes the PipelinePermission into a dictionary suitable for use as a JSON request body."""
@@ -2283,7 +2280,6 @@ class PipelinePermissionsDescription:
2283
2280
  description: Optional[str] = None
2284
2281
 
2285
2282
  permission_level: Optional[PipelinePermissionLevel] = None
2286
- """Permission level"""
2287
2283
 
2288
2284
  def as_dict(self) -> dict:
2289
2285
  """Serializes the PipelinePermissionsDescription into a dictionary suitable for use as a JSON request body."""
@@ -2630,7 +2626,6 @@ class PipelineStateInfo:
2630
2626
  owner."""
2631
2627
 
2632
2628
  state: Optional[PipelineState] = None
2633
- """The pipeline state."""
2634
2629
 
2635
2630
  def as_dict(self) -> dict:
2636
2631
  """Serializes the PipelineStateInfo into a dictionary suitable for use as a JSON request body."""
@@ -3101,7 +3096,6 @@ class StackFrame:
3101
3096
  @dataclass
3102
3097
  class StartUpdate:
3103
3098
  cause: Optional[StartUpdateCause] = None
3104
- """What triggered this update."""
3105
3099
 
3106
3100
  full_refresh: Optional[bool] = None
3107
3101
  """If true, this update will reset all tables before running."""
@@ -3378,6 +3372,7 @@ class TableSpecificConfig:
3378
3372
  class TableSpecificConfigScdType(Enum):
3379
3373
  """The SCD type to use to ingest the table."""
3380
3374
 
3375
+ APPEND_ONLY = "APPEND_ONLY"
3381
3376
  SCD_TYPE_1 = "SCD_TYPE_1"
3382
3377
  SCD_TYPE_2 = "SCD_TYPE_2"
3383
3378
 
@@ -3528,7 +3523,6 @@ class UpdateStateInfo:
3528
3523
  creation_time: Optional[str] = None
3529
3524
 
3530
3525
  state: Optional[UpdateStateInfoState] = None
3531
- """The update state."""
3532
3526
 
3533
3527
  update_id: Optional[str] = None
3534
3528
 
@@ -3715,11 +3709,6 @@ class PipelinesAPI:
3715
3709
  Databricks user interface and it is added to sys.path when executing Python sources during pipeline
3716
3710
  execution.
3717
3711
  :param run_as: :class:`RunAs` (optional)
3718
- Write-only setting, available only in Create/Update calls. Specifies the user or service principal
3719
- that the pipeline runs as. If not specified, the pipeline runs as the user who created the pipeline.
3720
-
3721
- Only `user_name` or `service_principal_name` can be specified. If both are specified, an error is
3722
- thrown.
3723
3712
  :param schema: str (optional)
3724
3713
  The default schema (database) where tables are read from or published to.
3725
3714
  :param serverless: bool (optional)
@@ -4072,7 +4061,6 @@ class PipelinesAPI:
4072
4061
 
4073
4062
  :param pipeline_id: str
4074
4063
  :param cause: :class:`StartUpdateCause` (optional)
4075
- What triggered this update.
4076
4064
  :param full_refresh: bool (optional)
4077
4065
  If true, this update will reset all tables before running.
4078
4066
  :param full_refresh_selection: List[str] (optional)
@@ -4223,11 +4211,6 @@ class PipelinesAPI:
4223
4211
  Databricks user interface and it is added to sys.path when executing Python sources during pipeline
4224
4212
  execution.
4225
4213
  :param run_as: :class:`RunAs` (optional)
4226
- Write-only setting, available only in Create/Update calls. Specifies the user or service principal
4227
- that the pipeline runs as. If not specified, the pipeline runs as the user who created the pipeline.
4228
-
4229
- Only `user_name` or `service_principal_name` can be specified. If both are specified, an error is
4230
- thrown.
4231
4214
  :param schema: str (optional)
4232
4215
  The default schema (database) where tables are read from or published to.
4233
4216
  :param serverless: bool (optional)
@@ -133,7 +133,6 @@ class CloudResourceContainer:
133
133
  """The general workspace configurations that are specific to cloud providers."""
134
134
 
135
135
  gcp: Optional[CustomerFacingGcpCloudResourceContainer] = None
136
- """The general workspace configurations that are specific to Google Cloud."""
137
136
 
138
137
  def as_dict(self) -> dict:
139
138
  """Serializes the CloudResourceContainer into a dictionary suitable for use as a JSON request body."""
@@ -356,8 +355,6 @@ class CreateNetworkRequest:
356
355
  """The human-readable name of the network configuration."""
357
356
 
358
357
  gcp_network_info: Optional[GcpNetworkInfo] = None
359
- """The Google Cloud specific information for this network (for example, the VPC ID, subnet ID, and
360
- secondary IP ranges)."""
361
358
 
362
359
  security_group_ids: Optional[List[str]] = None
363
360
  """IDs of one to five security groups associated with this network. Security group IDs **cannot**
@@ -368,10 +365,6 @@ class CreateNetworkRequest:
368
365
  multiple network configurations."""
369
366
 
370
367
  vpc_endpoints: Optional[NetworkVpcEndpoints] = None
371
- """If specified, contains the VPC endpoints used to allow cluster communication from this VPC over
372
- [AWS PrivateLink].
373
-
374
- [AWS PrivateLink]: https://aws.amazon.com/privatelink/"""
375
368
 
376
369
  vpc_id: Optional[str] = None
377
370
  """The ID of the VPC associated with this network. VPC IDs can be used in multiple network
@@ -430,7 +423,6 @@ class CreateStorageConfigurationRequest:
430
423
  """The human-readable name of the storage configuration."""
431
424
 
432
425
  root_bucket_info: RootBucketInfo
433
- """Root S3 bucket information."""
434
426
 
435
427
  def as_dict(self) -> dict:
436
428
  """Serializes the CreateStorageConfigurationRequest into a dictionary suitable for use as a JSON request body."""
@@ -468,7 +460,6 @@ class CreateVpcEndpointRequest:
468
460
  """The ID of the VPC endpoint object in AWS."""
469
461
 
470
462
  gcp_vpc_endpoint_info: Optional[GcpVpcEndpointInfo] = None
471
- """The Google Cloud specific information for this Private Service Connect endpoint."""
472
463
 
473
464
  region: Optional[str] = None
474
465
  """The AWS region in which this VPC endpoint object exists."""
@@ -523,7 +514,6 @@ class CreateWorkspaceRequest:
523
514
  to `gcp`."""
524
515
 
525
516
  cloud_resource_container: Optional[CloudResourceContainer] = None
526
- """The general workspace configurations that are specific to cloud providers."""
527
517
 
528
518
  credentials_id: Optional[str] = None
529
519
  """ID of the workspace's credential configuration object."""
@@ -559,27 +549,8 @@ class CreateWorkspaceRequest:
559
549
  with the pattern `dbc-xxxxxxxx-xxxx`."""
560
550
 
561
551
  gcp_managed_network_config: Optional[GcpManagedNetworkConfig] = None
562
- """The network settings for the workspace. The configurations are only for Databricks-managed VPCs.
563
- It is ignored if you specify a customer-managed VPC in the `network_id` field.", All the IP
564
- range configurations must be mutually exclusive. An attempt to create a workspace fails if
565
- Databricks detects an IP range overlap.
566
-
567
- Specify custom IP ranges in CIDR format. The IP ranges for these fields must not overlap, and
568
- all IP addresses must be entirely within the following ranges: `10.0.0.0/8`, `100.64.0.0/10`,
569
- `172.16.0.0/12`, `192.168.0.0/16`, and `240.0.0.0/4`.
570
-
571
- The sizes of these IP ranges affect the maximum number of nodes for the workspace.
572
-
573
- **Important**: Confirm the IP ranges used by your Databricks workspace before creating the
574
- workspace. You cannot change them after your workspace is deployed. If the IP address ranges for
575
- your Databricks are too small, IP exhaustion can occur, causing your Databricks jobs to fail. To
576
- determine the address range sizes that you need, Databricks provides a calculator as a Microsoft
577
- Excel spreadsheet. See [calculate subnet sizes for a new workspace].
578
-
579
- [calculate subnet sizes for a new workspace]: https://docs.gcp.databricks.com/administration-guide/cloud-configurations/gcp/network-sizing.html"""
580
552
 
581
553
  gke_config: Optional[GkeConfig] = None
582
- """The configurations for the GKE cluster of a Databricks workspace."""
583
554
 
584
555
  is_no_public_ip_enabled: Optional[bool] = None
585
556
  """Whether no public IP is enabled for the workspace."""
@@ -597,9 +568,6 @@ class CreateWorkspaceRequest:
597
568
  network_id: Optional[str] = None
598
569
 
599
570
  pricing_tier: Optional[PricingTier] = None
600
- """The pricing tier of the workspace. For pricing tier information, see [AWS Pricing].
601
-
602
- [AWS Pricing]: https://databricks.com/product/aws-pricing"""
603
571
 
604
572
  private_access_settings_id: Optional[str] = None
605
573
  """ID of the workspace's private access settings object. Only used for PrivateLink. This ID must be
@@ -1259,8 +1227,6 @@ class Network:
1259
1227
  """Array of error messages about the network configuration."""
1260
1228
 
1261
1229
  gcp_network_info: Optional[GcpNetworkInfo] = None
1262
- """The Google Cloud specific information for this network (for example, the VPC ID, subnet ID, and
1263
- secondary IP ranges)."""
1264
1230
 
1265
1231
  network_id: Optional[str] = None
1266
1232
  """The Databricks network configuration ID."""
@@ -1273,18 +1239,12 @@ class Network:
1273
1239
  subnet_ids: Optional[List[str]] = None
1274
1240
 
1275
1241
  vpc_endpoints: Optional[NetworkVpcEndpoints] = None
1276
- """If specified, contains the VPC endpoints used to allow cluster communication from this VPC over
1277
- [AWS PrivateLink].
1278
-
1279
- [AWS PrivateLink]: https://aws.amazon.com/privatelink/"""
1280
1242
 
1281
1243
  vpc_id: Optional[str] = None
1282
1244
  """The ID of the VPC associated with this network configuration. VPC IDs can be used in multiple
1283
1245
  networks."""
1284
1246
 
1285
1247
  vpc_status: Optional[VpcStatus] = None
1286
- """The status of this network configuration object in terms of its use in a workspace: *
1287
- `UNATTACHED`: Unattached. * `VALID`: Valid. * `BROKEN`: Broken. * `WARNED`: Warned."""
1288
1248
 
1289
1249
  warning_messages: Optional[List[NetworkWarning]] = None
1290
1250
  """Array of warning messages about the network configuration."""
@@ -1380,8 +1340,6 @@ class NetworkHealth:
1380
1340
  """Details of the error."""
1381
1341
 
1382
1342
  error_type: Optional[ErrorType] = None
1383
- """The AWS resource associated with this error: credentials, VPC, subnet, security group, or
1384
- network ACL."""
1385
1343
 
1386
1344
  def as_dict(self) -> dict:
1387
1345
  """Serializes the NetworkHealth into a dictionary suitable for use as a JSON request body."""
@@ -1451,7 +1409,6 @@ class NetworkWarning:
1451
1409
  """Details of the warning."""
1452
1410
 
1453
1411
  warning_type: Optional[WarningType] = None
1454
- """The AWS resource associated with this warning: a subnet or a security group."""
1455
1412
 
1456
1413
  def as_dict(self) -> dict:
1457
1414
  """Serializes the NetworkWarning into a dictionary suitable for use as a JSON request body."""
@@ -1510,11 +1467,6 @@ class PrivateAccessSettings:
1510
1467
  """An array of Databricks VPC endpoint IDs."""
1511
1468
 
1512
1469
  private_access_level: Optional[PrivateAccessLevel] = None
1513
- """The private access level controls which VPC endpoints can connect to the UI or API of any
1514
- workspace that attaches this private access settings object. * `ACCOUNT` level access (the
1515
- default) allows only VPC endpoints that are registered in your Databricks account connect to
1516
- your workspace. * `ENDPOINT` level access allows only specified VPC endpoints connect to your
1517
- workspace. For details, see `allowed_vpc_endpoint_ids`."""
1518
1470
 
1519
1471
  private_access_settings_id: Optional[str] = None
1520
1472
  """Databricks private access settings ID."""
@@ -1637,7 +1589,6 @@ class StorageConfiguration:
1637
1589
  """Time in epoch milliseconds when the storage configuration was created."""
1638
1590
 
1639
1591
  root_bucket_info: Optional[RootBucketInfo] = None
1640
- """Root S3 bucket information."""
1641
1592
 
1642
1593
  storage_configuration_id: Optional[str] = None
1643
1594
  """Databricks storage configuration ID."""
@@ -1869,11 +1820,6 @@ class UpsertPrivateAccessSettingsRequest:
1869
1820
  [IP access lists]: https://docs.databricks.com/security/network/ip-access-list.html"""
1870
1821
 
1871
1822
  private_access_level: Optional[PrivateAccessLevel] = None
1872
- """The private access level controls which VPC endpoints can connect to the UI or API of any
1873
- workspace that attaches this private access settings object. * `ACCOUNT` level access (the
1874
- default) allows only VPC endpoints that are registered in your Databricks account connect to
1875
- your workspace. * `ENDPOINT` level access allows only specified VPC endpoints connect to your
1876
- workspace. For details, see `allowed_vpc_endpoint_ids`."""
1877
1823
 
1878
1824
  private_access_settings_id: Optional[str] = None
1879
1825
  """Databricks Account API private access settings ID."""
@@ -1951,7 +1897,6 @@ class VpcEndpoint:
1951
1897
  """The ID of the VPC endpoint object in AWS."""
1952
1898
 
1953
1899
  gcp_vpc_endpoint_info: Optional[GcpVpcEndpointInfo] = None
1954
- """The Google Cloud specific information for this Private Service Connect endpoint."""
1955
1900
 
1956
1901
  region: Optional[str] = None
1957
1902
  """The AWS region in which this VPC endpoint object exists."""
@@ -1963,10 +1908,6 @@ class VpcEndpoint:
1963
1908
  [AWS DescribeVpcEndpoint documentation]: https://docs.aws.amazon.com/cli/latest/reference/ec2/describe-vpc-endpoints.html"""
1964
1909
 
1965
1910
  use_case: Optional[EndpointUseCase] = None
1966
- """This enumeration represents the type of Databricks VPC [endpoint service] that was used when
1967
- creating this VPC endpoint.
1968
-
1969
- [endpoint service]: https://docs.aws.amazon.com/vpc/latest/privatelink/endpoint-service.html"""
1970
1911
 
1971
1912
  vpc_endpoint_id: Optional[str] = None
1972
1913
  """Databricks VPC endpoint ID. This is the Databricks-specific name of the VPC endpoint. Do not
@@ -2073,7 +2014,6 @@ class Workspace:
2073
2014
  """The cloud name. This field always has the value `gcp`."""
2074
2015
 
2075
2016
  cloud_resource_container: Optional[CloudResourceContainer] = None
2076
- """The general workspace configurations that are specific to cloud providers."""
2077
2017
 
2078
2018
  creation_time: Optional[int] = None
2079
2019
  """Time in epoch milliseconds when the workspace was created."""
@@ -2097,27 +2037,8 @@ class Workspace:
2097
2037
  workspace is not for a external customer, then external_customer_info is empty."""
2098
2038
 
2099
2039
  gcp_managed_network_config: Optional[GcpManagedNetworkConfig] = None
2100
- """The network settings for the workspace. The configurations are only for Databricks-managed VPCs.
2101
- It is ignored if you specify a customer-managed VPC in the `network_id` field.", All the IP
2102
- range configurations must be mutually exclusive. An attempt to create a workspace fails if
2103
- Databricks detects an IP range overlap.
2104
-
2105
- Specify custom IP ranges in CIDR format. The IP ranges for these fields must not overlap, and
2106
- all IP addresses must be entirely within the following ranges: `10.0.0.0/8`, `100.64.0.0/10`,
2107
- `172.16.0.0/12`, `192.168.0.0/16`, and `240.0.0.0/4`.
2108
-
2109
- The sizes of these IP ranges affect the maximum number of nodes for the workspace.
2110
-
2111
- **Important**: Confirm the IP ranges used by your Databricks workspace before creating the
2112
- workspace. You cannot change them after your workspace is deployed. If the IP address ranges for
2113
- your Databricks are too small, IP exhaustion can occur, causing your Databricks jobs to fail. To
2114
- determine the address range sizes that you need, Databricks provides a calculator as a Microsoft
2115
- Excel spreadsheet. See [calculate subnet sizes for a new workspace].
2116
-
2117
- [calculate subnet sizes for a new workspace]: https://docs.gcp.databricks.com/administration-guide/cloud-configurations/gcp/network-sizing.html"""
2118
2040
 
2119
2041
  gke_config: Optional[GkeConfig] = None
2120
- """The configurations for the GKE cluster of a Databricks workspace."""
2121
2042
 
2122
2043
  is_no_public_ip_enabled: Optional[bool] = None
2123
2044
  """Whether no public IP is enabled for the workspace."""
@@ -2134,9 +2055,6 @@ class Workspace:
2134
2055
  the network is a customer-managed network."""
2135
2056
 
2136
2057
  pricing_tier: Optional[PricingTier] = None
2137
- """The pricing tier of the workspace. For pricing tier information, see [AWS Pricing].
2138
-
2139
- [AWS Pricing]: https://databricks.com/product/aws-pricing"""
2140
2058
 
2141
2059
  private_access_settings_id: Optional[str] = None
2142
2060
  """ID of the workspace's private access settings object. Only used for PrivateLink. You must
@@ -2161,8 +2079,6 @@ class Workspace:
2161
2079
  """The human-readable name of the workspace."""
2162
2080
 
2163
2081
  workspace_status: Optional[WorkspaceStatus] = None
2164
- """The status of the workspace. For workspace creation, usually it is set to `PROVISIONING`
2165
- initially. Continue to check the status until the status is `RUNNING`."""
2166
2082
 
2167
2083
  workspace_status_message: Optional[str] = None
2168
2084
  """Message describing the current workspace status."""
@@ -2397,6 +2313,7 @@ class CredentialsAPI:
2397
2313
  def list(self) -> Iterator[Credential]:
2398
2314
  """Gets all Databricks credential configurations associated with an account specified by ID.
2399
2315
 
2316
+
2400
2317
  :returns: Iterator over :class:`Credential`
2401
2318
  """
2402
2319
 
@@ -2534,6 +2451,7 @@ class EncryptionKeysAPI:
2534
2451
 
2535
2452
  This operation is available only if your account is on the E2 version of the platform.
2536
2453
 
2454
+
2537
2455
  :returns: Iterator over :class:`CustomerManagedKey`
2538
2456
  """
2539
2457
 
@@ -2568,8 +2486,6 @@ class NetworksAPI:
2568
2486
  :param network_name: str
2569
2487
  The human-readable name of the network configuration.
2570
2488
  :param gcp_network_info: :class:`GcpNetworkInfo` (optional)
2571
- The Google Cloud specific information for this network (for example, the VPC ID, subnet ID, and
2572
- secondary IP ranges).
2573
2489
  :param security_group_ids: List[str] (optional)
2574
2490
  IDs of one to five security groups associated with this network. Security group IDs **cannot** be
2575
2491
  used in multiple network configurations.
@@ -2577,10 +2493,6 @@ class NetworksAPI:
2577
2493
  IDs of at least two subnets associated with this network. Subnet IDs **cannot** be used in multiple
2578
2494
  network configurations.
2579
2495
  :param vpc_endpoints: :class:`NetworkVpcEndpoints` (optional)
2580
- If specified, contains the VPC endpoints used to allow cluster communication from this VPC over [AWS
2581
- PrivateLink].
2582
-
2583
- [AWS PrivateLink]: https://aws.amazon.com/privatelink/
2584
2496
  :param vpc_id: str (optional)
2585
2497
  The ID of the VPC associated with this network. VPC IDs can be used in multiple network
2586
2498
  configurations.
@@ -2647,6 +2559,7 @@ class NetworksAPI:
2647
2559
 
2648
2560
  This operation is available only if your account is on the E2 version of the platform.
2649
2561
 
2562
+
2650
2563
  :returns: Iterator over :class:`Network`
2651
2564
  """
2652
2565
 
@@ -2704,11 +2617,6 @@ class PrivateAccessAPI:
2704
2617
 
2705
2618
  [IP access lists]: https://docs.databricks.com/security/network/ip-access-list.html
2706
2619
  :param private_access_level: :class:`PrivateAccessLevel` (optional)
2707
- The private access level controls which VPC endpoints can connect to the UI or API of any workspace
2708
- that attaches this private access settings object. * `ACCOUNT` level access (the default) allows
2709
- only VPC endpoints that are registered in your Databricks account connect to your workspace. *
2710
- `ENDPOINT` level access allows only specified VPC endpoints connect to your workspace. For details,
2711
- see `allowed_vpc_endpoint_ids`.
2712
2620
  :param public_access_enabled: bool (optional)
2713
2621
  Determines if the workspace can be accessed over public internet. For fully private workspaces, you
2714
2622
  can optionally specify `false`, but only if you implement both the front-end and the back-end
@@ -2791,6 +2699,7 @@ class PrivateAccessAPI:
2791
2699
  def list(self) -> Iterator[PrivateAccessSettings]:
2792
2700
  """Gets a list of all private access settings objects for an account, specified by ID.
2793
2701
 
2702
+
2794
2703
  :returns: Iterator over :class:`PrivateAccessSettings`
2795
2704
  """
2796
2705
 
@@ -2849,11 +2758,6 @@ class PrivateAccessAPI:
2849
2758
 
2850
2759
  [IP access lists]: https://docs.databricks.com/security/network/ip-access-list.html
2851
2760
  :param private_access_level: :class:`PrivateAccessLevel` (optional)
2852
- The private access level controls which VPC endpoints can connect to the UI or API of any workspace
2853
- that attaches this private access settings object. * `ACCOUNT` level access (the default) allows
2854
- only VPC endpoints that are registered in your Databricks account connect to your workspace. *
2855
- `ENDPOINT` level access allows only specified VPC endpoints connect to your workspace. For details,
2856
- see `allowed_vpc_endpoint_ids`.
2857
2761
  :param public_access_enabled: bool (optional)
2858
2762
  Determines if the workspace can be accessed over public internet. For fully private workspaces, you
2859
2763
  can optionally specify `false`, but only if you implement both the front-end and the back-end
@@ -2908,7 +2812,6 @@ class StorageAPI:
2908
2812
  :param storage_configuration_name: str
2909
2813
  The human-readable name of the storage configuration.
2910
2814
  :param root_bucket_info: :class:`RootBucketInfo`
2911
- Root S3 bucket information.
2912
2815
 
2913
2816
  :returns: :class:`StorageConfiguration`
2914
2817
  """
@@ -2970,6 +2873,7 @@ class StorageAPI:
2970
2873
  def list(self) -> Iterator[StorageConfiguration]:
2971
2874
  """Gets a list of all Databricks storage configurations for your account, specified by ID.
2972
2875
 
2876
+
2973
2877
  :returns: Iterator over :class:`StorageConfiguration`
2974
2878
  """
2975
2879
 
@@ -3013,7 +2917,6 @@ class VpcEndpointsAPI:
3013
2917
  :param aws_vpc_endpoint_id: str (optional)
3014
2918
  The ID of the VPC endpoint object in AWS.
3015
2919
  :param gcp_vpc_endpoint_info: :class:`GcpVpcEndpointInfo` (optional)
3016
- The Google Cloud specific information for this Private Service Connect endpoint.
3017
2920
  :param region: str (optional)
3018
2921
  The AWS region in which this VPC endpoint object exists.
3019
2922
 
@@ -3091,6 +2994,7 @@ class VpcEndpointsAPI:
3091
2994
 
3092
2995
  [Databricks article about PrivateLink]: https://docs.databricks.com/administration-guide/cloud-configurations/aws/privatelink.html
3093
2996
 
2997
+
3094
2998
  :returns: Iterator over :class:`VpcEndpoint`
3095
2999
  """
3096
3000
 
@@ -3183,7 +3087,6 @@ class WorkspacesAPI:
3183
3087
  The cloud provider which the workspace uses. For Google Cloud workspaces, always set this field to
3184
3088
  `gcp`.
3185
3089
  :param cloud_resource_container: :class:`CloudResourceContainer` (optional)
3186
- The general workspace configurations that are specific to cloud providers.
3187
3090
  :param credentials_id: str (optional)
3188
3091
  ID of the workspace's credential configuration object.
3189
3092
  :param custom_tags: Dict[str,str] (optional)
@@ -3215,26 +3118,7 @@ class WorkspacesAPI:
3215
3118
  If a new workspace omits this property, the server generates a unique deployment name for you with
3216
3119
  the pattern `dbc-xxxxxxxx-xxxx`.
3217
3120
  :param gcp_managed_network_config: :class:`GcpManagedNetworkConfig` (optional)
3218
- The network settings for the workspace. The configurations are only for Databricks-managed VPCs. It
3219
- is ignored if you specify a customer-managed VPC in the `network_id` field.", All the IP range
3220
- configurations must be mutually exclusive. An attempt to create a workspace fails if Databricks
3221
- detects an IP range overlap.
3222
-
3223
- Specify custom IP ranges in CIDR format. The IP ranges for these fields must not overlap, and all IP
3224
- addresses must be entirely within the following ranges: `10.0.0.0/8`, `100.64.0.0/10`,
3225
- `172.16.0.0/12`, `192.168.0.0/16`, and `240.0.0.0/4`.
3226
-
3227
- The sizes of these IP ranges affect the maximum number of nodes for the workspace.
3228
-
3229
- **Important**: Confirm the IP ranges used by your Databricks workspace before creating the
3230
- workspace. You cannot change them after your workspace is deployed. If the IP address ranges for
3231
- your Databricks are too small, IP exhaustion can occur, causing your Databricks jobs to fail. To
3232
- determine the address range sizes that you need, Databricks provides a calculator as a Microsoft
3233
- Excel spreadsheet. See [calculate subnet sizes for a new workspace].
3234
-
3235
- [calculate subnet sizes for a new workspace]: https://docs.gcp.databricks.com/administration-guide/cloud-configurations/gcp/network-sizing.html
3236
3121
  :param gke_config: :class:`GkeConfig` (optional)
3237
- The configurations for the GKE cluster of a Databricks workspace.
3238
3122
  :param is_no_public_ip_enabled: bool (optional)
3239
3123
  Whether no public IP is enabled for the workspace.
3240
3124
  :param location: str (optional)
@@ -3245,9 +3129,6 @@ class WorkspacesAPI:
3245
3129
  history. The provided key configuration object property `use_cases` must contain `MANAGED_SERVICES`.
3246
3130
  :param network_id: str (optional)
3247
3131
  :param pricing_tier: :class:`PricingTier` (optional)
3248
- The pricing tier of the workspace. For pricing tier information, see [AWS Pricing].
3249
-
3250
- [AWS Pricing]: https://databricks.com/product/aws-pricing
3251
3132
  :param private_access_settings_id: str (optional)
3252
3133
  ID of the workspace's private access settings object. Only used for PrivateLink. This ID must be
3253
3134
  specified for customers using [AWS PrivateLink] for either front-end (user-to-workspace connection),
@@ -3414,6 +3295,7 @@ class WorkspacesAPI:
3414
3295
  This operation is available only if your account is on the E2 version of the platform or on a select
3415
3296
  custom plan that allows multiple workspaces per account.
3416
3297
 
3298
+
3417
3299
  :returns: Iterator over :class:`Workspace`
3418
3300
  """
3419
3301
 
@@ -63,24 +63,6 @@ class AnomalyDetectionRunStatus(Enum):
63
63
  ANOMALY_DETECTION_RUN_STATUS_WORKSPACE_MISMATCH_ERROR = "ANOMALY_DETECTION_RUN_STATUS_WORKSPACE_MISMATCH_ERROR"
64
64
 
65
65
 
66
- @dataclass
67
- class DeleteQualityMonitorResponse:
68
- def as_dict(self) -> dict:
69
- """Serializes the DeleteQualityMonitorResponse into a dictionary suitable for use as a JSON request body."""
70
- body = {}
71
- return body
72
-
73
- def as_shallow_dict(self) -> dict:
74
- """Serializes the DeleteQualityMonitorResponse into a shallow dictionary of its immediate attributes."""
75
- body = {}
76
- return body
77
-
78
- @classmethod
79
- def from_dict(cls, d: Dict[str, Any]) -> DeleteQualityMonitorResponse:
80
- """Deserializes the DeleteQualityMonitorResponse from a dictionary."""
81
- return cls()
82
-
83
-
84
66
  @dataclass
85
67
  class ListQualityMonitorResponse:
86
68
  next_page_token: Optional[str] = None