databricks-sdk 0.36.0__py3-none-any.whl → 0.38.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.


@@ -61,7 +61,7 @@ class CreatePipeline:
  """Filters on which Pipeline packages to include in the deployed graph."""

  gateway_definition: Optional[IngestionGatewayPipelineDefinition] = None
- """The definition of a gateway pipeline to support CDC."""
+ """The definition of a gateway pipeline to support change data capture."""

  id: Optional[str] = None
  """Unique identifier for this pipeline."""

@@ -82,6 +82,9 @@ class CreatePipeline:
  photon: Optional[bool] = None
  """Whether Photon is enabled for this pipeline."""

+ restart_window: Optional[RestartWindow] = None
+ """Restart window of this pipeline."""
+
  schema: Optional[str] = None
  """The default schema (database) where tables are read from or published to. The presence of this
  field implies that the pipeline is in direct publishing mode."""

@@ -122,6 +125,7 @@ class CreatePipeline:
  if self.name is not None: body['name'] = self.name
  if self.notifications: body['notifications'] = [v.as_dict() for v in self.notifications]
  if self.photon is not None: body['photon'] = self.photon
+ if self.restart_window: body['restart_window'] = self.restart_window.as_dict()
  if self.schema is not None: body['schema'] = self.schema
  if self.serverless is not None: body['serverless'] = self.serverless
  if self.storage is not None: body['storage'] = self.storage

@@ -151,6 +155,7 @@ class CreatePipeline:
  name=d.get('name', None),
  notifications=_repeated_dict(d, 'notifications', Notifications),
  photon=d.get('photon', None),
+ restart_window=_from_dict(d, 'restart_window', RestartWindow),
  schema=d.get('schema', None),
  serverless=d.get('serverless', None),
  storage=d.get('storage', None),

@@ -285,7 +290,7 @@ class EditPipeline:
  """Filters on which Pipeline packages to include in the deployed graph."""

  gateway_definition: Optional[IngestionGatewayPipelineDefinition] = None
- """The definition of a gateway pipeline to support CDC."""
+ """The definition of a gateway pipeline to support change data capture."""

  id: Optional[str] = None
  """Unique identifier for this pipeline."""

@@ -309,6 +314,9 @@ class EditPipeline:
  pipeline_id: Optional[str] = None
  """Unique identifier for this pipeline."""

+ restart_window: Optional[RestartWindow] = None
+ """Restart window of this pipeline."""
+
  schema: Optional[str] = None
  """The default schema (database) where tables are read from or published to. The presence of this
  field implies that the pipeline is in direct publishing mode."""

@@ -351,6 +359,7 @@ class EditPipeline:
  if self.notifications: body['notifications'] = [v.as_dict() for v in self.notifications]
  if self.photon is not None: body['photon'] = self.photon
  if self.pipeline_id is not None: body['pipeline_id'] = self.pipeline_id
+ if self.restart_window: body['restart_window'] = self.restart_window.as_dict()
  if self.schema is not None: body['schema'] = self.schema
  if self.serverless is not None: body['serverless'] = self.serverless
  if self.storage is not None: body['storage'] = self.storage

@@ -381,6 +390,7 @@ class EditPipeline:
  notifications=_repeated_dict(d, 'notifications', Notifications),
  photon=d.get('photon', None),
  pipeline_id=d.get('pipeline_id', None),
+ restart_window=_from_dict(d, 'restart_window', RestartWindow),
  schema=d.get('schema', None),
  serverless=d.get('serverless', None),
  storage=d.get('storage', None),

@@ -588,13 +598,13 @@ class GetUpdateResponse:
  @dataclass
  class IngestionConfig:
  report: Optional[ReportSpec] = None
- """Select tables from a specific source report."""
+ """Select a specific source report."""

  schema: Optional[SchemaSpec] = None
- """Select tables from a specific source schema."""
+ """Select all tables from a specific source schema."""

  table: Optional[TableSpec] = None
- """Select tables from a specific source table."""
+ """Select a specific source table."""

  def as_dict(self) -> dict:
  """Serializes the IngestionConfig into a dictionary suitable for use as a JSON request body."""

@@ -615,7 +625,11 @@ class IngestionConfig:
  @dataclass
  class IngestionGatewayPipelineDefinition:
  connection_id: Optional[str] = None
- """Immutable. The Unity Catalog connection this gateway pipeline uses to communicate with the
+ """[Deprecated, use connection_name instead] Immutable. The Unity Catalog connection that this
+ gateway pipeline uses to communicate with the source."""
+
+ connection_name: Optional[str] = None
+ """Immutable. The Unity Catalog connection that this gateway pipeline uses to communicate with the
  source."""

  gateway_storage_catalog: Optional[str] = None

@@ -633,6 +647,7 @@ class IngestionGatewayPipelineDefinition:
  """Serializes the IngestionGatewayPipelineDefinition into a dictionary suitable for use as a JSON request body."""
  body = {}
  if self.connection_id is not None: body['connection_id'] = self.connection_id
+ if self.connection_name is not None: body['connection_name'] = self.connection_name
  if self.gateway_storage_catalog is not None:
  body['gateway_storage_catalog'] = self.gateway_storage_catalog
  if self.gateway_storage_name is not None: body['gateway_storage_name'] = self.gateway_storage_name

@@ -644,6 +659,7 @@ class IngestionGatewayPipelineDefinition:
  def from_dict(cls, d: Dict[str, any]) -> IngestionGatewayPipelineDefinition:
  """Deserializes the IngestionGatewayPipelineDefinition from a dictionary."""
  return cls(connection_id=d.get('connection_id', None),
+ connection_name=d.get('connection_name', None),
  gateway_storage_catalog=d.get('gateway_storage_catalog', None),
  gateway_storage_name=d.get('gateway_storage_name', None),
  gateway_storage_schema=d.get('gateway_storage_schema', None))

@@ -652,12 +668,12 @@ class IngestionGatewayPipelineDefinition:
  @dataclass
  class IngestionPipelineDefinition:
  connection_name: Optional[str] = None
- """Immutable. The Unity Catalog connection this ingestion pipeline uses to communicate with the
- source. Specify either ingestion_gateway_id or connection_name."""
+ """Immutable. The Unity Catalog connection that this ingestion pipeline uses to communicate with
+ the source. This is used with connectors for applications like Salesforce, Workday, and so on."""

  ingestion_gateway_id: Optional[str] = None
- """Immutable. Identifier for the ingestion gateway used by this ingestion pipeline to communicate
- with the source. Specify either ingestion_gateway_id or connection_name."""
+ """Immutable. Identifier for the gateway that is used by this ingestion pipeline to communicate
+ with the source database. This is used with connectors to databases like SQL Server."""

  objects: Optional[List[IngestionConfig]] = None
  """Required. Settings specifying tables to replicate and the destination for the replicated tables."""
@@ -1444,7 +1460,7 @@ class PipelineSpec:
  """Filters on which Pipeline packages to include in the deployed graph."""

  gateway_definition: Optional[IngestionGatewayPipelineDefinition] = None
- """The definition of a gateway pipeline to support CDC."""
+ """The definition of a gateway pipeline to support change data capture."""

  id: Optional[str] = None
  """Unique identifier for this pipeline."""

@@ -1465,6 +1481,9 @@ class PipelineSpec:
  photon: Optional[bool] = None
  """Whether Photon is enabled for this pipeline."""

+ restart_window: Optional[RestartWindow] = None
+ """Restart window of this pipeline."""
+
  schema: Optional[str] = None
  """The default schema (database) where tables are read from or published to. The presence of this
  field implies that the pipeline is in direct publishing mode."""

@@ -1503,6 +1522,7 @@ class PipelineSpec:
  if self.name is not None: body['name'] = self.name
  if self.notifications: body['notifications'] = [v.as_dict() for v in self.notifications]
  if self.photon is not None: body['photon'] = self.photon
+ if self.restart_window: body['restart_window'] = self.restart_window.as_dict()
  if self.schema is not None: body['schema'] = self.schema
  if self.serverless is not None: body['serverless'] = self.serverless
  if self.storage is not None: body['storage'] = self.storage

@@ -1530,6 +1550,7 @@ class PipelineSpec:
  name=d.get('name', None),
  notifications=_repeated_dict(d, 'notifications', Notifications),
  photon=d.get('photon', None),
+ restart_window=_from_dict(d, 'restart_window', RestartWindow),
  schema=d.get('schema', None),
  serverless=d.get('serverless', None),
  storage=d.get('storage', None),

@@ -1668,6 +1689,50 @@ class ReportSpec:
  table_configuration=_from_dict(d, 'table_configuration', TableSpecificConfig))


+ @dataclass
+ class RestartWindow:
+ start_hour: int
+ """An integer between 0 and 23 denoting the start hour for the restart window in the 24-hour day.
+ Continuous pipeline restart is triggered only within a five-hour window starting at this hour."""
+
+ days_of_week: Optional[RestartWindowDaysOfWeek] = None
+ """Days of week in which the restart is allowed to happen (within a five-hour window starting at
+ start_hour). If not specified all days of the week will be used."""
+
+ time_zone_id: Optional[str] = None
+ """Time zone id of restart window. See
+ https://docs.databricks.com/sql/language-manual/sql-ref-syntax-aux-conf-mgmt-set-timezone.html
+ for details. If not specified, UTC will be used."""
+
+ def as_dict(self) -> dict:
+ """Serializes the RestartWindow into a dictionary suitable for use as a JSON request body."""
+ body = {}
+ if self.days_of_week is not None: body['days_of_week'] = self.days_of_week.value
+ if self.start_hour is not None: body['start_hour'] = self.start_hour
+ if self.time_zone_id is not None: body['time_zone_id'] = self.time_zone_id
+ return body
+
+ @classmethod
+ def from_dict(cls, d: Dict[str, any]) -> RestartWindow:
+ """Deserializes the RestartWindow from a dictionary."""
+ return cls(days_of_week=_enum(d, 'days_of_week', RestartWindowDaysOfWeek),
+ start_hour=d.get('start_hour', None),
+ time_zone_id=d.get('time_zone_id', None))
+
+
+ class RestartWindowDaysOfWeek(Enum):
+ """Days of week in which the restart is allowed to happen (within a five-hour window starting at
+ start_hour). If not specified all days of the week will be used."""
+
+ FRIDAY = 'FRIDAY'
+ MONDAY = 'MONDAY'
+ SATURDAY = 'SATURDAY'
+ SUNDAY = 'SUNDAY'
+ THURSDAY = 'THURSDAY'
+ TUESDAY = 'TUESDAY'
+ WEDNESDAY = 'WEDNESDAY'
+
+
  @dataclass
  class SchemaSpec:
  destination_catalog: Optional[str] = None
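
For context, a minimal sketch of how the RestartWindow type added above might be constructed and serialized, using only the fields and enum values shown in this hunk (the time zone value is an illustrative assumption; it defaults to UTC when omitted):

    from databricks.sdk.service.pipelines import RestartWindow, RestartWindowDaysOfWeek

    # Allow continuous-pipeline restarts only in the five-hour window starting at 02:00 on Sundays.
    window = RestartWindow(start_hour=2,
                           days_of_week=RestartWindowDaysOfWeek.SUNDAY,
                           time_zone_id='America/Los_Angeles')  # assumed time zone id

    # as_dict() produces the JSON request body fragment, e.g.
    # {'days_of_week': 'SUNDAY', 'start_hour': 2, 'time_zone_id': 'America/Los_Angeles'}
    print(window.as_dict())
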
@@ -2122,13 +2187,13 @@ class PipelinesAPI:
  def __init__(self, api_client):
  self._api = api_client

- def wait_get_pipeline_idle(
+ def wait_get_pipeline_running(
  self,
  pipeline_id: str,
  timeout=timedelta(minutes=20),
  callback: Optional[Callable[[GetPipelineResponse], None]] = None) -> GetPipelineResponse:
  deadline = time.time() + timeout.total_seconds()
- target_states = (PipelineState.IDLE, )
+ target_states = (PipelineState.RUNNING, )
  failure_states = (PipelineState.FAILED, )
  status_message = 'polling...'
  attempt = 1

@@ -2141,7 +2206,7 @@ class PipelinesAPI:
  if callback:
  callback(poll)
  if status in failure_states:
- msg = f'failed to reach IDLE, got {status}: {status_message}'
+ msg = f'failed to reach RUNNING, got {status}: {status_message}'
  raise OperationFailed(msg)
  prefix = f"pipeline_id={pipeline_id}"
  sleep = attempt

@@ -2153,13 +2218,13 @@ class PipelinesAPI:
  attempt += 1
  raise TimeoutError(f'timed out after {timeout}: {status_message}')

- def wait_get_pipeline_running(
+ def wait_get_pipeline_idle(
  self,
  pipeline_id: str,
  timeout=timedelta(minutes=20),
  callback: Optional[Callable[[GetPipelineResponse], None]] = None) -> GetPipelineResponse:
  deadline = time.time() + timeout.total_seconds()
- target_states = (PipelineState.RUNNING, )
+ target_states = (PipelineState.IDLE, )
  failure_states = (PipelineState.FAILED, )
  status_message = 'polling...'
  attempt = 1

@@ -2172,7 +2237,7 @@ class PipelinesAPI:
  if callback:
  callback(poll)
  if status in failure_states:
- msg = f'failed to reach RUNNING, got {status}: {status_message}'
+ msg = f'failed to reach IDLE, got {status}: {status_message}'
  raise OperationFailed(msg)
  prefix = f"pipeline_id={pipeline_id}"
  sleep = attempt
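
The four hunks above simply swap the positions of the two generated waiters in the file; both wait_get_pipeline_running and wait_get_pipeline_idle remain available with the same polling behaviour. A hedged usage sketch, assuming a configured workspace and an existing pipeline id:

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()  # assumes auth via env vars or ~/.databrickscfg
    pipeline_id = '<existing-pipeline-id>'  # placeholder

    # Block until the pipeline reaches RUNNING (raises OperationFailed on FAILED, TimeoutError otherwise).
    running = w.pipelines.wait_get_pipeline_running(pipeline_id)
    print(running.state)

    # Later, e.g. after w.pipelines.stop(pipeline_id), wait for it to settle back to IDLE.
    idle = w.pipelines.wait_get_pipeline_idle(pipeline_id)
    print(idle.state)
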
@@ -2205,6 +2270,7 @@ class PipelinesAPI:
  name: Optional[str] = None,
  notifications: Optional[List[Notifications]] = None,
  photon: Optional[bool] = None,
+ restart_window: Optional[RestartWindow] = None,
  schema: Optional[str] = None,
  serverless: Optional[bool] = None,
  storage: Optional[str] = None,

@@ -2241,7 +2307,7 @@ class PipelinesAPI:
  :param filters: :class:`Filters` (optional)
  Filters on which Pipeline packages to include in the deployed graph.
  :param gateway_definition: :class:`IngestionGatewayPipelineDefinition` (optional)
- The definition of a gateway pipeline to support CDC.
+ The definition of a gateway pipeline to support change data capture.
  :param id: str (optional)
  Unique identifier for this pipeline.
  :param ingestion_definition: :class:`IngestionPipelineDefinition` (optional)

@@ -2255,6 +2321,8 @@ class PipelinesAPI:
  List of notification settings for this pipeline.
  :param photon: bool (optional)
  Whether Photon is enabled for this pipeline.
+ :param restart_window: :class:`RestartWindow` (optional)
+ Restart window of this pipeline.
  :param schema: str (optional)
  The default schema (database) where tables are read from or published to. The presence of this field
  implies that the pipeline is in direct publishing mode.

@@ -2290,6 +2358,7 @@ class PipelinesAPI:
  if name is not None: body['name'] = name
  if notifications is not None: body['notifications'] = [v.as_dict() for v in notifications]
  if photon is not None: body['photon'] = photon
+ if restart_window is not None: body['restart_window'] = restart_window.as_dict()
  if schema is not None: body['schema'] = schema
  if serverless is not None: body['serverless'] = serverless
  if storage is not None: body['storage'] = storage
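
Taken together with the RestartWindow type above, the new create() parameter can be passed straight through. A hedged sketch, where the pipeline name and notebook path are assumptions and not taken from this diff:

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service.pipelines import (NotebookLibrary, PipelineLibrary, RestartWindow,
                                                  RestartWindowDaysOfWeek)

    w = WorkspaceClient()

    created = w.pipelines.create(
        name='sdk-restart-window-demo',                                              # assumed name
        libraries=[PipelineLibrary(notebook=NotebookLibrary(path='/Users/me/dlt_demo'))],  # assumed path
        continuous=True,
        restart_window=RestartWindow(start_hour=1,
                                     days_of_week=RestartWindowDaysOfWeek.SATURDAY))
    print(created.pipeline_id)
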
@@ -2518,7 +2587,8 @@ class PipelinesAPI:
  access_control_list: Optional[List[PipelineAccessControlRequest]] = None) -> PipelinePermissions:
  """Set pipeline permissions.

- Sets permissions on a pipeline. Pipelines can inherit permissions from their root object.
+ Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct
+ permissions if none are specified. Objects can inherit permissions from their root object.

  :param pipeline_id: str
  The pipeline for which to get or manage permissions.
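
The reworded docstring spells out the replace-not-merge semantics of set_permissions: the call replaces the full set of direct permissions, and clears them if an empty list is passed, whereas update_permissions only adds or modifies the entries it receives. A hedged sketch, with the group name as a placeholder and the access-control class names assumed from the same pipelines module:

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service.pipelines import (PipelineAccessControlRequest, PipelinePermissionLevel)

    w = WorkspaceClient()
    pipeline_id = '<existing-pipeline-id>'  # placeholder

    # Replaces all direct permissions on the pipeline with exactly this list.
    w.pipelines.set_permissions(
        pipeline_id=pipeline_id,
        access_control_list=[
            PipelineAccessControlRequest(group_name='data-engineers',  # assumed group
                                         permission_level=PipelinePermissionLevel.CAN_MANAGE)
        ])
    # In contrast, update_permissions would merge this entry into the existing direct permissions.
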
@@ -2622,6 +2692,7 @@ class PipelinesAPI:
  name: Optional[str] = None,
  notifications: Optional[List[Notifications]] = None,
  photon: Optional[bool] = None,
+ restart_window: Optional[RestartWindow] = None,
  schema: Optional[str] = None,
  serverless: Optional[bool] = None,
  storage: Optional[str] = None,

@@ -2661,7 +2732,7 @@ class PipelinesAPI:
  :param filters: :class:`Filters` (optional)
  Filters on which Pipeline packages to include in the deployed graph.
  :param gateway_definition: :class:`IngestionGatewayPipelineDefinition` (optional)
- The definition of a gateway pipeline to support CDC.
+ The definition of a gateway pipeline to support change data capture.
  :param id: str (optional)
  Unique identifier for this pipeline.
  :param ingestion_definition: :class:`IngestionPipelineDefinition` (optional)

@@ -2675,6 +2746,8 @@ class PipelinesAPI:
  List of notification settings for this pipeline.
  :param photon: bool (optional)
  Whether Photon is enabled for this pipeline.
+ :param restart_window: :class:`RestartWindow` (optional)
+ Restart window of this pipeline.
  :param schema: str (optional)
  The default schema (database) where tables are read from or published to. The presence of this field
  implies that the pipeline is in direct publishing mode.

@@ -2710,6 +2783,7 @@ class PipelinesAPI:
  if name is not None: body['name'] = name
  if notifications is not None: body['notifications'] = [v.as_dict() for v in notifications]
  if photon is not None: body['photon'] = photon
+ if restart_window is not None: body['restart_window'] = restart_window.as_dict()
  if schema is not None: body['schema'] = schema
  if serverless is not None: body['serverless'] = serverless
  if storage is not None: body['storage'] = storage
@@ -412,6 +412,9 @@ class CreateWorkspaceRequest:
  gke_config: Optional[GkeConfig] = None
  """The configurations for the GKE cluster of a Databricks workspace."""

+ is_no_public_ip_enabled: Optional[bool] = None
+ """Whether no public IP is enabled for the workspace."""
+
  location: Optional[str] = None
  """The Google Cloud region of the workspace data plane in your Google account. For example,
  `us-east4`."""

@@ -460,6 +463,8 @@ class CreateWorkspaceRequest:
  if self.gcp_managed_network_config:
  body['gcp_managed_network_config'] = self.gcp_managed_network_config.as_dict()
  if self.gke_config: body['gke_config'] = self.gke_config.as_dict()
+ if self.is_no_public_ip_enabled is not None:
+ body['is_no_public_ip_enabled'] = self.is_no_public_ip_enabled
  if self.location is not None: body['location'] = self.location
  if self.managed_services_customer_managed_key_id is not None:
  body['managed_services_customer_managed_key_id'] = self.managed_services_customer_managed_key_id

@@ -486,6 +491,7 @@ class CreateWorkspaceRequest:
  gcp_managed_network_config=_from_dict(d, 'gcp_managed_network_config',
  GcpManagedNetworkConfig),
  gke_config=_from_dict(d, 'gke_config', GkeConfig),
+ is_no_public_ip_enabled=d.get('is_no_public_ip_enabled', None),
  location=d.get('location', None),
  managed_services_customer_managed_key_id=d.get('managed_services_customer_managed_key_id',
  None),

@@ -632,6 +638,35 @@ class ErrorType(Enum):
  VPC = 'vpc'


+ @dataclass
+ class ExternalCustomerInfo:
+ authoritative_user_email: Optional[str] = None
+ """Email of the authoritative user."""
+
+ authoritative_user_full_name: Optional[str] = None
+ """The authoritative user full name."""
+
+ customer_name: Optional[str] = None
+ """The legal entity name for the external workspace"""
+
+ def as_dict(self) -> dict:
+ """Serializes the ExternalCustomerInfo into a dictionary suitable for use as a JSON request body."""
+ body = {}
+ if self.authoritative_user_email is not None:
+ body['authoritative_user_email'] = self.authoritative_user_email
+ if self.authoritative_user_full_name is not None:
+ body['authoritative_user_full_name'] = self.authoritative_user_full_name
+ if self.customer_name is not None: body['customer_name'] = self.customer_name
+ return body
+
+ @classmethod
+ def from_dict(cls, d: Dict[str, any]) -> ExternalCustomerInfo:
+ """Deserializes the ExternalCustomerInfo from a dictionary."""
+ return cls(authoritative_user_email=d.get('authoritative_user_email', None),
+ authoritative_user_full_name=d.get('authoritative_user_full_name', None),
+ customer_name=d.get('customer_name', None))
+
+
  @dataclass
  class GcpKeyInfo:
  kms_key_id: str

@@ -1210,6 +1245,10 @@ class UpdateWorkspaceRequest:
  customer-managed VPC. For failed workspaces only, you can switch from a Databricks-managed VPC
  to a customer-managed VPC by updating the workspace to add a network configuration ID."""

+ private_access_settings_id: Optional[str] = None
+ """The ID of the workspace's private access settings configuration object. This parameter is
+ available only for updating failed workspaces."""
+
  storage_configuration_id: Optional[str] = None
  """The ID of the workspace's storage configuration object. This parameter is available only for
  updating failed workspaces."""

@@ -1232,6 +1271,8 @@ class UpdateWorkspaceRequest:
  if self.network_connectivity_config_id is not None:
  body['network_connectivity_config_id'] = self.network_connectivity_config_id
  if self.network_id is not None: body['network_id'] = self.network_id
+ if self.private_access_settings_id is not None:
+ body['private_access_settings_id'] = self.private_access_settings_id
  if self.storage_configuration_id is not None:
  body['storage_configuration_id'] = self.storage_configuration_id
  if self.storage_customer_managed_key_id is not None:

@@ -1249,6 +1290,7 @@ class UpdateWorkspaceRequest:
  None),
  network_connectivity_config_id=d.get('network_connectivity_config_id', None),
  network_id=d.get('network_id', None),
+ private_access_settings_id=d.get('private_access_settings_id', None),
  storage_configuration_id=d.get('storage_configuration_id', None),
  storage_customer_managed_key_id=d.get('storage_customer_managed_key_id', None),
  workspace_id=d.get('workspace_id', None))

@@ -1443,6 +1485,10 @@ class Workspace:

  This value must be unique across all non-deleted deployments across all AWS regions."""

+ external_customer_info: Optional[ExternalCustomerInfo] = None
+ """If this workspace is for a external customer, then external_customer_info is populated. If this
+ workspace is not for a external customer, then external_customer_info is empty."""
+
  gcp_managed_network_config: Optional[GcpManagedNetworkConfig] = None
  """The network settings for the workspace. The configurations are only for Databricks-managed VPCs.
  It is ignored if you specify a customer-managed VPC in the `network_id` field.", All the IP

@@ -1466,6 +1512,9 @@ class Workspace:
  gke_config: Optional[GkeConfig] = None
  """The configurations for the GKE cluster of a Databricks workspace."""

+ is_no_public_ip_enabled: Optional[bool] = None
+ """Whether no public IP is enabled for the workspace."""
+
  location: Optional[str] = None
  """The Google Cloud region of the workspace data plane in your Google account (for example,
  `us-east4`)."""

@@ -1524,9 +1573,12 @@ class Workspace:
  if self.credentials_id is not None: body['credentials_id'] = self.credentials_id
  if self.custom_tags: body['custom_tags'] = self.custom_tags
  if self.deployment_name is not None: body['deployment_name'] = self.deployment_name
+ if self.external_customer_info: body['external_customer_info'] = self.external_customer_info.as_dict()
  if self.gcp_managed_network_config:
  body['gcp_managed_network_config'] = self.gcp_managed_network_config.as_dict()
  if self.gke_config: body['gke_config'] = self.gke_config.as_dict()
+ if self.is_no_public_ip_enabled is not None:
+ body['is_no_public_ip_enabled'] = self.is_no_public_ip_enabled
  if self.location is not None: body['location'] = self.location
  if self.managed_services_customer_managed_key_id is not None:
  body['managed_services_customer_managed_key_id'] = self.managed_services_customer_managed_key_id

@@ -1557,9 +1609,11 @@ class Workspace:
  credentials_id=d.get('credentials_id', None),
  custom_tags=d.get('custom_tags', None),
  deployment_name=d.get('deployment_name', None),
+ external_customer_info=_from_dict(d, 'external_customer_info', ExternalCustomerInfo),
  gcp_managed_network_config=_from_dict(d, 'gcp_managed_network_config',
  GcpManagedNetworkConfig),
  gke_config=_from_dict(d, 'gke_config', GkeConfig),
+ is_no_public_ip_enabled=d.get('is_no_public_ip_enabled', None),
  location=d.get('location', None),
  managed_services_customer_managed_key_id=d.get('managed_services_customer_managed_key_id',
  None),

@@ -2399,6 +2453,7 @@ class WorkspacesAPI:
  deployment_name: Optional[str] = None,
  gcp_managed_network_config: Optional[GcpManagedNetworkConfig] = None,
  gke_config: Optional[GkeConfig] = None,
+ is_no_public_ip_enabled: Optional[bool] = None,
  location: Optional[str] = None,
  managed_services_customer_managed_key_id: Optional[str] = None,
  network_id: Optional[str] = None,

@@ -2477,6 +2532,8 @@ class WorkspacesAPI:
  [calculate subnet sizes for a new workspace]: https://docs.gcp.databricks.com/administration-guide/cloud-configurations/gcp/network-sizing.html
  :param gke_config: :class:`GkeConfig` (optional)
  The configurations for the GKE cluster of a Databricks workspace.
+ :param is_no_public_ip_enabled: bool (optional)
+ Whether no public IP is enabled for the workspace.
  :param location: str (optional)
  The Google Cloud region of the workspace data plane in your Google account. For example, `us-east4`.
  :param managed_services_customer_managed_key_id: str (optional)

@@ -2519,6 +2576,7 @@ class WorkspacesAPI:
  if gcp_managed_network_config is not None:
  body['gcp_managed_network_config'] = gcp_managed_network_config.as_dict()
  if gke_config is not None: body['gke_config'] = gke_config.as_dict()
+ if is_no_public_ip_enabled is not None: body['is_no_public_ip_enabled'] = is_no_public_ip_enabled
  if location is not None: body['location'] = location
  if managed_services_customer_managed_key_id is not None:
  body['managed_services_customer_managed_key_id'] = managed_services_customer_managed_key_id
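
The new flag is threaded through the account-level workspace creation call shown above. A hedged sketch against the Account API; all names and ids are placeholders, and the exact set of required fields depends on the cloud and the rest of your account setup:

    from databricks.sdk import AccountClient

    a = AccountClient()  # assumes account-level auth (host, account_id, credentials) is configured

    waiter = a.workspaces.create(
        workspace_name='secure-workspace',   # assumed name
        location='us-east4',                 # GCP region, per the docstring above
        is_no_public_ip_enabled=True)        # new in 0.38.0
    workspace = waiter.result()              # or use a.workspaces.create_and_wait(...)
    print(workspace.workspace_id, workspace.is_no_public_ip_enabled)
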
@@ -2552,6 +2610,7 @@ class WorkspacesAPI:
  deployment_name: Optional[str] = None,
  gcp_managed_network_config: Optional[GcpManagedNetworkConfig] = None,
  gke_config: Optional[GkeConfig] = None,
+ is_no_public_ip_enabled: Optional[bool] = None,
  location: Optional[str] = None,
  managed_services_customer_managed_key_id: Optional[str] = None,
  network_id: Optional[str] = None,

@@ -2568,6 +2627,7 @@ class WorkspacesAPI:
  deployment_name=deployment_name,
  gcp_managed_network_config=gcp_managed_network_config,
  gke_config=gke_config,
+ is_no_public_ip_enabled=is_no_public_ip_enabled,
  location=location,
  managed_services_customer_managed_key_id=managed_services_customer_managed_key_id,
  network_id=network_id,

@@ -2653,6 +2713,7 @@ class WorkspacesAPI:
  managed_services_customer_managed_key_id: Optional[str] = None,
  network_connectivity_config_id: Optional[str] = None,
  network_id: Optional[str] = None,
+ private_access_settings_id: Optional[str] = None,
  storage_configuration_id: Optional[str] = None,
  storage_customer_managed_key_id: Optional[str] = None) -> Wait[Workspace]:
  """Update workspace configuration.

@@ -2771,6 +2832,9 @@ class WorkspacesAPI:
  The ID of the workspace's network configuration object. Used only if you already use a
  customer-managed VPC. For failed workspaces only, you can switch from a Databricks-managed VPC to a
  customer-managed VPC by updating the workspace to add a network configuration ID.
+ :param private_access_settings_id: str (optional)
+ The ID of the workspace's private access settings configuration object. This parameter is available
+ only for updating failed workspaces.
  :param storage_configuration_id: str (optional)
  The ID of the workspace's storage configuration object. This parameter is available only for
  updating failed workspaces.

@@ -2791,6 +2855,8 @@ class WorkspacesAPI:
  if network_connectivity_config_id is not None:
  body['network_connectivity_config_id'] = network_connectivity_config_id
  if network_id is not None: body['network_id'] = network_id
+ if private_access_settings_id is not None:
+ body['private_access_settings_id'] = private_access_settings_id
  if storage_configuration_id is not None: body['storage_configuration_id'] = storage_configuration_id
  if storage_customer_managed_key_id is not None:
  body['storage_customer_managed_key_id'] = storage_customer_managed_key_id

@@ -2814,6 +2880,7 @@ class WorkspacesAPI:
  managed_services_customer_managed_key_id: Optional[str] = None,
  network_connectivity_config_id: Optional[str] = None,
  network_id: Optional[str] = None,
+ private_access_settings_id: Optional[str] = None,
  storage_configuration_id: Optional[str] = None,
  storage_customer_managed_key_id: Optional[str] = None,
  timeout=timedelta(minutes=20)) -> Workspace:

@@ -2823,6 +2890,7 @@ class WorkspacesAPI:
  managed_services_customer_managed_key_id=managed_services_customer_managed_key_id,
  network_connectivity_config_id=network_connectivity_config_id,
  network_id=network_id,
+ private_access_settings_id=private_access_settings_id,
  storage_configuration_id=storage_configuration_id,
  storage_customer_managed_key_id=storage_customer_managed_key_id,
  workspace_id=workspace_id).result(timeout=timeout)
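
As the docstring above notes, the new private access settings parameter only applies when repairing a failed workspace. A hedged sketch using the blocking wrapper shown in the last hunk; both ids are placeholders:

    from databricks.sdk import AccountClient

    a = AccountClient()

    # For a failed workspace, attach (or swap) its private access settings object and
    # block until the update finishes.
    updated = a.workspaces.update_and_wait(
        workspace_id=1234567890,                                   # placeholder workspace id
        private_access_settings_id='<private-access-settings-id>')
    print(updated.workspace_status)
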
@@ -2994,8 +2994,8 @@ class ServingEndpointsAPI:
  ) -> ServingEndpointPermissions:
  """Set serving endpoint permissions.

- Sets permissions on a serving endpoint. Serving endpoints can inherit permissions from their root
- object.
+ Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct
+ permissions if none are specified. Objects can inherit permissions from their root object.

  :param serving_endpoint_id: str
  The serving endpoint for which to get or manage permissions.
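
The same replace-not-merge clarification lands on the serving endpoints permission setter. A hedged sketch, with the endpoint id and group as placeholders and the access-control class names assumed from the serving service module:

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service.serving import (ServingEndpointAccessControlRequest,
                                                ServingEndpointPermissionLevel)

    w = WorkspaceClient()

    # Replaces all direct permissions on the endpoint with exactly this list.
    w.serving_endpoints.set_permissions(
        serving_endpoint_id='<serving-endpoint-id>',
        access_control_list=[
            ServingEndpointAccessControlRequest(group_name='ml-team',  # assumed group
                                                permission_level=ServingEndpointPermissionLevel.CAN_QUERY)
        ])
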