databricks-sdk 0.59.0__py3-none-any.whl → 0.60.0__py3-none-any.whl

This diff compares the contents of two publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only.

Note: this version of databricks-sdk has been flagged as potentially problematic.

@@ -13,7 +13,7 @@ from databricks.sdk.mixins.files import DbfsExt, FilesExt
 from databricks.sdk.mixins.jobs import JobsExt
 from databricks.sdk.mixins.open_ai_client import ServingEndpointsExt
 from databricks.sdk.mixins.workspace import WorkspaceExt
-from databricks.sdk.service import aibuilder as pkg_aibuilder
+from databricks.sdk.service import agentbricks as pkg_agentbricks
 from databricks.sdk.service import apps as pkg_apps
 from databricks.sdk.service import billing as pkg_billing
 from databricks.sdk.service import catalog as pkg_catalog
@@ -36,7 +36,7 @@ from databricks.sdk.service import sharing as pkg_sharing
 from databricks.sdk.service import sql as pkg_sql
 from databricks.sdk.service import vectorsearch as pkg_vectorsearch
 from databricks.sdk.service import workspace as pkg_workspace
-from databricks.sdk.service.aibuilder import AiBuilderAPI
+from databricks.sdk.service.agentbricks import AgentBricksAPI
 from databricks.sdk.service.apps import AppsAPI
 from databricks.sdk.service.billing import (BillableUsageAPI, BudgetPolicyAPI,
                                             BudgetsAPI, LogDeliveryAPI,
@@ -240,7 +240,7 @@ class WorkspaceClient:
         serving_endpoints = ServingEndpointsExt(self._api_client)
         self._access_control = pkg_iam.AccessControlAPI(self._api_client)
         self._account_access_control_proxy = pkg_iam.AccountAccessControlProxyAPI(self._api_client)
-        self._ai_builder = pkg_aibuilder.AiBuilderAPI(self._api_client)
+        self._agent_bricks = pkg_agentbricks.AgentBricksAPI(self._api_client)
         self._alerts = pkg_sql.AlertsAPI(self._api_client)
         self._alerts_legacy = pkg_sql.AlertsLegacyAPI(self._api_client)
         self._alerts_v2 = pkg_sql.AlertsV2API(self._api_client)
@@ -377,9 +377,9 @@ class WorkspaceClient:
         return self._account_access_control_proxy

     @property
-    def ai_builder(self) -> pkg_aibuilder.AiBuilderAPI:
+    def agent_bricks(self) -> pkg_agentbricks.AgentBricksAPI:
         """The Custom LLMs service manages state and powers the UI for the Custom LLM product."""
-        return self._ai_builder
+        return self._agent_bricks

     @property
     def alerts(self) -> pkg_sql.AlertsAPI:
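For downstream code this rename is a breaking change at the client surface: the service is now reached through `agent_bricks` instead of `ai_builder`. A minimal migration sketch, assuming a configured workspace and a placeholder LLM id:

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()

    # 0.59.0: llm = w.ai_builder.get_custom_llm(id="my-llm-id")
    llm = w.agent_bricks.get_custom_llm(id="my-llm-id")  # "my-llm-id" is a placeholder
    print(llm.instructions)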
@@ -331,7 +331,7 @@ def file_oidc(cfg) -> Optional[CredentialsProvider]:
 # that provides a Databricks token from an IdTokenSource.
 def _oidc_credentials_provider(cfg, id_token_source: oidc.IdTokenSource) -> Optional[CredentialsProvider]:
     try:
-        id_token = id_token_source.id_token()
+        id_token_source.id_token()  # validate the id_token_source
     except Exception as e:
         logger.debug(f"Failed to get OIDC token: {e}")
         return None
@@ -341,7 +341,7 @@ def _oidc_credentials_provider(cfg, id_token_source: oidc.IdTokenSource) -> Optional[CredentialsProvider]:
         token_endpoint=cfg.oidc_endpoints.token_endpoint,
         client_id=cfg.client_id,
         account_id=cfg.account_id,
-        id_token=id_token,
+        id_token_source=id_token_source,
         disable_async=cfg.disable_async_token_refresh,
     )

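The provider now calls `id_token_source.id_token()` once purely as a validation probe and passes the source itself onward, so the JWT can be re-fetched on every token refresh instead of being frozen at construction. The difference, sketched with illustrative classes (not SDK API):

    import time

    class StaticToken:
        """Old shape: one JWT captured up front; it eventually expires."""

        def __init__(self, jwt: str):
            self._jwt = jwt

        def id_token(self) -> str:
            return self._jwt

    class FreshTokenSource:
        """New shape: a source that can mint a current JWT on each refresh."""

        def id_token(self) -> str:
            return f"jwt-{int(time.time())}"  # stand-in for a real IdP call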
@@ -28,7 +28,6 @@ from .._base_client import _BaseClient, _RawResponse, _StreamingResponse
 from .._property import _cached_property
 from ..config import Config
 from ..errors import AlreadyExists, NotFound
-from ..errors.customizer import _RetryAfterCustomizer
 from ..errors.mapper import _error_mapper
 from ..retries import retried
 from ..service import files
@@ -577,6 +576,27 @@ class _DbfsPath(_Path):
         return f"<_DbfsPath {self._path}>"


+class _RetryableException(Exception):
+    """Base class for retryable exceptions in DBFS operations."""
+
+    def __init__(self, message: str, http_status_code: int):
+        super().__init__()
+        self.message = message
+        self.http_status_code = http_status_code
+
+    def __str__(self) -> str:
+        return f"{self.message} (HTTP Status: {self.http_status_code})"
+
+    @staticmethod
+    def make_error(response: requests.Response) -> "_RetryableException":
+        """Map the response to a retryable exception."""
+
+        return _RetryableException(
+            message=response.text,
+            http_status_code=response.status_code,
+        )
+
+
 class DbfsExt(files.DbfsAPI):
     __doc__ = files.DbfsAPI.__doc__

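A quick sketch of what `make_error` produces, building a `requests.Response` by hand with the `_RetryableException` defined above (it lives in `databricks.sdk.mixins.files`; setting `_content` directly is a test-only shortcut that `response.text` decodes):

    import requests

    resp = requests.Response()
    resp.status_code = 503
    resp._content = b"service unavailable"

    err = _RetryableException.make_error(resp)
    print(err)  # service unavailable (HTTP Status: 503)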
@@ -885,7 +905,7 @@ class FilesExt(files.FilesAPI):
                 timeout=self._config.multipart_upload_single_chunk_upload_timeout_seconds,
             )

-            upload_response = self._retry_idempotent_operation(perform, rewind)
+            upload_response = self._retry_cloud_idempotent_operation(perform, rewind)

             if upload_response.status_code in (200, 201):
                 # Chunk upload successful
@@ -1097,7 +1117,7 @@ class FilesExt(files.FilesAPI):
         )

         try:
-            return self._retry_idempotent_operation(perform)
+            return self._retry_cloud_idempotent_operation(perform)
         except RequestException:
             _LOG.warning("Failed to retrieve upload status")
             return None
@@ -1116,7 +1136,7 @@ class FilesExt(files.FilesAPI):
         # a 503 or 500 response, then you need to resume the interrupted upload from where it left off.

         # Let's follow that for all potentially retryable status codes.
-        # Together with the catch block below we replicate the logic in _retry_idempotent_operation().
+        # Together with the catch block below we replicate the logic in _retry_databricks_idempotent_operation().
         if upload_response.status_code in self._RETRYABLE_STATUS_CODES:
             if retry_count < self._config.multipart_upload_max_retries:
                 retry_count += 1
@@ -1243,7 +1263,7 @@ class FilesExt(files.FilesAPI):
             timeout=self._config.multipart_upload_single_chunk_upload_timeout_seconds,
         )

-        abort_response = self._retry_idempotent_operation(perform)
+        abort_response = self._retry_cloud_idempotent_operation(perform)

         if abort_response.status_code not in (200, 201):
             raise ValueError(abort_response)
@@ -1265,7 +1285,7 @@ class FilesExt(files.FilesAPI):
             timeout=self._config.multipart_upload_single_chunk_upload_timeout_seconds,
         )

-        abort_response = self._retry_idempotent_operation(perform)
+        abort_response = self._retry_cloud_idempotent_operation(perform)

         if abort_response.status_code not in (200, 201):
             raise ValueError(abort_response)
@@ -1283,23 +1303,31 @@ class FilesExt(files.FilesAPI):
         session.mount("http://", http_adapter)
         return session

-    def _retry_idempotent_operation(
+    def _retry_cloud_idempotent_operation(
         self, operation: Callable[[], requests.Response], before_retry: Callable = None
     ) -> requests.Response:
-        """Perform given idempotent operation with necessary retries. Since operation is idempotent it's
-        safe to retry it for response codes where server state might have changed.
+        """Perform given idempotent operation with necessary retries for requests to non Databricks APIs.
+        For cloud APIs, we will retry on network errors and on server response codes.
+        Since operation is idempotent it's safe to retry it for response codes where server state might have changed.
         """

-        def delegate():
+        def delegate() -> requests.Response:
             response = operation()
             if response.status_code in self._RETRYABLE_STATUS_CODES:
-                attrs = {}
-                # this will assign "retry_after_secs" to the attrs, essentially making exception look retryable
-                _RetryAfterCustomizer().customize_error(response, attrs)
-                raise _error_mapper(response, attrs)
+                raise _RetryableException.make_error(response)
             else:
                 return response

+        def extended_is_retryable(e: BaseException) -> Optional[str]:
+            retry_reason_from_base = _BaseClient._is_retryable(e)
+            if retry_reason_from_base is not None:
+                return retry_reason_from_base
+
+            if isinstance(e, _RetryableException):
+                # this is a retriable exception, but not a network error
+                return f"retryable exception (status_code:{e.http_status_code})"
+            return None
+
         # following _BaseClient timeout
         retry_timeout_seconds = self._config.retry_timeout_seconds or 300

@@ -1307,7 +1335,7 @@ class FilesExt(files.FilesAPI):
             timeout=timedelta(seconds=retry_timeout_seconds),
             # also retry on network errors (connection error, connection timeout)
             # where we believe request didn't reach the server
-            is_retryable=_BaseClient._is_retryable,
+            is_retryable=extended_is_retryable,
             before_retry=before_retry,
         )(delegate)()

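The net effect is that cloud-storage calls now retry both on network errors (via `_BaseClient._is_retryable`) and on retryable HTTP status codes surfaced as `_RetryableException`, without routing non-Databricks responses through the Databricks error mapper. The same `retried` helper from `databricks.sdk.retries` can be exercised standalone; a sketch with a toy predicate:

    from datetime import timedelta

    from databricks.sdk.retries import retried

    attempts = 0

    def flaky() -> str:
        global attempts
        attempts += 1
        if attempts < 3:
            raise ConnectionError("transient")  # simulated network blip
        return "ok"

    # Retry while the predicate returns a reason string; stop when it returns None.
    result = retried(
        timeout=timedelta(seconds=30),
        is_retryable=lambda e: "connection error" if isinstance(e, ConnectionError) else None,
    )(flaky)()
    print(result, attempts)  # ok 3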
@@ -4,6 +4,7 @@ from typing import Dict, Optional
 from requests import Response

 from databricks.sdk.service.serving import (ExternalFunctionRequestHttpMethod,
+                                            HttpRequestResponse,
                                             ServingEndpointsAPI)

@@ -88,15 +89,30 @@ class ServingEndpointsExt(ServingEndpointsAPI):
         """
         response = Response()
         response.status_code = 200
-        server_response = super().http_request(
-            connection_name=conn,
-            method=method,
-            path=path,
-            headers=js.dumps(headers) if headers is not None else None,
-            json=js.dumps(json) if json is not None else None,
-            params=js.dumps(params) if params is not None else None,
+
+        # We currently don't call super.http_request because we need to pass in response_headers
+        # This is a temporary fix to get the headers we need for the MCP session id
+        # TODO: Remove this once we have a better way to get back the response headers
+        headers_to_capture = ["mcp-session-id"]
+        res = self._api.do(
+            "POST",
+            "/api/2.0/external-function",
+            body={
+                "connection_name": conn,
+                "method": method.value,
+                "path": path,
+                "headers": js.dumps(headers) if headers is not None else None,
+                "json": js.dumps(json) if json is not None else None,
+                "params": js.dumps(params) if params is not None else None,
+            },
+            headers={"Accept": "text/plain", "Content-Type": "application/json"},
+            raw=True,
+            response_headers=headers_to_capture,
         )

+        # Create HttpRequestResponse from the raw response
+        server_response = HttpRequestResponse.from_dict(res)
+
         # Read the content from the HttpRequestResponse object
         if hasattr(server_response, "contents") and hasattr(server_response.contents, "read"):
             raw_content = server_response.contents.read()  # Read the bytes
@@ -109,4 +125,9 @@ class ServingEndpointsExt(ServingEndpointsAPI):
         else:
             raise ValueError("Contents must be bytes.")

+        # Copy headers from raw response to Response
+        for header_name in headers_to_capture:
+            if header_name in res:
+                response.headers[header_name] = res[header_name]
+
         return response
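Callers get the captured header back on the returned `requests.Response`. A hedged usage sketch; the connection name, path, and payload are placeholders, and the `conn`/`method`/`path` parameter names follow the request body built above:

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service.serving import ExternalFunctionRequestHttpMethod

    w = WorkspaceClient()
    resp = w.serving_endpoints.http_request(
        conn="my_mcp_connection",  # placeholder UC connection name
        method=ExternalFunctionRequestHttpMethod.POST,
        path="/mcp",
        json={"jsonrpc": "2.0", "id": 1, "method": "initialize"},
    )
    print(resp.status_code, resp.headers.get("mcp-session-id"))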
databricks/sdk/oidc.py CHANGED
@@ -188,14 +188,18 @@ class DatabricksOidcTokenSource(oauth.TokenSource):
         logger.debug("Client ID provided, authenticating with Workload Identity Federation")

         id_token = self._id_token_source.id_token()
+        return self._exchange_id_token(id_token)

+    # This function is used to create the OAuth client.
+    # It exists to make it easier to test.
+    def _exchange_id_token(self, id_token: IdToken) -> oauth.Token:
         client = oauth.ClientCredentials(
             client_id=self._client_id,
-            client_secret="",  # we have no (rotatable) secrets in OIDC flow
+            client_secret="",  # there is no (rotatable) secrets in the OIDC flow
             token_url=self._token_endpoint,
             endpoint_params={
                 "subject_token_type": "urn:ietf:params:oauth:token-type:jwt",
-                "subject_token": id_token,
+                "subject_token": id_token.jwt,
                 "grant_type": "urn:ietf:params:oauth:grant-type:token-exchange",
             },
             scopes=["all-apis"],
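Besides the testability split, note the signature change: `_exchange_id_token` takes an `IdToken` object and sends `id_token.jwt`, where the old code passed the raw string. The exchange itself is a standard RFC 8693 token exchange; its wire shape, illustrated with plain `requests` and placeholder values:

    import requests

    # Illustrative only: endpoint, client_id, and subject_token are placeholders.
    resp = requests.post(
        "https://example.cloud.databricks.com/oidc/v1/token",
        data={
            "client_id": "my-client-id",
            "grant_type": "urn:ietf:params:oauth:grant-type:token-exchange",
            "subject_token_type": "urn:ietf:params:oauth:token-type:jwt",
            "subject_token": "<federated-id-token-jwt>",
            "scope": "all-apis",
        },
    )
    print(resp.status_code)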
@@ -23,9 +23,6 @@ class CustomLlm:
     instructions: str
     """Instructions for the custom LLM to follow"""

-    optimization_state: State
-    """If optimization is kicked off, tracks the state of the custom LLM"""
-
     agent_artifact_path: Optional[str] = None

     creation_time: Optional[str] = None
@@ -45,6 +42,9 @@ class CustomLlm:

     id: Optional[str] = None

+    optimization_state: Optional[State] = None
+    """If optimization is kicked off, tracks the state of the custom LLM"""
+
     def as_dict(self) -> dict:
         """Serializes the CustomLlm into a dictionary suitable for use as a JSON request body."""
         body = {}
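Relaxing `optimization_state` to `Optional[State]` (and moving it after the required fields, as dataclasses demand) means a server payload that omits the field now deserializes cleanly. A small sketch using the generated `from_dict` helper, with the dict keys limited to fields visible in this diff:

    from databricks.sdk.service.agentbricks import CustomLlm

    d = {"instructions": "Answer support tickets politely."}
    llm = CustomLlm.from_dict(d)  # missing optional keys default to None
    assert llm.optimization_state is None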
@@ -190,7 +190,7 @@ class Table:
         )


-class AiBuilderAPI:
+class AgentBricksAPI:
     """The Custom LLMs service manages state and powers the UI for the Custom LLM product."""

     def __init__(self, api_client):
@@ -270,7 +270,7 @@ class AiBuilderAPI:
             "Accept": "application/json",
         }

-        self._api.do("DELETE", f"/api/2.0/custom-lms/{id}", headers=headers)
+        self._api.do("DELETE", f"/api/2.0/custom-llms/{id}", headers=headers)

     def get_custom_llm(self, id: str) -> CustomLlm:
         """Get a Custom LLM.
@@ -7723,6 +7723,7 @@ class SchemaInfo:


 class SecurableKind(Enum):
+    """Latest kind: CONNECTION_SQLSERVER_OAUTH_M2M = 254; Next id:255"""

     TABLE_DB_STORAGE = "TABLE_DB_STORAGE"
     TABLE_DELTA = "TABLE_DELTA"
@@ -132,15 +132,24 @@ class CleanRoomAccessRestricted(Enum):
 class CleanRoomAsset:
     """Metadata of the clean room asset"""

-    added_at: Optional[int] = None
-    """When the asset is added to the clean room, in epoch milliseconds."""
+    name: str
+    """A fully qualified name that uniquely identifies the asset within the clean room. This is also
+    the name displayed in the clean room UI.
+
+    For UC securable assets (tables, volumes, etc.), the format is
+    *shared_catalog*.*shared_schema*.*asset_name*
+
+    For notebooks, the name is the notebook file name."""

-    asset_type: Optional[CleanRoomAssetAssetType] = None
+    asset_type: CleanRoomAssetAssetType
     """The type of the asset."""

+    added_at: Optional[int] = None
+    """When the asset is added to the clean room, in epoch milliseconds."""
+
     clean_room_name: Optional[str] = None
-    """The name of the clean room this asset belongs to. This is an output-only field to ensure proper
-    resource identification."""
+    """The name of the clean room this asset belongs to. This field is required for create operations
+    and populated by the server for responses."""

     foreign_table: Optional[CleanRoomAssetForeignTable] = None
     """Foreign table details available to all collaborators of the clean room. Present if and only if
@@ -150,15 +159,6 @@ class CleanRoomAsset:
     """Local details for a foreign that are only available to its owner. Present if and only if
     **asset_type** is **FOREIGN_TABLE**"""

-    name: Optional[str] = None
-    """A fully qualified name that uniquely identifies the asset within the clean room. This is also
-    the name displayed in the clean room UI.
-
-    For UC securable assets (tables, volumes, etc.), the format is
-    *shared_catalog*.*shared_schema*.*asset_name*
-
-    For notebooks, the name is the notebook file name."""
-
     notebook: Optional[CleanRoomAssetNotebook] = None
     """Notebook details available to all collaborators of the clean room. Present if and only if
     **asset_type** is **NOTEBOOK_FILE**"""
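Promoting `name` and `asset_type` to required fields is a constructor-level breaking change: both must now be supplied when building a `CleanRoomAsset`. A minimal sketch using the `FOREIGN_TABLE` member named in the docstrings above (the fully qualified name is a placeholder):

    from databricks.sdk.service.cleanrooms import (CleanRoomAsset,
                                                   CleanRoomAssetAssetType)

    asset = CleanRoomAsset(
        name="shared_catalog.shared_schema.my_table",
        asset_type=CleanRoomAssetAssetType.FOREIGN_TABLE,
    )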
@@ -314,7 +314,7 @@ class CleanRoomAssetForeignTable:

 @dataclass
 class CleanRoomAssetForeignTableLocalDetails:
-    local_name: Optional[str] = None
+    local_name: str
     """The fully qualified name of the foreign table in its owner's local metastore, in the format of
     *catalog*.*schema*.*foreign_table_name*"""

@@ -340,13 +340,13 @@ class CleanRoomAssetForeignTableLocalDetails:

 @dataclass
 class CleanRoomAssetNotebook:
-    etag: Optional[str] = None
-    """Server generated etag that represents the notebook version."""
-
-    notebook_content: Optional[str] = None
+    notebook_content: str
     """Base 64 representation of the notebook contents. This is the same format as returned by
     :method:workspace/export with the format of **HTML**."""

+    etag: Optional[str] = None
+    """Server generated etag that represents the notebook version."""
+
     review_state: Optional[CleanRoomNotebookReviewNotebookReviewState] = None
     """top-level status derived from all reviews"""

@@ -432,7 +432,7 @@ class CleanRoomAssetTable:

 @dataclass
 class CleanRoomAssetTableLocalDetails:
-    local_name: Optional[str] = None
+    local_name: str
     """The fully qualified name of the table in its owner's local metastore, in the format of
     *catalog*.*schema*.*table_name*"""

@@ -490,7 +490,7 @@ class CleanRoomAssetView:

 @dataclass
 class CleanRoomAssetViewLocalDetails:
-    local_name: Optional[str] = None
+    local_name: str
     """The fully qualified name of the view in its owner's local metastore, in the format of
     *catalog*.*schema*.*view_name*"""

@@ -516,7 +516,7 @@ class CleanRoomAssetViewLocalDetails:

 @dataclass
 class CleanRoomAssetVolumeLocalDetails:
-    local_name: Optional[str] = None
+    local_name: str
     """The fully qualified name of the volume in its owner's local metastore, in the format of
     *catalog*.*schema*.*volume_name*"""

@@ -1178,8 +1178,8 @@ class CleanRoomAssetsAPI:
         access the asset. Typically, you should use a group as the clean room owner.

         :param clean_room_name: str
-          The name of the clean room this asset belongs to. This is an output-only field to ensure proper
-          resource identification.
+          The name of the clean room this asset belongs to. This field is required for create operations and
+          populated by the server for responses.
         :param asset: :class:`CleanRoomAsset`

         :returns: :class:`CleanRoomAsset`
@@ -707,6 +707,13 @@ class ProvisioningInfoState(Enum):
     UPDATING = "UPDATING"


+class ProvisioningPhase(Enum):
+
+    PROVISIONING_PHASE_INDEX_SCAN = "PROVISIONING_PHASE_INDEX_SCAN"
+    PROVISIONING_PHASE_INDEX_SORT = "PROVISIONING_PHASE_INDEX_SORT"
+    PROVISIONING_PHASE_MAIN = "PROVISIONING_PHASE_MAIN"
+
+
 @dataclass
 class RequestedClaims:
     permission_set: Optional[RequestedClaimsPermissionSet] = None
@@ -960,6 +967,9 @@ class SyncedTablePipelineProgress:
     """The source table Delta version that was last processed by the pipeline. The pipeline may not
     have completely processed this version yet."""

+    provisioning_phase: Optional[ProvisioningPhase] = None
+    """The current phase of the data synchronization pipeline."""
+
     sync_progress_completion: Optional[float] = None
     """The completion ratio of this update. This is a number between 0 and 1."""

@@ -976,6 +986,8 @@ class SyncedTablePipelineProgress:
             body["estimated_completion_time_seconds"] = self.estimated_completion_time_seconds
         if self.latest_version_currently_processing is not None:
             body["latest_version_currently_processing"] = self.latest_version_currently_processing
+        if self.provisioning_phase is not None:
+            body["provisioning_phase"] = self.provisioning_phase.value
         if self.sync_progress_completion is not None:
             body["sync_progress_completion"] = self.sync_progress_completion
         if self.synced_row_count is not None:
@@ -991,6 +1003,8 @@ class SyncedTablePipelineProgress:
             body["estimated_completion_time_seconds"] = self.estimated_completion_time_seconds
         if self.latest_version_currently_processing is not None:
             body["latest_version_currently_processing"] = self.latest_version_currently_processing
+        if self.provisioning_phase is not None:
+            body["provisioning_phase"] = self.provisioning_phase
         if self.sync_progress_completion is not None:
             body["sync_progress_completion"] = self.sync_progress_completion
         if self.synced_row_count is not None:
@@ -1005,6 +1019,7 @@ class SyncedTablePipelineProgress:
         return cls(
             estimated_completion_time_seconds=d.get("estimated_completion_time_seconds", None),
             latest_version_currently_processing=d.get("latest_version_currently_processing", None),
+            provisioning_phase=_enum(d, "provisioning_phase", ProvisioningPhase),
             sync_progress_completion=d.get("sync_progress_completion", None),
             synced_row_count=d.get("synced_row_count", None),
             total_row_count=d.get("total_row_count", None),
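Taken together, the new field round-trips through the serializers shown above: `as_dict` writes the enum's string value, `as_shallow_dict` keeps the enum object, and `from_dict` parses the string back via `_enum`. A quick sketch:

    from databricks.sdk.service.database import (ProvisioningPhase,
                                                 SyncedTablePipelineProgress)

    p = SyncedTablePipelineProgress(provisioning_phase=ProvisioningPhase.PROVISIONING_PHASE_MAIN)
    d = p.as_dict()  # {"provisioning_phase": "PROVISIONING_PHASE_MAIN"}
    p2 = SyncedTablePipelineProgress.from_dict(d)
    assert p2.provisioning_phase is ProvisioningPhase.PROVISIONING_PHASE_MAIN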
@@ -1735,7 +1750,7 @@ class DatabaseAPI:
             The name of the instance. This is the unique identifier for the instance.
         :param database_instance: :class:`DatabaseInstance`
         :param update_mask: str
-          The list of fields to update.
+          The list of fields to update. This field is not yet supported, and is ignored by the server.

         :returns: :class:`DatabaseInstance`
         """
@@ -734,9 +734,11 @@ class IngestionSourceType(Enum):
     NETSUITE = "NETSUITE"
     ORACLE = "ORACLE"
     POSTGRESQL = "POSTGRESQL"
+    REDSHIFT = "REDSHIFT"
     SALESFORCE = "SALESFORCE"
     SERVICENOW = "SERVICENOW"
     SHAREPOINT = "SHAREPOINT"
+    SQLDW = "SQLDW"
     SQLSERVER = "SQLSERVER"
     TERADATA = "TERADATA"
     WORKDAY_RAAS = "WORKDAY_RAAS"
@@ -667,6 +667,7 @@ class ComplianceStandard(Enum):
     FEDRAMP_HIGH = "FEDRAMP_HIGH"
     FEDRAMP_IL5 = "FEDRAMP_IL5"
     FEDRAMP_MODERATE = "FEDRAMP_MODERATE"
+    GERMANY_C5 = "GERMANY_C5"
     HIPAA = "HIPAA"
     HITRUST = "HITRUST"
     IRAP_PROTECTED = "IRAP_PROTECTED"
@@ -2121,7 +2121,8 @@ class SharedDataObjectUpdate:
     """One of: **ADD**, **REMOVE**, **UPDATE**."""

     data_object: Optional[SharedDataObject] = None
-    """The data object that is being added, removed, or updated."""
+    """The data object that is being added, removed, or updated. The maximum number update data objects
+    allowed is a 100."""

     def as_dict(self) -> dict:
         """Serializes the SharedDataObjectUpdate into a dictionary suitable for use as a JSON request body."""
         body = {}
databricks/sdk/version.py CHANGED

@@ -1 +1 @@
-__version__ = "0.59.0"
+__version__ = "0.60.0"
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: databricks-sdk
-Version: 0.59.0
+Version: 0.60.0
 Summary: Databricks SDK for Python (Beta)
 Project-URL: Documentation, https://databricks-sdk-py.readthedocs.io
 Keywords: databricks,sdk
@@ -1,5 +1,5 @@
 databricks/__init__.py,sha256=CF2MJcZFwbpn9TwQER8qnCDhkPooBGQNVkX4v7g6p3g,537
-databricks/sdk/__init__.py,sha256=oYv56z-ssw6rcHyDgK65wnwyk8TUpiCtV-Ux2vJ7Kk0,60529
+databricks/sdk/__init__.py,sha256=8fpQ2OCaHOAU_oVRvPzQkL4xeToAl4nI2-lMqqc5O6I,60551
 databricks/sdk/_base_client.py,sha256=IMHtzC5BhWt-lBVjifewR1Ah5fegGDMv0__-O1hCxWI,15850
 databricks/sdk/_property.py,sha256=ccbxhkXZmZOxbx2sqKMTzhVZDuvWXG0WPHFRgac6JAM,1701
 databricks/sdk/azure.py,sha256=sN_ARpmP9h1JovtiHIsDLtrVQP_K11eNDDtHS6PD19k,1015
@@ -7,17 +7,17 @@ databricks/sdk/casing.py,sha256=gZy-FlI7og5WNVX88Vb_7S1WeInwJLGws80CGj_9s48,1137
 databricks/sdk/clock.py,sha256=Ivlow0r_TkXcTJ8UXkxSA0czKrY0GvwHAeOvjPkJnAQ,1360
 databricks/sdk/config.py,sha256=rebzZAw0aMSxSwBeXKsF2VE9X_Y33Kjvcd1PO-5wgc4,23401
 databricks/sdk/core.py,sha256=6lsRl6BL3pLgqMMVFrOnQsx-RxxaJJL_Gt2jJfWUovs,3724
-databricks/sdk/credentials_provider.py,sha256=9_P3N52S87xPwI_yUSajnT49--kJWLhKCoHpn5Dwzps,41305
+databricks/sdk/credentials_provider.py,sha256=YSlonZeqtR0zCLrsNPqXGPLoFg_9xih9MldBValvmd8,41340
 databricks/sdk/data_plane.py,sha256=br5IPnOdE611IBubxP8xkUR9_qzbSRSYyVWSua6znWs,3109
 databricks/sdk/dbutils.py,sha256=PoDIwNAYGZhVZC7krox7tsudUDNVSk0gsFjFWlKJXVk,15753
 databricks/sdk/environments.py,sha256=9eVeb68cksqY2Lqwth2PJNmK0JEGdIjh-ebrrmUbqCc,3963
 databricks/sdk/oauth.py,sha256=wOcZVfi-Jd83XQDW1rbDIJbxFqJOjaeTSlUgQYD8VWQ,28406
-databricks/sdk/oidc.py,sha256=A9umMkfnL-Nwfw2GljGxqTtkz7PjMTzltGaeckfrvT4,5749
+databricks/sdk/oidc.py,sha256=_o6Ul8QMyxi_qgKJVSB4YdGuoiRqiZnChMdsRs0QgiM,5974
 databricks/sdk/oidc_token_supplier.py,sha256=QrO6J0QY4yFfcdQDL5h2OfxMxvBZJPtPmPeqLbPJ5Xw,1065
 databricks/sdk/py.typed,sha256=pSvaHpbY1UPNEXyVFUjlgBhjPFZMmVC_UNrPC7eMOHI,74
 databricks/sdk/retries.py,sha256=7k2kEexGqGKXHNAWHbPFSZSugU8UIU0qtyly_hix22Q,2581
 databricks/sdk/useragent.py,sha256=boEgzTv-Zmo6boipZKjSopNy0CXg4GShC1_lTKpJgqs,7361
-databricks/sdk/version.py,sha256=HMyZ2jNndR4_RS4EcSAHrVoKBifJVKSLuqmMuKnl_JI,23
+databricks/sdk/version.py,sha256=XCDU0PzRSGNchH5cFRqniZdytnPgXmacSgzeC0BxXiI,23
 databricks/sdk/_widgets/__init__.py,sha256=VhI-VvLlr3rKUT1nbROslHJIbmZX_tPJ9rRhrdFsYUA,2811
 databricks/sdk/_widgets/default_widgets_utils.py,sha256=_hwCbptLbRzWEmknco0H1wQNAYcuy2pjFO9NiRbvFeo,1127
 databricks/sdk/_widgets/ipywidgets_utils.py,sha256=mg3rEPG9z76e0yVjGgcLybUvd_zSuN5ziGeKiZ-c8Ew,2927
@@ -36,40 +36,40 @@ databricks/sdk/logger/__init__.py,sha256=0_sSQfDkaFGqMHZUVw-g_Ax-RFmOv0Z6NjxCVAe
 databricks/sdk/logger/round_trip_logger.py,sha256=H2YhxUPZpWSwAwCdfa03D5vRUFxsV73bbM8eF_l9QrQ,4873
 databricks/sdk/mixins/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 databricks/sdk/mixins/compute.py,sha256=76Fhc7cDQfOf2IHkPtHZpAnxNfrSLMKl9dbQ6KswXaM,11066
-databricks/sdk/mixins/files.py,sha256=er_bDsMAmvqENkiYJmvLGchbKYmIw_KN0PsojigSCUM,56556
+databricks/sdk/mixins/files.py,sha256=Y3IvOnB9Yogf6Ndr2uJ-HLGm57sHACoic4N3MXXmx38,57605
 databricks/sdk/mixins/jobs.py,sha256=4ywi0dZ8mEN8KZWLmZBFfdbejTP6JATvf9wCCRkdJBw,11558
-databricks/sdk/mixins/open_ai_client.py,sha256=cLfJAywSPfa4X-DLYOGZdKv7nIud2kja9cjrHQ4iYHM,4571
+databricks/sdk/mixins/open_ai_client.py,sha256=FxMatC648dY6_PU-ClO172Iowivt4LzLgem00tNPPvQ,5552
 databricks/sdk/mixins/workspace.py,sha256=sgahprJIPLAxTvikHd9Wq2ifBW1Mcc5qz9u6EB-qm7w,4958
 databricks/sdk/runtime/__init__.py,sha256=6nthZxeYY1HjHieQcP7kXVLIId7w2yfHpZRXXtDLDAc,7333
 databricks/sdk/runtime/dbutils_stub.py,sha256=S_pgWyGmwp3Ay-pMDEXccYsPwNVqCtz7MpD3fZVlHUA,11408
 databricks/sdk/service/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 databricks/sdk/service/_internal.py,sha256=PY83MPehEwGuMzCnyvolqglnfZeQ7-eS38kedTa7KDU,1985
-databricks/sdk/service/aibuilder.py,sha256=sNdOOYn7-VXpcK0YTiO5CU2LkkOlHU7Jh6hQSlxJ_C8,11655
+databricks/sdk/service/agentbricks.py,sha256=0HI48RP5XAZwbNk4kz2wFcn0isSyIJkwsfV9y-oaQ0I,11675
 databricks/sdk/service/apps.py,sha256=xievdxty2wRngg1MIPNN3nCjTbgf3kWbXNQHh-JV-xk,58002
 databricks/sdk/service/billing.py,sha256=Y1tuA7x-wl720TCA98merqUqwrhA4KGd92oWCv679ps,90880
-databricks/sdk/service/catalog.py,sha256=FChGIfAPttOKMQIGmyyKi8JA2yI8yfGe9QNtBGRCMMI,594350
-databricks/sdk/service/cleanrooms.py,sha256=2zD06mz4cFo1Se_9zYErHnQpPpkT5H8M7Sr68641JJg,61799
+databricks/sdk/service/catalog.py,sha256=4hYPAU61_6s8d0zuWJls6Xk4khWDHzWgfZKbHm17CXU,594423
+databricks/sdk/service/cleanrooms.py,sha256=t8H9xo7Vxp4csEBIz7-DEaYDWMqLEdBJSHx5UdnmXLs,61714
 databricks/sdk/service/compute.py,sha256=Obx66MV4eXj1J85rNNy76-LTr0DB9m2iNc_iE1wa-Mw,467530
 databricks/sdk/service/dashboards.py,sha256=Ds4oHTqXmR5LmDQ-gVn3PQqKwSG6MKn4MzI91Xc5nbg,92454
-databricks/sdk/service/database.py,sha256=nBg4NqBDY9C-KoNCICR41NlyfY1wahtQ7yeE-fZoapA,74337
+databricks/sdk/service/database.py,sha256=f4_MqNs0oOmpmWasWkwmJGIuxyK_C3AK1VJQuKaj8_8,75066
 databricks/sdk/service/files.py,sha256=k28gM20L9bw_RmKcIm8IcNFfHVtENCbakptM3p8bgbw,38403
 databricks/sdk/service/iam.py,sha256=cSKocvBtKqC9IARCBpYpCfUZAnmGdRis2vVdzx6X_WQ,165768
 databricks/sdk/service/jobs.py,sha256=2H9Uj_lSJbdrUUV8ehbo_o_fkad9NxfSE3jbt7xmTuo,425121
 databricks/sdk/service/marketplace.py,sha256=8MpP8Y65R6bGyvuWW4ZU6b-6__a4WLZVcDScLh0to4g,153028
 databricks/sdk/service/ml.py,sha256=th5S-VD_NGA6nqOMQrjrr7ubrsTSm0oZdOZ4XZbVdZQ,303895
 databricks/sdk/service/oauth2.py,sha256=6yoa5zmpJ68lCIIFyqcrM3fiSaWvPtf7Pl1dNhXL9pU,75330
-databricks/sdk/service/pipelines.py,sha256=bK3dcPrpaGY4BWAcSBeK7-9WZ4ChN5ZwJQJgu5lW5CU,154568
+databricks/sdk/service/pipelines.py,sha256=CblPuyVF6z5cLEbxuU0JpYLPDtGavvUN8vyiaAYGOPA,154614
 databricks/sdk/service/provisioning.py,sha256=zZm_4lDO_mhDAOcaNoMDDz4Die_MXoapNa4NuwLJYww,129980
 databricks/sdk/service/qualitymonitorv2.py,sha256=82IUD7oTDNPwMcIDE_v59-nr2I3gpL65Ih7UfB5eInY,9202
 databricks/sdk/service/serving.py,sha256=9hz_YJ4wG64xTW72NrXB5whrqdndGgPPXYJuQbtiHKU,209023
-databricks/sdk/service/settings.py,sha256=kfAVeQI5_9ywh8xwHHvWHOW0Q6IPq3nSH-fI0ErSP0A,371344
-databricks/sdk/service/sharing.py,sha256=WCluDVgW80zVbVABREEShR8ht7tBBtb7Bwn5Tqi80ps,141194
+databricks/sdk/service/settings.py,sha256=aebimI7E1p1HUncwrOtSyRM3Hq_gPaCAUH8z5slcTrA,371374
+databricks/sdk/service/sharing.py,sha256=s5FM0AuqzGuWJXOrTrV783KIkw6pH0EI8e62NhPsXGc,141255
 databricks/sdk/service/sql.py,sha256=cYqo-nPiS1b9QBWdYT9GmBbiMjDtnsGWL7KLCRiDc9E,383657
 databricks/sdk/service/vectorsearch.py,sha256=8aARB3z0HAVKX7wMD3ZHqxG7_OXHvTccSYi-CQU8TgI,69088
 databricks/sdk/service/workspace.py,sha256=iss6wuYvMDSMrgwks0FuRRBeJSZFmWNOCkPIMJAzMgY,111868
-databricks_sdk-0.59.0.dist-info/licenses/LICENSE,sha256=afBgTZo-JsYqj4VOjnejBetMuHKcFR30YobDdpVFkqY,11411
-databricks_sdk-0.59.0.dist-info/licenses/NOTICE,sha256=tkRcQYA1k68wDLcnOWbg2xJDsUOJw8G8DGBhb8dnI3w,1588
-databricks_sdk-0.59.0.dist-info/METADATA,sha256=2eKwNhUneFBTjasvLEHngUZn0VP-MFCNNy53wvY-i8o,39397
-databricks_sdk-0.59.0.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
-databricks_sdk-0.59.0.dist-info/top_level.txt,sha256=7kRdatoSgU0EUurRQJ_3F1Nv4EOSHWAr6ng25tJOJKU,11
-databricks_sdk-0.59.0.dist-info/RECORD,,
+databricks_sdk-0.60.0.dist-info/licenses/LICENSE,sha256=afBgTZo-JsYqj4VOjnejBetMuHKcFR30YobDdpVFkqY,11411
+databricks_sdk-0.60.0.dist-info/licenses/NOTICE,sha256=tkRcQYA1k68wDLcnOWbg2xJDsUOJw8G8DGBhb8dnI3w,1588
+databricks_sdk-0.60.0.dist-info/METADATA,sha256=VoIqIkBMeEsfp60SIE5qLyGeLx1YUqtk9kVhxksziQk,39397
+databricks_sdk-0.60.0.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+databricks_sdk-0.60.0.dist-info/top_level.txt,sha256=7kRdatoSgU0EUurRQJ_3F1Nv4EOSHWAr6ng25tJOJKU,11
+databricks_sdk-0.60.0.dist-info/RECORD,,