databricks-sdk 0.69.0__py3-none-any.whl → 0.71.0__py3-none-any.whl

This diff compares the contents of two publicly released versions of the package, as published to their public registry, and is provided for informational purposes only.

Files changed (37)
  1. databricks/sdk/__init__.py +24 -24
  2. databricks/sdk/dbutils.py +17 -0
  3. databricks/sdk/mixins/files.py +10 -10
  4. databricks/sdk/service/agentbricks.py +2 -0
  5. databricks/sdk/service/apps.py +10 -0
  6. databricks/sdk/service/billing.py +13 -3
  7. databricks/sdk/service/catalog.py +131 -47
  8. databricks/sdk/service/cleanrooms.py +11 -3
  9. databricks/sdk/service/compute.py +64 -0
  10. databricks/sdk/service/dashboards.py +10 -0
  11. databricks/sdk/service/database.py +12 -0
  12. databricks/sdk/service/dataquality.py +201 -52
  13. databricks/sdk/service/files.py +7 -72
  14. databricks/sdk/service/iam.py +26 -36
  15. databricks/sdk/service/iamv2.py +6 -0
  16. databricks/sdk/service/jobs.py +86 -154
  17. databricks/sdk/service/marketplace.py +18 -0
  18. databricks/sdk/service/ml.py +464 -13
  19. databricks/sdk/service/oauth2.py +37 -19
  20. databricks/sdk/service/pipelines.py +25 -2
  21. databricks/sdk/service/provisioning.py +19 -1
  22. databricks/sdk/service/qualitymonitorv2.py +2 -0
  23. databricks/sdk/service/serving.py +16 -21
  24. databricks/sdk/service/settings.py +45 -72
  25. databricks/sdk/service/settingsv2.py +2 -0
  26. databricks/sdk/service/sharing.py +23 -69
  27. databricks/sdk/service/sql.py +85 -62
  28. databricks/sdk/service/tags.py +2 -0
  29. databricks/sdk/service/vectorsearch.py +8 -0
  30. databricks/sdk/service/workspace.py +18 -91
  31. databricks/sdk/version.py +1 -1
  32. {databricks_sdk-0.69.0.dist-info → databricks_sdk-0.71.0.dist-info}/METADATA +1 -1
  33. {databricks_sdk-0.69.0.dist-info → databricks_sdk-0.71.0.dist-info}/RECORD +37 -37
  34. {databricks_sdk-0.69.0.dist-info → databricks_sdk-0.71.0.dist-info}/WHEEL +0 -0
  35. {databricks_sdk-0.69.0.dist-info → databricks_sdk-0.71.0.dist-info}/licenses/LICENSE +0 -0
  36. {databricks_sdk-0.69.0.dist-info → databricks_sdk-0.71.0.dist-info}/licenses/NOTICE +0 -0
  37. {databricks_sdk-0.69.0.dist-info → databricks_sdk-0.71.0.dist-info}/top_level.txt +0 -0
databricks/sdk/__init__.py CHANGED
@@ -293,6 +293,7 @@ class WorkspaceClient:
         self._feature_engineering = pkg_ml.FeatureEngineeringAPI(self._api_client)
         self._feature_store = pkg_ml.FeatureStoreAPI(self._api_client)
         self._files = _make_files_client(self._api_client, self._config)
+        self._forecasting = pkg_ml.ForecastingAPI(self._api_client)
         self._functions = pkg_catalog.FunctionsAPI(self._api_client)
         self._genie = pkg_dashboards.GenieAPI(self._api_client)
         self._git_credentials = pkg_workspace.GitCredentialsAPI(self._api_client)
@@ -375,9 +376,8 @@ class WorkspaceClient:
         self._workspace = WorkspaceExt(self._api_client)
         self._workspace_bindings = pkg_catalog.WorkspaceBindingsAPI(self._api_client)
         self._workspace_conf = pkg_settings.WorkspaceConfAPI(self._api_client)
-        self._workspace_settings_v2 = pkg_settingsv2.WorkspaceSettingsV2API(self._api_client)
-        self._forecasting = pkg_ml.ForecastingAPI(self._api_client)
         self._workspace_iam_v2 = pkg_iamv2.WorkspaceIamV2API(self._api_client)
+        self._workspace_settings_v2 = pkg_settingsv2.WorkspaceSettingsV2API(self._api_client)
         self._groups = pkg_iam.GroupsAPI(self._api_client)
         self._service_principals = pkg_iam.ServicePrincipalsAPI(self._api_client)
         self._users = pkg_iam.UsersAPI(self._api_client)
@@ -599,6 +599,11 @@ class WorkspaceClient:
         """A feature store is a centralized repository that enables data scientists to find and share features."""
         return self._feature_store
 
+    @property
+    def forecasting(self) -> pkg_ml.ForecastingAPI:
+        """The Forecasting API allows you to create and get serverless forecasting experiments."""
+        return self._forecasting
+
     @property
     def functions(self) -> pkg_catalog.FunctionsAPI:
         """Functions implement User-Defined Functions (UDFs) in Unity Catalog."""
@@ -974,21 +979,16 @@ class WorkspaceClient:
         """This API allows updating known workspace settings for advanced users."""
         return self._workspace_conf
 
-    @property
-    def workspace_settings_v2(self) -> pkg_settingsv2.WorkspaceSettingsV2API:
-        """APIs to manage workspace level settings."""
-        return self._workspace_settings_v2
-
-    @property
-    def forecasting(self) -> pkg_ml.ForecastingAPI:
-        """The Forecasting API allows you to create and get serverless forecasting experiments."""
-        return self._forecasting
-
     @property
     def workspace_iam_v2(self) -> pkg_iamv2.WorkspaceIamV2API:
         """These APIs are used to manage identities and the workspace access of these identities in <Databricks>."""
         return self._workspace_iam_v2
 
+    @property
+    def workspace_settings_v2(self) -> pkg_settingsv2.WorkspaceSettingsV2API:
+        """APIs to manage workspace level settings."""
+        return self._workspace_settings_v2
+
     @property
     def groups(self) -> pkg_iam.GroupsAPI:
         """Groups simplify identity management, making it easier to assign access to Databricks workspace, data, and other securable objects."""
@@ -1086,11 +1086,13 @@ class AccountClient:
         self._access_control = pkg_iam.AccountAccessControlAPI(self._api_client)
         self._billable_usage = pkg_billing.BillableUsageAPI(self._api_client)
         self._budget_policy = pkg_billing.BudgetPolicyAPI(self._api_client)
+        self._budgets = pkg_billing.BudgetsAPI(self._api_client)
         self._credentials = pkg_provisioning.CredentialsAPI(self._api_client)
         self._custom_app_integration = pkg_oauth2.CustomAppIntegrationAPI(self._api_client)
         self._encryption_keys = pkg_provisioning.EncryptionKeysAPI(self._api_client)
         self._federation_policy = pkg_oauth2.AccountFederationPolicyAPI(self._api_client)
         self._groups_v2 = pkg_iam.AccountGroupsV2API(self._api_client)
+        self._iam_v2 = pkg_iamv2.AccountIamV2API(self._api_client)
         self._ip_access_lists = pkg_settings.AccountIpAccessListsAPI(self._api_client)
         self._log_delivery = pkg_billing.LogDeliveryAPI(self._api_client)
         self._metastore_assignments = pkg_catalog.AccountMetastoreAssignmentsAPI(self._api_client)
@@ -1114,8 +1116,6 @@ class AccountClient:
         self._workspace_assignment = pkg_iam.WorkspaceAssignmentAPI(self._api_client)
         self._workspace_network_configuration = pkg_settings.WorkspaceNetworkConfigurationAPI(self._api_client)
         self._workspaces = pkg_provisioning.WorkspacesAPI(self._api_client)
-        self._iam_v2 = pkg_iamv2.AccountIamV2API(self._api_client)
-        self._budgets = pkg_billing.BudgetsAPI(self._api_client)
         self._groups = pkg_iam.AccountGroupsAPI(self._api_client)
         self._service_principals = pkg_iam.AccountServicePrincipalsAPI(self._api_client)
         self._users = pkg_iam.AccountUsersAPI(self._api_client)
@@ -1143,6 +1143,11 @@ class AccountClient:
         """A service serves REST API about Budget policies."""
         return self._budget_policy
 
+    @property
+    def budgets(self) -> pkg_billing.BudgetsAPI:
+        """These APIs manage budget configurations for this account."""
+        return self._budgets
+
     @property
     def credentials(self) -> pkg_provisioning.CredentialsAPI:
         """These APIs manage credential configurations for this workspace."""
@@ -1168,6 +1173,11 @@ class AccountClient:
         """Groups simplify identity management, making it easier to assign access to Databricks account, data, and other securable objects."""
         return self._groups_v2
 
+    @property
+    def iam_v2(self) -> pkg_iamv2.AccountIamV2API:
+        """These APIs are used to manage identities and the workspace access of these identities in <Databricks>."""
+        return self._iam_v2
+
     @property
     def ip_access_lists(self) -> pkg_settings.AccountIpAccessListsAPI:
         """The Accounts IP Access List API enables account admins to configure IP access lists for access to the account console."""
@@ -1283,16 +1293,6 @@ class AccountClient:
         """These APIs manage workspaces for this account."""
         return self._workspaces
 
-    @property
-    def iam_v2(self) -> pkg_iamv2.AccountIamV2API:
-        """These APIs are used to manage identities and the workspace access of these identities in <Databricks>."""
-        return self._iam_v2
-
-    @property
-    def budgets(self) -> pkg_billing.BudgetsAPI:
-        """These APIs manage budget configurations for this account."""
-        return self._budgets
-
     @property
     def groups(self) -> pkg_iam.AccountGroupsAPI:
         """Groups simplify identity management, making it easier to assign access to Databricks account, data, and other securable objects."""
databricks/sdk/dbutils.py CHANGED
@@ -281,10 +281,17 @@ def get_local_notebook_path():
     return value
 
 
+def not_supported_method_err_msg(methodName):
+    return f"Method '{methodName}' is not supported in the SDK version of DBUtils"
+
+
 class _OverrideProxyUtil:
 
     @classmethod
     def new(cls, path: str):
+        if path in cls.not_supported_override_paths:
+            raise ValueError(cls.not_supported_override_paths[path])
+
         if len(cls.__get_matching_overrides(path)) > 0:
             return _OverrideProxyUtil(path)
         return None
@@ -301,6 +308,16 @@ class _OverrideProxyUtil:
         "notebook.entry_point.getDbutils().notebook().getContext().notebookPath().get()": get_local_notebook_path,
     }
 
+    # These paths work the same as 'proxy_override_paths' but instead of using a local implementation we raise an exception.
+    not_supported_override_paths = {
+        # The object returned by 'credentials.getServiceCredentialProvider()' can't be serialized to JSON.
+        # Without this override, the command would fail with an error 'TypeError: Object of type Session is not JSON serializable'.
+        # We override it to show a better error message
+        "credentials.getServiceCredentialsProvider": not_supported_method_err_msg(
+            "credentials.getServiceCredentialsProvider"
+        ),
+    }
+
     @classmethod
     def __get_matching_overrides(cls, path: str):
         return [x for x in cls.proxy_override_paths.keys() if x.startswith(path)]
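The practical effect, sketched against the internal helper this patches (note that _OverrideProxyUtil is a private class, so this is illustration only): resolving the unsupported proxy path now fails fast with the descriptive message from not_supported_method_err_msg instead of a later JSON serialization error.

from databricks.sdk.dbutils import _OverrideProxyUtil

try:
    _OverrideProxyUtil.new("credentials.getServiceCredentialsProvider")
except ValueError as err:
    # Method 'credentials.getServiceCredentialsProvider' is not supported in the SDK version of DBUtils
    print(err)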
databricks/sdk/mixins/files.py CHANGED
@@ -1064,7 +1064,7 @@ class FilesExt(files.FilesAPI):
     def upload(
         self,
         file_path: str,
-        content: BinaryIO,
+        contents: BinaryIO,
         *,
         overwrite: Optional[bool] = None,
         part_size: Optional[int] = None,
@@ -1076,7 +1076,7 @@ class FilesExt(files.FilesAPI):
 
         :param file_path: str
           The absolute remote path of the target file, e.g. /Volumes/path/to/your/file
-        :param content: BinaryIO
+        :param contents: BinaryIO
           The contents of the file to upload. This must be a BinaryIO stream.
         :param overwrite: bool (optional)
           If true, an existing file will be overwritten. When not specified, assumed True.
@@ -1096,7 +1096,7 @@ class FilesExt(files.FilesAPI):
 
         if self._config.disable_experimental_files_api_client:
             _LOG.info("Disable experimental files API client, will use the original upload method.")
-            super().upload(file_path=file_path, contents=content, overwrite=overwrite)
+            super().upload(file_path=file_path, contents=contents, overwrite=overwrite)
             return UploadStreamResult()
 
         _LOG.debug(f"Uploading file from BinaryIO stream")
@@ -1107,12 +1107,12 @@ class FilesExt(files.FilesAPI):
 
         # Determine content length if the stream is seekable
         content_length = None
-        if content.seekable():
+        if contents.seekable():
             _LOG.debug(f"Uploading using seekable mode")
             # If the stream is seekable, we can read its size.
-            content.seek(0, os.SEEK_END)
-            content_length = content.tell()
-            content.seek(0)
+            contents.seek(0, os.SEEK_END)
+            content_length = contents.tell()
+            contents.seek(0)
 
         # Get optimized part size and batch size based on content length and provided part size
         optimized_part_size, optimized_batch_size = self._get_optimized_performance_parameters_for_upload(
@@ -1135,17 +1135,17 @@ class FilesExt(files.FilesAPI):
         )
 
         if ctx.use_parallel:
-            self._parallel_upload_from_stream(ctx, content)
+            self._parallel_upload_from_stream(ctx, contents)
             return UploadStreamResult()
         elif ctx.content_length is not None:
-            self._upload_single_thread_with_known_size(ctx, content)
+            self._upload_single_thread_with_known_size(ctx, contents)
             return UploadStreamResult()
         else:
             _LOG.debug(f"Uploading using non-seekable mode")
             # If the stream is not seekable, we cannot determine its size.
             # We will use a multipart upload.
             _LOG.debug(f"Using multipart upload for non-seekable input stream of unknown size for file {file_path}")
-            self._single_thread_multipart_upload(ctx, content)
+            self._single_thread_multipart_upload(ctx, contents)
             return UploadStreamResult()
 
     def upload_from(
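For callers, the visible change is the keyword name of the stream argument on this mixin. A minimal sketch, assuming the workspace client's files API routes to FilesExt and using a placeholder volume path:

import io

from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

# The stream argument is now named 'contents' (previously 'content' on FilesExt);
# positional calls keep working, keyword calls using content= need updating.
w.files.upload(
    "/Volumes/main/default/my_volume/example.txt",  # placeholder path
    contents=io.BytesIO(b"hello from 0.71.0"),
    overwrite=True,
)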
databricks/sdk/service/agentbricks.py CHANGED
@@ -238,6 +238,7 @@ class AgentBricksAPI:
 
         :returns: :class:`CustomLlm`
         """
+
         body = {}
         if agent_artifact_path is not None:
             body["agent_artifact_path"] = agent_artifact_path
@@ -328,6 +329,7 @@ class AgentBricksAPI:
 
         :returns: :class:`CustomLlm`
         """
+
         body = {}
         if custom_llm is not None:
             body["custom_llm"] = custom_llm.as_dict()
databricks/sdk/service/apps.py CHANGED
@@ -790,11 +790,13 @@ class AppManifestAppResourceUcSecurableSpecUcSecurablePermission(Enum):
 
     MANAGE = "MANAGE"
     READ_VOLUME = "READ_VOLUME"
+    SELECT = "SELECT"
     WRITE_VOLUME = "WRITE_VOLUME"
 
 
 class AppManifestAppResourceUcSecurableSpecUcSecurableType(Enum):
 
+    TABLE = "TABLE"
     VOLUME = "VOLUME"
 
 
@@ -1867,6 +1869,7 @@ class AppsAPI:
         Long-running operation waiter for :class:`App`.
         See :method:wait_get_app_active for more details.
         """
+
         body = app.as_dict()
         query = {}
         if no_compute is not None:
@@ -1903,6 +1906,7 @@ class AppsAPI:
         Long-running operation waiter for :class:`AppUpdate`.
         See :method:wait_get_update_app_succeeded for more details.
         """
+
         body = {}
         if app is not None:
             body["app"] = app.as_dict()
@@ -1949,6 +1953,7 @@ class AppsAPI:
         Long-running operation waiter for :class:`AppDeployment`.
         See :method:wait_get_deployment_app_succeeded for more details.
         """
+
         body = app_deployment.as_dict()
         headers = {
             "Accept": "application/json",
@@ -2124,6 +2129,7 @@ class AppsAPI:
 
         :returns: :class:`AppPermissions`
         """
+
         body = {}
         if access_control_list is not None:
             body["access_control_list"] = [v.as_dict() for v in access_control_list]
@@ -2189,6 +2195,7 @@ class AppsAPI:
 
         :returns: :class:`App`
         """
+
         body = app.as_dict()
         headers = {
             "Accept": "application/json",
@@ -2209,6 +2216,7 @@ class AppsAPI:
 
         :returns: :class:`AppPermissions`
         """
+
         body = {}
         if access_control_list is not None:
             body["access_control_list"] = [v.as_dict() for v in access_control_list]
@@ -2234,6 +2242,7 @@ class AppsSettingsAPI:
 
         :returns: :class:`CustomTemplate`
         """
+
         body = template.as_dict()
         headers = {
             "Accept": "application/json",
@@ -2316,6 +2325,7 @@ class AppsSettingsAPI:
 
         :returns: :class:`CustomTemplate`
         """
+
         body = template.as_dict()
         headers = {
             "Accept": "application/json",
databricks/sdk/service/billing.py CHANGED
@@ -3,6 +3,7 @@
 from __future__ import annotations
 
 import logging
+import uuid
 from dataclasses import dataclass
 from enum import Enum
 from typing import Any, BinaryIO, Dict, Iterator, List, Optional
@@ -1067,9 +1068,6 @@ class LogDeliveryConfiguration:
     [Configuring audit logs]: https://docs.databricks.com/administration-guide/account-settings/audit-logs.html
     [View billable usage]: https://docs.databricks.com/administration-guide/account-settings/usage.html"""
 
-    account_id: str
-    """Databricks account ID."""
-
     credentials_id: str
     """The ID for a method:credentials/create that represents the AWS IAM role with policy and trust
     relationship as described in the main billable usage documentation page. See [Configure billable
@@ -1083,6 +1081,9 @@ class LogDeliveryConfiguration:
 
     [Configure billable usage delivery]: https://docs.databricks.com/administration-guide/account-settings/billable-usage-delivery.html"""
 
+    account_id: Optional[str] = None
+    """Databricks account ID."""
+
     config_id: Optional[str] = None
     """The unique UUID of log delivery configuration"""
 
@@ -1565,6 +1566,9 @@ class BudgetPolicyAPI:
 
         :returns: :class:`BudgetPolicy`
         """
+
+        if request_id is None or request_id == "":
+            request_id = str(uuid.uuid4())
         body = {}
         if policy is not None:
             body["policy"] = policy.as_dict()
@@ -1679,6 +1683,7 @@ class BudgetPolicyAPI:
 
         :returns: :class:`BudgetPolicy`
         """
+
        body = policy.as_dict()
         query = {}
         if limit_config is not None:
@@ -1715,6 +1720,7 @@ class BudgetsAPI:
 
         :returns: :class:`CreateBudgetConfigurationResponse`
         """
+
         body = {}
         if budget is not None:
             body["budget"] = budget.as_dict()
@@ -1797,6 +1803,7 @@ class BudgetsAPI:
 
         :returns: :class:`UpdateBudgetConfigurationResponse`
         """
+
         body = {}
         if budget is not None:
             body["budget"] = budget.as_dict()
@@ -1895,6 +1902,7 @@ class LogDeliveryAPI:
 
         :returns: :class:`WrappedLogDeliveryConfiguration`
         """
+
         body = {}
         if log_delivery_configuration is not None:
             body["log_delivery_configuration"] = log_delivery_configuration.as_dict()
@@ -1989,6 +1997,7 @@ class LogDeliveryAPI:
 
 
         """
+
         body = {}
         if status is not None:
             body["status"] = status.value
@@ -2026,6 +2035,7 @@ class UsageDashboardsAPI:
 
         :returns: :class:`CreateBillingUsageDashboardResponse`
        """
+
         body = {}
         if dashboard_type is not None:
             body["dashboard_type"] = dashboard_type.value