databricks-sdk 0.45.0__tar.gz → 0.46.0__tar.gz

This diff compares the contents of two publicly available package versions as published to a supported registry. It is provided for informational purposes only and reflects the changes between the versions as they appear in their respective public registries.

Potentially problematic release.

Files changed (83)
  1. {databricks_sdk-0.45.0 → databricks_sdk-0.46.0}/CHANGELOG.md +17 -0
  2. {databricks_sdk-0.45.0 → databricks_sdk-0.46.0}/PKG-INFO +1 -1
  3. {databricks_sdk-0.45.0 → databricks_sdk-0.46.0}/databricks/sdk/__init__.py +13 -2
  4. {databricks_sdk-0.45.0 → databricks_sdk-0.46.0}/databricks/sdk/config.py +4 -0
  5. {databricks_sdk-0.45.0 → databricks_sdk-0.46.0}/databricks/sdk/credentials_provider.py +6 -1
  6. {databricks_sdk-0.45.0 → databricks_sdk-0.46.0}/databricks/sdk/data_plane.py +1 -59
  7. {databricks_sdk-0.45.0 → databricks_sdk-0.46.0}/databricks/sdk/oauth.py +7 -2
  8. {databricks_sdk-0.45.0 → databricks_sdk-0.46.0}/databricks/sdk/service/compute.py +38 -21
  9. {databricks_sdk-0.45.0 → databricks_sdk-0.46.0}/databricks/sdk/service/dashboards.py +34 -17
  10. {databricks_sdk-0.45.0 → databricks_sdk-0.46.0}/databricks/sdk/service/files.py +4 -0
  11. {databricks_sdk-0.45.0 → databricks_sdk-0.46.0}/databricks/sdk/service/ml.py +476 -2
  12. {databricks_sdk-0.45.0 → databricks_sdk-0.46.0}/databricks/sdk/service/oauth2.py +0 -12
  13. {databricks_sdk-0.45.0 → databricks_sdk-0.46.0}/databricks/sdk/service/serving.py +29 -21
  14. databricks_sdk-0.46.0/databricks/sdk/version.py +1 -0
  15. {databricks_sdk-0.45.0 → databricks_sdk-0.46.0}/databricks_sdk.egg-info/PKG-INFO +1 -1
  16. databricks_sdk-0.46.0/setup.cfg +23 -0
  17. databricks_sdk-0.45.0/databricks/sdk/version.py +0 -1
  18. databricks_sdk-0.45.0/setup.cfg +0 -73
  19. {databricks_sdk-0.45.0 → databricks_sdk-0.46.0}/CONTRIBUTING.md +0 -0
  20. {databricks_sdk-0.45.0 → databricks_sdk-0.46.0}/DCO +0 -0
  21. {databricks_sdk-0.45.0 → databricks_sdk-0.46.0}/LICENSE +0 -0
  22. {databricks_sdk-0.45.0 → databricks_sdk-0.46.0}/MANIFEST.in +0 -0
  23. {databricks_sdk-0.45.0 → databricks_sdk-0.46.0}/Makefile +0 -0
  24. {databricks_sdk-0.45.0 → databricks_sdk-0.46.0}/NOTICE +0 -0
  25. {databricks_sdk-0.45.0 → databricks_sdk-0.46.0}/README.md +0 -0
  26. {databricks_sdk-0.45.0 → databricks_sdk-0.46.0}/SECURITY.md +0 -0
  27. {databricks_sdk-0.45.0 → databricks_sdk-0.46.0}/databricks/__init__.py +0 -0
  28. {databricks_sdk-0.45.0 → databricks_sdk-0.46.0}/databricks/sdk/_base_client.py +0 -0
  29. {databricks_sdk-0.45.0 → databricks_sdk-0.46.0}/databricks/sdk/_property.py +0 -0
  30. {databricks_sdk-0.45.0 → databricks_sdk-0.46.0}/databricks/sdk/_widgets/__init__.py +0 -0
  31. {databricks_sdk-0.45.0 → databricks_sdk-0.46.0}/databricks/sdk/_widgets/default_widgets_utils.py +0 -0
  32. {databricks_sdk-0.45.0 → databricks_sdk-0.46.0}/databricks/sdk/_widgets/ipywidgets_utils.py +0 -0
  33. {databricks_sdk-0.45.0 → databricks_sdk-0.46.0}/databricks/sdk/azure.py +0 -0
  34. {databricks_sdk-0.45.0 → databricks_sdk-0.46.0}/databricks/sdk/casing.py +0 -0
  35. {databricks_sdk-0.45.0 → databricks_sdk-0.46.0}/databricks/sdk/clock.py +0 -0
  36. {databricks_sdk-0.45.0 → databricks_sdk-0.46.0}/databricks/sdk/core.py +0 -0
  37. {databricks_sdk-0.45.0 → databricks_sdk-0.46.0}/databricks/sdk/dbutils.py +0 -0
  38. {databricks_sdk-0.45.0 → databricks_sdk-0.46.0}/databricks/sdk/environments.py +0 -0
  39. {databricks_sdk-0.45.0 → databricks_sdk-0.46.0}/databricks/sdk/errors/__init__.py +0 -0
  40. {databricks_sdk-0.45.0 → databricks_sdk-0.46.0}/databricks/sdk/errors/base.py +0 -0
  41. {databricks_sdk-0.45.0 → databricks_sdk-0.46.0}/databricks/sdk/errors/customizer.py +0 -0
  42. {databricks_sdk-0.45.0 → databricks_sdk-0.46.0}/databricks/sdk/errors/deserializer.py +0 -0
  43. {databricks_sdk-0.45.0 → databricks_sdk-0.46.0}/databricks/sdk/errors/details.py +0 -0
  44. {databricks_sdk-0.45.0 → databricks_sdk-0.46.0}/databricks/sdk/errors/mapper.py +0 -0
  45. {databricks_sdk-0.45.0 → databricks_sdk-0.46.0}/databricks/sdk/errors/overrides.py +0 -0
  46. {databricks_sdk-0.45.0 → databricks_sdk-0.46.0}/databricks/sdk/errors/parser.py +0 -0
  47. {databricks_sdk-0.45.0 → databricks_sdk-0.46.0}/databricks/sdk/errors/platform.py +0 -0
  48. {databricks_sdk-0.45.0 → databricks_sdk-0.46.0}/databricks/sdk/errors/private_link.py +0 -0
  49. {databricks_sdk-0.45.0 → databricks_sdk-0.46.0}/databricks/sdk/errors/sdk.py +0 -0
  50. {databricks_sdk-0.45.0 → databricks_sdk-0.46.0}/databricks/sdk/logger/__init__.py +0 -0
  51. {databricks_sdk-0.45.0 → databricks_sdk-0.46.0}/databricks/sdk/logger/round_trip_logger.py +0 -0
  52. {databricks_sdk-0.45.0 → databricks_sdk-0.46.0}/databricks/sdk/mixins/__init__.py +0 -0
  53. {databricks_sdk-0.45.0 → databricks_sdk-0.46.0}/databricks/sdk/mixins/compute.py +0 -0
  54. {databricks_sdk-0.45.0 → databricks_sdk-0.46.0}/databricks/sdk/mixins/files.py +0 -0
  55. {databricks_sdk-0.45.0 → databricks_sdk-0.46.0}/databricks/sdk/mixins/jobs.py +0 -0
  56. {databricks_sdk-0.45.0 → databricks_sdk-0.46.0}/databricks/sdk/mixins/open_ai_client.py +0 -0
  57. {databricks_sdk-0.45.0 → databricks_sdk-0.46.0}/databricks/sdk/mixins/workspace.py +0 -0
  58. {databricks_sdk-0.45.0 → databricks_sdk-0.46.0}/databricks/sdk/py.typed +0 -0
  59. {databricks_sdk-0.45.0 → databricks_sdk-0.46.0}/databricks/sdk/retries.py +0 -0
  60. {databricks_sdk-0.45.0 → databricks_sdk-0.46.0}/databricks/sdk/runtime/__init__.py +0 -0
  61. {databricks_sdk-0.45.0 → databricks_sdk-0.46.0}/databricks/sdk/runtime/dbutils_stub.py +0 -0
  62. {databricks_sdk-0.45.0 → databricks_sdk-0.46.0}/databricks/sdk/service/__init__.py +0 -0
  63. {databricks_sdk-0.45.0 → databricks_sdk-0.46.0}/databricks/sdk/service/_internal.py +0 -0
  64. {databricks_sdk-0.45.0 → databricks_sdk-0.46.0}/databricks/sdk/service/apps.py +0 -0
  65. {databricks_sdk-0.45.0 → databricks_sdk-0.46.0}/databricks/sdk/service/billing.py +0 -0
  66. {databricks_sdk-0.45.0 → databricks_sdk-0.46.0}/databricks/sdk/service/catalog.py +0 -0
  67. {databricks_sdk-0.45.0 → databricks_sdk-0.46.0}/databricks/sdk/service/cleanrooms.py +0 -0
  68. {databricks_sdk-0.45.0 → databricks_sdk-0.46.0}/databricks/sdk/service/iam.py +0 -0
  69. {databricks_sdk-0.45.0 → databricks_sdk-0.46.0}/databricks/sdk/service/jobs.py +0 -0
  70. {databricks_sdk-0.45.0 → databricks_sdk-0.46.0}/databricks/sdk/service/marketplace.py +0 -0
  71. {databricks_sdk-0.45.0 → databricks_sdk-0.46.0}/databricks/sdk/service/pipelines.py +0 -0
  72. {databricks_sdk-0.45.0 → databricks_sdk-0.46.0}/databricks/sdk/service/provisioning.py +0 -0
  73. {databricks_sdk-0.45.0 → databricks_sdk-0.46.0}/databricks/sdk/service/settings.py +0 -0
  74. {databricks_sdk-0.45.0 → databricks_sdk-0.46.0}/databricks/sdk/service/sharing.py +0 -0
  75. {databricks_sdk-0.45.0 → databricks_sdk-0.46.0}/databricks/sdk/service/sql.py +0 -0
  76. {databricks_sdk-0.45.0 → databricks_sdk-0.46.0}/databricks/sdk/service/vectorsearch.py +0 -0
  77. {databricks_sdk-0.45.0 → databricks_sdk-0.46.0}/databricks/sdk/service/workspace.py +0 -0
  78. {databricks_sdk-0.45.0 → databricks_sdk-0.46.0}/databricks/sdk/useragent.py +0 -0
  79. {databricks_sdk-0.45.0 → databricks_sdk-0.46.0}/databricks_sdk.egg-info/SOURCES.txt +0 -0
  80. {databricks_sdk-0.45.0 → databricks_sdk-0.46.0}/databricks_sdk.egg-info/dependency_links.txt +0 -0
  81. {databricks_sdk-0.45.0 → databricks_sdk-0.46.0}/databricks_sdk.egg-info/requires.txt +0 -0
  82. {databricks_sdk-0.45.0 → databricks_sdk-0.46.0}/databricks_sdk.egg-info/top_level.txt +0 -0
  83. {databricks_sdk-0.45.0 → databricks_sdk-0.46.0}/pyproject.toml +0 -0
@@ -1,5 +1,22 @@
 # Version changelog
 
+## Release v0.46.0
+
+### New Features and Improvements
+* [Experimental] Add support for async token refresh ([#916](https://github.com/databricks/databricks-sdk-py/pull/916)).
+This can be enabled by setting the following environment variable:
+```
+export DATABRICKS_ENABLE_EXPERIMENTAL_ASYNC_TOKEN_REFRESH=1
+```
+This feature and its setting are experimental and may be removed in future releases.
+
+### API Changes
+* Added [w.forecasting](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/ml/forecasting.html) workspace-level service.
+* Added `statement_id` field for `databricks.sdk.service.dashboards.GenieQueryAttachment`.
+* Added `could_not_get_model_deployments_exception` enum value for `databricks.sdk.service.dashboards.MessageErrorType`.
+* [Breaking] Removed `jwks_uri` field for `databricks.sdk.service.oauth2.OidcFederationPolicy`.
+
+
 ## Release v0.45.0
 
 ### New Features and Improvements
@@ -1,6 +1,6 @@
 Metadata-Version: 2.2
 Name: databricks-sdk
-Version: 0.45.0
+Version: 0.46.0
 Summary: Databricks SDK for Python (Beta)
 Project-URL: Documentation, https://databricks-sdk-py.readthedocs.io
 Keywords: databricks,sdk
@@ -8,6 +8,7 @@ import databricks.sdk.dbutils as dbutils
 import databricks.sdk.service as service
 from databricks.sdk import azure
 from databricks.sdk.credentials_provider import CredentialsStrategy
+from databricks.sdk.data_plane import DataPlaneTokenSource
 from databricks.sdk.mixins.compute import ClustersExt
 from databricks.sdk.mixins.files import DbfsExt, FilesExt
 from databricks.sdk.mixins.jobs import JobsExt
@@ -63,7 +64,8 @@ from databricks.sdk.service.marketplace import (
                                                 ProviderExchangeFiltersAPI, ProviderExchangesAPI, ProviderFilesAPI,
                                                 ProviderListingsAPI, ProviderPersonalizationRequestsAPI,
                                                 ProviderProviderAnalyticsDashboardsAPI, ProviderProvidersAPI)
-from databricks.sdk.service.ml import ExperimentsAPI, ModelRegistryAPI
+from databricks.sdk.service.ml import (ExperimentsAPI, ForecastingAPI,
+                                       ModelRegistryAPI)
 from databricks.sdk.service.oauth2 import (AccountFederationPolicyAPI,
                                            CustomAppIntegrationAPI,
                                            OAuthPublishedAppsAPI,
@@ -284,8 +286,11 @@ class WorkspaceClient:
         self._secrets = service.workspace.SecretsAPI(self._api_client)
         self._service_principals = service.iam.ServicePrincipalsAPI(self._api_client)
         self._serving_endpoints = serving_endpoints
+        serving_endpoints_data_plane_token_source = DataPlaneTokenSource(
+            self._config.host, self._config.oauth_token, not self._config.enable_experimental_async_token_refresh
+        )
         self._serving_endpoints_data_plane = service.serving.ServingEndpointsDataPlaneAPI(
-            self._api_client, serving_endpoints
+            self._api_client, serving_endpoints, serving_endpoints_data_plane_token_source
         )
         self._settings = service.settings.SettingsAPI(self._api_client)
         self._shares = service.sharing.SharesAPI(self._api_client)
@@ -305,6 +310,7 @@ class WorkspaceClient:
         self._workspace = WorkspaceExt(self._api_client)
         self._workspace_bindings = service.catalog.WorkspaceBindingsAPI(self._api_client)
         self._workspace_conf = service.settings.WorkspaceConfAPI(self._api_client)
+        self._forecasting = service.ml.ForecastingAPI(self._api_client)
 
     @property
     def config(self) -> client.Config:
@@ -808,6 +814,11 @@ class WorkspaceClient:
         """This API allows updating known workspace settings for advanced users."""
         return self._workspace_conf
 
+    @property
+    def forecasting(self) -> service.ml.ForecastingAPI:
+        """The Forecasting API allows you to create and get serverless forecasting experiments."""
+        return self._forecasting
+
     def get_workspace_id(self) -> int:
         """Get the workspace ID of the workspace that this client is connected to."""
         response = self._api_client.do("GET", "/api/2.0/preview/scim/v2/Me", response_headers=["X-Databricks-Org-Id"])
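With the wiring above, the new service is reachable directly from a `WorkspaceClient`. A minimal sketch using only what this diff shows (the `ForecastingAPI` method surface is not part of this diff):

```
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

# The new workspace-level service is exposed as a client property,
# like every other API group.
forecasting = w.forecasting
print(type(forecasting))  # databricks.sdk.service.ml.ForecastingAPI
```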
@@ -95,6 +95,10 @@ class Config:
     max_connections_per_pool: int = ConfigAttribute()
     databricks_environment: Optional[DatabricksEnvironment] = None
 
+    enable_experimental_async_token_refresh: bool = ConfigAttribute(
+        env="DATABRICKS_ENABLE_EXPERIMENTAL_ASYNC_TOKEN_REFRESH"
+    )
+
     enable_experimental_files_api_client: bool = ConfigAttribute(env="DATABRICKS_ENABLE_EXPERIMENTAL_FILES_API_CLIENT")
     files_api_client_download_max_total_recovers = None
     files_api_client_download_max_total_recovers_without_progressing = 1
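The flag is a `ConfigAttribute` bound to the `DATABRICKS_ENABLE_EXPERIMENTAL_ASYNC_TOKEN_REFRESH` environment variable, so there are two ways to opt in. A minimal sketch, assuming the SDK's usual behavior that `Config` accepts its attributes as keyword arguments:

```
import os

from databricks.sdk import WorkspaceClient
from databricks.sdk.core import Config

# Option 1: opt in via the environment, as the changelog suggests.
os.environ["DATABRICKS_ENABLE_EXPERIMENTAL_ASYNC_TOKEN_REFRESH"] = "1"
w = WorkspaceClient()

# Option 2: opt in programmatically on the Config object.
cfg = Config(enable_experimental_async_token_refresh=True)
print(cfg.enable_experimental_async_token_refresh)  # True
```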
@@ -191,6 +191,7 @@ def oauth_service_principal(cfg: "Config") -> Optional[CredentialsProvider]:
         token_url=oidc.token_endpoint,
         scopes=["all-apis"],
         use_header=True,
+        disable_async=not cfg.enable_experimental_async_token_refresh,
     )
 
     def inner() -> Dict[str, str]:
@@ -290,6 +291,7 @@ def azure_service_principal(cfg: "Config") -> CredentialsProvider:
         token_url=f"{aad_endpoint}{cfg.azure_tenant_id}/oauth2/token",
         endpoint_params={"resource": resource},
         use_params=True,
+        disable_async=not cfg.enable_experimental_async_token_refresh,
     )
 
     _ensure_host_present(cfg, token_source_for)
@@ -355,6 +357,7 @@ def github_oidc_azure(cfg: "Config") -> Optional[CredentialsProvider]:
         token_url=f"{aad_endpoint}{cfg.azure_tenant_id}/oauth2/token",
         endpoint_params=params,
         use_params=True,
+        disable_async=not cfg.enable_experimental_async_token_refresh,
     )
 
     def refreshed_headers() -> Dict[str, str]:
@@ -458,8 +461,9 @@ class CliTokenSource(Refreshable):
         token_type_field: str,
         access_token_field: str,
         expiry_field: str,
+        disable_async: bool = True,
     ):
-        super().__init__()
+        super().__init__(disable_async=disable_async)
         self._cmd = cmd
         self._token_type_field = token_type_field
         self._access_token_field = access_token_field
@@ -690,6 +694,7 @@ class DatabricksCliTokenSource(CliTokenSource):
             token_type_field="token_type",
             access_token_field="access_token",
             expiry_field="expiry",
+            disable_async=not cfg.enable_experimental_async_token_refresh,
         )
 
     @staticmethod
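All of the token sources above thread the same `disable_async` flag into `Refreshable`, whose implementation is not part of this diff. The following is a toy illustration of the sync-versus-background refresh idea only, assuming nothing about the real base class; `ToyRefreshable`, `ToyToken`, and `_refresh_in_background` are hypothetical names, not SDK API:

```
import threading
import time
from typing import Optional


class ToyToken:
    """Hypothetical stand-in for an OAuth token with an expiry."""

    def __init__(self, value: str, expires_at: float):
        self.value = value
        self.expires_at = expires_at

    @property
    def valid(self) -> bool:
        return time.time() < self.expires_at


class ToyRefreshable:
    """Illustration only, not the SDK's Refreshable: refresh synchronously when
    disable_async=True, otherwise renew proactively on a background thread."""

    def __init__(self, disable_async: bool = True):
        self._disable_async = disable_async
        self._token: Optional[ToyToken] = None
        self._lock = threading.Lock()
        self._refreshing = False

    def refresh(self) -> ToyToken:
        # Subclasses would run a CLI command, an OAuth flow, etc.
        return ToyToken("example-token", time.time() + 3600)

    def token(self) -> ToyToken:
        with self._lock:
            if self._token is None or not self._token.valid:
                # Sync path: the caller blocks until a fresh token arrives.
                self._token = self.refresh()
            elif (not self._disable_async and not self._refreshing
                  and self._token.expires_at - time.time() < 300):
                # Async path: the token is still valid, so hand it out now and
                # renew it off-thread before it expires, so callers never block.
                self._refreshing = True
                threading.Thread(target=self._refresh_in_background, daemon=True).start()
            return self._token

    def _refresh_in_background(self) -> None:
        new_token = self.refresh()
        with self._lock:
            self._token = new_token
            self._refreshing = False
```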
@@ -2,7 +2,7 @@ from __future__ import annotations
 
 import threading
 from dataclasses import dataclass
-from typing import Callable, List, Optional
+from typing import Callable, Optional
 from urllib import parse
 
 from databricks.sdk import oauth
@@ -88,61 +88,3 @@ class DataPlaneDetails:
     """URL used to query the endpoint through the DataPlane."""
     token: Token
     """Token to query the DataPlane endpoint."""
-
-
-## Old implementation. #TODO: Remove after the new implementation is used
-
-
-class DataPlaneService:
-    """Helper class to fetch and manage DataPlane details."""
-
-    from .service.serving import DataPlaneInfo
-
-    def __init__(self):
-        self._data_plane_info = {}
-        self._tokens = {}
-        self._lock = threading.Lock()
-
-    def get_data_plane_details(
-        self,
-        method: str,
-        params: List[str],
-        info_getter: Callable[[], DataPlaneInfo],
-        refresh: Callable[[str], Token],
-    ):
-        """Get and cache information required to query a Data Plane endpoint using the provided methods.
-
-        Returns a cached DataPlaneDetails if the details have already been fetched previously and are still valid.
-        If not, it uses the provided functions to fetch the details.
-
-        :param method: method name. Used to construct a unique key for the cache.
-        :param params: path params used in the "get" operation which uniquely determine the object. Used to construct a unique key for the cache.
-        :param info_getter: function which returns the DataPlaneInfo. It will only be called if the information is not already present in the cache.
-        :param refresh: function to refresh the token. It will only be called if the token is missing or expired.
-        """
-        all_elements = params.copy()
-        all_elements.insert(0, method)
-        map_key = "/".join(all_elements)
-        info = self._data_plane_info.get(map_key)
-        if not info:
-            self._lock.acquire()
-            try:
-                info = self._data_plane_info.get(map_key)
-                if not info:
-                    info = info_getter()
-                    self._data_plane_info[map_key] = info
-            finally:
-                self._lock.release()
-
-        token = self._tokens.get(map_key)
-        if not token or not token.valid:
-            self._lock.acquire()
-            token = self._tokens.get(map_key)
-            try:
-                if not token or not token.valid:
-                    token = refresh(info.authorization_details)
-                    self._tokens[map_key] = token
-            finally:
-                self._lock.release()
-
-        return DataPlaneDetails(endpoint_url=info.endpoint_url, token=token)
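The caching and double-checked locking that `DataPlaneService` performed now live behind `DataPlaneTokenSource`. Its only use visible in this diff is the `WorkspaceClient` call site earlier, so the sketch below simply mirrors that call; the constructor's parameter meanings are an assumption inferred from the argument names at that call site:

```
from databricks.sdk.core import Config
from databricks.sdk.data_plane import DataPlaneTokenSource

cfg = Config()  # resolves host and credentials from the environment

# Mirrors the WorkspaceClient wiring shown above; the third positional
# argument disables async refresh unless the experimental flag is set.
token_source = DataPlaneTokenSource(
    cfg.host,
    cfg.oauth_token,
    not cfg.enable_experimental_async_token_refresh,
)
```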
@@ -426,12 +426,16 @@ class SessionCredentials(Refreshable):
         client_id: str,
         client_secret: str = None,
         redirect_url: str = None,
+        disable_async: bool = True,
     ):
         self._token_endpoint = token_endpoint
         self._client_id = client_id
         self._client_secret = client_secret
         self._redirect_url = redirect_url
-        super().__init__(token)
+        super().__init__(
+            token=token,
+            disable_async=disable_async,
+        )
 
     def as_dict(self) -> dict:
         return {"token": self.token().as_dict()}
@@ -708,9 +712,10 @@ class ClientCredentials(Refreshable):
     scopes: List[str] = None
     use_params: bool = False
    use_header: bool = False
+    disable_async: bool = True
 
     def __post_init__(self):
-        super().__init__()
+        super().__init__(disable_async=self.disable_async)
 
     def refresh(self) -> Token:
         params = {"grant_type": "client_credentials"}
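Since `ClientCredentials` is a dataclass, the new `disable_async` field becomes a constructor argument. A hedged sketch; `client_id`, `client_secret`, and `token_url` are assumed fields that do not appear in this hunk, and the values are placeholders:

```
from databricks.sdk.oauth import ClientCredentials

creds = ClientCredentials(
    client_id="my-service-principal-id",      # assumed field
    client_secret="my-secret",                # assumed field
    token_url="https://example.com/oidc/v1/token",  # assumed field
    scopes=["all-apis"],
    use_header=True,
    disable_async=False,  # opt in to the experimental background refresh
)
token = creds.token()  # the Refreshable base class caches and refreshes
```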
@@ -3846,6 +3846,10 @@ class DestroyResponse:
 
 @dataclass
 class DiskSpec:
+    """Describes the disks that are launched for each instance in the spark cluster. For example, if
+    the cluster has 3 instances, each instance is configured to launch 2 disks, 100 GiB each, then
+    Databricks will launch a total of 6 disks, 100 GiB each, for this cluster."""
+
     disk_count: Optional[int] = None
     """The number of disks launched for each instance: - This feature is only enabled for supported
     node types. - Users can choose up to the limit of the disks supported by the node type. - For
@@ -3920,9 +3924,15 @@ class DiskSpec:
 
 @dataclass
 class DiskType:
+    """Describes the disk type."""
+
     azure_disk_volume_type: Optional[DiskTypeAzureDiskVolumeType] = None
+    """All Azure Disk types that Databricks supports. See
+    https://docs.microsoft.com/en-us/azure/storage/storage-about-disks-and-vhds-linux#types-of-disks"""
 
     ebs_volume_type: Optional[DiskTypeEbsVolumeType] = None
+    """All EBS volume types that Databricks supports. See https://aws.amazon.com/ebs/details/ for
+    details."""
 
     def as_dict(self) -> dict:
         """Serializes the DiskType into a dictionary suitable for use as a JSON request body."""
@@ -3952,12 +3962,16 @@ class DiskType:
 
 
 class DiskTypeAzureDiskVolumeType(Enum):
+    """All Azure Disk types that Databricks supports. See
+    https://docs.microsoft.com/en-us/azure/storage/storage-about-disks-and-vhds-linux#types-of-disks"""
 
     PREMIUM_LRS = "PREMIUM_LRS"
     STANDARD_LRS = "STANDARD_LRS"
 
 
 class DiskTypeEbsVolumeType(Enum):
+    """All EBS volume types that Databricks supports. See https://aws.amazon.com/ebs/details/ for
+    details."""
 
     GENERAL_PURPOSE_SSD = "GENERAL_PURPOSE_SSD"
     THROUGHPUT_OPTIMIZED_HDD = "THROUGHPUT_OPTIMIZED_HDD"
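The newly documented docstrings describe plain dataclass and enum values, so usage is direct construction. A sketch; the `disk_type` field on `DiskSpec` is an assumption (only `disk_count` appears in this diff):

```
from databricks.sdk.service.compute import (DiskSpec, DiskType,
                                            DiskTypeEbsVolumeType)

# Two general-purpose SSDs per instance; with 3 instances this would yield
# 6 disks total, matching the example in the new DiskSpec docstring.
spec = DiskSpec(
    disk_count=2,
    disk_type=DiskType(ebs_volume_type=DiskTypeEbsVolumeType.GENERAL_PURPOSE_SSD),
)
print(spec.as_dict())
```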
@@ -3998,6 +4012,7 @@ class DockerBasicAuth:
 @dataclass
 class DockerImage:
     basic_auth: Optional[DockerBasicAuth] = None
+    """Basic auth with username and password"""
 
     url: Optional[str] = None
     """URL of the docker image."""
@@ -5334,7 +5349,7 @@ class GetInstancePool:
     - Currently, Databricks allows at most 45 custom tags"""
 
     default_tags: Optional[Dict[str, str]] = None
-    """Tags that are added by Databricks regardless of any `custom_tags`, including:
+    """Tags that are added by Databricks regardless of any ``custom_tags``, including:
 
     - Vendor: Databricks
 
@@ -6250,7 +6265,7 @@ class InstancePoolAndStats:
     - Currently, Databricks allows at most 45 custom tags"""
 
     default_tags: Optional[Dict[str, str]] = None
-    """Tags that are added by Databricks regardless of any `custom_tags`, including:
+    """Tags that are added by Databricks regardless of any ``custom_tags``, including:
 
     - Vendor: Databricks
 
@@ -6427,10 +6442,10 @@
 
 @dataclass
 class InstancePoolAwsAttributes:
+    """Attributes set during instance pool creation which are related to Amazon Web Services."""
+
     availability: Optional[InstancePoolAwsAttributesAvailability] = None
-    """Availability type used for the spot nodes.
-
-    The default value is defined by InstancePoolConf.instancePoolDefaultAwsAvailability"""
+    """Availability type used for the spot nodes."""
 
     spot_bid_price_percent: Optional[int] = None
     """Calculates the bid price for AWS spot instances, as a percentage of the corresponding instance
@@ -6439,10 +6454,7 @@ class InstancePoolAwsAttributes:
     instances. Similarly, if this field is set to 200, the bid price is twice the price of on-demand
     `r3.xlarge` instances. If not specified, the default value is 100. When spot instances are
     requested for this cluster, only spot instances whose bid price percentage matches this field
-    will be considered. Note that, for safety, we enforce this field to be no more than 10000.
-
-    The default value and documentation here should be kept consistent with
-    CommonConf.defaultSpotBidPricePercent and CommonConf.maxSpotBidPricePercent."""
+    will be considered. Note that, for safety, we enforce this field to be no more than 10000."""
 
     zone_id: Optional[str] = None
     """Identifier for the availability zone/datacenter in which the cluster resides. This string will
@@ -6485,9 +6497,7 @@
 
 
 class InstancePoolAwsAttributesAvailability(Enum):
-    """Availability type used for the spot nodes.
-
-    The default value is defined by InstancePoolConf.instancePoolDefaultAwsAvailability"""
+    """The set of AWS availability types supported when setting up nodes for a cluster."""
 
     ON_DEMAND = "ON_DEMAND"
     SPOT = "SPOT"
@@ -6495,14 +6505,16 @@ class InstancePoolAwsAttributesAvailability(Enum):
 
 @dataclass
 class InstancePoolAzureAttributes:
+    """Attributes set during instance pool creation which are related to Azure."""
+
     availability: Optional[InstancePoolAzureAttributesAvailability] = None
-    """Shows the Availability type used for the spot nodes.
-
-    The default value is defined by InstancePoolConf.instancePoolDefaultAzureAvailability"""
+    """Availability type used for the spot nodes."""
 
     spot_bid_max_price: Optional[float] = None
-    """The default value and documentation here should be kept consistent with
-    CommonConf.defaultSpotBidMaxPrice."""
+    """With variable pricing, you have option to set a max price, in US dollars (USD) For example, the
+    value 2 would be a max price of $2.00 USD per hour. If you set the max price to be -1, the VM
+    won't be evicted based on price. The price for the VM will be the current price for spot or the
+    price for a standard VM, which ever is less, as long as there is capacity and quota available."""
 
     def as_dict(self) -> dict:
         """Serializes the InstancePoolAzureAttributes into a dictionary suitable for use as a JSON request body."""
@@ -6532,9 +6544,7 @@
 
 
 class InstancePoolAzureAttributesAvailability(Enum):
-    """Shows the Availability type used for the spot nodes.
-
-    The default value is defined by InstancePoolConf.instancePoolDefaultAzureAvailability"""
+    """The set of Azure availability types supported when setting up nodes for a cluster."""
 
     ON_DEMAND_AZURE = "ON_DEMAND_AZURE"
     SPOT_AZURE = "SPOT_AZURE"
@@ -6542,6 +6552,8 @@ class InstancePoolAzureAttributesAvailability(Enum):
 
 @dataclass
 class InstancePoolGcpAttributes:
+    """Attributes set during instance pool creation which are related to GCP."""
+
     gcp_availability: Optional[GcpAvailability] = None
     """This field determines whether the instance pool will contain preemptible VMs, on-demand VMs, or
     preemptible VMs with a fallback to on-demand VMs if the former is unavailable."""
@@ -6756,7 +6768,10 @@ class InstancePoolPermissionsRequest:
 
 
 class InstancePoolState(Enum):
-    """Current state of the instance pool."""
+    """The state of a Cluster. The current allowable state transitions are as follows:
+
+    - ``ACTIVE`` -> ``STOPPED`` - ``ACTIVE`` -> ``DELETED`` - ``STOPPED`` -> ``ACTIVE`` -
+    ``STOPPED`` -> ``DELETED``"""
 
     ACTIVE = "ACTIVE"
     DELETED = "DELETED"
@@ -7865,6 +7880,8 @@ class NodeType:
 
 @dataclass
 class PendingInstanceError:
+    """Error message of a failed pending instances"""
+
     instance_id: Optional[str] = None
 
     message: Optional[str] = None
@@ -594,12 +594,15 @@ class GenieMessage:
     `ASKING_AI`: Waiting for the LLM to respond to the user's question. * `PENDING_WAREHOUSE`:
     Waiting for warehouse before the SQL query can start executing. * `EXECUTING_QUERY`: Executing a
     generated SQL query. Get the SQL query result by calling
-    [getMessageQueryResult](:method:genie/getMessageQueryResult) API. * `FAILED`: The response
-    generation or query execution failed. See `error` field. * `COMPLETED`: Message processing is
-    completed. Results are in the `attachments` field. Get the SQL query result by calling
-    [getMessageQueryResult](:method:genie/getMessageQueryResult) API. * `SUBMITTED`: Message has
-    been submitted. * `QUERY_RESULT_EXPIRED`: SQL result is not available anymore. The user needs to
-    rerun the query. * `CANCELLED`: Message has been cancelled."""
+    [getMessageAttachmentQueryResult](:method:genie/getMessageAttachmentQueryResult) API. *
+    `FAILED`: The response generation or query execution failed. See `error` field. * `COMPLETED`:
+    Message processing is completed. Results are in the `attachments` field. Get the SQL query
+    result by calling
+    [getMessageAttachmentQueryResult](:method:genie/getMessageAttachmentQueryResult) API. *
+    `SUBMITTED`: Message has been submitted. * `QUERY_RESULT_EXPIRED`: SQL result is not available
+    anymore. The user needs to rerun the query. Rerun the SQL query result by calling
+    [executeMessageAttachmentQuery](:method:genie/executeMessageAttachmentQuery) API. * `CANCELLED`:
+    Message has been cancelled."""
 
     user_id: Optional[int] = None
     """ID of the user who created the message"""
@@ -697,6 +700,10 @@ class GenieQueryAttachment:
     query_result_metadata: Optional[GenieResultMetadata] = None
     """Metadata associated with the query result."""
 
+    statement_id: Optional[str] = None
+    """Statement Execution API statement id. Use [Get status, manifest, and result first
+    chunk](:method:statementexecution/getstatement) to get the full result data."""
+
     title: Optional[str] = None
     """Name of the query"""
 
@@ -713,6 +720,8 @@ class GenieQueryAttachment:
         body["query"] = self.query
         if self.query_result_metadata:
             body["query_result_metadata"] = self.query_result_metadata.as_dict()
+        if self.statement_id is not None:
+            body["statement_id"] = self.statement_id
         if self.title is not None:
             body["title"] = self.title
         return body
@@ -730,6 +739,8 @@ class GenieQueryAttachment:
         body["query"] = self.query
         if self.query_result_metadata:
             body["query_result_metadata"] = self.query_result_metadata
+        if self.statement_id is not None:
+            body["statement_id"] = self.statement_id
         if self.title is not None:
             body["title"] = self.title
         return body
@@ -743,6 +754,7 @@ class GenieQueryAttachment:
             last_updated_timestamp=d.get("last_updated_timestamp", None),
             query=d.get("query", None),
             query_result_metadata=_from_dict(d, "query_result_metadata", GenieResultMetadata),
+            statement_id=d.get("statement_id", None),
             title=d.get("title", None),
         )
 
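Both serialization paths above gained the field, so `statement_id` round-trips through `as_dict()` and `from_dict()`. A small check using only code shown in this diff; the ID value is a placeholder:

```
from databricks.sdk.service.dashboards import GenieQueryAttachment

attachment = GenieQueryAttachment.from_dict(
    {
        "title": "Daily revenue",
        "statement_id": "01f0-example-statement-id",  # hypothetical value
    }
)
assert attachment.statement_id == "01f0-example-statement-id"
assert attachment.as_dict()["statement_id"] == "01f0-example-statement-id"
```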
@@ -1062,6 +1074,7 @@ class MessageErrorType(Enum):
     CHAT_COMPLETION_NETWORK_EXCEPTION = "CHAT_COMPLETION_NETWORK_EXCEPTION"
     CONTENT_FILTER_EXCEPTION = "CONTENT_FILTER_EXCEPTION"
     CONTEXT_EXCEEDED_EXCEPTION = "CONTEXT_EXCEEDED_EXCEPTION"
+    COULD_NOT_GET_MODEL_DEPLOYMENTS_EXCEPTION = "COULD_NOT_GET_MODEL_DEPLOYMENTS_EXCEPTION"
     COULD_NOT_GET_UC_SCHEMA_EXCEPTION = "COULD_NOT_GET_UC_SCHEMA_EXCEPTION"
     DEPLOYMENT_NOT_FOUND_EXCEPTION = "DEPLOYMENT_NOT_FOUND_EXCEPTION"
     FUNCTIONS_NOT_AVAILABLE_EXCEPTION = "FUNCTIONS_NOT_AVAILABLE_EXCEPTION"
@@ -1107,12 +1120,15 @@ class MessageStatus(Enum):
     `ASKING_AI`: Waiting for the LLM to respond to the user's question. * `PENDING_WAREHOUSE`:
     Waiting for warehouse before the SQL query can start executing. * `EXECUTING_QUERY`: Executing a
     generated SQL query. Get the SQL query result by calling
-    [getMessageQueryResult](:method:genie/getMessageQueryResult) API. * `FAILED`: The response
-    generation or query execution failed. See `error` field. * `COMPLETED`: Message processing is
-    completed. Results are in the `attachments` field. Get the SQL query result by calling
-    [getMessageQueryResult](:method:genie/getMessageQueryResult) API. * `SUBMITTED`: Message has
-    been submitted. * `QUERY_RESULT_EXPIRED`: SQL result is not available anymore. The user needs to
-    rerun the query. * `CANCELLED`: Message has been cancelled."""
+    [getMessageAttachmentQueryResult](:method:genie/getMessageAttachmentQueryResult) API. *
+    `FAILED`: The response generation or query execution failed. See `error` field. * `COMPLETED`:
+    Message processing is completed. Results are in the `attachments` field. Get the SQL query
+    result by calling
+    [getMessageAttachmentQueryResult](:method:genie/getMessageAttachmentQueryResult) API. *
+    `SUBMITTED`: Message has been submitted. * `QUERY_RESULT_EXPIRED`: SQL result is not available
+    anymore. The user needs to rerun the query. Rerun the SQL query result by calling
+    [executeMessageAttachmentQuery](:method:genie/executeMessageAttachmentQuery) API. * `CANCELLED`:
+    Message has been cancelled."""
 
     ASKING_AI = "ASKING_AI"
     CANCELLED = "CANCELLED"
@@ -1917,7 +1933,8 @@ class GenieAPI:
     ) -> GenieGetMessageQueryResultResponse:
         """Execute message attachment SQL query.
 
-        Execute the SQL for a message query attachment.
+        Execute the SQL for a message query attachment. Use this API when the query attachment has expired and
+        needs to be re-executed.
 
         :param space_id: str
           Genie space ID
@@ -1945,7 +1962,7 @@ class GenieAPI:
     def execute_message_query(
         self, space_id: str, conversation_id: str, message_id: str
     ) -> GenieGetMessageQueryResultResponse:
-        """Execute SQL query in a conversation message.
+        """[Deprecated] Execute SQL query in a conversation message.
 
         Execute the SQL query in the message.
 
@@ -2059,7 +2076,7 @@ class GenieAPI:
     def get_message_query_result_by_attachment(
         self, space_id: str, conversation_id: str, message_id: str, attachment_id: str
     ) -> GenieGetMessageQueryResultResponse:
-        """[deprecated] Get conversation message SQL query result.
+        """[Deprecated] Get conversation message SQL query result.
 
         Get the result of SQL query if the message has a query attachment. This is only available if a message
         has a query attachment and the message status is `EXECUTING_QUERY` OR `COMPLETED`.
@@ -2088,9 +2105,9 @@ class GenieAPI:
         return GenieGetMessageQueryResultResponse.from_dict(res)
 
     def get_space(self, space_id: str) -> GenieSpace:
-        """Get details of a Genie Space.
+        """Get Genie Space.
 
-        Get a Genie Space.
+        Get details of a Genie Space.
 
         :param space_id: str
           The ID associated with the Genie space
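Putting the Genie changes together: `get_space` is shown verbatim above, and the updated status docstrings now direct expired query results to the attachment-level execute call. The method and parameter names for that call below are inferred from the changelog's `executeMessageAttachmentQuery` and the `:param` lists in these hunks, so treat them as assumptions; all IDs are placeholders:

```
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

# get_space() is shown verbatim in the hunk above.
space = w.genie.get_space(space_id="01ef-example-space-id")
print(space)

# When a result has expired (QUERY_RESULT_EXPIRED), re-execute the
# attachment query rather than calling the deprecated methods.
result = w.genie.execute_message_attachment_query(  # assumed snake_case name
    space_id="01ef-example-space-id",
    conversation_id="example-conversation-id",
    message_id="example-message-id",
    attachment_id="example-attachment-id",
)
```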
@@ -314,12 +314,14 @@ class DirectoryEntry:
 
 @dataclass
 class DownloadResponse:
     content_length: Optional[int] = None
+    """The length of the HTTP response body in bytes."""
 
     content_type: Optional[str] = None
 
     contents: Optional[BinaryIO] = None
 
     last_modified: Optional[str] = None
+    """The last modified time of the file in HTTP-date (RFC 7231) format."""
 
     def as_dict(self) -> dict:
         """Serializes the DownloadResponse into a dictionary suitable for use as a JSON request body."""
@@ -430,10 +432,12 @@ class GetDirectoryMetadataResponse:
 
 @dataclass
 class GetMetadataResponse:
     content_length: Optional[int] = None
+    """The length of the HTTP response body in bytes."""
 
     content_type: Optional[str] = None
 
     last_modified: Optional[str] = None
+    """The last modified time of the file in HTTP-date (RFC 7231) format."""
 
     def as_dict(self) -> dict:
         """Serializes the GetMetadataResponse into a dictionary suitable for use as a JSON request body."""
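The annotated fields are what a caller reads off a download or metadata response. A sketch assuming the Files API's standard `download()` method, which is not itself part of this diff; the volume path is a placeholder:

```
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

resp = w.files.download("/Volumes/main/default/my_volume/data.csv")
print(resp.content_length)  # size of the body in bytes
print(resp.last_modified)   # HTTP-date (RFC 7231) string
data = resp.contents.read()
```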