databricks-sdk 0.49.0__py3-none-any.whl → 0.51.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of databricks-sdk has been flagged as potentially problematic; consult the package registry's advisory page for details before upgrading.

@@ -86,19 +86,21 @@ from databricks.sdk.service.settings import (
86
86
  AibiDashboardEmbeddingApprovedDomainsAPI, AutomaticClusterUpdateAPI,
87
87
  ComplianceSecurityProfileAPI, CredentialsManagerAPI,
88
88
  CspEnablementAccountAPI, DefaultNamespaceAPI, DisableLegacyAccessAPI,
89
- DisableLegacyDbfsAPI, DisableLegacyFeaturesAPI, EnableIpAccessListsAPI,
90
- EnhancedSecurityMonitoringAPI, EsmEnablementAccountAPI, IpAccessListsAPI,
91
- NetworkConnectivityAPI, NotificationDestinationsAPI, PersonalComputeAPI,
89
+ DisableLegacyDbfsAPI, DisableLegacyFeaturesAPI, EnableExportNotebookAPI,
90
+ EnableIpAccessListsAPI, EnableNotebookTableClipboardAPI,
91
+ EnableResultsDownloadingAPI, EnhancedSecurityMonitoringAPI,
92
+ EsmEnablementAccountAPI, IpAccessListsAPI, NetworkConnectivityAPI,
93
+ NotificationDestinationsAPI, PersonalComputeAPI,
92
94
  RestrictWorkspaceAdminsAPI, SettingsAPI, TokenManagementAPI, TokensAPI,
93
95
  WorkspaceConfAPI)
94
96
  from databricks.sdk.service.sharing import (ProvidersAPI,
95
97
  RecipientActivationAPI,
96
98
  RecipientsAPI, SharesAPI)
97
99
  from databricks.sdk.service.sql import (AlertsAPI, AlertsLegacyAPI,
98
- DashboardsAPI, DashboardWidgetsAPI,
99
- DataSourcesAPI, DbsqlPermissionsAPI,
100
- QueriesAPI, QueriesLegacyAPI,
101
- QueryHistoryAPI,
100
+ AlertsV2API, DashboardsAPI,
101
+ DashboardWidgetsAPI, DataSourcesAPI,
102
+ DbsqlPermissionsAPI, QueriesAPI,
103
+ QueriesLegacyAPI, QueryHistoryAPI,
102
104
  QueryVisualizationsAPI,
103
105
  QueryVisualizationsLegacyAPI,
104
106
  RedashConfigAPI, StatementExecutionAPI,
@@ -168,6 +170,7 @@ class WorkspaceClient:
168
170
  product_version="0.0.0",
169
171
  credentials_strategy: Optional[CredentialsStrategy] = None,
170
172
  credentials_provider: Optional[CredentialsStrategy] = None,
173
+ token_audience: Optional[str] = None,
171
174
  config: Optional[client.Config] = None,
172
175
  ):
173
176
  if not config:
@@ -196,6 +199,7 @@ class WorkspaceClient:
196
199
  debug_headers=debug_headers,
197
200
  product=product,
198
201
  product_version=product_version,
202
+ token_audience=token_audience,
199
203
  )
200
204
  self._config = config.copy()
201
205
  self._dbutils = _make_dbutils(self._config)
@@ -205,6 +209,7 @@ class WorkspaceClient:
205
209
  self._account_access_control_proxy = service.iam.AccountAccessControlProxyAPI(self._api_client)
206
210
  self._alerts = service.sql.AlertsAPI(self._api_client)
207
211
  self._alerts_legacy = service.sql.AlertsLegacyAPI(self._api_client)
212
+ self._alerts_v2 = service.sql.AlertsV2API(self._api_client)
208
213
  self._apps = service.apps.AppsAPI(self._api_client)
209
214
  self._artifact_allowlists = service.catalog.ArtifactAllowlistsAPI(self._api_client)
210
215
  self._catalogs = service.catalog.CatalogsAPI(self._api_client)
@@ -287,7 +292,7 @@ class WorkspaceClient:
287
292
  self._service_principals = service.iam.ServicePrincipalsAPI(self._api_client)
288
293
  self._serving_endpoints = serving_endpoints
289
294
  serving_endpoints_data_plane_token_source = DataPlaneTokenSource(
290
- self._config.host, self._config.oauth_token, not self._config.enable_experimental_async_token_refresh
295
+ self._config.host, self._config.oauth_token, self._config.disable_async_token_refresh
291
296
  )
292
297
  self._serving_endpoints_data_plane = service.serving.ServingEndpointsDataPlaneAPI(
293
298
  self._api_client, serving_endpoints, serving_endpoints_data_plane_token_source
@@ -344,6 +349,11 @@ class WorkspaceClient:
344
349
  """The alerts API can be used to perform CRUD operations on alerts."""
345
350
  return self._alerts_legacy
346
351
 
352
+ @property
353
+ def alerts_v2(self) -> service.sql.AlertsV2API:
354
+ """TODO: Add description."""
355
+ return self._alerts_v2
356
+
347
357
  @property
348
358
  def apps(self) -> service.apps.AppsAPI:
349
359
  """Apps run directly on a customer’s Databricks instance, integrate with their data, use and extend Databricks services, and enable users to interact through single sign-on."""
@@ -860,6 +870,7 @@ class AccountClient:
860
870
  product_version="0.0.0",
861
871
  credentials_strategy: Optional[CredentialsStrategy] = None,
862
872
  credentials_provider: Optional[CredentialsStrategy] = None,
873
+ token_audience: Optional[str] = None,
863
874
  config: Optional[client.Config] = None,
864
875
  ):
865
876
  if not config:
@@ -888,6 +899,7 @@ class AccountClient:
888
899
  debug_headers=debug_headers,
889
900
  product=product,
890
901
  product_version=product_version,
902
+ token_audience=token_audience,
891
903
  )
892
904
  self._config = config.copy()
893
905
  self._api_client = client.ApiClient(self._config)
databricks/sdk/config.py CHANGED
@@ -61,6 +61,7 @@ class Config:
61
61
  host: str = ConfigAttribute(env="DATABRICKS_HOST")
62
62
  account_id: str = ConfigAttribute(env="DATABRICKS_ACCOUNT_ID")
63
63
  token: str = ConfigAttribute(env="DATABRICKS_TOKEN", auth="pat", sensitive=True)
64
+ token_audience: str = ConfigAttribute(env="DATABRICKS_TOKEN_AUDIENCE", auth="github-oidc")
64
65
  username: str = ConfigAttribute(env="DATABRICKS_USERNAME", auth="basic")
65
66
  password: str = ConfigAttribute(env="DATABRICKS_PASSWORD", auth="basic", sensitive=True)
66
67
  client_id: str = ConfigAttribute(env="DATABRICKS_CLIENT_ID", auth="oauth")
@@ -95,9 +96,7 @@ class Config:
95
96
  max_connections_per_pool: int = ConfigAttribute()
96
97
  databricks_environment: Optional[DatabricksEnvironment] = None
97
98
 
98
- enable_experimental_async_token_refresh: bool = ConfigAttribute(
99
- env="DATABRICKS_ENABLE_EXPERIMENTAL_ASYNC_TOKEN_REFRESH"
100
- )
99
+ disable_async_token_refresh: bool = ConfigAttribute(env="DATABRICKS_DISABLE_ASYNC_TOKEN_REFRESH")
101
100
 
102
101
  enable_experimental_files_api_client: bool = ConfigAttribute(env="DATABRICKS_ENABLE_EXPERIMENTAL_FILES_API_CLIENT")
103
102
  files_api_client_download_max_total_recovers = None
@@ -23,6 +23,7 @@ from google.oauth2 import service_account # type: ignore
23
23
  from .azure import add_sp_management_token, add_workspace_id_header
24
24
  from .oauth import (ClientCredentials, OAuthClient, Refreshable, Token,
25
25
  TokenCache, TokenSource)
26
+ from .oidc_token_supplier import GitHubOIDCTokenSupplier
26
27
 
27
28
  CredentialsProvider = Callable[[], Dict[str, str]]
28
29
 
@@ -191,7 +192,7 @@ def oauth_service_principal(cfg: "Config") -> Optional[CredentialsProvider]:
191
192
  token_url=oidc.token_endpoint,
192
193
  scopes=["all-apis"],
193
194
  use_header=True,
194
- disable_async=not cfg.enable_experimental_async_token_refresh,
195
+ disable_async=cfg.disable_async_token_refresh,
195
196
  )
196
197
 
197
198
  def inner() -> Dict[str, str]:
@@ -291,7 +292,7 @@ def azure_service_principal(cfg: "Config") -> CredentialsProvider:
291
292
  token_url=f"{aad_endpoint}{cfg.azure_tenant_id}/oauth2/token",
292
293
  endpoint_params={"resource": resource},
293
294
  use_params=True,
294
- disable_async=not cfg.enable_experimental_async_token_refresh,
295
+ disable_async=cfg.disable_async_token_refresh,
295
296
  )
296
297
 
297
298
  _ensure_host_present(cfg, token_source_for)
@@ -314,6 +315,58 @@ def azure_service_principal(cfg: "Config") -> CredentialsProvider:
314
315
  return OAuthCredentialsProvider(refreshed_headers, token)
315
316
 
316
317
 
318
+ @oauth_credentials_strategy("github-oidc", ["host", "client_id"])
319
+ def databricks_wif(cfg: "Config") -> Optional[CredentialsProvider]:
320
+ """
321
+ DatabricksWIFCredentials uses a Token Supplier to get a JWT Token and exchanges
322
+ it for a Databricks Token.
323
+
324
+ Supported suppliers:
325
+ - GitHub OIDC
326
+ """
327
+ supplier = GitHubOIDCTokenSupplier()
328
+
329
+ audience = cfg.token_audience
330
+ if audience is None and cfg.is_account_client:
331
+ audience = cfg.account_id
332
+ if audience is None and not cfg.is_account_client:
333
+ audience = cfg.oidc_endpoints.token_endpoint
334
+
335
+ # Try to get an idToken. If no supplier returns a token, we cannot use this authentication mode.
336
+ id_token = supplier.get_oidc_token(audience)
337
+ if not id_token:
338
+ return None
339
+
340
+ def token_source_for(audience: str) -> TokenSource:
341
+ id_token = supplier.get_oidc_token(audience)
342
+ if not id_token:
343
+ # Should not happen, since we checked it above.
344
+ raise Exception("Cannot get OIDC token")
345
+ params = {
346
+ "subject_token_type": "urn:ietf:params:oauth:token-type:jwt",
347
+ "subject_token": id_token,
348
+ "grant_type": "urn:ietf:params:oauth:grant-type:token-exchange",
349
+ }
350
+ return ClientCredentials(
351
+ client_id=cfg.client_id,
352
+ client_secret="", # we have no (rotatable) secrets in OIDC flow
353
+ token_url=cfg.oidc_endpoints.token_endpoint,
354
+ endpoint_params=params,
355
+ scopes=["all-apis"],
356
+ use_params=True,
357
+ disable_async=cfg.disable_async_token_refresh,
358
+ )
359
+
360
+ def refreshed_headers() -> Dict[str, str]:
361
+ token = token_source_for(audience).token()
362
+ return {"Authorization": f"{token.token_type} {token.access_token}"}
363
+
364
+ def token() -> Token:
365
+ return token_source_for(audience).token()
366
+
367
+ return OAuthCredentialsProvider(refreshed_headers, token)
368
+
369
+
317
370
  @oauth_credentials_strategy("github-oidc-azure", ["host", "azure_client_id"])
318
371
  def github_oidc_azure(cfg: "Config") -> Optional[CredentialsProvider]:
319
372
  if "ACTIONS_ID_TOKEN_REQUEST_TOKEN" not in os.environ:
@@ -325,16 +378,8 @@ def github_oidc_azure(cfg: "Config") -> Optional[CredentialsProvider]:
325
378
  if not cfg.is_azure:
326
379
  return None
327
380
 
328
- # See https://docs.github.com/en/actions/deployment/security-hardening-your-deployments/configuring-openid-connect-in-cloud-providers
329
- headers = {"Authorization": f"Bearer {os.environ['ACTIONS_ID_TOKEN_REQUEST_TOKEN']}"}
330
- endpoint = f"{os.environ['ACTIONS_ID_TOKEN_REQUEST_URL']}&audience=api://AzureADTokenExchange"
331
- response = requests.get(endpoint, headers=headers)
332
- if not response.ok:
333
- return None
334
-
335
- # get the ID Token with aud=api://AzureADTokenExchange sub=repo:org/repo:environment:name
336
- response_json = response.json()
337
- if "value" not in response_json:
381
+ token = GitHubOIDCTokenSupplier().get_oidc_token("api://AzureADTokenExchange")
382
+ if not token:
338
383
  return None
339
384
 
340
385
  logger.info(
@@ -344,7 +389,7 @@ def github_oidc_azure(cfg: "Config") -> Optional[CredentialsProvider]:
344
389
  params = {
345
390
  "client_assertion_type": "urn:ietf:params:oauth:client-assertion-type:jwt-bearer",
346
391
  "resource": cfg.effective_azure_login_app_id,
347
- "client_assertion": response_json["value"],
392
+ "client_assertion": token,
348
393
  }
349
394
  aad_endpoint = cfg.arm_environment.active_directory_endpoint
350
395
  if not cfg.azure_tenant_id:
@@ -357,7 +402,7 @@ def github_oidc_azure(cfg: "Config") -> Optional[CredentialsProvider]:
357
402
  token_url=f"{aad_endpoint}{cfg.azure_tenant_id}/oauth2/token",
358
403
  endpoint_params=params,
359
404
  use_params=True,
360
- disable_async=not cfg.enable_experimental_async_token_refresh,
405
+ disable_async=cfg.disable_async_token_refresh,
361
406
  )
362
407
 
363
408
  def refreshed_headers() -> Dict[str, str]:
@@ -694,7 +739,7 @@ class DatabricksCliTokenSource(CliTokenSource):
694
739
  token_type_field="token_type",
695
740
  access_token_field="access_token",
696
741
  expiry_field="expiry",
697
- disable_async=not cfg.enable_experimental_async_token_refresh,
742
+ disable_async=cfg.disable_async_token_refresh,
698
743
  )
699
744
 
700
745
  @staticmethod
@@ -927,6 +972,7 @@ class DefaultCredentials:
927
972
  basic_auth,
928
973
  metadata_service,
929
974
  oauth_service_principal,
975
+ databricks_wif,
930
976
  azure_service_principal,
931
977
  github_oidc_azure,
932
978
  azure_cli,
@@ -0,0 +1,28 @@
1
+ import os
2
+ from typing import Optional
3
+
4
+ import requests
5
+
6
+
7
+ class GitHubOIDCTokenSupplier:
8
+ """
9
+ Supplies OIDC tokens from GitHub Actions.
10
+ """
11
+
12
+ def get_oidc_token(self, audience: str) -> Optional[str]:
13
+ if "ACTIONS_ID_TOKEN_REQUEST_TOKEN" not in os.environ or "ACTIONS_ID_TOKEN_REQUEST_URL" not in os.environ:
14
+ # not in GitHub actions
15
+ return None
16
+ # See https://docs.github.com/en/actions/deployment/security-hardening-your-deployments/configuring-openid-connect-in-cloud-providers
17
+ headers = {"Authorization": f"Bearer {os.environ['ACTIONS_ID_TOKEN_REQUEST_TOKEN']}"}
18
+ endpoint = f"{os.environ['ACTIONS_ID_TOKEN_REQUEST_URL']}&audience={audience}"
19
+ response = requests.get(endpoint, headers=headers)
20
+ if not response.ok:
21
+ return None
22
+
23
+ # get the ID Token with aud=api://AzureADTokenExchange sub=repo:org/repo:environment:name
24
+ response_json = response.json()
25
+ if "value" not in response_json:
26
+ return None
27
+
28
+ return response_json["value"]
@@ -1173,12 +1173,12 @@ class AppsAPI:
1173
1173
  attempt += 1
1174
1174
  raise TimeoutError(f"timed out after {timeout}: {status_message}")
1175
1175
 
1176
- def create(self, *, app: Optional[App] = None, no_compute: Optional[bool] = None) -> Wait[App]:
1176
+ def create(self, app: App, *, no_compute: Optional[bool] = None) -> Wait[App]:
1177
1177
  """Create an app.
1178
1178
 
1179
1179
  Creates a new app.
1180
1180
 
1181
- :param app: :class:`App` (optional)
1181
+ :param app: :class:`App`
1182
1182
  :param no_compute: bool (optional)
1183
1183
  If true, the app will not be started after creation.
1184
1184
 
@@ -1198,9 +1198,7 @@ class AppsAPI:
1198
1198
  op_response = self._api.do("POST", "/api/2.0/apps", query=query, body=body, headers=headers)
1199
1199
  return Wait(self.wait_get_app_active, response=App.from_dict(op_response), name=op_response["name"])
1200
1200
 
1201
- def create_and_wait(
1202
- self, *, app: Optional[App] = None, no_compute: Optional[bool] = None, timeout=timedelta(minutes=20)
1203
- ) -> App:
1201
+ def create_and_wait(self, app: App, *, no_compute: Optional[bool] = None, timeout=timedelta(minutes=20)) -> App:
1204
1202
  return self.create(app=app, no_compute=no_compute).result(timeout=timeout)
1205
1203
 
1206
1204
  def delete(self, name: str) -> App:
@@ -1221,14 +1219,14 @@ class AppsAPI:
1221
1219
  res = self._api.do("DELETE", f"/api/2.0/apps/{name}", headers=headers)
1222
1220
  return App.from_dict(res)
1223
1221
 
1224
- def deploy(self, app_name: str, *, app_deployment: Optional[AppDeployment] = None) -> Wait[AppDeployment]:
1222
+ def deploy(self, app_name: str, app_deployment: AppDeployment) -> Wait[AppDeployment]:
1225
1223
  """Create an app deployment.
1226
1224
 
1227
1225
  Creates an app deployment for the app with the supplied name.
1228
1226
 
1229
1227
  :param app_name: str
1230
1228
  The name of the app.
1231
- :param app_deployment: :class:`AppDeployment` (optional)
1229
+ :param app_deployment: :class:`AppDeployment`
1232
1230
 
1233
1231
  :returns:
1234
1232
  Long-running operation waiter for :class:`AppDeployment`.
@@ -1249,7 +1247,7 @@ class AppsAPI:
1249
1247
  )
1250
1248
 
1251
1249
  def deploy_and_wait(
1252
- self, app_name: str, *, app_deployment: Optional[AppDeployment] = None, timeout=timedelta(minutes=20)
1250
+ self, app_name: str, app_deployment: AppDeployment, timeout=timedelta(minutes=20)
1253
1251
  ) -> AppDeployment:
1254
1252
  return self.deploy(app_deployment=app_deployment, app_name=app_name).result(timeout=timeout)
1255
1253
 
@@ -1466,7 +1464,7 @@ class AppsAPI:
1466
1464
  def stop_and_wait(self, name: str, timeout=timedelta(minutes=20)) -> App:
1467
1465
  return self.stop(name=name).result(timeout=timeout)
1468
1466
 
1469
- def update(self, name: str, *, app: Optional[App] = None) -> App:
1467
+ def update(self, name: str, app: App) -> App:
1470
1468
  """Update an app.
1471
1469
 
1472
1470
  Updates the app with the supplied name.
@@ -1474,7 +1472,7 @@ class AppsAPI:
1474
1472
  :param name: str
1475
1473
  The name of the app. The name must contain only lowercase alphanumeric characters and hyphens. It
1476
1474
  must be unique within the workspace.
1477
- :param app: :class:`App` (optional)
1475
+ :param app: :class:`App`
1478
1476
 
1479
1477
  :returns: :class:`App`
1480
1478
  """
@@ -364,6 +364,10 @@ class BudgetConfigurationFilterWorkspaceIdClause:
364
364
  class BudgetPolicy:
365
365
  """Contains the BudgetPolicy details."""
366
366
 
367
+ binding_workspace_ids: Optional[List[int]] = None
368
+ """List of workspaces that this budget policy will be exclusively bound to. An empty binding
369
+ implies that this budget policy is open to any workspace in the account."""
370
+
367
371
  custom_tags: Optional[List[compute.CustomPolicyTag]] = None
368
372
  """A list of tags defined by the customer. At most 20 entries are allowed per policy."""
369
373
 
@@ -378,6 +382,8 @@ class BudgetPolicy:
378
382
  def as_dict(self) -> dict:
379
383
  """Serializes the BudgetPolicy into a dictionary suitable for use as a JSON request body."""
380
384
  body = {}
385
+ if self.binding_workspace_ids:
386
+ body["binding_workspace_ids"] = [v for v in self.binding_workspace_ids]
381
387
  if self.custom_tags:
382
388
  body["custom_tags"] = [v.as_dict() for v in self.custom_tags]
383
389
  if self.policy_id is not None:
@@ -389,6 +395,8 @@ class BudgetPolicy:
389
395
  def as_shallow_dict(self) -> dict:
390
396
  """Serializes the BudgetPolicy into a shallow dictionary of its immediate attributes."""
391
397
  body = {}
398
+ if self.binding_workspace_ids:
399
+ body["binding_workspace_ids"] = self.binding_workspace_ids
392
400
  if self.custom_tags:
393
401
  body["custom_tags"] = self.custom_tags
394
402
  if self.policy_id is not None:
@@ -401,6 +409,7 @@ class BudgetPolicy:
401
409
  def from_dict(cls, d: Dict[str, Any]) -> BudgetPolicy:
402
410
  """Deserializes the BudgetPolicy from a dictionary."""
403
411
  return cls(
412
+ binding_workspace_ids=d.get("binding_workspace_ids", None),
404
413
  custom_tags=_repeated_dict(d, "custom_tags", compute.CustomPolicyTag),
405
414
  policy_id=d.get("policy_id", None),
406
415
  policy_name=d.get("policy_name", None),
@@ -1864,7 +1873,7 @@ class BudgetPolicyAPI:
1864
1873
  query["page_token"] = json["next_page_token"]
1865
1874
 
1866
1875
  def update(
1867
- self, policy_id: str, *, limit_config: Optional[LimitConfig] = None, policy: Optional[BudgetPolicy] = None
1876
+ self, policy_id: str, policy: BudgetPolicy, *, limit_config: Optional[LimitConfig] = None
1868
1877
  ) -> BudgetPolicy:
1869
1878
  """Update a budget policy.
1870
1879
 
@@ -1872,10 +1881,10 @@ class BudgetPolicyAPI:
1872
1881
 
1873
1882
  :param policy_id: str
1874
1883
  The Id of the policy. This field is generated by Databricks and globally unique.
1884
+ :param policy: :class:`BudgetPolicy`
1885
+ Contains the BudgetPolicy details.
1875
1886
  :param limit_config: :class:`LimitConfig` (optional)
1876
1887
  DEPRECATED. This is redundant field as LimitConfig is part of the BudgetPolicy
1877
- :param policy: :class:`BudgetPolicy` (optional)
1878
- Contains the BudgetPolicy details.
1879
1888
 
1880
1889
  :returns: :class:`BudgetPolicy`
1881
1890
  """
@@ -1271,6 +1271,8 @@ class ColumnTypeName(Enum):
1271
1271
  DECIMAL = "DECIMAL"
1272
1272
  DOUBLE = "DOUBLE"
1273
1273
  FLOAT = "FLOAT"
1274
+ GEOGRAPHY = "GEOGRAPHY"
1275
+ GEOMETRY = "GEOMETRY"
1274
1276
  INT = "INT"
1275
1277
  INTERVAL = "INTERVAL"
1276
1278
  LONG = "LONG"
@@ -2592,6 +2594,11 @@ class CreateVolumeRequestContent:
2592
2594
  """The name of the volume"""
2593
2595
 
2594
2596
  volume_type: VolumeType
2597
+ """The type of the volume. An external volume is located in the specified external location. A
2598
+ managed volume is located in the default location which is specified by the parent schema, or
2599
+ the parent catalog, or the Metastore. [Learn more]
2600
+
2601
+ [Learn more]: https://docs.databricks.com/aws/en/volumes/managed-vs-external"""
2595
2602
 
2596
2603
  comment: Optional[str] = None
2597
2604
  """The comment attached to the volume"""
@@ -6668,6 +6675,7 @@ class Privilege(Enum):
6668
6675
  BROWSE = "BROWSE"
6669
6676
  CREATE = "CREATE"
6670
6677
  CREATE_CATALOG = "CREATE_CATALOG"
6678
+ CREATE_CLEAN_ROOM = "CREATE_CLEAN_ROOM"
6671
6679
  CREATE_CONNECTION = "CREATE_CONNECTION"
6672
6680
  CREATE_EXTERNAL_LOCATION = "CREATE_EXTERNAL_LOCATION"
6673
6681
  CREATE_EXTERNAL_TABLE = "CREATE_EXTERNAL_TABLE"
@@ -6688,9 +6696,11 @@ class Privilege(Enum):
6688
6696
  CREATE_VIEW = "CREATE_VIEW"
6689
6697
  CREATE_VOLUME = "CREATE_VOLUME"
6690
6698
  EXECUTE = "EXECUTE"
6699
+ EXECUTE_CLEAN_ROOM_TASK = "EXECUTE_CLEAN_ROOM_TASK"
6691
6700
  MANAGE = "MANAGE"
6692
6701
  MANAGE_ALLOWLIST = "MANAGE_ALLOWLIST"
6693
6702
  MODIFY = "MODIFY"
6703
+ MODIFY_CLEAN_ROOM = "MODIFY_CLEAN_ROOM"
6694
6704
  READ_FILES = "READ_FILES"
6695
6705
  READ_PRIVATE_FILES = "READ_PRIVATE_FILES"
6696
6706
  READ_VOLUME = "READ_VOLUME"
@@ -7353,6 +7363,15 @@ class SetArtifactAllowlist:
7353
7363
  artifact_type: Optional[ArtifactType] = None
7354
7364
  """The artifact type of the allowlist."""
7355
7365
 
7366
+ created_at: Optional[int] = None
7367
+ """Time at which this artifact allowlist was set, in epoch milliseconds."""
7368
+
7369
+ created_by: Optional[str] = None
7370
+ """Username of the user who set the artifact allowlist."""
7371
+
7372
+ metastore_id: Optional[str] = None
7373
+ """Unique identifier of parent metastore."""
7374
+
7356
7375
  def as_dict(self) -> dict:
7357
7376
  """Serializes the SetArtifactAllowlist into a dictionary suitable for use as a JSON request body."""
7358
7377
  body = {}
@@ -7360,6 +7379,12 @@ class SetArtifactAllowlist:
7360
7379
  body["artifact_matchers"] = [v.as_dict() for v in self.artifact_matchers]
7361
7380
  if self.artifact_type is not None:
7362
7381
  body["artifact_type"] = self.artifact_type.value
7382
+ if self.created_at is not None:
7383
+ body["created_at"] = self.created_at
7384
+ if self.created_by is not None:
7385
+ body["created_by"] = self.created_by
7386
+ if self.metastore_id is not None:
7387
+ body["metastore_id"] = self.metastore_id
7363
7388
  return body
7364
7389
 
7365
7390
  def as_shallow_dict(self) -> dict:
@@ -7369,6 +7394,12 @@ class SetArtifactAllowlist:
7369
7394
  body["artifact_matchers"] = self.artifact_matchers
7370
7395
  if self.artifact_type is not None:
7371
7396
  body["artifact_type"] = self.artifact_type
7397
+ if self.created_at is not None:
7398
+ body["created_at"] = self.created_at
7399
+ if self.created_by is not None:
7400
+ body["created_by"] = self.created_by
7401
+ if self.metastore_id is not None:
7402
+ body["metastore_id"] = self.metastore_id
7372
7403
  return body
7373
7404
 
7374
7405
  @classmethod
@@ -7377,6 +7408,9 @@ class SetArtifactAllowlist:
7377
7408
  return cls(
7378
7409
  artifact_matchers=_repeated_dict(d, "artifact_matchers", ArtifactMatcher),
7379
7410
  artifact_type=_enum(d, "artifact_type", ArtifactType),
7411
+ created_at=d.get("created_at", None),
7412
+ created_by=d.get("created_by", None),
7413
+ metastore_id=d.get("metastore_id", None),
7380
7414
  )
7381
7415
 
7382
7416
 
@@ -9835,6 +9869,11 @@ class VolumeInfo:
9835
9869
  """The unique identifier of the volume"""
9836
9870
 
9837
9871
  volume_type: Optional[VolumeType] = None
9872
+ """The type of the volume. An external volume is located in the specified external location. A
9873
+ managed volume is located in the default location which is specified by the parent schema, or
9874
+ the parent catalog, or the Metastore. [Learn more]
9875
+
9876
+ [Learn more]: https://docs.databricks.com/aws/en/volumes/managed-vs-external"""
9838
9877
 
9839
9878
  def as_dict(self) -> dict:
9840
9879
  """Serializes the VolumeInfo into a dictionary suitable for use as a JSON request body."""
@@ -9939,6 +9978,11 @@ class VolumeInfo:
9939
9978
 
9940
9979
 
9941
9980
  class VolumeType(Enum):
9981
+ """The type of the volume. An external volume is located in the specified external location. A
9982
+ managed volume is located in the default location which is specified by the parent schema, or
9983
+ the parent catalog, or the Metastore. [Learn more]
9984
+
9985
+ [Learn more]: https://docs.databricks.com/aws/en/volumes/managed-vs-external"""
9942
9986
 
9943
9987
  EXTERNAL = "EXTERNAL"
9944
9988
  MANAGED = "MANAGED"
@@ -10450,7 +10494,15 @@ class ArtifactAllowlistsAPI:
10450
10494
  res = self._api.do("GET", f"/api/2.1/unity-catalog/artifact-allowlists/{artifact_type.value}", headers=headers)
10451
10495
  return ArtifactAllowlistInfo.from_dict(res)
10452
10496
 
10453
- def update(self, artifact_type: ArtifactType, artifact_matchers: List[ArtifactMatcher]) -> ArtifactAllowlistInfo:
10497
+ def update(
10498
+ self,
10499
+ artifact_type: ArtifactType,
10500
+ artifact_matchers: List[ArtifactMatcher],
10501
+ *,
10502
+ created_at: Optional[int] = None,
10503
+ created_by: Optional[str] = None,
10504
+ metastore_id: Optional[str] = None,
10505
+ ) -> ArtifactAllowlistInfo:
10454
10506
  """Set an artifact allowlist.
10455
10507
 
10456
10508
  Set the artifact allowlist of a certain artifact type. The whole artifact allowlist is replaced with
@@ -10461,12 +10513,24 @@ class ArtifactAllowlistsAPI:
10461
10513
  The artifact type of the allowlist.
10462
10514
  :param artifact_matchers: List[:class:`ArtifactMatcher`]
10463
10515
  A list of allowed artifact match patterns.
10516
+ :param created_at: int (optional)
10517
+ Time at which this artifact allowlist was set, in epoch milliseconds.
10518
+ :param created_by: str (optional)
10519
+ Username of the user who set the artifact allowlist.
10520
+ :param metastore_id: str (optional)
10521
+ Unique identifier of parent metastore.
10464
10522
 
10465
10523
  :returns: :class:`ArtifactAllowlistInfo`
10466
10524
  """
10467
10525
  body = {}
10468
10526
  if artifact_matchers is not None:
10469
10527
  body["artifact_matchers"] = [v.as_dict() for v in artifact_matchers]
10528
+ if created_at is not None:
10529
+ body["created_at"] = created_at
10530
+ if created_by is not None:
10531
+ body["created_by"] = created_by
10532
+ if metastore_id is not None:
10533
+ body["metastore_id"] = metastore_id
10470
10534
  headers = {
10471
10535
  "Accept": "application/json",
10472
10536
  "Content-Type": "application/json",
@@ -12337,12 +12401,12 @@ class OnlineTablesAPI:
12337
12401
  attempt += 1
12338
12402
  raise TimeoutError(f"timed out after {timeout}: {status_message}")
12339
12403
 
12340
- def create(self, *, table: Optional[OnlineTable] = None) -> Wait[OnlineTable]:
12404
+ def create(self, table: OnlineTable) -> Wait[OnlineTable]:
12341
12405
  """Create an Online Table.
12342
12406
 
12343
12407
  Create a new Online Table.
12344
12408
 
12345
- :param table: :class:`OnlineTable` (optional)
12409
+ :param table: :class:`OnlineTable`
12346
12410
  Online Table information.
12347
12411
 
12348
12412
  :returns:
@@ -12360,7 +12424,7 @@ class OnlineTablesAPI:
12360
12424
  self.wait_get_online_table_active, response=OnlineTable.from_dict(op_response), name=op_response["name"]
12361
12425
  )
12362
12426
 
12363
- def create_and_wait(self, *, table: Optional[OnlineTable] = None, timeout=timedelta(minutes=20)) -> OnlineTable:
12427
+ def create_and_wait(self, table: OnlineTable, timeout=timedelta(minutes=20)) -> OnlineTable:
12364
12428
  return self.create(table=table).result(timeout=timeout)
12365
12429
 
12366
12430
  def delete(self, name: str):
@@ -14270,6 +14334,11 @@ class VolumesAPI:
14270
14334
  :param name: str
14271
14335
  The name of the volume
14272
14336
  :param volume_type: :class:`VolumeType`
14337
+ The type of the volume. An external volume is located in the specified external location. A managed
14338
+ volume is located in the default location which is specified by the parent schema, or the parent
14339
+ catalog, or the Metastore. [Learn more]
14340
+
14341
+ [Learn more]: https://docs.databricks.com/aws/en/volumes/managed-vs-external
14273
14342
  :param comment: str (optional)
14274
14343
  The comment attached to the volume
14275
14344
  :param storage_location: str (optional)