databricks-sdk 0.50.0__py3-none-any.whl → 0.52.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of databricks-sdk might be problematic.
- databricks/sdk/__init__.py +15 -5
- databricks/sdk/config.py +2 -3
- databricks/sdk/credentials_provider.py +61 -15
- databricks/sdk/oidc_token_supplier.py +28 -0
- databricks/sdk/service/apps.py +8 -10
- databricks/sdk/service/billing.py +3 -3
- databricks/sdk/service/catalog.py +51 -4
- databricks/sdk/service/cleanrooms.py +9 -14
- databricks/sdk/service/compute.py +138 -6
- databricks/sdk/service/dashboards.py +24 -29
- databricks/sdk/service/files.py +2 -1
- databricks/sdk/service/jobs.py +73 -18
- databricks/sdk/service/ml.py +19 -2
- databricks/sdk/service/oauth2.py +8 -13
- databricks/sdk/service/pipelines.py +55 -27
- databricks/sdk/service/serving.py +11 -14
- databricks/sdk/service/settings.py +214 -125
- databricks/sdk/service/sql.py +744 -6
- databricks/sdk/service/vectorsearch.py +355 -159
- databricks/sdk/version.py +1 -1
- {databricks_sdk-0.50.0.dist-info → databricks_sdk-0.52.0.dist-info}/METADATA +11 -11
- {databricks_sdk-0.50.0.dist-info → databricks_sdk-0.52.0.dist-info}/RECORD +26 -25
- {databricks_sdk-0.50.0.dist-info → databricks_sdk-0.52.0.dist-info}/WHEEL +1 -1
- {databricks_sdk-0.50.0.dist-info → databricks_sdk-0.52.0.dist-info}/licenses/LICENSE +0 -0
- {databricks_sdk-0.50.0.dist-info → databricks_sdk-0.52.0.dist-info}/licenses/NOTICE +0 -0
- {databricks_sdk-0.50.0.dist-info → databricks_sdk-0.52.0.dist-info}/top_level.txt +0 -0
databricks/sdk/__init__.py
CHANGED

@@ -97,10 +97,10 @@ from databricks.sdk.service.sharing import (ProvidersAPI,
                                             RecipientActivationAPI,
                                             RecipientsAPI, SharesAPI)
 from databricks.sdk.service.sql import (AlertsAPI, AlertsLegacyAPI,
-
-
-
-                                        QueryHistoryAPI,
+                                        AlertsV2API, DashboardsAPI,
+                                        DashboardWidgetsAPI, DataSourcesAPI,
+                                        DbsqlPermissionsAPI, QueriesAPI,
+                                        QueriesLegacyAPI, QueryHistoryAPI,
                                         QueryVisualizationsAPI,
                                         QueryVisualizationsLegacyAPI,
                                         RedashConfigAPI, StatementExecutionAPI,

@@ -170,6 +170,7 @@ class WorkspaceClient:
         product_version="0.0.0",
         credentials_strategy: Optional[CredentialsStrategy] = None,
         credentials_provider: Optional[CredentialsStrategy] = None,
+        token_audience: Optional[str] = None,
         config: Optional[client.Config] = None,
     ):
         if not config:

@@ -198,6 +199,7 @@ class WorkspaceClient:
                 debug_headers=debug_headers,
                 product=product,
                 product_version=product_version,
+                token_audience=token_audience,
             )
         self._config = config.copy()
         self._dbutils = _make_dbutils(self._config)

@@ -207,6 +209,7 @@ class WorkspaceClient:
         self._account_access_control_proxy = service.iam.AccountAccessControlProxyAPI(self._api_client)
         self._alerts = service.sql.AlertsAPI(self._api_client)
         self._alerts_legacy = service.sql.AlertsLegacyAPI(self._api_client)
+        self._alerts_v2 = service.sql.AlertsV2API(self._api_client)
         self._apps = service.apps.AppsAPI(self._api_client)
         self._artifact_allowlists = service.catalog.ArtifactAllowlistsAPI(self._api_client)
         self._catalogs = service.catalog.CatalogsAPI(self._api_client)

@@ -289,7 +292,7 @@ class WorkspaceClient:
         self._service_principals = service.iam.ServicePrincipalsAPI(self._api_client)
         self._serving_endpoints = serving_endpoints
         serving_endpoints_data_plane_token_source = DataPlaneTokenSource(
-            self._config.host, self._config.oauth_token,
+            self._config.host, self._config.oauth_token, self._config.disable_async_token_refresh
         )
         self._serving_endpoints_data_plane = service.serving.ServingEndpointsDataPlaneAPI(
             self._api_client, serving_endpoints, serving_endpoints_data_plane_token_source

@@ -346,6 +349,11 @@ class WorkspaceClient:
         """The alerts API can be used to perform CRUD operations on alerts."""
         return self._alerts_legacy

+    @property
+    def alerts_v2(self) -> service.sql.AlertsV2API:
+        """TODO: Add description."""
+        return self._alerts_v2
+
     @property
     def apps(self) -> service.apps.AppsAPI:
         """Apps run directly on a customer’s Databricks instance, integrate with their data, use and extend Databricks services, and enable users to interact through single sign-on."""

@@ -862,6 +870,7 @@ class AccountClient:
         product_version="0.0.0",
         credentials_strategy: Optional[CredentialsStrategy] = None,
         credentials_provider: Optional[CredentialsStrategy] = None,
+        token_audience: Optional[str] = None,
         config: Optional[client.Config] = None,
     ):
         if not config:

@@ -890,6 +899,7 @@ class AccountClient:
                 debug_headers=debug_headers,
                 product=product,
                 product_version=product_version,
+                token_audience=token_audience,
             )
         self._config = config.copy()
         self._api_client = client.ApiClient(self._config)
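Taken together, these changes thread a new token_audience argument from both client constructors into Config and expose the v2 alerts service as a property. A minimal sketch of how a caller might use them; the host, client id, and audience values below are placeholders:

from databricks.sdk import WorkspaceClient

# Placeholder values; token_audience is only consumed by the new github-oidc strategy.
w = WorkspaceClient(
    host="https://example.cloud.databricks.com",
    client_id="my-service-principal-client-id",
    token_audience="my-federation-audience",
)

# New accessor added alongside the existing `alerts` and `alerts_legacy` properties.
alerts_v2_api = w.alerts_v2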
databricks/sdk/config.py
CHANGED

@@ -61,6 +61,7 @@ class Config:
     host: str = ConfigAttribute(env="DATABRICKS_HOST")
     account_id: str = ConfigAttribute(env="DATABRICKS_ACCOUNT_ID")
     token: str = ConfigAttribute(env="DATABRICKS_TOKEN", auth="pat", sensitive=True)
+    token_audience: str = ConfigAttribute(env="DATABRICKS_TOKEN_AUDIENCE", auth="github-oidc")
     username: str = ConfigAttribute(env="DATABRICKS_USERNAME", auth="basic")
     password: str = ConfigAttribute(env="DATABRICKS_PASSWORD", auth="basic", sensitive=True)
     client_id: str = ConfigAttribute(env="DATABRICKS_CLIENT_ID", auth="oauth")

@@ -95,9 +96,7 @@ class Config:
     max_connections_per_pool: int = ConfigAttribute()
     databricks_environment: Optional[DatabricksEnvironment] = None

-
-        env="DATABRICKS_ENABLE_EXPERIMENTAL_ASYNC_TOKEN_REFRESH"
-    )
+    disable_async_token_refresh: bool = ConfigAttribute(env="DATABRICKS_DISABLE_ASYNC_TOKEN_REFRESH")

     enable_experimental_files_api_client: bool = ConfigAttribute(env="DATABRICKS_ENABLE_EXPERIMENTAL_FILES_API_CLIENT")
     files_api_client_download_max_total_recovers = None
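The experimental opt-in flag is replaced by an opt-out: async token refresh is now the default and can be turned off via the new attribute or its environment variable. A sketch, assuming a plain PAT-authenticated config with placeholder host and token:

from databricks.sdk.core import Config

# Equivalent to exporting DATABRICKS_DISABLE_ASYNC_TOKEN_REFRESH=true.
cfg = Config(
    host="https://example.cloud.databricks.com",
    token="dapi-placeholder-token",
    disable_async_token_refresh=True,
)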
databricks/sdk/credentials_provider.py
CHANGED

@@ -23,6 +23,7 @@ from google.oauth2 import service_account  # type: ignore
 from .azure import add_sp_management_token, add_workspace_id_header
 from .oauth import (ClientCredentials, OAuthClient, Refreshable, Token,
                     TokenCache, TokenSource)
+from .oidc_token_supplier import GitHubOIDCTokenSupplier

 CredentialsProvider = Callable[[], Dict[str, str]]

@@ -191,7 +192,7 @@ def oauth_service_principal(cfg: "Config") -> Optional[CredentialsProvider]:
         token_url=oidc.token_endpoint,
         scopes=["all-apis"],
         use_header=True,
-        disable_async=
+        disable_async=cfg.disable_async_token_refresh,
     )

     def inner() -> Dict[str, str]:

@@ -291,7 +292,7 @@ def azure_service_principal(cfg: "Config") -> CredentialsProvider:
             token_url=f"{aad_endpoint}{cfg.azure_tenant_id}/oauth2/token",
             endpoint_params={"resource": resource},
             use_params=True,
-            disable_async=
+            disable_async=cfg.disable_async_token_refresh,
         )

     _ensure_host_present(cfg, token_source_for)

@@ -314,6 +315,58 @@ def azure_service_principal(cfg: "Config") -> CredentialsProvider:
     return OAuthCredentialsProvider(refreshed_headers, token)


+@oauth_credentials_strategy("github-oidc", ["host", "client_id"])
+def databricks_wif(cfg: "Config") -> Optional[CredentialsProvider]:
+    """
+    DatabricksWIFCredentials uses a Token Supplier to get a JWT Token and exchanges
+    it for a Databricks Token.
+
+    Supported suppliers:
+    - GitHub OIDC
+    """
+    supplier = GitHubOIDCTokenSupplier()
+
+    audience = cfg.token_audience
+    if audience is None and cfg.is_account_client:
+        audience = cfg.account_id
+    if audience is None and not cfg.is_account_client:
+        audience = cfg.oidc_endpoints.token_endpoint
+
+    # Try to get an idToken. If no supplier returns a token, we cannot use this authentication mode.
+    id_token = supplier.get_oidc_token(audience)
+    if not id_token:
+        return None
+
+    def token_source_for(audience: str) -> TokenSource:
+        id_token = supplier.get_oidc_token(audience)
+        if not id_token:
+            # Should not happen, since we checked it above.
+            raise Exception("Cannot get OIDC token")
+        params = {
+            "subject_token_type": "urn:ietf:params:oauth:token-type:jwt",
+            "subject_token": id_token,
+            "grant_type": "urn:ietf:params:oauth:grant-type:token-exchange",
+        }
+        return ClientCredentials(
+            client_id=cfg.client_id,
+            client_secret="",  # we have no (rotatable) secrets in OIDC flow
+            token_url=cfg.oidc_endpoints.token_endpoint,
+            endpoint_params=params,
+            scopes=["all-apis"],
+            use_params=True,
+            disable_async=cfg.disable_async_token_refresh,
+        )
+
+    def refreshed_headers() -> Dict[str, str]:
+        token = token_source_for(audience).token()
+        return {"Authorization": f"{token.token_type} {token.access_token}"}
+
+    def token() -> Token:
+        return token_source_for(audience).token()
+
+    return OAuthCredentialsProvider(refreshed_headers, token)
+
+
 @oauth_credentials_strategy("github-oidc-azure", ["host", "azure_client_id"])
 def github_oidc_azure(cfg: "Config") -> Optional[CredentialsProvider]:
     if "ACTIONS_ID_TOKEN_REQUEST_TOKEN" not in os.environ:

@@ -325,16 +378,8 @@ def github_oidc_azure(cfg: "Config") -> Optional[CredentialsProvider]:
     if not cfg.is_azure:
         return None

-
-
-    endpoint = f"{os.environ['ACTIONS_ID_TOKEN_REQUEST_URL']}&audience=api://AzureADTokenExchange"
-    response = requests.get(endpoint, headers=headers)
-    if not response.ok:
-        return None
-
-    # get the ID Token with aud=api://AzureADTokenExchange sub=repo:org/repo:environment:name
-    response_json = response.json()
-    if "value" not in response_json:
+    token = GitHubOIDCTokenSupplier().get_oidc_token("api://AzureADTokenExchange")
+    if not token:
         return None

     logger.info(

@@ -344,7 +389,7 @@ def github_oidc_azure(cfg: "Config") -> Optional[CredentialsProvider]:
     params = {
         "client_assertion_type": "urn:ietf:params:oauth:client-assertion-type:jwt-bearer",
         "resource": cfg.effective_azure_login_app_id,
-        "client_assertion":
+        "client_assertion": token,
     }
     aad_endpoint = cfg.arm_environment.active_directory_endpoint
     if not cfg.azure_tenant_id:

@@ -357,7 +402,7 @@ def github_oidc_azure(cfg: "Config") -> Optional[CredentialsProvider]:
         token_url=f"{aad_endpoint}{cfg.azure_tenant_id}/oauth2/token",
         endpoint_params=params,
         use_params=True,
-        disable_async=
+        disable_async=cfg.disable_async_token_refresh,
     )

     def refreshed_headers() -> Dict[str, str]:

@@ -694,7 +739,7 @@ class DatabricksCliTokenSource(CliTokenSource):
             token_type_field="token_type",
             access_token_field="access_token",
             expiry_field="expiry",
-            disable_async=
+            disable_async=cfg.disable_async_token_refresh,
         )

     @staticmethod

@@ -927,6 +972,7 @@ class DefaultCredentials:
             basic_auth,
             metadata_service,
             oauth_service_principal,
+            databricks_wif,
             azure_service_principal,
             github_oidc_azure,
             azure_cli,
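The new databricks_wif strategy is registered ahead of the Azure strategies in the DefaultCredentials chain, so inside a GitHub Actions job with the id-token: write permission it can be picked up automatically; it can also be requested explicitly through auth_type. A sketch under those assumptions, with placeholder host, client id, and audience:

from databricks.sdk import WorkspaceClient

# Only works where ACTIONS_ID_TOKEN_REQUEST_TOKEN/_URL are set, i.e. on a GitHub runner.
w = WorkspaceClient(
    host="https://example.cloud.databricks.com",
    client_id="service-principal-application-id",
    auth_type="github-oidc",                  # the name the strategy is registered under
    token_audience="my-federation-audience",  # optional; falls back to the defaults derived above
)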
databricks/sdk/oidc_token_supplier.py
ADDED

@@ -0,0 +1,28 @@
+import os
+from typing import Optional
+
+import requests
+
+
+class GitHubOIDCTokenSupplier:
+    """
+    Supplies OIDC tokens from GitHub Actions.
+    """
+
+    def get_oidc_token(self, audience: str) -> Optional[str]:
+        if "ACTIONS_ID_TOKEN_REQUEST_TOKEN" not in os.environ or "ACTIONS_ID_TOKEN_REQUEST_URL" not in os.environ:
+            # not in GitHub actions
+            return None
+        # See https://docs.github.com/en/actions/deployment/security-hardening-your-deployments/configuring-openid-connect-in-cloud-providers
+        headers = {"Authorization": f"Bearer {os.environ['ACTIONS_ID_TOKEN_REQUEST_TOKEN']}"}
+        endpoint = f"{os.environ['ACTIONS_ID_TOKEN_REQUEST_URL']}&audience={audience}"
+        response = requests.get(endpoint, headers=headers)
+        if not response.ok:
+            return None
+
+        # get the ID Token with aud=api://AzureADTokenExchange sub=repo:org/repo:environment:name
+        response_json = response.json()
+        if "value" not in response_json:
+            return None
+
+        return response_json["value"]
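The supplier encapsulates the GitHub Actions OIDC endpoint lookup that github_oidc_azure previously did inline. A small usage sketch; outside of a GitHub Actions runner, or without the id-token: write permission, it simply returns None:

from databricks.sdk.oidc_token_supplier import GitHubOIDCTokenSupplier

supplier = GitHubOIDCTokenSupplier()
# The audience is caller-chosen; "api://AzureADTokenExchange" is what the Azure strategy passes.
id_token = supplier.get_oidc_token("api://AzureADTokenExchange")
if id_token is None:
    print("No GitHub Actions OIDC token available in this environment")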
databricks/sdk/service/apps.py
CHANGED

@@ -1173,12 +1173,12 @@ class AppsAPI:
             attempt += 1
         raise TimeoutError(f"timed out after {timeout}: {status_message}")

-    def create(self,
+    def create(self, app: App, *, no_compute: Optional[bool] = None) -> Wait[App]:
         """Create an app.

         Creates a new app.

-        :param app: :class:`App`
+        :param app: :class:`App`
         :param no_compute: bool (optional)
           If true, the app will not be started after creation.

@@ -1198,9 +1198,7 @@ class AppsAPI:
         op_response = self._api.do("POST", "/api/2.0/apps", query=query, body=body, headers=headers)
         return Wait(self.wait_get_app_active, response=App.from_dict(op_response), name=op_response["name"])

-    def create_and_wait(
-        self, *, app: Optional[App] = None, no_compute: Optional[bool] = None, timeout=timedelta(minutes=20)
-    ) -> App:
+    def create_and_wait(self, app: App, *, no_compute: Optional[bool] = None, timeout=timedelta(minutes=20)) -> App:
         return self.create(app=app, no_compute=no_compute).result(timeout=timeout)

     def delete(self, name: str) -> App:

@@ -1221,14 +1219,14 @@ class AppsAPI:
         res = self._api.do("DELETE", f"/api/2.0/apps/{name}", headers=headers)
         return App.from_dict(res)

-    def deploy(self, app_name: str,
+    def deploy(self, app_name: str, app_deployment: AppDeployment) -> Wait[AppDeployment]:
         """Create an app deployment.

         Creates an app deployment for the app with the supplied name.

         :param app_name: str
           The name of the app.
-        :param app_deployment: :class:`AppDeployment`
+        :param app_deployment: :class:`AppDeployment`

         :returns:
           Long-running operation waiter for :class:`AppDeployment`.

@@ -1249,7 +1247,7 @@ class AppsAPI:
         )

     def deploy_and_wait(
-        self, app_name: str,
+        self, app_name: str, app_deployment: AppDeployment, timeout=timedelta(minutes=20)
     ) -> AppDeployment:
         return self.deploy(app_deployment=app_deployment, app_name=app_name).result(timeout=timeout)

@@ -1466,7 +1464,7 @@ class AppsAPI:
     def stop_and_wait(self, name: str, timeout=timedelta(minutes=20)) -> App:
         return self.stop(name=name).result(timeout=timeout)

-    def update(self, name: str,
+    def update(self, name: str, app: App) -> App:
         """Update an app.

         Updates the app with the supplied name.

@@ -1474,7 +1472,7 @@ class AppsAPI:
         :param name: str
           The name of the app. The name must contain only lowercase alphanumeric characters and hyphens. It
           must be unique within the workspace.
-        :param app: :class:`App`
+        :param app: :class:`App`

         :returns: :class:`App`
         """
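The practical effect of these signature changes is that app and app_deployment are now required positional parameters rather than optional keywords. A sketch of a create call under the new signature; the app name is a placeholder:

from databricks.sdk import WorkspaceClient
from databricks.sdk.service.apps import App

w = WorkspaceClient()
# `App(...)` is now passed positionally and is mandatory; no_compute stays keyword-only.
created = w.apps.create_and_wait(App(name="my-demo-app"), no_compute=True)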
databricks/sdk/service/billing.py
CHANGED

@@ -1873,7 +1873,7 @@ class BudgetPolicyAPI:
                 query["page_token"] = json["next_page_token"]

     def update(
-        self, policy_id: str, *, limit_config: Optional[LimitConfig] = None
+        self, policy_id: str, policy: BudgetPolicy, *, limit_config: Optional[LimitConfig] = None
     ) -> BudgetPolicy:
         """Update a budget policy.

@@ -1881,10 +1881,10 @@ class BudgetPolicyAPI:

         :param policy_id: str
           The Id of the policy. This field is generated by Databricks and globally unique.
+        :param policy: :class:`BudgetPolicy`
+          Contains the BudgetPolicy details.
         :param limit_config: :class:`LimitConfig` (optional)
           DEPRECATED. This is redundant field as LimitConfig is part of the BudgetPolicy
-        :param policy: :class:`BudgetPolicy` (optional)
-          Contains the BudgetPolicy details.

         :returns: :class:`BudgetPolicy`
         """
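update() now requires the full BudgetPolicy payload in addition to policy_id, with limit_config kept only as a deprecated keyword. A sketch with placeholder identifiers, assuming the account-level budget_policy accessor:

from databricks.sdk import AccountClient
from databricks.sdk.service.billing import BudgetPolicy

a = AccountClient()
updated = a.budget_policy.update(
    policy_id="00000000-0000-0000-0000-000000000000",  # placeholder policy id
    policy=BudgetPolicy(policy_name="team-budget"),     # placeholder policy payload
)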
databricks/sdk/service/catalog.py
CHANGED

@@ -6675,6 +6675,7 @@ class Privilege(Enum):
     BROWSE = "BROWSE"
     CREATE = "CREATE"
     CREATE_CATALOG = "CREATE_CATALOG"
+    CREATE_CLEAN_ROOM = "CREATE_CLEAN_ROOM"
     CREATE_CONNECTION = "CREATE_CONNECTION"
     CREATE_EXTERNAL_LOCATION = "CREATE_EXTERNAL_LOCATION"
     CREATE_EXTERNAL_TABLE = "CREATE_EXTERNAL_TABLE"

@@ -6695,9 +6696,11 @@ class Privilege(Enum):
     CREATE_VIEW = "CREATE_VIEW"
     CREATE_VOLUME = "CREATE_VOLUME"
     EXECUTE = "EXECUTE"
+    EXECUTE_CLEAN_ROOM_TASK = "EXECUTE_CLEAN_ROOM_TASK"
     MANAGE = "MANAGE"
     MANAGE_ALLOWLIST = "MANAGE_ALLOWLIST"
     MODIFY = "MODIFY"
+    MODIFY_CLEAN_ROOM = "MODIFY_CLEAN_ROOM"
     READ_FILES = "READ_FILES"
     READ_PRIVATE_FILES = "READ_PRIVATE_FILES"
     READ_VOLUME = "READ_VOLUME"

@@ -7360,6 +7363,15 @@ class SetArtifactAllowlist:
     artifact_type: Optional[ArtifactType] = None
     """The artifact type of the allowlist."""

+    created_at: Optional[int] = None
+    """Time at which this artifact allowlist was set, in epoch milliseconds."""
+
+    created_by: Optional[str] = None
+    """Username of the user who set the artifact allowlist."""
+
+    metastore_id: Optional[str] = None
+    """Unique identifier of parent metastore."""
+
     def as_dict(self) -> dict:
         """Serializes the SetArtifactAllowlist into a dictionary suitable for use as a JSON request body."""
         body = {}

@@ -7367,6 +7379,12 @@ class SetArtifactAllowlist:
             body["artifact_matchers"] = [v.as_dict() for v in self.artifact_matchers]
         if self.artifact_type is not None:
             body["artifact_type"] = self.artifact_type.value
+        if self.created_at is not None:
+            body["created_at"] = self.created_at
+        if self.created_by is not None:
+            body["created_by"] = self.created_by
+        if self.metastore_id is not None:
+            body["metastore_id"] = self.metastore_id
         return body

     def as_shallow_dict(self) -> dict:

@@ -7376,6 +7394,12 @@ class SetArtifactAllowlist:
             body["artifact_matchers"] = self.artifact_matchers
         if self.artifact_type is not None:
             body["artifact_type"] = self.artifact_type
+        if self.created_at is not None:
+            body["created_at"] = self.created_at
+        if self.created_by is not None:
+            body["created_by"] = self.created_by
+        if self.metastore_id is not None:
+            body["metastore_id"] = self.metastore_id
         return body

     @classmethod

@@ -7384,6 +7408,9 @@ class SetArtifactAllowlist:
         return cls(
             artifact_matchers=_repeated_dict(d, "artifact_matchers", ArtifactMatcher),
             artifact_type=_enum(d, "artifact_type", ArtifactType),
+            created_at=d.get("created_at", None),
+            created_by=d.get("created_by", None),
+            metastore_id=d.get("metastore_id", None),
         )


@@ -10467,7 +10494,15 @@ class ArtifactAllowlistsAPI:
         res = self._api.do("GET", f"/api/2.1/unity-catalog/artifact-allowlists/{artifact_type.value}", headers=headers)
         return ArtifactAllowlistInfo.from_dict(res)

-    def update(
+    def update(
+        self,
+        artifact_type: ArtifactType,
+        artifact_matchers: List[ArtifactMatcher],
+        *,
+        created_at: Optional[int] = None,
+        created_by: Optional[str] = None,
+        metastore_id: Optional[str] = None,
+    ) -> ArtifactAllowlistInfo:
         """Set an artifact allowlist.

         Set the artifact allowlist of a certain artifact type. The whole artifact allowlist is replaced with

@@ -10478,12 +10513,24 @@ class ArtifactAllowlistsAPI:
           The artifact type of the allowlist.
         :param artifact_matchers: List[:class:`ArtifactMatcher`]
           A list of allowed artifact match patterns.
+        :param created_at: int (optional)
+          Time at which this artifact allowlist was set, in epoch milliseconds.
+        :param created_by: str (optional)
+          Username of the user who set the artifact allowlist.
+        :param metastore_id: str (optional)
+          Unique identifier of parent metastore.

         :returns: :class:`ArtifactAllowlistInfo`
         """
         body = {}
         if artifact_matchers is not None:
             body["artifact_matchers"] = [v.as_dict() for v in artifact_matchers]
+        if created_at is not None:
+            body["created_at"] = created_at
+        if created_by is not None:
+            body["created_by"] = created_by
+        if metastore_id is not None:
+            body["metastore_id"] = metastore_id
         headers = {
             "Accept": "application/json",
             "Content-Type": "application/json",

@@ -12354,12 +12401,12 @@ class OnlineTablesAPI:
             attempt += 1
         raise TimeoutError(f"timed out after {timeout}: {status_message}")

-    def create(self,
+    def create(self, table: OnlineTable) -> Wait[OnlineTable]:
         """Create an Online Table.

         Create a new Online Table.

-        :param table: :class:`OnlineTable`
+        :param table: :class:`OnlineTable`
           Online Table information.

         :returns:

@@ -12377,7 +12424,7 @@ class OnlineTablesAPI:
             self.wait_get_online_table_active, response=OnlineTable.from_dict(op_response), name=op_response["name"]
         )

-    def create_and_wait(self,
+    def create_and_wait(self, table: OnlineTable, timeout=timedelta(minutes=20)) -> OnlineTable:
         return self.create(table=table).result(timeout=timeout)

     def delete(self, name: str):
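For the allowlist API, the new created_at, created_by, and metastore_id arguments stay optional, so existing calls keep working. A sketch with a placeholder Maven coordinate:

from databricks.sdk import WorkspaceClient
from databricks.sdk.service.catalog import ArtifactMatcher, ArtifactType, MatchType

w = WorkspaceClient()
info = w.artifact_allowlists.update(
    artifact_type=ArtifactType.LIBRARY_MAVEN,
    artifact_matchers=[
        ArtifactMatcher(artifact="com.example:demo-lib", match_type=MatchType.PREFIX_MATCH)
    ],
    # created_at / created_by / metastore_id are new optional metadata fields.
)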
databricks/sdk/service/cleanrooms.py
CHANGED

@@ -332,7 +332,7 @@ class CleanRoomAssetForeignTableLocalDetails:
 @dataclass
 class CleanRoomAssetNotebook:
     etag: Optional[str] = None
-    """Server generated
+    """Server generated etag that represents the notebook version."""

     notebook_content: Optional[str] = None
     """Base 64 representation of the notebook contents. This is the same format as returned by

@@ -1097,7 +1097,7 @@ class CleanRoomAssetsAPI:
     def __init__(self, api_client):
         self._api = api_client

-    def create(self, clean_room_name: str,
+    def create(self, clean_room_name: str, asset: CleanRoomAsset) -> CleanRoomAsset:
         """Create an asset.

         Create a clean room asset —share an asset like a notebook or table into the clean room. For each UC

@@ -1107,7 +1107,7 @@ class CleanRoomAssetsAPI:

         :param clean_room_name: str
           Name of the clean room.
-        :param asset: :class:`CleanRoomAsset`
+        :param asset: :class:`CleanRoomAsset`
           Metadata of the clean room asset

         :returns: :class:`CleanRoomAsset`

@@ -1200,12 +1200,7 @@ class CleanRoomAssetsAPI:
                 query["page_token"] = json["next_page_token"]

     def update(
-        self,
-        clean_room_name: str,
-        asset_type: CleanRoomAssetAssetType,
-        name: str,
-        *,
-        asset: Optional[CleanRoomAsset] = None,
+        self, clean_room_name: str, asset_type: CleanRoomAssetAssetType, name: str, asset: CleanRoomAsset
     ) -> CleanRoomAsset:
         """Update an asset.

@@ -1224,7 +1219,7 @@ class CleanRoomAssetsAPI:
           *shared_catalog*.*shared_schema*.*asset_name*

           For notebooks, the name is the notebook file name.
-        :param asset: :class:`CleanRoomAsset`
+        :param asset: :class:`CleanRoomAsset`
           Metadata of the clean room asset

         :returns: :class:`CleanRoomAsset`

@@ -1303,7 +1298,7 @@ class CleanRoomsAPI:
     def __init__(self, api_client):
         self._api = api_client

-    def create(self,
+    def create(self, clean_room: CleanRoom) -> CleanRoom:
         """Create a clean room.

         Create a new clean room with the specified collaborators. This method is asynchronous; the returned

@@ -1314,7 +1309,7 @@ class CleanRoomsAPI:

         The caller must be a metastore admin or have the **CREATE_CLEAN_ROOM** privilege on the metastore.

-        :param clean_room: :class:`CleanRoom`
+        :param clean_room: :class:`CleanRoom`

         :returns: :class:`CleanRoom`
         """

@@ -1328,7 +1323,7 @@ class CleanRoomsAPI:
         return CleanRoom.from_dict(res)

     def create_output_catalog(
-        self, clean_room_name: str,
+        self, clean_room_name: str, output_catalog: CleanRoomOutputCatalog
     ) -> CreateCleanRoomOutputCatalogResponse:
         """Create an output catalog.

@@ -1336,7 +1331,7 @@ class CleanRoomsAPI:

         :param clean_room_name: str
           Name of the clean room.
-        :param output_catalog: :class:`CleanRoomOutputCatalog`
+        :param output_catalog: :class:`CleanRoomOutputCatalog`

         :returns: :class:`CreateCleanRoomOutputCatalogResponse`
         """
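Here too the clean room signatures drop the Optional wrappers, so asset, clean_room, and output_catalog must always be supplied. A sketch of an asset update under the new signature; the clean room and asset names are placeholders, and the exact required fields of CleanRoomAsset are not shown in this diff:

from databricks.sdk import WorkspaceClient
from databricks.sdk.service.cleanrooms import CleanRoomAsset, CleanRoomAssetAssetType

w = WorkspaceClient()
updated = w.clean_room_assets.update(
    clean_room_name="demo_clean_room",
    asset_type=CleanRoomAssetAssetType.NOTEBOOK_FILE,
    name="analysis_notebook",
    asset=CleanRoomAsset(name="analysis_notebook"),  # placeholder payload
)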