databricks-sdk 0.50.0__tar.gz → 0.51.0__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of databricks-sdk has been flagged as potentially problematic.
- {databricks_sdk-0.50.0 → databricks_sdk-0.51.0}/CHANGELOG.md +51 -0
- {databricks_sdk-0.50.0 → databricks_sdk-0.51.0}/PKG-INFO +11 -11
- {databricks_sdk-0.50.0 → databricks_sdk-0.51.0}/README.md +10 -10
- {databricks_sdk-0.50.0 → databricks_sdk-0.51.0}/databricks/sdk/__init__.py +15 -5
- {databricks_sdk-0.50.0 → databricks_sdk-0.51.0}/databricks/sdk/config.py +2 -3
- {databricks_sdk-0.50.0 → databricks_sdk-0.51.0}/databricks/sdk/credentials_provider.py +61 -15
- databricks_sdk-0.51.0/databricks/sdk/oidc_token_supplier.py +28 -0
- {databricks_sdk-0.50.0 → databricks_sdk-0.51.0}/databricks/sdk/service/apps.py +8 -10
- {databricks_sdk-0.50.0 → databricks_sdk-0.51.0}/databricks/sdk/service/billing.py +3 -3
- {databricks_sdk-0.50.0 → databricks_sdk-0.51.0}/databricks/sdk/service/catalog.py +51 -4
- {databricks_sdk-0.50.0 → databricks_sdk-0.51.0}/databricks/sdk/service/cleanrooms.py +9 -14
- {databricks_sdk-0.50.0 → databricks_sdk-0.51.0}/databricks/sdk/service/compute.py +131 -6
- {databricks_sdk-0.50.0 → databricks_sdk-0.51.0}/databricks/sdk/service/dashboards.py +23 -28
- {databricks_sdk-0.50.0 → databricks_sdk-0.51.0}/databricks/sdk/service/jobs.py +73 -18
- {databricks_sdk-0.50.0 → databricks_sdk-0.51.0}/databricks/sdk/service/oauth2.py +8 -13
- {databricks_sdk-0.50.0 → databricks_sdk-0.51.0}/databricks/sdk/service/serving.py +11 -14
- {databricks_sdk-0.50.0 → databricks_sdk-0.51.0}/databricks/sdk/service/settings.py +214 -125
- {databricks_sdk-0.50.0 → databricks_sdk-0.51.0}/databricks/sdk/service/sql.py +876 -12
- {databricks_sdk-0.50.0 → databricks_sdk-0.51.0}/databricks/sdk/service/vectorsearch.py +355 -159
- databricks_sdk-0.51.0/databricks/sdk/version.py +1 -0
- {databricks_sdk-0.50.0 → databricks_sdk-0.51.0}/databricks_sdk.egg-info/PKG-INFO +11 -11
- {databricks_sdk-0.50.0 → databricks_sdk-0.51.0}/databricks_sdk.egg-info/SOURCES.txt +1 -0
- databricks_sdk-0.50.0/databricks/sdk/version.py +0 -1
- {databricks_sdk-0.50.0 → databricks_sdk-0.51.0}/CONTRIBUTING.md +0 -0
- {databricks_sdk-0.50.0 → databricks_sdk-0.51.0}/DCO +0 -0
- {databricks_sdk-0.50.0 → databricks_sdk-0.51.0}/LICENSE +0 -0
- {databricks_sdk-0.50.0 → databricks_sdk-0.51.0}/MANIFEST.in +0 -0
- {databricks_sdk-0.50.0 → databricks_sdk-0.51.0}/Makefile +0 -0
- {databricks_sdk-0.50.0 → databricks_sdk-0.51.0}/NOTICE +0 -0
- {databricks_sdk-0.50.0 → databricks_sdk-0.51.0}/SECURITY.md +0 -0
- {databricks_sdk-0.50.0 → databricks_sdk-0.51.0}/databricks/__init__.py +0 -0
- {databricks_sdk-0.50.0 → databricks_sdk-0.51.0}/databricks/sdk/_base_client.py +0 -0
- {databricks_sdk-0.50.0 → databricks_sdk-0.51.0}/databricks/sdk/_property.py +0 -0
- {databricks_sdk-0.50.0 → databricks_sdk-0.51.0}/databricks/sdk/_widgets/__init__.py +0 -0
- {databricks_sdk-0.50.0 → databricks_sdk-0.51.0}/databricks/sdk/_widgets/default_widgets_utils.py +0 -0
- {databricks_sdk-0.50.0 → databricks_sdk-0.51.0}/databricks/sdk/_widgets/ipywidgets_utils.py +0 -0
- {databricks_sdk-0.50.0 → databricks_sdk-0.51.0}/databricks/sdk/azure.py +0 -0
- {databricks_sdk-0.50.0 → databricks_sdk-0.51.0}/databricks/sdk/casing.py +0 -0
- {databricks_sdk-0.50.0 → databricks_sdk-0.51.0}/databricks/sdk/clock.py +0 -0
- {databricks_sdk-0.50.0 → databricks_sdk-0.51.0}/databricks/sdk/core.py +0 -0
- {databricks_sdk-0.50.0 → databricks_sdk-0.51.0}/databricks/sdk/data_plane.py +0 -0
- {databricks_sdk-0.50.0 → databricks_sdk-0.51.0}/databricks/sdk/dbutils.py +0 -0
- {databricks_sdk-0.50.0 → databricks_sdk-0.51.0}/databricks/sdk/environments.py +0 -0
- {databricks_sdk-0.50.0 → databricks_sdk-0.51.0}/databricks/sdk/errors/__init__.py +0 -0
- {databricks_sdk-0.50.0 → databricks_sdk-0.51.0}/databricks/sdk/errors/base.py +0 -0
- {databricks_sdk-0.50.0 → databricks_sdk-0.51.0}/databricks/sdk/errors/customizer.py +0 -0
- {databricks_sdk-0.50.0 → databricks_sdk-0.51.0}/databricks/sdk/errors/deserializer.py +0 -0
- {databricks_sdk-0.50.0 → databricks_sdk-0.51.0}/databricks/sdk/errors/details.py +0 -0
- {databricks_sdk-0.50.0 → databricks_sdk-0.51.0}/databricks/sdk/errors/mapper.py +0 -0
- {databricks_sdk-0.50.0 → databricks_sdk-0.51.0}/databricks/sdk/errors/overrides.py +0 -0
- {databricks_sdk-0.50.0 → databricks_sdk-0.51.0}/databricks/sdk/errors/parser.py +0 -0
- {databricks_sdk-0.50.0 → databricks_sdk-0.51.0}/databricks/sdk/errors/platform.py +0 -0
- {databricks_sdk-0.50.0 → databricks_sdk-0.51.0}/databricks/sdk/errors/private_link.py +0 -0
- {databricks_sdk-0.50.0 → databricks_sdk-0.51.0}/databricks/sdk/errors/sdk.py +0 -0
- {databricks_sdk-0.50.0 → databricks_sdk-0.51.0}/databricks/sdk/logger/__init__.py +0 -0
- {databricks_sdk-0.50.0 → databricks_sdk-0.51.0}/databricks/sdk/logger/round_trip_logger.py +0 -0
- {databricks_sdk-0.50.0 → databricks_sdk-0.51.0}/databricks/sdk/mixins/__init__.py +0 -0
- {databricks_sdk-0.50.0 → databricks_sdk-0.51.0}/databricks/sdk/mixins/compute.py +0 -0
- {databricks_sdk-0.50.0 → databricks_sdk-0.51.0}/databricks/sdk/mixins/files.py +0 -0
- {databricks_sdk-0.50.0 → databricks_sdk-0.51.0}/databricks/sdk/mixins/jobs.py +0 -0
- {databricks_sdk-0.50.0 → databricks_sdk-0.51.0}/databricks/sdk/mixins/open_ai_client.py +0 -0
- {databricks_sdk-0.50.0 → databricks_sdk-0.51.0}/databricks/sdk/mixins/workspace.py +0 -0
- {databricks_sdk-0.50.0 → databricks_sdk-0.51.0}/databricks/sdk/oauth.py +0 -0
- {databricks_sdk-0.50.0 → databricks_sdk-0.51.0}/databricks/sdk/py.typed +0 -0
- {databricks_sdk-0.50.0 → databricks_sdk-0.51.0}/databricks/sdk/retries.py +0 -0
- {databricks_sdk-0.50.0 → databricks_sdk-0.51.0}/databricks/sdk/runtime/__init__.py +0 -0
- {databricks_sdk-0.50.0 → databricks_sdk-0.51.0}/databricks/sdk/runtime/dbutils_stub.py +0 -0
- {databricks_sdk-0.50.0 → databricks_sdk-0.51.0}/databricks/sdk/service/__init__.py +0 -0
- {databricks_sdk-0.50.0 → databricks_sdk-0.51.0}/databricks/sdk/service/_internal.py +0 -0
- {databricks_sdk-0.50.0 → databricks_sdk-0.51.0}/databricks/sdk/service/files.py +0 -0
- {databricks_sdk-0.50.0 → databricks_sdk-0.51.0}/databricks/sdk/service/iam.py +0 -0
- {databricks_sdk-0.50.0 → databricks_sdk-0.51.0}/databricks/sdk/service/marketplace.py +0 -0
- {databricks_sdk-0.50.0 → databricks_sdk-0.51.0}/databricks/sdk/service/ml.py +0 -0
- {databricks_sdk-0.50.0 → databricks_sdk-0.51.0}/databricks/sdk/service/pipelines.py +0 -0
- {databricks_sdk-0.50.0 → databricks_sdk-0.51.0}/databricks/sdk/service/provisioning.py +0 -0
- {databricks_sdk-0.50.0 → databricks_sdk-0.51.0}/databricks/sdk/service/sharing.py +0 -0
- {databricks_sdk-0.50.0 → databricks_sdk-0.51.0}/databricks/sdk/service/workspace.py +0 -0
- {databricks_sdk-0.50.0 → databricks_sdk-0.51.0}/databricks/sdk/useragent.py +0 -0
- {databricks_sdk-0.50.0 → databricks_sdk-0.51.0}/databricks_sdk.egg-info/dependency_links.txt +0 -0
- {databricks_sdk-0.50.0 → databricks_sdk-0.51.0}/databricks_sdk.egg-info/requires.txt +0 -0
- {databricks_sdk-0.50.0 → databricks_sdk-0.51.0}/databricks_sdk.egg-info/top_level.txt +0 -0
- {databricks_sdk-0.50.0 → databricks_sdk-0.51.0}/pyproject.toml +0 -0
- {databricks_sdk-0.50.0 → databricks_sdk-0.51.0}/setup.cfg +0 -0
{databricks_sdk-0.50.0 → databricks_sdk-0.51.0}/CHANGELOG.md

```diff
@@ -1,5 +1,56 @@
 # Version changelog
 
+## Release v0.51.0
+
+### New Features and Improvements
+* Enabled asynchronous token refreshes by default. A new `disable_async_token_refresh` configuration option has been added to allow disabling this feature if necessary ([#952](https://github.com/databricks/databricks-sdk-py/pull/952)).
+  To disable asynchronous token refresh, set the environment variable `DATABRICKS_DISABLE_ASYNC_TOKEN_REFRESH=true` or configure it within your configuration object.
+  The previous `enable_experimental_async_token_refresh` option has been removed, as asynchronous refresh is now the default behavior.
+* Introduced support for Databricks Workload Identity Federation in GitHub workflows ([#933](https://github.com/databricks/databricks-sdk-py/pull/933)).
+  See README.md for instructions.
+* [Breaking] Users running their workflows in GitHub Actions that use cloud-native authentication and also have the `DATABRICKS_CLIENT_ID` and `DATABRICKS_HOST`
+  environment variables set may see their authentication start failing due to the order in which the SDK tries different authentication methods.
+
+### API Changes
+* Added [w.alerts_v2](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/sql/alerts_v2.html) workspace-level service.
+* Added `update_ncc_azure_private_endpoint_rule_public()` method for [a.network_connectivity](https://databricks-sdk-py.readthedocs.io/en/latest/account/settings/network_connectivity.html) account-level service.
+* Added `update_endpoint_budget_policy()` and `update_endpoint_custom_tags()` methods for [w.vector_search_endpoints](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/vectorsearch/vector_search_endpoints.html) workspace-level service.
+* Added `created_at`, `created_by` and `metastore_id` fields for `databricks.sdk.service.catalog.SetArtifactAllowlist`.
+* Added `node_type_flexibility` field for `databricks.sdk.service.compute.EditInstancePool`.
+* Added `page_size` and `page_token` fields for `databricks.sdk.service.compute.GetEvents`.
+* Added `next_page_token` and `prev_page_token` fields for `databricks.sdk.service.compute.GetEventsResponse`.
+* Added `node_type_flexibility` field for `databricks.sdk.service.compute.GetInstancePool`.
+* Added `node_type_flexibility` field for `databricks.sdk.service.compute.InstancePoolAndStats`.
+* Added `effective_performance_target` field for `databricks.sdk.service.jobs.RepairHistoryItem`.
+* Added `performance_target` field for `databricks.sdk.service.jobs.RepairRun`.
+* [Breaking] Added `network_connectivity_config` field for `databricks.sdk.service.settings.CreateNetworkConnectivityConfigRequest`.
+* [Breaking] Added `private_endpoint_rule` field for `databricks.sdk.service.settings.CreatePrivateEndpointRuleRequest`.
+* Added `domain_names` field for `databricks.sdk.service.settings.NccAzurePrivateEndpointRule`.
+* Added `auto_resolve_display_name` field for `databricks.sdk.service.sql.CreateAlertRequest`.
+* Added `auto_resolve_display_name` field for `databricks.sdk.service.sql.CreateQueryRequest`.
+* Added `budget_policy_id` field for `databricks.sdk.service.vectorsearch.CreateEndpoint`.
+* Added `custom_tags` and `effective_budget_policy_id` fields for `databricks.sdk.service.vectorsearch.EndpointInfo`.
+* Added `create_clean_room`, `execute_clean_room_task` and `modify_clean_room` enum values for `databricks.sdk.service.catalog.Privilege`.
+* Added `dns_resolution_error` and `gcp_denied_by_org_policy` enum values for `databricks.sdk.service.compute.TerminationReasonCode`.
+* Added `disabled` enum value for `databricks.sdk.service.jobs.TerminationCodeCode`.
+* Added `expired` enum value for `databricks.sdk.service.settings.NccAzurePrivateEndpointRuleConnectionState`.
+* [Breaking] Changed `create_network_connectivity_configuration()` and `create_private_endpoint_rule()` methods for [a.network_connectivity](https://databricks-sdk-py.readthedocs.io/en/latest/account/settings/network_connectivity.html) account-level service with new required argument order.
+* [Breaking] Changed `create_index()` method for [w.vector_search_indexes](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/vectorsearch/vector_search_indexes.html) workspace-level service to return `databricks.sdk.service.vectorsearch.VectorIndex` dataclass.
+* [Breaking] Changed `delete_data_vector_index()` method for [w.vector_search_indexes](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/vectorsearch/vector_search_indexes.html) workspace-level service. The HTTP method/verb has changed.
+* [Breaking] Changed `delete_data_vector_index()` method for [w.vector_search_indexes](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/vectorsearch/vector_search_indexes.html) workspace-level service with new required argument order.
+* [Breaking] Changed `databricks.sdk.service.vectorsearch.List` dataclass to.
+* [Breaking] Changed `workload_size` field for `databricks.sdk.service.serving.ServedModelInput` to type `str` dataclass.
+* [Breaking] Changed `group_id` field for `databricks.sdk.service.settings.NccAzurePrivateEndpointRule` to type `str` dataclass.
+* [Breaking] Changed `target_services` field for `databricks.sdk.service.settings.NccAzureServiceEndpointRule` to type `databricks.sdk.service.settings.EgressResourceTypeList` dataclass.
+* [Breaking] Changed `data_array` field for `databricks.sdk.service.vectorsearch.ResultData` to type `databricks.sdk.service.vectorsearch.ListValueList` dataclass.
+* [Breaking] Changed waiter for [VectorSearchEndpointsAPI.create_endpoint](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/vectorsearch/vector_search_endpoints.html#databricks.sdk.service.vectorsearch.VectorSearchEndpointsAPI.create_endpoint) method.
+* [Breaking] Removed `name` and `region` fields for `databricks.sdk.service.settings.CreateNetworkConnectivityConfigRequest`.
+* [Breaking] Removed `group_id` and `resource_id` fields for `databricks.sdk.service.settings.CreatePrivateEndpointRuleRequest`.
+* [Breaking] Removed `null_value` field for `databricks.sdk.service.vectorsearch.Value`.
+* [Breaking] Removed `large`, `medium` and `small` enum values for `databricks.sdk.service.serving.ServedModelInputWorkloadSize`.
+* [Breaking] Removed `blob`, `dfs`, `mysql_server` and `sql_server` enum values for `databricks.sdk.service.settings.NccAzurePrivateEndpointRuleGroupId`.
+
+
 ## Release v0.50.0
 
 ### API Changes
```
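Both opt-out paths the changelog mentions can be exercised in one place; a minimal sketch, with placeholder host and token values:

```python
import os

from databricks.sdk import WorkspaceClient
from databricks.sdk.core import Config

# Option 1: environment variable, set before the client is constructed.
os.environ["DATABRICKS_DISABLE_ASYNC_TOKEN_REFRESH"] = "true"

# Option 2: the configuration object (host/token are placeholders).
cfg = Config(
    host="https://my-workspace.cloud.databricks.com",
    token="<personal-access-token>",
    disable_async_token_refresh=True,
)
w = WorkspaceClient(config=cfg)
```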
{databricks_sdk-0.50.0 → databricks_sdk-0.51.0}/PKG-INFO

```diff
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: databricks-sdk
-Version: 0.50.0
+Version: 0.51.0
 Summary: Databricks SDK for Python (Beta)
 Project-URL: Documentation, https://databricks-sdk-py.readthedocs.io
 Keywords: databricks,sdk
@@ -180,18 +180,18 @@ Depending on the Databricks authentication method, the SDK uses the following in
 
 ### Databricks native authentication
 
-By default, the Databricks SDK for Python initially tries [Databricks token authentication](https://docs.databricks.com/dev-tools/api/latest/authentication.html) (`auth_type='pat'` argument). If the SDK is unsuccessful, it then tries Databricks basic (username/password) authentication (`auth_type="basic"` argument).
+By default, the Databricks SDK for Python initially tries [Databricks token authentication](https://docs.databricks.com/dev-tools/api/latest/authentication.html) (`auth_type='pat'` argument). If the SDK is unsuccessful, it then tries Databricks Workload Identity Federation (WIF) authentication using OIDC (`auth_type="github-oidc"` argument).
 
 - For Databricks token authentication, you must provide `host` and `token`; or their environment variable or `.databrickscfg` file field equivalents.
-- For Databricks basic authentication, you must provide `host`, `username`, and `password`; or their environment variable or `.databrickscfg` file field equivalents.
-
-| Argument     | Description | Environment variable |
-|--------------|-------------|-----------------------|
-| `host`       | _(String)_ The Databricks host URL for either the Databricks workspace endpoint or the Databricks accounts endpoint. | `DATABRICKS_HOST` |
-| `account_id` | _(String)_ The Databricks account ID for the Databricks accounts endpoint. Only has effect when `Host` is either `https://accounts.cloud.databricks.com/` _(AWS)_, `https://accounts.azuredatabricks.net/` _(Azure)_, or `https://accounts.gcp.databricks.com/` _(GCP)_. | `DATABRICKS_ACCOUNT_ID` |
-| `token`      | _(String)_ The Databricks personal access token (PAT) _(AWS, Azure, and GCP)_ or Azure Active Directory (Azure AD) token _(Azure)_. | `DATABRICKS_TOKEN` |
-| `username`   | _(String)_ The Databricks username part of basic authentication. | `DATABRICKS_USERNAME` |
-| `password`   | _(String)_ The Databricks password part of basic authentication. | `DATABRICKS_PASSWORD` |
+- For Databricks OIDC authentication, you must provide the `host`, `client_id` and `token_audience` _(optional)_ either directly, through the corresponding environment variables, or in your `.databrickscfg` configuration file.
+
+| Argument         | Description | Environment variable |
+|------------------|-------------|-------------------------|
+| `host`           | _(String)_ The Databricks host URL for either the Databricks workspace endpoint or the Databricks accounts endpoint. | `DATABRICKS_HOST` |
+| `account_id`     | _(String)_ The Databricks account ID for the Databricks accounts endpoint. Only has effect when `Host` is either `https://accounts.cloud.databricks.com/` _(AWS)_, `https://accounts.azuredatabricks.net/` _(Azure)_, or `https://accounts.gcp.databricks.com/` _(GCP)_. | `DATABRICKS_ACCOUNT_ID` |
+| `token`          | _(String)_ The Databricks personal access token (PAT) _(AWS, Azure, and GCP)_ or Azure Active Directory (Azure AD) token _(Azure)_. | `DATABRICKS_TOKEN` |
+| `client_id`      | _(String)_ The Databricks Service Principal Application ID. | `DATABRICKS_CLIENT_ID` |
+| `token_audience` | _(String)_ When using Workload Identity Federation, the audience to specify when fetching an ID token from the ID token supplier. | `TOKEN_AUDIENCE` |
 
 For example, to use Databricks token authentication:
 
```
{databricks_sdk-0.50.0 → databricks_sdk-0.51.0}/README.md

```diff
@@ -126,18 +126,18 @@ Depending on the Databricks authentication method, the SDK uses the following in
 
 ### Databricks native authentication
 
-By default, the Databricks SDK for Python initially tries [Databricks token authentication](https://docs.databricks.com/dev-tools/api/latest/authentication.html) (`auth_type='pat'` argument). If the SDK is unsuccessful, it then tries Databricks basic (username/password) authentication (`auth_type="basic"` argument).
+By default, the Databricks SDK for Python initially tries [Databricks token authentication](https://docs.databricks.com/dev-tools/api/latest/authentication.html) (`auth_type='pat'` argument). If the SDK is unsuccessful, it then tries Databricks Workload Identity Federation (WIF) authentication using OIDC (`auth_type="github-oidc"` argument).
 
 - For Databricks token authentication, you must provide `host` and `token`; or their environment variable or `.databrickscfg` file field equivalents.
-- For Databricks basic authentication, you must provide `host`, `username`, and `password`; or their environment variable or `.databrickscfg` file field equivalents.
-
-| Argument     | Description | Environment variable |
-|--------------|-------------|-----------------------|
-| `host`       | _(String)_ The Databricks host URL for either the Databricks workspace endpoint or the Databricks accounts endpoint. | `DATABRICKS_HOST` |
-| `account_id` | _(String)_ The Databricks account ID for the Databricks accounts endpoint. Only has effect when `Host` is either `https://accounts.cloud.databricks.com/` _(AWS)_, `https://accounts.azuredatabricks.net/` _(Azure)_, or `https://accounts.gcp.databricks.com/` _(GCP)_. | `DATABRICKS_ACCOUNT_ID` |
-| `token`      | _(String)_ The Databricks personal access token (PAT) _(AWS, Azure, and GCP)_ or Azure Active Directory (Azure AD) token _(Azure)_. | `DATABRICKS_TOKEN` |
-| `username`   | _(String)_ The Databricks username part of basic authentication. | `DATABRICKS_USERNAME` |
-| `password`   | _(String)_ The Databricks password part of basic authentication. | `DATABRICKS_PASSWORD` |
+- For Databricks OIDC authentication, you must provide the `host`, `client_id` and `token_audience` _(optional)_ either directly, through the corresponding environment variables, or in your `.databrickscfg` configuration file.
+
+| Argument         | Description | Environment variable |
+|------------------|-------------|-------------------------|
+| `host`           | _(String)_ The Databricks host URL for either the Databricks workspace endpoint or the Databricks accounts endpoint. | `DATABRICKS_HOST` |
+| `account_id`     | _(String)_ The Databricks account ID for the Databricks accounts endpoint. Only has effect when `Host` is either `https://accounts.cloud.databricks.com/` _(AWS)_, `https://accounts.azuredatabricks.net/` _(Azure)_, or `https://accounts.gcp.databricks.com/` _(GCP)_. | `DATABRICKS_ACCOUNT_ID` |
+| `token`          | _(String)_ The Databricks personal access token (PAT) _(AWS, Azure, and GCP)_ or Azure Active Directory (Azure AD) token _(Azure)_. | `DATABRICKS_TOKEN` |
+| `client_id`      | _(String)_ The Databricks Service Principal Application ID. | `DATABRICKS_CLIENT_ID` |
+| `token_audience` | _(String)_ When using Workload Identity Federation, the audience to specify when fetching an ID token from the ID token supplier. | `TOKEN_AUDIENCE` |
 
 For example, to use Databricks token authentication:
 
```
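To illustrate the new rows in this table, a minimal sketch of constructing a client for Workload Identity Federation; it assumes a GitHub Actions job with `id-token: write` permission, and the host and client ID values are placeholders:

```python
from databricks.sdk import WorkspaceClient

# Inside GitHub Actions the SDK exchanges the job's OIDC ID token for a
# Databricks token; there is no long-lived secret to store.
w = WorkspaceClient(
    host="https://my-workspace.cloud.databricks.com",
    client_id="<service-principal-application-id>",
    auth_type="github-oidc",  # pin the WIF strategy explicitly
)
print(w.current_user.me().user_name)
```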
{databricks_sdk-0.50.0 → databricks_sdk-0.51.0}/databricks/sdk/__init__.py

```diff
@@ -97,10 +97,10 @@ from databricks.sdk.service.sharing import (ProvidersAPI,
                                             RecipientActivationAPI,
                                             RecipientsAPI, SharesAPI)
 from databricks.sdk.service.sql import (AlertsAPI, AlertsLegacyAPI,
-                                        DashboardsAPI, DashboardWidgetsAPI,
-                                        DataSourcesAPI, DbsqlPermissionsAPI,
-                                        QueriesAPI, QueriesLegacyAPI,
-                                        QueryHistoryAPI,
+                                        AlertsV2API, DashboardsAPI,
+                                        DashboardWidgetsAPI, DataSourcesAPI,
+                                        DbsqlPermissionsAPI, QueriesAPI,
+                                        QueriesLegacyAPI, QueryHistoryAPI,
                                         QueryVisualizationsAPI,
                                         QueryVisualizationsLegacyAPI,
                                         RedashConfigAPI, StatementExecutionAPI,
@@ -170,6 +170,7 @@ class WorkspaceClient:
         product_version="0.0.0",
         credentials_strategy: Optional[CredentialsStrategy] = None,
         credentials_provider: Optional[CredentialsStrategy] = None,
+        token_audience: Optional[str] = None,
         config: Optional[client.Config] = None,
     ):
         if not config:
@@ -198,6 +199,7 @@ class WorkspaceClient:
                 debug_headers=debug_headers,
                 product=product,
                 product_version=product_version,
+                token_audience=token_audience,
             )
         self._config = config.copy()
         self._dbutils = _make_dbutils(self._config)
@@ -207,6 +209,7 @@ class WorkspaceClient:
         self._account_access_control_proxy = service.iam.AccountAccessControlProxyAPI(self._api_client)
         self._alerts = service.sql.AlertsAPI(self._api_client)
         self._alerts_legacy = service.sql.AlertsLegacyAPI(self._api_client)
+        self._alerts_v2 = service.sql.AlertsV2API(self._api_client)
         self._apps = service.apps.AppsAPI(self._api_client)
         self._artifact_allowlists = service.catalog.ArtifactAllowlistsAPI(self._api_client)
         self._catalogs = service.catalog.CatalogsAPI(self._api_client)
@@ -289,7 +292,7 @@ class WorkspaceClient:
         self._service_principals = service.iam.ServicePrincipalsAPI(self._api_client)
         self._serving_endpoints = serving_endpoints
         serving_endpoints_data_plane_token_source = DataPlaneTokenSource(
-            self._config.host, self._config.oauth_token,
+            self._config.host, self._config.oauth_token, self._config.disable_async_token_refresh
         )
         self._serving_endpoints_data_plane = service.serving.ServingEndpointsDataPlaneAPI(
             self._api_client, serving_endpoints, serving_endpoints_data_plane_token_source
@@ -346,6 +349,11 @@ class WorkspaceClient:
         """The alerts API can be used to perform CRUD operations on alerts."""
         return self._alerts_legacy
 
+    @property
+    def alerts_v2(self) -> service.sql.AlertsV2API:
+        """TODO: Add description."""
+        return self._alerts_v2
+
     @property
     def apps(self) -> service.apps.AppsAPI:
         """Apps run directly on a customer’s Databricks instance, integrate with their data, use and extend Databricks services, and enable users to interact through single sign-on."""
@@ -862,6 +870,7 @@ class AccountClient:
         product_version="0.0.0",
         credentials_strategy: Optional[CredentialsStrategy] = None,
         credentials_provider: Optional[CredentialsStrategy] = None,
+        token_audience: Optional[str] = None,
         config: Optional[client.Config] = None,
     ):
         if not config:
@@ -890,6 +899,7 @@ class AccountClient:
                 debug_headers=debug_headers,
                 product=product,
                 product_version=product_version,
+                token_audience=token_audience,
             )
         self._config = config.copy()
         self._api_client = client.ApiClient(self._config)
```
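A minimal sketch of the two client-facing additions in this file, the `token_audience` keyword and the `alerts_v2` property; it assumes `DATABRICKS_HOST` and `DATABRICKS_CLIENT_ID` are set in the environment, and the audience value is a placeholder:

```python
from databricks.sdk import WorkspaceClient

# token_audience is forwarded into Config and only matters for github-oidc.
w = WorkspaceClient(auth_type="github-oidc", token_audience="<custom-audience>")

# The new SQL alerts service registered above:
alerts_v2_api = w.alerts_v2  # a service.sql.AlertsV2API instance
```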
{databricks_sdk-0.50.0 → databricks_sdk-0.51.0}/databricks/sdk/config.py

```diff
@@ -61,6 +61,7 @@ class Config:
     host: str = ConfigAttribute(env="DATABRICKS_HOST")
     account_id: str = ConfigAttribute(env="DATABRICKS_ACCOUNT_ID")
     token: str = ConfigAttribute(env="DATABRICKS_TOKEN", auth="pat", sensitive=True)
+    token_audience: str = ConfigAttribute(env="DATABRICKS_TOKEN_AUDIENCE", auth="github-oidc")
     username: str = ConfigAttribute(env="DATABRICKS_USERNAME", auth="basic")
     password: str = ConfigAttribute(env="DATABRICKS_PASSWORD", auth="basic", sensitive=True)
     client_id: str = ConfigAttribute(env="DATABRICKS_CLIENT_ID", auth="oauth")
@@ -95,9 +96,7 @@ class Config:
     max_connections_per_pool: int = ConfigAttribute()
     databricks_environment: Optional[DatabricksEnvironment] = None
 
-    enable_experimental_async_token_refresh: bool = ConfigAttribute(
-        env="DATABRICKS_ENABLE_EXPERIMENTAL_ASYNC_TOKEN_REFRESH"
-    )
+    disable_async_token_refresh: bool = ConfigAttribute(env="DATABRICKS_DISABLE_ASYNC_TOKEN_REFRESH")
 
     enable_experimental_files_api_client: bool = ConfigAttribute(env="DATABRICKS_ENABLE_EXPERIMENTAL_FILES_API_CLIENT")
     files_api_client_download_max_total_recovers = None
```
{databricks_sdk-0.50.0 → databricks_sdk-0.51.0}/databricks/sdk/credentials_provider.py

```diff
@@ -23,6 +23,7 @@ from google.oauth2 import service_account # type: ignore
 from .azure import add_sp_management_token, add_workspace_id_header
 from .oauth import (ClientCredentials, OAuthClient, Refreshable, Token,
                     TokenCache, TokenSource)
+from .oidc_token_supplier import GitHubOIDCTokenSupplier
 
 CredentialsProvider = Callable[[], Dict[str, str]]
 
@@ -191,7 +192,7 @@ def oauth_service_principal(cfg: "Config") -> Optional[CredentialsProvider]:
         token_url=oidc.token_endpoint,
         scopes=["all-apis"],
         use_header=True,
-        disable_async=not cfg.enable_experimental_async_token_refresh,
+        disable_async=cfg.disable_async_token_refresh,
     )
 
     def inner() -> Dict[str, str]:
@@ -291,7 +292,7 @@ def azure_service_principal(cfg: "Config") -> CredentialsProvider:
             token_url=f"{aad_endpoint}{cfg.azure_tenant_id}/oauth2/token",
             endpoint_params={"resource": resource},
             use_params=True,
-            disable_async=not cfg.enable_experimental_async_token_refresh,
+            disable_async=cfg.disable_async_token_refresh,
         )
 
     _ensure_host_present(cfg, token_source_for)
@@ -314,6 +315,58 @@ def azure_service_principal(cfg: "Config") -> CredentialsProvider:
     return OAuthCredentialsProvider(refreshed_headers, token)
 
 
+@oauth_credentials_strategy("github-oidc", ["host", "client_id"])
+def databricks_wif(cfg: "Config") -> Optional[CredentialsProvider]:
+    """
+    DatabricksWIFCredentials uses a Token Supplier to get a JWT Token and exchanges
+    it for a Databricks Token.
+
+    Supported suppliers:
+    - GitHub OIDC
+    """
+    supplier = GitHubOIDCTokenSupplier()
+
+    audience = cfg.token_audience
+    if audience is None and cfg.is_account_client:
+        audience = cfg.account_id
+    if audience is None and not cfg.is_account_client:
+        audience = cfg.oidc_endpoints.token_endpoint
+
+    # Try to get an idToken. If no supplier returns a token, we cannot use this authentication mode.
+    id_token = supplier.get_oidc_token(audience)
+    if not id_token:
+        return None
+
+    def token_source_for(audience: str) -> TokenSource:
+        id_token = supplier.get_oidc_token(audience)
+        if not id_token:
+            # Should not happen, since we checked it above.
+            raise Exception("Cannot get OIDC token")
+        params = {
+            "subject_token_type": "urn:ietf:params:oauth:token-type:jwt",
+            "subject_token": id_token,
+            "grant_type": "urn:ietf:params:oauth:grant-type:token-exchange",
+        }
+        return ClientCredentials(
+            client_id=cfg.client_id,
+            client_secret="",  # we have no (rotatable) secrets in OIDC flow
+            token_url=cfg.oidc_endpoints.token_endpoint,
+            endpoint_params=params,
+            scopes=["all-apis"],
+            use_params=True,
+            disable_async=cfg.disable_async_token_refresh,
+        )
+
+    def refreshed_headers() -> Dict[str, str]:
+        token = token_source_for(audience).token()
+        return {"Authorization": f"{token.token_type} {token.access_token}"}
+
+    def token() -> Token:
+        return token_source_for(audience).token()
+
+    return OAuthCredentialsProvider(refreshed_headers, token)
+
+
 @oauth_credentials_strategy("github-oidc-azure", ["host", "azure_client_id"])
 def github_oidc_azure(cfg: "Config") -> Optional[CredentialsProvider]:
     if "ACTIONS_ID_TOKEN_REQUEST_TOKEN" not in os.environ:
@@ -325,16 +378,8 @@ def github_oidc_azure(cfg: "Config") -> Optional[CredentialsProvider]:
     if not cfg.is_azure:
         return None
 
-    # See https://docs.github.com/en/actions/deployment/security-hardening-your-deployments/configuring-openid-connect-in-cloud-providers
-    headers = {"Authorization": f"Bearer {os.environ['ACTIONS_ID_TOKEN_REQUEST_TOKEN']}"}
-    endpoint = f"{os.environ['ACTIONS_ID_TOKEN_REQUEST_URL']}&audience=api://AzureADTokenExchange"
-    response = requests.get(endpoint, headers=headers)
-    if not response.ok:
-        return None
-
-    # get the ID Token with aud=api://AzureADTokenExchange sub=repo:org/repo:environment:name
-    response_json = response.json()
-    if "value" not in response_json:
+    token = GitHubOIDCTokenSupplier().get_oidc_token("api://AzureADTokenExchange")
+    if not token:
         return None
 
     logger.info(
@@ -344,7 +389,7 @@ def github_oidc_azure(cfg: "Config") -> Optional[CredentialsProvider]:
     params = {
         "client_assertion_type": "urn:ietf:params:oauth:client-assertion-type:jwt-bearer",
         "resource": cfg.effective_azure_login_app_id,
-        "client_assertion": response_json["value"],
+        "client_assertion": token,
     }
     aad_endpoint = cfg.arm_environment.active_directory_endpoint
     if not cfg.azure_tenant_id:
@@ -357,7 +402,7 @@ def github_oidc_azure(cfg: "Config") -> Optional[CredentialsProvider]:
         token_url=f"{aad_endpoint}{cfg.azure_tenant_id}/oauth2/token",
         endpoint_params=params,
         use_params=True,
-        disable_async=not cfg.enable_experimental_async_token_refresh,
+        disable_async=cfg.disable_async_token_refresh,
     )
 
     def refreshed_headers() -> Dict[str, str]:
@@ -694,7 +739,7 @@ class DatabricksCliTokenSource(CliTokenSource):
             token_type_field="token_type",
             access_token_field="access_token",
             expiry_field="expiry",
-            disable_async=not cfg.enable_experimental_async_token_refresh,
+            disable_async=cfg.disable_async_token_refresh,
         )
 
     @staticmethod
@@ -927,6 +972,7 @@ class DefaultCredentials:
             basic_auth,
             metadata_service,
             oauth_service_principal,
+            databricks_wif,
             azure_service_principal,
             github_oidc_azure,
             azure_cli,
```
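Because `databricks_wif` now runs before the Azure strategies in the `DefaultCredentials` chain, a workflow that previously fell through to a later method can pin `auth_type` explicitly; a minimal sketch, assuming `DATABRICKS_HOST` is set and using `azure-cli` purely as an example of a supported method:

```python
from databricks.sdk import WorkspaceClient

# With auth_type pinned, the SDK skips strategy auto-detection entirely, so
# the presence of DATABRICKS_CLIENT_ID can no longer divert auth to WIF.
w = WorkspaceClient(auth_type="azure-cli")
```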
databricks_sdk-0.51.0/databricks/sdk/oidc_token_supplier.py

```diff
@@ -0,0 +1,28 @@
+import os
+from typing import Optional
+
+import requests
+
+
+class GitHubOIDCTokenSupplier:
+    """
+    Supplies OIDC tokens from GitHub Actions.
+    """
+
+    def get_oidc_token(self, audience: str) -> Optional[str]:
+        if "ACTIONS_ID_TOKEN_REQUEST_TOKEN" not in os.environ or "ACTIONS_ID_TOKEN_REQUEST_URL" not in os.environ:
+            # not in GitHub actions
+            return None
+        # See https://docs.github.com/en/actions/deployment/security-hardening-your-deployments/configuring-openid-connect-in-cloud-providers
+        headers = {"Authorization": f"Bearer {os.environ['ACTIONS_ID_TOKEN_REQUEST_TOKEN']}"}
+        endpoint = f"{os.environ['ACTIONS_ID_TOKEN_REQUEST_URL']}&audience={audience}"
+        response = requests.get(endpoint, headers=headers)
+        if not response.ok:
+            return None
+
+        # get the ID Token with aud=api://AzureADTokenExchange sub=repo:org/repo:environment:name
+        response_json = response.json()
+        if "value" not in response_json:
+            return None
+
+        return response_json["value"]
```
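A minimal sketch of using the new supplier directly; the audience string is the same one `github_oidc_azure` passes in the hunk above:

```python
from databricks.sdk.oidc_token_supplier import GitHubOIDCTokenSupplier

# Outside GitHub Actions the ACTIONS_ID_TOKEN_REQUEST_* variables are unset
# and get_oidc_token() returns None, so the result must be checked.
supplier = GitHubOIDCTokenSupplier()
id_token = supplier.get_oidc_token("api://AzureADTokenExchange")
if id_token is None:
    raise RuntimeError("no OIDC token available; not running in GitHub Actions?")
```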
{databricks_sdk-0.50.0 → databricks_sdk-0.51.0}/databricks/sdk/service/apps.py

```diff
@@ -1173,12 +1173,12 @@ class AppsAPI:
             attempt += 1
         raise TimeoutError(f"timed out after {timeout}: {status_message}")
 
-    def create(self, *, app: Optional[App] = None, no_compute: Optional[bool] = None) -> Wait[App]:
+    def create(self, app: App, *, no_compute: Optional[bool] = None) -> Wait[App]:
         """Create an app.
 
         Creates a new app.
 
-        :param app: :class:`App` (optional)
+        :param app: :class:`App`
         :param no_compute: bool (optional)
           If true, the app will not be started after creation.
 
@@ -1198,9 +1198,7 @@ class AppsAPI:
         op_response = self._api.do("POST", "/api/2.0/apps", query=query, body=body, headers=headers)
         return Wait(self.wait_get_app_active, response=App.from_dict(op_response), name=op_response["name"])
 
-    def create_and_wait(
-        self, *, app: Optional[App] = None, no_compute: Optional[bool] = None, timeout=timedelta(minutes=20)
-    ) -> App:
+    def create_and_wait(self, app: App, *, no_compute: Optional[bool] = None, timeout=timedelta(minutes=20)) -> App:
         return self.create(app=app, no_compute=no_compute).result(timeout=timeout)
 
     def delete(self, name: str) -> App:
@@ -1221,14 +1219,14 @@ class AppsAPI:
         res = self._api.do("DELETE", f"/api/2.0/apps/{name}", headers=headers)
         return App.from_dict(res)
 
-    def deploy(self, app_name: str, *, app_deployment: Optional[AppDeployment] = None) -> Wait[AppDeployment]:
+    def deploy(self, app_name: str, app_deployment: AppDeployment) -> Wait[AppDeployment]:
         """Create an app deployment.
 
         Creates an app deployment for the app with the supplied name.
 
         :param app_name: str
           The name of the app.
-        :param app_deployment: :class:`AppDeployment` (optional)
+        :param app_deployment: :class:`AppDeployment`
 
         :returns:
           Long-running operation waiter for :class:`AppDeployment`.
@@ -1249,7 +1247,7 @@ class AppsAPI:
         )
 
     def deploy_and_wait(
-        self, app_name: str, *, app_deployment: Optional[AppDeployment] = None, timeout=timedelta(minutes=20)
+        self, app_name: str, app_deployment: AppDeployment, timeout=timedelta(minutes=20)
     ) -> AppDeployment:
         return self.deploy(app_deployment=app_deployment, app_name=app_name).result(timeout=timeout)
 
@@ -1466,7 +1464,7 @@ class AppsAPI:
     def stop_and_wait(self, name: str, timeout=timedelta(minutes=20)) -> App:
         return self.stop(name=name).result(timeout=timeout)
 
-    def update(self, name: str, *, app: Optional[App] = None) -> App:
+    def update(self, name: str, app: App) -> App:
         """Update an app.
 
         Updates the app with the supplied name.
@@ -1474,7 +1472,7 @@ class AppsAPI:
         :param name: str
           The name of the app. The name must contain only lowercase alphanumeric characters and hyphens. It
           must be unique within the workspace.
-        :param app: :class:`App` (optional)
+        :param app: :class:`App`
 
         :returns: :class:`App`
         """
```
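A minimal sketch of the tightened `create`/`create_and_wait` signatures; it assumes ambient authentication (environment variables or `.databrickscfg`), and the app name is a placeholder:

```python
from databricks.sdk import WorkspaceClient
from databricks.sdk.service.apps import App

w = WorkspaceClient()

# `app` is now a required positional argument rather than an optional keyword.
created = w.apps.create_and_wait(App(name="my-app"), no_compute=True)
```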
{databricks_sdk-0.50.0 → databricks_sdk-0.51.0}/databricks/sdk/service/billing.py

```diff
@@ -1873,7 +1873,7 @@ class BudgetPolicyAPI:
                 query["page_token"] = json["next_page_token"]
 
     def update(
-        self, policy_id: str, *, limit_config: Optional[LimitConfig] = None, policy: Optional[BudgetPolicy] = None
+        self, policy_id: str, policy: BudgetPolicy, *, limit_config: Optional[LimitConfig] = None
     ) -> BudgetPolicy:
         """Update a budget policy.
 
@@ -1881,10 +1881,10 @@ class BudgetPolicyAPI:
 
         :param policy_id: str
           The Id of the policy. This field is generated by Databricks and globally unique.
+        :param policy: :class:`BudgetPolicy`
+          Contains the BudgetPolicy details.
         :param limit_config: :class:`LimitConfig` (optional)
           DEPRECATED. This is a redundant field, as LimitConfig is part of the BudgetPolicy
-        :param policy: :class:`BudgetPolicy` (optional)
-          Contains the BudgetPolicy details.
 
         :returns: :class:`BudgetPolicy`
         """
```
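A minimal sketch of the reordered `update` signature; it assumes account-level ambient authentication, and the policy ID and name are placeholders:

```python
from databricks.sdk import AccountClient
from databricks.sdk.service.billing import BudgetPolicy

a = AccountClient()

# `policy` is now required and precedes the keyword-only `limit_config`.
updated = a.budget_policy.update(
    policy_id="<policy-id>",
    policy=BudgetPolicy(policy_name="team-budget"),
)
```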
{databricks_sdk-0.50.0 → databricks_sdk-0.51.0}/databricks/sdk/service/catalog.py

```diff
@@ -6675,6 +6675,7 @@ class Privilege(Enum):
     BROWSE = "BROWSE"
     CREATE = "CREATE"
     CREATE_CATALOG = "CREATE_CATALOG"
+    CREATE_CLEAN_ROOM = "CREATE_CLEAN_ROOM"
     CREATE_CONNECTION = "CREATE_CONNECTION"
     CREATE_EXTERNAL_LOCATION = "CREATE_EXTERNAL_LOCATION"
     CREATE_EXTERNAL_TABLE = "CREATE_EXTERNAL_TABLE"
@@ -6695,9 +6696,11 @@ class Privilege(Enum):
     CREATE_VIEW = "CREATE_VIEW"
     CREATE_VOLUME = "CREATE_VOLUME"
     EXECUTE = "EXECUTE"
+    EXECUTE_CLEAN_ROOM_TASK = "EXECUTE_CLEAN_ROOM_TASK"
     MANAGE = "MANAGE"
     MANAGE_ALLOWLIST = "MANAGE_ALLOWLIST"
     MODIFY = "MODIFY"
+    MODIFY_CLEAN_ROOM = "MODIFY_CLEAN_ROOM"
     READ_FILES = "READ_FILES"
     READ_PRIVATE_FILES = "READ_PRIVATE_FILES"
     READ_VOLUME = "READ_VOLUME"
@@ -7360,6 +7363,15 @@ class SetArtifactAllowlist:
     artifact_type: Optional[ArtifactType] = None
     """The artifact type of the allowlist."""
 
+    created_at: Optional[int] = None
+    """Time at which this artifact allowlist was set, in epoch milliseconds."""
+
+    created_by: Optional[str] = None
+    """Username of the user who set the artifact allowlist."""
+
+    metastore_id: Optional[str] = None
+    """Unique identifier of parent metastore."""
+
     def as_dict(self) -> dict:
         """Serializes the SetArtifactAllowlist into a dictionary suitable for use as a JSON request body."""
         body = {}
@@ -7367,6 +7379,12 @@ class SetArtifactAllowlist:
             body["artifact_matchers"] = [v.as_dict() for v in self.artifact_matchers]
         if self.artifact_type is not None:
             body["artifact_type"] = self.artifact_type.value
+        if self.created_at is not None:
+            body["created_at"] = self.created_at
+        if self.created_by is not None:
+            body["created_by"] = self.created_by
+        if self.metastore_id is not None:
+            body["metastore_id"] = self.metastore_id
         return body
 
     def as_shallow_dict(self) -> dict:
@@ -7376,6 +7394,12 @@ class SetArtifactAllowlist:
             body["artifact_matchers"] = self.artifact_matchers
         if self.artifact_type is not None:
             body["artifact_type"] = self.artifact_type
+        if self.created_at is not None:
+            body["created_at"] = self.created_at
+        if self.created_by is not None:
+            body["created_by"] = self.created_by
+        if self.metastore_id is not None:
+            body["metastore_id"] = self.metastore_id
         return body
 
     @classmethod
@@ -7384,6 +7408,9 @@ class SetArtifactAllowlist:
         return cls(
             artifact_matchers=_repeated_dict(d, "artifact_matchers", ArtifactMatcher),
             artifact_type=_enum(d, "artifact_type", ArtifactType),
+            created_at=d.get("created_at", None),
+            created_by=d.get("created_by", None),
+            metastore_id=d.get("metastore_id", None),
         )
 
 
@@ -10467,7 +10494,15 @@ class ArtifactAllowlistsAPI:
         res = self._api.do("GET", f"/api/2.1/unity-catalog/artifact-allowlists/{artifact_type.value}", headers=headers)
         return ArtifactAllowlistInfo.from_dict(res)
 
-    def update(self, artifact_type: ArtifactType, artifact_matchers: List[ArtifactMatcher]) -> ArtifactAllowlistInfo:
+    def update(
+        self,
+        artifact_type: ArtifactType,
+        artifact_matchers: List[ArtifactMatcher],
+        *,
+        created_at: Optional[int] = None,
+        created_by: Optional[str] = None,
+        metastore_id: Optional[str] = None,
+    ) -> ArtifactAllowlistInfo:
         """Set an artifact allowlist.
 
         Set the artifact allowlist of a certain artifact type. The whole artifact allowlist is replaced with
@@ -10478,12 +10513,24 @@ class ArtifactAllowlistsAPI:
           The artifact type of the allowlist.
         :param artifact_matchers: List[:class:`ArtifactMatcher`]
           A list of allowed artifact match patterns.
+        :param created_at: int (optional)
+          Time at which this artifact allowlist was set, in epoch milliseconds.
+        :param created_by: str (optional)
+          Username of the user who set the artifact allowlist.
+        :param metastore_id: str (optional)
+          Unique identifier of parent metastore.
 
         :returns: :class:`ArtifactAllowlistInfo`
         """
         body = {}
         if artifact_matchers is not None:
             body["artifact_matchers"] = [v.as_dict() for v in artifact_matchers]
+        if created_at is not None:
+            body["created_at"] = created_at
+        if created_by is not None:
+            body["created_by"] = created_by
+        if metastore_id is not None:
+            body["metastore_id"] = metastore_id
         headers = {
             "Accept": "application/json",
             "Content-Type": "application/json",
```
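A minimal sketch of the expanded `ArtifactAllowlistsAPI.update` signature; it assumes ambient authentication, and the volume path is a placeholder (the new `created_at`/`created_by`/`metastore_id` arguments are server-populated and can be omitted):

```python
from databricks.sdk import WorkspaceClient
from databricks.sdk.service.catalog import (ArtifactMatcher, ArtifactType,
                                            MatchType)

w = WorkspaceClient()

# Replaces the entire INIT_SCRIPT allowlist with the given matchers.
info = w.artifact_allowlists.update(
    artifact_type=ArtifactType.INIT_SCRIPT,
    artifact_matchers=[
        ArtifactMatcher(
            artifact="/Volumes/main/default/scripts/",
            match_type=MatchType.PREFIX_MATCH,
        )
    ],
)
```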
{databricks_sdk-0.50.0 → databricks_sdk-0.51.0}/databricks/sdk/service/catalog.py

```diff
@@ -12354,12 +12401,12 @@ class OnlineTablesAPI:
             attempt += 1
         raise TimeoutError(f"timed out after {timeout}: {status_message}")
 
-    def create(self, *, table: Optional[OnlineTable] = None) -> Wait[OnlineTable]:
+    def create(self, table: OnlineTable) -> Wait[OnlineTable]:
         """Create an Online Table.
 
         Create a new Online Table.
 
-        :param table: :class:`OnlineTable` (optional)
+        :param table: :class:`OnlineTable`
           Online Table information.
 
         :returns:
@@ -12377,7 +12424,7 @@ class OnlineTablesAPI:
             self.wait_get_online_table_active, response=OnlineTable.from_dict(op_response), name=op_response["name"]
         )
 
-    def create_and_wait(self, *, table: Optional[OnlineTable] = None, timeout=timedelta(minutes=20)) -> OnlineTable:
+    def create_and_wait(self, table: OnlineTable, timeout=timedelta(minutes=20)) -> OnlineTable:
         return self.create(table=table).result(timeout=timeout)
 
     def delete(self, name: str):
```
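A minimal sketch of the now-required `table` argument on `OnlineTablesAPI.create_and_wait`; it assumes ambient authentication, and all table names are placeholders:

```python
from databricks.sdk import WorkspaceClient
from databricks.sdk.service.catalog import (
    OnlineTable, OnlineTableSpec, OnlineTableSpecTriggeredSchedulingPolicy)

w = WorkspaceClient()

table = OnlineTable(
    name="main.default.my_table_online",
    spec=OnlineTableSpec(
        source_table_full_name="main.default.my_table",
        primary_key_columns=["id"],
        run_triggered=OnlineTableSpecTriggeredSchedulingPolicy(),
    ),
)
online = w.online_tables.create_and_wait(table=table)
```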