databricks-sdk 0.50.0__tar.gz → 0.52.0__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of databricks-sdk might be problematic. Click here for more details.

Files changed (83) hide show
  1. {databricks_sdk-0.50.0 → databricks_sdk-0.52.0}/CHANGELOG.md +73 -0
  2. {databricks_sdk-0.50.0 → databricks_sdk-0.52.0}/PKG-INFO +11 -11
  3. {databricks_sdk-0.50.0 → databricks_sdk-0.52.0}/README.md +10 -10
  4. {databricks_sdk-0.50.0 → databricks_sdk-0.52.0}/databricks/sdk/__init__.py +15 -5
  5. {databricks_sdk-0.50.0 → databricks_sdk-0.52.0}/databricks/sdk/config.py +2 -3
  6. {databricks_sdk-0.50.0 → databricks_sdk-0.52.0}/databricks/sdk/credentials_provider.py +61 -15
  7. databricks_sdk-0.52.0/databricks/sdk/oidc_token_supplier.py +28 -0
  8. {databricks_sdk-0.50.0 → databricks_sdk-0.52.0}/databricks/sdk/service/apps.py +8 -10
  9. {databricks_sdk-0.50.0 → databricks_sdk-0.52.0}/databricks/sdk/service/billing.py +3 -3
  10. {databricks_sdk-0.50.0 → databricks_sdk-0.52.0}/databricks/sdk/service/catalog.py +51 -4
  11. {databricks_sdk-0.50.0 → databricks_sdk-0.52.0}/databricks/sdk/service/cleanrooms.py +9 -14
  12. {databricks_sdk-0.50.0 → databricks_sdk-0.52.0}/databricks/sdk/service/compute.py +138 -6
  13. {databricks_sdk-0.50.0 → databricks_sdk-0.52.0}/databricks/sdk/service/dashboards.py +24 -29
  14. {databricks_sdk-0.50.0 → databricks_sdk-0.52.0}/databricks/sdk/service/files.py +2 -1
  15. {databricks_sdk-0.50.0 → databricks_sdk-0.52.0}/databricks/sdk/service/jobs.py +73 -18
  16. {databricks_sdk-0.50.0 → databricks_sdk-0.52.0}/databricks/sdk/service/ml.py +19 -2
  17. {databricks_sdk-0.50.0 → databricks_sdk-0.52.0}/databricks/sdk/service/oauth2.py +8 -13
  18. {databricks_sdk-0.50.0 → databricks_sdk-0.52.0}/databricks/sdk/service/pipelines.py +55 -27
  19. {databricks_sdk-0.50.0 → databricks_sdk-0.52.0}/databricks/sdk/service/serving.py +11 -14
  20. {databricks_sdk-0.50.0 → databricks_sdk-0.52.0}/databricks/sdk/service/settings.py +214 -125
  21. {databricks_sdk-0.50.0 → databricks_sdk-0.52.0}/databricks/sdk/service/sql.py +744 -6
  22. {databricks_sdk-0.50.0 → databricks_sdk-0.52.0}/databricks/sdk/service/vectorsearch.py +355 -159
  23. databricks_sdk-0.52.0/databricks/sdk/version.py +1 -0
  24. {databricks_sdk-0.50.0 → databricks_sdk-0.52.0}/databricks_sdk.egg-info/PKG-INFO +11 -11
  25. {databricks_sdk-0.50.0 → databricks_sdk-0.52.0}/databricks_sdk.egg-info/SOURCES.txt +1 -0
  26. databricks_sdk-0.50.0/databricks/sdk/version.py +0 -1
  27. {databricks_sdk-0.50.0 → databricks_sdk-0.52.0}/CONTRIBUTING.md +0 -0
  28. {databricks_sdk-0.50.0 → databricks_sdk-0.52.0}/DCO +0 -0
  29. {databricks_sdk-0.50.0 → databricks_sdk-0.52.0}/LICENSE +0 -0
  30. {databricks_sdk-0.50.0 → databricks_sdk-0.52.0}/MANIFEST.in +0 -0
  31. {databricks_sdk-0.50.0 → databricks_sdk-0.52.0}/Makefile +0 -0
  32. {databricks_sdk-0.50.0 → databricks_sdk-0.52.0}/NOTICE +0 -0
  33. {databricks_sdk-0.50.0 → databricks_sdk-0.52.0}/SECURITY.md +0 -0
  34. {databricks_sdk-0.50.0 → databricks_sdk-0.52.0}/databricks/__init__.py +0 -0
  35. {databricks_sdk-0.50.0 → databricks_sdk-0.52.0}/databricks/sdk/_base_client.py +0 -0
  36. {databricks_sdk-0.50.0 → databricks_sdk-0.52.0}/databricks/sdk/_property.py +0 -0
  37. {databricks_sdk-0.50.0 → databricks_sdk-0.52.0}/databricks/sdk/_widgets/__init__.py +0 -0
  38. {databricks_sdk-0.50.0 → databricks_sdk-0.52.0}/databricks/sdk/_widgets/default_widgets_utils.py +0 -0
  39. {databricks_sdk-0.50.0 → databricks_sdk-0.52.0}/databricks/sdk/_widgets/ipywidgets_utils.py +0 -0
  40. {databricks_sdk-0.50.0 → databricks_sdk-0.52.0}/databricks/sdk/azure.py +0 -0
  41. {databricks_sdk-0.50.0 → databricks_sdk-0.52.0}/databricks/sdk/casing.py +0 -0
  42. {databricks_sdk-0.50.0 → databricks_sdk-0.52.0}/databricks/sdk/clock.py +0 -0
  43. {databricks_sdk-0.50.0 → databricks_sdk-0.52.0}/databricks/sdk/core.py +0 -0
  44. {databricks_sdk-0.50.0 → databricks_sdk-0.52.0}/databricks/sdk/data_plane.py +0 -0
  45. {databricks_sdk-0.50.0 → databricks_sdk-0.52.0}/databricks/sdk/dbutils.py +0 -0
  46. {databricks_sdk-0.50.0 → databricks_sdk-0.52.0}/databricks/sdk/environments.py +0 -0
  47. {databricks_sdk-0.50.0 → databricks_sdk-0.52.0}/databricks/sdk/errors/__init__.py +0 -0
  48. {databricks_sdk-0.50.0 → databricks_sdk-0.52.0}/databricks/sdk/errors/base.py +0 -0
  49. {databricks_sdk-0.50.0 → databricks_sdk-0.52.0}/databricks/sdk/errors/customizer.py +0 -0
  50. {databricks_sdk-0.50.0 → databricks_sdk-0.52.0}/databricks/sdk/errors/deserializer.py +0 -0
  51. {databricks_sdk-0.50.0 → databricks_sdk-0.52.0}/databricks/sdk/errors/details.py +0 -0
  52. {databricks_sdk-0.50.0 → databricks_sdk-0.52.0}/databricks/sdk/errors/mapper.py +0 -0
  53. {databricks_sdk-0.50.0 → databricks_sdk-0.52.0}/databricks/sdk/errors/overrides.py +0 -0
  54. {databricks_sdk-0.50.0 → databricks_sdk-0.52.0}/databricks/sdk/errors/parser.py +0 -0
  55. {databricks_sdk-0.50.0 → databricks_sdk-0.52.0}/databricks/sdk/errors/platform.py +0 -0
  56. {databricks_sdk-0.50.0 → databricks_sdk-0.52.0}/databricks/sdk/errors/private_link.py +0 -0
  57. {databricks_sdk-0.50.0 → databricks_sdk-0.52.0}/databricks/sdk/errors/sdk.py +0 -0
  58. {databricks_sdk-0.50.0 → databricks_sdk-0.52.0}/databricks/sdk/logger/__init__.py +0 -0
  59. {databricks_sdk-0.50.0 → databricks_sdk-0.52.0}/databricks/sdk/logger/round_trip_logger.py +0 -0
  60. {databricks_sdk-0.50.0 → databricks_sdk-0.52.0}/databricks/sdk/mixins/__init__.py +0 -0
  61. {databricks_sdk-0.50.0 → databricks_sdk-0.52.0}/databricks/sdk/mixins/compute.py +0 -0
  62. {databricks_sdk-0.50.0 → databricks_sdk-0.52.0}/databricks/sdk/mixins/files.py +0 -0
  63. {databricks_sdk-0.50.0 → databricks_sdk-0.52.0}/databricks/sdk/mixins/jobs.py +0 -0
  64. {databricks_sdk-0.50.0 → databricks_sdk-0.52.0}/databricks/sdk/mixins/open_ai_client.py +0 -0
  65. {databricks_sdk-0.50.0 → databricks_sdk-0.52.0}/databricks/sdk/mixins/workspace.py +0 -0
  66. {databricks_sdk-0.50.0 → databricks_sdk-0.52.0}/databricks/sdk/oauth.py +0 -0
  67. {databricks_sdk-0.50.0 → databricks_sdk-0.52.0}/databricks/sdk/py.typed +0 -0
  68. {databricks_sdk-0.50.0 → databricks_sdk-0.52.0}/databricks/sdk/retries.py +0 -0
  69. {databricks_sdk-0.50.0 → databricks_sdk-0.52.0}/databricks/sdk/runtime/__init__.py +0 -0
  70. {databricks_sdk-0.50.0 → databricks_sdk-0.52.0}/databricks/sdk/runtime/dbutils_stub.py +0 -0
  71. {databricks_sdk-0.50.0 → databricks_sdk-0.52.0}/databricks/sdk/service/__init__.py +0 -0
  72. {databricks_sdk-0.50.0 → databricks_sdk-0.52.0}/databricks/sdk/service/_internal.py +0 -0
  73. {databricks_sdk-0.50.0 → databricks_sdk-0.52.0}/databricks/sdk/service/iam.py +0 -0
  74. {databricks_sdk-0.50.0 → databricks_sdk-0.52.0}/databricks/sdk/service/marketplace.py +0 -0
  75. {databricks_sdk-0.50.0 → databricks_sdk-0.52.0}/databricks/sdk/service/provisioning.py +0 -0
  76. {databricks_sdk-0.50.0 → databricks_sdk-0.52.0}/databricks/sdk/service/sharing.py +0 -0
  77. {databricks_sdk-0.50.0 → databricks_sdk-0.52.0}/databricks/sdk/service/workspace.py +0 -0
  78. {databricks_sdk-0.50.0 → databricks_sdk-0.52.0}/databricks/sdk/useragent.py +0 -0
  79. {databricks_sdk-0.50.0 → databricks_sdk-0.52.0}/databricks_sdk.egg-info/dependency_links.txt +0 -0
  80. {databricks_sdk-0.50.0 → databricks_sdk-0.52.0}/databricks_sdk.egg-info/requires.txt +0 -0
  81. {databricks_sdk-0.50.0 → databricks_sdk-0.52.0}/databricks_sdk.egg-info/top_level.txt +0 -0
  82. {databricks_sdk-0.50.0 → databricks_sdk-0.52.0}/pyproject.toml +0 -0
  83. {databricks_sdk-0.50.0 → databricks_sdk-0.52.0}/setup.cfg +0 -0
@@ -1,5 +1,78 @@
1
1
  # Version changelog
2
2
 
3
+ ## Release v0.52.0
4
+
5
+ ### API Changes
6
+ * Added `future_feature_data_path` field for `databricks.sdk.service.ml.CreateForecastingExperimentRequest`.
7
+ * Added `exclude_columns` and `include_columns` fields for `databricks.sdk.service.pipelines.TableSpecificConfig`.
8
+ * Added `network_check_control_plane_failure`, `network_check_dns_server_failure`, `network_check_metadata_endpoint_failure`, `network_check_multiple_components_failure`, `network_check_nic_failure`, `network_check_storage_failure` and `secret_permission_denied` enum values for `databricks.sdk.service.compute.TerminationReasonCode`.
9
+ * [Breaking] Changed `pipeline_id` field for `databricks.sdk.service.pipelines.EditPipeline` to be required.
10
+ * Changed `connection_name`, `gateway_storage_catalog` and `gateway_storage_schema` fields for `databricks.sdk.service.pipelines.IngestionGatewayPipelineDefinition` to be required.
11
+ * [Breaking] Changed `connection_name`, `gateway_storage_catalog` and `gateway_storage_schema` fields for `databricks.sdk.service.pipelines.IngestionGatewayPipelineDefinition` to be required.
12
+ * Changed `kind` field for `databricks.sdk.service.pipelines.PipelineDeployment` to be required.
13
+ * [Breaking] Changed `kind` field for `databricks.sdk.service.pipelines.PipelineDeployment` to be required.
14
+ * Changed `destination_catalog`, `destination_schema` and `source_url` fields for `databricks.sdk.service.pipelines.ReportSpec` to be required.
15
+ * [Breaking] Changed `destination_catalog`, `destination_schema` and `source_url` fields for `databricks.sdk.service.pipelines.ReportSpec` to be required.
16
+ * Changed `destination_catalog`, `destination_schema` and `source_schema` fields for `databricks.sdk.service.pipelines.SchemaSpec` to be required.
17
+ * [Breaking] Changed `destination_catalog`, `destination_schema` and `source_schema` fields for `databricks.sdk.service.pipelines.SchemaSpec` to be required.
18
+ * [Breaking] Changed `destination_catalog`, `destination_schema` and `source_table` fields for `databricks.sdk.service.pipelines.TableSpec` to be required.
19
+ * Changed `destination_catalog`, `destination_schema` and `source_table` fields for `databricks.sdk.service.pipelines.TableSpec` to be required.
20
+ * [Breaking] Changed `results` field for `databricks.sdk.service.sql.ListAlertsV2Response` to type `databricks.sdk.service.sql.AlertV2List` dataclass.
21
+ * [Breaking] Changed pagination for [AlertsV2API.list_alerts](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/sql/alerts_v2.html#databricks.sdk.service.sql.AlertsV2API.list_alerts) method.
22
+ * Fixed waiter for [GenieAPI.create_message](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/dashboards/genie.html#databricks.sdk.service.dashboards.GenieAPI.create_message) method.
23
+
24
+
25
+ ## Release v0.51.0
26
+
27
+ ### New Features and Improvements
28
+ * Enabled asynchronous token refreshes by default. A new `disable_async_token_refresh` configuration option has been added to allow disabling this feature if necessary ([#952](https://github.com/databricks/databricks-sdk-py/pull/952)).
29
+ To disable asynchronous token refresh, set the environment variable `DATABRICKS_DISABLE_ASYNC_TOKEN_REFRESH=true` or configure it within your configuration object.
30
+ The previous `enable_experimental_async_token_refresh` option has been removed as asynchronous refresh is now the default behavior.
31
+ * Introduce support for Databricks Workload Identity Federation in GitHub workflows ([#933](https://github.com/databricks/databricks-sdk-py/pull/933)).
32
+ See README.md for instructions.
33
+ * [Breaking] Users running their workflows in GitHub Actions, which use Cloud native authentication and also have the `DATABRICKS_CLIENT_ID` and `DATABRICKS_HOST`
34
+ environment variables set, may see their authentication start failing due to the order in which the SDK tries different authentication methods.
35
+
36
+ ### API Changes
37
+ * Added [w.alerts_v2](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/sql/alerts_v2.html) workspace-level service.
38
+ * Added `update_ncc_azure_private_endpoint_rule_public()` method for [a.network_connectivity](https://databricks-sdk-py.readthedocs.io/en/latest/account/settings/network_connectivity.html) account-level service.
39
+ * Added `update_endpoint_budget_policy()` and `update_endpoint_custom_tags()` methods for [w.vector_search_endpoints](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/vectorsearch/vector_search_endpoints.html) workspace-level service.
40
+ * Added `created_at`, `created_by` and `metastore_id` fields for `databricks.sdk.service.catalog.SetArtifactAllowlist`.
41
+ * Added `node_type_flexibility` field for `databricks.sdk.service.compute.EditInstancePool`.
42
+ * Added `page_size` and `page_token` fields for `databricks.sdk.service.compute.GetEvents`.
43
+ * Added `next_page_token` and `prev_page_token` fields for `databricks.sdk.service.compute.GetEventsResponse`.
44
+ * Added `node_type_flexibility` field for `databricks.sdk.service.compute.GetInstancePool`.
45
+ * Added `node_type_flexibility` field for `databricks.sdk.service.compute.InstancePoolAndStats`.
46
+ * Added `effective_performance_target` field for `databricks.sdk.service.jobs.RepairHistoryItem`.
47
+ * Added `performance_target` field for `databricks.sdk.service.jobs.RepairRun`.
48
+ * [Breaking] Added `network_connectivity_config` field for `databricks.sdk.service.settings.CreateNetworkConnectivityConfigRequest`.
49
+ * [Breaking] Added `private_endpoint_rule` field for `databricks.sdk.service.settings.CreatePrivateEndpointRuleRequest`.
50
+ * Added `domain_names` field for `databricks.sdk.service.settings.NccAzurePrivateEndpointRule`.
51
+ * Added `auto_resolve_display_name` field for `databricks.sdk.service.sql.CreateAlertRequest`.
52
+ * Added `auto_resolve_display_name` field for `databricks.sdk.service.sql.CreateQueryRequest`.
53
+ * Added `budget_policy_id` field for `databricks.sdk.service.vectorsearch.CreateEndpoint`.
54
+ * Added `custom_tags` and `effective_budget_policy_id` fields for `databricks.sdk.service.vectorsearch.EndpointInfo`.
55
+ * Added `create_clean_room`, `execute_clean_room_task` and `modify_clean_room` enum values for `databricks.sdk.service.catalog.Privilege`.
56
+ * Added `dns_resolution_error` and `gcp_denied_by_org_policy` enum values for `databricks.sdk.service.compute.TerminationReasonCode`.
57
+ * Added `disabled` enum value for `databricks.sdk.service.jobs.TerminationCodeCode`.
58
+ * Added `expired` enum value for `databricks.sdk.service.settings.NccAzurePrivateEndpointRuleConnectionState`.
59
+ * [Breaking] Changed `create_network_connectivity_configuration()` and `create_private_endpoint_rule()` methods for [a.network_connectivity](https://databricks-sdk-py.readthedocs.io/en/latest/account/settings/network_connectivity.html) account-level service with new required argument order.
60
+ * [Breaking] Changed `create_index()` method for [w.vector_search_indexes](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/vectorsearch/vector_search_indexes.html) workspace-level service to return `databricks.sdk.service.vectorsearch.VectorIndex` dataclass.
61
+ * [Breaking] Changed `delete_data_vector_index()` method for [w.vector_search_indexes](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/vectorsearch/vector_search_indexes.html) workspace-level service. HTTP method/verb has changed.
62
+ * [Breaking] Changed `delete_data_vector_index()` method for [w.vector_search_indexes](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/vectorsearch/vector_search_indexes.html) workspace-level service with new required argument order.
63
+ * [Breaking] Changed `databricks.sdk.service.vectorsearch.List` dataclass to `databricks.sdk.service.vectorsearch.ListValue`.
64
+ * [Breaking] Changed `workload_size` field for `databricks.sdk.service.serving.ServedModelInput` to type `str` dataclass.
65
+ * [Breaking] Changed `group_id` field for `databricks.sdk.service.settings.NccAzurePrivateEndpointRule` to type `str` dataclass.
66
+ * [Breaking] Changed `target_services` field for `databricks.sdk.service.settings.NccAzureServiceEndpointRule` to type `databricks.sdk.service.settings.EgressResourceTypeList` dataclass.
67
+ * [Breaking] Changed `data_array` field for `databricks.sdk.service.vectorsearch.ResultData` to type `databricks.sdk.service.vectorsearch.ListValueList` dataclass.
68
+ * [Breaking] Changed waiter for [VectorSearchEndpointsAPI.create_endpoint](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/vectorsearch/vector_search_endpoints.html#databricks.sdk.service.vectorsearch.VectorSearchEndpointsAPI.create_endpoint) method.
69
+ * [Breaking] Removed `name` and `region` fields for `databricks.sdk.service.settings.CreateNetworkConnectivityConfigRequest`.
70
+ * [Breaking] Removed `group_id` and `resource_id` fields for `databricks.sdk.service.settings.CreatePrivateEndpointRuleRequest`.
71
+ * [Breaking] Removed `null_value` field for `databricks.sdk.service.vectorsearch.Value`.
72
+ * [Breaking] Removed `large`, `medium` and `small` enum values for `databricks.sdk.service.serving.ServedModelInputWorkloadSize`.
73
+ * [Breaking] Removed `blob`, `dfs`, `mysql_server` and `sql_server` enum values for `databricks.sdk.service.settings.NccAzurePrivateEndpointRuleGroupId`.
74
+
75
+
3
76
  ## Release v0.50.0
4
77
 
5
78
  ### API Changes
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: databricks-sdk
3
- Version: 0.50.0
3
+ Version: 0.52.0
4
4
  Summary: Databricks SDK for Python (Beta)
5
5
  Project-URL: Documentation, https://databricks-sdk-py.readthedocs.io
6
6
  Keywords: databricks,sdk
@@ -180,18 +180,18 @@ Depending on the Databricks authentication method, the SDK uses the following in
180
180
 
181
181
  ### Databricks native authentication
182
182
 
183
- By default, the Databricks SDK for Python initially tries [Databricks token authentication](https://docs.databricks.com/dev-tools/api/latest/authentication.html) (`auth_type='pat'` argument). If the SDK is unsuccessful, it then tries Databricks basic (username/password) authentication (`auth_type="basic"` argument).
183
+ By default, the Databricks SDK for Python initially tries [Databricks token authentication](https://docs.databricks.com/dev-tools/api/latest/authentication.html) (`auth_type='pat'` argument). If the SDK is unsuccessful, it then tries Databricks Workload Identity Federation (WIF) authentication using OIDC (`auth_type="github-oidc"` argument).
184
184
 
185
185
  - For Databricks token authentication, you must provide `host` and `token`; or their environment variable or `.databrickscfg` file field equivalents.
186
- - For Databricks basic authentication, you must provide `host`, `username`, and `password` _(for AWS workspace-level operations)_; or `host`, `account_id`, `username`, and `password` _(for AWS, Azure, or GCP account-level operations)_; or their environment variable or `.databrickscfg` file field equivalents.
187
-
188
- | Argument | Description | Environment variable |
189
- |--------------|-------------|-------------------|
190
- | `host` | _(String)_ The Databricks host URL for either the Databricks workspace endpoint or the Databricks accounts endpoint. | `DATABRICKS_HOST` |
191
- | `account_id` | _(String)_ The Databricks account ID for the Databricks accounts endpoint. Only has effect when `Host` is either `https://accounts.cloud.databricks.com/` _(AWS)_, `https://accounts.azuredatabricks.net/` _(Azure)_, or `https://accounts.gcp.databricks.com/` _(GCP)_. | `DATABRICKS_ACCOUNT_ID` |
192
- | `token` | _(String)_ The Databricks personal access token (PAT) _(AWS, Azure, and GCP)_ or Azure Active Directory (Azure AD) token _(Azure)_. | `DATABRICKS_TOKEN` |
193
- | `username` | _(String)_ The Databricks username part of basic authentication. Only possible when `Host` is `*.cloud.databricks.com` _(AWS)_. | `DATABRICKS_USERNAME` |
194
- | `password` | _(String)_ The Databricks password part of basic authentication. Only possible when `Host` is `*.cloud.databricks.com` _(AWS)_. | `DATABRICKS_PASSWORD` |
186
+ - For Databricks OIDC authentication, you must provide the `host`, `client_id` and `token_audience` _(optional)_ either directly, through the corresponding environment variables, or in your `.databrickscfg` configuration file.
187
+
188
+ | Argument | Description | Environment variable |
189
+ |------------------|---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|-------------------------|
190
+ | `host` | _(String)_ The Databricks host URL for either the Databricks workspace endpoint or the Databricks accounts endpoint. | `DATABRICKS_HOST` |
191
+ | `account_id` | _(String)_ The Databricks account ID for the Databricks accounts endpoint. Only has effect when `Host` is either `https://accounts.cloud.databricks.com/` _(AWS)_, `https://accounts.azuredatabricks.net/` _(Azure)_, or `https://accounts.gcp.databricks.com/` _(GCP)_. | `DATABRICKS_ACCOUNT_ID` |
192
+ | `token` | _(String)_ The Databricks personal access token (PAT) _(AWS, Azure, and GCP)_ or Azure Active Directory (Azure AD) token _(Azure)_. | `DATABRICKS_TOKEN` |
193
+ | `client_id` | _(String)_ The Databricks Service Principal Application ID. | `DATABRICKS_CLIENT_ID` |
194
+ | `token_audience` | _(String)_ When using Workload Identity Federation, the audience to specify when fetching an ID token from the ID token supplier.                                                                                                                        | `DATABRICKS_TOKEN_AUDIENCE` |
195
195
 
196
196
  For example, to use Databricks token authentication:
197
197
 
@@ -126,18 +126,18 @@ Depending on the Databricks authentication method, the SDK uses the following in
126
126
 
127
127
  ### Databricks native authentication
128
128
 
129
- By default, the Databricks SDK for Python initially tries [Databricks token authentication](https://docs.databricks.com/dev-tools/api/latest/authentication.html) (`auth_type='pat'` argument). If the SDK is unsuccessful, it then tries Databricks basic (username/password) authentication (`auth_type="basic"` argument).
129
+ By default, the Databricks SDK for Python initially tries [Databricks token authentication](https://docs.databricks.com/dev-tools/api/latest/authentication.html) (`auth_type='pat'` argument). If the SDK is unsuccessful, it then tries Databricks Workload Identity Federation (WIF) authentication using OIDC (`auth_type="github-oidc"` argument).
130
130
 
131
131
  - For Databricks token authentication, you must provide `host` and `token`; or their environment variable or `.databrickscfg` file field equivalents.
132
- - For Databricks basic authentication, you must provide `host`, `username`, and `password` _(for AWS workspace-level operations)_; or `host`, `account_id`, `username`, and `password` _(for AWS, Azure, or GCP account-level operations)_; or their environment variable or `.databrickscfg` file field equivalents.
133
-
134
- | Argument | Description | Environment variable |
135
- |--------------|-------------|-------------------|
136
- | `host` | _(String)_ The Databricks host URL for either the Databricks workspace endpoint or the Databricks accounts endpoint. | `DATABRICKS_HOST` |
137
- | `account_id` | _(String)_ The Databricks account ID for the Databricks accounts endpoint. Only has effect when `Host` is either `https://accounts.cloud.databricks.com/` _(AWS)_, `https://accounts.azuredatabricks.net/` _(Azure)_, or `https://accounts.gcp.databricks.com/` _(GCP)_. | `DATABRICKS_ACCOUNT_ID` |
138
- | `token` | _(String)_ The Databricks personal access token (PAT) _(AWS, Azure, and GCP)_ or Azure Active Directory (Azure AD) token _(Azure)_. | `DATABRICKS_TOKEN` |
139
- | `username` | _(String)_ The Databricks username part of basic authentication. Only possible when `Host` is `*.cloud.databricks.com` _(AWS)_. | `DATABRICKS_USERNAME` |
140
- | `password` | _(String)_ The Databricks password part of basic authentication. Only possible when `Host` is `*.cloud.databricks.com` _(AWS)_. | `DATABRICKS_PASSWORD` |
132
+ - For Databricks OIDC authentication, you must provide the `host`, `client_id` and `token_audience` _(optional)_ either directly, through the corresponding environment variables, or in your `.databrickscfg` configuration file.
133
+
134
+ | Argument | Description | Environment variable |
135
+ |------------------|---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|-------------------------|
136
+ | `host` | _(String)_ The Databricks host URL for either the Databricks workspace endpoint or the Databricks accounts endpoint. | `DATABRICKS_HOST` |
137
+ | `account_id` | _(String)_ The Databricks account ID for the Databricks accounts endpoint. Only has effect when `Host` is either `https://accounts.cloud.databricks.com/` _(AWS)_, `https://accounts.azuredatabricks.net/` _(Azure)_, or `https://accounts.gcp.databricks.com/` _(GCP)_. | `DATABRICKS_ACCOUNT_ID` |
138
+ | `token` | _(String)_ The Databricks personal access token (PAT) _(AWS, Azure, and GCP)_ or Azure Active Directory (Azure AD) token _(Azure)_. | `DATABRICKS_TOKEN` |
139
+ | `client_id` | _(String)_ The Databricks Service Principal Application ID. | `DATABRICKS_CLIENT_ID` |
140
+ | `token_audience` | _(String)_ When using Workload Identity Federation, the audience to specify when fetching an ID token from the ID token supplier.                                                                                                                        | `DATABRICKS_TOKEN_AUDIENCE` |
141
141
 
142
142
  For example, to use Databricks token authentication:
143
143
 
@@ -97,10 +97,10 @@ from databricks.sdk.service.sharing import (ProvidersAPI,
97
97
  RecipientActivationAPI,
98
98
  RecipientsAPI, SharesAPI)
99
99
  from databricks.sdk.service.sql import (AlertsAPI, AlertsLegacyAPI,
100
- DashboardsAPI, DashboardWidgetsAPI,
101
- DataSourcesAPI, DbsqlPermissionsAPI,
102
- QueriesAPI, QueriesLegacyAPI,
103
- QueryHistoryAPI,
100
+ AlertsV2API, DashboardsAPI,
101
+ DashboardWidgetsAPI, DataSourcesAPI,
102
+ DbsqlPermissionsAPI, QueriesAPI,
103
+ QueriesLegacyAPI, QueryHistoryAPI,
104
104
  QueryVisualizationsAPI,
105
105
  QueryVisualizationsLegacyAPI,
106
106
  RedashConfigAPI, StatementExecutionAPI,
@@ -170,6 +170,7 @@ class WorkspaceClient:
170
170
  product_version="0.0.0",
171
171
  credentials_strategy: Optional[CredentialsStrategy] = None,
172
172
  credentials_provider: Optional[CredentialsStrategy] = None,
173
+ token_audience: Optional[str] = None,
173
174
  config: Optional[client.Config] = None,
174
175
  ):
175
176
  if not config:
@@ -198,6 +199,7 @@ class WorkspaceClient:
198
199
  debug_headers=debug_headers,
199
200
  product=product,
200
201
  product_version=product_version,
202
+ token_audience=token_audience,
201
203
  )
202
204
  self._config = config.copy()
203
205
  self._dbutils = _make_dbutils(self._config)
@@ -207,6 +209,7 @@ class WorkspaceClient:
207
209
  self._account_access_control_proxy = service.iam.AccountAccessControlProxyAPI(self._api_client)
208
210
  self._alerts = service.sql.AlertsAPI(self._api_client)
209
211
  self._alerts_legacy = service.sql.AlertsLegacyAPI(self._api_client)
212
+ self._alerts_v2 = service.sql.AlertsV2API(self._api_client)
210
213
  self._apps = service.apps.AppsAPI(self._api_client)
211
214
  self._artifact_allowlists = service.catalog.ArtifactAllowlistsAPI(self._api_client)
212
215
  self._catalogs = service.catalog.CatalogsAPI(self._api_client)
@@ -289,7 +292,7 @@ class WorkspaceClient:
289
292
  self._service_principals = service.iam.ServicePrincipalsAPI(self._api_client)
290
293
  self._serving_endpoints = serving_endpoints
291
294
  serving_endpoints_data_plane_token_source = DataPlaneTokenSource(
292
- self._config.host, self._config.oauth_token, not self._config.enable_experimental_async_token_refresh
295
+ self._config.host, self._config.oauth_token, self._config.disable_async_token_refresh
293
296
  )
294
297
  self._serving_endpoints_data_plane = service.serving.ServingEndpointsDataPlaneAPI(
295
298
  self._api_client, serving_endpoints, serving_endpoints_data_plane_token_source
@@ -346,6 +349,11 @@ class WorkspaceClient:
346
349
  """The alerts API can be used to perform CRUD operations on alerts."""
347
350
  return self._alerts_legacy
348
351
 
352
+ @property
353
+ def alerts_v2(self) -> service.sql.AlertsV2API:
354
+ """TODO: Add description."""
355
+ return self._alerts_v2
356
+
349
357
  @property
350
358
  def apps(self) -> service.apps.AppsAPI:
351
359
  """Apps run directly on a customer’s Databricks instance, integrate with their data, use and extend Databricks services, and enable users to interact through single sign-on."""
@@ -862,6 +870,7 @@ class AccountClient:
862
870
  product_version="0.0.0",
863
871
  credentials_strategy: Optional[CredentialsStrategy] = None,
864
872
  credentials_provider: Optional[CredentialsStrategy] = None,
873
+ token_audience: Optional[str] = None,
865
874
  config: Optional[client.Config] = None,
866
875
  ):
867
876
  if not config:
@@ -890,6 +899,7 @@ class AccountClient:
890
899
  debug_headers=debug_headers,
891
900
  product=product,
892
901
  product_version=product_version,
902
+ token_audience=token_audience,
893
903
  )
894
904
  self._config = config.copy()
895
905
  self._api_client = client.ApiClient(self._config)
@@ -61,6 +61,7 @@ class Config:
61
61
  host: str = ConfigAttribute(env="DATABRICKS_HOST")
62
62
  account_id: str = ConfigAttribute(env="DATABRICKS_ACCOUNT_ID")
63
63
  token: str = ConfigAttribute(env="DATABRICKS_TOKEN", auth="pat", sensitive=True)
64
+ token_audience: str = ConfigAttribute(env="DATABRICKS_TOKEN_AUDIENCE", auth="github-oidc")
64
65
  username: str = ConfigAttribute(env="DATABRICKS_USERNAME", auth="basic")
65
66
  password: str = ConfigAttribute(env="DATABRICKS_PASSWORD", auth="basic", sensitive=True)
66
67
  client_id: str = ConfigAttribute(env="DATABRICKS_CLIENT_ID", auth="oauth")
@@ -95,9 +96,7 @@ class Config:
95
96
  max_connections_per_pool: int = ConfigAttribute()
96
97
  databricks_environment: Optional[DatabricksEnvironment] = None
97
98
 
98
- enable_experimental_async_token_refresh: bool = ConfigAttribute(
99
- env="DATABRICKS_ENABLE_EXPERIMENTAL_ASYNC_TOKEN_REFRESH"
100
- )
99
+ disable_async_token_refresh: bool = ConfigAttribute(env="DATABRICKS_DISABLE_ASYNC_TOKEN_REFRESH")
101
100
 
102
101
  enable_experimental_files_api_client: bool = ConfigAttribute(env="DATABRICKS_ENABLE_EXPERIMENTAL_FILES_API_CLIENT")
103
102
  files_api_client_download_max_total_recovers = None
@@ -23,6 +23,7 @@ from google.oauth2 import service_account # type: ignore
23
23
  from .azure import add_sp_management_token, add_workspace_id_header
24
24
  from .oauth import (ClientCredentials, OAuthClient, Refreshable, Token,
25
25
  TokenCache, TokenSource)
26
+ from .oidc_token_supplier import GitHubOIDCTokenSupplier
26
27
 
27
28
  CredentialsProvider = Callable[[], Dict[str, str]]
28
29
 
@@ -191,7 +192,7 @@ def oauth_service_principal(cfg: "Config") -> Optional[CredentialsProvider]:
191
192
  token_url=oidc.token_endpoint,
192
193
  scopes=["all-apis"],
193
194
  use_header=True,
194
- disable_async=not cfg.enable_experimental_async_token_refresh,
195
+ disable_async=cfg.disable_async_token_refresh,
195
196
  )
196
197
 
197
198
  def inner() -> Dict[str, str]:
@@ -291,7 +292,7 @@ def azure_service_principal(cfg: "Config") -> CredentialsProvider:
291
292
  token_url=f"{aad_endpoint}{cfg.azure_tenant_id}/oauth2/token",
292
293
  endpoint_params={"resource": resource},
293
294
  use_params=True,
294
- disable_async=not cfg.enable_experimental_async_token_refresh,
295
+ disable_async=cfg.disable_async_token_refresh,
295
296
  )
296
297
 
297
298
  _ensure_host_present(cfg, token_source_for)
@@ -314,6 +315,58 @@ def azure_service_principal(cfg: "Config") -> CredentialsProvider:
314
315
  return OAuthCredentialsProvider(refreshed_headers, token)
315
316
 
316
317
 
318
@oauth_credentials_strategy("github-oidc", ["host", "client_id"])
def databricks_wif(cfg: "Config") -> Optional[CredentialsProvider]:
    """
    Workload Identity Federation credentials: obtain a JWT from a token
    supplier and exchange it for a Databricks token.

    Supported suppliers:
    - GitHub OIDC
    """
    supplier = GitHubOIDCTokenSupplier()

    # Resolve the audience for the OIDC token: explicit config wins; otherwise
    # account clients use the account id and workspace clients use the
    # workspace token endpoint.
    audience = cfg.token_audience
    if audience is None:
        audience = cfg.account_id if cfg.is_account_client else cfg.oidc_endpoints.token_endpoint

    # Probe the supplier once: if it cannot produce an ID token, this
    # authentication mode is not usable in the current environment.
    if not supplier.get_oidc_token(audience):
        return None

    def token_source_for(aud: str) -> TokenSource:
        jwt = supplier.get_oidc_token(aud)
        if not jwt:
            # Should not happen, since we checked it above.
            raise Exception("Cannot get OIDC token")
        exchange_params = {
            "subject_token_type": "urn:ietf:params:oauth:token-type:jwt",
            "subject_token": jwt,
            "grant_type": "urn:ietf:params:oauth:grant-type:token-exchange",
        }
        return ClientCredentials(
            client_id=cfg.client_id,
            client_secret="",  # we have no (rotatable) secrets in OIDC flow
            token_url=cfg.oidc_endpoints.token_endpoint,
            endpoint_params=exchange_params,
            scopes=["all-apis"],
            use_params=True,
            disable_async=cfg.disable_async_token_refresh,
        )

    def refreshed_headers() -> Dict[str, str]:
        t = token_source_for(audience).token()
        return {"Authorization": f"{t.token_type} {t.access_token}"}

    def token() -> Token:
        return token_source_for(audience).token()

    return OAuthCredentialsProvider(refreshed_headers, token)
368
+
369
+
317
370
  @oauth_credentials_strategy("github-oidc-azure", ["host", "azure_client_id"])
318
371
  def github_oidc_azure(cfg: "Config") -> Optional[CredentialsProvider]:
319
372
  if "ACTIONS_ID_TOKEN_REQUEST_TOKEN" not in os.environ:
@@ -325,16 +378,8 @@ def github_oidc_azure(cfg: "Config") -> Optional[CredentialsProvider]:
325
378
  if not cfg.is_azure:
326
379
  return None
327
380
 
328
- # See https://docs.github.com/en/actions/deployment/security-hardening-your-deployments/configuring-openid-connect-in-cloud-providers
329
- headers = {"Authorization": f"Bearer {os.environ['ACTIONS_ID_TOKEN_REQUEST_TOKEN']}"}
330
- endpoint = f"{os.environ['ACTIONS_ID_TOKEN_REQUEST_URL']}&audience=api://AzureADTokenExchange"
331
- response = requests.get(endpoint, headers=headers)
332
- if not response.ok:
333
- return None
334
-
335
- # get the ID Token with aud=api://AzureADTokenExchange sub=repo:org/repo:environment:name
336
- response_json = response.json()
337
- if "value" not in response_json:
381
+ token = GitHubOIDCTokenSupplier().get_oidc_token("api://AzureADTokenExchange")
382
+ if not token:
338
383
  return None
339
384
 
340
385
  logger.info(
@@ -344,7 +389,7 @@ def github_oidc_azure(cfg: "Config") -> Optional[CredentialsProvider]:
344
389
  params = {
345
390
  "client_assertion_type": "urn:ietf:params:oauth:client-assertion-type:jwt-bearer",
346
391
  "resource": cfg.effective_azure_login_app_id,
347
- "client_assertion": response_json["value"],
392
+ "client_assertion": token,
348
393
  }
349
394
  aad_endpoint = cfg.arm_environment.active_directory_endpoint
350
395
  if not cfg.azure_tenant_id:
@@ -357,7 +402,7 @@ def github_oidc_azure(cfg: "Config") -> Optional[CredentialsProvider]:
357
402
  token_url=f"{aad_endpoint}{cfg.azure_tenant_id}/oauth2/token",
358
403
  endpoint_params=params,
359
404
  use_params=True,
360
- disable_async=not cfg.enable_experimental_async_token_refresh,
405
+ disable_async=cfg.disable_async_token_refresh,
361
406
  )
362
407
 
363
408
  def refreshed_headers() -> Dict[str, str]:
@@ -694,7 +739,7 @@ class DatabricksCliTokenSource(CliTokenSource):
694
739
  token_type_field="token_type",
695
740
  access_token_field="access_token",
696
741
  expiry_field="expiry",
697
- disable_async=not cfg.enable_experimental_async_token_refresh,
742
+ disable_async=cfg.disable_async_token_refresh,
698
743
  )
699
744
 
700
745
  @staticmethod
@@ -927,6 +972,7 @@ class DefaultCredentials:
927
972
  basic_auth,
928
973
  metadata_service,
929
974
  oauth_service_principal,
975
+ databricks_wif,
930
976
  azure_service_principal,
931
977
  github_oidc_azure,
932
978
  azure_cli,
@@ -0,0 +1,28 @@
1
+ import os
2
+ from typing import Optional
3
+
4
+ import requests
5
+
6
+
7
class GitHubOIDCTokenSupplier:
    """
    Supplies OIDC tokens minted by GitHub Actions.

    Only usable inside a GitHub Actions job whose workflow grants the
    `id-token: write` permission, which exposes the
    ACTIONS_ID_TOKEN_REQUEST_TOKEN / ACTIONS_ID_TOKEN_REQUEST_URL variables.
    """

    def get_oidc_token(self, audience: str, *, timeout: float = 30.0) -> Optional[str]:
        """Request an ID token for the given audience from the GitHub OIDC endpoint.

        :param audience: value to request for the token's ``aud`` claim.
        :param timeout: request timeout in seconds; guards against the token
            endpoint hanging indefinitely.
        :returns: the JWT as a string, or ``None`` when not running inside
            GitHub Actions or when the endpoint does not return a token.
        """
        if "ACTIONS_ID_TOKEN_REQUEST_TOKEN" not in os.environ or "ACTIONS_ID_TOKEN_REQUEST_URL" not in os.environ:
            # not in GitHub actions
            return None
        # See https://docs.github.com/en/actions/deployment/security-hardening-your-deployments/configuring-openid-connect-in-cloud-providers
        headers = {"Authorization": f"Bearer {os.environ['ACTIONS_ID_TOKEN_REQUEST_TOKEN']}"}
        endpoint = f"{os.environ['ACTIONS_ID_TOKEN_REQUEST_URL']}&audience={audience}"
        response = requests.get(endpoint, headers=headers, timeout=timeout)
        if not response.ok:
            return None

        # The ID token (aud=<audience>, sub=repo:org/repo:environment:name) is
        # returned under the "value" key of the JSON response.
        response_json = response.json()
        if "value" not in response_json:
            return None

        return response_json["value"]
@@ -1173,12 +1173,12 @@ class AppsAPI:
1173
1173
  attempt += 1
1174
1174
  raise TimeoutError(f"timed out after {timeout}: {status_message}")
1175
1175
 
1176
- def create(self, *, app: Optional[App] = None, no_compute: Optional[bool] = None) -> Wait[App]:
1176
+ def create(self, app: App, *, no_compute: Optional[bool] = None) -> Wait[App]:
1177
1177
  """Create an app.
1178
1178
 
1179
1179
  Creates a new app.
1180
1180
 
1181
- :param app: :class:`App` (optional)
1181
+ :param app: :class:`App`
1182
1182
  :param no_compute: bool (optional)
1183
1183
  If true, the app will not be started after creation.
1184
1184
 
@@ -1198,9 +1198,7 @@ class AppsAPI:
1198
1198
  op_response = self._api.do("POST", "/api/2.0/apps", query=query, body=body, headers=headers)
1199
1199
  return Wait(self.wait_get_app_active, response=App.from_dict(op_response), name=op_response["name"])
1200
1200
 
1201
- def create_and_wait(
1202
- self, *, app: Optional[App] = None, no_compute: Optional[bool] = None, timeout=timedelta(minutes=20)
1203
- ) -> App:
1201
    def create_and_wait(self, app: App, *, no_compute: Optional[bool] = None, timeout=timedelta(minutes=20)) -> App:
        """Create an app and block until the long-running operation completes.

        Convenience wrapper around :meth:`create` that waits (up to *timeout*)
        for the returned operation and returns the final :class:`App`.

        :param app: :class:`App`
        :param no_compute: bool (optional)
            If true, the app will not be started after creation.
        :param timeout: maximum time to wait; a TimeoutError is raised when exceeded.

        :returns: :class:`App`
        """
        return self.create(app=app, no_compute=no_compute).result(timeout=timeout)
1205
1203
 
1206
1204
  def delete(self, name: str) -> App:
@@ -1221,14 +1219,14 @@ class AppsAPI:
1221
1219
  res = self._api.do("DELETE", f"/api/2.0/apps/{name}", headers=headers)
1222
1220
  return App.from_dict(res)
1223
1221
 
1224
- def deploy(self, app_name: str, *, app_deployment: Optional[AppDeployment] = None) -> Wait[AppDeployment]:
1222
+ def deploy(self, app_name: str, app_deployment: AppDeployment) -> Wait[AppDeployment]:
1225
1223
  """Create an app deployment.
1226
1224
 
1227
1225
  Creates an app deployment for the app with the supplied name.
1228
1226
 
1229
1227
  :param app_name: str
1230
1228
  The name of the app.
1231
- :param app_deployment: :class:`AppDeployment` (optional)
1229
+ :param app_deployment: :class:`AppDeployment`
1232
1230
 
1233
1231
  :returns:
1234
1232
  Long-running operation waiter for :class:`AppDeployment`.
@@ -1249,7 +1247,7 @@ class AppsAPI:
1249
1247
  )
1250
1248
 
1251
1249
    def deploy_and_wait(
        self, app_name: str, app_deployment: AppDeployment, timeout=timedelta(minutes=20)
    ) -> AppDeployment:
        """Create an app deployment and block until it completes.

        Convenience wrapper around :meth:`deploy` that waits (up to *timeout*)
        for the deployment operation and returns the final :class:`AppDeployment`.

        :param app_name: str
            The name of the app.
        :param app_deployment: :class:`AppDeployment`
        :param timeout: maximum time to wait for the deployment to complete.

        :returns: :class:`AppDeployment`
        """
        return self.deploy(app_deployment=app_deployment, app_name=app_name).result(timeout=timeout)
1255
1253
 
@@ -1466,7 +1464,7 @@ class AppsAPI:
1466
1464
    def stop_and_wait(self, name: str, timeout=timedelta(minutes=20)) -> App:
        """Stop the named app and block until the stop operation completes.

        Convenience wrapper around :meth:`stop` that waits (up to *timeout*)
        for the operation and returns the final :class:`App`.

        :param name: str
            The name of the app.
        :param timeout: maximum time to wait for the operation to complete.

        :returns: :class:`App`
        """
        return self.stop(name=name).result(timeout=timeout)
1468
1466
 
1469
- def update(self, name: str, *, app: Optional[App] = None) -> App:
1467
+ def update(self, name: str, app: App) -> App:
1470
1468
  """Update an app.
1471
1469
 
1472
1470
  Updates the app with the supplied name.
@@ -1474,7 +1472,7 @@ class AppsAPI:
1474
1472
  :param name: str
1475
1473
  The name of the app. The name must contain only lowercase alphanumeric characters and hyphens. It
1476
1474
  must be unique within the workspace.
1477
- :param app: :class:`App` (optional)
1475
+ :param app: :class:`App`
1478
1476
 
1479
1477
  :returns: :class:`App`
1480
1478
  """
@@ -1873,7 +1873,7 @@ class BudgetPolicyAPI:
1873
1873
  query["page_token"] = json["next_page_token"]
1874
1874
 
1875
1875
  def update(
1876
- self, policy_id: str, *, limit_config: Optional[LimitConfig] = None, policy: Optional[BudgetPolicy] = None
1876
+ self, policy_id: str, policy: BudgetPolicy, *, limit_config: Optional[LimitConfig] = None
1877
1877
  ) -> BudgetPolicy:
1878
1878
  """Update a budget policy.
1879
1879
 
@@ -1881,10 +1881,10 @@ class BudgetPolicyAPI:
1881
1881
 
1882
1882
  :param policy_id: str
1883
1883
  The Id of the policy. This field is generated by Databricks and globally unique.
1884
+ :param policy: :class:`BudgetPolicy`
1885
+ Contains the BudgetPolicy details.
1884
1886
  :param limit_config: :class:`LimitConfig` (optional)
1885
1887
  DEPRECATED. This is redundant field as LimitConfig is part of the BudgetPolicy
1886
- :param policy: :class:`BudgetPolicy` (optional)
1887
- Contains the BudgetPolicy details.
1888
1888
 
1889
1889
  :returns: :class:`BudgetPolicy`
1890
1890
  """