databricks-sdk 0.49.0__tar.gz → 0.51.0__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.

This version of databricks-sdk might be problematic.
Files changed (83)
  1. {databricks_sdk-0.49.0 → databricks_sdk-0.51.0}/CHANGELOG.md +82 -0
  2. {databricks_sdk-0.49.0 → databricks_sdk-0.51.0}/PKG-INFO +11 -11
  3. {databricks_sdk-0.49.0 → databricks_sdk-0.51.0}/README.md +10 -10
  4. {databricks_sdk-0.49.0 → databricks_sdk-0.51.0}/databricks/sdk/__init__.py +20 -8
  5. {databricks_sdk-0.49.0 → databricks_sdk-0.51.0}/databricks/sdk/config.py +2 -3
  6. {databricks_sdk-0.49.0 → databricks_sdk-0.51.0}/databricks/sdk/credentials_provider.py +61 -15
  7. databricks_sdk-0.51.0/databricks/sdk/oidc_token_supplier.py +28 -0
  8. {databricks_sdk-0.49.0 → databricks_sdk-0.51.0}/databricks/sdk/service/apps.py +8 -10
  9. {databricks_sdk-0.49.0 → databricks_sdk-0.51.0}/databricks/sdk/service/billing.py +12 -3
  10. {databricks_sdk-0.49.0 → databricks_sdk-0.51.0}/databricks/sdk/service/catalog.py +73 -4
  11. {databricks_sdk-0.49.0 → databricks_sdk-0.51.0}/databricks/sdk/service/cleanrooms.py +9 -14
  12. {databricks_sdk-0.49.0 → databricks_sdk-0.51.0}/databricks/sdk/service/compute.py +151 -7
  13. {databricks_sdk-0.49.0 → databricks_sdk-0.51.0}/databricks/sdk/service/dashboards.py +253 -42
  14. {databricks_sdk-0.49.0 → databricks_sdk-0.51.0}/databricks/sdk/service/jobs.py +602 -83
  15. {databricks_sdk-0.49.0 → databricks_sdk-0.51.0}/databricks/sdk/service/ml.py +408 -72
  16. {databricks_sdk-0.49.0 → databricks_sdk-0.51.0}/databricks/sdk/service/oauth2.py +8 -13
  17. {databricks_sdk-0.49.0 → databricks_sdk-0.51.0}/databricks/sdk/service/pipelines.py +0 -32
  18. {databricks_sdk-0.49.0 → databricks_sdk-0.51.0}/databricks/sdk/service/serving.py +26 -26
  19. {databricks_sdk-0.49.0 → databricks_sdk-0.51.0}/databricks/sdk/service/settings.py +670 -113
  20. {databricks_sdk-0.49.0 → databricks_sdk-0.51.0}/databricks/sdk/service/sql.py +881 -6
  21. {databricks_sdk-0.49.0 → databricks_sdk-0.51.0}/databricks/sdk/service/vectorsearch.py +355 -159
  22. databricks_sdk-0.51.0/databricks/sdk/version.py +1 -0
  23. {databricks_sdk-0.49.0 → databricks_sdk-0.51.0}/databricks_sdk.egg-info/PKG-INFO +11 -11
  24. {databricks_sdk-0.49.0 → databricks_sdk-0.51.0}/databricks_sdk.egg-info/SOURCES.txt +1 -0
  25. databricks_sdk-0.49.0/databricks/sdk/version.py +0 -1
  26. {databricks_sdk-0.49.0 → databricks_sdk-0.51.0}/CONTRIBUTING.md +0 -0
  27. {databricks_sdk-0.49.0 → databricks_sdk-0.51.0}/DCO +0 -0
  28. {databricks_sdk-0.49.0 → databricks_sdk-0.51.0}/LICENSE +0 -0
  29. {databricks_sdk-0.49.0 → databricks_sdk-0.51.0}/MANIFEST.in +0 -0
  30. {databricks_sdk-0.49.0 → databricks_sdk-0.51.0}/Makefile +0 -0
  31. {databricks_sdk-0.49.0 → databricks_sdk-0.51.0}/NOTICE +0 -0
  32. {databricks_sdk-0.49.0 → databricks_sdk-0.51.0}/SECURITY.md +0 -0
  33. {databricks_sdk-0.49.0 → databricks_sdk-0.51.0}/databricks/__init__.py +0 -0
  34. {databricks_sdk-0.49.0 → databricks_sdk-0.51.0}/databricks/sdk/_base_client.py +0 -0
  35. {databricks_sdk-0.49.0 → databricks_sdk-0.51.0}/databricks/sdk/_property.py +0 -0
  36. {databricks_sdk-0.49.0 → databricks_sdk-0.51.0}/databricks/sdk/_widgets/__init__.py +0 -0
  37. {databricks_sdk-0.49.0 → databricks_sdk-0.51.0}/databricks/sdk/_widgets/default_widgets_utils.py +0 -0
  38. {databricks_sdk-0.49.0 → databricks_sdk-0.51.0}/databricks/sdk/_widgets/ipywidgets_utils.py +0 -0
  39. {databricks_sdk-0.49.0 → databricks_sdk-0.51.0}/databricks/sdk/azure.py +0 -0
  40. {databricks_sdk-0.49.0 → databricks_sdk-0.51.0}/databricks/sdk/casing.py +0 -0
  41. {databricks_sdk-0.49.0 → databricks_sdk-0.51.0}/databricks/sdk/clock.py +0 -0
  42. {databricks_sdk-0.49.0 → databricks_sdk-0.51.0}/databricks/sdk/core.py +0 -0
  43. {databricks_sdk-0.49.0 → databricks_sdk-0.51.0}/databricks/sdk/data_plane.py +0 -0
  44. {databricks_sdk-0.49.0 → databricks_sdk-0.51.0}/databricks/sdk/dbutils.py +0 -0
  45. {databricks_sdk-0.49.0 → databricks_sdk-0.51.0}/databricks/sdk/environments.py +0 -0
  46. {databricks_sdk-0.49.0 → databricks_sdk-0.51.0}/databricks/sdk/errors/__init__.py +0 -0
  47. {databricks_sdk-0.49.0 → databricks_sdk-0.51.0}/databricks/sdk/errors/base.py +0 -0
  48. {databricks_sdk-0.49.0 → databricks_sdk-0.51.0}/databricks/sdk/errors/customizer.py +0 -0
  49. {databricks_sdk-0.49.0 → databricks_sdk-0.51.0}/databricks/sdk/errors/deserializer.py +0 -0
  50. {databricks_sdk-0.49.0 → databricks_sdk-0.51.0}/databricks/sdk/errors/details.py +0 -0
  51. {databricks_sdk-0.49.0 → databricks_sdk-0.51.0}/databricks/sdk/errors/mapper.py +0 -0
  52. {databricks_sdk-0.49.0 → databricks_sdk-0.51.0}/databricks/sdk/errors/overrides.py +0 -0
  53. {databricks_sdk-0.49.0 → databricks_sdk-0.51.0}/databricks/sdk/errors/parser.py +0 -0
  54. {databricks_sdk-0.49.0 → databricks_sdk-0.51.0}/databricks/sdk/errors/platform.py +0 -0
  55. {databricks_sdk-0.49.0 → databricks_sdk-0.51.0}/databricks/sdk/errors/private_link.py +0 -0
  56. {databricks_sdk-0.49.0 → databricks_sdk-0.51.0}/databricks/sdk/errors/sdk.py +0 -0
  57. {databricks_sdk-0.49.0 → databricks_sdk-0.51.0}/databricks/sdk/logger/__init__.py +0 -0
  58. {databricks_sdk-0.49.0 → databricks_sdk-0.51.0}/databricks/sdk/logger/round_trip_logger.py +0 -0
  59. {databricks_sdk-0.49.0 → databricks_sdk-0.51.0}/databricks/sdk/mixins/__init__.py +0 -0
  60. {databricks_sdk-0.49.0 → databricks_sdk-0.51.0}/databricks/sdk/mixins/compute.py +0 -0
  61. {databricks_sdk-0.49.0 → databricks_sdk-0.51.0}/databricks/sdk/mixins/files.py +0 -0
  62. {databricks_sdk-0.49.0 → databricks_sdk-0.51.0}/databricks/sdk/mixins/jobs.py +0 -0
  63. {databricks_sdk-0.49.0 → databricks_sdk-0.51.0}/databricks/sdk/mixins/open_ai_client.py +0 -0
  64. {databricks_sdk-0.49.0 → databricks_sdk-0.51.0}/databricks/sdk/mixins/workspace.py +0 -0
  65. {databricks_sdk-0.49.0 → databricks_sdk-0.51.0}/databricks/sdk/oauth.py +0 -0
  66. {databricks_sdk-0.49.0 → databricks_sdk-0.51.0}/databricks/sdk/py.typed +0 -0
  67. {databricks_sdk-0.49.0 → databricks_sdk-0.51.0}/databricks/sdk/retries.py +0 -0
  68. {databricks_sdk-0.49.0 → databricks_sdk-0.51.0}/databricks/sdk/runtime/__init__.py +0 -0
  69. {databricks_sdk-0.49.0 → databricks_sdk-0.51.0}/databricks/sdk/runtime/dbutils_stub.py +0 -0
  70. {databricks_sdk-0.49.0 → databricks_sdk-0.51.0}/databricks/sdk/service/__init__.py +0 -0
  71. {databricks_sdk-0.49.0 → databricks_sdk-0.51.0}/databricks/sdk/service/_internal.py +0 -0
  72. {databricks_sdk-0.49.0 → databricks_sdk-0.51.0}/databricks/sdk/service/files.py +0 -0
  73. {databricks_sdk-0.49.0 → databricks_sdk-0.51.0}/databricks/sdk/service/iam.py +0 -0
  74. {databricks_sdk-0.49.0 → databricks_sdk-0.51.0}/databricks/sdk/service/marketplace.py +0 -0
  75. {databricks_sdk-0.49.0 → databricks_sdk-0.51.0}/databricks/sdk/service/provisioning.py +0 -0
  76. {databricks_sdk-0.49.0 → databricks_sdk-0.51.0}/databricks/sdk/service/sharing.py +0 -0
  77. {databricks_sdk-0.49.0 → databricks_sdk-0.51.0}/databricks/sdk/service/workspace.py +0 -0
  78. {databricks_sdk-0.49.0 → databricks_sdk-0.51.0}/databricks/sdk/useragent.py +0 -0
  79. {databricks_sdk-0.49.0 → databricks_sdk-0.51.0}/databricks_sdk.egg-info/dependency_links.txt +0 -0
  80. {databricks_sdk-0.49.0 → databricks_sdk-0.51.0}/databricks_sdk.egg-info/requires.txt +0 -0
  81. {databricks_sdk-0.49.0 → databricks_sdk-0.51.0}/databricks_sdk.egg-info/top_level.txt +0 -0
  82. {databricks_sdk-0.49.0 → databricks_sdk-0.51.0}/pyproject.toml +0 -0
  83. {databricks_sdk-0.49.0 → databricks_sdk-0.51.0}/setup.cfg +0 -0
@@ -1,5 +1,87 @@
  # Version changelog
 
+ ## Release v0.51.0
+
+ ### New Features and Improvements
+ * Enabled asynchronous token refreshes by default. A new `disable_async_token_refresh` configuration option has been added to allow disabling this feature if necessary ([#952](https://github.com/databricks/databricks-sdk-py/pull/952)).
+   To disable asynchronous token refresh, set the environment variable `DATABRICKS_DISABLE_ASYNC_TOKEN_REFRESH=true` or set the option on your configuration object (a short sketch follows this diff).
+   The previous `enable_experimental_async_token_refresh` option has been removed, as asynchronous refresh is now the default behavior.
+ * Introduced support for Databricks Workload Identity Federation in GitHub workflows ([#933](https://github.com/databricks/databricks-sdk-py/pull/933)).
+   See README.md for instructions.
+ * [Breaking] Users who run their workflows in GitHub Actions, use cloud-native authentication, and also have the `DATABRICKS_CLIENT_ID` and `DATABRICKS_HOST` environment variables set may see their authentication start failing due to the order in which the SDK tries different authentication methods.
+
+ ### API Changes
+ * Added [w.alerts_v2](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/sql/alerts_v2.html) workspace-level service.
+ * Added `update_ncc_azure_private_endpoint_rule_public()` method for [a.network_connectivity](https://databricks-sdk-py.readthedocs.io/en/latest/account/settings/network_connectivity.html) account-level service.
+ * Added `update_endpoint_budget_policy()` and `update_endpoint_custom_tags()` methods for [w.vector_search_endpoints](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/vectorsearch/vector_search_endpoints.html) workspace-level service.
+ * Added `created_at`, `created_by` and `metastore_id` fields for `databricks.sdk.service.catalog.SetArtifactAllowlist`.
+ * Added `node_type_flexibility` field for `databricks.sdk.service.compute.EditInstancePool`.
+ * Added `page_size` and `page_token` fields for `databricks.sdk.service.compute.GetEvents`.
+ * Added `next_page_token` and `prev_page_token` fields for `databricks.sdk.service.compute.GetEventsResponse`.
+ * Added `node_type_flexibility` field for `databricks.sdk.service.compute.GetInstancePool`.
+ * Added `node_type_flexibility` field for `databricks.sdk.service.compute.InstancePoolAndStats`.
+ * Added `effective_performance_target` field for `databricks.sdk.service.jobs.RepairHistoryItem`.
+ * Added `performance_target` field for `databricks.sdk.service.jobs.RepairRun`.
+ * [Breaking] Added `network_connectivity_config` field for `databricks.sdk.service.settings.CreateNetworkConnectivityConfigRequest`.
+ * [Breaking] Added `private_endpoint_rule` field for `databricks.sdk.service.settings.CreatePrivateEndpointRuleRequest`.
+ * Added `domain_names` field for `databricks.sdk.service.settings.NccAzurePrivateEndpointRule`.
+ * Added `auto_resolve_display_name` field for `databricks.sdk.service.sql.CreateAlertRequest`.
+ * Added `auto_resolve_display_name` field for `databricks.sdk.service.sql.CreateQueryRequest`.
+ * Added `budget_policy_id` field for `databricks.sdk.service.vectorsearch.CreateEndpoint`.
+ * Added `custom_tags` and `effective_budget_policy_id` fields for `databricks.sdk.service.vectorsearch.EndpointInfo`.
+ * Added `create_clean_room`, `execute_clean_room_task` and `modify_clean_room` enum values for `databricks.sdk.service.catalog.Privilege`.
+ * Added `dns_resolution_error` and `gcp_denied_by_org_policy` enum values for `databricks.sdk.service.compute.TerminationReasonCode`.
+ * Added `disabled` enum value for `databricks.sdk.service.jobs.TerminationCodeCode`.
+ * Added `expired` enum value for `databricks.sdk.service.settings.NccAzurePrivateEndpointRuleConnectionState`.
+ * [Breaking] Changed `create_network_connectivity_configuration()` and `create_private_endpoint_rule()` methods for [a.network_connectivity](https://databricks-sdk-py.readthedocs.io/en/latest/account/settings/network_connectivity.html) account-level service with a new required argument order.
+ * [Breaking] Changed `create_index()` method for [w.vector_search_indexes](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/vectorsearch/vector_search_indexes.html) workspace-level service to return `databricks.sdk.service.vectorsearch.VectorIndex` dataclass.
+ * [Breaking] Changed `delete_data_vector_index()` method for [w.vector_search_indexes](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/vectorsearch/vector_search_indexes.html) workspace-level service. The HTTP method/verb has changed.
+ * [Breaking] Changed `delete_data_vector_index()` method for [w.vector_search_indexes](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/vectorsearch/vector_search_indexes.html) workspace-level service with a new required argument order.
+ * [Breaking] Changed `databricks.sdk.service.vectorsearch.List` dataclass to.
+ * [Breaking] Changed `workload_size` field for `databricks.sdk.service.serving.ServedModelInput` to type `str` dataclass.
+ * [Breaking] Changed `group_id` field for `databricks.sdk.service.settings.NccAzurePrivateEndpointRule` to type `str` dataclass.
+ * [Breaking] Changed `target_services` field for `databricks.sdk.service.settings.NccAzureServiceEndpointRule` to type `databricks.sdk.service.settings.EgressResourceTypeList` dataclass.
+ * [Breaking] Changed `data_array` field for `databricks.sdk.service.vectorsearch.ResultData` to type `databricks.sdk.service.vectorsearch.ListValueList` dataclass.
+ * [Breaking] Changed waiter for [VectorSearchEndpointsAPI.create_endpoint](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/vectorsearch/vector_search_endpoints.html#databricks.sdk.service.vectorsearch.VectorSearchEndpointsAPI.create_endpoint) method.
+ * [Breaking] Removed `name` and `region` fields for `databricks.sdk.service.settings.CreateNetworkConnectivityConfigRequest`.
+ * [Breaking] Removed `group_id` and `resource_id` fields for `databricks.sdk.service.settings.CreatePrivateEndpointRuleRequest`.
+ * [Breaking] Removed `null_value` field for `databricks.sdk.service.vectorsearch.Value`.
+ * [Breaking] Removed `large`, `medium` and `small` enum values for `databricks.sdk.service.serving.ServedModelInputWorkloadSize`.
+ * [Breaking] Removed `blob`, `dfs`, `mysql_server` and `sql_server` enum values for `databricks.sdk.service.settings.NccAzurePrivateEndpointRuleGroupId`.
+
+
+ ## Release v0.50.0
+
+ ### API Changes
+ * Added the [w.enable_export_notebook](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/settings/settings/enable_export_notebook.html), [w.enable_notebook_table_clipboard](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/settings/settings/enable_notebook_table_clipboard.html) and [w.enable_results_downloading](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/settings/settings/enable_results_downloading.html) workspace-level services.
+ * Added `get_credentials_for_trace_data_download()` and `get_credentials_for_trace_data_upload()` methods for [w.experiments](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/ml/experiments.html) workspace-level service.
+ * Added `get_download_full_query_result()` method for [w.genie](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/dashboards/genie.html) workspace-level service.
+ * Added `get_published_dashboard_token_info()` method for [w.lakeview_embedded](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/dashboards/lakeview_embedded.html) workspace-level service.
+ * Added `binding_workspace_ids` field for `databricks.sdk.service.billing.BudgetPolicy`.
+ * Added `download_id` field for `databricks.sdk.service.dashboards.GenieGenerateDownloadFullQueryResultResponse`.
+ * Added `dashboard_output` field for `databricks.sdk.service.jobs.RunOutput`.
+ * Added `dashboard_task` and `power_bi_task` fields for `databricks.sdk.service.jobs.RunTask`.
+ * Added `dashboard_task` and `power_bi_task` fields for `databricks.sdk.service.jobs.SubmitTask`.
+ * Added `dashboard_task` and `power_bi_task` fields for `databricks.sdk.service.jobs.Task`.
+ * Added `include_features` field for `databricks.sdk.service.ml.CreateForecastingExperimentRequest`.
+ * Added `models` field for `databricks.sdk.service.ml.LogInputs`.
+ * Added `dataset_digest`, `dataset_name` and `model_id` fields for `databricks.sdk.service.ml.LogMetric`.
+ * Added `dataset_digest`, `dataset_name`, `model_id` and `run_id` fields for `databricks.sdk.service.ml.Metric`.
+ * Added `model_inputs` field for `databricks.sdk.service.ml.RunInputs`.
+ * Added `client_application` field for `databricks.sdk.service.sql.QueryInfo`.
+ * Added `geography` and `geometry` enum values for `databricks.sdk.service.catalog.ColumnTypeName`.
+ * Added `allocation_timeout_no_healthy_and_warmed_up_clusters`, `docker_container_creation_exception`, `docker_image_too_large_for_instance_exception` and `docker_invalid_os_exception` enum values for `databricks.sdk.service.compute.TerminationReasonCode`.
+ * Added `standard` enum value for `databricks.sdk.service.jobs.PerformanceTarget`.
+ * Added `can_view` enum value for `databricks.sdk.service.sql.WarehousePermissionLevel`.
+ * [Breaking] Changed `generate_download_full_query_result()` method for [w.genie](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/dashboards/genie.html) workspace-level service. The method path has changed.
+ * [Breaking] Changed waiter for [CommandExecutionAPI.create](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/compute/command_execution.html#databricks.sdk.service.compute.CommandExecutionAPI.create) method.
+ * [Breaking] Changed waiter for [CommandExecutionAPI.execute](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/compute/command_execution.html#databricks.sdk.service.compute.CommandExecutionAPI.execute) method.
+ * [Breaking] Removed `error`, `status` and `transient_statement_id` fields for `databricks.sdk.service.dashboards.GenieGenerateDownloadFullQueryResultResponse`.
+ * [Breaking] Removed `balanced` and `cost_optimized` enum values for `databricks.sdk.service.jobs.PerformanceTarget`.
+ * [Breaking] Removed [PipelinesAPI.wait_get_pipeline_running](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/pipelines/pipelines.html#databricks.sdk.service.pipelines.PipelinesAPI.wait_get_pipeline_running) method.
+
+
  ## Release v0.49.0
 
  ### API Changes
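The `disable_async_token_refresh` option called out above can be set on the client configuration as well as through the environment. A minimal sketch, assuming a hypothetical host and token:

```python
from databricks.sdk import WorkspaceClient
from databricks.sdk.core import Config

# Hypothetical credentials; opts back out of the asynchronous token
# refresh that v0.51.0 enables by default.
cfg = Config(
    host="https://my-workspace.cloud.databricks.com",
    token="<personal-access-token>",
    disable_async_token_refresh=True,
)
w = WorkspaceClient(config=cfg)
```

Exporting `DATABRICKS_DISABLE_ASYNC_TOKEN_REFRESH=true` achieves the same effect without code changes.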
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: databricks-sdk
- Version: 0.49.0
+ Version: 0.51.0
  Summary: Databricks SDK for Python (Beta)
  Project-URL: Documentation, https://databricks-sdk-py.readthedocs.io
  Keywords: databricks,sdk
@@ -180,18 +180,18 @@ Depending on the Databricks authentication method, the SDK uses the following in
 
  ### Databricks native authentication
 
- By default, the Databricks SDK for Python initially tries [Databricks token authentication](https://docs.databricks.com/dev-tools/api/latest/authentication.html) (`auth_type='pat'` argument). If the SDK is unsuccessful, it then tries Databricks basic (username/password) authentication (`auth_type="basic"` argument).
+ By default, the Databricks SDK for Python initially tries [Databricks token authentication](https://docs.databricks.com/dev-tools/api/latest/authentication.html) (`auth_type='pat'` argument). If the SDK is unsuccessful, it then tries Databricks Workload Identity Federation (WIF) authentication using OIDC (`auth_type="github-oidc"` argument).
 
  - For Databricks token authentication, you must provide `host` and `token`; or their environment variable or `.databrickscfg` file field equivalents.
- - For Databricks basic authentication, you must provide `host`, `username`, and `password` _(for AWS workspace-level operations)_; or `host`, `account_id`, `username`, and `password` _(for AWS, Azure, or GCP account-level operations)_; or their environment variable or `.databrickscfg` file field equivalents.
-
- | Argument | Description | Environment variable |
- |--------------|-------------|-------------------|
- | `host` | _(String)_ The Databricks host URL for either the Databricks workspace endpoint or the Databricks accounts endpoint. | `DATABRICKS_HOST` |
- | `account_id` | _(String)_ The Databricks account ID for the Databricks accounts endpoint. Only has effect when `Host` is either `https://accounts.cloud.databricks.com/` _(AWS)_, `https://accounts.azuredatabricks.net/` _(Azure)_, or `https://accounts.gcp.databricks.com/` _(GCP)_. | `DATABRICKS_ACCOUNT_ID` |
- | `token` | _(String)_ The Databricks personal access token (PAT) _(AWS, Azure, and GCP)_ or Azure Active Directory (Azure AD) token _(Azure)_. | `DATABRICKS_TOKEN` |
- | `username` | _(String)_ The Databricks username part of basic authentication. Only possible when `Host` is `*.cloud.databricks.com` _(AWS)_. | `DATABRICKS_USERNAME` |
- | `password` | _(String)_ The Databricks password part of basic authentication. Only possible when `Host` is `*.cloud.databricks.com` _(AWS)_. | `DATABRICKS_PASSWORD` |
+ - For Databricks OIDC authentication, you must provide `host`, `client_id`, and optionally `token_audience`, either directly, through the corresponding environment variables, or in your `.databrickscfg` configuration file.
+
+ | Argument | Description | Environment variable |
+ |------------------|-------------|-------------------------|
+ | `host` | _(String)_ The Databricks host URL for either the Databricks workspace endpoint or the Databricks accounts endpoint. | `DATABRICKS_HOST` |
+ | `account_id` | _(String)_ The Databricks account ID for the Databricks accounts endpoint. Only has effect when `Host` is either `https://accounts.cloud.databricks.com/` _(AWS)_, `https://accounts.azuredatabricks.net/` _(Azure)_, or `https://accounts.gcp.databricks.com/` _(GCP)_. | `DATABRICKS_ACCOUNT_ID` |
+ | `token` | _(String)_ The Databricks personal access token (PAT) _(AWS, Azure, and GCP)_ or Azure Active Directory (Azure AD) token _(Azure)_. | `DATABRICKS_TOKEN` |
+ | `client_id` | _(String)_ The Databricks service principal application ID. | `DATABRICKS_CLIENT_ID` |
+ | `token_audience` | _(String)_ When using Workload Identity Federation, the audience to specify when fetching an ID token from the ID token supplier. | `DATABRICKS_TOKEN_AUDIENCE` |
 
  For example, to use Databricks token authentication:
 
@@ -126,18 +126,18 @@ Depending on the Databricks authentication method, the SDK uses the following in
 
  ### Databricks native authentication
 
- By default, the Databricks SDK for Python initially tries [Databricks token authentication](https://docs.databricks.com/dev-tools/api/latest/authentication.html) (`auth_type='pat'` argument). If the SDK is unsuccessful, it then tries Databricks basic (username/password) authentication (`auth_type="basic"` argument).
+ By default, the Databricks SDK for Python initially tries [Databricks token authentication](https://docs.databricks.com/dev-tools/api/latest/authentication.html) (`auth_type='pat'` argument). If the SDK is unsuccessful, it then tries Databricks Workload Identity Federation (WIF) authentication using OIDC (`auth_type="github-oidc"` argument).
 
  - For Databricks token authentication, you must provide `host` and `token`; or their environment variable or `.databrickscfg` file field equivalents.
- - For Databricks basic authentication, you must provide `host`, `username`, and `password` _(for AWS workspace-level operations)_; or `host`, `account_id`, `username`, and `password` _(for AWS, Azure, or GCP account-level operations)_; or their environment variable or `.databrickscfg` file field equivalents.
-
- | Argument | Description | Environment variable |
- |--------------|-------------|-------------------|
- | `host` | _(String)_ The Databricks host URL for either the Databricks workspace endpoint or the Databricks accounts endpoint. | `DATABRICKS_HOST` |
- | `account_id` | _(String)_ The Databricks account ID for the Databricks accounts endpoint. Only has effect when `Host` is either `https://accounts.cloud.databricks.com/` _(AWS)_, `https://accounts.azuredatabricks.net/` _(Azure)_, or `https://accounts.gcp.databricks.com/` _(GCP)_. | `DATABRICKS_ACCOUNT_ID` |
- | `token` | _(String)_ The Databricks personal access token (PAT) _(AWS, Azure, and GCP)_ or Azure Active Directory (Azure AD) token _(Azure)_. | `DATABRICKS_TOKEN` |
- | `username` | _(String)_ The Databricks username part of basic authentication. Only possible when `Host` is `*.cloud.databricks.com` _(AWS)_. | `DATABRICKS_USERNAME` |
- | `password` | _(String)_ The Databricks password part of basic authentication. Only possible when `Host` is `*.cloud.databricks.com` _(AWS)_. | `DATABRICKS_PASSWORD` |
+ - For Databricks OIDC authentication, you must provide `host`, `client_id`, and optionally `token_audience`, either directly, through the corresponding environment variables, or in your `.databrickscfg` configuration file.
+
+ | Argument | Description | Environment variable |
+ |------------------|-------------|-------------------------|
+ | `host` | _(String)_ The Databricks host URL for either the Databricks workspace endpoint or the Databricks accounts endpoint. | `DATABRICKS_HOST` |
+ | `account_id` | _(String)_ The Databricks account ID for the Databricks accounts endpoint. Only has effect when `Host` is either `https://accounts.cloud.databricks.com/` _(AWS)_, `https://accounts.azuredatabricks.net/` _(Azure)_, or `https://accounts.gcp.databricks.com/` _(GCP)_. | `DATABRICKS_ACCOUNT_ID` |
+ | `token` | _(String)_ The Databricks personal access token (PAT) _(AWS, Azure, and GCP)_ or Azure Active Directory (Azure AD) token _(Azure)_. | `DATABRICKS_TOKEN` |
+ | `client_id` | _(String)_ The Databricks service principal application ID. | `DATABRICKS_CLIENT_ID` |
+ | `token_audience` | _(String)_ When using Workload Identity Federation, the audience to specify when fetching an ID token from the ID token supplier. | `DATABRICKS_TOKEN_AUDIENCE` |
 
  For example, to use Databricks token authentication:
 
@@ -86,19 +86,21 @@ from databricks.sdk.service.settings import (
      AibiDashboardEmbeddingApprovedDomainsAPI, AutomaticClusterUpdateAPI,
      ComplianceSecurityProfileAPI, CredentialsManagerAPI,
      CspEnablementAccountAPI, DefaultNamespaceAPI, DisableLegacyAccessAPI,
-     DisableLegacyDbfsAPI, DisableLegacyFeaturesAPI, EnableIpAccessListsAPI,
-     EnhancedSecurityMonitoringAPI, EsmEnablementAccountAPI, IpAccessListsAPI,
-     NetworkConnectivityAPI, NotificationDestinationsAPI, PersonalComputeAPI,
+     DisableLegacyDbfsAPI, DisableLegacyFeaturesAPI, EnableExportNotebookAPI,
+     EnableIpAccessListsAPI, EnableNotebookTableClipboardAPI,
+     EnableResultsDownloadingAPI, EnhancedSecurityMonitoringAPI,
+     EsmEnablementAccountAPI, IpAccessListsAPI, NetworkConnectivityAPI,
+     NotificationDestinationsAPI, PersonalComputeAPI,
      RestrictWorkspaceAdminsAPI, SettingsAPI, TokenManagementAPI, TokensAPI,
      WorkspaceConfAPI)
  from databricks.sdk.service.sharing import (ProvidersAPI,
                                              RecipientActivationAPI,
                                              RecipientsAPI, SharesAPI)
  from databricks.sdk.service.sql import (AlertsAPI, AlertsLegacyAPI,
-                                         DashboardsAPI, DashboardWidgetsAPI,
-                                         DataSourcesAPI, DbsqlPermissionsAPI,
-                                         QueriesAPI, QueriesLegacyAPI,
-                                         QueryHistoryAPI,
+                                         AlertsV2API, DashboardsAPI,
+                                         DashboardWidgetsAPI, DataSourcesAPI,
+                                         DbsqlPermissionsAPI, QueriesAPI,
+                                         QueriesLegacyAPI, QueryHistoryAPI,
                                          QueryVisualizationsAPI,
                                          QueryVisualizationsLegacyAPI,
                                          RedashConfigAPI, StatementExecutionAPI,
@@ -168,6 +170,7 @@ class WorkspaceClient:
          product_version="0.0.0",
          credentials_strategy: Optional[CredentialsStrategy] = None,
          credentials_provider: Optional[CredentialsStrategy] = None,
+         token_audience: Optional[str] = None,
          config: Optional[client.Config] = None,
      ):
          if not config:
@@ -196,6 +199,7 @@ class WorkspaceClient:
              debug_headers=debug_headers,
              product=product,
              product_version=product_version,
+             token_audience=token_audience,
          )
          self._config = config.copy()
          self._dbutils = _make_dbutils(self._config)
@@ -205,6 +209,7 @@ class WorkspaceClient:
          self._account_access_control_proxy = service.iam.AccountAccessControlProxyAPI(self._api_client)
          self._alerts = service.sql.AlertsAPI(self._api_client)
          self._alerts_legacy = service.sql.AlertsLegacyAPI(self._api_client)
+         self._alerts_v2 = service.sql.AlertsV2API(self._api_client)
          self._apps = service.apps.AppsAPI(self._api_client)
          self._artifact_allowlists = service.catalog.ArtifactAllowlistsAPI(self._api_client)
          self._catalogs = service.catalog.CatalogsAPI(self._api_client)
@@ -287,7 +292,7 @@ class WorkspaceClient:
          self._service_principals = service.iam.ServicePrincipalsAPI(self._api_client)
          self._serving_endpoints = serving_endpoints
          serving_endpoints_data_plane_token_source = DataPlaneTokenSource(
-             self._config.host, self._config.oauth_token, not self._config.enable_experimental_async_token_refresh
+             self._config.host, self._config.oauth_token, self._config.disable_async_token_refresh
          )
          self._serving_endpoints_data_plane = service.serving.ServingEndpointsDataPlaneAPI(
              self._api_client, serving_endpoints, serving_endpoints_data_plane_token_source
@@ -344,6 +349,11 @@ class WorkspaceClient:
          """The alerts API can be used to perform CRUD operations on alerts."""
          return self._alerts_legacy
 
+     @property
+     def alerts_v2(self) -> service.sql.AlertsV2API:
+         """TODO: Add description."""
+         return self._alerts_v2
+
      @property
      def apps(self) -> service.apps.AppsAPI:
          """Apps run directly on a customer’s Databricks instance, integrate with their data, use and extend Databricks services, and enable users to interact through single sign-on."""
@@ -860,6 +870,7 @@ class AccountClient:
          product_version="0.0.0",
          credentials_strategy: Optional[CredentialsStrategy] = None,
          credentials_provider: Optional[CredentialsStrategy] = None,
+         token_audience: Optional[str] = None,
          config: Optional[client.Config] = None,
      ):
          if not config:
@@ -888,6 +899,7 @@ class AccountClient:
              debug_headers=debug_headers,
              product=product,
              product_version=product_version,
+             token_audience=token_audience,
          )
          self._config = config.copy()
          self._api_client = client.ApiClient(self._config)
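With `token_audience` now accepted by both `WorkspaceClient` and `AccountClient`, the Workload Identity Federation flow can be configured entirely in the constructor. A minimal sketch, assuming hypothetical host, client ID, and audience values inside a GitHub Actions job with `id-token: write` permission:

```python
from databricks.sdk import WorkspaceClient

# All values are hypothetical; the SDK fetches a GitHub OIDC token for
# `token_audience` and exchanges it for a Databricks OAuth token.
w = WorkspaceClient(
    host="https://my-workspace.cloud.databricks.com",
    client_id="<service-principal-application-id>",
    token_audience="<custom-token-audience>",  # optional; a default is derived
    auth_type="github-oidc",  # pin the strategy explicitly
)
print(w.current_user.me().user_name)
```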
@@ -61,6 +61,7 @@ class Config:
      host: str = ConfigAttribute(env="DATABRICKS_HOST")
      account_id: str = ConfigAttribute(env="DATABRICKS_ACCOUNT_ID")
      token: str = ConfigAttribute(env="DATABRICKS_TOKEN", auth="pat", sensitive=True)
+     token_audience: str = ConfigAttribute(env="DATABRICKS_TOKEN_AUDIENCE", auth="github-oidc")
      username: str = ConfigAttribute(env="DATABRICKS_USERNAME", auth="basic")
      password: str = ConfigAttribute(env="DATABRICKS_PASSWORD", auth="basic", sensitive=True)
      client_id: str = ConfigAttribute(env="DATABRICKS_CLIENT_ID", auth="oauth")
@@ -95,9 +96,7 @@ class Config:
      max_connections_per_pool: int = ConfigAttribute()
      databricks_environment: Optional[DatabricksEnvironment] = None
 
-     enable_experimental_async_token_refresh: bool = ConfigAttribute(
-         env="DATABRICKS_ENABLE_EXPERIMENTAL_ASYNC_TOKEN_REFRESH"
-     )
+     disable_async_token_refresh: bool = ConfigAttribute(env="DATABRICKS_DISABLE_ASYNC_TOKEN_REFRESH")
 
      enable_experimental_files_api_client: bool = ConfigAttribute(env="DATABRICKS_ENABLE_EXPERIMENTAL_FILES_API_CLIENT")
      files_api_client_download_max_total_recovers = None
@@ -23,6 +23,7 @@ from google.oauth2 import service_account  # type: ignore
  from .azure import add_sp_management_token, add_workspace_id_header
  from .oauth import (ClientCredentials, OAuthClient, Refreshable, Token,
                      TokenCache, TokenSource)
+ from .oidc_token_supplier import GitHubOIDCTokenSupplier
 
  CredentialsProvider = Callable[[], Dict[str, str]]
 
@@ -191,7 +192,7 @@ def oauth_service_principal(cfg: "Config") -> Optional[CredentialsProvider]:
          token_url=oidc.token_endpoint,
          scopes=["all-apis"],
          use_header=True,
-         disable_async=not cfg.enable_experimental_async_token_refresh,
+         disable_async=cfg.disable_async_token_refresh,
      )
 
      def inner() -> Dict[str, str]:
@@ -291,7 +292,7 @@ def azure_service_principal(cfg: "Config") -> CredentialsProvider:
          token_url=f"{aad_endpoint}{cfg.azure_tenant_id}/oauth2/token",
          endpoint_params={"resource": resource},
          use_params=True,
-         disable_async=not cfg.enable_experimental_async_token_refresh,
+         disable_async=cfg.disable_async_token_refresh,
      )
 
      _ensure_host_present(cfg, token_source_for)
@@ -314,6 +315,58 @@ def azure_service_principal(cfg: "Config") -> CredentialsProvider:
      return OAuthCredentialsProvider(refreshed_headers, token)
 
 
+ @oauth_credentials_strategy("github-oidc", ["host", "client_id"])
+ def databricks_wif(cfg: "Config") -> Optional[CredentialsProvider]:
+     """
+     DatabricksWIFCredentials uses a Token Supplier to get a JWT Token and exchanges
+     it for a Databricks Token.
+
+     Supported suppliers:
+     - GitHub OIDC
+     """
+     supplier = GitHubOIDCTokenSupplier()
+
+     audience = cfg.token_audience
+     if audience is None and cfg.is_account_client:
+         audience = cfg.account_id
+     if audience is None and not cfg.is_account_client:
+         audience = cfg.oidc_endpoints.token_endpoint
+
+     # Try to get an ID token. If no supplier returns a token, we cannot use this authentication mode.
+     id_token = supplier.get_oidc_token(audience)
+     if not id_token:
+         return None
+
+     def token_source_for(audience: str) -> TokenSource:
+         id_token = supplier.get_oidc_token(audience)
+         if not id_token:
+             # Should not happen, since we checked it above.
+             raise Exception("Cannot get OIDC token")
+         params = {
+             "subject_token_type": "urn:ietf:params:oauth:token-type:jwt",
+             "subject_token": id_token,
+             "grant_type": "urn:ietf:params:oauth:grant-type:token-exchange",
+         }
+         return ClientCredentials(
+             client_id=cfg.client_id,
+             client_secret="",  # we have no (rotatable) secrets in the OIDC flow
+             token_url=cfg.oidc_endpoints.token_endpoint,
+             endpoint_params=params,
+             scopes=["all-apis"],
+             use_params=True,
+             disable_async=cfg.disable_async_token_refresh,
+         )
+
+     def refreshed_headers() -> Dict[str, str]:
+         token = token_source_for(audience).token()
+         return {"Authorization": f"{token.token_type} {token.access_token}"}
+
+     def token() -> Token:
+         return token_source_for(audience).token()
+
+     return OAuthCredentialsProvider(refreshed_headers, token)
+
+
  @oauth_credentials_strategy("github-oidc-azure", ["host", "azure_client_id"])
  def github_oidc_azure(cfg: "Config") -> Optional[CredentialsProvider]:
      if "ACTIONS_ID_TOKEN_REQUEST_TOKEN" not in os.environ:
@@ -325,16 +378,8 @@ def github_oidc_azure(cfg: "Config") -> Optional[CredentialsProvider]:
      if not cfg.is_azure:
          return None
 
-     # See https://docs.github.com/en/actions/deployment/security-hardening-your-deployments/configuring-openid-connect-in-cloud-providers
-     headers = {"Authorization": f"Bearer {os.environ['ACTIONS_ID_TOKEN_REQUEST_TOKEN']}"}
-     endpoint = f"{os.environ['ACTIONS_ID_TOKEN_REQUEST_URL']}&audience=api://AzureADTokenExchange"
-     response = requests.get(endpoint, headers=headers)
-     if not response.ok:
-         return None
-
-     # get the ID Token with aud=api://AzureADTokenExchange sub=repo:org/repo:environment:name
-     response_json = response.json()
-     if "value" not in response_json:
+     token = GitHubOIDCTokenSupplier().get_oidc_token("api://AzureADTokenExchange")
+     if not token:
          return None
 
      logger.info(
@@ -344,7 +389,7 @@ def github_oidc_azure(cfg: "Config") -> Optional[CredentialsProvider]:
      params = {
          "client_assertion_type": "urn:ietf:params:oauth:client-assertion-type:jwt-bearer",
          "resource": cfg.effective_azure_login_app_id,
-         "client_assertion": response_json["value"],
+         "client_assertion": token,
      }
      aad_endpoint = cfg.arm_environment.active_directory_endpoint
      if not cfg.azure_tenant_id:
@@ -357,7 +402,7 @@ def github_oidc_azure(cfg: "Config") -> Optional[CredentialsProvider]:
          token_url=f"{aad_endpoint}{cfg.azure_tenant_id}/oauth2/token",
          endpoint_params=params,
          use_params=True,
-         disable_async=not cfg.enable_experimental_async_token_refresh,
+         disable_async=cfg.disable_async_token_refresh,
      )
 
      def refreshed_headers() -> Dict[str, str]:
@@ -694,7 +739,7 @@ class DatabricksCliTokenSource(CliTokenSource):
          token_type_field="token_type",
          access_token_field="access_token",
          expiry_field="expiry",
-         disable_async=not cfg.enable_experimental_async_token_refresh,
+         disable_async=cfg.disable_async_token_refresh,
      )
 
      @staticmethod
@@ -927,6 +972,7 @@ class DefaultCredentials:
          basic_auth,
          metadata_service,
          oauth_service_principal,
+         databricks_wif,
          azure_service_principal,
          github_oidc_azure,
          azure_cli,
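Because `databricks_wif` now runs before the Azure strategies in the `DefaultCredentials` chain, environments that define both `DATABRICKS_CLIENT_ID` and `DATABRICKS_HOST` may resolve to a different method than before (the breaking change noted in the changelog). A hedged sketch of the simplest mitigation, pinning the method explicitly:

```python
from databricks.sdk import WorkspaceClient

# In a GitHub Actions job where DATABRICKS_TOKEN, DATABRICKS_CLIENT_ID and
# DATABRICKS_HOST are all set, pinning auth_type removes any dependence on
# the credential-chain ordering.
w = WorkspaceClient(auth_type="pat")
```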
@@ -0,0 +1,28 @@
+ import os
+ from typing import Optional
+
+ import requests
+
+
+ class GitHubOIDCTokenSupplier:
+     """
+     Supplies OIDC tokens from GitHub Actions.
+     """
+
+     def get_oidc_token(self, audience: str) -> Optional[str]:
+         if "ACTIONS_ID_TOKEN_REQUEST_TOKEN" not in os.environ or "ACTIONS_ID_TOKEN_REQUEST_URL" not in os.environ:
+             # Not running in GitHub Actions.
+             return None
+         # See https://docs.github.com/en/actions/deployment/security-hardening-your-deployments/configuring-openid-connect-in-cloud-providers
+         headers = {"Authorization": f"Bearer {os.environ['ACTIONS_ID_TOKEN_REQUEST_TOKEN']}"}
+         endpoint = f"{os.environ['ACTIONS_ID_TOKEN_REQUEST_URL']}&audience={audience}"
+         response = requests.get(endpoint, headers=headers)
+         if not response.ok:
+             return None
+
+         # Get the ID token (aud=<audience>, sub=repo:org/repo:environment:name) from the response.
+         response_json = response.json()
+         if "value" not in response_json:
+             return None
+
+         return response_json["value"]
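The new supplier can also be used on its own; the sketch below mirrors the call that `github_oidc_azure` now makes, using the Azure exchange audience from the diff above:

```python
from databricks.sdk.oidc_token_supplier import GitHubOIDCTokenSupplier

# Returns None outside GitHub Actions, or when the workflow lacks the
# `id-token: write` permission; otherwise a JWT minted for the audience.
token = GitHubOIDCTokenSupplier().get_oidc_token("api://AzureADTokenExchange")
if token is None:
    raise RuntimeError("no GitHub OIDC token available")
```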
@@ -1173,12 +1173,12 @@ class AppsAPI:
              attempt += 1
          raise TimeoutError(f"timed out after {timeout}: {status_message}")
 
-     def create(self, *, app: Optional[App] = None, no_compute: Optional[bool] = None) -> Wait[App]:
+     def create(self, app: App, *, no_compute: Optional[bool] = None) -> Wait[App]:
          """Create an app.
 
          Creates a new app.
 
-         :param app: :class:`App` (optional)
+         :param app: :class:`App`
          :param no_compute: bool (optional)
              If true, the app will not be started after creation.
 
@@ -1198,9 +1198,7 @@ class AppsAPI:
          op_response = self._api.do("POST", "/api/2.0/apps", query=query, body=body, headers=headers)
          return Wait(self.wait_get_app_active, response=App.from_dict(op_response), name=op_response["name"])
 
-     def create_and_wait(
-         self, *, app: Optional[App] = None, no_compute: Optional[bool] = None, timeout=timedelta(minutes=20)
-     ) -> App:
+     def create_and_wait(self, app: App, *, no_compute: Optional[bool] = None, timeout=timedelta(minutes=20)) -> App:
          return self.create(app=app, no_compute=no_compute).result(timeout=timeout)
 
      def delete(self, name: str) -> App:
@@ -1221,14 +1219,14 @@ class AppsAPI:
          res = self._api.do("DELETE", f"/api/2.0/apps/{name}", headers=headers)
          return App.from_dict(res)
 
-     def deploy(self, app_name: str, *, app_deployment: Optional[AppDeployment] = None) -> Wait[AppDeployment]:
+     def deploy(self, app_name: str, app_deployment: AppDeployment) -> Wait[AppDeployment]:
          """Create an app deployment.
 
          Creates an app deployment for the app with the supplied name.
 
          :param app_name: str
              The name of the app.
-         :param app_deployment: :class:`AppDeployment` (optional)
+         :param app_deployment: :class:`AppDeployment`
 
          :returns:
              Long-running operation waiter for :class:`AppDeployment`.
@@ -1249,7 +1247,7 @@ class AppsAPI:
          )
 
      def deploy_and_wait(
-         self, app_name: str, *, app_deployment: Optional[AppDeployment] = None, timeout=timedelta(minutes=20)
+         self, app_name: str, app_deployment: AppDeployment, timeout=timedelta(minutes=20)
      ) -> AppDeployment:
          return self.deploy(app_deployment=app_deployment, app_name=app_name).result(timeout=timeout)
 
@@ -1466,7 +1464,7 @@ class AppsAPI:
      def stop_and_wait(self, name: str, timeout=timedelta(minutes=20)) -> App:
          return self.stop(name=name).result(timeout=timeout)
 
-     def update(self, name: str, *, app: Optional[App] = None) -> App:
+     def update(self, name: str, app: App) -> App:
          """Update an app.
 
          Updates the app with the supplied name.
@@ -1474,7 +1472,7 @@ class AppsAPI:
          :param name: str
              The name of the app. The name must contain only lowercase alphanumeric characters and hyphens. It
              must be unique within the workspace.
-         :param app: :class:`App` (optional)
+         :param app: :class:`App`
 
          :returns: :class:`App`
          """
@@ -364,6 +364,10 @@ class BudgetConfigurationFilterWorkspaceIdClause:
  class BudgetPolicy:
      """Contains the BudgetPolicy details."""
 
+     binding_workspace_ids: Optional[List[int]] = None
+     """List of workspaces that this budget policy will be exclusively bound to. An empty binding
+     implies that this budget policy is open to any workspace in the account."""
+
      custom_tags: Optional[List[compute.CustomPolicyTag]] = None
      """A list of tags defined by the customer. At most 20 entries are allowed per policy."""
 
@@ -378,6 +382,8 @@ class BudgetPolicy:
      def as_dict(self) -> dict:
          """Serializes the BudgetPolicy into a dictionary suitable for use as a JSON request body."""
          body = {}
+         if self.binding_workspace_ids:
+             body["binding_workspace_ids"] = [v for v in self.binding_workspace_ids]
          if self.custom_tags:
              body["custom_tags"] = [v.as_dict() for v in self.custom_tags]
          if self.policy_id is not None:
@@ -389,6 +395,8 @@ class BudgetPolicy:
      def as_shallow_dict(self) -> dict:
          """Serializes the BudgetPolicy into a shallow dictionary of its immediate attributes."""
          body = {}
+         if self.binding_workspace_ids:
+             body["binding_workspace_ids"] = self.binding_workspace_ids
          if self.custom_tags:
              body["custom_tags"] = self.custom_tags
          if self.policy_id is not None:
@@ -401,6 +409,7 @@ class BudgetPolicy:
      def from_dict(cls, d: Dict[str, Any]) -> BudgetPolicy:
          """Deserializes the BudgetPolicy from a dictionary."""
          return cls(
+             binding_workspace_ids=d.get("binding_workspace_ids", None),
              custom_tags=_repeated_dict(d, "custom_tags", compute.CustomPolicyTag),
              policy_id=d.get("policy_id", None),
              policy_name=d.get("policy_name", None),
@@ -1864,7 +1873,7 @@ class BudgetPolicyAPI:
              query["page_token"] = json["next_page_token"]
 
      def update(
-         self, policy_id: str, *, limit_config: Optional[LimitConfig] = None, policy: Optional[BudgetPolicy] = None
+         self, policy_id: str, policy: BudgetPolicy, *, limit_config: Optional[LimitConfig] = None
      ) -> BudgetPolicy:
          """Update a budget policy.
 
@@ -1872,10 +1881,10 @@
          :param policy_id: str
              The Id of the policy. This field is generated by Databricks and globally unique.
+         :param policy: :class:`BudgetPolicy`
+             Contains the BudgetPolicy details.
          :param limit_config: :class:`LimitConfig` (optional)
              DEPRECATED. This is redundant field as LimitConfig is part of the BudgetPolicy
-         :param policy: :class:`BudgetPolicy` (optional)
-             Contains the BudgetPolicy details.
 
          :returns: :class:`BudgetPolicy`
          """