databricks-sdk 0.28.0__tar.gz → 0.30.0__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of databricks-sdk has been flagged as possibly problematic.

Files changed (86)
  1. {databricks-sdk-0.28.0 → databricks_sdk-0.30.0}/PKG-INFO +46 -7
  2. databricks-sdk-0.28.0/databricks_sdk.egg-info/PKG-INFO → databricks_sdk-0.30.0/README.md +24 -31
  3. {databricks-sdk-0.28.0 → databricks_sdk-0.30.0}/databricks/sdk/__init__.py +74 -22
  4. {databricks-sdk-0.28.0 → databricks_sdk-0.30.0}/databricks/sdk/config.py +89 -48
  5. {databricks-sdk-0.28.0 → databricks_sdk-0.30.0}/databricks/sdk/core.py +38 -9
  6. {databricks-sdk-0.28.0 → databricks_sdk-0.30.0}/databricks/sdk/credentials_provider.py +134 -57
  7. databricks_sdk-0.30.0/databricks/sdk/data_plane.py +65 -0
  8. {databricks-sdk-0.28.0 → databricks_sdk-0.30.0}/databricks/sdk/dbutils.py +81 -3
  9. {databricks-sdk-0.28.0 → databricks_sdk-0.30.0}/databricks/sdk/mixins/files.py +12 -4
  10. {databricks-sdk-0.28.0 → databricks_sdk-0.30.0}/databricks/sdk/oauth.py +8 -6
  11. databricks_sdk-0.30.0/databricks/sdk/service/apps.py +977 -0
  12. {databricks-sdk-0.28.0 → databricks_sdk-0.30.0}/databricks/sdk/service/billing.py +602 -218
  13. {databricks-sdk-0.28.0 → databricks_sdk-0.30.0}/databricks/sdk/service/catalog.py +263 -62
  14. {databricks-sdk-0.28.0 → databricks_sdk-0.30.0}/databricks/sdk/service/compute.py +515 -94
  15. databricks_sdk-0.30.0/databricks/sdk/service/dashboards.py +1775 -0
  16. {databricks-sdk-0.28.0 → databricks_sdk-0.30.0}/databricks/sdk/service/iam.py +99 -88
  17. {databricks-sdk-0.28.0 → databricks_sdk-0.30.0}/databricks/sdk/service/jobs.py +159 -166
  18. {databricks-sdk-0.28.0 → databricks_sdk-0.30.0}/databricks/sdk/service/marketplace.py +74 -58
  19. {databricks-sdk-0.28.0 → databricks_sdk-0.30.0}/databricks/sdk/service/oauth2.py +149 -70
  20. {databricks-sdk-0.28.0 → databricks_sdk-0.30.0}/databricks/sdk/service/pipelines.py +73 -53
  21. {databricks-sdk-0.28.0 → databricks_sdk-0.30.0}/databricks/sdk/service/serving.py +332 -694
  22. {databricks-sdk-0.28.0 → databricks_sdk-0.30.0}/databricks/sdk/service/settings.py +424 -4
  23. {databricks-sdk-0.28.0 → databricks_sdk-0.30.0}/databricks/sdk/service/sharing.py +235 -26
  24. {databricks-sdk-0.28.0 → databricks_sdk-0.30.0}/databricks/sdk/service/sql.py +2484 -553
  25. {databricks-sdk-0.28.0 → databricks_sdk-0.30.0}/databricks/sdk/service/vectorsearch.py +75 -0
  26. databricks_sdk-0.30.0/databricks/sdk/useragent.py +144 -0
  27. databricks_sdk-0.30.0/databricks/sdk/version.py +1 -0
  28. databricks-sdk-0.28.0/README.md → databricks_sdk-0.30.0/databricks_sdk.egg-info/PKG-INFO +70 -0
  29. {databricks-sdk-0.28.0 → databricks_sdk-0.30.0}/databricks_sdk.egg-info/SOURCES.txt +23 -1
  30. {databricks-sdk-0.28.0 → databricks_sdk-0.30.0}/databricks_sdk.egg-info/requires.txt +12 -11
  31. {databricks-sdk-0.28.0 → databricks_sdk-0.30.0}/setup.py +1 -1
  32. databricks_sdk-0.30.0/tests/test_auth.py +283 -0
  33. databricks_sdk-0.30.0/tests/test_auth_manual_tests.py +62 -0
  34. databricks_sdk-0.30.0/tests/test_client.py +17 -0
  35. databricks_sdk-0.30.0/tests/test_compute_mixins.py +33 -0
  36. databricks_sdk-0.30.0/tests/test_config.py +116 -0
  37. databricks_sdk-0.30.0/tests/test_core.py +607 -0
  38. databricks_sdk-0.30.0/tests/test_data_plane.py +59 -0
  39. databricks_sdk-0.30.0/tests/test_dbfs_mixins.py +102 -0
  40. databricks_sdk-0.30.0/tests/test_dbutils.py +248 -0
  41. databricks_sdk-0.30.0/tests/test_environments.py +19 -0
  42. databricks_sdk-0.30.0/tests/test_errors.py +97 -0
  43. databricks_sdk-0.30.0/tests/test_init_file.py +17 -0
  44. databricks_sdk-0.30.0/tests/test_internal.py +51 -0
  45. databricks_sdk-0.30.0/tests/test_jobs.py +100 -0
  46. databricks_sdk-0.30.0/tests/test_metadata_service_auth.py +80 -0
  47. databricks_sdk-0.30.0/tests/test_misc.py +45 -0
  48. databricks_sdk-0.30.0/tests/test_oauth.py +29 -0
  49. databricks_sdk-0.30.0/tests/test_retries.py +63 -0
  50. databricks_sdk-0.30.0/tests/test_user_agent.py +42 -0
  51. databricks-sdk-0.28.0/databricks/sdk/service/dashboards.py +0 -467
  52. databricks-sdk-0.28.0/databricks/sdk/version.py +0 -1
  53. {databricks-sdk-0.28.0 → databricks_sdk-0.30.0}/LICENSE +0 -0
  54. {databricks-sdk-0.28.0 → databricks_sdk-0.30.0}/NOTICE +0 -0
  55. {databricks-sdk-0.28.0 → databricks_sdk-0.30.0}/databricks/__init__.py +0 -0
  56. {databricks-sdk-0.28.0 → databricks_sdk-0.30.0}/databricks/sdk/_property.py +0 -0
  57. {databricks-sdk-0.28.0 → databricks_sdk-0.30.0}/databricks/sdk/_widgets/__init__.py +0 -0
  58. {databricks-sdk-0.28.0 → databricks_sdk-0.30.0}/databricks/sdk/_widgets/default_widgets_utils.py +0 -0
  59. {databricks-sdk-0.28.0 → databricks_sdk-0.30.0}/databricks/sdk/_widgets/ipywidgets_utils.py +0 -0
  60. {databricks-sdk-0.28.0 → databricks_sdk-0.30.0}/databricks/sdk/azure.py +0 -0
  61. {databricks-sdk-0.28.0 → databricks_sdk-0.30.0}/databricks/sdk/casing.py +0 -0
  62. {databricks-sdk-0.28.0 → databricks_sdk-0.30.0}/databricks/sdk/clock.py +0 -0
  63. {databricks-sdk-0.28.0 → databricks_sdk-0.30.0}/databricks/sdk/environments.py +0 -0
  64. {databricks-sdk-0.28.0 → databricks_sdk-0.30.0}/databricks/sdk/errors/__init__.py +0 -0
  65. {databricks-sdk-0.28.0 → databricks_sdk-0.30.0}/databricks/sdk/errors/base.py +0 -0
  66. {databricks-sdk-0.28.0 → databricks_sdk-0.30.0}/databricks/sdk/errors/mapper.py +0 -0
  67. {databricks-sdk-0.28.0 → databricks_sdk-0.30.0}/databricks/sdk/errors/overrides.py +0 -0
  68. {databricks-sdk-0.28.0 → databricks_sdk-0.30.0}/databricks/sdk/errors/platform.py +0 -0
  69. {databricks-sdk-0.28.0 → databricks_sdk-0.30.0}/databricks/sdk/errors/private_link.py +0 -0
  70. {databricks-sdk-0.28.0 → databricks_sdk-0.30.0}/databricks/sdk/errors/sdk.py +0 -0
  71. {databricks-sdk-0.28.0 → databricks_sdk-0.30.0}/databricks/sdk/mixins/__init__.py +0 -0
  72. {databricks-sdk-0.28.0 → databricks_sdk-0.30.0}/databricks/sdk/mixins/compute.py +0 -0
  73. {databricks-sdk-0.28.0 → databricks_sdk-0.30.0}/databricks/sdk/mixins/workspace.py +0 -0
  74. {databricks-sdk-0.28.0 → databricks_sdk-0.30.0}/databricks/sdk/py.typed +0 -0
  75. {databricks-sdk-0.28.0 → databricks_sdk-0.30.0}/databricks/sdk/retries.py +0 -0
  76. {databricks-sdk-0.28.0 → databricks_sdk-0.30.0}/databricks/sdk/runtime/__init__.py +0 -0
  77. {databricks-sdk-0.28.0 → databricks_sdk-0.30.0}/databricks/sdk/runtime/dbutils_stub.py +0 -0
  78. {databricks-sdk-0.28.0 → databricks_sdk-0.30.0}/databricks/sdk/service/__init__.py +0 -0
  79. {databricks-sdk-0.28.0 → databricks_sdk-0.30.0}/databricks/sdk/service/_internal.py +0 -0
  80. {databricks-sdk-0.28.0 → databricks_sdk-0.30.0}/databricks/sdk/service/files.py +0 -0
  81. {databricks-sdk-0.28.0 → databricks_sdk-0.30.0}/databricks/sdk/service/ml.py +0 -0
  82. {databricks-sdk-0.28.0 → databricks_sdk-0.30.0}/databricks/sdk/service/provisioning.py +0 -0
  83. {databricks-sdk-0.28.0 → databricks_sdk-0.30.0}/databricks/sdk/service/workspace.py +0 -0
  84. {databricks-sdk-0.28.0 → databricks_sdk-0.30.0}/databricks_sdk.egg-info/dependency_links.txt +0 -0
  85. {databricks-sdk-0.28.0 → databricks_sdk-0.30.0}/databricks_sdk.egg-info/top_level.txt +0 -0
  86. {databricks-sdk-0.28.0 → databricks_sdk-0.30.0}/setup.cfg +0 -0
{databricks-sdk-0.28.0 → databricks_sdk-0.30.0}/PKG-INFO

@@ -1,13 +1,11 @@
  Metadata-Version: 2.1
  Name: databricks-sdk
- Version: 0.28.0
+ Version: 0.30.0
  Summary: Databricks SDK for Python (Beta)
  Home-page: https://databricks-sdk-py.readthedocs.io
  Author: Serge Smertin
  Author-email: serge.smertin@databricks.com
- License: UNKNOWN
  Keywords: databricks sdk
- Platform: UNKNOWN
  Classifier: Development Status :: 4 - Beta
  Classifier: Intended Audience :: Developers
  Classifier: Intended Audience :: Science/Research
@@ -22,10 +20,29 @@ Classifier: Programming Language :: Python :: 3.12
  Classifier: Operating System :: OS Independent
  Requires-Python: >=3.7
  Description-Content-Type: text/markdown
- Provides-Extra: dev
- Provides-Extra: notebook
  License-File: LICENSE
  License-File: NOTICE
+ Requires-Dist: requests<3,>=2.28.1
+ Requires-Dist: google-auth~=2.0
+ Provides-Extra: dev
+ Requires-Dist: pytest; extra == "dev"
+ Requires-Dist: pytest-cov; extra == "dev"
+ Requires-Dist: pytest-xdist; extra == "dev"
+ Requires-Dist: pytest-mock; extra == "dev"
+ Requires-Dist: yapf; extra == "dev"
+ Requires-Dist: pycodestyle; extra == "dev"
+ Requires-Dist: autoflake; extra == "dev"
+ Requires-Dist: isort; extra == "dev"
+ Requires-Dist: wheel; extra == "dev"
+ Requires-Dist: ipython; extra == "dev"
+ Requires-Dist: ipywidgets; extra == "dev"
+ Requires-Dist: requests-mock; extra == "dev"
+ Requires-Dist: pyfakefs; extra == "dev"
+ Requires-Dist: databricks-connect; extra == "dev"
+ Requires-Dist: pytest-rerunfailures; extra == "dev"
+ Provides-Extra: notebook
+ Requires-Dist: ipython<9,>=8; extra == "notebook"
+ Requires-Dist: ipywidgets<9,>=8; extra == "notebook"

  # Databricks SDK for Python (Beta)

@@ -59,6 +76,7 @@ The SDK's internal HTTP client is robust and handles failures on different level
  - [Long-running operations](#long-running-operations)
  - [Paginated responses](#paginated-responses)
  - [Single-sign-on with OAuth](#single-sign-on-sso-with-oauth)
+ - [User Agent Request Attribution](#user-agent-request-attribution)
  - [Error handling](#error-handling)
  - [Logging](#logging)
  - [Integration with `dbutils`](#interaction-with-dbutils)
@@ -537,6 +555,29 @@ logging.info(f'Created new custom app: '
  f'--client_secret {custom_app.client_secret}')
  ```

+ ## User Agent Request Attribution<a id="user-agent-request-attribution"></a>
+
+ The Databricks SDK for Python uses the `User-Agent` header to include request metadata along with each request. By default, this includes the version of the Python SDK, the version of the Python language used by your application, and the underlying operating system. To statically add additional metadata, you can use the `with_partner()` and `with_product()` functions in the `databricks.sdk.useragent` module. `with_partner()` can be used by partners to indicate that code using the Databricks SDK for Python should be attributed to a specific partner. Multiple partners can be registered at once. Partner names can contain letters, digits, `.`, `-`, `_` or `+`.
+
+ ```python
+ from databricks.sdk import useragent
+ useragent.with_partner("partner-abc")
+ useragent.with_partner("partner-xyz")
+ ```
+
+ `with_product()` can be used to define the name and version of the product that is built with the Databricks SDK for Python. The product name has the same restrictions as the partner name above, and the product version must be a valid [SemVer](https://semver.org/). Subsequent calls to `with_product()` replace the original product with the new user-specified one.
+
+ ```python
+ from databricks.sdk import useragent
+ useragent.with_product("databricks-example-product", "1.2.0")
+ ```
+
+ If both the `DATABRICKS_SDK_UPSTREAM` and `DATABRICKS_SDK_UPSTREAM_VERSION` environment variables are defined, these will also be included in the `User-Agent` header.
+
+ If additional metadata needs to be specified that isn't already supported by the above interfaces, you can use the `with_user_agent_extra()` function to register arbitrary key-value pairs to include in the user agent. Multiple values associated with the same key are allowed. Keys have the same restrictions as partner names above; values must satisfy the same restrictions or be valid SemVer strings.
+
+ Additional `User-Agent` information can also be associated with individual instances of `Config`. To add metadata to a specific `Config` instance, use its `with_user_agent_extra()` method.
+
  ## Error handling<a id="error-handling"></a>

  The Databricks SDK for Python provides a robust error-handling mechanism that allows developers to catch and handle API errors. When an error occurs, the SDK will raise an exception that contains information about the error, such as the HTTP status code, error message, and error details. Developers can catch these exceptions and handle them appropriately in their code.
@@ -636,5 +677,3 @@ API clients for all services are generated from specification files that are syn
  You are highly encouraged to pin the exact dependency version and read the [changelog](https://github.com/databricks/databricks-sdk-py/blob/main/CHANGELOG.md)
  where Databricks documents the changes. Databricks may have minor [documented](https://github.com/databricks/databricks-sdk-py/blob/main/CHANGELOG.md)
  backward-incompatible changes, such as renaming some type names to bring more consistency.
-
-
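The new README section above mentions `with_user_agent_extra()` without an example. A minimal sketch of how it might be used, based only on the description in the diff (the key/value strings below are placeholders, and the exact signatures are assumptions rather than something this diff confirms):

```python
from databricks.sdk import useragent
from databricks.sdk.core import Config

# Process-wide metadata: attached to every request made from this process.
useragent.with_user_agent_extra("integration", "1.4.0")

# Per-client metadata: only requests made through this Config carry it.
cfg = Config()  # resolves auth from the environment or ~/.databrickscfg
cfg.with_user_agent_extra("team", "data-platform")
```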
databricks-sdk-0.28.0/databricks_sdk.egg-info/PKG-INFO → databricks_sdk-0.30.0/README.md

@@ -1,32 +1,3 @@
- Metadata-Version: 2.1
- Name: databricks-sdk
- Version: 0.28.0
- Summary: Databricks SDK for Python (Beta)
- Home-page: https://databricks-sdk-py.readthedocs.io
- Author: Serge Smertin
- Author-email: serge.smertin@databricks.com
- License: UNKNOWN
- Keywords: databricks sdk
- Platform: UNKNOWN
- Classifier: Development Status :: 4 - Beta
- Classifier: Intended Audience :: Developers
- Classifier: Intended Audience :: Science/Research
- Classifier: Intended Audience :: System Administrators
- Classifier: License :: OSI Approved :: Apache Software License
- Classifier: Programming Language :: Python :: 3.7
- Classifier: Programming Language :: Python :: 3.8
- Classifier: Programming Language :: Python :: 3.9
- Classifier: Programming Language :: Python :: 3.10
- Classifier: Programming Language :: Python :: 3.11
- Classifier: Programming Language :: Python :: 3.12
- Classifier: Operating System :: OS Independent
- Requires-Python: >=3.7
- Description-Content-Type: text/markdown
- Provides-Extra: dev
- Provides-Extra: notebook
- License-File: LICENSE
- License-File: NOTICE
-
  # Databricks SDK for Python (Beta)

  [![PyPI - Downloads](https://img.shields.io/pypi/dw/databricks-sdk)](https://pypistats.org/packages/databricks-sdk)
@@ -59,6 +30,7 @@ The SDK's internal HTTP client is robust and handles failures on different level
  - [Long-running operations](#long-running-operations)
  - [Paginated responses](#paginated-responses)
  - [Single-sign-on with OAuth](#single-sign-on-sso-with-oauth)
+ - [User Agent Request Attribution](#user-agent-request-attribution)
  - [Error handling](#error-handling)
  - [Logging](#logging)
  - [Integration with `dbutils`](#interaction-with-dbutils)
@@ -537,6 +509,29 @@ logging.info(f'Created new custom app: '
  f'--client_secret {custom_app.client_secret}')
  ```

+ ## User Agent Request Attribution<a id="user-agent-request-attribution"></a>
+
+ The Databricks SDK for Python uses the `User-Agent` header to include request metadata along with each request. By default, this includes the version of the Python SDK, the version of the Python language used by your application, and the underlying operating system. To statically add additional metadata, you can use the `with_partner()` and `with_product()` functions in the `databricks.sdk.useragent` module. `with_partner()` can be used by partners to indicate that code using the Databricks SDK for Python should be attributed to a specific partner. Multiple partners can be registered at once. Partner names can contain letters, digits, `.`, `-`, `_` or `+`.
+
+ ```python
+ from databricks.sdk import useragent
+ useragent.with_partner("partner-abc")
+ useragent.with_partner("partner-xyz")
+ ```
+
+ `with_product()` can be used to define the name and version of the product that is built with the Databricks SDK for Python. The product name has the same restrictions as the partner name above, and the product version must be a valid [SemVer](https://semver.org/). Subsequent calls to `with_product()` replace the original product with the new user-specified one.
+
+ ```python
+ from databricks.sdk import useragent
+ useragent.with_product("databricks-example-product", "1.2.0")
+ ```
+
+ If both the `DATABRICKS_SDK_UPSTREAM` and `DATABRICKS_SDK_UPSTREAM_VERSION` environment variables are defined, these will also be included in the `User-Agent` header.
+
+ If additional metadata needs to be specified that isn't already supported by the above interfaces, you can use the `with_user_agent_extra()` function to register arbitrary key-value pairs to include in the user agent. Multiple values associated with the same key are allowed. Keys have the same restrictions as partner names above; values must satisfy the same restrictions or be valid SemVer strings.
+
+ Additional `User-Agent` information can also be associated with individual instances of `Config`. To add metadata to a specific `Config` instance, use its `with_user_agent_extra()` method.
+
  ## Error handling<a id="error-handling"></a>

  The Databricks SDK for Python provides a robust error-handling mechanism that allows developers to catch and handle API errors. When an error occurs, the SDK will raise an exception that contains information about the error, such as the HTTP status code, error message, and error details. Developers can catch these exceptions and handle them appropriately in their code.
@@ -636,5 +631,3 @@ API clients for all services are generated from specification files that are syn
  You are highly encouraged to pin the exact dependency version and read the [changelog](https://github.com/databricks/databricks-sdk-py/blob/main/CHANGELOG.md)
  where Databricks documents the changes. Databricks may have minor [documented](https://github.com/databricks/databricks-sdk-py/blob/main/CHANGELOG.md)
  backward-incompatible changes, such as renaming some type names to bring more consistency.
-
-
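The error-handling paragraph shown above is unchanged in this release; for readers skimming the diff, here is a minimal sketch of what catching those exceptions looks like (the cluster ID is a placeholder, and the exception names assume the `databricks.sdk.errors` module that ships with the SDK):

```python
from databricks.sdk import WorkspaceClient
from databricks.sdk.errors import DatabricksError, NotFound

w = WorkspaceClient()  # resolves auth from the environment or ~/.databrickscfg

try:
    cluster = w.clusters.get(cluster_id="1234-567890-abcdefgh")
    print(cluster.state)
except NotFound:
    # 404 responses map to specific exception classes such as NotFound.
    print("cluster does not exist")
except DatabricksError as err:
    # Base class for platform errors; carries the API error message.
    print(f"API call failed: {err}")
```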
{databricks-sdk-0.28.0 → databricks_sdk-0.30.0}/databricks/sdk/__init__.py

@@ -1,12 +1,13 @@
  import databricks.sdk.core as client
  import databricks.sdk.dbutils as dbutils
  from databricks.sdk import azure
- from databricks.sdk.credentials_provider import CredentialsProvider
+ from databricks.sdk.credentials_provider import CredentialsStrategy
  from databricks.sdk.mixins.compute import ClustersExt
  from databricks.sdk.mixins.files import DbfsExt
  from databricks.sdk.mixins.workspace import WorkspaceExt
+ from databricks.sdk.service.apps import AppsAPI
  from databricks.sdk.service.billing import (BillableUsageAPI, BudgetsAPI,
- LogDeliveryAPI)
+ LogDeliveryAPI, UsageDashboardsAPI)
  from databricks.sdk.service.catalog import (AccountMetastoreAssignmentsAPI,
  AccountMetastoresAPI,
  AccountStorageCredentialsAPI,
@@ -27,7 +28,7 @@ from databricks.sdk.service.compute import (ClusterPoliciesAPI, ClustersAPI,
  InstancePoolsAPI,
  InstanceProfilesAPI, LibrariesAPI,
  PolicyFamiliesAPI)
- from databricks.sdk.service.dashboards import LakeviewAPI
+ from databricks.sdk.service.dashboards import GenieAPI, LakeviewAPI
  from databricks.sdk.service.files import DbfsAPI, FilesAPI
  from databricks.sdk.service.iam import (AccountAccessControlAPI,
  AccountAccessControlProxyAPI,
@@ -55,7 +56,8 @@ from databricks.sdk.service.provisioning import (CredentialsAPI,
  NetworksAPI, PrivateAccessAPI,
  StorageAPI, VpcEndpointsAPI,
  Workspace, WorkspacesAPI)
- from databricks.sdk.service.serving import AppsAPI, ServingEndpointsAPI
+ from databricks.sdk.service.serving import (ServingEndpointsAPI,
+ ServingEndpointsDataPlaneAPI)
  from databricks.sdk.service.settings import (AccountIpAccessListsAPI,
  AccountSettingsAPI,
  AutomaticClusterUpdateAPI,
@@ -67,6 +69,7 @@ from databricks.sdk.service.settings import (AccountIpAccessListsAPI,
  EsmEnablementAccountAPI,
  IpAccessListsAPI,
  NetworkConnectivityAPI,
+ NotificationDestinationsAPI,
  PersonalComputeAPI,
  RestrictWorkspaceAdminsAPI,
  SettingsAPI, TokenManagementAPI,
@@ -74,11 +77,13 @@ from databricks.sdk.service.settings import (AccountIpAccessListsAPI,
  from databricks.sdk.service.sharing import (CleanRoomsAPI, ProvidersAPI,
  RecipientActivationAPI,
  RecipientsAPI, SharesAPI)
- from databricks.sdk.service.sql import (AlertsAPI, DashboardsAPI,
- DashboardWidgetsAPI, DataSourcesAPI,
- DbsqlPermissionsAPI, QueriesAPI,
+ from databricks.sdk.service.sql import (AlertsAPI, AlertsLegacyAPI,
+ DashboardsAPI, DashboardWidgetsAPI,
+ DataSourcesAPI, DbsqlPermissionsAPI,
+ QueriesAPI, QueriesLegacyAPI,
  QueryHistoryAPI,
  QueryVisualizationsAPI,
+ QueryVisualizationsLegacyAPI,
  StatementExecutionAPI, WarehousesAPI)
  from databricks.sdk.service.vectorsearch import (VectorSearchEndpointsAPI,
  VectorSearchIndexesAPI)
@@ -131,7 +136,8 @@ class WorkspaceClient:
  debug_headers: bool = None,
  product="unknown",
  product_version="0.0.0",
- credentials_provider: CredentialsProvider = None,
+ credentials_strategy: CredentialsStrategy = None,
+ credentials_provider: CredentialsStrategy = None,
  config: client.Config = None):
  if not config:
  config = client.Config(host=host,
@@ -152,6 +158,7 @@ class WorkspaceClient:
  cluster_id=cluster_id,
  google_credentials=google_credentials,
  google_service_account=google_service_account,
+ credentials_strategy=credentials_strategy,
  credentials_provider=credentials_provider,
  debug_truncate_bytes=debug_truncate_bytes,
  debug_headers=debug_headers,
@@ -160,8 +167,10 @@ class WorkspaceClient:
  self._config = config.copy()
  self._dbutils = _make_dbutils(self._config)
  self._api_client = client.ApiClient(self._config)
+ serving_endpoints = ServingEndpointsAPI(self._api_client)
  self._account_access_control_proxy = AccountAccessControlProxyAPI(self._api_client)
  self._alerts = AlertsAPI(self._api_client)
+ self._alerts_legacy = AlertsLegacyAPI(self._api_client)
  self._apps = AppsAPI(self._api_client)
  self._artifact_allowlists = ArtifactAllowlistsAPI(self._api_client)
  self._catalogs = CatalogsAPI(self._api_client)
@@ -186,6 +195,7 @@ class WorkspaceClient:
  self._external_locations = ExternalLocationsAPI(self._api_client)
  self._files = FilesAPI(self._api_client)
  self._functions = FunctionsAPI(self._api_client)
+ self._genie = GenieAPI(self._api_client)
  self._git_credentials = GitCredentialsAPI(self._api_client)
  self._global_init_scripts = GlobalInitScriptsAPI(self._api_client)
  self._grants = GrantsAPI(self._api_client)
@@ -199,6 +209,7 @@ class WorkspaceClient:
  self._metastores = MetastoresAPI(self._api_client)
  self._model_registry = ModelRegistryAPI(self._api_client)
  self._model_versions = ModelVersionsAPI(self._api_client)
+ self._notification_destinations = NotificationDestinationsAPI(self._api_client)
  self._online_tables = OnlineTablesAPI(self._api_client)
  self._permission_migration = PermissionMigrationAPI(self._api_client)
  self._permissions = PermissionsAPI(self._api_client)
@@ -215,8 +226,10 @@ class WorkspaceClient:
  self._providers = ProvidersAPI(self._api_client)
  self._quality_monitors = QualityMonitorsAPI(self._api_client)
  self._queries = QueriesAPI(self._api_client)
+ self._queries_legacy = QueriesLegacyAPI(self._api_client)
  self._query_history = QueryHistoryAPI(self._api_client)
  self._query_visualizations = QueryVisualizationsAPI(self._api_client)
+ self._query_visualizations_legacy = QueryVisualizationsLegacyAPI(self._api_client)
  self._recipient_activation = RecipientActivationAPI(self._api_client)
  self._recipients = RecipientsAPI(self._api_client)
  self._registered_models = RegisteredModelsAPI(self._api_client)
@@ -224,7 +237,8 @@ class WorkspaceClient:
  self._schemas = SchemasAPI(self._api_client)
  self._secrets = SecretsAPI(self._api_client)
  self._service_principals = ServicePrincipalsAPI(self._api_client)
- self._serving_endpoints = ServingEndpointsAPI(self._api_client)
+ self._serving_endpoints = serving_endpoints
+ self._serving_endpoints_data_plane = ServingEndpointsDataPlaneAPI(self._api_client, serving_endpoints)
  self._settings = SettingsAPI(self._api_client)
  self._shares = SharesAPI(self._api_client)
  self._statement_execution = StatementExecutionAPI(self._api_client)
@@ -265,6 +279,11 @@
  """The alerts API can be used to perform CRUD operations on alerts."""
  return self._alerts

+ @property
+ def alerts_legacy(self) -> AlertsLegacyAPI:
+ """The alerts API can be used to perform CRUD operations on alerts."""
+ return self._alerts_legacy
+
  @property
  def apps(self) -> AppsAPI:
  """Apps run directly on a customer’s Databricks instance, integrate with their data, use and extend Databricks services, and enable users to interact through single sign-on."""
@@ -385,6 +404,11 @@
  """Functions implement User-Defined Functions (UDFs) in Unity Catalog."""
  return self._functions

+ @property
+ def genie(self) -> GenieAPI:
+ """Genie provides a no-code experience for business users, powered by AI/BI."""
+ return self._genie
+
  @property
  def git_credentials(self) -> GitCredentialsAPI:
  """Registers personal access token for Databricks to do operations on behalf of the user."""
@@ -450,6 +474,11 @@
  """Databricks provides a hosted version of MLflow Model Registry in Unity Catalog."""
  return self._model_versions

+ @property
+ def notification_destinations(self) -> NotificationDestinationsAPI:
+ """The notification destinations API lets you programmatically manage a workspace's notification destinations."""
+ return self._notification_destinations
+
  @property
  def online_tables(self) -> OnlineTablesAPI:
  """Online tables provide lower latency and higher QPS access to data from Delta tables."""
@@ -457,7 +486,7 @@

  @property
  def permission_migration(self) -> PermissionMigrationAPI:
- """This spec contains undocumented permission migration APIs used in https://github.com/databrickslabs/ucx."""
+ """APIs for migrating acl permissions, used only by the ucx tool: https://github.com/databrickslabs/ucx."""
  return self._permission_migration

  @property
@@ -522,19 +551,29 @@

  @property
  def queries(self) -> QueriesAPI:
- """These endpoints are used for CRUD operations on query definitions."""
+ """The queries API can be used to perform CRUD operations on queries."""
  return self._queries

+ @property
+ def queries_legacy(self) -> QueriesLegacyAPI:
+ """These endpoints are used for CRUD operations on query definitions."""
+ return self._queries_legacy
+
  @property
  def query_history(self) -> QueryHistoryAPI:
- """Access the history of queries through SQL warehouses."""
+ """A service responsible for storing and retrieving the list of queries run against SQL endpoints, serverless compute, and DLT."""
  return self._query_history

  @property
  def query_visualizations(self) -> QueryVisualizationsAPI:
- """This is an evolving API that facilitates the addition and removal of vizualisations from existing queries within the Databricks Workspace."""
+ """This is an evolving API that facilitates the addition and removal of visualizations from existing queries in the Databricks Workspace."""
  return self._query_visualizations

+ @property
+ def query_visualizations_legacy(self) -> QueryVisualizationsLegacyAPI:
+ """This is an evolving API that facilitates the addition and removal of vizualisations from existing queries within the Databricks Workspace."""
+ return self._query_visualizations_legacy
+
  @property
  def recipient_activation(self) -> RecipientActivationAPI:
  """The Recipient Activation API is only applicable in the open sharing model where the recipient object has the authentication type of `TOKEN`."""
@@ -575,6 +614,11 @@
  """The Serving Endpoints API allows you to create, update, and delete model serving endpoints."""
  return self._serving_endpoints

+ @property
+ def serving_endpoints_data_plane(self) -> ServingEndpointsDataPlaneAPI:
+ """Serving endpoints DataPlane provides a set of operations to interact with data plane endpoints for Serving endpoints service."""
+ return self._serving_endpoints_data_plane
+
  @property
  def settings(self) -> SettingsAPI:
  """Workspace Settings API allows users to manage settings at the workspace level."""
@@ -700,7 +744,8 @@ class AccountClient:
  debug_headers: bool = None,
  product="unknown",
  product_version="0.0.0",
- credentials_provider: CredentialsProvider = None,
+ credentials_strategy: CredentialsStrategy = None,
+ credentials_provider: CredentialsStrategy = None,
  config: client.Config = None):
  if not config:
  config = client.Config(host=host,
@@ -721,6 +766,7 @@ class AccountClient:
  cluster_id=cluster_id,
  google_credentials=google_credentials,
  google_service_account=google_service_account,
+ credentials_strategy=credentials_strategy,
  credentials_provider=credentials_provider,
  debug_truncate_bytes=debug_truncate_bytes,
  debug_headers=debug_headers,
@@ -730,7 +776,6 @@ class AccountClient:
  self._api_client = client.ApiClient(self._config)
  self._access_control = AccountAccessControlAPI(self._api_client)
  self._billable_usage = BillableUsageAPI(self._api_client)
- self._budgets = BudgetsAPI(self._api_client)
  self._credentials = CredentialsAPI(self._api_client)
  self._custom_app_integration = CustomAppIntegrationAPI(self._api_client)
  self._encryption_keys = EncryptionKeysAPI(self._api_client)
@@ -749,10 +794,12 @@ class AccountClient:
  self._settings = AccountSettingsAPI(self._api_client)
  self._storage = StorageAPI(self._api_client)
  self._storage_credentials = AccountStorageCredentialsAPI(self._api_client)
+ self._usage_dashboards = UsageDashboardsAPI(self._api_client)
  self._users = AccountUsersAPI(self._api_client)
  self._vpc_endpoints = VpcEndpointsAPI(self._api_client)
  self._workspace_assignment = WorkspaceAssignmentAPI(self._api_client)
  self._workspaces = WorkspacesAPI(self._api_client)
+ self._budgets = BudgetsAPI(self._api_client)

  @property
  def config(self) -> client.Config:
@@ -772,11 +819,6 @@ class AccountClient:
  """This API allows you to download billable usage logs for the specified account and date range."""
  return self._billable_usage

- @property
- def budgets(self) -> BudgetsAPI:
- """These APIs manage budget configuration including notifications for exceeding a budget for a period."""
- return self._budgets
-
  @property
  def credentials(self) -> CredentialsAPI:
  """These APIs manage credential configurations for this workspace."""
@@ -784,7 +826,7 @@ class AccountClient:

  @property
  def custom_app_integration(self) -> CustomAppIntegrationAPI:
- """These APIs enable administrators to manage custom oauth app integrations, which is required for adding/using Custom OAuth App Integration like Tableau Cloud for Databricks in AWS cloud."""
+ """These APIs enable administrators to manage custom OAuth app integrations, which is required for adding/using Custom OAuth App Integration like Tableau Cloud for Databricks in AWS cloud."""
  return self._custom_app_integration

  @property
@@ -839,7 +881,7 @@ class AccountClient:

  @property
  def published_app_integration(self) -> PublishedAppIntegrationAPI:
- """These APIs enable administrators to manage published oauth app integrations, which is required for adding/using Published OAuth App Integration like Tableau Desktop for Databricks in AWS cloud."""
+ """These APIs enable administrators to manage published OAuth app integrations, which is required for adding/using Published OAuth App Integration like Tableau Desktop for Databricks in AWS cloud."""
  return self._published_app_integration

  @property
@@ -867,6 +909,11 @@ class AccountClient:
  """These APIs manage storage credentials for a particular metastore."""
  return self._storage_credentials

+ @property
+ def usage_dashboards(self) -> UsageDashboardsAPI:
+ """These APIs manage usage dashboards for this account."""
+ return self._usage_dashboards
+
  @property
  def users(self) -> AccountUsersAPI:
  """User identities recognized by Databricks and represented by email addresses."""
@@ -887,6 +934,11 @@ class AccountClient:
  """These APIs manage workspaces for this account."""
  return self._workspaces

+ @property
+ def budgets(self) -> BudgetsAPI:
+ """These APIs manage budget configurations for this account."""
+ return self._budgets
+
  def get_workspace_client(self, workspace: Workspace) -> WorkspaceClient:
  """Constructs a ``WorkspaceClient`` for the given workspace.
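Both constructors above now accept `credentials_strategy`, and the renamed `CredentialsStrategy` type is also accepted under the old `credentials_provider` keyword, which appears to be kept for backwards compatibility. A minimal sketch of plugging in a custom strategy, assuming `CredentialsStrategy` exposes `auth_type()` and `__call__(cfg)` returning a callable that produces per-request headers (an interface this diff does not show; the host, token, and class name below are placeholders):

```python
from databricks.sdk import WorkspaceClient
from databricks.sdk.credentials_provider import CredentialsStrategy


class StaticTokenStrategy(CredentialsStrategy):
    """Hypothetical strategy that always sends the same bearer token."""

    def __init__(self, token: str):
        self._token = token

    def auth_type(self) -> str:
        return "static-token"

    def __call__(self, cfg):
        # Return a callable invoked for each request to produce auth headers.
        def headers() -> dict:
            return {"Authorization": f"Bearer {self._token}"}

        return headers


# In 0.30.0 the strategy is passed under the new keyword; the old
# `credentials_provider` keyword still accepts the same object.
w = WorkspaceClient(host="https://example.cloud.databricks.com",
                    credentials_strategy=StaticTokenStrategy("dapi-example-token"))
```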