databricks-sdk 0.29.0__tar.gz → 0.31.0__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of databricks-sdk might be problematic. Click here for more details.

Files changed (85) hide show
  1. {databricks-sdk-0.29.0 → databricks_sdk-0.31.0}/PKG-INFO +46 -7
  2. databricks-sdk-0.29.0/databricks_sdk.egg-info/PKG-INFO → databricks_sdk-0.31.0/README.md +24 -31
  3. {databricks-sdk-0.29.0 → databricks_sdk-0.31.0}/databricks/sdk/__init__.py +89 -21
  4. {databricks-sdk-0.29.0 → databricks_sdk-0.31.0}/databricks/sdk/config.py +61 -75
  5. {databricks-sdk-0.29.0 → databricks_sdk-0.31.0}/databricks/sdk/core.py +16 -9
  6. {databricks-sdk-0.29.0 → databricks_sdk-0.31.0}/databricks/sdk/credentials_provider.py +15 -15
  7. databricks_sdk-0.31.0/databricks/sdk/data_plane.py +65 -0
  8. {databricks-sdk-0.29.0 → databricks_sdk-0.31.0}/databricks/sdk/errors/overrides.py +8 -0
  9. {databricks-sdk-0.29.0 → databricks_sdk-0.31.0}/databricks/sdk/errors/platform.py +5 -0
  10. {databricks-sdk-0.29.0 → databricks_sdk-0.31.0}/databricks/sdk/mixins/files.py +12 -4
  11. databricks_sdk-0.31.0/databricks/sdk/service/apps.py +977 -0
  12. {databricks-sdk-0.29.0 → databricks_sdk-0.31.0}/databricks/sdk/service/billing.py +602 -218
  13. {databricks-sdk-0.29.0 → databricks_sdk-0.31.0}/databricks/sdk/service/catalog.py +324 -34
  14. {databricks-sdk-0.29.0 → databricks_sdk-0.31.0}/databricks/sdk/service/compute.py +766 -81
  15. {databricks-sdk-0.29.0 → databricks_sdk-0.31.0}/databricks/sdk/service/dashboards.py +628 -18
  16. {databricks-sdk-0.29.0 → databricks_sdk-0.31.0}/databricks/sdk/service/iam.py +99 -88
  17. {databricks-sdk-0.29.0 → databricks_sdk-0.31.0}/databricks/sdk/service/jobs.py +332 -23
  18. {databricks-sdk-0.29.0 → databricks_sdk-0.31.0}/databricks/sdk/service/marketplace.py +2 -122
  19. {databricks-sdk-0.29.0 → databricks_sdk-0.31.0}/databricks/sdk/service/oauth2.py +127 -70
  20. {databricks-sdk-0.29.0 → databricks_sdk-0.31.0}/databricks/sdk/service/pipelines.py +72 -52
  21. {databricks-sdk-0.29.0 → databricks_sdk-0.31.0}/databricks/sdk/service/serving.py +303 -750
  22. {databricks-sdk-0.29.0 → databricks_sdk-0.31.0}/databricks/sdk/service/settings.py +423 -4
  23. {databricks-sdk-0.29.0 → databricks_sdk-0.31.0}/databricks/sdk/service/sharing.py +235 -25
  24. {databricks-sdk-0.29.0 → databricks_sdk-0.31.0}/databricks/sdk/service/sql.py +2328 -544
  25. databricks_sdk-0.31.0/databricks/sdk/useragent.py +151 -0
  26. databricks_sdk-0.31.0/databricks/sdk/version.py +1 -0
  27. databricks-sdk-0.29.0/README.md → databricks_sdk-0.31.0/databricks_sdk.egg-info/PKG-INFO +70 -0
  28. {databricks-sdk-0.29.0 → databricks_sdk-0.31.0}/databricks_sdk.egg-info/SOURCES.txt +23 -1
  29. databricks_sdk-0.31.0/tests/test_auth.py +283 -0
  30. databricks_sdk-0.31.0/tests/test_auth_manual_tests.py +62 -0
  31. databricks_sdk-0.31.0/tests/test_client.py +17 -0
  32. databricks_sdk-0.31.0/tests/test_compute_mixins.py +33 -0
  33. databricks_sdk-0.31.0/tests/test_config.py +116 -0
  34. databricks_sdk-0.31.0/tests/test_core.py +607 -0
  35. databricks_sdk-0.31.0/tests/test_data_plane.py +59 -0
  36. databricks_sdk-0.31.0/tests/test_dbfs_mixins.py +102 -0
  37. databricks_sdk-0.31.0/tests/test_dbutils.py +248 -0
  38. databricks_sdk-0.31.0/tests/test_environments.py +19 -0
  39. databricks_sdk-0.31.0/tests/test_errors.py +97 -0
  40. databricks_sdk-0.31.0/tests/test_init_file.py +17 -0
  41. databricks_sdk-0.31.0/tests/test_internal.py +51 -0
  42. databricks_sdk-0.31.0/tests/test_jobs.py +100 -0
  43. databricks_sdk-0.31.0/tests/test_metadata_service_auth.py +80 -0
  44. databricks_sdk-0.31.0/tests/test_misc.py +45 -0
  45. databricks_sdk-0.31.0/tests/test_oauth.py +29 -0
  46. databricks_sdk-0.31.0/tests/test_retries.py +63 -0
  47. databricks_sdk-0.31.0/tests/test_user_agent.py +42 -0
  48. databricks-sdk-0.29.0/databricks/sdk/version.py +0 -1
  49. {databricks-sdk-0.29.0 → databricks_sdk-0.31.0}/LICENSE +0 -0
  50. {databricks-sdk-0.29.0 → databricks_sdk-0.31.0}/NOTICE +0 -0
  51. {databricks-sdk-0.29.0 → databricks_sdk-0.31.0}/databricks/__init__.py +0 -0
  52. {databricks-sdk-0.29.0 → databricks_sdk-0.31.0}/databricks/sdk/_property.py +0 -0
  53. {databricks-sdk-0.29.0 → databricks_sdk-0.31.0}/databricks/sdk/_widgets/__init__.py +0 -0
  54. {databricks-sdk-0.29.0 → databricks_sdk-0.31.0}/databricks/sdk/_widgets/default_widgets_utils.py +0 -0
  55. {databricks-sdk-0.29.0 → databricks_sdk-0.31.0}/databricks/sdk/_widgets/ipywidgets_utils.py +0 -0
  56. {databricks-sdk-0.29.0 → databricks_sdk-0.31.0}/databricks/sdk/azure.py +0 -0
  57. {databricks-sdk-0.29.0 → databricks_sdk-0.31.0}/databricks/sdk/casing.py +0 -0
  58. {databricks-sdk-0.29.0 → databricks_sdk-0.31.0}/databricks/sdk/clock.py +0 -0
  59. {databricks-sdk-0.29.0 → databricks_sdk-0.31.0}/databricks/sdk/dbutils.py +0 -0
  60. {databricks-sdk-0.29.0 → databricks_sdk-0.31.0}/databricks/sdk/environments.py +0 -0
  61. {databricks-sdk-0.29.0 → databricks_sdk-0.31.0}/databricks/sdk/errors/__init__.py +0 -0
  62. {databricks-sdk-0.29.0 → databricks_sdk-0.31.0}/databricks/sdk/errors/base.py +0 -0
  63. {databricks-sdk-0.29.0 → databricks_sdk-0.31.0}/databricks/sdk/errors/mapper.py +0 -0
  64. {databricks-sdk-0.29.0 → databricks_sdk-0.31.0}/databricks/sdk/errors/private_link.py +0 -0
  65. {databricks-sdk-0.29.0 → databricks_sdk-0.31.0}/databricks/sdk/errors/sdk.py +0 -0
  66. {databricks-sdk-0.29.0 → databricks_sdk-0.31.0}/databricks/sdk/mixins/__init__.py +0 -0
  67. {databricks-sdk-0.29.0 → databricks_sdk-0.31.0}/databricks/sdk/mixins/compute.py +0 -0
  68. {databricks-sdk-0.29.0 → databricks_sdk-0.31.0}/databricks/sdk/mixins/workspace.py +0 -0
  69. {databricks-sdk-0.29.0 → databricks_sdk-0.31.0}/databricks/sdk/oauth.py +0 -0
  70. {databricks-sdk-0.29.0 → databricks_sdk-0.31.0}/databricks/sdk/py.typed +0 -0
  71. {databricks-sdk-0.29.0 → databricks_sdk-0.31.0}/databricks/sdk/retries.py +0 -0
  72. {databricks-sdk-0.29.0 → databricks_sdk-0.31.0}/databricks/sdk/runtime/__init__.py +0 -0
  73. {databricks-sdk-0.29.0 → databricks_sdk-0.31.0}/databricks/sdk/runtime/dbutils_stub.py +0 -0
  74. {databricks-sdk-0.29.0 → databricks_sdk-0.31.0}/databricks/sdk/service/__init__.py +0 -0
  75. {databricks-sdk-0.29.0 → databricks_sdk-0.31.0}/databricks/sdk/service/_internal.py +0 -0
  76. {databricks-sdk-0.29.0 → databricks_sdk-0.31.0}/databricks/sdk/service/files.py +0 -0
  77. {databricks-sdk-0.29.0 → databricks_sdk-0.31.0}/databricks/sdk/service/ml.py +0 -0
  78. {databricks-sdk-0.29.0 → databricks_sdk-0.31.0}/databricks/sdk/service/provisioning.py +0 -0
  79. {databricks-sdk-0.29.0 → databricks_sdk-0.31.0}/databricks/sdk/service/vectorsearch.py +0 -0
  80. {databricks-sdk-0.29.0 → databricks_sdk-0.31.0}/databricks/sdk/service/workspace.py +0 -0
  81. {databricks-sdk-0.29.0 → databricks_sdk-0.31.0}/databricks_sdk.egg-info/dependency_links.txt +0 -0
  82. {databricks-sdk-0.29.0 → databricks_sdk-0.31.0}/databricks_sdk.egg-info/requires.txt +11 -11
  83. {databricks-sdk-0.29.0 → databricks_sdk-0.31.0}/databricks_sdk.egg-info/top_level.txt +0 -0
  84. {databricks-sdk-0.29.0 → databricks_sdk-0.31.0}/setup.cfg +0 -0
  85. {databricks-sdk-0.29.0 → databricks_sdk-0.31.0}/setup.py +0 -0
@@ -1,13 +1,11 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: databricks-sdk
3
- Version: 0.29.0
3
+ Version: 0.31.0
4
4
  Summary: Databricks SDK for Python (Beta)
5
5
  Home-page: https://databricks-sdk-py.readthedocs.io
6
6
  Author: Serge Smertin
7
7
  Author-email: serge.smertin@databricks.com
8
- License: UNKNOWN
9
8
  Keywords: databricks sdk
10
- Platform: UNKNOWN
11
9
  Classifier: Development Status :: 4 - Beta
12
10
  Classifier: Intended Audience :: Developers
13
11
  Classifier: Intended Audience :: Science/Research
@@ -22,10 +20,29 @@ Classifier: Programming Language :: Python :: 3.12
22
20
  Classifier: Operating System :: OS Independent
23
21
  Requires-Python: >=3.7
24
22
  Description-Content-Type: text/markdown
25
- Provides-Extra: dev
26
- Provides-Extra: notebook
27
23
  License-File: LICENSE
28
24
  License-File: NOTICE
25
+ Requires-Dist: requests<3,>=2.28.1
26
+ Requires-Dist: google-auth~=2.0
27
+ Provides-Extra: dev
28
+ Requires-Dist: pytest; extra == "dev"
29
+ Requires-Dist: pytest-cov; extra == "dev"
30
+ Requires-Dist: pytest-xdist; extra == "dev"
31
+ Requires-Dist: pytest-mock; extra == "dev"
32
+ Requires-Dist: yapf; extra == "dev"
33
+ Requires-Dist: pycodestyle; extra == "dev"
34
+ Requires-Dist: autoflake; extra == "dev"
35
+ Requires-Dist: isort; extra == "dev"
36
+ Requires-Dist: wheel; extra == "dev"
37
+ Requires-Dist: ipython; extra == "dev"
38
+ Requires-Dist: ipywidgets; extra == "dev"
39
+ Requires-Dist: requests-mock; extra == "dev"
40
+ Requires-Dist: pyfakefs; extra == "dev"
41
+ Requires-Dist: databricks-connect; extra == "dev"
42
+ Requires-Dist: pytest-rerunfailures; extra == "dev"
43
+ Provides-Extra: notebook
44
+ Requires-Dist: ipython<9,>=8; extra == "notebook"
45
+ Requires-Dist: ipywidgets<9,>=8; extra == "notebook"
29
46
 
30
47
  # Databricks SDK for Python (Beta)
31
48
 
@@ -59,6 +76,7 @@ The SDK's internal HTTP client is robust and handles failures on different level
59
76
  - [Long-running operations](#long-running-operations)
60
77
  - [Paginated responses](#paginated-responses)
61
78
  - [Single-sign-on with OAuth](#single-sign-on-sso-with-oauth)
79
+ - [User Agent Request Attribution](#user-agent-request-attribution)
62
80
  - [Error handling](#error-handling)
63
81
  - [Logging](#logging)
64
82
  - [Integration with `dbutils`](#interaction-with-dbutils)
@@ -537,6 +555,29 @@ logging.info(f'Created new custom app: '
537
555
  f'--client_secret {custom_app.client_secret}')
538
556
  ```
539
557
 
558
+ ## User Agent Request Attribution<a id="user-agent-request-attribution"></a>
559
+
560
+ The Databricks SDK for Python uses the `User-Agent` header to include request metadata along with each request. By default, this includes the version of the Python SDK, the version of the Python language used by your application, and the underlying operating system. To statically add additional metadata, you can use the `with_partner()` and `with_product()` functions in the `databricks.sdk.useragent` module. `with_partner()` can be used by partners to indicate that code using the Databricks SDK for Python should be attributed to a specific partner. Multiple partners can be registered at once. Partner names can contain any letter, digit, `.`, `-`, `_` or `+`.
561
+
562
+ ```python
563
+ from databricks.sdk import useragent
564
+ useragent.with_partner("partner-abc")
565
+ useragent.with_partner("partner-xyz")
566
+ ```
567
+
568
+ `with_product()` can be used to define the name and version of the product that is built with the Databricks SDK for Python. The product name has the same restrictions as the partner name above, and the product version must be a valid [SemVer](https://semver.org/). Subsequent calls to `with_product()` replace the original product with the new user-specified one.
569
+
570
+ ```python
571
+ from databricks.sdk import useragent
572
+ useragent.with_product("databricks-example-product", "1.2.0")
573
+ ```
574
+
575
+ If both the `DATABRICKS_SDK_UPSTREAM` and `DATABRICKS_SDK_UPSTREAM_VERSION` environment variables are defined, these will also be included in the `User-Agent` header.
576
+
577
+ If additional metadata needs to be specified that isn't already supported by the above interfaces, you can use the `with_user_agent_extra()` function to register arbitrary key-value pairs to include in the user agent. Multiple values associated with the same key are allowed. Keys have the same restrictions as the partner name above. Values must be either as described above or SemVer strings.
578
+
579
+ Additional `User-Agent` information can be associated with different instances of `DatabricksConfig`. To add metadata to a specific instance of `DatabricksConfig`, use the `with_user_agent_extra()` method.
580
+
540
581
  ## Error handling<a id="error-handling"></a>
541
582
 
542
583
  The Databricks SDK for Python provides a robust error-handling mechanism that allows developers to catch and handle API errors. When an error occurs, the SDK will raise an exception that contains information about the error, such as the HTTP status code, error message, and error details. Developers can catch these exceptions and handle them appropriately in their code.
@@ -636,5 +677,3 @@ API clients for all services are generated from specification files that are syn
636
677
  You are highly encouraged to pin the exact dependency version and read the [changelog](https://github.com/databricks/databricks-sdk-py/blob/main/CHANGELOG.md)
637
678
  where Databricks documents the changes. Databricks may have minor [documented](https://github.com/databricks/databricks-sdk-py/blob/main/CHANGELOG.md)
638
679
  backward-incompatible changes, such as renaming some type names to bring more consistency.
639
-
640
-
@@ -1,32 +1,3 @@
1
- Metadata-Version: 2.1
2
- Name: databricks-sdk
3
- Version: 0.29.0
4
- Summary: Databricks SDK for Python (Beta)
5
- Home-page: https://databricks-sdk-py.readthedocs.io
6
- Author: Serge Smertin
7
- Author-email: serge.smertin@databricks.com
8
- License: UNKNOWN
9
- Keywords: databricks sdk
10
- Platform: UNKNOWN
11
- Classifier: Development Status :: 4 - Beta
12
- Classifier: Intended Audience :: Developers
13
- Classifier: Intended Audience :: Science/Research
14
- Classifier: Intended Audience :: System Administrators
15
- Classifier: License :: OSI Approved :: Apache Software License
16
- Classifier: Programming Language :: Python :: 3.7
17
- Classifier: Programming Language :: Python :: 3.8
18
- Classifier: Programming Language :: Python :: 3.9
19
- Classifier: Programming Language :: Python :: 3.10
20
- Classifier: Programming Language :: Python :: 3.11
21
- Classifier: Programming Language :: Python :: 3.12
22
- Classifier: Operating System :: OS Independent
23
- Requires-Python: >=3.7
24
- Description-Content-Type: text/markdown
25
- Provides-Extra: dev
26
- Provides-Extra: notebook
27
- License-File: LICENSE
28
- License-File: NOTICE
29
-
30
1
  # Databricks SDK for Python (Beta)
31
2
 
32
3
  [![PyPI - Downloads](https://img.shields.io/pypi/dw/databricks-sdk)](https://pypistats.org/packages/databricks-sdk)
@@ -59,6 +30,7 @@ The SDK's internal HTTP client is robust and handles failures on different level
59
30
  - [Long-running operations](#long-running-operations)
60
31
  - [Paginated responses](#paginated-responses)
61
32
  - [Single-sign-on with OAuth](#single-sign-on-sso-with-oauth)
33
+ - [User Agent Request Attribution](#user-agent-request-attribution)
62
34
  - [Error handling](#error-handling)
63
35
  - [Logging](#logging)
64
36
  - [Integration with `dbutils`](#interaction-with-dbutils)
@@ -537,6 +509,29 @@ logging.info(f'Created new custom app: '
537
509
  f'--client_secret {custom_app.client_secret}')
538
510
  ```
539
511
 
512
+ ## User Agent Request Attribution<a id="user-agent-request-attribution"></a>
513
+
514
+ The Databricks SDK for Python uses the `User-Agent` header to include request metadata along with each request. By default, this includes the version of the Python SDK, the version of the Python language used by your application, and the underlying operating system. To statically add additional metadata, you can use the `with_partner()` and `with_product()` functions in the `databricks.sdk.useragent` module. `with_partner()` can be used by partners to indicate that code using the Databricks SDK for Python should be attributed to a specific partner. Multiple partners can be registered at once. Partner names can contain any letter, digit, `.`, `-`, `_` or `+`.
515
+
516
+ ```python
517
+ from databricks.sdk import useragent
518
+ useragent.with_partner("partner-abc")
519
+ useragent.with_partner("partner-xyz")
520
+ ```
521
+
522
+ `with_product()` can be used to define the name and version of the product that is built with the Databricks SDK for Python. The product name has the same restrictions as the partner name above, and the product version must be a valid [SemVer](https://semver.org/). Subsequent calls to `with_product()` replace the original product with the new user-specified one.
523
+
524
+ ```python
525
+ from databricks.sdk import useragent
526
+ useragent.with_product("databricks-example-product", "1.2.0")
527
+ ```
528
+
529
+ If both the `DATABRICKS_SDK_UPSTREAM` and `DATABRICKS_SDK_UPSTREAM_VERSION` environment variables are defined, these will also be included in the `User-Agent` header.
530
+
531
+ If additional metadata needs to be specified that isn't already supported by the above interfaces, you can use the `with_user_agent_extra()` function to register arbitrary key-value pairs to include in the user agent. Multiple values associated with the same key are allowed. Keys have the same restrictions as the partner name above. Values must be either as described above or SemVer strings.
532
+
533
+ Additional `User-Agent` information can be associated with different instances of `DatabricksConfig`. To add metadata to a specific instance of `DatabricksConfig`, use the `with_user_agent_extra()` method.
534
+
540
535
  ## Error handling<a id="error-handling"></a>
541
536
 
542
537
  The Databricks SDK for Python provides a robust error-handling mechanism that allows developers to catch and handle API errors. When an error occurs, the SDK will raise an exception that contains information about the error, such as the HTTP status code, error message, and error details. Developers can catch these exceptions and handle them appropriately in their code.
@@ -636,5 +631,3 @@ API clients for all services are generated from specification files that are syn
636
631
  You are highly encouraged to pin the exact dependency version and read the [changelog](https://github.com/databricks/databricks-sdk-py/blob/main/CHANGELOG.md)
637
632
  where Databricks documents the changes. Databricks may have minor [documented](https://github.com/databricks/databricks-sdk-py/blob/main/CHANGELOG.md)
638
633
  backward-incompatible changes, such as renaming some type names to bring more consistency.
639
-
640
-
@@ -5,8 +5,9 @@ from databricks.sdk.credentials_provider import CredentialsStrategy
5
5
  from databricks.sdk.mixins.compute import ClustersExt
6
6
  from databricks.sdk.mixins.files import DbfsExt
7
7
  from databricks.sdk.mixins.workspace import WorkspaceExt
8
+ from databricks.sdk.service.apps import AppsAPI
8
9
  from databricks.sdk.service.billing import (BillableUsageAPI, BudgetsAPI,
9
- LogDeliveryAPI)
10
+ LogDeliveryAPI, UsageDashboardsAPI)
10
11
  from databricks.sdk.service.catalog import (AccountMetastoreAssignmentsAPI,
11
12
  AccountMetastoresAPI,
12
13
  AccountStorageCredentialsAPI,
@@ -16,7 +17,8 @@ from databricks.sdk.service.catalog import (AccountMetastoreAssignmentsAPI,
16
17
  GrantsAPI, MetastoresAPI,
17
18
  ModelVersionsAPI, OnlineTablesAPI,
18
19
  QualityMonitorsAPI,
19
- RegisteredModelsAPI, SchemasAPI,
20
+ RegisteredModelsAPI,
21
+ ResourceQuotasAPI, SchemasAPI,
20
22
  StorageCredentialsAPI,
21
23
  SystemSchemasAPI,
22
24
  TableConstraintsAPI, TablesAPI,
@@ -26,8 +28,9 @@ from databricks.sdk.service.compute import (ClusterPoliciesAPI, ClustersAPI,
26
28
  GlobalInitScriptsAPI,
27
29
  InstancePoolsAPI,
28
30
  InstanceProfilesAPI, LibrariesAPI,
31
+ PolicyComplianceForClustersAPI,
29
32
  PolicyFamiliesAPI)
30
- from databricks.sdk.service.dashboards import LakeviewAPI
33
+ from databricks.sdk.service.dashboards import GenieAPI, LakeviewAPI
31
34
  from databricks.sdk.service.files import DbfsAPI, FilesAPI
32
35
  from databricks.sdk.service.iam import (AccountAccessControlAPI,
33
36
  AccountAccessControlProxyAPI,
@@ -37,7 +40,7 @@ from databricks.sdk.service.iam import (AccountAccessControlAPI,
37
40
  GroupsAPI, PermissionMigrationAPI,
38
41
  PermissionsAPI, ServicePrincipalsAPI,
39
42
  UsersAPI, WorkspaceAssignmentAPI)
40
- from databricks.sdk.service.jobs import JobsAPI
43
+ from databricks.sdk.service.jobs import JobsAPI, PolicyComplianceForJobsAPI
41
44
  from databricks.sdk.service.marketplace import (
42
45
  ConsumerFulfillmentsAPI, ConsumerInstallationsAPI, ConsumerListingsAPI,
43
46
  ConsumerPersonalizationRequestsAPI, ConsumerProvidersAPI,
@@ -55,7 +58,8 @@ from databricks.sdk.service.provisioning import (CredentialsAPI,
55
58
  NetworksAPI, PrivateAccessAPI,
56
59
  StorageAPI, VpcEndpointsAPI,
57
60
  Workspace, WorkspacesAPI)
58
- from databricks.sdk.service.serving import AppsAPI, ServingEndpointsAPI
61
+ from databricks.sdk.service.serving import (ServingEndpointsAPI,
62
+ ServingEndpointsDataPlaneAPI)
59
63
  from databricks.sdk.service.settings import (AccountIpAccessListsAPI,
60
64
  AccountSettingsAPI,
61
65
  AutomaticClusterUpdateAPI,
@@ -67,6 +71,7 @@ from databricks.sdk.service.settings import (AccountIpAccessListsAPI,
67
71
  EsmEnablementAccountAPI,
68
72
  IpAccessListsAPI,
69
73
  NetworkConnectivityAPI,
74
+ NotificationDestinationsAPI,
70
75
  PersonalComputeAPI,
71
76
  RestrictWorkspaceAdminsAPI,
72
77
  SettingsAPI, TokenManagementAPI,
@@ -74,11 +79,13 @@ from databricks.sdk.service.settings import (AccountIpAccessListsAPI,
74
79
  from databricks.sdk.service.sharing import (CleanRoomsAPI, ProvidersAPI,
75
80
  RecipientActivationAPI,
76
81
  RecipientsAPI, SharesAPI)
77
- from databricks.sdk.service.sql import (AlertsAPI, DashboardsAPI,
78
- DashboardWidgetsAPI, DataSourcesAPI,
79
- DbsqlPermissionsAPI, QueriesAPI,
82
+ from databricks.sdk.service.sql import (AlertsAPI, AlertsLegacyAPI,
83
+ DashboardsAPI, DashboardWidgetsAPI,
84
+ DataSourcesAPI, DbsqlPermissionsAPI,
85
+ QueriesAPI, QueriesLegacyAPI,
80
86
  QueryHistoryAPI,
81
87
  QueryVisualizationsAPI,
88
+ QueryVisualizationsLegacyAPI,
82
89
  StatementExecutionAPI, WarehousesAPI)
83
90
  from databricks.sdk.service.vectorsearch import (VectorSearchEndpointsAPI,
84
91
  VectorSearchIndexesAPI)
@@ -162,8 +169,10 @@ class WorkspaceClient:
162
169
  self._config = config.copy()
163
170
  self._dbutils = _make_dbutils(self._config)
164
171
  self._api_client = client.ApiClient(self._config)
172
+ serving_endpoints = ServingEndpointsAPI(self._api_client)
165
173
  self._account_access_control_proxy = AccountAccessControlProxyAPI(self._api_client)
166
174
  self._alerts = AlertsAPI(self._api_client)
175
+ self._alerts_legacy = AlertsLegacyAPI(self._api_client)
167
176
  self._apps = AppsAPI(self._api_client)
168
177
  self._artifact_allowlists = ArtifactAllowlistsAPI(self._api_client)
169
178
  self._catalogs = CatalogsAPI(self._api_client)
@@ -188,6 +197,7 @@ class WorkspaceClient:
188
197
  self._external_locations = ExternalLocationsAPI(self._api_client)
189
198
  self._files = FilesAPI(self._api_client)
190
199
  self._functions = FunctionsAPI(self._api_client)
200
+ self._genie = GenieAPI(self._api_client)
191
201
  self._git_credentials = GitCredentialsAPI(self._api_client)
192
202
  self._global_init_scripts = GlobalInitScriptsAPI(self._api_client)
193
203
  self._grants = GrantsAPI(self._api_client)
@@ -201,10 +211,13 @@ class WorkspaceClient:
201
211
  self._metastores = MetastoresAPI(self._api_client)
202
212
  self._model_registry = ModelRegistryAPI(self._api_client)
203
213
  self._model_versions = ModelVersionsAPI(self._api_client)
214
+ self._notification_destinations = NotificationDestinationsAPI(self._api_client)
204
215
  self._online_tables = OnlineTablesAPI(self._api_client)
205
216
  self._permission_migration = PermissionMigrationAPI(self._api_client)
206
217
  self._permissions = PermissionsAPI(self._api_client)
207
218
  self._pipelines = PipelinesAPI(self._api_client)
219
+ self._policy_compliance_for_clusters = PolicyComplianceForClustersAPI(self._api_client)
220
+ self._policy_compliance_for_jobs = PolicyComplianceForJobsAPI(self._api_client)
208
221
  self._policy_families = PolicyFamiliesAPI(self._api_client)
209
222
  self._provider_exchange_filters = ProviderExchangeFiltersAPI(self._api_client)
210
223
  self._provider_exchanges = ProviderExchangesAPI(self._api_client)
@@ -217,16 +230,20 @@ class WorkspaceClient:
217
230
  self._providers = ProvidersAPI(self._api_client)
218
231
  self._quality_monitors = QualityMonitorsAPI(self._api_client)
219
232
  self._queries = QueriesAPI(self._api_client)
233
+ self._queries_legacy = QueriesLegacyAPI(self._api_client)
220
234
  self._query_history = QueryHistoryAPI(self._api_client)
221
235
  self._query_visualizations = QueryVisualizationsAPI(self._api_client)
236
+ self._query_visualizations_legacy = QueryVisualizationsLegacyAPI(self._api_client)
222
237
  self._recipient_activation = RecipientActivationAPI(self._api_client)
223
238
  self._recipients = RecipientsAPI(self._api_client)
224
239
  self._registered_models = RegisteredModelsAPI(self._api_client)
225
240
  self._repos = ReposAPI(self._api_client)
241
+ self._resource_quotas = ResourceQuotasAPI(self._api_client)
226
242
  self._schemas = SchemasAPI(self._api_client)
227
243
  self._secrets = SecretsAPI(self._api_client)
228
244
  self._service_principals = ServicePrincipalsAPI(self._api_client)
229
- self._serving_endpoints = ServingEndpointsAPI(self._api_client)
245
+ self._serving_endpoints = serving_endpoints
246
+ self._serving_endpoints_data_plane = ServingEndpointsDataPlaneAPI(self._api_client, serving_endpoints)
230
247
  self._settings = SettingsAPI(self._api_client)
231
248
  self._shares = SharesAPI(self._api_client)
232
249
  self._statement_execution = StatementExecutionAPI(self._api_client)
@@ -267,6 +284,11 @@ class WorkspaceClient:
267
284
  """The alerts API can be used to perform CRUD operations on alerts."""
268
285
  return self._alerts
269
286
 
287
+ @property
288
+ def alerts_legacy(self) -> AlertsLegacyAPI:
289
+ """The alerts API can be used to perform CRUD operations on alerts."""
290
+ return self._alerts_legacy
291
+
270
292
  @property
271
293
  def apps(self) -> AppsAPI:
272
294
  """Apps run directly on a customer’s Databricks instance, integrate with their data, use and extend Databricks services, and enable users to interact through single sign-on."""
@@ -387,6 +409,11 @@ class WorkspaceClient:
387
409
  """Functions implement User-Defined Functions (UDFs) in Unity Catalog."""
388
410
  return self._functions
389
411
 
412
+ @property
413
+ def genie(self) -> GenieAPI:
414
+ """Genie provides a no-code experience for business users, powered by AI/BI."""
415
+ return self._genie
416
+
390
417
  @property
391
418
  def git_credentials(self) -> GitCredentialsAPI:
392
419
  """Registers personal access token for Databricks to do operations on behalf of the user."""
@@ -452,6 +479,11 @@ class WorkspaceClient:
452
479
  """Databricks provides a hosted version of MLflow Model Registry in Unity Catalog."""
453
480
  return self._model_versions
454
481
 
482
+ @property
483
+ def notification_destinations(self) -> NotificationDestinationsAPI:
484
+ """The notification destinations API lets you programmatically manage a workspace's notification destinations."""
485
+ return self._notification_destinations
486
+
455
487
  @property
456
488
  def online_tables(self) -> OnlineTablesAPI:
457
489
  """Online tables provide lower latency and higher QPS access to data from Delta tables."""
@@ -459,7 +491,7 @@ class WorkspaceClient:
459
491
 
460
492
  @property
461
493
  def permission_migration(self) -> PermissionMigrationAPI:
462
- """This spec contains undocumented permission migration APIs used in https://github.com/databrickslabs/ucx."""
494
+ """APIs for migrating acl permissions, used only by the ucx tool: https://github.com/databrickslabs/ucx."""
463
495
  return self._permission_migration
464
496
 
465
497
  @property
@@ -472,6 +504,16 @@ class WorkspaceClient:
472
504
  """The Delta Live Tables API allows you to create, edit, delete, start, and view details about pipelines."""
473
505
  return self._pipelines
474
506
 
507
+ @property
508
+ def policy_compliance_for_clusters(self) -> PolicyComplianceForClustersAPI:
509
+ """The policy compliance APIs allow you to view and manage the policy compliance status of clusters in your workspace."""
510
+ return self._policy_compliance_for_clusters
511
+
512
+ @property
513
+ def policy_compliance_for_jobs(self) -> PolicyComplianceForJobsAPI:
514
+ """The compliance APIs allow you to view and manage the policy compliance status of jobs in your workspace."""
515
+ return self._policy_compliance_for_jobs
516
+
475
517
  @property
476
518
  def policy_families(self) -> PolicyFamiliesAPI:
477
519
  """View available policy families."""
@@ -524,19 +566,29 @@ class WorkspaceClient:
524
566
 
525
567
  @property
526
568
  def queries(self) -> QueriesAPI:
527
- """These endpoints are used for CRUD operations on query definitions."""
569
+ """The queries API can be used to perform CRUD operations on queries."""
528
570
  return self._queries
529
571
 
572
+ @property
573
+ def queries_legacy(self) -> QueriesLegacyAPI:
574
+ """These endpoints are used for CRUD operations on query definitions."""
575
+ return self._queries_legacy
576
+
530
577
  @property
531
578
  def query_history(self) -> QueryHistoryAPI:
532
- """Access the history of queries through SQL warehouses."""
579
+ """A service responsible for storing and retrieving the list of queries run against SQL endpoints and serverless compute."""
533
580
  return self._query_history
534
581
 
535
582
  @property
536
583
  def query_visualizations(self) -> QueryVisualizationsAPI:
537
- """This is an evolving API that facilitates the addition and removal of vizualisations from existing queries within the Databricks Workspace."""
584
+ """This is an evolving API that facilitates the addition and removal of visualizations from existing queries in the Databricks Workspace."""
538
585
  return self._query_visualizations
539
586
 
587
+ @property
588
+ def query_visualizations_legacy(self) -> QueryVisualizationsLegacyAPI:
589
+ """This is an evolving API that facilitates the addition and removal of vizualisations from existing queries within the Databricks Workspace."""
590
+ return self._query_visualizations_legacy
591
+
540
592
  @property
541
593
  def recipient_activation(self) -> RecipientActivationAPI:
542
594
  """The Recipient Activation API is only applicable in the open sharing model where the recipient object has the authentication type of `TOKEN`."""
@@ -557,6 +609,11 @@ class WorkspaceClient:
557
609
  """The Repos API allows users to manage their git repos."""
558
610
  return self._repos
559
611
 
612
+ @property
613
+ def resource_quotas(self) -> ResourceQuotasAPI:
614
+ """Unity Catalog enforces resource quotas on all securable objects, which limits the number of resources that can be created."""
615
+ return self._resource_quotas
616
+
560
617
  @property
561
618
  def schemas(self) -> SchemasAPI:
562
619
  """A schema (also called a database) is the second layer of Unity Catalog’s three-level namespace."""
@@ -577,6 +634,11 @@ class WorkspaceClient:
577
634
  """The Serving Endpoints API allows you to create, update, and delete model serving endpoints."""
578
635
  return self._serving_endpoints
579
636
 
637
+ @property
638
+ def serving_endpoints_data_plane(self) -> ServingEndpointsDataPlaneAPI:
639
+ """Serving endpoints DataPlane provides a set of operations to interact with data plane endpoints for Serving endpoints service."""
640
+ return self._serving_endpoints_data_plane
641
+
580
642
  @property
581
643
  def settings(self) -> SettingsAPI:
582
644
  """Workspace Settings API allows users to manage settings at the workspace level."""
@@ -734,7 +796,6 @@ class AccountClient:
734
796
  self._api_client = client.ApiClient(self._config)
735
797
  self._access_control = AccountAccessControlAPI(self._api_client)
736
798
  self._billable_usage = BillableUsageAPI(self._api_client)
737
- self._budgets = BudgetsAPI(self._api_client)
738
799
  self._credentials = CredentialsAPI(self._api_client)
739
800
  self._custom_app_integration = CustomAppIntegrationAPI(self._api_client)
740
801
  self._encryption_keys = EncryptionKeysAPI(self._api_client)
@@ -753,10 +814,12 @@ class AccountClient:
753
814
  self._settings = AccountSettingsAPI(self._api_client)
754
815
  self._storage = StorageAPI(self._api_client)
755
816
  self._storage_credentials = AccountStorageCredentialsAPI(self._api_client)
817
+ self._usage_dashboards = UsageDashboardsAPI(self._api_client)
756
818
  self._users = AccountUsersAPI(self._api_client)
757
819
  self._vpc_endpoints = VpcEndpointsAPI(self._api_client)
758
820
  self._workspace_assignment = WorkspaceAssignmentAPI(self._api_client)
759
821
  self._workspaces = WorkspacesAPI(self._api_client)
822
+ self._budgets = BudgetsAPI(self._api_client)
760
823
 
761
824
  @property
762
825
  def config(self) -> client.Config:
@@ -776,11 +839,6 @@ class AccountClient:
776
839
  """This API allows you to download billable usage logs for the specified account and date range."""
777
840
  return self._billable_usage
778
841
 
779
- @property
780
- def budgets(self) -> BudgetsAPI:
781
- """These APIs manage budget configuration including notifications for exceeding a budget for a period."""
782
- return self._budgets
783
-
784
842
  @property
785
843
  def credentials(self) -> CredentialsAPI:
786
844
  """These APIs manage credential configurations for this workspace."""
@@ -788,7 +846,7 @@ class AccountClient:
788
846
 
789
847
  @property
790
848
  def custom_app_integration(self) -> CustomAppIntegrationAPI:
791
- """These APIs enable administrators to manage custom oauth app integrations, which is required for adding/using Custom OAuth App Integration like Tableau Cloud for Databricks in AWS cloud."""
849
+ """These APIs enable administrators to manage custom OAuth app integrations, which is required for adding/using Custom OAuth App Integration like Tableau Cloud for Databricks in AWS cloud."""
792
850
  return self._custom_app_integration
793
851
 
794
852
  @property
@@ -843,7 +901,7 @@ class AccountClient:
843
901
 
844
902
  @property
845
903
  def published_app_integration(self) -> PublishedAppIntegrationAPI:
846
- """These APIs enable administrators to manage published oauth app integrations, which is required for adding/using Published OAuth App Integration like Tableau Desktop for Databricks in AWS cloud."""
904
+ """These APIs enable administrators to manage published OAuth app integrations, which is required for adding/using Published OAuth App Integration like Tableau Desktop for Databricks in AWS cloud."""
847
905
  return self._published_app_integration
848
906
 
849
907
  @property
@@ -871,6 +929,11 @@ class AccountClient:
871
929
  """These APIs manage storage credentials for a particular metastore."""
872
930
  return self._storage_credentials
873
931
 
932
+ @property
933
+ def usage_dashboards(self) -> UsageDashboardsAPI:
934
+ """These APIs manage usage dashboards for this account."""
935
+ return self._usage_dashboards
936
+
874
937
  @property
875
938
  def users(self) -> AccountUsersAPI:
876
939
  """User identities recognized by Databricks and represented by email addresses."""
@@ -891,6 +954,11 @@ class AccountClient:
891
954
  """These APIs manage workspaces for this account."""
892
955
  return self._workspaces
893
956
 
957
+ @property
958
+ def budgets(self) -> BudgetsAPI:
959
+ """These APIs manage budget configurations for this account."""
960
+ return self._budgets
961
+
894
962
  def get_workspace_client(self, workspace: Workspace) -> WorkspaceClient:
895
963
  """Constructs a ``WorkspaceClient`` for the given workspace.
896
964