databricks-sdk 0.63.0__tar.gz → 0.65.0__tar.gz

This diff shows the changes between two publicly released versions of the package, as published to a supported registry. It is provided for informational purposes only and reflects the package contents as they appear in the public registry.
Files changed (89)
  1. {databricks_sdk-0.63.0 → databricks_sdk-0.65.0}/CHANGELOG.md +47 -0
  2. {databricks_sdk-0.63.0 → databricks_sdk-0.65.0}/PKG-INFO +2 -2
  3. {databricks_sdk-0.63.0 → databricks_sdk-0.65.0}/databricks/sdk/__init__.py +45 -3
  4. {databricks_sdk-0.63.0 → databricks_sdk-0.65.0}/databricks/sdk/mixins/open_ai_client.py +55 -6
  5. {databricks_sdk-0.63.0 → databricks_sdk-0.65.0}/databricks/sdk/service/agentbricks.py +3 -3
  6. {databricks_sdk-0.63.0 → databricks_sdk-0.65.0}/databricks/sdk/service/apps.py +519 -0
  7. {databricks_sdk-0.63.0 → databricks_sdk-0.65.0}/databricks/sdk/service/catalog.py +712 -45
  8. {databricks_sdk-0.63.0 → databricks_sdk-0.65.0}/databricks/sdk/service/cleanrooms.py +7 -6
  9. {databricks_sdk-0.63.0 → databricks_sdk-0.65.0}/databricks/sdk/service/dashboards.py +155 -6
  10. {databricks_sdk-0.63.0 → databricks_sdk-0.65.0}/databricks/sdk/service/database.py +20 -0
  11. {databricks_sdk-0.63.0 → databricks_sdk-0.65.0}/databricks/sdk/service/iam.py +3 -1
  12. {databricks_sdk-0.63.0 → databricks_sdk-0.65.0}/databricks/sdk/service/jobs.py +57 -5
  13. {databricks_sdk-0.63.0 → databricks_sdk-0.65.0}/databricks/sdk/service/ml.py +47 -47
  14. {databricks_sdk-0.63.0 → databricks_sdk-0.65.0}/databricks/sdk/service/pipelines.py +130 -0
  15. {databricks_sdk-0.63.0 → databricks_sdk-0.65.0}/databricks/sdk/service/serving.py +16 -0
  16. {databricks_sdk-0.63.0 → databricks_sdk-0.65.0}/databricks/sdk/service/settings.py +69 -2
  17. databricks_sdk-0.65.0/databricks/sdk/service/settingsv2.py +937 -0
  18. {databricks_sdk-0.63.0 → databricks_sdk-0.65.0}/databricks/sdk/service/sharing.py +1 -28
  19. {databricks_sdk-0.63.0 → databricks_sdk-0.65.0}/databricks/sdk/service/sql.py +64 -1
  20. databricks_sdk-0.65.0/databricks/sdk/service/tags.py +232 -0
  21. {databricks_sdk-0.63.0 → databricks_sdk-0.65.0}/databricks/sdk/service/vectorsearch.py +82 -5
  22. databricks_sdk-0.65.0/databricks/sdk/version.py +1 -0
  23. {databricks_sdk-0.63.0 → databricks_sdk-0.65.0}/databricks_sdk.egg-info/PKG-INFO +2 -2
  24. {databricks_sdk-0.63.0 → databricks_sdk-0.65.0}/databricks_sdk.egg-info/SOURCES.txt +2 -0
  25. {databricks_sdk-0.63.0 → databricks_sdk-0.65.0}/databricks_sdk.egg-info/requires.txt +1 -1
  26. {databricks_sdk-0.63.0 → databricks_sdk-0.65.0}/pyproject.toml +1 -1
  27. databricks_sdk-0.63.0/databricks/sdk/version.py +0 -1
  28. {databricks_sdk-0.63.0 → databricks_sdk-0.65.0}/CONTRIBUTING.md +0 -0
  29. {databricks_sdk-0.63.0 → databricks_sdk-0.65.0}/DCO +0 -0
  30. {databricks_sdk-0.63.0 → databricks_sdk-0.65.0}/LICENSE +0 -0
  31. {databricks_sdk-0.63.0 → databricks_sdk-0.65.0}/MANIFEST.in +0 -0
  32. {databricks_sdk-0.63.0 → databricks_sdk-0.65.0}/Makefile +0 -0
  33. {databricks_sdk-0.63.0 → databricks_sdk-0.65.0}/NOTICE +0 -0
  34. {databricks_sdk-0.63.0 → databricks_sdk-0.65.0}/README.md +0 -0
  35. {databricks_sdk-0.63.0 → databricks_sdk-0.65.0}/SECURITY.md +0 -0
  36. {databricks_sdk-0.63.0 → databricks_sdk-0.65.0}/databricks/__init__.py +0 -0
  37. {databricks_sdk-0.63.0 → databricks_sdk-0.65.0}/databricks/sdk/_base_client.py +0 -0
  38. {databricks_sdk-0.63.0 → databricks_sdk-0.65.0}/databricks/sdk/_property.py +0 -0
  39. {databricks_sdk-0.63.0 → databricks_sdk-0.65.0}/databricks/sdk/_widgets/__init__.py +0 -0
  40. {databricks_sdk-0.63.0 → databricks_sdk-0.65.0}/databricks/sdk/_widgets/default_widgets_utils.py +0 -0
  41. {databricks_sdk-0.63.0 → databricks_sdk-0.65.0}/databricks/sdk/_widgets/ipywidgets_utils.py +0 -0
  42. {databricks_sdk-0.63.0 → databricks_sdk-0.65.0}/databricks/sdk/azure.py +0 -0
  43. {databricks_sdk-0.63.0 → databricks_sdk-0.65.0}/databricks/sdk/casing.py +0 -0
  44. {databricks_sdk-0.63.0 → databricks_sdk-0.65.0}/databricks/sdk/clock.py +0 -0
  45. {databricks_sdk-0.63.0 → databricks_sdk-0.65.0}/databricks/sdk/config.py +0 -0
  46. {databricks_sdk-0.63.0 → databricks_sdk-0.65.0}/databricks/sdk/core.py +0 -0
  47. {databricks_sdk-0.63.0 → databricks_sdk-0.65.0}/databricks/sdk/credentials_provider.py +0 -0
  48. {databricks_sdk-0.63.0 → databricks_sdk-0.65.0}/databricks/sdk/data_plane.py +0 -0
  49. {databricks_sdk-0.63.0 → databricks_sdk-0.65.0}/databricks/sdk/dbutils.py +0 -0
  50. {databricks_sdk-0.63.0 → databricks_sdk-0.65.0}/databricks/sdk/environments.py +0 -0
  51. {databricks_sdk-0.63.0 → databricks_sdk-0.65.0}/databricks/sdk/errors/__init__.py +0 -0
  52. {databricks_sdk-0.63.0 → databricks_sdk-0.65.0}/databricks/sdk/errors/base.py +0 -0
  53. {databricks_sdk-0.63.0 → databricks_sdk-0.65.0}/databricks/sdk/errors/customizer.py +0 -0
  54. {databricks_sdk-0.63.0 → databricks_sdk-0.65.0}/databricks/sdk/errors/deserializer.py +0 -0
  55. {databricks_sdk-0.63.0 → databricks_sdk-0.65.0}/databricks/sdk/errors/details.py +0 -0
  56. {databricks_sdk-0.63.0 → databricks_sdk-0.65.0}/databricks/sdk/errors/mapper.py +0 -0
  57. {databricks_sdk-0.63.0 → databricks_sdk-0.65.0}/databricks/sdk/errors/overrides.py +0 -0
  58. {databricks_sdk-0.63.0 → databricks_sdk-0.65.0}/databricks/sdk/errors/parser.py +0 -0
  59. {databricks_sdk-0.63.0 → databricks_sdk-0.65.0}/databricks/sdk/errors/platform.py +0 -0
  60. {databricks_sdk-0.63.0 → databricks_sdk-0.65.0}/databricks/sdk/errors/private_link.py +0 -0
  61. {databricks_sdk-0.63.0 → databricks_sdk-0.65.0}/databricks/sdk/errors/sdk.py +0 -0
  62. {databricks_sdk-0.63.0 → databricks_sdk-0.65.0}/databricks/sdk/logger/__init__.py +0 -0
  63. {databricks_sdk-0.63.0 → databricks_sdk-0.65.0}/databricks/sdk/logger/round_trip_logger.py +0 -0
  64. {databricks_sdk-0.63.0 → databricks_sdk-0.65.0}/databricks/sdk/mixins/__init__.py +0 -0
  65. {databricks_sdk-0.63.0 → databricks_sdk-0.65.0}/databricks/sdk/mixins/compute.py +0 -0
  66. {databricks_sdk-0.63.0 → databricks_sdk-0.65.0}/databricks/sdk/mixins/files.py +0 -0
  67. {databricks_sdk-0.63.0 → databricks_sdk-0.65.0}/databricks/sdk/mixins/jobs.py +0 -0
  68. {databricks_sdk-0.63.0 → databricks_sdk-0.65.0}/databricks/sdk/mixins/workspace.py +0 -0
  69. {databricks_sdk-0.63.0 → databricks_sdk-0.65.0}/databricks/sdk/oauth.py +0 -0
  70. {databricks_sdk-0.63.0 → databricks_sdk-0.65.0}/databricks/sdk/oidc.py +0 -0
  71. {databricks_sdk-0.63.0 → databricks_sdk-0.65.0}/databricks/sdk/oidc_token_supplier.py +0 -0
  72. {databricks_sdk-0.63.0 → databricks_sdk-0.65.0}/databricks/sdk/py.typed +0 -0
  73. {databricks_sdk-0.63.0 → databricks_sdk-0.65.0}/databricks/sdk/retries.py +0 -0
  74. {databricks_sdk-0.63.0 → databricks_sdk-0.65.0}/databricks/sdk/runtime/__init__.py +0 -0
  75. {databricks_sdk-0.63.0 → databricks_sdk-0.65.0}/databricks/sdk/runtime/dbutils_stub.py +0 -0
  76. {databricks_sdk-0.63.0 → databricks_sdk-0.65.0}/databricks/sdk/service/__init__.py +0 -0
  77. {databricks_sdk-0.63.0 → databricks_sdk-0.65.0}/databricks/sdk/service/_internal.py +0 -0
  78. {databricks_sdk-0.63.0 → databricks_sdk-0.65.0}/databricks/sdk/service/billing.py +0 -0
  79. {databricks_sdk-0.63.0 → databricks_sdk-0.65.0}/databricks/sdk/service/compute.py +0 -0
  80. {databricks_sdk-0.63.0 → databricks_sdk-0.65.0}/databricks/sdk/service/files.py +0 -0
  81. {databricks_sdk-0.63.0 → databricks_sdk-0.65.0}/databricks/sdk/service/marketplace.py +0 -0
  82. {databricks_sdk-0.63.0 → databricks_sdk-0.65.0}/databricks/sdk/service/oauth2.py +0 -0
  83. {databricks_sdk-0.63.0 → databricks_sdk-0.65.0}/databricks/sdk/service/provisioning.py +0 -0
  84. {databricks_sdk-0.63.0 → databricks_sdk-0.65.0}/databricks/sdk/service/qualitymonitorv2.py +0 -0
  85. {databricks_sdk-0.63.0 → databricks_sdk-0.65.0}/databricks/sdk/service/workspace.py +0 -0
  86. {databricks_sdk-0.63.0 → databricks_sdk-0.65.0}/databricks/sdk/useragent.py +0 -0
  87. {databricks_sdk-0.63.0 → databricks_sdk-0.65.0}/databricks_sdk.egg-info/dependency_links.txt +0 -0
  88. {databricks_sdk-0.63.0 → databricks_sdk-0.65.0}/databricks_sdk.egg-info/top_level.txt +0 -0
  89. {databricks_sdk-0.63.0 → databricks_sdk-0.65.0}/setup.cfg +0 -0
@@ -1,5 +1,52 @@
  # Version changelog

+ ## Release v0.65.0
+
+ ### New Features and Improvements
+
+ * Added support for passing additional kwargs to `WorkspaceClient().serving_endpoints.get_open_ai_client()` ([#1025](https://github.com/databricks/databricks-sdk-py/pull/1025)). Users can now pass standard OpenAI client parameters like `timeout` and `max_retries` when creating an OpenAI client for Databricks Model Serving.
+
+ ### API Changes
+ * Added `comment` field for `databricks.sdk.service.dashboards.GenieSendMessageFeedbackRequest`.
+ * [Breaking] Added `rating` field for `databricks.sdk.service.dashboards.GenieSendMessageFeedbackRequest`.
+ * Added `effective_enable_pg_native_login` and `enable_pg_native_login` fields for `databricks.sdk.service.database.DatabaseInstance`.
+ * Added `task_retry_mode` field for `databricks.sdk.service.jobs.Continuous`.
+ * Added `source_configurations` field for `databricks.sdk.service.pipelines.IngestionPipelineDefinition`.
+ * Added `app_id`, `app_id_set`, `auth_secret`, `auth_secret_set`, `channel_url`, `channel_url_set`, `tenant_id` and `tenant_id_set` fields for `databricks.sdk.service.settings.MicrosoftTeamsConfig`.
+ * Added `ensure_reranker_compatible` field for `databricks.sdk.service.vectorsearch.GetIndexRequest`.
+ * Added `reranker` field for `databricks.sdk.service.vectorsearch.QueryVectorIndexRequest`.
+ * [Breaking] Changed `create_clean_room_asset_review()` method for [w.clean_room_assets](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/cleanrooms/clean_room_assets.html) workspace-level service with new required argument order.
+ * [Breaking] Changed `send_message_feedback()` method for [w.genie](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/dashboards/genie.html) workspace-level service with new required argument order.
+ * Changed `notebook_review` field for `databricks.sdk.service.cleanrooms.CreateCleanRoomAssetReviewRequest` to no longer be required.
+ * [Breaking] Changed `features` field for `databricks.sdk.service.ml.FeatureList` to type list[`databricks.sdk.service.ml.LinkedFeature`] dataclass.
+ * [Breaking] Removed `feedback_rating` and `feedback_text` fields for `databricks.sdk.service.dashboards.GenieSendMessageFeedbackRequest`.
+
+
+ ## Release v0.64.0
+
+ ### API Changes
+ * Added `databricks.sdk.service.settingsv2` and `databricks.sdk.service.tags` packages.
+ * Added [w.apps_settings](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/apps/apps_settings.html) workspace-level service.
+ * Added [w.entity_tag_assignments](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/catalog/entity_tag_assignments.html) workspace-level service and [w.rfa](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/catalog/rfa.html) workspace-level service.
+ * Added [a.account_settings_v2](https://databricks-sdk-py.readthedocs.io/en/latest/account/settingsv2/account_settings_v2.html) account-level service and [w.workspace_settings_v2](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/settingsv2/workspace_settings_v2.html) workspace-level service.
+ * Added [w.tag_policies](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/tags/tag_policies.html) workspace-level service.
+ * Added `delete_conversation_message()`, `list_conversation_messages()` and `send_message_feedback()` methods for [w.genie](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/dashboards/genie.html) workspace-level service.
+ * Added `include_all` field for `databricks.sdk.service.dashboards.GenieListConversationsRequest`.
+ * Added `effective_usage_policy_id` field for `databricks.sdk.service.jobs.BaseJob`.
+ * Added `effective_usage_policy_id` field for `databricks.sdk.service.jobs.BaseRun`.
+ * Added `effective_usage_policy_id` field for `databricks.sdk.service.jobs.Job`.
+ * Added `effective_usage_policy_id` field for `databricks.sdk.service.jobs.Run`.
+ * Added `tokens` field for `databricks.sdk.service.serving.AiGatewayRateLimit`.
+ * Added `usage_policy_id` field for `databricks.sdk.service.serving.ServingEndpoint`.
+ * Added `effective_run_as` and `run_as` fields for `databricks.sdk.service.sql.AlertV2`.
+ * Added `cache_query_id` field for `databricks.sdk.service.sql.QueryInfo`.
+ * Added `model_endpoint_name_for_query` field for `databricks.sdk.service.vectorsearch.EmbeddingSourceColumn`.
+ * [Breaking] Removed `environment_settings` field for `databricks.sdk.service.catalog.ConnectionInfo`.
+ * [Breaking] Removed `environment_settings` field for `databricks.sdk.service.catalog.CreateConnection`.
+ * [Breaking] Removed `environment_settings` field for `databricks.sdk.service.catalog.UpdateConnection`.
+ * [Breaking] Removed `comment`, `display_name` and `tags` fields for `databricks.sdk.service.sharing.Share`.
+
+
  ## Release v0.63.0

  ### API Changes
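The headline item in the v0.65.0 entry above is the kwargs passthrough on `get_open_ai_client()`. Below is a minimal, illustrative sketch of how that reads in user code; it assumes the SDK is installed with the OpenAI extra (`databricks-sdk[openai]`), that default Databricks authentication is configured, and uses a hypothetical serving endpoint name `my-endpoint`.

```python
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

# New in v0.65.0: standard OpenAI client options can be forwarded as kwargs.
client = w.serving_endpoints.get_open_ai_client(timeout=30.0, max_retries=3)

# "my-endpoint" is a hypothetical serving endpoint name used for illustration.
response = client.chat.completions.create(
    model="my-endpoint",
    messages=[{"role": "user", "content": "Hello from the Databricks SDK"}],
)
print(response.choices[0].message.content)
```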
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: databricks-sdk
- Version: 0.63.0
+ Version: 0.65.0
  Summary: Databricks SDK for Python (Beta)
  Project-URL: Documentation, https://databricks-sdk-py.readthedocs.io
  Keywords: databricks,sdk
@@ -26,7 +26,7 @@ Requires-Dist: google-auth~=2.0
  Provides-Extra: dev
  Requires-Dist: pytest; extra == "dev"
  Requires-Dist: pytest-cov; extra == "dev"
- Requires-Dist: pytest-xdist; extra == "dev"
+ Requires-Dist: pytest-xdist<4.0,>=3.6.1; extra == "dev"
  Requires-Dist: pytest-mock; extra == "dev"
  Requires-Dist: black; extra == "dev"
  Requires-Dist: pycodestyle; extra == "dev"
@@ -32,12 +32,14 @@ from databricks.sdk.service import provisioning as pkg_provisioning
  from databricks.sdk.service import qualitymonitorv2 as pkg_qualitymonitorv2
  from databricks.sdk.service import serving as pkg_serving
  from databricks.sdk.service import settings as pkg_settings
+ from databricks.sdk.service import settingsv2 as pkg_settingsv2
  from databricks.sdk.service import sharing as pkg_sharing
  from databricks.sdk.service import sql as pkg_sql
+ from databricks.sdk.service import tags as pkg_tags
  from databricks.sdk.service import vectorsearch as pkg_vectorsearch
  from databricks.sdk.service import workspace as pkg_workspace
  from databricks.sdk.service.agentbricks import AgentBricksAPI
- from databricks.sdk.service.apps import AppsAPI
+ from databricks.sdk.service.apps import AppsAPI, AppsSettingsAPI
  from databricks.sdk.service.billing import (BillableUsageAPI, BudgetPolicyAPI,
  BudgetsAPI, LogDeliveryAPI,
  UsageDashboardsAPI)
@@ -46,6 +48,7 @@ from databricks.sdk.service.catalog import (AccountMetastoreAssignmentsAPI,
  AccountStorageCredentialsAPI,
  ArtifactAllowlistsAPI, CatalogsAPI,
  ConnectionsAPI, CredentialsAPI,
+ EntityTagAssignmentsAPI,
  ExternalLineageAPI,
  ExternalLocationsAPI,
  ExternalMetadataAPI, FunctionsAPI,
@@ -53,8 +56,8 @@ from databricks.sdk.service.catalog import (AccountMetastoreAssignmentsAPI,
  ModelVersionsAPI, OnlineTablesAPI,
  PoliciesAPI, QualityMonitorsAPI,
  RegisteredModelsAPI,
- ResourceQuotasAPI, SchemasAPI,
- StorageCredentialsAPI,
+ ResourceQuotasAPI, RfaAPI,
+ SchemasAPI, StorageCredentialsAPI,
  SystemSchemasAPI,
  TableConstraintsAPI, TablesAPI,
  TemporaryPathCredentialsAPI,
@@ -128,6 +131,8 @@ from databricks.sdk.service.settings import (
  RestrictWorkspaceAdminsAPI, SettingsAPI, SqlResultsDownloadAPI,
  TokenManagementAPI, TokensAPI, WorkspaceConfAPI,
  WorkspaceNetworkConfigurationAPI)
+ from databricks.sdk.service.settingsv2 import (AccountSettingsV2API,
+ WorkspaceSettingsV2API)
  from databricks.sdk.service.sharing import (ProvidersAPI,
  RecipientActivationAPI,
  RecipientFederationPoliciesAPI,
@@ -141,6 +146,7 @@ from databricks.sdk.service.sql import (AlertsAPI, AlertsLegacyAPI,
  QueryVisualizationsLegacyAPI,
  RedashConfigAPI, StatementExecutionAPI,
  WarehousesAPI)
+ from databricks.sdk.service.tags import TagPoliciesAPI
  from databricks.sdk.service.vectorsearch import (VectorSearchEndpointsAPI,
  VectorSearchIndexesAPI)
  from databricks.sdk.service.workspace import (GitCredentialsAPI, ReposAPI,
@@ -248,6 +254,7 @@ class WorkspaceClient:
  self._alerts_legacy = pkg_sql.AlertsLegacyAPI(self._api_client)
  self._alerts_v2 = pkg_sql.AlertsV2API(self._api_client)
  self._apps = pkg_apps.AppsAPI(self._api_client)
+ self._apps_settings = pkg_apps.AppsSettingsAPI(self._api_client)
  self._artifact_allowlists = pkg_catalog.ArtifactAllowlistsAPI(self._api_client)
  self._catalogs = pkg_catalog.CatalogsAPI(self._api_client)
  self._clean_room_asset_revisions = pkg_cleanrooms.CleanRoomAssetRevisionsAPI(self._api_client)
@@ -273,6 +280,7 @@ class WorkspaceClient:
  self._database = pkg_database.DatabaseAPI(self._api_client)
  self._dbfs = DbfsExt(self._api_client)
  self._dbsql_permissions = pkg_sql.DbsqlPermissionsAPI(self._api_client)
+ self._entity_tag_assignments = pkg_catalog.EntityTagAssignmentsAPI(self._api_client)
  self._experiments = pkg_ml.ExperimentsAPI(self._api_client)
  self._external_lineage = pkg_catalog.ExternalLineageAPI(self._api_client)
  self._external_locations = pkg_catalog.ExternalLocationsAPI(self._api_client)
@@ -329,6 +337,7 @@ class WorkspaceClient:
  self._registered_models = pkg_catalog.RegisteredModelsAPI(self._api_client)
  self._repos = pkg_workspace.ReposAPI(self._api_client)
  self._resource_quotas = pkg_catalog.ResourceQuotasAPI(self._api_client)
+ self._rfa = pkg_catalog.RfaAPI(self._api_client)
  self._schemas = pkg_catalog.SchemasAPI(self._api_client)
  self._secrets = pkg_workspace.SecretsAPI(self._api_client)
  self._service_principal_secrets_proxy = pkg_oauth2.ServicePrincipalSecretsProxyAPI(self._api_client)
@@ -347,6 +356,7 @@ class WorkspaceClient:
  self._system_schemas = pkg_catalog.SystemSchemasAPI(self._api_client)
  self._table_constraints = pkg_catalog.TableConstraintsAPI(self._api_client)
  self._tables = pkg_catalog.TablesAPI(self._api_client)
+ self._tag_policies = pkg_tags.TagPoliciesAPI(self._api_client)
  self._temporary_path_credentials = pkg_catalog.TemporaryPathCredentialsAPI(self._api_client)
  self._temporary_table_credentials = pkg_catalog.TemporaryTableCredentialsAPI(self._api_client)
  self._token_management = pkg_settings.TokenManagementAPI(self._api_client)
@@ -359,6 +369,7 @@ class WorkspaceClient:
  self._workspace = WorkspaceExt(self._api_client)
  self._workspace_bindings = pkg_catalog.WorkspaceBindingsAPI(self._api_client)
  self._workspace_conf = pkg_settings.WorkspaceConfAPI(self._api_client)
+ self._workspace_settings_v2 = pkg_settingsv2.WorkspaceSettingsV2API(self._api_client)
  self._forecasting = pkg_ml.ForecastingAPI(self._api_client)

  @property
@@ -408,6 +419,11 @@ class WorkspaceClient:
  """Apps run directly on a customer’s Databricks instance, integrate with their data, use and extend Databricks services, and enable users to interact through single sign-on."""
  return self._apps

+ @property
+ def apps_settings(self) -> pkg_apps.AppsSettingsAPI:
+ """Apps Settings manage the settings for the Apps service on a customer's Databricks instance."""
+ return self._apps_settings
+
  @property
  def artifact_allowlists(self) -> pkg_catalog.ArtifactAllowlistsAPI:
  """In Databricks Runtime 13.3 and above, you can add libraries and init scripts to the `allowlist` in UC so that users can leverage these artifacts on compute configured with shared access mode."""
@@ -533,6 +549,11 @@ class WorkspaceClient:
  """The SQL Permissions API is similar to the endpoints of the :method:permissions/set."""
  return self._dbsql_permissions

+ @property
+ def entity_tag_assignments(self) -> pkg_catalog.EntityTagAssignmentsAPI:
+ """Tags are attributes that include keys and optional values that you can use to organize and categorize entities in Unity Catalog."""
+ return self._entity_tag_assignments
+
  @property
  def experiments(self) -> pkg_ml.ExperimentsAPI:
  """Experiments are the primary unit of organization in MLflow; all MLflow runs belong to an experiment."""
@@ -803,6 +824,11 @@ class WorkspaceClient:
  """Unity Catalog enforces resource quotas on all securable objects, which limits the number of resources that can be created."""
  return self._resource_quotas

+ @property
+ def rfa(self) -> pkg_catalog.RfaAPI:
+ """Request for Access enables customers to request access to and manage access request destinations for Unity Catalog securables."""
+ return self._rfa
+
  @property
  def schemas(self) -> pkg_catalog.SchemasAPI:
  """A schema (also called a database) is the second layer of Unity Catalog’s three-level namespace."""
@@ -868,6 +894,11 @@ class WorkspaceClient:
  """A table resides in the third layer of Unity Catalog’s three-level namespace."""
  return self._tables

+ @property
+ def tag_policies(self) -> pkg_tags.TagPoliciesAPI:
+ """The Tag Policy API allows you to manage tag policies in Databricks."""
+ return self._tag_policies
+
  @property
  def temporary_path_credentials(self) -> pkg_catalog.TemporaryPathCredentialsAPI:
  """Temporary Path Credentials refer to short-lived, downscoped credentials used to access external cloud storage locations registered in Databricks."""
@@ -928,6 +959,11 @@ class WorkspaceClient:
  """This API allows updating known workspace settings for advanced users."""
  return self._workspace_conf

+ @property
+ def workspace_settings_v2(self) -> pkg_settingsv2.WorkspaceSettingsV2API:
+ """APIs to manage workspace level settings."""
+ return self._workspace_settings_v2
+
  @property
  def forecasting(self) -> pkg_ml.ForecastingAPI:
  """The Forecasting API allows you to create and get serverless forecasting experiments."""
@@ -1029,6 +1065,7 @@ class AccountClient:
  self._service_principal_secrets = pkg_oauth2.ServicePrincipalSecretsAPI(self._api_client)
  self._service_principals = pkg_iam.AccountServicePrincipalsAPI(self._api_client)
  self._settings = pkg_settings.AccountSettingsAPI(self._api_client)
+ self._settings_v2 = pkg_settingsv2.AccountSettingsV2API(self._api_client)
  self._storage = pkg_provisioning.StorageAPI(self._api_client)
  self._storage_credentials = pkg_catalog.AccountStorageCredentialsAPI(self._api_client)
  self._usage_dashboards = pkg_billing.UsageDashboardsAPI(self._api_client)
@@ -1157,6 +1194,11 @@ class AccountClient:
  """Accounts Settings API allows users to manage settings at the account level."""
  return self._settings

+ @property
+ def settings_v2(self) -> pkg_settingsv2.AccountSettingsV2API:
+ """APIs to manage account level settings."""
+ return self._settings_v2
+
  @property
  def storage(self) -> pkg_provisioning.StorageAPI:
  """These APIs manage storage configurations for this workspace."""
@@ -31,7 +31,41 @@ class ServingEndpointsExt(ServingEndpointsAPI):
  http_client = httpx.Client(auth=databricks_token_auth)
  return http_client

- def get_open_ai_client(self):
+ def get_open_ai_client(self, **kwargs):
+ """Create an OpenAI client configured for Databricks Model Serving.
+
+ Returns an OpenAI client instance that is pre-configured to send requests to
+ Databricks Model Serving endpoints. The client uses Databricks authentication
+ to query endpoints within the workspace associated with the current WorkspaceClient
+ instance.
+
+ Args:
+ **kwargs: Additional parameters to pass to the OpenAI client constructor.
+ Common parameters include:
+ - timeout (float): Request timeout in seconds (e.g., 30.0)
+ - max_retries (int): Maximum number of retries for failed requests (e.g., 3)
+ - default_headers (dict): Additional headers to include with requests
+ - default_query (dict): Additional query parameters to include with requests
+
+ Any parameter accepted by the OpenAI client constructor can be passed here,
+ except for the following parameters which are reserved for Databricks integration:
+ base_url, api_key, http_client
+
+ Returns:
+ OpenAI: An OpenAI client instance configured for Databricks Model Serving.
+
+ Raises:
+ ImportError: If the OpenAI library is not installed.
+ ValueError: If any reserved Databricks parameters are provided in kwargs.
+
+ Example:
+ >>> client = workspace_client.serving_endpoints.get_open_ai_client()
+ >>> # With custom timeout and retries
+ >>> client = workspace_client.serving_endpoints.get_open_ai_client(
+ ... timeout=30.0,
+ ... max_retries=5
+ ... )
+ """
  try:
  from openai import OpenAI
  except Exception:
@@ -39,11 +73,26 @@ class ServingEndpointsExt(ServingEndpointsAPI):
  "Open AI is not installed. Please install the Databricks SDK with the following command `pip install databricks-sdk[openai]`"
  )

- return OpenAI(
- base_url=self._api._cfg.host + "/serving-endpoints",
- api_key="no-token", # Passing in a placeholder to pass validations, this will not be used
- http_client=self._get_authorized_http_client(),
- )
+ # Check for reserved parameters that should not be overridden
+ reserved_params = {"base_url", "api_key", "http_client"}
+ conflicting_params = reserved_params.intersection(kwargs.keys())
+ if conflicting_params:
+ raise ValueError(
+ f"Cannot override reserved Databricks parameters: {', '.join(sorted(conflicting_params))}. "
+ f"These parameters are automatically configured for Databricks Model Serving."
+ )
+
+ # Default parameters that are required for Databricks integration
+ client_params = {
+ "base_url": self._api._cfg.host + "/serving-endpoints",
+ "api_key": "no-token", # Passing in a placeholder to pass validations, this will not be used
+ "http_client": self._get_authorized_http_client(),
+ }
+
+ # Update with any additional parameters passed by the user
+ client_params.update(kwargs)
+
+ return OpenAI(**client_params)

  def get_langchain_chat_open_ai_client(self, model):
  try:
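The new guard in the hunk above rejects kwargs that would clobber the Databricks wiring while forwarding everything else to the OpenAI constructor. A short sketch of the resulting behavior, assuming the OpenAI extra is installed; the header value shown is purely illustrative.

```python
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

# Allowed: any OpenAI constructor option other than the reserved ones.
client = w.serving_endpoints.get_open_ai_client(
    timeout=30.0,
    max_retries=5,
    default_headers={"X-Request-Source": "docs-example"},  # hypothetical header
)

# Rejected: base_url, api_key and http_client are reserved for Databricks.
try:
    w.serving_endpoints.get_open_ai_client(api_key="sk-ignored")
except ValueError as err:
    print(err)  # Cannot override reserved Databricks parameters: api_key. ...
```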
@@ -227,9 +227,9 @@ class AgentBricksAPI:
  :param instructions: str
  Instructions for the custom LLM to follow
  :param agent_artifact_path: str (optional)
- Optional: UC path for agent artifacts. If you are using a dataset that you only have read
- permissions, please provide a destination path where you have write permissions. Please provide this
- in catalog.schema format.
+ This will soon be deprecated!! Optional: UC path for agent artifacts. If you are using a dataset
+ that you only have read permissions, please provide a destination path where you have write
+ permissions. Please provide this in catalog.schema format.
  :param datasets: List[:class:`Dataset`] (optional)
  Datasets used for training and evaluating the model, not for inference. Currently, only 1 dataset is
  accepted.