databricks-sdk 0.45.0__tar.gz → 0.47.0__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of databricks-sdk might be problematic.
- {databricks_sdk-0.45.0 → databricks_sdk-0.47.0}/CHANGELOG.md +56 -0
- {databricks_sdk-0.45.0 → databricks_sdk-0.47.0}/PKG-INFO +4 -3
- {databricks_sdk-0.45.0 → databricks_sdk-0.47.0}/databricks/sdk/__init__.py +13 -2
- {databricks_sdk-0.45.0 → databricks_sdk-0.47.0}/databricks/sdk/config.py +4 -0
- {databricks_sdk-0.45.0 → databricks_sdk-0.47.0}/databricks/sdk/credentials_provider.py +6 -1
- {databricks_sdk-0.45.0 → databricks_sdk-0.47.0}/databricks/sdk/data_plane.py +1 -59
- {databricks_sdk-0.45.0 → databricks_sdk-0.47.0}/databricks/sdk/oauth.py +12 -5
- {databricks_sdk-0.45.0 → databricks_sdk-0.47.0}/databricks/sdk/service/catalog.py +2 -0
- {databricks_sdk-0.45.0 → databricks_sdk-0.47.0}/databricks/sdk/service/compute.py +414 -202
- {databricks_sdk-0.45.0 → databricks_sdk-0.47.0}/databricks/sdk/service/dashboards.py +36 -17
- {databricks_sdk-0.45.0 → databricks_sdk-0.47.0}/databricks/sdk/service/files.py +4 -0
- {databricks_sdk-0.45.0 → databricks_sdk-0.47.0}/databricks/sdk/service/iam.py +12 -29
- {databricks_sdk-0.45.0 → databricks_sdk-0.47.0}/databricks/sdk/service/jobs.py +1 -0
- {databricks_sdk-0.45.0 → databricks_sdk-0.47.0}/databricks/sdk/service/marketplace.py +2 -0
- {databricks_sdk-0.45.0 → databricks_sdk-0.47.0}/databricks/sdk/service/ml.py +451 -2
- {databricks_sdk-0.45.0 → databricks_sdk-0.47.0}/databricks/sdk/service/pipelines.py +25 -28
- {databricks_sdk-0.45.0 → databricks_sdk-0.47.0}/databricks/sdk/service/serving.py +222 -21
- {databricks_sdk-0.45.0 → databricks_sdk-0.47.0}/databricks/sdk/service/sharing.py +71 -71
- databricks_sdk-0.47.0/databricks/sdk/version.py +1 -0
- {databricks_sdk-0.45.0 → databricks_sdk-0.47.0}/databricks_sdk.egg-info/PKG-INFO +4 -3
- {databricks_sdk-0.45.0 → databricks_sdk-0.47.0}/databricks_sdk.egg-info/requires.txt +1 -1
- {databricks_sdk-0.45.0 → databricks_sdk-0.47.0}/pyproject.toml +1 -1
- databricks_sdk-0.47.0/setup.cfg +23 -0
- databricks_sdk-0.45.0/databricks/sdk/version.py +0 -1
- databricks_sdk-0.45.0/setup.cfg +0 -73
- {databricks_sdk-0.45.0 → databricks_sdk-0.47.0}/CONTRIBUTING.md +0 -0
- {databricks_sdk-0.45.0 → databricks_sdk-0.47.0}/DCO +0 -0
- {databricks_sdk-0.45.0 → databricks_sdk-0.47.0}/LICENSE +0 -0
- {databricks_sdk-0.45.0 → databricks_sdk-0.47.0}/MANIFEST.in +0 -0
- {databricks_sdk-0.45.0 → databricks_sdk-0.47.0}/Makefile +0 -0
- {databricks_sdk-0.45.0 → databricks_sdk-0.47.0}/NOTICE +0 -0
- {databricks_sdk-0.45.0 → databricks_sdk-0.47.0}/README.md +0 -0
- {databricks_sdk-0.45.0 → databricks_sdk-0.47.0}/SECURITY.md +0 -0
- {databricks_sdk-0.45.0 → databricks_sdk-0.47.0}/databricks/__init__.py +0 -0
- {databricks_sdk-0.45.0 → databricks_sdk-0.47.0}/databricks/sdk/_base_client.py +0 -0
- {databricks_sdk-0.45.0 → databricks_sdk-0.47.0}/databricks/sdk/_property.py +0 -0
- {databricks_sdk-0.45.0 → databricks_sdk-0.47.0}/databricks/sdk/_widgets/__init__.py +0 -0
- {databricks_sdk-0.45.0 → databricks_sdk-0.47.0}/databricks/sdk/_widgets/default_widgets_utils.py +0 -0
- {databricks_sdk-0.45.0 → databricks_sdk-0.47.0}/databricks/sdk/_widgets/ipywidgets_utils.py +0 -0
- {databricks_sdk-0.45.0 → databricks_sdk-0.47.0}/databricks/sdk/azure.py +0 -0
- {databricks_sdk-0.45.0 → databricks_sdk-0.47.0}/databricks/sdk/casing.py +0 -0
- {databricks_sdk-0.45.0 → databricks_sdk-0.47.0}/databricks/sdk/clock.py +0 -0
- {databricks_sdk-0.45.0 → databricks_sdk-0.47.0}/databricks/sdk/core.py +0 -0
- {databricks_sdk-0.45.0 → databricks_sdk-0.47.0}/databricks/sdk/dbutils.py +0 -0
- {databricks_sdk-0.45.0 → databricks_sdk-0.47.0}/databricks/sdk/environments.py +0 -0
- {databricks_sdk-0.45.0 → databricks_sdk-0.47.0}/databricks/sdk/errors/__init__.py +0 -0
- {databricks_sdk-0.45.0 → databricks_sdk-0.47.0}/databricks/sdk/errors/base.py +0 -0
- {databricks_sdk-0.45.0 → databricks_sdk-0.47.0}/databricks/sdk/errors/customizer.py +0 -0
- {databricks_sdk-0.45.0 → databricks_sdk-0.47.0}/databricks/sdk/errors/deserializer.py +0 -0
- {databricks_sdk-0.45.0 → databricks_sdk-0.47.0}/databricks/sdk/errors/details.py +0 -0
- {databricks_sdk-0.45.0 → databricks_sdk-0.47.0}/databricks/sdk/errors/mapper.py +0 -0
- {databricks_sdk-0.45.0 → databricks_sdk-0.47.0}/databricks/sdk/errors/overrides.py +0 -0
- {databricks_sdk-0.45.0 → databricks_sdk-0.47.0}/databricks/sdk/errors/parser.py +0 -0
- {databricks_sdk-0.45.0 → databricks_sdk-0.47.0}/databricks/sdk/errors/platform.py +0 -0
- {databricks_sdk-0.45.0 → databricks_sdk-0.47.0}/databricks/sdk/errors/private_link.py +0 -0
- {databricks_sdk-0.45.0 → databricks_sdk-0.47.0}/databricks/sdk/errors/sdk.py +0 -0
- {databricks_sdk-0.45.0 → databricks_sdk-0.47.0}/databricks/sdk/logger/__init__.py +0 -0
- {databricks_sdk-0.45.0 → databricks_sdk-0.47.0}/databricks/sdk/logger/round_trip_logger.py +0 -0
- {databricks_sdk-0.45.0 → databricks_sdk-0.47.0}/databricks/sdk/mixins/__init__.py +0 -0
- {databricks_sdk-0.45.0 → databricks_sdk-0.47.0}/databricks/sdk/mixins/compute.py +0 -0
- {databricks_sdk-0.45.0 → databricks_sdk-0.47.0}/databricks/sdk/mixins/files.py +0 -0
- {databricks_sdk-0.45.0 → databricks_sdk-0.47.0}/databricks/sdk/mixins/jobs.py +0 -0
- {databricks_sdk-0.45.0 → databricks_sdk-0.47.0}/databricks/sdk/mixins/open_ai_client.py +0 -0
- {databricks_sdk-0.45.0 → databricks_sdk-0.47.0}/databricks/sdk/mixins/workspace.py +0 -0
- {databricks_sdk-0.45.0 → databricks_sdk-0.47.0}/databricks/sdk/py.typed +0 -0
- {databricks_sdk-0.45.0 → databricks_sdk-0.47.0}/databricks/sdk/retries.py +0 -0
- {databricks_sdk-0.45.0 → databricks_sdk-0.47.0}/databricks/sdk/runtime/__init__.py +0 -0
- {databricks_sdk-0.45.0 → databricks_sdk-0.47.0}/databricks/sdk/runtime/dbutils_stub.py +0 -0
- {databricks_sdk-0.45.0 → databricks_sdk-0.47.0}/databricks/sdk/service/__init__.py +0 -0
- {databricks_sdk-0.45.0 → databricks_sdk-0.47.0}/databricks/sdk/service/_internal.py +0 -0
- {databricks_sdk-0.45.0 → databricks_sdk-0.47.0}/databricks/sdk/service/apps.py +0 -0
- {databricks_sdk-0.45.0 → databricks_sdk-0.47.0}/databricks/sdk/service/billing.py +0 -0
- {databricks_sdk-0.45.0 → databricks_sdk-0.47.0}/databricks/sdk/service/cleanrooms.py +0 -0
- {databricks_sdk-0.45.0 → databricks_sdk-0.47.0}/databricks/sdk/service/oauth2.py +0 -0
- {databricks_sdk-0.45.0 → databricks_sdk-0.47.0}/databricks/sdk/service/provisioning.py +0 -0
- {databricks_sdk-0.45.0 → databricks_sdk-0.47.0}/databricks/sdk/service/settings.py +0 -0
- {databricks_sdk-0.45.0 → databricks_sdk-0.47.0}/databricks/sdk/service/sql.py +0 -0
- {databricks_sdk-0.45.0 → databricks_sdk-0.47.0}/databricks/sdk/service/vectorsearch.py +0 -0
- {databricks_sdk-0.45.0 → databricks_sdk-0.47.0}/databricks/sdk/service/workspace.py +0 -0
- {databricks_sdk-0.45.0 → databricks_sdk-0.47.0}/databricks/sdk/useragent.py +0 -0
- {databricks_sdk-0.45.0 → databricks_sdk-0.47.0}/databricks_sdk.egg-info/SOURCES.txt +0 -0
- {databricks_sdk-0.45.0 → databricks_sdk-0.47.0}/databricks_sdk.egg-info/dependency_links.txt +0 -0
- {databricks_sdk-0.45.0 → databricks_sdk-0.47.0}/databricks_sdk.egg-info/top_level.txt +0 -0
{databricks_sdk-0.45.0 → databricks_sdk-0.47.0}/CHANGELOG.md

```diff
@@ -1,5 +1,61 @@
 # Version changelog
 
+## Release v0.47.0
+
+### Bug Fixes
+
+* Ensure that refresh tokens are returned when using the `external-browser` credentials strategy.
+
+### API Changes
+* Added `abfss`, `dbfs`, `error_message`, `execution_duration_seconds`, `file`, `gcs`, `s3`, `status`, `volumes` and `workspace` fields for `databricks.sdk.service.compute.InitScriptInfoAndExecutionDetails`.
+* [Breaking] Added `forecast_granularity` field for `databricks.sdk.service.ml.CreateForecastingExperimentRequest`.
+* Added `jwks_uri` field for `databricks.sdk.service.oauth2.OidcFederationPolicy`.
+* Added `fallback_config` field for `databricks.sdk.service.serving.AiGatewayConfig`.
+* Added `custom_provider_config` field for `databricks.sdk.service.serving.ExternalModel`.
+* Added `fallback_config` field for `databricks.sdk.service.serving.PutAiGatewayRequest`.
+* Added `fallback_config` field for `databricks.sdk.service.serving.PutAiGatewayResponse`.
+* Added `aliases`, `comment`, `data_type`, `dependency_list`, `full_data_type`, `id`, `input_params`, `name`, `properties`, `routine_definition`, `schema`, `securable_kind`, `share`, `share_id`, `storage_location` and `tags` fields for `databricks.sdk.service.sharing.DeltaSharingFunction`.
+* Added `access_token_failure`, `allocation_timeout`, `allocation_timeout_node_daemon_not_ready`, `allocation_timeout_no_healthy_clusters`, `allocation_timeout_no_matched_clusters`, `allocation_timeout_no_ready_clusters`, `allocation_timeout_no_unallocated_clusters`, `allocation_timeout_no_warmed_up_clusters`, `aws_inaccessible_kms_key_failure`, `aws_instance_profile_update_failure`, `aws_invalid_key_pair`, `aws_invalid_kms_key_state`, `aws_resource_quota_exceeded`, `azure_packed_deployment_partial_failure`, `bootstrap_timeout_due_to_misconfig`, `budget_policy_limit_enforcement_activated`, `budget_policy_resolution_failure`, `cloud_account_setup_failure`, `cloud_operation_cancelled`, `cloud_provider_instance_not_launched`, `cloud_provider_launch_failure_due_to_misconfig`, `cloud_provider_resource_stockout_due_to_misconfig`, `cluster_operation_throttled`, `cluster_operation_timeout`, `control_plane_request_failure_due_to_misconfig`, `data_access_config_changed`, `disaster_recovery_replication`, `driver_eviction`, `driver_launch_timeout`, `driver_node_unreachable`, `driver_out_of_disk`, `driver_out_of_memory`, `driver_pod_creation_failure`, `driver_unexpected_failure`, `dynamic_spark_conf_size_exceeded`, `eos_spark_image`, `executor_pod_unscheduled`, `gcp_api_rate_quota_exceeded`, `gcp_forbidden`, `gcp_iam_timeout`, `gcp_inaccessible_kms_key_failure`, `gcp_insufficient_capacity`, `gcp_ip_space_exhausted`, `gcp_kms_key_permission_denied`, `gcp_not_found`, `gcp_resource_quota_exceeded`, `gcp_service_account_access_denied`, `gcp_service_account_not_found`, `gcp_subnet_not_ready`, `gcp_trusted_image_projects_violated`, `gke_based_cluster_termination`, `init_container_not_finished`, `instance_pool_max_capacity_reached`, `instance_pool_not_found`, `instance_unreachable_due_to_misconfig`, `internal_capacity_failure`, `invalid_aws_parameter`, `invalid_instance_placement_protocol`, `invalid_worker_image_failure`, `in_penalty_box`, `lazy_allocation_timeout`, `maintenance_mode`, `netvisor_setup_timeout`, `no_matched_k8s`, `no_matched_k8s_testing_tag`, `pod_assignment_failure`, `pod_scheduling_failure`, `resource_usage_blocked`, `secret_creation_failure`, `serverless_long_running_terminated`, `spark_image_download_throttled`, `spark_image_not_found`, `ssh_bootstrap_failure`, `storage_download_failure_due_to_misconfig`, `storage_download_failure_slow`, `storage_download_failure_throttled`, `unexpected_pod_recreation`, `user_initiated_vm_termination` and `workspace_update` enum values for `databricks.sdk.service.compute.TerminationReasonCode`.
+* Added `generated_sql_query_too_long_exception` and `missing_sql_query_exception` enum values for `databricks.sdk.service.dashboards.MessageErrorType`.
+* Added `balanced` enum value for `databricks.sdk.service.jobs.PerformanceTarget`.
+* Added `listing_resource` enum value for `databricks.sdk.service.marketplace.FileParentType`.
+* Added `app` enum value for `databricks.sdk.service.marketplace.MarketplaceFileType`.
+* Added `custom` enum value for `databricks.sdk.service.serving.ExternalModelProvider`.
+* [Breaking] Changed `create_experiment()` method for [w.forecasting](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/ml/forecasting.html) workspace-level service with new required argument order.
+* Changed `instance_type_id` field for `databricks.sdk.service.compute.NodeInstanceType` to be required.
+* Changed `category` field for `databricks.sdk.service.compute.NodeType` to be required.
+* [Breaking] Changed `functions` field for `databricks.sdk.service.sharing.ListProviderShareAssetsResponse` to type `databricks.sdk.service.sharing.DeltaSharingFunctionList` dataclass.
+* [Breaking] Changed waiter for [ClustersAPI.create](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/compute/clusters.html#databricks.sdk.service.compute.ClustersAPI.create) method.
+* [Breaking] Changed waiter for [ClustersAPI.delete](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/compute/clusters.html#databricks.sdk.service.compute.ClustersAPI.delete) method.
+* [Breaking] Changed waiter for [ClustersAPI.edit](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/compute/clusters.html#databricks.sdk.service.compute.ClustersAPI.edit) method.
+* [Breaking] Changed waiter for [ClustersAPI.get](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/compute/clusters.html#databricks.sdk.service.compute.ClustersAPI.get) method.
+* [Breaking] Changed waiter for [ClustersAPI.resize](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/compute/clusters.html#databricks.sdk.service.compute.ClustersAPI.resize) method.
+* [Breaking] Changed waiter for [ClustersAPI.restart](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/compute/clusters.html#databricks.sdk.service.compute.ClustersAPI.restart) method.
+* [Breaking] Changed waiter for [ClustersAPI.start](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/compute/clusters.html#databricks.sdk.service.compute.ClustersAPI.start) method.
+* [Breaking] Changed waiter for [ClustersAPI.update](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/compute/clusters.html#databricks.sdk.service.compute.ClustersAPI.update) method.
+* [Breaking] Removed `execution_details` and `script` fields for `databricks.sdk.service.compute.InitScriptInfoAndExecutionDetails`.
+* [Breaking] Removed `supports_elastic_disk` field for `databricks.sdk.service.compute.NodeType`.
+* [Breaking] Removed `data_granularity_quantity` and `data_granularity_unit` fields for `databricks.sdk.service.ml.CreateForecastingExperimentRequest`.
+* [Breaking] Removed `aliases`, `comment`, `data_type`, `dependency_list`, `full_data_type`, `id`, `input_params`, `name`, `properties`, `routine_definition`, `schema`, `securable_kind`, `share`, `share_id`, `storage_location` and `tags` fields for `databricks.sdk.service.sharing.Function`.
+
+
+## Release v0.46.0
+
+### New Features and Improvements
+* [Experimental] Add support for async token refresh ([#916](https://github.com/databricks/databricks-sdk-py/pull/916)).
+  This can be enabled by setting the following environment variable:
+  ```
+  export DATABRICKS_ENABLE_EXPERIMENTAL_ASYNC_TOKEN_REFRESH=1
+  ```
+  This feature and its setting are experimental and may be removed in future releases.
+
+### API Changes
+* Added [w.forecasting](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/ml/forecasting.html) workspace-level service.
+* Added `statement_id` field for `databricks.sdk.service.dashboards.GenieQueryAttachment`.
+* Added `could_not_get_model_deployments_exception` enum value for `databricks.sdk.service.dashboards.MessageErrorType`.
+* [Breaking] Removed `jwks_uri` field for `databricks.sdk.service.oauth2.OidcFederationPolicy`.
+
+
 ## Release v0.45.0
 
 ### New Features and Improvements
```
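The v0.46.0 entry above describes how to opt in to the experimental async token refresh. Below is a minimal sketch of doing the same from Python; only the environment variable name comes from the changelog, the rest is ordinary databricks-sdk usage and should be treated as illustrative.

```python
# Hedged sketch: opt in to the experimental async token refresh (v0.46.0+).
# Only DATABRICKS_ENABLE_EXPERIMENTAL_ASYNC_TOKEN_REFRESH is taken from the
# changelog above; everything else is generic SDK usage.
import os

from databricks.sdk import WorkspaceClient

# Must be set before the client (and its Config) is constructed.
os.environ["DATABRICKS_ENABLE_EXPERIMENTAL_ASYNC_TOKEN_REFRESH"] = "1"

w = WorkspaceClient()  # resolves the flag from the environment
print(w.config.enable_experimental_async_token_refresh)
```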
{databricks_sdk-0.45.0 → databricks_sdk-0.47.0}/PKG-INFO

```diff
@@ -1,6 +1,6 @@
-Metadata-Version: 2.
+Metadata-Version: 2.4
 Name: databricks-sdk
-Version: 0.
+Version: 0.47.0
 Summary: Databricks SDK for Python (Beta)
 Project-URL: Documentation, https://databricks-sdk-py.readthedocs.io
 Keywords: databricks,sdk
@@ -44,12 +44,13 @@ Requires-Dist: langchain-openai; python_version > "3.7" and extra == "dev"
 Requires-Dist: httpx; extra == "dev"
 Requires-Dist: build; extra == "dev"
 Provides-Extra: notebook
-Requires-Dist: ipython<
+Requires-Dist: ipython<10,>=8; extra == "notebook"
 Requires-Dist: ipywidgets<9,>=8; extra == "notebook"
 Provides-Extra: openai
 Requires-Dist: openai; extra == "openai"
 Requires-Dist: langchain-openai; python_version > "3.7" and extra == "openai"
 Requires-Dist: httpx; extra == "openai"
+Dynamic: license-file
 
 # Databricks SDK for Python (Beta)
 
```
{databricks_sdk-0.45.0 → databricks_sdk-0.47.0}/databricks/sdk/__init__.py

```diff
@@ -8,6 +8,7 @@ import databricks.sdk.dbutils as dbutils
 import databricks.sdk.service as service
 from databricks.sdk import azure
 from databricks.sdk.credentials_provider import CredentialsStrategy
+from databricks.sdk.data_plane import DataPlaneTokenSource
 from databricks.sdk.mixins.compute import ClustersExt
 from databricks.sdk.mixins.files import DbfsExt, FilesExt
 from databricks.sdk.mixins.jobs import JobsExt
@@ -63,7 +64,8 @@ from databricks.sdk.service.marketplace import (
     ProviderExchangeFiltersAPI, ProviderExchangesAPI, ProviderFilesAPI,
     ProviderListingsAPI, ProviderPersonalizationRequestsAPI,
     ProviderProviderAnalyticsDashboardsAPI, ProviderProvidersAPI)
-from databricks.sdk.service.ml import ExperimentsAPI,
+from databricks.sdk.service.ml import (ExperimentsAPI, ForecastingAPI,
+                                       ModelRegistryAPI)
 from databricks.sdk.service.oauth2 import (AccountFederationPolicyAPI,
                                            CustomAppIntegrationAPI,
                                            OAuthPublishedAppsAPI,
@@ -284,8 +286,11 @@ class WorkspaceClient:
         self._secrets = service.workspace.SecretsAPI(self._api_client)
         self._service_principals = service.iam.ServicePrincipalsAPI(self._api_client)
         self._serving_endpoints = serving_endpoints
+        serving_endpoints_data_plane_token_source = DataPlaneTokenSource(
+            self._config.host, self._config.oauth_token, not self._config.enable_experimental_async_token_refresh
+        )
         self._serving_endpoints_data_plane = service.serving.ServingEndpointsDataPlaneAPI(
-            self._api_client, serving_endpoints
+            self._api_client, serving_endpoints, serving_endpoints_data_plane_token_source
         )
         self._settings = service.settings.SettingsAPI(self._api_client)
         self._shares = service.sharing.SharesAPI(self._api_client)
@@ -305,6 +310,7 @@ class WorkspaceClient:
         self._workspace = WorkspaceExt(self._api_client)
         self._workspace_bindings = service.catalog.WorkspaceBindingsAPI(self._api_client)
         self._workspace_conf = service.settings.WorkspaceConfAPI(self._api_client)
+        self._forecasting = service.ml.ForecastingAPI(self._api_client)
 
     @property
     def config(self) -> client.Config:
@@ -808,6 +814,11 @@ class WorkspaceClient:
         """This API allows updating known workspace settings for advanced users."""
         return self._workspace_conf
 
+    @property
+    def forecasting(self) -> service.ml.ForecastingAPI:
+        """The Forecasting API allows you to create and get serverless forecasting experiments."""
+        return self._forecasting
+
     def get_workspace_id(self) -> int:
         """Get the workspace ID of the workspace that this client is connected to."""
         response = self._api_client.do("GET", "/api/2.0/preview/scim/v2/Me", response_headers=["X-Databricks-Org-Id"])
```
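The __init__.py hunk above wires the new Forecasting service into WorkspaceClient and exposes it as a property. A minimal sketch of reaching it; the property name and its ForecastingAPI type come from the diff, while client construction and authentication are whatever the environment already provides.

```python
# Hedged sketch: the forecasting accessor added to WorkspaceClient in v0.46.0.
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()
forecasting = w.forecasting  # databricks.sdk.service.ml.ForecastingAPI
# Per the v0.47.0 changelog, create_experiment() now has a new required
# argument order including forecast_granularity, so check the docs before calling it.
```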
{databricks_sdk-0.45.0 → databricks_sdk-0.47.0}/databricks/sdk/config.py

```diff
@@ -95,6 +95,10 @@ class Config:
     max_connections_per_pool: int = ConfigAttribute()
     databricks_environment: Optional[DatabricksEnvironment] = None
 
+    enable_experimental_async_token_refresh: bool = ConfigAttribute(
+        env="DATABRICKS_ENABLE_EXPERIMENTAL_ASYNC_TOKEN_REFRESH"
+    )
+
     enable_experimental_files_api_client: bool = ConfigAttribute(env="DATABRICKS_ENABLE_EXPERIMENTAL_FILES_API_CLIENT")
     files_api_client_download_max_total_recovers = None
     files_api_client_download_max_total_recovers_without_progressing = 1
```
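Besides the environment variable, the new flag can be supplied like any other Config attribute. A sketch under the assumption that Config accepts its attributes as keyword arguments, as it does for existing ones; the host and token values are placeholders.

```python
# Illustrative sketch: enabling the flag programmatically rather than via the
# DATABRICKS_ENABLE_EXPERIMENTAL_ASYNC_TOKEN_REFRESH environment variable.
from databricks.sdk.core import Config

cfg = Config(
    host="https://<workspace-url>",   # placeholder
    token="<personal-access-token>",  # placeholder
    enable_experimental_async_token_refresh=True,
)
```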
@@ -191,6 +191,7 @@ def oauth_service_principal(cfg: "Config") -> Optional[CredentialsProvider]:
|
|
|
191
191
|
token_url=oidc.token_endpoint,
|
|
192
192
|
scopes=["all-apis"],
|
|
193
193
|
use_header=True,
|
|
194
|
+
disable_async=not cfg.enable_experimental_async_token_refresh,
|
|
194
195
|
)
|
|
195
196
|
|
|
196
197
|
def inner() -> Dict[str, str]:
|
|
@@ -290,6 +291,7 @@ def azure_service_principal(cfg: "Config") -> CredentialsProvider:
|
|
|
290
291
|
token_url=f"{aad_endpoint}{cfg.azure_tenant_id}/oauth2/token",
|
|
291
292
|
endpoint_params={"resource": resource},
|
|
292
293
|
use_params=True,
|
|
294
|
+
disable_async=not cfg.enable_experimental_async_token_refresh,
|
|
293
295
|
)
|
|
294
296
|
|
|
295
297
|
_ensure_host_present(cfg, token_source_for)
|
|
@@ -355,6 +357,7 @@ def github_oidc_azure(cfg: "Config") -> Optional[CredentialsProvider]:
|
|
|
355
357
|
token_url=f"{aad_endpoint}{cfg.azure_tenant_id}/oauth2/token",
|
|
356
358
|
endpoint_params=params,
|
|
357
359
|
use_params=True,
|
|
360
|
+
disable_async=not cfg.enable_experimental_async_token_refresh,
|
|
358
361
|
)
|
|
359
362
|
|
|
360
363
|
def refreshed_headers() -> Dict[str, str]:
|
|
@@ -458,8 +461,9 @@ class CliTokenSource(Refreshable):
|
|
|
458
461
|
token_type_field: str,
|
|
459
462
|
access_token_field: str,
|
|
460
463
|
expiry_field: str,
|
|
464
|
+
disable_async: bool = True,
|
|
461
465
|
):
|
|
462
|
-
super().__init__()
|
|
466
|
+
super().__init__(disable_async=disable_async)
|
|
463
467
|
self._cmd = cmd
|
|
464
468
|
self._token_type_field = token_type_field
|
|
465
469
|
self._access_token_field = access_token_field
|
|
@@ -690,6 +694,7 @@ class DatabricksCliTokenSource(CliTokenSource):
|
|
|
690
694
|
token_type_field="token_type",
|
|
691
695
|
access_token_field="access_token",
|
|
692
696
|
expiry_field="expiry",
|
|
697
|
+
disable_async=not cfg.enable_experimental_async_token_refresh,
|
|
693
698
|
)
|
|
694
699
|
|
|
695
700
|
@staticmethod
|
|
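Every token source in the hunk above now forwards a disable_async flag, derived from the config, to the Refreshable base class. The sketch below mirrors that pattern in a toy token source; the databricks.sdk.oauth import path and the Token constructor fields are assumptions made for illustration, not a documented extension point.

```python
# Assumption-heavy sketch of the disable_async pattern introduced above.
from datetime import datetime, timedelta

from databricks.sdk.oauth import Refreshable, Token  # assumed import path


class StaticTokenSource(Refreshable):
    """Toy token source that always returns the same bearer token."""

    def __init__(self, access_token: str, disable_async: bool = True):
        # Mirror CliTokenSource above: forward the flag to the Refreshable base.
        super().__init__(disable_async=disable_async)
        self._access_token = access_token

    def refresh(self) -> Token:
        # Token fields (access_token, token_type, expiry) are assumed here.
        return Token(
            access_token=self._access_token,
            token_type="Bearer",
            expiry=datetime.now() + timedelta(hours=1),
        )
```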
{databricks_sdk-0.45.0 → databricks_sdk-0.47.0}/databricks/sdk/data_plane.py

```diff
@@ -2,7 +2,7 @@ from __future__ import annotations
 
 import threading
 from dataclasses import dataclass
-from typing import Callable,
+from typing import Callable, Optional
 from urllib import parse
 
 from databricks.sdk import oauth
@@ -88,61 +88,3 @@ class DataPlaneDetails:
     """URL used to query the endpoint through the DataPlane."""
     token: Token
     """Token to query the DataPlane endpoint."""
-
-
-## Old implementation. #TODO: Remove after the new implementation is used
-
-
-class DataPlaneService:
-    """Helper class to fetch and manage DataPlane details."""
-
-    from .service.serving import DataPlaneInfo
-
-    def __init__(self):
-        self._data_plane_info = {}
-        self._tokens = {}
-        self._lock = threading.Lock()
-
-    def get_data_plane_details(
-        self,
-        method: str,
-        params: List[str],
-        info_getter: Callable[[], DataPlaneInfo],
-        refresh: Callable[[str], Token],
-    ):
-        """Get and cache information required to query a Data Plane endpoint using the provided methods.
-
-        Returns a cached DataPlaneDetails if the details have already been fetched previously and are still valid.
-        If not, it uses the provided functions to fetch the details.
-
-        :param method: method name. Used to construct a unique key for the cache.
-        :param params: path params used in the "get" operation which uniquely determine the object. Used to construct a unique key for the cache.
-        :param info_getter: function which returns the DataPlaneInfo. It will only be called if the information is not already present in the cache.
-        :param refresh: function to refresh the token. It will only be called if the token is missing or expired.
-        """
-        all_elements = params.copy()
-        all_elements.insert(0, method)
-        map_key = "/".join(all_elements)
-        info = self._data_plane_info.get(map_key)
-        if not info:
-            self._lock.acquire()
-            try:
-                info = self._data_plane_info.get(map_key)
-                if not info:
-                    info = info_getter()
-                    self._data_plane_info[map_key] = info
-            finally:
-                self._lock.release()
-
-        token = self._tokens.get(map_key)
-        if not token or not token.valid:
-            self._lock.acquire()
-            token = self._tokens.get(map_key)
-            try:
-                if not token or not token.valid:
-                    token = refresh(info.authorization_details)
-                    self._tokens[map_key] = token
-            finally:
-                self._lock.release()
-
-        return DataPlaneDetails(endpoint_url=info.endpoint_url, token=token)
```
{databricks_sdk-0.45.0 → databricks_sdk-0.47.0}/databricks/sdk/oauth.py

```diff
@@ -426,12 +426,16 @@ class SessionCredentials(Refreshable):
         client_id: str,
         client_secret: str = None,
         redirect_url: str = None,
+        disable_async: bool = True,
     ):
         self._token_endpoint = token_endpoint
         self._client_id = client_id
         self._client_secret = client_secret
         self._redirect_url = redirect_url
-        super().__init__(
+        super().__init__(
+            token=token,
+            disable_async=disable_async,
+        )
 
     def as_dict(self) -> dict:
         return {"token": self.token().as_dict()}
@@ -625,7 +629,11 @@ class OAuthClient:
     ):
 
         if not scopes:
-
+            # all-apis ensures that the returned OAuth token can be used with all APIs, aside
+            # from direct-to-dataplane APIs.
+            # offline_access ensures that the response from the Authorization server includes
+            # a refresh token.
+            scopes = ["all-apis", "offline_access"]
 
         self.redirect_url = redirect_url
         self._client_id = client_id
@@ -650,8 +658,6 @@
             return lambda: {}
 
         config = Config(host=host, credentials_strategy=noop_credentials)
-        if not scopes:
-            scopes = ["all-apis"]
         oidc = config.oidc_endpoints
         if not oidc:
             raise ValueError(f"{host} does not support OAuth")
@@ -708,9 +714,10 @@ class ClientCredentials(Refreshable):
     scopes: List[str] = None
     use_params: bool = False
    use_header: bool = False
+    disable_async: bool = True
 
     def __post_init__(self):
-        super().__init__()
+        super().__init__(disable_async=self.disable_async)
 
     def refresh(self) -> Token:
         params = {"grant_type": "client_credentials"}
```
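The OAuthClient change above adds offline_access to the default scopes, which is what the v0.47.0 bug fix relies on: browser-based logins now come back with a refresh token. A minimal sketch of the external-browser strategy the changelog refers to; the host is a placeholder, and auth_type="external-browser" is a pre-existing SDK option rather than something introduced by this diff.

```python
# Hedged sketch: the `external-browser` credentials strategy from the
# v0.47.0 bug-fix entry. With offline_access in the default scopes, the
# cached session credentials now include a refresh token.
from databricks.sdk import WorkspaceClient

w = WorkspaceClient(
    host="https://<workspace-url>",  # placeholder
    auth_type="external-browser",
)
print(w.current_user.me().user_name)  # first call triggers the browser login
```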