databricks-sdk 0.66.0__tar.gz → 0.68.0__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {databricks_sdk-0.66.0 → databricks_sdk-0.68.0}/CHANGELOG.md +267 -0
- {databricks_sdk-0.66.0 → databricks_sdk-0.68.0}/PKG-INFO +4 -2
- {databricks_sdk-0.66.0 → databricks_sdk-0.68.0}/README.md +2 -1
- {databricks_sdk-0.66.0 → databricks_sdk-0.68.0}/databricks/sdk/__init__.py +10 -3
- {databricks_sdk-0.66.0 → databricks_sdk-0.68.0}/databricks/sdk/_base_client.py +4 -1
- databricks_sdk-0.68.0/databricks/sdk/common/lro.py +17 -0
- databricks_sdk-0.68.0/databricks/sdk/common/types/fieldmask.py +39 -0
- {databricks_sdk-0.66.0 → databricks_sdk-0.68.0}/databricks/sdk/credentials_provider.py +61 -12
- {databricks_sdk-0.66.0 → databricks_sdk-0.68.0}/databricks/sdk/dbutils.py +5 -1
- {databricks_sdk-0.66.0 → databricks_sdk-0.68.0}/databricks/sdk/errors/parser.py +8 -3
- {databricks_sdk-0.66.0 → databricks_sdk-0.68.0}/databricks/sdk/mixins/files.py +1 -0
- databricks_sdk-0.68.0/databricks/sdk/oidc_token_supplier.py +108 -0
- databricks_sdk-0.68.0/databricks/sdk/retries.py +169 -0
- databricks_sdk-0.68.0/databricks/sdk/service/__init__.py +0 -0
- databricks_sdk-0.68.0/databricks/sdk/service/_internal.py +164 -0
- {databricks_sdk-0.66.0 → databricks_sdk-0.68.0}/databricks/sdk/service/agentbricks.py +1 -1
- {databricks_sdk-0.66.0 → databricks_sdk-0.68.0}/databricks/sdk/service/apps.py +264 -1
- {databricks_sdk-0.66.0 → databricks_sdk-0.68.0}/databricks/sdk/service/billing.py +2 -3
- {databricks_sdk-0.66.0 → databricks_sdk-0.68.0}/databricks/sdk/service/catalog.py +1030 -537
- {databricks_sdk-0.66.0 → databricks_sdk-0.68.0}/databricks/sdk/service/cleanrooms.py +3 -3
- {databricks_sdk-0.66.0 → databricks_sdk-0.68.0}/databricks/sdk/service/compute.py +21 -33
- {databricks_sdk-0.66.0 → databricks_sdk-0.68.0}/databricks/sdk/service/dashboards.py +51 -3
- {databricks_sdk-0.66.0 → databricks_sdk-0.68.0}/databricks/sdk/service/database.py +99 -8
- databricks_sdk-0.68.0/databricks/sdk/service/dataquality.py +1145 -0
- {databricks_sdk-0.66.0 → databricks_sdk-0.68.0}/databricks/sdk/service/files.py +2 -1
- {databricks_sdk-0.66.0 → databricks_sdk-0.68.0}/databricks/sdk/service/iam.py +6 -5
- {databricks_sdk-0.66.0 → databricks_sdk-0.68.0}/databricks/sdk/service/iamv2.py +1 -1
- {databricks_sdk-0.66.0 → databricks_sdk-0.68.0}/databricks/sdk/service/jobs.py +6 -9
- {databricks_sdk-0.66.0 → databricks_sdk-0.68.0}/databricks/sdk/service/marketplace.py +3 -1
- {databricks_sdk-0.66.0 → databricks_sdk-0.68.0}/databricks/sdk/service/ml.py +3 -1
- {databricks_sdk-0.66.0 → databricks_sdk-0.68.0}/databricks/sdk/service/oauth2.py +1 -1
- {databricks_sdk-0.66.0 → databricks_sdk-0.68.0}/databricks/sdk/service/pipelines.py +5 -6
- {databricks_sdk-0.66.0 → databricks_sdk-0.68.0}/databricks/sdk/service/provisioning.py +544 -655
- {databricks_sdk-0.66.0 → databricks_sdk-0.68.0}/databricks/sdk/service/qualitymonitorv2.py +1 -1
- {databricks_sdk-0.66.0 → databricks_sdk-0.68.0}/databricks/sdk/service/serving.py +59 -1
- {databricks_sdk-0.66.0 → databricks_sdk-0.68.0}/databricks/sdk/service/settings.py +5 -2
- {databricks_sdk-0.66.0 → databricks_sdk-0.68.0}/databricks/sdk/service/settingsv2.py +1 -1
- {databricks_sdk-0.66.0 → databricks_sdk-0.68.0}/databricks/sdk/service/sharing.py +12 -3
- {databricks_sdk-0.66.0 → databricks_sdk-0.68.0}/databricks/sdk/service/sql.py +305 -70
- {databricks_sdk-0.66.0 → databricks_sdk-0.68.0}/databricks/sdk/service/tags.py +1 -1
- {databricks_sdk-0.66.0 → databricks_sdk-0.68.0}/databricks/sdk/service/vectorsearch.py +3 -1
- {databricks_sdk-0.66.0 → databricks_sdk-0.68.0}/databricks/sdk/service/workspace.py +70 -17
- databricks_sdk-0.68.0/databricks/sdk/version.py +1 -0
- {databricks_sdk-0.66.0 → databricks_sdk-0.68.0}/databricks_sdk.egg-info/PKG-INFO +4 -2
- {databricks_sdk-0.66.0 → databricks_sdk-0.68.0}/databricks_sdk.egg-info/SOURCES.txt +4 -0
- {databricks_sdk-0.66.0 → databricks_sdk-0.68.0}/databricks_sdk.egg-info/requires.txt +1 -0
- {databricks_sdk-0.66.0 → databricks_sdk-0.68.0}/pyproject.toml +1 -0
- databricks_sdk-0.66.0/databricks/sdk/oidc_token_supplier.py +0 -28
- databricks_sdk-0.66.0/databricks/sdk/retries.py +0 -69
- databricks_sdk-0.66.0/databricks/sdk/service/_internal.py +0 -72
- databricks_sdk-0.66.0/databricks/sdk/version.py +0 -1
- {databricks_sdk-0.66.0 → databricks_sdk-0.68.0}/CONTRIBUTING.md +0 -0
- {databricks_sdk-0.66.0 → databricks_sdk-0.68.0}/DCO +0 -0
- {databricks_sdk-0.66.0 → databricks_sdk-0.68.0}/LICENSE +0 -0
- {databricks_sdk-0.66.0 → databricks_sdk-0.68.0}/MANIFEST.in +0 -0
- {databricks_sdk-0.66.0 → databricks_sdk-0.68.0}/Makefile +0 -0
- {databricks_sdk-0.66.0 → databricks_sdk-0.68.0}/NOTICE +0 -0
- {databricks_sdk-0.66.0 → databricks_sdk-0.68.0}/SECURITY.md +0 -0
- {databricks_sdk-0.66.0 → databricks_sdk-0.68.0}/databricks/__init__.py +0 -0
- {databricks_sdk-0.66.0 → databricks_sdk-0.68.0}/databricks/sdk/_property.py +0 -0
- {databricks_sdk-0.66.0 → databricks_sdk-0.68.0}/databricks/sdk/_widgets/__init__.py +0 -0
- {databricks_sdk-0.66.0 → databricks_sdk-0.68.0}/databricks/sdk/_widgets/default_widgets_utils.py +0 -0
- {databricks_sdk-0.66.0 → databricks_sdk-0.68.0}/databricks/sdk/_widgets/ipywidgets_utils.py +0 -0
- {databricks_sdk-0.66.0 → databricks_sdk-0.68.0}/databricks/sdk/azure.py +0 -0
- {databricks_sdk-0.66.0 → databricks_sdk-0.68.0}/databricks/sdk/casing.py +0 -0
- {databricks_sdk-0.66.0 → databricks_sdk-0.68.0}/databricks/sdk/clock.py +0 -0
- {databricks_sdk-0.66.0/databricks/sdk/mixins → databricks_sdk-0.68.0/databricks/sdk/common/types}/__init__.py +0 -0
- {databricks_sdk-0.66.0 → databricks_sdk-0.68.0}/databricks/sdk/config.py +0 -0
- {databricks_sdk-0.66.0 → databricks_sdk-0.68.0}/databricks/sdk/core.py +0 -0
- {databricks_sdk-0.66.0 → databricks_sdk-0.68.0}/databricks/sdk/data_plane.py +0 -0
- {databricks_sdk-0.66.0 → databricks_sdk-0.68.0}/databricks/sdk/environments.py +0 -0
- {databricks_sdk-0.66.0 → databricks_sdk-0.68.0}/databricks/sdk/errors/__init__.py +0 -0
- {databricks_sdk-0.66.0 → databricks_sdk-0.68.0}/databricks/sdk/errors/base.py +0 -0
- {databricks_sdk-0.66.0 → databricks_sdk-0.68.0}/databricks/sdk/errors/customizer.py +0 -0
- {databricks_sdk-0.66.0 → databricks_sdk-0.68.0}/databricks/sdk/errors/deserializer.py +0 -0
- {databricks_sdk-0.66.0 → databricks_sdk-0.68.0}/databricks/sdk/errors/details.py +0 -0
- {databricks_sdk-0.66.0 → databricks_sdk-0.68.0}/databricks/sdk/errors/mapper.py +0 -0
- {databricks_sdk-0.66.0 → databricks_sdk-0.68.0}/databricks/sdk/errors/overrides.py +0 -0
- {databricks_sdk-0.66.0 → databricks_sdk-0.68.0}/databricks/sdk/errors/platform.py +0 -0
- {databricks_sdk-0.66.0 → databricks_sdk-0.68.0}/databricks/sdk/errors/private_link.py +0 -0
- {databricks_sdk-0.66.0 → databricks_sdk-0.68.0}/databricks/sdk/errors/sdk.py +0 -0
- {databricks_sdk-0.66.0 → databricks_sdk-0.68.0}/databricks/sdk/logger/__init__.py +0 -0
- {databricks_sdk-0.66.0 → databricks_sdk-0.68.0}/databricks/sdk/logger/round_trip_logger.py +0 -0
- {databricks_sdk-0.66.0/databricks/sdk/service → databricks_sdk-0.68.0/databricks/sdk/mixins}/__init__.py +0 -0
- {databricks_sdk-0.66.0 → databricks_sdk-0.68.0}/databricks/sdk/mixins/compute.py +0 -0
- {databricks_sdk-0.66.0 → databricks_sdk-0.68.0}/databricks/sdk/mixins/jobs.py +0 -0
- {databricks_sdk-0.66.0 → databricks_sdk-0.68.0}/databricks/sdk/mixins/open_ai_client.py +0 -0
- {databricks_sdk-0.66.0 → databricks_sdk-0.68.0}/databricks/sdk/mixins/sharing.py +0 -0
- {databricks_sdk-0.66.0 → databricks_sdk-0.68.0}/databricks/sdk/mixins/workspace.py +0 -0
- {databricks_sdk-0.66.0 → databricks_sdk-0.68.0}/databricks/sdk/oauth.py +0 -0
- {databricks_sdk-0.66.0 → databricks_sdk-0.68.0}/databricks/sdk/oidc.py +0 -0
- {databricks_sdk-0.66.0 → databricks_sdk-0.68.0}/databricks/sdk/py.typed +0 -0
- {databricks_sdk-0.66.0 → databricks_sdk-0.68.0}/databricks/sdk/runtime/__init__.py +0 -0
- {databricks_sdk-0.66.0 → databricks_sdk-0.68.0}/databricks/sdk/runtime/dbutils_stub.py +0 -0
- {databricks_sdk-0.66.0 → databricks_sdk-0.68.0}/databricks/sdk/useragent.py +0 -0
- {databricks_sdk-0.66.0 → databricks_sdk-0.68.0}/databricks_sdk.egg-info/dependency_links.txt +0 -0
- {databricks_sdk-0.66.0 → databricks_sdk-0.68.0}/databricks_sdk.egg-info/top_level.txt +0 -0
- {databricks_sdk-0.66.0 → databricks_sdk-0.68.0}/setup.cfg +0 -0
@@ -1,5 +1,272 @@
# Version changelog

## Release v0.68.0

### New Features and Improvements

* Add native support for authentication through Azure DevOps OIDC.

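The Azure DevOps OIDC entry above is the headline change of this release. Below is a minimal sketch of how workload identity federation is typically wired up with the Python SDK from an Azure DevOps pipeline; the `auth_type` value and the way the pipeline token is picked up are assumptions modeled on the SDK's existing GitHub OIDC flow (see the new `databricks/sdk/oidc_token_supplier.py` in this diff), not something this changelog spells out, so verify the exact names against the 0.68.0 documentation.

```python
from databricks.sdk import WorkspaceClient

# Hypothetical pipeline usage: the SDK exchanges the Azure DevOps job's OIDC
# token for a short-lived Databricks token via workload identity federation,
# so no client secret has to be stored in the pipeline.
w = WorkspaceClient(
    host="https://adb-1234567890123456.7.azuredatabricks.net",  # example workspace URL
    client_id="00000000-0000-0000-0000-000000000000",  # federated service principal (assumed)
    auth_type="azure-devops-oidc",  # assumed selector name; verify against the 0.68.0 docs
)

# Any authenticated call confirms the token exchange worked.
print(w.current_user.me().user_name)
```
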
### Bug Fixes
* Fix a security issue that resulted in bearer tokens being logged in exception messages.

### API Changes

* Add `databricks.sdk.service.dataquality` package.
* Add [w.data_quality](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/dataquality/data_quality.html) workspace-level service.
* Add `create_update()` and `get_update()` methods for [w.apps](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/apps/apps.html) workspace-level service.
* Add `compute_size` field for `databricks.sdk.service.apps.App`.
* Add `genie_space` field for `databricks.sdk.service.apps.AppResource`.
* Add `skip_validation` field for `databricks.sdk.service.catalog.AccountsCreateStorageCredential`.
* Add `skip_validation` field for `databricks.sdk.service.catalog.AccountsUpdateStorageCredential`.
* Add `aliases`, `browse_only`, `created_at`, `created_by`, `full_name`, `metastore_id`, `owner`, `updated_at` and `updated_by` fields for `databricks.sdk.service.catalog.CreateRegisteredModelRequest`.
* Add `catalog_name`, `id`, `model_name` and `schema_name` fields for `databricks.sdk.service.catalog.RegisteredModelAlias`.
* Add `aliases`, `catalog_name`, `created_at`, `created_by`, `id`, `metastore_id`, `model_name`, `model_version_dependencies`, `run_id`, `run_workspace_id`, `schema_name`, `source`, `status`, `storage_location`, `updated_at` and `updated_by` fields for `databricks.sdk.service.catalog.UpdateModelVersionRequest`.
* Add `aliases`, `browse_only`, `catalog_name`, `created_at`, `created_by`, `metastore_id`, `name`, `schema_name`, `storage_location`, `updated_at` and `updated_by` fields for `databricks.sdk.service.catalog.UpdateRegisteredModelRequest`.
* Add `key_region` field for `databricks.sdk.service.provisioning.CreateAwsKeyInfo`.
* Add `role_arn` field for `databricks.sdk.service.provisioning.CreateStorageConfigurationRequest`.
* Add `azure_key_info` field for `databricks.sdk.service.provisioning.CustomerManagedKey`.
* [Breaking] Add `customer_facing_private_access_settings` field for `databricks.sdk.service.provisioning.ReplacePrivateAccessSettingsRequest`.
* Add `role_arn` field for `databricks.sdk.service.provisioning.StorageConfiguration`.
* [Breaking] Add `customer_facing_workspace` field for `databricks.sdk.service.provisioning.UpdateWorkspaceRequest`.
* Add `update_mask` field for `databricks.sdk.service.provisioning.UpdateWorkspaceRequest`.
* Add `compute_mode`, `network`, `network_connectivity_config_id` and `storage_mode` fields for `databricks.sdk.service.provisioning.Workspace`.
* Add `enable_serverless_compute` field for `databricks.sdk.service.sql.GetWorkspaceWarehouseConfigResponse`.
* Add `page_size` and `page_token` fields for `databricks.sdk.service.sql.ListWarehousesRequest`.
* Add `next_page_token` field for `databricks.sdk.service.sql.ListWarehousesResponse`.
* Add `enable_serverless_compute` field for `databricks.sdk.service.sql.SetWorkspaceWarehouseConfigRequest`.
* Add `model_version_status_unknown` enum value for `databricks.sdk.service.catalog.ModelVersionInfoStatus`.
* Add `k8s_active_pod_quota_exceeded` and `cloud_account_pod_quota_exceeded` enum values for `databricks.sdk.service.compute.TerminationReasonCode`.
* Add `internal_catalog_asset_creation_ongoing_exception`, `internal_catalog_asset_creation_failed_exception` and `internal_catalog_asset_creation_unsupported_exception` enum values for `databricks.sdk.service.dashboards.MessageErrorType`.
* Add `ssh_bootstrap_failure`, `aws_inaccessible_kms_key_failure`, `init_container_not_finished`, `spark_image_download_throttled`, `spark_image_not_found`, `cluster_operation_throttled`, `cluster_operation_timeout`, `serverless_long_running_terminated`, `azure_packed_deployment_partial_failure`, `invalid_worker_image_failure`, `workspace_update`, `invalid_aws_parameter`, `driver_out_of_disk`, `driver_out_of_memory`, `driver_launch_timeout`, `driver_unexpected_failure`, `unexpected_pod_recreation`, `gcp_inaccessible_kms_key_failure`, `gcp_kms_key_permission_denied`, `driver_eviction`, `user_initiated_vm_termination`, `gcp_iam_timeout`, `aws_resource_quota_exceeded`, `cloud_account_setup_failure`, `aws_invalid_key_pair`, `driver_pod_creation_failure`, `maintenance_mode`, `internal_capacity_failure`, `executor_pod_unscheduled`, `storage_download_failure_slow`, `storage_download_failure_throttled`, `dynamic_spark_conf_size_exceeded`, `aws_instance_profile_update_failure`, `instance_pool_not_found`, `instance_pool_max_capacity_reached`, `aws_invalid_kms_key_state`, `gcp_insufficient_capacity`, `gcp_api_rate_quota_exceeded`, `gcp_resource_quota_exceeded`, `gcp_ip_space_exhausted`, `gcp_service_account_access_denied`, `gcp_service_account_not_found`, `gcp_forbidden`, `gcp_not_found`, `resource_usage_blocked`, `data_access_config_changed`, `access_token_failure`, `invalid_instance_placement_protocol`, `budget_policy_resolution_failure`, `in_penalty_box`, `disaster_recovery_replication`, `bootstrap_timeout_due_to_misconfig`, `instance_unreachable_due_to_misconfig`, `storage_download_failure_due_to_misconfig`, `control_plane_request_failure_due_to_misconfig`, `cloud_provider_launch_failure_due_to_misconfig`, `gcp_subnet_not_ready`, `cloud_operation_cancelled`, `cloud_provider_instance_not_launched`, `gcp_trusted_image_projects_violated`, `budget_policy_limit_enforcement_activated`, `eos_spark_image`, `no_matched_k8s`, `lazy_allocation_timeout`, `driver_node_unreachable`, `secret_creation_failure`, `pod_scheduling_failure`, `pod_assignment_failure`, `allocation_timeout`, `allocation_timeout_no_unallocated_clusters`, `allocation_timeout_no_matched_clusters`, `allocation_timeout_no_ready_clusters`, `allocation_timeout_no_warmed_up_clusters`, `allocation_timeout_node_daemon_not_ready`, `allocation_timeout_no_healthy_clusters`, `netvisor_setup_timeout`, `no_matched_k8s_testing_tag`, `cloud_provider_resource_stockout_due_to_misconfig`, `gke_based_cluster_termination`, `allocation_timeout_no_healthy_and_warmed_up_clusters`, `docker_invalid_os_exception`, `docker_container_creation_exception`, `docker_image_too_large_for_instance_exception`, `dns_resolution_error`, `gcp_denied_by_org_policy`, `secret_permission_denied`, `network_check_nic_failure`, `network_check_dns_server_failure`, `network_check_storage_failure`, `network_check_metadata_endpoint_failure`, `network_check_control_plane_failure`, `network_check_multiple_components_failure`, `driver_unhealthy`, `security_agents_failed_initial_verification`, `driver_dns_resolution_failure`, `no_activated_k8s`, `usage_policy_entitlement_denied`, `no_activated_k8s_testing_tag`, `k8s_active_pod_quota_exceeded` and `cloud_account_pod_quota_exceeded` enum values for `databricks.sdk.service.sql.TerminationReasonCode`.
* [Breaking] Change `create()` method for [a.account_metastore_assignments](https://databricks-sdk-py.readthedocs.io/en/latest/account/catalog/account_metastore_assignments.html) account-level service to start returning `databricks.sdk.service.catalog.AccountsCreateMetastoreAssignmentResponse` dataclass.
* [Breaking] Change `delete()` method for [a.account_metastore_assignments](https://databricks-sdk-py.readthedocs.io/en/latest/account/catalog/account_metastore_assignments.html) account-level service to start returning `databricks.sdk.service.catalog.AccountsDeleteMetastoreAssignmentResponse` dataclass.
* [Breaking] Change `update()` method for [a.account_metastore_assignments](https://databricks-sdk-py.readthedocs.io/en/latest/account/catalog/account_metastore_assignments.html) account-level service to start returning `databricks.sdk.service.catalog.AccountsUpdateMetastoreAssignmentResponse` dataclass.
* [Breaking] Change `create()` method for [a.account_metastores](https://databricks-sdk-py.readthedocs.io/en/latest/account/catalog/account_metastores.html) account-level service to return `databricks.sdk.service.catalog.AccountsCreateMetastoreResponse` dataclass.
* [Breaking] Change `delete()` method for [a.account_metastores](https://databricks-sdk-py.readthedocs.io/en/latest/account/catalog/account_metastores.html) account-level service to start returning `databricks.sdk.service.catalog.AccountsDeleteMetastoreResponse` dataclass.
* [Breaking] Change `get()` method for [a.account_metastores](https://databricks-sdk-py.readthedocs.io/en/latest/account/catalog/account_metastores.html) account-level service to return `databricks.sdk.service.catalog.AccountsGetMetastoreResponse` dataclass.
* [Breaking] Change `list()` method for [a.account_metastores](https://databricks-sdk-py.readthedocs.io/en/latest/account/catalog/account_metastores.html) account-level service to return `databricks.sdk.service.catalog.AccountsListMetastoresResponse` dataclass.
* [Breaking] Change `update()` method for [a.account_metastores](https://databricks-sdk-py.readthedocs.io/en/latest/account/catalog/account_metastores.html) account-level service to return `databricks.sdk.service.catalog.AccountsUpdateMetastoreResponse` dataclass.
* [Breaking] Change `create()` method for [a.account_storage_credentials](https://databricks-sdk-py.readthedocs.io/en/latest/account/catalog/account_storage_credentials.html) account-level service to return `databricks.sdk.service.catalog.AccountsCreateStorageCredentialInfo` dataclass.
* [Breaking] Change `delete()` method for [a.account_storage_credentials](https://databricks-sdk-py.readthedocs.io/en/latest/account/catalog/account_storage_credentials.html) account-level service to start returning `databricks.sdk.service.catalog.AccountsDeleteStorageCredentialResponse` dataclass.
* [Breaking] Change `update()` method for [a.account_storage_credentials](https://databricks-sdk-py.readthedocs.io/en/latest/account/catalog/account_storage_credentials.html) account-level service to return `databricks.sdk.service.catalog.AccountsUpdateStorageCredentialResponse` dataclass.
* [Breaking] Change `create()` method for [w.registered_models](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/catalog/registered_models.html) workspace-level service with new required argument order.
* [Breaking] Change `delete()` method for [a.credentials](https://databricks-sdk-py.readthedocs.io/en/latest/account/provisioning/credentials.html) account-level service to start returning `databricks.sdk.service.provisioning.Credential` dataclass.
* [Breaking] Change `delete()` method for [a.encryption_keys](https://databricks-sdk-py.readthedocs.io/en/latest/account/provisioning/encryption_keys.html) account-level service to start returning `databricks.sdk.service.provisioning.CustomerManagedKey` dataclass.
* [Breaking] Change `create()` method for [a.networks](https://databricks-sdk-py.readthedocs.io/en/latest/account/provisioning/networks.html) account-level service with new required argument order.
* [Breaking] Change `delete()` method for [a.networks](https://databricks-sdk-py.readthedocs.io/en/latest/account/provisioning/networks.html) account-level service to start returning `databricks.sdk.service.provisioning.Network` dataclass.
* [Breaking] Change `create()` and `replace()` methods for [a.private_access](https://databricks-sdk-py.readthedocs.io/en/latest/account/provisioning/private_access.html) account-level service with new required argument order.
* [Breaking] Change `delete()` and `replace()` methods for [a.private_access](https://databricks-sdk-py.readthedocs.io/en/latest/account/provisioning/private_access.html) account-level service to start returning `databricks.sdk.service.provisioning.PrivateAccessSettings` dataclass.
* [Breaking] Change `delete()` method for [a.storage](https://databricks-sdk-py.readthedocs.io/en/latest/account/provisioning/storage.html) account-level service to start returning `databricks.sdk.service.provisioning.StorageConfiguration` dataclass.
* [Breaking] Change `create()` method for [a.vpc_endpoints](https://databricks-sdk-py.readthedocs.io/en/latest/account/provisioning/vpc_endpoints.html) account-level service with new required argument order.
* [Breaking] Change `delete()` method for [a.vpc_endpoints](https://databricks-sdk-py.readthedocs.io/en/latest/account/provisioning/vpc_endpoints.html) account-level service to start returning `databricks.sdk.service.provisioning.VpcEndpoint` dataclass.
* [Breaking] Change `create()` and `update()` methods for [a.workspaces](https://databricks-sdk-py.readthedocs.io/en/latest/account/provisioning/workspaces.html) account-level service with new required argument order.
* [Breaking] Change `delete()` and `update()` methods for [a.workspaces](https://databricks-sdk-py.readthedocs.io/en/latest/account/provisioning/workspaces.html) account-level service to start returning `databricks.sdk.service.provisioning.Workspace` dataclass.
* [Breaking] Change `execute_statement()` method for [w.statement_execution](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/sql/statement_execution.html) workspace-level service. Method path has changed.
* [Breaking] Change `metastore_info` field for `databricks.sdk.service.catalog.AccountsCreateMetastore` to type `databricks.sdk.service.catalog.CreateAccountsMetastore` dataclass.
* [Breaking] Change `credential_info` field for `databricks.sdk.service.catalog.AccountsCreateStorageCredential` to type `databricks.sdk.service.catalog.CreateAccountsStorageCredential` dataclass.
* [Breaking] Change `metastore_info` field for `databricks.sdk.service.catalog.AccountsUpdateMetastore` to type `databricks.sdk.service.catalog.UpdateAccountsMetastore` dataclass.
* [Breaking] Change `credential_info` field for `databricks.sdk.service.catalog.AccountsUpdateStorageCredential` to type `databricks.sdk.service.catalog.UpdateAccountsStorageCredential` dataclass.
* Change `catalog_name`, `name` and `schema_name` fields for `databricks.sdk.service.catalog.CreateRegisteredModelRequest` to no longer be required.
* Change `network_name` field for `databricks.sdk.service.provisioning.CreateNetworkRequest` to no longer be required.
* Change `private_access_settings_name` and `region` fields for `databricks.sdk.service.provisioning.CreatePrivateAccessSettingsRequest` to no longer be required.
* Change `vpc_endpoint_name` field for `databricks.sdk.service.provisioning.CreateVpcEndpointRequest` to no longer be required.
* Change `workspace_name` field for `databricks.sdk.service.provisioning.CreateWorkspaceRequest` to no longer be required.
* [Breaking] Change `dataplane_relay` and `rest_api` fields for `databricks.sdk.service.provisioning.NetworkVpcEndpoints` to no longer be required.
* Change `dataplane_relay` and `rest_api` fields for `databricks.sdk.service.provisioning.NetworkVpcEndpoints` to no longer be required.
* [Breaking] Change waiter for [WorkspacesAPI.update](https://databricks-sdk-py.readthedocs.io/en/latest/account/provisioning/workspaces.html#databricks.sdk.service.provisioning.WorkspacesAPI.update) method.
* [Breaking] Remove `browse_only` field for `databricks.sdk.service.catalog.ModelVersionInfo`.
* [Breaking] Remove `jar_dependencies` field for `databricks.sdk.service.compute.Environment`.
* [Breaking] Remove `is_no_public_ip_enabled` field for `databricks.sdk.service.provisioning.CreateWorkspaceRequest`.
* [Breaking] Remove `allowed_vpc_endpoint_ids`, `private_access_level`, `private_access_settings_name`, `public_access_enabled` and `region` fields for `databricks.sdk.service.provisioning.ReplacePrivateAccessSettingsRequest`.
* [Breaking] Remove `external_id` field for `databricks.sdk.service.provisioning.StsRole`.
* [Breaking] Remove `aws_region`, `credentials_id`, `custom_tags`, `managed_services_customer_managed_key_id`, `network_connectivity_config_id`, `network_id`, `private_access_settings_id`, `storage_configuration_id` and `storage_customer_managed_key_id` fields for `databricks.sdk.service.provisioning.UpdateWorkspaceRequest`.
* [Breaking] Remove `external_customer_info` and `is_no_public_ip_enabled` fields for `databricks.sdk.service.provisioning.Workspace`.
* [Breaking] Remove `status_unspecified` enum value for `databricks.sdk.service.sql.Status`.
* Add `browse_only` field for `databricks.sdk.service.catalog.ModelVersionInfo`.
* Add `jar_dependencies` field for `databricks.sdk.service.compute.Environment`.
* Add `is_no_public_ip_enabled` field for `databricks.sdk.service.provisioning.CreateWorkspaceRequest`.
* Add `allowed_vpc_endpoint_ids`, `private_access_level` and `public_access_enabled` fields for `databricks.sdk.service.provisioning.ReplacePrivateAccessSettingsRequest`.
* [Breaking] Add `private_access_settings_name` and `region` fields for `databricks.sdk.service.provisioning.ReplacePrivateAccessSettingsRequest`.
* Add `external_id` field for `databricks.sdk.service.provisioning.StsRole`.
* Add `aws_region`, `credentials_id`, `custom_tags`, `managed_services_customer_managed_key_id`, `network_connectivity_config_id`, `network_id`, `private_access_settings_id`, `storage_configuration_id` and `storage_customer_managed_key_id` fields for `databricks.sdk.service.provisioning.UpdateWorkspaceRequest`.
* Add `external_customer_info` and `is_no_public_ip_enabled` fields for `databricks.sdk.service.provisioning.Workspace`.
* Add `status_unspecified` enum value for `databricks.sdk.service.sql.Status`.
* [Breaking] Change `create()` method for [a.account_metastore_assignments](https://databricks-sdk-py.readthedocs.io/en/latest/account/catalog/account_metastore_assignments.html) account-level service to no longer return `databricks.sdk.service.catalog.AccountsCreateMetastoreAssignmentResponse` dataclass.
* [Breaking] Change `delete()` method for [a.account_metastore_assignments](https://databricks-sdk-py.readthedocs.io/en/latest/account/catalog/account_metastore_assignments.html) account-level service to no longer return `databricks.sdk.service.catalog.AccountsDeleteMetastoreAssignmentResponse` dataclass.
* [Breaking] Change `update()` method for [a.account_metastore_assignments](https://databricks-sdk-py.readthedocs.io/en/latest/account/catalog/account_metastore_assignments.html) account-level service to no longer return `databricks.sdk.service.catalog.AccountsUpdateMetastoreAssignmentResponse` dataclass.
* [Breaking] Change `create()`, `get()` and `update()` methods for [a.account_metastores](https://databricks-sdk-py.readthedocs.io/en/latest/account/catalog/account_metastores.html) account-level service to return `databricks.sdk.service.catalog.AccountsMetastoreInfo` dataclass.
* [Breaking] Change `delete()` method for [a.account_metastores](https://databricks-sdk-py.readthedocs.io/en/latest/account/catalog/account_metastores.html) account-level service to no longer return `databricks.sdk.service.catalog.AccountsDeleteMetastoreResponse` dataclass.
* [Breaking] Change `list()` method for [a.account_metastores](https://databricks-sdk-py.readthedocs.io/en/latest/account/catalog/account_metastores.html) account-level service to return `databricks.sdk.service.catalog.ListMetastoresResponse` dataclass.
* [Breaking] Change `create()` and `update()` methods for [a.account_storage_credentials](https://databricks-sdk-py.readthedocs.io/en/latest/account/catalog/account_storage_credentials.html) account-level service to return `databricks.sdk.service.catalog.AccountsStorageCredentialInfo` dataclass.
* [Breaking] Change `delete()` method for [a.account_storage_credentials](https://databricks-sdk-py.readthedocs.io/en/latest/account/catalog/account_storage_credentials.html) account-level service to no longer return `databricks.sdk.service.catalog.AccountsDeleteStorageCredentialResponse` dataclass.
* [Breaking] Change `create()` method for [w.registered_models](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/catalog/registered_models.html) workspace-level service with new required argument order.
* [Breaking] Change `delete()` method for [a.credentials](https://databricks-sdk-py.readthedocs.io/en/latest/account/provisioning/credentials.html) account-level service to no longer return `databricks.sdk.service.provisioning.Credential` dataclass.
* [Breaking] Change `delete()` method for [a.encryption_keys](https://databricks-sdk-py.readthedocs.io/en/latest/account/provisioning/encryption_keys.html) account-level service to no longer return `databricks.sdk.service.provisioning.CustomerManagedKey` dataclass.
* [Breaking] Change `create()` method for [a.networks](https://databricks-sdk-py.readthedocs.io/en/latest/account/provisioning/networks.html) account-level service with new required argument order.
* [Breaking] Change `delete()` method for [a.networks](https://databricks-sdk-py.readthedocs.io/en/latest/account/provisioning/networks.html) account-level service to no longer return `databricks.sdk.service.provisioning.Network` dataclass.
* [Breaking] Change `create()` and `replace()` methods for [a.private_access](https://databricks-sdk-py.readthedocs.io/en/latest/account/provisioning/private_access.html) account-level service with new required argument order.
* [Breaking] Change `delete()` and `replace()` methods for [a.private_access](https://databricks-sdk-py.readthedocs.io/en/latest/account/provisioning/private_access.html) account-level service to no longer return `databricks.sdk.service.provisioning.PrivateAccessSettings` dataclass.
* [Breaking] Change `delete()` method for [a.storage](https://databricks-sdk-py.readthedocs.io/en/latest/account/provisioning/storage.html) account-level service to no longer return `databricks.sdk.service.provisioning.StorageConfiguration` dataclass.
* [Breaking] Change `create()` method for [a.vpc_endpoints](https://databricks-sdk-py.readthedocs.io/en/latest/account/provisioning/vpc_endpoints.html) account-level service with new required argument order.
* [Breaking] Change `delete()` method for [a.vpc_endpoints](https://databricks-sdk-py.readthedocs.io/en/latest/account/provisioning/vpc_endpoints.html) account-level service to no longer return `databricks.sdk.service.provisioning.VpcEndpoint` dataclass.
* [Breaking] Change `create()` and `update()` methods for [a.workspaces](https://databricks-sdk-py.readthedocs.io/en/latest/account/provisioning/workspaces.html) account-level service with new required argument order.
* [Breaking] Change `delete()` and `update()` methods for [a.workspaces](https://databricks-sdk-py.readthedocs.io/en/latest/account/provisioning/workspaces.html) account-level service to no longer return `databricks.sdk.service.provisioning.Workspace` dataclass.
* [Breaking] Change `execute_statement()` method for [w.statement_execution](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/sql/statement_execution.html) workspace-level service. Method path has changed.
* [Breaking] Change `metastore_info` field for `databricks.sdk.service.catalog.AccountsCreateMetastore` to type `databricks.sdk.service.catalog.CreateMetastore` dataclass.
* [Breaking] Change `credential_info` field for `databricks.sdk.service.catalog.AccountsCreateStorageCredential` to type `databricks.sdk.service.catalog.CreateStorageCredential` dataclass.
* [Breaking] Change `metastore_info` field for `databricks.sdk.service.catalog.AccountsUpdateMetastore` to type `databricks.sdk.service.catalog.UpdateMetastore` dataclass.
* [Breaking] Change `credential_info` field for `databricks.sdk.service.catalog.AccountsUpdateStorageCredential` to type `databricks.sdk.service.catalog.UpdateStorageCredential` dataclass.
* [Breaking] Change `catalog_name`, `name` and `schema_name` fields for `databricks.sdk.service.catalog.CreateRegisteredModelRequest` to be required.
* [Breaking] Change `name` field for `databricks.sdk.service.database.DatabaseInstanceRole` to no longer be required.
* Change `name` field for `databricks.sdk.service.database.DatabaseInstanceRole` to no longer be required.
* [Breaking] Change `network_name` field for `databricks.sdk.service.provisioning.CreateNetworkRequest` to be required.
* [Breaking] Change `private_access_settings_name` and `region` fields for `databricks.sdk.service.provisioning.CreatePrivateAccessSettingsRequest` to be required.
* [Breaking] Change `vpc_endpoint_name` field for `databricks.sdk.service.provisioning.CreateVpcEndpointRequest` to be required.
* [Breaking] Change `workspace_name` field for `databricks.sdk.service.provisioning.CreateWorkspaceRequest` to be required.
* Change `dataplane_relay` and `rest_api` fields for `databricks.sdk.service.provisioning.NetworkVpcEndpoints` to be required.
* [Breaking] Change `dataplane_relay` and `rest_api` fields for `databricks.sdk.service.provisioning.NetworkVpcEndpoints` to be required.
* [Breaking] Change waiter for [WorkspacesAPI.update](https://databricks-sdk-py.readthedocs.io/en/latest/account/provisioning/workspaces.html#databricks.sdk.service.provisioning.WorkspacesAPI.update) method.
* [Breaking] Remove `databricks.sdk.service.dataquality` package.
* [Breaking] Remove `create_update()` and `get_update()` methods for [w.apps](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/apps/apps.html) workspace-level service.
* [Breaking] Remove `update_notifications()` method for [w.serving_endpoints](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/serving/serving_endpoints.html) workspace-level service.
* [Breaking] Remove `compute_size` field for `databricks.sdk.service.apps.App`.
* [Breaking] Remove `genie_space` field for `databricks.sdk.service.apps.AppResource`.
* [Breaking] Remove `skip_validation` field for `databricks.sdk.service.catalog.AccountsCreateStorageCredential`.
* [Breaking] Remove `skip_validation` field for `databricks.sdk.service.catalog.AccountsUpdateStorageCredential`.
* [Breaking] Remove `aliases`, `browse_only`, `created_at`, `created_by`, `full_name`, `metastore_id`, `owner`, `updated_at` and `updated_by` fields for `databricks.sdk.service.catalog.CreateRegisteredModelRequest`.
* [Breaking] Remove `catalog_name`, `id`, `model_name` and `schema_name` fields for `databricks.sdk.service.catalog.RegisteredModelAlias`.
* [Breaking] Remove `aliases`, `catalog_name`, `created_at`, `created_by`, `id`, `metastore_id`, `model_name`, `model_version_dependencies`, `run_id`, `run_workspace_id`, `schema_name`, `source`, `status`, `storage_location`, `updated_at` and `updated_by` fields for `databricks.sdk.service.catalog.UpdateModelVersionRequest`.
* [Breaking] Remove `aliases`, `browse_only`, `catalog_name`, `created_at`, `created_by`, `metastore_id`, `name`, `schema_name`, `storage_location`, `updated_at` and `updated_by` fields for `databricks.sdk.service.catalog.UpdateRegisteredModelRequest`.
* [Breaking] Remove `parameters` field for `databricks.sdk.service.dashboards.GenieQueryAttachment`.
* [Breaking] Remove `database_instance_name` field for `databricks.sdk.service.database.CreateDatabaseInstanceRoleRequest`.
* [Breaking] Remove `custom_tags`, `effective_custom_tags`, `effective_usage_policy_id` and `usage_policy_id` fields for `databricks.sdk.service.database.DatabaseInstance`.
* [Breaking] Remove `effective_attributes` and `instance_name` fields for `databricks.sdk.service.database.DatabaseInstanceRole`.
* [Breaking] Remove `key_region` field for `databricks.sdk.service.provisioning.CreateAwsKeyInfo`.
* [Breaking] Remove `role_arn` field for `databricks.sdk.service.provisioning.CreateStorageConfigurationRequest`.
* [Breaking] Remove `azure_key_info` field for `databricks.sdk.service.provisioning.CustomerManagedKey`.
* [Breaking] Remove `customer_facing_private_access_settings` field for `databricks.sdk.service.provisioning.ReplacePrivateAccessSettingsRequest`.
* [Breaking] Remove `role_arn` field for `databricks.sdk.service.provisioning.StorageConfiguration`.
* [Breaking] Remove `customer_facing_workspace` and `update_mask` fields for `databricks.sdk.service.provisioning.UpdateWorkspaceRequest`.
* [Breaking] Remove `compute_mode`, `network`, `network_connectivity_config_id` and `storage_mode` fields for `databricks.sdk.service.provisioning.Workspace`.
* [Breaking] Remove `enable_serverless_compute` field for `databricks.sdk.service.sql.GetWorkspaceWarehouseConfigResponse`.
* [Breaking] Remove `page_size` and `page_token` fields for `databricks.sdk.service.sql.ListWarehousesRequest`.
* [Breaking] Remove `next_page_token` field for `databricks.sdk.service.sql.ListWarehousesResponse`.
* [Breaking] Remove `enable_serverless_compute` field for `databricks.sdk.service.sql.SetWorkspaceWarehouseConfigRequest`.
* [Breaking] Remove `model_version_status_unknown` enum value for `databricks.sdk.service.catalog.ModelVersionInfoStatus`.
* [Breaking] Remove `stream_native` enum value for `databricks.sdk.service.catalog.SystemType`.
* [Breaking] Remove `k8s_active_pod_quota_exceeded` and `cloud_account_pod_quota_exceeded` enum values for `databricks.sdk.service.compute.TerminationReasonCode`.
* [Breaking] Remove `exceeded_max_token_length_exception`, `internal_catalog_asset_creation_ongoing_exception`, `internal_catalog_asset_creation_failed_exception` and `internal_catalog_asset_creation_unsupported_exception` enum values for `databricks.sdk.service.dashboards.MessageErrorType`.
* [Breaking] Remove `ssh_bootstrap_failure`, `aws_inaccessible_kms_key_failure`, `init_container_not_finished`, `spark_image_download_throttled`, `spark_image_not_found`, `cluster_operation_throttled`, `cluster_operation_timeout`, `serverless_long_running_terminated`, `azure_packed_deployment_partial_failure`, `invalid_worker_image_failure`, `workspace_update`, `invalid_aws_parameter`, `driver_out_of_disk`, `driver_out_of_memory`, `driver_launch_timeout`, `driver_unexpected_failure`, `unexpected_pod_recreation`, `gcp_inaccessible_kms_key_failure`, `gcp_kms_key_permission_denied`, `driver_eviction`, `user_initiated_vm_termination`, `gcp_iam_timeout`, `aws_resource_quota_exceeded`, `cloud_account_setup_failure`, `aws_invalid_key_pair`, `driver_pod_creation_failure`, `maintenance_mode`, `internal_capacity_failure`, `executor_pod_unscheduled`, `storage_download_failure_slow`, `storage_download_failure_throttled`, `dynamic_spark_conf_size_exceeded`, `aws_instance_profile_update_failure`, `instance_pool_not_found`, `instance_pool_max_capacity_reached`, `aws_invalid_kms_key_state`, `gcp_insufficient_capacity`, `gcp_api_rate_quota_exceeded`, `gcp_resource_quota_exceeded`, `gcp_ip_space_exhausted`, `gcp_service_account_access_denied`, `gcp_service_account_not_found`, `gcp_forbidden`, `gcp_not_found`, `resource_usage_blocked`, `data_access_config_changed`, `access_token_failure`, `invalid_instance_placement_protocol`, `budget_policy_resolution_failure`, `in_penalty_box`, `disaster_recovery_replication`, `bootstrap_timeout_due_to_misconfig`, `instance_unreachable_due_to_misconfig`, `storage_download_failure_due_to_misconfig`, `control_plane_request_failure_due_to_misconfig`, `cloud_provider_launch_failure_due_to_misconfig`, `gcp_subnet_not_ready`, `cloud_operation_cancelled`, `cloud_provider_instance_not_launched`, `gcp_trusted_image_projects_violated`, `budget_policy_limit_enforcement_activated`, `eos_spark_image`, `no_matched_k8s`, `lazy_allocation_timeout`, `driver_node_unreachable`, `secret_creation_failure`, `pod_scheduling_failure`, `pod_assignment_failure`, `allocation_timeout`, `allocation_timeout_no_unallocated_clusters`, `allocation_timeout_no_matched_clusters`, `allocation_timeout_no_ready_clusters`, `allocation_timeout_no_warmed_up_clusters`, `allocation_timeout_node_daemon_not_ready`, `allocation_timeout_no_healthy_clusters`, `netvisor_setup_timeout`, `no_matched_k8s_testing_tag`, `cloud_provider_resource_stockout_due_to_misconfig`, `gke_based_cluster_termination`, `allocation_timeout_no_healthy_and_warmed_up_clusters`, `docker_invalid_os_exception`, `docker_container_creation_exception`, `docker_image_too_large_for_instance_exception`, `dns_resolution_error`, `gcp_denied_by_org_policy`, `secret_permission_denied`, `network_check_nic_failure`, `network_check_dns_server_failure`, `network_check_storage_failure`, `network_check_metadata_endpoint_failure`, `network_check_control_plane_failure`, `network_check_multiple_components_failure`, `driver_unhealthy`, `security_agents_failed_initial_verification`, `driver_dns_resolution_failure`, `no_activated_k8s`, `usage_policy_entitlement_denied`, `no_activated_k8s_testing_tag`, `k8s_active_pod_quota_exceeded` and `cloud_account_pod_quota_exceeded` enum values for `databricks.sdk.service.sql.TerminationReasonCode`.
* Add `databricks.sdk.service.dataquality` package.
* Add [w.data_quality](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/dataquality/data_quality.html) workspace-level service.
* Add `create_update()` and `get_update()` methods for [w.apps](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/apps/apps.html) workspace-level service.
* Add `update_notifications()` method for [w.serving_endpoints](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/serving/serving_endpoints.html) workspace-level service.
* Add `compute_size` field for `databricks.sdk.service.apps.App`.
* Add `genie_space` field for `databricks.sdk.service.apps.AppResource`.
* Add `skip_validation` field for `databricks.sdk.service.catalog.AccountsCreateStorageCredential`.
* Add `skip_validation` field for `databricks.sdk.service.catalog.AccountsUpdateStorageCredential`.
* Add `aliases`, `browse_only`, `created_at`, `created_by`, `full_name`, `metastore_id`, `owner`, `updated_at` and `updated_by` fields for `databricks.sdk.service.catalog.CreateRegisteredModelRequest`.
* Add `include_unbound` field for `databricks.sdk.service.catalog.ListCatalogsRequest`.
* Add `include_unbound` field for `databricks.sdk.service.catalog.ListCredentialsRequest`.
* Add `include_unbound` field for `databricks.sdk.service.catalog.ListExternalLocationsRequest`.
* Add `include_unbound` field for `databricks.sdk.service.catalog.ListStorageCredentialsRequest`.
* Add `catalog_name`, `id`, `model_name` and `schema_name` fields for `databricks.sdk.service.catalog.RegisteredModelAlias`.
* Add `aliases`, `catalog_name`, `created_at`, `created_by`, `id`, `metastore_id`, `model_name`, `model_version_dependencies`, `run_id`, `run_workspace_id`, `schema_name`, `source`, `status`, `storage_location`, `updated_at` and `updated_by` fields for `databricks.sdk.service.catalog.UpdateModelVersionRequest`.
* Add `aliases`, `browse_only`, `catalog_name`, `created_at`, `created_by`, `metastore_id`, `name`, `schema_name`, `storage_location`, `updated_at` and `updated_by` fields for `databricks.sdk.service.catalog.UpdateRegisteredModelRequest`.
* Add `parameters` field for `databricks.sdk.service.dashboards.GenieQueryAttachment`.
* Add `database_instance_name` field for `databricks.sdk.service.database.CreateDatabaseInstanceRoleRequest`.
* Add `custom_tags`, `effective_custom_tags`, `effective_usage_policy_id` and `usage_policy_id` fields for `databricks.sdk.service.database.DatabaseInstance`.
* Add `effective_attributes` and `instance_name` fields for `databricks.sdk.service.database.DatabaseInstanceRole`.
* Add `key_region` field for `databricks.sdk.service.provisioning.CreateAwsKeyInfo`.
* Add `role_arn` field for `databricks.sdk.service.provisioning.CreateStorageConfigurationRequest`.
* Add `azure_key_info` field for `databricks.sdk.service.provisioning.CustomerManagedKey`.
* [Breaking] Add `customer_facing_private_access_settings` field for `databricks.sdk.service.provisioning.ReplacePrivateAccessSettingsRequest`.
* Add `role_arn` field for `databricks.sdk.service.provisioning.StorageConfiguration`.
* [Breaking] Add `customer_facing_workspace` field for `databricks.sdk.service.provisioning.UpdateWorkspaceRequest`.
* Add `update_mask` field for `databricks.sdk.service.provisioning.UpdateWorkspaceRequest`.
* Add `compute_mode`, `network`, `network_connectivity_config_id` and `storage_mode` fields for `databricks.sdk.service.provisioning.Workspace`.
* Add `enable_serverless_compute` field for `databricks.sdk.service.sql.GetWorkspaceWarehouseConfigResponse`.
* Add `page_size` and `page_token` fields for `databricks.sdk.service.sql.ListWarehousesRequest` (see the usage sketch at the end of this section).
* Add `next_page_token` field for `databricks.sdk.service.sql.ListWarehousesResponse`.
* Add `enable_serverless_compute` field for `databricks.sdk.service.sql.SetWorkspaceWarehouseConfigRequest`.
* Add `model_version_status_unknown` enum value for `databricks.sdk.service.catalog.ModelVersionInfoStatus`.
* Add `stream_native` enum value for `databricks.sdk.service.catalog.SystemType`.
* Add `k8s_active_pod_quota_exceeded` and `cloud_account_pod_quota_exceeded` enum values for `databricks.sdk.service.compute.TerminationReasonCode`.
* Add `exceeded_max_token_length_exception`, `internal_catalog_asset_creation_ongoing_exception`, `internal_catalog_asset_creation_failed_exception` and `internal_catalog_asset_creation_unsupported_exception` enum values for `databricks.sdk.service.dashboards.MessageErrorType`.
* Add `asset_type_mcp` enum value for `databricks.sdk.service.marketplace.AssetType`.
* Add `ssh_bootstrap_failure`, `aws_inaccessible_kms_key_failure`, `init_container_not_finished`, `spark_image_download_throttled`, `spark_image_not_found`, `cluster_operation_throttled`, `cluster_operation_timeout`, `serverless_long_running_terminated`, `azure_packed_deployment_partial_failure`, `invalid_worker_image_failure`, `workspace_update`, `invalid_aws_parameter`, `driver_out_of_disk`, `driver_out_of_memory`, `driver_launch_timeout`, `driver_unexpected_failure`, `unexpected_pod_recreation`, `gcp_inaccessible_kms_key_failure`, `gcp_kms_key_permission_denied`, `driver_eviction`, `user_initiated_vm_termination`, `gcp_iam_timeout`, `aws_resource_quota_exceeded`, `cloud_account_setup_failure`, `aws_invalid_key_pair`, `driver_pod_creation_failure`, `maintenance_mode`, `internal_capacity_failure`, `executor_pod_unscheduled`, `storage_download_failure_slow`, `storage_download_failure_throttled`, `dynamic_spark_conf_size_exceeded`, `aws_instance_profile_update_failure`, `instance_pool_not_found`, `instance_pool_max_capacity_reached`, `aws_invalid_kms_key_state`, `gcp_insufficient_capacity`, `gcp_api_rate_quota_exceeded`, `gcp_resource_quota_exceeded`, `gcp_ip_space_exhausted`, `gcp_service_account_access_denied`, `gcp_service_account_not_found`, `gcp_forbidden`, `gcp_not_found`, `resource_usage_blocked`, `data_access_config_changed`, `access_token_failure`, `invalid_instance_placement_protocol`, `budget_policy_resolution_failure`, `in_penalty_box`, `disaster_recovery_replication`, `bootstrap_timeout_due_to_misconfig`, `instance_unreachable_due_to_misconfig`, `storage_download_failure_due_to_misconfig`, `control_plane_request_failure_due_to_misconfig`, `cloud_provider_launch_failure_due_to_misconfig`, `gcp_subnet_not_ready`, `cloud_operation_cancelled`, `cloud_provider_instance_not_launched`, `gcp_trusted_image_projects_violated`, `budget_policy_limit_enforcement_activated`, `eos_spark_image`, `no_matched_k8s`, `lazy_allocation_timeout`, `driver_node_unreachable`, `secret_creation_failure`, `pod_scheduling_failure`, `pod_assignment_failure`, `allocation_timeout`, `allocation_timeout_no_unallocated_clusters`, `allocation_timeout_no_matched_clusters`, `allocation_timeout_no_ready_clusters`, `allocation_timeout_no_warmed_up_clusters`, `allocation_timeout_node_daemon_not_ready`, `allocation_timeout_no_healthy_clusters`, `netvisor_setup_timeout`, `no_matched_k8s_testing_tag`, `cloud_provider_resource_stockout_due_to_misconfig`, `gke_based_cluster_termination`, `allocation_timeout_no_healthy_and_warmed_up_clusters`, `docker_invalid_os_exception`, `docker_container_creation_exception`, `docker_image_too_large_for_instance_exception`, `dns_resolution_error`, `gcp_denied_by_org_policy`, `secret_permission_denied`, `network_check_nic_failure`, `network_check_dns_server_failure`, `network_check_storage_failure`, `network_check_metadata_endpoint_failure`, `network_check_control_plane_failure`, `network_check_multiple_components_failure`, `driver_unhealthy`, `security_agents_failed_initial_verification`, `driver_dns_resolution_failure`, `no_activated_k8s`, `usage_policy_entitlement_denied`, `no_activated_k8s_testing_tag`, `k8s_active_pod_quota_exceeded` and `cloud_account_pod_quota_exceeded` enum values for `databricks.sdk.service.sql.TerminationReasonCode`.
|
|
197
|
+
* [Breaking] Change `create()` method for [a.account_metastore_assignments](https://databricks-sdk-py.readthedocs.io/en/latest/account/catalog/account_metastore_assignments.html) account-level service to start returning `databricks.sdk.service.catalog.AccountsCreateMetastoreAssignmentResponse` dataclass.
|
|
198
|
+
* [Breaking] Change `delete()` method for [a.account_metastore_assignments](https://databricks-sdk-py.readthedocs.io/en/latest/account/catalog/account_metastore_assignments.html) account-level service to start returning `databricks.sdk.service.catalog.AccountsDeleteMetastoreAssignmentResponse` dataclass.
|
|
199
|
+
* [Breaking] Change `update()` method for [a.account_metastore_assignments](https://databricks-sdk-py.readthedocs.io/en/latest/account/catalog/account_metastore_assignments.html) account-level service to start returning `databricks.sdk.service.catalog.AccountsUpdateMetastoreAssignmentResponse` dataclass.
|
|
200
|
+
* [Breaking] Change `create()` method for [a.account_metastores](https://databricks-sdk-py.readthedocs.io/en/latest/account/catalog/account_metastores.html) account-level service to return `databricks.sdk.service.catalog.AccountsCreateMetastoreResponse` dataclass.
|
|
201
|
+
* [Breaking] Change `delete()` method for [a.account_metastores](https://databricks-sdk-py.readthedocs.io/en/latest/account/catalog/account_metastores.html) account-level service to start returning `databricks.sdk.service.catalog.AccountsDeleteMetastoreResponse` dataclass.
|
|
202
|
+
* [Breaking] Change `get()` method for [a.account_metastores](https://databricks-sdk-py.readthedocs.io/en/latest/account/catalog/account_metastores.html) account-level service to return `databricks.sdk.service.catalog.AccountsGetMetastoreResponse` dataclass.
|
|
203
|
+
* [Breaking] Change `list()` method for [a.account_metastores](https://databricks-sdk-py.readthedocs.io/en/latest/account/catalog/account_metastores.html) account-level service to return `databricks.sdk.service.catalog.AccountsListMetastoresResponse` dataclass.
* [Breaking] Change `update()` method for [a.account_metastores](https://databricks-sdk-py.readthedocs.io/en/latest/account/catalog/account_metastores.html) account-level service to return `databricks.sdk.service.catalog.AccountsUpdateMetastoreResponse` dataclass.
* [Breaking] Change `create()` method for [a.account_storage_credentials](https://databricks-sdk-py.readthedocs.io/en/latest/account/catalog/account_storage_credentials.html) account-level service to return `databricks.sdk.service.catalog.AccountsCreateStorageCredentialInfo` dataclass.
* [Breaking] Change `delete()` method for [a.account_storage_credentials](https://databricks-sdk-py.readthedocs.io/en/latest/account/catalog/account_storage_credentials.html) account-level service to start returning `databricks.sdk.service.catalog.AccountsDeleteStorageCredentialResponse` dataclass.
* [Breaking] Change `update()` method for [a.account_storage_credentials](https://databricks-sdk-py.readthedocs.io/en/latest/account/catalog/account_storage_credentials.html) account-level service to return `databricks.sdk.service.catalog.AccountsUpdateStorageCredentialResponse` dataclass.
* [Breaking] Change `create()` method for [w.registered_models](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/catalog/registered_models.html) workspace-level service with new required argument order.
* [Breaking] Change `delete()` method for [a.credentials](https://databricks-sdk-py.readthedocs.io/en/latest/account/provisioning/credentials.html) account-level service to start returning `databricks.sdk.service.provisioning.Credential` dataclass.
* [Breaking] Change `delete()` method for [a.encryption_keys](https://databricks-sdk-py.readthedocs.io/en/latest/account/provisioning/encryption_keys.html) account-level service to start returning `databricks.sdk.service.provisioning.CustomerManagedKey` dataclass.
* [Breaking] Change `create()` method for [a.networks](https://databricks-sdk-py.readthedocs.io/en/latest/account/provisioning/networks.html) account-level service with new required argument order.
* [Breaking] Change `delete()` method for [a.networks](https://databricks-sdk-py.readthedocs.io/en/latest/account/provisioning/networks.html) account-level service to start returning `databricks.sdk.service.provisioning.Network` dataclass.
* [Breaking] Change `create()` and `replace()` methods for [a.private_access](https://databricks-sdk-py.readthedocs.io/en/latest/account/provisioning/private_access.html) account-level service with new required argument order.
* [Breaking] Change `delete()` and `replace()` methods for [a.private_access](https://databricks-sdk-py.readthedocs.io/en/latest/account/provisioning/private_access.html) account-level service to start returning `databricks.sdk.service.provisioning.PrivateAccessSettings` dataclass.
* [Breaking] Change `delete()` method for [a.storage](https://databricks-sdk-py.readthedocs.io/en/latest/account/provisioning/storage.html) account-level service to start returning `databricks.sdk.service.provisioning.StorageConfiguration` dataclass.
* [Breaking] Change `create()` method for [a.vpc_endpoints](https://databricks-sdk-py.readthedocs.io/en/latest/account/provisioning/vpc_endpoints.html) account-level service with new required argument order.
* [Breaking] Change `delete()` method for [a.vpc_endpoints](https://databricks-sdk-py.readthedocs.io/en/latest/account/provisioning/vpc_endpoints.html) account-level service to start returning `databricks.sdk.service.provisioning.VpcEndpoint` dataclass.
* [Breaking] Change `create()` and `update()` methods for [a.workspaces](https://databricks-sdk-py.readthedocs.io/en/latest/account/provisioning/workspaces.html) account-level service with new required argument order.
* [Breaking] Change `delete()` and `update()` methods for [a.workspaces](https://databricks-sdk-py.readthedocs.io/en/latest/account/provisioning/workspaces.html) account-level service to start returning `databricks.sdk.service.provisioning.Workspace` dataclass.
* [Breaking] Change `execute_statement()` method for [w.statement_execution](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/sql/statement_execution.html) workspace-level service. Method path has changed.
* [Breaking] Change `metastore_info` field for `databricks.sdk.service.catalog.AccountsCreateMetastore` to type `databricks.sdk.service.catalog.CreateAccountsMetastore` dataclass.
* [Breaking] Change `credential_info` field for `databricks.sdk.service.catalog.AccountsCreateStorageCredential` to type `databricks.sdk.service.catalog.CreateAccountsStorageCredential` dataclass.
* [Breaking] Change `metastore_info` field for `databricks.sdk.service.catalog.AccountsUpdateMetastore` to type `databricks.sdk.service.catalog.UpdateAccountsMetastore` dataclass.
* [Breaking] Change `credential_info` field for `databricks.sdk.service.catalog.AccountsUpdateStorageCredential` to type `databricks.sdk.service.catalog.UpdateAccountsStorageCredential` dataclass.
* Change `catalog_name`, `name` and `schema_name` fields for `databricks.sdk.service.catalog.CreateRegisteredModelRequest` to no longer be required.
* [Breaking] Change `name` field for `databricks.sdk.service.database.DatabaseInstanceRole` to be required.
* Change `name` field for `databricks.sdk.service.database.DatabaseInstanceRole` to be required.
* Change `network_name` field for `databricks.sdk.service.provisioning.CreateNetworkRequest` to no longer be required.
* Change `private_access_settings_name` and `region` fields for `databricks.sdk.service.provisioning.CreatePrivateAccessSettingsRequest` to no longer be required.
* Change `vpc_endpoint_name` field for `databricks.sdk.service.provisioning.CreateVpcEndpointRequest` to no longer be required.
* Change `workspace_name` field for `databricks.sdk.service.provisioning.CreateWorkspaceRequest` to no longer be required.
* Change `dataplane_relay` and `rest_api` fields for `databricks.sdk.service.provisioning.NetworkVpcEndpoints` to no longer be required.
* [Breaking] Change `dataplane_relay` and `rest_api` fields for `databricks.sdk.service.provisioning.NetworkVpcEndpoints` to no longer be required.
* [Breaking] Change waiter for [WorkspacesAPI.update](https://databricks-sdk-py.readthedocs.io/en/latest/account/provisioning/workspaces.html#databricks.sdk.service.provisioning.WorkspacesAPI.update) method.
* [Breaking] Remove `browse_only` field for `databricks.sdk.service.catalog.ModelVersionInfo`.
* [Breaking] Remove `jar_dependencies` field for `databricks.sdk.service.compute.Environment`.
* [Breaking] Remove `is_no_public_ip_enabled` field for `databricks.sdk.service.provisioning.CreateWorkspaceRequest`.
* [Breaking] Remove `allowed_vpc_endpoint_ids`, `private_access_level`, `private_access_settings_name`, `public_access_enabled` and `region` fields for `databricks.sdk.service.provisioning.ReplacePrivateAccessSettingsRequest`.
* [Breaking] Remove `external_id` field for `databricks.sdk.service.provisioning.StsRole`.
* [Breaking] Remove `aws_region`, `credentials_id`, `custom_tags`, `managed_services_customer_managed_key_id`, `network_connectivity_config_id`, `network_id`, `private_access_settings_id`, `storage_configuration_id` and `storage_customer_managed_key_id` fields for `databricks.sdk.service.provisioning.UpdateWorkspaceRequest`.
* [Breaking] Remove `external_customer_info` and `is_no_public_ip_enabled` fields for `databricks.sdk.service.provisioning.Workspace`.
* [Breaking] Remove `status_unspecified` enum value for `databricks.sdk.service.sql.Status`.
* Add `compute_mode` field for `databricks.sdk.service.provisioning.CreateWorkspaceRequest`.
* Add `expected_workspace_status` field for `databricks.sdk.service.provisioning.Workspace`.
* Add `dependency_storage_locations` field for `databricks.sdk.service.sharing.TableInternalAttributes`.
* Add `git_email` field for `databricks.sdk.service.workspace.CreateCredentialsRequest`.
* Add `git_email` field for `databricks.sdk.service.workspace.CreateCredentialsResponse`.
* Add `git_email` field for `databricks.sdk.service.workspace.CredentialInfo`.
* Add `git_email` field for `databricks.sdk.service.workspace.GetCredentialsResponse`.
* Add `git_email` field for `databricks.sdk.service.workspace.UpdateCredentialsRequest`.
* Add `germany_tisax` enum value for `databricks.sdk.service.settings.ComplianceStandard`.
* [Breaking] Remove `prediction_probability_column` field for `databricks.sdk.service.dataquality.InferenceLogConfig`.


## Release v0.67.0

### API Changes

* Add `update_notifications()` method for [w.serving_endpoints](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/serving/serving_endpoints.html) workspace-level service.
* Add `parameters` field for `databricks.sdk.service.dashboards.GenieQueryAttachment`.
* Add `database_instance_name` field for `databricks.sdk.service.database.CreateDatabaseInstanceRoleRequest`.
* Add `custom_tags`, `effective_custom_tags`, `effective_usage_policy_id` and `usage_policy_id` fields for `databricks.sdk.service.database.DatabaseInstance`.
* Add `effective_attributes` and `instance_name` fields for `databricks.sdk.service.database.DatabaseInstanceRole`.
* Add `external_use_schema` enum value for `databricks.sdk.service.catalog.Privilege`.
* Add `stream_native` enum value for `databricks.sdk.service.catalog.SystemType`.
* Add `exceeded_max_token_length_exception` enum value for `databricks.sdk.service.dashboards.MessageErrorType`.
* Change `name` field for `databricks.sdk.service.database.DatabaseInstanceRole` to be required.
* [Breaking] Change `name` field for `databricks.sdk.service.database.DatabaseInstanceRole` to be required.


## Release v0.66.0

### New Features and Improvements

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: databricks-sdk
-Version: 0.66.0
+Version: 0.68.0
 Summary: Databricks SDK for Python (Beta)
 Project-URL: Documentation, https://databricks-sdk-py.readthedocs.io
 Keywords: databricks,sdk
@@ -23,6 +23,7 @@ License-File: LICENSE
 License-File: NOTICE
 Requires-Dist: requests<3,>=2.28.1
 Requires-Dist: google-auth~=2.0
+Requires-Dist: protobuf<7.0,>=4.21.0
 Provides-Extra: dev
 Requires-Dist: pytest; extra == "dev"
 Requires-Dist: pytest-cov; extra == "dev"
@@ -180,10 +181,11 @@ Depending on the Databricks authentication method, the SDK uses the following in
 
 ### Databricks native authentication
 
-By default, the Databricks SDK for Python initially tries [Databricks token authentication](https://docs.databricks.com/dev-tools/api/latest/authentication.html) (`auth_type='pat'` argument). If the SDK is unsuccessful, it then tries
+By default, the Databricks SDK for Python initially tries [Databricks token authentication](https://docs.databricks.com/dev-tools/api/latest/authentication.html) (`auth_type='pat'` argument). If the SDK is unsuccessful, it then tries Workload Identity Federation (WIF). See [Supported WIF](https://docs.databricks.com/aws/en/dev-tools/auth/oauth-federation-provider) for the supported JWT token providers.
 
 - For Databricks token authentication, you must provide `host` and `token`; or their environment variable or `.databrickscfg` file field equivalents.
 - For Databricks OIDC authentication, you must provide the `host`, `client_id` and `token_audience` _(optional)_ either directly, through the corresponding environment variables, or in your `.databrickscfg` configuration file.
+- For Azure DevOps OIDC authentication, the `token_audience` is irrelevant as the audience is always set to `api://AzureADTokenExchange`. Also, the `System.AccessToken` pipeline variable required for OIDC request must be exposed as the `SYSTEM_ACCESSTOKEN` environment variable, following [Pipeline variables](https://learn.microsoft.com/en-us/azure/devops/pipelines/build/variables?view=azure-devops&tabs=yaml#systemaccesstoken)
 
 | Argument | Description | Environment variable |
 |------------------|---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|-------------------------|
@@ -126,10 +126,11 @@ Depending on the Databricks authentication method, the SDK uses the following in
 
 ### Databricks native authentication
 
-By default, the Databricks SDK for Python initially tries [Databricks token authentication](https://docs.databricks.com/dev-tools/api/latest/authentication.html) (`auth_type='pat'` argument). If the SDK is unsuccessful, it then tries
+By default, the Databricks SDK for Python initially tries [Databricks token authentication](https://docs.databricks.com/dev-tools/api/latest/authentication.html) (`auth_type='pat'` argument). If the SDK is unsuccessful, it then tries Workload Identity Federation (WIF). See [Supported WIF](https://docs.databricks.com/aws/en/dev-tools/auth/oauth-federation-provider) for the supported JWT token providers.
 
 - For Databricks token authentication, you must provide `host` and `token`; or their environment variable or `.databrickscfg` file field equivalents.
 - For Databricks OIDC authentication, you must provide the `host`, `client_id` and `token_audience` _(optional)_ either directly, through the corresponding environment variables, or in your `.databrickscfg` configuration file.
+- For Azure DevOps OIDC authentication, the `token_audience` is irrelevant as the audience is always set to `api://AzureADTokenExchange`. Also, the `System.AccessToken` pipeline variable required for OIDC request must be exposed as the `SYSTEM_ACCESSTOKEN` environment variable, following [Pipeline variables](https://learn.microsoft.com/en-us/azure/devops/pipelines/build/variables?view=azure-devops&tabs=yaml#systemaccesstoken)
 
 | Argument | Description | Environment variable |
 |------------------|---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|-------------------------|
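The README text above lists the configuration fields the SDK reads for native and OIDC authentication. As a purely illustrative sketch (the `host` and `client_id` values below are placeholders, and in practice these settings are usually supplied via environment variables or a `.databrickscfg` profile rather than hard-coded):

```python
from databricks.sdk import WorkspaceClient

# Hypothetical values; replace with your workspace URL and service principal client ID.
w = WorkspaceClient(
    host="https://example.cloud.databricks.com",
    client_id="<service-principal-client-id>",
    # token_audience is optional. For Azure DevOps OIDC it is ignored, since the
    # audience is always api://AzureADTokenExchange; there, the System.AccessToken
    # pipeline variable must also be exposed as SYSTEM_ACCESSTOKEN.
)

# Verify that authentication works by fetching the current identity.
print(w.current_user.me().user_name)
```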
@@ -12,7 +12,6 @@ from databricks.sdk.mixins.compute import ClustersExt
 from databricks.sdk.mixins.files import DbfsExt, FilesExt
 from databricks.sdk.mixins.jobs import JobsExt
 from databricks.sdk.mixins.open_ai_client import ServingEndpointsExt
-from databricks.sdk.mixins.sharing import SharesExt
 from databricks.sdk.mixins.workspace import WorkspaceExt
 from databricks.sdk.service import agentbricks as pkg_agentbricks
 from databricks.sdk.service import apps as pkg_apps
@@ -22,6 +21,7 @@ from databricks.sdk.service import cleanrooms as pkg_cleanrooms
 from databricks.sdk.service import compute as pkg_compute
 from databricks.sdk.service import dashboards as pkg_dashboards
 from databricks.sdk.service import database as pkg_database
+from databricks.sdk.service import dataquality as pkg_dataquality
 from databricks.sdk.service import files as pkg_files
 from databricks.sdk.service import iam as pkg_iam
 from databricks.sdk.service import iamv2 as pkg_iamv2
@@ -80,6 +80,7 @@ from databricks.sdk.service.compute import (ClusterPoliciesAPI, ClustersAPI,
 from databricks.sdk.service.dashboards import (GenieAPI, LakeviewAPI,
                                                LakeviewEmbeddedAPI)
 from databricks.sdk.service.database import DatabaseAPI
+from databricks.sdk.service.dataquality import DataQualityAPI
 from databricks.sdk.service.files import DbfsAPI, FilesAPI
 from databricks.sdk.service.iam import (AccessControlAPI,
                                         AccountAccessControlAPI,
@@ -283,6 +284,7 @@ class WorkspaceClient:
         self._current_user = pkg_iam.CurrentUserAPI(self._api_client)
         self._dashboard_widgets = pkg_sql.DashboardWidgetsAPI(self._api_client)
         self._dashboards = pkg_sql.DashboardsAPI(self._api_client)
+        self._data_quality = pkg_dataquality.DataQualityAPI(self._api_client)
         self._data_sources = pkg_sql.DataSourcesAPI(self._api_client)
         self._database = pkg_database.DatabaseAPI(self._api_client)
         self._dbfs = DbfsExt(self._api_client)
@@ -358,7 +360,7 @@ class WorkspaceClient:
             self._api_client, serving_endpoints, serving_endpoints_data_plane_token_source
         )
         self._settings = pkg_settings.SettingsAPI(self._api_client)
-        self._shares =
+        self._shares = pkg_sharing.SharesAPI(self._api_client)
         self._statement_execution = pkg_sql.StatementExecutionAPI(self._api_client)
         self._storage_credentials = pkg_catalog.StorageCredentialsAPI(self._api_client)
         self._system_schemas = pkg_catalog.SystemSchemasAPI(self._api_client)
@@ -541,6 +543,11 @@ class WorkspaceClient:
         """In general, there is little need to modify dashboards using the API."""
         return self._dashboards
 
+    @property
+    def data_quality(self) -> pkg_dataquality.DataQualityAPI:
+        """Manage the data quality of Unity Catalog objects (currently support `schema` and `table`)."""
+        return self._data_quality
+
     @property
     def data_sources(self) -> pkg_sql.DataSourcesAPI:
         """This API is provided to assist you in making new query objects."""
@@ -882,7 +889,7 @@ class WorkspaceClient:
         return self._settings
 
     @property
-    def shares(self) ->
+    def shares(self) -> pkg_sharing.SharesAPI:
         """A share is a container instantiated with :method:shares/create."""
         return self._shares
 
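The hunks above wire the new `dataquality` service into `WorkspaceClient` and switch `shares` from the `SharesExt` mixin to the generated `SharesAPI`. A minimal sketch of what this looks like from the caller's side, assuming an already-configured client (the individual methods of the new service are defined elsewhere and are not shown in this hunk):

```python
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

# New in 0.68.0: workspace-level accessor for the data quality service.
dq = w.data_quality  # an instance of databricks.sdk.service.dataquality.DataQualityAPI

# shares is now backed by the generated SharesAPI rather than the removed SharesExt mixin.
shares_api = w.shares
```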
@@ -99,7 +99,10 @@ class _BaseClient:
         # Default to 60 seconds
         self._http_timeout_seconds = http_timeout_seconds or 60
 
-        self._error_parser = _Parser(
+        self._error_parser = _Parser(
+            extra_error_customizers=extra_error_customizers,
+            debug_headers=debug_headers,
+        )
 
     def _authenticate(self, r: requests.PreparedRequest) -> requests.PreparedRequest:
         if self._header_factory:
@@ -0,0 +1,17 @@
+from datetime import timedelta
+from typing import Optional
+
+
+class LroOptions:
+    """LroOptions is the options for the Long Running Operations.
+    DO NOT USE THIS OPTION. This option is still under development
+    and can be updated in the future without notice.
+    """
+
+    def __init__(self, *, timeout: Optional[timedelta] = None):
+        """
+        Args:
+            timeout: The timeout for the Long Running Operations.
+                If not set, the default timeout is 20 minutes.
+        """
+        self.timeout = timeout or timedelta(minutes=20)
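To illustrate the constructor added above, here is a small usage sketch. It is purely illustrative, since the class's own docstring warns it is still under development, and the import path is assumed from the new file's location (`databricks/sdk/common/lro.py`):

```python
from datetime import timedelta

from databricks.sdk.common.lro import LroOptions  # assumed module path

# An explicit timeout overrides the 20-minute default.
opts = LroOptions(timeout=timedelta(minutes=5))
print(opts.timeout)           # 0:05:00
print(LroOptions().timeout)   # 0:20:00 (the default)
```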
@@ -0,0 +1,39 @@
+class FieldMask(object):
+    """Class for FieldMask message type."""
+
+    # This is based on the base implementation from protobuf.
+    # https://pigweed.googlesource.com/third_party/github/protocolbuffers/protobuf/+/HEAD/python/google/protobuf/internal/field_mask.py
+    # The original implementation only works with proto generated classes.
+    # Since our classes are not generated from proto files, we need to implement it manually.
+
+    def __init__(self, field_mask=None):
+        """Initializes the FieldMask."""
+        if field_mask:
+            self.paths = field_mask
+
+    def ToJsonString(self) -> str:
+        """Converts FieldMask to string."""
+        return ",".join(self.paths)
+
+    def FromJsonString(self, value: str) -> None:
+        """Converts string to FieldMask."""
+        if not isinstance(value, str):
+            raise ValueError("FieldMask JSON value not a string: {!r}".format(value))
+        if value:
+            self.paths = value.split(",")
+        else:
+            self.paths = []
+
+    def __eq__(self, other) -> bool:
+        """Check equality based on paths."""
+        if not isinstance(other, FieldMask):
+            return False
+        return self.paths == other.paths
+
+    def __hash__(self) -> int:
+        """Hash based on paths tuple."""
+        return hash(tuple(self.paths))
+
+    def __repr__(self) -> str:
+        """String representation for debugging."""
+        return f"FieldMask(paths={self.paths})"
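A short round-trip example of the `FieldMask` helper defined above (a sketch; the import path is assumed from the new file's location, `databricks/sdk/common/types/fieldmask.py`, and the field names are placeholders):

```python
from databricks.sdk.common.types.fieldmask import FieldMask  # assumed module path

mask = FieldMask(["display_name", "comment"])
print(mask.ToJsonString())    # "display_name,comment"

parsed = FieldMask()
parsed.FromJsonString("display_name,comment")  # sets parsed.paths from the string
print(parsed == mask)         # True: equality compares the paths lists
```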