databricks-sdk 0.67.0__tar.gz → 0.68.0__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of databricks-sdk might be problematic. Click here for more details.

Files changed (98) hide show
  1. {databricks_sdk-0.67.0 → databricks_sdk-0.68.0}/CHANGELOG.md +252 -0
  2. {databricks_sdk-0.67.0 → databricks_sdk-0.68.0}/PKG-INFO +4 -2
  3. {databricks_sdk-0.67.0 → databricks_sdk-0.68.0}/README.md +2 -1
  4. {databricks_sdk-0.67.0 → databricks_sdk-0.68.0}/databricks/sdk/__init__.py +8 -0
  5. {databricks_sdk-0.67.0 → databricks_sdk-0.68.0}/databricks/sdk/_base_client.py +4 -1
  6. databricks_sdk-0.68.0/databricks/sdk/common/lro.py +17 -0
  7. databricks_sdk-0.68.0/databricks/sdk/common/types/fieldmask.py +39 -0
  8. {databricks_sdk-0.67.0 → databricks_sdk-0.68.0}/databricks/sdk/credentials_provider.py +61 -12
  9. {databricks_sdk-0.67.0 → databricks_sdk-0.68.0}/databricks/sdk/dbutils.py +5 -1
  10. {databricks_sdk-0.67.0 → databricks_sdk-0.68.0}/databricks/sdk/errors/parser.py +8 -3
  11. {databricks_sdk-0.67.0 → databricks_sdk-0.68.0}/databricks/sdk/mixins/files.py +1 -0
  12. databricks_sdk-0.68.0/databricks/sdk/oidc_token_supplier.py +108 -0
  13. databricks_sdk-0.68.0/databricks/sdk/retries.py +169 -0
  14. databricks_sdk-0.68.0/databricks/sdk/service/__init__.py +0 -0
  15. databricks_sdk-0.68.0/databricks/sdk/service/_internal.py +164 -0
  16. {databricks_sdk-0.67.0 → databricks_sdk-0.68.0}/databricks/sdk/service/agentbricks.py +1 -1
  17. {databricks_sdk-0.67.0 → databricks_sdk-0.68.0}/databricks/sdk/service/apps.py +264 -1
  18. {databricks_sdk-0.67.0 → databricks_sdk-0.68.0}/databricks/sdk/service/billing.py +2 -3
  19. {databricks_sdk-0.67.0 → databricks_sdk-0.68.0}/databricks/sdk/service/catalog.py +1026 -540
  20. {databricks_sdk-0.67.0 → databricks_sdk-0.68.0}/databricks/sdk/service/cleanrooms.py +3 -3
  21. {databricks_sdk-0.67.0 → databricks_sdk-0.68.0}/databricks/sdk/service/compute.py +21 -33
  22. {databricks_sdk-0.67.0 → databricks_sdk-0.68.0}/databricks/sdk/service/dashboards.py +7 -3
  23. {databricks_sdk-0.67.0 → databricks_sdk-0.68.0}/databricks/sdk/service/database.py +3 -2
  24. databricks_sdk-0.68.0/databricks/sdk/service/dataquality.py +1145 -0
  25. {databricks_sdk-0.67.0 → databricks_sdk-0.68.0}/databricks/sdk/service/files.py +2 -1
  26. {databricks_sdk-0.67.0 → databricks_sdk-0.68.0}/databricks/sdk/service/iam.py +2 -1
  27. {databricks_sdk-0.67.0 → databricks_sdk-0.68.0}/databricks/sdk/service/iamv2.py +1 -1
  28. {databricks_sdk-0.67.0 → databricks_sdk-0.68.0}/databricks/sdk/service/jobs.py +6 -9
  29. {databricks_sdk-0.67.0 → databricks_sdk-0.68.0}/databricks/sdk/service/marketplace.py +3 -1
  30. {databricks_sdk-0.67.0 → databricks_sdk-0.68.0}/databricks/sdk/service/ml.py +3 -1
  31. {databricks_sdk-0.67.0 → databricks_sdk-0.68.0}/databricks/sdk/service/oauth2.py +1 -1
  32. {databricks_sdk-0.67.0 → databricks_sdk-0.68.0}/databricks/sdk/service/pipelines.py +5 -6
  33. {databricks_sdk-0.67.0 → databricks_sdk-0.68.0}/databricks/sdk/service/provisioning.py +544 -655
  34. {databricks_sdk-0.67.0 → databricks_sdk-0.68.0}/databricks/sdk/service/qualitymonitorv2.py +1 -1
  35. {databricks_sdk-0.67.0 → databricks_sdk-0.68.0}/databricks/sdk/service/serving.py +3 -1
  36. {databricks_sdk-0.67.0 → databricks_sdk-0.68.0}/databricks/sdk/service/settings.py +5 -2
  37. {databricks_sdk-0.67.0 → databricks_sdk-0.68.0}/databricks/sdk/service/settingsv2.py +1 -1
  38. {databricks_sdk-0.67.0 → databricks_sdk-0.68.0}/databricks/sdk/service/sharing.py +12 -3
  39. {databricks_sdk-0.67.0 → databricks_sdk-0.68.0}/databricks/sdk/service/sql.py +305 -70
  40. {databricks_sdk-0.67.0 → databricks_sdk-0.68.0}/databricks/sdk/service/tags.py +1 -1
  41. {databricks_sdk-0.67.0 → databricks_sdk-0.68.0}/databricks/sdk/service/vectorsearch.py +3 -1
  42. {databricks_sdk-0.67.0 → databricks_sdk-0.68.0}/databricks/sdk/service/workspace.py +70 -17
  43. databricks_sdk-0.68.0/databricks/sdk/version.py +1 -0
  44. {databricks_sdk-0.67.0 → databricks_sdk-0.68.0}/databricks_sdk.egg-info/PKG-INFO +4 -2
  45. {databricks_sdk-0.67.0 → databricks_sdk-0.68.0}/databricks_sdk.egg-info/SOURCES.txt +4 -0
  46. {databricks_sdk-0.67.0 → databricks_sdk-0.68.0}/databricks_sdk.egg-info/requires.txt +1 -0
  47. {databricks_sdk-0.67.0 → databricks_sdk-0.68.0}/pyproject.toml +1 -0
  48. databricks_sdk-0.67.0/databricks/sdk/oidc_token_supplier.py +0 -28
  49. databricks_sdk-0.67.0/databricks/sdk/retries.py +0 -69
  50. databricks_sdk-0.67.0/databricks/sdk/service/_internal.py +0 -72
  51. databricks_sdk-0.67.0/databricks/sdk/version.py +0 -1
  52. {databricks_sdk-0.67.0 → databricks_sdk-0.68.0}/CONTRIBUTING.md +0 -0
  53. {databricks_sdk-0.67.0 → databricks_sdk-0.68.0}/DCO +0 -0
  54. {databricks_sdk-0.67.0 → databricks_sdk-0.68.0}/LICENSE +0 -0
  55. {databricks_sdk-0.67.0 → databricks_sdk-0.68.0}/MANIFEST.in +0 -0
  56. {databricks_sdk-0.67.0 → databricks_sdk-0.68.0}/Makefile +0 -0
  57. {databricks_sdk-0.67.0 → databricks_sdk-0.68.0}/NOTICE +0 -0
  58. {databricks_sdk-0.67.0 → databricks_sdk-0.68.0}/SECURITY.md +0 -0
  59. {databricks_sdk-0.67.0 → databricks_sdk-0.68.0}/databricks/__init__.py +0 -0
  60. {databricks_sdk-0.67.0 → databricks_sdk-0.68.0}/databricks/sdk/_property.py +0 -0
  61. {databricks_sdk-0.67.0 → databricks_sdk-0.68.0}/databricks/sdk/_widgets/__init__.py +0 -0
  62. {databricks_sdk-0.67.0 → databricks_sdk-0.68.0}/databricks/sdk/_widgets/default_widgets_utils.py +0 -0
  63. {databricks_sdk-0.67.0 → databricks_sdk-0.68.0}/databricks/sdk/_widgets/ipywidgets_utils.py +0 -0
  64. {databricks_sdk-0.67.0 → databricks_sdk-0.68.0}/databricks/sdk/azure.py +0 -0
  65. {databricks_sdk-0.67.0 → databricks_sdk-0.68.0}/databricks/sdk/casing.py +0 -0
  66. {databricks_sdk-0.67.0 → databricks_sdk-0.68.0}/databricks/sdk/clock.py +0 -0
  67. {databricks_sdk-0.67.0/databricks/sdk/mixins → databricks_sdk-0.68.0/databricks/sdk/common/types}/__init__.py +0 -0
  68. {databricks_sdk-0.67.0 → databricks_sdk-0.68.0}/databricks/sdk/config.py +0 -0
  69. {databricks_sdk-0.67.0 → databricks_sdk-0.68.0}/databricks/sdk/core.py +0 -0
  70. {databricks_sdk-0.67.0 → databricks_sdk-0.68.0}/databricks/sdk/data_plane.py +0 -0
  71. {databricks_sdk-0.67.0 → databricks_sdk-0.68.0}/databricks/sdk/environments.py +0 -0
  72. {databricks_sdk-0.67.0 → databricks_sdk-0.68.0}/databricks/sdk/errors/__init__.py +0 -0
  73. {databricks_sdk-0.67.0 → databricks_sdk-0.68.0}/databricks/sdk/errors/base.py +0 -0
  74. {databricks_sdk-0.67.0 → databricks_sdk-0.68.0}/databricks/sdk/errors/customizer.py +0 -0
  75. {databricks_sdk-0.67.0 → databricks_sdk-0.68.0}/databricks/sdk/errors/deserializer.py +0 -0
  76. {databricks_sdk-0.67.0 → databricks_sdk-0.68.0}/databricks/sdk/errors/details.py +0 -0
  77. {databricks_sdk-0.67.0 → databricks_sdk-0.68.0}/databricks/sdk/errors/mapper.py +0 -0
  78. {databricks_sdk-0.67.0 → databricks_sdk-0.68.0}/databricks/sdk/errors/overrides.py +0 -0
  79. {databricks_sdk-0.67.0 → databricks_sdk-0.68.0}/databricks/sdk/errors/platform.py +0 -0
  80. {databricks_sdk-0.67.0 → databricks_sdk-0.68.0}/databricks/sdk/errors/private_link.py +0 -0
  81. {databricks_sdk-0.67.0 → databricks_sdk-0.68.0}/databricks/sdk/errors/sdk.py +0 -0
  82. {databricks_sdk-0.67.0 → databricks_sdk-0.68.0}/databricks/sdk/logger/__init__.py +0 -0
  83. {databricks_sdk-0.67.0 → databricks_sdk-0.68.0}/databricks/sdk/logger/round_trip_logger.py +0 -0
  84. {databricks_sdk-0.67.0/databricks/sdk/service → databricks_sdk-0.68.0/databricks/sdk/mixins}/__init__.py +0 -0
  85. {databricks_sdk-0.67.0 → databricks_sdk-0.68.0}/databricks/sdk/mixins/compute.py +0 -0
  86. {databricks_sdk-0.67.0 → databricks_sdk-0.68.0}/databricks/sdk/mixins/jobs.py +0 -0
  87. {databricks_sdk-0.67.0 → databricks_sdk-0.68.0}/databricks/sdk/mixins/open_ai_client.py +0 -0
  88. {databricks_sdk-0.67.0 → databricks_sdk-0.68.0}/databricks/sdk/mixins/sharing.py +0 -0
  89. {databricks_sdk-0.67.0 → databricks_sdk-0.68.0}/databricks/sdk/mixins/workspace.py +0 -0
  90. {databricks_sdk-0.67.0 → databricks_sdk-0.68.0}/databricks/sdk/oauth.py +0 -0
  91. {databricks_sdk-0.67.0 → databricks_sdk-0.68.0}/databricks/sdk/oidc.py +0 -0
  92. {databricks_sdk-0.67.0 → databricks_sdk-0.68.0}/databricks/sdk/py.typed +0 -0
  93. {databricks_sdk-0.67.0 → databricks_sdk-0.68.0}/databricks/sdk/runtime/__init__.py +0 -0
  94. {databricks_sdk-0.67.0 → databricks_sdk-0.68.0}/databricks/sdk/runtime/dbutils_stub.py +0 -0
  95. {databricks_sdk-0.67.0 → databricks_sdk-0.68.0}/databricks/sdk/useragent.py +0 -0
  96. {databricks_sdk-0.67.0 → databricks_sdk-0.68.0}/databricks_sdk.egg-info/dependency_links.txt +0 -0
  97. {databricks_sdk-0.67.0 → databricks_sdk-0.68.0}/databricks_sdk.egg-info/top_level.txt +0 -0
  98. {databricks_sdk-0.67.0 → databricks_sdk-0.68.0}/setup.cfg +0 -0
@@ -1,5 +1,257 @@
1
1
  # Version changelog
2
2
 
3
+ ## Release v0.68.0
4
+
5
+ ### New Features and Improvements
6
+
7
+ * Add native support for authentication through Azure DevOps OIDC.
8
+
9
+ ### Bug Fixes
10
+ * Fix a security issue that resulted in bearer tokens being logged in exception messages.
11
+
12
+ ### API Changes
13
+ * Add `databricks.sdk.service.dataquality` package.
14
+ * Add [w.data_quality](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/dataquality/data_quality.html) workspace-level service.
15
+ * Add `create_update()` and `get_update()` methods for [w.apps](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/apps/apps.html) workspace-level service.
16
+ * Add `compute_size` field for `databricks.sdk.service.apps.App`.
17
+ * Add `genie_space` field for `databricks.sdk.service.apps.AppResource`.
18
+ * Add `skip_validation` field for `databricks.sdk.service.catalog.AccountsCreateStorageCredential`.
19
+ * Add `skip_validation` field for `databricks.sdk.service.catalog.AccountsUpdateStorageCredential`.
20
+ * Add `aliases`, `browse_only`, `created_at`, `created_by`, `full_name`, `metastore_id`, `owner`, `updated_at` and `updated_by` fields for `databricks.sdk.service.catalog.CreateRegisteredModelRequest`.
21
+ * Add `catalog_name`, `id`, `model_name` and `schema_name` fields for `databricks.sdk.service.catalog.RegisteredModelAlias`.
22
+ * Add `aliases`, `catalog_name`, `created_at`, `created_by`, `id`, `metastore_id`, `model_name`, `model_version_dependencies`, `run_id`, `run_workspace_id`, `schema_name`, `source`, `status`, `storage_location`, `updated_at` and `updated_by` fields for `databricks.sdk.service.catalog.UpdateModelVersionRequest`.
23
+ * Add `aliases`, `browse_only`, `catalog_name`, `created_at`, `created_by`, `metastore_id`, `name`, `schema_name`, `storage_location`, `updated_at` and `updated_by` fields for `databricks.sdk.service.catalog.UpdateRegisteredModelRequest`.
24
+ * Add `key_region` field for `databricks.sdk.service.provisioning.CreateAwsKeyInfo`.
25
+ * Add `role_arn` field for `databricks.sdk.service.provisioning.CreateStorageConfigurationRequest`.
26
+ * Add `azure_key_info` field for `databricks.sdk.service.provisioning.CustomerManagedKey`.
27
+ * [Breaking] Add `customer_facing_private_access_settings` field for `databricks.sdk.service.provisioning.ReplacePrivateAccessSettingsRequest`.
28
+ * Add `role_arn` field for `databricks.sdk.service.provisioning.StorageConfiguration`.
29
+ * [Breaking] Add `customer_facing_workspace` field for `databricks.sdk.service.provisioning.UpdateWorkspaceRequest`.
30
+ * Add `update_mask` field for `databricks.sdk.service.provisioning.UpdateWorkspaceRequest`.
31
+ * Add `compute_mode`, `network`, `network_connectivity_config_id` and `storage_mode` fields for `databricks.sdk.service.provisioning.Workspace`.
32
+ * Add `enable_serverless_compute` field for `databricks.sdk.service.sql.GetWorkspaceWarehouseConfigResponse`.
33
+ * Add `page_size` and `page_token` fields for `databricks.sdk.service.sql.ListWarehousesRequest`.
34
+ * Add `next_page_token` field for `databricks.sdk.service.sql.ListWarehousesResponse`.
35
+ * Add `enable_serverless_compute` field for `databricks.sdk.service.sql.SetWorkspaceWarehouseConfigRequest`.
36
+ * Add `model_version_status_unknown` enum value for `databricks.sdk.service.catalog.ModelVersionInfoStatus`.
37
+ * Add `k8s_active_pod_quota_exceeded` and `cloud_account_pod_quota_exceeded` enum values for `databricks.sdk.service.compute.TerminationReasonCode`.
38
+ * Add `internal_catalog_asset_creation_ongoing_exception`, `internal_catalog_asset_creation_failed_exception` and `internal_catalog_asset_creation_unsupported_exception` enum values for `databricks.sdk.service.dashboards.MessageErrorType`.
39
+ * Add `ssh_bootstrap_failure`, `aws_inaccessible_kms_key_failure`, `init_container_not_finished`, `spark_image_download_throttled`, `spark_image_not_found`, `cluster_operation_throttled`, `cluster_operation_timeout`, `serverless_long_running_terminated`, `azure_packed_deployment_partial_failure`, `invalid_worker_image_failure`, `workspace_update`, `invalid_aws_parameter`, `driver_out_of_disk`, `driver_out_of_memory`, `driver_launch_timeout`, `driver_unexpected_failure`, `unexpected_pod_recreation`, `gcp_inaccessible_kms_key_failure`, `gcp_kms_key_permission_denied`, `driver_eviction`, `user_initiated_vm_termination`, `gcp_iam_timeout`, `aws_resource_quota_exceeded`, `cloud_account_setup_failure`, `aws_invalid_key_pair`, `driver_pod_creation_failure`, `maintenance_mode`, `internal_capacity_failure`, `executor_pod_unscheduled`, `storage_download_failure_slow`, `storage_download_failure_throttled`, `dynamic_spark_conf_size_exceeded`, `aws_instance_profile_update_failure`, `instance_pool_not_found`, `instance_pool_max_capacity_reached`, `aws_invalid_kms_key_state`, `gcp_insufficient_capacity`, `gcp_api_rate_quota_exceeded`, `gcp_resource_quota_exceeded`, `gcp_ip_space_exhausted`, `gcp_service_account_access_denied`, `gcp_service_account_not_found`, `gcp_forbidden`, `gcp_not_found`, `resource_usage_blocked`, `data_access_config_changed`, `access_token_failure`, `invalid_instance_placement_protocol`, `budget_policy_resolution_failure`, `in_penalty_box`, `disaster_recovery_replication`, `bootstrap_timeout_due_to_misconfig`, `instance_unreachable_due_to_misconfig`, `storage_download_failure_due_to_misconfig`, `control_plane_request_failure_due_to_misconfig`, `cloud_provider_launch_failure_due_to_misconfig`, `gcp_subnet_not_ready`, `cloud_operation_cancelled`, `cloud_provider_instance_not_launched`, `gcp_trusted_image_projects_violated`, `budget_policy_limit_enforcement_activated`, `eos_spark_image`, `no_matched_k8s`, `lazy_allocation_timeout`, `driver_node_unreachable`, 
`secret_creation_failure`, `pod_scheduling_failure`, `pod_assignment_failure`, `allocation_timeout`, `allocation_timeout_no_unallocated_clusters`, `allocation_timeout_no_matched_clusters`, `allocation_timeout_no_ready_clusters`, `allocation_timeout_no_warmed_up_clusters`, `allocation_timeout_node_daemon_not_ready`, `allocation_timeout_no_healthy_clusters`, `netvisor_setup_timeout`, `no_matched_k8s_testing_tag`, `cloud_provider_resource_stockout_due_to_misconfig`, `gke_based_cluster_termination`, `allocation_timeout_no_healthy_and_warmed_up_clusters`, `docker_invalid_os_exception`, `docker_container_creation_exception`, `docker_image_too_large_for_instance_exception`, `dns_resolution_error`, `gcp_denied_by_org_policy`, `secret_permission_denied`, `network_check_nic_failure`, `network_check_dns_server_failure`, `network_check_storage_failure`, `network_check_metadata_endpoint_failure`, `network_check_control_plane_failure`, `network_check_multiple_components_failure`, `driver_unhealthy`, `security_agents_failed_initial_verification`, `driver_dns_resolution_failure`, `no_activated_k8s`, `usage_policy_entitlement_denied`, `no_activated_k8s_testing_tag`, `k8s_active_pod_quota_exceeded` and `cloud_account_pod_quota_exceeded` enum values for `databricks.sdk.service.sql.TerminationReasonCode`.
40
+ * [Breaking] Change `create()` method for [a.account_metastore_assignments](https://databricks-sdk-py.readthedocs.io/en/latest/account/catalog/account_metastore_assignments.html) account-level service to start returning `databricks.sdk.service.catalog.AccountsCreateMetastoreAssignmentResponse` dataclass.
41
+ * [Breaking] Change `delete()` method for [a.account_metastore_assignments](https://databricks-sdk-py.readthedocs.io/en/latest/account/catalog/account_metastore_assignments.html) account-level service to start returning `databricks.sdk.service.catalog.AccountsDeleteMetastoreAssignmentResponse` dataclass.
42
+ * [Breaking] Change `update()` method for [a.account_metastore_assignments](https://databricks-sdk-py.readthedocs.io/en/latest/account/catalog/account_metastore_assignments.html) account-level service to start returning `databricks.sdk.service.catalog.AccountsUpdateMetastoreAssignmentResponse` dataclass.
43
+ * [Breaking] Change `create()` method for [a.account_metastores](https://databricks-sdk-py.readthedocs.io/en/latest/account/catalog/account_metastores.html) account-level service to return `databricks.sdk.service.catalog.AccountsCreateMetastoreResponse` dataclass.
44
+ * [Breaking] Change `delete()` method for [a.account_metastores](https://databricks-sdk-py.readthedocs.io/en/latest/account/catalog/account_metastores.html) account-level service to start returning `databricks.sdk.service.catalog.AccountsDeleteMetastoreResponse` dataclass.
45
+ * [Breaking] Change `get()` method for [a.account_metastores](https://databricks-sdk-py.readthedocs.io/en/latest/account/catalog/account_metastores.html) account-level service to return `databricks.sdk.service.catalog.AccountsGetMetastoreResponse` dataclass.
46
+ * [Breaking] Change `list()` method for [a.account_metastores](https://databricks-sdk-py.readthedocs.io/en/latest/account/catalog/account_metastores.html) account-level service to return `databricks.sdk.service.catalog.AccountsListMetastoresResponse` dataclass.
47
+ * [Breaking] Change `update()` method for [a.account_metastores](https://databricks-sdk-py.readthedocs.io/en/latest/account/catalog/account_metastores.html) account-level service to return `databricks.sdk.service.catalog.AccountsUpdateMetastoreResponse` dataclass.
48
+ * [Breaking] Change `create()` method for [a.account_storage_credentials](https://databricks-sdk-py.readthedocs.io/en/latest/account/catalog/account_storage_credentials.html) account-level service to return `databricks.sdk.service.catalog.AccountsCreateStorageCredentialInfo` dataclass.
49
+ * [Breaking] Change `delete()` method for [a.account_storage_credentials](https://databricks-sdk-py.readthedocs.io/en/latest/account/catalog/account_storage_credentials.html) account-level service to start returning `databricks.sdk.service.catalog.AccountsDeleteStorageCredentialResponse` dataclass.
50
+ * [Breaking] Change `update()` method for [a.account_storage_credentials](https://databricks-sdk-py.readthedocs.io/en/latest/account/catalog/account_storage_credentials.html) account-level service to return `databricks.sdk.service.catalog.AccountsUpdateStorageCredentialResponse` dataclass.
51
+ * [Breaking] Change `create()` method for [w.registered_models](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/catalog/registered_models.html) workspace-level service with new required argument order.
52
+ * [Breaking] Change `delete()` method for [a.credentials](https://databricks-sdk-py.readthedocs.io/en/latest/account/provisioning/credentials.html) account-level service to start returning `databricks.sdk.service.provisioning.Credential` dataclass.
53
+ * [Breaking] Change `delete()` method for [a.encryption_keys](https://databricks-sdk-py.readthedocs.io/en/latest/account/provisioning/encryption_keys.html) account-level service to start returning `databricks.sdk.service.provisioning.CustomerManagedKey` dataclass.
54
+ * [Breaking] Change `create()` method for [a.networks](https://databricks-sdk-py.readthedocs.io/en/latest/account/provisioning/networks.html) account-level service with new required argument order.
55
+ * [Breaking] Change `delete()` method for [a.networks](https://databricks-sdk-py.readthedocs.io/en/latest/account/provisioning/networks.html) account-level service to start returning `databricks.sdk.service.provisioning.Network` dataclass.
56
+ * [Breaking] Change `create()` and `replace()` methods for [a.private_access](https://databricks-sdk-py.readthedocs.io/en/latest/account/provisioning/private_access.html) account-level service with new required argument order.
57
+ * [Breaking] Change `delete()` and `replace()` methods for [a.private_access](https://databricks-sdk-py.readthedocs.io/en/latest/account/provisioning/private_access.html) account-level service to start returning `databricks.sdk.service.provisioning.PrivateAccessSettings` dataclass.
58
+ * [Breaking] Change `delete()` method for [a.storage](https://databricks-sdk-py.readthedocs.io/en/latest/account/provisioning/storage.html) account-level service to start returning `databricks.sdk.service.provisioning.StorageConfiguration` dataclass.
59
+ * [Breaking] Change `create()` method for [a.vpc_endpoints](https://databricks-sdk-py.readthedocs.io/en/latest/account/provisioning/vpc_endpoints.html) account-level service with new required argument order.
60
+ * [Breaking] Change `delete()` method for [a.vpc_endpoints](https://databricks-sdk-py.readthedocs.io/en/latest/account/provisioning/vpc_endpoints.html) account-level service to start returning `databricks.sdk.service.provisioning.VpcEndpoint` dataclass.
61
+ * [Breaking] Change `create()` and `update()` methods for [a.workspaces](https://databricks-sdk-py.readthedocs.io/en/latest/account/provisioning/workspaces.html) account-level service with new required argument order.
62
+ * [Breaking] Change `delete()` and `update()` methods for [a.workspaces](https://databricks-sdk-py.readthedocs.io/en/latest/account/provisioning/workspaces.html) account-level service to start returning `databricks.sdk.service.provisioning.Workspace` dataclass.
63
+ * [Breaking] Change `execute_statement()` method for [w.statement_execution](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/sql/statement_execution.html) workspace-level service. Method path has changed.
64
+ * [Breaking] Change `metastore_info` field for `databricks.sdk.service.catalog.AccountsCreateMetastore` to type `databricks.sdk.service.catalog.CreateAccountsMetastore` dataclass.
65
+ * [Breaking] Change `credential_info` field for `databricks.sdk.service.catalog.AccountsCreateStorageCredential` to type `databricks.sdk.service.catalog.CreateAccountsStorageCredential` dataclass.
66
+ * [Breaking] Change `metastore_info` field for `databricks.sdk.service.catalog.AccountsUpdateMetastore` to type `databricks.sdk.service.catalog.UpdateAccountsMetastore` dataclass.
67
+ * [Breaking] Change `credential_info` field for `databricks.sdk.service.catalog.AccountsUpdateStorageCredential` to type `databricks.sdk.service.catalog.UpdateAccountsStorageCredential` dataclass.
68
+ * Change `catalog_name`, `name` and `schema_name` fields for `databricks.sdk.service.catalog.CreateRegisteredModelRequest` to no longer be required.
69
+ * Change `network_name` field for `databricks.sdk.service.provisioning.CreateNetworkRequest` to no longer be required.
70
+ * Change `private_access_settings_name` and `region` fields for `databricks.sdk.service.provisioning.CreatePrivateAccessSettingsRequest` to no longer be required.
71
+ * Change `vpc_endpoint_name` field for `databricks.sdk.service.provisioning.CreateVpcEndpointRequest` to no longer be required.
72
+ * Change `workspace_name` field for `databricks.sdk.service.provisioning.CreateWorkspaceRequest` to no longer be required.
73
+ * [Breaking] Change `dataplane_relay` and `rest_api` fields for `databricks.sdk.service.provisioning.NetworkVpcEndpoints` to no longer be required.
74
+ * Change `dataplane_relay` and `rest_api` fields for `databricks.sdk.service.provisioning.NetworkVpcEndpoints` to no longer be required.
75
+ * [Breaking] Change waiter for [WorkspacesAPI.update](https://databricks-sdk-py.readthedocs.io/en/latest/account/provisioning/workspaces.html#databricks.sdk.service.provisioning.WorkspacesAPI.update) method.
76
+ * [Breaking] Remove `browse_only` field for `databricks.sdk.service.catalog.ModelVersionInfo`.
77
+ * [Breaking] Remove `jar_dependencies` field for `databricks.sdk.service.compute.Environment`.
78
+ * [Breaking] Remove `is_no_public_ip_enabled` field for `databricks.sdk.service.provisioning.CreateWorkspaceRequest`.
79
+ * [Breaking] Remove `allowed_vpc_endpoint_ids`, `private_access_level`, `private_access_settings_name`, `public_access_enabled` and `region` fields for `databricks.sdk.service.provisioning.ReplacePrivateAccessSettingsRequest`.
80
+ * [Breaking] Remove `external_id` field for `databricks.sdk.service.provisioning.StsRole`.
81
+ * [Breaking] Remove `aws_region`, `credentials_id`, `custom_tags`, `managed_services_customer_managed_key_id`, `network_connectivity_config_id`, `network_id`, `private_access_settings_id`, `storage_configuration_id` and `storage_customer_managed_key_id` fields for `databricks.sdk.service.provisioning.UpdateWorkspaceRequest`.
82
+ * [Breaking] Remove `external_customer_info` and `is_no_public_ip_enabled` fields for `databricks.sdk.service.provisioning.Workspace`.
83
+ * [Breaking] Remove `status_unspecified` enum value for `databricks.sdk.service.sql.Status`.
84
+ * Add `browse_only` field for `databricks.sdk.service.catalog.ModelVersionInfo`.
85
+ * Add `jar_dependencies` field for `databricks.sdk.service.compute.Environment`.
86
+ * Add `is_no_public_ip_enabled` field for `databricks.sdk.service.provisioning.CreateWorkspaceRequest`.
87
+ * Add `allowed_vpc_endpoint_ids`, `private_access_level` and `public_access_enabled` fields for `databricks.sdk.service.provisioning.ReplacePrivateAccessSettingsRequest`.
88
+ * [Breaking] Add `private_access_settings_name` and `region` fields for `databricks.sdk.service.provisioning.ReplacePrivateAccessSettingsRequest`.
89
+ * Add `external_id` field for `databricks.sdk.service.provisioning.StsRole`.
90
+ * Add `aws_region`, `credentials_id`, `custom_tags`, `managed_services_customer_managed_key_id`, `network_connectivity_config_id`, `network_id`, `private_access_settings_id`, `storage_configuration_id` and `storage_customer_managed_key_id` fields for `databricks.sdk.service.provisioning.UpdateWorkspaceRequest`.
91
+ * Add `external_customer_info` and `is_no_public_ip_enabled` fields for `databricks.sdk.service.provisioning.Workspace`.
92
+ * Add `status_unspecified` enum value for `databricks.sdk.service.sql.Status`.
93
+ * [Breaking] Change `create()` method for [a.account_metastore_assignments](https://databricks-sdk-py.readthedocs.io/en/latest/account/catalog/account_metastore_assignments.html) account-level service to no longer return `databricks.sdk.service.catalog.AccountsCreateMetastoreAssignmentResponse` dataclass.
94
+ * [Breaking] Change `delete()` method for [a.account_metastore_assignments](https://databricks-sdk-py.readthedocs.io/en/latest/account/catalog/account_metastore_assignments.html) account-level service to no longer return `databricks.sdk.service.catalog.AccountsDeleteMetastoreAssignmentResponse` dataclass.
95
+ * [Breaking] Change `update()` method for [a.account_metastore_assignments](https://databricks-sdk-py.readthedocs.io/en/latest/account/catalog/account_metastore_assignments.html) account-level service to no longer return `databricks.sdk.service.catalog.AccountsUpdateMetastoreAssignmentResponse` dataclass.
96
+ * [Breaking] Change `create()`, `get()` and `update()` methods for [a.account_metastores](https://databricks-sdk-py.readthedocs.io/en/latest/account/catalog/account_metastores.html) account-level service to return `databricks.sdk.service.catalog.AccountsMetastoreInfo` dataclass.
97
+ * [Breaking] Change `delete()` method for [a.account_metastores](https://databricks-sdk-py.readthedocs.io/en/latest/account/catalog/account_metastores.html) account-level service to no longer return `databricks.sdk.service.catalog.AccountsDeleteMetastoreResponse` dataclass.
98
+ * [Breaking] Change `list()` method for [a.account_metastores](https://databricks-sdk-py.readthedocs.io/en/latest/account/catalog/account_metastores.html) account-level service to return `databricks.sdk.service.catalog.ListMetastoresResponse` dataclass.
99
+ * [Breaking] Change `create()` and `update()` methods for [a.account_storage_credentials](https://databricks-sdk-py.readthedocs.io/en/latest/account/catalog/account_storage_credentials.html) account-level service to return `databricks.sdk.service.catalog.AccountsStorageCredentialInfo` dataclass.
100
+ * [Breaking] Change `delete()` method for [a.account_storage_credentials](https://databricks-sdk-py.readthedocs.io/en/latest/account/catalog/account_storage_credentials.html) account-level service to no longer return `databricks.sdk.service.catalog.AccountsDeleteStorageCredentialResponse` dataclass.
101
+ * [Breaking] Change `create()` method for [w.registered_models](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/catalog/registered_models.html) workspace-level service with new required argument order.
102
+ * [Breaking] Change `delete()` method for [a.credentials](https://databricks-sdk-py.readthedocs.io/en/latest/account/provisioning/credentials.html) account-level service to no longer return `databricks.sdk.service.provisioning.Credential` dataclass.
103
+ * [Breaking] Change `delete()` method for [a.encryption_keys](https://databricks-sdk-py.readthedocs.io/en/latest/account/provisioning/encryption_keys.html) account-level service to no longer return `databricks.sdk.service.provisioning.CustomerManagedKey` dataclass.
104
+ * [Breaking] Change `create()` method for [a.networks](https://databricks-sdk-py.readthedocs.io/en/latest/account/provisioning/networks.html) account-level service with new required argument order.
105
+ * [Breaking] Change `delete()` method for [a.networks](https://databricks-sdk-py.readthedocs.io/en/latest/account/provisioning/networks.html) account-level service to no longer return `databricks.sdk.service.provisioning.Network` dataclass.
106
+ * [Breaking] Change `create()` and `replace()` methods for [a.private_access](https://databricks-sdk-py.readthedocs.io/en/latest/account/provisioning/private_access.html) account-level service with new required argument order.
107
+ * [Breaking] Change `delete()` and `replace()` methods for [a.private_access](https://databricks-sdk-py.readthedocs.io/en/latest/account/provisioning/private_access.html) account-level service to no longer return `databricks.sdk.service.provisioning.PrivateAccessSettings` dataclass.
108
+ * [Breaking] Change `delete()` method for [a.storage](https://databricks-sdk-py.readthedocs.io/en/latest/account/provisioning/storage.html) account-level service to no longer return `databricks.sdk.service.provisioning.StorageConfiguration` dataclass.
109
+ * [Breaking] Change `create()` method for [a.vpc_endpoints](https://databricks-sdk-py.readthedocs.io/en/latest/account/provisioning/vpc_endpoints.html) account-level service with new required argument order.
110
+ * [Breaking] Change `delete()` method for [a.vpc_endpoints](https://databricks-sdk-py.readthedocs.io/en/latest/account/provisioning/vpc_endpoints.html) account-level service to no longer return `databricks.sdk.service.provisioning.VpcEndpoint` dataclass.
111
+ * [Breaking] Change `create()` and `update()` methods for [a.workspaces](https://databricks-sdk-py.readthedocs.io/en/latest/account/provisioning/workspaces.html) account-level service with new required argument order.
112
+ * [Breaking] Change `delete()` and `update()` methods for [a.workspaces](https://databricks-sdk-py.readthedocs.io/en/latest/account/provisioning/workspaces.html) account-level service to no longer return `databricks.sdk.service.provisioning.Workspace` dataclass.
113
+ * [Breaking] Change `execute_statement()` method for [w.statement_execution](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/sql/statement_execution.html) workspace-level service. Method path has changed.
114
+ * [Breaking] Change `metastore_info` field for `databricks.sdk.service.catalog.AccountsCreateMetastore` to type `databricks.sdk.service.catalog.CreateMetastore` dataclass.
115
+ * [Breaking] Change `credential_info` field for `databricks.sdk.service.catalog.AccountsCreateStorageCredential` to type `databricks.sdk.service.catalog.CreateStorageCredential` dataclass.
116
+ * [Breaking] Change `metastore_info` field for `databricks.sdk.service.catalog.AccountsUpdateMetastore` to type `databricks.sdk.service.catalog.UpdateMetastore` dataclass.
117
+ * [Breaking] Change `credential_info` field for `databricks.sdk.service.catalog.AccountsUpdateStorageCredential` to type `databricks.sdk.service.catalog.UpdateStorageCredential` dataclass.
118
+ * [Breaking] Change `catalog_name`, `name` and `schema_name` fields for `databricks.sdk.service.catalog.CreateRegisteredModelRequest` to be required.
119
+ * [Breaking] Change `name` field for `databricks.sdk.service.database.DatabaseInstanceRole` to no longer be required.
120
+ * Change `name` field for `databricks.sdk.service.database.DatabaseInstanceRole` to no longer be required.
121
+ * [Breaking] Change `network_name` field for `databricks.sdk.service.provisioning.CreateNetworkRequest` to be required.
122
+ * [Breaking] Change `private_access_settings_name` and `region` fields for `databricks.sdk.service.provisioning.CreatePrivateAccessSettingsRequest` to be required.
123
+ * [Breaking] Change `vpc_endpoint_name` field for `databricks.sdk.service.provisioning.CreateVpcEndpointRequest` to be required.
124
+ * [Breaking] Change `workspace_name` field for `databricks.sdk.service.provisioning.CreateWorkspaceRequest` to be required.
125
+ * Change `dataplane_relay` and `rest_api` fields for `databricks.sdk.service.provisioning.NetworkVpcEndpoints` to be required.
126
+ * [Breaking] Change `dataplane_relay` and `rest_api` fields for `databricks.sdk.service.provisioning.NetworkVpcEndpoints` to be required.
127
+ * [Breaking] Change waiter for [WorkspacesAPI.update](https://databricks-sdk-py.readthedocs.io/en/latest/account/provisioning/workspaces.html#databricks.sdk.service.provisioning.WorkspacesAPI.update) method.
128
+ * [Breaking] Remove `databricks.sdk.service.dataquality` package.
129
+ * [Breaking] Remove `create_update()` and `get_update()` methods for [w.apps](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/apps/apps.html) workspace-level service.
130
+ * [Breaking] Remove `update_notifications()` method for [w.serving_endpoints](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/serving/serving_endpoints.html) workspace-level service.
131
+ * [Breaking] Remove `compute_size` field for `databricks.sdk.service.apps.App`.
132
+ * [Breaking] Remove `genie_space` field for `databricks.sdk.service.apps.AppResource`.
133
+ * [Breaking] Remove `skip_validation` field for `databricks.sdk.service.catalog.AccountsCreateStorageCredential`.
134
+ * [Breaking] Remove `skip_validation` field for `databricks.sdk.service.catalog.AccountsUpdateStorageCredential`.
135
+ * [Breaking] Remove `aliases`, `browse_only`, `created_at`, `created_by`, `full_name`, `metastore_id`, `owner`, `updated_at` and `updated_by` fields for `databricks.sdk.service.catalog.CreateRegisteredModelRequest`.
136
+ * [Breaking] Remove `catalog_name`, `id`, `model_name` and `schema_name` fields for `databricks.sdk.service.catalog.RegisteredModelAlias`.
137
+ * [Breaking] Remove `aliases`, `catalog_name`, `created_at`, `created_by`, `id`, `metastore_id`, `model_name`, `model_version_dependencies`, `run_id`, `run_workspace_id`, `schema_name`, `source`, `status`, `storage_location`, `updated_at` and `updated_by` fields for `databricks.sdk.service.catalog.UpdateModelVersionRequest`.
138
+ * [Breaking] Remove `aliases`, `browse_only`, `catalog_name`, `created_at`, `created_by`, `metastore_id`, `name`, `schema_name`, `storage_location`, `updated_at` and `updated_by` fields for `databricks.sdk.service.catalog.UpdateRegisteredModelRequest`.
139
+ * [Breaking] Remove `parameters` field for `databricks.sdk.service.dashboards.GenieQueryAttachment`.
140
+ * [Breaking] Remove `database_instance_name` field for `databricks.sdk.service.database.CreateDatabaseInstanceRoleRequest`.
141
+ * [Breaking] Remove `custom_tags`, `effective_custom_tags`, `effective_usage_policy_id` and `usage_policy_id` fields for `databricks.sdk.service.database.DatabaseInstance`.
142
+ * [Breaking] Remove `effective_attributes` and `instance_name` fields for `databricks.sdk.service.database.DatabaseInstanceRole`.
143
+ * [Breaking] Remove `key_region` field for `databricks.sdk.service.provisioning.CreateAwsKeyInfo`.
144
+ * [Breaking] Remove `role_arn` field for `databricks.sdk.service.provisioning.CreateStorageConfigurationRequest`.
145
+ * [Breaking] Remove `azure_key_info` field for `databricks.sdk.service.provisioning.CustomerManagedKey`.
146
+ * [Breaking] Remove `customer_facing_private_access_settings` field for `databricks.sdk.service.provisioning.ReplacePrivateAccessSettingsRequest`.
147
+ * [Breaking] Remove `role_arn` field for `databricks.sdk.service.provisioning.StorageConfiguration`.
148
+ * [Breaking] Remove `customer_facing_workspace` and `update_mask` fields for `databricks.sdk.service.provisioning.UpdateWorkspaceRequest`.
149
+ * [Breaking] Remove `compute_mode`, `network`, `network_connectivity_config_id` and `storage_mode` fields for `databricks.sdk.service.provisioning.Workspace`.
150
+ * [Breaking] Remove `enable_serverless_compute` field for `databricks.sdk.service.sql.GetWorkspaceWarehouseConfigResponse`.
151
+ * [Breaking] Remove `page_size` and `page_token` fields for `databricks.sdk.service.sql.ListWarehousesRequest`.
152
+ * [Breaking] Remove `next_page_token` field for `databricks.sdk.service.sql.ListWarehousesResponse`.
153
+ * [Breaking] Remove `enable_serverless_compute` field for `databricks.sdk.service.sql.SetWorkspaceWarehouseConfigRequest`.
154
+ * [Breaking] Remove `model_version_status_unknown` enum value for `databricks.sdk.service.catalog.ModelVersionInfoStatus`.
155
+ * [Breaking] Remove `stream_native` enum value for `databricks.sdk.service.catalog.SystemType`.
156
+ * [Breaking] Remove `k8s_active_pod_quota_exceeded` and `cloud_account_pod_quota_exceeded` enum values for `databricks.sdk.service.compute.TerminationReasonCode`.
157
+ * [Breaking] Remove `exceeded_max_token_length_exception`, `internal_catalog_asset_creation_ongoing_exception`, `internal_catalog_asset_creation_failed_exception` and `internal_catalog_asset_creation_unsupported_exception` enum values for `databricks.sdk.service.dashboards.MessageErrorType`.
158
+ * [Breaking] Remove `ssh_bootstrap_failure`, `aws_inaccessible_kms_key_failure`, `init_container_not_finished`, `spark_image_download_throttled`, `spark_image_not_found`, `cluster_operation_throttled`, `cluster_operation_timeout`, `serverless_long_running_terminated`, `azure_packed_deployment_partial_failure`, `invalid_worker_image_failure`, `workspace_update`, `invalid_aws_parameter`, `driver_out_of_disk`, `driver_out_of_memory`, `driver_launch_timeout`, `driver_unexpected_failure`, `unexpected_pod_recreation`, `gcp_inaccessible_kms_key_failure`, `gcp_kms_key_permission_denied`, `driver_eviction`, `user_initiated_vm_termination`, `gcp_iam_timeout`, `aws_resource_quota_exceeded`, `cloud_account_setup_failure`, `aws_invalid_key_pair`, `driver_pod_creation_failure`, `maintenance_mode`, `internal_capacity_failure`, `executor_pod_unscheduled`, `storage_download_failure_slow`, `storage_download_failure_throttled`, `dynamic_spark_conf_size_exceeded`, `aws_instance_profile_update_failure`, `instance_pool_not_found`, `instance_pool_max_capacity_reached`, `aws_invalid_kms_key_state`, `gcp_insufficient_capacity`, `gcp_api_rate_quota_exceeded`, `gcp_resource_quota_exceeded`, `gcp_ip_space_exhausted`, `gcp_service_account_access_denied`, `gcp_service_account_not_found`, `gcp_forbidden`, `gcp_not_found`, `resource_usage_blocked`, `data_access_config_changed`, `access_token_failure`, `invalid_instance_placement_protocol`, `budget_policy_resolution_failure`, `in_penalty_box`, `disaster_recovery_replication`, `bootstrap_timeout_due_to_misconfig`, `instance_unreachable_due_to_misconfig`, `storage_download_failure_due_to_misconfig`, `control_plane_request_failure_due_to_misconfig`, `cloud_provider_launch_failure_due_to_misconfig`, `gcp_subnet_not_ready`, `cloud_operation_cancelled`, `cloud_provider_instance_not_launched`, `gcp_trusted_image_projects_violated`, `budget_policy_limit_enforcement_activated`, `eos_spark_image`, `no_matched_k8s`, `lazy_allocation_timeout`, 
`driver_node_unreachable`, `secret_creation_failure`, `pod_scheduling_failure`, `pod_assignment_failure`, `allocation_timeout`, `allocation_timeout_no_unallocated_clusters`, `allocation_timeout_no_matched_clusters`, `allocation_timeout_no_ready_clusters`, `allocation_timeout_no_warmed_up_clusters`, `allocation_timeout_node_daemon_not_ready`, `allocation_timeout_no_healthy_clusters`, `netvisor_setup_timeout`, `no_matched_k8s_testing_tag`, `cloud_provider_resource_stockout_due_to_misconfig`, `gke_based_cluster_termination`, `allocation_timeout_no_healthy_and_warmed_up_clusters`, `docker_invalid_os_exception`, `docker_container_creation_exception`, `docker_image_too_large_for_instance_exception`, `dns_resolution_error`, `gcp_denied_by_org_policy`, `secret_permission_denied`, `network_check_nic_failure`, `network_check_dns_server_failure`, `network_check_storage_failure`, `network_check_metadata_endpoint_failure`, `network_check_control_plane_failure`, `network_check_multiple_components_failure`, `driver_unhealthy`, `security_agents_failed_initial_verification`, `driver_dns_resolution_failure`, `no_activated_k8s`, `usage_policy_entitlement_denied`, `no_activated_k8s_testing_tag`, `k8s_active_pod_quota_exceeded` and `cloud_account_pod_quota_exceeded` enum values for `databricks.sdk.service.sql.TerminationReasonCode`.
159
+ * Add `databricks.sdk.service.dataquality` package.
160
+ * Add [w.data_quality](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/dataquality/data_quality.html) workspace-level service.
161
+ * Add `create_update()` and `get_update()` methods for [w.apps](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/apps/apps.html) workspace-level service.
162
+ * Add `update_notifications()` method for [w.serving_endpoints](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/serving/serving_endpoints.html) workspace-level service.
163
+ * Add `compute_size` field for `databricks.sdk.service.apps.App`.
164
+ * Add `genie_space` field for `databricks.sdk.service.apps.AppResource`.
165
+ * Add `skip_validation` field for `databricks.sdk.service.catalog.AccountsCreateStorageCredential`.
166
+ * Add `skip_validation` field for `databricks.sdk.service.catalog.AccountsUpdateStorageCredential`.
167
+ * Add `aliases`, `browse_only`, `created_at`, `created_by`, `full_name`, `metastore_id`, `owner`, `updated_at` and `updated_by` fields for `databricks.sdk.service.catalog.CreateRegisteredModelRequest`.
168
+ * Add `include_unbound` field for `databricks.sdk.service.catalog.ListCatalogsRequest`.
169
+ * Add `include_unbound` field for `databricks.sdk.service.catalog.ListCredentialsRequest`.
170
+ * Add `include_unbound` field for `databricks.sdk.service.catalog.ListExternalLocationsRequest`.
171
+ * Add `include_unbound` field for `databricks.sdk.service.catalog.ListStorageCredentialsRequest`.
172
+ * Add `catalog_name`, `id`, `model_name` and `schema_name` fields for `databricks.sdk.service.catalog.RegisteredModelAlias`.
173
+ * Add `aliases`, `catalog_name`, `created_at`, `created_by`, `id`, `metastore_id`, `model_name`, `model_version_dependencies`, `run_id`, `run_workspace_id`, `schema_name`, `source`, `status`, `storage_location`, `updated_at` and `updated_by` fields for `databricks.sdk.service.catalog.UpdateModelVersionRequest`.
174
+ * Add `aliases`, `browse_only`, `catalog_name`, `created_at`, `created_by`, `metastore_id`, `name`, `schema_name`, `storage_location`, `updated_at` and `updated_by` fields for `databricks.sdk.service.catalog.UpdateRegisteredModelRequest`.
175
+ * Add `parameters` field for `databricks.sdk.service.dashboards.GenieQueryAttachment`.
176
+ * Add `database_instance_name` field for `databricks.sdk.service.database.CreateDatabaseInstanceRoleRequest`.
177
+ * Add `custom_tags`, `effective_custom_tags`, `effective_usage_policy_id` and `usage_policy_id` fields for `databricks.sdk.service.database.DatabaseInstance`.
178
+ * Add `effective_attributes` and `instance_name` fields for `databricks.sdk.service.database.DatabaseInstanceRole`.
179
+ * Add `key_region` field for `databricks.sdk.service.provisioning.CreateAwsKeyInfo`.
180
+ * Add `role_arn` field for `databricks.sdk.service.provisioning.CreateStorageConfigurationRequest`.
181
+ * Add `azure_key_info` field for `databricks.sdk.service.provisioning.CustomerManagedKey`.
182
+ * [Breaking] Add `customer_facing_private_access_settings` field for `databricks.sdk.service.provisioning.ReplacePrivateAccessSettingsRequest`.
183
+ * Add `role_arn` field for `databricks.sdk.service.provisioning.StorageConfiguration`.
184
+ * [Breaking] Add `customer_facing_workspace` field for `databricks.sdk.service.provisioning.UpdateWorkspaceRequest`.
185
+ * Add `update_mask` field for `databricks.sdk.service.provisioning.UpdateWorkspaceRequest`.
186
+ * Add `compute_mode`, `network`, `network_connectivity_config_id` and `storage_mode` fields for `databricks.sdk.service.provisioning.Workspace`.
187
+ * Add `enable_serverless_compute` field for `databricks.sdk.service.sql.GetWorkspaceWarehouseConfigResponse`.
188
+ * Add `page_size` and `page_token` fields for `databricks.sdk.service.sql.ListWarehousesRequest`.
189
+ * Add `next_page_token` field for `databricks.sdk.service.sql.ListWarehousesResponse`.
190
+ * Add `enable_serverless_compute` field for `databricks.sdk.service.sql.SetWorkspaceWarehouseConfigRequest`.
191
+ * Add `model_version_status_unknown` enum value for `databricks.sdk.service.catalog.ModelVersionInfoStatus`.
192
+ * Add `stream_native` enum value for `databricks.sdk.service.catalog.SystemType`.
193
+ * Add `k8s_active_pod_quota_exceeded` and `cloud_account_pod_quota_exceeded` enum values for `databricks.sdk.service.compute.TerminationReasonCode`.
194
+ * Add `exceeded_max_token_length_exception`, `internal_catalog_asset_creation_ongoing_exception`, `internal_catalog_asset_creation_failed_exception` and `internal_catalog_asset_creation_unsupported_exception` enum values for `databricks.sdk.service.dashboards.MessageErrorType`.
195
+ * Add `asset_type_mcp` enum value for `databricks.sdk.service.marketplace.AssetType`.
196
+ * Add `ssh_bootstrap_failure`, `aws_inaccessible_kms_key_failure`, `init_container_not_finished`, `spark_image_download_throttled`, `spark_image_not_found`, `cluster_operation_throttled`, `cluster_operation_timeout`, `serverless_long_running_terminated`, `azure_packed_deployment_partial_failure`, `invalid_worker_image_failure`, `workspace_update`, `invalid_aws_parameter`, `driver_out_of_disk`, `driver_out_of_memory`, `driver_launch_timeout`, `driver_unexpected_failure`, `unexpected_pod_recreation`, `gcp_inaccessible_kms_key_failure`, `gcp_kms_key_permission_denied`, `driver_eviction`, `user_initiated_vm_termination`, `gcp_iam_timeout`, `aws_resource_quota_exceeded`, `cloud_account_setup_failure`, `aws_invalid_key_pair`, `driver_pod_creation_failure`, `maintenance_mode`, `internal_capacity_failure`, `executor_pod_unscheduled`, `storage_download_failure_slow`, `storage_download_failure_throttled`, `dynamic_spark_conf_size_exceeded`, `aws_instance_profile_update_failure`, `instance_pool_not_found`, `instance_pool_max_capacity_reached`, `aws_invalid_kms_key_state`, `gcp_insufficient_capacity`, `gcp_api_rate_quota_exceeded`, `gcp_resource_quota_exceeded`, `gcp_ip_space_exhausted`, `gcp_service_account_access_denied`, `gcp_service_account_not_found`, `gcp_forbidden`, `gcp_not_found`, `resource_usage_blocked`, `data_access_config_changed`, `access_token_failure`, `invalid_instance_placement_protocol`, `budget_policy_resolution_failure`, `in_penalty_box`, `disaster_recovery_replication`, `bootstrap_timeout_due_to_misconfig`, `instance_unreachable_due_to_misconfig`, `storage_download_failure_due_to_misconfig`, `control_plane_request_failure_due_to_misconfig`, `cloud_provider_launch_failure_due_to_misconfig`, `gcp_subnet_not_ready`, `cloud_operation_cancelled`, `cloud_provider_instance_not_launched`, `gcp_trusted_image_projects_violated`, `budget_policy_limit_enforcement_activated`, `eos_spark_image`, `no_matched_k8s`, `lazy_allocation_timeout`, `driver_node_unreachable`, 
`secret_creation_failure`, `pod_scheduling_failure`, `pod_assignment_failure`, `allocation_timeout`, `allocation_timeout_no_unallocated_clusters`, `allocation_timeout_no_matched_clusters`, `allocation_timeout_no_ready_clusters`, `allocation_timeout_no_warmed_up_clusters`, `allocation_timeout_node_daemon_not_ready`, `allocation_timeout_no_healthy_clusters`, `netvisor_setup_timeout`, `no_matched_k8s_testing_tag`, `cloud_provider_resource_stockout_due_to_misconfig`, `gke_based_cluster_termination`, `allocation_timeout_no_healthy_and_warmed_up_clusters`, `docker_invalid_os_exception`, `docker_container_creation_exception`, `docker_image_too_large_for_instance_exception`, `dns_resolution_error`, `gcp_denied_by_org_policy`, `secret_permission_denied`, `network_check_nic_failure`, `network_check_dns_server_failure`, `network_check_storage_failure`, `network_check_metadata_endpoint_failure`, `network_check_control_plane_failure`, `network_check_multiple_components_failure`, `driver_unhealthy`, `security_agents_failed_initial_verification`, `driver_dns_resolution_failure`, `no_activated_k8s`, `usage_policy_entitlement_denied`, `no_activated_k8s_testing_tag`, `k8s_active_pod_quota_exceeded` and `cloud_account_pod_quota_exceeded` enum values for `databricks.sdk.service.sql.TerminationReasonCode`.
197
+ * [Breaking] Change `create()` method for [a.account_metastore_assignments](https://databricks-sdk-py.readthedocs.io/en/latest/account/catalog/account_metastore_assignments.html) account-level service to start returning `databricks.sdk.service.catalog.AccountsCreateMetastoreAssignmentResponse` dataclass.
198
+ * [Breaking] Change `delete()` method for [a.account_metastore_assignments](https://databricks-sdk-py.readthedocs.io/en/latest/account/catalog/account_metastore_assignments.html) account-level service to start returning `databricks.sdk.service.catalog.AccountsDeleteMetastoreAssignmentResponse` dataclass.
199
+ * [Breaking] Change `update()` method for [a.account_metastore_assignments](https://databricks-sdk-py.readthedocs.io/en/latest/account/catalog/account_metastore_assignments.html) account-level service to start returning `databricks.sdk.service.catalog.AccountsUpdateMetastoreAssignmentResponse` dataclass.
200
+ * [Breaking] Change `create()` method for [a.account_metastores](https://databricks-sdk-py.readthedocs.io/en/latest/account/catalog/account_metastores.html) account-level service to return `databricks.sdk.service.catalog.AccountsCreateMetastoreResponse` dataclass.
201
+ * [Breaking] Change `delete()` method for [a.account_metastores](https://databricks-sdk-py.readthedocs.io/en/latest/account/catalog/account_metastores.html) account-level service to start returning `databricks.sdk.service.catalog.AccountsDeleteMetastoreResponse` dataclass.
202
+ * [Breaking] Change `get()` method for [a.account_metastores](https://databricks-sdk-py.readthedocs.io/en/latest/account/catalog/account_metastores.html) account-level service to return `databricks.sdk.service.catalog.AccountsGetMetastoreResponse` dataclass.
203
+ * [Breaking] Change `list()` method for [a.account_metastores](https://databricks-sdk-py.readthedocs.io/en/latest/account/catalog/account_metastores.html) account-level service to return `databricks.sdk.service.catalog.AccountsListMetastoresResponse` dataclass.
204
+ * [Breaking] Change `update()` method for [a.account_metastores](https://databricks-sdk-py.readthedocs.io/en/latest/account/catalog/account_metastores.html) account-level service to return `databricks.sdk.service.catalog.AccountsUpdateMetastoreResponse` dataclass.
205
+ * [Breaking] Change `create()` method for [a.account_storage_credentials](https://databricks-sdk-py.readthedocs.io/en/latest/account/catalog/account_storage_credentials.html) account-level service to return `databricks.sdk.service.catalog.AccountsCreateStorageCredentialInfo` dataclass.
206
+ * [Breaking] Change `delete()` method for [a.account_storage_credentials](https://databricks-sdk-py.readthedocs.io/en/latest/account/catalog/account_storage_credentials.html) account-level service to start returning `databricks.sdk.service.catalog.AccountsDeleteStorageCredentialResponse` dataclass.
207
+ * [Breaking] Change `update()` method for [a.account_storage_credentials](https://databricks-sdk-py.readthedocs.io/en/latest/account/catalog/account_storage_credentials.html) account-level service to return `databricks.sdk.service.catalog.AccountsUpdateStorageCredentialResponse` dataclass.
208
+ * [Breaking] Change `create()` method for [w.registered_models](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/catalog/registered_models.html) workspace-level service with new required argument order.
209
+ * [Breaking] Change `delete()` method for [a.credentials](https://databricks-sdk-py.readthedocs.io/en/latest/account/provisioning/credentials.html) account-level service to start returning `databricks.sdk.service.provisioning.Credential` dataclass.
210
+ * [Breaking] Change `delete()` method for [a.encryption_keys](https://databricks-sdk-py.readthedocs.io/en/latest/account/provisioning/encryption_keys.html) account-level service to start returning `databricks.sdk.service.provisioning.CustomerManagedKey` dataclass.
211
+ * [Breaking] Change `create()` method for [a.networks](https://databricks-sdk-py.readthedocs.io/en/latest/account/provisioning/networks.html) account-level service with new required argument order.
212
+ * [Breaking] Change `delete()` method for [a.networks](https://databricks-sdk-py.readthedocs.io/en/latest/account/provisioning/networks.html) account-level service to start returning `databricks.sdk.service.provisioning.Network` dataclass.
213
+ * [Breaking] Change `create()` and `replace()` methods for [a.private_access](https://databricks-sdk-py.readthedocs.io/en/latest/account/provisioning/private_access.html) account-level service with new required argument order.
214
+ * [Breaking] Change `delete()` and `replace()` methods for [a.private_access](https://databricks-sdk-py.readthedocs.io/en/latest/account/provisioning/private_access.html) account-level service to start returning `databricks.sdk.service.provisioning.PrivateAccessSettings` dataclass.
215
+ * [Breaking] Change `delete()` method for [a.storage](https://databricks-sdk-py.readthedocs.io/en/latest/account/provisioning/storage.html) account-level service to start returning `databricks.sdk.service.provisioning.StorageConfiguration` dataclass.
216
+ * [Breaking] Change `create()` method for [a.vpc_endpoints](https://databricks-sdk-py.readthedocs.io/en/latest/account/provisioning/vpc_endpoints.html) account-level service with new required argument order.
217
+ * [Breaking] Change `delete()` method for [a.vpc_endpoints](https://databricks-sdk-py.readthedocs.io/en/latest/account/provisioning/vpc_endpoints.html) account-level service to start returning `databricks.sdk.service.provisioning.VpcEndpoint` dataclass.
218
+ * [Breaking] Change `create()` and `update()` methods for [a.workspaces](https://databricks-sdk-py.readthedocs.io/en/latest/account/provisioning/workspaces.html) account-level service with new required argument order.
219
+ * [Breaking] Change `delete()` and `update()` methods for [a.workspaces](https://databricks-sdk-py.readthedocs.io/en/latest/account/provisioning/workspaces.html) account-level service to start returning `databricks.sdk.service.provisioning.Workspace` dataclass.
220
+ * [Breaking] Change `execute_statement()` method for [w.statement_execution](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/sql/statement_execution.html) workspace-level service. Method path has changed.
221
+ * [Breaking] Change `metastore_info` field for `databricks.sdk.service.catalog.AccountsCreateMetastore` to type `databricks.sdk.service.catalog.CreateAccountsMetastore` dataclass.
222
+ * [Breaking] Change `credential_info` field for `databricks.sdk.service.catalog.AccountsCreateStorageCredential` to type `databricks.sdk.service.catalog.CreateAccountsStorageCredential` dataclass.
223
+ * [Breaking] Change `metastore_info` field for `databricks.sdk.service.catalog.AccountsUpdateMetastore` to type `databricks.sdk.service.catalog.UpdateAccountsMetastore` dataclass.
224
+ * [Breaking] Change `credential_info` field for `databricks.sdk.service.catalog.AccountsUpdateStorageCredential` to type `databricks.sdk.service.catalog.UpdateAccountsStorageCredential` dataclass.
225
+ * Change `catalog_name`, `name` and `schema_name` fields for `databricks.sdk.service.catalog.CreateRegisteredModelRequest` to no longer be required.
226
+ * [Breaking] Change `name` field for `databricks.sdk.service.database.DatabaseInstanceRole` to be required.
227
+ * Change `name` field for `databricks.sdk.service.database.DatabaseInstanceRole` to be required.
228
+ * Change `network_name` field for `databricks.sdk.service.provisioning.CreateNetworkRequest` to no longer be required.
229
+ * Change `private_access_settings_name` and `region` fields for `databricks.sdk.service.provisioning.CreatePrivateAccessSettingsRequest` to no longer be required.
230
+ * Change `vpc_endpoint_name` field for `databricks.sdk.service.provisioning.CreateVpcEndpointRequest` to no longer be required.
231
+ * Change `workspace_name` field for `databricks.sdk.service.provisioning.CreateWorkspaceRequest` to no longer be required.
232
+ * Change `dataplane_relay` and `rest_api` fields for `databricks.sdk.service.provisioning.NetworkVpcEndpoints` to no longer be required.
233
+ * [Breaking] Change `dataplane_relay` and `rest_api` fields for `databricks.sdk.service.provisioning.NetworkVpcEndpoints` to no longer be required.
234
+ * [Breaking] Change waiter for [WorkspacesAPI.update](https://databricks-sdk-py.readthedocs.io/en/latest/account/provisioning/workspaces.html#databricks.sdk.service.provisioning.WorkspacesAPI.update) method.
235
+ * [Breaking] Remove `browse_only` field for `databricks.sdk.service.catalog.ModelVersionInfo`.
236
+ * [Breaking] Remove `jar_dependencies` field for `databricks.sdk.service.compute.Environment`.
237
+ * [Breaking] Remove `is_no_public_ip_enabled` field for `databricks.sdk.service.provisioning.CreateWorkspaceRequest`.
238
+ * [Breaking] Remove `allowed_vpc_endpoint_ids`, `private_access_level`, `private_access_settings_name`, `public_access_enabled` and `region` fields for `databricks.sdk.service.provisioning.ReplacePrivateAccessSettingsRequest`.
239
+ * [Breaking] Remove `external_id` field for `databricks.sdk.service.provisioning.StsRole`.
240
+ * [Breaking] Remove `aws_region`, `credentials_id`, `custom_tags`, `managed_services_customer_managed_key_id`, `network_connectivity_config_id`, `network_id`, `private_access_settings_id`, `storage_configuration_id` and `storage_customer_managed_key_id` fields for `databricks.sdk.service.provisioning.UpdateWorkspaceRequest`.
241
+ * [Breaking] Remove `external_customer_info` and `is_no_public_ip_enabled` fields for `databricks.sdk.service.provisioning.Workspace`.
242
+ * [Breaking] Remove `status_unspecified` enum value for `databricks.sdk.service.sql.Status`.
243
+ * Add `compute_mode` field for `databricks.sdk.service.provisioning.CreateWorkspaceRequest`.
244
+ * Add `expected_workspace_status` field for `databricks.sdk.service.provisioning.Workspace`.
245
+ * Add `dependency_storage_locations` field for `databricks.sdk.service.sharing.TableInternalAttributes`.
246
+ * Add `git_email` field for `databricks.sdk.service.workspace.CreateCredentialsRequest`.
247
+ * Add `git_email` field for `databricks.sdk.service.workspace.CreateCredentialsResponse`.
248
+ * Add `git_email` field for `databricks.sdk.service.workspace.CredentialInfo`.
249
+ * Add `git_email` field for `databricks.sdk.service.workspace.GetCredentialsResponse`.
250
+ * Add `git_email` field for `databricks.sdk.service.workspace.UpdateCredentialsRequest`.
251
+ * Add `germany_tisax` enum value for `databricks.sdk.service.settings.ComplianceStandard`.
252
+ * [Breaking] Remove `prediction_probability_column` field for `databricks.sdk.service.dataquality.InferenceLogConfig`.
253
+
254
+
3
255
  ## Release v0.67.0
4
256
 
5
257
  ### API Changes
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: databricks-sdk
3
- Version: 0.67.0
3
+ Version: 0.68.0
4
4
  Summary: Databricks SDK for Python (Beta)
5
5
  Project-URL: Documentation, https://databricks-sdk-py.readthedocs.io
6
6
  Keywords: databricks,sdk
@@ -23,6 +23,7 @@ License-File: LICENSE
23
23
  License-File: NOTICE
24
24
  Requires-Dist: requests<3,>=2.28.1
25
25
  Requires-Dist: google-auth~=2.0
26
+ Requires-Dist: protobuf<7.0,>=4.21.0
26
27
  Provides-Extra: dev
27
28
  Requires-Dist: pytest; extra == "dev"
28
29
  Requires-Dist: pytest-cov; extra == "dev"
@@ -180,10 +181,11 @@ Depending on the Databricks authentication method, the SDK uses the following in
180
181
 
181
182
  ### Databricks native authentication
182
183
 
183
- By default, the Databricks SDK for Python initially tries [Databricks token authentication](https://docs.databricks.com/dev-tools/api/latest/authentication.html) (`auth_type='pat'` argument). If the SDK is unsuccessful, it then tries Databricks Workload Identity Federation (WIF) authentication using OIDC (`auth_type="github-oidc"` argument).
184
+ By default, the Databricks SDK for Python initially tries [Databricks token authentication](https://docs.databricks.com/dev-tools/api/latest/authentication.html) (`auth_type='pat'` argument). If the SDK is unsuccessful, it then tries Workload Identity Federation (WIF). See [Supported WIF](https://docs.databricks.com/aws/en/dev-tools/auth/oauth-federation-provider) for the supported JWT token providers.
184
185
 
185
186
  - For Databricks token authentication, you must provide `host` and `token`; or their environment variable or `.databrickscfg` file field equivalents.
186
187
  - For Databricks OIDC authentication, you must provide the `host`, `client_id` and `token_audience` _(optional)_ either directly, through the corresponding environment variables, or in your `.databrickscfg` configuration file.
188
+ - For Azure DevOps OIDC authentication, the `token_audience` is irrelevant as the audience is always set to `api://AzureADTokenExchange`. Also, the `System.AccessToken` pipeline variable required for the OIDC request must be exposed as the `SYSTEM_ACCESSTOKEN` environment variable, as described in [Pipeline variables](https://learn.microsoft.com/en-us/azure/devops/pipelines/build/variables?view=azure-devops&tabs=yaml#systemaccesstoken).
187
189
 
188
190
  | Argument | Description | Environment variable |
189
191
  |------------------|---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|-------------------------|
@@ -126,10 +126,11 @@ Depending on the Databricks authentication method, the SDK uses the following in
126
126
 
127
127
  ### Databricks native authentication
128
128
 
129
- By default, the Databricks SDK for Python initially tries [Databricks token authentication](https://docs.databricks.com/dev-tools/api/latest/authentication.html) (`auth_type='pat'` argument). If the SDK is unsuccessful, it then tries Databricks Workload Identity Federation (WIF) authentication using OIDC (`auth_type="github-oidc"` argument).
129
+ By default, the Databricks SDK for Python initially tries [Databricks token authentication](https://docs.databricks.com/dev-tools/api/latest/authentication.html) (`auth_type='pat'` argument). If the SDK is unsuccessful, it then tries Workload Identity Federation (WIF). See [Supported WIF](https://docs.databricks.com/aws/en/dev-tools/auth/oauth-federation-provider) for the supported JWT token providers.
130
130
 
131
131
  - For Databricks token authentication, you must provide `host` and `token`; or their environment variable or `.databrickscfg` file field equivalents.
132
132
  - For Databricks OIDC authentication, you must provide the `host`, `client_id` and `token_audience` _(optional)_ either directly, through the corresponding environment variables, or in your `.databrickscfg` configuration file.
133
+ - For Azure DevOps OIDC authentication, the `token_audience` is irrelevant as the audience is always set to `api://AzureADTokenExchange`. Also, the `System.AccessToken` pipeline variable required for the OIDC request must be exposed as the `SYSTEM_ACCESSTOKEN` environment variable, following [Pipeline variables](https://learn.microsoft.com/en-us/azure/devops/pipelines/build/variables?view=azure-devops&tabs=yaml#systemaccesstoken).
133
134
 
134
135
  | Argument | Description | Environment variable |
135
136
  |------------------|---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|-------------------------|
@@ -21,6 +21,7 @@ from databricks.sdk.service import cleanrooms as pkg_cleanrooms
21
21
  from databricks.sdk.service import compute as pkg_compute
22
22
  from databricks.sdk.service import dashboards as pkg_dashboards
23
23
  from databricks.sdk.service import database as pkg_database
24
+ from databricks.sdk.service import dataquality as pkg_dataquality
24
25
  from databricks.sdk.service import files as pkg_files
25
26
  from databricks.sdk.service import iam as pkg_iam
26
27
  from databricks.sdk.service import iamv2 as pkg_iamv2
@@ -79,6 +80,7 @@ from databricks.sdk.service.compute import (ClusterPoliciesAPI, ClustersAPI,
79
80
  from databricks.sdk.service.dashboards import (GenieAPI, LakeviewAPI,
80
81
  LakeviewEmbeddedAPI)
81
82
  from databricks.sdk.service.database import DatabaseAPI
83
+ from databricks.sdk.service.dataquality import DataQualityAPI
82
84
  from databricks.sdk.service.files import DbfsAPI, FilesAPI
83
85
  from databricks.sdk.service.iam import (AccessControlAPI,
84
86
  AccountAccessControlAPI,
@@ -282,6 +284,7 @@ class WorkspaceClient:
282
284
  self._current_user = pkg_iam.CurrentUserAPI(self._api_client)
283
285
  self._dashboard_widgets = pkg_sql.DashboardWidgetsAPI(self._api_client)
284
286
  self._dashboards = pkg_sql.DashboardsAPI(self._api_client)
287
+ self._data_quality = pkg_dataquality.DataQualityAPI(self._api_client)
285
288
  self._data_sources = pkg_sql.DataSourcesAPI(self._api_client)
286
289
  self._database = pkg_database.DatabaseAPI(self._api_client)
287
290
  self._dbfs = DbfsExt(self._api_client)
@@ -540,6 +543,11 @@ class WorkspaceClient:
540
543
  """In general, there is little need to modify dashboards using the API."""
541
544
  return self._dashboards
542
545
 
546
    @property
    def data_quality(self) -> pkg_dataquality.DataQualityAPI:
        """Manage the data quality of Unity Catalog objects (currently supports `schema` and `table`)."""
        return self._data_quality
550
+
543
551
  @property
544
552
  def data_sources(self) -> pkg_sql.DataSourcesAPI:
545
553
  """This API is provided to assist you in making new query objects."""
@@ -99,7 +99,10 @@ class _BaseClient:
99
99
  # Default to 60 seconds
100
100
  self._http_timeout_seconds = http_timeout_seconds or 60
101
101
 
102
- self._error_parser = _Parser(extra_error_customizers=extra_error_customizers)
102
+ self._error_parser = _Parser(
103
+ extra_error_customizers=extra_error_customizers,
104
+ debug_headers=debug_headers,
105
+ )
103
106
 
104
107
  def _authenticate(self, r: requests.PreparedRequest) -> requests.PreparedRequest:
105
108
  if self._header_factory:
@@ -0,0 +1,17 @@
class LroOptions:
    """Options controlling Long Running Operations (LRO).

    DO NOT USE THIS OPTION. This option is still under development
    and can be updated in the future without notice.
    """

    def __init__(self, *, timeout: Optional[timedelta] = None):
        """
        Args:
            timeout: Overall timeout for the Long Running Operation.
                Defaults to 20 minutes when not provided.
        """
        # A falsy timeout (None or a zero duration) falls back to the
        # 20-minute default.
        if timeout:
            self.timeout = timeout
        else:
            self.timeout = timedelta(minutes=20)
@@ -0,0 +1,39 @@
class FieldMask(object):
    """Class for FieldMask message type.

    Holds a list of dotted field paths and converts to/from the canonical
    comma-separated JSON string form.
    """

    # This is based on the base implementation from protobuf.
    # https://pigweed.googlesource.com/third_party/github/protocolbuffers/protobuf/+/HEAD/python/google/protobuf/internal/field_mask.py
    # The original implementation only works with proto generated classes.
    # Since our classes are not generated from proto files, we need to implement it manually.

    def __init__(self, field_mask=None):
        """Initializes the FieldMask.

        Args:
            field_mask: Optional list of path strings. An empty mask is
                created when omitted.
        """
        # Always initialize `paths`: the previous implementation left the
        # attribute unset when `field_mask` was falsy, so ToJsonString(),
        # __eq__, __hash__ and __repr__ raised AttributeError on a
        # default-constructed mask.
        self.paths = field_mask if field_mask else []

    def ToJsonString(self) -> str:
        """Converts FieldMask to its comma-separated string form."""
        return ",".join(self.paths)

    def FromJsonString(self, value: str) -> None:
        """Populates this FieldMask from a comma-separated string.

        Raises:
            ValueError: If `value` is not a string.
        """
        if not isinstance(value, str):
            raise ValueError("FieldMask JSON value not a string: {!r}".format(value))
        if value:
            self.paths = value.split(",")
        else:
            # An empty string denotes an empty mask, not [""].
            self.paths = []

    def __eq__(self, other) -> bool:
        """Check equality based on paths."""
        if not isinstance(other, FieldMask):
            return False
        return self.paths == other.paths

    def __hash__(self) -> int:
        """Hash based on paths tuple."""
        return hash(tuple(self.paths))

    def __repr__(self) -> str:
        """String representation for debugging."""
        return f"FieldMask(paths={self.paths})"
@@ -12,7 +12,7 @@ import sys
12
12
  import threading
13
13
  import time
14
14
  from datetime import datetime
15
- from typing import Callable, Dict, List, Optional, Tuple, Union
15
+ from typing import Any, Callable, Dict, List, Optional, Tuple, Union
16
16
 
17
17
  import google.auth # type: ignore
18
18
  import requests
@@ -89,7 +89,6 @@ def credentials_strategy(name: str, require: List[str]):
89
89
  @functools.wraps(func)
90
90
  def wrapper(cfg: "Config") -> Optional[CredentialsProvider]:
91
91
  for attr in require:
92
- getattr(cfg, attr)
93
92
  if not getattr(cfg, attr):
94
93
  return None
95
94
  return func(cfg)
@@ -103,7 +102,12 @@ def credentials_strategy(name: str, require: List[str]):
103
102
  def oauth_credentials_strategy(name: str, require: List[str]):
104
103
  """Given the function that receives a Config and returns an OauthHeaderFactory,
105
104
  create an OauthCredentialsProvider with a given name and required configuration
106
- attribute names to be present for this function to be called."""
105
+ attribute names to be present for this function to be called.
106
+
107
+ Args:
108
+ name: The name of the authentication strategy
109
+ require: List of config attributes that must be present
110
+ """
107
111
 
108
112
  def inner(
109
113
  func: Callable[["Config"], OAuthCredentialsProvider],
@@ -356,33 +360,47 @@ def oidc_credentials_provider(cfg, id_token_source: oidc.IdTokenSource) -> Optio
356
360
  return OAuthCredentialsProvider(refreshed_headers, token)
357
361
 
358
362
 
359
- @oauth_credentials_strategy("github-oidc", ["host", "client_id"])
360
- def github_oidc(cfg: "Config") -> Optional[CredentialsProvider]:
363
+ def _oidc_credentials_provider(
364
+ cfg: "Config", supplier_factory: Callable[[], Any], provider_name: str
365
+ ) -> Optional[CredentialsProvider]:
361
366
  """
362
- DatabricksWIFCredentials uses a Token Supplier to get a JWT Token and exchanges
363
- it for a Databricks Token.
367
+ Generic OIDC credentials provider that works with any OIDC token supplier.
368
+
369
+ Args:
370
+ cfg: Databricks configuration
371
+ supplier_factory: Callable that returns an OIDC token supplier instance
372
+ provider_name: Human-readable name (e.g., "GitHub OIDC", "Azure DevOps OIDC")
364
373
 
365
- Supported suppliers:
366
- - GitHub OIDC
374
+ Returns:
375
+ OAuthCredentialsProvider if successful, None if supplier unavailable or token retrieval fails
367
376
  """
368
- supplier = oidc_token_supplier.GitHubOIDCTokenSupplier()
377
+ # Try to create the supplier
378
+ try:
379
+ supplier = supplier_factory()
380
+ except Exception as e:
381
+ logger.debug(f"{provider_name}: {str(e)}")
382
+ return None
369
383
 
384
+ # Determine the audience for token exchange
370
385
  audience = cfg.token_audience
371
386
  if audience is None and cfg.is_account_client:
372
387
  audience = cfg.account_id
373
388
  if audience is None and not cfg.is_account_client:
374
389
  audience = cfg.oidc_endpoints.token_endpoint
375
390
 
376
- # Try to get an idToken. If no supplier returns a token, we cannot use this authentication mode.
391
+ # Try to get an OIDC token. If no supplier returns a token, we cannot use this authentication mode.
377
392
  id_token = supplier.get_oidc_token(audience)
378
393
  if not id_token:
394
+ logger.debug(f"{provider_name}: no token available, skipping authentication method")
379
395
  return None
380
396
 
397
+ logger.info(f"Configured {provider_name} authentication")
398
+
381
399
  def token_source_for(audience: str) -> oauth.TokenSource:
382
400
  id_token = supplier.get_oidc_token(audience)
383
401
  if not id_token:
384
402
  # Should not happen, since we checked it above.
385
- raise Exception("Cannot get OIDC token")
403
+ raise Exception(f"Cannot get {provider_name} token")
386
404
 
387
405
  return oauth.ClientCredentials(
388
406
  client_id=cfg.client_id,
@@ -408,6 +426,36 @@ def github_oidc(cfg: "Config") -> Optional[CredentialsProvider]:
408
426
  return OAuthCredentialsProvider(refreshed_headers, token)
409
427
 
410
428
 
429
@oauth_credentials_strategy("github-oidc", ["host", "client_id"])
def github_oidc(cfg: "Config") -> Optional[CredentialsProvider]:
    """
    GitHub OIDC authentication uses a Token Supplier to get a JWT Token and exchanges
    it for a Databricks Token.

    Supported in GitHub Actions with OIDC service connections.
    """
    # The supplier class itself is a zero-argument callable, so it can be
    # passed directly as the factory.
    return _oidc_credentials_provider(
        cfg=cfg,
        supplier_factory=oidc_token_supplier.GitHubOIDCTokenSupplier,
        provider_name="GitHub OIDC",
    )
442
+
443
+
444
@oauth_credentials_strategy("azure-devops-oidc", ["host", "client_id"])
def azure_devops_oidc(cfg: "Config") -> Optional[CredentialsProvider]:
    """
    Azure DevOps OIDC authentication uses a Token Supplier to get a JWT Token
    and exchanges it for a Databricks Token.

    Supported in Azure DevOps pipelines with OIDC service connections.
    """
    # The supplier class itself is a zero-argument callable, so it can be
    # passed directly as the factory.
    return _oidc_credentials_provider(
        cfg=cfg,
        supplier_factory=oidc_token_supplier.AzureDevOpsOIDCTokenSupplier,
        provider_name="Azure DevOps OIDC",
    )
457
+
458
+
411
459
  @oauth_credentials_strategy("github-oidc-azure", ["host", "azure_client_id"])
412
460
  def github_oidc_azure(cfg: "Config") -> Optional[CredentialsProvider]:
413
461
  if "ACTIONS_ID_TOKEN_REQUEST_TOKEN" not in os.environ:
@@ -1019,6 +1067,7 @@ class DefaultCredentials:
1019
1067
  azure_service_principal,
1020
1068
  github_oidc_azure,
1021
1069
  azure_cli,
1070
+ azure_devops_oidc,
1022
1071
  external_browser,
1023
1072
  databricks_cli,
1024
1073
  runtime_native_auth,
@@ -210,7 +210,11 @@ class _JobsUtil:
210
210
  class RemoteDbUtils:
211
211
 
212
212
  def __init__(self, config: "Config" = None):
213
- self._config = Config() if not config else config
213
+ # Create a shallow copy of the config to allow the use of a custom
214
+ # user-agent while avoiding modifying the original config.
215
+ self._config = Config() if not config else config.copy()
216
+ self._config.with_user_agent_extra("dbutils", "remote")
217
+
214
218
  self._client = ApiClient(self._config)
215
219
  self._clusters = compute_ext.ClustersExt(self._client)
216
220
  self._commands = compute.CommandExecutionAPI(self._client)