databricks-sdk 0.48.0__tar.gz → 0.50.0__tar.gz
This diff reflects the changes between publicly released package versions as they appear in their respective public registries, and is provided for informational purposes only.
- {databricks_sdk-0.48.0 → databricks_sdk-0.50.0}/CHANGELOG.md +67 -0
- {databricks_sdk-0.48.0 → databricks_sdk-0.50.0}/PKG-INFO +1 -1
- {databricks_sdk-0.48.0 → databricks_sdk-0.50.0}/databricks/sdk/__init__.py +5 -3
- {databricks_sdk-0.48.0 → databricks_sdk-0.50.0}/databricks/sdk/service/apps.py +29 -0
- {databricks_sdk-0.48.0 → databricks_sdk-0.50.0}/databricks/sdk/service/billing.py +11 -1
- {databricks_sdk-0.48.0 → databricks_sdk-0.50.0}/databricks/sdk/service/catalog.py +26 -0
- {databricks_sdk-0.48.0 → databricks_sdk-0.50.0}/databricks/sdk/service/compute.py +396 -182
- {databricks_sdk-0.48.0 → databricks_sdk-0.50.0}/databricks/sdk/service/dashboards.py +292 -0
- {databricks_sdk-0.48.0 → databricks_sdk-0.50.0}/databricks/sdk/service/iam.py +12 -29
- {databricks_sdk-0.48.0 → databricks_sdk-0.50.0}/databricks/sdk/service/jobs.py +539 -74
- {databricks_sdk-0.48.0 → databricks_sdk-0.50.0}/databricks/sdk/service/marketplace.py +2 -3
- {databricks_sdk-0.48.0 → databricks_sdk-0.50.0}/databricks/sdk/service/ml.py +420 -109
- {databricks_sdk-0.48.0 → databricks_sdk-0.50.0}/databricks/sdk/service/oauth2.py +12 -0
- {databricks_sdk-0.48.0 → databricks_sdk-0.50.0}/databricks/sdk/service/pipelines.py +100 -60
- {databricks_sdk-0.48.0 → databricks_sdk-0.50.0}/databricks/sdk/service/serving.py +210 -12
- {databricks_sdk-0.48.0 → databricks_sdk-0.50.0}/databricks/sdk/service/settings.py +476 -4
- {databricks_sdk-0.48.0 → databricks_sdk-0.50.0}/databricks/sdk/service/sharing.py +71 -71
- {databricks_sdk-0.48.0 → databricks_sdk-0.50.0}/databricks/sdk/service/sql.py +138 -0
- databricks_sdk-0.50.0/databricks/sdk/version.py +1 -0
- {databricks_sdk-0.48.0 → databricks_sdk-0.50.0}/databricks_sdk.egg-info/PKG-INFO +1 -1
- databricks_sdk-0.48.0/databricks/sdk/version.py +0 -1
- {databricks_sdk-0.48.0 → databricks_sdk-0.50.0}/CONTRIBUTING.md +0 -0
- {databricks_sdk-0.48.0 → databricks_sdk-0.50.0}/DCO +0 -0
- {databricks_sdk-0.48.0 → databricks_sdk-0.50.0}/LICENSE +0 -0
- {databricks_sdk-0.48.0 → databricks_sdk-0.50.0}/MANIFEST.in +0 -0
- {databricks_sdk-0.48.0 → databricks_sdk-0.50.0}/Makefile +0 -0
- {databricks_sdk-0.48.0 → databricks_sdk-0.50.0}/NOTICE +0 -0
- {databricks_sdk-0.48.0 → databricks_sdk-0.50.0}/README.md +0 -0
- {databricks_sdk-0.48.0 → databricks_sdk-0.50.0}/SECURITY.md +0 -0
- {databricks_sdk-0.48.0 → databricks_sdk-0.50.0}/databricks/__init__.py +0 -0
- {databricks_sdk-0.48.0 → databricks_sdk-0.50.0}/databricks/sdk/_base_client.py +0 -0
- {databricks_sdk-0.48.0 → databricks_sdk-0.50.0}/databricks/sdk/_property.py +0 -0
- {databricks_sdk-0.48.0 → databricks_sdk-0.50.0}/databricks/sdk/_widgets/__init__.py +0 -0
- {databricks_sdk-0.48.0 → databricks_sdk-0.50.0}/databricks/sdk/_widgets/default_widgets_utils.py +0 -0
- {databricks_sdk-0.48.0 → databricks_sdk-0.50.0}/databricks/sdk/_widgets/ipywidgets_utils.py +0 -0
- {databricks_sdk-0.48.0 → databricks_sdk-0.50.0}/databricks/sdk/azure.py +0 -0
- {databricks_sdk-0.48.0 → databricks_sdk-0.50.0}/databricks/sdk/casing.py +0 -0
- {databricks_sdk-0.48.0 → databricks_sdk-0.50.0}/databricks/sdk/clock.py +0 -0
- {databricks_sdk-0.48.0 → databricks_sdk-0.50.0}/databricks/sdk/config.py +0 -0
- {databricks_sdk-0.48.0 → databricks_sdk-0.50.0}/databricks/sdk/core.py +0 -0
- {databricks_sdk-0.48.0 → databricks_sdk-0.50.0}/databricks/sdk/credentials_provider.py +0 -0
- {databricks_sdk-0.48.0 → databricks_sdk-0.50.0}/databricks/sdk/data_plane.py +0 -0
- {databricks_sdk-0.48.0 → databricks_sdk-0.50.0}/databricks/sdk/dbutils.py +0 -0
- {databricks_sdk-0.48.0 → databricks_sdk-0.50.0}/databricks/sdk/environments.py +0 -0
- {databricks_sdk-0.48.0 → databricks_sdk-0.50.0}/databricks/sdk/errors/__init__.py +0 -0
- {databricks_sdk-0.48.0 → databricks_sdk-0.50.0}/databricks/sdk/errors/base.py +0 -0
- {databricks_sdk-0.48.0 → databricks_sdk-0.50.0}/databricks/sdk/errors/customizer.py +0 -0
- {databricks_sdk-0.48.0 → databricks_sdk-0.50.0}/databricks/sdk/errors/deserializer.py +0 -0
- {databricks_sdk-0.48.0 → databricks_sdk-0.50.0}/databricks/sdk/errors/details.py +0 -0
- {databricks_sdk-0.48.0 → databricks_sdk-0.50.0}/databricks/sdk/errors/mapper.py +0 -0
- {databricks_sdk-0.48.0 → databricks_sdk-0.50.0}/databricks/sdk/errors/overrides.py +0 -0
- {databricks_sdk-0.48.0 → databricks_sdk-0.50.0}/databricks/sdk/errors/parser.py +0 -0
- {databricks_sdk-0.48.0 → databricks_sdk-0.50.0}/databricks/sdk/errors/platform.py +0 -0
- {databricks_sdk-0.48.0 → databricks_sdk-0.50.0}/databricks/sdk/errors/private_link.py +0 -0
- {databricks_sdk-0.48.0 → databricks_sdk-0.50.0}/databricks/sdk/errors/sdk.py +0 -0
- {databricks_sdk-0.48.0 → databricks_sdk-0.50.0}/databricks/sdk/logger/__init__.py +0 -0
- {databricks_sdk-0.48.0 → databricks_sdk-0.50.0}/databricks/sdk/logger/round_trip_logger.py +0 -0
- {databricks_sdk-0.48.0 → databricks_sdk-0.50.0}/databricks/sdk/mixins/__init__.py +0 -0
- {databricks_sdk-0.48.0 → databricks_sdk-0.50.0}/databricks/sdk/mixins/compute.py +0 -0
- {databricks_sdk-0.48.0 → databricks_sdk-0.50.0}/databricks/sdk/mixins/files.py +0 -0
- {databricks_sdk-0.48.0 → databricks_sdk-0.50.0}/databricks/sdk/mixins/jobs.py +0 -0
- {databricks_sdk-0.48.0 → databricks_sdk-0.50.0}/databricks/sdk/mixins/open_ai_client.py +0 -0
- {databricks_sdk-0.48.0 → databricks_sdk-0.50.0}/databricks/sdk/mixins/workspace.py +0 -0
- {databricks_sdk-0.48.0 → databricks_sdk-0.50.0}/databricks/sdk/oauth.py +0 -0
- {databricks_sdk-0.48.0 → databricks_sdk-0.50.0}/databricks/sdk/py.typed +0 -0
- {databricks_sdk-0.48.0 → databricks_sdk-0.50.0}/databricks/sdk/retries.py +0 -0
- {databricks_sdk-0.48.0 → databricks_sdk-0.50.0}/databricks/sdk/runtime/__init__.py +0 -0
- {databricks_sdk-0.48.0 → databricks_sdk-0.50.0}/databricks/sdk/runtime/dbutils_stub.py +0 -0
- {databricks_sdk-0.48.0 → databricks_sdk-0.50.0}/databricks/sdk/service/__init__.py +0 -0
- {databricks_sdk-0.48.0 → databricks_sdk-0.50.0}/databricks/sdk/service/_internal.py +0 -0
- {databricks_sdk-0.48.0 → databricks_sdk-0.50.0}/databricks/sdk/service/cleanrooms.py +0 -0
- {databricks_sdk-0.48.0 → databricks_sdk-0.50.0}/databricks/sdk/service/files.py +0 -0
- {databricks_sdk-0.48.0 → databricks_sdk-0.50.0}/databricks/sdk/service/provisioning.py +0 -0
- {databricks_sdk-0.48.0 → databricks_sdk-0.50.0}/databricks/sdk/service/vectorsearch.py +0 -0
- {databricks_sdk-0.48.0 → databricks_sdk-0.50.0}/databricks/sdk/service/workspace.py +0 -0
- {databricks_sdk-0.48.0 → databricks_sdk-0.50.0}/databricks/sdk/useragent.py +0 -0
- {databricks_sdk-0.48.0 → databricks_sdk-0.50.0}/databricks_sdk.egg-info/SOURCES.txt +0 -0
- {databricks_sdk-0.48.0 → databricks_sdk-0.50.0}/databricks_sdk.egg-info/dependency_links.txt +0 -0
- {databricks_sdk-0.48.0 → databricks_sdk-0.50.0}/databricks_sdk.egg-info/requires.txt +0 -0
- {databricks_sdk-0.48.0 → databricks_sdk-0.50.0}/databricks_sdk.egg-info/top_level.txt +0 -0
- {databricks_sdk-0.48.0 → databricks_sdk-0.50.0}/pyproject.toml +0 -0
- {databricks_sdk-0.48.0 → databricks_sdk-0.50.0}/setup.cfg +0 -0
{databricks_sdk-0.48.0 → databricks_sdk-0.50.0}/CHANGELOG.md

@@ -1,5 +1,72 @@
 # Version changelog
 
+## Release v0.50.0
+
+### API Changes
+* Added [w.enable_export_notebook](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/settings/settings/enable_export_notebook.html) workspace-level service, [w.enable_notebook_table_clipboard](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/settings/settings/enable_notebook_table_clipboard.html) workspace-level service and [w.enable_results_downloading](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/settings/settings/enable_results_downloading.html) workspace-level service.
+* Added `get_credentials_for_trace_data_download()` and `get_credentials_for_trace_data_upload()` methods for [w.experiments](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/ml/experiments.html) workspace-level service.
+* Added `get_download_full_query_result()` method for [w.genie](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/dashboards/genie.html) workspace-level service.
+* Added `get_published_dashboard_token_info()` method for [w.lakeview_embedded](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/dashboards/lakeview_embedded.html) workspace-level service.
+* Added `binding_workspace_ids` field for `databricks.sdk.service.billing.BudgetPolicy`.
+* Added `download_id` field for `databricks.sdk.service.dashboards.GenieGenerateDownloadFullQueryResultResponse`.
+* Added `dashboard_output` field for `databricks.sdk.service.jobs.RunOutput`.
+* Added `dashboard_task` and `power_bi_task` fields for `databricks.sdk.service.jobs.RunTask`.
+* Added `dashboard_task` and `power_bi_task` fields for `databricks.sdk.service.jobs.SubmitTask`.
+* Added `dashboard_task` and `power_bi_task` fields for `databricks.sdk.service.jobs.Task`.
+* Added `include_features` field for `databricks.sdk.service.ml.CreateForecastingExperimentRequest`.
+* Added `models` field for `databricks.sdk.service.ml.LogInputs`.
+* Added `dataset_digest`, `dataset_name` and `model_id` fields for `databricks.sdk.service.ml.LogMetric`.
+* Added `dataset_digest`, `dataset_name`, `model_id` and `run_id` fields for `databricks.sdk.service.ml.Metric`.
+* Added `model_inputs` field for `databricks.sdk.service.ml.RunInputs`.
+* Added `client_application` field for `databricks.sdk.service.sql.QueryInfo`.
+* Added `geography` and `geometry` enum values for `databricks.sdk.service.catalog.ColumnTypeName`.
+* Added `allocation_timeout_no_healthy_and_warmed_up_clusters`, `docker_container_creation_exception`, `docker_image_too_large_for_instance_exception` and `docker_invalid_os_exception` enum values for `databricks.sdk.service.compute.TerminationReasonCode`.
+* Added `standard` enum value for `databricks.sdk.service.jobs.PerformanceTarget`.
+* Added `can_view` enum value for `databricks.sdk.service.sql.WarehousePermissionLevel`.
+* [Breaking] Changed `generate_download_full_query_result()` method for [w.genie](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/dashboards/genie.html) workspace-level service. Method path has changed.
+* [Breaking] Changed waiter for [CommandExecutionAPI.create](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/compute/command_execution.html#databricks.sdk.service.compute.CommandExecutionAPI.create) method.
+* [Breaking] Changed waiter for [CommandExecutionAPI.execute](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/compute/command_execution.html#databricks.sdk.service.compute.CommandExecutionAPI.execute) method.
+* [Breaking] Removed `error`, `status` and `transient_statement_id` fields for `databricks.sdk.service.dashboards.GenieGenerateDownloadFullQueryResultResponse`.
+* [Breaking] Removed `balanced` and `cost_optimized` enum values for `databricks.sdk.service.jobs.PerformanceTarget`.
+* [Breaking] Removed [PipelinesAPI.wait_get_pipeline_running](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/pipelines/pipelines.html#databricks.sdk.service.pipelines.PipelinesAPI.wait_get_pipeline_running) method.
+
+
+## Release v0.49.0
+
+### API Changes
+* Added `generate_download_full_query_result()` method for [w.genie](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/dashboards/genie.html) workspace-level service.
+* Added `effective_user_api_scopes`, `oauth2_app_client_id`, `oauth2_app_integration_id` and `user_api_scopes` fields for `databricks.sdk.service.apps.App`.
+* Added `abfss`, `dbfs`, `error_message`, `execution_duration_seconds`, `file`, `gcs`, `s3`, `status`, `volumes` and `workspace` fields for `databricks.sdk.service.compute.InitScriptInfoAndExecutionDetails`.
+* [Breaking] Added `forecast_granularity` field for `databricks.sdk.service.ml.CreateForecastingExperimentRequest`.
+* Added `jwks_uri` field for `databricks.sdk.service.oauth2.OidcFederationPolicy`.
+* Added `event_log` field for `databricks.sdk.service.pipelines.CreatePipeline`.
+* Added `event_log` field for `databricks.sdk.service.pipelines.EditPipeline`.
+* Added `event_log` field for `databricks.sdk.service.pipelines.PipelineSpec`.
+* Added `fallback_config` field for `databricks.sdk.service.serving.AiGatewayConfig`.
+* Added `custom_provider_config` field for `databricks.sdk.service.serving.ExternalModel`.
+* Added `fallback_config` field for `databricks.sdk.service.serving.PutAiGatewayRequest`.
+* Added `fallback_config` field for `databricks.sdk.service.serving.PutAiGatewayResponse`.
+* Added `aliases`, `comment`, `data_type`, `dependency_list`, `full_data_type`, `id`, `input_params`, `name`, `properties`, `routine_definition`, `schema`, `securable_kind`, `share`, `share_id`, `storage_location` and `tags` fields for `databricks.sdk.service.sharing.DeltaSharingFunction`.
+* Added `query_source` field for `databricks.sdk.service.sql.QueryInfo`.
+* Added `foreign_catalog` enum value for `databricks.sdk.service.catalog.CatalogType`.
+* Added `browse` enum value for `databricks.sdk.service.catalog.Privilege`.
+* Added `access_token_failure`, `allocation_timeout`, `allocation_timeout_node_daemon_not_ready`, `allocation_timeout_no_healthy_clusters`, `allocation_timeout_no_matched_clusters`, `allocation_timeout_no_ready_clusters`, `allocation_timeout_no_unallocated_clusters`, `allocation_timeout_no_warmed_up_clusters`, `aws_inaccessible_kms_key_failure`, `aws_instance_profile_update_failure`, `aws_invalid_key_pair`, `aws_invalid_kms_key_state`, `aws_resource_quota_exceeded`, `azure_packed_deployment_partial_failure`, `bootstrap_timeout_due_to_misconfig`, `budget_policy_limit_enforcement_activated`, `budget_policy_resolution_failure`, `cloud_account_setup_failure`, `cloud_operation_cancelled`, `cloud_provider_instance_not_launched`, `cloud_provider_launch_failure_due_to_misconfig`, `cloud_provider_resource_stockout_due_to_misconfig`, `cluster_operation_throttled`, `cluster_operation_timeout`, `control_plane_request_failure_due_to_misconfig`, `data_access_config_changed`, `disaster_recovery_replication`, `driver_eviction`, `driver_launch_timeout`, `driver_node_unreachable`, `driver_out_of_disk`, `driver_out_of_memory`, `driver_pod_creation_failure`, `driver_unexpected_failure`, `dynamic_spark_conf_size_exceeded`, `eos_spark_image`, `executor_pod_unscheduled`, `gcp_api_rate_quota_exceeded`, `gcp_forbidden`, `gcp_iam_timeout`, `gcp_inaccessible_kms_key_failure`, `gcp_insufficient_capacity`, `gcp_ip_space_exhausted`, `gcp_kms_key_permission_denied`, `gcp_not_found`, `gcp_resource_quota_exceeded`, `gcp_service_account_access_denied`, `gcp_service_account_not_found`, `gcp_subnet_not_ready`, `gcp_trusted_image_projects_violated`, `gke_based_cluster_termination`, `init_container_not_finished`, `instance_pool_max_capacity_reached`, `instance_pool_not_found`, `instance_unreachable_due_to_misconfig`, `internal_capacity_failure`, `invalid_aws_parameter`, `invalid_instance_placement_protocol`, `invalid_worker_image_failure`, `in_penalty_box`, `lazy_allocation_timeout`, `maintenance_mode`, `netvisor_setup_timeout`, `no_matched_k8s`, `no_matched_k8s_testing_tag`, `pod_assignment_failure`, `pod_scheduling_failure`, `resource_usage_blocked`, `secret_creation_failure`, `serverless_long_running_terminated`, `spark_image_download_throttled`, `spark_image_not_found`, `ssh_bootstrap_failure`, `storage_download_failure_due_to_misconfig`, `storage_download_failure_slow`, `storage_download_failure_throttled`, `unexpected_pod_recreation`, `user_initiated_vm_termination` and `workspace_update` enum values for `databricks.sdk.service.compute.TerminationReasonCode`.
+* Added `generated_sql_query_too_long_exception` and `missing_sql_query_exception` enum values for `databricks.sdk.service.dashboards.MessageErrorType`.
+* Added `balanced` enum value for `databricks.sdk.service.jobs.PerformanceTarget`.
+* Added `listing_resource` enum value for `databricks.sdk.service.marketplace.FileParentType`.
+* Added `app` enum value for `databricks.sdk.service.marketplace.MarketplaceFileType`.
+* Added `custom` enum value for `databricks.sdk.service.serving.ExternalModelProvider`.
+* Added `arclight_multi_tenant_azure_exchange_token` and `arclight_multi_tenant_azure_exchange_token_with_user_delegation_key` enum values for `databricks.sdk.service.settings.TokenType`.
+* [Breaking] Changed `create_experiment()` method for [w.forecasting](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/ml/forecasting.html) workspace-level service with new required argument order.
+* Changed `instance_type_id` field for `databricks.sdk.service.compute.NodeInstanceType` to be required.
+* Changed `category` field for `databricks.sdk.service.compute.NodeType` to be required.
+* [Breaking] Changed `functions` field for `databricks.sdk.service.sharing.ListProviderShareAssetsResponse` to type `databricks.sdk.service.sharing.DeltaSharingFunctionList` dataclass.
+* [Breaking] Removed `execution_details` and `script` fields for `databricks.sdk.service.compute.InitScriptInfoAndExecutionDetails`.
+* [Breaking] Removed `supports_elastic_disk` field for `databricks.sdk.service.compute.NodeType`.
+* [Breaking] Removed `data_granularity_quantity` and `data_granularity_unit` fields for `databricks.sdk.service.ml.CreateForecastingExperimentRequest`.
+* [Breaking] Removed `aliases`, `comment`, `data_type`, `dependency_list`, `full_data_type`, `id`, `input_params`, `name`, `properties`, `routine_definition`, `schema`, `securable_kind`, `share`, `share_id`, `storage_location` and `tags` fields for `databricks.sdk.service.sharing.Function`.
+
+
 ## Release v0.48.0
 
 ### API Changes
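
Note on the breaking `PerformanceTarget` change above: code that passed the removed `balanced` or `cost_optimized` values needs to migrate to `standard`. The sketch below is illustrative only; the uppercase member names are assumptions based on the SDK's enum naming convention (visible in the catalog.py diff further down), not values taken from this diff.

```python
# Illustrative migration guard for the jobs.PerformanceTarget change noted in the
# v0.50.0 changelog: `standard` was added, `balanced`/`cost_optimized` were removed.
# Member names are assumed from the SDK's uppercase enum convention.
from databricks.sdk.service import jobs


def pick_performance_target():
    """Return the first PerformanceTarget member this SDK version knows about."""
    for name in ("STANDARD", "COST_OPTIMIZED", "BALANCED"):
        member = getattr(jobs.PerformanceTarget, name, None)
        if member is not None:
            return member
    return None


print(pick_performance_target())
```
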
{databricks_sdk-0.48.0 → databricks_sdk-0.50.0}/databricks/sdk/__init__.py

@@ -86,9 +86,11 @@ from databricks.sdk.service.settings import (
     AibiDashboardEmbeddingApprovedDomainsAPI, AutomaticClusterUpdateAPI,
     ComplianceSecurityProfileAPI, CredentialsManagerAPI,
     CspEnablementAccountAPI, DefaultNamespaceAPI, DisableLegacyAccessAPI,
-    DisableLegacyDbfsAPI, DisableLegacyFeaturesAPI,
-
-
+    DisableLegacyDbfsAPI, DisableLegacyFeaturesAPI, EnableExportNotebookAPI,
+    EnableIpAccessListsAPI, EnableNotebookTableClipboardAPI,
+    EnableResultsDownloadingAPI, EnhancedSecurityMonitoringAPI,
+    EsmEnablementAccountAPI, IpAccessListsAPI, NetworkConnectivityAPI,
+    NotificationDestinationsAPI, PersonalComputeAPI,
     RestrictWorkspaceAdminsAPI, SettingsAPI, TokenManagementAPI, TokensAPI,
     WorkspaceConfAPI)
 from databricks.sdk.service.sharing import (ProvidersAPI,
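
A quick import check for the three settings services newly re-exported in the block above; the class names are taken directly from the diff, and the loop is purely illustrative.

```python
# The per-setting API classes added to the settings import block above.
from databricks.sdk.service.settings import (
    EnableExportNotebookAPI,
    EnableNotebookTableClipboardAPI,
    EnableResultsDownloadingAPI,
)

for api in (EnableExportNotebookAPI, EnableNotebookTableClipboardAPI, EnableResultsDownloadingAPI):
    print(api.__name__)
```
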
{databricks_sdk-0.48.0 → databricks_sdk-0.50.0}/databricks/sdk/service/apps.py

@@ -50,9 +50,16 @@ class App:
 
     effective_budget_policy_id: Optional[str] = None
 
+    effective_user_api_scopes: Optional[List[str]] = None
+    """The effective api scopes granted to the user access token."""
+
     id: Optional[str] = None
     """The unique identifier of the app."""
 
+    oauth2_app_client_id: Optional[str] = None
+
+    oauth2_app_integration_id: Optional[str] = None
+
     pending_deployment: Optional[AppDeployment] = None
     """The pending deployment of the app. A deployment is considered pending when it is being prepared
     for deployment to the app compute."""
@@ -75,6 +82,8 @@ class App:
     url: Optional[str] = None
     """The URL of the app once it is deployed."""
 
+    user_api_scopes: Optional[List[str]] = None
+
     def as_dict(self) -> dict:
         """Serializes the App into a dictionary suitable for use as a JSON request body."""
         body = {}
@@ -96,10 +105,16 @@ class App:
             body["description"] = self.description
         if self.effective_budget_policy_id is not None:
             body["effective_budget_policy_id"] = self.effective_budget_policy_id
+        if self.effective_user_api_scopes:
+            body["effective_user_api_scopes"] = [v for v in self.effective_user_api_scopes]
         if self.id is not None:
             body["id"] = self.id
         if self.name is not None:
             body["name"] = self.name
+        if self.oauth2_app_client_id is not None:
+            body["oauth2_app_client_id"] = self.oauth2_app_client_id
+        if self.oauth2_app_integration_id is not None:
+            body["oauth2_app_integration_id"] = self.oauth2_app_integration_id
         if self.pending_deployment:
             body["pending_deployment"] = self.pending_deployment.as_dict()
         if self.resources:
@@ -116,6 +131,8 @@ class App:
             body["updater"] = self.updater
         if self.url is not None:
             body["url"] = self.url
+        if self.user_api_scopes:
+            body["user_api_scopes"] = [v for v in self.user_api_scopes]
         return body
 
     def as_shallow_dict(self) -> dict:
@@ -139,10 +156,16 @@ class App:
             body["description"] = self.description
         if self.effective_budget_policy_id is not None:
             body["effective_budget_policy_id"] = self.effective_budget_policy_id
+        if self.effective_user_api_scopes:
+            body["effective_user_api_scopes"] = self.effective_user_api_scopes
         if self.id is not None:
             body["id"] = self.id
         if self.name is not None:
             body["name"] = self.name
+        if self.oauth2_app_client_id is not None:
+            body["oauth2_app_client_id"] = self.oauth2_app_client_id
+        if self.oauth2_app_integration_id is not None:
+            body["oauth2_app_integration_id"] = self.oauth2_app_integration_id
         if self.pending_deployment:
             body["pending_deployment"] = self.pending_deployment
         if self.resources:
@@ -159,6 +182,8 @@ class App:
             body["updater"] = self.updater
         if self.url is not None:
             body["url"] = self.url
+        if self.user_api_scopes:
+            body["user_api_scopes"] = self.user_api_scopes
         return body
 
     @classmethod
@@ -174,8 +199,11 @@ class App:
             default_source_code_path=d.get("default_source_code_path", None),
             description=d.get("description", None),
             effective_budget_policy_id=d.get("effective_budget_policy_id", None),
+            effective_user_api_scopes=d.get("effective_user_api_scopes", None),
             id=d.get("id", None),
             name=d.get("name", None),
+            oauth2_app_client_id=d.get("oauth2_app_client_id", None),
+            oauth2_app_integration_id=d.get("oauth2_app_integration_id", None),
             pending_deployment=_from_dict(d, "pending_deployment", AppDeployment),
             resources=_repeated_dict(d, "resources", AppResource),
             service_principal_client_id=d.get("service_principal_client_id", None),
@@ -184,6 +212,7 @@ class App:
             update_time=d.get("update_time", None),
             updater=d.get("updater", None),
             url=d.get("url", None),
+            user_api_scopes=d.get("user_api_scopes", None),
         )
 
 
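
The new `App` fields above flow through `as_dict()`/`from_dict()` like any other optional field. A minimal round-trip sketch, with purely illustrative values:

```python
from databricks.sdk.service.apps import App

# Keys mirror the as_dict()/from_dict() additions in the diff above; values are illustrative.
payload = {
    "name": "my-app",
    "user_api_scopes": ["sql", "files.files"],
    "effective_user_api_scopes": ["sql", "files.files"],
    "oauth2_app_client_id": "example-client-id",
    "oauth2_app_integration_id": "example-integration-id",
}

app = App.from_dict(payload)
body = app.as_dict()
# Non-empty scope lists and non-None OAuth identifiers are serialized back out unchanged.
assert body["user_api_scopes"] == ["sql", "files.files"]
assert body["oauth2_app_client_id"] == "example-client-id"
```
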
{databricks_sdk-0.48.0 → databricks_sdk-0.50.0}/databricks/sdk/service/billing.py

@@ -364,6 +364,10 @@ class BudgetConfigurationFilterWorkspaceIdClause:
 class BudgetPolicy:
     """Contains the BudgetPolicy details."""
 
+    binding_workspace_ids: Optional[List[int]] = None
+    """List of workspaces that this budget policy will be exclusively bound to. An empty binding
+    implies that this budget policy is open to any workspace in the account."""
+
     custom_tags: Optional[List[compute.CustomPolicyTag]] = None
     """A list of tags defined by the customer. At most 20 entries are allowed per policy."""
 
@@ -372,11 +376,14 @@ class BudgetPolicy:
 
     policy_name: Optional[str] = None
     """The name of the policy. - Must be unique among active policies. - Can contain only characters
-    from the ISO 8859-1 (latin1) set."""
+    from the ISO 8859-1 (latin1) set. - Can't start with reserved keywords such as
+    `databricks:default-policy`."""
 
     def as_dict(self) -> dict:
         """Serializes the BudgetPolicy into a dictionary suitable for use as a JSON request body."""
         body = {}
+        if self.binding_workspace_ids:
+            body["binding_workspace_ids"] = [v for v in self.binding_workspace_ids]
         if self.custom_tags:
             body["custom_tags"] = [v.as_dict() for v in self.custom_tags]
         if self.policy_id is not None:
@@ -388,6 +395,8 @@ class BudgetPolicy:
     def as_shallow_dict(self) -> dict:
         """Serializes the BudgetPolicy into a shallow dictionary of its immediate attributes."""
         body = {}
+        if self.binding_workspace_ids:
+            body["binding_workspace_ids"] = self.binding_workspace_ids
         if self.custom_tags:
             body["custom_tags"] = self.custom_tags
         if self.policy_id is not None:
@@ -400,6 +409,7 @@ class BudgetPolicy:
     def from_dict(cls, d: Dict[str, Any]) -> BudgetPolicy:
         """Deserializes the BudgetPolicy from a dictionary."""
         return cls(
+            binding_workspace_ids=d.get("binding_workspace_ids", None),
             custom_tags=_repeated_dict(d, "custom_tags", compute.CustomPolicyTag),
             policy_id=d.get("policy_id", None),
             policy_name=d.get("policy_name", None),
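
A minimal sketch of the new `binding_workspace_ids` field on `BudgetPolicy`, using the serialization helpers shown above; the IDs and names are placeholders.

```python
from databricks.sdk.service.billing import BudgetPolicy

policy = BudgetPolicy.from_dict(
    {
        "policy_id": "1234-abcd",                         # placeholder
        "policy_name": "team-budget",                     # placeholder
        "binding_workspace_ids": [111111111, 222222222],  # placeholder workspace IDs
    }
)

# Per the new docstring, an empty or missing binding list leaves the policy open to
# any workspace in the account; a non-empty list is serialized back out as-is.
assert policy.as_dict()["binding_workspace_ids"] == [111111111, 222222222]
```
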
{databricks_sdk-0.48.0 → databricks_sdk-0.50.0}/databricks/sdk/service/catalog.py

@@ -1065,6 +1065,7 @@ class CatalogType(Enum):
     """The type of the catalog."""
 
     DELTASHARING_CATALOG = "DELTASHARING_CATALOG"
+    FOREIGN_CATALOG = "FOREIGN_CATALOG"
     MANAGED_CATALOG = "MANAGED_CATALOG"
     SYSTEM_CATALOG = "SYSTEM_CATALOG"
 
@@ -1270,6 +1271,8 @@ class ColumnTypeName(Enum):
     DECIMAL = "DECIMAL"
     DOUBLE = "DOUBLE"
     FLOAT = "FLOAT"
+    GEOGRAPHY = "GEOGRAPHY"
+    GEOMETRY = "GEOMETRY"
     INT = "INT"
     INTERVAL = "INTERVAL"
     LONG = "LONG"
@@ -2591,6 +2594,11 @@ class CreateVolumeRequestContent:
     """The name of the volume"""
 
     volume_type: VolumeType
+    """The type of the volume. An external volume is located in the specified external location. A
+    managed volume is located in the default location which is specified by the parent schema, or
+    the parent catalog, or the Metastore. [Learn more]
+
+    [Learn more]: https://docs.databricks.com/aws/en/volumes/managed-vs-external"""
 
     comment: Optional[str] = None
     """The comment attached to the volume"""
@@ -6664,6 +6672,7 @@ class Privilege(Enum):
     ACCESS = "ACCESS"
     ALL_PRIVILEGES = "ALL_PRIVILEGES"
     APPLY_TAG = "APPLY_TAG"
+    BROWSE = "BROWSE"
     CREATE = "CREATE"
     CREATE_CATALOG = "CREATE_CATALOG"
     CREATE_CONNECTION = "CREATE_CONNECTION"
@@ -9471,6 +9480,8 @@ class UpdateWorkspaceBindingsParameters:
 
 @dataclass
 class ValidateCredentialRequest:
+    """Next ID: 17"""
+
     aws_iam_role: Optional[AwsIamRole] = None
     """The AWS IAM role configuration"""
 
@@ -9831,6 +9842,11 @@ class VolumeInfo:
     """The unique identifier of the volume"""
 
     volume_type: Optional[VolumeType] = None
+    """The type of the volume. An external volume is located in the specified external location. A
+    managed volume is located in the default location which is specified by the parent schema, or
+    the parent catalog, or the Metastore. [Learn more]
+
+    [Learn more]: https://docs.databricks.com/aws/en/volumes/managed-vs-external"""
 
     def as_dict(self) -> dict:
         """Serializes the VolumeInfo into a dictionary suitable for use as a JSON request body."""
@@ -9935,6 +9951,11 @@ class VolumeInfo:
 
 
 class VolumeType(Enum):
+    """The type of the volume. An external volume is located in the specified external location. A
+    managed volume is located in the default location which is specified by the parent schema, or
+    the parent catalog, or the Metastore. [Learn more]
+
+    [Learn more]: https://docs.databricks.com/aws/en/volumes/managed-vs-external"""
 
     EXTERNAL = "EXTERNAL"
     MANAGED = "MANAGED"
@@ -14266,6 +14287,11 @@ class VolumesAPI:
         :param name: str
           The name of the volume
         :param volume_type: :class:`VolumeType`
+          The type of the volume. An external volume is located in the specified external location. A managed
+          volume is located in the default location which is specified by the parent schema, or the parent
+          catalog, or the Metastore. [Learn more]
+
+          [Learn more]: https://docs.databricks.com/aws/en/volumes/managed-vs-external
         :param comment: str (optional)
          The comment attached to the volume
         :param storage_location: str (optional)