databricks-sdk 0.47.0.tar.gz → 0.49.0.tar.gz
This diff shows the changes between two publicly released versions of the package, as they appear in their respective public registries. It is provided for informational purposes only.
Potentially problematic release: this version of databricks-sdk has been flagged as possibly problematic by the registry scanner.
- {databricks_sdk-0.47.0 → databricks_sdk-0.49.0}/CHANGELOG.md +64 -0
- {databricks_sdk-0.47.0 → databricks_sdk-0.49.0}/PKG-INFO +1 -1
- {databricks_sdk-0.47.0 → databricks_sdk-0.49.0}/databricks/sdk/service/apps.py +29 -0
- {databricks_sdk-0.47.0 → databricks_sdk-0.49.0}/databricks/sdk/service/billing.py +2 -1
- {databricks_sdk-0.47.0 → databricks_sdk-0.49.0}/databricks/sdk/service/catalog.py +2 -0
- {databricks_sdk-0.47.0 → databricks_sdk-0.49.0}/databricks/sdk/service/dashboards.py +74 -0
- {databricks_sdk-0.47.0 → databricks_sdk-0.49.0}/databricks/sdk/service/marketplace.py +0 -3
- {databricks_sdk-0.47.0 → databricks_sdk-0.49.0}/databricks/sdk/service/pipelines.py +75 -0
- {databricks_sdk-0.47.0 → databricks_sdk-0.49.0}/databricks/sdk/service/serving.py +2 -0
- {databricks_sdk-0.47.0 → databricks_sdk-0.49.0}/databricks/sdk/service/settings.py +4 -0
- {databricks_sdk-0.47.0 → databricks_sdk-0.49.0}/databricks/sdk/service/sql.py +127 -0
- databricks_sdk-0.49.0/databricks/sdk/version.py +1 -0
- {databricks_sdk-0.47.0 → databricks_sdk-0.49.0}/databricks_sdk.egg-info/PKG-INFO +1 -1
- databricks_sdk-0.47.0/databricks/sdk/version.py +0 -1
- {databricks_sdk-0.47.0 → databricks_sdk-0.49.0}/CONTRIBUTING.md +0 -0
- {databricks_sdk-0.47.0 → databricks_sdk-0.49.0}/DCO +0 -0
- {databricks_sdk-0.47.0 → databricks_sdk-0.49.0}/LICENSE +0 -0
- {databricks_sdk-0.47.0 → databricks_sdk-0.49.0}/MANIFEST.in +0 -0
- {databricks_sdk-0.47.0 → databricks_sdk-0.49.0}/Makefile +0 -0
- {databricks_sdk-0.47.0 → databricks_sdk-0.49.0}/NOTICE +0 -0
- {databricks_sdk-0.47.0 → databricks_sdk-0.49.0}/README.md +0 -0
- {databricks_sdk-0.47.0 → databricks_sdk-0.49.0}/SECURITY.md +0 -0
- {databricks_sdk-0.47.0 → databricks_sdk-0.49.0}/databricks/__init__.py +0 -0
- {databricks_sdk-0.47.0 → databricks_sdk-0.49.0}/databricks/sdk/__init__.py +0 -0
- {databricks_sdk-0.47.0 → databricks_sdk-0.49.0}/databricks/sdk/_base_client.py +0 -0
- {databricks_sdk-0.47.0 → databricks_sdk-0.49.0}/databricks/sdk/_property.py +0 -0
- {databricks_sdk-0.47.0 → databricks_sdk-0.49.0}/databricks/sdk/_widgets/__init__.py +0 -0
- {databricks_sdk-0.47.0 → databricks_sdk-0.49.0}/databricks/sdk/_widgets/default_widgets_utils.py +0 -0
- {databricks_sdk-0.47.0 → databricks_sdk-0.49.0}/databricks/sdk/_widgets/ipywidgets_utils.py +0 -0
- {databricks_sdk-0.47.0 → databricks_sdk-0.49.0}/databricks/sdk/azure.py +0 -0
- {databricks_sdk-0.47.0 → databricks_sdk-0.49.0}/databricks/sdk/casing.py +0 -0
- {databricks_sdk-0.47.0 → databricks_sdk-0.49.0}/databricks/sdk/clock.py +0 -0
- {databricks_sdk-0.47.0 → databricks_sdk-0.49.0}/databricks/sdk/config.py +0 -0
- {databricks_sdk-0.47.0 → databricks_sdk-0.49.0}/databricks/sdk/core.py +0 -0
- {databricks_sdk-0.47.0 → databricks_sdk-0.49.0}/databricks/sdk/credentials_provider.py +0 -0
- {databricks_sdk-0.47.0 → databricks_sdk-0.49.0}/databricks/sdk/data_plane.py +0 -0
- {databricks_sdk-0.47.0 → databricks_sdk-0.49.0}/databricks/sdk/dbutils.py +0 -0
- {databricks_sdk-0.47.0 → databricks_sdk-0.49.0}/databricks/sdk/environments.py +0 -0
- {databricks_sdk-0.47.0 → databricks_sdk-0.49.0}/databricks/sdk/errors/__init__.py +0 -0
- {databricks_sdk-0.47.0 → databricks_sdk-0.49.0}/databricks/sdk/errors/base.py +0 -0
- {databricks_sdk-0.47.0 → databricks_sdk-0.49.0}/databricks/sdk/errors/customizer.py +0 -0
- {databricks_sdk-0.47.0 → databricks_sdk-0.49.0}/databricks/sdk/errors/deserializer.py +0 -0
- {databricks_sdk-0.47.0 → databricks_sdk-0.49.0}/databricks/sdk/errors/details.py +0 -0
- {databricks_sdk-0.47.0 → databricks_sdk-0.49.0}/databricks/sdk/errors/mapper.py +0 -0
- {databricks_sdk-0.47.0 → databricks_sdk-0.49.0}/databricks/sdk/errors/overrides.py +0 -0
- {databricks_sdk-0.47.0 → databricks_sdk-0.49.0}/databricks/sdk/errors/parser.py +0 -0
- {databricks_sdk-0.47.0 → databricks_sdk-0.49.0}/databricks/sdk/errors/platform.py +0 -0
- {databricks_sdk-0.47.0 → databricks_sdk-0.49.0}/databricks/sdk/errors/private_link.py +0 -0
- {databricks_sdk-0.47.0 → databricks_sdk-0.49.0}/databricks/sdk/errors/sdk.py +0 -0
- {databricks_sdk-0.47.0 → databricks_sdk-0.49.0}/databricks/sdk/logger/__init__.py +0 -0
- {databricks_sdk-0.47.0 → databricks_sdk-0.49.0}/databricks/sdk/logger/round_trip_logger.py +0 -0
- {databricks_sdk-0.47.0 → databricks_sdk-0.49.0}/databricks/sdk/mixins/__init__.py +0 -0
- {databricks_sdk-0.47.0 → databricks_sdk-0.49.0}/databricks/sdk/mixins/compute.py +0 -0
- {databricks_sdk-0.47.0 → databricks_sdk-0.49.0}/databricks/sdk/mixins/files.py +0 -0
- {databricks_sdk-0.47.0 → databricks_sdk-0.49.0}/databricks/sdk/mixins/jobs.py +0 -0
- {databricks_sdk-0.47.0 → databricks_sdk-0.49.0}/databricks/sdk/mixins/open_ai_client.py +0 -0
- {databricks_sdk-0.47.0 → databricks_sdk-0.49.0}/databricks/sdk/mixins/workspace.py +0 -0
- {databricks_sdk-0.47.0 → databricks_sdk-0.49.0}/databricks/sdk/oauth.py +0 -0
- {databricks_sdk-0.47.0 → databricks_sdk-0.49.0}/databricks/sdk/py.typed +0 -0
- {databricks_sdk-0.47.0 → databricks_sdk-0.49.0}/databricks/sdk/retries.py +0 -0
- {databricks_sdk-0.47.0 → databricks_sdk-0.49.0}/databricks/sdk/runtime/__init__.py +0 -0
- {databricks_sdk-0.47.0 → databricks_sdk-0.49.0}/databricks/sdk/runtime/dbutils_stub.py +0 -0
- {databricks_sdk-0.47.0 → databricks_sdk-0.49.0}/databricks/sdk/service/__init__.py +0 -0
- {databricks_sdk-0.47.0 → databricks_sdk-0.49.0}/databricks/sdk/service/_internal.py +0 -0
- {databricks_sdk-0.47.0 → databricks_sdk-0.49.0}/databricks/sdk/service/cleanrooms.py +0 -0
- {databricks_sdk-0.47.0 → databricks_sdk-0.49.0}/databricks/sdk/service/compute.py +0 -0
- {databricks_sdk-0.47.0 → databricks_sdk-0.49.0}/databricks/sdk/service/files.py +0 -0
- {databricks_sdk-0.47.0 → databricks_sdk-0.49.0}/databricks/sdk/service/iam.py +0 -0
- {databricks_sdk-0.47.0 → databricks_sdk-0.49.0}/databricks/sdk/service/jobs.py +0 -0
- {databricks_sdk-0.47.0 → databricks_sdk-0.49.0}/databricks/sdk/service/ml.py +0 -0
- {databricks_sdk-0.47.0 → databricks_sdk-0.49.0}/databricks/sdk/service/oauth2.py +0 -0
- {databricks_sdk-0.47.0 → databricks_sdk-0.49.0}/databricks/sdk/service/provisioning.py +0 -0
- {databricks_sdk-0.47.0 → databricks_sdk-0.49.0}/databricks/sdk/service/sharing.py +0 -0
- {databricks_sdk-0.47.0 → databricks_sdk-0.49.0}/databricks/sdk/service/vectorsearch.py +0 -0
- {databricks_sdk-0.47.0 → databricks_sdk-0.49.0}/databricks/sdk/service/workspace.py +0 -0
- {databricks_sdk-0.47.0 → databricks_sdk-0.49.0}/databricks/sdk/useragent.py +0 -0
- {databricks_sdk-0.47.0 → databricks_sdk-0.49.0}/databricks_sdk.egg-info/SOURCES.txt +0 -0
- {databricks_sdk-0.47.0 → databricks_sdk-0.49.0}/databricks_sdk.egg-info/dependency_links.txt +0 -0
- {databricks_sdk-0.47.0 → databricks_sdk-0.49.0}/databricks_sdk.egg-info/requires.txt +0 -0
- {databricks_sdk-0.47.0 → databricks_sdk-0.49.0}/databricks_sdk.egg-info/top_level.txt +0 -0
- {databricks_sdk-0.47.0 → databricks_sdk-0.49.0}/pyproject.toml +0 -0
- {databricks_sdk-0.47.0 → databricks_sdk-0.49.0}/setup.cfg +0 -0
`{databricks_sdk-0.47.0 → databricks_sdk-0.49.0}/CHANGELOG.md`:

```diff
@@ -1,5 +1,69 @@
 # Version changelog
 
+## Release v0.49.0
+
+### API Changes
+* Added `generate_download_full_query_result()` method for [w.genie](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/dashboards/genie.html) workspace-level service.
+* Added `effective_user_api_scopes`, `oauth2_app_client_id`, `oauth2_app_integration_id` and `user_api_scopes` fields for `databricks.sdk.service.apps.App`.
+* Added `abfss`, `dbfs`, `error_message`, `execution_duration_seconds`, `file`, `gcs`, `s3`, `status`, `volumes` and `workspace` fields for `databricks.sdk.service.compute.InitScriptInfoAndExecutionDetails`.
+* [Breaking] Added `forecast_granularity` field for `databricks.sdk.service.ml.CreateForecastingExperimentRequest`.
+* Added `jwks_uri` field for `databricks.sdk.service.oauth2.OidcFederationPolicy`.
+* Added `event_log` field for `databricks.sdk.service.pipelines.CreatePipeline`.
+* Added `event_log` field for `databricks.sdk.service.pipelines.EditPipeline`.
+* Added `event_log` field for `databricks.sdk.service.pipelines.PipelineSpec`.
+* Added `fallback_config` field for `databricks.sdk.service.serving.AiGatewayConfig`.
+* Added `custom_provider_config` field for `databricks.sdk.service.serving.ExternalModel`.
+* Added `fallback_config` field for `databricks.sdk.service.serving.PutAiGatewayRequest`.
+* Added `fallback_config` field for `databricks.sdk.service.serving.PutAiGatewayResponse`.
+* Added `aliases`, `comment`, `data_type`, `dependency_list`, `full_data_type`, `id`, `input_params`, `name`, `properties`, `routine_definition`, `schema`, `securable_kind`, `share`, `share_id`, `storage_location` and `tags` fields for `databricks.sdk.service.sharing.DeltaSharingFunction`.
+* Added `query_source` field for `databricks.sdk.service.sql.QueryInfo`.
+* Added `foreign_catalog` enum value for `databricks.sdk.service.catalog.CatalogType`.
+* Added `browse` enum value for `databricks.sdk.service.catalog.Privilege`.
+* Added `access_token_failure`, `allocation_timeout`, `allocation_timeout_node_daemon_not_ready`, `allocation_timeout_no_healthy_clusters`, `allocation_timeout_no_matched_clusters`, `allocation_timeout_no_ready_clusters`, `allocation_timeout_no_unallocated_clusters`, `allocation_timeout_no_warmed_up_clusters`, `aws_inaccessible_kms_key_failure`, `aws_instance_profile_update_failure`, `aws_invalid_key_pair`, `aws_invalid_kms_key_state`, `aws_resource_quota_exceeded`, `azure_packed_deployment_partial_failure`, `bootstrap_timeout_due_to_misconfig`, `budget_policy_limit_enforcement_activated`, `budget_policy_resolution_failure`, `cloud_account_setup_failure`, `cloud_operation_cancelled`, `cloud_provider_instance_not_launched`, `cloud_provider_launch_failure_due_to_misconfig`, `cloud_provider_resource_stockout_due_to_misconfig`, `cluster_operation_throttled`, `cluster_operation_timeout`, `control_plane_request_failure_due_to_misconfig`, `data_access_config_changed`, `disaster_recovery_replication`, `driver_eviction`, `driver_launch_timeout`, `driver_node_unreachable`, `driver_out_of_disk`, `driver_out_of_memory`, `driver_pod_creation_failure`, `driver_unexpected_failure`, `dynamic_spark_conf_size_exceeded`, `eos_spark_image`, `executor_pod_unscheduled`, `gcp_api_rate_quota_exceeded`, `gcp_forbidden`, `gcp_iam_timeout`, `gcp_inaccessible_kms_key_failure`, `gcp_insufficient_capacity`, `gcp_ip_space_exhausted`, `gcp_kms_key_permission_denied`, `gcp_not_found`, `gcp_resource_quota_exceeded`, `gcp_service_account_access_denied`, `gcp_service_account_not_found`, `gcp_subnet_not_ready`, `gcp_trusted_image_projects_violated`, `gke_based_cluster_termination`, `init_container_not_finished`, `instance_pool_max_capacity_reached`, `instance_pool_not_found`, `instance_unreachable_due_to_misconfig`, `internal_capacity_failure`, `invalid_aws_parameter`, `invalid_instance_placement_protocol`, `invalid_worker_image_failure`, `in_penalty_box`, `lazy_allocation_timeout`, `maintenance_mode`, `netvisor_setup_timeout`, `no_matched_k8s`, `no_matched_k8s_testing_tag`, `pod_assignment_failure`, `pod_scheduling_failure`, `resource_usage_blocked`, `secret_creation_failure`, `serverless_long_running_terminated`, `spark_image_download_throttled`, `spark_image_not_found`, `ssh_bootstrap_failure`, `storage_download_failure_due_to_misconfig`, `storage_download_failure_slow`, `storage_download_failure_throttled`, `unexpected_pod_recreation`, `user_initiated_vm_termination` and `workspace_update` enum values for `databricks.sdk.service.compute.TerminationReasonCode`.
+* Added `generated_sql_query_too_long_exception` and `missing_sql_query_exception` enum values for `databricks.sdk.service.dashboards.MessageErrorType`.
+* Added `balanced` enum value for `databricks.sdk.service.jobs.PerformanceTarget`.
+* Added `listing_resource` enum value for `databricks.sdk.service.marketplace.FileParentType`.
+* Added `app` enum value for `databricks.sdk.service.marketplace.MarketplaceFileType`.
+* Added `custom` enum value for `databricks.sdk.service.serving.ExternalModelProvider`.
+* Added `arclight_multi_tenant_azure_exchange_token` and `arclight_multi_tenant_azure_exchange_token_with_user_delegation_key` enum values for `databricks.sdk.service.settings.TokenType`.
+* [Breaking] Changed `create_experiment()` method for [w.forecasting](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/ml/forecasting.html) workspace-level service with new required argument order.
+* Changed `instance_type_id` field for `databricks.sdk.service.compute.NodeInstanceType` to be required.
+* Changed `category` field for `databricks.sdk.service.compute.NodeType` to be required.
+* [Breaking] Changed `functions` field for `databricks.sdk.service.sharing.ListProviderShareAssetsResponse` to type `databricks.sdk.service.sharing.DeltaSharingFunctionList` dataclass.
+* [Breaking] Removed `execution_details` and `script` fields for `databricks.sdk.service.compute.InitScriptInfoAndExecutionDetails`.
+* [Breaking] Removed `supports_elastic_disk` field for `databricks.sdk.service.compute.NodeType`.
+* [Breaking] Removed `data_granularity_quantity` and `data_granularity_unit` fields for `databricks.sdk.service.ml.CreateForecastingExperimentRequest`.
+* [Breaking] Removed `aliases`, `comment`, `data_type`, `dependency_list`, `full_data_type`, `id`, `input_params`, `name`, `properties`, `routine_definition`, `schema`, `securable_kind`, `share`, `share_id`, `storage_location` and `tags` fields for `databricks.sdk.service.sharing.Function`.
+
+
+## Release v0.48.0
+
+### API Changes
+* Added `execution_details` and `script` fields for `databricks.sdk.service.compute.InitScriptInfoAndExecutionDetails`.
+* Added `supports_elastic_disk` field for `databricks.sdk.service.compute.NodeType`.
+* Added `data_granularity_quantity` field for `databricks.sdk.service.ml.CreateForecastingExperimentRequest`.
+* [Breaking] Added `data_granularity_unit` field for `databricks.sdk.service.ml.CreateForecastingExperimentRequest`.
+* Added `aliases`, `comment`, `data_type`, `dependency_list`, `full_data_type`, `id`, `input_params`, `name`, `properties`, `routine_definition`, `schema`, `securable_kind`, `share`, `share_id`, `storage_location` and `tags` fields for `databricks.sdk.service.sharing.Function`.
+* [Breaking] Changed `create_experiment()` method for [w.forecasting](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/ml/forecasting.html) workspace-level service with new required argument order.
+* [Breaking] Changed `instance_type_id` field for `databricks.sdk.service.compute.NodeInstanceType` to no longer be required.
+* [Breaking] Changed `category` field for `databricks.sdk.service.compute.NodeType` to no longer be required.
+* [Breaking] Changed `functions` field for `databricks.sdk.service.sharing.ListProviderShareAssetsResponse` to type `databricks.sdk.service.sharing.FunctionList` dataclass.
+* [Breaking] Removed `abfss`, `dbfs`, `error_message`, `execution_duration_seconds`, `file`, `gcs`, `s3`, `status`, `volumes` and `workspace` fields for `databricks.sdk.service.compute.InitScriptInfoAndExecutionDetails`.
+* [Breaking] Removed `forecast_granularity` field for `databricks.sdk.service.ml.CreateForecastingExperimentRequest`.
+* [Breaking] Removed `jwks_uri` field for `databricks.sdk.service.oauth2.OidcFederationPolicy`.
+* [Breaking] Removed `fallback_config` field for `databricks.sdk.service.serving.AiGatewayConfig`.
+* [Breaking] Removed `custom_provider_config` field for `databricks.sdk.service.serving.ExternalModel`.
+* [Breaking] Removed `fallback_config` field for `databricks.sdk.service.serving.PutAiGatewayRequest`.
+* [Breaking] Removed `fallback_config` field for `databricks.sdk.service.serving.PutAiGatewayResponse`.
+* [Breaking] Removed `aliases`, `comment`, `data_type`, `dependency_list`, `full_data_type`, `id`, `input_params`, `name`, `properties`, `routine_definition`, `schema`, `securable_kind`, `share`, `share_id`, `storage_location` and `tags` fields for `databricks.sdk.service.sharing.DeltaSharingFunction`.
+* [Breaking] Removed `access_token_failure`, `allocation_timeout`, `allocation_timeout_node_daemon_not_ready`, `allocation_timeout_no_healthy_clusters`, `allocation_timeout_no_matched_clusters`, `allocation_timeout_no_ready_clusters`, `allocation_timeout_no_unallocated_clusters`, `allocation_timeout_no_warmed_up_clusters`, `aws_inaccessible_kms_key_failure`, `aws_instance_profile_update_failure`, `aws_invalid_key_pair`, `aws_invalid_kms_key_state`, `aws_resource_quota_exceeded`, `azure_packed_deployment_partial_failure`, `bootstrap_timeout_due_to_misconfig`, `budget_policy_limit_enforcement_activated`, `budget_policy_resolution_failure`, `cloud_account_setup_failure`, `cloud_operation_cancelled`, `cloud_provider_instance_not_launched`, `cloud_provider_launch_failure_due_to_misconfig`, `cloud_provider_resource_stockout_due_to_misconfig`, `cluster_operation_throttled`, `cluster_operation_timeout`, `control_plane_request_failure_due_to_misconfig`, `data_access_config_changed`, `disaster_recovery_replication`, `driver_eviction`, `driver_launch_timeout`, `driver_node_unreachable`, `driver_out_of_disk`, `driver_out_of_memory`, `driver_pod_creation_failure`, `driver_unexpected_failure`, `dynamic_spark_conf_size_exceeded`, `eos_spark_image`, `executor_pod_unscheduled`, `gcp_api_rate_quota_exceeded`, `gcp_forbidden`, `gcp_iam_timeout`, `gcp_inaccessible_kms_key_failure`, `gcp_insufficient_capacity`, `gcp_ip_space_exhausted`, `gcp_kms_key_permission_denied`, `gcp_not_found`, `gcp_resource_quota_exceeded`, `gcp_service_account_access_denied`, `gcp_service_account_not_found`, `gcp_subnet_not_ready`, `gcp_trusted_image_projects_violated`, `gke_based_cluster_termination`, `init_container_not_finished`, `instance_pool_max_capacity_reached`, `instance_pool_not_found`, `instance_unreachable_due_to_misconfig`, `internal_capacity_failure`, `invalid_aws_parameter`, `invalid_instance_placement_protocol`, `invalid_worker_image_failure`, `in_penalty_box`, `lazy_allocation_timeout`, `maintenance_mode`, `netvisor_setup_timeout`, `no_matched_k8s`, `no_matched_k8s_testing_tag`, `pod_assignment_failure`, `pod_scheduling_failure`, `resource_usage_blocked`, `secret_creation_failure`, `serverless_long_running_terminated`, `spark_image_download_throttled`, `spark_image_not_found`, `ssh_bootstrap_failure`, `storage_download_failure_due_to_misconfig`, `storage_download_failure_slow`, `storage_download_failure_throttled`, `unexpected_pod_recreation`, `user_initiated_vm_termination` and `workspace_update` enum values for `databricks.sdk.service.compute.TerminationReasonCode`.
+* [Breaking] Removed `generated_sql_query_too_long_exception` and `missing_sql_query_exception` enum values for `databricks.sdk.service.dashboards.MessageErrorType`.
+* [Breaking] Removed `balanced` enum value for `databricks.sdk.service.jobs.PerformanceTarget`.
+* [Breaking] Removed `listing_resource` enum value for `databricks.sdk.service.marketplace.FileParentType`.
+* [Breaking] Removed `app` enum value for `databricks.sdk.service.marketplace.MarketplaceFileType`.
+* [Breaking] Removed `custom` enum value for `databricks.sdk.service.serving.ExternalModelProvider`.
+
+
 ## Release v0.47.0
 
 ### Bug Fixes
```
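Of the breaking entries above, the `sharing.Function` → `sharing.DeltaSharingFunction` rename (v0.48.0 → v0.49.0) is the one most likely to require a code change, since only the type name moved, not the wire format. A minimal sketch of the 0.49.0 type, using the `from_dict`/`as_dict` round-trip that every generated dataclass in this SDK exposes (field values are illustrative):

```python
from databricks.sdk.service import sharing

# `DeltaSharingFunction` replaces 0.48.0's `sharing.Function`; update type
# hints and isinstance checks, but dicts parsed from API responses are the same.
fn = sharing.DeltaSharingFunction.from_dict({"name": "my_udf", "schema": "default"})
print(fn.name, fn.schema)   # my_udf default
print(fn.as_dict())         # only populated fields are serialized back
```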
`{databricks_sdk-0.47.0 → databricks_sdk-0.49.0}/databricks/sdk/service/apps.py`:

```diff
@@ -50,9 +50,16 @@ class App:
 
     effective_budget_policy_id: Optional[str] = None
 
+    effective_user_api_scopes: Optional[List[str]] = None
+    """The effective api scopes granted to the user access token."""
+
     id: Optional[str] = None
     """The unique identifier of the app."""
 
+    oauth2_app_client_id: Optional[str] = None
+
+    oauth2_app_integration_id: Optional[str] = None
+
     pending_deployment: Optional[AppDeployment] = None
     """The pending deployment of the app. A deployment is considered pending when it is being prepared
     for deployment to the app compute."""
@@ -75,6 +82,8 @@ class App:
     url: Optional[str] = None
     """The URL of the app once it is deployed."""
 
+    user_api_scopes: Optional[List[str]] = None
+
     def as_dict(self) -> dict:
         """Serializes the App into a dictionary suitable for use as a JSON request body."""
         body = {}
@@ -96,10 +105,16 @@ class App:
             body["description"] = self.description
         if self.effective_budget_policy_id is not None:
             body["effective_budget_policy_id"] = self.effective_budget_policy_id
+        if self.effective_user_api_scopes:
+            body["effective_user_api_scopes"] = [v for v in self.effective_user_api_scopes]
         if self.id is not None:
            body["id"] = self.id
         if self.name is not None:
             body["name"] = self.name
+        if self.oauth2_app_client_id is not None:
+            body["oauth2_app_client_id"] = self.oauth2_app_client_id
+        if self.oauth2_app_integration_id is not None:
+            body["oauth2_app_integration_id"] = self.oauth2_app_integration_id
         if self.pending_deployment:
             body["pending_deployment"] = self.pending_deployment.as_dict()
         if self.resources:
@@ -116,6 +131,8 @@ class App:
             body["updater"] = self.updater
         if self.url is not None:
             body["url"] = self.url
+        if self.user_api_scopes:
+            body["user_api_scopes"] = [v for v in self.user_api_scopes]
         return body
 
     def as_shallow_dict(self) -> dict:
@@ -139,10 +156,16 @@ class App:
             body["description"] = self.description
         if self.effective_budget_policy_id is not None:
             body["effective_budget_policy_id"] = self.effective_budget_policy_id
+        if self.effective_user_api_scopes:
+            body["effective_user_api_scopes"] = self.effective_user_api_scopes
         if self.id is not None:
             body["id"] = self.id
         if self.name is not None:
             body["name"] = self.name
+        if self.oauth2_app_client_id is not None:
+            body["oauth2_app_client_id"] = self.oauth2_app_client_id
+        if self.oauth2_app_integration_id is not None:
+            body["oauth2_app_integration_id"] = self.oauth2_app_integration_id
         if self.pending_deployment:
             body["pending_deployment"] = self.pending_deployment
         if self.resources:
@@ -159,6 +182,8 @@ class App:
             body["updater"] = self.updater
         if self.url is not None:
             body["url"] = self.url
+        if self.user_api_scopes:
+            body["user_api_scopes"] = self.user_api_scopes
         return body
 
     @classmethod
@@ -174,8 +199,11 @@ class App:
             default_source_code_path=d.get("default_source_code_path", None),
             description=d.get("description", None),
             effective_budget_policy_id=d.get("effective_budget_policy_id", None),
+            effective_user_api_scopes=d.get("effective_user_api_scopes", None),
             id=d.get("id", None),
             name=d.get("name", None),
+            oauth2_app_client_id=d.get("oauth2_app_client_id", None),
+            oauth2_app_integration_id=d.get("oauth2_app_integration_id", None),
             pending_deployment=_from_dict(d, "pending_deployment", AppDeployment),
             resources=_repeated_dict(d, "resources", AppResource),
             service_principal_client_id=d.get("service_principal_client_id", None),
@@ -184,6 +212,7 @@ class App:
             update_time=d.get("update_time", None),
             updater=d.get("updater", None),
             url=d.get("url", None),
+            user_api_scopes=d.get("user_api_scopes", None),
         )
 
 
```
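The four new `App` fields follow the SDK's usual optional-field pattern: unset values are omitted from request bodies and deserialize to `None`. A quick round-trip sketch (the scope names and IDs below are illustrative placeholders, not a documented scope list):

```python
from databricks.sdk.service.apps import App

app = App.from_dict(
    {
        "name": "my-app",
        "user_api_scopes": ["sql", "files.files"],  # illustrative scope names
        "oauth2_app_client_id": "client-123",       # illustrative ID
    }
)
assert app.effective_user_api_scopes is None  # omitted keys come back as None
print(app.as_dict())  # only the populated fields appear in the serialized body
```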
`{databricks_sdk-0.47.0 → databricks_sdk-0.49.0}/databricks/sdk/service/billing.py`:

```diff
@@ -372,7 +372,8 @@ class BudgetPolicy:
 
     policy_name: Optional[str] = None
     """The name of the policy. - Must be unique among active policies. - Can contain only characters
-    from the ISO 8859-1 (latin1) set."""
+    from the ISO 8859-1 (latin1) set. - Can't start with reserved keywords such as
+    `databricks:default-policy`."""
 
     def as_dict(self) -> dict:
         """Serializes the BudgetPolicy into a dictionary suitable for use as a JSON request body."""
```
`{databricks_sdk-0.47.0 → databricks_sdk-0.49.0}/databricks/sdk/service/catalog.py`:

```diff
@@ -1065,6 +1065,7 @@ class CatalogType(Enum):
     """The type of the catalog."""
 
     DELTASHARING_CATALOG = "DELTASHARING_CATALOG"
+    FOREIGN_CATALOG = "FOREIGN_CATALOG"
     MANAGED_CATALOG = "MANAGED_CATALOG"
     SYSTEM_CATALOG = "SYSTEM_CATALOG"
 
@@ -6664,6 +6665,7 @@ class Privilege(Enum):
     ACCESS = "ACCESS"
     ALL_PRIVILEGES = "ALL_PRIVILEGES"
     APPLY_TAG = "APPLY_TAG"
+    BROWSE = "BROWSE"
     CREATE = "CREATE"
     CREATE_CATALOG = "CREATE_CATALOG"
     CREATE_CONNECTION = "CREATE_CONNECTION"
```
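Both additions are plain enum members, so existing dispatch code picks them up without signature changes; a quick sketch:

```python
from databricks.sdk.service.catalog import CatalogType, Privilege

# New in 0.49.0: foreign catalogs get a dedicated catalog type, and BROWSE
# joins the privilege set. Lookup by wire value works as for any enum member:
print(CatalogType("FOREIGN_CATALOG") is CatalogType.FOREIGN_CATALOG)  # True
print(Privilege.BROWSE.value)  # "BROWSE"
```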
`{databricks_sdk-0.47.0 → databricks_sdk-0.49.0}/databricks/sdk/service/dashboards.py`:

```diff
@@ -529,6 +529,49 @@ class GenieCreateConversationMessageRequest:
         )
 
 
+@dataclass
+class GenieGenerateDownloadFullQueryResultResponse:
+    error: Optional[str] = None
+    """Error message if Genie failed to download the result"""
+
+    status: Optional[MessageStatus] = None
+    """Download result status"""
+
+    transient_statement_id: Optional[str] = None
+    """Transient Statement ID. Use this ID to track the download request in subsequent polling calls"""
+
+    def as_dict(self) -> dict:
+        """Serializes the GenieGenerateDownloadFullQueryResultResponse into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.error is not None:
+            body["error"] = self.error
+        if self.status is not None:
+            body["status"] = self.status.value
+        if self.transient_statement_id is not None:
+            body["transient_statement_id"] = self.transient_statement_id
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the GenieGenerateDownloadFullQueryResultResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.error is not None:
+            body["error"] = self.error
+        if self.status is not None:
+            body["status"] = self.status
+        if self.transient_statement_id is not None:
+            body["transient_statement_id"] = self.transient_statement_id
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, Any]) -> GenieGenerateDownloadFullQueryResultResponse:
+        """Deserializes the GenieGenerateDownloadFullQueryResultResponse from a dictionary."""
+        return cls(
+            error=d.get("error", None),
+            status=_enum(d, "status", MessageStatus),
+            transient_statement_id=d.get("transient_statement_id", None),
+        )
+
+
 @dataclass
 class GenieGetMessageQueryResultResponse:
     statement_response: Optional[sql.StatementResponse] = None
@@ -1989,6 +2032,37 @@ class GenieAPI:
         )
         return GenieGetMessageQueryResultResponse.from_dict(res)
 
+    def generate_download_full_query_result(
+        self, space_id: str, conversation_id: str, message_id: str, attachment_id: str
+    ) -> GenieGenerateDownloadFullQueryResultResponse:
+        """Generate full query result download.
+
+        Initiate full SQL query result download and obtain a transient ID for tracking the download progress.
+        This call initiates a new SQL execution to generate the query result.
+
+        :param space_id: str
+          Space ID
+        :param conversation_id: str
+          Conversation ID
+        :param message_id: str
+          Message ID
+        :param attachment_id: str
+          Attachment ID
+
+        :returns: :class:`GenieGenerateDownloadFullQueryResultResponse`
+        """
+
+        headers = {
+            "Accept": "application/json",
+        }
+
+        res = self._api.do(
+            "POST",
+            f"/api/2.0/genie/spaces/{space_id}/conversations/{conversation_id}/messages/{message_id}/attachments/{attachment_id}/generate-download",
+            headers=headers,
+        )
+        return GenieGenerateDownloadFullQueryResultResponse.from_dict(res)
+
     def get_message(self, space_id: str, conversation_id: str, message_id: str) -> GenieMessage:
         """Get conversation message.
 
```
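The new endpoint slots into the existing Genie flow: once a message's query attachment exists, a POST to `.../generate-download` kicks off a fresh SQL execution and returns a transient statement ID to poll. A usage sketch (the IDs are placeholders you would take from a real Genie conversation):

```python
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

# Placeholders: take these from an existing Genie space/conversation/message.
space_id, conversation_id, message_id, attachment_id = "…", "…", "…", "…"

resp = w.genie.generate_download_full_query_result(
    space_id=space_id,
    conversation_id=conversation_id,
    message_id=message_id,
    attachment_id=attachment_id,
)
print(resp.status)                  # MessageStatus of the download
print(resp.transient_statement_id)  # use this ID in subsequent polling calls
if resp.error:
    print("download failed:", resp.error)
```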
`{databricks_sdk-0.47.0 → databricks_sdk-0.49.0}/databricks/sdk/service/marketplace.py`:

```diff
@@ -1948,7 +1948,6 @@ class ListProvidersResponse:
 @dataclass
 class Listing:
     summary: ListingSummary
-    """Next Number: 26"""
 
     detail: Optional[ListingDetail] = None
 
@@ -2252,8 +2251,6 @@ class ListingStatus(Enum):
 
 @dataclass
 class ListingSummary:
-    """Next Number: 26"""
-
     name: str
 
     listing_type: ListingType
```
`{databricks_sdk-0.47.0 → databricks_sdk-0.49.0}/databricks/sdk/service/pipelines.py`:

```diff
@@ -58,6 +58,9 @@ class CreatePipeline:
     edition: Optional[str] = None
     """Pipeline product edition."""
 
+    event_log: Optional[EventLogSpec] = None
+    """Event log configuration for this pipeline"""
+
     filters: Optional[Filters] = None
     """Filters on which Pipeline packages to include in the deployed graph."""
 
@@ -136,6 +139,8 @@ class CreatePipeline:
             body["dry_run"] = self.dry_run
         if self.edition is not None:
             body["edition"] = self.edition
+        if self.event_log:
+            body["event_log"] = self.event_log.as_dict()
         if self.filters:
             body["filters"] = self.filters.as_dict()
         if self.gateway_definition:
@@ -193,6 +198,8 @@ class CreatePipeline:
             body["dry_run"] = self.dry_run
         if self.edition is not None:
             body["edition"] = self.edition
+        if self.event_log:
+            body["event_log"] = self.event_log
         if self.filters:
             body["filters"] = self.filters
         if self.gateway_definition:
@@ -240,6 +247,7 @@ class CreatePipeline:
             development=d.get("development", None),
             dry_run=d.get("dry_run", None),
             edition=d.get("edition", None),
+            event_log=_from_dict(d, "event_log", EventLogSpec),
             filters=_from_dict(d, "filters", Filters),
             gateway_definition=_from_dict(d, "gateway_definition", IngestionGatewayPipelineDefinition),
             id=d.get("id", None),
@@ -427,6 +435,9 @@ class EditPipeline:
     edition: Optional[str] = None
     """Pipeline product edition."""
 
+    event_log: Optional[EventLogSpec] = None
+    """Event log configuration for this pipeline"""
+
     expected_last_modified: Optional[int] = None
     """If present, the last-modified time of the pipeline settings before the edit. If the settings
     were modified after that time, then the request will fail with a conflict."""
@@ -510,6 +521,8 @@ class EditPipeline:
             body["development"] = self.development
         if self.edition is not None:
             body["edition"] = self.edition
+        if self.event_log:
+            body["event_log"] = self.event_log.as_dict()
         if self.expected_last_modified is not None:
             body["expected_last_modified"] = self.expected_last_modified
         if self.filters:
@@ -569,6 +582,8 @@ class EditPipeline:
             body["development"] = self.development
         if self.edition is not None:
             body["edition"] = self.edition
+        if self.event_log:
+            body["event_log"] = self.event_log
         if self.expected_last_modified is not None:
             body["expected_last_modified"] = self.expected_last_modified
         if self.filters:
@@ -619,6 +634,7 @@ class EditPipeline:
             deployment=_from_dict(d, "deployment", PipelineDeployment),
             development=d.get("development", None),
             edition=d.get("edition", None),
+            event_log=_from_dict(d, "event_log", EventLogSpec),
             expected_last_modified=d.get("expected_last_modified", None),
             filters=_from_dict(d, "filters", Filters),
             gateway_definition=_from_dict(d, "gateway_definition", IngestionGatewayPipelineDefinition),
@@ -698,6 +714,47 @@ class EventLevel(Enum):
     WARN = "WARN"
 
 
+@dataclass
+class EventLogSpec:
+    """Configurable event log parameters."""
+
+    catalog: Optional[str] = None
+    """The UC catalog the event log is published under."""
+
+    name: Optional[str] = None
+    """The name the event log is published to in UC."""
+
+    schema: Optional[str] = None
+    """The UC schema the event log is published under."""
+
+    def as_dict(self) -> dict:
+        """Serializes the EventLogSpec into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.catalog is not None:
+            body["catalog"] = self.catalog
+        if self.name is not None:
+            body["name"] = self.name
+        if self.schema is not None:
+            body["schema"] = self.schema
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the EventLogSpec into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.catalog is not None:
+            body["catalog"] = self.catalog
+        if self.name is not None:
+            body["name"] = self.name
+        if self.schema is not None:
+            body["schema"] = self.schema
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, Any]) -> EventLogSpec:
+        """Deserializes the EventLogSpec from a dictionary."""
+        return cls(catalog=d.get("catalog", None), name=d.get("name", None), schema=d.get("schema", None))
+
+
 @dataclass
 class FileLibrary:
     path: Optional[str] = None
@@ -2205,6 +2262,9 @@ class PipelineSpec:
     edition: Optional[str] = None
     """Pipeline product edition."""
 
+    event_log: Optional[EventLogSpec] = None
+    """Event log configuration for this pipeline"""
+
     filters: Optional[Filters] = None
     """Filters on which Pipeline packages to include in the deployed graph."""
 
@@ -2271,6 +2331,8 @@ class PipelineSpec:
             body["development"] = self.development
         if self.edition is not None:
             body["edition"] = self.edition
+        if self.event_log:
+            body["event_log"] = self.event_log.as_dict()
         if self.filters:
             body["filters"] = self.filters.as_dict()
         if self.gateway_definition:
@@ -2322,6 +2384,8 @@ class PipelineSpec:
             body["development"] = self.development
         if self.edition is not None:
             body["edition"] = self.edition
+        if self.event_log:
+            body["event_log"] = self.event_log
         if self.filters:
             body["filters"] = self.filters
         if self.gateway_definition:
@@ -2365,6 +2429,7 @@ class PipelineSpec:
             deployment=_from_dict(d, "deployment", PipelineDeployment),
             development=d.get("development", None),
             edition=d.get("edition", None),
+            event_log=_from_dict(d, "event_log", EventLogSpec),
             filters=_from_dict(d, "filters", Filters),
             gateway_definition=_from_dict(d, "gateway_definition", IngestionGatewayPipelineDefinition),
             id=d.get("id", None),
@@ -3403,6 +3468,7 @@ class PipelinesAPI:
         development: Optional[bool] = None,
         dry_run: Optional[bool] = None,
         edition: Optional[str] = None,
+        event_log: Optional[EventLogSpec] = None,
         filters: Optional[Filters] = None,
         gateway_definition: Optional[IngestionGatewayPipelineDefinition] = None,
         id: Optional[str] = None,
@@ -3447,6 +3513,8 @@ class PipelinesAPI:
         :param dry_run: bool (optional)
         :param edition: str (optional)
           Pipeline product edition.
+        :param event_log: :class:`EventLogSpec` (optional)
+          Event log configuration for this pipeline
         :param filters: :class:`Filters` (optional)
           Filters on which Pipeline packages to include in the deployed graph.
         :param gateway_definition: :class:`IngestionGatewayPipelineDefinition` (optional)
@@ -3510,6 +3578,8 @@ class PipelinesAPI:
             body["dry_run"] = dry_run
         if edition is not None:
             body["edition"] = edition
+        if event_log is not None:
+            body["event_log"] = event_log.as_dict()
         if filters is not None:
             body["filters"] = filters.as_dict()
         if gateway_definition is not None:
@@ -3903,6 +3973,7 @@ class PipelinesAPI:
         deployment: Optional[PipelineDeployment] = None,
         development: Optional[bool] = None,
         edition: Optional[str] = None,
+        event_log: Optional[EventLogSpec] = None,
         expected_last_modified: Optional[int] = None,
         filters: Optional[Filters] = None,
         gateway_definition: Optional[IngestionGatewayPipelineDefinition] = None,
@@ -3948,6 +4019,8 @@ class PipelinesAPI:
           Whether the pipeline is in Development mode. Defaults to false.
         :param edition: str (optional)
           Pipeline product edition.
+        :param event_log: :class:`EventLogSpec` (optional)
+          Event log configuration for this pipeline
         :param expected_last_modified: int (optional)
           If present, the last-modified time of the pipeline settings before the edit. If the settings were
           modified after that time, then the request will fail with a conflict.
@@ -4012,6 +4085,8 @@ class PipelinesAPI:
             body["development"] = development
         if edition is not None:
             body["edition"] = edition
+        if event_log is not None:
+            body["event_log"] = event_log.as_dict()
         if expected_last_modified is not None:
             body["expected_last_modified"] = expected_last_modified
         if filters is not None:
```
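`EventLogSpec` threads through `create()` and `update()` as an optional keyword, so publishing a pipeline's event log to Unity Catalog becomes a one-argument change. A minimal sketch (the catalog, schema, and table names are illustrative):

```python
from databricks.sdk import WorkspaceClient
from databricks.sdk.service.pipelines import EventLogSpec

w = WorkspaceClient()

created = w.pipelines.create(
    name="my-pipeline",
    event_log=EventLogSpec(
        catalog="main",             # UC catalog the event log is published under
        schema="etl",               # UC schema the event log is published under
        name="my_pipeline_events",  # name the event log is published to in UC
    ),
    # ...plus the usual libraries/cluster/storage settings for the pipeline.
)
print(created.pipeline_id)
```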
`{databricks_sdk-0.47.0 → databricks_sdk-0.49.0}/databricks/sdk/service/serving.py`:

```diff
@@ -3294,6 +3294,7 @@ class ServedModelInputWorkloadSize(Enum):
 
 
 class ServedModelInputWorkloadType(Enum):
+    """Please keep this in sync with with workload types in InferenceEndpointEntities.scala"""
 
     CPU = "CPU"
     GPU_LARGE = "GPU_LARGE"
@@ -4059,6 +4060,7 @@ class ServingEndpointPermissionsRequest:
 
 
 class ServingModelWorkloadType(Enum):
+    """Please keep this in sync with with workload types in InferenceEndpointEntities.scala"""
 
     CPU = "CPU"
     GPU_LARGE = "GPU_LARGE"
```
`{databricks_sdk-0.47.0 → databricks_sdk-0.49.0}/databricks/sdk/service/settings.py`:

```diff
@@ -4146,6 +4146,10 @@ class TokenType(Enum):
 
     ARCLIGHT_AZURE_EXCHANGE_TOKEN = "ARCLIGHT_AZURE_EXCHANGE_TOKEN"
     ARCLIGHT_AZURE_EXCHANGE_TOKEN_WITH_USER_DELEGATION_KEY = "ARCLIGHT_AZURE_EXCHANGE_TOKEN_WITH_USER_DELEGATION_KEY"
+    ARCLIGHT_MULTI_TENANT_AZURE_EXCHANGE_TOKEN = "ARCLIGHT_MULTI_TENANT_AZURE_EXCHANGE_TOKEN"
+    ARCLIGHT_MULTI_TENANT_AZURE_EXCHANGE_TOKEN_WITH_USER_DELEGATION_KEY = (
+        "ARCLIGHT_MULTI_TENANT_AZURE_EXCHANGE_TOKEN_WITH_USER_DELEGATION_KEY"
+    )
     AZURE_ACTIVE_DIRECTORY_TOKEN = "AZURE_ACTIVE_DIRECTORY_TOKEN"
 
 
```
`{databricks_sdk-0.47.0 → databricks_sdk-0.49.0}/databricks/sdk/service/sql.py`:

```diff
@@ -3161,6 +3161,123 @@ class ExternalLink:
         )
 
 
+@dataclass
+class ExternalQuerySource:
+    alert_id: Optional[str] = None
+    """The canonical identifier for this SQL alert"""
+
+    dashboard_id: Optional[str] = None
+    """The canonical identifier for this Lakeview dashboard"""
+
+    genie_space_id: Optional[str] = None
+    """The canonical identifier for this Genie space"""
+
+    job_info: Optional[ExternalQuerySourceJobInfo] = None
+
+    legacy_dashboard_id: Optional[str] = None
+    """The canonical identifier for this legacy dashboard"""
+
+    notebook_id: Optional[str] = None
+    """The canonical identifier for this notebook"""
+
+    sql_query_id: Optional[str] = None
+    """The canonical identifier for this SQL query"""
+
+    def as_dict(self) -> dict:
+        """Serializes the ExternalQuerySource into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.alert_id is not None:
+            body["alert_id"] = self.alert_id
+        if self.dashboard_id is not None:
+            body["dashboard_id"] = self.dashboard_id
+        if self.genie_space_id is not None:
+            body["genie_space_id"] = self.genie_space_id
+        if self.job_info:
+            body["job_info"] = self.job_info.as_dict()
+        if self.legacy_dashboard_id is not None:
+            body["legacy_dashboard_id"] = self.legacy_dashboard_id
+        if self.notebook_id is not None:
+            body["notebook_id"] = self.notebook_id
+        if self.sql_query_id is not None:
+            body["sql_query_id"] = self.sql_query_id
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ExternalQuerySource into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.alert_id is not None:
+            body["alert_id"] = self.alert_id
+        if self.dashboard_id is not None:
+            body["dashboard_id"] = self.dashboard_id
+        if self.genie_space_id is not None:
+            body["genie_space_id"] = self.genie_space_id
+        if self.job_info:
+            body["job_info"] = self.job_info
+        if self.legacy_dashboard_id is not None:
+            body["legacy_dashboard_id"] = self.legacy_dashboard_id
+        if self.notebook_id is not None:
+            body["notebook_id"] = self.notebook_id
+        if self.sql_query_id is not None:
+            body["sql_query_id"] = self.sql_query_id
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, Any]) -> ExternalQuerySource:
+        """Deserializes the ExternalQuerySource from a dictionary."""
+        return cls(
+            alert_id=d.get("alert_id", None),
+            dashboard_id=d.get("dashboard_id", None),
+            genie_space_id=d.get("genie_space_id", None),
+            job_info=_from_dict(d, "job_info", ExternalQuerySourceJobInfo),
+            legacy_dashboard_id=d.get("legacy_dashboard_id", None),
+            notebook_id=d.get("notebook_id", None),
+            sql_query_id=d.get("sql_query_id", None),
+        )
+
+
+@dataclass
+class ExternalQuerySourceJobInfo:
+    job_id: Optional[str] = None
+    """The canonical identifier for this job."""
+
+    job_run_id: Optional[str] = None
+    """The canonical identifier of the run. This ID is unique across all runs of all jobs."""
+
+    job_task_run_id: Optional[str] = None
+    """The canonical identifier of the task run."""
+
+    def as_dict(self) -> dict:
+        """Serializes the ExternalQuerySourceJobInfo into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.job_id is not None:
+            body["job_id"] = self.job_id
+        if self.job_run_id is not None:
+            body["job_run_id"] = self.job_run_id
+        if self.job_task_run_id is not None:
+            body["job_task_run_id"] = self.job_task_run_id
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ExternalQuerySourceJobInfo into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.job_id is not None:
+            body["job_id"] = self.job_id
+        if self.job_run_id is not None:
+            body["job_run_id"] = self.job_run_id
+        if self.job_task_run_id is not None:
+            body["job_task_run_id"] = self.job_task_run_id
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, Any]) -> ExternalQuerySourceJobInfo:
+        """Deserializes the ExternalQuerySourceJobInfo from a dictionary."""
+        return cls(
+            job_id=d.get("job_id", None),
+            job_run_id=d.get("job_run_id", None),
+            job_task_run_id=d.get("job_task_run_id", None),
+        )
+
+
 class Format(Enum):
 
     ARROW_STREAM = "ARROW_STREAM"
@@ -5113,6 +5230,11 @@ class QueryInfo:
     query_id: Optional[str] = None
     """The query ID."""
 
+    query_source: Optional[ExternalQuerySource] = None
+    """A struct that contains key-value pairs representing Databricks entities that were involved in
+    the execution of this statement, such as jobs, notebooks, or dashboards. This field only records
+    Databricks entities."""
+
     query_start_time_ms: Optional[int] = None
     """The time the query started."""
 
@@ -5173,6 +5295,8 @@ class QueryInfo:
             body["query_end_time_ms"] = self.query_end_time_ms
         if self.query_id is not None:
             body["query_id"] = self.query_id
+        if self.query_source:
+            body["query_source"] = self.query_source.as_dict()
         if self.query_start_time_ms is not None:
             body["query_start_time_ms"] = self.query_start_time_ms
         if self.query_text is not None:
@@ -5222,6 +5346,8 @@ class QueryInfo:
             body["query_end_time_ms"] = self.query_end_time_ms
         if self.query_id is not None:
             body["query_id"] = self.query_id
+        if self.query_source:
+            body["query_source"] = self.query_source
         if self.query_start_time_ms is not None:
             body["query_start_time_ms"] = self.query_start_time_ms
         if self.query_text is not None:
@@ -5259,6 +5385,7 @@ class QueryInfo:
             plans_state=_enum(d, "plans_state", PlansState),
             query_end_time_ms=d.get("query_end_time_ms", None),
             query_id=d.get("query_id", None),
+            query_source=_from_dict(d, "query_source", ExternalQuerySource),
             query_start_time_ms=d.get("query_start_time_ms", None),
             query_text=d.get("query_text", None),
             rows_produced=d.get("rows_produced", None),
```
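On the read side, `QueryInfo.query_source` lets query-history consumers attribute a statement to the Databricks entity that ran it. A hedged sketch over the query-history listing (assuming the `res` field on `list()`'s response, as in prior releases; pagination and filters omitted):

```python
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

for q in w.query_history.list().res or []:
    src = q.query_source
    if src is None:
        continue  # statement had no recorded Databricks entity
    if src.job_info:
        print(q.query_id, "job", src.job_info.job_id, "run", src.job_info.job_run_id)
    elif src.notebook_id:
        print(q.query_id, "notebook", src.notebook_id)
    elif src.dashboard_id:
        print(q.query_id, "Lakeview dashboard", src.dashboard_id)
```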
`databricks_sdk-0.49.0/databricks/sdk/version.py` (added):

```diff
@@ -0,0 +1 @@
+__version__ = "0.49.0"
```

`databricks_sdk-0.47.0/databricks/sdk/version.py` (removed):

```diff
@@ -1 +0,0 @@
-__version__ = "0.47.0"
```
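Because the sdist path embeds the version, the bump shows up as a remove-and-add of `version.py` rather than an in-place edit. The constant remains importable for runtime checks:

```python
from databricks.sdk.version import __version__

print(__version__)  # "0.49.0" for this release
```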
All remaining files (the `+0 -0` entries in the list above) are unchanged; only the `databricks_sdk-0.47.0` → `databricks_sdk-0.49.0` directory prefix was renamed.