databricks-sdk 0.55.0__tar.gz → 0.56.0__tar.gz
This diff shows the changes between these publicly released package versions as they appear in their respective public registries; it is provided for informational purposes only.
Potentially problematic release: this version of databricks-sdk might be problematic.
- {databricks_sdk-0.55.0 → databricks_sdk-0.56.0}/CHANGELOG.md +79 -0
- {databricks_sdk-0.55.0 → databricks_sdk-0.56.0}/PKG-INFO +1 -1
- {databricks_sdk-0.55.0 → databricks_sdk-0.56.0}/databricks/sdk/__init__.py +33 -22
- databricks_sdk-0.56.0/databricks/sdk/service/aibuilder.py +364 -0
- {databricks_sdk-0.55.0 → databricks_sdk-0.56.0}/databricks/sdk/service/billing.py +150 -169
- {databricks_sdk-0.55.0 → databricks_sdk-0.56.0}/databricks/sdk/service/catalog.py +263 -835
- {databricks_sdk-0.55.0 → databricks_sdk-0.56.0}/databricks/sdk/service/cleanrooms.py +15 -10
- {databricks_sdk-0.55.0 → databricks_sdk-0.56.0}/databricks/sdk/service/compute.py +12 -22
- {databricks_sdk-0.55.0 → databricks_sdk-0.56.0}/databricks/sdk/service/dashboards.py +59 -451
- databricks_sdk-0.56.0/databricks/sdk/service/database.py +1256 -0
- {databricks_sdk-0.55.0 → databricks_sdk-0.56.0}/databricks/sdk/service/files.py +2 -0
- {databricks_sdk-0.55.0 → databricks_sdk-0.56.0}/databricks/sdk/service/iam.py +6 -6
- {databricks_sdk-0.55.0 → databricks_sdk-0.56.0}/databricks/sdk/service/jobs.py +238 -0
- {databricks_sdk-0.55.0 → databricks_sdk-0.56.0}/databricks/sdk/service/ml.py +8 -271
- {databricks_sdk-0.55.0 → databricks_sdk-0.56.0}/databricks/sdk/service/pipelines.py +45 -1
- {databricks_sdk-0.55.0 → databricks_sdk-0.56.0}/databricks/sdk/service/provisioning.py +0 -3
- databricks_sdk-0.56.0/databricks/sdk/service/qualitymonitorv2.py +275 -0
- {databricks_sdk-0.55.0 → databricks_sdk-0.56.0}/databricks/sdk/service/serving.py +76 -4
- {databricks_sdk-0.55.0 → databricks_sdk-0.56.0}/databricks/sdk/service/settings.py +982 -99
- {databricks_sdk-0.55.0 → databricks_sdk-0.56.0}/databricks/sdk/service/sharing.py +3 -2
- {databricks_sdk-0.55.0 → databricks_sdk-0.56.0}/databricks/sdk/service/sql.py +218 -1
- databricks_sdk-0.56.0/databricks/sdk/version.py +1 -0
- {databricks_sdk-0.55.0 → databricks_sdk-0.56.0}/databricks_sdk.egg-info/PKG-INFO +1 -1
- {databricks_sdk-0.55.0 → databricks_sdk-0.56.0}/databricks_sdk.egg-info/SOURCES.txt +3 -0
- databricks_sdk-0.55.0/databricks/sdk/version.py +0 -1
- {databricks_sdk-0.55.0 → databricks_sdk-0.56.0}/CONTRIBUTING.md +0 -0
- {databricks_sdk-0.55.0 → databricks_sdk-0.56.0}/DCO +0 -0
- {databricks_sdk-0.55.0 → databricks_sdk-0.56.0}/LICENSE +0 -0
- {databricks_sdk-0.55.0 → databricks_sdk-0.56.0}/MANIFEST.in +0 -0
- {databricks_sdk-0.55.0 → databricks_sdk-0.56.0}/Makefile +0 -0
- {databricks_sdk-0.55.0 → databricks_sdk-0.56.0}/NOTICE +0 -0
- {databricks_sdk-0.55.0 → databricks_sdk-0.56.0}/README.md +0 -0
- {databricks_sdk-0.55.0 → databricks_sdk-0.56.0}/SECURITY.md +0 -0
- {databricks_sdk-0.55.0 → databricks_sdk-0.56.0}/databricks/__init__.py +0 -0
- {databricks_sdk-0.55.0 → databricks_sdk-0.56.0}/databricks/sdk/_base_client.py +0 -0
- {databricks_sdk-0.55.0 → databricks_sdk-0.56.0}/databricks/sdk/_property.py +0 -0
- {databricks_sdk-0.55.0 → databricks_sdk-0.56.0}/databricks/sdk/_widgets/__init__.py +0 -0
- {databricks_sdk-0.55.0 → databricks_sdk-0.56.0}/databricks/sdk/_widgets/default_widgets_utils.py +0 -0
- {databricks_sdk-0.55.0 → databricks_sdk-0.56.0}/databricks/sdk/_widgets/ipywidgets_utils.py +0 -0
- {databricks_sdk-0.55.0 → databricks_sdk-0.56.0}/databricks/sdk/azure.py +0 -0
- {databricks_sdk-0.55.0 → databricks_sdk-0.56.0}/databricks/sdk/casing.py +0 -0
- {databricks_sdk-0.55.0 → databricks_sdk-0.56.0}/databricks/sdk/clock.py +0 -0
- {databricks_sdk-0.55.0 → databricks_sdk-0.56.0}/databricks/sdk/config.py +0 -0
- {databricks_sdk-0.55.0 → databricks_sdk-0.56.0}/databricks/sdk/core.py +0 -0
- {databricks_sdk-0.55.0 → databricks_sdk-0.56.0}/databricks/sdk/credentials_provider.py +0 -0
- {databricks_sdk-0.55.0 → databricks_sdk-0.56.0}/databricks/sdk/data_plane.py +0 -0
- {databricks_sdk-0.55.0 → databricks_sdk-0.56.0}/databricks/sdk/dbutils.py +0 -0
- {databricks_sdk-0.55.0 → databricks_sdk-0.56.0}/databricks/sdk/environments.py +0 -0
- {databricks_sdk-0.55.0 → databricks_sdk-0.56.0}/databricks/sdk/errors/__init__.py +0 -0
- {databricks_sdk-0.55.0 → databricks_sdk-0.56.0}/databricks/sdk/errors/base.py +0 -0
- {databricks_sdk-0.55.0 → databricks_sdk-0.56.0}/databricks/sdk/errors/customizer.py +0 -0
- {databricks_sdk-0.55.0 → databricks_sdk-0.56.0}/databricks/sdk/errors/deserializer.py +0 -0
- {databricks_sdk-0.55.0 → databricks_sdk-0.56.0}/databricks/sdk/errors/details.py +0 -0
- {databricks_sdk-0.55.0 → databricks_sdk-0.56.0}/databricks/sdk/errors/mapper.py +0 -0
- {databricks_sdk-0.55.0 → databricks_sdk-0.56.0}/databricks/sdk/errors/overrides.py +0 -0
- {databricks_sdk-0.55.0 → databricks_sdk-0.56.0}/databricks/sdk/errors/parser.py +0 -0
- {databricks_sdk-0.55.0 → databricks_sdk-0.56.0}/databricks/sdk/errors/platform.py +0 -0
- {databricks_sdk-0.55.0 → databricks_sdk-0.56.0}/databricks/sdk/errors/private_link.py +0 -0
- {databricks_sdk-0.55.0 → databricks_sdk-0.56.0}/databricks/sdk/errors/sdk.py +0 -0
- {databricks_sdk-0.55.0 → databricks_sdk-0.56.0}/databricks/sdk/logger/__init__.py +0 -0
- {databricks_sdk-0.55.0 → databricks_sdk-0.56.0}/databricks/sdk/logger/round_trip_logger.py +0 -0
- {databricks_sdk-0.55.0 → databricks_sdk-0.56.0}/databricks/sdk/mixins/__init__.py +0 -0
- {databricks_sdk-0.55.0 → databricks_sdk-0.56.0}/databricks/sdk/mixins/compute.py +0 -0
- {databricks_sdk-0.55.0 → databricks_sdk-0.56.0}/databricks/sdk/mixins/files.py +0 -0
- {databricks_sdk-0.55.0 → databricks_sdk-0.56.0}/databricks/sdk/mixins/jobs.py +0 -0
- {databricks_sdk-0.55.0 → databricks_sdk-0.56.0}/databricks/sdk/mixins/open_ai_client.py +0 -0
- {databricks_sdk-0.55.0 → databricks_sdk-0.56.0}/databricks/sdk/mixins/workspace.py +0 -0
- {databricks_sdk-0.55.0 → databricks_sdk-0.56.0}/databricks/sdk/oauth.py +0 -0
- {databricks_sdk-0.55.0 → databricks_sdk-0.56.0}/databricks/sdk/oidc.py +0 -0
- {databricks_sdk-0.55.0 → databricks_sdk-0.56.0}/databricks/sdk/oidc_token_supplier.py +0 -0
- {databricks_sdk-0.55.0 → databricks_sdk-0.56.0}/databricks/sdk/py.typed +0 -0
- {databricks_sdk-0.55.0 → databricks_sdk-0.56.0}/databricks/sdk/retries.py +0 -0
- {databricks_sdk-0.55.0 → databricks_sdk-0.56.0}/databricks/sdk/runtime/__init__.py +0 -0
- {databricks_sdk-0.55.0 → databricks_sdk-0.56.0}/databricks/sdk/runtime/dbutils_stub.py +0 -0
- {databricks_sdk-0.55.0 → databricks_sdk-0.56.0}/databricks/sdk/service/__init__.py +0 -0
- {databricks_sdk-0.55.0 → databricks_sdk-0.56.0}/databricks/sdk/service/_internal.py +0 -0
- {databricks_sdk-0.55.0 → databricks_sdk-0.56.0}/databricks/sdk/service/apps.py +0 -0
- {databricks_sdk-0.55.0 → databricks_sdk-0.56.0}/databricks/sdk/service/marketplace.py +0 -0
- {databricks_sdk-0.55.0 → databricks_sdk-0.56.0}/databricks/sdk/service/oauth2.py +0 -0
- {databricks_sdk-0.55.0 → databricks_sdk-0.56.0}/databricks/sdk/service/vectorsearch.py +0 -0
- {databricks_sdk-0.55.0 → databricks_sdk-0.56.0}/databricks/sdk/service/workspace.py +0 -0
- {databricks_sdk-0.55.0 → databricks_sdk-0.56.0}/databricks/sdk/useragent.py +0 -0
- {databricks_sdk-0.55.0 → databricks_sdk-0.56.0}/databricks_sdk.egg-info/dependency_links.txt +0 -0
- {databricks_sdk-0.55.0 → databricks_sdk-0.56.0}/databricks_sdk.egg-info/requires.txt +0 -0
- {databricks_sdk-0.55.0 → databricks_sdk-0.56.0}/databricks_sdk.egg-info/top_level.txt +0 -0
- {databricks_sdk-0.55.0 → databricks_sdk-0.56.0}/pyproject.toml +0 -0
- {databricks_sdk-0.55.0 → databricks_sdk-0.56.0}/setup.cfg +0 -0
{databricks_sdk-0.55.0 → databricks_sdk-0.56.0}/CHANGELOG.md
@@ -1,5 +1,84 @@
 # Version changelog
 
+## Release v0.56.0
+
+### API Changes
+* Added `databricks.sdk.service.aibuilder`, `databricks.sdk.service.database` and `databricks.sdk.service.qualitymonitorv2` packages.
+* Added [w.custom_llms](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/aibuilder/custom_llms.html) workspace-level service.
+* Added [w.dashboard_email_subscriptions](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/settings/settings/dashboard_email_subscriptions.html) workspace-level service and [w.sql_results_download](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/settings/settings/sql_results_download.html) workspace-level service.
+* Added [w.database](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/database/database.html) workspace-level service.
+* Added [w.quality_monitor_v2](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/qualitymonitorv2/quality_monitor_v2.html) workspace-level service.
+* Added `update_private_endpoint_rule()` method for [a.network_connectivity](https://databricks-sdk-py.readthedocs.io/en/latest/account/settings/network_connectivity.html) account-level service.
+* Added `list_spaces()` method for [w.genie](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/dashboards/genie.html) workspace-level service.
+* Added `page_token` field for `databricks.sdk.service.billing.ListLogDeliveryRequest`.
+* Added `next_page_token` field for `databricks.sdk.service.billing.WrappedLogDeliveryConfigurations`.
+* Added `next_page_token` field for `databricks.sdk.service.catalog.EffectivePermissionsList`.
+* Added `max_results` and `page_token` fields for `databricks.sdk.service.catalog.GetEffectiveRequest`.
+* Added `max_results` and `page_token` fields for `databricks.sdk.service.catalog.GetGrantRequest`.
+* Added `next_page_token` field for `databricks.sdk.service.catalog.ListMetastoresResponse`.
+* Added `clean_room_name` field for `databricks.sdk.service.cleanrooms.CleanRoomAsset`.
+* [Breaking] Added `name` field for `databricks.sdk.service.cleanrooms.DeleteCleanRoomAssetRequest`.
+* [Breaking] Added `name` field for `databricks.sdk.service.cleanrooms.GetCleanRoomAssetRequest`.
+* Added `trigger_state` field for `databricks.sdk.service.jobs.BaseJob`.
+* Added `trigger_state` field for `databricks.sdk.service.jobs.Job`.
+* Added `dbt_cloud_output` field for `databricks.sdk.service.jobs.RunOutput`.
+* Added `dbt_cloud_task` field for `databricks.sdk.service.jobs.RunTask`.
+* Added `dbt_cloud_task` field for `databricks.sdk.service.jobs.SubmitTask`.
+* Added `dbt_cloud_task` field for `databricks.sdk.service.jobs.Task`.
+* Added `tags` field for `databricks.sdk.service.pipelines.CreatePipeline`.
+* Added `tags` field for `databricks.sdk.service.pipelines.EditPipeline`.
+* Added `tags` field for `databricks.sdk.service.pipelines.PipelineSpec`.
+* Added `max_provisioned_concurrency` and `min_provisioned_concurrency` fields for `databricks.sdk.service.serving.ServedEntityInput`.
+* Added `max_provisioned_concurrency` and `min_provisioned_concurrency` fields for `databricks.sdk.service.serving.ServedEntityOutput`.
+* Added `max_provisioned_concurrency` and `min_provisioned_concurrency` fields for `databricks.sdk.service.serving.ServedModelInput`.
+* Added `max_provisioned_concurrency` and `min_provisioned_concurrency` fields for `databricks.sdk.service.serving.ServedModelOutput`.
+* Added `endpoint_service` and `resource_names` fields for `databricks.sdk.service.settings.CreatePrivateEndpointRule`.
+* Added `aws_private_endpoint_rules` field for `databricks.sdk.service.settings.NccEgressTargetRules`.
+* Added `task_time_over_time_range` field for `databricks.sdk.service.sql.QueryMetrics`.
+* Added `deltasharing_catalog`, `foreign_catalog`, `internal_catalog`, `managed_catalog`, `managed_online_catalog`, `system_catalog` and `unknown_catalog_type` enum values for `databricks.sdk.service.catalog.CatalogType`.
+* Added `ga4_raw_data`, `power_bi`, `salesforce`, `salesforce_data_cloud`, `servicenow`, `unknown_connection_type` and `workday_raas` enum values for `databricks.sdk.service.catalog.ConnectionType`.
+* Added `oauth_access_token`, `oauth_m2m`, `oauth_refresh_token`, `oauth_resource_owner_password`, `oauth_u2m`, `oauth_u2m_mapping`, `oidc_token`, `pem_private_key`, `service_credential` and `unknown_credential_type` enum values for `databricks.sdk.service.catalog.CredentialType`.
+* Added `internal` and `internal_and_external` enum values for `databricks.sdk.service.catalog.DeltaSharingScopeEnum`.
+* Added `catalog`, `clean_room`, `connection`, `credential`, `external_location`, `external_metadata`, `function`, `metastore`, `pipeline`, `provider`, `recipient`, `schema`, `share`, `staging_table`, `storage_credential`, `table`, `unknown_securable_type` and `volume` enum values for `databricks.sdk.service.catalog.SecurableType`.
+* Added `cluster_migrated` enum value for `databricks.sdk.service.compute.EventType`.
+* Added `driver_unhealthy` enum value for `databricks.sdk.service.compute.TerminationReasonCode`.
+* Added `teradata` enum value for `databricks.sdk.service.pipelines.IngestionSourceType`.
+* Added `oidc_federation` enum value for `databricks.sdk.service.sharing.AuthenticationType`.
+* [Breaking] Changed `create()` method for [a.log_delivery](https://databricks-sdk-py.readthedocs.io/en/latest/account/billing/log_delivery.html) account-level service with new required argument order.
+* [Breaking] Changed `get()` method for [a.log_delivery](https://databricks-sdk-py.readthedocs.io/en/latest/account/billing/log_delivery.html) account-level service to return `databricks.sdk.service.billing.GetLogDeliveryConfigurationResponse` dataclass.
+* [Breaking] Changed `create_private_endpoint_rule()`, `delete_private_endpoint_rule()` and `get_private_endpoint_rule()` methods for [a.network_connectivity](https://databricks-sdk-py.readthedocs.io/en/latest/account/settings/network_connectivity.html) account-level service to return `databricks.sdk.service.settings.NccPrivateEndpointRule` dataclass.
+* [Breaking] Changed `list_private_endpoint_rules()` method for [a.network_connectivity](https://databricks-sdk-py.readthedocs.io/en/latest/account/settings/network_connectivity.html) account-level service to return `databricks.sdk.service.settings.ListPrivateEndpointRulesResponse` dataclass.
+* [Breaking] Changed `delete()` and `get()` methods for [w.clean_room_assets](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/cleanrooms/clean_room_assets.html) workspace-level service . Method path has changed.
+* [Breaking] Changed `delete()` and `get()` methods for [w.clean_room_assets](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/cleanrooms/clean_room_assets.html) workspace-level service with new required argument order.
+* [Breaking] Changed `get()` method for [w.grants](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/catalog/grants.html) workspace-level service to return `databricks.sdk.service.catalog.GetPermissionsResponse` dataclass.
+* [Breaking] Changed `update()` method for [w.grants](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/catalog/grants.html) workspace-level service to return `databricks.sdk.service.catalog.UpdatePermissionsResponse` dataclass.
+* [Breaking] Changed `list()` method for [w.metastores](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/catalog/metastores.html) workspace-level service to require request of `databricks.sdk.service.catalog.ListMetastoresRequest` dataclass.
+* Changed `account_id`, `credentials_id`, `log_type`, `output_format` and `storage_configuration_id` fields for `databricks.sdk.service.billing.LogDeliveryConfiguration` to be required.
+* Changed `message` and `status` fields for `databricks.sdk.service.billing.LogDeliveryStatus` to be required.
+* [Breaking] Changed `log_delivery_configuration` field for `databricks.sdk.service.billing.WrappedCreateLogDeliveryConfiguration` to be required.
+* [Breaking] Changed `securable_type` field for `databricks.sdk.service.catalog.ConnectionInfo` to type `databricks.sdk.service.catalog.SecurableType` dataclass.
+* [Breaking] Changed `securable_type` field for `databricks.sdk.service.catalog.GetEffectiveRequest` to type `str` dataclass.
+* [Breaking] Changed `securable_type` field for `databricks.sdk.service.catalog.GetGrantRequest` to type `str` dataclass.
+* [Breaking] Changed `delta_sharing_scope` field for `databricks.sdk.service.catalog.GetMetastoreSummaryResponse` to type `databricks.sdk.service.catalog.DeltaSharingScopeEnum` dataclass.
+* [Breaking] Changed `delta_sharing_scope` field for `databricks.sdk.service.catalog.MetastoreInfo` to type `databricks.sdk.service.catalog.DeltaSharingScopeEnum` dataclass.
+* [Breaking] Changed `catalog_type` field for `databricks.sdk.service.catalog.SchemaInfo` to type `databricks.sdk.service.catalog.CatalogType` dataclass.
+* [Breaking] Changed `delta_sharing_scope` field for `databricks.sdk.service.catalog.UpdateMetastore` to type `databricks.sdk.service.catalog.DeltaSharingScopeEnum` dataclass.
+* [Breaking] Changed `securable_type` field for `databricks.sdk.service.catalog.UpdatePermissions` to type `str` dataclass.
+* Changed `resource_id` field for `databricks.sdk.service.settings.CreatePrivateEndpointRule` to no longer be required.
+* [Breaking] Changed pagination for [NetworkConnectivityAPI.list_private_endpoint_rules](https://databricks-sdk-py.readthedocs.io/en/latest/account/settings/network_connectivity.html#databricks.sdk.service.settings.NetworkConnectivityAPI.list_private_endpoint_rules) method.
+* [Breaking] Removed [w.database_instances](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/catalog/database_instances.html) workspace-level service.
+* [Breaking] Removed [w.query_execution](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/dashboards/query_execution.html) workspace-level service.
+* [Breaking] Removed `update_ncc_azure_private_endpoint_rule_public()` method for [a.network_connectivity](https://databricks-sdk-py.readthedocs.io/en/latest/account/settings/network_connectivity.html) account-level service.
+* [Breaking] Removed `get_credentials_for_trace_data_download()`, `get_credentials_for_trace_data_upload()` and `list_logged_model_artifacts()` methods for [w.experiments](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/ml/experiments.html) workspace-level service.
+* [Breaking] Removed `get_published_dashboard_embedded()` method for [w.lakeview_embedded](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/dashboards/lakeview_embedded.html) workspace-level service.
+* [Breaking] Removed `asset_full_name` field for `databricks.sdk.service.cleanrooms.DeleteCleanRoomAssetRequest`.
+* [Breaking] Removed `asset_full_name` field for `databricks.sdk.service.cleanrooms.GetCleanRoomAssetRequest`.
+* [Breaking] Removed `internal` and `internal_and_external` enum values for `databricks.sdk.service.catalog.GetMetastoreSummaryResponseDeltaSharingScope`.
+* [Breaking] Removed `internal` and `internal_and_external` enum values for `databricks.sdk.service.catalog.MetastoreInfoDeltaSharingScope`.
+* [Breaking] Removed `catalog`, `clean_room`, `connection`, `credential`, `external_location`, `external_metadata`, `function`, `metastore`, `pipeline`, `provider`, `recipient`, `schema`, `share`, `staging_table`, `storage_credential`, `table`, `unknown_securable_type` and `volume` enum values for `databricks.sdk.service.catalog.SecurableType`.
+* [Breaking] Removed `internal` and `internal_and_external` enum values for `databricks.sdk.service.catalog.UpdateMetastoreDeltaSharingScope`.
+
+
 ## Release v0.55.0
 
 ### New Features and Improvements
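The three new service packages listed above are wired straight onto `WorkspaceClient` (see the `databricks/sdk/__init__.py` diff that follows). A minimal sketch of what that looks like from user code, assuming workspace authentication is already configured; the id passed to `custom_llms.get()` is a hypothetical placeholder:

```python
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()  # assumes default auth/profile is configured

# New workspace-level services added in 0.56.0
custom_llms = w.custom_llms                # databricks.sdk.service.aibuilder.CustomLlmsAPI
database = w.database                      # databricks.sdk.service.database.DatabaseAPI
quality_monitor_v2 = w.quality_monitor_v2  # databricks.sdk.service.qualitymonitorv2.QualityMonitorV2API

# CustomLlmsAPI.get(id) follows the signature added in aibuilder.py further down;
# "my-custom-llm-id" is a hypothetical placeholder.
llm = custom_llms.get(id="my-custom-llm-id")
print(llm.name, llm.optimization_state)
```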
{databricks_sdk-0.55.0 → databricks_sdk-0.56.0}/databricks/sdk/__init__.py
@@ -13,12 +13,14 @@ from databricks.sdk.mixins.files import DbfsExt, FilesExt
 from databricks.sdk.mixins.jobs import JobsExt
 from databricks.sdk.mixins.open_ai_client import ServingEndpointsExt
 from databricks.sdk.mixins.workspace import WorkspaceExt
+from databricks.sdk.service import aibuilder as pkg_aibuilder
 from databricks.sdk.service import apps as pkg_apps
 from databricks.sdk.service import billing as pkg_billing
 from databricks.sdk.service import catalog as pkg_catalog
 from databricks.sdk.service import cleanrooms as pkg_cleanrooms
 from databricks.sdk.service import compute as pkg_compute
 from databricks.sdk.service import dashboards as pkg_dashboards
+from databricks.sdk.service import database as pkg_database
 from databricks.sdk.service import files as pkg_files
 from databricks.sdk.service import iam as pkg_iam
 from databricks.sdk.service import jobs as pkg_jobs
@@ -27,12 +29,14 @@ from databricks.sdk.service import ml as pkg_ml
 from databricks.sdk.service import oauth2 as pkg_oauth2
 from databricks.sdk.service import pipelines as pkg_pipelines
 from databricks.sdk.service import provisioning as pkg_provisioning
+from databricks.sdk.service import qualitymonitorv2 as pkg_qualitymonitorv2
 from databricks.sdk.service import serving as pkg_serving
 from databricks.sdk.service import settings as pkg_settings
 from databricks.sdk.service import sharing as pkg_sharing
 from databricks.sdk.service import sql as pkg_sql
 from databricks.sdk.service import vectorsearch as pkg_vectorsearch
 from databricks.sdk.service import workspace as pkg_workspace
+from databricks.sdk.service.aibuilder import CustomLlmsAPI
 from databricks.sdk.service.apps import AppsAPI
 from databricks.sdk.service.billing import (BillableUsageAPI, BudgetPolicyAPI,
                                             BudgetsAPI, LogDeliveryAPI,
@@ -42,7 +46,6 @@ from databricks.sdk.service.catalog import (AccountMetastoreAssignmentsAPI,
                                             AccountStorageCredentialsAPI,
                                             ArtifactAllowlistsAPI, CatalogsAPI,
                                             ConnectionsAPI, CredentialsAPI,
-                                            DatabaseInstancesAPI,
                                             ExternalLocationsAPI, FunctionsAPI,
                                             GrantsAPI, MetastoresAPI,
                                             ModelVersionsAPI, OnlineTablesAPI,
@@ -65,8 +68,8 @@ from databricks.sdk.service.compute import (ClusterPoliciesAPI, ClustersAPI,
                                             PolicyComplianceForClustersAPI,
                                             PolicyFamiliesAPI)
 from databricks.sdk.service.dashboards import (GenieAPI, LakeviewAPI,
-                                               LakeviewEmbeddedAPI
-
+                                               LakeviewEmbeddedAPI)
+from databricks.sdk.service.database import DatabaseAPI
 from databricks.sdk.service.files import DbfsAPI, FilesAPI
 from databricks.sdk.service.iam import (AccessControlAPI,
                                         AccountAccessControlAPI,
@@ -98,6 +101,7 @@ from databricks.sdk.service.provisioning import (CredentialsAPI,
                                                  NetworksAPI, PrivateAccessAPI,
                                                  StorageAPI, VpcEndpointsAPI,
                                                  Workspace, WorkspacesAPI)
+from databricks.sdk.service.qualitymonitorv2 import QualityMonitorV2API
 from databricks.sdk.service.serving import (ServingEndpointsAPI,
                                             ServingEndpointsDataPlaneAPI)
 from databricks.sdk.service.settings import (
@@ -105,16 +109,17 @@ from databricks.sdk.service.settings import (
     AibiDashboardEmbeddingAccessPolicyAPI,
     AibiDashboardEmbeddingApprovedDomainsAPI, AutomaticClusterUpdateAPI,
     ComplianceSecurityProfileAPI, CredentialsManagerAPI,
-    CspEnablementAccountAPI,
-
-
-
-    EsmEnablementAccountAPI, IpAccessListsAPI,
+    CspEnablementAccountAPI, DashboardEmailSubscriptionsAPI,
+    DefaultNamespaceAPI, DisableLegacyAccessAPI, DisableLegacyDbfsAPI,
+    DisableLegacyFeaturesAPI, EnableExportNotebookAPI, EnableIpAccessListsAPI,
+    EnableNotebookTableClipboardAPI, EnableResultsDownloadingAPI,
+    EnhancedSecurityMonitoringAPI, EsmEnablementAccountAPI, IpAccessListsAPI,
     LlmProxyPartnerPoweredAccountAPI, LlmProxyPartnerPoweredEnforceAPI,
     LlmProxyPartnerPoweredWorkspaceAPI, NetworkConnectivityAPI,
     NetworkPoliciesAPI, NotificationDestinationsAPI, PersonalComputeAPI,
-    RestrictWorkspaceAdminsAPI, SettingsAPI,
-    WorkspaceConfAPI,
+    RestrictWorkspaceAdminsAPI, SettingsAPI, SqlResultsDownloadAPI,
+    TokenManagementAPI, TokensAPI, WorkspaceConfAPI,
+    WorkspaceNetworkConfigurationAPI)
 from databricks.sdk.service.sharing import (ProvidersAPI,
                                             RecipientActivationAPI,
                                             RecipientFederationPoliciesAPI,
@@ -251,10 +256,11 @@ class WorkspaceClient:
         self._credentials = pkg_catalog.CredentialsAPI(self._api_client)
         self._credentials_manager = pkg_settings.CredentialsManagerAPI(self._api_client)
         self._current_user = pkg_iam.CurrentUserAPI(self._api_client)
+        self._custom_llms = pkg_aibuilder.CustomLlmsAPI(self._api_client)
         self._dashboard_widgets = pkg_sql.DashboardWidgetsAPI(self._api_client)
         self._dashboards = pkg_sql.DashboardsAPI(self._api_client)
         self._data_sources = pkg_sql.DataSourcesAPI(self._api_client)
-        self.
+        self._database = pkg_database.DatabaseAPI(self._api_client)
         self._dbfs = DbfsExt(self._api_client)
         self._dbsql_permissions = pkg_sql.DbsqlPermissionsAPI(self._api_client)
         self._experiments = pkg_ml.ExperimentsAPI(self._api_client)
@@ -294,10 +300,10 @@ class WorkspaceClient:
         )
         self._provider_providers = pkg_marketplace.ProviderProvidersAPI(self._api_client)
         self._providers = pkg_sharing.ProvidersAPI(self._api_client)
+        self._quality_monitor_v2 = pkg_qualitymonitorv2.QualityMonitorV2API(self._api_client)
         self._quality_monitors = pkg_catalog.QualityMonitorsAPI(self._api_client)
         self._queries = pkg_sql.QueriesAPI(self._api_client)
         self._queries_legacy = pkg_sql.QueriesLegacyAPI(self._api_client)
-        self._query_execution = pkg_dashboards.QueryExecutionAPI(self._api_client)
         self._query_history = pkg_sql.QueryHistoryAPI(self._api_client)
         self._query_visualizations = pkg_sql.QueryVisualizationsAPI(self._api_client)
         self._query_visualizations_legacy = pkg_sql.QueryVisualizationsLegacyAPI(self._api_client)
@@ -372,7 +378,7 @@ class WorkspaceClient:
 
     @property
     def alerts_v2(self) -> pkg_sql.AlertsV2API:
-        """
+        """New version of SQL Alerts."""
         return self._alerts_v2
 
     @property
@@ -465,6 +471,11 @@ class WorkspaceClient:
         """This API allows retrieving information about currently authenticated user or service principal."""
         return self._current_user
 
+    @property
+    def custom_llms(self) -> pkg_aibuilder.CustomLlmsAPI:
+        """The Custom LLMs service manages state and powers the UI for the Custom LLM product."""
+        return self._custom_llms
+
     @property
     def dashboard_widgets(self) -> pkg_sql.DashboardWidgetsAPI:
         """This is an evolving API that facilitates the addition and removal of widgets from existing dashboards within the Databricks Workspace."""
@@ -481,9 +492,9 @@ class WorkspaceClient:
         return self._data_sources
 
     @property
-    def
+    def database(self) -> pkg_database.DatabaseAPI:
         """Database Instances provide access to a database via REST API or direct SQL."""
-        return self.
+        return self._database
 
     @property
     def dbfs(self) -> DbfsExt:
@@ -670,6 +681,11 @@ class WorkspaceClient:
         """A data provider is an object representing the organization in the real world who shares the data."""
         return self._providers
 
+    @property
+    def quality_monitor_v2(self) -> pkg_qualitymonitorv2.QualityMonitorV2API:
+        """Manage data quality of UC objects (currently support `schema`)."""
+        return self._quality_monitor_v2
+
     @property
     def quality_monitors(self) -> pkg_catalog.QualityMonitorsAPI:
         """A monitor computes and monitors data or model quality metrics for a table over time."""
@@ -685,11 +701,6 @@ class WorkspaceClient:
         """These endpoints are used for CRUD operations on query definitions."""
         return self._queries_legacy
 
-    @property
-    def query_execution(self) -> pkg_dashboards.QueryExecutionAPI:
-        """Query execution APIs for AI / BI Dashboards."""
-        return self._query_execution
-
     @property
     def query_history(self) -> pkg_sql.QueryHistoryAPI:
         """A service responsible for storing and retrieving the list of queries run against SQL endpoints and serverless compute."""
@@ -1021,7 +1032,7 @@ class AccountClient:
 
     @property
     def log_delivery(self) -> pkg_billing.LogDeliveryAPI:
-        """These APIs manage
+        """These APIs manage Log delivery configurations for this account."""
         return self._log_delivery
 
     @property
@@ -1116,7 +1127,7 @@ class AccountClient:
 
     @property
     def workspace_network_configuration(self) -> pkg_settings.WorkspaceNetworkConfigurationAPI:
-        """These APIs allow configuration of network settings for Databricks workspaces."""
+        """These APIs allow configuration of network settings for Databricks workspaces by selecting which network policy to associate with the workspace."""
        return self._workspace_network_configuration
 
     @property
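The hunks above drop the `query_execution` and `database_instances` properties from `WorkspaceClient` and add `database`, `custom_llms` and `quality_monitor_v2`. A hedged sketch of how calling code might absorb the removals; the `getattr` fallback is an illustrative migration pattern, not something the SDK itself provides:

```python
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

# Removed in 0.56.0 (accessing them now raises AttributeError):
#   w.query_execution      -> dashboards QueryExecutionAPI
#   w.database_instances   -> catalog DatabaseInstancesAPI

# Added in 0.56.0:
db = w.database  # databricks.sdk.service.database.DatabaseAPI

# Sketch of a version-tolerant lookup for code that must run on both 0.55.x and 0.56.x:
db_service = getattr(w, "database", None) or getattr(w, "database_instances", None)
```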
databricks_sdk-0.56.0/databricks/sdk/service/aibuilder.py
@@ -0,0 +1,364 @@
+# Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+from __future__ import annotations
+
+import logging
+from dataclasses import dataclass
+from enum import Enum
+from typing import Any, Dict, List, Optional
+
+from ._internal import _enum, _from_dict, _repeated_dict
+
+_LOG = logging.getLogger("databricks.sdk")
+
+
+# all definitions in this file are in alphabetical order
+
+
+@dataclass
+class CancelCustomLlmOptimizationRunRequest:
+    id: Optional[str] = None
+
+
+@dataclass
+class CancelResponse:
+    def as_dict(self) -> dict:
+        """Serializes the CancelResponse into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CancelResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, Any]) -> CancelResponse:
+        """Deserializes the CancelResponse from a dictionary."""
+        return cls()
+
+
+@dataclass
+class CustomLlm:
+    name: str
+    """Name of the custom LLM"""
+
+    instructions: str
+    """Instructions for the custom LLM to follow"""
+
+    optimization_state: State
+    """If optimization is kicked off, tracks the state of the custom LLM"""
+
+    agent_artifact_path: Optional[str] = None
+
+    creation_time: Optional[str] = None
+    """Creation timestamp of the custom LLM"""
+
+    creator: Optional[str] = None
+    """Creator of the custom LLM"""
+
+    datasets: Optional[List[Dataset]] = None
+    """Datasets used for training and evaluating the model, not for inference"""
+
+    endpoint_name: Optional[str] = None
+    """Name of the endpoint that will be used to serve the custom LLM"""
+
+    guidelines: Optional[List[str]] = None
+    """Guidelines for the custom LLM to adhere to"""
+
+    id: Optional[str] = None
+
+    def as_dict(self) -> dict:
+        """Serializes the CustomLlm into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.agent_artifact_path is not None:
+            body["agent_artifact_path"] = self.agent_artifact_path
+        if self.creation_time is not None:
+            body["creation_time"] = self.creation_time
+        if self.creator is not None:
+            body["creator"] = self.creator
+        if self.datasets:
+            body["datasets"] = [v.as_dict() for v in self.datasets]
+        if self.endpoint_name is not None:
+            body["endpoint_name"] = self.endpoint_name
+        if self.guidelines:
+            body["guidelines"] = [v for v in self.guidelines]
+        if self.id is not None:
+            body["id"] = self.id
+        if self.instructions is not None:
+            body["instructions"] = self.instructions
+        if self.name is not None:
+            body["name"] = self.name
+        if self.optimization_state is not None:
+            body["optimization_state"] = self.optimization_state.value
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CustomLlm into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.agent_artifact_path is not None:
+            body["agent_artifact_path"] = self.agent_artifact_path
+        if self.creation_time is not None:
+            body["creation_time"] = self.creation_time
+        if self.creator is not None:
+            body["creator"] = self.creator
+        if self.datasets:
+            body["datasets"] = self.datasets
+        if self.endpoint_name is not None:
+            body["endpoint_name"] = self.endpoint_name
+        if self.guidelines:
+            body["guidelines"] = self.guidelines
+        if self.id is not None:
+            body["id"] = self.id
+        if self.instructions is not None:
+            body["instructions"] = self.instructions
+        if self.name is not None:
+            body["name"] = self.name
+        if self.optimization_state is not None:
+            body["optimization_state"] = self.optimization_state
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, Any]) -> CustomLlm:
+        """Deserializes the CustomLlm from a dictionary."""
+        return cls(
+            agent_artifact_path=d.get("agent_artifact_path", None),
+            creation_time=d.get("creation_time", None),
+            creator=d.get("creator", None),
+            datasets=_repeated_dict(d, "datasets", Dataset),
+            endpoint_name=d.get("endpoint_name", None),
+            guidelines=d.get("guidelines", None),
+            id=d.get("id", None),
+            instructions=d.get("instructions", None),
+            name=d.get("name", None),
+            optimization_state=_enum(d, "optimization_state", State),
+        )
+
+
+@dataclass
+class Dataset:
+    table: Table
+
+    def as_dict(self) -> dict:
+        """Serializes the Dataset into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.table:
+            body["table"] = self.table.as_dict()
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the Dataset into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.table:
+            body["table"] = self.table
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, Any]) -> Dataset:
+        """Deserializes the Dataset from a dictionary."""
+        return cls(table=_from_dict(d, "table", Table))
+
+
+@dataclass
+class StartCustomLlmOptimizationRunRequest:
+    id: Optional[str] = None
+    """The Id of the tile."""
+
+
+class State(Enum):
+    """States of Custom LLM optimization lifecycle."""
+
+    CANCELLED = "CANCELLED"
+    COMPLETED = "COMPLETED"
+    CREATED = "CREATED"
+    FAILED = "FAILED"
+    PENDING = "PENDING"
+    RUNNING = "RUNNING"
+
+
+@dataclass
+class Table:
+    table_path: str
+    """Full UC table path in catalog.schema.table_name format"""
+
+    request_col: str
+    """Name of the request column"""
+
+    response_col: Optional[str] = None
+    """Optional: Name of the response column if the data is labeled"""
+
+    def as_dict(self) -> dict:
+        """Serializes the Table into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.request_col is not None:
+            body["request_col"] = self.request_col
+        if self.response_col is not None:
+            body["response_col"] = self.response_col
+        if self.table_path is not None:
+            body["table_path"] = self.table_path
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the Table into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.request_col is not None:
+            body["request_col"] = self.request_col
+        if self.response_col is not None:
+            body["response_col"] = self.response_col
+        if self.table_path is not None:
+            body["table_path"] = self.table_path
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, Any]) -> Table:
+        """Deserializes the Table from a dictionary."""
+        return cls(
+            request_col=d.get("request_col", None),
+            response_col=d.get("response_col", None),
+            table_path=d.get("table_path", None),
+        )
+
+
+@dataclass
+class UpdateCustomLlmRequest:
+    custom_llm: CustomLlm
+    """The CustomLlm containing the fields which should be updated."""
+
+    update_mask: str
+    """The list of the CustomLlm fields to update. These should correspond to the values (or lack
+    thereof) present in `custom_llm`.
+
+    The field mask must be a single string, with multiple fields separated by commas (no spaces).
+    The field path is relative to the resource object, using a dot (`.`) to navigate sub-fields
+    (e.g., `author.given_name`). Specification of elements in sequence or map fields is not allowed,
+    as only the entire collection field can be specified. Field names must exactly match the
+    resource field names.
+
+    A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the
+    fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the
+    API changes in the future."""
+
+    id: Optional[str] = None
+    """The id of the custom llm"""
+
+    def as_dict(self) -> dict:
+        """Serializes the UpdateCustomLlmRequest into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.custom_llm:
+            body["custom_llm"] = self.custom_llm.as_dict()
+        if self.id is not None:
+            body["id"] = self.id
+        if self.update_mask is not None:
+            body["update_mask"] = self.update_mask
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UpdateCustomLlmRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.custom_llm:
+            body["custom_llm"] = self.custom_llm
+        if self.id is not None:
+            body["id"] = self.id
+        if self.update_mask is not None:
+            body["update_mask"] = self.update_mask
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, Any]) -> UpdateCustomLlmRequest:
+        """Deserializes the UpdateCustomLlmRequest from a dictionary."""
+        return cls(
+            custom_llm=_from_dict(d, "custom_llm", CustomLlm),
+            id=d.get("id", None),
+            update_mask=d.get("update_mask", None),
+        )
+
+
+class CustomLlmsAPI:
+    """The Custom LLMs service manages state and powers the UI for the Custom LLM product."""
+
+    def __init__(self, api_client):
+        self._api = api_client
+
+    def cancel(self, id: str):
+        """Cancel a Custom LLM Optimization Run.
+
+        :param id: str
+
+
+        """
+
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
+
+        self._api.do("POST", f"/api/2.0/custom-llms/{id}/optimize/cancel", headers=headers)
+
+    def create(self, id: str) -> CustomLlm:
+        """Start a Custom LLM Optimization Run.
+
+        :param id: str
+          The Id of the tile.
+
+        :returns: :class:`CustomLlm`
+        """
+
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
+
+        res = self._api.do("POST", f"/api/2.0/custom-llms/{id}/optimize", headers=headers)
+        return CustomLlm.from_dict(res)
+
+    def get(self, id: str) -> CustomLlm:
+        """Get a Custom LLM.
+
+        :param id: str
+          The id of the custom llm
+
+        :returns: :class:`CustomLlm`
+        """
+
+        headers = {
+            "Accept": "application/json",
+        }
+
+        res = self._api.do("GET", f"/api/2.0/custom-llms/{id}", headers=headers)
+        return CustomLlm.from_dict(res)
+
+    def update(self, id: str, custom_llm: CustomLlm, update_mask: str) -> CustomLlm:
+        """Update a Custom LLM.
+
+        :param id: str
+          The id of the custom llm
+        :param custom_llm: :class:`CustomLlm`
+          The CustomLlm containing the fields which should be updated.
+        :param update_mask: str
+          The list of the CustomLlm fields to update. These should correspond to the values (or lack thereof)
+          present in `custom_llm`.
+
+          The field mask must be a single string, with multiple fields separated by commas (no spaces). The
+          field path is relative to the resource object, using a dot (`.`) to navigate sub-fields (e.g.,
+          `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only
+          the entire collection field can be specified. Field names must exactly match the resource field
+          names.
+
+          A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the
+          fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API
+          changes in the future.
+
+        :returns: :class:`CustomLlm`
+        """
+        body = {}
+        if custom_llm is not None:
+            body["custom_llm"] = custom_llm.as_dict()
+        if update_mask is not None:
+            body["update_mask"] = update_mask
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
+
+        res = self._api.do("PATCH", f"/api/2.0/custom-llms/{id}", body=body, headers=headers)
+        return CustomLlm.from_dict(res)