databricks-sdk 0.57.0__py3-none-any.whl → 0.58.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of databricks-sdk might be problematic. Click here for more details.

Files changed (30)
  1. databricks/sdk/__init__.py +25 -4
  2. databricks/sdk/service/aibuilder.py +0 -36
  3. databricks/sdk/service/apps.py +1 -3
  4. databricks/sdk/service/billing.py +53 -23
  5. databricks/sdk/service/catalog.py +1692 -150
  6. databricks/sdk/service/cleanrooms.py +3 -22
  7. databricks/sdk/service/compute.py +245 -322
  8. databricks/sdk/service/dashboards.py +129 -162
  9. databricks/sdk/service/database.py +612 -97
  10. databricks/sdk/service/iam.py +3 -3
  11. databricks/sdk/service/jobs.py +6 -129
  12. databricks/sdk/service/marketplace.py +3 -2
  13. databricks/sdk/service/ml.py +713 -262
  14. databricks/sdk/service/oauth2.py +0 -1
  15. databricks/sdk/service/pipelines.py +12 -29
  16. databricks/sdk/service/provisioning.py +7 -125
  17. databricks/sdk/service/qualitymonitorv2.py +0 -18
  18. databricks/sdk/service/serving.py +39 -13
  19. databricks/sdk/service/settings.py +11 -128
  20. databricks/sdk/service/sharing.py +3 -9
  21. databricks/sdk/service/sql.py +94 -74
  22. databricks/sdk/service/vectorsearch.py +0 -19
  23. databricks/sdk/service/workspace.py +2 -6
  24. databricks/sdk/version.py +1 -1
  25. {databricks_sdk-0.57.0.dist-info → databricks_sdk-0.58.0.dist-info}/METADATA +1 -1
  26. {databricks_sdk-0.57.0.dist-info → databricks_sdk-0.58.0.dist-info}/RECORD +30 -30
  27. {databricks_sdk-0.57.0.dist-info → databricks_sdk-0.58.0.dist-info}/WHEEL +0 -0
  28. {databricks_sdk-0.57.0.dist-info → databricks_sdk-0.58.0.dist-info}/licenses/LICENSE +0 -0
  29. {databricks_sdk-0.57.0.dist-info → databricks_sdk-0.58.0.dist-info}/licenses/NOTICE +0 -0
  30. {databricks_sdk-0.57.0.dist-info → databricks_sdk-0.58.0.dist-info}/top_level.txt +0 -0
@@ -46,7 +46,9 @@ from databricks.sdk.service.catalog import (AccountMetastoreAssignmentsAPI,
46
46
  AccountStorageCredentialsAPI,
47
47
  ArtifactAllowlistsAPI, CatalogsAPI,
48
48
  ConnectionsAPI, CredentialsAPI,
49
- ExternalLocationsAPI, FunctionsAPI,
49
+ ExternalLineageAPI,
50
+ ExternalLocationsAPI,
51
+ ExternalMetadataAPI, FunctionsAPI,
50
52
  GrantsAPI, MetastoresAPI,
51
53
  ModelVersionsAPI, OnlineTablesAPI,
52
54
  QualityMonitorsAPI,
@@ -88,7 +90,8 @@ from databricks.sdk.service.marketplace import (
88
90
  ProviderListingsAPI, ProviderPersonalizationRequestsAPI,
89
91
  ProviderProviderAnalyticsDashboardsAPI, ProviderProvidersAPI)
90
92
  from databricks.sdk.service.ml import (ExperimentsAPI, FeatureStoreAPI,
91
- ForecastingAPI, ModelRegistryAPI)
93
+ ForecastingAPI, MaterializedFeaturesAPI,
94
+ ModelRegistryAPI)
92
95
  from databricks.sdk.service.oauth2 import (AccountFederationPolicyAPI,
93
96
  CustomAppIntegrationAPI,
94
97
  OAuthPublishedAppsAPI,
@@ -264,7 +267,9 @@ class WorkspaceClient:
264
267
  self._dbfs = DbfsExt(self._api_client)
265
268
  self._dbsql_permissions = pkg_sql.DbsqlPermissionsAPI(self._api_client)
266
269
  self._experiments = pkg_ml.ExperimentsAPI(self._api_client)
270
+ self._external_lineage = pkg_catalog.ExternalLineageAPI(self._api_client)
267
271
  self._external_locations = pkg_catalog.ExternalLocationsAPI(self._api_client)
272
+ self._external_metadata = pkg_catalog.ExternalMetadataAPI(self._api_client)
268
273
  self._feature_store = pkg_ml.FeatureStoreAPI(self._api_client)
269
274
  self._files = _make_files_client(self._api_client, self._config)
270
275
  self._functions = pkg_catalog.FunctionsAPI(self._api_client)
@@ -280,6 +285,7 @@ class WorkspaceClient:
280
285
  self._lakeview = pkg_dashboards.LakeviewAPI(self._api_client)
281
286
  self._lakeview_embedded = pkg_dashboards.LakeviewEmbeddedAPI(self._api_client)
282
287
  self._libraries = pkg_compute.LibrariesAPI(self._api_client)
288
+ self._materialized_features = pkg_ml.MaterializedFeaturesAPI(self._api_client)
283
289
  self._metastores = pkg_catalog.MetastoresAPI(self._api_client)
284
290
  self._model_registry = pkg_ml.ModelRegistryAPI(self._api_client)
285
291
  self._model_versions = pkg_catalog.ModelVersionsAPI(self._api_client)
@@ -414,7 +420,7 @@ class WorkspaceClient:
414
420
 
415
421
  @property
416
422
  def clean_rooms(self) -> pkg_cleanrooms.CleanRoomsAPI:
417
- """A clean room uses Delta Sharing and serverless compute to provide a secure and privacy-protecting environment where multiple parties can work together on sensitive enterprise data without direct access to each others data."""
423
+ """A clean room uses Delta Sharing and serverless compute to provide a secure and privacy-protecting environment where multiple parties can work together on sensitive enterprise data without direct access to each other's data."""
418
424
  return self._clean_rooms
419
425
 
420
426
  @property
@@ -512,11 +518,21 @@ class WorkspaceClient:
512
518
  """Experiments are the primary unit of organization in MLflow; all MLflow runs belong to an experiment."""
513
519
  return self._experiments
514
520
 
521
+ @property
522
+ def external_lineage(self) -> pkg_catalog.ExternalLineageAPI:
523
+ """External Lineage APIs enable defining and managing lineage relationships between Databricks objects and external systems."""
524
+ return self._external_lineage
525
+
515
526
  @property
516
527
  def external_locations(self) -> pkg_catalog.ExternalLocationsAPI:
517
528
  """An external location is an object that combines a cloud storage path with a storage credential that authorizes access to the cloud storage path."""
518
529
  return self._external_locations
519
530
 
531
+ @property
532
+ def external_metadata(self) -> pkg_catalog.ExternalMetadataAPI:
533
+ """External Metadata objects enable customers to register and manage metadata about external systems within Unity Catalog."""
534
+ return self._external_metadata
535
+
520
536
  @property
521
537
  def feature_store(self) -> pkg_ml.FeatureStoreAPI:
522
538
  """A feature store is a centralized repository that enables data scientists to find and share features."""
@@ -592,6 +608,11 @@ class WorkspaceClient:
592
608
  """The Libraries API allows you to install and uninstall libraries and get the status of libraries on a cluster."""
593
609
  return self._libraries
594
610
 
611
+ @property
612
+ def materialized_features(self) -> pkg_ml.MaterializedFeaturesAPI:
613
+ """Materialized Features are columns in tables and views that can be directly used as features to train and serve ML models."""
614
+ return self._materialized_features
615
+
595
616
  @property
596
617
  def metastores(self) -> pkg_catalog.MetastoresAPI:
597
618
  """A metastore is the top-level container of objects in Unity Catalog."""
@@ -1038,7 +1059,7 @@ class AccountClient:
1038
1059
 
1039
1060
  @property
1040
1061
  def log_delivery(self) -> pkg_billing.LogDeliveryAPI:
1041
- """These APIs manage Log delivery configurations for this account."""
1062
+ """These APIs manage log delivery configurations for this account."""
1042
1063
  return self._log_delivery
1043
1064
 
1044
1065
  @property
@@ -20,24 +20,6 @@ class CancelCustomLlmOptimizationRunRequest:
20
20
  id: Optional[str] = None
21
21
 
22
22
 
23
- @dataclass
24
- class CancelOptimizeResponse:
25
- def as_dict(self) -> dict:
26
- """Serializes the CancelOptimizeResponse into a dictionary suitable for use as a JSON request body."""
27
- body = {}
28
- return body
29
-
30
- def as_shallow_dict(self) -> dict:
31
- """Serializes the CancelOptimizeResponse into a shallow dictionary of its immediate attributes."""
32
- body = {}
33
- return body
34
-
35
- @classmethod
36
- def from_dict(cls, d: Dict[str, Any]) -> CancelOptimizeResponse:
37
- """Deserializes the CancelOptimizeResponse from a dictionary."""
38
- return cls()
39
-
40
-
41
23
  @dataclass
42
24
  class CreateCustomLlmRequest:
43
25
  name: str
@@ -221,24 +203,6 @@ class Dataset:
221
203
  return cls(table=_from_dict(d, "table", Table))
222
204
 
223
205
 
224
- @dataclass
225
- class DeleteCustomLlmResponse:
226
- def as_dict(self) -> dict:
227
- """Serializes the DeleteCustomLlmResponse into a dictionary suitable for use as a JSON request body."""
228
- body = {}
229
- return body
230
-
231
- def as_shallow_dict(self) -> dict:
232
- """Serializes the DeleteCustomLlmResponse into a shallow dictionary of its immediate attributes."""
233
- body = {}
234
- return body
235
-
236
- @classmethod
237
- def from_dict(cls, d: Dict[str, Any]) -> DeleteCustomLlmResponse:
238
- """Deserializes the DeleteCustomLlmResponse from a dictionary."""
239
- return cls()
240
-
241
-
242
206
  @dataclass
243
207
  class StartCustomLlmOptimizationRunRequest:
244
208
  id: Optional[str] = None
@@ -222,7 +222,6 @@ class AppAccessControlRequest:
222
222
  """name of the group"""
223
223
 
224
224
  permission_level: Optional[AppPermissionLevel] = None
225
- """Permission level"""
226
225
 
227
226
  service_principal_name: Optional[str] = None
228
227
  """application ID of a service principal"""
@@ -491,7 +490,6 @@ class AppPermission:
491
490
  inherited_from_object: Optional[List[str]] = None
492
491
 
493
492
  permission_level: Optional[AppPermissionLevel] = None
494
- """Permission level"""
495
493
 
496
494
  def as_dict(self) -> dict:
497
495
  """Serializes the AppPermission into a dictionary suitable for use as a JSON request body."""
@@ -577,7 +575,6 @@ class AppPermissionsDescription:
577
575
  description: Optional[str] = None
578
576
 
579
577
  permission_level: Optional[AppPermissionLevel] = None
580
- """Permission level"""
581
578
 
582
579
  def as_dict(self) -> dict:
583
580
  """Serializes the AppPermissionsDescription into a dictionary suitable for use as a JSON request body."""
@@ -1279,6 +1276,7 @@ class AppsAPI:
1279
1276
  :param app_name: str
1280
1277
  The name of the app.
1281
1278
  :param app_deployment: :class:`AppDeployment`
1279
+ The app deployment configuration.
1282
1280
 
1283
1281
  :returns:
1284
1282
  Long-running operation waiter for :class:`AppDeployment`.
@@ -863,24 +863,6 @@ class DeleteBudgetConfigurationResponse:
863
863
  return cls()
864
864
 
865
865
 
866
- @dataclass
867
- class DeleteResponse:
868
- def as_dict(self) -> dict:
869
- """Serializes the DeleteResponse into a dictionary suitable for use as a JSON request body."""
870
- body = {}
871
- return body
872
-
873
- def as_shallow_dict(self) -> dict:
874
- """Serializes the DeleteResponse into a shallow dictionary of its immediate attributes."""
875
- body = {}
876
- return body
877
-
878
- @classmethod
879
- def from_dict(cls, d: Dict[str, Any]) -> DeleteResponse:
880
- """Deserializes the DeleteResponse from a dictionary."""
881
- return cls()
882
-
883
-
884
866
  class DeliveryStatus(Enum):
885
867
  """* The status string for log delivery. Possible values are: `CREATED`: There were no log delivery
886
868
  attempts since the config was created. `SUCCEEDED`: The latest attempt of log delivery has
@@ -1626,7 +1608,6 @@ class WrappedCreateLogDeliveryConfiguration:
1626
1608
  """* Properties of the new log delivery configuration."""
1627
1609
 
1628
1610
  log_delivery_configuration: CreateLogDeliveryConfigurationParams
1629
- """* Log Delivery Configuration"""
1630
1611
 
1631
1612
  def as_dict(self) -> dict:
1632
1613
  """Serializes the WrappedCreateLogDeliveryConfiguration into a dictionary suitable for use as a JSON request body."""
@@ -1883,7 +1864,8 @@ class BudgetPolicyAPI:
1883
1864
  :param policy_id: str
1884
1865
  The Id of the policy. This field is generated by Databricks and globally unique.
1885
1866
  :param policy: :class:`BudgetPolicy`
1886
- Contains the BudgetPolicy details.
1867
+ The policy to update. `creator_user_id` cannot be specified in the request. All other fields must be
1868
+ specified even if not changed. The `policy_id` is used to identify the policy to update.
1887
1869
  :param limit_config: :class:`LimitConfig` (optional)
1888
1870
  DEPRECATED. This is redundant field as LimitConfig is part of the BudgetPolicy
1889
1871
 
@@ -2022,8 +2004,57 @@ class BudgetsAPI:
2022
2004
 
2023
2005
 
2024
2006
  class LogDeliveryAPI:
2025
- """These APIs manage Log delivery configurations for this account. Log delivery configs enable you to
2026
- configure the delivery of the specified type of logs to your storage account."""
2007
+ """These APIs manage log delivery configurations for this account. The two supported log types for this API
2008
+ are _billable usage logs_ and _audit logs_. This feature is in Public Preview. This feature works with all
2009
+ account ID types.
2010
+
2011
+ Log delivery works with all account types. However, if your account is on the E2 version of the platform
2012
+ or on a select custom plan that allows multiple workspaces per account, you can optionally configure
2013
+ different storage destinations for each workspace. Log delivery status is also provided to know the latest
2014
+ status of log delivery attempts.
2015
+
2016
+ The high-level flow of billable usage delivery:
2017
+
2018
+ 1. **Create storage**: In AWS, [create a new AWS S3 bucket] with a specific bucket policy. Using
2019
+ Databricks APIs, call the Account API to create a [storage configuration object](:method:Storage/Create)
2020
+ that uses the bucket name.
2021
+
2022
+ 2. **Create credentials**: In AWS, create the appropriate AWS IAM role. For full details, including the
2023
+ required IAM role policies and trust relationship, see [Billable usage log delivery]. Using Databricks
2024
+ APIs, call the Account API to create a [credential configuration object](:method:Credentials/Create) that
2025
+ uses the IAM role's ARN.
2026
+
2027
+ 3. **Create log delivery configuration**: Using Databricks APIs, call the Account API to [create a log
2028
+ delivery configuration](:method:LogDelivery/Create) that uses the credential and storage configuration
2029
+ objects from previous steps. You can specify if the logs should include all events of that log type in
2030
+ your account (_Account level_ delivery) or only events for a specific set of workspaces (_workspace level_
2031
+ delivery). Account level log delivery applies to all current and future workspaces plus account level
2032
+ logs, while workspace level log delivery solely delivers logs related to the specified workspaces. You can
2033
+ create multiple types of delivery configurations per account.
2034
+
2035
+ For billable usage delivery: * For more information about billable usage logs, see [Billable usage log
2036
+ delivery]. For the CSV schema, see the [Usage page]. * The delivery location is
2037
+ `<bucket-name>/<prefix>/billable-usage/csv/`, where `<prefix>` is the name of the optional delivery path
2038
+ prefix you set up during log delivery configuration. Files are named
2039
+ `workspaceId=<workspace-id>-usageMonth=<month>.csv`. * All billable usage logs apply to specific
2040
+ workspaces (_workspace level_ logs). You can aggregate usage for your entire account by creating an
2041
+ _account level_ delivery configuration that delivers logs for all current and future workspaces in your
2042
+ account. * The files are delivered daily by overwriting the month's CSV file for each workspace.
2043
+
2044
+ For audit log delivery: * For more information about about audit log delivery, see [Audit log delivery],
2045
+ which includes information about the used JSON schema. * The delivery location is
2046
+ `<bucket-name>/<delivery-path-prefix>/workspaceId=<workspaceId>/date=<yyyy-mm-dd>/auditlogs_<internal-id>.json`.
2047
+ Files may get overwritten with the same content multiple times to achieve exactly-once delivery. * If the
2048
+ audit log delivery configuration included specific workspace IDs, only _workspace-level_ audit logs for
2049
+ those workspaces are delivered. If the log delivery configuration applies to the entire account (_account
2050
+ level_ delivery configuration), the audit log delivery includes workspace-level audit logs for all
2051
+ workspaces in the account as well as account-level audit logs. See [Audit log delivery] for details. *
2052
+ Auditable events are typically available in logs within 15 minutes.
2053
+
2054
+ [Audit log delivery]: https://docs.databricks.com/administration-guide/account-settings/audit-logs.html
2055
+ [Billable usage log delivery]: https://docs.databricks.com/administration-guide/account-settings/billable-usage-delivery.html
2056
+ [Usage page]: https://docs.databricks.com/administration-guide/account-settings/usage.html
2057
+ [create a new AWS S3 bucket]: https://docs.databricks.com/administration-guide/account-api/aws-storage.html"""
2027
2058
 
2028
2059
  def __init__(self, api_client):
2029
2060
  self._api = api_client
@@ -2053,7 +2084,6 @@ class LogDeliveryAPI:
2053
2084
  [Deliver and access billable usage logs]: https://docs.databricks.com/administration-guide/account-settings/billable-usage-delivery.html
2054
2085
 
2055
2086
  :param log_delivery_configuration: :class:`CreateLogDeliveryConfigurationParams`
2056
- * Log Delivery Configuration
2057
2087
 
2058
2088
  :returns: :class:`WrappedLogDeliveryConfiguration`
2059
2089
  """