databricks-sdk 0.59.0__py3-none-any.whl → 0.61.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1034,6 +1034,7 @@ class MessageErrorType(Enum):
  INVALID_SQL_UNKNOWN_TABLE_EXCEPTION = "INVALID_SQL_UNKNOWN_TABLE_EXCEPTION"
  INVALID_TABLE_IDENTIFIER_EXCEPTION = "INVALID_TABLE_IDENTIFIER_EXCEPTION"
  LOCAL_CONTEXT_EXCEEDED_EXCEPTION = "LOCAL_CONTEXT_EXCEEDED_EXCEPTION"
+ MESSAGE_ATTACHMENT_TOO_LONG_ERROR = "MESSAGE_ATTACHMENT_TOO_LONG_ERROR"
  MESSAGE_CANCELLED_WHILE_EXECUTING_EXCEPTION = "MESSAGE_CANCELLED_WHILE_EXECUTING_EXCEPTION"
  MESSAGE_DELETED_WHILE_EXECUTING_EXCEPTION = "MESSAGE_DELETED_WHILE_EXECUTING_EXCEPTION"
  MESSAGE_UPDATED_WHILE_EXECUTING_EXCEPTION = "MESSAGE_UPDATED_WHILE_EXECUTING_EXCEPTION"
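
A minimal sketch of matching the new error value; the import path (databricks.sdk.service.dashboards) is an assumption based on where the Genie MessageErrorType enum normally lives.

# Sketch only: the import path is an assumption, the enum value comes from this diff.
from databricks.sdk.service.dashboards import MessageErrorType

raw_error = "MESSAGE_ATTACHMENT_TOO_LONG_ERROR"  # e.g. taken from a Genie message error payload
if MessageErrorType(raw_error) is MessageErrorType.MESSAGE_ATTACHMENT_TOO_LONG_ERROR:
    print("Attachment exceeded the size limit; reduce the result size and retry.")
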
@@ -3,11 +3,14 @@
  from __future__ import annotations
 
  import logging
+ import random
+ import time
  from dataclasses import dataclass
+ from datetime import timedelta
  from enum import Enum
- from typing import Any, Dict, Iterator, List, Optional
+ from typing import Any, Callable, Dict, Iterator, List, Optional
 
- from ._internal import _enum, _from_dict, _repeated_dict
+ from ._internal import Wait, _enum, _from_dict, _repeated_dict
 
  _LOG = logging.getLogger("databricks.sdk")
 
@@ -707,6 +710,13 @@ class ProvisioningInfoState(Enum):
  UPDATING = "UPDATING"
 
 
+ class ProvisioningPhase(Enum):
+
+ PROVISIONING_PHASE_INDEX_SCAN = "PROVISIONING_PHASE_INDEX_SCAN"
+ PROVISIONING_PHASE_INDEX_SORT = "PROVISIONING_PHASE_INDEX_SORT"
+ PROVISIONING_PHASE_MAIN = "PROVISIONING_PHASE_MAIN"
+
+
  @dataclass
  class RequestedClaims:
  permission_set: Optional[RequestedClaimsPermissionSet] = None
@@ -960,6 +970,9 @@ class SyncedTablePipelineProgress:
  """The source table Delta version that was last processed by the pipeline. The pipeline may not
  have completely processed this version yet."""
 
+ provisioning_phase: Optional[ProvisioningPhase] = None
+ """The current phase of the data synchronization pipeline."""
+
  sync_progress_completion: Optional[float] = None
  """The completion ratio of this update. This is a number between 0 and 1."""
 
@@ -976,6 +989,8 @@ class SyncedTablePipelineProgress:
  body["estimated_completion_time_seconds"] = self.estimated_completion_time_seconds
  if self.latest_version_currently_processing is not None:
  body["latest_version_currently_processing"] = self.latest_version_currently_processing
+ if self.provisioning_phase is not None:
+ body["provisioning_phase"] = self.provisioning_phase.value
  if self.sync_progress_completion is not None:
  body["sync_progress_completion"] = self.sync_progress_completion
  if self.synced_row_count is not None:
@@ -991,6 +1006,8 @@ class SyncedTablePipelineProgress:
  body["estimated_completion_time_seconds"] = self.estimated_completion_time_seconds
  if self.latest_version_currently_processing is not None:
  body["latest_version_currently_processing"] = self.latest_version_currently_processing
+ if self.provisioning_phase is not None:
+ body["provisioning_phase"] = self.provisioning_phase
  if self.sync_progress_completion is not None:
  body["sync_progress_completion"] = self.sync_progress_completion
  if self.synced_row_count is not None:
@@ -1005,6 +1022,7 @@ class SyncedTablePipelineProgress:
  return cls(
  estimated_completion_time_seconds=d.get("estimated_completion_time_seconds", None),
  latest_version_currently_processing=d.get("latest_version_currently_processing", None),
+ provisioning_phase=_enum(d, "provisioning_phase", ProvisioningPhase),
  sync_progress_completion=d.get("sync_progress_completion", None),
  synced_row_count=d.get("synced_row_count", None),
  total_row_count=d.get("total_row_count", None),
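
A small round-trip sketch for the new provisioning_phase field, assuming both classes are importable from databricks.sdk.service.database (consistent with the DatabaseAPI hunks below); only names introduced in this diff are used.

from databricks.sdk.service.database import ProvisioningPhase, SyncedTablePipelineProgress

# from_dict() maps the wire string onto the new ProvisioningPhase enum via _enum().
progress = SyncedTablePipelineProgress.from_dict(
    {
        "provisioning_phase": "PROVISIONING_PHASE_INDEX_SCAN",
        "sync_progress_completion": 0.42,
    }
)
assert progress.provisioning_phase is ProvisioningPhase.PROVISIONING_PHASE_INDEX_SCAN
# as_dict() serializes the enum back to its string value.
assert progress.as_dict()["provisioning_phase"] == "PROVISIONING_PHASE_INDEX_SCAN"
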
@@ -1343,6 +1361,31 @@ class DatabaseAPI:
  def __init__(self, api_client):
  self._api = api_client
 
+ def wait_get_database_instance_database_available(
+ self, name: str, timeout=timedelta(minutes=20), callback: Optional[Callable[[DatabaseInstance], None]] = None
+ ) -> DatabaseInstance:
+ deadline = time.time() + timeout.total_seconds()
+ target_states = (DatabaseInstanceState.AVAILABLE,)
+ status_message = "polling..."
+ attempt = 1
+ while time.time() < deadline:
+ poll = self.get_database_instance(name=name)
+ status = poll.state
+ status_message = f"current status: {status}"
+ if status in target_states:
+ return poll
+ if callback:
+ callback(poll)
+ prefix = f"name={name}"
+ sleep = attempt
+ if sleep > 10:
+ # sleep 10s max per attempt
+ sleep = 10
+ _LOG.debug(f"{prefix}: ({status}) {status_message} (sleeping ~{sleep}s)")
+ time.sleep(sleep + random.random())
+ attempt += 1
+ raise TimeoutError(f"timed out after {timeout}: {status_message}")
+
  def create_database_catalog(self, catalog: DatabaseCatalog) -> DatabaseCatalog:
  """Create a Database Catalog.
 
@@ -1359,13 +1402,15 @@
  res = self._api.do("POST", "/api/2.0/database/catalogs", body=body, headers=headers)
  return DatabaseCatalog.from_dict(res)
 
- def create_database_instance(self, database_instance: DatabaseInstance) -> DatabaseInstance:
+ def create_database_instance(self, database_instance: DatabaseInstance) -> Wait[DatabaseInstance]:
  """Create a Database Instance.
 
  :param database_instance: :class:`DatabaseInstance`
  Instance to create.
 
- :returns: :class:`DatabaseInstance`
+ :returns:
+ Long-running operation waiter for :class:`DatabaseInstance`.
+ See :method:wait_get_database_instance_database_available for more details.
  """
  body = database_instance.as_dict()
  headers = {
@@ -1373,8 +1418,17 @@
  "Content-Type": "application/json",
  }
 
- res = self._api.do("POST", "/api/2.0/database/instances", body=body, headers=headers)
- return DatabaseInstance.from_dict(res)
+ op_response = self._api.do("POST", "/api/2.0/database/instances", body=body, headers=headers)
+ return Wait(
+ self.wait_get_database_instance_database_available,
+ response=DatabaseInstance.from_dict(op_response),
+ name=op_response["name"],
+ )
+
+ def create_database_instance_and_wait(
+ self, database_instance: DatabaseInstance, timeout=timedelta(minutes=20)
+ ) -> DatabaseInstance:
+ return self.create_database_instance(database_instance=database_instance).result(timeout=timeout)
 
  def create_database_instance_role(
  self, instance_name: str, database_instance_role: DatabaseInstanceRole
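
A hedged sketch of the new long-running-operation flow: the Wait wrapper, result(timeout=...) call, and the *_and_wait convenience method come from this diff, while the w.database accessor and the DatabaseInstance constructor arguments (name, capacity) are assumptions not shown here.

from datetime import timedelta

from databricks.sdk import WorkspaceClient
from databricks.sdk.service.database import DatabaseInstance

w = WorkspaceClient()

# create_database_instance() now returns Wait[DatabaseInstance]; result() polls
# wait_get_database_instance_database_available until the instance reaches AVAILABLE.
instance = w.database.create_database_instance(
    DatabaseInstance(name="my-instance", capacity="CU_1")  # field names are assumptions
).result(timeout=timedelta(minutes=30))
print(instance.state)

# Equivalent convenience call added in this release:
# instance = w.database.create_database_instance_and_wait(DatabaseInstance(name="my-instance", capacity="CU_1"))
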
@@ -1735,7 +1789,7 @@
  The name of the instance. This is the unique identifier for the instance.
  :param database_instance: :class:`DatabaseInstance`
  :param update_mask: str
- The list of fields to update.
+ The list of fields to update. This field is not yet supported, and is ignored by the server.
 
  :returns: :class:`DatabaseInstance`
  """
@@ -734,9 +734,11 @@ class IngestionSourceType(Enum):
  NETSUITE = "NETSUITE"
  ORACLE = "ORACLE"
  POSTGRESQL = "POSTGRESQL"
+ REDSHIFT = "REDSHIFT"
  SALESFORCE = "SALESFORCE"
  SERVICENOW = "SERVICENOW"
  SHAREPOINT = "SHAREPOINT"
+ SQLDW = "SQLDW"
  SQLSERVER = "SQLSERVER"
  TERADATA = "TERADATA"
  WORKDAY_RAAS = "WORKDAY_RAAS"
@@ -205,6 +205,7 @@ class AiGatewayGuardrailPiiBehavior:
  class AiGatewayGuardrailPiiBehaviorBehavior(Enum):
 
  BLOCK = "BLOCK"
+ MASK = "MASK"
  NONE = "NONE"
 
 
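A short sketch of opting into the new MASK guardrail value, assuming the classes live in databricks.sdk.service.serving and that AiGatewayGuardrailPiiBehavior exposes a `behavior` field (an inference from the class names shown here, not something this diff confirms).

from databricks.sdk.service.serving import (AiGatewayGuardrailPiiBehavior,
                                            AiGatewayGuardrailPiiBehaviorBehavior)

# `behavior` as the field name is an assumption; MASK is the value added in this release.
pii_guardrail = AiGatewayGuardrailPiiBehavior(behavior=AiGatewayGuardrailPiiBehaviorBehavior.MASK)
print(pii_guardrail.as_dict())  # expected: {'behavior': 'MASK'}
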
@@ -667,6 +667,7 @@ class ComplianceStandard(Enum):
  FEDRAMP_HIGH = "FEDRAMP_HIGH"
  FEDRAMP_IL5 = "FEDRAMP_IL5"
  FEDRAMP_MODERATE = "FEDRAMP_MODERATE"
+ GERMANY_C5 = "GERMANY_C5"
  HIPAA = "HIPAA"
  HITRUST = "HITRUST"
  IRAP_PROTECTED = "IRAP_PROTECTED"
@@ -727,6 +727,9 @@ class ListProviderShareAssetsResponse:
  notebooks: Optional[List[NotebookFile]] = None
  """The list of notebooks in the share."""
 
+ share: Optional[Share] = None
+ """The metadata of the share."""
+
  tables: Optional[List[Table]] = None
  """The list of tables in the share."""
 
@@ -740,6 +743,8 @@ class ListProviderShareAssetsResponse:
  body["functions"] = [v.as_dict() for v in self.functions]
  if self.notebooks:
  body["notebooks"] = [v.as_dict() for v in self.notebooks]
+ if self.share:
+ body["share"] = self.share.as_dict()
  if self.tables:
  body["tables"] = [v.as_dict() for v in self.tables]
  if self.volumes:
@@ -753,6 +758,8 @@ class ListProviderShareAssetsResponse:
  body["functions"] = self.functions
  if self.notebooks:
  body["notebooks"] = self.notebooks
+ if self.share:
+ body["share"] = self.share
  if self.tables:
  body["tables"] = self.tables
  if self.volumes:
@@ -765,6 +772,7 @@ class ListProviderShareAssetsResponse:
  return cls(
  functions=_repeated_dict(d, "functions", DeltaSharingFunction),
  notebooks=_repeated_dict(d, "notebooks", NotebookFile),
+ share=_from_dict(d, "share", Share),
  tables=_repeated_dict(d, "tables", Table),
  volumes=_repeated_dict(d, "volumes", Volume),
  )
@@ -1817,6 +1825,63 @@ class SecurablePropertiesKvPairs:
  return cls(properties=d.get("properties", None))
 
 
+ @dataclass
+ class Share:
+ comment: Optional[str] = None
+ """The comment of the share."""
+
+ display_name: Optional[str] = None
+ """The display name of the share. If defined, it will be shown in the UI."""
+
+ id: Optional[str] = None
+
+ name: Optional[str] = None
+
+ tags: Optional[List[catalog.TagKeyValue]] = None
+ """The tags of the share."""
+
+ def as_dict(self) -> dict:
+ """Serializes the Share into a dictionary suitable for use as a JSON request body."""
+ body = {}
+ if self.comment is not None:
+ body["comment"] = self.comment
+ if self.display_name is not None:
+ body["display_name"] = self.display_name
+ if self.id is not None:
+ body["id"] = self.id
+ if self.name is not None:
+ body["name"] = self.name
+ if self.tags:
+ body["tags"] = [v.as_dict() for v in self.tags]
+ return body
+
+ def as_shallow_dict(self) -> dict:
+ """Serializes the Share into a shallow dictionary of its immediate attributes."""
+ body = {}
+ if self.comment is not None:
+ body["comment"] = self.comment
+ if self.display_name is not None:
+ body["display_name"] = self.display_name
+ if self.id is not None:
+ body["id"] = self.id
+ if self.name is not None:
+ body["name"] = self.name
+ if self.tags:
+ body["tags"] = self.tags
+ return body
+
+ @classmethod
+ def from_dict(cls, d: Dict[str, Any]) -> Share:
+ """Deserializes the Share from a dictionary."""
+ return cls(
+ comment=d.get("comment", None),
+ display_name=d.get("display_name", None),
+ id=d.get("id", None),
+ name=d.get("name", None),
+ tags=_repeated_dict(d, "tags", catalog.TagKeyValue),
+ )
+
+
  @dataclass
  class ShareInfo:
  comment: Optional[str] = None
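
A round-trip sketch for the new Share payload, assuming the class is importable from databricks.sdk.service.sharing (consistent with the ListProviderShareAssetsResponse hunks above); only names defined in this diff are used.

from databricks.sdk.service.sharing import Share

share = Share.from_dict(
    {
        "name": "sales_share",
        "display_name": "Sales share",
        "comment": "Quarterly sales tables",
    }
)
assert share.display_name == "Sales share"
# Unset optional fields (id, tags) are omitted from the serialized body.
print(share.as_dict())
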
@@ -2121,7 +2186,8 @@ class SharedDataObjectUpdate:
  """One of: **ADD**, **REMOVE**, **UPDATE**."""
 
  data_object: Optional[SharedDataObject] = None
- """The data object that is being added, removed, or updated."""
+ """The data object that is being added, removed, or updated. The maximum number update data objects
+ allowed is a 100."""
 
  def as_dict(self) -> dict:
  """Serializes the SharedDataObjectUpdate into a dictionary suitable for use as a JSON request body."""
@@ -5017,6 +5017,10 @@ class QueryMetrics:
  projected_remaining_task_total_time_ms: Optional[int] = None
  """projected remaining work to be done aggregated across all stages in the query, in milliseconds"""
 
+ projected_remaining_wallclock_time_ms: Optional[int] = None
+ """projected lower bound on remaining total task time based on
+ projected_remaining_task_total_time_ms / maximum concurrency"""
+
  provisioning_queue_start_timestamp: Optional[int] = None
  """Timestamp of when the query was enqueued waiting for a cluster to be provisioned for the
  warehouse. This field is optional and will not appear if the query skipped the provisioning
@@ -5102,6 +5106,8 @@ class QueryMetrics:
  body["photon_total_time_ms"] = self.photon_total_time_ms
  if self.projected_remaining_task_total_time_ms is not None:
  body["projected_remaining_task_total_time_ms"] = self.projected_remaining_task_total_time_ms
+ if self.projected_remaining_wallclock_time_ms is not None:
+ body["projected_remaining_wallclock_time_ms"] = self.projected_remaining_wallclock_time_ms
  if self.provisioning_queue_start_timestamp is not None:
  body["provisioning_queue_start_timestamp"] = self.provisioning_queue_start_timestamp
  if self.pruned_bytes is not None:
@@ -5161,6 +5167,8 @@ class QueryMetrics:
  body["photon_total_time_ms"] = self.photon_total_time_ms
  if self.projected_remaining_task_total_time_ms is not None:
  body["projected_remaining_task_total_time_ms"] = self.projected_remaining_task_total_time_ms
+ if self.projected_remaining_wallclock_time_ms is not None:
+ body["projected_remaining_wallclock_time_ms"] = self.projected_remaining_wallclock_time_ms
  if self.provisioning_queue_start_timestamp is not None:
  body["provisioning_queue_start_timestamp"] = self.provisioning_queue_start_timestamp
  if self.pruned_bytes is not None:
@@ -5215,6 +5223,7 @@ class QueryMetrics:
  overloading_queue_start_timestamp=d.get("overloading_queue_start_timestamp", None),
  photon_total_time_ms=d.get("photon_total_time_ms", None),
  projected_remaining_task_total_time_ms=d.get("projected_remaining_task_total_time_ms", None),
+ projected_remaining_wallclock_time_ms=d.get("projected_remaining_wallclock_time_ms", None),
  provisioning_queue_start_timestamp=d.get("provisioning_queue_start_timestamp", None),
  pruned_bytes=d.get("pruned_bytes", None),
  pruned_files_count=d.get("pruned_files_count", None),
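
A sketch of reading the new metric from a deserialized QueryMetrics (for example, the metrics attached to query history results), assuming the class is importable from databricks.sdk.service.sql; the field names come from this hunk.

from databricks.sdk.service.sql import QueryMetrics

metrics = QueryMetrics.from_dict(
    {
        "projected_remaining_task_total_time_ms": 120000,
        "projected_remaining_wallclock_time_ms": 15000,
    }
)
if metrics.projected_remaining_wallclock_time_ms is not None:
    remaining_s = metrics.projected_remaining_wallclock_time_ms / 1000
    print(f"~{remaining_s:.0f}s of wall-clock work projected to remain")
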
databricks/sdk/version.py CHANGED
@@ -1 +1 @@
- __version__ = "0.59.0"
+ __version__ = "0.61.0"
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: databricks-sdk
- Version: 0.59.0
+ Version: 0.61.0
  Summary: Databricks SDK for Python (Beta)
  Project-URL: Documentation, https://databricks-sdk-py.readthedocs.io
  Keywords: databricks,sdk
@@ -1,5 +1,5 @@
  databricks/__init__.py,sha256=CF2MJcZFwbpn9TwQER8qnCDhkPooBGQNVkX4v7g6p3g,537
- databricks/sdk/__init__.py,sha256=oYv56z-ssw6rcHyDgK65wnwyk8TUpiCtV-Ux2vJ7Kk0,60529
+ databricks/sdk/__init__.py,sha256=OEdQs8tEg30PJNwIRxtye9A4NGk5C9KcAX1DITpWSps,61427
  databricks/sdk/_base_client.py,sha256=IMHtzC5BhWt-lBVjifewR1Ah5fegGDMv0__-O1hCxWI,15850
  databricks/sdk/_property.py,sha256=ccbxhkXZmZOxbx2sqKMTzhVZDuvWXG0WPHFRgac6JAM,1701
  databricks/sdk/azure.py,sha256=sN_ARpmP9h1JovtiHIsDLtrVQP_K11eNDDtHS6PD19k,1015
@@ -7,17 +7,17 @@ databricks/sdk/casing.py,sha256=gZy-FlI7og5WNVX88Vb_7S1WeInwJLGws80CGj_9s48,1137
  databricks/sdk/clock.py,sha256=Ivlow0r_TkXcTJ8UXkxSA0czKrY0GvwHAeOvjPkJnAQ,1360
  databricks/sdk/config.py,sha256=rebzZAw0aMSxSwBeXKsF2VE9X_Y33Kjvcd1PO-5wgc4,23401
  databricks/sdk/core.py,sha256=6lsRl6BL3pLgqMMVFrOnQsx-RxxaJJL_Gt2jJfWUovs,3724
- databricks/sdk/credentials_provider.py,sha256=9_P3N52S87xPwI_yUSajnT49--kJWLhKCoHpn5Dwzps,41305
+ databricks/sdk/credentials_provider.py,sha256=YSlonZeqtR0zCLrsNPqXGPLoFg_9xih9MldBValvmd8,41340
  databricks/sdk/data_plane.py,sha256=br5IPnOdE611IBubxP8xkUR9_qzbSRSYyVWSua6znWs,3109
  databricks/sdk/dbutils.py,sha256=PoDIwNAYGZhVZC7krox7tsudUDNVSk0gsFjFWlKJXVk,15753
  databricks/sdk/environments.py,sha256=9eVeb68cksqY2Lqwth2PJNmK0JEGdIjh-ebrrmUbqCc,3963
  databricks/sdk/oauth.py,sha256=wOcZVfi-Jd83XQDW1rbDIJbxFqJOjaeTSlUgQYD8VWQ,28406
- databricks/sdk/oidc.py,sha256=A9umMkfnL-Nwfw2GljGxqTtkz7PjMTzltGaeckfrvT4,5749
+ databricks/sdk/oidc.py,sha256=_o6Ul8QMyxi_qgKJVSB4YdGuoiRqiZnChMdsRs0QgiM,5974
  databricks/sdk/oidc_token_supplier.py,sha256=QrO6J0QY4yFfcdQDL5h2OfxMxvBZJPtPmPeqLbPJ5Xw,1065
  databricks/sdk/py.typed,sha256=pSvaHpbY1UPNEXyVFUjlgBhjPFZMmVC_UNrPC7eMOHI,74
  databricks/sdk/retries.py,sha256=7k2kEexGqGKXHNAWHbPFSZSugU8UIU0qtyly_hix22Q,2581
  databricks/sdk/useragent.py,sha256=boEgzTv-Zmo6boipZKjSopNy0CXg4GShC1_lTKpJgqs,7361
- databricks/sdk/version.py,sha256=HMyZ2jNndR4_RS4EcSAHrVoKBifJVKSLuqmMuKnl_JI,23
+ databricks/sdk/version.py,sha256=57kaeWxmyt5jDwUTomJhqevhkw_IXJwindDD018ZEtY,23
  databricks/sdk/_widgets/__init__.py,sha256=VhI-VvLlr3rKUT1nbROslHJIbmZX_tPJ9rRhrdFsYUA,2811
  databricks/sdk/_widgets/default_widgets_utils.py,sha256=_hwCbptLbRzWEmknco0H1wQNAYcuy2pjFO9NiRbvFeo,1127
  databricks/sdk/_widgets/ipywidgets_utils.py,sha256=mg3rEPG9z76e0yVjGgcLybUvd_zSuN5ziGeKiZ-c8Ew,2927
@@ -36,40 +36,40 @@ databricks/sdk/logger/__init__.py,sha256=0_sSQfDkaFGqMHZUVw-g_Ax-RFmOv0Z6NjxCVAe
  databricks/sdk/logger/round_trip_logger.py,sha256=H2YhxUPZpWSwAwCdfa03D5vRUFxsV73bbM8eF_l9QrQ,4873
  databricks/sdk/mixins/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  databricks/sdk/mixins/compute.py,sha256=76Fhc7cDQfOf2IHkPtHZpAnxNfrSLMKl9dbQ6KswXaM,11066
- databricks/sdk/mixins/files.py,sha256=er_bDsMAmvqENkiYJmvLGchbKYmIw_KN0PsojigSCUM,56556
+ databricks/sdk/mixins/files.py,sha256=Y3IvOnB9Yogf6Ndr2uJ-HLGm57sHACoic4N3MXXmx38,57605
  databricks/sdk/mixins/jobs.py,sha256=4ywi0dZ8mEN8KZWLmZBFfdbejTP6JATvf9wCCRkdJBw,11558
- databricks/sdk/mixins/open_ai_client.py,sha256=cLfJAywSPfa4X-DLYOGZdKv7nIud2kja9cjrHQ4iYHM,4571
+ databricks/sdk/mixins/open_ai_client.py,sha256=FxMatC648dY6_PU-ClO172Iowivt4LzLgem00tNPPvQ,5552
  databricks/sdk/mixins/workspace.py,sha256=sgahprJIPLAxTvikHd9Wq2ifBW1Mcc5qz9u6EB-qm7w,4958
  databricks/sdk/runtime/__init__.py,sha256=6nthZxeYY1HjHieQcP7kXVLIId7w2yfHpZRXXtDLDAc,7333
  databricks/sdk/runtime/dbutils_stub.py,sha256=S_pgWyGmwp3Ay-pMDEXccYsPwNVqCtz7MpD3fZVlHUA,11408
  databricks/sdk/service/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  databricks/sdk/service/_internal.py,sha256=PY83MPehEwGuMzCnyvolqglnfZeQ7-eS38kedTa7KDU,1985
- databricks/sdk/service/aibuilder.py,sha256=sNdOOYn7-VXpcK0YTiO5CU2LkkOlHU7Jh6hQSlxJ_C8,11655
+ databricks/sdk/service/agentbricks.py,sha256=0HI48RP5XAZwbNk4kz2wFcn0isSyIJkwsfV9y-oaQ0I,11675
  databricks/sdk/service/apps.py,sha256=xievdxty2wRngg1MIPNN3nCjTbgf3kWbXNQHh-JV-xk,58002
  databricks/sdk/service/billing.py,sha256=Y1tuA7x-wl720TCA98merqUqwrhA4KGd92oWCv679ps,90880
- databricks/sdk/service/catalog.py,sha256=FChGIfAPttOKMQIGmyyKi8JA2yI8yfGe9QNtBGRCMMI,594350
- databricks/sdk/service/cleanrooms.py,sha256=2zD06mz4cFo1Se_9zYErHnQpPpkT5H8M7Sr68641JJg,61799
+ databricks/sdk/service/catalog.py,sha256=oyevh95V0bMbkB2uj3nzFrYG5N01CWyyy8xP3PSUisY,595946
+ databricks/sdk/service/cleanrooms.py,sha256=T7fusuiGGYd5yLGqqP0OsmaylmzoPFGjnEQUnoqoo5M,80618
  databricks/sdk/service/compute.py,sha256=Obx66MV4eXj1J85rNNy76-LTr0DB9m2iNc_iE1wa-Mw,467530
- databricks/sdk/service/dashboards.py,sha256=Ds4oHTqXmR5LmDQ-gVn3PQqKwSG6MKn4MzI91Xc5nbg,92454
- databricks/sdk/service/database.py,sha256=nBg4NqBDY9C-KoNCICR41NlyfY1wahtQ7yeE-fZoapA,74337
+ databricks/sdk/service/dashboards.py,sha256=Tv82LqtIc3MiwdMRVwakJPWPFVWICp-WJR4b5Z-gdqw,92530
+ databricks/sdk/service/database.py,sha256=8wYFKfRnu0fVmd2-FuqL2sjtNB3XJEX32X7wkF9JFq8,76769
  databricks/sdk/service/files.py,sha256=k28gM20L9bw_RmKcIm8IcNFfHVtENCbakptM3p8bgbw,38403
  databricks/sdk/service/iam.py,sha256=cSKocvBtKqC9IARCBpYpCfUZAnmGdRis2vVdzx6X_WQ,165768
  databricks/sdk/service/jobs.py,sha256=2H9Uj_lSJbdrUUV8ehbo_o_fkad9NxfSE3jbt7xmTuo,425121
  databricks/sdk/service/marketplace.py,sha256=8MpP8Y65R6bGyvuWW4ZU6b-6__a4WLZVcDScLh0to4g,153028
  databricks/sdk/service/ml.py,sha256=th5S-VD_NGA6nqOMQrjrr7ubrsTSm0oZdOZ4XZbVdZQ,303895
  databricks/sdk/service/oauth2.py,sha256=6yoa5zmpJ68lCIIFyqcrM3fiSaWvPtf7Pl1dNhXL9pU,75330
- databricks/sdk/service/pipelines.py,sha256=bK3dcPrpaGY4BWAcSBeK7-9WZ4ChN5ZwJQJgu5lW5CU,154568
+ databricks/sdk/service/pipelines.py,sha256=CblPuyVF6z5cLEbxuU0JpYLPDtGavvUN8vyiaAYGOPA,154614
  databricks/sdk/service/provisioning.py,sha256=zZm_4lDO_mhDAOcaNoMDDz4Die_MXoapNa4NuwLJYww,129980
  databricks/sdk/service/qualitymonitorv2.py,sha256=82IUD7oTDNPwMcIDE_v59-nr2I3gpL65Ih7UfB5eInY,9202
- databricks/sdk/service/serving.py,sha256=9hz_YJ4wG64xTW72NrXB5whrqdndGgPPXYJuQbtiHKU,209023
- databricks/sdk/service/settings.py,sha256=kfAVeQI5_9ywh8xwHHvWHOW0Q6IPq3nSH-fI0ErSP0A,371344
- databricks/sdk/service/sharing.py,sha256=WCluDVgW80zVbVABREEShR8ht7tBBtb7Bwn5Tqi80ps,141194
- databricks/sdk/service/sql.py,sha256=cYqo-nPiS1b9QBWdYT9GmBbiMjDtnsGWL7KLCRiDc9E,383657
+ databricks/sdk/service/serving.py,sha256=22s7VRKLACqp_oJ0qrFCpexdNufxWtfrMtC-bIb5r_I,209041
+ databricks/sdk/service/settings.py,sha256=aebimI7E1p1HUncwrOtSyRM3Hq_gPaCAUH8z5slcTrA,371374
+ databricks/sdk/service/sharing.py,sha256=sWUWkM5IYqSitZBd1TcucmZiie8d1NHmsCB8kAKvmnU,143362
+ databricks/sdk/service/sql.py,sha256=hxrAnJuByGo3ItzGZ64VyWO27W-FQwFSv4RgVa-iwpg,384301
  databricks/sdk/service/vectorsearch.py,sha256=8aARB3z0HAVKX7wMD3ZHqxG7_OXHvTccSYi-CQU8TgI,69088
  databricks/sdk/service/workspace.py,sha256=iss6wuYvMDSMrgwks0FuRRBeJSZFmWNOCkPIMJAzMgY,111868
- databricks_sdk-0.59.0.dist-info/licenses/LICENSE,sha256=afBgTZo-JsYqj4VOjnejBetMuHKcFR30YobDdpVFkqY,11411
- databricks_sdk-0.59.0.dist-info/licenses/NOTICE,sha256=tkRcQYA1k68wDLcnOWbg2xJDsUOJw8G8DGBhb8dnI3w,1588
- databricks_sdk-0.59.0.dist-info/METADATA,sha256=2eKwNhUneFBTjasvLEHngUZn0VP-MFCNNy53wvY-i8o,39397
- databricks_sdk-0.59.0.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
- databricks_sdk-0.59.0.dist-info/top_level.txt,sha256=7kRdatoSgU0EUurRQJ_3F1Nv4EOSHWAr6ng25tJOJKU,11
- databricks_sdk-0.59.0.dist-info/RECORD,,
+ databricks_sdk-0.61.0.dist-info/licenses/LICENSE,sha256=afBgTZo-JsYqj4VOjnejBetMuHKcFR30YobDdpVFkqY,11411
+ databricks_sdk-0.61.0.dist-info/licenses/NOTICE,sha256=tkRcQYA1k68wDLcnOWbg2xJDsUOJw8G8DGBhb8dnI3w,1588
+ databricks_sdk-0.61.0.dist-info/METADATA,sha256=kIVNmjfs1-RrelPeAGKZGRaSE5TLuNfzvnYTfpTqiwQ,39397
+ databricks_sdk-0.61.0.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+ databricks_sdk-0.61.0.dist-info/top_level.txt,sha256=7kRdatoSgU0EUurRQJ_3F1Nv4EOSHWAr6ng25tJOJKU,11
+ databricks_sdk-0.61.0.dist-info/RECORD,,