databricks-sdk 0.60.0__py3-none-any.whl → 0.62.0__py3-none-any.whl

This diff compares the contents of two publicly released versions of the package as they were published to their public registry. It is provided for informational purposes only.

Potentially problematic release: this version of databricks-sdk might be problematic.

@@ -3,11 +3,14 @@
  from __future__ import annotations

  import logging
+ import random
+ import time
  from dataclasses import dataclass
+ from datetime import timedelta
  from enum import Enum
- from typing import Any, Dict, Iterator, List, Optional
+ from typing import Any, Callable, Dict, Iterator, List, Optional

- from ._internal import _enum, _from_dict, _repeated_dict
+ from ._internal import Wait, _enum, _from_dict, _repeated_dict

  _LOG = logging.getLogger("databricks.sdk")

@@ -787,7 +790,7 @@ class RequestedResource:

  @dataclass
  class SyncedDatabaseTable:
- """Next field marker: 12"""
+ """Next field marker: 14"""

  name: str
  """Full three-part (catalog, schema, table) name of the table."""
@@ -802,6 +805,14 @@ class SyncedDatabaseTable:
  database instance name MUST match that of the registered catalog (or the request will be
  rejected)."""

+ effective_database_instance_name: Optional[str] = None
+ """The name of the database instance that this table is registered to. This field is always
+ returned, and for tables inside database catalogs is inferred database instance associated with
+ the catalog."""
+
+ effective_logical_database_name: Optional[str] = None
+ """The name of the logical database that this table is registered to."""
+
  logical_database_name: Optional[str] = None
  """Target Postgres database object (logical database) name for this table.

@@ -828,6 +839,10 @@ class SyncedDatabaseTable:
  body["data_synchronization_status"] = self.data_synchronization_status.as_dict()
  if self.database_instance_name is not None:
  body["database_instance_name"] = self.database_instance_name
+ if self.effective_database_instance_name is not None:
+ body["effective_database_instance_name"] = self.effective_database_instance_name
+ if self.effective_logical_database_name is not None:
+ body["effective_logical_database_name"] = self.effective_logical_database_name
  if self.logical_database_name is not None:
  body["logical_database_name"] = self.logical_database_name
  if self.name is not None:
@@ -845,6 +860,10 @@ class SyncedDatabaseTable:
  body["data_synchronization_status"] = self.data_synchronization_status
  if self.database_instance_name is not None:
  body["database_instance_name"] = self.database_instance_name
+ if self.effective_database_instance_name is not None:
+ body["effective_database_instance_name"] = self.effective_database_instance_name
+ if self.effective_logical_database_name is not None:
+ body["effective_logical_database_name"] = self.effective_logical_database_name
  if self.logical_database_name is not None:
  body["logical_database_name"] = self.logical_database_name
  if self.name is not None:
@@ -861,6 +880,8 @@ class SyncedDatabaseTable:
  return cls(
  data_synchronization_status=_from_dict(d, "data_synchronization_status", SyncedTableStatus),
  database_instance_name=d.get("database_instance_name", None),
+ effective_database_instance_name=d.get("effective_database_instance_name", None),
+ effective_logical_database_name=d.get("effective_logical_database_name", None),
  logical_database_name=d.get("logical_database_name", None),
  name=d.get("name", None),
  spec=_from_dict(d, "spec", SyncedTableSpec),
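
As context for the hunks above, a minimal round-trip sketch of the two new read-only fields. The import path and the sample payload are assumptions; only SyncedDatabaseTable.from_dict/as_dict and the field names come from this diff.

# Hedged sketch: deserializing a SyncedDatabaseTable that carries the new
# effective_* fields (import path assumed, values hypothetical).
from databricks.sdk.service.database import SyncedDatabaseTable

table = SyncedDatabaseTable.from_dict(
    {
        "name": "main.default.orders_synced",
        "effective_database_instance_name": "my-instance",
        "effective_logical_database_name": "databricks_postgres",
    }
)
print(table.effective_database_instance_name)  # -> "my-instance"
assert "effective_logical_database_name" in table.as_dict()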
@@ -1358,6 +1379,31 @@ class DatabaseAPI:
  def __init__(self, api_client):
  self._api = api_client

+ def wait_get_database_instance_database_available(
+ self, name: str, timeout=timedelta(minutes=20), callback: Optional[Callable[[DatabaseInstance], None]] = None
+ ) -> DatabaseInstance:
+ deadline = time.time() + timeout.total_seconds()
+ target_states = (DatabaseInstanceState.AVAILABLE,)
+ status_message = "polling..."
+ attempt = 1
+ while time.time() < deadline:
+ poll = self.get_database_instance(name=name)
+ status = poll.state
+ status_message = f"current status: {status}"
+ if status in target_states:
+ return poll
+ if callback:
+ callback(poll)
+ prefix = f"name={name}"
+ sleep = attempt
+ if sleep > 10:
+ # sleep 10s max per attempt
+ sleep = 10
+ _LOG.debug(f"{prefix}: ({status}) {status_message} (sleeping ~{sleep}s)")
+ time.sleep(sleep + random.random())
+ attempt += 1
+ raise TimeoutError(f"timed out after {timeout}: {status_message}")
+
  def create_database_catalog(self, catalog: DatabaseCatalog) -> DatabaseCatalog:
  """Create a Database Catalog.

@@ -1374,13 +1420,15 @@ class DatabaseAPI:
  res = self._api.do("POST", "/api/2.0/database/catalogs", body=body, headers=headers)
  return DatabaseCatalog.from_dict(res)

- def create_database_instance(self, database_instance: DatabaseInstance) -> DatabaseInstance:
+ def create_database_instance(self, database_instance: DatabaseInstance) -> Wait[DatabaseInstance]:
  """Create a Database Instance.

  :param database_instance: :class:`DatabaseInstance`
  Instance to create.

- :returns: :class:`DatabaseInstance`
+ :returns:
+ Long-running operation waiter for :class:`DatabaseInstance`.
+ See :method:wait_get_database_instance_database_available for more details.
  """
  body = database_instance.as_dict()
  headers = {
@@ -1388,8 +1436,17 @@
  "Content-Type": "application/json",
  }

- res = self._api.do("POST", "/api/2.0/database/instances", body=body, headers=headers)
- return DatabaseInstance.from_dict(res)
+ op_response = self._api.do("POST", "/api/2.0/database/instances", body=body, headers=headers)
+ return Wait(
+ self.wait_get_database_instance_database_available,
+ response=DatabaseInstance.from_dict(op_response),
+ name=op_response["name"],
+ )
+
+ def create_database_instance_and_wait(
+ self, database_instance: DatabaseInstance, timeout=timedelta(minutes=20)
+ ) -> DatabaseInstance:
+ return self.create_database_instance(database_instance=database_instance).result(timeout=timeout)

  def create_database_instance_role(
  self, instance_name: str, database_instance_role: DatabaseInstanceRole
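
To show how the new long-running-operation flow is meant to be used, here is a hedged usage sketch. It assumes the service is exposed as w.database on WorkspaceClient and that a DatabaseInstance can be built from just a name; neither of those details appears in this diff.

from datetime import timedelta

from databricks.sdk import WorkspaceClient
from databricks.sdk.service.database import DatabaseInstance

w = WorkspaceClient()

# create_database_instance() now returns Wait[DatabaseInstance]; .result()
# polls wait_get_database_instance_database_available until the instance
# reaches the AVAILABLE state or the timeout elapses.
waiter = w.database.create_database_instance(DatabaseInstance(name="my-instance"))
instance = waiter.result(timeout=timedelta(minutes=30))

# Equivalent convenience wrapper added in this release:
instance = w.database.create_database_instance_and_wait(DatabaseInstance(name="my-instance"))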
@@ -7223,6 +7223,73 @@ class SubscriptionSubscriber:
  return cls(destination_id=d.get("destination_id", None), user_name=d.get("user_name", None))


+ @dataclass
+ class TableState:
+ has_seen_updates: Optional[bool] = None
+ """Whether or not the table has seen updates since either the creation of the trigger or the last
+ successful evaluation of the trigger"""
+
+ table_name: Optional[str] = None
+ """Full table name of the table to monitor, e.g. `mycatalog.myschema.mytable`"""
+
+ def as_dict(self) -> dict:
+ """Serializes the TableState into a dictionary suitable for use as a JSON request body."""
+ body = {}
+ if self.has_seen_updates is not None:
+ body["has_seen_updates"] = self.has_seen_updates
+ if self.table_name is not None:
+ body["table_name"] = self.table_name
+ return body
+
+ def as_shallow_dict(self) -> dict:
+ """Serializes the TableState into a shallow dictionary of its immediate attributes."""
+ body = {}
+ if self.has_seen_updates is not None:
+ body["has_seen_updates"] = self.has_seen_updates
+ if self.table_name is not None:
+ body["table_name"] = self.table_name
+ return body
+
+ @classmethod
+ def from_dict(cls, d: Dict[str, Any]) -> TableState:
+ """Deserializes the TableState from a dictionary."""
+ return cls(has_seen_updates=d.get("has_seen_updates", None), table_name=d.get("table_name", None))
+
+
+ @dataclass
+ class TableTriggerState:
+ last_seen_table_states: Optional[List[TableState]] = None
+
+ using_scalable_monitoring: Optional[bool] = None
+ """Indicates whether the trigger is using scalable monitoring."""
+
+ def as_dict(self) -> dict:
+ """Serializes the TableTriggerState into a dictionary suitable for use as a JSON request body."""
+ body = {}
+ if self.last_seen_table_states:
+ body["last_seen_table_states"] = [v.as_dict() for v in self.last_seen_table_states]
+ if self.using_scalable_monitoring is not None:
+ body["using_scalable_monitoring"] = self.using_scalable_monitoring
+ return body
+
+ def as_shallow_dict(self) -> dict:
+ """Serializes the TableTriggerState into a shallow dictionary of its immediate attributes."""
+ body = {}
+ if self.last_seen_table_states:
+ body["last_seen_table_states"] = self.last_seen_table_states
+ if self.using_scalable_monitoring is not None:
+ body["using_scalable_monitoring"] = self.using_scalable_monitoring
+ return body
+
+ @classmethod
+ def from_dict(cls, d: Dict[str, Any]) -> TableTriggerState:
+ """Deserializes the TableTriggerState from a dictionary."""
+ return cls(
+ last_seen_table_states=_repeated_dict(d, "last_seen_table_states", TableState),
+ using_scalable_monitoring=d.get("using_scalable_monitoring", None),
+ )
+
+
  @dataclass
  class TableUpdateTriggerConfiguration:
  condition: Optional[Condition] = None
@@ -7993,11 +8060,15 @@ class TriggerSettings:
  class TriggerStateProto:
  file_arrival: Optional[FileArrivalTriggerState] = None

+ table: Optional[TableTriggerState] = None
+
  def as_dict(self) -> dict:
  """Serializes the TriggerStateProto into a dictionary suitable for use as a JSON request body."""
  body = {}
  if self.file_arrival:
  body["file_arrival"] = self.file_arrival.as_dict()
+ if self.table:
+ body["table"] = self.table.as_dict()
  return body

  def as_shallow_dict(self) -> dict:
@@ -8005,12 +8076,17 @@ class TriggerStateProto:
  body = {}
  if self.file_arrival:
  body["file_arrival"] = self.file_arrival
+ if self.table:
+ body["table"] = self.table
  return body

  @classmethod
  def from_dict(cls, d: Dict[str, Any]) -> TriggerStateProto:
  """Deserializes the TriggerStateProto from a dictionary."""
- return cls(file_arrival=_from_dict(d, "file_arrival", FileArrivalTriggerState))
+ return cls(
+ file_arrival=_from_dict(d, "file_arrival", FileArrivalTriggerState),
+ table=_from_dict(d, "table", TableTriggerState),
+ )


  class TriggerType(Enum):
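
A small round-trip sketch of the new table trigger state, built only from the dataclasses shown above; the payload values are illustrative.

# Hedged sketch: TriggerStateProto.from_dict now picks up a "table" key and
# materializes it as a TableTriggerState with nested TableState entries.
state = TriggerStateProto.from_dict(
    {
        "table": {
            "using_scalable_monitoring": True,
            "last_seen_table_states": [
                {"table_name": "mycatalog.myschema.mytable", "has_seen_updates": False},
            ],
        },
    }
)
assert state.table.using_scalable_monitoring is True
assert state.table.last_seen_table_states[0].table_name == "mycatalog.myschema.mytable"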
@@ -205,6 +205,7 @@ class AiGatewayGuardrailPiiBehavior:
  class AiGatewayGuardrailPiiBehaviorBehavior(Enum):

  BLOCK = "BLOCK"
+ MASK = "MASK"
  NONE = "NONE"


@@ -989,10 +990,13 @@ class DatabricksModelServingConfig:
  @dataclass
  class DataframeSplitInput:
  columns: Optional[List[Any]] = None
+ """Columns array for the dataframe"""

  data: Optional[List[Any]] = None
+ """Data array for the dataframe"""

  index: Optional[List[int]] = None
+ """Index array for the dataframe"""

  def as_dict(self) -> dict:
  """Serializes the DataframeSplitInput into a dictionary suitable for use as a JSON request body."""
@@ -1040,9 +1044,46 @@ class DeleteResponse:
  return cls()


+ @dataclass
+ class EmailNotifications:
+ on_update_failure: Optional[List[str]] = None
+ """A list of email addresses to be notified when an endpoint fails to update its configuration or
+ state."""
+
+ on_update_success: Optional[List[str]] = None
+ """A list of email addresses to be notified when an endpoint successfully updates its configuration
+ or state."""
+
+ def as_dict(self) -> dict:
+ """Serializes the EmailNotifications into a dictionary suitable for use as a JSON request body."""
+ body = {}
+ if self.on_update_failure:
+ body["on_update_failure"] = [v for v in self.on_update_failure]
+ if self.on_update_success:
+ body["on_update_success"] = [v for v in self.on_update_success]
+ return body
+
+ def as_shallow_dict(self) -> dict:
+ """Serializes the EmailNotifications into a shallow dictionary of its immediate attributes."""
+ body = {}
+ if self.on_update_failure:
+ body["on_update_failure"] = self.on_update_failure
+ if self.on_update_success:
+ body["on_update_success"] = self.on_update_success
+ return body
+
+ @classmethod
+ def from_dict(cls, d: Dict[str, Any]) -> EmailNotifications:
+ """Deserializes the EmailNotifications from a dictionary."""
+ return cls(
+ on_update_failure=d.get("on_update_failure", None), on_update_success=d.get("on_update_success", None)
+ )
+
+
  @dataclass
  class EmbeddingsV1ResponseEmbeddingElement:
  embedding: Optional[List[float]] = None
+ """The embedding vector"""

  index: Optional[int] = None
  """The index of the embedding in the response."""
@@ -3261,11 +3302,11 @@ class ServedModelState:

  class ServedModelStateDeployment(Enum):

- ABORTED = "DEPLOYMENT_ABORTED"
- CREATING = "DEPLOYMENT_CREATING"
- FAILED = "DEPLOYMENT_FAILED"
- READY = "DEPLOYMENT_READY"
- RECOVERING = "DEPLOYMENT_RECOVERING"
+ DEPLOYMENT_ABORTED = "DEPLOYMENT_ABORTED"
+ DEPLOYMENT_CREATING = "DEPLOYMENT_CREATING"
+ DEPLOYMENT_FAILED = "DEPLOYMENT_FAILED"
+ DEPLOYMENT_READY = "DEPLOYMENT_READY"
+ DEPLOYMENT_RECOVERING = "DEPLOYMENT_RECOVERING"


  @dataclass
@@ -3544,6 +3585,9 @@ class ServingEndpointDetailed:
  description: Optional[str] = None
  """Description of the serving model"""

+ email_notifications: Optional[EmailNotifications] = None
+ """Email notification settings."""
+
  endpoint_url: Optional[str] = None
  """Endpoint invocation url if route optimization is enabled for endpoint"""

@@ -3592,6 +3636,8 @@ class ServingEndpointDetailed:
  body["data_plane_info"] = self.data_plane_info.as_dict()
  if self.description is not None:
  body["description"] = self.description
+ if self.email_notifications:
+ body["email_notifications"] = self.email_notifications.as_dict()
  if self.endpoint_url is not None:
  body["endpoint_url"] = self.endpoint_url
  if self.id is not None:
@@ -3631,6 +3677,8 @@ class ServingEndpointDetailed:
  body["data_plane_info"] = self.data_plane_info
  if self.description is not None:
  body["description"] = self.description
+ if self.email_notifications:
+ body["email_notifications"] = self.email_notifications
  if self.endpoint_url is not None:
  body["endpoint_url"] = self.endpoint_url
  if self.id is not None:
@@ -3664,6 +3712,7 @@ class ServingEndpointDetailed:
  creator=d.get("creator", None),
  data_plane_info=_from_dict(d, "data_plane_info", ModelDataPlaneInfo),
  description=d.get("description", None),
+ email_notifications=_from_dict(d, "email_notifications", EmailNotifications),
  endpoint_url=d.get("endpoint_url", None),
  id=d.get("id", None),
  last_updated_timestamp=d.get("last_updated_timestamp", None),
@@ -3977,6 +4026,7 @@ class ServingEndpointsAPI:
  budget_policy_id: Optional[str] = None,
  config: Optional[EndpointCoreConfigInput] = None,
  description: Optional[str] = None,
+ email_notifications: Optional[EmailNotifications] = None,
  rate_limits: Optional[List[RateLimit]] = None,
  route_optimized: Optional[bool] = None,
  tags: Optional[List[EndpointTag]] = None,
@@ -3995,6 +4045,8 @@ class ServingEndpointsAPI:
  :param config: :class:`EndpointCoreConfigInput` (optional)
  The core config of the serving endpoint.
  :param description: str (optional)
+ :param email_notifications: :class:`EmailNotifications` (optional)
+ Email notification settings.
  :param rate_limits: List[:class:`RateLimit`] (optional)
  Rate limits to be applied to the serving endpoint. NOTE: this field is deprecated, please use AI
  Gateway to manage rate limits.
@@ -4016,6 +4068,8 @@ class ServingEndpointsAPI:
  body["config"] = config.as_dict()
  if description is not None:
  body["description"] = description
+ if email_notifications is not None:
+ body["email_notifications"] = email_notifications.as_dict()
  if name is not None:
  body["name"] = name
  if rate_limits is not None:
@@ -4044,6 +4098,7 @@ class ServingEndpointsAPI:
  budget_policy_id: Optional[str] = None,
  config: Optional[EndpointCoreConfigInput] = None,
  description: Optional[str] = None,
+ email_notifications: Optional[EmailNotifications] = None,
  rate_limits: Optional[List[RateLimit]] = None,
  route_optimized: Optional[bool] = None,
  tags: Optional[List[EndpointTag]] = None,
@@ -4054,6 +4109,7 @@ class ServingEndpointsAPI:
  budget_policy_id=budget_policy_id,
  config=config,
  description=description,
+ email_notifications=email_notifications,
  name=name,
  rate_limits=rate_limits,
  route_optimized=route_optimized,
@@ -4067,6 +4123,7 @@ class ServingEndpointsAPI:
  *,
  ai_gateway: Optional[AiGatewayConfig] = None,
  budget_policy_id: Optional[str] = None,
+ email_notifications: Optional[EmailNotifications] = None,
  tags: Optional[List[EndpointTag]] = None,
  ) -> Wait[ServingEndpointDetailed]:
  """Create a new PT serving endpoint.
@@ -4080,6 +4137,8 @@ class ServingEndpointsAPI:
  The AI Gateway configuration for the serving endpoint.
  :param budget_policy_id: str (optional)
  The budget policy associated with the endpoint.
+ :param email_notifications: :class:`EmailNotifications` (optional)
+ Email notification settings.
  :param tags: List[:class:`EndpointTag`] (optional)
  Tags to be attached to the serving endpoint and automatically propagated to billing logs.

@@ -4094,6 +4153,8 @@ class ServingEndpointsAPI:
  body["budget_policy_id"] = budget_policy_id
  if config is not None:
  body["config"] = config.as_dict()
+ if email_notifications is not None:
+ body["email_notifications"] = email_notifications.as_dict()
  if name is not None:
  body["name"] = name
  if tags is not None:
@@ -4117,11 +4178,17 @@ class ServingEndpointsAPI:
  *,
  ai_gateway: Optional[AiGatewayConfig] = None,
  budget_policy_id: Optional[str] = None,
+ email_notifications: Optional[EmailNotifications] = None,
  tags: Optional[List[EndpointTag]] = None,
  timeout=timedelta(minutes=20),
  ) -> ServingEndpointDetailed:
  return self.create_provisioned_throughput_endpoint(
- ai_gateway=ai_gateway, budget_policy_id=budget_policy_id, config=config, name=name, tags=tags
+ ai_gateway=ai_gateway,
+ budget_policy_id=budget_policy_id,
+ config=config,
+ email_notifications=email_notifications,
+ name=name,
+ tags=tags,
  ).result(timeout=timeout)

  def delete(self, name: str):
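
A hedged sketch of the new email notification settings on serving endpoints. The w.serving_endpoints accessor and the endpoint name are assumptions; the email_notifications parameter and the EmailNotifications fields come from this diff.

from databricks.sdk import WorkspaceClient
from databricks.sdk.service.serving import EmailNotifications

w = WorkspaceClient()

# Both create() and create_provisioned_throughput_endpoint() now accept
# email_notifications; each field takes a list of email addresses.
notifications = EmailNotifications(
    on_update_failure=["ml-oncall@example.com"],
    on_update_success=["ml-team@example.com"],
)
w.serving_endpoints.create(
    name="my-endpoint",  # hypothetical endpoint name; config omitted for brevity
    email_notifications=notifications,
)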
@@ -4421,10 +4488,10 @@ class ServingEndpointsAPI:
  stream: Optional[bool] = None,
  temperature: Optional[float] = None,
  ) -> QueryEndpointResponse:
- """Query a serving endpoint.
+ """Query a serving endpoint

  :param name: str
- The name of the serving endpoint. This field is required.
+ The name of the serving endpoint. This field is required and is provided via the path parameter.
  :param dataframe_records: List[Any] (optional)
  Pandas Dataframe input in the records orientation.
  :param dataframe_split: :class:`DataframeSplitInput` (optional)
@@ -4445,8 +4512,8 @@
  The max tokens field used ONLY for __completions__ and __chat external & foundation model__ serving
  endpoints. This is an integer and should only be used with other chat/completions query fields.
  :param messages: List[:class:`ChatMessage`] (optional)
- The messages field used ONLY for __chat external & foundation model__ serving endpoints. This is a
- map of strings and should only be used with other chat query fields.
+ The messages field used ONLY for __chat external & foundation model__ serving endpoints. This is an
+ array of ChatMessage objects and should only be used with other chat query fields.
  :param n: int (optional)
  The n (number of candidates) field used ONLY for __completions__ and __chat external & foundation
  model__ serving endpoints. This is an integer between 1 and 5 with a default of 1 and should only be
@@ -4724,10 +4791,10 @@ class ServingEndpointsDataPlaneAPI:
  stream: Optional[bool] = None,
  temperature: Optional[float] = None,
  ) -> QueryEndpointResponse:
- """Query a serving endpoint.
+ """Query a serving endpoint

  :param name: str
- The name of the serving endpoint. This field is required.
+ The name of the serving endpoint. This field is required and is provided via the path parameter.
  :param dataframe_records: List[Any] (optional)
  Pandas Dataframe input in the records orientation.
  :param dataframe_split: :class:`DataframeSplitInput` (optional)
@@ -4748,8 +4815,8 @@
  The max tokens field used ONLY for __completions__ and __chat external & foundation model__ serving
  endpoints. This is an integer and should only be used with other chat/completions query fields.
  :param messages: List[:class:`ChatMessage`] (optional)
- The messages field used ONLY for __chat external & foundation model__ serving endpoints. This is a
- map of strings and should only be used with other chat query fields.
+ The messages field used ONLY for __chat external & foundation model__ serving endpoints. This is an
+ array of ChatMessage objects and should only be used with other chat query fields.
  :param n: int (optional)
  The n (number of candidates) field used ONLY for __completions__ and __chat external & foundation
  model__ serving endpoints. This is an integer between 1 and 5 with a default of 1 and should only be
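
To illustrate the clarified messages parameter, a hedged query sketch: messages is a list of ChatMessage objects, not a map of strings. The endpoint name and the response access pattern are assumptions; only the parameter semantics come from this diff.

from databricks.sdk import WorkspaceClient
from databricks.sdk.service.serving import ChatMessage, ChatMessageRole

w = WorkspaceClient()
response = w.serving_endpoints.query(
    name="databricks-meta-llama-3-3-70b-instruct",  # hypothetical endpoint name
    messages=[ChatMessage(role=ChatMessageRole.USER, content="Summarize this diff.")],
    max_tokens=128,
)
print(response.choices[0].message.content)  # response shape assumed, not shown in this diff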
@@ -727,6 +727,9 @@ class ListProviderShareAssetsResponse:
  notebooks: Optional[List[NotebookFile]] = None
  """The list of notebooks in the share."""

+ share: Optional[Share] = None
+ """The metadata of the share."""
+
  tables: Optional[List[Table]] = None
  """The list of tables in the share."""

@@ -740,6 +743,8 @@
  body["functions"] = [v.as_dict() for v in self.functions]
  if self.notebooks:
  body["notebooks"] = [v.as_dict() for v in self.notebooks]
+ if self.share:
+ body["share"] = self.share.as_dict()
  if self.tables:
  body["tables"] = [v.as_dict() for v in self.tables]
  if self.volumes:
@@ -753,6 +758,8 @@
  body["functions"] = self.functions
  if self.notebooks:
  body["notebooks"] = self.notebooks
+ if self.share:
+ body["share"] = self.share
  if self.tables:
  body["tables"] = self.tables
  if self.volumes:
@@ -765,6 +772,7 @@
  return cls(
  functions=_repeated_dict(d, "functions", DeltaSharingFunction),
  notebooks=_repeated_dict(d, "notebooks", NotebookFile),
+ share=_from_dict(d, "share", Share),
  tables=_repeated_dict(d, "tables", Table),
  volumes=_repeated_dict(d, "volumes", Volume),
  )
@@ -1817,6 +1825,63 @@ class SecurablePropertiesKvPairs:
  return cls(properties=d.get("properties", None))


+ @dataclass
+ class Share:
+ comment: Optional[str] = None
+ """The comment of the share."""
+
+ display_name: Optional[str] = None
+ """The display name of the share. If defined, it will be shown in the UI."""
+
+ id: Optional[str] = None
+
+ name: Optional[str] = None
+
+ tags: Optional[List[catalog.TagKeyValue]] = None
+ """The tags of the share."""
+
+ def as_dict(self) -> dict:
+ """Serializes the Share into a dictionary suitable for use as a JSON request body."""
+ body = {}
+ if self.comment is not None:
+ body["comment"] = self.comment
+ if self.display_name is not None:
+ body["display_name"] = self.display_name
+ if self.id is not None:
+ body["id"] = self.id
+ if self.name is not None:
+ body["name"] = self.name
+ if self.tags:
+ body["tags"] = [v.as_dict() for v in self.tags]
+ return body
+
+ def as_shallow_dict(self) -> dict:
+ """Serializes the Share into a shallow dictionary of its immediate attributes."""
+ body = {}
+ if self.comment is not None:
+ body["comment"] = self.comment
+ if self.display_name is not None:
+ body["display_name"] = self.display_name
+ if self.id is not None:
+ body["id"] = self.id
+ if self.name is not None:
+ body["name"] = self.name
+ if self.tags:
+ body["tags"] = self.tags
+ return body
+
+ @classmethod
+ def from_dict(cls, d: Dict[str, Any]) -> Share:
+ """Deserializes the Share from a dictionary."""
+ return cls(
+ comment=d.get("comment", None),
+ display_name=d.get("display_name", None),
+ id=d.get("id", None),
+ name=d.get("name", None),
+ tags=_repeated_dict(d, "tags", catalog.TagKeyValue),
+ )
+
+
  @dataclass
  class ShareInfo:
  comment: Optional[str] = None
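
A hedged deserialization sketch of the new share metadata on the provider share assets response; the payload values are illustrative.

# Hedged sketch: ListProviderShareAssetsResponse now carries the share's
# metadata alongside its assets, deserialized into the new Share dataclass.
resp = ListProviderShareAssetsResponse.from_dict(
    {
        "share": {
            "name": "quickstart_share",
            "display_name": "Quickstart Share",
            "comment": "Shared sample datasets",
        },
        "tables": [],
    }
)
print(resp.share.display_name)  # -> "Quickstart Share"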
@@ -5017,6 +5017,10 @@ class QueryMetrics:
  projected_remaining_task_total_time_ms: Optional[int] = None
  """projected remaining work to be done aggregated across all stages in the query, in milliseconds"""

+ projected_remaining_wallclock_time_ms: Optional[int] = None
+ """projected lower bound on remaining total task time based on
+ projected_remaining_task_total_time_ms / maximum concurrency"""
+
  provisioning_queue_start_timestamp: Optional[int] = None
  """Timestamp of when the query was enqueued waiting for a cluster to be provisioned for the
  warehouse. This field is optional and will not appear if the query skipped the provisioning
@@ -5102,6 +5106,8 @@ class QueryMetrics:
  body["photon_total_time_ms"] = self.photon_total_time_ms
  if self.projected_remaining_task_total_time_ms is not None:
  body["projected_remaining_task_total_time_ms"] = self.projected_remaining_task_total_time_ms
+ if self.projected_remaining_wallclock_time_ms is not None:
+ body["projected_remaining_wallclock_time_ms"] = self.projected_remaining_wallclock_time_ms
  if self.provisioning_queue_start_timestamp is not None:
  body["provisioning_queue_start_timestamp"] = self.provisioning_queue_start_timestamp
  if self.pruned_bytes is not None:
@@ -5161,6 +5167,8 @@ class QueryMetrics:
  body["photon_total_time_ms"] = self.photon_total_time_ms
  if self.projected_remaining_task_total_time_ms is not None:
  body["projected_remaining_task_total_time_ms"] = self.projected_remaining_task_total_time_ms
+ if self.projected_remaining_wallclock_time_ms is not None:
+ body["projected_remaining_wallclock_time_ms"] = self.projected_remaining_wallclock_time_ms
  if self.provisioning_queue_start_timestamp is not None:
  body["provisioning_queue_start_timestamp"] = self.provisioning_queue_start_timestamp
  if self.pruned_bytes is not None:
@@ -5215,6 +5223,7 @@ class QueryMetrics:
  overloading_queue_start_timestamp=d.get("overloading_queue_start_timestamp", None),
  photon_total_time_ms=d.get("photon_total_time_ms", None),
  projected_remaining_task_total_time_ms=d.get("projected_remaining_task_total_time_ms", None),
+ projected_remaining_wallclock_time_ms=d.get("projected_remaining_wallclock_time_ms", None),
  provisioning_queue_start_timestamp=d.get("provisioning_queue_start_timestamp", None),
  pruned_bytes=d.get("pruned_bytes", None),
  pruned_files_count=d.get("pruned_files_count", None),
databricks/sdk/version.py CHANGED
@@ -1 +1 @@
- __version__ = "0.60.0"
+ __version__ = "0.62.0"
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: databricks-sdk
- Version: 0.60.0
+ Version: 0.62.0
  Summary: Databricks SDK for Python (Beta)
  Project-URL: Documentation, https://databricks-sdk-py.readthedocs.io
  Keywords: databricks,sdk