databricks-sdk 0.71.0__py3-none-any.whl → 0.72.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.


@@ -711,7 +711,7 @@ class WorkspaceClient:
 
     @property
     def pipelines(self) -> pkg_pipelines.PipelinesAPI:
-        """The Delta Live Tables API allows you to create, edit, delete, start, and view details about pipelines."""
+        """The Lakeflow Spark Declarative Pipelines API allows you to create, edit, delete, start, and view details about pipelines."""
        return self._pipelines
 
    @property
@@ -33,7 +33,7 @@ from requests import RequestException
 from .._base_client import _BaseClient, _RawResponse, _StreamingResponse
 from .._property import _cached_property
 from ..config import Config
-from ..errors import AlreadyExists, NotFound, PermissionDenied
+from ..errors import AlreadyExists, InternalError, NotFound, PermissionDenied
 from ..errors.mapper import _error_mapper
 from ..retries import retried
 from ..service import files
@@ -1134,7 +1134,9 @@ class FilesExt(files.FilesAPI):
             f"Upload context: part_size={ctx.part_size}, batch_size={ctx.batch_size}, content_length={ctx.content_length}"
         )
 
-        if ctx.use_parallel:
+        if ctx.use_parallel and (
+            ctx.content_length is None or ctx.content_length >= self._config.files_ext_multipart_upload_min_stream_size
+        ):
             self._parallel_upload_from_stream(ctx, contents)
             return UploadStreamResult()
         elif ctx.content_length is not None:
@@ -1206,7 +1208,7 @@ class FilesExt(files.FilesAPI):
             use_parallel=use_parallel,
             parallelism=parallelism,
         )
-        if ctx.use_parallel:
+        if ctx.use_parallel and ctx.content_length >= self._config.files_ext_multipart_upload_min_stream_size:
             self._parallel_upload_from_file(ctx)
             return UploadFileResult()
         else:
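
Both changes above make the parallel path conditional on size: a stream or file smaller than files_ext_multipart_upload_min_stream_size now takes the regular sequential upload even when parallel uploads are requested. A minimal caller-side sketch, assuming the public upload method on WorkspaceClient.files exposes the use_parallel and parallelism arguments that these hunks forward into the upload context (the volume path is a placeholder):

    import io
    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()

    # Payloads below the configured threshold are sent in a single request;
    # larger ones are split into parts and uploaded in parallel.
    payload = io.BytesIO(b"small,csv,row\n")
    w.files.upload(
        "/Volumes/main/default/raw/example.csv",  # placeholder path
        payload,
        overwrite=True,
        use_parallel=True,
        parallelism=4,
    )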
@@ -1459,8 +1461,9 @@ class FilesExt(files.FilesAPI):
         # Do the first part read ahead
         pre_read_buffer = content.read(ctx.part_size)
         if not pre_read_buffer:
-            self._complete_multipart_upload(ctx, {}, session_token)
-            return
+            raise FallbackToUploadUsingFilesApi(
+                b"", "Falling back to single-shot upload with Files API due to empty input stream"
+            )
         try:
             etag = self._do_upload_one_part(
                 ctx, cloud_provider_session, 1, 0, len(pre_read_buffer), session_token, BytesIO(pre_read_buffer)
@@ -1650,6 +1653,13 @@ class FilesExt(files.FilesAPI):
                 raise FallbackToUploadUsingFilesApi(None, "Presigned URLs are disabled")
             else:
                 raise e from None
+        except InternalError as e:
+            if self._is_presigned_urls_network_zone_error(e):
+                raise FallbackToUploadUsingFilesApi(
+                    None, "Presigned URLs are not supported in the current network zone"
+                )
+            else:
+                raise e from None
 
         upload_part_urls = upload_part_urls_response.get("upload_part_urls", [])
         if len(upload_part_urls) == 0:
@@ -1760,6 +1770,13 @@ class FilesExt(files.FilesAPI):
                 raise FallbackToUploadUsingFilesApi(buffer, "Presigned URLs are disabled")
             else:
                 raise e from None
+        except InternalError as e:
+            if chunk_offset == 0 and self._is_presigned_urls_network_zone_error(e):
+                raise FallbackToUploadUsingFilesApi(
+                    buffer, "Presigned URLs are not supported in the current network zone"
+                )
+            else:
+                raise e from None
 
         upload_part_urls = upload_part_urls_response.get("upload_part_urls", [])
         if len(upload_part_urls) == 0:
@@ -1917,6 +1934,13 @@ class FilesExt(files.FilesAPI):
                 return True
         return False
 
+    def _is_presigned_urls_network_zone_error(self, e: InternalError) -> bool:
+        error_infos = e.get_error_info()
+        for error_info in error_infos:
+            if error_info.reason == "FILES_API_REQUESTER_NETWORK_ZONE_UNKNOWN":
+                return True
+        return False
+
     def _perform_resumable_upload(
         self,
         ctx: _UploadContext,
@@ -1966,6 +1990,13 @@ class FilesExt(files.FilesAPI):
                 raise FallbackToUploadUsingFilesApi(pre_read_buffer, "Presigned URLs are disabled")
             else:
                 raise e from None
+        except InternalError as e:
+            if self._is_presigned_urls_network_zone_error(e):
+                raise FallbackToUploadUsingFilesApi(
+                    pre_read_buffer, "Presigned URLs are not supported in the current network zone"
+                )
+            else:
+                raise e from None
 
         resumable_upload_url_node = resumable_upload_url_response.get("resumable_upload_url")
         if not resumable_upload_url_node:
@@ -2350,6 +2381,11 @@ class FilesExt(files.FilesAPI):
                 raise FallbackToDownloadUsingFilesApi(f"Presigned URLs are disabled")
             else:
                 raise e from None
+        except InternalError as e:
+            if self._is_presigned_urls_network_zone_error(e):
+                raise FallbackToDownloadUsingFilesApi("Presigned URLs are not supported in the current network zone")
+            else:
+                raise e from None
 
     def _init_download_response_presigned_api(self, file_path: str, added_headers: dict[str, str]) -> DownloadResponse:
         """
@@ -11210,6 +11210,8 @@ class CatalogsAPI:
             "Accept": "application/json",
         }
 
+        if "max_results" not in query:
+            query["max_results"] = 0
         while True:
             json = self._api.do("GET", "/api/2.1/unity-catalog/catalogs", query=query, headers=headers)
             if "catalogs" in json:
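
The same guard is added to each of the Unity Catalog list methods in the hunks that follow: when the caller does not pass max_results, the SDK now sends max_results=0, which asks the server to choose its own page size, and the client keeps following next_page_token until the listing is exhausted. Caller code is unchanged; a short sketch:

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()

    # No max_results given: the SDK now sends max_results=0 and paginates.
    for catalog in w.catalogs.list():
        print(catalog.name)

    # An explicit max_results still controls the page size per request.
    for schema in w.schemas.list(catalog_name="main", max_results=50):
        print(schema.full_name)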
@@ -11404,6 +11406,8 @@ class ConnectionsAPI:
             "Accept": "application/json",
         }
 
+        if "max_results" not in query:
+            query["max_results"] = 0
         while True:
             json = self._api.do("GET", "/api/2.1/unity-catalog/connections", query=query, headers=headers)
             if "connections" in json:
@@ -12316,6 +12320,8 @@ class ExternalLocationsAPI:
             "Accept": "application/json",
         }
 
+        if "max_results" not in query:
+            query["max_results"] = 0
         while True:
             json = self._api.do("GET", "/api/2.1/unity-catalog/external-locations", query=query, headers=headers)
             if "external_locations" in json:
@@ -12694,6 +12700,8 @@ class FunctionsAPI:
             "Accept": "application/json",
         }
 
+        if "max_results" not in query:
+            query["max_results"] = 0
         while True:
             json = self._api.do("GET", "/api/2.1/unity-catalog/functions", query=query, headers=headers)
             if "functions" in json:
@@ -13053,6 +13061,8 @@ class MetastoresAPI:
             "Accept": "application/json",
         }
 
+        if "max_results" not in query:
+            query["max_results"] = 0
         while True:
             json = self._api.do("GET", "/api/2.1/unity-catalog/metastores", query=query, headers=headers)
             if "metastores" in json:
@@ -14894,6 +14904,8 @@ class SchemasAPI:
             "Accept": "application/json",
         }
 
+        if "max_results" not in query:
+            query["max_results"] = 0
         while True:
             json = self._api.do("GET", "/api/2.1/unity-catalog/schemas", query=query, headers=headers)
             if "schemas" in json:
@@ -15122,6 +15134,8 @@ class StorageCredentialsAPI:
             "Accept": "application/json",
         }
 
+        if "max_results" not in query:
+            query["max_results"] = 0
         while True:
             json = self._api.do("GET", "/api/2.1/unity-catalog/storage-credentials", query=query, headers=headers)
             if "storage_credentials" in json:
@@ -15381,6 +15395,8 @@ class SystemSchemasAPI:
             "Accept": "application/json",
         }
 
+        if "max_results" not in query:
+            query["max_results"] = 0
         while True:
             json = self._api.do(
                 "GET", f"/api/2.1/unity-catalog/metastores/{metastore_id}/systemschemas", query=query, headers=headers
@@ -15714,6 +15730,8 @@ class TablesAPI:
             "Accept": "application/json",
         }
 
+        if "max_results" not in query:
+            query["max_results"] = 0
         while True:
             json = self._api.do("GET", "/api/2.1/unity-catalog/tables", query=query, headers=headers)
             if "tables" in json:
@@ -16247,6 +16265,8 @@ class WorkspaceBindingsAPI:
             "Accept": "application/json",
         }
 
+        if "max_results" not in query:
+            query["max_results"] = 0
         while True:
             json = self._api.do(
                 "GET",
@@ -3363,6 +3363,7 @@ class EventDetailsCause(Enum):
 
     AUTORECOVERY = "AUTORECOVERY"
     AUTOSCALE = "AUTOSCALE"
+    AUTOSCALE_V2 = "AUTOSCALE_V2"
     REPLACE_BAD_NODES = "REPLACE_BAD_NODES"
     USER_REQUEST = "USER_REQUEST"
 
@@ -1189,6 +1189,7 @@ class MessageErrorType(Enum):
     TOO_MANY_TABLES_EXCEPTION = "TOO_MANY_TABLES_EXCEPTION"
     UNEXPECTED_REPLY_PROCESS_EXCEPTION = "UNEXPECTED_REPLY_PROCESS_EXCEPTION"
     UNKNOWN_AI_MODEL = "UNKNOWN_AI_MODEL"
+    UNSUPPORTED_CONVERSATION_TYPE_EXCEPTION = "UNSUPPORTED_CONVERSATION_TYPE_EXCEPTION"
     WAREHOUSE_ACCESS_MISSING_EXCEPTION = "WAREHOUSE_ACCESS_MISSING_EXCEPTION"
     WAREHOUSE_NOT_FOUND_EXCEPTION = "WAREHOUSE_NOT_FOUND_EXCEPTION"
 
@@ -6036,8 +6036,11 @@ class Source(Enum):
 @dataclass
 class SparkJarTask:
     jar_uri: Optional[str] = None
-    """Deprecated since 04/2016. Provide a `jar` through the `libraries` field instead. For an example,
-    see :method:jobs/create."""
+    """Deprecated since 04/2016. For classic compute, provide a `jar` through the `libraries` field
+    instead. For serverless compute, provide a `jar` though the `java_dependencies` field inside the
+    `environments` list.
+
+    See the examples of classic and serverless compute usage at the top of the page."""
 
     main_class_name: Optional[str] = None
     """The full name of the class containing the main method to be executed. This class must be
@@ -7240,6 +7243,10 @@ class TableTriggerState:
 
 @dataclass
 class TableUpdateTriggerConfiguration:
+    table_names: List[str]
+    """A list of tables to monitor for changes. The table name must be in the format
+    `catalog_name.schema_name.table_name`."""
+
     condition: Optional[Condition] = None
     """The table(s) condition based on which to trigger a job run."""
 
@@ -7247,10 +7254,6 @@ class TableUpdateTriggerConfiguration:
     """If set, the trigger starts a run only after the specified amount of time has passed since the
     last time the trigger fired. The minimum allowed value is 60 seconds."""
 
-    table_names: Optional[List[str]] = None
-    """A list of tables to monitor for changes. The table name must be in the format
-    `catalog_name.schema_name.table_name`."""
-
     wait_after_last_change_seconds: Optional[int] = None
     """If set, the trigger starts a run only after no table updates have occurred for the specified
     time and can be used to wait for a series of table updates before triggering a run. The minimum
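
With the two hunks above, table_names moves from an optional field to the first required field of TableUpdateTriggerConfiguration, so constructing the dataclass without it now raises a TypeError. A minimal sketch of the new shape (the table name is a placeholder):

    from databricks.sdk.service.jobs import Condition, TableUpdateTriggerConfiguration

    trigger = TableUpdateTriggerConfiguration(
        table_names=["main.billing.usage_events"],  # placeholder table
        condition=Condition.ALL_UPDATED,
        wait_after_last_change_seconds=120,
    )
    print(trigger.as_dict())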
@@ -201,6 +201,31 @@ class ApproveTransitionRequestResponse:
         return cls(activity=_from_dict(d, "activity", Activity))
 
 
+@dataclass
+class BatchCreateMaterializedFeaturesResponse:
+    materialized_features: Optional[List[MaterializedFeature]] = None
+    """The created materialized features with assigned IDs."""
+
+    def as_dict(self) -> dict:
+        """Serializes the BatchCreateMaterializedFeaturesResponse into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.materialized_features:
+            body["materialized_features"] = [v.as_dict() for v in self.materialized_features]
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the BatchCreateMaterializedFeaturesResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.materialized_features:
+            body["materialized_features"] = self.materialized_features
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, Any]) -> BatchCreateMaterializedFeaturesResponse:
+        """Deserializes the BatchCreateMaterializedFeaturesResponse from a dictionary."""
+        return cls(materialized_features=_repeated_dict(d, "materialized_features", MaterializedFeature))
+
+
 class CommentActivityAction(Enum):
     """An action that a user (with sufficient permissions) could take on an activity or comment.
 
@@ -423,6 +448,31 @@ class CreateLoggedModelResponse:
         return cls(model=_from_dict(d, "model", LoggedModel))
 
 
+@dataclass
+class CreateMaterializedFeatureRequest:
+    materialized_feature: MaterializedFeature
+    """The materialized feature to create."""
+
+    def as_dict(self) -> dict:
+        """Serializes the CreateMaterializedFeatureRequest into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.materialized_feature:
+            body["materialized_feature"] = self.materialized_feature.as_dict()
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreateMaterializedFeatureRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.materialized_feature:
+            body["materialized_feature"] = self.materialized_feature
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, Any]) -> CreateMaterializedFeatureRequest:
+        """Deserializes the CreateMaterializedFeatureRequest from a dictionary."""
+        return cls(materialized_feature=_from_dict(d, "materialized_feature", MaterializedFeature))
+
+
 @dataclass
 class CreateModelResponse:
     registered_model: Optional[Model] = None
@@ -1334,6 +1384,9 @@ class Feature:
     filter_condition: Optional[str] = None
     """The filter condition applied to the source data before aggregation."""
 
+    lineage_context: Optional[LineageContext] = None
+    """Lineage context information for this feature."""
+
     def as_dict(self) -> dict:
         """Serializes the Feature into a dictionary suitable for use as a JSON request body."""
         body = {}
@@ -1347,6 +1400,8 @@ class Feature:
             body["function"] = self.function.as_dict()
         if self.inputs:
             body["inputs"] = [v for v in self.inputs]
+        if self.lineage_context:
+            body["lineage_context"] = self.lineage_context.as_dict()
         if self.source:
             body["source"] = self.source.as_dict()
         if self.time_window:
@@ -1366,6 +1421,8 @@ class Feature:
             body["function"] = self.function
         if self.inputs:
             body["inputs"] = self.inputs
+        if self.lineage_context:
+            body["lineage_context"] = self.lineage_context
         if self.source:
             body["source"] = self.source
         if self.time_window:
@@ -1381,6 +1438,7 @@ class Feature:
             full_name=d.get("full_name", None),
             function=_from_dict(d, "function", Function),
             inputs=d.get("inputs", None),
+            lineage_context=_from_dict(d, "lineage_context", LineageContext),
             source=_from_dict(d, "source", DataSource),
             time_window=_from_dict(d, "time_window", TimeWindow),
         )
@@ -2192,6 +2250,38 @@ class InputTag:
         return cls(key=d.get("key", None), value=d.get("value", None))
 
 
+@dataclass
+class JobContext:
+    job_id: Optional[int] = None
+    """The job ID where this API invoked."""
+
+    job_run_id: Optional[int] = None
+    """The job run ID where this API was invoked."""
+
+    def as_dict(self) -> dict:
+        """Serializes the JobContext into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.job_id is not None:
+            body["job_id"] = self.job_id
+        if self.job_run_id is not None:
+            body["job_run_id"] = self.job_run_id
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the JobContext into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.job_id is not None:
+            body["job_id"] = self.job_id
+        if self.job_run_id is not None:
+            body["job_run_id"] = self.job_run_id
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, Any]) -> JobContext:
+        """Deserializes the JobContext from a dictionary."""
+        return cls(job_id=d.get("job_id", None), job_run_id=d.get("job_run_id", None))
+
+
 @dataclass
 class JobSpec:
     job_id: str
@@ -2269,6 +2359,42 @@ class JobSpecWithoutSecret:
         return cls(job_id=d.get("job_id", None), workspace_url=d.get("workspace_url", None))
 
 
+@dataclass
+class LineageContext:
+    """Lineage context information for tracking where an API was invoked. This will allow us to track
+    lineage, which currently uses caller entity information for use across the Lineage Client and
+    Observability in Lumberjack."""
+
+    job_context: Optional[JobContext] = None
+    """Job context information including job ID and run ID."""
+
+    notebook_id: Optional[int] = None
+    """The notebook ID where this API was invoked."""
+
+    def as_dict(self) -> dict:
+        """Serializes the LineageContext into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.job_context:
+            body["job_context"] = self.job_context.as_dict()
+        if self.notebook_id is not None:
+            body["notebook_id"] = self.notebook_id
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the LineageContext into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.job_context:
+            body["job_context"] = self.job_context
+        if self.notebook_id is not None:
+            body["notebook_id"] = self.notebook_id
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, Any]) -> LineageContext:
+        """Deserializes the LineageContext from a dictionary."""
+        return cls(job_context=_from_dict(d, "job_context", JobContext), notebook_id=d.get("notebook_id", None))
+
+
 @dataclass
 class LinkedFeature:
     """Feature for model version. ([ML-57150] Renamed from Feature to LinkedFeature)"""
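
The new JobContext and LineageContext dataclasses follow the same serialization conventions as the rest of service/ml.py. A quick round trip through their dict form, based only on the definitions added above:

    from databricks.sdk.service.ml import JobContext, LineageContext

    ctx = LineageContext(
        job_context=JobContext(job_id=123, job_run_id=456),
        notebook_id=None,
    )
    payload = ctx.as_dict()  # {"job_context": {"job_id": 123, "job_run_id": 456}}
    restored = LineageContext.from_dict(payload)
    assert restored.job_context.job_id == 123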
@@ -6844,6 +6970,30 @@ class FeatureEngineeringAPI:
     def __init__(self, api_client):
         self._api = api_client
 
+    def batch_create_materialized_features(
+        self, requests: List[CreateMaterializedFeatureRequest]
+    ) -> BatchCreateMaterializedFeaturesResponse:
+        """Batch create materialized features.
+
+        :param requests: List[:class:`CreateMaterializedFeatureRequest`]
+          The requests to create materialized features.
+
+        :returns: :class:`BatchCreateMaterializedFeaturesResponse`
+        """
+
+        body = {}
+        if requests is not None:
+            body["requests"] = [v.as_dict() for v in requests]
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
+
+        res = self._api.do(
+            "POST", "/api/2.0/feature-engineering/materialized-features:batchCreate", body=body, headers=headers
+        )
+        return BatchCreateMaterializedFeaturesResponse.from_dict(res)
+
     def create_feature(self, feature: Feature) -> Feature:
         """Create a Feature.
 
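
The new batch_create_materialized_features method wraps a single POST to the :batchCreate endpoint. A hedged sketch of calling it, assuming the FeatureEngineeringAPI is exposed as w.feature_engineering like the other service clients; the MaterializedFeature payload is left empty because its fields are defined elsewhere in service/ml.py and are not shown in this diff:

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service.ml import CreateMaterializedFeatureRequest, MaterializedFeature

    w = WorkspaceClient()

    # Hypothetical: assumes MaterializedFeature's fields are all optional; populate
    # the fields your workspace requires before sending the request.
    feature = MaterializedFeature()
    response = w.feature_engineering.batch_create_materialized_features(
        requests=[CreateMaterializedFeatureRequest(materialized_feature=feature)]
    )
    for created in response.materialized_features or []:
        print(created)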
@@ -556,8 +556,8 @@ class IngestionGatewayPipelineDefinition:
 
     gateway_storage_name: Optional[str] = None
     """Optional. The Unity Catalog-compatible name for the gateway storage location. This is the
-    destination to use for the data that is extracted by the gateway. Delta Live Tables system will
-    automatically create the storage location under the catalog and schema."""
+    destination to use for the data that is extracted by the gateway. Spark Declarative Pipelines
+    system will automatically create the storage location under the catalog and schema."""
 
     def as_dict(self) -> dict:
         """Serializes the IngestionGatewayPipelineDefinition into a dictionary suitable for use as a JSON request body."""
@@ -832,6 +832,9 @@ class IngestionSourceType(Enum):
     DYNAMICS365 = "DYNAMICS365"
     FOREIGN_CATALOG = "FOREIGN_CATALOG"
     GA4_RAW_DATA = "GA4_RAW_DATA"
+    GOOGLE_ADS = "GOOGLE_ADS"
+    GUIDEWIRE = "GUIDEWIRE"
+    HUBSPOT = "HUBSPOT"
     MANAGED_POSTGRESQL = "MANAGED_POSTGRESQL"
     META_MARKETING = "META_MARKETING"
     MYSQL = "MYSQL"
@@ -840,12 +843,16 @@ class IngestionSourceType(Enum):
     POSTGRESQL = "POSTGRESQL"
     REDSHIFT = "REDSHIFT"
     SALESFORCE = "SALESFORCE"
+    SALESFORCE_MARKETING_CLOUD = "SALESFORCE_MARKETING_CLOUD"
     SERVICENOW = "SERVICENOW"
     SHAREPOINT = "SHAREPOINT"
     SQLDW = "SQLDW"
     SQLSERVER = "SQLSERVER"
     TERADATA = "TERADATA"
+    TIKTOK_ADS = "TIKTOK_ADS"
+    WORKDAY_HCM = "WORKDAY_HCM"
     WORKDAY_RAAS = "WORKDAY_RAAS"
+    ZENDESK = "ZENDESK"
 
 
 @dataclass
@@ -2972,8 +2979,8 @@ class TableSpecificConfig:
     """The SCD type to use to ingest the table."""
 
     sequence_by: Optional[List[str]] = None
-    """The column names specifying the logical order of events in the source data. Delta Live Tables
-    uses this sequencing to handle change events that arrive out of order."""
+    """The column names specifying the logical order of events in the source data. Spark Declarative
+    Pipelines uses this sequencing to handle change events that arrive out of order."""
 
     workday_report_parameters: Optional[IngestionPipelineDefinitionWorkdayReportParameters] = None
     """(Optional) Additional custom parameters for Workday Report"""
@@ -3247,16 +3254,17 @@ class UpdateStateInfoState(Enum):
 
 
 class PipelinesAPI:
-    """The Delta Live Tables API allows you to create, edit, delete, start, and view details about pipelines.
+    """The Lakeflow Spark Declarative Pipelines API allows you to create, edit, delete, start, and view details
+    about pipelines.
 
-    Delta Live Tables is a framework for building reliable, maintainable, and testable data processing
-    pipelines. You define the transformations to perform on your data, and Delta Live Tables manages task
-    orchestration, cluster management, monitoring, data quality, and error handling.
+    Spark Declarative Pipelines is a framework for building reliable, maintainable, and testable data
+    processing pipelines. You define the transformations to perform on your data, and Spark Declarative
+    Pipelines manages task orchestration, cluster management, monitoring, data quality, and error handling.
 
-    Instead of defining your data pipelines using a series of separate Apache Spark tasks, Delta Live Tables
-    manages how your data is transformed based on a target schema you define for each processing step. You can
-    also enforce data quality with Delta Live Tables expectations. Expectations allow you to define expected
-    data quality and specify how to handle records that fail those expectations."""
+    Instead of defining your data pipelines using a series of separate Apache Spark tasks, Spark Declarative
+    Pipelines manages how your data is transformed based on a target schema you define for each processing
+    step. You can also enforce data quality with Spark Declarative Pipelines expectations. Expectations allow
+    you to define expected data quality and specify how to handle records that fail those expectations."""
 
     def __init__(self, api_client):
         self._api = api_client
@@ -3619,7 +3627,7 @@ class PipelinesAPI:
         order_by: Optional[List[str]] = None,
         page_token: Optional[str] = None,
     ) -> Iterator[PipelineStateInfo]:
-        """Lists pipelines defined in the Delta Live Tables system.
+        """Lists pipelines defined in the Spark Declarative Pipelines system.
 
         :param filter: str (optional)
           Select a subset of results based on the specified criteria. The supported filters are:
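
The Delta Live Tables to Lakeflow Spark Declarative Pipelines wording change in the hunks above is documentation-only; the PipelinesAPI surface is unchanged. Listing pipelines still works the same way, for example:

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()

    # list_pipelines pages through all pipelines visible to the caller.
    for p in w.pipelines.list_pipelines(max_results=25):
        print(p.pipeline_id, p.name, p.state)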
@@ -598,6 +598,8 @@ class EndpointStatusState(Enum):
     OFFLINE = "OFFLINE"
     ONLINE = "ONLINE"
     PROVISIONING = "PROVISIONING"
+    RED_STATE = "RED_STATE"
+    YELLOW_STATE = "YELLOW_STATE"
 
 
 class EndpointType(Enum):
@@ -1756,7 +1758,7 @@ class VectorSearchIndexesAPI:
         :param query_text: str (optional)
           Query text. Required for Delta Sync Index using model endpoint.
         :param query_type: str (optional)
-          The query type to use. Choices are `ANN` and `HYBRID`. Defaults to `ANN`.
+          The query type to use. Choices are `ANN` and `HYBRID` and `FULL_TEXT`. Defaults to `ANN`.
         :param query_vector: List[float] (optional)
           Query vector. Required for Direct Vector Access Index and Delta Sync Index using self-managed
           vectors.
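
FULL_TEXT joins ANN and HYBRID as an accepted query_type value. A sketch of a full-text query, assuming an existing index (the index name and columns are placeholders):

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()

    results = w.vector_search_indexes.query_index(
        index_name="main.default.docs_index",  # placeholder index
        columns=["id", "text"],                # placeholder columns
        query_text="cluster autoscaling",
        query_type="FULL_TEXT",
    )
    print(results.result.row_count if results.result else 0)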
databricks/sdk/version.py CHANGED
@@ -1 +1 @@
-__version__ = "0.71.0"
+__version__ = "0.72.0"
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: databricks-sdk
-Version: 0.71.0
+Version: 0.72.0
 Summary: Databricks SDK for Python (Beta)
 Project-URL: Documentation, https://databricks-sdk-py.readthedocs.io
 Keywords: databricks,sdk
@@ -1,5 +1,5 @@
 databricks/__init__.py,sha256=CF2MJcZFwbpn9TwQER8qnCDhkPooBGQNVkX4v7g6p3g,537
-databricks/sdk/__init__.py,sha256=kbsENXhiWRFFtWP20CvtkKgwgeSDL1ZT965l8FCgfqE,67676
+databricks/sdk/__init__.py,sha256=7JlUVvMtvG1WUYyJFHvuf9rXVmjjQAxyuT3bie_QpGA,67695
 databricks/sdk/_base_client.py,sha256=EjFRrACf_uj1KFVDBx3pIkn2HFnFpXjW2tvCj70nQPY,15914
 databricks/sdk/_property.py,sha256=ccbxhkXZmZOxbx2sqKMTzhVZDuvWXG0WPHFRgac6JAM,1701
 databricks/sdk/azure.py,sha256=sN_ARpmP9h1JovtiHIsDLtrVQP_K11eNDDtHS6PD19k,1015
@@ -17,7 +17,7 @@ databricks/sdk/oidc_token_supplier.py,sha256=2cpcmcfVNF6KXD9mBXFWV9IpH2gNTF7ITUB
 databricks/sdk/py.typed,sha256=pSvaHpbY1UPNEXyVFUjlgBhjPFZMmVC_UNrPC7eMOHI,74
 databricks/sdk/retries.py,sha256=dZW6kz-6NCi-lI5N3bcTKpZjxhi4-WCrWgbFhEIEt1k,5715
 databricks/sdk/useragent.py,sha256=boEgzTv-Zmo6boipZKjSopNy0CXg4GShC1_lTKpJgqs,7361
-databricks/sdk/version.py,sha256=mNejnumd8tVE1XGXnMJ_BwjG-FRuO9U5tz0dQVo3UpU,23
+databricks/sdk/version.py,sha256=oLwc2jwvoU9UISGgw7ct8h1SMKWzWPg21DSMF9jrXj0,23
 databricks/sdk/_widgets/__init__.py,sha256=VhI-VvLlr3rKUT1nbROslHJIbmZX_tPJ9rRhrdFsYUA,2811
 databricks/sdk/_widgets/default_widgets_utils.py,sha256=_hwCbptLbRzWEmknco0H1wQNAYcuy2pjFO9NiRbvFeo,1127
 databricks/sdk/_widgets/ipywidgets_utils.py,sha256=mg3rEPG9z76e0yVjGgcLybUvd_zSuN5ziGeKiZ-c8Ew,2927
@@ -39,7 +39,7 @@ databricks/sdk/logger/__init__.py,sha256=0_sSQfDkaFGqMHZUVw-g_Ax-RFmOv0Z6NjxCVAe
 databricks/sdk/logger/round_trip_logger.py,sha256=H2YhxUPZpWSwAwCdfa03D5vRUFxsV73bbM8eF_l9QrQ,4873
 databricks/sdk/mixins/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 databricks/sdk/mixins/compute.py,sha256=76Fhc7cDQfOf2IHkPtHZpAnxNfrSLMKl9dbQ6KswXaM,11066
-databricks/sdk/mixins/files.py,sha256=xZHY6XwLEgTnpaIfi34f6QhMqnlc5GTur-NEPxT_rcc,105425
+databricks/sdk/mixins/files.py,sha256=C1QyRxQmaWrgKs79YTLgQ8OCK2uOycnrdVR6NJMDuF4,107307
 databricks/sdk/mixins/files_utils.py,sha256=mF1FMuPj3-m5qdOrGTolVM8rCFhLyGYw_LBktszP_Ls,11198
 databricks/sdk/mixins/jobs.py,sha256=4ywi0dZ8mEN8KZWLmZBFfdbejTP6JATvf9wCCRkdJBw,11558
 databricks/sdk/mixins/open_ai_client.py,sha256=Tur77AvlUJd-gDSfLb1mtMJhWuTwp1ufon9-7HGGOnQ,7969
@@ -52,20 +52,20 @@ databricks/sdk/service/_internal.py,sha256=DQ-S6QxJUzzQO-fEmgs3z24_cKsFlT0CPDVTP
 databricks/sdk/service/agentbricks.py,sha256=ZFFNgjShAwfkZXeHx4mQkAqyRz1p9pnkeLZ5yyfukFY,11730
 databricks/sdk/service/apps.py,sha256=A2eXbDALuZj5xfvTH6VRdNJCPGzTLpGl2lbnOg__M5U,87599
 databricks/sdk/service/billing.py,sha256=LdDLZxY6Fs9wZ8CqNv6Q7hGRDiKgaxAgYcJApy33s3s,91031
-databricks/sdk/service/catalog.py,sha256=nJvbwNRhdDOcks0TX25MNVTbYlz0RT2SOrDCyC_cYRk,684835
+databricks/sdk/service/catalog.py,sha256=6KBlcoDhCayyazwaa46sRVESPVuWs8hUr3ZgegJyA_8,685595
 databricks/sdk/service/cleanrooms.py,sha256=bCcvIX5akA_DrQOQD1HQU5hKri4h34rt7PY1qNVL4uY,81237
-databricks/sdk/service/compute.py,sha256=yqITP39mciwxY3Sh5lA9ZcBsLRYcvRhYDWp37ExoXKo,470697
-databricks/sdk/service/dashboards.py,sha256=Nlic3DOtSAGb_WnQBGz2rBxlrB5NurXeRVm8b63MN_I,102961
+databricks/sdk/service/compute.py,sha256=VFKInOascsYKbwhELXq4KbvVB9YZNoHeDy3UqnxqIa8,470731
+databricks/sdk/service/dashboards.py,sha256=0RJfdIsvRYe_gyp6zW38bG3_nhznP4VIBEJ9Q7B2SF8,103049
 databricks/sdk/service/database.py,sha256=t5HO0CelGai1KFK1KQKxGeVJ2UAgt2eYDa0ViLkQxTI,89620
 databricks/sdk/service/dataquality.py,sha256=NzqJ0tfBsQJ8r5E9RJHvGEOOnI9hhOg9gOAaD9qvfnI,57461
 databricks/sdk/service/files.py,sha256=_o3sZUcU0UQf7-jvH1DXuXVXIrdiBLNRBSwFTmazbAI,36083
 databricks/sdk/service/iam.py,sha256=U6sVhLY7QDJLC7zYOhGjsJTD_X_9WK0edPi915vMAAs,246859
 databricks/sdk/service/iamv2.py,sha256=Z_fI31Sdj7yxB_mu0XPKUdMgwInF4mjuoilGPfEifOU,24110
-databricks/sdk/service/jobs.py,sha256=fFQj-Wzn2MK9bIk_UdYEOpZn4Ed_K40sy3zpvoDF3nY,428615
+databricks/sdk/service/jobs.py,sha256=7XJXgCsqOOBI706_AThRr0gMvN9wMt7bGcV4rJ9lh6c,428781
 databricks/sdk/service/marketplace.py,sha256=zycSn4ZqIh_RT2rwGka1hayr6Y0JsBi3G-lA3oiMp3k,153257
-databricks/sdk/service/ml.py,sha256=5kk0i9s4sAE16Zwm-eEQHgJ6yODBJroLFstXDfmUq-8,332637
+databricks/sdk/service/ml.py,sha256=YZEWuWYNlHS1ccfgOfd6Tfvatq-cSJrLpzN8b7CRj_g,338593
 databricks/sdk/service/oauth2.py,sha256=gE_ktB_jSgaKOXPOh-LMdvzK5Zft0Ef44GzrtkVj5Z0,76865
-databricks/sdk/service/pipelines.py,sha256=fZVE80Ozl9HkfeJ-Fok8MZvZrUA2jsuyfD8OssQ1bYU,165624
+databricks/sdk/service/pipelines.py,sha256=ij99X_werWa00l83QwfmDXDh_QNDNUO29TKUmukcEIw,165947
 databricks/sdk/service/provisioning.py,sha256=b-zpOVIFE977eTB60RUztVLfXRp2SvWRqqk7a09Jr7Y,121780
 databricks/sdk/service/qualitymonitorv2.py,sha256=NTwjscs_gvAiFbeSoStqW1pB3yhse4BLIGmhbk1rGec,9226
 databricks/sdk/service/serving.py,sha256=Wc7wiVkVYexE66cwH0XCIrtTeJpHv1T-1T9a6vH9QMY,215953
@@ -74,11 +74,11 @@ databricks/sdk/service/settingsv2.py,sha256=LPFu85GlUrmA8BDk9pR9NMFvrNflYPlB4uEj
 databricks/sdk/service/sharing.py,sha256=cyp4AZam_VLz3gE5IeIGf-QuoskRW4Q4Pm4JK1aWzgA,141618
 databricks/sdk/service/sql.py,sha256=Uy9eeM_uvca-R6aSaLEQgSMb6iXnA01c6yjfL9tEaKg,402525
 databricks/sdk/service/tags.py,sha256=_FJoXAINfRqe8E_BWbDOPCHcSLpAaANVv9ZtBGEgimM,8890
-databricks/sdk/service/vectorsearch.py,sha256=fcgJPmlq1wTS6WKwr30RGn6Mk-yY8k-aaCM8wa7th14,72671
+databricks/sdk/service/vectorsearch.py,sha256=8abaxOnVKM0eoGgG97suridKU1euQYQecQXa_2UD1fs,72749
 databricks/sdk/service/workspace.py,sha256=0FJy_VZJZSjIgH9MvSwOKzmaPSAOUJoHF2NAQiR6wgA,112325
-databricks_sdk-0.71.0.dist-info/licenses/LICENSE,sha256=afBgTZo-JsYqj4VOjnejBetMuHKcFR30YobDdpVFkqY,11411
-databricks_sdk-0.71.0.dist-info/licenses/NOTICE,sha256=tkRcQYA1k68wDLcnOWbg2xJDsUOJw8G8DGBhb8dnI3w,1588
-databricks_sdk-0.71.0.dist-info/METADATA,sha256=Jsf_TDu-mHa3na3pGWkdJOzgbt4LV9oRWothR3_RZSo,39938
-databricks_sdk-0.71.0.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
-databricks_sdk-0.71.0.dist-info/top_level.txt,sha256=7kRdatoSgU0EUurRQJ_3F1Nv4EOSHWAr6ng25tJOJKU,11
-databricks_sdk-0.71.0.dist-info/RECORD,,
+databricks_sdk-0.72.0.dist-info/licenses/LICENSE,sha256=afBgTZo-JsYqj4VOjnejBetMuHKcFR30YobDdpVFkqY,11411
+databricks_sdk-0.72.0.dist-info/licenses/NOTICE,sha256=tkRcQYA1k68wDLcnOWbg2xJDsUOJw8G8DGBhb8dnI3w,1588
+databricks_sdk-0.72.0.dist-info/METADATA,sha256=ieXJFQ0eM7rO8RZFFyvqlYNA5I2El_QIuwRXpMLSCzw,39938
+databricks_sdk-0.72.0.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+databricks_sdk-0.72.0.dist-info/top_level.txt,sha256=7kRdatoSgU0EUurRQJ_3F1Nv4EOSHWAr6ng25tJOJKU,11
+databricks_sdk-0.72.0.dist-info/RECORD,,