databricks-sdk 0.54.0__py3-none-any.whl → 0.56.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release. This version of databricks-sdk might be problematic.
- databricks/sdk/__init__.py +304 -278
- databricks/sdk/config.py +15 -4
- databricks/sdk/credentials_provider.py +101 -55
- databricks/sdk/oauth.py +0 -5
- databricks/sdk/oidc.py +206 -0
- databricks/sdk/service/aibuilder.py +364 -0
- databricks/sdk/service/billing.py +150 -169
- databricks/sdk/service/catalog.py +263 -835
- databricks/sdk/service/cleanrooms.py +15 -10
- databricks/sdk/service/compute.py +12 -22
- databricks/sdk/service/dashboards.py +59 -451
- databricks/sdk/service/database.py +1256 -0
- databricks/sdk/service/files.py +2 -0
- databricks/sdk/service/iam.py +6 -6
- databricks/sdk/service/jobs.py +238 -0
- databricks/sdk/service/ml.py +8 -271
- databricks/sdk/service/pipelines.py +45 -1
- databricks/sdk/service/provisioning.py +0 -3
- databricks/sdk/service/qualitymonitorv2.py +275 -0
- databricks/sdk/service/serving.py +76 -4
- databricks/sdk/service/settings.py +982 -99
- databricks/sdk/service/sharing.py +3 -2
- databricks/sdk/service/sql.py +218 -1
- databricks/sdk/version.py +1 -1
- {databricks_sdk-0.54.0.dist-info → databricks_sdk-0.56.0.dist-info}/METADATA +1 -1
- {databricks_sdk-0.54.0.dist-info → databricks_sdk-0.56.0.dist-info}/RECORD +30 -26
- {databricks_sdk-0.54.0.dist-info → databricks_sdk-0.56.0.dist-info}/WHEEL +1 -1
- {databricks_sdk-0.54.0.dist-info → databricks_sdk-0.56.0.dist-info}/licenses/LICENSE +0 -0
- {databricks_sdk-0.54.0.dist-info → databricks_sdk-0.56.0.dist-info}/licenses/NOTICE +0 -0
- {databricks_sdk-0.54.0.dist-info → databricks_sdk-0.56.0.dist-info}/top_level.txt +0 -0
databricks/sdk/service/ml.py
CHANGED
@@ -271,109 +271,6 @@ class ApproveTransitionRequestResponse:
         return cls(activity=_from_dict(d, "activity", Activity))
 
 
-@dataclass
-class ArtifactCredentialInfo:
-    headers: Optional[List[ArtifactCredentialInfoHttpHeader]] = None
-    """A collection of HTTP headers that should be specified when uploading to or downloading from the
-    specified `signed_uri`."""
-
-    path: Optional[str] = None
-    """The path, relative to the Run's artifact root location, of the artifact that can be accessed
-    with the credential."""
-
-    run_id: Optional[str] = None
-    """The ID of the MLflow Run containing the artifact that can be accessed with the credential."""
-
-    signed_uri: Optional[str] = None
-    """The signed URI credential that provides access to the artifact."""
-
-    type: Optional[ArtifactCredentialType] = None
-    """The type of the signed credential URI (e.g., an AWS presigned URL or an Azure Shared Access
-    Signature URI)."""
-
-    def as_dict(self) -> dict:
-        """Serializes the ArtifactCredentialInfo into a dictionary suitable for use as a JSON request body."""
-        body = {}
-        if self.headers:
-            body["headers"] = [v.as_dict() for v in self.headers]
-        if self.path is not None:
-            body["path"] = self.path
-        if self.run_id is not None:
-            body["run_id"] = self.run_id
-        if self.signed_uri is not None:
-            body["signed_uri"] = self.signed_uri
-        if self.type is not None:
-            body["type"] = self.type.value
-        return body
-
-    def as_shallow_dict(self) -> dict:
-        """Serializes the ArtifactCredentialInfo into a shallow dictionary of its immediate attributes."""
-        body = {}
-        if self.headers:
-            body["headers"] = self.headers
-        if self.path is not None:
-            body["path"] = self.path
-        if self.run_id is not None:
-            body["run_id"] = self.run_id
-        if self.signed_uri is not None:
-            body["signed_uri"] = self.signed_uri
-        if self.type is not None:
-            body["type"] = self.type
-        return body
-
-    @classmethod
-    def from_dict(cls, d: Dict[str, Any]) -> ArtifactCredentialInfo:
-        """Deserializes the ArtifactCredentialInfo from a dictionary."""
-        return cls(
-            headers=_repeated_dict(d, "headers", ArtifactCredentialInfoHttpHeader),
-            path=d.get("path", None),
-            run_id=d.get("run_id", None),
-            signed_uri=d.get("signed_uri", None),
-            type=_enum(d, "type", ArtifactCredentialType),
-        )
-
-
-@dataclass
-class ArtifactCredentialInfoHttpHeader:
-    name: Optional[str] = None
-    """The HTTP header name."""
-
-    value: Optional[str] = None
-    """The HTTP header value."""
-
-    def as_dict(self) -> dict:
-        """Serializes the ArtifactCredentialInfoHttpHeader into a dictionary suitable for use as a JSON request body."""
-        body = {}
-        if self.name is not None:
-            body["name"] = self.name
-        if self.value is not None:
-            body["value"] = self.value
-        return body
-
-    def as_shallow_dict(self) -> dict:
-        """Serializes the ArtifactCredentialInfoHttpHeader into a shallow dictionary of its immediate attributes."""
-        body = {}
-        if self.name is not None:
-            body["name"] = self.name
-        if self.value is not None:
-            body["value"] = self.value
-        return body
-
-    @classmethod
-    def from_dict(cls, d: Dict[str, Any]) -> ArtifactCredentialInfoHttpHeader:
-        """Deserializes the ArtifactCredentialInfoHttpHeader from a dictionary."""
-        return cls(name=d.get("name", None), value=d.get("value", None))
-
-
-class ArtifactCredentialType(Enum):
-    """The type of a given artifact access credential"""
-
-    AWS_PRESIGNED_URL = "AWS_PRESIGNED_URL"
-    AZURE_ADLS_GEN2_SAS_URI = "AZURE_ADLS_GEN2_SAS_URI"
-    AZURE_SAS_URI = "AZURE_SAS_URI"
-    GCP_SIGNED_URL = "GCP_SIGNED_URL"
-
-
 class CommentActivityAction(Enum):
     """An action that a user (with sufficient permissions) could take on a comment. Valid values are: *
     `EDIT_COMMENT`: Edit the comment
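Note on the hunk above: the `ArtifactCredentialInfo` family (including `ArtifactCredentialInfoHttpHeader` and the `ArtifactCredentialType` enum) is removed outright, so imports of these symbols fail on 0.56.0. A minimal compatibility sketch, assuming code that must run against both versions:

# These symbols exist in databricks-sdk 0.54.0 but were removed in 0.56.0,
# so guard the import instead of assuming it succeeds.
try:
    from databricks.sdk.service.ml import ArtifactCredentialInfo, ArtifactCredentialType
except ImportError:  # 0.56.0 and later
    ArtifactCredentialInfo = None
    ArtifactCredentialType = None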
@@ -1076,7 +973,8 @@ class CreateRegistryWebhook:
     job_spec: Optional[JobSpec] = None
 
     model_name: Optional[str] = None
-    """
+    """If model name is not specified, a registry-wide webhook is created that listens for the
+    specified events across all versions of all registered models."""
 
     status: Optional[RegistryWebhookStatus] = None
     """Enable or disable triggering the webhook, or put the webhook into test mode. The default is
@@ -2235,7 +2133,7 @@ class FileInfo:
 class FinalizeLoggedModelRequest:
     status: LoggedModelStatus
     """Whether or not the model is ready for use. ``"LOGGED_MODEL_UPLOAD_FAILED"`` indicates that
-    something went wrong when logging the model weights / agent code
+    something went wrong when logging the model weights / agent code."""
 
     model_id: Optional[str] = None
     """The ID of the logged model to finalize."""
@@ -2343,56 +2241,6 @@ class ForecastingExperimentState(Enum):
     SUCCEEDED = "SUCCEEDED"
 
 
-@dataclass
-class GetCredentialsForTraceDataDownloadResponse:
-    credential_info: Optional[ArtifactCredentialInfo] = None
-    """The artifact download credentials for the specified trace data."""
-
-    def as_dict(self) -> dict:
-        """Serializes the GetCredentialsForTraceDataDownloadResponse into a dictionary suitable for use as a JSON request body."""
-        body = {}
-        if self.credential_info:
-            body["credential_info"] = self.credential_info.as_dict()
-        return body
-
-    def as_shallow_dict(self) -> dict:
-        """Serializes the GetCredentialsForTraceDataDownloadResponse into a shallow dictionary of its immediate attributes."""
-        body = {}
-        if self.credential_info:
-            body["credential_info"] = self.credential_info
-        return body
-
-    @classmethod
-    def from_dict(cls, d: Dict[str, Any]) -> GetCredentialsForTraceDataDownloadResponse:
-        """Deserializes the GetCredentialsForTraceDataDownloadResponse from a dictionary."""
-        return cls(credential_info=_from_dict(d, "credential_info", ArtifactCredentialInfo))
-
-
-@dataclass
-class GetCredentialsForTraceDataUploadResponse:
-    credential_info: Optional[ArtifactCredentialInfo] = None
-    """The artifact upload credentials for the specified trace data."""
-
-    def as_dict(self) -> dict:
-        """Serializes the GetCredentialsForTraceDataUploadResponse into a dictionary suitable for use as a JSON request body."""
-        body = {}
-        if self.credential_info:
-            body["credential_info"] = self.credential_info.as_dict()
-        return body
-
-    def as_shallow_dict(self) -> dict:
-        """Serializes the GetCredentialsForTraceDataUploadResponse into a shallow dictionary of its immediate attributes."""
-        body = {}
-        if self.credential_info:
-            body["credential_info"] = self.credential_info
-        return body
-
-    @classmethod
-    def from_dict(cls, d: Dict[str, Any]) -> GetCredentialsForTraceDataUploadResponse:
-        """Deserializes the GetCredentialsForTraceDataUploadResponse from a dictionary."""
-        return cls(credential_info=_from_dict(d, "credential_info", ArtifactCredentialInfo))
-
-
 @dataclass
 class GetExperimentByNameResponse:
     experiment: Optional[Experiment] = None
@@ -2993,49 +2841,6 @@ class ListExperimentsResponse:
         )
 
 
-@dataclass
-class ListLoggedModelArtifactsResponse:
-    files: Optional[List[FileInfo]] = None
-    """File location and metadata for artifacts."""
-
-    next_page_token: Optional[str] = None
-    """Token that can be used to retrieve the next page of artifact results"""
-
-    root_uri: Optional[str] = None
-    """Root artifact directory for the logged model."""
-
-    def as_dict(self) -> dict:
-        """Serializes the ListLoggedModelArtifactsResponse into a dictionary suitable for use as a JSON request body."""
-        body = {}
-        if self.files:
-            body["files"] = [v.as_dict() for v in self.files]
-        if self.next_page_token is not None:
-            body["next_page_token"] = self.next_page_token
-        if self.root_uri is not None:
-            body["root_uri"] = self.root_uri
-        return body
-
-    def as_shallow_dict(self) -> dict:
-        """Serializes the ListLoggedModelArtifactsResponse into a shallow dictionary of its immediate attributes."""
-        body = {}
-        if self.files:
-            body["files"] = self.files
-        if self.next_page_token is not None:
-            body["next_page_token"] = self.next_page_token
-        if self.root_uri is not None:
-            body["root_uri"] = self.root_uri
-        return body
-
-    @classmethod
-    def from_dict(cls, d: Dict[str, Any]) -> ListLoggedModelArtifactsResponse:
-        """Deserializes the ListLoggedModelArtifactsResponse from a dictionary."""
-        return cls(
-            files=_repeated_dict(d, "files", FileInfo),
-            next_page_token=d.get("next_page_token", None),
-            root_uri=d.get("root_uri", None),
-        )
-
-
 @dataclass
 class ListModelsResponse:
     next_page_token: Optional[str] = None
@@ -5494,10 +5299,7 @@ class RunInputs:
     """Run metrics."""
 
     model_inputs: Optional[List[ModelInput]] = None
-    """
-    warning.
-
-    Model inputs to the Run."""
+    """Model inputs to the Run."""
 
     def as_dict(self) -> dict:
         """Serializes the RunInputs into a dictionary suitable for use as a JSON request body."""
@@ -7339,7 +7141,7 @@ class ExperimentsAPI:
           The ID of the logged model to finalize.
         :param status: :class:`LoggedModelStatus`
           Whether or not the model is ready for use. ``"LOGGED_MODEL_UPLOAD_FAILED"`` indicates that something
-          went wrong when logging the model weights / agent code
+          went wrong when logging the model weights / agent code.
 
         :returns: :class:`FinalizeLoggedModelResponse`
         """
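The two hunks above only add a missing period to the finalize docstring; behavior is unchanged. For orientation, a sketch of the documented call (the model ID is hypothetical, and the method name `finalize_logged_model` is inferred from the surrounding `ExperimentsAPI` docstring):

from databricks.sdk import WorkspaceClient
from databricks.sdk.service.ml import LoggedModelStatus

w = WorkspaceClient()

# Mark a logged model's upload as failed, per the docstring's
# ``LOGGED_MODEL_UPLOAD_FAILED`` example value.
w.experiments.finalize_logged_model(
    model_id="m-1234567890abcdef",  # hypothetical ID
    status=LoggedModelStatus.LOGGED_MODEL_UPLOAD_FAILED,
)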
@@ -7381,38 +7183,6 @@ class ExperimentsAPI:
         res = self._api.do("GET", "/api/2.0/mlflow/experiments/get-by-name", query=query, headers=headers)
         return GetExperimentByNameResponse.from_dict(res)
 
-    def get_credentials_for_trace_data_download(self, request_id: str) -> GetCredentialsForTraceDataDownloadResponse:
-        """Get credentials to download trace data.
-
-        :param request_id: str
-          The ID of the trace to fetch artifact download credentials for.
-
-        :returns: :class:`GetCredentialsForTraceDataDownloadResponse`
-        """
-
-        headers = {
-            "Accept": "application/json",
-        }
-
-        res = self._api.do("GET", f"/api/2.0/mlflow/traces/{request_id}/credentials-for-data-download", headers=headers)
-        return GetCredentialsForTraceDataDownloadResponse.from_dict(res)
-
-    def get_credentials_for_trace_data_upload(self, request_id: str) -> GetCredentialsForTraceDataUploadResponse:
-        """Get credentials to upload trace data.
-
-        :param request_id: str
-          The ID of the trace to fetch artifact upload credentials for.
-
-        :returns: :class:`GetCredentialsForTraceDataUploadResponse`
-        """
-
-        headers = {
-            "Accept": "application/json",
-        }
-
-        res = self._api.do("GET", f"/api/2.0/mlflow/traces/{request_id}/credentials-for-data-upload", headers=headers)
-        return GetCredentialsForTraceDataUploadResponse.from_dict(res)
-
     def get_experiment(self, experiment_id: str) -> GetExperimentResponse:
         """Get an experiment.
 
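Both trace-data credential helpers are dropped from `ExperimentsAPI`, along with their response types shown earlier. A sketch of 0.54.0 usage that breaks on upgrade (the trace ID is hypothetical):

from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

# Valid on 0.54.0; on 0.56.0 this raises AttributeError because the method
# and GetCredentialsForTraceDataDownloadResponse were removed.
creds = w.experiments.get_credentials_for_trace_data_download(request_id="tr-0123456789")
print(creds.credential_info.signed_uri)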
@@ -7666,41 +7436,6 @@ class ExperimentsAPI:
                 return
             query["page_token"] = json["next_page_token"]
 
-    def list_logged_model_artifacts(
-        self, model_id: str, *, artifact_directory_path: Optional[str] = None, page_token: Optional[str] = None
-    ) -> ListLoggedModelArtifactsResponse:
-        """List artifacts for a logged model.
-
-        List artifacts for a logged model. Takes an optional ``artifact_directory_path`` prefix which if
-        specified, the response contains only artifacts with the specified prefix.
-
-        :param model_id: str
-          The ID of the logged model for which to list the artifacts.
-        :param artifact_directory_path: str (optional)
-          Filter artifacts matching this path (a relative path from the root artifact directory).
-        :param page_token: str (optional)
-          Token indicating the page of artifact results to fetch. `page_token` is not supported when listing
-          artifacts in UC Volumes. A maximum of 1000 artifacts will be retrieved for UC Volumes. Please call
-          `/api/2.0/fs/directories{directory_path}` for listing artifacts in UC Volumes, which supports
-          pagination. See [List directory contents | Files API](/api/workspace/files/listdirectorycontents).
-
-        :returns: :class:`ListLoggedModelArtifactsResponse`
-        """
-
-        query = {}
-        if artifact_directory_path is not None:
-            query["artifact_directory_path"] = artifact_directory_path
-        if page_token is not None:
-            query["page_token"] = page_token
-        headers = {
-            "Accept": "application/json",
-        }
-
-        res = self._api.do(
-            "GET", f"/api/2.0/mlflow/logged-models/{model_id}/artifacts/directories", query=query, headers=headers
-        )
-        return ListLoggedModelArtifactsResponse.from_dict(res)
-
     def log_batch(
         self,
         *,
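`list_logged_model_artifacts` is removed as well. Its own (now-deleted) docstring pointed UC Volumes users at the Files API for paginated listing; a sketch of that alternative, with a hypothetical volume path:

from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

# 0.54.0 only, removed in 0.56.0:
# page = w.experiments.list_logged_model_artifacts(model_id="m-123")

# Files API listing, which supports pagination (per the removed docstring):
for entry in w.files.list_directory_contents("/Volumes/main/default/my_volume"):
    print(entry.path)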
@@ -8949,7 +8684,8 @@ class ModelRegistryAPI:
         :param http_url_spec: :class:`HttpUrlSpec` (optional)
         :param job_spec: :class:`JobSpec` (optional)
         :param model_name: str (optional)
-
+          If model name is not specified, a registry-wide webhook is created that listens for the specified
+          events across all versions of all registered models.
         :param status: :class:`RegistryWebhookStatus` (optional)
           Enable or disable triggering the webhook, or put the webhook into test mode. The default is
           `ACTIVE`: * `ACTIVE`: Webhook is triggered when an associated event happens.
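The restored `model_name` description spells out the registry-wide behavior. A sketch of creating such a webhook by omitting `model_name` (the URL and event choice are illustrative, not prescribed by the diff):

from databricks.sdk import WorkspaceClient
from databricks.sdk.service.ml import HttpUrlSpec, RegistryWebhookEvent

w = WorkspaceClient()

# Without model_name, the webhook listens for the given events across all
# versions of all registered models.
webhook = w.model_registry.create_webhook(
    events=[RegistryWebhookEvent.MODEL_VERSION_CREATED],
    http_url_spec=HttpUrlSpec(url="https://example.com/registry-hook"),
)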
@@ -8988,6 +8724,7 @@ class ModelRegistryAPI:
         Deletes a comment on a model version.
 
         :param id: str
+          Unique identifier of an activity
 
 
         """

databricks/sdk/service/pipelines.py
CHANGED
@@ -111,6 +111,11 @@ class CreatePipeline:
     storage: Optional[str] = None
     """DBFS root directory for storing checkpoints and tables."""
 
+    tags: Optional[Dict[str, str]] = None
+    """A map of tags associated with the pipeline. These are forwarded to the cluster as cluster tags,
+    and are therefore subject to the same limitations. A maximum of 25 tags can be added to the
+    pipeline."""
+
     target: Optional[str] = None
     """Target schema (database) to add tables in this pipeline to. Exactly one of `schema` or `target`
     must be specified. To publish to Unity Catalog, also specify `catalog`. This legacy field is
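The new `tags` field is a plain `Dict[str, str]` on the request dataclass and, as the following hunks show, is serialized only when non-empty. A round-trip sketch with illustrative values:

from databricks.sdk.service.pipelines import CreatePipeline

spec = CreatePipeline(
    storage="dbfs:/pipelines/checkpoints",  # illustrative path
    tags={"team": "data-eng", "env": "dev"},
)
assert spec.as_dict()["tags"] == {"team": "data-eng", "env": "dev"}
assert CreatePipeline.from_dict(spec.as_dict()).tags == spec.tags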
@@ -174,6 +179,8 @@ class CreatePipeline:
             body["serverless"] = self.serverless
         if self.storage is not None:
             body["storage"] = self.storage
+        if self.tags:
+            body["tags"] = self.tags
         if self.target is not None:
             body["target"] = self.target
         if self.trigger:
@@ -235,6 +242,8 @@ class CreatePipeline:
             body["serverless"] = self.serverless
         if self.storage is not None:
             body["storage"] = self.storage
+        if self.tags:
+            body["tags"] = self.tags
         if self.target is not None:
             body["target"] = self.target
         if self.trigger:
@@ -271,6 +280,7 @@ class CreatePipeline:
             schema=d.get("schema", None),
             serverless=d.get("serverless", None),
             storage=d.get("storage", None),
+            tags=d.get("tags", None),
             target=d.get("target", None),
             trigger=_from_dict(d, "trigger", PipelineTrigger),
         )
@@ -505,6 +515,11 @@ class EditPipeline:
     storage: Optional[str] = None
     """DBFS root directory for storing checkpoints and tables."""
 
+    tags: Optional[Dict[str, str]] = None
+    """A map of tags associated with the pipeline. These are forwarded to the cluster as cluster tags,
+    and are therefore subject to the same limitations. A maximum of 25 tags can be added to the
+    pipeline."""
+
     target: Optional[str] = None
     """Target schema (database) to add tables in this pipeline to. Exactly one of `schema` or `target`
     must be specified. To publish to Unity Catalog, also specify `catalog`. This legacy field is
@@ -570,6 +585,8 @@ class EditPipeline:
             body["serverless"] = self.serverless
         if self.storage is not None:
             body["storage"] = self.storage
+        if self.tags:
+            body["tags"] = self.tags
         if self.target is not None:
             body["target"] = self.target
         if self.trigger:
@@ -633,6 +650,8 @@ class EditPipeline:
             body["serverless"] = self.serverless
         if self.storage is not None:
             body["storage"] = self.storage
+        if self.tags:
+            body["tags"] = self.tags
         if self.target is not None:
             body["target"] = self.target
         if self.trigger:
@@ -670,6 +689,7 @@ class EditPipeline:
             schema=d.get("schema", None),
             serverless=d.get("serverless", None),
             storage=d.get("storage", None),
+            tags=d.get("tags", None),
             target=d.get("target", None),
             trigger=_from_dict(d, "trigger", PipelineTrigger),
         )
@@ -1186,6 +1206,7 @@ class IngestionSourceType(Enum):
     SERVICENOW = "SERVICENOW"
     SHAREPOINT = "SHAREPOINT"
     SQLSERVER = "SQLSERVER"
+    TERADATA = "TERADATA"
     WORKDAY_RAAS = "WORKDAY_RAAS"
 
 
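A one-line enum addition: Teradata joins the supported managed-ingestion source types. A quick check:

from databricks.sdk.service.pipelines import IngestionSourceType

# New in 0.56.0:
assert IngestionSourceType.TERADATA.value == "TERADATA"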
@@ -2386,6 +2407,11 @@ class PipelineSpec:
     storage: Optional[str] = None
     """DBFS root directory for storing checkpoints and tables."""
 
+    tags: Optional[Dict[str, str]] = None
+    """A map of tags associated with the pipeline. These are forwarded to the cluster as cluster tags,
+    and are therefore subject to the same limitations. A maximum of 25 tags can be added to the
+    pipeline."""
+
     target: Optional[str] = None
     """Target schema (database) to add tables in this pipeline to. Exactly one of `schema` or `target`
     must be specified. To publish to Unity Catalog, also specify `catalog`. This legacy field is
@@ -2443,6 +2469,8 @@ class PipelineSpec:
             body["serverless"] = self.serverless
         if self.storage is not None:
             body["storage"] = self.storage
+        if self.tags:
+            body["tags"] = self.tags
         if self.target is not None:
             body["target"] = self.target
         if self.trigger:
@@ -2498,6 +2526,8 @@ class PipelineSpec:
             body["serverless"] = self.serverless
         if self.storage is not None:
             body["storage"] = self.storage
+        if self.tags:
+            body["tags"] = self.tags
         if self.target is not None:
             body["target"] = self.target
         if self.trigger:
@@ -2531,6 +2561,7 @@ class PipelineSpec:
             schema=d.get("schema", None),
             serverless=d.get("serverless", None),
             storage=d.get("storage", None),
+            tags=d.get("tags", None),
             target=d.get("target", None),
             trigger=_from_dict(d, "trigger", PipelineTrigger),
         )
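`PipelineSpec` gains the same field, so tags returned by the API now deserialize into the dataclass instead of being dropped. A sketch:

from databricks.sdk.service.pipelines import PipelineSpec

# On 0.54.0 the "tags" key was silently ignored; on 0.56.0 it round-trips.
spec = PipelineSpec.from_dict({"storage": "dbfs:/pipelines", "tags": {"env": "dev"}})
assert spec.tags == {"env": "dev"}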
@@ -3568,6 +3599,7 @@ class PipelinesAPI:
         schema: Optional[str] = None,
         serverless: Optional[bool] = None,
         storage: Optional[str] = None,
+        tags: Optional[Dict[str, str]] = None,
         target: Optional[str] = None,
         trigger: Optional[PipelineTrigger] = None,
     ) -> CreatePipelineResponse:
@@ -3636,6 +3668,9 @@ class PipelinesAPI:
           Whether serverless compute is enabled for this pipeline.
         :param storage: str (optional)
          DBFS root directory for storing checkpoints and tables.
+        :param tags: Dict[str,str] (optional)
+          A map of tags associated with the pipeline. These are forwarded to the cluster as cluster tags, and
+          are therefore subject to the same limitations. A maximum of 25 tags can be added to the pipeline.
         :param target: str (optional)
           Target schema (database) to add tables in this pipeline to. Exactly one of `schema` or `target` must
           be specified. To publish to Unity Catalog, also specify `catalog`. This legacy field is deprecated
@@ -3698,6 +3733,8 @@ class PipelinesAPI:
             body["serverless"] = serverless
         if storage is not None:
             body["storage"] = storage
+        if tags is not None:
+            body["tags"] = tags
         if target is not None:
             body["target"] = target
         if trigger is not None:
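End to end, `PipelinesAPI.create` (and `update`, below) now accepts `tags` directly. A minimal sketch; the name and tag values are hypothetical and other pipeline settings are omitted:

from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

# Tags are forwarded to the pipeline's cluster as cluster tags (max 25).
created = w.pipelines.create(
    name="example-pipeline",
    serverless=True,
    tags={"cost-center": "1234", "env": "dev"},
)
print(created.pipeline_id)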
@@ -3713,7 +3750,8 @@ class PipelinesAPI:
     def delete(self, pipeline_id: str):
         """Delete a pipeline.
 
-        Deletes a pipeline.
+        Deletes a pipeline. Deleting a pipeline is a permanent action that stops and removes the pipeline and
+        its tables. You cannot undo this action.
 
         :param pipeline_id: str
 
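The strengthened delete warning is worth echoing at call sites; a sketch with a hypothetical pipeline ID:

from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

# Permanent: stops the pipeline and removes its tables. There is no undo.
w.pipelines.delete(pipeline_id="0123-456789-abcdef")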
@@ -4083,6 +4121,7 @@ class PipelinesAPI:
         schema: Optional[str] = None,
         serverless: Optional[bool] = None,
         storage: Optional[str] = None,
+        tags: Optional[Dict[str, str]] = None,
         target: Optional[str] = None,
         trigger: Optional[PipelineTrigger] = None,
     ):
@@ -4154,6 +4193,9 @@ class PipelinesAPI:
          Whether serverless compute is enabled for this pipeline.
         :param storage: str (optional)
          DBFS root directory for storing checkpoints and tables.
+        :param tags: Dict[str,str] (optional)
+          A map of tags associated with the pipeline. These are forwarded to the cluster as cluster tags, and
+          are therefore subject to the same limitations. A maximum of 25 tags can be added to the pipeline.
         :param target: str (optional)
           Target schema (database) to add tables in this pipeline to. Exactly one of `schema` or `target` must
           be specified. To publish to Unity Catalog, also specify `catalog`. This legacy field is deprecated
@@ -4216,6 +4258,8 @@ class PipelinesAPI:
             body["serverless"] = serverless
         if storage is not None:
             body["storage"] = storage
+        if tags is not None:
+            body["tags"] = tags
         if target is not None:
             body["target"] = target
         if trigger is not None: