databricks-sdk 0.20.0__py3-none-any.whl → 0.21.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
- databricks/sdk/__init__.py +21 -6
- databricks/sdk/_widgets/__init__.py +2 -2
- databricks/sdk/config.py +3 -2
- databricks/sdk/oauth.py +1 -1
- databricks/sdk/runtime/__init__.py +85 -11
- databricks/sdk/runtime/dbutils_stub.py +1 -1
- databricks/sdk/service/_internal.py +1 -1
- databricks/sdk/service/billing.py +42 -0
- databricks/sdk/service/catalog.py +245 -44
- databricks/sdk/service/compute.py +334 -13
- databricks/sdk/service/dashboards.py +14 -0
- databricks/sdk/service/files.py +154 -12
- databricks/sdk/service/iam.py +161 -0
- databricks/sdk/service/jobs.py +95 -8
- databricks/sdk/service/ml.py +350 -0
- databricks/sdk/service/oauth2.py +70 -0
- databricks/sdk/service/pipelines.py +66 -8
- databricks/sdk/service/provisioning.py +78 -36
- databricks/sdk/service/serving.py +28 -0
- databricks/sdk/service/settings.py +1292 -203
- databricks/sdk/service/sharing.py +56 -0
- databricks/sdk/service/sql.py +138 -11
- databricks/sdk/service/vectorsearch.py +95 -60
- databricks/sdk/service/workspace.py +141 -1
- databricks/sdk/version.py +1 -1
- {databricks_sdk-0.20.0.dist-info → databricks_sdk-0.21.0.dist-info}/METADATA +3 -1
- databricks_sdk-0.21.0.dist-info/RECORD +53 -0
- databricks/sdk/runtime/stub.py +0 -48
- databricks_sdk-0.20.0.dist-info/RECORD +0 -54
- {databricks_sdk-0.20.0.dist-info → databricks_sdk-0.21.0.dist-info}/LICENSE +0 -0
- {databricks_sdk-0.20.0.dist-info → databricks_sdk-0.21.0.dist-info}/NOTICE +0 -0
- {databricks_sdk-0.20.0.dist-info → databricks_sdk-0.21.0.dist-info}/WHEEL +0 -0
- {databricks_sdk-0.20.0.dist-info → databricks_sdk-0.21.0.dist-info}/top_level.txt +0 -0
The hunks below are from databricks/sdk/service/catalog.py (+245 -44).

@@ -5,7 +5,7 @@ from __future__ import annotations
 import logging
 from dataclasses import dataclass
 from enum import Enum
-from typing import
+from typing import Dict, Iterator, List, Optional
 
 from ._internal import _enum, _from_dict, _repeated_dict, _repeated_enum
 
@@ -260,6 +260,20 @@ class ArtifactType(Enum):
     LIBRARY_MAVEN = 'LIBRARY_MAVEN'
 
 
+@dataclass
+class AssignResponse:
+
+    def as_dict(self) -> dict:
+        """Serializes the AssignResponse into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> AssignResponse:
+        """Deserializes the AssignResponse from a dictionary."""
+        return cls()
+
+
 @dataclass
 class AwsIamRole:
     role_arn: str
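0.21.0 introduces a family of empty response dataclasses (AssignResponse, CancelRefreshResponse, CreateResponse, DeleteResponse, and so on) that all follow the pattern added here: no fields, an as_dict() that produces an empty body, and a from_dict() that ignores its input. A standalone copy of the pattern, runnable outside the SDK, to show the round trip:

```python
from __future__ import annotations

from dataclasses import dataclass
from typing import Dict


@dataclass
class AssignResponse:
    # No fields: the server returns an empty JSON object for this call.

    def as_dict(self) -> dict:
        """Serializes the AssignResponse into a dictionary suitable for use as a JSON request body."""
        return {}

    @classmethod
    def from_dict(cls, d: Dict[str, any]) -> AssignResponse:
        """Deserializes the AssignResponse from a dictionary."""
        return cls()  # any input produces the same empty instance


resp = AssignResponse.from_dict({'ignored': 'field'})
assert resp.as_dict() == {}
```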
@@ -347,6 +361,20 @@ class AzureServicePrincipal:
                    directory_id=d.get('directory_id', None))
 
 
+@dataclass
+class CancelRefreshResponse:
+
+    def as_dict(self) -> dict:
+        """Serializes the CancelRefreshResponse into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> CancelRefreshResponse:
+        """Deserializes the CancelRefreshResponse from a dictionary."""
+        return cls()
+
+
 @dataclass
 class CatalogInfo:
     browse_only: Optional[bool] = None
@@ -1238,7 +1266,7 @@ class CreateMonitor:
     expression independently, resulting in a separate slice for each predicate and its complements.
     For high-cardinality columns, only the top 100 unique values by frequency will generate slices."""
 
-    snapshot: Optional[
+    snapshot: Optional[MonitorSnapshotProfileType] = None
     """Configuration for monitoring snapshot tables."""
 
     time_series: Optional[MonitorTimeSeriesProfileType] = None
@@ -1264,7 +1292,7 @@ class CreateMonitor:
         if self.skip_builtin_dashboard is not None:
             body['skip_builtin_dashboard'] = self.skip_builtin_dashboard
         if self.slicing_exprs: body['slicing_exprs'] = [v for v in self.slicing_exprs]
-        if self.snapshot: body['snapshot'] = self.snapshot
+        if self.snapshot: body['snapshot'] = self.snapshot.as_dict()
         if self.time_series: body['time_series'] = self.time_series.as_dict()
         if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id
         return body
@@ -1284,7 +1312,7 @@ class CreateMonitor:
                    schedule=_from_dict(d, 'schedule', MonitorCronSchedule),
                    skip_builtin_dashboard=d.get('skip_builtin_dashboard', None),
                    slicing_exprs=d.get('slicing_exprs', None),
-                   snapshot=d
+                   snapshot=_from_dict(d, 'snapshot', MonitorSnapshotProfileType),
                    time_series=_from_dict(d, 'time_series', MonitorTimeSeriesProfileType),
                    warehouse_id=d.get('warehouse_id', None))
 
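The as_dict()/from_dict() fixes above matter for wire serialization: previously the snapshot dataclass instance itself was placed into the request body, which json.dumps cannot encode, and from_dict() left the field untyped. A minimal standalone illustration (placeholder class, not the SDK's):

```python
import json
from dataclasses import dataclass


@dataclass
class SnapshotProfile:  # stand-in for MonitorSnapshotProfileType
    def as_dict(self) -> dict:
        return {}


old_body = {'snapshot': SnapshotProfile()}            # pre-0.21.0 behaviour
new_body = {'snapshot': SnapshotProfile().as_dict()}  # post-fix behaviour

print(json.dumps(new_body))  # {"snapshot": {}}
try:
    json.dumps(old_body)
except TypeError as err:
    print('old body is not JSON-serializable:', err)
```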
@@ -1326,6 +1354,20 @@ class CreateRegisteredModelRequest:
                    storage_location=d.get('storage_location', None))
 
 
+@dataclass
+class CreateResponse:
+
+    def as_dict(self) -> dict:
+        """Serializes the CreateResponse into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> CreateResponse:
+        """Deserializes the CreateResponse from a dictionary."""
+        return cls()
+
+
 @dataclass
 class CreateSchema:
     name: str
@@ -1383,7 +1425,7 @@ class CreateStorageCredential:
     comment: Optional[str] = None
     """Comment associated with the credential."""
 
-    databricks_gcp_service_account: Optional[
+    databricks_gcp_service_account: Optional[DatabricksGcpServiceAccountRequest] = None
     """The <Databricks> managed GCP service account configuration."""
 
     read_only: Optional[bool] = None
@@ -1402,7 +1444,7 @@ class CreateStorageCredential:
         if self.cloudflare_api_token: body['cloudflare_api_token'] = self.cloudflare_api_token.as_dict()
         if self.comment is not None: body['comment'] = self.comment
         if self.databricks_gcp_service_account:
-            body['databricks_gcp_service_account'] = self.databricks_gcp_service_account
+            body['databricks_gcp_service_account'] = self.databricks_gcp_service_account.as_dict()
         if self.name is not None: body['name'] = self.name
         if self.read_only is not None: body['read_only'] = self.read_only
         if self.skip_validation is not None: body['skip_validation'] = self.skip_validation
@@ -1416,7 +1458,8 @@ class CreateStorageCredential:
                    azure_service_principal=_from_dict(d, 'azure_service_principal', AzureServicePrincipal),
                    cloudflare_api_token=_from_dict(d, 'cloudflare_api_token', CloudflareApiToken),
                    comment=d.get('comment', None),
-                   databricks_gcp_service_account=d
+                   databricks_gcp_service_account=_from_dict(d, 'databricks_gcp_service_account',
+                                                             DatabricksGcpServiceAccountRequest),
                    name=d.get('name', None),
                    read_only=d.get('read_only', None),
                    skip_validation=d.get('skip_validation', None))
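Taken together, the three CreateStorageCredential hunks make the new databricks_gcp_service_account field round-trip like every other nested message. A sketch of that round trip, assuming the import path shown in the file list above:

```python
from databricks.sdk.service.catalog import (CreateStorageCredential,
                                            DatabricksGcpServiceAccountRequest)

req = CreateStorageCredential(
    name='gcp-cred',
    comment='created via SDK',
    databricks_gcp_service_account=DatabricksGcpServiceAccountRequest(),
    read_only=True)

body = req.as_dict()
assert body['databricks_gcp_service_account'] == {}  # nested dict, not a dataclass

restored = CreateStorageCredential.from_dict(body)
assert isinstance(restored.databricks_gcp_service_account,
                  DatabricksGcpServiceAccountRequest)
```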
@@ -1525,6 +1568,20 @@ class DataSourceFormat(Enum):
     UNITY_CATALOG = 'UNITY_CATALOG'
 
 
+@dataclass
+class DatabricksGcpServiceAccountRequest:
+
+    def as_dict(self) -> dict:
+        """Serializes the DatabricksGcpServiceAccountRequest into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> DatabricksGcpServiceAccountRequest:
+        """Deserializes the DatabricksGcpServiceAccountRequest from a dictionary."""
+        return cls()
+
+
 @dataclass
 class DatabricksGcpServiceAccountResponse:
     credential_id: Optional[str] = None
@@ -1546,6 +1603,34 @@ class DatabricksGcpServiceAccountResponse:
         return cls(credential_id=d.get('credential_id', None), email=d.get('email', None))
 
 
+@dataclass
+class DeleteAliasResponse:
+
+    def as_dict(self) -> dict:
+        """Serializes the DeleteAliasResponse into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> DeleteAliasResponse:
+        """Deserializes the DeleteAliasResponse from a dictionary."""
+        return cls()
+
+
+@dataclass
+class DeleteResponse:
+
+    def as_dict(self) -> dict:
+        """Serializes the DeleteResponse into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> DeleteResponse:
+        """Deserializes the DeleteResponse from a dictionary."""
+        return cls()
+
+
 @dataclass
 class DeltaRuntimePropertiesKvPairs:
     """Properties pertaining to the current state of the delta table as given by the commit server.
@@ -1610,6 +1695,20 @@ class DependencyList:
         return cls(dependencies=_repeated_dict(d, 'dependencies', Dependency))
 
 
+@dataclass
+class DisableResponse:
+
+    def as_dict(self) -> dict:
+        """Serializes the DisableResponse into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> DisableResponse:
+        """Deserializes the DisableResponse from a dictionary."""
+        return cls()
+
+
 class DisableSchemaName(Enum):
 
     ACCESS = 'access'
@@ -1734,6 +1833,20 @@ class EnablePredictiveOptimization(Enum):
     INHERIT = 'INHERIT'
 
 
+@dataclass
+class EnableResponse:
+
+    def as_dict(self) -> dict:
+        """Serializes the EnableResponse into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> EnableResponse:
+        """Deserializes the EnableResponse from a dictionary."""
+        return cls()
+
+
 class EnableSchemaName(Enum):
 
     ACCESS = 'access'
@@ -3118,7 +3231,7 @@ class MonitorInfo:
     expression independently, resulting in a separate slice for each predicate and its complements.
     For high-cardinality columns, only the top 100 unique values by frequency will generate slices."""
 
-    snapshot: Optional[
+    snapshot: Optional[MonitorSnapshotProfileType] = None
     """Configuration for monitoring snapshot tables."""
 
     status: Optional[MonitorInfoStatus] = None
@@ -3151,7 +3264,7 @@ class MonitorInfo:
             body['profile_metrics_table_name'] = self.profile_metrics_table_name
         if self.schedule: body['schedule'] = self.schedule.as_dict()
         if self.slicing_exprs: body['slicing_exprs'] = [v for v in self.slicing_exprs]
-        if self.snapshot: body['snapshot'] = self.snapshot
+        if self.snapshot: body['snapshot'] = self.snapshot.as_dict()
         if self.status is not None: body['status'] = self.status.value
         if self.table_name is not None: body['table_name'] = self.table_name
         if self.time_series: body['time_series'] = self.time_series.as_dict()
@@ -3175,7 +3288,7 @@ class MonitorInfo:
                    profile_metrics_table_name=d.get('profile_metrics_table_name', None),
                    schedule=_from_dict(d, 'schedule', MonitorCronSchedule),
                    slicing_exprs=d.get('slicing_exprs', None),
-                   snapshot=d
+                   snapshot=_from_dict(d, 'snapshot', MonitorSnapshotProfileType),
                    status=_enum(d, 'status', MonitorInfoStatus),
                    table_name=d.get('table_name', None),
                    time_series=_from_dict(d, 'time_series', MonitorTimeSeriesProfileType))
@@ -3255,6 +3368,20 @@ class MonitorRefreshInfoState(Enum):
     SUCCESS = 'SUCCESS'
 
 
+@dataclass
+class MonitorSnapshotProfileType:
+
+    def as_dict(self) -> dict:
+        """Serializes the MonitorSnapshotProfileType into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> MonitorSnapshotProfileType:
+        """Deserializes the MonitorSnapshotProfileType from a dictionary."""
+        return cls()
+
+
 @dataclass
 class MonitorTimeSeriesProfileType:
     granularities: Optional[List[str]] = None
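Several hunks replace raw d.get(...) lookups with the _from_dict helper imported from ._internal in the first hunk. Its implementation is not part of this diff; a plausible sketch, consistent with how it is called here, would be:

```python
from typing import Dict


def _from_dict(d: Dict[str, any], field: str, cls):
    # Sketch only: absent or null keys map to None, anything else is
    # delegated to the target dataclass's own from_dict() constructor.
    if d is None or d.get(field) is None:
        return None
    return cls.from_dict(d[field])
```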
@@ -3341,10 +3468,10 @@ class OnlineTableSpec:
     primary_key_columns: Optional[List[str]] = None
     """Primary Key columns to be used for data insert/update in the destination."""
 
-    run_continuously: Optional[
+    run_continuously: Optional[OnlineTableSpecContinuousSchedulingPolicy] = None
     """Pipeline runs continuously after generating the initial data."""
 
-    run_triggered: Optional[
+    run_triggered: Optional[OnlineTableSpecTriggeredSchedulingPolicy] = None
     """Pipeline stops after generating the initial data and can be triggered later (manually, through a
     cron job or through data triggers)"""
 
@@ -3360,8 +3487,8 @@ class OnlineTableSpec:
         if self.perform_full_copy is not None: body['perform_full_copy'] = self.perform_full_copy
         if self.pipeline_id is not None: body['pipeline_id'] = self.pipeline_id
         if self.primary_key_columns: body['primary_key_columns'] = [v for v in self.primary_key_columns]
-        if self.run_continuously: body['run_continuously'] = self.run_continuously
-        if self.run_triggered: body['run_triggered'] = self.run_triggered
+        if self.run_continuously: body['run_continuously'] = self.run_continuously.as_dict()
+        if self.run_triggered: body['run_triggered'] = self.run_triggered.as_dict()
         if self.source_table_full_name is not None:
             body['source_table_full_name'] = self.source_table_full_name
         if self.timeseries_key is not None: body['timeseries_key'] = self.timeseries_key
@@ -3373,12 +3500,41 @@ class OnlineTableSpec:
         return cls(perform_full_copy=d.get('perform_full_copy', None),
                    pipeline_id=d.get('pipeline_id', None),
                    primary_key_columns=d.get('primary_key_columns', None),
-                   run_continuously=d
-
+                   run_continuously=_from_dict(d, 'run_continuously',
+                                               OnlineTableSpecContinuousSchedulingPolicy),
+                   run_triggered=_from_dict(d, 'run_triggered', OnlineTableSpecTriggeredSchedulingPolicy),
                    source_table_full_name=d.get('source_table_full_name', None),
                    timeseries_key=d.get('timeseries_key', None))
 
 
+@dataclass
+class OnlineTableSpecContinuousSchedulingPolicy:
+
+    def as_dict(self) -> dict:
+        """Serializes the OnlineTableSpecContinuousSchedulingPolicy into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> OnlineTableSpecContinuousSchedulingPolicy:
+        """Deserializes the OnlineTableSpecContinuousSchedulingPolicy from a dictionary."""
+        return cls()
+
+
+@dataclass
+class OnlineTableSpecTriggeredSchedulingPolicy:
+
+    def as_dict(self) -> dict:
+        """Serializes the OnlineTableSpecTriggeredSchedulingPolicy into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> OnlineTableSpecTriggeredSchedulingPolicy:
+        """Deserializes the OnlineTableSpecTriggeredSchedulingPolicy from a dictionary."""
+        return cls()
+
+
 class OnlineTableState(Enum):
     """The state of an online table."""
 
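With the two scheduling-policy classes in place, OnlineTableSpec round-trips cleanly. A sketch using only fields visible in the hunks above:

```python
from databricks.sdk.service.catalog import (
    OnlineTableSpec, OnlineTableSpecTriggeredSchedulingPolicy)

spec = OnlineTableSpec(source_table_full_name='main.default.src',
                       primary_key_columns=['id'],
                       run_triggered=OnlineTableSpecTriggeredSchedulingPolicy())

body = spec.as_dict()
assert body['run_triggered'] == {}  # serialized as a nested JSON object

restored = OnlineTableSpec.from_dict(body)
assert isinstance(restored.run_triggered, OnlineTableSpecTriggeredSchedulingPolicy)
```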
@@ -4432,6 +4588,34 @@ class TriggeredUpdateStatus:
                    triggered_update_progress=_from_dict(d, 'triggered_update_progress', PipelineProgress))
 
 
+@dataclass
+class UnassignResponse:
+
+    def as_dict(self) -> dict:
+        """Serializes the UnassignResponse into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> UnassignResponse:
+        """Deserializes the UnassignResponse from a dictionary."""
+        return cls()
+
+
+@dataclass
+class UpdateAssignmentResponse:
+
+    def as_dict(self) -> dict:
+        """Serializes the UpdateAssignmentResponse into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> UpdateAssignmentResponse:
+        """Deserializes the UpdateAssignmentResponse from a dictionary."""
+        return cls()
+
+
 @dataclass
 class UpdateCatalog:
     comment: Optional[str] = None
@@ -4756,7 +4940,7 @@ class UpdateMonitor:
     expression independently, resulting in a separate slice for each predicate and its complements.
     For high-cardinality columns, only the top 100 unique values by frequency will generate slices."""
 
-    snapshot: Optional[
+    snapshot: Optional[MonitorSnapshotProfileType] = None
     """Configuration for monitoring snapshot tables."""
 
     time_series: Optional[MonitorTimeSeriesProfileType] = None
@@ -4775,7 +4959,7 @@ class UpdateMonitor:
         if self.output_schema_name is not None: body['output_schema_name'] = self.output_schema_name
         if self.schedule: body['schedule'] = self.schedule.as_dict()
         if self.slicing_exprs: body['slicing_exprs'] = [v for v in self.slicing_exprs]
-        if self.snapshot: body['snapshot'] = self.snapshot
+        if self.snapshot: body['snapshot'] = self.snapshot.as_dict()
         if self.time_series: body['time_series'] = self.time_series.as_dict()
         return body
 
@@ -4792,7 +4976,7 @@ class UpdateMonitor:
                    output_schema_name=d.get('output_schema_name', None),
                    schedule=_from_dict(d, 'schedule', MonitorCronSchedule),
                    slicing_exprs=d.get('slicing_exprs', None),
-                   snapshot=d
+                   snapshot=_from_dict(d, 'snapshot', MonitorSnapshotProfileType),
                    time_series=_from_dict(d, 'time_series', MonitorTimeSeriesProfileType))
 
 
@@ -4855,6 +5039,20 @@ class UpdateRegisteredModelRequest:
                    owner=d.get('owner', None))
 
 
+@dataclass
+class UpdateResponse:
+
+    def as_dict(self) -> dict:
+        """Serializes the UpdateResponse into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> UpdateResponse:
+        """Deserializes the UpdateResponse from a dictionary."""
+        return cls()
+
+
 @dataclass
 class UpdateSchema:
     comment: Optional[str] = None
@@ -4916,7 +5114,7 @@ class UpdateStorageCredential:
     comment: Optional[str] = None
     """Comment associated with the credential."""
 
-    databricks_gcp_service_account: Optional[
+    databricks_gcp_service_account: Optional[DatabricksGcpServiceAccountRequest] = None
    """The <Databricks> managed GCP service account configuration."""
 
     force: Optional[bool] = None
@@ -4947,7 +5145,7 @@ class UpdateStorageCredential:
         if self.cloudflare_api_token: body['cloudflare_api_token'] = self.cloudflare_api_token.as_dict()
         if self.comment is not None: body['comment'] = self.comment
         if self.databricks_gcp_service_account:
-            body['databricks_gcp_service_account'] = self.databricks_gcp_service_account
+            body['databricks_gcp_service_account'] = self.databricks_gcp_service_account.as_dict()
         if self.force is not None: body['force'] = self.force
         if self.name is not None: body['name'] = self.name
         if self.new_name is not None: body['new_name'] = self.new_name
@@ -4964,7 +5162,8 @@ class UpdateStorageCredential:
                    azure_service_principal=_from_dict(d, 'azure_service_principal', AzureServicePrincipal),
                    cloudflare_api_token=_from_dict(d, 'cloudflare_api_token', CloudflareApiToken),
                    comment=d.get('comment', None),
-                   databricks_gcp_service_account=d
+                   databricks_gcp_service_account=_from_dict(d, 'databricks_gcp_service_account',
+                                                             DatabricksGcpServiceAccountRequest),
                    force=d.get('force', None),
                    name=d.get('name', None),
                    new_name=d.get('new_name', None),
@@ -5078,7 +5277,7 @@ class ValidateStorageCredential:
     cloudflare_api_token: Optional[CloudflareApiToken] = None
     """The Cloudflare API token configuration."""
 
-    databricks_gcp_service_account: Optional[
+    databricks_gcp_service_account: Optional[DatabricksGcpServiceAccountRequest] = None
     """The Databricks created GCP service account configuration."""
 
     external_location_name: Optional[str] = None
@@ -5087,7 +5286,7 @@ class ValidateStorageCredential:
     read_only: Optional[bool] = None
     """Whether the storage credential is only usable for read operations."""
 
-    storage_credential_name: Optional[
+    storage_credential_name: Optional[str] = None
     """The name of the storage credential to validate."""
 
     url: Optional[str] = None
@@ -5102,11 +5301,12 @@ class ValidateStorageCredential:
             body['azure_service_principal'] = self.azure_service_principal.as_dict()
         if self.cloudflare_api_token: body['cloudflare_api_token'] = self.cloudflare_api_token.as_dict()
         if self.databricks_gcp_service_account:
-            body['databricks_gcp_service_account'] = self.databricks_gcp_service_account
+            body['databricks_gcp_service_account'] = self.databricks_gcp_service_account.as_dict()
         if self.external_location_name is not None:
             body['external_location_name'] = self.external_location_name
         if self.read_only is not None: body['read_only'] = self.read_only
-        if self.storage_credential_name
+        if self.storage_credential_name is not None:
+            body['storage_credential_name'] = self.storage_credential_name
         if self.url is not None: body['url'] = self.url
         return body
 
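Both fixes in this hunk follow the file's body-building convention: scalar fields are guarded with `is not None` so that falsy-but-valid values such as False or '' still serialize, while nested message fields use a plain truthiness check plus .as_dict(). Why the distinction matters:

```python
read_only = False
body = {}

if read_only:  # a truthiness guard would silently drop a legitimate False
    body['read_only'] = read_only
assert body == {}

if read_only is not None:  # the convention used for scalars above
    body['read_only'] = read_only
assert body == {'read_only': False}
```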
@@ -5117,7 +5317,8 @@ class ValidateStorageCredential:
                    azure_managed_identity=_from_dict(d, 'azure_managed_identity', AzureManagedIdentity),
                    azure_service_principal=_from_dict(d, 'azure_service_principal', AzureServicePrincipal),
                    cloudflare_api_token=_from_dict(d, 'cloudflare_api_token', CloudflareApiToken),
-                   databricks_gcp_service_account=d
+                   databricks_gcp_service_account=_from_dict(d, 'databricks_gcp_service_account',
+                                                             DatabricksGcpServiceAccountRequest),
                    external_location_name=d.get('external_location_name', None),
                    read_only=d.get('read_only', None),
                    storage_credential_name=d.get('storage_credential_name', None),
@@ -6563,7 +6764,7 @@ class LakehouseMonitorsAPI:
               schedule: Optional[MonitorCronSchedule] = None,
               skip_builtin_dashboard: Optional[bool] = None,
               slicing_exprs: Optional[List[str]] = None,
-              snapshot: Optional[
+              snapshot: Optional[MonitorSnapshotProfileType] = None,
               time_series: Optional[MonitorTimeSeriesProfileType] = None,
               warehouse_id: Optional[str] = None) -> MonitorInfo:
         """Create a table monitor.
@@ -6604,7 +6805,7 @@ class LakehouseMonitorsAPI:
           List of column expressions to slice data with for targeted analysis. The data is grouped by each
           expression independently, resulting in a separate slice for each predicate and its complements. For
           high-cardinality columns, only the top 100 unique values by frequency will generate slices.
-        :param snapshot:
+        :param snapshot: :class:`MonitorSnapshotProfileType` (optional)
           Configuration for monitoring snapshot tables.
         :param time_series: :class:`MonitorTimeSeriesProfileType` (optional)
           Configuration for monitoring time series tables.
@@ -6626,7 +6827,7 @@ class LakehouseMonitorsAPI:
         if schedule is not None: body['schedule'] = schedule.as_dict()
         if skip_builtin_dashboard is not None: body['skip_builtin_dashboard'] = skip_builtin_dashboard
         if slicing_exprs is not None: body['slicing_exprs'] = [v for v in slicing_exprs]
-        if snapshot is not None: body['snapshot'] = snapshot
+        if snapshot is not None: body['snapshot'] = snapshot.as_dict()
         if time_series is not None: body['time_series'] = time_series.as_dict()
         if warehouse_id is not None: body['warehouse_id'] = warehouse_id
         headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
@@ -6776,7 +6977,7 @@ class LakehouseMonitorsAPI:
               notifications: Optional[List[MonitorNotificationsConfig]] = None,
               schedule: Optional[MonitorCronSchedule] = None,
               slicing_exprs: Optional[List[str]] = None,
-              snapshot: Optional[
+              snapshot: Optional[MonitorSnapshotProfileType] = None,
               time_series: Optional[MonitorTimeSeriesProfileType] = None) -> MonitorInfo:
         """Update a table monitor.
 
@@ -6814,7 +7015,7 @@ class LakehouseMonitorsAPI:
           List of column expressions to slice data with for targeted analysis. The data is grouped by each
           expression independently, resulting in a separate slice for each predicate and its complements. For
           high-cardinality columns, only the top 100 unique values by frequency will generate slices.
-        :param snapshot:
+        :param snapshot: :class:`MonitorSnapshotProfileType` (optional)
           Configuration for monitoring snapshot tables.
         :param time_series: :class:`MonitorTimeSeriesProfileType` (optional)
           Configuration for monitoring time series tables.
@@ -6831,7 +7032,7 @@ class LakehouseMonitorsAPI:
         if output_schema_name is not None: body['output_schema_name'] = output_schema_name
         if schedule is not None: body['schedule'] = schedule.as_dict()
         if slicing_exprs is not None: body['slicing_exprs'] = [v for v in slicing_exprs]
-        if snapshot is not None: body['snapshot'] = snapshot
+        if snapshot is not None: body['snapshot'] = snapshot.as_dict()
         if time_series is not None: body['time_series'] = time_series.as_dict()
         headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
 
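A usage sketch of the corrected create() call. Only the snapshot and output_schema_name parameters are taken from the hunks above; the lakehouse_monitors accessor and the other argument names are assumptions about the surrounding SDK, not shown in this diff:

```python
from databricks.sdk import WorkspaceClient
from databricks.sdk.service.catalog import MonitorSnapshotProfileType

w = WorkspaceClient()

info = w.lakehouse_monitors.create(
    full_name='main.default.my_table',        # assumed parameter name
    assets_dir='/Workspace/Shared/monitors',  # assumed parameter name
    output_schema_name='main.default',        # appears in the update() hunk
    snapshot=MonitorSnapshotProfileType())    # now sent as {"snapshot": {}}
print(info.status)
```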
@@ -7755,7 +7956,7 @@ class StorageCredentialsAPI:
               azure_service_principal: Optional[AzureServicePrincipal] = None,
               cloudflare_api_token: Optional[CloudflareApiToken] = None,
               comment: Optional[str] = None,
-              databricks_gcp_service_account: Optional[
+              databricks_gcp_service_account: Optional[DatabricksGcpServiceAccountRequest] = None,
               read_only: Optional[bool] = None,
               skip_validation: Optional[bool] = None) -> StorageCredentialInfo:
         """Create a storage credential.
@@ -7774,7 +7975,7 @@ class StorageCredentialsAPI:
           The Cloudflare API token configuration.
         :param comment: str (optional)
           Comment associated with the credential.
-        :param databricks_gcp_service_account:
+        :param databricks_gcp_service_account: :class:`DatabricksGcpServiceAccountRequest` (optional)
           The <Databricks> managed GCP service account configuration.
         :param read_only: bool (optional)
           Whether the storage credential is only usable for read operations.
@@ -7792,7 +7993,7 @@ class StorageCredentialsAPI:
         if cloudflare_api_token is not None: body['cloudflare_api_token'] = cloudflare_api_token.as_dict()
         if comment is not None: body['comment'] = comment
         if databricks_gcp_service_account is not None:
-            body['databricks_gcp_service_account'] = databricks_gcp_service_account
+            body['databricks_gcp_service_account'] = databricks_gcp_service_account.as_dict()
         if name is not None: body['name'] = name
         if read_only is not None: body['read_only'] = read_only
         if skip_validation is not None: body['skip_validation'] = skip_validation
@@ -7890,7 +8091,7 @@ class StorageCredentialsAPI:
               azure_service_principal: Optional[AzureServicePrincipal] = None,
               cloudflare_api_token: Optional[CloudflareApiToken] = None,
               comment: Optional[str] = None,
-              databricks_gcp_service_account: Optional[
+              databricks_gcp_service_account: Optional[DatabricksGcpServiceAccountRequest] = None,
               force: Optional[bool] = None,
               new_name: Optional[str] = None,
               owner: Optional[str] = None,
@@ -7912,7 +8113,7 @@ class StorageCredentialsAPI:
           The Cloudflare API token configuration.
         :param comment: str (optional)
           Comment associated with the credential.
-        :param databricks_gcp_service_account:
+        :param databricks_gcp_service_account: :class:`DatabricksGcpServiceAccountRequest` (optional)
           The <Databricks> managed GCP service account configuration.
         :param force: bool (optional)
           Force update even if there are dependent external locations or external tables.
@@ -7936,7 +8137,7 @@ class StorageCredentialsAPI:
         if cloudflare_api_token is not None: body['cloudflare_api_token'] = cloudflare_api_token.as_dict()
         if comment is not None: body['comment'] = comment
         if databricks_gcp_service_account is not None:
-            body['databricks_gcp_service_account'] = databricks_gcp_service_account
+            body['databricks_gcp_service_account'] = databricks_gcp_service_account.as_dict()
         if force is not None: body['force'] = force
         if new_name is not None: body['new_name'] = new_name
         if owner is not None: body['owner'] = owner
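And the equivalent sketch for storage credentials; the storage_credentials accessor is an assumption, while name, comment, databricks_gcp_service_account, and skip_validation all appear in the create() hunks above:

```python
from databricks.sdk import WorkspaceClient
from databricks.sdk.service.catalog import DatabricksGcpServiceAccountRequest

w = WorkspaceClient()

cred = w.storage_credentials.create(
    name='gcp-cred',
    comment='GCP service account credential',
    databricks_gcp_service_account=DatabricksGcpServiceAccountRequest(),
    skip_validation=True)
print(cred.name)
```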
@@ -7956,10 +8157,10 @@ class StorageCredentialsAPI:
                 azure_managed_identity: Optional[AzureManagedIdentity] = None,
                 azure_service_principal: Optional[AzureServicePrincipal] = None,
                 cloudflare_api_token: Optional[CloudflareApiToken] = None,
-                databricks_gcp_service_account: Optional[
+                databricks_gcp_service_account: Optional[DatabricksGcpServiceAccountRequest] = None,
                 external_location_name: Optional[str] = None,
                 read_only: Optional[bool] = None,
-                storage_credential_name: Optional[
+                storage_credential_name: Optional[str] = None,
                 url: Optional[str] = None) -> ValidateStorageCredentialResponse:
         """Validate a storage credential.
 
@@ -7981,13 +8182,13 @@ class StorageCredentialsAPI:
           The Azure service principal configuration.
         :param cloudflare_api_token: :class:`CloudflareApiToken` (optional)
           The Cloudflare API token configuration.
-        :param databricks_gcp_service_account:
+        :param databricks_gcp_service_account: :class:`DatabricksGcpServiceAccountRequest` (optional)
           The Databricks created GCP service account configuration.
         :param external_location_name: str (optional)
           The name of an existing external location to validate.
         :param read_only: bool (optional)
           Whether the storage credential is only usable for read operations.
-        :param storage_credential_name:
+        :param storage_credential_name: str (optional)
           The name of the storage credential to validate.
         :param url: str (optional)
           The external location url to validate.
@@ -8002,7 +8203,7 @@ class StorageCredentialsAPI:
             body['azure_service_principal'] = azure_service_principal.as_dict()
         if cloudflare_api_token is not None: body['cloudflare_api_token'] = cloudflare_api_token.as_dict()
         if databricks_gcp_service_account is not None:
-            body['databricks_gcp_service_account'] = databricks_gcp_service_account
+            body['databricks_gcp_service_account'] = databricks_gcp_service_account.as_dict()
         if external_location_name is not None: body['external_location_name'] = external_location_name
         if read_only is not None: body['read_only'] = read_only
         if storage_credential_name is not None: body['storage_credential_name'] = storage_credential_name