databricks-sdk 0.66.0__py3-none-any.whl → 0.68.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of databricks-sdk might be problematic.
- databricks/sdk/__init__.py +10 -3
- databricks/sdk/_base_client.py +4 -1
- databricks/sdk/common/lro.py +17 -0
- databricks/sdk/common/types/__init__.py +0 -0
- databricks/sdk/common/types/fieldmask.py +39 -0
- databricks/sdk/credentials_provider.py +61 -12
- databricks/sdk/dbutils.py +5 -1
- databricks/sdk/errors/parser.py +8 -3
- databricks/sdk/mixins/files.py +1 -0
- databricks/sdk/oidc_token_supplier.py +80 -0
- databricks/sdk/retries.py +102 -2
- databricks/sdk/service/_internal.py +93 -1
- databricks/sdk/service/agentbricks.py +1 -1
- databricks/sdk/service/apps.py +264 -1
- databricks/sdk/service/billing.py +2 -3
- databricks/sdk/service/catalog.py +1030 -537
- databricks/sdk/service/cleanrooms.py +3 -3
- databricks/sdk/service/compute.py +21 -33
- databricks/sdk/service/dashboards.py +51 -3
- databricks/sdk/service/database.py +99 -8
- databricks/sdk/service/dataquality.py +1145 -0
- databricks/sdk/service/files.py +2 -1
- databricks/sdk/service/iam.py +6 -5
- databricks/sdk/service/iamv2.py +1 -1
- databricks/sdk/service/jobs.py +6 -9
- databricks/sdk/service/marketplace.py +3 -1
- databricks/sdk/service/ml.py +3 -1
- databricks/sdk/service/oauth2.py +1 -1
- databricks/sdk/service/pipelines.py +5 -6
- databricks/sdk/service/provisioning.py +544 -655
- databricks/sdk/service/qualitymonitorv2.py +1 -1
- databricks/sdk/service/serving.py +59 -1
- databricks/sdk/service/settings.py +5 -2
- databricks/sdk/service/settingsv2.py +1 -1
- databricks/sdk/service/sharing.py +12 -3
- databricks/sdk/service/sql.py +305 -70
- databricks/sdk/service/tags.py +1 -1
- databricks/sdk/service/vectorsearch.py +3 -1
- databricks/sdk/service/workspace.py +70 -17
- databricks/sdk/version.py +1 -1
- {databricks_sdk-0.66.0.dist-info → databricks_sdk-0.68.0.dist-info}/METADATA +4 -2
- databricks_sdk-0.68.0.dist-info/RECORD +83 -0
- databricks_sdk-0.66.0.dist-info/RECORD +0 -79
- {databricks_sdk-0.66.0.dist-info → databricks_sdk-0.68.0.dist-info}/WHEEL +0 -0
- {databricks_sdk-0.66.0.dist-info → databricks_sdk-0.68.0.dist-info}/licenses/LICENSE +0 -0
- {databricks_sdk-0.66.0.dist-info → databricks_sdk-0.68.0.dist-info}/licenses/NOTICE +0 -0
- {databricks_sdk-0.66.0.dist-info → databricks_sdk-0.68.0.dist-info}/top_level.txt +0 -0
@@ -10,13 +10,13 @@ from datetime import timedelta
 from enum import Enum
 from typing import Any, Callable, Dict, Iterator, List, Optional

-from .
+from databricks.sdk.service import catalog, jobs, settings, sharing
+from databricks.sdk.service._internal import (Wait, _enum, _from_dict,
+                                              _repeated_dict)

 _LOG = logging.getLogger("databricks.sdk")


-from databricks.sdk.service import catalog, jobs, settings, sharing
-
 # all definitions in this file are in alphabetical order


@@ -10,8 +10,10 @@ from datetime import timedelta
 from enum import Enum
 from typing import Any, Callable, Dict, Iterator, List, Optional

+from databricks.sdk.service._internal import (Wait, _enum, _from_dict,
+                                              _repeated_dict, _repeated_enum)
+
 from ..errors import OperationFailed
-from ._internal import Wait, _enum, _from_dict, _repeated_dict, _repeated_enum

 _LOG = logging.getLogger("databricks.sdk")

@@ -3135,12 +3137,9 @@ class Environment:
     """Required. Environment version used by the environment. Each version comes with a specific Python
     version and a set of Python packages. The version is a string, consisting of an integer."""

-    jar_dependencies: Optional[List[str]] = None
-    """Use `java_dependencies` instead."""
-
     java_dependencies: Optional[List[str]] = None
-    """List of
-    `/Volumes/path/to/test.jar`."""
+    """List of java dependencies. Each dependency is a string representing a java library path. For
+    example: `/Volumes/path/to/test.jar`."""

     def as_dict(self) -> dict:
         """Serializes the Environment into a dictionary suitable for use as a JSON request body."""
@@ -3151,8 +3150,6 @@ class Environment:
             body["dependencies"] = [v for v in self.dependencies]
         if self.environment_version is not None:
             body["environment_version"] = self.environment_version
-        if self.jar_dependencies:
-            body["jar_dependencies"] = [v for v in self.jar_dependencies]
         if self.java_dependencies:
             body["java_dependencies"] = [v for v in self.java_dependencies]
         return body
@@ -3166,8 +3163,6 @@ class Environment:
             body["dependencies"] = self.dependencies
         if self.environment_version is not None:
             body["environment_version"] = self.environment_version
-        if self.jar_dependencies:
-            body["jar_dependencies"] = self.jar_dependencies
         if self.java_dependencies:
             body["java_dependencies"] = self.java_dependencies
         return body
@@ -3179,7 +3174,6 @@ class Environment:
             client=d.get("client", None),
             dependencies=d.get("dependencies", None),
             environment_version=d.get("environment_version", None),
-            jar_dependencies=d.get("jar_dependencies", None),
             java_dependencies=d.get("java_dependencies", None),
         )

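The deprecated `jar_dependencies` field is gone; only `java_dependencies` remains. A minimal migration sketch, assuming `Environment` is still importable from `databricks.sdk.service.compute` as in earlier releases; the volume path and package pin are illustrative:

```python
from databricks.sdk.service.compute import Environment

# Code that previously populated "jar_dependencies" must switch to "java_dependencies";
# the old key is no longer serialized or deserialized in 0.68.0.
env = Environment.from_dict(
    {
        "environment_version": "2",
        "dependencies": ["requests==2.32.3"],
        "java_dependencies": ["/Volumes/main/default/libs/test.jar"],
    }
)
print(env.java_dependencies)
print(env.as_dict())  # emits "java_dependencies" only, never "jar_dependencies"
```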
@@ -6621,9 +6615,16 @@ class Results:
     data: Optional[Any] = None

     file_name: Optional[str] = None
-    """The image
+    """The image data in one of the following formats:
+
+    1. A Data URL with base64-encoded image data: `data:image/{type};base64,{base64-data}`. Example:
+    `data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAAUA...`
+
+    2. A FileStore file path for large images: `/plots/{filename}.png`. Example:
+    `/plots/b6a7ad70-fb2c-4353-8aed-3f1e015174a4.png`"""

     file_names: Optional[List[str]] = None
+    """List of image data for multiple images. Each element follows the same format as file_name."""

     is_json_schema: Optional[bool] = None
     """true if a JSON schema is returned instead of a string representation of the Hive type."""
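The rewritten docstring pins down the two shapes `file_name` can take. A small standalone helper sketch for telling them apart, based only on the formats described above (the helper itself is not part of the SDK):

```python
import base64
from typing import Optional


def decode_inline_image(file_name: str) -> Optional[bytes]:
    """Return raw image bytes for an inline Data URL, or None for a FileStore path."""
    if file_name.startswith("data:image/"):
        # Data URL form: data:image/{type};base64,{base64-data}
        _, b64_data = file_name.split(",", 1)
        return base64.b64decode(b64_data)
    # Otherwise file_name is a FileStore path such as /plots/{filename}.png,
    # which has to be downloaded separately rather than decoded in place.
    return None
```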
@@ -7067,6 +7068,7 @@ class TerminationReasonCode(Enum):
     BOOTSTRAP_TIMEOUT_DUE_TO_MISCONFIG = "BOOTSTRAP_TIMEOUT_DUE_TO_MISCONFIG"
     BUDGET_POLICY_LIMIT_ENFORCEMENT_ACTIVATED = "BUDGET_POLICY_LIMIT_ENFORCEMENT_ACTIVATED"
     BUDGET_POLICY_RESOLUTION_FAILURE = "BUDGET_POLICY_RESOLUTION_FAILURE"
+    CLOUD_ACCOUNT_POD_QUOTA_EXCEEDED = "CLOUD_ACCOUNT_POD_QUOTA_EXCEEDED"
     CLOUD_ACCOUNT_SETUP_FAILURE = "CLOUD_ACCOUNT_SETUP_FAILURE"
     CLOUD_OPERATION_CANCELLED = "CLOUD_OPERATION_CANCELLED"
     CLOUD_PROVIDER_DISK_SETUP_FAILURE = "CLOUD_PROVIDER_DISK_SETUP_FAILURE"
@@ -7144,6 +7146,7 @@ class TerminationReasonCode(Enum):
     IN_PENALTY_BOX = "IN_PENALTY_BOX"
     IP_EXHAUSTION_FAILURE = "IP_EXHAUSTION_FAILURE"
     JOB_FINISHED = "JOB_FINISHED"
+    K8S_ACTIVE_POD_QUOTA_EXCEEDED = "K8S_ACTIVE_POD_QUOTA_EXCEEDED"
     K8S_AUTOSCALING_FAILURE = "K8S_AUTOSCALING_FAILURE"
     K8S_DBR_CLUSTER_LAUNCH_TIMEOUT = "K8S_DBR_CLUSTER_LAUNCH_TIMEOUT"
     LAZY_ALLOCATION_TIMEOUT = "LAZY_ALLOCATION_TIMEOUT"
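With the two new pod-quota members, termination reasons for quota exhaustion can be matched explicitly. A hedged sketch, assuming the usual `ClusterDetails.termination_reason.code` shape; the cluster id is illustrative:

```python
from databricks.sdk import WorkspaceClient
from databricks.sdk.service.compute import TerminationReasonCode

w = WorkspaceClient()
cluster = w.clusters.get(cluster_id="0123-456789-abcdefgh")  # illustrative id
reason = cluster.termination_reason
if reason and reason.code in (
    TerminationReasonCode.CLOUD_ACCOUNT_POD_QUOTA_EXCEEDED,
    TerminationReasonCode.K8S_ACTIVE_POD_QUOTA_EXCEEDED,
):
    print("Cluster terminated because a pod quota was exhausted; retry later or raise the quota.")
```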
@@ -8493,11 +8496,7 @@ class ClustersAPI:
         }

         op_response = self._api.do("POST", "/api/2.1/clusters/delete", body=body, headers=headers)
-        return Wait(
-            self.wait_get_cluster_terminated,
-            response=DeleteClusterResponse.from_dict(op_response),
-            cluster_id=cluster_id,
-        )
+        return Wait(self.wait_get_cluster_terminated, cluster_id=cluster_id)

     def delete_and_wait(self, cluster_id: str, timeout=timedelta(minutes=20)) -> ClusterDetails:
         return self.delete(cluster_id=cluster_id).result(timeout=timeout)
@@ -8759,9 +8758,7 @@ class ClustersAPI:
         }

         op_response = self._api.do("POST", "/api/2.1/clusters/edit", body=body, headers=headers)
-        return Wait(
-            self.wait_get_cluster_running, response=EditClusterResponse.from_dict(op_response), cluster_id=cluster_id
-        )
+        return Wait(self.wait_get_cluster_running, cluster_id=cluster_id)

     def edit_and_wait(
         self,
@@ -9123,9 +9120,7 @@ class ClustersAPI:
         }

         op_response = self._api.do("POST", "/api/2.1/clusters/resize", body=body, headers=headers)
-        return Wait(
-            self.wait_get_cluster_running, response=ResizeClusterResponse.from_dict(op_response), cluster_id=cluster_id
-        )
+        return Wait(self.wait_get_cluster_running, cluster_id=cluster_id)

     def resize_and_wait(
         self,
@@ -9160,9 +9155,7 @@ class ClustersAPI:
         }

         op_response = self._api.do("POST", "/api/2.1/clusters/restart", body=body, headers=headers)
-        return Wait(
-            self.wait_get_cluster_running, response=RestartClusterResponse.from_dict(op_response), cluster_id=cluster_id
-        )
+        return Wait(self.wait_get_cluster_running, cluster_id=cluster_id)

     def restart_and_wait(
         self, cluster_id: str, *, restart_user: Optional[str] = None, timeout=timedelta(minutes=20)
@@ -9229,9 +9222,7 @@ class ClustersAPI:
         }

         op_response = self._api.do("POST", "/api/2.1/clusters/start", body=body, headers=headers)
-        return Wait(
-            self.wait_get_cluster_running, response=StartClusterResponse.from_dict(op_response), cluster_id=cluster_id
-        )
+        return Wait(self.wait_get_cluster_running, cluster_id=cluster_id)

     def start_and_wait(self, cluster_id: str, timeout=timedelta(minutes=20)) -> ClusterDetails:
         return self.start(cluster_id=cluster_id).result(timeout=timeout)
@@ -9302,9 +9293,7 @@ class ClustersAPI:
         }

         op_response = self._api.do("POST", "/api/2.1/clusters/update", body=body, headers=headers)
-        return Wait(
-            self.wait_get_cluster_running, response=UpdateClusterResponse.from_dict(op_response), cluster_id=cluster_id
-        )
+        return Wait(self.wait_get_cluster_running, cluster_id=cluster_id)

     def update_and_wait(
         self,
@@ -9485,7 +9474,6 @@ class CommandExecutionAPI:
         op_response = self._api.do("POST", "/api/1.2/commands/cancel", body=body, headers=headers)
         return Wait(
             self.wait_command_status_command_execution_cancelled,
-            response=CancelResponse.from_dict(op_response),
             cluster_id=cluster_id,
             command_id=command_id,
             context_id=context_id,
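Across these cluster operations the returned `Wait` no longer wraps the empty `*Response` payloads; the long-running-operation pattern callers use is unchanged. A sketch of that unchanged pattern, with an illustrative cluster id:

```python
from datetime import timedelta

from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

# start() still returns a Wait; only its internal construction changed in this diff.
waiter = w.clusters.start(cluster_id="0123-456789-abcdefgh")  # illustrative id
details = waiter.result(timeout=timedelta(minutes=20))
print(details.state)
```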
@@ -10,14 +10,15 @@ from datetime import timedelta
 from enum import Enum
 from typing import Any, Callable, Dict, Iterator, List, Optional

+from databricks.sdk.service import sql
+from databricks.sdk.service._internal import (Wait, _enum, _from_dict,
+                                              _repeated_dict)
+
 from ..errors import OperationFailed
-from ._internal import Wait, _enum, _from_dict, _repeated_dict

 _LOG = logging.getLogger("databricks.sdk")


-from databricks.sdk.service import sql
-
 # all definitions in this file are in alphabetical order


@@ -714,6 +715,8 @@ class GenieQueryAttachment:
     last_updated_timestamp: Optional[int] = None
     """Time when the user updated the query last"""

+    parameters: Optional[List[QueryAttachmentParameter]] = None
+
     query: Optional[str] = None
     """AI generated SQL query"""

@@ -736,6 +739,8 @@ class GenieQueryAttachment:
             body["id"] = self.id
         if self.last_updated_timestamp is not None:
             body["last_updated_timestamp"] = self.last_updated_timestamp
+        if self.parameters:
+            body["parameters"] = [v.as_dict() for v in self.parameters]
         if self.query is not None:
             body["query"] = self.query
         if self.query_result_metadata:
@@ -755,6 +760,8 @@ class GenieQueryAttachment:
             body["id"] = self.id
         if self.last_updated_timestamp is not None:
             body["last_updated_timestamp"] = self.last_updated_timestamp
+        if self.parameters:
+            body["parameters"] = self.parameters
         if self.query is not None:
             body["query"] = self.query
         if self.query_result_metadata:
@@ -772,6 +779,7 @@ class GenieQueryAttachment:
             description=d.get("description", None),
             id=d.get("id", None),
             last_updated_timestamp=d.get("last_updated_timestamp", None),
+            parameters=_repeated_dict(d, "parameters", QueryAttachmentParameter),
             query=d.get("query", None),
             query_result_metadata=_from_dict(d, "query_result_metadata", GenieResultMetadata),
             statement_id=d.get("statement_id", None),
@@ -1135,6 +1143,7 @@ class MessageErrorType(Enum):
     DESCRIBE_QUERY_INVALID_SQL_ERROR = "DESCRIBE_QUERY_INVALID_SQL_ERROR"
     DESCRIBE_QUERY_TIMEOUT = "DESCRIBE_QUERY_TIMEOUT"
     DESCRIBE_QUERY_UNEXPECTED_FAILURE = "DESCRIBE_QUERY_UNEXPECTED_FAILURE"
+    EXCEEDED_MAX_TOKEN_LENGTH_EXCEPTION = "EXCEEDED_MAX_TOKEN_LENGTH_EXCEPTION"
     FUNCTIONS_NOT_AVAILABLE_EXCEPTION = "FUNCTIONS_NOT_AVAILABLE_EXCEPTION"
     FUNCTION_ARGUMENTS_INVALID_EXCEPTION = "FUNCTION_ARGUMENTS_INVALID_EXCEPTION"
     FUNCTION_ARGUMENTS_INVALID_JSON_EXCEPTION = "FUNCTION_ARGUMENTS_INVALID_JSON_EXCEPTION"
@@ -1145,6 +1154,9 @@ class MessageErrorType(Enum):
     GENERIC_CHAT_COMPLETION_SERVICE_EXCEPTION = "GENERIC_CHAT_COMPLETION_SERVICE_EXCEPTION"
     GENERIC_SQL_EXEC_API_CALL_EXCEPTION = "GENERIC_SQL_EXEC_API_CALL_EXCEPTION"
     ILLEGAL_PARAMETER_DEFINITION_EXCEPTION = "ILLEGAL_PARAMETER_DEFINITION_EXCEPTION"
+    INTERNAL_CATALOG_ASSET_CREATION_FAILED_EXCEPTION = "INTERNAL_CATALOG_ASSET_CREATION_FAILED_EXCEPTION"
+    INTERNAL_CATALOG_ASSET_CREATION_ONGOING_EXCEPTION = "INTERNAL_CATALOG_ASSET_CREATION_ONGOING_EXCEPTION"
+    INTERNAL_CATALOG_ASSET_CREATION_UNSUPPORTED_EXCEPTION = "INTERNAL_CATALOG_ASSET_CREATION_UNSUPPORTED_EXCEPTION"
     INTERNAL_CATALOG_MISSING_UC_PATH_EXCEPTION = "INTERNAL_CATALOG_MISSING_UC_PATH_EXCEPTION"
     INTERNAL_CATALOG_PATH_OVERLAP_EXCEPTION = "INTERNAL_CATALOG_PATH_OVERLAP_EXCEPTION"
     INVALID_CERTIFIED_ANSWER_FUNCTION_EXCEPTION = "INVALID_CERTIFIED_ANSWER_FUNCTION_EXCEPTION"
@@ -1260,6 +1272,42 @@ class PublishedDashboard:
         )


+@dataclass
+class QueryAttachmentParameter:
+    keyword: Optional[str] = None
+
+    sql_type: Optional[str] = None
+
+    value: Optional[str] = None
+
+    def as_dict(self) -> dict:
+        """Serializes the QueryAttachmentParameter into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.keyword is not None:
+            body["keyword"] = self.keyword
+        if self.sql_type is not None:
+            body["sql_type"] = self.sql_type
+        if self.value is not None:
+            body["value"] = self.value
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the QueryAttachmentParameter into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.keyword is not None:
+            body["keyword"] = self.keyword
+        if self.sql_type is not None:
+            body["sql_type"] = self.sql_type
+        if self.value is not None:
+            body["value"] = self.value
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, Any]) -> QueryAttachmentParameter:
+        """Deserializes the QueryAttachmentParameter from a dictionary."""
+        return cls(keyword=d.get("keyword", None), sql_type=d.get("sql_type", None), value=d.get("value", None))
+
+
 @dataclass
 class Result:
     is_truncated: Optional[bool] = None
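`GenieQueryAttachment` now carries the parameters bound into the generated SQL through the new `QueryAttachmentParameter` class. A sketch of reading them from a deserialized attachment; the payload is illustrative:

```python
from databricks.sdk.service.dashboards import GenieQueryAttachment

# Illustrative payload shaped like a Genie API response body.
attachment = GenieQueryAttachment.from_dict(
    {
        "query": "SELECT * FROM sales WHERE region = :region",
        "parameters": [{"keyword": "region", "sql_type": "STRING", "value": "EMEA"}],
    }
)
for p in attachment.parameters or []:
    print(p.keyword, p.sql_type, p.value)
```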
@@ -10,7 +10,8 @@ from datetime import timedelta
 from enum import Enum
 from typing import Any, Callable, Dict, Iterator, List, Optional

-from ._internal import Wait, _enum, _from_dict,
+from databricks.sdk.service._internal import (Wait, _enum, _from_dict,
+                                              _repeated_dict)

 _LOG = logging.getLogger("databricks.sdk")

@@ -18,6 +19,38 @@ _LOG = logging.getLogger("databricks.sdk")
 # all definitions in this file are in alphabetical order


+@dataclass
+class CustomTag:
+    key: Optional[str] = None
+    """The key of the custom tag."""
+
+    value: Optional[str] = None
+    """The value of the custom tag."""
+
+    def as_dict(self) -> dict:
+        """Serializes the CustomTag into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.key is not None:
+            body["key"] = self.key
+        if self.value is not None:
+            body["value"] = self.value
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CustomTag into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.key is not None:
+            body["key"] = self.key
+        if self.value is not None:
+            body["value"] = self.value
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, Any]) -> CustomTag:
+        """Deserializes the CustomTag from a dictionary."""
+        return cls(key=d.get("key", None), value=d.get("value", None))
+
+
 @dataclass
 class DatabaseCatalog:
     name: str
@@ -125,9 +158,16 @@ class DatabaseInstance:
     creator: Optional[str] = None
     """The email of the creator of the instance."""

+    custom_tags: Optional[List[CustomTag]] = None
+    """Custom tags associated with the instance. This field is only included on create and update
+    responses."""
+
     effective_capacity: Optional[str] = None
     """Deprecated. The sku of the instance; this field will always match the value of capacity."""

+    effective_custom_tags: Optional[List[CustomTag]] = None
+    """The recorded custom tags associated with the instance."""
+
     effective_enable_pg_native_login: Optional[bool] = None
     """Whether the instance has PG native password login enabled."""

@@ -145,6 +185,9 @@ class DatabaseInstance:
     effective_stopped: Optional[bool] = None
     """Whether the instance is stopped."""

+    effective_usage_policy_id: Optional[str] = None
+    """The policy that is applied to the instance."""
+
     enable_pg_native_login: Optional[bool] = None
     """Whether to enable PG native password login on the instance. Defaults to false."""

@@ -184,6 +227,9 @@ class DatabaseInstance:
     uid: Optional[str] = None
     """An immutable UUID identifier for the instance."""

+    usage_policy_id: Optional[str] = None
+    """The desired usage policy to associate with the instance."""
+
     def as_dict(self) -> dict:
         """Serializes the DatabaseInstance into a dictionary suitable for use as a JSON request body."""
         body = {}
@@ -195,8 +241,12 @@ class DatabaseInstance:
             body["creation_time"] = self.creation_time
         if self.creator is not None:
             body["creator"] = self.creator
+        if self.custom_tags:
+            body["custom_tags"] = [v.as_dict() for v in self.custom_tags]
         if self.effective_capacity is not None:
             body["effective_capacity"] = self.effective_capacity
+        if self.effective_custom_tags:
+            body["effective_custom_tags"] = [v.as_dict() for v in self.effective_custom_tags]
         if self.effective_enable_pg_native_login is not None:
             body["effective_enable_pg_native_login"] = self.effective_enable_pg_native_login
         if self.effective_enable_readable_secondaries is not None:
@@ -207,6 +257,8 @@ class DatabaseInstance:
             body["effective_retention_window_in_days"] = self.effective_retention_window_in_days
         if self.effective_stopped is not None:
             body["effective_stopped"] = self.effective_stopped
+        if self.effective_usage_policy_id is not None:
+            body["effective_usage_policy_id"] = self.effective_usage_policy_id
         if self.enable_pg_native_login is not None:
             body["enable_pg_native_login"] = self.enable_pg_native_login
         if self.enable_readable_secondaries is not None:
@@ -231,6 +283,8 @@ class DatabaseInstance:
             body["stopped"] = self.stopped
         if self.uid is not None:
             body["uid"] = self.uid
+        if self.usage_policy_id is not None:
+            body["usage_policy_id"] = self.usage_policy_id
         return body

     def as_shallow_dict(self) -> dict:
@@ -244,8 +298,12 @@ class DatabaseInstance:
             body["creation_time"] = self.creation_time
         if self.creator is not None:
             body["creator"] = self.creator
+        if self.custom_tags:
+            body["custom_tags"] = self.custom_tags
         if self.effective_capacity is not None:
             body["effective_capacity"] = self.effective_capacity
+        if self.effective_custom_tags:
+            body["effective_custom_tags"] = self.effective_custom_tags
         if self.effective_enable_pg_native_login is not None:
             body["effective_enable_pg_native_login"] = self.effective_enable_pg_native_login
         if self.effective_enable_readable_secondaries is not None:
@@ -256,6 +314,8 @@ class DatabaseInstance:
             body["effective_retention_window_in_days"] = self.effective_retention_window_in_days
         if self.effective_stopped is not None:
             body["effective_stopped"] = self.effective_stopped
+        if self.effective_usage_policy_id is not None:
+            body["effective_usage_policy_id"] = self.effective_usage_policy_id
         if self.enable_pg_native_login is not None:
             body["enable_pg_native_login"] = self.enable_pg_native_login
         if self.enable_readable_secondaries is not None:
@@ -280,6 +340,8 @@ class DatabaseInstance:
             body["stopped"] = self.stopped
         if self.uid is not None:
             body["uid"] = self.uid
+        if self.usage_policy_id is not None:
+            body["usage_policy_id"] = self.usage_policy_id
         return body

     @classmethod
@@ -290,12 +352,15 @@ class DatabaseInstance:
             child_instance_refs=_repeated_dict(d, "child_instance_refs", DatabaseInstanceRef),
             creation_time=d.get("creation_time", None),
             creator=d.get("creator", None),
+            custom_tags=_repeated_dict(d, "custom_tags", CustomTag),
             effective_capacity=d.get("effective_capacity", None),
+            effective_custom_tags=_repeated_dict(d, "effective_custom_tags", CustomTag),
             effective_enable_pg_native_login=d.get("effective_enable_pg_native_login", None),
             effective_enable_readable_secondaries=d.get("effective_enable_readable_secondaries", None),
             effective_node_count=d.get("effective_node_count", None),
             effective_retention_window_in_days=d.get("effective_retention_window_in_days", None),
             effective_stopped=d.get("effective_stopped", None),
+            effective_usage_policy_id=d.get("effective_usage_policy_id", None),
             enable_pg_native_login=d.get("enable_pg_native_login", None),
             enable_readable_secondaries=d.get("enable_readable_secondaries", None),
             name=d.get("name", None),
@@ -308,6 +373,7 @@ class DatabaseInstance:
             state=_enum(d, "state", DatabaseInstanceState),
             stopped=d.get("stopped", None),
             uid=d.get("uid", None),
+            usage_policy_id=d.get("usage_policy_id", None),
         )


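`DatabaseInstance` gains request-side `custom_tags`/`usage_policy_id` and response-side `effective_custom_tags`/`effective_usage_policy_id`. A sketch of tagging an instance at creation time, reusing the `create_database_instance(...).result()` waiter pattern shown earlier in this diff and assuming the API is exposed as `w.database`; the instance name, capacity value, tag, and policy id are illustrative:

```python
from databricks.sdk import WorkspaceClient
from databricks.sdk.service.database import CustomTag, DatabaseInstance

w = WorkspaceClient()

instance = DatabaseInstance(
    name="my-pg-instance",  # illustrative name
    capacity="CU_1",  # illustrative capacity value
    custom_tags=[CustomTag(key="team", value="data-platform")],
    usage_policy_id="1234-abcd",  # illustrative policy id
)
created = w.database.create_database_instance(database_instance=instance).result()
# The applied values are reported back on the effective_* fields.
print(created.effective_custom_tags, created.effective_usage_policy_id)
```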
@@ -390,25 +456,34 @@ class DatabaseInstanceRef:
 class DatabaseInstanceRole:
     """A DatabaseInstanceRole represents a Postgres role in a database instance."""

+    name: str
+    """The name of the role. This is the unique identifier for the role in an instance."""
+
     attributes: Optional[DatabaseInstanceRoleAttributes] = None
-    """API-exposed Postgres role
+    """The desired API-exposed Postgres role attribute to associate with the role. Optional."""
+
+    effective_attributes: Optional[DatabaseInstanceRoleAttributes] = None
+    """The attributes that are applied to the role."""

     identity_type: Optional[DatabaseInstanceRoleIdentityType] = None
     """The type of the role."""

+    instance_name: Optional[str] = None
+
     membership_role: Optional[DatabaseInstanceRoleMembershipRole] = None
     """An enum value for a standard role that this role is a member of."""

-    name: Optional[str] = None
-    """The name of the role. This is the unique identifier for the role in an instance."""
-
     def as_dict(self) -> dict:
         """Serializes the DatabaseInstanceRole into a dictionary suitable for use as a JSON request body."""
         body = {}
         if self.attributes:
             body["attributes"] = self.attributes.as_dict()
+        if self.effective_attributes:
+            body["effective_attributes"] = self.effective_attributes.as_dict()
         if self.identity_type is not None:
             body["identity_type"] = self.identity_type.value
+        if self.instance_name is not None:
+            body["instance_name"] = self.instance_name
         if self.membership_role is not None:
             body["membership_role"] = self.membership_role.value
         if self.name is not None:
@@ -420,8 +495,12 @@ class DatabaseInstanceRole:
         body = {}
         if self.attributes:
             body["attributes"] = self.attributes
+        if self.effective_attributes:
+            body["effective_attributes"] = self.effective_attributes
         if self.identity_type is not None:
             body["identity_type"] = self.identity_type
+        if self.instance_name is not None:
+            body["instance_name"] = self.instance_name
         if self.membership_role is not None:
             body["membership_role"] = self.membership_role
         if self.name is not None:
@@ -433,7 +512,9 @@ class DatabaseInstanceRole:
         """Deserializes the DatabaseInstanceRole from a dictionary."""
         return cls(
             attributes=_from_dict(d, "attributes", DatabaseInstanceRoleAttributes),
+            effective_attributes=_from_dict(d, "effective_attributes", DatabaseInstanceRoleAttributes),
             identity_type=_enum(d, "identity_type", DatabaseInstanceRoleIdentityType),
+            instance_name=d.get("instance_name", None),
             membership_role=_enum(d, "membership_role", DatabaseInstanceRoleMembershipRole),
             name=d.get("name", None),
         )
@@ -868,7 +949,7 @@ class RequestedResource:

 @dataclass
 class SyncedDatabaseTable:
-    """Next field marker:
+    """Next field marker: 18"""

     name: str
     """Full three-part (catalog, schema, table) name of the table."""
@@ -1527,22 +1608,32 @@ class DatabaseAPI:
         return self.create_database_instance(database_instance=database_instance).result(timeout=timeout)

     def create_database_instance_role(
-        self,
+        self,
+        instance_name: str,
+        database_instance_role: DatabaseInstanceRole,
+        *,
+        database_instance_name: Optional[str] = None,
     ) -> DatabaseInstanceRole:
         """Create a role for a Database Instance.

         :param instance_name: str
         :param database_instance_role: :class:`DatabaseInstanceRole`
+        :param database_instance_name: str (optional)

         :returns: :class:`DatabaseInstanceRole`
         """
         body = database_instance_role.as_dict()
+        query = {}
+        if database_instance_name is not None:
+            query["database_instance_name"] = database_instance_name
         headers = {
             "Accept": "application/json",
             "Content-Type": "application/json",
         }

-        res = self._api.do(
+        res = self._api.do(
+            "POST", f"/api/2.0/database/instances/{instance_name}/roles", query=query, body=body, headers=headers
+        )
         return DatabaseInstanceRole.from_dict(res)

     def create_database_table(self, table: DatabaseTable) -> DatabaseTable:
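`create_database_instance_role` now takes `instance_name` plus a `DatabaseInstanceRole` whose `name` is required, and posts to `/api/2.0/database/instances/{instance_name}/roles`. A sketch of the new call shape, assuming the API is exposed as `w.database`; the instance and role names are illustrative:

```python
from databricks.sdk import WorkspaceClient
from databricks.sdk.service.database import DatabaseInstanceRole

w = WorkspaceClient()

# "name" is now a required field on DatabaseInstanceRole; names here are illustrative.
role = w.database.create_database_instance_role(
    instance_name="my-pg-instance",
    database_instance_role=DatabaseInstanceRole(name="reporting_role"),
)
print(role.name, role.effective_attributes)
```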