databricks-sdk 0.67.0__py3-none-any.whl → 0.69.0__py3-none-any.whl
- databricks/sdk/__init__.py +14 -10
- databricks/sdk/_base_client.py +4 -1
- databricks/sdk/common/lro.py +17 -0
- databricks/sdk/common/types/__init__.py +0 -0
- databricks/sdk/common/types/fieldmask.py +39 -0
- databricks/sdk/config.py +62 -14
- databricks/sdk/credentials_provider.py +61 -12
- databricks/sdk/dbutils.py +5 -1
- databricks/sdk/errors/parser.py +8 -3
- databricks/sdk/mixins/files.py +1156 -111
- databricks/sdk/mixins/files_utils.py +293 -0
- databricks/sdk/oidc_token_supplier.py +80 -0
- databricks/sdk/retries.py +102 -2
- databricks/sdk/service/_internal.py +93 -1
- databricks/sdk/service/agentbricks.py +1 -1
- databricks/sdk/service/apps.py +264 -1
- databricks/sdk/service/billing.py +2 -3
- databricks/sdk/service/catalog.py +1026 -540
- databricks/sdk/service/cleanrooms.py +3 -3
- databricks/sdk/service/compute.py +21 -33
- databricks/sdk/service/dashboards.py +7 -3
- databricks/sdk/service/database.py +3 -2
- databricks/sdk/service/dataquality.py +1145 -0
- databricks/sdk/service/files.py +2 -1
- databricks/sdk/service/iam.py +2 -1
- databricks/sdk/service/iamv2.py +1 -1
- databricks/sdk/service/jobs.py +6 -9
- databricks/sdk/service/marketplace.py +3 -1
- databricks/sdk/service/ml.py +3 -1
- databricks/sdk/service/oauth2.py +1 -1
- databricks/sdk/service/pipelines.py +5 -6
- databricks/sdk/service/provisioning.py +544 -655
- databricks/sdk/service/qualitymonitorv2.py +1 -1
- databricks/sdk/service/serving.py +3 -1
- databricks/sdk/service/settings.py +5 -2
- databricks/sdk/service/settingsv2.py +1 -1
- databricks/sdk/service/sharing.py +12 -3
- databricks/sdk/service/sql.py +305 -70
- databricks/sdk/service/tags.py +1 -1
- databricks/sdk/service/vectorsearch.py +3 -1
- databricks/sdk/service/workspace.py +70 -17
- databricks/sdk/version.py +1 -1
- {databricks_sdk-0.67.0.dist-info → databricks_sdk-0.69.0.dist-info}/METADATA +4 -2
- databricks_sdk-0.69.0.dist-info/RECORD +84 -0
- databricks_sdk-0.67.0.dist-info/RECORD +0 -79
- {databricks_sdk-0.67.0.dist-info → databricks_sdk-0.69.0.dist-info}/WHEEL +0 -0
- {databricks_sdk-0.67.0.dist-info → databricks_sdk-0.69.0.dist-info}/licenses/LICENSE +0 -0
- {databricks_sdk-0.67.0.dist-info → databricks_sdk-0.69.0.dist-info}/licenses/NOTICE +0 -0
- {databricks_sdk-0.67.0.dist-info → databricks_sdk-0.69.0.dist-info}/top_level.txt +0 -0
@@ -10,13 +10,13 @@ from datetime import timedelta
 from enum import Enum
 from typing import Any, Callable, Dict, Iterator, List, Optional

-from .
+from databricks.sdk.service import catalog, jobs, settings, sharing
+from databricks.sdk.service._internal import (Wait, _enum, _from_dict,
+                                              _repeated_dict)

 _LOG = logging.getLogger("databricks.sdk")


-from databricks.sdk.service import catalog, jobs, settings, sharing
-
 # all definitions in this file are in alphabetical order


@@ -10,8 +10,10 @@ from datetime import timedelta
 from enum import Enum
 from typing import Any, Callable, Dict, Iterator, List, Optional

+from databricks.sdk.service._internal import (Wait, _enum, _from_dict,
+                                              _repeated_dict, _repeated_enum)
+
 from ..errors import OperationFailed
-from ._internal import Wait, _enum, _from_dict, _repeated_dict, _repeated_enum

 _LOG = logging.getLogger("databricks.sdk")

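Both import hunks above only swap the generated modules' relative imports of the SDK's private helpers (`Wait`, `_enum`, `_from_dict`, `_repeated_dict`, `_repeated_enum`) for absolute imports from `databricks.sdk.service._internal`. A minimal sketch of the unaffected public surface, assuming a workspace client configured through the usual environment variables:

# Public entry points keep their paths; only the internal helper imports moved.
from databricks.sdk import WorkspaceClient
from databricks.sdk.service import compute  # generated models keep their public location

w = WorkspaceClient()  # assumes host and auth are already configured in the environment
print(type(w.clusters).__module__)  # module of the ClustersAPI shown in the hunks below
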
@@ -3135,12 +3137,9 @@ class Environment:
     """Required. Environment version used by the environment. Each version comes with a specific Python
     version and a set of Python packages. The version is a string, consisting of an integer."""

-    jar_dependencies: Optional[List[str]] = None
-    """Use `java_dependencies` instead."""
-
     java_dependencies: Optional[List[str]] = None
-    """List of
-    `/Volumes/path/to/test.jar`."""
+    """List of java dependencies. Each dependency is a string representing a java library path. For
+    example: `/Volumes/path/to/test.jar`."""

     def as_dict(self) -> dict:
         """Serializes the Environment into a dictionary suitable for use as a JSON request body."""
@@ -3151,8 +3150,6 @@ class Environment:
             body["dependencies"] = [v for v in self.dependencies]
         if self.environment_version is not None:
             body["environment_version"] = self.environment_version
-        if self.jar_dependencies:
-            body["jar_dependencies"] = [v for v in self.jar_dependencies]
         if self.java_dependencies:
             body["java_dependencies"] = [v for v in self.java_dependencies]
         return body
@@ -3166,8 +3163,6 @@ class Environment:
             body["dependencies"] = self.dependencies
         if self.environment_version is not None:
             body["environment_version"] = self.environment_version
-        if self.jar_dependencies:
-            body["jar_dependencies"] = self.jar_dependencies
         if self.java_dependencies:
             body["java_dependencies"] = self.java_dependencies
         return body
@@ -3179,7 +3174,6 @@ class Environment:
             client=d.get("client", None),
             dependencies=d.get("dependencies", None),
             environment_version=d.get("environment_version", None),
-            jar_dependencies=d.get("jar_dependencies", None),
             java_dependencies=d.get("java_dependencies", None),
         )

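The four Environment hunks above remove the deprecated `jar_dependencies` field entirely: the dataclass no longer declares it, and neither serializer nor `from_dict` handles the old key. A hedged sketch of the resulting behaviour, assuming `Environment` is the generated dataclass in `databricks.sdk.service.compute` and that its fields are keyword arguments with `None` defaults, as in recent releases:

from databricks.sdk.service.compute import Environment  # assumed location of the generated class

env = Environment(
    environment_version="3",                          # documented as required by the API
    dependencies=["requests==2.32.3"],
    java_dependencies=["/Volumes/path/to/test.jar"],  # replacement for jar_dependencies
)

body = env.as_dict()
assert "jar_dependencies" not in body                 # the old key is never emitted anymore
assert body["java_dependencies"] == ["/Volumes/path/to/test.jar"]

# Payloads that still carry the removed key are silently ignored on deserialization.
restored = Environment.from_dict({"environment_version": "3", "jar_dependencies": ["/Volumes/a.jar"]})
assert restored.java_dependencies is None
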
@@ -6621,9 +6615,16 @@ class Results:
     data: Optional[Any] = None

     file_name: Optional[str] = None
-    """The image
+    """The image data in one of the following formats:
+
+    1. A Data URL with base64-encoded image data: `data:image/{type};base64,{base64-data}`. Example:
+    `data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAAUA...`
+
+    2. A FileStore file path for large images: `/plots/{filename}.png`. Example:
+    `/plots/b6a7ad70-fb2c-4353-8aed-3f1e015174a4.png`"""

     file_names: Optional[List[str]] = None
+    """List of image data for multiple images. Each element follows the same format as file_name."""

     is_json_schema: Optional[bool] = None
     """true if a JSON schema is returned instead of a string representation of the Hive type."""
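The expanded docstring spells out the two shapes `Results.file_name` (and each entry of `file_names`) can take. A hedged helper that tells them apart and decodes the inline case; the FileStore branch is left as a comment because fetching it depends on how you access the workspace:

import base64
from typing import Optional

def decode_inline_image(file_name: str) -> Optional[bytes]:
    """Return raw image bytes for format 1 (data URL); None for format 2 (FileStore path)."""
    if file_name.startswith("data:image/"):
        # data:image/{type};base64,{payload}
        _, _, payload = file_name.partition("base64,")
        return base64.b64decode(payload)
    # Format 2: a path such as /plots/<uuid>.png stored under the workspace FileStore;
    # download it separately (e.g. over DBFS) before use.
    return None
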
@@ -7067,6 +7068,7 @@ class TerminationReasonCode(Enum):
     BOOTSTRAP_TIMEOUT_DUE_TO_MISCONFIG = "BOOTSTRAP_TIMEOUT_DUE_TO_MISCONFIG"
     BUDGET_POLICY_LIMIT_ENFORCEMENT_ACTIVATED = "BUDGET_POLICY_LIMIT_ENFORCEMENT_ACTIVATED"
     BUDGET_POLICY_RESOLUTION_FAILURE = "BUDGET_POLICY_RESOLUTION_FAILURE"
+    CLOUD_ACCOUNT_POD_QUOTA_EXCEEDED = "CLOUD_ACCOUNT_POD_QUOTA_EXCEEDED"
     CLOUD_ACCOUNT_SETUP_FAILURE = "CLOUD_ACCOUNT_SETUP_FAILURE"
     CLOUD_OPERATION_CANCELLED = "CLOUD_OPERATION_CANCELLED"
     CLOUD_PROVIDER_DISK_SETUP_FAILURE = "CLOUD_PROVIDER_DISK_SETUP_FAILURE"
@@ -7144,6 +7146,7 @@ class TerminationReasonCode(Enum):
     IN_PENALTY_BOX = "IN_PENALTY_BOX"
     IP_EXHAUSTION_FAILURE = "IP_EXHAUSTION_FAILURE"
     JOB_FINISHED = "JOB_FINISHED"
+    K8S_ACTIVE_POD_QUOTA_EXCEEDED = "K8S_ACTIVE_POD_QUOTA_EXCEEDED"
     K8S_AUTOSCALING_FAILURE = "K8S_AUTOSCALING_FAILURE"
     K8S_DBR_CLUSTER_LAUNCH_TIMEOUT = "K8S_DBR_CLUSTER_LAUNCH_TIMEOUT"
     LAZY_ALLOCATION_TIMEOUT = "LAZY_ALLOCATION_TIMEOUT"
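These two hunks add pod-quota termination codes to `TerminationReasonCode`. A hedged sketch of checking for them on a terminated cluster, assuming the `termination_reason.code` attributes of the generated `ClusterDetails` model and a placeholder cluster ID:

from databricks.sdk import WorkspaceClient
from databricks.sdk.service.compute import TerminationReasonCode

POD_QUOTA_CODES = {
    TerminationReasonCode.CLOUD_ACCOUNT_POD_QUOTA_EXCEEDED,  # added in this diff
    TerminationReasonCode.K8S_ACTIVE_POD_QUOTA_EXCEEDED,     # added in this diff
}

w = WorkspaceClient()
details = w.clusters.get(cluster_id="0123-456789-example")   # placeholder cluster ID
reason = details.termination_reason
if reason is not None and reason.code in POD_QUOTA_CODES:
    print(f"cluster stopped because a pod quota was exhausted: {reason.code.value}")
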
@@ -8493,11 +8496,7 @@ class ClustersAPI:
         }

         op_response = self._api.do("POST", "/api/2.1/clusters/delete", body=body, headers=headers)
-        return Wait(
-            self.wait_get_cluster_terminated,
-            response=DeleteClusterResponse.from_dict(op_response),
-            cluster_id=cluster_id,
-        )
+        return Wait(self.wait_get_cluster_terminated, cluster_id=cluster_id)

     def delete_and_wait(self, cluster_id: str, timeout=timedelta(minutes=20)) -> ClusterDetails:
         return self.delete(cluster_id=cluster_id).result(timeout=timeout)
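`ClustersAPI.delete` now returns a `Wait` that carries only the waiter and the `cluster_id`; the parsed `DeleteClusterResponse` is no longer attached. Callers that block on the result, directly or via `delete_and_wait`, behave as before. A sketch with a placeholder cluster ID:

from datetime import timedelta

from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

# Block on the Wait returned by delete(); it polls wait_get_cluster_terminated.
terminated = w.clusters.delete(cluster_id="0123-456789-example").result(timeout=timedelta(minutes=20))
print(terminated.state)

# Equivalent convenience wrapper, unchanged by this diff.
terminated = w.clusters.delete_and_wait(cluster_id="0123-456789-example")
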
@@ -8759,9 +8758,7 @@ class ClustersAPI:
         }

         op_response = self._api.do("POST", "/api/2.1/clusters/edit", body=body, headers=headers)
-        return Wait(
-            self.wait_get_cluster_running, response=EditClusterResponse.from_dict(op_response), cluster_id=cluster_id
-        )
+        return Wait(self.wait_get_cluster_running, cluster_id=cluster_id)

     def edit_and_wait(
         self,
@@ -9123,9 +9120,7 @@ class ClustersAPI:
         }

         op_response = self._api.do("POST", "/api/2.1/clusters/resize", body=body, headers=headers)
-        return Wait(
-            self.wait_get_cluster_running, response=ResizeClusterResponse.from_dict(op_response), cluster_id=cluster_id
-        )
+        return Wait(self.wait_get_cluster_running, cluster_id=cluster_id)

     def resize_and_wait(
         self,
@@ -9160,9 +9155,7 @@ class ClustersAPI:
         }

         op_response = self._api.do("POST", "/api/2.1/clusters/restart", body=body, headers=headers)
-        return Wait(
-            self.wait_get_cluster_running, response=RestartClusterResponse.from_dict(op_response), cluster_id=cluster_id
-        )
+        return Wait(self.wait_get_cluster_running, cluster_id=cluster_id)

     def restart_and_wait(
         self, cluster_id: str, *, restart_user: Optional[str] = None, timeout=timedelta(minutes=20)
@@ -9229,9 +9222,7 @@ class ClustersAPI:
         }

         op_response = self._api.do("POST", "/api/2.1/clusters/start", body=body, headers=headers)
-        return Wait(
-            self.wait_get_cluster_running, response=StartClusterResponse.from_dict(op_response), cluster_id=cluster_id
-        )
+        return Wait(self.wait_get_cluster_running, cluster_id=cluster_id)

     def start_and_wait(self, cluster_id: str, timeout=timedelta(minutes=20)) -> ClusterDetails:
         return self.start(cluster_id=cluster_id).result(timeout=timeout)
@@ -9302,9 +9293,7 @@ class ClustersAPI:
         }

         op_response = self._api.do("POST", "/api/2.1/clusters/update", body=body, headers=headers)
-        return Wait(
-            self.wait_get_cluster_running, response=UpdateClusterResponse.from_dict(op_response), cluster_id=cluster_id
-        )
+        return Wait(self.wait_get_cluster_running, cluster_id=cluster_id)

     def update_and_wait(
         self,
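The same simplification is applied to `edit`, `resize`, `restart`, `start`, and `update`: each now returns `Wait(self.wait_get_cluster_running, cluster_id=cluster_id)` without the parsed HTTP response. Code that only ever called `.result()` or the `*_and_wait` helpers needs no change; a sketch, again with a placeholder cluster ID:

from datetime import timedelta

from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

running = w.clusters.start(cluster_id="0123-456789-example").result(timeout=timedelta(minutes=20))
print(running.state)  # ClusterDetails returned by wait_get_cluster_running

resized = w.clusters.resize_and_wait(cluster_id="0123-456789-example", num_workers=4)
print(resized.num_workers)
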
@@ -9485,7 +9474,6 @@ class CommandExecutionAPI:
         op_response = self._api.do("POST", "/api/1.2/commands/cancel", body=body, headers=headers)
         return Wait(
             self.wait_command_status_command_execution_cancelled,
-            response=CancelResponse.from_dict(op_response),
             cluster_id=cluster_id,
             command_id=command_id,
             context_id=context_id,
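`CommandExecutionAPI.cancel` keeps the identifiers it needs for polling but no longer packs a `CancelResponse` into the `Wait`. Blocking on cancellation works as before; a hedged sketch with placeholder identifiers:

from databricks.sdk import WorkspaceClient

w = WorkspaceClient()
status = w.command_execution.cancel(
    cluster_id="0123-456789-example",
    context_id="example-context-id",
    command_id="example-command-id",
).result()  # polls wait_command_status_command_execution_cancelled
print(status.status)
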
@@ -10,14 +10,15 @@ from datetime import timedelta
 from enum import Enum
 from typing import Any, Callable, Dict, Iterator, List, Optional

+from databricks.sdk.service import sql
+from databricks.sdk.service._internal import (Wait, _enum, _from_dict,
+                                              _repeated_dict)
+
 from ..errors import OperationFailed
-from ._internal import Wait, _enum, _from_dict, _repeated_dict

 _LOG = logging.getLogger("databricks.sdk")


-from databricks.sdk.service import sql
-
 # all definitions in this file are in alphabetical order


@@ -1153,6 +1154,9 @@ class MessageErrorType(Enum):
     GENERIC_CHAT_COMPLETION_SERVICE_EXCEPTION = "GENERIC_CHAT_COMPLETION_SERVICE_EXCEPTION"
     GENERIC_SQL_EXEC_API_CALL_EXCEPTION = "GENERIC_SQL_EXEC_API_CALL_EXCEPTION"
     ILLEGAL_PARAMETER_DEFINITION_EXCEPTION = "ILLEGAL_PARAMETER_DEFINITION_EXCEPTION"
+    INTERNAL_CATALOG_ASSET_CREATION_FAILED_EXCEPTION = "INTERNAL_CATALOG_ASSET_CREATION_FAILED_EXCEPTION"
+    INTERNAL_CATALOG_ASSET_CREATION_ONGOING_EXCEPTION = "INTERNAL_CATALOG_ASSET_CREATION_ONGOING_EXCEPTION"
+    INTERNAL_CATALOG_ASSET_CREATION_UNSUPPORTED_EXCEPTION = "INTERNAL_CATALOG_ASSET_CREATION_UNSUPPORTED_EXCEPTION"
     INTERNAL_CATALOG_MISSING_UC_PATH_EXCEPTION = "INTERNAL_CATALOG_MISSING_UC_PATH_EXCEPTION"
     INTERNAL_CATALOG_PATH_OVERLAP_EXCEPTION = "INTERNAL_CATALOG_PATH_OVERLAP_EXCEPTION"
     INVALID_CERTIFIED_ANSWER_FUNCTION_EXCEPTION = "INVALID_CERTIFIED_ANSWER_FUNCTION_EXCEPTION"
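Three new Genie error types cover internal catalog asset creation. A hedged sketch of one way to classify them when inspecting a message error; the retry policy is illustrative, not something the SDK prescribes, and `MessageErrorType` is assumed to live in `databricks.sdk.service.dashboards`:

from databricks.sdk.service.dashboards import MessageErrorType

RETRYABLE = {
    MessageErrorType.INTERNAL_CATALOG_ASSET_CREATION_ONGOING_EXCEPTION,  # creation still in progress
}
TERMINAL = {
    MessageErrorType.INTERNAL_CATALOG_ASSET_CREATION_FAILED_EXCEPTION,
    MessageErrorType.INTERNAL_CATALOG_ASSET_CREATION_UNSUPPORTED_EXCEPTION,
}

def should_retry(error_type: MessageErrorType) -> bool:
    """Illustrative policy: retry only while asset creation is still ongoing."""
    if error_type in TERMINAL:
        return False
    return error_type in RETRYABLE
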
@@ -10,7 +10,8 @@ from datetime import timedelta
 from enum import Enum
 from typing import Any, Callable, Dict, Iterator, List, Optional

-from ._internal import Wait, _enum, _from_dict,
+from databricks.sdk.service._internal import (Wait, _enum, _from_dict,
+                                              _repeated_dict)

 _LOG = logging.getLogger("databricks.sdk")

@@ -948,7 +949,7 @@ class RequestedResource:

 @dataclass
 class SyncedDatabaseTable:
-    """Next field marker:
+    """Next field marker: 18"""

     name: str
     """Full three-part (catalog, schema, table) name of the table."""