databricks-sdk 0.0.7__py3-none-any.whl → 0.1.1__py3-none-any.whl
This diff compares the contents of two publicly released versions of the package, as they appear in their public registry. It is provided for informational purposes only.
Potentially problematic release: this version of databricks-sdk might be problematic.
- databricks/sdk/__init__.py +121 -104
- databricks/sdk/core.py +76 -16
- databricks/sdk/dbutils.py +18 -17
- databricks/sdk/mixins/compute.py +6 -6
- databricks/sdk/mixins/dbfs.py +6 -6
- databricks/sdk/oauth.py +28 -14
- databricks/sdk/service/{unitycatalog.py → catalog.py} +375 -1146
- databricks/sdk/service/{clusters.py → compute.py} +2176 -61
- databricks/sdk/service/{dbfs.py → files.py} +6 -6
- databricks/sdk/service/{scim.py → iam.py} +567 -27
- databricks/sdk/service/jobs.py +44 -34
- databricks/sdk/service/{mlflow.py → ml.py} +976 -1071
- databricks/sdk/service/oauth2.py +3 -3
- databricks/sdk/service/pipelines.py +46 -30
- databricks/sdk/service/{deployment.py → provisioning.py} +47 -29
- databricks/sdk/service/settings.py +849 -0
- databricks/sdk/service/sharing.py +1176 -0
- databricks/sdk/service/sql.py +15 -15
- databricks/sdk/service/workspace.py +917 -22
- databricks/sdk/version.py +1 -1
- {databricks_sdk-0.0.7.dist-info → databricks_sdk-0.1.1.dist-info}/METADATA +3 -1
- databricks_sdk-0.1.1.dist-info/RECORD +37 -0
- databricks/sdk/service/clusterpolicies.py +0 -399
- databricks/sdk/service/commands.py +0 -478
- databricks/sdk/service/gitcredentials.py +0 -202
- databricks/sdk/service/globalinitscripts.py +0 -262
- databricks/sdk/service/instancepools.py +0 -757
- databricks/sdk/service/ipaccesslists.py +0 -340
- databricks/sdk/service/libraries.py +0 -282
- databricks/sdk/service/permissions.py +0 -470
- databricks/sdk/service/repos.py +0 -250
- databricks/sdk/service/secrets.py +0 -472
- databricks/sdk/service/tokenmanagement.py +0 -182
- databricks/sdk/service/tokens.py +0 -137
- databricks/sdk/service/workspaceconf.py +0 -50
- databricks_sdk-0.0.7.dist-info/RECORD +0 -48
- databricks/sdk/service/{endpoints.py → serving.py} +0 -0
- {databricks_sdk-0.0.7.dist-info → databricks_sdk-0.1.1.dist-info}/LICENSE +0 -0
- {databricks_sdk-0.0.7.dist-info → databricks_sdk-0.1.1.dist-info}/NOTICE +0 -0
- {databricks_sdk-0.0.7.dist-info → databricks_sdk-0.1.1.dist-info}/WHEEL +0 -0
- {databricks_sdk-0.0.7.dist-info → databricks_sdk-0.1.1.dist-info}/top_level.txt +0 -0
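
The most disruptive change for downstream code is the rename and consolidation of the generated service modules (`unitycatalog` → `catalog`, `clusters` → `compute`, `dbfs` → `files`, `scim` → `iam`, `mlflow` → `ml`, `deployment` → `provisioning`, `endpoints` → `serving`), with the deleted modules (`clusterpolicies`, `commands`, `instancepools`, and so on) folded into them. A minimal migration sketch for code that imported the generated API classes directly; the 0.0.7 lines are inferred from the renames listed above:

```python
# databricks-sdk 0.0.7 (old module layout):
# from databricks.sdk.service.clusters import ClustersAPI
# from databricks.sdk.service.scim import UsersAPI
# from databricks.sdk.service.dbfs import DbfsAPI

# databricks-sdk 0.1.1 (renamed/consolidated modules):
from databricks.sdk.service.compute import ClustersAPI
from databricks.sdk.service.iam import UsersAPI
from databricks.sdk.service.files import DbfsAPI
```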
databricks/sdk/__init__.py
CHANGED
```diff
@@ -1,33 +1,56 @@
 import databricks.sdk.core as client
 import databricks.sdk.dbutils as dbutils
-[old lines 3-30: per-service imports, truncated in the source rendering]
+from databricks.sdk.mixins.compute import ClustersExt
+from databricks.sdk.mixins.dbfs import DbfsExt
+from databricks.sdk.service.billing import (BillableUsageAPI, BudgetsAPI,
+                                            LogDeliveryAPI)
+from databricks.sdk.service.catalog import (AccountMetastoreAssignmentsAPI,
+                                            AccountMetastoresAPI,
+                                            AccountStorageCredentialsAPI,
+                                            CatalogsAPI, ExternalLocationsAPI,
+                                            FunctionsAPI, GrantsAPI,
+                                            MetastoresAPI, SchemasAPI,
+                                            StorageCredentialsAPI,
+                                            TableConstraintsAPI, TablesAPI,
+                                            VolumesAPI)
+from databricks.sdk.service.compute import (ClusterPoliciesAPI, ClustersAPI,
+                                            CommandExecutionAPI,
+                                            GlobalInitScriptsAPI,
+                                            InstancePoolsAPI,
+                                            InstanceProfilesAPI, LibrariesAPI,
+                                            PolicyFamiliesAPI)
+from databricks.sdk.service.files import DbfsAPI
+from databricks.sdk.service.iam import (AccountGroupsAPI,
+                                        AccountServicePrincipalsAPI,
+                                        AccountUsersAPI, CurrentUserAPI,
+                                        GroupsAPI, PermissionsAPI,
+                                        ServicePrincipalsAPI, UsersAPI,
+                                        WorkspaceAssignmentAPI)
+from databricks.sdk.service.jobs import JobsAPI
+from databricks.sdk.service.ml import ExperimentsAPI, ModelRegistryAPI
+from databricks.sdk.service.oauth2 import (CustomAppIntegrationAPI,
+                                           OAuthEnrollmentAPI,
+                                           PublishedAppIntegrationAPI)
+from databricks.sdk.service.pipelines import PipelinesAPI
+from databricks.sdk.service.provisioning import (CredentialsAPI,
+                                                 EncryptionKeysAPI,
+                                                 NetworksAPI, PrivateAccessAPI,
+                                                 StorageAPI, VpcEndpointsAPI,
+                                                 WorkspacesAPI)
+from databricks.sdk.service.serving import ServingEndpointsAPI
+from databricks.sdk.service.settings import (AccountIpAccessListsAPI,
+                                             IpAccessListsAPI,
+                                             TokenManagementAPI, TokensAPI,
+                                             WorkspaceConfAPI)
+from databricks.sdk.service.sharing import (ProvidersAPI,
+                                            RecipientActivationAPI,
+                                            RecipientsAPI, SharesAPI)
+from databricks.sdk.service.sql import (AlertsAPI, DashboardsAPI,
+                                        DataSourcesAPI, DbsqlPermissionsAPI,
+                                        QueriesAPI, QueryHistoryAPI,
+                                        StatementExecutionAPI, WarehousesAPI)
+from databricks.sdk.service.workspace import (GitCredentialsAPI, ReposAPI,
+                                              SecretsAPI, WorkspaceAPI)
 
 
 class WorkspaceClient:
@@ -81,62 +104,55 @@ class WorkspaceClient:
         self.config = config
         self.dbutils = dbutils.RemoteDbUtils(self.config)
         self.api_client = client.ApiClient(self.config)
-        self.alerts = …
-        self.catalogs = …
-        self.cluster_policies = …
-        self.clusters = …
-        self.command_execution = …
-        self.current_user = …
-        self.dashboards = …
-        self.data_sources = …
-        self.dbfs = …
-        self.dbsql_permissions = …
-        self.experiments = …
-        self.external_locations = …
-        self.functions = …
-        self.git_credentials = …
-        self.global_init_scripts = …
-        self.grants = …
-        self.groups = …
-        self.instance_pools = …
-        self.instance_profiles = …
-        self.ip_access_lists = …
-        self.jobs = …
-        self.libraries = …
-        [old lines 106-132 truncated in the source rendering]
-        self.token_management = tokenmanagement.TokenManagementAPI(self.api_client)
-        self.tokens = tokens.TokensAPI(self.api_client)
-        self.transition_requests = mlflow.TransitionRequestsAPI(self.api_client)
-        self.users = scim.UsersAPI(self.api_client)
-        self.warehouses = sql.WarehousesAPI(self.api_client)
-        self.workspace = workspace.WorkspaceAPI(self.api_client)
-        self.workspace_conf = workspaceconf.WorkspaceConfAPI(self.api_client)
+        self.alerts = AlertsAPI(self.api_client)
+        self.catalogs = CatalogsAPI(self.api_client)
+        self.cluster_policies = ClusterPoliciesAPI(self.api_client)
+        self.clusters = ClustersExt(self.api_client)
+        self.command_execution = CommandExecutionAPI(self.api_client)
+        self.current_user = CurrentUserAPI(self.api_client)
+        self.dashboards = DashboardsAPI(self.api_client)
+        self.data_sources = DataSourcesAPI(self.api_client)
+        self.dbfs = DbfsExt(self.api_client)
+        self.dbsql_permissions = DbsqlPermissionsAPI(self.api_client)
+        self.experiments = ExperimentsAPI(self.api_client)
+        self.external_locations = ExternalLocationsAPI(self.api_client)
+        self.functions = FunctionsAPI(self.api_client)
+        self.git_credentials = GitCredentialsAPI(self.api_client)
+        self.global_init_scripts = GlobalInitScriptsAPI(self.api_client)
+        self.grants = GrantsAPI(self.api_client)
+        self.groups = GroupsAPI(self.api_client)
+        self.instance_pools = InstancePoolsAPI(self.api_client)
+        self.instance_profiles = InstanceProfilesAPI(self.api_client)
+        self.ip_access_lists = IpAccessListsAPI(self.api_client)
+        self.jobs = JobsAPI(self.api_client)
+        self.libraries = LibrariesAPI(self.api_client)
+        self.metastores = MetastoresAPI(self.api_client)
+        self.model_registry = ModelRegistryAPI(self.api_client)
+        self.permissions = PermissionsAPI(self.api_client)
+        self.pipelines = PipelinesAPI(self.api_client)
+        self.policy_families = PolicyFamiliesAPI(self.api_client)
+        self.providers = ProvidersAPI(self.api_client)
+        self.queries = QueriesAPI(self.api_client)
+        self.query_history = QueryHistoryAPI(self.api_client)
+        self.recipient_activation = RecipientActivationAPI(self.api_client)
+        self.recipients = RecipientsAPI(self.api_client)
+        self.repos = ReposAPI(self.api_client)
+        self.schemas = SchemasAPI(self.api_client)
+        self.secrets = SecretsAPI(self.api_client)
+        self.service_principals = ServicePrincipalsAPI(self.api_client)
+        self.serving_endpoints = ServingEndpointsAPI(self.api_client)
+        self.shares = SharesAPI(self.api_client)
+        self.statement_execution = StatementExecutionAPI(self.api_client)
+        self.storage_credentials = StorageCredentialsAPI(self.api_client)
+        self.table_constraints = TableConstraintsAPI(self.api_client)
+        self.tables = TablesAPI(self.api_client)
+        self.token_management = TokenManagementAPI(self.api_client)
+        self.tokens = TokensAPI(self.api_client)
+        self.users = UsersAPI(self.api_client)
+        self.volumes = VolumesAPI(self.api_client)
+        self.warehouses = WarehousesAPI(self.api_client)
+        self.workspace = WorkspaceAPI(self.api_client)
+        self.workspace_conf = WorkspaceConfAPI(self.api_client)
 
 
 class AccountClient:
@@ -189,23 +205,24 @@ class AccountClient:
                              product_version=product_version)
         self.config = config
         self.api_client = client.ApiClient(self.config)
-        self.billable_usage = …
-        self.budgets = …
-        self.credentials = …
-        self.custom_app_integration = …
-        self.encryption_keys = …
-        [old lines 197-211 truncated in the source rendering]
+        self.billable_usage = BillableUsageAPI(self.api_client)
+        self.budgets = BudgetsAPI(self.api_client)
+        self.credentials = CredentialsAPI(self.api_client)
+        self.custom_app_integration = CustomAppIntegrationAPI(self.api_client)
+        self.encryption_keys = EncryptionKeysAPI(self.api_client)
+        self.groups = AccountGroupsAPI(self.api_client)
+        self.ip_access_lists = AccountIpAccessListsAPI(self.api_client)
+        self.log_delivery = LogDeliveryAPI(self.api_client)
+        self.metastore_assignments = AccountMetastoreAssignmentsAPI(self.api_client)
+        self.metastores = AccountMetastoresAPI(self.api_client)
+        self.networks = NetworksAPI(self.api_client)
+        self.o_auth_enrollment = OAuthEnrollmentAPI(self.api_client)
+        self.private_access = PrivateAccessAPI(self.api_client)
+        self.published_app_integration = PublishedAppIntegrationAPI(self.api_client)
+        self.service_principals = AccountServicePrincipalsAPI(self.api_client)
+        self.storage = StorageAPI(self.api_client)
+        self.storage_credentials = AccountStorageCredentialsAPI(self.api_client)
+        self.users = AccountUsersAPI(self.api_client)
+        self.vpc_endpoints = VpcEndpointsAPI(self.api_client)
+        self.workspace_assignment = WorkspaceAssignmentAPI(self.api_client)
+        self.workspaces = WorkspacesAPI(self.api_client)
```
databricks/sdk/core.py
CHANGED
```diff
@@ -114,7 +114,9 @@ def oauth_service_principal(cfg: 'Config') -> Optional[HeaderFactory]:
 def external_browser(cfg: 'Config') -> Optional[HeaderFactory]:
     if cfg.auth_type != 'external-browser':
         return None
-    if cfg.is_aws:
+    if cfg.client_id:
+        client_id = cfg.client_id
+    elif cfg.is_aws:
         client_id = 'databricks-cli'
     elif cfg.is_azure:
         # Use Azure AD app for cases when Azure CLI is not available on the machine.
@@ -123,7 +125,10 @@ def external_browser(cfg: 'Config') -> Optional[HeaderFactory]:
         client_id = '6128a518-99a9-425b-8333-4cc94f04cacd'
     else:
         raise ValueError(f'local browser SSO is not supported')
-    oauth_client = OAuthClient(cfg.host, …
+    oauth_client = OAuthClient(host=cfg.host,
+                               client_id=client_id,
+                               redirect_url='http://localhost:8020',
+                               client_secret=cfg.client_secret)
     consent = oauth_client.initiate_consent()
     if not consent:
         return None
@@ -177,16 +182,18 @@ def azure_service_principal(cfg: 'Config') -> HeaderFactory:
         return refreshed_headers
 
 
-class AzureCliTokenSource(Refreshable):
-    """ Obtain the token granted by `az login` CLI command """
+class CliTokenSource(Refreshable):
 
-    def __init__(self, …
+    def __init__(self, cmd: List[str], token_type_field: str, access_token_field: str, expiry_field: str):
         super().__init__()
-        self.…
+        self._cmd = cmd
+        self._token_type_field = token_type_field
+        self._access_token_field = access_token_field
+        self._expiry_field = expiry_field
 
     @staticmethod
     def _parse_expiry(expiry: str) -> datetime:
-        for fmt in ("%Y-%m-%d %H:%M:%S.%f", "%Y-%m-%d %H:%M:%S"):
+        for fmt in ("%Y-%m-%d %H:%M:%S.%f", "%Y-%m-%d %H:%M:%S", "%Y-%m-%dT%H:%M:%S.%f%z"):
             try:
                 return datetime.strptime(expiry, fmt)
             except ValueError as e:
@@ -196,18 +203,28 @@ class AzureCliTokenSource(Refreshable):
 
     def refresh(self) -> Token:
         try:
-            …
-            out = subprocess.check_output(cmd, stderr=subprocess.STDOUT)
+            out = subprocess.check_output(self._cmd, stderr=subprocess.STDOUT)
             it = json.loads(out.decode())
-            expires_on = self._parse_expiry(it[…
-            return Token(access_token=it[…
-            …
-                         token_type=it["tokenType"],
+            expires_on = self._parse_expiry(it[self._expiry_field])
+            return Token(access_token=it[self._access_token_field],
+                         token_type=it[self._token_type_field],
                          expiry=expires_on)
         except ValueError as e:
             raise ValueError(f"cannot unmarshal CLI result: {e}")
         except subprocess.CalledProcessError as e:
-            …
+            message = e.output.decode().strip()
+            raise IOError(f'cannot get access token: {message}') from e
+
+
+class AzureCliTokenSource(CliTokenSource):
+    """ Obtain the token granted by `az login` CLI command """
+
+    def __init__(self, resource: str):
+        cmd = ["az", "account", "get-access-token", "--resource", resource, "--output", "json"]
+        super().__init__(cmd=cmd,
+                         token_type_field='tokenType',
+                         access_token_field='accessToken',
+                         expiry_field='expiresOn')
 
 
 @credentials_provider('azure-cli', ['is_azure'])
@@ -231,6 +248,45 @@ def azure_cli(cfg: 'Config') -> Optional[HeaderFactory]:
     return inner
 
 
+class BricksCliTokenSource(CliTokenSource):
+    """ Obtain the token granted by `bricks auth login` CLI command """
+
+    def __init__(self, cfg: 'Config'):
+        cli_path = cfg.bricks_cli_path
+        if not cli_path:
+            cli_path = 'bricks'
+        cmd = [cli_path, 'auth', 'token', '--host', cfg.host]
+        if cfg.is_account_client:
+            cmd += ['--account-id', cfg.account_id]
+        super().__init__(cmd=cmd,
+                         token_type_field='token_type',
+                         access_token_field='access_token',
+                         expiry_field='expiry')
+
+
+@credentials_provider('bricks-cli', ['host', 'is_aws'])
+def bricks_cli(cfg: 'Config') -> Optional[HeaderFactory]:
+    token_source = BricksCliTokenSource(cfg)
+    try:
+        token_source.token()
+    except FileNotFoundError:
+        logger.debug(f'Most likely Bricks CLI is not installed.')
+        return None
+    except IOError as e:
+        if 'databricks OAuth is not' in str(e):
+            logger.debug(f'OAuth not configured or not available: {e}')
+            return None
+        raise e
+
+    logger.info("Using Bricks CLI authentication")
+
+    def inner() -> Dict[str, str]:
+        token = token_source.token()
+        return {'Authorization': f'{token.token_type} {token.access_token}'}
+
+    return inner
+
+
 class DefaultCredentials:
     """ Select the first applicable credential provider from the chain """
 
@@ -243,7 +299,7 @@ class DefaultCredentials:
     def __call__(self, cfg: 'Config') -> HeaderFactory:
         auth_providers = [
             pat_auth, basic_auth, oauth_service_principal, azure_service_principal, azure_cli,
-            external_browser
+            external_browser, bricks_cli
         ]
         for provider in auth_providers:
             auth_type = provider.auth_type()
@@ -372,7 +428,11 @@ class Config:
     def is_azure(self) -> bool:
         has_resource_id = self.azure_workspace_resource_id is not None
         has_host = self.host is not None
-        …
+        is_public_cloud = has_host and ".azuredatabricks.net" in self.host
+        is_china_cloud = has_host and ".databricks.azure.cn" in self.host
+        is_gov_cloud = has_host and ".databricks.azure.us" in self.host
+        is_valid_cloud = is_public_cloud or is_china_cloud or is_gov_cloud
+        return has_resource_id or (has_host and is_valid_cloud)
 
     @property
     def is_gcp(self) -> bool:
```
databricks/sdk/dbutils.py
CHANGED
```diff
@@ -6,8 +6,9 @@ import typing
 from collections import namedtuple
 
 from .core import ApiClient, Config
-from .mixins import compute
-from .…
+from .mixins import compute as compute_ext
+from .mixins import dbfs as dbfs_ext
+from .service import compute, workspace
 
 
 class FileInfo(namedtuple('FileInfo', ['path', 'name', 'size', "modificationTime"])):
@@ -31,7 +32,7 @@ class SecretMetadata(namedtuple('SecretMetadata', ['key'])):
 class _FsUtil:
     """ Manipulates the Databricks filesystem (DBFS) """
 
-    def __init__(self, dbfs_ext: …
+    def __init__(self, dbfs_ext: dbfs_ext.DbfsExt, proxy_factory: typing.Callable[[str], '_ProxyUtil']):
         self._dbfs = dbfs_ext
         self._proxy_factory = proxy_factory
 
@@ -177,7 +178,7 @@ _FILTER = _RedactingFilter()
 class _SecretsUtil:
     """Remote equivalent of secrets util"""
 
-    def __init__(self, secrets_api: …
+    def __init__(self, secrets_api: workspace.SecretsAPI):
         self._api = secrets_api # nolint
 
     def getBytes(self, scope: str, key: str) -> bytes:
@@ -215,13 +216,13 @@ class RemoteDbUtils:
     def __init__(self, config: 'Config' = None):
         self._config = Config() if not config else config
         self._client = ApiClient(self._config)
-        self._clusters = …
-        self._commands = …
+        self._clusters = compute_ext.ClustersExt(self._client)
+        self._commands = compute.CommandExecutionAPI(self._client)
         self._lock = threading.Lock()
         self._ctx = None
 
-        self.fs = _FsUtil(…
-        self.secrets = _SecretsUtil(…
+        self.fs = _FsUtil(dbfs_ext.DbfsExt(self._client), self.__getattr__)
+        self.secrets = _SecretsUtil(workspace.SecretsAPI(self._client))
 
     @property
     def _cluster_id(self) -> str:
@@ -231,7 +232,7 @@ class RemoteDbUtils:
             raise ValueError(self._config.wrap_debug_info(message))
         return cluster_id
 
-    def _running_command_context(self) -> …
+    def _running_command_context(self) -> compute.ContextStatusResponse:
         if self._ctx:
             return self._ctx
         with self._lock:
@@ -239,7 +240,7 @@ class RemoteDbUtils:
                 return self._ctx
             self._clusters.ensure_cluster_is_running(self._cluster_id)
             self._ctx = self._commands.create(cluster_id=self._cluster_id,
-                                              language=…
+                                              language=compute.Language.python).result()
             return self._ctx
 
     def __getattr__(self, util) -> '_ProxyUtil':
@@ -252,9 +253,9 @@ class RemoteDbUtils:
 class _ProxyUtil:
     """Enables temporary workaround to call remote in-REPL dbutils without having to re-implement them"""
 
-    def __init__(self, *, command_execution: …
-                 context_factory: typing.Callable[[], …
-                 …
+    def __init__(self, *, command_execution: compute.CommandExecutionAPI,
+                 context_factory: typing.Callable[[],
+                                                  compute.ContextStatusResponse], cluster_id: str, name: str):
         self._commands = command_execution
         self._cluster_id = cluster_id
         self._context_factory = context_factory
@@ -270,8 +271,8 @@ class _ProxyUtil:
 
 class _ProxyCall:
 
-    def __init__(self, *, command_execution: …
-                 context_factory: typing.Callable[[], …
+    def __init__(self, *, command_execution: compute.CommandExecutionAPI,
+                 context_factory: typing.Callable[[], compute.ContextStatusResponse], cluster_id: str,
                  util: str, method: str):
         self._commands = command_execution
         self._cluster_id = cluster_id
@@ -289,10 +290,10 @@ class _ProxyCall:
         '''
         ctx = self._context_factory()
         result = self._commands.execute(cluster_id=self._cluster_id,
-                                        language=…
+                                        language=compute.Language.python,
                                         context_id=ctx.id,
                                         command=code).result()
-        if result.status == …
+        if result.status == compute.CommandStatus.Finished:
             raw = result.results.data
             return json.loads(raw)
         else:
```
databricks/sdk/mixins/compute.py
CHANGED
```diff
@@ -2,7 +2,7 @@ import re
 from dataclasses import dataclass
 from typing import Optional
 
-from databricks.sdk.service import clusters
+from databricks.sdk.service import compute
 
 
 @dataclass
@@ -54,7 +54,7 @@ class SemVer:
         return self.build < other.build
 
 
-class ClustersExt(clusters.ClustersAPI):
+class ClustersExt(compute.ClustersAPI):
 
     def select_spark_version(self,
                              long_term_support: bool = False,
@@ -92,7 +92,7 @@ class ClustersExt(clusters.ClustersAPI):
         return versions[0]
 
     @staticmethod
-    def _node_sorting_tuple(item: …
+    def _node_sorting_tuple(item: compute.NodeType) -> tuple:
         local_disks = local_disk_size_gb = local_nvme_disk = local_nvme_disk_size_gb = 0
         if item.node_instance_type is not None:
             local_disks = item.node_instance_type.local_disks
@@ -103,12 +103,12 @@ class ClustersExt(clusters.ClustersAPI):
                 local_nvme_disk, local_nvme_disk_size_gb, item.num_gpus, item.instance_type_id)
 
     @staticmethod
-    def _should_node_be_skipped(nt: …
+    def _should_node_be_skipped(nt: compute.NodeType) -> bool:
         if not nt.node_info:
             return False
         if not nt.node_info.status:
             return False
-        val = …
+        val = compute.CloudProviderNodeStatus
         for st in nt.node_info.status:
             if st in (val.NotAvailableInRegion, val.NotEnabledOnSubscription):
                 return True
@@ -173,7 +173,7 @@ class ClustersExt(clusters.ClustersAPI):
         raise ValueError("cannot determine smallest node type")
 
     def ensure_cluster_is_running(self, cluster_id: str):
-        state = …
+        state = compute.State
         info = self.get(cluster_id)
         if info.state == state.TERMINATED:
             self.start(cluster_id).result()
```
databricks/sdk/mixins/dbfs.py
CHANGED
```diff
@@ -9,7 +9,7 @@ from typing import TYPE_CHECKING, AnyStr, BinaryIO, Iterable, Iterator, Type
 
 from databricks.sdk.core import DatabricksError
 
-from ..service import …
+from ..service import files
 
 if TYPE_CHECKING:
     from _typeshed import Self
@@ -18,13 +18,13 @@ if TYPE_CHECKING:
 class _DbfsIO(BinaryIO):
     MAX_CHUNK_SIZE = 1024 * 1024
 
-    _status: …
-    _created: …
+    _status: files.FileInfo = None
+    _created: files.CreateResponse = None
     _offset = 0
     _closed = False
 
     def __init__(self,
-                 api: …
+                 api: files.DbfsAPI,
                  path: str,
                  *,
                  read: bool = False,
@@ -308,12 +308,12 @@ class _DbfsPath(_Path):
         return f'<_DbfsPath {self._path}>'
 
 
-class DbfsExt(…
+class DbfsExt(files.DbfsAPI):
 
     def open(self, path: str, *, read: bool = False, write: bool = False, overwrite: bool = False) -> _DbfsIO:
         return _DbfsIO(self, path, read=read, write=write, overwrite=overwrite)
 
-    def list(self, path: str, *, recursive=False) -> Iterator[…
+    def list(self, path: str, *, recursive=False) -> Iterator[files.FileInfo]:
         """List directory contents or file details.
 
         List the contents of a directory, or details of the file. If the file or directory does not exist,
```
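
Finally, a sketch of the `DbfsExt` conveniences layered over the generated `files.DbfsAPI` (paths are placeholders; auth is assumed to come from the environment):

```python
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

# open() returns a _DbfsIO wrapper that streams through the DBFS API
# in MAX_CHUNK_SIZE pieces.
f = w.dbfs.open('/tmp/example.txt', write=True, overwrite=True)
f.write(b'hello from databricks-sdk 0.1.1')
f.close()

# list() yields files.FileInfo records; recursive=True walks subdirectories.
for info in w.dbfs.list('/tmp', recursive=True):
    print(info.path)
```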
|