databricks-sdk 0.0.7__py3-none-any.whl → 0.1.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information in this diff is provided for informational purposes only and reflects the changes between the package versions as published in their respective public registries.

Potentially problematic release.


This version of databricks-sdk might be problematic. See below for more details.

Files changed (41) hide show
  1. databricks/sdk/__init__.py +121 -104
  2. databricks/sdk/core.py +76 -16
  3. databricks/sdk/dbutils.py +18 -17
  4. databricks/sdk/mixins/compute.py +6 -6
  5. databricks/sdk/mixins/dbfs.py +6 -6
  6. databricks/sdk/oauth.py +28 -14
  7. databricks/sdk/service/{unitycatalog.py → catalog.py} +375 -1146
  8. databricks/sdk/service/{clusters.py → compute.py} +2176 -61
  9. databricks/sdk/service/{dbfs.py → files.py} +6 -6
  10. databricks/sdk/service/{scim.py → iam.py} +567 -27
  11. databricks/sdk/service/jobs.py +44 -34
  12. databricks/sdk/service/{mlflow.py → ml.py} +976 -1071
  13. databricks/sdk/service/oauth2.py +3 -3
  14. databricks/sdk/service/pipelines.py +46 -30
  15. databricks/sdk/service/{deployment.py → provisioning.py} +47 -29
  16. databricks/sdk/service/settings.py +849 -0
  17. databricks/sdk/service/sharing.py +1176 -0
  18. databricks/sdk/service/sql.py +15 -15
  19. databricks/sdk/service/workspace.py +917 -22
  20. databricks/sdk/version.py +1 -1
  21. {databricks_sdk-0.0.7.dist-info → databricks_sdk-0.1.1.dist-info}/METADATA +3 -1
  22. databricks_sdk-0.1.1.dist-info/RECORD +37 -0
  23. databricks/sdk/service/clusterpolicies.py +0 -399
  24. databricks/sdk/service/commands.py +0 -478
  25. databricks/sdk/service/gitcredentials.py +0 -202
  26. databricks/sdk/service/globalinitscripts.py +0 -262
  27. databricks/sdk/service/instancepools.py +0 -757
  28. databricks/sdk/service/ipaccesslists.py +0 -340
  29. databricks/sdk/service/libraries.py +0 -282
  30. databricks/sdk/service/permissions.py +0 -470
  31. databricks/sdk/service/repos.py +0 -250
  32. databricks/sdk/service/secrets.py +0 -472
  33. databricks/sdk/service/tokenmanagement.py +0 -182
  34. databricks/sdk/service/tokens.py +0 -137
  35. databricks/sdk/service/workspaceconf.py +0 -50
  36. databricks_sdk-0.0.7.dist-info/RECORD +0 -48
  37. /databricks/sdk/service/{endpoints.py → serving.py} +0 -0
  38. {databricks_sdk-0.0.7.dist-info → databricks_sdk-0.1.1.dist-info}/LICENSE +0 -0
  39. {databricks_sdk-0.0.7.dist-info → databricks_sdk-0.1.1.dist-info}/NOTICE +0 -0
  40. {databricks_sdk-0.0.7.dist-info → databricks_sdk-0.1.1.dist-info}/WHEEL +0 -0
  41. {databricks_sdk-0.0.7.dist-info → databricks_sdk-0.1.1.dist-info}/top_level.txt +0 -0
@@ -1,33 +1,56 @@
1
1
  import databricks.sdk.core as client
2
2
  import databricks.sdk.dbutils as dbutils
3
- import databricks.sdk.mixins.compute as compute_mixin
4
- import databricks.sdk.mixins.dbfs as dbfs_mixin
5
- import databricks.sdk.service.billing as billing
6
- import databricks.sdk.service.clusterpolicies as clusterpolicies
7
- import databricks.sdk.service.clusters as clusters
8
- import databricks.sdk.service.commands as commands
9
- import databricks.sdk.service.dbfs as dbfs
10
- import databricks.sdk.service.deployment as deployment
11
- import databricks.sdk.service.endpoints as endpoints
12
- import databricks.sdk.service.gitcredentials as gitcredentials
13
- import databricks.sdk.service.globalinitscripts as globalinitscripts
14
- import databricks.sdk.service.instancepools as instancepools
15
- import databricks.sdk.service.ipaccesslists as ipaccesslists
16
- import databricks.sdk.service.jobs as jobs
17
- import databricks.sdk.service.libraries as libraries
18
- import databricks.sdk.service.mlflow as mlflow
19
- import databricks.sdk.service.oauth2 as oauth2
20
- import databricks.sdk.service.permissions as permissions
21
- import databricks.sdk.service.pipelines as pipelines
22
- import databricks.sdk.service.repos as repos
23
- import databricks.sdk.service.scim as scim
24
- import databricks.sdk.service.secrets as secrets
25
- import databricks.sdk.service.sql as sql
26
- import databricks.sdk.service.tokenmanagement as tokenmanagement
27
- import databricks.sdk.service.tokens as tokens
28
- import databricks.sdk.service.unitycatalog as unitycatalog
29
- import databricks.sdk.service.workspace as workspace
30
- import databricks.sdk.service.workspaceconf as workspaceconf
3
+ from databricks.sdk.mixins.compute import ClustersExt
4
+ from databricks.sdk.mixins.dbfs import DbfsExt
5
+ from databricks.sdk.service.billing import (BillableUsageAPI, BudgetsAPI,
6
+ LogDeliveryAPI)
7
+ from databricks.sdk.service.catalog import (AccountMetastoreAssignmentsAPI,
8
+ AccountMetastoresAPI,
9
+ AccountStorageCredentialsAPI,
10
+ CatalogsAPI, ExternalLocationsAPI,
11
+ FunctionsAPI, GrantsAPI,
12
+ MetastoresAPI, SchemasAPI,
13
+ StorageCredentialsAPI,
14
+ TableConstraintsAPI, TablesAPI,
15
+ VolumesAPI)
16
+ from databricks.sdk.service.compute import (ClusterPoliciesAPI, ClustersAPI,
17
+ CommandExecutionAPI,
18
+ GlobalInitScriptsAPI,
19
+ InstancePoolsAPI,
20
+ InstanceProfilesAPI, LibrariesAPI,
21
+ PolicyFamiliesAPI)
22
+ from databricks.sdk.service.files import DbfsAPI
23
+ from databricks.sdk.service.iam import (AccountGroupsAPI,
24
+ AccountServicePrincipalsAPI,
25
+ AccountUsersAPI, CurrentUserAPI,
26
+ GroupsAPI, PermissionsAPI,
27
+ ServicePrincipalsAPI, UsersAPI,
28
+ WorkspaceAssignmentAPI)
29
+ from databricks.sdk.service.jobs import JobsAPI
30
+ from databricks.sdk.service.ml import ExperimentsAPI, ModelRegistryAPI
31
+ from databricks.sdk.service.oauth2 import (CustomAppIntegrationAPI,
32
+ OAuthEnrollmentAPI,
33
+ PublishedAppIntegrationAPI)
34
+ from databricks.sdk.service.pipelines import PipelinesAPI
35
+ from databricks.sdk.service.provisioning import (CredentialsAPI,
36
+ EncryptionKeysAPI,
37
+ NetworksAPI, PrivateAccessAPI,
38
+ StorageAPI, VpcEndpointsAPI,
39
+ WorkspacesAPI)
40
+ from databricks.sdk.service.serving import ServingEndpointsAPI
41
+ from databricks.sdk.service.settings import (AccountIpAccessListsAPI,
42
+ IpAccessListsAPI,
43
+ TokenManagementAPI, TokensAPI,
44
+ WorkspaceConfAPI)
45
+ from databricks.sdk.service.sharing import (ProvidersAPI,
46
+ RecipientActivationAPI,
47
+ RecipientsAPI, SharesAPI)
48
+ from databricks.sdk.service.sql import (AlertsAPI, DashboardsAPI,
49
+ DataSourcesAPI, DbsqlPermissionsAPI,
50
+ QueriesAPI, QueryHistoryAPI,
51
+ StatementExecutionAPI, WarehousesAPI)
52
+ from databricks.sdk.service.workspace import (GitCredentialsAPI, ReposAPI,
53
+ SecretsAPI, WorkspaceAPI)
31
54
 
32
55
 
33
56
  class WorkspaceClient:
@@ -81,62 +104,55 @@ class WorkspaceClient:
81
104
  self.config = config
82
105
  self.dbutils = dbutils.RemoteDbUtils(self.config)
83
106
  self.api_client = client.ApiClient(self.config)
84
- self.alerts = sql.AlertsAPI(self.api_client)
85
- self.catalogs = unitycatalog.CatalogsAPI(self.api_client)
86
- self.cluster_policies = clusterpolicies.ClusterPoliciesAPI(self.api_client)
87
- self.clusters = compute_mixin.ClustersExt(self.api_client)
88
- self.command_execution = commands.CommandExecutionAPI(self.api_client)
89
- self.current_user = scim.CurrentUserAPI(self.api_client)
90
- self.dashboards = sql.DashboardsAPI(self.api_client)
91
- self.data_sources = sql.DataSourcesAPI(self.api_client)
92
- self.dbfs = dbfs_mixin.DbfsExt(self.api_client)
93
- self.dbsql_permissions = sql.DbsqlPermissionsAPI(self.api_client)
94
- self.experiments = mlflow.ExperimentsAPI(self.api_client)
95
- self.external_locations = unitycatalog.ExternalLocationsAPI(self.api_client)
96
- self.functions = unitycatalog.FunctionsAPI(self.api_client)
97
- self.git_credentials = gitcredentials.GitCredentialsAPI(self.api_client)
98
- self.global_init_scripts = globalinitscripts.GlobalInitScriptsAPI(self.api_client)
99
- self.grants = unitycatalog.GrantsAPI(self.api_client)
100
- self.groups = scim.GroupsAPI(self.api_client)
101
- self.instance_pools = instancepools.InstancePoolsAPI(self.api_client)
102
- self.instance_profiles = clusters.InstanceProfilesAPI(self.api_client)
103
- self.ip_access_lists = ipaccesslists.IpAccessListsAPI(self.api_client)
104
- self.jobs = jobs.JobsAPI(self.api_client)
105
- self.libraries = libraries.LibrariesAPI(self.api_client)
106
- self.m_lflow_artifacts = mlflow.MLflowArtifactsAPI(self.api_client)
107
- self.m_lflow_databricks = mlflow.MLflowDatabricksAPI(self.api_client)
108
- self.m_lflow_metrics = mlflow.MLflowMetricsAPI(self.api_client)
109
- self.m_lflow_runs = mlflow.MLflowRunsAPI(self.api_client)
110
- self.metastores = unitycatalog.MetastoresAPI(self.api_client)
111
- self.model_version_comments = mlflow.ModelVersionCommentsAPI(self.api_client)
112
- self.model_versions = mlflow.ModelVersionsAPI(self.api_client)
113
- self.permissions = permissions.PermissionsAPI(self.api_client)
114
- self.pipelines = pipelines.PipelinesAPI(self.api_client)
115
- self.policy_families = clusterpolicies.PolicyFamiliesAPI(self.api_client)
116
- self.providers = unitycatalog.ProvidersAPI(self.api_client)
117
- self.queries = sql.QueriesAPI(self.api_client)
118
- self.query_history = sql.QueryHistoryAPI(self.api_client)
119
- self.recipient_activation = unitycatalog.RecipientActivationAPI(self.api_client)
120
- self.recipients = unitycatalog.RecipientsAPI(self.api_client)
121
- self.registered_models = mlflow.RegisteredModelsAPI(self.api_client)
122
- self.registry_webhooks = mlflow.RegistryWebhooksAPI(self.api_client)
123
- self.repos = repos.ReposAPI(self.api_client)
124
- self.schemas = unitycatalog.SchemasAPI(self.api_client)
125
- self.secrets = secrets.SecretsAPI(self.api_client)
126
- self.service_principals = scim.ServicePrincipalsAPI(self.api_client)
127
- self.serving_endpoints = endpoints.ServingEndpointsAPI(self.api_client)
128
- self.shares = unitycatalog.SharesAPI(self.api_client)
129
- self.statement_execution = sql.StatementExecutionAPI(self.api_client)
130
- self.storage_credentials = unitycatalog.StorageCredentialsAPI(self.api_client)
131
- self.table_constraints = unitycatalog.TableConstraintsAPI(self.api_client)
132
- self.tables = unitycatalog.TablesAPI(self.api_client)
133
- self.token_management = tokenmanagement.TokenManagementAPI(self.api_client)
134
- self.tokens = tokens.TokensAPI(self.api_client)
135
- self.transition_requests = mlflow.TransitionRequestsAPI(self.api_client)
136
- self.users = scim.UsersAPI(self.api_client)
137
- self.warehouses = sql.WarehousesAPI(self.api_client)
138
- self.workspace = workspace.WorkspaceAPI(self.api_client)
139
- self.workspace_conf = workspaceconf.WorkspaceConfAPI(self.api_client)
107
+ self.alerts = AlertsAPI(self.api_client)
108
+ self.catalogs = CatalogsAPI(self.api_client)
109
+ self.cluster_policies = ClusterPoliciesAPI(self.api_client)
110
+ self.clusters = ClustersExt(self.api_client)
111
+ self.command_execution = CommandExecutionAPI(self.api_client)
112
+ self.current_user = CurrentUserAPI(self.api_client)
113
+ self.dashboards = DashboardsAPI(self.api_client)
114
+ self.data_sources = DataSourcesAPI(self.api_client)
115
+ self.dbfs = DbfsExt(self.api_client)
116
+ self.dbsql_permissions = DbsqlPermissionsAPI(self.api_client)
117
+ self.experiments = ExperimentsAPI(self.api_client)
118
+ self.external_locations = ExternalLocationsAPI(self.api_client)
119
+ self.functions = FunctionsAPI(self.api_client)
120
+ self.git_credentials = GitCredentialsAPI(self.api_client)
121
+ self.global_init_scripts = GlobalInitScriptsAPI(self.api_client)
122
+ self.grants = GrantsAPI(self.api_client)
123
+ self.groups = GroupsAPI(self.api_client)
124
+ self.instance_pools = InstancePoolsAPI(self.api_client)
125
+ self.instance_profiles = InstanceProfilesAPI(self.api_client)
126
+ self.ip_access_lists = IpAccessListsAPI(self.api_client)
127
+ self.jobs = JobsAPI(self.api_client)
128
+ self.libraries = LibrariesAPI(self.api_client)
129
+ self.metastores = MetastoresAPI(self.api_client)
130
+ self.model_registry = ModelRegistryAPI(self.api_client)
131
+ self.permissions = PermissionsAPI(self.api_client)
132
+ self.pipelines = PipelinesAPI(self.api_client)
133
+ self.policy_families = PolicyFamiliesAPI(self.api_client)
134
+ self.providers = ProvidersAPI(self.api_client)
135
+ self.queries = QueriesAPI(self.api_client)
136
+ self.query_history = QueryHistoryAPI(self.api_client)
137
+ self.recipient_activation = RecipientActivationAPI(self.api_client)
138
+ self.recipients = RecipientsAPI(self.api_client)
139
+ self.repos = ReposAPI(self.api_client)
140
+ self.schemas = SchemasAPI(self.api_client)
141
+ self.secrets = SecretsAPI(self.api_client)
142
+ self.service_principals = ServicePrincipalsAPI(self.api_client)
143
+ self.serving_endpoints = ServingEndpointsAPI(self.api_client)
144
+ self.shares = SharesAPI(self.api_client)
145
+ self.statement_execution = StatementExecutionAPI(self.api_client)
146
+ self.storage_credentials = StorageCredentialsAPI(self.api_client)
147
+ self.table_constraints = TableConstraintsAPI(self.api_client)
148
+ self.tables = TablesAPI(self.api_client)
149
+ self.token_management = TokenManagementAPI(self.api_client)
150
+ self.tokens = TokensAPI(self.api_client)
151
+ self.users = UsersAPI(self.api_client)
152
+ self.volumes = VolumesAPI(self.api_client)
153
+ self.warehouses = WarehousesAPI(self.api_client)
154
+ self.workspace = WorkspaceAPI(self.api_client)
155
+ self.workspace_conf = WorkspaceConfAPI(self.api_client)
140
156
 
141
157
 
142
158
  class AccountClient:
@@ -189,23 +205,24 @@ class AccountClient:
189
205
  product_version=product_version)
190
206
  self.config = config
191
207
  self.api_client = client.ApiClient(self.config)
192
- self.billable_usage = billing.BillableUsageAPI(self.api_client)
193
- self.budgets = billing.BudgetsAPI(self.api_client)
194
- self.credentials = deployment.CredentialsAPI(self.api_client)
195
- self.custom_app_integration = oauth2.CustomAppIntegrationAPI(self.api_client)
196
- self.encryption_keys = deployment.EncryptionKeysAPI(self.api_client)
197
- self.account_groups = scim.AccountGroupsAPI(self.api_client)
198
- self.log_delivery = billing.LogDeliveryAPI(self.api_client)
199
- self.account_metastore_assignments = unitycatalog.AccountMetastoreAssignmentsAPI(self.api_client)
200
- self.account_metastores = unitycatalog.AccountMetastoresAPI(self.api_client)
201
- self.networks = deployment.NetworksAPI(self.api_client)
202
- self.o_auth_enrollment = oauth2.OAuthEnrollmentAPI(self.api_client)
203
- self.private_access = deployment.PrivateAccessAPI(self.api_client)
204
- self.published_app_integration = oauth2.PublishedAppIntegrationAPI(self.api_client)
205
- self.account_service_principals = scim.AccountServicePrincipalsAPI(self.api_client)
206
- self.storage = deployment.StorageAPI(self.api_client)
207
- self.account_storage_credentials = unitycatalog.AccountStorageCredentialsAPI(self.api_client)
208
- self.account_users = scim.AccountUsersAPI(self.api_client)
209
- self.vpc_endpoints = deployment.VpcEndpointsAPI(self.api_client)
210
- self.workspace_assignment = permissions.WorkspaceAssignmentAPI(self.api_client)
211
- self.workspaces = deployment.WorkspacesAPI(self.api_client)
208
+ self.billable_usage = BillableUsageAPI(self.api_client)
209
+ self.budgets = BudgetsAPI(self.api_client)
210
+ self.credentials = CredentialsAPI(self.api_client)
211
+ self.custom_app_integration = CustomAppIntegrationAPI(self.api_client)
212
+ self.encryption_keys = EncryptionKeysAPI(self.api_client)
213
+ self.groups = AccountGroupsAPI(self.api_client)
214
+ self.ip_access_lists = AccountIpAccessListsAPI(self.api_client)
215
+ self.log_delivery = LogDeliveryAPI(self.api_client)
216
+ self.metastore_assignments = AccountMetastoreAssignmentsAPI(self.api_client)
217
+ self.metastores = AccountMetastoresAPI(self.api_client)
218
+ self.networks = NetworksAPI(self.api_client)
219
+ self.o_auth_enrollment = OAuthEnrollmentAPI(self.api_client)
220
+ self.private_access = PrivateAccessAPI(self.api_client)
221
+ self.published_app_integration = PublishedAppIntegrationAPI(self.api_client)
222
+ self.service_principals = AccountServicePrincipalsAPI(self.api_client)
223
+ self.storage = StorageAPI(self.api_client)
224
+ self.storage_credentials = AccountStorageCredentialsAPI(self.api_client)
225
+ self.users = AccountUsersAPI(self.api_client)
226
+ self.vpc_endpoints = VpcEndpointsAPI(self.api_client)
227
+ self.workspace_assignment = WorkspaceAssignmentAPI(self.api_client)
228
+ self.workspaces = WorkspacesAPI(self.api_client)
databricks/sdk/core.py CHANGED
@@ -114,7 +114,9 @@ def oauth_service_principal(cfg: 'Config') -> Optional[HeaderFactory]:
114
114
  def external_browser(cfg: 'Config') -> Optional[HeaderFactory]:
115
115
  if cfg.auth_type != 'external-browser':
116
116
  return None
117
- if cfg.is_aws:
117
+ if cfg.client_id:
118
+ client_id = cfg.client_id
119
+ elif cfg.is_aws:
118
120
  client_id = 'databricks-cli'
119
121
  elif cfg.is_azure:
120
122
  # Use Azure AD app for cases when Azure CLI is not available on the machine.
@@ -123,7 +125,10 @@ def external_browser(cfg: 'Config') -> Optional[HeaderFactory]:
123
125
  client_id = '6128a518-99a9-425b-8333-4cc94f04cacd'
124
126
  else:
125
127
  raise ValueError(f'local browser SSO is not supported')
126
- oauth_client = OAuthClient(cfg.host, client_id, 'http://localhost:8020')
128
+ oauth_client = OAuthClient(host=cfg.host,
129
+ client_id=client_id,
130
+ redirect_url='http://localhost:8020',
131
+ client_secret=cfg.client_secret)
127
132
  consent = oauth_client.initiate_consent()
128
133
  if not consent:
129
134
  return None
@@ -177,16 +182,18 @@ def azure_service_principal(cfg: 'Config') -> HeaderFactory:
177
182
  return refreshed_headers
178
183
 
179
184
 
180
- class AzureCliTokenSource(Refreshable):
181
- """ Obtain the token granted by `az login` CLI command """
185
+ class CliTokenSource(Refreshable):
182
186
 
183
- def __init__(self, resource: str):
187
+ def __init__(self, cmd: List[str], token_type_field: str, access_token_field: str, expiry_field: str):
184
188
  super().__init__()
185
- self.resource = resource
189
+ self._cmd = cmd
190
+ self._token_type_field = token_type_field
191
+ self._access_token_field = access_token_field
192
+ self._expiry_field = expiry_field
186
193
 
187
194
  @staticmethod
188
195
  def _parse_expiry(expiry: str) -> datetime:
189
- for fmt in ("%Y-%m-%d %H:%M:%S.%f", "%Y-%m-%d %H:%M:%S"):
196
+ for fmt in ("%Y-%m-%d %H:%M:%S.%f", "%Y-%m-%d %H:%M:%S", "%Y-%m-%dT%H:%M:%S.%f%z"):
190
197
  try:
191
198
  return datetime.strptime(expiry, fmt)
192
199
  except ValueError as e:
@@ -196,18 +203,28 @@ class AzureCliTokenSource(Refreshable):
196
203
 
197
204
  def refresh(self) -> Token:
198
205
  try:
199
- cmd = ["az", "account", "get-access-token", "--resource", self.resource, "--output", "json"]
200
- out = subprocess.check_output(cmd, stderr=subprocess.STDOUT)
206
+ out = subprocess.check_output(self._cmd, stderr=subprocess.STDOUT)
201
207
  it = json.loads(out.decode())
202
- expires_on = self._parse_expiry(it["expiresOn"])
203
- return Token(access_token=it["accessToken"],
204
- refresh_token=it.get('refreshToken', None),
205
- token_type=it["tokenType"],
208
+ expires_on = self._parse_expiry(it[self._expiry_field])
209
+ return Token(access_token=it[self._access_token_field],
210
+ token_type=it[self._token_type_field],
206
211
  expiry=expires_on)
207
212
  except ValueError as e:
208
213
  raise ValueError(f"cannot unmarshal CLI result: {e}")
209
214
  except subprocess.CalledProcessError as e:
210
- raise IOError(f'cannot get access token: {e.output.decode()}') from e
215
+ message = e.output.decode().strip()
216
+ raise IOError(f'cannot get access token: {message}') from e
217
+
218
+
219
+ class AzureCliTokenSource(CliTokenSource):
220
+ """ Obtain the token granted by `az login` CLI command """
221
+
222
+ def __init__(self, resource: str):
223
+ cmd = ["az", "account", "get-access-token", "--resource", resource, "--output", "json"]
224
+ super().__init__(cmd=cmd,
225
+ token_type_field='tokenType',
226
+ access_token_field='accessToken',
227
+ expiry_field='expiresOn')
211
228
 
212
229
 
213
230
  @credentials_provider('azure-cli', ['is_azure'])
@@ -231,6 +248,45 @@ def azure_cli(cfg: 'Config') -> Optional[HeaderFactory]:
231
248
  return inner
232
249
 
233
250
 
251
+ class BricksCliTokenSource(CliTokenSource):
252
+ """ Obtain the token granted by `bricks auth login` CLI command """
253
+
254
+ def __init__(self, cfg: 'Config'):
255
+ cli_path = cfg.bricks_cli_path
256
+ if not cli_path:
257
+ cli_path = 'bricks'
258
+ cmd = [cli_path, 'auth', 'token', '--host', cfg.host]
259
+ if cfg.is_account_client:
260
+ cmd += ['--account-id', cfg.account_id]
261
+ super().__init__(cmd=cmd,
262
+ token_type_field='token_type',
263
+ access_token_field='access_token',
264
+ expiry_field='expiry')
265
+
266
+
267
+ @credentials_provider('bricks-cli', ['host', 'is_aws'])
268
+ def bricks_cli(cfg: 'Config') -> Optional[HeaderFactory]:
269
+ token_source = BricksCliTokenSource(cfg)
270
+ try:
271
+ token_source.token()
272
+ except FileNotFoundError:
273
+ logger.debug(f'Most likely Bricks CLI is not installed.')
274
+ return None
275
+ except IOError as e:
276
+ if 'databricks OAuth is not' in str(e):
277
+ logger.debug(f'OAuth not configured or not available: {e}')
278
+ return None
279
+ raise e
280
+
281
+ logger.info("Using Bricks CLI authentication")
282
+
283
+ def inner() -> Dict[str, str]:
284
+ token = token_source.token()
285
+ return {'Authorization': f'{token.token_type} {token.access_token}'}
286
+
287
+ return inner
288
+
289
+
234
290
  class DefaultCredentials:
235
291
  """ Select the first applicable credential provider from the chain """
236
292
 
@@ -243,7 +299,7 @@ class DefaultCredentials:
243
299
  def __call__(self, cfg: 'Config') -> HeaderFactory:
244
300
  auth_providers = [
245
301
  pat_auth, basic_auth, oauth_service_principal, azure_service_principal, azure_cli,
246
- external_browser
302
+ external_browser, bricks_cli
247
303
  ]
248
304
  for provider in auth_providers:
249
305
  auth_type = provider.auth_type()
@@ -372,7 +428,11 @@ class Config:
372
428
  def is_azure(self) -> bool:
373
429
  has_resource_id = self.azure_workspace_resource_id is not None
374
430
  has_host = self.host is not None
375
- return has_resource_id or (has_host and ".azuredatabricks.net" in self.host)
431
+ is_public_cloud = has_host and ".azuredatabricks.net" in self.host
432
+ is_china_cloud = has_host and ".databricks.azure.cn" in self.host
433
+ is_gov_cloud = has_host and ".databricks.azure.us" in self.host
434
+ is_valid_cloud = is_public_cloud or is_china_cloud or is_gov_cloud
435
+ return has_resource_id or (has_host and is_valid_cloud)
376
436
 
377
437
  @property
378
438
  def is_gcp(self) -> bool:
databricks/sdk/dbutils.py CHANGED
@@ -6,8 +6,9 @@ import typing
6
6
  from collections import namedtuple
7
7
 
8
8
  from .core import ApiClient, Config
9
- from .mixins import compute, dbfs
10
- from .service import commands, secrets
9
+ from .mixins import compute as compute_ext
10
+ from .mixins import dbfs as dbfs_ext
11
+ from .service import compute, workspace
11
12
 
12
13
 
13
14
  class FileInfo(namedtuple('FileInfo', ['path', 'name', 'size', "modificationTime"])):
@@ -31,7 +32,7 @@ class SecretMetadata(namedtuple('SecretMetadata', ['key'])):
31
32
  class _FsUtil:
32
33
  """ Manipulates the Databricks filesystem (DBFS) """
33
34
 
34
- def __init__(self, dbfs_ext: dbfs.DbfsExt, proxy_factory: typing.Callable[[str], '_ProxyUtil']):
35
+ def __init__(self, dbfs_ext: dbfs_ext.DbfsExt, proxy_factory: typing.Callable[[str], '_ProxyUtil']):
35
36
  self._dbfs = dbfs_ext
36
37
  self._proxy_factory = proxy_factory
37
38
 
@@ -177,7 +178,7 @@ _FILTER = _RedactingFilter()
177
178
  class _SecretsUtil:
178
179
  """Remote equivalent of secrets util"""
179
180
 
180
- def __init__(self, secrets_api: secrets.SecretsAPI):
181
+ def __init__(self, secrets_api: workspace.SecretsAPI):
181
182
  self._api = secrets_api # nolint
182
183
 
183
184
  def getBytes(self, scope: str, key: str) -> bytes:
@@ -215,13 +216,13 @@ class RemoteDbUtils:
215
216
  def __init__(self, config: 'Config' = None):
216
217
  self._config = Config() if not config else config
217
218
  self._client = ApiClient(self._config)
218
- self._clusters = compute.ClustersExt(self._client)
219
- self._commands = commands.CommandExecutionAPI(self._client)
219
+ self._clusters = compute_ext.ClustersExt(self._client)
220
+ self._commands = compute.CommandExecutionAPI(self._client)
220
221
  self._lock = threading.Lock()
221
222
  self._ctx = None
222
223
 
223
- self.fs = _FsUtil(dbfs.DbfsExt(self._client), self.__getattr__)
224
- self.secrets = _SecretsUtil(secrets.SecretsAPI(self._client))
224
+ self.fs = _FsUtil(dbfs_ext.DbfsExt(self._client), self.__getattr__)
225
+ self.secrets = _SecretsUtil(workspace.SecretsAPI(self._client))
225
226
 
226
227
  @property
227
228
  def _cluster_id(self) -> str:
@@ -231,7 +232,7 @@ class RemoteDbUtils:
231
232
  raise ValueError(self._config.wrap_debug_info(message))
232
233
  return cluster_id
233
234
 
234
- def _running_command_context(self) -> commands.ContextStatusResponse:
235
+ def _running_command_context(self) -> compute.ContextStatusResponse:
235
236
  if self._ctx:
236
237
  return self._ctx
237
238
  with self._lock:
@@ -239,7 +240,7 @@ class RemoteDbUtils:
239
240
  return self._ctx
240
241
  self._clusters.ensure_cluster_is_running(self._cluster_id)
241
242
  self._ctx = self._commands.create(cluster_id=self._cluster_id,
242
- language=commands.Language.python).result()
243
+ language=compute.Language.python).result()
243
244
  return self._ctx
244
245
 
245
246
  def __getattr__(self, util) -> '_ProxyUtil':
@@ -252,9 +253,9 @@ class RemoteDbUtils:
252
253
  class _ProxyUtil:
253
254
  """Enables temporary workaround to call remote in-REPL dbutils without having to re-implement them"""
254
255
 
255
- def __init__(self, *, command_execution: commands.CommandExecutionAPI,
256
- context_factory: typing.Callable[[], commands.ContextStatusResponse], cluster_id: str,
257
- name: str):
256
+ def __init__(self, *, command_execution: compute.CommandExecutionAPI,
257
+ context_factory: typing.Callable[[],
258
+ compute.ContextStatusResponse], cluster_id: str, name: str):
258
259
  self._commands = command_execution
259
260
  self._cluster_id = cluster_id
260
261
  self._context_factory = context_factory
@@ -270,8 +271,8 @@ class _ProxyUtil:
270
271
 
271
272
  class _ProxyCall:
272
273
 
273
- def __init__(self, *, command_execution: commands.CommandExecutionAPI,
274
- context_factory: typing.Callable[[], commands.ContextStatusResponse], cluster_id: str,
274
+ def __init__(self, *, command_execution: compute.CommandExecutionAPI,
275
+ context_factory: typing.Callable[[], compute.ContextStatusResponse], cluster_id: str,
275
276
  util: str, method: str):
276
277
  self._commands = command_execution
277
278
  self._cluster_id = cluster_id
@@ -289,10 +290,10 @@ class _ProxyCall:
289
290
  '''
290
291
  ctx = self._context_factory()
291
292
  result = self._commands.execute(cluster_id=self._cluster_id,
292
- language=commands.Language.python,
293
+ language=compute.Language.python,
293
294
  context_id=ctx.id,
294
295
  command=code).result()
295
- if result.status == commands.CommandStatus.Finished:
296
+ if result.status == compute.CommandStatus.Finished:
296
297
  raw = result.results.data
297
298
  return json.loads(raw)
298
299
  else:
@@ -2,7 +2,7 @@ import re
2
2
  from dataclasses import dataclass
3
3
  from typing import Optional
4
4
 
5
- from databricks.sdk.service import clusters
5
+ from databricks.sdk.service import compute
6
6
 
7
7
 
8
8
  @dataclass
@@ -54,7 +54,7 @@ class SemVer:
54
54
  return self.build < other.build
55
55
 
56
56
 
57
- class ClustersExt(clusters.ClustersAPI):
57
+ class ClustersExt(compute.ClustersAPI):
58
58
 
59
59
  def select_spark_version(self,
60
60
  long_term_support: bool = False,
@@ -92,7 +92,7 @@ class ClustersExt(clusters.ClustersAPI):
92
92
  return versions[0]
93
93
 
94
94
  @staticmethod
95
- def _node_sorting_tuple(item: clusters.NodeType) -> tuple:
95
+ def _node_sorting_tuple(item: compute.NodeType) -> tuple:
96
96
  local_disks = local_disk_size_gb = local_nvme_disk = local_nvme_disk_size_gb = 0
97
97
  if item.node_instance_type is not None:
98
98
  local_disks = item.node_instance_type.local_disks
@@ -103,12 +103,12 @@ class ClustersExt(clusters.ClustersAPI):
103
103
  local_nvme_disk, local_nvme_disk_size_gb, item.num_gpus, item.instance_type_id)
104
104
 
105
105
  @staticmethod
106
- def _should_node_be_skipped(nt: clusters.NodeType) -> bool:
106
+ def _should_node_be_skipped(nt: compute.NodeType) -> bool:
107
107
  if not nt.node_info:
108
108
  return False
109
109
  if not nt.node_info.status:
110
110
  return False
111
- val = clusters.CloudProviderNodeStatus
111
+ val = compute.CloudProviderNodeStatus
112
112
  for st in nt.node_info.status:
113
113
  if st in (val.NotAvailableInRegion, val.NotEnabledOnSubscription):
114
114
  return True
@@ -173,7 +173,7 @@ class ClustersExt(clusters.ClustersAPI):
173
173
  raise ValueError("cannot determine smallest node type")
174
174
 
175
175
  def ensure_cluster_is_running(self, cluster_id: str):
176
- state = clusters.State
176
+ state = compute.State
177
177
  info = self.get(cluster_id)
178
178
  if info.state == state.TERMINATED:
179
179
  self.start(cluster_id).result()
@@ -9,7 +9,7 @@ from typing import TYPE_CHECKING, AnyStr, BinaryIO, Iterable, Iterator, Type
9
9
 
10
10
  from databricks.sdk.core import DatabricksError
11
11
 
12
- from ..service import dbfs
12
+ from ..service import files
13
13
 
14
14
  if TYPE_CHECKING:
15
15
  from _typeshed import Self
@@ -18,13 +18,13 @@ if TYPE_CHECKING:
18
18
  class _DbfsIO(BinaryIO):
19
19
  MAX_CHUNK_SIZE = 1024 * 1024
20
20
 
21
- _status: dbfs.FileInfo = None
22
- _created: dbfs.CreateResponse = None
21
+ _status: files.FileInfo = None
22
+ _created: files.CreateResponse = None
23
23
  _offset = 0
24
24
  _closed = False
25
25
 
26
26
  def __init__(self,
27
- api: dbfs.DbfsAPI,
27
+ api: files.DbfsAPI,
28
28
  path: str,
29
29
  *,
30
30
  read: bool = False,
@@ -308,12 +308,12 @@ class _DbfsPath(_Path):
308
308
  return f'<_DbfsPath {self._path}>'
309
309
 
310
310
 
311
- class DbfsExt(dbfs.DbfsAPI):
311
+ class DbfsExt(files.DbfsAPI):
312
312
 
313
313
  def open(self, path: str, *, read: bool = False, write: bool = False, overwrite: bool = False) -> _DbfsIO:
314
314
  return _DbfsIO(self, path, read=read, write=write, overwrite=overwrite)
315
315
 
316
- def list(self, path: str, *, recursive=False) -> Iterator[dbfs.FileInfo]:
316
+ def list(self, path: str, *, recursive=False) -> Iterator[files.FileInfo]:
317
317
  """List directory contents or file details.
318
318
 
319
319
  List the contents of a directory, or details of the file. If the file or directory does not exist,