databricks-sdk 0.29.0__py3-none-any.whl → 0.31.0__py3-none-any.whl

This diff shows the content of publicly available package versions as released to a supported public registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in that registry.

This version of databricks-sdk has been flagged as a potentially problematic release.
Files changed (30)
  1. databricks/sdk/__init__.py +89 -21
  2. databricks/sdk/config.py +61 -75
  3. databricks/sdk/core.py +16 -9
  4. databricks/sdk/credentials_provider.py +15 -15
  5. databricks/sdk/data_plane.py +65 -0
  6. databricks/sdk/errors/overrides.py +8 -0
  7. databricks/sdk/errors/platform.py +5 -0
  8. databricks/sdk/mixins/files.py +12 -4
  9. databricks/sdk/service/apps.py +977 -0
  10. databricks/sdk/service/billing.py +602 -218
  11. databricks/sdk/service/catalog.py +324 -34
  12. databricks/sdk/service/compute.py +766 -81
  13. databricks/sdk/service/dashboards.py +628 -18
  14. databricks/sdk/service/iam.py +99 -88
  15. databricks/sdk/service/jobs.py +332 -23
  16. databricks/sdk/service/marketplace.py +2 -122
  17. databricks/sdk/service/oauth2.py +127 -70
  18. databricks/sdk/service/pipelines.py +72 -52
  19. databricks/sdk/service/serving.py +303 -750
  20. databricks/sdk/service/settings.py +423 -4
  21. databricks/sdk/service/sharing.py +235 -25
  22. databricks/sdk/service/sql.py +2328 -544
  23. databricks/sdk/useragent.py +151 -0
  24. databricks/sdk/version.py +1 -1
  25. {databricks_sdk-0.29.0.dist-info → databricks_sdk-0.31.0.dist-info}/METADATA +36 -16
  26. {databricks_sdk-0.29.0.dist-info → databricks_sdk-0.31.0.dist-info}/RECORD +30 -27
  27. {databricks_sdk-0.29.0.dist-info → databricks_sdk-0.31.0.dist-info}/WHEEL +1 -1
  28. {databricks_sdk-0.29.0.dist-info → databricks_sdk-0.31.0.dist-info}/LICENSE +0 -0
  29. {databricks_sdk-0.29.0.dist-info → databricks_sdk-0.31.0.dist-info}/NOTICE +0 -0
  30. {databricks_sdk-0.29.0.dist-info → databricks_sdk-0.31.0.dist-info}/top_level.txt +0 -0
databricks/sdk/__init__.py CHANGED
@@ -5,8 +5,9 @@ from databricks.sdk.credentials_provider import CredentialsStrategy
 from databricks.sdk.mixins.compute import ClustersExt
 from databricks.sdk.mixins.files import DbfsExt
 from databricks.sdk.mixins.workspace import WorkspaceExt
+from databricks.sdk.service.apps import AppsAPI
 from databricks.sdk.service.billing import (BillableUsageAPI, BudgetsAPI,
-                                            LogDeliveryAPI)
+                                            LogDeliveryAPI, UsageDashboardsAPI)
 from databricks.sdk.service.catalog import (AccountMetastoreAssignmentsAPI,
                                             AccountMetastoresAPI,
                                             AccountStorageCredentialsAPI,
@@ -16,7 +17,8 @@ from databricks.sdk.service.catalog import (AccountMetastoreAssignmentsAPI,
                                             GrantsAPI, MetastoresAPI,
                                             ModelVersionsAPI, OnlineTablesAPI,
                                             QualityMonitorsAPI,
-                                            RegisteredModelsAPI, SchemasAPI,
+                                            RegisteredModelsAPI,
+                                            ResourceQuotasAPI, SchemasAPI,
                                             StorageCredentialsAPI,
                                             SystemSchemasAPI,
                                             TableConstraintsAPI, TablesAPI,
@@ -26,8 +28,9 @@ from databricks.sdk.service.compute import (ClusterPoliciesAPI, ClustersAPI,
                                             GlobalInitScriptsAPI,
                                             InstancePoolsAPI,
                                             InstanceProfilesAPI, LibrariesAPI,
+                                            PolicyComplianceForClustersAPI,
                                             PolicyFamiliesAPI)
-from databricks.sdk.service.dashboards import LakeviewAPI
+from databricks.sdk.service.dashboards import GenieAPI, LakeviewAPI
 from databricks.sdk.service.files import DbfsAPI, FilesAPI
 from databricks.sdk.service.iam import (AccountAccessControlAPI,
                                         AccountAccessControlProxyAPI,
@@ -37,7 +40,7 @@ from databricks.sdk.service.iam import (AccountAccessControlAPI,
                                         GroupsAPI, PermissionMigrationAPI,
                                         PermissionsAPI, ServicePrincipalsAPI,
                                         UsersAPI, WorkspaceAssignmentAPI)
-from databricks.sdk.service.jobs import JobsAPI
+from databricks.sdk.service.jobs import JobsAPI, PolicyComplianceForJobsAPI
 from databricks.sdk.service.marketplace import (
     ConsumerFulfillmentsAPI, ConsumerInstallationsAPI, ConsumerListingsAPI,
     ConsumerPersonalizationRequestsAPI, ConsumerProvidersAPI,
@@ -55,7 +58,8 @@ from databricks.sdk.service.provisioning import (CredentialsAPI,
                                                  NetworksAPI, PrivateAccessAPI,
                                                  StorageAPI, VpcEndpointsAPI,
                                                  Workspace, WorkspacesAPI)
-from databricks.sdk.service.serving import AppsAPI, ServingEndpointsAPI
+from databricks.sdk.service.serving import (ServingEndpointsAPI,
+                                            ServingEndpointsDataPlaneAPI)
 from databricks.sdk.service.settings import (AccountIpAccessListsAPI,
                                              AccountSettingsAPI,
                                              AutomaticClusterUpdateAPI,
@@ -67,6 +71,7 @@ from databricks.sdk.service.settings import (AccountIpAccessListsAPI,
                                              EsmEnablementAccountAPI,
                                              IpAccessListsAPI,
                                              NetworkConnectivityAPI,
+                                             NotificationDestinationsAPI,
                                              PersonalComputeAPI,
                                              RestrictWorkspaceAdminsAPI,
                                              SettingsAPI, TokenManagementAPI,
@@ -74,11 +79,13 @@ from databricks.sdk.service.settings import (AccountIpAccessListsAPI,
 from databricks.sdk.service.sharing import (CleanRoomsAPI, ProvidersAPI,
                                             RecipientActivationAPI,
                                             RecipientsAPI, SharesAPI)
-from databricks.sdk.service.sql import (AlertsAPI, DashboardsAPI,
-                                        DashboardWidgetsAPI, DataSourcesAPI,
-                                        DbsqlPermissionsAPI, QueriesAPI,
+from databricks.sdk.service.sql import (AlertsAPI, AlertsLegacyAPI,
+                                        DashboardsAPI, DashboardWidgetsAPI,
+                                        DataSourcesAPI, DbsqlPermissionsAPI,
+                                        QueriesAPI, QueriesLegacyAPI,
                                         QueryHistoryAPI,
                                         QueryVisualizationsAPI,
+                                        QueryVisualizationsLegacyAPI,
                                         StatementExecutionAPI, WarehousesAPI)
 from databricks.sdk.service.vectorsearch import (VectorSearchEndpointsAPI,
                                                  VectorSearchIndexesAPI)
@@ -162,8 +169,10 @@ class WorkspaceClient:
         self._config = config.copy()
         self._dbutils = _make_dbutils(self._config)
         self._api_client = client.ApiClient(self._config)
+        serving_endpoints = ServingEndpointsAPI(self._api_client)
         self._account_access_control_proxy = AccountAccessControlProxyAPI(self._api_client)
         self._alerts = AlertsAPI(self._api_client)
+        self._alerts_legacy = AlertsLegacyAPI(self._api_client)
         self._apps = AppsAPI(self._api_client)
         self._artifact_allowlists = ArtifactAllowlistsAPI(self._api_client)
         self._catalogs = CatalogsAPI(self._api_client)
@@ -188,6 +197,7 @@ class WorkspaceClient:
         self._external_locations = ExternalLocationsAPI(self._api_client)
         self._files = FilesAPI(self._api_client)
         self._functions = FunctionsAPI(self._api_client)
+        self._genie = GenieAPI(self._api_client)
         self._git_credentials = GitCredentialsAPI(self._api_client)
         self._global_init_scripts = GlobalInitScriptsAPI(self._api_client)
         self._grants = GrantsAPI(self._api_client)
@@ -201,10 +211,13 @@ class WorkspaceClient:
         self._metastores = MetastoresAPI(self._api_client)
         self._model_registry = ModelRegistryAPI(self._api_client)
         self._model_versions = ModelVersionsAPI(self._api_client)
+        self._notification_destinations = NotificationDestinationsAPI(self._api_client)
         self._online_tables = OnlineTablesAPI(self._api_client)
         self._permission_migration = PermissionMigrationAPI(self._api_client)
         self._permissions = PermissionsAPI(self._api_client)
         self._pipelines = PipelinesAPI(self._api_client)
+        self._policy_compliance_for_clusters = PolicyComplianceForClustersAPI(self._api_client)
+        self._policy_compliance_for_jobs = PolicyComplianceForJobsAPI(self._api_client)
         self._policy_families = PolicyFamiliesAPI(self._api_client)
         self._provider_exchange_filters = ProviderExchangeFiltersAPI(self._api_client)
         self._provider_exchanges = ProviderExchangesAPI(self._api_client)
@@ -217,16 +230,20 @@ class WorkspaceClient:
         self._providers = ProvidersAPI(self._api_client)
         self._quality_monitors = QualityMonitorsAPI(self._api_client)
         self._queries = QueriesAPI(self._api_client)
+        self._queries_legacy = QueriesLegacyAPI(self._api_client)
         self._query_history = QueryHistoryAPI(self._api_client)
         self._query_visualizations = QueryVisualizationsAPI(self._api_client)
+        self._query_visualizations_legacy = QueryVisualizationsLegacyAPI(self._api_client)
         self._recipient_activation = RecipientActivationAPI(self._api_client)
         self._recipients = RecipientsAPI(self._api_client)
         self._registered_models = RegisteredModelsAPI(self._api_client)
         self._repos = ReposAPI(self._api_client)
+        self._resource_quotas = ResourceQuotasAPI(self._api_client)
         self._schemas = SchemasAPI(self._api_client)
         self._secrets = SecretsAPI(self._api_client)
         self._service_principals = ServicePrincipalsAPI(self._api_client)
-        self._serving_endpoints = ServingEndpointsAPI(self._api_client)
+        self._serving_endpoints = serving_endpoints
+        self._serving_endpoints_data_plane = ServingEndpointsDataPlaneAPI(self._api_client, serving_endpoints)
         self._settings = SettingsAPI(self._api_client)
         self._shares = SharesAPI(self._api_client)
         self._statement_execution = StatementExecutionAPI(self._api_client)
@@ -267,6 +284,11 @@ class WorkspaceClient:
         """The alerts API can be used to perform CRUD operations on alerts."""
         return self._alerts
 
+    @property
+    def alerts_legacy(self) -> AlertsLegacyAPI:
+        """The alerts API can be used to perform CRUD operations on alerts."""
+        return self._alerts_legacy
+
     @property
     def apps(self) -> AppsAPI:
         """Apps run directly on a customer’s Databricks instance, integrate with their data, use and extend Databricks services, and enable users to interact through single sign-on."""
@@ -387,6 +409,11 @@ class WorkspaceClient:
         """Functions implement User-Defined Functions (UDFs) in Unity Catalog."""
         return self._functions
 
+    @property
+    def genie(self) -> GenieAPI:
+        """Genie provides a no-code experience for business users, powered by AI/BI."""
+        return self._genie
+
     @property
     def git_credentials(self) -> GitCredentialsAPI:
         """Registers personal access token for Databricks to do operations on behalf of the user."""
@@ -452,6 +479,11 @@ class WorkspaceClient:
         """Databricks provides a hosted version of MLflow Model Registry in Unity Catalog."""
         return self._model_versions
 
+    @property
+    def notification_destinations(self) -> NotificationDestinationsAPI:
+        """The notification destinations API lets you programmatically manage a workspace's notification destinations."""
+        return self._notification_destinations
+
     @property
     def online_tables(self) -> OnlineTablesAPI:
         """Online tables provide lower latency and higher QPS access to data from Delta tables."""
@@ -459,7 +491,7 @@ class WorkspaceClient:
 
     @property
     def permission_migration(self) -> PermissionMigrationAPI:
-        """This spec contains undocumented permission migration APIs used in https://github.com/databrickslabs/ucx."""
+        """APIs for migrating acl permissions, used only by the ucx tool: https://github.com/databrickslabs/ucx."""
         return self._permission_migration
 
     @property
@@ -472,6 +504,16 @@ class WorkspaceClient:
         """The Delta Live Tables API allows you to create, edit, delete, start, and view details about pipelines."""
         return self._pipelines
 
+    @property
+    def policy_compliance_for_clusters(self) -> PolicyComplianceForClustersAPI:
+        """The policy compliance APIs allow you to view and manage the policy compliance status of clusters in your workspace."""
+        return self._policy_compliance_for_clusters
+
+    @property
+    def policy_compliance_for_jobs(self) -> PolicyComplianceForJobsAPI:
+        """The compliance APIs allow you to view and manage the policy compliance status of jobs in your workspace."""
+        return self._policy_compliance_for_jobs
+
     @property
     def policy_families(self) -> PolicyFamiliesAPI:
         """View available policy families."""
@@ -524,19 +566,29 @@ class WorkspaceClient:
 
     @property
     def queries(self) -> QueriesAPI:
-        """These endpoints are used for CRUD operations on query definitions."""
+        """The queries API can be used to perform CRUD operations on queries."""
        return self._queries
 
+    @property
+    def queries_legacy(self) -> QueriesLegacyAPI:
+        """These endpoints are used for CRUD operations on query definitions."""
+        return self._queries_legacy
+
     @property
     def query_history(self) -> QueryHistoryAPI:
-        """Access the history of queries through SQL warehouses."""
+        """A service responsible for storing and retrieving the list of queries run against SQL endpoints and serverless compute."""
        return self._query_history
 
     @property
     def query_visualizations(self) -> QueryVisualizationsAPI:
-        """This is an evolving API that facilitates the addition and removal of vizualisations from existing queries within the Databricks Workspace."""
+        """This is an evolving API that facilitates the addition and removal of visualizations from existing queries in the Databricks Workspace."""
        return self._query_visualizations
 
+    @property
+    def query_visualizations_legacy(self) -> QueryVisualizationsLegacyAPI:
+        """This is an evolving API that facilitates the addition and removal of vizualisations from existing queries within the Databricks Workspace."""
+        return self._query_visualizations_legacy
+
     @property
     def recipient_activation(self) -> RecipientActivationAPI:
         """The Recipient Activation API is only applicable in the open sharing model where the recipient object has the authentication type of `TOKEN`."""
@@ -557,6 +609,11 @@ class WorkspaceClient:
         """The Repos API allows users to manage their git repos."""
         return self._repos
 
+    @property
+    def resource_quotas(self) -> ResourceQuotasAPI:
+        """Unity Catalog enforces resource quotas on all securable objects, which limits the number of resources that can be created."""
+        return self._resource_quotas
+
     @property
     def schemas(self) -> SchemasAPI:
         """A schema (also called a database) is the second layer of Unity Catalog’s three-level namespace."""
@@ -577,6 +634,11 @@ class WorkspaceClient:
         """The Serving Endpoints API allows you to create, update, and delete model serving endpoints."""
         return self._serving_endpoints
 
+    @property
+    def serving_endpoints_data_plane(self) -> ServingEndpointsDataPlaneAPI:
+        """Serving endpoints DataPlane provides a set of operations to interact with data plane endpoints for Serving endpoints service."""
+        return self._serving_endpoints_data_plane
+
     @property
     def settings(self) -> SettingsAPI:
         """Workspace Settings API allows users to manage settings at the workspace level."""
@@ -734,7 +796,6 @@ class AccountClient:
         self._api_client = client.ApiClient(self._config)
         self._access_control = AccountAccessControlAPI(self._api_client)
         self._billable_usage = BillableUsageAPI(self._api_client)
-        self._budgets = BudgetsAPI(self._api_client)
         self._credentials = CredentialsAPI(self._api_client)
         self._custom_app_integration = CustomAppIntegrationAPI(self._api_client)
         self._encryption_keys = EncryptionKeysAPI(self._api_client)
@@ -753,10 +814,12 @@ class AccountClient:
         self._settings = AccountSettingsAPI(self._api_client)
         self._storage = StorageAPI(self._api_client)
         self._storage_credentials = AccountStorageCredentialsAPI(self._api_client)
+        self._usage_dashboards = UsageDashboardsAPI(self._api_client)
         self._users = AccountUsersAPI(self._api_client)
         self._vpc_endpoints = VpcEndpointsAPI(self._api_client)
         self._workspace_assignment = WorkspaceAssignmentAPI(self._api_client)
         self._workspaces = WorkspacesAPI(self._api_client)
+        self._budgets = BudgetsAPI(self._api_client)
 
     @property
     def config(self) -> client.Config:
@@ -776,11 +839,6 @@ class AccountClient:
         """This API allows you to download billable usage logs for the specified account and date range."""
         return self._billable_usage
 
-    @property
-    def budgets(self) -> BudgetsAPI:
-        """These APIs manage budget configuration including notifications for exceeding a budget for a period."""
-        return self._budgets
-
     @property
     def credentials(self) -> CredentialsAPI:
         """These APIs manage credential configurations for this workspace."""
@@ -788,7 +846,7 @@ class AccountClient:
 
     @property
     def custom_app_integration(self) -> CustomAppIntegrationAPI:
-        """These APIs enable administrators to manage custom oauth app integrations, which is required for adding/using Custom OAuth App Integration like Tableau Cloud for Databricks in AWS cloud."""
+        """These APIs enable administrators to manage custom OAuth app integrations, which is required for adding/using Custom OAuth App Integration like Tableau Cloud for Databricks in AWS cloud."""
         return self._custom_app_integration
 
     @property
@@ -843,7 +901,7 @@ class AccountClient:
 
     @property
     def published_app_integration(self) -> PublishedAppIntegrationAPI:
-        """These APIs enable administrators to manage published oauth app integrations, which is required for adding/using Published OAuth App Integration like Tableau Desktop for Databricks in AWS cloud."""
+        """These APIs enable administrators to manage published OAuth app integrations, which is required for adding/using Published OAuth App Integration like Tableau Desktop for Databricks in AWS cloud."""
         return self._published_app_integration
 
     @property
@@ -871,6 +929,11 @@ class AccountClient:
         """These APIs manage storage credentials for a particular metastore."""
         return self._storage_credentials
 
+    @property
+    def usage_dashboards(self) -> UsageDashboardsAPI:
+        """These APIs manage usage dashboards for this account."""
+        return self._usage_dashboards
+
     @property
     def users(self) -> AccountUsersAPI:
         """User identities recognized by Databricks and represented by email addresses."""
@@ -891,6 +954,11 @@ class AccountClient:
         """These APIs manage workspaces for this account."""
         return self._workspaces
 
+    @property
+    def budgets(self) -> BudgetsAPI:
+        """These APIs manage budget configurations for this account."""
+        return self._budgets
+
     def get_workspace_client(self, workspace: Workspace) -> WorkspaceClient:
         """Constructs a ``WorkspaceClient`` for the given workspace.
 
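The __init__.py changes above add several new service accessors and move AppsAPI from databricks.sdk.service.serving to databricks.sdk.service.apps. Below is a minimal sketch of how the new accessors are reached in 0.31.0, assuming authentication is already configured through the environment or a .databrickscfg profile; only attribute names and types shown in the diff are used, and no service methods are called.

from databricks.sdk import AccountClient, WorkspaceClient

w = WorkspaceClient()  # credentials resolved by the SDK's default auth chain

# New workspace-level accessors (types per the imports in the diff above).
genie = w.genie                                        # GenieAPI
cluster_compliance = w.policy_compliance_for_clusters  # PolicyComplianceForClustersAPI
job_compliance = w.policy_compliance_for_jobs          # PolicyComplianceForJobsAPI
quotas = w.resource_quotas                             # ResourceQuotasAPI
destinations = w.notification_destinations             # NotificationDestinationsAPI
data_plane = w.serving_endpoints_data_plane            # ServingEndpointsDataPlaneAPI
legacy_queries = w.queries_legacy                      # QueriesLegacyAPI

# Account-level additions: usage dashboards, with budgets now registered last.
a = AccountClient()
usage_dashboards = a.usage_dashboards                  # UsageDashboardsAPI
budgets = a.budgets                                    # BudgetsAPI
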
databricks/sdk/config.py CHANGED
@@ -3,19 +3,18 @@ import copy
 import logging
 import os
 import pathlib
-import platform
 import sys
 import urllib.parse
-from typing import Dict, Iterable, List, Optional, Tuple
+from typing import Dict, Iterable, Optional
 
 import requests
 
+from . import useragent
 from .clock import Clock, RealClock
 from .credentials_provider import CredentialsStrategy, DefaultCredentials
 from .environments import (ALL_ENVS, AzureEnvironment, Cloud,
                            DatabricksEnvironment, get_environment_for_hostname)
 from .oauth import OidcEndpoints, Token
-from .version import __version__
 
 logger = logging.getLogger('databricks.sdk')
 
@@ -44,30 +43,14 @@
         return f"<ConfigAttribute '{self.name}' {self.transform.__name__}>"
 
 
-_DEFAULT_PRODUCT_NAME = 'unknown'
-_DEFAULT_PRODUCT_VERSION = '0.0.0'
-_STATIC_USER_AGENT: Tuple[str, str, List[str]] = (_DEFAULT_PRODUCT_NAME, _DEFAULT_PRODUCT_VERSION, [])
-
-
 def with_product(product: str, product_version: str):
     """[INTERNAL API] Change the product name and version used in the User-Agent header."""
-    global _STATIC_USER_AGENT
-    prev_product, prev_version, prev_other_info = _STATIC_USER_AGENT
-    logger.debug(f'Changing product from {prev_product}/{prev_version} to {product}/{product_version}')
-    _STATIC_USER_AGENT = product, product_version, prev_other_info
+    useragent.with_product(product, product_version)
 
 
 def with_user_agent_extra(key: str, value: str):
     """[INTERNAL API] Add extra metadata to the User-Agent header when developing a library."""
-    global _STATIC_USER_AGENT
-    product_name, product_version, other_info = _STATIC_USER_AGENT
-    for item in other_info:
-        if item.startswith(f"{key}/"):
-            # ensure that we don't have duplicates
-            other_info.remove(item)
-            break
-    other_info.append(f"{key}/{value}")
-    _STATIC_USER_AGENT = product_name, product_version, other_info
+    useragent.with_extra(key, value)
 
 
 class Config:
@@ -111,21 +94,12 @@
                  # Deprecated. Use credentials_strategy instead.
                  credentials_provider: CredentialsStrategy = None,
                  credentials_strategy: CredentialsStrategy = None,
-                 product=_DEFAULT_PRODUCT_NAME,
-                 product_version=_DEFAULT_PRODUCT_VERSION,
+                 product=None,
+                 product_version=None,
                  clock: Clock = None,
                  **kwargs):
         self._header_factory = None
         self._inner = {}
-        # as in SDK for Go, pull information from global static user agent context,
-        # so that we can track additional metadata for mid-stream libraries, as well
-        # as for cases, when the downstream product is used as a library and is not
-        # configured with a proper product name and version.
-        static_product, static_version, _ = _STATIC_USER_AGENT
-        if product == _DEFAULT_PRODUCT_NAME:
-            product = static_product
-        if product_version == _DEFAULT_PRODUCT_VERSION:
-            product_version = static_version
         self._user_agent_other_info = []
         if credentials_strategy and credentials_provider:
             raise ValueError(
@@ -147,8 +121,7 @@
             self._fix_host_if_needed()
             self._validate()
             self.init_auth()
-            self._product = product
-            self._product_version = product_version
+            self._init_product(product, product_version)
         except ValueError as e:
             message = self.wrap_debug_info(str(e))
             raise ValueError(message) from e
@@ -260,47 +233,19 @@
     @property
     def user_agent(self):
         """ Returns User-Agent header used by this SDK """
-        py_version = platform.python_version()
-        os_name = platform.uname().system.lower()
-
-        ua = [
-            f"{self._product}/{self._product_version}", f"databricks-sdk-py/{__version__}",
-            f"python/{py_version}", f"os/{os_name}", f"auth/{self.auth_type}",
-        ]
-        if len(self._user_agent_other_info) > 0:
-            ua.append(' '.join(self._user_agent_other_info))
-        # as in SDK for Go, pull information from global static user agent context,
-        # so that we can track additional metadata for mid-stream libraries. this value
-        # is shared across all instances of Config objects intentionally.
-        _, _, static_info = _STATIC_USER_AGENT
-        if len(static_info) > 0:
-            ua.append(' '.join(static_info))
-        if len(self._upstream_user_agent) > 0:
-            ua.append(self._upstream_user_agent)
-        if 'DATABRICKS_RUNTIME_VERSION' in os.environ:
-            runtime_version = os.environ['DATABRICKS_RUNTIME_VERSION']
-            if runtime_version != '':
-                runtime_version = self._sanitize_header_value(runtime_version)
-                ua.append(f'runtime/{runtime_version}')
-
-        return ' '.join(ua)
 
-    @staticmethod
-    def _sanitize_header_value(value: str) -> str:
-        value = value.replace(' ', '-')
-        value = value.replace('/', '-')
-        return value
+        # global user agent includes SDK version, product name & version, platform info,
+        # and global extra info. Config can have specific extra info associated with it,
+        # such as an override product, auth type, and other user-defined information.
+        return useragent.to_string(self._product_info,
+                                   [("auth", self.auth_type)] + self._user_agent_other_info)
 
     @property
     def _upstream_user_agent(self) -> str:
-        product = os.environ.get('DATABRICKS_SDK_UPSTREAM', None)
-        product_version = os.environ.get('DATABRICKS_SDK_UPSTREAM_VERSION', None)
-        if product is not None and product_version is not None:
-            return f"upstream/{product} upstream-version/{product_version}"
-        return ""
+        return " ".join(f"{k}/{v}" for k, v in useragent._get_upstream_user_agent_info())
 
     def with_user_agent_extra(self, key: str, value: str) -> 'Config':
-        self._user_agent_other_info.append(f"{key}/{value}")
+        self._user_agent_other_info.append((key, value))
         return self
 
     @property
@@ -403,13 +348,47 @@
     def _fix_host_if_needed(self):
         if not self.host:
             return
-        # fix url to remove trailing slash
+
+        # Add a default scheme if it's missing
+        if '://' not in self.host:
+            self.host = 'https://' + self.host
+
         o = urllib.parse.urlparse(self.host)
-        if not o.hostname:
-            # only hostname is specified
-            self.host = f"https://{self.host}"
-        else:
-            self.host = f"{o.scheme}://{o.netloc}"
+        # remove trailing slash
+        path = o.path.rstrip('/')
+        # remove port if 443
+        netloc = o.netloc
+        if o.port == 443:
+            netloc = netloc.split(':')[0]
+
+        self.host = urllib.parse.urlunparse((o.scheme, netloc, path, o.params, o.query, o.fragment))
+
+    def load_azure_tenant_id(self):
+        """[Internal] Load the Azure tenant ID from the Azure Databricks login page.
+
+        If the tenant ID is already set, this method does nothing."""
+        if not self.is_azure or self.azure_tenant_id is not None or self.host is None:
+            return
+        login_url = f'{self.host}/aad/auth'
+        logger.debug(f'Loading tenant ID from {login_url}')
+        resp = requests.get(login_url, allow_redirects=False)
+        if resp.status_code // 100 != 3:
+            logger.debug(
+                f'Failed to get tenant ID from {login_url}: expected status code 3xx, got {resp.status_code}')
+            return
+        entra_id_endpoint = resp.headers.get('Location')
+        if entra_id_endpoint is None:
+            logger.debug(f'No Location header in response from {login_url}')
+            return
+        # The Location header has the following form: https://login.microsoftonline.com/<tenant-id>/oauth2/authorize?...
+        # The domain may change depending on the Azure cloud (e.g. login.microsoftonline.us for US Government cloud).
+        url = urllib.parse.urlparse(entra_id_endpoint)
+        path_segments = url.path.split('/')
+        if len(path_segments) < 2:
+            logger.debug(f'Invalid path in Location header: {url.path}')
+            return
+        self.azure_tenant_id = path_segments[1]
+        logger.debug(f'Loaded tenant ID: {self.azure_tenant_id}')
 
     def _set_inner_config(self, keyword_args: Dict[str, any]):
         for attr in self.attributes():
@@ -498,6 +477,13 @@
         except ValueError as e:
             raise ValueError(f'{self._credentials_strategy.auth_type()} auth: {e}') from e
 
+    def _init_product(self, product, product_version):
+        if product is not None or product_version is not None:
+            default_product, default_version = useragent.product()
+            self._product_info = (product or default_product, product_version or default_version)
+        else:
+            self._product_info = None
+
     def __repr__(self):
         return f'<{self.debug_string()}>'
 
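The reworked _fix_host_if_needed shown above normalizes the configured host in three steps: prepend https:// when no scheme is present, strip a trailing slash, and drop an explicit :443 port. Here is a standalone sketch of that normalization under the same urllib-based approach; the helper name normalize_host is ours and is not part of the SDK.

import urllib.parse


def normalize_host(host: str) -> str:
    # Add a default scheme if it's missing, mirroring Config._fix_host_if_needed.
    if '://' not in host:
        host = 'https://' + host
    o = urllib.parse.urlparse(host)
    path = o.path.rstrip('/')  # remove trailing slash
    netloc = o.netloc
    if o.port == 443:  # drop the default HTTPS port
        netloc = netloc.split(':')[0]
    return urllib.parse.urlunparse((o.scheme, netloc, path, o.params, o.query, o.fragment))


assert normalize_host('my-workspace.cloud.databricks.com/') == 'https://my-workspace.cloud.databricks.com'
assert normalize_host('https://my-workspace.cloud.databricks.com:443') == 'https://my-workspace.cloud.databricks.com'
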
databricks/sdk/core.py CHANGED
@@ -133,31 +133,36 @@ class ApiClient:
 
     def do(self,
            method: str,
-           path: str,
+           path: str = None,
+           url: str = None,
            query: dict = None,
            headers: dict = None,
            body: dict = None,
            raw: bool = False,
            files=None,
            data=None,
+           auth: Callable[[requests.PreparedRequest], requests.PreparedRequest] = None,
            response_headers: List[str] = None) -> Union[dict, BinaryIO]:
-        # Remove extra `/` from path for Files API
-        # Once we've fixed the OpenAPI spec, we can remove this
-        path = re.sub('^/api/2.0/fs/files//', '/api/2.0/fs/files/', path)
         if headers is None:
             headers = {}
+        if url is None:
+            # Remove extra `/` from path for Files API
+            # Once we've fixed the OpenAPI spec, we can remove this
+            path = re.sub('^/api/2.0/fs/files//', '/api/2.0/fs/files/', path)
+            url = f"{self._cfg.host}{path}"
         headers['User-Agent'] = self._user_agent_base
         retryable = retried(timeout=timedelta(seconds=self._retry_timeout_seconds),
                             is_retryable=self._is_retryable,
                             clock=self._cfg.clock)
         response = retryable(self._perform)(method,
-                                            path,
+                                            url,
                                             query=query,
                                             headers=headers,
                                             body=body,
                                             raw=raw,
                                             files=files,
-                                            data=data)
+                                            data=data,
+                                            auth=auth)
 
         resp = dict()
         for header in response_headers if response_headers else []:
@@ -239,20 +244,22 @@
 
     def _perform(self,
                  method: str,
-                 path: str,
+                 url: str,
                  query: dict = None,
                  headers: dict = None,
                  body: dict = None,
                  raw: bool = False,
                  files=None,
-                 data=None):
+                 data=None,
+                 auth: Callable[[requests.PreparedRequest], requests.PreparedRequest] = None):
         response = self._session.request(method,
-                                         f"{self._cfg.host}{path}",
+                                         url,
                                          params=self._fix_query_string(query),
                                          json=body,
                                          headers=headers,
                                          files=files,
                                          data=data,
+                                         auth=auth,
                                          stream=raw,
                                          timeout=self._http_timeout_seconds)
         try:
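
With the widened signature above, ApiClient.do() accepts either a workspace-relative path or a fully qualified url, and passes an optional auth callable straight through to requests. A rough illustration follows, assuming credentials are already configured in the environment; the SCIM Me path is used only as a familiar endpoint, and in normal use the generated service classes call do() on your behalf.

import requests

from databricks.sdk.config import Config
from databricks.sdk.core import ApiClient

cfg = Config()            # resolves host and credentials from the environment
client = ApiClient(cfg)

# Workspace-relative call, same shape as before.
me = client.do('GET', path='/api/2.0/preview/scim/v2/Me')


def bearer(token: str):
    # Per-request auth hook in the style requests expects: it receives the
    # PreparedRequest, mutates it, and returns it.
    def apply(r: requests.PreparedRequest) -> requests.PreparedRequest:
        r.headers['Authorization'] = f'Bearer {token}'
        return r

    return apply


# Full-URL call with an explicit auth callable, e.g. for a data-plane endpoint.
resp = client.do('GET',
                 url=f"{cfg.host}/api/2.0/preview/scim/v2/Me",
                 auth=bearer('<access-token>'))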