databricks-sdk 0.29.0__tar.gz → 0.30.0__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release. This version of databricks-sdk might be problematic.
- {databricks-sdk-0.29.0 → databricks_sdk-0.30.0}/PKG-INFO +46 -7
- databricks-sdk-0.29.0/databricks_sdk.egg-info/PKG-INFO → databricks_sdk-0.30.0/README.md +24 -31
- {databricks-sdk-0.29.0 → databricks_sdk-0.30.0}/databricks/sdk/__init__.py +67 -19
- {databricks-sdk-0.29.0 → databricks_sdk-0.30.0}/databricks/sdk/config.py +61 -75
- {databricks-sdk-0.29.0 → databricks_sdk-0.30.0}/databricks/sdk/core.py +16 -9
- {databricks-sdk-0.29.0 → databricks_sdk-0.30.0}/databricks/sdk/credentials_provider.py +15 -15
- databricks_sdk-0.30.0/databricks/sdk/data_plane.py +65 -0
- {databricks-sdk-0.29.0 → databricks_sdk-0.30.0}/databricks/sdk/mixins/files.py +12 -4
- databricks_sdk-0.30.0/databricks/sdk/service/apps.py +977 -0
- {databricks-sdk-0.29.0 → databricks_sdk-0.30.0}/databricks/sdk/service/billing.py +602 -218
- {databricks-sdk-0.29.0 → databricks_sdk-0.30.0}/databricks/sdk/service/catalog.py +131 -34
- {databricks-sdk-0.29.0 → databricks_sdk-0.30.0}/databricks/sdk/service/compute.py +494 -81
- {databricks-sdk-0.29.0 → databricks_sdk-0.30.0}/databricks/sdk/service/dashboards.py +608 -5
- {databricks-sdk-0.29.0 → databricks_sdk-0.30.0}/databricks/sdk/service/iam.py +99 -88
- {databricks-sdk-0.29.0 → databricks_sdk-0.30.0}/databricks/sdk/service/jobs.py +34 -15
- {databricks-sdk-0.29.0 → databricks_sdk-0.30.0}/databricks/sdk/service/marketplace.py +2 -122
- {databricks-sdk-0.29.0 → databricks_sdk-0.30.0}/databricks/sdk/service/oauth2.py +127 -70
- {databricks-sdk-0.29.0 → databricks_sdk-0.30.0}/databricks/sdk/service/pipelines.py +72 -52
- {databricks-sdk-0.29.0 → databricks_sdk-0.30.0}/databricks/sdk/service/serving.py +303 -750
- {databricks-sdk-0.29.0 → databricks_sdk-0.30.0}/databricks/sdk/service/settings.py +423 -4
- {databricks-sdk-0.29.0 → databricks_sdk-0.30.0}/databricks/sdk/service/sharing.py +235 -25
- {databricks-sdk-0.29.0 → databricks_sdk-0.30.0}/databricks/sdk/service/sql.py +2417 -566
- databricks_sdk-0.30.0/databricks/sdk/useragent.py +144 -0
- databricks_sdk-0.30.0/databricks/sdk/version.py +1 -0
- databricks-sdk-0.29.0/README.md → databricks_sdk-0.30.0/databricks_sdk.egg-info/PKG-INFO +70 -0
- {databricks-sdk-0.29.0 → databricks_sdk-0.30.0}/databricks_sdk.egg-info/SOURCES.txt +23 -1
- databricks_sdk-0.30.0/tests/test_auth.py +283 -0
- databricks_sdk-0.30.0/tests/test_auth_manual_tests.py +62 -0
- databricks_sdk-0.30.0/tests/test_client.py +17 -0
- databricks_sdk-0.30.0/tests/test_compute_mixins.py +33 -0
- databricks_sdk-0.30.0/tests/test_config.py +116 -0
- databricks_sdk-0.30.0/tests/test_core.py +607 -0
- databricks_sdk-0.30.0/tests/test_data_plane.py +59 -0
- databricks_sdk-0.30.0/tests/test_dbfs_mixins.py +102 -0
- databricks_sdk-0.30.0/tests/test_dbutils.py +248 -0
- databricks_sdk-0.30.0/tests/test_environments.py +19 -0
- databricks_sdk-0.30.0/tests/test_errors.py +97 -0
- databricks_sdk-0.30.0/tests/test_init_file.py +17 -0
- databricks_sdk-0.30.0/tests/test_internal.py +51 -0
- databricks_sdk-0.30.0/tests/test_jobs.py +100 -0
- databricks_sdk-0.30.0/tests/test_metadata_service_auth.py +80 -0
- databricks_sdk-0.30.0/tests/test_misc.py +45 -0
- databricks_sdk-0.30.0/tests/test_oauth.py +29 -0
- databricks_sdk-0.30.0/tests/test_retries.py +63 -0
- databricks_sdk-0.30.0/tests/test_user_agent.py +42 -0
- databricks-sdk-0.29.0/databricks/sdk/version.py +0 -1
- {databricks-sdk-0.29.0 → databricks_sdk-0.30.0}/LICENSE +0 -0
- {databricks-sdk-0.29.0 → databricks_sdk-0.30.0}/NOTICE +0 -0
- {databricks-sdk-0.29.0 → databricks_sdk-0.30.0}/databricks/__init__.py +0 -0
- {databricks-sdk-0.29.0 → databricks_sdk-0.30.0}/databricks/sdk/_property.py +0 -0
- {databricks-sdk-0.29.0 → databricks_sdk-0.30.0}/databricks/sdk/_widgets/__init__.py +0 -0
- {databricks-sdk-0.29.0 → databricks_sdk-0.30.0}/databricks/sdk/_widgets/default_widgets_utils.py +0 -0
- {databricks-sdk-0.29.0 → databricks_sdk-0.30.0}/databricks/sdk/_widgets/ipywidgets_utils.py +0 -0
- {databricks-sdk-0.29.0 → databricks_sdk-0.30.0}/databricks/sdk/azure.py +0 -0
- {databricks-sdk-0.29.0 → databricks_sdk-0.30.0}/databricks/sdk/casing.py +0 -0
- {databricks-sdk-0.29.0 → databricks_sdk-0.30.0}/databricks/sdk/clock.py +0 -0
- {databricks-sdk-0.29.0 → databricks_sdk-0.30.0}/databricks/sdk/dbutils.py +0 -0
- {databricks-sdk-0.29.0 → databricks_sdk-0.30.0}/databricks/sdk/environments.py +0 -0
- {databricks-sdk-0.29.0 → databricks_sdk-0.30.0}/databricks/sdk/errors/__init__.py +0 -0
- {databricks-sdk-0.29.0 → databricks_sdk-0.30.0}/databricks/sdk/errors/base.py +0 -0
- {databricks-sdk-0.29.0 → databricks_sdk-0.30.0}/databricks/sdk/errors/mapper.py +0 -0
- {databricks-sdk-0.29.0 → databricks_sdk-0.30.0}/databricks/sdk/errors/overrides.py +0 -0
- {databricks-sdk-0.29.0 → databricks_sdk-0.30.0}/databricks/sdk/errors/platform.py +0 -0
- {databricks-sdk-0.29.0 → databricks_sdk-0.30.0}/databricks/sdk/errors/private_link.py +0 -0
- {databricks-sdk-0.29.0 → databricks_sdk-0.30.0}/databricks/sdk/errors/sdk.py +0 -0
- {databricks-sdk-0.29.0 → databricks_sdk-0.30.0}/databricks/sdk/mixins/__init__.py +0 -0
- {databricks-sdk-0.29.0 → databricks_sdk-0.30.0}/databricks/sdk/mixins/compute.py +0 -0
- {databricks-sdk-0.29.0 → databricks_sdk-0.30.0}/databricks/sdk/mixins/workspace.py +0 -0
- {databricks-sdk-0.29.0 → databricks_sdk-0.30.0}/databricks/sdk/oauth.py +0 -0
- {databricks-sdk-0.29.0 → databricks_sdk-0.30.0}/databricks/sdk/py.typed +0 -0
- {databricks-sdk-0.29.0 → databricks_sdk-0.30.0}/databricks/sdk/retries.py +0 -0
- {databricks-sdk-0.29.0 → databricks_sdk-0.30.0}/databricks/sdk/runtime/__init__.py +0 -0
- {databricks-sdk-0.29.0 → databricks_sdk-0.30.0}/databricks/sdk/runtime/dbutils_stub.py +0 -0
- {databricks-sdk-0.29.0 → databricks_sdk-0.30.0}/databricks/sdk/service/__init__.py +0 -0
- {databricks-sdk-0.29.0 → databricks_sdk-0.30.0}/databricks/sdk/service/_internal.py +0 -0
- {databricks-sdk-0.29.0 → databricks_sdk-0.30.0}/databricks/sdk/service/files.py +0 -0
- {databricks-sdk-0.29.0 → databricks_sdk-0.30.0}/databricks/sdk/service/ml.py +0 -0
- {databricks-sdk-0.29.0 → databricks_sdk-0.30.0}/databricks/sdk/service/provisioning.py +0 -0
- {databricks-sdk-0.29.0 → databricks_sdk-0.30.0}/databricks/sdk/service/vectorsearch.py +0 -0
- {databricks-sdk-0.29.0 → databricks_sdk-0.30.0}/databricks/sdk/service/workspace.py +0 -0
- {databricks-sdk-0.29.0 → databricks_sdk-0.30.0}/databricks_sdk.egg-info/dependency_links.txt +0 -0
- {databricks-sdk-0.29.0 → databricks_sdk-0.30.0}/databricks_sdk.egg-info/requires.txt +11 -11
- {databricks-sdk-0.29.0 → databricks_sdk-0.30.0}/databricks_sdk.egg-info/top_level.txt +0 -0
- {databricks-sdk-0.29.0 → databricks_sdk-0.30.0}/setup.cfg +0 -0
- {databricks-sdk-0.29.0 → databricks_sdk-0.30.0}/setup.py +0 -0
{databricks-sdk-0.29.0 → databricks_sdk-0.30.0}/PKG-INFO

@@ -1,13 +1,11 @@
 Metadata-Version: 2.1
 Name: databricks-sdk
-Version: 0.29.0
+Version: 0.30.0
 Summary: Databricks SDK for Python (Beta)
 Home-page: https://databricks-sdk-py.readthedocs.io
 Author: Serge Smertin
 Author-email: serge.smertin@databricks.com
-License: UNKNOWN
 Keywords: databricks sdk
-Platform: UNKNOWN
 Classifier: Development Status :: 4 - Beta
 Classifier: Intended Audience :: Developers
 Classifier: Intended Audience :: Science/Research
@@ -22,10 +20,29 @@ Classifier: Programming Language :: Python :: 3.12
 Classifier: Operating System :: OS Independent
 Requires-Python: >=3.7
 Description-Content-Type: text/markdown
-Provides-Extra: dev
-Provides-Extra: notebook
 License-File: LICENSE
 License-File: NOTICE
+Requires-Dist: requests<3,>=2.28.1
+Requires-Dist: google-auth~=2.0
+Provides-Extra: dev
+Requires-Dist: pytest; extra == "dev"
+Requires-Dist: pytest-cov; extra == "dev"
+Requires-Dist: pytest-xdist; extra == "dev"
+Requires-Dist: pytest-mock; extra == "dev"
+Requires-Dist: yapf; extra == "dev"
+Requires-Dist: pycodestyle; extra == "dev"
+Requires-Dist: autoflake; extra == "dev"
+Requires-Dist: isort; extra == "dev"
+Requires-Dist: wheel; extra == "dev"
+Requires-Dist: ipython; extra == "dev"
+Requires-Dist: ipywidgets; extra == "dev"
+Requires-Dist: requests-mock; extra == "dev"
+Requires-Dist: pyfakefs; extra == "dev"
+Requires-Dist: databricks-connect; extra == "dev"
+Requires-Dist: pytest-rerunfailures; extra == "dev"
+Provides-Extra: notebook
+Requires-Dist: ipython<9,>=8; extra == "notebook"
+Requires-Dist: ipywidgets<9,>=8; extra == "notebook"
 
 # Databricks SDK for Python (Beta)
 
@@ -59,6 +76,7 @@ The SDK's internal HTTP client is robust and handles failures on different level
 - [Long-running operations](#long-running-operations)
 - [Paginated responses](#paginated-responses)
 - [Single-sign-on with OAuth](#single-sign-on-sso-with-oauth)
+- [User Agent Request Attribution](#user-agent-request-attribution)
 - [Error handling](#error-handling)
 - [Logging](#logging)
 - [Integration with `dbutils`](#interaction-with-dbutils)
@@ -537,6 +555,29 @@ logging.info(f'Created new custom app: '
              f'--client_secret {custom_app.client_secret}')
 ```
 
+## User Agent Request Attribution<a id="user-agent-request-attribution"></a>
+
+The Databricks SDK for Python uses the `User-Agent` header to include request metadata along with each request. By default, this includes the version of the Python SDK, the version of the Python language used by your application, and the underlying operating system. To statically add additional metadata, you can use the `with_partner()` and `with_product()` functions in the `databricks.sdk.useragent` module. `with_partner()` can be used by partners to indicate that code using the Databricks SDK for Python should be attributed to a specific partner. Multiple partners can be registered at once. Partner names can contain any number, digit, `.`, `-`, `_` or `+`.
+
+```python
+from databricks.sdk import useragent
+useragent.with_partner("partner-abc")
+useragent.with_partner("partner-xyz")
+```
+
+`with_product()` can be used to define the name and version of the product that is built with the Databricks SDK for Python. The product name has the same restrictions as the partner name above, and the product version must be a valid [SemVer](https://semver.org/). Subsequent calls to `with_product()` replace the original product with the new user-specified one.
+
+```python
+from databricks.sdk import useragent
+useragent.with_product("databricks-example-product", "1.2.0")
+```
+
+If both the `DATABRICKS_SDK_UPSTREAM` and `DATABRICKS_SDK_UPSTREAM_VERSION` environment variables are defined, these will also be included in the `User-Agent` header.
+
+If additional metadata needs to be specified that isn't already supported by the above interfaces, you can use the `with_user_agent_extra()` function to register arbitrary key-value pairs to include in the user agent. Multiple values associated with the same key are allowed. Keys have the same restrictions as the partner name above. Values must be either as described above or SemVer strings.
+
+Additional `User-Agent` information can be associated with different instances of `Config`. To add metadata to a specific instance of `Config`, use the `with_user_agent_extra()` method.
+
 ## Error handling<a id="error-handling"></a>
 
 The Databricks SDK for Python provides a robust error-handling mechanism that allows developers to catch and handle API errors. When an error occurs, the SDK will raise an exception that contains information about the error, such as the HTTP status code, error message, and error details. Developers can catch these exceptions and handle them appropriately in their code.
@@ -636,5 +677,3 @@ API clients for all services are generated from specification files that are syn
 You are highly encouraged to pin the exact dependency version and read the [changelog](https://github.com/databricks/databricks-sdk-py/blob/main/CHANGELOG.md)
 where Databricks documents the changes. Databricks may have minor [documented](https://github.com/databricks/databricks-sdk-py/blob/main/CHANGELOG.md)
 backward-incompatible changes, such as renaming some type names to bring more consistency.
-
-
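Taken together, the attribution hooks documented in the section above can be exercised in a few lines. The sketch below is illustrative only: it assumes workspace authentication is already configured in the environment, that `WorkspaceClient` exposes its `Config` via `.config`, and it uses the module-level `with_user_agent_extra()` helper that `databricks/sdk/config.py` keeps as a thin wrapper (see the config.py diff further down) as the "function" form the README refers to.

```python
from databricks.sdk import WorkspaceClient, useragent
from databricks.sdk.config import with_user_agent_extra

# Process-wide attribution, shared by every client created afterwards.
useragent.with_partner("partner-xyz")                          # partner attribution
useragent.with_product("databricks-example-product", "1.2.0")  # product name + SemVer version
with_user_agent_extra("mylib", "0.3.1")                        # arbitrary key/value pair

# Per-instance attribution: only requests issued through this client carry it.
w = WorkspaceClient()
w.config.with_user_agent_extra("pipeline", "nightly-sync")

# The resulting header also carries python/os/auth details and, when set, the
# DATABRICKS_SDK_UPSTREAM / DATABRICKS_SDK_UPSTREAM_VERSION environment variables.
print(w.config.user_agent)
```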
databricks-sdk-0.29.0/databricks_sdk.egg-info/PKG-INFO → databricks_sdk-0.30.0/README.md

@@ -1,32 +1,3 @@
-Metadata-Version: 2.1
-Name: databricks-sdk
-Version: 0.29.0
-Summary: Databricks SDK for Python (Beta)
-Home-page: https://databricks-sdk-py.readthedocs.io
-Author: Serge Smertin
-Author-email: serge.smertin@databricks.com
-License: UNKNOWN
-Keywords: databricks sdk
-Platform: UNKNOWN
-Classifier: Development Status :: 4 - Beta
-Classifier: Intended Audience :: Developers
-Classifier: Intended Audience :: Science/Research
-Classifier: Intended Audience :: System Administrators
-Classifier: License :: OSI Approved :: Apache Software License
-Classifier: Programming Language :: Python :: 3.7
-Classifier: Programming Language :: Python :: 3.8
-Classifier: Programming Language :: Python :: 3.9
-Classifier: Programming Language :: Python :: 3.10
-Classifier: Programming Language :: Python :: 3.11
-Classifier: Programming Language :: Python :: 3.12
-Classifier: Operating System :: OS Independent
-Requires-Python: >=3.7
-Description-Content-Type: text/markdown
-Provides-Extra: dev
-Provides-Extra: notebook
-License-File: LICENSE
-License-File: NOTICE
-
 # Databricks SDK for Python (Beta)
 
 [](https://pypistats.org/packages/databricks-sdk)
@@ -59,6 +30,7 @@ The SDK's internal HTTP client is robust and handles failures on different level
 - [Long-running operations](#long-running-operations)
 - [Paginated responses](#paginated-responses)
 - [Single-sign-on with OAuth](#single-sign-on-sso-with-oauth)
+- [User Agent Request Attribution](#user-agent-request-attribution)
 - [Error handling](#error-handling)
 - [Logging](#logging)
 - [Integration with `dbutils`](#interaction-with-dbutils)
@@ -537,6 +509,29 @@ logging.info(f'Created new custom app: '
              f'--client_secret {custom_app.client_secret}')
 ```
 
+## User Agent Request Attribution<a id="user-agent-request-attribution"></a>
+
+The Databricks SDK for Python uses the `User-Agent` header to include request metadata along with each request. By default, this includes the version of the Python SDK, the version of the Python language used by your application, and the underlying operating system. To statically add additional metadata, you can use the `with_partner()` and `with_product()` functions in the `databricks.sdk.useragent` module. `with_partner()` can be used by partners to indicate that code using the Databricks SDK for Python should be attributed to a specific partner. Multiple partners can be registered at once. Partner names can contain any number, digit, `.`, `-`, `_` or `+`.
+
+```python
+from databricks.sdk import useragent
+useragent.with_partner("partner-abc")
+useragent.with_partner("partner-xyz")
+```
+
+`with_product()` can be used to define the name and version of the product that is built with the Databricks SDK for Python. The product name has the same restrictions as the partner name above, and the product version must be a valid [SemVer](https://semver.org/). Subsequent calls to `with_product()` replace the original product with the new user-specified one.
+
+```python
+from databricks.sdk import useragent
+useragent.with_product("databricks-example-product", "1.2.0")
+```
+
+If both the `DATABRICKS_SDK_UPSTREAM` and `DATABRICKS_SDK_UPSTREAM_VERSION` environment variables are defined, these will also be included in the `User-Agent` header.
+
+If additional metadata needs to be specified that isn't already supported by the above interfaces, you can use the `with_user_agent_extra()` function to register arbitrary key-value pairs to include in the user agent. Multiple values associated with the same key are allowed. Keys have the same restrictions as the partner name above. Values must be either as described above or SemVer strings.
+
+Additional `User-Agent` information can be associated with different instances of `Config`. To add metadata to a specific instance of `Config`, use the `with_user_agent_extra()` method.
+
 ## Error handling<a id="error-handling"></a>
 
 The Databricks SDK for Python provides a robust error-handling mechanism that allows developers to catch and handle API errors. When an error occurs, the SDK will raise an exception that contains information about the error, such as the HTTP status code, error message, and error details. Developers can catch these exceptions and handle them appropriately in their code.
@@ -636,5 +631,3 @@ API clients for all services are generated from specification files that are syn
 You are highly encouraged to pin the exact dependency version and read the [changelog](https://github.com/databricks/databricks-sdk-py/blob/main/CHANGELOG.md)
 where Databricks documents the changes. Databricks may have minor [documented](https://github.com/databricks/databricks-sdk-py/blob/main/CHANGELOG.md)
 backward-incompatible changes, such as renaming some type names to bring more consistency.
-
-
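The same README section mentions the upstream environment variables. A small, hypothetical illustration of how a downstream tool might advertise itself before constructing a client (the variable names and the `upstream/... upstream-version/...` header format come from the diffs above and below; the tool name is made up):

```python
import os

# When both variables are present, the SDK appends
# "upstream/<name> upstream-version/<version>" to the User-Agent header.
os.environ["DATABRICKS_SDK_UPSTREAM"] = "my-downstream-tool"
os.environ["DATABRICKS_SDK_UPSTREAM_VERSION"] = "2.5.0"

from databricks.sdk import WorkspaceClient

w = WorkspaceClient()       # assumes authentication is configured in the environment
print(w.config.user_agent)  # ... upstream/my-downstream-tool upstream-version/2.5.0 ...
```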
{databricks-sdk-0.29.0 → databricks_sdk-0.30.0}/databricks/sdk/__init__.py

@@ -5,8 +5,9 @@ from databricks.sdk.credentials_provider import CredentialsStrategy
 from databricks.sdk.mixins.compute import ClustersExt
 from databricks.sdk.mixins.files import DbfsExt
 from databricks.sdk.mixins.workspace import WorkspaceExt
+from databricks.sdk.service.apps import AppsAPI
 from databricks.sdk.service.billing import (BillableUsageAPI, BudgetsAPI,
-                                            LogDeliveryAPI)
+                                            LogDeliveryAPI, UsageDashboardsAPI)
 from databricks.sdk.service.catalog import (AccountMetastoreAssignmentsAPI,
                                             AccountMetastoresAPI,
                                             AccountStorageCredentialsAPI,
@@ -27,7 +28,7 @@ from databricks.sdk.service.compute import (ClusterPoliciesAPI, ClustersAPI,
                                             InstancePoolsAPI,
                                             InstanceProfilesAPI, LibrariesAPI,
                                             PolicyFamiliesAPI)
-from databricks.sdk.service.dashboards import LakeviewAPI
+from databricks.sdk.service.dashboards import GenieAPI, LakeviewAPI
 from databricks.sdk.service.files import DbfsAPI, FilesAPI
 from databricks.sdk.service.iam import (AccountAccessControlAPI,
                                         AccountAccessControlProxyAPI,
@@ -55,7 +56,8 @@ from databricks.sdk.service.provisioning import (CredentialsAPI,
                                                  NetworksAPI, PrivateAccessAPI,
                                                  StorageAPI, VpcEndpointsAPI,
                                                  Workspace, WorkspacesAPI)
-from databricks.sdk.service.serving import
+from databricks.sdk.service.serving import (ServingEndpointsAPI,
+                                            ServingEndpointsDataPlaneAPI)
 from databricks.sdk.service.settings import (AccountIpAccessListsAPI,
                                              AccountSettingsAPI,
                                              AutomaticClusterUpdateAPI,
@@ -67,6 +69,7 @@ from databricks.sdk.service.settings import (AccountIpAccessListsAPI,
                                              EsmEnablementAccountAPI,
                                              IpAccessListsAPI,
                                              NetworkConnectivityAPI,
+                                             NotificationDestinationsAPI,
                                              PersonalComputeAPI,
                                              RestrictWorkspaceAdminsAPI,
                                              SettingsAPI, TokenManagementAPI,
@@ -74,11 +77,13 @@ from databricks.sdk.service.settings import (AccountIpAccessListsAPI,
 from databricks.sdk.service.sharing import (CleanRoomsAPI, ProvidersAPI,
                                             RecipientActivationAPI,
                                             RecipientsAPI, SharesAPI)
-from databricks.sdk.service.sql import (AlertsAPI,
-
-
+from databricks.sdk.service.sql import (AlertsAPI, AlertsLegacyAPI,
+                                        DashboardsAPI, DashboardWidgetsAPI,
+                                        DataSourcesAPI, DbsqlPermissionsAPI,
+                                        QueriesAPI, QueriesLegacyAPI,
                                         QueryHistoryAPI,
                                         QueryVisualizationsAPI,
+                                        QueryVisualizationsLegacyAPI,
                                         StatementExecutionAPI, WarehousesAPI)
 from databricks.sdk.service.vectorsearch import (VectorSearchEndpointsAPI,
                                                  VectorSearchIndexesAPI)
@@ -162,8 +167,10 @@ class WorkspaceClient:
         self._config = config.copy()
         self._dbutils = _make_dbutils(self._config)
         self._api_client = client.ApiClient(self._config)
+        serving_endpoints = ServingEndpointsAPI(self._api_client)
         self._account_access_control_proxy = AccountAccessControlProxyAPI(self._api_client)
         self._alerts = AlertsAPI(self._api_client)
+        self._alerts_legacy = AlertsLegacyAPI(self._api_client)
         self._apps = AppsAPI(self._api_client)
         self._artifact_allowlists = ArtifactAllowlistsAPI(self._api_client)
         self._catalogs = CatalogsAPI(self._api_client)
@@ -188,6 +195,7 @@ class WorkspaceClient:
         self._external_locations = ExternalLocationsAPI(self._api_client)
         self._files = FilesAPI(self._api_client)
         self._functions = FunctionsAPI(self._api_client)
+        self._genie = GenieAPI(self._api_client)
         self._git_credentials = GitCredentialsAPI(self._api_client)
         self._global_init_scripts = GlobalInitScriptsAPI(self._api_client)
         self._grants = GrantsAPI(self._api_client)
@@ -201,6 +209,7 @@ class WorkspaceClient:
         self._metastores = MetastoresAPI(self._api_client)
         self._model_registry = ModelRegistryAPI(self._api_client)
         self._model_versions = ModelVersionsAPI(self._api_client)
+        self._notification_destinations = NotificationDestinationsAPI(self._api_client)
         self._online_tables = OnlineTablesAPI(self._api_client)
         self._permission_migration = PermissionMigrationAPI(self._api_client)
         self._permissions = PermissionsAPI(self._api_client)
@@ -217,8 +226,10 @@ class WorkspaceClient:
         self._providers = ProvidersAPI(self._api_client)
         self._quality_monitors = QualityMonitorsAPI(self._api_client)
         self._queries = QueriesAPI(self._api_client)
+        self._queries_legacy = QueriesLegacyAPI(self._api_client)
         self._query_history = QueryHistoryAPI(self._api_client)
         self._query_visualizations = QueryVisualizationsAPI(self._api_client)
+        self._query_visualizations_legacy = QueryVisualizationsLegacyAPI(self._api_client)
         self._recipient_activation = RecipientActivationAPI(self._api_client)
         self._recipients = RecipientsAPI(self._api_client)
         self._registered_models = RegisteredModelsAPI(self._api_client)
@@ -226,7 +237,8 @@ class WorkspaceClient:
         self._schemas = SchemasAPI(self._api_client)
         self._secrets = SecretsAPI(self._api_client)
         self._service_principals = ServicePrincipalsAPI(self._api_client)
-        self._serving_endpoints =
+        self._serving_endpoints = serving_endpoints
+        self._serving_endpoints_data_plane = ServingEndpointsDataPlaneAPI(self._api_client, serving_endpoints)
         self._settings = SettingsAPI(self._api_client)
         self._shares = SharesAPI(self._api_client)
         self._statement_execution = StatementExecutionAPI(self._api_client)
@@ -267,6 +279,11 @@ class WorkspaceClient:
         """The alerts API can be used to perform CRUD operations on alerts."""
         return self._alerts
 
+    @property
+    def alerts_legacy(self) -> AlertsLegacyAPI:
+        """The alerts API can be used to perform CRUD operations on alerts."""
+        return self._alerts_legacy
+
     @property
     def apps(self) -> AppsAPI:
         """Apps run directly on a customer’s Databricks instance, integrate with their data, use and extend Databricks services, and enable users to interact through single sign-on."""
@@ -387,6 +404,11 @@ class WorkspaceClient:
         """Functions implement User-Defined Functions (UDFs) in Unity Catalog."""
         return self._functions
 
+    @property
+    def genie(self) -> GenieAPI:
+        """Genie provides a no-code experience for business users, powered by AI/BI."""
+        return self._genie
+
     @property
     def git_credentials(self) -> GitCredentialsAPI:
         """Registers personal access token for Databricks to do operations on behalf of the user."""
@@ -452,6 +474,11 @@ class WorkspaceClient:
         """Databricks provides a hosted version of MLflow Model Registry in Unity Catalog."""
         return self._model_versions
 
+    @property
+    def notification_destinations(self) -> NotificationDestinationsAPI:
+        """The notification destinations API lets you programmatically manage a workspace's notification destinations."""
+        return self._notification_destinations
+
     @property
     def online_tables(self) -> OnlineTablesAPI:
         """Online tables provide lower latency and higher QPS access to data from Delta tables."""
@@ -459,7 +486,7 @@ class WorkspaceClient:
 
     @property
     def permission_migration(self) -> PermissionMigrationAPI:
-        """
+        """APIs for migrating acl permissions, used only by the ucx tool: https://github.com/databrickslabs/ucx."""
         return self._permission_migration
 
     @property
@@ -524,19 +551,29 @@ class WorkspaceClient:
 
     @property
     def queries(self) -> QueriesAPI:
-        """
+        """The queries API can be used to perform CRUD operations on queries."""
         return self._queries
 
+    @property
+    def queries_legacy(self) -> QueriesLegacyAPI:
+        """These endpoints are used for CRUD operations on query definitions."""
+        return self._queries_legacy
+
     @property
     def query_history(self) -> QueryHistoryAPI:
-        """
+        """A service responsible for storing and retrieving the list of queries run against SQL endpoints, serverless compute, and DLT."""
         return self._query_history
 
     @property
     def query_visualizations(self) -> QueryVisualizationsAPI:
-        """This is an evolving API that facilitates the addition and removal of
+        """This is an evolving API that facilitates the addition and removal of visualizations from existing queries in the Databricks Workspace."""
         return self._query_visualizations
 
+    @property
+    def query_visualizations_legacy(self) -> QueryVisualizationsLegacyAPI:
+        """This is an evolving API that facilitates the addition and removal of visualizations from existing queries within the Databricks Workspace."""
+        return self._query_visualizations_legacy
+
     @property
     def recipient_activation(self) -> RecipientActivationAPI:
         """The Recipient Activation API is only applicable in the open sharing model where the recipient object has the authentication type of `TOKEN`."""
@@ -577,6 +614,11 @@ class WorkspaceClient:
         """The Serving Endpoints API allows you to create, update, and delete model serving endpoints."""
         return self._serving_endpoints
 
+    @property
+    def serving_endpoints_data_plane(self) -> ServingEndpointsDataPlaneAPI:
+        """Serving endpoints DataPlane provides a set of operations to interact with data plane endpoints for Serving endpoints service."""
+        return self._serving_endpoints_data_plane
+
     @property
     def settings(self) -> SettingsAPI:
         """Workspace Settings API allows users to manage settings at the workspace level."""
@@ -734,7 +776,6 @@ class AccountClient:
         self._api_client = client.ApiClient(self._config)
         self._access_control = AccountAccessControlAPI(self._api_client)
         self._billable_usage = BillableUsageAPI(self._api_client)
-        self._budgets = BudgetsAPI(self._api_client)
         self._credentials = CredentialsAPI(self._api_client)
         self._custom_app_integration = CustomAppIntegrationAPI(self._api_client)
         self._encryption_keys = EncryptionKeysAPI(self._api_client)
@@ -753,10 +794,12 @@ class AccountClient:
         self._settings = AccountSettingsAPI(self._api_client)
         self._storage = StorageAPI(self._api_client)
         self._storage_credentials = AccountStorageCredentialsAPI(self._api_client)
+        self._usage_dashboards = UsageDashboardsAPI(self._api_client)
         self._users = AccountUsersAPI(self._api_client)
         self._vpc_endpoints = VpcEndpointsAPI(self._api_client)
         self._workspace_assignment = WorkspaceAssignmentAPI(self._api_client)
         self._workspaces = WorkspacesAPI(self._api_client)
+        self._budgets = BudgetsAPI(self._api_client)
 
     @property
     def config(self) -> client.Config:
@@ -776,11 +819,6 @@ class AccountClient:
         """This API allows you to download billable usage logs for the specified account and date range."""
         return self._billable_usage
 
-    @property
-    def budgets(self) -> BudgetsAPI:
-        """These APIs manage budget configuration including notifications for exceeding a budget for a period."""
-        return self._budgets
-
     @property
     def credentials(self) -> CredentialsAPI:
         """These APIs manage credential configurations for this workspace."""
@@ -788,7 +826,7 @@ class AccountClient:
 
     @property
     def custom_app_integration(self) -> CustomAppIntegrationAPI:
-        """These APIs enable administrators to manage custom
+        """These APIs enable administrators to manage custom OAuth app integrations, which is required for adding/using Custom OAuth App Integration like Tableau Cloud for Databricks in AWS cloud."""
         return self._custom_app_integration
 
     @property
@@ -843,7 +881,7 @@ class AccountClient:
 
     @property
     def published_app_integration(self) -> PublishedAppIntegrationAPI:
-        """These APIs enable administrators to manage published
+        """These APIs enable administrators to manage published OAuth app integrations, which is required for adding/using Published OAuth App Integration like Tableau Desktop for Databricks in AWS cloud."""
         return self._published_app_integration
 
     @property
@@ -871,6 +909,11 @@ class AccountClient:
         """These APIs manage storage credentials for a particular metastore."""
         return self._storage_credentials
 
+    @property
+    def usage_dashboards(self) -> UsageDashboardsAPI:
+        """These APIs manage usage dashboards for this account."""
+        return self._usage_dashboards
+
     @property
     def users(self) -> AccountUsersAPI:
         """User identities recognized by Databricks and represented by email addresses."""
@@ -891,6 +934,11 @@ class AccountClient:
         """These APIs manage workspaces for this account."""
         return self._workspaces
 
+    @property
+    def budgets(self) -> BudgetsAPI:
+        """These APIs manage budget configurations for this account."""
+        return self._budgets
+
     def get_workspace_client(self, workspace: Workspace) -> WorkspaceClient:
         """Constructs a ``WorkspaceClient`` for the given workspace.
 
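For readers who want to see what the re-wired client surface looks like from user code, here is a short, illustrative sketch. It only touches properties added in this diff and assumes workspace- and account-level authentication are already configured in the environment:

```python
from databricks.sdk import AccountClient, WorkspaceClient

w = WorkspaceClient()
a = AccountClient()

# New workspace-level handles in 0.30.0: Genie, the legacy SQL APIs, the
# serving-endpoints data plane client and notification destinations.
print(type(w.genie))
print(type(w.alerts_legacy), type(w.queries_legacy), type(w.query_visualizations_legacy))
print(type(w.serving_endpoints_data_plane))
print(type(w.notification_destinations))

# New account-level handle: usage dashboards; budgets is still exposed,
# it is just constructed later in __init__ now.
print(type(a.usage_dashboards))
print(type(a.budgets))
```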
{databricks-sdk-0.29.0 → databricks_sdk-0.30.0}/databricks/sdk/config.py

@@ -3,19 +3,18 @@ import copy
 import logging
 import os
 import pathlib
-import platform
 import sys
 import urllib.parse
-from typing import Dict, Iterable,
+from typing import Dict, Iterable, Optional
 
 import requests
 
+from . import useragent
 from .clock import Clock, RealClock
 from .credentials_provider import CredentialsStrategy, DefaultCredentials
 from .environments import (ALL_ENVS, AzureEnvironment, Cloud,
                            DatabricksEnvironment, get_environment_for_hostname)
 from .oauth import OidcEndpoints, Token
-from .version import __version__
 
 logger = logging.getLogger('databricks.sdk')
 
@@ -44,30 +43,14 @@
         return f"<ConfigAttribute '{self.name}' {self.transform.__name__}>"
 
 
-_DEFAULT_PRODUCT_NAME = 'unknown'
-_DEFAULT_PRODUCT_VERSION = '0.0.0'
-_STATIC_USER_AGENT: Tuple[str, str, List[str]] = (_DEFAULT_PRODUCT_NAME, _DEFAULT_PRODUCT_VERSION, [])
-
-
 def with_product(product: str, product_version: str):
     """[INTERNAL API] Change the product name and version used in the User-Agent header."""
-
-    prev_product, prev_version, prev_other_info = _STATIC_USER_AGENT
-    logger.debug(f'Changing product from {prev_product}/{prev_version} to {product}/{product_version}')
-    _STATIC_USER_AGENT = product, product_version, prev_other_info
+    useragent.with_product(product, product_version)
 
 
 def with_user_agent_extra(key: str, value: str):
     """[INTERNAL API] Add extra metadata to the User-Agent header when developing a library."""
-
-    product_name, product_version, other_info = _STATIC_USER_AGENT
-    for item in other_info:
-        if item.startswith(f"{key}/"):
-            # ensure that we don't have duplicates
-            other_info.remove(item)
-            break
-    other_info.append(f"{key}/{value}")
-    _STATIC_USER_AGENT = product_name, product_version, other_info
+    useragent.with_extra(key, value)
 
 
 class Config:
@@ -111,21 +94,12 @@ class Config:
                  # Deprecated. Use credentials_strategy instead.
                  credentials_provider: CredentialsStrategy = None,
                  credentials_strategy: CredentialsStrategy = None,
-                 product=
-                 product_version=
+                 product=None,
+                 product_version=None,
                  clock: Clock = None,
                  **kwargs):
         self._header_factory = None
         self._inner = {}
-        # as in SDK for Go, pull information from global static user agent context,
-        # so that we can track additional metadata for mid-stream libraries, as well
-        # as for cases, when the downstream product is used as a library and is not
-        # configured with a proper product name and version.
-        static_product, static_version, _ = _STATIC_USER_AGENT
-        if product == _DEFAULT_PRODUCT_NAME:
-            product = static_product
-        if product_version == _DEFAULT_PRODUCT_VERSION:
-            product_version = static_version
         self._user_agent_other_info = []
         if credentials_strategy and credentials_provider:
             raise ValueError(
@@ -147,8 +121,7 @@ class Config:
             self._fix_host_if_needed()
             self._validate()
             self.init_auth()
-            self.
-            self._product_version = product_version
+            self._init_product(product, product_version)
         except ValueError as e:
             message = self.wrap_debug_info(str(e))
             raise ValueError(message) from e
@@ -260,47 +233,19 @@ class Config:
     @property
     def user_agent(self):
         """ Returns User-Agent header used by this SDK """
-        py_version = platform.python_version()
-        os_name = platform.uname().system.lower()
-
-        ua = [
-            f"{self._product}/{self._product_version}", f"databricks-sdk-py/{__version__}",
-            f"python/{py_version}", f"os/{os_name}", f"auth/{self.auth_type}",
-        ]
-        if len(self._user_agent_other_info) > 0:
-            ua.append(' '.join(self._user_agent_other_info))
-        # as in SDK for Go, pull information from global static user agent context,
-        # so that we can track additional metadata for mid-stream libraries. this value
-        # is shared across all instances of Config objects intentionally.
-        _, _, static_info = _STATIC_USER_AGENT
-        if len(static_info) > 0:
-            ua.append(' '.join(static_info))
-        if len(self._upstream_user_agent) > 0:
-            ua.append(self._upstream_user_agent)
-        if 'DATABRICKS_RUNTIME_VERSION' in os.environ:
-            runtime_version = os.environ['DATABRICKS_RUNTIME_VERSION']
-            if runtime_version != '':
-                runtime_version = self._sanitize_header_value(runtime_version)
-                ua.append(f'runtime/{runtime_version}')
-
-        return ' '.join(ua)
 
-
-
-
-
-
+        # global user agent includes SDK version, product name & version, platform info,
+        # and global extra info. Config can have specific extra info associated with it,
+        # such as an override product, auth type, and other user-defined information.
+        return useragent.to_string(self._product_info,
+                                   [("auth", self.auth_type)] + self._user_agent_other_info)
 
     @property
     def _upstream_user_agent(self) -> str:
-
-        product_version = os.environ.get('DATABRICKS_SDK_UPSTREAM_VERSION', None)
-        if product is not None and product_version is not None:
-            return f"upstream/{product} upstream-version/{product_version}"
-        return ""
+        return " ".join(f"{k}/{v}" for k, v in useragent._get_upstream_user_agent_info())
 
     def with_user_agent_extra(self, key: str, value: str) -> 'Config':
-        self._user_agent_other_info.append(
+        self._user_agent_other_info.append((key, value))
         return self
 
     @property
@@ -403,13 +348,47 @@ class Config:
     def _fix_host_if_needed(self):
         if not self.host:
             return
-
+
+        # Add a default scheme if it's missing
+        if '://' not in self.host:
+            self.host = 'https://' + self.host
+
         o = urllib.parse.urlparse(self.host)
-
-
-
-
-
+        # remove trailing slash
+        path = o.path.rstrip('/')
+        # remove port if 443
+        netloc = o.netloc
+        if o.port == 443:
+            netloc = netloc.split(':')[0]
+
+        self.host = urllib.parse.urlunparse((o.scheme, netloc, path, o.params, o.query, o.fragment))
+
+    def load_azure_tenant_id(self):
+        """[Internal] Load the Azure tenant ID from the Azure Databricks login page.
+
+        If the tenant ID is already set, this method does nothing."""
+        if not self.is_azure or self.azure_tenant_id is not None or self.host is None:
+            return
+        login_url = f'{self.host}/aad/auth'
+        logger.debug(f'Loading tenant ID from {login_url}')
+        resp = requests.get(login_url, allow_redirects=False)
+        if resp.status_code // 100 != 3:
+            logger.debug(
+                f'Failed to get tenant ID from {login_url}: expected status code 3xx, got {resp.status_code}')
+            return
+        entra_id_endpoint = resp.headers.get('Location')
+        if entra_id_endpoint is None:
+            logger.debug(f'No Location header in response from {login_url}')
+            return
+        # The Location header has the following form: https://login.microsoftonline.com/<tenant-id>/oauth2/authorize?...
+        # The domain may change depending on the Azure cloud (e.g. login.microsoftonline.us for US Government cloud).
+        url = urllib.parse.urlparse(entra_id_endpoint)
+        path_segments = url.path.split('/')
+        if len(path_segments) < 2:
+            logger.debug(f'Invalid path in Location header: {url.path}')
+            return
+        self.azure_tenant_id = path_segments[1]
+        logger.debug(f'Loaded tenant ID: {self.azure_tenant_id}')
 
     def _set_inner_config(self, keyword_args: Dict[str, any]):
         for attr in self.attributes():
@@ -498,6 +477,13 @@ class Config:
         except ValueError as e:
             raise ValueError(f'{self._credentials_strategy.auth_type()} auth: {e}') from e
 
+    def _init_product(self, product, product_version):
+        if product is not None or product_version is not None:
+            default_product, default_version = useragent.product()
+            self._product_info = (product or default_product, product_version or default_version)
+        else:
+            self._product_info = None
+
     def __repr__(self):
         return f'<{self.debug_string()}>'
 
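The new `_fix_host_if_needed()` logic is easy to trace by hand. Below is a standalone sketch that mirrors the added lines above (it deliberately re-implements the steps with `urllib.parse` instead of constructing a `Config`, which would require credentials):

```python
import urllib.parse

def fix_host(host: str) -> str:
    # Add a default scheme if it's missing.
    if '://' not in host:
        host = 'https://' + host
    o = urllib.parse.urlparse(host)
    # Remove a trailing slash from the path and drop an explicit :443 port.
    path = o.path.rstrip('/')
    netloc = o.netloc
    if o.port == 443:
        netloc = netloc.split(':')[0]
    return urllib.parse.urlunparse((o.scheme, netloc, path, o.params, o.query, o.fragment))

print(fix_host('my-workspace.cloud.databricks.com:443/'))
# https://my-workspace.cloud.databricks.com
print(fix_host('https://my-workspace.cloud.databricks.com/'))
# https://my-workspace.cloud.databricks.com
```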