databricks-sdk 0.29.0__py3-none-any.whl → 0.31.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of databricks-sdk might be problematic. Click here for more details.
- databricks/sdk/__init__.py +89 -21
- databricks/sdk/config.py +61 -75
- databricks/sdk/core.py +16 -9
- databricks/sdk/credentials_provider.py +15 -15
- databricks/sdk/data_plane.py +65 -0
- databricks/sdk/errors/overrides.py +8 -0
- databricks/sdk/errors/platform.py +5 -0
- databricks/sdk/mixins/files.py +12 -4
- databricks/sdk/service/apps.py +977 -0
- databricks/sdk/service/billing.py +602 -218
- databricks/sdk/service/catalog.py +324 -34
- databricks/sdk/service/compute.py +766 -81
- databricks/sdk/service/dashboards.py +628 -18
- databricks/sdk/service/iam.py +99 -88
- databricks/sdk/service/jobs.py +332 -23
- databricks/sdk/service/marketplace.py +2 -122
- databricks/sdk/service/oauth2.py +127 -70
- databricks/sdk/service/pipelines.py +72 -52
- databricks/sdk/service/serving.py +303 -750
- databricks/sdk/service/settings.py +423 -4
- databricks/sdk/service/sharing.py +235 -25
- databricks/sdk/service/sql.py +2328 -544
- databricks/sdk/useragent.py +151 -0
- databricks/sdk/version.py +1 -1
- {databricks_sdk-0.29.0.dist-info → databricks_sdk-0.31.0.dist-info}/METADATA +36 -16
- {databricks_sdk-0.29.0.dist-info → databricks_sdk-0.31.0.dist-info}/RECORD +30 -27
- {databricks_sdk-0.29.0.dist-info → databricks_sdk-0.31.0.dist-info}/WHEEL +1 -1
- {databricks_sdk-0.29.0.dist-info → databricks_sdk-0.31.0.dist-info}/LICENSE +0 -0
- {databricks_sdk-0.29.0.dist-info → databricks_sdk-0.31.0.dist-info}/NOTICE +0 -0
- {databricks_sdk-0.29.0.dist-info → databricks_sdk-0.31.0.dist-info}/top_level.txt +0 -0
|
@@ -0,0 +1,151 @@
|
|
|
1
|
+
import copy
|
|
2
|
+
import logging
|
|
3
|
+
import os
|
|
4
|
+
import platform
|
|
5
|
+
import re
|
|
6
|
+
from typing import List, Optional, Tuple
|
|
7
|
+
|
|
8
|
+
from .version import __version__
|
|
9
|
+
|
|
10
|
+
# Constants
# Well-known User-Agent component keys recognised by Databricks telemetry.
RUNTIME_KEY = 'runtime'
CICD_KEY = 'cicd'
AUTH_KEY = 'auth'

# Global default product name/version reported in the User-Agent header.
# Overridden via with_product(); reset via _reset_product() in tests.
_product_name = "unknown"
_product_version = "0.0.0"

logger = logging.getLogger("databricks.sdk.useragent")

# Extra (key, value) pairs appended to every User-Agent header.
# Mutated by with_extra(); replaced wholesale by _reset_extra().
_extra: List[Tuple[str, str]] = []

# Precompiled regex patterns
# Characters permitted in User-Agent keys and simple (non-semver) values.
alphanum_pattern = re.compile(r'^[a-zA-Z0-9_.+-]+$')

# official https://semver.org/ recommendation: https://regex101.com/r/Ly7O1x/
# with addition of "x" wildcards for minor/patch versions. Also, patch version may be omitted.
semver_pattern = re.compile(r"^"
                            r"(?P<major>0|[1-9]\d*)\.(?P<minor>x|0|[1-9]\d*)(\.(?P<patch>x|0|[1-9x]\d*))?"
                            r"(?:-(?P<pre_release>(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*)"
                            r"(?:\.(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?"
                            r"(?:\+(?P<build>[0-9a-zA-Z-]+(?:\.[0-9a-zA-Z-]+)*))?$")
|
|
32
|
+
|
|
33
|
+
|
|
34
|
+
def _match_alphanum(value):
    """Validate that *value* contains only letters, digits, ``_``, ``.``, ``+`` or ``-``.

    Raises ``ValueError`` on any other character.
    """
    if alphanum_pattern.match(value) is None:
        raise ValueError(f"Invalid alphanumeric value: {value}")
|
|
37
|
+
|
|
38
|
+
|
|
39
|
+
def _match_semver(value):
    """Validate that *value* is a semantic version (with optional ``x`` wildcards).

    Raises ``ValueError`` when the module-level semver pattern does not match.
    """
    if semver_pattern.match(value) is None:
        raise ValueError(f"Invalid semantic version: {value}")
|
|
42
|
+
|
|
43
|
+
|
|
44
|
+
def _match_alphanum_or_semver(value):
    """Validate that *value* is either a plain alphanumeric token or a semantic version.

    Raises ``ValueError`` when neither pattern accepts *value*.
    """
    if alphanum_pattern.match(value) or semver_pattern.match(value):
        return
    raise ValueError(f"Invalid value: {value}")
|
|
47
|
+
|
|
48
|
+
|
|
49
|
+
def product() -> Tuple[str, str]:
    """Return the global (name, version) product pair submitted to Databricks on every request."""
    name, version = _product_name, _product_version
    return name, version
|
|
52
|
+
|
|
53
|
+
|
|
54
|
+
def with_product(name: str, version: str):
    """Replace the product name and version submitted to Databricks on every request.

    Raises ``ValueError`` if *name* is not alphanumeric or *version* is not a
    valid semantic version.
    """
    global _product_name, _product_version
    # Validate both inputs before touching any global state, so a failure
    # leaves the previously registered product intact.
    _match_alphanum(name)
    _match_semver(version)
    logger.debug(f'Changing product from {_product_name}/{_product_version} to {name}/{version}')
    _product_name, _product_version = name, version
|
|
62
|
+
|
|
63
|
+
|
|
64
|
+
def _reset_product():
    """[Internal API] Restore the default product name and version.

    Used for testing purposes only."""
    global _product_name, _product_version
    _product_name, _product_version = "unknown", "0.0.0"
|
|
71
|
+
|
|
72
|
+
|
|
73
|
+
def with_extra(key: str, value: str):
    """Append an extra (key, value) pair to the metadata sent with every request.

    Extra metadata lets tools built on top of the SDK provide additional
    context to Databricks (e.g. telemetry about which tool generated a
    request). Keys must be alphanumeric tokens; values must be alphanumeric
    tokens or semantic versions. Raises ``ValueError`` otherwise.
    """
    # Note: list mutation via append() does not require a `global` statement.
    _match_alphanum(key)
    _match_alphanum_or_semver(value)
    logger.debug(f'Adding {key}/{value} to User-Agent')
    _extra.append((key, value))
|
|
85
|
+
|
|
86
|
+
|
|
87
|
+
def extra() -> List[Tuple[str, str]]:
    """Return a defensive copy of the extra metadata sent to Databricks on every request."""
    snapshot = copy.deepcopy(_extra)
    return snapshot
|
|
90
|
+
|
|
91
|
+
|
|
92
|
+
def _reset_extra(extra: List[Tuple[str, str]]):
    """[INTERNAL API] Replace the extra-metadata list wholesale.

    Prefer with_user_agent_extra over this method: overwriting the list can
    discard information registered elsewhere in the process."""
    global _extra
    _extra = extra
|
|
99
|
+
|
|
100
|
+
|
|
101
|
+
def with_partner(partner: str):
    """Register *partner* in the metadata submitted to Databricks on every request.

    Thin wrapper over with_extra() using the fixed key ``partner``.
    """
    with_extra("partner", partner)
|
|
104
|
+
|
|
105
|
+
|
|
106
|
+
def _get_upstream_user_agent_info() -> List[Tuple[str, str]]:
    """[INTERNAL API] Return the upstream product and version if specified in the system environment.

    Both DATABRICKS_SDK_UPSTREAM and DATABRICKS_SDK_UPSTREAM_VERSION must be
    set to non-empty values; otherwise an empty list is returned.
    """
    upstream = os.getenv("DATABRICKS_SDK_UPSTREAM")
    upstream_version = os.getenv("DATABRICKS_SDK_UPSTREAM_VERSION")
    if upstream and upstream_version:
        return [("upstream", upstream), ("upstream-version", upstream_version)]
    return []
|
|
113
|
+
|
|
114
|
+
|
|
115
|
+
def _get_runtime_info() -> List[Tuple[str, str]]:
    """[INTERNAL API] Return the Databricks Runtime version if running on Databricks.

    Reads the DATABRICKS_RUNTIME_VERSION environment variable; returns an
    empty list when it is unset or empty.
    """
    # Use the module constant RUNTIME_KEY instead of repeating the literal
    # 'runtime', keeping the key consistent with the rest of the module.
    runtime_version = os.environ.get('DATABRICKS_RUNTIME_VERSION', '')
    if runtime_version:
        # Spaces/slashes would break the `key/value` User-Agent token format.
        return [(RUNTIME_KEY, _sanitize_header_value(runtime_version))]
    return []
|
|
123
|
+
|
|
124
|
+
|
|
125
|
+
def _sanitize_header_value(value: str) -> str:
    """Make *value* safe for a User-Agent token by mapping spaces and slashes to '-'.

    Spaces separate tokens and '/' separates key from value in the header, so
    neither may appear inside a value.
    """
    return value.translate(str.maketrans({' ': '-', '/': '-'}))
|
|
129
|
+
|
|
130
|
+
|
|
131
|
+
def to_string(alternate_product_info: Optional[Tuple[str, str]] = None,
              other_info: Optional[List[Tuple[str, str]]] = None) -> str:
    """Compute the full User-Agent header.

    The header is a space-separated sequence of ``key/value`` tokens, in
    order: the product (either *alternate_product_info* or the module-level
    default), the SDK version, the Python version, the OS name, any caller
    supplied *other_info*, the module-level extras, and finally the upstream
    and runtime components discovered from the environment.
    """
    pairs: List[Tuple[str, str]] = []
    if alternate_product_info:
        pairs.append(alternate_product_info)
    else:
        pairs.append((_product_name, _product_version))
    pairs.append(("databricks-sdk-py", __version__))
    pairs.append(("python", platform.python_version()))
    pairs.append(("os", platform.uname().system.lower()))
    if other_info:
        pairs.extend(other_info)
    pairs.extend(_extra)
    pairs.extend(_get_upstream_user_agent_info())
    pairs.extend(_get_runtime_info())
    return " ".join("/".join(pair) for pair in pairs)
|
databricks/sdk/version.py
CHANGED
|
@@ -1 +1 @@
|
|
|
1
|
-
__version__ = '0.
|
|
1
|
+
__version__ = '0.31.0'
|
|
@@ -1,13 +1,11 @@
|
|
|
1
1
|
Metadata-Version: 2.1
|
|
2
2
|
Name: databricks-sdk
|
|
3
|
-
Version: 0.
|
|
3
|
+
Version: 0.31.0
|
|
4
4
|
Summary: Databricks SDK for Python (Beta)
|
|
5
5
|
Home-page: https://databricks-sdk-py.readthedocs.io
|
|
6
6
|
Author: Serge Smertin
|
|
7
7
|
Author-email: serge.smertin@databricks.com
|
|
8
|
-
License: UNKNOWN
|
|
9
8
|
Keywords: databricks sdk
|
|
10
|
-
Platform: UNKNOWN
|
|
11
9
|
Classifier: Development Status :: 4 - Beta
|
|
12
10
|
Classifier: Intended Audience :: Developers
|
|
13
11
|
Classifier: Intended Audience :: Science/Research
|
|
@@ -24,24 +22,24 @@ Requires-Python: >=3.7
|
|
|
24
22
|
Description-Content-Type: text/markdown
|
|
25
23
|
License-File: LICENSE
|
|
26
24
|
License-File: NOTICE
|
|
27
|
-
Requires-Dist: google-auth (~=2.0)
|
|
28
25
|
Requires-Dist: requests (<3,>=2.28.1)
|
|
26
|
+
Requires-Dist: google-auth (~=2.0)
|
|
29
27
|
Provides-Extra: dev
|
|
28
|
+
Requires-Dist: pytest ; extra == 'dev'
|
|
29
|
+
Requires-Dist: pytest-cov ; extra == 'dev'
|
|
30
|
+
Requires-Dist: pytest-xdist ; extra == 'dev'
|
|
31
|
+
Requires-Dist: pytest-mock ; extra == 'dev'
|
|
32
|
+
Requires-Dist: yapf ; extra == 'dev'
|
|
33
|
+
Requires-Dist: pycodestyle ; extra == 'dev'
|
|
30
34
|
Requires-Dist: autoflake ; extra == 'dev'
|
|
31
|
-
Requires-Dist:
|
|
35
|
+
Requires-Dist: isort ; extra == 'dev'
|
|
36
|
+
Requires-Dist: wheel ; extra == 'dev'
|
|
32
37
|
Requires-Dist: ipython ; extra == 'dev'
|
|
33
38
|
Requires-Dist: ipywidgets ; extra == 'dev'
|
|
34
|
-
Requires-Dist:
|
|
35
|
-
Requires-Dist: pycodestyle ; extra == 'dev'
|
|
39
|
+
Requires-Dist: requests-mock ; extra == 'dev'
|
|
36
40
|
Requires-Dist: pyfakefs ; extra == 'dev'
|
|
37
|
-
Requires-Dist:
|
|
38
|
-
Requires-Dist: pytest-cov ; extra == 'dev'
|
|
39
|
-
Requires-Dist: pytest-mock ; extra == 'dev'
|
|
41
|
+
Requires-Dist: databricks-connect ; extra == 'dev'
|
|
40
42
|
Requires-Dist: pytest-rerunfailures ; extra == 'dev'
|
|
41
|
-
Requires-Dist: pytest-xdist ; extra == 'dev'
|
|
42
|
-
Requires-Dist: requests-mock ; extra == 'dev'
|
|
43
|
-
Requires-Dist: wheel ; extra == 'dev'
|
|
44
|
-
Requires-Dist: yapf ; extra == 'dev'
|
|
45
43
|
Provides-Extra: notebook
|
|
46
44
|
Requires-Dist: ipython (<9,>=8) ; extra == 'notebook'
|
|
47
45
|
Requires-Dist: ipywidgets (<9,>=8) ; extra == 'notebook'
|
|
@@ -78,6 +76,7 @@ The SDK's internal HTTP client is robust and handles failures on different level
|
|
|
78
76
|
- [Long-running operations](#long-running-operations)
|
|
79
77
|
- [Paginated responses](#paginated-responses)
|
|
80
78
|
- [Single-sign-on with OAuth](#single-sign-on-sso-with-oauth)
|
|
79
|
+
- [User Agent Request Attribution](#user-agent-request-attribution)
|
|
81
80
|
- [Error handling](#error-handling)
|
|
82
81
|
- [Logging](#logging)
|
|
83
82
|
- [Integration with `dbutils`](#interaction-with-dbutils)
|
|
@@ -556,6 +555,29 @@ logging.info(f'Created new custom app: '
|
|
|
556
555
|
f'--client_secret {custom_app.client_secret}')
|
|
557
556
|
```
|
|
558
557
|
|
|
558
|
+
## User Agent Request Attribution<a id="user-agent-request-attribution"></a>
|
|
559
|
+
|
|
560
|
+
The Databricks SDK for Python uses the `User-Agent` header to include request metadata along with each request. By default, this includes the version of the Python SDK, the version of the Python language used by your application, and the underlying operating system. To statically add additional metadata, you can use the `with_partner()` and `with_product()` functions in the `databricks.sdk.useragent` module. `with_partner()` can be used by partners to indicate that code using the Databricks SDK for Python should be attributed to a specific partner. Multiple partners can be registered at once. Partner names can contain any letter, digit, `.`, `-`, `_` or `+`.
|
|
561
|
+
|
|
562
|
+
```python
|
|
563
|
+
from databricks.sdk import useragent
|
|
564
|
+
useragent.with_partner("partner-abc")
|
|
565
|
+
useragent.with_partner("partner-xyz")
|
|
566
|
+
```
|
|
567
|
+
|
|
568
|
+
`with_product()` can be used to define the name and version of the product that is built with the Databricks SDK for Python. The product name has the same restrictions as the partner name above, and the product version must be a valid [SemVer](https://semver.org/). Subsequent calls to `with_product()` replace the original product with the new user-specified one.
|
|
569
|
+
|
|
570
|
+
```python
|
|
571
|
+
from databricks.sdk import useragent
|
|
572
|
+
useragent.with_product("databricks-example-product", "1.2.0")
|
|
573
|
+
```
|
|
574
|
+
|
|
575
|
+
If both the `DATABRICKS_SDK_UPSTREAM` and `DATABRICKS_SDK_UPSTREAM_VERSION` environment variables are defined, these will also be included in the `User-Agent` header.
|
|
576
|
+
|
|
577
|
+
If additional metadata needs to be specified that isn't already supported by the above interfaces, you can use the `with_user_agent_extra()` function to register arbitrary key-value pairs to include in the user agent. Multiple values associated with the same key are allowed. Keys have the same restrictions as the partner name above. Values must be either as described above or SemVer strings.
|
|
578
|
+
|
|
579
|
+
Additional `User-Agent` information can be associated with different instances of `DatabricksConfig`. To add metadata to a specific instance of `DatabricksConfig`, use the `with_user_agent_extra()` method.
|
|
580
|
+
|
|
559
581
|
## Error handling<a id="error-handling"></a>
|
|
560
582
|
|
|
561
583
|
The Databricks SDK for Python provides a robust error-handling mechanism that allows developers to catch and handle API errors. When an error occurs, the SDK will raise an exception that contains information about the error, such as the HTTP status code, error message, and error details. Developers can catch these exceptions and handle them appropriately in their code.
|
|
@@ -655,5 +677,3 @@ API clients for all services are generated from specification files that are syn
|
|
|
655
677
|
You are highly encouraged to pin the exact dependency version and read the [changelog](https://github.com/databricks/databricks-sdk-py/blob/main/CHANGELOG.md)
|
|
656
678
|
where Databricks documents the changes. Databricks may have minor [documented](https://github.com/databricks/databricks-sdk-py/blob/main/CHANGELOG.md)
|
|
657
679
|
backward-incompatible changes, such as renaming some type names to bring more consistency.
|
|
658
|
-
|
|
659
|
-
|
|
@@ -1,57 +1,60 @@
|
|
|
1
1
|
databricks/__init__.py,sha256=CF2MJcZFwbpn9TwQER8qnCDhkPooBGQNVkX4v7g6p3g,537
|
|
2
|
-
databricks/sdk/__init__.py,sha256=
|
|
2
|
+
databricks/sdk/__init__.py,sha256=2c9APHl0rcAMtOIc5M-RgVdsHmVVvghRtKvyY1Q_AnY,47821
|
|
3
3
|
databricks/sdk/_property.py,sha256=sGjsipeFrjMBSVPjtIb0HNCRcMIhFpVx6wq4BkC3LWs,1636
|
|
4
4
|
databricks/sdk/azure.py,sha256=8P7nEdun0hbQCap9Ojo7yZse_JHxnhYsE6ApojnPz7Q,1009
|
|
5
5
|
databricks/sdk/casing.py,sha256=NKYPrfPbQjM7lU4hhNQK3z1jb_VEA29BfH4FEdby2tg,1137
|
|
6
6
|
databricks/sdk/clock.py,sha256=Ivlow0r_TkXcTJ8UXkxSA0czKrY0GvwHAeOvjPkJnAQ,1360
|
|
7
|
-
databricks/sdk/config.py,sha256=
|
|
8
|
-
databricks/sdk/core.py,sha256=
|
|
9
|
-
databricks/sdk/credentials_provider.py,sha256=
|
|
7
|
+
databricks/sdk/config.py,sha256=FWEiIY34C_4Mmv8B9w284BR2FHug2T2ySpmtyg51ttA,21139
|
|
8
|
+
databricks/sdk/core.py,sha256=PWU2kTHXOF6x7i9_yRUFGj-iusr_Mo7awROiBpA9nJQ,20398
|
|
9
|
+
databricks/sdk/credentials_provider.py,sha256=V8QxVUvZmOfVjEpogNEzu5nUBXzRLevWfi-NiPaDOks,29232
|
|
10
|
+
databricks/sdk/data_plane.py,sha256=Er2z2fT-KVupJKzGozGGZ-jCQ3AmDWq-DZppahIK6tU,2591
|
|
10
11
|
databricks/sdk/dbutils.py,sha256=HFCuB-el6SFKhF8qRfJxYANtyLTm-VG9GtQuQgZXFkM,15741
|
|
11
12
|
databricks/sdk/environments.py,sha256=5KoVuVfF-ZX17rua1sH3EJCCtniVrREXBXsMNDEV-UU,4293
|
|
12
13
|
databricks/sdk/oauth.py,sha256=KzcJPYLL3JL6RDvf_Q8SDAaF9xSaoYNCRD4rYInZDuo,18319
|
|
13
14
|
databricks/sdk/py.typed,sha256=pSvaHpbY1UPNEXyVFUjlgBhjPFZMmVC_UNrPC7eMOHI,74
|
|
14
15
|
databricks/sdk/retries.py,sha256=WgLh12bwdBc6fCQlaig3kKu18cVhPzFDGsspvq629Ew,2454
|
|
15
|
-
databricks/sdk/
|
|
16
|
+
databricks/sdk/useragent.py,sha256=I2-VnJSE6cg9QV4GXkoQSkHsEB3bDvRGgkawbBNl4G0,5540
|
|
17
|
+
databricks/sdk/version.py,sha256=aHhOnzInPOGVCJ67D-A5ijrJDM_PjhpzFe3OpLz4xig,23
|
|
16
18
|
databricks/sdk/_widgets/__init__.py,sha256=Qm3JB8LmdPgEn_-VgxKkodTO4gn6OdaDPwsYcDmeIRI,2667
|
|
17
19
|
databricks/sdk/_widgets/default_widgets_utils.py,sha256=Rk59AFzVYVpOektB_yC_7j-vSt5OdtZA85IlG0kw0xA,1202
|
|
18
20
|
databricks/sdk/_widgets/ipywidgets_utils.py,sha256=P-AyGeahPiX3S59mxpAMgffi4gyJ0irEOY7Ekkn9nQ0,2850
|
|
19
21
|
databricks/sdk/errors/__init__.py,sha256=3l_wHB0S9Y6mDc_c5mUHb-TndDQxa-tdPeWmTbnBNAo,176
|
|
20
22
|
databricks/sdk/errors/base.py,sha256=oawBxpuoyImsLu29ntpAgOc6RQ7kD-UcuFFER9jB3iI,3880
|
|
21
23
|
databricks/sdk/errors/mapper.py,sha256=sK4aoloV-F8h1J4YHFrcNVAUBLLQQFti-ceXVmm6HpU,1386
|
|
22
|
-
databricks/sdk/errors/overrides.py,sha256=
|
|
23
|
-
databricks/sdk/errors/platform.py,sha256=
|
|
24
|
+
databricks/sdk/errors/overrides.py,sha256=u1fZ1X2gPRv_zf1u_4EqVzbWHiFsPzm_X0sMNOCMwAE,1649
|
|
25
|
+
databricks/sdk/errors/platform.py,sha256=0EwGUTcmoobAK41KsFAnRkT6AlOY_umzr4jWEgd-6hY,3113
|
|
24
26
|
databricks/sdk/errors/private_link.py,sha256=6wVRJQqousGQC7qfT0pV8LqujqfR3XLbSix_XjqVC8s,2304
|
|
25
27
|
databricks/sdk/errors/sdk.py,sha256=_euMruhvquB0v_SKtgqxJUiyXHWuTb4Jl7ji6_h0E_A,109
|
|
26
28
|
databricks/sdk/mixins/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
27
29
|
databricks/sdk/mixins/compute.py,sha256=khb00BzBckc4RLUF4-GnNMCSO5lXKt_XYMM3IhiUxlA,11237
|
|
28
|
-
databricks/sdk/mixins/files.py,sha256=
|
|
30
|
+
databricks/sdk/mixins/files.py,sha256=bLGFu1kVIQECTmuc_9jUf-n_Cth4COBMbmKqAYxkEkM,20542
|
|
29
31
|
databricks/sdk/mixins/workspace.py,sha256=dWMNvuEi8jJ5wMhrDt1LiqxNdWSsmEuDTzrcZR-eJzY,4896
|
|
30
32
|
databricks/sdk/runtime/__init__.py,sha256=9NnZkBzeZXZRQxcE1qKzAszQEzcpIgpL7lQzW3_kxEU,7266
|
|
31
33
|
databricks/sdk/runtime/dbutils_stub.py,sha256=UFbRZF-bBcwxjbv_pxma00bjNtktLLaYpo8oHRc4-9g,11421
|
|
32
34
|
databricks/sdk/service/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
33
35
|
databricks/sdk/service/_internal.py,sha256=nWbJfW5eJCQgAZ3TmA26xoWb6SNZ5N76ZA8bO1N4AsU,1961
|
|
34
|
-
databricks/sdk/service/
|
|
35
|
-
databricks/sdk/service/
|
|
36
|
-
databricks/sdk/service/
|
|
37
|
-
databricks/sdk/service/
|
|
36
|
+
databricks/sdk/service/apps.py,sha256=536HvScC96edq9EXMUcyVh1h2jE5zCeCMa_l7HZiu20,38170
|
|
37
|
+
databricks/sdk/service/billing.py,sha256=Ru6GumI-M4_X71HTMj2VSVBQ7tRMTrwKzhdwNyiC3fA,69733
|
|
38
|
+
databricks/sdk/service/catalog.py,sha256=pJ3LWcHaljhfXKAJoHnbJpaiSOUpGymsWLHfSYaDOdo,430741
|
|
39
|
+
databricks/sdk/service/compute.py,sha256=u8iVnqGFS7WGrLYQ0CrhwQEXs0WkjoPbHsHeVA6-h6g,433402
|
|
40
|
+
databricks/sdk/service/dashboards.py,sha256=-nrRkNLBDJU4w0OAy-EjZkI8I5ktYqX475YTAW65u9I,76081
|
|
38
41
|
databricks/sdk/service/files.py,sha256=VCt83YSI9rhQexmxaQdrUXHq2UCYfZcDMLvJx5X6n1M,38162
|
|
39
|
-
databricks/sdk/service/iam.py,sha256=
|
|
40
|
-
databricks/sdk/service/jobs.py,sha256=
|
|
41
|
-
databricks/sdk/service/marketplace.py,sha256=
|
|
42
|
+
databricks/sdk/service/iam.py,sha256=fj1RQtCdg8E8oUt1SEnm6PzMR6UB-jaCn8M354KiB-o,148500
|
|
43
|
+
databricks/sdk/service/jobs.py,sha256=c9m2GoNfMBCGSfr82MZd6zf2aDIza6Ip0jaaW8E_3hQ,318698
|
|
44
|
+
databricks/sdk/service/marketplace.py,sha256=Fgk_8V9zbQ8QcNPUw-yZehHv8LgnDtFJUe-YixjxkYo,136405
|
|
42
45
|
databricks/sdk/service/ml.py,sha256=vohBdESClI3EOpO-ZZ44W-CMz1alq5Tw4oJnWa99Z2M,236128
|
|
43
|
-
databricks/sdk/service/oauth2.py,sha256=
|
|
44
|
-
databricks/sdk/service/pipelines.py,sha256=
|
|
46
|
+
databricks/sdk/service/oauth2.py,sha256=67pr6gUnYwO6BaGNQfjW1qvcEB3ejdNbI9Pmvqs5bSE,39928
|
|
47
|
+
databricks/sdk/service/pipelines.py,sha256=tGCo1F3tW1GxB9Q63qsh2AyisJmXqYSsGkJK0OdS06Q,119378
|
|
45
48
|
databricks/sdk/service/provisioning.py,sha256=DP4Df4X-p0JEUk4zAJQhjX_wxpMi673OKLXFhxl6YSE,142678
|
|
46
|
-
databricks/sdk/service/serving.py,sha256=
|
|
47
|
-
databricks/sdk/service/settings.py,sha256=
|
|
48
|
-
databricks/sdk/service/sharing.py,sha256=
|
|
49
|
-
databricks/sdk/service/sql.py,sha256=
|
|
49
|
+
databricks/sdk/service/serving.py,sha256=BfShf0ceupXgLccU5zp1CZyBW1260Ga73USM2T5KxXs,140008
|
|
50
|
+
databricks/sdk/service/settings.py,sha256=7PXxsrXUe7exM35O7_iUp9r78zn5oGnPbhX_sh3v1_0,193732
|
|
51
|
+
databricks/sdk/service/sharing.py,sha256=kalJYd0v1SwuGhlCaq4l2ZhzNlev9OwNbCXFIOKIMXU,113253
|
|
52
|
+
databricks/sdk/service/sql.py,sha256=RaXIYMDtHbhvB7gtSMyvQsqiO_E0cMz5NXeTsrqtPVk,334558
|
|
50
53
|
databricks/sdk/service/vectorsearch.py,sha256=ZfiTEpTNg8nnzPuw24MeiDn8eq6PHmEWqTHS0zdDdEo,62484
|
|
51
54
|
databricks/sdk/service/workspace.py,sha256=FKLf5esRmfFstIXo7HQg6HQCzQ2svrb6ulr8yzZ7-8U,101182
|
|
52
|
-
databricks_sdk-0.
|
|
53
|
-
databricks_sdk-0.
|
|
54
|
-
databricks_sdk-0.
|
|
55
|
-
databricks_sdk-0.
|
|
56
|
-
databricks_sdk-0.
|
|
57
|
-
databricks_sdk-0.
|
|
55
|
+
databricks_sdk-0.31.0.dist-info/LICENSE,sha256=afBgTZo-JsYqj4VOjnejBetMuHKcFR30YobDdpVFkqY,11411
|
|
56
|
+
databricks_sdk-0.31.0.dist-info/METADATA,sha256=MM4NlZk11KyaPzRoCsIQwAQ6cwOviQ8e7nXSd92TjTU,37967
|
|
57
|
+
databricks_sdk-0.31.0.dist-info/NOTICE,sha256=Qnc0m8JjZNTDV80y0h1aJGvsr4GqM63m1nr2VTypg6E,963
|
|
58
|
+
databricks_sdk-0.31.0.dist-info/WHEEL,sha256=Mdi9PDNwEZptOjTlUcAth7XJDFtKrHYaQMPulZeBCiQ,91
|
|
59
|
+
databricks_sdk-0.31.0.dist-info/top_level.txt,sha256=7kRdatoSgU0EUurRQJ_3F1Nv4EOSHWAr6ng25tJOJKU,11
|
|
60
|
+
databricks_sdk-0.31.0.dist-info/RECORD,,
|
|
File without changes
|
|
File without changes
|
|
File without changes
|