databricks-sdk 0.29.0__py3-none-any.whl → 0.30.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of databricks-sdk might be problematic. Click here for more details.

@@ -0,0 +1,144 @@
1
+ import copy
2
+ import logging
3
+ import os
4
+ import platform
5
+ import re
6
+ from typing import List, Optional, Tuple
7
+
8
+ from .version import __version__
9
+
10
+ # Constants
11
+ RUNTIME_KEY = 'runtime'
12
+ CICD_KEY = 'cicd'
13
+ AUTH_KEY = 'auth'
14
+
15
+ _product_name = "unknown"
16
+ _product_version = "0.0.0"
17
+
18
+ logger = logging.getLogger("databricks.sdk.useragent")
19
+
20
+ _extra = []
21
+
22
+ # Precompiled regex patterns
23
+ alphanum_pattern = re.compile(r'^[a-zA-Z0-9_.+-]+$')
24
+ semver_pattern = re.compile(r'^v?(\d+\.)?(\d+\.)?(\*|\d+)$')
25
+
26
+
27
+ def _match_alphanum(value):
28
+ if not alphanum_pattern.match(value):
29
+ raise ValueError(f"Invalid alphanumeric value: {value}")
30
+
31
+
32
+ def _match_semver(value):
33
+ if not semver_pattern.match(value):
34
+ raise ValueError(f"Invalid semantic version: {value}")
35
+
36
+
37
+ def _match_alphanum_or_semver(value):
38
+ if not alphanum_pattern.match(value) and not semver_pattern.match(value):
39
+ raise ValueError(f"Invalid value: {value}")
40
+
41
+
42
+ def product() -> Tuple[str, str]:
43
+ """Return the global product name and version that will be submitted to Databricks on every request."""
44
+ return _product_name, _product_version
45
+
46
+
47
+ def with_product(name: str, version: str):
48
+ """Change the product name and version that will be submitted to Databricks on every request."""
49
+ global _product_name, _product_version
50
+ _match_alphanum(name)
51
+ _match_semver(version)
52
+ logger.debug(f'Changing product from {_product_name}/{_product_version} to {name}/{version}')
53
+ _product_name = name
54
+ _product_version = version
55
+
56
+
57
+ def _reset_product():
58
+ """[Internal API] Reset product name and version to the default values.
59
+
60
+ Used for testing purposes only."""
61
+ global _product_name, _product_version
62
+ _product_name = "unknown"
63
+ _product_version = "0.0.0"
64
+
65
+
66
+ def with_extra(key: str, value: str):
67
+ """Add extra metadata to all requests submitted to Databricks.
68
+
69
+ User-specified extra metadata can be inserted into request headers to provide additional context to Databricks
70
+ about usage of different tools in the Databricks ecosystem. This can be useful for collecting telemetry about SDK
71
+ usage from tools that are built on top of the SDK.
72
+ """
73
+ global _extra
74
+ _match_alphanum(key)
75
+ _match_alphanum_or_semver(value)
76
+ logger.debug(f'Adding {key}/{value} to User-Agent')
77
+ _extra.append((key, value))
78
+
79
+
80
+ def extra() -> List[Tuple[str, str]]:
81
+ """Returns the current extra metadata that will be submitted to Databricks on every request."""
82
+ return copy.deepcopy(_extra)
83
+
84
+
85
+ def _reset_extra(extra: List[Tuple[str, str]]):
86
+ """[INTERNAL API] Reset the extra metadata to a new list.
87
+
88
+ Prefer using with_user_agent_extra instead of this method to avoid overwriting other information included in the
89
+ user agent."""
90
+ global _extra
91
+ _extra = extra
92
+
93
+
94
+ def with_partner(partner: str):
95
+ """Adds the given partner to the metadata submitted to Databricks on every request."""
96
+ with_extra("partner", partner)
97
+
98
+
99
+ def _get_upstream_user_agent_info() -> List[Tuple[str, str]]:
100
+ """[INTERNAL API] Return the upstream product and version if specified in the system environment."""
101
+ product = os.getenv("DATABRICKS_SDK_UPSTREAM")
102
+ version = os.getenv("DATABRICKS_SDK_UPSTREAM_VERSION")
103
+ if not product or not version:
104
+ return []
105
+ return [("upstream", product), ("upstream-version", version)]
106
+
107
+
108
+ def _get_runtime_info() -> List[Tuple[str, str]]:
109
+ """[INTERNAL API] Return the runtime version if running on Databricks."""
110
+ if 'DATABRICKS_RUNTIME_VERSION' in os.environ:
111
+ runtime_version = os.environ['DATABRICKS_RUNTIME_VERSION']
112
+ if runtime_version != '':
113
+ runtime_version = _sanitize_header_value(runtime_version)
114
+ return [('runtime', runtime_version)]
115
+ return []
116
+
117
+
118
+ def _sanitize_header_value(value: str) -> str:
119
+ value = value.replace(' ', '-')
120
+ value = value.replace('/', '-')
121
+ return value
122
+
123
+
124
+ def to_string(alternate_product_info: Optional[Tuple[str, str]] = None,
125
+ other_info: Optional[List[Tuple[str, str]]] = None) -> str:
126
+ """Compute the full User-Agent header.
127
+
128
+ The User-Agent header contains the product name, version, and other metadata that is submitted to Databricks on
129
+ every request. There are some static components that are included by default in every request, like the SDK version,
130
+ OS name, and Python version. Other components can be optionally overridden or augmented in DatabricksConfig, like
131
+ the product name, product version, and extra user-defined information."""
132
+ base = []
133
+ if alternate_product_info:
134
+ base.append(alternate_product_info)
135
+ else:
136
+ base.append((_product_name, _product_version))
137
+ base.extend([("databricks-sdk-py", __version__), ("python", platform.python_version()),
138
+ ("os", platform.uname().system.lower()), ])
139
+ if other_info:
140
+ base.extend(other_info)
141
+ base.extend(_extra)
142
+ base.extend(_get_upstream_user_agent_info())
143
+ base.extend(_get_runtime_info())
144
+ return " ".join(f"{k}/{v}" for k, v in base)
databricks/sdk/version.py CHANGED
@@ -1 +1 @@
1
- __version__ = '0.29.0'
1
+ __version__ = '0.30.0'
@@ -1,13 +1,11 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: databricks-sdk
3
- Version: 0.29.0
3
+ Version: 0.30.0
4
4
  Summary: Databricks SDK for Python (Beta)
5
5
  Home-page: https://databricks-sdk-py.readthedocs.io
6
6
  Author: Serge Smertin
7
7
  Author-email: serge.smertin@databricks.com
8
- License: UNKNOWN
9
8
  Keywords: databricks sdk
10
- Platform: UNKNOWN
11
9
  Classifier: Development Status :: 4 - Beta
12
10
  Classifier: Intended Audience :: Developers
13
11
  Classifier: Intended Audience :: Science/Research
@@ -24,24 +22,24 @@ Requires-Python: >=3.7
24
22
  Description-Content-Type: text/markdown
25
23
  License-File: LICENSE
26
24
  License-File: NOTICE
27
- Requires-Dist: google-auth (~=2.0)
28
25
  Requires-Dist: requests (<3,>=2.28.1)
26
+ Requires-Dist: google-auth (~=2.0)
29
27
  Provides-Extra: dev
28
+ Requires-Dist: pytest ; extra == 'dev'
29
+ Requires-Dist: pytest-cov ; extra == 'dev'
30
+ Requires-Dist: pytest-xdist ; extra == 'dev'
31
+ Requires-Dist: pytest-mock ; extra == 'dev'
32
+ Requires-Dist: yapf ; extra == 'dev'
33
+ Requires-Dist: pycodestyle ; extra == 'dev'
30
34
  Requires-Dist: autoflake ; extra == 'dev'
31
- Requires-Dist: databricks-connect ; extra == 'dev'
35
+ Requires-Dist: isort ; extra == 'dev'
36
+ Requires-Dist: wheel ; extra == 'dev'
32
37
  Requires-Dist: ipython ; extra == 'dev'
33
38
  Requires-Dist: ipywidgets ; extra == 'dev'
34
- Requires-Dist: isort ; extra == 'dev'
35
- Requires-Dist: pycodestyle ; extra == 'dev'
39
+ Requires-Dist: requests-mock ; extra == 'dev'
36
40
  Requires-Dist: pyfakefs ; extra == 'dev'
37
- Requires-Dist: pytest ; extra == 'dev'
38
- Requires-Dist: pytest-cov ; extra == 'dev'
39
- Requires-Dist: pytest-mock ; extra == 'dev'
41
+ Requires-Dist: databricks-connect ; extra == 'dev'
40
42
  Requires-Dist: pytest-rerunfailures ; extra == 'dev'
41
- Requires-Dist: pytest-xdist ; extra == 'dev'
42
- Requires-Dist: requests-mock ; extra == 'dev'
43
- Requires-Dist: wheel ; extra == 'dev'
44
- Requires-Dist: yapf ; extra == 'dev'
45
43
  Provides-Extra: notebook
46
44
  Requires-Dist: ipython (<9,>=8) ; extra == 'notebook'
47
45
  Requires-Dist: ipywidgets (<9,>=8) ; extra == 'notebook'
@@ -78,6 +76,7 @@ The SDK's internal HTTP client is robust and handles failures on different level
78
76
  - [Long-running operations](#long-running-operations)
79
77
  - [Paginated responses](#paginated-responses)
80
78
  - [Single-sign-on with OAuth](#single-sign-on-sso-with-oauth)
79
+ - [User Agent Request Attribution](#user-agent-request-attribution)
81
80
  - [Error handling](#error-handling)
82
81
  - [Logging](#logging)
83
82
  - [Integration with `dbutils`](#interaction-with-dbutils)
@@ -556,6 +555,29 @@ logging.info(f'Created new custom app: '
556
555
  f'--client_secret {custom_app.client_secret}')
557
556
  ```
558
557
 
558
+ ## User Agent Request Attribution<a id="user-agent-request-attribution"></a>
559
+
560
+ The Databricks SDK for Python uses the `User-Agent` header to include request metadata along with each request. By default, this includes the version of the Python SDK, the version of the Python language used by your application, and the underlying operating system. To statically add additional metadata, you can use the `with_partner()` and `with_product()` functions in the `databricks.sdk.useragent` module. `with_partner()` can be used by partners to indicate that code using the Databricks SDK for Python should be attributed to a specific partner. Multiple partners can be registered at once. Partner names can contain any letter, digit, `.`, `-`, `_` or `+`.
561
+
562
+ ```python
563
+ from databricks.sdk import useragent
564
+ useragent.with_partner("partner-abc")
565
+ useragent.with_partner("partner-xyz")
566
+ ```
567
+
568
+ `with_product()` can be used to define the name and version of the product that is built with the Databricks SDK for Python. The product name has the same restrictions as the partner name above, and the product version must be a valid [SemVer](https://semver.org/). Subsequent calls to `with_product()` replace the original product with the new user-specified one.
569
+
570
+ ```python
571
+ from databricks.sdk import useragent
572
+ useragent.with_product("databricks-example-product", "1.2.0")
573
+ ```
574
+
575
+ If both the `DATABRICKS_SDK_UPSTREAM` and `DATABRICKS_SDK_UPSTREAM_VERSION` environment variables are defined, these will also be included in the `User-Agent` header.
576
+
577
+ If additional metadata needs to be specified that isn't already supported by the above interfaces, you can use the `with_extra()` function in the `databricks.sdk.useragent` module to register arbitrary key-value pairs to include in the user agent. Multiple values associated with the same key are allowed. Keys have the same restrictions as the partner name above. Values must either satisfy those same restrictions or be valid SemVer strings.
578
+
579
+ Additional `User-Agent` information can be associated with different instances of `DatabricksConfig`. To add metadata to a specific instance of `DatabricksConfig`, use the `with_user_agent_extra()` method.
580
+
559
581
  ## Error handling<a id="error-handling"></a>
560
582
 
561
583
  The Databricks SDK for Python provides a robust error-handling mechanism that allows developers to catch and handle API errors. When an error occurs, the SDK will raise an exception that contains information about the error, such as the HTTP status code, error message, and error details. Developers can catch these exceptions and handle them appropriately in their code.
@@ -655,5 +677,3 @@ API clients for all services are generated from specification files that are syn
655
677
  You are highly encouraged to pin the exact dependency version and read the [changelog](https://github.com/databricks/databricks-sdk-py/blob/main/CHANGELOG.md)
656
678
  where Databricks documents the changes. Databricks may have minor [documented](https://github.com/databricks/databricks-sdk-py/blob/main/CHANGELOG.md)
657
679
  backward-incompatible changes, such as renaming some type names to bring more consistency.
658
-
659
-
@@ -1,18 +1,20 @@
1
1
  databricks/__init__.py,sha256=CF2MJcZFwbpn9TwQER8qnCDhkPooBGQNVkX4v7g6p3g,537
2
- databricks/sdk/__init__.py,sha256=BvgOwu_sZ7kMVO0ukpgMLnAxKHkGnY5_4QeBr-AdFt8,44140
2
+ databricks/sdk/__init__.py,sha256=I1YtsgPGK82d8EknfaKJLmp950-rg2kdDpl-forqUfA,46636
3
3
  databricks/sdk/_property.py,sha256=sGjsipeFrjMBSVPjtIb0HNCRcMIhFpVx6wq4BkC3LWs,1636
4
4
  databricks/sdk/azure.py,sha256=8P7nEdun0hbQCap9Ojo7yZse_JHxnhYsE6ApojnPz7Q,1009
5
5
  databricks/sdk/casing.py,sha256=NKYPrfPbQjM7lU4hhNQK3z1jb_VEA29BfH4FEdby2tg,1137
6
6
  databricks/sdk/clock.py,sha256=Ivlow0r_TkXcTJ8UXkxSA0czKrY0GvwHAeOvjPkJnAQ,1360
7
- databricks/sdk/config.py,sha256=sBIWdZHaL3WfJz9gVrlxSRiQpe_tPBMpWA0aaO4bHD4,21745
8
- databricks/sdk/core.py,sha256=R9PeiAhPMapuC9S5Mw6UmpI2MDp4E32-NzhWm4oFAck,20018
9
- databricks/sdk/credentials_provider.py,sha256=RVyP0IQbiUbXeUTv2IPmweRzvrgyEhmc6VB0zuetR2I,29204
7
+ databricks/sdk/config.py,sha256=FWEiIY34C_4Mmv8B9w284BR2FHug2T2ySpmtyg51ttA,21139
8
+ databricks/sdk/core.py,sha256=PWU2kTHXOF6x7i9_yRUFGj-iusr_Mo7awROiBpA9nJQ,20398
9
+ databricks/sdk/credentials_provider.py,sha256=V8QxVUvZmOfVjEpogNEzu5nUBXzRLevWfi-NiPaDOks,29232
10
+ databricks/sdk/data_plane.py,sha256=Er2z2fT-KVupJKzGozGGZ-jCQ3AmDWq-DZppahIK6tU,2591
10
11
  databricks/sdk/dbutils.py,sha256=HFCuB-el6SFKhF8qRfJxYANtyLTm-VG9GtQuQgZXFkM,15741
11
12
  databricks/sdk/environments.py,sha256=5KoVuVfF-ZX17rua1sH3EJCCtniVrREXBXsMNDEV-UU,4293
12
13
  databricks/sdk/oauth.py,sha256=KzcJPYLL3JL6RDvf_Q8SDAaF9xSaoYNCRD4rYInZDuo,18319
13
14
  databricks/sdk/py.typed,sha256=pSvaHpbY1UPNEXyVFUjlgBhjPFZMmVC_UNrPC7eMOHI,74
14
15
  databricks/sdk/retries.py,sha256=WgLh12bwdBc6fCQlaig3kKu18cVhPzFDGsspvq629Ew,2454
15
- databricks/sdk/version.py,sha256=mXICichpk9VjfOW_vwM-zwKsrhkYyfYKUsV4l3tgiYM,23
16
+ databricks/sdk/useragent.py,sha256=8yXu6l2mGZR6874u9Jtn2qNSDuNDEp4jzh0CbJ9yssw,5028
17
+ databricks/sdk/version.py,sha256=dSJBJnSMHNhD9IFG9yG2hFHceWiae5G4B2UFREtze5M,23
16
18
  databricks/sdk/_widgets/__init__.py,sha256=Qm3JB8LmdPgEn_-VgxKkodTO4gn6OdaDPwsYcDmeIRI,2667
17
19
  databricks/sdk/_widgets/default_widgets_utils.py,sha256=Rk59AFzVYVpOektB_yC_7j-vSt5OdtZA85IlG0kw0xA,1202
18
20
  databricks/sdk/_widgets/ipywidgets_utils.py,sha256=P-AyGeahPiX3S59mxpAMgffi4gyJ0irEOY7Ekkn9nQ0,2850
@@ -25,33 +27,34 @@ databricks/sdk/errors/private_link.py,sha256=6wVRJQqousGQC7qfT0pV8LqujqfR3XLbSix
25
27
  databricks/sdk/errors/sdk.py,sha256=_euMruhvquB0v_SKtgqxJUiyXHWuTb4Jl7ji6_h0E_A,109
26
28
  databricks/sdk/mixins/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
27
29
  databricks/sdk/mixins/compute.py,sha256=khb00BzBckc4RLUF4-GnNMCSO5lXKt_XYMM3IhiUxlA,11237
28
- databricks/sdk/mixins/files.py,sha256=8Nh4TAB0BASZfTylW1P93_Sj_-zBlzEZ6CN38x2UoTQ,20205
30
+ databricks/sdk/mixins/files.py,sha256=bLGFu1kVIQECTmuc_9jUf-n_Cth4COBMbmKqAYxkEkM,20542
29
31
  databricks/sdk/mixins/workspace.py,sha256=dWMNvuEi8jJ5wMhrDt1LiqxNdWSsmEuDTzrcZR-eJzY,4896
30
32
  databricks/sdk/runtime/__init__.py,sha256=9NnZkBzeZXZRQxcE1qKzAszQEzcpIgpL7lQzW3_kxEU,7266
31
33
  databricks/sdk/runtime/dbutils_stub.py,sha256=UFbRZF-bBcwxjbv_pxma00bjNtktLLaYpo8oHRc4-9g,11421
32
34
  databricks/sdk/service/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
33
35
  databricks/sdk/service/_internal.py,sha256=nWbJfW5eJCQgAZ3TmA26xoWb6SNZ5N76ZA8bO1N4AsU,1961
34
- databricks/sdk/service/billing.py,sha256=Hbe5bMsBrpebuAl8yj-GwVRGktrzKwiZJj3gq1wUMaI,50625
35
- databricks/sdk/service/catalog.py,sha256=m69PNTCN8AmvYeU5NFKQHIf6JsPLkNm4Q9Vj_bm2mlE,416701
36
- databricks/sdk/service/compute.py,sha256=Wn0M4irami1Qy6wUoyU8Ub_CbSwsSVUF_bdHqUpEh0M,398609
37
- databricks/sdk/service/dashboards.py,sha256=Ac7K8ekefbfXyZ2ITq7h7uN1MRjCHC6mQoVPcCFgPX8,48962
36
+ databricks/sdk/service/apps.py,sha256=536HvScC96edq9EXMUcyVh1h2jE5zCeCMa_l7HZiu20,38170
37
+ databricks/sdk/service/billing.py,sha256=Ru6GumI-M4_X71HTMj2VSVBQ7tRMTrwKzhdwNyiC3fA,69733
38
+ databricks/sdk/service/catalog.py,sha256=DT-T8vSdHN3GL_dBUDp2fFS6ypNIgqVxtlKijw-fpUc,421856
39
+ databricks/sdk/service/compute.py,sha256=2CkQPllHlpVZGWXCWIe9bcVcQyDsldfE4kmiXFoF7Jc,420685
40
+ databricks/sdk/service/dashboards.py,sha256=SqaySudMTniP3AIMW5Lx2v6k0yxwVtIEZQzEJNPW3-Q,75352
38
41
  databricks/sdk/service/files.py,sha256=VCt83YSI9rhQexmxaQdrUXHq2UCYfZcDMLvJx5X6n1M,38162
39
- databricks/sdk/service/iam.py,sha256=11L45bjOYwzxMVlAXpKrFMOxrZzgZy75JSIOkeAXuFg,147645
40
- databricks/sdk/service/jobs.py,sha256=v9BoJ0k0BRr0dAjmwX_BQ0Ao8n-gU5t7rhrF1qc0KE4,303103
41
- databricks/sdk/service/marketplace.py,sha256=0xNm1Ob7G6Le5ieG7meJpBbmkt18gko1bhmORG9DwoM,141176
42
+ databricks/sdk/service/iam.py,sha256=fj1RQtCdg8E8oUt1SEnm6PzMR6UB-jaCn8M354KiB-o,148500
43
+ databricks/sdk/service/jobs.py,sha256=Jc2PF58kvpDuHOTVfwQQAJsdR_RJ9Q7qDDtr4Hpqggo,304259
44
+ databricks/sdk/service/marketplace.py,sha256=Fgk_8V9zbQ8QcNPUw-yZehHv8LgnDtFJUe-YixjxkYo,136405
42
45
  databricks/sdk/service/ml.py,sha256=vohBdESClI3EOpO-ZZ44W-CMz1alq5Tw4oJnWa99Z2M,236128
43
- databricks/sdk/service/oauth2.py,sha256=yBY6S_rI2ottFjttYDDijjyoAWFndwfqFC50sdimcSY,37100
44
- databricks/sdk/service/pipelines.py,sha256=Zp-ogtJl2VN5ssi65ii2wI2nbOYPE8Qdhgp36aUitfo,118753
46
+ databricks/sdk/service/oauth2.py,sha256=67pr6gUnYwO6BaGNQfjW1qvcEB3ejdNbI9Pmvqs5bSE,39928
47
+ databricks/sdk/service/pipelines.py,sha256=tGCo1F3tW1GxB9Q63qsh2AyisJmXqYSsGkJK0OdS06Q,119378
45
48
  databricks/sdk/service/provisioning.py,sha256=DP4Df4X-p0JEUk4zAJQhjX_wxpMi673OKLXFhxl6YSE,142678
46
- databricks/sdk/service/serving.py,sha256=Hg5v87ap49j4H5B6_RKe8auGKDamWskp1PQTBDCCwQ0,150238
47
- databricks/sdk/service/settings.py,sha256=7c_gSsOZtTpmSGE-OVKMnkpbc5wqoN447zX-vyDhrJk,177409
48
- databricks/sdk/service/sharing.py,sha256=dtMJdIhGLh8a70Aq55dqGsUG12ykiz_7zgEvg2Iw42A,100135
49
- databricks/sdk/service/sql.py,sha256=v8oP0sA_mix3ftSujtaVG4tfXGnSHeBJbA7c6iWW-1c,262958
49
+ databricks/sdk/service/serving.py,sha256=BfShf0ceupXgLccU5zp1CZyBW1260Ga73USM2T5KxXs,140008
50
+ databricks/sdk/service/settings.py,sha256=7PXxsrXUe7exM35O7_iUp9r78zn5oGnPbhX_sh3v1_0,193732
51
+ databricks/sdk/service/sharing.py,sha256=kalJYd0v1SwuGhlCaq4l2ZhzNlev9OwNbCXFIOKIMXU,113253
52
+ databricks/sdk/service/sql.py,sha256=b8FoGZnwHqxDb2FQr_XOeegFVqDEfBXZH318NpRVtdU,337519
50
53
  databricks/sdk/service/vectorsearch.py,sha256=ZfiTEpTNg8nnzPuw24MeiDn8eq6PHmEWqTHS0zdDdEo,62484
51
54
  databricks/sdk/service/workspace.py,sha256=FKLf5esRmfFstIXo7HQg6HQCzQ2svrb6ulr8yzZ7-8U,101182
52
- databricks_sdk-0.29.0.dist-info/LICENSE,sha256=afBgTZo-JsYqj4VOjnejBetMuHKcFR30YobDdpVFkqY,11411
53
- databricks_sdk-0.29.0.dist-info/METADATA,sha256=4w5fp5admIsLz4trTedAhLYAvDfpqmm_e75ZIs3qsMc,35819
54
- databricks_sdk-0.29.0.dist-info/NOTICE,sha256=Qnc0m8JjZNTDV80y0h1aJGvsr4GqM63m1nr2VTypg6E,963
55
- databricks_sdk-0.29.0.dist-info/WHEEL,sha256=G16H4A3IeoQmnOrYV4ueZGKSjhipXx8zc8nu9FGlvMA,92
56
- databricks_sdk-0.29.0.dist-info/top_level.txt,sha256=7kRdatoSgU0EUurRQJ_3F1Nv4EOSHWAr6ng25tJOJKU,11
57
- databricks_sdk-0.29.0.dist-info/RECORD,,
55
+ databricks_sdk-0.30.0.dist-info/LICENSE,sha256=afBgTZo-JsYqj4VOjnejBetMuHKcFR30YobDdpVFkqY,11411
56
+ databricks_sdk-0.30.0.dist-info/METADATA,sha256=00btvZMl5DDI3ovz5PebGVKtknsKQDnL_Fi1rKIZG-I,37967
57
+ databricks_sdk-0.30.0.dist-info/NOTICE,sha256=Qnc0m8JjZNTDV80y0h1aJGvsr4GqM63m1nr2VTypg6E,963
58
+ databricks_sdk-0.30.0.dist-info/WHEEL,sha256=R0nc6qTxuoLk7ShA2_Y-UWkN8ZdfDBG2B6Eqpz2WXbs,91
59
+ databricks_sdk-0.30.0.dist-info/top_level.txt,sha256=7kRdatoSgU0EUurRQJ_3F1Nv4EOSHWAr6ng25tJOJKU,11
60
+ databricks_sdk-0.30.0.dist-info/RECORD,,
@@ -1,5 +1,5 @@
1
1
  Wheel-Version: 1.0
2
- Generator: bdist_wheel (0.37.1)
2
+ Generator: setuptools (72.1.0)
3
3
  Root-Is-Purelib: true
4
4
  Tag: py3-none-any
5
5