databricks-sdk 0.28.0__py3-none-any.whl → 0.30.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of databricks-sdk might be problematic. Click here for more details.

Files changed (31) hide show
  1. databricks/sdk/__init__.py +74 -22
  2. databricks/sdk/config.py +89 -48
  3. databricks/sdk/core.py +38 -9
  4. databricks/sdk/credentials_provider.py +134 -57
  5. databricks/sdk/data_plane.py +65 -0
  6. databricks/sdk/dbutils.py +81 -3
  7. databricks/sdk/mixins/files.py +12 -4
  8. databricks/sdk/oauth.py +8 -6
  9. databricks/sdk/service/apps.py +977 -0
  10. databricks/sdk/service/billing.py +602 -218
  11. databricks/sdk/service/catalog.py +263 -62
  12. databricks/sdk/service/compute.py +515 -94
  13. databricks/sdk/service/dashboards.py +1310 -2
  14. databricks/sdk/service/iam.py +99 -88
  15. databricks/sdk/service/jobs.py +159 -166
  16. databricks/sdk/service/marketplace.py +74 -58
  17. databricks/sdk/service/oauth2.py +149 -70
  18. databricks/sdk/service/pipelines.py +73 -53
  19. databricks/sdk/service/serving.py +332 -694
  20. databricks/sdk/service/settings.py +424 -4
  21. databricks/sdk/service/sharing.py +235 -26
  22. databricks/sdk/service/sql.py +2484 -553
  23. databricks/sdk/service/vectorsearch.py +75 -0
  24. databricks/sdk/useragent.py +144 -0
  25. databricks/sdk/version.py +1 -1
  26. {databricks_sdk-0.28.0.dist-info → databricks_sdk-0.30.0.dist-info}/METADATA +37 -16
  27. {databricks_sdk-0.28.0.dist-info → databricks_sdk-0.30.0.dist-info}/RECORD +31 -28
  28. {databricks_sdk-0.28.0.dist-info → databricks_sdk-0.30.0.dist-info}/WHEEL +1 -1
  29. {databricks_sdk-0.28.0.dist-info → databricks_sdk-0.30.0.dist-info}/LICENSE +0 -0
  30. {databricks_sdk-0.28.0.dist-info → databricks_sdk-0.30.0.dist-info}/NOTICE +0 -0
  31. {databricks_sdk-0.28.0.dist-info → databricks_sdk-0.30.0.dist-info}/top_level.txt +0 -0
@@ -644,6 +644,35 @@ class PipelineType(Enum):
644
644
  TRIGGERED = 'TRIGGERED'
645
645
 
646
646
 
647
+ @dataclass
648
+ class QueryVectorIndexNextPageRequest:
649
+ """Request payload for getting next page of results."""
650
+
651
+ endpoint_name: Optional[str] = None
652
+ """Name of the endpoint."""
653
+
654
+ index_name: Optional[str] = None
655
+ """Name of the vector index to query."""
656
+
657
+ page_token: Optional[str] = None
658
+ """Page token returned from previous `QueryVectorIndex` or `QueryVectorIndexNextPage` API."""
659
+
660
+ def as_dict(self) -> dict:
661
+ """Serializes the QueryVectorIndexNextPageRequest into a dictionary suitable for use as a JSON request body."""
662
+ body = {}
663
+ if self.endpoint_name is not None: body['endpoint_name'] = self.endpoint_name
664
+ if self.index_name is not None: body['index_name'] = self.index_name
665
+ if self.page_token is not None: body['page_token'] = self.page_token
666
+ return body
667
+
668
+ @classmethod
669
+ def from_dict(cls, d: Dict[str, any]) -> QueryVectorIndexNextPageRequest:
670
+ """Deserializes the QueryVectorIndexNextPageRequest from a dictionary."""
671
+ return cls(endpoint_name=d.get('endpoint_name', None),
672
+ index_name=d.get('index_name', None),
673
+ page_token=d.get('page_token', None))
674
+
675
+
647
676
  @dataclass
648
677
  class QueryVectorIndexRequest:
649
678
  columns: List[str]
@@ -665,6 +694,9 @@ class QueryVectorIndexRequest:
665
694
  query_text: Optional[str] = None
666
695
  """Query text. Required for Delta Sync Index using model endpoint."""
667
696
 
697
+ query_type: Optional[str] = None
698
+ """The query type to use. Choices are `ANN` and `HYBRID`. Defaults to `ANN`."""
699
+
668
700
  query_vector: Optional[List[float]] = None
669
701
  """Query vector. Required for Direct Vector Access Index and Delta Sync Index using self-managed
670
702
  vectors."""
@@ -680,6 +712,7 @@ class QueryVectorIndexRequest:
680
712
  if self.index_name is not None: body['index_name'] = self.index_name
681
713
  if self.num_results is not None: body['num_results'] = self.num_results
682
714
  if self.query_text is not None: body['query_text'] = self.query_text
715
+ if self.query_type is not None: body['query_type'] = self.query_type
683
716
  if self.query_vector: body['query_vector'] = [v for v in self.query_vector]
684
717
  if self.score_threshold is not None: body['score_threshold'] = self.score_threshold
685
718
  return body
@@ -692,6 +725,7 @@ class QueryVectorIndexRequest:
692
725
  index_name=d.get('index_name', None),
693
726
  num_results=d.get('num_results', None),
694
727
  query_text=d.get('query_text', None),
728
+ query_type=d.get('query_type', None),
695
729
  query_vector=d.get('query_vector', None),
696
730
  score_threshold=d.get('score_threshold', None))
697
731
 
@@ -701,6 +735,11 @@ class QueryVectorIndexResponse:
701
735
  manifest: Optional[ResultManifest] = None
702
736
  """Metadata about the result set."""
703
737
 
738
+ next_page_token: Optional[str] = None
739
+ """[Optional] Token that can be used in `QueryVectorIndexNextPage` API to get next page of results.
740
+ If more than 1000 results satisfy the query, they are returned in groups of 1000. Empty value
741
+ means no more results."""
742
+
704
743
  result: Optional[ResultData] = None
705
744
  """Data returned in the query result."""
706
745
 
@@ -708,6 +747,7 @@ class QueryVectorIndexResponse:
708
747
  """Serializes the QueryVectorIndexResponse into a dictionary suitable for use as a JSON request body."""
709
748
  body = {}
710
749
  if self.manifest: body['manifest'] = self.manifest.as_dict()
750
+ if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
711
751
  if self.result: body['result'] = self.result.as_dict()
712
752
  return body
713
753
 
@@ -715,6 +755,7 @@ class QueryVectorIndexResponse:
715
755
  def from_dict(cls, d: Dict[str, any]) -> QueryVectorIndexResponse:
716
756
  """Deserializes the QueryVectorIndexResponse from a dictionary."""
717
757
  return cls(manifest=_from_dict(d, 'manifest', ResultManifest),
758
+ next_page_token=d.get('next_page_token', None),
718
759
  result=_from_dict(d, 'result', ResultData))
719
760
 
720
761
 
@@ -1330,6 +1371,7 @@ class VectorSearchIndexesAPI:
1330
1371
  filters_json: Optional[str] = None,
1331
1372
  num_results: Optional[int] = None,
1332
1373
  query_text: Optional[str] = None,
1374
+ query_type: Optional[str] = None,
1333
1375
  query_vector: Optional[List[float]] = None,
1334
1376
  score_threshold: Optional[float] = None) -> QueryVectorIndexResponse:
1335
1377
  """Query an index.
@@ -1350,6 +1392,8 @@ class VectorSearchIndexesAPI:
1350
1392
  Number of results to return. Defaults to 10.
1351
1393
  :param query_text: str (optional)
1352
1394
  Query text. Required for Delta Sync Index using model endpoint.
1395
+ :param query_type: str (optional)
1396
+ The query type to use. Choices are `ANN` and `HYBRID`. Defaults to `ANN`.
1353
1397
  :param query_vector: List[float] (optional)
1354
1398
  Query vector. Required for Direct Vector Access Index and Delta Sync Index using self-managed
1355
1399
  vectors.
@@ -1363,6 +1407,7 @@ class VectorSearchIndexesAPI:
1363
1407
  if filters_json is not None: body['filters_json'] = filters_json
1364
1408
  if num_results is not None: body['num_results'] = num_results
1365
1409
  if query_text is not None: body['query_text'] = query_text
1410
+ if query_type is not None: body['query_type'] = query_type
1366
1411
  if query_vector is not None: body['query_vector'] = [v for v in query_vector]
1367
1412
  if score_threshold is not None: body['score_threshold'] = score_threshold
1368
1413
  headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
@@ -1373,6 +1418,36 @@ class VectorSearchIndexesAPI:
1373
1418
  headers=headers)
1374
1419
  return QueryVectorIndexResponse.from_dict(res)
1375
1420
 
1421
+ def query_next_page(self,
1422
+ index_name: str,
1423
+ *,
1424
+ endpoint_name: Optional[str] = None,
1425
+ page_token: Optional[str] = None) -> QueryVectorIndexResponse:
1426
+ """Query next page.
1427
+
1428
+ Use `next_page_token` returned from previous `QueryVectorIndex` or `QueryVectorIndexNextPage` request
1429
+ to fetch next page of results.
1430
+
1431
+ :param index_name: str
1432
+ Name of the vector index to query.
1433
+ :param endpoint_name: str (optional)
1434
+ Name of the endpoint.
1435
+ :param page_token: str (optional)
1436
+ Page token returned from previous `QueryVectorIndex` or `QueryVectorIndexNextPage` API.
1437
+
1438
+ :returns: :class:`QueryVectorIndexResponse`
1439
+ """
1440
+ body = {}
1441
+ if endpoint_name is not None: body['endpoint_name'] = endpoint_name
1442
+ if page_token is not None: body['page_token'] = page_token
1443
+ headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
1444
+
1445
+ res = self._api.do('POST',
1446
+ f'/api/2.0/vector-search/indexes/{index_name}/query-next-page',
1447
+ body=body,
1448
+ headers=headers)
1449
+ return QueryVectorIndexResponse.from_dict(res)
1450
+
1376
1451
  def scan_index(self,
1377
1452
  index_name: str,
1378
1453
  *,
@@ -0,0 +1,144 @@
1
+ import copy
2
+ import logging
3
+ import os
4
+ import platform
5
+ import re
6
+ from typing import List, Optional, Tuple
7
+
8
+ from .version import __version__
9
+
10
+ # Constants
11
+ RUNTIME_KEY = 'runtime'
12
+ CICD_KEY = 'cicd'
13
+ AUTH_KEY = 'auth'
14
+
15
+ _product_name = "unknown"
16
+ _product_version = "0.0.0"
17
+
18
+ logger = logging.getLogger("databricks.sdk.useragent")
19
+
20
+ _extra = []
21
+
22
+ # Precompiled regex patterns
23
+ alphanum_pattern = re.compile(r'^[a-zA-Z0-9_.+-]+$')
24
+ semver_pattern = re.compile(r'^v?(\d+\.)?(\d+\.)?(\*|\d+)$')
25
+
26
+
27
+ def _match_alphanum(value):
28
+ if not alphanum_pattern.match(value):
29
+ raise ValueError(f"Invalid alphanumeric value: {value}")
30
+
31
+
32
+ def _match_semver(value):
33
+ if not semver_pattern.match(value):
34
+ raise ValueError(f"Invalid semantic version: {value}")
35
+
36
+
37
+ def _match_alphanum_or_semver(value):
38
+ if not alphanum_pattern.match(value) and not semver_pattern.match(value):
39
+ raise ValueError(f"Invalid value: {value}")
40
+
41
+
42
+ def product() -> Tuple[str, str]:
43
+ """Return the global product name and version that will be submitted to Databricks on every request."""
44
+ return _product_name, _product_version
45
+
46
+
47
+ def with_product(name: str, version: str):
48
+ """Change the product name and version that will be submitted to Databricks on every request."""
49
+ global _product_name, _product_version
50
+ _match_alphanum(name)
51
+ _match_semver(version)
52
+ logger.debug(f'Changing product from {_product_name}/{_product_version} to {name}/{version}')
53
+ _product_name = name
54
+ _product_version = version
55
+
56
+
57
+ def _reset_product():
58
+ """[Internal API] Reset product name and version to the default values.
59
+
60
+ Used for testing purposes only."""
61
+ global _product_name, _product_version
62
+ _product_name = "unknown"
63
+ _product_version = "0.0.0"
64
+
65
+
66
+ def with_extra(key: str, value: str):
67
+ """Add extra metadata to all requests submitted to Databricks.
68
+
69
+ User-specified extra metadata can be inserted into request headers to provide additional context to Databricks
70
+ about usage of different tools in the Databricks ecosystem. This can be useful for collecting telemetry about SDK
71
+ usage from tools that are built on top of the SDK.
72
+ """
73
+ global _extra
74
+ _match_alphanum(key)
75
+ _match_alphanum_or_semver(value)
76
+ logger.debug(f'Adding {key}/{value} to User-Agent')
77
+ _extra.append((key, value))
78
+
79
+
80
+ def extra() -> List[Tuple[str, str]]:
81
+ """Returns the current extra metadata that will be submitted to Databricks on every request."""
82
+ return copy.deepcopy(_extra)
83
+
84
+
85
+ def _reset_extra(extra: List[Tuple[str, str]]):
86
+ """[INTERNAL API] Reset the extra metadata to a new list.
87
+
88
+ Prefer using with_user_agent_extra instead of this method to avoid overwriting other information included in the
89
+ user agent."""
90
+ global _extra
91
+ _extra = extra
92
+
93
+
94
+ def with_partner(partner: str):
95
+ """Adds the given partner to the metadata submitted to Databricks on every request."""
96
+ with_extra("partner", partner)
97
+
98
+
99
+ def _get_upstream_user_agent_info() -> List[Tuple[str, str]]:
100
+ """[INTERNAL API] Return the upstream product and version if specified in the system environment."""
101
+ product = os.getenv("DATABRICKS_SDK_UPSTREAM")
102
+ version = os.getenv("DATABRICKS_SDK_UPSTREAM_VERSION")
103
+ if not product or not version:
104
+ return []
105
+ return [("upstream", product), ("upstream-version", version)]
106
+
107
+
108
+ def _get_runtime_info() -> List[Tuple[str, str]]:
109
+ """[INTERNAL API] Return the runtime version if running on Databricks."""
110
+ if 'DATABRICKS_RUNTIME_VERSION' in os.environ:
111
+ runtime_version = os.environ['DATABRICKS_RUNTIME_VERSION']
112
+ if runtime_version != '':
113
+ runtime_version = _sanitize_header_value(runtime_version)
114
+ return [('runtime', runtime_version)]
115
+ return []
116
+
117
+
118
+ def _sanitize_header_value(value: str) -> str:
119
+ value = value.replace(' ', '-')
120
+ value = value.replace('/', '-')
121
+ return value
122
+
123
+
124
+ def to_string(alternate_product_info: Optional[Tuple[str, str]] = None,
125
+ other_info: Optional[List[Tuple[str, str]]] = None) -> str:
126
+ """Compute the full User-Agent header.
127
+
128
+ The User-Agent header contains the product name, version, and other metadata that is submitted to Databricks on
129
+ every request. There are some static components that are included by default in every request, like the SDK version,
130
+ OS name, and Python version. Other components can be optionally overridden or augmented in DatabricksConfig, like
131
+ the product name, product version, and extra user-defined information."""
132
+ base = []
133
+ if alternate_product_info:
134
+ base.append(alternate_product_info)
135
+ else:
136
+ base.append((_product_name, _product_version))
137
+ base.extend([("databricks-sdk-py", __version__), ("python", platform.python_version()),
138
+ ("os", platform.uname().system.lower()), ])
139
+ if other_info:
140
+ base.extend(other_info)
141
+ base.extend(_extra)
142
+ base.extend(_get_upstream_user_agent_info())
143
+ base.extend(_get_runtime_info())
144
+ return " ".join(f"{k}/{v}" for k, v in base)
databricks/sdk/version.py CHANGED
@@ -1 +1 @@
1
- __version__ = '0.28.0'
1
+ __version__ = '0.30.0'
@@ -1,13 +1,11 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: databricks-sdk
3
- Version: 0.28.0
3
+ Version: 0.30.0
4
4
  Summary: Databricks SDK for Python (Beta)
5
5
  Home-page: https://databricks-sdk-py.readthedocs.io
6
6
  Author: Serge Smertin
7
7
  Author-email: serge.smertin@databricks.com
8
- License: UNKNOWN
9
8
  Keywords: databricks sdk
10
- Platform: UNKNOWN
11
9
  Classifier: Development Status :: 4 - Beta
12
10
  Classifier: Intended Audience :: Developers
13
11
  Classifier: Intended Audience :: Science/Research
@@ -24,23 +22,24 @@ Requires-Python: >=3.7
24
22
  Description-Content-Type: text/markdown
25
23
  License-File: LICENSE
26
24
  License-File: NOTICE
27
- Requires-Dist: google-auth (~=2.0)
28
25
  Requires-Dist: requests (<3,>=2.28.1)
26
+ Requires-Dist: google-auth (~=2.0)
29
27
  Provides-Extra: dev
30
- Requires-Dist: autoflake ; extra == 'dev'
31
- Requires-Dist: databricks-connect ; extra == 'dev'
32
- Requires-Dist: ipython ; extra == 'dev'
33
- Requires-Dist: ipywidgets ; extra == 'dev'
34
- Requires-Dist: isort ; extra == 'dev'
35
- Requires-Dist: pycodestyle ; extra == 'dev'
36
- Requires-Dist: pyfakefs ; extra == 'dev'
37
28
  Requires-Dist: pytest ; extra == 'dev'
38
29
  Requires-Dist: pytest-cov ; extra == 'dev'
39
- Requires-Dist: pytest-mock ; extra == 'dev'
40
30
  Requires-Dist: pytest-xdist ; extra == 'dev'
41
- Requires-Dist: requests-mock ; extra == 'dev'
42
- Requires-Dist: wheel ; extra == 'dev'
31
+ Requires-Dist: pytest-mock ; extra == 'dev'
43
32
  Requires-Dist: yapf ; extra == 'dev'
33
+ Requires-Dist: pycodestyle ; extra == 'dev'
34
+ Requires-Dist: autoflake ; extra == 'dev'
35
+ Requires-Dist: isort ; extra == 'dev'
36
+ Requires-Dist: wheel ; extra == 'dev'
37
+ Requires-Dist: ipython ; extra == 'dev'
38
+ Requires-Dist: ipywidgets ; extra == 'dev'
39
+ Requires-Dist: requests-mock ; extra == 'dev'
40
+ Requires-Dist: pyfakefs ; extra == 'dev'
41
+ Requires-Dist: databricks-connect ; extra == 'dev'
42
+ Requires-Dist: pytest-rerunfailures ; extra == 'dev'
44
43
  Provides-Extra: notebook
45
44
  Requires-Dist: ipython (<9,>=8) ; extra == 'notebook'
46
45
  Requires-Dist: ipywidgets (<9,>=8) ; extra == 'notebook'
@@ -77,6 +76,7 @@ The SDK's internal HTTP client is robust and handles failures on different level
77
76
  - [Long-running operations](#long-running-operations)
78
77
  - [Paginated responses](#paginated-responses)
79
78
  - [Single-sign-on with OAuth](#single-sign-on-sso-with-oauth)
79
+ - [User Agent Request Attribution](#user-agent-request-attribution)
80
80
  - [Error handling](#error-handling)
81
81
  - [Logging](#logging)
82
82
  - [Integration with `dbutils`](#interaction-with-dbutils)
@@ -555,6 +555,29 @@ logging.info(f'Created new custom app: '
555
555
  f'--client_secret {custom_app.client_secret}')
556
556
  ```
557
557
 
558
+ ## User Agent Request Attribution<a id="user-agent-request-attribution"></a>
559
+
560
+ The Databricks SDK for Python uses the `User-Agent` header to include request metadata along with each request. By default, this includes the version of the Python SDK, the version of the Python language used by your application, and the underlying operating system. To statically add additional metadata, you can use the `with_partner()` and `with_product()` functions in the `databricks.sdk.useragent` module. `with_partner()` can be used by partners to indicate that code using the Databricks SDK for Python should be attributed to a specific partner. Multiple partners can be registered at once. Partner names can contain any letter, digit, `.`, `-`, `_` or `+`.
561
+
562
+ ```python
563
+ from databricks.sdk import useragent
564
+ useragent.with_partner("partner-abc")
565
+ useragent.with_partner("partner-xyz")
566
+ ```
567
+
568
+ `with_product()` can be used to define the name and version of the product that is built with the Databricks SDK for Python. The product name has the same restrictions as the partner name above, and the product version must be a valid [SemVer](https://semver.org/). Subsequent calls to `with_product()` replace the original product with the new user-specified one.
569
+
570
+ ```python
571
+ from databricks.sdk import useragent
572
+ useragent.with_product("databricks-example-product", "1.2.0")
573
+ ```
574
+
575
+ If both the `DATABRICKS_SDK_UPSTREAM` and `DATABRICKS_SDK_UPSTREAM_VERSION` environment variables are defined, these will also be included in the `User-Agent` header.
576
+
577
+ If additional metadata needs to be specified that isn't already supported by the above interfaces, you can use the `with_user_agent_extra()` function to register arbitrary key-value pairs to include in the user agent. Multiple values associated with the same key are allowed. Keys have the same restrictions as the partner name above. Values must be either alphanumeric strings as described above or valid SemVer strings.
578
+
579
+ Additional `User-Agent` information can be associated with different instances of `DatabricksConfig`. To add metadata to a specific instance of `DatabricksConfig`, use the `with_user_agent_extra()` method.
580
+
558
581
  ## Error handling<a id="error-handling"></a>
559
582
 
560
583
  The Databricks SDK for Python provides a robust error-handling mechanism that allows developers to catch and handle API errors. When an error occurs, the SDK will raise an exception that contains information about the error, such as the HTTP status code, error message, and error details. Developers can catch these exceptions and handle them appropriately in their code.
@@ -654,5 +677,3 @@ API clients for all services are generated from specification files that are syn
654
677
  You are highly encouraged to pin the exact dependency version and read the [changelog](https://github.com/databricks/databricks-sdk-py/blob/main/CHANGELOG.md)
655
678
  where Databricks documents the changes. Databricks may have minor [documented](https://github.com/databricks/databricks-sdk-py/blob/main/CHANGELOG.md)
656
679
  backward-incompatible changes, such as renaming some type names to bring more consistency.
657
-
658
-
@@ -1,18 +1,20 @@
1
1
  databricks/__init__.py,sha256=CF2MJcZFwbpn9TwQER8qnCDhkPooBGQNVkX4v7g6p3g,537
2
- databricks/sdk/__init__.py,sha256=XlfPt-_tfDGubwEhhpc8QowEU24QX4gY7dhHIO6moHk,43850
2
+ databricks/sdk/__init__.py,sha256=I1YtsgPGK82d8EknfaKJLmp950-rg2kdDpl-forqUfA,46636
3
3
  databricks/sdk/_property.py,sha256=sGjsipeFrjMBSVPjtIb0HNCRcMIhFpVx6wq4BkC3LWs,1636
4
4
  databricks/sdk/azure.py,sha256=8P7nEdun0hbQCap9Ojo7yZse_JHxnhYsE6ApojnPz7Q,1009
5
5
  databricks/sdk/casing.py,sha256=NKYPrfPbQjM7lU4hhNQK3z1jb_VEA29BfH4FEdby2tg,1137
6
6
  databricks/sdk/clock.py,sha256=Ivlow0r_TkXcTJ8UXkxSA0czKrY0GvwHAeOvjPkJnAQ,1360
7
- databricks/sdk/config.py,sha256=ZPoO-o3U4PtlcPdKY-eOQxAVRsUrE9omeaEVzbuUen8,18906
8
- databricks/sdk/core.py,sha256=e3RlEv7CDiVd-1i0XJUIx8iJ6bY__i39OCcwoniUfx8,19048
9
- databricks/sdk/credentials_provider.py,sha256=zLmXLbt6zDS-P4jRBiS9if6QQGOea2CZn3fUrmJuJLY,26255
10
- databricks/sdk/dbutils.py,sha256=JUoT5hJVe_fi95g_BqX08iDzsoYfneybXRub42VC-Bw,12771
7
+ databricks/sdk/config.py,sha256=FWEiIY34C_4Mmv8B9w284BR2FHug2T2ySpmtyg51ttA,21139
8
+ databricks/sdk/core.py,sha256=PWU2kTHXOF6x7i9_yRUFGj-iusr_Mo7awROiBpA9nJQ,20398
9
+ databricks/sdk/credentials_provider.py,sha256=V8QxVUvZmOfVjEpogNEzu5nUBXzRLevWfi-NiPaDOks,29232
10
+ databricks/sdk/data_plane.py,sha256=Er2z2fT-KVupJKzGozGGZ-jCQ3AmDWq-DZppahIK6tU,2591
11
+ databricks/sdk/dbutils.py,sha256=HFCuB-el6SFKhF8qRfJxYANtyLTm-VG9GtQuQgZXFkM,15741
11
12
  databricks/sdk/environments.py,sha256=5KoVuVfF-ZX17rua1sH3EJCCtniVrREXBXsMNDEV-UU,4293
12
- databricks/sdk/oauth.py,sha256=jqe0yrrTUfRL8kpR21Odwn4R_X6Ns-hTLu3dKYDI1EM,18313
13
+ databricks/sdk/oauth.py,sha256=KzcJPYLL3JL6RDvf_Q8SDAaF9xSaoYNCRD4rYInZDuo,18319
13
14
  databricks/sdk/py.typed,sha256=pSvaHpbY1UPNEXyVFUjlgBhjPFZMmVC_UNrPC7eMOHI,74
14
15
  databricks/sdk/retries.py,sha256=WgLh12bwdBc6fCQlaig3kKu18cVhPzFDGsspvq629Ew,2454
15
- databricks/sdk/version.py,sha256=eKEnUCzRu6EJ3qyBcomfuYJe60IGd89zgi1KXWEOG4o,23
16
+ databricks/sdk/useragent.py,sha256=8yXu6l2mGZR6874u9Jtn2qNSDuNDEp4jzh0CbJ9yssw,5028
17
+ databricks/sdk/version.py,sha256=dSJBJnSMHNhD9IFG9yG2hFHceWiae5G4B2UFREtze5M,23
16
18
  databricks/sdk/_widgets/__init__.py,sha256=Qm3JB8LmdPgEn_-VgxKkodTO4gn6OdaDPwsYcDmeIRI,2667
17
19
  databricks/sdk/_widgets/default_widgets_utils.py,sha256=Rk59AFzVYVpOektB_yC_7j-vSt5OdtZA85IlG0kw0xA,1202
18
20
  databricks/sdk/_widgets/ipywidgets_utils.py,sha256=P-AyGeahPiX3S59mxpAMgffi4gyJ0irEOY7Ekkn9nQ0,2850
@@ -25,33 +27,34 @@ databricks/sdk/errors/private_link.py,sha256=6wVRJQqousGQC7qfT0pV8LqujqfR3XLbSix
25
27
  databricks/sdk/errors/sdk.py,sha256=_euMruhvquB0v_SKtgqxJUiyXHWuTb4Jl7ji6_h0E_A,109
26
28
  databricks/sdk/mixins/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
27
29
  databricks/sdk/mixins/compute.py,sha256=khb00BzBckc4RLUF4-GnNMCSO5lXKt_XYMM3IhiUxlA,11237
28
- databricks/sdk/mixins/files.py,sha256=8Nh4TAB0BASZfTylW1P93_Sj_-zBlzEZ6CN38x2UoTQ,20205
30
+ databricks/sdk/mixins/files.py,sha256=bLGFu1kVIQECTmuc_9jUf-n_Cth4COBMbmKqAYxkEkM,20542
29
31
  databricks/sdk/mixins/workspace.py,sha256=dWMNvuEi8jJ5wMhrDt1LiqxNdWSsmEuDTzrcZR-eJzY,4896
30
32
  databricks/sdk/runtime/__init__.py,sha256=9NnZkBzeZXZRQxcE1qKzAszQEzcpIgpL7lQzW3_kxEU,7266
31
33
  databricks/sdk/runtime/dbutils_stub.py,sha256=UFbRZF-bBcwxjbv_pxma00bjNtktLLaYpo8oHRc4-9g,11421
32
34
  databricks/sdk/service/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
33
35
  databricks/sdk/service/_internal.py,sha256=nWbJfW5eJCQgAZ3TmA26xoWb6SNZ5N76ZA8bO1N4AsU,1961
34
- databricks/sdk/service/billing.py,sha256=Hbe5bMsBrpebuAl8yj-GwVRGktrzKwiZJj3gq1wUMaI,50625
35
- databricks/sdk/service/catalog.py,sha256=hGYFAIr80CmBxeASIvEnlYdkXGXcC_8d_4H0yW-SXGE,410757
36
- databricks/sdk/service/compute.py,sha256=YdaGAH1YfNsiKzEu6ASglDvoj9YJCbPSKvJ-xQzQ6rA,397674
37
- databricks/sdk/service/dashboards.py,sha256=PwhX73El3POXdblc7ZOm2PAkhf5TcSZ5Na73_ne2Zb4,18801
36
+ databricks/sdk/service/apps.py,sha256=536HvScC96edq9EXMUcyVh1h2jE5zCeCMa_l7HZiu20,38170
37
+ databricks/sdk/service/billing.py,sha256=Ru6GumI-M4_X71HTMj2VSVBQ7tRMTrwKzhdwNyiC3fA,69733
38
+ databricks/sdk/service/catalog.py,sha256=DT-T8vSdHN3GL_dBUDp2fFS6ypNIgqVxtlKijw-fpUc,421856
39
+ databricks/sdk/service/compute.py,sha256=2CkQPllHlpVZGWXCWIe9bcVcQyDsldfE4kmiXFoF7Jc,420685
40
+ databricks/sdk/service/dashboards.py,sha256=SqaySudMTniP3AIMW5Lx2v6k0yxwVtIEZQzEJNPW3-Q,75352
38
41
  databricks/sdk/service/files.py,sha256=VCt83YSI9rhQexmxaQdrUXHq2UCYfZcDMLvJx5X6n1M,38162
39
- databricks/sdk/service/iam.py,sha256=11L45bjOYwzxMVlAXpKrFMOxrZzgZy75JSIOkeAXuFg,147645
40
- databricks/sdk/service/jobs.py,sha256=vpOT9WCDOkTO2uA_DqU-3s2wdplaZ4gvOs-Hicp2y38,304830
41
- databricks/sdk/service/marketplace.py,sha256=-TE_cZ05oOw-immazvAbJZrzG7bjVuFP3D68ny9FHj0,135895
42
+ databricks/sdk/service/iam.py,sha256=fj1RQtCdg8E8oUt1SEnm6PzMR6UB-jaCn8M354KiB-o,148500
43
+ databricks/sdk/service/jobs.py,sha256=Jc2PF58kvpDuHOTVfwQQAJsdR_RJ9Q7qDDtr4Hpqggo,304259
44
+ databricks/sdk/service/marketplace.py,sha256=Fgk_8V9zbQ8QcNPUw-yZehHv8LgnDtFJUe-YixjxkYo,136405
42
45
  databricks/sdk/service/ml.py,sha256=vohBdESClI3EOpO-ZZ44W-CMz1alq5Tw4oJnWa99Z2M,236128
43
- databricks/sdk/service/oauth2.py,sha256=zpEA7glY_EsPvMgkk-hmt4eVgrmtcSGgduI7XlShNUo,36215
44
- databricks/sdk/service/pipelines.py,sha256=yY_C6MN3yNtp7Kemp6RkJPRgAu-HQf-ywrVontT0mb4,118755
46
+ databricks/sdk/service/oauth2.py,sha256=67pr6gUnYwO6BaGNQfjW1qvcEB3ejdNbI9Pmvqs5bSE,39928
47
+ databricks/sdk/service/pipelines.py,sha256=tGCo1F3tW1GxB9Q63qsh2AyisJmXqYSsGkJK0OdS06Q,119378
45
48
  databricks/sdk/service/provisioning.py,sha256=DP4Df4X-p0JEUk4zAJQhjX_wxpMi673OKLXFhxl6YSE,142678
46
- databricks/sdk/service/serving.py,sha256=0Kuy_W4UOQ-7tRkxgQiqJ_1L2ttwhIJdL_6LEd2kO1o,146399
47
- databricks/sdk/service/settings.py,sha256=bhbYqlLj4gpy_GhCifa_0sLvoDBRNTJzU9H5TerFU4E,177359
48
- databricks/sdk/service/sharing.py,sha256=v6MMjz7n-gfqwcALKSWspRGa7LahNGTwb1oArjigBZA,100181
49
- databricks/sdk/service/sql.py,sha256=BurJRqsYl4kACpPp2StXEBDL1jjI_-BzydGhOOgwhwU,257882
50
- databricks/sdk/service/vectorsearch.py,sha256=Z3fW8ZRB6Lwob_U-TIzstXyKSTLJGfVtfXWG6rUxDh4,59038
49
+ databricks/sdk/service/serving.py,sha256=BfShf0ceupXgLccU5zp1CZyBW1260Ga73USM2T5KxXs,140008
50
+ databricks/sdk/service/settings.py,sha256=7PXxsrXUe7exM35O7_iUp9r78zn5oGnPbhX_sh3v1_0,193732
51
+ databricks/sdk/service/sharing.py,sha256=kalJYd0v1SwuGhlCaq4l2ZhzNlev9OwNbCXFIOKIMXU,113253
52
+ databricks/sdk/service/sql.py,sha256=b8FoGZnwHqxDb2FQr_XOeegFVqDEfBXZH318NpRVtdU,337519
53
+ databricks/sdk/service/vectorsearch.py,sha256=ZfiTEpTNg8nnzPuw24MeiDn8eq6PHmEWqTHS0zdDdEo,62484
51
54
  databricks/sdk/service/workspace.py,sha256=FKLf5esRmfFstIXo7HQg6HQCzQ2svrb6ulr8yzZ7-8U,101182
52
- databricks_sdk-0.28.0.dist-info/LICENSE,sha256=afBgTZo-JsYqj4VOjnejBetMuHKcFR30YobDdpVFkqY,11411
53
- databricks_sdk-0.28.0.dist-info/METADATA,sha256=n3jNER2VjZIXRlcclyLqxqJCTtsbXb8gYD8jXdP0yT4,35766
54
- databricks_sdk-0.28.0.dist-info/NOTICE,sha256=Qnc0m8JjZNTDV80y0h1aJGvsr4GqM63m1nr2VTypg6E,963
55
- databricks_sdk-0.28.0.dist-info/WHEEL,sha256=G16H4A3IeoQmnOrYV4ueZGKSjhipXx8zc8nu9FGlvMA,92
56
- databricks_sdk-0.28.0.dist-info/top_level.txt,sha256=7kRdatoSgU0EUurRQJ_3F1Nv4EOSHWAr6ng25tJOJKU,11
57
- databricks_sdk-0.28.0.dist-info/RECORD,,
55
+ databricks_sdk-0.30.0.dist-info/LICENSE,sha256=afBgTZo-JsYqj4VOjnejBetMuHKcFR30YobDdpVFkqY,11411
56
+ databricks_sdk-0.30.0.dist-info/METADATA,sha256=00btvZMl5DDI3ovz5PebGVKtknsKQDnL_Fi1rKIZG-I,37967
57
+ databricks_sdk-0.30.0.dist-info/NOTICE,sha256=Qnc0m8JjZNTDV80y0h1aJGvsr4GqM63m1nr2VTypg6E,963
58
+ databricks_sdk-0.30.0.dist-info/WHEEL,sha256=R0nc6qTxuoLk7ShA2_Y-UWkN8ZdfDBG2B6Eqpz2WXbs,91
59
+ databricks_sdk-0.30.0.dist-info/top_level.txt,sha256=7kRdatoSgU0EUurRQJ_3F1Nv4EOSHWAr6ng25tJOJKU,11
60
+ databricks_sdk-0.30.0.dist-info/RECORD,,
@@ -1,5 +1,5 @@
1
1
  Wheel-Version: 1.0
2
- Generator: bdist_wheel (0.37.1)
2
+ Generator: setuptools (72.1.0)
3
3
  Root-Is-Purelib: true
4
4
  Tag: py3-none-any
5
5