databricks-sdk 0.27.1__py3-none-any.whl → 0.29.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- databricks/sdk/__init__.py +16 -12
- databricks/sdk/azure.py +0 -27
- databricks/sdk/config.py +71 -19
- databricks/sdk/core.py +27 -0
- databricks/sdk/credentials_provider.py +121 -44
- databricks/sdk/dbutils.py +81 -3
- databricks/sdk/environments.py +34 -1
- databricks/sdk/errors/__init__.py +1 -0
- databricks/sdk/errors/mapper.py +4 -0
- databricks/sdk/errors/private_link.py +60 -0
- databricks/sdk/oauth.py +8 -6
- databricks/sdk/service/catalog.py +774 -632
- databricks/sdk/service/compute.py +91 -116
- databricks/sdk/service/dashboards.py +707 -2
- databricks/sdk/service/jobs.py +126 -163
- databricks/sdk/service/marketplace.py +145 -31
- databricks/sdk/service/oauth2.py +22 -0
- databricks/sdk/service/pipelines.py +119 -4
- databricks/sdk/service/serving.py +217 -64
- databricks/sdk/service/settings.py +1 -0
- databricks/sdk/service/sharing.py +36 -2
- databricks/sdk/service/sql.py +103 -24
- databricks/sdk/service/vectorsearch.py +263 -1
- databricks/sdk/service/workspace.py +8 -4
- databricks/sdk/version.py +1 -1
- {databricks_sdk-0.27.1.dist-info → databricks_sdk-0.29.0.dist-info}/METADATA +2 -1
- databricks_sdk-0.29.0.dist-info/RECORD +57 -0
- databricks_sdk-0.27.1.dist-info/RECORD +0 -56
- {databricks_sdk-0.27.1.dist-info → databricks_sdk-0.29.0.dist-info}/LICENSE +0 -0
- {databricks_sdk-0.27.1.dist-info → databricks_sdk-0.29.0.dist-info}/NOTICE +0 -0
- {databricks_sdk-0.27.1.dist-info → databricks_sdk-0.29.0.dist-info}/WHEEL +0 -0
- {databricks_sdk-0.27.1.dist-info → databricks_sdk-0.29.0.dist-info}/top_level.txt +0 -0
databricks/sdk/service/sharing.py CHANGED

@@ -531,17 +531,23 @@ class CreateShare:
     comment: Optional[str] = None
     """User-provided free-form text description."""
 
+    storage_root: Optional[str] = None
+    """Storage root URL for the share."""
+
     def as_dict(self) -> dict:
         """Serializes the CreateShare into a dictionary suitable for use as a JSON request body."""
         body = {}
         if self.comment is not None: body['comment'] = self.comment
         if self.name is not None: body['name'] = self.name
+        if self.storage_root is not None: body['storage_root'] = self.storage_root
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateShare:
         """Deserializes the CreateShare from a dictionary."""
-        return cls(comment=d.get('comment', None),
+        return cls(comment=d.get('comment', None),
+                   name=d.get('name', None),
+                   storage_root=d.get('storage_root', None))
 
 
 @dataclass
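The new `storage_root` field follows the same `as_dict`/`from_dict` round trip as the existing fields. A minimal sketch of that round trip (the share name and the `s3://` path are placeholder values):

```python
from databricks.sdk.service.sharing import CreateShare

# Build the request dataclass, including the new storage_root field.
req = CreateShare(name="my_share",
                  comment="demo share",
                  storage_root="s3://my-bucket/shares/my_share")  # placeholder path

body = req.as_dict()
# -> {'comment': 'demo share', 'name': 'my_share', 'storage_root': 's3://my-bucket/shares/my_share'}

# from_dict rebuilds an equal dataclass from the JSON-style dictionary.
assert CreateShare.from_dict(body) == req
```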
@@ -1215,6 +1221,12 @@ class ShareInfo:
     owner: Optional[str] = None
     """Username of current owner of share."""
 
+    storage_location: Optional[str] = None
+    """Storage Location URL (full path) for the share."""
+
+    storage_root: Optional[str] = None
+    """Storage root URL for the share."""
+
     updated_at: Optional[int] = None
     """Time at which this share was updated, in epoch milliseconds."""
 
@@ -1230,6 +1242,8 @@ class ShareInfo:
         if self.name is not None: body['name'] = self.name
         if self.objects: body['objects'] = [v.as_dict() for v in self.objects]
         if self.owner is not None: body['owner'] = self.owner
+        if self.storage_location is not None: body['storage_location'] = self.storage_location
+        if self.storage_root is not None: body['storage_root'] = self.storage_root
         if self.updated_at is not None: body['updated_at'] = self.updated_at
         if self.updated_by is not None: body['updated_by'] = self.updated_by
         return body
@@ -1243,6 +1257,8 @@ class ShareInfo:
                    name=d.get('name', None),
                    objects=_repeated_dict(d, 'objects', SharedDataObject),
                    owner=d.get('owner', None),
+                   storage_location=d.get('storage_location', None),
+                   storage_root=d.get('storage_root', None),
                    updated_at=d.get('updated_at', None),
                    updated_by=d.get('updated_by', None))
 
@@ -1576,6 +1592,9 @@ class UpdateShare:
     owner: Optional[str] = None
     """Username of current owner of share."""
 
+    storage_root: Optional[str] = None
+    """Storage root URL for the share."""
+
     updates: Optional[List[SharedDataObjectUpdate]] = None
     """Array of shared data object updates."""
 
@@ -1586,6 +1605,7 @@ class UpdateShare:
         if self.name is not None: body['name'] = self.name
         if self.new_name is not None: body['new_name'] = self.new_name
         if self.owner is not None: body['owner'] = self.owner
+        if self.storage_root is not None: body['storage_root'] = self.storage_root
         if self.updates: body['updates'] = [v.as_dict() for v in self.updates]
         return body
 
@@ -1596,6 +1616,7 @@ class UpdateShare:
                    name=d.get('name', None),
                    new_name=d.get('new_name', None),
                    owner=d.get('owner', None),
+                   storage_root=d.get('storage_root', None),
                    updates=_repeated_dict(d, 'updates', SharedDataObjectUpdate))
 
 
@@ -2193,7 +2214,11 @@ class SharesAPI:
     def __init__(self, api_client):
         self._api = api_client
 
-    def create(self,
+    def create(self,
+               name: str,
+               *,
+               comment: Optional[str] = None,
+               storage_root: Optional[str] = None) -> ShareInfo:
         """Create a share.
 
         Creates a new share for data objects. Data objects can be added after creation with **update**. The
@@ -2203,12 +2228,15 @@ class SharesAPI:
           Name of the share.
         :param comment: str (optional)
           User-provided free-form text description.
+        :param storage_root: str (optional)
+          Storage root URL for the share.
 
         :returns: :class:`ShareInfo`
         """
         body = {}
         if comment is not None: body['comment'] = comment
         if name is not None: body['name'] = name
+        if storage_root is not None: body['storage_root'] = storage_root
         headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
 
         res = self._api.do('POST', '/api/2.1/unity-catalog/shares', body=body, headers=headers)
@@ -2288,6 +2316,7 @@ class SharesAPI:
                comment: Optional[str] = None,
                new_name: Optional[str] = None,
                owner: Optional[str] = None,
+               storage_root: Optional[str] = None,
                updates: Optional[List[SharedDataObjectUpdate]] = None) -> ShareInfo:
         """Update a share.
 
@@ -2299,6 +2328,8 @@ class SharesAPI:
         In the case that the share name is changed, **updateShare** requires that the caller is both the share
         owner and a metastore admin.
 
+        If there are notebook files in the share, the __storage_root__ field cannot be updated.
+
         For each table that is added through this method, the share owner must also have **SELECT** privilege
         on the table. This privilege must be maintained indefinitely for recipients to be able to access the
         table. Typically, you should use a group as the share owner.
@@ -2313,6 +2344,8 @@ class SharesAPI:
           New name for the share.
         :param owner: str (optional)
           Username of current owner of share.
+        :param storage_root: str (optional)
+          Storage root URL for the share.
         :param updates: List[:class:`SharedDataObjectUpdate`] (optional)
           Array of shared data object updates.
 
@@ -2322,6 +2355,7 @@ class SharesAPI:
         if comment is not None: body['comment'] = comment
         if new_name is not None: body['new_name'] = new_name
         if owner is not None: body['owner'] = owner
+        if storage_root is not None: body['storage_root'] = storage_root
         if updates is not None: body['updates'] = [v.as_dict() for v in updates]
         headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
 
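Put together, the new keyword is simply forwarded into the request body. A minimal usage sketch, assuming a `WorkspaceClient` that can authenticate from the environment; the cloud storage paths are placeholders:

```python
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()  # auth resolved from env vars or ~/.databrickscfg

# Create a share with its own storage root (placeholder cloud path).
share = w.shares.create(name="my_share",
                        comment="demo share",
                        storage_root="s3://my-bucket/shares/my_share")

# Later, re-point the storage root; per the docstring above this is not
# allowed once the share contains notebook files.
share = w.shares.update(name="my_share",
                        storage_root="s3://my-bucket/shares/new_root")
print(share.name, share.storage_root)
```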
databricks/sdk/service/sql.py CHANGED
@@ -182,7 +182,7 @@ class AlertQuery:
 
     data_source_id: Optional[str] = None
     """Data source ID maps to the ID of the data source used by the resource and is distinct from the
-    warehouse ID. [Learn more]
+    warehouse ID. [Learn more]
 
     [Learn more]: https://docs.databricks.com/api/workspace/datasources/list"""
 
@@ -360,7 +360,6 @@ class ChannelInfo:
 
 
 class ChannelName(Enum):
-    """Name of the channel"""
 
     CHANNEL_NAME_CURRENT = 'CHANNEL_NAME_CURRENT'
     CHANNEL_NAME_CUSTOM = 'CHANNEL_NAME_CUSTOM'
@@ -857,7 +856,7 @@ class DataSource:
 
     id: Optional[str] = None
     """Data source ID maps to the ID of the data source used by the resource and is distinct from the
-    warehouse ID. [Learn more]
+    warehouse ID. [Learn more]
 
     [Learn more]: https://docs.databricks.com/api/workspace/datasources/list"""
 
@@ -1391,8 +1390,9 @@ class ExecuteStatementRequest:
     """The SQL statement to execute. The statement can optionally be parameterized, see `parameters`."""
 
     warehouse_id: str
-    """Warehouse upon which to execute a statement. See also [What are SQL
-
+    """Warehouse upon which to execute a statement. See also [What are SQL warehouses?]
+
+    [What are SQL warehouses?]: https://docs.databricks.com/sql/admin/warehouse-type.html"""
 
     byte_limit: Optional[int] = None
     """Applies the given byte limit to the statement's result size. Byte counts are based on internal
@@ -2242,7 +2242,7 @@ class Query:
 
     data_source_id: Optional[str] = None
     """Data source ID maps to the ID of the data source used by the resource and is distinct from the
-    warehouse ID. [Learn more]
+    warehouse ID. [Learn more]
 
     [Learn more]: https://docs.databricks.com/api/workspace/datasources/list"""
 
@@ -2375,7 +2375,7 @@ class Query:
 class QueryEditContent:
     data_source_id: Optional[str] = None
     """Data source ID maps to the ID of the data source used by the resource and is distinct from the
-    warehouse ID. [Learn more]
+    warehouse ID. [Learn more]
 
     [Learn more]: https://docs.databricks.com/api/workspace/datasources/list"""
 
@@ -2473,7 +2473,7 @@ class QueryInfo:
     """Channel information for the SQL warehouse at the time of query execution"""
 
     duration: Optional[int] = None
-    """Total execution time of the
+    """Total execution time of the statement ( excluding result fetch time )."""
 
     endpoint_id: Optional[str] = None
     """Alias for `warehouse_id`."""
@@ -2808,7 +2808,7 @@ class QueryOptions:
 class QueryPostContent:
     data_source_id: Optional[str] = None
     """Data source ID maps to the ID of the data source used by the resource and is distinct from the
-    warehouse ID. [Learn more]
+    warehouse ID. [Learn more]
 
     [Learn more]: https://docs.databricks.com/api/workspace/datasources/list"""
 
@@ -3274,8 +3274,10 @@ class StatementParameterListItem:
     type: Optional[str] = None
     """The data type, given as a string. For example: `INT`, `STRING`, `DECIMAL(10,2)`. If no type is
     given the type is assumed to be `STRING`. Complex types, such as `ARRAY`, `MAP`, and `STRUCT`
-    are not supported. For valid types, refer to the section [Data
-
+    are not supported. For valid types, refer to the section [Data types] of the SQL language
+    reference.
+
+    [Data types]: https://docs.databricks.com/sql/language-manual/functions/cast.html"""
 
     value: Optional[str] = None
     """The value to substitute, represented as a string. If omitted, the value is interpreted as NULL."""
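A statement parameter is a name/value/type triple, with the value always passed as a string. A minimal sketch of building one for a named parameter marker (the statement and names are illustrative):

```python
from databricks.sdk.service.sql import StatementParameterListItem

# For a statement like "SELECT * FROM samples.nyctaxi.trips LIMIT :row_limit"
limit_param = StatementParameterListItem(name="row_limit", value="100", type="INT")

# as_dict() gives the JSON shape placed in the `parameters` array of an
# Execute Statement request (key order may vary).
print(limit_param.as_dict())  # {'name': 'row_limit', 'type': 'INT', 'value': '100'}
```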
@@ -3960,7 +3962,11 @@ class AlertsAPI:
     """The alerts API can be used to perform CRUD operations on alerts. An alert is a Databricks SQL object that
     periodically runs a query, evaluates a condition of its result, and notifies one or more users and/or
     notification destinations if the condition was met. Alerts can be scheduled using the `sql_task` type of
-    the Jobs API, e.g. :method:jobs/create.
+    the Jobs API, e.g. :method:jobs/create.
+
+    **Note**: A new version of the Databricks SQL API will soon be available. [Learn more]
+
+    [Learn more]: https://docs.databricks.com/en/whats-coming.html#updates-to-the-databricks-sql-api-for-managing-queries-alerts-and-data-sources"""
 
     def __init__(self, api_client):
         self._api = api_client
@@ -3977,6 +3983,10 @@ class AlertsAPI:
         Creates an alert. An alert is a Databricks SQL object that periodically runs a query, evaluates a
         condition of its result, and notifies users or notification destinations if the condition was met.
 
+        **Note**: A new version of the Databricks SQL API will soon be available. [Learn more]
+
+        [Learn more]: https://docs.databricks.com/en/whats-coming.html#updates-to-the-databricks-sql-api-for-managing-queries-alerts-and-data-sources
+
         :param name: str
           Name of the alert.
         :param options: :class:`AlertOptions`
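For reference, a minimal sketch of creating such an alert against an existing query; the query id and threshold are placeholders, and the `AlertOptions` fields used here (`column`, `op`, `value`) are assumptions about that class rather than something shown in this diff:

```python
from databricks.sdk import WorkspaceClient
from databricks.sdk.service.sql import AlertOptions

w = WorkspaceClient()

# Fire when the `total_rows` column of the query result exceeds 1000.
alert = w.alerts.create(name="demo-alert",
                        options=AlertOptions(column="total_rows", op=">", value="1000"),
                        query_id="00000000-0000-0000-0000-000000000000")  # placeholder query id
print(alert.id, alert.state)
```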
@@ -4005,9 +4015,13 @@ class AlertsAPI:
     def delete(self, alert_id: str):
         """Delete an alert.
 
-        Deletes an alert. Deleted alerts are no longer accessible and cannot be restored. **Note
+        Deletes an alert. Deleted alerts are no longer accessible and cannot be restored. **Note**: Unlike
         queries and dashboards, alerts cannot be moved to the trash.
 
+        **Note**: A new version of the Databricks SQL API will soon be available. [Learn more]
+
+        [Learn more]: https://docs.databricks.com/en/whats-coming.html#updates-to-the-databricks-sql-api-for-managing-queries-alerts-and-data-sources
+
         :param alert_id: str
 
 
@@ -4022,6 +4036,10 @@ class AlertsAPI:
 
         Gets an alert.
 
+        **Note**: A new version of the Databricks SQL API will soon be available. [Learn more]
+
+        [Learn more]: https://docs.databricks.com/en/whats-coming.html#updates-to-the-databricks-sql-api-for-managing-queries-alerts-and-data-sources
+
         :param alert_id: str
 
         :returns: :class:`Alert`
@@ -4037,6 +4055,10 @@ class AlertsAPI:
 
         Gets a list of alerts.
 
+        **Note**: A new version of the Databricks SQL API will soon be available. [Learn more]
+
+        [Learn more]: https://docs.databricks.com/en/whats-coming.html#updates-to-the-databricks-sql-api-for-managing-queries-alerts-and-data-sources
+
         :returns: Iterator over :class:`Alert`
         """
 
@@ -4056,6 +4078,10 @@ class AlertsAPI:
 
         Updates an alert.
 
+        **Note**: A new version of the Databricks SQL API will soon be available. [Learn more]
+
+        [Learn more]: https://docs.databricks.com/en/whats-coming.html#updates-to-the-databricks-sql-api-for-managing-queries-alerts-and-data-sources
+
         :param alert_id: str
         :param name: str
           Name of the alert.
@@ -4257,8 +4283,8 @@ class DashboardsAPI:
 
         Fetch a paginated list of dashboard objects.
 
-
-        degradation, or a temporary ban
+        **Warning**: Calling this API concurrently 10 or more times could result in throttling, service
+        degradation, or a temporary ban.
 
         :param order: :class:`ListOrder` (optional)
           Name of dashboard attribute to order by.
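Since the SDK's `list` helper pages for you, the simplest way to stay under that concurrency warning is to consume the iterator in a single sequential loop rather than fanning requests out across threads. A small sketch, assuming a configured `WorkspaceClient`:

```python
from databricks.sdk import WorkspaceClient
from databricks.sdk.service import sql

w = WorkspaceClient()

# One sequential pass over all dashboards; avoid issuing these calls in parallel.
for dashboard in w.dashboards.list(order=sql.ListOrder.NAME, page_size=100):
    print(dashboard.id, dashboard.name)
```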
@@ -4352,7 +4378,11 @@ class DataSourcesAPI:
 
     This API does not support searches. It returns the full list of SQL warehouses in your workspace. We
     advise you to use any text editor, REST client, or `grep` to search the response from this API for the
-    name of your SQL warehouse as it appears in Databricks SQL.
+    name of your SQL warehouse as it appears in Databricks SQL.
+
+    **Note**: A new version of the Databricks SQL API will soon be available. [Learn more]
+
+    [Learn more]: https://docs.databricks.com/en/whats-coming.html#updates-to-the-databricks-sql-api-for-managing-queries-alerts-and-data-sources"""
 
     def __init__(self, api_client):
         self._api = api_client
@@ -4364,6 +4394,10 @@ class DataSourcesAPI:
         API response are enumerated for clarity. However, you need only a SQL warehouse's `id` to create new
         queries against it.
 
+        **Note**: A new version of the Databricks SQL API will soon be available. [Learn more]
+
+        [Learn more]: https://docs.databricks.com/en/whats-coming.html#updates-to-the-databricks-sql-api-for-managing-queries-alerts-and-data-sources
+
         :returns: Iterator over :class:`DataSource`
         """
 
@@ -4384,7 +4418,11 @@ class DbsqlPermissionsAPI:
 
     - `CAN_RUN`: Allows read access and run access (superset of `CAN_VIEW`)
 
-    - `CAN_MANAGE`: Allows all actions: read, run, edit, delete, modify permissions (superset of `CAN_RUN`)
+    - `CAN_MANAGE`: Allows all actions: read, run, edit, delete, modify permissions (superset of `CAN_RUN`)
+
+    **Note**: A new version of the Databricks SQL API will soon be available. [Learn more]
+
+    [Learn more]: https://docs.databricks.com/en/whats-coming.html#updates-to-the-databricks-sql-api-for-managing-queries-alerts-and-data-sources"""
 
     def __init__(self, api_client):
         self._api = api_client
@@ -4394,6 +4432,10 @@ class DbsqlPermissionsAPI:
 
         Gets a JSON representation of the access control list (ACL) for a specified object.
 
+        **Note**: A new version of the Databricks SQL API will soon be available. [Learn more]
+
+        [Learn more]: https://docs.databricks.com/en/whats-coming.html#updates-to-the-databricks-sql-api-for-managing-queries-alerts-and-data-sources
+
         :param object_type: :class:`ObjectTypePlural`
           The type of object permissions to check.
         :param object_id: str
@@ -4419,6 +4461,10 @@ class DbsqlPermissionsAPI:
         Sets the access control list (ACL) for a specified object. This operation will complete rewrite the
         ACL.
 
+        **Note**: A new version of the Databricks SQL API will soon be available. [Learn more]
+
+        [Learn more]: https://docs.databricks.com/en/whats-coming.html#updates-to-the-databricks-sql-api-for-managing-queries-alerts-and-data-sources
+
         :param object_type: :class:`ObjectTypePlural`
           The type of object permission to set.
         :param object_id: str
@@ -4447,6 +4493,10 @@ class DbsqlPermissionsAPI:
 
         Transfers ownership of a dashboard, query, or alert to an active user. Requires an admin API key.
 
+        **Note**: A new version of the Databricks SQL API will soon be available. [Learn more]
+
+        [Learn more]: https://docs.databricks.com/en/whats-coming.html#updates-to-the-databricks-sql-api-for-managing-queries-alerts-and-data-sources
+
         :param object_type: :class:`OwnableObjectType`
           The type of object on which to change ownership.
         :param object_id: :class:`TransferOwnershipObjectId`
@@ -4470,7 +4520,11 @@ class DbsqlPermissionsAPI:
 class QueriesAPI:
     """These endpoints are used for CRUD operations on query definitions. Query definitions include the target
     SQL warehouse, query text, name, description, tags, parameters, and visualizations. Queries can be
-    scheduled using the `sql_task` type of the Jobs API, e.g. :method:jobs/create.
+    scheduled using the `sql_task` type of the Jobs API, e.g. :method:jobs/create.
+
+    **Note**: A new version of the Databricks SQL API will soon be available. [Learn more]
+
+    [Learn more]: https://docs.databricks.com/en/whats-coming.html#updates-to-the-databricks-sql-api-for-managing-queries-alerts-and-data-sources"""
 
     def __init__(self, api_client):
         self._api = api_client
@@ -4496,9 +4550,13 @@ class QueriesAPI:
 
         **Note**: You cannot add a visualization until you create the query.
 
+        **Note**: A new version of the Databricks SQL API will soon be available. [Learn more]
+
+        [Learn more]: https://docs.databricks.com/en/whats-coming.html#updates-to-the-databricks-sql-api-for-managing-queries-alerts-and-data-sources
+
         :param data_source_id: str (optional)
           Data source ID maps to the ID of the data source used by the resource and is distinct from the
-          warehouse ID. [Learn more]
+          warehouse ID. [Learn more]
 
           [Learn more]: https://docs.databricks.com/api/workspace/datasources/list
         :param description: str (optional)
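A minimal sketch of creating a query through this legacy API; the SQL text is illustrative, and `data_source_id` is looked up from the data sources listing rather than hard-coded (it is not the warehouse id):

```python
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

# Pick a data source; its id is what `data_source_id` expects.
src = next(iter(w.data_sources.list()))

q = w.queries.create(name="demo-query",
                     description="created via the SDK",
                     data_source_id=src.id,
                     query="SELECT 42 AS answer")
print(q.id)
```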
@@ -4540,6 +4598,10 @@ class QueriesAPI:
         Moves a query to the trash. Trashed queries immediately disappear from searches and list views, and
         they cannot be used for alerts. The trash is deleted after 30 days.
 
+        **Note**: A new version of the Databricks SQL API will soon be available. [Learn more]
+
+        [Learn more]: https://docs.databricks.com/en/whats-coming.html#updates-to-the-databricks-sql-api-for-managing-queries-alerts-and-data-sources
+
         :param query_id: str
 
 
@@ -4555,6 +4617,10 @@ class QueriesAPI:
         Retrieve a query object definition along with contextual permissions information about the currently
         authenticated user.
 
+        **Note**: A new version of the Databricks SQL API will soon be available. [Learn more]
+
+        [Learn more]: https://docs.databricks.com/en/whats-coming.html#updates-to-the-databricks-sql-api-for-managing-queries-alerts-and-data-sources
+
         :param query_id: str
 
         :returns: :class:`Query`
@@ -4575,8 +4641,12 @@ class QueriesAPI:
 
         Gets a list of queries. Optionally, this list can be filtered by a search term.
 
-
-        degradation, or a temporary ban
+        **Warning**: Calling this API concurrently 10 or more times could result in throttling, service
+        degradation, or a temporary ban.
+
+        **Note**: A new version of the Databricks SQL API will soon be available. [Learn more]
+
+        [Learn more]: https://docs.databricks.com/en/whats-coming.html#updates-to-the-databricks-sql-api-for-managing-queries-alerts-and-data-sources
 
         :param order: str (optional)
           Name of query attribute to order by. Default sort order is ascending. Append a dash (`-`) to order
@@ -4631,6 +4701,10 @@ class QueriesAPI:
         Restore a query that has been moved to the trash. A restored query appears in list views and searches.
         You can use restored queries for alerts.
 
+        **Note**: A new version of the Databricks SQL API will soon be available. [Learn more]
+
+        [Learn more]: https://docs.databricks.com/en/whats-coming.html#updates-to-the-databricks-sql-api-for-managing-queries-alerts-and-data-sources
+
         :param query_id: str
 
 
@@ -4656,10 +4730,14 @@ class QueriesAPI:
 
         **Note**: You cannot undo this operation.
 
+        **Note**: A new version of the Databricks SQL API will soon be available. [Learn more]
+
+        [Learn more]: https://docs.databricks.com/en/whats-coming.html#updates-to-the-databricks-sql-api-for-managing-queries-alerts-and-data-sources
+
         :param query_id: str
         :param data_source_id: str (optional)
           Data source ID maps to the ID of the data source used by the resource and is distinct from the
-          warehouse ID. [Learn more]
+          warehouse ID. [Learn more]
 
           [Learn more]: https://docs.databricks.com/api/workspace/datasources/list
         :param description: str (optional)
@@ -4961,8 +5039,9 @@ class StatementExecutionAPI:
         :param statement: str
           The SQL statement to execute. The statement can optionally be parameterized, see `parameters`.
         :param warehouse_id: str
-          Warehouse upon which to execute a statement. See also [What are SQL
-
+          Warehouse upon which to execute a statement. See also [What are SQL warehouses?]
+
+          [What are SQL warehouses?]: https://docs.databricks.com/sql/admin/warehouse-type.html
         :param byte_limit: int (optional)
           Applies the given byte limit to the statement's result size. Byte counts are based on internal data
           representations and might not match the final size in the requested `format`. If the result was
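Tying the statement-execution pieces together, a minimal sketch of running a parameterized statement on a warehouse; the warehouse id is a placeholder and the table name is illustrative:

```python
from databricks.sdk import WorkspaceClient
from databricks.sdk.service.sql import StatementParameterListItem

w = WorkspaceClient()

resp = w.statement_execution.execute_statement(
    statement="SELECT * FROM samples.nyctaxi.trips LIMIT :row_limit",
    warehouse_id="1234567890abcdef",  # placeholder warehouse id
    parameters=[StatementParameterListItem(name="row_limit", value="10", type="INT")],
    byte_limit=10 * 1024 * 1024,  # cap the result at roughly 10 MiB
)
print(resp.statement_id, resp.status.state)
```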
|