databricks-sdk 0.28.0__py3-none-any.whl → 0.30.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of databricks-sdk might be problematic. Click here for more details.
- databricks/sdk/__init__.py +74 -22
- databricks/sdk/config.py +89 -48
- databricks/sdk/core.py +38 -9
- databricks/sdk/credentials_provider.py +134 -57
- databricks/sdk/data_plane.py +65 -0
- databricks/sdk/dbutils.py +81 -3
- databricks/sdk/mixins/files.py +12 -4
- databricks/sdk/oauth.py +8 -6
- databricks/sdk/service/apps.py +977 -0
- databricks/sdk/service/billing.py +602 -218
- databricks/sdk/service/catalog.py +263 -62
- databricks/sdk/service/compute.py +515 -94
- databricks/sdk/service/dashboards.py +1310 -2
- databricks/sdk/service/iam.py +99 -88
- databricks/sdk/service/jobs.py +159 -166
- databricks/sdk/service/marketplace.py +74 -58
- databricks/sdk/service/oauth2.py +149 -70
- databricks/sdk/service/pipelines.py +73 -53
- databricks/sdk/service/serving.py +332 -694
- databricks/sdk/service/settings.py +424 -4
- databricks/sdk/service/sharing.py +235 -26
- databricks/sdk/service/sql.py +2484 -553
- databricks/sdk/service/vectorsearch.py +75 -0
- databricks/sdk/useragent.py +144 -0
- databricks/sdk/version.py +1 -1
- {databricks_sdk-0.28.0.dist-info → databricks_sdk-0.30.0.dist-info}/METADATA +37 -16
- {databricks_sdk-0.28.0.dist-info → databricks_sdk-0.30.0.dist-info}/RECORD +31 -28
- {databricks_sdk-0.28.0.dist-info → databricks_sdk-0.30.0.dist-info}/WHEEL +1 -1
- {databricks_sdk-0.28.0.dist-info → databricks_sdk-0.30.0.dist-info}/LICENSE +0 -0
- {databricks_sdk-0.28.0.dist-info → databricks_sdk-0.30.0.dist-info}/NOTICE +0 -0
- {databricks_sdk-0.28.0.dist-info → databricks_sdk-0.30.0.dist-info}/top_level.txt +0 -0
databricks/sdk/service/sql.py
CHANGED
|
@@ -46,69 +46,206 @@ class AccessControl:
|
|
|
46
46
|
|
|
47
47
|
@dataclass
|
|
48
48
|
class Alert:
|
|
49
|
-
|
|
50
|
-
"""
|
|
49
|
+
condition: Optional[AlertCondition] = None
|
|
50
|
+
"""Trigger conditions of the alert."""
|
|
51
51
|
|
|
52
|
-
|
|
53
|
-
"""
|
|
52
|
+
create_time: Optional[str] = None
|
|
53
|
+
"""The timestamp indicating when the alert was created."""
|
|
54
54
|
|
|
55
|
-
|
|
56
|
-
"""
|
|
55
|
+
custom_body: Optional[str] = None
|
|
56
|
+
"""Custom body of alert notification, if it exists. See [here] for custom templating instructions.
|
|
57
|
+
|
|
58
|
+
[here]: https://docs.databricks.com/sql/user/alerts/index.html"""
|
|
57
59
|
|
|
58
|
-
|
|
59
|
-
"""
|
|
60
|
+
custom_subject: Optional[str] = None
|
|
61
|
+
"""Custom subject of alert notification, if it exists. This can include email subject entries and
|
|
62
|
+
Slack notification headers, for example. See [here] for custom templating instructions.
|
|
63
|
+
|
|
64
|
+
[here]: https://docs.databricks.com/sql/user/alerts/index.html"""
|
|
60
65
|
|
|
61
|
-
|
|
62
|
-
"""
|
|
66
|
+
display_name: Optional[str] = None
|
|
67
|
+
"""The display name of the alert."""
|
|
63
68
|
|
|
64
|
-
|
|
65
|
-
"""
|
|
69
|
+
id: Optional[str] = None
|
|
70
|
+
"""UUID identifying the alert."""
|
|
66
71
|
|
|
67
|
-
|
|
72
|
+
lifecycle_state: Optional[LifecycleState] = None
|
|
73
|
+
"""The workspace state of the alert. Used for tracking trashed status."""
|
|
68
74
|
|
|
69
|
-
|
|
70
|
-
"""
|
|
71
|
-
|
|
75
|
+
owner_user_name: Optional[str] = None
|
|
76
|
+
"""The owner's username. This field is set to "Unavailable" if the user has been deleted."""
|
|
77
|
+
|
|
78
|
+
parent_path: Optional[str] = None
|
|
79
|
+
"""The workspace path of the folder containing the alert."""
|
|
80
|
+
|
|
81
|
+
query_id: Optional[str] = None
|
|
82
|
+
"""UUID of the query attached to the alert."""
|
|
83
|
+
|
|
84
|
+
seconds_to_retrigger: Optional[int] = None
|
|
85
|
+
"""Number of seconds an alert must wait after being triggered to rearm itself. After rearming, it
|
|
86
|
+
can be triggered again. If 0 or not specified, the alert will not be triggered again."""
|
|
72
87
|
|
|
73
88
|
state: Optional[AlertState] = None
|
|
74
|
-
"""
|
|
75
|
-
|
|
89
|
+
"""Current state of the alert's trigger status. This field is set to UNKNOWN if the alert has not
|
|
90
|
+
yet been evaluated or ran into an error during the last evaluation."""
|
|
76
91
|
|
|
77
|
-
|
|
78
|
-
"""Timestamp when the alert was last
|
|
92
|
+
trigger_time: Optional[str] = None
|
|
93
|
+
"""Timestamp when the alert was last triggered, if the alert has been triggered before."""
|
|
79
94
|
|
|
80
|
-
|
|
95
|
+
update_time: Optional[str] = None
|
|
96
|
+
"""The timestamp indicating when the alert was updated."""
|
|
81
97
|
|
|
82
98
|
def as_dict(self) -> dict:
|
|
83
99
|
"""Serializes the Alert into a dictionary suitable for use as a JSON request body."""
|
|
84
100
|
body = {}
|
|
85
|
-
if self.
|
|
101
|
+
if self.condition: body['condition'] = self.condition.as_dict()
|
|
102
|
+
if self.create_time is not None: body['create_time'] = self.create_time
|
|
103
|
+
if self.custom_body is not None: body['custom_body'] = self.custom_body
|
|
104
|
+
if self.custom_subject is not None: body['custom_subject'] = self.custom_subject
|
|
105
|
+
if self.display_name is not None: body['display_name'] = self.display_name
|
|
86
106
|
if self.id is not None: body['id'] = self.id
|
|
87
|
-
if self.
|
|
88
|
-
if self.
|
|
89
|
-
if self.
|
|
90
|
-
if self.
|
|
91
|
-
if self.
|
|
92
|
-
if self.rearm is not None: body['rearm'] = self.rearm
|
|
107
|
+
if self.lifecycle_state is not None: body['lifecycle_state'] = self.lifecycle_state.value
|
|
108
|
+
if self.owner_user_name is not None: body['owner_user_name'] = self.owner_user_name
|
|
109
|
+
if self.parent_path is not None: body['parent_path'] = self.parent_path
|
|
110
|
+
if self.query_id is not None: body['query_id'] = self.query_id
|
|
111
|
+
if self.seconds_to_retrigger is not None: body['seconds_to_retrigger'] = self.seconds_to_retrigger
|
|
93
112
|
if self.state is not None: body['state'] = self.state.value
|
|
94
|
-
if self.
|
|
95
|
-
if self.
|
|
113
|
+
if self.trigger_time is not None: body['trigger_time'] = self.trigger_time
|
|
114
|
+
if self.update_time is not None: body['update_time'] = self.update_time
|
|
96
115
|
return body
|
|
97
116
|
|
|
98
117
|
@classmethod
|
|
99
118
|
def from_dict(cls, d: Dict[str, any]) -> Alert:
|
|
100
119
|
"""Deserializes the Alert from a dictionary."""
|
|
101
|
-
return cls(
|
|
120
|
+
return cls(condition=_from_dict(d, 'condition', AlertCondition),
|
|
121
|
+
create_time=d.get('create_time', None),
|
|
122
|
+
custom_body=d.get('custom_body', None),
|
|
123
|
+
custom_subject=d.get('custom_subject', None),
|
|
124
|
+
display_name=d.get('display_name', None),
|
|
102
125
|
id=d.get('id', None),
|
|
103
|
-
|
|
104
|
-
|
|
105
|
-
|
|
106
|
-
|
|
107
|
-
|
|
108
|
-
rearm=d.get('rearm', None),
|
|
126
|
+
lifecycle_state=_enum(d, 'lifecycle_state', LifecycleState),
|
|
127
|
+
owner_user_name=d.get('owner_user_name', None),
|
|
128
|
+
parent_path=d.get('parent_path', None),
|
|
129
|
+
query_id=d.get('query_id', None),
|
|
130
|
+
seconds_to_retrigger=d.get('seconds_to_retrigger', None),
|
|
109
131
|
state=_enum(d, 'state', AlertState),
|
|
110
|
-
|
|
111
|
-
|
|
132
|
+
trigger_time=d.get('trigger_time', None),
|
|
133
|
+
update_time=d.get('update_time', None))
|
|
134
|
+
|
|
135
|
+
|
|
136
|
+
@dataclass
|
|
137
|
+
class AlertCondition:
|
|
138
|
+
empty_result_state: Optional[AlertState] = None
|
|
139
|
+
"""Alert state if result is empty."""
|
|
140
|
+
|
|
141
|
+
op: Optional[AlertOperator] = None
|
|
142
|
+
"""Operator used for comparison in alert evaluation."""
|
|
143
|
+
|
|
144
|
+
operand: Optional[AlertConditionOperand] = None
|
|
145
|
+
"""Name of the column from the query result to use for comparison in alert evaluation."""
|
|
146
|
+
|
|
147
|
+
threshold: Optional[AlertConditionThreshold] = None
|
|
148
|
+
"""Threshold value used for comparison in alert evaluation."""
|
|
149
|
+
|
|
150
|
+
def as_dict(self) -> dict:
|
|
151
|
+
"""Serializes the AlertCondition into a dictionary suitable for use as a JSON request body."""
|
|
152
|
+
body = {}
|
|
153
|
+
if self.empty_result_state is not None: body['empty_result_state'] = self.empty_result_state.value
|
|
154
|
+
if self.op is not None: body['op'] = self.op.value
|
|
155
|
+
if self.operand: body['operand'] = self.operand.as_dict()
|
|
156
|
+
if self.threshold: body['threshold'] = self.threshold.as_dict()
|
|
157
|
+
return body
|
|
158
|
+
|
|
159
|
+
@classmethod
|
|
160
|
+
def from_dict(cls, d: Dict[str, any]) -> AlertCondition:
|
|
161
|
+
"""Deserializes the AlertCondition from a dictionary."""
|
|
162
|
+
return cls(empty_result_state=_enum(d, 'empty_result_state', AlertState),
|
|
163
|
+
op=_enum(d, 'op', AlertOperator),
|
|
164
|
+
operand=_from_dict(d, 'operand', AlertConditionOperand),
|
|
165
|
+
threshold=_from_dict(d, 'threshold', AlertConditionThreshold))
|
|
166
|
+
|
|
167
|
+
|
|
168
|
+
@dataclass
|
|
169
|
+
class AlertConditionOperand:
|
|
170
|
+
column: Optional[AlertOperandColumn] = None
|
|
171
|
+
|
|
172
|
+
def as_dict(self) -> dict:
|
|
173
|
+
"""Serializes the AlertConditionOperand into a dictionary suitable for use as a JSON request body."""
|
|
174
|
+
body = {}
|
|
175
|
+
if self.column: body['column'] = self.column.as_dict()
|
|
176
|
+
return body
|
|
177
|
+
|
|
178
|
+
@classmethod
|
|
179
|
+
def from_dict(cls, d: Dict[str, any]) -> AlertConditionOperand:
|
|
180
|
+
"""Deserializes the AlertConditionOperand from a dictionary."""
|
|
181
|
+
return cls(column=_from_dict(d, 'column', AlertOperandColumn))
|
|
182
|
+
|
|
183
|
+
|
|
184
|
+
@dataclass
|
|
185
|
+
class AlertConditionThreshold:
|
|
186
|
+
value: Optional[AlertOperandValue] = None
|
|
187
|
+
|
|
188
|
+
def as_dict(self) -> dict:
|
|
189
|
+
"""Serializes the AlertConditionThreshold into a dictionary suitable for use as a JSON request body."""
|
|
190
|
+
body = {}
|
|
191
|
+
if self.value: body['value'] = self.value.as_dict()
|
|
192
|
+
return body
|
|
193
|
+
|
|
194
|
+
@classmethod
|
|
195
|
+
def from_dict(cls, d: Dict[str, any]) -> AlertConditionThreshold:
|
|
196
|
+
"""Deserializes the AlertConditionThreshold from a dictionary."""
|
|
197
|
+
return cls(value=_from_dict(d, 'value', AlertOperandValue))
|
|
198
|
+
|
|
199
|
+
|
|
200
|
+
@dataclass
|
|
201
|
+
class AlertOperandColumn:
|
|
202
|
+
name: Optional[str] = None
|
|
203
|
+
|
|
204
|
+
def as_dict(self) -> dict:
|
|
205
|
+
"""Serializes the AlertOperandColumn into a dictionary suitable for use as a JSON request body."""
|
|
206
|
+
body = {}
|
|
207
|
+
if self.name is not None: body['name'] = self.name
|
|
208
|
+
return body
|
|
209
|
+
|
|
210
|
+
@classmethod
|
|
211
|
+
def from_dict(cls, d: Dict[str, any]) -> AlertOperandColumn:
|
|
212
|
+
"""Deserializes the AlertOperandColumn from a dictionary."""
|
|
213
|
+
return cls(name=d.get('name', None))
|
|
214
|
+
|
|
215
|
+
|
|
216
|
+
@dataclass
|
|
217
|
+
class AlertOperandValue:
|
|
218
|
+
bool_value: Optional[bool] = None
|
|
219
|
+
|
|
220
|
+
double_value: Optional[float] = None
|
|
221
|
+
|
|
222
|
+
string_value: Optional[str] = None
|
|
223
|
+
|
|
224
|
+
def as_dict(self) -> dict:
|
|
225
|
+
"""Serializes the AlertOperandValue into a dictionary suitable for use as a JSON request body."""
|
|
226
|
+
body = {}
|
|
227
|
+
if self.bool_value is not None: body['bool_value'] = self.bool_value
|
|
228
|
+
if self.double_value is not None: body['double_value'] = self.double_value
|
|
229
|
+
if self.string_value is not None: body['string_value'] = self.string_value
|
|
230
|
+
return body
|
|
231
|
+
|
|
232
|
+
@classmethod
|
|
233
|
+
def from_dict(cls, d: Dict[str, any]) -> AlertOperandValue:
|
|
234
|
+
"""Deserializes the AlertOperandValue from a dictionary."""
|
|
235
|
+
return cls(bool_value=d.get('bool_value', None),
|
|
236
|
+
double_value=d.get('double_value', None),
|
|
237
|
+
string_value=d.get('string_value', None))
|
|
238
|
+
|
|
239
|
+
|
|
240
|
+
class AlertOperator(Enum):
|
|
241
|
+
|
|
242
|
+
EQUAL = 'EQUAL'
|
|
243
|
+
GREATER_THAN = 'GREATER_THAN'
|
|
244
|
+
GREATER_THAN_OR_EQUAL = 'GREATER_THAN_OR_EQUAL'
|
|
245
|
+
IS_NULL = 'IS_NULL'
|
|
246
|
+
LESS_THAN = 'LESS_THAN'
|
|
247
|
+
LESS_THAN_OR_EQUAL = 'LESS_THAN_OR_EQUAL'
|
|
248
|
+
NOT_EQUAL = 'NOT_EQUAL'
|
|
112
249
|
|
|
113
250
|
|
|
114
251
|
@dataclass
|
|
@@ -182,7 +319,7 @@ class AlertQuery:
|
|
|
182
319
|
|
|
183
320
|
data_source_id: Optional[str] = None
|
|
184
321
|
"""Data source ID maps to the ID of the data source used by the resource and is distinct from the
|
|
185
|
-
warehouse ID. [Learn more]
|
|
322
|
+
warehouse ID. [Learn more]
|
|
186
323
|
|
|
187
324
|
[Learn more]: https://docs.databricks.com/api/workspace/datasources/list"""
|
|
188
325
|
|
|
@@ -259,12 +396,10 @@ class AlertQuery:
|
|
|
259
396
|
|
|
260
397
|
|
|
261
398
|
class AlertState(Enum):
|
|
262
|
-
"""State of the alert. Possible values are: `unknown` (yet to be evaluated), `triggered` (evaluated
|
|
263
|
-
and fulfilled trigger conditions), or `ok` (evaluated and did not fulfill trigger conditions)."""
|
|
264
399
|
|
|
265
|
-
OK = '
|
|
266
|
-
TRIGGERED = '
|
|
267
|
-
UNKNOWN = '
|
|
400
|
+
OK = 'OK'
|
|
401
|
+
TRIGGERED = 'TRIGGERED'
|
|
402
|
+
UNKNOWN = 'UNKNOWN'
|
|
268
403
|
|
|
269
404
|
|
|
270
405
|
@dataclass
|
|
@@ -338,10 +473,10 @@ class Channel:
|
|
|
338
473
|
|
|
339
474
|
@dataclass
|
|
340
475
|
class ChannelInfo:
|
|
341
|
-
"""
|
|
476
|
+
"""Details about a Channel."""
|
|
342
477
|
|
|
343
478
|
dbsql_version: Optional[str] = None
|
|
344
|
-
"""
|
|
479
|
+
"""DB SQL Version the Channel is mapped to."""
|
|
345
480
|
|
|
346
481
|
name: Optional[ChannelName] = None
|
|
347
482
|
"""Name of the channel"""
|
|
@@ -368,6 +503,29 @@ class ChannelName(Enum):
|
|
|
368
503
|
CHANNEL_NAME_UNSPECIFIED = 'CHANNEL_NAME_UNSPECIFIED'
|
|
369
504
|
|
|
370
505
|
|
|
506
|
+
@dataclass
|
|
507
|
+
class ClientCallContext:
|
|
508
|
+
"""Client code that triggered the request"""
|
|
509
|
+
|
|
510
|
+
file_name: Optional[EncodedText] = None
|
|
511
|
+
"""File name that contains the last line that triggered the request."""
|
|
512
|
+
|
|
513
|
+
line_number: Optional[int] = None
|
|
514
|
+
"""Last line number within a file or notebook cell that triggered the request."""
|
|
515
|
+
|
|
516
|
+
def as_dict(self) -> dict:
|
|
517
|
+
"""Serializes the ClientCallContext into a dictionary suitable for use as a JSON request body."""
|
|
518
|
+
body = {}
|
|
519
|
+
if self.file_name: body['file_name'] = self.file_name.as_dict()
|
|
520
|
+
if self.line_number is not None: body['line_number'] = self.line_number
|
|
521
|
+
return body
|
|
522
|
+
|
|
523
|
+
@classmethod
|
|
524
|
+
def from_dict(cls, d: Dict[str, any]) -> ClientCallContext:
|
|
525
|
+
"""Deserializes the ClientCallContext from a dictionary."""
|
|
526
|
+
return cls(file_name=_from_dict(d, 'file_name', EncodedText), line_number=d.get('line_number', None))
|
|
527
|
+
|
|
528
|
+
|
|
371
529
|
@dataclass
|
|
372
530
|
class ColumnInfo:
|
|
373
531
|
name: Optional[str] = None
|
|
@@ -442,6 +600,68 @@ class ColumnInfoTypeName(Enum):
|
|
|
442
600
|
USER_DEFINED_TYPE = 'USER_DEFINED_TYPE'
|
|
443
601
|
|
|
444
602
|
|
|
603
|
+
@dataclass
|
|
604
|
+
class ContextFilter:
|
|
605
|
+
dbsql_alert_id: Optional[str] = None
|
|
606
|
+
"""Databricks SQL Alert id"""
|
|
607
|
+
|
|
608
|
+
dbsql_dashboard_id: Optional[str] = None
|
|
609
|
+
"""Databricks SQL Dashboard id"""
|
|
610
|
+
|
|
611
|
+
dbsql_query_id: Optional[str] = None
|
|
612
|
+
"""Databricks SQL Query id"""
|
|
613
|
+
|
|
614
|
+
dbsql_session_id: Optional[str] = None
|
|
615
|
+
"""Databricks SQL Query session id"""
|
|
616
|
+
|
|
617
|
+
job_id: Optional[str] = None
|
|
618
|
+
"""Databricks Workflows id"""
|
|
619
|
+
|
|
620
|
+
job_run_id: Optional[str] = None
|
|
621
|
+
"""Databricks Workflows task run id"""
|
|
622
|
+
|
|
623
|
+
lakeview_dashboard_id: Optional[str] = None
|
|
624
|
+
"""Databricks Lakeview Dashboard id"""
|
|
625
|
+
|
|
626
|
+
notebook_cell_run_id: Optional[str] = None
|
|
627
|
+
"""Databricks Notebook runnableCommandId"""
|
|
628
|
+
|
|
629
|
+
notebook_id: Optional[str] = None
|
|
630
|
+
"""Databricks Notebook id"""
|
|
631
|
+
|
|
632
|
+
statement_ids: Optional[List[str]] = None
|
|
633
|
+
"""Databricks Query History statement ids."""
|
|
634
|
+
|
|
635
|
+
def as_dict(self) -> dict:
|
|
636
|
+
"""Serializes the ContextFilter into a dictionary suitable for use as a JSON request body."""
|
|
637
|
+
body = {}
|
|
638
|
+
if self.dbsql_alert_id is not None: body['dbsql_alert_id'] = self.dbsql_alert_id
|
|
639
|
+
if self.dbsql_dashboard_id is not None: body['dbsql_dashboard_id'] = self.dbsql_dashboard_id
|
|
640
|
+
if self.dbsql_query_id is not None: body['dbsql_query_id'] = self.dbsql_query_id
|
|
641
|
+
if self.dbsql_session_id is not None: body['dbsql_session_id'] = self.dbsql_session_id
|
|
642
|
+
if self.job_id is not None: body['job_id'] = self.job_id
|
|
643
|
+
if self.job_run_id is not None: body['job_run_id'] = self.job_run_id
|
|
644
|
+
if self.lakeview_dashboard_id is not None: body['lakeview_dashboard_id'] = self.lakeview_dashboard_id
|
|
645
|
+
if self.notebook_cell_run_id is not None: body['notebook_cell_run_id'] = self.notebook_cell_run_id
|
|
646
|
+
if self.notebook_id is not None: body['notebook_id'] = self.notebook_id
|
|
647
|
+
if self.statement_ids: body['statement_ids'] = [v for v in self.statement_ids]
|
|
648
|
+
return body
|
|
649
|
+
|
|
650
|
+
@classmethod
|
|
651
|
+
def from_dict(cls, d: Dict[str, any]) -> ContextFilter:
|
|
652
|
+
"""Deserializes the ContextFilter from a dictionary."""
|
|
653
|
+
return cls(dbsql_alert_id=d.get('dbsql_alert_id', None),
|
|
654
|
+
dbsql_dashboard_id=d.get('dbsql_dashboard_id', None),
|
|
655
|
+
dbsql_query_id=d.get('dbsql_query_id', None),
|
|
656
|
+
dbsql_session_id=d.get('dbsql_session_id', None),
|
|
657
|
+
job_id=d.get('job_id', None),
|
|
658
|
+
job_run_id=d.get('job_run_id', None),
|
|
659
|
+
lakeview_dashboard_id=d.get('lakeview_dashboard_id', None),
|
|
660
|
+
notebook_cell_run_id=d.get('notebook_cell_run_id', None),
|
|
661
|
+
notebook_id=d.get('notebook_id', None),
|
|
662
|
+
statement_ids=d.get('statement_ids', None))
|
|
663
|
+
|
|
664
|
+
|
|
445
665
|
@dataclass
|
|
446
666
|
class CreateAlert:
|
|
447
667
|
name: str
|
|
@@ -481,99 +701,305 @@ class CreateAlert:
|
|
|
481
701
|
|
|
482
702
|
|
|
483
703
|
@dataclass
|
|
484
|
-
class
|
|
485
|
-
|
|
486
|
-
"""The amount of time in minutes that a SQL warehouse must be idle (i.e., no RUNNING queries)
|
|
487
|
-
before it is automatically stopped.
|
|
488
|
-
|
|
489
|
-
Supported values: - Must be == 0 or >= 10 mins - 0 indicates no autostop.
|
|
490
|
-
|
|
491
|
-
Defaults to 120 mins"""
|
|
704
|
+
class CreateAlertRequest:
|
|
705
|
+
alert: Optional[CreateAlertRequestAlert] = None
|
|
492
706
|
|
|
493
|
-
|
|
494
|
-
|
|
707
|
+
def as_dict(self) -> dict:
|
|
708
|
+
"""Serializes the CreateAlertRequest into a dictionary suitable for use as a JSON request body."""
|
|
709
|
+
body = {}
|
|
710
|
+
if self.alert: body['alert'] = self.alert.as_dict()
|
|
711
|
+
return body
|
|
495
712
|
|
|
496
|
-
|
|
497
|
-
|
|
498
|
-
|
|
499
|
-
|
|
500
|
-
|
|
501
|
-
Supported values: - 2X-Small - X-Small - Small - Medium - Large - X-Large - 2X-Large - 3X-Large
|
|
502
|
-
- 4X-Large"""
|
|
713
|
+
@classmethod
|
|
714
|
+
def from_dict(cls, d: Dict[str, any]) -> CreateAlertRequest:
|
|
715
|
+
"""Deserializes the CreateAlertRequest from a dictionary."""
|
|
716
|
+
return cls(alert=_from_dict(d, 'alert', CreateAlertRequestAlert))
|
|
503
717
|
|
|
504
|
-
creator_name: Optional[str] = None
|
|
505
|
-
"""warehouse creator name"""
|
|
506
718
|
|
|
507
|
-
|
|
508
|
-
|
|
719
|
+
@dataclass
|
|
720
|
+
class CreateAlertRequestAlert:
|
|
721
|
+
condition: Optional[AlertCondition] = None
|
|
722
|
+
"""Trigger conditions of the alert."""
|
|
723
|
+
|
|
724
|
+
custom_body: Optional[str] = None
|
|
725
|
+
"""Custom body of alert notification, if it exists. See [here] for custom templating instructions.
|
|
509
726
|
|
|
510
|
-
|
|
727
|
+
[here]: https://docs.databricks.com/sql/user/alerts/index.html"""
|
|
511
728
|
|
|
512
|
-
|
|
513
|
-
"""
|
|
729
|
+
custom_subject: Optional[str] = None
|
|
730
|
+
"""Custom subject of alert notification, if it exists. This can include email subject entries and
|
|
731
|
+
Slack notification headers, for example. See [here] for custom templating instructions.
|
|
732
|
+
|
|
733
|
+
[here]: https://docs.databricks.com/sql/user/alerts/index.html"""
|
|
514
734
|
|
|
515
|
-
|
|
516
|
-
"""
|
|
735
|
+
display_name: Optional[str] = None
|
|
736
|
+
"""The display name of the alert."""
|
|
517
737
|
|
|
518
|
-
|
|
519
|
-
"""
|
|
520
|
-
|
|
521
|
-
Supported values: - Must be >= min_num_clusters - Must be <= 30.
|
|
522
|
-
|
|
523
|
-
Defaults to min_clusters if unset."""
|
|
738
|
+
parent_path: Optional[str] = None
|
|
739
|
+
"""The workspace path of the folder containing the alert."""
|
|
524
740
|
|
|
525
|
-
|
|
526
|
-
"""
|
|
527
|
-
this will ensure that a larger number of clusters are always running and therefore may reduce
|
|
528
|
-
the cold start time for new queries. This is similar to reserved vs. revocable cores in a
|
|
529
|
-
resource manager.
|
|
530
|
-
|
|
531
|
-
Supported values: - Must be > 0 - Must be <= min(max_num_clusters, 30)
|
|
532
|
-
|
|
533
|
-
Defaults to 1"""
|
|
741
|
+
query_id: Optional[str] = None
|
|
742
|
+
"""UUID of the query attached to the alert."""
|
|
534
743
|
|
|
535
|
-
|
|
536
|
-
"""
|
|
537
|
-
|
|
538
|
-
Supported values: - Must be unique within an org. - Must be less than 100 characters."""
|
|
744
|
+
seconds_to_retrigger: Optional[int] = None
|
|
745
|
+
"""Number of seconds an alert must wait after being triggered to rearm itself. After rearming, it
|
|
746
|
+
can be triggered again. If 0 or not specified, the alert will not be triggered again."""
|
|
539
747
|
|
|
540
|
-
|
|
541
|
-
|
|
748
|
+
def as_dict(self) -> dict:
|
|
749
|
+
"""Serializes the CreateAlertRequestAlert into a dictionary suitable for use as a JSON request body."""
|
|
750
|
+
body = {}
|
|
751
|
+
if self.condition: body['condition'] = self.condition.as_dict()
|
|
752
|
+
if self.custom_body is not None: body['custom_body'] = self.custom_body
|
|
753
|
+
if self.custom_subject is not None: body['custom_subject'] = self.custom_subject
|
|
754
|
+
if self.display_name is not None: body['display_name'] = self.display_name
|
|
755
|
+
if self.parent_path is not None: body['parent_path'] = self.parent_path
|
|
756
|
+
if self.query_id is not None: body['query_id'] = self.query_id
|
|
757
|
+
if self.seconds_to_retrigger is not None: body['seconds_to_retrigger'] = self.seconds_to_retrigger
|
|
758
|
+
return body
|
|
542
759
|
|
|
543
|
-
|
|
544
|
-
|
|
545
|
-
|
|
546
|
-
|
|
547
|
-
|
|
760
|
+
@classmethod
|
|
761
|
+
def from_dict(cls, d: Dict[str, any]) -> CreateAlertRequestAlert:
|
|
762
|
+
"""Deserializes the CreateAlertRequestAlert from a dictionary."""
|
|
763
|
+
return cls(condition=_from_dict(d, 'condition', AlertCondition),
|
|
764
|
+
custom_body=d.get('custom_body', None),
|
|
765
|
+
custom_subject=d.get('custom_subject', None),
|
|
766
|
+
display_name=d.get('display_name', None),
|
|
767
|
+
parent_path=d.get('parent_path', None),
|
|
768
|
+
query_id=d.get('query_id', None),
|
|
769
|
+
seconds_to_retrigger=d.get('seconds_to_retrigger', None))
|
|
548
770
|
|
|
549
|
-
|
|
550
|
-
|
|
551
|
-
|
|
771
|
+
|
|
772
|
+
@dataclass
|
|
773
|
+
class CreateQueryRequest:
|
|
774
|
+
query: Optional[CreateQueryRequestQuery] = None
|
|
552
775
|
|
|
553
776
|
def as_dict(self) -> dict:
|
|
554
|
-
"""Serializes the
|
|
777
|
+
"""Serializes the CreateQueryRequest into a dictionary suitable for use as a JSON request body."""
|
|
555
778
|
body = {}
|
|
556
|
-
if self.
|
|
557
|
-
if self.channel: body['channel'] = self.channel.as_dict()
|
|
558
|
-
if self.cluster_size is not None: body['cluster_size'] = self.cluster_size
|
|
559
|
-
if self.creator_name is not None: body['creator_name'] = self.creator_name
|
|
560
|
-
if self.enable_photon is not None: body['enable_photon'] = self.enable_photon
|
|
561
|
-
if self.enable_serverless_compute is not None:
|
|
562
|
-
body['enable_serverless_compute'] = self.enable_serverless_compute
|
|
563
|
-
if self.instance_profile_arn is not None: body['instance_profile_arn'] = self.instance_profile_arn
|
|
564
|
-
if self.max_num_clusters is not None: body['max_num_clusters'] = self.max_num_clusters
|
|
565
|
-
if self.min_num_clusters is not None: body['min_num_clusters'] = self.min_num_clusters
|
|
566
|
-
if self.name is not None: body['name'] = self.name
|
|
567
|
-
if self.spot_instance_policy is not None:
|
|
568
|
-
body['spot_instance_policy'] = self.spot_instance_policy.value
|
|
569
|
-
if self.tags: body['tags'] = self.tags.as_dict()
|
|
570
|
-
if self.warehouse_type is not None: body['warehouse_type'] = self.warehouse_type.value
|
|
779
|
+
if self.query: body['query'] = self.query.as_dict()
|
|
571
780
|
return body
|
|
572
781
|
|
|
573
782
|
@classmethod
|
|
574
|
-
def from_dict(cls, d: Dict[str, any]) ->
|
|
575
|
-
"""Deserializes the
|
|
576
|
-
return cls(
|
|
783
|
+
def from_dict(cls, d: Dict[str, any]) -> CreateQueryRequest:
|
|
784
|
+
"""Deserializes the CreateQueryRequest from a dictionary."""
|
|
785
|
+
return cls(query=_from_dict(d, 'query', CreateQueryRequestQuery))
|
|
786
|
+
|
|
787
|
+
|
|
788
|
+
@dataclass
|
|
789
|
+
class CreateQueryRequestQuery:
|
|
790
|
+
apply_auto_limit: Optional[bool] = None
|
|
791
|
+
"""Whether to apply a 1000 row limit to the query result."""
|
|
792
|
+
|
|
793
|
+
catalog: Optional[str] = None
|
|
794
|
+
"""Name of the catalog where this query will be executed."""
|
|
795
|
+
|
|
796
|
+
description: Optional[str] = None
|
|
797
|
+
"""General description that conveys additional information about this query such as usage notes."""
|
|
798
|
+
|
|
799
|
+
display_name: Optional[str] = None
|
|
800
|
+
"""Display name of the query that appears in list views, widget headings, and on the query page."""
|
|
801
|
+
|
|
802
|
+
parameters: Optional[List[QueryParameter]] = None
|
|
803
|
+
"""List of query parameter definitions."""
|
|
804
|
+
|
|
805
|
+
parent_path: Optional[str] = None
|
|
806
|
+
"""Workspace path of the workspace folder containing the object."""
|
|
807
|
+
|
|
808
|
+
query_text: Optional[str] = None
|
|
809
|
+
"""Text of the query to be run."""
|
|
810
|
+
|
|
811
|
+
run_as_mode: Optional[RunAsMode] = None
|
|
812
|
+
"""Sets the "Run as" role for the object."""
|
|
813
|
+
|
|
814
|
+
schema: Optional[str] = None
|
|
815
|
+
"""Name of the schema where this query will be executed."""
|
|
816
|
+
|
|
817
|
+
tags: Optional[List[str]] = None
|
|
818
|
+
|
|
819
|
+
warehouse_id: Optional[str] = None
|
|
820
|
+
"""ID of the SQL warehouse attached to the query."""
|
|
821
|
+
|
|
822
|
+
def as_dict(self) -> dict:
|
|
823
|
+
"""Serializes the CreateQueryRequestQuery into a dictionary suitable for use as a JSON request body."""
|
|
824
|
+
body = {}
|
|
825
|
+
if self.apply_auto_limit is not None: body['apply_auto_limit'] = self.apply_auto_limit
|
|
826
|
+
if self.catalog is not None: body['catalog'] = self.catalog
|
|
827
|
+
if self.description is not None: body['description'] = self.description
|
|
828
|
+
if self.display_name is not None: body['display_name'] = self.display_name
|
|
829
|
+
if self.parameters: body['parameters'] = [v.as_dict() for v in self.parameters]
|
|
830
|
+
if self.parent_path is not None: body['parent_path'] = self.parent_path
|
|
831
|
+
if self.query_text is not None: body['query_text'] = self.query_text
|
|
832
|
+
if self.run_as_mode is not None: body['run_as_mode'] = self.run_as_mode.value
|
|
833
|
+
if self.schema is not None: body['schema'] = self.schema
|
|
834
|
+
if self.tags: body['tags'] = [v for v in self.tags]
|
|
835
|
+
if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id
|
|
836
|
+
return body
|
|
837
|
+
|
|
838
|
+
@classmethod
|
|
839
|
+
def from_dict(cls, d: Dict[str, any]) -> CreateQueryRequestQuery:
|
|
840
|
+
"""Deserializes the CreateQueryRequestQuery from a dictionary."""
|
|
841
|
+
return cls(apply_auto_limit=d.get('apply_auto_limit', None),
|
|
842
|
+
catalog=d.get('catalog', None),
|
|
843
|
+
description=d.get('description', None),
|
|
844
|
+
display_name=d.get('display_name', None),
|
|
845
|
+
parameters=_repeated_dict(d, 'parameters', QueryParameter),
|
|
846
|
+
parent_path=d.get('parent_path', None),
|
|
847
|
+
query_text=d.get('query_text', None),
|
|
848
|
+
run_as_mode=_enum(d, 'run_as_mode', RunAsMode),
|
|
849
|
+
schema=d.get('schema', None),
|
|
850
|
+
tags=d.get('tags', None),
|
|
851
|
+
warehouse_id=d.get('warehouse_id', None))
|
|
852
|
+
|
|
853
|
+
|
|
854
|
+
@dataclass
|
|
855
|
+
class CreateVisualizationRequest:
|
|
856
|
+
visualization: Optional[CreateVisualizationRequestVisualization] = None
|
|
857
|
+
|
|
858
|
+
def as_dict(self) -> dict:
|
|
859
|
+
"""Serializes the CreateVisualizationRequest into a dictionary suitable for use as a JSON request body."""
|
|
860
|
+
body = {}
|
|
861
|
+
if self.visualization: body['visualization'] = self.visualization.as_dict()
|
|
862
|
+
return body
|
|
863
|
+
|
|
864
|
+
@classmethod
|
|
865
|
+
def from_dict(cls, d: Dict[str, any]) -> CreateVisualizationRequest:
|
|
866
|
+
"""Deserializes the CreateVisualizationRequest from a dictionary."""
|
|
867
|
+
return cls(visualization=_from_dict(d, 'visualization', CreateVisualizationRequestVisualization))
|
|
868
|
+
|
|
869
|
+
|
|
870
|
+
@dataclass
|
|
871
|
+
class CreateVisualizationRequestVisualization:
|
|
872
|
+
display_name: Optional[str] = None
|
|
873
|
+
"""The display name of the visualization."""
|
|
874
|
+
|
|
875
|
+
query_id: Optional[str] = None
|
|
876
|
+
"""UUID of the query that the visualization is attached to."""
|
|
877
|
+
|
|
878
|
+
serialized_options: Optional[str] = None
|
|
879
|
+
"""The visualization options varies widely from one visualization type to the next and is
|
|
880
|
+
unsupported. Databricks does not recommend modifying visualization options directly."""
|
|
881
|
+
|
|
882
|
+
serialized_query_plan: Optional[str] = None
|
|
883
|
+
"""The visualization query plan varies widely from one visualization type to the next and is
|
|
884
|
+
unsupported. Databricks does not recommend modifying the visualization query plan directly."""
|
|
885
|
+
|
|
886
|
+
type: Optional[str] = None
|
|
887
|
+
"""The type of visualization: counter, table, funnel, and so on."""
|
|
888
|
+
|
|
889
|
+
def as_dict(self) -> dict:
|
|
890
|
+
"""Serializes the CreateVisualizationRequestVisualization into a dictionary suitable for use as a JSON request body."""
|
|
891
|
+
body = {}
|
|
892
|
+
if self.display_name is not None: body['display_name'] = self.display_name
|
|
893
|
+
if self.query_id is not None: body['query_id'] = self.query_id
|
|
894
|
+
if self.serialized_options is not None: body['serialized_options'] = self.serialized_options
|
|
895
|
+
if self.serialized_query_plan is not None: body['serialized_query_plan'] = self.serialized_query_plan
|
|
896
|
+
if self.type is not None: body['type'] = self.type
|
|
897
|
+
return body
|
|
898
|
+
|
|
899
|
+
@classmethod
|
|
900
|
+
def from_dict(cls, d: Dict[str, any]) -> CreateVisualizationRequestVisualization:
|
|
901
|
+
"""Deserializes the CreateVisualizationRequestVisualization from a dictionary."""
|
|
902
|
+
return cls(display_name=d.get('display_name', None),
|
|
903
|
+
query_id=d.get('query_id', None),
|
|
904
|
+
serialized_options=d.get('serialized_options', None),
|
|
905
|
+
serialized_query_plan=d.get('serialized_query_plan', None),
|
|
906
|
+
type=d.get('type', None))
|
|
907
|
+
|
|
908
|
+
|
|
909
|
+
@dataclass
|
|
910
|
+
class CreateWarehouseRequest:
|
|
911
|
+
auto_stop_mins: Optional[int] = None
|
|
912
|
+
"""The amount of time in minutes that a SQL warehouse must be idle (i.e., no RUNNING queries)
|
|
913
|
+
before it is automatically stopped.
|
|
914
|
+
|
|
915
|
+
Supported values: - Must be == 0 or >= 10 mins - 0 indicates no autostop.
|
|
916
|
+
|
|
917
|
+
Defaults to 120 mins"""
|
|
918
|
+
|
|
919
|
+
channel: Optional[Channel] = None
|
|
920
|
+
"""Channel Details"""
|
|
921
|
+
|
|
922
|
+
cluster_size: Optional[str] = None
|
|
923
|
+
"""Size of the clusters allocated for this warehouse. Increasing the size of a spark cluster allows
|
|
924
|
+
you to run larger queries on it. If you want to increase the number of concurrent queries,
|
|
925
|
+
please tune max_num_clusters.
|
|
926
|
+
|
|
927
|
+
Supported values: - 2X-Small - X-Small - Small - Medium - Large - X-Large - 2X-Large - 3X-Large
|
|
928
|
+
- 4X-Large"""
|
|
929
|
+
|
|
930
|
+
creator_name: Optional[str] = None
|
|
931
|
+
"""warehouse creator name"""
|
|
932
|
+
|
|
933
|
+
enable_photon: Optional[bool] = None
|
|
934
|
+
"""Configures whether the warehouse should use Photon optimized clusters.
|
|
935
|
+
|
|
936
|
+
Defaults to false."""
|
|
937
|
+
|
|
938
|
+
enable_serverless_compute: Optional[bool] = None
|
|
939
|
+
"""Configures whether the warehouse should use serverless compute"""
|
|
940
|
+
|
|
941
|
+
instance_profile_arn: Optional[str] = None
|
|
942
|
+
"""Deprecated. Instance profile used to pass IAM role to the cluster"""
|
|
943
|
+
|
|
944
|
+
max_num_clusters: Optional[int] = None
|
|
945
|
+
"""Maximum number of clusters that the autoscaler will create to handle concurrent queries.
|
|
946
|
+
|
|
947
|
+
Supported values: - Must be >= min_num_clusters - Must be <= 30.
|
|
948
|
+
|
|
949
|
+
Defaults to min_clusters if unset."""
|
|
950
|
+
|
|
951
|
+
min_num_clusters: Optional[int] = None
|
|
952
|
+
"""Minimum number of available clusters that will be maintained for this SQL warehouse. Increasing
|
|
953
|
+
this will ensure that a larger number of clusters are always running and therefore may reduce
|
|
954
|
+
the cold start time for new queries. This is similar to reserved vs. revocable cores in a
|
|
955
|
+
resource manager.
|
|
956
|
+
|
|
957
|
+
Supported values: - Must be > 0 - Must be <= min(max_num_clusters, 30)
|
|
958
|
+
|
|
959
|
+
Defaults to 1"""
|
|
960
|
+
|
|
961
|
+
name: Optional[str] = None
|
|
962
|
+
"""Logical name for the cluster.
|
|
963
|
+
|
|
964
|
+
Supported values: - Must be unique within an org. - Must be less than 100 characters."""
|
|
965
|
+
|
|
966
|
+
spot_instance_policy: Optional[SpotInstancePolicy] = None
|
|
967
|
+
"""Configurations whether the warehouse should use spot instances."""
|
|
968
|
+
|
|
969
|
+
tags: Optional[EndpointTags] = None
|
|
970
|
+
"""A set of key-value pairs that will be tagged on all resources (e.g., AWS instances and EBS
|
|
971
|
+
volumes) associated with this SQL warehouse.
|
|
972
|
+
|
|
973
|
+
Supported values: - Number of tags < 45."""
|
|
974
|
+
|
|
975
|
+
warehouse_type: Optional[CreateWarehouseRequestWarehouseType] = None
|
|
976
|
+
"""Warehouse type: `PRO` or `CLASSIC`. If you want to use serverless compute, you must set to `PRO`
|
|
977
|
+
and also set the field `enable_serverless_compute` to `true`."""
|
|
978
|
+
|
|
979
|
+
def as_dict(self) -> dict:
|
|
980
|
+
"""Serializes the CreateWarehouseRequest into a dictionary suitable for use as a JSON request body."""
|
|
981
|
+
body = {}
|
|
982
|
+
if self.auto_stop_mins is not None: body['auto_stop_mins'] = self.auto_stop_mins
|
|
983
|
+
if self.channel: body['channel'] = self.channel.as_dict()
|
|
984
|
+
if self.cluster_size is not None: body['cluster_size'] = self.cluster_size
|
|
985
|
+
if self.creator_name is not None: body['creator_name'] = self.creator_name
|
|
986
|
+
if self.enable_photon is not None: body['enable_photon'] = self.enable_photon
|
|
987
|
+
if self.enable_serverless_compute is not None:
|
|
988
|
+
body['enable_serverless_compute'] = self.enable_serverless_compute
|
|
989
|
+
if self.instance_profile_arn is not None: body['instance_profile_arn'] = self.instance_profile_arn
|
|
990
|
+
if self.max_num_clusters is not None: body['max_num_clusters'] = self.max_num_clusters
|
|
991
|
+
if self.min_num_clusters is not None: body['min_num_clusters'] = self.min_num_clusters
|
|
992
|
+
if self.name is not None: body['name'] = self.name
|
|
993
|
+
if self.spot_instance_policy is not None:
|
|
994
|
+
body['spot_instance_policy'] = self.spot_instance_policy.value
|
|
995
|
+
if self.tags: body['tags'] = self.tags.as_dict()
|
|
996
|
+
if self.warehouse_type is not None: body['warehouse_type'] = self.warehouse_type.value
|
|
997
|
+
return body
|
|
998
|
+
|
|
999
|
+
@classmethod
|
|
1000
|
+
def from_dict(cls, d: Dict[str, any]) -> CreateWarehouseRequest:
|
|
1001
|
+
"""Deserializes the CreateWarehouseRequest from a dictionary."""
|
|
1002
|
+
return cls(auto_stop_mins=d.get('auto_stop_mins', None),
|
|
577
1003
|
channel=_from_dict(d, 'channel', Channel),
|
|
578
1004
|
cluster_size=d.get('cluster_size', None),
|
|
579
1005
|
creator_name=d.get('creator_name', None),
|
|
@@ -856,7 +1282,7 @@ class DataSource:
|
|
|
856
1282
|
|
|
857
1283
|
id: Optional[str] = None
|
|
858
1284
|
"""Data source ID maps to the ID of the data source used by the resource and is distinct from the
|
|
859
|
-
warehouse ID. [Learn more]
|
|
1285
|
+
warehouse ID. [Learn more]
|
|
860
1286
|
|
|
861
1287
|
[Learn more]: https://docs.databricks.com/api/workspace/datasources/list"""
|
|
862
1288
|
|
|
@@ -913,6 +1339,121 @@ class DataSource:
|
|
|
913
1339
|
warehouse_id=d.get('warehouse_id', None))
|
|
914
1340
|
|
|
915
1341
|
|
|
1342
|
+
class DatePrecision(Enum):
|
|
1343
|
+
|
|
1344
|
+
DAY_PRECISION = 'DAY_PRECISION'
|
|
1345
|
+
MINUTE_PRECISION = 'MINUTE_PRECISION'
|
|
1346
|
+
SECOND_PRECISION = 'SECOND_PRECISION'
|
|
1347
|
+
|
|
1348
|
+
|
|
1349
|
+
@dataclass
|
|
1350
|
+
class DateRange:
|
|
1351
|
+
start: str
|
|
1352
|
+
|
|
1353
|
+
end: str
|
|
1354
|
+
|
|
1355
|
+
def as_dict(self) -> dict:
|
|
1356
|
+
"""Serializes the DateRange into a dictionary suitable for use as a JSON request body."""
|
|
1357
|
+
body = {}
|
|
1358
|
+
if self.end is not None: body['end'] = self.end
|
|
1359
|
+
if self.start is not None: body['start'] = self.start
|
|
1360
|
+
return body
|
|
1361
|
+
|
|
1362
|
+
@classmethod
|
|
1363
|
+
def from_dict(cls, d: Dict[str, any]) -> DateRange:
|
|
1364
|
+
"""Deserializes the DateRange from a dictionary."""
|
|
1365
|
+
return cls(end=d.get('end', None), start=d.get('start', None))
|
|
1366
|
+
|
|
1367
|
+
|
|
1368
|
+
@dataclass
|
|
1369
|
+
class DateRangeValue:
|
|
1370
|
+
date_range_value: Optional[DateRange] = None
|
|
1371
|
+
"""Manually specified date-time range value."""
|
|
1372
|
+
|
|
1373
|
+
dynamic_date_range_value: Optional[DateRangeValueDynamicDateRange] = None
|
|
1374
|
+
"""Dynamic date-time range value based on current date-time."""
|
|
1375
|
+
|
|
1376
|
+
precision: Optional[DatePrecision] = None
|
|
1377
|
+
"""Date-time precision to format the value into when the query is run. Defaults to DAY_PRECISION
|
|
1378
|
+
(YYYY-MM-DD)."""
|
|
1379
|
+
|
|
1380
|
+
start_day_of_week: Optional[int] = None
|
|
1381
|
+
|
|
1382
|
+
def as_dict(self) -> dict:
|
|
1383
|
+
"""Serializes the DateRangeValue into a dictionary suitable for use as a JSON request body."""
|
|
1384
|
+
body = {}
|
|
1385
|
+
if self.date_range_value: body['date_range_value'] = self.date_range_value.as_dict()
|
|
1386
|
+
if self.dynamic_date_range_value is not None:
|
|
1387
|
+
body['dynamic_date_range_value'] = self.dynamic_date_range_value.value
|
|
1388
|
+
if self.precision is not None: body['precision'] = self.precision.value
|
|
1389
|
+
if self.start_day_of_week is not None: body['start_day_of_week'] = self.start_day_of_week
|
|
1390
|
+
return body
|
|
1391
|
+
|
|
1392
|
+
@classmethod
|
|
1393
|
+
def from_dict(cls, d: Dict[str, any]) -> DateRangeValue:
|
|
1394
|
+
"""Deserializes the DateRangeValue from a dictionary."""
|
|
1395
|
+
return cls(date_range_value=_from_dict(d, 'date_range_value', DateRange),
|
|
1396
|
+
dynamic_date_range_value=_enum(d, 'dynamic_date_range_value',
|
|
1397
|
+
DateRangeValueDynamicDateRange),
|
|
1398
|
+
precision=_enum(d, 'precision', DatePrecision),
|
|
1399
|
+
start_day_of_week=d.get('start_day_of_week', None))
|
|
1400
|
+
|
|
1401
|
+
|
|
1402
|
+
class DateRangeValueDynamicDateRange(Enum):
|
|
1403
|
+
|
|
1404
|
+
LAST_12_MONTHS = 'LAST_12_MONTHS'
|
|
1405
|
+
LAST_14_DAYS = 'LAST_14_DAYS'
|
|
1406
|
+
LAST_24_HOURS = 'LAST_24_HOURS'
|
|
1407
|
+
LAST_30_DAYS = 'LAST_30_DAYS'
|
|
1408
|
+
LAST_60_DAYS = 'LAST_60_DAYS'
|
|
1409
|
+
LAST_7_DAYS = 'LAST_7_DAYS'
|
|
1410
|
+
LAST_8_HOURS = 'LAST_8_HOURS'
|
|
1411
|
+
LAST_90_DAYS = 'LAST_90_DAYS'
|
|
1412
|
+
LAST_HOUR = 'LAST_HOUR'
|
|
1413
|
+
LAST_MONTH = 'LAST_MONTH'
|
|
1414
|
+
LAST_WEEK = 'LAST_WEEK'
|
|
1415
|
+
LAST_YEAR = 'LAST_YEAR'
|
|
1416
|
+
THIS_MONTH = 'THIS_MONTH'
|
|
1417
|
+
THIS_WEEK = 'THIS_WEEK'
|
|
1418
|
+
THIS_YEAR = 'THIS_YEAR'
|
|
1419
|
+
TODAY = 'TODAY'
|
|
1420
|
+
YESTERDAY = 'YESTERDAY'
|
|
1421
|
+
|
|
1422
|
+
|
|
1423
|
+
@dataclass
|
|
1424
|
+
class DateValue:
|
|
1425
|
+
date_value: Optional[str] = None
|
|
1426
|
+
"""Manually specified date-time value."""
|
|
1427
|
+
|
|
1428
|
+
dynamic_date_value: Optional[DateValueDynamicDate] = None
|
|
1429
|
+
"""Dynamic date-time value based on current date-time."""
|
|
1430
|
+
|
|
1431
|
+
precision: Optional[DatePrecision] = None
|
|
1432
|
+
"""Date-time precision to format the value into when the query is run. Defaults to DAY_PRECISION
|
|
1433
|
+
(YYYY-MM-DD)."""
|
|
1434
|
+
|
|
1435
|
+
def as_dict(self) -> dict:
|
|
1436
|
+
"""Serializes the DateValue into a dictionary suitable for use as a JSON request body."""
|
|
1437
|
+
body = {}
|
|
1438
|
+
if self.date_value is not None: body['date_value'] = self.date_value
|
|
1439
|
+
if self.dynamic_date_value is not None: body['dynamic_date_value'] = self.dynamic_date_value.value
|
|
1440
|
+
if self.precision is not None: body['precision'] = self.precision.value
|
|
1441
|
+
return body
|
|
1442
|
+
|
|
1443
|
+
@classmethod
|
|
1444
|
+
def from_dict(cls, d: Dict[str, any]) -> DateValue:
|
|
1445
|
+
"""Deserializes the DateValue from a dictionary."""
|
|
1446
|
+
return cls(date_value=d.get('date_value', None),
|
|
1447
|
+
dynamic_date_value=_enum(d, 'dynamic_date_value', DateValueDynamicDate),
|
|
1448
|
+
precision=_enum(d, 'precision', DatePrecision))
|
|
1449
|
+
|
|
1450
|
+
|
|
1451
|
+
class DateValueDynamicDate(Enum):
|
|
1452
|
+
|
|
1453
|
+
NOW = 'NOW'
|
|
1454
|
+
YESTERDAY = 'YESTERDAY'
|
|
1455
|
+
|
|
1456
|
+
|
|
916
1457
|
@dataclass
|
|
917
1458
|
class DeleteResponse:
|
|
918
1459
|
|
|
@@ -942,26 +1483,6 @@ class DeleteWarehouseResponse:
|
|
|
942
1483
|
|
|
943
1484
|
|
|
944
1485
|
class Disposition(Enum):
|
|
945
|
-
"""The fetch disposition provides two modes of fetching results: `INLINE` and `EXTERNAL_LINKS`.
|
|
946
|
-
|
|
947
|
-
Statements executed with `INLINE` disposition will return result data inline, in `JSON_ARRAY`
|
|
948
|
-
format, in a series of chunks. If a given statement produces a result set with a size larger
|
|
949
|
-
than 25 MiB, that statement execution is aborted, and no result set will be available.
|
|
950
|
-
|
|
951
|
-
**NOTE** Byte limits are computed based upon internal representations of the result set data,
|
|
952
|
-
and might not match the sizes visible in JSON responses.
|
|
953
|
-
|
|
954
|
-
Statements executed with `EXTERNAL_LINKS` disposition will return result data as external links:
|
|
955
|
-
URLs that point to cloud storage internal to the workspace. Using `EXTERNAL_LINKS` disposition
|
|
956
|
-
allows statements to generate arbitrarily sized result sets for fetching up to 100 GiB. The
|
|
957
|
-
resulting links have two important properties:
|
|
958
|
-
|
|
959
|
-
1. They point to resources _external_ to the Databricks compute; therefore any associated
|
|
960
|
-
authentication information (typically a personal access token, OAuth token, or similar) _must be
|
|
961
|
-
removed_ when fetching from these links.
|
|
962
|
-
|
|
963
|
-
2. These are presigned URLs with a specific expiration, indicated in the response. The behavior
|
|
964
|
-
when attempting to use an expired link is cloud specific."""
|
|
965
1486
|
|
|
966
1487
|
EXTERNAL_LINKS = 'EXTERNAL_LINKS'
|
|
967
1488
|
INLINE = 'INLINE'
|
|
@@ -1140,6 +1661,50 @@ class EditWarehouseResponse:
|
|
|
1140
1661
|
return cls()
|
|
1141
1662
|
|
|
1142
1663
|
|
|
1664
|
+
@dataclass
|
|
1665
|
+
class Empty:
|
|
1666
|
+
"""Represents an empty message, similar to google.protobuf.Empty, which is not available in the
|
|
1667
|
+
firm right now."""
|
|
1668
|
+
|
|
1669
|
+
def as_dict(self) -> dict:
|
|
1670
|
+
"""Serializes the Empty into a dictionary suitable for use as a JSON request body."""
|
|
1671
|
+
body = {}
|
|
1672
|
+
return body
|
|
1673
|
+
|
|
1674
|
+
@classmethod
|
|
1675
|
+
def from_dict(cls, d: Dict[str, any]) -> Empty:
|
|
1676
|
+
"""Deserializes the Empty from a dictionary."""
|
|
1677
|
+
return cls()
|
|
1678
|
+
|
|
1679
|
+
|
|
1680
|
+
@dataclass
|
|
1681
|
+
class EncodedText:
|
|
1682
|
+
encoding: Optional[EncodedTextEncoding] = None
|
|
1683
|
+
"""Carry text data in different form."""
|
|
1684
|
+
|
|
1685
|
+
text: Optional[str] = None
|
|
1686
|
+
"""text data"""
|
|
1687
|
+
|
|
1688
|
+
def as_dict(self) -> dict:
|
|
1689
|
+
"""Serializes the EncodedText into a dictionary suitable for use as a JSON request body."""
|
|
1690
|
+
body = {}
|
|
1691
|
+
if self.encoding is not None: body['encoding'] = self.encoding.value
|
|
1692
|
+
if self.text is not None: body['text'] = self.text
|
|
1693
|
+
return body
|
|
1694
|
+
|
|
1695
|
+
@classmethod
|
|
1696
|
+
def from_dict(cls, d: Dict[str, any]) -> EncodedText:
|
|
1697
|
+
"""Deserializes the EncodedText from a dictionary."""
|
|
1698
|
+
return cls(encoding=_enum(d, 'encoding', EncodedTextEncoding), text=d.get('text', None))
|
|
1699
|
+
|
|
1700
|
+
|
|
1701
|
+
class EncodedTextEncoding(Enum):
|
|
1702
|
+
"""Carry text data in different form."""
|
|
1703
|
+
|
|
1704
|
+
BASE64 = 'BASE64'
|
|
1705
|
+
PLAIN = 'PLAIN'
|
|
1706
|
+
|
|
1707
|
+
|
|
1143
1708
|
@dataclass
|
|
1144
1709
|
class EndpointConfPair:
|
|
1145
1710
|
key: Optional[str] = None
|
|
@@ -1384,14 +1949,42 @@ class EndpointTags:
|
|
|
1384
1949
|
return cls(custom_tags=_repeated_dict(d, 'custom_tags', EndpointTagPair))
|
|
1385
1950
|
|
|
1386
1951
|
|
|
1952
|
+
@dataclass
|
|
1953
|
+
class EnumValue:
|
|
1954
|
+
enum_options: Optional[str] = None
|
|
1955
|
+
"""List of valid query parameter values, newline delimited."""
|
|
1956
|
+
|
|
1957
|
+
multi_values_options: Optional[MultiValuesOptions] = None
|
|
1958
|
+
"""If specified, allows multiple values to be selected for this parameter."""
|
|
1959
|
+
|
|
1960
|
+
values: Optional[List[str]] = None
|
|
1961
|
+
"""List of selected query parameter values."""
|
|
1962
|
+
|
|
1963
|
+
def as_dict(self) -> dict:
|
|
1964
|
+
"""Serializes the EnumValue into a dictionary suitable for use as a JSON request body."""
|
|
1965
|
+
body = {}
|
|
1966
|
+
if self.enum_options is not None: body['enum_options'] = self.enum_options
|
|
1967
|
+
if self.multi_values_options: body['multi_values_options'] = self.multi_values_options.as_dict()
|
|
1968
|
+
if self.values: body['values'] = [v for v in self.values]
|
|
1969
|
+
return body
|
|
1970
|
+
|
|
1971
|
+
@classmethod
|
|
1972
|
+
def from_dict(cls, d: Dict[str, any]) -> EnumValue:
|
|
1973
|
+
"""Deserializes the EnumValue from a dictionary."""
|
|
1974
|
+
return cls(enum_options=d.get('enum_options', None),
|
|
1975
|
+
multi_values_options=_from_dict(d, 'multi_values_options', MultiValuesOptions),
|
|
1976
|
+
values=d.get('values', None))
|
|
1977
|
+
|
|
1978
|
+
|
|
1387
1979
|
@dataclass
|
|
1388
1980
|
class ExecuteStatementRequest:
|
|
1389
1981
|
statement: str
|
|
1390
1982
|
"""The SQL statement to execute. The statement can optionally be parameterized, see `parameters`."""
|
|
1391
1983
|
|
|
1392
1984
|
warehouse_id: str
|
|
1393
|
-
"""Warehouse upon which to execute a statement. See also [What are SQL
|
|
1394
|
-
|
|
1985
|
+
"""Warehouse upon which to execute a statement. See also [What are SQL warehouses?]
|
|
1986
|
+
|
|
1987
|
+
[What are SQL warehouses?]: https://docs.databricks.com/sql/admin/warehouse-type.html"""
|
|
1395
1988
|
|
|
1396
1989
|
byte_limit: Optional[int] = None
|
|
1397
1990
|
"""Applies the given byte limit to the statement's result size. Byte counts are based on internal
|
|
@@ -1406,26 +1999,6 @@ class ExecuteStatementRequest:
|
|
|
1406
1999
|
[`USE CATALOG`]: https://docs.databricks.com/sql/language-manual/sql-ref-syntax-ddl-use-catalog.html"""
|
|
1407
2000
|
|
|
1408
2001
|
disposition: Optional[Disposition] = None
|
|
1409
|
-
"""The fetch disposition provides two modes of fetching results: `INLINE` and `EXTERNAL_LINKS`.
|
|
1410
|
-
|
|
1411
|
-
Statements executed with `INLINE` disposition will return result data inline, in `JSON_ARRAY`
|
|
1412
|
-
format, in a series of chunks. If a given statement produces a result set with a size larger
|
|
1413
|
-
than 25 MiB, that statement execution is aborted, and no result set will be available.
|
|
1414
|
-
|
|
1415
|
-
**NOTE** Byte limits are computed based upon internal representations of the result set data,
|
|
1416
|
-
and might not match the sizes visible in JSON responses.
|
|
1417
|
-
|
|
1418
|
-
Statements executed with `EXTERNAL_LINKS` disposition will return result data as external links:
|
|
1419
|
-
URLs that point to cloud storage internal to the workspace. Using `EXTERNAL_LINKS` disposition
|
|
1420
|
-
allows statements to generate arbitrarily sized result sets for fetching up to 100 GiB. The
|
|
1421
|
-
resulting links have two important properties:
|
|
1422
|
-
|
|
1423
|
-
1. They point to resources _external_ to the Databricks compute; therefore any associated
|
|
1424
|
-
authentication information (typically a personal access token, OAuth token, or similar) _must be
|
|
1425
|
-
removed_ when fetching from these links.
|
|
1426
|
-
|
|
1427
|
-
2. These are presigned URLs with a specific expiration, indicated in the response. The behavior
|
|
1428
|
-
when attempting to use an expired link is cloud specific."""
|
|
1429
2002
|
|
|
1430
2003
|
format: Optional[Format] = None
|
|
1431
2004
|
"""Statement execution supports three result formats: `JSON_ARRAY` (default), `ARROW_STREAM`, and
|
|
@@ -1564,43 +2137,6 @@ class ExecuteStatementRequestOnWaitTimeout(Enum):
|
|
|
1564
2137
|
CONTINUE = 'CONTINUE'
|
|
1565
2138
|
|
|
1566
2139
|
|
|
1567
|
-
@dataclass
|
|
1568
|
-
class ExecuteStatementResponse:
|
|
1569
|
-
manifest: Optional[ResultManifest] = None
|
|
1570
|
-
"""The result manifest provides schema and metadata for the result set."""
|
|
1571
|
-
|
|
1572
|
-
result: Optional[ResultData] = None
|
|
1573
|
-
"""Contains the result data of a single chunk when using `INLINE` disposition. When using
|
|
1574
|
-
`EXTERNAL_LINKS` disposition, the array `external_links` is used instead to provide presigned
|
|
1575
|
-
URLs to the result data in cloud storage. Exactly one of these alternatives is used. (While the
|
|
1576
|
-
`external_links` array prepares the API to return multiple links in a single response. Currently
|
|
1577
|
-
only a single link is returned.)"""
|
|
1578
|
-
|
|
1579
|
-
statement_id: Optional[str] = None
|
|
1580
|
-
"""The statement ID is returned upon successfully submitting a SQL statement, and is a required
|
|
1581
|
-
reference for all subsequent calls."""
|
|
1582
|
-
|
|
1583
|
-
status: Optional[StatementStatus] = None
|
|
1584
|
-
"""The status response includes execution state and if relevant, error information."""
|
|
1585
|
-
|
|
1586
|
-
def as_dict(self) -> dict:
|
|
1587
|
-
"""Serializes the ExecuteStatementResponse into a dictionary suitable for use as a JSON request body."""
|
|
1588
|
-
body = {}
|
|
1589
|
-
if self.manifest: body['manifest'] = self.manifest.as_dict()
|
|
1590
|
-
if self.result: body['result'] = self.result.as_dict()
|
|
1591
|
-
if self.statement_id is not None: body['statement_id'] = self.statement_id
|
|
1592
|
-
if self.status: body['status'] = self.status.as_dict()
|
|
1593
|
-
return body
|
|
1594
|
-
|
|
1595
|
-
@classmethod
|
|
1596
|
-
def from_dict(cls, d: Dict[str, any]) -> ExecuteStatementResponse:
|
|
1597
|
-
"""Deserializes the ExecuteStatementResponse from a dictionary."""
|
|
1598
|
-
return cls(manifest=_from_dict(d, 'manifest', ResultManifest),
|
|
1599
|
-
result=_from_dict(d, 'result', ResultData),
|
|
1600
|
-
statement_id=d.get('statement_id', None),
|
|
1601
|
-
status=_from_dict(d, 'status', StatementStatus))
|
|
1602
|
-
|
|
1603
|
-
|
|
1604
2140
|
@dataclass
|
|
1605
2141
|
class ExternalLink:
|
|
1606
2142
|
byte_count: Optional[int] = None
|
|
@@ -1615,9 +2151,6 @@ class ExternalLink:
|
|
|
1615
2151
|
which point a new `external_link` must be requested."""
|
|
1616
2152
|
|
|
1617
2153
|
external_link: Optional[str] = None
|
|
1618
|
-
"""A presigned URL pointing to a chunk of result data, hosted by an external service, with a short
|
|
1619
|
-
expiration time (<= 15 minutes). As this URL contains a temporary credential, it should be
|
|
1620
|
-
considered sensitive and the client should not expose this URL in a log."""
|
|
1621
2154
|
|
|
1622
2155
|
http_headers: Optional[Dict[str, str]] = None
|
|
1623
2156
|
"""HTTP headers that must be included with a GET request to the `external_link`. Each header is
|
|
@@ -1704,43 +2237,6 @@ class GetResponse:
|
|
|
1704
2237
|
object_type=_enum(d, 'object_type', ObjectType))
|
|
1705
2238
|
|
|
1706
2239
|
|
|
1707
|
-
@dataclass
|
|
1708
|
-
class GetStatementResponse:
|
|
1709
|
-
manifest: Optional[ResultManifest] = None
|
|
1710
|
-
"""The result manifest provides schema and metadata for the result set."""
|
|
1711
|
-
|
|
1712
|
-
result: Optional[ResultData] = None
|
|
1713
|
-
"""Contains the result data of a single chunk when using `INLINE` disposition. When using
|
|
1714
|
-
`EXTERNAL_LINKS` disposition, the array `external_links` is used instead to provide presigned
|
|
1715
|
-
URLs to the result data in cloud storage. Exactly one of these alternatives is used. (While the
|
|
1716
|
-
`external_links` array prepares the API to return multiple links in a single response. Currently
|
|
1717
|
-
only a single link is returned.)"""
|
|
1718
|
-
|
|
1719
|
-
statement_id: Optional[str] = None
|
|
1720
|
-
"""The statement ID is returned upon successfully submitting a SQL statement, and is a required
|
|
1721
|
-
reference for all subsequent calls."""
|
|
1722
|
-
|
|
1723
|
-
status: Optional[StatementStatus] = None
|
|
1724
|
-
"""The status response includes execution state and if relevant, error information."""
|
|
1725
|
-
|
|
1726
|
-
def as_dict(self) -> dict:
|
|
1727
|
-
"""Serializes the GetStatementResponse into a dictionary suitable for use as a JSON request body."""
|
|
1728
|
-
body = {}
|
|
1729
|
-
if self.manifest: body['manifest'] = self.manifest.as_dict()
|
|
1730
|
-
if self.result: body['result'] = self.result.as_dict()
|
|
1731
|
-
if self.statement_id is not None: body['statement_id'] = self.statement_id
|
|
1732
|
-
if self.status: body['status'] = self.status.as_dict()
|
|
1733
|
-
return body
|
|
1734
|
-
|
|
1735
|
-
@classmethod
|
|
1736
|
-
def from_dict(cls, d: Dict[str, any]) -> GetStatementResponse:
|
|
1737
|
-
"""Deserializes the GetStatementResponse from a dictionary."""
|
|
1738
|
-
return cls(manifest=_from_dict(d, 'manifest', ResultManifest),
|
|
1739
|
-
result=_from_dict(d, 'result', ResultData),
|
|
1740
|
-
statement_id=d.get('statement_id', None),
|
|
1741
|
-
status=_from_dict(d, 'status', StatementStatus))
|
|
1742
|
-
|
|
1743
|
-
|
|
1744
2240
|
@dataclass
|
|
1745
2241
|
class GetWarehousePermissionLevelsResponse:
|
|
1746
2242
|
permission_levels: Optional[List[WarehousePermissionsDescription]] = None
|
|
@@ -1986,6 +2482,386 @@ class GetWorkspaceWarehouseConfigResponseSecurityPolicy(Enum):
|
|
|
1986
2482
|
PASSTHROUGH = 'PASSTHROUGH'
|
|
1987
2483
|
|
|
1988
2484
|
|
|
2485
|
+
@dataclass
|
|
2486
|
+
class LegacyAlert:
|
|
2487
|
+
created_at: Optional[str] = None
|
|
2488
|
+
"""Timestamp when the alert was created."""
|
|
2489
|
+
|
|
2490
|
+
id: Optional[str] = None
|
|
2491
|
+
"""Alert ID."""
|
|
2492
|
+
|
|
2493
|
+
last_triggered_at: Optional[str] = None
|
|
2494
|
+
"""Timestamp when the alert was last triggered."""
|
|
2495
|
+
|
|
2496
|
+
name: Optional[str] = None
|
|
2497
|
+
"""Name of the alert."""
|
|
2498
|
+
|
|
2499
|
+
options: Optional[AlertOptions] = None
|
|
2500
|
+
"""Alert configuration options."""
|
|
2501
|
+
|
|
2502
|
+
parent: Optional[str] = None
|
|
2503
|
+
"""The identifier of the workspace folder containing the object."""
|
|
2504
|
+
|
|
2505
|
+
query: Optional[AlertQuery] = None
|
|
2506
|
+
|
|
2507
|
+
rearm: Optional[int] = None
|
|
2508
|
+
"""Number of seconds after being triggered before the alert rearms itself and can be triggered
|
|
2509
|
+
again. If `null`, alert will never be triggered again."""
|
|
2510
|
+
|
|
2511
|
+
state: Optional[LegacyAlertState] = None
|
|
2512
|
+
"""State of the alert. Possible values are: `unknown` (yet to be evaluated), `triggered` (evaluated
|
|
2513
|
+
and fulfilled trigger conditions), or `ok` (evaluated and did not fulfill trigger conditions)."""
|
|
2514
|
+
|
|
2515
|
+
updated_at: Optional[str] = None
|
|
2516
|
+
"""Timestamp when the alert was last updated."""
|
|
2517
|
+
|
|
2518
|
+
user: Optional[User] = None
|
|
2519
|
+
|
|
2520
|
+
def as_dict(self) -> dict:
|
|
2521
|
+
"""Serializes the LegacyAlert into a dictionary suitable for use as a JSON request body."""
|
|
2522
|
+
body = {}
|
|
2523
|
+
if self.created_at is not None: body['created_at'] = self.created_at
|
|
2524
|
+
if self.id is not None: body['id'] = self.id
|
|
2525
|
+
if self.last_triggered_at is not None: body['last_triggered_at'] = self.last_triggered_at
|
|
2526
|
+
if self.name is not None: body['name'] = self.name
|
|
2527
|
+
if self.options: body['options'] = self.options.as_dict()
|
|
2528
|
+
if self.parent is not None: body['parent'] = self.parent
|
|
2529
|
+
if self.query: body['query'] = self.query.as_dict()
|
|
2530
|
+
if self.rearm is not None: body['rearm'] = self.rearm
|
|
2531
|
+
if self.state is not None: body['state'] = self.state.value
|
|
2532
|
+
if self.updated_at is not None: body['updated_at'] = self.updated_at
|
|
2533
|
+
if self.user: body['user'] = self.user.as_dict()
|
|
2534
|
+
return body
|
|
2535
|
+
|
|
2536
|
+
@classmethod
|
|
2537
|
+
def from_dict(cls, d: Dict[str, any]) -> LegacyAlert:
|
|
2538
|
+
"""Deserializes the LegacyAlert from a dictionary."""
|
|
2539
|
+
return cls(created_at=d.get('created_at', None),
|
|
2540
|
+
id=d.get('id', None),
|
|
2541
|
+
last_triggered_at=d.get('last_triggered_at', None),
|
|
2542
|
+
name=d.get('name', None),
|
|
2543
|
+
options=_from_dict(d, 'options', AlertOptions),
|
|
2544
|
+
parent=d.get('parent', None),
|
|
2545
|
+
query=_from_dict(d, 'query', AlertQuery),
|
|
2546
|
+
rearm=d.get('rearm', None),
|
|
2547
|
+
state=_enum(d, 'state', LegacyAlertState),
|
|
2548
|
+
updated_at=d.get('updated_at', None),
|
|
2549
|
+
user=_from_dict(d, 'user', User))
|
|
2550
|
+
|
|
2551
|
+
|
|
2552
|
+
class LegacyAlertState(Enum):
|
|
2553
|
+
"""State of the alert. Possible values are: `unknown` (yet to be evaluated), `triggered` (evaluated
|
|
2554
|
+
and fulfilled trigger conditions), or `ok` (evaluated and did not fulfill trigger conditions)."""
|
|
2555
|
+
|
|
2556
|
+
OK = 'ok'
|
|
2557
|
+
TRIGGERED = 'triggered'
|
|
2558
|
+
UNKNOWN = 'unknown'
|
|
2559
|
+
|
|
2560
|
+
|
|
2561
|
+
@dataclass
|
|
2562
|
+
class LegacyQuery:
|
|
2563
|
+
can_edit: Optional[bool] = None
|
|
2564
|
+
"""Describes whether the authenticated user is allowed to edit the definition of this query."""
|
|
2565
|
+
|
|
2566
|
+
created_at: Optional[str] = None
|
|
2567
|
+
"""The timestamp when this query was created."""
|
|
2568
|
+
|
|
2569
|
+
data_source_id: Optional[str] = None
|
|
2570
|
+
"""Data source ID maps to the ID of the data source used by the resource and is distinct from the
|
|
2571
|
+
warehouse ID. [Learn more]
|
|
2572
|
+
|
|
2573
|
+
[Learn more]: https://docs.databricks.com/api/workspace/datasources/list"""
|
|
2574
|
+
|
|
2575
|
+
description: Optional[str] = None
|
|
2576
|
+
"""General description that conveys additional information about this query such as usage notes."""
|
|
2577
|
+
|
|
2578
|
+
id: Optional[str] = None
|
|
2579
|
+
"""Query ID."""
|
|
2580
|
+
|
|
2581
|
+
is_archived: Optional[bool] = None
|
|
2582
|
+
"""Indicates whether the query is trashed. Trashed queries can't be used in dashboards, or appear
|
|
2583
|
+
in search results. If this boolean is `true`, the `options` property for this query includes a
|
|
2584
|
+
`moved_to_trash_at` timestamp. Trashed queries are permanently deleted after 30 days."""
|
|
2585
|
+
|
|
2586
|
+
is_draft: Optional[bool] = None
|
|
2587
|
+
"""Whether the query is a draft. Draft queries only appear in list views for their owners.
|
|
2588
|
+
Visualizations from draft queries cannot appear on dashboards."""
|
|
2589
|
+
|
|
2590
|
+
is_favorite: Optional[bool] = None
|
|
2591
|
+
"""Whether this query object appears in the current user's favorites list. This flag determines
|
|
2592
|
+
whether the star icon for favorites is selected."""
|
|
2593
|
+
|
|
2594
|
+
is_safe: Optional[bool] = None
|
|
2595
|
+
"""Text parameter types are not safe from SQL injection for all types of data source. Set this
|
|
2596
|
+
Boolean parameter to `true` if a query either does not use any text type parameters or uses a
|
|
2597
|
+
data source type where text type parameters are handled safely."""
|
|
2598
|
+
|
|
2599
|
+
last_modified_by: Optional[User] = None
|
|
2600
|
+
|
|
2601
|
+
last_modified_by_id: Optional[int] = None
|
|
2602
|
+
"""The ID of the user who last saved changes to this query."""
|
|
2603
|
+
|
|
2604
|
+
latest_query_data_id: Optional[str] = None
|
|
2605
|
+
"""If there is a cached result for this query and user, this field includes the query result ID. If
|
|
2606
|
+
this query uses parameters, this field is always null."""
|
|
2607
|
+
|
|
2608
|
+
name: Optional[str] = None
|
|
2609
|
+
"""The title of this query that appears in list views, widget headings, and on the query page."""
|
|
2610
|
+
|
|
2611
|
+
options: Optional[QueryOptions] = None
|
|
2612
|
+
|
|
2613
|
+
parent: Optional[str] = None
|
|
2614
|
+
"""The identifier of the workspace folder containing the object."""
|
|
2615
|
+
|
|
2616
|
+
permission_tier: Optional[PermissionLevel] = None
|
|
2617
|
+
"""* `CAN_VIEW`: Can view the query * `CAN_RUN`: Can run the query * `CAN_EDIT`: Can edit the query
|
|
2618
|
+
* `CAN_MANAGE`: Can manage the query"""
|
|
2619
|
+
|
|
2620
|
+
query: Optional[str] = None
|
|
2621
|
+
"""The text of the query to be run."""
|
|
2622
|
+
|
|
2623
|
+
query_hash: Optional[str] = None
|
|
2624
|
+
"""A SHA-256 hash of the query text along with the authenticated user ID."""
|
|
2625
|
+
|
|
2626
|
+
run_as_role: Optional[RunAsRole] = None
|
|
2627
|
+
"""Sets the **Run as** role for the object. Must be set to one of `"viewer"` (signifying "run as
|
|
2628
|
+
viewer" behavior) or `"owner"` (signifying "run as owner" behavior)"""
|
|
2629
|
+
|
|
2630
|
+
tags: Optional[List[str]] = None
|
|
2631
|
+
|
|
2632
|
+
updated_at: Optional[str] = None
|
|
2633
|
+
"""The timestamp at which this query was last updated."""
|
|
2634
|
+
|
|
2635
|
+
user: Optional[User] = None
|
|
2636
|
+
|
|
2637
|
+
user_id: Optional[int] = None
|
|
2638
|
+
"""The ID of the user who owns the query."""
|
|
2639
|
+
|
|
2640
|
+
visualizations: Optional[List[LegacyVisualization]] = None
|
|
2641
|
+
|
|
2642
|
+
def as_dict(self) -> dict:
|
|
2643
|
+
"""Serializes the LegacyQuery into a dictionary suitable for use as a JSON request body."""
|
|
2644
|
+
body = {}
|
|
2645
|
+
if self.can_edit is not None: body['can_edit'] = self.can_edit
|
|
2646
|
+
if self.created_at is not None: body['created_at'] = self.created_at
|
|
2647
|
+
if self.data_source_id is not None: body['data_source_id'] = self.data_source_id
|
|
2648
|
+
if self.description is not None: body['description'] = self.description
|
|
2649
|
+
if self.id is not None: body['id'] = self.id
|
|
2650
|
+
if self.is_archived is not None: body['is_archived'] = self.is_archived
|
|
2651
|
+
if self.is_draft is not None: body['is_draft'] = self.is_draft
|
|
2652
|
+
if self.is_favorite is not None: body['is_favorite'] = self.is_favorite
|
|
2653
|
+
if self.is_safe is not None: body['is_safe'] = self.is_safe
|
|
2654
|
+
if self.last_modified_by: body['last_modified_by'] = self.last_modified_by.as_dict()
|
|
2655
|
+
if self.last_modified_by_id is not None: body['last_modified_by_id'] = self.last_modified_by_id
|
|
2656
|
+
if self.latest_query_data_id is not None: body['latest_query_data_id'] = self.latest_query_data_id
|
|
2657
|
+
if self.name is not None: body['name'] = self.name
|
|
2658
|
+
if self.options: body['options'] = self.options.as_dict()
|
|
2659
|
+
if self.parent is not None: body['parent'] = self.parent
|
|
2660
|
+
if self.permission_tier is not None: body['permission_tier'] = self.permission_tier.value
|
|
2661
|
+
if self.query is not None: body['query'] = self.query
|
|
2662
|
+
if self.query_hash is not None: body['query_hash'] = self.query_hash
|
|
2663
|
+
if self.run_as_role is not None: body['run_as_role'] = self.run_as_role.value
|
|
2664
|
+
if self.tags: body['tags'] = [v for v in self.tags]
|
|
2665
|
+
if self.updated_at is not None: body['updated_at'] = self.updated_at
|
|
2666
|
+
if self.user: body['user'] = self.user.as_dict()
|
|
2667
|
+
if self.user_id is not None: body['user_id'] = self.user_id
|
|
2668
|
+
if self.visualizations: body['visualizations'] = [v.as_dict() for v in self.visualizations]
|
|
2669
|
+
return body
|
|
2670
|
+
|
|
2671
|
+
@classmethod
|
|
2672
|
+
def from_dict(cls, d: Dict[str, any]) -> LegacyQuery:
|
|
2673
|
+
"""Deserializes the LegacyQuery from a dictionary."""
|
|
2674
|
+
return cls(can_edit=d.get('can_edit', None),
|
|
2675
|
+
created_at=d.get('created_at', None),
|
|
2676
|
+
data_source_id=d.get('data_source_id', None),
|
|
2677
|
+
description=d.get('description', None),
|
|
2678
|
+
id=d.get('id', None),
|
|
2679
|
+
is_archived=d.get('is_archived', None),
|
|
2680
|
+
is_draft=d.get('is_draft', None),
|
|
2681
|
+
is_favorite=d.get('is_favorite', None),
|
|
2682
|
+
is_safe=d.get('is_safe', None),
|
|
2683
|
+
last_modified_by=_from_dict(d, 'last_modified_by', User),
|
|
2684
|
+
last_modified_by_id=d.get('last_modified_by_id', None),
|
|
2685
|
+
latest_query_data_id=d.get('latest_query_data_id', None),
|
|
2686
|
+
name=d.get('name', None),
|
|
2687
|
+
options=_from_dict(d, 'options', QueryOptions),
|
|
2688
|
+
parent=d.get('parent', None),
|
|
2689
|
+
permission_tier=_enum(d, 'permission_tier', PermissionLevel),
|
|
2690
|
+
query=d.get('query', None),
|
|
2691
|
+
query_hash=d.get('query_hash', None),
|
|
2692
|
+
run_as_role=_enum(d, 'run_as_role', RunAsRole),
|
|
2693
|
+
tags=d.get('tags', None),
|
|
2694
|
+
updated_at=d.get('updated_at', None),
|
|
2695
|
+
user=_from_dict(d, 'user', User),
|
|
2696
|
+
user_id=d.get('user_id', None),
|
|
2697
|
+
visualizations=_repeated_dict(d, 'visualizations', LegacyVisualization))
|
|
2698
|
+
|
|
2699
|
+
|
|
2700
|
+
@dataclass
class LegacyVisualization:
    """The visualization description API changes frequently and is unsupported. You can duplicate a
    visualization by copying description objects received _from the API_ and then using them to
    create a new one with a POST request to the same endpoint. Databricks does not recommend
    constructing ad-hoc visualizations entirely in JSON."""

    created_at: Optional[str] = None

    description: Optional[str] = None
    """A short description of this visualization. This is not displayed in the UI."""

    id: Optional[str] = None
    """The UUID for this visualization."""

    name: Optional[str] = None
    """The name of the visualization that appears on dashboards and the query screen."""

    options: Optional[Any] = None
    """The options object varies widely from one visualization type to the next and is unsupported.
    Databricks does not recommend modifying visualization settings in JSON."""

    query: Optional[LegacyQuery] = None

    type: Optional[str] = None
    """The type of visualization: chart, table, pivot table, and so on."""

    updated_at: Optional[str] = None

    def as_dict(self) -> dict:
        """Serializes the LegacyVisualization into a dictionary suitable for use as a JSON request body."""
        serialized = {}
        # Scalar fields are emitted whenever they are set, even if falsy (e.g. empty string).
        if self.created_at is not None:
            serialized['created_at'] = self.created_at
        if self.description is not None:
            serialized['description'] = self.description
        if self.id is not None:
            serialized['id'] = self.id
        if self.name is not None:
            serialized['name'] = self.name
        # Object-valued fields are emitted only when truthy, matching the generated-SDK convention.
        if self.options:
            serialized['options'] = self.options
        if self.query:
            serialized['query'] = self.query.as_dict()
        if self.type is not None:
            serialized['type'] = self.type
        if self.updated_at is not None:
            serialized['updated_at'] = self.updated_at
        return serialized

    @classmethod
    def from_dict(cls, d: Dict[str, any]) -> LegacyVisualization:
        """Deserializes the LegacyVisualization from a dictionary."""
        return cls(
            created_at=d.get('created_at'),
            description=d.get('description'),
            id=d.get('id'),
            name=d.get('name'),
            options=d.get('options'),
            query=_from_dict(d, 'query', LegacyQuery),
            type=d.get('type'),
            updated_at=d.get('updated_at'),
        )
|
|
2753
|
+
|
|
2754
|
+
|
|
2755
|
+
class LifecycleState(Enum):
    """The workspace lifecycle state of an object; used for tracking trashed status
    (e.g. on alerts and queries)."""

    ACTIVE = 'ACTIVE'
    TRASHED = 'TRASHED'
|
|
2759
|
+
|
|
2760
|
+
|
|
2761
|
+
@dataclass
class ListAlertsResponse:
    """A single page of alerts returned by the list endpoint."""

    next_page_token: Optional[str] = None

    results: Optional[List[ListAlertsResponseAlert]] = None

    def as_dict(self) -> dict:
        """Serializes the ListAlertsResponse into a dictionary suitable for use as a JSON request body."""
        serialized = {}
        if self.next_page_token is not None:
            serialized['next_page_token'] = self.next_page_token
        # Emit the results list only when non-empty, per generated-SDK convention.
        if self.results:
            serialized['results'] = [alert.as_dict() for alert in self.results]
        return serialized

    @classmethod
    def from_dict(cls, d: Dict[str, any]) -> ListAlertsResponse:
        """Deserializes the ListAlertsResponse from a dictionary."""
        return cls(
            next_page_token=d.get('next_page_token'),
            results=_repeated_dict(d, 'results', ListAlertsResponseAlert),
        )
|
|
2779
|
+
|
|
2780
|
+
|
|
2781
|
+
@dataclass
class ListAlertsResponseAlert:
    """A single alert entry within a ListAlertsResponse page."""

    condition: Optional[AlertCondition] = None
    """Trigger conditions of the alert."""

    create_time: Optional[str] = None
    """The timestamp indicating when the alert was created."""

    custom_body: Optional[str] = None
    """Custom body of alert notification, if it exists. See [here] for custom templating instructions.

    [here]: https://docs.databricks.com/sql/user/alerts/index.html"""

    custom_subject: Optional[str] = None
    """Custom subject of alert notification, if it exists. This can include email subject entries and
    Slack notification headers, for example. See [here] for custom templating instructions.

    [here]: https://docs.databricks.com/sql/user/alerts/index.html"""

    display_name: Optional[str] = None
    """The display name of the alert."""

    id: Optional[str] = None
    """UUID identifying the alert."""

    lifecycle_state: Optional[LifecycleState] = None
    """The workspace state of the alert. Used for tracking trashed status."""

    owner_user_name: Optional[str] = None
    """The owner's username. This field is set to "Unavailable" if the user has been deleted."""

    query_id: Optional[str] = None
    """UUID of the query attached to the alert."""

    seconds_to_retrigger: Optional[int] = None
    """Number of seconds an alert must wait after being triggered to rearm itself. After rearming, it
    can be triggered again. If 0 or not specified, the alert will not be triggered again."""

    state: Optional[AlertState] = None
    """Current state of the alert's trigger status. This field is set to UNKNOWN if the alert has not
    yet been evaluated or ran into an error during the last evaluation."""

    trigger_time: Optional[str] = None
    """Timestamp when the alert was last triggered, if the alert has been triggered before."""

    update_time: Optional[str] = None
    """The timestamp indicating when the alert was updated."""

    def as_dict(self) -> dict:
        """Serializes the ListAlertsResponseAlert into a dictionary suitable for use as a JSON request body."""
        serialized = {}
        # Nested objects serialize recursively; enums serialize by value; scalars by identity.
        if self.condition:
            serialized['condition'] = self.condition.as_dict()
        if self.create_time is not None:
            serialized['create_time'] = self.create_time
        if self.custom_body is not None:
            serialized['custom_body'] = self.custom_body
        if self.custom_subject is not None:
            serialized['custom_subject'] = self.custom_subject
        if self.display_name is not None:
            serialized['display_name'] = self.display_name
        if self.id is not None:
            serialized['id'] = self.id
        if self.lifecycle_state is not None:
            serialized['lifecycle_state'] = self.lifecycle_state.value
        if self.owner_user_name is not None:
            serialized['owner_user_name'] = self.owner_user_name
        if self.query_id is not None:
            serialized['query_id'] = self.query_id
        if self.seconds_to_retrigger is not None:
            serialized['seconds_to_retrigger'] = self.seconds_to_retrigger
        if self.state is not None:
            serialized['state'] = self.state.value
        if self.trigger_time is not None:
            serialized['trigger_time'] = self.trigger_time
        if self.update_time is not None:
            serialized['update_time'] = self.update_time
        return serialized

    @classmethod
    def from_dict(cls, d: Dict[str, any]) -> ListAlertsResponseAlert:
        """Deserializes the ListAlertsResponseAlert from a dictionary."""
        return cls(
            condition=_from_dict(d, 'condition', AlertCondition),
            create_time=d.get('create_time'),
            custom_body=d.get('custom_body'),
            custom_subject=d.get('custom_subject'),
            display_name=d.get('display_name'),
            id=d.get('id'),
            lifecycle_state=_enum(d, 'lifecycle_state', LifecycleState),
            owner_user_name=d.get('owner_user_name'),
            query_id=d.get('query_id'),
            seconds_to_retrigger=d.get('seconds_to_retrigger'),
            state=_enum(d, 'state', AlertState),
            trigger_time=d.get('trigger_time'),
            update_time=d.get('update_time'),
        )
|
|
2863
|
+
|
|
2864
|
+
|
|
1989
2865
|
class ListOrder(Enum):
|
|
1990
2866
|
|
|
1991
2867
|
CREATED_AT = 'created_at'
|
|
@@ -2018,6 +2894,118 @@ class ListQueriesResponse:
|
|
|
2018
2894
|
res=_repeated_dict(d, 'res', QueryInfo))
|
|
2019
2895
|
|
|
2020
2896
|
|
|
2897
|
+
@dataclass
class ListQueryObjectsResponse:
    """A single page of queries returned by the list endpoint."""

    next_page_token: Optional[str] = None

    results: Optional[List[ListQueryObjectsResponseQuery]] = None

    def as_dict(self) -> dict:
        """Serializes the ListQueryObjectsResponse into a dictionary suitable for use as a JSON request body."""
        serialized = {}
        if self.next_page_token is not None:
            serialized['next_page_token'] = self.next_page_token
        # Emit the results list only when non-empty, per generated-SDK convention.
        if self.results:
            serialized['results'] = [query.as_dict() for query in self.results]
        return serialized

    @classmethod
    def from_dict(cls, d: Dict[str, any]) -> ListQueryObjectsResponse:
        """Deserializes the ListQueryObjectsResponse from a dictionary."""
        return cls(
            next_page_token=d.get('next_page_token'),
            results=_repeated_dict(d, 'results', ListQueryObjectsResponseQuery),
        )
|
|
2915
|
+
|
|
2916
|
+
|
|
2917
|
+
@dataclass
class ListQueryObjectsResponseQuery:
    """A single query entry within a ListQueryObjectsResponse page."""

    apply_auto_limit: Optional[bool] = None
    """Whether to apply a 1000 row limit to the query result."""

    catalog: Optional[str] = None
    """Name of the catalog where this query will be executed."""

    create_time: Optional[str] = None
    """Timestamp when this query was created."""

    description: Optional[str] = None
    """General description that conveys additional information about this query such as usage notes."""

    display_name: Optional[str] = None
    """Display name of the query that appears in list views, widget headings, and on the query page."""

    id: Optional[str] = None
    """UUID identifying the query."""

    last_modifier_user_name: Optional[str] = None
    """Username of the user who last saved changes to this query."""

    lifecycle_state: Optional[LifecycleState] = None
    """Indicates whether the query is trashed."""

    owner_user_name: Optional[str] = None
    """Username of the user that owns the query."""

    parameters: Optional[List[QueryParameter]] = None
    """List of query parameter definitions."""

    query_text: Optional[str] = None
    """Text of the query to be run."""

    run_as_mode: Optional[RunAsMode] = None
    """Sets the "Run as" role for the object."""

    schema: Optional[str] = None
    """Name of the schema where this query will be executed."""

    tags: Optional[List[str]] = None

    update_time: Optional[str] = None
    """Timestamp when this query was last updated."""

    warehouse_id: Optional[str] = None
    """ID of the SQL warehouse attached to the query."""

    def as_dict(self) -> dict:
        """Serializes the ListQueryObjectsResponseQuery into a dictionary suitable for use as a JSON request body."""
        serialized = {}
        # Scalars emit when set (even False/0); lists/objects only when non-empty; enums by value.
        if self.apply_auto_limit is not None:
            serialized['apply_auto_limit'] = self.apply_auto_limit
        if self.catalog is not None:
            serialized['catalog'] = self.catalog
        if self.create_time is not None:
            serialized['create_time'] = self.create_time
        if self.description is not None:
            serialized['description'] = self.description
        if self.display_name is not None:
            serialized['display_name'] = self.display_name
        if self.id is not None:
            serialized['id'] = self.id
        if self.last_modifier_user_name is not None:
            serialized['last_modifier_user_name'] = self.last_modifier_user_name
        if self.lifecycle_state is not None:
            serialized['lifecycle_state'] = self.lifecycle_state.value
        if self.owner_user_name is not None:
            serialized['owner_user_name'] = self.owner_user_name
        if self.parameters:
            serialized['parameters'] = [param.as_dict() for param in self.parameters]
        if self.query_text is not None:
            serialized['query_text'] = self.query_text
        if self.run_as_mode is not None:
            serialized['run_as_mode'] = self.run_as_mode.value
        if self.schema is not None:
            serialized['schema'] = self.schema
        if self.tags:
            serialized['tags'] = list(self.tags)
        if self.update_time is not None:
            serialized['update_time'] = self.update_time
        if self.warehouse_id is not None:
            serialized['warehouse_id'] = self.warehouse_id
        return serialized

    @classmethod
    def from_dict(cls, d: Dict[str, any]) -> ListQueryObjectsResponseQuery:
        """Deserializes the ListQueryObjectsResponseQuery from a dictionary."""
        return cls(
            apply_auto_limit=d.get('apply_auto_limit'),
            catalog=d.get('catalog'),
            create_time=d.get('create_time'),
            description=d.get('description'),
            display_name=d.get('display_name'),
            id=d.get('id'),
            last_modifier_user_name=d.get('last_modifier_user_name'),
            lifecycle_state=_enum(d, 'lifecycle_state', LifecycleState),
            owner_user_name=d.get('owner_user_name'),
            parameters=_repeated_dict(d, 'parameters', QueryParameter),
            query_text=d.get('query_text'),
            run_as_mode=_enum(d, 'run_as_mode', RunAsMode),
            schema=d.get('schema'),
            tags=d.get('tags'),
            update_time=d.get('update_time'),
            warehouse_id=d.get('warehouse_id'),
        )
|
|
3007
|
+
|
|
3008
|
+
|
|
2021
3009
|
@dataclass
|
|
2022
3010
|
class ListResponse:
|
|
2023
3011
|
count: Optional[int] = None
|
|
@@ -2050,6 +3038,26 @@ class ListResponse:
|
|
|
2050
3038
|
results=_repeated_dict(d, 'results', Dashboard))
|
|
2051
3039
|
|
|
2052
3040
|
|
|
3041
|
+
@dataclass
|
|
3042
|
+
class ListVisualizationsForQueryResponse:
|
|
3043
|
+
next_page_token: Optional[str] = None
|
|
3044
|
+
|
|
3045
|
+
results: Optional[List[Visualization]] = None
|
|
3046
|
+
|
|
3047
|
+
def as_dict(self) -> dict:
|
|
3048
|
+
"""Serializes the ListVisualizationsForQueryResponse into a dictionary suitable for use as a JSON request body."""
|
|
3049
|
+
body = {}
|
|
3050
|
+
if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
|
|
3051
|
+
if self.results: body['results'] = [v.as_dict() for v in self.results]
|
|
3052
|
+
return body
|
|
3053
|
+
|
|
3054
|
+
@classmethod
|
|
3055
|
+
def from_dict(cls, d: Dict[str, any]) -> ListVisualizationsForQueryResponse:
|
|
3056
|
+
"""Deserializes the ListVisualizationsForQueryResponse from a dictionary."""
|
|
3057
|
+
return cls(next_page_token=d.get('next_page_token', None),
|
|
3058
|
+
results=_repeated_dict(d, 'results', Visualization))
|
|
3059
|
+
|
|
3060
|
+
|
|
2053
3061
|
@dataclass
|
|
2054
3062
|
class ListWarehousesResponse:
|
|
2055
3063
|
warehouses: Optional[List[EndpointInfo]] = None
|
|
@@ -2069,9 +3077,6 @@ class ListWarehousesResponse:
|
|
|
2069
3077
|
|
|
2070
3078
|
@dataclass
|
|
2071
3079
|
class MultiValuesOptions:
|
|
2072
|
-
"""If specified, allows multiple values to be selected for this parameter. Only applies to dropdown
|
|
2073
|
-
list and query-based dropdown list parameters."""
|
|
2074
|
-
|
|
2075
3080
|
prefix: Optional[str] = None
|
|
2076
3081
|
"""Character that prefixes each selected parameter value."""
|
|
2077
3082
|
|
|
@@ -2097,6 +3102,22 @@ class MultiValuesOptions:
|
|
|
2097
3102
|
suffix=d.get('suffix', None))
|
|
2098
3103
|
|
|
2099
3104
|
|
|
3105
|
+
@dataclass
class NumericValue:
    """Wrapper for a single numeric parameter value."""

    value: Optional[float] = None

    def as_dict(self) -> dict:
        """Serializes the NumericValue into a dictionary suitable for use as a JSON request body."""
        serialized = {}
        # 0.0 is a valid value, so compare against None rather than truthiness.
        if self.value is not None:
            serialized['value'] = self.value
        return serialized

    @classmethod
    def from_dict(cls, d: Dict[str, any]) -> NumericValue:
        """Deserializes the NumericValue from a dictionary."""
        return cls(value=d.get('value'))
|
|
3119
|
+
|
|
3120
|
+
|
|
2100
3121
|
class ObjectType(Enum):
|
|
2101
3122
|
"""A singular noun object type."""
|
|
2102
3123
|
|
|
@@ -2221,7 +3242,7 @@ class PermissionLevel(Enum):
|
|
|
2221
3242
|
|
|
2222
3243
|
|
|
2223
3244
|
class PlansState(Enum):
|
|
2224
|
-
"""
|
|
3245
|
+
"""Possible Reasons for which we have not saved plans in the database"""
|
|
2225
3246
|
|
|
2226
3247
|
EMPTY = 'EMPTY'
|
|
2227
3248
|
EXISTS = 'EXISTS'
|
|
@@ -2231,150 +3252,135 @@ class PlansState(Enum):
|
|
|
2231
3252
|
UNKNOWN = 'UNKNOWN'
|
|
2232
3253
|
|
|
2233
3254
|
|
|
2234
|
-
@dataclass
class Query:
    """A SQL query object as returned by the queries API."""

    apply_auto_limit: Optional[bool] = None
    """Whether to apply a 1000 row limit to the query result."""

    catalog: Optional[str] = None
    """Name of the catalog where this query will be executed."""

    create_time: Optional[str] = None
    """Timestamp when this query was created."""

    description: Optional[str] = None
    """General description that conveys additional information about this query such as usage notes."""

    display_name: Optional[str] = None
    """Display name of the query that appears in list views, widget headings, and on the query page."""

    id: Optional[str] = None
    """UUID identifying the query."""

    last_modifier_user_name: Optional[str] = None
    """Username of the user who last saved changes to this query."""

    lifecycle_state: Optional[LifecycleState] = None
    """Indicates whether the query is trashed."""

    owner_user_name: Optional[str] = None
    """Username of the user that owns the query."""

    parameters: Optional[List[QueryParameter]] = None
    """List of query parameter definitions."""

    parent_path: Optional[str] = None
    """Workspace path of the workspace folder containing the object."""

    query_text: Optional[str] = None
    """Text of the query to be run."""

    run_as_mode: Optional[RunAsMode] = None
    """Sets the "Run as" role for the object."""

    schema: Optional[str] = None
    """Name of the schema where this query will be executed."""

    tags: Optional[List[str]] = None

    update_time: Optional[str] = None
    """Timestamp when this query was last updated."""

    warehouse_id: Optional[str] = None
    """ID of the SQL warehouse attached to the query."""

    def as_dict(self) -> dict:
        """Serializes the Query into a dictionary suitable for use as a JSON request body."""
        serialized = {}
        # Scalars emit when set (even False/0); lists/objects only when non-empty; enums by value.
        if self.apply_auto_limit is not None:
            serialized['apply_auto_limit'] = self.apply_auto_limit
        if self.catalog is not None:
            serialized['catalog'] = self.catalog
        if self.create_time is not None:
            serialized['create_time'] = self.create_time
        if self.description is not None:
            serialized['description'] = self.description
        if self.display_name is not None:
            serialized['display_name'] = self.display_name
        if self.id is not None:
            serialized['id'] = self.id
        if self.last_modifier_user_name is not None:
            serialized['last_modifier_user_name'] = self.last_modifier_user_name
        if self.lifecycle_state is not None:
            serialized['lifecycle_state'] = self.lifecycle_state.value
        if self.owner_user_name is not None:
            serialized['owner_user_name'] = self.owner_user_name
        if self.parameters:
            serialized['parameters'] = [param.as_dict() for param in self.parameters]
        if self.parent_path is not None:
            serialized['parent_path'] = self.parent_path
        if self.query_text is not None:
            serialized['query_text'] = self.query_text
        if self.run_as_mode is not None:
            serialized['run_as_mode'] = self.run_as_mode.value
        if self.schema is not None:
            serialized['schema'] = self.schema
        if self.tags:
            serialized['tags'] = list(self.tags)
        if self.update_time is not None:
            serialized['update_time'] = self.update_time
        if self.warehouse_id is not None:
            serialized['warehouse_id'] = self.warehouse_id
        return serialized

    @classmethod
    def from_dict(cls, d: Dict[str, any]) -> Query:
        """Deserializes the Query from a dictionary."""
        return cls(
            apply_auto_limit=d.get('apply_auto_limit'),
            catalog=d.get('catalog'),
            create_time=d.get('create_time'),
            description=d.get('description'),
            display_name=d.get('display_name'),
            id=d.get('id'),
            last_modifier_user_name=d.get('last_modifier_user_name'),
            lifecycle_state=_enum(d, 'lifecycle_state', LifecycleState),
            owner_user_name=d.get('owner_user_name'),
            parameters=_repeated_dict(d, 'parameters', QueryParameter),
            parent_path=d.get('parent_path'),
            query_text=d.get('query_text'),
            run_as_mode=_enum(d, 'run_as_mode', RunAsMode),
            schema=d.get('schema'),
            tags=d.get('tags'),
            update_time=d.get('update_time'),
            warehouse_id=d.get('warehouse_id'),
        )
|
|
3350
|
+
|
|
3351
|
+
|
|
3352
|
+
@dataclass
class QueryBackedValue:
    """Parameter values sourced from another query's results."""

    multi_values_options: Optional[MultiValuesOptions] = None
    """If specified, allows multiple values to be selected for this parameter."""

    query_id: Optional[str] = None
    """UUID of the query that provides the parameter values."""

    values: Optional[List[str]] = None
    """List of selected query parameter values."""

    def as_dict(self) -> dict:
        """Serializes the QueryBackedValue into a dictionary suitable for use as a JSON request body."""
        serialized = {}
        # Nested object and list fields are emitted only when truthy, per generated-SDK convention.
        if self.multi_values_options:
            serialized['multi_values_options'] = self.multi_values_options.as_dict()
        if self.query_id is not None:
            serialized['query_id'] = self.query_id
        if self.values:
            serialized['values'] = list(self.values)
        return serialized

    @classmethod
    def from_dict(cls, d: Dict[str, any]) -> QueryBackedValue:
        """Deserializes the QueryBackedValue from a dictionary."""
        return cls(
            multi_values_options=_from_dict(d, 'multi_values_options', MultiValuesOptions),
            query_id=d.get('query_id'),
            values=d.get('values'),
        )
|
|
2371
3377
|
|
|
2372
3378
|
|
|
2373
3379
|
@dataclass
|
|
2374
3380
|
class QueryEditContent:
|
|
2375
3381
|
data_source_id: Optional[str] = None
|
|
2376
3382
|
"""Data source ID maps to the ID of the data source used by the resource and is distinct from the
|
|
2377
|
-
warehouse ID. [Learn more]
|
|
3383
|
+
warehouse ID. [Learn more]
|
|
2378
3384
|
|
|
2379
3385
|
[Learn more]: https://docs.databricks.com/api/workspace/datasources/list"""
|
|
2380
3386
|
|
|
@@ -2428,12 +3434,11 @@ class QueryEditContent:
|
|
|
2428
3434
|
|
|
2429
3435
|
@dataclass
|
|
2430
3436
|
class QueryFilter:
|
|
2431
|
-
|
|
3437
|
+
context_filter: Optional[ContextFilter] = None
|
|
3438
|
+
"""Filter by one or more property describing where the query was generated"""
|
|
2432
3439
|
|
|
2433
3440
|
query_start_time_range: Optional[TimeRange] = None
|
|
2434
|
-
|
|
2435
|
-
statement_ids: Optional[List[str]] = None
|
|
2436
|
-
"""A list of statement IDs."""
|
|
3441
|
+
"""A range filter for query submitted time. The time range must be <= 30 days."""
|
|
2437
3442
|
|
|
2438
3443
|
statuses: Optional[List[QueryStatus]] = None
|
|
2439
3444
|
|
|
@@ -2446,8 +3451,8 @@ class QueryFilter:
|
|
|
2446
3451
|
def as_dict(self) -> dict:
|
|
2447
3452
|
"""Serializes the QueryFilter into a dictionary suitable for use as a JSON request body."""
|
|
2448
3453
|
body = {}
|
|
3454
|
+
if self.context_filter: body['context_filter'] = self.context_filter.as_dict()
|
|
2449
3455
|
if self.query_start_time_range: body['query_start_time_range'] = self.query_start_time_range.as_dict()
|
|
2450
|
-
if self.statement_ids: body['statement_ids'] = [v for v in self.statement_ids]
|
|
2451
3456
|
if self.statuses: body['statuses'] = [v.value for v in self.statuses]
|
|
2452
3457
|
if self.user_ids: body['user_ids'] = [v for v in self.user_ids]
|
|
2453
3458
|
if self.warehouse_ids: body['warehouse_ids'] = [v for v in self.warehouse_ids]
|
|
@@ -2456,8 +3461,8 @@ class QueryFilter:
|
|
|
2456
3461
|
@classmethod
|
|
2457
3462
|
def from_dict(cls, d: Dict[str, any]) -> QueryFilter:
|
|
2458
3463
|
"""Deserializes the QueryFilter from a dictionary."""
|
|
2459
|
-
return cls(
|
|
2460
|
-
|
|
3464
|
+
return cls(context_filter=_from_dict(d, 'context_filter', ContextFilter),
|
|
3465
|
+
query_start_time_range=_from_dict(d, 'query_start_time_range', TimeRange),
|
|
2461
3466
|
statuses=_repeated_enum(d, 'statuses', QueryStatus),
|
|
2462
3467
|
user_ids=d.get('user_ids', None),
|
|
2463
3468
|
warehouse_ids=d.get('warehouse_ids', None))
|
|
@@ -2465,14 +3470,11 @@ class QueryFilter:
|
|
|
2465
3470
|
|
|
2466
3471
|
@dataclass
|
|
2467
3472
|
class QueryInfo:
|
|
2468
|
-
can_subscribe_to_live_query: Optional[bool] = None
|
|
2469
|
-
"""Reserved for internal use."""
|
|
2470
|
-
|
|
2471
3473
|
channel_used: Optional[ChannelInfo] = None
|
|
2472
|
-
"""
|
|
3474
|
+
"""SQL Warehouse channel information at the time of query execution"""
|
|
2473
3475
|
|
|
2474
3476
|
duration: Optional[int] = None
|
|
2475
|
-
"""Total execution time of the
|
|
3477
|
+
"""Total execution time of the statement ( excluding result fetch time )."""
|
|
2476
3478
|
|
|
2477
3479
|
endpoint_id: Optional[str] = None
|
|
2478
3480
|
"""Alias for `warehouse_id`."""
|
|
@@ -2507,6 +3509,8 @@ class QueryInfo:
|
|
|
2507
3509
|
query_id: Optional[str] = None
|
|
2508
3510
|
"""The query ID."""
|
|
2509
3511
|
|
|
3512
|
+
query_source: Optional[QuerySource] = None
|
|
3513
|
+
|
|
2510
3514
|
query_start_time_ms: Optional[int] = None
|
|
2511
3515
|
"""The time the query started."""
|
|
2512
3516
|
|
|
@@ -2517,15 +3521,17 @@ class QueryInfo:
|
|
|
2517
3521
|
"""The number of results returned by the query."""
|
|
2518
3522
|
|
|
2519
3523
|
spark_ui_url: Optional[str] = None
|
|
2520
|
-
"""URL to the query plan."""
|
|
3524
|
+
"""URL to the Spark UI query plan."""
|
|
2521
3525
|
|
|
2522
3526
|
statement_type: Optional[QueryStatementType] = None
|
|
2523
3527
|
"""Type of statement for this query"""
|
|
2524
3528
|
|
|
2525
3529
|
status: Optional[QueryStatus] = None
|
|
2526
|
-
"""Query status with one the following values:
|
|
2527
|
-
|
|
2528
|
-
Query has
|
|
3530
|
+
"""Query status with one the following values:
|
|
3531
|
+
|
|
3532
|
+
- `QUEUED`: Query has been received and queued. - `RUNNING`: Query has started. - `CANCELED`:
|
|
3533
|
+
Query has been cancelled by the user. - `FAILED`: Query has failed. - `FINISHED`: Query has
|
|
3534
|
+
completed."""
|
|
2529
3535
|
|
|
2530
3536
|
user_id: Optional[int] = None
|
|
2531
3537
|
"""The ID of the user who ran the query."""
|
|
@@ -2539,8 +3545,6 @@ class QueryInfo:
|
|
|
2539
3545
|
def as_dict(self) -> dict:
|
|
2540
3546
|
"""Serializes the QueryInfo into a dictionary suitable for use as a JSON request body."""
|
|
2541
3547
|
body = {}
|
|
2542
|
-
if self.can_subscribe_to_live_query is not None:
|
|
2543
|
-
body['canSubscribeToLiveQuery'] = self.can_subscribe_to_live_query
|
|
2544
3548
|
if self.channel_used: body['channel_used'] = self.channel_used.as_dict()
|
|
2545
3549
|
if self.duration is not None: body['duration'] = self.duration
|
|
2546
3550
|
if self.endpoint_id is not None: body['endpoint_id'] = self.endpoint_id
|
|
@@ -2554,6 +3558,7 @@ class QueryInfo:
|
|
|
2554
3558
|
if self.plans_state is not None: body['plans_state'] = self.plans_state.value
|
|
2555
3559
|
if self.query_end_time_ms is not None: body['query_end_time_ms'] = self.query_end_time_ms
|
|
2556
3560
|
if self.query_id is not None: body['query_id'] = self.query_id
|
|
3561
|
+
if self.query_source: body['query_source'] = self.query_source.as_dict()
|
|
2557
3562
|
if self.query_start_time_ms is not None: body['query_start_time_ms'] = self.query_start_time_ms
|
|
2558
3563
|
if self.query_text is not None: body['query_text'] = self.query_text
|
|
2559
3564
|
if self.rows_produced is not None: body['rows_produced'] = self.rows_produced
|
|
@@ -2568,8 +3573,7 @@ class QueryInfo:
|
|
|
2568
3573
|
@classmethod
|
|
2569
3574
|
def from_dict(cls, d: Dict[str, any]) -> QueryInfo:
|
|
2570
3575
|
"""Deserializes the QueryInfo from a dictionary."""
|
|
2571
|
-
return cls(
|
|
2572
|
-
channel_used=_from_dict(d, 'channel_used', ChannelInfo),
|
|
3576
|
+
return cls(channel_used=_from_dict(d, 'channel_used', ChannelInfo),
|
|
2573
3577
|
duration=d.get('duration', None),
|
|
2574
3578
|
endpoint_id=d.get('endpoint_id', None),
|
|
2575
3579
|
error_message=d.get('error_message', None),
|
|
@@ -2582,6 +3586,7 @@ class QueryInfo:
|
|
|
2582
3586
|
plans_state=_enum(d, 'plans_state', PlansState),
|
|
2583
3587
|
query_end_time_ms=d.get('query_end_time_ms', None),
|
|
2584
3588
|
query_id=d.get('query_id', None),
|
|
3589
|
+
query_source=_from_dict(d, 'query_source', QuerySource),
|
|
2585
3590
|
query_start_time_ms=d.get('query_start_time_ms', None),
|
|
2586
3591
|
query_text=d.get('query_text', None),
|
|
2587
3592
|
rows_produced=d.get('rows_produced', None),
|
|
@@ -2604,7 +3609,7 @@ class QueryList:
|
|
|
2604
3609
|
page_size: Optional[int] = None
|
|
2605
3610
|
"""The number of queries per page."""
|
|
2606
3611
|
|
|
2607
|
-
results: Optional[List[
|
|
3612
|
+
results: Optional[List[LegacyQuery]] = None
|
|
2608
3613
|
"""List of queries returned."""
|
|
2609
3614
|
|
|
2610
3615
|
def as_dict(self) -> dict:
|
|
@@ -2622,12 +3627,13 @@ class QueryList:
|
|
|
2622
3627
|
return cls(count=d.get('count', None),
|
|
2623
3628
|
page=d.get('page', None),
|
|
2624
3629
|
page_size=d.get('page_size', None),
|
|
2625
|
-
results=_repeated_dict(d, 'results',
|
|
3630
|
+
results=_repeated_dict(d, 'results', LegacyQuery))
|
|
2626
3631
|
|
|
2627
3632
|
|
|
2628
3633
|
@dataclass
|
|
2629
3634
|
class QueryMetrics:
|
|
2630
|
-
"""
|
|
3635
|
+
"""A query metric that encapsulates a set of measurements for a single query. Metrics come from the
|
|
3636
|
+
driver and are stored in the history service database."""
|
|
2631
3637
|
|
|
2632
3638
|
compilation_time_ms: Optional[int] = None
|
|
2633
3639
|
"""Time spent loading metadata and optimizing the query, in milliseconds."""
|
|
@@ -2635,9 +3641,6 @@ class QueryMetrics:
|
|
|
2635
3641
|
execution_time_ms: Optional[int] = None
|
|
2636
3642
|
"""Time spent executing the query, in milliseconds."""
|
|
2637
3643
|
|
|
2638
|
-
metadata_time_ms: Optional[int] = None
|
|
2639
|
-
"""Reserved for internal use."""
|
|
2640
|
-
|
|
2641
3644
|
network_sent_bytes: Optional[int] = None
|
|
2642
3645
|
"""Total amount of data sent over the network between executor nodes during shuffle, in bytes."""
|
|
2643
3646
|
|
|
@@ -2648,9 +3651,6 @@ class QueryMetrics:
|
|
|
2648
3651
|
photon_total_time_ms: Optional[int] = None
|
|
2649
3652
|
"""Total execution time for all individual Photon query engine tasks in the query, in milliseconds."""
|
|
2650
3653
|
|
|
2651
|
-
planning_time_ms: Optional[int] = None
|
|
2652
|
-
"""Reserved for internal use."""
|
|
2653
|
-
|
|
2654
3654
|
provisioning_queue_start_timestamp: Optional[int] = None
|
|
2655
3655
|
"""Timestamp of when the query was enqueued waiting for a cluster to be provisioned for the
|
|
2656
3656
|
warehouse. This field is optional and will not appear if the query skipped the provisioning
|
|
@@ -2665,9 +3665,6 @@ class QueryMetrics:
|
|
|
2665
3665
|
query_compilation_start_timestamp: Optional[int] = None
|
|
2666
3666
|
"""Timestamp of when the underlying compute started compilation of the query."""
|
|
2667
3667
|
|
|
2668
|
-
query_execution_time_ms: Optional[int] = None
|
|
2669
|
-
"""Reserved for internal use."""
|
|
2670
|
-
|
|
2671
3668
|
read_bytes: Optional[int] = None
|
|
2672
3669
|
"""Total size of data read by the query, in bytes."""
|
|
2673
3670
|
|
|
@@ -2675,7 +3672,7 @@ class QueryMetrics:
|
|
|
2675
3672
|
"""Size of persistent data read from the cache, in bytes."""
|
|
2676
3673
|
|
|
2677
3674
|
read_files_count: Optional[int] = None
|
|
2678
|
-
"""Number of files read after pruning
|
|
3675
|
+
"""Number of files read after pruning"""
|
|
2679
3676
|
|
|
2680
3677
|
read_partitions_count: Optional[int] = None
|
|
2681
3678
|
"""Number of partitions read after pruning."""
|
|
@@ -2687,7 +3684,7 @@ class QueryMetrics:
|
|
|
2687
3684
|
"""Time spent fetching the query results after the execution finished, in milliseconds."""
|
|
2688
3685
|
|
|
2689
3686
|
result_from_cache: Optional[bool] = None
|
|
2690
|
-
"""true if the query result was fetched from cache, false otherwise."""
|
|
3687
|
+
"""`true` if the query result was fetched from cache, `false` otherwise."""
|
|
2691
3688
|
|
|
2692
3689
|
rows_produced_count: Optional[int] = None
|
|
2693
3690
|
"""Total number of rows returned by the query."""
|
|
@@ -2712,20 +3709,16 @@ class QueryMetrics:
|
|
|
2712
3709
|
body = {}
|
|
2713
3710
|
if self.compilation_time_ms is not None: body['compilation_time_ms'] = self.compilation_time_ms
|
|
2714
3711
|
if self.execution_time_ms is not None: body['execution_time_ms'] = self.execution_time_ms
|
|
2715
|
-
if self.metadata_time_ms is not None: body['metadata_time_ms'] = self.metadata_time_ms
|
|
2716
3712
|
if self.network_sent_bytes is not None: body['network_sent_bytes'] = self.network_sent_bytes
|
|
2717
3713
|
if self.overloading_queue_start_timestamp is not None:
|
|
2718
3714
|
body['overloading_queue_start_timestamp'] = self.overloading_queue_start_timestamp
|
|
2719
3715
|
if self.photon_total_time_ms is not None: body['photon_total_time_ms'] = self.photon_total_time_ms
|
|
2720
|
-
if self.planning_time_ms is not None: body['planning_time_ms'] = self.planning_time_ms
|
|
2721
3716
|
if self.provisioning_queue_start_timestamp is not None:
|
|
2722
3717
|
body['provisioning_queue_start_timestamp'] = self.provisioning_queue_start_timestamp
|
|
2723
3718
|
if self.pruned_bytes is not None: body['pruned_bytes'] = self.pruned_bytes
|
|
2724
3719
|
if self.pruned_files_count is not None: body['pruned_files_count'] = self.pruned_files_count
|
|
2725
3720
|
if self.query_compilation_start_timestamp is not None:
|
|
2726
3721
|
body['query_compilation_start_timestamp'] = self.query_compilation_start_timestamp
|
|
2727
|
-
if self.query_execution_time_ms is not None:
|
|
2728
|
-
body['query_execution_time_ms'] = self.query_execution_time_ms
|
|
2729
3722
|
if self.read_bytes is not None: body['read_bytes'] = self.read_bytes
|
|
2730
3723
|
if self.read_cache_bytes is not None: body['read_cache_bytes'] = self.read_cache_bytes
|
|
2731
3724
|
if self.read_files_count is not None: body['read_files_count'] = self.read_files_count
|
|
@@ -2746,16 +3739,13 @@ class QueryMetrics:
|
|
|
2746
3739
|
"""Deserializes the QueryMetrics from a dictionary."""
|
|
2747
3740
|
return cls(compilation_time_ms=d.get('compilation_time_ms', None),
|
|
2748
3741
|
execution_time_ms=d.get('execution_time_ms', None),
|
|
2749
|
-
metadata_time_ms=d.get('metadata_time_ms', None),
|
|
2750
3742
|
network_sent_bytes=d.get('network_sent_bytes', None),
|
|
2751
3743
|
overloading_queue_start_timestamp=d.get('overloading_queue_start_timestamp', None),
|
|
2752
3744
|
photon_total_time_ms=d.get('photon_total_time_ms', None),
|
|
2753
|
-
planning_time_ms=d.get('planning_time_ms', None),
|
|
2754
3745
|
provisioning_queue_start_timestamp=d.get('provisioning_queue_start_timestamp', None),
|
|
2755
3746
|
pruned_bytes=d.get('pruned_bytes', None),
|
|
2756
3747
|
pruned_files_count=d.get('pruned_files_count', None),
|
|
2757
3748
|
query_compilation_start_timestamp=d.get('query_compilation_start_timestamp', None),
|
|
2758
|
-
query_execution_time_ms=d.get('query_execution_time_ms', None),
|
|
2759
3749
|
read_bytes=d.get('read_bytes', None),
|
|
2760
3750
|
read_cache_bytes=d.get('read_cache_bytes', None),
|
|
2761
3751
|
read_files_count=d.get('read_files_count', None),
|
|
@@ -2803,11 +3793,64 @@ class QueryOptions:
|
|
|
2803
3793
|
schema=d.get('schema', None))
|
|
2804
3794
|
|
|
2805
3795
|
|
|
3796
|
+
@dataclass
|
|
3797
|
+
class QueryParameter:
|
|
3798
|
+
date_range_value: Optional[DateRangeValue] = None
|
|
3799
|
+
"""Date-range query parameter value. Can only specify one of `dynamic_date_range_value` or
|
|
3800
|
+
`date_range_value`."""
|
|
3801
|
+
|
|
3802
|
+
date_value: Optional[DateValue] = None
|
|
3803
|
+
"""Date query parameter value. Can only specify one of `dynamic_date_value` or `date_value`."""
|
|
3804
|
+
|
|
3805
|
+
enum_value: Optional[EnumValue] = None
|
|
3806
|
+
"""Dropdown query parameter value."""
|
|
3807
|
+
|
|
3808
|
+
name: Optional[str] = None
|
|
3809
|
+
"""Literal parameter marker that appears between double curly braces in the query text."""
|
|
3810
|
+
|
|
3811
|
+
numeric_value: Optional[NumericValue] = None
|
|
3812
|
+
"""Numeric query parameter value."""
|
|
3813
|
+
|
|
3814
|
+
query_backed_value: Optional[QueryBackedValue] = None
|
|
3815
|
+
"""Query-based dropdown query parameter value."""
|
|
3816
|
+
|
|
3817
|
+
text_value: Optional[TextValue] = None
|
|
3818
|
+
"""Text query parameter value."""
|
|
3819
|
+
|
|
3820
|
+
title: Optional[str] = None
|
|
3821
|
+
"""Text displayed in the user-facing parameter widget in the UI."""
|
|
3822
|
+
|
|
3823
|
+
def as_dict(self) -> dict:
|
|
3824
|
+
"""Serializes the QueryParameter into a dictionary suitable for use as a JSON request body."""
|
|
3825
|
+
body = {}
|
|
3826
|
+
if self.date_range_value: body['date_range_value'] = self.date_range_value.as_dict()
|
|
3827
|
+
if self.date_value: body['date_value'] = self.date_value.as_dict()
|
|
3828
|
+
if self.enum_value: body['enum_value'] = self.enum_value.as_dict()
|
|
3829
|
+
if self.name is not None: body['name'] = self.name
|
|
3830
|
+
if self.numeric_value: body['numeric_value'] = self.numeric_value.as_dict()
|
|
3831
|
+
if self.query_backed_value: body['query_backed_value'] = self.query_backed_value.as_dict()
|
|
3832
|
+
if self.text_value: body['text_value'] = self.text_value.as_dict()
|
|
3833
|
+
if self.title is not None: body['title'] = self.title
|
|
3834
|
+
return body
|
|
3835
|
+
|
|
3836
|
+
@classmethod
|
|
3837
|
+
def from_dict(cls, d: Dict[str, any]) -> QueryParameter:
|
|
3838
|
+
"""Deserializes the QueryParameter from a dictionary."""
|
|
3839
|
+
return cls(date_range_value=_from_dict(d, 'date_range_value', DateRangeValue),
|
|
3840
|
+
date_value=_from_dict(d, 'date_value', DateValue),
|
|
3841
|
+
enum_value=_from_dict(d, 'enum_value', EnumValue),
|
|
3842
|
+
name=d.get('name', None),
|
|
3843
|
+
numeric_value=_from_dict(d, 'numeric_value', NumericValue),
|
|
3844
|
+
query_backed_value=_from_dict(d, 'query_backed_value', QueryBackedValue),
|
|
3845
|
+
text_value=_from_dict(d, 'text_value', TextValue),
|
|
3846
|
+
title=d.get('title', None))
|
|
3847
|
+
|
|
3848
|
+
|
|
2806
3849
|
@dataclass
|
|
2807
3850
|
class QueryPostContent:
|
|
2808
3851
|
data_source_id: Optional[str] = None
|
|
2809
3852
|
"""Data source ID maps to the ID of the data source used by the resource and is distinct from the
|
|
2810
|
-
warehouse ID. [Learn more]
|
|
3853
|
+
warehouse ID. [Learn more]
|
|
2811
3854
|
|
|
2812
3855
|
[Learn more]: https://docs.databricks.com/api/workspace/datasources/list"""
|
|
2813
3856
|
|
|
@@ -2860,8 +3903,187 @@ class QueryPostContent:
|
|
|
2860
3903
|
tags=d.get('tags', None))
|
|
2861
3904
|
|
|
2862
3905
|
|
|
3906
|
+
@dataclass
|
|
3907
|
+
class QuerySource:
|
|
3908
|
+
alert_id: Optional[str] = None
|
|
3909
|
+
"""UUID"""
|
|
3910
|
+
|
|
3911
|
+
client_call_context: Optional[ClientCallContext] = None
|
|
3912
|
+
"""Client code that triggered the request"""
|
|
3913
|
+
|
|
3914
|
+
command_id: Optional[str] = None
|
|
3915
|
+
"""Id associated with a notebook cell"""
|
|
3916
|
+
|
|
3917
|
+
command_run_id: Optional[str] = None
|
|
3918
|
+
"""Id associated with a notebook run or execution"""
|
|
3919
|
+
|
|
3920
|
+
dashboard_id: Optional[str] = None
|
|
3921
|
+
"""UUID"""
|
|
3922
|
+
|
|
3923
|
+
dashboard_v3_id: Optional[str] = None
|
|
3924
|
+
"""UUID for Lakeview Dashboards, separate from DBSQL Dashboards (dashboard_id)"""
|
|
3925
|
+
|
|
3926
|
+
driver_info: Optional[QuerySourceDriverInfo] = None
|
|
3927
|
+
|
|
3928
|
+
entry_point: Optional[QuerySourceEntryPoint] = None
|
|
3929
|
+
"""Spark service that received and processed the query"""
|
|
3930
|
+
|
|
3931
|
+
genie_space_id: Optional[str] = None
|
|
3932
|
+
"""UUID for Genie space"""
|
|
3933
|
+
|
|
3934
|
+
is_cloud_fetch: Optional[bool] = None
|
|
3935
|
+
|
|
3936
|
+
is_databricks_sql_exec_api: Optional[bool] = None
|
|
3937
|
+
|
|
3938
|
+
job_id: Optional[str] = None
|
|
3939
|
+
|
|
3940
|
+
job_managed_by: Optional[QuerySourceJobManager] = None
|
|
3941
|
+
"""With background compute, jobs can be managed by different internal teams. When not specified,
|
|
3942
|
+
not a background compute job When specified and the value is not JOBS, it is a background
|
|
3943
|
+
compute job"""
|
|
3944
|
+
|
|
3945
|
+
notebook_id: Optional[str] = None
|
|
3946
|
+
|
|
3947
|
+
pipeline_id: Optional[str] = None
|
|
3948
|
+
"""Id associated with a DLT pipeline"""
|
|
3949
|
+
|
|
3950
|
+
pipeline_update_id: Optional[str] = None
|
|
3951
|
+
"""Id associated with a DLT update"""
|
|
3952
|
+
|
|
3953
|
+
query_tags: Optional[str] = None
|
|
3954
|
+
"""String provided by a customer that'll help them identify the query"""
|
|
3955
|
+
|
|
3956
|
+
run_id: Optional[str] = None
|
|
3957
|
+
"""Id associated with a job run or execution"""
|
|
3958
|
+
|
|
3959
|
+
runnable_command_id: Optional[str] = None
|
|
3960
|
+
"""Id associated with a notebook cell run or execution"""
|
|
3961
|
+
|
|
3962
|
+
scheduled_by: Optional[QuerySourceTrigger] = None
|
|
3963
|
+
|
|
3964
|
+
serverless_channel_info: Optional[ServerlessChannelInfo] = None
|
|
3965
|
+
|
|
3966
|
+
source_query_id: Optional[str] = None
|
|
3967
|
+
"""UUID"""
|
|
3968
|
+
|
|
3969
|
+
def as_dict(self) -> dict:
|
|
3970
|
+
"""Serializes the QuerySource into a dictionary suitable for use as a JSON request body."""
|
|
3971
|
+
body = {}
|
|
3972
|
+
if self.alert_id is not None: body['alert_id'] = self.alert_id
|
|
3973
|
+
if self.client_call_context: body['client_call_context'] = self.client_call_context.as_dict()
|
|
3974
|
+
if self.command_id is not None: body['command_id'] = self.command_id
|
|
3975
|
+
if self.command_run_id is not None: body['command_run_id'] = self.command_run_id
|
|
3976
|
+
if self.dashboard_id is not None: body['dashboard_id'] = self.dashboard_id
|
|
3977
|
+
if self.dashboard_v3_id is not None: body['dashboard_v3_id'] = self.dashboard_v3_id
|
|
3978
|
+
if self.driver_info: body['driver_info'] = self.driver_info.as_dict()
|
|
3979
|
+
if self.entry_point is not None: body['entry_point'] = self.entry_point.value
|
|
3980
|
+
if self.genie_space_id is not None: body['genie_space_id'] = self.genie_space_id
|
|
3981
|
+
if self.is_cloud_fetch is not None: body['is_cloud_fetch'] = self.is_cloud_fetch
|
|
3982
|
+
if self.is_databricks_sql_exec_api is not None:
|
|
3983
|
+
body['is_databricks_sql_exec_api'] = self.is_databricks_sql_exec_api
|
|
3984
|
+
if self.job_id is not None: body['job_id'] = self.job_id
|
|
3985
|
+
if self.job_managed_by is not None: body['job_managed_by'] = self.job_managed_by.value
|
|
3986
|
+
if self.notebook_id is not None: body['notebook_id'] = self.notebook_id
|
|
3987
|
+
if self.pipeline_id is not None: body['pipeline_id'] = self.pipeline_id
|
|
3988
|
+
if self.pipeline_update_id is not None: body['pipeline_update_id'] = self.pipeline_update_id
|
|
3989
|
+
if self.query_tags is not None: body['query_tags'] = self.query_tags
|
|
3990
|
+
if self.run_id is not None: body['run_id'] = self.run_id
|
|
3991
|
+
if self.runnable_command_id is not None: body['runnable_command_id'] = self.runnable_command_id
|
|
3992
|
+
if self.scheduled_by is not None: body['scheduled_by'] = self.scheduled_by.value
|
|
3993
|
+
if self.serverless_channel_info:
|
|
3994
|
+
body['serverless_channel_info'] = self.serverless_channel_info.as_dict()
|
|
3995
|
+
if self.source_query_id is not None: body['source_query_id'] = self.source_query_id
|
|
3996
|
+
return body
|
|
3997
|
+
|
|
3998
|
+
@classmethod
|
|
3999
|
+
def from_dict(cls, d: Dict[str, any]) -> QuerySource:
|
|
4000
|
+
"""Deserializes the QuerySource from a dictionary."""
|
|
4001
|
+
return cls(alert_id=d.get('alert_id', None),
|
|
4002
|
+
client_call_context=_from_dict(d, 'client_call_context', ClientCallContext),
|
|
4003
|
+
command_id=d.get('command_id', None),
|
|
4004
|
+
command_run_id=d.get('command_run_id', None),
|
|
4005
|
+
dashboard_id=d.get('dashboard_id', None),
|
|
4006
|
+
dashboard_v3_id=d.get('dashboard_v3_id', None),
|
|
4007
|
+
driver_info=_from_dict(d, 'driver_info', QuerySourceDriverInfo),
|
|
4008
|
+
entry_point=_enum(d, 'entry_point', QuerySourceEntryPoint),
|
|
4009
|
+
genie_space_id=d.get('genie_space_id', None),
|
|
4010
|
+
is_cloud_fetch=d.get('is_cloud_fetch', None),
|
|
4011
|
+
is_databricks_sql_exec_api=d.get('is_databricks_sql_exec_api', None),
|
|
4012
|
+
job_id=d.get('job_id', None),
|
|
4013
|
+
job_managed_by=_enum(d, 'job_managed_by', QuerySourceJobManager),
|
|
4014
|
+
notebook_id=d.get('notebook_id', None),
|
|
4015
|
+
pipeline_id=d.get('pipeline_id', None),
|
|
4016
|
+
pipeline_update_id=d.get('pipeline_update_id', None),
|
|
4017
|
+
query_tags=d.get('query_tags', None),
|
|
4018
|
+
run_id=d.get('run_id', None),
|
|
4019
|
+
runnable_command_id=d.get('runnable_command_id', None),
|
|
4020
|
+
scheduled_by=_enum(d, 'scheduled_by', QuerySourceTrigger),
|
|
4021
|
+
serverless_channel_info=_from_dict(d, 'serverless_channel_info', ServerlessChannelInfo),
|
|
4022
|
+
source_query_id=d.get('source_query_id', None))
|
|
4023
|
+
|
|
4024
|
+
|
|
4025
|
+
@dataclass
|
|
4026
|
+
class QuerySourceDriverInfo:
|
|
4027
|
+
bi_tool_entry: Optional[str] = None
|
|
4028
|
+
|
|
4029
|
+
driver_name: Optional[str] = None
|
|
4030
|
+
|
|
4031
|
+
simba_branding_vendor: Optional[str] = None
|
|
4032
|
+
|
|
4033
|
+
version_number: Optional[str] = None
|
|
4034
|
+
|
|
4035
|
+
def as_dict(self) -> dict:
|
|
4036
|
+
"""Serializes the QuerySourceDriverInfo into a dictionary suitable for use as a JSON request body."""
|
|
4037
|
+
body = {}
|
|
4038
|
+
if self.bi_tool_entry is not None: body['bi_tool_entry'] = self.bi_tool_entry
|
|
4039
|
+
if self.driver_name is not None: body['driver_name'] = self.driver_name
|
|
4040
|
+
if self.simba_branding_vendor is not None: body['simba_branding_vendor'] = self.simba_branding_vendor
|
|
4041
|
+
if self.version_number is not None: body['version_number'] = self.version_number
|
|
4042
|
+
return body
|
|
4043
|
+
|
|
4044
|
+
@classmethod
|
|
4045
|
+
def from_dict(cls, d: Dict[str, any]) -> QuerySourceDriverInfo:
|
|
4046
|
+
"""Deserializes the QuerySourceDriverInfo from a dictionary."""
|
|
4047
|
+
return cls(bi_tool_entry=d.get('bi_tool_entry', None),
|
|
4048
|
+
driver_name=d.get('driver_name', None),
|
|
4049
|
+
simba_branding_vendor=d.get('simba_branding_vendor', None),
|
|
4050
|
+
version_number=d.get('version_number', None))
|
|
4051
|
+
|
|
4052
|
+
|
|
4053
|
+
class QuerySourceEntryPoint(Enum):
|
|
4054
|
+
"""Spark service that received and processed the query"""
|
|
4055
|
+
|
|
4056
|
+
DLT = 'DLT'
|
|
4057
|
+
SPARK_CONNECT = 'SPARK_CONNECT'
|
|
4058
|
+
THRIFT_SERVER = 'THRIFT_SERVER'
|
|
4059
|
+
|
|
4060
|
+
|
|
4061
|
+
class QuerySourceJobManager(Enum):
|
|
4062
|
+
"""Copied from elastic-spark-common/api/messages/manager.proto with enum values changed by 1 to
|
|
4063
|
+
accommodate JOB_MANAGER_UNSPECIFIED"""
|
|
4064
|
+
|
|
4065
|
+
APP_SYSTEM_TABLE = 'APP_SYSTEM_TABLE'
|
|
4066
|
+
AUTOML = 'AUTOML'
|
|
4067
|
+
AUTO_MAINTENANCE = 'AUTO_MAINTENANCE'
|
|
4068
|
+
CLEAN_ROOMS = 'CLEAN_ROOMS'
|
|
4069
|
+
DATA_MONITORING = 'DATA_MONITORING'
|
|
4070
|
+
DATA_SHARING = 'DATA_SHARING'
|
|
4071
|
+
ENCRYPTION = 'ENCRYPTION'
|
|
4072
|
+
FABRIC_CRAWLER = 'FABRIC_CRAWLER'
|
|
4073
|
+
JOBS = 'JOBS'
|
|
4074
|
+
LAKEVIEW = 'LAKEVIEW'
|
|
4075
|
+
MANAGED_RAG = 'MANAGED_RAG'
|
|
4076
|
+
SCHEDULED_MV_REFRESH = 'SCHEDULED_MV_REFRESH'
|
|
4077
|
+
TESTING = 'TESTING'
|
|
4078
|
+
|
|
4079
|
+
|
|
4080
|
+
class QuerySourceTrigger(Enum):
|
|
4081
|
+
|
|
4082
|
+
MANUAL = 'MANUAL'
|
|
4083
|
+
SCHEDULED = 'SCHEDULED'
|
|
4084
|
+
|
|
4085
|
+
|
|
2863
4086
|
class QueryStatementType(Enum):
|
|
2864
|
-
"""Type of statement for this query"""
|
|
2865
4087
|
|
|
2866
4088
|
ALTER = 'ALTER'
|
|
2867
4089
|
ANALYZE = 'ANALYZE'
|
|
@@ -2888,15 +4110,16 @@ class QueryStatementType(Enum):
|
|
|
2888
4110
|
|
|
2889
4111
|
|
|
2890
4112
|
class QueryStatus(Enum):
|
|
2891
|
-
"""
|
|
2892
|
-
`RUNNING`: Query has started. * `CANCELED`: Query has been cancelled by the user. * `FAILED`:
|
|
2893
|
-
Query has failed. * `FINISHED`: Query has completed."""
|
|
4113
|
+
"""Statuses which are also used by OperationStatus in runtime"""
|
|
2894
4114
|
|
|
2895
4115
|
CANCELED = 'CANCELED'
|
|
4116
|
+
COMPILED = 'COMPILED'
|
|
4117
|
+
COMPILING = 'COMPILING'
|
|
2896
4118
|
FAILED = 'FAILED'
|
|
2897
4119
|
FINISHED = 'FINISHED'
|
|
2898
4120
|
QUEUED = 'QUEUED'
|
|
2899
4121
|
RUNNING = 'RUNNING'
|
|
4122
|
+
STARTED = 'STARTED'
|
|
2900
4123
|
|
|
2901
4124
|
|
|
2902
4125
|
@dataclass
|
|
@@ -2937,12 +4160,6 @@ class RestoreResponse:
|
|
|
2937
4160
|
|
|
2938
4161
|
@dataclass
|
|
2939
4162
|
class ResultData:
|
|
2940
|
-
"""Contains the result data of a single chunk when using `INLINE` disposition. When using
|
|
2941
|
-
`EXTERNAL_LINKS` disposition, the array `external_links` is used instead to provide presigned
|
|
2942
|
-
URLs to the result data in cloud storage. Exactly one of these alternatives is used. (While the
|
|
2943
|
-
`external_links` array prepares the API to return multiple links in a single response. Currently
|
|
2944
|
-
only a single link is returned.)"""
|
|
2945
|
-
|
|
2946
4163
|
byte_count: Optional[int] = None
|
|
2947
4164
|
"""The number of bytes in the result chunk. This field is not available when using `INLINE`
|
|
2948
4165
|
disposition."""
|
|
@@ -3069,6 +4286,12 @@ class ResultSchema:
|
|
|
3069
4286
|
return cls(column_count=d.get('column_count', None), columns=_repeated_dict(d, 'columns', ColumnInfo))
|
|
3070
4287
|
|
|
3071
4288
|
|
|
4289
|
+
class RunAsMode(Enum):
|
|
4290
|
+
|
|
4291
|
+
OWNER = 'OWNER'
|
|
4292
|
+
VIEWER = 'VIEWER'
|
|
4293
|
+
|
|
4294
|
+
|
|
3072
4295
|
class RunAsRole(Enum):
|
|
3073
4296
|
"""Sets the **Run as** role for the object. Must be set to one of `"viewer"` (signifying "run as
|
|
3074
4297
|
viewer" behavior) or `"owner"` (signifying "run as owner" behavior)"""
|
|
@@ -3077,6 +4300,23 @@ class RunAsRole(Enum):
|
|
|
3077
4300
|
VIEWER = 'viewer'
|
|
3078
4301
|
|
|
3079
4302
|
|
|
4303
|
+
@dataclass
|
|
4304
|
+
class ServerlessChannelInfo:
|
|
4305
|
+
name: Optional[ChannelName] = None
|
|
4306
|
+
"""Name of the Channel"""
|
|
4307
|
+
|
|
4308
|
+
def as_dict(self) -> dict:
|
|
4309
|
+
"""Serializes the ServerlessChannelInfo into a dictionary suitable for use as a JSON request body."""
|
|
4310
|
+
body = {}
|
|
4311
|
+
if self.name is not None: body['name'] = self.name.value
|
|
4312
|
+
return body
|
|
4313
|
+
|
|
4314
|
+
@classmethod
|
|
4315
|
+
def from_dict(cls, d: Dict[str, any]) -> ServerlessChannelInfo:
|
|
4316
|
+
"""Deserializes the ServerlessChannelInfo from a dictionary."""
|
|
4317
|
+
return cls(name=_enum(d, 'name', ChannelName))
|
|
4318
|
+
|
|
4319
|
+
|
|
3080
4320
|
@dataclass
|
|
3081
4321
|
class ServiceError:
|
|
3082
4322
|
error_code: Optional[ServiceErrorCode] = None
|
|
@@ -3273,8 +4513,10 @@ class StatementParameterListItem:
|
|
|
3273
4513
|
type: Optional[str] = None
|
|
3274
4514
|
"""The data type, given as a string. For example: `INT`, `STRING`, `DECIMAL(10,2)`. If no type is
|
|
3275
4515
|
given the type is assumed to be `STRING`. Complex types, such as `ARRAY`, `MAP`, and `STRUCT`
|
|
3276
|
-
are not supported. For valid types, refer to the section [Data
|
|
3277
|
-
|
|
4516
|
+
are not supported. For valid types, refer to the section [Data types] of the SQL language
|
|
4517
|
+
reference.
|
|
4518
|
+
|
|
4519
|
+
[Data types]: https://docs.databricks.com/sql/language-manual/functions/cast.html"""
|
|
3278
4520
|
|
|
3279
4521
|
value: Optional[str] = None
|
|
3280
4522
|
"""The value to substitute, represented as a string. If omitted, the value is interpreted as NULL."""
|
|
@@ -3293,6 +4535,38 @@ class StatementParameterListItem:
|
|
|
3293
4535
|
return cls(name=d.get('name', None), type=d.get('type', None), value=d.get('value', None))
|
|
3294
4536
|
|
|
3295
4537
|
|
|
4538
|
+
@dataclass
class StatementResponse:
    """Response envelope for a SQL statement execution call."""

    manifest: Optional[ResultManifest] = None
    # Schema and metadata describing the result set.

    result: Optional[ResultData] = None

    statement_id: Optional[str] = None
    # Returned on successful submission; the required reference for all
    # subsequent calls about this statement.

    status: Optional[StatementStatus] = None
    # Execution state plus error information when relevant.

    def as_dict(self) -> dict:
        """Serializes the StatementResponse into a dictionary suitable for use as a JSON request body."""
        serialized = {}
        if self.manifest:
            serialized['manifest'] = self.manifest.as_dict()
        if self.result:
            serialized['result'] = self.result.as_dict()
        if self.statement_id is not None:
            serialized['statement_id'] = self.statement_id
        if self.status:
            serialized['status'] = self.status.as_dict()
        return serialized

    @classmethod
    def from_dict(cls, d: Dict[str, any]) -> StatementResponse:
        """Deserializes the StatementResponse from a dictionary."""
        return cls(
            manifest=_from_dict(d, 'manifest', ResultManifest),
            result=_from_dict(d, 'result', ResultData),
            statement_id=d.get('statement_id', None),
            status=_from_dict(d, 'status', StatementStatus),
        )
|
|
4568
|
+
|
|
4569
|
+
|
|
3296
4570
|
class StatementState(Enum):
|
|
3297
4571
|
"""Statement execution state: - `PENDING`: waiting for warehouse - `RUNNING`: running -
|
|
3298
4572
|
`SUCCEEDED`: execution was successful, result data available for fetch - `FAILED`: execution
|
|
@@ -3492,19 +4766,35 @@ class TerminationReasonCode(Enum):
|
|
|
3492
4766
|
class TerminationReasonType(Enum):
    """type of the termination"""

    CLIENT_ERROR = 'CLIENT_ERROR'
    CLOUD_FAILURE = 'CLOUD_FAILURE'
    SERVICE_FAULT = 'SERVICE_FAULT'
    SUCCESS = 'SUCCESS'
|
|
4773
|
+
|
|
4774
|
+
|
|
4775
|
+
@dataclass
class TextValue:
    """A single free-text parameter value."""

    value: Optional[str] = None

    def as_dict(self) -> dict:
        """Serializes the TextValue into a dictionary suitable for use as a JSON request body."""
        serialized = {}
        if self.value is not None:
            serialized['value'] = self.value
        return serialized

    @classmethod
    def from_dict(cls, d: Dict[str, any]) -> TextValue:
        """Deserializes the TextValue from a dictionary."""
        return cls(value=d.get('value'))
|
|
3499
4789
|
|
|
3500
4790
|
|
|
3501
4791
|
@dataclass
|
|
3502
4792
|
class TimeRange:
|
|
3503
4793
|
end_time_ms: Optional[int] = None
|
|
3504
|
-
"""
|
|
4794
|
+
"""The end time in milliseconds."""
|
|
3505
4795
|
|
|
3506
4796
|
start_time_ms: Optional[int] = None
|
|
3507
|
-
"""
|
|
4797
|
+
"""The start time in milliseconds."""
|
|
3508
4798
|
|
|
3509
4799
|
def as_dict(self) -> dict:
|
|
3510
4800
|
"""Serializes the TimeRange into a dictionary suitable for use as a JSON request body."""
|
|
@@ -3536,6 +4826,179 @@ class TransferOwnershipObjectId:
|
|
|
3536
4826
|
return cls(new_owner=d.get('new_owner', None))
|
|
3537
4827
|
|
|
3538
4828
|
|
|
4829
|
+
@dataclass
class UpdateAlertRequest:
    """Request payload for PATCHing an alert."""

    update_mask: str
    # Required PATCH field mask: a single comma-separated string (no spaces)
    # naming which fields of the payload to update.

    alert: Optional[UpdateAlertRequestAlert] = None

    id: Optional[str] = None

    def as_dict(self) -> dict:
        """Serializes the UpdateAlertRequest into a dictionary suitable for use as a JSON request body."""
        serialized = {}
        if self.alert:
            serialized['alert'] = self.alert.as_dict()
        if self.id is not None:
            serialized['id'] = self.id
        if self.update_mask is not None:
            serialized['update_mask'] = self.update_mask
        return serialized

    @classmethod
    def from_dict(cls, d: Dict[str, any]) -> UpdateAlertRequest:
        """Deserializes the UpdateAlertRequest from a dictionary."""
        return cls(
            alert=_from_dict(d, 'alert', UpdateAlertRequestAlert),
            id=d.get('id', None),
            update_mask=d.get('update_mask', None),
        )
|
|
4854
|
+
|
|
4855
|
+
|
|
4856
|
+
@dataclass
class UpdateAlertRequestAlert:
    """Mutable fields of an alert for an update call."""

    condition: Optional[AlertCondition] = None
    # Trigger conditions of the alert.

    custom_body: Optional[str] = None
    # Custom body of the alert notification, if any; supports templating
    # (see https://docs.databricks.com/sql/user/alerts/index.html).

    custom_subject: Optional[str] = None
    # Custom subject of the alert notification (email subject / Slack header),
    # if any; supports the same templating.

    display_name: Optional[str] = None
    # The display name of the alert.

    owner_user_name: Optional[str] = None
    # Owner's username; "Unavailable" when the user has been deleted.

    query_id: Optional[str] = None
    # UUID of the query attached to the alert.

    seconds_to_retrigger: Optional[int] = None
    # Rearm delay in seconds; 0 or unset means the alert never retriggers.

    def as_dict(self) -> dict:
        """Serializes the UpdateAlertRequestAlert into a dictionary suitable for use as a JSON request body."""
        serialized = {}
        if self.condition:
            serialized['condition'] = self.condition.as_dict()
        if self.custom_body is not None:
            serialized['custom_body'] = self.custom_body
        if self.custom_subject is not None:
            serialized['custom_subject'] = self.custom_subject
        if self.display_name is not None:
            serialized['display_name'] = self.display_name
        if self.owner_user_name is not None:
            serialized['owner_user_name'] = self.owner_user_name
        if self.query_id is not None:
            serialized['query_id'] = self.query_id
        if self.seconds_to_retrigger is not None:
            serialized['seconds_to_retrigger'] = self.seconds_to_retrigger
        return serialized

    @classmethod
    def from_dict(cls, d: Dict[str, any]) -> UpdateAlertRequestAlert:
        """Deserializes the UpdateAlertRequestAlert from a dictionary."""
        return cls(
            condition=_from_dict(d, 'condition', AlertCondition),
            custom_body=d.get('custom_body', None),
            custom_subject=d.get('custom_subject', None),
            display_name=d.get('display_name', None),
            owner_user_name=d.get('owner_user_name', None),
            query_id=d.get('query_id', None),
            seconds_to_retrigger=d.get('seconds_to_retrigger', None),
        )
|
|
4907
|
+
|
|
4908
|
+
|
|
4909
|
+
@dataclass
class UpdateQueryRequest:
    """Request payload for PATCHing a query."""

    update_mask: str
    # Required PATCH field mask: a single comma-separated string (no spaces)
    # naming which fields of the payload to update.

    id: Optional[str] = None

    query: Optional[UpdateQueryRequestQuery] = None

    def as_dict(self) -> dict:
        """Serializes the UpdateQueryRequest into a dictionary suitable for use as a JSON request body."""
        serialized = {}
        if self.id is not None:
            serialized['id'] = self.id
        if self.query:
            serialized['query'] = self.query.as_dict()
        if self.update_mask is not None:
            serialized['update_mask'] = self.update_mask
        return serialized

    @classmethod
    def from_dict(cls, d: Dict[str, any]) -> UpdateQueryRequest:
        """Deserializes the UpdateQueryRequest from a dictionary."""
        return cls(
            id=d.get('id', None),
            query=_from_dict(d, 'query', UpdateQueryRequestQuery),
            update_mask=d.get('update_mask', None),
        )
|
|
4934
|
+
|
|
4935
|
+
|
|
4936
|
+
@dataclass
class UpdateQueryRequestQuery:
    """Mutable fields of a query for an update call."""

    apply_auto_limit: Optional[bool] = None
    # Whether to apply a 1000 row limit to the query result.

    catalog: Optional[str] = None
    # Name of the catalog where this query will be executed.

    description: Optional[str] = None
    # General description conveying additional information, e.g. usage notes.

    display_name: Optional[str] = None
    # Display name shown in list views, widget headings, and the query page.

    owner_user_name: Optional[str] = None
    # Username of the user that owns the query.

    parameters: Optional[List[QueryParameter]] = None
    # List of query parameter definitions.

    query_text: Optional[str] = None
    # Text of the query to be run.

    run_as_mode: Optional[RunAsMode] = None
    # Sets the "Run as" role for the object.

    schema: Optional[str] = None
    # Name of the schema where this query will be executed.

    tags: Optional[List[str]] = None

    warehouse_id: Optional[str] = None
    # ID of the SQL warehouse attached to the query.

    def as_dict(self) -> dict:
        """Serializes the UpdateQueryRequestQuery into a dictionary suitable for use as a JSON request body."""
        serialized = {}
        if self.apply_auto_limit is not None:
            serialized['apply_auto_limit'] = self.apply_auto_limit
        if self.catalog is not None:
            serialized['catalog'] = self.catalog
        if self.description is not None:
            serialized['description'] = self.description
        if self.display_name is not None:
            serialized['display_name'] = self.display_name
        if self.owner_user_name is not None:
            serialized['owner_user_name'] = self.owner_user_name
        if self.parameters:
            serialized['parameters'] = [entry.as_dict() for entry in self.parameters]
        if self.query_text is not None:
            serialized['query_text'] = self.query_text
        if self.run_as_mode is not None:
            serialized['run_as_mode'] = self.run_as_mode.value
        if self.schema is not None:
            serialized['schema'] = self.schema
        if self.tags:
            serialized['tags'] = list(self.tags)
        if self.warehouse_id is not None:
            serialized['warehouse_id'] = self.warehouse_id
        return serialized

    @classmethod
    def from_dict(cls, d: Dict[str, any]) -> UpdateQueryRequestQuery:
        """Deserializes the UpdateQueryRequestQuery from a dictionary."""
        return cls(
            apply_auto_limit=d.get('apply_auto_limit', None),
            catalog=d.get('catalog', None),
            description=d.get('description', None),
            display_name=d.get('display_name', None),
            owner_user_name=d.get('owner_user_name', None),
            parameters=_repeated_dict(d, 'parameters', QueryParameter),
            query_text=d.get('query_text', None),
            run_as_mode=_enum(d, 'run_as_mode', RunAsMode),
            schema=d.get('schema', None),
            tags=d.get('tags', None),
            warehouse_id=d.get('warehouse_id', None),
        )
|
|
5000
|
+
|
|
5001
|
+
|
|
3539
5002
|
@dataclass
|
|
3540
5003
|
class UpdateResponse:
|
|
3541
5004
|
|
|
@@ -3550,6 +5013,67 @@ class UpdateResponse:
|
|
|
3550
5013
|
return cls()
|
|
3551
5014
|
|
|
3552
5015
|
|
|
5016
|
+
@dataclass
class UpdateVisualizationRequest:
    """Request payload for PATCHing a visualization."""

    update_mask: str
    # Required PATCH field mask: a single comma-separated string (no spaces)
    # naming which fields of the payload to update.

    id: Optional[str] = None

    visualization: Optional[UpdateVisualizationRequestVisualization] = None

    def as_dict(self) -> dict:
        """Serializes the UpdateVisualizationRequest into a dictionary suitable for use as a JSON request body."""
        serialized = {}
        if self.id is not None:
            serialized['id'] = self.id
        if self.update_mask is not None:
            serialized['update_mask'] = self.update_mask
        if self.visualization:
            serialized['visualization'] = self.visualization.as_dict()
        return serialized

    @classmethod
    def from_dict(cls, d: Dict[str, any]) -> UpdateVisualizationRequest:
        """Deserializes the UpdateVisualizationRequest from a dictionary."""
        return cls(
            id=d.get('id', None),
            update_mask=d.get('update_mask', None),
            visualization=_from_dict(d, 'visualization', UpdateVisualizationRequestVisualization),
        )
|
|
5041
|
+
|
|
5042
|
+
|
|
5043
|
+
@dataclass
class UpdateVisualizationRequestVisualization:
    """Mutable fields of a visualization for an update call."""

    display_name: Optional[str] = None
    # The display name of the visualization.

    serialized_options: Optional[str] = None
    # Visualization options vary widely by visualization type and are
    # unsupported; Databricks does not recommend modifying them directly.

    serialized_query_plan: Optional[str] = None
    # The query plan likewise varies by type and is unsupported; Databricks
    # does not recommend modifying it directly.

    type: Optional[str] = None
    # The type of visualization: counter, table, funnel, and so on.

    def as_dict(self) -> dict:
        """Serializes the UpdateVisualizationRequestVisualization into a dictionary suitable for use as a JSON request body."""
        serialized = {}
        if self.display_name is not None:
            serialized['display_name'] = self.display_name
        if self.serialized_options is not None:
            serialized['serialized_options'] = self.serialized_options
        if self.serialized_query_plan is not None:
            serialized['serialized_query_plan'] = self.serialized_query_plan
        if self.type is not None:
            serialized['type'] = self.type
        return serialized

    @classmethod
    def from_dict(cls, d: Dict[str, any]) -> UpdateVisualizationRequestVisualization:
        """Deserializes the UpdateVisualizationRequestVisualization from a dictionary."""
        return cls(
            display_name=d.get('display_name', None),
            serialized_options=d.get('serialized_options', None),
            serialized_query_plan=d.get('serialized_query_plan', None),
            type=d.get('type', None),
        )
|
|
5075
|
+
|
|
5076
|
+
|
|
3553
5077
|
@dataclass
|
|
3554
5078
|
class User:
|
|
3555
5079
|
email: Optional[str] = None
|
|
@@ -3574,57 +5098,56 @@ class User:
|
|
|
3574
5098
|
|
|
3575
5099
|
@dataclass
class Visualization:
    """A visualization attached to a query."""

    create_time: Optional[str] = None
    # The timestamp indicating when the visualization was created.

    display_name: Optional[str] = None
    # The display name of the visualization.

    id: Optional[str] = None
    # UUID identifying the visualization.

    query_id: Optional[str] = None
    # UUID of the query that the visualization is attached to.

    serialized_options: Optional[str] = None
    # Visualization options vary widely by visualization type and are
    # unsupported; Databricks does not recommend modifying them directly.

    serialized_query_plan: Optional[str] = None
    # The query plan likewise varies by type and is unsupported; Databricks
    # does not recommend modifying it directly.

    type: Optional[str] = None
    # The type of visualization: counter, table, funnel, and so on.

    update_time: Optional[str] = None
    # The timestamp indicating when the visualization was updated.

    def as_dict(self) -> dict:
        """Serializes the Visualization into a dictionary suitable for use as a JSON request body."""
        serialized = {}
        if self.create_time is not None:
            serialized['create_time'] = self.create_time
        if self.display_name is not None:
            serialized['display_name'] = self.display_name
        if self.id is not None:
            serialized['id'] = self.id
        if self.query_id is not None:
            serialized['query_id'] = self.query_id
        if self.serialized_options is not None:
            serialized['serialized_options'] = self.serialized_options
        if self.serialized_query_plan is not None:
            serialized['serialized_query_plan'] = self.serialized_query_plan
        if self.type is not None:
            serialized['type'] = self.type
        if self.update_time is not None:
            serialized['update_time'] = self.update_time
        return serialized

    @classmethod
    def from_dict(cls, d: Dict[str, any]) -> Visualization:
        """Deserializes the Visualization from a dictionary."""
        return cls(
            create_time=d.get('create_time', None),
            display_name=d.get('display_name', None),
            id=d.get('id', None),
            query_id=d.get('query_id', None),
            serialized_options=d.get('serialized_options', None),
            serialized_query_plan=d.get('serialized_query_plan', None),
            type=d.get('type', None),
            update_time=d.get('update_time', None),
        )
|
|
3628
5151
|
|
|
3629
5152
|
|
|
3630
5153
|
@dataclass
|
|
@@ -3727,6 +5250,7 @@ class WarehousePermissionLevel(Enum):
|
|
|
3727
5250
|
"""Permission level"""
|
|
3728
5251
|
|
|
3729
5252
|
CAN_MANAGE = 'CAN_MANAGE'
|
|
5253
|
+
CAN_MONITOR = 'CAN_MONITOR'
|
|
3730
5254
|
CAN_USE = 'CAN_USE'
|
|
3731
5255
|
IS_OWNER = 'IS_OWNER'
|
|
3732
5256
|
|
|
@@ -3839,7 +5363,7 @@ class Widget:
|
|
|
3839
5363
|
|
|
3840
5364
|
options: Optional[WidgetOptions] = None
|
|
3841
5365
|
|
|
3842
|
-
visualization: Optional[
|
|
5366
|
+
visualization: Optional[LegacyVisualization] = None
|
|
3843
5367
|
"""The visualization description API changes frequently and is unsupported. You can duplicate a
|
|
3844
5368
|
visualization by copying description objects received _from the API_ and then using them to
|
|
3845
5369
|
create a new one with a POST request to the same endpoint. Databricks does not recommend
|
|
@@ -3862,7 +5386,7 @@ class Widget:
|
|
|
3862
5386
|
"""Deserializes the Widget from a dictionary."""
|
|
3863
5387
|
return cls(id=d.get('id', None),
|
|
3864
5388
|
options=_from_dict(d, 'options', WidgetOptions),
|
|
3865
|
-
visualization=_from_dict(d, 'visualization',
|
|
5389
|
+
visualization=_from_dict(d, 'visualization', LegacyVisualization),
|
|
3866
5390
|
width=d.get('width', None))
|
|
3867
5391
|
|
|
3868
5392
|
|
|
@@ -3929,37 +5453,150 @@ class WidgetPosition:
|
|
|
3929
5453
|
row: Optional[int] = None
|
|
3930
5454
|
"""row in the dashboard grid. Values start with 0"""
|
|
3931
5455
|
|
|
3932
|
-
size_x: Optional[int] = None
|
|
3933
|
-
"""width of the widget measured in dashboard grid cells"""
|
|
5456
|
+
size_x: Optional[int] = None
|
|
5457
|
+
"""width of the widget measured in dashboard grid cells"""
|
|
5458
|
+
|
|
5459
|
+
size_y: Optional[int] = None
|
|
5460
|
+
"""height of the widget measured in dashboard grid cells"""
|
|
5461
|
+
|
|
5462
|
+
def as_dict(self) -> dict:
|
|
5463
|
+
"""Serializes the WidgetPosition into a dictionary suitable for use as a JSON request body."""
|
|
5464
|
+
body = {}
|
|
5465
|
+
if self.auto_height is not None: body['autoHeight'] = self.auto_height
|
|
5466
|
+
if self.col is not None: body['col'] = self.col
|
|
5467
|
+
if self.row is not None: body['row'] = self.row
|
|
5468
|
+
if self.size_x is not None: body['sizeX'] = self.size_x
|
|
5469
|
+
if self.size_y is not None: body['sizeY'] = self.size_y
|
|
5470
|
+
return body
|
|
5471
|
+
|
|
5472
|
+
@classmethod
|
|
5473
|
+
def from_dict(cls, d: Dict[str, any]) -> WidgetPosition:
|
|
5474
|
+
"""Deserializes the WidgetPosition from a dictionary."""
|
|
5475
|
+
return cls(auto_height=d.get('autoHeight', None),
|
|
5476
|
+
col=d.get('col', None),
|
|
5477
|
+
row=d.get('row', None),
|
|
5478
|
+
size_x=d.get('sizeX', None),
|
|
5479
|
+
size_y=d.get('sizeY', None))
|
|
5480
|
+
|
|
5481
|
+
|
|
5482
|
+
class AlertsAPI:
    """The alerts API can be used to perform CRUD operations on alerts. An alert is a Databricks SQL object that
    periodically runs a query, evaluates a condition of its result, and notifies one or more users and/or
    notification destinations if the condition was met. Alerts can be scheduled using the `sql_task` type of
    the Jobs API, e.g. :method:jobs/create."""

    def __init__(self, api_client):
        # Thin wrapper over the shared API client; every method delegates to it.
        self._api = api_client

    def create(self, *, alert: Optional[CreateAlertRequestAlert] = None) -> Alert:
        """Create an alert.

        Creates an alert.

        :param alert: :class:`CreateAlertRequestAlert` (optional)

        :returns: :class:`Alert`
        """
        payload = {}
        if alert is not None:
            payload['alert'] = alert.as_dict()
        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }

        res = self._api.do('POST', '/api/2.0/sql/alerts', body=payload, headers=headers)
        return Alert.from_dict(res)

    def delete(self, id: str):
        """Delete an alert.

        Moves an alert to the trash. Trashed alerts immediately disappear from searches and list views, and
        can no longer trigger. You can restore a trashed alert through the UI. A trashed alert is permanently
        deleted after 30 days.

        :param id: str


        """

        headers = {'Accept': 'application/json', }

        self._api.do('DELETE', f'/api/2.0/sql/alerts/{id}', headers=headers)

    def get(self, id: str) -> Alert:
        """Get an alert.

        Gets an alert.

        :param id: str

        :returns: :class:`Alert`
        """

        headers = {'Accept': 'application/json', }

        res = self._api.do('GET', f'/api/2.0/sql/alerts/{id}', headers=headers)
        return Alert.from_dict(res)

    def list(self,
             *,
             page_size: Optional[int] = None,
             page_token: Optional[str] = None) -> Iterator[ListAlertsResponseAlert]:
        """List alerts.

        Gets a list of alerts accessible to the user, ordered by creation time. **Warning:** Calling this API
        concurrently 10 or more times could result in throttling, service degradation, or a temporary ban.

        :param page_size: int (optional)
        :param page_token: str (optional)

        :returns: Iterator over :class:`ListAlertsResponseAlert`
        """

        query = {}
        if page_size is not None:
            query['page_size'] = page_size
        if page_token is not None:
            query['page_token'] = page_token
        headers = {'Accept': 'application/json', }

        # Transparently follow next_page_token until the service stops returning one.
        while True:
            page = self._api.do('GET', '/api/2.0/sql/alerts', query=query, headers=headers)
            if 'results' in page:
                for item in page['results']:
                    yield ListAlertsResponseAlert.from_dict(item)
            if not page.get('next_page_token'):
                return
            query['page_token'] = page['next_page_token']

    def update(self, id: str, update_mask: str, *, alert: Optional[UpdateAlertRequestAlert] = None) -> Alert:
        """Update an alert.

        Updates an alert.

        :param id: str
        :param update_mask: str
          Field mask is required to be passed into the PATCH request. Field mask specifies which fields of the
          setting payload will be updated. The field mask needs to be supplied as single string. To specify
          multiple fields in the field mask, use comma as the separator (no space).
        :param alert: :class:`UpdateAlertRequestAlert` (optional)

        :returns: :class:`Alert`
        """
        payload = {}
        if alert is not None:
            payload['alert'] = alert.as_dict()
        if update_mask is not None:
            payload['update_mask'] = update_mask
        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }

        res = self._api.do('PATCH', f'/api/2.0/sql/alerts/{id}', body=payload, headers=headers)
        return Alert.from_dict(res)
|
|
3956
5588
|
|
|
3957
5589
|
|
|
3958
|
-
class
|
|
5590
|
+
class AlertsLegacyAPI:
|
|
3959
5591
|
"""The alerts API can be used to perform CRUD operations on alerts. An alert is a Databricks SQL object that
|
|
3960
5592
|
periodically runs a query, evaluates a condition of its result, and notifies one or more users and/or
|
|
3961
5593
|
notification destinations if the condition was met. Alerts can be scheduled using the `sql_task` type of
|
|
3962
|
-
the Jobs API, e.g. :method:jobs/create.
|
|
5594
|
+
the Jobs API, e.g. :method:jobs/create.
|
|
5595
|
+
|
|
5596
|
+
**Note**: A new version of the Databricks SQL API is now available. Please see the latest version. [Learn
|
|
5597
|
+
more]
|
|
5598
|
+
|
|
5599
|
+
[Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html"""
|
|
3963
5600
|
|
|
3964
5601
|
def __init__(self, api_client):
|
|
3965
5602
|
self._api = api_client
|
|
@@ -3970,12 +5607,17 @@ class AlertsAPI:
|
|
|
3970
5607
|
query_id: str,
|
|
3971
5608
|
*,
|
|
3972
5609
|
parent: Optional[str] = None,
|
|
3973
|
-
rearm: Optional[int] = None) ->
|
|
5610
|
+
rearm: Optional[int] = None) -> LegacyAlert:
|
|
3974
5611
|
"""Create an alert.
|
|
3975
5612
|
|
|
3976
5613
|
Creates an alert. An alert is a Databricks SQL object that periodically runs a query, evaluates a
|
|
3977
5614
|
condition of its result, and notifies users or notification destinations if the condition was met.
|
|
3978
5615
|
|
|
5616
|
+
**Note**: A new version of the Databricks SQL API is now available. Please use :method:alerts/create
|
|
5617
|
+
instead. [Learn more]
|
|
5618
|
+
|
|
5619
|
+
[Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html
|
|
5620
|
+
|
|
3979
5621
|
:param name: str
|
|
3980
5622
|
Name of the alert.
|
|
3981
5623
|
:param options: :class:`AlertOptions`
|
|
@@ -3988,7 +5630,7 @@ class AlertsAPI:
|
|
|
3988
5630
|
Number of seconds after being triggered before the alert rearms itself and can be triggered again.
|
|
3989
5631
|
If `null`, alert will never be triggered again.
|
|
3990
5632
|
|
|
3991
|
-
:returns: :class:`
|
|
5633
|
+
:returns: :class:`LegacyAlert`
|
|
3992
5634
|
"""
|
|
3993
5635
|
body = {}
|
|
3994
5636
|
if name is not None: body['name'] = name
|
|
@@ -3999,14 +5641,19 @@ class AlertsAPI:
|
|
|
3999
5641
|
headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
|
|
4000
5642
|
|
|
4001
5643
|
res = self._api.do('POST', '/api/2.0/preview/sql/alerts', body=body, headers=headers)
|
|
4002
|
-
return
|
|
5644
|
+
return LegacyAlert.from_dict(res)
|
|
4003
5645
|
|
|
4004
5646
|
def delete(self, alert_id: str):
|
|
4005
5647
|
"""Delete an alert.
|
|
4006
5648
|
|
|
4007
|
-
Deletes an alert. Deleted alerts are no longer accessible and cannot be restored. **Note
|
|
5649
|
+
Deletes an alert. Deleted alerts are no longer accessible and cannot be restored. **Note**: Unlike
|
|
4008
5650
|
queries and dashboards, alerts cannot be moved to the trash.
|
|
4009
5651
|
|
|
5652
|
+
**Note**: A new version of the Databricks SQL API is now available. Please use :method:alerts/delete
|
|
5653
|
+
instead. [Learn more]
|
|
5654
|
+
|
|
5655
|
+
[Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html
|
|
5656
|
+
|
|
4010
5657
|
:param alert_id: str
|
|
4011
5658
|
|
|
4012
5659
|
|
|
@@ -4016,33 +5663,43 @@ class AlertsAPI:
|
|
|
4016
5663
|
|
|
4017
5664
|
self._api.do('DELETE', f'/api/2.0/preview/sql/alerts/{alert_id}', headers=headers)
|
|
4018
5665
|
|
|
4019
|
-
def get(self, alert_id: str) ->
|
|
5666
|
+
def get(self, alert_id: str) -> LegacyAlert:
|
|
4020
5667
|
"""Get an alert.
|
|
4021
5668
|
|
|
4022
5669
|
Gets an alert.
|
|
4023
5670
|
|
|
5671
|
+
**Note**: A new version of the Databricks SQL API is now available. Please use :method:alerts/get
|
|
5672
|
+
instead. [Learn more]
|
|
5673
|
+
|
|
5674
|
+
[Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html
|
|
5675
|
+
|
|
4024
5676
|
:param alert_id: str
|
|
4025
5677
|
|
|
4026
|
-
:returns: :class:`
|
|
5678
|
+
:returns: :class:`LegacyAlert`
|
|
4027
5679
|
"""
|
|
4028
5680
|
|
|
4029
5681
|
headers = {'Accept': 'application/json', }
|
|
4030
5682
|
|
|
4031
5683
|
res = self._api.do('GET', f'/api/2.0/preview/sql/alerts/{alert_id}', headers=headers)
|
|
4032
|
-
return
|
|
5684
|
+
return LegacyAlert.from_dict(res)
|
|
4033
5685
|
|
|
4034
|
-
def list(self) -> Iterator[
|
|
5686
|
+
def list(self) -> Iterator[LegacyAlert]:
|
|
4035
5687
|
"""Get alerts.
|
|
4036
5688
|
|
|
4037
5689
|
Gets a list of alerts.
|
|
4038
5690
|
|
|
4039
|
-
|
|
5691
|
+
**Note**: A new version of the Databricks SQL API is now available. Please use :method:alerts/list
|
|
5692
|
+
instead. [Learn more]
|
|
5693
|
+
|
|
5694
|
+
[Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html
|
|
5695
|
+
|
|
5696
|
+
:returns: Iterator over :class:`LegacyAlert`
|
|
4040
5697
|
"""
|
|
4041
5698
|
|
|
4042
5699
|
headers = {'Accept': 'application/json', }
|
|
4043
5700
|
|
|
4044
5701
|
res = self._api.do('GET', '/api/2.0/preview/sql/alerts', headers=headers)
|
|
4045
|
-
return [
|
|
5702
|
+
return [LegacyAlert.from_dict(v) for v in res]
|
|
4046
5703
|
|
|
4047
5704
|
def update(self,
|
|
4048
5705
|
alert_id: str,
|
|
@@ -4055,6 +5712,11 @@ class AlertsAPI:
|
|
|
4055
5712
|
|
|
4056
5713
|
Updates an alert.
|
|
4057
5714
|
|
|
5715
|
+
**Note**: A new version of the Databricks SQL API is now available. Please use :method:alerts/update
|
|
5716
|
+
instead. [Learn more]
|
|
5717
|
+
|
|
5718
|
+
[Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html
|
|
5719
|
+
|
|
4058
5720
|
:param alert_id: str
|
|
4059
5721
|
:param name: str
|
|
4060
5722
|
Name of the alert.
|
|
@@ -4256,8 +5918,8 @@ class DashboardsAPI:
|
|
|
4256
5918
|
|
|
4257
5919
|
Fetch a paginated list of dashboard objects.
|
|
4258
5920
|
|
|
4259
|
-
|
|
4260
|
-
degradation, or a temporary ban
|
|
5921
|
+
**Warning**: Calling this API concurrently 10 or more times could result in throttling, service
|
|
5922
|
+
degradation, or a temporary ban.
|
|
4261
5923
|
|
|
4262
5924
|
:param order: :class:`ListOrder` (optional)
|
|
4263
5925
|
Name of dashboard attribute to order by.
|
|
@@ -4351,7 +6013,11 @@ class DataSourcesAPI:
|
|
|
4351
6013
|
|
|
4352
6014
|
This API does not support searches. It returns the full list of SQL warehouses in your workspace. We
|
|
4353
6015
|
advise you to use any text editor, REST client, or `grep` to search the response from this API for the
|
|
4354
|
-
name of your SQL warehouse as it appears in Databricks SQL.
|
|
6016
|
+
name of your SQL warehouse as it appears in Databricks SQL.
|
|
6017
|
+
|
|
6018
|
+
**Note**: A new version of the Databricks SQL API is now available. [Learn more]
|
|
6019
|
+
|
|
6020
|
+
[Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html"""
|
|
4355
6021
|
|
|
4356
6022
|
def __init__(self, api_client):
|
|
4357
6023
|
self._api = api_client
|
|
@@ -4363,6 +6029,11 @@ class DataSourcesAPI:
|
|
|
4363
6029
|
API response are enumerated for clarity. However, you need only a SQL warehouse's `id` to create new
|
|
4364
6030
|
queries against it.
|
|
4365
6031
|
|
|
6032
|
+
**Note**: A new version of the Databricks SQL API is now available. Please use :method:warehouses/list
|
|
6033
|
+
instead. [Learn more]
|
|
6034
|
+
|
|
6035
|
+
[Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html
|
|
6036
|
+
|
|
4366
6037
|
:returns: Iterator over :class:`DataSource`
|
|
4367
6038
|
"""
|
|
4368
6039
|
|
|
@@ -4383,7 +6054,11 @@ class DbsqlPermissionsAPI:
|
|
|
4383
6054
|
|
|
4384
6055
|
- `CAN_RUN`: Allows read access and run access (superset of `CAN_VIEW`)
|
|
4385
6056
|
|
|
4386
|
-
- `CAN_MANAGE`: Allows all actions: read, run, edit, delete, modify permissions (superset of `CAN_RUN`)
|
|
6057
|
+
- `CAN_MANAGE`: Allows all actions: read, run, edit, delete, modify permissions (superset of `CAN_RUN`)
|
|
6058
|
+
|
|
6059
|
+
**Note**: A new version of the Databricks SQL API is now available. [Learn more]
|
|
6060
|
+
|
|
6061
|
+
[Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html"""
|
|
4387
6062
|
|
|
4388
6063
|
def __init__(self, api_client):
|
|
4389
6064
|
self._api = api_client
|
|
@@ -4393,6 +6068,11 @@ class DbsqlPermissionsAPI:
|
|
|
4393
6068
|
|
|
4394
6069
|
Gets a JSON representation of the access control list (ACL) for a specified object.
|
|
4395
6070
|
|
|
6071
|
+
**Note**: A new version of the Databricks SQL API is now available. Please use
|
|
6072
|
+
:method:workspace/getpermissions instead. [Learn more]
|
|
6073
|
+
|
|
6074
|
+
[Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html
|
|
6075
|
+
|
|
4396
6076
|
:param object_type: :class:`ObjectTypePlural`
|
|
4397
6077
|
The type of object permissions to check.
|
|
4398
6078
|
:param object_id: str
|
|
@@ -4418,6 +6098,11 @@ class DbsqlPermissionsAPI:
|
|
|
4418
6098
|
Sets the access control list (ACL) for a specified object. This operation will complete rewrite the
|
|
4419
6099
|
ACL.
|
|
4420
6100
|
|
|
6101
|
+
**Note**: A new version of the Databricks SQL API is now available. Please use
|
|
6102
|
+
:method:workspace/setpermissions instead. [Learn more]
|
|
6103
|
+
|
|
6104
|
+
[Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html
|
|
6105
|
+
|
|
4421
6106
|
:param object_type: :class:`ObjectTypePlural`
|
|
4422
6107
|
The type of object permission to set.
|
|
4423
6108
|
:param object_id: str
|
|
@@ -4446,6 +6131,11 @@ class DbsqlPermissionsAPI:
|
|
|
4446
6131
|
|
|
4447
6132
|
Transfers ownership of a dashboard, query, or alert to an active user. Requires an admin API key.
|
|
4448
6133
|
|
|
6134
|
+
**Note**: A new version of the Databricks SQL API is now available. For queries and alerts, please use
|
|
6135
|
+
:method:queries/update and :method:alerts/update respectively instead. [Learn more]
|
|
6136
|
+
|
|
6137
|
+
[Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html
|
|
6138
|
+
|
|
4449
6139
|
:param object_type: :class:`OwnableObjectType`
|
|
4450
6140
|
The type of object on which to change ownership.
|
|
4451
6141
|
:param object_id: :class:`TransferOwnershipObjectId`
|
|
@@ -4467,9 +6157,154 @@ class DbsqlPermissionsAPI:
|
|
|
4467
6157
|
|
|
4468
6158
|
|
|
4469
6159
|
class QueriesAPI:
|
|
6160
|
+
"""The queries API can be used to perform CRUD operations on queries. A query is a Databricks SQL object that
|
|
6161
|
+
includes the target SQL warehouse, query text, name, description, tags, and parameters. Queries can be
|
|
6162
|
+
scheduled using the `sql_task` type of the Jobs API, e.g. :method:jobs/create."""
|
|
6163
|
+
|
|
6164
|
+
def __init__(self, api_client):
|
|
6165
|
+
self._api = api_client
|
|
6166
|
+
|
|
6167
|
+
def create(self, *, query: Optional[CreateQueryRequestQuery] = None) -> Query:
|
|
6168
|
+
"""Create a query.
|
|
6169
|
+
|
|
6170
|
+
Creates a query.
|
|
6171
|
+
|
|
6172
|
+
:param query: :class:`CreateQueryRequestQuery` (optional)
|
|
6173
|
+
|
|
6174
|
+
:returns: :class:`Query`
|
|
6175
|
+
"""
|
|
6176
|
+
body = {}
|
|
6177
|
+
if query is not None: body['query'] = query.as_dict()
|
|
6178
|
+
headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
|
|
6179
|
+
|
|
6180
|
+
res = self._api.do('POST', '/api/2.0/sql/queries', body=body, headers=headers)
|
|
6181
|
+
return Query.from_dict(res)
|
|
6182
|
+
|
|
6183
|
+
def delete(self, id: str):
|
|
6184
|
+
"""Delete a query.
|
|
6185
|
+
|
|
6186
|
+
Moves a query to the trash. Trashed queries immediately disappear from searches and list views, and
|
|
6187
|
+
cannot be used for alerts. You can restore a trashed query through the UI. A trashed query is
|
|
6188
|
+
permanently deleted after 30 days.
|
|
6189
|
+
|
|
6190
|
+
:param id: str
|
|
6191
|
+
|
|
6192
|
+
|
|
6193
|
+
"""
|
|
6194
|
+
|
|
6195
|
+
headers = {'Accept': 'application/json', }
|
|
6196
|
+
|
|
6197
|
+
self._api.do('DELETE', f'/api/2.0/sql/queries/{id}', headers=headers)
|
|
6198
|
+
|
|
6199
|
+
def get(self, id: str) -> Query:
|
|
6200
|
+
"""Get a query.
|
|
6201
|
+
|
|
6202
|
+
Gets a query.
|
|
6203
|
+
|
|
6204
|
+
:param id: str
|
|
6205
|
+
|
|
6206
|
+
:returns: :class:`Query`
|
|
6207
|
+
"""
|
|
6208
|
+
|
|
6209
|
+
headers = {'Accept': 'application/json', }
|
|
6210
|
+
|
|
6211
|
+
res = self._api.do('GET', f'/api/2.0/sql/queries/{id}', headers=headers)
|
|
6212
|
+
return Query.from_dict(res)
|
|
6213
|
+
|
|
6214
|
+
def list(self,
|
|
6215
|
+
*,
|
|
6216
|
+
page_size: Optional[int] = None,
|
|
6217
|
+
page_token: Optional[str] = None) -> Iterator[ListQueryObjectsResponseQuery]:
|
|
6218
|
+
"""List queries.
|
|
6219
|
+
|
|
6220
|
+
Gets a list of queries accessible to the user, ordered by creation time. **Warning:** Calling this API
|
|
6221
|
+
concurrently 10 or more times could result in throttling, service degradation, or a temporary ban.
|
|
6222
|
+
|
|
6223
|
+
:param page_size: int (optional)
|
|
6224
|
+
:param page_token: str (optional)
|
|
6225
|
+
|
|
6226
|
+
:returns: Iterator over :class:`ListQueryObjectsResponseQuery`
|
|
6227
|
+
"""
|
|
6228
|
+
|
|
6229
|
+
query = {}
|
|
6230
|
+
if page_size is not None: query['page_size'] = page_size
|
|
6231
|
+
if page_token is not None: query['page_token'] = page_token
|
|
6232
|
+
headers = {'Accept': 'application/json', }
|
|
6233
|
+
|
|
6234
|
+
while True:
|
|
6235
|
+
json = self._api.do('GET', '/api/2.0/sql/queries', query=query, headers=headers)
|
|
6236
|
+
if 'results' in json:
|
|
6237
|
+
for v in json['results']:
|
|
6238
|
+
yield ListQueryObjectsResponseQuery.from_dict(v)
|
|
6239
|
+
if 'next_page_token' not in json or not json['next_page_token']:
|
|
6240
|
+
return
|
|
6241
|
+
query['page_token'] = json['next_page_token']
|
|
6242
|
+
|
|
6243
|
+
def list_visualizations(self,
|
|
6244
|
+
id: str,
|
|
6245
|
+
*,
|
|
6246
|
+
page_size: Optional[int] = None,
|
|
6247
|
+
page_token: Optional[str] = None) -> Iterator[Visualization]:
|
|
6248
|
+
"""List visualizations on a query.
|
|
6249
|
+
|
|
6250
|
+
Gets a list of visualizations on a query.
|
|
6251
|
+
|
|
6252
|
+
:param id: str
|
|
6253
|
+
:param page_size: int (optional)
|
|
6254
|
+
:param page_token: str (optional)
|
|
6255
|
+
|
|
6256
|
+
:returns: Iterator over :class:`Visualization`
|
|
6257
|
+
"""
|
|
6258
|
+
|
|
6259
|
+
query = {}
|
|
6260
|
+
if page_size is not None: query['page_size'] = page_size
|
|
6261
|
+
if page_token is not None: query['page_token'] = page_token
|
|
6262
|
+
headers = {'Accept': 'application/json', }
|
|
6263
|
+
|
|
6264
|
+
while True:
|
|
6265
|
+
json = self._api.do('GET',
|
|
6266
|
+
f'/api/2.0/sql/queries/{id}/visualizations',
|
|
6267
|
+
query=query,
|
|
6268
|
+
headers=headers)
|
|
6269
|
+
if 'results' in json:
|
|
6270
|
+
for v in json['results']:
|
|
6271
|
+
yield Visualization.from_dict(v)
|
|
6272
|
+
if 'next_page_token' not in json or not json['next_page_token']:
|
|
6273
|
+
return
|
|
6274
|
+
query['page_token'] = json['next_page_token']
|
|
6275
|
+
|
|
6276
|
+
def update(self, id: str, update_mask: str, *, query: Optional[UpdateQueryRequestQuery] = None) -> Query:
|
|
6277
|
+
"""Update a query.
|
|
6278
|
+
|
|
6279
|
+
Updates a query.
|
|
6280
|
+
|
|
6281
|
+
:param id: str
|
|
6282
|
+
:param update_mask: str
|
|
6283
|
+
Field mask is required to be passed into the PATCH request. Field mask specifies which fields of the
|
|
6284
|
+
setting payload will be updated. The field mask needs to be supplied as single string. To specify
|
|
6285
|
+
multiple fields in the field mask, use comma as the separator (no space).
|
|
6286
|
+
:param query: :class:`UpdateQueryRequestQuery` (optional)
|
|
6287
|
+
|
|
6288
|
+
:returns: :class:`Query`
|
|
6289
|
+
"""
|
|
6290
|
+
body = {}
|
|
6291
|
+
if query is not None: body['query'] = query.as_dict()
|
|
6292
|
+
if update_mask is not None: body['update_mask'] = update_mask
|
|
6293
|
+
headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
|
|
6294
|
+
|
|
6295
|
+
res = self._api.do('PATCH', f'/api/2.0/sql/queries/{id}', body=body, headers=headers)
|
|
6296
|
+
return Query.from_dict(res)
|
|
6297
|
+
|
|
6298
|
+
|
|
6299
|
+
class QueriesLegacyAPI:
|
|
4470
6300
|
"""These endpoints are used for CRUD operations on query definitions. Query definitions include the target
|
|
4471
6301
|
SQL warehouse, query text, name, description, tags, parameters, and visualizations. Queries can be
|
|
4472
|
-
scheduled using the `sql_task` type of the Jobs API, e.g. :method:jobs/create.
|
|
6302
|
+
scheduled using the `sql_task` type of the Jobs API, e.g. :method:jobs/create.
|
|
6303
|
+
|
|
6304
|
+
**Note**: A new version of the Databricks SQL API is now available. Please see the latest version. [Learn
|
|
6305
|
+
more]
|
|
6306
|
+
|
|
6307
|
+
[Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html"""
|
|
4473
6308
|
|
|
4474
6309
|
def __init__(self, api_client):
|
|
4475
6310
|
self._api = api_client
|
|
@@ -4483,7 +6318,7 @@ class QueriesAPI:
|
|
|
4483
6318
|
parent: Optional[str] = None,
|
|
4484
6319
|
query: Optional[str] = None,
|
|
4485
6320
|
run_as_role: Optional[RunAsRole] = None,
|
|
4486
|
-
tags: Optional[List[str]] = None) ->
|
|
6321
|
+
tags: Optional[List[str]] = None) -> LegacyQuery:
|
|
4487
6322
|
"""Create a new query definition.
|
|
4488
6323
|
|
|
4489
6324
|
Creates a new query definition. Queries created with this endpoint belong to the authenticated user
|
|
@@ -4495,9 +6330,14 @@ class QueriesAPI:
|
|
|
4495
6330
|
|
|
4496
6331
|
**Note**: You cannot add a visualization until you create the query.
|
|
4497
6332
|
|
|
6333
|
+
**Note**: A new version of the Databricks SQL API is now available. Please use :method:queries/create
|
|
6334
|
+
instead. [Learn more]
|
|
6335
|
+
|
|
6336
|
+
[Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html
|
|
6337
|
+
|
|
4498
6338
|
:param data_source_id: str (optional)
|
|
4499
6339
|
Data source ID maps to the ID of the data source used by the resource and is distinct from the
|
|
4500
|
-
warehouse ID. [Learn more]
|
|
6340
|
+
warehouse ID. [Learn more]
|
|
4501
6341
|
|
|
4502
6342
|
[Learn more]: https://docs.databricks.com/api/workspace/datasources/list
|
|
4503
6343
|
:param description: str (optional)
|
|
@@ -4517,7 +6357,7 @@ class QueriesAPI:
|
|
|
4517
6357
|
viewer" behavior) or `"owner"` (signifying "run as owner" behavior)
|
|
4518
6358
|
:param tags: List[str] (optional)
|
|
4519
6359
|
|
|
4520
|
-
:returns: :class:`
|
|
6360
|
+
:returns: :class:`LegacyQuery`
|
|
4521
6361
|
"""
|
|
4522
6362
|
body = {}
|
|
4523
6363
|
if data_source_id is not None: body['data_source_id'] = data_source_id
|
|
@@ -4531,7 +6371,7 @@ class QueriesAPI:
|
|
|
4531
6371
|
headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
|
|
4532
6372
|
|
|
4533
6373
|
res = self._api.do('POST', '/api/2.0/preview/sql/queries', body=body, headers=headers)
|
|
4534
|
-
return
|
|
6374
|
+
return LegacyQuery.from_dict(res)
|
|
4535
6375
|
|
|
4536
6376
|
def delete(self, query_id: str):
|
|
4537
6377
|
"""Delete a query.
|
|
@@ -4539,6 +6379,11 @@ class QueriesAPI:
|
|
|
4539
6379
|
Moves a query to the trash. Trashed queries immediately disappear from searches and list views, and
|
|
4540
6380
|
they cannot be used for alerts. The trash is deleted after 30 days.
|
|
4541
6381
|
|
|
6382
|
+
**Note**: A new version of the Databricks SQL API is now available. Please use :method:queries/delete
|
|
6383
|
+
instead. [Learn more]
|
|
6384
|
+
|
|
6385
|
+
[Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html
|
|
6386
|
+
|
|
4542
6387
|
:param query_id: str
|
|
4543
6388
|
|
|
4544
6389
|
|
|
@@ -4548,34 +6393,44 @@ class QueriesAPI:
|
|
|
4548
6393
|
|
|
4549
6394
|
self._api.do('DELETE', f'/api/2.0/preview/sql/queries/{query_id}', headers=headers)
|
|
4550
6395
|
|
|
4551
|
-
def get(self, query_id: str) ->
|
|
6396
|
+
def get(self, query_id: str) -> LegacyQuery:
|
|
4552
6397
|
"""Get a query definition.
|
|
4553
6398
|
|
|
4554
6399
|
Retrieve a query object definition along with contextual permissions information about the currently
|
|
4555
6400
|
authenticated user.
|
|
4556
6401
|
|
|
6402
|
+
**Note**: A new version of the Databricks SQL API is now available. Please use :method:queries/get
|
|
6403
|
+
instead. [Learn more]
|
|
6404
|
+
|
|
6405
|
+
[Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html
|
|
6406
|
+
|
|
4557
6407
|
:param query_id: str
|
|
4558
6408
|
|
|
4559
|
-
:returns: :class:`
|
|
6409
|
+
:returns: :class:`LegacyQuery`
|
|
4560
6410
|
"""
|
|
4561
6411
|
|
|
4562
6412
|
headers = {'Accept': 'application/json', }
|
|
4563
6413
|
|
|
4564
6414
|
res = self._api.do('GET', f'/api/2.0/preview/sql/queries/{query_id}', headers=headers)
|
|
4565
|
-
return
|
|
6415
|
+
return LegacyQuery.from_dict(res)
|
|
4566
6416
|
|
|
4567
6417
|
def list(self,
|
|
4568
6418
|
*,
|
|
4569
6419
|
order: Optional[str] = None,
|
|
4570
6420
|
page: Optional[int] = None,
|
|
4571
6421
|
page_size: Optional[int] = None,
|
|
4572
|
-
q: Optional[str] = None) -> Iterator[
|
|
6422
|
+
q: Optional[str] = None) -> Iterator[LegacyQuery]:
|
|
4573
6423
|
"""Get a list of queries.
|
|
4574
6424
|
|
|
4575
6425
|
Gets a list of queries. Optionally, this list can be filtered by a search term.
|
|
4576
6426
|
|
|
4577
|
-
|
|
4578
|
-
degradation, or a temporary ban
|
|
6427
|
+
**Warning**: Calling this API concurrently 10 or more times could result in throttling, service
|
|
6428
|
+
degradation, or a temporary ban.
|
|
6429
|
+
|
|
6430
|
+
**Note**: A new version of the Databricks SQL API is now available. Please use :method:queries/list
|
|
6431
|
+
instead. [Learn more]
|
|
6432
|
+
|
|
6433
|
+
[Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html
|
|
4579
6434
|
|
|
4580
6435
|
:param order: str (optional)
|
|
4581
6436
|
Name of query attribute to order by. Default sort order is ascending. Append a dash (`-`) to order
|
|
@@ -4598,7 +6453,7 @@ class QueriesAPI:
|
|
|
4598
6453
|
:param q: str (optional)
|
|
4599
6454
|
Full text search term
|
|
4600
6455
|
|
|
4601
|
-
:returns: Iterator over :class:`
|
|
6456
|
+
:returns: Iterator over :class:`LegacyQuery`
|
|
4602
6457
|
"""
|
|
4603
6458
|
|
|
4604
6459
|
query = {}
|
|
@@ -4619,7 +6474,7 @@ class QueriesAPI:
|
|
|
4619
6474
|
if i in seen:
|
|
4620
6475
|
continue
|
|
4621
6476
|
seen.add(i)
|
|
4622
|
-
yield
|
|
6477
|
+
yield LegacyQuery.from_dict(v)
|
|
4623
6478
|
if 'results' not in json or not json['results']:
|
|
4624
6479
|
return
|
|
4625
6480
|
query['page'] += 1
|
|
@@ -4630,6 +6485,11 @@ class QueriesAPI:
|
|
|
4630
6485
|
Restore a query that has been moved to the trash. A restored query appears in list views and searches.
|
|
4631
6486
|
You can use restored queries for alerts.
|
|
4632
6487
|
|
|
6488
|
+
**Note**: A new version of the Databricks SQL API is now available. Please see the latest version.
|
|
6489
|
+
[Learn more]
|
|
6490
|
+
|
|
6491
|
+
[Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html
|
|
6492
|
+
|
|
4633
6493
|
:param query_id: str
|
|
4634
6494
|
|
|
4635
6495
|
|
|
@@ -4648,17 +6508,22 @@ class QueriesAPI:
|
|
|
4648
6508
|
options: Optional[Any] = None,
|
|
4649
6509
|
query: Optional[str] = None,
|
|
4650
6510
|
run_as_role: Optional[RunAsRole] = None,
|
|
4651
|
-
tags: Optional[List[str]] = None) ->
|
|
6511
|
+
tags: Optional[List[str]] = None) -> LegacyQuery:
|
|
4652
6512
|
"""Change a query definition.
|
|
4653
6513
|
|
|
4654
6514
|
Modify this query definition.
|
|
4655
6515
|
|
|
4656
6516
|
**Note**: You cannot undo this operation.
|
|
4657
6517
|
|
|
6518
|
+
**Note**: A new version of the Databricks SQL API is now available. Please use :method:queries/update
|
|
6519
|
+
instead. [Learn more]
|
|
6520
|
+
|
|
6521
|
+
[Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html
|
|
6522
|
+
|
|
4658
6523
|
:param query_id: str
|
|
4659
6524
|
:param data_source_id: str (optional)
|
|
4660
6525
|
Data source ID maps to the ID of the data source used by the resource and is distinct from the
|
|
4661
|
-
warehouse ID. [Learn more]
|
|
6526
|
+
warehouse ID. [Learn more]
|
|
4662
6527
|
|
|
4663
6528
|
[Learn more]: https://docs.databricks.com/api/workspace/datasources/list
|
|
4664
6529
|
:param description: str (optional)
|
|
@@ -4676,7 +6541,7 @@ class QueriesAPI:
|
|
|
4676
6541
|
viewer" behavior) or `"owner"` (signifying "run as owner" behavior)
|
|
4677
6542
|
:param tags: List[str] (optional)
|
|
4678
6543
|
|
|
4679
|
-
:returns: :class:`
|
|
6544
|
+
:returns: :class:`LegacyQuery`
|
|
4680
6545
|
"""
|
|
4681
6546
|
body = {}
|
|
4682
6547
|
if data_source_id is not None: body['data_source_id'] = data_source_id
|
|
@@ -4689,11 +6554,12 @@ class QueriesAPI:
|
|
|
4689
6554
|
headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
|
|
4690
6555
|
|
|
4691
6556
|
res = self._api.do('POST', f'/api/2.0/preview/sql/queries/{query_id}', body=body, headers=headers)
|
|
4692
|
-
return
|
|
6557
|
+
return LegacyQuery.from_dict(res)
|
|
4693
6558
|
|
|
4694
6559
|
|
|
4695
6560
|
class QueryHistoryAPI:
|
|
4696
|
-
"""
|
|
6561
|
+
"""A service responsible for storing and retrieving the list of queries run against SQL endpoints, serverless
|
|
6562
|
+
compute, and DLT."""
|
|
4697
6563
|
|
|
4698
6564
|
def __init__(self, api_client):
|
|
4699
6565
|
self._api = api_client
|
|
@@ -4701,49 +6567,112 @@ class QueryHistoryAPI:
|
|
|
4701
6567
|
def list(self,
|
|
4702
6568
|
*,
|
|
4703
6569
|
filter_by: Optional[QueryFilter] = None,
|
|
4704
|
-
include_metrics: Optional[bool] = None,
|
|
4705
6570
|
max_results: Optional[int] = None,
|
|
4706
|
-
page_token: Optional[str] = None) ->
|
|
6571
|
+
page_token: Optional[str] = None) -> ListQueriesResponse:
|
|
4707
6572
|
"""List Queries.
|
|
4708
6573
|
|
|
4709
|
-
List the history of queries through SQL warehouses.
|
|
6574
|
+
List the history of queries through SQL warehouses, serverless compute, and DLT.
|
|
4710
6575
|
|
|
4711
|
-
You can filter by user ID, warehouse ID, status, and time range.
|
|
6576
|
+
You can filter by user ID, warehouse ID, status, and time range. Most recently started queries are
|
|
6577
|
+
returned first (up to max_results in request). The pagination token returned in response can be used
|
|
6578
|
+
to list subsequent query statuses.
|
|
4712
6579
|
|
|
4713
6580
|
:param filter_by: :class:`QueryFilter` (optional)
|
|
4714
6581
|
A filter to limit query history results. This field is optional.
|
|
4715
|
-
:param include_metrics: bool (optional)
|
|
4716
|
-
Whether to include metrics about query.
|
|
4717
6582
|
:param max_results: int (optional)
|
|
4718
|
-
Limit the number of results returned in one page.
|
|
6583
|
+
Limit the number of results returned in one page. Must be less than 1000 and the default is 100.
|
|
4719
6584
|
:param page_token: str (optional)
|
|
4720
6585
|
A token that can be used to get the next page of results. The token can contains characters that
|
|
4721
6586
|
need to be encoded before using it in a URL. For example, the character '+' needs to be replaced by
|
|
4722
|
-
%2B.
|
|
6587
|
+
%2B. This field is optional.
|
|
4723
6588
|
|
|
4724
|
-
:returns:
|
|
6589
|
+
:returns: :class:`ListQueriesResponse`
|
|
4725
6590
|
"""
|
|
4726
6591
|
|
|
4727
6592
|
query = {}
|
|
4728
6593
|
if filter_by is not None: query['filter_by'] = filter_by.as_dict()
|
|
4729
|
-
if include_metrics is not None: query['include_metrics'] = include_metrics
|
|
4730
6594
|
if max_results is not None: query['max_results'] = max_results
|
|
4731
6595
|
if page_token is not None: query['page_token'] = page_token
|
|
4732
6596
|
headers = {'Accept': 'application/json', }
|
|
4733
6597
|
|
|
4734
|
-
|
|
4735
|
-
|
|
4736
|
-
if 'res' in json:
|
|
4737
|
-
for v in json['res']:
|
|
4738
|
-
yield QueryInfo.from_dict(v)
|
|
4739
|
-
if 'next_page_token' not in json or not json['next_page_token']:
|
|
4740
|
-
return
|
|
4741
|
-
query['page_token'] = json['next_page_token']
|
|
6598
|
+
res = self._api.do('GET', '/api/2.0/sql/history/queries', query=query, headers=headers)
|
|
6599
|
+
return ListQueriesResponse.from_dict(res)
|
|
4742
6600
|
|
|
4743
6601
|
|
|
4744
6602
|
class QueryVisualizationsAPI:
|
|
6603
|
+
"""This is an evolving API that facilitates the addition and removal of visualizations from existing queries
|
|
6604
|
+
in the Databricks Workspace. Data structures can change over time."""
|
|
6605
|
+
|
|
6606
|
+
def __init__(self, api_client):
|
|
6607
|
+
self._api = api_client
|
|
6608
|
+
|
|
6609
|
+
def create(self,
|
|
6610
|
+
*,
|
|
6611
|
+
visualization: Optional[CreateVisualizationRequestVisualization] = None) -> Visualization:
|
|
6612
|
+
"""Add a visualization to a query.
|
|
6613
|
+
|
|
6614
|
+
Adds a visualization to a query.
|
|
6615
|
+
|
|
6616
|
+
:param visualization: :class:`CreateVisualizationRequestVisualization` (optional)
|
|
6617
|
+
|
|
6618
|
+
:returns: :class:`Visualization`
|
|
6619
|
+
"""
|
|
6620
|
+
body = {}
|
|
6621
|
+
if visualization is not None: body['visualization'] = visualization.as_dict()
|
|
6622
|
+
headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
|
|
6623
|
+
|
|
6624
|
+
res = self._api.do('POST', '/api/2.0/sql/visualizations', body=body, headers=headers)
|
|
6625
|
+
return Visualization.from_dict(res)
|
|
6626
|
+
|
|
6627
|
+
def delete(self, id: str):
|
|
6628
|
+
"""Remove a visualization.
|
|
6629
|
+
|
|
6630
|
+
Removes a visualization.
|
|
6631
|
+
|
|
6632
|
+
:param id: str
|
|
6633
|
+
|
|
6634
|
+
|
|
6635
|
+
"""
|
|
6636
|
+
|
|
6637
|
+
headers = {'Accept': 'application/json', }
|
|
6638
|
+
|
|
6639
|
+
self._api.do('DELETE', f'/api/2.0/sql/visualizations/{id}', headers=headers)
|
|
6640
|
+
|
|
6641
|
+
def update(self,
|
|
6642
|
+
id: str,
|
|
6643
|
+
update_mask: str,
|
|
6644
|
+
*,
|
|
6645
|
+
visualization: Optional[UpdateVisualizationRequestVisualization] = None) -> Visualization:
|
|
6646
|
+
"""Update a visualization.
|
|
6647
|
+
|
|
6648
|
+
Updates a visualization.
|
|
6649
|
+
|
|
6650
|
+
:param id: str
|
|
6651
|
+
:param update_mask: str
|
|
6652
|
+
Field mask is required to be passed into the PATCH request. Field mask specifies which fields of the
|
|
6653
|
+
setting payload will be updated. The field mask needs to be supplied as single string. To specify
|
|
6654
|
+
multiple fields in the field mask, use comma as the separator (no space).
|
|
6655
|
+
:param visualization: :class:`UpdateVisualizationRequestVisualization` (optional)
|
|
6656
|
+
|
|
6657
|
+
:returns: :class:`Visualization`
|
|
6658
|
+
"""
|
|
6659
|
+
body = {}
|
|
6660
|
+
if update_mask is not None: body['update_mask'] = update_mask
|
|
6661
|
+
if visualization is not None: body['visualization'] = visualization.as_dict()
|
|
6662
|
+
headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
|
|
6663
|
+
|
|
6664
|
+
res = self._api.do('PATCH', f'/api/2.0/sql/visualizations/{id}', body=body, headers=headers)
|
|
6665
|
+
return Visualization.from_dict(res)
|
|
6666
|
+
|
|
6667
|
+
|
|
6668
|
+
class QueryVisualizationsLegacyAPI:
|
|
4745
6669
|
"""This is an evolving API that facilitates the addition and removal of vizualisations from existing queries
|
|
4746
|
-
within the Databricks Workspace. Data structures may change over time.
|
|
6670
|
+
within the Databricks Workspace. Data structures may change over time.
|
|
6671
|
+
|
|
6672
|
+
**Note**: A new version of the Databricks SQL API is now available. Please see the latest version. [Learn
|
|
6673
|
+
more]
|
|
6674
|
+
|
|
6675
|
+
[Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html"""
|
|
4747
6676
|
|
|
4748
6677
|
def __init__(self, api_client):
|
|
4749
6678
|
self._api = api_client
|
|
@@ -4754,9 +6683,16 @@ class QueryVisualizationsAPI:
|
|
|
4754
6683
|
options: Any,
|
|
4755
6684
|
*,
|
|
4756
6685
|
description: Optional[str] = None,
|
|
4757
|
-
name: Optional[str] = None) ->
|
|
6686
|
+
name: Optional[str] = None) -> LegacyVisualization:
|
|
4758
6687
|
"""Add visualization to a query.
|
|
4759
6688
|
|
|
6689
|
+
Creates visualization in the query.
|
|
6690
|
+
|
|
6691
|
+
**Note**: A new version of the Databricks SQL API is now available. Please use
|
|
6692
|
+
:method:queryvisualizations/create instead. [Learn more]
|
|
6693
|
+
|
|
6694
|
+
[Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html
|
|
6695
|
+
|
|
4760
6696
|
:param query_id: str
|
|
4761
6697
|
The identifier returned by :method:queries/create
|
|
4762
6698
|
:param type: str
|
|
@@ -4769,7 +6705,7 @@ class QueryVisualizationsAPI:
|
|
|
4769
6705
|
:param name: str (optional)
|
|
4770
6706
|
The name of the visualization that appears on dashboards and the query screen.
|
|
4771
6707
|
|
|
4772
|
-
:returns: :class:`
|
|
6708
|
+
:returns: :class:`LegacyVisualization`
|
|
4773
6709
|
"""
|
|
4774
6710
|
body = {}
|
|
4775
6711
|
if description is not None: body['description'] = description
|
|
@@ -4780,11 +6716,18 @@ class QueryVisualizationsAPI:
|
|
|
4780
6716
|
headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
|
|
4781
6717
|
|
|
4782
6718
|
res = self._api.do('POST', '/api/2.0/preview/sql/visualizations', body=body, headers=headers)
|
|
4783
|
-
return
|
|
6719
|
+
return LegacyVisualization.from_dict(res)
|
|
4784
6720
|
|
|
4785
6721
|
def delete(self, id: str):
|
|
4786
6722
|
"""Remove visualization.
|
|
4787
6723
|
|
|
6724
|
+
Removes a visualization from the query.
|
|
6725
|
+
|
|
6726
|
+
**Note**: A new version of the Databricks SQL API is now available. Please use
|
|
6727
|
+
:method:queryvisualizations/delete instead. [Learn more]
|
|
6728
|
+
|
|
6729
|
+
[Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html
|
|
6730
|
+
|
|
4788
6731
|
:param id: str
|
|
4789
6732
|
Widget ID returned by :method:queryvizualisations/create
|
|
4790
6733
|
|
|
@@ -4802,11 +6745,18 @@ class QueryVisualizationsAPI:
|
|
|
4802
6745
|
description: Optional[str] = None,
|
|
4803
6746
|
name: Optional[str] = None,
|
|
4804
6747
|
options: Optional[Any] = None,
|
|
4805
|
-
query: Optional[
|
|
6748
|
+
query: Optional[LegacyQuery] = None,
|
|
4806
6749
|
type: Optional[str] = None,
|
|
4807
|
-
updated_at: Optional[str] = None) ->
|
|
6750
|
+
updated_at: Optional[str] = None) -> LegacyVisualization:
|
|
4808
6751
|
"""Edit existing visualization.
|
|
4809
6752
|
|
|
6753
|
+
Updates visualization in the query.
|
|
6754
|
+
|
|
6755
|
+
**Note**: A new version of the Databricks SQL API is now available. Please use
|
|
6756
|
+
:method:queryvisualizations/update instead. [Learn more]
|
|
6757
|
+
|
|
6758
|
+
[Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html
|
|
6759
|
+
|
|
4810
6760
|
:param id: str
|
|
4811
6761
|
The UUID for this visualization.
|
|
4812
6762
|
:param created_at: str (optional)
|
|
@@ -4817,12 +6767,12 @@ class QueryVisualizationsAPI:
|
|
|
4817
6767
|
:param options: Any (optional)
|
|
4818
6768
|
The options object varies widely from one visualization type to the next and is unsupported.
|
|
4819
6769
|
Databricks does not recommend modifying visualization settings in JSON.
|
|
4820
|
-
:param query: :class:`
|
|
6770
|
+
:param query: :class:`LegacyQuery` (optional)
|
|
4821
6771
|
:param type: str (optional)
|
|
4822
6772
|
The type of visualization: chart, table, pivot table, and so on.
|
|
4823
6773
|
:param updated_at: str (optional)
|
|
4824
6774
|
|
|
4825
|
-
:returns: :class:`
|
|
6775
|
+
:returns: :class:`LegacyVisualization`
|
|
4826
6776
|
"""
|
|
4827
6777
|
body = {}
|
|
4828
6778
|
if created_at is not None: body['created_at'] = created_at
|
|
@@ -4835,7 +6785,7 @@ class QueryVisualizationsAPI:
|
|
|
4835
6785
|
headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
|
|
4836
6786
|
|
|
4837
6787
|
res = self._api.do('POST', f'/api/2.0/preview/sql/visualizations/{id}', body=body, headers=headers)
|
|
4838
|
-
return
|
|
6788
|
+
return LegacyVisualization.from_dict(res)
|
|
4839
6789
|
|
|
4840
6790
|
|
|
4841
6791
|
class StatementExecutionAPI:
|
|
@@ -4954,14 +6904,15 @@ class StatementExecutionAPI:
|
|
|
4954
6904
|
parameters: Optional[List[StatementParameterListItem]] = None,
|
|
4955
6905
|
row_limit: Optional[int] = None,
|
|
4956
6906
|
schema: Optional[str] = None,
|
|
4957
|
-
wait_timeout: Optional[str] = None) ->
|
|
6907
|
+
wait_timeout: Optional[str] = None) -> StatementResponse:
|
|
4958
6908
|
"""Execute a SQL statement.
|
|
4959
6909
|
|
|
4960
6910
|
:param statement: str
|
|
4961
6911
|
The SQL statement to execute. The statement can optionally be parameterized, see `parameters`.
|
|
4962
6912
|
:param warehouse_id: str
|
|
4963
|
-
Warehouse upon which to execute a statement. See also [What are SQL
|
|
4964
|
-
|
|
6913
|
+
Warehouse upon which to execute a statement. See also [What are SQL warehouses?]
|
|
6914
|
+
|
|
6915
|
+
[What are SQL warehouses?]: https://docs.databricks.com/sql/admin/warehouse-type.html
|
|
4965
6916
|
:param byte_limit: int (optional)
|
|
4966
6917
|
Applies the given byte limit to the statement's result size. Byte counts are based on internal data
|
|
4967
6918
|
representations and might not match the final size in the requested `format`. If the result was
|
|
@@ -4973,26 +6924,6 @@ class StatementExecutionAPI:
|
|
|
4973
6924
|
|
|
4974
6925
|
[`USE CATALOG`]: https://docs.databricks.com/sql/language-manual/sql-ref-syntax-ddl-use-catalog.html
|
|
4975
6926
|
:param disposition: :class:`Disposition` (optional)
|
|
4976
|
-
The fetch disposition provides two modes of fetching results: `INLINE` and `EXTERNAL_LINKS`.
|
|
4977
|
-
|
|
4978
|
-
Statements executed with `INLINE` disposition will return result data inline, in `JSON_ARRAY`
|
|
4979
|
-
format, in a series of chunks. If a given statement produces a result set with a size larger than 25
|
|
4980
|
-
MiB, that statement execution is aborted, and no result set will be available.
|
|
4981
|
-
|
|
4982
|
-
**NOTE** Byte limits are computed based upon internal representations of the result set data, and
|
|
4983
|
-
might not match the sizes visible in JSON responses.
|
|
4984
|
-
|
|
4985
|
-
Statements executed with `EXTERNAL_LINKS` disposition will return result data as external links:
|
|
4986
|
-
URLs that point to cloud storage internal to the workspace. Using `EXTERNAL_LINKS` disposition
|
|
4987
|
-
allows statements to generate arbitrarily sized result sets for fetching up to 100 GiB. The
|
|
4988
|
-
resulting links have two important properties:
|
|
4989
|
-
|
|
4990
|
-
1. They point to resources _external_ to the Databricks compute; therefore any associated
|
|
4991
|
-
authentication information (typically a personal access token, OAuth token, or similar) _must be
|
|
4992
|
-
removed_ when fetching from these links.
|
|
4993
|
-
|
|
4994
|
-
2. These are presigned URLs with a specific expiration, indicated in the response. The behavior when
|
|
4995
|
-
attempting to use an expired link is cloud specific.
|
|
4996
6927
|
:param format: :class:`Format` (optional)
|
|
4997
6928
|
Statement execution supports three result formats: `JSON_ARRAY` (default), `ARROW_STREAM`, and
|
|
4998
6929
|
`CSV`.
|
|
@@ -5080,7 +7011,7 @@ class StatementExecutionAPI:
|
|
|
5080
7011
|
the statement takes longer to execute, `on_wait_timeout` determines what should happen after the
|
|
5081
7012
|
timeout is reached.
|
|
5082
7013
|
|
|
5083
|
-
:returns: :class:`
|
|
7014
|
+
:returns: :class:`StatementResponse`
|
|
5084
7015
|
"""
|
|
5085
7016
|
body = {}
|
|
5086
7017
|
if byte_limit is not None: body['byte_limit'] = byte_limit
|
|
@@ -5097,9 +7028,9 @@ class StatementExecutionAPI:
|
|
|
5097
7028
|
headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
|
|
5098
7029
|
|
|
5099
7030
|
res = self._api.do('POST', '/api/2.0/sql/statements/', body=body, headers=headers)
|
|
5100
|
-
return
|
|
7031
|
+
return StatementResponse.from_dict(res)
|
|
5101
7032
|
|
|
5102
|
-
def get_statement(self, statement_id: str) ->
|
|
7033
|
+
def get_statement(self, statement_id: str) -> StatementResponse:
|
|
5103
7034
|
"""Get status, manifest, and result first chunk.
|
|
5104
7035
|
|
|
5105
7036
|
This request can be used to poll for the statement's status. When the `status.state` field is
|
|
@@ -5114,13 +7045,13 @@ class StatementExecutionAPI:
|
|
|
5114
7045
|
The statement ID is returned upon successfully submitting a SQL statement, and is a required
|
|
5115
7046
|
reference for all subsequent calls.
|
|
5116
7047
|
|
|
5117
|
-
:returns: :class:`
|
|
7048
|
+
:returns: :class:`StatementResponse`
|
|
5118
7049
|
"""
|
|
5119
7050
|
|
|
5120
7051
|
headers = {'Accept': 'application/json', }
|
|
5121
7052
|
|
|
5122
7053
|
res = self._api.do('GET', f'/api/2.0/sql/statements/{statement_id}', headers=headers)
|
|
5123
|
-
return
|
|
7054
|
+
return StatementResponse.from_dict(res)
|
|
5124
7055
|
|
|
5125
7056
|
def get_statement_result_chunk_n(self, statement_id: str, chunk_index: int) -> ResultData:
|
|
5126
7057
|
"""Get result chunk by index.
|