databricks-sdk 0.29.0__py3-none-any.whl → 0.30.0__py3-none-any.whl
This diff shows the changes between two publicly released versions of the package, as they appear in their respective public registries. It is provided for informational purposes only.
Potentially problematic release: this version of databricks-sdk might be problematic.
- databricks/sdk/__init__.py +67 -19
- databricks/sdk/config.py +61 -75
- databricks/sdk/core.py +16 -9
- databricks/sdk/credentials_provider.py +15 -15
- databricks/sdk/data_plane.py +65 -0
- databricks/sdk/mixins/files.py +12 -4
- databricks/sdk/service/apps.py +977 -0
- databricks/sdk/service/billing.py +602 -218
- databricks/sdk/service/catalog.py +131 -34
- databricks/sdk/service/compute.py +494 -81
- databricks/sdk/service/dashboards.py +608 -5
- databricks/sdk/service/iam.py +99 -88
- databricks/sdk/service/jobs.py +34 -15
- databricks/sdk/service/marketplace.py +2 -122
- databricks/sdk/service/oauth2.py +127 -70
- databricks/sdk/service/pipelines.py +72 -52
- databricks/sdk/service/serving.py +303 -750
- databricks/sdk/service/settings.py +423 -4
- databricks/sdk/service/sharing.py +235 -25
- databricks/sdk/service/sql.py +2417 -566
- databricks/sdk/useragent.py +144 -0
- databricks/sdk/version.py +1 -1
- {databricks_sdk-0.29.0.dist-info → databricks_sdk-0.30.0.dist-info}/METADATA +36 -16
- {databricks_sdk-0.29.0.dist-info → databricks_sdk-0.30.0.dist-info}/RECORD +28 -25
- {databricks_sdk-0.29.0.dist-info → databricks_sdk-0.30.0.dist-info}/WHEEL +1 -1
- {databricks_sdk-0.29.0.dist-info → databricks_sdk-0.30.0.dist-info}/LICENSE +0 -0
- {databricks_sdk-0.29.0.dist-info → databricks_sdk-0.30.0.dist-info}/NOTICE +0 -0
- {databricks_sdk-0.29.0.dist-info → databricks_sdk-0.30.0.dist-info}/top_level.txt +0 -0
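
Most of the churn in `databricks/sdk/service/sql.py` comes from the new alerts data model (`AlertCondition`, `AlertConditionOperand`, `AlertOperandColumn`, `AlertConditionThreshold`, `AlertOperandValue`, `CreateAlertRequestAlert`, and related classes). As rough orientation before the raw diff below, here is a minimal sketch of how those generated dataclasses fit together; it uses only the constructors and `as_dict()` helpers visible in this diff, and assumes the classes are importable from `databricks.sdk.service.sql` as in earlier releases.

```python
# Sketch only: builds the alert-condition payload introduced in 0.30.0.
# Class and field names are taken from the diff below; the import path is assumed.
from databricks.sdk.service.sql import (AlertCondition, AlertConditionOperand,
                                        AlertConditionThreshold, AlertOperandColumn,
                                        AlertOperandValue, AlertOperator,
                                        CreateAlertRequestAlert)

# "Trigger when the `error_count` column of the attached query exceeds 100."
condition = AlertCondition(
    op=AlertOperator.GREATER_THAN,
    operand=AlertConditionOperand(column=AlertOperandColumn(name='error_count')),
    threshold=AlertConditionThreshold(value=AlertOperandValue(double_value=100.0)),
)

alert = CreateAlertRequestAlert(
    display_name='Too many errors',
    query_id='<query-uuid>',       # UUID of an existing query (placeholder)
    condition=condition,
    seconds_to_retrigger=600,      # rearm after 10 minutes
)

# as_dict() produces the JSON request body, mirroring the generated as_dict() methods in the diff.
print(alert.as_dict())
```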
databricks/sdk/service/sql.py
CHANGED
|
@@ -46,69 +46,206 @@ class AccessControl:
|
|
|
46
46
|
|
|
47
47
|
@dataclass
|
|
48
48
|
class Alert:
|
|
49
|
-
|
|
50
|
-
"""
|
|
49
|
+
condition: Optional[AlertCondition] = None
|
|
50
|
+
"""Trigger conditions of the alert."""
|
|
51
51
|
|
|
52
|
-
|
|
53
|
-
"""
|
|
52
|
+
create_time: Optional[str] = None
|
|
53
|
+
"""The timestamp indicating when the alert was created."""
|
|
54
54
|
|
|
55
|
-
|
|
56
|
-
"""
|
|
55
|
+
custom_body: Optional[str] = None
|
|
56
|
+
"""Custom body of alert notification, if it exists. See [here] for custom templating instructions.
|
|
57
|
+
|
|
58
|
+
[here]: https://docs.databricks.com/sql/user/alerts/index.html"""
|
|
57
59
|
|
|
58
|
-
|
|
59
|
-
"""
|
|
60
|
+
custom_subject: Optional[str] = None
|
|
61
|
+
"""Custom subject of alert notification, if it exists. This can include email subject entries and
|
|
62
|
+
Slack notification headers, for example. See [here] for custom templating instructions.
|
|
63
|
+
|
|
64
|
+
[here]: https://docs.databricks.com/sql/user/alerts/index.html"""
|
|
60
65
|
|
|
61
|
-
|
|
62
|
-
"""
|
|
66
|
+
display_name: Optional[str] = None
|
|
67
|
+
"""The display name of the alert."""
|
|
63
68
|
|
|
64
|
-
|
|
65
|
-
"""
|
|
69
|
+
id: Optional[str] = None
|
|
70
|
+
"""UUID identifying the alert."""
|
|
66
71
|
|
|
67
|
-
|
|
72
|
+
lifecycle_state: Optional[LifecycleState] = None
|
|
73
|
+
"""The workspace state of the alert. Used for tracking trashed status."""
|
|
68
74
|
|
|
69
|
-
|
|
70
|
-
"""
|
|
71
|
-
|
|
75
|
+
owner_user_name: Optional[str] = None
|
|
76
|
+
"""The owner's username. This field is set to "Unavailable" if the user has been deleted."""
|
|
77
|
+
|
|
78
|
+
parent_path: Optional[str] = None
|
|
79
|
+
"""The workspace path of the folder containing the alert."""
|
|
80
|
+
|
|
81
|
+
query_id: Optional[str] = None
|
|
82
|
+
"""UUID of the query attached to the alert."""
|
|
83
|
+
|
|
84
|
+
seconds_to_retrigger: Optional[int] = None
|
|
85
|
+
"""Number of seconds an alert must wait after being triggered to rearm itself. After rearming, it
|
|
86
|
+
can be triggered again. If 0 or not specified, the alert will not be triggered again."""
|
|
72
87
|
|
|
73
88
|
state: Optional[AlertState] = None
|
|
74
|
-
"""
|
|
75
|
-
|
|
89
|
+
"""Current state of the alert's trigger status. This field is set to UNKNOWN if the alert has not
|
|
90
|
+
yet been evaluated or ran into an error during the last evaluation."""
|
|
76
91
|
|
|
77
|
-
|
|
78
|
-
"""Timestamp when the alert was last
|
|
92
|
+
trigger_time: Optional[str] = None
|
|
93
|
+
"""Timestamp when the alert was last triggered, if the alert has been triggered before."""
|
|
79
94
|
|
|
80
|
-
|
|
95
|
+
update_time: Optional[str] = None
|
|
96
|
+
"""The timestamp indicating when the alert was updated."""
|
|
81
97
|
|
|
82
98
|
def as_dict(self) -> dict:
|
|
83
99
|
"""Serializes the Alert into a dictionary suitable for use as a JSON request body."""
|
|
84
100
|
body = {}
|
|
85
|
-
if self.
|
|
101
|
+
if self.condition: body['condition'] = self.condition.as_dict()
|
|
102
|
+
if self.create_time is not None: body['create_time'] = self.create_time
|
|
103
|
+
if self.custom_body is not None: body['custom_body'] = self.custom_body
|
|
104
|
+
if self.custom_subject is not None: body['custom_subject'] = self.custom_subject
|
|
105
|
+
if self.display_name is not None: body['display_name'] = self.display_name
|
|
86
106
|
if self.id is not None: body['id'] = self.id
|
|
87
|
-
if self.
|
|
88
|
-
if self.
|
|
89
|
-
if self.
|
|
90
|
-
if self.
|
|
91
|
-
if self.
|
|
92
|
-
if self.rearm is not None: body['rearm'] = self.rearm
|
|
107
|
+
if self.lifecycle_state is not None: body['lifecycle_state'] = self.lifecycle_state.value
|
|
108
|
+
if self.owner_user_name is not None: body['owner_user_name'] = self.owner_user_name
|
|
109
|
+
if self.parent_path is not None: body['parent_path'] = self.parent_path
|
|
110
|
+
if self.query_id is not None: body['query_id'] = self.query_id
|
|
111
|
+
if self.seconds_to_retrigger is not None: body['seconds_to_retrigger'] = self.seconds_to_retrigger
|
|
93
112
|
if self.state is not None: body['state'] = self.state.value
|
|
94
|
-
if self.
|
|
95
|
-
if self.
|
|
113
|
+
if self.trigger_time is not None: body['trigger_time'] = self.trigger_time
|
|
114
|
+
if self.update_time is not None: body['update_time'] = self.update_time
|
|
96
115
|
return body
|
|
97
116
|
|
|
98
117
|
@classmethod
|
|
99
118
|
def from_dict(cls, d: Dict[str, any]) -> Alert:
|
|
100
119
|
"""Deserializes the Alert from a dictionary."""
|
|
101
|
-
return cls(
|
|
120
|
+
return cls(condition=_from_dict(d, 'condition', AlertCondition),
|
|
121
|
+
create_time=d.get('create_time', None),
|
|
122
|
+
custom_body=d.get('custom_body', None),
|
|
123
|
+
custom_subject=d.get('custom_subject', None),
|
|
124
|
+
display_name=d.get('display_name', None),
|
|
102
125
|
id=d.get('id', None),
|
|
103
|
-
|
|
104
|
-
|
|
105
|
-
|
|
106
|
-
|
|
107
|
-
|
|
108
|
-
rearm=d.get('rearm', None),
|
|
126
|
+
lifecycle_state=_enum(d, 'lifecycle_state', LifecycleState),
|
|
127
|
+
owner_user_name=d.get('owner_user_name', None),
|
|
128
|
+
parent_path=d.get('parent_path', None),
|
|
129
|
+
query_id=d.get('query_id', None),
|
|
130
|
+
seconds_to_retrigger=d.get('seconds_to_retrigger', None),
|
|
109
131
|
state=_enum(d, 'state', AlertState),
|
|
110
|
-
|
|
111
|
-
|
|
132
|
+
trigger_time=d.get('trigger_time', None),
|
|
133
|
+
update_time=d.get('update_time', None))
|
|
134
|
+
|
|
135
|
+
|
|
136
|
+
@dataclass
|
|
137
|
+
class AlertCondition:
|
|
138
|
+
empty_result_state: Optional[AlertState] = None
|
|
139
|
+
"""Alert state if result is empty."""
|
|
140
|
+
|
|
141
|
+
op: Optional[AlertOperator] = None
|
|
142
|
+
"""Operator used for comparison in alert evaluation."""
|
|
143
|
+
|
|
144
|
+
operand: Optional[AlertConditionOperand] = None
|
|
145
|
+
"""Name of the column from the query result to use for comparison in alert evaluation."""
|
|
146
|
+
|
|
147
|
+
threshold: Optional[AlertConditionThreshold] = None
|
|
148
|
+
"""Threshold value used for comparison in alert evaluation."""
|
|
149
|
+
|
|
150
|
+
def as_dict(self) -> dict:
|
|
151
|
+
"""Serializes the AlertCondition into a dictionary suitable for use as a JSON request body."""
|
|
152
|
+
body = {}
|
|
153
|
+
if self.empty_result_state is not None: body['empty_result_state'] = self.empty_result_state.value
|
|
154
|
+
if self.op is not None: body['op'] = self.op.value
|
|
155
|
+
if self.operand: body['operand'] = self.operand.as_dict()
|
|
156
|
+
if self.threshold: body['threshold'] = self.threshold.as_dict()
|
|
157
|
+
return body
|
|
158
|
+
|
|
159
|
+
@classmethod
|
|
160
|
+
def from_dict(cls, d: Dict[str, any]) -> AlertCondition:
|
|
161
|
+
"""Deserializes the AlertCondition from a dictionary."""
|
|
162
|
+
return cls(empty_result_state=_enum(d, 'empty_result_state', AlertState),
|
|
163
|
+
op=_enum(d, 'op', AlertOperator),
|
|
164
|
+
operand=_from_dict(d, 'operand', AlertConditionOperand),
|
|
165
|
+
threshold=_from_dict(d, 'threshold', AlertConditionThreshold))
|
|
166
|
+
|
|
167
|
+
|
|
168
|
+
@dataclass
|
|
169
|
+
class AlertConditionOperand:
|
|
170
|
+
column: Optional[AlertOperandColumn] = None
|
|
171
|
+
|
|
172
|
+
def as_dict(self) -> dict:
|
|
173
|
+
"""Serializes the AlertConditionOperand into a dictionary suitable for use as a JSON request body."""
|
|
174
|
+
body = {}
|
|
175
|
+
if self.column: body['column'] = self.column.as_dict()
|
|
176
|
+
return body
|
|
177
|
+
|
|
178
|
+
@classmethod
|
|
179
|
+
def from_dict(cls, d: Dict[str, any]) -> AlertConditionOperand:
|
|
180
|
+
"""Deserializes the AlertConditionOperand from a dictionary."""
|
|
181
|
+
return cls(column=_from_dict(d, 'column', AlertOperandColumn))
|
|
182
|
+
|
|
183
|
+
|
|
184
|
+
@dataclass
|
|
185
|
+
class AlertConditionThreshold:
|
|
186
|
+
value: Optional[AlertOperandValue] = None
|
|
187
|
+
|
|
188
|
+
def as_dict(self) -> dict:
|
|
189
|
+
"""Serializes the AlertConditionThreshold into a dictionary suitable for use as a JSON request body."""
|
|
190
|
+
body = {}
|
|
191
|
+
if self.value: body['value'] = self.value.as_dict()
|
|
192
|
+
return body
|
|
193
|
+
|
|
194
|
+
@classmethod
|
|
195
|
+
def from_dict(cls, d: Dict[str, any]) -> AlertConditionThreshold:
|
|
196
|
+
"""Deserializes the AlertConditionThreshold from a dictionary."""
|
|
197
|
+
return cls(value=_from_dict(d, 'value', AlertOperandValue))
|
|
198
|
+
|
|
199
|
+
|
|
200
|
+
@dataclass
|
|
201
|
+
class AlertOperandColumn:
|
|
202
|
+
name: Optional[str] = None
|
|
203
|
+
|
|
204
|
+
def as_dict(self) -> dict:
|
|
205
|
+
"""Serializes the AlertOperandColumn into a dictionary suitable for use as a JSON request body."""
|
|
206
|
+
body = {}
|
|
207
|
+
if self.name is not None: body['name'] = self.name
|
|
208
|
+
return body
|
|
209
|
+
|
|
210
|
+
@classmethod
|
|
211
|
+
def from_dict(cls, d: Dict[str, any]) -> AlertOperandColumn:
|
|
212
|
+
"""Deserializes the AlertOperandColumn from a dictionary."""
|
|
213
|
+
return cls(name=d.get('name', None))
|
|
214
|
+
|
|
215
|
+
|
|
216
|
+
@dataclass
|
|
217
|
+
class AlertOperandValue:
|
|
218
|
+
bool_value: Optional[bool] = None
|
|
219
|
+
|
|
220
|
+
double_value: Optional[float] = None
|
|
221
|
+
|
|
222
|
+
string_value: Optional[str] = None
|
|
223
|
+
|
|
224
|
+
def as_dict(self) -> dict:
|
|
225
|
+
"""Serializes the AlertOperandValue into a dictionary suitable for use as a JSON request body."""
|
|
226
|
+
body = {}
|
|
227
|
+
if self.bool_value is not None: body['bool_value'] = self.bool_value
|
|
228
|
+
if self.double_value is not None: body['double_value'] = self.double_value
|
|
229
|
+
if self.string_value is not None: body['string_value'] = self.string_value
|
|
230
|
+
return body
|
|
231
|
+
|
|
232
|
+
@classmethod
|
|
233
|
+
def from_dict(cls, d: Dict[str, any]) -> AlertOperandValue:
|
|
234
|
+
"""Deserializes the AlertOperandValue from a dictionary."""
|
|
235
|
+
return cls(bool_value=d.get('bool_value', None),
|
|
236
|
+
double_value=d.get('double_value', None),
|
|
237
|
+
string_value=d.get('string_value', None))
|
|
238
|
+
|
|
239
|
+
|
|
240
|
+
class AlertOperator(Enum):
|
|
241
|
+
|
|
242
|
+
EQUAL = 'EQUAL'
|
|
243
|
+
GREATER_THAN = 'GREATER_THAN'
|
|
244
|
+
GREATER_THAN_OR_EQUAL = 'GREATER_THAN_OR_EQUAL'
|
|
245
|
+
IS_NULL = 'IS_NULL'
|
|
246
|
+
LESS_THAN = 'LESS_THAN'
|
|
247
|
+
LESS_THAN_OR_EQUAL = 'LESS_THAN_OR_EQUAL'
|
|
248
|
+
NOT_EQUAL = 'NOT_EQUAL'
|
|
112
249
|
|
|
113
250
|
|
|
114
251
|
@dataclass
|
|
@@ -259,12 +396,10 @@ class AlertQuery:
 
 
 class AlertState(Enum):
-"""State of the alert. Possible values are: `unknown` (yet to be evaluated), `triggered` (evaluated
-and fulfilled trigger conditions), or `ok` (evaluated and did not fulfill trigger conditions)."""
 
-OK = '
-TRIGGERED = '
-UNKNOWN = '
+OK = 'OK'
+TRIGGERED = 'TRIGGERED'
+UNKNOWN = 'UNKNOWN'
 
 
 @dataclass
@@ -338,10 +473,10 @@ class Channel:
 
 @dataclass
 class ChannelInfo:
-"""
+"""Details about a Channel."""
 
 dbsql_version: Optional[str] = None
-"""
+"""DB SQL Version the Channel is mapped to."""
 
 name: Optional[ChannelName] = None
 """Name of the channel"""
@@ -368,6 +503,29 @@ class ChannelName(Enum):
 CHANNEL_NAME_UNSPECIFIED = 'CHANNEL_NAME_UNSPECIFIED'
 
 
+@dataclass
+class ClientCallContext:
+"""Client code that triggered the request"""
+
+file_name: Optional[EncodedText] = None
+"""File name that contains the last line that triggered the request."""
+
+line_number: Optional[int] = None
+"""Last line number within a file or notebook cell that triggered the request."""
+
+def as_dict(self) -> dict:
+"""Serializes the ClientCallContext into a dictionary suitable for use as a JSON request body."""
+body = {}
+if self.file_name: body['file_name'] = self.file_name.as_dict()
+if self.line_number is not None: body['line_number'] = self.line_number
+return body
+
+@classmethod
+def from_dict(cls, d: Dict[str, any]) -> ClientCallContext:
+"""Deserializes the ClientCallContext from a dictionary."""
+return cls(file_name=_from_dict(d, 'file_name', EncodedText), line_number=d.get('line_number', None))
+
+
 @dataclass
 class ColumnInfo:
 name: Optional[str] = None
@@ -442,6 +600,68 @@ class ColumnInfoTypeName(Enum):
|
|
|
442
600
|
USER_DEFINED_TYPE = 'USER_DEFINED_TYPE'
|
|
443
601
|
|
|
444
602
|
|
|
603
|
+
@dataclass
|
|
604
|
+
class ContextFilter:
|
|
605
|
+
dbsql_alert_id: Optional[str] = None
|
|
606
|
+
"""Databricks SQL Alert id"""
|
|
607
|
+
|
|
608
|
+
dbsql_dashboard_id: Optional[str] = None
|
|
609
|
+
"""Databricks SQL Dashboard id"""
|
|
610
|
+
|
|
611
|
+
dbsql_query_id: Optional[str] = None
|
|
612
|
+
"""Databricks SQL Query id"""
|
|
613
|
+
|
|
614
|
+
dbsql_session_id: Optional[str] = None
|
|
615
|
+
"""Databricks SQL Query session id"""
|
|
616
|
+
|
|
617
|
+
job_id: Optional[str] = None
|
|
618
|
+
"""Databricks Workflows id"""
|
|
619
|
+
|
|
620
|
+
job_run_id: Optional[str] = None
|
|
621
|
+
"""Databricks Workflows task run id"""
|
|
622
|
+
|
|
623
|
+
lakeview_dashboard_id: Optional[str] = None
|
|
624
|
+
"""Databricks Lakeview Dashboard id"""
|
|
625
|
+
|
|
626
|
+
notebook_cell_run_id: Optional[str] = None
|
|
627
|
+
"""Databricks Notebook runnableCommandId"""
|
|
628
|
+
|
|
629
|
+
notebook_id: Optional[str] = None
|
|
630
|
+
"""Databricks Notebook id"""
|
|
631
|
+
|
|
632
|
+
statement_ids: Optional[List[str]] = None
|
|
633
|
+
"""Databricks Query History statement ids."""
|
|
634
|
+
|
|
635
|
+
def as_dict(self) -> dict:
|
|
636
|
+
"""Serializes the ContextFilter into a dictionary suitable for use as a JSON request body."""
|
|
637
|
+
body = {}
|
|
638
|
+
if self.dbsql_alert_id is not None: body['dbsql_alert_id'] = self.dbsql_alert_id
|
|
639
|
+
if self.dbsql_dashboard_id is not None: body['dbsql_dashboard_id'] = self.dbsql_dashboard_id
|
|
640
|
+
if self.dbsql_query_id is not None: body['dbsql_query_id'] = self.dbsql_query_id
|
|
641
|
+
if self.dbsql_session_id is not None: body['dbsql_session_id'] = self.dbsql_session_id
|
|
642
|
+
if self.job_id is not None: body['job_id'] = self.job_id
|
|
643
|
+
if self.job_run_id is not None: body['job_run_id'] = self.job_run_id
|
|
644
|
+
if self.lakeview_dashboard_id is not None: body['lakeview_dashboard_id'] = self.lakeview_dashboard_id
|
|
645
|
+
if self.notebook_cell_run_id is not None: body['notebook_cell_run_id'] = self.notebook_cell_run_id
|
|
646
|
+
if self.notebook_id is not None: body['notebook_id'] = self.notebook_id
|
|
647
|
+
if self.statement_ids: body['statement_ids'] = [v for v in self.statement_ids]
|
|
648
|
+
return body
|
|
649
|
+
|
|
650
|
+
@classmethod
|
|
651
|
+
def from_dict(cls, d: Dict[str, any]) -> ContextFilter:
|
|
652
|
+
"""Deserializes the ContextFilter from a dictionary."""
|
|
653
|
+
return cls(dbsql_alert_id=d.get('dbsql_alert_id', None),
|
|
654
|
+
dbsql_dashboard_id=d.get('dbsql_dashboard_id', None),
|
|
655
|
+
dbsql_query_id=d.get('dbsql_query_id', None),
|
|
656
|
+
dbsql_session_id=d.get('dbsql_session_id', None),
|
|
657
|
+
job_id=d.get('job_id', None),
|
|
658
|
+
job_run_id=d.get('job_run_id', None),
|
|
659
|
+
lakeview_dashboard_id=d.get('lakeview_dashboard_id', None),
|
|
660
|
+
notebook_cell_run_id=d.get('notebook_cell_run_id', None),
|
|
661
|
+
notebook_id=d.get('notebook_id', None),
|
|
662
|
+
statement_ids=d.get('statement_ids', None))
|
|
663
|
+
|
|
664
|
+
|
|
445
665
|
@dataclass
|
|
446
666
|
class CreateAlert:
|
|
447
667
|
name: str
|
|
@@ -481,99 +701,305 @@ class CreateAlert:
|
|
|
481
701
|
|
|
482
702
|
|
|
483
703
|
@dataclass
|
|
484
|
-
class
|
|
485
|
-
|
|
486
|
-
"""The amount of time in minutes that a SQL warehouse must be idle (i.e., no RUNNING queries)
|
|
487
|
-
before it is automatically stopped.
|
|
488
|
-
|
|
489
|
-
Supported values: - Must be == 0 or >= 10 mins - 0 indicates no autostop.
|
|
490
|
-
|
|
491
|
-
Defaults to 120 mins"""
|
|
704
|
+
class CreateAlertRequest:
|
|
705
|
+
alert: Optional[CreateAlertRequestAlert] = None
|
|
492
706
|
|
|
493
|
-
|
|
494
|
-
|
|
707
|
+
def as_dict(self) -> dict:
|
|
708
|
+
"""Serializes the CreateAlertRequest into a dictionary suitable for use as a JSON request body."""
|
|
709
|
+
body = {}
|
|
710
|
+
if self.alert: body['alert'] = self.alert.as_dict()
|
|
711
|
+
return body
|
|
495
712
|
|
|
496
|
-
|
|
497
|
-
|
|
498
|
-
|
|
499
|
-
|
|
500
|
-
|
|
501
|
-
Supported values: - 2X-Small - X-Small - Small - Medium - Large - X-Large - 2X-Large - 3X-Large
|
|
502
|
-
- 4X-Large"""
|
|
713
|
+
@classmethod
|
|
714
|
+
def from_dict(cls, d: Dict[str, any]) -> CreateAlertRequest:
|
|
715
|
+
"""Deserializes the CreateAlertRequest from a dictionary."""
|
|
716
|
+
return cls(alert=_from_dict(d, 'alert', CreateAlertRequestAlert))
|
|
503
717
|
|
|
504
|
-
creator_name: Optional[str] = None
|
|
505
|
-
"""warehouse creator name"""
|
|
506
718
|
|
|
507
|
-
|
|
508
|
-
|
|
719
|
+
@dataclass
|
|
720
|
+
class CreateAlertRequestAlert:
|
|
721
|
+
condition: Optional[AlertCondition] = None
|
|
722
|
+
"""Trigger conditions of the alert."""
|
|
723
|
+
|
|
724
|
+
custom_body: Optional[str] = None
|
|
725
|
+
"""Custom body of alert notification, if it exists. See [here] for custom templating instructions.
|
|
509
726
|
|
|
510
|
-
|
|
727
|
+
[here]: https://docs.databricks.com/sql/user/alerts/index.html"""
|
|
511
728
|
|
|
512
|
-
|
|
513
|
-
"""
|
|
729
|
+
custom_subject: Optional[str] = None
|
|
730
|
+
"""Custom subject of alert notification, if it exists. This can include email subject entries and
|
|
731
|
+
Slack notification headers, for example. See [here] for custom templating instructions.
|
|
732
|
+
|
|
733
|
+
[here]: https://docs.databricks.com/sql/user/alerts/index.html"""
|
|
514
734
|
|
|
515
|
-
|
|
516
|
-
"""
|
|
735
|
+
display_name: Optional[str] = None
|
|
736
|
+
"""The display name of the alert."""
|
|
517
737
|
|
|
518
|
-
|
|
519
|
-
"""
|
|
520
|
-
|
|
521
|
-
Supported values: - Must be >= min_num_clusters - Must be <= 30.
|
|
522
|
-
|
|
523
|
-
Defaults to min_clusters if unset."""
|
|
738
|
+
parent_path: Optional[str] = None
|
|
739
|
+
"""The workspace path of the folder containing the alert."""
|
|
524
740
|
|
|
525
|
-
|
|
526
|
-
"""
|
|
527
|
-
this will ensure that a larger number of clusters are always running and therefore may reduce
|
|
528
|
-
the cold start time for new queries. This is similar to reserved vs. revocable cores in a
|
|
529
|
-
resource manager.
|
|
530
|
-
|
|
531
|
-
Supported values: - Must be > 0 - Must be <= min(max_num_clusters, 30)
|
|
532
|
-
|
|
533
|
-
Defaults to 1"""
|
|
741
|
+
query_id: Optional[str] = None
|
|
742
|
+
"""UUID of the query attached to the alert."""
|
|
534
743
|
|
|
535
|
-
|
|
536
|
-
"""
|
|
537
|
-
|
|
538
|
-
Supported values: - Must be unique within an org. - Must be less than 100 characters."""
|
|
744
|
+
seconds_to_retrigger: Optional[int] = None
|
|
745
|
+
"""Number of seconds an alert must wait after being triggered to rearm itself. After rearming, it
|
|
746
|
+
can be triggered again. If 0 or not specified, the alert will not be triggered again."""
|
|
539
747
|
|
|
540
|
-
|
|
541
|
-
|
|
748
|
+
def as_dict(self) -> dict:
|
|
749
|
+
"""Serializes the CreateAlertRequestAlert into a dictionary suitable for use as a JSON request body."""
|
|
750
|
+
body = {}
|
|
751
|
+
if self.condition: body['condition'] = self.condition.as_dict()
|
|
752
|
+
if self.custom_body is not None: body['custom_body'] = self.custom_body
|
|
753
|
+
if self.custom_subject is not None: body['custom_subject'] = self.custom_subject
|
|
754
|
+
if self.display_name is not None: body['display_name'] = self.display_name
|
|
755
|
+
if self.parent_path is not None: body['parent_path'] = self.parent_path
|
|
756
|
+
if self.query_id is not None: body['query_id'] = self.query_id
|
|
757
|
+
if self.seconds_to_retrigger is not None: body['seconds_to_retrigger'] = self.seconds_to_retrigger
|
|
758
|
+
return body
|
|
542
759
|
|
|
543
|
-
|
|
544
|
-
|
|
545
|
-
|
|
546
|
-
|
|
547
|
-
|
|
760
|
+
@classmethod
|
|
761
|
+
def from_dict(cls, d: Dict[str, any]) -> CreateAlertRequestAlert:
|
|
762
|
+
"""Deserializes the CreateAlertRequestAlert from a dictionary."""
|
|
763
|
+
return cls(condition=_from_dict(d, 'condition', AlertCondition),
|
|
764
|
+
custom_body=d.get('custom_body', None),
|
|
765
|
+
custom_subject=d.get('custom_subject', None),
|
|
766
|
+
display_name=d.get('display_name', None),
|
|
767
|
+
parent_path=d.get('parent_path', None),
|
|
768
|
+
query_id=d.get('query_id', None),
|
|
769
|
+
seconds_to_retrigger=d.get('seconds_to_retrigger', None))
|
|
548
770
|
|
|
549
|
-
|
|
550
|
-
|
|
551
|
-
|
|
771
|
+
|
|
772
|
+
@dataclass
|
|
773
|
+
class CreateQueryRequest:
|
|
774
|
+
query: Optional[CreateQueryRequestQuery] = None
|
|
552
775
|
|
|
553
776
|
def as_dict(self) -> dict:
|
|
554
|
-
"""Serializes the
|
|
777
|
+
"""Serializes the CreateQueryRequest into a dictionary suitable for use as a JSON request body."""
|
|
555
778
|
body = {}
|
|
556
|
-
if self.
|
|
557
|
-
if self.channel: body['channel'] = self.channel.as_dict()
|
|
558
|
-
if self.cluster_size is not None: body['cluster_size'] = self.cluster_size
|
|
559
|
-
if self.creator_name is not None: body['creator_name'] = self.creator_name
|
|
560
|
-
if self.enable_photon is not None: body['enable_photon'] = self.enable_photon
|
|
561
|
-
if self.enable_serverless_compute is not None:
|
|
562
|
-
body['enable_serverless_compute'] = self.enable_serverless_compute
|
|
563
|
-
if self.instance_profile_arn is not None: body['instance_profile_arn'] = self.instance_profile_arn
|
|
564
|
-
if self.max_num_clusters is not None: body['max_num_clusters'] = self.max_num_clusters
|
|
565
|
-
if self.min_num_clusters is not None: body['min_num_clusters'] = self.min_num_clusters
|
|
566
|
-
if self.name is not None: body['name'] = self.name
|
|
567
|
-
if self.spot_instance_policy is not None:
|
|
568
|
-
body['spot_instance_policy'] = self.spot_instance_policy.value
|
|
569
|
-
if self.tags: body['tags'] = self.tags.as_dict()
|
|
570
|
-
if self.warehouse_type is not None: body['warehouse_type'] = self.warehouse_type.value
|
|
779
|
+
if self.query: body['query'] = self.query.as_dict()
|
|
571
780
|
return body
|
|
572
781
|
|
|
573
782
|
@classmethod
|
|
574
|
-
def from_dict(cls, d: Dict[str, any]) ->
|
|
575
|
-
"""Deserializes the
|
|
576
|
-
return cls(
|
|
783
|
+
def from_dict(cls, d: Dict[str, any]) -> CreateQueryRequest:
|
|
784
|
+
"""Deserializes the CreateQueryRequest from a dictionary."""
|
|
785
|
+
return cls(query=_from_dict(d, 'query', CreateQueryRequestQuery))
|
|
786
|
+
|
|
787
|
+
|
|
788
|
+
@dataclass
|
|
789
|
+
class CreateQueryRequestQuery:
|
|
790
|
+
apply_auto_limit: Optional[bool] = None
|
|
791
|
+
"""Whether to apply a 1000 row limit to the query result."""
|
|
792
|
+
|
|
793
|
+
catalog: Optional[str] = None
|
|
794
|
+
"""Name of the catalog where this query will be executed."""
|
|
795
|
+
|
|
796
|
+
description: Optional[str] = None
|
|
797
|
+
"""General description that conveys additional information about this query such as usage notes."""
|
|
798
|
+
|
|
799
|
+
display_name: Optional[str] = None
|
|
800
|
+
"""Display name of the query that appears in list views, widget headings, and on the query page."""
|
|
801
|
+
|
|
802
|
+
parameters: Optional[List[QueryParameter]] = None
|
|
803
|
+
"""List of query parameter definitions."""
|
|
804
|
+
|
|
805
|
+
parent_path: Optional[str] = None
|
|
806
|
+
"""Workspace path of the workspace folder containing the object."""
|
|
807
|
+
|
|
808
|
+
query_text: Optional[str] = None
|
|
809
|
+
"""Text of the query to be run."""
|
|
810
|
+
|
|
811
|
+
run_as_mode: Optional[RunAsMode] = None
|
|
812
|
+
"""Sets the "Run as" role for the object."""
|
|
813
|
+
|
|
814
|
+
schema: Optional[str] = None
|
|
815
|
+
"""Name of the schema where this query will be executed."""
|
|
816
|
+
|
|
817
|
+
tags: Optional[List[str]] = None
|
|
818
|
+
|
|
819
|
+
warehouse_id: Optional[str] = None
|
|
820
|
+
"""ID of the SQL warehouse attached to the query."""
|
|
821
|
+
|
|
822
|
+
def as_dict(self) -> dict:
|
|
823
|
+
"""Serializes the CreateQueryRequestQuery into a dictionary suitable for use as a JSON request body."""
|
|
824
|
+
body = {}
|
|
825
|
+
if self.apply_auto_limit is not None: body['apply_auto_limit'] = self.apply_auto_limit
|
|
826
|
+
if self.catalog is not None: body['catalog'] = self.catalog
|
|
827
|
+
if self.description is not None: body['description'] = self.description
|
|
828
|
+
if self.display_name is not None: body['display_name'] = self.display_name
|
|
829
|
+
if self.parameters: body['parameters'] = [v.as_dict() for v in self.parameters]
|
|
830
|
+
if self.parent_path is not None: body['parent_path'] = self.parent_path
|
|
831
|
+
if self.query_text is not None: body['query_text'] = self.query_text
|
|
832
|
+
if self.run_as_mode is not None: body['run_as_mode'] = self.run_as_mode.value
|
|
833
|
+
if self.schema is not None: body['schema'] = self.schema
|
|
834
|
+
if self.tags: body['tags'] = [v for v in self.tags]
|
|
835
|
+
if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id
|
|
836
|
+
return body
|
|
837
|
+
|
|
838
|
+
@classmethod
|
|
839
|
+
def from_dict(cls, d: Dict[str, any]) -> CreateQueryRequestQuery:
|
|
840
|
+
"""Deserializes the CreateQueryRequestQuery from a dictionary."""
|
|
841
|
+
return cls(apply_auto_limit=d.get('apply_auto_limit', None),
|
|
842
|
+
catalog=d.get('catalog', None),
|
|
843
|
+
description=d.get('description', None),
|
|
844
|
+
display_name=d.get('display_name', None),
|
|
845
|
+
parameters=_repeated_dict(d, 'parameters', QueryParameter),
|
|
846
|
+
parent_path=d.get('parent_path', None),
|
|
847
|
+
query_text=d.get('query_text', None),
|
|
848
|
+
run_as_mode=_enum(d, 'run_as_mode', RunAsMode),
|
|
849
|
+
schema=d.get('schema', None),
|
|
850
|
+
tags=d.get('tags', None),
|
|
851
|
+
warehouse_id=d.get('warehouse_id', None))
|
|
852
|
+
|
|
853
|
+
|
|
854
|
+
@dataclass
|
|
855
|
+
class CreateVisualizationRequest:
|
|
856
|
+
visualization: Optional[CreateVisualizationRequestVisualization] = None
|
|
857
|
+
|
|
858
|
+
def as_dict(self) -> dict:
|
|
859
|
+
"""Serializes the CreateVisualizationRequest into a dictionary suitable for use as a JSON request body."""
|
|
860
|
+
body = {}
|
|
861
|
+
if self.visualization: body['visualization'] = self.visualization.as_dict()
|
|
862
|
+
return body
|
|
863
|
+
|
|
864
|
+
@classmethod
|
|
865
|
+
def from_dict(cls, d: Dict[str, any]) -> CreateVisualizationRequest:
|
|
866
|
+
"""Deserializes the CreateVisualizationRequest from a dictionary."""
|
|
867
|
+
return cls(visualization=_from_dict(d, 'visualization', CreateVisualizationRequestVisualization))
|
|
868
|
+
|
|
869
|
+
|
|
870
|
+
@dataclass
|
|
871
|
+
class CreateVisualizationRequestVisualization:
|
|
872
|
+
display_name: Optional[str] = None
|
|
873
|
+
"""The display name of the visualization."""
|
|
874
|
+
|
|
875
|
+
query_id: Optional[str] = None
|
|
876
|
+
"""UUID of the query that the visualization is attached to."""
|
|
877
|
+
|
|
878
|
+
serialized_options: Optional[str] = None
|
|
879
|
+
"""The visualization options varies widely from one visualization type to the next and is
|
|
880
|
+
unsupported. Databricks does not recommend modifying visualization options directly."""
|
|
881
|
+
|
|
882
|
+
serialized_query_plan: Optional[str] = None
|
|
883
|
+
"""The visualization query plan varies widely from one visualization type to the next and is
|
|
884
|
+
unsupported. Databricks does not recommend modifying the visualization query plan directly."""
|
|
885
|
+
|
|
886
|
+
type: Optional[str] = None
|
|
887
|
+
"""The type of visualization: counter, table, funnel, and so on."""
|
|
888
|
+
|
|
889
|
+
def as_dict(self) -> dict:
|
|
890
|
+
"""Serializes the CreateVisualizationRequestVisualization into a dictionary suitable for use as a JSON request body."""
|
|
891
|
+
body = {}
|
|
892
|
+
if self.display_name is not None: body['display_name'] = self.display_name
|
|
893
|
+
if self.query_id is not None: body['query_id'] = self.query_id
|
|
894
|
+
if self.serialized_options is not None: body['serialized_options'] = self.serialized_options
|
|
895
|
+
if self.serialized_query_plan is not None: body['serialized_query_plan'] = self.serialized_query_plan
|
|
896
|
+
if self.type is not None: body['type'] = self.type
|
|
897
|
+
return body
|
|
898
|
+
|
|
899
|
+
@classmethod
|
|
900
|
+
def from_dict(cls, d: Dict[str, any]) -> CreateVisualizationRequestVisualization:
|
|
901
|
+
"""Deserializes the CreateVisualizationRequestVisualization from a dictionary."""
|
|
902
|
+
return cls(display_name=d.get('display_name', None),
|
|
903
|
+
query_id=d.get('query_id', None),
|
|
904
|
+
serialized_options=d.get('serialized_options', None),
|
|
905
|
+
serialized_query_plan=d.get('serialized_query_plan', None),
|
|
906
|
+
type=d.get('type', None))
|
|
907
|
+
|
|
908
|
+
|
|
909
|
+
@dataclass
|
|
910
|
+
class CreateWarehouseRequest:
|
|
911
|
+
auto_stop_mins: Optional[int] = None
|
|
912
|
+
"""The amount of time in minutes that a SQL warehouse must be idle (i.e., no RUNNING queries)
|
|
913
|
+
before it is automatically stopped.
|
|
914
|
+
|
|
915
|
+
Supported values: - Must be == 0 or >= 10 mins - 0 indicates no autostop.
|
|
916
|
+
|
|
917
|
+
Defaults to 120 mins"""
|
|
918
|
+
|
|
919
|
+
channel: Optional[Channel] = None
|
|
920
|
+
"""Channel Details"""
|
|
921
|
+
|
|
922
|
+
cluster_size: Optional[str] = None
|
|
923
|
+
"""Size of the clusters allocated for this warehouse. Increasing the size of a spark cluster allows
|
|
924
|
+
you to run larger queries on it. If you want to increase the number of concurrent queries,
|
|
925
|
+
please tune max_num_clusters.
|
|
926
|
+
|
|
927
|
+
Supported values: - 2X-Small - X-Small - Small - Medium - Large - X-Large - 2X-Large - 3X-Large
|
|
928
|
+
- 4X-Large"""
|
|
929
|
+
|
|
930
|
+
creator_name: Optional[str] = None
|
|
931
|
+
"""warehouse creator name"""
|
|
932
|
+
|
|
933
|
+
enable_photon: Optional[bool] = None
|
|
934
|
+
"""Configures whether the warehouse should use Photon optimized clusters.
|
|
935
|
+
|
|
936
|
+
Defaults to false."""
|
|
937
|
+
|
|
938
|
+
enable_serverless_compute: Optional[bool] = None
|
|
939
|
+
"""Configures whether the warehouse should use serverless compute"""
|
|
940
|
+
|
|
941
|
+
instance_profile_arn: Optional[str] = None
|
|
942
|
+
"""Deprecated. Instance profile used to pass IAM role to the cluster"""
|
|
943
|
+
|
|
944
|
+
max_num_clusters: Optional[int] = None
|
|
945
|
+
"""Maximum number of clusters that the autoscaler will create to handle concurrent queries.
|
|
946
|
+
|
|
947
|
+
Supported values: - Must be >= min_num_clusters - Must be <= 30.
|
|
948
|
+
|
|
949
|
+
Defaults to min_clusters if unset."""
|
|
950
|
+
|
|
951
|
+
min_num_clusters: Optional[int] = None
|
|
952
|
+
"""Minimum number of available clusters that will be maintained for this SQL warehouse. Increasing
|
|
953
|
+
this will ensure that a larger number of clusters are always running and therefore may reduce
|
|
954
|
+
the cold start time for new queries. This is similar to reserved vs. revocable cores in a
|
|
955
|
+
resource manager.
|
|
956
|
+
|
|
957
|
+
Supported values: - Must be > 0 - Must be <= min(max_num_clusters, 30)
|
|
958
|
+
|
|
959
|
+
Defaults to 1"""
|
|
960
|
+
|
|
961
|
+
name: Optional[str] = None
|
|
962
|
+
"""Logical name for the cluster.
|
|
963
|
+
|
|
964
|
+
Supported values: - Must be unique within an org. - Must be less than 100 characters."""
|
|
965
|
+
|
|
966
|
+
spot_instance_policy: Optional[SpotInstancePolicy] = None
|
|
967
|
+
"""Configurations whether the warehouse should use spot instances."""
|
|
968
|
+
|
|
969
|
+
tags: Optional[EndpointTags] = None
|
|
970
|
+
"""A set of key-value pairs that will be tagged on all resources (e.g., AWS instances and EBS
|
|
971
|
+
volumes) associated with this SQL warehouse.
|
|
972
|
+
|
|
973
|
+
Supported values: - Number of tags < 45."""
|
|
974
|
+
|
|
975
|
+
warehouse_type: Optional[CreateWarehouseRequestWarehouseType] = None
|
|
976
|
+
"""Warehouse type: `PRO` or `CLASSIC`. If you want to use serverless compute, you must set to `PRO`
|
|
977
|
+
and also set the field `enable_serverless_compute` to `true`."""
|
|
978
|
+
|
|
979
|
+
def as_dict(self) -> dict:
|
|
980
|
+
"""Serializes the CreateWarehouseRequest into a dictionary suitable for use as a JSON request body."""
|
|
981
|
+
body = {}
|
|
982
|
+
if self.auto_stop_mins is not None: body['auto_stop_mins'] = self.auto_stop_mins
|
|
983
|
+
if self.channel: body['channel'] = self.channel.as_dict()
|
|
984
|
+
if self.cluster_size is not None: body['cluster_size'] = self.cluster_size
|
|
985
|
+
if self.creator_name is not None: body['creator_name'] = self.creator_name
|
|
986
|
+
if self.enable_photon is not None: body['enable_photon'] = self.enable_photon
|
|
987
|
+
if self.enable_serverless_compute is not None:
|
|
988
|
+
body['enable_serverless_compute'] = self.enable_serverless_compute
|
|
989
|
+
if self.instance_profile_arn is not None: body['instance_profile_arn'] = self.instance_profile_arn
|
|
990
|
+
if self.max_num_clusters is not None: body['max_num_clusters'] = self.max_num_clusters
|
|
991
|
+
if self.min_num_clusters is not None: body['min_num_clusters'] = self.min_num_clusters
|
|
992
|
+
if self.name is not None: body['name'] = self.name
|
|
993
|
+
if self.spot_instance_policy is not None:
|
|
994
|
+
body['spot_instance_policy'] = self.spot_instance_policy.value
|
|
995
|
+
if self.tags: body['tags'] = self.tags.as_dict()
|
|
996
|
+
if self.warehouse_type is not None: body['warehouse_type'] = self.warehouse_type.value
|
|
997
|
+
return body
|
|
998
|
+
|
|
999
|
+
@classmethod
|
|
1000
|
+
def from_dict(cls, d: Dict[str, any]) -> CreateWarehouseRequest:
|
|
1001
|
+
"""Deserializes the CreateWarehouseRequest from a dictionary."""
|
|
1002
|
+
return cls(auto_stop_mins=d.get('auto_stop_mins', None),
|
|
577
1003
|
channel=_from_dict(d, 'channel', Channel),
|
|
578
1004
|
cluster_size=d.get('cluster_size', None),
|
|
579
1005
|
creator_name=d.get('creator_name', None),
|
|
@@ -913,6 +1339,121 @@ class DataSource:
|
|
|
913
1339
|
warehouse_id=d.get('warehouse_id', None))
|
|
914
1340
|
|
|
915
1341
|
|
|
1342
|
+
class DatePrecision(Enum):
|
|
1343
|
+
|
|
1344
|
+
DAY_PRECISION = 'DAY_PRECISION'
|
|
1345
|
+
MINUTE_PRECISION = 'MINUTE_PRECISION'
|
|
1346
|
+
SECOND_PRECISION = 'SECOND_PRECISION'
|
|
1347
|
+
|
|
1348
|
+
|
|
1349
|
+
@dataclass
|
|
1350
|
+
class DateRange:
|
|
1351
|
+
start: str
|
|
1352
|
+
|
|
1353
|
+
end: str
|
|
1354
|
+
|
|
1355
|
+
def as_dict(self) -> dict:
|
|
1356
|
+
"""Serializes the DateRange into a dictionary suitable for use as a JSON request body."""
|
|
1357
|
+
body = {}
|
|
1358
|
+
if self.end is not None: body['end'] = self.end
|
|
1359
|
+
if self.start is not None: body['start'] = self.start
|
|
1360
|
+
return body
|
|
1361
|
+
|
|
1362
|
+
@classmethod
|
|
1363
|
+
def from_dict(cls, d: Dict[str, any]) -> DateRange:
|
|
1364
|
+
"""Deserializes the DateRange from a dictionary."""
|
|
1365
|
+
return cls(end=d.get('end', None), start=d.get('start', None))
|
|
1366
|
+
|
|
1367
|
+
|
|
1368
|
+
@dataclass
|
|
1369
|
+
class DateRangeValue:
|
|
1370
|
+
date_range_value: Optional[DateRange] = None
|
|
1371
|
+
"""Manually specified date-time range value."""
|
|
1372
|
+
|
|
1373
|
+
dynamic_date_range_value: Optional[DateRangeValueDynamicDateRange] = None
|
|
1374
|
+
"""Dynamic date-time range value based on current date-time."""
|
|
1375
|
+
|
|
1376
|
+
precision: Optional[DatePrecision] = None
|
|
1377
|
+
"""Date-time precision to format the value into when the query is run. Defaults to DAY_PRECISION
|
|
1378
|
+
(YYYY-MM-DD)."""
|
|
1379
|
+
|
|
1380
|
+
start_day_of_week: Optional[int] = None
|
|
1381
|
+
|
|
1382
|
+
def as_dict(self) -> dict:
|
|
1383
|
+
"""Serializes the DateRangeValue into a dictionary suitable for use as a JSON request body."""
|
|
1384
|
+
body = {}
|
|
1385
|
+
if self.date_range_value: body['date_range_value'] = self.date_range_value.as_dict()
|
|
1386
|
+
if self.dynamic_date_range_value is not None:
|
|
1387
|
+
body['dynamic_date_range_value'] = self.dynamic_date_range_value.value
|
|
1388
|
+
if self.precision is not None: body['precision'] = self.precision.value
|
|
1389
|
+
if self.start_day_of_week is not None: body['start_day_of_week'] = self.start_day_of_week
|
|
1390
|
+
return body
|
|
1391
|
+
|
|
1392
|
+
@classmethod
|
|
1393
|
+
def from_dict(cls, d: Dict[str, any]) -> DateRangeValue:
|
|
1394
|
+
"""Deserializes the DateRangeValue from a dictionary."""
|
|
1395
|
+
return cls(date_range_value=_from_dict(d, 'date_range_value', DateRange),
|
|
1396
|
+
dynamic_date_range_value=_enum(d, 'dynamic_date_range_value',
|
|
1397
|
+
DateRangeValueDynamicDateRange),
|
|
1398
|
+
precision=_enum(d, 'precision', DatePrecision),
|
|
1399
|
+
start_day_of_week=d.get('start_day_of_week', None))
|
|
1400
|
+
|
|
1401
|
+
|
|
1402
|
+
class DateRangeValueDynamicDateRange(Enum):
|
|
1403
|
+
|
|
1404
|
+
LAST_12_MONTHS = 'LAST_12_MONTHS'
|
|
1405
|
+
LAST_14_DAYS = 'LAST_14_DAYS'
|
|
1406
|
+
LAST_24_HOURS = 'LAST_24_HOURS'
|
|
1407
|
+
LAST_30_DAYS = 'LAST_30_DAYS'
|
|
1408
|
+
LAST_60_DAYS = 'LAST_60_DAYS'
|
|
1409
|
+
LAST_7_DAYS = 'LAST_7_DAYS'
|
|
1410
|
+
LAST_8_HOURS = 'LAST_8_HOURS'
|
|
1411
|
+
LAST_90_DAYS = 'LAST_90_DAYS'
|
|
1412
|
+
LAST_HOUR = 'LAST_HOUR'
|
|
1413
|
+
LAST_MONTH = 'LAST_MONTH'
|
|
1414
|
+
LAST_WEEK = 'LAST_WEEK'
|
|
1415
|
+
LAST_YEAR = 'LAST_YEAR'
|
|
1416
|
+
THIS_MONTH = 'THIS_MONTH'
|
|
1417
|
+
THIS_WEEK = 'THIS_WEEK'
|
|
1418
|
+
THIS_YEAR = 'THIS_YEAR'
|
|
1419
|
+
TODAY = 'TODAY'
|
|
1420
|
+
YESTERDAY = 'YESTERDAY'
|
|
1421
|
+
|
|
1422
|
+
|
|
1423
|
+
@dataclass
|
|
1424
|
+
class DateValue:
|
|
1425
|
+
date_value: Optional[str] = None
|
|
1426
|
+
"""Manually specified date-time value."""
|
|
1427
|
+
|
|
1428
|
+
dynamic_date_value: Optional[DateValueDynamicDate] = None
|
|
1429
|
+
"""Dynamic date-time value based on current date-time."""
|
|
1430
|
+
|
|
1431
|
+
precision: Optional[DatePrecision] = None
|
|
1432
|
+
"""Date-time precision to format the value into when the query is run. Defaults to DAY_PRECISION
|
|
1433
|
+
(YYYY-MM-DD)."""
|
|
1434
|
+
|
|
1435
|
+
def as_dict(self) -> dict:
|
|
1436
|
+
"""Serializes the DateValue into a dictionary suitable for use as a JSON request body."""
|
|
1437
|
+
body = {}
|
|
1438
|
+
if self.date_value is not None: body['date_value'] = self.date_value
|
|
1439
|
+
if self.dynamic_date_value is not None: body['dynamic_date_value'] = self.dynamic_date_value.value
|
|
1440
|
+
if self.precision is not None: body['precision'] = self.precision.value
|
|
1441
|
+
return body
|
|
1442
|
+
|
|
1443
|
+
@classmethod
|
|
1444
|
+
def from_dict(cls, d: Dict[str, any]) -> DateValue:
|
|
1445
|
+
"""Deserializes the DateValue from a dictionary."""
|
|
1446
|
+
return cls(date_value=d.get('date_value', None),
|
|
1447
|
+
dynamic_date_value=_enum(d, 'dynamic_date_value', DateValueDynamicDate),
|
|
1448
|
+
precision=_enum(d, 'precision', DatePrecision))
|
|
1449
|
+
|
|
1450
|
+
|
|
1451
|
+
class DateValueDynamicDate(Enum):
|
|
1452
|
+
|
|
1453
|
+
NOW = 'NOW'
|
|
1454
|
+
YESTERDAY = 'YESTERDAY'
|
|
1455
|
+
|
|
1456
|
+
|
|
916
1457
|
@dataclass
|
|
917
1458
|
class DeleteResponse:
|
|
918
1459
|
|
|
@@ -942,26 +1483,6 @@ class DeleteWarehouseResponse:
 
 
 class Disposition(Enum):
-"""The fetch disposition provides two modes of fetching results: `INLINE` and `EXTERNAL_LINKS`.
-
-Statements executed with `INLINE` disposition will return result data inline, in `JSON_ARRAY`
-format, in a series of chunks. If a given statement produces a result set with a size larger
-than 25 MiB, that statement execution is aborted, and no result set will be available.
-
-**NOTE** Byte limits are computed based upon internal representations of the result set data,
-and might not match the sizes visible in JSON responses.
-
-Statements executed with `EXTERNAL_LINKS` disposition will return result data as external links:
-URLs that point to cloud storage internal to the workspace. Using `EXTERNAL_LINKS` disposition
-allows statements to generate arbitrarily sized result sets for fetching up to 100 GiB. The
-resulting links have two important properties:
-
-1. They point to resources _external_ to the Databricks compute; therefore any associated
-authentication information (typically a personal access token, OAuth token, or similar) _must be
-removed_ when fetching from these links.
-
-2. These are presigned URLs with a specific expiration, indicated in the response. The behavior
-when attempting to use an expired link is cloud specific."""
 
 EXTERNAL_LINKS = 'EXTERNAL_LINKS'
 INLINE = 'INLINE'
@@ -1140,6 +1661,50 @@ class EditWarehouseResponse:
 return cls()
 
 
+@dataclass
+class Empty:
+"""Represents an empty message, similar to google.protobuf.Empty, which is not available in the
+firm right now."""
+
+def as_dict(self) -> dict:
+"""Serializes the Empty into a dictionary suitable for use as a JSON request body."""
+body = {}
+return body
+
+@classmethod
+def from_dict(cls, d: Dict[str, any]) -> Empty:
+"""Deserializes the Empty from a dictionary."""
+return cls()
+
+
+@dataclass
+class EncodedText:
+encoding: Optional[EncodedTextEncoding] = None
+"""Carry text data in different form."""
+
+text: Optional[str] = None
+"""text data"""
+
+def as_dict(self) -> dict:
+"""Serializes the EncodedText into a dictionary suitable for use as a JSON request body."""
+body = {}
+if self.encoding is not None: body['encoding'] = self.encoding.value
+if self.text is not None: body['text'] = self.text
+return body
+
+@classmethod
+def from_dict(cls, d: Dict[str, any]) -> EncodedText:
+"""Deserializes the EncodedText from a dictionary."""
+return cls(encoding=_enum(d, 'encoding', EncodedTextEncoding), text=d.get('text', None))
+
+
+class EncodedTextEncoding(Enum):
+"""Carry text data in different form."""
+
+BASE64 = 'BASE64'
+PLAIN = 'PLAIN'
+
+
 @dataclass
 class EndpointConfPair:
 key: Optional[str] = None
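
As an aside, the `ClientCallContext` message added earlier in this diff and the `EncodedText` message just above round-trip through the same generated `as_dict()`/`from_dict()` pattern as every other message in this file. A minimal sketch, using only names shown in this diff (the import path and the file name are assumptions for illustration):

```python
# Sketch only: serializes and deserializes the new ClientCallContext message.
from databricks.sdk.service.sql import ClientCallContext, EncodedText, EncodedTextEncoding

ctx = ClientCallContext(
    file_name=EncodedText(encoding=EncodedTextEncoding.PLAIN, text='etl/job.py'),  # placeholder file name
    line_number=42,
)

payload = ctx.as_dict()
# {'file_name': {'encoding': 'PLAIN', 'text': 'etl/job.py'}, 'line_number': 42}

# from_dict() reverses the serialization, as generated for every message in this file.
assert ClientCallContext.from_dict(payload).line_number == 42
```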
@@ -1384,6 +1949,33 @@ class EndpointTags:
 return cls(custom_tags=_repeated_dict(d, 'custom_tags', EndpointTagPair))
 
 
+@dataclass
+class EnumValue:
+enum_options: Optional[str] = None
+"""List of valid query parameter values, newline delimited."""
+
+multi_values_options: Optional[MultiValuesOptions] = None
+"""If specified, allows multiple values to be selected for this parameter."""
+
+values: Optional[List[str]] = None
+"""List of selected query parameter values."""
+
+def as_dict(self) -> dict:
+"""Serializes the EnumValue into a dictionary suitable for use as a JSON request body."""
+body = {}
+if self.enum_options is not None: body['enum_options'] = self.enum_options
+if self.multi_values_options: body['multi_values_options'] = self.multi_values_options.as_dict()
+if self.values: body['values'] = [v for v in self.values]
+return body
+
+@classmethod
+def from_dict(cls, d: Dict[str, any]) -> EnumValue:
+"""Deserializes the EnumValue from a dictionary."""
+return cls(enum_options=d.get('enum_options', None),
+multi_values_options=_from_dict(d, 'multi_values_options', MultiValuesOptions),
+values=d.get('values', None))
+
+
 @dataclass
 class ExecuteStatementRequest:
 statement: str
@@ -1407,26 +1999,6 @@ class ExecuteStatementRequest:
 [`USE CATALOG`]: https://docs.databricks.com/sql/language-manual/sql-ref-syntax-ddl-use-catalog.html"""
 
 disposition: Optional[Disposition] = None
-"""The fetch disposition provides two modes of fetching results: `INLINE` and `EXTERNAL_LINKS`.
-
-Statements executed with `INLINE` disposition will return result data inline, in `JSON_ARRAY`
-format, in a series of chunks. If a given statement produces a result set with a size larger
-than 25 MiB, that statement execution is aborted, and no result set will be available.
-
-**NOTE** Byte limits are computed based upon internal representations of the result set data,
-and might not match the sizes visible in JSON responses.
-
-Statements executed with `EXTERNAL_LINKS` disposition will return result data as external links:
-URLs that point to cloud storage internal to the workspace. Using `EXTERNAL_LINKS` disposition
-allows statements to generate arbitrarily sized result sets for fetching up to 100 GiB. The
-resulting links have two important properties:
-
-1. They point to resources _external_ to the Databricks compute; therefore any associated
-authentication information (typically a personal access token, OAuth token, or similar) _must be
-removed_ when fetching from these links.
-
-2. These are presigned URLs with a specific expiration, indicated in the response. The behavior
-when attempting to use an expired link is cloud specific."""
 
 format: Optional[Format] = None
 """Statement execution supports three result formats: `JSON_ARRAY` (default), `ARROW_STREAM`, and
@@ -1565,43 +2137,6 @@ class ExecuteStatementRequestOnWaitTimeout(Enum):
 CONTINUE = 'CONTINUE'
 
 
-@dataclass
-class ExecuteStatementResponse:
-manifest: Optional[ResultManifest] = None
-"""The result manifest provides schema and metadata for the result set."""
-
-result: Optional[ResultData] = None
-"""Contains the result data of a single chunk when using `INLINE` disposition. When using
-`EXTERNAL_LINKS` disposition, the array `external_links` is used instead to provide presigned
-URLs to the result data in cloud storage. Exactly one of these alternatives is used. (While the
-`external_links` array prepares the API to return multiple links in a single response. Currently
-only a single link is returned.)"""
-
-statement_id: Optional[str] = None
-"""The statement ID is returned upon successfully submitting a SQL statement, and is a required
-reference for all subsequent calls."""
-
-status: Optional[StatementStatus] = None
-"""The status response includes execution state and if relevant, error information."""
-
-def as_dict(self) -> dict:
-"""Serializes the ExecuteStatementResponse into a dictionary suitable for use as a JSON request body."""
-body = {}
-if self.manifest: body['manifest'] = self.manifest.as_dict()
-if self.result: body['result'] = self.result.as_dict()
-if self.statement_id is not None: body['statement_id'] = self.statement_id
-if self.status: body['status'] = self.status.as_dict()
-return body
-
-@classmethod
-def from_dict(cls, d: Dict[str, any]) -> ExecuteStatementResponse:
-"""Deserializes the ExecuteStatementResponse from a dictionary."""
-return cls(manifest=_from_dict(d, 'manifest', ResultManifest),
-result=_from_dict(d, 'result', ResultData),
-statement_id=d.get('statement_id', None),
-status=_from_dict(d, 'status', StatementStatus))
-
-
 @dataclass
 class ExternalLink:
 byte_count: Optional[int] = None
@@ -1616,9 +2151,6 @@ class ExternalLink:
 which point a new `external_link` must be requested."""
 
 external_link: Optional[str] = None
-"""A presigned URL pointing to a chunk of result data, hosted by an external service, with a short
-expiration time (<= 15 minutes). As this URL contains a temporary credential, it should be
-considered sensitive and the client should not expose this URL in a log."""
 
 http_headers: Optional[Dict[str, str]] = None
 """HTTP headers that must be included with a GET request to the `external_link`. Each header is
@@ -1705,43 +2237,6 @@ class GetResponse:
 object_type=_enum(d, 'object_type', ObjectType))
 
 
-@dataclass
-class GetStatementResponse:
-manifest: Optional[ResultManifest] = None
-"""The result manifest provides schema and metadata for the result set."""
-
-result: Optional[ResultData] = None
-"""Contains the result data of a single chunk when using `INLINE` disposition. When using
-`EXTERNAL_LINKS` disposition, the array `external_links` is used instead to provide presigned
-URLs to the result data in cloud storage. Exactly one of these alternatives is used. (While the
-`external_links` array prepares the API to return multiple links in a single response. Currently
-only a single link is returned.)"""
-
-statement_id: Optional[str] = None
-"""The statement ID is returned upon successfully submitting a SQL statement, and is a required
-reference for all subsequent calls."""
-
-status: Optional[StatementStatus] = None
-"""The status response includes execution state and if relevant, error information."""
-
-def as_dict(self) -> dict:
-"""Serializes the GetStatementResponse into a dictionary suitable for use as a JSON request body."""
-body = {}
-if self.manifest: body['manifest'] = self.manifest.as_dict()
-if self.result: body['result'] = self.result.as_dict()
-if self.statement_id is not None: body['statement_id'] = self.statement_id
-if self.status: body['status'] = self.status.as_dict()
-return body
-
-@classmethod
-def from_dict(cls, d: Dict[str, any]) -> GetStatementResponse:
-"""Deserializes the GetStatementResponse from a dictionary."""
-return cls(manifest=_from_dict(d, 'manifest', ResultManifest),
-result=_from_dict(d, 'result', ResultData),
-statement_id=d.get('statement_id', None),
-status=_from_dict(d, 'status', StatementStatus))
-
-
 @dataclass
 class GetWarehousePermissionLevelsResponse:
 permission_levels: Optional[List[WarehousePermissionsDescription]] = None
@@ -1987,6 +2482,386 @@ class GetWorkspaceWarehouseConfigResponseSecurityPolicy(Enum):
     PASSTHROUGH = 'PASSTHROUGH'


+@dataclass
+class LegacyAlert:
+    created_at: Optional[str] = None
+    """Timestamp when the alert was created."""
+
+    id: Optional[str] = None
+    """Alert ID."""
+
+    last_triggered_at: Optional[str] = None
+    """Timestamp when the alert was last triggered."""
+
+    name: Optional[str] = None
+    """Name of the alert."""
+
+    options: Optional[AlertOptions] = None
+    """Alert configuration options."""
+
+    parent: Optional[str] = None
+    """The identifier of the workspace folder containing the object."""
+
+    query: Optional[AlertQuery] = None
+
+    rearm: Optional[int] = None
+    """Number of seconds after being triggered before the alert rearms itself and can be triggered
+    again. If `null`, alert will never be triggered again."""
+
+    state: Optional[LegacyAlertState] = None
+    """State of the alert. Possible values are: `unknown` (yet to be evaluated), `triggered` (evaluated
+    and fulfilled trigger conditions), or `ok` (evaluated and did not fulfill trigger conditions)."""
+
+    updated_at: Optional[str] = None
+    """Timestamp when the alert was last updated."""
+
+    user: Optional[User] = None
+
+    def as_dict(self) -> dict:
+        """Serializes the LegacyAlert into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.created_at is not None: body['created_at'] = self.created_at
+        if self.id is not None: body['id'] = self.id
+        if self.last_triggered_at is not None: body['last_triggered_at'] = self.last_triggered_at
+        if self.name is not None: body['name'] = self.name
+        if self.options: body['options'] = self.options.as_dict()
+        if self.parent is not None: body['parent'] = self.parent
+        if self.query: body['query'] = self.query.as_dict()
+        if self.rearm is not None: body['rearm'] = self.rearm
+        if self.state is not None: body['state'] = self.state.value
+        if self.updated_at is not None: body['updated_at'] = self.updated_at
+        if self.user: body['user'] = self.user.as_dict()
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> LegacyAlert:
+        """Deserializes the LegacyAlert from a dictionary."""
+        return cls(created_at=d.get('created_at', None),
+                   id=d.get('id', None),
+                   last_triggered_at=d.get('last_triggered_at', None),
+                   name=d.get('name', None),
+                   options=_from_dict(d, 'options', AlertOptions),
+                   parent=d.get('parent', None),
+                   query=_from_dict(d, 'query', AlertQuery),
+                   rearm=d.get('rearm', None),
+                   state=_enum(d, 'state', LegacyAlertState),
+                   updated_at=d.get('updated_at', None),
+                   user=_from_dict(d, 'user', User))
+
+
+class LegacyAlertState(Enum):
+    """State of the alert. Possible values are: `unknown` (yet to be evaluated), `triggered` (evaluated
+    and fulfilled trigger conditions), or `ok` (evaluated and did not fulfill trigger conditions)."""
+
+    OK = 'ok'
+    TRIGGERED = 'triggered'
+    UNKNOWN = 'unknown'
+
+
+@dataclass
+class LegacyQuery:
+    can_edit: Optional[bool] = None
+    """Describes whether the authenticated user is allowed to edit the definition of this query."""
+
+    created_at: Optional[str] = None
+    """The timestamp when this query was created."""
+
+    data_source_id: Optional[str] = None
+    """Data source ID maps to the ID of the data source used by the resource and is distinct from the
+    warehouse ID. [Learn more]
+
+    [Learn more]: https://docs.databricks.com/api/workspace/datasources/list"""
+
+    description: Optional[str] = None
+    """General description that conveys additional information about this query such as usage notes."""
+
+    id: Optional[str] = None
+    """Query ID."""
+
+    is_archived: Optional[bool] = None
+    """Indicates whether the query is trashed. Trashed queries can't be used in dashboards, or appear
+    in search results. If this boolean is `true`, the `options` property for this query includes a
+    `moved_to_trash_at` timestamp. Trashed queries are permanently deleted after 30 days."""
+
+    is_draft: Optional[bool] = None
+    """Whether the query is a draft. Draft queries only appear in list views for their owners.
+    Visualizations from draft queries cannot appear on dashboards."""
+
+    is_favorite: Optional[bool] = None
+    """Whether this query object appears in the current user's favorites list. This flag determines
+    whether the star icon for favorites is selected."""
+
+    is_safe: Optional[bool] = None
+    """Text parameter types are not safe from SQL injection for all types of data source. Set this
+    Boolean parameter to `true` if a query either does not use any text type parameters or uses a
+    data source type where text type parameters are handled safely."""
+
+    last_modified_by: Optional[User] = None
+
+    last_modified_by_id: Optional[int] = None
+    """The ID of the user who last saved changes to this query."""
+
+    latest_query_data_id: Optional[str] = None
+    """If there is a cached result for this query and user, this field includes the query result ID. If
+    this query uses parameters, this field is always null."""
+
+    name: Optional[str] = None
+    """The title of this query that appears in list views, widget headings, and on the query page."""
+
+    options: Optional[QueryOptions] = None
+
+    parent: Optional[str] = None
+    """The identifier of the workspace folder containing the object."""
+
+    permission_tier: Optional[PermissionLevel] = None
+    """* `CAN_VIEW`: Can view the query * `CAN_RUN`: Can run the query * `CAN_EDIT`: Can edit the query
+    * `CAN_MANAGE`: Can manage the query"""
+
+    query: Optional[str] = None
+    """The text of the query to be run."""
+
+    query_hash: Optional[str] = None
+    """A SHA-256 hash of the query text along with the authenticated user ID."""
+
+    run_as_role: Optional[RunAsRole] = None
+    """Sets the **Run as** role for the object. Must be set to one of `"viewer"` (signifying "run as
+    viewer" behavior) or `"owner"` (signifying "run as owner" behavior)"""
+
+    tags: Optional[List[str]] = None
+
+    updated_at: Optional[str] = None
+    """The timestamp at which this query was last updated."""
+
+    user: Optional[User] = None
+
+    user_id: Optional[int] = None
+    """The ID of the user who owns the query."""
+
+    visualizations: Optional[List[LegacyVisualization]] = None
+
+    def as_dict(self) -> dict:
+        """Serializes the LegacyQuery into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.can_edit is not None: body['can_edit'] = self.can_edit
+        if self.created_at is not None: body['created_at'] = self.created_at
+        if self.data_source_id is not None: body['data_source_id'] = self.data_source_id
+        if self.description is not None: body['description'] = self.description
+        if self.id is not None: body['id'] = self.id
+        if self.is_archived is not None: body['is_archived'] = self.is_archived
+        if self.is_draft is not None: body['is_draft'] = self.is_draft
+        if self.is_favorite is not None: body['is_favorite'] = self.is_favorite
+        if self.is_safe is not None: body['is_safe'] = self.is_safe
+        if self.last_modified_by: body['last_modified_by'] = self.last_modified_by.as_dict()
+        if self.last_modified_by_id is not None: body['last_modified_by_id'] = self.last_modified_by_id
+        if self.latest_query_data_id is not None: body['latest_query_data_id'] = self.latest_query_data_id
+        if self.name is not None: body['name'] = self.name
+        if self.options: body['options'] = self.options.as_dict()
+        if self.parent is not None: body['parent'] = self.parent
+        if self.permission_tier is not None: body['permission_tier'] = self.permission_tier.value
+        if self.query is not None: body['query'] = self.query
+        if self.query_hash is not None: body['query_hash'] = self.query_hash
+        if self.run_as_role is not None: body['run_as_role'] = self.run_as_role.value
+        if self.tags: body['tags'] = [v for v in self.tags]
+        if self.updated_at is not None: body['updated_at'] = self.updated_at
+        if self.user: body['user'] = self.user.as_dict()
+        if self.user_id is not None: body['user_id'] = self.user_id
+        if self.visualizations: body['visualizations'] = [v.as_dict() for v in self.visualizations]
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> LegacyQuery:
+        """Deserializes the LegacyQuery from a dictionary."""
+        return cls(can_edit=d.get('can_edit', None),
+                   created_at=d.get('created_at', None),
+                   data_source_id=d.get('data_source_id', None),
+                   description=d.get('description', None),
+                   id=d.get('id', None),
+                   is_archived=d.get('is_archived', None),
+                   is_draft=d.get('is_draft', None),
+                   is_favorite=d.get('is_favorite', None),
+                   is_safe=d.get('is_safe', None),
+                   last_modified_by=_from_dict(d, 'last_modified_by', User),
+                   last_modified_by_id=d.get('last_modified_by_id', None),
+                   latest_query_data_id=d.get('latest_query_data_id', None),
+                   name=d.get('name', None),
+                   options=_from_dict(d, 'options', QueryOptions),
+                   parent=d.get('parent', None),
+                   permission_tier=_enum(d, 'permission_tier', PermissionLevel),
+                   query=d.get('query', None),
+                   query_hash=d.get('query_hash', None),
+                   run_as_role=_enum(d, 'run_as_role', RunAsRole),
+                   tags=d.get('tags', None),
+                   updated_at=d.get('updated_at', None),
+                   user=_from_dict(d, 'user', User),
+                   user_id=d.get('user_id', None),
+                   visualizations=_repeated_dict(d, 'visualizations', LegacyVisualization))
+
+
+@dataclass
+class LegacyVisualization:
+    """The visualization description API changes frequently and is unsupported. You can duplicate a
+    visualization by copying description objects received _from the API_ and then using them to
+    create a new one with a POST request to the same endpoint. Databricks does not recommend
+    constructing ad-hoc visualizations entirely in JSON."""
+
+    created_at: Optional[str] = None
+
+    description: Optional[str] = None
+    """A short description of this visualization. This is not displayed in the UI."""
+
+    id: Optional[str] = None
+    """The UUID for this visualization."""
+
+    name: Optional[str] = None
+    """The name of the visualization that appears on dashboards and the query screen."""
+
+    options: Optional[Any] = None
+    """The options object varies widely from one visualization type to the next and is unsupported.
+    Databricks does not recommend modifying visualization settings in JSON."""
+
+    query: Optional[LegacyQuery] = None
+
+    type: Optional[str] = None
+    """The type of visualization: chart, table, pivot table, and so on."""
+
+    updated_at: Optional[str] = None
+
+    def as_dict(self) -> dict:
+        """Serializes the LegacyVisualization into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.created_at is not None: body['created_at'] = self.created_at
+        if self.description is not None: body['description'] = self.description
+        if self.id is not None: body['id'] = self.id
+        if self.name is not None: body['name'] = self.name
+        if self.options: body['options'] = self.options
+        if self.query: body['query'] = self.query.as_dict()
+        if self.type is not None: body['type'] = self.type
+        if self.updated_at is not None: body['updated_at'] = self.updated_at
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> LegacyVisualization:
+        """Deserializes the LegacyVisualization from a dictionary."""
+        return cls(created_at=d.get('created_at', None),
+                   description=d.get('description', None),
+                   id=d.get('id', None),
+                   name=d.get('name', None),
+                   options=d.get('options', None),
+                   query=_from_dict(d, 'query', LegacyQuery),
+                   type=d.get('type', None),
+                   updated_at=d.get('updated_at', None))
+
+
+class LifecycleState(Enum):
+
+    ACTIVE = 'ACTIVE'
+    TRASHED = 'TRASHED'
+
+
+@dataclass
+class ListAlertsResponse:
+    next_page_token: Optional[str] = None
+
+    results: Optional[List[ListAlertsResponseAlert]] = None
+
+    def as_dict(self) -> dict:
+        """Serializes the ListAlertsResponse into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.results: body['results'] = [v.as_dict() for v in self.results]
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> ListAlertsResponse:
+        """Deserializes the ListAlertsResponse from a dictionary."""
+        return cls(next_page_token=d.get('next_page_token', None),
+                   results=_repeated_dict(d, 'results', ListAlertsResponseAlert))
+
+
+@dataclass
+class ListAlertsResponseAlert:
+    condition: Optional[AlertCondition] = None
+    """Trigger conditions of the alert."""
+
+    create_time: Optional[str] = None
+    """The timestamp indicating when the alert was created."""
+
+    custom_body: Optional[str] = None
+    """Custom body of alert notification, if it exists. See [here] for custom templating instructions.
+
+    [here]: https://docs.databricks.com/sql/user/alerts/index.html"""
+
+    custom_subject: Optional[str] = None
+    """Custom subject of alert notification, if it exists. This can include email subject entries and
+    Slack notification headers, for example. See [here] for custom templating instructions.
+
+    [here]: https://docs.databricks.com/sql/user/alerts/index.html"""
+
+    display_name: Optional[str] = None
+    """The display name of the alert."""
+
+    id: Optional[str] = None
+    """UUID identifying the alert."""
+
+    lifecycle_state: Optional[LifecycleState] = None
+    """The workspace state of the alert. Used for tracking trashed status."""
+
+    owner_user_name: Optional[str] = None
+    """The owner's username. This field is set to "Unavailable" if the user has been deleted."""
+
+    query_id: Optional[str] = None
+    """UUID of the query attached to the alert."""
+
+    seconds_to_retrigger: Optional[int] = None
+    """Number of seconds an alert must wait after being triggered to rearm itself. After rearming, it
+    can be triggered again. If 0 or not specified, the alert will not be triggered again."""
+
+    state: Optional[AlertState] = None
+    """Current state of the alert's trigger status. This field is set to UNKNOWN if the alert has not
+    yet been evaluated or ran into an error during the last evaluation."""
+
+    trigger_time: Optional[str] = None
+    """Timestamp when the alert was last triggered, if the alert has been triggered before."""
+
+    update_time: Optional[str] = None
+    """The timestamp indicating when the alert was updated."""
+
+    def as_dict(self) -> dict:
+        """Serializes the ListAlertsResponseAlert into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.condition: body['condition'] = self.condition.as_dict()
+        if self.create_time is not None: body['create_time'] = self.create_time
+        if self.custom_body is not None: body['custom_body'] = self.custom_body
+        if self.custom_subject is not None: body['custom_subject'] = self.custom_subject
+        if self.display_name is not None: body['display_name'] = self.display_name
+        if self.id is not None: body['id'] = self.id
+        if self.lifecycle_state is not None: body['lifecycle_state'] = self.lifecycle_state.value
+        if self.owner_user_name is not None: body['owner_user_name'] = self.owner_user_name
+        if self.query_id is not None: body['query_id'] = self.query_id
+        if self.seconds_to_retrigger is not None: body['seconds_to_retrigger'] = self.seconds_to_retrigger
+        if self.state is not None: body['state'] = self.state.value
+        if self.trigger_time is not None: body['trigger_time'] = self.trigger_time
+        if self.update_time is not None: body['update_time'] = self.update_time
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> ListAlertsResponseAlert:
+        """Deserializes the ListAlertsResponseAlert from a dictionary."""
+        return cls(condition=_from_dict(d, 'condition', AlertCondition),
+                   create_time=d.get('create_time', None),
+                   custom_body=d.get('custom_body', None),
+                   custom_subject=d.get('custom_subject', None),
+                   display_name=d.get('display_name', None),
+                   id=d.get('id', None),
+                   lifecycle_state=_enum(d, 'lifecycle_state', LifecycleState),
+                   owner_user_name=d.get('owner_user_name', None),
+                   query_id=d.get('query_id', None),
+                   seconds_to_retrigger=d.get('seconds_to_retrigger', None),
+                   state=_enum(d, 'state', AlertState),
+                   trigger_time=d.get('trigger_time', None),
+                   update_time=d.get('update_time', None))
+
+
 class ListOrder(Enum):

     CREATED_AT = 'created_at'
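The new alert model above is a plain dataclass with `from_dict`/`as_dict` helpers, like the rest of this module. A minimal sketch of mapping a raw list-alerts payload onto it (the payload values are invented for illustration):

    from databricks.sdk.service.sql import LifecycleState, ListAlertsResponse

    # Hypothetical JSON body from the list-alerts endpoint
    raw = {
        'next_page_token': 'page-2',
        'results': [{'id': 'a1b2', 'display_name': 'Nightly failures',
                     'lifecycle_state': 'ACTIVE', 'query_id': 'q-42'}],
    }
    page = ListAlertsResponse.from_dict(raw)
    # Keep only alerts that are not trashed
    active = [a for a in page.results if a.lifecycle_state == LifecycleState.ACTIVE]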
@@ -2019,6 +2894,118 @@ class ListQueriesResponse:
                    res=_repeated_dict(d, 'res', QueryInfo))


+@dataclass
+class ListQueryObjectsResponse:
+    next_page_token: Optional[str] = None
+
+    results: Optional[List[ListQueryObjectsResponseQuery]] = None
+
+    def as_dict(self) -> dict:
+        """Serializes the ListQueryObjectsResponse into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.results: body['results'] = [v.as_dict() for v in self.results]
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> ListQueryObjectsResponse:
+        """Deserializes the ListQueryObjectsResponse from a dictionary."""
+        return cls(next_page_token=d.get('next_page_token', None),
+                   results=_repeated_dict(d, 'results', ListQueryObjectsResponseQuery))
+
+
+@dataclass
+class ListQueryObjectsResponseQuery:
+    apply_auto_limit: Optional[bool] = None
+    """Whether to apply a 1000 row limit to the query result."""
+
+    catalog: Optional[str] = None
+    """Name of the catalog where this query will be executed."""
+
+    create_time: Optional[str] = None
+    """Timestamp when this query was created."""
+
+    description: Optional[str] = None
+    """General description that conveys additional information about this query such as usage notes."""
+
+    display_name: Optional[str] = None
+    """Display name of the query that appears in list views, widget headings, and on the query page."""
+
+    id: Optional[str] = None
+    """UUID identifying the query."""
+
+    last_modifier_user_name: Optional[str] = None
+    """Username of the user who last saved changes to this query."""
+
+    lifecycle_state: Optional[LifecycleState] = None
+    """Indicates whether the query is trashed."""
+
+    owner_user_name: Optional[str] = None
+    """Username of the user that owns the query."""
+
+    parameters: Optional[List[QueryParameter]] = None
+    """List of query parameter definitions."""
+
+    query_text: Optional[str] = None
+    """Text of the query to be run."""
+
+    run_as_mode: Optional[RunAsMode] = None
+    """Sets the "Run as" role for the object."""
+
+    schema: Optional[str] = None
+    """Name of the schema where this query will be executed."""
+
+    tags: Optional[List[str]] = None
+
+    update_time: Optional[str] = None
+    """Timestamp when this query was last updated."""
+
+    warehouse_id: Optional[str] = None
+    """ID of the SQL warehouse attached to the query."""
+
+    def as_dict(self) -> dict:
+        """Serializes the ListQueryObjectsResponseQuery into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.apply_auto_limit is not None: body['apply_auto_limit'] = self.apply_auto_limit
+        if self.catalog is not None: body['catalog'] = self.catalog
+        if self.create_time is not None: body['create_time'] = self.create_time
+        if self.description is not None: body['description'] = self.description
+        if self.display_name is not None: body['display_name'] = self.display_name
+        if self.id is not None: body['id'] = self.id
+        if self.last_modifier_user_name is not None:
+            body['last_modifier_user_name'] = self.last_modifier_user_name
+        if self.lifecycle_state is not None: body['lifecycle_state'] = self.lifecycle_state.value
+        if self.owner_user_name is not None: body['owner_user_name'] = self.owner_user_name
+        if self.parameters: body['parameters'] = [v.as_dict() for v in self.parameters]
+        if self.query_text is not None: body['query_text'] = self.query_text
+        if self.run_as_mode is not None: body['run_as_mode'] = self.run_as_mode.value
+        if self.schema is not None: body['schema'] = self.schema
+        if self.tags: body['tags'] = [v for v in self.tags]
+        if self.update_time is not None: body['update_time'] = self.update_time
+        if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> ListQueryObjectsResponseQuery:
+        """Deserializes the ListQueryObjectsResponseQuery from a dictionary."""
+        return cls(apply_auto_limit=d.get('apply_auto_limit', None),
+                   catalog=d.get('catalog', None),
+                   create_time=d.get('create_time', None),
+                   description=d.get('description', None),
+                   display_name=d.get('display_name', None),
+                   id=d.get('id', None),
+                   last_modifier_user_name=d.get('last_modifier_user_name', None),
+                   lifecycle_state=_enum(d, 'lifecycle_state', LifecycleState),
+                   owner_user_name=d.get('owner_user_name', None),
+                   parameters=_repeated_dict(d, 'parameters', QueryParameter),
+                   query_text=d.get('query_text', None),
+                   run_as_mode=_enum(d, 'run_as_mode', RunAsMode),
+                   schema=d.get('schema', None),
+                   tags=d.get('tags', None),
+                   update_time=d.get('update_time', None),
+                   warehouse_id=d.get('warehouse_id', None))
+
+
 @dataclass
 class ListResponse:
     count: Optional[int] = None
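`ListQueryObjectsResponse` uses the same token-based pagination shape as the other new list responses. A minimal sketch, assuming `fetch_page` stands in for whatever callable actually performs the HTTP GET (it is not part of this file):

    from databricks.sdk.service.sql import ListQueryObjectsResponse

    def iter_queries(fetch_page):
        # Yield ListQueryObjectsResponseQuery items across all pages.
        token = None
        while True:
            page = ListQueryObjectsResponse.from_dict(fetch_page(page_token=token))
            for q in page.results or []:
                yield q
            token = page.next_page_token
            if not token:
                break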
@@ -2051,6 +3038,26 @@ class ListResponse:
                    results=_repeated_dict(d, 'results', Dashboard))


+@dataclass
+class ListVisualizationsForQueryResponse:
+    next_page_token: Optional[str] = None
+
+    results: Optional[List[Visualization]] = None
+
+    def as_dict(self) -> dict:
+        """Serializes the ListVisualizationsForQueryResponse into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.results: body['results'] = [v.as_dict() for v in self.results]
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> ListVisualizationsForQueryResponse:
+        """Deserializes the ListVisualizationsForQueryResponse from a dictionary."""
+        return cls(next_page_token=d.get('next_page_token', None),
+                   results=_repeated_dict(d, 'results', Visualization))
+
+
 @dataclass
 class ListWarehousesResponse:
     warehouses: Optional[List[EndpointInfo]] = None
@@ -2070,9 +3077,6 @@ class ListWarehousesResponse:

 @dataclass
 class MultiValuesOptions:
-    """If specified, allows multiple values to be selected for this parameter. Only applies to dropdown
-    list and query-based dropdown list parameters."""
-
     prefix: Optional[str] = None
     """Character that prefixes each selected parameter value."""

@@ -2098,6 +3102,22 @@ class MultiValuesOptions:
                    suffix=d.get('suffix', None))


+@dataclass
+class NumericValue:
+    value: Optional[float] = None
+
+    def as_dict(self) -> dict:
+        """Serializes the NumericValue into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.value is not None: body['value'] = self.value
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> NumericValue:
+        """Deserializes the NumericValue from a dictionary."""
+        return cls(value=d.get('value', None))
+
+
 class ObjectType(Enum):
     """A singular noun object type."""

@@ -2222,7 +3242,7 @@ class PermissionLevel(Enum):


 class PlansState(Enum):
-    """
+    """Possible Reasons for which we have not saved plans in the database"""

     EMPTY = 'EMPTY'
     EXISTS = 'EXISTS'
@@ -2234,141 +3254,126 @@ class PlansState(Enum):

 @dataclass
 class Query:
-
-    """
+    apply_auto_limit: Optional[bool] = None
+    """Whether to apply a 1000 row limit to the query result."""

-
-    """
+    catalog: Optional[str] = None
+    """Name of the catalog where this query will be executed."""

-
-    """
-    warehouse ID. [Learn more]
-
-    [Learn more]: https://docs.databricks.com/api/workspace/datasources/list"""
+    create_time: Optional[str] = None
+    """Timestamp when this query was created."""

     description: Optional[str] = None
     """General description that conveys additional information about this query such as usage notes."""

-
-    """
-
-    is_archived: Optional[bool] = None
-    """Indicates whether the query is trashed. Trashed queries can't be used in dashboards, or appear
-    in search results. If this boolean is `true`, the `options` property for this query includes a
-    `moved_to_trash_at` timestamp. Trashed queries are permanently deleted after 30 days."""
-
-    is_draft: Optional[bool] = None
-    """Whether the query is a draft. Draft queries only appear in list views for their owners.
-    Visualizations from draft queries cannot appear on dashboards."""
-
-    is_favorite: Optional[bool] = None
-    """Whether this query object appears in the current user's favorites list. This flag determines
-    whether the star icon for favorites is selected."""
-
-    is_safe: Optional[bool] = None
-    """Text parameter types are not safe from SQL injection for all types of data source. Set this
-    Boolean parameter to `true` if a query either does not use any text type parameters or uses a
-    data source type where text type parameters are handled safely."""
-
-    last_modified_by: Optional[User] = None
+    display_name: Optional[str] = None
+    """Display name of the query that appears in list views, widget headings, and on the query page."""

-
-    """
+    id: Optional[str] = None
+    """UUID identifying the query."""

-
-    """
-    this query uses parameters, this field is always null."""
+    last_modifier_user_name: Optional[str] = None
+    """Username of the user who last saved changes to this query."""

-
-    """
+    lifecycle_state: Optional[LifecycleState] = None
+    """Indicates whether the query is trashed."""

-
+    owner_user_name: Optional[str] = None
+    """Username of the user that owns the query."""

-
-    """
+    parameters: Optional[List[QueryParameter]] = None
+    """List of query parameter definitions."""

-
-    """
-    * `CAN_MANAGE`: Can manage the query"""
+    parent_path: Optional[str] = None
+    """Workspace path of the workspace folder containing the object."""

-
-    """
+    query_text: Optional[str] = None
+    """Text of the query to be run."""

-
-    """
+    run_as_mode: Optional[RunAsMode] = None
+    """Sets the "Run as" role for the object."""

-
-    """
-    viewer" behavior) or `"owner"` (signifying "run as owner" behavior)"""
+    schema: Optional[str] = None
+    """Name of the schema where this query will be executed."""

     tags: Optional[List[str]] = None

-
-    """
-
-    user: Optional[User] = None
-
-    user_id: Optional[int] = None
-    """The ID of the user who owns the query."""
+    update_time: Optional[str] = None
+    """Timestamp when this query was last updated."""

-
+    warehouse_id: Optional[str] = None
+    """ID of the SQL warehouse attached to the query."""

     def as_dict(self) -> dict:
         """Serializes the Query into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.
-        if self.
-        if self.
+        if self.apply_auto_limit is not None: body['apply_auto_limit'] = self.apply_auto_limit
+        if self.catalog is not None: body['catalog'] = self.catalog
+        if self.create_time is not None: body['create_time'] = self.create_time
         if self.description is not None: body['description'] = self.description
+        if self.display_name is not None: body['display_name'] = self.display_name
         if self.id is not None: body['id'] = self.id
-        if self.
-
-        if self.
-        if self.
-        if self.
-        if self.
-        if self.
-        if self.
-        if self.
-        if self.parent is not None: body['parent'] = self.parent
-        if self.permission_tier is not None: body['permission_tier'] = self.permission_tier.value
-        if self.query is not None: body['query'] = self.query
-        if self.query_hash is not None: body['query_hash'] = self.query_hash
-        if self.run_as_role is not None: body['run_as_role'] = self.run_as_role.value
+        if self.last_modifier_user_name is not None:
+            body['last_modifier_user_name'] = self.last_modifier_user_name
+        if self.lifecycle_state is not None: body['lifecycle_state'] = self.lifecycle_state.value
+        if self.owner_user_name is not None: body['owner_user_name'] = self.owner_user_name
+        if self.parameters: body['parameters'] = [v.as_dict() for v in self.parameters]
+        if self.parent_path is not None: body['parent_path'] = self.parent_path
+        if self.query_text is not None: body['query_text'] = self.query_text
+        if self.run_as_mode is not None: body['run_as_mode'] = self.run_as_mode.value
+        if self.schema is not None: body['schema'] = self.schema
         if self.tags: body['tags'] = [v for v in self.tags]
-        if self.
-        if self.
-        if self.user_id is not None: body['user_id'] = self.user_id
-        if self.visualizations: body['visualizations'] = [v.as_dict() for v in self.visualizations]
+        if self.update_time is not None: body['update_time'] = self.update_time
+        if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> Query:
         """Deserializes the Query from a dictionary."""
-        return cls(
-
-
+        return cls(apply_auto_limit=d.get('apply_auto_limit', None),
+                   catalog=d.get('catalog', None),
+                   create_time=d.get('create_time', None),
                    description=d.get('description', None),
+                   display_name=d.get('display_name', None),
                    id=d.get('id', None),
-
-
-
-
-
-
-
-
-                   options=_from_dict(d, 'options', QueryOptions),
-                   parent=d.get('parent', None),
-                   permission_tier=_enum(d, 'permission_tier', PermissionLevel),
-                   query=d.get('query', None),
-                   query_hash=d.get('query_hash', None),
-                   run_as_role=_enum(d, 'run_as_role', RunAsRole),
+                   last_modifier_user_name=d.get('last_modifier_user_name', None),
+                   lifecycle_state=_enum(d, 'lifecycle_state', LifecycleState),
+                   owner_user_name=d.get('owner_user_name', None),
+                   parameters=_repeated_dict(d, 'parameters', QueryParameter),
+                   parent_path=d.get('parent_path', None),
+                   query_text=d.get('query_text', None),
+                   run_as_mode=_enum(d, 'run_as_mode', RunAsMode),
+                   schema=d.get('schema', None),
                    tags=d.get('tags', None),
-
-
-
-
+                   update_time=d.get('update_time', None),
+                   warehouse_id=d.get('warehouse_id', None))
+
+
+@dataclass
+class QueryBackedValue:
+    multi_values_options: Optional[MultiValuesOptions] = None
+    """If specified, allows multiple values to be selected for this parameter."""
+
+    query_id: Optional[str] = None
+    """UUID of the query that provides the parameter values."""
+
+    values: Optional[List[str]] = None
+    """List of selected query parameter values."""
+
+    def as_dict(self) -> dict:
+        """Serializes the QueryBackedValue into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.multi_values_options: body['multi_values_options'] = self.multi_values_options.as_dict()
+        if self.query_id is not None: body['query_id'] = self.query_id
+        if self.values: body['values'] = [v for v in self.values]
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> QueryBackedValue:
+        """Deserializes the QueryBackedValue from a dictionary."""
+        return cls(multi_values_options=_from_dict(d, 'multi_values_options', MultiValuesOptions),
+                   query_id=d.get('query_id', None),
+                   values=d.get('values', None))


 @dataclass
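The reworked `Query` model replaces the legacy field names (`name`, `query`, `parent`) with `display_name`, `query_text`, and `parent_path`. A small round-trip sketch with made-up values:

    from databricks.sdk.service.sql import Query

    q = Query.from_dict({
        'id': '0123',                     # UUID identifying the query
        'display_name': 'Daily revenue',  # was `name` on LegacyQuery
        'query_text': 'SELECT 1',         # was `query` on LegacyQuery
        'warehouse_id': 'abc123',
    })
    assert q.as_dict()['query_text'] == 'SELECT 1'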
@@ -2429,12 +3434,11 @@ class QueryEditContent:

 @dataclass
 class QueryFilter:
-
+    context_filter: Optional[ContextFilter] = None
+    """Filter by one or more property describing where the query was generated"""

     query_start_time_range: Optional[TimeRange] = None
-
-    statement_ids: Optional[List[str]] = None
-    """A list of statement IDs."""
+    """A range filter for query submitted time. The time range must be <= 30 days."""

     statuses: Optional[List[QueryStatus]] = None

@@ -2447,8 +3451,8 @@ class QueryFilter:
     def as_dict(self) -> dict:
         """Serializes the QueryFilter into a dictionary suitable for use as a JSON request body."""
         body = {}
+        if self.context_filter: body['context_filter'] = self.context_filter.as_dict()
         if self.query_start_time_range: body['query_start_time_range'] = self.query_start_time_range.as_dict()
-        if self.statement_ids: body['statement_ids'] = [v for v in self.statement_ids]
         if self.statuses: body['statuses'] = [v.value for v in self.statuses]
         if self.user_ids: body['user_ids'] = [v for v in self.user_ids]
         if self.warehouse_ids: body['warehouse_ids'] = [v for v in self.warehouse_ids]
@@ -2457,8 +3461,8 @@ class QueryFilter:
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> QueryFilter:
         """Deserializes the QueryFilter from a dictionary."""
-        return cls(
-
+        return cls(context_filter=_from_dict(d, 'context_filter', ContextFilter),
+                   query_start_time_range=_from_dict(d, 'query_start_time_range', TimeRange),
                    statuses=_repeated_enum(d, 'statuses', QueryStatus),
                    user_ids=d.get('user_ids', None),
                    warehouse_ids=d.get('warehouse_ids', None))
@@ -2466,11 +3470,8 @@ class QueryFilter:

 @dataclass
 class QueryInfo:
-    can_subscribe_to_live_query: Optional[bool] = None
-    """Reserved for internal use."""
-
     channel_used: Optional[ChannelInfo] = None
-    """
+    """SQL Warehouse channel information at the time of query execution"""

     duration: Optional[int] = None
     """Total execution time of the statement ( excluding result fetch time )."""
@@ -2508,6 +3509,8 @@ class QueryInfo:
     query_id: Optional[str] = None
     """The query ID."""

+    query_source: Optional[QuerySource] = None
+
     query_start_time_ms: Optional[int] = None
     """The time the query started."""

@@ -2518,15 +3521,17 @@ class QueryInfo:
     """The number of results returned by the query."""

     spark_ui_url: Optional[str] = None
-    """URL to the query plan."""
+    """URL to the Spark UI query plan."""

     statement_type: Optional[QueryStatementType] = None
     """Type of statement for this query"""

     status: Optional[QueryStatus] = None
-    """Query status with one the following values:
-
-    Query has
+    """Query status with one the following values:
+
+    - `QUEUED`: Query has been received and queued. - `RUNNING`: Query has started. - `CANCELED`:
+    Query has been cancelled by the user. - `FAILED`: Query has failed. - `FINISHED`: Query has
+    completed."""

     user_id: Optional[int] = None
     """The ID of the user who ran the query."""
@@ -2540,8 +3545,6 @@ class QueryInfo:
     def as_dict(self) -> dict:
         """Serializes the QueryInfo into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.can_subscribe_to_live_query is not None:
-            body['canSubscribeToLiveQuery'] = self.can_subscribe_to_live_query
         if self.channel_used: body['channel_used'] = self.channel_used.as_dict()
         if self.duration is not None: body['duration'] = self.duration
         if self.endpoint_id is not None: body['endpoint_id'] = self.endpoint_id
@@ -2555,6 +3558,7 @@ class QueryInfo:
         if self.plans_state is not None: body['plans_state'] = self.plans_state.value
         if self.query_end_time_ms is not None: body['query_end_time_ms'] = self.query_end_time_ms
         if self.query_id is not None: body['query_id'] = self.query_id
+        if self.query_source: body['query_source'] = self.query_source.as_dict()
         if self.query_start_time_ms is not None: body['query_start_time_ms'] = self.query_start_time_ms
         if self.query_text is not None: body['query_text'] = self.query_text
         if self.rows_produced is not None: body['rows_produced'] = self.rows_produced
@@ -2569,8 +3573,7 @@ class QueryInfo:
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> QueryInfo:
         """Deserializes the QueryInfo from a dictionary."""
-        return cls(
-                   channel_used=_from_dict(d, 'channel_used', ChannelInfo),
+        return cls(channel_used=_from_dict(d, 'channel_used', ChannelInfo),
                    duration=d.get('duration', None),
                    endpoint_id=d.get('endpoint_id', None),
                    error_message=d.get('error_message', None),
@@ -2583,6 +3586,7 @@ class QueryInfo:
                    plans_state=_enum(d, 'plans_state', PlansState),
                    query_end_time_ms=d.get('query_end_time_ms', None),
                    query_id=d.get('query_id', None),
+                   query_source=_from_dict(d, 'query_source', QuerySource),
                    query_start_time_ms=d.get('query_start_time_ms', None),
                    query_text=d.get('query_text', None),
                    rows_produced=d.get('rows_produced', None),
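`QueryInfo` now carries an optional `query_source` describing where the statement originated. A sketch of reading it from a query-history payload (the values are invented):

    from databricks.sdk.service.sql import QueryInfo

    info = QueryInfo.from_dict({
        'query_id': 'q-1',
        'query_source': {'job_id': '123'},
    })
    if info.query_source and info.query_source.job_id:
        print(f"query q-1 was issued by job {info.query_source.job_id}")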
@@ -2605,7 +3609,7 @@ class QueryList:
     page_size: Optional[int] = None
     """The number of queries per page."""

-    results: Optional[List[
+    results: Optional[List[LegacyQuery]] = None
     """List of queries returned."""

     def as_dict(self) -> dict:
@@ -2623,12 +3627,13 @@ class QueryList:
         return cls(count=d.get('count', None),
                    page=d.get('page', None),
                    page_size=d.get('page_size', None),
-                   results=_repeated_dict(d, 'results',
+                   results=_repeated_dict(d, 'results', LegacyQuery))


 @dataclass
 class QueryMetrics:
-    """
+    """A query metric that encapsulates a set of measurements for a single query. Metrics come from the
+    driver and are stored in the history service database."""

     compilation_time_ms: Optional[int] = None
     """Time spent loading metadata and optimizing the query, in milliseconds."""
@@ -2636,9 +3641,6 @@ class QueryMetrics:
     execution_time_ms: Optional[int] = None
     """Time spent executing the query, in milliseconds."""

-    metadata_time_ms: Optional[int] = None
-    """Reserved for internal use."""
-
     network_sent_bytes: Optional[int] = None
     """Total amount of data sent over the network between executor nodes during shuffle, in bytes."""

@@ -2649,9 +3651,6 @@ class QueryMetrics:
     photon_total_time_ms: Optional[int] = None
     """Total execution time for all individual Photon query engine tasks in the query, in milliseconds."""

-    planning_time_ms: Optional[int] = None
-    """Reserved for internal use."""
-
     provisioning_queue_start_timestamp: Optional[int] = None
     """Timestamp of when the query was enqueued waiting for a cluster to be provisioned for the
     warehouse. This field is optional and will not appear if the query skipped the provisioning
@@ -2666,9 +3665,6 @@ class QueryMetrics:
     query_compilation_start_timestamp: Optional[int] = None
     """Timestamp of when the underlying compute started compilation of the query."""

-    query_execution_time_ms: Optional[int] = None
-    """Reserved for internal use."""
-
     read_bytes: Optional[int] = None
     """Total size of data read by the query, in bytes."""

@@ -2676,7 +3672,7 @@ class QueryMetrics:
     """Size of persistent data read from the cache, in bytes."""

     read_files_count: Optional[int] = None
-    """Number of files read after pruning
+    """Number of files read after pruning"""

     read_partitions_count: Optional[int] = None
     """Number of partitions read after pruning."""
@@ -2688,7 +3684,7 @@ class QueryMetrics:
     """Time spent fetching the query results after the execution finished, in milliseconds."""

     result_from_cache: Optional[bool] = None
-    """true if the query result was fetched from cache, false otherwise."""
+    """`true` if the query result was fetched from cache, `false` otherwise."""

     rows_produced_count: Optional[int] = None
     """Total number of rows returned by the query."""
@@ -2713,20 +3709,16 @@ class QueryMetrics:
         body = {}
         if self.compilation_time_ms is not None: body['compilation_time_ms'] = self.compilation_time_ms
         if self.execution_time_ms is not None: body['execution_time_ms'] = self.execution_time_ms
-        if self.metadata_time_ms is not None: body['metadata_time_ms'] = self.metadata_time_ms
         if self.network_sent_bytes is not None: body['network_sent_bytes'] = self.network_sent_bytes
         if self.overloading_queue_start_timestamp is not None:
             body['overloading_queue_start_timestamp'] = self.overloading_queue_start_timestamp
         if self.photon_total_time_ms is not None: body['photon_total_time_ms'] = self.photon_total_time_ms
-        if self.planning_time_ms is not None: body['planning_time_ms'] = self.planning_time_ms
         if self.provisioning_queue_start_timestamp is not None:
             body['provisioning_queue_start_timestamp'] = self.provisioning_queue_start_timestamp
         if self.pruned_bytes is not None: body['pruned_bytes'] = self.pruned_bytes
         if self.pruned_files_count is not None: body['pruned_files_count'] = self.pruned_files_count
         if self.query_compilation_start_timestamp is not None:
             body['query_compilation_start_timestamp'] = self.query_compilation_start_timestamp
-        if self.query_execution_time_ms is not None:
-            body['query_execution_time_ms'] = self.query_execution_time_ms
         if self.read_bytes is not None: body['read_bytes'] = self.read_bytes
         if self.read_cache_bytes is not None: body['read_cache_bytes'] = self.read_cache_bytes
         if self.read_files_count is not None: body['read_files_count'] = self.read_files_count
@@ -2747,16 +3739,13 @@ class QueryMetrics:
         """Deserializes the QueryMetrics from a dictionary."""
         return cls(compilation_time_ms=d.get('compilation_time_ms', None),
                    execution_time_ms=d.get('execution_time_ms', None),
-                   metadata_time_ms=d.get('metadata_time_ms', None),
                    network_sent_bytes=d.get('network_sent_bytes', None),
                    overloading_queue_start_timestamp=d.get('overloading_queue_start_timestamp', None),
                    photon_total_time_ms=d.get('photon_total_time_ms', None),
-                   planning_time_ms=d.get('planning_time_ms', None),
                    provisioning_queue_start_timestamp=d.get('provisioning_queue_start_timestamp', None),
                    pruned_bytes=d.get('pruned_bytes', None),
                    pruned_files_count=d.get('pruned_files_count', None),
                    query_compilation_start_timestamp=d.get('query_compilation_start_timestamp', None),
-                   query_execution_time_ms=d.get('query_execution_time_ms', None),
                    read_bytes=d.get('read_bytes', None),
                    read_cache_bytes=d.get('read_cache_bytes', None),
                    read_files_count=d.get('read_files_count', None),
@@ -2804,6 +3793,59 @@ class QueryOptions:
                    schema=d.get('schema', None))


+@dataclass
+class QueryParameter:
+    date_range_value: Optional[DateRangeValue] = None
+    """Date-range query parameter value. Can only specify one of `dynamic_date_range_value` or
+    `date_range_value`."""
+
+    date_value: Optional[DateValue] = None
+    """Date query parameter value. Can only specify one of `dynamic_date_value` or `date_value`."""
+
+    enum_value: Optional[EnumValue] = None
+    """Dropdown query parameter value."""
+
+    name: Optional[str] = None
+    """Literal parameter marker that appears between double curly braces in the query text."""
+
+    numeric_value: Optional[NumericValue] = None
+    """Numeric query parameter value."""
+
+    query_backed_value: Optional[QueryBackedValue] = None
+    """Query-based dropdown query parameter value."""
+
+    text_value: Optional[TextValue] = None
+    """Text query parameter value."""
+
+    title: Optional[str] = None
+    """Text displayed in the user-facing parameter widget in the UI."""
+
+    def as_dict(self) -> dict:
+        """Serializes the QueryParameter into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.date_range_value: body['date_range_value'] = self.date_range_value.as_dict()
+        if self.date_value: body['date_value'] = self.date_value.as_dict()
+        if self.enum_value: body['enum_value'] = self.enum_value.as_dict()
+        if self.name is not None: body['name'] = self.name
+        if self.numeric_value: body['numeric_value'] = self.numeric_value.as_dict()
+        if self.query_backed_value: body['query_backed_value'] = self.query_backed_value.as_dict()
+        if self.text_value: body['text_value'] = self.text_value.as_dict()
+        if self.title is not None: body['title'] = self.title
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> QueryParameter:
+        """Deserializes the QueryParameter from a dictionary."""
+        return cls(date_range_value=_from_dict(d, 'date_range_value', DateRangeValue),
+                   date_value=_from_dict(d, 'date_value', DateValue),
+                   enum_value=_from_dict(d, 'enum_value', EnumValue),
+                   name=d.get('name', None),
+                   numeric_value=_from_dict(d, 'numeric_value', NumericValue),
+                   query_backed_value=_from_dict(d, 'query_backed_value', QueryBackedValue),
+                   text_value=_from_dict(d, 'text_value', TextValue),
+                   title=d.get('title', None))
+
+
 @dataclass
 class QueryPostContent:
     data_source_id: Optional[str] = None
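Each `QueryParameter` carries its value in one of the typed `*_value` members. A construction sketch (the parameter name and value are illustrative only):

    from databricks.sdk.service.sql import NumericValue, QueryParameter

    limit_param = QueryParameter(name='row_limit',
                                 title='Row limit',
                                 numeric_value=NumericValue(value=1000))
    # Unset members are omitted from the serialized request body.
    assert limit_param.as_dict() == {
        'name': 'row_limit',
        'numeric_value': {'value': 1000},
        'title': 'Row limit',
    }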
@@ -2823,46 +3865,225 @@ class QueryPostContent:
     `title`, `name`, `type`, and `value` properties. The `value` field here is the default value. It
     can be overridden at runtime."""

-    parent: Optional[str] = None
-    """The identifier of the workspace folder containing the object."""
+    parent: Optional[str] = None
+    """The identifier of the workspace folder containing the object."""
+
+    query: Optional[str] = None
+    """The text of the query to be run."""
+
+    run_as_role: Optional[RunAsRole] = None
+    """Sets the **Run as** role for the object. Must be set to one of `"viewer"` (signifying "run as
+    viewer" behavior) or `"owner"` (signifying "run as owner" behavior)"""
+
+    tags: Optional[List[str]] = None
+
+    def as_dict(self) -> dict:
+        """Serializes the QueryPostContent into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.data_source_id is not None: body['data_source_id'] = self.data_source_id
+        if self.description is not None: body['description'] = self.description
+        if self.name is not None: body['name'] = self.name
+        if self.options: body['options'] = self.options
+        if self.parent is not None: body['parent'] = self.parent
+        if self.query is not None: body['query'] = self.query
+        if self.run_as_role is not None: body['run_as_role'] = self.run_as_role.value
+        if self.tags: body['tags'] = [v for v in self.tags]
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> QueryPostContent:
+        """Deserializes the QueryPostContent from a dictionary."""
+        return cls(data_source_id=d.get('data_source_id', None),
+                   description=d.get('description', None),
+                   name=d.get('name', None),
+                   options=d.get('options', None),
+                   parent=d.get('parent', None),
+                   query=d.get('query', None),
+                   run_as_role=_enum(d, 'run_as_role', RunAsRole),
+                   tags=d.get('tags', None))
+
+
+@dataclass
+class QuerySource:
+    alert_id: Optional[str] = None
+    """UUID"""
+
+    client_call_context: Optional[ClientCallContext] = None
+    """Client code that triggered the request"""
+
+    command_id: Optional[str] = None
+    """Id associated with a notebook cell"""
+
+    command_run_id: Optional[str] = None
+    """Id associated with a notebook run or execution"""
+
+    dashboard_id: Optional[str] = None
+    """UUID"""
+
+    dashboard_v3_id: Optional[str] = None
+    """UUID for Lakeview Dashboards, separate from DBSQL Dashboards (dashboard_id)"""
+
+    driver_info: Optional[QuerySourceDriverInfo] = None
+
+    entry_point: Optional[QuerySourceEntryPoint] = None
+    """Spark service that received and processed the query"""
+
+    genie_space_id: Optional[str] = None
+    """UUID for Genie space"""
+
+    is_cloud_fetch: Optional[bool] = None
+
+    is_databricks_sql_exec_api: Optional[bool] = None
+
+    job_id: Optional[str] = None
+
+    job_managed_by: Optional[QuerySourceJobManager] = None
+    """With background compute, jobs can be managed by different internal teams. When not specified,
+    not a background compute job When specified and the value is not JOBS, it is a background
+    compute job"""
+
+    notebook_id: Optional[str] = None
+
+    pipeline_id: Optional[str] = None
+    """Id associated with a DLT pipeline"""
+
+    pipeline_update_id: Optional[str] = None
+    """Id associated with a DLT update"""
+
+    query_tags: Optional[str] = None
+    """String provided by a customer that'll help them identify the query"""
+
+    run_id: Optional[str] = None
+    """Id associated with a job run or execution"""
+
+    runnable_command_id: Optional[str] = None
+    """Id associated with a notebook cell run or execution"""
+
+    scheduled_by: Optional[QuerySourceTrigger] = None
+
+    serverless_channel_info: Optional[ServerlessChannelInfo] = None
+
+    source_query_id: Optional[str] = None
+    """UUID"""
+
+    def as_dict(self) -> dict:
+        """Serializes the QuerySource into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.alert_id is not None: body['alert_id'] = self.alert_id
+        if self.client_call_context: body['client_call_context'] = self.client_call_context.as_dict()
+        if self.command_id is not None: body['command_id'] = self.command_id
+        if self.command_run_id is not None: body['command_run_id'] = self.command_run_id
+        if self.dashboard_id is not None: body['dashboard_id'] = self.dashboard_id
+        if self.dashboard_v3_id is not None: body['dashboard_v3_id'] = self.dashboard_v3_id
+        if self.driver_info: body['driver_info'] = self.driver_info.as_dict()
+        if self.entry_point is not None: body['entry_point'] = self.entry_point.value
+        if self.genie_space_id is not None: body['genie_space_id'] = self.genie_space_id
+        if self.is_cloud_fetch is not None: body['is_cloud_fetch'] = self.is_cloud_fetch
+        if self.is_databricks_sql_exec_api is not None:
+            body['is_databricks_sql_exec_api'] = self.is_databricks_sql_exec_api
+        if self.job_id is not None: body['job_id'] = self.job_id
+        if self.job_managed_by is not None: body['job_managed_by'] = self.job_managed_by.value
+        if self.notebook_id is not None: body['notebook_id'] = self.notebook_id
+        if self.pipeline_id is not None: body['pipeline_id'] = self.pipeline_id
+        if self.pipeline_update_id is not None: body['pipeline_update_id'] = self.pipeline_update_id
|
|
3989
|
+
if self.query_tags is not None: body['query_tags'] = self.query_tags
|
|
3990
|
+
if self.run_id is not None: body['run_id'] = self.run_id
|
|
3991
|
+
if self.runnable_command_id is not None: body['runnable_command_id'] = self.runnable_command_id
|
|
3992
|
+
if self.scheduled_by is not None: body['scheduled_by'] = self.scheduled_by.value
|
|
3993
|
+
if self.serverless_channel_info:
|
|
3994
|
+
body['serverless_channel_info'] = self.serverless_channel_info.as_dict()
|
|
3995
|
+
if self.source_query_id is not None: body['source_query_id'] = self.source_query_id
|
|
3996
|
+
return body
|
|
3997
|
+
|
|
3998
|
+
@classmethod
|
|
3999
|
+
def from_dict(cls, d: Dict[str, any]) -> QuerySource:
|
|
4000
|
+
"""Deserializes the QuerySource from a dictionary."""
|
|
4001
|
+
return cls(alert_id=d.get('alert_id', None),
|
|
4002
|
+
client_call_context=_from_dict(d, 'client_call_context', ClientCallContext),
|
|
4003
|
+
command_id=d.get('command_id', None),
|
|
4004
|
+
command_run_id=d.get('command_run_id', None),
|
|
4005
|
+
dashboard_id=d.get('dashboard_id', None),
|
|
4006
|
+
dashboard_v3_id=d.get('dashboard_v3_id', None),
|
|
4007
|
+
driver_info=_from_dict(d, 'driver_info', QuerySourceDriverInfo),
|
|
4008
|
+
entry_point=_enum(d, 'entry_point', QuerySourceEntryPoint),
|
|
4009
|
+
genie_space_id=d.get('genie_space_id', None),
|
|
4010
|
+
is_cloud_fetch=d.get('is_cloud_fetch', None),
|
|
4011
|
+
is_databricks_sql_exec_api=d.get('is_databricks_sql_exec_api', None),
|
|
4012
|
+
job_id=d.get('job_id', None),
|
|
4013
|
+
job_managed_by=_enum(d, 'job_managed_by', QuerySourceJobManager),
|
|
4014
|
+
notebook_id=d.get('notebook_id', None),
|
|
4015
|
+
pipeline_id=d.get('pipeline_id', None),
|
|
4016
|
+
pipeline_update_id=d.get('pipeline_update_id', None),
|
|
4017
|
+
query_tags=d.get('query_tags', None),
|
|
4018
|
+
run_id=d.get('run_id', None),
|
|
4019
|
+
runnable_command_id=d.get('runnable_command_id', None),
|
|
4020
|
+
scheduled_by=_enum(d, 'scheduled_by', QuerySourceTrigger),
|
|
4021
|
+
serverless_channel_info=_from_dict(d, 'serverless_channel_info', ServerlessChannelInfo),
|
|
4022
|
+
source_query_id=d.get('source_query_id', None))
|
|
4023
|
+
|
|
2828
4024
|
|
|
2829
|
-
|
|
2830
|
-
|
|
4025
|
+
@dataclass
|
|
4026
|
+
class QuerySourceDriverInfo:
|
|
4027
|
+
bi_tool_entry: Optional[str] = None
|
|
2831
4028
|
|
|
2832
|
-
|
|
2833
|
-
"""Sets the **Run as** role for the object. Must be set to one of `"viewer"` (signifying "run as
|
|
2834
|
-
viewer" behavior) or `"owner"` (signifying "run as owner" behavior)"""
|
|
4029
|
+
driver_name: Optional[str] = None
|
|
2835
4030
|
|
|
2836
|
-
|
|
4031
|
+
simba_branding_vendor: Optional[str] = None
|
|
4032
|
+
|
|
4033
|
+
version_number: Optional[str] = None
|
|
2837
4034
|
|
|
2838
4035
|
def as_dict(self) -> dict:
|
|
2839
|
-
"""Serializes the
|
|
4036
|
+
"""Serializes the QuerySourceDriverInfo into a dictionary suitable for use as a JSON request body."""
|
|
2840
4037
|
body = {}
|
|
2841
|
-
if self.
|
|
2842
|
-
if self.
|
|
2843
|
-
if self.
|
|
2844
|
-
if self.
|
|
2845
|
-
if self.parent is not None: body['parent'] = self.parent
|
|
2846
|
-
if self.query is not None: body['query'] = self.query
|
|
2847
|
-
if self.run_as_role is not None: body['run_as_role'] = self.run_as_role.value
|
|
2848
|
-
if self.tags: body['tags'] = [v for v in self.tags]
|
|
4038
|
+
if self.bi_tool_entry is not None: body['bi_tool_entry'] = self.bi_tool_entry
|
|
4039
|
+
if self.driver_name is not None: body['driver_name'] = self.driver_name
|
|
4040
|
+
if self.simba_branding_vendor is not None: body['simba_branding_vendor'] = self.simba_branding_vendor
|
|
4041
|
+
if self.version_number is not None: body['version_number'] = self.version_number
|
|
2849
4042
|
return body
|
|
2850
4043
|
|
|
2851
4044
|
@classmethod
|
|
2852
|
-
def from_dict(cls, d: Dict[str, any]) ->
|
|
2853
|
-
"""Deserializes the
|
|
2854
|
-
return cls(
|
|
2855
|
-
|
|
2856
|
-
|
|
2857
|
-
|
|
2858
|
-
|
|
2859
|
-
|
|
2860
|
-
|
|
2861
|
-
|
|
4045
|
+
def from_dict(cls, d: Dict[str, any]) -> QuerySourceDriverInfo:
|
|
4046
|
+
"""Deserializes the QuerySourceDriverInfo from a dictionary."""
|
|
4047
|
+
return cls(bi_tool_entry=d.get('bi_tool_entry', None),
|
|
4048
|
+
driver_name=d.get('driver_name', None),
|
|
4049
|
+
simba_branding_vendor=d.get('simba_branding_vendor', None),
|
|
4050
|
+
version_number=d.get('version_number', None))
|
|
4051
|
+
|
|
4052
|
+
|
|
4053
|
+
class QuerySourceEntryPoint(Enum):
|
|
4054
|
+
"""Spark service that received and processed the query"""
|
|
4055
|
+
|
|
4056
|
+
DLT = 'DLT'
|
|
4057
|
+
SPARK_CONNECT = 'SPARK_CONNECT'
|
|
4058
|
+
THRIFT_SERVER = 'THRIFT_SERVER'
|
|
4059
|
+
|
|
4060
|
+
|
|
4061
|
+
class QuerySourceJobManager(Enum):
|
|
4062
|
+
"""Copied from elastic-spark-common/api/messages/manager.proto with enum values changed by 1 to
|
|
4063
|
+
accommodate JOB_MANAGER_UNSPECIFIED"""
|
|
4064
|
+
|
|
4065
|
+
APP_SYSTEM_TABLE = 'APP_SYSTEM_TABLE'
|
|
4066
|
+
AUTOML = 'AUTOML'
|
|
4067
|
+
AUTO_MAINTENANCE = 'AUTO_MAINTENANCE'
|
|
4068
|
+
CLEAN_ROOMS = 'CLEAN_ROOMS'
|
|
4069
|
+
DATA_MONITORING = 'DATA_MONITORING'
|
|
4070
|
+
DATA_SHARING = 'DATA_SHARING'
|
|
4071
|
+
ENCRYPTION = 'ENCRYPTION'
|
|
4072
|
+
FABRIC_CRAWLER = 'FABRIC_CRAWLER'
|
|
4073
|
+
JOBS = 'JOBS'
|
|
4074
|
+
LAKEVIEW = 'LAKEVIEW'
|
|
4075
|
+
MANAGED_RAG = 'MANAGED_RAG'
|
|
4076
|
+
SCHEDULED_MV_REFRESH = 'SCHEDULED_MV_REFRESH'
|
|
4077
|
+
TESTING = 'TESTING'
|
|
4078
|
+
|
|
4079
|
+
|
|
4080
|
+
class QuerySourceTrigger(Enum):
|
|
4081
|
+
|
|
4082
|
+
MANUAL = 'MANUAL'
|
|
4083
|
+
SCHEDULED = 'SCHEDULED'
|
|
2862
4084
|
|
|
2863
4085
|
|
|
2864
4086
|
class QueryStatementType(Enum):
|
|
2865
|
-
"""Type of statement for this query"""
|
|
2866
4087
|
|
|
2867
4088
|
ALTER = 'ALTER'
|
|
2868
4089
|
ANALYZE = 'ANALYZE'
|
|
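The generated dataclasses in this hunk all follow the same as_dict/from_dict convention, so a query-source payload can be round-tripped without touching any private helpers. A minimal sketch, assuming the classes above are imported from databricks.sdk.service.sql and using an illustrative payload:

    from databricks.sdk.service import sql

    # Deserialize a hypothetical query-history payload into a typed QuerySource.
    source = sql.QuerySource.from_dict({
        'dashboard_id': 'abc-123',
        'entry_point': 'THRIFT_SERVER',
        'scheduled_by': 'SCHEDULED',
    })
    assert source.entry_point is sql.QuerySourceEntryPoint.THRIFT_SERVER

    # as_dict() collapses enums back to their wire strings for request bodies.
    assert source.as_dict()['scheduled_by'] == 'SCHEDULED'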
@@ -2889,15 +4110,16 @@ class QueryStatementType(Enum):


 class QueryStatus(Enum):
-    """
-    `RUNNING`: Query has started. * `CANCELED`: Query has been cancelled by the user. * `FAILED`:
-    Query has failed. * `FINISHED`: Query has completed."""
+    """Statuses which are also used by OperationStatus in runtime"""

     CANCELED = 'CANCELED'
+    COMPILED = 'COMPILED'
+    COMPILING = 'COMPILING'
     FAILED = 'FAILED'
     FINISHED = 'FINISHED'
     QUEUED = 'QUEUED'
     RUNNING = 'RUNNING'
+    STARTED = 'STARTED'


 @dataclass
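QueryStatus now mirrors the runtime OperationStatus values, so COMPILING, COMPILED and STARTED can appear alongside the familiar terminal states. A small sketch of treating only CANCELED/FAILED/FINISHED as terminal (the TERMINAL set is an illustration, not part of the SDK):

    from databricks.sdk.service import sql

    TERMINAL = {sql.QueryStatus.CANCELED, sql.QueryStatus.FAILED, sql.QueryStatus.FINISHED}

    def is_done(status: sql.QueryStatus) -> bool:
        # QUEUED, COMPILING, COMPILED, STARTED and RUNNING are all still in flight.
        return status in TERMINAL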
@@ -2938,12 +4160,6 @@ class RestoreResponse:

 @dataclass
 class ResultData:
-    """Contains the result data of a single chunk when using `INLINE` disposition. When using
-    `EXTERNAL_LINKS` disposition, the array `external_links` is used instead to provide presigned
-    URLs to the result data in cloud storage. Exactly one of these alternatives is used. (While the
-    `external_links` array prepares the API to return multiple links in a single response. Currently
-    only a single link is returned.)"""
-
     byte_count: Optional[int] = None
     """The number of bytes in the result chunk. This field is not available when using `INLINE`
     disposition."""
@@ -3070,6 +4286,12 @@ class ResultSchema:
         return cls(column_count=d.get('column_count', None), columns=_repeated_dict(d, 'columns', ColumnInfo))


+class RunAsMode(Enum):
+
+    OWNER = 'OWNER'
+    VIEWER = 'VIEWER'
+
+
 class RunAsRole(Enum):
     """Sets the **Run as** role for the object. Must be set to one of `"viewer"` (signifying "run as
     viewer" behavior) or `"owner"` (signifying "run as owner" behavior)"""
@@ -3078,6 +4300,23 @@ class RunAsRole(Enum):
     VIEWER = 'viewer'


+@dataclass
+class ServerlessChannelInfo:
+    name: Optional[ChannelName] = None
+    """Name of the Channel"""
+
+    def as_dict(self) -> dict:
+        """Serializes the ServerlessChannelInfo into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.name is not None: body['name'] = self.name.value
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> ServerlessChannelInfo:
+        """Deserializes the ServerlessChannelInfo from a dictionary."""
+        return cls(name=_enum(d, 'name', ChannelName))
+
+
 @dataclass
 class ServiceError:
     error_code: Optional[ServiceErrorCode] = None
@@ -3296,6 +4535,38 @@ class StatementParameterListItem:
         return cls(name=d.get('name', None), type=d.get('type', None), value=d.get('value', None))


+@dataclass
+class StatementResponse:
+    manifest: Optional[ResultManifest] = None
+    """The result manifest provides schema and metadata for the result set."""
+
+    result: Optional[ResultData] = None
+
+    statement_id: Optional[str] = None
+    """The statement ID is returned upon successfully submitting a SQL statement, and is a required
+    reference for all subsequent calls."""
+
+    status: Optional[StatementStatus] = None
+    """The status response includes execution state and if relevant, error information."""
+
+    def as_dict(self) -> dict:
+        """Serializes the StatementResponse into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.manifest: body['manifest'] = self.manifest.as_dict()
+        if self.result: body['result'] = self.result.as_dict()
+        if self.statement_id is not None: body['statement_id'] = self.statement_id
+        if self.status: body['status'] = self.status.as_dict()
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> StatementResponse:
+        """Deserializes the StatementResponse from a dictionary."""
+        return cls(manifest=_from_dict(d, 'manifest', ResultManifest),
+                   result=_from_dict(d, 'result', ResultData),
+                   statement_id=d.get('statement_id', None),
+                   status=_from_dict(d, 'status', StatementStatus))
+
+
 class StatementState(Enum):
     """Statement execution state: - `PENDING`: waiting for warehouse - `RUNNING`: running -
     `SUCCEEDED`: execution was successful, result data available for fetch - `FAILED`: execution
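StatementResponse bundles what a statement-execution poller needs: the statement id, its status, and, once available, the manifest and first result chunk. A hedged sketch of decoding a raw REST payload with from_dict (the payload literal is illustrative, and StatementStatus/StatementState are assumed to carry the usual state field):

    from databricks.sdk.service import sql

    raw = {'statement_id': '01ef-0000-0000', 'status': {'state': 'SUCCEEDED'}}
    resp = sql.StatementResponse.from_dict(raw)
    if resp.status and resp.status.state == sql.StatementState.SUCCEEDED:
        # The column schema lives on the manifest once the result is ready.
        columns = resp.manifest.schema.columns if resp.manifest and resp.manifest.schema else None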
@@ -3501,13 +4772,29 @@ class TerminationReasonType(Enum):
     SUCCESS = 'SUCCESS'


+@dataclass
+class TextValue:
+    value: Optional[str] = None
+
+    def as_dict(self) -> dict:
+        """Serializes the TextValue into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.value is not None: body['value'] = self.value
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> TextValue:
+        """Deserializes the TextValue from a dictionary."""
+        return cls(value=d.get('value', None))
+
+
 @dataclass
 class TimeRange:
     end_time_ms: Optional[int] = None
-    """
+    """The end time in milliseconds."""

     start_time_ms: Optional[int] = None
-    """
+    """The start time in milliseconds."""

     def as_dict(self) -> dict:
         """Serializes the TimeRange into a dictionary suitable for use as a JSON request body."""
@@ -3539,6 +4826,179 @@ class TransferOwnershipObjectId:
         return cls(new_owner=d.get('new_owner', None))


+@dataclass
+class UpdateAlertRequest:
+    update_mask: str
+    """Field mask is required to be passed into the PATCH request. Field mask specifies which fields of
+    the setting payload will be updated. The field mask needs to be supplied as single string. To
+    specify multiple fields in the field mask, use comma as the separator (no space)."""
+
+    alert: Optional[UpdateAlertRequestAlert] = None
+
+    id: Optional[str] = None
+
+    def as_dict(self) -> dict:
+        """Serializes the UpdateAlertRequest into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.alert: body['alert'] = self.alert.as_dict()
+        if self.id is not None: body['id'] = self.id
+        if self.update_mask is not None: body['update_mask'] = self.update_mask
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> UpdateAlertRequest:
+        """Deserializes the UpdateAlertRequest from a dictionary."""
+        return cls(alert=_from_dict(d, 'alert', UpdateAlertRequestAlert),
+                   id=d.get('id', None),
+                   update_mask=d.get('update_mask', None))
+
+
+@dataclass
+class UpdateAlertRequestAlert:
+    condition: Optional[AlertCondition] = None
+    """Trigger conditions of the alert."""
+
+    custom_body: Optional[str] = None
+    """Custom body of alert notification, if it exists. See [here] for custom templating instructions.
+
+    [here]: https://docs.databricks.com/sql/user/alerts/index.html"""
+
+    custom_subject: Optional[str] = None
+    """Custom subject of alert notification, if it exists. This can include email subject entries and
+    Slack notification headers, for example. See [here] for custom templating instructions.
+
+    [here]: https://docs.databricks.com/sql/user/alerts/index.html"""
+
+    display_name: Optional[str] = None
+    """The display name of the alert."""
+
+    owner_user_name: Optional[str] = None
+    """The owner's username. This field is set to "Unavailable" if the user has been deleted."""
+
+    query_id: Optional[str] = None
+    """UUID of the query attached to the alert."""
+
+    seconds_to_retrigger: Optional[int] = None
+    """Number of seconds an alert must wait after being triggered to rearm itself. After rearming, it
+    can be triggered again. If 0 or not specified, the alert will not be triggered again."""
+
+    def as_dict(self) -> dict:
+        """Serializes the UpdateAlertRequestAlert into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.condition: body['condition'] = self.condition.as_dict()
+        if self.custom_body is not None: body['custom_body'] = self.custom_body
+        if self.custom_subject is not None: body['custom_subject'] = self.custom_subject
+        if self.display_name is not None: body['display_name'] = self.display_name
+        if self.owner_user_name is not None: body['owner_user_name'] = self.owner_user_name
+        if self.query_id is not None: body['query_id'] = self.query_id
+        if self.seconds_to_retrigger is not None: body['seconds_to_retrigger'] = self.seconds_to_retrigger
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> UpdateAlertRequestAlert:
+        """Deserializes the UpdateAlertRequestAlert from a dictionary."""
+        return cls(condition=_from_dict(d, 'condition', AlertCondition),
+                   custom_body=d.get('custom_body', None),
+                   custom_subject=d.get('custom_subject', None),
+                   display_name=d.get('display_name', None),
+                   owner_user_name=d.get('owner_user_name', None),
+                   query_id=d.get('query_id', None),
+                   seconds_to_retrigger=d.get('seconds_to_retrigger', None))
+
+
+@dataclass
+class UpdateQueryRequest:
+    update_mask: str
+    """Field mask is required to be passed into the PATCH request. Field mask specifies which fields of
+    the setting payload will be updated. The field mask needs to be supplied as single string. To
+    specify multiple fields in the field mask, use comma as the separator (no space)."""
+
+    id: Optional[str] = None
+
+    query: Optional[UpdateQueryRequestQuery] = None
+
+    def as_dict(self) -> dict:
+        """Serializes the UpdateQueryRequest into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.id is not None: body['id'] = self.id
+        if self.query: body['query'] = self.query.as_dict()
+        if self.update_mask is not None: body['update_mask'] = self.update_mask
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> UpdateQueryRequest:
+        """Deserializes the UpdateQueryRequest from a dictionary."""
+        return cls(id=d.get('id', None),
+                   query=_from_dict(d, 'query', UpdateQueryRequestQuery),
+                   update_mask=d.get('update_mask', None))
+
+
+@dataclass
+class UpdateQueryRequestQuery:
+    apply_auto_limit: Optional[bool] = None
+    """Whether to apply a 1000 row limit to the query result."""
+
+    catalog: Optional[str] = None
+    """Name of the catalog where this query will be executed."""
+
+    description: Optional[str] = None
+    """General description that conveys additional information about this query such as usage notes."""
+
+    display_name: Optional[str] = None
+    """Display name of the query that appears in list views, widget headings, and on the query page."""
+
+    owner_user_name: Optional[str] = None
+    """Username of the user that owns the query."""
+
+    parameters: Optional[List[QueryParameter]] = None
+    """List of query parameter definitions."""
+
+    query_text: Optional[str] = None
+    """Text of the query to be run."""
+
+    run_as_mode: Optional[RunAsMode] = None
+    """Sets the "Run as" role for the object."""
+
+    schema: Optional[str] = None
+    """Name of the schema where this query will be executed."""
+
+    tags: Optional[List[str]] = None
+
+    warehouse_id: Optional[str] = None
+    """ID of the SQL warehouse attached to the query."""
+
+    def as_dict(self) -> dict:
+        """Serializes the UpdateQueryRequestQuery into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.apply_auto_limit is not None: body['apply_auto_limit'] = self.apply_auto_limit
+        if self.catalog is not None: body['catalog'] = self.catalog
+        if self.description is not None: body['description'] = self.description
+        if self.display_name is not None: body['display_name'] = self.display_name
+        if self.owner_user_name is not None: body['owner_user_name'] = self.owner_user_name
+        if self.parameters: body['parameters'] = [v.as_dict() for v in self.parameters]
+        if self.query_text is not None: body['query_text'] = self.query_text
+        if self.run_as_mode is not None: body['run_as_mode'] = self.run_as_mode.value
+        if self.schema is not None: body['schema'] = self.schema
+        if self.tags: body['tags'] = [v for v in self.tags]
+        if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> UpdateQueryRequestQuery:
+        """Deserializes the UpdateQueryRequestQuery from a dictionary."""
+        return cls(apply_auto_limit=d.get('apply_auto_limit', None),
+                   catalog=d.get('catalog', None),
+                   description=d.get('description', None),
+                   display_name=d.get('display_name', None),
+                   owner_user_name=d.get('owner_user_name', None),
+                   parameters=_repeated_dict(d, 'parameters', QueryParameter),
+                   query_text=d.get('query_text', None),
+                   run_as_mode=_enum(d, 'run_as_mode', RunAsMode),
+                   schema=d.get('schema', None),
+                   tags=d.get('tags', None),
+                   warehouse_id=d.get('warehouse_id', None))
+
+
 @dataclass
 class UpdateResponse:

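The update_mask documented above is what makes these PATCH payloads partial: only the comma-separated fields named in the mask are written, and everything else on the stored object is preserved. A hedged sketch of how such a request body is assembled from these dataclasses (the id and display name are placeholders):

    from databricks.sdk.service import sql

    patch = sql.UpdateQueryRequestQuery(display_name='Daily revenue',
                                        query_text='SELECT 1',
                                        run_as_mode=sql.RunAsMode.VIEWER)
    req = sql.UpdateQueryRequest(update_mask='display_name,query_text,run_as_mode',
                                 id='1234-abcd',
                                 query=patch)
    body = req.as_dict()
    # Enums collapse to their wire values, e.g. body['query']['run_as_mode'] == 'VIEWER'.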
@@ -3553,6 +5013,67 @@ class UpdateResponse:
         return cls()


+@dataclass
+class UpdateVisualizationRequest:
+    update_mask: str
+    """Field mask is required to be passed into the PATCH request. Field mask specifies which fields of
+    the setting payload will be updated. The field mask needs to be supplied as single string. To
+    specify multiple fields in the field mask, use comma as the separator (no space)."""
+
+    id: Optional[str] = None
+
+    visualization: Optional[UpdateVisualizationRequestVisualization] = None
+
+    def as_dict(self) -> dict:
+        """Serializes the UpdateVisualizationRequest into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.id is not None: body['id'] = self.id
+        if self.update_mask is not None: body['update_mask'] = self.update_mask
+        if self.visualization: body['visualization'] = self.visualization.as_dict()
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> UpdateVisualizationRequest:
+        """Deserializes the UpdateVisualizationRequest from a dictionary."""
+        return cls(id=d.get('id', None),
+                   update_mask=d.get('update_mask', None),
+                   visualization=_from_dict(d, 'visualization', UpdateVisualizationRequestVisualization))
+
+
+@dataclass
+class UpdateVisualizationRequestVisualization:
+    display_name: Optional[str] = None
+    """The display name of the visualization."""
+
+    serialized_options: Optional[str] = None
+    """The visualization options varies widely from one visualization type to the next and is
+    unsupported. Databricks does not recommend modifying visualization options directly."""
+
+    serialized_query_plan: Optional[str] = None
+    """The visualization query plan varies widely from one visualization type to the next and is
+    unsupported. Databricks does not recommend modifying the visualization query plan directly."""
+
+    type: Optional[str] = None
+    """The type of visualization: counter, table, funnel, and so on."""
+
+    def as_dict(self) -> dict:
+        """Serializes the UpdateVisualizationRequestVisualization into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.display_name is not None: body['display_name'] = self.display_name
+        if self.serialized_options is not None: body['serialized_options'] = self.serialized_options
+        if self.serialized_query_plan is not None: body['serialized_query_plan'] = self.serialized_query_plan
+        if self.type is not None: body['type'] = self.type
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> UpdateVisualizationRequestVisualization:
+        """Deserializes the UpdateVisualizationRequestVisualization from a dictionary."""
+        return cls(display_name=d.get('display_name', None),
+                   serialized_options=d.get('serialized_options', None),
+                   serialized_query_plan=d.get('serialized_query_plan', None),
+                   type=d.get('type', None))
+
+
 @dataclass
 class User:
     email: Optional[str] = None
@@ -3577,57 +5098,56 @@ class User:

 @dataclass
 class Visualization:
-
-
-    create a new one with a POST request to the same endpoint. Databricks does not recommend
-    constructing ad-hoc visualizations entirely in JSON."""
-
-    created_at: Optional[str] = None
+    create_time: Optional[str] = None
+    """The timestamp indicating when the visualization was created."""

-
-    """
+    display_name: Optional[str] = None
+    """The display name of the visualization."""

     id: Optional[str] = None
-    """
+    """UUID identifying the visualization."""

-
-    """
+    query_id: Optional[str] = None
+    """UUID of the query that the visualization is attached to."""

-
-    """The options
-    Databricks does not recommend modifying visualization
+    serialized_options: Optional[str] = None
+    """The visualization options varies widely from one visualization type to the next and is
+    unsupported. Databricks does not recommend modifying visualization options directly."""

-
+    serialized_query_plan: Optional[str] = None
+    """The visualization query plan varies widely from one visualization type to the next and is
+    unsupported. Databricks does not recommend modifying the visualization query plan directly."""

     type: Optional[str] = None
-    """The type of visualization:
+    """The type of visualization: counter, table, funnel, and so on."""

-
+    update_time: Optional[str] = None
+    """The timestamp indicating when the visualization was updated."""

     def as_dict(self) -> dict:
         """Serializes the Visualization into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.
-        if self.
+        if self.create_time is not None: body['create_time'] = self.create_time
+        if self.display_name is not None: body['display_name'] = self.display_name
         if self.id is not None: body['id'] = self.id
-        if self.
-        if self.
-        if self.
+        if self.query_id is not None: body['query_id'] = self.query_id
+        if self.serialized_options is not None: body['serialized_options'] = self.serialized_options
+        if self.serialized_query_plan is not None: body['serialized_query_plan'] = self.serialized_query_plan
         if self.type is not None: body['type'] = self.type
-        if self.
+        if self.update_time is not None: body['update_time'] = self.update_time
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> Visualization:
-        """Deserializes the Visualization from a dictionary."""
-        return cls(
-
+        """Deserializes the Visualization from a dictionary."""
+        return cls(create_time=d.get('create_time', None),
+                   display_name=d.get('display_name', None),
                    id=d.get('id', None),
-
-
-
+                   query_id=d.get('query_id', None),
+                   serialized_options=d.get('serialized_options', None),
+                   serialized_query_plan=d.get('serialized_query_plan', None),
                    type=d.get('type', None),
-
+                   update_time=d.get('update_time', None))


 @dataclass
@@ -3730,6 +5250,7 @@ class WarehousePermissionLevel(Enum):
     """Permission level"""

     CAN_MANAGE = 'CAN_MANAGE'
+    CAN_MONITOR = 'CAN_MONITOR'
     CAN_USE = 'CAN_USE'
     IS_OWNER = 'IS_OWNER'

@@ -3842,7 +5363,7 @@ class Widget:

     options: Optional[WidgetOptions] = None

-    visualization: Optional[
+    visualization: Optional[LegacyVisualization] = None
     """The visualization description API changes frequently and is unsupported. You can duplicate a
     visualization by copying description objects received _from the API_ and then using them to
     create a new one with a POST request to the same endpoint. Databricks does not recommend
@@ -3865,7 +5386,7 @@ class Widget:
         """Deserializes the Widget from a dictionary."""
         return cls(id=d.get('id', None),
                    options=_from_dict(d, 'options', WidgetOptions),
-                   visualization=_from_dict(d, 'visualization',
+                   visualization=_from_dict(d, 'visualization', LegacyVisualization),
                    width=d.get('width', None))

@@ -3959,14 +5480,123 @@ class WidgetPosition:


 class AlertsAPI:
+    """The alerts API can be used to perform CRUD operations on alerts. An alert is a Databricks SQL object that
+    periodically runs a query, evaluates a condition of its result, and notifies one or more users and/or
+    notification destinations if the condition was met. Alerts can be scheduled using the `sql_task` type of
+    the Jobs API, e.g. :method:jobs/create."""
+
+    def __init__(self, api_client):
+        self._api = api_client
+
+    def create(self, *, alert: Optional[CreateAlertRequestAlert] = None) -> Alert:
+        """Create an alert.
+
+        Creates an alert.
+
+        :param alert: :class:`CreateAlertRequestAlert` (optional)
+
+        :returns: :class:`Alert`
+        """
+        body = {}
+        if alert is not None: body['alert'] = alert.as_dict()
+        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+
+        res = self._api.do('POST', '/api/2.0/sql/alerts', body=body, headers=headers)
+        return Alert.from_dict(res)
+
+    def delete(self, id: str):
+        """Delete an alert.
+
+        Moves an alert to the trash. Trashed alerts immediately disappear from searches and list views, and
+        can no longer trigger. You can restore a trashed alert through the UI. A trashed alert is permanently
+        deleted after 30 days.
+
+        :param id: str
+
+        """
+
+        headers = {'Accept': 'application/json', }
+
+        self._api.do('DELETE', f'/api/2.0/sql/alerts/{id}', headers=headers)
+
+    def get(self, id: str) -> Alert:
+        """Get an alert.
+
+        Gets an alert.
+
+        :param id: str
+
+        :returns: :class:`Alert`
+        """
+
+        headers = {'Accept': 'application/json', }
+
+        res = self._api.do('GET', f'/api/2.0/sql/alerts/{id}', headers=headers)
+        return Alert.from_dict(res)
+
+    def list(self,
+             *,
+             page_size: Optional[int] = None,
+             page_token: Optional[str] = None) -> Iterator[ListAlertsResponseAlert]:
+        """List alerts.
+
+        Gets a list of alerts accessible to the user, ordered by creation time. **Warning:** Calling this API
+        concurrently 10 or more times could result in throttling, service degradation, or a temporary ban.
+
+        :param page_size: int (optional)
+        :param page_token: str (optional)
+
+        :returns: Iterator over :class:`ListAlertsResponseAlert`
+        """
+
+        query = {}
+        if page_size is not None: query['page_size'] = page_size
+        if page_token is not None: query['page_token'] = page_token
+        headers = {'Accept': 'application/json', }
+
+        while True:
+            json = self._api.do('GET', '/api/2.0/sql/alerts', query=query, headers=headers)
+            if 'results' in json:
+                for v in json['results']:
+                    yield ListAlertsResponseAlert.from_dict(v)
+            if 'next_page_token' not in json or not json['next_page_token']:
+                return
+            query['page_token'] = json['next_page_token']
+
+    def update(self, id: str, update_mask: str, *, alert: Optional[UpdateAlertRequestAlert] = None) -> Alert:
+        """Update an alert.
+
+        Updates an alert.
+
+        :param id: str
+        :param update_mask: str
+          Field mask is required to be passed into the PATCH request. Field mask specifies which fields of the
+          setting payload will be updated. The field mask needs to be supplied as single string. To specify
+          multiple fields in the field mask, use comma as the separator (no space).
+        :param alert: :class:`UpdateAlertRequestAlert` (optional)
+
+        :returns: :class:`Alert`
+        """
+        body = {}
+        if alert is not None: body['alert'] = alert.as_dict()
+        if update_mask is not None: body['update_mask'] = update_mask
+        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+
+        res = self._api.do('PATCH', f'/api/2.0/sql/alerts/{id}', body=body, headers=headers)
+        return Alert.from_dict(res)
+
+
+class AlertsLegacyAPI:
     """The alerts API can be used to perform CRUD operations on alerts. An alert is a Databricks SQL object that
     periodically runs a query, evaluates a condition of its result, and notifies one or more users and/or
     notification destinations if the condition was met. Alerts can be scheduled using the `sql_task` type of
     the Jobs API, e.g. :method:jobs/create.

-    **Note**: A new version of the Databricks SQL API
+    **Note**: A new version of the Databricks SQL API is now available. Please see the latest version. [Learn
+    more]

-    [Learn more]: https://docs.databricks.com/en/
+    [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html"""

     def __init__(self, api_client):
         self._api = api_client
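The new AlertsAPI above maps directly onto the /api/2.0/sql/alerts endpoints, and its list() method drives the next_page_token loop on behalf of the caller. A hedged usage sketch, assuming the class is exposed on the workspace client as w.alerts and using placeholder names and ids (a real alert would normally also carry a condition):

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service import sql

    w = WorkspaceClient()
    # Iterates every accessible alert; pagination is handled inside list().
    for a in w.alerts.list(page_size=100):
        print(a.id, a.display_name)

    created = w.alerts.create(alert=sql.CreateAlertRequestAlert(display_name='example alert',
                                                                query_id='<query-uuid>'))
    renamed = w.alerts.update(created.id, 'display_name',
                              alert=sql.UpdateAlertRequestAlert(display_name='example alert (renamed)'))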
@@ -3977,15 +5607,16 @@ class AlertsAPI:
               query_id: str,
               *,
               parent: Optional[str] = None,
-              rearm: Optional[int] = None) ->
+              rearm: Optional[int] = None) -> LegacyAlert:
        """Create an alert.

        Creates an alert. An alert is a Databricks SQL object that periodically runs a query, evaluates a
        condition of its result, and notifies users or notification destinations if the condition was met.

-        **Note**: A new version of the Databricks SQL API
+        **Note**: A new version of the Databricks SQL API is now available. Please use :method:alerts/create
+        instead. [Learn more]

-        [Learn more]: https://docs.databricks.com/en/
+        [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html

        :param name: str
          Name of the alert.
@@ -3999,7 +5630,7 @@ class AlertsAPI:
          Number of seconds after being triggered before the alert rearms itself and can be triggered again.
          If `null`, alert will never be triggered again.

-        :returns: :class:`
+        :returns: :class:`LegacyAlert`
        """
        body = {}
        if name is not None: body['name'] = name
@@ -4010,7 +5641,7 @@ class AlertsAPI:
        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }

        res = self._api.do('POST', '/api/2.0/preview/sql/alerts', body=body, headers=headers)
-        return
+        return LegacyAlert.from_dict(res)

    def delete(self, alert_id: str):
        """Delete an alert.
@@ -4018,9 +5649,10 @@ class AlertsAPI:
        Deletes an alert. Deleted alerts are no longer accessible and cannot be restored. **Note**: Unlike
        queries and dashboards, alerts cannot be moved to the trash.

-        **Note**: A new version of the Databricks SQL API
+        **Note**: A new version of the Databricks SQL API is now available. Please use :method:alerts/delete
+        instead. [Learn more]

-        [Learn more]: https://docs.databricks.com/en/
+        [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html

        :param alert_id: str

@@ -4031,41 +5663,43 @@ class AlertsAPI:

        self._api.do('DELETE', f'/api/2.0/preview/sql/alerts/{alert_id}', headers=headers)

-    def get(self, alert_id: str) ->
+    def get(self, alert_id: str) -> LegacyAlert:
        """Get an alert.

        Gets an alert.

-        **Note**: A new version of the Databricks SQL API
+        **Note**: A new version of the Databricks SQL API is now available. Please use :method:alerts/get
+        instead. [Learn more]

-        [Learn more]: https://docs.databricks.com/en/
+        [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html

        :param alert_id: str

-        :returns: :class:`
+        :returns: :class:`LegacyAlert`
        """

        headers = {'Accept': 'application/json', }

        res = self._api.do('GET', f'/api/2.0/preview/sql/alerts/{alert_id}', headers=headers)
-        return
+        return LegacyAlert.from_dict(res)

-    def list(self) -> Iterator[
+    def list(self) -> Iterator[LegacyAlert]:
        """Get alerts.

        Gets a list of alerts.

-        **Note**: A new version of the Databricks SQL API
+        **Note**: A new version of the Databricks SQL API is now available. Please use :method:alerts/list
+        instead. [Learn more]

-        [Learn more]: https://docs.databricks.com/en/
+        [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html

-        :returns: Iterator over :class:`
+        :returns: Iterator over :class:`LegacyAlert`
        """

        headers = {'Accept': 'application/json', }

        res = self._api.do('GET', '/api/2.0/preview/sql/alerts', headers=headers)
-        return [
+        return [LegacyAlert.from_dict(v) for v in res]

    def update(self,
               alert_id: str,
@@ -4078,9 +5712,10 @@ class AlertsAPI:

        Updates an alert.

-        **Note**: A new version of the Databricks SQL API
+        **Note**: A new version of the Databricks SQL API is now available. Please use :method:alerts/update
+        instead. [Learn more]

-        [Learn more]: https://docs.databricks.com/en/
+        [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html

        :param alert_id: str
        :param name: str
@@ -4380,9 +6015,9 @@ class DataSourcesAPI:
    advise you to use any text editor, REST client, or `grep` to search the response from this API for the
    name of your SQL warehouse as it appears in Databricks SQL.

-    **Note**: A new version of the Databricks SQL API
+    **Note**: A new version of the Databricks SQL API is now available. [Learn more]

-    [Learn more]: https://docs.databricks.com/en/
+    [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html"""

    def __init__(self, api_client):
        self._api = api_client
@@ -4394,9 +6029,10 @@ class DataSourcesAPI:
        API response are enumerated for clarity. However, you need only a SQL warehouse's `id` to create new
        queries against it.

-        **Note**: A new version of the Databricks SQL API
+        **Note**: A new version of the Databricks SQL API is now available. Please use :method:warehouses/list
+        instead. [Learn more]

-        [Learn more]: https://docs.databricks.com/en/
+        [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html

        :returns: Iterator over :class:`DataSource`
        """
@@ -4420,9 +6056,9 @@ class DbsqlPermissionsAPI:

    - `CAN_MANAGE`: Allows all actions: read, run, edit, delete, modify permissions (superset of `CAN_RUN`)

-    **Note**: A new version of the Databricks SQL API
+    **Note**: A new version of the Databricks SQL API is now available. [Learn more]

-    [Learn more]: https://docs.databricks.com/en/
+    [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html"""

    def __init__(self, api_client):
        self._api = api_client
@@ -4432,9 +6068,10 @@ class DbsqlPermissionsAPI:

        Gets a JSON representation of the access control list (ACL) for a specified object.

-        **Note**: A new version of the Databricks SQL API
+        **Note**: A new version of the Databricks SQL API is now available. Please use
+        :method:workspace/getpermissions instead. [Learn more]

-        [Learn more]: https://docs.databricks.com/en/
+        [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html

        :param object_type: :class:`ObjectTypePlural`
          The type of object permissions to check.
@@ -4461,9 +6098,10 @@ class DbsqlPermissionsAPI:
        Sets the access control list (ACL) for a specified object. This operation will complete rewrite the
        ACL.

-        **Note**: A new version of the Databricks SQL API
+        **Note**: A new version of the Databricks SQL API is now available. Please use
+        :method:workspace/setpermissions instead. [Learn more]

-        [Learn more]: https://docs.databricks.com/en/
+        [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html

        :param object_type: :class:`ObjectTypePlural`
          The type of object permission to set.
@@ -4493,6131,10 @@ class DbsqlPermissionsAPI:

        Transfers ownership of a dashboard, query, or alert to an active user. Requires an admin API key.

-        **Note**: A new version of the Databricks SQL API
+        **Note**: A new version of the Databricks SQL API is now available. For queries and alerts, please use
+        :method:queries/update and :method:alerts/update respectively instead. [Learn more]

-        [Learn more]: https://docs.databricks.com/en/
+        [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html

        :param object_type: :class:`OwnableObjectType`
          The type of object on which to change ownership.
@@ -4518,13 +6157,154 @@ class DbsqlPermissionsAPI:


 class QueriesAPI:
+    """The queries API can be used to perform CRUD operations on queries. A query is a Databricks SQL object that
+    includes the target SQL warehouse, query text, name, description, tags, and parameters. Queries can be
+    scheduled using the `sql_task` type of the Jobs API, e.g. :method:jobs/create."""
+
+    def __init__(self, api_client):
+        self._api = api_client
+
+    def create(self, *, query: Optional[CreateQueryRequestQuery] = None) -> Query:
+        """Create a query.
+
+        Creates a query.
+
+        :param query: :class:`CreateQueryRequestQuery` (optional)
+
+        :returns: :class:`Query`
+        """
+        body = {}
+        if query is not None: body['query'] = query.as_dict()
+        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+
+        res = self._api.do('POST', '/api/2.0/sql/queries', body=body, headers=headers)
+        return Query.from_dict(res)
+
+    def delete(self, id: str):
+        """Delete a query.
+
+        Moves a query to the trash. Trashed queries immediately disappear from searches and list views, and
+        cannot be used for alerts. You can restore a trashed query through the UI. A trashed query is
+        permanently deleted after 30 days.
+
+        :param id: str
+
+        """
+
+        headers = {'Accept': 'application/json', }
+
+        self._api.do('DELETE', f'/api/2.0/sql/queries/{id}', headers=headers)
+
+    def get(self, id: str) -> Query:
+        """Get a query.
+
+        Gets a query.
+
+        :param id: str
+
+        :returns: :class:`Query`
+        """
+
+        headers = {'Accept': 'application/json', }
+
+        res = self._api.do('GET', f'/api/2.0/sql/queries/{id}', headers=headers)
+        return Query.from_dict(res)
+
+    def list(self,
+             *,
+             page_size: Optional[int] = None,
+             page_token: Optional[str] = None) -> Iterator[ListQueryObjectsResponseQuery]:
+        """List queries.
+
+        Gets a list of queries accessible to the user, ordered by creation time. **Warning:** Calling this API
+        concurrently 10 or more times could result in throttling, service degradation, or a temporary ban.
+
+        :param page_size: int (optional)
+        :param page_token: str (optional)
+
+        :returns: Iterator over :class:`ListQueryObjectsResponseQuery`
+        """
+
+        query = {}
+        if page_size is not None: query['page_size'] = page_size
+        if page_token is not None: query['page_token'] = page_token
+        headers = {'Accept': 'application/json', }
+
+        while True:
+            json = self._api.do('GET', '/api/2.0/sql/queries', query=query, headers=headers)
+            if 'results' in json:
+                for v in json['results']:
+                    yield ListQueryObjectsResponseQuery.from_dict(v)
+            if 'next_page_token' not in json or not json['next_page_token']:
+                return
+            query['page_token'] = json['next_page_token']
+
+    def list_visualizations(self,
+                            id: str,
+                            *,
+                            page_size: Optional[int] = None,
+                            page_token: Optional[str] = None) -> Iterator[Visualization]:
+        """List visualizations on a query.
+
+        Gets a list of visualizations on a query.
+
+        :param id: str
+        :param page_size: int (optional)
+        :param page_token: str (optional)
+
+        :returns: Iterator over :class:`Visualization`
+        """
+
+        query = {}
+        if page_size is not None: query['page_size'] = page_size
+        if page_token is not None: query['page_token'] = page_token
+        headers = {'Accept': 'application/json', }
+
+        while True:
+            json = self._api.do('GET',
+                                f'/api/2.0/sql/queries/{id}/visualizations',
+                                query=query,
+                                headers=headers)
+            if 'results' in json:
+                for v in json['results']:
+                    yield Visualization.from_dict(v)
+            if 'next_page_token' not in json or not json['next_page_token']:
+                return
+            query['page_token'] = json['next_page_token']
+
+    def update(self, id: str, update_mask: str, *, query: Optional[UpdateQueryRequestQuery] = None) -> Query:
+        """Update a query.
+
+        Updates a query.
+
+        :param id: str
+        :param update_mask: str
+          Field mask is required to be passed into the PATCH request. Field mask specifies which fields of the
+          setting payload will be updated. The field mask needs to be supplied as single string. To specify
+          multiple fields in the field mask, use comma as the separator (no space).
+        :param query: :class:`UpdateQueryRequestQuery` (optional)
+
+        :returns: :class:`Query`
+        """
+        body = {}
+        if query is not None: body['query'] = query.as_dict()
+        if update_mask is not None: body['update_mask'] = update_mask
+        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+
+        res = self._api.do('PATCH', f'/api/2.0/sql/queries/{id}', body=body, headers=headers)
+        return Query.from_dict(res)
+
+
+class QueriesLegacyAPI:
     """These endpoints are used for CRUD operations on query definitions. Query definitions include the target
     SQL warehouse, query text, name, description, tags, parameters, and visualizations. Queries can be
     scheduled using the `sql_task` type of the Jobs API, e.g. :method:jobs/create.

-    **Note**: A new version of the Databricks SQL API
+    **Note**: A new version of the Databricks SQL API is now available. Please see the latest version. [Learn
+    more]

-    [Learn more]: https://docs.databricks.com/en/
+    [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html"""

     def __init__(self, api_client):
         self._api = api_client
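QueriesAPI follows the same shape against /api/2.0/sql/queries, with list() and list_visualizations() both walking next_page_token exactly as shown above. A hedged sketch, assuming the service is exposed on the workspace client as w.queries and using a placeholder warehouse id and SQL text:

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service import sql

    w = WorkspaceClient()
    q = w.queries.create(query=sql.CreateQueryRequestQuery(display_name='nightly rollup',
                                                           warehouse_id='<warehouse-id>',
                                                           query_text='SELECT 42'))
    # Only the masked field changes; the rest of the stored query is left untouched.
    w.queries.update(q.id, 'query_text',
                     query=sql.UpdateQueryRequestQuery(query_text='SELECT 43'))
    for viz in w.queries.list_visualizations(q.id):
        print(viz.display_name, viz.type)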
@@ -4538,7 +6318,7 @@ class QueriesAPI:
|
|
|
4538
6318
|
parent: Optional[str] = None,
|
|
4539
6319
|
query: Optional[str] = None,
|
|
4540
6320
|
run_as_role: Optional[RunAsRole] = None,
|
|
4541
|
-
tags: Optional[List[str]] = None) ->
|
|
6321
|
+
tags: Optional[List[str]] = None) -> LegacyQuery:
|
|
4542
6322
|
"""Create a new query definition.
|
|
4543
6323
|
|
|
4544
6324
|
Creates a new query definition. Queries created with this endpoint belong to the authenticated user
|
|
@@ -4550,9 +6330,10 @@ class QueriesAPI:

         **Note**: You cannot add a visualization until you create the query.

-        **Note**: A new version of the Databricks SQL API
+        **Note**: A new version of the Databricks SQL API is now available. Please use :method:queries/create
+        instead. [Learn more]

-        [Learn more]: https://docs.databricks.com/en/
+        [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html

         :param data_source_id: str (optional)
           Data source ID maps to the ID of the data source used by the resource and is distinct from the
@@ -4576,7 +6357,7 @@ class QueriesAPI:
           viewer" behavior) or `"owner"` (signifying "run as owner" behavior)
         :param tags: List[str] (optional)

-        :returns: :class:`
+        :returns: :class:`LegacyQuery`
         """
         body = {}
         if data_source_id is not None: body['data_source_id'] = data_source_id
@@ -4590,7 +6371,7 @@ class QueriesAPI:
         headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }

         res = self._api.do('POST', '/api/2.0/preview/sql/queries', body=body, headers=headers)
-        return
+        return LegacyQuery.from_dict(res)

     def delete(self, query_id: str):
         """Delete a query.
@@ -4598,9 +6379,10 @@ class QueriesAPI:
         Moves a query to the trash. Trashed queries immediately disappear from searches and list views, and
         they cannot be used for alerts. The trash is deleted after 30 days.

-        **Note**: A new version of the Databricks SQL API
+        **Note**: A new version of the Databricks SQL API is now available. Please use :method:queries/delete
+        instead. [Learn more]

-        [Learn more]: https://docs.databricks.com/en/
+        [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html

         :param query_id: str

@@ -4611,32 +6393,33 @@ class QueriesAPI:

         self._api.do('DELETE', f'/api/2.0/preview/sql/queries/{query_id}', headers=headers)

-    def get(self, query_id: str) ->
+    def get(self, query_id: str) -> LegacyQuery:
         """Get a query definition.

         Retrieve a query object definition along with contextual permissions information about the currently
         authenticated user.

-        **Note**: A new version of the Databricks SQL API
+        **Note**: A new version of the Databricks SQL API is now available. Please use :method:queries/get
+        instead. [Learn more]

-        [Learn more]: https://docs.databricks.com/en/
+        [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html

         :param query_id: str

-        :returns: :class:`
+        :returns: :class:`LegacyQuery`
         """

         headers = {'Accept': 'application/json', }

         res = self._api.do('GET', f'/api/2.0/preview/sql/queries/{query_id}', headers=headers)
-        return
+        return LegacyQuery.from_dict(res)

     def list(self,
              *,
              order: Optional[str] = None,
              page: Optional[int] = None,
              page_size: Optional[int] = None,
-             q: Optional[str] = None) -> Iterator[
+             q: Optional[str] = None) -> Iterator[LegacyQuery]:
         """Get a list of queries.

         Gets a list of queries. Optionally, this list can be filtered by a search term.
@@ -4644,9 +6427,10 @@ class QueriesAPI:
         **Warning**: Calling this API concurrently 10 or more times could result in throttling, service
         degradation, or a temporary ban.

-        **Note**: A new version of the Databricks SQL API
+        **Note**: A new version of the Databricks SQL API is now available. Please use :method:queries/list
+        instead. [Learn more]

-        [Learn more]: https://docs.databricks.com/en/
+        [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html

         :param order: str (optional)
           Name of query attribute to order by. Default sort order is ascending. Append a dash (`-`) to order
@@ -4669,7 +6453,7 @@ class QueriesAPI:
         :param q: str (optional)
           Full text search term

-        :returns: Iterator over :class:`
+        :returns: Iterator over :class:`LegacyQuery`
         """

         query = {}
@@ -4690,7 +6474,7 @@ class QueriesAPI:
                    if i in seen:
                        continue
                    seen.add(i)
-                   yield
+                   yield LegacyQuery.from_dict(v)
             if 'results' not in json or not json['results']:
                 return
             query['page'] += 1
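The legacy iterator above de-duplicates results by id and advances its 1-based `page` counter itself, so callers just consume the generator. A sketch under the assumption that the client exposes this deprecated service as `w.queries_legacy` (the accessor name does not appear in the diff):

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()

    # Full-text search; the generator stops once a page comes back empty.
    for legacy_query in w.queries_legacy.list(q="revenue", page_size=100):
        print(legacy_query.id, legacy_query.name)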
@@ -4701,9 +6485,10 @@ class QueriesAPI:
         Restore a query that has been moved to the trash. A restored query appears in list views and searches.
         You can use restored queries for alerts.

-        **Note**: A new version of the Databricks SQL API
+        **Note**: A new version of the Databricks SQL API is now available. Please see the latest version.
+        [Learn more]

-        [Learn more]: https://docs.databricks.com/en/
+        [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html

         :param query_id: str

@@ -4723,16 +6508,17 @@ class QueriesAPI:
                options: Optional[Any] = None,
                query: Optional[str] = None,
                run_as_role: Optional[RunAsRole] = None,
-               tags: Optional[List[str]] = None) ->
+               tags: Optional[List[str]] = None) -> LegacyQuery:
         """Change a query definition.

         Modify this query definition.

         **Note**: You cannot undo this operation.

-        **Note**: A new version of the Databricks SQL API
+        **Note**: A new version of the Databricks SQL API is now available. Please use :method:queries/update
+        instead. [Learn more]

-        [Learn more]: https://docs.databricks.com/en/
+        [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html

         :param query_id: str
         :param data_source_id: str (optional)
@@ -4755,7 +6541,7 @@ class QueriesAPI:
           viewer" behavior) or `"owner"` (signifying "run as owner" behavior)
         :param tags: List[str] (optional)

-        :returns: :class:`
+        :returns: :class:`LegacyQuery`
         """
         body = {}
         if data_source_id is not None: body['data_source_id'] = data_source_id
@@ -4768,11 +6554,12 @@ class QueriesAPI:
         headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }

         res = self._api.do('POST', f'/api/2.0/preview/sql/queries/{query_id}', body=body, headers=headers)
-        return
+        return LegacyQuery.from_dict(res)


 class QueryHistoryAPI:
-    """
+    """A service responsible for storing and retrieving the list of queries run against SQL endpoints, serverless
+    compute, and DLT."""

     def __init__(self, api_client):
         self._api = api_client
@@ -4780,49 +6567,112 @@ class QueryHistoryAPI:
     def list(self,
              *,
              filter_by: Optional[QueryFilter] = None,
-             include_metrics: Optional[bool] = None,
              max_results: Optional[int] = None,
-             page_token: Optional[str] = None) ->
+             page_token: Optional[str] = None) -> ListQueriesResponse:
         """List Queries.

-        List the history of queries through SQL warehouses.
+        List the history of queries through SQL warehouses, serverless compute, and DLT.

-        You can filter by user ID, warehouse ID, status, and time range.
+        You can filter by user ID, warehouse ID, status, and time range. Most recently started queries are
+        returned first (up to max_results in request). The pagination token returned in response can be used
+        to list subsequent query statuses.

         :param filter_by: :class:`QueryFilter` (optional)
           A filter to limit query history results. This field is optional.
-        :param include_metrics: bool (optional)
-          Whether to include metrics about query.
         :param max_results: int (optional)
-          Limit the number of results returned in one page.
+          Limit the number of results returned in one page. Must be less than 1000 and the default is 100.
         :param page_token: str (optional)
           A token that can be used to get the next page of results. The token can contains characters that
           need to be encoded before using it in a URL. For example, the character '+' needs to be replaced by
-          %2B.
+          %2B. This field is optional.

-        :returns:
+        :returns: :class:`ListQueriesResponse`
         """

         query = {}
         if filter_by is not None: query['filter_by'] = filter_by.as_dict()
-        if include_metrics is not None: query['include_metrics'] = include_metrics
         if max_results is not None: query['max_results'] = max_results
         if page_token is not None: query['page_token'] = page_token
         headers = {'Accept': 'application/json', }

-
-
-            if 'res' in json:
-                for v in json['res']:
-                    yield QueryInfo.from_dict(v)
-            if 'next_page_token' not in json or not json['next_page_token']:
-                return
-            query['page_token'] = json['next_page_token']
+        res = self._api.do('GET', '/api/2.0/sql/history/queries', query=query, headers=headers)
+        return ListQueriesResponse.from_dict(res)


 class QueryVisualizationsAPI:
+    """This is an evolving API that facilitates the addition and removal of visualizations from existing queries
+    in the Databricks Workspace. Data structures can change over time."""
+
+    def __init__(self, api_client):
+        self._api = api_client
+
+    def create(self,
+               *,
+               visualization: Optional[CreateVisualizationRequestVisualization] = None) -> Visualization:
+        """Add a visualization to a query.
+
+        Adds a visualization to a query.
+
+        :param visualization: :class:`CreateVisualizationRequestVisualization` (optional)
+
+        :returns: :class:`Visualization`
+        """
+        body = {}
+        if visualization is not None: body['visualization'] = visualization.as_dict()
+        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+
+        res = self._api.do('POST', '/api/2.0/sql/visualizations', body=body, headers=headers)
+        return Visualization.from_dict(res)
+
+    def delete(self, id: str):
+        """Remove a visualization.
+
+        Removes a visualization.
+
+        :param id: str
+
+
+        """
+
+        headers = {'Accept': 'application/json', }
+
+        self._api.do('DELETE', f'/api/2.0/sql/visualizations/{id}', headers=headers)
+
+    def update(self,
+               id: str,
+               update_mask: str,
+               *,
+               visualization: Optional[UpdateVisualizationRequestVisualization] = None) -> Visualization:
+        """Update a visualization.
+
+        Updates a visualization.
+
+        :param id: str
+        :param update_mask: str
+          Field mask is required to be passed into the PATCH request. Field mask specifies which fields of the
+          setting payload will be updated. The field mask needs to be supplied as single string. To specify
+          multiple fields in the field mask, use comma as the separator (no space).
+        :param visualization: :class:`UpdateVisualizationRequestVisualization` (optional)
+
+        :returns: :class:`Visualization`
+        """
+        body = {}
+        if update_mask is not None: body['update_mask'] = update_mask
+        if visualization is not None: body['visualization'] = visualization.as_dict()
+        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+
+        res = self._api.do('PATCH', f'/api/2.0/sql/visualizations/{id}', body=body, headers=headers)
+        return Visualization.from_dict(res)
+
+
+class QueryVisualizationsLegacyAPI:
     """This is an evolving API that facilitates the addition and removal of vizualisations from existing queries
-    within the Databricks Workspace. Data structures may change over time.
+    within the Databricks Workspace. Data structures may change over time.
+
+    **Note**: A new version of the Databricks SQL API is now available. Please see the latest version. [Learn
+    more]
+
+    [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html"""

     def __init__(self, api_client):
         self._api = api_client
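Two behavioural changes in the hunk above are easy to miss: `QueryHistoryAPI.list` no longer yields `QueryInfo` objects (and drops `include_metrics`), returning a single `ListQueriesResponse` page per call, and a non-legacy `QueryVisualizationsAPI` is introduced next to it. A sketch of the caller-driven pagination, assuming the accessor `w.query_history` and the response fields `res` and `next_page_token`, which come from the SDK's `ListQueriesResponse` type rather than from this hunk:

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service import sql

    w = WorkspaceClient()

    page_token = None
    while True:
        resp = w.query_history.list(
            filter_by=sql.QueryFilter(warehouse_ids=["<warehouse-id>"]),  # hypothetical ID
            max_results=100,  # must stay below 1000; the default is 100
            page_token=page_token)
        for info in resp.res or []:
            print(info.query_id, info.status)
        page_token = resp.next_page_token
        if not page_token:
            break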
@@ -4833,9 +6683,16 @@ class QueryVisualizationsAPI:
                options: Any,
                *,
                description: Optional[str] = None,
-               name: Optional[str] = None) ->
+               name: Optional[str] = None) -> LegacyVisualization:
         """Add visualization to a query.

+        Creates visualization in the query.
+
+        **Note**: A new version of the Databricks SQL API is now available. Please use
+        :method:queryvisualizations/create instead. [Learn more]
+
+        [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html
+
         :param query_id: str
           The identifier returned by :method:queries/create
         :param type: str
@@ -4848,7 +6705,7 @@ class QueryVisualizationsAPI:
         :param name: str (optional)
           The name of the visualization that appears on dashboards and the query screen.

-        :returns: :class:`
+        :returns: :class:`LegacyVisualization`
         """
         body = {}
         if description is not None: body['description'] = description
@@ -4859,11 +6716,18 @@ class QueryVisualizationsAPI:
         headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }

         res = self._api.do('POST', '/api/2.0/preview/sql/visualizations', body=body, headers=headers)
-        return
+        return LegacyVisualization.from_dict(res)

     def delete(self, id: str):
         """Remove visualization.

+        Removes a visualization from the query.
+
+        **Note**: A new version of the Databricks SQL API is now available. Please use
+        :method:queryvisualizations/delete instead. [Learn more]
+
+        [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html
+
         :param id: str
           Widget ID returned by :method:queryvizualisations/create

@@ -4881,11 +6745,18 @@ class QueryVisualizationsAPI:
                description: Optional[str] = None,
                name: Optional[str] = None,
                options: Optional[Any] = None,
-               query: Optional[
+               query: Optional[LegacyQuery] = None,
                type: Optional[str] = None,
-               updated_at: Optional[str] = None) ->
+               updated_at: Optional[str] = None) -> LegacyVisualization:
         """Edit existing visualization.

+        Updates visualization in the query.
+
+        **Note**: A new version of the Databricks SQL API is now available. Please use
+        :method:queryvisualizations/update instead. [Learn more]
+
+        [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html
+
         :param id: str
           The UUID for this visualization.
         :param created_at: str (optional)
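The legacy visualization methods above now point to their replacements. Against the new `QueryVisualizationsAPI` added earlier in this file, the equivalent flow looks roughly as follows; the accessor `w.query_visualizations` and the request-object field names are assumptions, since only the method signatures appear in the diff:

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service import sql

    w = WorkspaceClient()

    viz = w.query_visualizations.create(
        visualization=sql.CreateVisualizationRequestVisualization(
            query_id="<query-uuid>",  # hypothetical query ID
            type="CHART",
            display_name="Revenue by day"))

    # Same field-mask convention as QueriesAPI.update: comma separated, no spaces.
    w.query_visualizations.update(
        id=viz.id,
        update_mask="display_name",
        visualization=sql.UpdateVisualizationRequestVisualization(display_name="Revenue (daily)"))

    w.query_visualizations.delete(id=viz.id)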
@@ -4896,12 +6767,12 @@ class QueryVisualizationsAPI:
         :param options: Any (optional)
           The options object varies widely from one visualization type to the next and is unsupported.
           Databricks does not recommend modifying visualization settings in JSON.
-        :param query: :class:`
+        :param query: :class:`LegacyQuery` (optional)
         :param type: str (optional)
           The type of visualization: chart, table, pivot table, and so on.
         :param updated_at: str (optional)

-        :returns: :class:`
+        :returns: :class:`LegacyVisualization`
         """
         body = {}
         if created_at is not None: body['created_at'] = created_at
@@ -4914,7 +6785,7 @@ class QueryVisualizationsAPI:
         headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }

         res = self._api.do('POST', f'/api/2.0/preview/sql/visualizations/{id}', body=body, headers=headers)
-        return
+        return LegacyVisualization.from_dict(res)


 class StatementExecutionAPI:
@@ -5033,7 +6904,7 @@ class StatementExecutionAPI:
                          parameters: Optional[List[StatementParameterListItem]] = None,
                          row_limit: Optional[int] = None,
                          schema: Optional[str] = None,
-                         wait_timeout: Optional[str] = None) ->
+                         wait_timeout: Optional[str] = None) -> StatementResponse:
         """Execute a SQL statement.

         :param statement: str
@@ -5053,26 +6924,6 @@ class StatementExecutionAPI:

         [`USE CATALOG`]: https://docs.databricks.com/sql/language-manual/sql-ref-syntax-ddl-use-catalog.html
         :param disposition: :class:`Disposition` (optional)
-          The fetch disposition provides two modes of fetching results: `INLINE` and `EXTERNAL_LINKS`.
-
-          Statements executed with `INLINE` disposition will return result data inline, in `JSON_ARRAY`
-          format, in a series of chunks. If a given statement produces a result set with a size larger than 25
-          MiB, that statement execution is aborted, and no result set will be available.
-
-          **NOTE** Byte limits are computed based upon internal representations of the result set data, and
-          might not match the sizes visible in JSON responses.
-
-          Statements executed with `EXTERNAL_LINKS` disposition will return result data as external links:
-          URLs that point to cloud storage internal to the workspace. Using `EXTERNAL_LINKS` disposition
-          allows statements to generate arbitrarily sized result sets for fetching up to 100 GiB. The
-          resulting links have two important properties:
-
-          1. They point to resources _external_ to the Databricks compute; therefore any associated
-          authentication information (typically a personal access token, OAuth token, or similar) _must be
-          removed_ when fetching from these links.
-
-          2. These are presigned URLs with a specific expiration, indicated in the response. The behavior when
-          attempting to use an expired link is cloud specific.
         :param format: :class:`Format` (optional)
           Statement execution supports three result formats: `JSON_ARRAY` (default), `ARROW_STREAM`, and
           `CSV`.
@@ -5160,7 +7011,7 @@ class StatementExecutionAPI:
           the statement takes longer to execute, `on_wait_timeout` determines what should happen after the
           timeout is reached.

-        :returns: :class:`
+        :returns: :class:`StatementResponse`
         """
         body = {}
         if byte_limit is not None: body['byte_limit'] = byte_limit
@@ -5177,9 +7028,9 @@ class StatementExecutionAPI:
         headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }

         res = self._api.do('POST', '/api/2.0/sql/statements/', body=body, headers=headers)
-        return
+        return StatementResponse.from_dict(res)

-    def get_statement(self, statement_id: str) ->
+    def get_statement(self, statement_id: str) -> StatementResponse:
         """Get status, manifest, and result first chunk.

         This request can be used to poll for the statement's status. When the `status.state` field is
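With the return type now spelled out as `StatementResponse`, the execute-then-poll pattern reads as follows. A sketch assuming the accessor `w.statement_execution` and the `status.state` and `result.data_array` fields of the SDK's response types; only the `statement_id` round trip is visible in this hunk:

    import time

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service import sql

    w = WorkspaceClient()

    resp = w.statement_execution.execute_statement(
        statement="SELECT 1 AS one",
        warehouse_id="<warehouse-id>",  # hypothetical warehouse ID
        wait_timeout="0s")  # return immediately and poll below

    while resp.status.state in (sql.StatementState.PENDING, sql.StatementState.RUNNING):
        time.sleep(5)
        resp = w.statement_execution.get_statement(statement_id=resp.statement_id)

    if resp.status.state == sql.StatementState.SUCCEEDED and resp.result:
        print(resp.result.data_array)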
@@ -5194,13 +7045,13 @@ class StatementExecutionAPI:
         The statement ID is returned upon successfully submitting a SQL statement, and is a required
         reference for all subsequent calls.

-        :returns: :class:`
+        :returns: :class:`StatementResponse`
         """

         headers = {'Accept': 'application/json', }

         res = self._api.do('GET', f'/api/2.0/sql/statements/{statement_id}', headers=headers)
-        return
+        return StatementResponse.from_dict(res)

     def get_statement_result_chunk_n(self, statement_id: str, chunk_index: int) -> ResultData:
         """Get result chunk by index.