singlestoredb 0.9.1-cp36-abi3-win32.whl → 0.9.2-cp36-abi3-win32.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of singlestoredb might be problematic.
- _singlestoredb_accel.pyd +0 -0
- singlestoredb/__init__.py +1 -1
- singlestoredb/fusion/__init__.py +11 -0
- singlestoredb/fusion/handler.py +542 -0
- singlestoredb/fusion/handlers/__init__.py +0 -0
- singlestoredb/fusion/handlers/workspace.py +361 -0
- singlestoredb/fusion/registry.py +112 -0
- singlestoredb/fusion/result.py +120 -0
- singlestoredb/http/connection.py +65 -12
- singlestoredb/management/workspace.py +1 -1
- singlestoredb/mysql/connection.py +10 -4
- singlestoredb/mysql/constants/FIELD_TYPE.py +1 -0
- singlestoredb/tests/test_results.py +6 -6
- singlestoredb/utils/mogrify.py +151 -0
- {singlestoredb-0.9.1.dist-info → singlestoredb-0.9.2.dist-info}/METADATA +2 -1
- {singlestoredb-0.9.1.dist-info → singlestoredb-0.9.2.dist-info}/RECORD +19 -12
- {singlestoredb-0.9.1.dist-info → singlestoredb-0.9.2.dist-info}/LICENSE +0 -0
- {singlestoredb-0.9.1.dist-info → singlestoredb-0.9.2.dist-info}/WHEEL +0 -0
- {singlestoredb-0.9.1.dist-info → singlestoredb-0.9.2.dist-info}/top_level.txt +0 -0
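The substantive change in 0.9.2 is the new singlestoredb.fusion subpackage: SQL-like management statements (SHOW REGIONS, SHOW/CREATE/DROP WORKSPACE and WORKSPACE GROUP) are intercepted by the HTTP cursor and answered from the Management API instead of being sent to the database, with the new singlestoredb/utils/mogrify.py handling client-side parameter substitution for those statements. A rough end-to-end usage sketch, not part of the diff, follows; the connection URL is hypothetical, and the environment variables should be set before the package is imported, since the workspace handlers read the management token at import time:

# Hedged usage sketch (assumptions: a Data API/HTTP connection URL and a valid
# management token; names other than the environment variables are illustrative).
import os
os.environ['SINGLESTOREDB_ENABLE_FUSION'] = '1'            # fusion.get_handler() matches nothing otherwise
os.environ['SINGLESTOREDB_MANAGEMENT_TOKEN'] = '<token>'   # read when the workspace handlers are imported

import singlestoredb as s2

conn = s2.connect('https://user:password@host:443')        # hypothetical DSN
cur = conn.cursor()
cur.execute('SHOW WORKSPACE GROUPS EXTENDED')              # routed to ShowWorkspaceGroupsHandler,
print(cur.fetchall())                                      # never sent to the SQL engine
cur.close()
conn.close()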
singlestoredb/fusion/handlers/workspace.py
ADDED
@@ -0,0 +1,361 @@
+#!/usr/bin/env python3
+import datetime
+import json
+import os
+from typing import Any
+from typing import Dict
+from typing import Optional
+
+from .. import result
+from ...management import manage_workspaces
+from ...management.workspace import WorkspaceGroup
+from ..handler import SQLHandler
+
+manager = manage_workspaces(os.environ.get('SINGLESTOREDB_MANAGEMENT_TOKEN', 'DEAD'))
+
+
+def dt_isoformat(dt: Optional[datetime.datetime]) -> Optional[str]:
+    """Convert datetime to string."""
+    if dt is None:
+        return None
+    return dt.isoformat()
+
+
+def get_workspace_group(params: Dict[str, Any]) -> WorkspaceGroup:
+    """Find a workspace group matching group_id or group_name."""
+    if 'group_name' in params:
+        workspace_groups = [
+            x for x in manager.workspace_groups
+            if x.name == params['group_name']
+        ]
+        if not workspace_groups:
+            raise KeyError(
+                'no workspace group found with name "{}"'.format(params['group_name']),
+            )
+        if len(workspace_groups) > 1:
+            ids = ', '.join(x.id for x in workspace_groups)
+            raise ValueError(
+                f'more than one workspace group with given name was found: {ids}',
+            )
+        return workspace_groups[0]
+    return manager.get_workspace_group(params['group_id'])
+
+
+class ShowRegionsHandler(SQLHandler):
+    """
+    SHOW REGIONS [ like ];
+
+    # Region name pattern
+    like = LIKE '<pattern>'
+
+    """
+
+    def run(self, params: Dict[str, Any]) -> Optional[result.FusionSQLResult]:
+        res = self.create_result()
+        res.add_field('Name', result.STRING)
+        res.add_field('ID', result.STRING)
+        res.add_field('Provider', result.STRING)
+
+        is_like = self.create_like_func(params.get('like', None))
+
+        res.set_rows([(x.name, x.id, x.provider)
+                      for x in manager.regions if is_like(x.name)])
+        return res
+
+
+ShowRegionsHandler.register()
+
+
+class ShowWorkspaceGroupsHandler(SQLHandler):
+    """
+    SHOW WORKSPACE GROUPS [ like ] [ extended ];
+
+    # Workspace group name pattern
+    like = LIKE '<pattern>'
+
+    # Add additional data columns
+    extended = EXTENDED
+
+    """
+
+    def run(self, params: Dict[str, Any]) -> Optional[result.FusionSQLResult]:
+        res = self.create_result()
+        res.add_field('Name', result.STRING)
+        res.add_field('ID', result.STRING)
+        res.add_field('Region Name', result.STRING)
+        res.add_field('Firewall Ranges', result.JSON)
+
+        if params.get('extended'):
+            res.add_field('Created At', result.DATETIME)
+            res.add_field('Terminated At', result.DATETIME)
+
+            def fields(x: Any) -> Any:
+                return (
+                    x.name, x.id, x.region.name,
+                    json.dumps(x.firewall_ranges),
+                    dt_isoformat(x.created_at),
+                    dt_isoformat(x.terminated_at),
+                )
+        else:
+            def fields(x: Any) -> Any:
+                return (x.name, x.id, x.region.name, x.firewall_ranges)
+
+        is_like = self.create_like_func(params.get('like', None))
+
+        res.set_rows([fields(x) for x in manager.workspace_groups if is_like(x.name)])
+
+        return res
+
+
+ShowWorkspaceGroupsHandler.register()
+
+
+class ShowWorkspacesHandler(SQLHandler):
+    """
+    SHOW WORKSPACES IN GROUP { group_id | group_name } [ like ] [ extended ];
+
+    # ID of group
+    group_id = ID '<group-id>'
+
+    # Name of group
+    group_name = '<group-name>'
+
+    # Workspace group name pattern
+    like = LIKE '<pattern>'
+
+    # Add additional data columns
+    extended = EXTENDED
+
+    """
+
+    def run(self, params: Dict[str, Any]) -> Optional[result.FusionSQLResult]:
+        res = self.create_result()
+        res.add_field('Name', result.STRING)
+        res.add_field('ID', result.STRING)
+        res.add_field('Size', result.STRING)
+        res.add_field('State', result.STRING)
+
+        workspace_group = get_workspace_group(params)
+
+        if params.get('extended'):
+            res.add_field('Endpoint', result.STRING)
+            res.add_field('Created At', result.DATETIME)
+            res.add_field('Terminated At', result.DATETIME)
+
+            def fields(x: Any) -> Any:
+                return (
+                    x.name, x.id, x.size, x.state,
+                    x.endpoint, dt_isoformat(x.created_at),
+                    dt_isoformat(x.terminated_at),
+                )
+        else:
+            def fields(x: Any) -> Any:
+                return (x.name, x.id, x.size, x.state)
+
+        is_like = self.create_like_func(params.get('like', None))
+
+        res.set_rows([fields(x) for x in workspace_group.workspaces if is_like(x.name)])
+
+        return res
+
+
+ShowWorkspacesHandler.register()
+
+
+class CreateWorkspaceGroupHandler(SQLHandler):
+    """
+    CREATE WORKSPACE GROUP [ if_not_exists ] group_name
+        IN REGION { region_id | region_name }
+        [ with_password ]
+        [ expires_at ]
+        [ with_firewall_ranges ]
+    ;
+
+    # Only create workspace group if it doesn't exist already
+    if_not_exists = IF NOT EXISTS
+
+    # Name of the workspace group
+    group_name = '<group-name>'
+
+    # ID of region to create workspace group in
+    region_id = ID '<region-id>'
+
+    # Name of region to create workspace group in
+    region_name = '<region-name>'
+
+    # Admin password
+    with_password = WITH PASSWORD '<password>'
+
+    # Datetime or interval for expiration date/time of workspace group
+    expires_at = EXPIRES AT '<iso-datetime-or-interval>'
+
+    # Incoming IP ranges
+    with_firewall_ranges = WITH FIREWALL RANGES '<ip-range>',...
+
+    """
+
+    def run(self, params: Dict[str, Any]) -> Optional[result.FusionSQLResult]:
+        # Only create if one doesn't exist
+        if params.get('if_not_exists'):
+            try:
+                get_workspace_group(params)
+                return None
+            except (ValueError, KeyError):
+                pass
+
+        # Get region ID
+        if 'region_name' in params:
+            regs = [x for x in manager.regions if x.name == params['region_name']]
+            if not regs:
+                raise ValueError(f'no region found with name "{params["region_name"]}"')
+            if len(regs) > 1:
+                raise ValueError(
+                    f'multiple regions found with the name "{params["region_name"]}"',
+                )
+            region_id = regs[0].id
+        else:
+            region_id = params['region_id']
+
+        manager.create_workspace_group(
+            params['group_name'],
+            region=region_id,
+            admin_password=params.get('with_password'),
+            expires_at=params.get('expires_at'),
+            firewall_ranges=params.get('with_firewall_ranges', []),
+        )
+
+        return self.create_result()
+
+
+CreateWorkspaceGroupHandler.register()
+
+
+class CreateWorkspaceHandler(SQLHandler):
+    """
+    CREATE WORKSPACE [ if_not_exists ] workspace_name
+        IN GROUP { group_id | group_name }
+        WITH SIZE size [ wait_on_active ];
+
+    # Only run command if workspace doesn't already exist
+    if_not_exists = IF NOT EXISTS
+
+    # Name of the workspace
+    workspace_name = '<workspace-name>'
+
+    # ID of the group to create workspace in
+    group_id = ID '<group-id>'
+
+    # Name of the group to create workspace in
+    group_name = '<group-name>'
+
+    # Runtime size
+    size = '<size>'
+
+    # Wait for workspace to be active before continuing
+    wait_on_active = WAIT ON ACTIVE
+
+    """
+
+    def run(self, params: Dict[str, Any]) -> Optional[result.FusionSQLResult]:
+        workspace_group = get_workspace_group(params)
+
+        # Only create if one doesn't exist
+        if params.get('if_not_exists'):
+            try:
+                workspace_group.workspaces[params['workspace_name']]
+                return None
+            except KeyError:
+                pass
+
+        workspace_group.create_workspace(
+            params['workspace_name'], size=params['size'],
+            wait_on_active=params.get('wait_on_active', False),
+        )
+
+        return None
+
+
+CreateWorkspaceHandler.register()
+
+
+class DropWorkspaceGroupHandler(SQLHandler):
+    """
+    DROP WORKSPACE GROUP [ if_exists ] { group_id | group_name }
+        [ wait_on_terminated ];
+
+    # Only run command if the workspace group exists
+    if_exists = IF EXISTS
+
+    # ID of the workspace group to delete
+    group_id = ID '<group-id>'
+
+    # Name of the workspace group to delete
+    group_name = '<group-name>'
+
+    # Wait for termination to complete before continuing
+    wait_on_terminated = WAIT ON TERMINATED
+
+    """
+
+    def run(self, params: Dict[str, Any]) -> Optional[result.FusionSQLResult]:
+        try:
+            name_or_id = params.get('group_name', params.get('group_id'))
+            wg = manager.workspace_groups[name_or_id]
+            wg.terminate(wait_on_terminated=params.get('wait_on_terminated', False))
+
+        except KeyError:
+            if not params.get('if_exists'):
+                raise ValueError(f"could not find workspace group '{name_or_id}'")
+
+        return None
+
+
+DropWorkspaceGroupHandler.register()
+
+
+class DropWorkspaceHandler(SQLHandler):
+    """
+    DROP WORKSPACE [ if_exists ] { workspace_id | workspace_name }
+        IN GROUP { group_id | group_name } [ wait_on_terminated ];
+
+    # Only drop workspace if it exists
+    if_exists = IF EXISTS
+
+    # ID of workspace
+    workspace_id = ID '<workspace-id>'
+
+    # Name of workspace
+    workspace_name = '<workspace-name>'
+
+    # ID of workspace group
+    group_id = ID '<group-id>'
+
+    # Name of workspace group
+    group_name = '<group-name>'
+
+    # Wait for workspace to be terminated before continuing
+    wait_on_terminated = WAIT ON TERMINATED
+
+    """
+
+    def run(self, params: Dict[str, Any]) -> Optional[result.FusionSQLResult]:
+        try:
+            workspace_name_or_id = params.get(
+                'workspace_name', params.get('workspace_id'),
+            )
+            group_name_or_id = params.get('group_name', params.get('group_id'))
+            wg = manager.workspace_groups[group_name_or_id]
+            ws = wg.workspaces[workspace_name_or_id]
+            ws.terminate(wait_on_terminated=params.get('wait_on_terminated', False))
+
+        except KeyError:
+            if not params.get('if_exists'):
+                raise ValueError(
+                    f"could not find workspace '{workspace_name_or_id}' "
+                    f"in group '{group_name_or_id}'",
+                )
+
+        return None
+
+
+DropWorkspaceHandler.register()
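Every handler above follows the same template: the class docstring declares the statement grammar and the named options the parser extracts into params, run() builds the result via create_result() / add_field() / set_rows(), and register() publishes the class under its command key. A minimal, hypothetical handler using only the API surface visible in this diff might look like the following (illustrative only, not shipped in the package):

# Hypothetical handler built from the same pieces the shipped handlers use;
# SQLHandler, create_result(), create_like_func(), and register() all appear in
# this diff, while the SHOW GREETINGS command itself is made up.
class ShowGreetingsHandler(SQLHandler):
    """
    SHOW GREETINGS [ like ];

    # Greeting pattern
    like = LIKE '<pattern>'

    """

    def run(self, params: Dict[str, Any]) -> Optional[result.FusionSQLResult]:
        res = self.create_result()
        res.add_field('Greeting', result.STRING)
        is_like = self.create_like_func(params.get('like', None))
        res.set_rows([(x,) for x in ('hello', 'howdy') if is_like(x)])
        return res


ShowGreetingsHandler.register()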
singlestoredb/fusion/registry.py
ADDED
@@ -0,0 +1,112 @@
+#!/usr/bin/env python3
+import os
+import re
+from typing import Any
+from typing import Dict
+from typing import Optional
+from typing import Type
+from typing import Union
+
+from . import result
+from .. import connection
+from .handler import SQLHandler
+
+_enabled = ('1', 'yes', 'on', 'enabled', 'true')
+_handlers: Dict[str, Type[SQLHandler]] = {}
+_handlers_re: Optional[Any] = None
+
+
+def register_handler(handler: Type[SQLHandler], overwrite: bool = False) -> None:
+    """
+    Register a new SQL handler.
+
+    Parameters
+    ----------
+    handler : SQLHandler subclass
+        The handler class to register
+    overwrite : bool, optional
+        Should an existing handler be overwritten if it uses the same command key?
+
+    """
+    global _handlers
+    global _handlers_re
+
+    # Build key for handler
+    key = ' '.join(x.upper() for x in handler.command_key)
+
+    # Check for existing handler with same key
+    if not overwrite and key in _handlers:
+        raise ValueError(f'command already exists, use overwrite=True to override: {key}')
+
+    # Add handler to registry
+    _handlers[key] = handler
+
+    # Build regex to detect fusion query
+    keys = sorted(_handlers.keys(), key=lambda x: -len(x[0]))
+    keys_str = '|'.join(x.replace(' ', '\\s+') for x in keys)
+    _handlers_re = re.compile(f'^\\s*({keys_str})(?:\\s+|;|$)', flags=re.I)
+
+
+def get_handler(sql: Union[str, bytes]) -> Optional[Type[SQLHandler]]:
+    """
+    Return a fusion handler for the given query.

+    Parameters
+    ----------
+    sql : str or bytes
+        The SQL query
+
+    Returns
+    -------
+    SQLHandler - if a matching one exists
+    None - if no matching handler could be found
+
+    """
+    if not os.environ.get('SINGLESTOREDB_ENABLE_FUSION', '').lower() in _enabled:
+        return None
+
+    if isinstance(sql, (bytes, bytearray)):
+        sql = sql.decode('utf-8')
+
+    if _handlers_re is None:
+        return None
+
+    m = _handlers_re.match(sql)
+    if m:
+        return _handlers[re.sub(r'\s+', r' ', m.group(1).strip().upper())]
+
+    return None
+
+
+def execute(
+    connection: connection.Connection,
+    sql: str,
+    handler: Optional[Type[SQLHandler]] = None,
+) -> result.FusionSQLResult:
+    """
+    Execute a SQL query in the management interface.
+
+    Parameters
+    ----------
+    connection : Connection
+        The SingleStoreDB connection object
+    sql : str
+        The SQL query
+    handler : SQLHandler, optional
+        The handler to use for the commands. If not supplied, one will be
+        looked up in the registry.
+
+    Returns
+    -------
+    FusionSQLResult
+
+    """
+    if not os.environ.get('SINGLESTOREDB_ENABLE_FUSION', '').lower() in _enabled:
+        raise RuntimeError('management API queries have not been enabled')
+
+    if handler is None:
+        handler = get_handler(sql)
+        if handler is None:
+            raise RuntimeError(f'could not find handler for query: {sql}')
+
+    return handler(connection).execute(sql)
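Dispatch is therefore a two-step lookup: get_handler() returns a handler class only when SINGLESTOREDB_ENABLE_FUSION is set to one of the enabled values and the statement starts with a registered command key (matched case- and whitespace-insensitively), and execute() instantiates that class against the current connection. A sketch of the flow, mirroring how http/connection.py calls it further below (conn stands for an already-open connection, so this is illustrative rather than runnable on its own):

# Sketch of the lookup/execute flow; `conn` is assumed to be an open connection.
import os
from singlestoredb import fusion

os.environ['SINGLESTOREDB_ENABLE_FUSION'] = 'on'      # any value in _enabled works

sql = 'show   workspace  groups;'                     # case/whitespace-insensitive match
handler = fusion.get_handler(sql)
if handler is not None:
    res = fusion.execute(conn, sql, handler=handler)  # returns a FusionSQLResult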
singlestoredb/fusion/result.py
ADDED
@@ -0,0 +1,120 @@
+#!/usr/bin/env python3
+from __future__ import annotations
+
+from typing import Any
+from typing import List
+from typing import Optional
+from typing import Tuple
+
+from .. import connection
+from ..mysql.constants.FIELD_TYPE import BLOB  # noqa: F401
+from ..mysql.constants.FIELD_TYPE import BOOL  # noqa: F401
+from ..mysql.constants.FIELD_TYPE import DATE  # noqa: F401
+from ..mysql.constants.FIELD_TYPE import DATETIME  # noqa: F401
+from ..mysql.constants.FIELD_TYPE import DOUBLE  # noqa: F401
+from ..mysql.constants.FIELD_TYPE import JSON  # noqa: F401
+from ..mysql.constants.FIELD_TYPE import LONGLONG as INTEGER  # noqa: F401
+from ..mysql.constants.FIELD_TYPE import STRING  # noqa: F401
+from ..utils.results import Description
+from ..utils.results import format_results
+
+
+class FusionField(object):
+    """Field for PyMySQL compatibility."""
+
+    def __init__(self, name: str, flags: int, charset: int) -> None:
+        self.name = name
+        self.flags = flags
+        self.charsetnr = charset
+
+
+class FusionSQLResult:
+    """Result for Fusion SQL commands."""
+
+    def __init__(self, connection: connection.Connection, unbuffered: bool = False):
+        self.connection: Any = connection
+        self.affected_rows: Optional[int] = None
+        self.insert_id: int = 0
+        self.server_status: Optional[int] = None
+        self.warning_count: int = 0
+        self.message: Optional[str] = None
+        self.description: List[Description] = []
+        self.rows: Any = []
+        self.has_next: bool = False
+        self.unbuffered_active: bool = False
+        self.converters: List[Any] = []
+        self.fields: List[FusionField] = []
+        self._row_idx: int = -1
+
+    def _read_rowdata_packet_unbuffered(self, size: int = 1) -> Optional[List[Any]]:
+        if not self.rows:
+            return None
+
+        out = []
+
+        try:
+            for i in range(1, size + 1):
+                out.append(self.rows[self._row_idx + i])
+        except IndexError:
+            self._row_idx = -1
+            self.rows = []
+            return None
+        else:
+            self._row_idx += size
+
+        return out
+
+    def _finish_unbuffered_query(self) -> None:
+        self._row_idx = -1
+        self.rows = []
+        self.affected_rows = None
+
+    def add_field(self, name: str, dtype: int) -> None:
+        """
+        Add a new field / column to the data set.
+
+        Parameters
+        ----------
+        name : str
+            The name of the field / column
+        dtype : int
+            The MySQL field type: BLOB, BOOL, DATE, DATETIME,
+            DOUBLE, JSON, INTEGER, or STRING
+
+        """
+        charset = 0
+        if dtype in (JSON, STRING):
+            encoding = 'utf-8'
+        elif dtype == BLOB:
+            charset = 63
+            encoding = None
+        else:
+            encoding = 'ascii'
+        self.description.append(
+            Description(name, dtype, None, None, 0, 0, True, 0, charset),
+        )
+        self.fields.append(FusionField(name, 0, charset))
+        converter = self.connection.decoders.get(dtype)
+        self.converters.append((encoding, converter))
+
+    def set_rows(self, data: List[Tuple[Any, ...]]) -> None:
+        """
+        Set the rows of the result.
+
+        Parameters
+        ----------
+        data : List[Tuple[Any, ...]]
+            The data should be a list of tuples where each element of the
+            tuple corresponds to a field added to the result with
+            the :meth:`add_field` method.
+
+        """
+        # Convert values
+        for i, row in enumerate(list(data)):
+            new_row = []
+            for (_, converter), value in zip(self.converters, row):
+                new_row.append(converter(value) if converter is not None else value)
+            data[i] = tuple(new_row)
+
+        self.rows = format_results(self.connection._results_type, self.description, data)
+        self.affected_rows = 0
singlestoredb/http/connection.py
CHANGED
@@ -41,6 +41,7 @@ except ImportError:
     has_shapely = False
 
 from .. import connection
+from .. import fusion
 from .. import types
 from ..config import get_option
 from ..converters import converters
@@ -56,6 +57,7 @@ from ..exceptions import ProgrammingError
 from ..exceptions import Warning  # noqa: F401
 from ..utils.convert_rows import convert_rows
 from ..utils.debug import log_query
+from ..utils.mogrify import mogrify
 from ..utils.results import Description
 from ..utils.results import format_results
 from ..utils.results import Result
@@ -431,16 +433,79 @@ class Cursor(connection.Cursor):
         else:
             query = query % args
 
+    def _execute_fusion_query(
+        self,
+        oper: Union[str, bytes],
+        params: Optional[Union[Sequence[Any], Dict[str, Any]]] = None,
+        handler: Any = None,
+    ) -> int:
+        oper = mogrify(oper, params)
+
+        if isinstance(oper, bytes):
+            oper = oper.decode('utf-8')
+
+        log_query(oper, None)
+
+        results_type = self._results_type
+        self._results_type = 'tuples'
+        try:
+            mgmt_res = fusion.execute(
+                self._connection,  # type: ignore
+                oper,
+                handler=handler,
+            )
+        finally:
+            self._results_type = results_type
+
+        self._descriptions.append(list(mgmt_res.description))
+        self._results.append(list(mgmt_res.rows))
+        self.rowcount = len(self._results[-1])
+
+        pymy_res = PyMyResult()
+        for field in mgmt_res.fields:
+            pymy_res.append(
+                PyMyField(
+                    field.name,
+                    field.flags,
+                    field.charsetnr,
+                ),
+            )
+
+        self._pymy_results.append(pymy_res)
+
+        if self._results and self._results[0]:
+            self._row_idx = 0
+            self._result_idx = 0
+
+        return self.rowcount
+
     def _execute(
         self, oper: str,
         params: Optional[Union[Sequence[Any], Dict[str, Any]]] = None,
         is_callproc: bool = False,
     ) -> int:
+        self._descriptions = []
+        self._results = []
+        self._pymy_results = []
+        self._row_idx = -1
+        self._result_idx = -1
+        self.rowcount = 0
+        self._expect_results = False
+
         if self._connection is None:
             raise ProgrammingError(errno=2048, msg='Connection is closed.')
 
+        sql_type = 'exec'
+        if re.match(r'^\s*(select|show|call|echo|describe|with)\s+', oper, flags=re.I):
+            self._expect_results = True
+            sql_type = 'query'
+
         self._validate_param_subs(oper, params)
 
+        handler = fusion.get_handler(oper)
+        if handler is not None:
+            return self._execute_fusion_query(oper, params, handler=handler)
+
         oper, params = self._connection._convert_params(oper, params)
 
         log_query(oper, params)
@@ -455,12 +520,6 @@ class Cursor(connection.Cursor):
         if self._connection._database:
             data['database'] = self._connection._database
 
-        self._expect_results = False
-        sql_type = 'exec'
-        if re.match(r'^\s*(select|show|call|echo|describe|with)\s+', oper, flags=re.I):
-            self._expect_results = True
-            sql_type = 'query'
-
         if sql_type == 'query':
             res = self._post('query/tuples', json=data)
         else:
@@ -479,12 +538,6 @@ class Cursor(connection.Cursor):
 
         out = json.loads(res.text)
 
-        self._descriptions = []
-        self._results = []
-        self._row_idx = -1
-        self._result_idx = -1
-        self.rowcount = 0
-
         if sql_type == 'query':
             # description: (name, type_code, display_size, internal_size,
             #               precision, scale, null_ok, column_flags, charset)