singlestoredb 0.8.8__cp36-abi3-win_amd64.whl → 0.9.0__cp36-abi3-win_amd64.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of singlestoredb might be problematic. Click here for more details.
- _singlestoredb_accel.pyd +0 -0
- singlestoredb/__init__.py +1 -1
- singlestoredb/config.py +6 -0
- singlestoredb/exceptions.py +24 -0
- singlestoredb/functions/__init__.py +1 -0
- singlestoredb/functions/decorator.py +165 -0
- singlestoredb/functions/dtypes.py +1396 -0
- singlestoredb/functions/ext/__init__.py +2 -0
- singlestoredb/functions/ext/asgi.py +357 -0
- singlestoredb/functions/ext/json.py +49 -0
- singlestoredb/functions/ext/rowdat_1.py +111 -0
- singlestoredb/functions/signature.py +607 -0
- singlestoredb/management/billing_usage.py +148 -0
- singlestoredb/management/manager.py +42 -1
- singlestoredb/management/organization.py +85 -0
- singlestoredb/management/utils.py +118 -1
- singlestoredb/management/workspace.py +881 -5
- singlestoredb/mysql/__init__.py +12 -10
- singlestoredb/mysql/_auth.py +3 -1
- singlestoredb/mysql/charset.py +12 -11
- singlestoredb/mysql/connection.py +4 -3
- singlestoredb/mysql/constants/CLIENT.py +0 -1
- singlestoredb/mysql/constants/COMMAND.py +0 -1
- singlestoredb/mysql/constants/CR.py +0 -2
- singlestoredb/mysql/constants/ER.py +0 -1
- singlestoredb/mysql/constants/FIELD_TYPE.py +0 -1
- singlestoredb/mysql/constants/FLAG.py +0 -1
- singlestoredb/mysql/constants/SERVER_STATUS.py +0 -1
- singlestoredb/mysql/converters.py +49 -28
- singlestoredb/mysql/err.py +3 -3
- singlestoredb/mysql/optionfile.py +4 -4
- singlestoredb/mysql/protocol.py +2 -1
- singlestoredb/mysql/times.py +3 -4
- singlestoredb/tests/test2.sql +1 -0
- singlestoredb/tests/test_management.py +393 -3
- singlestoredb/tests/test_udf.py +698 -0
- {singlestoredb-0.8.8.dist-info → singlestoredb-0.9.0.dist-info}/METADATA +1 -1
- {singlestoredb-0.8.8.dist-info → singlestoredb-0.9.0.dist-info}/RECORD +41 -29
- {singlestoredb-0.8.8.dist-info → singlestoredb-0.9.0.dist-info}/LICENSE +0 -0
- {singlestoredb-0.8.8.dist-info → singlestoredb-0.9.0.dist-info}/WHEEL +0 -0
- {singlestoredb-0.8.8.dist-info → singlestoredb-0.9.0.dist-info}/top_level.txt +0 -0
|
@@ -0,0 +1,357 @@
|
|
|
1
|
+
#!/usr/bin/env python3
|
|
2
|
+
'''
|
|
3
|
+
Web application for SingleStoreDB external functions.
|
|
4
|
+
|
|
5
|
+
This module supplies a function that can create web apps intended for use
|
|
6
|
+
with the external function feature of SingleStoreDB. The application
|
|
7
|
+
function is a standard ASGI <https://asgi.readthedocs.io/en/latest/index.html>
|
|
8
|
+
request handler for use with servers such as Uvicorn <https://www.uvicorn.org>.
|
|
9
|
+
|
|
10
|
+
An external function web application can be created using the `create_app`
|
|
11
|
+
function. By default, the exported Python functions are specified by
|
|
12
|
+
environment variables starting with SINGLESTOREDB_EXT_FUNCTIONS. See the
|
|
13
|
+
documentation in `create_app` for the full syntax. If the application is
|
|
14
|
+
created in Python code rather than from the command-line, exported
|
|
15
|
+
functions can be specified in the parameters.
|
|
16
|
+
|
|
17
|
+
An example of starting a server is shown below.
|
|
18
|
+
|
|
19
|
+
Example
|
|
20
|
+
-------
|
|
21
|
+
$ SINGLESTOREDB_EXT_FUNCTIONS='myfuncs.[percentage_90,percentage_95]' \
|
|
22
|
+
uvicorn --factory singlestoredb.functions.ext:create_app
|
|
23
|
+
|
|
24
|
+
'''
|
|
25
|
+
import importlib
|
|
26
|
+
import itertools
|
|
27
|
+
import os
|
|
28
|
+
from typing import Any
|
|
29
|
+
from typing import Awaitable
|
|
30
|
+
from typing import Callable
|
|
31
|
+
from typing import Dict
|
|
32
|
+
from typing import Iterable
|
|
33
|
+
from typing import List
|
|
34
|
+
from typing import Optional
|
|
35
|
+
from typing import Sequence
|
|
36
|
+
from typing import Tuple
|
|
37
|
+
from typing import Union
|
|
38
|
+
|
|
39
|
+
from . import json as jdata
|
|
40
|
+
from . import rowdat_1
|
|
41
|
+
from ...mysql.constants import FIELD_TYPE as ft
|
|
42
|
+
from ..signature import get_signature
|
|
43
|
+
from ..signature import signature_to_sql
|
|
44
|
+
|
|
45
|
+
# If a number of processes is specified, create a pool of workers
# (negative SINGLESTOREDB_EXT_NUM_PROCESSES values are clamped to 0).
num_processes = max(0, int(os.environ.get('SINGLESTOREDB_EXT_NUM_PROCESSES', 0)))
if num_processes > 1:
    try:
        # Prefer Ray's multiprocessing-compatible pool when it is installed.
        from ray.util.multiprocessing import Pool
    except ImportError:
        from multiprocessing import Pool
    # Parallel starmap; endpoint wrappers use this to apply the wrapped
    # function to each row of arguments.
    func_map = Pool(num_processes).starmap
else:
    # Serial fallback with the same starmap call signature.
    func_map = itertools.starmap
|
|
55
|
+
|
|
56
|
+
|
|
57
|
+
# Use negative values to indicate unsigned ints / binary data / usec time precision
# Maps signature dtype names to MySQL field type codes for the ROWDAT_1
# serializers. All integer widths are transported as 64-bit values and
# both float widths as doubles.
rowdat_1_type_map = {
    'bool': ft.LONGLONG,
    'int8': ft.LONGLONG,
    'int16': ft.LONGLONG,
    'int32': ft.LONGLONG,
    'int64': ft.LONGLONG,
    'uint8': -ft.LONGLONG,
    'uint16': -ft.LONGLONG,
    'uint32': -ft.LONGLONG,
    'uint64': -ft.LONGLONG,
    'float32': ft.DOUBLE,
    'float64': ft.DOUBLE,
    'str': ft.STRING,
    'bytes': -ft.STRING,
}
|
|
73
|
+
|
|
74
|
+
|
|
75
|
+
def get_func_names(funcs: str) -> List[Tuple[str, str]]:
    '''
    Parse all function names from string.

    Parameters
    ----------
    funcs : str
        String containing one or more function names. The syntax is
        as follows: [func-name-1@func-alias-1,func-name-2@func-alias-2,...].
        The optional '@name' portion is an alias if you want the function
        to be renamed.

    Returns
    -------
    List[Tuple[str, str]] : a list of tuples containing the names and
        aliases of each function.

    '''
    # A bracketed spec holds a comma-separated list; otherwise the whole
    # string is a single function name.
    if funcs.startswith('['):
        items = [
            part.strip()
            for part in funcs.replace('[', '').replace(']', '').split(',')
        ]
    else:
        items = [funcs]

    out = []
    for item in items:
        name, sep, alias = item.partition('@')
        # Without an '@' marker the alias is the name itself.
        out.append((name, alias if sep else name))
    return out
|
|
107
|
+
|
|
108
|
+
|
|
109
|
+
def make_func(name: str, func: Callable[..., Any]) -> Callable[..., Any]:
    '''
    Make a function endpoint.

    Parameters
    ----------
    name : str
        Name of the function to create
    func : Callable
        The function to call as the endpoint

    Returns
    -------
    Callable

    '''
    def _lookup_type(dtype: str) -> int:
        # Strip the nullable marker ('?') and map to a rowdat_1 type code.
        key = dtype.replace('?', '')
        if key not in rowdat_1_type_map:
            raise TypeError(f'no data type mapping for {key}')
        return rowdat_1_type_map[key]

    async def endpoint(row_ids: Sequence[int], rows: Sequence[Any]) -> List[Any]:
        '''Call function on given rows of data.'''
        return list(zip(row_ids, func_map(func, rows)))

    endpoint.__name__ = name
    endpoint.__doc__ = func.__doc__

    sig = get_signature(func, name=name)

    # Store signature for generating CREATE FUNCTION calls
    endpoint._ext_func_signature = sig  # type: ignore

    # Setup argument types for rowdat_1 parser
    endpoint._ext_func_colspec = [  # type: ignore
        (arg['name'], _lookup_type(arg['dtype'])) for arg in sig['args']
    ]

    # Setup return type
    endpoint._ext_func_returns = [  # type: ignore
        _lookup_type(sig['returns']['dtype']),
    ]

    return endpoint
|
|
153
|
+
|
|
154
|
+
|
|
155
|
+
def create_app(
    functions: Optional[
        Union[
            str,
            Iterable[str],
            Callable[..., Any],
            Iterable[Callable[..., Any]],
        ]
    ] = None,
) -> Callable[..., Any]:
    '''
    Create an external function application.

    If `functions` is None, the environment is searched for function
    specifications in variables starting with `SINGLESTOREDB_EXT_FUNCTIONS`.
    Any number of environment variables can be specified as long as they
    have this prefix. The format of the environment variable value is the
    same as for the `functions` parameter.

    Parameters
    ----------
    functions : str or Iterable[str] or Callable or Iterable[Callable], optional
        Python functions are specified using a string format as follows:
        * Single function : <pkg1>.<func1>
        * Multiple functions : <pkg1>.[<func1-name,func2-name,...]
        * Function aliases : <pkg1>.[<func1@alias1,func2@alias2,...]
        * Multiple packages : <pkg1>.<func1>:<pkg2>.<func2>
        Function objects (or iterables of function objects) may also be
        passed directly.

    Returns
    -------
    Callable : the application request handler

    '''
    # List of function specs
    specs: List[Union[str, Callable[..., Any]]] = []

    # Look up Python function specifications
    if functions is None:
        for k, v in os.environ.items():
            if k.startswith('SINGLESTOREDB_EXT_FUNCTIONS'):
                specs.append(v)
    elif isinstance(functions, str) or callable(functions):
        specs = [functions]
    else:
        specs = list(functions)

    # Add functions to application
    endpoints: Dict[bytes, Callable[..., Any]] = dict()
    for funcs in specs:
        if isinstance(funcs, str):
            pkg_path, func_names = funcs.rsplit('.', 1)
            pkg = importlib.import_module(pkg_path)

            # Add endpoint for each exported function
            for name, alias in get_func_names(func_names):
                item = getattr(pkg, name)
                func = make_func(alias, item)
                endpoints[alias.encode('utf-8')] = func
        else:
            # BUG FIX: the original wrapped `item` here, which is undefined
            # on this branch (NameError, or a stale value left over from a
            # previous string spec). The callable spec itself is the
            # function to wrap.
            alias = funcs.__name__
            func = make_func(alias, funcs)
            endpoints[alias.encode('utf-8')] = func

    # Plain text response start
    text_response_dict: Dict[str, Any] = dict(
        type='http.response.start',
        status=200,
        headers=[(b'content-type', b'text/plain')],
    )

    # JSON response start
    json_response_dict: Dict[str, Any] = dict(
        type='http.response.start',
        status=200,
        headers=[(b'content-type', b'application/json')],
    )

    # ROWDAT_1 response start
    rowdat_1_response_dict: Dict[str, Any] = dict(
        type='http.response.start',
        status=200,
        headers=[(b'content-type', b'x-application/rowdat_1')],
    )

    # Path not found response start
    path_not_found_response_dict: Dict[str, Any] = dict(
        type='http.response.start',
        status=404,
    )

    # Response body template
    body_response_dict: Dict[str, Any] = dict(
        type='http.response.body',
    )

    # Data format + version handlers
    handlers = {
        (b'application/octet-stream', b'1.0'): dict(
            load=rowdat_1.load,
            dump=rowdat_1.dump,
            response=rowdat_1_response_dict,
        ),
        (b'application/json', b'1.0'): dict(
            load=jdata.load,
            dump=jdata.dump,
            response=json_response_dict,
        ),
    }

    # Valid URL paths
    invoke_path = ('invoke',)
    show_create_function_path = ('show', 'create_function')

    async def app(
        scope: Dict[str, Any],
        receive: Callable[..., Awaitable[Any]],
        send: Callable[..., Awaitable[Any]],
    ) -> None:
        '''
        Application request handler.

        Parameters
        ----------
        scope : dict
            ASGI request scope
        receive : Callable
            Function to receive request information
        send : Callable
            Function to send response information

        '''
        assert scope['type'] == 'http'

        method = scope['method']
        path = tuple(x for x in scope['path'].split('/') if x)
        headers = dict(scope['headers'])

        content_type = headers.get(
            b'content-type',
            b'application/octet-stream',
        )
        # NOTE(review): this reads an `accepts` header rather than the
        # standard HTTP `accept` header -- presumably the SingleStoreDB
        # caller sends it this way; confirm against the server protocol.
        accepts = headers.get(b'accepts', content_type)
        func_name = headers.get(b's2-ef-name', b'')
        func = endpoints.get(func_name)

        # Call the endpoint
        if method == 'POST' and func is not None and path == invoke_path:
            # Drain the request body, which may arrive in multiple parts.
            data = []
            more_body = True
            while more_body:
                request = await receive()
                data.append(request['body'])
                more_body = request.get('more_body', False)

            data_version = headers.get(b's2-ef-version', b'')
            input_handler = handlers[(content_type, data_version)]
            output_handler = handlers[(accepts, data_version)]

            out = await func(
                *input_handler['load'](
                    func._ext_func_colspec, b''.join(data),  # type: ignore
                ),
            )
            body = output_handler['dump'](func._ext_func_returns, out)  # type: ignore

            await send(output_handler['response'])

        # Handle api reflection
        elif method == 'GET' and path == show_create_function_path:
            host = headers.get(b'host', b'localhost:80')
            url = f'{scope["scheme"]}://{host.decode("utf-8")}/invoke'
            data_format = 'json' if b'json' in content_type else 'rowdat_1'

            # Emit CREATE FUNCTION statements for one endpoint (when the
            # s2-ef-name header is given) or all of them.
            syntax = []
            for key, endpoint in endpoints.items():
                if not func_name or key == func_name:
                    syntax.append(
                        signature_to_sql(
                            endpoint._ext_func_signature,  # type: ignore
                            base_url=url,
                            data_format=data_format,
                        ),
                    )
            body = '\n'.join(syntax).encode('utf-8')

            await send(text_response_dict)

        # Path not found
        else:
            body = b''
            await send(path_not_found_response_dict)

        # Send body
        out = body_response_dict.copy()
        out['body'] = body
        await send(out)

    return app
|
|
@@ -0,0 +1,49 @@
|
|
|
1
|
+
#!/usr/bin/env python3
|
|
2
|
+
import json
|
|
3
|
+
from typing import Any
|
|
4
|
+
from typing import Iterable
|
|
5
|
+
from typing import List
|
|
6
|
+
from typing import Tuple
|
|
7
|
+
|
|
8
|
+
|
|
9
|
+
def load(colspec: Iterable[Tuple[str, int]], data: bytes) -> Tuple[List[int], List[Any]]:
    '''
    Convert bytes in JSON format into rows of data.

    Parameters
    ----------
    colspec : Iterable[Tuple[str, int]]
        An Iterable of column data types (unused by the JSON format,
        kept for interface parity with the rowdat_1 loader)
    data : bytes
        The data in JSON format

    Returns
    -------
    Tuple[List[int], List[Any]] : row IDs and rows of column values

    '''
    # Each entry under 'data' is [row_id, col1, col2, ...].
    payload = json.loads(data.decode('utf-8'))['data']
    row_ids = [entry[0] for entry in payload]
    rows = [entry[1:] for entry in payload]
    return row_ids, rows
|
|
31
|
+
|
|
32
|
+
|
|
33
|
+
def dump(returns: Iterable[int], data: Iterable[Tuple[int, Any]]) -> bytes:
    '''
    Convert a list of lists of data into JSON format.

    Parameters
    ----------
    returns : Iterable[int]
        The returned data types (unused by the JSON format, kept for
        interface parity with the rowdat_1 dumper)
    data : Iterable[Tuple[int, Any]]
        The rows of data to serialize

    Returns
    -------
    bytes

    '''
    # Materialize the rows under a 'data' key and emit UTF-8 JSON.
    payload = {'data': list(data)}
    return json.dumps(payload).encode('utf-8')
|
|
@@ -0,0 +1,111 @@
|
|
|
1
|
+
#!/usr/bin/env python3
|
|
2
|
+
import struct
|
|
3
|
+
import warnings
|
|
4
|
+
from io import BytesIO
|
|
5
|
+
from typing import Any
|
|
6
|
+
from typing import Iterable
|
|
7
|
+
from typing import List
|
|
8
|
+
from typing import Tuple
|
|
9
|
+
|
|
10
|
+
# Prefer the C extension for (de)serializing ROWDAT_1 payloads; fall back
# to the pure Python implementation in this module when it is unavailable.
try:
    import _singlestoredb_accel
except ImportError:
    warnings.warn(
        'could not load accelerated data reader for external functions; '
        'using pure Python implementation.',
        RuntimeWarning,
    )
    # Sentinel checked at the bottom of this module to select the pure
    # Python `load` / `dump` implementations.
    _singlestoredb_accel = None
|
|
19
|
+
|
|
20
|
+
from ...mysql.constants import FIELD_TYPE as ft
|
|
21
|
+
|
|
22
|
+
|
|
23
|
+
def _load(colspec: Iterable[Tuple[str, int]], data: bytes) -> Tuple[List[int], List[Any]]:
    '''
    Convert bytes in rowdat_1 format into rows of data.

    Parameters
    ----------
    colspec : Iterable[Tuple[str, int]]
        An Iterable of (column name, column type code) pairs
    data : bytes
        The data in rowdat_1 format

    Returns
    -------
    Tuple[List[int], List[Any]] : row IDs and rows of column values

    Raises
    ------
    TypeError : when a column type code is not recognized

    '''
    data_len = len(data)
    data_io = BytesIO(data)
    row_ids = []
    rows = []
    val = None
    while data_io.tell() < data_len:
        # Each row starts with a little-endian signed 64-bit row ID.
        row_ids.append(struct.unpack('<q', data_io.read(8))[0])
        row = []
        for _, ctype in colspec:
            # One NULL-marker byte precedes every value; a placeholder
            # value is still present even when the marker is set.
            is_null = data_io.read(1) == b'\x01'
            # BUG FIX: compare type codes with `==` rather than `is` --
            # identity of int constants only happened to hold via
            # CPython's small-int cache.
            if ctype == ft.LONGLONG:
                val = struct.unpack('<q', data_io.read(8))[0]
            elif ctype == ft.DOUBLE:
                val = struct.unpack('<d', data_io.read(8))[0]
            elif ctype == ft.STRING:
                # Length-prefixed UTF-8 string.
                slen = struct.unpack('<q', data_io.read(8))[0]
                val = data_io.read(slen).decode('utf-8')
            else:
                # NOTE(review): negative type codes (unsigned ints /
                # binary data per rowdat_1_type_map) fall through to this
                # error in the pure Python path -- confirm the accelerated
                # reader is required for those columns.
                raise TypeError(f'unrecognized column type: {ctype}')
            row.append(None if is_null else val)
        rows.append(row)
    return row_ids, rows
|
|
61
|
+
|
|
62
|
+
|
|
63
|
+
def _dump(returns: Iterable[int], data: Iterable[Tuple[int, Any]]) -> bytes:
    '''
    Convert a list of lists of data into rowdat_1 format.

    Parameters
    ----------
    returns : Iterable[int]
        The returned column type codes
    data : Iterable[Tuple[int, Any]]
        The rows of (row ID, value) pairs to serialize

    Returns
    -------
    bytes

    Raises
    ------
    TypeError : when a column type code is not recognized

    '''
    out = BytesIO()
    for row_id, value in data:
        # Each row starts with a little-endian signed 64-bit row ID.
        out.write(struct.pack('<q', row_id))
        for rtype in returns:
            # NULL-marker byte; a zero placeholder value still follows so
            # that the per-column layout stays fixed.
            out.write(b'\x01' if value is None else b'\x00')
            # BUG FIX: compare type codes with `==` rather than `is` --
            # identity of int constants only happened to hold via
            # CPython's small-int cache.
            if rtype == ft.LONGLONG:
                out.write(struct.pack('<q', 0 if value is None else value))
            elif rtype == ft.DOUBLE:
                out.write(struct.pack('<d', 0.0 if value is None else value))
            elif rtype == ft.STRING:
                if value is None:
                    out.write(struct.pack('<q', 0))
                else:
                    # Length-prefixed UTF-8 string.
                    sval = value.encode('utf-8')
                    out.write(struct.pack('<q', len(sval)))
                    out.write(sval)
            else:
                raise TypeError(f'unrecognized column type: {rtype}')
    return out.getvalue()
|
|
104
|
+
|
|
105
|
+
|
|
106
|
+
# Select the accelerated C implementations when the extension imported
# successfully; otherwise fall back to the pure Python versions above.
if _singlestoredb_accel is None:
    load = _load
    dump = _dump
else:
    load = _singlestoredb_accel.load_rowdat_1
    dump = _singlestoredb_accel.dump_rowdat_1
|