singlestoredb-1.12.3-cp38-abi3-win32.whl → singlestoredb-1.13.0-cp38-abi3-win32.whl
This diff represents the changes between two package versions publicly released to a supported registry, as they appear in that registry. It is provided for informational purposes only.
Potentially problematic release: this version of singlestoredb might be problematic.
- _singlestoredb_accel.pyd +0 -0
- singlestoredb/__init__.py +1 -1
- singlestoredb/apps/__init__.py +1 -0
- singlestoredb/apps/_config.py +6 -0
- singlestoredb/apps/_connection_info.py +8 -0
- singlestoredb/apps/_python_udfs.py +85 -0
- singlestoredb/config.py +14 -2
- singlestoredb/functions/__init__.py +11 -1
- singlestoredb/functions/decorator.py +102 -252
- singlestoredb/functions/dtypes.py +545 -198
- singlestoredb/functions/ext/asgi.py +288 -90
- singlestoredb/functions/ext/json.py +29 -36
- singlestoredb/functions/ext/mmap.py +1 -1
- singlestoredb/functions/ext/rowdat_1.py +50 -70
- singlestoredb/functions/signature.py +816 -144
- singlestoredb/functions/typing.py +41 -0
- singlestoredb/functions/utils.py +342 -0
- singlestoredb/http/connection.py +3 -1
- singlestoredb/management/manager.py +6 -1
- singlestoredb/management/utils.py +2 -2
- singlestoredb/mysql/connection.py +17 -11
- singlestoredb/tests/ext_funcs/__init__.py +476 -237
- singlestoredb/tests/test_basics.py +2 -0
- singlestoredb/tests/test_ext_func.py +192 -3
- singlestoredb/tests/test_udf.py +101 -131
- singlestoredb/tests/test_udf_returns.py +459 -0
- {singlestoredb-1.12.3.dist-info → singlestoredb-1.13.0.dist-info}/METADATA +2 -1
- {singlestoredb-1.12.3.dist-info → singlestoredb-1.13.0.dist-info}/RECORD +32 -28
- {singlestoredb-1.12.3.dist-info → singlestoredb-1.13.0.dist-info}/LICENSE +0 -0
- {singlestoredb-1.12.3.dist-info → singlestoredb-1.13.0.dist-info}/WHEEL +0 -0
- {singlestoredb-1.12.3.dist-info → singlestoredb-1.13.0.dist-info}/entry_points.txt +0 -0
- {singlestoredb-1.12.3.dist-info → singlestoredb-1.13.0.dist-info}/top_level.txt +0 -0
@@ -1,8 +1,10 @@
 #!/usr/bin/env python3
+import base64
 import json
 from typing import Any
 from typing import List
 from typing import Tuple
+from typing import TYPE_CHECKING
 
 from ..dtypes import DEFAULT_VALUES
 from ..dtypes import NUMPY_TYPE_MAP
@@ -11,36 +13,30 @@ from ..dtypes import POLARS_TYPE_MAP
 from ..dtypes import PYARROW_TYPE_MAP
 from ..dtypes import PYTHON_CONVERTERS
 
-try:
-    import numpy as np
-    has_numpy = True
-except ImportError:
-    has_numpy = False
-
-try:
-    import pandas as pd
-    has_pandas = True
-except ImportError:
-    has_pandas = False
-
-try:
-    import polars as pl
-    has_polars = True
-except ImportError:
-    has_polars = False
-
-try:
-    import pyarrow as pa
-    has_pyarrow = True
-except ImportError:
-    has_pyarrow = False
+if TYPE_CHECKING:
+    try:
+        import numpy as np
+    except ImportError:
+        pass
+    try:
+        import pandas as pd
+    except ImportError:
+        pass
+    try:
+        import polars as pl
+    except ImportError:
+        pass
+    try:
+        import pyarrow as pa
+    except ImportError:
+        pass
 
 
 class JSONEncoder(json.JSONEncoder):
 
     def default(self, obj: Any) -> Any:
         if isinstance(obj, bytes):
-            return obj.
+            return base64.b64encode(obj).decode('utf-8')
         return json.JSONEncoder.default(self, obj)
 
 
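The two hunks above (apparently from singlestoredb/functions/ext/json.py, per the file list) make one behavioral fix and one structural cleanup: bytes values are now emitted as base64 text rather than falling through the encoder, and the optional numpy/pandas/polars/pyarrow imports move under TYPE_CHECKING so they are evaluated only by type checkers. A minimal sketch of the new bytes handling, using the encoder shown in the hunk:

```python
import base64
import json

class JSONEncoder(json.JSONEncoder):
    # As in the hunk above: bytes are not JSON-serializable, so they
    # are emitted as base64 text instead of raising a TypeError.
    def default(self, obj):
        if isinstance(obj, bytes):
            return base64.b64encode(obj).decode('utf-8')
        return json.JSONEncoder.default(self, obj)

payload = json.dumps({'data': [b'\x00\xff']}, cls=JSONEncoder)
print(payload)                                    # {"data": ["AP8="]}
base64.b64decode(json.loads(payload)['data'][0])  # b'\x00\xff' again
```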
@@ -135,9 +131,8 @@ def load_pandas(
     Tuple[pd.Series[int], List[pd.Series[Any]]
 
     '''
-    if not has_pandas or not has_numpy:
-        raise RuntimeError('This operation requires pandas and numpy to be installed')
-
+    import numpy as np
+    import pandas as pd
     row_ids, cols = _load_vectors(colspec, data)
     index = pd.Series(row_ids, dtype=np.longlong)
     return index, \
@@ -172,9 +167,7 @@ def load_polars(
     Tuple[polars.Series[int], List[polars.Series[Any]]
 
     '''
-    if not has_polars or not has_numpy:
-        raise RuntimeError('This operation requires polars and numpy to be installed')
-
+    import polars as pl
     row_ids, cols = _load_vectors(colspec, data)
     return pl.Series(None, row_ids, dtype=pl.Int64), \
         [
@@ -205,9 +198,7 @@ def load_numpy(
     Tuple[np.ndarray[int], List[np.ndarray[Any]]
 
     '''
-    if not has_numpy:
-        raise RuntimeError('This operation requires numpy to be installed')
-
+    import numpy as np
     row_ids, cols = _load_vectors(colspec, data)
     return np.asarray(row_ids, dtype=np.longlong), \
         [
@@ -238,9 +229,7 @@ def load_arrow(
     Tuple[pyarrow.Array[int], List[pyarrow.Array[Any]]
 
     '''
-    if not has_pyarrow or not has_numpy:
-        raise RuntimeError('This operation requires pyarrow and numpy to be installed')
-
+    import pyarrow as pa
     row_ids, cols = _load_vectors(colspec, data)
     return pa.array(row_ids, type=pa.int64()), \
         [
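The four load_* hunks above apply the same refactor: module-level has_* flags and RuntimeError guards are dropped, and each optional dependency is imported inside the function that needs it, while the TYPE_CHECKING block keeps the string annotations resolvable. A minimal sketch of the pattern (the function name here is illustrative, not from the package):

```python
from typing import TYPE_CHECKING, List

if TYPE_CHECKING:
    # Seen only by type checkers; never executed at runtime.
    import numpy as np

def to_int64_array(values: List[int]) -> 'np.ndarray':
    # Deferred import: a missing numpy now raises ImportError at the
    # call site instead of a RuntimeError driven by a module-level flag.
    import numpy as np
    return np.asarray(values, dtype=np.int64)
```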
@@ -313,6 +302,10 @@ def _dump_vectors(
     return json.dumps(dict(data=data), cls=JSONEncoder).encode('utf-8')
 
 
+load_list = _load_vectors
+dump_list = _dump_vectors
+
+
 def dump_pandas(
     returns: List[int],
     row_ids: 'pd.Series[int]',
@@ -338,7 +338,7 @@ def main(argv: Optional[List[str]] = None) -> None:
         app_mode='collocated',
     )
 
-    funcs = app.
+    funcs = app.get_create_functions(replace=args.replace_existing)
     if not funcs:
         raise RuntimeError('no functions specified')
 
@@ -7,40 +7,37 @@ from typing import List
 from typing import Optional
 from typing import Sequence
 from typing import Tuple
+from typing import TYPE_CHECKING
 
 from ...config import get_option
+from ...mysql.constants import FIELD_TYPE as ft
 from ..dtypes import DEFAULT_VALUES
 from ..dtypes import NUMPY_TYPE_MAP
 from ..dtypes import PANDAS_TYPE_MAP
 from ..dtypes import POLARS_TYPE_MAP
 from ..dtypes import PYARROW_TYPE_MAP
 
-try:
-    import numpy as np
-    has_numpy = True
-except ImportError:
-    has_numpy = False
-
-try:
-    import polars as pl
-    has_polars = True
-except ImportError:
-    has_polars = False
-
-try:
-    import pandas as pd
-    has_pandas = True
-except ImportError:
-    has_pandas = False
-
-try:
-    import pyarrow as pa
-    import pyarrow.compute as pc
-    has_pyarrow = True
-except ImportError:
-    has_pyarrow = False
-
-from ...mysql.constants import FIELD_TYPE as ft
+if TYPE_CHECKING:
+    try:
+        import numpy as np
+    except ImportError:
+        pass
+    try:
+        import polars as pl
+    except ImportError:
+        pass
+    try:
+        import pandas as pd
+    except ImportError:
+        pass
+    try:
+        import pyarrow as pa
+    except ImportError:
+        pass
+    try:
+        import pyarrow.compute as pc  # noqa: F401
+    except ImportError:
+        pass
 
 has_accel = False
 try:
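This import block (apparently singlestoredb/functions/ext/rowdat_1.py, per the file list) gets the same TYPE_CHECKING treatment and now imports the MySQL field-type codes unconditionally. The colspec arguments threaded through this module pair a column name with one of those numeric codes; a hypothetical example, assuming the LONGLONG and VARCHAR constants of the PyMySQL-style FIELD_TYPE module:

```python
from singlestoredb.mysql.constants import FIELD_TYPE as ft

# Hypothetical column specification: (name, field-type code) pairs of
# the kind passed to the load/dump functions in this module.
colspec = [('id', ft.LONGLONG), ('name', ft.VARCHAR)]
```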
@@ -208,8 +205,8 @@ def _load_pandas(
     Tuple[pd.Series[int], List[Tuple[pd.Series[Any], pd.Series[bool]]]]
 
     '''
-    if not has_pandas or not has_numpy:
-        raise RuntimeError('pandas must be installed for this operation')
+    import numpy as np
+    import pandas as pd
 
     row_ids, cols = _load_vectors(colspec, data)
     index = pd.Series(row_ids)
@@ -244,8 +241,7 @@ def _load_polars(
     Tuple[polars.Series[int], List[polars.Series[Any]]]
 
     '''
-    if not has_polars:
-        raise RuntimeError('polars must be installed for this operation')
+    import polars as pl
 
     row_ids, cols = _load_vectors(colspec, data)
     return pl.Series(None, row_ids, dtype=pl.Int64), \
@@ -280,8 +276,7 @@ def _load_numpy(
     Tuple[np.ndarray[int], List[np.ndarray[Any]]]
 
     '''
-    if not has_numpy:
-        raise RuntimeError('numpy must be installed for this operation')
+    import numpy as np
 
     row_ids, cols = _load_vectors(colspec, data)
     return np.asarray(row_ids, dtype=np.int64), \
@@ -298,8 +293,8 @@ def _load_arrow(
     colspec: List[Tuple[str, int]],
     data: bytes,
 ) -> Tuple[
-    'pa.Array[pa.int64
-    List[Tuple['pa.Array[Any]', 'pa.Array[pa.bool_
+    'pa.Array[pa.int64]',
+    List[Tuple['pa.Array[Any]', 'pa.Array[pa.bool_]']],
 ]:
     '''
     Convert bytes in rowdat_1 format into rows of data.
@@ -316,8 +311,7 @@ def _load_arrow(
     Tuple[pyarrow.Array[int], List[pyarrow.Array[Any]]]
 
     '''
-    if not has_pyarrow:
-        raise RuntimeError('pyarrow must be installed for this operation')
+    import pyarrow as pa
 
     row_ids, cols = _load_vectors(colspec, data)
     return pa.array(row_ids, type=pa.int64()), \
@@ -488,9 +482,6 @@ def _dump_arrow(
     row_ids: 'pa.Array[int]',
     cols: List[Tuple['pa.Array[Any]', 'pa.Array[bool]']],
 ) -> bytes:
-    if not has_pyarrow:
-        raise RuntimeError('pyarrow must be installed for this operation')
-
     return _dump_vectors(
         returns,
         row_ids.tolist(),
@@ -503,9 +494,6 @@ def _dump_numpy(
     row_ids: 'np.typing.NDArray[np.int64]',
     cols: List[Tuple['np.typing.NDArray[Any]', 'np.typing.NDArray[np.bool_]']],
 ) -> bytes:
-    if not has_numpy:
-        raise RuntimeError('numpy must be installed for this operation')
-
     return _dump_vectors(
         returns,
         row_ids.tolist(),
@@ -518,9 +506,6 @@ def _dump_pandas(
     row_ids: 'pd.Series[np.int64]',
     cols: List[Tuple['pd.Series[Any]', 'pd.Series[np.bool_]']],
 ) -> bytes:
-    if not has_pandas or not has_numpy:
-        raise RuntimeError('pandas must be installed for this operation')
-
     return _dump_vectors(
         returns,
         row_ids.to_list(),
@@ -533,9 +518,6 @@ def _dump_polars(
     row_ids: 'pl.Series[pl.Int64]',
     cols: List[Tuple['pl.Series[Any]', 'pl.Series[pl.Boolean]']],
 ) -> bytes:
-    if not has_polars:
-        raise RuntimeError('polars must be installed for this operation')
-
    return _dump_vectors(
         returns,
         row_ids.to_list(),
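In the four _dump_* wrappers the guards are deleted with no replacement import: the dump path only calls list-conversion methods on the containers it is handed, so no optional package has to be imported at all. A rough sketch of why that works (dump_rows is a hypothetical distillation, not the package's function):

```python
import json

def dump_rows(row_ids, cols) -> bytes:
    # Duck typing: anything iterable (plain lists, numpy arrays, pandas
    # or polars Series, pyarrow arrays) satisfies this protocol.
    return json.dumps({
        'row_ids': list(row_ids),
        'cols': [list(col) for col in cols],
    }).encode('utf-8')

dump_rows([1, 2], [[10, 20], ['a', 'b']])
```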
@@ -550,8 +532,6 @@ def _load_numpy_accel(
     'np.typing.NDArray[np.int64]',
     List[Tuple['np.typing.NDArray[Any]', 'np.typing.NDArray[np.bool_]']],
 ]:
-    if not has_numpy:
-        raise RuntimeError('numpy must be installed for this operation')
     if not has_accel:
         raise RuntimeError('could not load SingleStoreDB extension')
 
@@ -563,8 +543,6 @@ def _dump_numpy_accel(
     row_ids: 'np.typing.NDArray[np.int64]',
     cols: List[Tuple['np.typing.NDArray[Any]', 'np.typing.NDArray[np.bool_]']],
 ) -> bytes:
-    if not has_numpy:
-        raise RuntimeError('numpy must be installed for this operation')
     if not has_accel:
         raise RuntimeError('could not load SingleStoreDB extension')
 
@@ -578,11 +556,12 @@ def _load_pandas_accel(
     'pd.Series[np.int64]',
     List[Tuple['pd.Series[Any]', 'pd.Series[np.bool_]']],
 ]:
-    if not has_pandas or not has_numpy:
-        raise RuntimeError('pandas must be installed for this operation')
     if not has_accel:
         raise RuntimeError('could not load SingleStoreDB extension')
 
+    import numpy as np
+    import pandas as pd
+
     numpy_ids, numpy_cols = _singlestoredb_accel.load_rowdat_1_numpy(colspec, data)
     cols = [
         (
@@ -599,8 +578,6 @@ def _dump_pandas_accel(
     row_ids: 'pd.Series[np.int64]',
     cols: List[Tuple['pd.Series[Any]', 'pd.Series[np.bool_]']],
 ) -> bytes:
-    if not has_pandas or not has_numpy:
-        raise RuntimeError('pandas must be installed for this operation')
     if not has_accel:
         raise RuntimeError('could not load SingleStoreDB extension')
 
@@ -622,11 +599,11 @@ def _load_polars_accel(
     'pl.Series[pl.Int64]',
     List[Tuple['pl.Series[Any]', 'pl.Series[pl.Boolean]']],
 ]:
-    if not has_polars:
-        raise RuntimeError('polars must be installed for this operation')
     if not has_accel:
         raise RuntimeError('could not load SingleStoreDB extension')
 
+    import polars as pl
+
     numpy_ids, numpy_cols = _singlestoredb_accel.load_rowdat_1_numpy(colspec, data)
     cols = [
         (
@@ -647,8 +624,6 @@ def _dump_polars_accel(
     row_ids: 'pl.Series[pl.Int64]',
     cols: List[Tuple['pl.Series[Any]', 'pl.Series[pl.Boolean]']],
 ) -> bytes:
-    if not has_polars:
-        raise RuntimeError('polars must be installed for this operation')
     if not has_accel:
         raise RuntimeError('could not load SingleStoreDB extension')
 
@@ -667,14 +642,14 @@ def _load_arrow_accel(
     colspec: List[Tuple[str, int]],
     data: bytes,
 ) -> Tuple[
-    'pa.Array[pa.int64
-    List[Tuple['pa.Array[Any]', 'pa.Array[pa.bool_
+    'pa.Array[pa.int64]',
+    List[Tuple['pa.Array[Any]', 'pa.Array[pa.bool_]']],
 ]:
-    if not has_pyarrow:
-        raise RuntimeError('pyarrow must be installed for this operation')
     if not has_accel:
         raise RuntimeError('could not load SingleStoreDB extension')
 
+    import pyarrow as pa
+
     numpy_ids, numpy_cols = _singlestoredb_accel.load_rowdat_1_numpy(colspec, data)
     cols = [
         (
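Each *_accel loader keeps its extension check first, then lazily imports the target frame library and wraps the numpy arrays produced by _singlestoredb_accel.load_rowdat_1_numpy. A sketch of the wrapping calls these hunks rely on, assuming all three libraries are installed:

```python
import numpy as np

ids = np.asarray([1, 2, 3], dtype=np.int64)

import pandas as pd
pd.Series(ids)                    # pandas wrapper around the numpy data

import polars as pl
pl.Series(None, ids)              # unnamed polars Series from numpy

import pyarrow as pa
pa.array(ids, type=pa.int64())    # arrow int64 array
```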
@@ -688,20 +663,21 @@ def _load_arrow_accel(
 
 def _create_arrow_mask(
     data: 'pa.Array[Any]',
-    mask: 'pa.Array[pa.bool_
-) -> 'pa.Array[pa.bool_
+    mask: 'pa.Array[pa.bool_]',
+) -> 'pa.Array[pa.bool_]':
+    import pyarrow.compute as pc  # noqa: F811
+
     if mask is None:
         return data.is_null().to_numpy(zero_copy_only=False)
+
     return pc.or_(data.is_null(), mask.is_null()).to_numpy(zero_copy_only=False)
 
 
 def _dump_arrow_accel(
     returns: List[int],
-    row_ids: 'pa.Array[pa.int64
-    cols: List[Tuple['pa.Array[Any]', 'pa.Array[pa.bool_
+    row_ids: 'pa.Array[pa.int64]',
+    cols: List[Tuple['pa.Array[Any]', 'pa.Array[pa.bool_]']],
 ) -> bytes:
-    if not has_pyarrow:
-        raise RuntimeError('pyarrow must be installed for this operation')
     if not has_accel:
         raise RuntimeError('could not load SingleStoreDB extension')
 
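_create_arrow_mask folds a column's own nulls together with an explicit mask column using pyarrow.compute. For example:

```python
import pyarrow as pa
import pyarrow.compute as pc

data = pa.array([1, None, 3])
mask = pa.array([None, True, True])

# An element is "missing" if either the value or its mask entry is
# null, mirroring the pc.or_ call in _create_arrow_mask above.
pc.or_(data.is_null(), mask.is_null()).to_numpy(zero_copy_only=False)
# array([ True,  True, False])
```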
@@ -720,6 +696,8 @@ def _dump_arrow_accel(
 if not has_accel:
     load = _load_accel = _load
     dump = _dump_accel = _dump
+    load_list = _load_vectors  # noqa: F811
+    dump_list = _dump_vectors  # noqa: F811
     load_pandas = _load_pandas_accel = _load_pandas  # noqa: F811
     dump_pandas = _dump_pandas_accel = _dump_pandas  # noqa: F811
     load_numpy = _load_numpy_accel = _load_numpy
@@ -734,6 +712,8 @@ else:
     _dump_accel = _singlestoredb_accel.dump_rowdat_1
     load = _load_accel
     dump = _dump_accel
+    load_list = _load_vectors
+    dump_list = _dump_vectors
     load_pandas = _load_pandas_accel
     dump_pandas = _dump_pandas_accel
     load_numpy = _load_numpy_accel