singlestoredb-1.0.4-py3-none-any.whl → singlestoredb-1.2.0-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- singlestoredb/__init__.py +1 -1
- singlestoredb/config.py +131 -0
- singlestoredb/connection.py +3 -0
- singlestoredb/converters.py +390 -0
- singlestoredb/functions/dtypes.py +5 -198
- singlestoredb/functions/ext/__init__.py +0 -1
- singlestoredb/functions/ext/asgi.py +671 -153
- singlestoredb/functions/ext/json.py +2 -2
- singlestoredb/functions/ext/mmap.py +174 -67
- singlestoredb/functions/ext/rowdat_1.py +2 -2
- singlestoredb/functions/ext/utils.py +169 -0
- singlestoredb/fusion/handler.py +115 -9
- singlestoredb/fusion/handlers/stage.py +246 -13
- singlestoredb/fusion/handlers/workspace.py +417 -14
- singlestoredb/fusion/registry.py +86 -1
- singlestoredb/http/connection.py +40 -2
- singlestoredb/management/__init__.py +1 -0
- singlestoredb/management/organization.py +4 -0
- singlestoredb/management/utils.py +2 -2
- singlestoredb/management/workspace.py +79 -6
- singlestoredb/mysql/connection.py +81 -0
- singlestoredb/mysql/constants/EXTENDED_TYPE.py +3 -0
- singlestoredb/mysql/constants/FIELD_TYPE.py +16 -0
- singlestoredb/mysql/constants/VECTOR_TYPE.py +6 -0
- singlestoredb/mysql/cursors.py +177 -4
- singlestoredb/mysql/protocol.py +50 -1
- singlestoredb/notebook/__init__.py +15 -0
- singlestoredb/notebook/_objects.py +212 -0
- singlestoredb/tests/test.sql +259 -0
- singlestoredb/tests/test_connection.py +1715 -133
- singlestoredb/tests/test_ext_func.py +2 -2
- singlestoredb/tests/test_ext_func_data.py +1 -1
- singlestoredb/utils/dtypes.py +205 -0
- singlestoredb/utils/results.py +367 -14
- {singlestoredb-1.0.4.dist-info → singlestoredb-1.2.0.dist-info}/METADATA +2 -1
- {singlestoredb-1.0.4.dist-info → singlestoredb-1.2.0.dist-info}/RECORD +40 -34
- {singlestoredb-1.0.4.dist-info → singlestoredb-1.2.0.dist-info}/LICENSE +0 -0
- {singlestoredb-1.0.4.dist-info → singlestoredb-1.2.0.dist-info}/WHEEL +0 -0
- {singlestoredb-1.0.4.dist-info → singlestoredb-1.2.0.dist-info}/entry_points.txt +0 -0
- {singlestoredb-1.0.4.dist-info → singlestoredb-1.2.0.dist-info}/top_level.txt +0 -0
singlestoredb/http/connection.py
CHANGED
@@ -62,6 +62,7 @@ from ..utils.debug import log_query
 from ..utils.mogrify import mogrify
 from ..utils.results import Description
 from ..utils.results import format_results
+from ..utils.results import get_schema
 from ..utils.results import Result
 
 
@@ -333,6 +334,7 @@ class Cursor(connection.Cursor):
         self._row_idx: int = -1
         self._result_idx: int = -1
         self._descriptions: List[List[Description]] = []
+        self._schemas: List[Dict[str, Any]] = []
         self.arraysize: int = get_option('results.arraysize')
         self.rowcount: int = 0
         self.lastrowid: Optional[int] = None
@@ -355,6 +357,14 @@ class Cursor(connection.Cursor):
             return self._descriptions[self._result_idx]
         return None
 
+    @property
+    def _schema(self) -> Optional[Any]:
+        if not self._schemas:
+            return None
+        if self._result_idx >= 0 and self._result_idx < len(self._schemas):
+            return self._schemas[self._result_idx]
+        return None
+
     def _post(self, path: str, *args: Any, **kwargs: Any) -> requests.Response:
         """
         Invoke a POST request on the HTTP connection.
@@ -460,6 +470,7 @@ class Cursor(connection.Cursor):
         self._results_type = results_type
 
         self._descriptions.append(list(mgmt_res.description))
+        self._schemas.append(get_schema(self._results_type, list(mgmt_res.description)))
         self._results.append(list(mgmt_res.rows))
         self.rowcount = len(self._results[-1])
 
@@ -487,6 +498,7 @@ class Cursor(connection.Cursor):
         is_callproc: bool = False,
     ) -> int:
         self._descriptions = []
+        self._schemas = []
         self._results = []
         self._pymy_results = []
         self._row_idx = -1
@@ -571,6 +583,20 @@ class Cursor(connection.Cursor):
             if isinstance(k, int):
                 http_converters[k] = v
 
+        # Make JSON a string for Arrow
+        if 'arrow' in self._results_type:
+            def json_to_str(x: Any) -> Optional[str]:
+                if x is None:
+                    return None
+                return json.dumps(x)
+            http_converters[245] = json_to_str
+
+        # Don't convert date/times in polars
+        elif 'polars' in self._results_type:
+            http_converters.pop(7, None)
+            http_converters.pop(10, None)
+            http_converters.pop(12, None)
+
         results = out['results']
 
         # Convert data to Python types
@@ -616,6 +642,7 @@ class Cursor(connection.Cursor):
                 )
                 pymy_res.append(PyMyField(col['name'], flags, charset))
             self._descriptions.append(description)
+            self._schemas.append(get_schema(self._results_type, description))
 
             rows = convert_rows(result.get('rows', []), convs)
 
@@ -659,6 +686,7 @@ class Cursor(connection.Cursor):
         rowcount = 0
         if args is not None and len(args) > 0:
             description = []
+            schema = {}
             # Detect dataframes
             if hasattr(args, 'itertuples'):
                 argiter = args.itertuples(index=False) # type: ignore
@@ -668,11 +696,14 @@ class Cursor(connection.Cursor):
                 self.execute(query, params)
                 if self._descriptions:
                     description = self._descriptions[-1]
+                if self._schemas:
+                    schema = self._schemas[-1]
                 if self._rows is not None:
                     results.append(self._rows)
                 rowcount += self.rowcount
             self._results = results
             self._descriptions = [description for _ in range(len(results))]
+            self._schemas = [schema for _ in range(len(results))]
         else:
             self.execute(query)
             rowcount += self.rowcount
@@ -721,6 +752,7 @@ class Cursor(connection.Cursor):
             self._results_type,
             self.description or [],
             out, single=True,
+            schema=self._schema,
         )
 
     def fetchmany(
@@ -752,7 +784,10 @@ class Cursor(connection.Cursor):
         size = max(int(size), 1)
         out = self._rows[self._row_idx:self._row_idx+size]
         self._row_idx += len(out)
-        return format_results(
+        return format_results(
+            self._results_type, self.description or [],
+            out, schema=self._schema,
+        )
 
     def fetchall(self) -> Result:
         """
@@ -774,7 +809,10 @@ class Cursor(connection.Cursor):
             return tuple()
         out = list(self._rows[self._row_idx:])
         self._row_idx = len(out)
-        return format_results(
+        return format_results(
+            self._results_type, self.description or [],
+            out, schema=self._schema,
+        )
 
     def nextset(self) -> Optional[bool]:
         """Skip to the next available result set."""
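
The added _schemas bookkeeping feeds format_results() a per-result schema so the HTTP (Data API) cursor can build the new result formats. A minimal usage sketch, assuming a Data API endpoint and placeholder credentials; the results_type values named here are the ones this release wires through get_schema():

    import singlestoredb as s2

    # Placeholder host/credentials; the https:// scheme selects the Data API (HTTP) driver.
    conn = s2.connect(
        'https://user:password@example-host:443/dbname',
        results_type='polars',   # or 'arrow', 'numpy', 'pandas'
    )
    cur = conn.cursor()
    cur.execute('SELECT 1 AS a, NOW() AS b')
    df = cur.fetchall()          # formatted by format_results() using the stored schema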

singlestoredb/management/organization.py
CHANGED

@@ -121,6 +121,10 @@ class Organization(object):
 
     """
 
+    id: str
+    name: str
+    firewall_ranges: List[str]
+
     def __init__(self, id: str, name: str, firewall_ranges: List[str]):
         """Use :attr:`WorkspaceManager.organization` instead."""
         #: Unique ID of the organization

singlestoredb/management/utils.py
CHANGED

@@ -294,7 +294,7 @@ def snake_to_camel_dict(
         return None
     out = {}
     for k, v in s.items():
-        if isinstance(
+        if isinstance(v, Mapping):
             out[str(snake_to_camel(k))] = snake_to_camel_dict(v, cap_first=cap_first)
         else:
             out[str(snake_to_camel(k))] = v
@@ -307,7 +307,7 @@ def camel_to_snake_dict(s: Optional[Mapping[str, Any]]) -> Optional[Dict[str, An
         return None
     out = {}
     for k, v in s.items():
-        if isinstance(
+        if isinstance(v, Mapping):
             out[str(camel_to_snake(k))] = camel_to_snake_dict(v)
         else:
             out[str(camel_to_snake(k))] = v
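
The isinstance(v, Mapping) checks make both helpers recurse into nested mappings. Illustrative behavior only; the keys and values below are made up:

    from singlestoredb.management.utils import camel_to_snake_dict, snake_to_camel_dict

    camel_to_snake_dict({'autoSuspend': {'suspendType': 'IDLE', 'idleAfterSeconds': 900}})
    # -> {'auto_suspend': {'suspend_type': 'IDLE', 'idle_after_seconds': 900}}

    snake_to_camel_dict({'auto_suspend': {'suspend_type': 'IDLE'}})
    # -> {'autoSuspend': {'suspendType': 'IDLE'}}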

singlestoredb/management/workspace.py
CHANGED

@@ -34,9 +34,53 @@ from .utils import ttl_property
 from .utils import vars_to_str
 
 
+def get_organization() -> Organization:
+    """Get the organization."""
+    return manage_workspaces().organization
+
+
 def get_secret(name: str) -> str:
     """Get a secret from the organization."""
-    return
+    return get_organization().get_secret(name).value
+
+
+def get_workspace_group(
+    workspace_group: Optional[Union[WorkspaceGroup, str]] = None,
+) -> WorkspaceGroup:
+    """Get the stage for the workspace group."""
+    if isinstance(workspace_group, WorkspaceGroup):
+        return workspace_group
+    elif workspace_group:
+        return manage_workspaces().workspace_groups[workspace_group]
+    elif 'SINGLESTOREDB_WORKSPACE_GROUP' in os.environ:
+        return manage_workspaces().workspace_groups[
+            os.environ['SINGLESTOREDB_WORKSPACE_GROUP']
+        ]
+    raise RuntimeError('no workspace group specified')
+
+
+def get_stage(
+    workspace_group: Optional[Union[WorkspaceGroup, str]] = None,
+) -> Stage:
+    """Get the stage for the workspace group."""
+    return get_workspace_group(workspace_group).stage
+
+
+def get_workspace(
+    workspace_group: Optional[Union[WorkspaceGroup, str]] = None,
+    workspace: Optional[Union[Workspace, str]] = None,
+) -> Workspace:
+    """Get the workspaces for a workspace_group."""
+    if isinstance(workspace, Workspace):
+        return workspace
+    wg = get_workspace_group(workspace_group)
+    if workspace:
+        return wg.workspaces[workspace]
+    elif 'SINGLESTOREDB_WORKSPACE' in os.environ:
+        return wg.workspaces[
+            os.environ['SINGLESTOREDB_WORKSPACE']
+        ]
+    raise RuntimeError('no workspace group specified')
 
 
 class StageObject(object):
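
The new module-level helpers resolve a workspace group, workspace, stage, or secret either from explicit arguments or from the SINGLESTOREDB_WORKSPACE_GROUP / SINGLESTOREDB_WORKSPACE environment variables. A usage sketch, assuming a Management API key is already configured for manage_workspaces() and using placeholder names:

    import os
    from singlestoredb.management.workspace import get_secret, get_stage, get_workspace

    os.environ['SINGLESTOREDB_WORKSPACE_GROUP'] = 'My Workspace Group'   # placeholder
    os.environ['SINGLESTOREDB_WORKSPACE'] = 'my-workspace'               # placeholder

    stage = get_stage()               # Stage of the resolved workspace group
    ws = get_workspace()              # Workspace resolved from the env vars
    token = get_secret('MY_SECRET')   # value of an organization secret (placeholder name)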
@@ -926,6 +970,21 @@ class Workspace(object):
 
     """
 
+    name: str
+    id: str
+    group_id: str
+    size: str
+    state: str
+    created_at: Optional[datetime.datetime]
+    terminated_at: Optional[datetime.datetime]
+    endpoint: Optional[str]
+    auto_suspend: Optional[Dict[str, Any]]
+    cache_config: Optional[int]
+    deployment_type: Optional[str]
+    resume_attachments: Optional[List[Dict[str, Any]]]
+    scaling_progress: Optional[int]
+    last_resumed_at: Optional[datetime.datetime]
+
     def __init__(
         self,
         name: str,
@@ -939,9 +998,9 @@ class Workspace(object):
         auto_suspend: Optional[Dict[str, Any]] = None,
         cache_config: Optional[int] = None,
         deployment_type: Optional[str] = None,
-        resume_attachments: Optional[Dict[str, Any]] = None,
+        resume_attachments: Optional[List[Dict[str, Any]]] = None,
         scaling_progress: Optional[int] = None,
-        last_resumed_at: Optional[str] = None,
+        last_resumed_at: Optional[Union[str, datetime.datetime]] = None,
     ):
         #: Name of the workspace
         self.name = name
@@ -981,7 +1040,11 @@ class Workspace(object):
         self.deployment_type = deployment_type
 
         #: Database attachments
-        self.resume_attachments =
+        self.resume_attachments = [
+            camel_to_snake_dict(x) # type: ignore
+            for x in resume_attachments or []
+            if x is not None
+        ]
 
         #: Current progress percentage for scaling the workspace
         self.scaling_progress = scaling_progress
@@ -1248,8 +1311,18 @@ class WorkspaceGroup(object):
 
     """
 
+    name: str
+    id: str
+    created_at: Optional[datetime.datetime]
+    region: Optional[Region]
+    firewall_ranges: List[str]
+    terminated_at: Optional[datetime.datetime]
+    allow_all_traffic: bool
+
     def __init__(
-        self,
+        self,
+        name: str,
+        id: str,
         created_at: Union[str, datetime.datetime],
         region: Optional[Region],
         firewall_ranges: List[str],
@@ -1275,7 +1348,7 @@ class WorkspaceGroup(object):
         self.terminated_at = to_datetime(terminated_at)
 
         #: Should all traffic be allowed?
-        self.allow_all_traffic = allow_all_traffic
+        self.allow_all_traffic = allow_all_traffic or False
 
         self._manager: Optional[WorkspaceManager] = None
 
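
With the annotations above, Workspace and WorkspaceGroup attributes are plain typed fields, and resume_attachments is now a list of snake_cased dicts rather than a single mapping. A read-only sketch, assuming wg.workspaces iterates Workspace objects and using a placeholder group name:

    from singlestoredb.management.workspace import get_workspace_group

    wg = get_workspace_group('My Workspace Group')   # placeholder name
    for w in wg.workspaces:
        print(w.name, w.state, w.endpoint, w.deployment_type)
        for att in w.resume_attachments or []:
            print('  attachment:', att)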

singlestoredb/mysql/connection.py
CHANGED

@@ -29,12 +29,28 @@ from .cursors import (
     DictCursorSV,
     NamedtupleCursor,
     NamedtupleCursorSV,
+    ArrowCursor,
+    ArrowCursorSV,
+    NumpyCursor,
+    NumpyCursorSV,
+    PandasCursor,
+    PandasCursorSV,
+    PolarsCursor,
+    PolarsCursorSV,
     SSCursor,
     SSCursorSV,
     SSDictCursor,
     SSDictCursorSV,
     SSNamedtupleCursor,
     SSNamedtupleCursorSV,
+    SSArrowCursor,
+    SSArrowCursorSV,
+    SSNumpyCursor,
+    SSNumpyCursorSV,
+    SSPandasCursor,
+    SSPandasCursorSV,
+    SSPolarsCursor,
+    SSPolarsCursorSV,
 )
 from .optionfile import Parser
 from .protocol import (
@@ -243,6 +259,8 @@ class Connection(BaseConnection):
         uploading data?
     track_env : bool, optional
         Should the connection track the SINGLESTOREDB_URL environment variable?
+    enable_extended_data_types : bool, optional
+        Should extended data types (BSON, vector) be enabled?
 
     See `Connection <https://www.python.org/dev/peps/pep-0249/#connection-objects>`_
     in the specification.
@@ -312,6 +330,7 @@ class Connection(BaseConnection):
         inf_as_null=None,
         encoding_errors='strict',
         track_env=False,
+        enable_extended_data_types=True,
     ):
         BaseConnection.__init__(**dict(locals()))
 
@@ -443,6 +462,14 @@ class Connection(BaseConnection):
                 self.cursorclass = DictCursor
             elif 'namedtuple' in self.results_type:
                 self.cursorclass = NamedtupleCursor
+            elif 'numpy' in self.results_type:
+                self.cursorclass = NumpyCursor
+            elif 'arrow' in self.results_type:
+                self.cursorclass = ArrowCursor
+            elif 'pandas' in self.results_type:
+                self.cursorclass = PandasCursor
+            elif 'polars' in self.results_type:
+                self.cursorclass = PolarsCursor
             else:
                 self.cursorclass = Cursor
         else:
@@ -450,6 +477,14 @@ class Connection(BaseConnection):
                 self.cursorclass = SSDictCursor
             elif 'namedtuple' in self.results_type:
                 self.cursorclass = SSNamedtupleCursor
+            elif 'numpy' in self.results_type:
+                self.cursorclass = SSNumpyCursor
+            elif 'arrow' in self.results_type:
+                self.cursorclass = SSArrowCursor
+            elif 'pandas' in self.results_type:
+                self.cursorclass = SSPandasCursor
+            elif 'polars' in self.results_type:
+                self.cursorclass = SSPolarsCursor
             else:
                 self.cursorclass = SSCursor
 
@@ -487,6 +522,30 @@ class Connection(BaseConnection):
         elif self.cursorclass is SSNamedtupleCursor:
             self.cursorclass = SSNamedtupleCursorSV
             self.results_type = 'namedtuples'
+        elif self.cursorclass is NumpyCursor:
+            self.cursorclass = NumpyCursorSV
+            self.results_type = 'numpy'
+        elif self.cursorclass is SSNumpyCursor:
+            self.cursorclass = SSNumpyCursorSV
+            self.results_type = 'numpy'
+        elif self.cursorclass is ArrowCursor:
+            self.cursorclass = ArrowCursorSV
+            self.results_type = 'arrow'
+        elif self.cursorclass is SSArrowCursor:
+            self.cursorclass = SSArrowCursorSV
+            self.results_type = 'arrow'
+        elif self.cursorclass is PandasCursor:
+            self.cursorclass = PandasCursorSV
+            self.results_type = 'pandas'
+        elif self.cursorclass is SSPandasCursor:
+            self.cursorclass = SSPandasCursorSV
+            self.results_type = 'pandas'
+        elif self.cursorclass is PolarsCursor:
+            self.cursorclass = PolarsCursorSV
+            self.results_type = 'polars'
+        elif self.cursorclass is SSPolarsCursor:
+            self.cursorclass = SSPolarsCursorSV
+            self.results_type = 'polars'
 
         self._result = None
         self._affected_rows = 0
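
The same results_type values now select cursor classes on the classic MySQL-protocol connection as well, with the SS* classes covering the unbuffered variants (as in PyMySQL) and the *SV classes substituted later in this block. A sketch with placeholder credentials and a hypothetical table:

    import singlestoredb as s2

    conn = s2.connect(
        host='127.0.0.1', port=3306, user='root', password='',   # placeholders
        database='mydb',
        results_type='arrow',        # maps to ArrowCursor / SSArrowCursor here
    )
    cur = conn.cursor()
    cur.execute('SELECT id, name FROM t LIMIT 10')               # hypothetical table
    tbl = cur.fetchall()             # e.g. a pyarrow.Table for results_type='arrow'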
@@ -498,9 +557,22 @@ class Connection(BaseConnection):
         if conv is None:
             conv = converters.conversions
 
+        conv = conv.copy()
+
         self.parse_json = parse_json
         self.invalid_values = (invalid_values or {}).copy()
 
+        # Disable JSON parsing for Arrow
+        if self.results_type in ['arrow']:
+            conv[245] = None
+            self.parse_json = False
+
+        # Disable date/time parsing for polars; let polars do the parsing
+        elif self.results_type in ['polars']:
+            conv[7] = None
+            conv[10] = None
+            conv[12] = None
+
         # Need for MySQLdb compatibility.
         self.encoders = {k: v for (k, v) in conv.items() if type(k) is not int}
         self.decoders = {k: v for (k, v) in conv.items() if type(k) is int}
@@ -540,6 +612,7 @@ class Connection(BaseConnection):
 
         self._in_sync = False
         self._track_env = bool(track_env) or self.host == 'singlestore.com'
+        self._enable_extended_data_types = enable_extended_data_types
 
         if defer_connect or self._track_env:
             self._sock = None
@@ -1001,6 +1074,14 @@ class Connection(BaseConnection):
             c.execute('SET sql_mode=%s', (self.sql_mode,))
             c.close()
 
+        if self._enable_extended_data_types:
+            c = self.cursor()
+            try:
+                c.execute('SET @@SESSION.enable_extended_types_metadata=on')
+            except self.OperationalError:
+                pass
+            c.close()
+
         if self.init_command is not None:
             c = self.cursor()
             c.execute(self.init_command)
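
Extended type metadata is negotiated at connect time by issuing SET @@SESSION.enable_extended_types_metadata=on, and the step is silently skipped when the server rejects it. Opting out looks roughly like this (the DSN is a placeholder):

    import singlestoredb as s2

    # Keep BSON / VECTOR columns on the server-default wire types for this connection.
    conn = s2.connect(
        'user:password@127.0.0.1:3306/mydb',       # placeholder DSN
        enable_extended_data_types=False,
    )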

singlestoredb/mysql/constants/FIELD_TYPE.py
CHANGED

@@ -30,3 +30,19 @@ GEOMETRY = 255
 CHAR = TINY
 INTERVAL = ENUM
 BOOL = TINY
+
+# SingleStoreDB-specific.
+# Only enabled when enable_extended_types_metadata=1 in the server.
+BSON = 1001
+FLOAT32_VECTOR_JSON = 2001
+FLOAT64_VECTOR_JSON = 2002
+INT8_VECTOR_JSON = 2003
+INT16_VECTOR_JSON = 2004
+INT32_VECTOR_JSON = 2005
+INT64_VECTOR_JSON = 2006
+FLOAT32_VECTOR = 3001
+FLOAT64_VECTOR = 3002
+INT8_VECTOR = 3003
+INT16_VECTOR = 3004
+INT32_VECTOR = 3005
+INT64_VECTOR = 3006