singlestoredb 1.1.0__cp38-abi3-win32.whl → 1.2.0__cp38-abi3-win32.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of singlestoredb might be problematic.

Files changed (30)
  1. _singlestoredb_accel.pyd +0 -0
  2. singlestoredb/__init__.py +1 -1
  3. singlestoredb/config.py +6 -0
  4. singlestoredb/connection.py +3 -0
  5. singlestoredb/converters.py +390 -0
  6. singlestoredb/functions/ext/asgi.py +7 -1
  7. singlestoredb/fusion/handler.py +14 -8
  8. singlestoredb/fusion/handlers/stage.py +167 -84
  9. singlestoredb/fusion/handlers/workspace.py +250 -108
  10. singlestoredb/fusion/registry.py +27 -10
  11. singlestoredb/management/__init__.py +1 -0
  12. singlestoredb/management/organization.py +4 -0
  13. singlestoredb/management/utils.py +2 -2
  14. singlestoredb/management/workspace.py +79 -6
  15. singlestoredb/mysql/connection.py +12 -0
  16. singlestoredb/mysql/constants/EXTENDED_TYPE.py +3 -0
  17. singlestoredb/mysql/constants/FIELD_TYPE.py +16 -0
  18. singlestoredb/mysql/constants/VECTOR_TYPE.py +6 -0
  19. singlestoredb/mysql/cursors.py +5 -4
  20. singlestoredb/mysql/protocol.py +50 -1
  21. singlestoredb/notebook/__init__.py +15 -0
  22. singlestoredb/notebook/_objects.py +212 -0
  23. singlestoredb/tests/test.sql +49 -0
  24. singlestoredb/tests/test_connection.py +174 -0
  25. {singlestoredb-1.1.0.dist-info → singlestoredb-1.2.0.dist-info}/METADATA +1 -1
  26. {singlestoredb-1.1.0.dist-info → singlestoredb-1.2.0.dist-info}/RECORD +30 -26
  27. {singlestoredb-1.1.0.dist-info → singlestoredb-1.2.0.dist-info}/LICENSE +0 -0
  28. {singlestoredb-1.1.0.dist-info → singlestoredb-1.2.0.dist-info}/WHEEL +0 -0
  29. {singlestoredb-1.1.0.dist-info → singlestoredb-1.2.0.dist-info}/entry_points.txt +0 -0
  30. {singlestoredb-1.1.0.dist-info → singlestoredb-1.2.0.dist-info}/top_level.txt +0 -0

singlestoredb/management/workspace.py
@@ -34,9 +34,53 @@ from .utils import ttl_property
 from .utils import vars_to_str
 
 
+def get_organization() -> Organization:
+    """Get the organization."""
+    return manage_workspaces().organization
+
+
 def get_secret(name: str) -> str:
     """Get a secret from the organization."""
-    return manage_workspaces().organization.get_secret(name).value
+    return get_organization().get_secret(name).value
+
+
+def get_workspace_group(
+    workspace_group: Optional[Union[WorkspaceGroup, str]] = None,
+) -> WorkspaceGroup:
+    """Get the stage for the workspace group."""
+    if isinstance(workspace_group, WorkspaceGroup):
+        return workspace_group
+    elif workspace_group:
+        return manage_workspaces().workspace_groups[workspace_group]
+    elif 'SINGLESTOREDB_WORKSPACE_GROUP' in os.environ:
+        return manage_workspaces().workspace_groups[
+            os.environ['SINGLESTOREDB_WORKSPACE_GROUP']
+        ]
+    raise RuntimeError('no workspace group specified')
+
+
+def get_stage(
+    workspace_group: Optional[Union[WorkspaceGroup, str]] = None,
+) -> Stage:
+    """Get the stage for the workspace group."""
+    return get_workspace_group(workspace_group).stage
+
+
+def get_workspace(
+    workspace_group: Optional[Union[WorkspaceGroup, str]] = None,
+    workspace: Optional[Union[Workspace, str]] = None,
+) -> Workspace:
+    """Get the workspaces for a workspace_group."""
+    if isinstance(workspace, Workspace):
+        return workspace
+    wg = get_workspace_group(workspace_group)
+    if workspace:
+        return wg.workspaces[workspace]
+    elif 'SINGLESTOREDB_WORKSPACE' in os.environ:
+        return wg.workspaces[
+            os.environ['SINGLESTOREDB_WORKSPACE']
+        ]
+    raise RuntimeError('no workspace group specified')
 
 
 class StageObject(object):
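
The new module-level helpers resolve their target from an explicit argument first and fall back to environment variables. A rough usage sketch (the group name is a placeholder, and management API credentials are assumed to be configured):

    # Hypothetical usage of the new helpers in singlestoredb.management.workspace.
    import os
    from singlestoredb.management import workspace as ws

    # An explicit name (or WorkspaceGroup object) takes precedence.
    group = ws.get_workspace_group('example-group')      # placeholder name

    # With no argument, SINGLESTOREDB_WORKSPACE_GROUP is consulted.
    os.environ['SINGLESTOREDB_WORKSPACE_GROUP'] = 'example-group'
    stage = ws.get_stage()                  # Stage of the resolved group
    secret = ws.get_secret('MY_SECRET')     # now routed through get_organization()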

singlestoredb/management/workspace.py
@@ -926,6 +970,21 @@ class Workspace(object):
 
     """
 
+    name: str
+    id: str
+    group_id: str
+    size: str
+    state: str
+    created_at: Optional[datetime.datetime]
+    terminated_at: Optional[datetime.datetime]
+    endpoint: Optional[str]
+    auto_suspend: Optional[Dict[str, Any]]
+    cache_config: Optional[int]
+    deployment_type: Optional[str]
+    resume_attachments: Optional[List[Dict[str, Any]]]
+    scaling_progress: Optional[int]
+    last_resumed_at: Optional[datetime.datetime]
+
     def __init__(
         self,
         name: str,

singlestoredb/management/workspace.py
@@ -939,9 +998,9 @@ class Workspace(object):
         auto_suspend: Optional[Dict[str, Any]] = None,
         cache_config: Optional[int] = None,
         deployment_type: Optional[str] = None,
-        resume_attachments: Optional[Dict[str, Any]] = None,
+        resume_attachments: Optional[List[Dict[str, Any]]] = None,
         scaling_progress: Optional[int] = None,
-        last_resumed_at: Optional[str] = None,
+        last_resumed_at: Optional[Union[str, datetime.datetime]] = None,
     ):
         #: Name of the workspace
         self.name = name

singlestoredb/management/workspace.py
@@ -981,7 +1040,11 @@ class Workspace(object):
         self.deployment_type = deployment_type
 
         #: Database attachments
-        self.resume_attachments = camel_to_snake_dict(resume_attachments)
+        self.resume_attachments = [
+            camel_to_snake_dict(x)  # type: ignore
+            for x in resume_attachments or []
+            if x is not None
+        ]
 
         #: Current progress percentage for scaling the workspace
         self.scaling_progress = scaling_progress

singlestoredb/management/workspace.py
@@ -1248,8 +1311,18 @@ class WorkspaceGroup(object):
 
     """
 
+    name: str
+    id: str
+    created_at: Optional[datetime.datetime]
+    region: Optional[Region]
+    firewall_ranges: List[str]
+    terminated_at: Optional[datetime.datetime]
+    allow_all_traffic: bool
+
     def __init__(
-        self, name: str, id: str,
+        self,
+        name: str,
+        id: str,
         created_at: Union[str, datetime.datetime],
         region: Optional[Region],
         firewall_ranges: List[str],
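
The class-level annotations added to Workspace and WorkspaceGroup above make the declared attribute names discoverable without constructing an instance, which the notebook proxy objects further down iterate over. A small illustrative check (import path taken from the package layout):

    # Illustrative only: enumerate the attributes declared on Workspace.
    from singlestoredb.management.workspace import Workspace

    print(sorted(Workspace.__annotations__))
    # e.g. ['auto_suspend', 'cache_config', 'created_at', 'deployment_type', ...]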

singlestoredb/management/workspace.py
@@ -1275,7 +1348,7 @@ class WorkspaceGroup(object):
         self.terminated_at = to_datetime(terminated_at)
 
         #: Should all traffic be allowed?
-        self.allow_all_traffic = allow_all_traffic
+        self.allow_all_traffic = allow_all_traffic or False
 
         self._manager: Optional[WorkspaceManager] = None
 

singlestoredb/mysql/connection.py
@@ -259,6 +259,8 @@ class Connection(BaseConnection):
         uploading data?
     track_env : bool, optional
        Should the connection track the SINGLESTOREDB_URL environment variable?
+    enable_extended_data_types : bool, optional
+        Should extended data types (BSON, vector) be enabled?
 
     See `Connection <https://www.python.org/dev/peps/pep-0249/#connection-objects>`_
     in the specification.

singlestoredb/mysql/connection.py
@@ -328,6 +330,7 @@ class Connection(BaseConnection):
         inf_as_null=None,
         encoding_errors='strict',
         track_env=False,
+        enable_extended_data_types=True,
     ):
         BaseConnection.__init__(**dict(locals()))
 

singlestoredb/mysql/connection.py
@@ -609,6 +612,7 @@
 
         self._in_sync = False
         self._track_env = bool(track_env) or self.host == 'singlestore.com'
+        self._enable_extended_data_types = enable_extended_data_types
 
         if defer_connect or self._track_env:
             self._sock = None

singlestoredb/mysql/connection.py
@@ -1070,6 +1074,14 @@
             c.execute('SET sql_mode=%s', (self.sql_mode,))
             c.close()
 
+        if self._enable_extended_data_types:
+            c = self.cursor()
+            try:
+                c.execute('SET @@SESSION.enable_extended_types_metadata=on')
+            except self.OperationalError:
+                pass
+            c.close()
+
         if self.init_command is not None:
             c = self.cursor()
             c.execute(self.init_command)
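
Taken together, the connection changes add an enable_extended_data_types option (on by default) that issues SET @@SESSION.enable_extended_types_metadata=on after connecting and silently ignores servers that reject it. A minimal sketch, assuming the option is also exposed through the top-level connect() as the companion change to singlestoredb/connection.py suggests (the DSN is a placeholder):

    import singlestoredb as s2

    # Extended type metadata (BSON, VECTOR) is requested by default in 1.2.0.
    conn = s2.connect('user:password@localhost:3306/dbname')

    # Opting out keeps the pre-1.2.0 behavior of reporting base types only.
    legacy = s2.connect(
        'user:password@localhost:3306/dbname',
        enable_extended_data_types=False,
    )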

singlestoredb/mysql/constants/EXTENDED_TYPE.py
@@ -0,0 +1,3 @@
+NONE = 0
+BSON = 1
+VECTOR = 2

singlestoredb/mysql/constants/FIELD_TYPE.py
@@ -30,3 +30,19 @@ GEOMETRY = 255
 CHAR = TINY
 INTERVAL = ENUM
 BOOL = TINY
+
+# SingleStoreDB-specific.
+# Only enabled when enable_extended_types_metadata=1 in the server.
+BSON = 1001
+FLOAT32_VECTOR_JSON = 2001
+FLOAT64_VECTOR_JSON = 2002
+INT8_VECTOR_JSON = 2003
+INT16_VECTOR_JSON = 2004
+INT32_VECTOR_JSON = 2005
+INT64_VECTOR_JSON = 2006
+FLOAT32_VECTOR = 3001
+FLOAT64_VECTOR = 3002
+INT8_VECTOR = 3003
+INT16_VECTOR = 3004
+INT32_VECTOR = 3005
+INT64_VECTOR = 3006
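
Once extended metadata is enabled, these codes appear as the type_code of the affected columns, so client code can branch on them. A hedged sketch (placeholder DSN; the f32_vectors table comes from the test schema at the end of this diff):

    import singlestoredb as s2
    from singlestoredb.mysql.constants import FIELD_TYPE

    conn = s2.connect('user:password@localhost:3306/dbname')
    cur = conn.cursor()
    cur.execute('SELECT a FROM f32_vectors LIMIT 1')
    type_code = cur.description[0][1]   # PEP 249: (name, type_code, ...)
    if type_code == FIELD_TYPE.FLOAT32_VECTOR:
        print('binary float32 vector column')
    elif type_code == FIELD_TYPE.FLOAT32_VECTOR_JSON:
        print('float32 vector column returned as JSON text')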

singlestoredb/mysql/constants/VECTOR_TYPE.py
@@ -0,0 +1,6 @@
+FLOAT32 = 1
+FLOAT64 = 2
+INT8 = 3
+INT16 = 4
+INT32 = 5
+INT64 = 6

singlestoredb/mysql/cursors.py
@@ -417,10 +417,11 @@ class Cursor(BaseCursor):
         if self.rowcount == 18446744073709551615:
             self.rowcount = -1
         self._description = result.description
-        self._format_schema = get_schema(
-            self.connection._results_type,
-            result.description,
-        )
+        if self._description:
+            self._format_schema = get_schema(
+                self.connection._results_type,
+                result.description,
+            )
         self.lastrowid = result.insert_id
         self._rows = result.rows
 

singlestoredb/mysql/protocol.py
@@ -8,8 +8,10 @@ from . import err
 from ..config import get_option
 from ..utils.results import Description
 from .charset import MBLENGTH
+from .constants import EXTENDED_TYPE
 from .constants import FIELD_TYPE
 from .constants import SERVER_STATUS
+from .constants import VECTOR_TYPE
 
 
 DEBUG = get_option('debug.connection')

singlestoredb/mysql/protocol.py
@@ -264,16 +266,63 @@ class FieldDescriptorPacket(MysqlPacket):
         self.org_table = self.read_length_coded_string().decode(encoding)
         self.name = self.read_length_coded_string().decode(encoding)
         self.org_name = self.read_length_coded_string().decode(encoding)
+        n_bytes = 0
         (
+            n_bytes,
             self.charsetnr,
             self.length,
            self.type_code,
             self.flags,
             self.scale,
-        ) = self.read_struct('<xHIBHBxx')
+        ) = self.read_struct('<BHIBHBxx')
+
         # 'default' is a length coded binary and is still in the buffer?
         # not used for normal result sets...
 
+        # Extended types
+        if n_bytes > 12:
+            ext_type_code = self.read_uint8()
+            if ext_type_code == EXTENDED_TYPE.NONE:
+                pass
+            elif ext_type_code == EXTENDED_TYPE.BSON:
+                self.type_code = FIELD_TYPE.BSON
+            elif ext_type_code == EXTENDED_TYPE.VECTOR:
+                (self.length, vec_type) = self.read_struct('<IB')
+                if vec_type == VECTOR_TYPE.FLOAT32:
+                    if self.charsetnr == 63:
+                        self.type_code = FIELD_TYPE.FLOAT32_VECTOR
+                    else:
+                        self.type_code = FIELD_TYPE.FLOAT32_VECTOR_JSON
+                elif vec_type == VECTOR_TYPE.FLOAT64:
+                    if self.charsetnr == 63:
+                        self.type_code = FIELD_TYPE.FLOAT64_VECTOR
+                    else:
+                        self.type_code = FIELD_TYPE.FLOAT64_VECTOR_JSON
+                elif vec_type == VECTOR_TYPE.INT8:
+                    if self.charsetnr == 63:
+                        self.type_code = FIELD_TYPE.INT8_VECTOR
+                    else:
+                        self.type_code = FIELD_TYPE.INT8_VECTOR_JSON
+                elif vec_type == VECTOR_TYPE.INT16:
+                    if self.charsetnr == 63:
+                        self.type_code = FIELD_TYPE.INT16_VECTOR
+                    else:
+                        self.type_code = FIELD_TYPE.INT16_VECTOR_JSON
+                elif vec_type == VECTOR_TYPE.INT32:
+                    if self.charsetnr == 63:
+                        self.type_code = FIELD_TYPE.INT32_VECTOR
+                    else:
+                        self.type_code = FIELD_TYPE.INT32_VECTOR_JSON
+                elif vec_type == VECTOR_TYPE.INT64:
+                    if self.charsetnr == 63:
+                        self.type_code = FIELD_TYPE.INT64_VECTOR
+                    else:
+                        self.type_code = FIELD_TYPE.INT64_VECTOR_JSON
+                else:
+                    raise TypeError(f'unrecognized vector data type: {vec_type}')
+            else:
+                raise TypeError(f'unrecognized extended data type: {ext_type_code}')
+
     def description(self):
         """Provides a 7-item tuple compatible with the Python PEP249 DB Spec."""
         return Description(
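
Both struct layouts describe the same 13-byte fixed-length block of the column definition packet; the change reads the leading length byte (previously skipped with x) so the parser can tell whether extended-type bytes follow. A quick check of the sizes and of the decision, with a made-up length value:

    import struct

    assert struct.calcsize('<xHIBHBxx') == 13   # pre-1.2.0: first byte skipped
    assert struct.calcsize('<BHIBHBxx') == 13   # 1.2.0: first byte read as n_bytes

    # n_bytes > 12 means the server appended extended type information
    # (an EXTENDED_TYPE code, and for vectors a length plus a VECTOR_TYPE code).
    n_bytes = 18                                # hypothetical value from a packet
    print(n_bytes > 12)                         # True -> parse extended metadata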

singlestoredb/notebook/__init__.py
@@ -0,0 +1,15 @@
+#!/usr/bin/env python
+import os as _os
+import warnings as _warnings
+
+from ._objects import organization  # noqa: F401
+from ._objects import secrets  # noqa: F401
+from ._objects import stage  # noqa: F401
+from ._objects import workspace  # noqa: F401
+from ._objects import workspace_group  # noqa: F401
+
+if 'SINGLESTOREDB_ORGANIZATION' not in _os.environ:
+    _warnings.warn(
+        'This package is intended for use in the SingleStoreDB notebook environment',
+        RuntimeWarning,
+    )
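
A hedged sketch of how the new subpackage is meant to be used inside the SingleStoreDB notebook environment (the secret name is a placeholder); outside that environment the import simply emits the RuntimeWarning above:

    from singlestoredb import notebook

    value = notebook.secrets['MY_API_KEY']   # same as notebook.secrets.MY_API_KEY
    print(notebook.organization)             # proxies the current organization
    print(notebook.workspace_group)          # proxies the currently selected group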

singlestoredb/notebook/_objects.py
@@ -0,0 +1,212 @@
+#!/usr/bin/env python
+import functools
+from typing import Any
+
+from ..management import workspace as _ws
+
+
+class Secrets(object):
+    """Wrapper for accessing secrets as object attributes."""
+
+    def __getattr__(self, name: str) -> str:
+        if name.startswith('_ipython') or name.startswith('_repr_'):
+            raise AttributeError(name)
+        return _ws.get_secret(name)
+
+    def __getitem__(self, name: str) -> str:
+        return _ws.get_secret(name)
+
+
+class Stage(object):
+
+    def __new__(cls, *args: Any, **kwargs: Any) -> Any:
+        # We are remapping the methods and attributes here so that
+        # autocomplete still works in Jupyter / IPython, but we
+        # bypass the real method / attribute calls and apply them
+        # to the currently selected stage.
+        for name in [x for x in dir(_ws.Stage) if not x.startswith('_')]:
+            if name in ['from_dict', 'refresh', 'update']:
+                continue
+            attr = getattr(_ws.Stage, name)
+
+            def make_wrapper(m: str, is_method: bool = False) -> Any:
+                if is_method:
+                    def wrap(self: Stage, *a: Any, **kw: Any) -> Any:
+                        return getattr(_ws.get_stage(), m)(*a, **kw)
+                    return functools.update_wrapper(wrap, attr)
+                else:
+                    def wrap(self: Stage, *a: Any, **kw: Any) -> Any:
+                        return getattr(_ws.get_stage(), m)
+                    return property(functools.update_wrapper(wrap, attr))
+
+            setattr(cls, name, make_wrapper(m=name, is_method=callable(attr)))
+
+        for name in [
+            x for x in _ws.Stage.__annotations__.keys()
+            if not x.startswith('_')
+        ]:
+
+            def make_wrapper(m: str, is_method: bool = False) -> Any:
+                def wrap(self: Stage) -> Any:
+                    return getattr(_ws.get_stage(), m)
+                return property(functools.update_wrapper(wrap, attr))
+
+            setattr(cls, name, make_wrapper(m=name))
+
+        cls.__doc__ = _ws.Stage.__doc__
+
+        return super().__new__(cls, *args, **kwargs)
+
+
+class WorkspaceGroup(object):
+
+    def __new__(cls, *args: Any, **kwargs: Any) -> Any:
+        # We are remapping the methods and attributes here so that
+        # autocomplete still works in Jupyter / IPython, but we
+        # bypass the real method / attribute calls and apply them
+        # to the currently selected workspace group.
+        for name in [x for x in dir(_ws.WorkspaceGroup) if not x.startswith('_')]:
+            if name in ['from_dict', 'refresh', 'update']:
+                continue
+
+            attr = getattr(_ws.WorkspaceGroup, name)
+
+            def make_wrapper(m: str, is_method: bool = False) -> Any:
+                if is_method:
+                    def wrap(self: WorkspaceGroup, *a: Any, **kw: Any) -> Any:
+                        return getattr(_ws.get_workspace_group(), m)(*a, **kw)
+                    return functools.update_wrapper(wrap, attr)
+                else:
+                    def wrap(self: WorkspaceGroup, *a: Any, **kw: Any) -> Any:
+                        return getattr(_ws.get_workspace_group(), m)
+                    return property(functools.update_wrapper(wrap, attr))
+
+            setattr(cls, name, make_wrapper(m=name, is_method=callable(attr)))
+
+        for name in [
+            x for x in _ws.WorkspaceGroup.__annotations__.keys()
+            if not x.startswith('_')
+        ]:
+
+            def make_wrapper(m: str, is_method: bool = False) -> Any:
+                def wrap(self: WorkspaceGroup) -> Any:
+                    return getattr(_ws.get_workspace_group(), m)
+                return property(functools.update_wrapper(wrap, attr))
+
+            setattr(cls, name, make_wrapper(m=name))
+
+        cls.__doc__ = _ws.WorkspaceGroup.__doc__
+
+        return super().__new__(cls, *args, **kwargs)
+
+    def __str__(self) -> str:
+        return _ws.get_workspace_group().__str__()
+
+    def __repr__(self) -> str:
+        return _ws.get_workspace_group().__repr__()
+
+
+class Workspace(object):
+
+    def __new__(cls, *args: Any, **kwargs: Any) -> Any:
+        # We are remapping the methods and attributes here so that
+        # autocomplete still works in Jupyter / IPython, but we
+        # bypass the real method / attribute calls and apply them
+        # to the currently selected workspace.
+        for name in [x for x in dir(_ws.Workspace) if not x.startswith('_')]:
+            if name in ['from_dict', 'refresh', 'update']:
+                continue
+
+            attr = getattr(_ws.Workspace, name)
+
+            def make_wrapper(m: str, is_method: bool = False) -> Any:
+                if is_method:
+                    def wrap(self: Workspace, *a: Any, **kw: Any) -> Any:
+                        return getattr(_ws.get_workspace(), m)(*a, **kw)
+                    return functools.update_wrapper(wrap, attr)
+                else:
+                    def wrap(self: Workspace, *a: Any, **kw: Any) -> Any:
+                        return getattr(_ws.get_workspace(), m)
+                    return property(functools.update_wrapper(wrap, attr))
+
+            setattr(cls, name, make_wrapper(m=name, is_method=callable(attr)))
+
+        for name in [
+            x for x in _ws.Workspace.__annotations__.keys()
+            if not x.startswith('_')
+        ]:
+
+            def make_wrapper(m: str, is_method: bool = False) -> Any:
+                def wrap(self: Workspace) -> Any:
+                    return getattr(_ws.get_workspace(), m)
+                return property(functools.update_wrapper(wrap, attr))
+
+            setattr(cls, name, make_wrapper(m=name))
+
+        cls.__doc__ = _ws.Workspace.__doc__
+
+        return super().__new__(cls, *args, **kwargs)
+
+    def __str__(self) -> str:
+        return _ws.get_workspace().__str__()
+
+    def __repr__(self) -> str:
+        return _ws.get_workspace().__repr__()
+
+
+class Organization(object):
+
+    def __new__(cls, *args: Any, **kwargs: Any) -> Any:
+        # We are remapping the methods and attributes here so that
+        # autocomplete still works in Jupyter / IPython, but we
+        # bypass the real method / attribute calls and apply them
+        # to the currently selected organization.
+        for name in [x for x in dir(_ws.Organization) if not x.startswith('_')]:
+            if name in ['from_dict', 'refresh', 'update']:
+                continue
+
+            attr = getattr(_ws.Organization, name)
+
+            def make_wrapper(m: str, is_method: bool = False) -> Any:
+                if is_method:
+                    def wrap(self: Organization, *a: Any, **kw: Any) -> Any:
+                        return getattr(_ws.get_organization(), m)(*a, **kw)
+                    return functools.update_wrapper(wrap, attr)
+                else:
+                    def wrap(self: Organization, *a: Any, **kw: Any) -> Any:
+                        return getattr(_ws.get_organization(), m)
+                    return property(functools.update_wrapper(wrap, attr))
+
+            setattr(cls, name, make_wrapper(m=name, is_method=callable(attr)))
+
+        for name in [
+            x for x in _ws.Organization.__annotations__.keys()
+            if not x.startswith('_')
+        ]:
+
+            def make_wrapper(m: str, is_method: bool = False) -> Any:
+                def wrap(self: Organization) -> Any:
+                    return getattr(_ws.get_organization(), m)
+                return property(functools.update_wrapper(wrap, attr))
+
+            setattr(cls, name, make_wrapper(m=name))
+
+        cls.__doc__ = _ws.Organization.__doc__
+
+        return super().__new__(cls, *args, **kwargs)
+
+    def __str__(self) -> str:
+        return _ws.get_organization().__str__()
+
+    def __repr__(self) -> str:
+        return _ws.get_organization().__repr__()
+
+
+secrets = Secrets()
+stage = Stage()
+organization = Organization()
+workspace_group = WorkspaceGroup()
+workspace = Workspace()
+
+
+__all__ = ['secrets', 'stage', 'workspace', 'workspace_group', 'organization']
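
The proxy classes above copy the public names of the management classes onto module-level singletons so IPython autocomplete works, while every access is resolved lazily against the currently selected object. A minimal standalone sketch of the same forwarding idea, with illustrative names only:

    import functools


    class _Real:
        """Stand-in for a management object such as Stage."""

        def upload(self, path: str) -> str:
            return f'uploaded {path}'


    def _current() -> _Real:
        # In the library this would be _ws.get_stage(), etc.
        return _Real()


    class Proxy:
        pass


    for name in [x for x in dir(_Real) if not x.startswith('_')]:
        attr = getattr(_Real, name)

        def make_wrapper(m: str) -> object:
            def wrap(self, *a, **kw):
                return getattr(_current(), m)(*a, **kw)
            return functools.update_wrapper(wrap, attr)

        setattr(Proxy, name, make_wrapper(name))

    print(Proxy().upload('data.csv'))   # -> 'uploaded data.csv'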

singlestoredb/tests/test.sql
@@ -606,4 +606,53 @@ INSERT INTO `badutf8` VALUES ('🥷🧙👻.eth');
 INSERT INTO `badutf8` VALUES ('🥒rick.eth');
 
 
+CREATE TABLE IF NOT EXISTS `f32_vectors` (
+    id INT(11),
+    a VECTOR(3)
+);
+INSERT INTO f32_vectors VALUES(1, '[0.267261237,0.534522474,0.801783681]');
+INSERT INTO f32_vectors VALUES(2, '[0.371390671,0.557085991,0.742781341]');
+INSERT INTO f32_vectors VALUES(3, '[-0.424264073,-0.565685451,0.707106829]');
+
+CREATE TABLE IF NOT EXISTS `f64_vectors` (
+    id INT(11),
+    a VECTOR(3, F64)
+);
+INSERT INTO f64_vectors VALUES(1, '[0.267261237,0.534522474,0.801783681]');
+INSERT INTO f64_vectors VALUES(2, '[0.371390671,0.557085991,0.742781341]');
+INSERT INTO f64_vectors VALUES(3, '[-0.424264073,-0.565685451,0.707106829]');
+
+CREATE TABLE `i8_vectors` (
+    id INT(11),
+    a VECTOR(3, I8)
+);
+INSERT INTO i8_vectors VALUES(1, '[1, 2, 3]');
+INSERT INTO i8_vectors VALUES(2, '[4, 5, 6]');
+INSERT INTO i8_vectors VALUES(3, '[-1, -4, 8]');
+
+CREATE TABLE `i16_vectors` (
+    id INT(11),
+    a VECTOR(3, I16)
+);
+INSERT INTO i16_vectors VALUES(1, '[1, 2, 3]');
+INSERT INTO i16_vectors VALUES(2, '[4, 5, 6]');
+INSERT INTO i16_vectors VALUES(3, '[-1, -4, 8]');
+
+CREATE TABLE `i32_vectors` (
+    id INT(11),
+    a VECTOR(3, I32)
+);
+INSERT INTO i32_vectors VALUES(1, '[1, 2, 3]');
+INSERT INTO i32_vectors VALUES(2, '[4, 5, 6]');
+INSERT INTO i32_vectors VALUES(3, '[-1, -4, 8]');
+
+CREATE TABLE `i64_vectors` (
+    id INT(11),
+    a VECTOR(3, I64)
+);
+INSERT INTO i64_vectors VALUES(1, '[1, 2, 3]');
+INSERT INTO i64_vectors VALUES(2, '[4, 5, 6]');
+INSERT INTO i64_vectors VALUES(3, '[-1, -4, 8]');
+
+
 COMMIT;
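
These tables presumably back the vector tests added in test_connection.py. As a rough end-to-end sketch (placeholder DSN; the concrete Python representation of the returned vectors depends on the configured result converters), a query against one of them might look like:

    import singlestoredb as s2

    conn = s2.connect('user:password@localhost:3306/dbname')
    cur = conn.cursor()
    cur.execute('SELECT id, a FROM f32_vectors ORDER BY id')
    for row_id, vec in cur.fetchall():
        # With extended types enabled the driver knows `a` is a float32 vector;
        # how it is materialized in Python depends on the converters in use.
        print(row_id, vec)
    cur.close()
    conn.close()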