singlestoredb 1.13.1__py3-none-any.whl → 1.14.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.


singlestoredb/__init__.py CHANGED
@@ -13,7 +13,7 @@ Examples
 
 """
 
-__version__ = '1.13.1'
+__version__ = '1.14.1'
 
 from typing import Any
 
@@ -31,6 +31,14 @@ from .types import (
     Date, Time, Timestamp, DateFromTicks, TimeFromTicks, TimestampFromTicks,
     Binary, STRING, BINARY, NUMBER, DATETIME, ROWID,
 )
+from .vectorstore import (
+    vector_db, IndexInterface, IndexList, IndexModel, MatchTypedDict,
+    Metric, IndexStatsTypedDict, NamespaceStatsTypedDict, Vector,
+    VectorDictMetadataValue, VectorMetadataTypedDict, VectorTuple,
+    VectorTupleWithMetadata, DeletionProtection, AndFilter, EqFilter,
+    ExactMatchFilter, FilterTypedDict, GteFilter, GtFilter, InFilter,
+    LteFilter, LtFilter, NeFilter, NinFilter, OrFilter, SimpleFilter,
+)
 
 
 #
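
With this change, the vector store API surface is re-exported at the top level of the package, so the same names can be imported either from singlestoredb or from the underlying singlestore-vectorstore package. A minimal sketch:

    # After 1.14.1, these come straight from the top-level package:
    from singlestoredb import vector_db, IndexModel, Metric, FilterTypedDict

    # ...and remain importable from the underlying package as well:
    from vectorstore import IndexModel, Metric, FilterTypedDict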
singlestoredb/connection.py CHANGED
@@ -1,6 +1,7 @@
 #!/usr/bin/env python
 """SingleStoreDB connections and cursors."""
 import abc
+import functools
 import inspect
 import io
 import queue
@@ -1288,6 +1289,14 @@ class Connection(metaclass=abc.ABCMeta):
         """Access server properties managed by the SHOW statement."""
         return ShowAccessor(self)
 
+    @functools.cached_property
+    def vector_db(self) -> Any:
+        """
+        Get vectorstore API accessor
+        """
+        from vectorstore import VectorDB
+        return VectorDB(connection=self)
+
 
 #
 # NOTE: When adding parameters to this function, you should always
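
The cached property gives every connection a lazily created, memoized VectorDB bound to that connection. A minimal usage sketch with placeholder credentials, mirroring the bundled tests:

    import singlestoredb as s2

    with s2.connect('user:password@localhost:3306/vectors') as conn:
        db = conn.vector_db              # created on first access, then cached
        index = db.create_index(name='docs', dimension=4)
        assert db.has_index('docs')
        assert conn.vector_db is db      # functools.cached_property returns the same object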
singlestoredb/fusion/handlers/export.py CHANGED
@@ -1,10 +1,12 @@
 #!/usr/bin/env python3
+import datetime
 import json
 from typing import Any
 from typing import Dict
 from typing import Optional
 
 from .. import result
+from ...management.export import _get_exports
 from ...management.export import ExportService
 from ...management.export import ExportStatus
 from ..handler import SQLHandler
@@ -104,7 +106,100 @@ class CreateClusterIdentity(SQLHandler):
 CreateClusterIdentity.register(overwrite=True)
 
 
-class CreateExport(SQLHandler):
+def _start_export(params: Dict[str, Any]) -> Optional[FusionSQLResult]:
+    # From table
+    if isinstance(params['from_table'], str):
+        from_database = None
+        from_table = params['from_table']
+    else:
+        from_database, from_table = params['from_table']
+
+    # Catalog
+    catalog_config = json.loads(params['catalog'].get('catalog_config', '{}') or '{}')
+    catalog_creds = json.loads(params['catalog'].get('catalog_creds', '{}') or '{}')
+
+    # Storage
+    storage_config = json.loads(params['storage'].get('link_config', '{}') or '{}')
+    storage_creds = json.loads(params['storage'].get('link_creds', '{}') or '{}')
+
+    storage_config['provider'] = 'S3'
+
+    wsg = get_workspace_group({})
+
+    if from_database is None:
+        raise ValueError('database name must be specified for source table')
+
+    if wsg._manager is None:
+        raise TypeError('no workspace manager is associated with workspace group')
+
+    partition_by = []
+    if params['partition_by']:
+        for key in params['partition_by']:
+            transform = key['partition_key']['transform']['col_transform']
+            part = {}
+            part['transform'] = transform[0].lower()
+            part['name'] = transform[-1]['transform_col']
+            partition_by.append(part)
+
+    order_by = []
+    if params['order_by'] and params['order_by']['by']:
+        for key in params['order_by']['by']:
+            transform = key['transform']['col_transform']
+            order = {}
+            order['transform'] = transform[0].lower()
+            order['name'] = transform[-1]['transform_col']
+            order['direction'] = 'ascending'
+            order['null_order'] = 'nulls_first'
+            if key.get('direction'):
+                if 'desc' in key['direction'].lower():
+                    order['direction'] = 'descending'
+            if key.get('null_order'):
+                if 'last' in key['null_order'].lower():
+                    order['null_order'] = 'nulls_last'
+            order_by.append(order)
+
+    # Refresh interval
+    refresh_interval_delta = None
+    refresh_interval = params.get('refresh_interval', None)
+    if refresh_interval is not None:
+        value = int(refresh_interval['refresh_interval_value'])
+        time_unit = refresh_interval['refresh_interval_time_unit'].upper()
+        if value < 0:
+            raise ValueError('refresh interval must be greater than 0')
+        if time_unit == 'SECONDS':
+            refresh_interval_delta = datetime.timedelta(seconds=int(value))
+        elif time_unit == 'MINUTES':
+            refresh_interval_delta = datetime.timedelta(minutes=int(value))
+        elif time_unit == 'HOURS':
+            refresh_interval_delta = datetime.timedelta(hours=int(value))
+        elif time_unit == 'DAYS':
+            refresh_interval_delta = datetime.timedelta(days=int(value))
+        else:
+            raise ValueError('invalid refresh interval time unit')
+
+    out = ExportService(
+        wsg,
+        from_database,
+        from_table,
+        dict(**catalog_config, **catalog_creds),
+        dict(**storage_config, **storage_creds),
+        columns=None,
+        partition_by=partition_by or None,
+        order_by=order_by or None,
+        properties=json.loads(params['properties']) if params['properties'] else None,
+        incremental=params.get('incremental', False),
+        refresh_interval=int(refresh_interval_delta.total_seconds())
+        if refresh_interval_delta is not None else None,
+    ).start()
+
+    res = FusionSQLResult()
+    res.add_field('ExportID', result.STRING)
+    res.set_rows([(out.export_id,)])
+
+    return res
+
+
+class StartExport(SQLHandler):
     """
     START EXPORT
         from_table
@@ -150,7 +245,7 @@ class CreateExport(SQLHandler):
 
     Description
     -----------
-    Create an export configuration.
+    Start an export.
 
     Arguments
     ---------
@@ -180,7 +275,6 @@ class CreateExport(SQLHandler):
        LINK S3 CONFIG '{
            "region": "us-east-1",
            "endpoint_url": "s3://bucket-name"
-
        }'
        ;
 
@@ -189,77 +283,129 @@ class CreateExport(SQLHandler):
     _enabled = False
 
     def run(self, params: Dict[str, Any]) -> Optional[FusionSQLResult]:
-        # From table
-        if isinstance(params['from_table'], str):
-            from_database = None
-            from_table = params['from_table']
-        else:
-            from_database, from_table = params['from_table']
+        return _start_export(params)
 
-        # Catalog
-        catalog_config = json.loads(params['catalog'].get('catalog_config', '{}') or '{}')
-        catalog_creds = json.loads(params['catalog'].get('catalog_creds', '{}') or '{}')
 
-        # Storage
-        storage_config = json.loads(params['storage'].get('link_config', '{}') or '{}')
-        storage_creds = json.loads(params['storage'].get('link_creds', '{}') or '{}')
+StartExport.register(overwrite=True)
 
-        storage_config['provider'] = 'S3'
 
-        wsg = get_workspace_group({})
+class StartIncrementalExport(SQLHandler):
+    """
+    START INCREMENTAL EXPORT
+        from_table
+        catalog
+        storage
+        [ partition_by ]
+        [ order_by ]
+        [ properties ]
+        [ refresh_interval ]
+    ;
 
-        if from_database is None:
-            raise ValueError('database name must be specified for source table')
+    # From table
+    from_table = FROM <table>
 
-        if wsg._manager is None:
-            raise TypeError('no workspace manager is associated with workspace group')
+    # Transforms
+    _col_transform = { VOID | IDENTITY | YEAR | MONTH | DAY | HOUR } ( _transform_col )
+    _transform_col = <column>
+    _arg_transform = { BUCKET | TRUNCATE } ( _transform_col <comma> _transform_arg )
+    _transform_arg = <integer>
+    transform = { _col_transform | _arg_transform }
 
-        partition_by = []
-        if params['partition_by']:
-            for key in params['partition_by']:
-                transform = key['partition_key']['transform']['col_transform']
-                part = {}
-                part['transform'] = transform[0].lower()
-                part['name'] = transform[-1]['transform_col']
-                partition_by.append(part)
-
-        order_by = []
-        if params['order_by'] and params['order_by']['by']:
-            for key in params['order_by']['by']:
-                transform = key['transform']['col_transform']
-                order = {}
-                order['transform'] = transform[0].lower()
-                order['name'] = transform[-1]['transform_col']
-                order['direction'] = 'ascending'
-                order['null_order'] = 'nulls_first'
-                if key.get('direction'):
-                    if 'desc' in key['direction'].lower():
-                        order['direction'] = 'descending'
-                if key.get('null_order'):
-                    if 'last' in key['null_order'].lower():
-                        order['null_order'] = 'nulls_last'
-                order_by.append(order)
+    # Partitions
+    partition_by = PARTITION BY partition_key,...
+    partition_key = transform
 
-        out = ExportService(
-            wsg,
-            from_database,
-            from_table,
-            dict(**catalog_config, **catalog_creds),
-            dict(**storage_config, **storage_creds),
-            columns=None,
-            partition_by=partition_by or None,
-            order_by=order_by or None,
-            properties=json.loads(params['properties']) if params['properties'] else None,
-        ).start()
+    # Sort order
+    order_by = ORDER BY sort_key,...
+    sort_key = transform [ direction ] [ null_order ]
+    direction = { ASC | DESC | ASCENDING | DESCENDING }
+    null_order = { NULLS_FIRST | NULLS_LAST }
 
-        res = FusionSQLResult()
-        res.add_field('ExportID', result.STRING)
-        res.set_rows([(out.export_id,)])
+    # Properties
+    properties = PROPERTIES '<json>'
 
-        return res
+    # Catolog
+    catalog = CATALOG [ _catalog_config ] [ _catalog_creds ]
+    _catalog_config = CONFIG '<catalog-config>'
+    _catalog_creds = CREDENTIALS '<catalog-creds>'
+
+    # Storage
+    storage = LINK [ _link_config ] [ _link_creds ]
+    _link_config = S3 CONFIG '<link-config>'
+    _link_creds = CREDENTIALS '<link-creds>'
+
+    # Refresh interval
+    refresh_interval = REFRESH INTERVAL _refresh_interval_value _refresh_interval_time_unit
+    _refresh_interval_value = <integer>
+    _refresh_interval_time_unit = { SECONDS | MINUTES | HOURS | DAYS }
+
+    Description
+    -----------
+    Start an incremental export.
+
+    Arguments
+    ---------
+    * ``<catalog-config>`` and ``<catalog-creds>``: The catalog configuration.
+    * ``<link-config>`` and ``<link-creds>``: The storage link configuration.
+
+    Remarks
+    -------
+    * ``FROM <table>`` specifies the SingleStore table to export. The same name will
+      be used for the exported table.
+    * ``CATALOG`` specifies the details of the catalog to connect to.
+    * ``LINK`` specifies the details of the data storage to connect to.
+    * ``REFRESH INTERVAL`` specifies the interval for refreshing the
+      incremental export. The default is 1 day.
+
+    Examples
+    --------
+    The following statement starts an export operation with the given
+    catalog and link configurations. The source table to export is
+    named "customer_data"::
 
+        START INCREMENTAL EXPORT FROM my_db.customer_data
+            CATALOG CONFIG '{
+                "catalog_type": "GLUE",
+                "table_format": "ICEBERG",
+                "catalog_id": "13983498723498",
+                "catalog_region": "us-east-1"
+            }'
+            LINK S3 CONFIG '{
+                "region": "us-east-1",
+                "endpoint_url": "s3://bucket-name"
+            }'
+            REFRESH INTERVAL 24 HOURS
+        ;
+
+    """ # noqa
+
+    _enabled = False
+
+    def run(self, params: Dict[str, Any]) -> Optional[FusionSQLResult]:
+        params['incremental'] = True
+        return _start_export(params)
+
+
+StartIncrementalExport.register(overwrite=True)
+
+
+def _format_status(export_id: str, status: ExportStatus) -> Optional[FusionSQLResult]:
+    """Return the status of an export operation."""
+    info = status._info()
 
-CreateExport.register(overwrite=True)
+    res = FusionSQLResult()
+    res.add_field('ExportID', result.STRING)
+    res.add_field('Status', result.STRING)
+    res.add_field('Message', result.STRING)
+    res.set_rows([
+        (
+            export_id,
+            info.get('status', 'Unknown'),
+            info.get('statusMsg', ''),
+        ),
+    ])
+
+    return res
 
 
 class ShowExport(SQLHandler):
@@ -275,9 +421,29 @@ class ShowExport(SQLHandler):
 
     def run(self, params: Dict[str, Any]) -> Optional[FusionSQLResult]:
         wsg = get_workspace_group({})
-        out = ExportStatus(params['export_id'], wsg)
+        return _format_status(
+            params['export_id'], ExportStatus(params['export_id'], wsg),
+        )
+
+
+ShowExport.register(overwrite=True)
+
+
+class ShowExports(SQLHandler):
+    """
+    SHOW EXPORTS [ scope ];
+
+    # Location of the export
+    scope = FOR '<scope>'
+
+    """
+
+    _enabled = False
+
+    def run(self, params: Dict[str, Any]) -> Optional[FusionSQLResult]:
+        wsg = get_workspace_group({})
 
-        status = out._info()
+        exports = _get_exports(wsg, params.get('scope', 'all'))
 
         res = FusionSQLResult()
         res.add_field('ExportID', result.STRING)
@@ -285,13 +451,75 @@ class ShowExport(SQLHandler):
         res.add_field('Message', result.STRING)
         res.set_rows([
             (
-                params['export_id'],
-                status.get('status', 'Unknown'),
-                status.get('statusMsg', ''),
-            ),
+                info['egressID'],
+                info.get('status', 'Unknown'),
+                info.get('statusMsg', ''),
+            )
+            for info in [x._info() for x in exports]
         ])
 
         return res
 
 
-ShowExport.register(overwrite=True)
+ShowExports.register(overwrite=True)
+
+
+class SuspendExport(SQLHandler):
+    """
+    SUSPEND EXPORT export_id;
+
+    # ID of export
+    export_id = '<export-id>'
+
+    """
+
+    _enabled = False
+
+    def run(self, params: Dict[str, Any]) -> Optional[FusionSQLResult]:
+        wsg = get_workspace_group({})
+        service = ExportService.from_export_id(wsg, params['export_id'])
+        return _format_status(params['export_id'], service.suspend())
+
+
+SuspendExport.register(overwrite=True)
+
+
+class ResumeExport(SQLHandler):
+    """
+    RESUME EXPORT export_id;
+
+    # ID of export
+    export_id = '<export-id>'
+
+    """
+
+    _enabled = False
+
+    def run(self, params: Dict[str, Any]) -> Optional[FusionSQLResult]:
+        wsg = get_workspace_group({})
+        service = ExportService.from_export_id(wsg, params['export_id'])
+        return _format_status(params['export_id'], service.resume())
+
+
+ResumeExport.register(overwrite=True)
+
+
+class DropExport(SQLHandler):
+    """
+    DROP EXPORT export_id;
+
+    # ID of export
+    export_id = '<export-id>'
+
+    """
+
+    _enabled = False
+
+    def run(self, params: Dict[str, Any]) -> Optional[FusionSQLResult]:
+        wsg = get_workspace_group({})
        service = ExportService.from_export_id(wsg, params['export_id'])
        service.drop()
        return None
+
+
+DropExport.register(overwrite=True)
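
Together these handlers give exports a full lifecycle in Fusion SQL: START, SHOW, SUSPEND, RESUME, DROP. A sketch of driving that lifecycle from Python, with placeholder credentials and configuration; note that the handlers above are registered with _enabled = False, so they may not be available in every environment, and the fetch shape shown assumes the default tuple results:

    import singlestoredb as s2

    with s2.connect('user:password@host:3306/my_db') as conn:
        with conn.cursor() as cur:
            cur.execute("""
                START EXPORT FROM my_db.customer_data
                CATALOG CONFIG '{"catalog_type": "GLUE", "table_format": "ICEBERG"}'
                LINK S3 CONFIG '{"region": "us-east-1"}';
            """)
            export_id = cur.fetchone()[0]                   # handler returns one ExportID row

            cur.execute(f"SUSPEND EXPORT '{export_id}';")   # pause the export
            cur.execute(f"RESUME EXPORT '{export_id}';")    # pick it back up
            cur.execute("SHOW EXPORTS;")                    # list export statuses
            cur.execute(f"DROP EXPORT '{export_id}';")      # remove it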
singlestoredb/http/connection.py CHANGED
@@ -569,8 +569,10 @@ class Cursor(connection.Cursor):
 
        if res.status_code >= 400:
            if res.text:
-                if re.match(r'^Error\s+\d+:', res.text):
-                    code, msg = res.text.split(':', 1)
+                m = re.match(r'^Error\s+(\d+).*?:', res.text)
+                if m:
+                    code = m.group(1)
+                    msg = res.text.split(':', 1)[-1]
                    icode = int(code.split()[-1])
                else:
                    icode = res.status_code
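
The old pattern required the colon to come immediately after the error number, so a message with trailing detail, e.g. a MySQL-style 'Error 1064 (42000): ...', fell through to the generic HTTP status code. A quick sketch of the difference, using hypothetical message text:

    import re

    text = 'Error 1064 (42000): You have an error in your SQL syntax'

    # Old pattern: no match, since ':' does not directly follow the number.
    print(re.match(r'^Error\s+\d+:', text))      # None

    # New pattern: tolerates detail before the colon and captures the code.
    m = re.match(r'^Error\s+(\d+).*?:', text)
    print(m.group(1))                            # '1064'
    print(text.split(':', 1)[-1].strip())        # the message portion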
singlestoredb/management/export.py CHANGED
@@ -27,6 +27,9 @@ class ExportService(object):
     partition_by: Optional[List[Dict[str, str]]]
     order_by: Optional[List[Dict[str, Dict[str, str]]]]
     properties: Optional[Dict[str, Any]]
+    incremental: bool
+    refresh_interval: Optional[int]
+    export_id: Optional[str]
 
     def __init__(
         self,
@@ -38,6 +41,8 @@ class ExportService(object):
         columns: Optional[List[str]] = None,
         partition_by: Optional[List[Dict[str, str]]] = None,
         order_by: Optional[List[Dict[str, Dict[str, str]]]] = None,
+        incremental: bool = False,
+        refresh_interval: Optional[int] = None,
         properties: Optional[Dict[str, Any]] = None,
     ):
         #: Workspace group
@@ -68,8 +73,30 @@ class ExportService(object):
         self.order_by = order_by or None
         self.properties = properties or None
 
+        self.incremental = incremental
+        self.refresh_interval = refresh_interval
+
+        self.export_id = None
+
         self._manager: Optional[WorkspaceManager] = workspace_group._manager
 
+    @classmethod
+    def from_export_id(
+        self,
+        workspace_group: WorkspaceGroup,
+        export_id: str,
+    ) -> ExportService:
+        """Create export service from export ID."""
+        out = ExportService(
+            workspace_group=workspace_group,
+            database='',
+            table='',
+            catalog_info={},
+            storage_info={},
+        )
+        out.export_id = export_id
+        return out
+
     def __str__(self) -> str:
         """Return string representation."""
         return vars_to_str(self)
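
The from_export_id constructor rehydrates a service from an existing export ID, so the new suspend/resume/drop/status calls can be used without re-specifying the source table. A minimal sketch, assuming `wsg` is an existing WorkspaceGroup and 'abc123' is a placeholder ID:

    from singlestoredb.management.export import ExportService

    svc = ExportService.from_export_id(wsg, 'abc123')

    svc.suspend()           # pause the export
    svc.resume()            # pick it back up
    print(svc.status())     # current ExportStatus
    # svc.start() would raise here: database/table are empty on a rehydrated service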
@@ -98,6 +125,11 @@ class ExportService(object):
 
     def start(self, tags: Optional[List[str]] = None) -> 'ExportStatus':
         """Start the export process."""
+        if not self.table or not self.database:
+            raise ManagementError(
+                msg='Database and table must be set before starting the export.',
+            )
+
         if self._manager is None:
             raise ManagementError(
                 msg='No workspace manager is associated with this object.',
@@ -122,11 +154,87 @@ class ExportService(object):
                 partitionSpec=partition_spec,
                 sortOrderSpec=sort_order_spec,
                 properties=self.properties,
+                incremental=self.incremental or None,
+                refreshInterval=self.refresh_interval
+                if self.refresh_interval is not None else None,
             ).items() if v is not None
             },
         )
 
-        return ExportStatus(out.json()['egressID'], self.workspace_group)
+        self.export_id = str(out.json()['egressID'])
+
+        return ExportStatus(self.export_id, self.workspace_group)
+
+    def suspend(self) -> 'ExportStatus':
+        """Suspend the export process."""
+        if self._manager is None:
+            raise ManagementError(
+                msg='No workspace manager is associated with this object.',
+            )
+
+        if self.export_id is None:
+            raise ManagementError(
+                msg='Export ID is not set. You must start the export first.',
+            )
+
+        self._manager._post(
+            f'workspaceGroups/{self.workspace_group.id}/egress/suspendTableEgress',
+            json=dict(egressID=self.export_id),
+        )
+
+        return ExportStatus(self.export_id, self.workspace_group)
+
+    def resume(self) -> 'ExportStatus':
+        """Resume the export process."""
+        if self._manager is None:
+            raise ManagementError(
+                msg='No workspace manager is associated with this object.',
+            )
+
+        if self.export_id is None:
+            raise ManagementError(
+                msg='Export ID is not set. You must start the export first.',
+            )
+
+        self._manager._post(
+            f'workspaceGroups/{self.workspace_group.id}/egress/resumeTableEgress',
+            json=dict(egressID=self.export_id),
+        )
+
+        return ExportStatus(self.export_id, self.workspace_group)
+
+    def drop(self) -> None:
+        """Drop the export process."""
+        if self._manager is None:
+            raise ManagementError(
+                msg='No workspace manager is associated with this object.',
+            )
+
+        if self.export_id is None:
+            raise ManagementError(
+                msg='Export ID is not set. You must start the export first.',
+            )
+
+        self._manager._post(
+            f'workspaceGroups/{self.workspace_group.id}/egress/dropTableEgress',
+            json=dict(egressID=self.export_id),
+        )
+
+        return None
+
+    def status(self) -> ExportStatus:
+        """Get the status of the export process."""
+        if self._manager is None:
+            raise ManagementError(
+                msg='No workspace manager is associated with this object.',
+            )
+
+        if self.export_id is None:
+            raise ManagementError(
+                msg='Export ID is not set. You must start the export first.',
+            )
+
+        return ExportStatus(self.export_id, self.workspace_group)
 
 
 class ExportStatus(object):
@@ -167,3 +275,21 @@ class ExportStatus(object):
 
     def __repr__(self) -> str:
         return self.status
+
+
+def _get_exports(
+    workspace_group: WorkspaceGroup,
+    scope: str = 'all',
+) -> List[ExportStatus]:
+    """Get all exports in the workspace group."""
+    if workspace_group._manager is None:
+        raise ManagementError(
+            msg='No workspace manager is associated with this object.',
+        )
+
+    out = workspace_group._manager._get(
+        f'workspaceGroups/{workspace_group.id}/egress/tableEgressStatus',
+        json=dict(scope=scope),
+    )
+
+    return out.json()
singlestoredb/tests/test_management.py CHANGED
@@ -450,55 +450,59 @@ class TestStage(unittest.TestCase):
     def test_open(self):
         st = self.wg.stage
 
+        open_test_sql = f'open_test_{id(self)}.sql'
+
         # See if error is raised for non-existent file
         with self.assertRaises(s2.ManagementError):
-            st.open('open_test.sql', 'r')
+            st.open(open_test_sql, 'r')
 
         # Load test file
-        st.upload_file(TEST_DIR / 'test.sql', 'open_test.sql')
+        st.upload_file(TEST_DIR / 'test.sql', open_test_sql)
 
         # Read file using `open`
-        with st.open('open_test.sql', 'r') as rfile:
+        with st.open(open_test_sql, 'r') as rfile:
            assert rfile.read() == open(TEST_DIR / 'test.sql').read()
 
        # Read file using `open` with 'rt' mode
-        with st.open('open_test.sql', 'rt') as rfile:
+        with st.open(open_test_sql, 'rt') as rfile:
            assert rfile.read() == open(TEST_DIR / 'test.sql').read()
 
        # Read file using `open` with 'rb' mode
-        with st.open('open_test.sql', 'rb') as rfile:
+        with st.open(open_test_sql, 'rb') as rfile:
            assert rfile.read() == open(TEST_DIR / 'test.sql', 'rb').read()
 
        # Read file using `open` with 'rb' mode
        with self.assertRaises(ValueError):
-            with st.open('open_test.sql', 'b') as rfile:
+            with st.open(open_test_sql, 'b') as rfile:
                pass
 
        # Attempt overwrite file using `open` with mode 'x'
        with self.assertRaises(OSError):
-            with st.open('open_test.sql', 'x') as wfile:
+            with st.open(open_test_sql, 'x') as wfile:
                pass
 
        # Attempt overwrite file using `open` with mode 'w'
-        with st.open('open_test.sql', 'w') as wfile:
+        with st.open(open_test_sql, 'w') as wfile:
            wfile.write(open(TEST_DIR / 'test2.sql').read())
 
-        txt = st.download_file('open_test.sql', encoding='utf-8')
+        txt = st.download_file(open_test_sql, encoding='utf-8')
 
        assert txt == open(TEST_DIR / 'test2.sql').read()
 
+        open_raw_test_sql = f'open_raw_test_{id(self)}.sql'
+
        # Test writer without context manager
-        wfile = st.open('open_raw_test.sql', 'w')
+        wfile = st.open(open_raw_test_sql, 'w')
        for line in open(TEST_DIR / 'test.sql'):
            wfile.write(line)
        wfile.close()
 
-        txt = st.download_file('open_raw_test.sql', encoding='utf-8')
+        txt = st.download_file(open_raw_test_sql, encoding='utf-8')
 
        assert txt == open(TEST_DIR / 'test.sql').read()
 
        # Test reader without context manager
-        rfile = st.open('open_raw_test.sql', 'r')
+        rfile = st.open(open_raw_test_sql, 'r')
        txt = ''
        for line in rfile:
            txt += line
@@ -509,15 +513,18 @@ class TestStage(unittest.TestCase):
     def test_obj_open(self):
         st = self.wg.stage
 
+        obj_open_test_sql = f'obj_open_test_{id(self)}.sql'
+        obj_open_dir = f'obj_open_dir_{id(self)}'
+
         # Load test file
-        f = st.upload_file(TEST_DIR / 'test.sql', 'obj_open_test.sql')
+        f = st.upload_file(TEST_DIR / 'test.sql', obj_open_test_sql)
 
         # Read file using `open`
         with f.open() as rfile:
             assert rfile.read() == open(TEST_DIR / 'test.sql').read()
 
         # Make sure directories error out
-        d = st.mkdir('obj_open_dir')
+        d = st.mkdir(obj_open_dir)
         with self.assertRaises(IsADirectoryError):
             d.open()
 
@@ -1143,58 +1150,62 @@ class TestFileSpaces(unittest.TestCase):
 
     def test_open(self):
         for space in [self.personal_space, self.shared_space]:
+            open_test_ipynb = f'open_test_ipynb_{id(self)}.ipynb'
+
            # See if error is raised for non-existent file
            with self.assertRaises(s2.ManagementError):
-                space.open('open_test.ipynb', 'r')
+                space.open(open_test_ipynb, 'r')
 
            # Load test file
-            space.upload_file(TEST_DIR / 'test.ipynb', 'open_test.ipynb')
+            space.upload_file(TEST_DIR / 'test.ipynb', open_test_ipynb)
 
            # Read file using `open`
-            with space.open('open_test.ipynb', 'r') as rfile:
+            with space.open(open_test_ipynb, 'r') as rfile:
                assert rfile.read() == open(TEST_DIR / 'test.ipynb').read()
 
            # Read file using `open` with 'rt' mode
-            with space.open('open_test.ipynb', 'rt') as rfile:
+            with space.open(open_test_ipynb, 'rt') as rfile:
                assert rfile.read() == open(TEST_DIR / 'test.ipynb').read()
 
            # Read file using `open` with 'rb' mode
-            with space.open('open_test.ipynb', 'rb') as rfile:
+            with space.open(open_test_ipynb, 'rb') as rfile:
                assert rfile.read() == open(TEST_DIR / 'test.ipynb', 'rb').read()
 
            # Read file using `open` with 'rb' mode
            with self.assertRaises(ValueError):
-                with space.open('open_test.ipynb', 'b') as rfile:
+                with space.open(open_test_ipynb, 'b') as rfile:
                    pass
 
            # Attempt overwrite file using `open` with mode 'x'
            with self.assertRaises(OSError):
-                with space.open('open_test.ipynb', 'x') as wfile:
+                with space.open(open_test_ipynb, 'x') as wfile:
                    pass
 
            # Attempt overwrite file using `open` with mode 'w'
-            with space.open('open_test.ipynb', 'w') as wfile:
+            with space.open(open_test_ipynb, 'w') as wfile:
                wfile.write(open(TEST_DIR / 'test2.ipynb').read())
 
-            txt = space.download_file('open_test.ipynb', encoding='utf-8')
+            txt = space.download_file(open_test_ipynb, encoding='utf-8')
 
            assert txt == open(TEST_DIR / 'test2.ipynb').read()
 
+            open_raw_test_ipynb = f'open_raw_test_{id(self)}.ipynb'
+
            # Test writer without context manager
-            wfile = space.open('open_raw_test.ipynb', 'w')
+            wfile = space.open(open_raw_test_ipynb, 'w')
            for line in open(TEST_DIR / 'test.ipynb'):
                wfile.write(line)
            wfile.close()
 
            txt = space.download_file(
-                'open_raw_test.ipynb',
+                open_raw_test_ipynb,
                encoding='utf-8',
            )
 
            assert txt == open(TEST_DIR / 'test.ipynb').read()
 
            # Test reader without context manager
-            rfile = space.open('open_raw_test.ipynb', 'r')
+            rfile = space.open(open_raw_test_ipynb, 'r')
            txt = ''
            for line in rfile:
                txt += line
@@ -1203,15 +1214,18 @@ class TestFileSpaces(unittest.TestCase):
            assert txt == open(TEST_DIR / 'test.ipynb').read()
 
            # Cleanup
-            space.remove('open_test.ipynb')
-            space.remove('open_raw_test.ipynb')
+            space.remove(open_test_ipynb)
+            space.remove(open_raw_test_ipynb)
 
    def test_obj_open(self):
        for space in [self.personal_space, self.shared_space]:
+            obj_open_test_ipynb = f'obj_open_test_{id(self)}.ipynb'
+            obj_open_dir = f'obj_open_dir_{id(self)}'
+
            # Load test file
            f = space.upload_file(
                TEST_DIR / 'test.ipynb',
-                'obj_open_test.ipynb',
+                obj_open_test_ipynb,
            )
 
            # Read file using `open`
@@ -1220,7 +1234,7 @@ class TestFileSpaces(unittest.TestCase):
 
            # Make sure directories error out
            with self.assertRaises(s2.ManagementError):
-                space.mkdir('obj_open_dir')
+                space.mkdir(obj_open_dir)
 
            # Write file using `open`
            with f.open('w', encoding='utf-8') as wfile:
@@ -1248,7 +1262,7 @@ class TestFileSpaces(unittest.TestCase):
            assert txt == open(TEST_DIR / 'test.ipynb').read()
 
            # Cleanup
-            space.remove('obj_open_test.ipynb')
+            space.remove(obj_open_test_ipynb)
 
    def test_os_directories(self):
        for space in [self.personal_space, self.shared_space]:
singlestoredb/tests/test_vectorstore.py ADDED
@@ -0,0 +1,51 @@
+import os
+import unittest
+
+from vectorstore import VectorDB
+
+import singlestoredb as s2
+from . import utils
+
+
+class TestVectorDB(unittest.TestCase):
+
+    driver = s2
+
+    dbname: str = ''
+    dbexisted: bool = False
+
+    @classmethod
+    def setUpClass(cls) -> None:
+        sql_file = os.path.join(os.path.dirname(__file__), 'empty.sql')
+        cls.dbname, cls.dbexisted = utils.load_sql(sql_file)  # type: ignore
+
+    @classmethod
+    def tearDownClass(cls) -> None:
+        if not cls.dbexisted:
+            utils.drop_database(cls.dbname)  # type: ignore
+
+    def test_vectordb_from_params(self) -> None:
+        db: VectorDB = s2.vector_db(database=type(self).dbname)
+        index = db.create_index(
+            name='test_index', dimension=3,
+            tags={'name': 'test_tag'},
+        )
+        assert index.name == 'test_index'
+        assert index.dimension == 3
+        assert index.tags == {'name': 'test_tag'}
+        assert db.has_index('test_index')
+
+    def test_vectordb_from_connection(self) -> None:
+        with s2.connect(database=type(self).dbname) as conn:
+            db: VectorDB = conn.vector_db
+            index = db.create_index(
+                name='test_index_1',
+                dimension=4, tags={'name': 'test_tag'},
+            )
+            assert index.name == 'test_index_1'
+            assert index.dimension == 4
+            assert index.tags == {'name': 'test_tag'}
+            assert db.has_index('test_index_1')
+
+            db2: VectorDB = conn.vector_db
+            assert db2.has_index('test_index_1')
singlestoredb/types.py CHANGED
@@ -173,17 +173,25 @@ class ColumnType(object):
         'DECIMAL', 'DEC', 'FIXED', 'NUMERIC', 0, decimal.Decimal,
     )
     DEC = FIXED = NUMERIC = DECIMAL
-    TINY = TINYINT = BOOL = BOOLEAN = NumberDBAPIType(
-        'TINY', 'TINYINT', 'BOOL', 'BOOLEAN', 1,
+    TINY = TINYINT = BOOL = BOOLEAN = UNSIGNED_TINY = UNSIGNED_TINYINT = NumberDBAPIType(
+        'TINY', 'TINYINT', 'BOOL', 'BOOLEAN', 'UNSIGNED TINY', 'UNSIGNED TINYINT', 1,
+    )
+    SHORT = SMALLINT = UNSIGNED_SHORT = UNSIGNED_SMALLINT = NumberDBAPIType(
+        'SMALLINT', 'SHORT', 'UNSIGNED SHORT', 'UNSIGNED SMALLINT', 2,
+    )
+    LONG = INT = UNSIGNED_LONG = UNSIGNED_INT = NumberDBAPIType(
+        'LONG', 'INT', 'UNSIGNED LONG', 'UNSIGNED INT', 3,
     )
-    SHORT = SMALLINT = NumberDBAPIType('SMALLINT', 'SHORT', 2)
-    LONG = INT = NumberDBAPIType('LONG', 'INT', 3)
     FLOAT = NumberDBAPIType('FLOAT', 4)
     DOUBLE = REAL = NumberDBAPIType('DOUBLE', 5, float)
     NULL = DBAPIType('NULL', 6)
     TIMESTAMP = DatetimeDBAPIType('TIMESTAMP', 7)
-    LONGLONG = BIGINT = NumberDBAPIType('BIGINT', 'LONGLONG', 8, int)
-    MEDIUMINT = INT24 = NumberDBAPIType('MEDIUMINT', 'INT24', 9)
+    LONGLONG = BIGINT = UNSIGNED_LONGLONG = UNSIGNED_BIGINT = NumberDBAPIType(
+        'BIGINT', 'LONGLONG', 'UNSIGNED LONGLONG', 'UNSIGNED BIGINT', 8, int,
+    )
+    MEDIUMINT = INT24 = UNSIGNED_MEDIUMINT = UNSIGNED_INT24 = NumberDBAPIType(
+        'MEDIUMINT', 'INT24', 'UNSIGNED MEDIUMINT', 'UNSIGNED INT24', 9,
+    )
     DATE = DBAPIType('DATE', 10, datetime.date)
     TIME = DBAPIType('TIME', 11, datetime.time)
     DATETIME = DatetimeDBAPIType('DATETIME', 12, datetime.datetime)
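
Because the new UNSIGNED_* names are bound in the same chained assignments, they are aliases of the existing type objects rather than new type codes; a quick sketch:

    from singlestoredb.types import ColumnType

    # Aliases from one chained assignment share a single DB-API type object.
    assert ColumnType.UNSIGNED_BIGINT is ColumnType.BIGINT
    assert ColumnType.UNSIGNED_SMALLINT is ColumnType.SHORT

    # Only the recognized name strings grew ('UNSIGNED BIGINT', etc.);
    # the numeric codes (8 for BIGINT/LONGLONG, 2 for SMALLINT, ...) are unchanged.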
singlestoredb/vectorstore.py ADDED
@@ -0,0 +1,192 @@
+from typing import Any
+from typing import Callable
+from typing import Dict
+from typing import Optional
+
+from vectorstore import AndFilter  # noqa: F401
+from vectorstore import DeletionProtection  # noqa: F401
+from vectorstore import EqFilter  # noqa: F401
+from vectorstore import ExactMatchFilter  # noqa: F401
+from vectorstore import FilterTypedDict  # noqa: F401
+from vectorstore import GteFilter  # noqa: F401
+from vectorstore import GtFilter  # noqa: F401
+from vectorstore import IndexInterface  # noqa: F401
+from vectorstore import IndexList  # noqa: F401
+from vectorstore import IndexModel  # noqa: F401
+from vectorstore import IndexStatsTypedDict  # noqa: F401
+from vectorstore import InFilter  # noqa: F401
+from vectorstore import LteFilter  # noqa: F401
+from vectorstore import LtFilter  # noqa: F401
+from vectorstore import MatchTypedDict  # noqa: F401
+from vectorstore import Metric  # noqa: F401
+from vectorstore import NamespaceStatsTypedDict  # noqa: F401
+from vectorstore import NeFilter  # noqa: F401
+from vectorstore import NinFilter  # noqa: F401
+from vectorstore import OrFilter  # noqa: F401
+from vectorstore import SimpleFilter  # noqa: F401
+from vectorstore import Vector  # noqa: F401
+from vectorstore import VectorDictMetadataValue  # noqa: F401
+from vectorstore import VectorMetadataTypedDict  # noqa: F401
+from vectorstore import VectorTuple  # noqa: F401
+from vectorstore import VectorTupleWithMetadata  # noqa: F401
+
+
+def vector_db(
+    host: Optional[str] = None, user: Optional[str] = None,
+    password: Optional[str] = None, port: Optional[int] = None,
+    database: Optional[str] = None, driver: Optional[str] = None,
+    pure_python: Optional[bool] = None, local_infile: Optional[bool] = None,
+    charset: Optional[str] = None,
+    ssl_key: Optional[str] = None, ssl_cert: Optional[str] = None,
+    ssl_ca: Optional[str] = None, ssl_disabled: Optional[bool] = None,
+    ssl_cipher: Optional[str] = None, ssl_verify_cert: Optional[bool] = None,
+    tls_sni_servername: Optional[str] = None,
+    ssl_verify_identity: Optional[bool] = None,
+    conv: Optional[Dict[int, Callable[..., Any]]] = None,
+    credential_type: Optional[str] = None,
+    autocommit: Optional[bool] = None,
+    results_type: Optional[str] = None,
+    buffered: Optional[bool] = None,
+    results_format: Optional[str] = None,
+    program_name: Optional[str] = None,
+    conn_attrs: Optional[Dict[str, str]] = {},
+    multi_statements: Optional[bool] = None,
+    client_found_rows: Optional[bool] = None,
+    connect_timeout: Optional[int] = None,
+    nan_as_null: Optional[bool] = None,
+    inf_as_null: Optional[bool] = None,
+    encoding_errors: Optional[str] = None,
+    track_env: Optional[bool] = None,
+    enable_extended_data_types: Optional[bool] = None,
+    vector_data_format: Optional[str] = None,
+    parse_json: Optional[bool] = None,
+    pool_size: Optional[int] = 5,
+    max_overflow: Optional[int] = 10,
+    timeout: Optional[float] = 30,
+) -> Any:
+    """
+    Return a vectorstore API connection.
+    Database should be specified in the URL or as a keyword.
+
+    Parameters
+    ----------
+    host : str, optional
+        Hostname, IP address, or URL that describes the connection.
+        The scheme or protocol defines which database connector to use.
+        By default, the ``mysql`` scheme is used. To connect to the
+        HTTP API, the scheme can be set to ``http`` or ``https``. The username,
+        password, host, and port are specified as in a standard URL. The path
+        indicates the database name. The overall form of the URL is:
+        ``scheme://user:password@host:port/db_name``. The scheme can
+        typically be left off (unless you are using the HTTP API):
+        ``user:password@host:port/db_name``.
+    user : str, optional
+        Database user name
+    password : str, optional
+        Database user password
+    port : int, optional
+        Database port. This defaults to 3306 for non-HTTP connections, 80
+        for HTTP connections, and 443 for HTTPS connections.
+    database : str, optional
+        Database name.
+    pure_python : bool, optional
+        Use the connector in pure Python mode
+    local_infile : bool, optional
+        Allow local file uploads
+    charset : str, optional
+        Character set for string values
+    ssl_key : str, optional
+        File containing SSL key
+    ssl_cert : str, optional
+        File containing SSL certificate
+    ssl_ca : str, optional
+        File containing SSL certificate authority
+    ssl_cipher : str, optional
+        Sets the SSL cipher list
+    ssl_disabled : bool, optional
+        Disable SSL usage
+    ssl_verify_cert : bool, optional
+        Verify the server's certificate. This is automatically enabled if
+        ``ssl_ca`` is also specified.
+    ssl_verify_identity : bool, optional
+        Verify the server's identity
+    conv : dict[int, Callable], optional
+        Dictionary of data conversion functions
+    credential_type : str, optional
+        Type of authentication to use: auth.PASSWORD, auth.JWT, or auth.BROWSER_SSO
+    autocommit : bool, optional
+        Enable autocommits
+    results_type : str, optional
+        The form of the query results: tuples, namedtuples, dicts,
+        numpy, polars, pandas, arrow
+    buffered : bool, optional
+        Should the entire query result be buffered in memory? This is the default
+        behavior which allows full cursor control of the result, but does consume
+        more memory.
+    results_format : str, optional
+        Deprecated. This option has been renamed to results_type.
+    program_name : str, optional
+        Name of the program
+    conn_attrs : dict, optional
+        Additional connection attributes for telemetry. Example:
+        {'program_version': "1.0.2", "_connector_name": "dbt connector"}
+    multi_statements: bool, optional
+        Should multiple statements be allowed within a single query?
+    connect_timeout : int, optional
+        The timeout for connecting to the database in seconds.
+        (default: 10, min: 1, max: 31536000)
+    nan_as_null : bool, optional
+        Should NaN values be treated as NULLs when used in parameter
+        substitutions including uploaded data?
+    inf_as_null : bool, optional
+        Should Inf values be treated as NULLs when used in parameter
+        substitutions including uploaded data?
+    encoding_errors : str, optional
+        The error handler name for value decoding errors
+    track_env : bool, optional
+        Should the connection track the SINGLESTOREDB_URL environment variable?
+    enable_extended_data_types : bool, optional
+        Should extended data types (BSON, vector) be enabled?
+    vector_data_format : str, optional
+        Format for vector types: json or binary
+    pool_size : int, optional
+        The number of connections to keep in the connection pool. Default is 5.
+    max_overflow : int, optional
+        The maximum number of connections to allow beyond the pool size.
+        Default is 10.
+    timeout : float, optional
+        The timeout for acquiring a connection from the pool in seconds.
+        Default is 30 seconds.
+
+    See Also
+    --------
+    :class:`Connection`
+
+    Returns
+    -------
+    :class:`VectorDB`
+
+    """
+    from vectorstore import VectorDB
+    return VectorDB(
+        host=host, user=user, password=password, port=port,
+        database=database, driver=driver, pure_python=pure_python,
+        local_infile=local_infile, charset=charset,
+        ssl_key=ssl_key, ssl_cert=ssl_cert, ssl_ca=ssl_ca,
+        ssl_disabled=ssl_disabled, ssl_cipher=ssl_cipher,
+        ssl_verify_cert=ssl_verify_cert,
+        tls_sni_servername=tls_sni_servername,
+        ssl_verify_identity=ssl_verify_identity, conv=conv,
+        credential_type=credential_type, autocommit=autocommit,
+        results_type=results_type, buffered=buffered,
+        results_format=results_format, program_name=program_name,
+        conn_attrs=conn_attrs, multi_statements=multi_statements,
+        client_found_rows=client_found_rows,
+        connect_timeout=connect_timeout, nan_as_null=nan_as_null,
+        inf_as_null=inf_as_null, encoding_errors=encoding_errors,
+        track_env=track_env,
+        enable_extended_data_types=enable_extended_data_types,
+        vector_data_format=vector_data_format,
+        parse_json=parse_json, pool_size=pool_size,
+        max_overflow=max_overflow, timeout=timeout,
+    )
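
A minimal usage sketch of this standalone entry point, with placeholder connection details; the pool settings shown are the documented defaults:

    import singlestoredb as s2

    db = s2.vector_db(
        'user:password@localhost:3306/vectors',
        pool_size=5,         # connections kept in the pool
        max_overflow=10,     # extra connections allowed beyond the pool
        timeout=30,          # seconds to wait for a pooled connection
    )

    index = db.create_index(name='docs', dimension=1536)
    assert db.has_index('docs')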
singlestoredb-1.13.1.dist-info/METADATA → singlestoredb-1.14.1.dist-info/METADATA RENAMED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: singlestoredb
-Version: 1.13.1
+Version: 1.14.1
 Summary: Interface to the SingleStoreDB database and workspace management APIs
 Home-page: https://github.com/singlestore-labs/singlestoredb-python
 Author: SingleStore
@@ -11,7 +11,7 @@ Classifier: License :: OSI Approved :: Apache Software License
 Classifier: Programming Language :: Python :: 3
 Classifier: Programming Language :: Python :: 3 :: Only
 Classifier: Topic :: Database
-Requires-Python: >=3.8
+Requires-Python: >=3.9
 Description-Content-Type: text/markdown
 License-File: LICENSE
 Requires-Dist: PyJWT
@@ -19,6 +19,7 @@ Requires-Dist: build
 Requires-Dist: parsimonious
 Requires-Dist: requests
 Requires-Dist: setuptools
+Requires-Dist: singlestore-vectorstore>=0.1.2
 Requires-Dist: sqlparams
 Requires-Dist: wheel
 Requires-Dist: tomli>=1.1.0; python_version < "3.11"
singlestoredb-1.13.1.dist-info/RECORD → singlestoredb-1.14.1.dist-info/RECORD RENAMED
@@ -1,12 +1,13 @@
-singlestoredb/__init__.py,sha256=TlTiqjO5Ea2FP3Iq8Kmk0zAA2vR6oOj-HjURaUcQKcM,1649
+singlestoredb/__init__.py,sha256=EzlSL8GTLh0YsZT6nfBumDkUyKFIwMdORoJ_z5Xx8TI,2091
 singlestoredb/auth.py,sha256=u8D9tpKzrqa4ssaHjyZnGDX1q8XBpGtuoOkTkSv7B28,7599
 singlestoredb/config.py,sha256=dayUWwSy2YdgmhF8tzH-7FwFpwon5bgX_VeX-Yu5ia4,12969
-singlestoredb/connection.py,sha256=0HEpjBZXLqQwOTEfveMkgej1H3Kyof47prIHvJJZtoo,45831
+singlestoredb/connection.py,sha256=ELk3-UpM6RaB993aIt08MydKiiDnejHQ1s8EFiacrAI,46055
 singlestoredb/converters.py,sha256=Ui-AqdW3pRAQ8A_YcK9EqVYyM4Pt1_Q-tjlotbpK6Cw,20686
 singlestoredb/exceptions.py,sha256=HuoA6sMRL5qiCiee-_5ddTGmFbYC9Euk8TYUsh5GvTw,3234
 singlestoredb/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 singlestoredb/pytest.py,sha256=OyF3BO9mgxenifYhOihnzGk8WzCJ_zN5_mxe8XyFPOc,9074
-singlestoredb/types.py,sha256=FIqO1A7e0Gkk7ITmIysBy-P5S--ItbMSlYvblzqGS30,9969
+singlestoredb/types.py,sha256=Qp_PWYjSYG6PRnmXAZZ7K2QehUqfoG4KSllI3O1stPE,10397
+singlestoredb/vectorstore.py,sha256=BZb8e7m02_XVHqOyu8tA94R6kHb3n-BC8F08JyJwDzY,8408
 singlestoredb/ai/__init__.py,sha256=-uNcq-bY-AiWhZ5Plq2ZXtfIVL4PaifMJsJf58rdN8I,114
 singlestoredb/ai/chat.py,sha256=8OSBZJ3J2zOlVXzJ_sHSAAyu5E6sy7jqqiNeFhtmjOI,802
 singlestoredb/ai/embeddings.py,sha256=X3g0sJNDVOzXzZwoXz3M3ch-IERQXNkHxuH4cj125I8,815
@@ -39,7 +40,7 @@ singlestoredb/fusion/handler.py,sha256=HEW83De1zj94hvG7rbqlOszIIgBKiag0UGO5I0WoJ
 singlestoredb/fusion/registry.py,sha256=jjdRTYZ3ylhy6gAoW5xBj0tkxGFBT-2yLQ0tztTgDIY,6112
 singlestoredb/fusion/result.py,sha256=p5I65C-Dhhl1yeZwetXXZabwritr8Ph2mFvJJ3ovcBM,11790
 singlestoredb/fusion/handlers/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-singlestoredb/fusion/handlers/export.py,sha256=3moTJeqsHkDDpitUUAE6x95JYH2rmb28MALbO4x0dcc,8981
+singlestoredb/fusion/handlers/export.py,sha256=Af4eIMPGKEOpmf4LXnvQsgmnvx5F8B5FkRI20RvEa7o,15309
 singlestoredb/fusion/handlers/files.py,sha256=McoRacihcQn0-qILujBi0HCNyuFcrBoIUGlkWlg1cII,18991
 singlestoredb/fusion/handlers/job.py,sha256=r0KdOD55VUDw-SymC__5Mn-fzJTZE_xcBgH-O8DYVHc,21095
 singlestoredb/fusion/handlers/models.py,sha256=xJPIG0_GgF-VrmPoIsU2U4AsS7ytDz8JMRaqchglAR0,6236
@@ -47,14 +48,14 @@ singlestoredb/fusion/handlers/stage.py,sha256=kYVjbPys83kf3jX6jWwN8Ju0oEocKVZ3TI
 singlestoredb/fusion/handlers/utils.py,sha256=ozHOWUraoN8XGTK9JZdhv5HV8AQR8zfUd1yh1kLvUXY,10685
 singlestoredb/fusion/handlers/workspace.py,sha256=4xN2TFO4yF7KZB2Fcht7IuvoDdAT6fDfDLjixiHZN8w,27506
 singlestoredb/http/__init__.py,sha256=A_2ZUCCpvRYIA6YDpPy57wL5R1eZ5SfP6I1To5nfJ2s,912
-singlestoredb/http/connection.py,sha256=EgE2m_nxisGPo6YV3AJd-RRafdT0f70HRbIo1ONQ668,39893
+singlestoredb/http/connection.py,sha256=X5GEPPOE-rMm17d0-TPhcdxUHibcYl-MZAnPhut8xyo,39956
 singlestoredb/magics/__init__.py,sha256=lZjkT3Webo9c1EQAzlRCRh6B2pckQH8uvNrrB__abcI,1210
 singlestoredb/magics/run_personal.py,sha256=2f7u1T7iblxGzZurHNgNXLrPBvsvPADZKo_RD_IjYuE,1844
 singlestoredb/magics/run_shared.py,sha256=SI8dCBRMaGn-xZU7dto4jsAqKBi-Ll14htUsMUSBpJM,1752
 singlestoredb/management/__init__.py,sha256=ofNTPCdkZ1dS_aX2aUujd8aMHQi8Lle5Ced0aaO3RH4,269
 singlestoredb/management/billing_usage.py,sha256=9ighjIpcopgIyJOktBYQ6pahBZmWGHOPyyCW4gu9FGs,3735
 singlestoredb/management/cluster.py,sha256=h75grXSxq4Anr4RxwKxcZW4TkWJ4bFg_ql5iRWCNLdQ,14405
-singlestoredb/management/export.py,sha256=jJCe25ecH_LzKSDc7vS1-5DQaWFrZipeawLPpArByJE,5108
+singlestoredb/management/export.py,sha256=VrpGyHVUadtJfprcsi9N2SNS6dunDfA1EV2SfWaPlmA,9081
 singlestoredb/management/files.py,sha256=89IhpGw9WdwxVeksavHEDMVn9wb_jxb-utZuIDqkLHw,30477
 singlestoredb/management/inference_api.py,sha256=L6eFqaUaPugF_cmrZ4xlArj8CIv25vWqQs1vwgKPEF4,2583
 singlestoredb/management/job.py,sha256=4-xLWzbE8odQogVVaFer80UEoTAZY1T28VZ9Ug4rbmM,24611
@@ -126,12 +127,13 @@ singlestoredb/tests/test_ext_func.py,sha256=s1k1cBxQ7vIS1zSrKGkKTgLZE1DT_Rqj-3VN
 singlestoredb/tests/test_ext_func_data.py,sha256=yTADD93nPxX6_rZXXLZaOWEI_yPvYyir9psn5PK9ctU,47695
 singlestoredb/tests/test_fusion.py,sha256=EH1mRwdX2Fajsq6x2l0gBhH1YhcxtvDGIKC9HJ4sDbQ,50521
 singlestoredb/tests/test_http.py,sha256=RXasTqBWRn__omj0eLFTJYIbZjd0PPdIV2d4Cqz0MC8,8580
-singlestoredb/tests/test_management.py,sha256=6evsyQWA-lOKMehJi8xvjp0udm85EHBHuZDHwQEzxPg,45149
+singlestoredb/tests/test_management.py,sha256=IGwBijg5NnSSGVNjbtW2m7wVv2b0SFdvfW4sBXTPLP0,45571
 singlestoredb/tests/test_plugin.py,sha256=qpO9wmWc62VaijN1sJ97YSYIX7I7Y5C6sY-WzwrutDQ,812
 singlestoredb/tests/test_results.py,sha256=wg93sujwt-R9_eJCgSCElgAZhLDkIiAo3qPkPydOv78,6582
 singlestoredb/tests/test_types.py,sha256=jqoAaSjhbgwB3vt0KsTcl7XBWoMMIa0mPFKhEi5bBjo,4500
 singlestoredb/tests/test_udf.py,sha256=Kb7-oJpnN6MTT3aE5V5dry_r5ze0EwaAIJeh_zR3l0I,28844
 singlestoredb/tests/test_udf_returns.py,sha256=k31L6Ir2Xw8MEZ18upuu0p_D_OpbrPAzWhDQXVFDS7I,15541
+singlestoredb/tests/test_vectorstore.py,sha256=anHfp5gQrQy8Iw3Ub4mxFEkaZWahs566OXuKqjpkozM,1554
 singlestoredb/tests/test_xdict.py,sha256=fqHspoi39nbX3fIDVkkRXcd5H50xdOsSvK0bxAMQnaE,10408
 singlestoredb/tests/utils.py,sha256=2A2tEdD3t8aXWUnHtAIcFlWrflsz2MlMcCbUDaAG29c,4995
 singlestoredb/tests/ext_funcs/__init__.py,sha256=gtyhykoEk8_-il5ukTwvqDu-4D1LgwxMFseYg1wgOHo,14103
@@ -146,9 +148,9 @@ singlestoredb/utils/results.py,sha256=bJtaUaDiFq26IsPAKZ2FHGB7csMn94EAxLKrP4HaEE
 singlestoredb/utils/xdict.py,sha256=S9HKgrPrnu_6b7iOwa2KrW8CmU1Uqx0BWdEyogFzWbE,12896
 sqlx/__init__.py,sha256=aBYiU8DZXCogvWu3yWafOz7bZS5WWwLZXj7oL0dXGyU,85
 sqlx/magic.py,sha256=JsS9_9aBFaOt91Torm1JPN0c8qB2QmYJmNSKtbSQIY0,3509
-singlestoredb-1.13.1.dist-info/LICENSE,sha256=Mlq78idURT-9G026aMYswwwnnrLcgzTLuXeAs5hjDLM,11341
-singlestoredb-1.13.1.dist-info/METADATA,sha256=rybhqtR5TzE7p-keWvO6V21MlYNZZtIULT9atDRgXpw,5688
-singlestoredb-1.13.1.dist-info/WHEEL,sha256=tZoeGjtWxWRfdplE7E3d45VPlLNQnvbKiYnx7gwAy8A,92
-singlestoredb-1.13.1.dist-info/entry_points.txt,sha256=bSLaTWB5zGjpVYPAaI46MkkDup0su-eb3uAhCNYuRV0,48
-singlestoredb-1.13.1.dist-info/top_level.txt,sha256=DfFGz7bM4XrshloiCeTABgylT3BUnS8T5pJam3ewT6Q,19
-singlestoredb-1.13.1.dist-info/RECORD,,
+singlestoredb-1.14.1.dist-info/LICENSE,sha256=Mlq78idURT-9G026aMYswwwnnrLcgzTLuXeAs5hjDLM,11341
+singlestoredb-1.14.1.dist-info/METADATA,sha256=PEUJtCAYwCFmN7FKYnqwkV3o8Ai9_eVYbS3D4dyY1_8,5734
+singlestoredb-1.14.1.dist-info/WHEEL,sha256=tZoeGjtWxWRfdplE7E3d45VPlLNQnvbKiYnx7gwAy8A,92
+singlestoredb-1.14.1.dist-info/entry_points.txt,sha256=bSLaTWB5zGjpVYPAaI46MkkDup0su-eb3uAhCNYuRV0,48
+singlestoredb-1.14.1.dist-info/top_level.txt,sha256=DfFGz7bM4XrshloiCeTABgylT3BUnS8T5pJam3ewT6Q,19
+singlestoredb-1.14.1.dist-info/RECORD,,