singlestoredb 1.14.0__cp38-abi3-macosx_10_9_universal2.whl → 1.14.2__cp38-abi3-macosx_10_9_universal2.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.

This version of singlestoredb might be problematic.
Binary file
singlestoredb/__init__.py CHANGED
@@ -13,7 +13,7 @@ Examples
 
 """
 
-__version__ = '1.14.0'
+__version__ = '1.14.2'
 
 from typing import Any
 
@@ -31,14 +31,18 @@ from .types import (
     Date, Time, Timestamp, DateFromTicks, TimeFromTicks, TimestampFromTicks,
     Binary, STRING, BINARY, NUMBER, DATETIME, ROWID,
 )
-from .vectorstore import (
-    vector_db, IndexInterface, IndexList, IndexModel, MatchTypedDict,
-    Metric, IndexStatsTypedDict, NamespaceStatsTypedDict, Vector,
-    VectorDictMetadataValue, VectorMetadataTypedDict, VectorTuple,
-    VectorTupleWithMetadata, DeletionProtection, AndFilter, EqFilter,
-    ExactMatchFilter, FilterTypedDict, GteFilter, GtFilter, InFilter,
-    LteFilter, LtFilter, NeFilter, NinFilter, OrFilter, SimpleFilter,
-)
+# These are only loaded if the singlestore-vectorstore package is available
+try:
+    from .vectorstore import (
+        vector_db, IndexInterface, IndexList, IndexModel, MatchTypedDict,
+        Metric, IndexStatsTypedDict, NamespaceStatsTypedDict, Vector,
+        VectorDictMetadataValue, VectorMetadataTypedDict, VectorTuple,
+        VectorTupleWithMetadata, DeletionProtection, AndFilter, EqFilter,
+        ExactMatchFilter, FilterTypedDict, GteFilter, GtFilter, InFilter,
+        LteFilter, LtFilter, NeFilter, NinFilter, OrFilter, SimpleFilter,
+    )
+except (ImportError, ModuleNotFoundError):
+    pass
 
 
 #
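
With this change the vectorstore re-exports resolve only when the optional singlestore-vectorstore dependency is present; per the METADATA change further down, it is now published as the `vectorstore` extra (installable with pip install "singlestoredb[vectorstore]") rather than a hard requirement. A small, hedged sketch of how calling code can detect which situation it is in:

    # Illustrative check only: vector_db is re-exported by singlestoredb solely
    # when the optional singlestore-vectorstore package is installed.
    import singlestoredb as s2

    if hasattr(s2, 'vector_db'):
        print('vectorstore support available')
    else:
        print('install it with: pip install "singlestoredb[vectorstore]"')
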
singlestoredb/fusion/handler.py CHANGED
@@ -33,7 +33,7 @@ CORE_GRAMMAR = r'''
     close_paren = ws* ")" ws*
     open_repeats = ws* ~r"[\(\[\{]" ws*
     close_repeats = ws* ~r"[\)\]\}]" ws*
-    select = ~r"SELECT"i ws+ ~r".+" ws*
+    statement = ~r"[\s\S]*" ws*
     table = ~r"(?:([A-Za-z0-9_\-]+)|`([^\`]+)`)(?:\.(?:([A-Za-z0-9_\-]+)|`([^\`]+)`))?" ws*
     column = ~r"(?:([A-Za-z0-9_\-]+)|`([^\`]+)`)(?:\.(?:([A-Za-z0-9_\-]+)|`([^\`]+)`))?" ws*
     link_name = ~r"(?:([A-Za-z0-9_\-]+)|`([^\`]+)`)(?:\.(?:([A-Za-z0-9_\-]+)|`([^\`]+)`))?" ws*
@@ -77,6 +77,7 @@ BUILTINS = {
     '<file-type>': r'''
        file_type = { FILE | FOLDER }
    ''',
+    '<statement>': '',
 }
 
 BUILTIN_DEFAULTS = {  # type: ignore
@@ -627,6 +628,18 @@ class SQLHandler(NodeVisitor):
         cls.compile()
         registry.register_handler(cls, overwrite=overwrite)
 
+    def create_result(self) -> result.FusionSQLResult:
+        """
+        Create a new result object.
+
+        Returns
+        -------
+        FusionSQLResult
+            A new result object for this handler
+
+        """
+        return result.FusionSQLResult()
+
     def execute(self, sql: str) -> result.FusionSQLResult:
         """
         Parse the SQL and invoke the handler method.
@@ -746,9 +759,9 @@ class SQLHandler(NodeVisitor):
         _, out, *_ = visited_children
         return out
 
-    def visit_select(self, node: Node, visited_children: Iterable[Any]) -> Any:
-        out = ' '.join(flatten(visited_children))
-        return {'select': out}
+    def visit_statement(self, node: Node, visited_children: Iterable[Any]) -> Any:
+        out = ' '.join(flatten(visited_children)).strip()
+        return {'statement': out}
 
     def visit_order_by(self, node: Node, visited_children: Iterable[Any]) -> Any:
         """Handle ORDER BY."""
singlestoredb/fusion/handlers/export.py CHANGED
@@ -1,10 +1,12 @@
 #!/usr/bin/env python3
+import datetime
 import json
 from typing import Any
 from typing import Dict
 from typing import Optional
 
 from .. import result
+from ...management.export import _get_exports
 from ...management.export import ExportService
 from ...management.export import ExportStatus
 from ..handler import SQLHandler
@@ -104,7 +106,100 @@ class CreateClusterIdentity(SQLHandler):
 CreateClusterIdentity.register(overwrite=True)
 
 
-class CreateExport(SQLHandler):
+def _start_export(params: Dict[str, Any]) -> Optional[FusionSQLResult]:
+    # From table
+    if isinstance(params['from_table'], str):
+        from_database = None
+        from_table = params['from_table']
+    else:
+        from_database, from_table = params['from_table']
+
+    # Catalog
+    catalog_config = json.loads(params['catalog'].get('catalog_config', '{}') or '{}')
+    catalog_creds = json.loads(params['catalog'].get('catalog_creds', '{}') or '{}')
+
+    # Storage
+    storage_config = json.loads(params['storage'].get('link_config', '{}') or '{}')
+    storage_creds = json.loads(params['storage'].get('link_creds', '{}') or '{}')
+
+    storage_config['provider'] = 'S3'
+
+    wsg = get_workspace_group({})
+
+    if from_database is None:
+        raise ValueError('database name must be specified for source table')
+
+    if wsg._manager is None:
+        raise TypeError('no workspace manager is associated with workspace group')
+
+    partition_by = []
+    if params['partition_by']:
+        for key in params['partition_by']:
+            transform = key['partition_key']['transform']['col_transform']
+            part = {}
+            part['transform'] = transform[0].lower()
+            part['name'] = transform[-1]['transform_col']
+            partition_by.append(part)
+
+    order_by = []
+    if params['order_by'] and params['order_by']['by']:
+        for key in params['order_by']['by']:
+            transform = key['transform']['col_transform']
+            order = {}
+            order['transform'] = transform[0].lower()
+            order['name'] = transform[-1]['transform_col']
+            order['direction'] = 'ascending'
+            order['null_order'] = 'nulls_first'
+            if key.get('direction'):
+                if 'desc' in key['direction'].lower():
+                    order['direction'] = 'descending'
+            if key.get('null_order'):
+                if 'last' in key['null_order'].lower():
+                    order['null_order'] = 'nulls_last'
+            order_by.append(order)
+
+    # Refresh interval
+    refresh_interval_delta = None
+    refresh_interval = params.get('refresh_interval', None)
+    if refresh_interval is not None:
+        value = int(refresh_interval['refresh_interval_value'])
+        time_unit = refresh_interval['refresh_interval_time_unit'].upper()
+        if value < 0:
+            raise ValueError('refresh interval must be greater than 0')
+        if time_unit == 'SECONDS':
+            refresh_interval_delta = datetime.timedelta(seconds=int(value))
+        elif time_unit == 'MINUTES':
+            refresh_interval_delta = datetime.timedelta(minutes=int(value))
+        elif time_unit == 'HOURS':
+            refresh_interval_delta = datetime.timedelta(hours=int(value))
+        elif time_unit == 'DAYS':
+            refresh_interval_delta = datetime.timedelta(days=int(value))
+        else:
+            raise ValueError('invalid refresh interval time unit')
+
+    out = ExportService(
+        wsg,
+        from_database,
+        from_table,
+        dict(**catalog_config, **catalog_creds),
+        dict(**storage_config, **storage_creds),
+        columns=None,
+        partition_by=partition_by or None,
+        order_by=order_by or None,
+        properties=json.loads(params['properties']) if params['properties'] else None,
+        incremental=params.get('incremental', False),
+        refresh_interval=int(refresh_interval_delta.total_seconds())
+        if refresh_interval_delta is not None else None,
+    ).start()
+
+    res = FusionSQLResult()
+    res.add_field('ExportID', result.STRING)
+    res.set_rows([(out.export_id,)])
+
+    return res
+
+
+class StartExport(SQLHandler):
     """
     START EXPORT
         from_table
@@ -150,7 +245,7 @@ class CreateExport(SQLHandler):
 
     Description
    -----------
-    Create an export configuration.
+    Start an export.
 
    Arguments
    ---------
@@ -180,7 +275,6 @@ class CreateExport(SQLHandler):
        LINK S3 CONFIG '{
            "region": "us-east-1",
            "endpoint_url": "s3://bucket-name"
-
        }'
        ;
 
@@ -189,77 +283,129 @@ class CreateExport(SQLHandler):
     _enabled = False
 
     def run(self, params: Dict[str, Any]) -> Optional[FusionSQLResult]:
-        # From table
-        if isinstance(params['from_table'], str):
-            from_database = None
-            from_table = params['from_table']
-        else:
-            from_database, from_table = params['from_table']
+        return _start_export(params)
 
-        # Catalog
-        catalog_config = json.loads(params['catalog'].get('catalog_config', '{}') or '{}')
-        catalog_creds = json.loads(params['catalog'].get('catalog_creds', '{}') or '{}')
 
-        # Storage
-        storage_config = json.loads(params['storage'].get('link_config', '{}') or '{}')
-        storage_creds = json.loads(params['storage'].get('link_creds', '{}') or '{}')
+StartExport.register(overwrite=True)
 
-        storage_config['provider'] = 'S3'
 
-        wsg = get_workspace_group({})
+class StartIncrementalExport(SQLHandler):
+    """
+    START INCREMENTAL EXPORT
+        from_table
+        catalog
+        storage
+        [ partition_by ]
+        [ order_by ]
+        [ properties ]
+        [ refresh_interval ]
+    ;
 
-        if from_database is None:
-            raise ValueError('database name must be specified for source table')
+    # From table
+    from_table = FROM <table>
 
-        if wsg._manager is None:
-            raise TypeError('no workspace manager is associated with workspace group')
+    # Transforms
+    _col_transform = { VOID | IDENTITY | YEAR | MONTH | DAY | HOUR } ( _transform_col )
+    _transform_col = <column>
+    _arg_transform = { BUCKET | TRUNCATE } ( _transform_col <comma> _transform_arg )
+    _transform_arg = <integer>
+    transform = { _col_transform | _arg_transform }
 
-        partition_by = []
-        if params['partition_by']:
-            for key in params['partition_by']:
-                transform = key['partition_key']['transform']['col_transform']
-                part = {}
-                part['transform'] = transform[0].lower()
-                part['name'] = transform[-1]['transform_col']
-                partition_by.append(part)
-
-        order_by = []
-        if params['order_by'] and params['order_by']['by']:
-            for key in params['order_by']['by']:
-                transform = key['transform']['col_transform']
-                order = {}
-                order['transform'] = transform[0].lower()
-                order['name'] = transform[-1]['transform_col']
-                order['direction'] = 'ascending'
-                order['null_order'] = 'nulls_first'
-                if key.get('direction'):
-                    if 'desc' in key['direction'].lower():
-                        order['direction'] = 'descending'
-                if key.get('null_order'):
-                    if 'last' in key['null_order'].lower():
-                        order['null_order'] = 'nulls_last'
-                order_by.append(order)
+    # Partitions
+    partition_by = PARTITION BY partition_key,...
+    partition_key = transform
 
-        out = ExportService(
-            wsg,
-            from_database,
-            from_table,
-            dict(**catalog_config, **catalog_creds),
-            dict(**storage_config, **storage_creds),
-            columns=None,
-            partition_by=partition_by or None,
-            order_by=order_by or None,
-            properties=json.loads(params['properties']) if params['properties'] else None,
-        ).start()
+    # Sort order
+    order_by = ORDER BY sort_key,...
+    sort_key = transform [ direction ] [ null_order ]
+    direction = { ASC | DESC | ASCENDING | DESCENDING }
+    null_order = { NULLS_FIRST | NULLS_LAST }
 
-        res = FusionSQLResult()
-        res.add_field('ExportID', result.STRING)
-        res.set_rows([(out.export_id,)])
+    # Properties
+    properties = PROPERTIES '<json>'
 
-        return res
+    # Catolog
+    catalog = CATALOG [ _catalog_config ] [ _catalog_creds ]
+    _catalog_config = CONFIG '<catalog-config>'
+    _catalog_creds = CREDENTIALS '<catalog-creds>'
+
+    # Storage
+    storage = LINK [ _link_config ] [ _link_creds ]
+    _link_config = S3 CONFIG '<link-config>'
+    _link_creds = CREDENTIALS '<link-creds>'
+
+    # Refresh interval
+    refresh_interval = REFRESH INTERVAL _refresh_interval_value _refresh_interval_time_unit
+    _refresh_interval_value = <integer>
+    _refresh_interval_time_unit = { SECONDS | MINUTES | HOURS | DAYS }
+
+    Description
+    -----------
+    Start an incremental export.
+
+    Arguments
+    ---------
+    * ``<catalog-config>`` and ``<catalog-creds>``: The catalog configuration.
+    * ``<link-config>`` and ``<link-creds>``: The storage link configuration.
+
+    Remarks
+    -------
+    * ``FROM <table>`` specifies the SingleStore table to export. The same name will
+      be used for the exported table.
+    * ``CATALOG`` specifies the details of the catalog to connect to.
+    * ``LINK`` specifies the details of the data storage to connect to.
+    * ``REFRESH INTERVAL`` specifies the interval for refreshing the
+      incremental export. The default is 1 day.
+
+    Examples
+    --------
+    The following statement starts an export operation with the given
+    catalog and link configurations. The source table to export is
+    named "customer_data"::
 
+        START INCREMENTAL EXPORT FROM my_db.customer_data
+            CATALOG CONFIG '{
+                "catalog_type": "GLUE",
+                "table_format": "ICEBERG",
+                "catalog_id": "13983498723498",
+                "catalog_region": "us-east-1"
+            }'
+            LINK S3 CONFIG '{
+                "region": "us-east-1",
+                "endpoint_url": "s3://bucket-name"
+            }'
+            REFRESH INTERVAL 24 HOURS
+        ;
+
+    """  # noqa
+
+    _enabled = False
+
+    def run(self, params: Dict[str, Any]) -> Optional[FusionSQLResult]:
+        params['incremental'] = True
+        return _start_export(params)
+
+
+StartIncrementalExport.register(overwrite=True)
+
+
+def _format_status(export_id: str, status: ExportStatus) -> Optional[FusionSQLResult]:
+    """Return the status of an export operation."""
+    info = status._info()
 
-CreateExport.register(overwrite=True)
+    res = FusionSQLResult()
+    res.add_field('ExportID', result.STRING)
+    res.add_field('Status', result.STRING)
+    res.add_field('Message', result.STRING)
+    res.set_rows([
+        (
+            export_id,
+            info.get('status', 'Unknown'),
+            info.get('statusMsg', ''),
+        ),
+    ])
+
+    return res
 
 
 class ShowExport(SQLHandler):
@@ -275,9 +421,29 @@ class ShowExport(SQLHandler):
 
     def run(self, params: Dict[str, Any]) -> Optional[FusionSQLResult]:
         wsg = get_workspace_group({})
-        out = ExportStatus(params['export_id'], wsg)
+        return _format_status(
+            params['export_id'], ExportStatus(params['export_id'], wsg),
+        )
+
+
+ShowExport.register(overwrite=True)
+
+
+class ShowExports(SQLHandler):
+    """
+    SHOW EXPORTS [ scope ];
+
+    # Location of the export
+    scope = FOR '<scope>'
+
+    """
+
+    _enabled = False
+
+    def run(self, params: Dict[str, Any]) -> Optional[FusionSQLResult]:
+        wsg = get_workspace_group({})
 
-        status = out._info()
+        exports = _get_exports(wsg, params.get('scope', 'all'))
 
         res = FusionSQLResult()
         res.add_field('ExportID', result.STRING)
@@ -285,13 +451,75 @@ class ShowExport(SQLHandler):
         res.add_field('Message', result.STRING)
         res.set_rows([
             (
-                params['export_id'],
-                status.get('status', 'Unknown'),
-                status.get('statusMsg', ''),
-            ),
+                info['egressID'],
+                info.get('status', 'Unknown'),
+                info.get('statusMsg', ''),
+            )
+            for info in [x._info() for x in exports]
         ])
 
         return res
 
 
-ShowExport.register(overwrite=True)
+ShowExports.register(overwrite=True)
+
+
+class SuspendExport(SQLHandler):
+    """
+    SUSPEND EXPORT export_id;
+
+    # ID of export
+    export_id = '<export-id>'
+
+    """
+
+    _enabled = False
+
+    def run(self, params: Dict[str, Any]) -> Optional[FusionSQLResult]:
+        wsg = get_workspace_group({})
+        service = ExportService.from_export_id(wsg, params['export_id'])
+        return _format_status(params['export_id'], service.suspend())
+
+
+SuspendExport.register(overwrite=True)
+
+
+class ResumeExport(SQLHandler):
+    """
+    RESUME EXPORT export_id;
+
+    # ID of export
+    export_id = '<export-id>'
+
+    """
+
+    _enabled = False
+
+    def run(self, params: Dict[str, Any]) -> Optional[FusionSQLResult]:
+        wsg = get_workspace_group({})
+        service = ExportService.from_export_id(wsg, params['export_id'])
+        return _format_status(params['export_id'], service.resume())
+
+
+ResumeExport.register(overwrite=True)
+
+
+class DropExport(SQLHandler):
+    """
+    DROP EXPORT export_id;
+
+    # ID of export
+    export_id = '<export-id>'
+
+    """
+
+    _enabled = False
+
+    def run(self, params: Dict[str, Any]) -> Optional[FusionSQLResult]:
+        wsg = get_workspace_group({})
+        service = ExportService.from_export_id(wsg, params['export_id'])
        service.drop()
        return None
+
+
+DropExport.register(overwrite=True)
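
For reference, the REFRESH INTERVAL clause parsed by _start_export above reduces to a plain number of seconds before being handed to ExportService. A standalone sketch of that arithmetic (values are illustrative):

    import datetime

    # Mirrors the unit handling in _start_export(): REFRESH INTERVAL 24 HOURS
    # ends up as 86400 seconds on the ExportService call.
    value, unit = 24, 'HOURS'
    delta = {
        'SECONDS': datetime.timedelta(seconds=value),
        'MINUTES': datetime.timedelta(minutes=value),
        'HOURS': datetime.timedelta(hours=value),
        'DAYS': datetime.timedelta(days=value),
    }[unit]
    print(int(delta.total_seconds()))  # -> 86400
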
singlestoredb/http/connection.py CHANGED
@@ -569,8 +569,10 @@ class Cursor(connection.Cursor):
 
         if res.status_code >= 400:
             if res.text:
-                if re.match(r'^Error\s+\d+:', res.text):
-                    code, msg = res.text.split(':', 1)
+                m = re.match(r'^Error\s+(\d+).*?:', res.text)
+                if m:
+                    code = m.group(1)
+                    msg = res.text.split(':', 1)[-1]
                     icode = int(code.split()[-1])
                 else:
                     icode = res.status_code
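
The reworked pattern tolerates extra text between the error number and the colon, where the old `^Error\s+\d+:` required the colon to follow the digits immediately. A standalone check with a made-up error string (the server's exact format is not shown in this diff):

    import re

    # Hypothetical HTTP API error body used only to exercise the new pattern.
    text = 'Error 1064 (42000): You have an error in your SQL syntax'

    m = re.match(r'^Error\s+(\d+).*?:', text)
    assert m is not None and m.group(1) == '1064'
    assert re.match(r'^Error\s+\d+:', text) is None  # old pattern misses this form

    code, msg = m.group(1), text.split(':', 1)[-1]
    print(int(code.split()[-1]), msg.strip())
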
singlestoredb/management/export.py CHANGED
@@ -27,6 +27,9 @@ class ExportService(object):
     partition_by: Optional[List[Dict[str, str]]]
     order_by: Optional[List[Dict[str, Dict[str, str]]]]
     properties: Optional[Dict[str, Any]]
+    incremental: bool
+    refresh_interval: Optional[int]
+    export_id: Optional[str]
 
     def __init__(
         self,
@@ -38,6 +41,8 @@ class ExportService(object):
         columns: Optional[List[str]] = None,
         partition_by: Optional[List[Dict[str, str]]] = None,
         order_by: Optional[List[Dict[str, Dict[str, str]]]] = None,
+        incremental: bool = False,
+        refresh_interval: Optional[int] = None,
         properties: Optional[Dict[str, Any]] = None,
     ):
         #: Workspace group
@@ -68,8 +73,30 @@ class ExportService(object):
         self.order_by = order_by or None
         self.properties = properties or None
 
+        self.incremental = incremental
+        self.refresh_interval = refresh_interval
+
+        self.export_id = None
+
         self._manager: Optional[WorkspaceManager] = workspace_group._manager
 
+    @classmethod
+    def from_export_id(
+        self,
+        workspace_group: WorkspaceGroup,
+        export_id: str,
+    ) -> ExportService:
+        """Create export service from export ID."""
+        out = ExportService(
+            workspace_group=workspace_group,
+            database='',
+            table='',
+            catalog_info={},
+            storage_info={},
+        )
+        out.export_id = export_id
+        return out
+
     def __str__(self) -> str:
         """Return string representation."""
         return vars_to_str(self)
@@ -98,6 +125,11 @@ class ExportService(object):
 
     def start(self, tags: Optional[List[str]] = None) -> 'ExportStatus':
         """Start the export process."""
+        if not self.table or not self.database:
+            raise ManagementError(
+                msg='Database and table must be set before starting the export.',
+            )
+
         if self._manager is None:
             raise ManagementError(
                 msg='No workspace manager is associated with this object.',
@@ -122,11 +154,87 @@ class ExportService(object):
                     partitionSpec=partition_spec,
                     sortOrderSpec=sort_order_spec,
                     properties=self.properties,
+                    incremental=self.incremental or None,
+                    refreshInterval=self.refresh_interval
+                    if self.refresh_interval is not None else None,
                ).items() if v is not None
            },
        )
 
-        return ExportStatus(out.json()['egressID'], self.workspace_group)
+        self.export_id = str(out.json()['egressID'])
+
+        return ExportStatus(self.export_id, self.workspace_group)
+
+    def suspend(self) -> 'ExportStatus':
+        """Suspend the export process."""
+        if self._manager is None:
+            raise ManagementError(
+                msg='No workspace manager is associated with this object.',
+            )
+
+        if self.export_id is None:
+            raise ManagementError(
+                msg='Export ID is not set. You must start the export first.',
+            )
+
+        self._manager._post(
+            f'workspaceGroups/{self.workspace_group.id}/egress/suspendTableEgress',
+            json=dict(egressID=self.export_id),
+        )
+
+        return ExportStatus(self.export_id, self.workspace_group)
+
+    def resume(self) -> 'ExportStatus':
+        """Resume the export process."""
+        if self._manager is None:
+            raise ManagementError(
+                msg='No workspace manager is associated with this object.',
+            )
+
+        if self.export_id is None:
+            raise ManagementError(
+                msg='Export ID is not set. You must start the export first.',
+            )
+
+        self._manager._post(
+            f'workspaceGroups/{self.workspace_group.id}/egress/resumeTableEgress',
+            json=dict(egressID=self.export_id),
+        )
+
+        return ExportStatus(self.export_id, self.workspace_group)
+
+    def drop(self) -> None:
+        """Drop the export process."""
+        if self._manager is None:
+            raise ManagementError(
+                msg='No workspace manager is associated with this object.',
+            )
+
+        if self.export_id is None:
+            raise ManagementError(
+                msg='Export ID is not set. You must start the export first.',
+            )
+
+        self._manager._delete(
+            f'workspaceGroups/{self.workspace_group.id}/egress/dropTableEgress',
+            json=dict(egressID=self.export_id),
+        )
+
+        return None
+
+    def status(self) -> ExportStatus:
+        """Get the status of the export process."""
+        if self._manager is None:
+            raise ManagementError(
+                msg='No workspace manager is associated with this object.',
+            )
+
+        if self.export_id is None:
+            raise ManagementError(
+                msg='Export ID is not set. You must start the export first.',
+            )
+
+        return ExportStatus(self.export_id, self.workspace_group)
 
 
 class ExportStatus(object):
@@ -167,3 +275,21 @@ class ExportStatus(object):
 
     def __repr__(self) -> str:
         return self.status
+
+
+def _get_exports(
+    workspace_group: WorkspaceGroup,
+    scope: str = 'all',
+) -> List[ExportStatus]:
+    """Get all exports in the workspace group."""
+    if workspace_group._manager is None:
+        raise ManagementError(
+            msg='No workspace manager is associated with this object.',
+        )
+
+    out = workspace_group._manager._get(
+        f'workspaceGroups/{workspace_group.id}/egress/tableEgressStatus',
+        json=dict(scope=scope),
+    )
+
+    return out.json()
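
A hedged usage sketch of the new lifecycle methods, assuming valid Management API credentials, an existing workspace group, and an export ID previously returned by start(); 'my-export-id' and the group lookup are placeholders, not output from the package docs:

    import singlestoredb as s2
    from singlestoredb.management.export import ExportService

    mgr = s2.manage_workspaces()     # management API entry point
    wsg = mgr.workspace_groups[0]    # pick a workspace group (illustrative)

    svc = ExportService.from_export_id(wsg, 'my-export-id')
    print(svc.status().status)       # current status of the export
    svc.suspend()                    # pause an incremental export
    svc.resume()                     # pick it back up
    # svc.drop()                     # remove the export entirely
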
singlestoredb/tests/test_fusion.py CHANGED
@@ -499,7 +499,10 @@ class TestJobsFusion(unittest.TestCase):
     @classmethod
     def tearDownClass(cls):
         for job_id in cls.job_ids:
-            cls.manager.organizations.current.jobs.delete(job_id)
+            try:
+                cls.manager.organizations.current.jobs.delete(job_id)
+            except Exception:
+                pass
         if cls.workspace_group is not None:
             cls.workspace_group.terminate(force=True)
         cls.manager = None
singlestoredb/tests/test_management.py CHANGED
@@ -397,13 +397,16 @@ class TestStage(unittest.TestCase):
     def test_upload_file(self):
         st = self.wg.stage
 
+        upload_test_sql = f'upload_test_{id(self)}.sql'
+        upload_test2_sql = f'upload_test2_{id(self)}.sql'
+
         root = st.info('/')
         assert str(root.path) == '/'
         assert root.type == 'directory'
 
         # Upload file
-        f = st.upload_file(TEST_DIR / 'test.sql', 'upload_test.sql')
-        assert str(f.path) == 'upload_test.sql'
+        f = st.upload_file(TEST_DIR / 'test.sql', upload_test_sql)
+        assert str(f.path) == upload_test_sql
         assert f.type == 'file'
 
         # Download and compare to original
@@ -412,15 +415,15 @@ class TestStage(unittest.TestCase):
 
         # Make sure we can't overwrite
         with self.assertRaises(OSError):
-            st.upload_file(TEST_DIR / 'test.sql', 'upload_test.sql')
+            st.upload_file(TEST_DIR / 'test.sql', upload_test_sql)
 
         # Force overwrite with new content; use file object this time
         f = st.upload_file(
             open(TEST_DIR / 'test2.sql', 'r'),
-            'upload_test.sql',
+            upload_test_sql,
             overwrite=True,
         )
-        assert str(f.path) == 'upload_test.sql'
+        assert str(f.path) == upload_test_sql
         assert f.type == 'file'
 
         # Verify new content
@@ -442,63 +445,67 @@ class TestStage(unittest.TestCase):
         # Write file into folder
         f = st.upload_file(
             TEST_DIR / 'test2.sql',
-            os.path.join(lib.path, 'upload_test2.sql'),
+            os.path.join(lib.path, upload_test2_sql),
         )
-        assert str(f.path) == 'lib/upload_test2.sql'
+        assert str(f.path) == 'lib/' + upload_test2_sql
         assert f.type == 'file'
 
     def test_open(self):
         st = self.wg.stage
 
+        open_test_sql = f'open_test_{id(self)}.sql'
+
         # See if error is raised for non-existent file
         with self.assertRaises(s2.ManagementError):
-            st.open('open_test.sql', 'r')
+            st.open(open_test_sql, 'r')
 
         # Load test file
-        st.upload_file(TEST_DIR / 'test.sql', 'open_test.sql')
+        st.upload_file(TEST_DIR / 'test.sql', open_test_sql)
 
         # Read file using `open`
-        with st.open('open_test.sql', 'r') as rfile:
+        with st.open(open_test_sql, 'r') as rfile:
            assert rfile.read() == open(TEST_DIR / 'test.sql').read()
 
         # Read file using `open` with 'rt' mode
-        with st.open('open_test.sql', 'rt') as rfile:
+        with st.open(open_test_sql, 'rt') as rfile:
            assert rfile.read() == open(TEST_DIR / 'test.sql').read()
 
         # Read file using `open` with 'rb' mode
-        with st.open('open_test.sql', 'rb') as rfile:
+        with st.open(open_test_sql, 'rb') as rfile:
            assert rfile.read() == open(TEST_DIR / 'test.sql', 'rb').read()
 
         # Read file using `open` with 'rb' mode
         with self.assertRaises(ValueError):
-            with st.open('open_test.sql', 'b') as rfile:
+            with st.open(open_test_sql, 'b') as rfile:
                pass
 
         # Attempt overwrite file using `open` with mode 'x'
         with self.assertRaises(OSError):
-            with st.open('open_test.sql', 'x') as wfile:
+            with st.open(open_test_sql, 'x') as wfile:
                pass
 
         # Attempt overwrite file using `open` with mode 'w'
-        with st.open('open_test.sql', 'w') as wfile:
+        with st.open(open_test_sql, 'w') as wfile:
            wfile.write(open(TEST_DIR / 'test2.sql').read())
 
-        txt = st.download_file('open_test.sql', encoding='utf-8')
+        txt = st.download_file(open_test_sql, encoding='utf-8')
 
         assert txt == open(TEST_DIR / 'test2.sql').read()
 
+        open_raw_test_sql = f'open_raw_test_{id(self)}.sql'
+
         # Test writer without context manager
-        wfile = st.open('open_raw_test.sql', 'w')
+        wfile = st.open(open_raw_test_sql, 'w')
         for line in open(TEST_DIR / 'test.sql'):
             wfile.write(line)
         wfile.close()
 
-        txt = st.download_file('open_raw_test.sql', encoding='utf-8')
+        txt = st.download_file(open_raw_test_sql, encoding='utf-8')
 
         assert txt == open(TEST_DIR / 'test.sql').read()
 
         # Test reader without context manager
-        rfile = st.open('open_raw_test.sql', 'r')
+        rfile = st.open(open_raw_test_sql, 'r')
         txt = ''
         for line in rfile:
             txt += line
@@ -509,15 +516,18 @@ class TestStage(unittest.TestCase):
     def test_obj_open(self):
         st = self.wg.stage
 
+        obj_open_test_sql = f'obj_open_test_{id(self)}.sql'
+        obj_open_dir = f'obj_open_dir_{id(self)}'
+
         # Load test file
-        f = st.upload_file(TEST_DIR / 'test.sql', 'obj_open_test.sql')
+        f = st.upload_file(TEST_DIR / 'test.sql', obj_open_test_sql)
 
         # Read file using `open`
         with f.open() as rfile:
             assert rfile.read() == open(TEST_DIR / 'test.sql').read()
 
         # Make sure directories error out
-        d = st.mkdir('obj_open_dir')
+        d = st.mkdir(obj_open_dir)
         with self.assertRaises(IsADirectoryError):
             d.open()
 
@@ -921,7 +931,10 @@ class TestJob(unittest.TestCase):
     @classmethod
     def tearDownClass(cls):
         for job_id in cls.job_ids:
-            cls.manager.organizations.current.jobs.delete(job_id)
+            try:
+                cls.manager.organizations.current.jobs.delete(job_id)
+            except Exception:
+                pass
         if cls.workspace_group is not None:
             cls.workspace_group.terminate(force=True)
         cls.workspace_group = None
@@ -1054,6 +1067,8 @@ class TestFileSpaces(unittest.TestCase):
         cls.shared_space = None
 
     def test_upload_file(self):
+        upload_test_ipynb = f'upload_test_{id(self)}.ipynb'
+
         for space in [self.personal_space, self.shared_space]:
             root = space.info('/')
             assert str(root.path) == '/'
@@ -1062,9 +1077,9 @@ class TestFileSpaces(unittest.TestCase):
             # Upload files
             f = space.upload_file(
                 TEST_DIR / 'test.ipynb',
-                'upload_test.ipynb',
+                upload_test_ipynb,
             )
-            assert str(f.path) == 'upload_test.ipynb'
+            assert str(f.path) == upload_test_ipynb
             assert f.type == 'notebook'
 
             # Download and compare to original
@@ -1075,15 +1090,15 @@ class TestFileSpaces(unittest.TestCase):
             with self.assertRaises(OSError):
                 space.upload_file(
                     TEST_DIR / 'test.ipynb',
-                    'upload_test.ipynb',
+                    upload_test_ipynb,
                 )
 
             # Force overwrite with new content
             f = space.upload_file(
                 TEST_DIR / 'test2.ipynb',
-                'upload_test.ipynb', overwrite=True,
+                upload_test_ipynb, overwrite=True,
             )
-            assert str(f.path) == 'upload_test.ipynb'
+            assert str(f.path) == upload_test_ipynb
             assert f.type == 'notebook'
 
             # Verify new content
@@ -1095,9 +1110,11 @@ class TestFileSpaces(unittest.TestCase):
             space.upload_folder(TEST_DIR, 'test')
 
             # Cleanup
-            space.remove('upload_test.ipynb')
+            space.remove(upload_test_ipynb)
 
     def test_upload_file_io(self):
+        upload_test_ipynb = f'upload_test_{id(self)}.ipynb'
+
         for space in [self.personal_space, self.shared_space]:
             root = space.info('/')
             assert str(root.path) == '/'
@@ -1106,9 +1123,9 @@ class TestFileSpaces(unittest.TestCase):
             # Upload files
             f = space.upload_file(
                 open(TEST_DIR / 'test.ipynb', 'r'),
-                'upload_test.ipynb',
+                upload_test_ipynb,
             )
-            assert str(f.path) == 'upload_test.ipynb'
+            assert str(f.path) == upload_test_ipynb
             assert f.type == 'notebook'
 
             # Download and compare to original
@@ -1119,15 +1136,15 @@ class TestFileSpaces(unittest.TestCase):
             with self.assertRaises(OSError):
                 space.upload_file(
                     open(TEST_DIR / 'test.ipynb', 'r'),
-                    'upload_test.ipynb',
+                    upload_test_ipynb,
                 )
 
             # Force overwrite with new content
             f = space.upload_file(
                 open(TEST_DIR / 'test2.ipynb', 'r'),
-                'upload_test.ipynb', overwrite=True,
+                upload_test_ipynb, overwrite=True,
             )
-            assert str(f.path) == 'upload_test.ipynb'
+            assert str(f.path) == upload_test_ipynb
             assert f.type == 'notebook'
 
             # Verify new content
@@ -1139,62 +1156,66 @@ class TestFileSpaces(unittest.TestCase):
             space.upload_folder(TEST_DIR, 'test')
 
             # Cleanup
-            space.remove('upload_test.ipynb')
+            space.remove(upload_test_ipynb)
 
     def test_open(self):
         for space in [self.personal_space, self.shared_space]:
+            open_test_ipynb = f'open_test_ipynb_{id(self)}.ipynb'
+
             # See if error is raised for non-existent file
             with self.assertRaises(s2.ManagementError):
-                space.open('open_test.ipynb', 'r')
+                space.open(open_test_ipynb, 'r')
 
             # Load test file
-            space.upload_file(TEST_DIR / 'test.ipynb', 'open_test.ipynb')
+            space.upload_file(TEST_DIR / 'test.ipynb', open_test_ipynb)
 
             # Read file using `open`
-            with space.open('open_test.ipynb', 'r') as rfile:
+            with space.open(open_test_ipynb, 'r') as rfile:
                 assert rfile.read() == open(TEST_DIR / 'test.ipynb').read()
 
             # Read file using `open` with 'rt' mode
-            with space.open('open_test.ipynb', 'rt') as rfile:
+            with space.open(open_test_ipynb, 'rt') as rfile:
                 assert rfile.read() == open(TEST_DIR / 'test.ipynb').read()
 
             # Read file using `open` with 'rb' mode
-            with space.open('open_test.ipynb', 'rb') as rfile:
+            with space.open(open_test_ipynb, 'rb') as rfile:
                 assert rfile.read() == open(TEST_DIR / 'test.ipynb', 'rb').read()
 
             # Read file using `open` with 'rb' mode
             with self.assertRaises(ValueError):
-                with space.open('open_test.ipynb', 'b') as rfile:
+                with space.open(open_test_ipynb, 'b') as rfile:
                     pass
 
             # Attempt overwrite file using `open` with mode 'x'
             with self.assertRaises(OSError):
-                with space.open('open_test.ipynb', 'x') as wfile:
+                with space.open(open_test_ipynb, 'x') as wfile:
                     pass
 
             # Attempt overwrite file using `open` with mode 'w'
-            with space.open('open_test.ipynb', 'w') as wfile:
+            with space.open(open_test_ipynb, 'w') as wfile:
                 wfile.write(open(TEST_DIR / 'test2.ipynb').read())
 
-            txt = space.download_file('open_test.ipynb', encoding='utf-8')
+            txt = space.download_file(open_test_ipynb, encoding='utf-8')
 
             assert txt == open(TEST_DIR / 'test2.ipynb').read()
 
+            open_raw_test_ipynb = f'open_raw_test_{id(self)}.ipynb'
+
             # Test writer without context manager
-            wfile = space.open('open_raw_test.ipynb', 'w')
+            wfile = space.open(open_raw_test_ipynb, 'w')
             for line in open(TEST_DIR / 'test.ipynb'):
                 wfile.write(line)
             wfile.close()
 
             txt = space.download_file(
-                'open_raw_test.ipynb',
+                open_raw_test_ipynb,
                 encoding='utf-8',
             )
 
             assert txt == open(TEST_DIR / 'test.ipynb').read()
 
             # Test reader without context manager
-            rfile = space.open('open_raw_test.ipynb', 'r')
+            rfile = space.open(open_raw_test_ipynb, 'r')
             txt = ''
             for line in rfile:
                 txt += line
@@ -1203,15 +1224,18 @@ class TestFileSpaces(unittest.TestCase):
             assert txt == open(TEST_DIR / 'test.ipynb').read()
 
             # Cleanup
-            space.remove('open_test.ipynb')
-            space.remove('open_raw_test.ipynb')
+            space.remove(open_test_ipynb)
+            space.remove(open_raw_test_ipynb)
 
     def test_obj_open(self):
         for space in [self.personal_space, self.shared_space]:
+            obj_open_test_ipynb = f'obj_open_test_{id(self)}.ipynb'
+            obj_open_dir = f'obj_open_dir_{id(self)}'
+
             # Load test file
             f = space.upload_file(
                 TEST_DIR / 'test.ipynb',
-                'obj_open_test.ipynb',
+                obj_open_test_ipynb,
             )
 
             # Read file using `open`
@@ -1220,7 +1244,7 @@ class TestFileSpaces(unittest.TestCase):
 
             # Make sure directories error out
             with self.assertRaises(s2.ManagementError):
-                space.mkdir('obj_open_dir')
+                space.mkdir(obj_open_dir)
 
             # Write file using `open`
             with f.open('w', encoding='utf-8') as wfile:
@@ -1248,7 +1272,7 @@ class TestFileSpaces(unittest.TestCase):
             assert txt == open(TEST_DIR / 'test.ipynb').read()
 
             # Cleanup
-            space.remove('obj_open_test.ipynb')
+            space.remove(obj_open_test_ipynb)
 
     def test_os_directories(self):
         for space in [self.personal_space, self.shared_space]:
singlestoredb/types.py CHANGED
@@ -173,17 +173,25 @@ class ColumnType(object):
         'DECIMAL', 'DEC', 'FIXED', 'NUMERIC', 0, decimal.Decimal,
     )
     DEC = FIXED = NUMERIC = DECIMAL
-    TINY = TINYINT = BOOL = BOOLEAN = NumberDBAPIType(
-        'TINY', 'TINYINT', 'BOOL', 'BOOLEAN', 1,
+    TINY = TINYINT = BOOL = BOOLEAN = UNSIGNED_TINY = UNSIGNED_TINYINT = NumberDBAPIType(
+        'TINY', 'TINYINT', 'BOOL', 'BOOLEAN', 'UNSIGNED TINY', 'UNSIGNED TINYINT', 1,
+    )
+    SHORT = SMALLINT = UNSIGNED_SHORT = UNSIGNED_SMALLINT = NumberDBAPIType(
+        'SMALLINT', 'SHORT', 'UNSIGNED SHORT', 'UNSIGNED SMALLINT', 2,
+    )
+    LONG = INT = UNSIGNED_LONG = UNSIGNED_INT = NumberDBAPIType(
+        'LONG', 'INT', 'UNSIGNED LONG', 'UNSIGNED INT', 3,
     )
-    SHORT = SMALLINT = NumberDBAPIType('SMALLINT', 'SHORT', 2)
-    LONG = INT = NumberDBAPIType('LONG', 'INT', 3)
     FLOAT = NumberDBAPIType('FLOAT', 4)
     DOUBLE = REAL = NumberDBAPIType('DOUBLE', 5, float)
     NULL = DBAPIType('NULL', 6)
     TIMESTAMP = DatetimeDBAPIType('TIMESTAMP', 7)
-    LONGLONG = BIGINT = NumberDBAPIType('BIGINT', 'LONGLONG', 8, int)
-    MEDIUMINT = INT24 = NumberDBAPIType('MEDIUMINT', 'INT24', 9)
+    LONGLONG = BIGINT = UNSIGNED_LONGLONG = UNSIGNED_BIGINT = NumberDBAPIType(
+        'BIGINT', 'LONGLONG', 'UNSIGNED LONGLONG', 'UNSIGNED BIGINT', 8, int,
+    )
+    MEDIUMINT = INT24 = UNSIGNED_MEDIUMINT = UNSIGNED_INT24 = NumberDBAPIType(
+        'MEDIUMINT', 'INT24', 'UNSIGNED MEDIUMINT', 'UNSIGNED INT24', 9,
+    )
     DATE = DBAPIType('DATE', 10, datetime.date)
     TIME = DBAPIType('TIME', 11, datetime.time)
     DATETIME = DatetimeDBAPIType('DATETIME', 12, datetime.datetime)
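
Because the UNSIGNED_* names are bound in the same assignment statements as their signed counterparts, they refer to the very same DBAPIType objects; only the set of accepted type-name strings grows. A quick check, assuming the 1.14.2 wheel is installed:

    from singlestoredb.types import ColumnType

    # Each UNSIGNED_* attribute aliases its signed counterpart, so code that
    # compares against ColumnType.BIGINT keeps working unchanged.
    assert ColumnType.UNSIGNED_BIGINT is ColumnType.BIGINT
    assert ColumnType.UNSIGNED_SMALLINT is ColumnType.SHORT
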
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: singlestoredb
-Version: 1.14.0
+Version: 1.14.2
 Summary: Interface to the SingleStoreDB database and workspace management APIs
 Home-page: https://github.com/singlestore-labs/singlestoredb-python
 Author: SingleStore
@@ -19,7 +19,6 @@ Requires-Dist: build
 Requires-Dist: parsimonious
 Requires-Dist: requests
 Requires-Dist: setuptools
-Requires-Dist: singlestore-vectorstore >=0.1.2
 Requires-Dist: sqlparams
 Requires-Dist: wheel
 Requires-Dist: tomli >=1.1.0 ; python_version < "3.11"
@@ -44,6 +43,8 @@ Provides-Extra: rsa
 Requires-Dist: cryptography ; extra == 'rsa'
 Provides-Extra: sqlalchemy
 Requires-Dist: sqlalchemy-singlestoredb >=1.0.0 ; extra == 'sqlalchemy'
+Provides-Extra: vectorstore
+Requires-Dist: singlestore-vectorstore >=0.1.2 ; extra == 'vectorstore'
 
 # <img src="https://github.com/singlestore-labs/singlestoredb-python/blob/main/resources/singlestore-logo.png" height="60" valign="middle"/> SingleStoreDB Python SDK
 
@@ -1,24 +1,24 @@
-_singlestoredb_accel.abi3.so,sha256=Xxm-lpuGz75wOE4dR6ZzwdAsUFVZd0D5uBf1-2QqJ_o,207216
+_singlestoredb_accel.abi3.so,sha256=KFB7Tj4Gf86RMqkWtxyU_QxE8if2oQkBMkTyk304zmk,207216
+singlestoredb-1.14.2.dist-info/RECORD,,
+singlestoredb-1.14.2.dist-info/LICENSE,sha256=Mlq78idURT-9G026aMYswwwnnrLcgzTLuXeAs5hjDLM,11341
+singlestoredb-1.14.2.dist-info/WHEEL,sha256=_VEguvlLpUd-c8RbFMA4yMIVNMBv2LhpxYLCEQ-Bogk,113
+singlestoredb-1.14.2.dist-info/entry_points.txt,sha256=bSLaTWB5zGjpVYPAaI46MkkDup0su-eb3uAhCNYuRV0,48
+singlestoredb-1.14.2.dist-info/top_level.txt,sha256=lA65Vf4qAMfg_s1oG3LEO90h4t1Z-SPDbRqkevI3bSY,40
+singlestoredb-1.14.2.dist-info/METADATA,sha256=oSEkI_zMB3TgGwyd6a3fR6OGXdvPZFr0U5zVXoOmq7E,5804
 sqlx/magic.py,sha256=JsS9_9aBFaOt91Torm1JPN0c8qB2QmYJmNSKtbSQIY0,3509
 sqlx/__init__.py,sha256=aBYiU8DZXCogvWu3yWafOz7bZS5WWwLZXj7oL0dXGyU,85
-singlestoredb-1.14.0.dist-info/RECORD,,
-singlestoredb-1.14.0.dist-info/LICENSE,sha256=Mlq78idURT-9G026aMYswwwnnrLcgzTLuXeAs5hjDLM,11341
-singlestoredb-1.14.0.dist-info/WHEEL,sha256=_VEguvlLpUd-c8RbFMA4yMIVNMBv2LhpxYLCEQ-Bogk,113
-singlestoredb-1.14.0.dist-info/entry_points.txt,sha256=bSLaTWB5zGjpVYPAaI46MkkDup0su-eb3uAhCNYuRV0,48
-singlestoredb-1.14.0.dist-info/top_level.txt,sha256=lA65Vf4qAMfg_s1oG3LEO90h4t1Z-SPDbRqkevI3bSY,40
-singlestoredb-1.14.0.dist-info/METADATA,sha256=VuCeWXgdykhi4lW_WxMRT3qUsLRZAR3GmLLLhBs5YAw,5751
 singlestoredb/auth.py,sha256=u8D9tpKzrqa4ssaHjyZnGDX1q8XBpGtuoOkTkSv7B28,7599
 singlestoredb/config.py,sha256=dayUWwSy2YdgmhF8tzH-7FwFpwon5bgX_VeX-Yu5ia4,12969
 singlestoredb/vectorstore.py,sha256=BZb8e7m02_XVHqOyu8tA94R6kHb3n-BC8F08JyJwDzY,8408
-singlestoredb/__init__.py,sha256=ztn02QsJ2xuKyXGfbfR6nBSJPkhLlSRWcOT2X3myhrU,2091
-singlestoredb/types.py,sha256=FIqO1A7e0Gkk7ITmIysBy-P5S--ItbMSlYvblzqGS30,9969
+singlestoredb/__init__.py,sha256=LXawJyI2-AilQxUSQ8IJnDUiwZk5nrB-FGGpHWcToGI,2256
+singlestoredb/types.py,sha256=Qp_PWYjSYG6PRnmXAZZ7K2QehUqfoG4KSllI3O1stPE,10397
 singlestoredb/connection.py,sha256=ELk3-UpM6RaB993aIt08MydKiiDnejHQ1s8EFiacrAI,46055
 singlestoredb/pytest.py,sha256=OyF3BO9mgxenifYhOihnzGk8WzCJ_zN5_mxe8XyFPOc,9074
 singlestoredb/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 singlestoredb/exceptions.py,sha256=HuoA6sMRL5qiCiee-_5ddTGmFbYC9Euk8TYUsh5GvTw,3234
 singlestoredb/converters.py,sha256=Ui-AqdW3pRAQ8A_YcK9EqVYyM4Pt1_Q-tjlotbpK6Cw,20686
 singlestoredb/fusion/graphql.py,sha256=ZA3HcDq5rER-dCEavwTqnF7KM0D2LCYIY7nLQk7lSso,5207
-singlestoredb/fusion/handler.py,sha256=HEW83De1zj94hvG7rbqlOszIIgBKiag0UGO5I0WoJ6A,27400
+singlestoredb/fusion/handler.py,sha256=M5iyNP4zOaGqUqnZg_b5xhRE-8tHgfZSHDH0zKTiJmE,27692
 singlestoredb/fusion/registry.py,sha256=jjdRTYZ3ylhy6gAoW5xBj0tkxGFBT-2yLQ0tztTgDIY,6112
 singlestoredb/fusion/__init__.py,sha256=Qo7SuqGw-l-vE8-EI2jhm6hXJkYfOLUKIws9c7LFNX0,356
 singlestoredb/fusion/result.py,sha256=p5I65C-Dhhl1yeZwetXXZabwritr8Ph2mFvJJ3ovcBM,11790
@@ -26,7 +26,7 @@ singlestoredb/fusion/handlers/files.py,sha256=McoRacihcQn0-qILujBi0HCNyuFcrBoIUG
 singlestoredb/fusion/handlers/models.py,sha256=xJPIG0_GgF-VrmPoIsU2U4AsS7ytDz8JMRaqchglAR0,6236
 singlestoredb/fusion/handlers/job.py,sha256=r0KdOD55VUDw-SymC__5Mn-fzJTZE_xcBgH-O8DYVHc,21095
 singlestoredb/fusion/handlers/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-singlestoredb/fusion/handlers/export.py,sha256=3moTJeqsHkDDpitUUAE6x95JYH2rmb28MALbO4x0dcc,8981
+singlestoredb/fusion/handlers/export.py,sha256=Af4eIMPGKEOpmf4LXnvQsgmnvx5F8B5FkRI20RvEa7o,15309
 singlestoredb/fusion/handlers/utils.py,sha256=ozHOWUraoN8XGTK9JZdhv5HV8AQR8zfUd1yh1kLvUXY,10685
 singlestoredb/fusion/handlers/stage.py,sha256=kYVjbPys83kf3jX6jWwN8Ju0oEocKVZ3TIOt2HiC5Ew,14287
 singlestoredb/fusion/handlers/workspace.py,sha256=4xN2TFO4yF7KZB2Fcht7IuvoDdAT6fDfDLjixiHZN8w,27506
@@ -34,7 +34,7 @@ singlestoredb/tests/test.sql,sha256=dfMehVCQ9wObSVTQKyQi-fRFDZeqRxV4Cj8doBCPEFM,
 singlestoredb/tests/test_xdict.py,sha256=fqHspoi39nbX3fIDVkkRXcd5H50xdOsSvK0bxAMQnaE,10408
 singlestoredb/tests/test_results.py,sha256=wg93sujwt-R9_eJCgSCElgAZhLDkIiAo3qPkPydOv78,6582
 singlestoredb/tests/test_vectorstore.py,sha256=anHfp5gQrQy8Iw3Ub4mxFEkaZWahs566OXuKqjpkozM,1554
-singlestoredb/tests/test_fusion.py,sha256=EH1mRwdX2Fajsq6x2l0gBhH1YhcxtvDGIKC9HJ4sDbQ,50521
+singlestoredb/tests/test_fusion.py,sha256=7YQ_nOQoV_7yD4OEpJz2Ov-zok-cBFK9IOJ3FgZ0xo0,50593
 singlestoredb/tests/test_plugin.py,sha256=qpO9wmWc62VaijN1sJ97YSYIX7I7Y5C6sY-WzwrutDQ,812
 singlestoredb/tests/test_basics.py,sha256=Dw1irrtf3gWN7tqGruSH6uhWi5zkmCpJl6ZMQxMqlf4,48446
 singlestoredb/tests/test_ext_func.py,sha256=s1k1cBxQ7vIS1zSrKGkKTgLZE1DT_Rqj-3VNSCSv68I,43261
@@ -43,7 +43,7 @@ singlestoredb/tests/test2.ipynb,sha256=yd1PE1VK-DwiRd6mYS4_0cPBtuVkvcDtycvTwD-Yn
 singlestoredb/tests/test_ext_func_data.py,sha256=yTADD93nPxX6_rZXXLZaOWEI_yPvYyir9psn5PK9ctU,47695
 singlestoredb/tests/test_exceptions.py,sha256=tfr_8X2w1UmG4nkSBzWGB0C7ehrf1GAVgj6_ODaG-TM,1131
 singlestoredb/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-singlestoredb/tests/test_management.py,sha256=6evsyQWA-lOKMehJi8xvjp0udm85EHBHuZDHwQEzxPg,45149
+singlestoredb/tests/test_management.py,sha256=rzM5xZllXys6OzyFBxOieYSn4kDGCwWuyIkJWsPXvAY,45847
 singlestoredb/tests/test_udf.py,sha256=Kb7-oJpnN6MTT3aE5V5dry_r5ze0EwaAIJeh_zR3l0I,28844
 singlestoredb/tests/test_http.py,sha256=RXasTqBWRn__omj0eLFTJYIbZjd0PPdIV2d4Cqz0MC8,8580
 singlestoredb/tests/utils.py,sha256=2A2tEdD3t8aXWUnHtAIcFlWrflsz2MlMcCbUDaAG29c,4995
@@ -64,7 +64,7 @@ singlestoredb/management/organization.py,sha256=_JvW0Znu5emR5uYGVEcZvakQqftNb_vR
 singlestoredb/management/job.py,sha256=4-xLWzbE8odQogVVaFer80UEoTAZY1T28VZ9Ug4rbmM,24611
 singlestoredb/management/region.py,sha256=HnLcWUh7r_aLECliplCDHak4a_F3B7LOSXEYMW66qD0,1611
 singlestoredb/management/__init__.py,sha256=ofNTPCdkZ1dS_aX2aUujd8aMHQi8Lle5Ced0aaO3RH4,269
-singlestoredb/management/export.py,sha256=jJCe25ecH_LzKSDc7vS1-5DQaWFrZipeawLPpArByJE,5108
+singlestoredb/management/export.py,sha256=yR-yZUE9USFrP5OR_5iLFqEc8GLiKDQypSEp08CmT5k,9083
 singlestoredb/management/utils.py,sha256=QIhZCZSRaDbAG35xu1_n7ihmRXON8swc-gEK2FGYutI,13203
 singlestoredb/management/cluster.py,sha256=h75grXSxq4Anr4RxwKxcZW4TkWJ4bFg_ql5iRWCNLdQ,14405
 singlestoredb/management/inference_api.py,sha256=L6eFqaUaPugF_cmrZ4xlArj8CIv25vWqQs1vwgKPEF4,2583
@@ -84,7 +84,7 @@ singlestoredb/utils/xdict.py,sha256=S9HKgrPrnu_6b7iOwa2KrW8CmU1Uqx0BWdEyogFzWbE,
 singlestoredb/utils/debug.py,sha256=0JiLA37u_9CKiDGiN9BK_PtFMUku3vIcNjERWaTNRSU,349
 singlestoredb/utils/mogrify.py,sha256=-a56IF70U6CkfadeaZgfjRSVsAD3PuqRrzPpjZlgbwY,4050
 singlestoredb/http/__init__.py,sha256=A_2ZUCCpvRYIA6YDpPy57wL5R1eZ5SfP6I1To5nfJ2s,912
-singlestoredb/http/connection.py,sha256=EgE2m_nxisGPo6YV3AJd-RRafdT0f70HRbIo1ONQ668,39893
+singlestoredb/http/connection.py,sha256=X5GEPPOE-rMm17d0-TPhcdxUHibcYl-MZAnPhut8xyo,39956
 singlestoredb/ai/__init__.py,sha256=-uNcq-bY-AiWhZ5Plq2ZXtfIVL4PaifMJsJf58rdN8I,114
 singlestoredb/ai/chat.py,sha256=8OSBZJ3J2zOlVXzJ_sHSAAyu5E6sy7jqqiNeFhtmjOI,802
 singlestoredb/ai/embeddings.py,sha256=X3g0sJNDVOzXzZwoXz3M3ch-IERQXNkHxuH4cj125I8,815