singlestoredb 1.14.0__cp38-abi3-win_amd64.whl → 1.14.2__cp38-abi3-win_amd64.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of singlestoredb has been flagged by the registry as possibly problematic.
- _singlestoredb_accel.pyd +0 -0
- singlestoredb/__init__.py +13 -9
- singlestoredb/fusion/handler.py +17 -4
- singlestoredb/fusion/handlers/export.py +297 -69
- singlestoredb/http/connection.py +4 -2
- singlestoredb/management/export.py +127 -1
- singlestoredb/tests/test_fusion.py +4 -1
- singlestoredb/tests/test_management.py +75 -51
- singlestoredb/types.py +14 -6
- {singlestoredb-1.14.0.dist-info → singlestoredb-1.14.2.dist-info}/METADATA +3 -2
- {singlestoredb-1.14.0.dist-info → singlestoredb-1.14.2.dist-info}/RECORD +15 -15
- {singlestoredb-1.14.0.dist-info → singlestoredb-1.14.2.dist-info}/LICENSE +0 -0
- {singlestoredb-1.14.0.dist-info → singlestoredb-1.14.2.dist-info}/WHEEL +0 -0
- {singlestoredb-1.14.0.dist-info → singlestoredb-1.14.2.dist-info}/entry_points.txt +0 -0
- {singlestoredb-1.14.0.dist-info → singlestoredb-1.14.2.dist-info}/top_level.txt +0 -0
_singlestoredb_accel.pyd
CHANGED
Binary file
singlestoredb/__init__.py
CHANGED
@@ -13,7 +13,7 @@ Examples
 
 """
 
-__version__ = '1.14.0'
+__version__ = '1.14.2'
 
 from typing import Any
 
@@ -31,14 +31,18 @@ from .types import (
     Date, Time, Timestamp, DateFromTicks, TimeFromTicks, TimestampFromTicks,
     Binary, STRING, BINARY, NUMBER, DATETIME, ROWID,
 )
-from .vectorstore import (
-    vector_db, IndexInterface, IndexList, IndexModel, MatchTypedDict,
-    Metric, IndexStatsTypedDict, NamespaceStatsTypedDict, Vector,
-    VectorDictMetadataValue, VectorMetadataTypedDict, VectorTuple,
-    VectorTupleWithMetadata, DeletionProtection, AndFilter, EqFilter,
-    ExactMatchFilter, FilterTypedDict, GteFilter, GtFilter, InFilter,
-    LteFilter, LtFilter, NeFilter, NinFilter, OrFilter, SimpleFilter,
-)
+# These are only loaded if the singlestore-vectorstore package is available
+try:
+    from .vectorstore import (
+        vector_db, IndexInterface, IndexList, IndexModel, MatchTypedDict,
+        Metric, IndexStatsTypedDict, NamespaceStatsTypedDict, Vector,
+        VectorDictMetadataValue, VectorMetadataTypedDict, VectorTuple,
+        VectorTupleWithMetadata, DeletionProtection, AndFilter, EqFilter,
+        ExactMatchFilter, FilterTypedDict, GteFilter, GtFilter, InFilter,
+        LteFilter, LtFilter, NeFilter, NinFilter, OrFilter, SimpleFilter,
+    )
+except (ImportError, ModuleNotFoundError):
+    pass
 
 
 #
singlestoredb/fusion/handler.py
CHANGED
@@ -33,7 +33,7 @@ CORE_GRAMMAR = r'''
     close_paren = ws* ")" ws*
     open_repeats = ws* ~r"[\(\[\{]" ws*
     close_repeats = ws* ~r"[\)\]\}]" ws*
-
+    statement = ~r"[\s\S]*" ws*
     table = ~r"(?:([A-Za-z0-9_\-]+)|`([^\`]+)`)(?:\.(?:([A-Za-z0-9_\-]+)|`([^\`]+)`))?" ws*
     column = ~r"(?:([A-Za-z0-9_\-]+)|`([^\`]+)`)(?:\.(?:([A-Za-z0-9_\-]+)|`([^\`]+)`))?" ws*
     link_name = ~r"(?:([A-Za-z0-9_\-]+)|`([^\`]+)`)(?:\.(?:([A-Za-z0-9_\-]+)|`([^\`]+)`))?" ws*
@@ -77,6 +77,7 @@ BUILTINS = {
     '<file-type>': r'''
        file_type = { FILE | FOLDER }
     ''',
+    '<statement>': '',
 }
 
 BUILTIN_DEFAULTS = {  # type: ignore
@@ -627,6 +628,18 @@ class SQLHandler(NodeVisitor):
         cls.compile()
         registry.register_handler(cls, overwrite=overwrite)
 
+    def create_result(self) -> result.FusionSQLResult:
+        """
+        Create a new result object.
+
+        Returns
+        -------
+        FusionSQLResult
+            A new result object for this handler
+
+        """
+        return result.FusionSQLResult()
+
     def execute(self, sql: str) -> result.FusionSQLResult:
         """
         Parse the SQL and invoke the handler method.
@@ -746,9 +759,9 @@ class SQLHandler(NodeVisitor):
         _, out, *_ = visited_children
         return out
 
-    def
-        out = ' '.join(flatten(visited_children))
-        return {'
+    def visit_statement(self, node: Node, visited_children: Iterable[Any]) -> Any:
+        out = ' '.join(flatten(visited_children)).strip()
+        return {'statement': out}
 
     def visit_order_by(self, node: Node, visited_children: Iterable[Any]) -> Any:
        """Handle ORDER BY."""
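The new <statement> builtin captures an arbitrary SQL tail, and visit_statement returns it under the 'statement' key of the handler's params; the release also adds a create_result() convenience that simply returns result.FusionSQLResult(). A hedged sketch of a hypothetical handler body (not shipped code) using both:

    # Inside an SQLHandler subclass whose grammar uses the <statement> builtin:
    def run(self, params):
        res = self.create_result()               # same as result.FusionSQLResult()
        res.add_field('Statement', result.STRING)
        # visit_statement joined, stripped, and packed the matched SQL text
        res.set_rows([(params['statement'],)])
        return res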
singlestoredb/fusion/handlers/export.py
CHANGED

@@ -1,10 +1,12 @@
 #!/usr/bin/env python3
+import datetime
 import json
 from typing import Any
 from typing import Dict
 from typing import Optional
 
 from .. import result
+from ...management.export import _get_exports
 from ...management.export import ExportService
 from ...management.export import ExportStatus
 from ..handler import SQLHandler
@@ -104,7 +106,100 @@ class CreateClusterIdentity(SQLHandler):
 CreateClusterIdentity.register(overwrite=True)
 
 
-
+def _start_export(params: Dict[str, Any]) -> Optional[FusionSQLResult]:
+    # From table
+    if isinstance(params['from_table'], str):
+        from_database = None
+        from_table = params['from_table']
+    else:
+        from_database, from_table = params['from_table']
+
+    # Catalog
+    catalog_config = json.loads(params['catalog'].get('catalog_config', '{}') or '{}')
+    catalog_creds = json.loads(params['catalog'].get('catalog_creds', '{}') or '{}')
+
+    # Storage
+    storage_config = json.loads(params['storage'].get('link_config', '{}') or '{}')
+    storage_creds = json.loads(params['storage'].get('link_creds', '{}') or '{}')
+
+    storage_config['provider'] = 'S3'
+
+    wsg = get_workspace_group({})
+
+    if from_database is None:
+        raise ValueError('database name must be specified for source table')
+
+    if wsg._manager is None:
+        raise TypeError('no workspace manager is associated with workspace group')
+
+    partition_by = []
+    if params['partition_by']:
+        for key in params['partition_by']:
+            transform = key['partition_key']['transform']['col_transform']
+            part = {}
+            part['transform'] = transform[0].lower()
+            part['name'] = transform[-1]['transform_col']
+            partition_by.append(part)
+
+    order_by = []
+    if params['order_by'] and params['order_by']['by']:
+        for key in params['order_by']['by']:
+            transform = key['transform']['col_transform']
+            order = {}
+            order['transform'] = transform[0].lower()
+            order['name'] = transform[-1]['transform_col']
+            order['direction'] = 'ascending'
+            order['null_order'] = 'nulls_first'
+            if key.get('direction'):
+                if 'desc' in key['direction'].lower():
+                    order['direction'] = 'descending'
+            if key.get('null_order'):
+                if 'last' in key['null_order'].lower():
+                    order['null_order'] = 'nulls_last'
+            order_by.append(order)
+
+    # Refresh interval
+    refresh_interval_delta = None
+    refresh_interval = params.get('refresh_interval', None)
+    if refresh_interval is not None:
+        value = int(refresh_interval['refresh_interval_value'])
+        time_unit = refresh_interval['refresh_interval_time_unit'].upper()
+        if value < 0:
+            raise ValueError('refresh interval must be greater than 0')
+        if time_unit == 'SECONDS':
+            refresh_interval_delta = datetime.timedelta(seconds=int(value))
+        elif time_unit == 'MINUTES':
+            refresh_interval_delta = datetime.timedelta(minutes=int(value))
+        elif time_unit == 'HOURS':
+            refresh_interval_delta = datetime.timedelta(hours=int(value))
+        elif time_unit == 'DAYS':
+            refresh_interval_delta = datetime.timedelta(days=int(value))
+        else:
+            raise ValueError('invalid refresh interval time unit')
+
+    out = ExportService(
+        wsg,
+        from_database,
+        from_table,
+        dict(**catalog_config, **catalog_creds),
+        dict(**storage_config, **storage_creds),
+        columns=None,
+        partition_by=partition_by or None,
+        order_by=order_by or None,
+        properties=json.loads(params['properties']) if params['properties'] else None,
+        incremental=params.get('incremental', False),
+        refresh_interval=int(refresh_interval_delta.total_seconds())
+        if refresh_interval_delta is not None else None,
+    ).start()
+
+    res = FusionSQLResult()
+    res.add_field('ExportID', result.STRING)
+    res.set_rows([(out.export_id,)])
+
+    return res
+
+
 class StartExport(SQLHandler):
     """
     START EXPORT
         from_table
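The unit handling in _start_export maps each keyword onto the matching datetime.timedelta argument and then converts to whole seconds for the API. An equivalent, more compact sketch of that conversion (illustrative only, not the shipped code):

    import datetime

    # SQL keyword -> timedelta keyword argument; anything else is rejected.
    _UNITS = {'SECONDS': 'seconds', 'MINUTES': 'minutes',
              'HOURS': 'hours', 'DAYS': 'days'}

    def refresh_seconds(value: int, unit: str) -> int:
        if value < 0:
            raise ValueError('refresh interval must be greater than 0')
        try:
            delta = datetime.timedelta(**{_UNITS[unit.upper()]: value})
        except KeyError:
            raise ValueError('invalid refresh interval time unit')
        return int(delta.total_seconds())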
@@ -150,7 +245,7 @@ class CreateExport(SQLHandler):
 
     Description
     -----------
-
+    Start an export.
 
     Arguments
     ---------
@@ -180,7 +275,6 @@ class CreateExport(SQLHandler):
     LINK S3 CONFIG '{
         "region": "us-east-1",
         "endpoint_url": "s3://bucket-name"
-
     }'
     ;
 
@@ -189,77 +283,129 @@ class CreateExport(SQLHandler):
     _enabled = False
 
     def run(self, params: Dict[str, Any]) -> Optional[FusionSQLResult]:
-
-        if isinstance(params['from_table'], str):
-            from_database = None
-            from_table = params['from_table']
-        else:
-            from_database, from_table = params['from_table']
+        return _start_export(params)
 
-        # Catalog
-        catalog_config = json.loads(params['catalog'].get('catalog_config', '{}') or '{}')
-        catalog_creds = json.loads(params['catalog'].get('catalog_creds', '{}') or '{}')
 
-
-        storage_config = json.loads(params['storage'].get('link_config', '{}') or '{}')
-        storage_creds = json.loads(params['storage'].get('link_creds', '{}') or '{}')
+StartExport.register(overwrite=True)
 
-        storage_config['provider'] = 'S3'
 
-
+class StartIncrementalExport(SQLHandler):
+    """
+    START INCREMENTAL EXPORT
+        from_table
+        catalog
+        storage
+        [ partition_by ]
+        [ order_by ]
+        [ properties ]
+        [ refresh_interval ]
+    ;
 
-
-
+    # From table
+    from_table = FROM <table>
 
-
-
+    # Transforms
+    _col_transform = { VOID | IDENTITY | YEAR | MONTH | DAY | HOUR } ( _transform_col )
+    _transform_col = <column>
+    _arg_transform = { BUCKET | TRUNCATE } ( _transform_col <comma> _transform_arg )
+    _transform_arg = <integer>
+    transform = { _col_transform | _arg_transform }
 
-
-
-
-                transform = key['partition_key']['transform']['col_transform']
-                part = {}
-                part['transform'] = transform[0].lower()
-                part['name'] = transform[-1]['transform_col']
-                partition_by.append(part)
-
-        order_by = []
-        if params['order_by'] and params['order_by']['by']:
-            for key in params['order_by']['by']:
-                transform = key['transform']['col_transform']
-                order = {}
-                order['transform'] = transform[0].lower()
-                order['name'] = transform[-1]['transform_col']
-                order['direction'] = 'ascending'
-                order['null_order'] = 'nulls_first'
-                if key.get('direction'):
-                    if 'desc' in key['direction'].lower():
-                        order['direction'] = 'descending'
-                if key.get('null_order'):
-                    if 'last' in key['null_order'].lower():
-                        order['null_order'] = 'nulls_last'
-                order_by.append(order)
+    # Partitions
+    partition_by = PARTITION BY partition_key,...
+    partition_key = transform
 
-
-
-
-
-
-            dict(**storage_config, **storage_creds),
-            columns=None,
-            partition_by=partition_by or None,
-            order_by=order_by or None,
-            properties=json.loads(params['properties']) if params['properties'] else None,
-        ).start()
+    # Sort order
+    order_by = ORDER BY sort_key,...
+    sort_key = transform [ direction ] [ null_order ]
+    direction = { ASC | DESC | ASCENDING | DESCENDING }
+    null_order = { NULLS_FIRST | NULLS_LAST }
 
-
-
-        res.set_rows([(out.export_id,)])
+    # Properties
+    properties = PROPERTIES '<json>'
 
-
+    # Catalog
+    catalog = CATALOG [ _catalog_config ] [ _catalog_creds ]
+    _catalog_config = CONFIG '<catalog-config>'
+    _catalog_creds = CREDENTIALS '<catalog-creds>'
+
+    # Storage
+    storage = LINK [ _link_config ] [ _link_creds ]
+    _link_config = S3 CONFIG '<link-config>'
+    _link_creds = CREDENTIALS '<link-creds>'
+
+    # Refresh interval
+    refresh_interval = REFRESH INTERVAL _refresh_interval_value _refresh_interval_time_unit
+    _refresh_interval_value = <integer>
+    _refresh_interval_time_unit = { SECONDS | MINUTES | HOURS | DAYS }
+
+    Description
+    -----------
+    Start an incremental export.
+
+    Arguments
+    ---------
+    * ``<catalog-config>`` and ``<catalog-creds>``: The catalog configuration.
+    * ``<link-config>`` and ``<link-creds>``: The storage link configuration.
+
+    Remarks
+    -------
+    * ``FROM <table>`` specifies the SingleStore table to export. The same name will
+      be used for the exported table.
+    * ``CATALOG`` specifies the details of the catalog to connect to.
+    * ``LINK`` specifies the details of the data storage to connect to.
+    * ``REFRESH INTERVAL`` specifies the interval for refreshing the
+      incremental export. The default is 1 day.
+
+    Examples
+    --------
+    The following statement starts an export operation with the given
+    catalog and link configurations. The source table to export is
+    named "customer_data"::
 
+        START INCREMENTAL EXPORT FROM my_db.customer_data
+            CATALOG CONFIG '{
+                "catalog_type": "GLUE",
+                "table_format": "ICEBERG",
+                "catalog_id": "13983498723498",
+                "catalog_region": "us-east-1"
+            }'
+            LINK S3 CONFIG '{
+                "region": "us-east-1",
+                "endpoint_url": "s3://bucket-name"
+            }'
+            REFRESH INTERVAL 24 HOURS
+        ;
+
+    """  # noqa
+
+    _enabled = False
+
+    def run(self, params: Dict[str, Any]) -> Optional[FusionSQLResult]:
+        params['incremental'] = True
+        return _start_export(params)
+
+
+StartIncrementalExport.register(overwrite=True)
+
+
+def _format_status(export_id: str, status: ExportStatus) -> Optional[FusionSQLResult]:
+    """Return the status of an export operation."""
+    info = status._info()
 
-
+    res = FusionSQLResult()
+    res.add_field('ExportID', result.STRING)
+    res.add_field('Status', result.STRING)
+    res.add_field('Message', result.STRING)
+    res.set_rows([
+        (
+            export_id,
+            info.get('status', 'Unknown'),
+            info.get('statusMsg', ''),
+        ),
+    ])
+
+    return res
 
 
 class ShowExport(SQLHandler):
@@ -275,9 +421,29 @@ class ShowExport(SQLHandler):
 
     def run(self, params: Dict[str, Any]) -> Optional[FusionSQLResult]:
         wsg = get_workspace_group({})
-
+        return _format_status(
+            params['export_id'], ExportStatus(params['export_id'], wsg),
+        )
+
+
+ShowExport.register(overwrite=True)
+
+
+class ShowExports(SQLHandler):
+    """
+    SHOW EXPORTS [ scope ];
+
+    # Location of the export
+    scope = FOR '<scope>'
+
+    """
+
+    _enabled = False
+
+    def run(self, params: Dict[str, Any]) -> Optional[FusionSQLResult]:
+        wsg = get_workspace_group({})
 
-
+        exports = _get_exports(wsg, params.get('scope', 'all'))
 
         res = FusionSQLResult()
         res.add_field('ExportID', result.STRING)
@@ -285,13 +451,75 @@ class ShowExport(SQLHandler):
         res.add_field('Message', result.STRING)
         res.set_rows([
             (
-
-
-
-            )
+                info['egressID'],
+                info.get('status', 'Unknown'),
+                info.get('statusMsg', ''),
+            )
+            for info in [x._info() for x in exports]
         ])
 
         return res
 
 
-
+ShowExports.register(overwrite=True)
+
+
+class SuspendExport(SQLHandler):
+    """
+    SUSPEND EXPORT export_id;
+
+    # ID of export
+    export_id = '<export-id>'
+
+    """
+
+    _enabled = False
+
+    def run(self, params: Dict[str, Any]) -> Optional[FusionSQLResult]:
+        wsg = get_workspace_group({})
+        service = ExportService.from_export_id(wsg, params['export_id'])
+        return _format_status(params['export_id'], service.suspend())
+
+
+SuspendExport.register(overwrite=True)
+
+
+class ResumeExport(SQLHandler):
+    """
+    RESUME EXPORT export_id;
+
+    # ID of export
+    export_id = '<export-id>'
+
+    """
+
+    _enabled = False
+
+    def run(self, params: Dict[str, Any]) -> Optional[FusionSQLResult]:
+        wsg = get_workspace_group({})
+        service = ExportService.from_export_id(wsg, params['export_id'])
+        return _format_status(params['export_id'], service.resume())
+
+
+ResumeExport.register(overwrite=True)
+
+
+class DropExport(SQLHandler):
+    """
+    DROP EXPORT export_id;
+
+    # ID of export
+    export_id = '<export-id>'
+
+    """
+
+    _enabled = False
+
+    def run(self, params: Dict[str, Any]) -> Optional[FusionSQLResult]:
+        wsg = get_workspace_group({})
+        service = ExportService.from_export_id(wsg, params['export_id'])
+        service.drop()
+        return None
+
+
+DropExport.register(overwrite=True)
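Taken together, 1.14.2 adds five new Fusion SQL statements (START INCREMENTAL EXPORT, SHOW EXPORTS, SUSPEND EXPORT, RESUME EXPORT, DROP EXPORT), all registered but gated behind _enabled = False in this release. A hedged sketch of how they would be driven once enabled; the connection string and export ID below are placeholders:

    import singlestoredb as s2

    with s2.connect('user:password@host:3306/my_db') as conn:
        with conn.cursor() as cur:
            cur.execute("SUSPEND EXPORT 'export-id-123';")
            cur.execute("SHOW EXPORTS FOR 'all';")
            print(cur.fetchall())   # rows of (ExportID, Status, Message)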
singlestoredb/http/connection.py
CHANGED
@@ -569,8 +569,10 @@ class Cursor(connection.Cursor):
 
         if res.status_code >= 400:
             if res.text:
-
-
+                m = re.match(r'^Error\s+(\d+).*?:', res.text)
+                if m:
+                    code = m.group(1)
+                    msg = res.text.split(':', 1)[-1]
                     icode = int(code.split()[-1])
                 else:
                     icode = res.status_code
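The new parsing anchors the error-code regex at the start of the response body instead of assuming a fixed layout. A small standalone sketch of what it extracts, assuming an error body of the usual "Error NNNN: message" shape:

    import re

    text = 'Error 1064: You have an error in your SQL syntax'
    m = re.match(r'^Error\s+(\d+).*?:', text)
    if m:
        code = m.group(1)                # '1064'
        msg = text.split(':', 1)[-1]     # ' You have an error in your SQL syntax'
        icode = int(code.split()[-1])    # 1064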
singlestoredb/management/export.py
CHANGED

@@ -27,6 +27,9 @@ class ExportService(object):
     partition_by: Optional[List[Dict[str, str]]]
     order_by: Optional[List[Dict[str, Dict[str, str]]]]
     properties: Optional[Dict[str, Any]]
+    incremental: bool
+    refresh_interval: Optional[int]
+    export_id: Optional[str]
 
     def __init__(
         self,
@@ -38,6 +41,8 @@ class ExportService(object):
         columns: Optional[List[str]] = None,
         partition_by: Optional[List[Dict[str, str]]] = None,
         order_by: Optional[List[Dict[str, Dict[str, str]]]] = None,
+        incremental: bool = False,
+        refresh_interval: Optional[int] = None,
         properties: Optional[Dict[str, Any]] = None,
     ):
         #: Workspace group
@@ -68,8 +73,30 @@ class ExportService(object):
         self.order_by = order_by or None
         self.properties = properties or None
 
+        self.incremental = incremental
+        self.refresh_interval = refresh_interval
+
+        self.export_id = None
+
         self._manager: Optional[WorkspaceManager] = workspace_group._manager
 
+    @classmethod
+    def from_export_id(
+        self,
+        workspace_group: WorkspaceGroup,
+        export_id: str,
+    ) -> ExportService:
+        """Create export service from export ID."""
+        out = ExportService(
+            workspace_group=workspace_group,
+            database='',
+            table='',
+            catalog_info={},
+            storage_info={},
+        )
+        out.export_id = export_id
+        return out
+
     def __str__(self) -> str:
         """Return string representation."""
         return vars_to_str(self)
@@ -98,6 +125,11 @@ class ExportService(object):
 
     def start(self, tags: Optional[List[str]] = None) -> 'ExportStatus':
         """Start the export process."""
+        if not self.table or not self.database:
+            raise ManagementError(
+                msg='Database and table must be set before starting the export.',
+            )
+
         if self._manager is None:
             raise ManagementError(
                 msg='No workspace manager is associated with this object.',
@@ -122,11 +154,87 @@ class ExportService(object):
                 partitionSpec=partition_spec,
                 sortOrderSpec=sort_order_spec,
                 properties=self.properties,
+                incremental=self.incremental or None,
+                refreshInterval=self.refresh_interval
+                if self.refresh_interval is not None else None,
             ).items() if v is not None
             },
         )
 
-
+        self.export_id = str(out.json()['egressID'])
+
+        return ExportStatus(self.export_id, self.workspace_group)
+
+    def suspend(self) -> 'ExportStatus':
+        """Suspend the export process."""
+        if self._manager is None:
+            raise ManagementError(
+                msg='No workspace manager is associated with this object.',
+            )
+
+        if self.export_id is None:
+            raise ManagementError(
+                msg='Export ID is not set. You must start the export first.',
+            )
+
+        self._manager._post(
+            f'workspaceGroups/{self.workspace_group.id}/egress/suspendTableEgress',
+            json=dict(egressID=self.export_id),
+        )
+
+        return ExportStatus(self.export_id, self.workspace_group)
+
+    def resume(self) -> 'ExportStatus':
+        """Resume the export process."""
+        if self._manager is None:
+            raise ManagementError(
+                msg='No workspace manager is associated with this object.',
+            )
+
+        if self.export_id is None:
+            raise ManagementError(
+                msg='Export ID is not set. You must start the export first.',
+            )
+
+        self._manager._post(
+            f'workspaceGroups/{self.workspace_group.id}/egress/resumeTableEgress',
+            json=dict(egressID=self.export_id),
+        )
+
+        return ExportStatus(self.export_id, self.workspace_group)
+
+    def drop(self) -> None:
+        """Drop the export process."""
+        if self._manager is None:
+            raise ManagementError(
+                msg='No workspace manager is associated with this object.',
+            )
+
+        if self.export_id is None:
+            raise ManagementError(
+                msg='Export ID is not set. You must start the export first.',
+            )
+
+        self._manager._delete(
+            f'workspaceGroups/{self.workspace_group.id}/egress/dropTableEgress',
+            json=dict(egressID=self.export_id),
+        )
+
+        return None
+
+    def status(self) -> ExportStatus:
+        """Get the status of the export process."""
+        if self._manager is None:
+            raise ManagementError(
+                msg='No workspace manager is associated with this object.',
+            )
+
+        if self.export_id is None:
+            raise ManagementError(
+                msg='Export ID is not set. You must start the export first.',
+            )
+
+        return ExportStatus(self.export_id, self.workspace_group)
 
 
 class ExportStatus(object):
@@ -167,3 +275,21 @@ class ExportStatus(object):
 
     def __repr__(self) -> str:
         return self.status
+
+
+def _get_exports(
+    workspace_group: WorkspaceGroup,
+    scope: str = 'all',
+) -> List[ExportStatus]:
+    """Get all exports in the workspace group."""
+    if workspace_group._manager is None:
+        raise ManagementError(
+            msg='No workspace manager is associated with this object.',
+        )
+
+    out = workspace_group._manager._get(
+        f'workspaceGroups/{workspace_group.id}/egress/tableEgressStatus',
+        json=dict(scope=scope),
+    )
+
+    return out.json()
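The diff above gives ExportService a full lifecycle API. A hedged usage sketch; workspace_group is assumed to be a WorkspaceGroup already in scope, and the export ID is a placeholder. from_export_id() builds a handle without re-specifying the source table, so suspend/resume/drop can target an existing export:

    from singlestoredb.management.export import ExportService

    svc = ExportService.from_export_id(workspace_group, 'export-id-123')
    status = svc.suspend()   # POST .../egress/suspendTableEgress
    status = svc.resume()    # POST .../egress/resumeTableEgress
    svc.drop()               # DELETE .../egress/dropTableEgress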
singlestoredb/tests/test_fusion.py
CHANGED

@@ -499,7 +499,10 @@ class TestJobsFusion(unittest.TestCase):
     @classmethod
     def tearDownClass(cls):
         for job_id in cls.job_ids:
-            cls.manager.organizations.current.jobs.delete(job_id)
+            try:
+                cls.manager.organizations.current.jobs.delete(job_id)
+            except Exception:
+                pass
         if cls.workspace_group is not None:
             cls.workspace_group.terminate(force=True)
         cls.manager = None
singlestoredb/tests/test_management.py
CHANGED

@@ -397,13 +397,16 @@ class TestStage(unittest.TestCase):
     def test_upload_file(self):
         st = self.wg.stage
 
+        upload_test_sql = f'upload_test_{id(self)}.sql'
+        upload_test2_sql = f'upload_test2_{id(self)}.sql'
+
         root = st.info('/')
         assert str(root.path) == '/'
         assert root.type == 'directory'
 
         # Upload file
-        f = st.upload_file(TEST_DIR / 'test.sql',
-        assert str(f.path) ==
+        f = st.upload_file(TEST_DIR / 'test.sql', upload_test_sql)
+        assert str(f.path) == upload_test_sql
         assert f.type == 'file'
 
         # Download and compare to original
@@ -412,15 +415,15 @@ class TestStage(unittest.TestCase):
 
         # Make sure we can't overwrite
         with self.assertRaises(OSError):
-            st.upload_file(TEST_DIR / 'test.sql',
+            st.upload_file(TEST_DIR / 'test.sql', upload_test_sql)
 
         # Force overwrite with new content; use file object this time
         f = st.upload_file(
             open(TEST_DIR / 'test2.sql', 'r'),
-
+            upload_test_sql,
             overwrite=True,
         )
-        assert str(f.path) ==
+        assert str(f.path) == upload_test_sql
         assert f.type == 'file'
 
         # Verify new content
@@ -442,63 +445,67 @@ class TestStage(unittest.TestCase):
         # Write file into folder
         f = st.upload_file(
             TEST_DIR / 'test2.sql',
-            os.path.join(lib.path,
+            os.path.join(lib.path, upload_test2_sql),
         )
-        assert str(f.path) == 'lib/
+        assert str(f.path) == 'lib/' + upload_test2_sql
         assert f.type == 'file'
 
     def test_open(self):
         st = self.wg.stage
 
+        open_test_sql = f'open_test_{id(self)}.sql'
+
         # See if error is raised for non-existent file
         with self.assertRaises(s2.ManagementError):
-            st.open(
+            st.open(open_test_sql, 'r')
 
         # Load test file
-        st.upload_file(TEST_DIR / 'test.sql',
+        st.upload_file(TEST_DIR / 'test.sql', open_test_sql)
 
         # Read file using `open`
-        with st.open(
+        with st.open(open_test_sql, 'r') as rfile:
             assert rfile.read() == open(TEST_DIR / 'test.sql').read()
 
         # Read file using `open` with 'rt' mode
-        with st.open(
+        with st.open(open_test_sql, 'rt') as rfile:
             assert rfile.read() == open(TEST_DIR / 'test.sql').read()
 
         # Read file using `open` with 'rb' mode
-        with st.open(
+        with st.open(open_test_sql, 'rb') as rfile:
             assert rfile.read() == open(TEST_DIR / 'test.sql', 'rb').read()
 
         # Read file using `open` with 'rb' mode
         with self.assertRaises(ValueError):
-            with st.open(
+            with st.open(open_test_sql, 'b') as rfile:
                 pass
 
         # Attempt overwrite file using `open` with mode 'x'
        with self.assertRaises(OSError):
-            with st.open(
+            with st.open(open_test_sql, 'x') as wfile:
                 pass
 
         # Attempt overwrite file using `open` with mode 'w'
-        with st.open(
+        with st.open(open_test_sql, 'w') as wfile:
             wfile.write(open(TEST_DIR / 'test2.sql').read())
 
-        txt = st.download_file(
+        txt = st.download_file(open_test_sql, encoding='utf-8')
 
         assert txt == open(TEST_DIR / 'test2.sql').read()
 
+        open_raw_test_sql = f'open_raw_test_{id(self)}.sql'
+
         # Test writer without context manager
-        wfile = st.open(
+        wfile = st.open(open_raw_test_sql, 'w')
         for line in open(TEST_DIR / 'test.sql'):
             wfile.write(line)
         wfile.close()
 
-        txt = st.download_file(
+        txt = st.download_file(open_raw_test_sql, encoding='utf-8')
 
         assert txt == open(TEST_DIR / 'test.sql').read()
 
         # Test reader without context manager
-        rfile = st.open(
+        rfile = st.open(open_raw_test_sql, 'r')
         txt = ''
         for line in rfile:
             txt += line
@@ -509,15 +516,18 @@ class TestStage(unittest.TestCase):
     def test_obj_open(self):
         st = self.wg.stage
 
+        obj_open_test_sql = f'obj_open_test_{id(self)}.sql'
+        obj_open_dir = f'obj_open_dir_{id(self)}'
+
         # Load test file
-        f = st.upload_file(TEST_DIR / 'test.sql',
+        f = st.upload_file(TEST_DIR / 'test.sql', obj_open_test_sql)
 
         # Read file using `open`
         with f.open() as rfile:
             assert rfile.read() == open(TEST_DIR / 'test.sql').read()
 
         # Make sure directories error out
-        d = st.mkdir(
+        d = st.mkdir(obj_open_dir)
         with self.assertRaises(IsADirectoryError):
             d.open()
 
@@ -921,7 +931,10 @@ class TestJob(unittest.TestCase):
     @classmethod
     def tearDownClass(cls):
         for job_id in cls.job_ids:
-            cls.manager.organizations.current.jobs.delete(job_id)
+            try:
+                cls.manager.organizations.current.jobs.delete(job_id)
+            except Exception:
+                pass
         if cls.workspace_group is not None:
             cls.workspace_group.terminate(force=True)
         cls.workspace_group = None
@@ -1054,6 +1067,8 @@ class TestFileSpaces(unittest.TestCase):
         cls.shared_space = None
 
     def test_upload_file(self):
+        upload_test_ipynb = f'upload_test_{id(self)}.ipynb'
+
         for space in [self.personal_space, self.shared_space]:
             root = space.info('/')
             assert str(root.path) == '/'
@@ -1062,9 +1077,9 @@ class TestFileSpaces(unittest.TestCase):
             # Upload files
             f = space.upload_file(
                 TEST_DIR / 'test.ipynb',
-
+                upload_test_ipynb,
             )
-            assert str(f.path) ==
+            assert str(f.path) == upload_test_ipynb
             assert f.type == 'notebook'
 
             # Download and compare to original
@@ -1075,15 +1090,15 @@ class TestFileSpaces(unittest.TestCase):
             with self.assertRaises(OSError):
                 space.upload_file(
                     TEST_DIR / 'test.ipynb',
-
+                    upload_test_ipynb,
                 )
 
             # Force overwrite with new content
             f = space.upload_file(
                 TEST_DIR / 'test2.ipynb',
-
+                upload_test_ipynb, overwrite=True,
             )
-            assert str(f.path) ==
+            assert str(f.path) == upload_test_ipynb
             assert f.type == 'notebook'
 
             # Verify new content
@@ -1095,9 +1110,11 @@ class TestFileSpaces(unittest.TestCase):
             space.upload_folder(TEST_DIR, 'test')
 
             # Cleanup
-            space.remove(
+            space.remove(upload_test_ipynb)
 
     def test_upload_file_io(self):
+        upload_test_ipynb = f'upload_test_{id(self)}.ipynb'
+
         for space in [self.personal_space, self.shared_space]:
             root = space.info('/')
             assert str(root.path) == '/'
@@ -1106,9 +1123,9 @@ class TestFileSpaces(unittest.TestCase):
             # Upload files
             f = space.upload_file(
                 open(TEST_DIR / 'test.ipynb', 'r'),
-
+                upload_test_ipynb,
             )
-            assert str(f.path) ==
+            assert str(f.path) == upload_test_ipynb
             assert f.type == 'notebook'
 
             # Download and compare to original
@@ -1119,15 +1136,15 @@ class TestFileSpaces(unittest.TestCase):
             with self.assertRaises(OSError):
                 space.upload_file(
                     open(TEST_DIR / 'test.ipynb', 'r'),
-
+                    upload_test_ipynb,
                 )
 
             # Force overwrite with new content
             f = space.upload_file(
                 open(TEST_DIR / 'test2.ipynb', 'r'),
-
+                upload_test_ipynb, overwrite=True,
             )
-            assert str(f.path) ==
+            assert str(f.path) == upload_test_ipynb
             assert f.type == 'notebook'
 
             # Verify new content
@@ -1139,62 +1156,66 @@ class TestFileSpaces(unittest.TestCase):
             space.upload_folder(TEST_DIR, 'test')
 
             # Cleanup
-            space.remove(
+            space.remove(upload_test_ipynb)
 
     def test_open(self):
         for space in [self.personal_space, self.shared_space]:
+            open_test_ipynb = f'open_test_ipynb_{id(self)}.ipynb'
+
             # See if error is raised for non-existent file
             with self.assertRaises(s2.ManagementError):
-                space.open(
+                space.open(open_test_ipynb, 'r')
 
             # Load test file
-            space.upload_file(TEST_DIR / 'test.ipynb',
+            space.upload_file(TEST_DIR / 'test.ipynb', open_test_ipynb)
 
             # Read file using `open`
-            with space.open(
+            with space.open(open_test_ipynb, 'r') as rfile:
                 assert rfile.read() == open(TEST_DIR / 'test.ipynb').read()
 
             # Read file using `open` with 'rt' mode
-            with space.open(
+            with space.open(open_test_ipynb, 'rt') as rfile:
                 assert rfile.read() == open(TEST_DIR / 'test.ipynb').read()
 
             # Read file using `open` with 'rb' mode
-            with space.open(
+            with space.open(open_test_ipynb, 'rb') as rfile:
                 assert rfile.read() == open(TEST_DIR / 'test.ipynb', 'rb').read()
 
             # Read file using `open` with 'rb' mode
             with self.assertRaises(ValueError):
-                with space.open(
+                with space.open(open_test_ipynb, 'b') as rfile:
                     pass
 
             # Attempt overwrite file using `open` with mode 'x'
             with self.assertRaises(OSError):
-                with space.open(
+                with space.open(open_test_ipynb, 'x') as wfile:
                     pass
 
             # Attempt overwrite file using `open` with mode 'w'
-            with space.open(
+            with space.open(open_test_ipynb, 'w') as wfile:
                 wfile.write(open(TEST_DIR / 'test2.ipynb').read())
 
-            txt = space.download_file(
+            txt = space.download_file(open_test_ipynb, encoding='utf-8')
 
             assert txt == open(TEST_DIR / 'test2.ipynb').read()
 
+            open_raw_test_ipynb = f'open_raw_test_{id(self)}.ipynb'
+
             # Test writer without context manager
-            wfile = space.open(
+            wfile = space.open(open_raw_test_ipynb, 'w')
             for line in open(TEST_DIR / 'test.ipynb'):
                 wfile.write(line)
             wfile.close()
 
             txt = space.download_file(
-
+                open_raw_test_ipynb,
                 encoding='utf-8',
             )
 
             assert txt == open(TEST_DIR / 'test.ipynb').read()
 
             # Test reader without context manager
-            rfile = space.open(
+            rfile = space.open(open_raw_test_ipynb, 'r')
             txt = ''
             for line in rfile:
                 txt += line
@@ -1203,15 +1224,18 @@ class TestFileSpaces(unittest.TestCase):
             assert txt == open(TEST_DIR / 'test.ipynb').read()
 
             # Cleanup
-            space.remove(
-            space.remove(
+            space.remove(open_test_ipynb)
+            space.remove(open_raw_test_ipynb)
 
     def test_obj_open(self):
         for space in [self.personal_space, self.shared_space]:
+            obj_open_test_ipynb = f'obj_open_test_{id(self)}.ipynb'
+            obj_open_dir = f'obj_open_dir_{id(self)}'
+
             # Load test file
             f = space.upload_file(
                 TEST_DIR / 'test.ipynb',
-
+                obj_open_test_ipynb,
             )
 
             # Read file using `open`
@@ -1220,7 +1244,7 @@ class TestFileSpaces(unittest.TestCase):
 
             # Make sure directories error out
             with self.assertRaises(s2.ManagementError):
-                space.mkdir(
+                space.mkdir(obj_open_dir)
 
             # Write file using `open`
             with f.open('w', encoding='utf-8') as wfile:
@@ -1248,7 +1272,7 @@ class TestFileSpaces(unittest.TestCase):
             assert txt == open(TEST_DIR / 'test.ipynb').read()
 
             # Cleanup
-            space.remove(
+            space.remove(obj_open_test_ipynb)
 
     def test_os_directories(self):
         for space in [self.personal_space, self.shared_space]:
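The recurring change in these test hunks is one pattern, shown here in isolation as a hedged sketch: id(self) makes the remote file name unique per test instance, so repeated or concurrent runs against the same Stage or file space do not collide on upload targets.

    # Unique per test instance; two concurrently running tests get different names.
    upload_test_sql = f'upload_test_{id(self)}.sql'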
singlestoredb/types.py
CHANGED
@@ -173,17 +173,25 @@ class ColumnType(object):
         'DECIMAL', 'DEC', 'FIXED', 'NUMERIC', 0, decimal.Decimal,
     )
     DEC = FIXED = NUMERIC = DECIMAL
-    TINY = TINYINT = BOOL = BOOLEAN = NumberDBAPIType(
-        'TINY', 'TINYINT', 'BOOL', 'BOOLEAN', 1,
+    TINY = TINYINT = BOOL = BOOLEAN = UNSIGNED_TINY = UNSIGNED_TINYINT = NumberDBAPIType(
+        'TINY', 'TINYINT', 'BOOL', 'BOOLEAN', 'UNSIGNED TINY', 'UNSIGNED TINYINT', 1,
+    )
+    SHORT = SMALLINT = UNSIGNED_SHORT = UNSIGNED_SMALLINT = NumberDBAPIType(
+        'SMALLINT', 'SHORT', 'UNSIGNED SHORT', 'UNSIGNED SMALLINT', 2,
+    )
+    LONG = INT = UNSIGNED_LONG = UNSIGNED_INT = NumberDBAPIType(
+        'LONG', 'INT', 'UNSIGNED LONG', 'UNSIGNED INT', 3,
     )
-    SHORT = SMALLINT = NumberDBAPIType('SMALLINT', 'SHORT', 2)
-    LONG = INT = NumberDBAPIType('LONG', 'INT', 3)
     FLOAT = NumberDBAPIType('FLOAT', 4)
     DOUBLE = REAL = NumberDBAPIType('DOUBLE', 5, float)
     NULL = DBAPIType('NULL', 6)
     TIMESTAMP = DatetimeDBAPIType('TIMESTAMP', 7)
-    LONGLONG = BIGINT =
-
+    LONGLONG = BIGINT = UNSIGNED_LONGLONG = UNSIGNED_BIGINT = NumberDBAPIType(
+        'BIGINT', 'LONGLONG', 'UNSIGNED LONGLONG', 'UNSIGNED BIGINT', 8, int,
+    )
+    MEDIUMINT = INT24 = UNSIGNED_MEDIUMINT = UNSIGNED_INT24 = NumberDBAPIType(
+        'MEDIUMINT', 'INT24', 'UNSIGNED MEDIUMINT', 'UNSIGNED INT24', 9,
+    )
     DATE = DBAPIType('DATE', 10, datetime.date)
     TIME = DBAPIType('TIME', 11, datetime.time)
     DATETIME = DatetimeDBAPIType('DATETIME', 12, datetime.datetime)
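The unsigned aliases are bound to the very same DB-API type objects as their signed counterparts, so existing comparisons keep working. A minimal illustration, assuming ColumnType is importable as the diff shows:

    from singlestoredb.types import ColumnType

    # Same objects, just additional names and server type strings.
    assert ColumnType.TINYINT is ColumnType.UNSIGNED_TINYINT
    assert ColumnType.SMALLINT is ColumnType.UNSIGNED_SMALLINT
    assert ColumnType.BIGINT is ColumnType.UNSIGNED_BIGINT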
{singlestoredb-1.14.0.dist-info → singlestoredb-1.14.2.dist-info}/METADATA
CHANGED

@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: singlestoredb
-Version: 1.14.0
+Version: 1.14.2
 Summary: Interface to the SingleStoreDB database and workspace management APIs
 Home-page: https://github.com/singlestore-labs/singlestoredb-python
 Author: SingleStore
@@ -19,7 +19,6 @@ Requires-Dist: build
 Requires-Dist: parsimonious
 Requires-Dist: requests
 Requires-Dist: setuptools
-Requires-Dist: singlestore-vectorstore >=0.1.2
 Requires-Dist: sqlparams
 Requires-Dist: wheel
 Requires-Dist: tomli >=1.1.0 ; python_version < "3.11"
@@ -44,6 +43,8 @@ Provides-Extra: rsa
 Requires-Dist: cryptography ; extra == 'rsa'
 Provides-Extra: sqlalchemy
 Requires-Dist: sqlalchemy-singlestoredb >=1.0.0 ; extra == 'sqlalchemy'
+Provides-Extra: vectorstore
+Requires-Dist: singlestore-vectorstore >=0.1.2 ; extra == 'vectorstore'
 
 # <img src="https://github.com/singlestore-labs/singlestoredb-python/blob/main/resources/singlestore-logo.png" height="60" valign="middle"/> SingleStoreDB Python SDK
 
{singlestoredb-1.14.0.dist-info → singlestoredb-1.14.2.dist-info}/RECORD
CHANGED

@@ -1,5 +1,5 @@
-_singlestoredb_accel.pyd,sha256=
-singlestoredb/__init__.py,sha256=
+_singlestoredb_accel.pyd,sha256=MaogiX8M7Vi9gkhHL0JN_KgiUjYmjUbXgzzlCfb4vAY,61952
+singlestoredb/__init__.py,sha256=NFQVpJ4HpaZ09FGns8WHu4EFEqcI3FQ5Xmo65kb12vA,2331
 singlestoredb/auth.py,sha256=RmYiH0Wlc2RXc4pTlRMysxtBI445ggCIwojWKC_eDLE,7844
 singlestoredb/config.py,sha256=t3aiWi1i3kT5VhEgXca0gwT6591YkZUed-wzvVEBMs0,13424
 singlestoredb/connection.py,sha256=I2AP_0l7hNARfXiSuVW953CsGYn_rKbTg_NyWEiGHbY,47542
@@ -7,7 +7,7 @@ singlestoredb/converters.py,sha256=6gN3_RzSbw0Aimd5cGgBNPNq1yiHb1a_NK8qC9DmOQ0,2
 singlestoredb/exceptions.py,sha256=WCCJrNSsU-hD-621Jpd6bwmvGftQ7byXkk-XKXlaxpg,3354
 singlestoredb/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 singlestoredb/pytest.py,sha256=TH364xRCN7_QaN0oRQDHixrEcDx_ZBgu3bmY0tvKrYU,9357
-singlestoredb/types.py,sha256=
+singlestoredb/types.py,sha256=g6iJnOSCuRUkuUJOYSdRPt3QTjC9h2Dq4fqFFktXxXg,10770
 singlestoredb/vectorstore.py,sha256=4YvXml3PpOEOtUGO7gylucKG2Rny8Bx6L29kmhsFiCY,8600
 singlestoredb/ai/__init__.py,sha256=5vlx0XpzxalMKySnVF7y40gfuCgaz7COUKqN4KfNKF8,116
 singlestoredb/ai/chat.py,sha256=oDig8C8QdPEHL-JmmpdFvt_Ct7-K_D0pG_UJ00WCZ7Y,828
@@ -37,11 +37,11 @@ singlestoredb/functions/ext/rowdat_1.py,sha256=UNMMUA8mb6iIRfJV2FsdA20Sw6s-LEdHQ
 singlestoredb/functions/ext/utils.py,sha256=OPMFD-tTCx2Kk9jguQkrTr7e4AgNkt15YsvaT1YSmN8,5480
 singlestoredb/fusion/__init__.py,sha256=FHWtrg6OJFTf6Ye197V5sU6ssryr2h6FBcDIgXP7-H4,367
 singlestoredb/fusion/graphql.py,sha256=SHqsPe4xgawdsTPHEtJGQlybYGWqPrGMmyK-v20RLac,5420
-singlestoredb/fusion/handler.py,sha256=
+singlestoredb/fusion/handler.py,sha256=JmdIjBUUUDUKsgqTBc7pL_NVvUPXha90VpfAd03mHL4,28598
 singlestoredb/fusion/registry.py,sha256=_eT1gd38VPlFKs5f9Pu6lqQyoDQ_ixW5O56QwYLQ89Y,6361
 singlestoredb/fusion/result.py,sha256=KAwhXxXVgfkAWekCFY8-Y03ANKDiTflYRXyEc_1Id0k,12189
 singlestoredb/fusion/handlers/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-singlestoredb/fusion/handlers/export.py,sha256=
+singlestoredb/fusion/handlers/export.py,sha256=MqPINMHGl-7SkKzdHcgg343uoFQDXaKSHdoFmefA-KM,15834
 singlestoredb/fusion/handlers/files.py,sha256=pCx1sqnjPtQrp39rv_V4RX9CVtj6uSiL6HPUyiABYpI,19681
 singlestoredb/fusion/handlers/job.py,sha256=3enfxHwERH7T4u0FEwOPN0IL0GtepaCYgEsisiy3Df4,21753
 singlestoredb/fusion/handlers/models.py,sha256=XWaPJQc3GQIOAcjNcxBSGUBJ3xu2qkzQ4ILa40TFQmY,6486
@@ -49,14 +49,14 @@ singlestoredb/fusion/handlers/stage.py,sha256=PP-SSP204lwpmnycSXXSmFPzoN535JVuwg
 singlestoredb/fusion/handlers/utils.py,sha256=nV2lSzKhv7CzM7I_uIh5kmDV0Ec6VeeKoHczx5pVNcw,11009
 singlestoredb/fusion/handlers/workspace.py,sha256=NxoEY5xd5lCQmXiim4nhAYCL0agHo1H_rGPpqa31hiw,28397
 singlestoredb/http/__init__.py,sha256=4cEDvLloGc3LSpU-PnIwacyu0n5oIIIE6xk2SPyWD_w,939
-singlestoredb/http/connection.py,sha256=
+singlestoredb/http/connection.py,sha256=X-zRf7BfsaKRg8GXcaa5Ic42b9uqEfqqxiI47ZijpDE,41221
 singlestoredb/magics/__init__.py,sha256=fqCBQ0s8o1CYE4Xo_XiSbkLDzLgMNDgpSkOx66-uDZw,1244
 singlestoredb/magics/run_personal.py,sha256=M11xHi9lWquh_pLSpFI89LGE7PhOPQOGqlSPDl48itE,1900
 singlestoredb/magics/run_shared.py,sha256=rnKpW4d8CJvD6ehK8jG8FlxuqZvjZl4KocPTsk-23O8,1805
 singlestoredb/management/__init__.py,sha256=A66ZnFyX--PsAZ2tvtYUfIUBvVGDBFQsnVc6nGTlX60,277
 singlestoredb/management/billing_usage.py,sha256=0UHFSPCrN0nyeGFFM-HXS3NP8pYmYo2BCCahDEPXvzg,3883
 singlestoredb/management/cluster.py,sha256=auBzNYIXvnI6rq3DNpPgJhwWoT6JsyZRikjpON23Pxg,14867
-singlestoredb/management/export.py,sha256=
+singlestoredb/management/export.py,sha256=9kNb9C9HHyUrIfIWVvpghal47SQutoa2PTPmysIPSq8,9378
 singlestoredb/management/files.py,sha256=Z9GpS2EHf9atE8kJdz1vJtsiT80O6TV00MPhqyXfAAw,31579
 singlestoredb/management/inference_api.py,sha256=9d9-7edoZ6JI3SPvStcVDOSHOY6l38V1MFpyskdLAZY,2684
 singlestoredb/management/job.py,sha256=Npfe1JLYJlggGBrXLniPKwKUKF1i3alvSY1SFtvauSs,25498
@@ -126,9 +126,9 @@ singlestoredb/tests/test_dbapi.py,sha256=cNJoTEZvYG7ckcwT7xqlkJX-2TDEYGTDDU1Iguc
 singlestoredb/tests/test_exceptions.py,sha256=vscMYmdOJr0JmkTAJrNI2w0Q96Nfugjkrt5_lYnw8i0,1176
 singlestoredb/tests/test_ext_func.py,sha256=LhuPz8o3UF7x2LNod5oZ1tlxeLvGDEUE5FnzdsIbSPs,44643
 singlestoredb/tests/test_ext_func_data.py,sha256=9kn8BWmCjkbnP6hSbFhmhcdW4OmVT-GSvBTIzFBLEys,48796
-singlestoredb/tests/test_fusion.py,sha256=
+singlestoredb/tests/test_fusion.py,sha256=XT5rhYx32mndcZGaW2Xc7DTLMLEcf_vO3w1Dxss9nMM,52120
 singlestoredb/tests/test_http.py,sha256=7hwXe61hlUes3nji0MTTZweo94tJAlJ-vA5ct9geXFQ,8868
-singlestoredb/tests/test_management.py,sha256=
+singlestoredb/tests/test_management.py,sha256=PdY_rrMvossP0HGsppN-F0KfP5WuZmDaJcTCjIPONEo,47221
 singlestoredb/tests/test_plugin.py,sha256=P1nXLnTafaHkHN-6bVbGryxTu7OWJPU9SYFZ_WQUwq8,845
 singlestoredb/tests/test_results.py,sha256=Zg1ynZFRZqalAMfNLOU5C6BDXaox6JxrKm_XZwVNFcg,6753
 singlestoredb/tests/test_types.py,sha256=YeVE6KPqlqzJke-4hbRmc8ko1E7RLHu5S8qLg04Bl5Y,4632
@@ -149,9 +149,9 @@ singlestoredb/utils/results.py,sha256=wR70LhCqlobniZf52r67zYLBOKjWHQm68NAskdRQND
 singlestoredb/utils/xdict.py,sha256=-wi1lSPTnY99fhVMBhPKJ8cCsQhNG4GMUfkEBDKYgCw,13321
 sqlx/__init__.py,sha256=4Sdn8HN-Hf8v0_wCt60DCckCg8BvgM3-9r4YVfZycRE,89
 sqlx/magic.py,sha256=6VBlotgjautjev599tHaTYOfcfOA9m6gV_-P1_Qc4lI,3622
-singlestoredb-1.14.0.dist-info/LICENSE,sha256=Bojenzui8aPNjlF3w4ojguDP7sTf8vFV_9Gc2UAG1sg,11542
-singlestoredb-1.14.0.dist-info/METADATA,sha256=
-singlestoredb-1.14.0.dist-info/WHEEL,sha256=UyMHzmWA0xVqVPKfTiLs2eN3OWWZUl-kQemNbpIqlKo,100
-singlestoredb-1.14.0.dist-info/entry_points.txt,sha256=bSLaTWB5zGjpVYPAaI46MkkDup0su-eb3uAhCNYuRV0,48
-singlestoredb-1.14.0.dist-info/top_level.txt,sha256=lA65Vf4qAMfg_s1oG3LEO90h4t1Z-SPDbRqkevI3bSY,40
-singlestoredb-1.14.0.dist-info/RECORD,,
+singlestoredb-1.14.2.dist-info/LICENSE,sha256=Bojenzui8aPNjlF3w4ojguDP7sTf8vFV_9Gc2UAG1sg,11542
+singlestoredb-1.14.2.dist-info/METADATA,sha256=82J9S2a0qPCrJocyKKdv6cXWsGpVvoT_x2cCYuOW9fY,5949
+singlestoredb-1.14.2.dist-info/WHEEL,sha256=UyMHzmWA0xVqVPKfTiLs2eN3OWWZUl-kQemNbpIqlKo,100
+singlestoredb-1.14.2.dist-info/entry_points.txt,sha256=bSLaTWB5zGjpVYPAaI46MkkDup0su-eb3uAhCNYuRV0,48
+singlestoredb-1.14.2.dist-info/top_level.txt,sha256=lA65Vf4qAMfg_s1oG3LEO90h4t1Z-SPDbRqkevI3bSY,40
+singlestoredb-1.14.2.dist-info/RECORD,,
{singlestoredb-1.14.0.dist-info → singlestoredb-1.14.2.dist-info}/LICENSE
File without changes
{singlestoredb-1.14.0.dist-info → singlestoredb-1.14.2.dist-info}/WHEEL
File without changes
{singlestoredb-1.14.0.dist-info → singlestoredb-1.14.2.dist-info}/entry_points.txt
File without changes
{singlestoredb-1.14.0.dist-info → singlestoredb-1.14.2.dist-info}/top_level.txt
File without changes