singlestoredb 1.14.0-cp38-abi3-win32.whl → 1.14.1-cp38-abi3-win32.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of singlestoredb might be problematic.
- _singlestoredb_accel.pyd +0 -0
- singlestoredb/__init__.py +1 -1
- singlestoredb/fusion/handlers/export.py +297 -69
- singlestoredb/http/connection.py +4 -2
- singlestoredb/management/export.py +127 -1
- singlestoredb/tests/test_management.py +45 -31
- singlestoredb/types.py +14 -6
- {singlestoredb-1.14.0.dist-info → singlestoredb-1.14.1.dist-info}/METADATA +1 -1
- {singlestoredb-1.14.0.dist-info → singlestoredb-1.14.1.dist-info}/RECORD +13 -13
- {singlestoredb-1.14.0.dist-info → singlestoredb-1.14.1.dist-info}/LICENSE +0 -0
- {singlestoredb-1.14.0.dist-info → singlestoredb-1.14.1.dist-info}/WHEEL +0 -0
- {singlestoredb-1.14.0.dist-info → singlestoredb-1.14.1.dist-info}/entry_points.txt +0 -0
- {singlestoredb-1.14.0.dist-info → singlestoredb-1.14.1.dist-info}/top_level.txt +0 -0
_singlestoredb_accel.pyd
CHANGED

Binary file
singlestoredb/fusion/handlers/export.py
CHANGED

@@ -1,10 +1,12 @@
 #!/usr/bin/env python3
+import datetime
 import json
 from typing import Any
 from typing import Dict
 from typing import Optional
 
 from .. import result
+from ...management.export import _get_exports
 from ...management.export import ExportService
 from ...management.export import ExportStatus
 from ..handler import SQLHandler
@@ -104,7 +106,100 @@ class CreateClusterIdentity(SQLHandler):
 CreateClusterIdentity.register(overwrite=True)
 
 
-
+def _start_export(params: Dict[str, Any]) -> Optional[FusionSQLResult]:
+    # From table
+    if isinstance(params['from_table'], str):
+        from_database = None
+        from_table = params['from_table']
+    else:
+        from_database, from_table = params['from_table']
+
+    # Catalog
+    catalog_config = json.loads(params['catalog'].get('catalog_config', '{}') or '{}')
+    catalog_creds = json.loads(params['catalog'].get('catalog_creds', '{}') or '{}')
+
+    # Storage
+    storage_config = json.loads(params['storage'].get('link_config', '{}') or '{}')
+    storage_creds = json.loads(params['storage'].get('link_creds', '{}') or '{}')
+
+    storage_config['provider'] = 'S3'
+
+    wsg = get_workspace_group({})
+
+    if from_database is None:
+        raise ValueError('database name must be specified for source table')
+
+    if wsg._manager is None:
+        raise TypeError('no workspace manager is associated with workspace group')
+
+    partition_by = []
+    if params['partition_by']:
+        for key in params['partition_by']:
+            transform = key['partition_key']['transform']['col_transform']
+            part = {}
+            part['transform'] = transform[0].lower()
+            part['name'] = transform[-1]['transform_col']
+            partition_by.append(part)
+
+    order_by = []
+    if params['order_by'] and params['order_by']['by']:
+        for key in params['order_by']['by']:
+            transform = key['transform']['col_transform']
+            order = {}
+            order['transform'] = transform[0].lower()
+            order['name'] = transform[-1]['transform_col']
+            order['direction'] = 'ascending'
+            order['null_order'] = 'nulls_first'
+            if key.get('direction'):
+                if 'desc' in key['direction'].lower():
+                    order['direction'] = 'descending'
+            if key.get('null_order'):
+                if 'last' in key['null_order'].lower():
+                    order['null_order'] = 'nulls_last'
+            order_by.append(order)
+
+    # Refresh interval
+    refresh_interval_delta = None
+    refresh_interval = params.get('refresh_interval', None)
+    if refresh_interval is not None:
+        value = int(refresh_interval['refresh_interval_value'])
+        time_unit = refresh_interval['refresh_interval_time_unit'].upper()
+        if value < 0:
+            raise ValueError('refresh interval must be greater than 0')
+        if time_unit == 'SECONDS':
+            refresh_interval_delta = datetime.timedelta(seconds=int(value))
+        elif time_unit == 'MINUTES':
+            refresh_interval_delta = datetime.timedelta(minutes=int(value))
+        elif time_unit == 'HOURS':
+            refresh_interval_delta = datetime.timedelta(hours=int(value))
+        elif time_unit == 'DAYS':
+            refresh_interval_delta = datetime.timedelta(days=int(value))
+        else:
+            raise ValueError('invalid refresh interval time unit')
+
+    out = ExportService(
+        wsg,
+        from_database,
+        from_table,
+        dict(**catalog_config, **catalog_creds),
+        dict(**storage_config, **storage_creds),
+        columns=None,
+        partition_by=partition_by or None,
+        order_by=order_by or None,
+        properties=json.loads(params['properties']) if params['properties'] else None,
+        incremental=params.get('incremental', False),
+        refresh_interval=int(refresh_interval_delta.total_seconds())
+        if refresh_interval_delta is not None else None,
+    ).start()
+
+    res = FusionSQLResult()
+    res.add_field('ExportID', result.STRING)
+    res.set_rows([(out.export_id,)])
+
+    return res
+
+
+class StartExport(SQLHandler):
     """
     START EXPORT
         from_table
@@ -150,7 +245,7 @@ class CreateExport(SQLHandler):
 
     Description
     -----------
-
+    Start an export.
 
     Arguments
     ---------
@@ -180,7 +275,6 @@ class CreateExport(SQLHandler):
         LINK S3 CONFIG '{
             "region": "us-east-1",
             "endpoint_url": "s3://bucket-name"
-
         }'
         ;
 
@@ -189,77 +283,129 @@ class CreateExport(SQLHandler):
     _enabled = False
 
     def run(self, params: Dict[str, Any]) -> Optional[FusionSQLResult]:
-
-        if isinstance(params['from_table'], str):
-            from_database = None
-            from_table = params['from_table']
-        else:
-            from_database, from_table = params['from_table']
+        return _start_export(params)
 
-        # Catalog
-        catalog_config = json.loads(params['catalog'].get('catalog_config', '{}') or '{}')
-        catalog_creds = json.loads(params['catalog'].get('catalog_creds', '{}') or '{}')
 
-
-        storage_config = json.loads(params['storage'].get('link_config', '{}') or '{}')
-        storage_creds = json.loads(params['storage'].get('link_creds', '{}') or '{}')
+StartExport.register(overwrite=True)
 
-        storage_config['provider'] = 'S3'
 
-
+class StartIncrementalExport(SQLHandler):
+    """
+    START INCREMENTAL EXPORT
+        from_table
+        catalog
+        storage
+        [ partition_by ]
+        [ order_by ]
+        [ properties ]
+        [ refresh_interval ]
+    ;
 
-
-
+    # From table
+    from_table = FROM <table>
 
-
-
+    # Transforms
+    _col_transform = { VOID | IDENTITY | YEAR | MONTH | DAY | HOUR } ( _transform_col )
+    _transform_col = <column>
+    _arg_transform = { BUCKET | TRUNCATE } ( _transform_col <comma> _transform_arg )
+    _transform_arg = <integer>
+    transform = { _col_transform | _arg_transform }
 
-
-
-
-                transform = key['partition_key']['transform']['col_transform']
-                part = {}
-                part['transform'] = transform[0].lower()
-                part['name'] = transform[-1]['transform_col']
-                partition_by.append(part)
-
-        order_by = []
-        if params['order_by'] and params['order_by']['by']:
-            for key in params['order_by']['by']:
-                transform = key['transform']['col_transform']
-                order = {}
-                order['transform'] = transform[0].lower()
-                order['name'] = transform[-1]['transform_col']
-                order['direction'] = 'ascending'
-                order['null_order'] = 'nulls_first'
-                if key.get('direction'):
-                    if 'desc' in key['direction'].lower():
-                        order['direction'] = 'descending'
-                if key.get('null_order'):
-                    if 'last' in key['null_order'].lower():
-                        order['null_order'] = 'nulls_last'
-                order_by.append(order)
+    # Partitions
+    partition_by = PARTITION BY partition_key,...
+    partition_key = transform
 
-
-
-
-
-
-            dict(**storage_config, **storage_creds),
-            columns=None,
-            partition_by=partition_by or None,
-            order_by=order_by or None,
-            properties=json.loads(params['properties']) if params['properties'] else None,
-        ).start()
+    # Sort order
+    order_by = ORDER BY sort_key,...
+    sort_key = transform [ direction ] [ null_order ]
+    direction = { ASC | DESC | ASCENDING | DESCENDING }
+    null_order = { NULLS_FIRST | NULLS_LAST }
 
-
-
-        res.set_rows([(out.export_id,)])
+    # Properties
+    properties = PROPERTIES '<json>'
 
-
+    # Catolog
+    catalog = CATALOG [ _catalog_config ] [ _catalog_creds ]
+    _catalog_config = CONFIG '<catalog-config>'
+    _catalog_creds = CREDENTIALS '<catalog-creds>'
+
+    # Storage
+    storage = LINK [ _link_config ] [ _link_creds ]
+    _link_config = S3 CONFIG '<link-config>'
+    _link_creds = CREDENTIALS '<link-creds>'
+
+    # Refresh interval
+    refresh_interval = REFRESH INTERVAL _refresh_interval_value _refresh_interval_time_unit
+    _refresh_interval_value = <integer>
+    _refresh_interval_time_unit = { SECONDS | MINUTES | HOURS | DAYS }
+
+    Description
+    -----------
+    Start an incremental export.
+
+    Arguments
+    ---------
+    * ``<catalog-config>`` and ``<catalog-creds>``: The catalog configuration.
+    * ``<link-config>`` and ``<link-creds>``: The storage link configuration.
+
+    Remarks
+    -------
+    * ``FROM <table>`` specifies the SingleStore table to export. The same name will
+      be used for the exported table.
+    * ``CATALOG`` specifies the details of the catalog to connect to.
+    * ``LINK`` specifies the details of the data storage to connect to.
+    * ``REFRESH INTERVAL`` specifies the interval for refreshing the
+      incremental export. The default is 1 day.
+
+    Examples
+    --------
+    The following statement starts an export operation with the given
+    catalog and link configurations. The source table to export is
+    named "customer_data"::
 
+        START INCREMENTAL EXPORT FROM my_db.customer_data
+            CATALOG CONFIG '{
+                "catalog_type": "GLUE",
+                "table_format": "ICEBERG",
+                "catalog_id": "13983498723498",
+                "catalog_region": "us-east-1"
+            }'
+            LINK S3 CONFIG '{
+                "region": "us-east-1",
+                "endpoint_url": "s3://bucket-name"
+            }'
+            REFRESH INTERVAL 24 HOURS
+        ;
+
+    """  # noqa
+
+    _enabled = False
+
+    def run(self, params: Dict[str, Any]) -> Optional[FusionSQLResult]:
+        params['incremental'] = True
+        return _start_export(params)
+
+
+StartIncrementalExport.register(overwrite=True)
+
+
+def _format_status(export_id: str, status: ExportStatus) -> Optional[FusionSQLResult]:
+    """Return the status of an export operation."""
+    info = status._info()
 
-
+    res = FusionSQLResult()
+    res.add_field('ExportID', result.STRING)
+    res.add_field('Status', result.STRING)
+    res.add_field('Message', result.STRING)
+    res.set_rows([
+        (
+            export_id,
+            info.get('status', 'Unknown'),
+            info.get('statusMsg', ''),
+        ),
+    ])
+
+    return res
 
 
 class ShowExport(SQLHandler):
@@ -275,9 +421,29 @@ class ShowExport(SQLHandler):
 
     def run(self, params: Dict[str, Any]) -> Optional[FusionSQLResult]:
        wsg = get_workspace_group({})
-
+        return _format_status(
+            params['export_id'], ExportStatus(params['export_id'], wsg),
+        )
+
+
+ShowExport.register(overwrite=True)
+
+
+class ShowExports(SQLHandler):
+    """
+    SHOW EXPORTS [ scope ];
+
+    # Location of the export
+    scope = FOR '<scope>'
+
+    """
+
+    _enabled = False
+
+    def run(self, params: Dict[str, Any]) -> Optional[FusionSQLResult]:
+        wsg = get_workspace_group({})
 
-
+        exports = _get_exports(wsg, params.get('scope', 'all'))
 
         res = FusionSQLResult()
         res.add_field('ExportID', result.STRING)
@@ -285,13 +451,75 @@ class ShowExport(SQLHandler):
         res.add_field('Message', result.STRING)
         res.set_rows([
             (
-
-
-
-            )
+                info['egressID'],
+                info.get('status', 'Unknown'),
+                info.get('statusMsg', ''),
+            )
+            for info in [x._info() for x in exports]
         ])
 
         return res
 
 
-
+ShowExports.register(overwrite=True)
+
+
+class SuspendExport(SQLHandler):
+    """
+    SUSPEND EXPORT export_id;
+
+    # ID of export
+    export_id = '<export-id>'
+
+    """
+
+    _enabled = False
+
+    def run(self, params: Dict[str, Any]) -> Optional[FusionSQLResult]:
+        wsg = get_workspace_group({})
+        service = ExportService.from_export_id(wsg, params['export_id'])
+        return _format_status(params['export_id'], service.suspend())
+
+
+SuspendExport.register(overwrite=True)
+
+
+class ResumeExport(SQLHandler):
+    """
+    RESUME EXPORT export_id;
+
+    # ID of export
+    export_id = '<export-id>'
+
+    """
+
+    _enabled = False
+
+    def run(self, params: Dict[str, Any]) -> Optional[FusionSQLResult]:
+        wsg = get_workspace_group({})
+        service = ExportService.from_export_id(wsg, params['export_id'])
+        return _format_status(params['export_id'], service.resume())
+
+
+ResumeExport.register(overwrite=True)
+
+
+class DropExport(SQLHandler):
+    """
+    DROP EXPORT export_id;
+
+    # ID of export
+    export_id = '<export-id>'
+
+    """
+
+    _enabled = False
+
+    def run(self, params: Dict[str, Any]) -> Optional[FusionSQLResult]:
+        wsg = get_workspace_group({})
+        service = ExportService.from_export_id(wsg, params['export_id'])
+        service.drop()
+        return None
+
+
+DropExport.register(overwrite=True)
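The new `_start_export` helper shared by StartExport and StartIncrementalExport converts the parsed REFRESH INTERVAL clause into whole seconds before handing it to ExportService. A minimal standalone sketch of just that conversion, assuming the same value/unit inputs the grammar produces (the function name and _UNITS table are illustrative, not part of the package):

import datetime
from typing import Optional

_UNITS = {'SECONDS': 'seconds', 'MINUTES': 'minutes', 'HOURS': 'hours', 'DAYS': 'days'}

def refresh_interval_seconds(value: int, time_unit: str) -> Optional[int]:
    # Mirrors the handler: negative values are rejected, unknown units raise,
    # and the timedelta is collapsed to whole seconds for the API payload.
    if value < 0:
        raise ValueError('refresh interval must be greater than 0')
    unit = _UNITS.get(time_unit.upper())
    if unit is None:
        raise ValueError('invalid refresh interval time unit')
    return int(datetime.timedelta(**{unit: int(value)}).total_seconds())

print(refresh_interval_seconds(24, 'HOURS'))  # 86400 seconds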
singlestoredb/http/connection.py
CHANGED

@@ -569,8 +569,10 @@ class Cursor(connection.Cursor):
 
         if res.status_code >= 400:
             if res.text:
-
-
+                m = re.match(r'^Error\s+(\d+).*?:', res.text)
+                if m:
+                    code = m.group(1)
+                    msg = res.text.split(':', 1)[-1]
                     icode = int(code.split()[-1])
                 else:
                     icode = res.status_code
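The reworked error handling above extracts a numeric error code from HTTP API error bodies shaped like "Error <code>...: <message>", and falls back to the raw HTTP status code otherwise. A self-contained sketch of that behavior; only the regex and the fallback come from the diff, while the wrapper function and sample messages are assumptions:

import re

def parse_http_error(text: str, status_code: int):
    m = re.match(r'^Error\s+(\d+).*?:', text)
    if m:
        code = m.group(1)
        msg = text.split(':', 1)[-1]
        return int(code.split()[-1]), msg  # numeric code plus remainder of body
    return status_code, text               # no parsable code in the body

print(parse_http_error('Error 1064 (42000): syntax error', 400))  # (1064, ' syntax error')
print(parse_http_error('Bad Gateway', 502))                       # (502, 'Bad Gateway')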
singlestoredb/management/export.py
CHANGED

@@ -27,6 +27,9 @@ class ExportService(object):
     partition_by: Optional[List[Dict[str, str]]]
     order_by: Optional[List[Dict[str, Dict[str, str]]]]
     properties: Optional[Dict[str, Any]]
+    incremental: bool
+    refresh_interval: Optional[int]
+    export_id: Optional[str]
 
     def __init__(
         self,
@@ -38,6 +41,8 @@ class ExportService(object):
         columns: Optional[List[str]] = None,
         partition_by: Optional[List[Dict[str, str]]] = None,
         order_by: Optional[List[Dict[str, Dict[str, str]]]] = None,
+        incremental: bool = False,
+        refresh_interval: Optional[int] = None,
         properties: Optional[Dict[str, Any]] = None,
     ):
         #: Workspace group
@@ -68,8 +73,30 @@ class ExportService(object):
         self.order_by = order_by or None
         self.properties = properties or None
 
+        self.incremental = incremental
+        self.refresh_interval = refresh_interval
+
+        self.export_id = None
+
         self._manager: Optional[WorkspaceManager] = workspace_group._manager
 
+    @classmethod
+    def from_export_id(
+        self,
+        workspace_group: WorkspaceGroup,
+        export_id: str,
+    ) -> ExportService:
+        """Create export service from export ID."""
+        out = ExportService(
+            workspace_group=workspace_group,
+            database='',
+            table='',
+            catalog_info={},
+            storage_info={},
+        )
+        out.export_id = export_id
+        return out
+
     def __str__(self) -> str:
         """Return string representation."""
         return vars_to_str(self)
@@ -98,6 +125,11 @@ class ExportService(object):
 
     def start(self, tags: Optional[List[str]] = None) -> 'ExportStatus':
         """Start the export process."""
+        if not self.table or not self.database:
+            raise ManagementError(
+                msg='Database and table must be set before starting the export.',
+            )
+
         if self._manager is None:
             raise ManagementError(
                 msg='No workspace manager is associated with this object.',
@@ -122,11 +154,87 @@ class ExportService(object):
                     partitionSpec=partition_spec,
                     sortOrderSpec=sort_order_spec,
                     properties=self.properties,
+                    incremental=self.incremental or None,
+                    refreshInterval=self.refresh_interval
+                    if self.refresh_interval is not None else None,
                 ).items() if v is not None
             },
         )
 
-
+        self.export_id = str(out.json()['egressID'])
+
+        return ExportStatus(self.export_id, self.workspace_group)
+
+    def suspend(self) -> 'ExportStatus':
+        """Suspend the export process."""
+        if self._manager is None:
+            raise ManagementError(
+                msg='No workspace manager is associated with this object.',
+            )
+
+        if self.export_id is None:
+            raise ManagementError(
+                msg='Export ID is not set. You must start the export first.',
+            )
+
+        self._manager._post(
+            f'workspaceGroups/{self.workspace_group.id}/egress/suspendTableEgress',
+            json=dict(egressID=self.export_id),
+        )
+
+        return ExportStatus(self.export_id, self.workspace_group)
+
+    def resume(self) -> 'ExportStatus':
+        """Resume the export process."""
+        if self._manager is None:
+            raise ManagementError(
+                msg='No workspace manager is associated with this object.',
+            )
+
+        if self.export_id is None:
+            raise ManagementError(
+                msg='Export ID is not set. You must start the export first.',
+            )
+
+        self._manager._post(
+            f'workspaceGroups/{self.workspace_group.id}/egress/resumeTableEgress',
+            json=dict(egressID=self.export_id),
+        )
+
+        return ExportStatus(self.export_id, self.workspace_group)
+
+    def drop(self) -> None:
+        """Drop the export process."""
+        if self._manager is None:
+            raise ManagementError(
+                msg='No workspace manager is associated with this object.',
+            )
+
+        if self.export_id is None:
+            raise ManagementError(
+                msg='Export ID is not set. You must start the export first.',
+            )
+
+        self._manager._post(
+            f'workspaceGroups/{self.workspace_group.id}/egress/dropTableEgress',
+            json=dict(egressID=self.export_id),
+        )
+
+        return None
+
+    def status(self) -> ExportStatus:
+        """Get the status of the export process."""
+        if self._manager is None:
+            raise ManagementError(
+                msg='No workspace manager is associated with this object.',
+            )
+
+        if self.export_id is None:
+            raise ManagementError(
+                msg='Export ID is not set. You must start the export first.',
+            )
+
+        return ExportStatus(self.export_id, self.workspace_group)
 
 
 class ExportStatus(object):
@@ -167,3 +275,21 @@ class ExportStatus(object):
 
     def __repr__(self) -> str:
         return self.status
+
+
+def _get_exports(
+    workspace_group: WorkspaceGroup,
+    scope: str = 'all',
+) -> List[ExportStatus]:
+    """Get all exports in the workspace group."""
+    if workspace_group._manager is None:
+        raise ManagementError(
+            msg='No workspace manager is associated with this object.',
+        )
+
+    out = workspace_group._manager._get(
+        f'workspaceGroups/{workspace_group.id}/egress/tableEgressStatus',
+        json=dict(scope=scope),
+    )
+
+    return out.json()
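Together with the Fusion handlers above, the new suspend/resume/drop/status methods give an export a full lifecycle keyed by `export_id`. A hypothetical usage sketch; `wsg` stands in for a real WorkspaceGroup obtained from the Management API, so this illustrates the call pattern rather than being runnable as-is:

from singlestoredb.management.export import ExportService

def pause_then_remove(wsg, export_id: str) -> None:
    # Rehydrate a service object for an export that is already running.
    svc = ExportService.from_export_id(wsg, export_id)
    print(svc.suspend())   # POST .../egress/suspendTableEgress
    print(svc.resume())    # POST .../egress/resumeTableEgress
    svc.drop()             # POST .../egress/dropTableEgress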
singlestoredb/tests/test_management.py
CHANGED

@@ -450,55 +450,59 @@ class TestStage(unittest.TestCase):
     def test_open(self):
         st = self.wg.stage
 
+        open_test_sql = f'open_test_{id(self)}.sql'
+
         # See if error is raised for non-existent file
         with self.assertRaises(s2.ManagementError):
-            st.open(
+            st.open(open_test_sql, 'r')
 
         # Load test file
-        st.upload_file(TEST_DIR / 'test.sql',
+        st.upload_file(TEST_DIR / 'test.sql', open_test_sql)
 
         # Read file using `open`
-        with st.open(
+        with st.open(open_test_sql, 'r') as rfile:
             assert rfile.read() == open(TEST_DIR / 'test.sql').read()
 
         # Read file using `open` with 'rt' mode
-        with st.open(
+        with st.open(open_test_sql, 'rt') as rfile:
             assert rfile.read() == open(TEST_DIR / 'test.sql').read()
 
         # Read file using `open` with 'rb' mode
-        with st.open(
+        with st.open(open_test_sql, 'rb') as rfile:
             assert rfile.read() == open(TEST_DIR / 'test.sql', 'rb').read()
 
         # Read file using `open` with 'rb' mode
         with self.assertRaises(ValueError):
-            with st.open(
+            with st.open(open_test_sql, 'b') as rfile:
                 pass
 
         # Attempt overwrite file using `open` with mode 'x'
         with self.assertRaises(OSError):
-            with st.open(
+            with st.open(open_test_sql, 'x') as wfile:
                 pass
 
         # Attempt overwrite file using `open` with mode 'w'
-        with st.open(
+        with st.open(open_test_sql, 'w') as wfile:
             wfile.write(open(TEST_DIR / 'test2.sql').read())
 
-        txt = st.download_file(
+        txt = st.download_file(open_test_sql, encoding='utf-8')
 
         assert txt == open(TEST_DIR / 'test2.sql').read()
 
+        open_raw_test_sql = f'open_raw_test_{id(self)}.sql'
+
         # Test writer without context manager
-        wfile = st.open(
+        wfile = st.open(open_raw_test_sql, 'w')
         for line in open(TEST_DIR / 'test.sql'):
             wfile.write(line)
         wfile.close()
 
-        txt = st.download_file(
+        txt = st.download_file(open_raw_test_sql, encoding='utf-8')
 
         assert txt == open(TEST_DIR / 'test.sql').read()
 
         # Test reader without context manager
-        rfile = st.open(
+        rfile = st.open(open_raw_test_sql, 'r')
         txt = ''
         for line in rfile:
             txt += line
@@ -509,15 +513,18 @@ class TestStage(unittest.TestCase):
     def test_obj_open(self):
         st = self.wg.stage
 
+        obj_open_test_sql = f'obj_open_test_{id(self)}.sql'
+        obj_open_dir = f'obj_open_dir_{id(self)}'
+
         # Load test file
-        f = st.upload_file(TEST_DIR / 'test.sql',
+        f = st.upload_file(TEST_DIR / 'test.sql', obj_open_test_sql)
 
         # Read file using `open`
         with f.open() as rfile:
             assert rfile.read() == open(TEST_DIR / 'test.sql').read()
 
         # Make sure directories error out
-        d = st.mkdir(
+        d = st.mkdir(obj_open_dir)
         with self.assertRaises(IsADirectoryError):
             d.open()
 
@@ -1143,58 +1150,62 @@ class TestFileSpaces(unittest.TestCase):
 
     def test_open(self):
         for space in [self.personal_space, self.shared_space]:
+            open_test_ipynb = f'open_test_ipynb_{id(self)}.ipynb'
+
             # See if error is raised for non-existent file
             with self.assertRaises(s2.ManagementError):
-                space.open(
+                space.open(open_test_ipynb, 'r')
 
             # Load test file
-            space.upload_file(TEST_DIR / 'test.ipynb',
+            space.upload_file(TEST_DIR / 'test.ipynb', open_test_ipynb)
 
             # Read file using `open`
-            with space.open(
+            with space.open(open_test_ipynb, 'r') as rfile:
                 assert rfile.read() == open(TEST_DIR / 'test.ipynb').read()
 
             # Read file using `open` with 'rt' mode
-            with space.open(
+            with space.open(open_test_ipynb, 'rt') as rfile:
                 assert rfile.read() == open(TEST_DIR / 'test.ipynb').read()
 
             # Read file using `open` with 'rb' mode
-            with space.open(
+            with space.open(open_test_ipynb, 'rb') as rfile:
                 assert rfile.read() == open(TEST_DIR / 'test.ipynb', 'rb').read()
 
             # Read file using `open` with 'rb' mode
             with self.assertRaises(ValueError):
-                with space.open(
+                with space.open(open_test_ipynb, 'b') as rfile:
                     pass
 
             # Attempt overwrite file using `open` with mode 'x'
            with self.assertRaises(OSError):
-                with space.open(
+                with space.open(open_test_ipynb, 'x') as wfile:
                     pass
 
             # Attempt overwrite file using `open` with mode 'w'
-            with space.open(
+            with space.open(open_test_ipynb, 'w') as wfile:
                 wfile.write(open(TEST_DIR / 'test2.ipynb').read())
 
-            txt = space.download_file(
+            txt = space.download_file(open_test_ipynb, encoding='utf-8')
 
             assert txt == open(TEST_DIR / 'test2.ipynb').read()
 
+            open_raw_test_ipynb = f'open_raw_test_{id(self)}.ipynb'
+
             # Test writer without context manager
-            wfile = space.open(
+            wfile = space.open(open_raw_test_ipynb, 'w')
             for line in open(TEST_DIR / 'test.ipynb'):
                 wfile.write(line)
             wfile.close()
 
             txt = space.download_file(
-
+                open_raw_test_ipynb,
                 encoding='utf-8',
             )
 
             assert txt == open(TEST_DIR / 'test.ipynb').read()
 
             # Test reader without context manager
-            rfile = space.open(
+            rfile = space.open(open_raw_test_ipynb, 'r')
             txt = ''
             for line in rfile:
                 txt += line
@@ -1203,15 +1214,18 @@ class TestFileSpaces(unittest.TestCase):
             assert txt == open(TEST_DIR / 'test.ipynb').read()
 
             # Cleanup
-            space.remove(
-            space.remove(
+            space.remove(open_test_ipynb)
+            space.remove(open_raw_test_ipynb)
 
     def test_obj_open(self):
         for space in [self.personal_space, self.shared_space]:
+            obj_open_test_ipynb = f'obj_open_test_{id(self)}.ipynb'
+            obj_open_dir = f'obj_open_dir_{id(self)}'
+
             # Load test file
             f = space.upload_file(
                 TEST_DIR / 'test.ipynb',
-
+                obj_open_test_ipynb,
             )
 
             # Read file using `open`
@@ -1220,7 +1234,7 @@ class TestFileSpaces(unittest.TestCase):
 
             # Make sure directories error out
             with self.assertRaises(s2.ManagementError):
-                space.mkdir(
+                space.mkdir(obj_open_dir)
 
             # Write file using `open`
             with f.open('w', encoding='utf-8') as wfile:
@@ -1248,7 +1262,7 @@ class TestFileSpaces(unittest.TestCase):
             assert txt == open(TEST_DIR / 'test.ipynb').read()
 
             # Cleanup
-            space.remove(
+            space.remove(obj_open_test_ipynb)
 
     def test_os_directories(self):
         for space in [self.personal_space, self.shared_space]:
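The test changes above replace fixed Stage and file-space paths with names derived from `id(self)`, presumably so repeated or overlapping runs against the same workspace don't collide on file names. A small sketch of the naming scheme (Demo is a stand-in for the test classes):

class Demo:
    def unique_name(self, stem: str, ext: str) -> str:
        # id(self) is unique among live objects within one process, so each
        # test-case instance gets its own remote path.
        return f'{stem}_{id(self)}{ext}'

a, b = Demo(), Demo()
print(a.unique_name('open_test', '.sql'))  # e.g. open_test_140231459182928.sql
print(b.unique_name('open_test', '.sql'))  # different id, different name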
singlestoredb/types.py
CHANGED

@@ -173,17 +173,25 @@ class ColumnType(object):
         'DECIMAL', 'DEC', 'FIXED', 'NUMERIC', 0, decimal.Decimal,
     )
     DEC = FIXED = NUMERIC = DECIMAL
-    TINY = TINYINT = BOOL = BOOLEAN = NumberDBAPIType(
-        'TINY', 'TINYINT', 'BOOL', 'BOOLEAN', 1,
+    TINY = TINYINT = BOOL = BOOLEAN = UNSIGNED_TINY = UNSIGNED_TINYINT = NumberDBAPIType(
+        'TINY', 'TINYINT', 'BOOL', 'BOOLEAN', 'UNSIGNED TINY', 'UNSIGNED TINYINT', 1,
+    )
+    SHORT = SMALLINT = UNSIGNED_SHORT = UNSIGNED_SMALLINT = NumberDBAPIType(
+        'SMALLINT', 'SHORT', 'UNSIGNED SHORT', 'UNSIGNED SMALLINT', 2,
+    )
+    LONG = INT = UNSIGNED_LONG = UNSIGNED_INT = NumberDBAPIType(
+        'LONG', 'INT', 'UNSIGNED LONG', 'UNSIGNED INT', 3,
     )
-    SHORT = SMALLINT = NumberDBAPIType('SMALLINT', 'SHORT', 2)
-    LONG = INT = NumberDBAPIType('LONG', 'INT', 3)
     FLOAT = NumberDBAPIType('FLOAT', 4)
     DOUBLE = REAL = NumberDBAPIType('DOUBLE', 5, float)
     NULL = DBAPIType('NULL', 6)
     TIMESTAMP = DatetimeDBAPIType('TIMESTAMP', 7)
-    LONGLONG = BIGINT =
-
+    LONGLONG = BIGINT = UNSIGNED_LONGLONG = UNSIGNED_BIGINT = NumberDBAPIType(
+        'BIGINT', 'LONGLONG', 'UNSIGNED LONGLONG', 'UNSIGNED BIGINT', 8, int,
+    )
+    MEDIUMINT = INT24 = UNSIGNED_MEDIUMINT = UNSIGNED_INT24 = NumberDBAPIType(
+        'MEDIUMINT', 'INT24', 'UNSIGNED MEDIUMINT', 'UNSIGNED INT24', 9,
+    )
     DATE = DBAPIType('DATE', 10, datetime.date)
     TIME = DBAPIType('TIME', 11, datetime.time)
     DATETIME = DatetimeDBAPIType('DATETIME', 12, datetime.datetime)
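The types.py change folds the unsigned integer type names into the same DB-API type objects as their signed counterparts. A minimal stand-in (not the real NumberDBAPIType) showing the aliasing pattern: one object carries several SQL type names plus one numeric code:

class FakeDBAPIType:
    def __init__(self, *args):
        # Collect the SQL type names and the single numeric type code.
        self.names = [a for a in args if isinstance(a, str)]
        self.code = [a for a in args if isinstance(a, int)][0]

BIGINT = UNSIGNED_BIGINT = FakeDBAPIType(
    'BIGINT', 'LONGLONG', 'UNSIGNED LONGLONG', 'UNSIGNED BIGINT', 8,
)
print(BIGINT is UNSIGNED_BIGINT)          # True: two attribute names, one type object
print('UNSIGNED BIGINT' in BIGINT.names)  # True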
{singlestoredb-1.14.0.dist-info → singlestoredb-1.14.1.dist-info}/RECORD
CHANGED

@@ -1,5 +1,5 @@
-_singlestoredb_accel.pyd,sha256=
-singlestoredb/__init__.py,sha256=
+_singlestoredb_accel.pyd,sha256=44TC8rZGqJ2E373qMsrMCXTy3HvMz65Vh-x52kQtmDY,62464
+singlestoredb/__init__.py,sha256=S78W6cTcUSwZR1BTOayQSnIGbOgnKs4Tmw_VSxYqgco,2162
 singlestoredb/auth.py,sha256=RmYiH0Wlc2RXc4pTlRMysxtBI445ggCIwojWKC_eDLE,7844
 singlestoredb/config.py,sha256=t3aiWi1i3kT5VhEgXca0gwT6591YkZUed-wzvVEBMs0,13424
 singlestoredb/connection.py,sha256=I2AP_0l7hNARfXiSuVW953CsGYn_rKbTg_NyWEiGHbY,47542
@@ -7,7 +7,7 @@ singlestoredb/converters.py,sha256=6gN3_RzSbw0Aimd5cGgBNPNq1yiHb1a_NK8qC9DmOQ0,2
 singlestoredb/exceptions.py,sha256=WCCJrNSsU-hD-621Jpd6bwmvGftQ7byXkk-XKXlaxpg,3354
 singlestoredb/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 singlestoredb/pytest.py,sha256=TH364xRCN7_QaN0oRQDHixrEcDx_ZBgu3bmY0tvKrYU,9357
-singlestoredb/types.py,sha256=
+singlestoredb/types.py,sha256=g6iJnOSCuRUkuUJOYSdRPt3QTjC9h2Dq4fqFFktXxXg,10770
 singlestoredb/vectorstore.py,sha256=4YvXml3PpOEOtUGO7gylucKG2Rny8Bx6L29kmhsFiCY,8600
 singlestoredb/ai/__init__.py,sha256=5vlx0XpzxalMKySnVF7y40gfuCgaz7COUKqN4KfNKF8,116
 singlestoredb/ai/chat.py,sha256=oDig8C8QdPEHL-JmmpdFvt_Ct7-K_D0pG_UJ00WCZ7Y,828
@@ -41,7 +41,7 @@ singlestoredb/fusion/handler.py,sha256=ohnU0BIoJ9AHrVLlCHI-3E4Icqoocxqip8T-XyYxB
 singlestoredb/fusion/registry.py,sha256=_eT1gd38VPlFKs5f9Pu6lqQyoDQ_ixW5O56QwYLQ89Y,6361
 singlestoredb/fusion/result.py,sha256=KAwhXxXVgfkAWekCFY8-Y03ANKDiTflYRXyEc_1Id0k,12189
 singlestoredb/fusion/handlers/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-singlestoredb/fusion/handlers/export.py,sha256=
+singlestoredb/fusion/handlers/export.py,sha256=MqPINMHGl-7SkKzdHcgg343uoFQDXaKSHdoFmefA-KM,15834
 singlestoredb/fusion/handlers/files.py,sha256=pCx1sqnjPtQrp39rv_V4RX9CVtj6uSiL6HPUyiABYpI,19681
 singlestoredb/fusion/handlers/job.py,sha256=3enfxHwERH7T4u0FEwOPN0IL0GtepaCYgEsisiy3Df4,21753
 singlestoredb/fusion/handlers/models.py,sha256=XWaPJQc3GQIOAcjNcxBSGUBJ3xu2qkzQ4ILa40TFQmY,6486
@@ -49,14 +49,14 @@ singlestoredb/fusion/handlers/stage.py,sha256=PP-SSP204lwpmnycSXXSmFPzoN535JVuwg
 singlestoredb/fusion/handlers/utils.py,sha256=nV2lSzKhv7CzM7I_uIh5kmDV0Ec6VeeKoHczx5pVNcw,11009
 singlestoredb/fusion/handlers/workspace.py,sha256=NxoEY5xd5lCQmXiim4nhAYCL0agHo1H_rGPpqa31hiw,28397
 singlestoredb/http/__init__.py,sha256=4cEDvLloGc3LSpU-PnIwacyu0n5oIIIE6xk2SPyWD_w,939
-singlestoredb/http/connection.py,sha256=
+singlestoredb/http/connection.py,sha256=X-zRf7BfsaKRg8GXcaa5Ic42b9uqEfqqxiI47ZijpDE,41221
 singlestoredb/magics/__init__.py,sha256=fqCBQ0s8o1CYE4Xo_XiSbkLDzLgMNDgpSkOx66-uDZw,1244
 singlestoredb/magics/run_personal.py,sha256=M11xHi9lWquh_pLSpFI89LGE7PhOPQOGqlSPDl48itE,1900
 singlestoredb/magics/run_shared.py,sha256=rnKpW4d8CJvD6ehK8jG8FlxuqZvjZl4KocPTsk-23O8,1805
 singlestoredb/management/__init__.py,sha256=A66ZnFyX--PsAZ2tvtYUfIUBvVGDBFQsnVc6nGTlX60,277
 singlestoredb/management/billing_usage.py,sha256=0UHFSPCrN0nyeGFFM-HXS3NP8pYmYo2BCCahDEPXvzg,3883
 singlestoredb/management/cluster.py,sha256=auBzNYIXvnI6rq3DNpPgJhwWoT6JsyZRikjpON23Pxg,14867
-singlestoredb/management/export.py,sha256=
+singlestoredb/management/export.py,sha256=T2uU2EaeFYitvK_7475U_wYJS-rnGLE1RsTsP6XbsQs,9376
 singlestoredb/management/files.py,sha256=Z9GpS2EHf9atE8kJdz1vJtsiT80O6TV00MPhqyXfAAw,31579
 singlestoredb/management/inference_api.py,sha256=9d9-7edoZ6JI3SPvStcVDOSHOY6l38V1MFpyskdLAZY,2684
 singlestoredb/management/job.py,sha256=Npfe1JLYJlggGBrXLniPKwKUKF1i3alvSY1SFtvauSs,25498
@@ -128,7 +128,7 @@ singlestoredb/tests/test_ext_func.py,sha256=LhuPz8o3UF7x2LNod5oZ1tlxeLvGDEUE5Fnz
 singlestoredb/tests/test_ext_func_data.py,sha256=9kn8BWmCjkbnP6hSbFhmhcdW4OmVT-GSvBTIzFBLEys,48796
 singlestoredb/tests/test_fusion.py,sha256=S0Jk2NrcOitqM98r5fosHGbZ1sCZ2uxar5t48v-uOD0,52045
 singlestoredb/tests/test_http.py,sha256=7hwXe61hlUes3nji0MTTZweo94tJAlJ-vA5ct9geXFQ,8868
-singlestoredb/tests/test_management.py,sha256=
+singlestoredb/tests/test_management.py,sha256=JGmfI408wp6lCzeteK7qSuiFOMfAgqkpE0jy-DP7uL0,46935
 singlestoredb/tests/test_plugin.py,sha256=P1nXLnTafaHkHN-6bVbGryxTu7OWJPU9SYFZ_WQUwq8,845
 singlestoredb/tests/test_results.py,sha256=Zg1ynZFRZqalAMfNLOU5C6BDXaox6JxrKm_XZwVNFcg,6753
 singlestoredb/tests/test_types.py,sha256=YeVE6KPqlqzJke-4hbRmc8ko1E7RLHu5S8qLg04Bl5Y,4632
@@ -149,9 +149,9 @@ singlestoredb/utils/results.py,sha256=wR70LhCqlobniZf52r67zYLBOKjWHQm68NAskdRQND
 singlestoredb/utils/xdict.py,sha256=-wi1lSPTnY99fhVMBhPKJ8cCsQhNG4GMUfkEBDKYgCw,13321
 sqlx/__init__.py,sha256=4Sdn8HN-Hf8v0_wCt60DCckCg8BvgM3-9r4YVfZycRE,89
 sqlx/magic.py,sha256=6VBlotgjautjev599tHaTYOfcfOA9m6gV_-P1_Qc4lI,3622
-singlestoredb-1.14.
-singlestoredb-1.14.
-singlestoredb-1.14.
-singlestoredb-1.14.
-singlestoredb-1.14.
-singlestoredb-1.14.
+singlestoredb-1.14.1.dist-info/LICENSE,sha256=Bojenzui8aPNjlF3w4ojguDP7sTf8vFV_9Gc2UAG1sg,11542
+singlestoredb-1.14.1.dist-info/METADATA,sha256=7RYWH5ZbFrph0qvRKBQo-ZnhQhL9xuVb2rGrdezCQGU,5895
+singlestoredb-1.14.1.dist-info/WHEEL,sha256=c4k7z5HB0t-y0nBCv6KyJ6KCjn8SEGPddD0lhaPtU3E,96
+singlestoredb-1.14.1.dist-info/entry_points.txt,sha256=bSLaTWB5zGjpVYPAaI46MkkDup0su-eb3uAhCNYuRV0,48
+singlestoredb-1.14.1.dist-info/top_level.txt,sha256=lA65Vf4qAMfg_s1oG3LEO90h4t1Z-SPDbRqkevI3bSY,40
+singlestoredb-1.14.1.dist-info/RECORD,,

{singlestoredb-1.14.0.dist-info → singlestoredb-1.14.1.dist-info}/LICENSE
File without changes

{singlestoredb-1.14.0.dist-info → singlestoredb-1.14.1.dist-info}/WHEEL
File without changes

{singlestoredb-1.14.0.dist-info → singlestoredb-1.14.1.dist-info}/entry_points.txt
File without changes

{singlestoredb-1.14.0.dist-info → singlestoredb-1.14.1.dist-info}/top_level.txt
File without changes
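Each RECORD row above follows the wheel-spec form `path,sha256=<digest>,<size>`, where the digest is the unpadded URL-safe base64 SHA-256 of the file. A sketch of how one such entry is computed:

import base64
import hashlib
import os

def record_entry(path: str) -> str:
    # Hash the file contents, base64url-encode the digest, and strip padding.
    with open(path, 'rb') as f:
        digest = hashlib.sha256(f.read()).digest()
    b64 = base64.urlsafe_b64encode(digest).rstrip(b'=').decode('ascii')
    return f'{path},sha256={b64},{os.path.getsize(path)}'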