singlestoredb 1.10.0__py3-none-any.whl → 1.12.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- singlestoredb/__init__.py +1 -1
- singlestoredb/config.py +6 -0
- singlestoredb/connection.py +7 -0
- singlestoredb/converters.py +5 -5
- singlestoredb/functions/__init__.py +1 -0
- singlestoredb/functions/decorator.py +258 -69
- singlestoredb/functions/ext/asgi.py +121 -27
- singlestoredb/functions/signature.py +100 -9
- singlestoredb/fusion/handlers/export.py +58 -2
- singlestoredb/fusion/handlers/files.py +6 -6
- singlestoredb/fusion/handlers/models.py +250 -0
- singlestoredb/fusion/handlers/utils.py +5 -5
- singlestoredb/fusion/result.py +1 -1
- singlestoredb/http/connection.py +4 -0
- singlestoredb/management/export.py +30 -7
- singlestoredb/management/files.py +89 -26
- singlestoredb/mysql/connection.py +25 -19
- singlestoredb/server/__init__.py +0 -0
- singlestoredb/server/docker.py +455 -0
- singlestoredb/server/free_tier.py +267 -0
- singlestoredb/tests/test_udf.py +84 -32
- singlestoredb/utils/events.py +16 -0
- {singlestoredb-1.10.0.dist-info → singlestoredb-1.12.0.dist-info}/METADATA +3 -1
- {singlestoredb-1.10.0.dist-info → singlestoredb-1.12.0.dist-info}/RECORD +28 -24
- {singlestoredb-1.10.0.dist-info → singlestoredb-1.12.0.dist-info}/LICENSE +0 -0
- {singlestoredb-1.10.0.dist-info → singlestoredb-1.12.0.dist-info}/WHEEL +0 -0
- {singlestoredb-1.10.0.dist-info → singlestoredb-1.12.0.dist-info}/entry_points.txt +0 -0
- {singlestoredb-1.10.0.dist-info → singlestoredb-1.12.0.dist-info}/top_level.txt +0 -0
singlestoredb/functions/signature.py  CHANGED

@@ -1,4 +1,5 @@
 #!/usr/bin/env python3
+import dataclasses
 import datetime
 import inspect
 import numbers
@@ -22,6 +23,12 @@ try:
 except ImportError:
     has_numpy = False
 
+try:
+    import pydantic
+    has_pydantic = True
+except ImportError:
+    has_pydantic = False
+
 from . import dtypes as dt
 from ..mysql.converters import escape_item  # type: ignore
@@ -243,6 +250,9 @@ def classify_dtype(dtype: Any) -> str:
     if isinstance(dtype, list):
         return '|'.join(classify_dtype(x) for x in dtype)
 
+    if isinstance(dtype, str):
+        return sql_to_dtype(dtype)
+
     # Specific types
     if dtype is None or dtype is type(None):  # noqa: E721
         return 'null'
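Plain SQL type strings are now valid inputs to the classifier as well; a rough illustration in comments (the exact dtype spellings are assumed from `sql_to_dtype`'s existing behavior, which this diff does not show):

    # classify_dtype(float)     -> 'float64' via the existing scalar branches
    # classify_dtype('DOUBLE')  -> whatever sql_to_dtype('DOUBLE') yields,
    #                              e.g. a 'float64'-style dtype string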
@@ -253,6 +263,21 @@ def classify_dtype(dtype: Any) -> str:
     if dtype is bool:
         return 'bool'
 
+    if dataclasses.is_dataclass(dtype):
+        fields = dataclasses.fields(dtype)
+        item_dtypes = ','.join(
+            f'{classify_dtype(simplify_dtype(x.type))}' for x in fields
+        )
+        return f'tuple[{item_dtypes}]'
+
+    if has_pydantic and inspect.isclass(dtype) and issubclass(dtype, pydantic.BaseModel):
+        fields = dtype.model_fields.values()
+        item_dtypes = ','.join(
+            f'{classify_dtype(simplify_dtype(x.annotation))}'  # type: ignore
+            for x in fields
+        )
+        return f'tuple[{item_dtypes}]'
+
     if not inspect.isclass(dtype):
         # Check for compound types
         origin = typing.get_origin(dtype)
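With these branches, a dataclass or pydantic model annotation is classified as a tuple dtype built from its field annotations. A minimal sketch, assuming the default scalar mappings (`float` to `float64`, `int` to `int64`); the `Stats` class is illustrative, not from the diff:

    import dataclasses

    @dataclasses.dataclass
    class Stats:
        mean: float
        n: int

    # classify_dtype(Stats) walks the two fields and produces
    # something like 'tuple[float64,int64]'.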
@@ -261,7 +286,7 @@ def classify_dtype(dtype: Any) -> str:
         if origin is Tuple:
             args = typing.get_args(dtype)
             item_dtypes = ','.join(classify_dtype(x) for x in args)
-            return f'tuple
+            return f'tuple[{item_dtypes}]'
 
         # Array types
         elif issubclass(origin, array_types):
@@ -312,7 +337,10 @@ def classify_dtype(dtype: Any) -> str:
     if is_int:
         return int_type_map.get(name, 'int64')
 
-    raise TypeError(
+    raise TypeError(
+        f'unsupported type annotation: {dtype}; '
+        'use `args`/`returns` on the @udf/@tvf decotator to specify the data type',
+    )
 
 
 def collapse_dtypes(dtypes: Union[str, List[str]]) -> str:
@@ -428,6 +456,7 @@ def get_signature(func: Callable[..., Any], name: Optional[str] = None) -> Dict[
     args: List[Dict[str, Any]] = []
     attrs = getattr(func, '_singlestoredb_attrs', {})
     name = attrs.get('name', name if name else func.__name__)
+    function_type = attrs.get('function_type', 'udf')
     out: Dict[str, Any] = dict(name=name, args=args)
 
     arg_names = [x for x in signature.parameters]
@@ -448,6 +477,7 @@ def get_signature(func: Callable[..., Any], name: Optional[str] = None) -> Dict[
 
     args_overrides = attrs.get('args', None)
     returns_overrides = attrs.get('returns', None)
+    output_fields = attrs.get('output_fields', None)
 
     spec_diff = set(arg_names).difference(set(annotations.keys()))
 
@@ -488,7 +518,7 @@ def get_signature(func: Callable[..., Any], name: Optional[str] = None) -> Dict[
         arg_type = collapse_dtypes([
             classify_dtype(x) for x in simplify_dtype(annotations[arg])
         ])
-        sql = dtype_to_sql(arg_type)
+        sql = dtype_to_sql(arg_type, function_type=function_type)
         args.append(dict(name=arg, dtype=arg_type, sql=sql, default=defaults[i]))
 
     if returns_overrides is None \
@@ -498,13 +528,56 @@ def get_signature(func: Callable[..., Any], name: Optional[str] = None) -> Dict[
     if isinstance(returns_overrides, str):
         sql = returns_overrides
         out_type = sql_to_dtype(sql)
+    elif isinstance(returns_overrides, list):
+        if not output_fields:
+            output_fields = [
+                string.ascii_letters[i] for i in range(len(returns_overrides))
+            ]
+        out_type = 'tuple[' + collapse_dtypes([
+            classify_dtype(x)
+            for x in simplify_dtype(returns_overrides)
+        ]).replace('|', ',') + ']'
+        sql = dtype_to_sql(
+            out_type, function_type=function_type, field_names=output_fields,
+        )
+    elif dataclasses.is_dataclass(returns_overrides):
+        out_type = collapse_dtypes([
+            classify_dtype(x)
+            for x in simplify_dtype([x.type for x in returns_overrides.fields])
+        ])
+        sql = dtype_to_sql(
+            out_type,
+            function_type=function_type,
+            field_names=[x.name for x in returns_overrides.fields],
+        )
+    elif has_pydantic and inspect.isclass(returns_overrides) \
+            and issubclass(returns_overrides, pydantic.BaseModel):
+        out_type = collapse_dtypes([
+            classify_dtype(x)
+            for x in simplify_dtype([x for x in returns_overrides.model_fields.values()])
+        ])
+        sql = dtype_to_sql(
+            out_type,
+            function_type=function_type,
+            field_names=[x for x in returns_overrides.model_fields.keys()],
+        )
     elif returns_overrides is not None and not isinstance(returns_overrides, str):
         raise TypeError(f'unrecognized type for return value: {returns_overrides}')
     else:
+        if not output_fields:
+            if dataclasses.is_dataclass(signature.return_annotation):
+                output_fields = [
+                    x.name for x in dataclasses.fields(signature.return_annotation)
+                ]
+            elif has_pydantic and inspect.isclass(signature.return_annotation) \
+                    and issubclass(signature.return_annotation, pydantic.BaseModel):
+                output_fields = list(signature.return_annotation.model_fields.keys())
         out_type = collapse_dtypes([
             classify_dtype(x) for x in simplify_dtype(signature.return_annotation)
         ])
-        sql = dtype_to_sql(
+        sql = dtype_to_sql(
+            out_type, function_type=function_type, field_names=output_fields,
+        )
     out['returns'] = dict(dtype=out_type, sql=sql, default=None)
 
     copied_keys = ['database', 'environment', 'packages', 'resources', 'replace']
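The net effect at the decorator level: `returns` may now be a list of types, a dataclass, or a pydantic model, and field names flow into the generated SQL. A hedged sketch; the `udf` import path is the package's documented one, but the `returns`/`output_fields` keywords shown here are inferred from the attributes read above rather than confirmed by this diff:

    from singlestoredb.functions import udf

    # Multi-column return value with explicit field names
    # (inferred from the new `output_fields` attribute):
    @udf(returns=[float, int], output_fields=['mean', 'n'])
    def summarize(x: float):
        return x, 1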
@@ -559,7 +632,12 @@ def sql_to_dtype(sql: str) -> str:
     return dtype
 
 
-def dtype_to_sql(
+def dtype_to_sql(
+    dtype: str,
+    default: Any = None,
+    field_names: Optional[List[str]] = None,
+    function_type: str = 'udf',
+) -> str:
     """
     Convert a collapsed dtype string to a SQL type.
 
@@ -569,6 +647,8 @@ def dtype_to_sql(dtype: str, default: Any = None) -> str:
         Simplified data type string
     default : Any, optional
         Default value
+    field_names : List[str], optional
+        Field names for tuple types
 
     Returns
     -------
@@ -592,7 +672,7 @@ def dtype_to_sql(dtype: str, default: Any = None) -> str:
     if dtype.startswith('array['):
         _, dtypes = dtype.split('[', 1)
         dtypes = dtypes[:-1]
-        item_dtype = dtype_to_sql(dtypes)
+        item_dtype = dtype_to_sql(dtypes, function_type=function_type)
         return f'ARRAY({item_dtype}){nullable}{default_clause}'
 
     if dtype.startswith('tuple['):
@@ -600,11 +680,22 @@ def dtype_to_sql(dtype: str, default: Any = None) -> str:
         dtypes = dtypes[:-1]
         item_dtypes = []
         for i, item in enumerate(dtypes.split(',')):
-
+            if field_names:
+                name = field_names[i]
+            else:
+                name = string.ascii_letters[i]
             if '=' in item:
                 name, item = item.split('=', 1)
-            item_dtypes.append(
-
+            item_dtypes.append(
+                f'`{name}` ' + dtype_to_sql(item, function_type=function_type),
+            )
+        if function_type == 'udf':
+            return f'RECORD({", ".join(item_dtypes)}){nullable}{default_clause}'
+        else:
+            return re.sub(
+                r' NOT NULL\s*$', r'',
+                f'TABLE({", ".join(item_dtypes)}){nullable}{default_clause}',
+            )
 
     return f'{sql_type_map[dtype]}{nullable}{default_clause}'
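The practical effect of the `function_type` split: a tuple result renders as `RECORD(...)` for a scalar UDF and as `TABLE(...)` for a table-valued function, with the trailing `NOT NULL` stripped in the TVF case. A rough illustration in comments, assuming conventional scalar maps (`float64` to `DOUBLE`, `int64` to `BIGINT`); the exact output strings are not taken from the diff:

    # dtype_to_sql('tuple[float64,int64]', field_names=['mean', 'n'])
    #   -> roughly 'RECORD(`mean` DOUBLE NOT NULL, `n` BIGINT NOT NULL) NOT NULL'
    # dtype_to_sql('tuple[float64,int64]', field_names=['mean', 'n'],
    #              function_type='tvf')
    #   -> roughly 'TABLE(`mean` DOUBLE NOT NULL, `n` BIGINT NOT NULL)'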
singlestoredb/fusion/handlers/export.py  CHANGED

@@ -43,8 +43,6 @@ class CreateClusterIdentity(SQLHandler):
 
     Remarks
     -------
-    * ``FROM <table>`` specifies the SingleStore table to export. The same name will
-      be used for the exported table.
     * ``CATALOG`` specifies the details of the catalog to connect to.
     * ``LINK`` specifies the details of the data storage to connect to.
 
@@ -69,6 +67,8 @@ class CreateClusterIdentity(SQLHandler):
 
     """
 
+    _enabled = False
+
     def run(self, params: Dict[str, Any]) -> Optional[FusionSQLResult]:
         # Catalog
         catalog_config = json.loads(params['catalog'].get('catalog_config', '{}') or '{}')
@@ -110,11 +110,34 @@ class CreateExport(SQLHandler):
         from_table
         catalog
         storage
+        [ partition_by ]
+        [ order_by ]
+        [ properties ]
     ;
 
     # From table
     from_table = FROM <table>
 
+    # Transforms
+    _col_transform = { VOID | IDENTITY | YEAR | MONTH | DAY | HOUR } ( _transform_col )
+    _transform_col = <column>
+    _arg_transform = { BUCKET | TRUNCATE } ( _transform_col <comma> _transform_arg )
+    _transform_arg = <integer>
+    transform = { _col_transform | _arg_transform }
+
+    # Partitions
+    partition_by = PARTITION BY partition_key,...
+    partition_key = transform
+
+    # Sort order
+    order_by = ORDER BY sort_key,...
+    sort_key = transform [ direction ] [ null_order ]
+    direction = { ASC | DESC | ASCENDING | DESCENDING }
+    null_order = { NULLS_FIRST | NULLS_LAST }
+
+    # Properties
+    properties = PROPERTIES '<json>'
+
     # Catolog
     catalog = CATALOG [ _catalog_config ] [ _catalog_creds ]
     _catalog_config = CONFIG '<catalog-config>'
@@ -163,6 +186,8 @@ class CreateExport(SQLHandler):
 
     """  # noqa
 
+    _enabled = False
+
     def run(self, params: Dict[str, Any]) -> Optional[FusionSQLResult]:
         # From table
         if isinstance(params['from_table'], str):
@@ -189,6 +214,32 @@ class CreateExport(SQLHandler):
         if wsg._manager is None:
             raise TypeError('no workspace manager is associated with workspace group')
 
+        partition_by = []
+        if params['partition_by']:
+            for key in params['partition_by']:
+                transform = key['partition_key']['transform']['col_transform']
+                part = {}
+                part['transform'] = transform[0].lower()
+                part['name'] = transform[-1]['transform_col']
+                partition_by.append(part)
+
+        order_by = []
+        if params['order_by'] and params['order_by']['by']:
+            for key in params['order_by']['by']:
+                transform = key['transform']['col_transform']
+                order = {}
+                order['transform'] = transform[0].lower()
+                order['name'] = transform[-1]['transform_col']
+                order['direction'] = 'ascending'
+                order['null_order'] = 'nulls_first'
+                if key.get('direction'):
+                    if 'desc' in key['direction'].lower():
+                        order['direction'] = 'descending'
+                if key.get('null_order'):
+                    if 'last' in key['null_order'].lower():
+                        order['null_order'] = 'nulls_last'
+                order_by.append(order)
+
         out = ExportService(
             wsg,
             from_database,
@@ -196,6 +247,9 @@ class CreateExport(SQLHandler):
             dict(**catalog_config, **catalog_creds),
             dict(**storage_config, **storage_creds),
             columns=None,
+            partition_by=partition_by or None,
+            order_by=order_by or None,
+            properties=json.loads(params['properties']) if params['properties'] else None,
         ).start()
 
         res = FusionSQLResult()
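Putting the new grammar and parsing together, an export statement can now carry partitioning, sort order, and free-form properties. A sketch only: the CATALOG/LINK clauses are abbreviated with placeholder strings, the column names are invented, and these handlers ship with `_enabled = False`, so the commands may not be exposed by default:

    import singlestoredb as s2

    conn = s2.connect('user:password@host:3306/dbname')
    with conn.cursor() as cur:
        cur.execute("""
            CREATE EXPORT FROM my_table
                CATALOG CONFIG '...'
                LINK CONFIG '...'
                PARTITION BY YEAR(created_at), IDENTITY(region)
                ORDER BY IDENTITY(created_at) DESC NULLS_LAST
                PROPERTIES '{"format": "iceberg"}';
        """)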
@@ -217,6 +271,8 @@ class ShowExport(SQLHandler):
 
     """
 
+    _enabled = False
+
     def run(self, params: Dict[str, Any]) -> Optional[FusionSQLResult]:
         wsg = get_workspace_group({})
         out = ExportStatus(params['export_id'], wsg)
singlestoredb/fusion/handlers/files.py  CHANGED

@@ -209,7 +209,7 @@ class UploadPersonalFileHandler(UploadFileHandler):
         FROM local_path [ overwrite ];
 
     # Path to file
-    path = '<
+    path = '<filename>'
 
     # Path to local file
     local_path = '<local-path>'
@@ -223,7 +223,7 @@ class UploadPersonalFileHandler(UploadFileHandler):
 
     Arguments
     ---------
-    * ``<
+    * ``<filename>``: The filename in the personal/shared space where the file is uploaded.
     * ``<local-path>``: The path to the file to upload in the local
       directory.
 
@@ -237,7 +237,7 @@ class UploadPersonalFileHandler(UploadFileHandler):
     The following command uploads a file to a personal/shared space and overwrite any
     existing files at the specified path::
 
-        UPLOAD PERSONAL FILE TO '
+        UPLOAD PERSONAL FILE TO 'stats.csv'
             FROM '/tmp/user/stats.csv' OVERWRITE;
 
     See Also
@@ -259,7 +259,7 @@ class UploadSharedFileHandler(UploadFileHandler):
         FROM local_path [ overwrite ];
 
     # Path to file
-    path = '<
+    path = '<filename>'
 
     # Path to local file
     local_path = '<local-path>'
@@ -273,7 +273,7 @@ class UploadSharedFileHandler(UploadFileHandler):
 
     Arguments
     ---------
-    * ``<
+    * ``<filename>``: The filename in the personal/shared space where the file is uploaded.
     * ``<local-path>``: The path to the file to upload in the local
       directory.
 
@@ -287,7 +287,7 @@ class UploadSharedFileHandler(UploadFileHandler):
     The following command uploads a file to a personal/shared space and overwrite any
     existing files at the specified path::
 
-        UPLOAD SHARED FILE TO '
+        UPLOAD SHARED FILE TO 'stats.csv'
            FROM '/tmp/user/stats.csv' OVERWRITE;
 
     See Also
singlestoredb/fusion/handlers/models.py  ADDED

@@ -0,0 +1,250 @@
+#!/usr/bin/env python3
+import os
+from typing import Any
+from typing import Dict
+from typing import Optional
+
+from ..handler import SQLHandler
+from ..result import FusionSQLResult
+from .files import ShowFilesHandler
+from .utils import get_file_space
+
+
+class ShowModelsHandler(ShowFilesHandler):
+    """
+    SHOW MODELS
+        [ at_path ] [ <like> ]
+        [ <order-by> ]
+        [ <limit> ] [ recursive ] [ extended ];
+
+    # File path to list
+    at_path = AT '<path>'
+
+    # Should the listing be recursive?
+    recursive = RECURSIVE
+
+    # Should extended attributes be shown?
+    extended = EXTENDED
+
+    Description
+    -----------
+    Displays the list of models in models space.
+
+    Arguments
+    ---------
+    * ``<path>``: A path in the models space.
+    * ``<pattern>``: A pattern similar to SQL LIKE clause.
+      Uses ``%`` as the wildcard character.
+
+    Remarks
+    -------
+    * Use the ``LIKE`` clause to specify a pattern and return only the
+      files that match the specified pattern.
+    * The ``LIMIT`` clause limits the number of results to the
+      specified number.
+    * Use the ``ORDER BY`` clause to sort the results by the specified
+      key. By default, the results are sorted in the ascending order.
+    * The ``AT PATH`` clause specifies the path in the models
+      space to list the files from.
+    * To return more information about the files, use the ``EXTENDED``
+      clause.
+
+    Examples
+    --------
+    The following command lists the models::
+
+        SHOW MODELS;
+
+    The following command lists the models with additional information::
+
+        SHOW MODELS EXTENDED;
+
+    See Also
+    --------
+    * ``UPLOAD MODEL model_name FROM path``
+    * ``DOWNLOAD MODEL model_name``
+
+
+    """  # noqa: E501
+
+    def run(self, params: Dict[str, Any]) -> Optional[FusionSQLResult]:
+        params['file_location'] = 'MODELS'
+
+        return super().run(params)
+
+
+ShowModelsHandler.register(overwrite=True)
+
+
+class UploadModelHandler(SQLHandler):
+    """
+    UPLOAD MODEL model_name
+        FROM local_path [ overwrite ];
+
+    # Model Name
+    model_name = '<model-name>'
+
+    # Path to local file or directory
+    local_path = '<local-path>'
+
+    # Should an existing file be overwritten?
+    overwrite = OVERWRITE
+
+    Description
+    -----------
+    Uploads a file or folder to models space.
+
+    Arguments
+    ---------
+    * ``<model-name>``: Model name.
+    * ``<local-path>``: The path to the file or folder to upload in the local
+      directory.
+
+    Remarks
+    -------
+    * If the ``OVERWRITE`` clause is specified, any existing file at the
+      specified path in the models space is overwritten.
+
+    Examples
+    --------
+    The following command uploads a file to models space and overwrite any
+    existing files at the specified path::
+
+        UPLOAD MODEL model_name
+            FROM 'llama3/' OVERWRITE;
+
+    See Also
+    --------
+    * ``DOWNLOAD MODEL model_name``
+
+    """  # noqa: E501
+
+    def run(self, params: Dict[str, Any]) -> Optional[FusionSQLResult]:
+        params['file_location'] = 'MODELS'
+
+        model_name = params['model_name']
+        local_path = params['local_path']
+
+        file_space = get_file_space(params)
+
+        if os.path.isdir(local_path):
+            file_space.upload_folder(
+                local_path=local_path,
+                path=os.path.join(model_name, ''),
+                overwrite=params['overwrite'],
+            )
+        else:
+            file_space.upload_file(
+                local_path=local_path,
+                path=os.path.join(model_name, local_path),
+                overwrite=params['overwrite'],
+            )
+
+        return None
+
+
+UploadModelHandler.register(overwrite=True)
+
+
+class DownloadModelHandler(SQLHandler):
+    """
+    DOWNLOAD MODEL model_name
+        [ local_path ]
+        [ overwrite ];
+
+    # Model Name
+    model_name = '<model-name>'
+
+    # Path to local directory
+    local_path = TO '<local-path>'
+
+    # Should an existing directory be overwritten?
+    overwrite = OVERWRITE
+
+    Description
+    -----------
+    Download a model from models space.
+
+    Arguments
+    ---------
+    * ``<model-name>``: Model name to download in models space.
+    * ``<local-path>``: Specifies the path in the local directory
+      where the model is downloaded.
+
+    Remarks
+    -------
+    * If the ``OVERWRITE`` clause is specified, any existing file or folder at
+      the download location is overwritten.
+    * If ``<local-path>`` is not specified, the model is downloaded to the current location.
+
+    Examples
+    --------
+    The following command displays the contents of the file on the
+    standard output::
+
+        DOWNLOAD MODEL llama3;
+
+    The following command downloads a model to a specific location and
+    overwrites any existing models folder with the name ``local_llama3`` on the local storage::
+
+        DOWNLOAD MODEL llama3
+            TO 'local_llama3' OVERWRITE;
+
+    See Also
+    --------
+    * ``UPLOAD MODEL model_name FROM local_path``
+
+    """  # noqa: E501
+
+    def run(self, params: Dict[str, Any]) -> Optional[FusionSQLResult]:
+        params['file_location'] = 'MODELS'
+
+        file_space = get_file_space(params)
+
+        model_name = params['model_name']
+        file_space.download_folder(
+            path=os.path.join(model_name, ''),
+            local_path=params['local_path'] or model_name,
+            overwrite=params['overwrite'],
+        )
+
+        return None
+
+
+DownloadModelHandler.register(overwrite=True)
+
+
+class DropModelsHandler(SQLHandler):
+    """
+    DROP MODEL model_name;
+
+    # Model Name
+    model_name = '<model-name>'
+
+    Description
+    -----------
+    Deletes a model from models space.
+
+    Arguments
+    ---------
+    * ``<model-name>``: Model name to delete in models space.
+
+    Example
+    --------
+    The following commands deletes a model from a model space::
+
+        DROP MODEL llama3;
+
+    """  # noqa: E501
+
+    def run(self, params: Dict[str, Any]) -> Optional[FusionSQLResult]:
+        params['file_location'] = 'MODELS'
+        path = os.path.join(params['model_name'], '')
+
+        file_space = get_file_space(params)
+        file_space.removedirs(path=path)
+
+        return None
+
+
+DropModelsHandler.register(overwrite=True)
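A quick sketch of driving the new model handlers from Python, mirroring the docstring examples above (assumes a connection with Fusion SQL handlers enabled):

    import singlestoredb as s2

    conn = s2.connect('user:password@host:3306/dbname')
    with conn.cursor() as cur:
        cur.execute("UPLOAD MODEL llama3 FROM 'llama3/' OVERWRITE;")
        cur.execute("SHOW MODELS EXTENDED;")
        print(cur.fetchall())
        cur.execute("DOWNLOAD MODEL llama3 TO 'local_llama3' OVERWRITE;")
        cur.execute("DROP MODEL llama3;")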
singlestoredb/fusion/handlers/utils.py  CHANGED

@@ -11,6 +11,7 @@ from ...management import manage_workspaces
 from ...management.files import FilesManager
 from ...management.files import FileSpace
 from ...management.files import manage_files
+from ...management.files import MODELS_SPACE
 from ...management.files import PERSONAL_SPACE
 from ...management.files import SHARED_SPACE
 from ...management.workspace import StarterWorkspace
@@ -296,15 +297,14 @@ def get_file_space(params: Dict[str, Any]) -> FileSpace:
     file_location = params.get('file_location')
     if file_location:
         file_location_lower_case = file_location.lower()
-        if (
-            file_location_lower_case != PERSONAL_SPACE and
-            file_location_lower_case != SHARED_SPACE
-        ):
-            raise ValueError(f'invalid file location: {file_location}')
 
         if file_location_lower_case == PERSONAL_SPACE:
             return manager.personal_space
         elif file_location_lower_case == SHARED_SPACE:
             return manager.shared_space
+        elif file_location_lower_case == MODELS_SPACE:
+            return manager.models_space
+        else:
+            raise ValueError(f'invalid file location: {file_location}')
 
     raise KeyError('no file space was specified')
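Outside of Fusion SQL, the same space should be reachable through the files management API. A hedged sketch: the `models_space` accessor is implied by the dispatch above, and `listdir` is assumed from the existing `FileSpace` interface:

    from singlestoredb.management.files import manage_files

    mgr = manage_files()
    models = mgr.models_space   # MODELS space alongside personal/shared
    print(models.listdir('/'))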
singlestoredb/fusion/result.py  CHANGED

singlestoredb/http/connection.py  CHANGED
@@ -972,6 +972,10 @@ class Connection(connection.Connection):
 
     def __init__(self, **kwargs: Any):
         from .. import __version__ as client_version
+
+        if 'SINGLESTOREDB_WORKLOAD_TYPE' in os.environ:
+            client_version += '+' + os.environ['SINGLESTOREDB_WORKLOAD_TYPE']
+
         connection.Connection.__init__(self, **kwargs)
 
         host = kwargs.get('host', get_option('host'))