singlestoredb 1.12.4__py3-none-any.whl → 1.13.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of singlestoredb might be problematic. Click here for more details.
- singlestoredb/__init__.py +1 -1
- singlestoredb/ai/__init__.py +1 -0
- singlestoredb/ai/chat.py +26 -0
- singlestoredb/ai/embeddings.py +18 -15
- singlestoredb/apps/__init__.py +1 -0
- singlestoredb/apps/_config.py +6 -0
- singlestoredb/apps/_connection_info.py +8 -0
- singlestoredb/apps/_python_udfs.py +85 -0
- singlestoredb/config.py +14 -2
- singlestoredb/functions/__init__.py +15 -1
- singlestoredb/functions/decorator.py +102 -252
- singlestoredb/functions/dtypes.py +545 -198
- singlestoredb/functions/ext/asgi.py +421 -129
- singlestoredb/functions/ext/json.py +29 -36
- singlestoredb/functions/ext/mmap.py +1 -1
- singlestoredb/functions/ext/rowdat_1.py +50 -70
- singlestoredb/functions/signature.py +816 -144
- singlestoredb/functions/typing.py +41 -0
- singlestoredb/functions/utils.py +421 -0
- singlestoredb/http/connection.py +3 -1
- singlestoredb/management/inference_api.py +101 -0
- singlestoredb/management/manager.py +6 -1
- singlestoredb/management/organization.py +17 -0
- singlestoredb/management/utils.py +2 -2
- singlestoredb/tests/ext_funcs/__init__.py +476 -237
- singlestoredb/tests/test_ext_func.py +192 -3
- singlestoredb/tests/test_management.py +5 -5
- singlestoredb/tests/test_udf.py +101 -131
- singlestoredb/tests/test_udf_returns.py +459 -0
- {singlestoredb-1.12.4.dist-info → singlestoredb-1.13.1.dist-info}/METADATA +2 -1
- {singlestoredb-1.12.4.dist-info → singlestoredb-1.13.1.dist-info}/RECORD +35 -29
- {singlestoredb-1.12.4.dist-info → singlestoredb-1.13.1.dist-info}/LICENSE +0 -0
- {singlestoredb-1.12.4.dist-info → singlestoredb-1.13.1.dist-info}/WHEEL +0 -0
- {singlestoredb-1.12.4.dist-info → singlestoredb-1.13.1.dist-info}/entry_points.txt +0 -0
- {singlestoredb-1.12.4.dist-info → singlestoredb-1.13.1.dist-info}/top_level.txt +0 -0
|
@@ -0,0 +1,41 @@
|
|
|
1
|
+
from typing import Any
|
|
2
|
+
from typing import Iterable
|
|
3
|
+
from typing import Tuple
|
|
4
|
+
from typing import TypeVar
|
|
5
|
+
|
|
6
|
+
try:
|
|
7
|
+
from typing import TypeVarTuple # type: ignore
|
|
8
|
+
from typing import Unpack # type: ignore
|
|
9
|
+
except ImportError:
|
|
10
|
+
# Python 3.8 and earlier do not have TypeVarTuple
|
|
11
|
+
from typing_extensions import TypeVarTuple # type: ignore
|
|
12
|
+
from typing_extensions import Unpack # type: ignore
|
|
13
|
+
|
|
14
|
+
|
|
15
|
+
T = TypeVar('T', bound=Iterable[Any]) # Generic type for iterable types
|
|
16
|
+
|
|
17
|
+
#
|
|
18
|
+
# Masked types are used for pairs of vectors where the first element is the
|
|
19
|
+
# vector and the second element is a boolean mask indicating which elements
|
|
20
|
+
# are NULL. The boolean mask is a vector of the same length as the first
|
|
21
|
+
# element, where True indicates that the corresponding element in the first
|
|
22
|
+
# element is NULL.
|
|
23
|
+
#
|
|
24
|
+
# This is needed for vector types that do not support NULL values, such as
|
|
25
|
+
# numpy arrays and pandas Series.
|
|
26
|
+
#
|
|
27
|
+
|
|
28
|
+
|
|
29
|
+
class Masked(Tuple[T, T]):
    """
    Pair of (values, null-mask) vectors.

    The first element is the data vector and the second is a boolean
    mask of the same length, where True marks elements of the data
    vector that are NULL.  This is needed for vector containers that
    cannot represent NULLs natively (e.g., numpy arrays).
    """

    def __new__(cls, *args: T) -> 'Masked[Tuple[T, T]]':  # type: ignore
        # Only the first two positional arguments (data, mask) are kept;
        # any extras are silently ignored.
        return tuple.__new__(cls, (args[0], args[1]))  # type: ignore
|
|
32
|
+
|
|
33
|
+
|
|
34
|
+
Ts = TypeVarTuple('Ts')
|
|
35
|
+
|
|
36
|
+
|
|
37
|
+
class Table(Tuple[Unpack[Ts]]):
    """Return type for a table valued function."""

    def __new__(cls, *args: Unpack[Ts]) -> 'Table[Tuple[Unpack[Ts]]]':  # type: ignore
        # A Table is simply a tuple of its column values / vectors.
        return tuple.__new__(cls, args)  # type: ignore
|
|
@@ -0,0 +1,421 @@
|
|
|
1
|
+
import dataclasses
|
|
2
|
+
import inspect
|
|
3
|
+
import struct
|
|
4
|
+
import sys
|
|
5
|
+
import types
|
|
6
|
+
import typing
|
|
7
|
+
from enum import Enum
|
|
8
|
+
from typing import Any
|
|
9
|
+
from typing import Dict
|
|
10
|
+
from typing import Iterable
|
|
11
|
+
from typing import Tuple
|
|
12
|
+
from typing import Union
|
|
13
|
+
|
|
14
|
+
from .typing import Masked
|
|
15
|
+
|
|
16
|
+
if sys.version_info >= (3, 10):
|
|
17
|
+
_UNION_TYPES = {typing.Union, types.UnionType}
|
|
18
|
+
else:
|
|
19
|
+
_UNION_TYPES = {typing.Union}
|
|
20
|
+
|
|
21
|
+
|
|
22
|
+
is_dataclass = dataclasses.is_dataclass
|
|
23
|
+
|
|
24
|
+
|
|
25
|
+
def is_masked(obj: Any) -> bool:
    """Check if an object is a Masked type."""
    origin = typing.get_origin(obj)
    # Only parameterized generics (Masked[...]) have a non-None origin.
    if origin is None:
        return False
    if origin is Masked:
        return True
    return inspect.isclass(origin) and issubclass(origin, Masked)
|
|
32
|
+
|
|
33
|
+
|
|
34
|
+
def is_union(x: Any) -> bool:
    """Check if the object is a Union."""
    origin = typing.get_origin(x)
    return origin in _UNION_TYPES
|
|
37
|
+
|
|
38
|
+
|
|
39
|
+
def get_annotations(obj: Any) -> Dict[str, Any]:
    """Get the resolved type annotations of an object."""
    hints = typing.get_type_hints(obj)
    return hints
|
|
42
|
+
|
|
43
|
+
|
|
44
|
+
def get_module(obj: Any) -> str:
|
|
45
|
+
"""Get the module of an object."""
|
|
46
|
+
module = getattr(obj, '__module__', '').split('.')
|
|
47
|
+
if module:
|
|
48
|
+
return module[0]
|
|
49
|
+
return ''
|
|
50
|
+
|
|
51
|
+
|
|
52
|
+
def get_type_name(obj: Any) -> str:
    """Get the type name of an object (class name, or instance's class name)."""
    try:
        return obj.__name__
    except AttributeError:
        pass
    try:
        return obj.__class__.__name__
    except AttributeError:
        return ''
|
|
59
|
+
|
|
60
|
+
|
|
61
|
+
def is_numpy(obj: Any) -> bool:
    """Check if an object is a numpy array (class, generic alias, or instance)."""
    # Parameterized numpy.ndarray[...] generic aliases stringify like this.
    if str(obj).startswith('numpy.ndarray['):
        return True

    # The ndarray class itself.
    if inspect.isclass(obj):
        return get_module(obj) == 'numpy' and get_type_name(obj) == 'ndarray'

    # A typing generic whose origin is ndarray.
    origin = typing.get_origin(obj)
    if get_module(origin) == 'numpy' and get_type_name(origin) == 'ndarray':
        return True

    # An actual ndarray instance.
    inst_type = type(obj)
    return get_module(inst_type) == 'numpy' and get_type_name(inst_type) == 'ndarray'
|
|
80
|
+
|
|
81
|
+
|
|
82
|
+
def is_dataframe(obj: Any) -> bool:
    """Check if an object is a DataFrame (pandas / polars) or a pyarrow Table."""
    # Cheating here a bit so we don't have to import pandas / polars / pyarrow
    # unless we absolutely need to: compare module and type names instead.
    expected = {'pandas': 'DataFrame', 'polars': 'DataFrame', 'pyarrow': 'Table'}
    wanted = expected.get(get_module(obj))
    return wanted is not None and get_type_name(obj) == wanted
|
|
93
|
+
|
|
94
|
+
|
|
95
|
+
def is_vector(obj: Any, include_masks: bool = False) -> bool:
    """
    Check if an object is a vector type.

    Parameters
    ----------
    obj : Any
        The object or type annotation to check.
    include_masks : bool
        If True, a ``Masked`` (data, null-mask) pair also counts as a
        vector.  Default is False.

    Returns
    -------
    bool

    """
    if is_pandas_series(obj) \
            or is_polars_series(obj) \
            or is_pyarrow_array(obj) \
            or is_numpy(obj):
        return True
    # BUG FIX: ``include_masks`` was previously ignored -- Masked types
    # were always treated as vectors regardless of the flag.
    return include_masks and is_masked(obj)
|
|
102
|
+
|
|
103
|
+
|
|
104
|
+
def get_data_format(obj: Any) -> str:
    """Return the data format of the DataFrame / Table / vector."""
    # Cheating here a bit so we don't have to import pandas / polars / pyarrow
    # unless we absolutely need to: dispatch on the top-level module name.
    module_formats = {
        'pandas': 'pandas',
        'polars': 'polars',
        'pyarrow': 'arrow',
        'numpy': 'numpy',
    }
    fmt = module_formats.get(get_module(obj))
    if fmt is not None:
        return fmt
    if isinstance(obj, list):
        return 'list'
    return 'scalar'
|
|
119
|
+
|
|
120
|
+
|
|
121
|
+
def is_pandas_series(obj: Any) -> bool:
    """Check if an object is a pandas Series."""
    # For Optional[...] / Union annotations, inspect the first member.
    if is_union(obj):
        obj = typing.get_args(obj)[0]
    return get_module(obj) == 'pandas' and get_type_name(obj) == 'Series'
|
|
129
|
+
|
|
130
|
+
|
|
131
|
+
def is_polars_series(obj: Any) -> bool:
    """Check if an object is a polars Series."""
    # For Optional[...] / Union annotations, inspect the first member.
    if is_union(obj):
        obj = typing.get_args(obj)[0]
    return get_module(obj) == 'polars' and get_type_name(obj) == 'Series'
|
|
139
|
+
|
|
140
|
+
|
|
141
|
+
def is_pyarrow_array(obj: Any) -> bool:
    """Check if an object is a pyarrow Array."""
    # For Optional[...] / Union annotations, inspect the first member.
    if is_union(obj):
        obj = typing.get_args(obj)[0]
    return get_module(obj) == 'pyarrow' and get_type_name(obj) == 'Array'
|
|
149
|
+
|
|
150
|
+
|
|
151
|
+
def is_typeddict(obj: Any) -> bool:
|
|
152
|
+
"""Check if an object is a TypedDict."""
|
|
153
|
+
if hasattr(typing, 'is_typeddict'):
|
|
154
|
+
return typing.is_typeddict(obj) # noqa: TYP006
|
|
155
|
+
return False
|
|
156
|
+
|
|
157
|
+
|
|
158
|
+
def is_namedtuple(obj: Any) -> bool:
    """Check if an object is a named tuple (class or instance)."""
    if inspect.isclass(obj):
        tuple_like = issubclass(obj, tuple)
    else:
        tuple_like = isinstance(obj, tuple)
    # namedtuple classes and instances both expose _asdict and _fields.
    return tuple_like and hasattr(obj, '_asdict') and hasattr(obj, '_fields')
|
|
171
|
+
|
|
172
|
+
|
|
173
|
+
def is_pydantic(obj: Any) -> bool:
    """Check if an object is a pydantic model class."""
    if not inspect.isclass(obj):
        return False
    # We don't want to import pydantic here, so walk the MRO looking
    # for a base whose module / name match pydantic.BaseModel.
    for base in inspect.getmro(obj):
        if get_module(base) == 'pydantic' and get_type_name(base) == 'BaseModel':
            return True
    return False
|
|
184
|
+
|
|
185
|
+
|
|
186
|
+
class VectorTypes(str, Enum):
|
|
187
|
+
"""Enum for vector types."""
|
|
188
|
+
F16 = 'f16'
|
|
189
|
+
F32 = 'f32'
|
|
190
|
+
F64 = 'f64'
|
|
191
|
+
I8 = 'i8'
|
|
192
|
+
I16 = 'i16'
|
|
193
|
+
I32 = 'i32'
|
|
194
|
+
I64 = 'i64'
|
|
195
|
+
|
|
196
|
+
|
|
197
|
+
def _vector_type_to_numpy_type(
|
|
198
|
+
vector_type: VectorTypes,
|
|
199
|
+
) -> str:
|
|
200
|
+
"""Convert a vector type to a numpy type."""
|
|
201
|
+
if vector_type == VectorTypes.F32:
|
|
202
|
+
return 'f4'
|
|
203
|
+
elif vector_type == VectorTypes.F64:
|
|
204
|
+
return 'f8'
|
|
205
|
+
elif vector_type == VectorTypes.I8:
|
|
206
|
+
return 'i1'
|
|
207
|
+
elif vector_type == VectorTypes.I16:
|
|
208
|
+
return 'i2'
|
|
209
|
+
elif vector_type == VectorTypes.I32:
|
|
210
|
+
return 'i4'
|
|
211
|
+
elif vector_type == VectorTypes.I64:
|
|
212
|
+
return 'i8'
|
|
213
|
+
raise ValueError(f'unsupported element type: {vector_type}')
|
|
214
|
+
|
|
215
|
+
|
|
216
|
+
def _vector_type_to_struct_format(
|
|
217
|
+
vec: Any,
|
|
218
|
+
vector_type: VectorTypes,
|
|
219
|
+
) -> str:
|
|
220
|
+
"""Convert a vector type to a struct format string."""
|
|
221
|
+
n = len(vec)
|
|
222
|
+
if vector_type == VectorTypes.F32:
|
|
223
|
+
if isinstance(vec, (bytes, bytearray)):
|
|
224
|
+
n = n // 4
|
|
225
|
+
return f'<{n}f'
|
|
226
|
+
elif vector_type == VectorTypes.F64:
|
|
227
|
+
if isinstance(vec, (bytes, bytearray)):
|
|
228
|
+
n = n // 8
|
|
229
|
+
return f'<{n}d'
|
|
230
|
+
elif vector_type == VectorTypes.I8:
|
|
231
|
+
return f'<{n}b'
|
|
232
|
+
elif vector_type == VectorTypes.I16:
|
|
233
|
+
if isinstance(vec, (bytes, bytearray)):
|
|
234
|
+
n = n // 2
|
|
235
|
+
return f'<{n}h'
|
|
236
|
+
elif vector_type == VectorTypes.I32:
|
|
237
|
+
if isinstance(vec, (bytes, bytearray)):
|
|
238
|
+
n = n // 4
|
|
239
|
+
return f'<{n}i'
|
|
240
|
+
elif vector_type == VectorTypes.I64:
|
|
241
|
+
if isinstance(vec, (bytes, bytearray)):
|
|
242
|
+
n = n // 8
|
|
243
|
+
return f'<{n}q'
|
|
244
|
+
raise ValueError(f'unsupported element type: {vector_type}')
|
|
245
|
+
|
|
246
|
+
|
|
247
|
+
def unpack_vector(
    obj: Union[bytes, bytearray],
    vec_type: VectorTypes = VectorTypes.F32,
) -> Tuple[Any, ...]:
    """
    Unpack a vector from bytes.

    Parameters
    ----------
    obj : bytes or bytearray
        The object to unpack.
    vec_type : VectorTypes
        The type of the elements in the vector.
        Can be one of 'f32', 'f64', 'i8', 'i16', 'i32', or 'i64'.
        Default is 'f32'.

    Returns
    -------
    Tuple[Any, ...]
        The unpacked vector as a tuple of scalar elements.

    """
    # The element count in the struct format is derived from len(obj)
    # and the element width of ``vec_type``.
    return struct.unpack(_vector_type_to_struct_format(obj, vec_type), obj)
|
|
270
|
+
|
|
271
|
+
|
|
272
|
+
def pack_vector(
    obj: Any,
    vec_type: VectorTypes = VectorTypes.F32,
) -> bytes:
    """
    Pack a vector into bytes.

    Parameters
    ----------
    obj : Any
        The object to pack.
    vec_type : VectorTypes
        The type of the elements in the vector.
        Can be one of 'f32', 'f64', 'i8', 'i16', 'i32', or 'i64'.
        Default is 'f32'.

    Returns
    -------
    bytes
        The packed vector.

    Raises
    ------
    ValueError
        If the object is not a supported vector container.

    """
    # Plain Python sequences are packed element-by-element.
    if isinstance(obj, (list, tuple)):
        return struct.pack(_vector_type_to_struct_format(obj, vec_type), *obj)

    # numpy arrays serialize directly to raw bytes.
    if is_numpy(obj):
        return obj.tobytes()

    # pandas / polars / pyarrow values are converted through numpy.
    # The imports are deferred so these packages stay optional.
    if is_pandas_series(obj):
        import pandas as pd
        return pd.Series(obj).to_numpy().tobytes()

    if is_polars_series(obj):
        import polars as pl
        return pl.Series(obj).to_numpy().tobytes()

    if is_pyarrow_array(obj):
        import pyarrow as pa
        return pa.array(obj).to_numpy().tobytes()

    raise ValueError(
        f'unsupported object type: {type(obj)}',
    )
|
|
315
|
+
|
|
316
|
+
|
|
317
|
+
def unpack_vectors(
    arr_of_vec: Any,
    vec_type: VectorTypes = VectorTypes.F32,
) -> Iterable[Any]:
    """
    Unpack a vector from an array of bytes.

    Parameters
    ----------
    arr_of_vec : Iterable[Any]
        The array of bytes to unpack.
    vec_type : VectorTypes
        The type of the elements in the vector.
        Can be one of 'f32', 'f64', 'i8', 'i16', 'i32', or 'i64'.
        Default is 'f32'.

    Returns
    -------
    Iterable[Any]
        The unpacked vectors, in a container matching the input family.

    Raises
    ------
    ValueError
        If the input container type is not supported.

    """
    # Plain Python containers are unpacked element-by-element into tuples.
    if isinstance(arr_of_vec, (list, tuple)):
        return [unpack_vector(x, vec_type) for x in arr_of_vec]

    # numpy is only imported once we know we have an array-like container.
    import numpy as np

    dtype = _vector_type_to_numpy_type(vec_type)

    np_arr = np.array(
        [np.frombuffer(x, dtype=dtype) for x in arr_of_vec],
        dtype=dtype,
    )

    if is_numpy(arr_of_vec):
        return np_arr

    # NOTE(review): pd.Series / pl.Series on a 2-D np_arr will fail for
    # multi-element vectors -- confirm this path only sees 1-D data.
    if is_pandas_series(arr_of_vec):
        import pandas as pd
        return pd.Series(np_arr)

    if is_polars_series(arr_of_vec):
        import polars as pl
        return pl.Series(np_arr)

    if is_pyarrow_array(arr_of_vec):
        import pyarrow as pa
        return pa.array(np_arr)

    raise ValueError(
        f'unsupported object type: {type(arr_of_vec)}',
    )
|
|
369
|
+
|
|
370
|
+
|
|
371
|
+
def pack_vectors(
    arr_of_arr: Iterable[Any],
    vec_type: VectorTypes = VectorTypes.F32,
) -> Iterable[Any]:
    """
    Pack an array of vectors into an array of bytes.

    Parameters
    ----------
    arr_of_arr : Iterable[Any]
        The array of vectors to pack.
    vec_type : VectorTypes
        The type of the elements in each vector.
        Can be one of 'f32', 'f64', 'i8', 'i16', 'i32', or 'i64'.
        Default is 'f32'.

    Returns
    -------
    Iterable[Any]
        The array of packed vectors, in a container matching the input.

    Raises
    ------
    ValueError
        If the input container type is not supported.

    """
    if isinstance(arr_of_arr, (list, tuple)):
        # BUG FIX: each vector must be unpacked into struct.pack arguments
        # (the old code passed the whole sequence as a single argument,
        # which always raised struct.error), and the format string must be
        # computed per vector since lengths may differ.
        return [
            struct.pack(_vector_type_to_struct_format(vec, vec_type), *vec)
            for vec in arr_of_arr
        ]

    import numpy as np

    # Use object type because numpy truncates nulls at the end of fixed binary
    np_arr = np.array([x.tobytes() for x in arr_of_arr], dtype=np.object_)

    if is_numpy(arr_of_arr):
        return np_arr

    # Deferred imports keep pandas / polars / pyarrow optional.
    if is_pandas_series(arr_of_arr):
        import pandas as pd
        return pd.Series(np_arr)

    if is_polars_series(arr_of_arr):
        import polars as pl
        return pl.Series(np_arr)

    if is_pyarrow_array(arr_of_arr):
        import pyarrow as pa
        return pa.array(np_arr)

    raise ValueError(
        f'unsupported object type: {type(arr_of_arr)}',
    )
|
singlestoredb/http/connection.py
CHANGED
|
@@ -648,7 +648,9 @@ class Cursor(connection.Cursor):
|
|
|
648
648
|
if 'UNSIGNED' in data_type:
|
|
649
649
|
flags = 32
|
|
650
650
|
if data_type.endswith('BLOB') or data_type.endswith('BINARY'):
|
|
651
|
-
converter = functools.partial(
|
|
651
|
+
converter = functools.partial(
|
|
652
|
+
b64decode_converter, converter, # type: ignore
|
|
653
|
+
)
|
|
652
654
|
charset = 63 # BINARY
|
|
653
655
|
if type_code == 0: # DECIMAL
|
|
654
656
|
type_code = types.ColumnType.get_code('NEWDECIMAL')
|
|
@@ -0,0 +1,101 @@
|
|
|
1
|
+
#!/usr/bin/env python
|
|
2
|
+
"""SingleStoreDB Cloud Inference API."""
|
|
3
|
+
import os
|
|
4
|
+
from typing import Any
|
|
5
|
+
from typing import Dict
|
|
6
|
+
from typing import Optional
|
|
7
|
+
|
|
8
|
+
from .utils import vars_to_str
|
|
9
|
+
from singlestoredb.exceptions import ManagementError
|
|
10
|
+
from singlestoredb.management.manager import Manager
|
|
11
|
+
|
|
12
|
+
|
|
13
|
+
class InferenceAPIInfo(object):
    """
    Inference API definition.

    This object is not directly instantiated. It is used in results
    of API calls on the :class:`InferenceAPIManager`. See :meth:`InferenceAPIManager.get`.
    """

    # ID of the inference service
    service_id: str
    # name of the hosted model
    model_name: str
    # display name of the inference API
    name: str
    # URL used to connect to the inference service
    connection_url: str
    # ID of the project the service belongs to
    project_id: str

    def __init__(
        self,
        service_id: str,
        model_name: str,
        name: str,
        connection_url: str,
        project_id: str,
    ):
        self.service_id = service_id
        self.connection_url = connection_url
        self.model_name = model_name
        self.name = name
        self.project_id = project_id

    @classmethod
    def from_dict(
        cls,
        obj: Dict[str, Any],
    ) -> 'InferenceAPIInfo':
        """
        Construct a Inference API from a dictionary of values.

        Parameters
        ----------
        obj : dict
            Dictionary of values

        Returns
        -------
        :class:`InferenceAPIInfo`

        """
        # Keys follow the management REST API's camelCase naming.
        out = cls(
            service_id=obj['serviceID'],
            project_id=obj['projectID'],
            model_name=obj['modelName'],
            name=obj['name'],
            connection_url=obj['connectionURL'],
        )
        return out

    def __str__(self) -> str:
        """Return string representation."""
        return vars_to_str(self)

    def __repr__(self) -> str:
        """Return string representation."""
        return str(self)
|
|
75
|
+
|
|
76
|
+
|
|
77
|
+
class InferenceAPIManager(object):
    """
    SingleStoreDB Inference APIs manager.

    This class should be instantiated using :attr:`Organization.inference_apis`.

    Parameters
    ----------
    manager : Manager, optional
        The management API manager the requests are issued through

    See Also
    --------
    :attr:`InferenceAPI`
    """

    def __init__(self, manager: Optional[Manager]):
        self._manager = manager
        # Project scope is taken from the environment, not from the caller.
        self.project_id = os.environ.get('SINGLESTOREDB_PROJECT')

    def get(self, model_name: str) -> InferenceAPIInfo:
        """
        Retrieve the inference API information for a hosted model.

        Parameters
        ----------
        model_name : str
            Name of the model to look up

        Returns
        -------
        :class:`InferenceAPIInfo`

        Raises
        ------
        ManagementError
            If no manager was configured

        """
        if self._manager is None:
            raise ManagementError(msg='Manager not initialized')
        res = self._manager._get(f'inferenceapis/{self.project_id}/{model_name}').json()
        return InferenceAPIInfo.from_dict(res)
|
|
@@ -62,6 +62,7 @@ class Manager(object):
|
|
|
62
62
|
)
|
|
63
63
|
if not new_access_token:
|
|
64
64
|
raise ManagementError(msg='No management token was configured.')
|
|
65
|
+
|
|
65
66
|
self._is_jwt = not access_token and new_access_token and is_jwt(new_access_token)
|
|
66
67
|
self._sess = requests.Session()
|
|
67
68
|
self._sess.headers.update({
|
|
@@ -70,10 +71,14 @@ class Manager(object):
|
|
|
70
71
|
'Accept': 'application/json',
|
|
71
72
|
'User-Agent': f'SingleStoreDB-Python/{client_version}',
|
|
72
73
|
})
|
|
74
|
+
|
|
73
75
|
self._base_url = urljoin(
|
|
74
|
-
base_url
|
|
76
|
+
base_url
|
|
77
|
+
or config.get_option('management.base_url')
|
|
78
|
+
or type(self).default_base_url,
|
|
75
79
|
version or type(self).default_version,
|
|
76
80
|
) + '/'
|
|
81
|
+
|
|
77
82
|
self._params: Dict[str, str] = {}
|
|
78
83
|
if organization_id:
|
|
79
84
|
self._params['organizationID'] = organization_id
|
|
@@ -7,6 +7,7 @@ from typing import Optional
|
|
|
7
7
|
from typing import Union
|
|
8
8
|
|
|
9
9
|
from ..exceptions import ManagementError
|
|
10
|
+
from .inference_api import InferenceAPIManager
|
|
10
11
|
from .job import JobsManager
|
|
11
12
|
from .manager import Manager
|
|
12
13
|
from .utils import vars_to_str
|
|
@@ -207,3 +208,19 @@ class Organization(object):
|
|
|
207
208
|
:class:`JobsManager`
|
|
208
209
|
"""
|
|
209
210
|
return JobsManager(self._manager)
|
|
211
|
+
|
|
212
|
+
@property
|
|
213
|
+
def inference_apis(self) -> InferenceAPIManager:
|
|
214
|
+
"""
|
|
215
|
+
Retrieve a SingleStoreDB inference api manager.
|
|
216
|
+
|
|
217
|
+
Parameters
|
|
218
|
+
----------
|
|
219
|
+
manager : WorkspaceManager, optional
|
|
220
|
+
The WorkspaceManager the InferenceAPIManager belongs to
|
|
221
|
+
|
|
222
|
+
Returns
|
|
223
|
+
-------
|
|
224
|
+
:class:`InferenceAPIManager`
|
|
225
|
+
"""
|
|
226
|
+
return InferenceAPIManager(self._manager)
|
|
@@ -30,7 +30,7 @@ JSONList = List[JSON]
|
|
|
30
30
|
T = TypeVar('T')
|
|
31
31
|
|
|
32
32
|
if sys.version_info < (3, 10):
|
|
33
|
-
PathLike = Union[str, os.PathLike]
|
|
33
|
+
PathLike = Union[str, os.PathLike] # type: ignore
|
|
34
34
|
PathLikeABC = os.PathLike
|
|
35
35
|
else:
|
|
36
36
|
PathLike = Union[str, os.PathLike[str]]
|
|
@@ -73,7 +73,7 @@ def ttl_property(ttl: datetime.timedelta) -> Callable[[Any], Any]:
|
|
|
73
73
|
"""Property with a time-to-live."""
|
|
74
74
|
def wrapper(func: Callable[[Any], Any]) -> Any:
|
|
75
75
|
out = TTLProperty(func, ttl=ttl)
|
|
76
|
-
return functools.wraps(func)(out)
|
|
76
|
+
return functools.wraps(func)(out) # type: ignore
|
|
77
77
|
return wrapper
|
|
78
78
|
|
|
79
79
|
|