singlestoredb 1.13.0__cp38-abi3-macosx_10_9_universal2.whl → 1.13.1__cp38-abi3-macosx_10_9_universal2.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.


_singlestoredb_accel.abi3.so CHANGED (binary file, no diff shown)
singlestoredb/__init__.py CHANGED
@@ -13,7 +13,7 @@ Examples
 
  """
 
- __version__ = '1.13.0'
+ __version__ = '1.13.1'
 
  from typing import Any
 

singlestoredb/ai/__init__.py CHANGED
@@ -1 +1,2 @@
+ from .chat import SingleStoreChatOpenAI # noqa: F401
  from .embeddings import SingleStoreEmbeddings # noqa: F401

singlestoredb/ai/chat.py ADDED
@@ -0,0 +1,26 @@
+ import os
+ from typing import Any
+
+ from singlestoredb.fusion.handlers.utils import get_workspace_manager
+
+ try:
+     from langchain_openai import ChatOpenAI
+ except ImportError:
+     raise ImportError(
+         'Could not import langchain_openai python package. '
+         'Please install it with `pip install langchain_openai`.',
+     )
+
+
+ class SingleStoreChatOpenAI(ChatOpenAI):
+     def __init__(self, model_name: str, **kwargs: Any):
+         inference_api_manger = (
+             get_workspace_manager().organizations.current.inference_apis
+         )
+         info = inference_api_manger.get(model_name=model_name)
+         super().__init__(
+             base_url=info.connection_url,
+             api_key=os.environ.get('SINGLESTOREDB_USER_TOKEN'),
+             model=model_name,
+             **kwargs,
+         )
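
For orientation, a minimal usage sketch of the new class: it resolves the model's connection URL through the Fusion workspace manager and then behaves like a standard langchain ChatOpenAI client. This assumes an environment where SINGLESTOREDB_USER_TOKEN is set and the workspace manager is configured; the model name below is a placeholder, not something from this release.

    # Illustrative only; 'my-hosted-model' is a placeholder model name.
    from singlestoredb.ai import SingleStoreChatOpenAI

    chat = SingleStoreChatOpenAI(model_name='my-hosted-model')
    print(chat.invoke('Say hello in one sentence.'))  # standard ChatOpenAI interface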

singlestoredb/ai/embeddings.py CHANGED
@@ -1,24 +1,27 @@
- import os as _os
+ import os
  from typing import Any
 
+ from singlestoredb.fusion.handlers.utils import get_workspace_manager
+
  try:
-     from langchain_community.embeddings.ollama import OllamaEmbeddings
+     from langchain_openai import OpenAIEmbeddings
  except ImportError:
      raise ImportError(
-         'Could not import langchain_community python package. '
-         'Please install it with `pip install langchain_community`.',
+         'Could not import langchain_openai python package. '
+         'Please install it with `pip install langchain_openai`.',
      )
 
 
- class SingleStoreEmbeddings(OllamaEmbeddings):
-
-     def __init__(self, **kwargs: Any):
-         url = _os.getenv('SINGLESTORE_AI_EXPERIMENTAL_URL')
-         if not url:
-             raise ValueError(
-                 "Environment variable 'SINGLESTORE_AI_EXPERIMENTAL_URL' must be set",
-             )
+ class SingleStoreEmbeddings(OpenAIEmbeddings):
 
-         base_url = url.strip('/v1')
-         kwargs = {'model': 'nomic-embed-text', **kwargs}
-         super().__init__(base_url=base_url, **kwargs)
+     def __init__(self, model_name: str, **kwargs: Any):
+         inference_api_manger = (
+             get_workspace_manager().organizations.current.inference_apis
+         )
+         info = inference_api_manger.get(model_name=model_name)
+         super().__init__(
+             base_url=info.connection_url,
+             api_key=os.environ.get('SINGLESTOREDB_USER_TOKEN'),
+             model=model_name,
+             **kwargs,
+         )
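
A parallel sketch for the reworked embeddings class, which now proxies langchain's OpenAIEmbeddings against the resolved inference endpoint. Same assumptions as above (token and workspace manager available); the embedding model name is a placeholder.

    # Illustrative only; the embedding model name is a placeholder.
    from singlestoredb.ai import SingleStoreEmbeddings

    emb = SingleStoreEmbeddings(model_name='my-embedding-model')
    vector = emb.embed_query('hello world')  # list of floats via langchain_openai
    print(len(vector))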

singlestoredb/functions/__init__.py CHANGED
@@ -1,6 +1,10 @@
  from .decorator import udf # noqa: F401
  from .typing import Masked # noqa: F401
  from .typing import Table # noqa: F401
+ from .utils import pack_vector # noqa: F401
+ from .utils import pack_vectors # noqa: F401
+ from .utils import unpack_vector # noqa: F401
+ from .utils import unpack_vectors # noqa: F401
  from .utils import VectorTypes
 
 

singlestoredb/functions/ext/asgi.py CHANGED
@@ -246,6 +246,192 @@ def get_masked_params(func: Callable[..., Any]) -> List[bool]:
      return [typing.get_origin(x.annotation) is Masked for x in params.values()]
 
 
+ def build_tuple(x: Any) -> Any:
+     """Convert object to tuple."""
+     return tuple(x) if isinstance(x, Masked) else (x, None)
+
+
+ def build_udf_endpoint(
+     func: Callable[..., Any],
+     returns_data_format: str,
+ ) -> Callable[..., Any]:
+     """
+     Build a UDF endpoint for scalar / list types (row-based).
+
+     Parameters
+     ----------
+     func : Callable
+         The function to call as the endpoint
+     returns_data_format : str
+         The format of the return values
+
+     Returns
+     -------
+     Callable
+         The function endpoint
+
+     """
+     if returns_data_format in ['scalar', 'list']:
+
+         async def do_func(
+             row_ids: Sequence[int],
+             rows: Sequence[Sequence[Any]],
+         ) -> Tuple[Sequence[int], List[Tuple[Any, ...]]]:
+             '''Call function on given rows of data.'''
+             return row_ids, [as_tuple(x) for x in zip(func_map(func, rows))]
+
+         return do_func
+
+     return build_vector_udf_endpoint(func, returns_data_format)
+
+
+ def build_vector_udf_endpoint(
+     func: Callable[..., Any],
+     returns_data_format: str,
+ ) -> Callable[..., Any]:
+     """
+     Build a UDF endpoint for vector formats (column-based).
+
+     Parameters
+     ----------
+     func : Callable
+         The function to call as the endpoint
+     returns_data_format : str
+         The format of the return values
+
+     Returns
+     -------
+     Callable
+         The function endpoint
+
+     """
+     masks = get_masked_params(func)
+     array_cls = get_array_class(returns_data_format)
+
+     async def do_func(
+         row_ids: Sequence[int],
+         cols: Sequence[Tuple[Sequence[Any], Optional[Sequence[bool]]]],
+     ) -> Tuple[
+         Sequence[int],
+         List[Tuple[Sequence[Any], Optional[Sequence[bool]]]],
+     ]:
+         '''Call function on given columns of data.'''
+         row_ids = array_cls(row_ids)
+
+         # Call the function with `cols` as the function parameters
+         if cols and cols[0]:
+             out = func(*[x if m else x[0] for x, m in zip(cols, masks)])
+         else:
+             out = func()
+
+         # Single masked value
+         if isinstance(out, Masked):
+             return row_ids, [tuple(out)]
+
+         # Multiple return values
+         if isinstance(out, tuple):
+             return row_ids, [build_tuple(x) for x in out]
+
+         # Single return value
+         return row_ids, [(out, None)]
+
+     return do_func
+
+
+ def build_tvf_endpoint(
+     func: Callable[..., Any],
+     returns_data_format: str,
+ ) -> Callable[..., Any]:
+     """
+     Build a TVF endpoint for scalar / list types (row-based).
+
+     Parameters
+     ----------
+     func : Callable
+         The function to call as the endpoint
+     returns_data_format : str
+         The format of the return values
+
+     Returns
+     -------
+     Callable
+         The function endpoint
+
+     """
+     if returns_data_format in ['scalar', 'list']:
+
+         async def do_func(
+             row_ids: Sequence[int],
+             rows: Sequence[Sequence[Any]],
+         ) -> Tuple[Sequence[int], List[Tuple[Any, ...]]]:
+             '''Call function on given rows of data.'''
+             out_ids: List[int] = []
+             out = []
+             # Call function on each row of data
+             for i, res in zip(row_ids, func_map(func, rows)):
+                 out.extend(as_list_of_tuples(res))
+                 out_ids.extend([row_ids[i]] * (len(out)-len(out_ids)))
+             return out_ids, out
+
+         return do_func
+
+     return build_vector_tvf_endpoint(func, returns_data_format)
+
+
+ def build_vector_tvf_endpoint(
+     func: Callable[..., Any],
+     returns_data_format: str,
+ ) -> Callable[..., Any]:
+     """
+     Build a TVF endpoint for vector formats (column-based).
+
+     Parameters
+     ----------
+     func : Callable
+         The function to call as the endpoint
+     returns_data_format : str
+         The format of the return values
+
+     Returns
+     -------
+     Callable
+         The function endpoint
+
+     """
+     masks = get_masked_params(func)
+     array_cls = get_array_class(returns_data_format)
+
+     async def do_func(
+         row_ids: Sequence[int],
+         cols: Sequence[Tuple[Sequence[Any], Optional[Sequence[bool]]]],
+     ) -> Tuple[
+         Sequence[int],
+         List[Tuple[Sequence[Any], Optional[Sequence[bool]]]],
+     ]:
+         '''Call function on given columns of data.'''
+         # NOTE: There is no way to determine which row ID belongs to
+         #       each result row, so we just have to use the same
+         #       row ID for all rows in the result.
+
+         # Call function on each column of data
+         if cols and cols[0]:
+             res = get_dataframe_columns(
+                 func(*[x if m else x[0] for x, m in zip(cols, masks)]),
+             )
+         else:
+             res = get_dataframe_columns(func())
+
+         # Generate row IDs
+         if isinstance(res[0], Masked):
+             row_ids = array_cls([row_ids[0]] * len(res[0][0]))
+         else:
+             row_ids = array_cls([row_ids[0]] * len(res[0]))
+
+         return row_ids, [build_tuple(x) for x in res]
+
+     return do_func
+
+
  def make_func(
      name: str,
      func: Callable[..., Any],
@@ -273,102 +459,10 @@ def make_func(
      args_data_format = sig.get('args_data_format', 'scalar')
      returns_data_format = sig.get('returns_data_format', 'scalar')
 
-     masks = get_masked_params(func)
-
      if function_type == 'tvf':
-         # Scalar / list types (row-based)
-         if returns_data_format in ['scalar', 'list']:
-             async def do_func(
-                 row_ids: Sequence[int],
-                 rows: Sequence[Sequence[Any]],
-             ) -> Tuple[Sequence[int], List[Tuple[Any, ...]]]:
-                 '''Call function on given rows of data.'''
-                 out_ids: List[int] = []
-                 out = []
-                 # Call function on each row of data
-                 for i, res in zip(row_ids, func_map(func, rows)):
-                     out.extend(as_list_of_tuples(res))
-                     out_ids.extend([row_ids[i]] * (len(out)-len(out_ids)))
-                 return out_ids, out
-
-         # Vector formats (column-based)
-         else:
-             array_cls = get_array_class(returns_data_format)
-
-             async def do_func(  # type: ignore
-                 row_ids: Sequence[int],
-                 cols: Sequence[Tuple[Sequence[Any], Optional[Sequence[bool]]]],
-             ) -> Tuple[
-                 Sequence[int],
-                 List[Tuple[Sequence[Any], Optional[Sequence[bool]]]],
-             ]:
-                 '''Call function on given cols of data.'''
-                 # NOTE: There is no way to determine which row ID belongs to
-                 #       each result row, so we just have to use the same
-                 #       row ID for all rows in the result.
-
-                 def build_tuple(x: Any) -> Any:
-                     return tuple(x) if isinstance(x, Masked) else (x, None)
-
-                 # Call function on each column of data
-                 if cols and cols[0]:
-                     res = get_dataframe_columns(
-                         func(*[x if m else x[0] for x, m in zip(cols, masks)]),
-                     )
-                 else:
-                     res = get_dataframe_columns(func())
-
-                 # Generate row IDs
-                 if isinstance(res[0], Masked):
-                     row_ids = array_cls([row_ids[0]] * len(res[0][0]))
-                 else:
-                     row_ids = array_cls([row_ids[0]] * len(res[0]))
-
-                 return row_ids, [build_tuple(x) for x in res]
-
+         do_func = build_tvf_endpoint(func, returns_data_format)
      else:
-         # Scalar / list types (row-based)
-         if returns_data_format in ['scalar', 'list']:
-             async def do_func(
-                 row_ids: Sequence[int],
-                 rows: Sequence[Sequence[Any]],
-             ) -> Tuple[Sequence[int], List[Tuple[Any, ...]]]:
-                 '''Call function on given rows of data.'''
-                 return row_ids, [as_tuple(x) for x in zip(func_map(func, rows))]
-
-         # Vector formats (column-based)
-         else:
-             array_cls = get_array_class(returns_data_format)
-
-             async def do_func(  # type: ignore
-                 row_ids: Sequence[int],
-                 cols: Sequence[Tuple[Sequence[Any], Optional[Sequence[bool]]]],
-             ) -> Tuple[
-                 Sequence[int],
-                 List[Tuple[Sequence[Any], Optional[Sequence[bool]]]],
-             ]:
-                 '''Call function on given cols of data.'''
-                 row_ids = array_cls(row_ids)
-
-                 def build_tuple(x: Any) -> Any:
-                     return tuple(x) if isinstance(x, Masked) else (x, None)
-
-                 # Call the function with `cols` as the function parameters
-                 if cols and cols[0]:
-                     out = func(*[x if m else x[0] for x, m in zip(cols, masks)])
-                 else:
-                     out = func()
-
-                 # Single masked value
-                 if isinstance(out, Masked):
-                     return row_ids, [tuple(out)]
-
-                 # Multiple return values
-                 if isinstance(out, tuple):
-                     return row_ids, [build_tuple(x) for x in out]
-
-                 # Single return value
-                 return row_ids, [(out, None)]
+         do_func = build_udf_endpoint(func, returns_data_format)
 
      do_func.__name__ = name
      do_func.__doc__ = func.__doc__
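
A hedged sketch of how one of the extracted builders is exercised: the returned coroutine takes row IDs plus row payloads and yields row IDs plus result tuples. The user function below is made up, and the call assumes it runs in the module scope shown above so that build_udf_endpoint and its helpers (func_map, as_tuple, get_array_class) resolve.

    import asyncio

    def upper(name: str) -> str:  # illustrative user function, not from the diff
        return name.upper()

    # 'scalar' selects the row-based path of build_udf_endpoint shown above.
    endpoint = build_udf_endpoint(upper, 'scalar')
    row_ids, results = asyncio.run(endpoint([0, 1], [('a',), ('b',)]))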

singlestoredb/functions/utils.py CHANGED
@@ -8,6 +8,8 @@ from enum import Enum
  from typing import Any
  from typing import Dict
  from typing import Iterable
+ from typing import Tuple
+ from typing import Union
 
  from .typing import Masked
 
@@ -192,151 +194,228 @@ class VectorTypes(str, Enum):
      I64 = 'i64'
 
 
+ def _vector_type_to_numpy_type(
+     vector_type: VectorTypes,
+ ) -> str:
+     """Convert a vector type to a numpy type."""
+     if vector_type == VectorTypes.F32:
+         return 'f4'
+     elif vector_type == VectorTypes.F64:
+         return 'f8'
+     elif vector_type == VectorTypes.I8:
+         return 'i1'
+     elif vector_type == VectorTypes.I16:
+         return 'i2'
+     elif vector_type == VectorTypes.I32:
+         return 'i4'
+     elif vector_type == VectorTypes.I64:
+         return 'i8'
+     raise ValueError(f'unsupported element type: {vector_type}')
+
+
+ def _vector_type_to_struct_format(
+     vec: Any,
+     vector_type: VectorTypes,
+ ) -> str:
+     """Convert a vector type to a struct format string."""
+     n = len(vec)
+     if vector_type == VectorTypes.F32:
+         if isinstance(vec, (bytes, bytearray)):
+             n = n // 4
+         return f'<{n}f'
+     elif vector_type == VectorTypes.F64:
+         if isinstance(vec, (bytes, bytearray)):
+             n = n // 8
+         return f'<{n}d'
+     elif vector_type == VectorTypes.I8:
+         return f'<{n}b'
+     elif vector_type == VectorTypes.I16:
+         if isinstance(vec, (bytes, bytearray)):
+             n = n // 2
+         return f'<{n}h'
+     elif vector_type == VectorTypes.I32:
+         if isinstance(vec, (bytes, bytearray)):
+             n = n // 4
+         return f'<{n}i'
+     elif vector_type == VectorTypes.I64:
+         if isinstance(vec, (bytes, bytearray)):
+             n = n // 8
+         return f'<{n}q'
+     raise ValueError(f'unsupported element type: {vector_type}')
+
+
  def unpack_vector(
-     obj: Any,
-     element_type: VectorTypes = VectorTypes.F32,
- ) -> Iterable[Any]:
+     obj: Union[bytes, bytearray],
+     vec_type: VectorTypes = VectorTypes.F32,
+ ) -> Tuple[Any]:
      """
      Unpack a vector from bytes.
 
      Parameters
      ----------
-     obj : Any
+     obj : bytes or bytearray
          The object to unpack.
-     element_type : VectorTypes
+     vec_type : VectorTypes
          The type of the elements in the vector.
          Can be one of 'f32', 'f64', 'i8', 'i16', 'i32', or 'i64'.
          Default is 'f32'.
 
      Returns
      -------
-     Iterable[Any]
+     Tuple[Any]
          The unpacked vector.
 
      """
-     if isinstance(obj, (bytes, bytearray, list, tuple)):
-         if element_type == 'f32':
-             n = len(obj) // 4
-             fmt = 'f'
-         elif element_type == 'f64':
-             n = len(obj) // 8
-             fmt = 'd'
-         elif element_type == 'i8':
-             n = len(obj)
-             fmt = 'b'
-         elif element_type == 'i16':
-             n = len(obj) // 2
-             fmt = 'h'
-         elif element_type == 'i32':
-             n = len(obj) // 4
-             fmt = 'i'
-         elif element_type == 'i64':
-             n = len(obj) // 8
-             fmt = 'q'
-         else:
-             raise ValueError(f'unsupported element type: {element_type}')
-
-         if isinstance(obj, (bytes, bytearray)):
-             return struct.unpack(f'<{n}{fmt}', obj)
-         return tuple([struct.unpack(f'<{n}{fmt}', x) for x in obj])
-
-     if element_type == 'f32':
-         np_type = 'f4'
-     elif element_type == 'f64':
-         np_type = 'f8'
-     elif element_type == 'i8':
-         np_type = 'i1'
-     elif element_type == 'i16':
-         np_type = 'i2'
-     elif element_type == 'i32':
-         np_type = 'i4'
-     elif element_type == 'i64':
-         np_type = 'i8'
-     else:
-         raise ValueError(f'unsupported element type: {element_type}')
+     return struct.unpack(_vector_type_to_struct_format(obj, vec_type), obj)
+
+
+ def pack_vector(
+     obj: Any,
+     vec_type: VectorTypes = VectorTypes.F32,
+ ) -> bytes:
+     """
+     Pack a vector into bytes.
+
+     Parameters
+     ----------
+     obj : Any
+         The object to pack.
+     vec_type : VectorTypes
+         The type of the elements in the vector.
+         Can be one of 'f32', 'f64', 'i8', 'i16', 'i32', or 'i64'.
+         Default is 'f32'.
+
+     Returns
+     -------
+     bytes
+         The packed vector.
+
+     """
+     if isinstance(obj, (list, tuple)):
+         return struct.pack(_vector_type_to_struct_format(obj, vec_type), *obj)
 
      if is_numpy(obj):
-         import numpy as np
-         return np.array([np.frombuffer(x, dtype=np_type) for x in obj])
+         return obj.tobytes()
 
      if is_pandas_series(obj):
-         import numpy as np
          import pandas as pd
-         return pd.Series([np.frombuffer(x, dtype=np_type) for x in obj])
+         return pd.Series(obj).to_numpy().tobytes()
 
      if is_polars_series(obj):
-         import numpy as np
          import polars as pl
-         return pl.Series([np.frombuffer(x, dtype=np_type) for x in obj])
+         return pl.Series(obj).to_numpy().tobytes()
 
      if is_pyarrow_array(obj):
-         import numpy as np
          import pyarrow as pa
-         return pa.array([np.frombuffer(x, dtype=np_type) for x in obj])
+         return pa.array(obj).to_numpy().tobytes()
 
      raise ValueError(
          f'unsupported object type: {type(obj)}',
      )
 
 
- def pack_vector(
-     obj: Any,
-     element_type: VectorTypes = VectorTypes.F32,
- ) -> bytes:
+ def unpack_vectors(
+     arr_of_vec: Any,
+     vec_type: VectorTypes = VectorTypes.F32,
+ ) -> Iterable[Any]:
      """
-     Pack a vector into bytes.
+     Unpack a vector from an array of bytes.
 
      Parameters
      ----------
-     obj : Any
-         The object to pack.
-     element_type : VectorTypes
+     arr_of_vec : Iterable[Any]
+         The array of bytes to unpack.
+     vec_type : VectorTypes
          The type of the elements in the vector.
          Can be one of 'f32', 'f64', 'i8', 'i16', 'i32', or 'i64'.
          Default is 'f32'.
 
      Returns
      -------
-     bytes
-         The packed vector.
+     Iterable[Any]
+         The unpacked vector.
 
      """
-     if element_type == 'f32':
-         fmt = 'f'
-     elif element_type == 'f64':
-         fmt = 'd'
-     elif element_type == 'i8':
-         fmt = 'b'
-     elif element_type == 'i16':
-         fmt = 'h'
-     elif element_type == 'i32':
-         fmt = 'i'
-     elif element_type == 'i64':
-         fmt = 'q'
-     else:
-         raise ValueError(f'unsupported element type: {element_type}')
+     if isinstance(arr_of_vec, (list, tuple)):
+         return [unpack_vector(x, vec_type) for x in arr_of_vec]
 
-     if isinstance(obj, (list, tuple)):
-         return struct.pack(f'<{len(obj)}{fmt}', *obj)
+     import numpy as np
 
-     elif is_numpy(obj):
-         return obj.tobytes()
+     dtype = _vector_type_to_numpy_type(vec_type)
 
-     elif is_pandas_series(obj):
-         # TODO: Nested vectors
+     np_arr = np.array(
+         [np.frombuffer(x, dtype=dtype) for x in arr_of_vec],
+         dtype=dtype,
+     )
+
+     if is_numpy(arr_of_vec):
+         return np_arr
+
+     if is_pandas_series(arr_of_vec):
          import pandas as pd
-         return pd.Series(obj).to_numpy().tobytes()
+         return pd.Series(np_arr)
 
-     elif is_polars_series(obj):
-         # TODO: Nested vectors
+     if is_polars_series(arr_of_vec):
          import polars as pl
-         return pl.Series(obj).to_numpy().tobytes()
+         return pl.Series(np_arr)
 
-     elif is_pyarrow_array(obj):
-         # TODO: Nested vectors
+     if is_pyarrow_array(arr_of_vec):
          import pyarrow as pa
-         return pa.array(obj).to_numpy().tobytes()
+         return pa.array(np_arr)
 
      raise ValueError(
-         f'unsupported object type: {type(obj)}',
+         f'unsupported object type: {type(arr_of_vec)}',
+     )
+
+
+ def pack_vectors(
+     arr_of_arr: Iterable[Any],
+     vec_type: VectorTypes = VectorTypes.F32,
+ ) -> Iterable[Any]:
+     """
+     Pack a vector into an array of bytes.
+
+     Parameters
+     ----------
+     arr_of_arr : Iterable[Any]
+         The array of bytes to pack.
+     vec_type : VectorTypes
+         The type of the elements in the vector.
+         Can be one of 'f32', 'f64', 'i8', 'i16', 'i32', or 'i64'.
+         Default is 'f32'.
+
+     Returns
+     -------
+     Iterable[Any]
+         The array of packed vectors.
+
+     """
+     if isinstance(arr_of_arr, (list, tuple)):
+         if not arr_of_arr:
+             return []
+         fmt = _vector_type_to_struct_format(arr_of_arr[0], vec_type)
+         return [struct.pack(fmt, x) for x in arr_of_arr]
+
+     import numpy as np
+
+     # Use object type because numpy truncates nulls at the end of fixed binary
+     np_arr = np.array([x.tobytes() for x in arr_of_arr], dtype=np.object_)
+
+     if is_numpy(arr_of_arr):
+         return np_arr
+
+     if is_pandas_series(arr_of_arr):
+         import pandas as pd
+         return pd.Series(np_arr)
+
+     if is_polars_series(arr_of_arr):
+         import polars as pl
+         return pl.Series(np_arr)
+
+     if is_pyarrow_array(arr_of_arr):
+         import pyarrow as pa
+         return pa.array(np_arr)
+
+     raise ValueError(
+         f'unsupported object type: {type(arr_of_arr)}',
      )
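
A small round trip with the helpers added above, which are now re-exported from singlestoredb.functions (see the functions/__init__.py hunk earlier). pack_vector on a plain list derives a little-endian struct format from the element count, and unpack_vector reverses it on the raw bytes; the values here are illustrative.

    from singlestoredb.functions import pack_vector, unpack_vector, VectorTypes

    blob = pack_vector([1.0, 2.0, 3.0], VectorTypes.F32)   # 12 bytes, format '<3f'
    vec = unpack_vector(blob, VectorTypes.F32)             # (1.0, 2.0, 3.0)
    assert vec == (1.0, 2.0, 3.0)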

singlestoredb/management/inference_api.py ADDED
@@ -0,0 +1,101 @@
+ #!/usr/bin/env python
+ """SingleStoreDB Cloud Inference API."""
+ import os
+ from typing import Any
+ from typing import Dict
+ from typing import Optional
+
+ from .utils import vars_to_str
+ from singlestoredb.exceptions import ManagementError
+ from singlestoredb.management.manager import Manager
+
+
+ class InferenceAPIInfo(object):
+     """
+     Inference API definition.
+
+     This object is not directly instantiated. It is used in results
+     of API calls on the :class:`InferenceAPIManager`. See :meth:`InferenceAPIManager.get`.
+     """
+
+     service_id: str
+     model_name: str
+     name: str
+     connection_url: str
+     project_id: str
+
+     def __init__(
+         self,
+         service_id: str,
+         model_name: str,
+         name: str,
+         connection_url: str,
+         project_id: str,
+     ):
+         self.service_id = service_id
+         self.connection_url = connection_url
+         self.model_name = model_name
+         self.name = name
+         self.project_id = project_id
+
+     @classmethod
+     def from_dict(
+         cls,
+         obj: Dict[str, Any],
+     ) -> 'InferenceAPIInfo':
+         """
+         Construct a Inference API from a dictionary of values.
+
+         Parameters
+         ----------
+         obj : dict
+             Dictionary of values
+
+         Returns
+         -------
+         :class:`Job`
+
+         """
+         out = cls(
+             service_id=obj['serviceID'],
+             project_id=obj['projectID'],
+             model_name=obj['modelName'],
+             name=obj['name'],
+             connection_url=obj['connectionURL'],
+         )
+         return out
+
+     def __str__(self) -> str:
+         """Return string representation."""
+         return vars_to_str(self)
+
+     def __repr__(self) -> str:
+         """Return string representation."""
+         return str(self)
+
+
+ class InferenceAPIManager(object):
+     """
+     SingleStoreDB Inference APIs manager.
+
+     This class should be instantiated using :attr:`Organization.inference_apis`.
+
+     Parameters
+     ----------
+     manager : InferenceAPIManager, optional
+         The InferenceAPIManager the InferenceAPIManager belongs to
+
+     See Also
+     --------
+     :attr:`InferenceAPI`
+     """
+
+     def __init__(self, manager: Optional[Manager]):
+         self._manager = manager
+         self.project_id = os.environ.get('SINGLESTOREDB_PROJECT')
+
+     def get(self, model_name: str) -> InferenceAPIInfo:
+         if self._manager is None:
+             raise ManagementError(msg='Manager not initialized')
+         res = self._manager._get(f'inferenceapis/{self.project_id}/{model_name}').json()
+         return InferenceAPIInfo.from_dict(res)

singlestoredb/management/organization.py CHANGED
@@ -7,6 +7,7 @@ from typing import Optional
  from typing import Union
 
  from ..exceptions import ManagementError
+ from .inference_api import InferenceAPIManager
  from .job import JobsManager
  from .manager import Manager
  from .utils import vars_to_str
@@ -207,3 +208,19 @@ class Organization(object):
          :class:`JobsManager`
          """
          return JobsManager(self._manager)
+
+     @property
+     def inference_apis(self) -> InferenceAPIManager:
+         """
+         Retrieve a SingleStoreDB inference api manager.
+
+         Parameters
+         ----------
+         manager : WorkspaceManager, optional
+             The WorkspaceManager the InferenceAPIManager belongs to
+
+         Returns
+         -------
+         :class:`InferenceAPIManager`
+         """
+         return InferenceAPIManager(self._manager)
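
The property above is what the new SingleStoreChatOpenAI and SingleStoreEmbeddings classes rely on. A hedged sketch of resolving an endpoint directly, mirroring the chat.py code; the model name is a placeholder and a configured Fusion workspace manager is assumed.

    from singlestoredb.fusion.handlers.utils import get_workspace_manager

    apis = get_workspace_manager().organizations.current.inference_apis
    info = apis.get(model_name='my-hosted-model')   # returns an InferenceAPIInfo
    print(info.connection_url, info.service_id)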

singlestoredb/tests/test_management.py CHANGED
@@ -35,7 +35,7 @@ class TestCluster(unittest.TestCase):
          cls.manager = s2.manage_cluster()
 
          us_regions = [x for x in cls.manager.regions if 'US' in x.name]
-         cls.password = secrets.token_urlsafe(20) + '-x&'
+         cls.password = secrets.token_urlsafe(20) + '-x&$'
 
          cls.cluster = cls.manager.create_cluster(
              clean_name('cm-test-{}'.format(secrets.token_urlsafe(20)[:20])),
@@ -201,7 +201,7 @@ class TestWorkspace(unittest.TestCase):
          cls.manager = s2.manage_workspaces()
 
          us_regions = [x for x in cls.manager.regions if 'US' in x.name]
-         cls.password = secrets.token_urlsafe(20)
+         cls.password = secrets.token_urlsafe(20) + '-x&$'
 
          name = clean_name(secrets.token_urlsafe(20)[:20])
 
@@ -375,7 +375,7 @@ class TestStage(unittest.TestCase):
          cls.manager = s2.manage_workspaces()
 
          us_regions = [x for x in cls.manager.regions if 'US' in x.name]
-         cls.password = secrets.token_urlsafe(20)
+         cls.password = secrets.token_urlsafe(20) + '-x&$'
 
          name = clean_name(secrets.token_urlsafe(20)[:20])
 
@@ -839,7 +839,7 @@ class TestSecrets(unittest.TestCase):
          cls.manager = s2.manage_workspaces()
 
          us_regions = [x for x in cls.manager.regions if 'US' in x.name]
-         cls.password = secrets.token_urlsafe(20)
+         cls.password = secrets.token_urlsafe(20) + '-x&$'
 
          name = clean_name(secrets.token_urlsafe(20)[:20])
 
@@ -898,7 +898,7 @@ class TestJob(unittest.TestCase):
          cls.manager = s2.manage_workspaces()
 
          us_regions = [x for x in cls.manager.regions if 'US' in x.name]
-         cls.password = secrets.token_urlsafe(20)
+         cls.password = secrets.token_urlsafe(20) + '-x&$'
 
          name = clean_name(secrets.token_urlsafe(20)[:20])
 

@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: singlestoredb
- Version: 1.13.0
+ Version: 1.13.1
  Summary: Interface to the SingleStoreDB database and workspace management APIs
  Home-page: https://github.com/singlestore-labs/singlestoredb-python
  Author: SingleStore

@@ -1,15 +1,15 @@
- _singlestoredb_accel.abi3.so,sha256=BJqI1UzqOlV6c18HLVPajUchi1XGtHywpqswbq2EI0A,207216
- singlestoredb-1.13.0.dist-info/RECORD,,
- singlestoredb-1.13.0.dist-info/LICENSE,sha256=Mlq78idURT-9G026aMYswwwnnrLcgzTLuXeAs5hjDLM,11341
- singlestoredb-1.13.0.dist-info/WHEEL,sha256=_VEguvlLpUd-c8RbFMA4yMIVNMBv2LhpxYLCEQ-Bogk,113
- singlestoredb-1.13.0.dist-info/entry_points.txt,sha256=bSLaTWB5zGjpVYPAaI46MkkDup0su-eb3uAhCNYuRV0,48
- singlestoredb-1.13.0.dist-info/top_level.txt,sha256=lA65Vf4qAMfg_s1oG3LEO90h4t1Z-SPDbRqkevI3bSY,40
- singlestoredb-1.13.0.dist-info/METADATA,sha256=AHg5zGAz1nwnk8Oif-_p2kIh_3hb58w360X_45lYtlQ,5704
+ _singlestoredb_accel.abi3.so,sha256=IsoVyW-ePKT060IlNlsevh53uaNnUeubSIg5m1Gdzuw,207216
+ singlestoredb-1.13.1.dist-info/RECORD,,
+ singlestoredb-1.13.1.dist-info/LICENSE,sha256=Mlq78idURT-9G026aMYswwwnnrLcgzTLuXeAs5hjDLM,11341
+ singlestoredb-1.13.1.dist-info/WHEEL,sha256=_VEguvlLpUd-c8RbFMA4yMIVNMBv2LhpxYLCEQ-Bogk,113
+ singlestoredb-1.13.1.dist-info/entry_points.txt,sha256=bSLaTWB5zGjpVYPAaI46MkkDup0su-eb3uAhCNYuRV0,48
+ singlestoredb-1.13.1.dist-info/top_level.txt,sha256=lA65Vf4qAMfg_s1oG3LEO90h4t1Z-SPDbRqkevI3bSY,40
+ singlestoredb-1.13.1.dist-info/METADATA,sha256=ltU2QTTsEEUm2bU_Z3IIZEhUUvT2CfHNCFMAWLbLAOI,5704
  sqlx/magic.py,sha256=JsS9_9aBFaOt91Torm1JPN0c8qB2QmYJmNSKtbSQIY0,3509
  sqlx/__init__.py,sha256=aBYiU8DZXCogvWu3yWafOz7bZS5WWwLZXj7oL0dXGyU,85
  singlestoredb/auth.py,sha256=u8D9tpKzrqa4ssaHjyZnGDX1q8XBpGtuoOkTkSv7B28,7599
  singlestoredb/config.py,sha256=dayUWwSy2YdgmhF8tzH-7FwFpwon5bgX_VeX-Yu5ia4,12969
- singlestoredb/__init__.py,sha256=pSlk_E6uKKk-ggsyyJ88yB16QB_URGaYVMYIoWO21hU,1649
+ singlestoredb/__init__.py,sha256=TlTiqjO5Ea2FP3Iq8Kmk0zAA2vR6oOj-HjURaUcQKcM,1649
  singlestoredb/types.py,sha256=FIqO1A7e0Gkk7ITmIysBy-P5S--ItbMSlYvblzqGS30,9969
  singlestoredb/connection.py,sha256=0HEpjBZXLqQwOTEfveMkgej1H3Kyof47prIHvJJZtoo,45831
  singlestoredb/pytest.py,sha256=OyF3BO9mgxenifYhOihnzGk8WzCJ_zN5_mxe8XyFPOc,9074
@@ -41,7 +41,7 @@ singlestoredb/tests/test2.ipynb,sha256=yd1PE1VK-DwiRd6mYS4_0cPBtuVkvcDtycvTwD-Yn
  singlestoredb/tests/test_ext_func_data.py,sha256=yTADD93nPxX6_rZXXLZaOWEI_yPvYyir9psn5PK9ctU,47695
  singlestoredb/tests/test_exceptions.py,sha256=tfr_8X2w1UmG4nkSBzWGB0C7ehrf1GAVgj6_ODaG-TM,1131
  singlestoredb/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- singlestoredb/tests/test_management.py,sha256=9WKaFChAWkKMm0kDhwTgjaJIAcga7mQZiF0x9BNI1Tc,45112
+ singlestoredb/tests/test_management.py,sha256=6evsyQWA-lOKMehJi8xvjp0udm85EHBHuZDHwQEzxPg,45149
  singlestoredb/tests/test_udf.py,sha256=Kb7-oJpnN6MTT3aE5V5dry_r5ze0EwaAIJeh_zR3l0I,28844
  singlestoredb/tests/test_http.py,sha256=RXasTqBWRn__omj0eLFTJYIbZjd0PPdIV2d4Cqz0MC8,8580
  singlestoredb/tests/utils.py,sha256=2A2tEdD3t8aXWUnHtAIcFlWrflsz2MlMcCbUDaAG29c,4995
@@ -58,13 +58,14 @@ singlestoredb/magics/__init__.py,sha256=lZjkT3Webo9c1EQAzlRCRh6B2pckQH8uvNrrB__a
  singlestoredb/magics/run_shared.py,sha256=SI8dCBRMaGn-xZU7dto4jsAqKBi-Ll14htUsMUSBpJM,1752
  singlestoredb/magics/run_personal.py,sha256=2f7u1T7iblxGzZurHNgNXLrPBvsvPADZKo_RD_IjYuE,1844
  singlestoredb/management/files.py,sha256=89IhpGw9WdwxVeksavHEDMVn9wb_jxb-utZuIDqkLHw,30477
- singlestoredb/management/organization.py,sha256=hqMaM7H-naMjNbxDl_f7G_2o5TkiGKyzPhxuzDveJAw,5402
+ singlestoredb/management/organization.py,sha256=_JvW0Znu5emR5uYGVEcZvakQqftNb_vRhzmkOoPRPfc,5869
  singlestoredb/management/job.py,sha256=4-xLWzbE8odQogVVaFer80UEoTAZY1T28VZ9Ug4rbmM,24611
  singlestoredb/management/region.py,sha256=HnLcWUh7r_aLECliplCDHak4a_F3B7LOSXEYMW66qD0,1611
  singlestoredb/management/__init__.py,sha256=ofNTPCdkZ1dS_aX2aUujd8aMHQi8Lle5Ced0aaO3RH4,269
  singlestoredb/management/export.py,sha256=jJCe25ecH_LzKSDc7vS1-5DQaWFrZipeawLPpArByJE,5108
  singlestoredb/management/utils.py,sha256=QIhZCZSRaDbAG35xu1_n7ihmRXON8swc-gEK2FGYutI,13203
  singlestoredb/management/cluster.py,sha256=h75grXSxq4Anr4RxwKxcZW4TkWJ4bFg_ql5iRWCNLdQ,14405
+ singlestoredb/management/inference_api.py,sha256=L6eFqaUaPugF_cmrZ4xlArj8CIv25vWqQs1vwgKPEF4,2583
  singlestoredb/management/workspace.py,sha256=ze-eE-cO3JCrR3uttVFaBOndDbEE8_qWR2kzOjzbKaY,56234
  singlestoredb/management/manager.py,sha256=V9_PVMpUOj8laKwNFtp4Nd2Taww2Y65TeSRK5ZWzOo0,8922
  singlestoredb/management/billing_usage.py,sha256=9ighjIpcopgIyJOktBYQ6pahBZmWGHOPyyCW4gu9FGs,3735
@@ -82,8 +83,9 @@ singlestoredb/utils/debug.py,sha256=0JiLA37u_9CKiDGiN9BK_PtFMUku3vIcNjERWaTNRSU,
  singlestoredb/utils/mogrify.py,sha256=-a56IF70U6CkfadeaZgfjRSVsAD3PuqRrzPpjZlgbwY,4050
  singlestoredb/http/__init__.py,sha256=A_2ZUCCpvRYIA6YDpPy57wL5R1eZ5SfP6I1To5nfJ2s,912
  singlestoredb/http/connection.py,sha256=EgE2m_nxisGPo6YV3AJd-RRafdT0f70HRbIo1ONQ668,39893
- singlestoredb/ai/__init__.py,sha256=7Pubobzx5OlyepNo5DOOxWev1DUW9WFc9P6Qver2xpY,60
- singlestoredb/ai/embeddings.py,sha256=3jghE4WMf7vy8RobhrMOLvMLnDNGbkPCF48B3fGM38U,746
+ singlestoredb/ai/__init__.py,sha256=-uNcq-bY-AiWhZ5Plq2ZXtfIVL4PaifMJsJf58rdN8I,114
+ singlestoredb/ai/chat.py,sha256=8OSBZJ3J2zOlVXzJ_sHSAAyu5E6sy7jqqiNeFhtmjOI,802
+ singlestoredb/ai/embeddings.py,sha256=X3g0sJNDVOzXzZwoXz3M3ch-IERQXNkHxuH4cj125I8,815
  singlestoredb/mysql/protocol.py,sha256=2GG8qTXy5npqo7z2D2K5T0S8PtoUOS-hFDEXy8VConw,14451
  singlestoredb/mysql/cursors.py,sha256=aOLfHkj83aYZPOVuhJPkZ83CWByszIBRynq0fqSaWvY,27046
  singlestoredb/mysql/__init__.py,sha256=olUTAvkiERhDW41JXQMawkg-i0tvBEkoTkII1tt6lxU,4492
@@ -126,12 +128,12 @@ singlestoredb/mysql/tests/thirdparty/test_MySQLdb/test_MySQLdb_dbapi20.py,sha256
  singlestoredb/mysql/tests/thirdparty/test_MySQLdb/capabilities.py,sha256=AgEdvx7Njz_Y7KDMeQPMYI7y4nJRKblocVrC0VxVZZE,10171
  singlestoredb/mysql/tests/thirdparty/test_MySQLdb/test_MySQLdb_capabilities.py,sha256=szE4Zodgf7YwhkMBOrCvUwhTWppVtaodsqlV-vJ7fmY,3090
  singlestoredb/functions/decorator.py,sha256=gylwivCwpNMCUmgBEUEYf2ogIpInNhu6IFeTU82W7Ko,5433
- singlestoredb/functions/__init__.py,sha256=tB7KVTtjsx0mdnxb0NX2QOqB2FkEnmrXMMcsAjlVW6c,285
+ singlestoredb/functions/__init__.py,sha256=I2GnxOhLb4_7xhgOxdIwmwD5NiK7QYPYaE3PUIX-7xk,471
  singlestoredb/functions/dtypes.py,sha256=DgJaNXouJ2t-qIqDiQlUYU9IhkXXUTigWeE_MAcmvHM,39814
- singlestoredb/functions/utils.py,sha256=-Arg-bw__7gnREj-i9tmILctTpzkjIdCWYGD1UwF7dM,8886
+ singlestoredb/functions/utils.py,sha256=1L0Phgzq0XdWK3ecfOOydq4zV955yCwpDoAaCYRGldk,10769
  singlestoredb/functions/typing.py,sha256=gT_Sz5YH-L-9WeIHwWYMEx-hUCZqis7ec5Ipk3JXpnM,1339
  singlestoredb/functions/signature.py,sha256=avErza5t3p0vy94p4yjw7Hy2cCDvjolwCyYjEI0PKXM,42481
- singlestoredb/functions/ext/asgi.py,sha256=iCr_0n6Yr2adVVoW6fGSzGUp3q4Dh1qLI1nb5ooys0I,50591
+ singlestoredb/functions/ext/asgi.py,sha256=CSjGB8YnBYf0Ca4qMjl25AG1ExBmmnzxKTFAogi_mDc,51874
  singlestoredb/functions/ext/arrow.py,sha256=WB7n1ACslyd8nlbFzUvlbxn1BVuEjA9-BGBEqCWlSOo,9061
  singlestoredb/functions/ext/__init__.py,sha256=1oLL20yLB1GL9IbFiZD8OReDqiCpFr-yetIR6x1cNkI,23
  singlestoredb/functions/ext/utils.py,sha256=2-B8YU_Iekv8JcpI-ochs9TIeuyatLaLAH-AyYyUUIg,5311