singlestoredb 1.12.4-cp38-abi3-win32.whl → 1.13.0-cp38-abi3-win32.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.

Files changed (30)
  1. _singlestoredb_accel.pyd +0 -0
  2. singlestoredb/__init__.py +1 -1
  3. singlestoredb/apps/__init__.py +1 -0
  4. singlestoredb/apps/_config.py +6 -0
  5. singlestoredb/apps/_connection_info.py +8 -0
  6. singlestoredb/apps/_python_udfs.py +85 -0
  7. singlestoredb/config.py +14 -2
  8. singlestoredb/functions/__init__.py +11 -1
  9. singlestoredb/functions/decorator.py +102 -252
  10. singlestoredb/functions/dtypes.py +545 -198
  11. singlestoredb/functions/ext/asgi.py +288 -90
  12. singlestoredb/functions/ext/json.py +29 -36
  13. singlestoredb/functions/ext/mmap.py +1 -1
  14. singlestoredb/functions/ext/rowdat_1.py +50 -70
  15. singlestoredb/functions/signature.py +816 -144
  16. singlestoredb/functions/typing.py +41 -0
  17. singlestoredb/functions/utils.py +342 -0
  18. singlestoredb/http/connection.py +3 -1
  19. singlestoredb/management/manager.py +6 -1
  20. singlestoredb/management/utils.py +2 -2
  21. singlestoredb/tests/ext_funcs/__init__.py +476 -237
  22. singlestoredb/tests/test_ext_func.py +192 -3
  23. singlestoredb/tests/test_udf.py +101 -131
  24. singlestoredb/tests/test_udf_returns.py +459 -0
  25. {singlestoredb-1.12.4.dist-info → singlestoredb-1.13.0.dist-info}/METADATA +2 -1
  26. {singlestoredb-1.12.4.dist-info → singlestoredb-1.13.0.dist-info}/RECORD +30 -26
  27. {singlestoredb-1.12.4.dist-info → singlestoredb-1.13.0.dist-info}/LICENSE +0 -0
  28. {singlestoredb-1.12.4.dist-info → singlestoredb-1.13.0.dist-info}/WHEEL +0 -0
  29. {singlestoredb-1.12.4.dist-info → singlestoredb-1.13.0.dist-info}/entry_points.txt +0 -0
  30. {singlestoredb-1.12.4.dist-info → singlestoredb-1.13.0.dist-info}/top_level.txt +0 -0
_singlestoredb_accel.pyd CHANGED
Binary file
singlestoredb/__init__.py CHANGED
@@ -13,7 +13,7 @@ Examples
 
  """
 
- __version__ = '1.12.4'
+ __version__ = '1.13.0'
 
  from typing import Any
 
singlestoredb/apps/__init__.py CHANGED
@@ -1,2 +1,3 @@
  from ._cloud_functions import run_function_app # noqa: F401
  from ._dashboards import run_dashboard_app # noqa: F401
+ from ._python_udfs import run_udf_app # noqa: F401
singlestoredb/apps/_config.py CHANGED
@@ -8,10 +8,12 @@ class AppConfig:
      listen_port: int
      base_url: str
      base_path: str
+     notebook_server_id: str
      app_token: Optional[str]
      user_token: Optional[str]
      running_interactively: bool
      is_gateway_enabled: bool
+     is_local_dev: bool
 
      @staticmethod
      def _read_variable(name: str) -> str:
@@ -28,6 +30,8 @@ class AppConfig:
          port = cls._read_variable('SINGLESTOREDB_APP_LISTEN_PORT')
          base_url = cls._read_variable('SINGLESTOREDB_APP_BASE_URL')
          base_path = cls._read_variable('SINGLESTOREDB_APP_BASE_PATH')
+         notebook_server_id = cls._read_variable('SINGLESTOREDB_NOTEBOOK_SERVER_ID')
+         is_local_dev_env_var = cls._read_variable('SINGLESTOREDB_IS_LOCAL_DEV')
 
          workload_type = os.environ.get('SINGLESTOREDB_WORKLOAD_TYPE')
          running_interactively = workload_type == 'InteractiveNotebook'
@@ -49,10 +53,12 @@ class AppConfig:
              listen_port=int(port),
              base_url=base_url,
              base_path=base_path,
+             notebook_server_id=notebook_server_id,
              app_token=app_token,
              user_token=user_token,
              running_interactively=running_interactively,
              is_gateway_enabled=is_gateway_enabled,
+             is_local_dev=is_local_dev_env_var == 'true',
          )
 
      @property
singlestoredb/apps/_connection_info.py CHANGED
@@ -1,4 +1,6 @@
  from dataclasses import dataclass
+ from typing import Any
+ from typing import Dict
  from typing import Optional
 
 
@@ -8,3 +10,9 @@ class ConnectionInfo:
 
      # Only present in interactive mode
      token: Optional[str]
+
+
+ @dataclass
+ class UdfConnectionInfo:
+     url: str
+     functions: Dict[str, Any]
singlestoredb/apps/_python_udfs.py ADDED
@@ -0,0 +1,85 @@
+ import asyncio
+ import os
+ import typing
+
+ from ..functions.ext.asgi import Application
+ from ._config import AppConfig
+ from ._connection_info import UdfConnectionInfo
+ from ._process import kill_process_by_port
+
+ if typing.TYPE_CHECKING:
+     from ._uvicorn_util import AwaitableUvicornServer
+
+ # Keep track of currently running server
+ _running_server: 'typing.Optional[AwaitableUvicornServer]' = None
+
+
+ async def run_udf_app(
+     replace_existing: bool,
+     log_level: str = 'error',
+     kill_existing_app_server: bool = True,
+ ) -> UdfConnectionInfo:
+     global _running_server
+     from ._uvicorn_util import AwaitableUvicornServer
+
+     try:
+         import uvicorn
+     except ImportError:
+         raise ImportError('package uvicorn is required to run python udfs')
+
+     app_config = AppConfig.from_env()
+
+     if kill_existing_app_server:
+         # Shutdown the server gracefully if it was started by us.
+         # Since the uvicorn server doesn't start a new subprocess
+         # killing the process would result in kernel dying.
+         if _running_server is not None:
+             await _running_server.shutdown()
+             _running_server = None
+
+         # Kill if any other process is occupying the port
+         kill_process_by_port(app_config.listen_port)
+
+     base_url = generate_base_url(app_config)
+
+     udf_suffix = ''
+     if app_config.running_interactively:
+         udf_suffix = '_test'
+     app = Application(url=base_url, app_mode='managed', name_suffix=udf_suffix)
+
+     config = uvicorn.Config(
+         app,
+         host='0.0.0.0',
+         port=app_config.listen_port,
+         log_level=log_level,
+     )
+     _running_server = AwaitableUvicornServer(config)
+
+     # Register the functions
+     app.register_functions(replace=replace_existing)
+
+     asyncio.create_task(_running_server.serve())
+     await _running_server.wait_for_startup()
+
+     print(f'Python UDF registered at {base_url}')
+
+     return UdfConnectionInfo(base_url, app.get_function_info())
+
+
+ def generate_base_url(app_config: AppConfig) -> str:
+     if not app_config.is_gateway_enabled:
+         raise RuntimeError('Python UDFs are not available if Nova Gateway is not enabled')
+
+     if not app_config.running_interactively:
+         return app_config.base_url
+
+     # generate python udf endpoint for interactive notebooks
+     gateway_url = os.environ.get('SINGLESTOREDB_NOVA_GATEWAY_ENDPOINT')
+     if app_config.is_local_dev:
+         gateway_url = os.environ.get('SINGLESTOREDB_NOVA_GATEWAY_DEV_ENDPOINT')
+         if gateway_url is None:
+             raise RuntimeError(
+                 'Missing SINGLESTOREDB_NOVA_GATEWAY_DEV_ENDPOINT environment variable.',
+             )
+
+     return f'{gateway_url}/pythonudfs/{app_config.notebook_server_id}/interactive/'
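For orientation, a notebook-style call to this new entry point might look like the sketch below. The function name, its parameters, and the UdfConnectionInfo fields are taken from the code above; the surrounding environment (a managed notebook where AppConfig.from_env() can read the SINGLESTOREDB_APP_* variables, with uvicorn installed) is assumed.

    # Hypothetical usage of run_udf_app(); illustrative only, not part of the diff.
    import asyncio

    from singlestoredb.apps import run_udf_app


    async def main() -> None:
        # Replace any functions previously registered under the same names.
        info = await run_udf_app(replace_existing=True)
        print(info.url)        # base URL the UDF endpoint is served from
        print(info.functions)  # metadata for the registered functions


    asyncio.run(main())  # in a notebook you would typically just await run_udf_app(...)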
singlestoredb/config.py CHANGED
@@ -317,7 +317,7 @@ register_option(
      'external_function.app_mode', 'string',
      functools.partial(
          check_str,
-         valid_values=['remote', 'collocated'],
+         valid_values=['remote', 'collocated', 'managed'],
      ),
      'remote',
      'Specifies the mode of operation of the external function application.',
@@ -407,6 +407,18 @@ register_option(
      environ=['SINGLESTOREDB_EXT_FUNC_LOG_LEVEL'],
  )
 
+ register_option(
+     'external_function.name_prefix', 'string', check_str, '',
+     'Prefix to add to external function names.',
+     environ=['SINGLESTOREDB_EXT_FUNC_NAME_PREFIX'],
+ )
+
+ register_option(
+     'external_function.name_suffix', 'string', check_str, '',
+     'Suffix to add to external function names.',
+     environ=['SINGLESTOREDB_EXT_FUNC_NAME_SUFFIX'],
+ )
+
  register_option(
      'external_function.connection', 'string', check_str,
      os.environ.get('SINGLESTOREDB_URL') or None,
@@ -415,7 +427,7 @@ register_option(
  )
 
  register_option(
-     'external_function.host', 'string', check_str, '127.0.0.1',
+     'external_function.host', 'string', check_str, 'localhost',
      'Specifies the host to bind the server to.',
      environ=['SINGLESTOREDB_EXT_FUNC_HOST'],
  )
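The two new options and the changed host default can be exercised through the environment variables declared in the register_option() calls above. A minimal sketch follows; the values are placeholders, and whether your deployment reads these variables at startup is an assumption.

    # Illustrative only: option names and environment variables come from the
    # register_option() calls above; the values here are made up.
    import os

    # New in 1.13.0: optional prefix/suffix applied to external function names.
    os.environ['SINGLESTOREDB_EXT_FUNC_NAME_PREFIX'] = 'dev_'
    os.environ['SINGLESTOREDB_EXT_FUNC_NAME_SUFFIX'] = '_v2'

    # The default for external_function.host changed from '127.0.0.1' to
    # 'localhost'; set it explicitly if you depended on the old default.
    os.environ['SINGLESTOREDB_EXT_FUNC_HOST'] = '127.0.0.1'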
singlestoredb/functions/__init__.py CHANGED
@@ -1,2 +1,12 @@
- from .decorator import tvf # noqa: F401
  from .decorator import udf # noqa: F401
+ from .typing import Masked # noqa: F401
+ from .typing import Table # noqa: F401
+ from .utils import VectorTypes
+
+
+ F32 = VectorTypes.F32
+ F64 = VectorTypes.F64
+ I8 = VectorTypes.I8
+ I16 = VectorTypes.I16
+ I32 = VectorTypes.I32
+ I64 = VectorTypes.I64
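Taken at face value, the new public surface of singlestoredb.functions can be imported as below. Only the names come from the hunk above (note that tvf is no longer exported); how Table is used for table-valued functions is described in the decorator.py docstring further down.

    # Names exported by the new singlestoredb/functions/__init__.py; `tvf` is gone.
    from singlestoredb.functions import udf, Masked, Table
    from singlestoredb.functions import F32, F64, I8, I16, I32, I64  # VectorTypes aliases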
singlestoredb/functions/decorator.py CHANGED
@@ -1,183 +1,113 @@
- import dataclasses
- import datetime
  import functools
  import inspect
  from typing import Any
  from typing import Callable
- from typing import Dict
  from typing import List
  from typing import Optional
- from typing import Tuple
+ from typing import Type
  from typing import Union
 
- from . import dtypes
- from .dtypes import DataType
- from .signature import simplify_dtype
-
- try:
-     import pydantic
-     has_pydantic = True
- except ImportError:
-     has_pydantic = False
-
- python_type_map: Dict[Any, Callable[..., str]] = {
-     str: dtypes.TEXT,
-     int: dtypes.BIGINT,
-     float: dtypes.DOUBLE,
-     bool: dtypes.BOOL,
-     bytes: dtypes.BINARY,
-     bytearray: dtypes.BINARY,
-     datetime.datetime: dtypes.DATETIME,
-     datetime.date: dtypes.DATE,
-     datetime.timedelta: dtypes.TIME,
- }
-
-
- def listify(x: Any) -> List[Any]:
-     """Make sure sure value is a list."""
-     if x is None:
-         return []
-     if isinstance(x, (list, tuple, set)):
-         return list(x)
-     return [x]
-
-
- def process_annotation(annotation: Any) -> Tuple[Any, bool]:
-     types = simplify_dtype(annotation)
-     if isinstance(types, list):
-         nullable = False
-         if type(None) in types:
-             nullable = True
-             types = [x for x in types if x is not type(None)]
-         if len(types) > 1:
-             raise ValueError(f'multiple types not supported: {annotation}')
-         return types[0], nullable
-     return types, True
-
-
- def process_types(params: Any) -> Any:
-     if params is None:
-         return params, []
-
-     elif isinstance(params, (list, tuple)):
-         params = list(params)
-         for i, item in enumerate(params):
-             if params[i] in python_type_map:
-                 params[i] = python_type_map[params[i]]()
-             elif callable(item):
-                 params[i] = item()
-         for item in params:
-             if not isinstance(item, str):
-                 raise TypeError(f'unrecognized type for parameter: {item}')
-         return params, []
-
-     elif isinstance(params, dict):
-         names = []
-         params = dict(params)
-         for k, v in list(params.items()):
-             names.append(k)
-             if params[k] in python_type_map:
-                 params[k] = python_type_map[params[k]]()
-             elif callable(v):
-                 params[k] = v()
-         for item in params.values():
-             if not isinstance(item, str):
-                 raise TypeError(f'unrecognized type for parameter: {item}')
-         return params, names
-
-     elif dataclasses.is_dataclass(params):
-         names = []
-         out = []
-         for item in dataclasses.fields(params):
-             typ, nullable = process_annotation(item.type)
-             sql_type = process_types(typ)[0]
-             if not nullable:
-                 sql_type = sql_type.replace('NULL', 'NOT NULL')
-             out.append(sql_type)
-             names.append(item.name)
-         return out, names
-
-     elif has_pydantic and inspect.isclass(params) \
-             and issubclass(params, pydantic.BaseModel):
-         names = []
-         out = []
-         for name, item in params.model_fields.items():
-             typ, nullable = process_annotation(item.annotation)
-             sql_type = process_types(typ)[0]
-             if not nullable:
-                 sql_type = sql_type.replace('NULL', 'NOT NULL')
-             out.append(sql_type)
-             names.append(name)
-         return out, names
-
-     elif params in python_type_map:
-         return python_type_map[params](), []
-
-     elif callable(params):
-         return params(), []
-
-     elif isinstance(params, str):
-         return params, []
-
-     raise TypeError(f'unrecognized data type for args: {params}')
+ from . import utils
+ from .dtypes import SQLString
+
+
+ ParameterType = Union[
+     str,
+     Callable[..., SQLString],
+     List[Union[str, Callable[..., SQLString]]],
+     Type[Any],
+ ]
+
+ ReturnType = ParameterType
+
+
+ def is_valid_type(obj: Any) -> bool:
+     """Check if the object is a valid type for a schema definition."""
+     if not inspect.isclass(obj):
+         return False
+
+     if utils.is_typeddict(obj):
+         return True
+
+     if utils.is_namedtuple(obj):
+         return True
+
+     if utils.is_dataclass(obj):
+         return True
+
+     # We don't want to import pydantic here, so we check if
+     # the class is a subclass
+     if utils.is_pydantic(obj):
+         return True
+
+     return False
+
+
+ def is_valid_callable(obj: Any) -> bool:
+     """Check if the object is a valid callable for a parameter type."""
+     if not callable(obj):
+         return False
+
+     returns = utils.get_annotations(obj).get('return', None)
+
+     if inspect.isclass(returns) and issubclass(returns, str):
+         return True
+
+     raise TypeError(
+         f'callable {obj} must return a str, '
+         f'but got {returns}',
+     )
+
+
+ def expand_types(args: Any) -> Optional[Union[List[str], Type[Any]]]:
+     """Expand the types for the function arguments / return values."""
+     if args is None:
+         return None
+
+     # SQL string
+     if isinstance(args, str):
+         return [args]
+
+     # General way of accepting pydantic.BaseModel, NamedTuple, TypedDict
+     elif is_valid_type(args):
+         return args
+
+     # List of SQL strings or callables
+     elif isinstance(args, list):
+         new_args = []
+         for arg in args:
+             if isinstance(arg, str):
+                 new_args.append(arg)
+             elif callable(arg):
+                 new_args.append(arg())
+             else:
+                 raise TypeError(f'unrecognized type for parameter: {arg}')
+         return new_args
+
+     # Callable that returns a SQL string
+     elif is_valid_callable(args):
+         out = args()
+         if not isinstance(out, str):
+             raise TypeError(f'unrecognized type for parameter: {args}')
+         return [out]
+
+     raise TypeError(f'unrecognized type for parameter: {args}')
 
 
  def _func(
      func: Optional[Callable[..., Any]] = None,
      *,
      name: Optional[str] = None,
-     args: Optional[
-         Union[
-             DataType,
-             List[DataType],
-             Dict[str, DataType],
-             'pydantic.BaseModel',
-             type,
-         ]
-     ] = None,
-     returns: Optional[
-         Union[
-             str,
-             List[DataType],
-             List[type],
-             'pydantic.BaseModel',
-             type,
-         ]
-     ] = None,
-     data_format: Optional[str] = None,
-     include_masks: bool = False,
-     function_type: str = 'udf',
-     output_fields: Optional[List[str]] = None,
+     args: Optional[ParameterType] = None,
+     returns: Optional[ReturnType] = None,
  ) -> Callable[..., Any]:
      """Generic wrapper for UDF and TVF decorators."""
-     args, _ = process_types(args)
-     returns, fields = process_types(returns)
-
-     if not output_fields and fields:
-         output_fields = fields
-
-     if isinstance(returns, list) \
-             and isinstance(output_fields, list) \
-             and len(output_fields) != len(returns):
-         raise ValueError(
-             'The number of output fields must match the number of return types',
-         )
-
-     if include_masks and data_format == 'python':
-         raise RuntimeError(
-             'include_masks is only valid when using '
-             'vectors for input parameters',
-         )
 
      _singlestoredb_attrs = { # type: ignore
          k: v for k, v in dict(
              name=name,
-             args=args,
-             returns=returns,
-             data_format=data_format,
-             include_masks=include_masks,
-             function_type=function_type,
-             output_fields=output_fields or None,
+             args=expand_types(args),
+             returns=expand_types(returns),
          ).items() if v is not None
      }
 
@@ -186,10 +116,14 @@ def _func(
      # in at that time.
      if func is None:
          def decorate(func: Callable[..., Any]) -> Callable[..., Any]:
+
              def wrapper(*args: Any, **kwargs: Any) -> Callable[..., Any]:
                  return func(*args, **kwargs) # type: ignore
+
              wrapper._singlestoredb_attrs = _singlestoredb_attrs # type: ignore
+
              return functools.wraps(func)(wrapper)
+
          return decorate
 
      def wrapper(*args: Any, **kwargs: Any) -> Callable[..., Any]:
@@ -204,13 +138,11 @@ def udf(
      func: Optional[Callable[..., Any]] = None,
      *,
      name: Optional[str] = None,
-     args: Optional[Union[DataType, List[DataType], Dict[str, DataType]]] = None,
-     returns: Optional[Union[str, List[DataType], List[type]]] = None,
-     data_format: Optional[str] = None,
-     include_masks: bool = False,
+     args: Optional[ParameterType] = None,
+     returns: Optional[ReturnType] = None,
  ) -> Callable[..., Any]:
      """
-     Apply attributes to a UDF.
+     Define a user-defined function (UDF).
 
      Parameters
      ----------
@@ -218,71 +150,7 @@ def udf(
          The UDF to apply parameters to
      name : str, optional
          The name to use for the UDF in the database
-     args : str | Callable | List[str | Callable] | Dict[str, str | Callable], optional
-         Specifies the data types of the function arguments. Typically,
-         the function data types are derived from the function parameter
-         annotations. These annotations can be overridden. If the function
-         takes a single type for all parameters, `args` can be set to a
-         SQL string describing all parameters. If the function takes more
-         than one parameter and all of the parameters are being manually
-         defined, a list of SQL strings may be used (one for each parameter).
-         A dictionary of SQL strings may be used to specify a parameter type
-         for a subset of parameters; the keys are the names of the
-         function parameters. Callables may also be used for datatypes. This
-         is primarily for using the functions in the ``dtypes`` module that
-         are associated with SQL types with all default options (e.g., ``dt.FLOAT``).
-     returns : str, optional
-         Specifies the return data type of the function. If not specified,
-         the type annotation from the function is used.
-     data_format : str, optional
-         The data format of each parameter: python, pandas, arrow, polars
-     include_masks : bool, optional
-         Should boolean masks be included with each input parameter to indicate
-         which elements are NULL? This is only used when a input parameters are
-         configured to a vector type (numpy, pandas, polars, arrow).
-
-     Returns
-     -------
-     Callable
-
-     """
-     return _func(
-         func=func,
-         name=name,
-         args=args,
-         returns=returns,
-         data_format=data_format,
-         include_masks=include_masks,
-         function_type='udf',
-     )
-
-
- udf.pandas = functools.partial(udf, data_format='pandas') # type: ignore
- udf.polars = functools.partial(udf, data_format='polars') # type: ignore
- udf.arrow = functools.partial(udf, data_format='arrow') # type: ignore
- udf.numpy = functools.partial(udf, data_format='numpy') # type: ignore
-
-
- def tvf(
-     func: Optional[Callable[..., Any]] = None,
-     *,
-     name: Optional[str] = None,
-     args: Optional[Union[DataType, List[DataType], Dict[str, DataType]]] = None,
-     returns: Optional[Union[str, List[DataType], List[type]]] = None,
-     data_format: Optional[str] = None,
-     include_masks: bool = False,
-     output_fields: Optional[List[str]] = None,
- ) -> Callable[..., Any]:
-     """
-     Apply attributes to a TVF.
-
-     Parameters
-     ----------
-     func : callable, optional
-         The TVF to apply parameters to
-     name : str, optional
-         The name to use for the TVF in the database
-     args : str | Callable | List[str | Callable] | Dict[str, str | Callable], optional
+     args : str | Type | Callable | List[str | Callable], optional
          Specifies the data types of the function arguments. Typically,
          the function data types are derived from the function parameter
          annotations. These annotations can be overridden. If the function
@@ -295,18 +163,10 @@ def tvf(
          function parameters. Callables may also be used for datatypes. This
          is primarily for using the functions in the ``dtypes`` module that
          are associated with SQL types with all default options (e.g., ``dt.FLOAT``).
-     returns : str, optional
-         Specifies the return data type of the function. If not specified,
-         the type annotation from the function is used.
-     data_format : str, optional
-         The data format of each parameter: python, pandas, arrow, polars
-     include_masks : bool, optional
-         Should boolean masks be included with each input parameter to indicate
-         which elements are NULL? This is only used when a input parameters are
-         configured to a vector type (numpy, pandas, polars, arrow).
-     output_fields : List[str], optional
-         The names of the output fields for the TVF. If not specified, the
-         names are generated.
+     returns : str | Type | Callable | List[str | Callable] | Table, optional
+         Specifies the return data type of the function. This parameter
+         works the same way as `args`. If the function is a table-valued
+         function, the return type should be a `Table` object.
 
      Returns
      -------
@@ -318,14 +178,4 @@ def tvf(
          name=name,
          args=args,
          returns=returns,
-         data_format=data_format,
-         include_masks=include_masks,
-         function_type='tvf',
-         output_fields=output_fields,
      )
-
-
- tvf.pandas = functools.partial(tvf, data_format='pandas') # type: ignore
- tvf.polars = functools.partial(tvf, data_format='polars') # type: ignore
- tvf.arrow = functools.partial(tvf, data_format='arrow') # type: ignore
- tvf.numpy = functools.partial(tvf, data_format='numpy') # type: ignore
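To make the new decorator surface concrete, here is a sketch of how udf might be used after this change. The signature (name/args/returns), the list-of-SQL-strings form, and the dtypes-callable form (e.g. dt.FLOAT) come from the docstring above; the specific functions and type strings are illustrative.

    # Illustrative only; the decorated functions and SQL type strings are made up.
    from singlestoredb.functions import udf
    from singlestoredb.functions import dtypes as dt


    # Types inferred from the Python annotations.
    @udf
    def double_it(x: int) -> int:
        return x * 2


    # Types given explicitly: one SQL string per parameter, and a dtypes
    # callable (dt.FLOAT) for the return type.
    @udf(name='scale', args=['FLOAT NOT NULL', 'FLOAT NOT NULL'], returns=dt.FLOAT)
    def scale(value: float, factor: float) -> float:
        return value * factor

Per the updated docstring, the former tvf decorator is gone; a table-valued function is now expressed through udf by declaring its return type as a Table.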