duckdb-1.5.0.dev44-cp39-cp39-macosx_11_0_arm64.whl → duckdb-1.5.0.dev94-cp39-cp39-macosx_11_0_arm64.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of duckdb has been flagged as potentially problematic.
- _duckdb-stubs/__init__.pyi +1443 -0
- _duckdb-stubs/_func.pyi +46 -0
- _duckdb-stubs/_sqltypes.pyi +75 -0
- _duckdb.cpython-39-darwin.so +0 -0
- adbc_driver_duckdb/__init__.py +49 -0
- adbc_driver_duckdb/dbapi.py +115 -0
- duckdb/__init__.py +341 -435
- duckdb/_dbapi_type_object.py +231 -0
- duckdb/_version.py +22 -0
- duckdb/bytes_io_wrapper.py +12 -9
- duckdb/experimental/__init__.py +2 -1
- duckdb/experimental/spark/__init__.py +3 -4
- duckdb/experimental/spark/_globals.py +8 -8
- duckdb/experimental/spark/_typing.py +7 -9
- duckdb/experimental/spark/conf.py +16 -15
- duckdb/experimental/spark/context.py +60 -44
- duckdb/experimental/spark/errors/__init__.py +33 -35
- duckdb/experimental/spark/errors/error_classes.py +1 -1
- duckdb/experimental/spark/errors/exceptions/__init__.py +1 -1
- duckdb/experimental/spark/errors/exceptions/base.py +39 -88
- duckdb/experimental/spark/errors/utils.py +11 -16
- duckdb/experimental/spark/exception.py +9 -6
- duckdb/experimental/spark/sql/__init__.py +5 -5
- duckdb/experimental/spark/sql/_typing.py +8 -15
- duckdb/experimental/spark/sql/catalog.py +21 -20
- duckdb/experimental/spark/sql/column.py +48 -55
- duckdb/experimental/spark/sql/conf.py +9 -8
- duckdb/experimental/spark/sql/dataframe.py +185 -233
- duckdb/experimental/spark/sql/functions.py +1222 -1248
- duckdb/experimental/spark/sql/group.py +56 -52
- duckdb/experimental/spark/sql/readwriter.py +80 -94
- duckdb/experimental/spark/sql/session.py +64 -59
- duckdb/experimental/spark/sql/streaming.py +9 -10
- duckdb/experimental/spark/sql/type_utils.py +67 -65
- duckdb/experimental/spark/sql/types.py +309 -345
- duckdb/experimental/spark/sql/udf.py +6 -6
- duckdb/filesystem.py +26 -16
- duckdb/func/__init__.py +3 -0
- duckdb/functional/__init__.py +12 -16
- duckdb/polars_io.py +130 -83
- duckdb/query_graph/__main__.py +91 -96
- duckdb/sqltypes/__init__.py +63 -0
- duckdb/typing/__init__.py +18 -8
- duckdb/udf.py +10 -5
- duckdb/value/__init__.py +1 -0
- duckdb/value/constant/__init__.py +62 -60
- {duckdb-1.5.0.dev44.dist-info → duckdb-1.5.0.dev94.dist-info}/METADATA +12 -4
- duckdb-1.5.0.dev94.dist-info/RECORD +52 -0
- duckdb/__init__.pyi +0 -713
- duckdb/functional/__init__.pyi +0 -31
- duckdb/typing/__init__.pyi +0 -36
- duckdb/value/constant/__init__.pyi +0 -115
- duckdb-1.5.0.dev44.dist-info/RECORD +0 -47
- /duckdb/{value/__init__.pyi → py.typed} +0 -0
- {duckdb-1.5.0.dev44.dist-info → duckdb-1.5.0.dev94.dist-info}/WHEEL +0 -0
- {duckdb-1.5.0.dev44.dist-info → duckdb-1.5.0.dev94.dist-info}/licenses/LICENSE +0 -0
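The listing above shows the inline type stubs (duckdb/__init__.pyi, duckdb/typing/__init__.pyi, duckdb/functional/__init__.pyi, duckdb/value/constant/__init__.pyi) being removed in favor of a separate _duckdb-stubs package plus a duckdb/py.typed marker. The snippet below is not part of either wheel; it is a hypothetical smoke test, written against names declared in the deleted stub shown next (connect, DuckDBPyConnection, DuckDBPyRelation) and assuming the public runtime API is unchanged in 1.5.0.dev94, that can be run with python and checked with a type checker such as mypy to confirm the relocated stubs still resolve.

# Hypothetical check, not shipped in the wheel: annotated client code using names
# declared in the deleted duckdb/__init__.pyi stub below.
# Assumption: the runtime API is unchanged in 1.5.0.dev94 and a type checker now
# resolves these names through the _duckdb-stubs package and the py.typed marker.
import duckdb
from duckdb import DuckDBPyConnection, DuckDBPyRelation

con: DuckDBPyConnection = duckdb.connect(":memory:")    # connect() -> DuckDBPyConnection
rel: DuckDBPyRelation = con.sql("SELECT 42 AS answer")  # sql() -> DuckDBPyRelation
rows = rel.fetchall()                                   # fetchall() -> List[Any]
print(rows)  # expected: [(42,)]
con.close()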
duckdb/__init__.pyi
DELETED
@@ -1,713 +0,0 @@
-# to regenerate this from scratch, run scripts/regenerate_python_stubs.sh .
-# be warned - currently there are still tweaks needed after this file is
-# generated. These should be annotated with a comment like
-# # stubgen override
-# to help the sanity of maintainers.
-
-import duckdb.typing as typing
-import duckdb.functional as functional
-from duckdb.typing import DuckDBPyType
-from duckdb.functional import FunctionNullHandling, PythonUDFType
-from duckdb.value.constant import (
-    Value,
-    NullValue,
-    BooleanValue,
-    UnsignedBinaryValue,
-    UnsignedShortValue,
-    UnsignedIntegerValue,
-    UnsignedLongValue,
-    BinaryValue,
-    ShortValue,
-    IntegerValue,
-    LongValue,
-    HugeIntegerValue,
-    FloatValue,
-    DoubleValue,
-    DecimalValue,
-    StringValue,
-    UUIDValue,
-    BitValue,
-    BlobValue,
-    DateValue,
-    IntervalValue,
-    TimestampValue,
-    TimestampSecondValue,
-    TimestampMilisecondValue,
-    TimestampNanosecondValue,
-    TimestampTimeZoneValue,
-    TimeValue,
-    TimeTimeZoneValue,
-)
-
-# We also run this in python3.7, where this is needed
-from typing_extensions import Literal
-# stubgen override - missing import of Set
-from typing import Any, ClassVar, Set, Optional, Callable
-from io import StringIO, TextIOBase
-from pathlib import Path
-
-from typing import overload, Dict, List, Union, Tuple
-import pandas
-# stubgen override - unfortunately we need this for version checks
-import sys
-import fsspec
-import pyarrow.lib
-import polars
-# stubgen override - This should probably not be exposed
-apilevel: str
-comment: token_type
-identifier: token_type
-keyword: token_type
-numeric_const: token_type
-operator: token_type
-paramstyle: str
-string_const: token_type
-threadsafety: int
-__standard_vector_size__: int
-STANDARD: ExplainType
-ANALYZE: ExplainType
-DEFAULT: PythonExceptionHandling
-RETURN_NULL: PythonExceptionHandling
-ROWS: RenderMode
-COLUMNS: RenderMode
-
-__version__: str
-
-__interactive__: bool
-__jupyter__: bool
-__formatted_python_version__: str
-
-class BinderException(ProgrammingError): ...
-
-class CatalogException(ProgrammingError): ...
-
-class ConnectionException(OperationalError): ...
-
-class ConstraintException(IntegrityError): ...
-
-class ConversionException(DataError): ...
-
-class DataError(Error): ...
-
-class ExplainType:
-    STANDARD: ExplainType
-    ANALYZE: ExplainType
-    def __int__(self) -> int: ...
-    def __index__(self) -> int: ...
-    @property
-    def __members__(self) -> Dict[str, ExplainType]: ...
-    @property
-    def name(self) -> str: ...
-    @property
-    def value(self) -> int: ...
-
-class RenderMode:
-    ROWS: RenderMode
-    COLUMNS: RenderMode
-    def __int__(self) -> int: ...
-    def __index__(self) -> int: ...
-    @property
-    def __members__(self) -> Dict[str, RenderMode]: ...
-    @property
-    def name(self) -> str: ...
-    @property
-    def value(self) -> int: ...
-
-class PythonExceptionHandling:
-    DEFAULT: PythonExceptionHandling
-    RETURN_NULL: PythonExceptionHandling
-    def __int__(self) -> int: ...
-    def __index__(self) -> int: ...
-    @property
-    def __members__(self) -> Dict[str, PythonExceptionHandling]: ...
-    @property
-    def name(self) -> str: ...
-    @property
-    def value(self) -> int: ...
-
-class CSVLineTerminator:
-    LINE_FEED: CSVLineTerminator
-    CARRIAGE_RETURN_LINE_FEED: CSVLineTerminator
-    def __int__(self) -> int: ...
-    def __index__(self) -> int: ...
-    @property
-    def __members__(self) -> Dict[str, CSVLineTerminator]: ...
-    @property
-    def name(self) -> str: ...
-    @property
-    def value(self) -> int: ...
-
-class ExpectedResultType:
-    QUERY_RESULT: ExpectedResultType
-    CHANGED_ROWS: ExpectedResultType
-    NOTHING: ExpectedResultType
-    def __int__(self) -> int: ...
-    def __index__(self) -> int: ...
-    @property
-    def __members__(self) -> Dict[str, ExpectedResultType]: ...
-    @property
-    def name(self) -> str: ...
-    @property
-    def value(self) -> int: ...
-
-class StatementType:
-    INVALID: StatementType
-    SELECT: StatementType
-    INSERT: StatementType
-    UPDATE: StatementType
-    CREATE: StatementType
-    DELETE: StatementType
-    PREPARE: StatementType
-    EXECUTE: StatementType
-    ALTER: StatementType
-    TRANSACTION: StatementType
-    COPY: StatementType
-    ANALYZE: StatementType
-    VARIABLE_SET: StatementType
-    CREATE_FUNC: StatementType
-    EXPLAIN: StatementType
-    DROP: StatementType
-    EXPORT: StatementType
-    PRAGMA: StatementType
-    VACUUM: StatementType
-    CALL: StatementType
-    SET: StatementType
-    LOAD: StatementType
-    RELATION: StatementType
-    EXTENSION: StatementType
-    LOGICAL_PLAN: StatementType
-    ATTACH: StatementType
-    DETACH: StatementType
-    MULTI: StatementType
-    COPY_DATABASE: StatementType
-    MERGE_INTO: StatementType
-    def __int__(self) -> int: ...
-    def __index__(self) -> int: ...
-    @property
-    def __members__(self) -> Dict[str, StatementType]: ...
-    @property
-    def name(self) -> str: ...
-    @property
-    def value(self) -> int: ...
-
-class Statement:
-    def __init__(self, *args, **kwargs) -> None: ...
-    @property
-    def query(self) -> str: ...
-    @property
-    def named_parameters(self) -> Set[str]: ...
-    @property
-    def expected_result_type(self) -> List[ExpectedResultType]: ...
-    @property
-    def type(self) -> StatementType: ...
-
-class Expression:
-    def __init__(self, *args, **kwargs) -> None: ...
-    def __neg__(self) -> "Expression": ...
-
-    def __add__(self, expr: "Expression") -> "Expression": ...
-    def __radd__(self, expr: "Expression") -> "Expression": ...
-
-    def __sub__(self, expr: "Expression") -> "Expression": ...
-    def __rsub__(self, expr: "Expression") -> "Expression": ...
-
-    def __mul__(self, expr: "Expression") -> "Expression": ...
-    def __rmul__(self, expr: "Expression") -> "Expression": ...
-
-    def __div__(self, expr: "Expression") -> "Expression": ...
-    def __rdiv__(self, expr: "Expression") -> "Expression": ...
-
-    def __truediv__(self, expr: "Expression") -> "Expression": ...
-    def __rtruediv__(self, expr: "Expression") -> "Expression": ...
-
-    def __floordiv__(self, expr: "Expression") -> "Expression": ...
-    def __rfloordiv__(self, expr: "Expression") -> "Expression": ...
-
-    def __mod__(self, expr: "Expression") -> "Expression": ...
-    def __rmod__(self, expr: "Expression") -> "Expression": ...
-
-    def __pow__(self, expr: "Expression") -> "Expression": ...
-    def __rpow__(self, expr: "Expression") -> "Expression": ...
-
-    def __and__(self, expr: "Expression") -> "Expression": ...
-    def __rand__(self, expr: "Expression") -> "Expression": ...
-    def __or__(self, expr: "Expression") -> "Expression": ...
-    def __ror__(self, expr: "Expression") -> "Expression": ...
-    def __invert__(self) -> "Expression": ...
-
-    def __eq__(# type: ignore[override]
-        self, expr: "Expression") -> "Expression": ...
-    def __ne__(# type: ignore[override]
-        self, expr: "Expression") -> "Expression": ...
-    def __gt__(self, expr: "Expression") -> "Expression": ...
-    def __ge__(self, expr: "Expression") -> "Expression": ...
-    def __lt__(self, expr: "Expression") -> "Expression": ...
-    def __le__(self, expr: "Expression") -> "Expression": ...
-
-    def show(self) -> None: ...
-    def __repr__(self) -> str: ...
-    def get_name(self) -> str: ...
-    def alias(self, alias: str) -> "Expression": ...
-    def when(self, condition: "Expression", value: "Expression") -> "Expression": ...
-    def otherwise(self, value: "Expression") -> "Expression": ...
-    def cast(self, type: DuckDBPyType) -> "Expression": ...
-    def between(self, lower: "Expression", upper: "Expression") -> "Expression": ...
-    def collate(self, collation: str) -> "Expression": ...
-    def asc(self) -> "Expression": ...
-    def desc(self) -> "Expression": ...
-    def nulls_first(self) -> "Expression": ...
-    def nulls_last(self) -> "Expression": ...
-    def isnull(self) -> "Expression": ...
-    def isnotnull(self) -> "Expression": ...
-    def isin(self, *cols: "Expression") -> "Expression": ...
-    def isnotin(self, *cols: "Expression") -> "Expression": ...
-
-def StarExpression(exclude: Optional[List[str]] = None) -> Expression: ...
-def ColumnExpression(column: str) -> Expression: ...
-def DefaultExpression() -> Expression: ...
-def ConstantExpression(val: Any) -> Expression: ...
-def CaseExpression(condition: Expression, value: Expression) -> Expression: ...
-def FunctionExpression(function: str, *cols: Expression) -> Expression: ...
-def CoalesceOperator(*cols: Expression) -> Expression: ...
-def LambdaExpression(lhs: Union[Tuple["Expression", ...], str], rhs: Expression) -> Expression: ...
-def SQLExpression(expr: str) -> Expression: ...
-
-class DuckDBPyConnection:
-    def __init__(self, *args, **kwargs) -> None: ...
-    def __enter__(self) -> DuckDBPyConnection: ...
-    def __exit__(self, exc_type: object, exc: object, traceback: object) -> None: ...
-    def __del__(self) -> None: ...
-    @property
-    def description(self) -> Optional[List[Any]]: ...
-    @property
-    def rowcount(self) -> int: ...
-
-    # NOTE: this section is generated by tools/pythonpkg/scripts/generate_connection_stubs.py.
-    # Do not edit this section manually, your changes will be overwritten!
-
-    # START OF CONNECTION METHODS
-    def cursor(self) -> DuckDBPyConnection: ...
-    def register_filesystem(self, filesystem: fsspec.AbstractFileSystem) -> None: ...
-    def unregister_filesystem(self, name: str) -> None: ...
-    def list_filesystems(self) -> list: ...
-    def filesystem_is_registered(self, name: str) -> bool: ...
-    def create_function(self, name: str, function: function, parameters: Optional[List[DuckDBPyType]] = None, return_type: Optional[DuckDBPyType] = None, *, type: Optional[PythonUDFType] = PythonUDFType.NATIVE, null_handling: Optional[FunctionNullHandling] = FunctionNullHandling.DEFAULT, exception_handling: Optional[PythonExceptionHandling] = PythonExceptionHandling.DEFAULT, side_effects: bool = False) -> DuckDBPyConnection: ...
-    def remove_function(self, name: str) -> DuckDBPyConnection: ...
-    def sqltype(self, type_str: str) -> DuckDBPyType: ...
-    def dtype(self, type_str: str) -> DuckDBPyType: ...
-    def type(self, type_str: str) -> DuckDBPyType: ...
-    def array_type(self, type: DuckDBPyType, size: int) -> DuckDBPyType: ...
-    def list_type(self, type: DuckDBPyType) -> DuckDBPyType: ...
-    def union_type(self, members: DuckDBPyType) -> DuckDBPyType: ...
-    def string_type(self, collation: str = "") -> DuckDBPyType: ...
-    def enum_type(self, name: str, type: DuckDBPyType, values: List[Any]) -> DuckDBPyType: ...
-    def decimal_type(self, width: int, scale: int) -> DuckDBPyType: ...
-    def struct_type(self, fields: Union[Dict[str, DuckDBPyType], List[str]]) -> DuckDBPyType: ...
-    def row_type(self, fields: Union[Dict[str, DuckDBPyType], List[str]]) -> DuckDBPyType: ...
-    def map_type(self, key: DuckDBPyType, value: DuckDBPyType) -> DuckDBPyType: ...
-    def duplicate(self) -> DuckDBPyConnection: ...
-    def execute(self, query: object, parameters: object = None) -> DuckDBPyConnection: ...
-    def executemany(self, query: object, parameters: object = None) -> DuckDBPyConnection: ...
-    def close(self) -> None: ...
-    def interrupt(self) -> None: ...
-    def query_progress(self) -> float: ...
-    def fetchone(self) -> Optional[tuple]: ...
-    def fetchmany(self, size: int = 1) -> List[Any]: ...
-    def fetchall(self) -> List[Any]: ...
-    def fetchnumpy(self) -> dict: ...
-    def fetchdf(self, *, date_as_object: bool = False) -> pandas.DataFrame: ...
-    def fetch_df(self, *, date_as_object: bool = False) -> pandas.DataFrame: ...
-    def df(self, *, date_as_object: bool = False) -> pandas.DataFrame: ...
-    def fetch_df_chunk(self, vectors_per_chunk: int = 1, *, date_as_object: bool = False) -> pandas.DataFrame: ...
-    def pl(self, rows_per_batch: int = 1000000, *, lazy: bool = False) -> polars.DataFrame: ...
-    def fetch_arrow_table(self, rows_per_batch: int = 1000000) -> pyarrow.lib.Table: ...
-    def fetch_record_batch(self, rows_per_batch: int = 1000000) -> pyarrow.lib.RecordBatchReader: ...
-    def arrow(self, rows_per_batch: int = 1000000) -> pyarrow.lib.RecordBatchReader: ...
-    def torch(self) -> dict: ...
-    def tf(self) -> dict: ...
-    def begin(self) -> DuckDBPyConnection: ...
-    def commit(self) -> DuckDBPyConnection: ...
-    def rollback(self) -> DuckDBPyConnection: ...
-    def checkpoint(self) -> DuckDBPyConnection: ...
-    def append(self, table_name: str, df: pandas.DataFrame, *, by_name: bool = False) -> DuckDBPyConnection: ...
-    def register(self, view_name: str, python_object: object) -> DuckDBPyConnection: ...
-    def unregister(self, view_name: str) -> DuckDBPyConnection: ...
-    def table(self, table_name: str) -> DuckDBPyRelation: ...
-    def view(self, view_name: str) -> DuckDBPyRelation: ...
-    def values(self, *args: Union[List[Any],Expression, Tuple[Expression]]) -> DuckDBPyRelation: ...
-    def table_function(self, name: str, parameters: object = None) -> DuckDBPyRelation: ...
-    def read_json(self, path_or_buffer: Union[str, StringIO, TextIOBase], *, columns: Optional[Dict[str,str]] = None, sample_size: Optional[int] = None, maximum_depth: Optional[int] = None, records: Optional[str] = None, format: Optional[str] = None, date_format: Optional[str] = None, timestamp_format: Optional[str] = None, compression: Optional[str] = None, maximum_object_size: Optional[int] = None, ignore_errors: Optional[bool] = None, convert_strings_to_integers: Optional[bool] = None, field_appearance_threshold: Optional[float] = None, map_inference_threshold: Optional[int] = None, maximum_sample_files: Optional[int] = None, filename: Optional[bool | str] = None, hive_partitioning: Optional[bool] = None, union_by_name: Optional[bool] = None, hive_types: Optional[Dict[str, str]] = None, hive_types_autocast: Optional[bool] = None) -> DuckDBPyRelation: ...
-    def extract_statements(self, query: str) -> List[Statement]: ...
-    def sql(self, query: str, *, alias: str = "", params: object = None) -> DuckDBPyRelation: ...
-    def query(self, query: str, *, alias: str = "", params: object = None) -> DuckDBPyRelation: ...
-    def from_query(self, query: str, *, alias: str = "", params: object = None) -> DuckDBPyRelation: ...
-    def read_csv(self, path_or_buffer: Union[str, StringIO, TextIOBase], *, header: Optional[bool | int] = None, compression: Optional[str] = None, sep: Optional[str] = None, delimiter: Optional[str] = None, dtype: Optional[Dict[str, str] | List[str]] = None, na_values: Optional[str| List[str]] = None, skiprows: Optional[int] = None, quotechar: Optional[str] = None, escapechar: Optional[str] = None, encoding: Optional[str] = None, parallel: Optional[bool] = None, date_format: Optional[str] = None, timestamp_format: Optional[str] = None, sample_size: Optional[int] = None, all_varchar: Optional[bool] = None, normalize_names: Optional[bool] = None, null_padding: Optional[bool] = None, names: Optional[List[str]] = None, lineterminator: Optional[str] = None, columns: Optional[Dict[str, str]] = None, auto_type_candidates: Optional[List[str]] = None, max_line_size: Optional[int] = None, ignore_errors: Optional[bool] = None, store_rejects: Optional[bool] = None, rejects_table: Optional[str] = None, rejects_scan: Optional[str] = None, rejects_limit: Optional[int] = None, force_not_null: Optional[List[str]] = None, buffer_size: Optional[int] = None, decimal: Optional[str] = None, allow_quoted_nulls: Optional[bool] = None, filename: Optional[bool | str] = None, hive_partitioning: Optional[bool] = None, union_by_name: Optional[bool] = None, hive_types: Optional[Dict[str, str]] = None, hive_types_autocast: Optional[bool] = None) -> DuckDBPyRelation: ...
-    def from_csv_auto(self, path_or_buffer: Union[str, StringIO, TextIOBase], *, header: Optional[bool | int] = None, compression: Optional[str] = None, sep: Optional[str] = None, delimiter: Optional[str] = None, dtype: Optional[Dict[str, str] | List[str]] = None, na_values: Optional[str| List[str]] = None, skiprows: Optional[int] = None, quotechar: Optional[str] = None, escapechar: Optional[str] = None, encoding: Optional[str] = None, parallel: Optional[bool] = None, date_format: Optional[str] = None, timestamp_format: Optional[str] = None, sample_size: Optional[int] = None, all_varchar: Optional[bool] = None, normalize_names: Optional[bool] = None, null_padding: Optional[bool] = None, names: Optional[List[str]] = None, lineterminator: Optional[str] = None, columns: Optional[Dict[str, str]] = None, auto_type_candidates: Optional[List[str]] = None, max_line_size: Optional[int] = None, ignore_errors: Optional[bool] = None, store_rejects: Optional[bool] = None, rejects_table: Optional[str] = None, rejects_scan: Optional[str] = None, rejects_limit: Optional[int] = None, force_not_null: Optional[List[str]] = None, buffer_size: Optional[int] = None, decimal: Optional[str] = None, allow_quoted_nulls: Optional[bool] = None, filename: Optional[bool | str] = None, hive_partitioning: Optional[bool] = None, union_by_name: Optional[bool] = None, hive_types: Optional[Dict[str, str]] = None, hive_types_autocast: Optional[bool] = None) -> DuckDBPyRelation: ...
-    def from_df(self, df: pandas.DataFrame) -> DuckDBPyRelation: ...
-    def from_arrow(self, arrow_object: object) -> DuckDBPyRelation: ...
-    def from_parquet(self, file_glob: str, binary_as_string: bool = False, *, file_row_number: bool = False, filename: bool = False, hive_partitioning: bool = False, union_by_name: bool = False, compression: Optional[str] = None) -> DuckDBPyRelation: ...
-    def read_parquet(self, file_glob: str, binary_as_string: bool = False, *, file_row_number: bool = False, filename: bool = False, hive_partitioning: bool = False, union_by_name: bool = False, compression: Optional[str] = None) -> DuckDBPyRelation: ...
-    def get_table_names(self, query: str, *, qualified: bool = False) -> Set[str]: ...
-    def install_extension(self, extension: str, *, force_install: bool = False, repository: Optional[str] = None, repository_url: Optional[str] = None, version: Optional[str] = None) -> None: ...
-    def load_extension(self, extension: str) -> None: ...
-    # END OF CONNECTION METHODS
-
-class DuckDBPyRelation:
-    def close(self) -> None: ...
-    def __getattr__(self, name: str) -> DuckDBPyRelation: ...
-    def __getitem__(self, name: str) -> DuckDBPyRelation: ...
-    def __init__(self, *args, **kwargs) -> None: ...
-    def __contains__(self, name: str) -> bool: ...
-    def aggregate(self, aggr_expr: str, group_expr: str = ...) -> DuckDBPyRelation: ...
-    def apply(self, function_name: str, function_aggr: str, group_expr: str = ..., function_parameter: str = ..., projected_columns: str = ...) -> DuckDBPyRelation: ...
-
-    def cume_dist(self, window_spec: str, projected_columns: str = ...) -> DuckDBPyRelation: ...
-    def dense_rank(self, window_spec: str, projected_columns: str = ...) -> DuckDBPyRelation: ...
-    def percent_rank(self, window_spec: str, projected_columns: str = ...) -> DuckDBPyRelation: ...
-    def rank(self, window_spec: str, projected_columns: str = ...) -> DuckDBPyRelation: ...
-    def rank_dense(self, window_spec: str, projected_columns: str = ...) -> DuckDBPyRelation: ...
-    def row_number(self, window_spec: str, projected_columns: str = ...) -> DuckDBPyRelation: ...
-
-    def lag(self, column: str, window_spec: str, offset: int, default_value: str, ignore_nulls: bool, projected_columns: str = ...) -> DuckDBPyRelation: ...
-    def lead(self, column: str, window_spec: str, offset: int, default_value: str, ignore_nulls: bool, projected_columns: str = ...) -> DuckDBPyRelation: ...
-    def nth_value(self, column: str, window_spec: str, offset: int, ignore_nulls: bool = ..., projected_columns: str = ...) -> DuckDBPyRelation: ...
-
-    def value_counts(self, column: str, groups: str = ...) -> DuckDBPyRelation: ...
-    def geomean(self, column: str, groups: str = ..., projected_columns: str = ...) -> DuckDBPyRelation: ...
-    def first(self, column: str, groups: str = ..., projected_columns: str = ...) -> DuckDBPyRelation: ...
-    def first_value(self, column: str, window_spec: str = ..., projected_columns: str = ...) -> DuckDBPyRelation: ...
-    def last(self, column: str, groups: str = ..., projected_columns: str = ...) -> DuckDBPyRelation: ...
-    def last_value(self, column: str, window_spec: str = ..., projected_columns: str = ...) -> DuckDBPyRelation: ...
-    def mode(self, aggregation_columns: str, group_columns: str = ...) -> DuckDBPyRelation: ...
-    def n_tile(self, window_spec: str, num_buckets: int, projected_columns: str = ...) -> DuckDBPyRelation: ...
-    def quantile_cont(self, column: str, q: Union[float, List[float]] = ..., groups: str = ..., projected_columns: str = ...) -> DuckDBPyRelation: ...
-    def quantile_disc(self, column: str, q: Union[float, List[float]] = ..., groups: str = ..., projected_columns: str = ...) -> DuckDBPyRelation: ...
-    def sum(self, sum_aggr: str, group_expr: str = ...) -> DuckDBPyRelation: ...
-
-    def any_value(self, column: str, groups: str = ..., window_spec: str = ..., projected_columns: str = ...) -> DuckDBPyRelation: ...
-    def arg_max(self, arg_column: str, value_column: str, groups: str = ..., window_spec: str = ..., projected_columns: str = ...) -> DuckDBPyRelation: ...
-    def arg_min(self, arg_column: str, value_column: str, groups: str = ..., window_spec: str = ..., projected_columns: str = ...) -> DuckDBPyRelation: ...
-    def avg(self, column: str, groups: str = ..., window_spec: str = ..., projected_columns: str = ...) -> DuckDBPyRelation: ...
-    def bit_and(self, column: str, groups: str = ..., window_spec: str = ..., projected_columns: str = ...) -> DuckDBPyRelation: ...
-    def bit_or(self, column: str, groups: str = ..., window_spec: str = ..., projected_columns: str = ...) -> DuckDBPyRelation: ...
-    def bit_xor(self, column: str, groups: str = ..., window_spec: str = ..., projected_columns: str = ...) -> DuckDBPyRelation: ...
-    def bitstring_agg(self, column: str, min: Optional[int], max: Optional[int], groups: str = ..., window_spec: str = ..., projected_columns: str = ...) -> DuckDBPyRelation: ...
-    def bool_and(self, column: str, groups: str = ..., window_spec: str = ..., projected_columns: str = ...) -> DuckDBPyRelation: ...
-    def bool_or(self, column: str, groups: str = ..., window_spec: str = ..., projected_columns: str = ...) -> DuckDBPyRelation: ...
-    def count(self, column: str, groups: str = ..., window_spec: str = ..., projected_columns: str = ...) -> DuckDBPyRelation: ...
-    def favg(self, column: str, groups: str = ..., window_spec: str = ..., projected_columns: str = ...) -> DuckDBPyRelation: ...
-    def fsum(self, column: str, groups: str = ..., window_spec: str = ..., projected_columns: str = ...) -> DuckDBPyRelation: ...
-    def histogram(self, column: str, groups: str = ..., window_spec: str = ..., projected_columns: str = ...) -> DuckDBPyRelation: ...
-    def max(self, max_aggr: str, groups: str = ..., window_spec: str = ..., projected_columns: str = ...) -> DuckDBPyRelation: ...
-    def min(self, min_aggr: str, groups: str = ..., window_spec: str = ..., projected_columns: str = ...) -> DuckDBPyRelation: ...
-    def mean(self, mean_aggr: str, groups: str = ..., window_spec: str = ..., projected_columns: str = ...) -> DuckDBPyRelation: ...
-    def median(self, median_aggr: str, groups: str = ..., window_spec: str = ..., projected_columns: str = ...) -> DuckDBPyRelation: ...
-    def product(self, column: str, groups: str = ..., window_spec: str = ..., projected_columns: str = ...) -> DuckDBPyRelation: ...
-    def quantile(self, q: str, quantile_aggr: str, groups: str = ..., window_spec: str = ..., projected_columns: str = ...) -> DuckDBPyRelation: ...
-    def std(self, column: str, groups: str = ..., window_spec: str = ..., projected_columns: str = ...) -> DuckDBPyRelation: ...
-    def stddev(self, column: str, groups: str = ..., window_spec: str = ..., projected_columns: str = ...) -> DuckDBPyRelation: ...
-    def stddev_pop(self, column: str, groups: str = ..., window_spec: str = ..., projected_columns: str = ...) -> DuckDBPyRelation: ...
-    def stddev_samp(self, column: str, groups: str = ..., window_spec: str = ..., projected_columns: str = ...) -> DuckDBPyRelation: ...
-    def string_agg(self, column: str, sep: str = ..., groups: str = ..., window_spec: str = ..., projected_columns: str = ...) -> DuckDBPyRelation: ...
-    def var(self, column: str, groups: str = ..., window_spec: str = ..., projected_columns: str = ...) -> DuckDBPyRelation: ...
-    def var_pop(self, column: str, groups: str = ..., window_spec: str = ..., projected_columns: str = ...) -> DuckDBPyRelation: ...
-    def var_samp(self, column: str, groups: str = ..., window_spec: str = ..., projected_columns: str = ...) -> DuckDBPyRelation: ...
-    def variance(self, column: str, groups: str = ..., window_spec: str = ..., projected_columns: str = ...) -> DuckDBPyRelation: ...
-    def list(self, column: str, groups: str = ..., window_spec: str = ..., projected_columns: str = ...) -> DuckDBPyRelation: ...
-
-    def arrow(self, batch_size: int = ...) -> pyarrow.lib.RecordBatchReader: ...
-    def __arrow_c_stream__(self, requested_schema: Optional[object] = None) -> object: ...
-    def create(self, table_name: str) -> None: ...
-    def create_view(self, view_name: str, replace: bool = ...) -> DuckDBPyRelation: ...
-    def describe(self) -> DuckDBPyRelation: ...
-    def df(self, *args, **kwargs) -> pandas.DataFrame: ...
-    def distinct(self) -> DuckDBPyRelation: ...
-    def except_(self, other_rel: DuckDBPyRelation) -> DuckDBPyRelation: ...
-    def execute(self, *args, **kwargs) -> DuckDBPyRelation: ...
-    def explain(self, type: Optional[Literal['standard', 'analyze'] | int] = 'standard') -> str: ...
-    def fetchall(self) -> List[Any]: ...
-    def fetchmany(self, size: int = ...) -> List[Any]: ...
-    def fetchnumpy(self) -> dict: ...
-    def fetchone(self) -> Optional[tuple]: ...
-    def fetchdf(self, *args, **kwargs) -> Any: ...
-    def fetch_arrow_reader(self, batch_size: int = ...) -> pyarrow.lib.RecordBatchReader: ...
-    def fetch_arrow_table(self, rows_per_batch: int = ...) -> pyarrow.lib.Table: ...
-    def filter(self, filter_expr: Union[Expression, str]) -> DuckDBPyRelation: ...
-    def insert(self, values: List[Any]) -> None: ...
-    def update(self, set: Dict[str, Expression], condition: Optional[Expression] = None) -> None: ...
-    def insert_into(self, table_name: str) -> None: ...
-    def intersect(self, other_rel: DuckDBPyRelation) -> DuckDBPyRelation: ...
-    def join(self, other_rel: DuckDBPyRelation, condition: Union[str, Expression], how: str = ...) -> DuckDBPyRelation: ...
-    def cross(self, other_rel: DuckDBPyRelation) -> DuckDBPyRelation: ...
-    def limit(self, n: int, offset: int = ...) -> DuckDBPyRelation: ...
-    def map(self, map_function: function, schema: Optional[Dict[str, DuckDBPyType]] = None) -> DuckDBPyRelation: ...
-    def order(self, order_expr: str) -> DuckDBPyRelation: ...
-    def sort(self, *cols: Expression) -> DuckDBPyRelation: ...
-    def project(self, *cols: Union[str, Expression]) -> DuckDBPyRelation: ...
-    def select(self, *cols: Union[str, Expression]) -> DuckDBPyRelation: ...
-    def pl(self, rows_per_batch: int = ..., connection: DuckDBPyConnection = ...) -> polars.DataFrame: ...
-    def query(self, virtual_table_name: str, sql_query: str) -> DuckDBPyRelation: ...
-    def record_batch(self, batch_size: int = ...) -> pyarrow.lib.RecordBatchReader: ...
-    def fetch_record_batch(self, rows_per_batch: int = 1000000, *, connection: DuckDBPyConnection = ...) -> pyarrow.lib.RecordBatchReader: ...
-    def select_types(self, types: List[Union[str, DuckDBPyType]]) -> DuckDBPyRelation: ...
-    def select_dtypes(self, types: List[Union[str, DuckDBPyType]]) -> DuckDBPyRelation: ...
-    def set_alias(self, alias: str) -> DuckDBPyRelation: ...
-    def show(self, max_width: Optional[int] = None, max_rows: Optional[int] = None, max_col_width: Optional[int] = None, null_value: Optional[str] = None, render_mode: Optional[RenderMode] = None) -> None: ...
-    def sql_query(self) -> str: ...
-    def to_arrow_table(self, batch_size: int = ...) -> pyarrow.lib.Table: ...
-    def to_csv(
-        self,
-        file_name: str,
-        sep: Optional[str] = None,
-        na_rep: Optional[str] = None,
-        header: Optional[bool] = None,
-        quotechar: Optional[str] = None,
-        escapechar: Optional[str] = None,
-        date_format: Optional[str] = None,
-        timestamp_format: Optional[str] = None,
-        quoting: Optional[str | int] = None,
-        encoding: Optional[str] = None,
-        compression: Optional[str] = None,
-        write_partition_columns: Optional[bool] = None,
-        overwrite: Optional[bool] = None,
-        per_thread_output: Optional[bool] = None,
-        use_tmp_file: Optional[bool] = None,
-        partition_by: Optional[List[str]] = None
-    ) -> None: ...
-    def to_df(self, *args, **kwargs) -> pandas.DataFrame: ...
-    def to_parquet(
-        self,
-        file_name: str,
-        compression: Optional[str] = None,
-        field_ids: Optional[dict | str] = None,
-        row_group_size_bytes: Optional[int | str] = None,
-        row_group_size: Optional[int] = None,
-        partition_by: Optional[List[str]] = None,
-        write_partition_columns: Optional[bool] = None,
-        overwrite: Optional[bool] = None,
-        per_thread_output: Optional[bool] = None,
-        use_tmp_file: Optional[bool] = None,
-        append: Optional[bool] = None
-    ) -> None: ...
-    def fetch_df_chunk(self, vectors_per_chunk: int = 1, *, date_as_object: bool = False) -> pandas.DataFrame: ...
-    def to_table(self, table_name: str) -> None: ...
-    def to_view(self, view_name: str, replace: bool = ...) -> DuckDBPyRelation: ...
-    def torch(self, connection: DuckDBPyConnection = ...) -> dict: ...
-    def tf(self, connection: DuckDBPyConnection = ...) -> dict: ...
-    def union(self, union_rel: DuckDBPyRelation) -> DuckDBPyRelation: ...
-    def unique(self, unique_aggr: str) -> DuckDBPyRelation: ...
-    def write_csv(
-        self,
-        file_name: str,
-        sep: Optional[str] = None,
-        na_rep: Optional[str] = None,
-        header: Optional[bool] = None,
-        quotechar: Optional[str] = None,
-        escapechar: Optional[str] = None,
-        date_format: Optional[str] = None,
-        timestamp_format: Optional[str] = None,
-        quoting: Optional[str | int] = None,
-        encoding: Optional[str] = None,
-        compression: Optional[str] = None,
-        write_partition_columns: Optional[bool] = None,
-        overwrite: Optional[bool] = None,
-        per_thread_output: Optional[bool] = None,
-        use_tmp_file: Optional[bool] = None,
-        partition_by: Optional[List[str]] = None
-    ) -> None: ...
-    def write_parquet(
-        self,
-        file_name: str,
-        compression: Optional[str] = None,
-        field_ids: Optional[dict | str] = None,
-        row_group_size_bytes: Optional[int | str] = None,
-        row_group_size: Optional[int] = None,
-        partition_by: Optional[List[str]] = None,
-        write_partition_columns: Optional[bool] = None,
-        overwrite: Optional[bool] = None,
-        per_thread_output: Optional[bool] = None,
-        use_tmp_file: Optional[bool] = None,
-        append: Optional[bool] = None
-    ) -> None: ...
-    def __len__(self) -> int: ...
-    @property
-    def alias(self) -> str: ...
-    @property
-    def columns(self) -> List[str]: ...
-    @property
-    def dtypes(self) -> List[DuckDBPyType]: ...
-    @property
-    def description(self) -> List[Any]: ...
-    @property
-    def shape(self) -> tuple[int, int]: ...
-    @property
-    def type(self) -> str: ...
-    @property
-    def types(self) -> List[DuckDBPyType]: ...
-
-class Error(Exception): ...
-
-class FatalException(Error): ...
-
-class HTTPException(IOException):
-    status_code: int
-    body: str
-    reason: str
-    headers: Dict[str, str]
-
-class IOException(OperationalError): ...
-
-class IntegrityError(Error): ...
-
-class InternalError(Error): ...
-
-class InternalException(InternalError): ...
-
-class InterruptException(Error): ...
-
-class InvalidInputException(ProgrammingError): ...
-
-class InvalidTypeException(ProgrammingError): ...
-
-class NotImplementedException(NotSupportedError): ...
-
-class NotSupportedError(Error): ...
-
-class OperationalError(Error): ...
-
-class OutOfMemoryException(OperationalError): ...
-
-class OutOfRangeException(DataError): ...
-
-class ParserException(ProgrammingError): ...
-
-class PermissionException(Error): ...
-
-class ProgrammingError(Error): ...
-
-class SequenceException(Error): ...
-
-class SerializationException(OperationalError): ...
-
-class SyntaxException(ProgrammingError): ...
-
-class TransactionException(OperationalError): ...
-
-class TypeMismatchException(DataError): ...
-
-class Warning(Exception): ...
-
-class token_type:
-    # stubgen override - these make mypy sad
-    #__doc__: ClassVar[str] = ... # read-only
-    #__members__: ClassVar[dict] = ... # read-only
-    __entries: ClassVar[dict] = ...
-    comment: ClassVar[token_type] = ...
-    identifier: ClassVar[token_type] = ...
-    keyword: ClassVar[token_type] = ...
-    numeric_const: ClassVar[token_type] = ...
-    operator: ClassVar[token_type] = ...
-    string_const: ClassVar[token_type] = ...
-    def __init__(self, value: int) -> None: ...
-    def __eq__(self, other: object) -> bool: ...
-    def __getstate__(self) -> int: ...
-    def __hash__(self) -> int: ...
-    # stubgen override - pybind only puts index in python >= 3.8: https://github.com/EricCousineau-TRI/pybind11/blob/54430436/include/pybind11/pybind11.h#L1789
-    if sys.version_info >= (3, 7):
-        def __index__(self) -> int: ...
-    def __int__(self) -> int: ...
-    def __ne__(self, other: object) -> bool: ...
-    def __setstate__(self, state: int) -> None: ...
-    @property
-    def name(self) -> str: ...
-    @property
-    def value(self) -> int: ...
-    @property
-    # stubgen override - this gets removed by stubgen but it shouldn't
-    def __members__(self) -> object: ...
-
-def connect(database: Union[str, Path] = ..., read_only: bool = ..., config: dict = ...) -> DuckDBPyConnection: ...
-def default_connection() -> DuckDBPyConnection: ...
-def set_default_connection(connection: DuckDBPyConnection) -> None: ...
-def tokenize(query: str) -> List[Any]: ...
-
-# NOTE: this section is generated by tools/pythonpkg/scripts/generate_connection_wrapper_stubs.py.
-# Do not edit this section manually, your changes will be overwritten!
-
-# START OF CONNECTION WRAPPER
-def cursor(*, connection: DuckDBPyConnection = ...) -> DuckDBPyConnection: ...
-def register_filesystem(filesystem: fsspec.AbstractFileSystem, *, connection: DuckDBPyConnection = ...) -> None: ...
-def unregister_filesystem(name: str, *, connection: DuckDBPyConnection = ...) -> None: ...
-def list_filesystems(*, connection: DuckDBPyConnection = ...) -> list: ...
-def filesystem_is_registered(name: str, *, connection: DuckDBPyConnection = ...) -> bool: ...
-def create_function(name: str, function: function, parameters: Optional[List[DuckDBPyType]] = None, return_type: Optional[DuckDBPyType] = None, *, type: Optional[PythonUDFType] = PythonUDFType.NATIVE, null_handling: Optional[FunctionNullHandling] = FunctionNullHandling.DEFAULT, exception_handling: Optional[PythonExceptionHandling] = PythonExceptionHandling.DEFAULT, side_effects: bool = False, connection: DuckDBPyConnection = ...) -> DuckDBPyConnection: ...
-def remove_function(name: str, *, connection: DuckDBPyConnection = ...) -> DuckDBPyConnection: ...
-def sqltype(type_str: str, *, connection: DuckDBPyConnection = ...) -> DuckDBPyType: ...
-def dtype(type_str: str, *, connection: DuckDBPyConnection = ...) -> DuckDBPyType: ...
-def type(type_str: str, *, connection: DuckDBPyConnection = ...) -> DuckDBPyType: ...
-def array_type(type: DuckDBPyType, size: int, *, connection: DuckDBPyConnection = ...) -> DuckDBPyType: ...
-def list_type(type: DuckDBPyType, *, connection: DuckDBPyConnection = ...) -> DuckDBPyType: ...
-def union_type(members: DuckDBPyType, *, connection: DuckDBPyConnection = ...) -> DuckDBPyType: ...
-def string_type(collation: str = "", *, connection: DuckDBPyConnection = ...) -> DuckDBPyType: ...
-def enum_type(name: str, type: DuckDBPyType, values: List[Any], *, connection: DuckDBPyConnection = ...) -> DuckDBPyType: ...
-def decimal_type(width: int, scale: int, *, connection: DuckDBPyConnection = ...) -> DuckDBPyType: ...
-def struct_type(fields: Union[Dict[str, DuckDBPyType], List[str]], *, connection: DuckDBPyConnection = ...) -> DuckDBPyType: ...
-def row_type(fields: Union[Dict[str, DuckDBPyType], List[str]], *, connection: DuckDBPyConnection = ...) -> DuckDBPyType: ...
-def map_type(key: DuckDBPyType, value: DuckDBPyType, *, connection: DuckDBPyConnection = ...) -> DuckDBPyType: ...
-def duplicate(*, connection: DuckDBPyConnection = ...) -> DuckDBPyConnection: ...
-def execute(query: object, parameters: object = None, *, connection: DuckDBPyConnection = ...) -> DuckDBPyConnection: ...
-def executemany(query: object, parameters: object = None, *, connection: DuckDBPyConnection = ...) -> DuckDBPyConnection: ...
-def close(*, connection: DuckDBPyConnection = ...) -> None: ...
-def interrupt(*, connection: DuckDBPyConnection = ...) -> None: ...
-def query_progress(*, connection: DuckDBPyConnection = ...) -> float: ...
-def fetchone(*, connection: DuckDBPyConnection = ...) -> Optional[tuple]: ...
-def fetchmany(size: int = 1, *, connection: DuckDBPyConnection = ...) -> List[Any]: ...
-def fetchall(*, connection: DuckDBPyConnection = ...) -> List[Any]: ...
-def fetchnumpy(*, connection: DuckDBPyConnection = ...) -> dict: ...
-def fetchdf(*, date_as_object: bool = False, connection: DuckDBPyConnection = ...) -> pandas.DataFrame: ...
-def fetch_df(*, date_as_object: bool = False, connection: DuckDBPyConnection = ...) -> pandas.DataFrame: ...
-def df(*, date_as_object: bool = False, connection: DuckDBPyConnection = ...) -> pandas.DataFrame: ...
-def fetch_df_chunk(vectors_per_chunk: int = 1, *, date_as_object: bool = False, connection: DuckDBPyConnection = ...) -> pandas.DataFrame: ...
-def pl(rows_per_batch: int = 1000000, *, lazy: bool = False, connection: DuckDBPyConnection = ...) -> polars.DataFrame: ...
-def fetch_arrow_table(rows_per_batch: int = 1000000, *, connection: DuckDBPyConnection = ...) -> pyarrow.lib.Table: ...
-def fetch_record_batch(rows_per_batch: int = 1000000, *, connection: DuckDBPyConnection = ...) -> pyarrow.lib.RecordBatchReader: ...
-def arrow(rows_per_batch: int = 1000000, *, connection: DuckDBPyConnection = ...) -> pyarrow.lib.RecordBatchReader: ...
-def torch(*, connection: DuckDBPyConnection = ...) -> dict: ...
-def tf(*, connection: DuckDBPyConnection = ...) -> dict: ...
-def begin(*, connection: DuckDBPyConnection = ...) -> DuckDBPyConnection: ...
-def commit(*, connection: DuckDBPyConnection = ...) -> DuckDBPyConnection: ...
-def rollback(*, connection: DuckDBPyConnection = ...) -> DuckDBPyConnection: ...
-def checkpoint(*, connection: DuckDBPyConnection = ...) -> DuckDBPyConnection: ...
-def append(table_name: str, df: pandas.DataFrame, *, by_name: bool = False, connection: DuckDBPyConnection = ...) -> DuckDBPyConnection: ...
-def register(view_name: str, python_object: object, *, connection: DuckDBPyConnection = ...) -> DuckDBPyConnection: ...
-def unregister(view_name: str, *, connection: DuckDBPyConnection = ...) -> DuckDBPyConnection: ...
-def table(table_name: str, *, connection: DuckDBPyConnection = ...) -> DuckDBPyRelation: ...
-def view(view_name: str, *, connection: DuckDBPyConnection = ...) -> DuckDBPyRelation: ...
-def values(*args: Union[List[Any],Expression, Tuple[Expression]], connection: DuckDBPyConnection = ...) -> DuckDBPyRelation: ...
-def table_function(name: str, parameters: object = None, *, connection: DuckDBPyConnection = ...) -> DuckDBPyRelation: ...
-def read_json(path_or_buffer: Union[str, StringIO, TextIOBase], *, columns: Optional[Dict[str,str]] = None, sample_size: Optional[int] = None, maximum_depth: Optional[int] = None, records: Optional[str] = None, format: Optional[str] = None, date_format: Optional[str] = None, timestamp_format: Optional[str] = None, compression: Optional[str] = None, maximum_object_size: Optional[int] = None, ignore_errors: Optional[bool] = None, convert_strings_to_integers: Optional[bool] = None, field_appearance_threshold: Optional[float] = None, map_inference_threshold: Optional[int] = None, maximum_sample_files: Optional[int] = None, filename: Optional[bool | str] = None, hive_partitioning: Optional[bool] = None, union_by_name: Optional[bool] = None, hive_types: Optional[Dict[str, str]] = None, hive_types_autocast: Optional[bool] = None, connection: DuckDBPyConnection = ...) -> DuckDBPyRelation: ...
-def extract_statements(query: str, *, connection: DuckDBPyConnection = ...) -> List[Statement]: ...
-def sql(query: str, *, alias: str = "", params: object = None, connection: DuckDBPyConnection = ...) -> DuckDBPyRelation: ...
-def query(query: str, *, alias: str = "", params: object = None, connection: DuckDBPyConnection = ...) -> DuckDBPyRelation: ...
-def from_query(query: str, *, alias: str = "", params: object = None, connection: DuckDBPyConnection = ...) -> DuckDBPyRelation: ...
-def read_csv(path_or_buffer: Union[str, StringIO, TextIOBase], *, header: Optional[bool | int] = None, compression: Optional[str] = None, sep: Optional[str] = None, delimiter: Optional[str] = None, dtype: Optional[Dict[str, str] | List[str]] = None, na_values: Optional[str| List[str]] = None, skiprows: Optional[int] = None, quotechar: Optional[str] = None, escapechar: Optional[str] = None, encoding: Optional[str] = None, parallel: Optional[bool] = None, date_format: Optional[str] = None, timestamp_format: Optional[str] = None, sample_size: Optional[int] = None, all_varchar: Optional[bool] = None, normalize_names: Optional[bool] = None, null_padding: Optional[bool] = None, names: Optional[List[str]] = None, lineterminator: Optional[str] = None, columns: Optional[Dict[str, str]] = None, auto_type_candidates: Optional[List[str]] = None, max_line_size: Optional[int] = None, ignore_errors: Optional[bool] = None, store_rejects: Optional[bool] = None, rejects_table: Optional[str] = None, rejects_scan: Optional[str] = None, rejects_limit: Optional[int] = None, force_not_null: Optional[List[str]] = None, buffer_size: Optional[int] = None, decimal: Optional[str] = None, allow_quoted_nulls: Optional[bool] = None, filename: Optional[bool | str] = None, hive_partitioning: Optional[bool] = None, union_by_name: Optional[bool] = None, hive_types: Optional[Dict[str, str]] = None, hive_types_autocast: Optional[bool] = None, connection: DuckDBPyConnection = ...) -> DuckDBPyRelation: ...
-def from_csv_auto(path_or_buffer: Union[str, StringIO, TextIOBase], *, header: Optional[bool | int] = None, compression: Optional[str] = None, sep: Optional[str] = None, delimiter: Optional[str] = None, dtype: Optional[Dict[str, str] | List[str]] = None, na_values: Optional[str| List[str]] = None, skiprows: Optional[int] = None, quotechar: Optional[str] = None, escapechar: Optional[str] = None, encoding: Optional[str] = None, parallel: Optional[bool] = None, date_format: Optional[str] = None, timestamp_format: Optional[str] = None, sample_size: Optional[int] = None, all_varchar: Optional[bool] = None, normalize_names: Optional[bool] = None, null_padding: Optional[bool] = None, names: Optional[List[str]] = None, lineterminator: Optional[str] = None, columns: Optional[Dict[str, str]] = None, auto_type_candidates: Optional[List[str]] = None, max_line_size: Optional[int] = None, ignore_errors: Optional[bool] = None, store_rejects: Optional[bool] = None, rejects_table: Optional[str] = None, rejects_scan: Optional[str] = None, rejects_limit: Optional[int] = None, force_not_null: Optional[List[str]] = None, buffer_size: Optional[int] = None, decimal: Optional[str] = None, allow_quoted_nulls: Optional[bool] = None, filename: Optional[bool | str] = None, hive_partitioning: Optional[bool] = None, union_by_name: Optional[bool] = None, hive_types: Optional[Dict[str, str]] = None, hive_types_autocast: Optional[bool] = None, connection: DuckDBPyConnection = ...) -> DuckDBPyRelation: ...
-def from_df(df: pandas.DataFrame, *, connection: DuckDBPyConnection = ...) -> DuckDBPyRelation: ...
-def from_arrow(arrow_object: object, *, connection: DuckDBPyConnection = ...) -> DuckDBPyRelation: ...
-def from_parquet(file_glob: str, binary_as_string: bool = False, *, file_row_number: bool = False, filename: bool = False, hive_partitioning: bool = False, union_by_name: bool = False, compression: Optional[str] = None, connection: DuckDBPyConnection = ...) -> DuckDBPyRelation: ...
-def read_parquet(file_glob: str, binary_as_string: bool = False, *, file_row_number: bool = False, filename: bool = False, hive_partitioning: bool = False, union_by_name: bool = False, compression: Optional[str] = None, connection: DuckDBPyConnection = ...) -> DuckDBPyRelation: ...
-def get_table_names(query: str, *, qualified: bool = False, connection: DuckDBPyConnection = ...) -> Set[str]: ...
-def install_extension(extension: str, *, force_install: bool = False, repository: Optional[str] = None, repository_url: Optional[str] = None, version: Optional[str] = None, connection: DuckDBPyConnection = ...) -> None: ...
-def load_extension(extension: str, *, connection: DuckDBPyConnection = ...) -> None: ...
-def project(df: pandas.DataFrame, *args: str, groups: str = "", connection: DuckDBPyConnection = ...) -> DuckDBPyRelation: ...
-def distinct(df: pandas.DataFrame, *, connection: DuckDBPyConnection = ...) -> DuckDBPyRelation: ...
-def write_csv(df: pandas.DataFrame, filename: str, *, sep: Optional[str] = None, na_rep: Optional[str] = None, header: Optional[bool] = None, quotechar: Optional[str] = None, escapechar: Optional[str] = None, date_format: Optional[str] = None, timestamp_format: Optional[str] = None, quoting: Optional[str | int] = None, encoding: Optional[str] = None, compression: Optional[str] = None, overwrite: Optional[bool] = None, per_thread_output: Optional[bool] = None, use_tmp_file: Optional[bool] = None, partition_by: Optional[List[str]] = None, write_partition_columns: Optional[bool] = None, connection: DuckDBPyConnection = ...) -> None: ...
-def aggregate(df: pandas.DataFrame, aggr_expr: str | List[Expression], group_expr: str = "", *, connection: DuckDBPyConnection = ...) -> DuckDBPyRelation: ...
-def alias(df: pandas.DataFrame, alias: str, *, connection: DuckDBPyConnection = ...) -> DuckDBPyRelation: ...
-def filter(df: pandas.DataFrame, filter_expr: str, *, connection: DuckDBPyConnection = ...) -> DuckDBPyRelation: ...
-def limit(df: pandas.DataFrame, n: int, offset: int = 0, *, connection: DuckDBPyConnection = ...) -> DuckDBPyRelation: ...
-def order(df: pandas.DataFrame, order_expr: str, *, connection: DuckDBPyConnection = ...) -> DuckDBPyRelation: ...
-def query_df(df: pandas.DataFrame, virtual_table_name: str, sql_query: str, *, connection: DuckDBPyConnection = ...) -> DuckDBPyRelation: ...
-def description(*, connection: DuckDBPyConnection = ...) -> Optional[List[Any]]: ...
-def rowcount(*, connection: DuckDBPyConnection = ...) -> int: ...
-# END OF CONNECTION WRAPPER