polars-runtime-compat 1.34.0b3-cp39-abi3-macosx_11_0_arm64.whl → 1.34.0b5-cp39-abi3-macosx_11_0_arm64.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of polars-runtime-compat might be problematic.
- _polars_runtime_compat/_polars_runtime_compat.abi3.so +0 -0
- polars_runtime_compat-1.34.0b5.dist-info/METADATA +35 -0
- polars_runtime_compat-1.34.0b5.dist-info/RECORD +6 -0
- polars/__init__.py +0 -528
- polars/_cpu_check.py +0 -265
- polars/_dependencies.py +0 -355
- polars/_plr.py +0 -99
- polars/_plr.pyi +0 -2496
- polars/_reexport.py +0 -23
- polars/_typing.py +0 -478
- polars/_utils/__init__.py +0 -37
- polars/_utils/async_.py +0 -102
- polars/_utils/cache.py +0 -176
- polars/_utils/cloud.py +0 -40
- polars/_utils/constants.py +0 -29
- polars/_utils/construction/__init__.py +0 -46
- polars/_utils/construction/dataframe.py +0 -1397
- polars/_utils/construction/other.py +0 -72
- polars/_utils/construction/series.py +0 -560
- polars/_utils/construction/utils.py +0 -118
- polars/_utils/convert.py +0 -224
- polars/_utils/deprecation.py +0 -406
- polars/_utils/getitem.py +0 -457
- polars/_utils/logging.py +0 -11
- polars/_utils/nest_asyncio.py +0 -264
- polars/_utils/parquet.py +0 -15
- polars/_utils/parse/__init__.py +0 -12
- polars/_utils/parse/expr.py +0 -242
- polars/_utils/polars_version.py +0 -19
- polars/_utils/pycapsule.py +0 -53
- polars/_utils/scan.py +0 -27
- polars/_utils/serde.py +0 -63
- polars/_utils/slice.py +0 -215
- polars/_utils/udfs.py +0 -1251
- polars/_utils/unstable.py +0 -63
- polars/_utils/various.py +0 -782
- polars/_utils/wrap.py +0 -25
- polars/api.py +0 -370
- polars/catalog/__init__.py +0 -0
- polars/catalog/unity/__init__.py +0 -19
- polars/catalog/unity/client.py +0 -733
- polars/catalog/unity/models.py +0 -152
- polars/config.py +0 -1571
- polars/convert/__init__.py +0 -25
- polars/convert/general.py +0 -1046
- polars/convert/normalize.py +0 -261
- polars/dataframe/__init__.py +0 -5
- polars/dataframe/_html.py +0 -186
- polars/dataframe/frame.py +0 -12582
- polars/dataframe/group_by.py +0 -1067
- polars/dataframe/plotting.py +0 -257
- polars/datatype_expr/__init__.py +0 -5
- polars/datatype_expr/array.py +0 -56
- polars/datatype_expr/datatype_expr.py +0 -304
- polars/datatype_expr/list.py +0 -18
- polars/datatype_expr/struct.py +0 -69
- polars/datatypes/__init__.py +0 -122
- polars/datatypes/_parse.py +0 -195
- polars/datatypes/_utils.py +0 -48
- polars/datatypes/classes.py +0 -1213
- polars/datatypes/constants.py +0 -11
- polars/datatypes/constructor.py +0 -172
- polars/datatypes/convert.py +0 -366
- polars/datatypes/group.py +0 -130
- polars/exceptions.py +0 -230
- polars/expr/__init__.py +0 -7
- polars/expr/array.py +0 -964
- polars/expr/binary.py +0 -346
- polars/expr/categorical.py +0 -306
- polars/expr/datetime.py +0 -2620
- polars/expr/expr.py +0 -11272
- polars/expr/list.py +0 -1408
- polars/expr/meta.py +0 -444
- polars/expr/name.py +0 -321
- polars/expr/string.py +0 -3045
- polars/expr/struct.py +0 -357
- polars/expr/whenthen.py +0 -185
- polars/functions/__init__.py +0 -193
- polars/functions/aggregation/__init__.py +0 -33
- polars/functions/aggregation/horizontal.py +0 -298
- polars/functions/aggregation/vertical.py +0 -341
- polars/functions/as_datatype.py +0 -848
- polars/functions/business.py +0 -138
- polars/functions/col.py +0 -384
- polars/functions/datatype.py +0 -121
- polars/functions/eager.py +0 -524
- polars/functions/escape_regex.py +0 -29
- polars/functions/lazy.py +0 -2751
- polars/functions/len.py +0 -68
- polars/functions/lit.py +0 -210
- polars/functions/random.py +0 -22
- polars/functions/range/__init__.py +0 -19
- polars/functions/range/_utils.py +0 -15
- polars/functions/range/date_range.py +0 -303
- polars/functions/range/datetime_range.py +0 -370
- polars/functions/range/int_range.py +0 -348
- polars/functions/range/linear_space.py +0 -311
- polars/functions/range/time_range.py +0 -287
- polars/functions/repeat.py +0 -301
- polars/functions/whenthen.py +0 -353
- polars/interchange/__init__.py +0 -10
- polars/interchange/buffer.py +0 -77
- polars/interchange/column.py +0 -190
- polars/interchange/dataframe.py +0 -230
- polars/interchange/from_dataframe.py +0 -328
- polars/interchange/protocol.py +0 -303
- polars/interchange/utils.py +0 -170
- polars/io/__init__.py +0 -64
- polars/io/_utils.py +0 -317
- polars/io/avro.py +0 -49
- polars/io/clipboard.py +0 -36
- polars/io/cloud/__init__.py +0 -17
- polars/io/cloud/_utils.py +0 -80
- polars/io/cloud/credential_provider/__init__.py +0 -17
- polars/io/cloud/credential_provider/_builder.py +0 -520
- polars/io/cloud/credential_provider/_providers.py +0 -618
- polars/io/csv/__init__.py +0 -9
- polars/io/csv/_utils.py +0 -38
- polars/io/csv/batched_reader.py +0 -142
- polars/io/csv/functions.py +0 -1495
- polars/io/database/__init__.py +0 -6
- polars/io/database/_arrow_registry.py +0 -70
- polars/io/database/_cursor_proxies.py +0 -147
- polars/io/database/_executor.py +0 -578
- polars/io/database/_inference.py +0 -314
- polars/io/database/_utils.py +0 -144
- polars/io/database/functions.py +0 -516
- polars/io/delta.py +0 -499
- polars/io/iceberg/__init__.py +0 -3
- polars/io/iceberg/_utils.py +0 -697
- polars/io/iceberg/dataset.py +0 -556
- polars/io/iceberg/functions.py +0 -151
- polars/io/ipc/__init__.py +0 -8
- polars/io/ipc/functions.py +0 -514
- polars/io/json/__init__.py +0 -3
- polars/io/json/read.py +0 -101
- polars/io/ndjson.py +0 -332
- polars/io/parquet/__init__.py +0 -17
- polars/io/parquet/field_overwrites.py +0 -140
- polars/io/parquet/functions.py +0 -722
- polars/io/partition.py +0 -491
- polars/io/plugins.py +0 -187
- polars/io/pyarrow_dataset/__init__.py +0 -5
- polars/io/pyarrow_dataset/anonymous_scan.py +0 -109
- polars/io/pyarrow_dataset/functions.py +0 -79
- polars/io/scan_options/__init__.py +0 -5
- polars/io/scan_options/_options.py +0 -59
- polars/io/scan_options/cast_options.py +0 -126
- polars/io/spreadsheet/__init__.py +0 -6
- polars/io/spreadsheet/_utils.py +0 -52
- polars/io/spreadsheet/_write_utils.py +0 -647
- polars/io/spreadsheet/functions.py +0 -1323
- polars/lazyframe/__init__.py +0 -9
- polars/lazyframe/engine_config.py +0 -61
- polars/lazyframe/frame.py +0 -8564
- polars/lazyframe/group_by.py +0 -669
- polars/lazyframe/in_process.py +0 -42
- polars/lazyframe/opt_flags.py +0 -333
- polars/meta/__init__.py +0 -14
- polars/meta/build.py +0 -33
- polars/meta/index_type.py +0 -27
- polars/meta/thread_pool.py +0 -50
- polars/meta/versions.py +0 -120
- polars/ml/__init__.py +0 -0
- polars/ml/torch.py +0 -213
- polars/ml/utilities.py +0 -30
- polars/plugins.py +0 -155
- polars/py.typed +0 -0
- polars/pyproject.toml +0 -103
- polars/schema.py +0 -265
- polars/selectors.py +0 -3117
- polars/series/__init__.py +0 -5
- polars/series/array.py +0 -776
- polars/series/binary.py +0 -254
- polars/series/categorical.py +0 -246
- polars/series/datetime.py +0 -2275
- polars/series/list.py +0 -1087
- polars/series/plotting.py +0 -191
- polars/series/series.py +0 -9197
- polars/series/string.py +0 -2367
- polars/series/struct.py +0 -154
- polars/series/utils.py +0 -191
- polars/sql/__init__.py +0 -7
- polars/sql/context.py +0 -677
- polars/sql/functions.py +0 -139
- polars/string_cache.py +0 -185
- polars/testing/__init__.py +0 -13
- polars/testing/asserts/__init__.py +0 -9
- polars/testing/asserts/frame.py +0 -231
- polars/testing/asserts/series.py +0 -219
- polars/testing/asserts/utils.py +0 -12
- polars/testing/parametric/__init__.py +0 -33
- polars/testing/parametric/profiles.py +0 -107
- polars/testing/parametric/strategies/__init__.py +0 -22
- polars/testing/parametric/strategies/_utils.py +0 -14
- polars/testing/parametric/strategies/core.py +0 -615
- polars/testing/parametric/strategies/data.py +0 -452
- polars/testing/parametric/strategies/dtype.py +0 -436
- polars/testing/parametric/strategies/legacy.py +0 -169
- polars/type_aliases.py +0 -24
- polars_runtime_compat-1.34.0b3.dist-info/METADATA +0 -190
- polars_runtime_compat-1.34.0b3.dist-info/RECORD +0 -203
- {polars_runtime_compat-1.34.0b3.dist-info → polars_runtime_compat-1.34.0b5.dist-info}/WHEEL +0 -0
- {polars_runtime_compat-1.34.0b3.dist-info → polars_runtime_compat-1.34.0b5.dist-info}/licenses/LICENSE +0 -0
polars/io/database/__init__.py
DELETED
polars/io/database/_arrow_registry.py
DELETED
@@ -1,70 +0,0 @@
-from __future__ import annotations
-
-from typing import TypedDict
-
-
-class ArrowDriverProperties(TypedDict):
-    # name of the method that fetches all arrow data; tuple form
-    # calls the fetch_all method with the given chunk size (int)
-    fetch_all: str
-    # name of the method that fetches arrow data in batches
-    fetch_batches: str | None
-    # indicate whether the given batch size is respected exactly
-    exact_batch_size: bool | None
-    # repeat batch calls (if False, the batch call is a generator)
-    repeat_batch_calls: bool
-    # if arrow/polars functionality requires a minimum module version
-    minimum_version: str | None
-
-
-ARROW_DRIVER_REGISTRY: dict[str, ArrowDriverProperties] = {
-    "adbc_.*": {
-        "fetch_all": "fetch_arrow_table",
-        "fetch_batches": None,
-        "exact_batch_size": None,
-        "repeat_batch_calls": False,
-        "minimum_version": None,
-    },
-    "arrow_odbc_proxy": {
-        "fetch_all": "fetch_arrow_table",
-        "fetch_batches": "fetch_record_batches",
-        "exact_batch_size": True,
-        "repeat_batch_calls": False,
-        "minimum_version": None,
-    },
-    "databricks": {
-        "fetch_all": "fetchall_arrow",
-        "fetch_batches": "fetchmany_arrow",
-        "exact_batch_size": True,
-        "repeat_batch_calls": True,
-        "minimum_version": None,
-    },
-    "duckdb": {
-        "fetch_all": "fetch_arrow_table",
-        "fetch_batches": "fetch_record_batch",
-        "exact_batch_size": True,
-        "repeat_batch_calls": False,
-        "minimum_version": None,
-    },
-    "kuzu": {
-        "fetch_all": "get_as_pl",
-        "fetch_batches": None,
-        "exact_batch_size": None,
-        "repeat_batch_calls": False,
-        "minimum_version": "0.3.2",
-    },
-    "snowflake": {
-        "fetch_all": "fetch_arrow_all",
-        "fetch_batches": "fetch_arrow_batches",
-        "exact_batch_size": False,
-        "repeat_batch_calls": False,
-        "minimum_version": None,
-    },
-    "turbodbc": {
-        "fetch_all": "fetchallarrow",
-        "fetch_batches": "fetcharrowbatches",
-        "exact_batch_size": False,
-        "repeat_batch_calls": False,
-        "minimum_version": None,
-    },
-}
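For context on the deleted registry above: each key is a regex pattern for a driver or cursor module name, and each entry names the Arrow fetch methods that the database executor (see _executor.py in the file list) resolves by attribute lookup on the live cursor. The snippet below is a minimal sketch of that lookup, assuming the ArrowDriverProperties/ARROW_DRIVER_REGISTRY definitions shown above are in scope; resolve_arrow_driver is an illustrative helper, not part of the package.

from __future__ import annotations

import re


def resolve_arrow_driver(module_name: str) -> ArrowDriverProperties | None:
    # Hypothetical helper: return the first registry entry whose regex key
    # matches the cursor's module name (e.g. "adbc_driver_sqlite", "duckdb").
    for pattern, props in ARROW_DRIVER_REGISTRY.items():
        if re.match(pattern, module_name):
            return props
    return None


props = resolve_arrow_driver("adbc_driver_postgresql")
if props is not None:
    # An executor would getattr() the named methods from the cursor object,
    # e.g. cursor.fetch_arrow_table(), since the "adbc_.*" pattern matched.
    print(props["fetch_all"], props["fetch_batches"])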
polars/io/database/_cursor_proxies.py
DELETED
@@ -1,147 +0,0 @@
-from __future__ import annotations
-
-from typing import TYPE_CHECKING, Any
-
-from polars._dependencies import import_optional
-from polars.io.database._utils import _run_async
-
-if TYPE_CHECKING:
-    import sys
-    from collections.abc import Coroutine, Iterable
-
-    import pyarrow as pa
-
-    if sys.version_info >= (3, 11):
-        from typing import Self
-    else:
-        from typing_extensions import Self
-
-
-class ODBCCursorProxy:
-    """Cursor proxy for ODBC connections (requires `arrow-odbc`)."""
-
-    def __init__(self, connection_string: str) -> None:
-        self.connection_string = connection_string
-        self.execute_options: dict[str, Any] = {}
-        self.query: str | None = None
-
-    def close(self) -> None:
-        """Close the cursor."""
-        # n/a: nothing to close
-
-    def execute(self, query: str, **execute_options: Any) -> None:
-        """Execute a query (n/a: just store query for the fetch* methods)."""
-        self.execute_options = execute_options
-        self.query = query
-
-    def fetch_arrow_table(
-        self, batch_size: int = 10_000, *, fetch_all: bool = False
-    ) -> pa.Table:
-        """Fetch all results as a pyarrow Table."""
-        from pyarrow import Table
-
-        return Table.from_batches(
-            self.fetch_record_batches(batch_size=batch_size, fetch_all=True)
-        )
-
-    def fetch_record_batches(
-        self, batch_size: int = 10_000, *, fetch_all: bool = False
-    ) -> Iterable[pa.RecordBatch]:
-        """Fetch results as an iterable of RecordBatches."""
-        from arrow_odbc import read_arrow_batches_from_odbc
-        from pyarrow import RecordBatch
-
-        n_batches = 0
-        batch_reader = read_arrow_batches_from_odbc(
-            query=self.query,
-            batch_size=batch_size,
-            connection_string=self.connection_string,
-            **self.execute_options,
-        )
-        for batch in batch_reader:
-            yield batch
-            n_batches += 1
-
-        if n_batches == 0 and fetch_all:
-            # empty result set; return empty batch with accurate schema
-            yield RecordBatch.from_pylist([], schema=batch_reader.schema)
-
-    # note: internally arrow-odbc always reads batches
-    fetchall = fetch_arrow_table
-    fetchmany = fetch_record_batches
-
-
-class SurrealDBCursorProxy:
-    """Cursor proxy for both SurrealDB and AsyncSurrealDB connections."""
-
-    _cached_result: list[dict[str, Any]] | None = None
-
-    def __init__(self, client: Any) -> None:
-        surrealdb = import_optional("surrealdb")
-        self.is_async = isinstance(client, surrealdb.AsyncSurrealDB)
-        self.execute_options: dict[str, Any] = {}
-        self.client = client
-        self.query: str = None  # type: ignore[assignment]
-
-    @staticmethod
-    async def _unpack_result_async(
-        result: Coroutine[Any, Any, list[dict[str, Any]]],
-    ) -> Coroutine[Any, Any, list[dict[str, Any]]]:
-        """Unpack the async query result."""
-        response = (await result)[0]
-        if response["status"] != "OK":
-            raise RuntimeError(response["result"])
-        return response["result"]

-    @staticmethod
-    def _unpack_result(
-        result: list[dict[str, Any]],
-    ) -> list[dict[str, Any]]:
-        """Unpack the query result."""
-        response = result[0]
-        if response["status"] != "OK":
-            raise RuntimeError(response["result"])
-        return response["result"]
-
-    def close(self) -> None:
-        """Close the cursor."""
-        # no-op; never close a user's Surreal session
-
-    def execute(self, query: str, **execute_options: Any) -> Self:
-        """Execute a query (n/a: just store query for the fetch* methods)."""
-        self._cached_result = None
-        self.execute_options = execute_options
-        self.query = query
-        return self
-
-    def fetchall(self) -> list[dict[str, Any]]:
-        """Fetch all results (as a list of dictionaries)."""
-        return (
-            _run_async(
-                self._unpack_result_async(
-                    result=self.client.query(
-                        query=self.query,
-                        variables=(self.execute_options or None),
-                    ),
-                )
-            )
-            if self.is_async
-            else self._unpack_result(
-                result=self.client.query(
-                    query=self.query,
-                    variables=(self.execute_options or None),
-                ),
-            )
-        )
-
-    def fetchmany(self, size: int) -> list[dict[str, Any]]:
-        """Fetch results in batches (simulated)."""
-        # first 'fetchmany' call acquires/caches the result object
-        if self._cached_result is None:
-            self._cached_result = self.fetchall()
-
-        # return batches from the result, actively removing from the cache
-        # as we go, so as not to hold on to additional copies when done
-        result = self._cached_result[:size]
-        del self._cached_result[:size]
-        return result