polars-runtime-compat 1.34.0b2__cp39-abi3-macosx_11_0_arm64.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of polars-runtime-compat might be problematic.
- _polars_runtime_compat/.gitkeep +0 -0
- _polars_runtime_compat/_polars_runtime_compat.abi3.so +0 -0
- polars/__init__.py +528 -0
- polars/_cpu_check.py +265 -0
- polars/_dependencies.py +355 -0
- polars/_plr.py +99 -0
- polars/_plr.pyi +2496 -0
- polars/_reexport.py +23 -0
- polars/_typing.py +478 -0
- polars/_utils/__init__.py +37 -0
- polars/_utils/async_.py +102 -0
- polars/_utils/cache.py +176 -0
- polars/_utils/cloud.py +40 -0
- polars/_utils/constants.py +29 -0
- polars/_utils/construction/__init__.py +46 -0
- polars/_utils/construction/dataframe.py +1397 -0
- polars/_utils/construction/other.py +72 -0
- polars/_utils/construction/series.py +560 -0
- polars/_utils/construction/utils.py +118 -0
- polars/_utils/convert.py +224 -0
- polars/_utils/deprecation.py +406 -0
- polars/_utils/getitem.py +457 -0
- polars/_utils/logging.py +11 -0
- polars/_utils/nest_asyncio.py +264 -0
- polars/_utils/parquet.py +15 -0
- polars/_utils/parse/__init__.py +12 -0
- polars/_utils/parse/expr.py +242 -0
- polars/_utils/polars_version.py +19 -0
- polars/_utils/pycapsule.py +53 -0
- polars/_utils/scan.py +27 -0
- polars/_utils/serde.py +63 -0
- polars/_utils/slice.py +215 -0
- polars/_utils/udfs.py +1251 -0
- polars/_utils/unstable.py +63 -0
- polars/_utils/various.py +782 -0
- polars/_utils/wrap.py +25 -0
- polars/api.py +370 -0
- polars/catalog/__init__.py +0 -0
- polars/catalog/unity/__init__.py +19 -0
- polars/catalog/unity/client.py +733 -0
- polars/catalog/unity/models.py +152 -0
- polars/config.py +1571 -0
- polars/convert/__init__.py +25 -0
- polars/convert/general.py +1046 -0
- polars/convert/normalize.py +261 -0
- polars/dataframe/__init__.py +5 -0
- polars/dataframe/_html.py +186 -0
- polars/dataframe/frame.py +12582 -0
- polars/dataframe/group_by.py +1067 -0
- polars/dataframe/plotting.py +257 -0
- polars/datatype_expr/__init__.py +5 -0
- polars/datatype_expr/array.py +56 -0
- polars/datatype_expr/datatype_expr.py +304 -0
- polars/datatype_expr/list.py +18 -0
- polars/datatype_expr/struct.py +69 -0
- polars/datatypes/__init__.py +122 -0
- polars/datatypes/_parse.py +195 -0
- polars/datatypes/_utils.py +48 -0
- polars/datatypes/classes.py +1213 -0
- polars/datatypes/constants.py +11 -0
- polars/datatypes/constructor.py +172 -0
- polars/datatypes/convert.py +366 -0
- polars/datatypes/group.py +130 -0
- polars/exceptions.py +230 -0
- polars/expr/__init__.py +7 -0
- polars/expr/array.py +964 -0
- polars/expr/binary.py +346 -0
- polars/expr/categorical.py +306 -0
- polars/expr/datetime.py +2620 -0
- polars/expr/expr.py +11272 -0
- polars/expr/list.py +1408 -0
- polars/expr/meta.py +444 -0
- polars/expr/name.py +321 -0
- polars/expr/string.py +3045 -0
- polars/expr/struct.py +357 -0
- polars/expr/whenthen.py +185 -0
- polars/functions/__init__.py +193 -0
- polars/functions/aggregation/__init__.py +33 -0
- polars/functions/aggregation/horizontal.py +298 -0
- polars/functions/aggregation/vertical.py +341 -0
- polars/functions/as_datatype.py +848 -0
- polars/functions/business.py +138 -0
- polars/functions/col.py +384 -0
- polars/functions/datatype.py +121 -0
- polars/functions/eager.py +524 -0
- polars/functions/escape_regex.py +29 -0
- polars/functions/lazy.py +2751 -0
- polars/functions/len.py +68 -0
- polars/functions/lit.py +210 -0
- polars/functions/random.py +22 -0
- polars/functions/range/__init__.py +19 -0
- polars/functions/range/_utils.py +15 -0
- polars/functions/range/date_range.py +303 -0
- polars/functions/range/datetime_range.py +370 -0
- polars/functions/range/int_range.py +348 -0
- polars/functions/range/linear_space.py +311 -0
- polars/functions/range/time_range.py +287 -0
- polars/functions/repeat.py +301 -0
- polars/functions/whenthen.py +353 -0
- polars/interchange/__init__.py +10 -0
- polars/interchange/buffer.py +77 -0
- polars/interchange/column.py +190 -0
- polars/interchange/dataframe.py +230 -0
- polars/interchange/from_dataframe.py +328 -0
- polars/interchange/protocol.py +303 -0
- polars/interchange/utils.py +170 -0
- polars/io/__init__.py +64 -0
- polars/io/_utils.py +317 -0
- polars/io/avro.py +49 -0
- polars/io/clipboard.py +36 -0
- polars/io/cloud/__init__.py +17 -0
- polars/io/cloud/_utils.py +80 -0
- polars/io/cloud/credential_provider/__init__.py +17 -0
- polars/io/cloud/credential_provider/_builder.py +520 -0
- polars/io/cloud/credential_provider/_providers.py +618 -0
- polars/io/csv/__init__.py +9 -0
- polars/io/csv/_utils.py +38 -0
- polars/io/csv/batched_reader.py +142 -0
- polars/io/csv/functions.py +1495 -0
- polars/io/database/__init__.py +6 -0
- polars/io/database/_arrow_registry.py +70 -0
- polars/io/database/_cursor_proxies.py +147 -0
- polars/io/database/_executor.py +578 -0
- polars/io/database/_inference.py +314 -0
- polars/io/database/_utils.py +144 -0
- polars/io/database/functions.py +516 -0
- polars/io/delta.py +499 -0
- polars/io/iceberg/__init__.py +3 -0
- polars/io/iceberg/_utils.py +697 -0
- polars/io/iceberg/dataset.py +556 -0
- polars/io/iceberg/functions.py +151 -0
- polars/io/ipc/__init__.py +8 -0
- polars/io/ipc/functions.py +514 -0
- polars/io/json/__init__.py +3 -0
- polars/io/json/read.py +101 -0
- polars/io/ndjson.py +332 -0
- polars/io/parquet/__init__.py +17 -0
- polars/io/parquet/field_overwrites.py +140 -0
- polars/io/parquet/functions.py +722 -0
- polars/io/partition.py +491 -0
- polars/io/plugins.py +187 -0
- polars/io/pyarrow_dataset/__init__.py +5 -0
- polars/io/pyarrow_dataset/anonymous_scan.py +109 -0
- polars/io/pyarrow_dataset/functions.py +79 -0
- polars/io/scan_options/__init__.py +5 -0
- polars/io/scan_options/_options.py +59 -0
- polars/io/scan_options/cast_options.py +126 -0
- polars/io/spreadsheet/__init__.py +6 -0
- polars/io/spreadsheet/_utils.py +52 -0
- polars/io/spreadsheet/_write_utils.py +647 -0
- polars/io/spreadsheet/functions.py +1323 -0
- polars/lazyframe/__init__.py +9 -0
- polars/lazyframe/engine_config.py +61 -0
- polars/lazyframe/frame.py +8564 -0
- polars/lazyframe/group_by.py +669 -0
- polars/lazyframe/in_process.py +42 -0
- polars/lazyframe/opt_flags.py +333 -0
- polars/meta/__init__.py +14 -0
- polars/meta/build.py +33 -0
- polars/meta/index_type.py +27 -0
- polars/meta/thread_pool.py +50 -0
- polars/meta/versions.py +120 -0
- polars/ml/__init__.py +0 -0
- polars/ml/torch.py +213 -0
- polars/ml/utilities.py +30 -0
- polars/plugins.py +155 -0
- polars/py.typed +0 -0
- polars/pyproject.toml +96 -0
- polars/schema.py +265 -0
- polars/selectors.py +3117 -0
- polars/series/__init__.py +5 -0
- polars/series/array.py +776 -0
- polars/series/binary.py +254 -0
- polars/series/categorical.py +246 -0
- polars/series/datetime.py +2275 -0
- polars/series/list.py +1087 -0
- polars/series/plotting.py +191 -0
- polars/series/series.py +9197 -0
- polars/series/string.py +2367 -0
- polars/series/struct.py +154 -0
- polars/series/utils.py +191 -0
- polars/sql/__init__.py +7 -0
- polars/sql/context.py +677 -0
- polars/sql/functions.py +139 -0
- polars/string_cache.py +185 -0
- polars/testing/__init__.py +13 -0
- polars/testing/asserts/__init__.py +9 -0
- polars/testing/asserts/frame.py +231 -0
- polars/testing/asserts/series.py +219 -0
- polars/testing/asserts/utils.py +12 -0
- polars/testing/parametric/__init__.py +33 -0
- polars/testing/parametric/profiles.py +107 -0
- polars/testing/parametric/strategies/__init__.py +22 -0
- polars/testing/parametric/strategies/_utils.py +14 -0
- polars/testing/parametric/strategies/core.py +615 -0
- polars/testing/parametric/strategies/data.py +452 -0
- polars/testing/parametric/strategies/dtype.py +436 -0
- polars/testing/parametric/strategies/legacy.py +169 -0
- polars/type_aliases.py +24 -0
- polars_runtime_compat-1.34.0b2.dist-info/METADATA +190 -0
- polars_runtime_compat-1.34.0b2.dist-info/RECORD +203 -0
- polars_runtime_compat-1.34.0b2.dist-info/WHEEL +4 -0
- polars_runtime_compat-1.34.0b2.dist-info/licenses/LICENSE +20 -0
polars/lazyframe/in_process.py
ADDED
@@ -0,0 +1,42 @@
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
+
+from polars._utils.wrap import wrap_df
+
+if TYPE_CHECKING:
+    from polars import DataFrame
+    from polars._plr import PyInProcessQuery
+
+
+class InProcessQuery:
+    """
+    A placeholder for an in process query.
+
+    This can be used to do something else while a query is running.
+    The queries can be cancelled. You can peek if the query is finished,
+    or you can await the result.
+    """
+
+    def __init__(self, ipq: PyInProcessQuery) -> None:
+        self._inner = ipq
+
+    def cancel(self) -> None:
+        """Cancel the query at earliest convenience."""
+        self._inner.cancel()
+
+    def fetch(self) -> DataFrame | None:
+        """
+        Fetch the result.
+
+        If it is ready, a materialized DataFrame is returned.
+        If it is not ready it will return `None`.
+        """
+        if (out := self._inner.fetch()) is not None:
+            return wrap_df(out)
+        else:
+            return None
+
+    def fetch_blocking(self) -> DataFrame:
+        """Await the result synchronously."""
+        return wrap_df(self._inner.fetch_blocking())
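For context, a minimal usage sketch of the class above. It assumes, as in upstream Polars, that `LazyFrame.collect(background=True)` returns an `InProcessQuery`; that call is not part of this diff, so treat it as an assumption.

import time

import polars as pl

lf = pl.LazyFrame({"a": [1, 2, 3]}).select(pl.col("a").sum())

# Assumed entry point: start the query in the background.
query = lf.collect(background=True)

# Poll for the result while doing other work; fetch() returns None until ready.
while (df := query.fetch()) is None:
    time.sleep(0.1)  # do other work here

print(df)

# Alternatively, block until the result is ready (or cancel() it instead):
# df = lf.collect(background=True).fetch_blocking()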
polars/lazyframe/opt_flags.py
ADDED
@@ -0,0 +1,333 @@
+from __future__ import annotations
+
+import contextlib
+
+from polars._utils.deprecation import issue_deprecation_warning
+
+with contextlib.suppress(ImportError):  # Module not available when building docs
+    from polars._plr import PyOptFlags
+
+import inspect
+from functools import wraps
+from typing import TYPE_CHECKING, Callable, TypeVar
+
+if TYPE_CHECKING:
+    import sys
+
+    if sys.version_info >= (3, 10):
+        from typing import ParamSpec
+    else:
+        from typing_extensions import ParamSpec
+
+    P = ParamSpec("P")
+    T = TypeVar("T")
+
+
+class QueryOptFlags:
+    """
+    The set of the optimizations considered during query optimization.
+
+    .. warning::
+        This functionality is considered **unstable**. It may be changed
+        at any point without it being considered a breaking change.
+    """
+
+    def __init__(
+        self,
+        *,
+        predicate_pushdown: None | bool = None,
+        projection_pushdown: None | bool = None,
+        simplify_expression: None | bool = None,
+        slice_pushdown: None | bool = None,
+        comm_subplan_elim: None | bool = None,
+        comm_subexpr_elim: None | bool = None,
+        cluster_with_columns: None | bool = None,
+        collapse_joins: None | bool = None,
+        check_order_observe: None | bool = None,
+        fast_projection: None | bool = None,
+    ) -> None:
+        self._pyoptflags = PyOptFlags.default()
+        self.update(
+            predicate_pushdown=predicate_pushdown,
+            projection_pushdown=projection_pushdown,
+            simplify_expression=simplify_expression,
+            slice_pushdown=slice_pushdown,
+            comm_subplan_elim=comm_subplan_elim,
+            comm_subexpr_elim=comm_subexpr_elim,
+            cluster_with_columns=cluster_with_columns,
+            collapse_joins=collapse_joins,
+            check_order_observe=check_order_observe,
+            fast_projection=fast_projection,
+        )
+
+    @classmethod
+    def _from_pyoptflags(self, pyoptflags: PyOptFlags) -> QueryOptFlags:
+        optflags = self.__new__(self)
+        optflags._pyoptflags = pyoptflags
+        return optflags
+
+    @staticmethod
+    def none(
+        *,
+        predicate_pushdown: None | bool = None,
+        projection_pushdown: None | bool = None,
+        simplify_expression: None | bool = None,
+        slice_pushdown: None | bool = None,
+        comm_subplan_elim: None | bool = None,
+        comm_subexpr_elim: None | bool = None,
+        cluster_with_columns: None | bool = None,
+        collapse_joins: None | bool = None,
+        check_order_observe: None | bool = None,
+        fast_projection: None | bool = None,
+    ) -> QueryOptFlags:
+        """Create new empty set off optimizations."""
+        optflags = QueryOptFlags()
+        optflags.no_optimizations()
+        return optflags.update(
+            predicate_pushdown=predicate_pushdown,
+            projection_pushdown=projection_pushdown,
+            simplify_expression=simplify_expression,
+            slice_pushdown=slice_pushdown,
+            comm_subplan_elim=comm_subplan_elim,
+            comm_subexpr_elim=comm_subexpr_elim,
+            cluster_with_columns=cluster_with_columns,
+            collapse_joins=collapse_joins,
+            check_order_observe=check_order_observe,
+            fast_projection=fast_projection,
+        )
+
+    def update(
+        self,
+        *,
+        predicate_pushdown: None | bool = None,
+        projection_pushdown: None | bool = None,
+        simplify_expression: None | bool = None,
+        slice_pushdown: None | bool = None,
+        comm_subplan_elim: None | bool = None,
+        comm_subexpr_elim: None | bool = None,
+        cluster_with_columns: None | bool = None,
+        collapse_joins: None | bool = None,
+        check_order_observe: None | bool = None,
+        fast_projection: None | bool = None,
+    ) -> QueryOptFlags:
+        """Update the current optimization flags."""
+        if predicate_pushdown is not None:
+            self.predicate_pushdown = predicate_pushdown
+        if projection_pushdown is not None:
+            self.projection_pushdown = projection_pushdown
+        if simplify_expression is not None:
+            self.simplify_expression = simplify_expression
+        if slice_pushdown is not None:
+            self.slice_pushdown = slice_pushdown
+        if comm_subplan_elim is not None:
+            self.comm_subplan_elim = comm_subplan_elim
+        if comm_subexpr_elim is not None:
+            self.comm_subexpr_elim = comm_subexpr_elim
+        if cluster_with_columns is not None:
+            self.cluster_with_columns = cluster_with_columns
+        if collapse_joins is not None:
+            issue_deprecation_warning(
+                "the `collapse_joins` parameter for `QueryOptFlags` is deprecated. "
+                "Use `predicate_pushdown` instead.",
+                version="1.33.1",
+            )
+            if not collapse_joins:
+                self.predicate_pushdown = False
+        if check_order_observe is not None:
+            self.check_order_observe = check_order_observe
+        if fast_projection is not None:
+            self.fast_projection = fast_projection
+
+        return self
+
+    @staticmethod
+    def _eager() -> QueryOptFlags:
+        """Create new empty set off optimizations."""
+        optflags = QueryOptFlags()
+        optflags.no_optimizations()
+        optflags._pyoptflags.eager = True
+        optflags.simplify_expression = True
+        return optflags
+
+    def __copy__(self) -> QueryOptFlags:
+        return QueryOptFlags._from_pyoptflags(self._pyoptflags.copy())
+
+    def __deepcopy__(self) -> QueryOptFlags:
+        return QueryOptFlags._from_pyoptflags(self._pyoptflags.copy())
+
+    def no_optimizations(self) -> None:
+        """Remove selected optimizations."""
+        self._pyoptflags.no_optimizations()
+
+    @property
+    def projection_pushdown(self) -> bool:
+        """Only read columns that are used later in the query."""
+        return self._pyoptflags.projection_pushdown
+
+    @projection_pushdown.setter
+    def projection_pushdown(self, value: bool) -> None:
+        self._pyoptflags.projection_pushdown = value
+
+    @property
+    def predicate_pushdown(self) -> bool:
+        """Apply predicates/filters as early as possible."""
+        return self._pyoptflags.predicate_pushdown
+
+    @predicate_pushdown.setter
+    def predicate_pushdown(self, value: bool) -> None:
+        self._pyoptflags.predicate_pushdown = value
+
+    @property
+    def cluster_with_columns(self) -> bool:
+        """Cluster sequential `with_columns` calls to independent calls."""
+        return self._pyoptflags.cluster_with_columns
+
+    @cluster_with_columns.setter
+    def cluster_with_columns(self, value: bool) -> None:
+        self._pyoptflags.cluster_with_columns = value
+
+    @property
+    def simplify_expression(self) -> bool:
+        """Run many expression optimization rules until fixed point."""
+        return self._pyoptflags.simplify_expression
+
+    @simplify_expression.setter
+    def simplify_expression(self, value: bool) -> None:
+        self._pyoptflags.simplify_expression = value
+
+    @property
+    def slice_pushdown(self) -> bool:
+        """Pushdown slices/limits."""
+        return self._pyoptflags.slice_pushdown
+
+    @slice_pushdown.setter
+    def slice_pushdown(self, value: bool) -> None:
+        self._pyoptflags.slice_pushdown = value
+
+    @property
+    def comm_subplan_elim(self) -> bool:
+        """Elide duplicate plans and caches their outputs."""
+        return self._pyoptflags.comm_subplan_elim
+
+    @comm_subplan_elim.setter
+    def comm_subplan_elim(self, value: bool) -> None:
+        self._pyoptflags.comm_subplan_elim = value
+
+    @property
+    def comm_subexpr_elim(self) -> bool:
+        """Elide duplicate expressions and caches their outputs."""
+        return self._pyoptflags.comm_subexpr_elim
+
+    @comm_subexpr_elim.setter
+    def comm_subexpr_elim(self, value: bool) -> None:
+        self._pyoptflags.comm_subexpr_elim = value
+
+    @property
+    def check_order_observe(self) -> bool:
+        """Do not maintain order if the order would not be observed."""
+        return self._pyoptflags.check_order_observe
+
+    @check_order_observe.setter
+    def check_order_observe(self, value: bool) -> None:
+        self._pyoptflags.check_order_observe = value
+
+    @property
+    def fast_projection(self) -> bool:
+        """Replace simple projections with a faster inlined projection that skips the expression engine."""  # noqa: W505
+        return self._pyoptflags.fast_projection
+
+    @fast_projection.setter
+    def fast_projection(self, value: bool) -> None:
+        self._pyoptflags.fast_projection = value
+
+    def __str__(self) -> str:
+        return f"""
+QueryOptFlags {{
+    type_coercion: {self._pyoptflags.type_coercion}
+    type_check: {self._pyoptflags.type_check}
+
+    predicate_pushdown: {self.predicate_pushdown}
+    projection_pushdown: {self.projection_pushdown}
+    simplify_expression: {self.simplify_expression}
+    slice_pushdown: {self.slice_pushdown}
+    comm_subplan_elim: {self.comm_subplan_elim}
+    comm_subexpr_elim: {self.comm_subexpr_elim}
+    cluster_with_columns: {self.cluster_with_columns}
+    check_order_observe: {self.check_order_observe}
+    fast_projection: {self.fast_projection}
+
+    eager: {self._pyoptflags.eager}
+    streaming: {self._pyoptflags.streaming}
+}}
+        """.strip()
+
+
+DEFAULT_QUERY_OPT_FLAGS: QueryOptFlags
+try:  # Module not available when building docs
+    DEFAULT_QUERY_OPT_FLAGS = QueryOptFlags()
+except (ImportError, NameError) as _:
+    DEFAULT_QUERY_OPT_FLAGS = ()  # type: ignore[assignment]
+
+
+def forward_old_opt_flags() -> Callable[[Callable[P, T]], Callable[P, T]]:
+    """Decorator to mark to forward the old optimization flags."""
+
+    def helper(f: QueryOptFlags, field_name: str, value: bool) -> QueryOptFlags:  # noqa: FBT001
+        setattr(f, field_name, value)
+        return f
+
+    def helper_hidden(f: QueryOptFlags, field_name: str, value: bool) -> QueryOptFlags:  # noqa: FBT001
+        setattr(f._pyoptflags, field_name, value)
+        return f
+
+    def clear_optimizations(f: QueryOptFlags, value: bool) -> QueryOptFlags:  # noqa: FBT001
+        if value:
+            return QueryOptFlags.none()
+        else:
+            return f
+
+    def eager(f: QueryOptFlags, value: bool) -> QueryOptFlags:  # noqa: FBT001
+        if value:
+            return QueryOptFlags._eager()
+        else:
+            return f
+
+    OLD_OPT_PARAMETERS_MAPPING = {
+        "no_optimization": lambda f, v: clear_optimizations(f, v),
+        "_eager": lambda f, v: eager(f, v),
+        "type_coercion": lambda f, v: helper_hidden(f, "type_coercion", v),
+        "_type_check": lambda f, v: helper_hidden(f, "type_check", v),
+        "predicate_pushdown": lambda f, v: helper(f, "predicate_pushdown", v),
+        "projection_pushdown": lambda f, v: helper(f, "projection_pushdown", v),
+        "simplify_expression": lambda f, v: helper(f, "simplify_expression", v),
+        "slice_pushdown": lambda f, v: helper(f, "slice_pushdown", v),
+        "comm_subplan_elim": lambda f, v: helper(f, "comm_subplan_elim", v),
+        "comm_subexpr_elim": lambda f, v: helper(f, "comm_subexpr_elim", v),
+        "cluster_with_columns": lambda f, v: helper(f, "cluster_with_columns", v),
+        "collapse_joins": lambda f, v: helper(f, "collapse_joins", v),
+        "_check_order": lambda f, v: helper(f, "check_order_observe", v),
+    }
+
+    def decorate(function: Callable[P, T]) -> Callable[P, T]:
+        @wraps(function)
+        def wrapper(*args: P.args, **kwargs: P.kwargs) -> T:
+            optflags: QueryOptFlags = kwargs.get(
+                "optimizations", DEFAULT_QUERY_OPT_FLAGS
+            )  # type: ignore[assignment]
+            optflags = optflags.__copy__()
+            for key in list(kwargs.keys()):
+                cb = OLD_OPT_PARAMETERS_MAPPING.get(key)
+                if cb is not None:
+                    from polars._utils.various import issue_warning
+
+                    message = f"optimization flag `{key}` is deprecated. Please use `optimizations` parameter\n(Deprecated in version 1.30.0)"
+                    issue_warning(message, DeprecationWarning)
+                    optflags = cb(optflags, kwargs.pop(key))  # type: ignore[no-untyped-call,unused-ignore]
+
+            kwargs["optimizations"] = optflags
+            return function(*args, **kwargs)
+
+        wrapper.__signature__ = inspect.signature(function)  # type: ignore[attr-defined]
+        return wrapper

+    return decorate
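A short, hedged sketch of how these flags are meant to be consumed. It assumes that `QueryOptFlags` is re-exported as `pl.QueryOptFlags` and that `LazyFrame.collect` takes the `optimizations` keyword handled by `forward_old_opt_flags`; neither of those call sites is part of this file.

import polars as pl

lf = (
    pl.LazyFrame({"a": [1, 2, 3], "b": [4, 5, 6]})
    .filter(pl.col("a") > 1)
    .select("b")
)

# Default flags: the standard set of optimizations.
df = lf.collect(optimizations=pl.QueryOptFlags())

# Start from an empty set and enable only the optimizations you want.
flags = pl.QueryOptFlags.none(predicate_pushdown=True, projection_pushdown=True)
df = lf.collect(optimizations=flags)

# Old-style keyword flags are assumed to still be accepted, but emit a
# DeprecationWarning: forward_old_opt_flags() maps them onto QueryOptFlags.
df = lf.collect(predicate_pushdown=False)  # deprecated spelling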
polars/meta/__init__.py
ADDED
@@ -0,0 +1,14 @@
+"""Public functions that provide information about the Polars package or the environment it runs in."""  # noqa: W505
+
+from polars.meta.build import build_info
+from polars.meta.index_type import get_index_type
+from polars.meta.thread_pool import thread_pool_size, threadpool_size
+from polars.meta.versions import show_versions
+
+__all__ = [
+    "build_info",
+    "get_index_type",
+    "show_versions",
+    "thread_pool_size",
+    "threadpool_size",
+]
polars/meta/build.py
ADDED
@@ -0,0 +1,33 @@
+from __future__ import annotations
+
+from typing import Any
+
+from polars._utils.polars_version import get_polars_version
+
+try:
+    from polars._plr import __build__
+except ImportError:
+    __build__ = {}
+
+__build__["version"] = get_polars_version() or "<missing>"
+
+
+def build_info() -> dict[str, Any]:
+    """
+    Return detailed Polars build information.
+
+    The dictionary with build information contains the following keys:
+
+    - `"compiler"`
+    - `"time"`
+    - `"dependencies"`
+    - `"features"`
+    - `"host"`
+    - `"target"`
+    - `"git"`
+    - `"version"`
+
+    If Polars was compiled without the `build_info` feature flag, only the `"version"`
+    key is included.
+    """
+    return __build__
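As a quick illustration of the API above (assuming `build_info` is re-exported at the top-level `pl` namespace, as the `polars/meta/__init__.py` file suggests; output values are placeholders, not taken from this wheel):

import polars as pl

info = pl.build_info()

# "version" is always present; the other keys only exist when the binary
# was compiled with the `build_info` feature flag.
print(info["version"])
print(sorted(info.keys()))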
polars/meta/index_type.py
ADDED
@@ -0,0 +1,27 @@
+from __future__ import annotations
+
+import contextlib
+from typing import TYPE_CHECKING
+
+with contextlib.suppress(ImportError):  # Module not available when building docs
+    import polars._plr as plr
+
+if TYPE_CHECKING:
+    from polars._typing import PolarsIntegerType
+
+
+def get_index_type() -> PolarsIntegerType:
+    """
+    Return the data type used for Polars indexing.
+
+    Returns
+    -------
+    PolarsIntegerType
+        :class:`UInt32` in regular Polars, :class:`UInt64` in bigidx Polars.
+
+    Examples
+    --------
+    >>> pl.get_index_type()
+    UInt32
+    """
+    return plr.get_index_type()
polars/meta/thread_pool.py
ADDED
@@ -0,0 +1,50 @@
+from __future__ import annotations
+
+import contextlib
+
+from polars._utils.deprecation import deprecated
+
+with contextlib.suppress(ImportError):  # Module not available when building docs
+    import polars._plr as plr
+
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+    import sys
+
+    if sys.version_info >= (3, 13):
+        from warnings import deprecated
+    else:
+        from typing_extensions import deprecated  # noqa: TC004
+
+
+def thread_pool_size() -> int:
+    """
+    Return the number of threads in the Polars thread pool.
+
+    Notes
+    -----
+    The thread pool size can be overridden by setting the `POLARS_MAX_THREADS`
+    environment variable before process start. The thread pool is not behind a
+    lock, so it cannot be modified once set. A reasonable use case for this might
+    be temporarily limiting the number of threads before importing Polars in a
+    PySpark UDF or similar context. Otherwise, it is strongly recommended not to
+    override this value as it will be set automatically by the engine.
+
+    Examples
+    --------
+    >>> pl.thread_pool_size()  # doctest: +SKIP
+    16
+    """
+    return plr.thread_pool_size()
+
+
+@deprecated("`threadpool_size` was renamed; use `thread_pool_size` instead.")
+def threadpool_size() -> int:
+    """
+    Return the number of threads in the Polars thread pool.
+
+    .. deprecated:: 0.20.7
+        This function has been renamed to :func:`thread_pool_size`.
+    """
+    return thread_pool_size()
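A minimal sketch of the thread-pool override described in the docstring above: the environment variable has to be set before Polars is first imported, because the pool size cannot be changed afterwards.

import os

# Must happen before the first `import polars` in the process.
os.environ["POLARS_MAX_THREADS"] = "4"

import polars as pl

print(pl.thread_pool_size())  # expected to report 4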
polars/meta/versions.py
ADDED
@@ -0,0 +1,120 @@
+from __future__ import annotations
+
+import sys
+
+from polars._cpu_check import get_runtime_repr
+from polars._utils.polars_version import get_polars_version
+from polars.meta.index_type import get_index_type
+
+
+def show_versions() -> None:
+    """
+    Print out the version of Polars and its optional dependencies.
+
+    Examples
+    --------
+    >>> pl.show_versions()  # doctest: +SKIP
+    --------Version info---------
+    Polars: 0.20.22
+    Index type: UInt32
+    Platform: macOS-14.4.1-arm64-arm-64bit
+    Python: 3.11.8 (main, Feb 6 2024, 21:21:21) [Clang 15.0.0 (clang-1500.1.0.2.5)]
+    LTS CPU: False
+    ----Optional dependencies----
+    adbc_driver_manager: 0.11.0
+    altair: 5.4.0
+    cloudpickle: 3.0.0
+    connectorx: 0.3.2
+    deltalake: 0.17.1
+    fastexcel: 0.10.4
+    fsspec: 2023.12.2
+    gevent: 24.2.1
+    matplotlib: 3.8.4
+    numpy: 1.26.4
+    openpyxl: 3.1.2
+    pandas: 2.2.2
+    pyarrow: 16.0.0
+    pydantic: 2.7.1
+    pyiceberg: 0.7.1
+    sqlalchemy: 2.0.29
+    torch: 2.2.2
+    xlsx2csv: 0.8.2
+    xlsxwriter: 3.2.0
+    """  # noqa: W505
+    # Note: we import 'platform' here (rather than at the top of the
+    # module) as a micro-optimization for polars' initial import
+    import platform
+
+    deps = _get_dependency_list()
+    core_properties = ("Polars", "Index type", "Platform", "Python", "Runtime")
+    keylen = max(len(x) for x in [*core_properties, "Azure CLI", *deps]) + 1
+
+    print("--------Version info---------")
+    print(f"{'Polars:':{keylen}s} {get_polars_version()}")
+    print(f"{'Index type:':{keylen}s} {get_index_type()}")
+    print(f"{'Platform:':{keylen}s} {platform.platform()}")
+    print(f"{'Python:':{keylen}s} {sys.version}")
+    print(f"{'Runtime:':{keylen}s} {get_runtime_repr()}")
+
+    print("\n----Optional dependencies----")
+
+    from polars.io.cloud.credential_provider import CredentialProviderAzure
+
+    print(f"{'Azure CLI':{keylen}s} ", end="", flush=True)
+    print(CredentialProviderAzure._azcli_version() or "<not installed>")
+
+    for name in deps:
+        print(f"{name:{keylen}s} ", end="", flush=True)
+        print(_get_dependency_version(name))
+
+
+# See the list of dependencies in pyproject.toml.
+def _get_dependency_list() -> list[str]:
+    return [
+        "adbc_driver_manager",
+        "altair",
+        "azure.identity",
+        "boto3",
+        "cloudpickle",
+        "connectorx",
+        "deltalake",
+        "fastexcel",
+        "fsspec",
+        "gevent",
+        "google.auth",
+        "great_tables",
+        "matplotlib",
+        "numpy",
+        "openpyxl",
+        "pandas",
+        "polars_cloud",
+        "pyarrow",
+        "pydantic",
+        "pyiceberg",
+        "sqlalchemy",
+        "torch",
+        "xlsx2csv",
+        "xlsxwriter",
+    ]
+
+
+def _get_dependency_version(dep_name: str) -> str:
+    # note: we import 'importlib' inside the function as an
+    # optimisation for initial polars module import
+    import importlib
+    import importlib.metadata
+
+    try:
+        module = importlib.import_module(dep_name)
+    except ImportError:
+        return "<not installed>"
+
+    if hasattr(module, "__version__"):
+        module_version = module.__version__
+    else:
+        try:
+            module_version = importlib.metadata.version(dep_name)  # pragma: no cover
+        except Exception:
+            return "<invalid install>"
+
+    return module_version
polars/ml/__init__.py
ADDED
File without changes