polars-runtime-compat 1.34.0b3-cp39-abi3-win_arm64.whl → 1.34.0b5-cp39-abi3-win_arm64.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of polars-runtime-compat might be problematic.
- _polars_runtime_compat/_polars_runtime_compat.pyd +0 -0
- {polars_runtime_compat-1.34.0b3.dist-info → polars_runtime_compat-1.34.0b5.dist-info}/METADATA +6 -2
- polars_runtime_compat-1.34.0b5.dist-info/RECORD +6 -0
- polars/__init__.py +0 -528
- polars/_cpu_check.py +0 -265
- polars/_dependencies.py +0 -355
- polars/_plr.py +0 -99
- polars/_plr.pyi +0 -2496
- polars/_reexport.py +0 -23
- polars/_typing.py +0 -478
- polars/_utils/__init__.py +0 -37
- polars/_utils/async_.py +0 -102
- polars/_utils/cache.py +0 -176
- polars/_utils/cloud.py +0 -40
- polars/_utils/constants.py +0 -29
- polars/_utils/construction/__init__.py +0 -46
- polars/_utils/construction/dataframe.py +0 -1397
- polars/_utils/construction/other.py +0 -72
- polars/_utils/construction/series.py +0 -560
- polars/_utils/construction/utils.py +0 -118
- polars/_utils/convert.py +0 -224
- polars/_utils/deprecation.py +0 -406
- polars/_utils/getitem.py +0 -457
- polars/_utils/logging.py +0 -11
- polars/_utils/nest_asyncio.py +0 -264
- polars/_utils/parquet.py +0 -15
- polars/_utils/parse/__init__.py +0 -12
- polars/_utils/parse/expr.py +0 -242
- polars/_utils/polars_version.py +0 -19
- polars/_utils/pycapsule.py +0 -53
- polars/_utils/scan.py +0 -27
- polars/_utils/serde.py +0 -63
- polars/_utils/slice.py +0 -215
- polars/_utils/udfs.py +0 -1251
- polars/_utils/unstable.py +0 -63
- polars/_utils/various.py +0 -782
- polars/_utils/wrap.py +0 -25
- polars/api.py +0 -370
- polars/catalog/__init__.py +0 -0
- polars/catalog/unity/__init__.py +0 -19
- polars/catalog/unity/client.py +0 -733
- polars/catalog/unity/models.py +0 -152
- polars/config.py +0 -1571
- polars/convert/__init__.py +0 -25
- polars/convert/general.py +0 -1046
- polars/convert/normalize.py +0 -261
- polars/dataframe/__init__.py +0 -5
- polars/dataframe/_html.py +0 -186
- polars/dataframe/frame.py +0 -12582
- polars/dataframe/group_by.py +0 -1067
- polars/dataframe/plotting.py +0 -257
- polars/datatype_expr/__init__.py +0 -5
- polars/datatype_expr/array.py +0 -56
- polars/datatype_expr/datatype_expr.py +0 -304
- polars/datatype_expr/list.py +0 -18
- polars/datatype_expr/struct.py +0 -69
- polars/datatypes/__init__.py +0 -122
- polars/datatypes/_parse.py +0 -195
- polars/datatypes/_utils.py +0 -48
- polars/datatypes/classes.py +0 -1213
- polars/datatypes/constants.py +0 -11
- polars/datatypes/constructor.py +0 -172
- polars/datatypes/convert.py +0 -366
- polars/datatypes/group.py +0 -130
- polars/exceptions.py +0 -230
- polars/expr/__init__.py +0 -7
- polars/expr/array.py +0 -964
- polars/expr/binary.py +0 -346
- polars/expr/categorical.py +0 -306
- polars/expr/datetime.py +0 -2620
- polars/expr/expr.py +0 -11272
- polars/expr/list.py +0 -1408
- polars/expr/meta.py +0 -444
- polars/expr/name.py +0 -321
- polars/expr/string.py +0 -3045
- polars/expr/struct.py +0 -357
- polars/expr/whenthen.py +0 -185
- polars/functions/__init__.py +0 -193
- polars/functions/aggregation/__init__.py +0 -33
- polars/functions/aggregation/horizontal.py +0 -298
- polars/functions/aggregation/vertical.py +0 -341
- polars/functions/as_datatype.py +0 -848
- polars/functions/business.py +0 -138
- polars/functions/col.py +0 -384
- polars/functions/datatype.py +0 -121
- polars/functions/eager.py +0 -524
- polars/functions/escape_regex.py +0 -29
- polars/functions/lazy.py +0 -2751
- polars/functions/len.py +0 -68
- polars/functions/lit.py +0 -210
- polars/functions/random.py +0 -22
- polars/functions/range/__init__.py +0 -19
- polars/functions/range/_utils.py +0 -15
- polars/functions/range/date_range.py +0 -303
- polars/functions/range/datetime_range.py +0 -370
- polars/functions/range/int_range.py +0 -348
- polars/functions/range/linear_space.py +0 -311
- polars/functions/range/time_range.py +0 -287
- polars/functions/repeat.py +0 -301
- polars/functions/whenthen.py +0 -353
- polars/interchange/__init__.py +0 -10
- polars/interchange/buffer.py +0 -77
- polars/interchange/column.py +0 -190
- polars/interchange/dataframe.py +0 -230
- polars/interchange/from_dataframe.py +0 -328
- polars/interchange/protocol.py +0 -303
- polars/interchange/utils.py +0 -170
- polars/io/__init__.py +0 -64
- polars/io/_utils.py +0 -317
- polars/io/avro.py +0 -49
- polars/io/clipboard.py +0 -36
- polars/io/cloud/__init__.py +0 -17
- polars/io/cloud/_utils.py +0 -80
- polars/io/cloud/credential_provider/__init__.py +0 -17
- polars/io/cloud/credential_provider/_builder.py +0 -520
- polars/io/cloud/credential_provider/_providers.py +0 -618
- polars/io/csv/__init__.py +0 -9
- polars/io/csv/_utils.py +0 -38
- polars/io/csv/batched_reader.py +0 -142
- polars/io/csv/functions.py +0 -1495
- polars/io/database/__init__.py +0 -6
- polars/io/database/_arrow_registry.py +0 -70
- polars/io/database/_cursor_proxies.py +0 -147
- polars/io/database/_executor.py +0 -578
- polars/io/database/_inference.py +0 -314
- polars/io/database/_utils.py +0 -144
- polars/io/database/functions.py +0 -516
- polars/io/delta.py +0 -499
- polars/io/iceberg/__init__.py +0 -3
- polars/io/iceberg/_utils.py +0 -697
- polars/io/iceberg/dataset.py +0 -556
- polars/io/iceberg/functions.py +0 -151
- polars/io/ipc/__init__.py +0 -8
- polars/io/ipc/functions.py +0 -514
- polars/io/json/__init__.py +0 -3
- polars/io/json/read.py +0 -101
- polars/io/ndjson.py +0 -332
- polars/io/parquet/__init__.py +0 -17
- polars/io/parquet/field_overwrites.py +0 -140
- polars/io/parquet/functions.py +0 -722
- polars/io/partition.py +0 -491
- polars/io/plugins.py +0 -187
- polars/io/pyarrow_dataset/__init__.py +0 -5
- polars/io/pyarrow_dataset/anonymous_scan.py +0 -109
- polars/io/pyarrow_dataset/functions.py +0 -79
- polars/io/scan_options/__init__.py +0 -5
- polars/io/scan_options/_options.py +0 -59
- polars/io/scan_options/cast_options.py +0 -126
- polars/io/spreadsheet/__init__.py +0 -6
- polars/io/spreadsheet/_utils.py +0 -52
- polars/io/spreadsheet/_write_utils.py +0 -647
- polars/io/spreadsheet/functions.py +0 -1323
- polars/lazyframe/__init__.py +0 -9
- polars/lazyframe/engine_config.py +0 -61
- polars/lazyframe/frame.py +0 -8564
- polars/lazyframe/group_by.py +0 -669
- polars/lazyframe/in_process.py +0 -42
- polars/lazyframe/opt_flags.py +0 -333
- polars/meta/__init__.py +0 -14
- polars/meta/build.py +0 -33
- polars/meta/index_type.py +0 -27
- polars/meta/thread_pool.py +0 -50
- polars/meta/versions.py +0 -120
- polars/ml/__init__.py +0 -0
- polars/ml/torch.py +0 -213
- polars/ml/utilities.py +0 -30
- polars/plugins.py +0 -155
- polars/py.typed +0 -0
- polars/pyproject.toml +0 -103
- polars/schema.py +0 -265
- polars/selectors.py +0 -3117
- polars/series/__init__.py +0 -5
- polars/series/array.py +0 -776
- polars/series/binary.py +0 -254
- polars/series/categorical.py +0 -246
- polars/series/datetime.py +0 -2275
- polars/series/list.py +0 -1087
- polars/series/plotting.py +0 -191
- polars/series/series.py +0 -9197
- polars/series/string.py +0 -2367
- polars/series/struct.py +0 -154
- polars/series/utils.py +0 -191
- polars/sql/__init__.py +0 -7
- polars/sql/context.py +0 -677
- polars/sql/functions.py +0 -139
- polars/string_cache.py +0 -185
- polars/testing/__init__.py +0 -13
- polars/testing/asserts/__init__.py +0 -9
- polars/testing/asserts/frame.py +0 -231
- polars/testing/asserts/series.py +0 -219
- polars/testing/asserts/utils.py +0 -12
- polars/testing/parametric/__init__.py +0 -33
- polars/testing/parametric/profiles.py +0 -107
- polars/testing/parametric/strategies/__init__.py +0 -22
- polars/testing/parametric/strategies/_utils.py +0 -14
- polars/testing/parametric/strategies/core.py +0 -615
- polars/testing/parametric/strategies/data.py +0 -452
- polars/testing/parametric/strategies/dtype.py +0 -436
- polars/testing/parametric/strategies/legacy.py +0 -169
- polars/type_aliases.py +0 -24
- polars_runtime_compat-1.34.0b3.dist-info/RECORD +0 -203
- {polars_runtime_compat-1.34.0b3.dist-info → polars_runtime_compat-1.34.0b5.dist-info}/WHEEL +0 -0
- {polars_runtime_compat-1.34.0b3.dist-info → polars_runtime_compat-1.34.0b5.dist-info}/licenses/LICENSE +0 -0
polars/_utils/construction/utils.py
DELETED
@@ -1,118 +0,0 @@
-from __future__ import annotations
-
-from collections.abc import Sequence
-from functools import lru_cache
-from typing import TYPE_CHECKING, Any, Callable, get_type_hints
-
-from polars._dependencies import _check_for_pydantic, pydantic
-
-if TYPE_CHECKING:
-    import pandas as pd
-
-PANDAS_SIMPLE_NUMPY_DTYPES = {
-    "int64",
-    "int32",
-    "int16",
-    "int8",
-    "uint64",
-    "uint32",
-    "uint16",
-    "uint8",
-    "float64",
-    "float32",
-    "datetime64[ms]",
-    "datetime64[us]",
-    "datetime64[ns]",
-    "timedelta64[ms]",
-    "timedelta64[us]",
-    "timedelta64[ns]",
-    "bool",
-}
-
-
-def _get_annotations(obj: type) -> dict[str, Any]:
-    return getattr(obj, "__annotations__", {})
-
-
-def try_get_type_hints(obj: type) -> dict[str, Any]:
-    try:
-        # often the same as obj.__annotations__, but handles forward references
-        # encoded as string literals, adds Optional[t] if a default value equal
-        # to None is set and recursively replaces 'Annotated[T, ...]' with 'T'.
-        return get_type_hints(obj)
-    except TypeError:
-        # fallback on edge-cases (eg: InitVar inference on python 3.10).
-        return _get_annotations(obj)
-
-
-@lru_cache(64)
-def is_namedtuple(cls: Any, *, annotated: bool = False) -> bool:
-    """Check if given class derives from NamedTuple."""
-    if all(hasattr(cls, attr) for attr in ("_fields", "_field_defaults", "_replace")):
-        if not isinstance(cls._fields, property):
-            if not annotated or len(cls.__annotations__) == len(cls._fields):
-                return all(isinstance(fld, str) for fld in cls._fields)
-    return False
-
-
-def is_pydantic_model(value: Any) -> bool:
-    """Check if value derives from a pydantic.BaseModel."""
-    return _check_for_pydantic(value) and isinstance(value, pydantic.BaseModel)
-
-
-def is_sqlalchemy_row(value: Any) -> bool:
-    """Check if value is an instance of a SQLAlchemy sequence or mapping object."""
-    return getattr(value, "__module__", "").startswith("sqlalchemy.") and isinstance(
-        value, Sequence
-    )
-
-
-def get_first_non_none(values: Sequence[Any | None]) -> Any:
-    """
-    Return the first value from a sequence that isn't None.
-
-    If sequence doesn't contain non-None values, return None.
-    """
-    if values is not None:
-        return next((v for v in values if v is not None), None)
-
-
-def nt_unpack(obj: Any) -> Any:
-    """Recursively unpack a nested NamedTuple."""
-    if isinstance(obj, dict):
-        return {key: nt_unpack(value) for key, value in obj.items()}
-    elif isinstance(obj, list):
-        return [nt_unpack(value) for value in obj]
-    elif is_namedtuple(obj.__class__):
-        return {key: nt_unpack(value) for key, value in obj._asdict().items()}
-    elif isinstance(obj, tuple):
-        return tuple(nt_unpack(value) for value in obj)
-    else:
-        return obj
-
-
-def contains_nested(value: Any, is_nested: Callable[[Any], bool]) -> bool:
-    """Determine if value contains (or is) nested structured data."""
-    if is_nested(value):
-        return True
-    elif isinstance(value, dict):
-        return any(contains_nested(v, is_nested) for v in value.values())
-    elif isinstance(value, (list, tuple)):
-        return any(contains_nested(v, is_nested) for v in value)
-    return False
-
-
-def is_simple_numpy_backed_pandas_series(
-    series: pd.Series[Any] | pd.Index[Any] | pd.DatetimeIndex,
-) -> bool:
-    if len(series.shape) > 1:
-        # Pandas Series is actually a Pandas DataFrame when the original DataFrame
-        # contains duplicated columns and a duplicated column is requested with df["a"].
-        msg = f"duplicate column names found: {series.columns.tolist()!s}"  # type: ignore[union-attr]
-        raise ValueError(msg)
-    return (str(series.dtype) in PANDAS_SIMPLE_NUMPY_DTYPES) or (
-        series.dtype == "object"
-        and not series.hasnans
-        and not series.empty
-        and isinstance(next(iter(series)), str)
-    )
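For context, a minimal usage sketch of two of the removed construction helpers. It assumes the 1.34.0b3 wheel is installed (where polars/_utils/construction/utils.py still ships and provides the polars package); against 1.34.0b5 the import below is expected to fail. The expected outputs in the comments follow from the deleted source shown above.

from typing import NamedTuple

# Private helpers that exist in the 1.34.0b3 wheel but are removed in 1.34.0b5.
from polars._utils.construction.utils import is_namedtuple, nt_unpack


class Point(NamedTuple):
    x: int
    y: int


# Detected via the _fields / _field_defaults / _replace attributes.
print(is_namedtuple(Point))           # True
# Recursively converts nested NamedTuples into plain dicts.
print(nt_unpack({"p": Point(1, 2)}))  # {'p': {'x': 1, 'y': 2}}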
polars/_utils/convert.py
DELETED
@@ -1,224 +0,0 @@
-from __future__ import annotations
-
-from datetime import datetime, time, timedelta, timezone
-from decimal import Context
-from functools import lru_cache
-from typing import (
-    TYPE_CHECKING,
-    Any,
-    Callable,
-    NoReturn,
-    overload,
-)
-from zoneinfo import ZoneInfo, ZoneInfoNotFoundError
-
-from polars._utils.constants import (
-    EPOCH,
-    EPOCH_DATE,
-    EPOCH_UTC,
-    MS_PER_SECOND,
-    NS_PER_SECOND,
-    SECONDS_PER_DAY,
-    SECONDS_PER_HOUR,
-    US_PER_SECOND,
-)
-
-if TYPE_CHECKING:
-    from datetime import date, tzinfo
-    from decimal import Decimal
-
-    from polars._typing import TimeUnit
-
-
-@overload
-def parse_as_duration_string(td: None) -> None: ...
-
-
-@overload
-def parse_as_duration_string(td: timedelta | str) -> str: ...
-
-
-def parse_as_duration_string(td: timedelta | str | None) -> str | None:
-    """Parse duration input as a Polars duration string."""
-    if td is None or isinstance(td, str):
-        return td
-    return _timedelta_to_duration_string(td)
-
-
-def _timedelta_to_duration_string(td: timedelta) -> str:
-    """Convert a Python timedelta object to a Polars duration string."""
-    # Positive duration
-    if td.days >= 0:
-        d = f"{td.days}d" if td.days != 0 else ""
-        s = f"{td.seconds}s" if td.seconds != 0 else ""
-        us = f"{td.microseconds}us" if td.microseconds != 0 else ""
-    # Negative, whole days
-    elif td.seconds == 0 and td.microseconds == 0:
-        return f"{td.days}d"
-    # Negative, other
-    else:
-        corrected_d = td.days + 1
-        corrected_seconds = SECONDS_PER_DAY - (td.seconds + (td.microseconds > 0))
-        d = f"{corrected_d}d" if corrected_d != 0 else "-"
-        s = f"{corrected_seconds}s" if corrected_seconds != 0 else ""
-        us = f"{10**6 - td.microseconds}us" if td.microseconds != 0 else ""
-
-    return f"{d}{s}{us}"
-
-
-def negate_duration_string(duration: str) -> str:
-    """Negate a Polars duration string."""
-    if duration.startswith("-"):
-        return duration[1:]
-    else:
-        return f"-{duration}"
-
-
-def date_to_int(d: date) -> int:
-    """Convert a Python time object to an integer."""
-    return (d - EPOCH_DATE).days
-
-
-def time_to_int(t: time) -> int:
-    """Convert a Python time object to an integer."""
-    t = t.replace(tzinfo=timezone.utc)
-    seconds = t.hour * SECONDS_PER_HOUR + t.minute * 60 + t.second
-    microseconds = t.microsecond
-    return seconds * NS_PER_SECOND + microseconds * 1_000
-
-
-def datetime_to_int(dt: datetime, time_unit: TimeUnit) -> int:
-    """Convert a Python datetime object to an integer."""
-    # Make sure to use UTC rather than system time zone
-    if dt.tzinfo is None:
-        dt = dt.replace(tzinfo=timezone.utc)
-
-    td = dt - EPOCH_UTC
-    seconds = td.days * SECONDS_PER_DAY + td.seconds
-    microseconds = dt.microsecond
-
-    if time_unit == "us":
-        return seconds * US_PER_SECOND + microseconds
-    elif time_unit == "ns":
-        return seconds * NS_PER_SECOND + microseconds * 1_000
-    elif time_unit == "ms":
-        return seconds * MS_PER_SECOND + microseconds // 1_000
-    else:
-        _raise_invalid_time_unit(time_unit)
-
-
-def timedelta_to_int(td: timedelta, time_unit: TimeUnit) -> int:
-    """Convert a Python timedelta object to an integer."""
-    seconds = td.days * SECONDS_PER_DAY + td.seconds
-    microseconds = td.microseconds
-
-    if time_unit == "us":
-        return seconds * US_PER_SECOND + microseconds
-    elif time_unit == "ns":
-        return seconds * NS_PER_SECOND + microseconds * 1_000
-    elif time_unit == "ms":
-        return seconds * MS_PER_SECOND + microseconds // 1_000
-    else:
-        _raise_invalid_time_unit(time_unit)
-
-
-@lru_cache(256)
-def to_py_date(value: int | float) -> date:
-    """Convert an integer or float to a Python date object."""
-    return EPOCH_DATE + timedelta(days=value)
-
-
-def to_py_time(value: int) -> time:
-    """Convert an integer to a Python time object."""
-    # Fast path for 00:00
-    if value == 0:
-        return time()
-
-    seconds, nanoseconds = divmod(value, NS_PER_SECOND)
-    minutes, seconds = divmod(seconds, 60)
-    hours, minutes = divmod(minutes, 60)
-    return time(
-        hour=hours, minute=minutes, second=seconds, microsecond=nanoseconds // 1_000
-    )
-
-
-def to_py_datetime(
-    value: int | float,
-    time_unit: TimeUnit,
-    time_zone: str | None = None,
-) -> datetime:
-    """Convert an integer or float to a Python datetime object."""
-    if time_unit == "us":
-        td = timedelta(microseconds=value)
-    elif time_unit == "ns":
-        td = timedelta(microseconds=value // 1_000)
-    elif time_unit == "ms":
-        td = timedelta(milliseconds=value)
-    else:
-        _raise_invalid_time_unit(time_unit)
-
-    if time_zone is None:
-        return EPOCH + td
-    else:
-        dt = EPOCH_UTC + td
-        return _localize_datetime(dt, time_zone)
-
-
-def _localize_datetime(dt: datetime, time_zone: str) -> datetime:
-    # zone info installation should already be checked
-    tz: ZoneInfo | tzinfo
-    try:
-        tz = ZoneInfo(time_zone)
-    except ZoneInfoNotFoundError:
-        # try fixed offset, which is not supported by ZoneInfo
-        tz = _parse_fixed_tz_offset(time_zone)
-
-    return dt.astimezone(tz)
-
-
-# cache here as we have a single tz per column
-# and this function will be called on every conversion
-@lru_cache(16)
-def _parse_fixed_tz_offset(offset: str) -> tzinfo:
-    try:
-        # use fromisoformat to parse the offset
-        dt_offset = datetime.fromisoformat("2000-01-01T00:00:00" + offset)
-
-        # alternatively, we parse the offset ourselves extracting hours and
-        # minutes, then we can construct:
-        # tzinfo=timezone(timedelta(hours=..., minutes=...))
-    except ValueError:
-        msg = f"unexpected time zone offset: {offset!r}"
-        raise ValueError(msg) from None
-
-    return dt_offset.tzinfo  # type: ignore[return-value]
-
-
-def to_py_timedelta(value: int | float, time_unit: TimeUnit) -> timedelta:
-    """Convert an integer or float to a Python timedelta object."""
-    if time_unit == "us":
-        return timedelta(microseconds=value)
-    elif time_unit == "ns":
-        return timedelta(microseconds=value // 1_000)
-    elif time_unit == "ms":
-        return timedelta(milliseconds=value)
-    else:
-        _raise_invalid_time_unit(time_unit)
-
-
-def to_py_decimal(prec: int, value: str) -> Decimal:
-    """Convert decimal components to a Python Decimal object."""
-    return _create_decimal_with_prec(prec)(value)
-
-
-@lru_cache(None)
-def _create_decimal_with_prec(
-    precision: int,
-) -> Callable[[str], Decimal]:
-    # pre-cache contexts so we don't have to spend time on recreating them every time
-    return Context(prec=precision).create_decimal
-
-
-def _raise_invalid_time_unit(time_unit: Any) -> NoReturn:
-    msg = f"`time_unit` must be one of {{'ms', 'us', 'ns'}}, got {time_unit!r}"
-    raise ValueError(msg)
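Similarly, a minimal sketch of the removed conversion helpers, again assuming the 1.34.0b3 wheel (where polars/_utils/convert.py still ships). The expected values in the comments follow from the deleted source above and the usual epoch/second constants in polars._utils.constants.

from datetime import datetime, timedelta, timezone

# Private helpers that exist in the 1.34.0b3 wheel but are removed in 1.34.0b5.
from polars._utils.convert import datetime_to_int, parse_as_duration_string

# timedelta -> Polars duration string built from days/seconds/microseconds parts.
print(parse_as_duration_string(timedelta(days=1, seconds=5)))  # "1d5s"
print(parse_as_duration_string("3h"))                          # strings pass through: "3h"

# Timezone-aware datetime -> integer timestamp in the requested time unit.
dt = datetime(1970, 1, 2, tzinfo=timezone.utc)
print(datetime_to_int(dt, "us"))  # 86400000000 (one day after the epoch, in microseconds)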