dycw-utilities 0.155.0-py3-none-any.whl → 0.155.2-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: dycw-utilities
- Version: 0.155.0
+ Version: 0.155.2
  Author-email: Derek Wan <d.wan@icloud.com>
  License-File: LICENSE
  Requires-Python: >=3.12
@@ -13,7 +13,6 @@ Requires-Dist: coloredlogs<15.1,>=15.0.1; extra == 'logging'
  Provides-Extra: test
  Requires-Dist: dycw-pytest-only<2.2,>=2.1.1; extra == 'test'
  Requires-Dist: hypothesis<6.138,>=6.137.1; extra == 'test'
- Requires-Dist: pudb<2025.2,>=2025.1; extra == 'test'
  Requires-Dist: pytest-asyncio<1.2,>=1.1.0; extra == 'test'
  Requires-Dist: pytest-cov<6.3,>=6.2.1; extra == 'test'
  Requires-Dist: pytest-instafail<0.6,>=0.5.0; extra == 'test'
@@ -1,4 +1,4 @@
- utilities/__init__.py,sha256=OFslnM101fl40brwBMTE9hAplv30EfT_NmRkbGPfu-g,60
+ utilities/__init__.py,sha256=UpA18BgKE4JKTnomInevMSHAcDOtpqnipumof6hoK_c,60
  utilities/altair.py,sha256=92E2lCdyHY4Zb-vCw6rEJIsWdKipuu-Tu2ab1ufUfAk,9079
  utilities/asyncio.py,sha256=QXkTtugXkqtYt7Do23zgYErqzdp6jwzPpV_SP9fJ1gI,16780
  utilities/atomicwrites.py,sha256=tPo6r-Rypd9u99u66B9z86YBPpnLrlHtwox_8Z7T34Y,5790
@@ -22,7 +22,7 @@ utilities/getpass.py,sha256=DfN5UgMAtFCqS3dSfFHUfqIMZX2shXvwphOz_6J6f6A,103
  utilities/gzip.py,sha256=fkGP3KdsBfXlstodT4wtlp-PwNyUsogpbDCVVVGdsm4,781
  utilities/hashlib.py,sha256=SVTgtguur0P4elppvzOBbLEjVM3Pea0eWB61yg2ilxo,309
  utilities/http.py,sha256=TsavEfHlRtlLaeV21Z6KZh0qbPw-kvD1zsQdZ7Kep5Q,977
- utilities/hypothesis.py,sha256=hoa1Szk3oLa07W4nl6Uhy2vsToIxx8AYjJg9PLc9JvQ,43516
+ utilities/hypothesis.py,sha256=2lAUvuXj_zswg-8Ot0ZuBzTbZyaRYWRmeR8qSF7Mmys,43817
  utilities/importlib.py,sha256=mV1xT_O_zt_GnZZ36tl3xOmMaN_3jErDWY54fX39F6Y,429
  utilities/inflect.py,sha256=v7YkOWSu8NAmVghPcf4F3YBZQoJCS47_DLf9jbfWIs0,581
  utilities/ipython.py,sha256=V2oMYHvEKvlNBzxDXdLvKi48oUq2SclRg5xasjaXStw,763
@@ -45,7 +45,7 @@ utilities/parse.py,sha256=JcJn5yXKhIWXBCwgBdPsyu7Hvcuw6kyEdqvaebCaI9k,17951
  utilities/pathlib.py,sha256=qGuU8XPmdgGpy8tOMUgelfXx3kxI8h9IaV3TI_06QGE,8428
  utilities/pickle.py,sha256=MBT2xZCsv0pH868IXLGKnlcqNx2IRVKYNpRcqiQQqxw,653
  utilities/platform.py,sha256=pTn7gw6N4T6LdKrf0virwarof_mze9WtoQlrGMzhGVI,2798
- utilities/polars.py,sha256=DxGDEw3KRxQJkuJ1S0fduXfCiyXJ-9mul0kYj3lFt_Q,78530
+ utilities/polars.py,sha256=kEYLh5kcpvjOqNS-Pv-DAjvzPZYm6qZ7ywMt-wZvRGE,78434
  utilities/polars_ols.py,sha256=Uc9V5kvlWZ5cU93lKZ-cfAKdVFFw81tqwLW9PxtUvMs,5618
  utilities/postgres.py,sha256=ynCTTaF-bVEOSW-KEAR-dlLh_hYjeVVjm__-4pEU8Zk,12269
  utilities/pottery.py,sha256=HJ96oLRarTP37Vhg0WTyB3yAu2hETeg6HgRmpDIqyUs,6581
@@ -65,30 +65,30 @@ utilities/shelve.py,sha256=4OzjQI6kGuUbJciqf535rwnao-_IBv66gsT6tRGiUt0,759
  utilities/slack_sdk.py,sha256=ppFBvKgfg5IRWiIoKPtpTyzBtBF4XmwEvU3I5wLJikM,2140
  utilities/socket.py,sha256=K77vfREvzoVTrpYKo6MZakol0EYu2q1sWJnnZqL0So0,118
  utilities/sqlalchemy.py,sha256=IJKzrKUd_eBOkyK6CucDlxtHwo2vYH3t-rV2_5rAxq8,40554
- utilities/sqlalchemy_polars.py,sha256=Mm-sShZfqqgnzTrupMQdCfSM2akrybXHXAErTs-ofM8,14244
+ utilities/sqlalchemy_polars.py,sha256=5Q9HReETYg0qB6E6WQhFh4QAZlKE-IWlogj2BVif_-w,14246
  utilities/statsmodels.py,sha256=koyiBHvpMcSiBfh99wFUfSggLNx7cuAw3rwyfAhoKpQ,3410
  utilities/string.py,sha256=shmBK87zZwzGyixuNuXCiUbqzfeZ9xlrFwz6JTaRvDk,582
  utilities/tempfile.py,sha256=HxB2BF28CcecDJLQ3Bx2Ej-Pb6RJc6W9ngSpB9CnP4k,2018
  utilities/text.py,sha256=uwCDgpEunYruyh6sKMfNWK3Rp5H3ndpKRAkq86CBNys,13043
  utilities/threading.py,sha256=GvBOp4CyhHfN90wGXZuA2VKe9fGzMaEa7oCl4f3nnPU,1009
  utilities/timer.py,sha256=oXfTii6ymu57niP0BDGZjFD55LEHi2a19kqZKiTgaFQ,2588
- utilities/traceback.py,sha256=TjO7em98FDFLvROZ7gi2UJftFWNuSTkbCrf7mk-fg28,9416
+ utilities/traceback.py,sha256=1k5JgumSMaqAGLd0dZ36CtPS0EGaglxTr29r2Dz4D60,9457
  utilities/typed_settings.py,sha256=SFWqS3lAzV7IfNRwqFcTk0YynTcQ7BmrcW2mr_KUnos,4466
  utilities/types.py,sha256=L4cjFPyFZX58Urfw0S_i-XRywPIFyuSLOieewj0qqsM,18516
- utilities/typing.py,sha256=Z-_XDaWyT_6wIo3qfNK-hvRlzxP2Jxa9PgXzm5rDYRA,13790
+ utilities/typing.py,sha256=7ZgCNZwA6oaiwpSJIS9Rj3i3MbRBYHMqbC3jMe5KiNg,13992
  utilities/tzdata.py,sha256=fgNVj66yUbCSI_-vrRVzSD3gtf-L_8IEJEPjP_Jel5Y,266
  utilities/tzlocal.py,sha256=KyCXEgCTjqGFx-389JdTuhMRUaT06U1RCMdWoED-qro,728
  utilities/uuid.py,sha256=nQZs6tFX4mqtc2Ku3KqjloYCqwpTKeTj8eKwQwh3FQI,1572
  utilities/version.py,sha256=ipBj5-WYY_nelp2uwFlApfWWCzTLzPwpovUi9x_OBMs,5085
  utilities/warnings.py,sha256=un1LvHv70PU-LLv8RxPVmugTzDJkkGXRMZTE2-fTQHw,1771
- utilities/whenever.py,sha256=gPnFKWws4_tjiHPLzX1AukSwDjfMIO9Iim0DDNQyAqY,57532
+ utilities/whenever.py,sha256=vsoVRd8-KXVn9Ik5PveIGgOCuIGnMNqSEoPCsR0sZ30,57755
  utilities/zipfile.py,sha256=24lQc9ATcJxHXBPc_tBDiJk48pWyRrlxO2fIsFxU0A8,699
  utilities/zoneinfo.py,sha256=FBMcUQ4662Aq8SsuCL1OAhDQiyANmVjtb-C30DRrWoE,1966
  utilities/pytest_plugins/__init__.py,sha256=U4S_2y3zgLZVfMenHRaJFBW8yqh2mUBuI291LGQVOJ8,35
  utilities/pytest_plugins/pytest_randomly.py,sha256=B1qYVlExGOxTywq2r1SMi5o7btHLk2PNdY_b1p98dkE,409
  utilities/pytest_plugins/pytest_regressions.py,sha256=9v8kAXDM2ycIXJBimoiF4EgrwbUvxTycFWJiGR_GHhM,1466
- dycw_utilities-0.155.0.dist-info/METADATA,sha256=AFafkgKG-CWYCGGKjJxAXn1dYS1wM_OkjeV2m7ZnaQs,1696
- dycw_utilities-0.155.0.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
- dycw_utilities-0.155.0.dist-info/entry_points.txt,sha256=BOD_SoDxwsfJYOLxhrSXhHP_T7iw-HXI9f2WVkzYxvQ,135
- dycw_utilities-0.155.0.dist-info/licenses/LICENSE,sha256=gppZp16M6nSVpBbUBrNL6JuYfvKwZiKgV7XoKKsHzqo,1066
- dycw_utilities-0.155.0.dist-info/RECORD,,
+ dycw_utilities-0.155.2.dist-info/METADATA,sha256=bti46Z55f6RXvAyFTu6sAzeis6LgdIEkMPTR0rQWj1w,1643
+ dycw_utilities-0.155.2.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+ dycw_utilities-0.155.2.dist-info/entry_points.txt,sha256=BOD_SoDxwsfJYOLxhrSXhHP_T7iw-HXI9f2WVkzYxvQ,135
+ dycw_utilities-0.155.2.dist-info/licenses/LICENSE,sha256=gppZp16M6nSVpBbUBrNL6JuYfvKwZiKgV7XoKKsHzqo,1066
+ dycw_utilities-0.155.2.dist-info/RECORD,,
utilities/__init__.py CHANGED
@@ -1,3 +1,3 @@
  from __future__ import annotations
 
- __version__ = "0.155.0"
+ __version__ = "0.155.2"
utilities/hypothesis.py CHANGED
@@ -236,6 +236,7 @@ def date_time_deltas(
      min_value: MaybeSearchStrategy[DateTimeDelta | None] = None,
      max_value: MaybeSearchStrategy[DateTimeDelta | None] = None,
      parsable: MaybeSearchStrategy[bool] = False,
+     nativable: MaybeSearchStrategy[bool] = False,
  ) -> DateTimeDelta:
      """Strategy for generating date deltas."""
      min_value_, max_value_ = [draw2(draw, v) for v in [min_value, max_value]]
@@ -257,7 +258,13 @@ def date_time_deltas(
      if draw2(draw, parsable):
          min_nanos = max(min_nanos, to_nanoseconds(DATE_TIME_DELTA_PARSABLE_MIN))
          max_nanos = min(max_nanos, to_nanoseconds(DATE_TIME_DELTA_PARSABLE_MAX))
-     nanos = draw(integers(min_value=min_nanos, max_value=max_nanos))
+     if draw2(draw, nativable):
+         min_micros, _ = divmod(min_nanos, 1000)
+         max_micros, _ = divmod(max_nanos, 1000)
+         micros = draw(integers(min_value=min_micros + 1, max_value=max_micros))
+         nanos = 1000 * micros
+     else:
+         nanos = draw(integers(min_value=min_nanos, max_value=max_nanos))
      return to_date_time_delta(nanos)
 
 
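The new `nativable` flag restricts draws to whole microseconds, so the generated delta can round-trip through Python's native `datetime.timedelta` (which has no nanosecond precision). A minimal usage sketch, assuming the strategy is importable from `utilities.hypothesis` as defined in this file:

```python
from hypothesis import given
from whenever import DateTimeDelta

from utilities.hypothesis import date_time_deltas


@given(delta=date_time_deltas(parsable=True, nativable=True))
def test_nativable_delta(delta: DateTimeDelta) -> None:
    # With nativable=True the strategy only emits whole-microsecond deltas,
    # i.e. values representable by datetime.timedelta; parsable=True keeps
    # them within the library's parsable range.
    assert isinstance(delta, DateTimeDelta)
```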
utilities/polars.py CHANGED
@@ -1,6 +1,5 @@
  from __future__ import annotations
 
- import datetime as dt
  import enum
  from collections.abc import Callable, Iterator, Sequence
  from collections.abc import Set as AbstractSet
@@ -15,11 +14,12 @@ from uuid import UUID
  from zoneinfo import ZoneInfo
 
  import polars as pl
+ import whenever
  from polars import (
      Boolean,
      DataFrame,
-     Date,
      Datetime,
+     Duration,
      Expr,
      Float64,
      Int64,
@@ -50,9 +50,9 @@ from polars.exceptions import (
  )
  from polars.schema import Schema
  from polars.testing import assert_frame_equal, assert_series_equal
- from whenever import ZonedDateTime
+ from whenever import DateDelta, DateTimeDelta, PlainDateTime, TimeDelta, ZonedDateTime
 
- from utilities.dataclasses import _YieldFieldsInstance, yield_fields
+ from utilities.dataclasses import yield_fields
  from utilities.errors import ImpossibleCaseError
  from utilities.functions import (
      EnsureIntError,
@@ -93,14 +93,18 @@ from utilities.typing import (
      get_args,
      get_type_hints,
      is_frozenset_type,
-     is_instance_gen,
      is_list_type,
      is_literal_type,
      is_optional_type,
      is_set_type,
-     is_union_type,
  )
  from utilities.warnings import suppress_warnings
+ from utilities.whenever import (
+     DatePeriod,
+     TimePeriod,
+     ZonedDateTimePeriod,
+     to_py_time_delta,
+ )
  from utilities.zoneinfo import UTC, ensure_time_zone, get_time_zone_name
 
  if TYPE_CHECKING:
@@ -131,6 +135,10 @@ DatetimeTokyo = Datetime(time_zone="Asia/Tokyo")
  DatetimeUSCentral = Datetime(time_zone="US/Central")
  DatetimeUSEastern = Datetime(time_zone="US/Eastern")
  DatetimeUTC = Datetime(time_zone="UTC")
+ DatePeriodDType = Struct({"start": pl.Date, "end": pl.Date})
+ TimePeriodDType = Struct({"start": pl.Time, "end": pl.Time})
+
+
  _FINITE_EWM_MIN_WEIGHT = 0.9999
 
 
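The two new dtype constants describe period (start/end) struct columns. A small sketch of what a `DatePeriodDType` column looks like, using only polars' public API:

```python
import datetime as dt

import polars as pl

from utilities.polars import DatePeriodDType

# DatePeriodDType is Struct({"start": Date, "end": Date}); a period column is a
# struct series whose two fields are plain Date columns.
periods = pl.Series(
    "period",
    [{"start": dt.date(2024, 1, 1), "end": dt.date(2024, 1, 31)}],
    dtype=DatePeriodDType,
)
print(periods.struct.field("start"))  # a Date series
```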
@@ -453,29 +461,6 @@ class BooleanValueCountsError(Exception):
  ##
 
 
- @overload
- def ceil_datetime(column: ExprLike, every: ExprLike, /) -> Expr: ...
- @overload
- def ceil_datetime(column: Series, every: ExprLike, /) -> Series: ...
- @overload
- def ceil_datetime(column: IntoExprColumn, every: ExprLike, /) -> ExprOrSeries: ...
- def ceil_datetime(column: IntoExprColumn, every: ExprLike, /) -> ExprOrSeries:
-     """Compute the `ceil` of a datetime column."""
-     column = ensure_expr_or_series(column)
-     rounded = column.dt.round(every)
-     ceil = (
-         when(column <= rounded)
-         .then(rounded)
-         .otherwise(column.dt.offset_by(every).dt.round(every))
-     )
-     if isinstance(column, Expr):
-         return ceil
-     return DataFrame().with_columns(ceil.alias(column.name))[column.name]
-
-
- ##
-
-
  def check_polars_dataframe(
      df: DataFrame,
      /,
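`ceil_datetime` is removed here (its counterpart `floor_datetime` is removed in a later hunk, and both are dropped from `__all__`). Callers who still need the behaviour could substitute polars' own temporal expressions; the following is an assumed stand-in, not something the package provides:

```python
import polars as pl


def floor_datetime(expr: pl.Expr, every: str) -> pl.Expr:
    # dt.truncate rounds down to the start of the window.
    return expr.dt.truncate(every)


def ceil_datetime(expr: pl.Expr, every: str) -> pl.Expr:
    # Round down, then step forward one window unless already aligned.
    truncated = expr.dt.truncate(every)
    return (
        pl.when(expr == truncated)
        .then(truncated)
        .otherwise(truncated.dt.offset_by(every))
    )
```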
@@ -1051,16 +1036,38 @@ def dataclass_to_dataframe(
 
 
  def _dataclass_to_dataframe_cast(series: Series, /) -> Series:
-     if series.dtype == Object:
-         is_path = series.map_elements(make_isinstance(Path), return_dtype=Boolean).all()
-         is_uuid = series.map_elements(make_isinstance(UUID), return_dtype=Boolean).all()
-         if is_path or is_uuid:
-             with suppress_warnings(category=PolarsInefficientMapWarning):
-                 return series.map_elements(str, return_dtype=String)
-         else:  # pragma: no cover
-             msg = f"{is_path=}, f{is_uuid=}"
-             raise NotImplementedError(msg)
-     return series
+     if series.dtype != Object:
+         return series
+     if series.map_elements(make_isinstance(whenever.Date), return_dtype=Boolean).all():
+         return series.map_elements(lambda x: x.py_date(), return_dtype=pl.Date)
+     if series.map_elements(make_isinstance(DateDelta), return_dtype=Boolean).all():
+         return series.map_elements(to_py_time_delta, return_dtype=Duration)
+     if series.map_elements(make_isinstance(DateTimeDelta), return_dtype=Boolean).all():
+         return series.map_elements(to_py_time_delta, return_dtype=Duration)
+     is_path = series.map_elements(make_isinstance(Path), return_dtype=Boolean).all()
+     is_uuid = series.map_elements(make_isinstance(UUID), return_dtype=Boolean).all()
+     if is_path or is_uuid:
+         with suppress_warnings(category=PolarsInefficientMapWarning):
+             return series.map_elements(str, return_dtype=String)
+     if series.map_elements(make_isinstance(whenever.Time), return_dtype=Boolean).all():
+         return series.map_elements(lambda x: x.py_time(), return_dtype=pl.Time)
+     if series.map_elements(make_isinstance(TimeDelta), return_dtype=Boolean).all():
+         return series.map_elements(to_py_time_delta, return_dtype=Duration)
+     if series.map_elements(make_isinstance(ZonedDateTime), return_dtype=Boolean).all():
+         return_dtype = zoned_date_time_dtype(time_zone=one({dt.tz for dt in series}))
+         return series.map_elements(lambda x: x.py_datetime(), return_dtype=return_dtype)
+     if series.map_elements(
+         lambda x: isinstance(x, dict) and (set(x) == {"start", "end"}),
+         return_dtype=Boolean,
+     ).all():
+         start = _dataclass_to_dataframe_cast(
+             series.map_elements(lambda x: x["start"], return_dtype=Object)
+         ).alias("start")
+         end = _dataclass_to_dataframe_cast(
+             series.map_elements(lambda x: x["end"], return_dtype=Object)
+         ).alias("end")
+         return concat_series(start, end).select(x=struct(start=start, end=end))["x"]
+     raise NotImplementedError(series)  # pragma: no cover
 
 
  @dataclass(kw_only=True, slots=True)
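The rewritten cast helper converts `Object` series holding `whenever` values into proper polars dtypes. The same conversion in isolation, using only public polars and whenever APIs (the helper itself is private):

```python
import polars as pl
import whenever

# An Object series of whenever.Date values becomes a real Date column, mirroring
# what the new cast helper does internally via map_elements.
raw = pl.Series(
    "d", [whenever.Date(2024, 1, 1), whenever.Date(2024, 1, 2)], dtype=pl.Object
)
as_dates = raw.map_elements(lambda x: x.py_date(), return_dtype=pl.Date)
print(as_dates.dtype)  # Date
```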
@@ -1101,20 +1108,14 @@ def dataclass_to_schema(
      for field in yield_fields(
          obj, globalns=globalns, localns=localns, warn_name_errors=warn_name_errors
      ):
-         if is_dataclass_instance(field.value):
+         if is_dataclass_instance(field.value) and not (
+             isinstance(field.type_, type)
+             and issubclass(field.type_, (DatePeriod, TimePeriod, ZonedDateTimePeriod))
+         ):
              dtypes = dataclass_to_schema(
                  field.value, globalns=globalns, localns=localns
              )
              dtype = struct_dtype(**dtypes)
-         elif field.type_ is dt.datetime:
-             dtype = _dataclass_to_schema_datetime(field)
-         elif is_union_type(field.type_) and set(
-             get_args(field.type_, optional_drop_none=True)
-         ) == {dt.date, dt.datetime}:
-             if is_instance_gen(field.value, dt.date):
-                 dtype = Date
-             else:
-                 dtype = _dataclass_to_schema_datetime(field)
          else:
              dtype = _dataclass_to_schema_one(
                  field.type_, globalns=globalns, localns=localns
@@ -1123,14 +1124,6 @@ def dataclass_to_schema(
      return out
 
 
- def _dataclass_to_schema_datetime(
-     field: _YieldFieldsInstance[dt.datetime], /
- ) -> PolarsDataType:
-     if field.value.tzinfo is None:
-         return Datetime
-     return zoned_datetime_dtype(time_zone=ensure_time_zone(field.value.tzinfo))
-
-
  def _dataclass_to_schema_one(
      obj: Any,
      /,
@@ -1138,20 +1131,35 @@ def _dataclass_to_schema_one(
      globalns: StrMapping | None = None,
      localns: StrMapping | None = None,
  ) -> PolarsDataType:
-     if obj is bool:
-         return Boolean
-     if obj is int:
-         return Int64
-     if obj is float:
-         return Float64
-     if obj is str:
-         return String
-     if obj is dt.date:
-         return Date
-     if obj in {Path, UUID}:
-         return Object
-     if isinstance(obj, type) and issubclass(obj, enum.Enum):
-         return pl.Enum([e.name for e in obj])
+     if isinstance(obj, type):
+         if issubclass(obj, bool):
+             return Boolean
+         if issubclass(obj, int):
+             return Int64
+         if issubclass(obj, float):
+             return Float64
+         if issubclass(obj, str):
+             return String
+         if issubclass(
+             obj,
+             (
+                 DateDelta,
+                 DatePeriod,
+                 DateTimeDelta,
+                 Path,
+                 PlainDateTime,
+                 TimeDelta,
+                 TimePeriod,
+                 UUID,
+                 ZonedDateTime,
+                 ZonedDateTimePeriod,
+                 whenever.Date,
+                 whenever.Time,
+             ),
+         ):
+             return Object
+         if issubclass(obj, enum.Enum):
+             return pl.Enum([e.name for e in obj])
      if is_dataclass_class(obj):
          out: dict[str, Any] = {}
          for field in yield_fields(obj, globalns=globalns, localns=localns):
@@ -1335,29 +1343,6 @@ class _FiniteEWMWeightsError(Exception):
  ##
 
 
- @overload
- def floor_datetime(column: ExprLike, every: ExprLike, /) -> Expr: ...
- @overload
- def floor_datetime(column: Series, every: ExprLike, /) -> Series: ...
- @overload
- def floor_datetime(column: IntoExprColumn, every: ExprLike, /) -> ExprOrSeries: ...
- def floor_datetime(column: IntoExprColumn, every: ExprLike, /) -> ExprOrSeries:
-     """Compute the `floor` of a datetime column."""
-     column = ensure_expr_or_series(column)
-     rounded = column.dt.round(every)
-     floor = (
-         when(column >= rounded)
-         .then(rounded)
-         .otherwise(column.dt.offset_by("-" + every).dt.round(every))
-     )
-     if isinstance(column, Expr):
-         return floor
-     return DataFrame().with_columns(floor.alias(column.name))[column.name]
-
-
- ##
-
-
  def get_data_type_or_series_time_zone(
      dtype_or_series: PolarsDataType | Series, /
  ) -> ZoneInfo:
@@ -2444,13 +2429,15 @@ def struct_from_dataclass(
  def _struct_from_dataclass_one(
      ann: Any, /, *, time_zone: TimeZoneLike | None = None
  ) -> PolarsDataType:
-     mapping = {bool: Boolean, dt.date: Date, float: Float64, int: Int64, str: String}
+     mapping = {
+         bool: Boolean,
+         whenever.Date: pl.Date,
+         float: Float64,
+         int: Int64,
+         str: String,
+     }
      with suppress(KeyError):
          return mapping[ann]
-     if ann is dt.datetime:
-         if time_zone is None:
-             raise _StructFromDataClassTimeZoneMissingError
-         return zoned_datetime_dtype(time_zone=time_zone)
      if is_dataclass_class(ann):
          return struct_from_dataclass(ann, time_zone=time_zone)
      if (isinstance(ann, type) and issubclass(ann, enum.Enum)) or (
@@ -2479,13 +2466,6 @@ class _StructFromDataClassNotADataclassError(StructFromDataClassError):
          return f"Object must be a dataclass; got {self.cls}"
 
 
- @dataclass(kw_only=True, slots=True)
- class _StructFromDataClassTimeZoneMissingError(StructFromDataClassError):
-     @override
-     def __str__(self) -> str:
-         return "Time-zone must be given"
-
-
  @dataclass(kw_only=True, slots=True)
  class _StructFromDataClassTypeError(StructFromDataClassError):
      ann: Any
@@ -2579,27 +2559,27 @@ def week_num(column: IntoExprColumn, /, *, start: WeekDay = "mon") -> ExprOrSeri
  ##
 
 
- def zoned_datetime_dtype(
+ def zoned_date_time_dtype(
      *, time_unit: TimeUnit = "us", time_zone: TimeZoneLike = UTC
  ) -> Datetime:
-     """Create a zoned datetime data type."""
+     """Create a zoned date-time data type."""
      return Datetime(time_unit=time_unit, time_zone=get_time_zone_name(time_zone))
 
 
- def zoned_datetime_period_dtype(
+ def zoned_date_time_period_dtype(
      *,
      time_unit: TimeUnit = "us",
      time_zone: TimeZoneLike | tuple[TimeZoneLike, TimeZoneLike] = UTC,
  ) -> Struct:
-     """Create a zoned datetime period data type."""
+     """Create a zoned date-time period data type."""
      match time_zone:
          case start, end:
              return struct_dtype(
-                 start=zoned_datetime_dtype(time_unit=time_unit, time_zone=start),
-                 end=zoned_datetime_dtype(time_unit=time_unit, time_zone=end),
+                 start=zoned_date_time_dtype(time_unit=time_unit, time_zone=start),
+                 end=zoned_date_time_dtype(time_unit=time_unit, time_zone=end),
              )
          case _:
-             dtype = zoned_datetime_dtype(time_unit=time_unit, time_zone=time_zone)
+             dtype = zoned_date_time_dtype(time_unit=time_unit, time_zone=time_zone)
              return struct_dtype(start=dtype, end=dtype)
 
 
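The two dtype helpers are renamed (`zoned_datetime_dtype` → `zoned_date_time_dtype`, `zoned_datetime_period_dtype` → `zoned_date_time_period_dtype`), so callers importing the old names need updating. A usage sketch, assuming a time-zone name string is an acceptable `TimeZoneLike`:

```python
from utilities.polars import zoned_date_time_dtype, zoned_date_time_period_dtype

# A microsecond-precision Datetime dtype in Asia/Tokyo.
dtype = zoned_date_time_dtype(time_zone="Asia/Tokyo")

# A Struct dtype with a "start" field in Asia/Tokyo and an "end" field in UTC.
period_dtype = zoned_date_time_period_dtype(time_zone=("Asia/Tokyo", "UTC"))
```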
@@ -2608,6 +2588,7 @@ __all__ = [
      "CheckPolarsDataFrameError",
      "ColumnsToDictError",
      "DataClassToDataFrameError",
+     "DatePeriodDType",
      "DatetimeHongKong",
      "DatetimeTokyo",
      "DatetimeUSCentral",
@@ -2625,6 +2606,7 @@ __all__ = [
      "IsNullStructSeriesError",
      "SetFirstRowAsColumnsError",
      "StructFromDataClassError",
+     "TimePeriodDType",
      "acf",
      "adjust_frequencies",
      "all_dataframe_columns",
@@ -2635,7 +2617,6 @@ __all__ = [
      "are_frames_equal",
      "bernoulli",
      "boolean_value_counts",
-     "ceil_datetime",
      "check_polars_dataframe",
      "choice",
      "collect_series",
@@ -2652,7 +2633,6 @@ __all__ = [
      "ensure_expr_or_series",
      "ensure_expr_or_series_many",
      "finite_ewm_mean",
-     "floor_datetime",
      "get_data_type_or_series_time_zone",
      "get_expr_name",
      "get_frequency_spectrum",
@@ -2688,6 +2668,6 @@ __all__ = [
      "unique_element",
      "write_dataframe",
      "write_series",
-     "zoned_datetime_dtype",
-     "zoned_datetime_period_dtype",
+     "zoned_date_time_dtype",
+     "zoned_date_time_period_dtype",
  ]
utilities/sqlalchemy_polars.py CHANGED
@@ -35,7 +35,7 @@ from utilities.iterables import (
      chunked,
      one,
  )
- from utilities.polars import zoned_datetime_dtype
+ from utilities.polars import zoned_date_time_dtype
  from utilities.reprlib import get_repr
  from utilities.sqlalchemy import (
      CHUNK_SIZE_FRAC,
@@ -390,7 +390,7 @@ def _select_to_dataframe_map_table_column_type_to_dtype(
          return pl.Date
      if is_subclass_gen(py_type, dt.datetime):
          has_tz: bool = type_use.timezone
-         return zoned_datetime_dtype(time_zone=time_zone) if has_tz else Datetime()
+         return zoned_date_time_dtype(time_zone=time_zone) if has_tz else Datetime()
      if issubclass(py_type, dt.time):
          return Time
      if issubclass(py_type, dt.timedelta):
utilities/traceback.py CHANGED
@@ -284,7 +284,7 @@ def _make_except_hook_inner(
      except SendToSlackError as error:
          _ = stderr.write(f"{error}\n")
      if to_bool(pudb):  # pragma: no cover
-         from pudb import post_mortem
+         from pudb import post_mortem  # pyright: ignore[reportMissingImports]
 
          post_mortem(tb=traceback, e_type=exc_type, e_value=exc_val)
 
utilities/typing.py CHANGED
@@ -23,6 +23,7 @@ from typing import get_type_hints as _get_type_hints
  from uuid import UUID
  from warnings import warn
 
+ import whenever
  from whenever import (
      Date,
      DateDelta,
@@ -122,7 +123,6 @@ def get_type_hints(
      warn_name_errors: bool = False,
  ) -> dict[str, Any]:
      """Get the type hints of an object."""
-     result: dict[str, Any] = obj.__annotations__
      _ = {
          Date,
          DateDelta,
@@ -136,10 +136,17 @@ def get_type_hints(
          TimeDelta,
          UUID,
          ZonedDateTime,
-         dt,
+         whenever.Date,
+         whenever.DateDelta,
+         whenever.DateTimeDelta,
+         whenever.PlainDateTime,
+         whenever.Time,
+         whenever.TimeDelta,
+         whenever.ZonedDateTime,
      }
      globalns_use = globals() | ({} if globalns is None else dict(globalns))
      localns_use = {} if localns is None else dict(localns)
+     result: dict[str, Any] = obj.__annotations__
      try:
          hints = _get_type_hints(obj, globalns=globalns_use, localns=localns_use)
      except NameError as error:
utilities/whenever.py CHANGED
@@ -367,6 +367,14 @@ def get_now_local() -> ZonedDateTime:
  NOW_LOCAL = get_now_local()
 
 
+ def get_now_plain(time_zone: TimeZoneLike = UTC, /) -> PlainDateTime:
+     """Get the current zoned datetime."""
+     return get_now(time_zone).to_plain()
+
+
+ NOW_PLAIN = get_now_plain()
+
+
  ##
 
 
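`get_now_plain` takes the current time in the given zone and drops the zone information, returning a `whenever.PlainDateTime`; `NOW_PLAIN` is a module-level snapshot taken at import time (UTC by default). A small usage sketch, assuming a time-zone name string is an acceptable `TimeZoneLike`:

```python
from utilities.whenever import NOW_PLAIN, get_now_plain

# Current wall-clock time in Tokyo, with the zone information stripped.
tokyo_naive = get_now_plain("Asia/Tokyo")
print(tokyo_naive)

# Snapshot captured when the module was first imported.
print(NOW_PLAIN)
```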
@@ -1927,6 +1935,7 @@ __all__ = [
      "MINUTE",
      "MONTH",
      "NOW_LOCAL",
+     "NOW_PLAIN",
      "SECOND",
      "TIME_DELTA_MAX",
      "TIME_DELTA_MIN",
@@ -1966,6 +1975,7 @@ __all__ = [
      "from_timestamp_nanos",
      "get_now",
      "get_now_local",
+     "get_now_plain",
      "get_today",
      "get_today_local",
      "mean_datetime",