dycw-utilities 0.153.15__py3-none-any.whl → 0.155.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {dycw_utilities-0.153.15.dist-info → dycw_utilities-0.155.0.dist-info}/METADATA +2 -2
- {dycw_utilities-0.153.15.dist-info → dycw_utilities-0.155.0.dist-info}/RECORD +16 -16
- utilities/__init__.py +1 -1
- utilities/fpdf2.py +2 -2
- utilities/hypothesis.py +143 -41
- utilities/iterables.py +1 -21
- utilities/logging.py +8 -49
- utilities/orjson.py +45 -12
- utilities/polars.py +2 -2
- utilities/pyinstrument.py +2 -4
- utilities/pytest_regressions.py +3 -1
- utilities/traceback.py +24 -16
- utilities/whenever.py +8 -4
- {dycw_utilities-0.153.15.dist-info → dycw_utilities-0.155.0.dist-info}/WHEEL +0 -0
- {dycw_utilities-0.153.15.dist-info → dycw_utilities-0.155.0.dist-info}/entry_points.txt +0 -0
- {dycw_utilities-0.153.15.dist-info → dycw_utilities-0.155.0.dist-info}/licenses/LICENSE +0 -0
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: dycw-utilities
-Version: 0.153.15
+Version: 0.155.0
 Author-email: Derek Wan <d.wan@icloud.com>
 License-File: LICENSE
 Requires-Python: >=3.12
@@ -12,7 +12,7 @@ Provides-Extra: logging
 Requires-Dist: coloredlogs<15.1,>=15.0.1; extra == 'logging'
 Provides-Extra: test
 Requires-Dist: dycw-pytest-only<2.2,>=2.1.1; extra == 'test'
-Requires-Dist: hypothesis<6.
+Requires-Dist: hypothesis<6.138,>=6.137.1; extra == 'test'
 Requires-Dist: pudb<2025.2,>=2025.1; extra == 'test'
 Requires-Dist: pytest-asyncio<1.2,>=1.1.0; extra == 'test'
 Requires-Dist: pytest-cov<6.3,>=6.2.1; extra == 'test'
@@ -1,4 +1,4 @@
-utilities/__init__.py,sha256=
+utilities/__init__.py,sha256=OFslnM101fl40brwBMTE9hAplv30EfT_NmRkbGPfu-g,60
 utilities/altair.py,sha256=92E2lCdyHY4Zb-vCw6rEJIsWdKipuu-Tu2ab1ufUfAk,9079
 utilities/asyncio.py,sha256=QXkTtugXkqtYt7Do23zgYErqzdp6jwzPpV_SP9fJ1gI,16780
 utilities/atomicwrites.py,sha256=tPo6r-Rypd9u99u66B9z86YBPpnLrlHtwox_8Z7T34Y,5790
@@ -15,23 +15,23 @@ utilities/enum.py,sha256=5l6pwZD1cjSlVW4ss-zBPspWvrbrYrdtJWcg6f5_J5w,5781
 utilities/errors.py,sha256=mFlDGSM0LI1jZ1pbqwLAH3ttLZ2JVIxyZLojw8tGVZU,1479
 utilities/eventkit.py,sha256=ddoleSwW9zdc2tjX5Ge0pMKtYwV_JMxhHYOxnWX2AGM,12609
 utilities/fastapi.py,sha256=3wpd63Tw9paSyy7STpAD7GGe8fLkLaRC6TPCwIGm1BU,1361
-utilities/fpdf2.py,sha256=
+utilities/fpdf2.py,sha256=HgM8JSvoioDXrjC0UR3HVLjnMnnb_mML7nL2EmkTwGI,1854
 utilities/functions.py,sha256=0mmeZ8op3QkAooYRAyRZhpi3TgaJCiMnqbJtZl-myug,28266
 utilities/functools.py,sha256=I00ru2gQPakZw2SHVeKIKXfTv741655s6HI0lUoE0D4,1552
 utilities/getpass.py,sha256=DfN5UgMAtFCqS3dSfFHUfqIMZX2shXvwphOz_6J6f6A,103
 utilities/gzip.py,sha256=fkGP3KdsBfXlstodT4wtlp-PwNyUsogpbDCVVVGdsm4,781
 utilities/hashlib.py,sha256=SVTgtguur0P4elppvzOBbLEjVM3Pea0eWB61yg2ilxo,309
 utilities/http.py,sha256=TsavEfHlRtlLaeV21Z6KZh0qbPw-kvD1zsQdZ7Kep5Q,977
-utilities/hypothesis.py,sha256=
+utilities/hypothesis.py,sha256=hoa1Szk3oLa07W4nl6Uhy2vsToIxx8AYjJg9PLc9JvQ,43516
 utilities/importlib.py,sha256=mV1xT_O_zt_GnZZ36tl3xOmMaN_3jErDWY54fX39F6Y,429
 utilities/inflect.py,sha256=v7YkOWSu8NAmVghPcf4F3YBZQoJCS47_DLf9jbfWIs0,581
 utilities/ipython.py,sha256=V2oMYHvEKvlNBzxDXdLvKi48oUq2SclRg5xasjaXStw,763
-utilities/iterables.py,sha256=
+utilities/iterables.py,sha256=ZmXBSk_Rio-aqLwTaoX69HD81YVcndeLYQwjv0P64JM,43009
 utilities/json.py,sha256=-WcGtSsCr9Y42wHZzAMnfvU6ihAfVftylFfRUORaDFo,2102
 utilities/jupyter.py,sha256=ft5JA7fBxXKzP-L9W8f2-wbF0QeYc_2uLQNFDVk4Z-M,2917
 utilities/libcst.py,sha256=TKgKN4bNmtBNEE-TUfhTyd1BrTncfsl_7tTuhpesGYY,5585
 utilities/lightweight_charts.py,sha256=YM3ojBvJxuCSUBu_KrhFBmaMCvRPvupKC3qkm-UVZq4,2751
-utilities/logging.py,sha256=
+utilities/logging.py,sha256=ihbfQJgjc7t3Pds0oPvF_J1eigiqFKzxNOijzoee8U4,18064
 utilities/math.py,sha256=7ve4RxX3g-FGGVnWV0K9bBeGnKUEjnTbH13VxdvFtGE,26847
 utilities/memory_profiler.py,sha256=XzN56jDCa5aqXS_DxEjb_K4L6aIWh_5zyKi6OhcIxw0,853
 utilities/modules.py,sha256=iuvLluJya-hvl1Q25-Jk3dLgx2Es3ck4SjJiEkAlVTs,3195
@@ -39,22 +39,22 @@ utilities/more_itertools.py,sha256=rklJ5vpvXr_H5pAGpWmwVpqtBVehoJ0-jBGYsZbux3M,1
 utilities/numpy.py,sha256=Xn23sA2ZbVNqwUYEgNJD3XBYH6IbCri_WkHSNhg3NkY,26122
 utilities/operator.py,sha256=nhxn5q6CFNzUm1wpTwWPCu9JGCqVHSlaJf0o1-efoII,3616
 utilities/optuna.py,sha256=C-fhWYiXHVPo1l8QctYkFJ4DyhbSrGorzP1dJb_qvd8,1933
-utilities/orjson.py,sha256=
+utilities/orjson.py,sha256=Ll0U172ITMqOJc3kjV90C0eI-EWzSXlMHSdUBaUSe80,41499
 utilities/os.py,sha256=mFvjydySvjtSXpk7tLStUJcndauAoujxUUmj_CO7LWY,3778
 utilities/parse.py,sha256=JcJn5yXKhIWXBCwgBdPsyu7Hvcuw6kyEdqvaebCaI9k,17951
 utilities/pathlib.py,sha256=qGuU8XPmdgGpy8tOMUgelfXx3kxI8h9IaV3TI_06QGE,8428
 utilities/pickle.py,sha256=MBT2xZCsv0pH868IXLGKnlcqNx2IRVKYNpRcqiQQqxw,653
 utilities/platform.py,sha256=pTn7gw6N4T6LdKrf0virwarof_mze9WtoQlrGMzhGVI,2798
-utilities/polars.py,sha256=
+utilities/polars.py,sha256=DxGDEw3KRxQJkuJ1S0fduXfCiyXJ-9mul0kYj3lFt_Q,78530
 utilities/polars_ols.py,sha256=Uc9V5kvlWZ5cU93lKZ-cfAKdVFFw81tqwLW9PxtUvMs,5618
 utilities/postgres.py,sha256=ynCTTaF-bVEOSW-KEAR-dlLh_hYjeVVjm__-4pEU8Zk,12269
 utilities/pottery.py,sha256=HJ96oLRarTP37Vhg0WTyB3yAu2hETeg6HgRmpDIqyUs,6581
 utilities/pqdm.py,sha256=z8bSMS7QJmWun65FQZruAqT-R3wqPAzNzhWcX9Nvr0A,3087
 utilities/psutil.py,sha256=KUlu4lrUw9Zg1V7ZGetpWpGb9DB8l_SSDWGbANFNCPU,2104
 utilities/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-utilities/pyinstrument.py,sha256=
+utilities/pyinstrument.py,sha256=NZCZz2nBo0BLJ9DTf7H_Q_KGxvsf2S2M3h0qYoYh2kw,804
 utilities/pytest.py,sha256=2HHfAWkzZeK2OAzL2F49EDKooMkfDoGqg8Ev4cHC_N8,7869
-utilities/pytest_regressions.py,sha256=
+utilities/pytest_regressions.py,sha256=8by5DWEL89Y469TI5AzX1pMy3NJWVtjEg2xQdOOdYuM,4169
 utilities/random.py,sha256=hZlH4gnAtoaofWswuJYjcygejrY8db4CzP-z_adO2Mo,4165
 utilities/re.py,sha256=S4h-DLL6ScMPqjboZ_uQ1BVTJajrqV06r_81D--_HCE,4573
 utilities/redis.py,sha256=2fdveFbqL2pEAeyiVuN_Je8nSM_IZHeahPduMHhFRzY,28381
@@ -72,7 +72,7 @@ utilities/tempfile.py,sha256=HxB2BF28CcecDJLQ3Bx2Ej-Pb6RJc6W9ngSpB9CnP4k,2018
 utilities/text.py,sha256=uwCDgpEunYruyh6sKMfNWK3Rp5H3ndpKRAkq86CBNys,13043
 utilities/threading.py,sha256=GvBOp4CyhHfN90wGXZuA2VKe9fGzMaEa7oCl4f3nnPU,1009
 utilities/timer.py,sha256=oXfTii6ymu57niP0BDGZjFD55LEHi2a19kqZKiTgaFQ,2588
-utilities/traceback.py,sha256=
+utilities/traceback.py,sha256=TjO7em98FDFLvROZ7gi2UJftFWNuSTkbCrf7mk-fg28,9416
 utilities/typed_settings.py,sha256=SFWqS3lAzV7IfNRwqFcTk0YynTcQ7BmrcW2mr_KUnos,4466
 utilities/types.py,sha256=L4cjFPyFZX58Urfw0S_i-XRywPIFyuSLOieewj0qqsM,18516
 utilities/typing.py,sha256=Z-_XDaWyT_6wIo3qfNK-hvRlzxP2Jxa9PgXzm5rDYRA,13790
@@ -81,14 +81,14 @@ utilities/tzlocal.py,sha256=KyCXEgCTjqGFx-389JdTuhMRUaT06U1RCMdWoED-qro,728
 utilities/uuid.py,sha256=nQZs6tFX4mqtc2Ku3KqjloYCqwpTKeTj8eKwQwh3FQI,1572
 utilities/version.py,sha256=ipBj5-WYY_nelp2uwFlApfWWCzTLzPwpovUi9x_OBMs,5085
 utilities/warnings.py,sha256=un1LvHv70PU-LLv8RxPVmugTzDJkkGXRMZTE2-fTQHw,1771
-utilities/whenever.py,sha256=
+utilities/whenever.py,sha256=gPnFKWws4_tjiHPLzX1AukSwDjfMIO9Iim0DDNQyAqY,57532
 utilities/zipfile.py,sha256=24lQc9ATcJxHXBPc_tBDiJk48pWyRrlxO2fIsFxU0A8,699
 utilities/zoneinfo.py,sha256=FBMcUQ4662Aq8SsuCL1OAhDQiyANmVjtb-C30DRrWoE,1966
 utilities/pytest_plugins/__init__.py,sha256=U4S_2y3zgLZVfMenHRaJFBW8yqh2mUBuI291LGQVOJ8,35
 utilities/pytest_plugins/pytest_randomly.py,sha256=B1qYVlExGOxTywq2r1SMi5o7btHLk2PNdY_b1p98dkE,409
 utilities/pytest_plugins/pytest_regressions.py,sha256=9v8kAXDM2ycIXJBimoiF4EgrwbUvxTycFWJiGR_GHhM,1466
-dycw_utilities-0.
-dycw_utilities-0.
-dycw_utilities-0.
-dycw_utilities-0.
-dycw_utilities-0.
+dycw_utilities-0.155.0.dist-info/METADATA,sha256=AFafkgKG-CWYCGGKjJxAXn1dYS1wM_OkjeV2m7ZnaQs,1696
+dycw_utilities-0.155.0.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+dycw_utilities-0.155.0.dist-info/entry_points.txt,sha256=BOD_SoDxwsfJYOLxhrSXhHP_T7iw-HXI9f2WVkzYxvQ,135
+dycw_utilities-0.155.0.dist-info/licenses/LICENSE,sha256=gppZp16M6nSVpBbUBrNL6JuYfvKwZiKgV7XoKKsHzqo,1066
+dycw_utilities-0.155.0.dist-info/RECORD,,
utilities/__init__.py
CHANGED
utilities/fpdf2.py
CHANGED
@@ -6,7 +6,7 @@ from typing import TYPE_CHECKING, override
 from fpdf import FPDF
 from fpdf.enums import XPos, YPos
 
-from utilities.whenever import
+from utilities.whenever import get_now, to_local_plain
 
 if TYPE_CHECKING:
     from collections.abc import Iterator
@@ -47,7 +47,7 @@ def yield_pdf(*, header: str | None = None) -> Iterator[_BasePDF]:
        def footer(self) -> None:
            self.set_y(-15)
            self.set_font(family="Helvetica", style="I", size=8)
-           page_no, now = (self.page_no(),
+           page_no, now = (self.page_no(), to_local_plain(get_now()))
            text = f"page {page_no}/{{}}; {now}"
            _ = self.cell(
                w=0,
utilities/hypothesis.py
CHANGED
@@ -50,10 +50,12 @@ from whenever import (
     ZonedDateTime,
 )
 
-from utilities.functions import ensure_int, ensure_str
+from utilities.functions import ensure_int, ensure_str, max_nullable, min_nullable
 from utilities.math import (
     MAX_FLOAT32,
     MAX_FLOAT64,
+    MAX_INT8,
+    MAX_INT16,
     MAX_INT32,
     MAX_INT64,
     MAX_UINT8,
@@ -62,6 +64,8 @@ from utilities.math import (
     MAX_UINT64,
     MIN_FLOAT32,
     MIN_FLOAT64,
+    MIN_INT8,
+    MIN_INT16,
     MIN_INT32,
     MIN_INT64,
     MIN_UINT8,
@@ -90,6 +94,9 @@ from utilities.whenever import (
     DAY,
     TIME_DELTA_MAX,
     TIME_DELTA_MIN,
+    DatePeriod,
+    TimePeriod,
+    ZonedDateTimePeriod,
     to_date_time_delta,
     to_days,
     to_nanoseconds,
@@ -201,6 +208,26 @@ def date_deltas(
 ##
 
 
+@composite
+def date_periods(
+    draw: DrawFn,
+    /,
+    *,
+    min_value: MaybeSearchStrategy[Date | None] = None,
+    max_value: MaybeSearchStrategy[Date | None] = None,
+    two_digit: MaybeSearchStrategy[bool] = False,
+) -> DatePeriod:
+    """Strategy for generating date periods."""
+    min_value_, max_value_ = [draw2(draw, v) for v in [min_value, max_value]]
+    two_digit_ = draw2(draw, two_digit)
+    strategy = dates(min_value=min_value_, max_value=max_value_, two_digit=two_digit_)
+    start, end = draw(pairs(strategy, sorted=True))
+    return DatePeriod(start, end)
+
+
+##
+
+
 @composite
 def date_time_deltas(
     draw: DrawFn,
@@ -373,13 +400,13 @@ def float32s(
     draw: DrawFn,
     /,
     *,
-    min_value: MaybeSearchStrategy[float] =
-    max_value: MaybeSearchStrategy[float] =
+    min_value: MaybeSearchStrategy[float | None] = None,
+    max_value: MaybeSearchStrategy[float | None] = None,
 ) -> float:
     """Strategy for generating float32s."""
     min_value_, max_value_ = [draw2(draw, v) for v in [min_value, max_value]]
-    min_value_ =
-    max_value_ =
+    min_value_ = max_nullable([min_value_, MIN_FLOAT32])
+    max_value_ = min_nullable([max_value_, MAX_FLOAT32])
     if is_zero(min_value_) and is_zero(max_value_):
         min_value_ = max_value_ = 0.0
     return draw(floats(min_value_, max_value_, width=32))
@@ -390,13 +417,13 @@ def float64s(
     draw: DrawFn,
     /,
     *,
-    min_value: MaybeSearchStrategy[float] =
-    max_value: MaybeSearchStrategy[float] =
+    min_value: MaybeSearchStrategy[float | None] = None,
+    max_value: MaybeSearchStrategy[float | None] = None,
 ) -> float:
     """Strategy for generating float64s."""
     min_value_, max_value_ = [draw2(draw, v) for v in [min_value, max_value]]
-    min_value_ =
-    max_value_ =
+    min_value_ = max_nullable([min_value_, MIN_FLOAT64])
+    max_value_ = min_nullable([max_value_, MAX_FLOAT64])
     if is_zero(min_value_) and is_zero(max_value_):
         min_value_ = max_value_ = 0.0
     return draw(floats(min_value_, max_value_, width=64))
@@ -601,18 +628,48 @@ def int_arrays(
 ##
 
 
+@composite
+def int8s(
+    draw: DrawFn,
+    /,
+    *,
+    min_value: MaybeSearchStrategy[int | None] = None,
+    max_value: MaybeSearchStrategy[int | None] = None,
+) -> int:
+    """Strategy for generating int8s."""
+    min_value_, max_value_ = [draw2(draw, v) for v in [min_value, max_value]]
+    min_value_ = max_nullable([min_value_, MIN_INT8])
+    max_value_ = min_nullable([max_value_, MAX_INT8])
+    return draw(integers(min_value=min_value_, max_value=max_value_))
+
+
+@composite
+def int16s(
+    draw: DrawFn,
+    /,
+    *,
+    min_value: MaybeSearchStrategy[int | None] = None,
+    max_value: MaybeSearchStrategy[int | None] = None,
+) -> int:
+    """Strategy for generating int16s."""
+    min_value_, max_value_ = [draw2(draw, v) for v in [min_value, max_value]]
+    min_value_ = max_nullable([min_value_, MIN_INT16])
+    max_value_ = min_nullable([max_value_, MAX_INT16])
+    return draw(integers(min_value=min_value_, max_value=max_value_))
+
+
 @composite
 def int32s(
     draw: DrawFn,
     /,
     *,
-    min_value: MaybeSearchStrategy[int] =
-    max_value: MaybeSearchStrategy[int] =
+    min_value: MaybeSearchStrategy[int | None] = None,
+    max_value: MaybeSearchStrategy[int | None] = None,
 ) -> int:
     """Strategy for generating int32s."""
     min_value_, max_value_ = [draw2(draw, v) for v in [min_value, max_value]]
-    min_value_ =
-    max_value_ =
+    min_value_ = max_nullable([min_value_, MIN_INT32])
+    max_value_ = min_nullable([max_value_, MAX_INT32])
     return draw(integers(min_value_, max_value_))
 
 
@@ -621,13 +678,13 @@ def int64s(
     draw: DrawFn,
     /,
     *,
-    min_value: MaybeSearchStrategy[int] =
-    max_value: MaybeSearchStrategy[int] =
+    min_value: MaybeSearchStrategy[int | None] = None,
+    max_value: MaybeSearchStrategy[int | None] = None,
 ) -> int:
     """Strategy for generating int64s."""
     min_value_, max_value_ = [draw2(draw, v) for v in [min_value, max_value]]
-    min_value_ =
-    max_value_ =
+    min_value_ = max_nullable([min_value_, MIN_INT64])
+    max_value_ = min_nullable([max_value_, MAX_INT64])
     return draw(integers(min_value_, max_value_))
 
 
@@ -780,7 +837,7 @@ def _path_parts(draw: DrawFn, /) -> str:
 
 
 @composite
-def
+def plain_date_times(
     draw: DrawFn,
     /,
     *,
@@ -1174,6 +1231,24 @@ def time_deltas(
 ##
 
 
+@composite
+def time_periods(
+    draw: DrawFn,
+    /,
+    *,
+    min_value: MaybeSearchStrategy[Time | None] = None,
+    max_value: MaybeSearchStrategy[Time | None] = None,
+) -> TimePeriod:
+    """Strategy for generating time periods."""
+    min_value_, max_value_ = [draw2(draw, v) for v in [min_value, max_value]]
+    strategy = times(min_value=min_value_, max_value=max_value_)
+    start, end = draw(pairs(strategy, sorted=True))
+    return TimePeriod(start, end)
+
+
+##
+
+
 @composite
 def times(
     draw: DrawFn,
@@ -1235,13 +1310,13 @@ def uint8s(
     draw: DrawFn,
     /,
     *,
-    min_value: MaybeSearchStrategy[int] =
-    max_value: MaybeSearchStrategy[int] =
+    min_value: MaybeSearchStrategy[int | None] = None,
+    max_value: MaybeSearchStrategy[int | None] = None,
 ) -> int:
     """Strategy for generating uint8s."""
     min_value_, max_value_ = [draw2(draw, v) for v in [min_value, max_value]]
-    min_value_ =
-    max_value_ =
+    min_value_ = max_nullable([min_value_, MIN_UINT8])
+    max_value_ = min_nullable([max_value_, MAX_UINT8])
     return draw(integers(min_value=min_value_, max_value=max_value_))
 
 
@@ -1250,13 +1325,13 @@ def uint16s(
     draw: DrawFn,
     /,
     *,
-    min_value: MaybeSearchStrategy[int] =
-    max_value: MaybeSearchStrategy[int] =
+    min_value: MaybeSearchStrategy[int | None] = None,
+    max_value: MaybeSearchStrategy[int | None] = None,
 ) -> int:
     """Strategy for generating uint16s."""
     min_value_, max_value_ = [draw2(draw, v) for v in [min_value, max_value]]
-    min_value_ =
-    max_value_ =
+    min_value_ = max_nullable([min_value_, MIN_UINT16])
+    max_value_ = min_nullable([max_value_, MAX_UINT16])
     return draw(integers(min_value=min_value_, max_value=max_value_))
 
 
@@ -1265,13 +1340,13 @@ def uint32s(
     draw: DrawFn,
     /,
     *,
-    min_value: MaybeSearchStrategy[int] =
-    max_value: MaybeSearchStrategy[int] =
+    min_value: MaybeSearchStrategy[int | None] = None,
+    max_value: MaybeSearchStrategy[int | None] = None,
 ) -> int:
     """Strategy for generating uint32s."""
     min_value_, max_value_ = [draw2(draw, v) for v in [min_value, max_value]]
-    min_value_ =
-    max_value_ =
+    min_value_ = max_nullable([min_value_, MIN_UINT32])
+    max_value_ = min_nullable([max_value_, MAX_UINT32])
     return draw(integers(min_value=min_value_, max_value=max_value_))
 
 
@@ -1280,13 +1355,13 @@ def uint64s(
     draw: DrawFn,
     /,
     *,
-    min_value: MaybeSearchStrategy[int] =
-    max_value: MaybeSearchStrategy[int] =
+    min_value: MaybeSearchStrategy[int | None] = None,
+    max_value: MaybeSearchStrategy[int | None] = None,
 ) -> int:
     """Strategy for generating uint64s."""
     min_value_, max_value_ = [draw2(draw, v) for v in [min_value, max_value]]
-    min_value_ =
-    max_value_ =
+    min_value_ = max_nullable([min_value_, MIN_UINT64])
+    max_value_ = min_nullable([max_value_, MAX_UINT64])
     return draw(integers(min_value=min_value_, max_value=max_value_))
 
 
@@ -1374,7 +1449,29 @@ def year_months(
 
 
 @composite
-def
+def zoned_date_time_periods(
+    draw: DrawFn,
+    /,
+    *,
+    min_value: MaybeSearchStrategy[PlainDateTime | ZonedDateTime | None] = None,
+    max_value: MaybeSearchStrategy[PlainDateTime | ZonedDateTime | None] = None,
+    time_zone: MaybeSearchStrategy[TimeZoneLike] = UTC,
+) -> ZonedDateTimePeriod:
+    """Strategy for generating zoned date-time periods."""
+    min_value_, max_value_ = [draw2(draw, v) for v in [min_value, max_value]]
+    time_zone_: TimeZoneLike = draw2(draw, time_zone)
+    strategy = zoned_date_times(
+        min_value=min_value_, max_value=max_value_, time_zone=time_zone_
+    )
+    start, end = draw(pairs(strategy, sorted=True))
+    return ZonedDateTimePeriod(start, end)
+
+
+##
+
+
+@composite
+def zoned_date_times(
     draw: DrawFn,
     /,
     *,
@@ -1382,7 +1479,7 @@ def zoned_datetimes(
     max_value: MaybeSearchStrategy[PlainDateTime | ZonedDateTime | None] = None,
     time_zone: MaybeSearchStrategy[TimeZoneLike] = UTC,
 ) -> ZonedDateTime:
-    """Strategy for generating zoned
+    """Strategy for generating zoned date-times."""
     min_value_, max_value_ = [draw2(draw, v) for v in [min_value, max_value]]
     time_zone_ = ensure_time_zone(draw2(draw, time_zone))
     match min_value_:
@@ -1401,7 +1498,7 @@ def zoned_datetimes(
             max_value_ = max_value_.to_tz(time_zone_.key).to_plain()
         case never:
             assert_never(never)
-    plain = draw(
+    plain = draw(plain_date_times(min_value=min_value_, max_value=max_value_))
     with (
         assume_does_not_raise(RepeatedTime),
         assume_does_not_raise(SkippedTime),
@@ -1415,7 +1512,7 @@ def zoned_datetimes(
     return zoned
 
 
-
+zoned_date_times_2000 = zoned_date_times(
     min_value=ZonedDateTime(2000, 1, 1, tz=UTC.key),
     max_value=ZonedDateTime(2000, 12, 31, tz=UTC.key),
 )
@@ -1427,6 +1524,7 @@ __all__ = [
     "assume_does_not_raise",
     "bool_arrays",
     "date_deltas",
+    "date_periods",
     "date_time_deltas",
     "dates",
     "draw2",
@@ -1438,6 +1536,8 @@ __all__ = [
     "hashables",
     "import_froms",
     "imports",
+    "int8s",
+    "int16s",
     "int32s",
     "int64s",
     "int_arrays",
@@ -1446,7 +1546,7 @@ __all__ = [
     "numbers",
     "pairs",
     "paths",
-    "
+    "plain_date_times",
     "py_datetimes",
     "random_states",
     "sentinels",
@@ -1463,6 +1563,7 @@ __all__ = [
     "text_digits",
     "text_printable",
     "time_deltas",
+    "time_periods",
     "times",
     "triples",
     "uint8s",
@@ -1472,6 +1573,7 @@ __all__ = [
     "urls",
     "versions",
     "year_months",
-    "
-    "
+    "zoned_date_time_periods",
+    "zoned_date_times",
+    "zoned_date_times_2000",
 ]
utilities/iterables.py
CHANGED
@@ -18,7 +18,7 @@ from enum import Enum
 from functools import cmp_to_key, partial, reduce
 from itertools import accumulate, chain, groupby, islice, pairwise, product
 from math import isnan
-from operator import add,
+from operator import add, or_
 from typing import (
     TYPE_CHECKING,
     Any,
@@ -821,24 +821,6 @@ def filter_include_and_exclude[T, U](
 ##
 
 
-def group_consecutive_integers(iterable: Iterable[int], /) -> Iterable[tuple[int, int]]:
-    """Group consecutive integers."""
-    integers = sorted(iterable)
-    for _, group in groupby(enumerate(integers), key=lambda x: x[1] - x[0]):
-        as_list = list(map(itemgetter(1), group))
-        yield as_list[0], as_list[-1]
-
-
-def ungroup_consecutive_integers(
-    iterable: Iterable[tuple[int, int]], /
-) -> Iterable[int]:
-    """Ungroup consecutive integers."""
-    return chain.from_iterable(range(start, end + 1) for start, end in iterable)
-
-
-##
-
-
 @overload
 def groupby_lists[T](
     iterable: Iterable[T], /, *, key: None = None
@@ -1504,7 +1486,6 @@ __all__ = [
     "enumerate_with_edge",
     "expanding_window",
     "filter_include_and_exclude",
-    "group_consecutive_integers",
     "groupby_lists",
     "hashable_to_iterable",
     "is_iterable",
@@ -1527,6 +1508,5 @@ __all__ = [
     "sum_mappings",
     "take",
     "transpose",
-    "ungroup_consecutive_integers",
     "unique_everseen",
 ]
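`group_consecutive_integers` and `ungroup_consecutive_integers` are gone from the public API. Callers that still need them can keep a local copy of the deleted implementation, which depends only on the standard library:

from collections.abc import Iterable
from itertools import chain, groupby
from operator import itemgetter


def group_consecutive_integers(iterable: Iterable[int], /) -> Iterable[tuple[int, int]]:
    """Group consecutive integers into inclusive (start, end) ranges."""
    integers = sorted(iterable)
    # Consecutive values share a constant (value - index) difference after sorting.
    for _, group in groupby(enumerate(integers), key=lambda x: x[1] - x[0]):
        as_list = list(map(itemgetter(1), group))
        yield as_list[0], as_list[-1]


def ungroup_consecutive_integers(iterable: Iterable[tuple[int, int]], /) -> Iterable[int]:
    """Expand inclusive (start, end) ranges back into the integers they cover."""
    return chain.from_iterable(range(start, end + 1) for start, end in iterable)

For example, `list(group_consecutive_integers([1, 2, 3, 7, 8]))` yields `[(1, 3), (7, 8)]`, and `ungroup_consecutive_integers` inverts it.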
utilities/logging.py
CHANGED
@@ -31,7 +31,7 @@ from typing import (
     override,
 )
 
-from whenever import
+from whenever import ZonedDateTime
 
 from utilities.atomicwrites import move_many
 from utilities.dataclasses import replace_non_sentinel
@@ -45,16 +45,15 @@ from utilities.re import (
     extract_groups,
 )
 from utilities.sentinel import Sentinel, sentinel
-from utilities.tzlocal import LOCAL_TIME_ZONE_NAME
 from utilities.whenever import (
     WheneverLogRecord,
-    format_compact,
     get_now_local,
+    parse_plain_local,
     to_local_plain,
 )
 
 if TYPE_CHECKING:
-    from collections.abc import
+    from collections.abc import Iterable, Mapping
     from datetime import time
     from logging import _FilterType
 
@@ -151,42 +150,6 @@ def basic_config(
 ##
 
 
-def filter_for_key(
-    key: str, /, *, default: bool = False
-) -> Callable[[LogRecord], bool]:
-    """Make a filter for a given attribute."""
-    if (key in _FILTER_FOR_KEY_BLACKLIST) or key.startswith("__"):
-        raise FilterForKeyError(key=key)
-
-    def filter_(record: LogRecord, /) -> bool:
-        try:
-            value = getattr(record, key)
-        except AttributeError:
-            return default
-        return bool(value)
-
-    return filter_
-
-
-# fmt: off
-_FILTER_FOR_KEY_BLACKLIST = {
-    "args", "created", "exc_info", "exc_text", "filename", "funcName", "getMessage", "levelname", "levelno", "lineno", "module", "msecs", "msg", "name", "pathname", "process", "processName", "relativeCreated", "stack_info", "taskName", "thread", "threadName"
-}
-# fmt: on
-
-
-@dataclass(kw_only=True, slots=True)
-class FilterForKeyError(Exception):
-    key: str
-
-    @override
-    def __str__(self) -> str:
-        return f"Invalid key: {self.key!r}"
-
-
-##
-
-
 def get_format_str(*, prefix: str | None = None, hostname: bool = False) -> str:
     """Generate a format string."""
     parts: list[str] = [
@@ -535,10 +498,8 @@ class _RotatingLogFile:
             stem=stem,
             suffix=suffix,
             index=int(index),
-            start=
-
-            ),
-            end=PlainDateTime.parse_common_iso(end).assume_tz(LOCAL_TIME_ZONE_NAME),
+            start=parse_plain_local(start),
+            end=parse_plain_local(end),
         )
        try:
            index, end = extract_groups(patterns.pattern2, path.name)
@@ -550,7 +511,7 @@ class _RotatingLogFile:
            stem=stem,
            suffix=suffix,
            index=int(index),
-           end=
+           end=parse_plain_local(end),
        )
        try:
            index = extract_group(patterns.pattern1, path.name)
@@ -571,9 +532,9 @@ class _RotatingLogFile:
            case int() as index, None, None:
                tail = str(index)
            case int() as index, None, ZonedDateTime() as end:
-               tail = f"{index}__{
+               tail = f"{index}__{to_local_plain(end)}"
            case int() as index, ZonedDateTime() as start, ZonedDateTime() as end:
-               tail = f"{index}__{
+               tail = f"{index}__{to_local_plain(start)}__{to_local_plain(end)}"
            case _:  # pragma: no cover
                raise ImpossibleCaseError(
                    case=[f"{self.index=}", f"{self.start=}", f"{self.end=}"]
@@ -626,12 +587,10 @@ def to_logger(logger: LoggerLike | None = None, /) -> Logger:
 
 
 __all__ = [
-    "FilterForKeyError",
     "GetLoggingLevelNumberError",
     "SizeAndTimeRotatingFileHandler",
     "add_filters",
     "basic_config",
-    "filter_for_key",
     "get_format_str",
     "get_logging_level_number",
     "setup_logging",
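`filter_for_key` and `FilterForKeyError` were removed from the public API. A minimal stand-in, adapted from the deleted code above (it drops the original's reserved-key check), would be:

from collections.abc import Callable
from logging import LogRecord


def filter_for_key(key: str, /, *, default: bool = False) -> Callable[[LogRecord], bool]:
    """Filter records on the truthiness of a custom attribute."""

    def filter_(record: LogRecord, /) -> bool:
        # Fall back to `default` when the record has no such attribute.
        return bool(getattr(record, key, default))

    return filter_

It can be attached to a standard handler with `handler.addFilter(filter_for_key("my_flag"))`.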
utilities/orjson.py
CHANGED
@@ -5,7 +5,7 @@ import re
 from collections.abc import Callable, Iterable, Mapping, Sequence
 from contextlib import suppress
 from dataclasses import dataclass, field, replace
-from enum import Enum, unique
+from enum import Enum, StrEnum, unique
 from functools import cached_property, partial
 from itertools import chain
 from logging import Formatter, LogRecord
@@ -52,7 +52,12 @@ from utilities.math import MAX_INT64, MIN_INT64
 from utilities.types import Dataclass, LogLevel, MaybeIterable, PathLike, StrMapping
 from utilities.tzlocal import LOCAL_TIME_ZONE
 from utilities.version import Version, parse_version
-from utilities.whenever import
+from utilities.whenever import (
+    DatePeriod,
+    TimePeriod,
+    ZonedDateTimePeriod,
+    from_timestamp,
+)
 
 if TYPE_CHECKING:
     from collections.abc import Set as AbstractSet
@@ -65,10 +70,11 @@ if TYPE_CHECKING:
 
 
 @unique
-class _Prefixes(
+class _Prefixes(StrEnum):
     dataclass = "dc"
     date = "d"
     date_delta = "dd"
+    date_period = "dp"
     date_time_delta = "D"
     enum = "e"
     exception_class = "Ex"
@@ -77,7 +83,7 @@ class _Prefixes(Enum):
     frozenset_ = "fr"
     list_ = "l"
     month_day = "md"
-    none = "
+    none = "0"
     path = "p"
     plain_date_time = "pd"
     py_date = "!d"
@@ -87,12 +93,14 @@ class _Prefixes(Enum):
     set_ = "s"
     time = "ti"
     time_delta = "td"
+    time_period = "tp"
     tuple_ = "tu"
     unserializable = "un"
     uuid = "uu"
     version = "v"
     year_month = "ym"
     zoned_date_time = "zd"
+    zoned_date_time_period = "zp"
 
 
 type _DataclassHook = Callable[[type[Dataclass], StrMapping], StrMapping]
@@ -167,8 +175,10 @@ def _pre_process(
             return f"[{_Prefixes.date.value}]{date}"
         case DateDelta() as date:
             return f"[{_Prefixes.date_delta.value}]{date}"
-        case
-            return f"[{_Prefixes.
+        case DatePeriod() as period:
+            return f"[{_Prefixes.date_period.value}]{period.start},{period.end}"
+        case DateTimeDelta() as date_time_delta:
+            return f"[{_Prefixes.date_time_delta.value}]{date_time_delta}"
         case Exception() as error_:
             return {
                 f"[{_Prefixes.exception_instance.value}|{type(error_).__qualname__}]": pre(
@@ -187,14 +197,16 @@ def _pre_process(
             return f"[{_Prefixes.month_day.value}]{month_day!s}"
         case Path() as path:
             return f"[{_Prefixes.path.value}]{path!s}"
-        case PlainDateTime() as
-            return f"[{_Prefixes.plain_date_time.value}]{
-        case str() as
-            return
+        case PlainDateTime() as date_time:
+            return f"[{_Prefixes.plain_date_time.value}]{date_time}"
+        case str() as text:
+            return text
         case Time() as time:
             return f"[{_Prefixes.time.value}]{time}"
         case TimeDelta() as time_delta:
             return f"[{_Prefixes.time_delta.value}]{time_delta}"
+        case TimePeriod() as period:
+            return f"[{_Prefixes.time_period.value}]{period.start},{period.end}"
         case type() as error_cls if issubclass(error_cls, Exception):
             return f"[{_Prefixes.exception_class.value}|{error_cls.__qualname__}]"
         case UUID() as uuid:
@@ -203,8 +215,12 @@ def _pre_process(
             return f"[{_Prefixes.version.value}]{version}"
         case YearMonth() as year_month:
             return f"[{_Prefixes.year_month.value}]{year_month}"
-        case ZonedDateTime() as
-            return f"[{_Prefixes.zoned_date_time.value}]{
+        case ZonedDateTime() as date_time:
+            return f"[{_Prefixes.zoned_date_time.value}]{date_time}"
+        case ZonedDateTimePeriod() as period:
+            return (
+                f"[{_Prefixes.zoned_date_time_period.value}]{period.start},{period.end}"
+            )
         case dt.datetime() as py_datetime:
             match py_datetime.tzinfo:
                 case None:
@@ -369,6 +385,7 @@ def deserialize(
 (
     _DATE_PATTERN,
     _DATE_DELTA_PATTERN,
+    _DATE_PERIOD_PATTERN,
     _DATE_TIME_DELTA_PATTERN,
     _FLOAT_PATTERN,
     _MONTH_DAY_PATTERN,
@@ -381,15 +398,18 @@ def deserialize(
     _PY_ZONED_DATE_TIME_PATTERN,
     _TIME_PATTERN,
     _TIME_DELTA_PATTERN,
+    _TIME_PERIOD_PATTERN,
     _UUID_PATTERN,
     _VERSION_PATTERN,
     _YEAR_MONTH_PATTERN,
     _ZONED_DATE_TIME_PATTERN,
+    _ZONED_DATE_TIME_PERIOD_PATTERN,
 ) = [
     re.compile(r"^\[" + p.value + r"\](" + ".*" + ")$")
     for p in [
         _Prefixes.date,
         _Prefixes.date_delta,
+        _Prefixes.date_period,
         _Prefixes.date_time_delta,
         _Prefixes.float_,
         _Prefixes.month_day,
@@ -402,10 +422,12 @@ def deserialize(
         _Prefixes.py_zoned_date_time,
         _Prefixes.time,
         _Prefixes.time_delta,
+        _Prefixes.time_period,
         _Prefixes.uuid,
         _Prefixes.version,
         _Prefixes.year_month,
         _Prefixes.zoned_date_time,
+        _Prefixes.zoned_date_time_period,
     ]
 ]
 
@@ -453,6 +475,9 @@ def _object_hook(
         return Date.parse_common_iso(match.group(1))
     if match := _DATE_DELTA_PATTERN.search(text):
         return DateDelta.parse_common_iso(match.group(1))
+    if match := _DATE_PERIOD_PATTERN.search(text):
+        start, end = map(Date.parse_common_iso, match.group(1).split(","))
+        return DatePeriod(start, end)
     if match := _DATE_TIME_DELTA_PATTERN.search(text):
         return DateTimeDelta.parse_common_iso(match.group(1))
     if match := _FLOAT_PATTERN.search(text):
@@ -475,6 +500,9 @@ def _object_hook(
         return Time.parse_common_iso(match.group(1))
     if match := _TIME_DELTA_PATTERN.search(text):
         return TimeDelta.parse_common_iso(match.group(1))
+    if match := _TIME_PERIOD_PATTERN.search(text):
+        start, end = map(Time.parse_common_iso, match.group(1).split(","))
+        return TimePeriod(start, end)
     if match := _UUID_PATTERN.search(text):
         return UUID(match.group(1))
     if match := _VERSION_PATTERN.search(text):
@@ -483,6 +511,11 @@ def _object_hook(
         return YearMonth.parse_common_iso(match.group(1))
     if match := _ZONED_DATE_TIME_PATTERN.search(text):
         return ZonedDateTime.parse_common_iso(match.group(1))
+    if match := _ZONED_DATE_TIME_PERIOD_PATTERN.search(text):
+        start, end = map(
+            ZonedDateTime.parse_common_iso, match.group(1).split(",")
+        )
+        return ZonedDateTimePeriod(start, end)
     if (
         exc_class := _object_hook_exception_class(
             text, data=data, objects=objects, redirects=redirects
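An illustrative round-trip for the new period prefixes (hypothetical usage: `deserialize` appears in the hunks above, while a matching `serialize` counterpart in `utilities.orjson` is an assumption):

from whenever import Date

from utilities.orjson import deserialize, serialize  # `serialize` assumed to exist
from utilities.whenever import DatePeriod

period = DatePeriod(Date(2024, 1, 1), Date(2024, 1, 31))
data = serialize(period)      # encoded roughly as "[dp]2024-01-01,2024-01-31" per the "dp" prefix
restored = deserialize(data)  # _object_hook splits on "," and rebuilds a DatePeriod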
utilities/polars.py
CHANGED
@@ -43,9 +43,9 @@ from polars import (
 from polars._typing import PolarsDataType
 from polars.datatypes import DataType, DataTypeClass
 from polars.exceptions import (
-    ColumnNotFoundError,
+    ColumnNotFoundError,  # pyright: ignore[reportAttributeAccessIssue]
     NoRowsReturnedError,
-    OutOfBoundsError,
+    OutOfBoundsError,  # pyright: ignore[reportAttributeAccessIssue]
     PolarsInefficientMapWarning,
 )
 from polars.schema import Schema
utilities/pyinstrument.py
CHANGED
@@ -8,7 +8,7 @@ from pyinstrument.profiler import Profiler
 
 from utilities.atomicwrites import writer
 from utilities.pathlib import to_path
-from utilities.whenever import
+from utilities.whenever import get_now, to_local_plain
 
 if TYPE_CHECKING:
     from collections.abc import Iterator
@@ -21,9 +21,7 @@ def profile(path: MaybeCallablePathLike = Path.cwd, /) -> Iterator[None]:
     """Profile the contents of a block."""
     with Profiler() as profiler:
         yield
-    filename = to_path(path).joinpath(
-        f"profile__{format_compact(to_local_plain(get_now()))}.html"
-    )
+    filename = to_path(path).joinpath(f"profile__{to_local_plain(get_now())}.html")
     with writer(filename) as temp:
         _ = temp.write_text(profiler.output_html())
 
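Illustrative use of the simplified profiler helper (assuming `profile` behaves as a context manager, as its `Iterator[None]` signature suggests):

from utilities.pyinstrument import profile

with profile():  # writes profile__<compact local timestamp>.html under the current directory
    total = sum(i * i for i in range(100_000))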
utilities/pytest_regressions.py
CHANGED
@@ -91,7 +91,9 @@ class PolarsRegressionFixture:
     def check(self, obj: Series | DataFrame, /, *, suffix: str | None = None) -> None:
         """Check the Series/DataFrame summary against the baseline."""
         from polars import DataFrame, Series, col
-        from polars.exceptions import
+        from polars.exceptions import (
+            InvalidOperationError,  # pyright: ignore[reportAttributeAccessIssue]
+        )
 
         data: StrMapping = {
             "describe": obj.describe(percentiles=[i / 10 for i in range(1, 10)]).rows(
utilities/traceback.py
CHANGED
@@ -33,6 +33,7 @@ from utilities.whenever import (
     format_compact,
     get_now,
     get_now_local,
+    parse_plain_local,
     to_local_plain,
     to_zoned_date_time,
 )
@@ -43,6 +44,7 @@ if TYPE_CHECKING:
     from types import TracebackType
 
     from utilities.types import (
+        Delta,
         MaybeCallableBoolLike,
         MaybeCallablePathLike,
         MaybeCallableZonedDateTimeLike,
@@ -95,16 +97,10 @@ def _yield_header_lines(
 ) -> Iterator[str]:
     """Yield the header lines."""
     now = get_now_local()
-    start_use = to_zoned_date_time(start)
     yield f"Date/time | {format_compact(now)}"
-
-
-
-    start_str = format_compact(start_use.to_tz(LOCAL_TIME_ZONE_NAME))
-    yield f"Started | {start_str}"
-    delta = None if start_use is None else (now - start_use)
-    delta_str = "" if delta is None else delta.format_common_iso()
-    yield f"Duration | {delta_str}"
+    start_use = to_zoned_date_time(start).to_tz(LOCAL_TIME_ZONE_NAME)
+    yield f"Started | {format_compact(start_use)}"
+    yield f"Duration | {(now - start_use).format_common_iso()}"
     yield f"User | {getuser()}"
     yield f"Host | {gethostname()}"
     yield f"Process ID | {getpid()}"
@@ -205,6 +201,7 @@ def make_except_hook(
     start: MaybeCallableZonedDateTimeLike = get_now,
     version: MaybeCallableVersionLike | None = None,
     path: MaybeCallablePathLike | None = None,
+    path_max_age: Delta | None = None,
     max_width: int = RICH_MAX_WIDTH,
     indent_size: int = RICH_INDENT_SIZE,
     max_length: int | None = RICH_MAX_LENGTH,
@@ -222,6 +219,7 @@ def make_except_hook(
         start=start,
         version=version,
         path=path,
+        path_max_age=path_max_age,
         max_width=max_width,
         indent_size=indent_size,
         max_length=max_length,
@@ -242,6 +240,7 @@ def _make_except_hook_inner(
     start: MaybeCallableZonedDateTimeLike = get_now,
     version: MaybeCallableVersionLike | None = None,
     path: MaybeCallablePathLike | None = None,
+    path_max_age: Delta | None = None,
     max_width: int = RICH_MAX_WIDTH,
     indent_size: int = RICH_INDENT_SIZE,
     max_length: int | None = RICH_MAX_LENGTH,
@@ -258,11 +257,8 @@ def _make_except_hook_inner(
     slim = format_exception_stack(exc_val, header=True, start=start, version=version)
     _ = sys.stderr.write(f"{slim}\n")  # don't 'from sys import stderr'
     if path is not None:
-        path = (
-
-            .joinpath(format_compact(to_local_plain(get_now())))
-            .with_suffix(".txt")
-        )
+        path = to_path(path)
+        path_log = path.joinpath(to_local_plain(get_now())).with_suffix(".txt")
         full = format_exception_stack(
             exc_val,
             header=True,
@@ -276,8 +272,10 @@ def _make_except_hook_inner(
             max_depth=max_depth,
             expand_all=expand_all,
         )
-        with writer(
+        with writer(path_log, overwrite=True) as temp:
             _ = temp.write_text(full)
+        if path_max_age is not None:
+            _make_except_hook_purge(path, path_max_age)
     if slack_url is not None:  # pragma: no cover
         from utilities.slack_sdk import SendToSlackError, send_to_slack
 
@@ -285,13 +283,23 @@ def _make_except_hook_inner(
             send_to_slack(slack_url, f"```{slim}```")
         except SendToSlackError as error:
             _ = stderr.write(f"{error}\n")
-
     if to_bool(pudb):  # pragma: no cover
         from pudb import post_mortem
 
         post_mortem(tb=traceback, e_type=exc_type, e_value=exc_val)
 
 
+def _make_except_hook_purge(path: PathLike, max_age: Delta, /) -> None:
+    threshold = get_now() - max_age
+    paths = {
+        p
+        for p in Path(path).iterdir()
+        if p.is_file() and (parse_plain_local(p.stem) <= threshold)
+    }
+    for p in paths:
+        p.unlink(missing_ok=True)
+
+
 @dataclass(kw_only=True, slots=True)
 class MakeExceptHookError(Exception):
     @override
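A hedged sketch of the new `path_max_age` knob: the exact `Delta` type accepted is an assumption, though a `whenever.TimeDelta` matches the `get_now() - max_age` arithmetic above, and the purge parses every file stem in the directory, so it should contain only these crash reports:

import sys
from pathlib import Path

from whenever import TimeDelta

from utilities.traceback import make_except_hook

# Crash reports land in ./crash_logs; on the next crash, reports older than 7 days are purged.
sys.excepthook = make_except_hook(path=Path("crash_logs"), path_max_age=TimeDelta(hours=24 * 7))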
utilities/whenever.py
CHANGED
@@ -52,8 +52,6 @@ if TYPE_CHECKING:
         TimeZoneLike,
     )
 
-# type vars
-
 
 # bounds
 
@@ -1008,9 +1006,14 @@ class _ToHoursNanosecondsError(ToHoursError):
 ##
 
 
-def to_local_plain(date_time: ZonedDateTime, /) ->
+def to_local_plain(date_time: ZonedDateTime, /) -> str:
     """Convert a datetime to its local/plain variant."""
-    return date_time.to_tz(LOCAL_TIME_ZONE_NAME).to_plain()
+    return format_compact(date_time.to_tz(LOCAL_TIME_ZONE_NAME).to_plain())
+
+
+def parse_plain_local(text: str, /) -> ZonedDateTime:
+    """Parse a plain, local datetime."""
+    return PlainDateTime.parse_common_iso(text).assume_tz(LOCAL_TIME_ZONE_NAME)
 
 
 ##
@@ -1967,6 +1970,7 @@ __all__ = [
     "get_today_local",
     "mean_datetime",
     "min_max_date",
+    "parse_plain_local",
     "round_date_or_date_time",
     "sub_year_month",
     "to_date",
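The two helpers now form the pair that the logging and traceback modules use for file-name timestamps; a minimal sketch of how they fit together:

from utilities.whenever import get_now, parse_plain_local, to_local_plain

stamp = to_local_plain(get_now())  # compact plain text in the local zone, suitable for file names
moment = parse_plain_local(stamp)  # re-attached to the local zone as a ZonedDateTime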
File without changes
File without changes
File without changes