dycw-utilities 0.129.10__py3-none-any.whl → 0.175.17__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (103)
  1. dycw_utilities-0.175.17.dist-info/METADATA +34 -0
  2. dycw_utilities-0.175.17.dist-info/RECORD +103 -0
  3. dycw_utilities-0.175.17.dist-info/WHEEL +4 -0
  4. dycw_utilities-0.175.17.dist-info/entry_points.txt +4 -0
  5. utilities/__init__.py +1 -1
  6. utilities/altair.py +14 -14
  7. utilities/asyncio.py +350 -819
  8. utilities/atomicwrites.py +18 -6
  9. utilities/atools.py +77 -22
  10. utilities/cachetools.py +24 -29
  11. utilities/click.py +393 -237
  12. utilities/concurrent.py +8 -11
  13. utilities/contextlib.py +216 -17
  14. utilities/contextvars.py +20 -1
  15. utilities/cryptography.py +3 -3
  16. utilities/dataclasses.py +83 -118
  17. utilities/docker.py +293 -0
  18. utilities/enum.py +26 -23
  19. utilities/errors.py +17 -3
  20. utilities/fastapi.py +29 -65
  21. utilities/fpdf2.py +3 -3
  22. utilities/functions.py +169 -416
  23. utilities/functools.py +18 -19
  24. utilities/git.py +9 -30
  25. utilities/grp.py +28 -0
  26. utilities/gzip.py +31 -0
  27. utilities/http.py +3 -2
  28. utilities/hypothesis.py +738 -589
  29. utilities/importlib.py +17 -1
  30. utilities/inflect.py +25 -0
  31. utilities/iterables.py +194 -262
  32. utilities/jinja2.py +148 -0
  33. utilities/json.py +70 -0
  34. utilities/libcst.py +38 -17
  35. utilities/lightweight_charts.py +5 -9
  36. utilities/logging.py +345 -543
  37. utilities/math.py +18 -13
  38. utilities/memory_profiler.py +11 -15
  39. utilities/more_itertools.py +200 -131
  40. utilities/operator.py +33 -29
  41. utilities/optuna.py +6 -6
  42. utilities/orjson.py +272 -137
  43. utilities/os.py +61 -4
  44. utilities/parse.py +59 -61
  45. utilities/pathlib.py +281 -40
  46. utilities/permissions.py +298 -0
  47. utilities/pickle.py +2 -2
  48. utilities/platform.py +24 -5
  49. utilities/polars.py +1214 -430
  50. utilities/polars_ols.py +1 -1
  51. utilities/postgres.py +408 -0
  52. utilities/pottery.py +113 -26
  53. utilities/pqdm.py +10 -11
  54. utilities/psutil.py +6 -57
  55. utilities/pwd.py +28 -0
  56. utilities/pydantic.py +4 -54
  57. utilities/pydantic_settings.py +240 -0
  58. utilities/pydantic_settings_sops.py +76 -0
  59. utilities/pyinstrument.py +8 -10
  60. utilities/pytest.py +227 -121
  61. utilities/pytest_plugins/__init__.py +1 -0
  62. utilities/pytest_plugins/pytest_randomly.py +23 -0
  63. utilities/pytest_plugins/pytest_regressions.py +56 -0
  64. utilities/pytest_regressions.py +26 -46
  65. utilities/random.py +13 -9
  66. utilities/re.py +58 -28
  67. utilities/redis.py +401 -550
  68. utilities/scipy.py +1 -1
  69. utilities/sentinel.py +10 -0
  70. utilities/shelve.py +4 -1
  71. utilities/shutil.py +25 -0
  72. utilities/slack_sdk.py +36 -106
  73. utilities/sqlalchemy.py +502 -473
  74. utilities/sqlalchemy_polars.py +38 -94
  75. utilities/string.py +2 -3
  76. utilities/subprocess.py +1572 -0
  77. utilities/tempfile.py +86 -4
  78. utilities/testbook.py +50 -0
  79. utilities/text.py +165 -42
  80. utilities/timer.py +37 -65
  81. utilities/traceback.py +158 -929
  82. utilities/types.py +146 -116
  83. utilities/typing.py +531 -71
  84. utilities/tzdata.py +1 -53
  85. utilities/tzlocal.py +6 -23
  86. utilities/uuid.py +43 -5
  87. utilities/version.py +27 -26
  88. utilities/whenever.py +1776 -386
  89. utilities/zoneinfo.py +84 -22
  90. dycw_utilities-0.129.10.dist-info/METADATA +0 -241
  91. dycw_utilities-0.129.10.dist-info/RECORD +0 -96
  92. dycw_utilities-0.129.10.dist-info/WHEEL +0 -4
  93. dycw_utilities-0.129.10.dist-info/licenses/LICENSE +0 -21
  94. utilities/datetime.py +0 -1409
  95. utilities/eventkit.py +0 -402
  96. utilities/loguru.py +0 -144
  97. utilities/luigi.py +0 -228
  98. utilities/period.py +0 -324
  99. utilities/pyrsistent.py +0 -89
  100. utilities/python_dotenv.py +0 -105
  101. utilities/streamlit.py +0 -105
  102. utilities/sys.py +0 -87
  103. utilities/tenacity.py +0 -145
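The headline change across this range is the removal of the stdlib-datetime-based modules (utilities/datetime.py, +0 -1409; utilities/period.py, +0 -324) in favour of the whenever library (utilities/whenever.py, +1776 -386). The utilities/orjson.py diff below shows the serializer's side of that migration. A minimal sketch of the new round trip, assuming serialize/deserialize behave as the diff indicates:

```python
# Hedged sketch; serialize/deserialize are the functions shown in the
# utilities/orjson.py diff below.
from whenever import ZonedDateTime

from utilities.orjson import deserialize, serialize

now = ZonedDateTime(2024, 1, 2, 3, 4, tz="Europe/London")
assert deserialize(serialize(now)) == now
```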
utilities/orjson.py CHANGED
@@ -5,7 +5,7 @@ import re
  from collections.abc import Callable, Iterable, Mapping, Sequence
  from contextlib import suppress
  from dataclasses import dataclass, field, replace
- from enum import Enum, unique
+ from enum import Enum, StrEnum, unique
  from functools import cached_property, partial
  from itertools import chain
  from logging import Formatter, LogRecord
@@ -14,18 +14,32 @@ from pathlib import Path
  from re import Pattern, search
  from typing import TYPE_CHECKING, Any, Literal, Self, assert_never, overload, override
  from uuid import UUID
+ from zoneinfo import ZoneInfo

  from orjson import (
      OPT_PASSTHROUGH_DATACLASS,
      OPT_PASSTHROUGH_DATETIME,
      OPT_SORT_KEYS,
+     JSONDecodeError,
      dumps,
      loads,
  )
+ from whenever import (
+     Date,
+     DateDelta,
+     DateTimeDelta,
+     MonthDay,
+     PlainDateTime,
+     Time,
+     TimeDelta,
+     YearMonth,
+     ZonedDateTime,
+ )

  from utilities.concurrent import concurrent_map
  from utilities.dataclasses import dataclass_to_dict
- from utilities.functions import ensure_class, is_string_mapping
+ from utilities.functions import ensure_class
+ from utilities.gzip import read_binary
  from utilities.iterables import (
      OneEmptyError,
      always_iterable,
@@ -33,31 +47,19 @@ from utilities.iterables import (
      one,
      one_unique,
  )
+ from utilities.json import write_formatted_json
  from utilities.logging import get_logging_level_number
  from utilities.math import MAX_INT64, MIN_INT64
- from utilities.types import (
-     Dataclass,
-     DateOrDateTime,
-     LogLevel,
-     MaybeIterable,
-     PathLike,
-     StrMapping,
- )
- from utilities.tzlocal import get_local_time_zone
- from utilities.uuid import UUID_PATTERN
+ from utilities.types import Dataclass, LogLevel, MaybeIterable, PathLike, StrMapping
+ from utilities.typing import is_string_mapping
+ from utilities.tzlocal import LOCAL_TIME_ZONE
  from utilities.version import Version, parse_version
  from utilities.whenever import (
-     parse_date,
-     parse_plain_datetime,
-     parse_time,
-     parse_timedelta,
-     parse_zoned_datetime,
-     serialize_date,
-     serialize_datetime,
-     serialize_time,
-     serialize_timedelta,
+     DatePeriod,
+     TimePeriod,
+     ZonedDateTimePeriod,
+     from_timestamp,
  )
- from utilities.zoneinfo import ensure_time_zone

  if TYPE_CHECKING:
      from collections.abc import Set as AbstractSet
@@ -70,28 +72,37 @@ if TYPE_CHECKING:


  @unique
- class _Prefixes(Enum):
+ class _Prefixes(StrEnum):
      dataclass = "dc"
      date = "d"
-     datetime = "dt"
+     date_delta = "dd"
+     date_period = "dp"
+     date_time_delta = "D"
      enum = "e"
-     exception_class = "exc"
-     exception_instance = "exi"
+     exception_class = "Ex"
+     exception_instance = "ex"
      float_ = "fl"
      frozenset_ = "fr"
      list_ = "l"
-     nan = "nan"
-     none = "none"
+     month_day = "md"
+     none = "0"
      path = "p"
-     pos_inf = "pos_inf"
-     neg_inf = "neg_inf"
+     plain_date_time = "pd"
+     py_date = "!d"
+     py_plain_date_time = "!pd"
+     py_time = "!ti"
+     py_zoned_date_time = "!zd"
      set_ = "s"
-     timedelta = "td"
-     time = "tm"
+     time = "ti"
+     time_delta = "td"
+     time_period = "tp"
      tuple_ = "tu"
      unserializable = "un"
      uuid = "uu"
      version = "v"
+     year_month = "ym"
+     zoned_date_time = "zd"
+     zoned_date_time_period = "zp"


  type _DataclassHook = Callable[[type[Dataclass], StrMapping], StrMapping]
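With `_Prefixes` now a `StrEnum`, every scalar serializes as a short bracketed tag followed by the value's ISO-8601 text (see `_pre_process` below). A sketch of the wire format; the exact byte outputs are inferred from this diff rather than taken from documentation:

```python
# Inferred wire format: "[<prefix>]<ISO text>", emitted by orjson as a
# JSON string.
from whenever import Date, TimeDelta

from utilities.orjson import serialize

serialize(Date(2024, 1, 2))    # b'"[d]2024-01-02"'  ("d" = date)
serialize(TimeDelta(hours=1))  # b'"[td]PT1H"'       ("td" = time_delta)
serialize(None)                # b'"[0]"'            ("0" = none)
```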
@@ -162,14 +173,14 @@ def _pre_process(
          # singletons
          case None:
              return f"[{_Prefixes.none.value}]"
-         case dt.datetime() as datetime:
-             return f"[{_Prefixes.datetime.value}]{serialize_datetime(datetime)}"
-         case dt.date() as date:  # after datetime
-             return f"[{_Prefixes.date.value}]{serialize_date(date)}"
-         case dt.time() as time:
-             return f"[{_Prefixes.time.value}]{serialize_time(time)}"
-         case dt.timedelta() as timedelta:
-             return f"[{_Prefixes.timedelta.value}]{serialize_timedelta(timedelta)}"
+         case Date() as date:
+             return f"[{_Prefixes.date.value}]{date}"
+         case DateDelta() as date:
+             return f"[{_Prefixes.date_delta.value}]{date}"
+         case DatePeriod() as period:
+             return f"[{_Prefixes.date_period.value}]{period.start},{period.end}"
+         case DateTimeDelta() as date_time_delta:
+             return f"[{_Prefixes.date_time_delta.value}]{date_time_delta}"
          case Exception() as error_:
              return {
                  f"[{_Prefixes.exception_instance.value}|{type(error_).__qualname__}]": pre(
@@ -184,16 +195,48 @@ def _pre_process(
              if MIN_INT64 <= int_ <= MAX_INT64:
                  return int_
              raise _SerializeIntegerError(obj=int_)
-         case UUID() as uuid:
-             return f"[{_Prefixes.uuid.value}]{uuid}"
+         case MonthDay() as month_day:
+             return f"[{_Prefixes.month_day.value}]{month_day!s}"
          case Path() as path:
              return f"[{_Prefixes.path.value}]{path!s}"
-         case str() as str_:
-             return str_
+         case PlainDateTime() as date_time:
+             return f"[{_Prefixes.plain_date_time.value}]{date_time}"
+         case str() as text:
+             return text
+         case Time() as time:
+             return f"[{_Prefixes.time.value}]{time}"
+         case TimeDelta() as time_delta:
+             return f"[{_Prefixes.time_delta.value}]{time_delta}"
+         case TimePeriod() as period:
+             return f"[{_Prefixes.time_period.value}]{period.start},{period.end}"
          case type() as error_cls if issubclass(error_cls, Exception):
              return f"[{_Prefixes.exception_class.value}|{error_cls.__qualname__}]"
+         case UUID() as uuid:
+             return f"[{_Prefixes.uuid.value}]{uuid}"
          case Version() as version:
-             return f"[{_Prefixes.version.value}]{version!s}"
+             return f"[{_Prefixes.version.value}]{version}"
+         case YearMonth() as year_month:
+             return f"[{_Prefixes.year_month.value}]{year_month}"
+         case ZonedDateTime() as date_time:
+             return f"[{_Prefixes.zoned_date_time.value}]{date_time}"
+         case ZonedDateTimePeriod() as period:
+             return f"[{_Prefixes.zoned_date_time_period.value}]{period.start.to_plain()},{period.end}"
+         case dt.datetime() as py_datetime:
+             match py_datetime.tzinfo:
+                 case None:
+                     datetime = PlainDateTime.from_py_datetime(py_datetime)
+                     return f"[{_Prefixes.py_plain_date_time.value}]{datetime}"
+                 case ZoneInfo():
+                     datetime = ZonedDateTime.from_py_datetime(py_datetime)
+                     return f"[{_Prefixes.py_zoned_date_time.value}]{datetime}"
+                 case _:  # pragma: no cover
+                     raise NotImplementedError
+         case dt.date() as py_date:
+             date = Date.from_py_date(py_date)
+             return f"[{_Prefixes.py_date.value}]{date}"
+         case dt.time() as py_time:
+             time = Time.from_py_time(py_time)
+             return f"[{_Prefixes.py_time.value}]{time}"
          # contains
          case Dataclass() as dataclass:
              asdict = dataclass_to_dict(
@@ -261,6 +304,7 @@ def _pre_process(
                  qualname=type(obj).__qualname__, repr=repr(obj), str=str(obj)
              )
              return pre(unserializable)
+     return None


  def _pre_process_container(
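The `_pre_process` arms above also keep stdlib `datetime` objects serializable: they are converted to whenever types and tagged with "!"-prefixed codes, and the `_object_hook` changes further down turn them back into stdlib objects. A sketch, with byte outputs inferred from the diff:

```python
# Sketch: stdlib datetimes round-trip via the "!" prefixes.
import datetime as dt
from zoneinfo import ZoneInfo

from utilities.orjson import deserialize, serialize

naive = dt.datetime(2024, 1, 2, 3, 4, 5)
data = serialize(naive)            # b'"[!pd]2024-01-02T03:04:05"'
assert deserialize(data) == naive  # back to datetime.datetime

aware = dt.datetime(2024, 1, 2, tzinfo=ZoneInfo("UTC"))
serialize(aware)                   # b'"[!zd]2024-01-02T00:00:00+00:00[UTC]"'
```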
@@ -329,8 +373,12 @@ def deserialize(
      redirects: Mapping[str, type[Any]] | None = None,
  ) -> Any:
      """Deserialize an object."""
+     try:
+         obj = loads(data)
+     except JSONDecodeError:
+         raise _DeserializeInvalidJSONError(data=data) from None
      return _object_hook(
-         loads(data),
+         obj,
          data=data,
          dataclass_hook=dataclass_hook,
          objects=objects,
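deserialize now rejects malformed input with a dedicated subclass of DeserializeError instead of leaking orjson's JSONDecodeError. A minimal sketch; _DeserializeInvalidJSONError is private, so callers catch the public base class:

```python
from utilities.orjson import DeserializeError, deserialize

try:
    deserialize(b"not json")
except DeserializeError as error:
    print(error)  # Invalid JSON: b'not json'
```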
@@ -338,46 +386,59 @@
      )


- _NONE_PATTERN = re.compile(r"^\[" + _Prefixes.none.value + r"\]$")
- _LOCAL_DATETIME_PATTERN = re.compile(
-     r"^\["
-     + _Prefixes.datetime.value
-     + r"\](\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}(?:\.\d{1,6})?)$"
- )
- _UUID_PATTERN = re.compile(r"^\[" + _Prefixes.uuid.value + r"\](" + UUID_PATTERN + ")$")
- _ZONED_DATETIME_PATTERN = re.compile(
-     r"^\["
-     + _Prefixes.datetime.value
-     + r"\](\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}(?:\.\d{1,6})?[\+\-]\d{2}:\d{2}(?::\d{2})?\[(?!(?:dt\.)).+?\])$"
- )
-
-
- def _make_unit_pattern(prefix: _Prefixes, /) -> Pattern[str]:
-     return re.compile(r"^\[" + prefix.value + r"\](.+)$")
+ @dataclass(kw_only=True, slots=True)
+ class SerializeError(Exception):
+     obj: Any


  (
      _DATE_PATTERN,
+     _DATE_DELTA_PATTERN,
+     _DATE_PERIOD_PATTERN,
+     _DATE_TIME_DELTA_PATTERN,
      _FLOAT_PATTERN,
+     _MONTH_DAY_PATTERN,
+     _NONE_PATTERN,
      _PATH_PATTERN,
+     _PLAIN_DATE_TIME_PATTERN,
+     _PY_DATE_PATTERN,
+     _PY_PLAIN_DATE_TIME_PATTERN,
+     _PY_TIME_PATTERN,
+     _PY_ZONED_DATE_TIME_PATTERN,
      _TIME_PATTERN,
-     _TIMEDELTA_PATTERN,
+     _TIME_DELTA_PATTERN,
+     _TIME_PERIOD_PATTERN,
+     _UUID_PATTERN,
      _VERSION_PATTERN,
- ) = map(
-     _make_unit_pattern,
-     [
+     _YEAR_MONTH_PATTERN,
+     _ZONED_DATE_TIME_PATTERN,
+     _ZONED_DATE_TIME_PERIOD_PATTERN,
+ ) = [
+     re.compile(r"^\[" + p.value + r"\](" + ".*" + ")$")
+     for p in [
          _Prefixes.date,
+         _Prefixes.date_delta,
+         _Prefixes.date_period,
+         _Prefixes.date_time_delta,
          _Prefixes.float_,
+         _Prefixes.month_day,
+         _Prefixes.none,
          _Prefixes.path,
+         _Prefixes.plain_date_time,
+         _Prefixes.py_date,
+         _Prefixes.py_plain_date_time,
+         _Prefixes.py_time,
+         _Prefixes.py_zoned_date_time,
          _Prefixes.time,
-         _Prefixes.timedelta,
+         _Prefixes.time_delta,
+         _Prefixes.time_period,
+         _Prefixes.uuid,
          _Prefixes.version,
-     ],
- )
-
-
- def _make_container_pattern(prefix: _Prefixes, /) -> Pattern[str]:
-     return re.compile(r"^\[" + prefix.value + r"(?:\|(.+))?\]$")
+         _Prefixes.year_month,
+         _Prefixes.zoned_date_time,
+         _Prefixes.zoned_date_time_period,
+     ]
+ ]


  (
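The hand-written patterns and the _make_unit_pattern factory are replaced by one comprehension: a regex per prefix, each capturing everything after the bracketed tag. For the "d" (date) prefix, the comprehension above is equivalent to:

```python
import re

pattern = re.compile(r"^\[d\](.*)$")  # what the comprehension builds for "d"
match = pattern.search("[d]2024-01-02")
assert match is not None and match.group(1) == "2024-01-02"
```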
@@ -389,9 +450,9 @@ def _make_container_pattern(prefix: _Prefixes, /) -> Pattern[str]:
      _LIST_PATTERN,
      _SET_PATTERN,
      _TUPLE_PATTERN,
- ) = map(
-     _make_container_pattern,
-     [
+ ) = [
+     re.compile(r"^\[" + p.value + r"(?:\|(.+))?\]$")
+     for p in [
          _Prefixes.dataclass,
          _Prefixes.enum,
          _Prefixes.exception_class,
@@ -400,8 +461,8 @@ def _make_container_pattern(prefix: _Prefixes, /) -> Pattern[str]:
          _Prefixes.list_,
          _Prefixes.set_,
          _Prefixes.tuple_,
-     ],
- )
+     ]
+ ]


  def _object_hook(
  def _object_hook(
@@ -420,23 +481,50 @@ def _object_hook(
420
481
  if match := _NONE_PATTERN.search(text):
421
482
  return None
422
483
  if match := _DATE_PATTERN.search(text):
423
- return parse_date(match.group(1))
484
+ return Date.parse_iso(match.group(1))
485
+ if match := _DATE_DELTA_PATTERN.search(text):
486
+ return DateDelta.parse_iso(match.group(1))
487
+ if match := _DATE_PERIOD_PATTERN.search(text):
488
+ start, end = map(Date.parse_iso, match.group(1).split(","))
489
+ return DatePeriod(start, end)
490
+ if match := _DATE_TIME_DELTA_PATTERN.search(text):
491
+ return DateTimeDelta.parse_iso(match.group(1))
424
492
  if match := _FLOAT_PATTERN.search(text):
425
493
  return float(match.group(1))
426
- if match := _LOCAL_DATETIME_PATTERN.search(text):
427
- return parse_plain_datetime(match.group(1))
494
+ if match := _MONTH_DAY_PATTERN.search(text):
495
+ return MonthDay.parse_iso(match.group(1))
428
496
  if match := _PATH_PATTERN.search(text):
429
497
  return Path(match.group(1))
498
+ if match := _PLAIN_DATE_TIME_PATTERN.search(text):
499
+ return PlainDateTime.parse_iso(match.group(1))
500
+ if match := _PY_DATE_PATTERN.search(text):
501
+ return Date.parse_iso(match.group(1)).py_date()
502
+ if match := _PY_PLAIN_DATE_TIME_PATTERN.search(text):
503
+ return PlainDateTime.parse_iso(match.group(1)).py_datetime()
504
+ if match := _PY_TIME_PATTERN.search(text):
505
+ return Time.parse_iso(match.group(1)).py_time()
506
+ if match := _PY_ZONED_DATE_TIME_PATTERN.search(text):
507
+ return ZonedDateTime.parse_iso(match.group(1)).py_datetime()
430
508
  if match := _TIME_PATTERN.search(text):
431
- return parse_time(match.group(1))
432
- if match := _TIMEDELTA_PATTERN.search(text):
433
- return parse_timedelta(match.group(1))
509
+ return Time.parse_iso(match.group(1))
510
+ if match := _TIME_DELTA_PATTERN.search(text):
511
+ return TimeDelta.parse_iso(match.group(1))
512
+ if match := _TIME_PERIOD_PATTERN.search(text):
513
+ start, end = map(Time.parse_iso, match.group(1).split(","))
514
+ return TimePeriod(start, end)
434
515
  if match := _UUID_PATTERN.search(text):
435
516
  return UUID(match.group(1))
436
517
  if match := _VERSION_PATTERN.search(text):
437
518
  return parse_version(match.group(1))
438
- if match := _ZONED_DATETIME_PATTERN.search(text):
439
- return parse_zoned_datetime(match.group(1))
519
+ if match := _YEAR_MONTH_PATTERN.search(text):
520
+ return YearMonth.parse_iso(match.group(1))
521
+ if match := _ZONED_DATE_TIME_PATTERN.search(text):
522
+ return ZonedDateTime.parse_iso(match.group(1))
523
+ if match := _ZONED_DATE_TIME_PERIOD_PATTERN.search(text):
524
+ start, end = match.group(1).split(",")
525
+ end = ZonedDateTime.parse_iso(end)
526
+ start = PlainDateTime.parse_iso(start).assume_tz(end.tz)
527
+ return ZonedDateTimePeriod(start, end)
440
528
  if (
441
529
  exc_class := _object_hook_exception_class(
442
530
  text, data=data, objects=objects, redirects=redirects
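A ZonedDateTimePeriod is serialized with a plain start and a zoned end (see _pre_process above), so deserialization re-attaches the end's time zone to the start. The parsing step above, extracted:

```python
# `text` is what follows the "[zp]" tag in a serialized period.
from whenever import PlainDateTime, ZonedDateTime

text = "2024-01-02T00:00:00,2024-01-03T00:00:00+00:00[UTC]"
start_raw, end_raw = text.split(",")
end = ZonedDateTime.parse_iso(end_raw)
start = PlainDateTime.parse_iso(start_raw).assume_tz(end.tz)
# ZonedDateTimePeriod(start, end) then reconstructs the period.
```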
@@ -518,7 +606,7 @@
                  )
                  for k, v in mapping.items()
              }
-         case _ as never:
+         case never:
              assert_never(never)


@@ -662,11 +750,19 @@ def _object_hook_get_object(
  @dataclass(kw_only=True, slots=True)
  class DeserializeError(Exception):
      data: bytes
-     qualname: str
+
+
+ @dataclass(kw_only=True, slots=True)
+ class _DeserializeInvalidJSONError(DeserializeError):
+     @override
+     def __str__(self) -> str:
+         return f"Invalid JSON: {self.data!r}"


  @dataclass(kw_only=True, slots=True)
  class _DeserializeNoObjectsError(DeserializeError):
+     qualname: str
+
      @override
      def __str__(self) -> str:
          return f"Objects required to deserialize {self.qualname!r} from {self.data!r}"
@@ -674,6 +770,8 @@ class _DeserializeNoObjectsError(DeserializeError):

  @dataclass(kw_only=True, slots=True)
  class _DeserializeObjectNotFoundError(DeserializeError):
+     qualname: str
+
      @override
      def __str__(self) -> str:
          return (
@@ -742,8 +840,6 @@ class OrjsonFormatter(Formatter):

      @override
      def format(self, record: LogRecord) -> str:
-         from utilities.tzlocal import get_local_time_zone
-
          extra = {
              k: v
              for k, v in record.__dict__.items()
@@ -755,9 +851,7 @@ class OrjsonFormatter(Formatter):
              path_name=Path(record.pathname),
              line_num=record.lineno,
              message=record.getMessage(),
-             datetime=dt.datetime.fromtimestamp(
-                 record.created, tz=get_local_time_zone()
-             ),
+             datetime=from_timestamp(record.created, time_zone=LOCAL_TIME_ZONE),
              func_name=record.funcName,
              extra=extra if len(extra) >= 1 else None,
          )
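OrjsonFormatter now stamps each record with a whenever.ZonedDateTime in the local zone instead of a stdlib datetime. Wiring is unchanged; a sketch assuming the usual logging.Formatter constructor:

```python
import logging

from utilities.orjson import OrjsonFormatter

handler = logging.StreamHandler()
handler.setFormatter(OrjsonFormatter())
logging.getLogger("app").addHandler(handler)
# each record is emitted as one orjson document with a zoned datetime
```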
@@ -848,9 +942,7 @@ class GetLogRecordsOutput:

      @cached_property
      def dataframe(self) -> Any:
-         from polars import DataFrame, Object, String, UInt64
-
-         from utilities.polars import zoned_datetime
+         from polars import DataFrame, Datetime, Object, String, UInt64

          records = [
              replace(
@@ -861,11 +953,16 @@ class GetLogRecordsOutput:
              for r in self.records
          ]
          if len(records) >= 1:
-             time_zone = one_unique(ensure_time_zone(r.datetime) for r in records)
+             time_zone = one_unique(ZoneInfo(r.datetime.tz) for r in records)
          else:
-             time_zone = get_local_time_zone()
+             time_zone = LOCAL_TIME_ZONE
          return DataFrame(
-             data=[dataclass_to_dict(r, recursive=False) for r in records],
+             data=[
+                 dataclass_to_dict(
+                     replace(r, datetime=r.datetime.py_datetime()), recursive=False
+                 )
+                 for r in records
+             ],
              schema={
                  "index": UInt64,
                  "name": String,
@@ -873,7 +970,7 @@
                  "level": UInt64,
                  "path_name": String,
                  "line_num": UInt64,
-                 "datetime": zoned_datetime(time_zone=time_zone),
+                 "datetime": Datetime(time_zone=time_zone),
                  "func_name": String,
                  "stack_info": String,
                  "extra": Object,
@@ -893,9 +990,12 @@ class GetLogRecordsOutput:
          level: LogLevel | None = None,
          min_level: LogLevel | None = None,
          max_level: LogLevel | None = None,
-         date_or_datetime: DateOrDateTime | None = None,
-         min_date_or_datetime: DateOrDateTime | None = None,
-         max_date_or_datetime: DateOrDateTime | None = None,
+         date: Date | None = None,
+         min_date: Date | None = None,
+         max_date: Date | None = None,
+         datetime: ZonedDateTime | None = None,
+         min_datetime: ZonedDateTime | None = None,
+         max_datetime: ZonedDateTime | None = None,
          func_name: bool | str | None = None,
          extra: bool | MaybeIterable[str] | None = None,
          log_file: bool | PathLike | None = None,
@@ -934,30 +1034,18 @@ class GetLogRecordsOutput:
          records = [
              r for r in records if r.level <= get_logging_level_number(max_level)
          ]
-         if date_or_datetime is not None:
-             match date_or_datetime:
-                 case dt.datetime() as datetime:
-                     records = [r for r in records if r.datetime == datetime]
-                 case dt.date() as date:
-                     records = [r for r in records if r.date == date]
-                 case _ as never:
-                     assert_never(never)
-         if min_date_or_datetime is not None:
-             match min_date_or_datetime:
-                 case dt.datetime() as min_datetime:
-                     records = [r for r in records if r.datetime >= min_datetime]
-                 case dt.date() as min_date:
-                     records = [r for r in records if r.date >= min_date]
-                 case _ as never:
-                     assert_never(never)
-         if max_date_or_datetime is not None:
-             match max_date_or_datetime:
-                 case dt.datetime() as max_datetime:
-                     records = [r for r in records if r.datetime <= max_datetime]
-                 case dt.date() as max_date:
-                     records = [r for r in records if r.date <= max_date]
-                 case _ as never:
-                     assert_never(never)
+         if date is not None:
+             records = [r for r in records if r.date == date]
+         if min_date is not None:
+             records = [r for r in records if r.date >= min_date]
+         if max_date is not None:
+             records = [r for r in records if r.date <= max_date]
+         if datetime is not None:
+             records = [r for r in records if r.datetime == datetime]
+         if min_datetime is not None:
+             records = [r for r in records if r.datetime >= min_datetime]
+         if max_datetime is not None:
+             records = [r for r in records if r.datetime <= max_datetime]
          if func_name is not None:
              match func_name:
                  case bool() as has_func_name:
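Splitting date_or_datetime into separate date/datetime keywords (plus min_/max_ variants) replaces the type-dispatching match blocks above with plain comparisons. A usage sketch with illustrative values:

```python
from whenever import Date, ZonedDateTime

from utilities.orjson import get_log_records

output = get_log_records("logs")  # path argument is illustrative
recent = output.filter(
    min_level="WARNING",  # LogLevel literal assumed
    min_date=Date(2024, 1, 1),
    max_datetime=ZonedDateTime(2024, 6, 30, 23, 59, tz="UTC"),
)
```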
@@ -970,7 +1058,7 @@
                          for r in records
                          if (r.func_name is not None) and search(func_name, r.func_name)
                      ]
-                 case _ as never:
+                 case never:
                      assert_never(never)
          if extra is not None:
              match extra:
@@ -983,7 +1071,7 @@
                          if (r.extra is not None)
                          and set(r.extra).issuperset(always_iterable(keys))
                      ]
-                 case _ as never:
+                 case never:
                      assert_never(never)
          if log_file is not None:
              match log_file:
@@ -998,7 +1086,7 @@
                          if (r.log_file is not None)
                          and search(str(log_file), str(r.log_file))
                      ]
-                 case _ as never:
+                 case never:
                      assert_never(never)
          if log_file_line_num is not None:
              match log_file_line_num:
@@ -1012,7 +1100,7 @@
                      records = [
                          r for r in records if r.log_file_line_num == log_file_line_num
                      ]
-                 case _ as never:
+                 case never:
                      assert_never(never)
          if min_log_file_line_num is not None:
              records = [
@@ -1026,7 +1114,7 @@
                  r
                  for r in records
                  if (r.log_file_line_num is not None)
-                 and (r.log_file_line_num >= max_log_file_line_num)
+                 and (r.log_file_line_num <= max_log_file_line_num)
              ]
          return replace(self, records=records)

@@ -1060,7 +1148,7 @@ class OrjsonLogRecord:
      level: int
      path_name: Path
      line_num: int
-     datetime: dt.datetime
+     datetime: ZonedDateTime
      func_name: str | None = None
      stack_info: str | None = None
      extra: StrMapping | None = None
@@ -1068,7 +1156,7 @@
      log_file_line_num: int | None = None

      @cached_property
-     def date(self) -> dt.date:
+     def date(self) -> Date:
          return self.datetime.date()


@@ -1089,9 +1177,8 @@
  ) -> _GetLogRecordsOneOutput:
      path = Path(path)
      try:
-         with path.open() as fh:
-             lines = fh.readlines()
-     except UnicodeDecodeError as error:  # skipif-ci-and-windows
+         lines = path.read_text().splitlines()
+     except UnicodeDecodeError as error:
          return _GetLogRecordsOneOutput(path=path, file_ok=False, other_errors=[error])
      num_lines_blank, num_lines_error = 0, 0
      missing: set[str] = set()
@@ -1163,6 +1250,52 @@
      other_errors: list[Exception] = field(default_factory=list, repr=False)


+ # read/write
+
+
+ def read_object(
+     path: PathLike,
+     /,
+     *,
+     decompress: bool = False,
+     dataclass_hook: _DataclassHook | None = None,
+     objects: AbstractSet[type[Any]] | None = None,
+     redirects: Mapping[str, type[Any]] | None = None,
+ ) -> Any:
+     """Read an object from disk."""
+     data = read_binary(path, decompress=decompress)
+     return deserialize(
+         data, dataclass_hook=dataclass_hook, objects=objects, redirects=redirects
+     )
+
+
+ def write_object(
+     obj: Any,
+     path: PathLike,
+     /,
+     *,
+     before: Callable[[Any], Any] | None = None,
+     globalns: StrMapping | None = None,
+     localns: StrMapping | None = None,
+     warn_name_errors: bool = False,
+     dataclass_hook: _DataclassHook | None = None,
+     dataclass_defaults: bool = False,
+     compress: bool = False,
+     overwrite: bool = False,
+ ) -> None:
+     """Write an object to disk."""
+     data = serialize(
+         obj,
+         before=before,
+         globalns=globalns,
+         localns=localns,
+         warn_name_errors=warn_name_errors,
+         dataclass_hook=dataclass_hook,
+         dataclass_defaults=dataclass_defaults,
+     )
+     write_formatted_json(data, path, compress=compress, overwrite=overwrite)
+
+
  __all__ = [
      "DeserializeError",
      "GetLogRecordsOutput",
@@ -1171,5 +1304,7 @@ __all__ = [
      "SerializeError",
      "deserialize",
      "get_log_records",
+     "read_object",
      "serialize",
+     "write_object",
  ]
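The new read_object/write_object helpers pair the serializer with formatted, optionally gzipped JSON on disk (via utilities.gzip.read_binary and utilities.json.write_formatted_json). A round-trip sketch with an illustrative dataclass:

```python
from dataclasses import dataclass

from utilities.orjson import read_object, write_object


@dataclass(kw_only=True, slots=True)
class Config:
    name: str
    retries: int


config = Config(name="demo", retries=3)
write_object(config, "config.json", overwrite=True)
assert read_object("config.json", objects={Config}) == config
```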