dycw-utilities 0.135.0__py3-none-any.whl → 0.178.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of dycw-utilities might be problematic.

Files changed (97)
  1. dycw_utilities-0.178.1.dist-info/METADATA +34 -0
  2. dycw_utilities-0.178.1.dist-info/RECORD +105 -0
  3. dycw_utilities-0.178.1.dist-info/WHEEL +4 -0
  4. dycw_utilities-0.178.1.dist-info/entry_points.txt +4 -0
  5. utilities/__init__.py +1 -1
  6. utilities/altair.py +13 -10
  7. utilities/asyncio.py +312 -787
  8. utilities/atomicwrites.py +18 -6
  9. utilities/atools.py +64 -4
  10. utilities/cachetools.py +9 -6
  11. utilities/click.py +195 -77
  12. utilities/concurrent.py +1 -1
  13. utilities/contextlib.py +216 -17
  14. utilities/contextvars.py +20 -1
  15. utilities/cryptography.py +3 -3
  16. utilities/dataclasses.py +15 -28
  17. utilities/docker.py +387 -0
  18. utilities/enum.py +2 -2
  19. utilities/errors.py +17 -3
  20. utilities/fastapi.py +28 -59
  21. utilities/fpdf2.py +2 -2
  22. utilities/functions.py +24 -269
  23. utilities/git.py +9 -30
  24. utilities/grp.py +28 -0
  25. utilities/gzip.py +31 -0
  26. utilities/http.py +3 -2
  27. utilities/hypothesis.py +513 -159
  28. utilities/importlib.py +17 -1
  29. utilities/inflect.py +12 -4
  30. utilities/iterables.py +33 -58
  31. utilities/jinja2.py +148 -0
  32. utilities/json.py +70 -0
  33. utilities/libcst.py +38 -17
  34. utilities/lightweight_charts.py +4 -7
  35. utilities/logging.py +136 -93
  36. utilities/math.py +8 -4
  37. utilities/more_itertools.py +43 -45
  38. utilities/operator.py +27 -27
  39. utilities/orjson.py +189 -36
  40. utilities/os.py +61 -4
  41. utilities/packaging.py +115 -0
  42. utilities/parse.py +8 -5
  43. utilities/pathlib.py +269 -40
  44. utilities/permissions.py +298 -0
  45. utilities/platform.py +7 -6
  46. utilities/polars.py +1205 -413
  47. utilities/polars_ols.py +1 -1
  48. utilities/postgres.py +408 -0
  49. utilities/pottery.py +43 -19
  50. utilities/pqdm.py +3 -3
  51. utilities/psutil.py +5 -57
  52. utilities/pwd.py +28 -0
  53. utilities/pydantic.py +4 -52
  54. utilities/pydantic_settings.py +240 -0
  55. utilities/pydantic_settings_sops.py +76 -0
  56. utilities/pyinstrument.py +7 -7
  57. utilities/pytest.py +104 -143
  58. utilities/pytest_plugins/__init__.py +1 -0
  59. utilities/pytest_plugins/pytest_randomly.py +23 -0
  60. utilities/pytest_plugins/pytest_regressions.py +56 -0
  61. utilities/pytest_regressions.py +26 -46
  62. utilities/random.py +11 -6
  63. utilities/re.py +1 -1
  64. utilities/redis.py +220 -343
  65. utilities/sentinel.py +10 -0
  66. utilities/shelve.py +4 -1
  67. utilities/shutil.py +25 -0
  68. utilities/slack_sdk.py +35 -104
  69. utilities/sqlalchemy.py +496 -471
  70. utilities/sqlalchemy_polars.py +29 -54
  71. utilities/string.py +2 -3
  72. utilities/subprocess.py +1977 -0
  73. utilities/tempfile.py +112 -4
  74. utilities/testbook.py +50 -0
  75. utilities/text.py +174 -42
  76. utilities/throttle.py +158 -0
  77. utilities/timer.py +2 -2
  78. utilities/traceback.py +70 -35
  79. utilities/types.py +102 -30
  80. utilities/typing.py +479 -19
  81. utilities/uuid.py +42 -5
  82. utilities/version.py +27 -26
  83. utilities/whenever.py +1559 -361
  84. utilities/zoneinfo.py +80 -22
  85. dycw_utilities-0.135.0.dist-info/METADATA +0 -39
  86. dycw_utilities-0.135.0.dist-info/RECORD +0 -96
  87. dycw_utilities-0.135.0.dist-info/WHEEL +0 -4
  88. dycw_utilities-0.135.0.dist-info/licenses/LICENSE +0 -21
  89. utilities/aiolimiter.py +0 -25
  90. utilities/arq.py +0 -216
  91. utilities/eventkit.py +0 -388
  92. utilities/luigi.py +0 -183
  93. utilities/period.py +0 -152
  94. utilities/pudb.py +0 -62
  95. utilities/python_dotenv.py +0 -101
  96. utilities/streamlit.py +0 -105
  97. utilities/typed_settings.py +0 -123
utilities/operator.py CHANGED
@@ -6,9 +6,9 @@ from dataclasses import asdict, dataclass
 from typing import TYPE_CHECKING, Any, cast, override

 import utilities.math
-from utilities.functions import is_dataclass_instance
 from utilities.iterables import SortIterableError, sort_iterable
 from utilities.reprlib import get_repr
+from utilities.typing import is_dataclass_instance

 if TYPE_CHECKING:
     from utilities.types import Dataclass, Number
@@ -52,6 +52,14 @@ def is_equal[T](
         return is_equal(x.args, y.args)

     # collections
+    if isinstance(x, AbstractSet):
+        y = cast("AbstractSet[Any]", y)
+        try:
+            x_sorted = sort_iterable(x)
+            y_sorted = sort_iterable(y)
+        except SortIterableError:
+            return _is_in(x, y) and _is_in(y, x)
+        return is_equal(x_sorted, y_sorted, rel_tol=rel_tol, abs_tol=abs_tol)
     if isinstance(x, Mapping):
         y = cast("Mapping[Any, Any]", y)
         x_keys = set(x)
@@ -61,32 +69,6 @@ def is_equal[T](
         x_values = [x[i] for i in x]
         y_values = [y[i] for i in x]
         return is_equal(x_values, y_values, rel_tol=rel_tol, abs_tol=abs_tol)
-    if isinstance(x, AbstractSet):
-        y = cast("AbstractSet[Any]", y)
-        try:
-            x_sorted = sort_iterable(x)
-            y_sorted = sort_iterable(y)
-        except SortIterableError:
-            x_in_y = all(
-                any(
-                    is_equal(
-                        x_i, y_i, rel_tol=rel_tol, abs_tol=abs_tol, extra=extra
-                    )
-                    for y_i in y
-                )
-                for x_i in x
-            )
-            y_in_x = all(
-                any(
-                    is_equal(
-                        x_i, y_i, rel_tol=rel_tol, abs_tol=abs_tol, extra=extra
-                    )
-                    for x_i in x
-                )
-                for y_i in y
-            )
-            return x_in_y and y_in_x
-        return is_equal(x_sorted, y_sorted, rel_tol=rel_tol, abs_tol=abs_tol)
     if isinstance(x, Sequence):
         y = cast("Sequence[Any]", y)
         if len(x) != len(y):
@@ -102,6 +84,24 @@ def is_equal[T](
     return (type(x) is type(y)) and (x == y)


+def _is_in[T](
+    x: AbstractSet[Any],
+    y: AbstractSet[Any],
+    /,
+    *,
+    rel_tol: float | None = None,
+    abs_tol: float | None = None,
+    extra: Mapping[type[T], Callable[[T, T], bool]] | None = None,
+) -> bool:
+    return all(
+        any(
+            is_equal(x_i, y_i, rel_tol=rel_tol, abs_tol=abs_tol, extra=extra)
+            for y_i in y
+        )
+        for x_i in x
+    )
+
+
 @dataclass(kw_only=True, slots=True)
 class IsEqualError(Exception):
     x: Any
utilities/orjson.py CHANGED
@@ -1,10 +1,11 @@
 from __future__ import annotations

+import datetime as dt
 import re
 from collections.abc import Callable, Iterable, Mapping, Sequence
 from contextlib import suppress
 from dataclasses import dataclass, field, replace
-from enum import Enum, unique
+from enum import Enum, StrEnum, unique
 from functools import cached_property, partial
 from itertools import chain
 from logging import Formatter, LogRecord
@@ -19,6 +20,7 @@ from orjson import (
     OPT_PASSTHROUGH_DATACLASS,
     OPT_PASSTHROUGH_DATETIME,
     OPT_SORT_KEYS,
+    JSONDecodeError,
     dumps,
     loads,
 )
@@ -26,15 +28,18 @@ from whenever import (
     Date,
     DateDelta,
     DateTimeDelta,
+    MonthDay,
     PlainDateTime,
     Time,
     TimeDelta,
+    YearMonth,
     ZonedDateTime,
 )

 from utilities.concurrent import concurrent_map
 from utilities.dataclasses import dataclass_to_dict
-from utilities.functions import ensure_class, is_string_mapping
+from utilities.functions import ensure_class
+from utilities.gzip import read_binary
 from utilities.iterables import (
     OneEmptyError,
     always_iterable,
@@ -42,12 +47,19 @@ from utilities.iterables import (
     one,
     one_unique,
 )
+from utilities.json import write_formatted_json
 from utilities.logging import get_logging_level_number
 from utilities.math import MAX_INT64, MIN_INT64
 from utilities.types import Dataclass, LogLevel, MaybeIterable, PathLike, StrMapping
+from utilities.typing import is_string_mapping
 from utilities.tzlocal import LOCAL_TIME_ZONE
 from utilities.version import Version, parse_version
-from utilities.whenever import from_timestamp
+from utilities.whenever import (
+    DatePeriod,
+    TimePeriod,
+    ZonedDateTimePeriod,
+    from_timestamp,
+)

 if TYPE_CHECKING:
     from collections.abc import Set as AbstractSet
@@ -60,10 +72,11 @@ if TYPE_CHECKING:


 @unique
-class _Prefixes(Enum):
+class _Prefixes(StrEnum):
     dataclass = "dc"
     date = "d"
     date_delta = "dd"
+    date_period = "dp"
     date_time_delta = "D"
     enum = "e"
     exception_class = "Ex"
@@ -71,17 +84,25 @@ class _Prefixes(Enum):
     float_ = "fl"
     frozenset_ = "fr"
     list_ = "l"
-    none = "none"
+    month_day = "md"
+    none = "0"
     path = "p"
     plain_date_time = "pd"
+    py_date = "!d"
+    py_plain_date_time = "!pd"
+    py_time = "!ti"
+    py_zoned_date_time = "!zd"
     set_ = "s"
     time = "ti"
     time_delta = "td"
+    time_period = "tp"
     tuple_ = "tu"
     unserializable = "un"
     uuid = "uu"
     version = "v"
+    year_month = "ym"
     zoned_date_time = "zd"
+    zoned_date_time_period = "zp"


 type _DataclassHook = Callable[[type[Dataclass], StrMapping], StrMapping]
@@ -156,8 +177,10 @@ def _pre_process(
             return f"[{_Prefixes.date.value}]{date}"
         case DateDelta() as date:
             return f"[{_Prefixes.date_delta.value}]{date}"
-        case DateTimeDelta() as date:
-            return f"[{_Prefixes.date_time_delta.value}]{date}"
+        case DatePeriod() as period:
+            return f"[{_Prefixes.date_period.value}]{period.start},{period.end}"
+        case DateTimeDelta() as date_time_delta:
+            return f"[{_Prefixes.date_time_delta.value}]{date_time_delta}"
         case Exception() as error_:
             return {
                 f"[{_Prefixes.exception_instance.value}|{type(error_).__qualname__}]": pre(
@@ -172,24 +195,48 @@ def _pre_process(
             if MIN_INT64 <= int_ <= MAX_INT64:
                 return int_
             raise _SerializeIntegerError(obj=int_)
+        case MonthDay() as month_day:
+            return f"[{_Prefixes.month_day.value}]{month_day!s}"
         case Path() as path:
             return f"[{_Prefixes.path.value}]{path!s}"
-        case PlainDateTime() as datetime:
-            return f"[{_Prefixes.plain_date_time.value}]{datetime}"
-        case str() as str_:
-            return str_
+        case PlainDateTime() as date_time:
+            return f"[{_Prefixes.plain_date_time.value}]{date_time}"
+        case str() as text:
+            return text
         case Time() as time:
             return f"[{_Prefixes.time.value}]{time}"
         case TimeDelta() as time_delta:
             return f"[{_Prefixes.time_delta.value}]{time_delta}"
+        case TimePeriod() as period:
+            return f"[{_Prefixes.time_period.value}]{period.start},{period.end}"
         case type() as error_cls if issubclass(error_cls, Exception):
             return f"[{_Prefixes.exception_class.value}|{error_cls.__qualname__}]"
         case UUID() as uuid:
             return f"[{_Prefixes.uuid.value}]{uuid}"
         case Version() as version:
-            return f"[{_Prefixes.version.value}]{version!s}"
-        case ZonedDateTime() as datetime:
-            return f"[{_Prefixes.zoned_date_time.value}]{datetime}"
+            return f"[{_Prefixes.version.value}]{version}"
+        case YearMonth() as year_month:
+            return f"[{_Prefixes.year_month.value}]{year_month}"
+        case ZonedDateTime() as date_time:
+            return f"[{_Prefixes.zoned_date_time.value}]{date_time}"
+        case ZonedDateTimePeriod() as period:
+            return f"[{_Prefixes.zoned_date_time_period.value}]{period.start.to_plain()},{period.end}"
+        case dt.datetime() as py_datetime:
+            match py_datetime.tzinfo:
+                case None:
+                    datetime = PlainDateTime.from_py_datetime(py_datetime)
+                    return f"[{_Prefixes.py_plain_date_time.value}]{datetime}"
+                case ZoneInfo():
+                    datetime = ZonedDateTime.from_py_datetime(py_datetime)
+                    return f"[{_Prefixes.py_zoned_date_time.value}]{datetime}"
+                case _:  # pragma: no cover
+                    raise NotImplementedError
+        case dt.date() as py_date:
+            date = Date.from_py_date(py_date)
+            return f"[{_Prefixes.py_date.value}]{date}"
+        case dt.time() as py_time:
+            time = Time.from_py_time(py_time)
+            return f"[{_Prefixes.py_time.value}]{time}"
         # contains
         case Dataclass() as dataclass:
             asdict = dataclass_to_dict(
@@ -257,6 +304,7 @@ def _pre_process(
                 qualname=type(obj).__qualname__, repr=repr(obj), str=str(obj)
             )
             return pre(unserializable)
+    return None


 def _pre_process_container(
@@ -325,8 +373,12 @@ def deserialize(
     redirects: Mapping[str, type[Any]] | None = None,
 ) -> Any:
     """Deserialize an object."""
+    try:
+        obj = loads(data)
+    except JSONDecodeError:
+        raise _DeserializeInvalidJSONError(data=data) from None
     return _object_hook(
-        loads(data),
+        obj,
         data=data,
         dataclass_hook=dataclass_hook,
         objects=objects,
@@ -334,34 +386,57 @@
     )


+@dataclass(kw_only=True, slots=True)
+class DeerializeError(Exception):
+    obj: Any
+
+
 (
     _DATE_PATTERN,
     _DATE_DELTA_PATTERN,
+    _DATE_PERIOD_PATTERN,
     _DATE_TIME_DELTA_PATTERN,
     _FLOAT_PATTERN,
+    _MONTH_DAY_PATTERN,
     _NONE_PATTERN,
     _PATH_PATTERN,
     _PLAIN_DATE_TIME_PATTERN,
+    _PY_DATE_PATTERN,
+    _PY_PLAIN_DATE_TIME_PATTERN,
+    _PY_TIME_PATTERN,
+    _PY_ZONED_DATE_TIME_PATTERN,
     _TIME_PATTERN,
     _TIME_DELTA_PATTERN,
+    _TIME_PERIOD_PATTERN,
     _UUID_PATTERN,
     _VERSION_PATTERN,
+    _YEAR_MONTH_PATTERN,
     _ZONED_DATE_TIME_PATTERN,
+    _ZONED_DATE_TIME_PERIOD_PATTERN,
 ) = [
     re.compile(r"^\[" + p.value + r"\](" + ".*" + ")$")
     for p in [
         _Prefixes.date,
         _Prefixes.date_delta,
+        _Prefixes.date_period,
         _Prefixes.date_time_delta,
         _Prefixes.float_,
+        _Prefixes.month_day,
         _Prefixes.none,
         _Prefixes.path,
         _Prefixes.plain_date_time,
+        _Prefixes.py_date,
+        _Prefixes.py_plain_date_time,
+        _Prefixes.py_time,
+        _Prefixes.py_zoned_date_time,
         _Prefixes.time,
         _Prefixes.time_delta,
+        _Prefixes.time_period,
         _Prefixes.uuid,
         _Prefixes.version,
+        _Prefixes.year_month,
         _Prefixes.zoned_date_time,
+        _Prefixes.zoned_date_time_period,
     ]
 ]
@@ -406,27 +481,50 @@ def _object_hook(
             if match := _NONE_PATTERN.search(text):
                 return None
             if match := _DATE_PATTERN.search(text):
-                return Date.parse_common_iso(match.group(1))
+                return Date.parse_iso(match.group(1))
             if match := _DATE_DELTA_PATTERN.search(text):
-                return DateDelta.parse_common_iso(match.group(1))
+                return DateDelta.parse_iso(match.group(1))
+            if match := _DATE_PERIOD_PATTERN.search(text):
+                start, end = map(Date.parse_iso, match.group(1).split(","))
+                return DatePeriod(start, end)
             if match := _DATE_TIME_DELTA_PATTERN.search(text):
-                return DateTimeDelta.parse_common_iso(match.group(1))
+                return DateTimeDelta.parse_iso(match.group(1))
             if match := _FLOAT_PATTERN.search(text):
                 return float(match.group(1))
+            if match := _MONTH_DAY_PATTERN.search(text):
+                return MonthDay.parse_iso(match.group(1))
             if match := _PATH_PATTERN.search(text):
                 return Path(match.group(1))
             if match := _PLAIN_DATE_TIME_PATTERN.search(text):
-                return PlainDateTime.parse_common_iso(match.group(1))
+                return PlainDateTime.parse_iso(match.group(1))
+            if match := _PY_DATE_PATTERN.search(text):
+                return Date.parse_iso(match.group(1)).py_date()
+            if match := _PY_PLAIN_DATE_TIME_PATTERN.search(text):
+                return PlainDateTime.parse_iso(match.group(1)).py_datetime()
+            if match := _PY_TIME_PATTERN.search(text):
+                return Time.parse_iso(match.group(1)).py_time()
+            if match := _PY_ZONED_DATE_TIME_PATTERN.search(text):
+                return ZonedDateTime.parse_iso(match.group(1)).py_datetime()
             if match := _TIME_PATTERN.search(text):
-                return Time.parse_common_iso(match.group(1))
+                return Time.parse_iso(match.group(1))
             if match := _TIME_DELTA_PATTERN.search(text):
-                return TimeDelta.parse_common_iso(match.group(1))
+                return TimeDelta.parse_iso(match.group(1))
+            if match := _TIME_PERIOD_PATTERN.search(text):
+                start, end = map(Time.parse_iso, match.group(1).split(","))
+                return TimePeriod(start, end)
             if match := _UUID_PATTERN.search(text):
                 return UUID(match.group(1))
             if match := _VERSION_PATTERN.search(text):
                 return parse_version(match.group(1))
+            if match := _YEAR_MONTH_PATTERN.search(text):
+                return YearMonth.parse_iso(match.group(1))
             if match := _ZONED_DATE_TIME_PATTERN.search(text):
-                return ZonedDateTime.parse_common_iso(match.group(1))
+                return ZonedDateTime.parse_iso(match.group(1))
+            if match := _ZONED_DATE_TIME_PERIOD_PATTERN.search(text):
+                start, end = match.group(1).split(",")
+                end = ZonedDateTime.parse_iso(end)
+                start = PlainDateTime.parse_iso(start).assume_tz(end.tz)
+                return ZonedDateTimePeriod(start, end)
             if (
                 exc_class := _object_hook_exception_class(
                     text, data=data, objects=objects, redirects=redirects
@@ -508,7 +606,7 @@ def _object_hook(
                 )
                 for k, v in mapping.items()
             }
-        case _ as never:
+        case never:
             assert_never(never)


@@ -652,11 +750,19 @@ def _object_hook_get_object(
 @dataclass(kw_only=True, slots=True)
 class DeserializeError(Exception):
     data: bytes
-    qualname: str
+
+
+@dataclass(kw_only=True, slots=True)
+class _DeserializeInvalidJSONError(DeserializeError):
+    @override
+    def __str__(self) -> str:
+        return f"Invalid JSON: {self.data!r}"


 @dataclass(kw_only=True, slots=True)
 class _DeserializeNoObjectsError(DeserializeError):
+    qualname: str
+
     @override
     def __str__(self) -> str:
         return f"Objects required to deserialize {self.qualname!r} from {self.data!r}"
@@ -664,6 +770,8 @@ class _DeserializeNoObjectsError(DeserializeError):

 @dataclass(kw_only=True, slots=True)
 class _DeserializeObjectNotFoundError(DeserializeError):
+    qualname: str
+
     @override
     def __str__(self) -> str:
         return (
@@ -834,9 +942,7 @@ class GetLogRecordsOutput:

     @cached_property
     def dataframe(self) -> Any:
-        from polars import DataFrame, Object, String, UInt64
-
-        from utilities.polars import zoned_datetime
+        from polars import DataFrame, Datetime, Object, String, UInt64

         records = [
             replace(
@@ -864,7 +970,7 @@ class GetLogRecordsOutput:
                 "level": UInt64,
                 "path_name": String,
                 "line_num": UInt64,
-                "datetime": zoned_datetime(time_zone=time_zone),
+                "datetime": Datetime(time_zone=time_zone),
                 "func_name": String,
                 "stack_info": String,
                 "extra": Object,
@@ -952,7 +1058,7 @@ class GetLogRecordsOutput:
                        for r in records
                        if (r.func_name is not None) and search(func_name, r.func_name)
                    ]
-                case _ as never:
+                case never:
                    assert_never(never)
        if extra is not None:
            match extra:
@@ -965,7 +1071,7 @@ class GetLogRecordsOutput:
                        if (r.extra is not None)
                        and set(r.extra).issuperset(always_iterable(keys))
                    ]
-                case _ as never:
+                case never:
                    assert_never(never)
        if log_file is not None:
            match log_file:
@@ -980,7 +1086,7 @@ class GetLogRecordsOutput:
                        if (r.log_file is not None)
                        and search(str(log_file), str(r.log_file))
                    ]
-                case _ as never:
+                case never:
                    assert_never(never)
        if log_file_line_num is not None:
            match log_file_line_num:
@@ -994,7 +1100,7 @@ class GetLogRecordsOutput:
                    records = [
                        r for r in records if r.log_file_line_num == log_file_line_num
                    ]
-                case _ as never:
+                case never:
                    assert_never(never)
        if min_log_file_line_num is not None:
            records = [
@@ -1008,7 +1114,7 @@ class GetLogRecordsOutput:
                r
                for r in records
                if (r.log_file_line_num is not None)
-                and (r.log_file_line_num >= max_log_file_line_num)
+                and (r.log_file_line_num <= max_log_file_line_num)
            ]
        return replace(self, records=records)
@@ -1071,9 +1177,8 @@ def _get_log_records_one(
 ) -> _GetLogRecordsOneOutput:
     path = Path(path)
     try:
-        with path.open() as fh:
-            lines = fh.readlines()
-    except UnicodeDecodeError as error:  # skipif-ci-and-windows
+        lines = path.read_text().splitlines()
+    except UnicodeDecodeError as error:
         return _GetLogRecordsOneOutput(path=path, file_ok=False, other_errors=[error])
     num_lines_blank, num_lines_error = 0, 0
     missing: set[str] = set()
@@ -1145,6 +1250,52 @@ class _GetLogRecordsOneOutput:
     other_errors: list[Exception] = field(default_factory=list, repr=False)


+# read/write
+
+
+def read_object(
+    path: PathLike,
+    /,
+    *,
+    decompress: bool = False,
+    dataclass_hook: _DataclassHook | None = None,
+    objects: AbstractSet[type[Any]] | None = None,
+    redirects: Mapping[str, type[Any]] | None = None,
+) -> Any:
+    """Read an object from disk."""
+    data = read_binary(path, decompress=decompress)
+    return deserialize(
+        data, dataclass_hook=dataclass_hook, objects=objects, redirects=redirects
+    )
+
+
+def write_object(
+    obj: Any,
+    path: PathLike,
+    /,
+    *,
+    before: Callable[[Any], Any] | None = None,
+    globalns: StrMapping | None = None,
+    localns: StrMapping | None = None,
+    warn_name_errors: bool = False,
+    dataclass_hook: _DataclassHook | None = None,
+    dataclass_defaults: bool = False,
+    compress: bool = False,
+    overwrite: bool = False,
+) -> None:
+    """Write an object to disk."""
+    data = serialize(
+        obj,
+        before=before,
+        globalns=globalns,
+        localns=localns,
+        warn_name_errors=warn_name_errors,
+        dataclass_hook=dataclass_hook,
+        dataclass_defaults=dataclass_defaults,
+    )
+    write_formatted_json(data, path, compress=compress, overwrite=overwrite)
+
+
 __all__ = [
     "DeserializeError",
     "GetLogRecordsOutput",
@@ -1153,5 +1304,7 @@ __all__ = [
     "SerializeError",
     "deserialize",
     "get_log_records",
+    "read_object",
     "serialize",
+    "write_object",
 ]
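
As a rough illustration of the new read/write helpers and the stricter deserialization path (the file name and values below are hypothetical, and the round-trip equality relies on the prefix encodings shown in the diff):

    from whenever import Date, ZonedDateTime

    from utilities.orjson import DeserializeError, deserialize, read_object, write_object

    obj = {
        "start": Date(2024, 1, 1),                     # serialized as "[d]2024-01-01"
        "stamp": ZonedDateTime(2024, 1, 1, tz="UTC"),  # serialized with the "[zd]" prefix
    }
    write_object(obj, "example.json", overwrite=True)  # hypothetical path
    assert read_object("example.json") == obj

    # Invalid input now surfaces as a DeserializeError subclass rather than a raw
    # orjson.JSONDecodeError.
    try:
        deserialize(b"not json")
    except DeserializeError as error:
        print(error)  # Invalid JSON: b'not json'
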
utilities/os.py CHANGED
@@ -1,11 +1,13 @@
 from __future__ import annotations

-from contextlib import contextmanager, suppress
+from contextlib import suppress
 from dataclasses import dataclass
 from os import cpu_count, environ, getenv
 from typing import TYPE_CHECKING, Literal, assert_never, overload, override

+from utilities.contextlib import enhanced_context_manager
 from utilities.iterables import OneStrEmptyError, one_str
+from utilities.platform import SYSTEM

 if TYPE_CHECKING:
     from collections.abc import Iterator, Mapping
@@ -47,7 +49,7 @@ def get_cpu_use(*, n: IntOrAll = "all") -> int:
             raise GetCPUUseError(n=n)
         case "all":
             return CPU_COUNT
-        case _ as never:
+        case never:
             assert_never(never)


@@ -104,7 +106,7 @@ def get_env_var(
             return None
         case str(), _:
             return default
-        case _ as never:
+        case never:
             assert_never(never)
     return environ[key_use]

@@ -123,7 +125,56 @@ class GetEnvVarError(Exception):
 ##


-@contextmanager
+def get_effective_group_id() -> int | None:
+    """Get the effective group ID."""
+    match SYSTEM:
+        case "windows":  # skipif-not-windows
+            return None
+        case "mac" | "linux":  # skipif-windows
+            from os import getegid
+
+            return getegid()
+        case never:
+            assert_never(never)
+
+
+def get_effective_user_id() -> int | None:
+    """Get the effective user ID."""
+    match SYSTEM:
+        case "windows":  # skipif-not-windows
+            return None
+        case "mac" | "linux":  # skipif-windows
+            from os import geteuid
+
+            return geteuid()
+        case never:
+            assert_never(never)
+
+
+EFFECTIVE_USER_ID = get_effective_user_id()
+EFFECTIVE_GROUP_ID = get_effective_group_id()
+
+
+##
+
+
+def is_debug() -> bool:
+    """Check if we are in `DEBUG` mode."""
+    return get_env_var("DEBUG", nullable=True) is not None
+
+
+##
+
+
+def is_pytest() -> bool:
+    """Check if `pytest` is running."""
+    return get_env_var("PYTEST_VERSION", nullable=True) is not None
+
+
+##
+
+
+@enhanced_context_manager
 def temp_environ(
     env: Mapping[str, str | None] | None = None, **env_kwargs: str | None
 ) -> Iterator[None]:
@@ -148,11 +199,17 @@ def temp_environ(

 __all__ = [
     "CPU_COUNT",
+    "EFFECTIVE_GROUP_ID",
+    "EFFECTIVE_USER_ID",
     "GetCPUCountError",
     "GetCPUUseError",
     "IntOrAll",
     "get_cpu_count",
     "get_cpu_use",
+    "get_effective_group_id",
+    "get_effective_user_id",
     "get_env_var",
+    "is_debug",
+    "is_pytest",
     "temp_environ",
 ]
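
A short usage sketch of the new helpers, assuming the behaviour implied by the diff (temp_environ stays a context manager under enhanced_context_manager, and passing None unsets a variable; the variable values are illustrative):

    from utilities.os import is_debug, is_pytest, temp_environ

    with temp_environ(DEBUG="1", PYTEST_VERSION=None):
        assert is_debug()        # DEBUG is set inside the block
        assert not is_pytest()   # PYTEST_VERSION was explicitly unset
    # On exit the previous values are restored, so is_debug() reverts to whatever
    # the surrounding environment dictates.
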