dycw-utilities 0.175.17__py3-none-any.whl → 0.185.8__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (94)
  1. dycw_utilities-0.185.8.dist-info/METADATA +33 -0
  2. dycw_utilities-0.185.8.dist-info/RECORD +90 -0
  3. {dycw_utilities-0.175.17.dist-info → dycw_utilities-0.185.8.dist-info}/WHEEL +2 -2
  4. utilities/__init__.py +1 -1
  5. utilities/altair.py +8 -6
  6. utilities/asyncio.py +40 -56
  7. utilities/atools.py +9 -11
  8. utilities/cachetools.py +8 -6
  9. utilities/click.py +4 -3
  10. utilities/concurrent.py +1 -1
  11. utilities/constants.py +492 -0
  12. utilities/contextlib.py +23 -30
  13. utilities/contextvars.py +1 -23
  14. utilities/core.py +2581 -0
  15. utilities/dataclasses.py +16 -119
  16. utilities/docker.py +139 -45
  17. utilities/enum.py +1 -1
  18. utilities/errors.py +2 -16
  19. utilities/fastapi.py +5 -5
  20. utilities/fpdf2.py +2 -1
  21. utilities/functions.py +33 -264
  22. utilities/http.py +2 -3
  23. utilities/hypothesis.py +48 -25
  24. utilities/iterables.py +39 -575
  25. utilities/jinja2.py +3 -6
  26. utilities/jupyter.py +5 -3
  27. utilities/libcst.py +1 -1
  28. utilities/lightweight_charts.py +4 -6
  29. utilities/logging.py +17 -15
  30. utilities/math.py +1 -36
  31. utilities/more_itertools.py +4 -6
  32. utilities/numpy.py +2 -1
  33. utilities/operator.py +2 -2
  34. utilities/orjson.py +24 -25
  35. utilities/os.py +4 -185
  36. utilities/packaging.py +129 -0
  37. utilities/parse.py +33 -13
  38. utilities/pathlib.py +2 -136
  39. utilities/platform.py +8 -90
  40. utilities/polars.py +34 -31
  41. utilities/postgres.py +9 -4
  42. utilities/pottery.py +20 -18
  43. utilities/pqdm.py +3 -4
  44. utilities/psutil.py +2 -3
  45. utilities/pydantic.py +18 -4
  46. utilities/pydantic_settings.py +7 -9
  47. utilities/pydantic_settings_sops.py +3 -3
  48. utilities/pyinstrument.py +4 -4
  49. utilities/pytest.py +49 -108
  50. utilities/pytest_plugins/pytest_regressions.py +2 -2
  51. utilities/pytest_regressions.py +8 -6
  52. utilities/random.py +2 -8
  53. utilities/redis.py +98 -94
  54. utilities/reprlib.py +11 -118
  55. utilities/shellingham.py +66 -0
  56. utilities/slack_sdk.py +13 -12
  57. utilities/sqlalchemy.py +42 -30
  58. utilities/sqlalchemy_polars.py +16 -25
  59. utilities/subprocess.py +1166 -148
  60. utilities/tabulate.py +32 -0
  61. utilities/testbook.py +8 -8
  62. utilities/text.py +24 -115
  63. utilities/throttle.py +159 -0
  64. utilities/time.py +18 -0
  65. utilities/timer.py +29 -12
  66. utilities/traceback.py +15 -22
  67. utilities/types.py +38 -3
  68. utilities/typing.py +18 -12
  69. utilities/uuid.py +1 -1
  70. utilities/version.py +202 -45
  71. utilities/whenever.py +22 -150
  72. dycw_utilities-0.175.17.dist-info/METADATA +0 -34
  73. dycw_utilities-0.175.17.dist-info/RECORD +0 -103
  74. utilities/atomicwrites.py +0 -182
  75. utilities/cryptography.py +0 -41
  76. utilities/getpass.py +0 -8
  77. utilities/git.py +0 -19
  78. utilities/grp.py +0 -28
  79. utilities/gzip.py +0 -31
  80. utilities/json.py +0 -70
  81. utilities/permissions.py +0 -298
  82. utilities/pickle.py +0 -25
  83. utilities/pwd.py +0 -28
  84. utilities/re.py +0 -156
  85. utilities/sentinel.py +0 -73
  86. utilities/socket.py +0 -8
  87. utilities/string.py +0 -20
  88. utilities/tempfile.py +0 -136
  89. utilities/tzdata.py +0 -11
  90. utilities/tzlocal.py +0 -28
  91. utilities/warnings.py +0 -65
  92. utilities/zipfile.py +0 -25
  93. utilities/zoneinfo.py +0 -133
  94. {dycw_utilities-0.175.17.dist-info → dycw_utilities-0.185.8.dist-info}/entry_points.txt +0 -0
utilities/jinja2.py CHANGED
@@ -9,7 +9,6 @@ from jinja2.defaults import (
     BLOCK_START_STRING,
     COMMENT_END_STRING,
     COMMENT_START_STRING,
-    KEEP_TRAILING_NEWLINE,
     LINE_COMMENT_PREFIX,
     LINE_STATEMENT_PREFIX,
     LSTRIP_BLOCKS,
@@ -19,8 +18,7 @@ from jinja2.defaults import (
     VARIABLE_START_STRING,
 )
 
-from utilities.atomicwrites import writer
-from utilities.text import kebab_case, pascal_case, snake_case
+from utilities.core import kebab_case, pascal_case, snake_case, write_text
 
 if TYPE_CHECKING:
     from collections.abc import Callable, Sequence
@@ -48,7 +46,7 @@ class EnhancedEnvironment(Environment):
         trim_blocks: bool = TRIM_BLOCKS,
         lstrip_blocks: bool = LSTRIP_BLOCKS,
         newline_sequence: Literal["\n", "\r\n", "\r"] = NEWLINE_SEQUENCE,
-        keep_trailing_newline: bool = KEEP_TRAILING_NEWLINE,
+        keep_trailing_newline: bool = True,
         extensions: Sequence[str | type[Extension]] = (),
         optimized: bool = True,
         undefined: type[Undefined] = Undefined,
@@ -108,8 +106,7 @@ class TemplateJob:
         """Run the job."""
         match self.mode:
             case "write":
-                with writer(self.target, overwrite=True) as temp:
-                    _ = temp.write_text(self.rendered)
+                write_text(self.target, self.rendered, overwrite=True)
             case "append":
                 with self.target.open(mode="a") as fh:
                     _ = fh.write(self.rendered)
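Note: the jinja2 hunk above replaces the atomic-write context manager with a single call. A minimal sketch of the new call site, with a hypothetical target path and content (only the write_text(path, text, overwrite=True) shape is taken from the hunk):

    from pathlib import Path

    from utilities.core import write_text  # replaces utilities.atomicwrites.writer

    target = Path("out/config.toml")  # hypothetical
    rendered = "key = 'value'\n"  # hypothetical

    # 0.175.17:
    #     with writer(target, overwrite=True) as temp:
    #         _ = temp.write_text(rendered)
    # 0.185.8:
    write_text(target, rendered, overwrite=True)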
utilities/jupyter.py CHANGED
@@ -3,13 +3,15 @@ from __future__ import annotations
 from contextlib import ExitStack
 from dataclasses import dataclass, field, replace
 from itertools import chain
-from typing import TYPE_CHECKING, Any, Self
+from typing import TYPE_CHECKING, Self
 
 from utilities.ipython import check_ipython_class
 
 if TYPE_CHECKING:
     from types import TracebackType
 
+    from utilities.types import StrDict
+
 
 def is_jupyter() -> bool:
     """Check if `jupyter` is running."""
@@ -55,7 +57,7 @@ class _Show:
         except ModuleNotFoundError:  # pragma: no cover
             pass
         else:
-            kwargs: dict[str, Any] = {}
+            kwargs: StrDict = {}
             if self.dp is not None:
                 kwargs["display.precision"] = self.dp
             if self.rows is not None:
@@ -72,7 +74,7 @@ class _Show:
         except ModuleNotFoundError:  # pragma: no cover
             pass
         else:
-            kwargs: dict[str, Any] = {}
+            kwargs: StrDict = {}
             if self.dp is not None:
                 kwargs["float_precision"] = self.dp
             if self.rows is not None:
utilities/libcst.py CHANGED
@@ -72,7 +72,7 @@ class GenerateImportFromError(Exception):
 ##
 
 
-@dataclass(kw_only=True, slots=True)
+@dataclass(order=True, unsafe_hash=True, kw_only=True, slots=True)
 class _ParseImportOutput:
     module: str
     name: str | None = None
utilities/lightweight_charts.py CHANGED
@@ -3,10 +3,8 @@ from __future__ import annotations
 from dataclasses import dataclass
 from typing import TYPE_CHECKING, override
 
-from utilities.atomicwrites import writer  # pragma: no cover
 from utilities.contextlib import enhanced_async_context_manager
-from utilities.iterables import OneEmptyError, OneNonUniqueError, one
-from utilities.reprlib import get_repr
+from utilities.core import OneEmptyError, OneNonUniqueError, one, repr_, write_bytes
 
 if TYPE_CHECKING:
     from collections.abc import AsyncIterator
@@ -25,8 +23,8 @@ if TYPE_CHECKING:
 def save_chart(chart: Chart, path: PathLike, /, *, overwrite: bool = False) -> None:
     """Atomically save a chart to disk."""
     chart.show(block=False)  # pragma: no cover
-    with writer(path, overwrite=overwrite) as temp:  # pragma: no cover
-        _ = temp.write_bytes(chart.screenshot())
+    data = chart.screenshot()  # pragma: no cover
+    write_bytes(path, data, overwrite=overwrite)  # pragma: no cover
     chart.exit()  # pragma: no cover
 
 
@@ -72,7 +70,7 @@ class _SetDataFrameNonUniqueError(SetDataFrameError):
 
     @override
     def __str__(self) -> str:
-        return f"{get_repr(self.schema)} must contain exactly 1 date/datetime column; got {self.first!r}, {self.second!r} and perhaps more"
+        return f"{repr_(self.schema)} must contain exactly 1 date/datetime column; got {self.first!r}, {self.second!r} and perhaps more"
 
 
 ##
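Note: the same migration applies to binary payloads in lightweight_charts, via write_bytes. A sketch with hypothetical data, following the write_bytes(path, data, overwrite=...) shape shown above:

    from pathlib import Path

    from utilities.core import write_bytes  # replaces the writer(...) context manager

    data = b"\x89PNG\r\n\x1a\n..."  # hypothetical screenshot bytes
    write_bytes(Path("chart.png"), data, overwrite=True)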
utilities/logging.py CHANGED
@@ -35,24 +35,23 @@ from typing import (
 
 from whenever import ZonedDateTime
 
-from utilities.atomicwrites import move_many
-from utilities.dataclasses import replace_non_sentinel
-from utilities.errors import ImpossibleCaseError
-from utilities.iterables import OneEmptyError, always_iterable, one
-from utilities.pathlib import ensure_suffix, to_path
-from utilities.re import (
+from utilities.constants import SECOND, Sentinel, sentinel
+from utilities.core import (
     ExtractGroupError,
     ExtractGroupsError,
+    OneEmptyError,
+    always_iterable,
     extract_group,
     extract_groups,
-)
-from utilities.sentinel import Sentinel, sentinel
-from utilities.whenever import (
-    WheneverLogRecord,
-    format_compact,
     get_now_local,
-    to_zoned_date_time,
+    move_many,
+    one,
+    replace_non_sentinel,
 )
+from utilities.errors import ImpossibleCaseError
+from utilities.functions import in_seconds
+from utilities.pathlib import ensure_suffix, to_path
+from utilities.whenever import WheneverLogRecord, format_compact, to_zoned_date_time
 
 if TYPE_CHECKING:
     from collections.abc import Callable, Iterable, Mapping, MutableMapping
@@ -60,6 +59,7 @@ if TYPE_CHECKING:
     from logging import _FilterType
 
     from utilities.types import (
+        Duration,
         LoggerLike,
         LogLevel,
         MaybeCallablePathLike,
@@ -356,7 +356,7 @@ class SizeAndTimeRotatingFileHandler(BaseRotatingHandler):
         errors: Literal["strict", "ignore", "replace"] | None = None,
         maxBytes: int = _DEFAULT_MAX_BYTES,
         when: _When = _DEFAULT_WHEN,
-        interval: int = 1,
+        interval: Duration = SECOND,
         backupCount: int = _DEFAULT_BACKUP_COUNT,
         utc: bool = False,
         atTime: time | None = None,
@@ -374,7 +374,7 @@ class SizeAndTimeRotatingFileHandler(BaseRotatingHandler):
         self._time_handler = TimedRotatingFileHandler(
             path,
             when=when,
-            interval=interval,
+            interval=cast("Any", in_seconds(interval)),  # float is OK
             backupCount=backupCount,
             encoding=encoding,
             delay=delay,
@@ -492,7 +492,9 @@ class _RolloverActions:
     def do(self) -> None:
        for deletion in self.deletions:
            deletion.delete()
-        move_many(*((r.file.path, r.destination) for r in self.rotations))
+        move_many(
+            *((r.file.path, r.destination) for r in self.rotations), overwrite=True
+        )
 
 
 @dataclass(order=True, unsafe_hash=True, kw_only=True)
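Note: the handler's interval parameter changes type. In 0.175.17 it was an int count of `when` units; in 0.185.8 it is a Duration (default SECOND) converted to float seconds via in_seconds(...) before reaching the stdlib TimedRotatingFileHandler. A hedged sketch of a call site; the filename and settings are hypothetical, and it assumes SECOND supports integer multiplication:

    from utilities.constants import SECOND
    from utilities.logging import SizeAndTimeRotatingFileHandler

    # 0.175.17: interval=30 meant "30 units of `when`"
    # 0.185.8:  interval is a Duration, converted internally via in_seconds(...)
    handler = SizeAndTimeRotatingFileHandler(
        "app.log", interval=30 * SECOND, backupCount=5
    )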
utilities/math.py CHANGED
@@ -7,28 +7,13 @@ from math import ceil, exp, floor, isclose, isfinite, isinf, isnan, log, log10,
 from re import Match, search
 from typing import TYPE_CHECKING, Literal, assert_never, overload, override
 
+from utilities.core import ExtractGroupsError, extract_groups
 from utilities.errors import ImpossibleCaseError
-from utilities.re import ExtractGroupsError, extract_groups
 
 if TYPE_CHECKING:
     from utilities.types import MathRoundMode, Number, Sign
 
 
-MIN_FLOAT32, MAX_FLOAT32 = -3.4028234663852886e38, 3.4028234663852886e38
-MIN_FLOAT64, MAX_FLOAT64 = -1.7976931348623157e308, 1.7976931348623157e308
-MIN_INT8, MAX_INT8 = -(2 ** (8 - 1)), 2 ** (8 - 1) - 1
-MIN_INT16, MAX_INT16 = -(2 ** (16 - 1)), 2 ** (16 - 1) - 1
-MIN_INT32, MAX_INT32 = -(2 ** (32 - 1)), 2 ** (32 - 1) - 1
-MIN_INT64, MAX_INT64 = -(2 ** (64 - 1)), 2 ** (64 - 1) - 1
-MIN_UINT8, MAX_UINT8 = 0, 2**8 - 1
-MIN_UINT16, MAX_UINT16 = 0, 2**16 - 1
-MIN_UINT32, MAX_UINT32 = 0, 2**32 - 1
-MIN_UINT64, MAX_UINT64 = 0, 2**64 - 1
-
-
-##
-
-
 def check_integer(
     n: int,
     /,
@@ -893,26 +878,6 @@ def significant_figures(x: float, /, *, n: int = 2) -> str:
 
 __all__ = [
     "MAX_DECIMALS",
-    "MAX_FLOAT32",
-    "MAX_FLOAT64",
-    "MAX_INT8",
-    "MAX_INT16",
-    "MAX_INT32",
-    "MAX_INT64",
-    "MAX_UINT8",
-    "MAX_UINT16",
-    "MAX_UINT32",
-    "MAX_UINT64",
-    "MIN_FLOAT32",
-    "MIN_FLOAT64",
-    "MIN_INT8",
-    "MIN_INT16",
-    "MIN_INT32",
-    "MIN_INT64",
-    "MIN_UINT8",
-    "MIN_UINT16",
-    "MIN_UINT32",
-    "MIN_UINT64",
     "CheckIntegerError",
     "EWMParametersError",
     "ParseNumberError",
utilities/more_itertools.py CHANGED
@@ -18,10 +18,8 @@ from typing import (
 from more_itertools import bucket, partition, split_into
 from more_itertools import peekable as _peekable
 
-from utilities.functions import get_class_name
-from utilities.iterables import OneNonUniqueError, one
-from utilities.reprlib import get_repr
-from utilities.sentinel import Sentinel, is_sentinel, sentinel
+from utilities.constants import Sentinel, sentinel
+from utilities.core import OneNonUniqueError, get_class_name, is_sentinel, one, repr_
 
 if TYPE_CHECKING:
     from collections.abc import Iterable, Iterator, Mapping, Sequence
@@ -232,7 +230,7 @@ class BucketMappingError[K: Hashable, V](Exception):
     @override
     def __str__(self) -> str:
         parts = [
-            f"{get_repr(key)} (#1: {get_repr(first)}, #2: {get_repr(second)})"
+            f"{repr_(key)} (#1: {repr_(first)}, #2: {repr_(second)})"
             for key, (first, second) in self.errors.items()
         ]
         desc = ", ".join(parts)
@@ -316,7 +314,7 @@ class Split[T]:
         tail_first = indent("tail=", spaces)
         tail_rest = indent(repr(self.tail), 2 * spaces)
         joined = f"{head_first}\n{head_rest}\n{tail_first}\n{tail_rest}"
-        return f"{cls}(\n{joined}\n)"
+        return f"{cls}(\n{joined}\n)\n"
 
 
 def yield_splits[T](
utilities/numpy.py CHANGED
@@ -39,7 +39,8 @@ from numpy.linalg import det, eig
 from numpy.random import default_rng
 from numpy.typing import NDArray
 
-from utilities.iterables import always_iterable, is_iterable_not_str
+from utilities.core import always_iterable
+from utilities.iterables import is_iterable_not_str
 
 if TYPE_CHECKING:
     from collections.abc import Callable, Iterable
utilities/operator.py CHANGED
@@ -6,8 +6,8 @@ from dataclasses import asdict, dataclass
 from typing import TYPE_CHECKING, Any, cast, override
 
 import utilities.math
+from utilities.core import repr_
 from utilities.iterables import SortIterableError, sort_iterable
-from utilities.reprlib import get_repr
 from utilities.typing import is_dataclass_instance
 
 if TYPE_CHECKING:
@@ -109,7 +109,7 @@ class IsEqualError(Exception):
 
     @override
     def __str__(self) -> str:
-        return f"Unable to sort {get_repr(self.x)} and {get_repr(self.y)}"  # pragma: no cover
+        return f"Unable to sort {repr_(self.x)} and {repr_(self.y)}"  # pragma: no cover
 
 
 __all__ = ["IsEqualError", "is_equal"]
utilities/orjson.py CHANGED
@@ -37,23 +37,15 @@ from whenever import (
 )
 
 from utilities.concurrent import concurrent_map
+from utilities.constants import LOCAL_TIME_ZONE, MAX_INT64, MIN_INT64
+from utilities.core import OneEmptyError, always_iterable, one, read_bytes, write_bytes
 from utilities.dataclasses import dataclass_to_dict
 from utilities.functions import ensure_class
-from utilities.gzip import read_binary
-from utilities.iterables import (
-    OneEmptyError,
-    always_iterable,
-    merge_sets,
-    one,
-    one_unique,
-)
-from utilities.json import write_formatted_json
+from utilities.iterables import merge_sets
 from utilities.logging import get_logging_level_number
-from utilities.math import MAX_INT64, MIN_INT64
 from utilities.types import Dataclass, LogLevel, MaybeIterable, PathLike, StrMapping
-from utilities.typing import is_string_mapping
-from utilities.tzlocal import LOCAL_TIME_ZONE
-from utilities.version import Version, parse_version
+from utilities.typing import is_str_mapping
+from utilities.version import Version2, Version3
 from utilities.whenever import (
     DatePeriod,
     TimePeriod,
@@ -99,7 +91,8 @@ class _Prefixes(StrEnum):
     tuple_ = "tu"
     unserializable = "un"
     uuid = "uu"
-    version = "v"
+    version2 = "v2"
+    version3 = "v3"
     year_month = "ym"
     zoned_date_time = "zd"
     zoned_date_time_period = "zp"
@@ -213,8 +206,10 @@ def _pre_process(
             return f"[{_Prefixes.exception_class.value}|{error_cls.__qualname__}]"
         case UUID() as uuid:
             return f"[{_Prefixes.uuid.value}]{uuid}"
-        case Version() as version:
-            return f"[{_Prefixes.version.value}]{version}"
+        case Version2() as version:
+            return f"[{_Prefixes.version2.value}]{version}"
+        case Version3() as version:
+            return f"[{_Prefixes.version3.value}]{version}"
         case YearMonth() as year_month:
             return f"[{_Prefixes.year_month.value}]{year_month}"
         case ZonedDateTime() as date_time:
@@ -409,7 +404,8 @@ class DeerializeError(Exception):
     _TIME_DELTA_PATTERN,
     _TIME_PERIOD_PATTERN,
     _UUID_PATTERN,
-    _VERSION_PATTERN,
+    _VERSION2_PATTERN,
+    _VERSION3_PATTERN,
     _YEAR_MONTH_PATTERN,
     _ZONED_DATE_TIME_PATTERN,
     _ZONED_DATE_TIME_PERIOD_PATTERN,
@@ -433,7 +429,8 @@ class DeerializeError(Exception):
     _Prefixes.time_delta,
     _Prefixes.time_period,
     _Prefixes.uuid,
-    _Prefixes.version,
+    _Prefixes.version2,
+    _Prefixes.version3,
     _Prefixes.year_month,
     _Prefixes.zoned_date_time,
     _Prefixes.zoned_date_time_period,
@@ -514,8 +511,10 @@ def _object_hook(
         return TimePeriod(start, end)
     if match := _UUID_PATTERN.search(text):
         return UUID(match.group(1))
-    if match := _VERSION_PATTERN.search(text):
-        return parse_version(match.group(1))
+    if match := _VERSION2_PATTERN.search(text):
+        return Version2.parse(match.group(1))
+    if match := _VERSION3_PATTERN.search(text):
+        return Version3.parse(match.group(1))
     if match := _YEAR_MONTH_PATTERN.search(text):
         return YearMonth.parse_iso(match.group(1))
     if match := _ZONED_DATE_TIME_PATTERN.search(text):
@@ -566,7 +565,7 @@ def _object_hook(
     ) is not None:
         return container
     if (
-        is_string_mapping(value)
+        is_str_mapping(value)
         and (
             dataclass := _object_hook_dataclass(
                 key,
@@ -587,7 +586,7 @@ def _object_hook(
     ) is not None:
         return enum
     if (
-        is_string_mapping(value)
+        is_str_mapping(value)
         and (
             exc_instance := _object_hook_exception_instance(
                 key, value, data=data, objects=objects, redirects=redirects
@@ -953,7 +952,7 @@ class GetLogRecordsOutput:
             for r in self.records
         ]
         if len(records) >= 1:
-            time_zone = one_unique(ZoneInfo(r.datetime.tz) for r in records)
+            time_zone = one({ZoneInfo(r.datetime.tz) for r in records})
         else:
             time_zone = LOCAL_TIME_ZONE
         return DataFrame(
@@ -1263,7 +1262,7 @@ def read_object(
     redirects: Mapping[str, type[Any]] | None = None,
 ) -> Any:
     """Read an object from disk."""
-    data = read_binary(path, decompress=decompress)
+    data = read_bytes(path, decompress=decompress)
     return deserialize(
         data, dataclass_hook=dataclass_hook, objects=objects, redirects=redirects
     )
@@ -1293,7 +1292,7 @@
         dataclass_hook=dataclass_hook,
         dataclass_defaults=dataclass_defaults,
     )
-    write_formatted_json(data, path, compress=compress, overwrite=overwrite)
+    write_bytes(path, data, compress=compress, overwrite=overwrite, json=True)
 
 
 __all__ = [
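Note: two orjson changes stand out. Versions are now tagged [v2]/[v3] and parsed with Version2.parse/Version3.parse instead of a single [v] prefix with parse_version, and one_unique(...) collapses to one(...) over a set. A sketch of the latter with hypothetical time-zone strings:

    from utilities.core import one

    tzs = ["UTC", "UTC", "UTC"]  # hypothetical record time zones
    # 0.175.17: one_unique(ZoneInfo(tz) for tz in tzs)
    # 0.185.8:  deduplicate first, then require exactly one element
    time_zone = one(set(tzs))  # "UTC"; raises if the set does not have exactly one element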
utilities/os.py CHANGED
@@ -1,43 +1,12 @@
 from __future__ import annotations
 
-from contextlib import suppress
 from dataclasses import dataclass
-from os import cpu_count, environ, getenv
-from typing import TYPE_CHECKING, Literal, assert_never, overload, override
+from typing import TYPE_CHECKING, assert_never, override
 
-from utilities.contextlib import enhanced_context_manager
-from utilities.iterables import OneStrEmptyError, one_str
-from utilities.platform import SYSTEM
+from utilities.constants import CPU_COUNT
 
 if TYPE_CHECKING:
-    from collections.abc import Iterator, Mapping
-
-
-type IntOrAll = int | Literal["all"]
-
-
-##
-
-
-def get_cpu_count() -> int:
-    """Get the CPU count."""
-    count = cpu_count()
-    if count is None:  # pragma: no cover
-        raise GetCPUCountError
-    return count
-
-
-@dataclass(kw_only=True, slots=True)
-class GetCPUCountError(Exception):
-    @override
-    def __str__(self) -> str:
-        return "CPU count must not be None"  # pragma: no cover
-
-
-CPU_COUNT = get_cpu_count()
-
-
-##
+    from utilities.types import IntOrAll
 
 
 def get_cpu_use(*, n: IntOrAll = "all") -> int:
@@ -62,154 +31,4 @@ class GetCPUUseError(Exception):
         return f"Invalid number of CPUs to use: {self.n}"
 
 
-##
-
-
-@overload
-def get_env_var(
-    key: str, /, *, case_sensitive: bool = False, default: str, nullable: bool = False
-) -> str: ...
-@overload
-def get_env_var(
-    key: str,
-    /,
-    *,
-    case_sensitive: bool = False,
-    default: None = None,
-    nullable: Literal[False] = False,
-) -> str: ...
-@overload
-def get_env_var(
-    key: str,
-    /,
-    *,
-    case_sensitive: bool = False,
-    default: str | None = None,
-    nullable: bool = False,
-) -> str | None: ...
-def get_env_var(
-    key: str,
-    /,
-    *,
-    case_sensitive: bool = False,
-    default: str | None = None,
-    nullable: bool = False,
-) -> str | None:
-    """Get an environment variable."""
-    try:
-        key_use = one_str(environ, key, case_sensitive=case_sensitive)
-    except OneStrEmptyError:
-        match default, nullable:
-            case None, False:
-                raise GetEnvVarError(key=key, case_sensitive=case_sensitive) from None
-            case None, True:
-                return None
-            case str(), _:
-                return default
-            case never:
-                assert_never(never)
-    return environ[key_use]
-
-
-@dataclass(kw_only=True, slots=True)
-class GetEnvVarError(Exception):
-    key: str
-    case_sensitive: bool = False
-
-    @override
-    def __str__(self) -> str:
-        desc = f"No environment variable {self.key!r}"
-        return desc if self.case_sensitive else f"{desc} (modulo case)"
-
-
-##
-
-
-def get_effective_group_id() -> int | None:
-    """Get the effective group ID."""
-    match SYSTEM:
-        case "windows":  # skipif-not-windows
-            return None
-        case "mac" | "linux":  # skipif-windows
-            from os import getegid
-
-            return getegid()
-        case never:
-            assert_never(never)
-
-
-def get_effective_user_id() -> int | None:
-    """Get the effective user ID."""
-    match SYSTEM:
-        case "windows":  # skipif-not-windows
-            return None
-        case "mac" | "linux":  # skipif-windows
-            from os import geteuid
-
-            return geteuid()
-        case never:
-            assert_never(never)
-
-
-EFFECTIVE_USER_ID = get_effective_user_id()
-EFFECTIVE_GROUP_ID = get_effective_group_id()
-
-
-##
-
-
-def is_debug() -> bool:
-    """Check if we are in `DEBUG` mode."""
-    return get_env_var("DEBUG", nullable=True) is not None
-
-
-##
-
-
-def is_pytest() -> bool:
-    """Check if `pytest` is running."""
-    return get_env_var("PYTEST_VERSION", nullable=True) is not None
-
-
-##
-
-
-@enhanced_context_manager
-def temp_environ(
-    env: Mapping[str, str | None] | None = None, **env_kwargs: str | None
-) -> Iterator[None]:
-    """Context manager with temporary environment variable set."""
-    mapping: dict[str, str | None] = ({} if env is None else dict(env)) | env_kwargs
-    prev = {key: getenv(key) for key in mapping}
-
-    def apply(mapping: Mapping[str, str | None], /) -> None:
-        for key, value in mapping.items():
-            if value is None:
-                with suppress(KeyError):
-                    del environ[key]
-            else:
-                environ[key] = value
-
-    apply(mapping)
-    try:
-        yield
-    finally:
-        apply(prev)
-
-
-__all__ = [
-    "CPU_COUNT",
-    "EFFECTIVE_GROUP_ID",
-    "EFFECTIVE_USER_ID",
-    "GetCPUCountError",
-    "GetCPUUseError",
-    "IntOrAll",
-    "get_cpu_count",
-    "get_cpu_use",
-    "get_effective_group_id",
-    "get_effective_user_id",
-    "get_env_var",
-    "is_debug",
-    "is_pytest",
-    "temp_environ",
-]
+__all__ = ["GetCPUUseError", "get_cpu_use"]
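Note: most of utilities.os moves out. CPU_COUNT now lives in utilities.constants (and is what this module imports), IntOrAll comes from utilities.types, and get_env_var, temp_environ, is_debug, is_pytest and the effective-ID helpers no longer live here (this diff does not show where, if anywhere, they were relocated). Only get_cpu_use and GetCPUUseError remain. A sketch of the surviving surface; the "all" behaviour is presumed rather than shown in this hunk:

    from utilities.constants import CPU_COUNT
    from utilities.os import get_cpu_use

    workers = get_cpu_use(n="all")  # presumably resolves "all" to the machine's CPU count
    print(CPU_COUNT, workers)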