dycw-utilities 0.166.30__py3-none-any.whl → 0.185.8__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (96)
  1. dycw_utilities-0.185.8.dist-info/METADATA +33 -0
  2. dycw_utilities-0.185.8.dist-info/RECORD +90 -0
  3. {dycw_utilities-0.166.30.dist-info → dycw_utilities-0.185.8.dist-info}/WHEEL +1 -1
  4. {dycw_utilities-0.166.30.dist-info → dycw_utilities-0.185.8.dist-info}/entry_points.txt +1 -0
  5. utilities/__init__.py +1 -1
  6. utilities/altair.py +17 -10
  7. utilities/asyncio.py +50 -72
  8. utilities/atools.py +9 -11
  9. utilities/cachetools.py +16 -11
  10. utilities/click.py +76 -19
  11. utilities/concurrent.py +1 -1
  12. utilities/constants.py +492 -0
  13. utilities/contextlib.py +23 -30
  14. utilities/contextvars.py +1 -23
  15. utilities/core.py +2581 -0
  16. utilities/dataclasses.py +16 -119
  17. utilities/docker.py +387 -0
  18. utilities/enum.py +1 -1
  19. utilities/errors.py +2 -16
  20. utilities/fastapi.py +5 -5
  21. utilities/fpdf2.py +2 -1
  22. utilities/functions.py +34 -265
  23. utilities/http.py +2 -3
  24. utilities/hypothesis.py +84 -29
  25. utilities/importlib.py +17 -1
  26. utilities/iterables.py +39 -575
  27. utilities/jinja2.py +145 -0
  28. utilities/jupyter.py +5 -3
  29. utilities/libcst.py +1 -1
  30. utilities/lightweight_charts.py +4 -6
  31. utilities/logging.py +24 -24
  32. utilities/math.py +1 -36
  33. utilities/more_itertools.py +4 -6
  34. utilities/numpy.py +2 -1
  35. utilities/operator.py +2 -2
  36. utilities/orjson.py +42 -43
  37. utilities/os.py +4 -147
  38. utilities/packaging.py +129 -0
  39. utilities/parse.py +35 -15
  40. utilities/pathlib.py +3 -120
  41. utilities/platform.py +8 -90
  42. utilities/polars.py +38 -32
  43. utilities/postgres.py +37 -33
  44. utilities/pottery.py +20 -18
  45. utilities/pqdm.py +3 -4
  46. utilities/psutil.py +2 -3
  47. utilities/pydantic.py +25 -0
  48. utilities/pydantic_settings.py +87 -16
  49. utilities/pydantic_settings_sops.py +16 -3
  50. utilities/pyinstrument.py +4 -4
  51. utilities/pytest.py +96 -125
  52. utilities/pytest_plugins/pytest_regressions.py +2 -2
  53. utilities/pytest_regressions.py +32 -11
  54. utilities/random.py +2 -8
  55. utilities/redis.py +98 -94
  56. utilities/reprlib.py +11 -118
  57. utilities/shellingham.py +66 -0
  58. utilities/shutil.py +25 -0
  59. utilities/slack_sdk.py +13 -12
  60. utilities/sqlalchemy.py +57 -30
  61. utilities/sqlalchemy_polars.py +16 -25
  62. utilities/subprocess.py +2590 -0
  63. utilities/tabulate.py +32 -0
  64. utilities/testbook.py +8 -8
  65. utilities/text.py +24 -99
  66. utilities/throttle.py +159 -0
  67. utilities/time.py +18 -0
  68. utilities/timer.py +31 -14
  69. utilities/traceback.py +16 -23
  70. utilities/types.py +42 -2
  71. utilities/typing.py +26 -14
  72. utilities/uuid.py +1 -1
  73. utilities/version.py +202 -45
  74. utilities/whenever.py +53 -150
  75. dycw_utilities-0.166.30.dist-info/METADATA +0 -41
  76. dycw_utilities-0.166.30.dist-info/RECORD +0 -98
  77. dycw_utilities-0.166.30.dist-info/licenses/LICENSE +0 -21
  78. utilities/aeventkit.py +0 -388
  79. utilities/atomicwrites.py +0 -182
  80. utilities/cryptography.py +0 -41
  81. utilities/getpass.py +0 -8
  82. utilities/git.py +0 -19
  83. utilities/gzip.py +0 -31
  84. utilities/json.py +0 -70
  85. utilities/pickle.py +0 -25
  86. utilities/re.py +0 -156
  87. utilities/sentinel.py +0 -73
  88. utilities/socket.py +0 -8
  89. utilities/string.py +0 -20
  90. utilities/tempfile.py +0 -77
  91. utilities/typed_settings.py +0 -152
  92. utilities/tzdata.py +0 -11
  93. utilities/tzlocal.py +0 -28
  94. utilities/warnings.py +0 -65
  95. utilities/zipfile.py +0 -25
  96. utilities/zoneinfo.py +0 -133
utilities/jinja2.py ADDED
@@ -0,0 +1,145 @@
+ from __future__ import annotations
+
+ from dataclasses import dataclass
+ from typing import TYPE_CHECKING, Any, Literal, assert_never, override
+
+ from jinja2 import BaseLoader, BytecodeCache, Environment, FileSystemLoader, Undefined
+ from jinja2.defaults import (
+     BLOCK_END_STRING,
+     BLOCK_START_STRING,
+     COMMENT_END_STRING,
+     COMMENT_START_STRING,
+     LINE_COMMENT_PREFIX,
+     LINE_STATEMENT_PREFIX,
+     LSTRIP_BLOCKS,
+     NEWLINE_SEQUENCE,
+     TRIM_BLOCKS,
+     VARIABLE_END_STRING,
+     VARIABLE_START_STRING,
+ )
+
+ from utilities.core import kebab_case, pascal_case, snake_case, write_text
+
+ if TYPE_CHECKING:
+     from collections.abc import Callable, Sequence
+     from pathlib import Path
+
+     from jinja2.ext import Extension
+
+     from utilities.types import StrMapping
+
+
+ class EnhancedEnvironment(Environment):
+     """Environment with enhanced features."""
+
+     @override
+     def __init__(
+         self,
+         block_start_string: str = BLOCK_START_STRING,
+         block_end_string: str = BLOCK_END_STRING,
+         variable_start_string: str = VARIABLE_START_STRING,
+         variable_end_string: str = VARIABLE_END_STRING,
+         comment_start_string: str = COMMENT_START_STRING,
+         comment_end_string: str = COMMENT_END_STRING,
+         line_statement_prefix: str | None = LINE_STATEMENT_PREFIX,
+         line_comment_prefix: str | None = LINE_COMMENT_PREFIX,
+         trim_blocks: bool = TRIM_BLOCKS,
+         lstrip_blocks: bool = LSTRIP_BLOCKS,
+         newline_sequence: Literal["\n", "\r\n", "\r"] = NEWLINE_SEQUENCE,
+         keep_trailing_newline: bool = True,
+         extensions: Sequence[str | type[Extension]] = (),
+         optimized: bool = True,
+         undefined: type[Undefined] = Undefined,
+         finalize: Callable[..., Any] | None = None,
+         autoescape: bool | Callable[[str | None], bool] = False,
+         loader: BaseLoader | None = None,
+         cache_size: int = 400,
+         auto_reload: bool = True,
+         bytecode_cache: BytecodeCache | None = None,
+         enable_async: bool = False,
+     ) -> None:
+         super().__init__(
+             block_start_string,
+             block_end_string,
+             variable_start_string,
+             variable_end_string,
+             comment_start_string,
+             comment_end_string,
+             line_statement_prefix,
+             line_comment_prefix,
+             trim_blocks,
+             lstrip_blocks,
+             newline_sequence,
+             keep_trailing_newline,
+             extensions,
+             optimized,
+             undefined,
+             finalize,
+             autoescape,
+             loader,
+             cache_size,
+             auto_reload,
+             bytecode_cache,
+             enable_async,
+         )
+         self.filters["kebab"] = kebab_case
+         self.filters["pascal"] = pascal_case
+         self.filters["snake"] = snake_case
+
+
+ @dataclass(order=True, unsafe_hash=True, kw_only=True, slots=True)
+ class TemplateJob:
+     """A template with an associated rendering job."""
+
+     template: Path
+     kwargs: StrMapping
+     target: Path
+     mode: Literal["write", "append"] = "write"
+
+     def __post_init__(self) -> None:
+         if not self.template.exists():
+             raise _TemplateJobTemplateDoesNotExistError(path=self.template)
+         if (self.mode == "append") and not self.target.exists():
+             raise _TemplateJobTargetDoesNotExistError(path=self.template)
+
+     def run(self) -> None:
+         """Run the job."""
+         match self.mode:
+             case "write":
+                 write_text(self.target, self.rendered, overwrite=True)
+             case "append":
+                 with self.target.open(mode="a") as fh:
+                     _ = fh.write(self.rendered)
+             case never:
+                 assert_never(never)
+
+     @property
+     def rendered(self) -> str:
+         """The template, rendered."""
+         env = EnhancedEnvironment(loader=FileSystemLoader(self.template.parent))
+         return env.get_template(self.template.name).render(self.kwargs)
+
+
+ @dataclass(kw_only=True, slots=True)
+ class TemplateJobError(Exception): ...
+
+
+ @dataclass(kw_only=True, slots=True)
+ class _TemplateJobTemplateDoesNotExistError(TemplateJobError):
+     path: Path
+
+     @override
+     def __str__(self) -> str:
+         return f"Template {str(self.path)!r} does not exist"
+
+
+ @dataclass(kw_only=True, slots=True)
+ class _TemplateJobTargetDoesNotExistError(TemplateJobError):
+     path: Path
+
+     @override
+     def __str__(self) -> str:
+         return f"Target {str(self.path)!r} does not exist"
+
+
+ __all__ = ["EnhancedEnvironment", "TemplateJob", "TemplateJobError"]
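Since utilities/jinja2.py is entirely new, a minimal usage sketch of TemplateJob may help; it is derived only from the code above, and the template/target paths are hypothetical:

from pathlib import Path

from utilities.jinja2 import TemplateJob

# Hypothetical paths; __post_init__ raises a TemplateJobError subclass when the
# template is missing (or, in "append" mode, when the target is missing).
job = TemplateJob(
    template=Path("templates/module.py.j2"),
    kwargs={"project": "my-project"},  # passed to render(); kebab/pascal/snake filters are registered
    target=Path("src/module.py"),
    mode="write",  # "write" overwrites via write_text; "append" opens the target with mode="a"
)
job.run()
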
utilities/jupyter.py CHANGED
@@ -3,13 +3,15 @@ from __future__ import annotations
  from contextlib import ExitStack
  from dataclasses import dataclass, field, replace
  from itertools import chain
- from typing import TYPE_CHECKING, Any, Self
+ from typing import TYPE_CHECKING, Self

  from utilities.ipython import check_ipython_class

  if TYPE_CHECKING:
      from types import TracebackType

+     from utilities.types import StrDict
+

  def is_jupyter() -> bool:
      """Check if `jupyter` is running."""
@@ -55,7 +57,7 @@ class _Show:
          except ModuleNotFoundError:  # pragma: no cover
              pass
          else:
-             kwargs: dict[str, Any] = {}
+             kwargs: StrDict = {}
              if self.dp is not None:
                  kwargs["display.precision"] = self.dp
              if self.rows is not None:
@@ -72,7 +74,7 @@ class _Show:
          except ModuleNotFoundError:  # pragma: no cover
              pass
          else:
-             kwargs: dict[str, Any] = {}
+             kwargs: StrDict = {}
              if self.dp is not None:
                  kwargs["float_precision"] = self.dp
              if self.rows is not None:
utilities/libcst.py CHANGED
@@ -72,7 +72,7 @@ class GenerateImportFromError(Exception):
  ##


- @dataclass(kw_only=True, slots=True)
+ @dataclass(order=True, unsafe_hash=True, kw_only=True, slots=True)
  class _ParseImportOutput:
      module: str
      name: str | None = None
utilities/lightweight_charts.py CHANGED
@@ -3,10 +3,8 @@ from __future__ import annotations
  from dataclasses import dataclass
  from typing import TYPE_CHECKING, override

- from utilities.atomicwrites import writer  # pragma: no cover
  from utilities.contextlib import enhanced_async_context_manager
- from utilities.iterables import OneEmptyError, OneNonUniqueError, one
- from utilities.reprlib import get_repr
+ from utilities.core import OneEmptyError, OneNonUniqueError, one, repr_, write_bytes

  if TYPE_CHECKING:
      from collections.abc import AsyncIterator
@@ -25,8 +23,8 @@ if TYPE_CHECKING:
  def save_chart(chart: Chart, path: PathLike, /, *, overwrite: bool = False) -> None:
      """Atomically save a chart to disk."""
      chart.show(block=False)  # pragma: no cover
-     with writer(path, overwrite=overwrite) as temp:  # pragma: no cover
-         _ = temp.write_bytes(chart.screenshot())
+     data = chart.screenshot()  # pragma: no cover
+     write_bytes(path, data, overwrite=overwrite)  # pragma: no cover
      chart.exit()  # pragma: no cover


@@ -72,7 +70,7 @@ class _SetDataFrameNonUniqueError(SetDataFrameError):

      @override
      def __str__(self) -> str:
-         return f"{get_repr(self.schema)} must contain exactly 1 date/datetime column; got {self.first!r}, {self.second!r} and perhaps more"
+         return f"{repr_(self.schema)} must contain exactly 1 date/datetime column; got {self.first!r}, {self.second!r} and perhaps more"


  ##
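The removal of utilities.atomicwrites here recurs throughout this release: write_bytes (and write_text, per utilities/jinja2.py above) from the new utilities.core take over. A sketch of the replacement call, using only the path-then-data argument order and the overwrite keyword visible in these hunks; the atomic temp-file-then-rename behavior is assumed from the docstring above:

from utilities.core import write_bytes

# Presumably refuses to clobber an existing file unless overwrite=True,
# matching the old writer(path, overwrite=...) context manager.
write_bytes("chart.png", b"\x89PNG...", overwrite=True)
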
utilities/logging.py CHANGED
@@ -35,24 +35,23 @@ from typing import (
  from whenever import ZonedDateTime

- from utilities.atomicwrites import move_many
- from utilities.dataclasses import replace_non_sentinel
- from utilities.errors import ImpossibleCaseError
- from utilities.iterables import OneEmptyError, always_iterable, one
- from utilities.pathlib import ensure_suffix, to_path
- from utilities.re import (
+ from utilities.constants import SECOND, Sentinel, sentinel
+ from utilities.core import (
      ExtractGroupError,
      ExtractGroupsError,
+     OneEmptyError,
+     always_iterable,
      extract_group,
      extract_groups,
- )
- from utilities.sentinel import Sentinel, sentinel
- from utilities.whenever import (
-     WheneverLogRecord,
-     format_compact,
      get_now_local,
-     to_zoned_date_time,
+     move_many,
+     one,
+     replace_non_sentinel,
  )
+ from utilities.errors import ImpossibleCaseError
+ from utilities.functions import in_seconds
+ from utilities.pathlib import ensure_suffix, to_path
+ from utilities.whenever import WheneverLogRecord, format_compact, to_zoned_date_time

  if TYPE_CHECKING:
      from collections.abc import Callable, Iterable, Mapping, MutableMapping
@@ -60,6 +59,7 @@ if TYPE_CHECKING:
      from logging import _FilterType

      from utilities.types import (
+         Duration,
          LoggerLike,
          LogLevel,
          MaybeCallablePathLike,
@@ -356,7 +356,7 @@ class SizeAndTimeRotatingFileHandler(BaseRotatingHandler):
          errors: Literal["strict", "ignore", "replace"] | None = None,
          maxBytes: int = _DEFAULT_MAX_BYTES,
          when: _When = _DEFAULT_WHEN,
-         interval: int = 1,
+         interval: Duration = SECOND,
          backupCount: int = _DEFAULT_BACKUP_COUNT,
          utc: bool = False,
          atTime: time | None = None,
@@ -374,7 +374,7 @@ class SizeAndTimeRotatingFileHandler(BaseRotatingHandler):
          self._time_handler = TimedRotatingFileHandler(
              path,
              when=when,
-             interval=interval,
+             interval=cast("Any", in_seconds(interval)),  # float is OK
              backupCount=backupCount,
              encoding=encoding,
              delay=delay,
@@ -387,9 +387,7 @@ class SizeAndTimeRotatingFileHandler(BaseRotatingHandler):
      def emit(self, record: LogRecord) -> None:
          try:
              if (self._backup_count is not None) and self._should_rollover(record):
-                 self._do_rollover(  # skipif-ci-and-windows
-                     backup_count=self._backup_count
-                 )
+                 self._do_rollover(backup_count=self._backup_count)
              FileHandler.emit(self, record)
          except Exception:  # noqa: BLE001  # pragma: no cover
              self.handleError(record)
@@ -399,23 +397,23 @@ class SizeAndTimeRotatingFileHandler(BaseRotatingHandler):
          self.stream.close()
          self.stream = None

-         actions = _compute_rollover_actions(  # skipif-ci-and-windows
+         actions = _compute_rollover_actions(
              self._directory,
              self._stem,
              self._suffix,
              patterns=self._patterns,
              backup_count=backup_count,
          )
-         actions.do()  # skipif-ci-and-windows
+         actions.do()

          if not self.delay:  # pragma: no cover
              self.stream = self._open()
-         self._time_handler.rolloverAt = (  # skipif-ci-and-windows
-             self._time_handler.computeRollover(get_now_local().timestamp())
+         self._time_handler.rolloverAt = self._time_handler.computeRollover(
+             get_now_local().timestamp()
          )

      def _should_rollover(self, record: LogRecord, /) -> bool:
-         if self._max_bytes is not None:  # skipif-ci-and-windows
+         if self._max_bytes is not None:
              try:
                  size = self._filename.stat().st_size
              except FileNotFoundError:
@@ -423,7 +421,7 @@ class SizeAndTimeRotatingFileHandler(BaseRotatingHandler):
              else:
                  if size >= self._max_bytes:
                      return True
-         return bool(self._time_handler.shouldRollover(record))  # skipif-ci-and-windows
+         return bool(self._time_handler.shouldRollover(record))


  def _compute_rollover_patterns(stem: str, suffix: str, /) -> _RolloverPatterns:
@@ -494,7 +492,9 @@ class _RolloverActions:
      def do(self) -> None:
          for deletion in self.deletions:
              deletion.delete()
-         move_many(*((r.file.path, r.destination) for r in self.rotations))
+         move_many(
+             *((r.file.path, r.destination) for r in self.rotations), overwrite=True
+         )


  @dataclass(order=True, unsafe_hash=True, kw_only=True)
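One behavioral note on the handler change above: `interval` is now a `Duration` defaulting to `SECOND`, and `in_seconds` converts it to a float for the underlying stdlib TimedRotatingFileHandler. A sketch of the new call style, assuming the log-file path is the first positional argument (as in the stdlib handler) and that `SECOND` supports integer multiplication:

from utilities.constants import SECOND
from utilities.logging import SizeAndTimeRotatingFileHandler

# Rolls over on either trigger: the size threshold or the elapsed interval.
handler = SizeAndTimeRotatingFileHandler("app.log", interval=30 * SECOND)
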
utilities/math.py CHANGED
@@ -7,28 +7,13 @@ from math import ceil, exp, floor, isclose, isfinite, isinf, isnan, log, log10,
  from re import Match, search
  from typing import TYPE_CHECKING, Literal, assert_never, overload, override

+ from utilities.core import ExtractGroupsError, extract_groups
  from utilities.errors import ImpossibleCaseError
- from utilities.re import ExtractGroupsError, extract_groups

  if TYPE_CHECKING:
      from utilities.types import MathRoundMode, Number, Sign


- MIN_FLOAT32, MAX_FLOAT32 = -3.4028234663852886e38, 3.4028234663852886e38
- MIN_FLOAT64, MAX_FLOAT64 = -1.7976931348623157e308, 1.7976931348623157e308
- MIN_INT8, MAX_INT8 = -(2 ** (8 - 1)), 2 ** (8 - 1) - 1
- MIN_INT16, MAX_INT16 = -(2 ** (16 - 1)), 2 ** (16 - 1) - 1
- MIN_INT32, MAX_INT32 = -(2 ** (32 - 1)), 2 ** (32 - 1) - 1
- MIN_INT64, MAX_INT64 = -(2 ** (64 - 1)), 2 ** (64 - 1) - 1
- MIN_UINT8, MAX_UINT8 = 0, 2**8 - 1
- MIN_UINT16, MAX_UINT16 = 0, 2**16 - 1
- MIN_UINT32, MAX_UINT32 = 0, 2**32 - 1
- MIN_UINT64, MAX_UINT64 = 0, 2**64 - 1
-
-
- ##
-
-
  def check_integer(
      n: int,
      /,
@@ -893,26 +878,6 @@ def significant_figures(x: float, /, *, n: int = 2) -> str:

  __all__ = [
      "MAX_DECIMALS",
-     "MAX_FLOAT32",
-     "MAX_FLOAT64",
-     "MAX_INT8",
-     "MAX_INT16",
-     "MAX_INT32",
-     "MAX_INT64",
-     "MAX_UINT8",
-     "MAX_UINT16",
-     "MAX_UINT32",
-     "MAX_UINT64",
-     "MIN_FLOAT32",
-     "MIN_FLOAT64",
-     "MIN_INT8",
-     "MIN_INT16",
-     "MIN_INT32",
-     "MIN_INT64",
-     "MIN_UINT8",
-     "MIN_UINT16",
-     "MIN_UINT32",
-     "MIN_UINT64",
      "CheckIntegerError",
      "EWMParametersError",
      "ParseNumberError",
utilities/more_itertools.py CHANGED
@@ -18,10 +18,8 @@ from typing import (
  from more_itertools import bucket, partition, split_into
  from more_itertools import peekable as _peekable

- from utilities.functions import get_class_name
- from utilities.iterables import OneNonUniqueError, one
- from utilities.reprlib import get_repr
- from utilities.sentinel import Sentinel, is_sentinel, sentinel
+ from utilities.constants import Sentinel, sentinel
+ from utilities.core import OneNonUniqueError, get_class_name, is_sentinel, one, repr_

  if TYPE_CHECKING:
      from collections.abc import Iterable, Iterator, Mapping, Sequence
@@ -232,7 +230,7 @@ class BucketMappingError[K: Hashable, V](Exception):
      @override
      def __str__(self) -> str:
          parts = [
-             f"{get_repr(key)} (#1: {get_repr(first)}, #2: {get_repr(second)})"
+             f"{repr_(key)} (#1: {repr_(first)}, #2: {repr_(second)})"
              for key, (first, second) in self.errors.items()
          ]
          desc = ", ".join(parts)
@@ -316,7 +314,7 @@ class Split[T]:
          tail_first = indent("tail=", spaces)
          tail_rest = indent(repr(self.tail), 2 * spaces)
          joined = f"{head_first}\n{head_rest}\n{tail_first}\n{tail_rest}"
-         return f"{cls}(\n{joined}\n)"
+         return f"{cls}(\n{joined}\n)\n"


  def yield_splits[T](
utilities/numpy.py CHANGED
@@ -39,7 +39,8 @@ from numpy.linalg import det, eig
  from numpy.random import default_rng
  from numpy.typing import NDArray

- from utilities.iterables import always_iterable, is_iterable_not_str
+ from utilities.core import always_iterable
+ from utilities.iterables import is_iterable_not_str

  if TYPE_CHECKING:
      from collections.abc import Callable, Iterable
utilities/operator.py CHANGED
@@ -6,8 +6,8 @@ from dataclasses import asdict, dataclass
  from typing import TYPE_CHECKING, Any, cast, override

  import utilities.math
+ from utilities.core import repr_
  from utilities.iterables import SortIterableError, sort_iterable
- from utilities.reprlib import get_repr
  from utilities.typing import is_dataclass_instance

  if TYPE_CHECKING:
@@ -109,7 +109,7 @@ class IsEqualError(Exception):

      @override
      def __str__(self) -> str:
-         return f"Unable to sort {get_repr(self.x)} and {get_repr(self.y)}"  # pragma: no cover
+         return f"Unable to sort {repr_(self.x)} and {repr_(self.y)}"  # pragma: no cover


  __all__ = ["IsEqualError", "is_equal"]
utilities/orjson.py CHANGED
@@ -37,23 +37,15 @@ from whenever import (
  )

  from utilities.concurrent import concurrent_map
+ from utilities.constants import LOCAL_TIME_ZONE, MAX_INT64, MIN_INT64
+ from utilities.core import OneEmptyError, always_iterable, one, read_bytes, write_bytes
  from utilities.dataclasses import dataclass_to_dict
  from utilities.functions import ensure_class
- from utilities.gzip import read_binary
- from utilities.iterables import (
-     OneEmptyError,
-     always_iterable,
-     merge_sets,
-     one,
-     one_unique,
- )
- from utilities.json import write_formatted_json
+ from utilities.iterables import merge_sets
  from utilities.logging import get_logging_level_number
- from utilities.math import MAX_INT64, MIN_INT64
  from utilities.types import Dataclass, LogLevel, MaybeIterable, PathLike, StrMapping
- from utilities.typing import is_string_mapping
- from utilities.tzlocal import LOCAL_TIME_ZONE
- from utilities.version import Version, parse_version
+ from utilities.typing import is_str_mapping
+ from utilities.version import Version2, Version3
  from utilities.whenever import (
      DatePeriod,
      TimePeriod,
@@ -99,7 +91,8 @@ class _Prefixes(StrEnum):
      tuple_ = "tu"
      unserializable = "un"
      uuid = "uu"
-     version = "v"
+     version2 = "v2"
+     version3 = "v3"
      year_month = "ym"
      zoned_date_time = "zd"
      zoned_date_time_period = "zp"
@@ -213,8 +206,8 @@ def _pre_process(
              return f"[{_Prefixes.exception_class.value}|{error_cls.__qualname__}]"
          case UUID() as uuid:
              return f"[{_Prefixes.uuid.value}]{uuid}"
-         case Version() as version:
-             return f"[{_Prefixes.version.value}]{version}"
+         case Version2() as version:
+             return f"[{_Prefixes.version2.value}]{version}"
+         case Version3() as version:
+             return f"[{_Prefixes.version3.value}]{version}"
          case YearMonth() as year_month:
              return f"[{_Prefixes.year_month.value}]{year_month}"
          case ZonedDateTime() as date_time:
@@ -409,7 +404,8 @@ class DeerializeError(Exception):
      _TIME_DELTA_PATTERN,
      _TIME_PERIOD_PATTERN,
      _UUID_PATTERN,
-     _VERSION_PATTERN,
+     _VERSION2_PATTERN,
+     _VERSION3_PATTERN,
      _YEAR_MONTH_PATTERN,
      _ZONED_DATE_TIME_PATTERN,
      _ZONED_DATE_TIME_PERIOD_PATTERN,
@@ -433,7 +429,8 @@ class DeerializeError(Exception):
      _Prefixes.time_delta,
      _Prefixes.time_period,
      _Prefixes.uuid,
-     _Prefixes.version,
+     _Prefixes.version2,
+     _Prefixes.version3,
      _Prefixes.year_month,
      _Prefixes.zoned_date_time,
      _Prefixes.zoned_date_time_period,
@@ -481,49 +478,51 @@ def _object_hook(
      if match := _NONE_PATTERN.search(text):
          return None
      if match := _DATE_PATTERN.search(text):
-         return Date.parse_common_iso(match.group(1))
+         return Date.parse_iso(match.group(1))
      if match := _DATE_DELTA_PATTERN.search(text):
-         return DateDelta.parse_common_iso(match.group(1))
+         return DateDelta.parse_iso(match.group(1))
      if match := _DATE_PERIOD_PATTERN.search(text):
-         start, end = map(Date.parse_common_iso, match.group(1).split(","))
+         start, end = map(Date.parse_iso, match.group(1).split(","))
          return DatePeriod(start, end)
      if match := _DATE_TIME_DELTA_PATTERN.search(text):
-         return DateTimeDelta.parse_common_iso(match.group(1))
+         return DateTimeDelta.parse_iso(match.group(1))
      if match := _FLOAT_PATTERN.search(text):
          return float(match.group(1))
      if match := _MONTH_DAY_PATTERN.search(text):
-         return MonthDay.parse_common_iso(match.group(1))
+         return MonthDay.parse_iso(match.group(1))
      if match := _PATH_PATTERN.search(text):
          return Path(match.group(1))
      if match := _PLAIN_DATE_TIME_PATTERN.search(text):
-         return PlainDateTime.parse_common_iso(match.group(1))
+         return PlainDateTime.parse_iso(match.group(1))
      if match := _PY_DATE_PATTERN.search(text):
-         return Date.parse_common_iso(match.group(1)).py_date()
+         return Date.parse_iso(match.group(1)).py_date()
      if match := _PY_PLAIN_DATE_TIME_PATTERN.search(text):
-         return PlainDateTime.parse_common_iso(match.group(1)).py_datetime()
+         return PlainDateTime.parse_iso(match.group(1)).py_datetime()
      if match := _PY_TIME_PATTERN.search(text):
-         return Time.parse_common_iso(match.group(1)).py_time()
+         return Time.parse_iso(match.group(1)).py_time()
      if match := _PY_ZONED_DATE_TIME_PATTERN.search(text):
-         return ZonedDateTime.parse_common_iso(match.group(1)).py_datetime()
+         return ZonedDateTime.parse_iso(match.group(1)).py_datetime()
      if match := _TIME_PATTERN.search(text):
-         return Time.parse_common_iso(match.group(1))
+         return Time.parse_iso(match.group(1))
      if match := _TIME_DELTA_PATTERN.search(text):
-         return TimeDelta.parse_common_iso(match.group(1))
+         return TimeDelta.parse_iso(match.group(1))
      if match := _TIME_PERIOD_PATTERN.search(text):
-         start, end = map(Time.parse_common_iso, match.group(1).split(","))
+         start, end = map(Time.parse_iso, match.group(1).split(","))
          return TimePeriod(start, end)
      if match := _UUID_PATTERN.search(text):
          return UUID(match.group(1))
-     if match := _VERSION_PATTERN.search(text):
-         return parse_version(match.group(1))
+     if match := _VERSION2_PATTERN.search(text):
+         return Version2.parse(match.group(1))
+     if match := _VERSION3_PATTERN.search(text):
+         return Version3.parse(match.group(1))
      if match := _YEAR_MONTH_PATTERN.search(text):
-         return YearMonth.parse_common_iso(match.group(1))
+         return YearMonth.parse_iso(match.group(1))
      if match := _ZONED_DATE_TIME_PATTERN.search(text):
-         return ZonedDateTime.parse_common_iso(match.group(1))
+         return ZonedDateTime.parse_iso(match.group(1))
      if match := _ZONED_DATE_TIME_PERIOD_PATTERN.search(text):
          start, end = match.group(1).split(",")
-         end = ZonedDateTime.parse_common_iso(end)
-         start = PlainDateTime.parse_common_iso(start).assume_tz(end.tz)
+         end = ZonedDateTime.parse_iso(end)
+         start = PlainDateTime.parse_iso(start).assume_tz(end.tz)
          return ZonedDateTimePeriod(start, end)
      if (
          exc_class := _object_hook_exception_class(
@@ -566,7 +565,7 @@ def _object_hook(
      ) is not None:
          return container
      if (
-         is_string_mapping(value)
+         is_str_mapping(value)
          and (
              dataclass := _object_hook_dataclass(
                  key,
@@ -587,7 +586,7 @@ def _object_hook(
      ) is not None:
          return enum
      if (
-         is_string_mapping(value)
+         is_str_mapping(value)
          and (
              exc_instance := _object_hook_exception_instance(
                  key, value, data=data, objects=objects, redirects=redirects
@@ -953,7 +952,7 @@ class GetLogRecordsOutput:
              for r in self.records
          ]
          if len(records) >= 1:
-             time_zone = one_unique(ZoneInfo(r.datetime.tz) for r in records)
+             time_zone = one({ZoneInfo(r.datetime.tz) for r in records})
          else:
              time_zone = LOCAL_TIME_ZONE
          return DataFrame(
@@ -1178,7 +1177,7 @@ def _get_log_records_one(
      path = Path(path)
      try:
          lines = path.read_text().splitlines()
-     except UnicodeDecodeError as error:  # skipif-ci-and-windows
+     except UnicodeDecodeError as error:
          return _GetLogRecordsOneOutput(path=path, file_ok=False, other_errors=[error])
      num_lines_blank, num_lines_error = 0, 0
      missing: set[str] = set()
@@ -1263,7 +1262,7 @@
      redirects: Mapping[str, type[Any]] | None = None,
  ) -> Any:
      """Read an object from disk."""
-     data = read_binary(path, decompress=decompress)
+     data = read_bytes(path, decompress=decompress)
      return deserialize(
          data, dataclass_hook=dataclass_hook, objects=objects, redirects=redirects
      )
@@ -1293,7 +1292,7 @@
          dataclass_hook=dataclass_hook,
          dataclass_defaults=dataclass_defaults,
      )
-     write_formatted_json(data, path, compress=compress, overwrite=overwrite)
+     write_bytes(path, data, compress=compress, overwrite=overwrite, json=True)


  __all__ = [