dycw-utilities 0.166.30__py3-none-any.whl → 0.185.8__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (96)
  1. dycw_utilities-0.185.8.dist-info/METADATA +33 -0
  2. dycw_utilities-0.185.8.dist-info/RECORD +90 -0
  3. {dycw_utilities-0.166.30.dist-info → dycw_utilities-0.185.8.dist-info}/WHEEL +1 -1
  4. {dycw_utilities-0.166.30.dist-info → dycw_utilities-0.185.8.dist-info}/entry_points.txt +1 -0
  5. utilities/__init__.py +1 -1
  6. utilities/altair.py +17 -10
  7. utilities/asyncio.py +50 -72
  8. utilities/atools.py +9 -11
  9. utilities/cachetools.py +16 -11
  10. utilities/click.py +76 -19
  11. utilities/concurrent.py +1 -1
  12. utilities/constants.py +492 -0
  13. utilities/contextlib.py +23 -30
  14. utilities/contextvars.py +1 -23
  15. utilities/core.py +2581 -0
  16. utilities/dataclasses.py +16 -119
  17. utilities/docker.py +387 -0
  18. utilities/enum.py +1 -1
  19. utilities/errors.py +2 -16
  20. utilities/fastapi.py +5 -5
  21. utilities/fpdf2.py +2 -1
  22. utilities/functions.py +34 -265
  23. utilities/http.py +2 -3
  24. utilities/hypothesis.py +84 -29
  25. utilities/importlib.py +17 -1
  26. utilities/iterables.py +39 -575
  27. utilities/jinja2.py +145 -0
  28. utilities/jupyter.py +5 -3
  29. utilities/libcst.py +1 -1
  30. utilities/lightweight_charts.py +4 -6
  31. utilities/logging.py +24 -24
  32. utilities/math.py +1 -36
  33. utilities/more_itertools.py +4 -6
  34. utilities/numpy.py +2 -1
  35. utilities/operator.py +2 -2
  36. utilities/orjson.py +42 -43
  37. utilities/os.py +4 -147
  38. utilities/packaging.py +129 -0
  39. utilities/parse.py +35 -15
  40. utilities/pathlib.py +3 -120
  41. utilities/platform.py +8 -90
  42. utilities/polars.py +38 -32
  43. utilities/postgres.py +37 -33
  44. utilities/pottery.py +20 -18
  45. utilities/pqdm.py +3 -4
  46. utilities/psutil.py +2 -3
  47. utilities/pydantic.py +25 -0
  48. utilities/pydantic_settings.py +87 -16
  49. utilities/pydantic_settings_sops.py +16 -3
  50. utilities/pyinstrument.py +4 -4
  51. utilities/pytest.py +96 -125
  52. utilities/pytest_plugins/pytest_regressions.py +2 -2
  53. utilities/pytest_regressions.py +32 -11
  54. utilities/random.py +2 -8
  55. utilities/redis.py +98 -94
  56. utilities/reprlib.py +11 -118
  57. utilities/shellingham.py +66 -0
  58. utilities/shutil.py +25 -0
  59. utilities/slack_sdk.py +13 -12
  60. utilities/sqlalchemy.py +57 -30
  61. utilities/sqlalchemy_polars.py +16 -25
  62. utilities/subprocess.py +2590 -0
  63. utilities/tabulate.py +32 -0
  64. utilities/testbook.py +8 -8
  65. utilities/text.py +24 -99
  66. utilities/throttle.py +159 -0
  67. utilities/time.py +18 -0
  68. utilities/timer.py +31 -14
  69. utilities/traceback.py +16 -23
  70. utilities/types.py +42 -2
  71. utilities/typing.py +26 -14
  72. utilities/uuid.py +1 -1
  73. utilities/version.py +202 -45
  74. utilities/whenever.py +53 -150
  75. dycw_utilities-0.166.30.dist-info/METADATA +0 -41
  76. dycw_utilities-0.166.30.dist-info/RECORD +0 -98
  77. dycw_utilities-0.166.30.dist-info/licenses/LICENSE +0 -21
  78. utilities/aeventkit.py +0 -388
  79. utilities/atomicwrites.py +0 -182
  80. utilities/cryptography.py +0 -41
  81. utilities/getpass.py +0 -8
  82. utilities/git.py +0 -19
  83. utilities/gzip.py +0 -31
  84. utilities/json.py +0 -70
  85. utilities/pickle.py +0 -25
  86. utilities/re.py +0 -156
  87. utilities/sentinel.py +0 -73
  88. utilities/socket.py +0 -8
  89. utilities/string.py +0 -20
  90. utilities/tempfile.py +0 -77
  91. utilities/typed_settings.py +0 -152
  92. utilities/tzdata.py +0 -11
  93. utilities/tzlocal.py +0 -28
  94. utilities/warnings.py +0 -65
  95. utilities/zipfile.py +0 -25
  96. utilities/zoneinfo.py +0 -133
utilities/pydantic_settings.py CHANGED
@@ -2,10 +2,12 @@ from __future__ import annotations
 
 from functools import reduce
 from pathlib import Path
-from typing import TYPE_CHECKING, Any, ClassVar, assert_never, override
+from typing import TYPE_CHECKING, Any, ClassVar, assert_never, cast, override
 
+from pydantic import Field, create_model
 from pydantic_settings import (
     BaseSettings,
+    CliSettingsSource,
     JsonConfigSettingsSource,
     PydanticBaseSettingsSource,
     SettingsConfigDict,
@@ -14,14 +16,15 @@ from pydantic_settings import (
 )
 from pydantic_settings.sources import DEFAULT_PATH
 
-from utilities.iterables import always_iterable
+from utilities.core import always_iterable
+from utilities.errors import ImpossibleCaseError
 
 if TYPE_CHECKING:
     from collections.abc import Iterator, Sequence
 
     from pydantic_settings.sources import PathType
 
-    from utilities.types import MaybeSequenceStr, PathLike
+    from utilities.types import MaybeSequenceStr, PathLike, StrDict
 
 
 type PathLikeWithSection = tuple[PathLike, MaybeSequenceStr]
@@ -29,7 +32,7 @@ type PathLikeOrWithSection = PathLike | PathLikeWithSection
 
 
 class CustomBaseSettings(BaseSettings):
-    """Base settings for loading JSON files."""
+    """Base settings for loading JSON/TOML/YAML files."""
 
     # paths
     json_files: ClassVar[Sequence[PathLikeOrWithSection]] = []
@@ -38,7 +41,7 @@ class CustomBaseSettings(BaseSettings):
 
     # config
     model_config: ClassVar[SettingsConfigDict] = SettingsConfigDict(
-        env_nested_delimiter="__"
+        frozen=True, env_nested_delimiter="__"
     )
 
     @classmethod
@@ -76,11 +79,6 @@ class CustomBaseSettings(BaseSettings):
         )
 
 
-def load_settings[T: BaseSettings](cls: type[T], /) -> T:
-    """Load a set of settings."""
-    return cls()
-
-
 class JsonConfigSectionSettingsSource(JsonConfigSettingsSource):
     @override
     def __init__(
@@ -97,7 +95,7 @@ class JsonConfigSectionSettingsSource(JsonConfigSettingsSource):
         self.section = section
 
     @override
-    def __call__(self) -> dict[str, Any]:
+    def __call__(self) -> StrDict:
         return _get_section(super().__call__(), self.section)
 
 
@@ -114,7 +112,7 @@ class TomlConfigSectionSettingsSource(TomlConfigSettingsSource):
         self.section = section
 
    @override
-    def __call__(self) -> dict[str, Any]:
+    def __call__(self) -> StrDict:
         return _get_section(super().__call__(), self.section)
 
 
@@ -138,7 +136,7 @@ class YamlConfigSectionSettingsSource(YamlConfigSettingsSource):
         self.section = section
 
    @override
-    def __call__(self) -> dict[str, Any]:
+    def __call__(self) -> StrDict:
         return _get_section(super().__call__(), self.section)
 
 
@@ -152,14 +150,87 @@ def _ensure_section(file: PathLikeOrWithSection, /) -> PathLikeWithSection:
             assert_never(never)
 
 
-def _get_section(
-    mapping: dict[str, Any], section: MaybeSequenceStr, /
-) -> dict[str, Any]:
+def _get_section(mapping: StrDict, section: MaybeSequenceStr, /) -> StrDict:
     return reduce(lambda acc, el: acc.get(el, {}), always_iterable(section), mapping)
 
 
+##
+
+
+class HashableBaseSettings(BaseSettings):
+    """Base settings for loading JSON files."""
+
+    # config
+    model_config: ClassVar[SettingsConfigDict] = SettingsConfigDict(frozen=True)
+
+
+##
+
+
+def load_settings[T: BaseSettings](cls: type[T], /, *, cli: bool = False) -> T:
+    """Load a set of settings."""
+    _ = cls.model_rebuild()
+    if cli:
+        cls_with_defaults = _load_settings_create_model(cls)
+
+        @classmethod
+        def settings_customise_sources(
+            cls: type[BaseSettings],
+            settings_cls: type[BaseSettings],
+            init_settings: PydanticBaseSettingsSource,
+            env_settings: PydanticBaseSettingsSource,
+            dotenv_settings: PydanticBaseSettingsSource,
+            file_secret_settings: PydanticBaseSettingsSource,
+        ) -> tuple[PydanticBaseSettingsSource, ...]:
+            parent = cast(
+                "Any", super(cls_with_defaults, cls)
+            ).settings_customise_sources(
+                settings_cls=settings_cls,
+                init_settings=init_settings,
+                env_settings=env_settings,
+                dotenv_settings=dotenv_settings,
+                file_secret_settings=file_secret_settings,
+            )
+            return (
+                CliSettingsSource(
+                    settings_cls, cli_parse_args=True, case_sensitive=False
+                ),
+                *parent,
+            )
+
+        cls_use = type(
+            cls.__name__,
+            (cls_with_defaults,),
+            {"settings_customise_sources": settings_customise_sources},
+        )
+        cls_use = cast("type[T]", cls_use)
+    else:
+        cls_use = cls
+    return cls_use()
+
+
+def _load_settings_create_model[T: BaseSettings](
+    cls: type[T], /, *, values: T | None = None
+) -> type[T]:
+    values_use = cls() if values is None else values
+    kwargs: StrDict = {}
+    for name, field in cls.model_fields.items():
+        if (ann := field.annotation) is None:
+            raise ImpossibleCaseError(case=[f"{ann=}"])  # pragma: no cover
+        value = getattr(values_use, name)
+        if (
+            isinstance(cast("Any", ann), type)  # 'ann' is possible not a type
+            and issubclass(ann, BaseSettings)
+        ):
+            kwargs[name] = _load_settings_create_model(ann, values=value)
+        else:
+            kwargs[name] = (field.annotation, Field(default=value))
+    return create_model(cls.__name__, __base__=cls, **kwargs)
+
+
 __all__ = [
     "CustomBaseSettings",
+    "HashableBaseSettings",
     "JsonConfigSectionSettingsSource",
     "TomlConfigSectionSettingsSource",
     "YamlConfigSectionSettingsSource",
utilities/pydantic_settings_sops.py CHANGED
@@ -1,6 +1,8 @@
 from __future__ import annotations
 
-from typing import TYPE_CHECKING, Any, ClassVar, override
+from logging import Filter, LogRecord, getLogger
+from re import search
+from typing import TYPE_CHECKING, ClassVar, override
 
 from pydantic_settings.sources import DEFAULT_PATH
 from pydantic_settings_sops import SOPSConfigSettingsSource
@@ -18,7 +20,18 @@ if TYPE_CHECKING:
     from pydantic_settings import BaseSettings, PydanticBaseSettingsSource
     from pydantic_settings.sources import PathType
 
-    from utilities.types import MaybeSequenceStr
+    from utilities.types import MaybeSequenceStr, StrDict
+
+
+class _SuppressDefaultConfigMessage(Filter):
+    @override
+    def filter(self, record: LogRecord) -> bool:
+        return not search(
+            r"^default config file does not exists '.*'$", record.getMessage()
+        )
+
+
+getLogger("sopsy.utils").addFilter(_SuppressDefaultConfigMessage())
 
 
 class SopsBaseSettings(CustomBaseSettings):
@@ -56,7 +69,7 @@ class SOPSConfigSectionSettingsSource(SOPSConfigSettingsSource):
         self.section = section
 
     @override
-    def __call__(self) -> dict[str, Any]:
+    def __call__(self) -> StrDict:
         return _get_section(super().__call__(), self.section)
 
 
utilities/pyinstrument.py CHANGED
@@ -6,9 +6,9 @@ from typing import TYPE_CHECKING
 
 from pyinstrument.profiler import Profiler
 
-from utilities.atomicwrites import writer
+from utilities.core import get_now_local, write_text
 from utilities.pathlib import to_path
-from utilities.whenever import format_compact, get_now_local
+from utilities.whenever import format_compact
 
 if TYPE_CHECKING:
     from collections.abc import Iterator
@@ -24,8 +24,8 @@ def profile(path: MaybeCallablePathLike = Path.cwd, /) -> Iterator[None]:
     filename = to_path(path).joinpath(
         f"profile__{format_compact(get_now_local(), path=True)}.html"
     )
-    with writer(filename) as temp:
-        _ = temp.write_text(profiler.output_html())
+    text = profiler.output_html()
+    write_text(filename, text, overwrite=True)
 
 
 __all__ = ["profile"]
utilities/pytest.py CHANGED
@@ -1,63 +1,86 @@
 from __future__ import annotations
 
+from collections.abc import Callable
 from dataclasses import dataclass
 from functools import partial, wraps
 from inspect import iscoroutinefunction
 from os import environ
 from pathlib import Path
-from typing import TYPE_CHECKING, Any, assert_never, cast, override
+from re import sub
+from types import FunctionType
+from typing import TYPE_CHECKING, Any, NoReturn, assert_never, cast, override
 
-from whenever import ZonedDateTime
-
-from utilities.atomicwrites import writer
+from utilities.constants import (
+    IS_CI,
+    IS_CI_AND_NOT_LINUX,
+    IS_LINUX,
+    IS_MAC,
+    IS_NOT_LINUX,
+    IS_NOT_MAC,
+    SECOND,
+)
 from utilities.functools import cache
 from utilities.hashlib import md5_hash
-from utilities.os import get_env_var
 from utilities.pathlib import (
     _GetTailEmptyError,
     ensure_suffix,
-    get_root,
+    get_repo_root,
     get_tail,
     module_path,
 )
-from utilities.platform import (
-    IS_LINUX,
-    IS_MAC,
-    IS_NOT_LINUX,
-    IS_NOT_MAC,
-    IS_NOT_WINDOWS,
-    IS_WINDOWS,
-)
 from utilities.random import bernoulli
 from utilities.text import to_bool
-from utilities.types import MaybeCallableBoolLike, MaybeCoro, Seed
-from utilities.whenever import SECOND, get_now_local
+from utilities.throttle import throttle
+from utilities.types import (
+    Coro,
+    Duration,
+    MaybeCallableBoolLike,
+    MaybeCoro,
+    PathLike,
+    Seed,
+)
 
 if TYPE_CHECKING:
-    from collections.abc import Callable, Iterable
-
-    from utilities.types import Coro, Delta, PathLike
+    from collections.abc import Iterable
 
-try:  # WARNING: this package cannot use unguarded `pytest` imports
     from _pytest.config import Config
     from _pytest.config.argparsing import Parser
     from _pytest.python import Function
-    from pytest import mark, skip
+
+    from utilities.types import PathLike
+
+
+try:  # WARNING: this package cannot use unguarded `pytest` imports
+    from pytest import mark
 except ModuleNotFoundError:  # pragma: no cover
-    from typing import Any as Config
-    from typing import Any as Function
-    from typing import Any as Parser
 
-    mark = skip = skipif_windows = skipif_mac = skipif_linux = skipif_not_windows = (
-        skipif_not_mac
-    ) = skipif_not_linux = None
+    def skipif_ci[F: Callable](func: F) -> F:
+        return func
+
+    def skipif_mac[F: Callable](func: F) -> F:
+        return func
+
+    def skipif_linux[F: Callable](func: F) -> F:
+        return func
+
+    def skipif_not_mac[F: Callable](func: F) -> F:
+        return func
+
+    def skipif_not_linux[F: Callable](func: F) -> F:
+        return func
+
+    def skipif_ci_and_not_linux[F: Callable](func: F) -> F:
+        return func
+
 else:
-    skipif_windows = mark.skipif(IS_WINDOWS, reason="Skipped for Windows")
+    skipif_ci = mark.skipif(IS_CI, reason="Skipped for CI")
     skipif_mac = mark.skipif(IS_MAC, reason="Skipped for Mac")
     skipif_linux = mark.skipif(IS_LINUX, reason="Skipped for Linux")
-    skipif_not_windows = mark.skipif(IS_NOT_WINDOWS, reason="Skipped for non-Windows")
     skipif_not_mac = mark.skipif(IS_NOT_MAC, reason="Skipped for non-Mac")
     skipif_not_linux = mark.skipif(IS_NOT_LINUX, reason="Skipped for non-Linux")
+    skipif_ci_and_not_linux = mark.skipif(
+        IS_CI_AND_NOT_LINUX, reason="Skipped for CI/non-Linux"
+    )
 
 
 def add_pytest_addoption(parser: Parser, options: list[str], /) -> None:
@@ -90,6 +113,8 @@ def add_pytest_collection_modifyitems(
     def pytest_collection_modifyitems(config, items):
         add_pytest_collection_modifyitems(config, items, ["slow"])
     """
+    from pytest import mark
+
     options = list(options)
     missing = {opt for opt in options if not config.getoption(f"--{opt}")}
     for item in items:
@@ -119,6 +144,15 @@ def add_pytest_configure(config: Config, options: Iterable[tuple[str, str]], /)
 ##
 
 
+def make_ids(obj: Any, /) -> str:
+    if isinstance(obj, FunctionType):  # pragma: no cover
+        return sub(r"\s+at +0x[0-9a-fA-F]+", "", repr(obj))
+    return repr(obj)  # pragma: no cover
+
+
+##
+
+
 def node_id_path(
     node_id: str, /, *, root: PathLike | None = None, suffix: str | None = None
 ) -> Path:
@@ -168,7 +202,7 @@ class _NodeIdToPathNotGetTailError(NodeIdToPathError):
 ##
 
 
-def run_frac[F: Callable[..., MaybeCoro[None]]](
+def run_test_frac[F: Callable[..., MaybeCoro[None]]](
     *,
     predicate: MaybeCallableBoolLike | None = None,
     frac: float = 0.5,
@@ -176,11 +210,11 @@ def run_frac[F: Callable[..., MaybeCoro[None]]](
 ) -> Callable[[F], F]:
     """Run a test only a fraction of the time.."""
     return cast(
-        "Any", partial(_run_frac_inner, predicate=predicate, frac=frac, seed=seed)
+        "Any", partial(_run_test_frac_inner, predicate=predicate, frac=frac, seed=seed)
     )
 
 
-def _run_frac_inner[F: Callable[..., MaybeCoro[None]]](
+def _run_test_frac_inner[F: Callable[..., MaybeCoro[None]]](
     func: F,
     /,
     *,
@@ -217,132 +251,69 @@ def _skipif_frac(
     frac: float = 0.5,
     seed: Seed | None = None,
 ) -> None:
-    if skip is None:
-        return  # pragma: no cover
+    from pytest import skip
+
     if ((predicate is None) or to_bool(predicate)) and bernoulli(
         true=1 - frac, seed=seed
     ):
-        _ = skip(reason=f"{_get_name()} skipped (run {frac:.0%})")
+        skip(reason=f"{_get_name()} skipped (run {frac:.0%})")
 
 
 ##
 
 
-def throttle[F: Callable[..., MaybeCoro[None]]](
-    *, root: PathLike | None = None, delta: Delta = SECOND, on_try: bool = False
+def throttle_test[F: Callable[..., MaybeCoro[None]]](
+    *, on_try: bool = False, root: PathLike | None = None, duration: Duration = SECOND
 ) -> Callable[[F], F]:
     """Throttle a test. On success by default, on try otherwise."""
-    return cast("Any", partial(_throttle_inner, root=root, delta=delta, on_try=on_try))
-
-
-def _throttle_inner[F: Callable[..., MaybeCoro[None]]](
-    func: F,
-    /,
-    *,
-    root: PathLike | None = None,
-    delta: Delta = SECOND,
-    on_try: bool = False,
-) -> F:
-    if get_env_var("THROTTLE", nullable=True) is not None:
-        return func
-    match bool(iscoroutinefunction(func)), on_try:
-        case False, False:
-
-            @wraps(func)
-            def throttle_sync_on_pass(*args: Any, **kwargs: Any) -> None:
-                _skipif_recent(root=root, delta=delta)
-                cast("Callable[..., None]", func)(*args, **kwargs)
-                _write(root)
-
-            return cast("Any", throttle_sync_on_pass)
-
-        case False, True:
-
-            @wraps(func)
-            def throttle_sync_on_try(*args: Any, **kwargs: Any) -> None:
-                _skipif_recent(root=root, delta=delta)
-                _write(root)
-                cast("Callable[..., None]", func)(*args, **kwargs)
-
-            return cast("Any", throttle_sync_on_try)
+    return throttle(
+        on_try=on_try,
+        duration=duration,
+        path=partial(_get_test_path, root=root),
+        raiser=_run_skip,
+    )
 
-        case True, False:
 
-            @wraps(func)
-            async def throttle_async_on_pass(*args: Any, **kwargs: Any) -> None:
-                _skipif_recent(root=root, delta=delta)
-                await cast("Callable[..., Coro[None]]", func)(*args, **kwargs)
-                _write(root)
+def _run_skip() -> NoReturn:
+    from pytest import skip
 
-            return cast("Any", throttle_async_on_pass)
+    skip(reason=f"{_get_name()} throttled")
 
-        case True, True:
 
-            @wraps(func)
-            async def throttle_async_on_try(*args: Any, **kwargs: Any) -> None:
-                _skipif_recent(root=root, delta=delta)
-                _write(root)
-                await cast("Callable[..., Coro[None]]", func)(*args, **kwargs)
+def _get_name() -> str:
+    return environ["PYTEST_CURRENT_TEST"]
 
-            return cast("Any", throttle_async_on_try)
 
-        case never:
-            assert_never(never)
+@cache
+def _md5_hash_cached(text: str, /) -> str:
+    return md5_hash(text)
 
 
-def _skipif_recent(*, root: PathLike | None = None, delta: Delta = SECOND) -> None:
-    if skip is None:
-        return  # pragma: no cover
-    path = _get_path(root)
-    try:
-        contents = path.read_text()
-    except FileNotFoundError:
-        return
-    try:
-        last = ZonedDateTime.parse_common_iso(contents)
-    except ValueError:
-        return
-    now = get_now_local()
-    if (now - delta) < last:
-        age = now - last
-        _ = skip(reason=f"{_get_name()} throttled (age {age})")
-
-
-def _get_path(root: PathLike | None = None, /) -> Path:
+def _get_test_path(*, root: PathLike | None = None) -> Path:
     if root is None:
-        root_use = get_root().joinpath(".pytest_cache", "throttle")  # pragma: no cover
+        root_use = get_repo_root().joinpath(
+            ".pytest_cache", "throttle"
+        )  # pragma: no cover
     else:
         root_use = root
     return Path(root_use, _md5_hash_cached(_get_name()))
 
 
-@cache
-def _md5_hash_cached(text: str, /) -> str:
-    return md5_hash(text)
-
-
-def _get_name() -> str:
-    return environ["PYTEST_CURRENT_TEST"]
-
-
-def _write(root: PathLike | None = None, /) -> None:
-    path = _get_path(root)
-    with writer(path, overwrite=True) as temp:
-        _ = temp.write_text(get_now_local().format_common_iso())
-
-
 __all__ = [
+    "IS_CI",
+    "IS_CI_AND_NOT_LINUX",
     "NodeIdToPathError",
     "add_pytest_addoption",
     "add_pytest_collection_modifyitems",
     "add_pytest_configure",
+    "make_ids",
     "node_id_path",
-    "run_frac",
+    "run_test_frac",
+    "skipif_ci",
+    "skipif_ci_and_not_linux",
     "skipif_linux",
     "skipif_mac",
     "skipif_not_linux",
     "skipif_not_mac",
-    "skipif_not_windows",
-    "skipif_windows",
-    "throttle",
+    "throttle_test",
 ]
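
To make the renamed helpers concrete, here is a sketch of how they might decorate tests. The test bodies are hypothetical; the decorator names and keyword arguments (`frac`, `on_try`, `duration`) come from the signatures above.

```python
# Hedged sketch of the renamed pytest helpers; intended to be run under pytest.
from utilities.pytest import run_test_frac, skipif_ci, throttle_test


@skipif_ci  # mark.skipif(IS_CI, ...): skipped whenever IS_CI is true
def test_local_only() -> None:
    assert True


@run_test_frac(frac=0.25)  # runs roughly 25% of the time, otherwise skipped
def test_expensive_property() -> None:
    assert True


@throttle_test()  # skipped if it already succeeded within the default SECOND
def test_rate_limited_service() -> None:
    assert True
```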
utilities/pytest_plugins/pytest_regressions.py CHANGED
@@ -40,7 +40,7 @@ else:
 
 
 def _get_path(request: FixtureRequest, /) -> Path:
-    from utilities.pathlib import get_root
+    from utilities.pathlib import get_repo_root
     from utilities.pytest import _NodeIdToPathNotGetTailError, node_id_path
 
     path = Path(cast("Any", request).fspath)
@@ -50,7 +50,7 @@ def _get_path(request: FixtureRequest, /) -> Path:
     except _NodeIdToPathNotGetTailError:
         root = Path("tests")
         tail = node_id_path(request.node.nodeid, root=root)
-    return get_root(path).joinpath(root, "regressions", tail)
+    return get_repo_root(path).joinpath(root, "regressions", tail)
 
 
 __all__ = ["orjson_regression", "polars_regression"]
utilities/pytest_regressions.py CHANGED
@@ -1,13 +1,15 @@
 from __future__ import annotations
 
 from contextlib import suppress
+from dataclasses import dataclass
 from json import loads
 from pathlib import Path
-from shutil import copytree
-from typing import TYPE_CHECKING, Any, assert_never
+from typing import TYPE_CHECKING, Any, assert_never, override
 
+from pytest_datadir.plugin import LazyDataDir
 from pytest_regressions.file_regression import FileRegressionFixture
 
+from utilities.core import _CopyOrMoveSourceNotFoundError, copy, repr_
 from utilities.functions import ensure_str
 from utilities.operator import is_equal
 
@@ -31,10 +33,12 @@ class OrjsonRegressionFixture:
         path = Path(path)
         original_datadir = path.parent
         data_dir = tmp_path.joinpath(ensure_str(request.fixturename))
-        with suppress(FileNotFoundError):
-            _ = copytree(original_datadir, data_dir)
+        with suppress(_CopyOrMoveSourceNotFoundError):
+            copy(original_datadir, data_dir, overwrite=True)
         self._fixture = FileRegressionFixture(
-            datadir=data_dir, original_datadir=original_datadir, request=request
+            datadir=LazyDataDir(original_datadir=original_datadir, tmp_path=data_dir),
+            original_datadir=original_datadir,
+            request=request,
         )
         self._basename = path.name
 
@@ -70,10 +74,28 @@ class OrjsonRegressionFixture:
             check_fn=self._check_fn,
         )
 
-    def _check_fn(self, path1: Path, path2: Path, /) -> None:
-        left = loads(path1.read_text())
-        right = loads(path2.read_text())
-        assert is_equal(left, right), f"{left=}, {right=}"
+    def _check_fn(self, path_obtained: Path, path_existing: Path, /) -> None:
+        obtained = loads(path_obtained.read_text())
+        existing = loads(path_existing.read_text())
+        if not is_equal(obtained, existing):
+            raise OrjsonRegressionError(
+                path_obtained=path_obtained,
+                path_existing=path_existing,
+                obtained=obtained,
+                existing=existing,
+            )
+
+
+@dataclass(kw_only=True, slots=True)
+class OrjsonRegressionError(Exception):
+    path_obtained: Path
+    path_existing: Path
+    obtained: Any
+    existing: Any
+
+    @override
+    def __str__(self) -> str:
+        return f"Obtained object (at {str(self.path_obtained)!r}) and existing object (at {str(self.path_existing)!r}) differ; got {repr_(self.obtained)} and {repr_(self.existing)}"
 
 
 ##
@@ -97,7 +119,6 @@ class PolarsRegressionFixture:
             "describe": obj.describe(percentiles=[i / 10 for i in range(1, 10)]).rows(
                 named=True
             ),
-            "estimated_size": obj.estimated_size(),
             "is_empty": obj.is_empty(),
             "n_unique": obj.n_unique(),
         }
@@ -115,7 +136,7 @@ class PolarsRegressionFixture:
                     col(column).approx_n_unique()
                 ).item()
                 data["approx_n_unique"] = approx_n_unique
-                data["glimpse"] = df.glimpse(return_as_string=True)
+                data["glimpse"] = df.glimpse(return_type="string")
                 data["null_count"] = df.null_count().row(0, named=True)
             case never:
                 assert_never(never)
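
A hypothetical test using the `orjson_regression` fixture exported by the plugin module earlier in this diff. The `.check(...)` call is an assumption about the fixture's interface rather than something shown here; it only illustrates where the new `OrjsonRegressionError` would surface when the obtained and existing JSON differ.

```python
# Hedged sketch; the fixture's .check(...) method is assumed, not shown above.
def test_payload_regression(orjson_regression) -> None:
    payload = {"id": 1, "tags": ["a", "b"]}
    orjson_regression.check(payload)  # would raise OrjsonRegressionError on mismatch
```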
utilities/random.py CHANGED
@@ -1,6 +1,6 @@
 from __future__ import annotations
 
-from random import Random, SystemRandom
+from random import Random
 from typing import TYPE_CHECKING
 
 from utilities.functools import cache
@@ -11,12 +11,6 @@ if TYPE_CHECKING:
     from utilities.types import Seed
 
 
-SYSTEM_RANDOM = SystemRandom()
-
-
-##
-
-
 def bernoulli(*, true: float = 0.5, seed: Seed | None = None) -> bool:
     """Return a Bernoulli random variate."""
     state = get_state(seed)
@@ -66,4 +60,4 @@ def shuffle[T](iterable: Iterable[T], /, *, seed: Seed | None = None) -> list[T]
     return copy
 
 
-__all__ = ["SYSTEM_RANDOM", "bernoulli", "get_docker_name", "get_state", "shuffle"]
+__all__ = ["bernoulli", "get_docker_name", "get_state", "shuffle"]
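
For completeness, a small sketch of `bernoulli` from this module. The keywords `true` and `seed` appear in the signature above; treating `true` as the probability of a `True` draw is consistent with the `bernoulli(true=1 - frac, ...)` call in utilities/pytest earlier in this diff.

```python
# Hedged sketch of bernoulli(); `true` is the probability of returning True and
# `seed` selects the underlying random state used for the draw.
from utilities.random import bernoulli

flip = bernoulli(true=0.7, seed=42)
print(flip)
```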