dycw-utilities 0.175.17__py3-none-any.whl → 0.185.8__py3-none-any.whl
- dycw_utilities-0.185.8.dist-info/METADATA +33 -0
- dycw_utilities-0.185.8.dist-info/RECORD +90 -0
- {dycw_utilities-0.175.17.dist-info → dycw_utilities-0.185.8.dist-info}/WHEEL +2 -2
- utilities/__init__.py +1 -1
- utilities/altair.py +8 -6
- utilities/asyncio.py +40 -56
- utilities/atools.py +9 -11
- utilities/cachetools.py +8 -6
- utilities/click.py +4 -3
- utilities/concurrent.py +1 -1
- utilities/constants.py +492 -0
- utilities/contextlib.py +23 -30
- utilities/contextvars.py +1 -23
- utilities/core.py +2581 -0
- utilities/dataclasses.py +16 -119
- utilities/docker.py +139 -45
- utilities/enum.py +1 -1
- utilities/errors.py +2 -16
- utilities/fastapi.py +5 -5
- utilities/fpdf2.py +2 -1
- utilities/functions.py +33 -264
- utilities/http.py +2 -3
- utilities/hypothesis.py +48 -25
- utilities/iterables.py +39 -575
- utilities/jinja2.py +3 -6
- utilities/jupyter.py +5 -3
- utilities/libcst.py +1 -1
- utilities/lightweight_charts.py +4 -6
- utilities/logging.py +17 -15
- utilities/math.py +1 -36
- utilities/more_itertools.py +4 -6
- utilities/numpy.py +2 -1
- utilities/operator.py +2 -2
- utilities/orjson.py +24 -25
- utilities/os.py +4 -185
- utilities/packaging.py +129 -0
- utilities/parse.py +33 -13
- utilities/pathlib.py +2 -136
- utilities/platform.py +8 -90
- utilities/polars.py +34 -31
- utilities/postgres.py +9 -4
- utilities/pottery.py +20 -18
- utilities/pqdm.py +3 -4
- utilities/psutil.py +2 -3
- utilities/pydantic.py +18 -4
- utilities/pydantic_settings.py +7 -9
- utilities/pydantic_settings_sops.py +3 -3
- utilities/pyinstrument.py +4 -4
- utilities/pytest.py +49 -108
- utilities/pytest_plugins/pytest_regressions.py +2 -2
- utilities/pytest_regressions.py +8 -6
- utilities/random.py +2 -8
- utilities/redis.py +98 -94
- utilities/reprlib.py +11 -118
- utilities/shellingham.py +66 -0
- utilities/slack_sdk.py +13 -12
- utilities/sqlalchemy.py +42 -30
- utilities/sqlalchemy_polars.py +16 -25
- utilities/subprocess.py +1166 -148
- utilities/tabulate.py +32 -0
- utilities/testbook.py +8 -8
- utilities/text.py +24 -115
- utilities/throttle.py +159 -0
- utilities/time.py +18 -0
- utilities/timer.py +29 -12
- utilities/traceback.py +15 -22
- utilities/types.py +38 -3
- utilities/typing.py +18 -12
- utilities/uuid.py +1 -1
- utilities/version.py +202 -45
- utilities/whenever.py +22 -150
- dycw_utilities-0.175.17.dist-info/METADATA +0 -34
- dycw_utilities-0.175.17.dist-info/RECORD +0 -103
- utilities/atomicwrites.py +0 -182
- utilities/cryptography.py +0 -41
- utilities/getpass.py +0 -8
- utilities/git.py +0 -19
- utilities/grp.py +0 -28
- utilities/gzip.py +0 -31
- utilities/json.py +0 -70
- utilities/permissions.py +0 -298
- utilities/pickle.py +0 -25
- utilities/pwd.py +0 -28
- utilities/re.py +0 -156
- utilities/sentinel.py +0 -73
- utilities/socket.py +0 -8
- utilities/string.py +0 -20
- utilities/tempfile.py +0 -136
- utilities/tzdata.py +0 -11
- utilities/tzlocal.py +0 -28
- utilities/warnings.py +0 -65
- utilities/zipfile.py +0 -25
- utilities/zoneinfo.py +0 -133
- {dycw_utilities-0.175.17.dist-info → dycw_utilities-0.185.8.dist-info}/entry_points.txt +0 -0
utilities/dataclasses.py
CHANGED
@@ -2,17 +2,27 @@ from __future__ import annotations
 
 from collections.abc import Mapping
 from contextlib import suppress
-from dataclasses import MISSING, dataclass, field, fields
-from typing import TYPE_CHECKING, Any,
+from dataclasses import MISSING, dataclass, field, fields
+from typing import TYPE_CHECKING, Any, assert_never, overload, override
 
-from utilities.
-
-
+from utilities.constants import (
+    BRACKETS,
+    LIST_SEPARATOR,
+    PAIR_SEPARATOR,
+    Sentinel,
+    sentinel,
+)
+from utilities.core import (
+    ExtractGroupError,
     OneStrEmptyError,
     OneStrNonUniqueError,
-
+    extract_group,
+    get_class_name,
+    is_sentinel,
     one_str,
 )
+from utilities.errors import ImpossibleCaseError
+from utilities.iterables import cmp_nullable
 from utilities.operator import is_equal
 from utilities.parse import (
     _ParseObjectExtraNonUniqueError,
@@ -20,12 +30,7 @@ from utilities.parse import (
     parse_object,
     serialize_object,
 )
-from utilities.re import ExtractGroupError, extract_group
-from utilities.sentinel import Sentinel, is_sentinel, sentinel
 from utilities.text import (
-    BRACKETS,
-    LIST_SEPARATOR,
-    PAIR_SEPARATOR,
     _SplitKeyValuePairsDuplicateKeysError,
     _SplitKeyValuePairsSplitError,
     split_key_value_pairs,
@@ -46,85 +51,6 @@ if TYPE_CHECKING:
 )
 
 
-def dataclass_repr[T](
-    obj: Dataclass,
-    /,
-    *,
-    globalns: StrMapping | None = None,
-    localns: StrMapping | None = None,
-    warn_name_errors: bool = False,
-    include: Iterable[str] | None = None,
-    exclude: Iterable[str] | None = None,
-    rel_tol: float | None = None,
-    abs_tol: float | None = None,
-    extra: Mapping[type[T], Callable[[T, T], bool]] | None = None,
-    defaults: bool = False,
-    recursive: bool = False,
-) -> str:
-    """Repr a dataclass, without its defaults."""
-    out: dict[str, str] = {}
-    for fld in yield_fields(
-        obj, globalns=globalns, localns=localns, warn_name_errors=warn_name_errors
-    ):
-        if (
-            fld.keep(
-                include=include,
-                exclude=exclude,
-                rel_tol=rel_tol,
-                abs_tol=abs_tol,
-                extra=extra,
-                defaults=defaults,
-            )
-            and fld.repr
-        ):
-            if recursive:
-                if is_dataclass_instance(fld.value):
-                    repr_ = dataclass_repr(
-                        fld.value,
-                        globalns=globalns,
-                        localns=localns,
-                        warn_name_errors=warn_name_errors,
-                        include=include,
-                        exclude=exclude,
-                        rel_tol=rel_tol,
-                        abs_tol=abs_tol,
-                        extra=extra,
-                        defaults=defaults,
-                        recursive=recursive,
-                    )
-                elif isinstance(fld.value, list):
-                    repr_ = [
-                        dataclass_repr(
-                            v,
-                            globalns=globalns,
-                            localns=localns,
-                            warn_name_errors=warn_name_errors,
-                            include=include,
-                            exclude=exclude,
-                            rel_tol=rel_tol,
-                            abs_tol=abs_tol,
-                            extra=extra,
-                            defaults=defaults,
-                            recursive=recursive,
-                        )
-                        if is_dataclass_instance(v)
-                        else repr(v)
-                        for v in fld.value
-                    ]
-                    repr_ = f"[{', '.join(repr_)}]"
-                else:
-                    repr_ = repr(fld.value)
-            else:
-                repr_ = repr(fld.value)
-            out[fld.name] = repr_
-    cls = get_class_name(obj)
-    joined = ", ".join(f"{k}={v}" for k, v in out.items())
-    return f"{cls}({joined})"
-
-
-##
-
-
 def dataclass_to_dict[T](
     obj: Dataclass,
     /,
@@ -411,33 +337,6 @@ class _OneFieldNonUniqueError(OneFieldError):
 ##
 
 
-@overload
-def replace_non_sentinel(
-    obj: Dataclass, /, *, in_place: Literal[True], **kwargs: Any
-) -> None: ...
-@overload
-def replace_non_sentinel[T: Dataclass](
-    obj: T, /, *, in_place: Literal[False] = False, **kwargs: Any
-) -> T: ...
-@overload
-def replace_non_sentinel[T: Dataclass](
-    obj: T, /, *, in_place: bool = False, **kwargs: Any
-) -> T | None: ...
-def replace_non_sentinel[T: Dataclass](
-    obj: T, /, *, in_place: bool = False, **kwargs: Any
-) -> T | None:
-    """Replace attributes on a dataclass, filtering out sentinel values."""
-    if in_place:
-        for k, v in kwargs.items():
-            if not is_sentinel(v):
-                setattr(obj, k, v)
-        return None
-    return replace(obj, **{k: v for k, v in kwargs.items() if not is_sentinel(v)})
-
-
-##
-
-
 def serialize_dataclass[T](
     obj: Dataclass,
     /,
@@ -1041,13 +940,11 @@ __all__ = [
     "ParseDataClassError",
     "StrMappingToFieldMappingError",
     "YieldFieldsError",
-    "dataclass_repr",
     "dataclass_to_dict",
     "is_nullable_lt",
     "mapping_to_dataclass",
     "one_field",
     "parse_dataclass",
-    "replace_non_sentinel",
     "str_mapping_to_field_mapping",
     "yield_fields",
 ]
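The net effect of these hunks is that names formerly imported from utilities.sentinel, utilities.re, utilities.iterables and utilities.text now come from the new utilities.constants and utilities.core modules, while dataclass_repr and replace_non_sentinel disappear from this module. A minimal before/after sketch of a downstream import site, based only on the import lines shown above:

# Sketch only; grounded in the import hunks above, not in separate documentation.
# 0.175.17 (old locations, removed in this release):
#     from utilities.sentinel import Sentinel, sentinel
#     from utilities.iterables import one_str
# 0.185.8 (new locations, as added above):
from utilities.constants import Sentinel, sentinel
from utilities.core import get_class_name, one_str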
utilities/docker.py
CHANGED
@@ -1,9 +1,10 @@
 from __future__ import annotations
 
-from contextlib import contextmanager
 from pathlib import Path
 from typing import TYPE_CHECKING, Literal, overload
 
+from utilities.contextlib import enhanced_context_manager
+from utilities.core import always_iterable
 from utilities.errors import ImpossibleCaseError
 from utilities.logging import to_logger
 from utilities.subprocess import (
@@ -18,7 +19,93 @@ from utilities.subprocess import (
 if TYPE_CHECKING:
     from collections.abc import Iterator
 
-    from utilities.types import
+    from utilities.types import (
+        LoggerLike,
+        MaybeIterable,
+        PathLike,
+        Retry,
+        StrStrMapping,
+    )
+
+
+def docker_compose_down(
+    *,
+    files: MaybeIterable[PathLike] | None = None,
+    print: bool = False,  # noqa: A002
+    print_stdout: bool = False,
+    print_stderr: bool = False,
+) -> None:
+    """Stop and remove containers."""
+    args = docker_compose_down_cmd(files=files)  # pragma: no cover
+    run(  # pragma: no cover
+        *args, print=print, print_stdout=print_stdout, print_stderr=print_stderr
+    )
+
+
+def docker_compose_down_cmd(
+    *, files: MaybeIterable[PathLike] | None = None
+) -> list[str]:
+    """Command to use 'docker compose down' to stop and remove containers."""
+    return _docker_compose_cmd("down", files=files)
+
+
+def docker_compose_pull(
+    *,
+    files: MaybeIterable[PathLike] | None = None,
+    print: bool = False,  # noqa: A002
+    print_stdout: bool = False,
+    print_stderr: bool = False,
+) -> None:
+    """Pull service images."""
+    args = docker_compose_pull_cmd(files=files)  # pragma: no cover
+    run(  # pragma: no cover
+        *args, print=print, print_stdout=print_stdout, print_stderr=print_stderr
+    )
+
+
+def docker_compose_pull_cmd(
+    *, files: MaybeIterable[PathLike] | None = None
+) -> list[str]:
+    """Command to use 'docker compose pull' to pull service images."""
+    return _docker_compose_cmd("pull", files=files)
+
+
+def docker_compose_up(
+    *,
+    files: MaybeIterable[PathLike] | None = None,
+    detach: bool = True,
+    print: bool = False,  # noqa: A002
+    print_stdout: bool = False,
+    print_stderr: bool = False,
+) -> None:
+    """Create and start containers."""
+    args = docker_compose_up_cmd(files=files, detach=detach)  # pragma: no cover
+    run(  # pragma: no cover
+        *args, print=print, print_stdout=print_stdout, print_stderr=print_stderr
+    )
+
+
+def docker_compose_up_cmd(
+    *, files: MaybeIterable[PathLike] | None = None, detach: bool = True
+) -> list[str]:
+    """Command to use 'docker compose up' to create and start containers."""
+    args: list[str] = []
+    if detach:
+        args.append("--detach")
+    return _docker_compose_cmd("up", *args, files=files)
+
+
+def _docker_compose_cmd(
+    cmd: str, /, *args: str, files: MaybeIterable[PathLike] | None = None
+) -> list[str]:
+    all_args: list[str] = ["docker", "compose"]
+    if files is not None:
+        for file in always_iterable(files):
+            all_args.extend(["--file", str(file)])
+    return [*all_args, cmd, *args]
+
+
+##
 
 
 @overload
@@ -47,48 +134,39 @@ def docker_cp(
     sudo: bool = False,
     logger: LoggerLike | None = None,
 ) -> None:
+    """Copy between a container and the local file system."""
     match src, dest:  # skipif-ci
         case Path() | str(), (str() as cont, Path() | str() as dest_path):
             docker_exec(
                 cont, *maybe_sudo_cmd(*mkdir_cmd(dest_path, parent=True), sudo=sudo)
             )
-            run(*docker_cp_cmd(src, dest, sudo=sudo), logger=logger)
+            run(*maybe_sudo_cmd(*docker_cp_cmd(src, dest), sudo=sudo), logger=logger)
         case (str(), Path() | str()), Path() | str():
             mkdir(dest, parent=True, sudo=sudo)
-            run(*docker_cp_cmd(src, dest, sudo=sudo), logger=logger)
+            run(*maybe_sudo_cmd(*docker_cp_cmd(src, dest), sudo=sudo), logger=logger)
         case _:  # pragma: no cover
             raise ImpossibleCaseError(case=[f"{src}", f"{dest=}"])
 
 
 @overload
-def docker_cp_cmd(
-    src: tuple[str, PathLike], dest: PathLike, /, *, sudo: bool = False
-) -> list[str]: ...
+def docker_cp_cmd(src: tuple[str, PathLike], dest: PathLike, /) -> list[str]: ...
 @overload
+def docker_cp_cmd(src: PathLike, dest: tuple[str, PathLike], /) -> list[str]: ...
 def docker_cp_cmd(
-    src: PathLike
-) -> list[str]: ...
-def docker_cp_cmd(
-    src: PathLike | tuple[str, PathLike],
-    dest: PathLike | tuple[str, PathLike],
-    /,
-    *,
-    sudo: bool = False,
+    src: PathLike | tuple[str, PathLike], dest: PathLike | tuple[str, PathLike], /
 ) -> list[str]:
+    """Command to use 'docker cp' to copy between a container and the local file system."""
+    args: list[str] = ["docker", "cp"]
     match src, dest:
-        case (Path() | str()) as
-            str()
-
-
-            dest_use = f"{dest_cont}:{dest_path}"
-        case (str() as src_cont, (Path() | str()) as src_path), (
-            Path() | str() as dest_use
-        ):
-            src_use = f"{src_cont}:{src_path}"
+        case ((Path() | str()), (str() as cont, Path() | str() as path)):
+            return [*args, str(src), f"{cont}:{path}"]
+        case (str() as cont, (Path() | str()) as path), (Path() | str() as dest):
+            return [*args, f"{cont}:{path}", str(dest)]
         case _:  # pragma: no cover
             raise ImpossibleCaseError(case=[f"{src}", f"{dest=}"])
-
-
+
+
+##
 
 
 @overload
@@ -96,7 +174,7 @@ def docker_exec(
     container: str,
     cmd: str,
     /,
-    *
+    *args: str,
     env: StrStrMapping | None = None,
     user: str | None = None,
     workdir: PathLike | None = None,
@@ -116,7 +194,7 @@ def docker_exec(
     container: str,
     cmd: str,
     /,
-    *
+    *args: str,
     env: StrStrMapping | None = None,
     user: str | None = None,
     workdir: PathLike | None = None,
@@ -136,7 +214,7 @@ def docker_exec(
     container: str,
     cmd: str,
     /,
-    *
+    *args: str,
     env: StrStrMapping | None = None,
     user: str | None = None,
     workdir: PathLike | None = None,
@@ -156,7 +234,7 @@ def docker_exec(
     container: str,
     cmd: str,
     /,
-    *
+    *args: str,
     env: StrStrMapping | None = None,
     user: str | None = None,
     workdir: PathLike | None = None,
@@ -176,7 +254,7 @@ def docker_exec(
     container: str,
     cmd: str,
     /,
-    *
+    *args: str,
     env: StrStrMapping | None = None,
     user: str | None = None,
     workdir: PathLike | None = None,
@@ -195,7 +273,7 @@ def docker_exec(
     container: str,
     cmd: str,
     /,
-    *
+    *args: str,
     env: StrStrMapping | None = None,
     user: str | None = None,
     workdir: PathLike | None = None,
@@ -210,10 +288,11 @@ def docker_exec(
     logger: LoggerLike | None = None,
     **env_kwargs: str,
 ) -> str | None:
-
+    """Execute a command in a container."""
+    run_cmd_and_args = docker_exec_cmd(  # skipif-ci
         container,
         cmd,
-        *
+        *args,
         env=env,
         interactive=input is not None,
         user=user,
@@ -221,7 +300,7 @@ def docker_exec(
         **env_kwargs,
     )
     return run(  # skipif-ci
-        *
+        *run_cmd_and_args,
         input=input,
         print=print,
         print_stdout=print_stdout,
@@ -238,28 +317,31 @@ def docker_exec_cmd(
     container: str,
     cmd: str,
     /,
-    *
+    *args: str,
     env: StrStrMapping | None = None,
     interactive: bool = False,
     user: str | None = None,
     workdir: PathLike | None = None,
     **env_kwargs: str,
 ) -> list[str]:
-    """
-
+    """Command to use `docker exec` to execute a command in a container."""
+    all_args: list[str] = ["docker", "exec"]
     mapping: dict[str, str] = ({} if env is None else dict(env)) | env_kwargs
     for key, value in mapping.items():
-
+        all_args.extend(["--env", f"{key}={value}"])
     if interactive:
-
+        all_args.append("--interactive")
     if user is not None:
-
+        all_args.extend(["--user", user])
     if workdir is not None:
-
-    return [*
+        all_args.extend(["--workdir", str(workdir)])
+    return [*all_args, container, cmd, *args]
+
+
+##
 
 
-@
+@enhanced_context_manager
 def yield_docker_temp_dir(
     container: str,
     /,
@@ -290,4 +372,16 @@ def yield_docker_temp_dir(
     docker_exec(container, *rm_cmd(path), user=user, retry=retry, logger=logger)
 
 
-__all__ = [
+__all__ = [
+    "docker_compose_down",
+    "docker_compose_down_cmd",
+    "docker_compose_pull",
+    "docker_compose_pull_cmd",
+    "docker_compose_up",
+    "docker_compose_up_cmd",
+    "docker_cp",
+    "docker_cp_cmd",
+    "docker_exec",
+    "docker_exec_cmd",
+    "yield_docker_temp_dir",
+]
utilities/enum.py
CHANGED
@@ -4,8 +4,8 @@ from dataclasses import dataclass
 from enum import Enum, StrEnum
 from typing import TYPE_CHECKING, Literal, assert_never, overload, override
 
+from utilities.core import OneStrEmptyError, OneStrNonUniqueError, one_str
 from utilities.functions import ensure_str
-from utilities.iterables import OneStrEmptyError, OneStrNonUniqueError, one_str
 
 if TYPE_CHECKING:
     from utilities.types import EnumLike
utilities/errors.py
CHANGED
@@ -4,7 +4,7 @@ from dataclasses import dataclass
 from typing import TYPE_CHECKING, assert_never, override
 
 if TYPE_CHECKING:
-    from utilities.types import
+    from utilities.types import MaybeType
 
 
 @dataclass(kw_only=True, slots=True)
@@ -21,20 +21,6 @@ class ImpossibleCaseError(Exception):
 ##
 
 
-def is_instance_error(
-    error: BaseException, class_or_tuple: ExceptionTypeLike[Exception], /
-) -> bool:
-    """Check if an instance relationship holds, allowing for groups."""
-    if isinstance(error, class_or_tuple):
-        return True
-    if not isinstance(error, BaseExceptionGroup):
-        return False
-    return any(is_instance_error(e, class_or_tuple) for e in error.exceptions)
-
-
-##
-
-
 def repr_error(error: MaybeType[BaseException], /) -> str:
     """Get a string representation of an error."""
     match error:
@@ -50,4 +36,4 @@ def repr_error(error: MaybeType[BaseException], /) -> str:
             assert_never(never)
 
 
-__all__ = ["ImpossibleCaseError", "
+__all__ = ["ImpossibleCaseError", "repr_error"]
utilities/fastapi.py
CHANGED
@@ -6,14 +6,14 @@ from typing import TYPE_CHECKING, Any, override
 from fastapi import FastAPI
 from uvicorn import Config, Server
 
-
+import utilities.asyncio
 from utilities.contextlib import enhanced_async_context_manager
-from utilities.
+from utilities.core import get_now_local
 
 if TYPE_CHECKING:
     from collections.abc import AsyncIterator
 
-    from utilities.types import
+    from utilities.types import Duration, MaybeType
 
 
 _TASKS: list[Task[None]] = []
@@ -39,7 +39,7 @@ async def yield_ping_receiver(
     /,
     *,
     host: str = "localhost",
-    timeout:
+    timeout: Duration | None = None,
     error: MaybeType[BaseException] = TimeoutError,
 ) -> AsyncIterator[None]:
     """Yield the ping receiver."""
@@ -47,7 +47,7 @@ async def yield_ping_receiver(
     server = Server(Config(app, host=host, port=port))  # skipif-ci
     _TASKS.append(create_task(server.serve()))  # skipif-ci
     try:  # skipif-ci
-        async with
+        async with utilities.asyncio.timeout(timeout, error=error):
             yield
     finally:  # skipif-ci
         await server.shutdown()
utilities/fpdf2.py
CHANGED
@@ -6,7 +6,8 @@ from typing import TYPE_CHECKING, override
 from fpdf import FPDF
 from fpdf.enums import XPos, YPos
 
-from utilities.
+from utilities.core import get_now_local
+from utilities.whenever import format_compact
 
 if TYPE_CHECKING:
     from collections.abc import Iterator