dycw-utilities 0.166.30__py3-none-any.whl → 0.185.8__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- dycw_utilities-0.185.8.dist-info/METADATA +33 -0
- dycw_utilities-0.185.8.dist-info/RECORD +90 -0
- {dycw_utilities-0.166.30.dist-info → dycw_utilities-0.185.8.dist-info}/WHEEL +1 -1
- {dycw_utilities-0.166.30.dist-info → dycw_utilities-0.185.8.dist-info}/entry_points.txt +1 -0
- utilities/__init__.py +1 -1
- utilities/altair.py +17 -10
- utilities/asyncio.py +50 -72
- utilities/atools.py +9 -11
- utilities/cachetools.py +16 -11
- utilities/click.py +76 -19
- utilities/concurrent.py +1 -1
- utilities/constants.py +492 -0
- utilities/contextlib.py +23 -30
- utilities/contextvars.py +1 -23
- utilities/core.py +2581 -0
- utilities/dataclasses.py +16 -119
- utilities/docker.py +387 -0
- utilities/enum.py +1 -1
- utilities/errors.py +2 -16
- utilities/fastapi.py +5 -5
- utilities/fpdf2.py +2 -1
- utilities/functions.py +34 -265
- utilities/http.py +2 -3
- utilities/hypothesis.py +84 -29
- utilities/importlib.py +17 -1
- utilities/iterables.py +39 -575
- utilities/jinja2.py +145 -0
- utilities/jupyter.py +5 -3
- utilities/libcst.py +1 -1
- utilities/lightweight_charts.py +4 -6
- utilities/logging.py +24 -24
- utilities/math.py +1 -36
- utilities/more_itertools.py +4 -6
- utilities/numpy.py +2 -1
- utilities/operator.py +2 -2
- utilities/orjson.py +42 -43
- utilities/os.py +4 -147
- utilities/packaging.py +129 -0
- utilities/parse.py +35 -15
- utilities/pathlib.py +3 -120
- utilities/platform.py +8 -90
- utilities/polars.py +38 -32
- utilities/postgres.py +37 -33
- utilities/pottery.py +20 -18
- utilities/pqdm.py +3 -4
- utilities/psutil.py +2 -3
- utilities/pydantic.py +25 -0
- utilities/pydantic_settings.py +87 -16
- utilities/pydantic_settings_sops.py +16 -3
- utilities/pyinstrument.py +4 -4
- utilities/pytest.py +96 -125
- utilities/pytest_plugins/pytest_regressions.py +2 -2
- utilities/pytest_regressions.py +32 -11
- utilities/random.py +2 -8
- utilities/redis.py +98 -94
- utilities/reprlib.py +11 -118
- utilities/shellingham.py +66 -0
- utilities/shutil.py +25 -0
- utilities/slack_sdk.py +13 -12
- utilities/sqlalchemy.py +57 -30
- utilities/sqlalchemy_polars.py +16 -25
- utilities/subprocess.py +2590 -0
- utilities/tabulate.py +32 -0
- utilities/testbook.py +8 -8
- utilities/text.py +24 -99
- utilities/throttle.py +159 -0
- utilities/time.py +18 -0
- utilities/timer.py +31 -14
- utilities/traceback.py +16 -23
- utilities/types.py +42 -2
- utilities/typing.py +26 -14
- utilities/uuid.py +1 -1
- utilities/version.py +202 -45
- utilities/whenever.py +53 -150
- dycw_utilities-0.166.30.dist-info/METADATA +0 -41
- dycw_utilities-0.166.30.dist-info/RECORD +0 -98
- dycw_utilities-0.166.30.dist-info/licenses/LICENSE +0 -21
- utilities/aeventkit.py +0 -388
- utilities/atomicwrites.py +0 -182
- utilities/cryptography.py +0 -41
- utilities/getpass.py +0 -8
- utilities/git.py +0 -19
- utilities/gzip.py +0 -31
- utilities/json.py +0 -70
- utilities/pickle.py +0 -25
- utilities/re.py +0 -156
- utilities/sentinel.py +0 -73
- utilities/socket.py +0 -8
- utilities/string.py +0 -20
- utilities/tempfile.py +0 -77
- utilities/typed_settings.py +0 -152
- utilities/tzdata.py +0 -11
- utilities/tzlocal.py +0 -28
- utilities/warnings.py +0 -65
- utilities/zipfile.py +0 -25
- utilities/zoneinfo.py +0 -133
utilities/core.py
ADDED
|
@@ -0,0 +1,2581 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
import datetime as dt
|
|
4
|
+
import gzip
|
|
5
|
+
import os
|
|
6
|
+
import pickle
|
|
7
|
+
import re
|
|
8
|
+
import reprlib
|
|
9
|
+
import shutil
|
|
10
|
+
import tempfile
|
|
11
|
+
from bz2 import BZ2File
|
|
12
|
+
from collections.abc import Callable, Iterable, Iterator
|
|
13
|
+
from contextlib import ExitStack, contextmanager, suppress
|
|
14
|
+
from dataclasses import dataclass, replace
|
|
15
|
+
from functools import _lru_cache_wrapper, partial, reduce, wraps
|
|
16
|
+
from gzip import GzipFile
|
|
17
|
+
from itertools import chain, islice
|
|
18
|
+
from lzma import LZMAFile
|
|
19
|
+
from operator import or_
|
|
20
|
+
from os import chdir, environ, getenv, getpid
|
|
21
|
+
from pathlib import Path
|
|
22
|
+
from re import VERBOSE, Pattern, findall
|
|
23
|
+
from shutil import copyfile, copyfileobj, copytree
|
|
24
|
+
from stat import (
|
|
25
|
+
S_IMODE,
|
|
26
|
+
S_IRGRP,
|
|
27
|
+
S_IROTH,
|
|
28
|
+
S_IRUSR,
|
|
29
|
+
S_IWGRP,
|
|
30
|
+
S_IWOTH,
|
|
31
|
+
S_IWUSR,
|
|
32
|
+
S_IXGRP,
|
|
33
|
+
S_IXOTH,
|
|
34
|
+
S_IXUSR,
|
|
35
|
+
)
|
|
36
|
+
from string import Template
|
|
37
|
+
from subprocess import check_output
|
|
38
|
+
from tarfile import ReadError, TarFile
|
|
39
|
+
from tempfile import NamedTemporaryFile as _NamedTemporaryFile
|
|
40
|
+
from textwrap import dedent
|
|
41
|
+
from threading import get_ident
|
|
42
|
+
from time import time_ns
|
|
43
|
+
from types import (
|
|
44
|
+
BuiltinFunctionType,
|
|
45
|
+
FunctionType,
|
|
46
|
+
MethodDescriptorType,
|
|
47
|
+
MethodType,
|
|
48
|
+
MethodWrapperType,
|
|
49
|
+
WrapperDescriptorType,
|
|
50
|
+
)
|
|
51
|
+
from typing import (
|
|
52
|
+
TYPE_CHECKING,
|
|
53
|
+
Any,
|
|
54
|
+
Literal,
|
|
55
|
+
Self,
|
|
56
|
+
assert_never,
|
|
57
|
+
cast,
|
|
58
|
+
overload,
|
|
59
|
+
override,
|
|
60
|
+
)
|
|
61
|
+
from uuid import uuid4
|
|
62
|
+
from warnings import catch_warnings, filterwarnings
|
|
63
|
+
from zipfile import ZipFile
|
|
64
|
+
from zoneinfo import ZoneInfo
|
|
65
|
+
|
|
66
|
+
from typing_extensions import TypeIs
|
|
67
|
+
from whenever import Date, PlainDateTime, Time, ZonedDateTime
|
|
68
|
+
|
|
69
|
+
import utilities.constants
|
|
70
|
+
from utilities.constants import (
|
|
71
|
+
LOCAL_TIME_ZONE,
|
|
72
|
+
LOCAL_TIME_ZONE_NAME,
|
|
73
|
+
RICH_EXPAND_ALL,
|
|
74
|
+
RICH_INDENT_SIZE,
|
|
75
|
+
RICH_MAX_DEPTH,
|
|
76
|
+
RICH_MAX_LENGTH,
|
|
77
|
+
RICH_MAX_STRING,
|
|
78
|
+
RICH_MAX_WIDTH,
|
|
79
|
+
UTC,
|
|
80
|
+
Sentinel,
|
|
81
|
+
_get_now,
|
|
82
|
+
sentinel,
|
|
83
|
+
)
|
|
84
|
+
from utilities.types import (
|
|
85
|
+
TIME_ZONES,
|
|
86
|
+
CopyOrMove,
|
|
87
|
+
Dataclass,
|
|
88
|
+
FilterWarningsAction,
|
|
89
|
+
PathToBinaryIO,
|
|
90
|
+
PatternLike,
|
|
91
|
+
StrDict,
|
|
92
|
+
StrMapping,
|
|
93
|
+
SupportsRichComparison,
|
|
94
|
+
TimeZone,
|
|
95
|
+
TimeZoneLike,
|
|
96
|
+
TypeLike,
|
|
97
|
+
)
|
|
98
|
+
|
|
99
|
+
if TYPE_CHECKING:
|
|
100
|
+
from collections.abc import Callable, Iterable, Iterator, Mapping, Sequence
|
|
101
|
+
from contextvars import ContextVar
|
|
102
|
+
from types import TracebackType
|
|
103
|
+
|
|
104
|
+
from whenever import Date, PlainDateTime, Time
|
|
105
|
+
|
|
106
|
+
from utilities.types import FileOrDir, MaybeIterable, PathLike
|
|
107
|
+
|
|
108
|
+
|
|
109
|
+
###############################################################################
|
|
110
|
+
#### builtins #################################################################
|
|
111
|
+
###############################################################################
|
|
112
|
+
|
|
113
|
+
|
|
114
|
+
@overload
|
|
115
|
+
def get_class[T](obj: type[T], /) -> type[T]: ...
|
|
116
|
+
@overload
|
|
117
|
+
def get_class[T](obj: T, /) -> type[T]: ...
|
|
118
|
+
def get_class[T](obj: T | type[T], /) -> type[T]:
|
|
119
|
+
"""Get the class of an object, unless it is already a class."""
|
|
120
|
+
return obj if isinstance(obj, type) else type(obj)
|
|
121
|
+
|
|
122
|
+
|
|
123
|
+
def get_class_name(obj: Any, /, *, qual: bool = False) -> str:
|
|
124
|
+
"""Get the name of the class of an object, unless it is already a class."""
|
|
125
|
+
cls = get_class(obj)
|
|
126
|
+
return f"{cls.__module__}.{cls.__qualname__}" if qual else cls.__name__
|
|
127
|
+
|
|
128
|
+
|
|
129
|
+
##
|
|
130
|
+
|
|
131
|
+
|
|
132
|
+
def get_func_name(obj: Callable[..., Any], /) -> str:
    """Get the name of a callable.

    Resolution order: builtin function -> plain function (re-qualified with
    its class when ``__qualname__`` shows one) -> bound method -> method
    descriptors/wrappers -> ``lru_cache`` wrappers -> ``partial`` (recursing
    into the wrapped callable) -> fall back to the object's class name.
    """
    if isinstance(obj, BuiltinFunctionType):
        return obj.__name__
    if isinstance(obj, FunctionType):
        name = obj.__name__
        # Capture `Class.name` from a dotted qualname such as
        # `outer.Class.name`; the leading `[A-Z]` assumes PascalCase classes.
        pattern = r"^.+\.([A-Z]\w+\." + name + ")$"
        try:
            (full_name,) = findall(pattern, obj.__qualname__)
        except ValueError:
            # Zero (or several) matches: fall back to the bare name.
            return name
        return full_name
    if isinstance(obj, MethodType):
        return f"{get_class_name(obj.__self__)}.{obj.__name__}"
    # NOTE(review): `MethodType` in this union is unreachable — it is already
    # handled by the branch above.
    if isinstance(
        obj,
        MethodType | MethodDescriptorType | MethodWrapperType | WrapperDescriptorType,
    ):
        return obj.__qualname__
    if isinstance(obj, _lru_cache_wrapper):
        return cast("Any", obj).__name__
    if isinstance(obj, partial):
        return get_func_name(obj.func)
    return get_class_name(obj)
|
|
156
|
+
|
|
157
|
+
|
|
158
|
+
##
|
|
159
|
+
|
|
160
|
+
|
|
161
|
+
@overload
|
|
162
|
+
def min_nullable[T: SupportsRichComparison](
|
|
163
|
+
iterable: Iterable[T | None], /, *, default: Sentinel = ...
|
|
164
|
+
) -> T: ...
|
|
165
|
+
@overload
|
|
166
|
+
def min_nullable[T: SupportsRichComparison, U](
|
|
167
|
+
iterable: Iterable[T | None], /, *, default: U = ...
|
|
168
|
+
) -> T | U: ...
|
|
169
|
+
def min_nullable[T: SupportsRichComparison, U](
|
|
170
|
+
iterable: Iterable[T | None], /, *, default: U | Sentinel = sentinel
|
|
171
|
+
) -> T | U:
|
|
172
|
+
"""Compute the minimum of a set of values; ignoring nulls."""
|
|
173
|
+
values = (i for i in iterable if i is not None)
|
|
174
|
+
if is_sentinel(default):
|
|
175
|
+
try:
|
|
176
|
+
return min(values)
|
|
177
|
+
except ValueError:
|
|
178
|
+
raise MinNullableError(iterable=iterable) from None
|
|
179
|
+
return min(values, default=default)
|
|
180
|
+
|
|
181
|
+
|
|
182
|
+
@dataclass(kw_only=True, slots=True)
class MinNullableError[T: SupportsRichComparison](Exception):
    """Raised by `min_nullable` when no non-None value exists and no default is given."""

    # The offending iterable (a one-shot iterator may be partially consumed).
    iterable: Iterable[T | None]

    @override
    def __str__(self) -> str:
        return f"Minimum of {repr_(self.iterable)} is undefined"
|
|
189
|
+
|
|
190
|
+
|
|
191
|
+
@overload
|
|
192
|
+
def max_nullable[T: SupportsRichComparison](
|
|
193
|
+
iterable: Iterable[T | None], /, *, default: Sentinel = ...
|
|
194
|
+
) -> T: ...
|
|
195
|
+
@overload
|
|
196
|
+
def max_nullable[T: SupportsRichComparison, U](
|
|
197
|
+
iterable: Iterable[T | None], /, *, default: U = ...
|
|
198
|
+
) -> T | U: ...
|
|
199
|
+
def max_nullable[T: SupportsRichComparison, U](
|
|
200
|
+
iterable: Iterable[T | None], /, *, default: U | Sentinel = sentinel
|
|
201
|
+
) -> T | U:
|
|
202
|
+
"""Compute the maximum of a set of values; ignoring nulls."""
|
|
203
|
+
values = (i for i in iterable if i is not None)
|
|
204
|
+
if is_sentinel(default):
|
|
205
|
+
try:
|
|
206
|
+
return max(values)
|
|
207
|
+
except ValueError:
|
|
208
|
+
raise MaxNullableError(iterable=iterable) from None
|
|
209
|
+
return max(values, default=default)
|
|
210
|
+
|
|
211
|
+
|
|
212
|
+
@dataclass(kw_only=True, slots=True)
class MaxNullableError[T: SupportsRichComparison](Exception):
    """Raised by `max_nullable` when no non-None value exists and no default is given."""

    # The offending iterable (a one-shot iterator may be partially consumed).
    iterable: Iterable[T | None]

    @override
    def __str__(self) -> str:
        return f"Maximum of {repr_(self.iterable)} is undefined"
|
|
219
|
+
|
|
220
|
+
|
|
221
|
+
###############################################################################
|
|
222
|
+
#### compression ##############################################################
|
|
223
|
+
###############################################################################
|
|
224
|
+
|
|
225
|
+
|
|
226
|
+
def compress_bz2(
    src_or_dest: PathLike,
    /,
    *srcs_or_dest: PathLike,
    overwrite: bool = False,
    perms: PermissionsLike | None = None,
    owner: str | int | None = None,
    group: str | int | None = None,
) -> None:
    """Create a BZ2 file.

    The last positional argument is the destination; all preceding ones are
    sources. `perms`/`owner`/`group` are forwarded to the underlying writer
    (new parameters; their `None` defaults preserve the previous behaviour
    and match `compress_zip`/`_compress_files`). Raises `CompressBZ2Error`
    when the destination exists and `overwrite` is false.
    """

    def func(path: PathLike, /) -> BZ2File:
        # Opened by `_compress_files` once the destination is writable.
        return BZ2File(path, mode="wb")

    func2 = cast("PathToBinaryIO", func)
    try:
        _compress_files(
            func2,
            src_or_dest,
            *srcs_or_dest,
            overwrite=overwrite,
            perms=perms,
            owner=owner,
            group=group,
        )
    except _CompressFilesError as error:
        raise CompressBZ2Error(srcs=error.srcs, dest=error.dest) from None
|
|
239
|
+
|
|
240
|
+
|
|
241
|
+
@dataclass(kw_only=True, slots=True)
class CompressBZ2Error(Exception):
    """Raised by `compress_bz2` when the destination already exists."""

    # Source paths that were to be compressed.
    srcs: list[Path]
    # The pre-existing destination path.
    dest: Path

    @override
    def __str__(self) -> str:
        return f"Cannot compress source(s) {repr_(list(map(str, self.srcs)))} since destination {repr_str(self.dest)} already exists"
|
|
249
|
+
|
|
250
|
+
|
|
251
|
+
def compress_gzip(
    src_or_dest: PathLike,
    /,
    *srcs_or_dest: PathLike,
    overwrite: bool = False,
    perms: PermissionsLike | None = None,
    owner: str | int | None = None,
    group: str | int | None = None,
) -> None:
    """Create a Gzip file.

    The last positional argument is the destination; all preceding ones are
    sources. `perms`/`owner`/`group` are forwarded to the underlying writer
    (new parameters; their `None` defaults preserve the previous behaviour
    and match `compress_zip`/`_compress_files`). Raises `CompressGzipError`
    when the destination exists and `overwrite` is false.
    """

    def func(path: PathLike, /) -> GzipFile:
        # Opened by `_compress_files` once the destination is writable.
        return GzipFile(path, mode="wb")

    func2 = cast("PathToBinaryIO", func)
    try:
        _compress_files(
            func2,
            src_or_dest,
            *srcs_or_dest,
            overwrite=overwrite,
            perms=perms,
            owner=owner,
            group=group,
        )
    except _CompressFilesError as error:
        raise CompressGzipError(srcs=error.srcs, dest=error.dest) from None
|
|
264
|
+
|
|
265
|
+
|
|
266
|
+
@dataclass(kw_only=True, slots=True)
class CompressGzipError(Exception):
    """Raised by `compress_gzip` when the destination already exists."""

    # Source paths that were to be compressed.
    srcs: list[Path]
    # The pre-existing destination path.
    dest: Path

    @override
    def __str__(self) -> str:
        return f"Cannot compress source(s) {repr_(list(map(str, self.srcs)))} since destination {repr_str(self.dest)} already exists"
|
|
274
|
+
|
|
275
|
+
|
|
276
|
+
def compress_lzma(
    src_or_dest: PathLike,
    /,
    *srcs_or_dest: PathLike,
    overwrite: bool = False,
    perms: PermissionsLike | None = None,
    owner: str | int | None = None,
    group: str | int | None = None,
) -> None:
    """Create an LZMA file.

    The last positional argument is the destination; all preceding ones are
    sources. `perms`/`owner`/`group` are forwarded to the underlying writer
    (new parameters; their `None` defaults preserve the previous behaviour
    and match `compress_zip`/`_compress_files`). Raises `CompressLZMAError`
    when the destination exists and `overwrite` is false.
    """

    def func(path: PathLike, /) -> LZMAFile:
        # Opened by `_compress_files` once the destination is writable.
        return LZMAFile(path, mode="wb")

    func2 = cast("PathToBinaryIO", func)
    try:
        _compress_files(
            func2,
            src_or_dest,
            *srcs_or_dest,
            overwrite=overwrite,
            perms=perms,
            owner=owner,
            group=group,
        )
    except _CompressFilesError as error:
        raise CompressLZMAError(srcs=error.srcs, dest=error.dest) from None
|
|
289
|
+
|
|
290
|
+
|
|
291
|
+
@dataclass(kw_only=True, slots=True)
class CompressLZMAError(Exception):
    """Raised by `compress_lzma` when the destination already exists."""

    # Source paths that were to be compressed.
    srcs: list[Path]
    # The pre-existing destination path.
    dest: Path

    @override
    def __str__(self) -> str:
        return f"Cannot compress source(s) {repr_(list(map(str, self.srcs)))} since destination {repr_str(self.dest)} already exists"
|
|
299
|
+
|
|
300
|
+
|
|
301
|
+
def _compress_files(
    func: PathToBinaryIO,
    src_or_dest: PathLike,
    /,
    *srcs_or_dest: PathLike,
    overwrite: bool = False,
    perms: PermissionsLike | None = None,
    owner: str | int | None = None,
    group: str | int | None = None,
) -> None:
    """Shared driver for `compress_bz2`/`compress_gzip`/`compress_lzma`.

    The last positional argument is the destination; all preceding ones are
    sources. `func` opens the destination path as a writable binary stream.
    A single source file is streamed directly; a single directory, or several
    sources, are wrapped in a tar stream first. Raises `_CompressFilesError`
    when the destination exists and `overwrite` is false.
    """
    # Split the variadics: everything but the last argument is a source.
    *srcs, dest = map(Path, [src_or_dest, *srcs_or_dest])
    try:
        with (
            # `yield_write_path` appears to yield a temporary path that is
            # promoted to `dest` on success — TODO confirm its contract.
            yield_write_path(
                dest, overwrite=overwrite, perms=perms, owner=owner, group=group
            ) as temp,
            func(temp) as buffer,
        ):
            match srcs:
                case [src]:
                    match file_or_dir(src):
                        case "file":
                            # Single file: compress its bytes directly.
                            with src.open(mode="rb") as fh:
                                copyfileobj(fh, buffer)
                        case "dir":
                            # Single directory: tar its contents first.
                            with TarFile(mode="w", fileobj=buffer) as tar:
                                _compress_files_add_dir(src, tar)
                        case None:
                            # Missing source: produce an empty archive.
                            ...
                        case never:
                            assert_never(never)
                case _:
                    # Several sources: always wrap in a tar stream; sorted
                    # iteration keeps archives reproducible.
                    with TarFile(mode="w", fileobj=buffer) as tar:
                        for src_i in sorted(srcs):
                            match file_or_dir(src_i):
                                case "file":
                                    tar.add(src_i, src_i.name)
                                case "dir":
                                    _compress_files_add_dir(src_i, tar)
                                case None:
                                    ...
                                case never:
                                    assert_never(never)
    except YieldWritePathError as error:
        raise _CompressFilesError(srcs=srcs, dest=error.path) from None
|
|
346
|
+
|
|
347
|
+
|
|
348
|
+
@dataclass(kw_only=True, slots=True)
class _CompressFilesError(Exception):
    """Internal: raised by `_compress_files`; callers re-raise a public error."""

    # Source paths that were to be compressed.
    srcs: list[Path]
    # The pre-existing destination path.
    dest: Path
|
|
352
|
+
|
|
353
|
+
|
|
354
|
+
def _compress_files_add_dir(path: PathLike, tar: TarFile, /) -> None:
|
|
355
|
+
path = Path(path)
|
|
356
|
+
for p in sorted(path.rglob("**/*")):
|
|
357
|
+
tar.add(p, p.relative_to(path))
|
|
358
|
+
|
|
359
|
+
|
|
360
|
+
##
|
|
361
|
+
|
|
362
|
+
|
|
363
|
+
def compress_zip(
|
|
364
|
+
src_or_dest: PathLike,
|
|
365
|
+
/,
|
|
366
|
+
*srcs_or_dest: PathLike,
|
|
367
|
+
overwrite: bool = False,
|
|
368
|
+
perms: PermissionsLike | None = None,
|
|
369
|
+
owner: str | int | None = None,
|
|
370
|
+
group: str | int | None = None,
|
|
371
|
+
) -> None:
|
|
372
|
+
"""Create a Zip file."""
|
|
373
|
+
*srcs, dest = map(Path, [src_or_dest, *srcs_or_dest])
|
|
374
|
+
try:
|
|
375
|
+
with (
|
|
376
|
+
yield_write_path(
|
|
377
|
+
dest, overwrite=overwrite, perms=perms, owner=owner, group=group
|
|
378
|
+
) as temp,
|
|
379
|
+
ZipFile(temp, mode="w") as zf,
|
|
380
|
+
):
|
|
381
|
+
for src_i in sorted(srcs):
|
|
382
|
+
match file_or_dir(src_i):
|
|
383
|
+
case "file":
|
|
384
|
+
zf.write(src_i, src_i.name)
|
|
385
|
+
case "dir":
|
|
386
|
+
for p in sorted(src_i.rglob("**/*")):
|
|
387
|
+
zf.write(p, p.relative_to(src_i))
|
|
388
|
+
case None:
|
|
389
|
+
...
|
|
390
|
+
case never:
|
|
391
|
+
assert_never(never)
|
|
392
|
+
except YieldWritePathError as error:
|
|
393
|
+
raise CompressZipError(srcs=srcs, dest=error.path) from None
|
|
394
|
+
|
|
395
|
+
|
|
396
|
+
@dataclass(kw_only=True, slots=True)
class CompressZipError(Exception):
    """Raised by `compress_zip` when the destination already exists."""

    # Source paths that were to be compressed.
    srcs: list[Path]
    # The pre-existing destination path.
    dest: Path

    @override
    def __str__(self) -> str:
        return f"Cannot compress source(s) {repr_(list(map(str, self.srcs)))} since destination {repr_str(self.dest)} already exists"
|
|
404
|
+
|
|
405
|
+
|
|
406
|
+
##
|
|
407
|
+
|
|
408
|
+
|
|
409
|
+
@contextmanager
def yield_bz2(path: PathLike, /) -> Iterator[Path]:
    """Yield the contents of a BZ2 file.

    Delegates to `_yield_uncompressed`, translating its error into
    `YieldBZ2Error` when `path` does not exist.
    """

    def open_for_read(p: PathLike, /) -> BZ2File:
        return BZ2File(p, mode="rb")

    opener = cast("PathToBinaryIO", open_for_read)
    try:
        with _yield_uncompressed(path, opener) as extracted:
            yield extracted
    except _YieldUncompressedError as error:
        raise YieldBZ2Error(path=error.path) from None
|
|
421
|
+
|
|
422
|
+
|
|
423
|
+
@dataclass(kw_only=True, slots=True)
class YieldBZ2Error(Exception):
    """Raised by `yield_bz2` when the archive path does not exist."""

    # The missing archive path.
    path: Path

    @override
    def __str__(self) -> str:
        return f"Cannot uncompress {repr_str(self.path)} since it does not exist"
|
|
430
|
+
|
|
431
|
+
|
|
432
|
+
@contextmanager
def yield_gzip(path: PathLike, /) -> Iterator[Path]:
    """Yield the contents of a Gzip file.

    Delegates to `_yield_uncompressed`, translating its error into
    `YieldGzipError` when `path` does not exist.
    """

    def open_for_read(p: PathLike, /) -> GzipFile:
        return GzipFile(p, mode="rb")

    opener = cast("PathToBinaryIO", open_for_read)
    try:
        with _yield_uncompressed(path, opener) as extracted:
            yield extracted
    except _YieldUncompressedError as error:
        raise YieldGzipError(path=error.path) from None
|
|
444
|
+
|
|
445
|
+
|
|
446
|
+
@dataclass(kw_only=True, slots=True)
class YieldGzipError(Exception):
    """Raised by `yield_gzip` when the archive path does not exist."""

    # The missing archive path.
    path: Path

    @override
    def __str__(self) -> str:
        return f"Cannot uncompress {repr_str(self.path)} since it does not exist"
|
|
453
|
+
|
|
454
|
+
|
|
455
|
+
@contextmanager
def yield_lzma(path: PathLike, /) -> Iterator[Path]:
    """Yield the contents of an LZMA file.

    Delegates to `_yield_uncompressed`, translating its error into
    `YieldLZMAError` when `path` does not exist.
    """

    def open_for_read(p: PathLike, /) -> LZMAFile:
        return LZMAFile(p, mode="rb")

    opener = cast("PathToBinaryIO", open_for_read)
    try:
        with _yield_uncompressed(path, opener) as extracted:
            yield extracted
    except _YieldUncompressedError as error:
        raise YieldLZMAError(path=error.path) from None
|
|
467
|
+
|
|
468
|
+
|
|
469
|
+
@dataclass(kw_only=True, slots=True)
class YieldLZMAError(Exception):
    """Raised by `yield_lzma` when the archive path does not exist."""

    # The missing archive path.
    path: Path

    @override
    def __str__(self) -> str:
        return f"Cannot uncompress {repr_str(self.path)} since it does not exist"
|
|
476
|
+
|
|
477
|
+
|
|
478
|
+
@contextmanager
def _yield_uncompressed(path: PathLike, func: PathToBinaryIO, /) -> Iterator[Path]:
    """Shared driver for `yield_bz2`/`yield_gzip`/`yield_lzma`.

    `func` opens `path` as a readable binary stream. The stream is first
    interpreted as a tar archive: a single top-level entry is yielded
    directly, otherwise the extraction directory is. Non-tar payloads fall
    back to yielding the raw decompressed bytes as a temporary file. Raises
    `_YieldUncompressedError` when `path` does not exist.

    NOTE(review): `TemporaryDirectory`/`TemporaryFile` are unqualified here
    and appear to be project helpers yielding `Path` objects (the yielded
    values support `.iterdir()`/`.open()`), not the stdlib `tempfile`
    classes — confirm against the rest of this module.
    """
    path = Path(path)
    try:
        with func(path) as buffer:
            try:
                with TarFile(fileobj=buffer) as tf, TemporaryDirectory() as temp:
                    # `filter="data"` rejects unsafe members (absolute paths,
                    # links escaping the tree) during extraction.
                    tf.extractall(path=temp, filter="data")
                    try:
                        yield one(temp.iterdir())
                    except (OneEmptyError, OneNonUniqueError):
                        # Zero or many top-level entries: yield the directory.
                        yield temp
            except ReadError as error:
                (arg,) = error.args
                if arg == "empty file":
                    # Empty payload: yield an empty directory.
                    with TemporaryDirectory() as temp:
                        yield temp
                elif arg in {"bad checksum", "invalid header", "truncated header"}:
                    # Not a tar stream: rewind and copy out the raw bytes.
                    _ = buffer.seek(0)
                    with TemporaryFile() as temp, temp.open(mode="wb") as fh:
                        copyfileobj(buffer, fh)
                        _ = fh.seek(0)
                        yield temp
                else:  # pragma: no cover
                    raise NotImplementedError(arg) from None
    except FileNotFoundError:
        raise _YieldUncompressedError(path=path) from None
|
|
505
|
+
|
|
506
|
+
|
|
507
|
+
@dataclass(kw_only=True, slots=True)
class _YieldUncompressedError(Exception):
    """Internal: raised by `_yield_uncompressed`; callers re-raise a public error."""

    # The missing archive path.
    path: Path
|
|
510
|
+
|
|
511
|
+
|
|
512
|
+
##
|
|
513
|
+
|
|
514
|
+
|
|
515
|
+
@contextmanager
def yield_zip(path: PathLike, /) -> Iterator[Path]:
    """Yield the contents of a Zip file.

    A single top-level entry is yielded directly; otherwise the extraction
    directory itself is. Raises `YieldZipError` when `path` does not exist.

    NOTE(review): `TemporaryDirectory` is unqualified and appears to be a
    project helper yielding a `Path` (the yielded value supports
    `.iterdir()`), not the stdlib class — confirm against this module.
    """
    path = Path(path)
    try:
        with ZipFile(path) as zf, TemporaryDirectory() as temp:
            zf.extractall(path=temp)
            try:
                yield one(temp.iterdir())
            except (OneEmptyError, OneNonUniqueError):
                # Zero or many top-level entries: yield the directory.
                yield temp
    except FileNotFoundError:
        raise YieldZipError(path=path) from None
|
|
528
|
+
|
|
529
|
+
|
|
530
|
+
@dataclass(kw_only=True, slots=True)
class YieldZipError(Exception):
    """Raised by `yield_zip` when the archive path does not exist."""

    # The missing archive path.
    path: Path

    @override
    def __str__(self) -> str:
        return f"Cannot uncompress {repr_str(self.path)} since it does not exist"
|
|
537
|
+
|
|
538
|
+
|
|
539
|
+
###############################################################################
|
|
540
|
+
#### constants ################################################################
|
|
541
|
+
###############################################################################
|
|
542
|
+
|
|
543
|
+
|
|
544
|
+
def is_none(obj: Any, /) -> TypeIs[None]:
    """Type guard: ``True`` iff ``obj`` is the ``None`` singleton."""
    result = obj is None
    return result


def is_not_none(obj: Any, /) -> bool:
    """``True`` iff ``obj`` is anything other than the ``None`` singleton."""
    result = obj is not None
    return result
|
|
552
|
+
|
|
553
|
+
|
|
554
|
+
##
|
|
555
|
+
|
|
556
|
+
|
|
557
|
+
def is_sentinel(obj: Any, /) -> TypeIs[Sentinel]:
    """Check if an object is the sentinel.

    Identity comparison against the module-level `sentinel` singleton
    imported from `utilities.constants`.
    """
    return obj is sentinel
|
|
560
|
+
|
|
561
|
+
|
|
562
|
+
###############################################################################
|
|
563
|
+
#### contextlib ###############################################################
|
|
564
|
+
###############################################################################
|
|
565
|
+
|
|
566
|
+
|
|
567
|
+
@contextmanager
def suppress_super_attribute_error() -> Iterator[None]:
    """Suppress the super() attribute error, for mix-ins.

    Only `AttributeError`s whose message matches the "'super' object has no
    attribute ..." pattern are swallowed; all others propagate.
    """
    try:
        yield
    except AttributeError as error:
        matched = _suppress_super_attribute_error_pattern.search(error.args[0])
        if matched is None:
            raise


_suppress_super_attribute_error_pattern = re.compile(
    r"'super' object has no attribute '\w+'"
)
|
|
580
|
+
|
|
581
|
+
|
|
582
|
+
###############################################################################
|
|
583
|
+
#### contextvars ##############################################################
|
|
584
|
+
###############################################################################
|
|
585
|
+
|
|
586
|
+
|
|
587
|
+
@contextmanager
def yield_temp_context_var(var: ContextVar[bool], /) -> Iterator[None]:
    """Temporarily set a boolean context variable to ``True``.

    The previous value is restored on exit, even when the body raises.
    """
    reset_token = var.set(True)
    try:
        yield
    finally:
        _ = var.reset(reset_token)
|
|
595
|
+
|
|
596
|
+
|
|
597
|
+
###############################################################################
|
|
598
|
+
#### dataclass ################################################################
|
|
599
|
+
###############################################################################
|
|
600
|
+
|
|
601
|
+
|
|
602
|
+
@overload
|
|
603
|
+
def replace_non_sentinel(
|
|
604
|
+
obj: Dataclass, /, *, in_place: Literal[True], **kwargs: Any
|
|
605
|
+
) -> None: ...
|
|
606
|
+
@overload
|
|
607
|
+
def replace_non_sentinel[T: Dataclass](
|
|
608
|
+
obj: T, /, *, in_place: Literal[False] = False, **kwargs: Any
|
|
609
|
+
) -> T: ...
|
|
610
|
+
@overload
|
|
611
|
+
def replace_non_sentinel[T: Dataclass](
|
|
612
|
+
obj: T, /, *, in_place: bool = False, **kwargs: Any
|
|
613
|
+
) -> T | None: ...
|
|
614
|
+
def replace_non_sentinel[T: Dataclass](
|
|
615
|
+
obj: T, /, *, in_place: bool = False, **kwargs: Any
|
|
616
|
+
) -> T | None:
|
|
617
|
+
"""Replace attributes on a dataclass, filtering out sentinel values."""
|
|
618
|
+
if in_place:
|
|
619
|
+
for k, v in kwargs.items():
|
|
620
|
+
if not is_sentinel(v):
|
|
621
|
+
setattr(obj, k, v)
|
|
622
|
+
return None
|
|
623
|
+
return replace(obj, **{k: v for k, v in kwargs.items() if not is_sentinel(v)})
|
|
624
|
+
|
|
625
|
+
|
|
626
|
+
###############################################################################
|
|
627
|
+
#### functools ################################################################
|
|
628
|
+
###############################################################################
|
|
629
|
+
|
|
630
|
+
|
|
631
|
+
def not_func[**P](func: Callable[P, bool], /) -> Callable[P, bool]:
|
|
632
|
+
"""Lift a boolean-valued function to return its conjugation."""
|
|
633
|
+
|
|
634
|
+
@wraps(func)
|
|
635
|
+
def wrapped(*args: P.args, **kwargs: P.kwargs) -> bool:
|
|
636
|
+
return not func(*args, **kwargs)
|
|
637
|
+
|
|
638
|
+
return wrapped
|
|
639
|
+
|
|
640
|
+
|
|
641
|
+
###############################################################################
|
|
642
|
+
#### functions ################################################################
|
|
643
|
+
###############################################################################
|
|
644
|
+
|
|
645
|
+
|
|
646
|
+
@overload
|
|
647
|
+
def first[T](tup: tuple[T], /) -> T: ...
|
|
648
|
+
@overload
|
|
649
|
+
def first[T](tup: tuple[T, Any], /) -> T: ...
|
|
650
|
+
@overload
|
|
651
|
+
def first[T](tup: tuple[T, Any, Any], /) -> T: ...
|
|
652
|
+
@overload
|
|
653
|
+
def first[T](tup: tuple[T, Any, Any, Any], /) -> T: ...
|
|
654
|
+
def first(tup: tuple[Any, ...], /) -> Any:
|
|
655
|
+
"""Get the first element in a tuple."""
|
|
656
|
+
return tup[0]
|
|
657
|
+
|
|
658
|
+
|
|
659
|
+
@overload
|
|
660
|
+
def second[T](tup: tuple[Any, T], /) -> T: ...
|
|
661
|
+
@overload
|
|
662
|
+
def second[T](tup: tuple[Any, T, Any], /) -> T: ...
|
|
663
|
+
@overload
|
|
664
|
+
def second[T](tup: tuple[Any, T, Any, Any], /) -> T: ...
|
|
665
|
+
def second(tup: tuple[Any, ...], /) -> Any:
|
|
666
|
+
"""Get the second element in a tuple."""
|
|
667
|
+
return tup[1]
|
|
668
|
+
|
|
669
|
+
|
|
670
|
+
@overload
|
|
671
|
+
def last[T](tup: tuple[T], /) -> T: ...
|
|
672
|
+
@overload
|
|
673
|
+
def last[T](tup: tuple[Any, T], /) -> T: ...
|
|
674
|
+
@overload
|
|
675
|
+
def last[T](tup: tuple[Any, Any, T], /) -> T: ...
|
|
676
|
+
@overload
|
|
677
|
+
def last[T](tup: tuple[Any, Any, Any, T], /) -> T: ...
|
|
678
|
+
def last(tup: tuple[Any, ...], /) -> Any:
|
|
679
|
+
"""Get the last element in a tuple."""
|
|
680
|
+
return tup[-1]
|
|
681
|
+
|
|
682
|
+
|
|
683
|
+
##
|
|
684
|
+
|
|
685
|
+
|
|
686
|
+
def identity[T](obj: T, /) -> T:
|
|
687
|
+
"""Return the object itself."""
|
|
688
|
+
return obj
|
|
689
|
+
|
|
690
|
+
|
|
691
|
+
###############################################################################
|
|
692
|
+
#### grp ######################################################################
|
|
693
|
+
###############################################################################
|
|
694
|
+
|
|
695
|
+
|
|
696
|
+
# Re-exported private helper: resolves a numeric GID to a group name.
# NOTE(review): presumably returns `None` when the GID cannot be resolved
# (matching `get_file_group`'s `str | None` return) — confirm against
# `utilities.constants._get_gid_name`.
get_gid_name = utilities.constants._get_gid_name  # noqa: SLF001


def get_file_group(path: PathLike, /) -> str | None:
    """Get the group of a file.

    Looks up the file's ``st_gid`` and resolves it via `get_gid_name`.
    """
    gid = Path(path).stat().st_gid
    return get_gid_name(gid)
|
|
703
|
+
|
|
704
|
+
|
|
705
|
+
###############################################################################
|
|
706
|
+
#### itertools ################################################################
|
|
707
|
+
###############################################################################
|
|
708
|
+
|
|
709
|
+
|
|
710
|
+
def always_iterable[T](obj: MaybeIterable[T], /) -> Iterable[T]:
|
|
711
|
+
"""Typed version of `always_iterable`."""
|
|
712
|
+
obj = cast("Any", obj)
|
|
713
|
+
if isinstance(obj, str | bytes):
|
|
714
|
+
return cast("list[T]", [obj])
|
|
715
|
+
try:
|
|
716
|
+
return iter(cast("Iterable[T]", obj))
|
|
717
|
+
except TypeError:
|
|
718
|
+
return cast("list[T]", [obj])
|
|
719
|
+
|
|
720
|
+
|
|
721
|
+
##
|
|
722
|
+
|
|
723
|
+
|
|
724
|
+
def chunked[T](iterable: Iterable[T], n: int, /) -> Iterator[Sequence[T]]:
|
|
725
|
+
"""Break an iterable into lists of length n."""
|
|
726
|
+
return iter(partial(take, n, iter(iterable)), [])
|
|
727
|
+
|
|
728
|
+
|
|
729
|
+
##
|
|
730
|
+
|
|
731
|
+
|
|
732
|
+
def one[T](*iterables: Iterable[T]) -> T:
|
|
733
|
+
"""Return the unique value in a set of iterables."""
|
|
734
|
+
it = chain(*iterables)
|
|
735
|
+
try:
|
|
736
|
+
first = next(it)
|
|
737
|
+
except StopIteration:
|
|
738
|
+
raise OneEmptyError(iterables=iterables) from None
|
|
739
|
+
try:
|
|
740
|
+
second = next(it)
|
|
741
|
+
except StopIteration:
|
|
742
|
+
return first
|
|
743
|
+
raise OneNonUniqueError(iterables=iterables, first=first, second=second)
|
|
744
|
+
|
|
745
|
+
|
|
746
|
+
@dataclass(kw_only=True, slots=True)
class OneError[T](Exception):
    """Base class for `one` failures."""

    # The iterables passed to `one` (one-shot iterators may be partially consumed).
    iterables: tuple[Iterable[T], ...]
|
|
749
|
+
|
|
750
|
+
|
|
751
|
+
@dataclass(kw_only=True, slots=True)
class OneEmptyError[T](OneError[T]):
    """Raised by `one` when the iterables contain no values."""

    @override
    def __str__(self) -> str:
        return f"Iterable(s) {repr_(self.iterables)} must not be empty"
|
|
756
|
+
|
|
757
|
+
|
|
758
|
+
@dataclass(kw_only=True, slots=True)
|
|
759
|
+
class OneNonUniqueError[T](OneError):
|
|
760
|
+
first: T
|
|
761
|
+
second: T
|
|
762
|
+
|
|
763
|
+
@override
|
|
764
|
+
def __str__(self) -> str:
|
|
765
|
+
return f"Iterable(s) {repr_(self.iterables)} must contain exactly one item; got {self.first}, {self.second} and perhaps more"
|
|
766
|
+
|
|
767
|
+
|
|
768
|
+
##
|
|
769
|
+
|
|
770
|
+
|
|
771
|
+
def one_str(
    iterable: Iterable[str],
    text: str,
    /,
    *,
    head: bool = False,
    case_sensitive: bool = False,
) -> str:
    """Find the unique string in an iterable."""
    candidates = list(iterable)
    # Build a lazy stream of candidates matching `text` under the selected
    # comparison mode (prefix vs equality, case-sensitive vs not).
    if head:
        if case_sensitive:
            matched = (c for c in candidates if c.startswith(text))
        else:
            lowered = text.lower()
            matched = (c for c in candidates if c.lower().startswith(lowered))
    elif case_sensitive:
        matched = (c for c in candidates if c == text)
    else:
        lowered = text.lower()
        matched = (c for c in candidates if c.lower() == lowered)
    try:
        return one(matched)
    except OneEmptyError:
        raise OneStrEmptyError(
            iterable=candidates, text=text, head=head, case_sensitive=case_sensitive
        ) from None
    except OneNonUniqueError as error:
        raise OneStrNonUniqueError(
            iterable=candidates,
            text=text,
            head=head,
            case_sensitive=case_sensitive,
            first=error.first,
            second=error.second,
        ) from None
|
|
807
|
+
|
|
808
|
+
|
|
809
|
+
@dataclass(kw_only=True, slots=True)
class OneStrError(Exception):
    """Base class for `one_str` errors; records the search parameters."""

    iterable: Iterable[str]
    text: str
    head: bool = False
    case_sensitive: bool = False
|
|
815
|
+
|
|
816
|
+
|
|
817
|
+
@dataclass(kw_only=True, slots=True)
|
|
818
|
+
class OneStrEmptyError(OneStrError):
|
|
819
|
+
@override
|
|
820
|
+
def __str__(self) -> str:
|
|
821
|
+
head = f"Iterable {repr_(self.iterable)} does not contain"
|
|
822
|
+
match self.head, self.case_sensitive:
|
|
823
|
+
case False, True:
|
|
824
|
+
tail = repr(self.text)
|
|
825
|
+
case False, False:
|
|
826
|
+
tail = f"{repr_(self.text)} (modulo case)"
|
|
827
|
+
case True, True:
|
|
828
|
+
tail = f"any string starting with {repr_(self.text)}"
|
|
829
|
+
case True, False:
|
|
830
|
+
tail = f"any string starting with {repr_(self.text)} (modulo case)"
|
|
831
|
+
case never:
|
|
832
|
+
assert_never(never)
|
|
833
|
+
return f"{head} {tail}"
|
|
834
|
+
|
|
835
|
+
|
|
836
|
+
@dataclass(kw_only=True, slots=True)
class OneStrNonUniqueError(OneStrError):
    """Raised when `one_str` finds more than one matching string."""

    first: str
    second: str

    @override
    def __str__(self) -> str:
        prefix = f"Iterable {repr_(self.iterable)} must contain"
        # Describe what was searched for, mirroring `one_str`'s modes.
        if self.head:
            middle = f"exactly one string starting with {repr_(self.text)}"
        else:
            middle = f"{repr_(self.text)} exactly once"
        if not self.case_sensitive:
            middle += " (modulo case)"
        return f"{prefix} {middle}; got {repr_(self.first)}, {repr_(self.second)} and perhaps more"
|
|
858
|
+
|
|
859
|
+
|
|
860
|
+
##
|
|
861
|
+
|
|
862
|
+
|
|
863
|
+
def take[T](n: int, iterable: Iterable[T], /) -> Sequence[T]:
|
|
864
|
+
"""Return first n items of the iterable as a list."""
|
|
865
|
+
return list(islice(iterable, n))
|
|
866
|
+
|
|
867
|
+
|
|
868
|
+
##
|
|
869
|
+
|
|
870
|
+
|
|
871
|
+
@overload
|
|
872
|
+
def transpose[T1](iterable: Iterable[tuple[T1]], /) -> tuple[list[T1]]: ...
|
|
873
|
+
@overload
|
|
874
|
+
def transpose[T1, T2](
|
|
875
|
+
iterable: Iterable[tuple[T1, T2]], /
|
|
876
|
+
) -> tuple[list[T1], list[T2]]: ...
|
|
877
|
+
@overload
|
|
878
|
+
def transpose[T1, T2, T3](
|
|
879
|
+
iterable: Iterable[tuple[T1, T2, T3]], /
|
|
880
|
+
) -> tuple[list[T1], list[T2], list[T3]]: ...
|
|
881
|
+
@overload
|
|
882
|
+
def transpose[T1, T2, T3, T4](
|
|
883
|
+
iterable: Iterable[tuple[T1, T2, T3, T4]], /
|
|
884
|
+
) -> tuple[list[T1], list[T2], list[T3], list[T4]]: ...
|
|
885
|
+
@overload
|
|
886
|
+
def transpose[T1, T2, T3, T4, T5](
|
|
887
|
+
iterable: Iterable[tuple[T1, T2, T3, T4, T5]], /
|
|
888
|
+
) -> tuple[list[T1], list[T2], list[T3], list[T4], list[T5]]: ...
|
|
889
|
+
def transpose(iterable: Iterable[tuple[Any]]) -> tuple[list[Any], ...]: # pyright: ignore[reportInconsistentOverload]
|
|
890
|
+
"""Typed verison of `transpose`."""
|
|
891
|
+
return tuple(map(list, zip(*iterable, strict=True)))
|
|
892
|
+
|
|
893
|
+
|
|
894
|
+
##
|
|
895
|
+
|
|
896
|
+
|
|
897
|
+
def unique_everseen[T](
|
|
898
|
+
iterable: Iterable[T], /, *, key: Callable[[T], Any] | None = None
|
|
899
|
+
) -> Iterator[T]:
|
|
900
|
+
"""Yield unique elements, preserving order."""
|
|
901
|
+
seenset = set()
|
|
902
|
+
seenset_add = seenset.add
|
|
903
|
+
seenlist = []
|
|
904
|
+
seenlist_add = seenlist.append
|
|
905
|
+
use_key = key is not None
|
|
906
|
+
for element in iterable:
|
|
907
|
+
k = key(element) if use_key else element
|
|
908
|
+
try:
|
|
909
|
+
if k not in seenset:
|
|
910
|
+
seenset_add(k)
|
|
911
|
+
yield element
|
|
912
|
+
except TypeError:
|
|
913
|
+
if k not in seenlist:
|
|
914
|
+
seenlist_add(k)
|
|
915
|
+
yield element
|
|
916
|
+
|
|
917
|
+
|
|
918
|
+
###############################################################################
|
|
919
|
+
#### os #######################################################################
|
|
920
|
+
###############################################################################
|
|
921
|
+
|
|
922
|
+
|
|
923
|
+
def chmod(path: PathLike, perms: PermissionsLike, /) -> None:
    """Change file mode.

    `perms` may be a `Permissions`, an int or a `u=…,g=…,o=…` string; it is
    normalized via `Permissions.new` before being applied.
    """
    Path(path).chmod(int(Permissions.new(perms)))
|
|
926
|
+
|
|
927
|
+
|
|
928
|
+
##
|
|
929
|
+
|
|
930
|
+
|
|
931
|
+
def copy(
    src: PathLike,
    dest: PathLike,
    /,
    *,
    overwrite: bool = False,
    perms: PermissionsLike | None = None,
    owner: str | int | None = None,
    group: str | int | None = None,
) -> None:
    """Copy a file atomically."""
    # Delegate to the shared copy/move engine with mode "copy".
    _copy_or_move(
        Path(src),
        Path(dest),
        "copy",
        overwrite=overwrite,
        perms=perms,
        owner=owner,
        group=group,
    )
|
|
946
|
+
|
|
947
|
+
|
|
948
|
+
def move(
    src: PathLike,
    dest: PathLike,
    /,
    *,
    overwrite: bool = False,
    perms: PermissionsLike | None = None,
    owner: str | int | None = None,
    group: str | int | None = None,
) -> None:
    """Move a file atomically."""
    # Delegate to the shared copy/move engine with mode "move".
    _copy_or_move(
        Path(src),
        Path(dest),
        "move",
        overwrite=overwrite,
        perms=perms,
        owner=owner,
        group=group,
    )
|
|
963
|
+
|
|
964
|
+
|
|
965
|
+
@dataclass(kw_only=True, slots=True)
class CopyOrMoveError(Exception): ...  # base class for `copy`/`move` failures
|
|
967
|
+
|
|
968
|
+
|
|
969
|
+
def _copy_or_move(
    src: Path,
    dest: Path,
    mode: CopyOrMove,
    /,
    *,
    overwrite: bool = False,
    perms: PermissionsLike | None = None,
    owner: str | int | None = None,
    group: str | int | None = None,
) -> None:
    """Shared engine for `copy`/`move`.

    Dispatches on (source kind, destination kind, overwrite) to a specialised
    helper, then optionally fixes up permissions and ownership on `dest`.
    """
    match file_or_dir(src), file_or_dir(dest), overwrite:
        case None, _, _:
            # Source does not exist: nothing to copy or move.
            raise _CopyOrMoveSourceNotFoundError(src=src)
        case "file" | "dir", "file" | "dir", False:
            # Destination exists and overwriting was not requested.
            raise _CopyOrMoveDestinationExistsError(mode=mode, src=src, dest=dest)
        case ("file", None, _) | ("file", "file", True):
            _copy_or_move__file_to_file(src, dest, mode)
        case "file", "dir", True:
            _copy_or_move__file_to_dir(src, dest, mode)
        case ("dir", None, _) | ("dir", "dir", True):
            _copy_or_move__dir_to_dir(src, dest, mode)
        case "dir", "file", True:
            _copy_or_move__dir_to_file(src, dest, mode)
        case never:
            assert_never(never)
    if perms is not None:
        chmod(dest, perms)
    if (owner is not None) or (group is not None):
        chown(dest, user=owner, group=group)
|
|
999
|
+
|
|
1000
|
+
|
|
1001
|
+
@dataclass(kw_only=True, slots=True)
class _CopyOrMoveSourceNotFoundError(CopyOrMoveError):
    """Raised when the source path does not exist."""

    src: Path

    @override
    def __str__(self) -> str:
        return f"Source {repr_str(self.src)} does not exist"
|
|
1008
|
+
|
|
1009
|
+
|
|
1010
|
+
@dataclass(kw_only=True, slots=True)
class _CopyOrMoveDestinationExistsError(CopyOrMoveError):
    """Raised when the destination exists and `overwrite=False`."""

    mode: CopyOrMove
    src: Path
    dest: Path

    @override
    def __str__(self) -> str:
        return f"Cannot {self.mode} source {repr_str(self.src)} since destination {repr_str(self.dest)} already exists"
|
|
1019
|
+
|
|
1020
|
+
|
|
1021
|
+
def _copy_or_move__file_to_file(src: Path, dest: Path, mode: CopyOrMove, /) -> None:
    """Copy/move a file onto a file (or missing) destination.

    Staging into a temp file adjacent to `dest` and finishing with
    `Path.replace` keeps the update atomic on the destination filesystem.
    """
    with yield_adjacent_temp_file(dest) as temp:
        _copy_or_move__shutil_file(src, temp, mode, dest)
|
|
1024
|
+
|
|
1025
|
+
|
|
1026
|
+
def _copy_or_move__file_to_dir(src: Path, dest: Path, mode: CopyOrMove, /) -> None:
    """Copy/move a file over an existing *directory* destination.

    The existing directory is renamed into an adjacent temp dir (presumably
    cleaned up by the context manager on exit — confirm against
    `yield_adjacent_temp_dir`), then the file is staged and renamed into place.
    """
    with (
        yield_adjacent_temp_dir(dest) as temp_dir,
        yield_adjacent_temp_file(dest) as temp_file,
    ):
        # Move the old directory out of the way first.
        _ = dest.replace(temp_dir)
        _copy_or_move__shutil_file(src, temp_file, mode, dest)
|
|
1033
|
+
|
|
1034
|
+
|
|
1035
|
+
def _copy_or_move__dir_to_dir(src: Path, dest: Path, mode: CopyOrMove, /) -> None:
    """Copy/move a directory onto a directory (or missing) destination."""
    with yield_adjacent_temp_dir(dest) as temp1, yield_adjacent_temp_dir(dest) as temp2:
        # `dest` may not exist (the "dir" -> None case); ignore that here.
        with suppress(FileNotFoundError):
            _ = dest.replace(temp1)
        _copy_or_move__shutil_dir(src, temp2, mode, dest)
|
|
1040
|
+
|
|
1041
|
+
|
|
1042
|
+
def _copy_or_move__dir_to_file(src: Path, dest: Path, mode: CopyOrMove, /) -> None:
    """Copy/move a directory over an existing *file* destination.

    The existing file is renamed into an adjacent temp file (presumably removed
    by the context manager on exit — confirm), then the directory is staged and
    renamed into place.
    """
    with (
        yield_adjacent_temp_file(dest) as temp_file,
        yield_adjacent_temp_dir(dest) as temp_dir,
    ):
        # Move the old file out of the way first.
        _ = dest.replace(temp_file)
        _copy_or_move__shutil_dir(src, temp_dir, mode, dest)
|
|
1049
|
+
|
|
1050
|
+
|
|
1051
|
+
def _copy_or_move__shutil_file(
    src: Path, temp: Path, mode: CopyOrMove, dest: Path, /
) -> None:
    """Stage `src` into `temp` (copy or move), then rename `temp` onto `dest`."""
    if mode == "copy":
        _ = copyfile(src, temp)
    elif mode == "move":
        _ = shutil.move(src, temp)
    else:
        assert_never(mode)
    # `Path.replace` is atomic on the same filesystem.
    _ = temp.replace(dest)
|
|
1062
|
+
|
|
1063
|
+
|
|
1064
|
+
def _copy_or_move__shutil_dir(
    src: Path, temp: Path, mode: CopyOrMove, dest: Path, /
) -> None:
    """Stage directory `src` into `temp` (copy or move), then rename onto `dest`."""
    if mode == "copy":
        _ = copytree(src, temp, dirs_exist_ok=True)
        _ = temp.replace(dest)
    elif mode == "move":
        _ = shutil.move(src, temp)
        # `shutil.move` places `src` *inside* `temp`, so rename that child.
        _ = (temp / src.name).replace(dest)
    else:
        assert_never(mode)
|
|
1076
|
+
|
|
1077
|
+
|
|
1078
|
+
##
|
|
1079
|
+
|
|
1080
|
+
|
|
1081
|
+
@overload
def get_env(
    key: str, /, *, case_sensitive: bool = False, default: str, nullable: bool = False
) -> str: ...
@overload
def get_env(
    key: str,
    /,
    *,
    case_sensitive: bool = False,
    default: None = None,
    nullable: Literal[False] = False,
) -> str: ...
@overload
def get_env(
    key: str,
    /,
    *,
    case_sensitive: bool = False,
    default: str | None = None,
    nullable: bool = False,
) -> str | None: ...
def get_env(
    key: str,
    /,
    *,
    case_sensitive: bool = False,
    default: str | None = None,
    nullable: bool = False,
) -> str | None:
    """Get an environment variable."""
    try:
        resolved = one_str(environ, key, case_sensitive=case_sensitive)
    except OneStrEmptyError:
        # A provided default wins over `nullable`; otherwise `nullable`
        # yields None, and failing both we raise.
        if isinstance(default, str):
            return default
        if nullable:
            return None
        raise GetEnvError(key=key, case_sensitive=case_sensitive) from None
    return environ[resolved]
|
|
1125
|
+
|
|
1126
|
+
|
|
1127
|
+
@dataclass(kw_only=True, slots=True)
class GetEnvError(Exception):
    """Raised when an environment variable cannot be found."""

    key: str
    case_sensitive: bool = False

    @override
    def __str__(self) -> str:
        message = f"No environment variable {repr_(self.key)}"
        if not self.case_sensitive:
            message = f"{message} (modulo case)"
        return message
|
|
1136
|
+
|
|
1137
|
+
|
|
1138
|
+
##
|
|
1139
|
+
|
|
1140
|
+
|
|
1141
|
+
def has_env(key: str, /, *, case_sensitive: bool = False) -> bool:
    """Check if an environment variable is defined."""
    # (Fixed docstring typo: "is define" -> "is defined".)
    try:
        _ = get_env(key, case_sensitive=case_sensitive)
    except GetEnvError:
        return False
    return True
|
|
1148
|
+
|
|
1149
|
+
|
|
1150
|
+
##
|
|
1151
|
+
|
|
1152
|
+
|
|
1153
|
+
def is_debug() -> bool:
    """Check if we are in `DEBUG` mode."""
    # True iff a `DEBUG` environment variable is set (case-insensitive lookup).
    return has_env("DEBUG")
|
|
1156
|
+
|
|
1157
|
+
|
|
1158
|
+
def is_pytest() -> bool:
    """Check if `pytest` is running."""
    # pytest exports `PYTEST_VERSION` into the environment while tests run.
    return has_env("PYTEST_VERSION")
|
|
1161
|
+
|
|
1162
|
+
|
|
1163
|
+
##
|
|
1164
|
+
|
|
1165
|
+
|
|
1166
|
+
def move_many(
    *paths: tuple[PathLike, PathLike],
    overwrite: bool = False,
    perms: PermissionsLike | None = None,
    owner: str | int | None = None,
    group: str | int | None = None,
) -> None:
    """Move a set of files concurrently."""
    # NOTE(review): the moves below run sequentially; "concurrently" presumably
    # refers to the all-or-nothing staging via `ExitStack` — confirm intent.
    with ExitStack() as stack:
        for src, dest in paths:
            # Stage a write path per destination; `ExitStack` unwinds all of
            # them together if any move fails.
            temp = stack.enter_context(yield_write_path(dest, overwrite=overwrite))
            move(src, temp, overwrite=overwrite, perms=perms, owner=owner, group=group)
|
|
1178
|
+
|
|
1179
|
+
|
|
1180
|
+
##
|
|
1181
|
+
|
|
1182
|
+
|
|
1183
|
+
@contextmanager
def yield_temp_environ(
    env: Mapping[str, str | None] | None = None, **env_kwargs: str | None
) -> Iterator[None]:
    """Yield a temporary environment."""
    # Keyword arguments override entries from `env`; None means "unset".
    overrides: dict[str, str | None] = ({} if env is None else dict(env)) | env_kwargs
    saved = {name: getenv(name) for name in overrides}
    _yield_temp_environ_apply(overrides)
    try:
        yield
    finally:
        # Restore the exact prior state, re-deleting keys that were unset.
        _yield_temp_environ_apply(saved)
|
|
1195
|
+
|
|
1196
|
+
|
|
1197
|
+
def _yield_temp_environ_apply(mapping: Mapping[str, str | None], /) -> None:
|
|
1198
|
+
for key, value in mapping.items():
|
|
1199
|
+
if value is None:
|
|
1200
|
+
with suppress(KeyError):
|
|
1201
|
+
del environ[key]
|
|
1202
|
+
else:
|
|
1203
|
+
environ[key] = value
|
|
1204
|
+
|
|
1205
|
+
|
|
1206
|
+
###############################################################################
|
|
1207
|
+
#### pathlib ##################################################################
|
|
1208
|
+
###############################################################################
|
|
1209
|
+
|
|
1210
|
+
|
|
1211
|
+
@overload
def file_or_dir(path: PathLike, /, *, exists: Literal[True]) -> FileOrDir: ...
@overload
def file_or_dir(path: PathLike, /, *, exists: bool = False) -> FileOrDir | None: ...
def file_or_dir(path: PathLike, /, *, exists: bool = False) -> FileOrDir | None:
    """Classify a path as a file, directory or non-existent."""
    path = Path(path)
    # Snapshot all three checks up front, as the original did.
    path_exists, is_file, is_dir = path.exists(), path.is_file(), path.is_dir()
    if path_exists and is_file and not is_dir:
        return "file"
    if path_exists and is_dir and not is_file:
        return "dir"
    if not (path_exists or is_file or is_dir):
        if exists:
            raise _FileOrDirMissingError(path=path)
        return None
    # Exists but is neither a regular file nor a directory (or inconsistent).
    raise _FileOrDirTypeError(path=path)
|
|
1229
|
+
|
|
1230
|
+
|
|
1231
|
+
@dataclass(kw_only=True, slots=True)
class FileOrDirError(Exception):
    """Base class for `file_or_dir` errors."""

    path: Path
|
|
1234
|
+
|
|
1235
|
+
|
|
1236
|
+
@dataclass(kw_only=True, slots=True)
class _FileOrDirMissingError(FileOrDirError):
    """Raised when the path does not exist and `exists=True` was requested."""

    @override
    def __str__(self) -> str:
        return f"Path does not exist: {repr_str(self.path)}"
|
|
1241
|
+
|
|
1242
|
+
|
|
1243
|
+
@dataclass(kw_only=True, slots=True)
class _FileOrDirTypeError(FileOrDirError):
    """Raised when the path exists but is neither a file nor a directory."""

    @override
    def __str__(self) -> str:
        return f"Path is neither a file nor a directory: {repr_str(self.path)}"
|
|
1248
|
+
|
|
1249
|
+
|
|
1250
|
+
##
|
|
1251
|
+
|
|
1252
|
+
|
|
1253
|
+
@contextmanager
def yield_temp_cwd(path: PathLike, /) -> Iterator[None]:
    """Yield a temporary working directory."""
    prev = Path.cwd()
    chdir(path)
    try:
        yield
    finally:
        # Always restore the previous working directory, even on error.
        chdir(prev)
|
|
1262
|
+
|
|
1263
|
+
|
|
1264
|
+
###############################################################################
|
|
1265
|
+
#### permissions ##############################################################
|
|
1266
|
+
###############################################################################
|
|
1267
|
+
|
|
1268
|
+
|
|
1269
|
+
# Anything accepted by `Permissions.new`: an existing `Permissions`, an
# integer mode, or a `u=…,g=…,o=…` string.
type PermissionsLike = Permissions | int | str
|
|
1270
|
+
|
|
1271
|
+
|
|
1272
|
+
@dataclass(order=True, unsafe_hash=True, kw_only=True, slots=True)
class Permissions:
    """A set of file permissions.

    Nine boolean flags mirror the POSIX rwx bits for user/group/others.
    Convert with `int(...)` (a `stat`-style mode), `repr(...)`/`str(...)`
    (a `u=…,g=…,o=…` string), or build via the `from_*` constructors.
    """

    # One flag per rwx bit for each of user/group/others.
    user_read: bool = False
    user_write: bool = False
    user_execute: bool = False
    group_read: bool = False
    group_write: bool = False
    group_execute: bool = False
    others_read: bool = False
    others_write: bool = False
    others_execute: bool = False

    def __int__(self) -> int:
        """Return the `stat`-style mode formed by OR-ing the enabled flags."""
        flags: list[int] = [
            S_IRUSR if self.user_read else 0,
            S_IWUSR if self.user_write else 0,
            S_IXUSR if self.user_execute else 0,
            S_IRGRP if self.group_read else 0,
            S_IWGRP if self.group_write else 0,
            S_IXGRP if self.group_execute else 0,
            S_IROTH if self.others_read else 0,
            S_IWOTH if self.others_write else 0,
            S_IXOTH if self.others_execute else 0,
        ]
        return reduce(or_, flags)

    @override
    def __repr__(self) -> str:
        """Return the `u=…,g=…,o=…` representation (parseable by `from_text`)."""
        return ",".join([
            self._repr_parts(
                "u",
                read=self.user_read,
                write=self.user_write,
                execute=self.user_execute,
            ),
            self._repr_parts(
                "g",
                read=self.group_read,
                write=self.group_write,
                execute=self.group_execute,
            ),
            self._repr_parts(
                "o",
                read=self.others_read,
                write=self.others_write,
                execute=self.others_execute,
            ),
        ])

    def _repr_parts(
        self,
        prefix: Literal["u", "g", "o"],
        /,
        *,
        read: bool = False,
        write: bool = False,
        execute: bool = False,
    ) -> str:
        """Render one `prefix=rwx` segment, omitting disabled bits."""
        parts: list[str] = [
            "r" if read else "",
            "w" if write else "",
            "x" if execute else "",
        ]
        return f"{prefix}={''.join(parts)}"

    @override
    def __str__(self) -> str:
        return repr(self)

    @classmethod
    def new(cls, perms: PermissionsLike, /) -> Self:
        """Normalize any `PermissionsLike` value into a `Permissions`."""
        match perms:
            case Permissions():
                return cast("Self", perms)
            case int():
                return cls.from_int(perms)
            case str():
                return cls.from_text(perms)
            case never:
                assert_never(never)

    @classmethod
    def from_human_int(cls, n: int, /) -> Self:
        """Build from a "human" decimal like 755, each digit an octal rwx triple."""
        if not (0 <= n <= 777):
            raise _PermissionsFromHumanIntRangeError(n=n)
        # Split the decimal into hundreds/tens/units digits.
        user_read, user_write, user_execute = cls._from_human_int(n, (n // 100) % 10)
        group_read, group_write, group_execute = cls._from_human_int(n, (n // 10) % 10)
        others_read, others_write, others_execute = cls._from_human_int(n, n % 10)
        return cls(
            user_read=user_read,
            user_write=user_write,
            user_execute=user_execute,
            group_read=group_read,
            group_write=group_write,
            group_execute=group_execute,
            others_read=others_read,
            others_write=others_write,
            others_execute=others_execute,
        )

    @classmethod
    def _from_human_int(cls, n: int, digit: int, /) -> tuple[bool, bool, bool]:
        """Decode one 0-7 digit into its (read, write, execute) bits."""
        if not (0 <= digit <= 7):
            # Digits 8/9 are decimal-valid but meaningless as rwx triples.
            raise _PermissionsFromHumanIntDigitError(n=n, digit=digit)
        return bool(4 & digit), bool(2 & digit), bool(1 & digit)

    @classmethod
    def from_int(cls, n: int, /) -> Self:
        """Build from a `stat`-style mode in the range 0o0-0o777."""
        if 0o0 <= n <= 0o777:
            return cls(
                user_read=bool(n & S_IRUSR),
                user_write=bool(n & S_IWUSR),
                user_execute=bool(n & S_IXUSR),
                group_read=bool(n & S_IRGRP),
                group_write=bool(n & S_IWGRP),
                group_execute=bool(n & S_IXGRP),
                others_read=bool(n & S_IROTH),
                others_write=bool(n & S_IWOTH),
                others_execute=bool(n & S_IXOTH),
            )
        raise _PermissionsFromIntError(n=n)

    @classmethod
    def from_path(cls, path: PathLike, /) -> Self:
        """Read the permissions of an existing path."""
        return cls.from_int(S_IMODE(Path(path).stat().st_mode))

    @classmethod
    def from_text(cls, text: str, /) -> Self:
        """Parse a `u=…,g=…,o=…` string (the `repr` format)."""
        try:
            user, group, others = extract_groups(
                r"^u=(r?w?x?),g=(r?w?x?),o=(r?w?x?)$", text
            )
        except ExtractGroupsError:
            raise _PermissionsFromTextError(text=text) from None
        user_read, user_write, user_execute = cls._from_text_part(user)
        group_read, group_write, group_execute = cls._from_text_part(group)
        others_read, others_write, others_execute = cls._from_text_part(others)
        return cls(
            user_read=user_read,
            user_write=user_write,
            user_execute=user_execute,
            group_read=group_read,
            group_write=group_write,
            group_execute=group_execute,
            others_read=others_read,
            others_write=others_write,
            others_execute=others_execute,
        )

    @classmethod
    def _from_text_part(cls, text: str, /) -> tuple[bool, bool, bool]:
        """Decode one `rwx` fragment into its (read, write, execute) bits."""
        read, write, execute = extract_groups("^(r?)(w?)(x?)$", text)
        return read != "", write != "", execute != ""

    @property
    def human_int(self) -> int:
        """Return the "human" decimal form, e.g. 755 (inverse of `from_human_int`)."""
        return (
            100
            * self._human_int(
                read=self.user_read, write=self.user_write, execute=self.user_execute
            )
            + 10
            * self._human_int(
                read=self.group_read, write=self.group_write, execute=self.group_execute
            )
            + self._human_int(
                read=self.others_read,
                write=self.others_write,
                execute=self.others_execute,
            )
        )

    def _human_int(
        self, *, read: bool = False, write: bool = False, execute: bool = False
    ) -> int:
        """Encode one rwx triple as a 0-7 digit (r=4, w=2, x=1)."""
        return (4 if read else 0) + (2 if write else 0) + (1 if execute else 0)

    def replace(
        self,
        *,
        user_read: bool | Sentinel = sentinel,
        user_write: bool | Sentinel = sentinel,
        user_execute: bool | Sentinel = sentinel,
        group_read: bool | Sentinel = sentinel,
        group_write: bool | Sentinel = sentinel,
        group_execute: bool | Sentinel = sentinel,
        others_read: bool | Sentinel = sentinel,
        others_write: bool | Sentinel = sentinel,
        others_execute: bool | Sentinel = sentinel,
    ) -> Self:
        """Return a copy with the given flags changed; sentinels keep the old value."""
        return replace_non_sentinel(
            self,
            user_read=user_read,
            user_write=user_write,
            user_execute=user_execute,
            group_read=group_read,
            group_write=group_write,
            group_execute=group_execute,
            others_read=others_read,
            others_write=others_write,
            others_execute=others_execute,
        )
|
|
1476
|
+
|
|
1477
|
+
|
|
1478
|
+
@dataclass(kw_only=True, slots=True)
class PermissionsError(Exception): ...  # base class for `Permissions` errors
|
|
1480
|
+
|
|
1481
|
+
|
|
1482
|
+
@dataclass(kw_only=True, slots=True)
class _PermissionsFromHumanIntError(PermissionsError):
    """Base class for `Permissions.from_human_int` errors."""

    n: int
|
|
1485
|
+
|
|
1486
|
+
|
|
1487
|
+
@dataclass(kw_only=True, slots=True)
class _PermissionsFromHumanIntRangeError(_PermissionsFromHumanIntError):
    """Raised when the human integer is outside 0-777."""

    @override
    def __str__(self) -> str:
        return f"Invalid human integer for permissions; got {self.n}"
|
|
1492
|
+
|
|
1493
|
+
|
|
1494
|
+
@dataclass(kw_only=True, slots=True)
class _PermissionsFromHumanIntDigitError(_PermissionsFromHumanIntError):
    """Raised when a digit of the human integer is outside 0-7."""

    digit: int

    @override
    def __str__(self) -> str:
        return (
            f"Invalid human integer for permissions; got digit {self.digit} in {self.n}"
        )
|
|
1503
|
+
|
|
1504
|
+
|
|
1505
|
+
@dataclass(kw_only=True, slots=True)
class _PermissionsFromIntError(PermissionsError):
    """Raised when the integer mode is outside 0o0-0o777."""

    n: int

    @override
    def __str__(self) -> str:
        return f"Invalid integer for permissions; got {self.n} = {oct(self.n)}"
|
|
1512
|
+
|
|
1513
|
+
|
|
1514
|
+
@dataclass(kw_only=True, slots=True)
class _PermissionsFromTextError(PermissionsError):
    """Raised when the text does not match the `u=…,g=…,o=…` format."""

    text: str

    @override
    def __str__(self) -> str:
        return f"Invalid string for permissions; got {self.text!r}"
|
|
1521
|
+
|
|
1522
|
+
|
|
1523
|
+
###############################################################################
|
|
1524
|
+
#### pwd ######################################################################
|
|
1525
|
+
###############################################################################
|
|
1526
|
+
|
|
1527
|
+
|
|
1528
|
+
# Public alias for the private uid->name helper in `utilities.constants`;
# the private-member access is deliberate, hence the `noqa: SLF001`.
get_uid_name = utilities.constants._get_uid_name  # noqa: SLF001
|
|
1529
|
+
|
|
1530
|
+
|
|
1531
|
+
def get_file_owner(path: PathLike, /) -> str | None:
    """Get the owner of a file.

    Returns the owning user's name via `get_uid_name`, or `None` when that
    lookup yields no name.
    """
    return get_uid_name(Path(path).stat().st_uid)
|
|
1535
|
+
|
|
1536
|
+
|
|
1537
|
+
###############################################################################
|
|
1538
|
+
#### re #######################################################################
|
|
1539
|
+
###############################################################################
|
|
1540
|
+
|
|
1541
|
+
|
|
1542
|
+
def extract_group(pattern: PatternLike, text: str, /, *, flags: int = 0) -> str:
    """Extract a group.

    The regex must have 1 capture group, and this must match exactly once.
    """
    compiled = _to_pattern(pattern, flags=flags)
    # Validate the capture-group count before matching.
    if compiled.groups == 0:
        raise _ExtractGroupNoCaptureGroupsError(pattern=compiled, text=text)
    if compiled.groups > 1:
        raise _ExtractGroupMultipleCaptureGroupsError(pattern=compiled, text=text)
    found: list[str] = compiled.findall(text)
    if not found:
        raise _ExtractGroupNoMatchesError(pattern=compiled, text=text) from None
    if len(found) == 1:
        return found[0]
    raise _ExtractGroupMultipleMatchesError(
        pattern=compiled, text=text, matches=found
    ) from None
|
|
1568
|
+
|
|
1569
|
+
|
|
1570
|
+
@dataclass(kw_only=True, slots=True)
class ExtractGroupError(Exception):
    """Base class for `extract_group` errors."""

    pattern: Pattern[str]
    text: str
|
|
1574
|
+
|
|
1575
|
+
|
|
1576
|
+
@dataclass(kw_only=True, slots=True)
class _ExtractGroupMultipleCaptureGroupsError(ExtractGroupError):
    """Raised when the pattern has more than one capture group."""

    @override
    def __str__(self) -> str:
        return f"Pattern {self.pattern} must contain exactly one capture group; it had multiple"
|
|
1581
|
+
|
|
1582
|
+
|
|
1583
|
+
@dataclass(kw_only=True, slots=True)
class _ExtractGroupMultipleMatchesError(ExtractGroupError):
    """Raised when the pattern matches more than once."""

    matches: list[str]

    @override
    def __str__(self) -> str:
        return f"Pattern {self.pattern} must match against {self.text} exactly once; matches were {self.matches}"
|
|
1590
|
+
|
|
1591
|
+
|
|
1592
|
+
@dataclass(kw_only=True, slots=True)
class _ExtractGroupNoCaptureGroupsError(ExtractGroupError):
    """Raised when the pattern has no capture group at all."""

    @override
    def __str__(self) -> str:
        # Previously this called `.format(self.pattern)` on the already
        # interpolated f-string; that raised IndexError/KeyError whenever the
        # pattern's repr contained `{…}` (e.g. a quantifier like `\d{3}`).
        return f"Pattern {self.pattern} must contain exactly one capture group; it had none"
|
|
1599
|
+
|
|
1600
|
+
|
|
1601
|
+
@dataclass(kw_only=True, slots=True)
class _ExtractGroupNoMatchesError(ExtractGroupError):
    """Raised when the pattern does not match at all."""

    @override
    def __str__(self) -> str:
        return f"Pattern {self.pattern} must match against {self.text}"
|
|
1606
|
+
|
|
1607
|
+
|
|
1608
|
+
##
|
|
1609
|
+
|
|
1610
|
+
|
|
1611
|
+
def extract_groups(pattern: PatternLike, text: str, /, *, flags: int = 0) -> list[str]:
    """Extract multiple groups.

    The regex may have any number of capture groups, and they must collectively
    match exactly once.
    """
    compiled = _to_pattern(pattern, flags=flags)
    n_groups = compiled.groups
    if n_groups == 0:
        raise _ExtractGroupsNoCaptureGroupsError(pattern=compiled, text=text)
    found: list[str] = compiled.findall(text)
    if not found:
        raise _ExtractGroupsNoMatchesError(pattern=compiled, text=text)
    if len(found) > 1:
        raise _ExtractGroupsMultipleMatchesError(
            pattern=compiled, text=text, matches=found
        )
    # with one capture group, `findall` yields strings; with several, tuples
    return found if n_groups == 1 else list(found[0])
|
1634
|
+
@dataclass(kw_only=True, slots=True)
class ExtractGroupsError(Exception):
    """Base class for `extract_groups` failures."""

    # the compiled pattern that was applied
    pattern: Pattern[str]
    # the text that was searched
    text: str
|
1640
|
+
@dataclass(kw_only=True, slots=True)
class _ExtractGroupsMultipleMatchesError(ExtractGroupsError):
    """Raised when the pattern matches more than once."""

    # all matches found, for the error message
    matches: list[str]

    @override
    def __str__(self) -> str:
        return f"Pattern {self.pattern} must match against {self.text} exactly once; matches were {self.matches}"
|
1649
|
+
@dataclass(kw_only=True, slots=True)
class _ExtractGroupsNoCaptureGroupsError(ExtractGroupsError):
    """Raised when the pattern contains no capture groups.

    `pattern` and `text` are inherited from `ExtractGroupsError`; the original
    redundantly re-declared both fields here, unlike every sibling error
    class, so the re-declarations are removed for consistency.
    """

    @override
    def __str__(self) -> str:
        return f"Pattern {self.pattern} must contain at least one capture group"
|
1659
|
+
@dataclass(kw_only=True, slots=True)
class _ExtractGroupsNoMatchesError(ExtractGroupsError):
    """Raised when the pattern matches nowhere in the text."""

    @override
    def __str__(self) -> str:
        return f"Pattern {self.pattern} must match against {self.text}"
|
1666
|
+
##
|
|
1667
|
+
|
|
1668
|
+
|
|
1669
|
+
def _to_pattern(pattern: PatternLike, /, *, flags: int = 0) -> Pattern[str]:
    """Return `pattern` as a compiled pattern, compiling strings with `flags`."""
    if isinstance(pattern, Pattern):
        # already compiled; `flags` is deliberately ignored in this case
        return pattern
    if isinstance(pattern, str):
        return re.compile(pattern, flags=flags)
    assert_never(pattern)
|
1679
|
+
###############################################################################
|
|
1680
|
+
#### readers/writers ##########################################################
|
|
1681
|
+
###############################################################################
|
|
1682
|
+
|
|
1683
|
+
|
|
1684
|
+
def read_bytes(path: PathLike, /, *, decompress: bool = False) -> bytes:
    """Read data from a file.

    With `decompress`, the file is treated as gzip-compressed. Raises
    `ReadBytesError` if the file does not exist.
    """
    resolved = Path(path)
    if not decompress:
        try:
            return resolved.read_bytes()
        except FileNotFoundError:
            raise ReadBytesError(path=resolved) from None
    try:
        with yield_gzip(resolved) as temp:
            return temp.read_bytes()
    except YieldGzipError as error:
        raise ReadBytesError(path=error.path) from None
|
1700
|
+
@dataclass(kw_only=True, slots=True)
class ReadBytesError(Exception):
    """Raised by `read_bytes` when the source file does not exist."""

    # the path that could not be read
    path: Path

    @override
    def __str__(self) -> str:
        return f"Cannot read from {repr_str(self.path)} since it does not exist"
|
1709
|
+
def write_bytes(
    path: PathLike,
    data: bytes,
    /,
    *,
    compress: bool = False,
    overwrite: bool = False,
    perms: PermissionsLike | None = None,
    owner: str | int | None = None,
    group: str | int | None = None,
    json: bool = False,  # NOTE: shadows the stdlib `json` module name
) -> None:
    """Write data to a file.

    The write goes through `yield_write_path`, which writes to an adjacent
    temporary path and moves (or gzip-compresses) it into place. Raises
    `WriteBytesError` when the destination exists and `overwrite` is false.
    """
    try:
        with yield_write_path(
            path,
            compress=compress,
            overwrite=overwrite,
            perms=perms,
            owner=owner,
            group=group,
        ) as temp:
            if json:  # pragma: no cover
                # Best-effort pretty-printing via an external `prettier`
                # binary; silently skipped when the binary is not installed.
                with suppress(FileNotFoundError):
                    data = check_output(["prettier", "--parser=json"], input=data)
            _ = temp.write_bytes(data)
    except YieldWritePathError as error:
        raise WriteBytesError(path=error.path) from None
|
1739
|
+
@dataclass(kw_only=True, slots=True)
class WriteBytesError(Exception):
    """Raised by `write_bytes` when the destination already exists."""

    # the destination path that already exists
    path: Path

    @override
    def __str__(self) -> str:
        return f"Cannot write to {repr_str(self.path)} since it already exists"
|
1748
|
+
##
|
|
1749
|
+
|
|
1750
|
+
|
|
1751
|
+
def read_pickle(path: PathLike, /) -> Any:
    """Read an object from disk (a gzip-compressed pickle)."""
    resolved = Path(path)
    try:
        with gzip.open(resolved, mode="rb") as gz:
            return pickle.load(gz)  # noqa: S301
    except FileNotFoundError:
        raise ReadPickleError(path=resolved) from None
|
1761
|
+
@dataclass(kw_only=True, slots=True)
class ReadPickleError(Exception):
    """Raised by `read_pickle` when the source file does not exist."""

    # the path that could not be read
    path: Path

    @override
    def __str__(self) -> str:
        return f"Cannot read from {repr_str(self.path)} since it does not exist"
|
1770
|
+
def write_pickle(path: PathLike, obj: Any, /, *, overwrite: bool = False) -> None:
    """Write an object to disk as a gzip-compressed pickle."""
    try:
        with yield_write_path(path, overwrite=overwrite) as temp:
            with gzip.open(temp, mode="wb") as gz:
                pickle.dump(obj, gz)
    except YieldWritePathError as error:
        raise WritePickleError(path=error.path) from None
|
1782
|
+
@dataclass(kw_only=True, slots=True)
class WritePickleError(Exception):
    """Raised by `write_pickle` when the destination already exists."""

    # the destination path that already exists
    path: Path

    @override
    def __str__(self) -> str:
        return f"Cannot write to {repr_str(self.path)} since it already exists"
|
1791
|
+
##
|
|
1792
|
+
|
|
1793
|
+
|
|
1794
|
+
def read_text(path: PathLike, /, *, decompress: bool = False) -> str:
    """Read text from a file.

    With `decompress`, the file is treated as gzip-compressed. Raises
    `ReadTextError` if the file does not exist.
    """
    resolved = Path(path)
    if not decompress:
        try:
            return resolved.read_text()
        except FileNotFoundError:
            raise ReadTextError(path=resolved) from None
    try:
        with yield_gzip(resolved) as temp:
            return temp.read_text()
    except YieldGzipError as error:
        raise ReadTextError(path=error.path) from None
|
1810
|
+
@dataclass(kw_only=True, slots=True)
class ReadTextError(Exception):
    """Raised by `read_text` when the source file does not exist."""

    # the path that could not be read
    path: Path

    @override
    def __str__(self) -> str:
        return f"Cannot read from {repr_str(self.path)} since it does not exist"
|
1819
|
+
def write_text(
    path: PathLike,
    text: str,
    /,
    *,
    compress: bool = False,
    overwrite: bool = False,
    perms: PermissionsLike | None = None,
    owner: str | int | None = None,
    group: str | int | None = None,
) -> None:
    """Write text to a file.

    The text is normalized (via `normalize_str`) to end with exactly one
    newline. The write goes through `yield_write_path` (adjacent temp path,
    then move/compress into place). Raises `WriteTextError` when the
    destination exists and `overwrite` is false.
    """
    try:
        with yield_write_path(
            path,
            compress=compress,
            overwrite=overwrite,
            perms=perms,
            owner=owner,
            group=group,
        ) as temp:
            _ = temp.write_text(normalize_str(text))
    except YieldWritePathError as error:
        raise WriteTextError(path=error.path) from None
|
1845
|
+
@dataclass(kw_only=True, slots=True)
class WriteTextError(Exception):
    """Raised by `write_text` when the destination already exists."""

    # the destination path that already exists
    path: Path

    @override
    def __str__(self) -> str:
        return f"Cannot write to {repr_str(self.path)} since it already exists"
|
1854
|
+
###############################################################################
|
|
1855
|
+
#### reprlib ##################################################################
|
|
1856
|
+
###############################################################################
|
|
1857
|
+
|
|
1858
|
+
|
|
1859
|
+
def repr_(
    obj: Any,
    /,
    *,
    max_width: int = RICH_MAX_WIDTH,
    indent_size: int = RICH_INDENT_SIZE,
    max_length: int | None = RICH_MAX_LENGTH,
    max_string: int | None = RICH_MAX_STRING,
    max_depth: int | None = RICH_MAX_DEPTH,
    expand_all: bool = RICH_EXPAND_ALL,
) -> str:
    """Get the representation of an object."""
    try:
        from rich.pretty import pretty_repr
    except ModuleNotFoundError:  # pragma: no cover
        # `rich` is optional; fall back to the stdlib
        return reprlib.repr(obj)
    options = {
        "max_width": max_width,
        "indent_size": indent_size,
        "max_length": max_length,
        "max_string": max_string,
        "max_depth": max_depth,
        "expand_all": expand_all,
    }
    return pretty_repr(obj, **options)
|
1886
|
+
##
|
|
1887
|
+
|
|
1888
|
+
|
|
1889
|
+
def repr_str(
    obj: Any,
    /,
    *,
    max_width: int = RICH_MAX_WIDTH,
    indent_size: int = RICH_INDENT_SIZE,
    max_length: int | None = RICH_MAX_LENGTH,
    max_string: int | None = RICH_MAX_STRING,
    max_depth: int | None = RICH_MAX_DEPTH,
    expand_all: bool = RICH_EXPAND_ALL,
) -> str:
    """Get the representation of the string of an object."""
    as_string = str(obj)
    return repr_(
        as_string,
        max_width=max_width,
        indent_size=indent_size,
        max_length=max_length,
        max_string=max_string,
        max_depth=max_depth,
        expand_all=expand_all,
    )
|
1912
|
+
###############################################################################
|
|
1913
|
+
#### shutil ###################################################################
|
|
1914
|
+
###############################################################################
|
|
1915
|
+
|
|
1916
|
+
|
|
1917
|
+
def chown(
    path: PathLike,
    /,
    *,
    recursive: bool = False,
    user: str | int | None = None,
    group: str | int | None = None,
) -> None:
    """Change file owner and/or group.

    With `recursive`, applies to `path` and everything beneath it; otherwise
    to `path` alone. A no-op when both `user` and `group` are `None`.
    """
    path = Path(path)
    if (user is None) and (group is None):
        return  # nothing to change; skip the filesystem walk entirely
    # `rglob("*")` already recurses; the original's `rglob("**/*")` doubled the
    # recursive component. The original also excluded the root itself from
    # recursive mode, which a recursive chown should include.
    paths = [path, *path.rglob("*")] if recursive else [path]
    for p in paths:
        # `shutil.chown` accepts None for whichever of user/group is unset
        shutil.chown(p, user, group)
|
1942
|
+
###############################################################################
|
|
1943
|
+
#### tempfile #################################################################
|
|
1944
|
+
###############################################################################
|
|
1945
|
+
|
|
1946
|
+
|
|
1947
|
+
class TemporaryDirectory:
    """Wrapper around `TemporaryDirectory` with a `Path` attribute."""

    def __init__(
        self,
        *,
        suffix: str | None = None,
        prefix: str | None = None,
        dir: PathLike | None = None,  # noqa: A002
        ignore_cleanup_errors: bool = False,
        delete: bool = True,
    ) -> None:
        super().__init__()
        # delegate to the ResourceWarning-suppressing subclass below
        self._temp_dir = _TemporaryDirectoryNoResourceWarning(
            suffix=suffix,
            prefix=prefix,
            dir=dir,
            ignore_cleanup_errors=ignore_cleanup_errors,
            delete=delete,
        )
        # the created directory, exposed as a `Path` rather than a `str`
        self.path = Path(self._temp_dir.name)

    def __enter__(self) -> Path:
        # unlike the stdlib class, entering yields a `Path`, not a `str`
        return Path(self._temp_dir.__enter__())

    def __exit__(
        self,
        exc: type[BaseException] | None,
        val: BaseException | None,
        tb: TracebackType | None,
    ) -> None:
        self._temp_dir.__exit__(exc, val, tb)
|
1981
|
+
class _TemporaryDirectoryNoResourceWarning(tempfile.TemporaryDirectory):
    """`TemporaryDirectory` whose cleanup does not emit `ResourceWarning`."""

    @classmethod
    @override
    def _cleanup(  # pyright: ignore[reportGeneralTypeIssues]
        cls,
        name: str,
        warn_message: str,
        ignore_errors: bool = False,
        delete: bool = True,
    ) -> None:
        # NOTE: overrides a private stdlib hook; the signature must track
        # CPython's `tempfile.TemporaryDirectory._cleanup`.
        with suppress_warnings(category=ResourceWarning):
            return super()._cleanup(  # pyright: ignore[reportAttributeAccessIssue]
                name, warn_message, ignore_errors=ignore_errors, delete=delete
            )
|
1997
|
+
##
|
|
1998
|
+
|
|
1999
|
+
|
|
2000
|
+
@contextmanager
def TemporaryFile(  # noqa: N802
    *,
    dir: PathLike | None = None,  # noqa: A002
    suffix: str | None = None,
    prefix: str | None = None,
    ignore_cleanup_errors: bool = False,
    delete: bool = True,
    name: str | None = None,
    data: bytes | None = None,
    text: str | None = None,
) -> Iterator[Path]:
    """Yield a temporary file.

    When `dir` is not given, a temporary directory is created to host the
    file. `name` renames the file within its directory; `data`/`text`
    pre-populate it before it is yielded.
    """
    if dir is None:
        with (
            TemporaryDirectory(
                suffix=suffix,
                prefix=prefix,
                dir=dir,  # always None on this branch
                ignore_cleanup_errors=ignore_cleanup_errors,
                delete=delete,
            ) as temp_dir,
            _temporary_file_outer(
                temp_dir,
                suffix=suffix,
                prefix=prefix,
                delete=delete,
                name=name,
                data=data,
                text=text,
            ) as temp,
        ):
            yield temp
    else:
        with _temporary_file_outer(
            dir,
            suffix=suffix,
            prefix=prefix,
            delete=delete,
            name=name,
            data=data,
            text=text,
        ) as temp:
            yield temp
|
2046
|
+
@contextmanager
def _temporary_file_outer(
    path: PathLike,
    /,
    *,
    suffix: str | None = None,
    prefix: str | None = None,
    delete: bool = True,
    name: str | None = None,
    data: bytes | None = None,
    text: str | None = None,
) -> Iterator[Path]:
    """Create the temp file and seed it with `data`/`text` before yielding."""
    inner = _temporary_file_inner(
        Path(path), suffix=suffix, prefix=prefix, delete=delete, name=name
    )
    with inner as temp:
        if data is not None:
            _ = temp.write_bytes(data)
        if text is not None:
            _ = temp.write_text(text)
        yield temp
|
2068
|
+
@contextmanager
def _temporary_file_inner(
    path: Path,
    /,
    *,
    suffix: str | None = None,
    prefix: str | None = None,
    delete: bool = True,
    name: str | None = None,
) -> Iterator[Path]:
    """Create a named temp file inside `path`, optionally renamed to `name`."""
    # assumes `_NamedTemporaryFile` (defined elsewhere in this module) behaves
    # like `tempfile.NamedTemporaryFile` - TODO confirm
    with _NamedTemporaryFile(
        suffix=suffix, prefix=prefix, dir=path, delete=delete, delete_on_close=False
    ) as temp:
        if name is None:
            yield Path(path, temp.name)
        else:
            # rename to the requested file name within the same directory
            _ = shutil.move(path / temp.name, path / name)
            yield path / name
|
2088
|
+
##
|
|
2089
|
+
|
|
2090
|
+
|
|
2091
|
+
@contextmanager
def yield_adjacent_temp_dir(path: PathLike, /) -> Iterator[Path]:
    """Yield a temporary directory adjacent to target path."""
    target = Path(path)
    parent = target.parent
    parent.mkdir(parents=True, exist_ok=True)
    with TemporaryDirectory(suffix=".tmp", prefix=target.name, dir=parent) as temp:
        yield temp
|
2101
|
+
@contextmanager
def yield_adjacent_temp_file(path: PathLike, /) -> Iterator[Path]:
    """Yield a temporary file adjacent to target path."""
    target = Path(path)
    parent = target.parent
    parent.mkdir(parents=True, exist_ok=True)
    with TemporaryFile(dir=parent, suffix=".tmp", prefix=target.name) as temp:
        yield temp
|
2111
|
+
###############################################################################
|
|
2112
|
+
#### text #####################################################################
|
|
2113
|
+
###############################################################################
|
|
2114
|
+
|
|
2115
|
+
|
|
2116
|
+
def kebab_case(text: str, /) -> str:
    """Convert text into kebab case (hyphen-separated, lower-cased)."""
    separator = "-"
    return _kebab_snake_case(text, separator)
|
2121
|
+
def snake_case(text: str, /) -> str:
    """Convert text into snake case (underscore-separated, lower-cased)."""
    separator = "_"
    return _kebab_snake_case(text, separator)
|
2126
|
+
def _kebab_snake_case(text: str, separator: str, /) -> str:
    """Convert text into kebab/snake case using `separator`."""
    has_leading = _kebab_leading_pattern.search(text) is not None
    has_trailing = _kebab_trailing_pattern.search(text) is not None
    words = [w for w in _kebab_pascal_pattern.findall(text) if w]
    # a leading/trailing underscore becomes a leading/trailing separator
    if has_leading:
        words.insert(0, "")
    if has_trailing:
        words.append("")
    return separator.join(words).lower()
|
2136
|
+
# A leading/trailing underscore in the input is preserved as a leading/
# trailing separator in the kebab/snake output (see `_kebab_snake_case`).
_kebab_leading_pattern = re.compile(r"^_")
_kebab_trailing_pattern = re.compile(r"_$")
|
2140
|
+
def pascal_case(text: str, /) -> str:
    """Convert text to pascal case."""
    words = (w for w in _kebab_pascal_pattern.findall(text) if w)
    return "".join(_pascal_case_upper_or_title(w) for w in words)
|
2148
|
+
def _pascal_case_upper_or_title(text: str, /) -> str:
|
|
2149
|
+
return text if text.isupper() else text.title()
|
|
2150
|
+
|
|
2151
|
+
|
|
2152
|
+
# Word tokenizer shared by the kebab/snake/pascal conversions; alternatives
# are tried in order, so the acronym branch wins before the plain-word branch.
_kebab_pascal_pattern = re.compile(
    r"""
    [A-Z]+(?=[A-Z][a-z0-9]) | # all caps followed by Upper+lower or digit (API in APIResponse2)
    [A-Z]?[a-z]+[0-9]* | # normal words with optional trailing digits (Text123)
    [A-Z]+[0-9]* | # consecutive caps with optional trailing digits (ID2)
    """,
    flags=VERBOSE,
)
|
2162
|
+
##
|
|
2163
|
+
|
|
2164
|
+
|
|
2165
|
+
def normalize_multi_line_str(text: str, /) -> str:
    """Normalize a multi-line string (strip outer newlines, dedent, re-terminate)."""
    return normalize_str(dedent(text.strip("\n")))
|
2171
|
+
def normalize_str(text: str, /) -> str:
    """Normalize a string: no leading newlines, exactly one trailing newline."""
    trimmed = text.strip("\n")
    return f"{trimmed}\n"
|
2176
|
+
##
|
|
2177
|
+
|
|
2178
|
+
|
|
2179
|
+
def substitute(
    path_or_text: PathLike,
    /,
    *,
    environ: bool = False,
    mapping: StrMapping | None = None,
    safe: bool = False,
    **kwargs: Any,
) -> str:
    """Substitute from a Path or string.

    A `Path` argument is read and recursively processed as text. With
    `environ`, `os.environ` is layered under the explicit `kwargs`. With
    `safe`, `Template.safe_substitute` is used (missing keys are left intact)
    instead of raising `SubstituteError`.
    """
    match path_or_text:
        case Path() as path:
            return substitute(
                path.read_text(), environ=environ, mapping=mapping, safe=safe, **kwargs
            )
        case str() as text:
            template = Template(text)
            mapping_use: StrMapping = {} if mapping is None else mapping
            # explicit kwargs take precedence over environment variables
            kwargs_use: StrDict = (os.environ if environ else {}) | kwargs
            if safe:
                return template.safe_substitute(mapping_use, **kwargs_use)
            try:
                return template.substitute(mapping_use, **kwargs_use)
            except KeyError as error:
                raise SubstituteError(key=error.args[0]) from None
        case never:
            assert_never(never)
|
2208
|
+
@dataclass(kw_only=True, slots=True)
class SubstituteError(Exception):
    """Raised by `substitute` when a template key is missing."""

    # the missing template key
    key: str

    @override
    def __str__(self) -> str:
        return f"Missing key: {repr_(self.key)}"
|
2217
|
+
##
|
|
2218
|
+
|
|
2219
|
+
|
|
2220
|
+
def unique_str() -> str:
    """Generate a unique string (timestamp, PID, thread id, and a UUID)."""
    parts = [
        str(time_ns()),
        str(getpid()),
        str(get_ident()),
        str(uuid4()).replace("-", ""),
    ]
    return "_".join(parts)
|
2229
|
+
###############################################################################
|
|
2230
|
+
#### warnings #################################################################
|
|
2231
|
+
###############################################################################
|
|
2232
|
+
|
|
2233
|
+
|
|
2234
|
+
@contextmanager
def suppress_warnings(
    *, message: str = "", category: TypeLike[Warning] | None = None
) -> Iterator[None]:
    """Suppress warnings."""
    ctx = _yield_caught_warnings("ignore", message=message, category=category)
    with ctx:
        yield
|
2243
|
+
@contextmanager
def yield_warnings_as_errors(
    *, message: str = "", category: TypeLike[Warning] | None = None
) -> Iterator[None]:
    """Catch warnings as errors."""
    ctx = _yield_caught_warnings("error", message=message, category=category)
    with ctx:
        yield
|
2252
|
+
@contextmanager
def _yield_caught_warnings(
    action: FilterWarningsAction,
    /,
    *,
    message: str = "",
    category: TypeLike[Warning] | None = None,
) -> Iterator[None]:
    """Install `action` warning filters within a `catch_warnings` scope."""
    with catch_warnings():
        if category is None:
            filterwarnings(action, message=message)
        elif isinstance(category, type):
            filterwarnings(action, message=message, category=category)
        elif isinstance(category, tuple):
            # one filter per category in the tuple
            for item in category:
                filterwarnings(action, message=message, category=item)
        else:
            assert_never(category)
        yield
|
2274
|
+
###############################################################################
|
|
2275
|
+
#### whenever #################################################################
|
|
2276
|
+
###############################################################################
|
|
2277
|
+
|
|
2278
|
+
|
|
2279
|
+
# Re-export the constants module's private local-now helper under a public name.
get_now_local = utilities.constants._get_now_local  # noqa: SLF001
|
|
2282
|
+
def get_now(time_zone: TimeZoneLike = UTC, /) -> ZonedDateTime:
    """Get the current zoned date-time."""
    name = to_time_zone_name(time_zone)
    return _get_now(name)
|
2287
|
+
def get_now_plain(time_zone: TimeZoneLike = UTC, /) -> PlainDateTime:
    """Get the current plain date-time."""
    now = get_now(time_zone)
    return now.to_plain()
|
2292
|
+
def get_now_local_plain() -> PlainDateTime:
    """Get the current plain date-time in the local time-zone."""
    now = get_now_local()
    return now.to_plain()
|
2296
|
+
|
|
2297
|
+
##
|
|
2298
|
+
|
|
2299
|
+
|
|
2300
|
+
def get_time(time_zone: TimeZoneLike = UTC, /) -> Time:
    """Get the current time."""
    now = get_now(time_zone)
    return now.time()
|
2305
|
+
def get_time_local() -> Time:
    """Get the current time in the local time-zone."""
    return get_time(LOCAL_TIME_ZONE)
|
2309
|
+
|
|
2310
|
+
##
|
|
2311
|
+
|
|
2312
|
+
|
|
2313
|
+
def get_today(time_zone: TimeZoneLike = UTC, /) -> Date:
    """Get the current, timezone-aware local date."""
    now = get_now(time_zone)
    return now.date()
|
2318
|
+
def get_today_local() -> Date:
    """Get the current, timezone-aware local date."""
    return get_today(LOCAL_TIME_ZONE)
|
2322
|
+
|
|
2323
|
+
###############################################################################
|
|
2324
|
+
#### writers ##################################################################
|
|
2325
|
+
###############################################################################
|
|
2326
|
+
|
|
2327
|
+
|
|
2328
|
+
@contextmanager
def yield_write_path(
    path: PathLike,
    /,
    *,
    compress: bool = False,
    overwrite: bool = False,
    perms: PermissionsLike | None = None,
    owner: str | int | None = None,
    group: str | int | None = None,
) -> Iterator[Path]:
    """Yield a temporary path for atomically writing files to disk."""
    with yield_adjacent_temp_file(path) as temp:
        # the caller writes to `temp`; everything below runs after their block
        yield temp
        if compress:
            try:
                compress_gzip(temp, path, overwrite=overwrite)
            except CompressGzipError as error:
                raise YieldWritePathError(path=error.dest) from None
        else:
            try:
                move(temp, path, overwrite=overwrite)
            except _CopyOrMoveDestinationExistsError as error:
                raise YieldWritePathError(path=error.dest) from None
        # permissions/ownership are applied to the final destination path
        if perms is not None:
            chmod(path, perms)
        if (owner is not None) or (group is not None):
            chown(path, user=owner, group=group)
|
2358
|
+
@dataclass(kw_only=True, slots=True)
class YieldWritePathError(Exception):
    """Raised by `yield_write_path` when the destination already exists."""

    # the destination path that already exists
    path: Path

    @override
    def __str__(self) -> str:
        return f"Cannot write to {repr_str(self.path)} since it already exists"
|
2367
|
+
###############################################################################
|
|
2368
|
+
#### zoneinfo #################################################################
|
|
2369
|
+
###############################################################################
|
|
2370
|
+
|
|
2371
|
+
|
|
2372
|
+
def to_zone_info(obj: TimeZoneLike, /) -> ZoneInfo:
    """Convert to a time-zone.

    Accepts a `ZoneInfo`, a `ZonedDateTime`, a key string (with the
    "local"/"localtime" aliases), the stdlib UTC singleton, or an aware
    `dt.datetime`. Raises `ToTimeZoneError` subclasses for any other
    `tzinfo` and for plain date-times.
    """
    match obj:
        case ZoneInfo() as zone_info:
            return zone_info
        case ZonedDateTime() as date_time:
            return ZoneInfo(date_time.tz)
        # NOTE: this literal case must precede the general `str()` case
        case "local" | "localtime":
            return LOCAL_TIME_ZONE
        case str() as key:
            return ZoneInfo(key)
        case dt.tzinfo() as tzinfo:
            # only the stdlib UTC singleton is accepted among raw tzinfos
            if tzinfo is dt.UTC:
                return UTC
            raise _ToZoneInfoInvalidTZInfoError(time_zone=obj)
        case dt.datetime() as date_time:
            if date_time.tzinfo is None:
                raise _ToZoneInfoPlainDateTimeError(date_time=date_time)
            return to_zone_info(date_time.tzinfo)
        case never:
            assert_never(never)
|
2395
|
+
# Base class for `to_zone_info` failures (NOTE: named "ToTimeZone" although
# the raising function is `to_zone_info`).
@dataclass(kw_only=True, slots=True)
class ToTimeZoneError(Exception): ...
|
2398
|
+
|
|
2399
|
+
@dataclass(kw_only=True, slots=True)
class _ToZoneInfoInvalidTZInfoError(ToTimeZoneError):
    """Raised for a `dt.tzinfo` other than the stdlib UTC singleton."""

    # the rejected tzinfo object
    time_zone: dt.tzinfo

    @override
    def __str__(self) -> str:
        return f"Invalid time-zone: {self.time_zone}"
|
2408
|
+
@dataclass(kw_only=True, slots=True)
class _ToZoneInfoPlainDateTimeError(ToTimeZoneError):
    """Raised for a naive `dt.datetime` (no `tzinfo`)."""

    # the naive date-time that was rejected
    date_time: dt.datetime

    @override
    def __str__(self) -> str:
        return f"Plain date-time: {self.date_time}"
|
2417
|
+
##
|
|
2418
|
+
|
|
2419
|
+
|
|
2420
|
+
def to_time_zone_name(obj: TimeZoneLike, /) -> TimeZone:
    """Convert to a time zone name.

    Mirrors `to_zone_info` but returns the IANA key. String keys are
    validated against `TIME_ZONES`; raises `ToTimeZoneNameError` subclasses
    for invalid keys, non-UTC raw `tzinfo`s, and plain date-times.
    """
    match obj:
        case ZoneInfo() as zone_info:
            return cast("TimeZone", zone_info.key)
        case ZonedDateTime() as date_time:
            return cast("TimeZone", date_time.tz)
        # NOTE: this literal case must precede the general `str()` case
        case "local" | "localtime":
            return LOCAL_TIME_ZONE_NAME
        case str() as time_zone:
            if time_zone in TIME_ZONES:
                return time_zone
            raise _ToTimeZoneNameInvalidKeyError(time_zone=time_zone)
        case dt.tzinfo() as tzinfo:
            # only the stdlib UTC singleton is accepted among raw tzinfos
            if tzinfo is dt.UTC:
                return cast("TimeZone", UTC.key)
            raise _ToTimeZoneNameInvalidTZInfoError(time_zone=obj)
        case dt.datetime() as date_time:
            if date_time.tzinfo is None:
                raise _ToTimeZoneNamePlainDateTimeError(date_time=date_time)
            return to_time_zone_name(date_time.tzinfo)
        case never:
            assert_never(never)
|
2445
|
+
# Base class for `to_time_zone_name` failures.
@dataclass(kw_only=True, slots=True)
class ToTimeZoneNameError(Exception): ...
|
2448
|
+
|
|
2449
|
+
@dataclass(kw_only=True, slots=True)
class _ToTimeZoneNameInvalidKeyError(ToTimeZoneNameError):
    """Raised for a string key not present in `TIME_ZONES`."""

    # the rejected key
    time_zone: str

    @override
    def __str__(self) -> str:
        return f"Invalid time-zone: {self.time_zone!r}"
|
2458
|
+
@dataclass(kw_only=True, slots=True)
class _ToTimeZoneNameInvalidTZInfoError(ToTimeZoneNameError):
    """Raised for a `dt.tzinfo` other than the stdlib UTC singleton."""

    # the rejected tzinfo object
    time_zone: dt.tzinfo

    @override
    def __str__(self) -> str:
        return f"Invalid time-zone: {self.time_zone}"
|
2467
|
+
@dataclass(kw_only=True, slots=True)
class _ToTimeZoneNamePlainDateTimeError(ToTimeZoneNameError):
    """Raised for a naive `dt.datetime` (no `tzinfo`)."""

    # the naive date-time that was rejected
    date_time: dt.datetime

    @override
    def __str__(self) -> str:
        return f"Plain date-time: {self.date_time}"
|
2476
|
+
# Public names exported by this module.  Kept sorted; each name appears
# exactly once (the original list carried "WriteBytesError" and
# "WriteTextError" twice, which is harmless at runtime but misleading and
# flagged by linters).
__all__ = [
    "CompressBZ2Error",
    "CompressGzipError",
    "CompressLZMAError",
    "CompressZipError",
    "ExtractGroupError",
    "ExtractGroupsError",
    "FileOrDirError",
    "GetEnvError",
    "MaxNullableError",
    "MinNullableError",
    "OneEmptyError",
    "OneError",
    "OneNonUniqueError",
    "OneStrEmptyError",
    "OneStrError",
    "OneStrNonUniqueError",
    "Permissions",
    "PermissionsError",
    "PermissionsLike",
    "ReadBytesError",
    "ReadPickleError",
    "ReadTextError",
    "SubstituteError",
    "TemporaryDirectory",
    "TemporaryFile",
    "ToTimeZoneError",
    "ToTimeZoneNameError",
    "WriteBytesError",
    "WritePickleError",
    "WriteTextError",
    "YieldBZ2Error",
    "YieldGzipError",
    "YieldLZMAError",
    "YieldZipError",
    "always_iterable",
    "chmod",
    "chown",
    "chunked",
    "compress_bz2",
    "compress_gzip",
    "compress_lzma",
    "compress_zip",
    "extract_group",
    "extract_groups",
    "file_or_dir",
    "get_class",
    "get_class_name",
    "get_env",
    "get_file_group",
    "get_file_owner",
    "get_func_name",
    "get_gid_name",
    "get_now",
    "get_now_local",
    "get_now_local_plain",
    "get_now_plain",
    "get_time",
    "get_time_local",
    "get_today",
    "get_today_local",
    "get_uid_name",
    "has_env",
    "is_debug",
    "is_none",
    "is_not_none",
    "is_pytest",
    "is_sentinel",
    "max_nullable",
    "min_nullable",
    "move_many",
    "normalize_multi_line_str",
    "normalize_str",
    "not_func",
    "one",
    "one_str",
    "read_bytes",
    "read_pickle",
    "read_text",
    "replace_non_sentinel",
    "repr_",
    "repr_str",
    "substitute",
    "suppress_super_attribute_error",
    "suppress_warnings",
    "take",
    "to_time_zone_name",
    "to_zone_info",
    "transpose",
    "unique_everseen",
    "unique_str",
    "write_bytes",
    "write_pickle",
    "write_text",
    "yield_adjacent_temp_dir",
    "yield_adjacent_temp_file",
    "yield_bz2",
    "yield_gzip",
    "yield_lzma",
    "yield_temp_cwd",
    "yield_temp_environ",
    "yield_warnings_as_errors",
    "yield_zip",
]
|