dycw-utilities 0.166.30__py3-none-any.whl → 0.185.8__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (96)
  1. dycw_utilities-0.185.8.dist-info/METADATA +33 -0
  2. dycw_utilities-0.185.8.dist-info/RECORD +90 -0
  3. {dycw_utilities-0.166.30.dist-info → dycw_utilities-0.185.8.dist-info}/WHEEL +1 -1
  4. {dycw_utilities-0.166.30.dist-info → dycw_utilities-0.185.8.dist-info}/entry_points.txt +1 -0
  5. utilities/__init__.py +1 -1
  6. utilities/altair.py +17 -10
  7. utilities/asyncio.py +50 -72
  8. utilities/atools.py +9 -11
  9. utilities/cachetools.py +16 -11
  10. utilities/click.py +76 -19
  11. utilities/concurrent.py +1 -1
  12. utilities/constants.py +492 -0
  13. utilities/contextlib.py +23 -30
  14. utilities/contextvars.py +1 -23
  15. utilities/core.py +2581 -0
  16. utilities/dataclasses.py +16 -119
  17. utilities/docker.py +387 -0
  18. utilities/enum.py +1 -1
  19. utilities/errors.py +2 -16
  20. utilities/fastapi.py +5 -5
  21. utilities/fpdf2.py +2 -1
  22. utilities/functions.py +34 -265
  23. utilities/http.py +2 -3
  24. utilities/hypothesis.py +84 -29
  25. utilities/importlib.py +17 -1
  26. utilities/iterables.py +39 -575
  27. utilities/jinja2.py +145 -0
  28. utilities/jupyter.py +5 -3
  29. utilities/libcst.py +1 -1
  30. utilities/lightweight_charts.py +4 -6
  31. utilities/logging.py +24 -24
  32. utilities/math.py +1 -36
  33. utilities/more_itertools.py +4 -6
  34. utilities/numpy.py +2 -1
  35. utilities/operator.py +2 -2
  36. utilities/orjson.py +42 -43
  37. utilities/os.py +4 -147
  38. utilities/packaging.py +129 -0
  39. utilities/parse.py +35 -15
  40. utilities/pathlib.py +3 -120
  41. utilities/platform.py +8 -90
  42. utilities/polars.py +38 -32
  43. utilities/postgres.py +37 -33
  44. utilities/pottery.py +20 -18
  45. utilities/pqdm.py +3 -4
  46. utilities/psutil.py +2 -3
  47. utilities/pydantic.py +25 -0
  48. utilities/pydantic_settings.py +87 -16
  49. utilities/pydantic_settings_sops.py +16 -3
  50. utilities/pyinstrument.py +4 -4
  51. utilities/pytest.py +96 -125
  52. utilities/pytest_plugins/pytest_regressions.py +2 -2
  53. utilities/pytest_regressions.py +32 -11
  54. utilities/random.py +2 -8
  55. utilities/redis.py +98 -94
  56. utilities/reprlib.py +11 -118
  57. utilities/shellingham.py +66 -0
  58. utilities/shutil.py +25 -0
  59. utilities/slack_sdk.py +13 -12
  60. utilities/sqlalchemy.py +57 -30
  61. utilities/sqlalchemy_polars.py +16 -25
  62. utilities/subprocess.py +2590 -0
  63. utilities/tabulate.py +32 -0
  64. utilities/testbook.py +8 -8
  65. utilities/text.py +24 -99
  66. utilities/throttle.py +159 -0
  67. utilities/time.py +18 -0
  68. utilities/timer.py +31 -14
  69. utilities/traceback.py +16 -23
  70. utilities/types.py +42 -2
  71. utilities/typing.py +26 -14
  72. utilities/uuid.py +1 -1
  73. utilities/version.py +202 -45
  74. utilities/whenever.py +53 -150
  75. dycw_utilities-0.166.30.dist-info/METADATA +0 -41
  76. dycw_utilities-0.166.30.dist-info/RECORD +0 -98
  77. dycw_utilities-0.166.30.dist-info/licenses/LICENSE +0 -21
  78. utilities/aeventkit.py +0 -388
  79. utilities/atomicwrites.py +0 -182
  80. utilities/cryptography.py +0 -41
  81. utilities/getpass.py +0 -8
  82. utilities/git.py +0 -19
  83. utilities/gzip.py +0 -31
  84. utilities/json.py +0 -70
  85. utilities/pickle.py +0 -25
  86. utilities/re.py +0 -156
  87. utilities/sentinel.py +0 -73
  88. utilities/socket.py +0 -8
  89. utilities/string.py +0 -20
  90. utilities/tempfile.py +0 -77
  91. utilities/typed_settings.py +0 -152
  92. utilities/tzdata.py +0 -11
  93. utilities/tzlocal.py +0 -28
  94. utilities/warnings.py +0 -65
  95. utilities/zipfile.py +0 -25
  96. utilities/zoneinfo.py +0 -133
utilities/json.py DELETED
@@ -1,70 +0,0 @@
1
- from __future__ import annotations
2
-
3
- from contextlib import suppress
4
- from dataclasses import dataclass
5
- from pathlib import Path
6
- from subprocess import check_output
7
- from typing import TYPE_CHECKING, assert_never, overload, override
8
-
9
- from utilities.atomicwrites import writer
10
- from utilities.gzip import write_binary
11
-
12
- if TYPE_CHECKING:
13
- from utilities.types import PathLike
14
-
15
-
16
- ##
17
-
18
-
19
- @overload
20
- def run_prettier(source: bytes, /) -> bytes: ...
21
- @overload
22
- def run_prettier(source: str, /) -> str: ...
23
- @overload
24
- def run_prettier(source: Path, /) -> None: ...
25
- def run_prettier(source: bytes | str | Path, /) -> bytes | str | None:
26
- """Run `prettier` on a string/path."""
27
- match source: # skipif-ci
28
- case bytes() as data:
29
- return _run_prettier_core(data, text=False)
30
- case str() as text:
31
- if (path := Path(text)).is_file():
32
- return run_prettier(path)
33
- return _run_prettier_core(text, text=True)
34
- case Path() as path:
35
- result = run_prettier(path.read_bytes())
36
- with writer(path, overwrite=True) as temp:
37
- _ = temp.write_bytes(result)
38
- return None
39
- case never:
40
- assert_never(never)
41
-
42
-
43
- def _run_prettier_core(data: bytes | str, /, *, text: bool) -> bytes | str:
44
- """Run `prettier` on a string/path."""
45
- try: # skipif-ci
46
- return check_output(["prettier", "--parser=json"], input=data, text=text)
47
- except FileNotFoundError: # pragma: no cover
48
- raise RunPrettierError from None
49
-
50
-
51
- @dataclass(kw_only=True, slots=True)
52
- class RunPrettierError(Exception):
53
- @override
54
- def __str__(self) -> str:
55
- return "Unable to find 'prettier'" # pragma: no cover
56
-
57
-
58
- ##
59
-
60
-
61
- def write_formatted_json(
62
- data: bytes, path: PathLike, /, *, compress: bool = False, overwrite: bool = False
63
- ) -> None:
64
- """Write a formatted byte string to disk."""
65
- with suppress(RunPrettierError):
66
- data = run_prettier(data)
67
- write_binary(data, path, compress=compress, overwrite=overwrite)
68
-
69
-
70
- __all__ = ["RunPrettierError", "run_prettier", "write_formatted_json"]
utilities/pickle.py DELETED
@@ -1,25 +0,0 @@
1
- from __future__ import annotations
2
-
3
- import gzip
4
- from pickle import dump, load
5
- from typing import TYPE_CHECKING, Any
6
-
7
- from utilities.atomicwrites import writer
8
-
9
- if TYPE_CHECKING:
10
- from utilities.types import PathLike
11
-
12
-
13
- def read_pickle(path: PathLike, /) -> Any:
14
- """Read an object from disk."""
15
- with gzip.open(path, mode="rb") as gz:
16
- return load(gz) # noqa: S301
17
-
18
-
19
- def write_pickle(obj: Any, path: PathLike, /, *, overwrite: bool = False) -> None:
20
- """Write an object to disk."""
21
- with writer(path, overwrite=overwrite) as temp, gzip.open(temp, mode="wb") as gz:
22
- dump(obj, gz)
23
-
24
-
25
- __all__ = ["read_pickle", "write_pickle"]
utilities/re.py DELETED
@@ -1,156 +0,0 @@
1
- from __future__ import annotations
2
-
3
- import re
4
- from dataclasses import dataclass
5
- from re import Pattern
6
- from typing import TYPE_CHECKING, assert_never, override
7
-
8
- if TYPE_CHECKING:
9
- from utilities.types import PatternLike
10
-
11
-
12
- def ensure_pattern(pattern: PatternLike, /, *, flags: int = 0) -> Pattern[str]:
13
- """Ensure a pattern is returned."""
14
- match pattern:
15
- case Pattern():
16
- return pattern
17
- case str():
18
- return re.compile(pattern, flags=flags)
19
- case never:
20
- assert_never(never)
21
-
22
-
23
- ##
24
-
25
-
26
- def extract_group(pattern: PatternLike, text: str, /, *, flags: int = 0) -> str:
27
- """Extract a group.
28
-
29
- The regex must have 1 capture group, and this must match exactly once.
30
- """
31
- pattern_use = ensure_pattern(pattern, flags=flags)
32
- match pattern_use.groups:
33
- case 0:
34
- raise _ExtractGroupNoCaptureGroupsError(pattern=pattern_use, text=text)
35
- case 1:
36
- matches: list[str] = pattern_use.findall(text)
37
- match len(matches):
38
- case 0:
39
- raise _ExtractGroupNoMatchesError(
40
- pattern=pattern_use, text=text
41
- ) from None
42
- case 1:
43
- return matches[0]
44
- case _:
45
- raise _ExtractGroupMultipleMatchesError(
46
- pattern=pattern_use, text=text, matches=matches
47
- ) from None
48
- case _:
49
- raise _ExtractGroupMultipleCaptureGroupsError(
50
- pattern=pattern_use, text=text
51
- )
52
-
53
-
54
- @dataclass(kw_only=True, slots=True)
55
- class ExtractGroupError(Exception):
56
- pattern: Pattern[str]
57
- text: str
58
-
59
-
60
- @dataclass(kw_only=True, slots=True)
61
- class _ExtractGroupMultipleCaptureGroupsError(ExtractGroupError):
62
- @override
63
- def __str__(self) -> str:
64
- return f"Pattern {self.pattern} must contain exactly one capture group; it had multiple"
65
-
66
-
67
- @dataclass(kw_only=True, slots=True)
68
- class _ExtractGroupMultipleMatchesError(ExtractGroupError):
69
- matches: list[str]
70
-
71
- @override
72
- def __str__(self) -> str:
73
- return f"Pattern {self.pattern} must match against {self.text} exactly once; matches were {self.matches}"
74
-
75
-
76
- @dataclass(kw_only=True, slots=True)
77
- class _ExtractGroupNoCaptureGroupsError(ExtractGroupError):
78
- @override
79
- def __str__(self) -> str:
80
- return f"Pattern {self.pattern} must contain exactly one capture group; it had none".format(
81
- self.pattern
82
- )
83
-
84
-
85
- @dataclass(kw_only=True, slots=True)
86
- class _ExtractGroupNoMatchesError(ExtractGroupError):
87
- @override
88
- def __str__(self) -> str:
89
- return f"Pattern {self.pattern} must match against {self.text}"
90
-
91
-
92
- ##
93
-
94
-
95
- def extract_groups(pattern: PatternLike, text: str, /, *, flags: int = 0) -> list[str]:
96
- """Extract multiple groups.
97
-
98
- The regex may have any number of capture groups, and they must collectively
99
- match exactly once.
100
- """
101
- pattern_use = ensure_pattern(pattern, flags=flags)
102
- if (n_groups := pattern_use.groups) == 0:
103
- raise _ExtractGroupsNoCaptureGroupsError(pattern=pattern_use, text=text)
104
- matches: list[str] = pattern_use.findall(text)
105
- match len(matches), n_groups:
106
- case 0, _:
107
- raise _ExtractGroupsNoMatchesError(pattern=pattern_use, text=text)
108
- case 1, 1:
109
- return matches
110
- case 1, _:
111
- return list(matches[0])
112
- case _:
113
- raise _ExtractGroupsMultipleMatchesError(
114
- pattern=pattern_use, text=text, matches=matches
115
- )
116
-
117
-
118
- @dataclass(kw_only=True, slots=True)
119
- class ExtractGroupsError(Exception):
120
- pattern: Pattern[str]
121
- text: str
122
-
123
-
124
- @dataclass(kw_only=True, slots=True)
125
- class _ExtractGroupsMultipleMatchesError(ExtractGroupsError):
126
- matches: list[str]
127
-
128
- @override
129
- def __str__(self) -> str:
130
- return f"Pattern {self.pattern} must match against {self.text} exactly once; matches were {self.matches}"
131
-
132
-
133
- @dataclass(kw_only=True, slots=True)
134
- class _ExtractGroupsNoCaptureGroupsError(ExtractGroupsError):
135
- pattern: Pattern[str]
136
- text: str
137
-
138
- @override
139
- def __str__(self) -> str:
140
- return f"Pattern {self.pattern} must contain at least one capture group"
141
-
142
-
143
- @dataclass(kw_only=True, slots=True)
144
- class _ExtractGroupsNoMatchesError(ExtractGroupsError):
145
- @override
146
- def __str__(self) -> str:
147
- return f"Pattern {self.pattern} must match against {self.text}"
148
-
149
-
150
- __all__ = [
151
- "ExtractGroupError",
152
- "ExtractGroupsError",
153
- "ensure_pattern",
154
- "extract_group",
155
- "extract_groups",
156
- ]
utilities/sentinel.py DELETED
@@ -1,73 +0,0 @@
1
- from __future__ import annotations
2
-
3
- from dataclasses import dataclass
4
- from re import IGNORECASE, search
5
- from typing import Any, override
6
-
7
- from typing_extensions import TypeIs
8
-
9
-
10
- class _Meta(type):
11
- """Metaclass for the sentinel."""
12
-
13
- instance: Any = None
14
-
15
- @override
16
- def __call__(cls, *args: Any, **kwargs: Any) -> Any:
17
- if cls.instance is None:
18
- cls.instance = super().__call__(*args, **kwargs)
19
- return cls.instance
20
-
21
-
22
- SENTINEL_REPR = "<sentinel>"
23
-
24
-
25
- class Sentinel(metaclass=_Meta):
26
- """Base class for the sentinel object."""
27
-
28
- @override
29
- def __repr__(self) -> str:
30
- return SENTINEL_REPR
31
-
32
- @override
33
- def __str__(self) -> str:
34
- return repr(self)
35
-
36
-
37
- sentinel = Sentinel()
38
-
39
- ##
40
-
41
-
42
- def is_sentinel(obj: Any, /) -> TypeIs[Sentinel]:
43
- """Check if an object is the sentinel."""
44
- return obj is sentinel
45
-
46
-
47
- ##
48
-
49
-
50
- def parse_sentinel(text: str, /) -> Sentinel:
51
- """Parse text into the Sentinel value."""
52
- if search("^(|sentinel|<sentinel>)$", text, flags=IGNORECASE):
53
- return sentinel
54
- raise ParseSentinelError(text=text)
55
-
56
-
57
- @dataclass(kw_only=True, slots=True)
58
- class ParseSentinelError(Exception):
59
- text: str
60
-
61
- @override
62
- def __str__(self) -> str:
63
- return f"Unable to parse sentinel value; got {self.text!r}"
64
-
65
-
66
- __all__ = [
67
- "SENTINEL_REPR",
68
- "ParseSentinelError",
69
- "Sentinel",
70
- "is_sentinel",
71
- "parse_sentinel",
72
- "sentinel",
73
- ]
utilities/socket.py DELETED
@@ -1,8 +0,0 @@
1
- from __future__ import annotations
2
-
3
- from socket import gethostname
4
-
5
- HOSTNAME = gethostname()
6
-
7
-
8
- __all__ = ["HOSTNAME"]
utilities/string.py DELETED
@@ -1,20 +0,0 @@
1
- from __future__ import annotations
2
-
3
- from os import environ
4
- from pathlib import Path
5
- from string import Template
6
- from typing import Any, assert_never
7
-
8
-
9
- def substitute_environ(path_or_text: Path | str, /, **kwargs: Any) -> str:
10
- """Substitute the environment variables in a file."""
11
- match path_or_text:
12
- case Path() as path:
13
- return substitute_environ(path.read_text(), **kwargs)
14
- case str() as text:
15
- return Template(text).substitute(environ, **kwargs)
16
- case never:
17
- assert_never(never)
18
-
19
-
20
- __all__ = ["substitute_environ"]
utilities/tempfile.py DELETED
@@ -1,77 +0,0 @@
1
- from __future__ import annotations
2
-
3
- import tempfile
4
- from pathlib import Path
5
- from tempfile import gettempdir as _gettempdir
6
- from typing import TYPE_CHECKING, override
7
-
8
- from utilities.warnings import suppress_warnings
9
-
10
- if TYPE_CHECKING:
11
- from types import TracebackType
12
-
13
- from utilities.types import PathLike
14
-
15
-
16
- class TemporaryDirectory:
17
- """Wrapper around `TemporaryDirectory` with a `Path` attribute."""
18
-
19
- def __init__(
20
- self,
21
- *,
22
- suffix: str | None = None,
23
- prefix: str | None = None,
24
- dir: PathLike | None = None, # noqa: A002
25
- ignore_cleanup_errors: bool = False,
26
- delete: bool = True,
27
- ) -> None:
28
- super().__init__()
29
- self._temp_dir = _TemporaryDirectoryNoResourceWarning(
30
- suffix=suffix,
31
- prefix=prefix,
32
- dir=dir,
33
- ignore_cleanup_errors=ignore_cleanup_errors,
34
- delete=delete,
35
- )
36
- self.path = Path(self._temp_dir.name)
37
-
38
- def __enter__(self) -> Path:
39
- return Path(self._temp_dir.__enter__())
40
-
41
- def __exit__(
42
- self,
43
- exc: type[BaseException] | None,
44
- val: BaseException | None,
45
- tb: TracebackType | None,
46
- ) -> None:
47
- self._temp_dir.__exit__(exc, val, tb)
48
-
49
-
50
- class _TemporaryDirectoryNoResourceWarning(tempfile.TemporaryDirectory):
51
- @classmethod
52
- @override
53
- def _cleanup( # pyright: ignore[reportGeneralTypeIssues]
54
- cls,
55
- name: str,
56
- warn_message: str,
57
- ignore_errors: bool = False,
58
- delete: bool = True,
59
- ) -> None:
60
- with suppress_warnings(category=ResourceWarning):
61
- return super()._cleanup( # pyright: ignore[reportAttributeAccessIssue]
62
- name, warn_message, ignore_errors=ignore_errors, delete=delete
63
- )
64
-
65
-
66
- ##
67
-
68
-
69
- def gettempdir() -> Path:
70
- """Get the name of the directory used for temporary files."""
71
- return Path(_gettempdir())
72
-
73
-
74
- TEMP_DIR = gettempdir()
75
-
76
-
77
- __all__ = ["TEMP_DIR", "TemporaryDirectory", "gettempdir"]
utilities/typed_settings.py DELETED
@@ -1,152 +0,0 @@
1
- from __future__ import annotations
2
-
3
- from dataclasses import dataclass
4
- from functools import partial
5
- from ipaddress import IPv4Address, IPv6Address
6
- from os import environ
7
- from pathlib import Path
8
- from re import search
9
- from typing import TYPE_CHECKING, Any, assert_never, override
10
- from uuid import UUID
11
-
12
- import typed_settings
13
- from typed_settings import EnvLoader, FileLoader, find
14
- from typed_settings.converters import TSConverter
15
- from typed_settings.loaders import TomlFormat
16
- from whenever import (
17
- Date,
18
- DateDelta,
19
- DateTimeDelta,
20
- MonthDay,
21
- PlainDateTime,
22
- Time,
23
- TimeDelta,
24
- YearMonth,
25
- ZonedDateTime,
26
- )
27
-
28
- from utilities.iterables import always_iterable
29
- from utilities.pathlib import to_path
30
- from utilities.string import substitute_environ
31
-
32
- if TYPE_CHECKING:
33
- from collections.abc import Callable, Iterable
34
-
35
- from typed_settings.loaders import Loader
36
- from typed_settings.processors import Processor
37
-
38
- from utilities.types import MaybeCallablePathLike, MaybeIterable, PathLike
39
-
40
-
41
- type _ConverterItem = tuple[type[Any], Callable[..., Any]]
42
-
43
-
44
- ##
45
-
46
-
47
- class ExtendedTSConverter(TSConverter):
48
- """An extension of the TSConverter for custom types."""
49
-
50
- @override
51
- def __init__(
52
- self,
53
- *,
54
- resolve_paths: bool = True,
55
- strlist_sep: str | Callable[[str], list] | None = ":",
56
- extra: Iterable[_ConverterItem] = (),
57
- ) -> None:
58
- super().__init__(resolve_paths=resolve_paths, strlist_sep=strlist_sep)
59
- cases: list[_ConverterItem] = [
60
- (Date, Date.parse_common_iso),
61
- (DateDelta, DateDelta.parse_common_iso),
62
- (DateTimeDelta, DateTimeDelta.parse_common_iso),
63
- (IPv4Address, IPv4Address),
64
- (IPv6Address, IPv6Address),
65
- (MonthDay, MonthDay.parse_common_iso),
66
- (Path, partial(_parse_path, resolve=resolve_paths, pwd=Path.cwd())),
67
- (PlainDateTime, PlainDateTime.parse_common_iso),
68
- (Time, Time.parse_common_iso),
69
- (TimeDelta, TimeDelta.parse_common_iso),
70
- (UUID, UUID),
71
- (YearMonth, YearMonth.parse_common_iso),
72
- (ZonedDateTime, ZonedDateTime.parse_common_iso),
73
- *extra,
74
- ]
75
- extras = {cls: _make_converter(cls, func) for cls, func in cases}
76
- self.scalar_converters |= extras
77
-
78
-
79
- def _make_converter[T](
80
- cls: type[T], parser: Callable[[str], T], /
81
- ) -> Callable[[Any, type[Any]], Any]:
82
- def hook(value: T | str, _: type[T] = cls, /) -> Any:
83
- if not isinstance(value, (cls, str)): # pragma: no cover
84
- msg = f"Invalid type {type(value).__name__!r}; expected '{cls.__name__}' or 'str'"
85
- raise TypeError(msg)
86
- if isinstance(value, str):
87
- return parser(value)
88
- return value
89
-
90
- return hook
91
-
92
-
93
- def _parse_path(
94
- path: str, /, *, resolve: bool = False, pwd: MaybeCallablePathLike = Path.cwd
95
- ) -> Path:
96
- path = substitute_environ(path, **environ)
97
- match resolve:
98
- case True:
99
- return to_path(pwd).joinpath(path).resolve()
100
- case False:
101
- return Path(path)
102
- case never:
103
- assert_never(never)
104
-
105
-
106
- ##
107
-
108
-
109
- _BASE_DIR: Path = Path()
110
-
111
-
112
- def load_settings[T](
113
- cls: type[T],
114
- app_name: str,
115
- /,
116
- *,
117
- filenames: MaybeIterable[str] = "settings.toml",
118
- start_dir: PathLike | None = None,
119
- loaders: MaybeIterable[Loader] | None = None,
120
- processors: MaybeIterable[Processor] = (),
121
- converters: Iterable[_ConverterItem] = (),
122
- base_dir: Path = _BASE_DIR,
123
- ) -> T:
124
- if not search(r"^[A-Za-z]+(?:_[A-Za-z]+)*$", app_name):
125
- raise LoadSettingsError(appname=app_name)
126
- filenames_use = list(always_iterable(filenames))
127
- start_dir_use = None if start_dir is None else Path(start_dir)
128
- files = [find(filename, start_dir=start_dir_use) for filename in filenames_use]
129
- file_loader = FileLoader(formats={"*.toml": TomlFormat(app_name)}, files=files)
130
- env_loader = EnvLoader(f"{app_name.upper()}__", nested_delimiter="__")
131
- loaders_use: list[Loader] = [file_loader, env_loader]
132
- if loaders is not None:
133
- loaders_use.extend(always_iterable(loaders))
134
- return typed_settings.load_settings(
135
- cls,
136
- loaders_use,
137
- processors=list(always_iterable(processors)),
138
- converter=ExtendedTSConverter(extra=converters),
139
- base_dir=base_dir,
140
- )
141
-
142
-
143
- @dataclass(kw_only=True, slots=True)
144
- class LoadSettingsError(Exception):
145
- appname: str
146
-
147
- @override
148
- def __str__(self) -> str:
149
- return f"Invalid app name; got {self.appname!r}"
150
-
151
-
152
- __all__ = ["ExtendedTSConverter", "LoadSettingsError", "load_settings"]
utilities/tzdata.py DELETED
@@ -1,11 +0,0 @@
1
- from __future__ import annotations
2
-
3
- from zoneinfo import ZoneInfo
4
-
5
- HongKong = ZoneInfo("Asia/Hong_Kong")
6
- Tokyo = ZoneInfo("Asia/Tokyo")
7
- USCentral = ZoneInfo("US/Central")
8
- USEastern = ZoneInfo("US/Eastern")
9
-
10
-
11
- __all__ = ["HongKong", "Tokyo", "USCentral", "USEastern"]
utilities/tzlocal.py DELETED
@@ -1,28 +0,0 @@
1
- from __future__ import annotations
2
-
3
- from logging import getLogger
4
- from typing import TYPE_CHECKING, cast
5
-
6
- from tzlocal import get_localzone
7
-
8
- if TYPE_CHECKING:
9
- from zoneinfo import ZoneInfo
10
-
11
- from utilities.types import TimeZone
12
-
13
-
14
- def get_local_time_zone() -> ZoneInfo:
15
- """Get the local time zone, with the logging disabled."""
16
- logger = getLogger("tzlocal") # avoid import cycle
17
- init_disabled = logger.disabled
18
- logger.disabled = True
19
- time_zone = get_localzone()
20
- logger.disabled = init_disabled
21
- return time_zone
22
-
23
-
24
- LOCAL_TIME_ZONE = get_local_time_zone()
25
- LOCAL_TIME_ZONE_NAME = cast("TimeZone", LOCAL_TIME_ZONE.key)
26
-
27
-
28
- __all__ = ["LOCAL_TIME_ZONE", "LOCAL_TIME_ZONE_NAME", "get_local_time_zone"]
utilities/warnings.py DELETED
@@ -1,65 +0,0 @@
1
- from __future__ import annotations
2
-
3
- from contextlib import ExitStack, contextmanager
4
- from typing import TYPE_CHECKING, Literal, TypedDict
5
- from warnings import catch_warnings, filterwarnings
6
-
7
- if TYPE_CHECKING:
8
- from collections.abc import Iterator
9
-
10
- from utilities.types import TypeLike
11
-
12
-
13
- @contextmanager
14
- def catch_warnings_as_errors(
15
- *, message: str = "", category: TypeLike[Warning] | None = None
16
- ) -> Iterator[None]:
17
- """Catch warnings as errors."""
18
- with _handle_warnings("error", message=message, category=category):
19
- yield
20
-
21
-
22
- @contextmanager
23
- def suppress_warnings(
24
- *, message: str = "", category: TypeLike[Warning] | None = None
25
- ) -> Iterator[None]:
26
- """Suppress warnings."""
27
- with _handle_warnings("ignore", message=message, category=category):
28
- yield
29
-
30
-
31
- _ActionKind = Literal["error", "ignore"]
32
-
33
-
34
- def _handle_warnings(
35
- action: _ActionKind,
36
- /,
37
- *,
38
- message: str = "",
39
- category: TypeLike[Warning] | None = None,
40
- ) -> ExitStack:
41
- """Handle a set of warnings."""
42
- stack = ExitStack()
43
- categories = category if isinstance(category, tuple) else [category]
44
- for cat in categories:
45
- cm = _handle_warnings_1(action, message=message, category=cat)
46
- stack.enter_context(cm)
47
- return stack
48
-
49
-
50
- @contextmanager
51
- def _handle_warnings_1(
52
- action: _ActionKind, /, *, message: str = "", category: type[Warning] | None = None
53
- ) -> Iterator[None]:
54
- """Handle one set of warnings."""
55
-
56
- class Kwargs(TypedDict, total=False):
57
- category: type[Warning]
58
-
59
- with catch_warnings():
60
- kwargs: Kwargs = {} if category is None else {"category": category}
61
- filterwarnings(action, message=message, **kwargs)
62
- yield
63
-
64
-
65
- __all__ = ["catch_warnings_as_errors", "suppress_warnings"]