dycw-utilities 0.129.10__py3-none-any.whl → 0.175.17__py3-none-any.whl
This diff shows the changes between two publicly released versions of the package, as they appear in their respective public registries. It is provided for informational purposes only.
- dycw_utilities-0.175.17.dist-info/METADATA +34 -0
- dycw_utilities-0.175.17.dist-info/RECORD +103 -0
- dycw_utilities-0.175.17.dist-info/WHEEL +4 -0
- dycw_utilities-0.175.17.dist-info/entry_points.txt +4 -0
- utilities/__init__.py +1 -1
- utilities/altair.py +14 -14
- utilities/asyncio.py +350 -819
- utilities/atomicwrites.py +18 -6
- utilities/atools.py +77 -22
- utilities/cachetools.py +24 -29
- utilities/click.py +393 -237
- utilities/concurrent.py +8 -11
- utilities/contextlib.py +216 -17
- utilities/contextvars.py +20 -1
- utilities/cryptography.py +3 -3
- utilities/dataclasses.py +83 -118
- utilities/docker.py +293 -0
- utilities/enum.py +26 -23
- utilities/errors.py +17 -3
- utilities/fastapi.py +29 -65
- utilities/fpdf2.py +3 -3
- utilities/functions.py +169 -416
- utilities/functools.py +18 -19
- utilities/git.py +9 -30
- utilities/grp.py +28 -0
- utilities/gzip.py +31 -0
- utilities/http.py +3 -2
- utilities/hypothesis.py +738 -589
- utilities/importlib.py +17 -1
- utilities/inflect.py +25 -0
- utilities/iterables.py +194 -262
- utilities/jinja2.py +148 -0
- utilities/json.py +70 -0
- utilities/libcst.py +38 -17
- utilities/lightweight_charts.py +5 -9
- utilities/logging.py +345 -543
- utilities/math.py +18 -13
- utilities/memory_profiler.py +11 -15
- utilities/more_itertools.py +200 -131
- utilities/operator.py +33 -29
- utilities/optuna.py +6 -6
- utilities/orjson.py +272 -137
- utilities/os.py +61 -4
- utilities/parse.py +59 -61
- utilities/pathlib.py +281 -40
- utilities/permissions.py +298 -0
- utilities/pickle.py +2 -2
- utilities/platform.py +24 -5
- utilities/polars.py +1214 -430
- utilities/polars_ols.py +1 -1
- utilities/postgres.py +408 -0
- utilities/pottery.py +113 -26
- utilities/pqdm.py +10 -11
- utilities/psutil.py +6 -57
- utilities/pwd.py +28 -0
- utilities/pydantic.py +4 -54
- utilities/pydantic_settings.py +240 -0
- utilities/pydantic_settings_sops.py +76 -0
- utilities/pyinstrument.py +8 -10
- utilities/pytest.py +227 -121
- utilities/pytest_plugins/__init__.py +1 -0
- utilities/pytest_plugins/pytest_randomly.py +23 -0
- utilities/pytest_plugins/pytest_regressions.py +56 -0
- utilities/pytest_regressions.py +26 -46
- utilities/random.py +13 -9
- utilities/re.py +58 -28
- utilities/redis.py +401 -550
- utilities/scipy.py +1 -1
- utilities/sentinel.py +10 -0
- utilities/shelve.py +4 -1
- utilities/shutil.py +25 -0
- utilities/slack_sdk.py +36 -106
- utilities/sqlalchemy.py +502 -473
- utilities/sqlalchemy_polars.py +38 -94
- utilities/string.py +2 -3
- utilities/subprocess.py +1572 -0
- utilities/tempfile.py +86 -4
- utilities/testbook.py +50 -0
- utilities/text.py +165 -42
- utilities/timer.py +37 -65
- utilities/traceback.py +158 -929
- utilities/types.py +146 -116
- utilities/typing.py +531 -71
- utilities/tzdata.py +1 -53
- utilities/tzlocal.py +6 -23
- utilities/uuid.py +43 -5
- utilities/version.py +27 -26
- utilities/whenever.py +1776 -386
- utilities/zoneinfo.py +84 -22
- dycw_utilities-0.129.10.dist-info/METADATA +0 -241
- dycw_utilities-0.129.10.dist-info/RECORD +0 -96
- dycw_utilities-0.129.10.dist-info/WHEEL +0 -4
- dycw_utilities-0.129.10.dist-info/licenses/LICENSE +0 -21
- utilities/datetime.py +0 -1409
- utilities/eventkit.py +0 -402
- utilities/loguru.py +0 -144
- utilities/luigi.py +0 -228
- utilities/period.py +0 -324
- utilities/pyrsistent.py +0 -89
- utilities/python_dotenv.py +0 -105
- utilities/streamlit.py +0 -105
- utilities/sys.py +0 -87
- utilities/tenacity.py +0 -145
utilities/logging.py
CHANGED
@@ -1,19 +1,14 @@
 from __future__ import annotations

-import datetime as dt
 import re
-from contextlib import contextmanager
 from dataclasses import dataclass, field
 from functools import cached_property
-from itertools import product
 from logging import (
-    DEBUG,
-    ERROR,
-    NOTSET,
     FileHandler,
     Formatter,
     Handler,
     Logger,
+    LoggerAdapter,
     LogRecord,
     StreamHandler,
     basicConfig,
@@ -24,59 +19,318 @@ from logging import (
 from logging.handlers import BaseRotatingHandler, TimedRotatingFileHandler
 from pathlib import Path
 from re import Pattern
-from
-from time import time
+from socket import gethostname
 from typing import (
     TYPE_CHECKING,
     Any,
-
+    Concatenate,
     Literal,
+    NotRequired,
     Self,
+    TypedDict,
     assert_never,
     cast,
     override,
 )

+from whenever import ZonedDateTime
+
+from utilities.atomicwrites import move_many
 from utilities.dataclasses import replace_non_sentinel
-from utilities.datetime import (
-    SECOND,
-    maybe_sub_pct_y,
-    parse_datetime_compact,
-    round_datetime,
-    serialize_compact,
-)
 from utilities.errors import ImpossibleCaseError
 from utilities.iterables import OneEmptyError, always_iterable, one
-from utilities.pathlib import ensure_suffix,
-from utilities.
-
-
-
-
-    RICH_MAX_STRING,
-    RICH_MAX_WIDTH,
+from utilities.pathlib import ensure_suffix, to_path
+from utilities.re import (
+    ExtractGroupError,
+    ExtractGroupsError,
+    extract_group,
+    extract_groups,
 )
 from utilities.sentinel import Sentinel, sentinel
-from utilities.
+from utilities.whenever import (
+    WheneverLogRecord,
+    format_compact,
+    get_now_local,
+    to_zoned_date_time,
+)

 if TYPE_CHECKING:
-    from collections.abc import Callable, Iterable,
+    from collections.abc import Callable, Iterable, Mapping, MutableMapping
+    from datetime import time
     from logging import _FilterType
-    from zoneinfo import ZoneInfo

     from utilities.types import (
-
+        LoggerLike,
         LogLevel,
         MaybeCallablePathLike,
         MaybeIterable,
         PathLike,
+        StrMapping,
     )
-    from utilities.version import MaybeCallableVersionLike

-
-
-
-
+
+_DEFAULT_DATEFMT = "%Y-%m-%d %H:%M:%S"
+_DEFAULT_BACKUP_COUNT: int = 100
+_DEFAULT_MAX_BYTES: int = 10 * 1024**2
+_DEFAULT_WHEN: _When = "D"
+
+
+##
+
+
+def add_adapter[**P](
+    logger: Logger,
+    process: Callable[Concatenate[str, P], str],
+    /,
+    *args: P.args,
+    **kwargs: P.kwargs,
+) -> LoggerAdapter:
+    """Add an adapter to a logger."""
+
+    class CustomAdapter(LoggerAdapter):
+        @override
+        def process(
+            self, msg: str, kwargs: MutableMapping[str, Any]
+        ) -> tuple[str, MutableMapping[str, Any]]:
+            extra = cast("_ArgsAndKwargs", self.extra)
+            new_msg = process(msg, *extra["args"], **extra["kwargs"])
+            return new_msg, kwargs
+
+    return CustomAdapter(logger, extra=_ArgsAndKwargs(args=args, kwargs=kwargs))
+
+
+class _ArgsAndKwargs(TypedDict):
+    args: tuple[Any, ...]
+    kwargs: StrMapping
+
+
+##
+
+
+def add_filters(handler: Handler, /, *filters: _FilterType) -> None:
+    """Add a set of filters to a handler."""
+    for filter_i in filters:
+        handler.addFilter(filter_i)
+
+
+##
+
+
+def basic_config(
+    *,
+    obj: LoggerLike | Handler | None = None,
+    format_: str | None = None,
+    prefix: str | None = None,
+    hostname: bool = False,
+    datefmt: str = _DEFAULT_DATEFMT,
+    level: LogLevel = "INFO",
+    filters: MaybeIterable[_FilterType] | None = None,
+    plain: bool = False,
+    color_field_styles: Mapping[str, _FieldStyleKeys] | None = None,
+) -> None:
+    """Do the basic config."""
+    match obj:
+        case None:
+            if format_ is None:
+                format_use = get_format_str(prefix=prefix, hostname=hostname)
+            else:
+                format_use = format_
+            basicConfig(format=format_use, datefmt=datefmt, style="{", level=level)
+        case Logger() as logger:
+            logger.setLevel(level)
+            logger.addHandler(handler := StreamHandler())
+            basic_config(
+                obj=handler,
+                format_=format_,
+                prefix=prefix,
+                hostname=hostname,
+                datefmt=datefmt,
+                level=level,
+                filters=filters,
+                plain=plain,
+                color_field_styles=color_field_styles,
+            )
+        case str() as name:
+            basic_config(
+                obj=to_logger(name),
+                format_=format_,
+                prefix=prefix,
+                hostname=hostname,
+                datefmt=datefmt,
+                level=level,
+                filters=filters,
+                plain=plain,
+                color_field_styles=color_field_styles,
+            )
+        case Handler() as handler:
+            handler.setLevel(level)
+            if filters is not None:
+                add_filters(handler, *always_iterable(filters))
+            formatter = get_formatter(
+                prefix=prefix,
+                format_=format_,
+                hostname=hostname,
+                datefmt=datefmt,
+                plain=plain,
+                color_field_styles=color_field_styles,
+            )
+            handler.setFormatter(formatter)
+        case never:
+            assert_never(never)
+
+
+##
+
+
+def get_format_str(*, prefix: str | None = None, hostname: bool = False) -> str:
+    """Generate a format string."""
+    parts: list[str] = [
+        "{zoned_datetime}",
+        f"{gethostname()}:{{process}}" if hostname else "{process}",
+        "{name}:{funcName}:{lineno}",
+        "{levelname}",
+        "{message}",
+    ]
+    joined = " | ".join(parts)
+    return joined if prefix is None else f"{prefix} {joined}"
+
+
+##
+
+
+type _FieldStyleKeys = Literal[
+    "asctime", "hostname", "levelname", "name", "programname", "username"
+]
+
+
+class _FieldStyleDict(TypedDict):
+    color: str
+    bold: NotRequired[bool]
+
+
+def get_formatter(
+    *,
+    format_: str | None = None,
+    prefix: str | None = None,
+    hostname: bool = False,
+    datefmt: str = _DEFAULT_DATEFMT,
+    plain: bool = False,
+    color_field_styles: Mapping[str, _FieldStyleKeys] | None = None,
+) -> Formatter:
+    """Get the formatter; colored if available."""
+    setLogRecordFactory(WheneverLogRecord)
+    if plain:
+        return _get_plain_formatter(
+            format_=format_, prefix=prefix, hostname=hostname, datefmt=datefmt
+        )
+    try:
+        from coloredlogs import DEFAULT_FIELD_STYLES, ColoredFormatter
+    except ModuleNotFoundError: # pragma: no cover
+        return _get_plain_formatter(
+            format_=format_, prefix=prefix, hostname=hostname, datefmt=datefmt
+        )
+    format_use = (
+        get_format_str(prefix=prefix, hostname=hostname) if format_ is None else format_
+    )
+    default = cast("dict[_FieldStyleKeys, _FieldStyleDict]", DEFAULT_FIELD_STYLES)
+    field_styles = {cast("str", k): v for k, v in default.items()}
+    field_styles["zoned_datetime"] = default["asctime"]
+    field_styles["hostname"] = default["hostname"]
+    field_styles["process"] = default["hostname"]
+    field_styles["lineno"] = default["name"]
+    field_styles["funcName"] = default["name"]
+    if color_field_styles is not None:
+        field_styles.update({k: default[v] for k, v in color_field_styles.items()})
+    return ColoredFormatter(
+        fmt=format_use, datefmt=datefmt, style="{", field_styles=field_styles
+    )
+
+
+def _get_plain_formatter(
+    *,
+    format_: str | None = None,
+    prefix: str | None = None,
+    hostname: bool = False,
+    datefmt: str = _DEFAULT_DATEFMT,
+) -> Formatter:
+    """Get the plain formatter."""
+    format_use = (
+        get_format_str(prefix=prefix, hostname=hostname) if format_ is None else format_
+    )
+    return Formatter(fmt=format_use, datefmt=datefmt, style="{")
+
+
+##
+
+
+def get_logging_level_number(level: LogLevel, /) -> int:
+    """Get the logging level number."""
+    mapping = getLevelNamesMapping()
+    try:
+        return mapping[level]
+    except KeyError:
+        raise GetLoggingLevelNumberError(level=level) from None
+
+
+@dataclass(kw_only=True, slots=True)
+class GetLoggingLevelNumberError(Exception):
+    level: LogLevel
+
+    @override
+    def __str__(self) -> str:
+        return f"Invalid logging level: {self.level!r}"
+
+
+##
+
+
+def setup_logging(
+    *,
+    logger: LoggerLike | None = None,
+    format_: str | None = None,
+    datefmt: str = _DEFAULT_DATEFMT,
+    console_level: LogLevel = "INFO",
+    console_prefix: str = "❯", # noqa: RUF001
+    console_filters: MaybeIterable[_FilterType] | None = None,
+    files_dir: MaybeCallablePathLike = Path.cwd,
+    files_max_bytes: int = _DEFAULT_MAX_BYTES,
+    files_when: _When = _DEFAULT_WHEN,
+    files_interval: int = 1,
+    files_backup_count: int = _DEFAULT_BACKUP_COUNT,
+    files_filters: Iterable[_FilterType] | None = None,
+) -> None:
+    """Set up logger."""
+    basic_config(
+        obj=logger,
+        prefix=console_prefix,
+        format_=format_,
+        datefmt=datefmt,
+        level=console_level,
+        filters=console_filters,
+    )
+    logger_use = to_logger(logger)
+    name = logger_use.name
+    levels: list[LogLevel] = ["DEBUG", "INFO", "ERROR"]
+    for level in levels:
+        lower = level.lower()
+        for stem in [lower, f"{name}-{lower}"]:
+            handler = SizeAndTimeRotatingFileHandler(
+                to_path(files_dir).joinpath(stem).with_suffix(".txt"),
+                maxBytes=files_max_bytes,
+                when=files_when,
+                interval=files_interval,
+                backupCount=files_backup_count,
+            )
+            logger_use.addHandler(handler)
+            basic_config(
+                obj=handler,
+                format_=format_,
+                hostname=True,
+                datefmt=datefmt,
+                level=level,
+                filters=files_filters,
+                plain=True,
+            )


 ##
@@ -100,12 +354,12 @@ class SizeAndTimeRotatingFileHandler(BaseRotatingHandler):
         encoding: str | None = None,
         delay: bool = False,
         errors: Literal["strict", "ignore", "replace"] | None = None,
-        maxBytes: int =
-        when: _When =
+        maxBytes: int = _DEFAULT_MAX_BYTES,
+        when: _When = _DEFAULT_WHEN,
         interval: int = 1,
-        backupCount: int =
+        backupCount: int = _DEFAULT_BACKUP_COUNT,
         utc: bool = False,
-        atTime:
+        atTime: time | None = None,
     ) -> None:
         path = Path(filename)
         path.parent.mkdir(parents=True, exist_ok=True)
@@ -131,7 +385,7 @@ class SizeAndTimeRotatingFileHandler(BaseRotatingHandler):

     @override
     def emit(self, record: LogRecord) -> None:
-        try:
+        try:
             if (self._backup_count is not None) and self._should_rollover(record):
                 self._do_rollover(backup_count=self._backup_count)
             FileHandler.emit(self, record)
@@ -143,23 +397,23 @@ class SizeAndTimeRotatingFileHandler(BaseRotatingHandler):
             self.stream.close()
             self.stream = None

-        actions = _compute_rollover_actions(
+        actions = _compute_rollover_actions(
             self._directory,
             self._stem,
             self._suffix,
             patterns=self._patterns,
             backup_count=backup_count,
         )
-        actions.do()
+        actions.do()

         if not self.delay: # pragma: no cover
             self.stream = self._open()
-        self._time_handler.rolloverAt = (
-
+        self._time_handler.rolloverAt = self._time_handler.computeRollover(
+            get_now_local().timestamp()
         )

     def _should_rollover(self, record: LogRecord, /) -> bool:
-        if self._max_bytes is not None:
+        if self._max_bytes is not None:
             try:
                 size = self._filename.stat().st_size
             except FileNotFoundError:
@@ -167,16 +421,14 @@ class SizeAndTimeRotatingFileHandler(BaseRotatingHandler):
             else:
                 if size >= self._max_bytes:
                     return True
-        return bool(self._time_handler.shouldRollover(record))
+        return bool(self._time_handler.shouldRollover(record))


 def _compute_rollover_patterns(stem: str, suffix: str, /) -> _RolloverPatterns:
     return _RolloverPatterns(
         pattern1=re.compile(rf"^{stem}\.(\d+){suffix}$"),
-        pattern2=re.compile(rf"^{stem}\.(\d+)__(
-        pattern3=re.compile(
-            rf"^{stem}\.(\d+)__(\d{{8}}T\d{{6}})__(\d{{8}}T\d{{6}}){suffix}$"
-        ),
+        pattern2=re.compile(rf"^{stem}\.(\d+)__(.+?){suffix}$"),
+        pattern3=re.compile(rf"^{stem}\.(\d+)__(.+?)__(.+?){suffix}$"),
     )


@@ -196,20 +448,18 @@ def _compute_rollover_actions(
     patterns: _RolloverPatterns | None = None,
     backup_count: int = 1,
 ) -> _RolloverActions:
-
-
-    patterns = ( # skipif-ci-and-windows
+    patterns = (
         _compute_rollover_patterns(stem, suffix) if patterns is None else patterns
     )
-    files = {
+    files = {
         file
         for path in directory.iterdir()
         if (file := _RotatingLogFile.from_path(path, stem, suffix, patterns=patterns))
         is not None
     }
-    deletions: set[_Deletion] = set()
-    rotations: set[_Rotation] = set()
-    for file in files:
+    deletions: set[_Deletion] = set()
+    rotations: set[_Rotation] = set()
+    for file in files:
         match file.index, file.start, file.end:
             case int() as index, _, _ if index >= backup_count:
                 deletions.add(_Deletion(file=file))
@@ -227,13 +477,11 @@ def _compute_rollover_actions(
                 rotations.add(
                     _Rotation(file=file, index=curr + 1, start=start, end=end)
                 )
-            case int() as index,
+            case int() as index, ZonedDateTime(), ZonedDateTime():
                 rotations.add(_Rotation(file=file, index=index + 1))
             case _: # pragma: no cover
                 raise NotImplementedError
-    return _RolloverActions(
-        deletions=deletions, rotations=rotations
-    )
+    return _RolloverActions(deletions=deletions, rotations=rotations)


 @dataclass(order=True, unsafe_hash=True, kw_only=True)
@@ -242,13 +490,9 @@ class _RolloverActions:
     rotations: set[_Rotation] = field(default_factory=set)

     def do(self) -> None:
-
-
-        for deletion in self.deletions: # skipif-ci-and-windows
+        for deletion in self.deletions:
             deletion.delete()
-        move_many(
-            *((r.file.path, r.destination) for r in self.rotations)
-        )
+        move_many(*((r.file.path, r.destination) for r in self.rotations))


 @dataclass(order=True, unsafe_hash=True, kw_only=True)
@@ -257,14 +501,8 @@ class _RotatingLogFile:
     stem: str
     suffix: str
     index: int | None = None
-    start:
-    end:
-
-    def __post_init__(self) -> None:
-        if self.start is not None:
-            self.start = round_datetime(self.start, SECOND)
-        if self.end is not None:
-            self.end = round_datetime(self.end, SECOND)
+    start: ZonedDateTime | None = None
+    end: ZonedDateTime | None = None

     @classmethod
     def from_path(
@@ -278,19 +516,24 @@ class _RotatingLogFile:
     ) -> Self | None:
         if (not path.stem.startswith(stem)) or path.suffix != suffix:
             return None
-        if patterns is None:
+        if patterns is None:
             patterns = _compute_rollover_patterns(stem, suffix)
         try:
-
-        except
+            index, start, end = extract_groups(patterns.pattern3, path.name)
+        except ExtractGroupsError:
             pass
         else:
             return cls(
-                directory=path.parent,
+                directory=path.parent,
+                stem=stem,
+                suffix=suffix,
+                index=int(index),
+                start=to_zoned_date_time(start),
+                end=to_zoned_date_time(end),
             )
         try:
-
-        except
+            index, end = extract_groups(patterns.pattern2, path.name)
+        except ExtractGroupsError:
             pass
         else:
             return cls(
@@ -298,21 +541,17 @@ class _RotatingLogFile:
                 stem=stem,
                 suffix=suffix,
                 index=int(index),
-                end=
+                end=to_zoned_date_time(end),
             )
         try:
-
-        except
-
+            index = extract_group(patterns.pattern1, path.name)
+        except ExtractGroupError:
+            pass
         else:
             return cls(
-                directory=path.parent,
-                stem=stem,
-                suffix=suffix,
-                index=int(index),
-                start=parse_datetime_compact(start),
-                end=parse_datetime_compact(end),
+                directory=path.parent, stem=stem, suffix=suffix, index=int(index)
             )
+        return cls(directory=path.parent, stem=stem, suffix=suffix)

     @cached_property
     def path(self) -> Path:
@@ -322,10 +561,10 @@ class _RotatingLogFile:
                 tail = None
             case int() as index, None, None:
                 tail = str(index)
-            case int() as index, None,
-                tail = f"{index}__{
-            case int() as index,
-                tail = f"{index}__{
+            case int() as index, None, ZonedDateTime() as end:
+                tail = f"{index}__{format_compact(end, path=True)}"
+            case int() as index, ZonedDateTime() as start, ZonedDateTime() as end:
+                tail = f"{index}__{format_compact(start, path=True)}__{format_compact(end, path=True)}"
             case _: # pragma: no cover
                 raise ImpossibleCaseError(
                     case=[f"{self.index=}", f"{self.start=}", f"{self.end=}"]
@@ -337,12 +576,10 @@ class _RotatingLogFile:
         self,
         *,
         index: int | None | Sentinel = sentinel,
-        start:
-        end:
+        start: ZonedDateTime | None | Sentinel = sentinel,
+        end: ZonedDateTime | None | Sentinel = sentinel,
     ) -> Self:
-        return replace_non_sentinel(
-            self, index=index, start=start, end=end
-        )
+        return replace_non_sentinel(self, index=index, start=start, end=end)


 @dataclass(order=True, unsafe_hash=True, kw_only=True)
@@ -350,478 +587,43 @@ class _Deletion:
     file: _RotatingLogFile

     def delete(self) -> None:
-        self.file.path.unlink(missing_ok=True)
+        self.file.path.unlink(missing_ok=True)


 @dataclass(order=True, unsafe_hash=True, kw_only=True)
 class _Rotation:
     file: _RotatingLogFile
     index: int = 0
-    start:
-    end:
-
-    def __post_init__(self) -> None:
-        if isinstance(self.start, dt.datetime): # skipif-ci-and-windows
-            self.start = round_datetime(self.start, SECOND)
-        if isinstance(self.end, dt.datetime): # skipif-ci-and-windows
-            self.end = round_datetime(self.end, SECOND)
+    start: ZonedDateTime | None | Sentinel = sentinel
+    end: ZonedDateTime | Sentinel = sentinel

     @cached_property
     def destination(self) -> Path:
-        return self.file.replace(
-            index=self.index, start=self.start, end=self.end
-        ).path
-
-
-##
-
-
-class StandaloneFileHandler(Handler):
-    """Handler for emitting tracebacks to individual files."""
-
-    @override
-    def __init__(
-        self, *, level: int = NOTSET, path: MaybeCallablePathLike | None = None
-    ) -> None:
-        super().__init__(level=level)
-        self._path = get_path(path=path)
-
-    @override
-    def emit(self, record: LogRecord) -> None:
-        from utilities.atomicwrites import writer
-        from utilities.tzlocal import get_now_local
-
-        try:
-            path = self._path.joinpath(serialize_compact(get_now_local())).with_suffix(
-                ".txt"
-            )
-            formatted = self.format(record)
-            with writer(path, overwrite=True) as temp, temp.open(mode="w") as fh:
-                _ = fh.write(formatted)
-        except Exception: # noqa: BLE001 # pragma: no cover
-            self.handleError(record)
+        return self.file.replace(index=self.index, start=self.start, end=self.end).path


 ##


-def
-    """
-    for filter_i in filters:
-        handler.addFilter(filter_i)
-
-
-##
-
-
-def basic_config(
-    *,
-    obj: LoggerOrName | Handler | None = None,
-    format_: str = "{asctime} | {name} | {levelname:8} | {message}",
-    whenever: bool = False,
-    level: LogLevel = "INFO",
-    plain: bool = False,
-) -> None:
-    """Do the basic config."""
-    if whenever:
-        format_ = format_.replace("{asctime}", "{zoned_datetime}")
-    datefmt = maybe_sub_pct_y("%Y-%m-%d %H:%M:%S")
-    match obj:
-        case None:
-            basicConfig(format=format_, datefmt=datefmt, style="{", level=level)
-        case Logger() as logger:
-            logger.setLevel(level)
-            logger.addHandler(handler := StreamHandler())
-            basic_config(
-                obj=handler,
-                format_=format_,
-                whenever=whenever,
-                level=level,
-                plain=plain,
-            )
-        case str() as name:
-            basic_config(
-                obj=get_logger(logger=name),
-                format_=format_,
-                whenever=whenever,
-                level=level,
-                plain=plain,
-            )
-        case Handler() as handler:
-            handler.setLevel(level)
-            if plain:
-                formatter = Formatter(fmt=format_, datefmt=datefmt, style="{")
-            else:
-                try:
-                    from coloredlogs import ColoredFormatter
-                except ModuleNotFoundError: # pragma: no cover
-                    formatter = Formatter(fmt=format_, datefmt=datefmt, style="{")
-                else:
-                    formatter = ColoredFormatter(
-                        fmt=format_, datefmt=datefmt, style="{"
-                    )
-            handler.setFormatter(formatter)
-        case _ as never:
-            assert_never(never)
-
-
-##
-
-
-def filter_for_key(
-    key: str, /, *, default: bool = False
-) -> Callable[[LogRecord], bool]:
-    """Make a filter for a given attribute."""
-    if (key in _FILTER_FOR_KEY_BLACKLIST) or key.startswith("__"):
-        raise FilterForKeyError(key=key)
-
-    def filter_(record: LogRecord, /) -> bool:
-        try:
-            value = getattr(record, key)
-        except AttributeError:
-            return default
-        return bool(value)
-
-    return filter_
-
-
-# fmt: off
-_FILTER_FOR_KEY_BLACKLIST = {
-    "args", "created", "exc_info", "exc_text", "filename", "funcName", "getMessage", "levelname", "levelno", "lineno", "module", "msecs", "msg", "name", "pathname", "process", "processName", "relativeCreated", "stack_info", "taskName", "thread", "threadName"
-}
-# fmt: on
-
-
-@dataclass(kw_only=True, slots=True)
-class FilterForKeyError(Exception):
-    key: str
-
-    @override
-    def __str__(self) -> str:
-        return f"Invalid key: {self.key!r}"
-
-
-##
-
-
-def get_default_logging_path() -> Path:
-    """Get the logging default path."""
-    return get_root().joinpath(".logs")
-
-
-##
-
-
-def get_logger(*, logger: LoggerOrName | None = None) -> Logger:
-    """Get a logger."""
+def to_logger(logger: LoggerLike | None = None, /) -> Logger:
+    """Convert to a logger."""
     match logger:
         case Logger():
             return logger
         case str() | None:
             return getLogger(logger)
-        case
+        case never:
             assert_never(never)


-##
-
-
-def get_logging_level_number(level: LogLevel, /) -> int:
-    """Get the logging level number."""
-    mapping = getLevelNamesMapping()
-    try:
-        return mapping[level]
-    except KeyError:
-        raise GetLoggingLevelNumberError(level=level) from None
-
-
-@dataclass(kw_only=True, slots=True)
-class GetLoggingLevelNumberError(Exception):
-    level: LogLevel
-
-    @override
-    def __str__(self) -> str:
-        return f"Invalid logging level: {self.level!r}"
-
-
-##
-
-
-def setup_logging(
-    *,
-    logger: LoggerOrName | None = None,
-    console_level: LogLevel | None = "INFO",
-    console_filters: Iterable[_FilterType] | None = None,
-    console_fmt: str = "❯ {_zoned_datetime_str} | {name}:{funcName}:{lineno} | {message}", # noqa: RUF001
-    files_dir: MaybeCallablePathLike | None = get_default_logging_path,
-    files_when: _When = "D",
-    files_interval: int = 1,
-    files_backup_count: int = 10,
-    files_max_bytes: int = 10 * 1024**2,
-    files_filters: Iterable[_FilterType] | None = None,
-    files_fmt: str = "{_zoned_datetime_str} | {name}:{funcName}:{lineno} | {levelname:8} | {message}",
-    filters: MaybeIterable[_FilterType] | None = None,
-    formatter_version: MaybeCallableVersionLike | None = None,
-    formatter_max_width: int = RICH_MAX_WIDTH,
-    formatter_indent_size: int = RICH_INDENT_SIZE,
-    formatter_max_length: int | None = RICH_MAX_LENGTH,
-    formatter_max_string: int | None = RICH_MAX_STRING,
-    formatter_max_depth: int | None = RICH_MAX_DEPTH,
-    formatter_expand_all: bool = RICH_EXPAND_ALL,
-    extra: Callable[[LoggerOrName | None], None] | None = None,
-) -> None:
-    """Set up logger."""
-    # log record factory
-    from utilities.tzlocal import get_local_time_zone # skipif-ci-and-windows
-
-    class LogRecordNanoLocal( # skipif-ci-and-windows
-        _AdvancedLogRecord, time_zone=get_local_time_zone()
-    ): ...
-
-    setLogRecordFactory(LogRecordNanoLocal) # skipif-ci-and-windows
-
-    console_fmt, files_fmt = [ # skipif-ci-and-windows
-        f.replace("{_zoned_datetime_str}", LogRecordNanoLocal.get_zoned_datetime_fmt())
-        for f in [console_fmt, files_fmt]
-    ]
-
-    # logger
-    logger_use = get_logger(logger=logger) # skipif-ci-and-windows
-    logger_use.setLevel(DEBUG) # skipif-ci-and-windows
-
-    # filters
-    console_filters = ( # skipif-ci-and-windows
-        [] if console_filters is None else list(console_filters)
-    )
-    files_filters = ( # skipif-ci-and-windows
-        [] if files_filters is None else list(files_filters)
-    )
-    filters = ( # skipif-ci-and-windows
-        [] if filters is None else list(always_iterable(filters))
-    )
-
-    # formatters
-    try: # skipif-ci-and-windows
-        from coloredlogs import DEFAULT_FIELD_STYLES, ColoredFormatter
-    except ModuleNotFoundError: # pragma: no cover
-        console_formatter = Formatter(fmt=console_fmt, style="{")
-        files_formatter = Formatter(fmt=files_fmt, style="{")
-    else: # skipif-ci-and-windows
-        field_styles = DEFAULT_FIELD_STYLES | {
-            "_zoned_datetime_str": DEFAULT_FIELD_STYLES["asctime"]
-        }
-        console_formatter = ColoredFormatter(
-            fmt=console_fmt, style="{", field_styles=field_styles
-        )
-        files_formatter = ColoredFormatter(
-            fmt=files_fmt, style="{", field_styles=field_styles
-        )
-    plain_formatter = Formatter(fmt=files_fmt, style="{") # skipif-ci-and-windows
-
-    # console
-    if console_level is not None: # skipif-ci-and-windows
-        console_low_or_no_exc_handler = StreamHandler(stream=stdout)
-        add_filters(console_low_or_no_exc_handler, _console_low_or_no_exc_filter)
-        add_filters(console_low_or_no_exc_handler, *console_filters)
-        add_filters(console_low_or_no_exc_handler, *filters)
-        console_low_or_no_exc_handler.setFormatter(console_formatter)
-        console_low_or_no_exc_handler.setLevel(console_level)
-        logger_use.addHandler(console_low_or_no_exc_handler)
-
-        console_high_and_exc_handler = StreamHandler(stream=stdout)
-        add_filters(console_high_and_exc_handler, *console_filters)
-        add_filters(console_high_and_exc_handler, *filters)
-        _ = RichTracebackFormatter.create_and_set(
-            console_high_and_exc_handler,
-            version=formatter_version,
-            max_width=formatter_max_width,
-            indent_size=formatter_indent_size,
-            max_length=formatter_max_length,
-            max_string=formatter_max_string,
-            max_depth=formatter_max_depth,
-            expand_all=formatter_expand_all,
-            detail=True,
-            post=_ansi_wrap_red,
-        )
-        console_high_and_exc_handler.setLevel(
-            max(get_logging_level_number(console_level), ERROR)
-        )
-        logger_use.addHandler(console_high_and_exc_handler)
-
-    # debug & info
-    directory = get_path(path=files_dir) # skipif-ci-and-windows
-    levels: list[LogLevel] = ["DEBUG", "INFO"] # skipif-ci-and-windows
-    for level, (subpath, files_or_plain_formatter) in product( # skipif-ci-and-windows
-        levels, [(Path(), files_formatter), (Path("plain"), plain_formatter)]
-    ):
-        path = ensure_suffix(directory.joinpath(subpath, level.lower()), ".txt")
-        path.parent.mkdir(parents=True, exist_ok=True)
-        file_handler = SizeAndTimeRotatingFileHandler(
-            filename=path,
-            when=files_when,
-            interval=files_interval,
-            backupCount=files_backup_count,
-            maxBytes=files_max_bytes,
-        )
-        add_filters(file_handler, *files_filters)
-        add_filters(file_handler, *filters)
-        file_handler.setFormatter(files_or_plain_formatter)
-        file_handler.setLevel(level)
-        logger_use.addHandler(file_handler)
-
-    # errors
-    standalone_file_handler = StandaloneFileHandler( # skipif-ci-and-windows
-        level=ERROR, path=directory.joinpath("errors")
-    )
-    add_filters(standalone_file_handler, _standalone_file_filter)
-    standalone_file_handler.setFormatter(
-        RichTracebackFormatter(
-            version=formatter_version,
-            max_width=formatter_max_width,
-            indent_size=formatter_indent_size,
-            max_length=formatter_max_length,
-            max_string=formatter_max_string,
-            max_depth=formatter_max_depth,
-            expand_all=formatter_expand_all,
-            detail=True,
-        )
-    )
-    logger_use.addHandler(standalone_file_handler) # skipif-ci-and-windows
-
-    # extra
-    if extra is not None: # skipif-ci-and-windows
-        extra(logger_use)
-
-
-def _console_low_or_no_exc_filter(record: LogRecord, /) -> bool:
-    return (record.levelno < ERROR) or (
-        (record.levelno >= ERROR) and (record.exc_info is None)
-    )
-
-
-def _standalone_file_filter(record: LogRecord, /) -> bool:
-    return record.exc_info is not None
-
-
-##
-
-
-@contextmanager
-def temp_handler(
-    handler: Handler, /, *, logger: LoggerOrName | None = None
-) -> Iterator[None]:
-    """Context manager with temporary handler set."""
-    logger_use = get_logger(logger=logger)
-    logger_use.addHandler(handler)
-    try:
-        yield
-    finally:
-        _ = logger_use.removeHandler(handler)
-
-
-##
-
-
-@contextmanager
-def temp_logger(
-    logger: LoggerOrName,
-    /,
-    *,
-    disabled: bool | None = None,
-    level: LogLevel | None = None,
-    propagate: bool | None = None,
-) -> Iterator[Logger]:
-    """Context manager with temporary logger settings."""
-    logger_use = get_logger(logger=logger)
-    init_disabled = logger_use.disabled
-    init_level = logger_use.level
-    init_propagate = logger_use.propagate
-    if disabled is not None:
-        logger_use.disabled = disabled
-    if level is not None:
-        logger_use.setLevel(level)
-    if propagate is not None:
-        logger_use.propagate = propagate
-    try:
-        yield logger_use
-    finally:
-        if disabled is not None:
-            logger_use.disabled = init_disabled
-        if level is not None:
-            logger_use.setLevel(init_level)
-        if propagate is not None:
-            logger_use.propagate = init_propagate
-
-
-##
-
-
-class _AdvancedLogRecord(LogRecord):
-    """Advanced log record."""
-
-    time_zone: ClassVar[str] = NotImplemented
-
-    @override
-    def __init__(
-        self,
-        name: str,
-        level: int,
-        pathname: str,
-        lineno: int,
-        msg: object,
-        args: Any,
-        exc_info: Any,
-        func: str | None = None,
-        sinfo: str | None = None,
-    ) -> None:
-        self._zoned_datetime = self.get_now() # skipif-ci-and-windows
-        self._zoned_datetime_str = ( # skipif-ci-and-windows
-            self._zoned_datetime.format_common_iso()
-        )
-        super().__init__( # skipif-ci-and-windows
-            name, level, pathname, lineno, msg, args, exc_info, func, sinfo
-        )
-
-    @override
-    def __init_subclass__(cls, *, time_zone: ZoneInfo, **kwargs: Any) -> None:
-        cls.time_zone = time_zone.key # skipif-ci-and-windows
-        super().__init_subclass__(**kwargs) # skipif-ci-and-windows
-
-    @classmethod
-    def get_now(cls) -> Any:
-        """Get the current zoned datetime."""
-        return cast("Any", ZonedDateTime).now(cls.time_zone) # skipif-ci-and-windows
-
-    @classmethod
-    def get_zoned_datetime_fmt(cls) -> str:
-        """Get the zoned datetime format string."""
-        length = len(cls.get_now().format_common_iso()) # skipif-ci-and-windows
-        return f"{{_zoned_datetime_str:{length}}}" # skipif-ci-and-windows
-
-
-##
-
-
-def _ansi_wrap_red(text: str, /) -> str:
-    try:
-        from humanfriendly.terminal import ansi_wrap
-    except ModuleNotFoundError: # pragma: no cover
-        return text
-    return ansi_wrap(text, color="red")
-
-
 __all__ = [
-    "FilterForKeyError",
     "GetLoggingLevelNumberError",
     "SizeAndTimeRotatingFileHandler",
-    "
+    "add_adapter",
     "add_filters",
     "basic_config",
-    "
-    "get_default_logging_path",
-    "get_logger",
+    "get_format_str",
     "get_logging_level_number",
     "setup_logging",
-    "
-    "temp_logger",
+    "to_logger",
 ]