beans-logging 6.0.3__py3-none-any.whl → 7.0.0__py3-none-any.whl
This diff compares the contents of two publicly released versions of the package, as published to a supported public registry. It is provided for informational purposes only.
- beans_logging/__init__.py +6 -6
- beans_logging/__version__.py +1 -1
- beans_logging/_builder.py +154 -0
- beans_logging/_constants.py +17 -5
- beans_logging/_core.py +295 -0
- beans_logging/_intercept.py +106 -0
- beans_logging/auto.py +3 -12
- beans_logging/config.py +118 -70
- beans_logging/filters.py +37 -20
- beans_logging/formats.py +20 -4
- beans_logging/{rotation.py → rotators.py} +20 -14
- beans_logging/schemas.py +143 -0
- beans_logging/sinks.py +11 -2
- {beans_logging-6.0.3.dist-info → beans_logging-7.0.0.dist-info}/METADATA +80 -61
- beans_logging-7.0.0.dist-info/RECORD +18 -0
- beans_logging/_base.py +0 -642
- beans_logging/_handlers.py +0 -40
- beans_logging/_utils.py +0 -101
- beans_logging-6.0.3.dist-info/RECORD +0 -17
- {beans_logging-6.0.3.dist-info → beans_logging-7.0.0.dist-info}/WHEEL +0 -0
- {beans_logging-6.0.3.dist-info → beans_logging-7.0.0.dist-info}/licenses/LICENSE.txt +0 -0
- {beans_logging-6.0.3.dist-info → beans_logging-7.0.0.dist-info}/top_level.txt +0 -0
beans_logging/config.py
CHANGED

@@ -1,69 +1,60 @@
-import
+import sys
 import datetime
 from typing import Any
-from typing_extensions import Self
 
-
+if sys.version_info >= (3, 11):
+    from typing import Self
+else:
+    from typing_extensions import Self
 
-
-from
+import potato_util as utils
+from pydantic import Field, model_validator, field_validator
 
+from ._constants import LogHandlerTypeEnum, LogLevelEnum
+from .schemas import ExtraBaseModel, LogHandlerPM, LoguruHandlerPM
 
-class ExtraBaseModel(BaseModel):
-    model_config = ConfigDict(extra="allow")
 
+def _get_handlers() -> dict[str, LogHandlerPM]:
+    """Get default log handlers.
 
-
-
-
+    Returns:
+        dict[str, LogHandlerPM]: Default handlers as dictionary.
+    """
 
-
-
-
-
-        default=(
-            "[<c>{time:YYYY-MM-DD HH:mm:ss.SSS Z}</c> | <level>{level_short:<5}</level> | <w>{name}:{line}</w>]: "
-            "<level>{message}</level>"
+    _log_handlers: dict[str, LogHandlerPM] = {
+        "default.all.std_handler": LogHandlerPM(type_=LogHandlerTypeEnum.STD),
+        "default.all.file_handler": LogHandlerPM(
+            type_=LogHandlerTypeEnum.FILE, enabled=False
         ),
-
-
-
-
+        "default.err.file_handler": LogHandlerPM(
+            type_=LogHandlerTypeEnum.FILE, error=True, enabled=False
+        ),
+        "default.all.json_handler": LogHandlerPM(
+            type_=LogHandlerTypeEnum.FILE, serialize=True, enabled=False
+        ),
+        "default.err.json_handler": LogHandlerPM(
+            type_=LogHandlerTypeEnum.FILE, serialize=True, error=True, enabled=False
+        ),
+    }
+
+    return _log_handlers
 
 
-class
-    enabled: bool = Field(default=False)
+class StdConfigPM(ExtraBaseModel):
     format_str: str = Field(
-        default=
+        default=(
+            "[<c>{time:YYYY-MM-DD HH:mm:ss.SSS Z}</c> | <level>{extra[level_short]:<5}</level> | <w>{name}:{line}</w>]:"
+            " <level>{message}</level>"
+        ),
         min_length=8,
         max_length=512,
     )
-    log_path: str = Field(
-        default="{app_name}.std.all.log", min_length=4, max_length=1024
-    )
-    err_path: str = Field(
-        default="{app_name}.std.err.log", min_length=4, max_length=1024
-    )
-
-    @model_validator(mode="after")
-    def _check_log_path(self) -> Self:
-        if self.log_path == self.err_path:
-            raise ValueError(
-                f"`log_path` and `err_path` attributes have same value: '{self.log_path}', must be different!"
-            )
+    colorize: bool = Field(default=True)
 
-        return self
 
-
-
-class JsonHandlersConfigPM(ExtraBaseModel):
-    use_custom: bool = Field(default=False)
-    log_path: str = Field(
-        default="{app_name}.json.all.log", min_length=4, max_length=1024
-    )
-    err_path: str = Field(
-        default="{app_name}.json.err.log", min_length=4, max_length=1024
-    )
+class PathsConfigPM(ExtraBaseModel):
+    log_path: str = Field(..., min_length=4, max_length=1024)
+    err_path: str = Field(..., min_length=4, max_length=1024)
 
     @model_validator(mode="after")
     def _check_log_path(self) -> Self:

@@ -77,7 +68,7 @@ class JsonHandlersConfigPM(ExtraBaseModel):
 
 class FileConfigPM(ExtraBaseModel):
     logs_dir: str = Field(
-
+        default="./logs",
         min_length=2,
         max_length=1024,
     )

@@ -85,10 +76,23 @@ class FileConfigPM(ExtraBaseModel):
         default=10_000_000, ge=1_000, lt=1_000_000_000  # 10MB = 10 * 1000 * 1000
     )
     rotate_time: datetime.time = Field(default_factory=lambda: datetime.time(0, 0, 0))
-
+    retention: int = Field(default=90, ge=1)
     encoding: str = Field(default="utf8", min_length=2, max_length=31)
-
-
+
+    plain: PathsConfigPM = Field(
+        default_factory=lambda: PathsConfigPM(
+            log_path="{app_name}.all.log",
+            err_path="{app_name}.err.log",
+        )
+    )
+    json_: PathsConfigPM = Field(
+        default_factory=lambda: PathsConfigPM(
+            log_path="json/{app_name}.json.all.log",
+            err_path="json/{app_name}.json.err.log",
+        ),
+        validation_alias="json",
+        serialization_alias="json",
+    )
 
     @field_validator("rotate_time", mode="before")
     @classmethod

@@ -99,14 +103,40 @@ class FileConfigPM(ExtraBaseModel):
         return val
 
 
-class
-
-
-
+class LevelConfigPM(ExtraBaseModel):
+    base: str | int | LogLevelEnum = Field(default=LogLevelEnum.INFO)
+    err: str | int | LogLevelEnum = Field(default=LogLevelEnum.WARNING)
+
+    @field_validator("base", mode="before")
+    @classmethod
+    def _check_level(cls, val: Any) -> Any:
+        if not isinstance(val, (str, int, LogLevelEnum)):
+            raise TypeError(
+                f"Level attribute type {type(val).__name__} is invalid, must be str, int or <LogLevelEnum>!"
+            )
+
+        if utils.is_debug_mode() and (val != LogLevelEnum.TRACE) and (val != 5):
+            val = LogLevelEnum.DEBUG
+
+        return val
+
+
+class DefaultConfigPM(ExtraBaseModel):
+    level: LevelConfigPM = Field(default_factory=LevelConfigPM)
+    std: StdConfigPM = Field(default_factory=StdConfigPM)
+    format_str: str = Field(
+        default="[{time:YYYY-MM-DD HH:mm:ss.SSS Z} | {extra[level_short]:<5} | {name}:{line}]: {message}",
+        min_length=8,
+        max_length=512,
+    )
+    file: FileConfigPM = Field(default_factory=FileConfigPM)
+    custom_serialize: bool = Field(default=False)
 
 
 class InterceptConfigPM(ExtraBaseModel):
-
+    enabled: bool = Field(default=True)
+    only_base: bool = Field(default=False)
+    ignore_modules: list[str] = Field(default=[])
     include_modules: list[str] = Field(default=[])
     mute_modules: list[str] = Field(default=[])
 

@@ -116,23 +146,41 @@ class ExtraConfigPM(ExtraBaseModel):
 
 
 class LoggerConfigPM(ExtraBaseModel):
-    app_name: str = Field(
-
-
-
-    stream: StreamConfigPM = Field(default_factory=StreamConfigPM)
-    file: FileConfigPM = Field(default_factory=FileConfigPM)
+    app_name: str = Field(
+        default_factory=utils.get_slug_name, min_length=1, max_length=128
+    )
+    default: DefaultConfigPM = Field(default_factory=DefaultConfigPM)
     intercept: InterceptConfigPM = Field(default_factory=InterceptConfigPM)
+    handlers: dict[str, LogHandlerPM] = Field(default_factory=_get_handlers)
     extra: ExtraConfigPM | None = Field(default_factory=ExtraConfigPM)
 
+    @field_validator("handlers", mode="before")
+    @classmethod
+    def _check_handlers(cls, val: Any) -> Any:
+        if val:
+            if not isinstance(val, dict):
+                raise TypeError(
+                    f"'handlers' attribute type {type(val).__name__} is invalid, must be a dict of <LogHandlerPM>, "
+                    f"<LoguruHandlerPM> or dict!"
+                )
+
+            for _i, _handler in val.items():
+                if not isinstance(_handler, (LogHandlerPM, LoguruHandlerPM, dict)):
+                    raise TypeError(
+                        f"'handlers' attribute index {_i} type {type(_handler).__name__} is invalid, must be "
+                        f"<LogHandlerPM>, <LoguruHandlerPM> or dict!"
+                    )
+
+                if isinstance(_handler, LoguruHandlerPM):
+                    val[_i] = LogHandlerPM(
+                        **_handler.model_dump(exclude_none=True, exclude_unset=True)
+                    )
+                elif isinstance(_handler, dict):
+                    val[_i] = LogHandlerPM(**_handler)
+
+        return val
+
 
 __all__ = [
-    "StdHandlerConfigPM",
-    "StreamConfigPM",
-    "LogHandlersConfigPM",
-    "JsonHandlersConfigPM",
-    "FileConfigPM",
-    "AutoLoadConfigPM",
-    "InterceptConfigPM",
     "LoggerConfigPM",
 ]
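
Note: the handler-model classes referenced here (LogHandlerPM, LoguruHandlerPM, LogHandlerTypeEnum) now live in the new schemas.py and _constants.py modules shown elsewhere in this diff. A minimal sketch of how the reworked config model appears intended to be used, assuming these import paths match the 7.0.0 layout (the app_name value and handler key are illustrative):

    from beans_logging._constants import LogHandlerTypeEnum
    from beans_logging.config import LoggerConfigPM
    from beans_logging.schemas import LogHandlerPM

    # Handlers are keyed by name; the `_check_handlers` "before" validator
    # coerces plain dicts and LoguruHandlerPM instances into LogHandlerPM.
    config = LoggerConfigPM(
        app_name="my-app",
        handlers={
            "my-app.std_handler": LogHandlerPM(type_=LogHandlerTypeEnum.STD),
        },
    )

    print(config.default.level.base)     # LogLevelEnum.INFO, or DEBUG in debug mode
    print(config.default.file.logs_dir)  # "./logs"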
beans_logging/filters.py
CHANGED

@@ -1,33 +1,39 @@
-def add_level_short(record: dict) -> dict:
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+    from loguru import Record
+
+
+def add_level_short(record: "Record") -> "Record":
     """Filter for adding short level name to log record.
 
     Args:
-        record (
+        record (Record, required): Log record as dictionary.
 
     Returns:
-
+        Record: Log record as dictionary with short level name.
     """
 
-    if "level_short" not in record:
+    if "level_short" not in record["extra"]:
         if record["level"].name == "SUCCESS":
-            record["level_short"] = "OK"
+            record["extra"]["level_short"] = "OK"
         elif record["level"].name == "WARNING":
-            record["level_short"] = "WARN"
+            record["extra"]["level_short"] = "WARN"
         elif record["level"].name == "CRITICAL":
-            record["level_short"] = "CRIT"
+            record["extra"]["level_short"] = "CRIT"
         elif 5 < len(record["level"].name):
-            record["level_short"] = record["level"].name[:5]
+            record["extra"]["level_short"] = record["level"].name[:5]
         else:
-            record["level_short"] = record["level"].name
+            record["extra"]["level_short"] = record["level"].name
 
     return record
 
 
-def use_all_filter(record: dict) -> bool:
+def use_all_filter(record: "Record") -> bool:
     """Filter message for all handlers that use this filter.
 
     Args:
-        record (
+        record (Record): Log record as dictionary.
 
     Returns:
         bool: False if record is disabled by extra 'disable_all' key, True otherwise.

@@ -41,7 +47,7 @@ def use_all_filter(record: dict) -> bool:
     return True
 
 
-def use_std_filter(record: dict) -> bool:
+def use_std_filter(record: "Record") -> bool:
     """Filter message for std handlers that use this filter.
 
     Args:

@@ -60,11 +66,11 @@ def use_std_filter(record: dict) -> bool:
     return True
 
 
-def use_file_filter(record: dict) -> bool:
+def use_file_filter(record: "Record") -> bool:
     """Filter message for file handlers that use this filter.
 
     Args:
-        record (
+        record (Record): Log record as dictionary.
 
     Returns:
         bool: False if record is disabled by extra 'disable_file' key, True otherwise.

@@ -79,11 +85,11 @@ def use_file_filter(record: dict) -> bool:
     return True
 
 
-def use_file_err_filter(record: dict) -> bool:
+def use_file_err_filter(record: "Record") -> bool:
     """Filter message for error file handlers that use this filter.
 
     Args:
-        record (
+        record (Record): Log record as dictionary.
 
     Returns:
         bool: False if record is disabled by extra 'disable_file_err' key, True otherwise.

@@ -98,11 +104,11 @@ def use_file_err_filter(record: dict) -> bool:
     return True
 
 
-def use_file_json_filter(record: dict) -> bool:
+def use_file_json_filter(record: "Record") -> bool:
     """Filter message for json file handlers that use this filter.
 
     Args:
-        record (
+        record (Record): Log record as dictionary.
 
     Returns:
         bool: False if record is disabled by extra 'disable_file_json' key, True otherwise.

@@ -117,11 +123,11 @@ def use_file_json_filter(record: dict) -> bool:
     return True
 
 
-def use_file_json_err_filter(record: dict) -> bool:
+def use_file_json_err_filter(record: "Record") -> bool:
     """Filter message for json error file handlers that use this filter.
 
     Args:
-        record (
+        record (Record): Log record as dictionary.
 
     Returns:
         bool: False if record is disabled by extra 'disable_file_json_err' key, True otherwise.

@@ -134,3 +140,14 @@ def use_file_json_err_filter(record: dict) -> bool:
         return False
 
     return True
+
+
+__all__ = [
+    "add_level_short",
+    "use_all_filter",
+    "use_std_filter",
+    "use_file_filter",
+    "use_file_err_filter",
+    "use_file_json_filter",
+    "use_file_json_err_filter",
+]
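
Note: level_short has moved from the top level of the record into record["extra"], matching the new {extra[level_short]} placeholders in config.py and formats.py. A minimal sketch of one way to wire these filters up, assuming add_level_short is applied as a loguru patcher and that use_std_filter honors a disable_std extra key by analogy with use_all_filter; beans_logging's own wiring (in the new _builder.py/_core.py) may differ:

    import sys

    from loguru import logger

    from beans_logging.filters import add_level_short, use_std_filter

    logger.remove()
    logger.configure(patcher=add_level_short)  # fills record["extra"]["level_short"]
    logger.add(
        sys.stderr,
        format="{extra[level_short]:<5} | {message}",
        filter=use_std_filter,
    )

    logger.warning("disk almost full")           # -> "WARN  | disk almost full"
    logger.bind(disable_std=True).info("muted")  # dropped by use_std_filter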
beans_logging/formats.py
CHANGED

@@ -1,8 +1,12 @@
 import json
 import traceback
+from typing import TYPE_CHECKING
 
+if TYPE_CHECKING:
+    from loguru import Record
 
-def json_format(record: dict) -> str:
+
+def json_formatter(record: "Record") -> str:
     """Custom json formatter for loguru logger.
 
     Args:

@@ -16,7 +20,7 @@ def json_format(record: dict) -> str:
     if record["exception"]:
         _error = {}
         _error_type, _error_value, _error_traceback = record["exception"]
-
+        if _error_type:
+            _error["type"] = _error_type.__name__
+        else:
+            _error["type"] = "None"
+
         _error["value"] = str(_error_value)
         _error["traceback"] = "".join(traceback.format_tb(_error_traceback))

@@ -24,6 +32,9 @@ def json_format(record: dict) -> str:
     if record["extra"] and (0 < len(record["extra"])):
         _extra = record["extra"]
 
+        if _extra and ("serialized" in _extra):
+            del _extra["serialized"]
+
     _json_record = {
         "timestamp": record["time"].strftime("%Y-%m-%dT%H:%M:%S%z"),
         "level": record["level"].name,

@@ -39,5 +50,10 @@ def json_format(record: dict) -> str:
         "elapsed": str(record["elapsed"]),
     }
 
-    record["serialized"] = json.dumps(_json_record)
-    return "{serialized}\n"
+    record["extra"]["serialized"] = json.dumps(_json_record)
+    return "{extra[serialized]}\n"
+
+
+__all__ = [
+    "json_formatter",
+]
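
Note: stashing the payload in record["extra"]["serialized"] and returning "{extra[serialized]}\n" is loguru's documented pattern for fully custom serialization (a callable passed as format receives the record and returns the format template); the old code wrote to the record's top level instead. A minimal sketch of registering the renamed formatter, with an illustrative sink path:

    from loguru import logger

    from beans_logging.formats import json_formatter

    logger.remove()
    # json_formatter emits one JSON object per line:
    logger.add("app.json.log", format=json_formatter)

    logger.info("hello")  # -> {"timestamp": "...", "level": "INFO", ...}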
beans_logging/{rotation.py → rotators.py}
RENAMED

@@ -1,43 +1,44 @@
 import datetime
-from typing import TextIO
+from typing import TextIO, TYPE_CHECKING
 
-from loguru import Message
+if TYPE_CHECKING:
+    from loguru import Message
 
 
-class RotationChecker:
-    """
+class Rotator:
+    """Rotator class for checking file size and time for rotation.
 
     Attributes:
-        _size_limit
-
+        _size_limit (int              ): File size limit for rotation.
+        _dt_limit   (datetime.datetime): Datetime when the log file should rotate.
 
     Methods:
         should_rotate(): Check if the log file should rotate.
     """
 
     def __init__(self, *, rotate_size: int, rotate_time: datetime.time):
-        """
+        """Rotator constructor method.
 
         Args:
            rotate_size (int, required): File size limit for rotation.
            rotate_time (datetime.time, required): Time when the log file should rotate.
        """
 
-
+        _current_dt = datetime.datetime.now()
 
         self._size_limit = rotate_size
-        self.
+        self._dt_limit = _current_dt.replace(
             hour=rotate_time.hour,
             minute=rotate_time.minute,
             second=rotate_time.second,
         )
 
-        if
+        if _current_dt >= self._dt_limit:
             # The current time is already past the target time so it would rotate already.
             # Add one day to prevent an immediate rotation.
-            self.
+            self._dt_limit += datetime.timedelta(days=1)
 
-    def should_rotate(self, message: Message, file: TextIO) -> bool:
+    def should_rotate(self, message: "Message", file: TextIO) -> bool:
         """Check if the log file should rotate.
 
         Args:

@@ -53,11 +54,16 @@ class RotationChecker:
             return True
 
         _elapsed_timestamp = (
-            message.record["time"].timestamp() - self.
+            message.record["time"].timestamp() - self._dt_limit.timestamp()
        )
        if _elapsed_timestamp >= 0:
            _elapsed_days = datetime.timedelta(seconds=_elapsed_timestamp).days
-            self.
+            self._dt_limit += datetime.timedelta(days=_elapsed_days + 1)
            return True
 
        return False
+
+
+__all__ = [
+    "Rotator",
+]
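
Note: should_rotate(message, file) matches the signature loguru expects from a callable rotation argument. A minimal sketch of using the renamed class directly (the size and time values are illustrative):

    import datetime

    from loguru import logger

    from beans_logging.rotators import Rotator

    _rotator = Rotator(
        rotate_size=10_000_000,              # rotate once the file passes ~10 MB...
        rotate_time=datetime.time(0, 0, 0),  # ...or at midnight, whichever comes first
    )

    logger.add("app.log", rotation=_rotator.should_rotate)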
beans_logging/schemas.py
ADDED

@@ -0,0 +1,143 @@
+import os
+import sys
+import inspect
+import datetime
+from pathlib import Path
+from logging import Handler
+from asyncio import AbstractEventLoop
+from multiprocessing.context import BaseContext
+from typing import TYPE_CHECKING, Any, TextIO, Union, Protocol, runtime_checkable
+from collections.abc import Callable, Awaitable
+
+if sys.version_info >= (3, 11):
+    from typing import Self
+else:
+    from typing_extensions import Self
+
+
+if TYPE_CHECKING:
+    from loguru import Record, Message
+from pydantic import BaseModel, Field, ConfigDict, model_validator
+
+from ._constants import LogHandlerTypeEnum, LogLevelEnum
+
+
+class ExtraBaseModel(BaseModel):
+    model_config = ConfigDict(
+        extra="allow",
+        validate_default=True,
+        validate_assignment=True,
+        populate_by_name=True,  # Remove in Pydantic v3
+        serialize_by_alias=True,
+        validate_by_name=True,
+        arbitrary_types_allowed=True,
+    )
+
+
+@runtime_checkable
+class _SupportsWrite(Protocol):
+    def write(self, __s: str) -> Any: ...
+    def flush(self) -> Any: ...
+
+
+_SinkType = Union[
+    str,
+    Path,
+    TextIO,
+    _SupportsWrite,
+    Callable[[Any], Any],
+    Callable[[Any], Awaitable[Any]],
+    Handler,
+]
+
+
+class LoguruHandlerPM(ExtraBaseModel):
+    sink: _SinkType = Field(...)
+    level: str | int | None = Field(default=None)
+    format_: (
+        str | Callable[["Record"], str] | Callable[[dict[str, Any]], str] | None
+    ) = Field(default=None, validation_alias="format", serialization_alias="format")
+    filter_: (
+        Callable[["Record"], bool]
+        | Callable[[dict[str, Any]], bool]
+        | str
+        | dict[str, Any]
+        | None
+    ) = Field(default=None, validation_alias="filter", serialization_alias="filter")
+    colorize: bool | None = Field(default=None)
+    serialize: bool | None = Field(default=None)
+    backtrace: bool | None = Field(default=None)
+    diagnose: bool | None = Field(default=None)
+    enqueue: bool | None = Field(default=None)
+    context: BaseContext | str | None = Field(default=None)
+    catch: bool | None = Field(default=None)
+    loop: AbstractEventLoop | None = Field(default=None)
+    rotation: (
+        str
+        | int
+        | datetime.time
+        | datetime.timedelta
+        | Callable[["Message", TextIO], bool]
+        | Callable[[str, TextIO], bool]
+        | Callable[[str, Any], bool]
+        | None
+    ) = Field(default=None)
+    retention: str | int | datetime.timedelta | Callable[[Any], None] | None = Field(
+        default=None
+    )
+    compression: str | Callable[[str], None] | None = Field(default=None)
+    delay: bool | None = Field(default=None)
+    watch: bool | None = Field(default=None)
+    mode: str | None = Field(default=None)
+    buffering: int | None = Field(default=None)
+    encoding: str | None = Field(default=None)
+
+
+class LogHandlerPM(LoguruHandlerPM):
+    type_: LogHandlerTypeEnum = Field(
+        default=LogHandlerTypeEnum.UNKNOWN,
+        validation_alias="type",
+        serialization_alias="type",
+    )
+    sink: _SinkType | None = Field(default=None)
+    level: str | int | LogLevelEnum | None = Field(default=None)
+    custom_serialize: bool | None = Field(default=None)
+    error: bool = Field(default=False)
+    enabled: bool = Field(default=True)
+
+    @model_validator(mode="after")
+    def _check_all(self) -> Self:
+
+        if (self.loop is not None) and (
+            (not callable(self.sink)) or (not inspect.iscoroutinefunction(self.sink))
+        ):
+            raise ValueError(
+                f"'loop' attribute is set but 'sink' attribute type {type(self.sink)} is invalid, "
+                "'loop' only can be used with async callable (coroutine function) 'sink'!"
+            )
+
+        if not isinstance(self.sink, (str, os.PathLike)):
+            for _attr in (
+                "rotation",
+                "retention",
+                "compression",
+                "delay",
+                "watch",
+                "mode",
+                "buffering",
+                "encoding",
+            ):
+                if getattr(self, _attr) is not None:
+                    raise ValueError(
+                        f"'{_attr}' attribute is set but 'sink' attribute type {type(self.sink).__name__} is invalid, "
+                        f"'{_attr}' can only be used with file path 'sink'!"
+                    )
+
+        return self
+
+
+__all__ = [
+    "ExtraBaseModel",
+    "LoguruHandlerPM",
+    "LogHandlerPM",
+]
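
Note: LoguruHandlerPM mirrors the keyword arguments of loguru's logger.add(), with the reserved names format/filter/type mapped to format_/filter_/type_ via aliases. How the new _core.py actually consumes these models is not shown in this diff, but a plausible round-trip looks like this (field choices are illustrative):

    from loguru import logger

    from beans_logging.schemas import LogHandlerPM

    _handler = LogHandlerPM(
        sink="app.log",
        level="INFO",
        rotation="10 MB",  # permitted because the sink is a file path
        encoding="utf8",
    )

    # Dump by alias (format_/filter_ -> format/filter) and drop the
    # beans_logging-specific fields before handing the rest to loguru:
    _kwargs = _handler.model_dump(
        by_alias=True,
        exclude_none=True,
        exclude={"type_", "custom_serialize", "error", "enabled"},
    )
    logger.add(**_kwargs)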
beans_logging/sinks.py
CHANGED

@@ -1,9 +1,11 @@
 import sys
+from typing import TYPE_CHECKING
 
-from loguru import Message
+if TYPE_CHECKING:
+    from loguru import Message
 
 
-def std_sink(message: Message):
+def std_sink(message: "Message") -> None:
     """Print message based on log level to stdout or stderr.
 
     Args:

@@ -12,5 +14,12 @@ def std_sink(message: Message):
 
     if message.record["level"].no < 40:
         sys.stdout.write(message)
+        # sys.stdout.flush()
     else:
         sys.stderr.write(message)
+        # sys.stderr.flush()
+
+    return
+
+
+__all__ = ["std_sink"]
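
Note: std_sink splits output on loguru's level number 40 (ERROR): everything below goes to stdout, ERROR and above to stderr. A minimal sketch of registering it as a callable sink (the format string is illustrative):

    from loguru import logger

    from beans_logging.sinks import std_sink

    logger.remove()
    logger.add(std_sink, format="{level} | {message}", colorize=True)

    logger.info("to stdout")   # level no 20 < 40
    logger.error("to stderr")  # level no 40 routes to stderr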