beans-logging 6.0.3__py3-none-any.whl → 7.1.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
beans_logging/config.py CHANGED
@@ -1,78 +1,57 @@
  import os
  import datetime
  from typing import Any
- from typing_extensions import Self
 
- from pydantic import BaseModel, Field, field_validator, model_validator, ConfigDict
+ import potato_util as utils
+ from pydantic import Field, field_validator
 
- from ._constants import LogLevelEnum
- from ._utils import get_slug_name
+ from ._constants import LogHandlerTypeEnum, LogLevelEnum
+ from .schemas import ExtraBaseModel, LogHandlerPM, LoguruHandlerPM
 
 
- class ExtraBaseModel(BaseModel):
-     model_config = ConfigDict(extra="allow")
+ def _get_handlers() -> dict[str, LogHandlerPM]:
+     """Get default log handlers.
 
+     Returns:
+         dict[str, LogHandlerPM]: Default handlers as dictionary.
+     """
 
- class StdHandlerConfigPM(ExtraBaseModel):
-     enabled: bool = Field(default=True)
-
-
- class StreamConfigPM(ExtraBaseModel):
-     use_color: bool = Field(default=True)
-     use_icon: bool = Field(default=False)
-     format_str: str = Field(
-         default=(
-             "[<c>{time:YYYY-MM-DD HH:mm:ss.SSS Z}</c> | <level>{level_short:<5}</level> | <w>{name}:{line}</w>]: "
-             "<level>{message}</level>"
+     _log_handlers: dict[str, LogHandlerPM] = {
+         "default.all.std_handler": LogHandlerPM(
+             type_=LogHandlerTypeEnum.STD,
+             format_=(
+                 "[<c>{time:YYYY-MM-DD HH:mm:ss.SSS Z}</c> | <level>{extra[level_short]:<5}</level> | "
+                 "<w>{name}:{line}</w>]: <level>{message}</level>"
+             ),
+             colorize=True,
          ),
-         min_length=8,
-         max_length=512,
-     )
-     std_handler: StdHandlerConfigPM = Field(default_factory=StdHandlerConfigPM)
-
-
- class LogHandlersConfigPM(ExtraBaseModel):
-     enabled: bool = Field(default=False)
-     format_str: str = Field(
-         default="[{time:YYYY-MM-DD HH:mm:ss.SSS Z} | {level_short:<5} | {name}:{line}]: {message}",
-         min_length=8,
-         max_length=512,
-     )
-     log_path: str = Field(
-         default="{app_name}.std.all.log", min_length=4, max_length=1024
-     )
-     err_path: str = Field(
-         default="{app_name}.std.err.log", min_length=4, max_length=1024
-     )
-
-     @model_validator(mode="after")
-     def _check_log_path(self) -> Self:
-         if self.log_path == self.err_path:
-             raise ValueError(
-                 f"`log_path` and `err_path` attributes have same value: '{self.log_path}', must be different!"
-             )
-
-         return self
-
-
- class JsonHandlersConfigPM(ExtraBaseModel):
-     enabled: bool = Field(default=False)
-     use_custom: bool = Field(default=False)
-     log_path: str = Field(
-         default="{app_name}.json.all.log", min_length=4, max_length=1024
-     )
-     err_path: str = Field(
-         default="{app_name}.json.err.log", min_length=4, max_length=1024
-     )
-
-     @model_validator(mode="after")
-     def _check_log_path(self) -> Self:
-         if self.log_path == self.err_path:
-             raise ValueError(
-                 f"`log_path` and `err_path` attributes have same value: '{self.log_path}', must be different!"
-             )
+         "default.all.file_handler": LogHandlerPM(
+             type_=LogHandlerTypeEnum.FILE,
+             sink="{app_name}.all.log",
+             enabled=False,
+         ),
+         "default.err.file_handler": LogHandlerPM(
+             type_=LogHandlerTypeEnum.FILE,
+             sink="{app_name}.err.log",
+             error=True,
+             enabled=False,
+         ),
+         "default.all.json_handler": LogHandlerPM(
+             type_=LogHandlerTypeEnum.FILE,
+             sink="json/{app_name}.json.all.log",
+             serialize=True,
+             enabled=False,
+         ),
+         "default.err.json_handler": LogHandlerPM(
+             type_=LogHandlerTypeEnum.FILE,
+             sink="json/{app_name}.json.err.log",
+             serialize=True,
+             error=True,
+             enabled=False,
+         ),
+     }
 
-         return self
+     return _log_handlers
 
 
  class FileConfigPM(ExtraBaseModel):
@@ -85,10 +64,8 @@ class FileConfigPM(ExtraBaseModel):
          default=10_000_000, ge=1_000, lt=1_000_000_000  # 10MB = 10 * 1000 * 1000
      )
      rotate_time: datetime.time = Field(default_factory=lambda: datetime.time(0, 0, 0))
-     backup_count: int = Field(default=90, ge=1)
+     retention: int = Field(default=90, ge=1)
      encoding: str = Field(default="utf8", min_length=2, max_length=31)
-     log_handlers: LogHandlersConfigPM = Field(default_factory=LogHandlersConfigPM)
-     json_handlers: JsonHandlersConfigPM = Field(default_factory=JsonHandlersConfigPM)
 
      @field_validator("rotate_time", mode="before")
      @classmethod
@@ -98,15 +75,48 @@ class FileConfigPM(ExtraBaseModel):
 
          return val
 
+     @field_validator("logs_dir", mode="before")
+     @classmethod
+     def _check_logs_dir(cls, val: Any) -> Any:
+         if isinstance(val, str) and (not os.path.isabs(val)):
+             val = os.path.abspath(val)
 
- class AutoLoadConfigPM(ExtraBaseModel):
-     enabled: bool = Field(default=True)
-     only_base: bool = Field(default=False)
-     ignore_modules: list[str] = Field(default=[])
+         return val
+
+
+ class LevelConfigPM(ExtraBaseModel):
+     base: str | int | LogLevelEnum = Field(default=LogLevelEnum.INFO)
+     err: str | int | LogLevelEnum = Field(default=LogLevelEnum.WARNING)
+
+     @field_validator("base", mode="before")
+     @classmethod
+     def _check_level(cls, val: Any) -> Any:
+         if not isinstance(val, (str, int, LogLevelEnum)):
+             raise TypeError(
+                 f"Level attribute type {type(val).__name__} is invalid, must be str, int or <LogLevelEnum>!"
+             )
+
+         if utils.is_debug_mode() and (val != LogLevelEnum.TRACE) and (val != 5):
+             val = LogLevelEnum.DEBUG
+
+         return val
+
+
+ class DefaultConfigPM(ExtraBaseModel):
+     level: LevelConfigPM = Field(default_factory=LevelConfigPM)
+     format_str: str = Field(
+         default="[{time:YYYY-MM-DD HH:mm:ss.SSS Z} | {extra[level_short]:<5} | {name}:{line}]: {message}",
+         min_length=8,
+         max_length=512,
+     )
+     file: FileConfigPM = Field(default_factory=FileConfigPM)
+     custom_serialize: bool = Field(default=False)
 
 
  class InterceptConfigPM(ExtraBaseModel):
-     auto_load: AutoLoadConfigPM = Field(default_factory=AutoLoadConfigPM)
+     enabled: bool = Field(default=True)
+     only_base: bool = Field(default=False)
+     ignore_modules: list[str] = Field(default=[])
      include_modules: list[str] = Field(default=[])
      mute_modules: list[str] = Field(default=[])
 
@@ -116,23 +126,41 @@ class ExtraConfigPM(ExtraBaseModel):
 
 
  class LoggerConfigPM(ExtraBaseModel):
-     app_name: str = Field(default_factory=get_slug_name, min_length=1, max_length=128)
-     level: LogLevelEnum = Field(default=LogLevelEnum.INFO)
-     use_backtrace: bool = Field(default=True)
-     use_diagnose: bool = Field(default=False)
-     stream: StreamConfigPM = Field(default_factory=StreamConfigPM)
-     file: FileConfigPM = Field(default_factory=FileConfigPM)
+     app_name: str = Field(
+         default_factory=utils.get_slug_name, min_length=1, max_length=128
+     )
+     default: DefaultConfigPM = Field(default_factory=DefaultConfigPM)
      intercept: InterceptConfigPM = Field(default_factory=InterceptConfigPM)
+     handlers: dict[str, LogHandlerPM] = Field(default_factory=_get_handlers)
      extra: ExtraConfigPM | None = Field(default_factory=ExtraConfigPM)
 
+     @field_validator("handlers", mode="before")
+     @classmethod
+     def _check_handlers(cls, val: Any) -> Any:
+         if val:
+             if not isinstance(val, dict):
+                 raise TypeError(
+                     f"'handlers' attribute type {type(val).__name__} is invalid, must be a dict of <LogHandlerPM>, "
+                     f"<LoguruHandlerPM> or dict!"
+                 )
+
+             for _i, _handler in val.items():
+                 if not isinstance(_handler, (LogHandlerPM, LoguruHandlerPM, dict)):
+                     raise TypeError(
+                         f"'handlers' attribute index {_i} type {type(_handler).__name__} is invalid, must be "
+                         f"<LogHandlerPM>, <LoguruHandlerPM> or dict!"
+                     )
+
+                 if isinstance(_handler, LoguruHandlerPM):
+                     val[_i] = LogHandlerPM(
+                         **_handler.model_dump(exclude_none=True, exclude_unset=True)
+                     )
+                 elif isinstance(_handler, dict):
+                     val[_i] = LogHandlerPM(**_handler)
+
+         return val
+
 
  __all__ = [
-     "StdHandlerConfigPM",
-     "StreamConfigPM",
-     "LogHandlersConfigPM",
-     "JsonHandlersConfigPM",
-     "FileConfigPM",
-     "AutoLoadConfigPM",
-     "InterceptConfigPM",
      "LoggerConfigPM",
  ]
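
The new `handlers` registry replaces the fixed std/file/json config models. A minimal usage sketch, not from the package itself (the handler key below is made up): the `_check_handlers` "before" validator lets callers pass plain dicts or `LoguruHandlerPM` instances and still end up with `LogHandlerPM` values.

```python
# Hypothetical usage; LoggerConfigPM and LogHandlerPM come from the diff above.
from beans_logging.config import LoggerConfigPM
from beans_logging.schemas import LogHandlerPM

config = LoggerConfigPM(
    app_name="my-app",
    handlers={
        # A plain dict is coerced into LogHandlerPM by the
        # `_check_handlers` "before" validator:
        "my.err.file_handler": {"sink": "{app_name}.err.log", "error": True},
    },
)
assert isinstance(config.handlers["my.err.file_handler"], LogHandlerPM)
```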
beans_logging/filters.py CHANGED
@@ -1,33 +1,39 @@
- def add_level_short(record: dict) -> dict:
+ from typing import TYPE_CHECKING
+
+ if TYPE_CHECKING:
+     from loguru import Record
+
+
+ def add_level_short(record: "Record") -> "Record":
      """Filter for adding short level name to log record.
 
      Args:
-         record (dict, required): Log record as dictionary.
+         record (Record, required): Log record as dictionary.
 
      Returns:
-         dict: Log record as dictionary with short level name.
+         Record: Log record as dictionary with short level name.
      """
 
-     if "level_short" not in record:
+     if "level_short" not in record["extra"]:
          if record["level"].name == "SUCCESS":
-             record["level_short"] = "OK"
+             record["extra"]["level_short"] = "OK"
          elif record["level"].name == "WARNING":
-             record["level_short"] = "WARN"
+             record["extra"]["level_short"] = "WARN"
          elif record["level"].name == "CRITICAL":
-             record["level_short"] = "CRIT"
+             record["extra"]["level_short"] = "CRIT"
          elif 5 < len(record["level"].name):
-             record["level_short"] = record["level"].name[:5]
+             record["extra"]["level_short"] = record["level"].name[:5]
          else:
-             record["level_short"] = record["level"].name
+             record["extra"]["level_short"] = record["level"].name
 
      return record
 
 
- def use_all_filter(record: dict) -> bool:
+ def use_all_filter(record: "Record") -> bool:
      """Filter message for all handlers that use this filter.
 
      Args:
-         record (dict): Log record as dictionary.
+         record (Record): Log record as dictionary.
 
      Returns:
          bool: False if record is disabled by extra 'disable_all' key, True otherwise.
@@ -41,7 +47,7 @@ def use_all_filter(record: dict) -> bool:
      return True
 
 
- def use_std_filter(record: dict) -> bool:
+ def use_std_filter(record: "Record") -> bool:
      """Filter message for std handlers that use this filter.
 
      Args:
@@ -60,11 +66,11 @@ def use_std_filter(record: dict) -> bool:
      return True
 
 
- def use_file_filter(record: dict) -> bool:
+ def use_file_filter(record: "Record") -> bool:
      """Filter message for file handlers that use this filter.
 
      Args:
-         record (dict): Log record as dictionary.
+         record (Record): Log record as dictionary.
 
      Returns:
          bool: False if record is disabled by extra 'disable_file' key, True otherwise.
@@ -79,11 +85,11 @@ def use_file_filter(record: dict) -> bool:
      return True
 
 
- def use_file_err_filter(record: dict) -> bool:
+ def use_file_err_filter(record: "Record") -> bool:
      """Filter message for error file handlers that use this filter.
 
      Args:
-         record (dict): Log record as dictionary.
+         record (Record): Log record as dictionary.
 
      Returns:
          bool: False if record is disabled by extra 'disable_file_err' key, True otherwise.
@@ -98,11 +104,11 @@ def use_file_err_filter(record: dict) -> bool:
      return True
 
 
- def use_file_json_filter(record: dict) -> bool:
+ def use_file_json_filter(record: "Record") -> bool:
      """Filter message for json file handlers that use this filter.
 
      Args:
-         record (dict): Log record as dictionary.
+         record (Record): Log record as dictionary.
 
      Returns:
          bool: False if record is disabled by extra 'disable_file_json' key, True otherwise.
@@ -117,11 +123,11 @@ def use_file_json_filter(record: dict) -> bool:
      return True
 
 
- def use_file_json_err_filter(record: dict) -> bool:
+ def use_file_json_err_filter(record: "Record") -> bool:
      """Filter message for json error file handlers that use this filter.
 
      Args:
-         record (dict): Log record as dictionary.
+         record (Record): Log record as dictionary.
 
      Returns:
          bool: False if record is disabled by extra 'disable_file_json_err' key, True otherwise.
@@ -134,3 +140,14 @@ def use_file_json_err_filter(record: dict) -> bool:
          return False
 
      return True
+
+
+ __all__ = [
+     "add_level_short",
+     "use_all_filter",
+     "use_std_filter",
+     "use_file_filter",
+     "use_file_err_filter",
+     "use_file_json_filter",
+     "use_file_json_err_filter",
+ ]
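
Since `level_short` now lives in `record["extra"]`, formats reference it as `{extra[level_short]}`. A sketch of wiring these filters into loguru directly (this wiring is an assumption for illustration, not the package's own bootstrap code):

```python
import sys
from loguru import logger

from beans_logging.filters import add_level_short, use_std_filter

# Patch every record so `{extra[level_short]}` is available in handler formats:
logger.configure(patcher=add_level_short)
logger.remove()
logger.add(
    sys.stderr,
    format="{extra[level_short]:<5} | {message}",
    filter=use_std_filter,  # drops records flagged for std handlers (see above)
)
logger.warning("low disk space")  # prints: WARN  | low disk space
```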
beans_logging/formats.py CHANGED
@@ -1,8 +1,12 @@
  import json
  import traceback
+ from typing import TYPE_CHECKING
 
+ if TYPE_CHECKING:
+     from loguru import Record
 
- def json_format(record: dict) -> str:
+
+ def json_formatter(record: "Record") -> str:
      """Custom json formatter for loguru logger.
 
      Args:
@@ -16,7 +20,11 @@ def json_format(record: dict) -> str:
      if record["exception"]:
          _error = {}
          _error_type, _error_value, _error_traceback = record["exception"]
-         _error["type"] = _error_type.__name__
+         if _error_type:
+             _error["type"] = _error_type.__name__
+         else:
+             _error["type"] = "None"
+
          _error["value"] = str(_error_value)
          _error["traceback"] = "".join(traceback.format_tb(_error_traceback))
 
@@ -24,6 +32,9 @@ def json_format(record: dict) -> str:
      if record["extra"] and (0 < len(record["extra"])):
          _extra = record["extra"]
 
+     if _extra and ("serialized" in _extra):
+         del _extra["serialized"]
+
      _json_record = {
          "timestamp": record["time"].strftime("%Y-%m-%dT%H:%M:%S%z"),
          "level": record["level"].name,
@@ -39,5 +50,10 @@ def json_format(record: dict) -> str:
          "elapsed": str(record["elapsed"]),
      }
 
-     record["serialized"] = json.dumps(_json_record)
-     return "{serialized}\n"
+     record["extra"]["serialized"] = json.dumps(_json_record)
+     return "{extra[serialized]}\n"
+
+
+ __all__ = [
+     "json_formatter",
+ ]
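
loguru accepts a callable as a handler's `format`; the callable receives the record and returns a format template, which is why `json_formatter` stashes the payload in `record["extra"]["serialized"]` and returns the `"{extra[serialized]}\n"` template. A minimal sketch (the file name is arbitrary):

```python
from loguru import logger

from beans_logging.formats import json_formatter

# A callable `format` is evaluated per record; the returned template must
# include its own line ending, which json_formatter does with "\n".
logger.add("app.json.all.log", format=json_formatter)
logger.info("hello")  # appends one JSON object per line
```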
@@ -1,43 +1,44 @@
  import datetime
- from typing import TextIO
+ from typing import TextIO, TYPE_CHECKING
 
- from loguru._handler import Message
+ if TYPE_CHECKING:
+     from loguru import Message
 
 
- class RotationChecker:
-     """RotationChecker class for checking file size and time for rotation.
+ class Rotator:
+     """Rotator class for checking file size and time for rotation.
 
      Attributes:
-         _size_limit  (int              ): File size limit for rotation.
-         _dtime_limit (datetime.datetime): Datetime when the log file should rotate.
+         _size_limit (int              ): File size limit for rotation.
+         _dt_limit   (datetime.datetime): Datetime when the log file should rotate.
 
      Methods:
          should_rotate(): Check if the log file should rotate.
      """
 
      def __init__(self, *, rotate_size: int, rotate_time: datetime.time):
-         """RotationChecker constructor method.
+         """Rotator constructor method.
 
          Args:
              rotate_size (int, required): File size limit for rotation.
              rotate_time (datetime.time, required): Time when the log file should rotate.
          """
 
-         _current_dtime = datetime.datetime.now()
+         _current_dt = datetime.datetime.now()
 
          self._size_limit = rotate_size
-         self._dtime_limit = _current_dtime.replace(
+         self._dt_limit = _current_dt.replace(
              hour=rotate_time.hour,
              minute=rotate_time.minute,
              second=rotate_time.second,
          )
 
-         if _current_dtime >= self._dtime_limit:
+         if _current_dt >= self._dt_limit:
              # The current time is already past the target time so it would rotate already.
              # Add one day to prevent an immediate rotation.
-             self._dtime_limit += datetime.timedelta(days=1)
+             self._dt_limit += datetime.timedelta(days=1)
 
-     def should_rotate(self, message: Message, file: TextIO) -> bool:
+     def should_rotate(self, message: "Message", file: TextIO) -> bool:
          """Check if the log file should rotate.
 
          Args:
@@ -53,11 +54,16 @@ class RotationChecker:
              return True
 
          _elapsed_timestamp = (
-             message.record["time"].timestamp() - self._dtime_limit.timestamp()
+             message.record["time"].timestamp() - self._dt_limit.timestamp()
          )
          if _elapsed_timestamp >= 0:
              _elapsed_days = datetime.timedelta(seconds=_elapsed_timestamp).days
-             self._dtime_limit += datetime.timedelta(days=_elapsed_days + 1)
+             self._dt_limit += datetime.timedelta(days=_elapsed_days + 1)
              return True
 
          return False
+
+
+ __all__ = [
+     "Rotator",
+ ]
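
The renamed `Rotator` combines size-based and daily time-based rotation in a single `rotation` callable. A usage sketch under stated assumptions: the `beans_logging.rotation` module path is a guess, since only the class and its signature appear in this diff.

```python
import datetime

from loguru import logger

from beans_logging.rotation import Rotator  # module path assumed, not shown in diff

# Rotate at 10 MB or at midnight, whichever triggers first:
rotator = Rotator(rotate_size=10_000_000, rotate_time=datetime.time(0, 0, 0))
logger.add("app.all.log", rotation=rotator.should_rotate)
```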
beans_logging/schemas.py ADDED
@@ -0,0 +1,143 @@
+ import os
+ import sys
+ import inspect
+ import datetime
+ from pathlib import Path
+ from logging import Handler
+ from asyncio import AbstractEventLoop
+ from multiprocessing.context import BaseContext
+ from typing import TYPE_CHECKING, Any, TextIO, Union, Protocol, runtime_checkable
+ from collections.abc import Callable, Awaitable
+
+ if sys.version_info >= (3, 11):
+     from typing import Self
+ else:
+     from typing_extensions import Self
+
+
+ if TYPE_CHECKING:
+     from loguru import Record, Message
+ from pydantic import BaseModel, Field, ConfigDict, model_validator
+
+ from ._constants import LogHandlerTypeEnum, LogLevelEnum
+
+
+ class ExtraBaseModel(BaseModel):
+     model_config = ConfigDict(
+         extra="allow",
+         validate_default=True,
+         validate_assignment=True,
+         populate_by_name=True,  # Remove in Pydantic v3
+         serialize_by_alias=True,
+         validate_by_name=True,
+         arbitrary_types_allowed=True,
+     )
+
+
+ @runtime_checkable
+ class _SupportsWrite(Protocol):
+     def write(self, __s: str) -> Any: ...
+     def flush(self) -> Any: ...
+
+
+ _SinkType = Union[
+     str,
+     Path,
+     TextIO,
+     _SupportsWrite,
+     Callable[[Any], Any],
+     Callable[[Any], Awaitable[Any]],
+     Handler,
+ ]
+
+
+ class LoguruHandlerPM(ExtraBaseModel):
+     sink: _SinkType = Field(...)
+     level: str | int | None = Field(default=None)
+     format_: (
+         str | Callable[["Record"], str] | Callable[[dict[str, Any]], str] | None
+     ) = Field(default=None, validation_alias="format", serialization_alias="format")
+     filter_: (
+         Callable[["Record"], bool]
+         | Callable[[dict[str, Any]], bool]
+         | str
+         | dict[str, Any]
+         | None
+     ) = Field(default=None, validation_alias="filter", serialization_alias="filter")
+     colorize: bool | None = Field(default=None)
+     serialize: bool | None = Field(default=None)
+     backtrace: bool | None = Field(default=None)
+     diagnose: bool | None = Field(default=None)
+     enqueue: bool | None = Field(default=None)
+     context: BaseContext | str | None = Field(default=None)
+     catch: bool | None = Field(default=None)
+     loop: AbstractEventLoop | None = Field(default=None)
+     rotation: (
+         str
+         | int
+         | datetime.time
+         | datetime.timedelta
+         | Callable[["Message", TextIO], bool]
+         | Callable[[str, TextIO], bool]
+         | Callable[[str, Any], bool]
+         | None
+     ) = Field(default=None)
+     retention: str | int | datetime.timedelta | Callable[[Any], None] | None = Field(
+         default=None
+     )
+     compression: str | Callable[[str], None] | None = Field(default=None)
+     delay: bool | None = Field(default=None)
+     watch: bool | None = Field(default=None)
+     mode: str | None = Field(default=None)
+     buffering: int | None = Field(default=None)
+     encoding: str | None = Field(default=None)
+
+
+ class LogHandlerPM(LoguruHandlerPM):
+     type_: LogHandlerTypeEnum = Field(
+         default=LogHandlerTypeEnum.UNKNOWN,
+         validation_alias="type",
+         serialization_alias="type",
+     )
+     sink: _SinkType | None = Field(default=None)
+     level: str | int | LogLevelEnum | None = Field(default=None)
+     custom_serialize: bool | None = Field(default=None)
+     error: bool = Field(default=False)
+     enabled: bool = Field(default=True)
+
+     @model_validator(mode="after")
+     def _check_all(self) -> Self:
+
+         if (self.loop is not None) and (
+             (not callable(self.sink)) or (not inspect.iscoroutinefunction(self.sink))
+         ):
+             raise ValueError(
+                 f"'loop' attribute is set but 'sink' attribute type {type(self.sink)} is invalid, "
+                 "'loop' only can be used with async callable (coroutine function) 'sink'!"
+             )
+
+         if not isinstance(self.sink, (str, os.PathLike)):
+             for _attr in (
+                 "rotation",
+                 "retention",
+                 "compression",
+                 "delay",
+                 "watch",
+                 "mode",
+                 "buffering",
+                 "encoding",
+             ):
+                 if getattr(self, _attr) is not None:
+                     raise ValueError(
+                         f"'{_attr}' attribute is set but 'sink' attribute type {type(self.sink).__name__} is invalid, "
+                         f"'{_attr}' can only be used with file path 'sink'!"
+                     )
+
+         return self
+
+
+ __all__ = [
+     "ExtraBaseModel",
+     "LoguruHandlerPM",
+     "LogHandlerPM",
+ ]
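
`LoguruHandlerPM` mirrors `logger.add()`'s keyword arguments, using `format_`/`filter_`/`type_` field names with `validation_alias`/`serialization_alias` so the reserved names round-trip as `format`/`filter`/`type`. A sketch of the alias behavior and the `_check_all` guard (assuming a Pydantic version that honors `serialize_by_alias`):

```python
import sys

from beans_logging.schemas import LogHandlerPM

handler = LogHandlerPM(sink="app.log", format="{message}", error=True)
print(handler.format_)                 # accessed via the field name: format_
print(handler.model_dump()["format"])  # serialized under the alias: format

# File-only options are rejected for non-path sinks by `_check_all`:
try:
    LogHandlerPM(sink=sys.stdout, rotation="10 MB")
except ValueError as exc:  # pydantic.ValidationError subclasses ValueError
    print(exc)
```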
beans_logging/sinks.py CHANGED
@@ -1,9 +1,11 @@
  import sys
+ from typing import TYPE_CHECKING
 
- from loguru._handler import Message
+ if TYPE_CHECKING:
+     from loguru import Message
 
 
- def std_sink(message: Message):
+ def std_sink(message: "Message") -> None:
      """Print message based on log level to stdout or stderr.
 
      Args:
@@ -12,5 +14,12 @@ def std_sink(message: Message):
 
      if message.record["level"].no < 40:
          sys.stdout.write(message)
+         # sys.stdout.flush()
      else:
          sys.stderr.write(message)
+         # sys.stderr.flush()
+
+     return
+
+
+ __all__ = ["std_sink"]
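
`std_sink` splits output by severity: records below ERROR (level no 40) go to stdout, ERROR and above to stderr. A minimal sketch of registering it as a loguru sink:

```python
from loguru import logger

from beans_logging.sinks import std_sink

logger.remove()  # drop loguru's default stderr handler
logger.add(std_sink, format="{level} | {message}")
logger.info("goes to stdout")
logger.error("goes to stderr")
```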