beans-logging 6.0.2__py3-none-any.whl → 7.0.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,186 @@
1
+ import sys
2
+ import datetime
3
+ from typing import Any
4
+
5
+ if sys.version_info >= (3, 11):
6
+ from typing import Self
7
+ else:
8
+ from typing_extensions import Self
9
+
10
+ import potato_util as utils
11
+ from pydantic import Field, model_validator, field_validator
12
+
13
+ from ._constants import LogHandlerTypeEnum, LogLevelEnum
14
+ from .schemas import ExtraBaseModel, LogHandlerPM, LoguruHandlerPM
15
+
16
+
17
def _get_handlers() -> dict[str, LogHandlerPM]:
    """Build the default log handler registry.

    Returns:
        dict[str, LogHandlerPM]: Default handlers as dictionary.
    """

    # Only the std handler ships enabled; the file-based handlers
    # (plain/JSON x all/error-only) are present but disabled by default.
    return {
        "default.all.std_handler": LogHandlerPM(type_=LogHandlerTypeEnum.STD),
        "default.all.file_handler": LogHandlerPM(
            type_=LogHandlerTypeEnum.FILE, enabled=False
        ),
        "default.err.file_handler": LogHandlerPM(
            type_=LogHandlerTypeEnum.FILE, error=True, enabled=False
        ),
        "default.all.json_handler": LogHandlerPM(
            type_=LogHandlerTypeEnum.FILE, serialize=True, enabled=False
        ),
        "default.err.json_handler": LogHandlerPM(
            type_=LogHandlerTypeEnum.FILE, serialize=True, error=True, enabled=False
        ),
    }
41
+
42
+
43
class StdConfigPM(ExtraBaseModel):
    """Config model for std (console) log handlers."""

    # Loguru format string with color markup tags (<c>, <level>, <w>).
    format_str: str = Field(
        min_length=8,
        max_length=512,
        default=(
            "[<c>{time:YYYY-MM-DD HH:mm:ss.SSS Z}</c> | <level>{extra[level_short]:<5}</level> | <w>{name}:{line}</w>]:"
            " <level>{message}</level>"
        ),
    )
    # Whether the color markup above should be rendered.
    colorize: bool = Field(default=True)
53
+
54
+
55
class PathsConfigPM(ExtraBaseModel):
    """Pair of log file paths: one for all records, one for errors only."""

    log_path: str = Field(..., min_length=4, max_length=1024)
    err_path: str = Field(..., min_length=4, max_length=1024)

    @model_validator(mode="after")
    def _check_log_path(self) -> Self:
        # The two handlers must not write into the same file.
        if self.log_path != self.err_path:
            return self

        raise ValueError(
            f"`log_path` and `err_path` attributes have same value: '{self.log_path}', must be different!"
        )
67
+
68
+
69
class FileConfigPM(ExtraBaseModel):
    """Config model for file log handlers: location, rotation, retention."""

    # Directory where all log files are written.
    logs_dir: str = Field(default="./logs", min_length=2, max_length=1024)
    # 10MB = 10 * 1000 * 1000
    rotate_size: int = Field(default=10_000_000, ge=1_000, lt=1_000_000_000)
    # Time of day at which log files rotate (defaults to midnight).
    rotate_time: datetime.time = Field(default_factory=lambda: datetime.time(0, 0, 0))
    # How many rotated files to keep.
    retention: int = Field(default=90, ge=1)
    encoding: str = Field(default="utf8", min_length=2, max_length=31)

    # Plain-text log paths; '{app_name}' placeholders are presumably
    # substituted elsewhere — confirm against the logger setup code.
    plain: PathsConfigPM = Field(
        default_factory=lambda: PathsConfigPM(
            log_path="{app_name}.all.log",
            err_path="{app_name}.err.log",
        )
    )
    # JSON log paths; exposed as plain 'json' on (de)serialization.
    json_: PathsConfigPM = Field(
        default_factory=lambda: PathsConfigPM(
            log_path="json/{app_name}.json.all.log",
            err_path="json/{app_name}.json.err.log",
        ),
        validation_alias="json",
        serialization_alias="json",
    )

    @field_validator("rotate_time", mode="before")
    @classmethod
    def _check_rotate_time(cls, val: Any) -> Any:
        # Accept ISO-formatted time strings (e.g. "23:30:00") as input.
        return datetime.time.fromisoformat(val) if isinstance(val, str) else val
104
+
105
+
106
class LevelConfigPM(ExtraBaseModel):
    """Log level config for the base and error handlers."""

    base: str | int | LogLevelEnum = Field(default=LogLevelEnum.INFO)
    err: str | int | LogLevelEnum = Field(default=LogLevelEnum.WARNING)

    @field_validator("base", mode="before")
    @classmethod
    def _check_level(cls, val: Any) -> Any:
        if not isinstance(val, (str, int, LogLevelEnum)):
            raise TypeError(
                f"Level attribute type {type(val).__name__} is invalid, must be str, int or <LogLevelEnum>!"
            )

        if not utils.is_debug_mode():
            return val

        # Already at TRACE (enum or the numeric TRACE level 5): keep it.
        # NOTE(review): a plain string like "TRACE" may compare unequal to
        # the enum member and get demoted to DEBUG here — confirm whether
        # LogLevelEnum is a str-enum.
        if (val == LogLevelEnum.TRACE) or (val == 5):
            return val

        # Debug mode: force the base level down to DEBUG.
        return LogLevelEnum.DEBUG
122
+
123
+
124
class DefaultConfigPM(ExtraBaseModel):
    """Default logger settings: levels, std/file handler configs, base format."""

    level: LevelConfigPM = Field(default_factory=LevelConfigPM)
    std: StdConfigPM = Field(default_factory=StdConfigPM)
    # Uncolored format string (no markup tags, unlike StdConfigPM.format_str).
    format_str: str = Field(
        min_length=8,
        max_length=512,
        default="[{time:YYYY-MM-DD HH:mm:ss.SSS Z} | {extra[level_short]:<5} | {name}:{line}]: {message}",
    )
    file: FileConfigPM = Field(default_factory=FileConfigPM)
    # Presumably toggles the package's custom JSON serializer over loguru's
    # built-in `serialize` — confirm against the handler setup code.
    custom_serialize: bool = Field(default=False)
134
+
135
+
136
class InterceptConfigPM(ExtraBaseModel):
    """Config model for intercepting standard-library / module loggers."""

    enabled: bool = Field(default=True)
    only_base: bool = Field(default=False)
    # Use `default_factory=list` rather than a mutable `default=[]` literal,
    # per the standard mutable-default idiom (and pydantic's own convention
    # for mutable defaults). Defaults are unchanged for callers.
    ignore_modules: list[str] = Field(default_factory=list)
    include_modules: list[str] = Field(default_factory=list)
    mute_modules: list[str] = Field(default_factory=list)
142
+
143
+
144
class ExtraConfigPM(ExtraBaseModel):
    """Empty placeholder model for extra configuration values."""
146
+
147
+
148
class LoggerConfigPM(ExtraBaseModel):
    """Root configuration model for the logger."""

    app_name: str = Field(
        default_factory=utils.get_slug_name, min_length=1, max_length=128
    )
    default: DefaultConfigPM = Field(default_factory=DefaultConfigPM)
    intercept: InterceptConfigPM = Field(default_factory=InterceptConfigPM)
    handlers: dict[str, LogHandlerPM] = Field(default_factory=_get_handlers)
    extra: ExtraConfigPM | None = Field(default_factory=ExtraConfigPM)

    @field_validator("handlers", mode="before")
    @classmethod
    def _check_handlers(cls, val: Any) -> Any:
        # Normalize each entry into a <LogHandlerPM>; accept raw dicts and
        # <LoguruHandlerPM> instances as input.
        if not val:
            return val

        if not isinstance(val, dict):
            raise TypeError(
                f"'handlers' attribute type {type(val).__name__} is invalid, must be a dict of <LogHandlerPM>, "
                f"<LoguruHandlerPM> or dict!"
            )

        for _key, _handler in val.items():
            if isinstance(_handler, LogHandlerPM):
                continue

            if isinstance(_handler, LoguruHandlerPM):
                val[_key] = LogHandlerPM(
                    **_handler.model_dump(exclude_none=True, exclude_unset=True)
                )
            elif isinstance(_handler, dict):
                val[_key] = LogHandlerPM(**_handler)
            else:
                raise TypeError(
                    f"'handlers' attribute index {_key} type {type(_handler).__name__} is invalid, must be "
                    f"<LogHandlerPM>, <LoguruHandlerPM> or dict!"
                )

        return val
182
+
183
+
184
# Public interface of this module.
__all__ = ["LoggerConfigPM"]
beans_logging/filters.py CHANGED
@@ -1,33 +1,39 @@
1
- def add_level_short(record: dict) -> dict:
1
+ from typing import TYPE_CHECKING
2
+
3
+ if TYPE_CHECKING:
4
+ from loguru import Record
5
+
6
+
7
def add_level_short(record: "Record") -> "Record":
    """Filter that attaches a short (max 5 char) level alias to the record.

    Args:
        record (Record): Log record as dictionary.

    Returns:
        Record: Same record, with ``extra["level_short"]`` populated.
    """

    _extra = record["extra"]
    # Respect a caller-provided short name.
    if "level_short" in _extra:
        return record

    _name = record["level"].name
    # Special aliases; anything else is truncated to at most 5 characters
    # (names of 5 or fewer characters pass through unchanged).
    _aliases = {"SUCCESS": "OK", "WARNING": "WARN", "CRITICAL": "CRIT"}
    _extra["level_short"] = _aliases.get(_name, _name[:5])

    return record
24
30
 
25
31
 
26
- def use_all_filter(record: dict) -> bool:
32
+ def use_all_filter(record: "Record") -> bool:
27
33
  """Filter message for all handlers that use this filter.
28
34
 
29
35
  Args:
30
- record (dict): Log record as dictionary.
36
+ record (Record): Log record as dictionary.
31
37
 
32
38
  Returns:
33
39
  bool: False if record is disabled by extra 'disable_all' key, True otherwise.
@@ -41,7 +47,7 @@ def use_all_filter(record: dict) -> bool:
41
47
  return True
42
48
 
43
49
 
44
- def use_std_filter(record: dict) -> bool:
50
+ def use_std_filter(record: "Record") -> bool:
45
51
  """Filter message for std handlers that use this filter.
46
52
 
47
53
  Args:
@@ -60,11 +66,11 @@ def use_std_filter(record: dict) -> bool:
60
66
  return True
61
67
 
62
68
 
63
- def use_file_filter(record: dict) -> bool:
69
+ def use_file_filter(record: "Record") -> bool:
64
70
  """Filter message for file handlers that use this filter.
65
71
 
66
72
  Args:
67
- record (dict): Log record as dictionary.
73
+ record (Record): Log record as dictionary.
68
74
 
69
75
  Returns:
70
76
  bool: False if record is disabled by extra 'disable_file' key, True otherwise.
@@ -79,11 +85,11 @@ def use_file_filter(record: dict) -> bool:
79
85
  return True
80
86
 
81
87
 
82
- def use_file_err_filter(record: dict) -> bool:
88
+ def use_file_err_filter(record: "Record") -> bool:
83
89
  """Filter message for error file handlers that use this filter.
84
90
 
85
91
  Args:
86
- record (dict): Log record as dictionary.
92
+ record (Record): Log record as dictionary.
87
93
 
88
94
  Returns:
89
95
  bool: False if record is disabled by extra 'disable_file_err' key, True otherwise.
@@ -98,11 +104,11 @@ def use_file_err_filter(record: dict) -> bool:
98
104
  return True
99
105
 
100
106
 
101
- def use_file_json_filter(record: dict) -> bool:
107
+ def use_file_json_filter(record: "Record") -> bool:
102
108
  """Filter message for json file handlers that use this filter.
103
109
 
104
110
  Args:
105
- record (dict): Log record as dictionary.
111
+ record (Record): Log record as dictionary.
106
112
 
107
113
  Returns:
108
114
  bool: False if record is disabled by extra 'disable_file_json' key, True otherwise.
@@ -117,11 +123,11 @@ def use_file_json_filter(record: dict) -> bool:
117
123
  return True
118
124
 
119
125
 
120
- def use_file_json_err_filter(record: dict) -> bool:
126
+ def use_file_json_err_filter(record: "Record") -> bool:
121
127
  """Filter message for json error file handlers that use this filter.
122
128
 
123
129
  Args:
124
- record (dict): Log record as dictionary.
130
+ record (Record): Log record as dictionary.
125
131
 
126
132
  Returns:
127
133
  bool: False if record is disabled by extra 'disable_file_json_err' key, True otherwise.
@@ -134,3 +140,14 @@ def use_file_json_err_filter(record: dict) -> bool:
134
140
  return False
135
141
 
136
142
  return True
143
+
144
+
145
# Public interface of this module.
__all__ = [
    "add_level_short",
    "use_all_filter",
    "use_std_filter",
    "use_file_filter",
    "use_file_err_filter",
    "use_file_json_filter",
    "use_file_json_err_filter",
]
beans_logging/formats.py CHANGED
@@ -1,8 +1,12 @@
1
1
  import json
2
2
  import traceback
3
+ from typing import TYPE_CHECKING
3
4
 
5
+ if TYPE_CHECKING:
6
+ from loguru import Record
4
7
 
5
- def json_format(record: dict) -> str:
8
+
9
+ def json_formatter(record: "Record") -> str:
6
10
  """Custom json formatter for loguru logger.
7
11
 
8
12
  Args:
@@ -16,7 +20,11 @@ def json_format(record: dict) -> str:
16
20
  if record["exception"]:
17
21
  _error = {}
18
22
  _error_type, _error_value, _error_traceback = record["exception"]
19
- _error["type"] = _error_type.__name__
23
+ if _error_type:
24
+ _error["type"] = _error_type.__name__
25
+ else:
26
+ _error["type"] = "None"
27
+
20
28
  _error["value"] = str(_error_value)
21
29
  _error["traceback"] = "".join(traceback.format_tb(_error_traceback))
22
30
 
@@ -24,6 +32,9 @@ def json_format(record: dict) -> str:
24
32
  if record["extra"] and (0 < len(record["extra"])):
25
33
  _extra = record["extra"]
26
34
 
35
+ if _extra and ("serialized" in _extra):
36
+ del _extra["serialized"]
37
+
27
38
  _json_record = {
28
39
  "timestamp": record["time"].strftime("%Y-%m-%dT%H:%M:%S%z"),
29
40
  "level": record["level"].name,
@@ -39,5 +50,10 @@ def json_format(record: dict) -> str:
39
50
  "elapsed": str(record["elapsed"]),
40
51
  }
41
52
 
42
- record["serialized"] = json.dumps(_json_record)
43
- return "{serialized}\n"
53
+ record["extra"]["serialized"] = json.dumps(_json_record)
54
+ return "{extra[serialized]}\n"
55
+
56
+
57
# Public interface of this module.
__all__ = ["json_formatter"]
@@ -1,43 +1,44 @@
1
1
  import datetime
2
- from typing import TextIO
2
+ from typing import TextIO, TYPE_CHECKING
3
3
 
4
- from loguru._handler import Message
4
+ if TYPE_CHECKING:
5
+ from loguru import Message
5
6
 
6
7
 
7
- class RotationChecker:
8
- """RotationChecker class for checking file size and time for rotation.
8
+ class Rotator:
9
+ """Rotator class for checking file size and time for rotation.
9
10
 
10
11
  Attributes:
11
- _size_limit (int ): File size limit for rotation.
12
- _dtime_limit (datetime.datetime): Datetime when the log file should rotate.
12
+ _size_limit (int ): File size limit for rotation.
13
+ _dt_limit (datetime.datetime): Datetime when the log file should rotate.
13
14
 
14
15
  Methods:
15
16
  should_rotate(): Check if the log file should rotate.
16
17
  """
17
18
 
18
19
  def __init__(self, *, rotate_size: int, rotate_time: datetime.time):
19
- """RotationChecker constructor method.
20
+ """Rotator constructor method.
20
21
 
21
22
  Args:
22
23
  rotate_size (int, required): File size limit for rotation.
23
24
  rotate_time (datetime.time, required): Time when the log file should rotate.
24
25
  """
25
26
 
26
- _current_dtime = datetime.datetime.now()
27
+ _current_dt = datetime.datetime.now()
27
28
 
28
29
  self._size_limit = rotate_size
29
- self._dtime_limit = _current_dtime.replace(
30
+ self._dt_limit = _current_dt.replace(
30
31
  hour=rotate_time.hour,
31
32
  minute=rotate_time.minute,
32
33
  second=rotate_time.second,
33
34
  )
34
35
 
35
- if _current_dtime >= self._dtime_limit:
36
+ if _current_dt >= self._dt_limit:
36
37
  # The current time is already past the target time so it would rotate already.
37
38
  # Add one day to prevent an immediate rotation.
38
- self._dtime_limit += datetime.timedelta(days=1)
39
+ self._dt_limit += datetime.timedelta(days=1)
39
40
 
40
- def should_rotate(self, message: Message, file: TextIO) -> bool:
41
+ def should_rotate(self, message: "Message", file: TextIO) -> bool:
41
42
  """Check if the log file should rotate.
42
43
 
43
44
  Args:
@@ -53,11 +54,16 @@ class RotationChecker:
53
54
  return True
54
55
 
55
56
  _elapsed_timestamp = (
56
- message.record["time"].timestamp() - self._dtime_limit.timestamp()
57
+ message.record["time"].timestamp() - self._dt_limit.timestamp()
57
58
  )
58
59
  if _elapsed_timestamp >= 0:
59
60
  _elapsed_days = datetime.timedelta(seconds=_elapsed_timestamp).days
60
- self._dtime_limit += datetime.timedelta(days=_elapsed_days + 1)
61
+ self._dt_limit += datetime.timedelta(days=_elapsed_days + 1)
61
62
  return True
62
63
 
63
64
  return False
65
+
66
+
67
# Public interface of this module.
__all__ = ["Rotator"]