beans-logging-fastapi 1.1.1__py3-none-any.whl → 3.0.0__py3-none-any.whl

This diff compares the contents of two publicly released versions of the package, as published to one of the supported registries. It is provided for informational purposes only.
@@ -1,31 +1,13 @@
- # -*- coding: utf-8 -*-
+ from beans_logging import logger
 
- from ._filters import use_http_filter
- from ._formats import http_file_format, http_file_json_format
- from ._handlers import add_http_file_handler, add_http_file_json_handler
- from ._middlewares import RequestHTTPInfoMiddleware, ResponseHTTPInfoMiddleware
- from ._base import HttpAccessLogMiddleware
- from ._async_log import *
  from .__version__ import __version__
+ from .config import LoggerConfigPM
+ from ._core import add_logger
 
 
  __all__ = [
-     "use_http_filter",
-     "http_file_format",
-     "http_file_json_format",
-     "add_http_file_handler",
-     "add_http_file_json_handler",
-     "RequestHTTPInfoMiddleware",
-     "ResponseHTTPInfoMiddleware",
-     "HttpAccessLogMiddleware",
-     "async_log_http_error",
-     "async_log_trace",
-     "async_log_debug",
-     "async_log_info",
-     "async_log_success",
-     "async_log_warning",
-     "async_log_error",
-     "async_log_critical",
-     "async_log_level",
      "__version__",
+     "logger",
+     "add_logger",
+     "LoggerConfigPM",
  ]
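
The 1.x wildcard re-exports are gone; the public surface is now just logger, add_logger, LoggerConfigPM and __version__. A minimal sketch of the new surface, assuming the wheel installs an importable beans_logging_fastapi package (the module name is not shown in this diff):

    # Hypothetical 3.0.0 usage; the top-level module name is assumed.
    from beans_logging_fastapi import LoggerConfigPM, add_logger, logger

    config = LoggerConfigPM()      # pydantic config model, see the config hunk below
    print(callable(add_logger))    # True; wires middlewares/handlers onto a FastAPI app
    logger.info("re-exported beans_logging (loguru) logger")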
@@ -1,3 +1 @@
- # -*- coding: utf-8 -*-
-
- __version__ = "1.1.1"
+ __version__ = "3.0.0"
@@ -1,42 +1,46 @@
- # -*- coding: utf-8 -*-
+ from typing import Any
 
- from typing import Dict, Any
-
- from pydantic import validate_arguments
+ from pydantic import validate_call
  from fastapi import Request
  from fastapi.concurrency import run_in_threadpool
 
  from beans_logging import logger, Logger
 
 
- @validate_arguments(config=dict(arbitrary_types_allowed=True))
+ @validate_call(config={"arbitrary_types_allowed": True})
  async def async_log_http_error(
      request: Request,
      status_code: int,
-     msg_format: str = '<n><w>[{request_id}]</w></n> {client_host} {user_id} "<u>{method} {url_path}</u> HTTP/{http_version}" <n>{status_code}</n>',
- ):
+     format_str: str = (
+         '<n><w>[{request_id}]</w></n> {client_host} {user_id} "<u>{method} {url_path}</u> '
+         'HTTP/{http_version}" <n>{status_code}</n>'
+     ),
+ ) -> None:
      """Log HTTP error for unhandled Exception.
 
      Args:
          request (Request, required): Request instance.
          status_code (int , required): HTTP status code.
-         msg_format (str , optional): Message format. Defaults to '<n><w>[{request_id}]</w></n> {client_host} {user_id} "<u>{method} {url_path}</u> HTTP/{http_version}" <n>{status_code}</n>'.
+         format_str (str , optional): Message format. Defaults to
+             '<n><w>[{request_id}]</w></n> {client_host} {user_id} "<u>{method} {url_path}</u> HTTP/{http_version}"
+             <n>{status_code}</n>'.
      """
 
-     _http_info: Dict[str, Any] = {"request_id": request.state.request_id}
+     _http_info: dict[str, Any] = {"request_id": request.state.request_id}
      if hasattr(request.state, "http_info") and isinstance(
          request.state.http_info, dict
      ):
-         _http_info: Dict[str, Any] = request.state.http_info
+         _http_info: dict[str, Any] = request.state.http_info
      _http_info["status_code"] = status_code
 
-     _msg = msg_format.format(**_http_info)
+     _msg = format_str.format(**_http_info)
      _logger: Logger = logger.opt(colors=True, record=True).bind(http_info=_http_info)
      await run_in_threadpool(_logger.error, _msg)
+     return
 
 
- @validate_arguments
- async def async_log_trace(message: str):
+ @validate_call
+ async def async_log_trace(message: str) -> None:
      """Log trace message.
 
      Args:
@@ -44,10 +48,11 @@ async def async_log_trace(message: str):
      """
 
      await run_in_threadpool(logger.trace, message)
+     return
 
 
- @validate_arguments
- async def async_log_debug(message: str):
+ @validate_call
+ async def async_log_debug(message: str) -> None:
      """Log debug message.
 
      Args:
@@ -55,10 +60,11 @@ async def async_log_debug(message: str):
      """
 
      await run_in_threadpool(logger.debug, message)
+     return
 
 
- @validate_arguments
- async def async_log_info(message: str):
+ @validate_call
+ async def async_log_info(message: str) -> None:
      """Log info message.
 
      Args:
@@ -66,10 +72,11 @@ async def async_log_info(message: str):
      """
 
      await run_in_threadpool(logger.info, message)
+     return
 
 
- @validate_arguments
- async def async_log_success(message: str):
+ @validate_call
+ async def async_log_success(message: str) -> None:
      """Log success message.
 
      Args:
@@ -77,10 +84,11 @@ async def async_log_success(message: str):
      """
 
      await run_in_threadpool(logger.success, message)
+     return
 
 
- @validate_arguments
- async def async_log_warning(message: str):
+ @validate_call
+ async def async_log_warning(message: str) -> None:
      """Log warning message.
 
      Args:
@@ -88,10 +96,11 @@ async def async_log_warning(message: str):
      """
 
      await run_in_threadpool(logger.warning, message)
+     return
 
 
- @validate_arguments
- async def async_log_error(message: str):
+ @validate_call
+ async def async_log_error(message: str) -> None:
      """Log error message.
 
      Args:
@@ -99,10 +108,11 @@ async def async_log_error(message: str):
      """
 
      await run_in_threadpool(logger.error, message)
+     return
 
 
- @validate_arguments
- async def async_log_critical(message: str):
+ @validate_call
+ async def async_log_critical(message: str) -> None:
      """Log critical message.
 
      Args:
@@ -110,10 +120,11 @@ async def async_log_critical(message: str):
      """
 
      await run_in_threadpool(logger.critical, message)
+     return
 
 
- @validate_arguments
- async def async_log_level(level: str, message: str):
+ @validate_call
+ async def async_log_level(level: str, message: str) -> None:
      """Log level message.
 
      Args:
@@ -122,6 +133,7 @@ async def async_log_level(level: str, message: str):
      """
 
      await run_in_threadpool(logger.log, level, message)
+     return
 
 
  __all__ = [
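
All of these helpers follow the same pattern: validate the arguments with pydantic, then push the actual loguru call onto the thread pool with run_in_threadpool so it can be awaited without blocking the event loop. A usage sketch from a route handler; the helpers' module path is not named in this diff, so the import path below is purely illustrative:

    from fastapi import FastAPI

    # Illustrative import path; the real module name is not shown in this diff.
    from beans_logging_fastapi.async_log import async_log_info, async_log_level

    app = FastAPI()


    @app.get("/ping")
    async def ping() -> dict:
        # logger.info(...) runs in a worker thread via run_in_threadpool.
        await async_log_info("handled /ping")
        # async_log_level forwards to logger.log(level, message).
        await async_log_level("SUCCESS", "custom-level message")
        return {"status": "ok"}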
@@ -0,0 +1,90 @@
+ from typing import TYPE_CHECKING
+
+ from pydantic import validate_call
+ from fastapi import FastAPI
+
+ if TYPE_CHECKING:
+     from loguru import Logger
+ else:
+     from loguru._logger import Logger
+
+ from beans_logging import LoggerLoader
+
+ from .constants import (
+     HTTP_ACCESS_FILE_HANDLER_NAME,
+     HTTP_ERR_FILE_HANDLER_NAME,
+     HTTP_ACCESS_JSON_HANDLER_NAME,
+     HTTP_ERR_JSON_HANDLER_NAME,
+ )
+ from .config import LoggerConfigPM
+ from .filters import use_http_filter
+ from .formats import http_file_format, http_file_json_format
+ from .middlewares import (
+     HttpAccessLogMiddleware,
+     RequestHTTPInfoMiddleware,
+     ResponseHTTPInfoMiddleware,
+ )
+
+
+ @validate_call(config={"arbitrary_types_allowed": True})
+ def add_logger(
+     app: FastAPI,
+     config: LoggerConfigPM,
+     has_proxy_headers: bool | None = None,
+     has_cf_headers: bool | None = None,
+ ) -> "Logger":
+     """Add and initialize logger middlewares and handlers to FastAPI application.
+
+     Args:
+         app (FastAPI , required): FastAPI application instance.
+         config (LoggerConfigPM, required): Logger configuration model.
+         has_proxy_headers (bool | None , optional): Whether to use proxy headers. Defaults to None.
+         has_cf_headers (bool | None , optional): Whether to use Cloudflare headers. Defaults to None.
+
+     Returns:
+         Logger: Initialized Logger instance.
+     """
+
+     logger_loader = LoggerLoader(config=config)
+
+     if has_proxy_headers is None:
+         has_proxy_headers = config.http.headers.has_proxy
+
+     if has_cf_headers is None:
+         has_cf_headers = config.http.headers.has_cf
+
+     app.add_middleware(ResponseHTTPInfoMiddleware)
+     app.add_middleware(
+         HttpAccessLogMiddleware,
+         debug_format_str=config.http.std.debug_format_str,
+         format_str=config.http.std.format_str,
+     )
+     app.add_middleware(
+         RequestHTTPInfoMiddleware,
+         has_proxy_headers=has_proxy_headers,
+         has_cf_headers=has_cf_headers,
+     )
+
+     for _name, _handler in logger_loader.config.handlers.items():
+         if (_name == HTTP_ACCESS_FILE_HANDLER_NAME) or (
+             _name == HTTP_ERR_FILE_HANDLER_NAME
+         ):
+             _handler.filter_ = use_http_filter
+             _handler.format_ = lambda record: http_file_format(
+                 record=record,
+                 format_str=config.http.file.format_str,
+                 tz=config.http.file.tz,
+             )
+         elif (_name == HTTP_ACCESS_JSON_HANDLER_NAME) or (
+             _name == HTTP_ERR_JSON_HANDLER_NAME
+         ):
+             _handler.filter_ = use_http_filter
+             _handler.format_ = http_file_json_format
+
+     logger: Logger = logger_loader.load()
+     return logger
+
+
+ __all__ = [
+     "add_logger",
+ ]
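
The new add_logger entry point replaces the 1.x add_http_file_handler/add_http_file_json_handler helpers: it registers the three middlewares, points the http.* handlers at the HTTP filter and formatters, and returns the loaded logger. Since Starlette's add_middleware prepends, the last-registered RequestHTTPInfoMiddleware sits outermost and sees each request first. A setup sketch, again assuming the beans_logging_fastapi module name:

    from fastapi import FastAPI

    from beans_logging_fastapi import LoggerConfigPM, add_logger  # module name assumed

    app = FastAPI()
    config = LoggerConfigPM()  # defaults; HTTP options live under config.http.*

    # Explicit flags override config.http.headers.has_proxy / has_cf.
    logger = add_logger(app=app, config=config, has_proxy_headers=True, has_cf_headers=False)
    logger.info("middlewares and http.* file handlers are registered")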
@@ -0,0 +1,168 @@
+ from typing import Any
+
+ import potato_util as utils
+ from pydantic import Field, field_validator
+
+ from beans_logging.constants import LogHandlerTypeEnum
+ from beans_logging.schemas import LogHandlerPM
+ from beans_logging.config import (
+     get_default_handlers as get_base_handlers,
+     ExtraBaseModel,
+     InterceptConfigPM,
+     LoggerConfigPM as BaseLoggerConfigPM,
+ )
+
+ from .constants import (
+     HTTP_ACCESS_FILE_HANDLER_NAME,
+     HTTP_ERR_FILE_HANDLER_NAME,
+     HTTP_ACCESS_JSON_HANDLER_NAME,
+     HTTP_ERR_JSON_HANDLER_NAME,
+ )
+
+
+ def get_default_handlers() -> dict[str, LogHandlerPM]:
+     """Get fastapi default log handlers.
+
+     Returns:
+         dict[str, LogHandlerPM]: Default handlers as dictionary.
+     """
+
+     _base_handlers = get_base_handlers()
+     for _name, _handler in _base_handlers.items():
+         if _name.startswith("default"):
+             _handler.enabled = True
+
+     _http_handlers: dict[str, LogHandlerPM] = {
+         HTTP_ACCESS_FILE_HANDLER_NAME: LogHandlerPM(
+             h_type=LogHandlerTypeEnum.FILE,
+             sink="http/{app_name}.http-access.log",
+         ),
+         HTTP_ERR_FILE_HANDLER_NAME: LogHandlerPM(
+             h_type=LogHandlerTypeEnum.FILE,
+             sink="http/{app_name}.http-err.log",
+             error=True,
+         ),
+         HTTP_ACCESS_JSON_HANDLER_NAME: LogHandlerPM(
+             h_type=LogHandlerTypeEnum.FILE,
+             sink="http.json/{app_name}.http-access.json.log",
+         ),
+         HTTP_ERR_JSON_HANDLER_NAME: LogHandlerPM(
+             h_type=LogHandlerTypeEnum.FILE,
+             sink="http.json/{app_name}.http-err.json.log",
+             error=True,
+         ),
+     }
+
+     _default_handlers = {**_base_handlers, **_http_handlers}
+     return _default_handlers
+
+
+ def get_default_intercept() -> InterceptConfigPM:
+     _default_intercept = InterceptConfigPM(mute_modules=["uvicorn.access"])
+     return _default_intercept
+
+
+ class StdConfigPM(ExtraBaseModel):
+     format_str: str = Field(
+         default=(
+             '<n><w>[{request_id}]</w></n> {client_host} {user_id} "<u>{method} {url_path}</u> HTTP/{http_version}"'
+             " {status_code} {content_length}B {response_time}ms"
+         ),
+         min_length=8,
+         max_length=512,
+     )
+     err_format_str: str = Field(
+         default=(
+             '<n><w>[{request_id}]</w></n> {client_host} {user_id} "<u>{method} {url_path}</u> HTTP/{http_version}"'
+             " <n>{status_code}</n>"
+         ),
+         min_length=8,
+         max_length=512,
+     )
+     debug_format_str: str = Field(
+         default='<n>[{request_id}]</n> {client_host} {user_id} "<u>{method} {url_path}</u> HTTP/{http_version}"',
+         min_length=8,
+         max_length=512,
+     )
+
+
+ class FileConfigPM(ExtraBaseModel):
+     format_str: str = Field(
+         default=(
+             '{client_host} {request_id} {user_id} [{datetime}] "{method} {url_path} HTTP/{http_version}"'
+             ' {status_code} {content_length} "{h_referer}" "{h_user_agent}" {response_time}'
+         ),
+         min_length=8,
+         max_length=512,
+     )
+     tz: str = Field(default="localtime", min_length=2, max_length=64)
+
+
+ class HeadersConfigPM(ExtraBaseModel):
+     has_proxy: bool = Field(default=False)
+     has_cf: bool = Field(default=False)
+
+
+ class HttpConfigPM(ExtraBaseModel):
+     std: StdConfigPM = Field(default_factory=StdConfigPM)
+     file: FileConfigPM = Field(default_factory=FileConfigPM)
+     headers: HeadersConfigPM = Field(default_factory=HeadersConfigPM)
+
+
+ class LoggerConfigPM(BaseLoggerConfigPM):
+     http: HttpConfigPM = Field(default_factory=HttpConfigPM)
+     intercept: InterceptConfigPM = Field(default_factory=get_default_intercept)
+     handlers: dict[str, LogHandlerPM] = Field(default_factory=get_default_handlers)
+
+     @field_validator("handlers", mode="before")
+     @classmethod
+     def _check_handlers(cls, val: Any) -> dict[str, LogHandlerPM]:
+
+         _default_handlers = get_default_handlers()
+
+         if not val:
+             val = _default_handlers
+             return val
+
+         if not isinstance(val, dict):
+             raise TypeError(
+                 f"'handlers' attribute type {type(val).__name__} is invalid, must be a dict of <LogHandlerPM> or dict!"
+             )
+
+         for _key, _handler in val.items():
+             if not isinstance(_handler, (LogHandlerPM, dict)):
+                 raise TypeError(
+                     f"'handlers' attribute's '{_key}' key -> value type {type(_handler).__name__} is invalid, must be "
+                     f"<LogHandlerPM> or dict!"
+                 )
+
+             if isinstance(_handler, LogHandlerPM):
+                 val[_key] = _handler.model_dump(
+                     by_alias=True, exclude_unset=True, exclude_none=True
+                 )
+
+         _default_dict = {
+             _key: _handler.model_dump(
+                 by_alias=True, exclude_unset=True, exclude_none=True
+             )
+             for _key, _handler in _default_handlers.items()
+         }
+
+         if _default_dict != val:
+             val = utils.deep_merge(_default_dict, val)
+
+         for _key, _handler in val.items():
+             val[_key] = LogHandlerPM(**_handler)
+
+         return val
+
+
+ __all__ = [
+     "LoggerConfigPM",
+     "HttpConfigPM",
+     "StdConfigPM",
+     "FileConfigPM",
+     "HeadersConfigPM",
+     "get_default_intercept",
+     "get_default_handlers",
+ ]
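
Because _check_handlers deep-merges whatever the caller supplies over get_default_handlers(), a config only needs to spell out the fields it changes; everything else, including the four http.* handlers, stays in place. A sketch of a partial override, assuming pydantic's usual nested-dict validation into the sub-models and the same assumed module name:

    from beans_logging_fastapi import LoggerConfigPM  # module name assumed

    config = LoggerConfigPM(
        http={
            "headers": {"has_proxy": True},  # trust proxy-forwarded client headers
            "file": {"tz": "UTC"},           # timezone used for the {datetime} field
        }
    )

    print(config.http.std.format_str)  # std format default left untouched
    print(sorted(config.handlers))     # base handlers plus the four http.* handlers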
@@ -0,0 +1,12 @@
+ HTTP_ACCESS_FILE_HANDLER_NAME = "http.access.file_handler"
+ HTTP_ERR_FILE_HANDLER_NAME = "http.err.file_handler"
+ HTTP_ACCESS_JSON_HANDLER_NAME = "http.access.json_handler"
+ HTTP_ERR_JSON_HANDLER_NAME = "http.err.json_handler"
+
+
+ __all__ = [
+     "HTTP_ACCESS_FILE_HANDLER_NAME",
+     "HTTP_ERR_FILE_HANDLER_NAME",
+     "HTTP_ACCESS_JSON_HANDLER_NAME",
+     "HTTP_ERR_JSON_HANDLER_NAME",
+ ]
@@ -1,13 +1,16 @@
- # -*- coding: utf-8 -*-
+ from typing import TYPE_CHECKING
+
+ if TYPE_CHECKING:
+     from loguru import Record
 
  from beans_logging.filters import use_all_filter
 
 
- def use_http_filter(record: dict) -> bool:
+ def use_http_filter(record: "Record") -> bool:
      """Filter message only for http access log handler by checking 'http_info' key in extra.
 
      Args:
-         record (dict): Log record as dictionary.
+         record (Record, required): Log record as dictionary.
 
      Returns:
          bool: True if record has 'http_info' key in extra, False otherwise.
@@ -22,4 +25,6 @@ def use_http_filter(record: dict) -> bool:
      return True
 
 
- __all__ = ["use_http_filter"]
+ __all__ = [
+     "use_http_filter",
+ ]
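
The filter only lets records through to the http.* handlers when the middlewares (or async_log_http_error) have bound an http_info dict into loguru's extra. A quick sketch of the distinction, assuming the handlers were already registered via add_logger:

    from beans_logging import logger

    # Passes use_http_filter: "http_info" is present in record["extra"].
    logger.bind(http_info={"request_id": "abc123", "status_code": 500}).error(
        "routed to the http.* file handlers"
    )

    # No http_info bound, so the http.* handlers skip this record.
    logger.error("regular application log line")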
@@ -0,0 +1,95 @@
+ import json
+ from typing import Any
+ from zoneinfo import ZoneInfo
+ from typing import TYPE_CHECKING
+
+ if TYPE_CHECKING:
+     from loguru import Record
+
+
+ def http_file_format(
+     record: "Record",
+     format_str: str = (
+         '{client_host} {request_id} {user_id} [{datetime}] "{method} {url_path} HTTP/{http_version}"'
+         ' {status_code} {content_length} "{h_referer}" "{h_user_agent}" {response_time}'
+     ),
+     tz: str = "localtime",
+ ) -> str:
+     """Http access log file format.
+
+     Args:
+         record (Record, required): Log record as dictionary.
+         format_str (str , optional): Log message format.
+         tz (str , optional): Timezone for datetime field. Defaults to 'localtime'.
+
+     Returns:
+         str: Format for http access log record.
+     """
+
+     if "http_info" not in record["extra"]:
+         return ""
+
+     if "http_message" in record["extra"]:
+         del record["extra"]["http_message"]
+
+     _http_info: dict[str, Any] = record["extra"]["http_info"]
+     if "datetime" not in _http_info:
+         _dt = record["time"]
+         if tz != "localtime":
+             if not _dt.tzinfo:
+                 _dt = _dt.replace(tzinfo=ZoneInfo("UTC"))
+
+             _dt = _dt.astimezone(ZoneInfo(tz))
+
+         _http_info["datetime"] = _dt.isoformat(timespec="milliseconds")
+
+     if "content_length" not in _http_info:
+         _http_info["content_length"] = 0
+
+     if "h_referer" not in _http_info:
+         _http_info["h_referer"] = "-"
+
+     if "h_user_agent" not in _http_info:
+         _http_info["h_user_agent"] = "-"
+
+     if "response_time" not in _http_info:
+         _http_info["response_time"] = 0
+
+     record["extra"]["http_info"] = _http_info
+     _msg = format_str.format(**_http_info)
+
+     record["extra"]["http_message"] = _msg
+     return "{extra[http_message]}\n"
+
+
+ def http_file_json_format(record: "Record") -> str:
+     """Http access json log file format.
+
+     Args:
+         record (Record, required): Log record as dictionary.
+
+     Returns:
+         str: Format for http access json log record.
+     """
+
+     if "http_info" not in record["extra"]:
+         return ""
+
+     if "datetime" not in record["extra"]["http_info"]:
+         record["extra"]["http_info"]["datetime"] = record["time"].isoformat(
+             timespec="milliseconds"
+         )
+
+     if "http_serialized" in record["extra"]:
+         del record["extra"]["http_serialized"]
+
+     _http_info = record["extra"]["http_info"]
+     record["extra"]["http_serialized"] = json.dumps(_http_info)
+
+     return "{extra[http_serialized]}\n"
+
+
+ __all__ = [
+     "http_file_format",
+     "http_file_json_format",
+ ]
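
http_file_format renders the access-log line eagerly, stores it in extra["http_message"], and hands loguru the constant template "{extra[http_message]}\n", so user data containing braces is never re-formatted. A standalone sketch with a plain dict standing in for a loguru Record; the .formats submodule path and the beans_logging_fastapi module name are assumptions, not shown in this diff:

    from datetime import datetime, timezone

    from beans_logging_fastapi.formats import http_file_format  # path assumed

    record = {
        "time": datetime.now(timezone.utc),
        "extra": {
            "http_info": {
                "client_host": "127.0.0.1",
                "request_id": "abc123",
                "user_id": "-",
                "method": "GET",
                "url_path": "/ping",
                "http_version": "1.1",
                "status_code": 200,
            }
        },
    }

    template = http_file_format(record=record, tz="UTC")
    print(template)                         # the constant "{extra[http_message]}\n" template
    print(record["extra"]["http_message"])  # the fully rendered access-log line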