backend.ai-logging 24.9.0__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- backend_ai_logging-24.9.0/MANIFEST.in +1 -0
- backend_ai_logging-24.9.0/PKG-INFO +45 -0
- backend_ai_logging-24.9.0/ai/backend/logging/VERSION +1 -0
- backend_ai_logging-24.9.0/ai/backend/logging/__init__.py +21 -0
- backend_ai_logging-24.9.0/ai/backend/logging/abc.py +20 -0
- backend_ai_logging-24.9.0/ai/backend/logging/config.py +84 -0
- backend_ai_logging-24.9.0/ai/backend/logging/exceptions.py +11 -0
- backend_ai_logging-24.9.0/ai/backend/logging/formatter.py +73 -0
- backend_ai_logging-24.9.0/ai/backend/logging/handler/__init__.py +0 -0
- backend_ai_logging-24.9.0/ai/backend/logging/handler/graylog.py +71 -0
- backend_ai_logging-24.9.0/ai/backend/logging/handler/intrinsic.py +66 -0
- backend_ai_logging-24.9.0/ai/backend/logging/handler/logstash.py +108 -0
- backend_ai_logging-24.9.0/ai/backend/logging/logger.py +348 -0
- backend_ai_logging-24.9.0/ai/backend/logging/py.typed +1 -0
- backend_ai_logging-24.9.0/ai/backend/logging/types.py +178 -0
- backend_ai_logging-24.9.0/ai/backend/logging/utils.py +47 -0
- backend_ai_logging-24.9.0/backend.ai_logging.egg-info/PKG-INFO +45 -0
- backend_ai_logging-24.9.0/backend.ai_logging.egg-info/SOURCES.txt +24 -0
- backend_ai_logging-24.9.0/backend.ai_logging.egg-info/dependency_links.txt +1 -0
- backend_ai_logging-24.9.0/backend.ai_logging.egg-info/namespace_packages.txt +1 -0
- backend_ai_logging-24.9.0/backend.ai_logging.egg-info/not-zip-safe +1 -0
- backend_ai_logging-24.9.0/backend.ai_logging.egg-info/requires.txt +7 -0
- backend_ai_logging-24.9.0/backend.ai_logging.egg-info/top_level.txt +1 -0
- backend_ai_logging-24.9.0/backend_shim.py +31 -0
- backend_ai_logging-24.9.0/setup.cfg +4 -0
- backend_ai_logging-24.9.0/setup.py +72 -0
|
@@ -0,0 +1 @@
|
|
|
1
|
+
include *.py
|
|
@@ -0,0 +1,45 @@
|
|
|
1
|
+
Metadata-Version: 2.1
|
|
2
|
+
Name: backend.ai-logging
|
|
3
|
+
Version: 24.9.0
|
|
4
|
+
Summary: Backend.AI Logging Subsystem
|
|
5
|
+
Home-page: https://github.com/lablup/backend.ai
|
|
6
|
+
Author: Lablup Inc. and contributors
|
|
7
|
+
License: MIT
|
|
8
|
+
Project-URL: Documentation, https://docs.backend.ai/
|
|
9
|
+
Project-URL: Source, https://github.com/lablup/backend.ai
|
|
10
|
+
Classifier: Intended Audience :: Developers
|
|
11
|
+
Classifier: Operating System :: MacOS :: MacOS X
|
|
12
|
+
Classifier: Operating System :: POSIX :: Linux
|
|
13
|
+
Classifier: Programming Language :: Python
|
|
14
|
+
Classifier: Programming Language :: Python :: 3
|
|
15
|
+
Classifier: Environment :: No Input/Output (Daemon)
|
|
16
|
+
Classifier: Topic :: Scientific/Engineering
|
|
17
|
+
Classifier: Topic :: Software Development
|
|
18
|
+
Classifier: Development Status :: 5 - Production/Stable
|
|
19
|
+
Classifier: Programming Language :: Python :: 3.12
|
|
20
|
+
Classifier: License :: OSI Approved :: MIT License
|
|
21
|
+
Requires-Python: >=3.12,<3.13
|
|
22
|
+
Description-Content-Type: text/markdown
|
|
23
|
+
Requires-Dist: coloredlogs~=15.0
|
|
24
|
+
Requires-Dist: graypy==2.1.0
|
|
25
|
+
Requires-Dist: msgpack~=1.1.0
|
|
26
|
+
Requires-Dist: python-json-logger>=2.0.1
|
|
27
|
+
Requires-Dist: pyzmq~=26.2
|
|
28
|
+
Requires-Dist: trafaret~=2.1
|
|
29
|
+
Requires-Dist: yarl~=1.13.1
|
|
30
|
+
|
|
31
|
+
Backend.AI Logging Subsystem
|
|
32
|
+
============================
|
|
33
|
+
|
|
34
|
+
Package Structure
|
|
35
|
+
-----------------
|
|
36
|
+
|
|
37
|
+
* `ai.backend.logging`
|
|
38
|
+
- `abc`: Abstract base classes
|
|
39
|
+
- `logger`: The core logging facility
|
|
40
|
+
- `Logger`: The standard multiprocess-friendly logger using `RelayHandler` based on ZeroMQ
|
|
41
|
+
- `LocalLogger`: A minimalized console/file logger that does not require serialization via networks at all
|
|
42
|
+
- `handler`: Collection of vendor-specific handler implementations
|
|
43
|
+
- `formatter`: Collection of formatters
|
|
44
|
+
- `types`: Definition of enums/types like `LogLevel`
|
|
45
|
+
- `utils`: Brace-style message formatting adapters and other extras
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
24.09.0
|
|
@@ -0,0 +1,21 @@
|
|
|
1
|
+
from pathlib import Path

from .abc import AbstractLogger
from .exceptions import ConfigurationError
from .logger import LocalLogger, Logger, NoopLogger, is_active
from .types import LogFormat, LogLevel
from .utils import BraceStyleAdapter

# The canonical version string lives in the adjacent VERSION file so that the
# runtime value and the packaging metadata cannot drift apart.
__version__ = (Path(__file__).parent / "VERSION").read_text().strip()

# Public API of the logging subsystem re-exported at the package root.
__all__ = (
    "AbstractLogger",
    "Logger",
    "LocalLogger",
    "NoopLogger",
    "BraceStyleAdapter",
    "is_active",
    "ConfigurationError",
    "LogFormat",
    "LogLevel",
)
|
|
@@ -0,0 +1,20 @@
|
|
|
1
|
+
from __future__ import annotations

from abc import ABCMeta, abstractmethod
from typing import Any, MutableMapping, Self


class AbstractLogger(metaclass=ABCMeta):
    """Interface for logger lifecycle managers used as context managers.

    Concrete subclasses are expected to install their handlers in
    ``__enter__`` and tear them down in ``__exit__``.
    """

    def __init__(
        self,
        logging_config: MutableMapping[str, Any],
    ) -> None:
        # Subclasses consume the raw (not yet validated) logging configuration.
        pass

    @abstractmethod
    def __enter__(self) -> Self:
        raise NotImplementedError

    @abstractmethod
    def __exit__(self, *exc_info_args) -> bool | None:
        raise NotImplementedError
|
|
@@ -0,0 +1,84 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
from collections.abc import MutableMapping
|
|
4
|
+
from typing import Any
|
|
5
|
+
|
|
6
|
+
import trafaret as t
|
|
7
|
+
|
|
8
|
+
from .types import (
|
|
9
|
+
DirPathTrafaret,
|
|
10
|
+
LogFormat,
|
|
11
|
+
LogLevel,
|
|
12
|
+
SimpleBinarySizeTrafaret,
|
|
13
|
+
)
|
|
14
|
+
|
|
15
|
+
# Default per-package log levels applied when "pkg-ns" is not configured.
default_pkg_ns: dict[str, LogLevel] = {
    "": LogLevel.WARNING,
    "ai.backend": LogLevel.INFO,
    "tests": LogLevel.DEBUG,
}

# Trafaret validator for the whole "logging" configuration section.
# Every sub-dict tolerates unknown keys (allow_extra) so newer configs keep
# validating against older library versions.
logging_config_iv = t.Dict({
    t.Key("level", default=LogLevel.INFO): LogLevel.as_trafaret(),
    t.Key("pkg-ns", default=default_pkg_ns): t.Mapping(
        t.String(allow_blank=True), LogLevel.as_trafaret()
    ),
    t.Key("drivers", default=["console"]): t.List(
        t.Enum(
            "console",
            "logstash",
            "file",
            "graylog",
        )
    ),
    t.Key(
        "console",
        default={
            "colored": None,
            "format": LogFormat.VERBOSE,
        },
    ): t.Dict({
        # colored=None means "auto-detect from whether stderr is a TTY".
        t.Key("colored", default=None): t.Null | t.ToBool,
        t.Key("format", default=LogFormat.VERBOSE): LogFormat.as_trafaret(),
    }).allow_extra("*"),
    t.Key("file", default=None): t.Null
    | t.Dict({
        t.Key("path"): DirPathTrafaret(auto_create=True),
        t.Key("filename"): t.String,
        t.Key("backup-count", default=5): t.ToInt[1:100],
        t.Key("rotation-size", default="10M"): SimpleBinarySizeTrafaret,
        t.Key("format", default=LogFormat.VERBOSE): LogFormat.as_trafaret(),
    }).allow_extra("*"),
    t.Key("logstash", default=None): t.Null
    | t.Dict({
        # Either a (host, port) tuple or a {"host": ..., "port": ...} mapping.
        t.Key("endpoint"): t.Tuple(t.String, t.ToInt[1:65535])
        | t.Dict({
            t.Key("host"): t.String,
            t.Key("port"): t.ToInt[1:65535],
        }),
        t.Key("protocol", default="tcp"): t.Enum("zmq.push", "zmq.pub", "tcp", "udp"),
        t.Key("ssl-enabled", default=True): t.ToBool,
        t.Key("ssl-verify", default=True): t.ToBool,
        # NOTE: logstash does not have format option.
    }).allow_extra("*"),
    t.Key("graylog", default=None): t.Null
    | t.Dict({
        t.Key("host"): t.String,
        t.Key("port"): t.ToInt[1024:65535],
        t.Key("level", default=LogLevel.INFO): LogLevel.as_trafaret(),
        t.Key("ssl-verify", default=False): t.Bool,
        t.Key("ca-certs", default=None): t.Null | t.String(allow_blank=True),
        t.Key("keyfile", default=None): t.Null | t.String(allow_blank=True),
        t.Key("certfile", default=None): t.Null | t.String(allow_blank=True),
        # fqdn/localname control the "source" host name reported to Graylog;
        # an explicit localname takes precedence (see setup_graylog_handler).
        t.Key("fqdn", default=True): t.ToBool,
        t.Key("localname", default=None): t.Null | t.String(),
    }).allow_extra("*"),
}).allow_extra("*")
|
|
77
|
+
|
|
78
|
+
|
|
79
|
+
def override_key(table: MutableMapping[str, Any], key_path: tuple[str, ...], value: Any):
    """Assign *value* at the nested *key_path* inside *table*.

    Intermediate mappings are created on demand, so any prefix of the path
    may be missing from *table*.
    """
    *parent_keys, leaf_key = key_path
    node = table
    for part in parent_keys:
        node = node.setdefault(part, {})
    node[leaf_key] = value
|
|
@@ -0,0 +1,11 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
from typing import Any, Mapping
|
|
4
|
+
|
|
5
|
+
|
|
6
|
+
class ConfigurationError(Exception):
    """Signals an invalid logging configuration.

    The offending part of the configuration is kept in ``invalid_data`` so
    callers can report exactly what failed validation.
    """

    invalid_data: Mapping[str, Any]

    def __init__(self, invalid_data: Mapping[str, Any]) -> None:
        super().__init__(invalid_data)
        self.invalid_data = invalid_data
|
|
@@ -0,0 +1,73 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
import logging
|
|
4
|
+
import pprint
|
|
5
|
+
import time
|
|
6
|
+
from datetime import datetime
|
|
7
|
+
from typing import Any
|
|
8
|
+
|
|
9
|
+
import coloredlogs
|
|
10
|
+
from pythonjsonlogger.jsonlogger import JsonFormatter
|
|
11
|
+
|
|
12
|
+
|
|
13
|
+
def format_exception(self, ei) -> str:
    """Concatenate pre-formatted traceback lines, dropping one trailing newline.

    *ei* is an iterable of strings (e.g. the output of
    ``traceback.format_exception``); *self* is unused and only present so
    this function can serve as an unbound ``formatException`` method.
    """
    text = "".join(ei)
    return text[:-1] if text.endswith("\n") else text
|
|
18
|
+
|
|
19
|
+
|
|
20
|
+
class SerializedExceptionFormatter(logging.Formatter):
    """Formatter for serialized records whose tracebacks omit the trailing newline."""

    def formatException(self, ei) -> str:
        # Delegate to the shared module-level helper.
        return format_exception(self, ei)
|
|
23
|
+
|
|
24
|
+
|
|
25
|
+
class ConsoleFormatter(logging.Formatter):
    """Plain-text console formatter with millisecond-precision timestamps."""

    def formatException(self, ei) -> str:
        return format_exception(self, ei)

    def formatTime(self, record: logging.LogRecord, datefmt: str | None = None) -> str:
        struct = self.converter(record.created)  # type: ignore
        msec_text = f"{int(record.msecs):03d}"
        if not datefmt:
            return time.strftime("%Y-%m-%d %H:%M:%S", struct) + "." + msec_text
        # time.strftime has no %f directive, so substitute milliseconds ourselves.
        return time.strftime(datefmt.replace("%f", msec_text), struct)
|
|
37
|
+
|
|
38
|
+
|
|
39
|
+
class CustomJsonFormatter(JsonFormatter):
    """JSON formatter that guarantees ``timestamp`` and ``level`` fields.

    ``timestamp`` is derived from the record's own creation time in UTC and
    ``level`` is normalized to the upper-cased level name.
    """

    def formatException(self, ei) -> str:
        return format_exception(self, ei)

    def add_fields(
        self,
        log_record: dict[str, Any],  # the manipulated entry object
        record: logging.LogRecord,  # the source log record
        message_dict: dict[str, Any],
    ) -> None:
        super().add_fields(log_record, record, message_dict)
        if not log_record.get("timestamp"):
            from datetime import timezone  # local: the module import header lies outside this block

            # BUGFIX: derive the timestamp from record.created so it matches
            # the event exactly (the previous datetime.utcnow() call sampled
            # "now" instead and is deprecated since Python 3.12).
            created = datetime.fromtimestamp(record.created, tz=timezone.utc)
            log_record["timestamp"] = created.strftime("%Y-%m-%dT%H:%M:%S.%fZ")
        if loglevel := log_record.get("level"):
            log_record["level"] = loglevel.upper()
        else:
            log_record["level"] = record.levelname.upper()
|
|
58
|
+
|
|
59
|
+
|
|
60
|
+
class ColorizedFormatter(coloredlogs.ColoredFormatter):
    """Colored console formatter whose tracebacks omit the trailing newline."""

    def __init__(self, *args, **kwargs) -> None:
        super().__init__(*args, **kwargs)
        # NOTE(review): this patches logging.Formatter.formatException
        # process-wide (coloredlogs.logging is the stdlib logging module),
        # affecting every formatter — presumably intentional; confirm.
        coloredlogs.logging.Formatter.formatException = format_exception
|
|
64
|
+
|
|
65
|
+
|
|
66
|
+
class pretty:
    """Defer pretty-formatting of an object until the log record is rendered.

    Wrapping an argument in ``pretty(...)`` makes ``%r``-style interpolation
    use :func:`pprint.pformat` instead of the plain ``repr``.
    """

    def __init__(self, obj: Any) -> None:
        self.obj = obj

    def __repr__(self) -> str:
        return pprint.pformat(self.obj)
|
|
File without changes
|
|
@@ -0,0 +1,71 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
import logging
|
|
4
|
+
import socket
|
|
5
|
+
import ssl
|
|
6
|
+
from typing import Any, Mapping, Optional
|
|
7
|
+
|
|
8
|
+
import graypy
|
|
9
|
+
|
|
10
|
+
|
|
11
|
+
class GELFTLSHandler(graypy.GELFTLSHandler):
    """graypy TLS handler that builds its socket from a modern ``ssl.SSLContext``."""

    ssl_ctx: ssl.SSLContext

    def __init__(self, host, port=12204, validate=False, ca_certs=None, **kwargs) -> None:
        """Initialize the GELFTLSHandler

        :param host: GELF TLS input host.
        :type host: str

        :param port: GELF TLS input port.
        :type port: int

        :param validate: If :obj:`True`, validate the Graylog server's
            certificate. In this case specifying ``ca_certs`` is also
            required.
        :type validate: bool

        :param ca_certs: Path to CA bundle file.
        :type ca_certs: str
        """
        super().__init__(host, port=port, validate=validate, **kwargs)
        # BUGFIX: ca_certs is documented as a CA bundle *file*, so it must be
        # passed as cafile=; capath= expects a directory of individually
        # hashed certificates and silently ignores a regular file path,
        # breaking certificate validation.
        self.ssl_ctx = ssl.create_default_context(cafile=ca_certs)
        if not validate:
            self.ssl_ctx.check_hostname = False
            self.ssl_ctx.verify_mode = ssl.CERT_NONE

    def makeSocket(self, timeout: float = 1):
        """Create a TLS wrapped socket connected to the Graylog endpoint."""
        plain_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)

        if hasattr(plain_socket, "settimeout"):
            plain_socket.settimeout(timeout)

        wrapped_socket = self.ssl_ctx.wrap_socket(
            plain_socket,
            server_hostname=self.host,
        )
        wrapped_socket.connect((self.host, self.port))

        return wrapped_socket
|
|
52
|
+
|
|
53
|
+
|
|
54
|
+
def setup_graylog_handler(config: Mapping[str, Any]) -> Optional[logging.Handler]:
    """Build a TLS Graylog handler from the validated ``graylog`` config section."""
    drv_config = config["graylog"]
    params: dict[str, Any] = {
        "host": drv_config["host"],
        "port": drv_config["port"],
        "validate": drv_config["ssl-verify"],
        "ca_certs": drv_config["ca-certs"],
        "keyfile": drv_config["keyfile"],
        "certfile": drv_config["certfile"],
    }
    # The "source" host reported to Graylog: an explicit localname takes
    # precedence over FQDN auto-detection.
    if drv_config["localname"]:
        params["localname"] = drv_config["localname"]
    else:
        params["fqdn"] = drv_config["fqdn"]

    handler = GELFTLSHandler(**params)
    handler.setLevel(config["level"])
    return handler
|
|
@@ -0,0 +1,66 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
import logging
|
|
4
|
+
import sys
|
|
5
|
+
import traceback
|
|
6
|
+
from typing import TYPE_CHECKING, Optional, override
|
|
7
|
+
|
|
8
|
+
import msgpack
|
|
9
|
+
import zmq
|
|
10
|
+
|
|
11
|
+
if TYPE_CHECKING:
|
|
12
|
+
from ..logger import MsgpackOptions
|
|
13
|
+
|
|
14
|
+
|
|
15
|
+
class RelayHandler(logging.Handler):
    """Forwards serialized log records to the central log worker over ZeroMQ.

    When no endpoint is configured, records fall back to plain stderr output.
    Emitting ``None`` sends the shutdown sentinel understood by the worker.
    """

    _sock: zmq.Socket | None

    def __init__(self, *, endpoint: str, msgpack_options: MsgpackOptions) -> None:
        super().__init__()
        self.endpoint = endpoint
        self.msgpack_options = msgpack_options
        self._zctx = zmq.Context()
        # We should use PUSH-PULL socket pairs to avoid
        # loss of synchronization sentinel messages.
        if endpoint:
            self._sock = self._zctx.socket(zmq.PUSH)
            assert self._sock is not None
            self._sock.setsockopt(zmq.LINGER, 100)
            self._sock.connect(self.endpoint)
        else:
            self._sock = None

    def close(self) -> None:
        # Close the socket before terminating the context so term() does not
        # block on a still-open socket.
        if self._sock is not None:
            self._sock.close()
        self._zctx.term()

    def _fallback(self, record: Optional[logging.LogRecord]) -> None:
        # Last-resort output path used when there is no socket or sending fails.
        if record is None:
            return
        print(record.getMessage(), file=sys.stderr)

    @override
    def emit(self, record: Optional[logging.LogRecord]) -> None:
        if self._sock is None:
            self._fallback(record)
            return
        # record may be None to signal shutdown.
        if record:
            # Only picklable/plain fields are forwarded; the worker rebuilds a
            # LogRecord from this dict via logging.makeLogRecord().
            log_body = {
                "name": record.name,
                "pathname": record.pathname,
                "lineno": record.lineno,
                "msg": record.getMessage(),
                "levelno": record.levelno,
                "levelname": record.levelname,
            }
            if record.exc_info:
                # Tracebacks are pre-rendered to strings since frame objects
                # cannot be serialized.
                log_body["exc_info"] = traceback.format_exception(*record.exc_info)
        else:
            # A None payload msgpack-encodes to an empty/falsy value, which the
            # worker interprets as the shutdown sentinel.
            log_body = None
        try:
            serialized_record = msgpack.packb(log_body, **self.msgpack_options["pack_opts"])
            self._sock.send(serialized_record)
        except zmq.ZMQError:
            self._fallback(record)
|
|
@@ -0,0 +1,108 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
import json
|
|
4
|
+
import logging
|
|
5
|
+
import socket
|
|
6
|
+
import ssl
|
|
7
|
+
from collections import OrderedDict
|
|
8
|
+
from datetime import datetime
|
|
9
|
+
from typing import override
|
|
10
|
+
|
|
11
|
+
import zmq
|
|
12
|
+
|
|
13
|
+
from ..exceptions import ConfigurationError
|
|
14
|
+
|
|
15
|
+
|
|
16
|
+
class LogstashHandler(logging.Handler):
    """Ships log records to a Logstash endpoint over ZeroMQ, TCP, or UDP.

    The transport is created lazily on the first ``emit()`` call and reused
    afterwards; ``cleanup()`` must be called to release it.
    """

    _sock: socket.socket | zmq.Socket | None
    _sslctx: ssl.SSLContext | None
    _zmqctx: zmq.Context | None

    def __init__(
        self,
        endpoint: tuple[str, int],
        protocol: str,
        *,
        ssl_enabled: bool = True,
        ssl_verify: bool = True,
        myhost: str | None = None,
    ) -> None:
        super().__init__()
        self._endpoint = endpoint
        self._protocol = protocol
        self._ssl_enabled = ssl_enabled  # only honored by the "tcp" protocol
        self._ssl_verify = ssl_verify
        self._myhost = myhost  # reported as the "host" field of each event
        self._sock = None
        self._sslctx = None
        self._zmqctx = None

    def _setup_transport(self) -> None:
        # Lazy and idempotent: keep the existing connection once established.
        if self._sock is not None:
            return
        if self._protocol == "zmq.push":
            self._zmqctx = zmq.Context()
            zsock = self._zmqctx.socket(zmq.PUSH)
            zsock.setsockopt(zmq.LINGER, 50)
            zsock.setsockopt(zmq.SNDHWM, 20)
            zsock.connect(f"tcp://{self._endpoint[0]}:{self._endpoint[1]}")
            self._sock = zsock
        elif self._protocol == "zmq.pub":
            self._zmqctx = zmq.Context()
            zsock = self._zmqctx.socket(zmq.PUB)
            zsock.setsockopt(zmq.LINGER, 50)
            zsock.setsockopt(zmq.SNDHWM, 20)
            zsock.connect(f"tcp://{self._endpoint[0]}:{self._endpoint[1]}")
            self._sock = zsock
        elif self._protocol == "tcp":
            sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
            if self._ssl_enabled:
                self._sslctx = ssl.create_default_context()
                self._sslctx.minimum_version = ssl.TLSVersion.TLSv1_2
                if not self._ssl_verify:
                    self._sslctx.check_hostname = False
                    self._sslctx.verify_mode = ssl.CERT_NONE
                sock = self._sslctx.wrap_socket(sock, server_hostname=self._endpoint[0])
            sock.connect((self._endpoint[0], self._endpoint[1]))
            self._sock = sock
        elif self._protocol == "udp":
            sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
            # connect() on a UDP socket only fixes the default destination.
            sock.connect((self._endpoint[0], self._endpoint[1]))
            self._sock = sock
        else:
            raise ConfigurationError({
                "logging.LogstashHandler": f"unsupported protocol: {self._protocol}"
            })

    def cleanup(self) -> None:
        # NOTE(review): _sock is not reset to None here, so a later emit()
        # would reuse the closed socket — confirm cleanup() is only called at
        # shutdown.
        if self._sock:
            self._sock.close()
        self._sslctx = None
        if self._zmqctx:
            self._zmqctx.term()

    @override
    def emit(self, record: logging.LogRecord) -> None:
        self._setup_transport()
        tags: set[str] = set()

        # This log format follows logstash's event format.
        log = OrderedDict([
            ("@timestamp", datetime.now().isoformat()),
            ("@version", 1),
            ("host", self._myhost),
            ("logger", record.name),
            ("path", record.pathname),
            ("func", record.funcName),
            ("lineno", record.lineno),
            ("message", record.getMessage()),
            ("level", record.levelname),
            ("tags", list(tags)),
        ])
        if self._protocol.startswith("zmq"):
            assert isinstance(self._sock, zmq.Socket)
            self._sock.send_json(log)
        else:
            # TODO: reconnect if disconnected
            # NOTE(review): TCP payloads carry no delimiter between events —
            # confirm the Logstash input codec expects this framing.
            assert isinstance(self._sock, socket.socket)
            self._sock.sendall(json.dumps(log).encode("utf-8"))
|
|
@@ -0,0 +1,348 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
import logging
|
|
4
|
+
import logging.config
|
|
5
|
+
import logging.handlers
|
|
6
|
+
import os
|
|
7
|
+
import sys
|
|
8
|
+
import threading
|
|
9
|
+
from collections.abc import Mapping, MutableMapping
|
|
10
|
+
from contextvars import ContextVar
|
|
11
|
+
from pathlib import Path
|
|
12
|
+
from typing import Any, Self, TypedDict, override
|
|
13
|
+
|
|
14
|
+
import msgpack
|
|
15
|
+
import yarl
|
|
16
|
+
import zmq
|
|
17
|
+
|
|
18
|
+
from .abc import AbstractLogger
|
|
19
|
+
from .config import logging_config_iv, override_key
|
|
20
|
+
from .exceptions import ConfigurationError
|
|
21
|
+
from .formatter import (
|
|
22
|
+
ColorizedFormatter,
|
|
23
|
+
ConsoleFormatter,
|
|
24
|
+
CustomJsonFormatter,
|
|
25
|
+
SerializedExceptionFormatter,
|
|
26
|
+
)
|
|
27
|
+
from .handler.intrinsic import RelayHandler
|
|
28
|
+
|
|
29
|
+
is_active: ContextVar[bool] = ContextVar("is_active", default=False)
|
|
30
|
+
|
|
31
|
+
|
|
32
|
+
def _check_driver_config_exists_if_activated(cfg, driver):
|
|
33
|
+
if driver in cfg["drivers"] and cfg[driver] is None:
|
|
34
|
+
raise ConfigurationError({"logging": f"{driver} driver is activated but no config given."})
|
|
35
|
+
|
|
36
|
+
|
|
37
|
+
class MsgpackOptions(TypedDict):
    """Keyword-argument bundles forwarded to ``msgpack.packb``/``msgpack.unpackb``."""

    # Passed as **kwargs to msgpack.packb() when serializing records.
    pack_opts: Mapping[str, Any]
    # Passed as **kwargs to msgpack.unpackb() when deserializing records.
    unpack_opts: Mapping[str, Any]
|
|
41
|
+
|
|
42
|
+
class NoopLogger(AbstractLogger):
    """A do-nothing logger lifecycle for contexts where logging setup must be skipped."""

    def __init__(
        self,
        logging_config: MutableMapping[str, Any],
    ) -> None:
        # The configuration is intentionally ignored.
        pass

    @override
    def __enter__(self) -> Self:
        return self

    @override
    def __exit__(self, *exc_info_args) -> bool | None:
        return None
|
|
56
|
+
|
|
57
|
+
|
|
58
|
+
class LocalLogger(AbstractLogger):
    """In-process logger setup that attaches console/file handlers directly.

    Unlike :class:`Logger`, no relay thread or network serialization is
    involved, so this suits single-process tools and tests.
    """

    def __init__(
        self,
        logging_config: MutableMapping[str, Any],
    ) -> None:
        cfg = logging_config_iv.check(logging_config)
        _check_driver_config_exists_if_activated(cfg, "console")
        self.logging_config = cfg
        # Only the console and file drivers are supported in local mode.
        log_handlers = []
        if "console" in self.logging_config["drivers"]:
            console_handler = setup_console_log_handler(self.logging_config)
            log_handlers.append(console_handler)
        if "file" in self.logging_config["drivers"]:
            file_handler = setup_file_log_handler(self.logging_config)
            log_handlers.append(file_handler)
        # Declare the logger tree (levels only) via dictConfig, then attach
        # the concrete handler objects afterwards.
        self.log_config = {
            "version": 1,
            "disable_existing_loggers": False,
            "handlers": {
                "null": {"class": "logging.NullHandler"},
            },
            "loggers": {
                "": {
                    "handlers": [],
                    "level": cfg["level"],
                },
                **{
                    k: {
                        "handlers": [],
                        "level": v,
                        # Keep namespaced records from reaching the root twice.
                        "propagate": False,
                    }
                    for k, v in cfg["pkg-ns"].items()
                },
            },
        }
        logging.config.dictConfig(self.log_config)
        root_logger = logging.getLogger(None)
        for h in log_handlers:
            root_logger.addHandler(h)
        # addHandler() de-duplicates, so the "" pkg-ns entry overlapping the
        # root logger does not double-attach handlers.
        for pkg_ns in cfg["pkg-ns"].keys():
            ns_logger = logging.getLogger(pkg_ns)
            for h in log_handlers:
                ns_logger.addHandler(h)

    @override
    def __enter__(self) -> Self:
        return self

    @override
    def __exit__(self, *exc_info_args) -> bool | None:
        pass
|
|
110
|
+
|
|
111
|
+
|
|
112
|
+
class Logger(AbstractLogger):
    """Multiprocess-aware logger lifecycle.

    Every process publishes serialized records to a ZeroMQ endpoint via
    :class:`RelayHandler`; the master process additionally runs a
    ``log_worker`` thread that pulls those records and fans them out to the
    configured driver handlers.
    """

    is_master: bool
    log_endpoint: str
    logging_config: Mapping[str, Any]
    log_config: dict[str, Any]
    log_worker: threading.Thread

    def __init__(
        self,
        logging_config: MutableMapping[str, Any],
        *,
        is_master: bool,
        log_endpoint: str,
        msgpack_options: MsgpackOptions,
    ) -> None:
        # Legacy environment variables override the file-driver settings
        # before validation.
        if (env_legacy_logfile_path := os.environ.get("BACKEND_LOG_FILE", None)) is not None:
            p = Path(env_legacy_logfile_path)
            override_key(logging_config, ("file", "path"), p.parent)
            override_key(logging_config, ("file", "filename"), p.name)
        if (env_legacy_backup_count := os.environ.get("BACKEND_LOG_FILE_COUNT", None)) is not None:
            override_key(logging_config, ("file", "backup-count"), env_legacy_backup_count)
        if (env_legacy_logfile_size := os.environ.get("BACKEND_LOG_FILE_SIZE", None)) is not None:
            # The legacy size is interpreted as MiB.
            legacy_logfile_size = f"{env_legacy_logfile_size}M"
            override_key(logging_config, ("file", "rotation-size"), legacy_logfile_size)

        cfg = logging_config_iv.check(logging_config)

        _check_driver_config_exists_if_activated(cfg, "console")
        _check_driver_config_exists_if_activated(cfg, "file")
        _check_driver_config_exists_if_activated(cfg, "logstash")
        _check_driver_config_exists_if_activated(cfg, "graylog")

        self.is_master = is_master
        self.msgpack_options = msgpack_options
        self.log_endpoint = log_endpoint
        self.logging_config = cfg
        # Only the logger tree with levels is declared here; the relay
        # handler is injected later in __enter__.
        self.log_config = {
            "version": 1,
            "disable_existing_loggers": False,
            "handlers": {
                "null": {"class": "logging.NullHandler"},
            },
            "loggers": {
                "": {"handlers": [], "level": cfg["level"]},
                **{
                    k: {"handlers": [], "level": v, "propagate": False}
                    for k, v in cfg["pkg-ns"].items()
                },
            },
        }

    @override
    def __enter__(self) -> Self:
        # Every logger relays its records to the single worker endpoint.
        self.log_config["handlers"]["relay"] = {
            "class": "ai.backend.logging.handler.intrinsic.RelayHandler",
            "level": self.logging_config["level"],
            "endpoint": self.log_endpoint,
            "msgpack_options": self.msgpack_options,
        }
        for _logger in self.log_config["loggers"].values():
            _logger["handlers"].append("relay")
        logging.config.dictConfig(self.log_config)
        self._is_active_token = is_active.set(True)
        if self.is_master and self.log_endpoint:
            # The relay handler instantiated by dictConfig is the root
            # logger's first handler.
            self.relay_handler = logging.getLogger("").handlers[0]
            self.ready_event = threading.Event()
            assert isinstance(self.relay_handler, RelayHandler)
            self.log_worker = threading.Thread(
                target=log_worker,
                name="Logger",
                args=(
                    self.logging_config,
                    os.getpid(),
                    self.log_endpoint,
                    self.ready_event,
                    self.msgpack_options,
                ),
            )
            self.log_worker.start()
            # Block until the worker has bound the endpoint so early records
            # are not dropped.
            self.ready_event.wait()
        return self

    @override
    def __exit__(self, *exc_info_args) -> bool | None:
        # Resetting generates "different context" errors.
        # Since practically we only need to check activeness in alembic scripts
        # and it should be active until the program terminates,
        # just leave it as-is.
        # NOTE(review): the comment above contradicts the reset call below —
        # confirm which behavior is intended.
        is_active.reset(self._is_active_token)
        if self.is_master and self.log_endpoint:
            assert isinstance(self.relay_handler, RelayHandler)
            # Emitting None sends the shutdown sentinel to the worker thread.
            self.relay_handler.emit(None)
            self.log_worker.join()
            self.relay_handler.close()
            # Remove the leftover IPC socket file, if any.
            ep_url = yarl.URL(self.log_endpoint)
            if ep_url.scheme.lower() == "ipc" and (ep_sock := Path(ep_url.path)).exists():
                ep_sock.unlink()
        return None
|
|
210
|
+
|
|
211
|
+
|
|
212
|
+
def setup_console_log_handler(config: Mapping[str, Any]) -> logging.Handler:
    """Build a stderr stream handler from the validated ``console`` config section."""
    log_formats = {
        "simple": "%(levelname)s %(message)s",
        "verbose": "%(asctime)s %(levelname)s %(name)s [%(process)d] %(message)s",
    }
    drv_config = config["console"]
    console_formatter: logging.Formatter
    colored = drv_config["colored"]
    if colored is None:
        # Auto-detect: colorize only when stderr is an interactive terminal.
        colored = sys.stderr.isatty()
    if colored:
        console_formatter = ColorizedFormatter(
            log_formats[drv_config["format"]],
            datefmt="%Y-%m-%d %H:%M:%S.%f",  # coloredlogs has intrinsic support for msec
            field_styles={
                "levelname": {"color": 248, "bold": True},
                "name": {"color": 246, "bold": False},
                "process": {"color": "cyan"},
                "asctime": {"color": 240},
            },
            level_styles={
                "debug": {"color": "green"},
                "verbose": {"color": "green", "bright": True},
                "info": {"color": "cyan", "bright": True},
                "notice": {"color": "cyan", "bold": True},
                "warning": {"color": "yellow"},
                "error": {"color": "red", "bright": True},
                "success": {"color": 77},
                "critical": {"background": "red", "color": 255, "bold": True},
            },
        )
    else:
        console_formatter = ConsoleFormatter(
            log_formats[drv_config["format"]],
            datefmt="%Y-%m-%d %H:%M:%S.%f",
        )
    console_handler = logging.StreamHandler(
        stream=sys.stderr,
    )
    console_handler.setLevel(config["level"])
    console_handler.setFormatter(console_formatter)
    return console_handler
|
|
254
|
+
|
|
255
|
+
|
|
256
|
+
def setup_file_log_handler(config: Mapping[str, Any]) -> logging.Handler:
    """Build a size-rotated, JSON-formatted file handler from the ``file`` config section."""
    drv_config = config["file"]
    json_fields = "%(timestamp) %(level) %(name) %(processName) %(message)"
    handler = logging.handlers.RotatingFileHandler(
        filename=drv_config["path"] / drv_config["filename"],
        backupCount=drv_config["backup-count"],
        maxBytes=drv_config["rotation-size"],
        encoding="utf-8",
    )
    handler.setLevel(config["level"])
    handler.setFormatter(CustomJsonFormatter(json_fields))
    return handler
|
|
268
|
+
|
|
269
|
+
|
|
270
|
+
def log_worker(
    logging_config: Mapping[str, Any],
    parent_pid: int,
    log_endpoint: str,
    ready_event: threading.Event,
    msgpack_options: MsgpackOptions,
) -> None:
    """Run the log-collector loop for the logging worker.

    Binds a ZeroMQ PULL socket at *log_endpoint*, receives msgpack-serialized
    log record dicts relayed from other processes, rebuilds them with
    ``logging.makeLogRecord()``, and fans each record out to the handlers
    enabled in ``logging_config["drivers"]`` (console / file / logstash /
    graylog).  ``ready_event`` is set once the socket is bound so the caller
    knows it may start relaying.  An empty message acts as the shutdown
    sentinel.

    NOTE(review): ``parent_pid`` is not referenced anywhere in this body —
    presumably kept for interface compatibility with the worker spawner;
    confirm before removing.
    """
    console_handler = None
    file_handler = None
    logstash_handler = None
    graylog_handler = None

    # For future references: when implementing new kind of logging adapters,
    # make sure to adapt our custom `Formatter.formatException()` approach;
    # Otherwise it won't print out EXCEPTION level log (along with the traceback).
    if "console" in logging_config["drivers"]:
        console_handler = setup_console_log_handler(logging_config)

    if "file" in logging_config["drivers"]:
        file_handler = setup_file_log_handler(logging_config)

    if "logstash" in logging_config["drivers"]:
        # Imported lazily so the optional driver costs nothing when unused.
        from .handler.logstash import LogstashHandler

        drv_config = logging_config["logstash"]
        logstash_handler = LogstashHandler(
            endpoint=drv_config["endpoint"],
            protocol=drv_config["protocol"],
            ssl_enabled=drv_config["ssl-enabled"],
            ssl_verify=drv_config["ssl-verify"],
            myhost="hostname",  # TODO: implement
        )
        logstash_handler.setLevel(logging_config["level"])
        logstash_handler.setFormatter(SerializedExceptionFormatter())

    if "graylog" in logging_config["drivers"]:
        # Imported lazily so the optional driver costs nothing when unused.
        from .handler.graylog import setup_graylog_handler

        graylog_handler = setup_graylog_handler(logging_config)
        assert graylog_handler is not None
        graylog_handler.setFormatter(SerializedExceptionFormatter())

    zctx = zmq.Context()
    agg_sock = zctx.socket(zmq.PULL)
    agg_sock.bind(log_endpoint)
    ep_url = yarl.URL(log_endpoint)
    if ep_url.scheme.lower() == "ipc":
        # Allow any local process to connect to the IPC socket and relay logs.
        os.chmod(ep_url.path, 0o777)
    try:
        ready_event.set()
        while True:
            data = agg_sock.recv()
            if not data:
                # Empty frame: shutdown sentinel — exit via the finally block.
                return
            unpacked_data = msgpack.unpackb(data, **msgpack_options["unpack_opts"])
            if not unpacked_data:
                # A falsy payload also terminates the loop (cleanup below).
                break
            rec = logging.makeLogRecord(unpacked_data)
            if rec is None:
                break
            if console_handler:
                console_handler.emit(rec)
            try:
                if file_handler:
                    file_handler.emit(rec)
                if logstash_handler:
                    logstash_handler.emit(rec)
                if graylog_handler:
                    graylog_handler.emit(rec)
            except OSError:
                # don't terminate the log worker.
                continue
    finally:
        if logstash_handler:
            logstash_handler.cleanup()
        if graylog_handler:
            graylog_handler.close()
        agg_sock.close()
        zctx.term()
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
placeholder
|
|
@@ -0,0 +1,178 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
import enum
|
|
4
|
+
from decimal import Decimal
|
|
5
|
+
from pathlib import Path
|
|
6
|
+
from typing import Any, Generic, Self, TypeVar, override
|
|
7
|
+
|
|
8
|
+
import trafaret as t
|
|
9
|
+
|
|
10
|
+
|
|
11
|
+
class CIStrEnum(enum.StrEnum):
|
|
12
|
+
"""
|
|
13
|
+
An StrEnum variant to allow case-insenstive matching of the members while the values are
|
|
14
|
+
lowercased.
|
|
15
|
+
"""
|
|
16
|
+
|
|
17
|
+
@override
|
|
18
|
+
@classmethod
|
|
19
|
+
def _missing_(cls, value: Any) -> Self | None:
|
|
20
|
+
assert isinstance(value, str) # since this is an StrEnum
|
|
21
|
+
value = value.lower()
|
|
22
|
+
# To prevent infinite recursion, we don't rely on "cls(value)" but manually search the
|
|
23
|
+
# members as the official stdlib example suggests.
|
|
24
|
+
for member in cls:
|
|
25
|
+
if member.value == value:
|
|
26
|
+
return member
|
|
27
|
+
return None
|
|
28
|
+
|
|
29
|
+
# The defualt behavior of `enum.auto()` is to set the value to the lowercased member name.
|
|
30
|
+
|
|
31
|
+
@classmethod
|
|
32
|
+
def as_trafaret(cls) -> t.Trafaret:
|
|
33
|
+
return CIStrEnumTrafaret(cls)
|
|
34
|
+
|
|
35
|
+
|
|
36
|
+
class CIUpperStrEnum(CIStrEnum):
    """
    An StrEnum variant that allows case-insensitive matching of the members
    while the values are UPPERCASED.
    """

    @override
    @classmethod
    def _missing_(cls, value: Any) -> Self | None:
        assert isinstance(value, str)  # guaranteed: this is an StrEnum
        needle = value.upper()
        return next((member for member in cls if member.value == needle), None)

    @override
    @staticmethod
    def _generate_next_value_(name, start, count, last_values) -> str:
        # Make `enum.auto()` yield the member name in uppercase.
        return name.upper()

    @classmethod
    def as_trafaret(cls) -> t.Trafaret:
        """Return a case-insensitive trafaret that parses into this enum."""
        return CIUpperStrEnumTrafaret(cls)
|
|
60
|
+
|
|
61
|
+
|
|
62
|
+
# Type variable bound to enum.Enum, shared by the enum-parsing trafarets.
T_enum = TypeVar("T_enum", bound=enum.Enum)
|
|
63
|
+
|
|
64
|
+
|
|
65
|
+
class CIStrEnumTrafaret(t.Trafaret, Generic[T_enum]):
    """
    A case-insensitive trafaret that parses strings into members of a given
    StrEnum class whose values are lowercased (e.g. `CIStrEnum` subclasses).
    """

    def __init__(self, enum_cls: type[T_enum]) -> None:
        # The target enum class whose members this trafaret resolves.
        self.enum_cls = enum_cls

    def check_and_return(self, value: str) -> T_enum:
        normalized = value.lower()  # enum values are assumed lowercased
        try:
            return self.enum_cls(normalized)
        except (KeyError, ValueError):
            self._failure(
                f"value is not a valid member of {self.enum_cls.__name__}", value=value
            )
|
|
79
|
+
|
|
80
|
+
|
|
81
|
+
class CIUpperStrEnumTrafaret(t.Trafaret, Generic[T_enum]):
    """
    A case-insensitive trafaret that parses strings into members of a given
    StrEnum class whose values are UPPERCASED (e.g. `CIUpperStrEnum` subclasses).
    """

    def __init__(self, enum_cls: type[T_enum]) -> None:
        # The target enum class whose members this trafaret resolves.
        self.enum_cls = enum_cls

    def check_and_return(self, value: str) -> T_enum:
        try:
            # Assume that the enum values are UPPERCASED, so normalize the
            # input to uppercase before the member lookup.
            return self.enum_cls(value.upper())
        except (KeyError, ValueError):
            self._failure(f"value is not a valid member of {self.enum_cls.__name__}", value=value)
|
|
95
|
+
|
|
96
|
+
|
|
97
|
+
class LogLevel(CIUpperStrEnum):
    """Log severity levels accepted by the stdlib ``logging`` module."""

    # The logging stdlib only accepts uppercased loglevel names, hence the
    # `CIUpperStrEnum` base.  The values are spelled out explicitly in their
    # canonical uppercase form, which is exactly what `enum.auto()` would
    # produce under `CIUpperStrEnum._generate_next_value_`.
    CRITICAL = "CRITICAL"
    ERROR = "ERROR"
    WARNING = "WARNING"
    INFO = "INFO"
    DEBUG = "DEBUG"
    NOTSET = "NOTSET"
|
|
106
|
+
|
|
107
|
+
|
|
108
|
+
class LogFormat(CIStrEnum):
    """Available console log output formats."""

    # Explicit lowercase values, identical to what `enum.auto()` yields under
    # the `CIStrEnum` base (StrEnum lowercases the member name by default).
    SIMPLE = "simple"
    VERBOSE = "verbose"
|
|
111
|
+
|
|
112
|
+
|
|
113
|
+
class SimpleBinarySizeTrafaret(t.Trafaret):
    """
    Parses human-readable binary size strings (e.g. ``"10m"``, ``"512K"``,
    ``"3 bytes"``) into an integer byte count using power-of-two multipliers
    (1K == 1024).
    """

    # Single-letter scale prefixes mapped to power-of-two multipliers.
    # The " " entry handles inputs like "3 bytes", where stripping the unit
    # ending leaves a trailing space as the "suffix" character.
    suffix_map = {
        "y": 2**80,  # yotta
        "z": 2**70,  # zetta
        "e": 2**60,  # exa
        "p": 2**50,  # peta
        "t": 2**40,  # tera
        "g": 2**30,  # giga
        "m": 2**20,  # mega
        "k": 2**10,  # kilo
        " ": 1,
    }
    # Recognized byte-unit endings, tried in this order (longest first, so
    # "ibytes" is matched before its substrings "ib"/"b").
    endings = ("ibytes", "ibyte", "ib", "bytes", "byte", "b")

    def check_and_return(self, value: str) -> int:
        """Convert *value* into a byte count.

        Raises:
            ValueError: when the input is fractional without a scale suffix,
                uses an unknown suffix, or is otherwise unparseable.
        """
        orig_value = value
        value = value.strip().replace("_", "")
        try:
            # Fast path: a plain integer is a raw byte count.
            return int(value)
        except ValueError:
            value = value.lower()
        dec_expr: Decimal
        try:
            for ending in self.endings:
                if (stem := value.removesuffix(ending)) != value:
                    # A unit ending matched: the character just before it is
                    # the scale prefix; the rest is the numeric part.
                    suffix = stem[-1]
                    dec_expr = Decimal(stem[:-1])
                    break
            else:
                # when there is suffix without scale (e.g., "2K")
                if not str.isnumeric(value[-1]):
                    suffix = value[-1]
                    dec_expr = Decimal(value[:-1])
                else:
                    # has no suffix and is not an integer
                    # -> fractional bytes (e.g., 1.5 byte)
                    raise ValueError("Fractional bytes are not allowed")
        except ArithmeticError:
            # Decimal() raises InvalidOperation (an ArithmeticError subclass)
            # on non-numeric input; surface it uniformly as ValueError.
            raise ValueError("Unconvertible value", orig_value)
        try:
            multiplier = self.suffix_map[suffix]
        except KeyError:
            raise ValueError("Unconvertible value", orig_value)
        return int(dec_expr * multiplier)
|
|
157
|
+
|
|
158
|
+
|
|
159
|
+
class DirPathTrafaret(t.Trafaret):
    """
    Parses a value into a resolved directory `Path`, optionally creating the
    directory (with parents) when it does not exist yet.
    """

    def __init__(
        self,
        *,
        auto_create: bool = False,
    ) -> None:
        super().__init__()
        self._auto_create = auto_create

    def check_and_return(self, value: Any) -> Path:
        try:
            resolved = Path(value).resolve()
        except (TypeError, ValueError):
            # _failure() raises, so control never reaches the checks below.
            self._failure("cannot parse value as a path", value=value)
        if self._auto_create:
            resolved.mkdir(parents=True, exist_ok=True)
        if not resolved.is_dir():
            self._failure("value is not a directory", value=value)
        return resolved
|
|
@@ -0,0 +1,47 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
import logging
|
|
4
|
+
from collections.abc import Iterable
|
|
5
|
+
from typing import Any, LiteralString
|
|
6
|
+
|
|
7
|
+
__all__ = (
|
|
8
|
+
"BraceMessage",
|
|
9
|
+
"BraceStyleAdapter",
|
|
10
|
+
"enforce_debug_logging",
|
|
11
|
+
)
|
|
12
|
+
|
|
13
|
+
|
|
14
|
+
class BraceMessage:
|
|
15
|
+
__slots__ = ("fmt", "args")
|
|
16
|
+
|
|
17
|
+
def __init__(self, fmt: LiteralString, args: tuple[Any, ...]):
|
|
18
|
+
self.fmt = fmt
|
|
19
|
+
self.args = args
|
|
20
|
+
|
|
21
|
+
def __str__(self):
|
|
22
|
+
return self.fmt.format(*self.args)
|
|
23
|
+
|
|
24
|
+
|
|
25
|
+
class BraceStyleAdapter(logging.LoggerAdapter):
    """Logger adapter that lets callers use `{}`-style (brace) formatting."""

    def __init__(self, logger, extra=None):
        super().__init__(logger, extra)

    def log(self, level, msg, *args, **kwargs):
        if not self.isEnabledFor(level):
            return
        processed_msg, processed_kwargs = self.process(msg, kwargs)
        # Bump stacklevel so the record points at the adapter's caller,
        # not at this wrapper method.
        processed_kwargs["stacklevel"] = processed_kwargs.get("stacklevel", 1) + 1
        # Wrap in BraceMessage so formatting happens lazily, and pass an empty
        # args tuple to bypass the stdlib's %-style interpolation.
        self.logger._log(level, BraceMessage(processed_msg, args), (), **processed_kwargs)
|
|
34
|
+
|
|
35
|
+
|
|
36
|
+
def enforce_debug_logging(loggers: Iterable[str]) -> None:
    """Force the DEBUG level on the root handlers and the named loggers.

    Backend.AI's daemon logging attaches all handlers to the root logger
    only, so enabling DEBUG output requires overriding both the root
    handlers' levels and each individual logger's own level.
    """
    for root_handler in logging.getLogger().handlers:
        root_handler.setLevel(logging.DEBUG)
    for logger_name in loggers:
        logging.getLogger(logger_name).setLevel(logging.DEBUG)
|
|
@@ -0,0 +1,45 @@
|
|
|
1
|
+
Metadata-Version: 2.1
|
|
2
|
+
Name: backend.ai-logging
|
|
3
|
+
Version: 24.9.0
|
|
4
|
+
Summary: Backend.AI Logging Subsystem
|
|
5
|
+
Home-page: https://github.com/lablup/backend.ai
|
|
6
|
+
Author: Lablup Inc. and contributors
|
|
7
|
+
License: MIT
|
|
8
|
+
Project-URL: Documentation, https://docs.backend.ai/
|
|
9
|
+
Project-URL: Source, https://github.com/lablup/backend.ai
|
|
10
|
+
Classifier: Intended Audience :: Developers
|
|
11
|
+
Classifier: Operating System :: MacOS :: MacOS X
|
|
12
|
+
Classifier: Operating System :: POSIX :: Linux
|
|
13
|
+
Classifier: Programming Language :: Python
|
|
14
|
+
Classifier: Programming Language :: Python :: 3
|
|
15
|
+
Classifier: Environment :: No Input/Output (Daemon)
|
|
16
|
+
Classifier: Topic :: Scientific/Engineering
|
|
17
|
+
Classifier: Topic :: Software Development
|
|
18
|
+
Classifier: Development Status :: 5 - Production/Stable
|
|
19
|
+
Classifier: Programming Language :: Python :: 3.12
|
|
20
|
+
Classifier: License :: OSI Approved :: MIT License
|
|
21
|
+
Requires-Python: >=3.12,<3.13
|
|
22
|
+
Description-Content-Type: text/markdown
|
|
23
|
+
Requires-Dist: coloredlogs~=15.0
|
|
24
|
+
Requires-Dist: graypy==2.1.0
|
|
25
|
+
Requires-Dist: msgpack~=1.1.0
|
|
26
|
+
Requires-Dist: python-json-logger>=2.0.1
|
|
27
|
+
Requires-Dist: pyzmq~=26.2
|
|
28
|
+
Requires-Dist: trafaret~=2.1
|
|
29
|
+
Requires-Dist: yarl~=1.13.1
|
|
30
|
+
|
|
31
|
+
Backend.AI Logging Subsystem
|
|
32
|
+
============================
|
|
33
|
+
|
|
34
|
+
Package Structure
|
|
35
|
+
-----------------
|
|
36
|
+
|
|
37
|
+
* `ai.backend.logging`
|
|
38
|
+
- `abc`: Abstract base classes
|
|
39
|
+
- `logger`: The core logging facility
|
|
40
|
+
- `Logger`: The standard multiprocess-friendly logger using `RelayHandler` based on ZeroMQ
|
|
41
|
+
- `LocalLogger`: A minimalized console/file logger that does not require serialization via networks at all
|
|
42
|
+
- `handler`: Collection of vendor-specific handler implementations
|
|
43
|
+
- `formatter`: Collection of formatters
|
|
44
|
+
- `types`: Definition of enums/types like `LogLevel`
|
|
45
|
+
- `utils`: Brace-style message formatting adapters and other extras
|
|
@@ -0,0 +1,24 @@
|
|
|
1
|
+
MANIFEST.in
|
|
2
|
+
backend_shim.py
|
|
3
|
+
setup.py
|
|
4
|
+
ai/backend/logging/VERSION
|
|
5
|
+
ai/backend/logging/__init__.py
|
|
6
|
+
ai/backend/logging/abc.py
|
|
7
|
+
ai/backend/logging/config.py
|
|
8
|
+
ai/backend/logging/exceptions.py
|
|
9
|
+
ai/backend/logging/formatter.py
|
|
10
|
+
ai/backend/logging/logger.py
|
|
11
|
+
ai/backend/logging/py.typed
|
|
12
|
+
ai/backend/logging/types.py
|
|
13
|
+
ai/backend/logging/utils.py
|
|
14
|
+
ai/backend/logging/handler/__init__.py
|
|
15
|
+
ai/backend/logging/handler/graylog.py
|
|
16
|
+
ai/backend/logging/handler/intrinsic.py
|
|
17
|
+
ai/backend/logging/handler/logstash.py
|
|
18
|
+
backend.ai_logging.egg-info/PKG-INFO
|
|
19
|
+
backend.ai_logging.egg-info/SOURCES.txt
|
|
20
|
+
backend.ai_logging.egg-info/dependency_links.txt
|
|
21
|
+
backend.ai_logging.egg-info/namespace_packages.txt
|
|
22
|
+
backend.ai_logging.egg-info/not-zip-safe
|
|
23
|
+
backend.ai_logging.egg-info/requires.txt
|
|
24
|
+
backend.ai_logging.egg-info/top_level.txt
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
ai
|
|
@@ -0,0 +1,31 @@
|
|
|
1
|
+
|
|
2
|
+
# DO NOT EDIT THIS FILE -- AUTOGENERATED BY PANTS
# Build shim: invokes the setuptools legacy PEP 517 backend to produce the
# wheel and/or sdist into `dist/` and prints the resulting artifact paths.

import errno
import os
import setuptools.build_meta

# The legacy backend falls back to executing setup.py, which Pants generates
# alongside this shim.
backend = setuptools.build_meta.__legacy__

dist_dir = "dist/"
build_wheel = True
build_sdist = True
wheel_config_settings = {
}
sdist_config_settings = {
}

# Python 2.7 doesn't have the exist_ok arg on os.makedirs().
try:
    os.makedirs(dist_dir)
except OSError as e:
    if e.errno != errno.EEXIST:
        raise

# Each build_* call returns the basename of the produced artifact.
wheel_path = backend.build_wheel(dist_dir, wheel_config_settings) if build_wheel else None
sdist_path = backend.build_sdist(dist_dir, sdist_config_settings) if build_sdist else None

if wheel_path:
    print("wheel: {wheel_path}".format(wheel_path=wheel_path))
if sdist_path:
    print("sdist: {sdist_path}".format(sdist_path=sdist_path))
|
|
@@ -0,0 +1,72 @@
|
|
|
1
|
+
|
|
2
|
+
# DO NOT EDIT THIS FILE -- AUTOGENERATED BY PANTS
# Target: src/ai/backend/logging:dist

from setuptools import setup

# Packaging metadata for the backend.ai-logging distribution, expanded from
# the Pants target definition.
setup(**{
    'author': 'Lablup Inc. and contributors',
    'classifiers': [
        'Intended Audience :: Developers',
        'Operating System :: MacOS :: MacOS X',
        'Operating System :: POSIX :: Linux',
        'Programming Language :: Python',
        'Programming Language :: Python :: 3',
        'Environment :: No Input/Output (Daemon)',
        'Topic :: Scientific/Engineering',
        'Topic :: Software Development',
        'Development Status :: 5 - Production/Stable',
        'Programming Language :: Python :: 3.12',
        'License :: OSI Approved :: MIT License',
    ],
    'description': 'Backend.AI Logging Subsystem',
    'install_requires': (
        'coloredlogs~=15.0',
        'graypy==2.1.0',
        'msgpack~=1.1.0',
        'python-json-logger>=2.0.1',
        'pyzmq~=26.2',
        'trafaret~=2.1',
        'yarl~=1.13.1',
    ),
    'license': 'MIT',
    'long_description': """Backend.AI Logging Subsystem
============================

Package Structure
-----------------

* `ai.backend.logging`
  - `abc`: Abstract base classes
  - `logger`: The core logging facility
    - `Logger`: The standard multiprocess-friendly logger using `RelayHandler` based on ZeroMQ
    - `LocalLogger`: A minimalized console/file logger that does not require serialization via networks at all
  - `handler`: Collection of vendor-specific handler implementations
  - `formatter`: Collection of formatters
  - `types`: Definition of enums/types like `LogLevel`
  - `utils`: Brace-style message formatting adapters and other extras
""",
    'long_description_content_type': 'text/markdown',
    'name': 'backend.ai-logging',
    'namespace_packages': (
    ),
    'package_data': {
        'ai.backend.logging': (
            'VERSION',
            'py.typed',
        ),
    },
    'packages': (
        'ai.backend.logging',
        'ai.backend.logging.handler',
    ),
    'project_urls': {
        'Documentation': 'https://docs.backend.ai/',
        'Source': 'https://github.com/lablup/backend.ai',
    },
    'python_requires': '>=3.12,<3.13',
    'url': 'https://github.com/lablup/backend.ai',
    # NOTE(review): the version carries a trailing newline and the segment
    # "09", which PEP 440 normalizes to "24.9.0" (matching PKG-INFO) —
    # generated by Pants; confirm upstream before changing.
    'version': """24.09.0
""",
    'zip_safe': False,
})
|