logxpy 0.1.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- logxpy/__init__.py +126 -0
- logxpy/_action.py +958 -0
- logxpy/_async.py +186 -0
- logxpy/_base.py +80 -0
- logxpy/_compat.py +71 -0
- logxpy/_config.py +45 -0
- logxpy/_dest.py +88 -0
- logxpy/_errors.py +58 -0
- logxpy/_fmt.py +68 -0
- logxpy/_generators.py +136 -0
- logxpy/_mask.py +23 -0
- logxpy/_message.py +195 -0
- logxpy/_output.py +517 -0
- logxpy/_pool.py +93 -0
- logxpy/_traceback.py +126 -0
- logxpy/_types.py +71 -0
- logxpy/_util.py +56 -0
- logxpy/_validation.py +486 -0
- logxpy/_version.py +21 -0
- logxpy/cli.py +61 -0
- logxpy/dask.py +172 -0
- logxpy/decorators.py +268 -0
- logxpy/filter.py +124 -0
- logxpy/journald.py +88 -0
- logxpy/json.py +149 -0
- logxpy/loggerx.py +253 -0
- logxpy/logwriter.py +84 -0
- logxpy/parse.py +191 -0
- logxpy/prettyprint.py +173 -0
- logxpy/serializers.py +36 -0
- logxpy/stdlib.py +23 -0
- logxpy/tai64n.py +45 -0
- logxpy/testing.py +472 -0
- logxpy/tests/__init__.py +9 -0
- logxpy/tests/common.py +36 -0
- logxpy/tests/strategies.py +231 -0
- logxpy/tests/test_action.py +1751 -0
- logxpy/tests/test_api.py +86 -0
- logxpy/tests/test_async.py +67 -0
- logxpy/tests/test_compat.py +13 -0
- logxpy/tests/test_config.py +21 -0
- logxpy/tests/test_coroutines.py +105 -0
- logxpy/tests/test_dask.py +211 -0
- logxpy/tests/test_decorators.py +54 -0
- logxpy/tests/test_filter.py +122 -0
- logxpy/tests/test_fmt.py +42 -0
- logxpy/tests/test_generators.py +292 -0
- logxpy/tests/test_journald.py +246 -0
- logxpy/tests/test_json.py +208 -0
- logxpy/tests/test_loggerx.py +44 -0
- logxpy/tests/test_logwriter.py +262 -0
- logxpy/tests/test_message.py +334 -0
- logxpy/tests/test_output.py +921 -0
- logxpy/tests/test_parse.py +309 -0
- logxpy/tests/test_pool.py +55 -0
- logxpy/tests/test_prettyprint.py +303 -0
- logxpy/tests/test_pyinstaller.py +35 -0
- logxpy/tests/test_serializers.py +36 -0
- logxpy/tests/test_stdlib.py +73 -0
- logxpy/tests/test_tai64n.py +66 -0
- logxpy/tests/test_testing.py +1051 -0
- logxpy/tests/test_traceback.py +251 -0
- logxpy/tests/test_twisted.py +814 -0
- logxpy/tests/test_util.py +45 -0
- logxpy/tests/test_validation.py +989 -0
- logxpy/twisted.py +265 -0
- logxpy-0.1.0.dist-info/METADATA +100 -0
- logxpy-0.1.0.dist-info/RECORD +72 -0
- logxpy-0.1.0.dist-info/WHEEL +5 -0
- logxpy-0.1.0.dist-info/entry_points.txt +2 -0
- logxpy-0.1.0.dist-info/licenses/LICENSE +201 -0
- logxpy-0.1.0.dist-info/top_level.txt +1 -0
logxpy/_async.py
ADDED
|
@@ -0,0 +1,186 @@
|
|
|
1
|
+
"""Async action and scope - integrates with eliot's context system."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
from collections.abc import AsyncIterator, Callable, Iterator
|
|
6
|
+
from contextlib import asynccontextmanager, contextmanager
|
|
7
|
+
from contextvars import ContextVar, Token
|
|
8
|
+
from typing import Any
|
|
9
|
+
|
|
10
|
+
# Reuse eliot's context var for compatibility
|
|
11
|
+
from ._action import _ACTION_CONTEXT, current_action
|
|
12
|
+
from ._base import now, uuid
|
|
13
|
+
from ._types import Level, Record
|
|
14
|
+
|
|
15
|
+
# Additional scope context (LoggerX feature - logxpy doesn't have this)
# NOTE(review): the ContextVar default below is a single shared dict object.
# scope() always replaces it with a fresh merged copy, but callers of
# current_scope() must treat the returned mapping as read-only, otherwise the
# shared default would be corrupted — confirm no caller mutates it.
_SCOPE: ContextVar[dict[str, Any]] = ContextVar("loggerx_scope", default={})
|
|
17
|
+
|
|
18
|
+
|
|
19
|
+
def current_scope() -> dict[str, Any]:
    """Return the field-inheritance scope bound to the current context."""
    snapshot = _SCOPE.get()
    return snapshot
|
|
21
|
+
|
|
22
|
+
|
|
23
|
+
def _get_parent_info() -> tuple[str, tuple[int, ...]]:
    """Get task_uuid and next task level from the current action.

    Returns a ``(task_uuid, task_level)`` pair for a new child action.  With
    no enclosing action a brand-new task is started at level ``(1,)``.
    """
    parent = current_action()
    if not parent:
        # No enclosing action: start a brand-new task.
        return uuid(), (1,)
    task_uuid = parent.task_uuid
    if hasattr(parent, "child_level"):
        # AsyncAction parent: use its sibling counter so that consecutive
        # children get distinct levels (the old code fell through to (1,)
        # because AsyncAction has no `_task_level` attribute, giving every
        # nested async action a duplicate root-level task level).
        return task_uuid, parent.child_level()
    if hasattr(parent, "_task_level"):
        # eliot.Action parent: derive a child level from its TaskLevel.
        return task_uuid, (*parent._task_level.as_list(), 1)
    return task_uuid, (1,)
|
|
36
|
+
|
|
37
|
+
|
|
38
|
+
# === AsyncAction (works alongside eliot.Action for async contexts) ===
|
|
39
|
+
class AsyncAction:
    """Async-native action that integrates with eliot's context system.

    Supports both ``with`` and ``async with``; entering pushes the action
    onto eliot's ``_ACTION_CONTEXT`` and emits a ``started`` record, exiting
    pops it and emits ``succeeded``/``failed``.
    """

    __slots__ = (
        "_child_count",
        "_start",
        "_token",
        "action_type",
        "fields",
        "level",
        "task_level",
        "task_uuid",
    )

    def __init__(
        self,
        action_type: str,
        task_uuid: str,
        task_level: tuple[int, ...],
        level: Level = Level.INFO,
        **fields: Any,
    ):
        self.task_uuid = task_uuid
        self.task_level = task_level
        self.action_type = action_type
        self.fields = fields
        self.level = level
        # Wall-clock start time; also used for the duration in the end record.
        self._start = now()
        # ContextVar token, set on enter so exit can restore the parent.
        self._token: Token[Any] | None = None
        self._child_count = 0

    def child_level(self) -> tuple[int, ...]:
        """Allocate the next sibling task level under this action."""
        self._child_count += 1
        return (*self.task_level, self._child_count)

    def _enter(self) -> AsyncAction:
        """Push self as the current action and emit the 'started' record."""
        self._token = _ACTION_CONTEXT.set(self)
        _emit(self._start_record())
        return self

    def _exit(self, exc: BaseException | None) -> None:
        """Restore the parent action, then emit the end record."""
        if self._token:
            _ACTION_CONTEXT.reset(self._token)
        _emit(self._end_record(exc))

    def _start_record(self) -> Record:
        """Build the 'started' record for this action."""
        return Record(
            timestamp=self._start,
            level=self.level,
            message="",
            fields=self.fields,
            context=current_scope(),
            task_uuid=self.task_uuid,
            task_level=self.task_level,
            action_type=self.action_type,
            action_status="started",
        )

    def _end_record(self, exc: BaseException | None) -> Record:
        """Build the 'succeeded'/'failed' end record, including duration."""
        status = "failed" if exc else "succeeded"
        fields = {**self.fields, "eliot:duration": round(now() - self._start, 6)}
        if exc:
            # Fully-qualified exception type plus its message, eliot-style.
            fields["exception"] = f"{type(exc).__module__}.{type(exc).__name__}"
            fields["reason"] = str(exc)
        return Record(
            timestamp=now(),
            level=self.level,
            message="",
            fields=fields,
            context=current_scope(),
            task_uuid=self.task_uuid,
            # End record occupies the level slot after the last child.
            task_level=(*self.task_level, self._child_count + 1),
            action_type=self.action_type,
            action_status=status,
        )

    # Dual context manager support (sync + async)
    def __enter__(self) -> AsyncAction:
        return self._enter()

    def __exit__(self, *exc) -> None:
        # exc is (exc_type, exc_value, traceback); only the value is used.
        self._exit(exc[1])

    async def __aenter__(self) -> AsyncAction:
        return self._enter()

    async def __aexit__(self, *exc) -> None:
        self._exit(exc[1])
|
|
127
|
+
|
|
128
|
+
|
|
129
|
+
@contextmanager
def action(action_type: str, level: str | Level = Level.INFO, **fields: Any) -> Iterator[AsyncAction]:
    """Open a sync action context that nests under the current eliot action."""
    if isinstance(level, str):
        level = Level[level.upper()]
    parent_uuid, child_level = _get_parent_info()
    current = AsyncAction(action_type, parent_uuid, child_level, level=level, **fields)
    with current:
        yield current
|
|
138
|
+
|
|
139
|
+
|
|
140
|
+
@asynccontextmanager
async def aaction(action_type: str, level: str | Level = Level.INFO, **fields: Any) -> AsyncIterator[AsyncAction]:
    """Open an async action context that nests under the current eliot action."""
    if isinstance(level, str):
        level = Level[level.upper()]
    parent_uuid, child_level = _get_parent_info()
    current = AsyncAction(action_type, parent_uuid, child_level, level=level, **fields)
    async with current:
        yield current
|
|
149
|
+
|
|
150
|
+
|
|
151
|
+
# === Scope (LoggerX feature - not in eliot) ===
|
|
152
|
+
@contextmanager
def scope(**ctx: Any) -> Iterator[dict[str, Any]]:
    """Push a nested scope: given fields layered over the inherited ones."""
    combined = dict(_SCOPE.get())
    combined.update(ctx)
    reset_token = _SCOPE.set(combined)
    try:
        yield combined
    finally:
        # Always restore the parent scope, even if the body raised.
        _SCOPE.reset(reset_token)
|
|
162
|
+
|
|
163
|
+
|
|
164
|
+
# === Emit (connects to eliot's destination system) ===
|
|
165
|
+
_emit_handlers: list[Callable[[Record], None]] = []
|
|
166
|
+
|
|
167
|
+
|
|
168
|
+
def _emit(record: Record) -> None:
    """Emit to eliot's destinations + any registered handlers."""
    # Imported at call time rather than module load (these modules in turn
    # depend on this one — presumably a circularity guard; confirm).
    from ._output import Logger
    from .loggerx import get_global_masker

    payload = record.to_dict()

    # Apply masking if configured
    masker = get_global_masker()
    if masker:
        payload = masker.mask(payload)

    Logger._destinations.send(payload)  # Uses eliot's system
    for handler in _emit_handlers:
        handler(record)
|
|
183
|
+
|
|
184
|
+
|
|
185
|
+
def register_emitter(fn: Callable[[Record], None]) -> None:
    """Register a callback invoked with every Record emitted by _emit()."""
    _emit_handlers.append(fn)
|
logxpy/_base.py
ADDED
|
@@ -0,0 +1,80 @@
|
|
|
1
|
+
"""Shared utilities to reduce code duplication."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
import asyncio
|
|
6
|
+
import inspect
|
|
7
|
+
import sys
|
|
8
|
+
import time
|
|
9
|
+
from collections.abc import Callable
|
|
10
|
+
from functools import wraps
|
|
11
|
+
from typing import Any, ParamSpec, TypeVar
|
|
12
|
+
from uuid import uuid4
|
|
13
|
+
|
|
14
|
+
P = ParamSpec("P")
|
|
15
|
+
T = TypeVar("T")
|
|
16
|
+
|
|
17
|
+
# === Time utilities ===
|
|
18
|
+
now = time.time
|
|
19
|
+
monotonic = time.monotonic
|
|
20
|
+
|
|
21
|
+
|
|
22
|
+
def uuid() -> str:
    """Return a freshly generated random UUID4 as a string."""
    return f"{uuid4()}"
|
|
24
|
+
|
|
25
|
+
|
|
26
|
+
# === Async/sync wrapper factory ===
|
|
27
|
+
def dual_wrapper(
    async_impl: Callable[..., Any],
    sync_impl: Callable[..., Any] | None = None,
) -> Callable[[Callable[..., Any]], Callable[..., Any]]:
    """Create wrapper that works for both async and sync functions.

    The returned decorator inspects the decorated function: coroutine
    functions get an async wrapper delegating to *async_impl*; plain
    functions get a sync wrapper delegating to *sync_impl*, or — when
    *sync_impl* is None — to *async_impl* driven via ``asyncio.run``.
    """

    def decorator(func: Callable[..., Any]) -> Callable[..., Any]:
        is_async = inspect.iscoroutinefunction(func)

        @wraps(func)
        async def async_wrapper(*args: Any, **kwargs: Any) -> Any:
            return await async_impl(func, *args, **kwargs)

        # Resolve the sync implementation once at decoration time instead of
        # rebuilding the fallback lambda on every call.
        if sync_impl is None:
            def _run_async(f: Callable[..., Any], *a: Any, **k: Any) -> Any:
                return asyncio.run(async_impl(f, *a, **k))

            impl = _run_async
        else:
            impl = sync_impl

        @wraps(func)
        def sync_wrapper(*args: Any, **kwargs: Any) -> Any:
            return impl(func, *args, **kwargs)

        return async_wrapper if is_async else sync_wrapper

    return decorator
|
|
48
|
+
|
|
49
|
+
|
|
50
|
+
# === Value truncation ===
|
|
51
|
+
def truncate(obj: Any, max_depth: int = 3, max_len: int = 500) -> Any:
    """Truncate nested objects for logging.

    Recursively caps string length, container size, and nesting depth so
    arbitrary values can be logged without producing huge records.
    """
    if max_depth <= 0:
        # Depth budget exhausted: summarize by type name only.
        return f"<{type(obj).__name__}>"
    if obj is None or isinstance(obj, (bool, int, float)):
        return obj
    if isinstance(obj, str):
        return obj if len(obj) <= max_len else obj[:max_len] + "..."
    if isinstance(obj, bytes):
        return f"<bytes:{len(obj)}>"
    if isinstance(obj, dict):
        shortened: dict[str, Any] = {}
        # At most 50 entries; keys are stringified and capped at 50 chars.
        for key, value in list(obj.items())[:50]:
            shortened[str(key)[:50]] = truncate(value, max_depth - 1, max_len)
        return shortened
    if isinstance(obj, (list, tuple)):
        items = [truncate(x, max_depth - 1, max_len) for x in obj[:100]]
        if len(obj) > 100:
            # Record how many elements were dropped.
            return items + [f"...+{len(obj) - 100}"]
        return items
    if hasattr(obj, "__dataclass_fields__"):
        summary: dict[str, Any] = {"_type": type(obj).__name__}
        for name in obj.__dataclass_fields__:
            summary[name] = truncate(getattr(obj, name), max_depth - 1, max_len)
        return summary
    # Anything else: type name plus a capped repr.
    return {"_type": type(obj).__name__, "_repr": repr(obj)[:max_len]}
|
|
76
|
+
|
|
77
|
+
|
|
78
|
+
# === Module lazy import ===
|
|
79
|
+
def get_module(name: str) -> Any | None:
    """Return an already-imported module by name, or None if not loaded."""
    try:
        return sys.modules[name]
    except KeyError:
        return None
|
logxpy/_compat.py
ADDED
|
@@ -0,0 +1,71 @@
|
|
|
1
|
+
"""Eliot backward compatibility - re-export ALL logxpy APIs with deprecation notices."""
|
|
2
|
+
|
|
3
|
+
from warnings import warn
|
|
4
|
+
|
|
5
|
+
from ._action import (
|
|
6
|
+
log_call as _log_call,
|
|
7
|
+
)
|
|
8
|
+
from ._action import (
|
|
9
|
+
log_message as _log_message,
|
|
10
|
+
)
|
|
11
|
+
from ._action import (
|
|
12
|
+
start_action,
|
|
13
|
+
startTask,
|
|
14
|
+
)
|
|
15
|
+
|
|
16
|
+
# === Re-export ALL logxpy public APIs (from eliot/__init__.py __all__) ===
|
|
17
|
+
from ._output import (
|
|
18
|
+
Logger,
|
|
19
|
+
)
|
|
20
|
+
from ._traceback import write_traceback, writeFailure
|
|
21
|
+
|
|
22
|
+
# === Backward compat aliases (same as eliot/__init__.py) ===
|
|
23
|
+
startAction = start_action
|
|
24
|
+
start_task = startTask
|
|
25
|
+
write_failure = writeFailure
|
|
26
|
+
writeTraceback = write_traceback
|
|
27
|
+
add_destinations = Logger._destinations.add
|
|
28
|
+
remove_destination = Logger._destinations.remove
|
|
29
|
+
add_global_fields = Logger._destinations.addGlobalFields
|
|
30
|
+
|
|
31
|
+
|
|
32
|
+
# Deprecated
|
|
33
|
+
def add_destination(dest):
    """Deprecated single-destination variant; forwards to the destinations set."""
    warn("Use add_destinations()", DeprecationWarning, stacklevel=2)
    Logger._destinations.add(dest)
|
|
36
|
+
|
|
37
|
+
|
|
38
|
+
addDestination = add_destination
|
|
39
|
+
removeDestination = remove_destination
|
|
40
|
+
addGlobalFields = add_global_fields
|
|
41
|
+
|
|
42
|
+
|
|
43
|
+
def use_asyncio_context():
    """Deprecated no-op kept only for eliot API compatibility."""
    warn("No longer needed as of Eliot 1.8.0", DeprecationWarning, stacklevel=2)
|
|
45
|
+
|
|
46
|
+
|
|
47
|
+
# === New LoggerX API ===
|
|
48
|
+
|
|
49
|
+
|
|
50
|
+
# === Deprecation wrappers for common patterns ===
|
|
51
|
+
def _deprecated(old: str, new: str):
|
|
52
|
+
def wrapper(fn):
|
|
53
|
+
def inner(*a, **kw):
|
|
54
|
+
warn(f"{old} is deprecated, use {new}", DeprecationWarning, stacklevel=2)
|
|
55
|
+
return fn(*a, **kw)
|
|
56
|
+
|
|
57
|
+
return inner
|
|
58
|
+
|
|
59
|
+
return wrapper
|
|
60
|
+
|
|
61
|
+
|
|
62
|
+
# Wrap log_call to suggest @log.logged
|
|
63
|
+
@_deprecated("log_call", "@log.logged")
def log_call(*a, **kw):
    """Deprecated pass-through to ._action.log_call; prefer @log.logged."""
    return _log_call(*a, **kw)
|
|
66
|
+
|
|
67
|
+
|
|
68
|
+
# Wrap log_message to suggest log.info()
|
|
69
|
+
@_deprecated("log_message", "log.info()")
def log_message(*a, **kw):
    """Deprecated pass-through to ._action.log_message; prefer log.info()."""
    return _log_message(*a, **kw)
|
logxpy/_config.py
ADDED
|
@@ -0,0 +1,45 @@
|
|
|
1
|
+
"""Configuration loading."""
|
|
2
|
+
from __future__ import annotations
|
|
3
|
+
import os
|
|
4
|
+
import tomllib
|
|
5
|
+
from dataclasses import dataclass, field
|
|
6
|
+
from pathlib import Path
|
|
7
|
+
from typing import Any
|
|
8
|
+
|
|
9
|
+
from ._types import Level
|
|
10
|
+
|
|
11
|
+
@dataclass
class Config:
    """Runtime configuration resolved from pyproject.toml and environment."""

    level: Level = Level.INFO
    format: str = "rich"
    destinations: list[str] = field(default_factory=lambda: ["console"])
    mask_fields: list[str] = field(default_factory=lambda: ["password", "token", "secret"])
    mask_patterns: list[str] = field(default_factory=list)
    context: dict[str, Any] = field(default_factory=dict)
    file_path: str | None = None

    @classmethod
    def load(cls) -> 'Config':
        """Build a Config: defaults, then [tool.loggerx], then env overrides."""
        cfg = cls()
        # 1. pyproject.toml ([tool.loggerx] table, if present)
        pyproject = Path.cwd() / 'pyproject.toml'
        if pyproject.exists():
            data = tomllib.loads(pyproject.read_text()).get('tool', {}).get('loggerx', {})
            if 'level' in data:
                cfg.level = Level[data['level'].upper()]
            if 'format' in data:
                cfg.format = data['format']
            if 'destinations' in data:
                cfg.destinations = data['destinations']
            if m := data.get('mask'):
                cfg.mask_fields = m.get('fields', cfg.mask_fields)
                cfg.mask_patterns = m.get('patterns', cfg.mask_patterns)
        # 2. Environment (overrides)
        if v := os.environ.get('LOGGERX_LEVEL'):
            cfg.level = Level[v.upper()]
        if v := os.environ.get('LOGGERX_FORMAT'):
            cfg.format = v
        if v := os.environ.get('LOGGERX_DESTINATIONS'):
            cfg.destinations = v.split(',')
        return cfg
|
|
39
|
+
|
|
40
|
+
_cfg: Config | None = None


def get_config() -> Config:
    """Return the process-wide Config, loading it lazily on first use."""
    global _cfg
    if _cfg is None:
        _cfg = Config.load()
    return _cfg
|
logxpy/_dest.py
ADDED
|
@@ -0,0 +1,88 @@
|
|
|
1
|
+
"""Output destinations."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
import asyncio
|
|
6
|
+
from pathlib import Path
|
|
7
|
+
from typing import Any
|
|
8
|
+
|
|
9
|
+
from ._types import Record
|
|
10
|
+
|
|
11
|
+
|
|
12
|
+
class ConsoleDestination:
    """Writes records to the terminal, optionally colorized via rich."""

    def __init__(self, rich: bool = True):
        self._rich = rich
        if rich:
            # rich is only imported when actually requested.
            from rich.console import Console

            self._console = Console()

    async def write(self, record: Record) -> None:
        """Render one record as a single line."""
        lvl = record.level.name.ljust(8)
        line = f"[{lvl}] {record.message} {record.fields}"
        if not self._rich:
            print(line)
            return
        color = {"ERROR": "red", "WARNING": "yellow", "SUCCESS": "green"}.get(record.level.name, "")
        if color:
            self._console.print(f"[{color}]{line}[/{color}]")
        else:
            self._console.print(line)

    async def flush(self) -> None:
        """Console output is unbuffered here; nothing to flush."""

    async def close(self) -> None:
        """No resources to release."""
|
|
34
|
+
|
|
35
|
+
|
|
36
|
+
class FileDestination:
    """Buffered JSON-lines file destination.

    Records are serialized with orjson into an in-memory buffer, which is
    appended to the file when it reaches *buffer* lines, on flush(), or on
    close().
    """

    def __init__(self, path: str | Path, buffer: int = 100):
        self._path = Path(path)
        self._buf: list[str] = []
        self._size = buffer
        self._lock = asyncio.Lock()

    async def write(self, record: Record) -> None:
        import orjson

        line = orjson.dumps(record.to_dict()).decode() + "\n"
        async with self._lock:
            self._buf.append(line)
            if len(self._buf) >= self._size:
                # BUG FIX: the original awaited self.flush() here while still
                # holding self._lock; asyncio.Lock is NOT reentrant, so the
                # first time the buffer filled, write() deadlocked. Use the
                # lock-held helper instead.
                self._flush_buffered()

    def _flush_buffered(self) -> None:
        """Append buffered lines to the file. Caller must hold self._lock."""
        if not self._buf:
            return
        with self._path.open("a") as f:
            f.writelines(self._buf)
        self._buf.clear()

    async def flush(self) -> None:
        async with self._lock:
            self._flush_buffered()

    async def close(self) -> None:
        await self.flush()
|
|
62
|
+
|
|
63
|
+
|
|
64
|
+
class OTelDestination:
    """Best-effort OpenTelemetry destination: one span per action record."""

    def __init__(self, endpoint: str = "localhost:4317"):
        # NOTE(review): endpoint is stored but never wired into the SDK
        # below — confirm whether an OTLP exporter was intended to use it.
        self._endpoint = endpoint
        self._tracer: Any | None = None

    async def write(self, record: Record) -> None:
        # Lazily initialize the tracer on first write; silently becomes a
        # no-op when opentelemetry is not installed.
        if not self._tracer:
            try:
                from opentelemetry import trace
                from opentelemetry.sdk.trace import TracerProvider

                # Side effect: installs a process-global tracer provider.
                trace.set_tracer_provider(TracerProvider())
                self._tracer = trace.get_tracer(__name__)
            except ImportError:
                return
        if self._tracer and record.action_type:
            # Opens (and immediately closes) a fresh span per record; all
            # record fields become stringified span attributes.
            with self._tracer.start_as_current_span(record.action_type) as span:
                for k, v in record.fields.items():
                    span.set_attribute(k, str(v))

    async def flush(self) -> None:
        pass

    async def close(self) -> None:
        pass
|
logxpy/_errors.py
ADDED
|
@@ -0,0 +1,58 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Error-handling utility code.
|
|
3
|
+
"""
|
|
4
|
+
|
|
5
|
+
from inspect import getmro
|
|
6
|
+
|
|
7
|
+
|
|
8
|
+
class ErrorExtraction:
    """
    Extract fields from exceptions for failed-action messages.

    @ivar registry: Map exception class to function that extracts fields.
    """

    def __init__(self):
        self.registry = {}

    def register_exception_extractor(self, exception_class, extractor):
        """
        Register a function that converts exceptions to fields.

        @param exception_class: Class to register for.

        @param extractor: Single-argument callable that takes an exception
            of the given class (or a subclass) and returns a dictionary,
            fields to include in a failed action message.
        """
        self.registry[exception_class] = extractor

    def get_fields_for_exception(self, logger, exception):
        """
        Given an exception instance, return fields to add to the failed action
        message.

        @param logger: ``ILogger`` currently being used.

        @param exception: An exception instance.

        @return: Dictionary with fields to include.
        """
        # Walk the MRO so an extractor registered for a base class also
        # handles subclasses.
        for klass in getmro(exception.__class__):
            if klass in self.registry:
                extractor = self.registry[klass]
                try:
                    return extractor(exception)
                except Exception:
                    # Fixed: was a bare ``except:``, which also swallowed
                    # KeyboardInterrupt/SystemExit. A broken extractor is
                    # logged and ignored rather than breaking the action.
                    from ._traceback import write_traceback

                    write_traceback(logger)
                    return {}
        return {}
|
|
51
|
+
|
|
52
|
+
|
|
53
|
+
# Module-level singleton plus convenience aliases for its bound methods.
_error_extraction = ErrorExtraction()
register_exception_extractor = _error_extraction.register_exception_extractor
get_fields_for_exception = _error_extraction.get_fields_for_exception

# Default extractor for OSError/IOError, registered via their shared base
# EnvironmentError: capture the errno on failed actions.
register_exception_extractor(EnvironmentError, lambda e: {"errno": e.errno})
|
logxpy/_fmt.py
ADDED
|
@@ -0,0 +1,68 @@
|
|
|
1
|
+
"""Type-specific formatters."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
from typing import Any, Protocol
|
|
6
|
+
|
|
7
|
+
from ._base import get_module, truncate
|
|
8
|
+
|
|
9
|
+
|
|
10
|
+
class Formatter(Protocol):
    """Structural interface for type-specific value formatters.

    supports() reports whether this formatter handles *obj*; format()
    returns a JSON-friendly summary dict for it.
    """

    def supports(self, obj: Any) -> bool: ...
    def format(self, obj: Any, **opts: Any) -> dict[str, Any]: ...
|
|
13
|
+
|
|
14
|
+
|
|
15
|
+
class DFFormatter:
    """Summarizes pandas DataFrames instead of dumping full contents."""

    def supports(self, obj: Any) -> bool:
        # Only matches when pandas is already imported by the application.
        pd = get_module("pandas")
        if pd is None:
            return False
        return isinstance(obj, pd.DataFrame)

    def format(self, obj: Any, max_rows: int = 5, **_: Any) -> dict[str, Any]:
        dtype_map: dict[str, str] = {}
        for col, dtype in list(obj.dtypes.items())[:20]:
            dtype_map[str(col)] = str(dtype)
        return {
            "_type": "DataFrame",
            "shape": list(obj.shape),
            "columns": list(obj.columns)[:20],
            "dtypes": dtype_map,
            "head": obj.head(max_rows).to_dict(orient="records"),
        }
|
|
28
|
+
|
|
29
|
+
|
|
30
|
+
class TensorFormatter:
    """Summarizes array-likes (numpy, torch, ...) by shape, dtype and stats."""

    def supports(self, obj: Any) -> bool:
        # Duck-typed: anything exposing .shape and .dtype (ndarray, Tensor).
        return hasattr(obj, "shape") and hasattr(obj, "dtype")

    def format(self, obj: Any, **_: Any) -> dict[str, Any]:
        shape = obj.shape
        r: dict[str, Any] = {
            "_type": type(obj).__name__,
            # .shape is normally iterable; wrap a scalar shape defensively
            # (replaces the original's opaque getattr(...,"__iter__") trick).
            "shape": list(shape) if hasattr(shape, "__iter__") else [shape],
            "dtype": str(obj.dtype),
        }
        try:
            # Stats are best-effort: they may fail for non-numeric dtypes or
            # empty arrays, in which case they are simply omitted.
            r.update({"min": float(obj.min()), "max": float(obj.max()), "mean": float(obj.mean())})
            if hasattr(obj, "std"):
                r["std"] = float(obj.std())
            if hasattr(obj, "device"):
                r["device"] = str(obj.device)
        except Exception:
            # Fixed: was a bare ``except:``, which would also swallow
            # KeyboardInterrupt/SystemExit.
            pass
        return r
|
|
49
|
+
|
|
50
|
+
|
|
51
|
+
class ImageFormatter:
    """Summarizes PIL images by size and mode, never embedding pixel data."""

    def supports(self, obj: Any) -> bool:
        # Only matches when PIL.Image has already been imported elsewhere.
        PIL = get_module("PIL")
        if PIL is None or not hasattr(PIL, "Image"):
            return False
        return isinstance(obj, PIL.Image.Image)

    def format(self, obj: Any, max_size: tuple[int, int] = (256, 256), **_: Any) -> dict[str, Any]:
        # max_size is accepted for interface parity with other formatters;
        # the current summary does not use it.
        return {"_type": "Image", "size": list(obj.size), "mode": obj.mode}
|
|
58
|
+
|
|
59
|
+
|
|
60
|
+
# Combined formatter
|
|
61
|
+
# Combined formatter chain, consulted in order.
_FMT: list[Formatter] = [DFFormatter(), TensorFormatter(), ImageFormatter()]


def format_value(obj: Any, **opts: Any) -> Any:
    """Format obj with the first matching formatter, else generic truncation."""
    matching = next((f for f in _FMT if f.supports(obj)), None)
    if matching is not None:
        return matching.format(obj, **opts)
    return truncate(obj, opts.get("max_depth", 3), opts.get("max_len", 500))
|