logxpy 0.1.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- logxpy/__init__.py +126 -0
- logxpy/_action.py +958 -0
- logxpy/_async.py +186 -0
- logxpy/_base.py +80 -0
- logxpy/_compat.py +71 -0
- logxpy/_config.py +45 -0
- logxpy/_dest.py +88 -0
- logxpy/_errors.py +58 -0
- logxpy/_fmt.py +68 -0
- logxpy/_generators.py +136 -0
- logxpy/_mask.py +23 -0
- logxpy/_message.py +195 -0
- logxpy/_output.py +517 -0
- logxpy/_pool.py +93 -0
- logxpy/_traceback.py +126 -0
- logxpy/_types.py +71 -0
- logxpy/_util.py +56 -0
- logxpy/_validation.py +486 -0
- logxpy/_version.py +21 -0
- logxpy/cli.py +61 -0
- logxpy/dask.py +172 -0
- logxpy/decorators.py +268 -0
- logxpy/filter.py +124 -0
- logxpy/journald.py +88 -0
- logxpy/json.py +149 -0
- logxpy/loggerx.py +253 -0
- logxpy/logwriter.py +84 -0
- logxpy/parse.py +191 -0
- logxpy/prettyprint.py +173 -0
- logxpy/serializers.py +36 -0
- logxpy/stdlib.py +23 -0
- logxpy/tai64n.py +45 -0
- logxpy/testing.py +472 -0
- logxpy/tests/__init__.py +9 -0
- logxpy/tests/common.py +36 -0
- logxpy/tests/strategies.py +231 -0
- logxpy/tests/test_action.py +1751 -0
- logxpy/tests/test_api.py +86 -0
- logxpy/tests/test_async.py +67 -0
- logxpy/tests/test_compat.py +13 -0
- logxpy/tests/test_config.py +21 -0
- logxpy/tests/test_coroutines.py +105 -0
- logxpy/tests/test_dask.py +211 -0
- logxpy/tests/test_decorators.py +54 -0
- logxpy/tests/test_filter.py +122 -0
- logxpy/tests/test_fmt.py +42 -0
- logxpy/tests/test_generators.py +292 -0
- logxpy/tests/test_journald.py +246 -0
- logxpy/tests/test_json.py +208 -0
- logxpy/tests/test_loggerx.py +44 -0
- logxpy/tests/test_logwriter.py +262 -0
- logxpy/tests/test_message.py +334 -0
- logxpy/tests/test_output.py +921 -0
- logxpy/tests/test_parse.py +309 -0
- logxpy/tests/test_pool.py +55 -0
- logxpy/tests/test_prettyprint.py +303 -0
- logxpy/tests/test_pyinstaller.py +35 -0
- logxpy/tests/test_serializers.py +36 -0
- logxpy/tests/test_stdlib.py +73 -0
- logxpy/tests/test_tai64n.py +66 -0
- logxpy/tests/test_testing.py +1051 -0
- logxpy/tests/test_traceback.py +251 -0
- logxpy/tests/test_twisted.py +814 -0
- logxpy/tests/test_util.py +45 -0
- logxpy/tests/test_validation.py +989 -0
- logxpy/twisted.py +265 -0
- logxpy-0.1.0.dist-info/METADATA +100 -0
- logxpy-0.1.0.dist-info/RECORD +72 -0
- logxpy-0.1.0.dist-info/WHEEL +5 -0
- logxpy-0.1.0.dist-info/entry_points.txt +2 -0
- logxpy-0.1.0.dist-info/licenses/LICENSE +201 -0
- logxpy-0.1.0.dist-info/top_level.txt +1 -0
logxpy/dask.py
ADDED
|
@@ -0,0 +1,172 @@
|
|
|
1
|
+
"""Support for Eliot tracing with Dask computations."""
|
|
2
|
+
|
|
3
|
+
from pyrsistent import PClass, field
|
|
4
|
+
|
|
5
|
+
from dask import compute, optimize, persist
|
|
6
|
+
|
|
7
|
+
try:
|
|
8
|
+
from dask.distributed import Future
|
|
9
|
+
from dask.highlevelgraph import HighLevelGraph
|
|
10
|
+
except:
|
|
11
|
+
|
|
12
|
+
class Future(object):
|
|
13
|
+
pass
|
|
14
|
+
|
|
15
|
+
|
|
16
|
+
from dask.core import toposort, get_dependencies, ishashable
|
|
17
|
+
from . import start_action, current_action, Action
|
|
18
|
+
|
|
19
|
+
|
|
20
|
+
class _RunWithEliotContext(PClass):
    """
    Run a callable within an Eliot context.

    Instances replace task callables in the Dask graph: when Dask invokes
    one, it resumes the serialized Eliot task, logs a C{dask:task} message
    naming the graph key and its dependencies, then calls the wrapped
    function.

    @ivar task_id: The serialized Eliot task ID.
    @ivar func: The function that Dask wants to run.
    @ivar key: The key in the Dask graph.
    @ivar dependencies: The keys in the Dask graph this depends on.
    """

    task_id = field(type=str)
    func = field()  # callable
    key = field(type=str)
    dependencies = field()

    # Pretend to be underlying callable for purposes of equality; necessary for
    # optimizer to be happy:

    def __eq__(self, other):
        # Deliberately asymmetric: compares the *wrapped* callable to other.
        return self.func == other

    def __ne__(self, other):
        return self.func != other

    def __hash__(self):
        # Hash must follow __eq__: delegate to the wrapped callable.
        return hash(self.func)

    def __call__(self, *args, **kwargs):
        # Resume the Eliot task recorded at graph-construction time, so the
        # log message lands in the right tree even on a remote worker.
        with Action.continue_task(task_id=self.task_id) as action:
            action.log(
                message_type="dask:task", key=self.key, dependencies=self.dependencies
            )
            return self.func(*args, **kwargs)
|
|
53
|
+
|
|
54
|
+
|
|
55
|
+
def compute_with_trace(*args):
    """Do Dask compute(), but with added Eliot tracing.

    @param args: Dask objects (delayed values, collections, ...) passed
        through to C{dask.optimize} and then C{dask.compute}.

    Dask is a graph of tasks, but Eliot logs trees. So we need to emulate a
    graph using a tree. We do this by making Eliot action for each task, but
    having it list the tasks it depends on.

    We use the following algorithm:

        1. Create a top-level action.

        2. For each entry in the dask graph, create a child with
           serialize_task_id. Do this in likely order of execution, so that
           if B depends on A the task level of B is higher than the task
           level of A.

        3. Replace each function with a wrapper that uses the corresponding
           task ID (with Action.continue_task), and while it's at it also
           records which other things this function depends on.

    Known issues:

        1. Retries will confuse Eliot. Probably need different
           distributed-tree mechanism within Eliot to solve that.
    """
    # 1. Create top-level Eliot Action:
    with start_action(action_type="dask:compute"):
        # In order to reduce logging verbosity, add logging to the already
        # optimized graph:
        optimized = optimize(*args, optimizations=[_add_logging])
        return compute(*optimized, optimize_graph=False)
|
|
86
|
+
|
|
87
|
+
|
|
88
|
+
def persist_with_trace(*args):
    """Do Dask persist(), but with added Eliot tracing.

    Mirrors ``compute_with_trace``: a top-level "dask:persist" action is
    opened, logging wrappers are injected into the optimized graph, and the
    already-optimized graph is persisted as-is.

    Known issues:

        1. Retries will confuse Eliot. Probably need different
           distributed-tree mechanism within Eliot to solve that.
    """
    with start_action(action_type="dask:persist"):
        # Attach logging only after optimization, so we do not log
        # intermediate tasks that the optimizer would discard anyway:
        traced = optimize(*args, optimizations=[_add_logging])
        return persist(*traced, optimize_graph=False)
|
|
102
|
+
|
|
103
|
+
|
|
104
|
+
def _add_logging(dsk, ignore=None):
    """
    Add logging to a Dask graph.

    Every task callable is replaced with a ``_RunWithEliotContext`` wrapper
    that resumes a child of the current Eliot action and logs the task's key
    and dependencies.

    @param dsk: The Dask graph.
    @param ignore: Unused; accepted so this function matches Dask's
        optimization-callback signature.

    @return: New Dask graph.
    """
    if isinstance(dsk, HighLevelGraph):
        dsk = dsk.to_dict()

    ctx = current_action()
    result = {}

    # Use topological sort to ensure Eliot actions are in logical order of
    # execution in Dask:
    keys = toposort(dsk)
    # Membership is tested once per graph entry below; a set keeps the alias
    # scan O(n) instead of the quadratic cost of `value in <list>`.
    key_set = set(keys)

    # Give each key a string name. Some keys are just aliases to other
    # keys, so make sure we have underlying key available. Later on might
    # want to shorten them as well.
    def simplify(k):
        if isinstance(k, str):
            return k
        # Tuple keys (e.g. ("name", 0)) become dash-joined strings.
        return "-".join(str(o) for o in k)

    key_names = {}
    for key in keys:
        value = dsk[key]
        if not callable(value) and ishashable(value) and value in key_set:
            # It's an alias for another key; toposort guarantees the target
            # was named already.
            key_names[key] = key_names[value]
        else:
            key_names[key] = simplify(key)

    # Values in the graph can be either:
    #
    # 1. A list of other values.
    # 2. A tuple, where first value might be a callable, aka a task.
    # 3. A literal of some sort.
    def maybe_wrap(key, value):
        if isinstance(value, list):
            return [maybe_wrap(key, v) for v in value]
        elif isinstance(value, tuple):
            func = value[0]
            args = value[1:]
            if not callable(func):
                # Not a callable, so nothing to wrap.
                return value
            wrapped_func = _RunWithEliotContext(
                task_id=str(ctx.serialize_task_id(), "utf-8"),
                func=func,
                key=key_names[key],
                dependencies=[key_names[k] for k in get_dependencies(dsk, key)],
            )
            return (wrapped_func,) + args
        else:
            return value

    # Replace function with wrapper that logs appropriate Action; iterate in
    # topological order so action task levels are in reasonable order.
    for key in keys:
        result[key] = maybe_wrap(key, dsk[key])

    assert set(result.keys()) == set(dsk.keys())
    return result
|
|
170
|
+
|
|
171
|
+
|
|
172
|
+
__all__ = ["compute_with_trace", "persist_with_trace"]
|
logxpy/decorators.py
ADDED
|
@@ -0,0 +1,268 @@
|
|
|
1
|
+
"""Logging decorators - @logged, @timed, @retry, @generator."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
import asyncio
|
|
6
|
+
import inspect
|
|
7
|
+
import time
|
|
8
|
+
from collections.abc import AsyncIterator, Awaitable, Callable, Iterator
|
|
9
|
+
from functools import wraps
|
|
10
|
+
from typing import Any, ParamSpec, TypeVar, cast
|
|
11
|
+
|
|
12
|
+
from ._async import aaction, action
|
|
13
|
+
from ._base import truncate
|
|
14
|
+
|
|
15
|
+
P = ParamSpec("P")
|
|
16
|
+
T = TypeVar("T")
|
|
17
|
+
|
|
18
|
+
MASK_KEYS = {"password", "token", "secret", "key", "auth", "credential"}
|
|
19
|
+
|
|
20
|
+
|
|
21
|
+
def _extract_args(
    func: Callable[..., Any],
    args: tuple[Any, ...],
    kwargs: dict[str, Any],
    capture_self: bool,
    exclude: set[str],
) -> dict[str, Any]:
    """Bind the call's arguments to *func*'s signature and return loggable fields.

    Parameter names in *exclude* are masked as ``"***"``; other values go
    through :func:`truncate`. ``self`` is dropped unless *capture_self* is
    true.
    """
    bound = inspect.signature(func).bind(*args, **kwargs)
    bound.apply_defaults()
    fields: dict[str, Any] = {}
    for param, value in bound.arguments.items():
        if param == "self" and not capture_self:
            continue
        fields[param] = "***" if param in exclude else truncate(value)
    return fields
|
|
36
|
+
|
|
37
|
+
|
|
38
|
+
def logged(
    fn: Callable[P, T] | None = None,
    *,
    level: str = "INFO",
    capture_args: bool = True,
    capture_result: bool = True,
    capture_self: bool = False,
    exclude: set[str] | None = None,
    timer: bool = True,
    when: Callable[..., bool] | None = None,
    max_depth: int = 3,
    max_length: int = 500,
    silent_errors: bool = False,
) -> Any:
    """Universal logging decorator for entry/exit/timing/args/result.

    Works on both sync and async functions, and supports both bare
    (``@logged``) and parameterized (``@logged(level="DEBUG")``) usage.

    :param fn: The decorated function when used bare; otherwise None.
    :param level: Log level name forwarded to ``action``/``aaction``.
    :param capture_args: Record the bound call arguments on the action
        (best effort -- failures to bind are silently ignored).
    :param capture_result: Record a truncated return value; None results
        are not recorded.
    :param capture_self: Include ``self`` among the captured arguments.
    :param exclude: Extra argument names to mask with "***"; always unioned
        with MASK_KEYS.
    :param timer: NOTE(review): currently unused by this implementation --
        confirm whether timing is meant to come from the action itself.
    :param when: Predicate called as ``when(func, args, kwargs)``; a falsy
        result skips all logging and calls the function directly.
    :param max_depth: Truncation depth for the captured result.
    :param max_length: Truncation length for the captured result.
    :param silent_errors: Swallow exceptions, recording them in the
        ``error_suppressed`` field and returning None instead of raising.
    """
    # Sensitive keys are always masked, on top of whatever the caller asked for.
    exclude = (exclude or set()) | MASK_KEYS

    def decorator(func: Callable[P, T]) -> Callable[P, T]:
        name = f"{func.__module__}.{func.__qualname__}"
        is_async = inspect.iscoroutinefunction(func)

        @wraps(func)
        async def async_impl(*a: P.args, **kw: P.kwargs) -> T:
            if when and not when(func, a, kw):
                # We know func is async if we are here
                return await cast("Awaitable[T]", func(*a, **kw))

            log_args: dict[str, Any] = {}
            if capture_args:
                try:
                    log_args = _extract_args(func, a, kw, capture_self, exclude)
                except Exception:
                    pass  # Best effort arg capture

            async with aaction(name, level=level, **log_args) as act:
                try:
                    result = await cast("Awaitable[T]", func(*a, **kw))
                    if capture_result and result is not None:
                        act.fields["result"] = truncate(result, max_depth, max_length)
                    return result
                except Exception as e:
                    if silent_errors:
                        act.fields["error_suppressed"] = str(e)
                        # We have to return something. Since we don't know T, we return None and cast it.
                        return cast("T", None)
                    raise

        @wraps(func)
        def sync_impl(*a: P.args, **kw: P.kwargs) -> T:
            if when and not when(func, a, kw):
                return func(*a, **kw)

            log_args: dict[str, Any] = {}
            if capture_args:
                try:
                    log_args = _extract_args(func, a, kw, capture_self, exclude)
                except Exception:
                    pass

            with action(name, level=level, **log_args) as act:
                try:
                    result = func(*a, **kw)
                    if capture_result and result is not None:
                        act.fields["result"] = truncate(result, max_depth, max_length)
                    return result
                except Exception as e:
                    if silent_errors:
                        act.fields["error_suppressed"] = str(e)
                        return cast("T", None)
                    raise

        # Pick the wrapper matching the decorated function's flavor.
        return cast("Callable[P, T]", async_impl if is_async else sync_impl)

    # Support both @logged and @logged(...) invocation styles.
    return decorator(fn) if fn else decorator
|
|
112
|
+
|
|
113
|
+
|
|
114
|
+
def timed(metric: str | None = None) -> Any:
    """Timing-only decorator.

    Logs a debug message with the elapsed wall-clock milliseconds after every
    call (including calls that raise). *metric* overrides the default
    ``module.qualname`` metric name.
    """

    def decorator(func: Callable[P, T]) -> Callable[P, T]:
        label = metric or f"{func.__module__}.{func.__qualname__}"
        is_async = inspect.iscoroutinefunction(func)

        def _report(started: float) -> None:
            # Imported lazily, at call time rather than module import time.
            from .loggerx import log

            log.debug(f"⏱ {func.__name__}", metric=label, duration_ms=round((time.monotonic() - started) * 1000, 2))

        @wraps(func)
        async def async_impl(*a: P.args, **kw: P.kwargs) -> T:
            started = time.monotonic()
            try:
                return await cast("Awaitable[T]", func(*a, **kw))
            finally:
                _report(started)

        @wraps(func)
        def sync_impl(*a: P.args, **kw: P.kwargs) -> T:
            started = time.monotonic()
            try:
                return func(*a, **kw)
            finally:
                _report(started)

        return cast("Callable[P, T]", async_impl if is_async else sync_impl)

    return decorator
|
|
144
|
+
|
|
145
|
+
|
|
146
|
+
def retry(
    attempts: int = 3,
    delay: float = 1.0,
    backoff: float = 2.0,
    on_retry: Callable[[int, Exception], None] | None = None,
) -> Any:
    """Retry with exponential backoff.

    Calls the wrapped function up to *attempts* times, sleeping *delay*
    seconds before the first retry and multiplying the wait by *backoff*
    after each failure. *on_retry* (if given) is invoked as
    ``on_retry(attempt_number, exception)`` before each sleep. The final
    failure is re-raised unchanged.
    """

    def decorator(func: Callable[P, T]) -> Callable[P, T]:
        is_async = inspect.iscoroutinefunction(func)

        @wraps(func)
        async def async_impl(*a: P.args, **kw: P.kwargs) -> T:
            wait = delay
            failure = None
            for attempt in range(1, attempts + 1):
                try:
                    return await cast("Awaitable[T]", func(*a, **kw))
                except Exception as exc:
                    failure = exc
                    if attempt == attempts:
                        raise
                    if on_retry:
                        on_retry(attempt, exc)
                    await asyncio.sleep(wait)
                    wait *= backoff
            # Only reachable when attempts < 1 (loop body never ran).
            raise failure or Exception("Retry failed")

        @wraps(func)
        def sync_impl(*a: P.args, **kw: P.kwargs) -> T:
            wait = delay
            failure = None
            for attempt in range(1, attempts + 1):
                try:
                    return func(*a, **kw)
                except Exception as exc:
                    failure = exc
                    if attempt == attempts:
                        raise
                    if on_retry:
                        on_retry(attempt, exc)
                    time.sleep(wait)
                    wait *= backoff
            # Only reachable when attempts < 1 (loop body never ran).
            raise failure or Exception("Retry failed")

        return cast("Callable[P, T]", async_impl if is_async else sync_impl)

    return decorator
|
|
193
|
+
|
|
194
|
+
|
|
195
|
+
def generator(name: str | None = None, every: int = 100) -> Any:
    """Generator progress tracking.

    Wraps a generator function so that an info message with the running item
    count is logged after every *every* yielded items.
    """

    def decorator(func: Callable[..., Iterator[T]]) -> Callable[..., Iterator[T]]:
        label = name or func.__name__

        @wraps(func)
        def wrapper(*a: Any, **kw: Any) -> Iterator[T]:
            # Imported lazily, at call time rather than module import time.
            from .loggerx import log

            count = 0
            for item in func(*a, **kw):
                count += 1
                if count % every == 0:
                    log.info(f"📦 {label}", count=count)
                yield item

        return wrapper

    return decorator
|
|
213
|
+
|
|
214
|
+
|
|
215
|
+
def aiterator(name: str | None = None, every: int = 100) -> Any:
    """Async iterator progress tracking.

    Async counterpart of :func:`generator`: logs an info message with the
    running item count after every *every* yielded items.
    """

    def decorator(func: Callable[..., AsyncIterator[T]]) -> Callable[..., AsyncIterator[T]]:
        label = name or func.__name__

        @wraps(func)
        async def wrapper(*a: Any, **kw: Any) -> AsyncIterator[T]:
            # Imported lazily, at call time rather than module import time.
            from .loggerx import log

            emitted = 0
            async for item in func(*a, **kw):
                emitted += 1
                if not emitted % every:
                    log.info(f"📦 {label}", count=emitted)
                yield item

        return wrapper

    return decorator
|
|
235
|
+
|
|
236
|
+
|
|
237
|
+
def trace(name: str | None = None, kind: str = "internal", attributes: dict[str, Any] | None = None) -> Any:
    """OpenTelemetry trace decorator.

    Runs the wrapped function inside an OTel span named *name* (default:
    ``module.qualname``). When ``opentelemetry`` is not installed the
    function is simply called without tracing.

    :param name: Span name override.
    :param kind: NOTE(review): accepted but currently unused -- confirm
        whether it was meant to be forwarded as the span kind.
    :param attributes: Attributes attached to the span.
    """

    def decorator(func: Callable[P, T]) -> Callable[P, T]:
        span_name = name or f"{func.__module__}.{func.__qualname__}"
        is_async = inspect.iscoroutinefunction(func)

        def _get_tracer():
            # Keep the try scoped to the *import only*. Previously the whole
            # call was inside `try/except ImportError`, so a wrapped function
            # that itself raised ImportError was called a second time by the
            # fallback path.
            try:
                from opentelemetry import trace as otel
            except ImportError:
                return None
            return otel.get_tracer(__name__)

        @wraps(func)
        async def async_impl(*a: P.args, **kw: P.kwargs) -> T:
            tracer = _get_tracer()
            if tracer is None:
                return await cast("Awaitable[T]", func(*a, **kw))
            with tracer.start_as_current_span(span_name, attributes=attributes):
                return await cast("Awaitable[T]", func(*a, **kw))

        @wraps(func)
        def sync_impl(*a: P.args, **kw: P.kwargs) -> T:
            tracer = _get_tracer()
            if tracer is None:
                return func(*a, **kw)
            with tracer.start_as_current_span(span_name, attributes=attributes):
                return func(*a, **kw)

        return cast("Callable[P, T]", async_impl if is_async else sync_impl)

    return decorator
|
logxpy/filter.py
ADDED
|
@@ -0,0 +1,124 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Command line program for filtering line-based Eliot logs.
|
|
3
|
+
"""
|
|
4
|
+
|
|
5
|
+
if __name__ == "__main__":
|
|
6
|
+
import eliot.filter
|
|
7
|
+
|
|
8
|
+
eliot.filter.main()
|
|
9
|
+
|
|
10
|
+
import sys
|
|
11
|
+
from datetime import datetime, timedelta
|
|
12
|
+
from json import JSONEncoder, dumps, loads
|
|
13
|
+
|
|
14
|
+
|
|
15
|
+
class _DatetimeJSONEncoder(JSONEncoder):
    """
    JSON encoder that additionally serializes L{datetime} values as
    ISO 8601 strings.
    """

    def default(self, o):
        # Only datetimes get special treatment; anything else is deferred to
        # the base class, which raises TypeError for unknown types.
        if isinstance(o, datetime):
            return o.isoformat()
        return super().default(o)
|
|
24
|
+
|
|
25
|
+
|
|
26
|
+
class EliotFilter(object):
    """
    Filter Eliot log lines using a Python expression.

    @ivar code: A Python code object, the compiled filter expression.
    """

    # Sentinel returned by the expression to indicate "emit nothing".
    _SKIP = object()

    def __init__(self, expr, incoming, output):
        """
        @param expr: A Python expression that will be called for each log message.
        @type expr: L{str}

        @param incoming: An iterable of L{bytes}, each of which is a serialized
            Eliot message.

        @param output: A file to which output should be written.
        @type output: L{file} or a file-like object.
        """
        self.code = compile(expr, "<string>", "eval")
        self.incoming = incoming
        self.output = output

    def run(self):
        """
        Decode each incoming line as JSON, evaluate the expression against
        it, and write the JSON-encoded result to the output file -- unless
        the expression evaluated to C{SKIP}.
        """
        write = self.output.write
        for raw in self.incoming:
            evaluated = self._evaluate(loads(raw))
            if evaluated is self._SKIP:
                continue
            write(dumps(evaluated, cls=_DatetimeJSONEncoder) + "\n")

    def _evaluate(self, message):
        """
        Evaluate the expression with the given Python object in its locals.

        @param message: A decoded JSON input.

        @return: The resulting object.
        """
        local_names = {
            "J": message,
            "timedelta": timedelta,
            "datetime": datetime,
            "SKIP": self._SKIP,
        }
        return eval(self.code, globals(), local_names)
|
|
80
|
+
|
|
81
|
+
|
|
82
|
+
USAGE = """\
|
|
83
|
+
Usage: cat eliot.log | python -m eliot.filter <expr>
|
|
84
|
+
|
|
85
|
+
Read JSON-expression per line from stdin, and filter it using a Python
|
|
86
|
+
expression <expr>.
|
|
87
|
+
|
|
88
|
+
The expression will have a local `J` containing decoded JSON. `datetime` and
|
|
89
|
+
`timedelta` from Python's `datetime` module are also available as locals,
|
|
90
|
+
containing the corresponding classes. `SKIP` is also available, if it's the
|
|
91
|
+
expression result that indicates nothing should be output.
|
|
92
|
+
|
|
93
|
+
The output will be written to stdout using JSON serialization. `datetime`
|
|
94
|
+
objects will be serialized to ISO format.
|
|
95
|
+
|
|
96
|
+
Examples:
|
|
97
|
+
|
|
98
|
+
- Pass through the messages unchanged:
|
|
99
|
+
|
|
100
|
+
$ cat eliot.log | python -m eliot.filter J
|
|
101
|
+
|
|
102
|
+
- Retrieve a specific field from a specific message type, dropping messages
|
|
103
|
+
of other types:
|
|
104
|
+
|
|
105
|
+
$ cat eliot.log | python -m eliot.filter \\
|
|
106
|
+
"J['field'] if J.get('message_type') == 'my:message' else SKIP"
|
|
107
|
+
"""
|
|
108
|
+
|
|
109
|
+
|
|
110
|
+
def main(sys=sys):
    """
    Run the program.

    Accept arguments from L{sys.argv}, read from L{sys.stdin}, write to
    L{sys.stdout}.

    @param sys: An object with same interface and defaulting to the L{sys}
        module.

    @return: Exit status -- 1 on usage error, 0 otherwise.
    """
    # Exactly one argument (the filter expression) is required.
    if len(sys.argv) != 2:
        sys.stderr.write(USAGE)
        return 1
    expr = sys.argv[1]
    EliotFilter(expr, sys.stdin, sys.stdout).run()
    return 0
|
logxpy/journald.py
ADDED
|
@@ -0,0 +1,88 @@
|
|
|
1
|
+
"""
|
|
2
|
+
journald support for Eliot.
|
|
3
|
+
"""
|
|
4
|
+
|
|
5
|
+
from cffi import FFI
|
|
6
|
+
from os import strerror
|
|
7
|
+
from sys import argv
|
|
8
|
+
from os.path import basename
|
|
9
|
+
|
|
10
|
+
from .json import _dumps_bytes as dumps
|
|
11
|
+
from ._message import TASK_UUID_FIELD, MESSAGE_TYPE_FIELD
|
|
12
|
+
from ._action import ACTION_TYPE_FIELD, ACTION_STATUS_FIELD, FAILED_STATUS
|
|
13
|
+
|
|
14
|
+
_ffi = FFI()
|
|
15
|
+
_ffi.cdef(
|
|
16
|
+
"""
|
|
17
|
+
int sd_journal_send(const char *format, ...);
|
|
18
|
+
"""
|
|
19
|
+
)
|
|
20
|
+
try:
|
|
21
|
+
try:
|
|
22
|
+
_journald = _ffi.dlopen("libsystemd.so.0")
|
|
23
|
+
except OSError:
|
|
24
|
+
# Older versions of systemd have separate library:
|
|
25
|
+
_journald = _ffi.dlopen("libsystemd-journal.so.0")
|
|
26
|
+
except OSError as e:
|
|
27
|
+
raise ImportError("Failed to load journald: " + str(e))
|
|
28
|
+
|
|
29
|
+
|
|
30
|
+
def sd_journal_send(**kwargs):
    """
    Send a message to the journald log.

    @param kwargs: Mapping between field names to values, both as bytes.

    @raise IOError: If the operation failed.
    """
    # sd_journal_send() applies printf-style formatting, so literal percent
    # signs in the payload must be escaped.
    entries = []
    for name, payload in kwargs.items():
        escaped = payload.replace(b"%", b"%%")
        entries.append(_ffi.new("char[]", name.encode("ascii") + b"=" + escaped))
    entries.append(_ffi.NULL)
    status = _journald.sd_journal_send(*entries)
    if status != 0:
        # Negative-errno convention: translate back into a standard IOError.
        raise IOError(-status, strerror(-status))
|
|
48
|
+
|
|
49
|
+
|
|
50
|
+
class JournaldDestination(object):
    """
    A logging destination that writes to journald.

    The message will be logged as JSON, with an additional field
    C{ELIOT_TASK} storing the C{task_uuid} and C{ELIOT_TYPE} storing the
    C{message_type} or C{action_type}.

    Messages for failed actions will get priority 3 ("error"), and
    traceback messages will get priority 2 ("critical"). All other
    messages will get priority 6 ("info").
    """

    def __init__(self):
        # Identify this process to journald by the basename of the script
        # being run.
        self._identifier = basename(argv[0]).encode("utf-8")

    def __call__(self, message):
        """
        Write the given message to journald.

        @param message: Dictionary passed from a C{Logger}.
        """
        eliot_type = ""
        # Default syslog priority 6 ("info").
        priority = b"6"
        if ACTION_TYPE_FIELD in message:
            eliot_type = message[ACTION_TYPE_FIELD]
            # Failed actions are logged at priority 3 ("error"):
            if message[ACTION_STATUS_FIELD] == FAILED_STATUS:
                priority = b"3"
        elif MESSAGE_TYPE_FIELD in message:
            eliot_type = message[MESSAGE_TYPE_FIELD]
            # Tracebacks are logged at priority 2 ("critical"):
            if eliot_type == "eliot:traceback":
                priority = b"2"
        sd_journal_send(
            MESSAGE=dumps(message),
            ELIOT_TASK=message[TASK_UUID_FIELD].encode("utf-8"),
            ELIOT_TYPE=eliot_type.encode("utf-8"),
            SYSLOG_IDENTIFIER=self._identifier,
            PRIORITY=priority,
        )
|