logxpy 0.1.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (72) hide show
  1. logxpy/__init__.py +126 -0
  2. logxpy/_action.py +958 -0
  3. logxpy/_async.py +186 -0
  4. logxpy/_base.py +80 -0
  5. logxpy/_compat.py +71 -0
  6. logxpy/_config.py +45 -0
  7. logxpy/_dest.py +88 -0
  8. logxpy/_errors.py +58 -0
  9. logxpy/_fmt.py +68 -0
  10. logxpy/_generators.py +136 -0
  11. logxpy/_mask.py +23 -0
  12. logxpy/_message.py +195 -0
  13. logxpy/_output.py +517 -0
  14. logxpy/_pool.py +93 -0
  15. logxpy/_traceback.py +126 -0
  16. logxpy/_types.py +71 -0
  17. logxpy/_util.py +56 -0
  18. logxpy/_validation.py +486 -0
  19. logxpy/_version.py +21 -0
  20. logxpy/cli.py +61 -0
  21. logxpy/dask.py +172 -0
  22. logxpy/decorators.py +268 -0
  23. logxpy/filter.py +124 -0
  24. logxpy/journald.py +88 -0
  25. logxpy/json.py +149 -0
  26. logxpy/loggerx.py +253 -0
  27. logxpy/logwriter.py +84 -0
  28. logxpy/parse.py +191 -0
  29. logxpy/prettyprint.py +173 -0
  30. logxpy/serializers.py +36 -0
  31. logxpy/stdlib.py +23 -0
  32. logxpy/tai64n.py +45 -0
  33. logxpy/testing.py +472 -0
  34. logxpy/tests/__init__.py +9 -0
  35. logxpy/tests/common.py +36 -0
  36. logxpy/tests/strategies.py +231 -0
  37. logxpy/tests/test_action.py +1751 -0
  38. logxpy/tests/test_api.py +86 -0
  39. logxpy/tests/test_async.py +67 -0
  40. logxpy/tests/test_compat.py +13 -0
  41. logxpy/tests/test_config.py +21 -0
  42. logxpy/tests/test_coroutines.py +105 -0
  43. logxpy/tests/test_dask.py +211 -0
  44. logxpy/tests/test_decorators.py +54 -0
  45. logxpy/tests/test_filter.py +122 -0
  46. logxpy/tests/test_fmt.py +42 -0
  47. logxpy/tests/test_generators.py +292 -0
  48. logxpy/tests/test_journald.py +246 -0
  49. logxpy/tests/test_json.py +208 -0
  50. logxpy/tests/test_loggerx.py +44 -0
  51. logxpy/tests/test_logwriter.py +262 -0
  52. logxpy/tests/test_message.py +334 -0
  53. logxpy/tests/test_output.py +921 -0
  54. logxpy/tests/test_parse.py +309 -0
  55. logxpy/tests/test_pool.py +55 -0
  56. logxpy/tests/test_prettyprint.py +303 -0
  57. logxpy/tests/test_pyinstaller.py +35 -0
  58. logxpy/tests/test_serializers.py +36 -0
  59. logxpy/tests/test_stdlib.py +73 -0
  60. logxpy/tests/test_tai64n.py +66 -0
  61. logxpy/tests/test_testing.py +1051 -0
  62. logxpy/tests/test_traceback.py +251 -0
  63. logxpy/tests/test_twisted.py +814 -0
  64. logxpy/tests/test_util.py +45 -0
  65. logxpy/tests/test_validation.py +989 -0
  66. logxpy/twisted.py +265 -0
  67. logxpy-0.1.0.dist-info/METADATA +100 -0
  68. logxpy-0.1.0.dist-info/RECORD +72 -0
  69. logxpy-0.1.0.dist-info/WHEEL +5 -0
  70. logxpy-0.1.0.dist-info/entry_points.txt +2 -0
  71. logxpy-0.1.0.dist-info/licenses/LICENSE +201 -0
  72. logxpy-0.1.0.dist-info/top_level.txt +1 -0
logxpy/json.py ADDED
@@ -0,0 +1,149 @@
1
+ """Custom JSON encoding support."""
2
+
3
+ from typing import Callable
4
+ import json
5
+ import sys
6
+ from pathlib import Path
7
+ from datetime import date, time
8
+ import platform
9
+
10
+
11
class EliotJSONEncoder(json.JSONEncoder):
    """DEPRECATED. JSON encoder with additional functionality.

    In particular, supports NumPy types.  New code should pass
    ``json_default`` to the serializer directly instead of using this class.
    """

    def default(self, o):
        # Delegate every non-standard type to the module-level hook.
        return json_default(o)
20
+
21
+
22
def json_default(o: object) -> object:
    """
    JSON object encoder for non-standard types. In particular, supports NumPy
    types, Path objects, Pydantic models, dataclasses, Pandas and Polars
    objects. If you are wrapping it, call it last, as it will raise a
    ``TypeError`` on unsupported types.

    Optional third-party libraries are looked up via ``sys.modules`` rather
    than imported: a branch only runs if the calling process already imported
    that library, so merely serializing a value never pulls in a dependency.
    """
    numpy = sys.modules.get("numpy", None)
    if numpy is not None:
        if isinstance(o, numpy.floating):
            return float(o)
        if isinstance(o, numpy.integer):
            return int(o)
        if isinstance(o, numpy.bool_):
            return bool(o)
        if isinstance(o, numpy.ndarray):
            if o.size > 10000:
                # Too big to want to log as-is, log a summary:
                return {
                    "array_start": o.flat[:10000].tolist(),
                    "original_shape": o.shape,
                }
            else:
                return o.tolist()

    # Pydantic models (v2 API: model_dump):
    pydantic = sys.modules.get("pydantic", None)
    if pydantic is not None and isinstance(o, pydantic.BaseModel):
        return o.model_dump()

    # Dataclass instances -- promised by the docstring but previously only
    # handled in the PyPy override; serialize as a plain {field: value} dict.
    # (Same hasattr-based detection as the PyPy path, for consistency.)
    if hasattr(o, "__dataclass_fields__"):
        return {field: getattr(o, field) for field in o.__dataclass_fields__}

    if isinstance(o, Path):
        return str(o)

    if isinstance(o, date):
        return o.isoformat()

    if isinstance(o, time):
        return o.isoformat()

    if isinstance(o, set):
        return list(o)

    if isinstance(o, complex):
        return {"real": o.real, "imag": o.imag}

    # Pandas support:
    pandas = sys.modules.get("pandas", None)
    if pandas is not None:
        if isinstance(o, pandas.Timestamp):
            return o.isoformat()
        if isinstance(o, pandas.Series):
            return o.to_list()
        if isinstance(o, pandas.DataFrame):
            return o.to_dict(orient="records")
        if isinstance(o, pandas.Interval):
            return {"left": o.left, "right": o.right, "closed": o.closed}
        if isinstance(o, pandas.Period):
            return str(o)

    # Polars support:
    polars = sys.modules.get("polars", None)
    if polars is not None:
        if isinstance(o, polars.Series):
            return o.to_list()
        if isinstance(o, polars.DataFrame):
            return o.to_dicts()
        # NOTE(review): polars.Datetime is a dtype class, not a temporal
        # value, and dtype instances have no isoformat() -- confirm this
        # branch is ever reachable / intended.
        if isinstance(o, polars.Datetime):
            return o.isoformat()

    # Include the type name so failures are diagnosable from the log alone.
    raise TypeError(f"Unsupported type: {type(o).__name__}")
92
+
93
+
94
if platform.python_implementation() == "PyPy":
    # orjson (which natively serializes datetimes, UUIDs, enums and
    # dataclasses before ever calling the ``default`` hook) is unavailable on
    # PyPy, so the stdlib ``json`` fallback is used there and those extra
    # types must be handled here instead.
    # NOTE(review): the same gap would exist on CPython whenever orjson is
    # simply not installed -- confirm that is acceptable.

    original_json_default = json_default

    # The previous implementation is bound as a default argument so the
    # wrapper keeps a reference even after ``del original_json_default`` below
    # removes the temporary module global.
    def json_default(o: object, original_json_default=original_json_default) -> object:
        # Imported lazily: only this code path needs them.
        from datetime import datetime
        from enum import Enum
        from uuid import UUID

        # Dataclass instances: serialize as a plain {field: value} dict.
        if hasattr(o, "__dataclass_fields__"):
            return {field: getattr(o, field) for field in o.__dataclass_fields__}
        if isinstance(o, datetime):
            return o.isoformat()

        if isinstance(o, UUID):
            return str(o)

        if isinstance(o, Enum):
            return o.value

        # Everything else defers to the original hook, which raises
        # TypeError for unsupported types.
        return original_json_default(o)

    # Preserve the public docstring on the replacement, then drop the
    # temporary global so only the wrapper remains importable.
    json_default.__doc__ = original_json_default.__doc__
    del original_json_default
120
+
121
+
122
+ def _encoder_to_default_function(
123
+ encoder: json.JSONEncoder,
124
+ ) -> Callable[[object], object]:
125
+ """
126
+ Convert an encoder into a default function usable by ``orjson``.
127
+ """
128
+
129
+ def default(o: object) -> object:
130
+ return encoder.default(o)
131
+
132
+ return default
133
+
134
+
135
+ try:
136
+ from orjson import dumps as _dumps_bytes
137
+
138
+ def _dumps_unicode(o: object, default=None) -> str:
139
+ return _dumps_bytes(o, default=default).decode("utf-8")
140
+
141
+ except ImportError:
142
+
143
+ def _dumps_bytes(o: object, default=None) -> bytes:
144
+ """Serialize an object to JSON, output bytes."""
145
+ return json.dumps(o, default=default).encode("utf-8")
146
+
147
+ _dumps_unicode = json.dumps
148
+
149
+ __all__ = ["EliotJSONEncoder", "json_default"]
logxpy/loggerx.py ADDED
@@ -0,0 +1,253 @@
1
+ """Main Logger facade - LoggerX API built on eliot."""
2
+
3
+ from __future__ import annotations
4
+
5
+ import traceback
6
+ from contextlib import contextmanager
7
+ from typing import Any
8
+
9
+ from . import decorators
10
+ from ._action import current_action # Use eliot's to work with both Action types
11
+ from ._async import _emit, current_scope, scope
12
+ from ._base import now, uuid
13
+ from ._fmt import format_value
14
+ from ._output import to_file
15
+ from ._types import Level, Record
16
+
17
+
18
class Logger:
    """LoggerX-compatible logger with fluent API.

    Every logging method returns ``self`` so calls can be chained
    (``log.info("a").debug("b")``).  Each call builds a ``Record`` tagged
    with the current eliot task/action (if any) and hands it to ``_emit``,
    so output interleaves correctly with plain eliot logging.
    """

    # Slots keep per-logger overhead small; loggers are cheap to clone
    # (see ctx() / new()).
    __slots__ = ("_context", "_level", "_masker", "_name")

    def __init__(self, name: str = "root", context: dict[str, Any] | None = None):
        # Records below this level are dropped in _log().
        self._level = Level.DEBUG
        self._name = name
        self._context = context or {}
        # Set by configure(mask_fields=...); None means no masking.
        self._masker = None

    # === Level Methods (fluent - return self) ===
    def debug(self, msg: str, **f: Any) -> Logger:
        return self._log(Level.DEBUG, msg, **f)

    def info(self, msg: str, **f: Any) -> Logger:
        return self._log(Level.INFO, msg, **f)

    def success(self, msg: str, **f: Any) -> Logger:
        return self._log(Level.SUCCESS, msg, **f)

    def note(self, msg: str, **f: Any) -> Logger:
        return self._log(Level.NOTE, msg, **f)

    def warning(self, msg: str, **f: Any) -> Logger:
        return self._log(Level.WARNING, msg, **f)

    def error(self, msg: str, **f: Any) -> Logger:
        return self._log(Level.ERROR, msg, **f)

    def critical(self, msg: str, **f: Any) -> Logger:
        return self._log(Level.CRITICAL, msg, **f)

    def checkpoint(self, msg: str, **f: Any) -> Logger:
        # INFO-level record visually marked as a checkpoint.
        return self._log(Level.INFO, f"📍 {msg}", **f)

    def exception(self, msg: str, **f: Any) -> Logger:
        # Attaches the active exception's traceback; only meaningful when
        # called from inside an ``except`` block.
        f["eliot:traceback"] = traceback.format_exc()
        return self._log(Level.ERROR, msg, **f)

    def __call__(self, msg: str, **f: Any) -> Logger:
        """Shortcut: log("msg") == log.info("msg")"""
        return self.info(msg, **f)

    # === Universal Send ===
    def send(self, msg: str, data: Any, **f: Any) -> Logger:
        # format_value() renders arbitrary payloads into a loggable form.
        return self._log(Level.INFO, msg, data=format_value(data), **f)

    # === Type Methods ===
    def df(self, data: Any, title: str | None = None, **opts: Any) -> Logger:
        return self.send(title or "DataFrame", data, **opts)

    def tensor(self, data: Any, title: str | None = None) -> Logger:
        return self.send(title or "Tensor", data)

    def json(self, data: dict, title: str | None = None) -> Logger:
        # Pretty-printed and truncated to 5000 characters to bound record
        # size; default=str stringifies anything json can't encode natively.
        import json as _json

        return self._log(Level.INFO, title or "JSON", content=_json.dumps(data, indent=2, default=str)[:5000])

    def img(self, data: Any, title: str | None = None, **opts: Any) -> Logger:
        return self.send(title or "Image", data, **opts)

    def plot(self, fig: Any, title: str | None = None) -> Logger:
        return self.send(title or "Plot", fig)  # Basic support via repr/str for now

    def tree(self, data: Any, title: str | None = None) -> Logger:
        return self.send(title or "Tree", data)  # Basic support

    def table(self, data: list[dict], title: str | None = None) -> Logger:
        return self.send(title or "Table", data)  # Basic support

    # === Context (LoggerX features) ===
    def scope(self, **ctx: Any):
        """Create nested scope: `with log.scope(user_id=123):`"""
        return scope(**ctx)

    def ctx(self, **ctx: Any) -> Logger:
        """Fluent interface to add context to a new logger instance."""
        # This logger is left untouched; the caller gets a copy whose
        # context is merged with **ctx (new keys win).
        new_ctx = self._context.copy()
        new_ctx.update(ctx)
        child = Logger(self._name, new_ctx)
        child._level = self._level
        return child

    def new(self, name: str | None = None) -> Logger:
        """Create child logger with a dotted sub-name, inheriting level/context."""
        new_name = f"{self._name}.{name}" if name else self._name
        child = Logger(new_name, self._context.copy())
        child._level = self._level
        return child

    @contextmanager
    def span(self, name: str, **attributes: Any):
        """OpenTelemetry span context manager.

        Yields a real OTel span when ``opentelemetry`` is importable;
        otherwise yields a no-op stand-in with the same surface, so call
        sites never need to guard on OTel availability.
        """
        try:
            from opentelemetry import trace as otel

            tracer = otel.get_tracer(__name__)
            with tracer.start_as_current_span(name, attributes=attributes) as span:
                yield span
        except ImportError:

            class MockSpan:
                # Minimal no-op subset of the OTel span interface.
                def set_attribute(self, k, v):
                    pass

                def add_event(self, n, a=None):
                    pass

                def __enter__(self):
                    return self

                def __exit__(self, *a):
                    pass

            yield MockSpan()

    # === Decorators (exposed as methods, so ``log.timed`` etc. work) ===
    logged = staticmethod(decorators.logged)
    timed = staticmethod(decorators.timed)
    retry = staticmethod(decorators.retry)
    generator = staticmethod(decorators.generator)
    aiterator = staticmethod(decorators.aiterator)
    trace = staticmethod(decorators.trace)

    # === Config ===
    def configure(
        self,
        level: str = "DEBUG",
        destinations: list[str] | None = None,
        format: str = "rich",
        context: dict[str, Any] | None = None,
        mask_fields: list[str] | None = None,
        **_: Any,
    ) -> Logger:
        """Configure this logger in place and return it (fluent).

        @param level: Minimum level name, case-insensitive (e.g. "INFO").
        @param destinations: Destination specs ("console", "file://<path>",
            "otel..."); only "file://" is functional at present.
        @param format: Console format hint; currently unused.
        @param context: Extra key/value pairs merged into this logger's context.
        @param mask_fields: Field names to mask globally via ``Masker``.
        """
        self._level = Level[level.upper()]

        if context:
            self._context.update(context)

        if mask_fields:
            from ._mask import Masker

            self._masker = Masker(mask_fields, [])
            set_global_masker(self._masker)

        if destinations:
            # Destinations are only ever added, never cleared: eliot keeps a
            # process-global destination list with no public reset API.
            for dest in destinations:
                if dest == "console":
                    # Console output is not wired up yet: LoggerX's own
                    # destinations (_dest.py) expose write(record) while eliot
                    # destinations are call(dict), and no bridge exists here.
                    if format == "rich":
                        # Intentionally a no-op for now.
                        pass

                elif dest.startswith("file://"):
                    path = dest.replace("file://", "")
                    # NOTE(review): the file handle is deliberately left open
                    # for the lifetime of the destination -- confirm.
                    to_file(open(path, "a"))

                elif dest.startswith("otel"):
                    # OpenTelemetry destination: not implemented yet.
                    pass

        return self

    # === Internal ===
    def _log(self, level: Level, msg: str, **fields: Any) -> Logger:
        """Build a Record and emit it; core of every public logging method."""
        if level.value < self._level.value:
            return self
        act = current_action()
        task_uuid, task_level = _get_task_info(act)

        # Merge context: global scope + logger instance context
        ctx = current_scope()
        if self._context:
            ctx = {**ctx, **self._context}

        record = Record(
            timestamp=now(),
            level=level,
            message=msg,
            message_type=f"loggerx:{level.name.lower()}",
            fields=fields,
            context=ctx,
            task_uuid=task_uuid,
            task_level=task_level,
        )
        _emit(record)  # Goes to eliot's destinations + any new handlers
        return self
224
+
225
+
226
+ def _get_task_info(act) -> tuple[str, tuple[int, ...]]:
227
+ """Extract task info from eliot.Action or AsyncAction."""
228
+ if act is None:
229
+ return uuid(), (1,)
230
+ task_uuid = act.task_uuid
231
+ # eliot.Action uses _task_level (TaskLevel), AsyncAction uses task_level (tuple)
232
+ if hasattr(act, "task_level"): # AsyncAction
233
+ return task_uuid, act.task_level
234
+ if hasattr(act, "_task_level"): # eliot.Action
235
+ return task_uuid, tuple(act._task_level.as_list())
236
+ return task_uuid, (1,)
237
+
238
+
239
# === Global masker ===
# Process-wide Masker shared by every Logger instance; None disables masking.
_global_masker = None


def set_global_masker(masker):
    """Install *masker* as the process-wide field masker."""
    global _global_masker
    _global_masker = masker


def get_global_masker():
    """Return the currently installed process-wide masker, or None."""
    return _global_masker
250
+
251
+
252
# === Global instance ===
# Module-level default logger: ``from logxpy.loggerx import log``.
log = Logger()
logxpy/logwriter.py ADDED
@@ -0,0 +1,84 @@
1
+ """
2
+ A log destination for use by Twisted applications.
3
+
4
+ Runs in a thread, so that we don't do blocking I/O in the event loop thread.
5
+ """
6
+
7
+ import threading
8
+ from queue import SimpleQueue
9
+
10
+ from twisted.application.service import Service
11
+ from twisted.internet.threads import deferToThreadPool
12
+
13
+ from . import addDestination, removeDestination
14
+
15
# Sentinel enqueued by ThreadedWriter.stopService() to make the writer
# thread's loop exit; compared by identity in _reader().
_STOP = object()
16
+
17
+
18
class ThreadedWriter(Service):
    """
    A non-blocking Eliot log destination wrapping a blocking destination.

    Messages are queued and written by a dedicated worker thread, so the
    reactor thread never performs blocking I/O.

    @ivar _thread: C{None}, or the L{threading.Thread} draining the queue.
    """

    name = "Eliot Log Writer"

    def __init__(self, destination, reactor):
        """
        @param destination: The underlying (blocking) destination for log
            files; invoked only from the worker thread.

        @param reactor: The main reactor.
        """
        self._destination = destination
        self._queue = SimpleQueue()
        self._mainReactor = reactor
        self._thread = None

    def startService(self):
        """Spawn the worker thread and register as an Eliot destination."""
        Service.startService(self)
        worker = threading.Thread(target=self._reader)
        self._thread = worker
        worker.start()
        addDestination(self)

    def stopService(self):
        """Unregister, signal the worker to stop, and wait for it to exit."""
        Service.stopService(self)
        removeDestination(self)
        self._queue.put(_STOP)
        pool = self._mainReactor.getThreadPool()
        return deferToThreadPool(self._mainReactor, pool, self._thread.join)

    def __call__(self, data):
        """
        Queue *data* for the worker thread to hand to the wrapped destination.

        @param data: C{bytes} to write to disk.
        """
        self._queue.put(data)

    def _reader(self):
        """Worker loop: drain the queue until the stop sentinel arrives."""
        while True:
            item = self._queue.get()
            if item is _STOP:
                return
            try:
                self._destination(item)
            except Exception:
                # The wrapped destination failed; there is no safe place to
                # report it from here, so the message is dropped.
                pass
logxpy/parse.py ADDED
@@ -0,0 +1,191 @@
1
+ """
2
+ Parse a stream of serialized messages into a forest of
3
+ ``WrittenAction`` and ``WrittenMessage`` objects.
4
+ """
5
+
6
+ from pyrsistent import PClass, pmap_field, pset_field, discard
7
+
8
+ from ._message import WrittenMessage, TASK_UUID_FIELD
9
+ from ._action import (
10
+ TaskLevel,
11
+ WrittenAction,
12
+ ACTION_STATUS_FIELD,
13
+ STARTED_STATUS,
14
+ ACTION_TYPE_FIELD,
15
+ )
16
+
17
+
18
class Task(PClass):
    """
    A tree of actions with the same task UUID.

    Immutable (pyrsistent ``PClass``): every mutator returns an updated copy.

    @ivar _nodes: Map from L{TaskLevel} to the L{WrittenAction} or
        L{WrittenMessage} stored at that position in the tree.
    @ivar _completed: Set of L{TaskLevel} of actions known to be complete
        (start and end seen, and all child actions complete).
    """

    _nodes = pmap_field(TaskLevel, (WrittenAction, WrittenMessage))
    _completed = pset_field(TaskLevel)
    # The root action lives at the empty level: its start message is at
    # level [1], whose parent() is TaskLevel(level=[]).
    _root_level = TaskLevel(level=[])

    def root(self):
        """
        @return: The root L{WrittenAction}.
        """
        return self._nodes[self._root_level]

    def is_complete(self):
        """
        @return bool: True only if all messages in the task tree have been
            added to it.
        """
        return self._root_level in self._completed

    def _insert_action(self, node):
        """
        Add a L{WrittenAction} to the tree.

        Parent actions will be created as necessary.

        @param node: A L{WrittenAction} to add to the tree.

        @return: Updated L{Task}.
        """
        task = self
        # An action whose messages occupy levels 1..n has start=1, end=n and
        # n-2 direct children, so comparing the child count against
        # ``end_message.task_level.level[-1] - 2`` checks that every direct
        # child slot is filled.
        if (
            node.end_message
            and node.start_message
            and (len(node.children) == node.end_message.task_level.level[-1] - 2)
        ):
            # Possibly this action is complete, make sure all sub-actions
            # are complete:
            completed = True
            for child in node.children:
                if (
                    isinstance(child, WrittenAction)
                    and child.task_level not in self._completed
                ):
                    completed = False
                    break
            if completed:
                task = task.transform(["_completed"], lambda s: s.add(node.task_level))
        task = task.transform(["_nodes", node.task_level], node)
        return task._ensure_node_parents(node)

    def _ensure_node_parents(self, child):
        """
        Ensure the node (WrittenAction/WrittenMessage) is referenced by parent
        nodes.

        Parent actions will be created as necessary.

        @param child: A L{WrittenMessage} or L{WrittenAction} which is
            being added to the tree.

        @return: Updated L{Task}.
        """
        task_level = child.task_level
        if task_level.parent() is None:
            # Already at the root; there is nothing above to attach to.
            return self

        parent = self._nodes.get(task_level.parent())
        if parent is None:
            # Parent not seen yet: synthesize a placeholder action that will
            # be filled in when its start/end messages arrive.
            parent = WrittenAction(
                task_level=task_level.parent(), task_uuid=child.task_uuid
            )
        parent = parent._add_child(child)
        return self._insert_action(parent)

    def add(self, message_dict):
        """
        Update the L{Task} with a dictionary containing a serialized Eliot
        message.

        @param message_dict: Dictionary whose task UUID matches this one.

        @return: Updated L{Task}.
        """
        # Start/end messages of actions carry an action_type field; plain
        # standalone messages do not.
        is_action = message_dict.get(ACTION_TYPE_FIELD) is not None
        written_message = WrittenMessage.from_dict(message_dict)
        if is_action:
            # The action node lives at the parent level of its start/end
            # messages.
            action_level = written_message.task_level.parent()
            action = self._nodes.get(action_level)
            if action is None:
                action = WrittenAction(
                    task_level=action_level, task_uuid=message_dict[TASK_UUID_FIELD]
                )
            if message_dict[ACTION_STATUS_FIELD] == STARTED_STATUS:
                # Either newly created MissingAction, or one created by
                # previously added descendant of the action.
                action = action._start(written_message)
            else:
                action = action._end(written_message)
            return self._insert_action(action)
        else:
            # Special case where there is no action:
            if written_message.task_level.level == [1]:
                # A standalone message is the whole task: store it at the
                # root and mark the task complete in one transform.
                return self.transform(
                    ["_nodes", self._root_level],
                    written_message,
                    ["_completed"],
                    lambda s: s.add(self._root_level),
                )
            else:
                return self._ensure_node_parents(written_message)
131
+
132
+
133
class Parser(PClass):
    """
    Accumulate serialized Eliot messages into L{Task} instances.

    Immutable: ``add`` returns an updated parser rather than mutating.

    @ivar _tasks: Map from task UUID to the (possibly incomplete)
        corresponding L{Task}.
    """

    _tasks = pmap_field(str, Task)

    def add(self, message_dict):
        """
        Fold one serialized Eliot message into the parser state.

        @param message_dict: Dictionary of serialized Eliot message.

        @return: Tuple of (list of completed L{Task} instances, updated
            L{Parser}).
        """
        task_uuid = message_dict[TASK_UUID_FIELD]
        existing = self._tasks.get(task_uuid)
        updated = (Task() if existing is None else existing).add(message_dict)
        if updated.is_complete():
            # A finished task is handed to the caller and dropped from state.
            return [updated], self.transform(["_tasks", task_uuid], discard)
        return [], self.transform(["_tasks", task_uuid], updated)

    def incomplete_tasks(self):
        """
        @return: List of L{Task} that are not yet complete.
        """
        return list(self._tasks.values())

    @classmethod
    def parse_stream(cls, iterable):
        """
        Parse a stream of messages into a stream of L{Task} instances.

        :param iterable: An iterable of serialized Eliot message dictionaries.

        :return: An iterable of parsed L{Task} instances. Remaining
            incomplete L{Task} will be returned when the input stream is
            exhausted.
        """
        parser = Parser()
        for message_dict in iterable:
            done, parser = parser.add(message_dict)
            yield from done
        yield from parser.incomplete_tasks()
189
+
190
+
191
# Public API of ``logxpy.parse`` (TaskLevel/Written* are re-exported for
# convenience of callers that only import this module).
__all__ = ["Parser", "Task", "TaskLevel", "WrittenMessage", "WrittenAction"]