track-sdk 0.2.0__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,78 @@
1
+ Metadata-Version: 2.3
2
+ Name: track-sdk
3
+ Version: 0.2.0
4
+ Summary: ML experiment tracking library built on pybag-sdk
5
+ License: MIT
6
+ Requires-Dist: pybag-sdk>=0.10.0
7
+ Requires-Dist: numpy>=1.20.0
8
+ Requires-Dist: pillow>=12.1.0
9
+ Requires-Dist: pytest>=7.0.0 ; extra == 'test'
10
+ Requires-Dist: pytest-cov>=4.0.0 ; extra == 'test'
11
+ Requires-Python: >=3.10
12
+ Provides-Extra: test
13
+ Description-Content-Type: text/markdown
14
+
15
+ # Track
16
+
17
+ An experiment tracking library built on top of [pybag](https://github.com/siliconlad/pybag).
18
+
19
+ ## Minimal Logger Examples
20
+
21
+ `output_dir` is a directory. The logger writes to `<output_dir>/<YYYYMMDD_HHMMSS_microseconds>_<logger-name>.mcap`.
22
+
23
+ ### 1. Log messages
24
+
25
+ ```python
26
+ from track import Logger
27
+
28
+ logger = Logger("demo", output_dir="logs").open()
29
+ logger.info("training started")
30
+ logger.warning("learning rate is high")
31
+ logger.close()
32
+ ```
33
+
34
+ ### 2. Log an image (NumPy array)
35
+
36
+ ```python
37
+ import numpy as np
38
+ from track import Logger
39
+
40
+ image = np.zeros((64, 64, 3), dtype=np.uint8)
41
+ image[:, :, 1] = 255 # green
42
+
43
+ logger = Logger("demo", output_dir="logs").open()
44
+ logger.log_image("camera/rgb", image, format="png")
45
+ logger.close()
46
+ ```
47
+
48
+ ### 3. Log a point cloud (structured array)
49
+
50
+ ```python
51
+ import numpy as np
52
+ from track import Logger
53
+
54
+ dtype = np.dtype([("x", "f4"), ("y", "f4"), ("z", "f4")])
55
+ points = np.zeros(3, dtype=dtype)
56
+ points["x"] = [0.0, 1.0, 0.0]
57
+ points["y"] = [0.0, 0.0, 1.0]
58
+ points["z"] = [0.0, 0.0, 0.0]
59
+
60
+ logger = Logger("demo", output_dir="logs").open()
61
+ logger.log_pointcloud("lidar", points)
62
+ logger.close()
63
+ ```
64
+
65
+ ### 4. Add metadata and attachments
66
+
67
+ ```python
68
+ from track import Logger
69
+
70
+ logger = Logger("demo", output_dir="logs").open()
71
+ logger.add_metadata("experiment", {"name": "baseline", "epoch": "1"})
72
+ logger.add_attachment(
73
+ "config.json",
74
+ b'{"batch_size": 32}',
75
+ media_type="application/json",
76
+ )
77
+ logger.close()
78
+ ```
@@ -0,0 +1,64 @@
1
+ # Track
2
+
3
+ An experiment tracking library built on top of [pybag](https://github.com/siliconlad/pybag).
4
+
5
+ ## Minimal Logger Examples
6
+
7
+ `output_dir` is a directory. The logger writes to `<output_dir>/<YYYYMMDD_HHMMSS_microseconds>_<logger-name>.mcap`.
8
+
9
+ ### 1. Log messages
10
+
11
+ ```python
12
+ from track import Logger
13
+
14
+ logger = Logger("demo", output_dir="logs").open()
15
+ logger.info("training started")
16
+ logger.warning("learning rate is high")
17
+ logger.close()
18
+ ```
19
+
20
+ ### 2. Log an image (NumPy array)
21
+
22
+ ```python
23
+ import numpy as np
24
+ from track import Logger
25
+
26
+ image = np.zeros((64, 64, 3), dtype=np.uint8)
27
+ image[:, :, 1] = 255 # green
28
+
29
+ logger = Logger("demo", output_dir="logs").open()
30
+ logger.log_image("camera/rgb", image, format="png")
31
+ logger.close()
32
+ ```
33
+
34
+ ### 3. Log a point cloud (structured array)
35
+
36
+ ```python
37
+ import numpy as np
38
+ from track import Logger
39
+
40
+ dtype = np.dtype([("x", "f4"), ("y", "f4"), ("z", "f4")])
41
+ points = np.zeros(3, dtype=dtype)
42
+ points["x"] = [0.0, 1.0, 0.0]
43
+ points["y"] = [0.0, 0.0, 1.0]
44
+ points["z"] = [0.0, 0.0, 0.0]
45
+
46
+ logger = Logger("demo", output_dir="logs").open()
47
+ logger.log_pointcloud("lidar", points)
48
+ logger.close()
49
+ ```
50
+
51
+ ### 4. Add metadata and attachments
52
+
53
+ ```python
54
+ from track import Logger
55
+
56
+ logger = Logger("demo", output_dir="logs").open()
57
+ logger.add_metadata("experiment", {"name": "baseline", "epoch": "1"})
58
+ logger.add_attachment(
59
+ "config.json",
60
+ b'{"batch_size": 32}',
61
+ media_type="application/json",
62
+ )
63
+ logger.close()
64
+ ```
@@ -0,0 +1,30 @@
1
+ [build-system]
2
+ requires = ["uv_build>=0.9.16,<0.10.0"]
3
+ build-backend = "uv_build"
4
+
5
+ [project]
6
+ name = "track-sdk"
7
+ version = "0.2.0"
8
+ description = "ML experiment tracking library built on pybag-sdk"
9
+ readme = "README.md"
10
+ license = {text = "MIT"}
11
+ requires-python = ">=3.10"
12
+ dependencies = [
13
+ "pybag-sdk>=0.10.0",
14
+ "numpy>=1.20.0",
15
+ "pillow>=12.1.0",
16
+ ]
17
+
18
+ [project.optional-dependencies]
19
+ test = [
20
+ "pytest>=7.0.0",
21
+ "pytest-cov>=4.0.0",
22
+ ]
23
+
24
+ [tool.uv.build-backend]
25
+ module-name = "track"
26
+
27
+ [tool.mypy]
28
+ python_version = "3.10"
29
+ warn_return_any = true
30
+ warn_unused_configs = true
@@ -0,0 +1,198 @@
1
+ """Track - ML Experiment Tracking Library."""
2
+
3
+ from __future__ import annotations
4
+
5
+ import threading
6
+ from pathlib import Path
7
+ import numpy as np
8
+
9
+ from track.logger import Logger, LogLevel
10
+
11
+ __version__ = "0.2.0"
12
+ __all__ = [
13
+ "Logger",
14
+ "LogLevel",
15
+ # Registry
16
+ "get_logger",
17
+ # Default logger lifecycle
18
+ "init",
19
+ "finish",
20
+ # Convenience log functions
21
+ "debug",
22
+ "info",
23
+ "warning",
24
+ "error",
25
+ "fatal",
26
+ # Convenience data functions
27
+ "log_image",
28
+ "log_pointcloud",
29
+ "add_metadata",
30
+ "add_attachment",
31
+ ]
32
+
33
+ # ---------------------------------------------------------------------------
34
+ # Global logger registry
35
+ # ---------------------------------------------------------------------------
36
+
37
+ _loggers: dict[str, Logger] = {}
38
+ _default_logger: Logger | None = None
39
+ _registry_lock = threading.Lock()
40
+
41
+
42
def get_logger(name: str) -> Logger:
    """Retrieve a registered logger by name.

    Raises:
        KeyError: If no logger with that name has been registered via ``init``.
    """
    with _registry_lock:
        logger = _loggers.get(name)
    if logger is None:
        raise KeyError(
            f"No logger named {name!r}. Create one with track.init({name!r}, ...)."
        )
    return logger
55
+
56
+
57
def init(
    name: str,
    output_dir: str | Path | None = None,
    *,
    use_process: bool = False,
) -> Logger:
    """Create, open, register, and set a logger as the default.

    Args:
        name: Logger name (included in log messages).
        output_dir: Directory where MCAP files should be written.
        use_process: If True, use a background writer process.

    Returns:
        The newly created and opened ``Logger``.

    Raises:
        ValueError: If a logger with *name* is already registered.
    """
    global _default_logger

    # First check under the lock; open() below may be slow (file/process
    # creation), so it deliberately runs without the registry lock held.
    with _registry_lock:
        if name in _loggers:
            raise ValueError(
                f"A logger named {name!r} is already registered. "
                "Call track.finish() first or use a different name."
            )

    logger = Logger(name, output_dir, use_process=use_process)
    logger.open()

    with _registry_lock:
        # Re-check after releasing the lock for open() — another thread
        # could have registered the same name in the meantime.
        if name in _loggers:
            logger.close()
            raise ValueError(
                f"A logger named {name!r} was registered concurrently. "
                "Call track.finish() first or use a different name."
            )
        _loggers[name] = logger
        _default_logger = logger

    return logger
101
+
102
+
103
def finish() -> None:
    """Close and unregister the default logger.

    No-op when no default logger is set; safe to call repeatedly.
    """
    global _default_logger

    with _registry_lock:
        logger = _default_logger
        if logger is None:
            return
        _default_logger = None
        _loggers.pop(logger._name, None)

    # Close outside the lock: close() may block (flush / process join).
    logger.close()
115
+
116
+
117
+ # ---------------------------------------------------------------------------
118
+ # Convenience functions — delegate to the default logger
119
+ # ---------------------------------------------------------------------------
120
+
121
def _get_default() -> Logger:
    """Return the default logger or raise a clear error."""
    current = _default_logger
    if current is None:
        raise RuntimeError(
            "No default logger. Call track.init(...) first."
        )
    return current
129
+
130
+
131
def debug(message: str, *, timestamp_ns: int | None = None) -> None:
    """Log a debug message to the default logger."""
    # _stacklevel=3 skips this wrapper so file/line point at the user's call.
    default = _get_default()
    default.debug(message, timestamp_ns=timestamp_ns, _stacklevel=3)
134
+
135
+
136
def info(message: str, *, timestamp_ns: int | None = None) -> None:
    """Log an info message to the default logger."""
    # _stacklevel=3 skips this wrapper so file/line point at the user's call.
    default = _get_default()
    default.info(message, timestamp_ns=timestamp_ns, _stacklevel=3)
139
+
140
+
141
def warning(message: str, *, timestamp_ns: int | None = None) -> None:
    """Log a warning message to the default logger."""
    # _stacklevel=3 skips this wrapper so file/line point at the user's call.
    default = _get_default()
    default.warning(message, timestamp_ns=timestamp_ns, _stacklevel=3)
144
+
145
+
146
def error(message: str, *, timestamp_ns: int | None = None) -> None:
    """Log an error message to the default logger."""
    # _stacklevel=3 skips this wrapper so file/line point at the user's call.
    default = _get_default()
    default.error(message, timestamp_ns=timestamp_ns, _stacklevel=3)
149
+
150
+
151
def fatal(message: str, *, timestamp_ns: int | None = None) -> None:
    """Log a fatal message to the default logger."""
    # _stacklevel=3 skips this wrapper so file/line point at the user's call.
    default = _get_default()
    default.fatal(message, timestamp_ns=timestamp_ns, _stacklevel=3)
154
+
155
+
156
def log_image(
    topic: str,
    image: bytes | np.ndarray,
    *,
    format: str = "png",
    frame_id: str | None = None,
    timestamp_ns: int | None = None,
) -> None:
    """Log an image to the default logger.

    Args:
        topic: Topic name (e.g., "camera/rgb").
        image: Image data as compressed bytes or a numpy array.
        format: Image format passed through to the logger (e.g. 'png').
        frame_id: Frame of reference for the image.
        timestamp_ns: Optional timestamp in nanoseconds.

    Raises:
        RuntimeError: If no default logger has been initialized.
    """
    _get_default().log_image(
        topic, image, format=format, frame_id=frame_id, timestamp_ns=timestamp_ns,
    )
168
+
169
+
170
def log_pointcloud(
    topic: str,
    points: np.ndarray,
    *,
    frame_id: str = "world",
    timestamp_ns: int | None = None,
) -> None:
    """Log a point cloud to the default logger.

    Args:
        topic: Topic name (e.g., "lidar").
        points: Structured numpy array, one field per point component.
        frame_id: Frame of reference for the cloud.
        timestamp_ns: Optional timestamp in nanoseconds.

    Raises:
        RuntimeError: If no default logger has been initialized.
    """
    _get_default().log_pointcloud(
        topic, points, frame_id=frame_id, timestamp_ns=timestamp_ns,
    )
181
+
182
+
183
def add_metadata(name: str, data: dict[str, str]) -> None:
    """Add metadata to the default logger."""
    default = _get_default()
    default.add_metadata(name, data)
186
+
187
+
188
def add_attachment(
    name: str,
    data: bytes,
    media_type: str = "application/octet-stream",
    *,
    timestamp_ns: int | None = None,
) -> None:
    """Add an attachment to the default logger.

    Args:
        name: Attachment name recorded in the MCAP file.
        data: Raw attachment bytes.
        media_type: MIME type of the data.
        timestamp_ns: Optional log time in nanoseconds.

    Raises:
        RuntimeError: If no default logger has been initialized.
    """
    _get_default().add_attachment(
        name, data, media_type=media_type, timestamp_ns=timestamp_ns,
    )
@@ -0,0 +1,725 @@
1
+ """ML experiment logger built on pybag."""
2
+
3
+ from __future__ import annotations
4
+
5
+ import atexit
6
+ import datetime as dt
7
+ import inspect
8
+ import io
9
+ import multiprocessing as mp
10
+ import os
11
+ import queue
12
+ import re
13
+ import sys
14
+ import threading
15
+ import time
16
+ import warnings
17
+ from dataclasses import dataclass
18
+ from enum import Enum, IntEnum, auto
19
+ from pathlib import Path
20
+ from typing import Any
21
+
22
+ import numpy as np
23
+ import pybag.types as t
24
+ from PIL import Image
25
+ from pybag.mcap_writer import McapFileWriter
26
+ from pybag.ros2.humble import builtin_interfaces, sensor_msgs, std_msgs
27
+
28
+
29
class LogLevel(IntEnum):
    """Logging level.

    Numeric values are the same as the stdlib ``logging`` severity numbers,
    so levels compare and sort consistently with them.
    """

    DEBUG = 10
    INFO = 20
    WARNING = 30
    ERROR = 40
    FATAL = 50
37
+
38
+
39
# TODO: Use the rcl_interfaces message in pybag when added
@dataclass(kw_only=True)
class Log:
    """Local stand-in for the ROS 2 ``rcl_interfaces/msg/Log`` message."""

    __msg_name__ = "rcl_interfaces/msg/Log"

    stamp: builtin_interfaces.Time
    """Timestamp associated with the log message."""

    level: t.uint8
    """Logging level."""

    name: t.string
    """Name of logger that this message came from."""

    msg: t.string
    """Full log message."""

    file: t.string
    """File the log message came from."""

    line: t.uint32
    """Line number in the file the log message came from."""
61
+
62
+
63
class RecordType(Enum):
    """Types of records for process-queue logging.

    CLOSE is a sentinel: the writer process drains until it sees it, then exits.
    """

    LOG = auto()
    IMAGE = auto()
    POINTCLOUD = auto()
    METADATA = auto()
    ATTACHMENT = auto()
    CLOSE = auto()
72
+
73
+
74
@dataclass
class LogRecord:
    """A write request for the background writer process."""

    # Which kind of payload `data` carries; see _process_record for the keys
    # expected per record type.
    record_type: RecordType
    data: dict[str, Any]
80
+
81
+
82
# Dtype to PointField datatype mapping.
# Dtypes not listed here (e.g. int64/float16) are rejected by
# _write_pointcloud with a ValueError.
DTYPE_TO_POINTFIELD: dict[np.dtype, int] = {
    np.dtype("int8"): sensor_msgs.PointField.INT8,
    np.dtype("uint8"): sensor_msgs.PointField.UINT8,
    np.dtype("int16"): sensor_msgs.PointField.INT16,
    np.dtype("uint16"): sensor_msgs.PointField.UINT16,
    np.dtype("int32"): sensor_msgs.PointField.INT32,
    np.dtype("uint32"): sensor_msgs.PointField.UINT32,
    np.dtype("float32"): sensor_msgs.PointField.FLOAT32,
    np.dtype("float64"): sensor_msgs.PointField.FLOAT64,
}
93
+
94
+
95
+ def _now_ns() -> int:
96
+ """Get current time in nanoseconds."""
97
+ return time.time_ns()
98
+
99
+
100
def _ns_to_stamp(ns: int) -> builtin_interfaces.Time:
    """Split integer nanoseconds into a ``builtin_interfaces.Time`` message."""
    sec, nanosec = divmod(ns, 1_000_000_000)
    return builtin_interfaces.Time(sec=sec, nanosec=nanosec)
106
+
107
+
108
def _make_header(timestamp_ns: int, frame_id: str = "") -> std_msgs.Header:
    """Create a std_msgs.Header.

    Args:
        timestamp_ns: Stamp in nanoseconds since the epoch.
        frame_id: Frame of reference; empty string when unspecified.
    """
    return std_msgs.Header(
        stamp=_ns_to_stamp(timestamp_ns),
        frame_id=frame_id,
    )
114
+
115
+
116
def _process_record(logger: Logger, record: LogRecord) -> None:
    """Process one queued record in the writer process.

    Dispatches on ``record.record_type`` and replays the request against the
    synchronous (use_process=False) logger owned by the writer process.
    CLOSE is handled by the caller's loop, not here; unknown types are
    silently ignored.
    """
    data = record.data

    if record.record_type == RecordType.LOG:
        logger._log(
            LogLevel(data["level"]),
            data["message"],
            file=data["file"],
            line=data["line"],
            timestamp_ns=data["timestamp"],
        )
    elif record.record_type == RecordType.IMAGE:
        logger.log_image(
            topic=data["topic"],
            image=data["image"],
            format=data["format"],
            frame_id=data["frame_id"],
            timestamp_ns=data["timestamp"],
        )
    elif record.record_type == RecordType.POINTCLOUD:
        logger.log_pointcloud(
            topic=data["topic"],
            points=data["points"],
            frame_id=data["frame_id"],
            timestamp_ns=data["timestamp"],
        )
    elif record.record_type == RecordType.METADATA:
        logger.add_metadata(data["name"], data["data"])
    elif record.record_type == RecordType.ATTACHMENT:
        logger.add_attachment(
            data["name"],
            data["data"],
            media_type=data["media_type"],
            timestamp_ns=data["timestamp"],
        )
152
+
153
+
154
def _writer_process_main(
    output_path: str,
    record_queue: mp.Queue,
    name: str,
    ready_event: mp.Event,
) -> None:
    """Writer process target that drains queue records into MCAP.

    Args:
        output_path: Resolved .mcap file path to write to.
        record_queue: Queue of ``LogRecord`` items produced by the parent.
        name: Logger name (echoed into log messages).
        ready_event: Set once the writer is open — or has failed — so the
            parent can stop waiting.
    """
    # A synchronous logger: this process is the single writer.
    logger = Logger(name, output_path, use_process=False)

    try:
        logger.open()
        ready_event.set()

        while True:
            # Short timeout keeps the loop responsive.
            try:
                record: LogRecord = record_queue.get(timeout=0.1)
            except queue.Empty:
                continue

            if record.record_type == RecordType.CLOSE:
                break

            try:
                _process_record(logger, record)
            except Exception as exc:
                # Keep draining; one bad record must not kill the writer.
                print(f"Error processing log record: {exc}", file=sys.stderr)
    except Exception as exc:
        print(f"Error starting writer process: {exc}", file=sys.stderr)
        # Set the event even on failure so the parent does not block forever.
        ready_event.set()
    finally:
        logger.close()
185
+
186
+
187
+ class Logger:
188
+ """Logger for tracking logs, images, and point clouds.
189
+
190
+ The default mode writes synchronously and is safe to share across threads.
191
+ For multi-process producers, pass `use_process=True` to enqueue records and
192
+ write from a dedicated background process.
193
+ """
194
+
195
    def __init__(
        self,
        name: str,
        output_dir: str | Path | None = None,
        *,
        use_process: bool = False,
    ) -> None:
        """Initialize the logger.

        Args:
            name: Logger name (included in log messages).
            output_dir: Directory where MCAP files should be written.
                A timestamped file named `<datetime>_<logger-name>.mcap`
                is created automatically.
                For backwards compatibility, passing a `.mcap` path writes
                directly to that file path.
            use_process: If True, enqueue writes and use a writer process.
        """
        self._name = name
        self._use_process = use_process
        self._queue_size = 1000  # TODO: Expose as param?
        # Resolved eagerly so output_path is stable before open().
        self._output = self._resolve_output(output_dir)

        # Synchronous-mode state.
        self._writer: McapFileWriter | None = None
        self._lock = threading.Lock()
        self._closed = False

        # Process-mode state (populated by _open_writer_process).
        self._queue: mp.Queue | None = None
        self._writer_process: mp.Process | None = None
        self._owner_pid: int | None = None  # PID allowed to shut the writer down
        self._atexit_registered = False
226
+
227
+ def __enter__(self) -> Logger:
228
+ """Enter context manager."""
229
+ self.open()
230
+ return self
231
+
232
+ def __exit__(self, exc_type: Any, exc_val: Any, exc_tb: Any) -> None:
233
+ """Exit context manager."""
234
+ self.close()
235
+
236
+ def __getstate__(self) -> dict[str, Any]:
237
+ """Support pickling when sharing logger to spawned child processes."""
238
+ state = self.__dict__.copy()
239
+ state["_lock"] = None
240
+ state["_writer"] = None
241
+ state["_writer_process"] = None
242
+ state["_atexit_registered"] = False
243
+ return state
244
+
245
+ def __setstate__(self, state: dict[str, Any]) -> None:
246
+ """Restore pickled logger state."""
247
+ self.__dict__.update(state)
248
+ self._lock = threading.Lock()
249
+ self._writer = None
250
+ self._writer_process = None
251
+ self._atexit_registered = False
252
+
253
+ def _resolve_output(self, output_dir_override: str | Path | None) -> Path:
254
+ """Determine the output file path for this logger instance."""
255
+ if output_dir_override is not None:
256
+ output_target = Path(output_dir_override).expanduser()
257
+ elif os.environ.get("TRACK_OUTPUT_DIR") is not None:
258
+ output_target = Path(os.environ["TRACK_OUTPUT_DIR"]).expanduser()
259
+ else:
260
+ # Default fallback in case nothing is specified
261
+ output_target = Path.home() / ".local" / "track" / "logs"
262
+
263
+ # Backwards-compatible explicit file path behavior.
264
+ if output_target.suffix.lower() == ".mcap":
265
+ return output_target
266
+
267
+ timestamp = dt.datetime.now().strftime("%Y%m%d_%H%M%S_%f")
268
+ safe_name = re.sub(r"[^A-Za-z0-9_.-]+", "_", self._name).strip("._")
269
+ if not safe_name:
270
+ safe_name = "logger"
271
+ return output_target / f"{timestamp}_{safe_name}.mcap"
272
+
273
    @property
    def output_path(self) -> Path:
        """Resolved output MCAP file path for this logger.

        Read-only; computed once in ``__init__`` via ``_resolve_output``.
        """
        return self._output
277
+
278
    def open(self) -> 'Logger':
        """Open the logger for writing.

        Idempotent: returns immediately if already open. Registers an
        atexit hook so the file is finalized even without an explicit
        ``close()``.

        Returns:
            This logger, allowing ``Logger(...).open()`` chaining.
        """
        with self._lock:
            if self._is_open():
                return self

            self._closed = False
            self._owner_pid = os.getpid()
            # Ensure the destination directory exists before writing.
            self._output.parent.mkdir(parents=True, exist_ok=True)
            self._open_writer()

            if not self._atexit_registered:
                atexit.register(self.close)
                self._atexit_registered = True
        return self
293
+
294
+ def _is_open(self) -> bool:
295
+ """Check whether logger resources are initialized."""
296
+ if self._use_process:
297
+ return self._queue is not None
298
+ return self._writer is not None
299
+
300
    def _open_writer(self) -> None:
        """Set up the writer for this logger. Lock must be held."""
        if self._use_process:
            # Open separate writer process
            self._open_writer_process()
        else:
            # Open writer in this process
            self._writer = McapFileWriter.open(self._output, mode="w", profile="ros2")
308
+
309
    def _open_writer_process(self) -> None:
        """Open process queue mode. Lock must be held.

        Starts the background writer process and waits (up to 10s) for it to
        open the MCAP file. On any failure the process and queue are torn
        down before the exception propagates.
        """
        self._queue = mp.Queue(maxsize=self._queue_size)
        ready_event = mp.Event()

        # Non-daemon so records can still be flushed at interpreter exit.
        self._writer_process = mp.Process(
            target=_writer_process_main,
            args=(self._output, self._queue, self._name, ready_event),
            daemon=False,
        )

        try:
            self._writer_process.start()
            if not ready_event.wait(timeout=10.0):
                raise RuntimeError("Writer process failed to start.")
            # ready_event is also set on child-side failure; verify liveness.
            if not self._writer_process.is_alive():
                exitcode = self._writer_process.exitcode
                raise RuntimeError(f"Writer process exited unexpectedly (exitcode={exitcode}).")
        except Exception:
            # Cleanup writer process
            if self._writer_process is not None:
                if self._writer_process.is_alive():
                    self._writer_process.terminate()
                self._writer_process.join(timeout=1.0)
                self._writer_process = None
            # Cleanup queue
            if self._queue is not None:
                self._queue.close()
                self._queue.join_thread()
                self._queue = None

            raise
341
+
342
+ def close(self) -> None:
343
+ """Close the logger and finalize the file."""
344
+ with self._lock:
345
+ if self._closed:
346
+ return
347
+ self._closed = True
348
+
349
+ writer = self._writer
350
+ record_queue = self._queue
351
+ writer_process = self._writer_process
352
+ owner_pid = self._owner_pid
353
+ use_process = self._use_process
354
+
355
+ self._writer = None
356
+ self._queue = None
357
+ self._writer_process = None
358
+ self._owner_pid = None
359
+
360
+ if self._atexit_registered:
361
+ try:
362
+ atexit.unregister(self.close)
363
+ except Exception:
364
+ pass
365
+ self._atexit_registered = False
366
+
367
+ if not use_process:
368
+ if writer is not None:
369
+ writer.close()
370
+ return
371
+
372
+ # In process mode, only the owner process should control writer shutdown.
373
+ if owner_pid is not None and os.getpid() != owner_pid:
374
+ return
375
+
376
+ if record_queue is not None:
377
+ try:
378
+ record_queue.put(LogRecord(RecordType.CLOSE, {}), timeout=5.0)
379
+ except (queue.Full, Exception):
380
+ pass
381
+
382
+ if writer_process is not None:
383
+ writer_process.join(timeout=10.0)
384
+ if writer_process.is_alive():
385
+ writer_process.terminate()
386
+ writer_process.join(timeout=1.0)
387
+
388
+ if record_queue is not None:
389
+ try:
390
+ record_queue.close()
391
+ record_queue.join_thread()
392
+ except Exception:
393
+ pass
394
+
395
+ def _get_caller_info(self, stacklevel: int = 2) -> tuple[str, int]:
396
+ """Get the file and line number of the caller."""
397
+ frame = inspect.currentframe()
398
+ try:
399
+ for _ in range(stacklevel):
400
+ if frame is not None:
401
+ frame = frame.f_back
402
+ if frame is not None:
403
+ return frame.f_code.co_filename, frame.f_lineno
404
+ finally:
405
+ del frame
406
+ return "", 0
407
+
408
    def _log(
        self,
        level: LogLevel,
        message: str,
        file: str,
        line: int,
        *,
        timestamp_ns: int | None = None,
    ) -> None:
        """Internal log method.

        Args:
            level: Severity of the message.
            message: Full log message text.
            file: Source file of the call site (from _get_caller_info).
            line: Source line of the call site.
            timestamp_ns: Explicit timestamp; defaults to the current time.
        """
        # Resolve the timestamp here so queued records keep the caller's time.
        ts = _now_ns() if timestamp_ns is None else timestamp_ns

        # Multiprocess implementation
        if self._use_process:
            self._put_record(
                LogRecord(
                    RecordType.LOG,
                    {
                        "level": int(level),
                        "message": message,
                        "file": file,
                        "line": line,
                        "timestamp": ts,
                    },
                )
            )
            return

        # Single process implementation
        with self._lock:
            self._write_log(level, message, file=file, line=line, timestamp_ns=ts)
439
+
440
+ def _put_record(self, record: LogRecord, timeout: float = 1.0) -> None:
441
+ """Enqueue one record for the background writer process."""
442
+ try:
443
+ assert self._queue is not None
444
+ self._queue.put(record, timeout=timeout)
445
+ except queue.Full:
446
+ # TODO: Is there something better we can do?
447
+ warnings.warn("Log queue full, dropping record!", stacklevel=2)
448
+
449
    def _write_log(
        self,
        level: LogLevel,
        message: str,
        *,
        file: str,
        line: int,
        timestamp_ns: int,
    ) -> None:
        """Write a log entry directly. Lock should be held."""
        assert self._writer is not None, "Open the logger first with open()"
        # All log records share the fixed /log channel.
        log_msg = Log(
            stamp=_ns_to_stamp(timestamp_ns),
            level=int(level),
            name=self._name,
            msg=message,
            file=file,
            line=line,
        )
        self._writer.write_message("/log", timestamp_ns, log_msg)
469
+
470
+ def debug(self, message: str, *, timestamp_ns: int | None = None, _stacklevel: int = 2) -> None:
471
+ """Log a debug message."""
472
+ file, line = self._get_caller_info(_stacklevel)
473
+ self._log(LogLevel.DEBUG, message, timestamp_ns=timestamp_ns, file=file, line=line)
474
+
475
+ def info(self, message: str, *, timestamp_ns: int | None = None, _stacklevel: int = 2) -> None:
476
+ """Log an info message."""
477
+ file, line = self._get_caller_info(_stacklevel)
478
+ self._log(LogLevel.INFO, message, timestamp_ns=timestamp_ns, file=file, line=line)
479
+
480
+ def warning(self, message: str, *, timestamp_ns: int | None = None, _stacklevel: int = 2) -> None:
481
+ """Log a warning message."""
482
+ file, line = self._get_caller_info(_stacklevel)
483
+ self._log(LogLevel.WARNING, message, timestamp_ns=timestamp_ns, file=file, line=line)
484
+
485
+ def error(self, message: str, *, timestamp_ns: int | None = None, _stacklevel: int = 2) -> None:
486
+ """Log an error message."""
487
+ file, line = self._get_caller_info(_stacklevel)
488
+ self._log(LogLevel.ERROR, message, timestamp_ns=timestamp_ns, file=file, line=line)
489
+
490
+ def fatal(self, message: str, *, timestamp_ns: int | None = None, _stacklevel: int = 2) -> None:
491
+ """Log a fatal message."""
492
+ file, line = self._get_caller_info(_stacklevel)
493
+ self._log(LogLevel.FATAL, message, timestamp_ns=timestamp_ns, file=file, line=line)
494
+
495
    def add_metadata(self, name: str, data: dict[str, str]) -> None:
        """Add metadata.

        Args:
            name: Metadata record name.
            data: String-to-string key/value pairs to store.
        """
        # Multiprocess implementation
        if self._use_process:
            self._put_record(
                LogRecord(
                    RecordType.METADATA,
                    # Copy so later caller-side mutation cannot race the writer.
                    {"name": name, "data": data.copy()},
                )
            )
            return
        # Single process implementation
        with self._lock:
            assert self._writer is not None, "Open the logger first with open()"
            self._writer.write_metadata(name, data)
510
+
511
    def add_attachment(
        self,
        name: str,
        data: bytes,
        media_type: str = "application/octet-stream",
        *,
        timestamp_ns: int | None = None,
    ) -> None:
        """Add an attachment.

        Attachments are useful for storing auxiliary data like model weights,
        configuration files, or other binary data.

        Args:
            name: Attachment name recorded in the MCAP file.
            data: Raw attachment bytes.
            media_type: MIME type of the data.
            timestamp_ns: Optional log time in nanoseconds; defaults to now.
        """
        # Resolve the timestamp here so queued records keep the caller's time.
        ts = timestamp_ns if timestamp_ns is not None else _now_ns()

        # Multiprocess implementation
        if self._use_process:
            self._put_record(
                LogRecord(
                    RecordType.ATTACHMENT,
                    {
                        "name": name,
                        "data": data,
                        "media_type": media_type,
                        "timestamp": ts,
                    },
                )
            )
            return

        # Single process implementation
        with self._lock:
            assert self._writer is not None, "Open the logger first with open()"
            self._writer.write_attachment(name, data, media_type=media_type, log_time=ts)
545
+
546
    def log_image(
        self,
        topic: str,
        image: bytes | np.ndarray,
        *,
        format: str = "png",
        frame_id: str | None = None,
        timestamp_ns: int | None = None,
    ) -> None:
        """Log an image.

        Args:
            topic: Topic name (e.g., "camera/rgb").
            image: Image data as compressed bytes or numpy array.
            format: Image format ('png', 'jpeg', or 'webp').
            frame_id: Frame of reference for the image.
            timestamp_ns: Optional timestamp in nanoseconds.
        """
        ts = timestamp_ns if timestamp_ns is not None else _now_ns()
        image_format = format.lower()

        # Multiprocess implementation
        if self._use_process:
            # Copy arrays so caller-side mutation cannot race the queue pickle.
            image_data = image.copy() if isinstance(image, np.ndarray) else image
            self._put_record(
                LogRecord(
                    RecordType.IMAGE,
                    {
                        "topic": topic,
                        "image": image_data,
                        "format": image_format,
                        "frame_id": frame_id or "",
                        "timestamp": ts,
                    },
                )
            )
            return

        # Single process implementation
        with self._lock:
            self._write_image(
                topic=topic,
                image=image,
                format=image_format,
                frame_id=frame_id or "",
                timestamp_ns=ts,
            )
593
+
594
    def _write_image(
        self,
        *,
        topic: str,
        image: bytes | np.ndarray,
        format: str,
        frame_id: str,
        timestamp_ns: int,
    ) -> None:
        """Write image directly. Lock should be held."""
        assert self._writer is not None, "Open the logger first with open()"
        # Bare topics are namespaced under /images; absolute topics pass through.
        channel_topic = f"/images/{topic}" if not topic.startswith("/") else topic

        if isinstance(image, np.ndarray):
            image = self._encode_image_array(image, format)

        msg = sensor_msgs.CompressedImage(
            # NOTE(review): an empty frame_id falls back to the topic name
            # here, unlike pointclouds which use frame_id as-is — confirm
            # this asymmetry is intended.
            header=_make_header(timestamp_ns, frame_id or topic),
            format=format,
            data=list(image),  # presumably pybag expects a list of ints, not bytes
        )
        self._writer.write_message(channel_topic, timestamp_ns, msg)
616
+
617
+ def _encode_image_array(self, array: np.ndarray, format: str) -> bytes:
618
+ """Encode a numpy array to compressed image bytes."""
619
+ # Determine PIL mode from array shape
620
+ if array.ndim == 2:
621
+ mode = "L"
622
+ elif array.ndim == 3 and array.shape[2] == 3:
623
+ mode = "RGB"
624
+ elif array.ndim == 3 and array.shape[2] == 4:
625
+ mode = "RGBA"
626
+ else:
627
+ raise ValueError(f"Unsupported array shape: {array.shape}")
628
+
629
+ # Ensure uint8
630
+ if array.dtype != np.uint8:
631
+ if array.max() <= 1.0:
632
+ array = (array * 255).astype(np.uint8)
633
+ else:
634
+ array = array.astype(np.uint8)
635
+
636
+ img = Image.fromarray(array, mode=mode)
637
+
638
+ # Encode to bytes
639
+ buffer = io.BytesIO()
640
+ img.save(buffer, format=format.upper())
641
+ return buffer.getvalue()
642
+
643
    def log_pointcloud(
        self,
        topic: str,
        points: np.ndarray,
        *,
        frame_id: str = "world",
        timestamp_ns: int | None = None,
    ) -> None:
        """Log a point cloud.

        Args:
            topic: Topic name (e.g., "lidar").
            points: Structured numpy array, one field per point component.
            frame_id: Frame of reference for the cloud.
            timestamp_ns: Optional timestamp in nanoseconds.
        """
        ts = timestamp_ns if timestamp_ns is not None else _now_ns()

        # Multiprocess implementation
        if self._use_process:
            self._put_record(
                LogRecord(
                    RecordType.POINTCLOUD,
                    {
                        "topic": topic,
                        # Copy so caller-side mutation cannot race the queue pickle.
                        "points": points.copy(),
                        "frame_id": frame_id,
                        "timestamp": ts,
                    },
                )
            )
            return

        # Single process implementation
        with self._lock:
            self._write_pointcloud(topic=topic, points=points, frame_id=frame_id, timestamp_ns=ts)
672
+
673
    def _write_pointcloud(
        self,
        *,
        topic: str,
        points: np.ndarray,
        frame_id: str,
        timestamp_ns: int,
    ) -> None:
        """Write point cloud directly. Lock should be held.

        Raises:
            ValueError: If ``points`` is not a structured array, or a field
                has a dtype with no PointField equivalent.
        """
        assert self._writer is not None, "Open logger first with open()"
        # Bare topics are namespaced under /pointclouds.
        channel_topic = f"/pointclouds/{topic}" if not topic.startswith("/") else topic

        if not isinstance(points.dtype, np.dtype) or points.dtype.names is None:
            raise ValueError(
                "points must be a numpy structured array (recarray). "
                "Example: np.dtype([('x', 'f4'), ('y', 'f4'), ('z', 'f4')])"
            )

        # Describe each struct field as a PointField at its byte offset.
        fields = []
        for name in points.dtype.names:
            field_dtype = points.dtype.fields[name][0]
            offset = points.dtype.fields[name][1]

            base_dtype = np.dtype(field_dtype.base)
            if base_dtype not in DTYPE_TO_POINTFIELD:
                raise ValueError(f"Unsupported dtype for field '{name}': {base_dtype}")

            fields.append(
                sensor_msgs.PointField(
                    name=name,
                    offset=offset,
                    datatype=DTYPE_TO_POINTFIELD[base_dtype],
                    count=1,
                )
            )

        # Unorganized cloud: height 1, width = number of points.
        n_points = len(points)
        point_step = points.dtype.itemsize
        row_step = n_points * point_step
        data = points.tobytes()

        msg = sensor_msgs.PointCloud2(
            header=_make_header(timestamp_ns, frame_id),
            height=1,
            width=n_points,
            fields=fields,
            is_bigendian=False,
            point_step=point_step,
            row_step=row_step,
            data=list(data),
            is_dense=True,
        )
        self._writer.write_message(channel_topic, timestamp_ns, msg)
+ self._writer.write_message(channel_topic, timestamp_ns, msg)