foxglove-sdk 0.16.1__cp314-cp314-win_amd64.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- foxglove/__init__.py +241 -0
- foxglove/_foxglove_py/__init__.pyi +210 -0
- foxglove/_foxglove_py/channels.pyi +2792 -0
- foxglove/_foxglove_py/cloud.pyi +9 -0
- foxglove/_foxglove_py/mcap.pyi +96 -0
- foxglove/_foxglove_py/schemas.pyi +1009 -0
- foxglove/_foxglove_py/schemas_wkt.pyi +85 -0
- foxglove/_foxglove_py/websocket.pyi +321 -0
- foxglove/_foxglove_py.cp314-win_amd64.pyd +0 -0
- foxglove/benchmarks/test_mcap_serialization.py +160 -0
- foxglove/channel.py +241 -0
- foxglove/channels/__init__.py +94 -0
- foxglove/cloud.py +61 -0
- foxglove/mcap.py +12 -0
- foxglove/notebook/__init__.py +0 -0
- foxglove/notebook/foxglove_widget.py +100 -0
- foxglove/notebook/notebook_buffer.py +114 -0
- foxglove/notebook/static/widget.js +1 -0
- foxglove/py.typed +0 -0
- foxglove/schemas/__init__.py +163 -0
- foxglove/tests/__init__.py +0 -0
- foxglove/tests/test_channel.py +243 -0
- foxglove/tests/test_context.py +10 -0
- foxglove/tests/test_logging.py +62 -0
- foxglove/tests/test_mcap.py +199 -0
- foxglove/tests/test_parameters.py +178 -0
- foxglove/tests/test_schemas.py +17 -0
- foxglove/tests/test_server.py +112 -0
- foxglove/tests/test_time.py +137 -0
- foxglove/websocket.py +199 -0
- foxglove_sdk-0.16.1.dist-info/METADATA +53 -0
- foxglove_sdk-0.16.1.dist-info/RECORD +33 -0
- foxglove_sdk-0.16.1.dist-info/WHEEL +4 -0
|
@@ -0,0 +1,163 @@
|
|
|
1
|
+
"""
|
|
2
|
+
This module contains the definitions of the well-known Foxglove schemas for logging.
|
|
3
|
+
|
|
4
|
+
Log messages to a corresponding channel type from :py:mod:`foxglove.channels`.
|
|
5
|
+
|
|
6
|
+
Note that the schema classes are currently immutable and do not expose
|
|
7
|
+
getters and setters for their fields. This is a limitation we plan to address in the future.
|
|
8
|
+
"""
|
|
9
|
+
|
|
10
|
+
# Generated by https://github.com/foxglove/foxglove-sdk
|
|
11
|
+
|
|
12
|
+
from typing import Union
|
|
13
|
+
|
|
14
|
+
from foxglove._foxglove_py.schemas import (
|
|
15
|
+
ArrowPrimitive,
|
|
16
|
+
CameraCalibration,
|
|
17
|
+
CircleAnnotation,
|
|
18
|
+
Color,
|
|
19
|
+
CompressedImage,
|
|
20
|
+
CompressedVideo,
|
|
21
|
+
CubePrimitive,
|
|
22
|
+
CylinderPrimitive,
|
|
23
|
+
Duration,
|
|
24
|
+
FrameTransform,
|
|
25
|
+
FrameTransforms,
|
|
26
|
+
GeoJson,
|
|
27
|
+
Grid,
|
|
28
|
+
ImageAnnotations,
|
|
29
|
+
KeyValuePair,
|
|
30
|
+
LaserScan,
|
|
31
|
+
LinePrimitive,
|
|
32
|
+
LinePrimitiveLineType,
|
|
33
|
+
LocationFix,
|
|
34
|
+
LocationFixes,
|
|
35
|
+
LocationFixPositionCovarianceType,
|
|
36
|
+
Log,
|
|
37
|
+
LogLevel,
|
|
38
|
+
ModelPrimitive,
|
|
39
|
+
PackedElementField,
|
|
40
|
+
PackedElementFieldNumericType,
|
|
41
|
+
Point2,
|
|
42
|
+
Point3,
|
|
43
|
+
PointCloud,
|
|
44
|
+
PointsAnnotation,
|
|
45
|
+
PointsAnnotationType,
|
|
46
|
+
Pose,
|
|
47
|
+
PoseInFrame,
|
|
48
|
+
PosesInFrame,
|
|
49
|
+
Quaternion,
|
|
50
|
+
RawAudio,
|
|
51
|
+
RawImage,
|
|
52
|
+
SceneEntity,
|
|
53
|
+
SceneEntityDeletion,
|
|
54
|
+
SceneEntityDeletionType,
|
|
55
|
+
SceneUpdate,
|
|
56
|
+
SpherePrimitive,
|
|
57
|
+
TextAnnotation,
|
|
58
|
+
TextPrimitive,
|
|
59
|
+
Timestamp,
|
|
60
|
+
TriangleListPrimitive,
|
|
61
|
+
Vector2,
|
|
62
|
+
Vector3,
|
|
63
|
+
VoxelGrid,
|
|
64
|
+
)
|
|
65
|
+
|
|
66
|
+
# Union of every loggable well-known Foxglove schema class.
# NOTE: the enum helper types imported above (LinePrimitiveLineType,
# LocationFixPositionCovarianceType, LogLevel, PackedElementFieldNumericType,
# PointsAnnotationType, SceneEntityDeletionType) are field types, not
# standalone schemas, and are deliberately not members of this union.
FoxgloveSchema = Union[
    ArrowPrimitive,
    CameraCalibration,
    CircleAnnotation,
    Color,
    CompressedImage,
    CompressedVideo,
    CubePrimitive,
    CylinderPrimitive,
    Duration,
    FrameTransform,
    FrameTransforms,
    GeoJson,
    Grid,
    ImageAnnotations,
    KeyValuePair,
    LaserScan,
    LinePrimitive,
    LocationFix,
    LocationFixes,
    Log,
    ModelPrimitive,
    PackedElementField,
    Point2,
    Point3,
    PointCloud,
    PointsAnnotation,
    Pose,
    PoseInFrame,
    PosesInFrame,
    Quaternion,
    RawAudio,
    RawImage,
    SceneEntity,
    SceneEntityDeletion,
    SceneUpdate,
    SpherePrimitive,
    TextAnnotation,
    TextPrimitive,
    Timestamp,
    TriangleListPrimitive,
    Vector2,
    Vector3,
    VoxelGrid,
]
|
|
111
|
+
|
|
112
|
+
# Public API of this module: the union alias plus every re-exported schema
# and enum type (enums ARE exported here, unlike in FoxgloveSchema).
__all__ = [
    "FoxgloveSchema",
    "ArrowPrimitive",
    "CameraCalibration",
    "CircleAnnotation",
    "Color",
    "CompressedImage",
    "CompressedVideo",
    "CubePrimitive",
    "CylinderPrimitive",
    "Duration",
    "FrameTransform",
    "FrameTransforms",
    "GeoJson",
    "Grid",
    "ImageAnnotations",
    "KeyValuePair",
    "LaserScan",
    "LinePrimitive",
    "LinePrimitiveLineType",
    "LocationFix",
    "LocationFixPositionCovarianceType",
    "LocationFixes",
    "Log",
    "LogLevel",
    "ModelPrimitive",
    "PackedElementField",
    "PackedElementFieldNumericType",
    "Point2",
    "Point3",
    "PointCloud",
    "PointsAnnotation",
    "PointsAnnotationType",
    "Pose",
    "PoseInFrame",
    "PosesInFrame",
    "Quaternion",
    "RawAudio",
    "RawImage",
    "SceneEntity",
    "SceneEntityDeletion",
    "SceneEntityDeletionType",
    "SceneUpdate",
    "SpherePrimitive",
    "TextAnnotation",
    "TextPrimitive",
    "Timestamp",
    "TriangleListPrimitive",
    "Vector2",
    "Vector3",
    "VoxelGrid",
]
|
|
File without changes
|
|
@@ -0,0 +1,243 @@
|
|
|
1
|
+
import json
|
|
2
|
+
import logging
|
|
3
|
+
import random
|
|
4
|
+
|
|
5
|
+
import pytest
|
|
6
|
+
from foxglove import Channel, Context, Schema
|
|
7
|
+
from foxglove.channels import LogChannel
|
|
8
|
+
from foxglove.schemas import Log
|
|
9
|
+
|
|
10
|
+
|
|
11
|
+
@pytest.fixture
def new_topic() -> str:
    """Return a fresh, effectively unique topic name for each test."""
    return f"/{random.random()}"
|
|
14
|
+
|
|
15
|
+
|
|
16
|
+
def test_warns_on_duplicate_topics(caplog: pytest.LogCaptureFixture) -> None:
    """Re-declaring a topic with a different schema logs a single warning."""
    base_schema = {"type": "object"}
    first = Channel("test-duplicate", schema=base_schema)
    second = Channel("test-duplicate", schema=base_schema)
    # Identical topic + schema resolves to the same underlying channel.
    assert first.id() == second.id()

    with caplog.at_level(logging.WARNING):
        # Same topic, different schema
        third = Channel(
            "test-duplicate",
            schema={
                "type": "object",
                "additionalProperties": False,
            },
        )
        assert first.id() != third.id()

    assert len(caplog.records) == 1
    for _, _, message in caplog.record_tuples:
        assert (
            "Channel with topic test-duplicate already exists in this context"
            in message
        )
|
|
39
|
+
|
|
40
|
+
|
|
41
|
+
def test_does_not_warn_on_duplicate_topics_in_contexts(
    caplog: pytest.LogCaptureFixture,
) -> None:
    """The same topic in two separate contexts does not conflict."""
    first_ctx = Context()
    second_ctx = Context()

    _ = Channel("test-duplicate", context=first_ctx)

    with caplog.at_level(logging.WARNING):
        Channel("test-duplicate", context=second_ctx)

    assert len(caplog.records) == 0
|
|
53
|
+
|
|
54
|
+
|
|
55
|
+
def test_requires_an_object_schema(new_topic: str) -> None:
    """Non-object JSON schemas are rejected at channel creation."""
    bad_schema = {"type": "array"}
    with pytest.raises(ValueError, match="Only object schemas are supported"):
        Channel(new_topic, schema=bad_schema)
|
|
59
|
+
|
|
60
|
+
|
|
61
|
+
def test_log_dict_on_json_channel(new_topic: str) -> None:
    """A dict logs successfully on a channel declared with a JSON object schema."""
    declared = {"type": "object", "additionalProperties": True}
    channel = Channel(new_topic, schema=declared)

    assert channel.message_encoding == "json"

    attached = channel.schema()
    assert attached is not None
    assert attached.encoding == "jsonschema"
    # The attached schema round-trips to the declared dict.
    assert json.loads(attached.data) == declared

    channel.log({"test": "test"})
|
|
73
|
+
|
|
74
|
+
|
|
75
|
+
def test_log_dict_on_schemaless_channel(new_topic: str) -> None:
    """Omitting the schema defaults to JSON encoding with empty schema data."""
    channel = Channel(new_topic)
    assert channel.message_encoding == "json"

    attached = channel.schema()
    assert attached is not None
    assert attached.encoding == "jsonschema"
    assert attached.data == b""

    channel.log({"test": "test"})
|
|
85
|
+
|
|
86
|
+
|
|
87
|
+
def test_log_dict_with_empty_schema(new_topic: str) -> None:
    """An explicit empty dict schema behaves like no schema at all."""
    channel = Channel(new_topic, schema={})
    assert channel.message_encoding == "json"

    attached = channel.schema()
    assert attached is not None
    assert attached.encoding == "jsonschema"
    assert attached.data == b""

    channel.log({"test": "test"})
|
|
97
|
+
|
|
98
|
+
|
|
99
|
+
def test_log_dict_on_schemaless_json_channel(new_topic: str) -> None:
    """Requesting json encoding without a schema yields empty schema data."""
    channel = Channel(
        new_topic,
        message_encoding="json",
    )
    assert channel.message_encoding == "json"

    attached = channel.schema()
    assert attached is not None
    assert attached.encoding == "jsonschema"
    assert attached.data == b""

    channel.log({"test": "test"})
|
|
112
|
+
|
|
113
|
+
|
|
114
|
+
def test_log_must_serialize_on_protobuf_channel(new_topic: str) -> None:
    """Protobuf channels accept pre-serialized bytes only; dicts raise."""
    proto_schema = Schema(
        name="my_schema",
        encoding="protobuf",
        data=b"\x01",
    )
    channel = Channel(
        new_topic,
        message_encoding="protobuf",
        schema=proto_schema,
    )

    assert channel.message_encoding == "protobuf"
    assert channel.schema() == proto_schema

    # A dict cannot be serialized onto a protobuf channel.
    with pytest.raises(TypeError, match="Unsupported message type"):
        channel.log({"test": "test"})

    # Raw bytes are accepted as-is.
    channel.log(b"\x01")
|
|
133
|
+
|
|
134
|
+
|
|
135
|
+
def test_channel_attributes(new_topic: str) -> None:
    """A freshly created channel exposes topic, encoding, schema, metadata."""
    channel = Channel(new_topic, message_encoding="json")

    assert channel.topic() == new_topic
    assert channel.message_encoding == "json"
    assert channel.schema() is not None
    assert channel.metadata() == {}
    # No sink (server/writer) has been attached yet.
    assert not channel.has_sinks()
|
|
142
|
+
|
|
143
|
+
|
|
144
|
+
def test_typed_channel_attributes(new_topic: str) -> None:
    """Typed channels are protobuf-encoded and carry the schema of their type."""
    channel = LogChannel(new_topic)

    assert channel.topic() == new_topic
    assert channel.message_encoding == "protobuf"
    assert channel.schema() == Log.get_schema()
    assert channel.metadata() == {}
    assert not channel.has_sinks()
|
|
151
|
+
|
|
152
|
+
|
|
153
|
+
def test_channel_metadata(new_topic: str) -> None:
    """Metadata passed at construction is returned verbatim."""
    channel = Channel(new_topic, metadata={"foo": "bar"})
    assert channel.metadata() == {"foo": "bar"}
|
|
156
|
+
|
|
157
|
+
|
|
158
|
+
def test_channel_metadata_mistyped(new_topic: str) -> None:
    """Non-string metadata values are rejected with a TypeError."""
    with pytest.raises(TypeError, match="argument 'metadata'"):
        Channel(new_topic, metadata={"1": 1})  # type: ignore
|
|
161
|
+
|
|
162
|
+
|
|
163
|
+
def test_typed_channel_metadata(new_topic: str) -> None:
    """Typed channels accept metadata, with or without an explicit context."""
    default_ctx_channel = LogChannel(new_topic, metadata={"foo": "bar"})
    assert default_ctx_channel.metadata() == {"foo": "bar"}

    explicit_ctx_channel = LogChannel(
        new_topic, context=Context(), metadata={"foo": "baz"}
    )
    assert explicit_ctx_channel.metadata() == {"foo": "baz"}
|
|
168
|
+
|
|
169
|
+
|
|
170
|
+
def test_typed_channel_metadata_mistyped(new_topic: str) -> None:
    """Typed channels reject non-string metadata values too."""
    with pytest.raises(TypeError, match="argument 'metadata'"):
        LogChannel(new_topic, metadata={"1": 1})  # type: ignore
|
|
173
|
+
|
|
174
|
+
|
|
175
|
+
def test_closed_channel_log(new_topic: str, caplog: pytest.LogCaptureFixture) -> None:
    """Logging after close() is a no-op that emits exactly one warning."""
    channel = Channel(new_topic, schema={"type": "object"})
    channel.close()

    with caplog.at_level(logging.WARNING):
        channel.log(b"\x01")

    assert len(caplog.records) == 1
    for logger_name, _, message in caplog.record_tuples:
        assert logger_name == "foxglove.channel.raw_channel"
        assert message == f"Cannot log on closed channel for {new_topic}"
|
|
185
|
+
|
|
186
|
+
|
|
187
|
+
def test_close_typed_channel(new_topic: str, caplog: pytest.LogCaptureFixture) -> None:
    """Closed typed channels warn via the same raw_channel logger."""
    channel = LogChannel(new_topic)
    channel.close()

    with caplog.at_level(logging.WARNING):
        channel.log(Log())

    assert len(caplog.records) == 1
    for logger_name, _, message in caplog.record_tuples:
        assert logger_name == "foxglove.channel.raw_channel"
        assert message == f"Cannot log on closed channel for {new_topic}"
|
|
197
|
+
|
|
198
|
+
|
|
199
|
+
def test_typed_channel_requires_kwargs_after_message(new_topic: str) -> None:
    """Arguments after the message itself must be passed by keyword."""
    channel = LogChannel(new_topic)

    # Keyword form is accepted.
    channel.log(Log(), log_time=0)

    # Positional form is rejected by the binding layer.
    with pytest.raises(
        TypeError,
        match="takes 1 positional arguments but 2 were given",
    ):
        channel.log(Log(), 0)  # type: ignore
|
|
209
|
+
|
|
210
|
+
|
|
211
|
+
def test_generates_names_for_schemas(new_topic: str) -> None:
    """Schema names are derived from content: equal schemas share a name."""
    with_props = Channel(
        new_topic + "-1",
        schema={"type": "object", "properties": {"foo": {"type": "string"}}},
    )
    open_a = Channel(
        new_topic + "-2",
        schema={"type": "object", "additionalProperties": True},
    )
    # Same schema will have the same name
    open_b = Channel(
        new_topic + "-3",
        schema={"type": "object", "additionalProperties": True},
    )

    assert with_props.schema_name() != open_a.schema_name()
    assert open_a.schema_name() == open_b.schema_name()
|
|
228
|
+
|
|
229
|
+
|
|
230
|
+
def test_exposes_unique_channel_ids(new_topic: str) -> None:
    """Channel ids are positive and strictly increase with creation order."""
    first = Channel(new_topic + "-1")
    second = Channel(new_topic + "-2")
    third = LogChannel(new_topic + "-3")

    assert first.id() > 0
    assert first.id() < second.id()
    assert second.id() < third.id()
|
|
238
|
+
|
|
239
|
+
|
|
240
|
+
def test_log_message_to_specific_sink(new_topic: str) -> None:
    """Targeting a specific sink_id does not error, even with no sink attached."""
    context = Context()
    channel = Channel(new_topic, context=context)
    channel.log("test", sink_id=1)
|
|
@@ -0,0 +1,62 @@
|
|
|
1
|
+
import logging
|
|
2
|
+
import os
|
|
3
|
+
import subprocess
|
|
4
|
+
import sys
|
|
5
|
+
|
|
6
|
+
import pytest
|
|
7
|
+
from foxglove import set_log_level
|
|
8
|
+
|
|
9
|
+
|
|
10
|
+
def test_set_log_level_accepts_string_or_int() -> None:
    """set_log_level accepts a stdlib level name or a numeric level."""
    set_log_level("DEBUG")
    set_log_level(logging.DEBUG)
    # Level names are case-sensitive: lowercase is rejected.
    with pytest.raises(ValueError):
        set_log_level("debug")
|
|
15
|
+
|
|
16
|
+
|
|
17
|
+
def test_set_log_level_clamps_illegal_values() -> None:
    """Out-of-range numeric levels do not raise (clamped, per the name)."""
    set_log_level(-1)
    set_log_level(2**64)
|
|
20
|
+
|
|
21
|
+
|
|
22
|
+
def test_logging_config_with_env() -> None:
    """FOXGLOVE_LOG_LEVEL controls the SDK's internal logging at import time.

    The SDK logger is initialized once per process, so each configuration is
    exercised in a fresh child interpreter.
    """
    # Run a script in a child process so logger can be re-initialized from env.
    test_script = """
import logging
import foxglove

logging.basicConfig(level=logging.DEBUG)

server = foxglove.start_server(port=0)
server.stop()

print("test_init_with_env_complete")
"""

    # Default: unset
    env = os.environ.copy()
    env["FOXGLOVE_LOG_LEVEL"] = ""

    result = subprocess.run(
        [sys.executable, "-c", test_script],
        env=env,
        capture_output=True,
        text=True,
        timeout=5,
    )
    assert "test_init_with_env_complete" in result.stdout
    # SDK log output goes to stderr; with no filter the server start is logged.
    assert "Started server" in result.stderr

    # Quiet the WS server logging
    env = os.environ.copy()
    env["FOXGLOVE_LOG_LEVEL"] = "debug,foxglove::websocket::server=warn"

    result = subprocess.run(
        [sys.executable, "-c", test_script],
        env=env,
        capture_output=True,
        text=True,
        timeout=5,
    )
    assert "test_init_with_env_complete" in result.stdout
    # The module-level "warn" filter suppresses the server-start message.
    assert "Started server" not in result.stderr
|
|
@@ -0,0 +1,199 @@
|
|
|
1
|
+
from pathlib import Path
|
|
2
|
+
from typing import Callable, Generator, Optional
|
|
3
|
+
|
|
4
|
+
import pytest
|
|
5
|
+
from foxglove import Channel, ChannelDescriptor, Context, open_mcap
|
|
6
|
+
from foxglove.mcap import MCAPWriteOptions
|
|
7
|
+
|
|
8
|
+
# Module-level JSON channel shared by the tests in this file.
chan = Channel("test", schema={"type": "object"})
|
|
9
|
+
|
|
10
|
+
|
|
11
|
+
@pytest.fixture
def make_tmp_mcap(
    tmp_path_factory: pytest.TempPathFactory,
) -> Generator[Callable[[], Path], None, None]:
    """Yield a factory that returns a fresh temporary ``.mcap`` path per call.

    Teardown removes only the most recently created file and directory,
    because the factory rebinds ``mcap``/``dir`` via ``nonlocal`` on each call.
    """
    mcap: Optional[Path] = None
    dir: Optional[Path] = None

    def _make_tmp_mcap() -> Path:
        # Rebind the enclosing names so teardown sees the latest path.
        nonlocal dir, mcap
        dir = tmp_path_factory.mktemp("test", numbered=True)
        mcap = dir / "test.mcap"
        return mcap

    yield _make_tmp_mcap

    # Best-effort cleanup; a test may legitimately never create the file.
    if mcap is not None and dir is not None:
        try:
            mcap.unlink()
            dir.rmdir()
        except FileNotFoundError:
            pass
|
|
32
|
+
|
|
33
|
+
|
|
34
|
+
@pytest.fixture
def tmp_mcap(make_tmp_mcap: Callable[[], Path]) -> Generator[Path, None, None]:
    """Convenience fixture: a single temporary ``.mcap`` path."""
    yield make_tmp_mcap()
|
|
37
|
+
|
|
38
|
+
|
|
39
|
+
def test_open_with_str(tmp_mcap: Path) -> None:
    # open_mcap accepts a plain str path as well as a pathlib.Path.
    open_mcap(str(tmp_mcap))
|
|
41
|
+
|
|
42
|
+
|
|
43
|
+
def test_overwrite(tmp_mcap: Path) -> None:
    """Existing files are not clobbered unless allow_overwrite=True."""
    tmp_mcap.touch()
    with pytest.raises(FileExistsError):
        open_mcap(tmp_mcap)
    open_mcap(tmp_mcap, allow_overwrite=True)
|
|
48
|
+
|
|
49
|
+
|
|
50
|
+
def test_explicit_close(tmp_mcap: Path) -> None:
    """close() flushes buffered data: the file grows when the writer closes."""
    writer = open_mcap(tmp_mcap)
    for value in range(20):
        chan.log({"foo": value})
    partial_size = tmp_mcap.stat().st_size
    writer.close()
    assert tmp_mcap.stat().st_size > partial_size
|
|
57
|
+
|
|
58
|
+
|
|
59
|
+
def test_context_manager(tmp_mcap: Path) -> None:
    """Leaving the with-block finalizes the file just like close()."""
    with open_mcap(tmp_mcap):
        for value in range(20):
            chan.log({"foo": value})
        size_inside = tmp_mcap.stat().st_size
    assert tmp_mcap.stat().st_size > size_inside
|
|
65
|
+
|
|
66
|
+
|
|
67
|
+
def test_writer_compression(make_tmp_mcap: Callable[[], Path]) -> None:
    """The default (compressed) writer produces a smaller file than compression=None."""
    compressed_path = make_tmp_mcap()
    uncompressed_path = make_tmp_mcap()

    # Compression is enabled by default; the second writer opts out.
    compressed = open_mcap(compressed_path)
    uncompressed = open_mcap(
        uncompressed_path, writer_options=MCAPWriteOptions(compression=None)
    )

    for _ in range(20):
        chan.log({"foo": "bar"})

    compressed.close()
    uncompressed.close()

    assert compressed_path.stat().st_size < uncompressed_path.stat().st_size
|
|
82
|
+
|
|
83
|
+
|
|
84
|
+
def test_writer_custom_profile(tmp_mcap: Path) -> None:
    """A custom profile string is embedded verbatim in the written file."""
    options = MCAPWriteOptions(profile="--custom-profile-1--")
    with open_mcap(tmp_mcap, writer_options=options):
        chan.log({"foo": "bar"})

    assert b"--custom-profile-1--" in tmp_mcap.read_bytes()
|
|
91
|
+
|
|
92
|
+
|
|
93
|
+
def test_write_to_different_contexts(make_tmp_mcap: Callable[[], Path]) -> None:
    """Each writer records only the channels of the context it was opened with."""
    path_1 = make_tmp_mcap()
    path_2 = make_tmp_mcap()

    first_ctx = Context()
    second_ctx = Context()

    options = MCAPWriteOptions(compression=None)
    writer_1 = open_mcap(path_1, writer_options=options, context=first_ctx)
    writer_2 = open_mcap(path_2, writer_options=options, context=second_ctx)

    Channel("ctx1", context=first_ctx).log({"a": "b"})
    Channel("ctx2", context=second_ctx).log({"has-more-data": "true"})

    writer_1.close()
    writer_2.close()

    # The second context logged a longer payload, so its file must be bigger.
    assert len(path_1.read_bytes()) < len(path_2.read_bytes())
|
|
117
|
+
|
|
118
|
+
|
|
119
|
+
def _verify_metadata_in_file(file_path: Path, expected_metadata: dict) -> None:
    """Helper function to verify metadata in MCAP file matches expected.

    ``expected_metadata`` maps record name -> {key: value} pairs; the file
    must contain exactly those records, by name and by content.
    """
    # Imported lazily: the third-party `mcap` reader is only needed here.
    import mcap.reader

    with open(file_path, "rb") as f:
        reader = mcap.reader.make_reader(f)

        # Collect every metadata record: name -> {key: value}.
        found_metadata = {}
        metadata_count = 0

        for record in reader.iter_metadata():
            metadata_count += 1
            found_metadata[record.name] = dict(record.metadata)

        # Verify count
        assert metadata_count == len(
            expected_metadata
        ), f"Expected {len(expected_metadata)} metadata records, found {metadata_count}"

        # Verify metadata names and content
        assert set(found_metadata.keys()) == set(
            expected_metadata.keys()
        ), "Metadata names don't match"

        for name, expected_kv in expected_metadata.items():
            assert (
                found_metadata[name] == expected_kv
            ), f"Metadata '{name}' has wrong key-value pairs"
|
|
147
|
+
|
|
148
|
+
|
|
149
|
+
def test_write_metadata(tmp_mcap: Path) -> None:
    """Test writing metadata to MCAP file."""
    expected_metadata = {
        "test1": {"key1": "value1", "key2": "value2"},
        "test2": {"a": "1", "b": "2"},
        "test3": {"x": "y", "z": "w"},
    }

    with open_mcap(tmp_mcap) as writer:
        # Empty metadata is accepted without error (and skipped in the file).
        writer.write_metadata("empty", {})

        # Write all expected records, in insertion order.
        for name, pairs in expected_metadata.items():
            writer.write_metadata(name, pairs)

        # A second empty record should be skipped as well.
        writer.write_metadata("empty_test", {})

        # Interleave some regular messages.
        for value in range(5):
            chan.log({"foo": value})

    # Only the three non-empty records should have been written.
    _verify_metadata_in_file(tmp_mcap, expected_metadata)
|
|
178
|
+
|
|
179
|
+
|
|
180
|
+
def test_channel_filter(make_tmp_mcap: Callable[[], Path]) -> None:
    """A channel_filter limits which channels a writer records."""
    filtered_path = make_tmp_mcap()
    unfiltered_path = make_tmp_mcap()

    topic_one = Channel("/1", schema={"type": "object"})
    topic_two = Channel("/2", schema={"type": "object"})

    # Accept only topics under /1 (renamed to avoid shadowing builtins.filter).
    def only_topic_one(ch: ChannelDescriptor) -> bool:
        return ch.topic.startswith("/1")

    filtered = open_mcap(filtered_path, channel_filter=only_topic_one)
    unfiltered = open_mcap(unfiltered_path, channel_filter=None)

    topic_one.log({})
    topic_two.log({})

    filtered.close()
    unfiltered.close()

    # The filtered file dropped /2, so it must be strictly smaller.
    assert filtered_path.stat().st_size < unfiltered_path.stat().st_size
|