foodforthought-cli 0.2.1__py3-none-any.whl → 0.2.3__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- ate/__init__.py +1 -1
- ate/bridge_server.py +622 -0
- ate/cli.py +2625 -242
- ate/compatibility.py +580 -0
- ate/generators/__init__.py +19 -0
- ate/generators/docker_generator.py +461 -0
- ate/generators/hardware_config.py +469 -0
- ate/generators/ros2_generator.py +617 -0
- ate/generators/skill_generator.py +783 -0
- ate/marketplace.py +524 -0
- ate/mcp_server.py +1341 -107
- ate/primitives.py +1016 -0
- ate/robot_setup.py +2222 -0
- ate/skill_schema.py +537 -0
- ate/telemetry/__init__.py +33 -0
- ate/telemetry/cli.py +455 -0
- ate/telemetry/collector.py +444 -0
- ate/telemetry/context.py +318 -0
- ate/telemetry/fleet_agent.py +419 -0
- ate/telemetry/formats/__init__.py +18 -0
- ate/telemetry/formats/hdf5_serializer.py +503 -0
- ate/telemetry/formats/mcap_serializer.py +457 -0
- ate/telemetry/types.py +334 -0
- foodforthought_cli-0.2.3.dist-info/METADATA +300 -0
- foodforthought_cli-0.2.3.dist-info/RECORD +44 -0
- foodforthought_cli-0.2.3.dist-info/top_level.txt +6 -0
- mechdog_labeled/__init__.py +3 -0
- mechdog_labeled/primitives.py +113 -0
- mechdog_labeled/servo_map.py +209 -0
- mechdog_output/__init__.py +3 -0
- mechdog_output/primitives.py +59 -0
- mechdog_output/servo_map.py +203 -0
- test_autodetect/__init__.py +3 -0
- test_autodetect/primitives.py +113 -0
- test_autodetect/servo_map.py +209 -0
- test_full_auto/__init__.py +3 -0
- test_full_auto/primitives.py +113 -0
- test_full_auto/servo_map.py +209 -0
- test_smart_detect/__init__.py +3 -0
- test_smart_detect/primitives.py +113 -0
- test_smart_detect/servo_map.py +209 -0
- foodforthought_cli-0.2.1.dist-info/METADATA +0 -151
- foodforthought_cli-0.2.1.dist-info/RECORD +0 -9
- foodforthought_cli-0.2.1.dist-info/top_level.txt +0 -1
- {foodforthought_cli-0.2.1.dist-info → foodforthought_cli-0.2.3.dist-info}/WHEEL +0 -0
- {foodforthought_cli-0.2.1.dist-info → foodforthought_cli-0.2.3.dist-info}/entry_points.txt +0 -0
|
@@ -0,0 +1,457 @@
|
|
|
1
|
+
"""
|
|
2
|
+
MCAP Serializer for Telemetry Data
|
|
3
|
+
|
|
4
|
+
Serializes trajectory recordings to MCAP format, which is the standard
|
|
5
|
+
for ROS2 bag files and is compatible with Foxglove visualization.
|
|
6
|
+
|
|
7
|
+
MCAP spec: https://mcap.dev/spec
|
|
8
|
+
"""
|
|
9
|
+
|
|
10
|
+
import io
|
|
11
|
+
import json
|
|
12
|
+
import struct
|
|
13
|
+
from datetime import datetime
|
|
14
|
+
from typing import Dict, Any, List, Optional
|
|
15
|
+
|
|
16
|
+
from ..types import TrajectoryRecording, TrajectoryFrame
|
|
17
|
+
|
|
18
|
+
# MCAP constants
|
|
19
|
+
MCAP_MAGIC = b"\x89MCAP0\r\n"
|
|
20
|
+
MCAP_FOOTER_MAGIC = b"\x89MCAP0\r\n"
|
|
21
|
+
|
|
22
|
+
# Op codes
|
|
23
|
+
OP_HEADER = 0x01
|
|
24
|
+
OP_FOOTER = 0x02
|
|
25
|
+
OP_SCHEMA = 0x03
|
|
26
|
+
OP_CHANNEL = 0x04
|
|
27
|
+
OP_MESSAGE = 0x05
|
|
28
|
+
OP_CHUNK = 0x06
|
|
29
|
+
OP_MESSAGE_INDEX = 0x07
|
|
30
|
+
OP_CHUNK_INDEX = 0x08
|
|
31
|
+
OP_ATTACHMENT = 0x09
|
|
32
|
+
OP_ATTACHMENT_INDEX = 0x0A
|
|
33
|
+
OP_STATISTICS = 0x0B
|
|
34
|
+
OP_METADATA = 0x0C
|
|
35
|
+
OP_METADATA_INDEX = 0x0D
|
|
36
|
+
OP_SUMMARY_OFFSET = 0x0E
|
|
37
|
+
OP_DATA_END = 0x0F
|
|
38
|
+
|
|
39
|
+
|
|
40
|
+
def _write_record(buf: io.BytesIO, op: int, data: bytes) -> None:
|
|
41
|
+
"""Write a record to the buffer."""
|
|
42
|
+
buf.write(struct.pack("<B", op))
|
|
43
|
+
buf.write(struct.pack("<Q", len(data)))
|
|
44
|
+
buf.write(data)
|
|
45
|
+
|
|
46
|
+
|
|
47
|
+
def _write_header(buf: io.BytesIO, profile: str = "", library: str = "ate-telemetry") -> None:
    """Emit the MCAP Header record: length-prefixed profile and library strings."""
    payload = io.BytesIO()
    for text in (profile, library):
        raw = text.encode("utf-8")
        payload.write(struct.pack("<I", len(raw)))
        payload.write(raw)
    _write_record(buf, OP_HEADER, payload.getvalue())
|
+
|
|
61
|
+
def _write_schema(
    buf: io.BytesIO,
    schema_id: int,
    name: str,
    encoding: str,
    data: bytes,
) -> None:
    """Emit a Schema record: u16 id, then prefixed name, encoding, and body."""
    raw_name = name.encode("utf-8")
    raw_encoding = encoding.encode("utf-8")

    payload = (
        struct.pack("<H", schema_id)
        + struct.pack("<I", len(raw_name)) + raw_name
        + struct.pack("<I", len(raw_encoding)) + raw_encoding
        + struct.pack("<I", len(data)) + data
    )
    _write_record(buf, OP_SCHEMA, payload)
83
|
+
|
|
84
|
+
def _write_channel(
    buf: io.BytesIO,
    channel_id: int,
    schema_id: int,
    topic: str,
    message_encoding: str,
    metadata: Optional[Dict[str, str]] = None,
) -> None:
    """Write a channel record binding a topic to a schema.

    Args:
        buf: Destination buffer.
        channel_id: Identifier later referenced by message records.
        schema_id: Schema record this channel's messages conform to.
        topic: Topic name, e.g. "/joint_states".
        message_encoding: Payload encoding, e.g. "json".
        metadata: Optional string-to-string metadata map (treated as empty
            when omitted).  Annotated ``Optional`` — the previous
            ``Dict[str, str] = None`` was an implicit-Optional violation
            of PEP 484.
    """
    topic_bytes = topic.encode("utf-8")
    encoding_bytes = message_encoding.encode("utf-8")
    metadata = metadata or {}

    record = io.BytesIO()
    record.write(struct.pack("<H", channel_id))
    record.write(struct.pack("<H", schema_id))
    record.write(struct.pack("<I", len(topic_bytes)))
    record.write(topic_bytes)
    record.write(struct.pack("<I", len(encoding_bytes)))
    record.write(encoding_bytes)

    # Metadata map: u32 entry count, then length-prefixed key/value pairs.
    record.write(struct.pack("<I", len(metadata)))
    for key, value in metadata.items():
        key_bytes = key.encode("utf-8")
        value_bytes = value.encode("utf-8")
        record.write(struct.pack("<I", len(key_bytes)))
        record.write(key_bytes)
        record.write(struct.pack("<I", len(value_bytes)))
        record.write(value_bytes)

    _write_record(buf, OP_CHANNEL, record.getvalue())
|
+
|
|
118
|
+
def _write_message(
    buf: io.BytesIO,
    channel_id: int,
    sequence: int,
    log_time: int,
    publish_time: int,
    data: bytes,
) -> None:
    """Emit a Message record: channel id, sequence, two ns timestamps, payload."""
    header = struct.pack("<HIQQ", channel_id, sequence, log_time, publish_time)
    _write_record(buf, OP_MESSAGE, header + data)
+
|
|
137
|
+
def _write_metadata(buf: io.BytesIO, name: str, metadata: Dict[str, str]) -> None:
    """Emit a Metadata record: prefixed name plus a counted string->string map."""
    raw_name = name.encode("utf-8")

    parts = [
        struct.pack("<I", len(raw_name)),
        raw_name,
        struct.pack("<I", len(metadata)),
    ]
    for key, value in metadata.items():
        # Values are coerced with str() so non-string inputs still serialize.
        for raw in (key.encode("utf-8"), str(value).encode("utf-8")):
            parts.append(struct.pack("<I", len(raw)))
            parts.append(raw)

    _write_record(buf, OP_METADATA, b"".join(parts))
|
156
|
+
|
|
157
|
+
def _write_data_end(buf: io.BytesIO) -> None:
    """Emit the DataEnd record; a CRC of 0 means "not computed"."""
    _write_record(buf, OP_DATA_END, struct.pack("<I", 0))
|
162
|
+
|
|
163
|
+
def _write_footer(buf: io.BytesIO) -> None:
    """Emit the Footer record: no summary section, summary CRC omitted (0)."""
    # summary_start, summary_offset_start, summary_crc — all zero.
    _write_record(buf, OP_FOOTER, struct.pack("<QQI", 0, 0, 0))
|
|
172
|
+
|
|
173
|
+
def serialize_to_mcap(recording: TrajectoryRecording) -> bytes:
    """
    Serialize trajectory recording to MCAP format.

    Creates an MCAP file with:
    - /joint_states topic: Joint positions, velocities, torques
    - /end_effector_pose topic: End effector pose (if available)
    - /events topic: Execution events

    Args:
        recording: The trajectory recording to serialize

    Returns:
        MCAP file as bytes
    """
    # Local import: module scope only does `from datetime import datetime`.
    from datetime import timezone

    buf = io.BytesIO()

    # Files open with the MCAP magic bytes.
    buf.write(MCAP_MAGIC)

    # Header record identifies the profile and the writing library.
    _write_header(buf, profile="ros2", library="ate-telemetry/1.0")

    # JSON Schema bodies for each topic, referenced by the channels below.
    joint_state_schema = json.dumps({
        "type": "object",
        "title": "JointState",
        "properties": {
            "timestamp": {"type": "number"},
            "positions": {"type": "object", "additionalProperties": {"type": "number"}},
            "velocities": {"type": "object", "additionalProperties": {"type": "number"}},
            "torques": {"type": "object", "additionalProperties": {"type": "number"}},
            "accelerations": {"type": "object", "additionalProperties": {"type": "number"}},
        },
    }).encode("utf-8")

    pose_schema = json.dumps({
        "type": "object",
        "title": "Pose",
        "properties": {
            "timestamp": {"type": "number"},
            "position": {
                "type": "object",
                "properties": {
                    "x": {"type": "number"},
                    "y": {"type": "number"},
                    "z": {"type": "number"},
                },
            },
            "orientation": {
                "type": "object",
                "properties": {
                    "x": {"type": "number"},
                    "y": {"type": "number"},
                    "z": {"type": "number"},
                    "w": {"type": "number"},
                },
            },
        },
    }).encode("utf-8")

    event_schema = json.dumps({
        "type": "object",
        "title": "ExecutionEvent",
        "properties": {
            "timestamp": {"type": "number"},
            "eventType": {"type": "string"},
            "data": {"type": "object"},
        },
    }).encode("utf-8")

    # Write schemas
    _write_schema(buf, 1, "JointState", "jsonschema", joint_state_schema)
    _write_schema(buf, 2, "Pose", "jsonschema", pose_schema)
    _write_schema(buf, 3, "ExecutionEvent", "jsonschema", event_schema)

    # Write channels (channel ids 1-3 map 1:1 onto schema ids 1-3).
    _write_channel(buf, 1, 1, "/joint_states", "json", {
        "robot_id": recording.robot_id,
        "skill_id": recording.skill_id or "",
    })
    _write_channel(buf, 2, 2, "/end_effector_pose", "json")
    _write_channel(buf, 3, 3, "/events", "json")

    # Base timestamp (nanoseconds since the epoch) that frame-relative
    # timestamps are added onto.
    if recording.start_time:
        # NOTE(review): assumes start_time's .timestamp() yields the intended
        # epoch — a naive datetime here is interpreted as local time; confirm
        # the recorder produces timezone-aware values.
        base_time_ns = int(recording.start_time.timestamp() * 1e9)
    else:
        # Fix: datetime.utcnow() returns a *naive* datetime, and .timestamp()
        # interprets naive datetimes as local time, skewing the epoch by the
        # machine's UTC offset. utcnow() is also deprecated since Python 3.12;
        # an aware UTC datetime gives the correct epoch everywhere.
        base_time_ns = int(datetime.now(timezone.utc).timestamp() * 1e9)

    # Write frames as messages; `sequence` is shared across all channels.
    sequence = 0
    for frame in recording.frames:
        timestamp_ns = base_time_ns + int(frame.timestamp * 1e9)

        # Joint state message
        joint_data = json.dumps({
            "timestamp": frame.timestamp,
            "positions": frame.joint_positions,
            "velocities": frame.joint_velocities,
            "torques": frame.joint_torques,
            "accelerations": frame.joint_accelerations,
        }).encode("utf-8")

        _write_message(buf, 1, sequence, timestamp_ns, timestamp_ns, joint_data)
        sequence += 1

        # End effector pose message (if available)
        if frame.end_effector_pose:
            pose_data = json.dumps({
                "timestamp": frame.timestamp,
                "position": frame.end_effector_pose.position.to_dict(),
                "orientation": frame.end_effector_pose.orientation.to_dict(),
            }).encode("utf-8")

            _write_message(buf, 2, sequence, timestamp_ns, timestamp_ns, pose_data)
            sequence += 1

    # Write events as messages
    for event in recording.events:
        timestamp_ns = base_time_ns + int(event.timestamp * 1e9)

        event_data = json.dumps({
            "timestamp": event.timestamp,
            "eventType": event.event_type.value if hasattr(event.event_type, "value") else str(event.event_type),
            "data": event.data,
        }).encode("utf-8")

        _write_message(buf, 3, sequence, timestamp_ns, timestamp_ns, event_data)
        sequence += 1

    # Summary metadata so deserialize_from_mcap can rebuild the recording.
    _write_metadata(buf, "recording_info", {
        "recording_id": recording.id,
        "robot_id": recording.robot_id,
        "skill_id": recording.skill_id or "",
        "source": recording.source.value if hasattr(recording.source, "value") else str(recording.source),
        "success": str(recording.success),
        "duration": str(recording.metadata.duration),
        "frame_count": str(recording.metadata.total_frames),
        "frame_rate": str(recording.metadata.frame_rate),
    })

    # Data end, footer, and the closing magic bytes.
    _write_data_end(buf)
    _write_footer(buf)
    buf.write(MCAP_FOOTER_MAGIC)

    return buf.getvalue()
|
323
|
+
|
|
324
|
+
def deserialize_from_mcap(data: bytes) -> TrajectoryRecording:
    """
    Deserialize MCAP data to TrajectoryRecording.

    Scans the record stream written by serialize_to_mcap, collecting
    /joint_states messages into frames, /events messages into events,
    and the "recording_info" metadata record into the recording fields.

    Args:
        data: MCAP file as bytes

    Returns:
        Parsed TrajectoryRecording

    Raises:
        ValueError: If the data does not start with the MCAP magic bytes.
    """
    # Function-local import of the full type set (module scope only imports
    # TrajectoryRecording and TrajectoryFrame).
    # NOTE(review): Pose, Vector3 and Quaternion are imported but never used
    # below — /end_effector_pose messages are skipped, not reconstructed;
    # confirm whether pose round-tripping is intended.
    from ..types import (
        TrajectoryRecording,
        TrajectoryFrame,
        TrajectoryMetadata,
        ExecutionEvent,
        EventType,
        TelemetrySource,
        Pose,
        Vector3,
        Quaternion,
    )

    buf = io.BytesIO(data)

    # Verify magic
    magic = buf.read(8)
    if magic != MCAP_MAGIC:
        raise ValueError("Invalid MCAP file: bad magic")

    frames: List[TrajectoryFrame] = []
    events: List[ExecutionEvent] = []
    metadata: Dict[str, str] = {}
    channels: Dict[int, str] = {}  # channel_id -> topic name

    # Linear scan over records: opcode (u8), length (u64 LE), payload bytes.
    while True:
        op_byte = buf.read(1)
        if not op_byte:
            # Ran off the end of the stream without seeing a footer.
            break

        op = struct.unpack("<B", op_byte)[0]
        length = struct.unpack("<Q", buf.read(8))[0]
        record_data = buf.read(length)

        if op == OP_FOOTER:
            # Footer marks the end of the data we care about; the trailing
            # magic after it is never read.
            break

        if op == OP_CHANNEL:
            # Channel: u16 channel_id, u16 schema_id, length-prefixed topic.
            # Only the id -> topic mapping is needed to route messages.
            record_buf = io.BytesIO(record_data)
            channel_id = struct.unpack("<H", record_buf.read(2))[0]
            record_buf.read(2)  # schema_id
            topic_len = struct.unpack("<I", record_buf.read(4))[0]
            topic = record_buf.read(topic_len).decode("utf-8")
            channels[channel_id] = topic

        elif op == OP_MESSAGE:
            # Message: u16 channel_id, u32 sequence, u64 log_time,
            # u64 publish_time, then the payload fills the rest of the record.
            record_buf = io.BytesIO(record_data)
            channel_id = struct.unpack("<H", record_buf.read(2))[0]
            record_buf.read(4)  # sequence
            record_buf.read(8)  # log_time
            record_buf.read(8)  # publish_time
            msg_data = record_buf.read()

            # Messages on channels declared before this point route by topic;
            # unknown channels fall through to "" and are ignored below.
            topic = channels.get(channel_id, "")

            try:
                msg = json.loads(msg_data.decode("utf-8"))

                if topic == "/joint_states":
                    frame = TrajectoryFrame(
                        timestamp=msg.get("timestamp", 0),
                        joint_positions=msg.get("positions", {}),
                        joint_velocities=msg.get("velocities", {}),
                        joint_torques=msg.get("torques", {}),
                        joint_accelerations=msg.get("accelerations", {}),
                    )
                    frames.append(frame)

                elif topic == "/events":
                    event_type_str = msg.get("eventType", "")
                    try:
                        # Prefer the enum; fall back to the raw string for
                        # event types this build does not recognize.
                        event_type = EventType(event_type_str)
                    except ValueError:
                        event_type = event_type_str

                    event = ExecutionEvent(
                        timestamp=msg.get("timestamp", 0),
                        event_type=event_type,
                        data=msg.get("data", {}),
                    )
                    events.append(event)

            except json.JSONDecodeError:
                # Best-effort: skip payloads that are not valid JSON.
                pass

        elif op == OP_METADATA:
            # Metadata: length-prefixed name, then a u32-counted map of
            # length-prefixed key/value strings (the writer's custom layout).
            record_buf = io.BytesIO(record_data)
            name_len = struct.unpack("<I", record_buf.read(4))[0]
            name = record_buf.read(name_len).decode("utf-8")

            if name == "recording_info":
                map_len = struct.unpack("<I", record_buf.read(4))[0]
                for _ in range(map_len):
                    key_len = struct.unpack("<I", record_buf.read(4))[0]
                    key = record_buf.read(key_len).decode("utf-8")
                    val_len = struct.unpack("<I", record_buf.read(4))[0]
                    val = record_buf.read(val_len).decode("utf-8")
                    metadata[key] = val

    # Sort frames by timestamp
    frames.sort(key=lambda f: f.timestamp)

    # Build recording; unknown source strings fall back to HARDWARE.
    source_str = metadata.get("source", "hardware")
    try:
        source = TelemetrySource(source_str)
    except ValueError:
        source = TelemetrySource.HARDWARE

    recording = TrajectoryRecording(
        id=metadata.get("recording_id", ""),
        robot_id=metadata.get("robot_id", ""),
        skill_id=metadata.get("skill_id") or None,  # "" round-trips to None
        source=source,
        success=metadata.get("success", "True").lower() == "true",
        frames=frames,
        events=events,
        metadata=TrajectoryMetadata(
            duration=float(metadata.get("duration", 0)),
            total_frames=int(metadata.get("frame_count", len(frames))),
            frame_rate=float(metadata.get("frame_rate", 0)),
        ),
    )

    return recording