foodforthought-cli 0.2.4__py3-none-any.whl → 0.2.8__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- ate/__init__.py +1 -1
- ate/behaviors/__init__.py +88 -0
- ate/behaviors/common.py +686 -0
- ate/behaviors/tree.py +454 -0
- ate/cli.py +610 -54
- ate/drivers/__init__.py +27 -0
- ate/drivers/mechdog.py +606 -0
- ate/interfaces/__init__.py +171 -0
- ate/interfaces/base.py +271 -0
- ate/interfaces/body.py +267 -0
- ate/interfaces/detection.py +282 -0
- ate/interfaces/locomotion.py +422 -0
- ate/interfaces/manipulation.py +408 -0
- ate/interfaces/navigation.py +389 -0
- ate/interfaces/perception.py +362 -0
- ate/interfaces/types.py +371 -0
- ate/mcp_server.py +387 -0
- ate/recording/__init__.py +44 -0
- ate/recording/demonstration.py +378 -0
- ate/recording/session.py +405 -0
- ate/recording/upload.py +304 -0
- ate/recording/wrapper.py +95 -0
- ate/robot/__init__.py +79 -0
- ate/robot/calibration.py +583 -0
- ate/robot/commands.py +3603 -0
- ate/robot/discovery.py +339 -0
- ate/robot/introspection.py +330 -0
- ate/robot/manager.py +270 -0
- ate/robot/profiles.py +275 -0
- ate/robot/registry.py +319 -0
- ate/robot/skill_upload.py +393 -0
- ate/robot/visual_labeler.py +1039 -0
- {foodforthought_cli-0.2.4.dist-info → foodforthought_cli-0.2.8.dist-info}/METADATA +9 -1
- {foodforthought_cli-0.2.4.dist-info → foodforthought_cli-0.2.8.dist-info}/RECORD +37 -8
- {foodforthought_cli-0.2.4.dist-info → foodforthought_cli-0.2.8.dist-info}/WHEEL +0 -0
- {foodforthought_cli-0.2.4.dist-info → foodforthought_cli-0.2.8.dist-info}/entry_points.txt +0 -0
- {foodforthought_cli-0.2.4.dist-info → foodforthought_cli-0.2.8.dist-info}/top_level.txt +0 -0
ate/robot/commands.py
ADDED
|
@@ -0,0 +1,3603 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Robot management CLI commands.
|
|
3
|
+
|
|
4
|
+
Provides:
|
|
5
|
+
- ate robot discover - Find robots on network/USB
|
|
6
|
+
- ate robot list - List known robot types
|
|
7
|
+
- ate robot info - Show robot capabilities
|
|
8
|
+
- ate robot setup - Interactive setup wizard
|
|
9
|
+
- ate robot test - Test robot capabilities
|
|
10
|
+
- ate robot profiles - Manage saved profiles
|
|
11
|
+
"""
|
|
12
|
+
|
|
13
|
+
import os
|
|
14
|
+
import sys
|
|
15
|
+
import json
|
|
16
|
+
from typing import Optional
|
|
17
|
+
from pathlib import Path
|
|
18
|
+
|
|
19
|
+
from .discovery import (
|
|
20
|
+
discover_robots,
|
|
21
|
+
discover_serial_robots,
|
|
22
|
+
discover_network_cameras,
|
|
23
|
+
discover_ble_robots,
|
|
24
|
+
quick_scan,
|
|
25
|
+
)
|
|
26
|
+
from .profiles import (
|
|
27
|
+
RobotProfile,
|
|
28
|
+
load_profile,
|
|
29
|
+
save_profile,
|
|
30
|
+
list_profiles,
|
|
31
|
+
delete_profile,
|
|
32
|
+
get_template,
|
|
33
|
+
list_templates,
|
|
34
|
+
create_profile_from_discovery,
|
|
35
|
+
)
|
|
36
|
+
from .registry import (
|
|
37
|
+
RobotRegistry,
|
|
38
|
+
KNOWN_ROBOTS,
|
|
39
|
+
)
|
|
40
|
+
from .introspection import (
|
|
41
|
+
get_capabilities,
|
|
42
|
+
get_methods,
|
|
43
|
+
test_capability,
|
|
44
|
+
format_capabilities,
|
|
45
|
+
format_methods,
|
|
46
|
+
)
|
|
47
|
+
from .manager import RobotManager
|
|
48
|
+
from .calibration import (
|
|
49
|
+
VisualCalibrator,
|
|
50
|
+
RobotCalibration,
|
|
51
|
+
load_calibration,
|
|
52
|
+
save_calibration,
|
|
53
|
+
list_calibrations,
|
|
54
|
+
quick_gripper_calibration,
|
|
55
|
+
)
|
|
56
|
+
from .visual_labeler import (
|
|
57
|
+
DualCameraLabeler,
|
|
58
|
+
SkillLibrary,
|
|
59
|
+
load_skill_library,
|
|
60
|
+
list_skill_libraries,
|
|
61
|
+
visual_label_command,
|
|
62
|
+
)
|
|
63
|
+
from .skill_upload import (
|
|
64
|
+
SkillLibraryUploader,
|
|
65
|
+
upload_skill_library,
|
|
66
|
+
)
|
|
67
|
+
from .teach import teach_command
|
|
68
|
+
from .perception import LivePerceptionExecutor, PerceptionSystem
|
|
69
|
+
|
|
70
|
+
|
|
71
|
+
def robot_primitives_command(action: str = "list", name: Optional[str] = None):
    """
    List and inspect programmatic primitives.

    ate robot primitives list
    ate robot primitives show gripper_open
    ate robot primitives test pickup --port /dev/cu.usbserial-10

    Args:
        action: One of "list", "show", or "test".
        name: Skill name; required for "show" and "test".

    Exits with status 1 on unknown action, unknown skill, or missing name.
    """
    from .primitives import PrimitiveLibrary, SkillType

    lib = PrimitiveLibrary(robot_interface=None)

    if action == "list":
        print("=" * 60)
        print("SKILL PRIMITIVES")
        print("=" * 60)

        print(f"\nPrimitives ({len(lib.primitives)}):")
        print("-" * 40)
        # FIX: loop variables renamed (prim_name/comp_name/beh_name) so they no
        # longer shadow and clobber the `name` parameter.
        for prim_name, prim in lib.primitives.items():
            hw = ", ".join(r.value for r in prim.hardware) or "none"
            servos = len(prim.servo_targets)
            print(f" {prim_name:<20} [{hw}] ({servos} servos, {prim.duration_ms}ms)")

        print(f"\nCompound Skills ({len(lib.compounds)}):")
        print("-" * 40)
        for comp_name, comp in lib.compounds.items():
            hw = ", ".join(r.value for r in comp.hardware) or "none"
            steps = len(comp.steps)
            print(f" {comp_name:<20} [{hw}] ({steps} steps)")
            print(f" → {' → '.join(comp.steps)}")

        print(f"\nBehaviors ({len(lib.behaviors)}):")
        print("-" * 40)
        for beh_name, beh in lib.behaviors.items():
            hw = ", ".join(r.value for r in beh.hardware) or "none"
            print(f" {beh_name:<20} [{hw}]")
            print(f" {beh.description}")

        total = len(lib.primitives) + len(lib.compounds) + len(lib.behaviors)
        print(f"\nTotal: {total} skills")

    elif action == "show":
        if not name:
            print("Usage: ate robot primitives show <name>")
            sys.exit(1)

        # Find in primitives
        if name in lib.primitives:
            prim = lib.primitives[name]
            print(f"\nPrimitive: {name}")
            print("=" * 40)
            print(f"Description: {prim.description}")
            print(f"Duration: {prim.duration_ms}ms")
            print(f"Hardware: {[r.value for r in prim.hardware]}")
            print(f"\nServo Targets:")
            for sid, val in prim.servo_targets.items():
                print(f" Servo {sid}: {val}")

        elif name in lib.compounds:
            comp = lib.compounds[name]
            print(f"\nCompound Skill: {name}")
            print("=" * 40)
            print(f"Description: {comp.description}")
            print(f"Wait between: {comp.wait_between_ms}ms")
            print(f"Hardware: {[r.value for r in comp.hardware]}")
            print(f"\nSteps:")
            for i, step in enumerate(comp.steps, 1):
                print(f" {i}. {step}")

        elif name in lib.behaviors:
            beh = lib.behaviors[name]
            print(f"\nBehavior: {name}")
            print("=" * 40)
            print(f"Description: {beh.description}")
            print(f"Hardware: {[r.value for r in beh.hardware]}")
            print(f"\nSteps:")
            # FIX: the original branched on isinstance(step, dict) but both arms
            # were identical; the dead branch is collapsed (output unchanged).
            for i, step in enumerate(beh.steps, 1):
                print(f" {i}. {step}")

        else:
            print(f"Skill not found: {name}")
            print("\nAvailable skills:")
            print(f" Primitives: {', '.join(lib.primitives.keys())}")
            print(f" Compounds: {', '.join(lib.compounds.keys())}")
            print(f" Behaviors: {', '.join(lib.behaviors.keys())}")
            sys.exit(1)

    elif action == "test":
        if not name:
            print("Usage: ate robot primitives test <name> --port <port>")
            sys.exit(1)
        print(f"Testing skill: {name}")
        print("(Use 'ate robot behavior' for full behavior execution)")

    else:
        print(f"Unknown action: {action}")
        print("Available actions: list, show, test")
        sys.exit(1)
|
|
173
|
+
|
|
174
|
+
|
|
175
|
+
def robot_behavior_command(
    profile_name: Optional[str] = None,
    behavior: str = "pickup_green_ball",
    camera_ip: Optional[str] = None,
    simulate: bool = False,
):
    """
    Execute a high-level behavior.

    ate robot behavior pickup_green_ball --profile my_mechdog
    ate robot behavior pickup_green_ball --camera-ip 192.168.4.1
    ate robot behavior pickup_green_ball --simulate

    Args:
        profile_name: Saved robot profile to connect with (skipped if simulating).
        behavior: Behavior identifier; currently only "pickup_green_ball".
        camera_ip: Camera address; falls back to the profile's, then 192.168.4.1.
        simulate: Run without hardware, using simulated perception.
    """
    # PREREQUISITE CHECK: Ensure direction calibration is complete
    # This is the enforcement that prevents catastrophic failures
    if profile_name and not simulate:
        from .calibration_state import CalibrationState

        state = CalibrationState.load(profile_name)
        passed, message = state.check_prerequisite("direction_calibrated")

        if not passed:
            print(message)
            print("\nThe behavior cannot run without direction calibration.")
            print("This prevents the arm from moving in the WRONG direction.")
            sys.exit(1)

    print(f"Behavior: {behavior}")
    print("=" * 50)

    robot = None
    # FIX: keep a reference to the manager that establishes the connection so
    # cleanup disconnects via the SAME instance. The original built a brand-new
    # RobotManager() in `finally`, which could miss the live connection if
    # connections are tracked per-instance.
    manager = None

    if profile_name and not simulate:
        # Load robot from profile
        profile = load_profile(profile_name)
        if not profile:
            print(f"Profile not found: {profile_name}")
            sys.exit(1)

        manager = RobotManager()
        manager.load(profile_name)

        print("Connecting to robot...")
        if not manager.connect(profile_name):
            print("Connection failed")
            sys.exit(1)

        robot = manager.get(profile_name)
        camera_ip = camera_ip or profile.camera_ip

    # Create executor
    executor = LivePerceptionExecutor(
        robot_interface=robot,
        camera_ip=camera_ip or "192.168.4.1",
    )

    # Try to connect to camera (ok if it fails - will use simulation)
    if not simulate and camera_ip:
        if executor.connect():
            print(f"Camera connected: {camera_ip}")
        else:
            print("Camera not available - using simulated perception")

    try:
        # Execute behavior
        if behavior == "pickup_green_ball":
            success = executor.pickup_green_ball()
        else:
            print(f"Unknown behavior: {behavior}")
            print("Available: pickup_green_ball")
            sys.exit(1)

        print("\n" + "=" * 50)
        print(f"Result: {'SUCCESS' if success else 'FAILED'}")

    except KeyboardInterrupt:
        print("\n\nAborted by user")
    finally:
        executor.disconnect()
        if robot and manager is not None:
            manager.disconnect(profile_name)
|
|
257
|
+
|
|
258
|
+
|
|
259
|
+
def robot_discover_command(subnet: Optional[str] = None, timeout: float = 3.0, json_output: bool = False):
    """
    Discover robots on network and USB.

    ate robot discover
    ate robot discover --subnet 192.168.1 --timeout 5

    Scans serial ports first, then probes the network for cameras, prints a
    human-readable summary, and optionally appends a JSON dump of the results.
    """
    print("Scanning for robots...\n")

    # USB/serial scan
    print("USB/Serial devices:")
    serial_devices = discover_serial_robots()

    if not serial_devices:
        print(" (none found)")
    else:
        for dev in serial_devices:
            marker = "✓" if dev.robot_type else "?"
            print(f" {marker} {dev.name}")
            print(f" Port: {dev.port}")
            if dev.robot_type:
                print(f" Type: {dev.robot_type}")
            if dev.manufacturer:
                print(f" Manufacturer: {dev.manufacturer}")

    # Network camera scan
    print("\nNetwork cameras:")
    cams = discover_network_cameras(timeout=timeout, subnet=subnet)

    if not cams:
        print(" (none found)")
    else:
        for cam in cams:
            print(f" ✓ {cam.name}")
            print(f" IP: {cam.ip}")
            if cam.ports:
                print(f" Ports: {cam.ports}")

    # Summary line
    print(f"\nFound {len(serial_devices) + len(cams)} device(s)")

    if json_output:
        payload = {
            "serial": [
                {"port": d.port, "type": d.robot_type, "name": d.name}
                for d in serial_devices
            ],
            "cameras": [
                {"ip": c.ip, "name": c.name, "ports": c.ports}
                for c in cams
            ],
        }
        print("\n" + json.dumps(payload, indent=2))
|
|
321
|
+
|
|
322
|
+
|
|
323
|
+
def robot_ble_command(
    action: str = "discover",
    timeout: float = 10.0,
    address: Optional[str] = None,
    name_filter: Optional[str] = None,
    profile_name: Optional[str] = None,
    json_output: bool = False,
    # Capture/analyze arguments
    platform: str = "auto",
    output: str = "capture.pklg",
    capture_file: Optional[str] = None,
    generate_code: bool = False,
):
    """
    Bluetooth Low Energy robot discovery, connection, and protocol analysis.

    ate robot ble discover
    ate robot ble discover --timeout 15 --filter MechDog
    ate robot ble connect AA:BB:CC:DD:EE:FF --profile my_mechdog
    ate robot ble capture --platform ios -o capture.pklg
    ate robot ble analyze capture.pklg --generate
    ate robot ble inspect mechdog_00

    Dispatches on `action`; each branch lazily imports its dependencies
    (bleak, capture helpers) so the command works without them installed
    until that feature is actually used. Exits with status 1 on errors.
    """
    if action == "discover":
        # Scan for advertising BLE devices, optionally filtered by name.
        print("Scanning for BLE robots...")
        print(f"Timeout: {timeout}s")
        if name_filter:
            print(f"Filter: {name_filter}")
        print()

        try:
            from ..drivers.ble_transport import BLETransport, discover_ble_robots as ble_discover

            # Use name filter if provided
            if name_filter:
                devices = BLETransport.discover_sync(timeout=timeout, name_filter=name_filter)
            else:
                devices = ble_discover(timeout=timeout)

            if not devices:
                print("No BLE devices found.")
                print()
                print("Tips:")
                print(" - Ensure Bluetooth is enabled on your computer")
                print(" - Make sure the robot is powered on")
                print(" - Some robots need to be in pairing mode")
                print(" - Try: ate robot ble discover --timeout 15")
                return

            print(f"Found {len(devices)} device(s):\n")
            for i, device in enumerate(devices, 1):
                print(f" [{i}] {device.name}")
                print(f" Address: {device.address}")
                print(f" RSSI: {device.rssi} dBm")
                print()

            # Machine-readable dump appended after the human-readable listing.
            if json_output:
                result = [{
                    "name": d.name,
                    "address": d.address,
                    "rssi": d.rssi,
                } for d in devices]
                print(json.dumps(result, indent=2))

            print("To connect:")
            print(f" ate robot ble connect <address> --profile <name>")
            print()
            print("Or configure a profile for BLE:")
            print(" ate robot profiles create my_ble_robot")
            print(" # Edit ~/.ate/robots/my_ble_robot.json and add:")
            print(' # "use_ble": true, "ble_address": "<address>"')

        except ImportError:
            print("BLE support requires the 'bleak' library.")
            print("Install with: pip install bleak")
            sys.exit(1)
        except Exception as e:
            print(f"BLE scan failed: {e}")
            sys.exit(1)

    elif action == "connect":
        # Connect to a device and probe it over the serial-over-BLE channel to
        # identify MicroPython / MechDog firmware.
        if not address:
            print("Usage: ate robot ble connect <address>")
            print()
            print("First discover BLE devices:")
            print(" ate robot ble discover")
            sys.exit(1)

        print(f"Connecting to BLE device: {address}")

        try:
            from ..drivers.ble_transport import BLETransport

            transport = BLETransport(address=address, timeout=10.0)

            print("Connecting...")
            if transport.connect():
                print("Connected!")
                print()

                # Try to identify robot: Ctrl+C interrupts any running program,
                # Ctrl+B drops a MicroPython board into the friendly REPL.
                print("Probing device...")
                transport.write(b'\x03')  # Ctrl+C
                import time
                time.sleep(0.2)
                transport.write(b'\x02')  # Ctrl+B
                time.sleep(0.5)

                transport.write(b'import sys; print(sys.implementation)\r\n')
                time.sleep(0.5)
                response = transport.read(1000).decode('utf-8', errors='ignore')

                if 'micropython' in response.lower():
                    print("Device: MicroPython device")

                    # Check for MechDog: importing the vendor module succeeds
                    # only on HiWonder firmware (no Error/Traceback in reply).
                    transport.write(b'from HW_MechDog import MechDog\r\n')
                    time.sleep(0.3)
                    response2 = transport.read(500).decode('utf-8', errors='ignore')

                    if 'Error' not in response2 and 'Traceback' not in response2:
                        print("Robot: HiWonder MechDog")
                    else:
                        print("Robot: Unknown MicroPython robot")
                else:
                    print("Device: Unknown type")
                    print(f"Response: {response[:200]}")

                transport.disconnect()

                if profile_name:
                    # Update profile with BLE settings
                    profile = load_profile(profile_name)
                    if profile:
                        profile.use_ble = True
                        profile.ble_address = address
                        if save_profile(profile):
                            print()
                            print(f"Updated profile '{profile_name}' for BLE connection.")
                    else:
                        print(f"Profile not found: {profile_name}")

                print()
                print("To use this device, configure a profile:")
                print(" ate robot profiles create my_ble_robot")
                print(" # Then edit to add BLE settings")

            else:
                print("Connection failed.")
                print("Make sure the device is powered on and in range.")

        except ImportError:
            print("BLE support requires the 'bleak' library.")
            print("Install with: pip install bleak")
            sys.exit(1)
        except Exception as e:
            print(f"BLE connection failed: {e}")
            sys.exit(1)

    elif action == "info":
        # Static help: which registered robots support BLE and which UUIDs the
        # stack expects.
        print("BLE Robot Support")
        print("=" * 40)
        print()
        print("Supported robots with BLE:")
        for robot in KNOWN_ROBOTS.values():
            from .registry import ConnectionType
            if ConnectionType.BLUETOOTH in robot.connection_types:
                print(f" - {robot.name} ({robot.manufacturer})")
                if robot.default_ports.get("ble_service_uuid"):
                    print(f" Service: {robot.default_ports['ble_service_uuid']}")
        print()
        print("BLE UUIDs (ESP32/HM-10 compatible):")
        print(" Service: 0000ffe0-0000-1000-8000-00805f9b34fb")
        print(" Write: 0000ffe1-0000-1000-8000-00805f9b34fb")
        print(" Notify: 0000ffe2-0000-1000-8000-00805f9b34fb")
        print()
        print("Commands:")
        print(" ate robot ble discover - Find BLE devices")
        print(" ate robot ble connect - Test connection to device")
        print(" ate robot ble inspect - Show device characteristics")
        print(" ate robot ble capture - Capture phone app traffic")
        print(" ate robot ble analyze - Analyze capture file")
        print(" ate robot ble info - Show this information")

    elif action == "capture":
        # BLE traffic capture from phone app
        try:
            from .ble_capture import (
                capture_ios_interactive,
                capture_android_interactive,
                check_ios_device,
                check_android_device,
            )

            if platform == "auto":
                # Auto-detect platform: prefer iOS when both are attached.
                ios_ok, _ = check_ios_device()
                android_ok, _ = check_android_device()

                if ios_ok:
                    platform = "ios"
                elif android_ok:
                    platform = "android"
                else:
                    print("No mobile device detected.")
                    print()
                    print("Connect your iPhone or Android phone via USB.")
                    print()
                    print("For iOS: Connect via Lightning/USB-C cable")
                    print("For Android: Enable USB debugging and connect via USB")
                    sys.exit(1)

            if platform == "ios":
                success = capture_ios_interactive(output)
            else:
                success = capture_android_interactive(output)

            if success:
                print()
                print(f"Next step: Analyze the capture with:")
                print(f" ate robot ble analyze {output}")

        except ImportError as e:
            print(f"BLE capture requires additional dependencies: {e}")
            sys.exit(1)

    elif action == "analyze":
        # Analyze BLE capture file
        if not capture_file:
            print("Usage: ate robot ble analyze <capture_file>")
            print()
            print("First capture traffic:")
            print(" ate robot ble capture -o capture.pklg")
            sys.exit(1)

        try:
            from .ble_capture import analyze_capture

            analysis = analyze_capture(capture_file)

            if analysis:
                print(analysis.summary())

                # Code generation runs when explicitly requested OR when the
                # user changed the output path from its default.
                if generate_code or output != "capture.pklg":
                    code = analysis.generate_python_code()

                    # If output looks like a .py file, save there
                    if output.endswith(".py"):
                        with open(output, "w") as f:
                            f.write(code)
                        print(f"\nGenerated code saved to: {output}")
                    elif generate_code:
                        print("\n" + "=" * 60)
                        print("GENERATED PYTHON CODE")
                        print("=" * 60)
                        print(code)

        except ImportError as e:
            print(f"BLE analysis requires tshark: {e}")
            print("Install with: brew install wireshark")
            sys.exit(1)

    elif action == "inspect":
        # Inspect BLE device characteristics
        if not address:
            print("Usage: ate robot ble inspect <device_name_or_address>")
            print()
            print("First discover devices:")
            print(" ate robot ble discover")
            sys.exit(1)

        try:
            from .ble_capture import inspect_ble_device

            if not inspect_ble_device(address):
                sys.exit(1)

        except ImportError as e:
            print(f"BLE inspection requires bleak: {e}")
            print("Install with: pip install bleak")
            sys.exit(1)

    elif action == "pickup":
        # Pure-BLE ball pickup using action groups and locomotion
        if not address:
            # Try to find MechDog automatically by advertised name.
            print("Scanning for MechDog...")
            try:
                import asyncio
                from bleak import BleakScanner

                async def find_mechdog():
                    devices = await BleakScanner.discover(timeout=5.0)
                    for d in devices:
                        if "mechdog" in (d.name or "").lower():
                            return d.address
                    return None

                address = asyncio.run(find_mechdog())

                if not address:
                    print("MechDog not found! Make sure it's powered on.")
                    print("Or specify address: ate robot ble pickup --address AA:BB:CC:DD")
                    sys.exit(1)

                print(f"Found MechDog: {address}")

            except ImportError:
                print("BLE requires bleak: pip install bleak")
                sys.exit(1)

        try:
            from .ble_capture import ble_pickup_sequence

            print()
            print("=" * 60)
            print("BLE BALL PICKUP SEQUENCE")
            print("=" * 60)
            print()
            print("This uses BLE-only commands:")
            print("- Action groups (CMD|2|1|X|$) for arm control")
            print("- Locomotion (CMD|3|X|$) for walking")
            print()
            print("NOTE: If arm doesn't respond, connect USB serial for")
            print("direct servo control: ate robot test mechdog --port <port>")
            print()

            import asyncio
            success = asyncio.run(ble_pickup_sequence(address))

            if success:
                print("\n✓ Pickup sequence complete!")
            else:
                print("\n✗ Pickup failed. Try USB serial for arm control.")
                sys.exit(1)

        except ImportError as e:
            print(f"BLE pickup requires bleak: {e}")
            print("Install with: pip install bleak")
            sys.exit(1)

    elif action == "test-locomotion":
        # Quick test of BLE locomotion commands
        if not address:
            print("Usage: ate robot ble test-locomotion --address <address>")
            sys.exit(1)

        try:
            import asyncio
            from bleak import BleakClient, BleakScanner

            async def test_locomotion():
                print(f"Connecting to {address}...")

                # Find device: `address` may be a MAC/UUID or a (partial) name.
                devices = await BleakScanner.discover(timeout=5.0)
                device = None
                for d in devices:
                    if address.lower() in (d.address or "").lower() or address.lower() in (d.name or "").lower():
                        device = d
                        break

                if not device:
                    print("Device not found!")
                    return False

                async with BleakClient(device) as client:
                    print(f"Connected: {client.is_connected}")

                    # HM-10/ESP32-style UART service: FFE1 = write, FFE2 = notify.
                    FFE1 = "0000ffe1-0000-1000-8000-00805f9b34fb"
                    FFE2 = "0000ffe2-0000-1000-8000-00805f9b34fb"

                    responses = []
                    def handler(s, d):
                        try:
                            responses.append(d.decode('ascii'))
                        except:
                            pass

                    await client.start_notify(FFE2, handler)

                    # Battery query: CMD|6|$ replies with CMD|6|<millivolts>|...
                    # (format inferred from the parsing below — confirm against
                    # the vendor protocol docs).
                    print("\nBattery check...")
                    await client.write_gatt_char(FFE1, b"CMD|6|$")
                    await asyncio.sleep(1.0)
                    for r in responses:
                        if "CMD|6|" in r:
                            parts = r.split("|")
                            if len(parts) >= 3:
                                try:
                                    mv = int(parts[2])
                                    print(f" Battery: {mv/1000:.2f}V")
                                except:
                                    pass

                    # Forward (CMD|3|3) for 1s, then stop (CMD|3|0).
                    print("\nWalking forward (1s)...")
                    await client.write_gatt_char(FFE1, b"CMD|3|3|$")
                    await asyncio.sleep(1.0)
                    await client.write_gatt_char(FFE1, b"CMD|3|0|$")

                    # Backward (CMD|3|7) for 1s, then stop.
                    print("Walking backward (1s)...")
                    await client.write_gatt_char(FFE1, b"CMD|3|7|$")
                    await asyncio.sleep(1.0)
                    await client.write_gatt_char(FFE1, b"CMD|3|0|$")

                    await client.stop_notify(FFE2)
                    print("\n✓ Locomotion test complete!")
                    return True

            asyncio.run(test_locomotion())

        except ImportError:
            print("BLE requires bleak: pip install bleak")
            sys.exit(1)
        except Exception as e:
            print(f"Test failed: {e}")
            sys.exit(1)

    else:
        print(f"Unknown action: {action}")
        print("Available actions: discover, connect, inspect, capture, analyze, pickup, test-locomotion, info")
        sys.exit(1)
|
|
747
|
+
|
|
748
|
+
|
|
749
|
+
def robot_list_command(archetype: Optional[str] = None):
    """
    List known robot types.

    ate robot list
    ate robot list --archetype quadruped

    Prints a fixed-width table of every registered robot type, optionally
    narrowed to a single archetype.
    """
    entries = RobotRegistry.list_all()
    if archetype:
        entries = [entry for entry in entries if entry.archetype == archetype]

    print("Known Robot Types:\n")
    print(f"{'ID':<25} {'Name':<15} {'Manufacturer':<15} {'Archetype':<12}")
    print("-" * 70)

    for entry in entries:
        row = f"{entry.id:<25} {entry.name:<15} {entry.manufacturer:<15} {entry.archetype:<12}"
        print(row)

    print(f"\n{len(entries)} robot type(s)")
    print("\nFor detailed info: ate robot info <robot_type>")
|
|
770
|
+
|
|
771
|
+
|
|
772
|
+
def robot_info_command(robot_type: Optional[str] = None, profile_name: Optional[str] = None):
    """
    Show robot type or profile information.

    ate robot info hiwonder_mechdog
    ate robot info --profile my_mechdog

    Profile mode prints the saved connection settings plus the type's
    capabilities; type mode prints the full registry entry.
    """
    if profile_name:
        # Profile mode: display the saved profile, then bail out.
        prof = load_profile(profile_name)
        if not prof:
            print(f"Profile not found: {profile_name}")
            sys.exit(1)

        print(f"Profile: {prof.name}")
        print(f"Robot Type: {prof.robot_type}")
        print(f"Serial Port: {prof.serial_port or '(not set)'}")
        print(f"Camera IP: {prof.camera_ip or '(not set)'}")
        print(f"Has Camera: {prof.has_camera}")
        print(f"Has Arm: {prof.has_arm}")

        # Capabilities come from the registry entry for the profile's type.
        if prof.robot_type in KNOWN_ROBOTS:
            entry = KNOWN_ROBOTS[prof.robot_type]
            print(f"\nCapabilities: {', '.join(entry.capabilities)}")
            if entry.optional_capabilities:
                print(f"Optional: {', '.join(entry.optional_capabilities)}")

        return

    # Type mode requires an explicit robot type.
    if not robot_type:
        print("Usage: ate robot info <robot_type>")
        print(" or: ate robot info --profile <profile_name>")
        sys.exit(1)

    rtype = KNOWN_ROBOTS.get(robot_type)
    if not rtype:
        print(f"Unknown robot type: {robot_type}")
        print("\nAvailable types:")
        for known in KNOWN_ROBOTS.values():
            print(f" {known.id}")
        sys.exit(1)

    # Identity block
    print(f"\n{rtype.name}")
    print("=" * 40)
    print(f"ID: {rtype.id}")
    print(f"Manufacturer: {rtype.manufacturer}")
    print(f"Archetype: {rtype.archetype}")
    print(f"\nDescription: {rtype.description}")

    # Connectivity block
    conn_names = ", ".join(c.name for c in rtype.connection_types)
    print(f"\nConnection Types: {conn_names}")
    if rtype.serial_patterns:
        print(f"Serial Patterns: {', '.join(rtype.serial_patterns)}")
    if rtype.default_ports:
        print(f"Default Ports: {rtype.default_ports}")

    # Capability block
    print(f"\nCapabilities:")
    for cap in sorted(rtype.capabilities):
        print(f" ✓ {cap}")

    if rtype.optional_capabilities:
        print(f"\nOptional Capabilities:")
        for cap in sorted(rtype.optional_capabilities):
            print(f" ○ {cap}")

    print(f"\nDriver: {rtype.driver_module}.{rtype.driver_class}")

    if rtype.setup_url:
        print(f"\nDocumentation: {rtype.setup_url}")
|
|
841
|
+
|
|
842
|
+
|
|
843
|
+
def robot_setup_command(
    port: Optional[str] = None,
    camera_ip: Optional[str] = None,
    robot_type: str = "hiwonder_mechdog",
    name: Optional[str] = None,
    non_interactive: bool = False,
) -> None:
    """
    Interactive setup wizard.

    Walks through serial-device discovery, network-camera discovery, profile
    naming, and finally saves a RobotProfile to disk via save_profile().

    ate robot setup
    ate robot setup --port /dev/cu.usbserial-10 --camera-ip 192.168.1.100

    Args:
        port: Serial port of the robot; auto-discovered when omitted.
        camera_ip: IP address of a network camera; auto-discovered when omitted.
        robot_type: Default robot type key; may be overwritten by discovery.
        name: Profile name; prompted for (or derived) when omitted.
        non_interactive: When True, skip all prompts and auto-select the first
            discovered/identified devices.
    """
    print("Robot Setup Wizard")
    print("=" * 40)
    print()

    # Step 1: Discover devices if not specified
    if not port:
        print("Scanning for robots...")
        serial_robots = discover_serial_robots()

        if serial_robots:
            print("\nFound serial devices:")
            for i, robot in enumerate(serial_robots):
                # Show the identified robot type next to the port when known.
                type_hint = f" [{robot.robot_type}]" if robot.robot_type else ""
                print(f" [{i+1}] {robot.port}{type_hint}")

            if not non_interactive:
                try:
                    choice = input("\nSelect device number (or press Enter to skip): ")
                    if choice.strip():
                        idx = int(choice) - 1
                        if 0 <= idx < len(serial_robots):
                            port = serial_robots[idx].port
                            # Selecting a device with a known type also updates robot_type.
                            if serial_robots[idx].robot_type:
                                robot_type = serial_robots[idx].robot_type
                except (ValueError, KeyboardInterrupt):
                    # Bad number or Ctrl+C: continue without a serial port.
                    pass
            elif serial_robots:
                # Auto-select first identified robot in non-interactive mode
                for robot in serial_robots:
                    if robot.robot_type:
                        port = robot.port
                        robot_type = robot.robot_type
                        break
        else:
            print("No serial devices found.")

    # Step 2: Discover cameras if not specified
    if not camera_ip:
        print("\nScanning for network cameras (this may take a few seconds)...")
        cameras = discover_network_cameras(timeout=1.5)

        if cameras:
            print("\nFound network cameras:")
            for i, cam in enumerate(cameras):
                print(f" [{i+1}] {cam.ip}")

            if not non_interactive:
                try:
                    choice = input("\nSelect camera number (or press Enter to skip): ")
                    if choice.strip():
                        idx = int(choice) - 1
                        if 0 <= idx < len(cameras):
                            camera_ip = cameras[idx].ip
                except (ValueError, KeyboardInterrupt):
                    pass
            elif cameras:
                # Non-interactive: take the first camera found.
                camera_ip = cameras[0].ip
        else:
            print("No network cameras found.")

    # Step 3: Get name (derived from the robot type's last token, e.g. "mechdog")
    if not name:
        if non_interactive:
            name = f"robot_{robot_type.split('_')[-1]}"
        else:
            default_name = f"my_{robot_type.split('_')[-1]}"
            name = input(f"\nProfile name [{default_name}]: ").strip() or default_name

    # Step 4: Create profile
    profile = create_profile_from_discovery(
        name=name,
        serial_port=port,
        camera_ip=camera_ip,
        robot_type=robot_type,
    )

    # Show summary
    print("\n" + "=" * 40)
    print("Configuration Summary")
    print("=" * 40)
    print(f"Name: {profile.name}")
    print(f"Robot Type: {profile.robot_type}")
    print(f"Serial Port: {profile.serial_port or '(not set)'}")
    print(f"Camera IP: {profile.camera_ip or '(not set)'}")
    print(f"Has Camera: {profile.has_camera}")

    # Step 5: Save profile
    if not non_interactive:
        confirm = input("\nSave this profile? [Y/n]: ")
        # Note: only a literal 'n'/'N' aborts; any other answer (including "no") saves.
        if confirm.lower() == 'n':
            print("Profile not saved.")
            return

    if save_profile(profile):
        # NOTE(review): message says .json but `profiles create` prints a .yaml
        # path for the same directory — confirm the real extension in save_profile.
        print(f"\nProfile saved to ~/.ate/robots/{profile.name}.json")
        print(f"\nTo use this robot:")
        print(f" ate record demo --profile {profile.name}")
        print(f"\nTo view capabilities:")
        print(f" ate robot test {profile.name}")
    else:
        print("Failed to save profile.")
        sys.exit(1)
|
|
958
|
+
|
|
959
|
+
|
|
960
|
+
def robot_test_command(profile_name: str, capability: Optional[str] = None, verbose: bool = False) -> None:
    """
    Test robot capabilities.

    Loads the named profile, connects through RobotManager, then either runs
    the test suite for one capability or enumerates and smoke-tests all of
    them. Always disconnects on exit.

    ate robot test my_mechdog
    ate robot test my_mechdog --capability camera

    Args:
        profile_name: Saved profile to load and connect.
        capability: When given, only that capability's tests run.
        verbose: When True, also print every introspected method.
    """
    profile = load_profile(profile_name)
    if not profile:
        print(f"Profile not found: {profile_name}")
        sys.exit(1)

    print(f"Testing robot: {profile.name}")
    print(f"Type: {profile.robot_type}")
    print()

    # Create robot instance
    manager = RobotManager()
    managed = manager.load(profile_name)

    if not managed:
        print("Failed to load robot configuration")
        sys.exit(1)

    # Connect
    print("Connecting...")
    if not manager.connect(profile_name):
        print("Connection failed")
        sys.exit(1)

    print("Connected!\n")

    try:
        robot = manager.get(profile_name)

        if capability:
            # Test specific capability
            result = test_capability(robot, capability)
            print(f"Testing {capability}...")
            for test in result.get("tests", []):
                status = "✓" if test["status"] == "passed" else "✗"
                # A test entry carries either a 'message' (pass) or 'error' (fail).
                print(f" {status} {test['name']}: {test.get('message', test.get('error', ''))}")
            print(f"\nOverall: {result['status'].upper()}")
        else:
            # Show all capabilities
            caps = get_capabilities(robot)
            print("Available Capabilities:")
            print("-" * 40)
            for name, info in caps.items():
                print(f" ✓ {name} ({len(info.methods)} methods)")

            if verbose:
                print("\nMethods:")
                print(format_methods(get_methods(robot)))

            # Quick tests: run every capability's test suite once.
            print("\nRunning capability tests...")
            for cap_name in caps.keys():
                result = test_capability(robot, cap_name)
                status = "✓" if result["status"] == "passed" else "✗"
                print(f" {status} {cap_name}: {result['status']}")

    finally:
        # Always release the connection, even when a test raises.
        manager.disconnect(profile_name)
        print("\nDisconnected.")
|
|
1025
|
+
|
|
1026
|
+
|
|
1027
|
+
def robot_approach_command(
    profile_name: str,
    duration: float = 30.0,
    target_distance: float = 0.15,
    speed: float = 0.3,
    detect_colors: Optional[list] = None,
    dry_run: bool = False,
) -> None:
    """
    Approach detected targets.

    Two phases: (1) scan center/left/right with the detector until a target
    appears, (2) run the ApproachTarget behavior tree until it succeeds,
    fails, the duration expires, or the user hits Ctrl+C.

    ate robot approach my_mechdog
    ate robot approach my_mechdog --duration 60 --target-distance 0.2

    Args:
        profile_name: Saved robot profile to connect.
        duration: Time budget in seconds, applied to each phase separately.
        target_distance: Stop distance in meters.
        speed: Approach speed passed to ApproachConfig.
        detect_colors: Optional color filter list (currently only printed;
            TrashDetector is constructed without it — TODO confirm intent).
        dry_run: Scan only; skip the movement phase.
    """
    # Heavy/optional imports are kept local so the CLI loads fast.
    import time
    from ..behaviors import ApproachTarget, ApproachConfig, BehaviorStatus
    from ..behaviors.tree import Blackboard
    from ..detection.trash_detector import TrashDetector
    from ..interfaces import VisualDistanceEstimator

    profile = load_profile(profile_name)
    if not profile:
        print(f"Profile not found: {profile_name}")
        sys.exit(1)

    print(f"Approach Mode: {profile.name}")
    print(f"Target distance: {target_distance}m")
    print(f"Speed: {speed}")
    print()

    # Create robot instance
    manager = RobotManager()
    managed = manager.load(profile_name)

    if not managed:
        print("Failed to load robot configuration")
        sys.exit(1)

    # Connect
    print("Connecting...")
    if not manager.connect(profile_name):
        print("Connection failed")
        sys.exit(1)

    print("Connected!\n")

    try:
        robot = manager.get(profile_name)
        info = robot.get_info()
        print(f"Capabilities: {[c.name for c in info.capabilities]}")

        # Check for required capabilities via duck typing.
        has_camera = hasattr(robot, 'get_image') and hasattr(robot, 'has_camera') and robot.has_camera()
        has_locomotion = hasattr(robot, 'walk') or hasattr(robot, 'drive')

        if not has_camera:
            print("Error: Robot does not have camera capability")
            sys.exit(1)

        if not has_locomotion:
            print("Error: Robot does not have locomotion capability")
            sys.exit(1)

        # Initialize detector
        detector = TrashDetector()
        if detect_colors:
            print(f"Detecting colors: {detect_colors}")

        # Initialize visual distance estimator (fall back to 640x480 when
        # the reported resolution has zero/None components).
        res = robot.get_resolution()
        visual_estimator = VisualDistanceEstimator(
            image_width=res[0] or 640,
            image_height=res[1] or 480,
        )

        # Configure approach behavior; slow down within 2x the stop distance.
        config = ApproachConfig(
            target_distance=target_distance,
            center_tolerance=0.15,
            approach_speed=speed,
            slow_distance=target_distance * 2,
            use_visual_distance=True,
        )

        # Stand ready
        if hasattr(robot, 'stand'):
            robot.stand()
            time.sleep(0.5)

        print("Scanning for targets...")
        start_time = time.time()

        # Scanning positions: (label, body pitch, body yaw)
        scan_positions = [
            ("center", 0, 0),
            ("left", 0, 0.4),
            ("right", 0, -0.4),
        ]

        blackboard = Blackboard()
        target_found = False

        # Phase 1: sweep the scan positions until a detection or timeout.
        while time.time() - start_time < duration and not target_found:
            for name, pitch, yaw in scan_positions:
                if time.time() - start_time >= duration:
                    break

                print(f"\nLooking {name}...")
                if hasattr(robot, 'set_body_orientation'):
                    robot.set_body_orientation(pitch=pitch, yaw=yaw)
                    time.sleep(0.5)

                # Get image and detect
                image = robot.get_image()
                if not image.data:
                    continue

                detections = detector.detect(image)
                if not detections:
                    print(f" No targets")
                    continue

                # Found target! Use the first (presumably best) detection.
                target = detections[0]
                print(f" Target: {target.class_name} at ({target.bbox.x}, {target.bbox.y})")

                # Estimate distance from apparent bbox width.
                dist_reading = visual_estimator.estimate_from_detection(
                    bbox_width=int(target.bbox.width),
                    object_type=target.class_name,
                )
                print(f" Estimated distance: {dist_reading.distance:.2f}m")

                # Store in blackboard for the behavior tree to consume.
                blackboard.set("target_detection", target)
                blackboard.set("detections", detections)
                target_found = True
                break

        if not target_found:
            print("\nNo targets found during scan period.")
            if hasattr(robot, 'set_body_orientation'):
                robot.set_body_orientation(0, 0, 0)
            return

        # Reset pose for approach
        if hasattr(robot, 'set_body_orientation'):
            robot.set_body_orientation(0, 0, 0)
            time.sleep(0.3)

        if dry_run:
            print("\n[DRY RUN] Would approach target but stopping here.")
            return

        # Create approach behavior; distance sensor is optional.
        distance_sensor = robot if hasattr(robot, 'get_distance') else None

        approach = ApproachTarget(
            locomotion=robot,
            camera=robot,
            distance_sensor=distance_sensor,
            config=config,
        )
        approach.blackboard = blackboard

        print("\n--- Starting Approach ---")
        print("Press Ctrl+C to stop\n")

        # Phase 2: tick the behavior at ~5 Hz, refreshing detections each tick.
        try:
            approach_start = time.time()
            while time.time() - approach_start < duration:
                # Get fresh detection
                image = robot.get_image()
                if image.data:
                    detections = detector.detect(image)
                    if detections:
                        blackboard.set("target_detection", detections[0])
                        blackboard.set("detections", detections)

                # Tick behavior
                status = approach.tick()

                state = blackboard.get("approach_state")
                distance = blackboard.get("target_distance")
                print(f" State: {state}, Distance: {distance:.2f}m" if distance else f" State: {state}")

                if status == BehaviorStatus.SUCCESS:
                    print("\n=== TARGET REACHED ===")
                    break
                elif status == BehaviorStatus.FAILURE:
                    print("\n=== APPROACH FAILED ===")
                    break

                time.sleep(0.2)

        except KeyboardInterrupt:
            print("\n\nStopping...")

        # Stop movement
        if hasattr(robot, 'stop'):
            robot.stop()

    finally:
        manager.disconnect(profile_name)
        print("\nDisconnected.")
|
|
1232
|
+
|
|
1233
|
+
|
|
1234
|
+
def robot_profiles_command(action: str, name: Optional[str] = None, template: Optional[str] = None) -> None:
    """
    Manage saved profiles.

    Dispatches on `action`: list, show, delete, create, templates.

    ate robot profiles list
    ate robot profiles show my_mechdog
    ate robot profiles delete my_mechdog
    ate robot profiles create my_dog --template mechdog_camera

    Args:
        action: Sub-command to run.
        name: Profile name for show/delete/create.
        template: Optional template name for create.
    """
    if action == "list":
        profiles = list_profiles()

        if not profiles:
            print("No saved profiles.")
            print("\nCreate one with: ate robot setup")
            return

        # Fixed-width table of all saved profiles.
        print("Saved Profiles:\n")
        print(f"{'Name':<20} {'Type':<25} {'Port':<25}")
        print("-" * 70)

        for p in profiles:
            port = p.serial_port or "(no port)"
            print(f"{p.name:<20} {p.robot_type:<25} {port:<25}")

    elif action == "show":
        if not name:
            print("Usage: ate robot profiles show <name>")
            sys.exit(1)
        # Reuse the info command's profile branch for display.
        robot_info_command(profile_name=name)

    elif action == "delete":
        if not name:
            print("Usage: ate robot profiles delete <name>")
            sys.exit(1)

        if delete_profile(name):
            print(f"Deleted profile: {name}")
        else:
            print(f"Profile not found: {name}")
            sys.exit(1)

    elif action == "create":
        if not name:
            print("Usage: ate robot profiles create <name> [--template <template>]")
            sys.exit(1)

        if template:
            # Start from a template and rename it.
            profile = get_template(template)
            if not profile:
                print(f"Template not found: {template}")
                print(f"Available templates: {', '.join(list_templates())}")
                sys.exit(1)
            profile.name = name
        else:
            # Blank profile with the default robot type.
            profile = RobotProfile(name=name, robot_type="hiwonder_mechdog")

        if save_profile(profile):
            print(f"Created profile: {name}")
            # NOTE(review): setup's save message says .json for the same
            # directory — confirm which extension save_profile actually writes.
            print(f"Edit at: ~/.ate/robots/{name}.yaml")
        else:
            print("Failed to create profile")
            sys.exit(1)

    elif action == "templates":
        print("Available Templates:\n")
        for tname in list_templates():
            tmpl = get_template(tname)
            if tmpl:
                print(f" {tname}")
                print(f" {tmpl.description}")
                print()

    else:
        print(f"Unknown action: {action}")
        print("Available actions: list, show, delete, create, templates")
        sys.exit(1)
|
|
1311
|
+
|
|
1312
|
+
|
|
1313
|
+
def robot_calibrate_command(
    action: str,
    port: Optional[str] = None,
    camera_url: Optional[str] = None,
    name: str = "robot",
    robot_type: str = "unknown",
    servo_id: Optional[int] = None,
    pose_name: Optional[str] = None,
) -> None:
    """
    Visual calibration for robot servos and poses.

    Dispatches on `action`: start, gripper, direction, status, list, poses,
    record, apply, reset. Several branches treat the default name "robot" as
    "no name given", so a calibration literally named "robot" cannot be
    addressed by those branches.

    ate robot calibrate start --port /dev/cu.usbserial-10 --name my_mechdog
    ate robot calibrate gripper --port /dev/cu.usbserial-10 --servo 11
    ate robot calibrate list
    ate robot calibrate poses my_mechdog

    Args:
        action: Sub-command to run.
        port: Serial port; auto-discovered for start/gripper/direction.
        camera_url: Optional camera stream URL for visual feedback.
        name: Robot/calibration name.
        robot_type: Robot model identifier.
        servo_id: Gripper servo id (gripper action; defaults to 11).
        pose_name: Pose name (record/apply actions).
    """
    if action == "start":
        # Full interactive calibration
        if not port:
            # Try to discover
            serial_robots = discover_serial_robots()
            if serial_robots:
                port = serial_robots[0].port
                print(f"Auto-detected: {port}")
            else:
                print("No serial port specified and none discovered.")
                print("Usage: ate robot calibrate start --port /dev/cu.usbserial-10")
                sys.exit(1)

        calibrator = VisualCalibrator(
            serial_port=port,
            robot_model=robot_type,
            robot_name=name,
        )

        if camera_url:
            calibrator.set_camera(camera_url)

        try:
            calibration = calibrator.run_interactive()
            save_calibration(calibration)
            print(f"\nCalibration saved as: {name}")
        except KeyboardInterrupt:
            # Ctrl+C is a normal way to leave the wizard; not an error.
            print("\nCalibration cancelled.")
        except Exception as e:
            print(f"\nCalibration failed: {e}")
            sys.exit(1)

    elif action == "gripper":
        # Quick gripper-only calibration
        if not port:
            serial_robots = discover_serial_robots()
            if serial_robots:
                port = serial_robots[0].port
            else:
                print("No serial port found. Use --port to specify.")
                sys.exit(1)

        sid = servo_id or 11  # Default gripper servo
        print(f"Quick gripper calibration on servo {sid}...")

        try:
            gripper = quick_gripper_calibration(port, gripper_servo_id=sid)
            print(f"\nGripper calibrated:")
            print(f" Open: {gripper.positions.get('open', 'N/A')}")
            print(f" Closed: {gripper.positions.get('closed', 'N/A')}")
            print(f" Range: {gripper.min_value} - {gripper.max_value}")
        except Exception as e:
            print(f"Calibration failed: {e}")
            sys.exit(1)

    elif action == "list":
        # List saved calibrations
        cals = list_calibrations()
        if not cals:
            print("No calibrations saved.")
            print("\nRun: ate robot calibrate start --port <port> --name <name>")
            return

        print("Saved Calibrations:\n")
        for cal_name in cals:
            cal = load_calibration(cal_name)
            if cal:
                servo_count = len(cal.servos)
                pose_count = len(cal.poses)
                print(f" {cal_name}")
                print(f" Model: {cal.robot_model}, Servos: {servo_count}, Poses: {pose_count}")
            else:
                # Keep listing even when one file fails to parse.
                print(f" {cal_name} (error loading)")

    elif action == "poses":
        # Show poses for a calibration
        if not name or name == "robot":
            print("Usage: ate robot calibrate poses <calibration_name>")
            sys.exit(1)

        cal = load_calibration(name)
        if not cal:
            print(f"Calibration not found: {name}")
            sys.exit(1)

        if not cal.poses:
            print(f"No poses saved for {name}")
            return

        print(f"Poses for {name}:\n")
        # NOTE(review): loop variable shadows the `pose_name` parameter (the
        # parameter is unused in this branch, so behavior is unaffected).
        for pose_name, pose in cal.poses.items():
            print(f" {pose_name}")
            if pose.description:
                print(f" {pose.description}")
            print(f" Servos: {len(pose.servo_positions)}, Time: {pose.transition_time_ms}ms")

    elif action == "record":
        # Record a new pose
        if not name or name == "robot":
            print("Usage: ate robot calibrate record <calibration_name> --pose <pose_name>")
            sys.exit(1)

        if not pose_name:
            print("Usage: ate robot calibrate record <name> --pose <pose_name>")
            sys.exit(1)

        cal = load_calibration(name)
        if not cal:
            print(f"Calibration not found: {name}")
            sys.exit(1)

        if not cal.serial_port:
            print("Calibration has no serial port configured.")
            sys.exit(1)

        # Rebuild a calibrator around the saved calibration data.
        calibrator = VisualCalibrator(
            serial_port=cal.serial_port,
            robot_model=cal.robot_model,
            robot_name=cal.robot_name,
        )
        calibrator.calibration = cal

        if cal.camera_url:
            calibrator.set_camera(cal.camera_url)

        if not calibrator.connect():
            print("Failed to connect to robot")
            sys.exit(1)

        try:
            # Initialize the on-board MechDog driver on the robot's REPL.
            calibrator.send_command("from HW_MechDog import MechDog; dog = MechDog()", wait=1.5)
            desc = input("Pose description (optional): ").strip()
            pose = calibrator.record_pose(pose_name, desc)
            save_calibration(calibrator.calibration)
            print(f"Recorded pose '{pose_name}' with {len(pose.servo_positions)} servos")
        finally:
            calibrator.disconnect()

    elif action == "apply":
        # Apply a saved pose
        if not name or name == "robot":
            print("Usage: ate robot calibrate apply <calibration_name> --pose <pose_name>")
            sys.exit(1)

        if not pose_name:
            print("Usage: ate robot calibrate apply <name> --pose <pose_name>")
            sys.exit(1)

        cal = load_calibration(name)
        if not cal:
            print(f"Calibration not found: {name}")
            sys.exit(1)

        pose = cal.get_pose(pose_name)
        if not pose:
            print(f"Pose not found: {pose_name}")
            print(f"Available poses: {', '.join(cal.poses.keys())}")
            sys.exit(1)

        if not cal.serial_port:
            print("Calibration has no serial port configured.")
            sys.exit(1)

        calibrator = VisualCalibrator(
            serial_port=cal.serial_port,
            robot_model=cal.robot_model,
            robot_name=cal.robot_name,
        )
        calibrator.calibration = cal

        if not calibrator.connect():
            print("Failed to connect to robot")
            sys.exit(1)

        try:
            # Initialize the on-board MechDog driver on the robot's REPL.
            calibrator.send_command("from HW_MechDog import MechDog; dog = MechDog()", wait=1.5)
            print(f"Applying pose: {pose_name}")
            success = calibrator.apply_pose(pose)
            if success:
                print("Pose applied successfully")
            else:
                print("Some servos failed to move")
        finally:
            calibrator.disconnect()

    elif action == "direction":
        # Direction calibration via Twitch Test
        from .direction_calibration import run_direction_calibration
        # NOTE(review): CalibrationState is imported but unused in this branch.
        from .calibration_state import CalibrationState

        if not port:
            # Try to discover
            serial_robots = discover_serial_robots()
            if serial_robots:
                port = serial_robots[0].port
                print(f"Auto-detected: {port}")
            else:
                print("No serial port specified and none discovered.")
                print("Usage: ate robot calibrate direction --port /dev/cu.usbserial-10 --name my_robot")
                sys.exit(1)

        # Run direction calibration
        mappings = run_direction_calibration(
            serial_port=port,
            robot_name=name,
            arm_servos=[8, 9, 10, 11],  # MechDog arm servos
        )

        if mappings:
            print("\nDirection calibration complete!")
            print("You can now run behaviors with correct directions.")
        else:
            print("\nDirection calibration failed.")
            print("Ensure ball and arm markers are visible to webcam.")
            sys.exit(1)

    elif action == "status":
        # Show calibration status
        from .calibration_state import CalibrationState

        if not name or name == "robot":
            # List all calibration states
            states = CalibrationState.list_all()
            if not states:
                print("No calibration states found.")
                print("\nStart with: ate robot calibrate start --name <robot_name>")
                return

            print("Calibration States:\n")
            for state_name in states:
                state = CalibrationState.load(state_name)
                current = state.get_current_stage()
                print(f" {state_name}: {current}")

            print("\nFor details: ate robot calibrate status <name>")
        else:
            # Show specific robot's status
            state = CalibrationState.load(name)
            state.print_status()

    elif action == "reset":
        # Reset calibration state
        from .calibration_state import CalibrationState

        if not name or name == "robot":
            print("Usage: ate robot calibrate reset <name>")
            sys.exit(1)

        state = CalibrationState.load(name)
        # Destructive: require explicit confirmation.
        confirm = input(f"Reset all calibration for '{name}'? [y/N]: ")
        if confirm.lower() == 'y':
            state.reset()
            print(f"Calibration reset for: {name}")
        else:
            print("Cancelled.")

    else:
        print(f"Unknown action: {action}")
        print("Available actions: start, gripper, direction, status, list, poses, record, apply, reset")
        sys.exit(1)
|
|
1590
|
+
|
|
1591
|
+
|
|
1592
|
+
def robot_label_command(
    port: Optional[str] = None,
    name: str = "robot",
    robot_type: str = "unknown",
    webcam_id: int = 0,
    camera_url: Optional[str] = None,
):
    """
    Interactive visual labeling session.

    Resolves a serial port (auto-discovering one when none is given) and
    hands off to visual_label_command for the actual session.

    ate robot label --port /dev/cu.usbserial-10 --name my_mechdog
    ate robot label --port /dev/cu.usbserial-10 --camera http://192.168.50.98:80/capture

    Args:
        port: Serial port of the robot; auto-discovered when omitted.
        name: Robot name for the labeling session.
        robot_type: Robot model identifier.
        webcam_id: Local webcam index for the external view.
        camera_url: Optional URL of the robot's onboard camera.
    """
    # Resolve a serial port before delegating; bail out if none can be found.
    if not port:
        discovered = discover_serial_robots()
        if not discovered:
            print("No serial port specified and none discovered.")
            print("Usage: ate robot label --port /dev/cu.usbserial-10")
            sys.exit(1)
        port = discovered[0].port
        print(f"Auto-detected: {port}")

    # Delegate the interactive session to the visual labeler entry point.
    visual_label_command(
        port=port,
        name=name,
        robot_type=robot_type,
        webcam_id=webcam_id,
        camera_url=camera_url,
    )
|
|
1623
|
+
|
|
1624
|
+
|
|
1625
|
+
def robot_skills_command(
    action: str,
    name: Optional[str] = None,
    skill_name: Optional[str] = None,
) -> None:
    """
    Manage generated robot skills.

    Dispatches on `action`: list, show, export. Export writes standalone
    Python skill files under ~/.ate/skills/<name>/.

    ate robot skills list
    ate robot skills show my_mechdog
    ate robot skills export my_mechdog pickup

    Args:
        action: Sub-command to run.
        name: Skill-library name for show/export.
        skill_name: Optional single action to export; all actions when omitted.
    """
    if action == "list":
        libraries = list_skill_libraries()
        if not libraries:
            print("No skill libraries saved.")
            print("\nRun: ate robot label --port <port> --name <name>")
            return

        print("Saved Skill Libraries:\n")
        for lib_name in libraries:
            lib = load_skill_library(lib_name)
            if lib:
                action_count = len(lib.actions)
                print(f" {lib_name}")
                print(f" Model: {lib.robot_model}, Actions: {action_count}")
            else:
                # Keep listing even when one library fails to load.
                print(f" {lib_name} (error loading)")

    elif action == "show":
        if not name:
            print("Usage: ate robot skills show <library_name>")
            sys.exit(1)

        lib = load_skill_library(name)
        if not lib:
            print(f"Skill library not found: {name}")
            sys.exit(1)

        print(f"\nSkill Library: {lib.robot_name}")
        print(f"Model: {lib.robot_model}")
        print(f"Created: {lib.created_at}")

        if lib.servo_labels:
            print(f"\nLabeled Servos:")
            for sid, label in lib.servo_labels.items():
                print(f" Servo {sid}: {label}")

        if lib.joint_groups:
            print(f"\nJoint Groups:")
            for group, ids in lib.joint_groups.items():
                print(f" {group}: {ids}")

        if lib.actions:
            print(f"\nActions:")
            for action_name, act in lib.actions.items():
                print(f" {action_name}: {len(act.steps)} steps ({act.action_type.value})")
                if act.description:
                    print(f" {act.description}")

    elif action == "export":
        if not name:
            print("Usage: ate robot skills export <library_name> [skill_name]")
            sys.exit(1)

        lib = load_skill_library(name)
        if not lib:
            print(f"Skill library not found: {name}")
            sys.exit(1)

        # Load calibration for skill generation
        cal = load_calibration(name)
        if not cal:
            print(f"No calibration found for: {name}")
            sys.exit(1)

        # The labeler is only used offline here, for code generation;
        # no serial connection is opened.
        labeler = DualCameraLabeler(
            serial_port=cal.serial_port or "",
            robot_name=name,
            robot_model=cal.robot_model,
        )
        labeler.calibrator.calibration = cal
        labeler.library = lib

        from pathlib import Path
        skills_dir = Path.home() / ".ate" / "skills" / name
        skills_dir.mkdir(parents=True, exist_ok=True)

        if skill_name:
            # Export single skill
            act = lib.actions.get(skill_name)
            if not act:
                print(f"Action not found: {skill_name}")
                print(f"Available: {list(lib.actions.keys())}")
                sys.exit(1)

            code = labeler.generate_skill_code(act)
            path = skills_dir / f"{skill_name}.py"
            with open(path, 'w') as f:
                f.write(code)
            print(f"Exported: {path}")
        else:
            # Export all skills
            for act_name, act in lib.actions.items():
                code = labeler.generate_skill_code(act)
                path = skills_dir / f"{act_name}.py"
                with open(path, 'w') as f:
                    f.write(code)
                print(f"Exported: {path}")

    else:
        print(f"Unknown action: {action}")
        print("Available actions: list, show, export")
        sys.exit(1)
|
|
1739
|
+
|
|
1740
|
+
|
|
1741
|
+
def robot_upload_command(
    name: str,
    project_id: Optional[str] = None,
    include_images: bool = True,
):
    """
    Upload skill library to FoodforThought.

    Creates artifacts with proper data lineage:
    raw (images) → processed (calibration) → labeled (poses) → skill (code)

    ate robot upload mechdog
    ate robot upload mechdog --project-id abc123
    ate robot upload mechdog --no-images
    """
    # A saved CLI config file is the marker that the user has logged in.
    if not (Path.home() / ".ate" / "config.json").exists():
        print("Not logged in. Run 'ate login' first.")
        sys.exit(1)

    # A calibration is required; list what exists when the lookup misses.
    calibration = load_calibration(name)
    if not calibration:
        print(f"No calibration found for: {name}")
        print("\nAvailable calibrations:")
        for existing in list_calibrations():
            print(f" - {existing}")
        sys.exit(1)

    # A skill library must have been produced by 'ate robot label'.
    library = load_skill_library(name)
    if not library:
        print(f"No skill library found for: {name}")
        print("Run 'ate robot label' first to create skills.")
        sys.exit(1)

    # Summarize what is about to be uploaded.
    print(f"Uploading skill library: {name}")
    print(f" Robot model: {library.robot_model}")
    print(f" Actions: {len(library.actions)}")
    print(f" Poses: {len(calibration.poses)}")
    print(f" Servos: {len(calibration.servos)}")
    if include_images:
        images_dir = Path.home() / ".ate" / "skill_images" / name
        if images_dir.exists():
            print(f" Images: {len(list(images_dir.glob('*.jpg')))}")
    print()

    try:
        result = upload_skill_library(
            robot_name=name,
            project_id=project_id,
            include_images=include_images,
        )

        print("Upload complete!")
        print(f" Project ID: {result['project_id']}")
        print(f" Artifacts created: {len(result['artifacts'])}")
        print()
        print("Artifacts:")
        for artifact in result["artifacts"]:
            print(f" [{artifact['stage']}] {artifact['name']}")

        print()
        print(f"View at: https://www.kindly.fyi/foodforthought/projects/{result['project_id']}")

    except ValueError as e:
        # Validation problems reported by the uploader.
        print(f"Error: {e}")
        sys.exit(1)
    except Exception as e:
        # Anything else (network, server) — report and exit non-zero.
        print(f"Upload failed: {e}")
        sys.exit(1)
def robot_map_servos_command(
    port: Optional[str] = None,
    num_servos: int = 13,
    use_camera: bool = False,
    camera_index: int = 0,
    wifi_camera_ip: Optional[str] = None,
    output: Optional[str] = None,
    save_images: bool = False,
    upload: bool = False,
    project_id: Optional[str] = None,
):
    """
    Map all servos and generate primitive skills using LLM-guided exploration.

    ate robot map-servos --port /dev/cu.usbserial-10
    ate robot map-servos --port /dev/cu.usbserial-10 --camera --output ./my_robot
    ate robot map-servos --port /dev/cu.usbserial-10 --wifi-camera 192.168.4.1
    """
    from .servo_mapper import run_servo_mapping

    if not port:
        # No port given on the command line — fall back to auto-discovery.
        found = discover_serial_robots()
        if not found:
            print("No serial port specified and none discovered.")
            print("Usage: ate robot map-servos --port /dev/cu.usbserial-10")
            sys.exit(1)
        port = found[0].port
        print(f"Auto-detected: {port}")

    # Derive a default output stem from the port's basename.
    output_file = output or "./servo_mappings_{}".format(port.split("/")[-1])

    results = run_servo_mapping(
        serial_port=port,
        num_servos=num_servos,
        use_camera=use_camera,
        camera_index=camera_index,
        wifi_camera_ip=wifi_camera_ip,
        use_proxy=True,  # metered LLM access through the proxy
        output_file=output_file,
        save_images=save_images,
    )

    code = results.get("python_code")
    if code:
        banner = "=" * 60
        print("\n" + banner)
        print("GENERATED PYTHON CODE:")
        print(banner)
        print(code[:2000])  # preview only the first 2000 characters
        if len(code) > 2000:
            print(f"\n... (see {output_file}.py for full code)")

    # Optionally push the mapping to FoodforThought.
    if upload:
        from .servo_mapper import upload_servo_mapping

        banner = "=" * 60
        print("\n" + banner)
        print("UPLOADING TO FOODFORTHOUGHT")
        print(banner)
        try:
            upload_result = upload_servo_mapping(
                results=results,
                output_file=output_file,
                project_id=project_id,
            )
            print("\nUpload complete!")
            print(f" Project ID: {upload_result['project_id']}")
            print(f" Artifacts created: {len(upload_result['artifacts'])}")
            for artifact in upload_result["artifacts"]:
                print(f" [{artifact['stage']}] {artifact['name']}")
            print(f"\nView at: https://www.kindly.fyi/foodforthought/projects/{upload_result['project_id']}")
        except Exception as e:
            # Keep the local result usable even when the upload fails.
            print(f"Upload failed: {e}")
            print("Servo mapping saved locally. Run 'ate robot upload' to retry.")
def robot_upload_calibration_command(
    calibration_file: str,
    robot_slug: Optional[str] = None,
    robot_name: Optional[str] = None,
    method: str = "llm_vision",
    publish: bool = False,
):
    """
    Upload a calibration to the community registry.

    ate robot upload-calibration ./servo_mappings.json --robot-slug mechdog-mini
    ate robot upload-calibration ./calibration.json --publish

    Args:
        calibration_file: Path to a JSON calibration file on disk.
        robot_slug: Registry slug identifying the robot model (overrides the
            robot_slug/robot_id fields inside the file).
        robot_name: Accepted for CLI symmetry; currently unused here.
        method: Calibration method label recorded with the upload.
        publish: If True, immediately publish instead of leaving a draft.
    """
    import json
    import requests
    from .skill_upload import SkillLibraryUploader

    # Load calibration file
    if not os.path.exists(calibration_file):
        print(f"Error: Calibration file not found: {calibration_file}")
        sys.exit(1)

    with open(calibration_file, 'r') as f:
        calibration_data = json.load(f)

    # Get API credentials (reuses the skill uploader's auth configuration)
    try:
        uploader = SkillLibraryUploader()
    except Exception as e:
        print(f"Error initializing uploader: {e}")
        sys.exit(1)

    base_url = os.environ.get("FOODFORTHOUGHT_API_URL", "https://www.kindly.fyi")

    # Prepare calibration payload; file values win, with sensible fallbacks.
    payload = {
        "name": calibration_data.get("name", f"Calibration from {calibration_file}"),
        "description": calibration_data.get("description", "Community-contributed calibration"),
        "version": calibration_data.get("version", "1.0.0"),
        "method": method,
        "confidence": calibration_data.get("confidence", 0.8),
    }

    # Add robot identification: CLI flag first, then fields from the file.
    if robot_slug:
        payload["robotSlug"] = robot_slug
    elif calibration_data.get("robot_slug"):
        payload["robotSlug"] = calibration_data["robot_slug"]
    elif calibration_data.get("robot_id"):
        payload["robotId"] = calibration_data["robot_id"]
    else:
        print("Error: Must specify --robot-slug or include robot_slug/robot_id in calibration file")
        sys.exit(1)

    # Add kinematic data — all optional; include only what the file provides.
    if calibration_data.get("urdf"):
        payload["urdfContent"] = calibration_data["urdf"]
    if calibration_data.get("dh_parameters"):
        payload["dhParameters"] = calibration_data["dh_parameters"]
    if calibration_data.get("joint_mappings") or calibration_data.get("mappings"):
        payload["jointMappings"] = calibration_data.get("joint_mappings") or calibration_data.get("mappings")
    if calibration_data.get("servo_mappings"):
        payload["servoMappings"] = calibration_data["servo_mappings"]
    if calibration_data.get("python_code"):
        payload["pythonCode"] = calibration_data["python_code"]
    if calibration_data.get("primitives"):
        payload["generatedPrimitives"] = calibration_data["primitives"]

    # Hardware fingerprint
    if calibration_data.get("firmware_version"):
        payload["firmwareVersion"] = calibration_data["firmware_version"]
    if calibration_data.get("port"):
        payload["serialPortName"] = calibration_data["port"]

    print("Uploading calibration to community registry...")
    print(f" Robot: {payload.get('robotSlug', payload.get('robotId', 'Unknown'))}")
    print(f" Method: {method}")
    print()

    try:
        # Use the uploader's pre-configured headers
        headers = uploader.headers
        response = requests.post(
            f"{base_url}/api/robot-calibrations",
            json=payload,
            headers=headers,
        )
        response.raise_for_status()
        result = response.json()
    except requests.RequestException as e:
        print(f"Error uploading calibration: {e}")
        # e.response is None for connection-level failures — guard explicitly
        # (same pattern robot_identify_command uses) instead of the previous
        # tautological hasattr() check.
        if e.response is not None:
            try:
                print(f"Details: {e.response.json()}")
            except ValueError:
                # Response body was not JSON; nothing more to show.
                pass
        sys.exit(1)

    print("[OK] Calibration uploaded successfully!")
    print(f" ID: {result['id']}")
    print(f" Name: {result['name']}")
    print(f" Status: {result['status']}")
    print()

    if publish:
        print("Publishing calibration...")
        try:
            publish_response = requests.patch(
                f"{base_url}/api/robot-calibrations/{result['id']}",
                json={"status": "published"},
                headers=headers,
            )
            publish_response.raise_for_status()
            print("[OK] Calibration published!")
        except requests.RequestException as e:
            # Non-fatal: the draft still exists server-side.
            print(f"Warning: Failed to publish: {e}")
            print("Calibration saved as draft. Publish manually when ready.")
    else:
        print("Calibration saved as draft. To publish:")
        print(f" ate robot publish-calibration {result['id']}")

    print()
    print("Other users can now find and use your calibration!")
    print("Thank you for contributing to the community.")
def robot_identify_command(
    robot_slug: Optional[str] = None,
    robot_name: Optional[str] = None,
    port: Optional[str] = None,
):
    """
    Search the community calibration registry for matching calibrations.

    ate robot identify mechdog-mini
    ate robot identify --port /dev/cu.usbserial-10
    """
    import requests

    base_url = os.environ.get("FOODFORTHOUGHT_API_URL", "https://www.kindly.fyi")

    print("Searching community calibration registry...")
    print()

    # Build the search query from whatever identifiers were supplied.
    params = {"status": "published"}
    if robot_slug:
        params["robotSlug"] = robot_slug
    if robot_name:
        params["search"] = robot_name

    try:
        response = requests.get(f"{base_url}/api/robot-calibrations", params=params)
        response.raise_for_status()
        data = response.json()
    except requests.RequestException as e:
        # A 404 means the registry endpoint isn't deployed yet — treat it
        # as "no results" and encourage a first contribution.
        resp = getattr(e, "response", None)
        if resp is not None and resp.status_code == 404:
            print("The calibration registry is being set up.")
            print()
            print("Be the first to calibrate this robot!")
            print(" 1. Run: ate robot generate-markers")
            print(" 2. Print and attach markers to your robot")
            print(" 3. Run: ate robot calibrate --method aruco")
            print(" 4. Run: ate robot upload-calibration <file>")
            print()
            print("Your calibration will help everyone with this robot model.")
            return
        print(f"Error fetching calibrations: {e}")
        sys.exit(1)

    calibrations = data.get("calibrations", [])

    if not calibrations:
        print("No matching calibrations found in the community registry.")
        print()
        print("Be the first to calibrate this robot!")
        print(" 1. Run: ate robot generate-markers")
        print(" 2. Print and attach markers to your robot")
        print(" 3. Run: ate robot calibrate --upload")
        print()
        print("Your calibration will help everyone with this robot model.")
        return

    print(f"Found {len(calibrations)} calibration(s):")
    print()

    # One numbered entry per calibration, with quality badges.
    for idx, entry in enumerate(calibrations, 1):
        robot = entry.get("robot", {})
        author = entry.get("author", {})
        verifications = entry.get("_count", {}).get("verifications", 0)

        verified_badge = " [VERIFIED]" if entry.get("verified") else ""
        featured_badge = " [FEATURED]" if entry.get("featured") else ""

        print(f"{idx}. {entry['name']}{verified_badge}{featured_badge}")
        print(f" Robot: {robot.get('name', 'Unknown')} ({robot.get('manufacturer', 'Unknown')})")
        print(f" Method: {entry['method']}")
        print(f" Confidence: {entry['confidence']:.0%}")
        print(f" Verified by: {verifications} user(s)")
        print(f" Author: {author.get('name', 'Unknown')}")
        print(f" ID: {entry['id']}")
        print()

    print("To download a calibration:")
    print(" ate robot download <calibration-id>")
    print()
    print("To verify a calibration works on your robot:")
    print(" ate robot verify <calibration-id>")
def robot_generate_markers_command(
    output: str = "aruco_markers.pdf",
    count: int = 12,
    size: float = 30.0,
    robot_name: Optional[str] = None,
    page_size: str = "letter",
    preset: Optional[str] = None,
    list_presets: bool = False,
):
    """
    Generate printable ArUco markers for robot calibration.

    ate robot generate-markers
    ate robot generate-markers --count 6 --size 40
    ate robot generate-markers --robot-name "My MechDog" -o my_markers.pdf
    ate robot generate-markers --preset mechdog-mini

    Args:
        output: Destination PDF path.
        count: Number of markers (overridden when --preset is used).
        size: Uniform marker edge length in mm (ignored when --preset is used,
            since presets carry per-joint sizes).
        robot_name: Label for the sheet; a preset supplies a default.
        page_size: "letter" selects US Letter; any other value selects A4.
        preset: Named robot preset with per-joint marker specs.
        list_presets: Print available presets and exit.
    """
    try:
        from .marker_generator import generate_marker_pdf, list_presets as get_presets, get_preset_markers
        from reportlab.lib.pagesizes import LETTER, A4
    except ImportError as e:
        print("Error: Missing dependencies for marker generation.")
        print("Install with: pip install opencv-contrib-python reportlab pillow")
        print(f"Details: {e}")
        sys.exit(1)

    # List presets and exit
    if list_presets:
        presets = get_presets()
        print("Available robot presets:")
        print()
        for p in presets:
            print(f" {p['name']:20} {p['robot_name']:30} ({p['marker_count']} markers)")
        print()
        print("Usage: ate robot generate-markers --preset mechdog-mini")
        return

    page = LETTER if page_size == "letter" else A4

    # Use preset if specified — preset specs replace --count/--size.
    marker_specs = None
    if preset:
        try:
            marker_specs, preset_robot_name = get_preset_markers(preset)
            if not robot_name:
                robot_name = preset_robot_name
            count = len(marker_specs)
            print(f"Using preset: {preset}")
            print(f" Robot: {robot_name}")
            print(f" Markers: {count} (with optimized sizes per joint)")
        except KeyError as e:
            print(f"Error: {e}")
            print("\nRun 'ate robot generate-markers --list-presets' to see available presets.")
            sys.exit(1)
    else:
        print(f"Generating {count} ArUco markers...")
        print(f" Size: {size}mm (uniform)")

    print(f" Page: {page_size}")
    print(f" Output: {output}")
    print()

    try:
        if marker_specs:
            # Use preset specs with variable sizes
            result = generate_marker_pdf(
                output_path=output,
                marker_specs=marker_specs,
                page_size=page,
                robot_name=robot_name,
                include_instructions=True,
            )
        else:
            # Use uniform size
            result = generate_marker_pdf(
                output_path=output,
                count=count,
                size_mm=size,
                page_size=page,
                robot_name=robot_name,
                include_instructions=True,
            )

        print(f"[OK] Markers saved to: {result}")
        print()
        print("Next steps:")
        print(" 1. Print the PDF at 100% scale (do not 'fit to page')")
        print(" 2. Cut out markers along the dashed lines")
        print(" 3. Attach to your robot's moving segments")
        print(" 4. Run: ate robot calibrate --method aruco")
        print()
        print("Your calibration can be shared with the community!")
        print("First user to calibrate a robot model helps everyone else.")
    except Exception as e:
        print(f"Error generating markers: {e}")
        import traceback
        traceback.print_exc()
        sys.exit(1)
def robot_urdf_command(
    action: str = "generate",
    reset: bool = False,
    frames: int = 20,
    camera: int = 0,
    marker_size: float = 0.025,
    output: Optional[str] = None,
    ble_address: Optional[str] = None,
    robot_name: str = "mechdog",
    urdf_file: Optional[str] = None,
    bridge: bool = False,
    launch: bool = False,
):
    """
    URDF generation and Artifex integration command.

    ate robot urdf generate          # Generate URDF from markers
    ate robot urdf generate --reset  # Reset robot pose first
    ate robot urdf open              # Open in Artifex Desktop
    ate robot urdf info              # Show URDF details

    Args:
        action: One of "generate", "open", "info".
        reset: For "generate": drive the robot to a known pose over BLE first.
        frames: Number of camera frames to average marker poses over.
        camera: OpenCV camera index.
        marker_size: ArUco marker edge length in meters.
        output: Optional URDF output path (default: urdf_generation/ dir).
        ble_address: Explicit MechDog BLE address; auto-discovered if omitted.
        robot_name: Robot name embedded in the generated URDF.
        urdf_file: Existing URDF path for "open"/"info".
        bridge: For "open": sync via the ATE bridge instead of launching.
        launch: For "open" with --bridge: fall back to launching on failure.
    """
    # Fixed: 'json' was used below (json.dump) but never imported here;
    # duplicate 'import time' consolidated; unused 're'/'subprocess' dropped.
    import asyncio
    import cv2
    import json
    import time
    import numpy as np
    from datetime import datetime

    # Default paths
    urdf_dir = Path(__file__).parent.parent.parent / "urdf_generation"
    urdf_dir.mkdir(exist_ok=True)
    default_urdf = urdf_dir / "mechdog_calibrated.urdf"

    if action == "generate":
        print("=" * 60)
        print("ATE URDF GENERATION")
        print("=" * 60)
        print("\nSettings:")
        print(f" Marker size: {marker_size * 1000:.0f}mm")
        print(f" Camera index: {camera}")
        print(f" Frames: {frames}")
        print(f" Robot name: {robot_name}")

        # ArUco setup
        ARUCO_DICT = cv2.aruco.getPredefinedDictionary(cv2.aruco.DICT_4X4_50)
        ARUCO_PARAMS = cv2.aruco.DetectorParameters()

        # Reset robot pose if requested
        if reset:
            print("\n--- Resetting Robot Pose ---")
            try:
                async def do_reset():
                    from bleak import BleakClient, BleakScanner

                    address = ble_address
                    if not address:
                        # Auto-discover MechDog
                        print("Scanning for MechDog...")
                        devices = await BleakScanner.discover(timeout=5.0)
                        for d in devices:
                            if d.name and "MechDog" in d.name:
                                address = d.address
                                print(f"Found: {d.name} ({address})")
                                break

                    if not address:
                        print("! MechDog not found - skipping reset")
                        return False

                    # HM-10 style UART characteristic UUID
                    FFE1 = "0000ffe1-0000-1000-8000-00805f9b34fb"

                    async with BleakClient(address) as client:
                        print(f"Connected to {address}")

                        async def send(cmd: str, delay: float = 0.5):
                            await client.write_gatt_char(FFE1, (cmd + "\n").encode())
                            await asyncio.sleep(delay)

                        # Reset sequence: sit, stand, then home the arm.
                        print(" Sit...")
                        await send("CMD|2|1|4|$", 2.0)
                        print(" Stand...")
                        await send("CMD|2|1|3|$", 2.0)
                        print(" Arm home...")
                        await send("CMD|7|8|$", 1.5)
                        print(" Done")
                        return True

                asyncio.run(do_reset())
                print("Waiting for robot to settle...")
                time.sleep(2.0)

            except ImportError:
                print("! bleak not installed - install with: pip install bleak")
            except Exception as e:
                # Reset is best-effort; calibration can proceed regardless.
                print(f"! Reset failed: {e}")
                print(" Continuing with current pose...")

        # Open camera
        print("\n--- Capturing Markers ---")
        cap = cv2.VideoCapture(camera)
        if not cap.isOpened():
            print("Error: Could not open webcam")
            sys.exit(1)

        cap.set(cv2.CAP_PROP_FRAME_WIDTH, 1280)
        cap.set(cv2.CAP_PROP_FRAME_HEIGHT, 720)
        time.sleep(1)  # let exposure settle

        ret, frame = cap.read()
        if not ret:
            print("Error: Could not read from camera")
            sys.exit(1)

        # Approximate pinhole intrinsics from the frame size (uncalibrated).
        h, w = frame.shape[:2]
        focal_length = w * 0.8
        camera_matrix = np.array([
            [focal_length, 0, w / 2],
            [0, focal_length, h / 2],
            [0, 0, 1]
        ], dtype=np.float32)
        dist_coeffs = np.zeros(5, dtype=np.float32)

        print(f"Camera: {w}x{h}")

        # Capture markers over multiple frames and accumulate poses per id.
        all_markers = {}
        detector = cv2.aruco.ArucoDetector(ARUCO_DICT, ARUCO_PARAMS)

        for i in range(frames):
            # Drain a couple of buffered frames for a fresher capture.
            for _ in range(2):
                cap.read()

            ret, frame = cap.read()
            if not ret:
                continue

            gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
            corners, ids, _ = detector.detectMarkers(gray)

            if ids is not None:
                for j, marker_id in enumerate(ids.flatten()):
                    rvecs, tvecs, _ = cv2.aruco.estimatePoseSingleMarkers(
                        [corners[j]], marker_size, camera_matrix, dist_coeffs
                    )
                    mid = int(marker_id)
                    if mid not in all_markers:
                        all_markers[mid] = {"positions": [], "rotations": [], "corners": corners[j][0].tolist()}
                    all_markers[mid]["positions"].append(tvecs[0][0].tolist())
                    all_markers[mid]["rotations"].append(rvecs[0][0].tolist())

            print(f" Frame {i+1}/{frames}: {len(all_markers)} markers", end='\r')

        print()
        cap.release()

        # Calculate per-marker averages over all detections.
        for mid, data in all_markers.items():
            positions = np.array(data["positions"])
            rotations = np.array(data["rotations"])
            data["avg_position"] = positions.mean(axis=0).tolist()
            data["std_position"] = positions.std(axis=0).tolist()
            data["avg_rotation"] = rotations.mean(axis=0).tolist()
            data["detection_count"] = len(positions)

        print(f"\nDetected {len(all_markers)} markers: {sorted(all_markers.keys())}")

        # Save marker data
        marker_data = {
            "timestamp": datetime.now().isoformat(),
            "marker_size_m": marker_size,
            "markers": all_markers,
            "camera_matrix": camera_matrix.tolist(),
        }
        marker_json = urdf_dir / "marker_data.json"
        with open(marker_json, "w") as f:
            json.dump(marker_data, f, indent=2)
        print(f"Saved: {marker_json}")

        # Generate URDF
        print("\n--- Generating URDF ---")
        urdf_path = Path(output) if output else default_urdf
        _generate_urdf_from_markers(all_markers, urdf_path, robot_name)
        print(f"\nURDF saved to: {urdf_path}")
        print("\nTo open in Artifex Desktop:")
        print(" ate robot urdf open")

    elif action == "open":
        # Resolve URDF file
        urdf_path = Path(urdf_file) if urdf_file else default_urdf

        if not urdf_path.exists():
            print(f"Error: URDF not found: {urdf_path}")
            print("\nGenerate one first:")
            print(" ate robot urdf generate")
            sys.exit(1)

        # Show info
        _show_urdf_info(urdf_path)

        if bridge:
            # Try bridge sync; optionally fall back to a direct launch.
            print("\n--- Syncing via ATE Bridge ---")
            success = asyncio.run(_sync_urdf_bridge(urdf_path))
            if not success and launch:
                _launch_artifex(urdf_path)
        else:
            # Direct launch
            _launch_artifex(urdf_path)

    elif action == "info":
        urdf_path = Path(urdf_file) if urdf_file else default_urdf

        if not urdf_path.exists():
            print(f"Error: URDF not found: {urdf_path}")
            sys.exit(1)

        _show_urdf_info(urdf_path)

    else:
        print("Usage: ate robot urdf <generate|open|info>")
        sys.exit(1)
def _generate_urdf_from_markers(markers: dict, output_path: Path, robot_name: str = "mechdog"):
|
|
2429
|
+
"""Generate URDF from detected marker positions."""
|
|
2430
|
+
from datetime import datetime
|
|
2431
|
+
|
|
2432
|
+
detected = list(markers.keys())
|
|
2433
|
+
|
|
2434
|
+
# Find body reference marker
|
|
2435
|
+
body_marker = 0 if 0 in markers else 7 if 7 in markers else detected[0] if detected else None
|
|
2436
|
+
if not body_marker:
|
|
2437
|
+
print("Error: No markers detected!")
|
|
2438
|
+
return
|
|
2439
|
+
|
|
2440
|
+
import numpy as np
|
|
2441
|
+
body_pos = np.array(markers[body_marker]["avg_position"])
|
|
2442
|
+
|
|
2443
|
+
# Calculate relative positions
|
|
2444
|
+
relative_positions = {}
|
|
2445
|
+
for mid, data in markers.items():
|
|
2446
|
+
pos = np.array(data["avg_position"])
|
|
2447
|
+
relative_positions[mid] = (pos - body_pos).tolist()
|
|
2448
|
+
|
|
2449
|
+
# Build URDF
|
|
2450
|
+
urdf = f'''<?xml version="1.0"?>
|
|
2451
|
+
<robot name="{robot_name}" xmlns:xacro="http://www.ros.org/wiki/xacro">
|
|
2452
|
+
<!--
|
|
2453
|
+
{robot_name.upper()} URDF - Generated by ATE CLI
|
|
2454
|
+
Date: {datetime.now().isoformat()}
|
|
2455
|
+
Markers detected: {detected}
|
|
2456
|
+
|
|
2457
|
+
Generated with: ate robot urdf generate
|
|
2458
|
+
-->
|
|
2459
|
+
|
|
2460
|
+
<!-- Materials -->
|
|
2461
|
+
<material name="orange"><color rgba="1.0 0.5 0.0 1.0"/></material>
|
|
2462
|
+
<material name="black"><color rgba="0.1 0.1 0.1 1.0"/></material>
|
|
2463
|
+
<material name="gray"><color rgba="0.5 0.5 0.5 1.0"/></material>
|
|
2464
|
+
|
|
2465
|
+
<!-- Base Link -->
|
|
2466
|
+
<link name="base_link">
|
|
2467
|
+
<visual><geometry><box size="0.01 0.01 0.01"/></geometry></visual>
|
|
2468
|
+
</link>
|
|
2469
|
+
|
|
2470
|
+
<!-- Body -->
|
|
2471
|
+
<link name="body">
|
|
2472
|
+
<visual>
|
|
2473
|
+
<geometry><box size="0.18 0.10 0.06"/></geometry>
|
|
2474
|
+
<material name="orange"/>
|
|
2475
|
+
</visual>
|
|
2476
|
+
<collision><geometry><box size="0.18 0.10 0.06"/></geometry></collision>
|
|
2477
|
+
<inertial>
|
|
2478
|
+
<mass value="0.8"/>
|
|
2479
|
+
<inertia ixx="0.002" ixy="0" ixz="0" iyy="0.003" iyz="0" izz="0.002"/>
|
|
2480
|
+
</inertial>
|
|
2481
|
+
</link>
|
|
2482
|
+
|
|
2483
|
+
<joint name="body_joint" type="fixed">
|
|
2484
|
+
<parent link="base_link"/>
|
|
2485
|
+
<child link="body"/>
|
|
2486
|
+
<origin xyz="0 0 0.12"/>
|
|
2487
|
+
</joint>
|
|
2488
|
+
|
|
2489
|
+
<!-- Arm -->
|
|
2490
|
+
'''
|
|
2491
|
+
# Arm configuration
|
|
2492
|
+
arm_offset = "0.09 0 0.02"
|
|
2493
|
+
if 1 in markers:
|
|
2494
|
+
rel = relative_positions[1]
|
|
2495
|
+
arm_offset = f"{rel[0]:.4f} {rel[1]:.4f} {rel[2] + 0.02:.4f}"
|
|
2496
|
+
|
|
2497
|
+
urdf += f''' <link name="arm_link1">
|
|
2498
|
+
<visual>
|
|
2499
|
+
<origin xyz="0.035 0 0"/>
|
|
2500
|
+
<geometry><box size="0.07 0.03 0.03"/></geometry>
|
|
2501
|
+
<material name="gray"/>
|
|
2502
|
+
</visual>
|
|
2503
|
+
<inertial><mass value="0.08"/><inertia ixx="0.0001" ixy="0" ixz="0" iyy="0.0001" iyz="0" izz="0.0001"/></inertial>
|
|
2504
|
+
</link>
|
|
2505
|
+
|
|
2506
|
+
<joint name="arm_shoulder_joint" type="revolute">
|
|
2507
|
+
<parent link="body"/>
|
|
2508
|
+
<child link="arm_link1"/>
|
|
2509
|
+
<origin xyz="{arm_offset}"/>
|
|
2510
|
+
<axis xyz="0 1 0"/>
|
|
2511
|
+
<limit lower="-1.57" upper="1.57" effort="5.0" velocity="2.0"/>
|
|
2512
|
+
</joint>
|
|
2513
|
+
|
|
2514
|
+
<link name="arm_link2">
|
|
2515
|
+
<visual>
|
|
2516
|
+
<origin xyz="0.04 0 0"/>
|
|
2517
|
+
<geometry><box size="0.08 0.025 0.025"/></geometry>
|
|
2518
|
+
<material name="gray"/>
|
|
2519
|
+
</visual>
|
|
2520
|
+
<inertial><mass value="0.06"/><inertia ixx="0.0001" ixy="0" ixz="0" iyy="0.0001" iyz="0" izz="0.0001"/></inertial>
|
|
2521
|
+
</link>
|
|
2522
|
+
|
|
2523
|
+
<joint name="arm_elbow_joint" type="revolute">
|
|
2524
|
+
<parent link="arm_link1"/>
|
|
2525
|
+
<child link="arm_link2"/>
|
|
2526
|
+
<origin xyz="0.07 0 0"/>
|
|
2527
|
+
<axis xyz="0 1 0"/>
|
|
2528
|
+
<limit lower="-1.57" upper="1.57" effort="3.0" velocity="2.0"/>
|
|
2529
|
+
</joint>
|
|
2530
|
+
|
|
2531
|
+
<link name="gripper_base">
|
|
2532
|
+
<visual><geometry><box size="0.03 0.04 0.02"/></geometry><material name="black"/></visual>
|
|
2533
|
+
<inertial><mass value="0.03"/><inertia ixx="0.00001" ixy="0" ixz="0" iyy="0.00001" iyz="0" izz="0.00001"/></inertial>
|
|
2534
|
+
</link>
|
|
2535
|
+
|
|
2536
|
+
<joint name="gripper_base_joint" type="fixed">
|
|
2537
|
+
<parent link="arm_link2"/>
|
|
2538
|
+
<child link="gripper_base"/>
|
|
2539
|
+
<origin xyz="0.08 0 0"/>
|
|
2540
|
+
</joint>
|
|
2541
|
+
|
|
2542
|
+
<link name="gripper_left_finger">
|
|
2543
|
+
<visual><origin xyz="0.015 0 0"/><geometry><box size="0.03 0.005 0.015"/></geometry><material name="black"/></visual>
|
|
2544
|
+
<inertial><mass value="0.01"/><inertia ixx="0.000001" ixy="0" ixz="0" iyy="0.000001" iyz="0" izz="0.000001"/></inertial>
|
|
2545
|
+
</link>
|
|
2546
|
+
|
|
2547
|
+
<joint name="gripper_left_joint" type="prismatic">
|
|
2548
|
+
<parent link="gripper_base"/>
|
|
2549
|
+
<child link="gripper_left_finger"/>
|
|
2550
|
+
<origin xyz="0.01 0.012 0"/>
|
|
2551
|
+
<axis xyz="0 1 0"/>
|
|
2552
|
+
<limit lower="-0.012" upper="0" effort="1.0" velocity="0.5"/>
|
|
2553
|
+
</joint>
|
|
2554
|
+
|
|
2555
|
+
<link name="gripper_right_finger">
|
|
2556
|
+
<visual><origin xyz="0.015 0 0"/><geometry><box size="0.03 0.005 0.015"/></geometry><material name="black"/></visual>
|
|
2557
|
+
<inertial><mass value="0.01"/><inertia ixx="0.000001" ixy="0" ixz="0" iyy="0.000001" iyz="0" izz="0.000001"/></inertial>
|
|
2558
|
+
</link>
|
|
2559
|
+
|
|
2560
|
+
<joint name="gripper_right_joint" type="prismatic">
|
|
2561
|
+
<parent link="gripper_base"/>
|
|
2562
|
+
<child link="gripper_right_finger"/>
|
|
2563
|
+
<origin xyz="0.01 -0.012 0"/>
|
|
2564
|
+
<axis xyz="0 -1 0"/>
|
|
2565
|
+
<limit lower="-0.012" upper="0" effort="1.0" velocity="0.5"/>
|
|
2566
|
+
</joint>
|
|
2567
|
+
|
|
2568
|
+
<!-- Legs -->
|
|
2569
|
+
'''
|
|
2570
|
+
|
|
2571
|
+
# Generate legs
|
|
2572
|
+
legs = [
|
|
2573
|
+
("front_left", 0.07, 0.05, 13),
|
|
2574
|
+
("front_right", 0.07, -0.05, 14),
|
|
2575
|
+
("rear_left", -0.07, 0.05, 15),
|
|
2576
|
+
("rear_right", -0.07, -0.05, 16),
|
|
2577
|
+
]
|
|
2578
|
+
|
|
2579
|
+
for leg_name, x_def, y_def, marker_id in legs:
|
|
2580
|
+
x_off = relative_positions[marker_id][0] if marker_id in markers else x_def
|
|
2581
|
+
y_off = relative_positions[marker_id][1] if marker_id in markers else y_def
|
|
2582
|
+
|
|
2583
|
+
urdf += f''' <!-- {leg_name.replace("_", " ").title()} Leg -->
|
|
2584
|
+
<link name="{leg_name}_hip">
|
|
2585
|
+
<visual><geometry><cylinder radius="0.012" length="0.025"/></geometry><material name="gray"/></visual>
|
|
2586
|
+
<inertial><mass value="0.03"/><inertia ixx="0.00001" ixy="0" ixz="0" iyy="0.00001" iyz="0" izz="0.00001"/></inertial>
|
|
2587
|
+
</link>
|
|
2588
|
+
<joint name="{leg_name}_hip_joint" type="revolute">
|
|
2589
|
+
<parent link="body"/><child link="{leg_name}_hip"/>
|
|
2590
|
+
<origin xyz="{x_off:.4f} {y_off:.4f} -0.02" rpy="1.5708 0 0"/>
|
|
2591
|
+
<axis xyz="0 0 1"/><limit lower="-0.5" upper="0.5" effort="3.0" velocity="2.0"/>
|
|
2592
|
+
</joint>
|
|
2593
|
+
|
|
2594
|
+
<link name="{leg_name}_thigh">
|
|
2595
|
+
<visual><origin xyz="0 0 -0.03"/><geometry><box size="0.025 0.02 0.06"/></geometry><material name="orange"/></visual>
|
|
2596
|
+
<inertial><mass value="0.05"/><inertia ixx="0.00005" ixy="0" ixz="0" iyy="0.00005" iyz="0" izz="0.00001"/></inertial>
|
|
2597
|
+
</link>
|
|
2598
|
+
<joint name="{leg_name}_thigh_joint" type="revolute">
|
|
2599
|
+
<parent link="{leg_name}_hip"/><child link="{leg_name}_thigh"/>
|
|
2600
|
+
<origin xyz="0 0 -0.015" rpy="-1.5708 0 0"/>
|
|
2601
|
+
<axis xyz="0 1 0"/><limit lower="-1.57" upper="1.57" effort="5.0" velocity="2.0"/>
|
|
2602
|
+
</joint>
|
|
2603
|
+
|
|
2604
|
+
<link name="{leg_name}_shin">
|
|
2605
|
+
<visual><origin xyz="0 0 -0.025"/><geometry><box size="0.02 0.015 0.05"/></geometry><material name="orange"/></visual>
|
|
2606
|
+
<inertial><mass value="0.04"/><inertia ixx="0.00003" ixy="0" ixz="0" iyy="0.00003" iyz="0" izz="0.000005"/></inertial>
|
|
2607
|
+
</link>
|
|
2608
|
+
<joint name="{leg_name}_knee_joint" type="revolute">
|
|
2609
|
+
<parent link="{leg_name}_thigh"/><child link="{leg_name}_shin"/>
|
|
2610
|
+
<origin xyz="0 0 -0.06"/><axis xyz="0 1 0"/><limit lower="-2.5" upper="0" effort="4.0" velocity="2.0"/>
|
|
2611
|
+
</joint>
|
|
2612
|
+
|
|
2613
|
+
<link name="{leg_name}_foot">
|
|
2614
|
+
<visual><geometry><sphere radius="0.012"/></geometry><material name="black"/></visual>
|
|
2615
|
+
<collision><geometry><sphere radius="0.012"/></geometry></collision>
|
|
2616
|
+
<inertial><mass value="0.01"/><inertia ixx="0.000001" ixy="0" ixz="0" iyy="0.000001" iyz="0" izz="0.000001"/></inertial>
|
|
2617
|
+
</link>
|
|
2618
|
+
<joint name="{leg_name}_foot_joint" type="fixed">
|
|
2619
|
+
<parent link="{leg_name}_shin"/><child link="{leg_name}_foot"/>
|
|
2620
|
+
<origin xyz="0 0 -0.05"/>
|
|
2621
|
+
</joint>
|
|
2622
|
+
|
|
2623
|
+
'''
|
|
2624
|
+
|
|
2625
|
+
urdf += "</robot>\n"
|
|
2626
|
+
|
|
2627
|
+
# Write URDF
|
|
2628
|
+
output_path.parent.mkdir(parents=True, exist_ok=True)
|
|
2629
|
+
with open(output_path, "w") as f:
|
|
2630
|
+
f.write(urdf)
|
|
2631
|
+
|
|
2632
|
+
# Count elements
|
|
2633
|
+
import re
|
|
2634
|
+
links = len(re.findall(r'<link\s+name=', urdf))
|
|
2635
|
+
joints = len(re.findall(r'<joint\s+name=', urdf))
|
|
2636
|
+
print(f"Generated: {links} links, {joints} joints")
|
|
2637
|
+
|
|
2638
|
+
|
|
2639
|
+
def _show_urdf_info(urdf_path: Path):
|
|
2640
|
+
"""Display information about a URDF file."""
|
|
2641
|
+
import re
|
|
2642
|
+
|
|
2643
|
+
with open(urdf_path, 'r') as f:
|
|
2644
|
+
content = f.read()
|
|
2645
|
+
|
|
2646
|
+
robot_match = re.search(r'<robot\s+name="([^"]+)"', content)
|
|
2647
|
+
robot_name = robot_match.group(1) if robot_match else "unknown"
|
|
2648
|
+
|
|
2649
|
+
joints = re.findall(r'<joint\s+name="([^"]+)"', content)
|
|
2650
|
+
links = re.findall(r'<link\s+name="([^"]+)"', content)
|
|
2651
|
+
revolute = len(re.findall(r'type="revolute"', content))
|
|
2652
|
+
prismatic = len(re.findall(r'type="prismatic"', content))
|
|
2653
|
+
fixed = len(re.findall(r'type="fixed"', content))
|
|
2654
|
+
|
|
2655
|
+
print(f"\n--- URDF Information ---")
|
|
2656
|
+
print(f"Robot: {robot_name}")
|
|
2657
|
+
print(f"File: {urdf_path}")
|
|
2658
|
+
print(f"Size: {urdf_path.stat().st_size} bytes")
|
|
2659
|
+
print(f"\nStructure:")
|
|
2660
|
+
print(f" Links: {len(links)}")
|
|
2661
|
+
print(f" Joints: {len(joints)}")
|
|
2662
|
+
print(f" - Revolute: {revolute}")
|
|
2663
|
+
print(f" - Prismatic: {prismatic}")
|
|
2664
|
+
print(f" - Fixed: {fixed}")
|
|
2665
|
+
|
|
2666
|
+
|
|
2667
|
+
async def _sync_urdf_bridge(urdf_path: Path) -> bool:
    """Push a URDF to Artifex through the local ATE bridge websocket.

    Connects to ws://localhost:8765, sends a sync_urdf request with the file
    contents, and reports the outcome.  Returns True only on a confirmed
    sync; any failure (no server, timeout, error reply) returns False.
    """
    try:
        import websockets
    except ImportError:
        # Best-effort dependency bootstrap so the command works out of the box.
        print("Installing websockets...")
        import subprocess
        subprocess.run([sys.executable, "-m", "pip", "install", "websockets", "-q"])
        import websockets

    try:
        urdf_content = urdf_path.read_text()

        async with websockets.connect('ws://localhost:8765', open_timeout=5) as ws:
            print("Connected to ATE bridge server")

            payload = {"id": 1, "action": "sync_urdf", "params": {"urdf": urdf_content}}
            await ws.send(json.dumps(payload))
            reply = json.loads(await asyncio.wait_for(ws.recv(), timeout=10.0))

            if "error" in reply:
                print(f"Sync failed: {reply['error']}")
                return False

            if reply.get("result"):
                joints = reply["result"].get("joints", [])
                print(f"URDF synced to Artifex! ({len(joints)} joints)")
                return True

    except ConnectionRefusedError:
        print("ATE bridge server not running")
        print(" Start with: python -m ate.bridge_server")
    except asyncio.TimeoutError:
        print("Connection timeout")
    except Exception as e:
        print(f"Error: {e}")

    return False
|
+
def _launch_artifex(urdf_path: Optional[Path] = None) -> bool:
|
|
2710
|
+
"""Launch Artifex Desktop with optional URDF file."""
|
|
2711
|
+
import subprocess
|
|
2712
|
+
|
|
2713
|
+
print("\n--- Launching Artifex Desktop ---")
|
|
2714
|
+
|
|
2715
|
+
# Find Artifex
|
|
2716
|
+
artifex_paths = [
|
|
2717
|
+
Path("/Applications/Artifex.app"),
|
|
2718
|
+
Path.home() / "Applications/Artifex.app",
|
|
2719
|
+
Path(__file__).parent.parent.parent.parent / "artifex-desktop/dist/mac-arm64/Artifex.app",
|
|
2720
|
+
Path(__file__).parent.parent.parent.parent / "artifex-desktop/dist/mac/Artifex.app",
|
|
2721
|
+
]
|
|
2722
|
+
|
|
2723
|
+
artifex_app = None
|
|
2724
|
+
for p in artifex_paths:
|
|
2725
|
+
if p.exists():
|
|
2726
|
+
artifex_app = p
|
|
2727
|
+
break
|
|
2728
|
+
|
|
2729
|
+
if not artifex_app:
|
|
2730
|
+
print("Artifex Desktop not found")
|
|
2731
|
+
print("\nInstall from: https://kindly.fyi/artifex")
|
|
2732
|
+
if urdf_path:
|
|
2733
|
+
print(f"\nOr manually open: {urdf_path}")
|
|
2734
|
+
return False
|
|
2735
|
+
|
|
2736
|
+
print(f"Found: {artifex_app}")
|
|
2737
|
+
|
|
2738
|
+
try:
|
|
2739
|
+
if urdf_path:
|
|
2740
|
+
cmd = ["open", "-a", str(artifex_app), str(urdf_path.absolute())]
|
|
2741
|
+
print(f"Opening: {urdf_path.name}")
|
|
2742
|
+
else:
|
|
2743
|
+
cmd = ["open", str(artifex_app)]
|
|
2744
|
+
|
|
2745
|
+
subprocess.run(cmd, check=True)
|
|
2746
|
+
print("Artifex Desktop launched")
|
|
2747
|
+
return True
|
|
2748
|
+
|
|
2749
|
+
except subprocess.CalledProcessError as e:
|
|
2750
|
+
print(f"Launch failed: {e}")
|
|
2751
|
+
return False
|
|
2752
|
+
|
|
2753
|
+
|
|
2754
|
+
def register_robot_parser(subparsers):
|
|
2755
|
+
"""
|
|
2756
|
+
Register the robot command with argparse.
|
|
2757
|
+
|
|
2758
|
+
Call this from the main CLI to add robot commands.
|
|
2759
|
+
"""
|
|
2760
|
+
robot_parser = subparsers.add_parser("robot",
|
|
2761
|
+
help="Robot discovery, setup, and management",
|
|
2762
|
+
description="""Robot management commands.
|
|
2763
|
+
|
|
2764
|
+
Discover robots, set up configurations, and test capabilities.
|
|
2765
|
+
|
|
2766
|
+
EXAMPLES:
|
|
2767
|
+
ate robot discover # Find robots on network/USB
|
|
2768
|
+
ate robot list # List known robot types
|
|
2769
|
+
ate robot info hiwonder_mechdog # Show robot type info
|
|
2770
|
+
ate robot setup # Interactive setup wizard
|
|
2771
|
+
ate robot test my_mechdog # Test robot capabilities
|
|
2772
|
+
ate robot profiles list # List saved profiles
|
|
2773
|
+
""")
|
|
2774
|
+
robot_subparsers = robot_parser.add_subparsers(dest="robot_action", help="Robot action")
|
|
2775
|
+
|
|
2776
|
+
# robot discover
|
|
2777
|
+
discover_parser = robot_subparsers.add_parser("discover", help="Find robots on network and USB")
|
|
2778
|
+
discover_parser.add_argument("--subnet", help="Network subnet to scan (e.g., 192.168.1)")
|
|
2779
|
+
discover_parser.add_argument("--timeout", type=float, default=3.0, help="Scan timeout (default: 3s)")
|
|
2780
|
+
discover_parser.add_argument("--json", action="store_true", help="Output as JSON")
|
|
2781
|
+
|
|
2782
|
+
# robot ble - Bluetooth Low Energy discovery and connection
|
|
2783
|
+
ble_parser = robot_subparsers.add_parser("ble",
|
|
2784
|
+
help="Bluetooth Low Energy robot discovery and connection",
|
|
2785
|
+
description="""Discover and connect to robots via Bluetooth Low Energy.
|
|
2786
|
+
|
|
2787
|
+
BLE provides wireless control without WiFi infrastructure - useful for
|
|
2788
|
+
outdoor robots or when USB is inconvenient.
|
|
2789
|
+
|
|
2790
|
+
EXAMPLES:
|
|
2791
|
+
ate robot ble discover # Scan for BLE robots
|
|
2792
|
+
ate robot ble discover --filter MechDog # Filter by name
|
|
2793
|
+
ate robot ble connect AA:BB:CC:DD:EE:FF # Test connection
|
|
2794
|
+
ate robot ble info # Show BLE-capable robots
|
|
2795
|
+
""")
|
|
2796
|
+
ble_subparsers = ble_parser.add_subparsers(dest="ble_action", help="BLE action")
|
|
2797
|
+
|
|
2798
|
+
# ble discover
|
|
2799
|
+
ble_discover = ble_subparsers.add_parser("discover", help="Scan for BLE devices")
|
|
2800
|
+
ble_discover.add_argument("--timeout", type=float, default=10.0,
|
|
2801
|
+
help="Scan timeout in seconds (default: 10)")
|
|
2802
|
+
ble_discover.add_argument("--filter", dest="name_filter",
|
|
2803
|
+
help="Filter devices by name (e.g., MechDog, ESP32)")
|
|
2804
|
+
ble_discover.add_argument("--json", action="store_true", dest="json_output",
|
|
2805
|
+
help="Output as JSON")
|
|
2806
|
+
|
|
2807
|
+
# ble connect
|
|
2808
|
+
ble_connect = ble_subparsers.add_parser("connect", help="Test connection to BLE device")
|
|
2809
|
+
ble_connect.add_argument("address", help="BLE device address (e.g., AA:BB:CC:DD:EE:FF)")
|
|
2810
|
+
ble_connect.add_argument("-p", "--profile", dest="profile_name",
|
|
2811
|
+
help="Update profile with BLE settings")
|
|
2812
|
+
|
|
2813
|
+
# ble info
|
|
2814
|
+
ble_subparsers.add_parser("info", help="Show BLE-capable robots and UUIDs")
|
|
2815
|
+
|
|
2816
|
+
# ble capture - Capture traffic from phone app
|
|
2817
|
+
ble_capture = ble_subparsers.add_parser("capture",
|
|
2818
|
+
help="Capture BLE traffic from phone app for protocol reverse-engineering",
|
|
2819
|
+
description="""Capture BLE traffic while using a phone app to control your robot.
|
|
2820
|
+
|
|
2821
|
+
This automates the workflow for reverse-engineering undocumented BLE protocols:
|
|
2822
|
+
1. Guides you through setting up PacketLogger (iOS) or HCI snoop (Android)
|
|
2823
|
+
2. Captures traffic while you operate the phone app
|
|
2824
|
+
3. Saves capture file for analysis
|
|
2825
|
+
|
|
2826
|
+
EXAMPLES:
|
|
2827
|
+
ate robot ble capture # Auto-detect platform
|
|
2828
|
+
ate robot ble capture --platform ios # Force iOS capture
|
|
2829
|
+
ate robot ble capture -o my_capture.pklg # Specify output file
|
|
2830
|
+
""")
|
|
2831
|
+
ble_capture.add_argument("-p", "--platform", choices=["ios", "android", "auto"],
|
|
2832
|
+
default="auto", help="Mobile platform (default: auto-detect)")
|
|
2833
|
+
ble_capture.add_argument("-o", "--output", default="capture.pklg",
|
|
2834
|
+
help="Output file path (default: capture.pklg)")
|
|
2835
|
+
|
|
2836
|
+
# ble analyze - Analyze capture file
|
|
2837
|
+
ble_analyze = ble_subparsers.add_parser("analyze",
|
|
2838
|
+
help="Analyze BLE capture file to decode protocol commands",
|
|
2839
|
+
description="""Analyze a BLE capture file (.pklg or .btsnoop) to decode the protocol.
|
|
2840
|
+
|
|
2841
|
+
Extracts write commands and notifications, identifies the protocol format,
|
|
2842
|
+
and optionally generates Python code to replay the discovered commands.
|
|
2843
|
+
|
|
2844
|
+
EXAMPLES:
|
|
2845
|
+
ate robot ble analyze capture.pklg # Decode and display commands
|
|
2846
|
+
ate robot ble analyze capture.pklg --generate # Also generate Python code
|
|
2847
|
+
ate robot ble analyze capture.pklg -o protocol.py # Save code to file
|
|
2848
|
+
""")
|
|
2849
|
+
ble_analyze.add_argument("capture_file", help="Path to capture file (.pklg or .btsnoop)")
|
|
2850
|
+
ble_analyze.add_argument("-g", "--generate", action="store_true",
|
|
2851
|
+
help="Generate Python replay code")
|
|
2852
|
+
ble_analyze.add_argument("-o", "--output", help="Output file for generated code")
|
|
2853
|
+
|
|
2854
|
+
# ble inspect - Inspect device characteristics
|
|
2855
|
+
ble_inspect = ble_subparsers.add_parser("inspect",
|
|
2856
|
+
help="Inspect BLE device services and characteristics",
|
|
2857
|
+
description="""Connect to a BLE device and show its service/characteristic map.
|
|
2858
|
+
|
|
2859
|
+
Shows which characteristics support write vs notify, helping you understand
|
|
2860
|
+
the correct configuration for communication.
|
|
2861
|
+
|
|
2862
|
+
EXAMPLES:
|
|
2863
|
+
ate robot ble inspect mechdog_00 # Inspect by device name
|
|
2864
|
+
ate robot ble inspect AA:BB:CC:DD:EE:FF # Inspect by address
|
|
2865
|
+
""")
|
|
2866
|
+
ble_inspect.add_argument("address", help="Device address or name to inspect")
|
|
2867
|
+
|
|
2868
|
+
# robot list
|
|
2869
|
+
list_parser = robot_subparsers.add_parser("list", help="List known robot types")
|
|
2870
|
+
list_parser.add_argument("--archetype", choices=["quadruped", "humanoid", "arm", "custom"],
|
|
2871
|
+
help="Filter by archetype")
|
|
2872
|
+
|
|
2873
|
+
# robot info
|
|
2874
|
+
info_parser = robot_subparsers.add_parser("info", help="Show robot type or profile info")
|
|
2875
|
+
info_parser.add_argument("robot_type", nargs="?", help="Robot type ID")
|
|
2876
|
+
info_parser.add_argument("-p", "--profile", help="Show profile instead of type")
|
|
2877
|
+
|
|
2878
|
+
# robot setup
|
|
2879
|
+
setup_parser = robot_subparsers.add_parser("setup", help="Interactive setup wizard")
|
|
2880
|
+
setup_parser.add_argument("--port", help="Serial port")
|
|
2881
|
+
setup_parser.add_argument("--camera-ip", help="Camera IP address")
|
|
2882
|
+
setup_parser.add_argument("--type", dest="robot_type", default="hiwonder_mechdog",
|
|
2883
|
+
help="Robot type (default: hiwonder_mechdog)")
|
|
2884
|
+
setup_parser.add_argument("--name", help="Profile name")
|
|
2885
|
+
setup_parser.add_argument("--non-interactive", action="store_true",
|
|
2886
|
+
help="Use defaults, no prompts")
|
|
2887
|
+
|
|
2888
|
+
# robot test
|
|
2889
|
+
test_parser = robot_subparsers.add_parser("test", help="Test robot capabilities")
|
|
2890
|
+
test_parser.add_argument("profile", help="Profile name")
|
|
2891
|
+
test_parser.add_argument("-c", "--capability", help="Specific capability to test")
|
|
2892
|
+
test_parser.add_argument("-v", "--verbose", action="store_true", help="Show method details")
|
|
2893
|
+
|
|
2894
|
+
# robot approach
|
|
2895
|
+
approach_parser = robot_subparsers.add_parser("approach",
|
|
2896
|
+
help="Approach detected targets",
|
|
2897
|
+
description="""Scan for targets and approach them using visual servoing.
|
|
2898
|
+
|
|
2899
|
+
Works with any robot that has camera and locomotion capabilities.
|
|
2900
|
+
Uses visual distance estimation when no hardware distance sensor is available.
|
|
2901
|
+
|
|
2902
|
+
EXAMPLES:
|
|
2903
|
+
ate robot approach my_mechdog # Basic approach
|
|
2904
|
+
ate robot approach my_mechdog --target-distance 0.2 # Stop 20cm from target
|
|
2905
|
+
ate robot approach my_mechdog --dry-run # Scan only, no movement
|
|
2906
|
+
ate robot approach my_mechdog --speed 0.5 # Faster approach
|
|
2907
|
+
""")
|
|
2908
|
+
approach_parser.add_argument("profile", help="Profile name")
|
|
2909
|
+
approach_parser.add_argument("-d", "--duration", type=float, default=30.0,
|
|
2910
|
+
help="Maximum duration in seconds (default: 30)")
|
|
2911
|
+
approach_parser.add_argument("-t", "--target-distance", type=float, default=0.15,
|
|
2912
|
+
help="Distance to stop from target in meters (default: 0.15)")
|
|
2913
|
+
approach_parser.add_argument("-s", "--speed", type=float, default=0.3,
|
|
2914
|
+
help="Approach speed (default: 0.3)")
|
|
2915
|
+
approach_parser.add_argument("--detect-colors", nargs="+",
|
|
2916
|
+
help="Colors to detect (red, blue, green, etc.)")
|
|
2917
|
+
approach_parser.add_argument("--dry-run", action="store_true",
|
|
2918
|
+
help="Scan and detect but don't move")
|
|
2919
|
+
|
|
2920
|
+
# robot profiles
|
|
2921
|
+
profiles_parser = robot_subparsers.add_parser("profiles", help="Manage saved profiles")
|
|
2922
|
+
profiles_subparsers = profiles_parser.add_subparsers(dest="profiles_action", help="Profile action")
|
|
2923
|
+
|
|
2924
|
+
profiles_subparsers.add_parser("list", help="List saved profiles")
|
|
2925
|
+
|
|
2926
|
+
profiles_show = profiles_subparsers.add_parser("show", help="Show profile details")
|
|
2927
|
+
profiles_show.add_argument("name", help="Profile name")
|
|
2928
|
+
|
|
2929
|
+
profiles_delete = profiles_subparsers.add_parser("delete", help="Delete profile")
|
|
2930
|
+
profiles_delete.add_argument("name", help="Profile name")
|
|
2931
|
+
|
|
2932
|
+
profiles_create = profiles_subparsers.add_parser("create", help="Create new profile")
|
|
2933
|
+
profiles_create.add_argument("name", help="Profile name")
|
|
2934
|
+
profiles_create.add_argument("-t", "--template", help="Template to use")
|
|
2935
|
+
|
|
2936
|
+
profiles_subparsers.add_parser("templates", help="List available templates")
|
|
2937
|
+
|
|
2938
|
+
# robot calibrate
|
|
2939
|
+
calibrate_parser = robot_subparsers.add_parser("calibrate",
|
|
2940
|
+
help="Visual calibration for robot servos and poses",
|
|
2941
|
+
description="""Visual calibration wizard with webcam feedback.
|
|
2942
|
+
|
|
2943
|
+
Interactively discover servo ranges, record named positions (poses),
|
|
2944
|
+
and build a calibration profile for your robot.
|
|
2945
|
+
|
|
2946
|
+
EXAMPLES:
|
|
2947
|
+
ate robot calibrate start --port /dev/cu.usbserial-10 --name my_mechdog
|
|
2948
|
+
ate robot calibrate gripper --port /dev/cu.usbserial-10 --servo 11
|
|
2949
|
+
ate robot calibrate list
|
|
2950
|
+
ate robot calibrate poses my_mechdog
|
|
2951
|
+
ate robot calibrate record my_mechdog --pose gripper_open
|
|
2952
|
+
ate robot calibrate apply my_mechdog --pose gripper_closed
|
|
2953
|
+
""")
|
|
2954
|
+
calibrate_subparsers = calibrate_parser.add_subparsers(dest="calibrate_action", help="Calibration action")
|
|
2955
|
+
|
|
2956
|
+
# calibrate start
|
|
2957
|
+
cal_start = calibrate_subparsers.add_parser("start", help="Run interactive calibration wizard")
|
|
2958
|
+
cal_start.add_argument("--port", help="Serial port")
|
|
2959
|
+
cal_start.add_argument("--camera", dest="camera_url", help="Camera URL for visual feedback")
|
|
2960
|
+
cal_start.add_argument("--name", default="robot", help="Calibration name (default: robot)")
|
|
2961
|
+
cal_start.add_argument("--type", dest="robot_type", default="unknown", help="Robot type")
|
|
2962
|
+
|
|
2963
|
+
# calibrate gripper
|
|
2964
|
+
cal_gripper = calibrate_subparsers.add_parser("gripper", help="Quick gripper-only calibration")
|
|
2965
|
+
cal_gripper.add_argument("--port", help="Serial port")
|
|
2966
|
+
cal_gripper.add_argument("--servo", type=int, default=11, help="Gripper servo ID (default: 11)")
|
|
2967
|
+
|
|
2968
|
+
# calibrate list
|
|
2969
|
+
calibrate_subparsers.add_parser("list", help="List saved calibrations")
|
|
2970
|
+
|
|
2971
|
+
# calibrate poses
|
|
2972
|
+
cal_poses = calibrate_subparsers.add_parser("poses", help="Show poses for a calibration")
|
|
2973
|
+
cal_poses.add_argument("name", help="Calibration name")
|
|
2974
|
+
|
|
2975
|
+
# calibrate record
|
|
2976
|
+
cal_record = calibrate_subparsers.add_parser("record", help="Record current position as a pose")
|
|
2977
|
+
cal_record.add_argument("name", help="Calibration name")
|
|
2978
|
+
cal_record.add_argument("--pose", required=True, help="Name for the pose")
|
|
2979
|
+
|
|
2980
|
+
# calibrate apply
|
|
2981
|
+
cal_apply = calibrate_subparsers.add_parser("apply", help="Apply a saved pose")
|
|
2982
|
+
cal_apply.add_argument("name", help="Calibration name")
|
|
2983
|
+
cal_apply.add_argument("--pose", required=True, help="Pose to apply")
|
|
2984
|
+
|
|
2985
|
+
# calibrate direction - THE CRITICAL STEP
|
|
2986
|
+
cal_direction = calibrate_subparsers.add_parser("direction",
|
|
2987
|
+
help="Direction calibration via Twitch Test (CRITICAL)",
|
|
2988
|
+
description="""Determine which servo direction moves TOWARD the target.
|
|
2989
|
+
|
|
2990
|
+
This is the CRITICAL step that was missing! Without direction calibration,
|
|
2991
|
+
the robot doesn't know if positive servo values move the gripper toward
|
|
2992
|
+
or away from the target.
|
|
2993
|
+
|
|
2994
|
+
Process:
|
|
2995
|
+
1. Place a green ball in front of the robot
|
|
2996
|
+
2. Ensure ArUco markers on arm are visible to webcam
|
|
2997
|
+
3. For each servo, measure distance to ball before/after small movement
|
|
2998
|
+
4. Record whether positive values move toward or away from target
|
|
2999
|
+
|
|
3000
|
+
EXAMPLES:
|
|
3001
|
+
ate robot calibrate direction --port /dev/cu.usbserial-10 --name my_mechdog
|
|
3002
|
+
ate robot calibrate direction --name my_mechdog
|
|
3003
|
+
""")
|
|
3004
|
+
cal_direction.add_argument("--port", help="Serial port")
|
|
3005
|
+
cal_direction.add_argument("--name", default="robot", help="Robot name")
|
|
3006
|
+
|
|
3007
|
+
# calibrate status - Show calibration progress
|
|
3008
|
+
cal_status = calibrate_subparsers.add_parser("status",
|
|
3009
|
+
help="Show calibration status and next steps",
|
|
3010
|
+
description="""Show the calibration status for a robot.
|
|
3011
|
+
|
|
3012
|
+
Displays which calibration stages are complete and what needs to be done next.
|
|
3013
|
+
This enforces the proper calibration workflow.
|
|
3014
|
+
|
|
3015
|
+
EXAMPLES:
|
|
3016
|
+
ate robot calibrate status # List all robots
|
|
3017
|
+
ate robot calibrate status my_mechdog # Show specific robot
|
|
3018
|
+
""")
|
|
3019
|
+
cal_status.add_argument("name", nargs="?", default="robot", help="Robot name")
|
|
3020
|
+
|
|
3021
|
+
# calibrate reset - Reset calibration state
|
|
3022
|
+
cal_reset = calibrate_subparsers.add_parser("reset",
|
|
3023
|
+
help="Reset calibration state for a robot",
|
|
3024
|
+
description="""Reset all calibration progress for a robot.
|
|
3025
|
+
|
|
3026
|
+
Use this if you need to recalibrate from scratch.
|
|
3027
|
+
""")
|
|
3028
|
+
cal_reset.add_argument("name", help="Robot name")
|
|
3029
|
+
|
|
3030
|
+
# robot label - Visual labeling with dual cameras
|
|
3031
|
+
label_parser = robot_subparsers.add_parser("label",
|
|
3032
|
+
help="Visual servo/pose/action labeling with dual cameras",
|
|
3033
|
+
description="""Interactive visual labeling session using webcam + robot camera.
|
|
3034
|
+
|
|
3035
|
+
Creates a "bedrock" of basic skills by:
|
|
3036
|
+
1. Discovering and labeling servos by their physical effect
|
|
3037
|
+
2. Recording named poses with visual confirmation
|
|
3038
|
+
3. Sequencing poses into multi-step actions
|
|
3039
|
+
4. Generating reusable Python skill code
|
|
3040
|
+
|
|
3041
|
+
EXAMPLES:
|
|
3042
|
+
ate robot label --name my_mechdog
|
|
3043
|
+
ate robot label --port /dev/cu.usbserial-10 --camera http://192.168.50.98:80/capture
|
|
3044
|
+
ate robot label --webcam 1 --name mechdog
|
|
3045
|
+
""")
|
|
3046
|
+
label_parser.add_argument("--port", help="Serial port (auto-detects if not specified)")
|
|
3047
|
+
label_parser.add_argument("--name", default="robot", help="Robot name for saving (default: robot)")
|
|
3048
|
+
label_parser.add_argument("--type", dest="robot_type", default="hiwonder_mechdog", help="Robot type")
|
|
3049
|
+
label_parser.add_argument("--webcam", type=int, default=0, dest="webcam_id",
|
|
3050
|
+
help="Webcam device ID (default: 0)")
|
|
3051
|
+
label_parser.add_argument("--camera", dest="camera_url",
|
|
3052
|
+
help="Robot camera URL (e.g., http://192.168.50.98:80/capture)")
|
|
3053
|
+
|
|
3054
|
+
# robot skills - Manage generated skills
|
|
3055
|
+
skills_parser = robot_subparsers.add_parser("skills",
|
|
3056
|
+
help="Manage generated robot skills",
|
|
3057
|
+
description="""Manage skill libraries created from visual labeling.
|
|
3058
|
+
|
|
3059
|
+
View, export, and manage the Python skill code generated from labeled
|
|
3060
|
+
poses and actions.
|
|
3061
|
+
|
|
3062
|
+
EXAMPLES:
|
|
3063
|
+
ate robot skills list
|
|
3064
|
+
ate robot skills show my_mechdog
|
|
3065
|
+
ate robot skills export my_mechdog
|
|
3066
|
+
ate robot skills export my_mechdog pickup
|
|
3067
|
+
""")
|
|
3068
|
+
skills_subparsers = skills_parser.add_subparsers(dest="skills_action", help="Skills action")
|
|
3069
|
+
|
|
3070
|
+
skills_subparsers.add_parser("list", help="List saved skill libraries")
|
|
3071
|
+
|
|
3072
|
+
skills_show = skills_subparsers.add_parser("show", help="Show skill library details")
|
|
3073
|
+
skills_show.add_argument("name", help="Library name")
|
|
3074
|
+
|
|
3075
|
+
skills_export = skills_subparsers.add_parser("export", help="Export skills as Python code")
|
|
3076
|
+
skills_export.add_argument("name", help="Library name")
|
|
3077
|
+
skills_export.add_argument("skill", nargs="?", help="Specific skill to export (exports all if omitted)")
|
|
3078
|
+
|
|
3079
|
+
# robot upload - Upload skill library to FoodforThought
|
|
3080
|
+
upload_parser = robot_subparsers.add_parser("upload",
|
|
3081
|
+
help="Upload skill library to FoodforThought",
|
|
3082
|
+
description="""Upload calibration, poses, and skills to FoodforThought platform.
|
|
3083
|
+
|
|
3084
|
+
Creates artifacts with full data lineage:
|
|
3085
|
+
raw (images) → processed (calibration) → labeled (poses) → skill (code)
|
|
3086
|
+
|
|
3087
|
+
Requires authentication. Run 'ate login' first.
|
|
3088
|
+
|
|
3089
|
+
EXAMPLES:
|
|
3090
|
+
ate robot upload mechdog
|
|
3091
|
+
ate robot upload mechdog --project-id abc123
|
|
3092
|
+
ate robot upload mechdog --no-images
|
|
3093
|
+
""")
|
|
3094
|
+
upload_parser.add_argument("name", help="Robot name (matches calibration/library)")
|
|
3095
|
+
upload_parser.add_argument("--project-id", dest="project_id", help="Existing project ID (creates new if omitted)")
|
|
3096
|
+
upload_parser.add_argument("--no-images", action="store_true", dest="no_images",
|
|
3097
|
+
help="Skip uploading pose images")
|
|
3098
|
+
|
|
3099
|
+
# robot teach - Fast keyboard-driven teaching mode
|
|
3100
|
+
teach_parser = robot_subparsers.add_parser("teach",
|
|
3101
|
+
help="Fast keyboard-driven teaching mode with live preview",
|
|
3102
|
+
description="""Real-time teaching interface for quick skill development.
|
|
3103
|
+
|
|
3104
|
+
Opens a live webcam preview with keyboard controls:
|
|
3105
|
+
↑/↓ Select servo
|
|
3106
|
+
←/→ Adjust position (hold Shift for fine control)
|
|
3107
|
+
S Save current position as a pose
|
|
3108
|
+
A Create action from saved poses
|
|
3109
|
+
P Playback last action
|
|
3110
|
+
C Clear recorded poses
|
|
3111
|
+
R Reset to center positions
|
|
3112
|
+
+/- Adjust step size
|
|
3113
|
+
Q Quit and save
|
|
3114
|
+
|
|
3115
|
+
Much faster iteration than menu-driven labeling.
|
|
3116
|
+
|
|
3117
|
+
EXAMPLES:
|
|
3118
|
+
ate robot teach --port /dev/cu.usbserial-10 --name mechdog
|
|
3119
|
+
ate robot teach --port /dev/cu.usbserial-10 --webcam 1
|
|
3120
|
+
""")
|
|
3121
|
+
teach_parser.add_argument("--port", required=True, help="Serial port")
|
|
3122
|
+
teach_parser.add_argument("--name", default="robot", help="Robot name for saving")
|
|
3123
|
+
teach_parser.add_argument("--type", dest="robot_type", default="hiwonder_mechdog", help="Robot type")
|
|
3124
|
+
teach_parser.add_argument("--webcam", type=int, default=0, dest="webcam_id", help="Webcam device ID")
|
|
3125
|
+
teach_parser.add_argument("--camera", dest="camera_url", help="Robot camera URL (optional)")
|
|
3126
|
+
|
|
3127
|
+
# robot primitives - List and inspect programmatic primitives
|
|
3128
|
+
primitives_parser = robot_subparsers.add_parser("primitives",
|
|
3129
|
+
help="List and inspect skill primitives",
|
|
3130
|
+
description="""View the programmatic skill library.
|
|
3131
|
+
|
|
3132
|
+
Shows all primitives, compound skills, and behaviors with their
|
|
3133
|
+
hardware requirements, servo targets, and step sequences.
|
|
3134
|
+
|
|
3135
|
+
EXAMPLES:
|
|
3136
|
+
ate robot primitives list
|
|
3137
|
+
ate robot primitives show gripper_open
|
|
3138
|
+
ate robot primitives show pickup
|
|
3139
|
+
ate robot primitives show fetch
|
|
3140
|
+
""")
|
|
3141
|
+
primitives_subparsers = primitives_parser.add_subparsers(dest="primitives_action", help="Primitives action")
|
|
3142
|
+
|
|
3143
|
+
primitives_subparsers.add_parser("list", help="List all primitives, compounds, and behaviors")
|
|
3144
|
+
|
|
3145
|
+
primitives_show = primitives_subparsers.add_parser("show", help="Show details of a specific skill")
|
|
3146
|
+
primitives_show.add_argument("name", help="Skill name")
|
|
3147
|
+
|
|
3148
|
+
# robot map-servos - LLM-guided servo mapping
|
|
3149
|
+
map_servos_parser = robot_subparsers.add_parser("map-servos",
|
|
3150
|
+
help="Map all servos and generate primitive skills using LLM",
|
|
3151
|
+
description="""LLM-guided servo mapping and primitive generation.
|
|
3152
|
+
|
|
3153
|
+
Uses an AI agent to systematically probe each servo, analyze its function
|
|
3154
|
+
(via camera or position feedback), and generate named positions and
|
|
3155
|
+
primitive skills.
|
|
3156
|
+
|
|
3157
|
+
This automates the tedious process of discovering what each servo does
|
|
3158
|
+
and creates reusable skill code.
|
|
3159
|
+
|
|
3160
|
+
EXAMPLES:
|
|
3161
|
+
ate robot map-servos --port /dev/cu.usbserial-10
|
|
3162
|
+
ate robot map-servos --port /dev/cu.usbserial-10 --camera
|
|
3163
|
+
ate robot map-servos --port /dev/cu.usbserial-10 --wifi-camera 192.168.4.1
|
|
3164
|
+
ate robot map-servos --num-servos 6 --output ./my_arm
|
|
3165
|
+
""")
|
|
3166
|
+
map_servos_parser.add_argument("--port", help="Serial port (auto-detects if not specified)")
|
|
3167
|
+
map_servos_parser.add_argument("--num-servos", type=int, default=13,
|
|
3168
|
+
help="Number of servos to probe (default: 13)")
|
|
3169
|
+
map_servos_parser.add_argument("--camera", action="store_true", dest="use_camera",
|
|
3170
|
+
help="Use webcam for visual verification")
|
|
3171
|
+
map_servos_parser.add_argument("--camera-index", type=int, default=0,
|
|
3172
|
+
help="Webcam device index (default: 0)")
|
|
3173
|
+
map_servos_parser.add_argument("--wifi-camera", dest="wifi_camera_ip",
|
|
3174
|
+
help="Use robot's WiFi camera at IP (e.g., 192.168.4.1)")
|
|
3175
|
+
map_servos_parser.add_argument("--output", "-o", help="Output file prefix (default: ./servo_mappings_<port>)")
|
|
3176
|
+
map_servos_parser.add_argument("--save-images", action="store_true",
|
|
3177
|
+
help="Save camera images during probing")
|
|
3178
|
+
map_servos_parser.add_argument("--upload", action="store_true",
|
|
3179
|
+
help="Upload servo mappings to FoodforThought after mapping")
|
|
3180
|
+
map_servos_parser.add_argument("--project-id", dest="project_id",
|
|
3181
|
+
help="Project ID for upload (creates new if not specified)")
|
|
3182
|
+
|
|
3183
|
+
# robot generate-markers - Generate printable ArUco markers
|
|
3184
|
+
markers_parser = robot_subparsers.add_parser("generate-markers",
|
|
3185
|
+
help="Generate printable ArUco markers for calibration",
|
|
3186
|
+
description="""Generate a PDF with printable ArUco markers for robot calibration.
|
|
3187
|
+
|
|
3188
|
+
These markers enable precise visual system identification:
|
|
3189
|
+
- Attach markers to each joint/segment of your robot
|
|
3190
|
+
- Run calibration to discover kinematics automatically
|
|
3191
|
+
- Share your calibration with the community!
|
|
3192
|
+
|
|
3193
|
+
The PDF includes:
|
|
3194
|
+
- Markers with cutting guides
|
|
3195
|
+
- Joint labels and IDs
|
|
3196
|
+
- Setup instructions
|
|
3197
|
+
|
|
3198
|
+
EXAMPLES:
|
|
3199
|
+
ate robot generate-markers
|
|
3200
|
+
ate robot generate-markers --count 6 --size 40
|
|
3201
|
+
ate robot generate-markers --robot-name "My MechDog" -o my_markers.pdf
|
|
3202
|
+
""")
|
|
3203
|
+
markers_parser.add_argument("--output", "-o", default="aruco_markers.pdf",
|
|
3204
|
+
help="Output PDF path (default: aruco_markers.pdf)")
|
|
3205
|
+
markers_parser.add_argument("--count", "-n", type=int, default=12,
|
|
3206
|
+
help="Number of markers to generate (default: 12)")
|
|
3207
|
+
markers_parser.add_argument("--size", type=float, default=30.0,
|
|
3208
|
+
help="Marker size in millimeters (default: 30)")
|
|
3209
|
+
markers_parser.add_argument("--robot-name",
|
|
3210
|
+
help="Optional robot name for labeling")
|
|
3211
|
+
markers_parser.add_argument("--page-size", choices=["letter", "a4"], default="letter",
|
|
3212
|
+
help="Page size (default: letter)")
|
|
3213
|
+
markers_parser.add_argument("--preset",
|
|
3214
|
+
help="Use robot-specific preset (e.g., mechdog-mini, unitree-go1, xarm-6)")
|
|
3215
|
+
markers_parser.add_argument("--list-presets", action="store_true",
|
|
3216
|
+
help="List available robot presets")
|
|
3217
|
+
|
|
3218
|
+
# robot identify - Search community calibration registry
|
|
3219
|
+
identify_parser = robot_subparsers.add_parser("identify",
|
|
3220
|
+
help="Search community calibration registry for your robot",
|
|
3221
|
+
description="""Search the FoodforThought community calibration registry.
|
|
3222
|
+
|
|
3223
|
+
Find calibrations shared by other users for your robot model.
|
|
3224
|
+
When a community member calibrates a robot, everyone benefits!
|
|
3225
|
+
|
|
3226
|
+
EXAMPLES:
|
|
3227
|
+
ate robot identify mechdog-mini
|
|
3228
|
+
ate robot identify --search "MechDog"
|
|
3229
|
+
ate robot identify --port /dev/cu.usbserial-10 # Auto-detect robot type
|
|
3230
|
+
""")
|
|
3231
|
+
identify_parser.add_argument("robot_slug", nargs="?",
|
|
3232
|
+
help="Robot model slug (e.g., mechdog-mini)")
|
|
3233
|
+
identify_parser.add_argument("--search", dest="robot_name",
|
|
3234
|
+
help="Search by robot name")
|
|
3235
|
+
identify_parser.add_argument("--port",
|
|
3236
|
+
help="Serial port (for auto-detection)")
|
|
3237
|
+
|
|
3238
|
+
# robot upload-calibration - Upload calibration to community registry
|
|
3239
|
+
upload_cal_parser = robot_subparsers.add_parser("upload-calibration",
|
|
3240
|
+
help="Upload a calibration to the community registry",
|
|
3241
|
+
description="""Upload your calibration to share with the community.
|
|
3242
|
+
|
|
3243
|
+
Your calibration helps everyone with the same robot model!
|
|
3244
|
+
First user to calibrate a robot benefits the entire community.
|
|
3245
|
+
|
|
3246
|
+
EXAMPLES:
|
|
3247
|
+
ate robot upload-calibration ./servo_mappings.json --robot-slug mechdog-mini
|
|
3248
|
+
ate robot upload-calibration ./calibration.json --publish
|
|
3249
|
+
""")
|
|
3250
|
+
upload_cal_parser.add_argument("calibration_file",
|
|
3251
|
+
help="Path to calibration JSON file")
|
|
3252
|
+
upload_cal_parser.add_argument("--robot-slug",
|
|
3253
|
+
help="Robot model slug")
|
|
3254
|
+
upload_cal_parser.add_argument("--robot-name",
|
|
3255
|
+
help="Robot name (for search)")
|
|
3256
|
+
upload_cal_parser.add_argument("--method",
|
|
3257
|
+
choices=["aruco_marker", "llm_vision", "manual", "urdf_import", "twitch_test", "direction_calibration"],
|
|
3258
|
+
default="llm_vision",
|
|
3259
|
+
help="Calibration method (default: llm_vision)")
|
|
3260
|
+
upload_cal_parser.add_argument("--publish", action="store_true",
|
|
3261
|
+
help="Publish immediately (default: save as draft)")
|
|
3262
|
+
|
|
3263
|
+
# robot behavior - Run high-level behaviors with perception
|
|
3264
|
+
behavior_parser = robot_subparsers.add_parser("behavior",
|
|
3265
|
+
help="Execute high-level behaviors with perception",
|
|
3266
|
+
description="""Run complex behaviors that combine primitives with perception.
|
|
3267
|
+
|
|
3268
|
+
Behaviors include closed-loop control using camera feedback:
|
|
3269
|
+
- Detect targets
|
|
3270
|
+
- Align to center them in view
|
|
3271
|
+
- Approach until close enough
|
|
3272
|
+
- Execute manipulation sequences
|
|
3273
|
+
- Verify success
|
|
3274
|
+
|
|
3275
|
+
EXAMPLES:
|
|
3276
|
+
ate robot behavior pickup_green_ball --simulate
|
|
3277
|
+
ate robot behavior pickup_green_ball --profile my_mechdog
|
|
3278
|
+
ate robot behavior pickup_green_ball --camera-ip 192.168.4.1
|
|
3279
|
+
""")
|
|
3280
|
+
behavior_parser.add_argument("behavior", nargs="?", default="pickup_green_ball",
|
|
3281
|
+
help="Behavior to execute (default: pickup_green_ball)")
|
|
3282
|
+
behavior_parser.add_argument("-p", "--profile", dest="profile_name",
|
|
3283
|
+
help="Robot profile for real execution")
|
|
3284
|
+
behavior_parser.add_argument("--camera-ip", dest="camera_ip",
|
|
3285
|
+
help="WiFi camera IP address")
|
|
3286
|
+
behavior_parser.add_argument("--simulate", action="store_true",
|
|
3287
|
+
help="Run in simulation mode (no real robot)")
|
|
3288
|
+
|
|
3289
|
+
# robot urdf - URDF generation and Artifex integration
|
|
3290
|
+
urdf_parser = robot_subparsers.add_parser("urdf",
|
|
3291
|
+
help="Generate URDF from ArUco markers and open in Artifex",
|
|
3292
|
+
description="""Generate URDF from visual ArUco marker detection.
|
|
3293
|
+
|
|
3294
|
+
This command captures ArUco markers attached to robot links via webcam,
|
|
3295
|
+
computes their 3D positions, and generates a calibrated URDF file.
|
|
3296
|
+
The URDF can then be opened directly in Artifex Desktop for visualization.
|
|
3297
|
+
|
|
3298
|
+
WORKFLOW:
|
|
3299
|
+
1. Attach ArUco markers (4x4_50 dictionary, 25mm) to robot links
|
|
3300
|
+
2. Run 'ate robot urdf generate' to capture and create URDF
|
|
3301
|
+
3. Run 'ate robot urdf open' to view in Artifex Desktop
|
|
3302
|
+
|
|
3303
|
+
EXAMPLES:
|
|
3304
|
+
ate robot urdf generate # Capture markers and generate URDF
|
|
3305
|
+
ate robot urdf generate --reset # Reset robot pose first (BLE)
|
|
3306
|
+
ate robot urdf generate --frames 50 # More frames for accuracy
|
|
3307
|
+
ate robot urdf open # Open in Artifex Desktop
|
|
3308
|
+
ate robot urdf open --bridge # Sync via ATE bridge server
|
|
3309
|
+
ate robot urdf info # Show URDF details
|
|
3310
|
+
""")
|
|
3311
|
+
urdf_subparsers = urdf_parser.add_subparsers(dest="urdf_action", help="URDF action")
|
|
3312
|
+
|
|
3313
|
+
# urdf generate
|
|
3314
|
+
urdf_generate = urdf_subparsers.add_parser("generate",
|
|
3315
|
+
help="Generate URDF from ArUco marker detection")
|
|
3316
|
+
urdf_generate.add_argument("--reset", action="store_true",
|
|
3317
|
+
help="Reset robot pose via BLE before capture")
|
|
3318
|
+
urdf_generate.add_argument("--frames", type=int, default=20,
|
|
3319
|
+
help="Number of frames to capture (default: 20)")
|
|
3320
|
+
urdf_generate.add_argument("--camera", type=int, default=0,
|
|
3321
|
+
help="Camera index (default: 0)")
|
|
3322
|
+
urdf_generate.add_argument("--marker-size", dest="marker_size", type=float, default=0.025,
|
|
3323
|
+
help="Marker size in meters (default: 0.025)")
|
|
3324
|
+
urdf_generate.add_argument("--output", "-o",
|
|
3325
|
+
help="Output URDF path (default: urdf_generation/mechdog_calibrated.urdf)")
|
|
3326
|
+
urdf_generate.add_argument("--ble-address", dest="ble_address",
|
|
3327
|
+
help="BLE address for robot reset (default: auto-detect)")
|
|
3328
|
+
urdf_generate.add_argument("--robot-name", dest="robot_name", default="mechdog",
|
|
3329
|
+
help="Robot name in URDF (default: mechdog)")
|
|
3330
|
+
|
|
3331
|
+
# urdf open
|
|
3332
|
+
urdf_open = urdf_subparsers.add_parser("open",
|
|
3333
|
+
help="Open URDF in Artifex Desktop")
|
|
3334
|
+
urdf_open.add_argument("urdf_file", nargs="?",
|
|
3335
|
+
help="URDF file to open (default: last generated)")
|
|
3336
|
+
urdf_open.add_argument("--bridge", action="store_true",
|
|
3337
|
+
help="Sync via ATE bridge server instead of direct launch")
|
|
3338
|
+
urdf_open.add_argument("--launch", action="store_true",
|
|
3339
|
+
help="Force launch Artifex even if bridge fails")
|
|
3340
|
+
|
|
3341
|
+
# urdf info
|
|
3342
|
+
urdf_info = urdf_subparsers.add_parser("info",
|
|
3343
|
+
help="Show information about a URDF file")
|
|
3344
|
+
urdf_info.add_argument("urdf_file", nargs="?",
|
|
3345
|
+
help="URDF file to inspect (default: last generated)")
|
|
3346
|
+
|
|
3347
|
+
return robot_parser
|
|
3348
|
+
|
|
3349
|
+
|
|
3350
|
+
def handle_robot_command(args):
    """
    Handle robot command dispatch.

    Call this from the main CLI command handler.

    Routes a parsed ``argparse`` namespace to the matching ``robot_*_command``
    implementation based on ``args.robot_action``.  Sub-actions (profiles,
    calibrate, skills, primitives, urdf) are dispatched on their own
    ``*_action`` attribute.  Unknown actions fall through to a usage message.

    NOTE(review): attribute access is intentionally mixed — ``args.x`` is used
    where the subparser always defines the attribute, while
    ``getattr(args, "x", default)`` guards attributes that may be absent
    (e.g. when a sub-action was omitted).  The getattr defaults mirror the
    parser defaults; keep them in sync with the parser definitions.
    """
    # Network discovery of robots on the local subnet.
    if args.robot_action == "discover":
        robot_discover_command(
            subnet=args.subnet,
            timeout=args.timeout,
            json_output=args.json
        )

    # BLE scan / connect / capture / analyze; defaults to "info" when no
    # sub-action was given on the command line.
    elif args.robot_action == "ble":
        action = getattr(args, "ble_action", None) or "info"
        robot_ble_command(
            action=action,
            timeout=getattr(args, "timeout", 10.0),
            address=getattr(args, "address", None),
            name_filter=getattr(args, "name_filter", None),
            profile_name=getattr(args, "profile_name", None),
            json_output=getattr(args, "json_output", False),
            # New capture/analyze arguments
            platform=getattr(args, "platform", "auto"),
            output=getattr(args, "output", "capture.pklg"),
            capture_file=getattr(args, "capture_file", None),
            generate_code=getattr(args, "generate", False),
        )

    # List known robot types, optionally filtered by archetype.
    elif args.robot_action == "list":
        robot_list_command(archetype=args.archetype)

    # Show details for a robot type or a saved profile.
    elif args.robot_action == "info":
        robot_info_command(
            robot_type=args.robot_type,
            profile_name=args.profile
        )

    # Interactive (or --non-interactive) first-time robot setup.
    elif args.robot_action == "setup":
        robot_setup_command(
            port=args.port,
            camera_ip=args.camera_ip,
            robot_type=args.robot_type,
            name=args.name,
            non_interactive=args.non_interactive
        )

    # Capability smoke tests against a saved profile.
    elif args.robot_action == "test":
        robot_test_command(
            profile_name=args.profile,
            capability=args.capability,
            verbose=args.verbose
        )

    # Closed-loop approach behavior (camera-guided).
    elif args.robot_action == "approach":
        robot_approach_command(
            profile_name=args.profile,
            duration=args.duration,
            target_distance=args.target_distance,
            speed=args.speed,
            detect_colors=args.detect_colors,
            dry_run=args.dry_run
        )

    # Saved robot-profile management (CRUD + templates).
    elif args.robot_action == "profiles":
        if args.profiles_action == "list":
            robot_profiles_command("list")
        elif args.profiles_action == "show":
            robot_profiles_command("show", name=args.name)
        elif args.profiles_action == "delete":
            robot_profiles_command("delete", name=args.name)
        elif args.profiles_action == "create":
            robot_profiles_command("create", name=args.name, template=getattr(args, "template", None))
        elif args.profiles_action == "templates":
            robot_profiles_command("templates")
        else:
            print("Usage: ate robot profiles <list|show|delete|create|templates>")

    # Calibration workflow: start/gripper/direction probing, pose
    # record/apply, and calibration bookkeeping (list/poses/status/reset).
    elif args.robot_action == "calibrate":
        action = getattr(args, "calibrate_action", None)
        if action == "start":
            robot_calibrate_command(
                action="start",
                port=args.port,
                camera_url=args.camera_url,
                name=args.name,
                robot_type=args.robot_type,
            )
        elif action == "gripper":
            robot_calibrate_command(
                action="gripper",
                port=args.port,
                servo_id=args.servo,
            )
        elif action == "list":
            robot_calibrate_command(action="list")
        elif action == "poses":
            robot_calibrate_command(action="poses", name=args.name)
        elif action == "record":
            robot_calibrate_command(
                action="record",
                name=args.name,
                pose_name=args.pose,
            )
        elif action == "apply":
            robot_calibrate_command(
                action="apply",
                name=args.name,
                pose_name=args.pose,
            )
        elif action == "direction":
            robot_calibrate_command(
                action="direction",
                port=getattr(args, "port", None),
                name=getattr(args, "name", "robot"),
            )
        elif action == "status":
            robot_calibrate_command(
                action="status",
                name=getattr(args, "name", "robot"),
            )
        elif action == "reset":
            robot_calibrate_command(
                action="reset",
                name=args.name,
            )
        else:
            print("Usage: ate robot calibrate <start|gripper|direction|status|list|poses|record|apply|reset>")

    # Visual servo labeling (webcam and/or robot camera).
    elif args.robot_action == "label":
        robot_label_command(
            port=args.port,
            name=args.name,
            robot_type=args.robot_type,
            webcam_id=args.webcam_id,
            camera_url=args.camera_url,
        )

    # Generated-skill library management (list/show/export).
    elif args.robot_action == "skills":
        action = getattr(args, "skills_action", None)
        if action == "list":
            robot_skills_command("list")
        elif action == "show":
            robot_skills_command("show", name=args.name)
        elif action == "export":
            # "skill" is an optional positional; None means export all skills.
            robot_skills_command("export", name=args.name, skill_name=getattr(args, "skill", None))
        else:
            print("Usage: ate robot skills <list|show|export>")

    # Upload a skill library to the FoodforThought platform.
    elif args.robot_action == "upload":
        robot_upload_command(
            name=args.name,
            project_id=getattr(args, "project_id", None),
            # --no-images flag is inverted here into include_images.
            include_images=not getattr(args, "no_images", False),
        )

    # Fast keyboard-driven teaching mode with live preview.
    elif args.robot_action == "teach":
        teach_command(
            port=args.port,
            name=args.name,
            robot_type=args.robot_type,
            webcam_id=args.webcam_id,
            camera_url=getattr(args, "camera_url", None),
        )

    # High-level behavior execution (perception + primitives).
    elif args.robot_action == "behavior":
        robot_behavior_command(
            profile_name=getattr(args, "profile_name", None),
            behavior=args.behavior,
            camera_ip=getattr(args, "camera_ip", None),
            simulate=getattr(args, "simulate", False),
        )

    # Primitive-skill inspection; defaults to "list" when no sub-action given.
    elif args.robot_action == "primitives":
        action = getattr(args, "primitives_action", None) or "list"
        name = getattr(args, "name", None)
        robot_primitives_command(action=action, name=name)

    # LLM-guided servo mapping and primitive generation.
    elif args.robot_action == "map-servos":
        robot_map_servos_command(
            port=getattr(args, "port", None),
            num_servos=getattr(args, "num_servos", 13),
            use_camera=getattr(args, "use_camera", False),
            camera_index=getattr(args, "camera_index", 0),
            wifi_camera_ip=getattr(args, "wifi_camera_ip", None),
            output=getattr(args, "output", None),
            save_images=getattr(args, "save_images", False),
            upload=getattr(args, "upload", False),
            project_id=getattr(args, "project_id", None),
        )

    # Printable ArUco marker PDF generation.
    elif args.robot_action == "generate-markers":
        robot_generate_markers_command(
            output=getattr(args, "output", "aruco_markers.pdf"),
            count=getattr(args, "count", 12),
            size=getattr(args, "size", 30.0),
            robot_name=getattr(args, "robot_name", None),
            page_size=getattr(args, "page_size", "letter"),
            preset=getattr(args, "preset", None),
            list_presets=getattr(args, "list_presets", False),
        )

    # Search the community calibration registry.
    elif args.robot_action == "identify":
        robot_identify_command(
            robot_slug=getattr(args, "robot_slug", None),
            robot_name=getattr(args, "robot_name", None),
            port=getattr(args, "port", None),
        )

    # Share a local calibration file with the community registry.
    elif args.robot_action == "upload-calibration":
        robot_upload_calibration_command(
            calibration_file=args.calibration_file,
            robot_slug=getattr(args, "robot_slug", None),
            robot_name=getattr(args, "robot_name", None),
            method=getattr(args, "method", "llm_vision"),
            publish=getattr(args, "publish", False),
        )

    # URDF generation from ArUco markers + Artifex Desktop integration.
    elif args.robot_action == "urdf":
        action = getattr(args, "urdf_action", None)
        if action == "generate":
            robot_urdf_command(
                action="generate",
                reset=getattr(args, "reset", False),
                frames=getattr(args, "frames", 20),
                camera=getattr(args, "camera", 0),
                marker_size=getattr(args, "marker_size", 0.025),
                output=getattr(args, "output", None),
                ble_address=getattr(args, "ble_address", None),
                robot_name=getattr(args, "robot_name", "mechdog"),
            )
        elif action == "open":
            robot_urdf_command(
                action="open",
                urdf_file=getattr(args, "urdf_file", None),
                bridge=getattr(args, "bridge", False),
                launch=getattr(args, "launch", False),
            )
        elif action == "info":
            robot_urdf_command(
                action="info",
                urdf_file=getattr(args, "urdf_file", None),
            )
        else:
            print("Usage: ate robot urdf <generate|open|info>")
            print("\nExamples:")
            print("  ate robot urdf generate          # Generate URDF from markers")
            print("  ate robot urdf generate --reset  # Reset robot pose first")
            print("  ate robot urdf open              # Open in Artifex Desktop")
            print("  ate robot urdf info              # Show URDF details")

    # Unknown / missing action: print the full action list.
    else:
        print("Usage: ate robot <discover|ble|list|info|setup|test|approach|profiles|calibrate|label|skills|upload|teach|behavior|primitives|map-servos|generate-markers|identify|upload-calibration|urdf>")
        print("\nRun 'ate robot --help' for more information.")