foodforthought-cli 0.2.7__py3-none-any.whl → 0.3.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (131)
  1. ate/__init__.py +6 -0
  2. ate/__main__.py +16 -0
  3. ate/auth/__init__.py +1 -0
  4. ate/auth/device_flow.py +141 -0
  5. ate/auth/token_store.py +96 -0
  6. ate/behaviors/__init__.py +100 -0
  7. ate/behaviors/approach.py +399 -0
  8. ate/behaviors/common.py +686 -0
  9. ate/behaviors/tree.py +454 -0
  10. ate/cli.py +855 -3995
  11. ate/client.py +90 -0
  12. ate/commands/__init__.py +168 -0
  13. ate/commands/auth.py +389 -0
  14. ate/commands/bridge.py +448 -0
  15. ate/commands/data.py +185 -0
  16. ate/commands/deps.py +111 -0
  17. ate/commands/generate.py +384 -0
  18. ate/commands/memory.py +907 -0
  19. ate/commands/parts.py +166 -0
  20. ate/commands/primitive.py +399 -0
  21. ate/commands/protocol.py +288 -0
  22. ate/commands/recording.py +524 -0
  23. ate/commands/repo.py +154 -0
  24. ate/commands/simulation.py +291 -0
  25. ate/commands/skill.py +303 -0
  26. ate/commands/skills.py +487 -0
  27. ate/commands/team.py +147 -0
  28. ate/commands/workflow.py +271 -0
  29. ate/detection/__init__.py +38 -0
  30. ate/detection/base.py +142 -0
  31. ate/detection/color_detector.py +399 -0
  32. ate/detection/trash_detector.py +322 -0
  33. ate/drivers/__init__.py +39 -0
  34. ate/drivers/ble_transport.py +405 -0
  35. ate/drivers/mechdog.py +942 -0
  36. ate/drivers/wifi_camera.py +477 -0
  37. ate/interfaces/__init__.py +187 -0
  38. ate/interfaces/base.py +273 -0
  39. ate/interfaces/body.py +267 -0
  40. ate/interfaces/detection.py +282 -0
  41. ate/interfaces/locomotion.py +422 -0
  42. ate/interfaces/manipulation.py +408 -0
  43. ate/interfaces/navigation.py +389 -0
  44. ate/interfaces/perception.py +362 -0
  45. ate/interfaces/sensors.py +247 -0
  46. ate/interfaces/types.py +371 -0
  47. ate/llm_proxy.py +239 -0
  48. ate/mcp_server.py +387 -0
  49. ate/memory/__init__.py +35 -0
  50. ate/memory/cloud.py +244 -0
  51. ate/memory/context.py +269 -0
  52. ate/memory/embeddings.py +184 -0
  53. ate/memory/export.py +26 -0
  54. ate/memory/merge.py +146 -0
  55. ate/memory/migrate/__init__.py +34 -0
  56. ate/memory/migrate/base.py +89 -0
  57. ate/memory/migrate/pipeline.py +189 -0
  58. ate/memory/migrate/sources/__init__.py +13 -0
  59. ate/memory/migrate/sources/chroma.py +170 -0
  60. ate/memory/migrate/sources/pinecone.py +120 -0
  61. ate/memory/migrate/sources/qdrant.py +110 -0
  62. ate/memory/migrate/sources/weaviate.py +160 -0
  63. ate/memory/reranker.py +353 -0
  64. ate/memory/search.py +26 -0
  65. ate/memory/store.py +548 -0
  66. ate/recording/__init__.py +83 -0
  67. ate/recording/demonstration.py +378 -0
  68. ate/recording/session.py +415 -0
  69. ate/recording/upload.py +304 -0
  70. ate/recording/visual.py +416 -0
  71. ate/recording/wrapper.py +95 -0
  72. ate/robot/__init__.py +221 -0
  73. ate/robot/agentic_servo.py +856 -0
  74. ate/robot/behaviors.py +493 -0
  75. ate/robot/ble_capture.py +1000 -0
  76. ate/robot/ble_enumerate.py +506 -0
  77. ate/robot/calibration.py +668 -0
  78. ate/robot/calibration_state.py +388 -0
  79. ate/robot/commands.py +3735 -0
  80. ate/robot/direction_calibration.py +554 -0
  81. ate/robot/discovery.py +441 -0
  82. ate/robot/introspection.py +330 -0
  83. ate/robot/llm_system_id.py +654 -0
  84. ate/robot/locomotion_calibration.py +508 -0
  85. ate/robot/manager.py +270 -0
  86. ate/robot/marker_generator.py +611 -0
  87. ate/robot/perception.py +502 -0
  88. ate/robot/primitives.py +614 -0
  89. ate/robot/profiles.py +281 -0
  90. ate/robot/registry.py +322 -0
  91. ate/robot/servo_mapper.py +1153 -0
  92. ate/robot/skill_upload.py +675 -0
  93. ate/robot/target_calibration.py +500 -0
  94. ate/robot/teach.py +515 -0
  95. ate/robot/types.py +242 -0
  96. ate/robot/visual_labeler.py +1048 -0
  97. ate/robot/visual_servo_loop.py +494 -0
  98. ate/robot/visual_servoing.py +570 -0
  99. ate/robot/visual_system_id.py +906 -0
  100. ate/transports/__init__.py +121 -0
  101. ate/transports/base.py +394 -0
  102. ate/transports/ble.py +405 -0
  103. ate/transports/hybrid.py +444 -0
  104. ate/transports/serial.py +345 -0
  105. ate/urdf/__init__.py +30 -0
  106. ate/urdf/capture.py +582 -0
  107. ate/urdf/cloud.py +491 -0
  108. ate/urdf/collision.py +271 -0
  109. ate/urdf/commands.py +708 -0
  110. ate/urdf/depth.py +360 -0
  111. ate/urdf/inertial.py +312 -0
  112. ate/urdf/kinematics.py +330 -0
  113. ate/urdf/lifting.py +415 -0
  114. ate/urdf/meshing.py +300 -0
  115. ate/urdf/models/__init__.py +110 -0
  116. ate/urdf/models/depth_anything.py +253 -0
  117. ate/urdf/models/sam2.py +324 -0
  118. ate/urdf/motion_analysis.py +396 -0
  119. ate/urdf/pipeline.py +468 -0
  120. ate/urdf/scale.py +256 -0
  121. ate/urdf/scan_session.py +411 -0
  122. ate/urdf/segmentation.py +299 -0
  123. ate/urdf/synthesis.py +319 -0
  124. ate/urdf/topology.py +336 -0
  125. ate/urdf/validation.py +371 -0
  126. {foodforthought_cli-0.2.7.dist-info → foodforthought_cli-0.3.0.dist-info}/METADATA +9 -1
  127. foodforthought_cli-0.3.0.dist-info/RECORD +166 -0
  128. {foodforthought_cli-0.2.7.dist-info → foodforthought_cli-0.3.0.dist-info}/WHEEL +1 -1
  129. foodforthought_cli-0.2.7.dist-info/RECORD +0 -44
  130. {foodforthought_cli-0.2.7.dist-info → foodforthought_cli-0.3.0.dist-info}/entry_points.txt +0 -0
  131. {foodforthought_cli-0.2.7.dist-info → foodforthought_cli-0.3.0.dist-info}/top_level.txt +0 -0
ate/robot/commands.py ADDED
@@ -0,0 +1,3735 @@
1
+ """
2
+ Robot management CLI commands.
3
+
4
+ Provides:
5
+ - ate robot discover - Find robots on network/USB
6
+ - ate robot list - List known robot types
7
+ - ate robot info - Show robot capabilities
8
+ - ate robot setup - Interactive setup wizard
9
+ - ate robot test - Test robot capabilities
10
+ - ate robot profiles - Manage saved profiles
11
+ """
12
+
13
+ import os
14
+ import sys
15
+ import json
16
+ from typing import Optional
17
+ from pathlib import Path
18
+
19
+ from .discovery import (
20
+ discover_robots,
21
+ discover_serial_robots,
22
+ discover_network_cameras,
23
+ discover_ble_robots,
24
+ quick_scan,
25
+ )
26
+ from .profiles import (
27
+ RobotProfile,
28
+ load_profile,
29
+ save_profile,
30
+ list_profiles,
31
+ delete_profile,
32
+ get_template,
33
+ list_templates,
34
+ create_profile_from_discovery,
35
+ )
36
+ from .registry import (
37
+ RobotRegistry,
38
+ KNOWN_ROBOTS,
39
+ )
40
+ from .introspection import (
41
+ get_capabilities,
42
+ get_methods,
43
+ test_capability,
44
+ format_capabilities,
45
+ format_methods,
46
+ )
47
+ from .manager import RobotManager
48
+ from .calibration import (
49
+ VisualCalibrator,
50
+ RobotCalibration,
51
+ load_calibration,
52
+ save_calibration,
53
+ list_calibrations,
54
+ quick_gripper_calibration,
55
+ )
56
+ from .visual_labeler import (
57
+ DualCameraLabeler,
58
+ SkillLibrary,
59
+ load_skill_library,
60
+ list_skill_libraries,
61
+ visual_label_command,
62
+ )
63
+ from .skill_upload import (
64
+ SkillLibraryUploader,
65
+ upload_skill_library,
66
+ )
67
+ from .teach import teach_command
68
+ from .perception import LivePerceptionExecutor, PerceptionSystem
69
+
70
+
71
def robot_primitives_command(action: str = "list", name: Optional[str] = None):
    """
    List and inspect programmatic skill primitives.

    ate robot primitives list
    ate robot primitives show gripper_open
    ate robot primitives test pickup --port /dev/cu.usbserial-10

    Args:
        action: One of "list", "show", or "test".
        name: Skill name; required for "show" and "test".

    Exits with status 1 on an unknown action, an unknown skill name,
    or a missing required argument.
    """
    # Local import keeps CLI startup fast when this subcommand is unused.
    # (Removed the unused SkillType import.)
    from .primitives import PrimitiveLibrary

    # No robot attached: we only introspect the library, never execute.
    lib = PrimitiveLibrary(robot_interface=None)

    if action == "list":
        print("=" * 60)
        print("SKILL PRIMITIVES")
        print("=" * 60)

        print(f"\nPrimitives ({len(lib.primitives)}):")
        print("-" * 40)
        # Loop variables renamed so they no longer shadow the `name` parameter.
        for prim_name, prim in lib.primitives.items():
            hw = ", ".join(r.value for r in prim.hardware) or "none"
            servos = len(prim.servo_targets)
            print(f" {prim_name:<20} [{hw}] ({servos} servos, {prim.duration_ms}ms)")

        print(f"\nCompound Skills ({len(lib.compounds)}):")
        print("-" * 40)
        for comp_name, comp in lib.compounds.items():
            hw = ", ".join(r.value for r in comp.hardware) or "none"
            steps = len(comp.steps)
            print(f" {comp_name:<20} [{hw}] ({steps} steps)")
            print(f" → {' → '.join(comp.steps)}")

        print(f"\nBehaviors ({len(lib.behaviors)}):")
        print("-" * 40)
        for beh_name, beh in lib.behaviors.items():
            hw = ", ".join(r.value for r in beh.hardware) or "none"
            print(f" {beh_name:<20} [{hw}]")
            print(f" {beh.description}")

        total = len(lib.primitives) + len(lib.compounds) + len(lib.behaviors)
        print(f"\nTotal: {total} skills")

    elif action == "show":
        if not name:
            print("Usage: ate robot primitives show <name>")
            sys.exit(1)

        # Find in primitives
        if name in lib.primitives:
            prim = lib.primitives[name]
            print(f"\nPrimitive: {name}")
            print("=" * 40)
            print(f"Description: {prim.description}")
            print(f"Duration: {prim.duration_ms}ms")
            print(f"Hardware: {[r.value for r in prim.hardware]}")
            print(f"\nServo Targets:")
            for sid, val in prim.servo_targets.items():
                print(f" Servo {sid}: {val}")

        elif name in lib.compounds:
            comp = lib.compounds[name]
            print(f"\nCompound Skill: {name}")
            print("=" * 40)
            print(f"Description: {comp.description}")
            print(f"Wait between: {comp.wait_between_ms}ms")
            print(f"Hardware: {[r.value for r in comp.hardware]}")
            print(f"\nSteps:")
            for i, step in enumerate(comp.steps, 1):
                print(f" {i}. {step}")

        elif name in lib.behaviors:
            beh = lib.behaviors[name]
            print(f"\nBehavior: {name}")
            print("=" * 40)
            print(f"Description: {beh.description}")
            print(f"Hardware: {[r.value for r in beh.hardware]}")
            print(f"\nSteps:")
            # Steps may be dicts or strings; both render identically via
            # f-string formatting, so the previous isinstance() branch with
            # two identical arms was dead code and has been removed.
            for i, step in enumerate(beh.steps, 1):
                print(f" {i}. {step}")

        else:
            print(f"Skill not found: {name}")
            print("\nAvailable skills:")
            print(f" Primitives: {', '.join(lib.primitives.keys())}")
            print(f" Compounds: {', '.join(lib.compounds.keys())}")
            print(f" Behaviors: {', '.join(lib.behaviors.keys())}")
            sys.exit(1)

    elif action == "test":
        if not name:
            print("Usage: ate robot primitives test <name> --port <port>")
            sys.exit(1)
        # Placeholder: actual execution lives in `ate robot behavior`.
        print(f"Testing skill: {name}")
        print("(Use 'ate robot behavior' for full behavior execution)")

    else:
        print(f"Unknown action: {action}")
        print("Available actions: list, show, test")
        sys.exit(1)
173
+
174
+
175
def robot_behavior_command(
    profile_name: Optional[str] = None,
    behavior: str = "pickup_green_ball",
    camera_ip: Optional[str] = None,
    simulate: bool = False,
):
    """
    Execute a high-level behavior.

    ate robot behavior pickup_green_ball --profile my_mechdog
    ate robot behavior pickup_green_ball --camera-ip 192.168.4.1
    ate robot behavior pickup_green_ball --simulate

    Args:
        profile_name: Saved robot profile to load and connect through;
            hardware connection is skipped when simulate is True.
        behavior: Behavior identifier; only "pickup_green_ball" is handled.
        camera_ip: Camera address override; falls back to the profile's
            camera_ip, then to "192.168.4.1".
        simulate: Run without robot hardware and without the calibration
            prerequisite check.

    Exits with status 1 when direction calibration is missing, the profile
    or behavior is unknown, or the robot connection fails.
    """
    # PREREQUISITE CHECK: Ensure direction calibration is complete
    # This is the enforcement that prevents catastrophic failures
    if profile_name and not simulate:
        from .calibration_state import CalibrationState

        state = CalibrationState.load(profile_name)
        passed, message = state.check_prerequisite("direction_calibrated")

        if not passed:
            print(message)
            print("\nThe behavior cannot run without direction calibration.")
            print("This prevents the arm from moving in the WRONG direction.")
            sys.exit(1)

    print(f"Behavior: {behavior}")
    print("=" * 50)

    # Stays None for --simulate runs or when no profile is given.
    robot = None

    if profile_name and not simulate:
        # Load robot from profile
        profile = load_profile(profile_name)
        if not profile:
            print(f"Profile not found: {profile_name}")
            sys.exit(1)

        manager = RobotManager()
        manager.load(profile_name)

        print("Connecting to robot...")
        if not manager.connect(profile_name):
            print("Connection failed")
            sys.exit(1)

        robot = manager.get(profile_name)
        # Explicit --camera-ip wins over the profile's stored camera address.
        camera_ip = camera_ip or profile.camera_ip

    # Create executor
    executor = LivePerceptionExecutor(
        robot_interface=robot,
        camera_ip=camera_ip or "192.168.4.1",
    )

    # Try to connect to camera (ok if it fails - will use simulation)
    if not simulate and camera_ip:
        if executor.connect():
            print(f"Camera connected: {camera_ip}")
        else:
            print("Camera not available - using simulated perception")

    try:
        # Execute behavior
        if behavior == "pickup_green_ball":
            success = executor.pickup_green_ball()
        else:
            print(f"Unknown behavior: {behavior}")
            print("Available: pickup_green_ball")
            # sys.exit raises SystemExit, so the finally block below
            # still runs and releases the camera/robot.
            sys.exit(1)

        print("\n" + "=" * 50)
        print(f"Result: {'SUCCESS' if success else 'FAILED'}")

    except KeyboardInterrupt:
        print("\n\nAborted by user")
    finally:
        executor.disconnect()
        if robot and profile_name:
            # NOTE(review): a fresh RobotManager is constructed here instead
            # of reusing the instance that connected above. This only releases
            # the connection if RobotManager tracks connections in shared
            # state (e.g. a module-level registry) — confirm; otherwise the
            # original connection is never closed.
            manager = RobotManager()
            manager.disconnect(profile_name)
257
+
258
+
259
def robot_discover_command(subnet: Optional[str] = None, timeout: float = 3.0, json_output: bool = False):
    """
    Discover robots on network and USB.

    ate robot discover
    ate robot discover --subnet 192.168.1 --timeout 5

    Args:
        subnet: Subnet prefix (e.g. "192.168.1") for the camera scan;
            passed through to discover_network_cameras when given.
        timeout: Network camera scan timeout in seconds.
        json_output: Emit ONLY machine-readable JSON on stdout.

    Fix: previously the JSON dump was appended AFTER the human-readable
    report, so `ate robot discover --json | jq` could not parse stdout.
    JSON mode now suppresses all progress/report text.
    """
    if json_output:
        # Machine-readable mode: no progress chatter, stdout is pure JSON.
        serial_robots = discover_serial_robots()
        cameras = discover_network_cameras(timeout=timeout, subnet=subnet)
        result = {
            "serial": [
                {
                    "port": r.port,
                    "type": r.robot_type,
                    "name": r.name,
                }
                for r in serial_robots
            ],
            "cameras": [
                {
                    "ip": c.ip,
                    "name": c.name,
                    "ports": c.ports,
                }
                for c in cameras
            ],
        }
        print(json.dumps(result, indent=2))
        return

    print("Scanning for robots...\n")

    # Serial devices
    print("USB/Serial devices:")
    serial_robots = discover_serial_robots()

    if serial_robots:
        for robot in serial_robots:
            # "✓" = identified robot type, "?" = unidentified serial device.
            status = "✓" if robot.robot_type else "?"
            print(f" {status} {robot.name}")
            print(f" Port: {robot.port}")
            if robot.robot_type:
                print(f" Type: {robot.robot_type}")
            if robot.manufacturer:
                print(f" Manufacturer: {robot.manufacturer}")
    else:
        print(" (none found)")

    # Network cameras
    print("\nNetwork cameras:")
    cameras = discover_network_cameras(timeout=timeout, subnet=subnet)

    if cameras:
        for cam in cameras:
            print(f" ✓ {cam.name}")
            print(f" IP: {cam.ip}")
            if cam.ports:
                print(f" Ports: {cam.ports}")
    else:
        print(" (none found)")

    # Summary
    total = len(serial_robots) + len(cameras)
    print(f"\nFound {total} device(s)")
321
+
322
+
323
def robot_ble_command(
    action: str = "discover",
    timeout: float = 10.0,
    address: Optional[str] = None,
    name_filter: Optional[str] = None,
    profile_name: Optional[str] = None,
    json_output: bool = False,
    # Capture/analyze arguments
    platform: str = "auto",
    output: str = "capture.pklg",
    capture_file: Optional[str] = None,
    generate_code: bool = False,
):
    """
    Bluetooth Low Energy robot discovery, connection, and protocol analysis.

    ate robot ble discover
    ate robot ble discover --timeout 15 --filter MechDog
    ate robot ble connect AA:BB:CC:DD:EE:FF --profile my_mechdog
    ate robot ble capture --platform ios -o capture.pklg
    ate robot ble analyze capture.pklg --generate
    ate robot ble inspect mechdog_00

    Args:
        action: One of "discover", "connect", "info", "capture", "analyze",
            "inspect", "pickup", or "test-locomotion".
        timeout: Scan timeout in seconds (discover action).
        address: Device address or name fragment (connect / inspect /
            pickup / test-locomotion actions).
        name_filter: Substring filter on advertised names (discover action).
        profile_name: Profile to update with BLE settings (connect action).
        json_output: Also dump discovered devices as JSON (discover action).
        platform: "ios", "android", or "auto" (capture action). NOTE: this
            parameter shadows the stdlib `platform` module name inside this
            function, so never import that module here.
        output: Capture output path, or a .py path for generated code
            (capture / analyze actions).
        capture_file: Existing capture file to analyze (analyze action).
        generate_code: Print generated Python code (analyze action).

    Exits with status 1 on missing optional dependencies, bad arguments,
    or action failures.
    """
    if action == "discover":
        print("Scanning for BLE robots...")
        print(f"Timeout: {timeout}s")
        if name_filter:
            print(f"Filter: {name_filter}")
        print()

        try:
            # Local import: BLE support (bleak) is an optional dependency.
            from ..drivers.ble_transport import BLETransport, discover_ble_robots as ble_discover

            # Use name filter if provided
            if name_filter:
                devices = BLETransport.discover_sync(timeout=timeout, name_filter=name_filter)
            else:
                devices = ble_discover(timeout=timeout)

            if not devices:
                print("No BLE devices found.")
                print()
                print("Tips:")
                print(" - Ensure Bluetooth is enabled on your computer")
                print(" - Make sure the robot is powered on")
                print(" - Some robots need to be in pairing mode")
                print(" - Try: ate robot ble discover --timeout 15")
                return

            print(f"Found {len(devices)} device(s):\n")
            for i, device in enumerate(devices, 1):
                print(f" [{i}] {device.name}")
                print(f" Address: {device.address}")
                print(f" RSSI: {device.rssi} dBm")
                print()

            # JSON is printed in ADDITION to the human-readable list here.
            if json_output:
                result = [{
                    "name": d.name,
                    "address": d.address,
                    "rssi": d.rssi,
                } for d in devices]
                print(json.dumps(result, indent=2))

            print("To connect:")
            print(f" ate robot ble connect <address> --profile <name>")
            print()
            print("Or configure a profile for BLE:")
            print(" ate robot profiles create my_ble_robot")
            print(" # Edit ~/.ate/robots/my_ble_robot.json and add:")
            print(' # "use_ble": true, "ble_address": "<address>"')

        except ImportError:
            print("BLE support requires the 'bleak' library.")
            print("Install with: pip install bleak")
            sys.exit(1)
        except Exception as e:
            # Broad catch: surface any scan failure as a CLI error exit.
            print(f"BLE scan failed: {e}")
            sys.exit(1)

    elif action == "connect":
        if not address:
            print("Usage: ate robot ble connect <address>")
            print()
            print("First discover BLE devices:")
            print(" ate robot ble discover")
            sys.exit(1)

        print(f"Connecting to BLE device: {address}")

        try:
            from ..drivers.ble_transport import BLETransport

            transport = BLETransport(address=address, timeout=10.0)

            print("Connecting...")
            if transport.connect():
                print("Connected!")
                print()

                # Try to identify robot
                # Probe assumes a MicroPython-style REPL over the BLE UART:
                # Ctrl+C interrupts any running program, Ctrl+B returns to
                # the friendly REPL prompt.
                print("Probing device...")
                transport.write(b'\x03')  # Ctrl+C
                import time
                time.sleep(0.2)
                transport.write(b'\x02')  # Ctrl+B
                time.sleep(0.5)

                transport.write(b'import sys; print(sys.implementation)\r\n')
                time.sleep(0.5)
                response = transport.read(1000).decode('utf-8', errors='ignore')

                if 'micropython' in response.lower():
                    print("Device: MicroPython device")

                    # Check for MechDog
                    # The vendor import only succeeds on MechDog firmware;
                    # an Error/Traceback in the echo means it is absent.
                    transport.write(b'from HW_MechDog import MechDog\r\n')
                    time.sleep(0.3)
                    response2 = transport.read(500).decode('utf-8', errors='ignore')

                    if 'Error' not in response2 and 'Traceback' not in response2:
                        print("Robot: HiWonder MechDog")
                    else:
                        print("Robot: Unknown MicroPython robot")
                else:
                    print("Device: Unknown type")
                    print(f"Response: {response[:200]}")

                transport.disconnect()

                if profile_name:
                    # Update profile with BLE settings
                    profile = load_profile(profile_name)
                    if profile:
                        profile.use_ble = True
                        profile.ble_address = address
                        if save_profile(profile):
                            print()
                            print(f"Updated profile '{profile_name}' for BLE connection.")
                    else:
                        print(f"Profile not found: {profile_name}")

                # Generic follow-up guidance, printed after every successful probe.
                print()
                print("To use this device, configure a profile:")
                print(" ate robot profiles create my_ble_robot")
                print(" # Then edit to add BLE settings")

            else:
                print("Connection failed.")
                print("Make sure the device is powered on and in range.")

        except ImportError:
            print("BLE support requires the 'bleak' library.")
            print("Install with: pip install bleak")
            sys.exit(1)
        except Exception as e:
            print(f"BLE connection failed: {e}")
            sys.exit(1)

    elif action == "info":
        # Static help screen; reads only the registry, touches no hardware.
        print("BLE Robot Support")
        print("=" * 40)
        print()
        print("Supported robots with BLE:")
        for robot in KNOWN_ROBOTS.values():
            # Import inside the loop mirrors the original; cheap after the
            # first iteration because of Python's module cache.
            from .registry import ConnectionType
            if ConnectionType.BLUETOOTH in robot.connection_types:
                print(f" - {robot.name} ({robot.manufacturer})")
                if robot.default_ports.get("ble_service_uuid"):
                    print(f" Service: {robot.default_ports['ble_service_uuid']}")
        print()
        print("BLE UUIDs (ESP32/HM-10 compatible):")
        print(" Service: 0000ffe0-0000-1000-8000-00805f9b34fb")
        print(" Write: 0000ffe1-0000-1000-8000-00805f9b34fb")
        print(" Notify: 0000ffe2-0000-1000-8000-00805f9b34fb")
        print()
        print("Commands:")
        print(" ate robot ble discover - Find BLE devices")
        print(" ate robot ble connect - Test connection to device")
        print(" ate robot ble inspect - Show device characteristics")
        print(" ate robot ble capture - Capture phone app traffic")
        print(" ate robot ble analyze - Analyze capture file")
        print(" ate robot ble info - Show this information")

    elif action == "capture":
        # BLE traffic capture from phone app
        try:
            from .ble_capture import (
                capture_ios_interactive,
                capture_android_interactive,
                check_ios_device,
                check_android_device,
            )

            if platform == "auto":
                # Auto-detect platform
                # iOS is preferred when both device checks succeed.
                ios_ok, _ = check_ios_device()
                android_ok, _ = check_android_device()

                if ios_ok:
                    platform = "ios"
                elif android_ok:
                    platform = "android"
                else:
                    print("No mobile device detected.")
                    print()
                    print("Connect your iPhone or Android phone via USB.")
                    print()
                    print("For iOS: Connect via Lightning/USB-C cable")
                    print("For Android: Enable USB debugging and connect via USB")
                    sys.exit(1)

            if platform == "ios":
                success = capture_ios_interactive(output)
            else:
                success = capture_android_interactive(output)

            if success:
                print()
                print(f"Next step: Analyze the capture with:")
                print(f" ate robot ble analyze {output}")

        except ImportError as e:
            print(f"BLE capture requires additional dependencies: {e}")
            sys.exit(1)

    elif action == "analyze":
        # Analyze BLE capture file
        if not capture_file:
            print("Usage: ate robot ble analyze <capture_file>")
            print()
            print("First capture traffic:")
            print(" ate robot ble capture -o capture.pklg")
            sys.exit(1)

        try:
            from .ble_capture import analyze_capture

            analysis = analyze_capture(capture_file)

            if analysis:
                print(analysis.summary())

                # Generate code when --generate was passed, or when a
                # non-default output path was supplied (implicit request).
                if generate_code or output != "capture.pklg":
                    code = analysis.generate_python_code()

                    # If output looks like a .py file, save there
                    if output.endswith(".py"):
                        with open(output, "w") as f:
                            f.write(code)
                        print(f"\nGenerated code saved to: {output}")
                    elif generate_code:
                        print("\n" + "=" * 60)
                        print("GENERATED PYTHON CODE")
                        print("=" * 60)
                        print(code)

        except ImportError as e:
            print(f"BLE analysis requires tshark: {e}")
            print("Install with: brew install wireshark")
            sys.exit(1)

    elif action == "inspect":
        # Inspect BLE device characteristics
        if not address:
            print("Usage: ate robot ble inspect <device_name_or_address>")
            print()
            print("First discover devices:")
            print(" ate robot ble discover")
            sys.exit(1)

        try:
            from .ble_capture import inspect_ble_device

            if not inspect_ble_device(address):
                sys.exit(1)

        except ImportError as e:
            print(f"BLE inspection requires bleak: {e}")
            print("Install with: pip install bleak")
            sys.exit(1)

    elif action == "pickup":
        # Pure-BLE ball pickup using action groups and locomotion
        if not address:
            # Try to find MechDog automatically
            print("Scanning for MechDog...")
            try:
                import asyncio
                from bleak import BleakScanner

                async def find_mechdog():
                    # Case-insensitive substring match on advertised names.
                    devices = await BleakScanner.discover(timeout=5.0)
                    for d in devices:
                        if "mechdog" in (d.name or "").lower():
                            return d.address
                    return None

                address = asyncio.run(find_mechdog())

                if not address:
                    print("MechDog not found! Make sure it's powered on.")
                    print("Or specify address: ate robot ble pickup --address AA:BB:CC:DD")
                    sys.exit(1)

                print(f"Found MechDog: {address}")

            except ImportError:
                print("BLE requires bleak: pip install bleak")
                sys.exit(1)

        try:
            from .ble_capture import ble_pickup_sequence

            print()
            print("=" * 60)
            print("BLE BALL PICKUP SEQUENCE")
            print("=" * 60)
            print()
            print("This uses BLE-only commands:")
            print("- Action groups (CMD|2|1|X|$) for arm control")
            print("- Locomotion (CMD|3|X|$) for walking")
            print()
            print("NOTE: If arm doesn't respond, connect USB serial for")
            print("direct servo control: ate robot test mechdog --port <port>")
            print()

            import asyncio
            success = asyncio.run(ble_pickup_sequence(address))

            if success:
                print("\n✓ Pickup sequence complete!")
            else:
                print("\n✗ Pickup failed. Try USB serial for arm control.")
                sys.exit(1)

        except ImportError as e:
            print(f"BLE pickup requires bleak: {e}")
            print("Install with: pip install bleak")
            sys.exit(1)

    elif action == "test-locomotion":
        # Quick test of BLE locomotion commands
        if not address:
            print("Usage: ate robot ble test-locomotion --address <address>")
            sys.exit(1)

        try:
            import asyncio
            from bleak import BleakClient, BleakScanner

            async def test_locomotion():
                print(f"Connecting to {address}...")

                # Find device
                # Accepts either an address fragment or a name fragment.
                devices = await BleakScanner.discover(timeout=5.0)
                device = None
                for d in devices:
                    if address.lower() in (d.address or "").lower() or address.lower() in (d.name or "").lower():
                        device = d
                        break

                if not device:
                    print("Device not found!")
                    return False

                async with BleakClient(device) as client:
                    print(f"Connected: {client.is_connected}")

                    # ESP32/HM-10-style UART characteristics:
                    # FFE1 = write, FFE2 = notify (matches the "info" action).
                    FFE1 = "0000ffe1-0000-1000-8000-00805f9b34fb"
                    FFE2 = "0000ffe2-0000-1000-8000-00805f9b34fb"

                    responses = []
                    def handler(s, d):
                        # Best-effort decode; non-ASCII notifications are
                        # deliberately dropped.
                        try:
                            responses.append(d.decode('ascii'))
                        except:
                            pass

                    await client.start_notify(FFE2, handler)

                    # Battery
                    print("\nBattery check...")
                    await client.write_gatt_char(FFE1, b"CMD|6|$")
                    await asyncio.sleep(1.0)
                    for r in responses:
                        if "CMD|6|" in r:
                            parts = r.split("|")
                            if len(parts) >= 3:
                                try:
                                    # Field 2 is read as millivolts
                                    # (divided by 1000 and shown as volts).
                                    mv = int(parts[2])
                                    print(f" Battery: {mv/1000:.2f}V")
                                except:
                                    pass

                    # Forward
                    # CMD|3|3 = walk forward; CMD|3|0 presumably stops
                    # locomotion — confirm against the protocol capture.
                    print("\nWalking forward (1s)...")
                    await client.write_gatt_char(FFE1, b"CMD|3|3|$")
                    await asyncio.sleep(1.0)
                    await client.write_gatt_char(FFE1, b"CMD|3|0|$")

                    # Backward
                    print("Walking backward (1s)...")
                    await client.write_gatt_char(FFE1, b"CMD|3|7|$")
                    await asyncio.sleep(1.0)
                    await client.write_gatt_char(FFE1, b"CMD|3|0|$")

                    await client.stop_notify(FFE2)
                    print("\n✓ Locomotion test complete!")
                    return True

            asyncio.run(test_locomotion())

        except ImportError:
            print("BLE requires bleak: pip install bleak")
            sys.exit(1)
        except Exception as e:
            print(f"Test failed: {e}")
            sys.exit(1)

    else:
        print(f"Unknown action: {action}")
        print("Available actions: discover, connect, inspect, capture, analyze, pickup, test-locomotion, info")
        sys.exit(1)
747
+
748
+
749
def robot_list_command(archetype: Optional[str] = None):
    """
    Print a table of known robot types, optionally filtered by archetype.

    ate robot list
    ate robot list --archetype quadruped
    """
    entries = RobotRegistry.list_all()

    # Narrow to a single archetype when the caller asked for one.
    if archetype:
        entries = [entry for entry in entries if entry.archetype == archetype]

    print("Known Robot Types:\n")
    print(f"{'ID':<25} {'Name':<15} {'Manufacturer':<15} {'Archetype':<12}")
    print("-" * 70)

    for entry in entries:
        row = f"{entry.id:<25} {entry.name:<15} {entry.manufacturer:<15} {entry.archetype:<12}"
        print(row)

    print(f"\n{len(entries)} robot type(s)")
    print("\nFor detailed info: ate robot info <robot_type>")
770
+
771
+
772
def robot_info_command(robot_type: Optional[str] = None, profile_name: Optional[str] = None):
    """
    Show robot type or profile information.

    ate robot info hiwonder_mechdog
    ate robot info --profile my_mechdog
    """
    # Profile mode: describe a saved profile, then stop.
    if profile_name:
        prof = load_profile(profile_name)
        if not prof:
            print(f"Profile not found: {profile_name}")
            sys.exit(1)

        print(f"Profile: {prof.name}")
        print(f"Robot Type: {prof.robot_type}")
        print(f"Serial Port: {prof.serial_port or '(not set)'}")
        print(f"Camera IP: {prof.camera_ip or '(not set)'}")
        print(f"Has Camera: {prof.has_camera}")
        print(f"Has Arm: {prof.has_arm}")

        # Append registry capabilities when the profile's type is recognized.
        spec = KNOWN_ROBOTS.get(prof.robot_type)
        if spec is not None:
            print(f"\nCapabilities: {', '.join(spec.capabilities)}")
            if spec.optional_capabilities:
                print(f"Optional: {', '.join(spec.optional_capabilities)}")

        return

    # Type mode requires an explicit robot_type argument.
    if not robot_type:
        print("Usage: ate robot info <robot_type>")
        print(" or: ate robot info --profile <profile_name>")
        sys.exit(1)

    spec = KNOWN_ROBOTS.get(robot_type)
    if not spec:
        print(f"Unknown robot type: {robot_type}")
        print("\nAvailable types:")
        for known in KNOWN_ROBOTS.values():
            print(f" {known.id}")
        sys.exit(1)

    # Header and identity.
    print(f"\n{spec.name}")
    print("=" * 40)
    print(f"ID: {spec.id}")
    print(f"Manufacturer: {spec.manufacturer}")
    print(f"Archetype: {spec.archetype}")
    print(f"\nDescription: {spec.description}")

    # Connectivity details.
    print(f"\nConnection Types: {', '.join(c.name for c in spec.connection_types)}")
    if spec.serial_patterns:
        print(f"Serial Patterns: {', '.join(spec.serial_patterns)}")
    if spec.default_ports:
        print(f"Default Ports: {spec.default_ports}")

    # Required then optional capabilities, each alphabetized.
    print(f"\nCapabilities:")
    for cap in sorted(spec.capabilities):
        print(f" ✓ {cap}")

    if spec.optional_capabilities:
        print(f"\nOptional Capabilities:")
        for cap in sorted(spec.optional_capabilities):
            print(f" ○ {cap}")

    print(f"\nDriver: {spec.driver_module}.{spec.driver_class}")

    if spec.setup_url:
        print(f"\nDocumentation: {spec.setup_url}")
841
+
842
+
843
def robot_setup_command(
    port: Optional[str] = None,
    camera_ip: Optional[str] = None,
    robot_type: str = "hiwonder_mechdog",
    name: Optional[str] = None,
    non_interactive: bool = False,
):
    """
    Interactive setup wizard.

    Discovers serial robots and network cameras, builds a RobotProfile via
    create_profile_from_discovery(), and saves it (save_profile reports
    ~/.ate/robots/<name>.json).

    Args:
        port: Serial port to use; when omitted, scans and selects one
            (interactively, or automatically in non-interactive mode).
        camera_ip: Network camera IP; when omitted, scans for cameras.
        robot_type: Robot type id; may be overwritten by a discovery hint.
        name: Profile name; prompted for (or derived from robot_type)
            when omitted.
        non_interactive: Skip all prompts, auto-select discovered devices,
            and save without confirmation.

    Exits with status 1 when the profile cannot be saved.

    ate robot setup
    ate robot setup --port /dev/cu.usbserial-10 --camera-ip 192.168.1.100
    """
    print("Robot Setup Wizard")
    print("=" * 40)
    print()

    # Step 1: Discover devices if not specified
    if not port:
        print("Scanning for robots...")
        serial_robots = discover_serial_robots()

        if serial_robots:
            print("\nFound serial devices:")
            for i, robot in enumerate(serial_robots):
                type_hint = f" [{robot.robot_type}]" if robot.robot_type else ""
                print(f" [{i+1}] {robot.port}{type_hint}")

            if not non_interactive:
                try:
                    # Menu is 1-based; Enter (empty input) skips selection.
                    choice = input("\nSelect device number (or press Enter to skip): ")
                    if choice.strip():
                        idx = int(choice) - 1
                        if 0 <= idx < len(serial_robots):
                            port = serial_robots[idx].port
                            # Prefer the type identified during discovery.
                            if serial_robots[idx].robot_type:
                                robot_type = serial_robots[idx].robot_type
                except (ValueError, KeyboardInterrupt):
                    # Bad number or Ctrl+C: continue setup without a port.
                    pass
            elif serial_robots:
                # Auto-select first identified robot in non-interactive mode
                for robot in serial_robots:
                    if robot.robot_type:
                        port = robot.port
                        robot_type = robot.robot_type
                        break
        else:
            print("No serial devices found.")

    # Step 2: Discover cameras if not specified
    if not camera_ip:
        print("\nScanning for network cameras (this may take a few seconds)...")
        cameras = discover_network_cameras(timeout=1.5)

        if cameras:
            print("\nFound network cameras:")
            for i, cam in enumerate(cameras):
                print(f" [{i+1}] {cam.ip}")

            if not non_interactive:
                try:
                    choice = input("\nSelect camera number (or press Enter to skip): ")
                    if choice.strip():
                        idx = int(choice) - 1
                        if 0 <= idx < len(cameras):
                            camera_ip = cameras[idx].ip
                except (ValueError, KeyboardInterrupt):
                    pass
            elif cameras:
                # Non-interactive: take the first camera found.
                camera_ip = cameras[0].ip
        else:
            print("No network cameras found.")

    # Step 3: Get name
    if not name:
        if non_interactive:
            # e.g. robot_type "hiwonder_mechdog" -> "robot_mechdog"
            name = f"robot_{robot_type.split('_')[-1]}"
        else:
            default_name = f"my_{robot_type.split('_')[-1]}"
            name = input(f"\nProfile name [{default_name}]: ").strip() or default_name

    # Step 4: Create profile
    profile = create_profile_from_discovery(
        name=name,
        serial_port=port,
        camera_ip=camera_ip,
        robot_type=robot_type,
    )

    # Show summary
    print("\n" + "=" * 40)
    print("Configuration Summary")
    print("=" * 40)
    print(f"Name: {profile.name}")
    print(f"Robot Type: {profile.robot_type}")
    print(f"Serial Port: {profile.serial_port or '(not set)'}")
    print(f"Camera IP: {profile.camera_ip or '(not set)'}")
    print(f"Has Camera: {profile.has_camera}")

    # Step 5: Save profile
    if not non_interactive:
        # Default answer is yes; only an explicit 'n'/'N' aborts the save.
        confirm = input("\nSave this profile? [Y/n]: ")
        if confirm.lower() == 'n':
            print("Profile not saved.")
            return

    if save_profile(profile):
        print(f"\nProfile saved to ~/.ate/robots/{profile.name}.json")
        print(f"\nTo use this robot:")
        print(f" ate record demo --profile {profile.name}")
        print(f"\nTo view capabilities:")
        print(f" ate robot test {profile.name}")
    else:
        print("Failed to save profile.")
        sys.exit(1)
958
+
959
+
960
def robot_test_command(profile_name: str, capability: Optional[str] = None, verbose: bool = False):
    """
    Test robot capabilities.

    Loads the named profile, connects through the RobotManager, then either
    runs the checks for one requested capability or enumerates and
    smoke-tests every capability. Always disconnects on the way out.

    ate robot test my_mechdog
    ate robot test my_mechdog --capability camera
    """
    profile = load_profile(profile_name)
    if not profile:
        print(f"Profile not found: {profile_name}")
        sys.exit(1)

    print(f"Testing robot: {profile.name}")
    print(f"Type: {profile.robot_type}")
    print()

    # Bring the robot up through the manager so all probes run against a
    # live connection.
    mgr = RobotManager()
    if not mgr.load(profile_name):
        print("Failed to load robot configuration")
        sys.exit(1)

    print("Connecting...")
    if not mgr.connect(profile_name):
        print("Connection failed")
        sys.exit(1)

    print("Connected!\n")

    try:
        robot = mgr.get(profile_name)

        if capability:
            # Single-capability mode: run its checks and report each one.
            outcome = test_capability(robot, capability)
            print(f"Testing {capability}...")
            for check in outcome.get("tests", []):
                mark = "✓" if check["status"] == "passed" else "✗"
                print(f" {mark} {check['name']}: {check.get('message', check.get('error', ''))}")
            print(f"\nOverall: {outcome['status'].upper()}")
        else:
            # Full sweep: list capabilities, optionally dump methods, then
            # run a quick test per capability.
            caps = get_capabilities(robot)
            print("Available Capabilities:")
            print("-" * 40)
            for cap_label, cap_info in caps.items():
                print(f" ✓ {cap_label} ({len(cap_info.methods)} methods)")

            if verbose:
                print("\nMethods:")
                print(format_methods(get_methods(robot)))

            print("\nRunning capability tests...")
            for cap_label in caps:
                outcome = test_capability(robot, cap_label)
                mark = "✓" if outcome["status"] == "passed" else "✗"
                print(f" {mark} {cap_label}: {outcome['status']}")

    finally:
        mgr.disconnect(profile_name)
        print("\nDisconnected.")
1025
+
1026
+
1027
def robot_approach_command(
    profile_name: str,
    duration: float = 30.0,
    target_distance: float = 0.15,
    speed: float = 0.3,
    detect_colors: Optional[list] = None,
    dry_run: bool = False,
):
    """
    Approach detected targets.

    Scans (center/left/right) for a target with TrashDetector, then drives
    the ApproachTarget behavior tree until the target is reached, the
    behavior fails, `duration` elapses, or the user presses Ctrl+C.

    Args:
        profile_name: Saved robot profile to load and connect.
        duration: Time budget in seconds for both the scan and the approach
            phases (each phase gets up to `duration`).
        target_distance: Stop distance from the target, in meters.
        speed: Approach speed passed to ApproachConfig.
        detect_colors: Optional color filter list (currently only echoed;
            NOTE(review): not passed into TrashDetector — confirm intent).
        dry_run: Scan and report a target, but do not move toward it.

    ate robot approach my_mechdog
    ate robot approach my_mechdog --duration 60 --target-distance 0.2
    """
    import time
    from ..behaviors import ApproachTarget, ApproachConfig, BehaviorStatus
    from ..behaviors.tree import Blackboard
    from ..detection.trash_detector import TrashDetector
    from ..interfaces import VisualDistanceEstimator

    profile = load_profile(profile_name)
    if not profile:
        print(f"Profile not found: {profile_name}")
        sys.exit(1)

    print(f"Approach Mode: {profile.name}")
    print(f"Target distance: {target_distance}m")
    print(f"Speed: {speed}")
    print()

    # Create robot instance
    manager = RobotManager()
    managed = manager.load(profile_name)

    if not managed:
        print("Failed to load robot configuration")
        sys.exit(1)

    # Connect
    print("Connecting...")
    if not manager.connect(profile_name):
        print("Connection failed")
        sys.exit(1)

    print("Connected!\n")

    try:
        robot = manager.get(profile_name)
        info = robot.get_info()
        print(f"Capabilities: {[c.name for c in info.capabilities]}")

        # Check for required capabilities (duck-typed: camera + locomotion).
        has_camera = hasattr(robot, 'get_image') and hasattr(robot, 'has_camera') and robot.has_camera()
        has_locomotion = hasattr(robot, 'walk') or hasattr(robot, 'drive')

        if not has_camera:
            print("Error: Robot does not have camera capability")
            sys.exit(1)

        if not has_locomotion:
            print("Error: Robot does not have locomotion capability")
            sys.exit(1)

        # Initialize detector
        detector = TrashDetector()
        if detect_colors:
            print(f"Detecting colors: {detect_colors}")

        # Initialize visual distance estimator; fall back to 640x480 when
        # the camera does not report a resolution.
        res = robot.get_resolution()
        visual_estimator = VisualDistanceEstimator(
            image_width=res[0] or 640,
            image_height=res[1] or 480,
        )

        # Configure approach behavior; slow down within 2x the stop distance.
        config = ApproachConfig(
            target_distance=target_distance,
            center_tolerance=0.15,
            approach_speed=speed,
            slow_distance=target_distance * 2,
            use_visual_distance=True,
        )

        # Stand ready
        if hasattr(robot, 'stand'):
            robot.stand()
            time.sleep(0.5)

        print("Scanning for targets...")
        start_time = time.time()

        # Scanning positions: (label, body pitch, body yaw) in radians
        # (presumably — TODO confirm units against set_body_orientation).
        scan_positions = [
            ("center", 0, 0),
            ("left", 0, 0.4),
            ("right", 0, -0.4),
        ]

        blackboard = Blackboard()
        target_found = False

        # Phase 1: sweep the scan positions until a detection or timeout.
        while time.time() - start_time < duration and not target_found:
            for name, pitch, yaw in scan_positions:
                if time.time() - start_time >= duration:
                    break

                print(f"\nLooking {name}...")
                if hasattr(robot, 'set_body_orientation'):
                    robot.set_body_orientation(pitch=pitch, yaw=yaw)
                    time.sleep(0.5)

                # Get image and detect
                image = robot.get_image()
                if not image.data:
                    continue

                detections = detector.detect(image)
                if not detections:
                    print(f" No targets")
                    continue

                # Found target! Use the first detection.
                target = detections[0]
                print(f" Target: {target.class_name} at ({target.bbox.x}, {target.bbox.y})")

                # Estimate distance from the bounding-box width.
                dist_reading = visual_estimator.estimate_from_detection(
                    bbox_width=int(target.bbox.width),
                    object_type=target.class_name,
                )
                print(f" Estimated distance: {dist_reading.distance:.2f}m")

                # Store in blackboard for the ApproachTarget behavior.
                blackboard.set("target_detection", target)
                blackboard.set("detections", detections)
                target_found = True
                break

        if not target_found:
            print("\nNo targets found during scan period.")
            if hasattr(robot, 'set_body_orientation'):
                robot.set_body_orientation(0, 0, 0)
            return

        # Reset pose for approach
        if hasattr(robot, 'set_body_orientation'):
            robot.set_body_orientation(0, 0, 0)
            time.sleep(0.3)

        if dry_run:
            print("\n[DRY RUN] Would approach target but stopping here.")
            return

        # Create approach behavior; distance sensor is optional.
        distance_sensor = robot if hasattr(robot, 'get_distance') else None

        approach = ApproachTarget(
            locomotion=robot,
            camera=robot,
            distance_sensor=distance_sensor,
            config=config,
        )
        approach.blackboard = blackboard

        print("\n--- Starting Approach ---")
        print("Press Ctrl+C to stop\n")

        # Phase 2: tick the behavior at ~5 Hz with fresh detections.
        try:
            approach_start = time.time()
            while time.time() - approach_start < duration:
                # Get fresh detection
                image = robot.get_image()
                if image.data:
                    detections = detector.detect(image)
                    if detections:
                        blackboard.set("target_detection", detections[0])
                        blackboard.set("detections", detections)

                # Tick behavior
                status = approach.tick()

                state = blackboard.get("approach_state")
                distance = blackboard.get("target_distance")
                print(f" State: {state}, Distance: {distance:.2f}m" if distance else f" State: {state}")

                if status == BehaviorStatus.SUCCESS:
                    print("\n=== TARGET REACHED ===")
                    break
                elif status == BehaviorStatus.FAILURE:
                    print("\n=== APPROACH FAILED ===")
                    break

                time.sleep(0.2)

        except KeyboardInterrupt:
            print("\n\nStopping...")

        # Stop movement regardless of how the approach loop ended.
        if hasattr(robot, 'stop'):
            robot.stop()

    finally:
        manager.disconnect(profile_name)
        print("\nDisconnected.")
1232
+
1233
+
1234
def robot_profiles_command(action: str, name: Optional[str] = None, template: Optional[str] = None):
    """
    Manage saved profiles.

    Dispatches on `action`:
      list      - tabulate all saved profiles
      show      - show one profile (delegates to robot_info_command)
      delete    - remove a saved profile
      create    - create a profile, optionally from a template
      templates - list available profile templates

    Args:
        action: One of the actions above.
        name: Profile name (required for show/delete/create).
        template: Template name for `create`; a bare hiwonder_mechdog
            profile is created when omitted.

    ate robot profiles list
    ate robot profiles show my_mechdog
    ate robot profiles delete my_mechdog
    ate robot profiles create my_dog --template mechdog_camera
    """
    if action == "list":
        profiles = list_profiles()

        if not profiles:
            print("No saved profiles.")
            print("\nCreate one with: ate robot setup")
            return

        print("Saved Profiles:\n")
        print(f"{'Name':<20} {'Type':<25} {'Port':<25}")
        print("-" * 70)

        for p in profiles:
            port = p.serial_port or "(no port)"
            print(f"{p.name:<20} {p.robot_type:<25} {port:<25}")

    elif action == "show":
        if not name:
            print("Usage: ate robot profiles show <name>")
            sys.exit(1)
        robot_info_command(profile_name=name)

    elif action == "delete":
        if not name:
            print("Usage: ate robot profiles delete <name>")
            sys.exit(1)

        if delete_profile(name):
            print(f"Deleted profile: {name}")
        else:
            print(f"Profile not found: {name}")
            sys.exit(1)

    elif action == "create":
        if not name:
            print("Usage: ate robot profiles create <name> [--template <template>]")
            sys.exit(1)

        if template:
            profile = get_template(template)
            if not profile:
                print(f"Template not found: {template}")
                print(f"Available templates: {', '.join(list_templates())}")
                sys.exit(1)
            profile.name = name
        else:
            profile = RobotProfile(name=name, robot_type="hiwonder_mechdog")

        if save_profile(profile):
            print(f"Created profile: {name}")
            # Fixed: profiles are saved as .json (see the setup wizard's
            # "Profile saved to ~/.ate/robots/<name>.json" message); this
            # previously pointed the user at a nonexistent .yaml file.
            print(f"Edit at: ~/.ate/robots/{name}.json")
        else:
            print("Failed to create profile")
            sys.exit(1)

    elif action == "templates":
        print("Available Templates:\n")
        for tname in list_templates():
            tmpl = get_template(tname)
            if tmpl:
                print(f" {tname}")
                print(f" {tmpl.description}")
                print()

    else:
        print(f"Unknown action: {action}")
        print("Available actions: list, show, delete, create, templates")
        sys.exit(1)
1311
+
1312
+
1313
def robot_calibrate_command(
    action: str,
    port: Optional[str] = None,
    camera_url: Optional[str] = None,
    name: str = "robot",
    robot_type: str = "unknown",
    servo_id: Optional[int] = None,
    pose_name: Optional[str] = None,
):
    """
    Visual calibration for robot servos and poses.

    Dispatches on `action`:
      start     - full interactive calibration (VisualCalibrator)
      gripper   - quick open/closed calibration of one gripper servo
      direction - direction calibration via the "Twitch Test"
      status    - show calibration-state progress (all robots or one)
      list      - list saved calibrations
      poses     - list poses stored in a calibration
      record    - record a new pose onto an existing calibration
      apply     - replay a saved pose on the robot
      reset     - clear a robot's calibration state (with confirmation)

    Note: `name` defaults to "robot"; several branches treat that default
    as "no name given".

    ate robot calibrate start --port /dev/cu.usbserial-10 --name my_mechdog
    ate robot calibrate gripper --port /dev/cu.usbserial-10 --servo 11
    ate robot calibrate list
    ate robot calibrate poses my_mechdog
    """
    if action == "start":
        # Full interactive calibration
        if not port:
            # Try to discover
            serial_robots = discover_serial_robots()
            if serial_robots:
                port = serial_robots[0].port
                print(f"Auto-detected: {port}")
            else:
                print("No serial port specified and none discovered.")
                print("Usage: ate robot calibrate start --port /dev/cu.usbserial-10")
                sys.exit(1)

        calibrator = VisualCalibrator(
            serial_port=port,
            robot_model=robot_type,
            robot_name=name,
        )

        if camera_url:
            calibrator.set_camera(camera_url)

        try:
            calibration = calibrator.run_interactive()
            save_calibration(calibration)
            print(f"\nCalibration saved as: {name}")
        except KeyboardInterrupt:
            # User abort is not an error; nothing is saved.
            print("\nCalibration cancelled.")
        except Exception as e:
            print(f"\nCalibration failed: {e}")
            sys.exit(1)

    elif action == "gripper":
        # Quick gripper-only calibration
        if not port:
            serial_robots = discover_serial_robots()
            if serial_robots:
                port = serial_robots[0].port
            else:
                print("No serial port found. Use --port to specify.")
                sys.exit(1)

        sid = servo_id or 11  # Default gripper servo
        print(f"Quick gripper calibration on servo {sid}...")

        try:
            gripper = quick_gripper_calibration(port, gripper_servo_id=sid)
            print(f"\nGripper calibrated:")
            print(f" Open: {gripper.positions.get('open', 'N/A')}")
            print(f" Closed: {gripper.positions.get('closed', 'N/A')}")
            print(f" Range: {gripper.min_value} - {gripper.max_value}")
        except Exception as e:
            print(f"Calibration failed: {e}")
            sys.exit(1)

    elif action == "list":
        # List saved calibrations
        cals = list_calibrations()
        if not cals:
            print("No calibrations saved.")
            print("\nRun: ate robot calibrate start --port <port> --name <name>")
            return

        print("Saved Calibrations:\n")
        for cal_name in cals:
            cal = load_calibration(cal_name)
            if cal:
                servo_count = len(cal.servos)
                pose_count = len(cal.poses)
                print(f" {cal_name}")
                print(f" Model: {cal.robot_model}, Servos: {servo_count}, Poses: {pose_count}")
            else:
                print(f" {cal_name} (error loading)")

    elif action == "poses":
        # Show poses for a calibration; the default "robot" means no name.
        if not name or name == "robot":
            print("Usage: ate robot calibrate poses <calibration_name>")
            sys.exit(1)

        cal = load_calibration(name)
        if not cal:
            print(f"Calibration not found: {name}")
            sys.exit(1)

        if not cal.poses:
            print(f"No poses saved for {name}")
            return

        print(f"Poses for {name}:\n")
        for pose_name, pose in cal.poses.items():
            print(f" {pose_name}")
            if pose.description:
                print(f" {pose.description}")
            print(f" Servos: {len(pose.servo_positions)}, Time: {pose.transition_time_ms}ms")

    elif action == "record":
        # Record a new pose onto an existing calibration.
        if not name or name == "robot":
            print("Usage: ate robot calibrate record <calibration_name> --pose <pose_name>")
            sys.exit(1)

        if not pose_name:
            print("Usage: ate robot calibrate record <name> --pose <pose_name>")
            sys.exit(1)

        cal = load_calibration(name)
        if not cal:
            print(f"Calibration not found: {name}")
            sys.exit(1)

        if not cal.serial_port:
            print("Calibration has no serial port configured.")
            sys.exit(1)

        # Rebuild a calibrator around the saved calibration so the new pose
        # is recorded into the same file.
        calibrator = VisualCalibrator(
            serial_port=cal.serial_port,
            robot_model=cal.robot_model,
            robot_name=cal.robot_name,
        )
        calibrator.calibration = cal

        if cal.camera_url:
            calibrator.set_camera(cal.camera_url)

        if not calibrator.connect():
            print("Failed to connect to robot")
            sys.exit(1)

        try:
            # Bootstrap the on-robot MechDog object before reading servos.
            calibrator.send_command("from HW_MechDog import MechDog; dog = MechDog()", wait=1.5)
            desc = input("Pose description (optional): ").strip()
            pose = calibrator.record_pose(pose_name, desc)
            save_calibration(calibrator.calibration)
            print(f"Recorded pose '{pose_name}' with {len(pose.servo_positions)} servos")
        finally:
            calibrator.disconnect()

    elif action == "apply":
        # Apply a saved pose
        if not name or name == "robot":
            print("Usage: ate robot calibrate apply <calibration_name> --pose <pose_name>")
            sys.exit(1)

        if not pose_name:
            print("Usage: ate robot calibrate apply <name> --pose <pose_name>")
            sys.exit(1)

        cal = load_calibration(name)
        if not cal:
            print(f"Calibration not found: {name}")
            sys.exit(1)

        pose = cal.get_pose(pose_name)
        if not pose:
            print(f"Pose not found: {pose_name}")
            print(f"Available poses: {', '.join(cal.poses.keys())}")
            sys.exit(1)

        if not cal.serial_port:
            print("Calibration has no serial port configured.")
            sys.exit(1)

        calibrator = VisualCalibrator(
            serial_port=cal.serial_port,
            robot_model=cal.robot_model,
            robot_name=cal.robot_name,
        )
        calibrator.calibration = cal

        if not calibrator.connect():
            print("Failed to connect to robot")
            sys.exit(1)

        try:
            # Bootstrap the on-robot MechDog object before moving servos.
            calibrator.send_command("from HW_MechDog import MechDog; dog = MechDog()", wait=1.5)
            print(f"Applying pose: {pose_name}")
            success = calibrator.apply_pose(pose)
            if success:
                print("Pose applied successfully")
            else:
                print("Some servos failed to move")
        finally:
            calibrator.disconnect()

    elif action == "direction":
        # Direction calibration via Twitch Test
        from .direction_calibration import run_direction_calibration
        # NOTE(review): CalibrationState appears unused in this branch.
        from .calibration_state import CalibrationState

        if not port:
            # Try to discover
            serial_robots = discover_serial_robots()
            if serial_robots:
                port = serial_robots[0].port
                print(f"Auto-detected: {port}")
            else:
                print("No serial port specified and none discovered.")
                print("Usage: ate robot calibrate direction --port /dev/cu.usbserial-10 --name my_robot")
                sys.exit(1)

        # Run direction calibration
        mappings = run_direction_calibration(
            serial_port=port,
            robot_name=name,
            arm_servos=[8, 9, 10, 11],  # MechDog arm servos
        )

        if mappings:
            print("\nDirection calibration complete!")
            print("You can now run behaviors with correct directions.")
        else:
            print("\nDirection calibration failed.")
            print("Ensure ball and arm markers are visible to webcam.")
            sys.exit(1)

    elif action == "status":
        # Show calibration status
        from .calibration_state import CalibrationState

        if not name or name == "robot":
            # No name given: summarize all calibration states.
            states = CalibrationState.list_all()
            if not states:
                print("No calibration states found.")
                print("\nStart with: ate robot calibrate start --name <robot_name>")
                return

            print("Calibration States:\n")
            for state_name in states:
                state = CalibrationState.load(state_name)
                current = state.get_current_stage()
                print(f" {state_name}: {current}")

            print("\nFor details: ate robot calibrate status <name>")
        else:
            # Show specific robot's status
            state = CalibrationState.load(name)
            state.print_status()

    elif action == "reset":
        # Reset calibration state (destructive; asks for confirmation).
        from .calibration_state import CalibrationState

        if not name or name == "robot":
            print("Usage: ate robot calibrate reset <name>")
            sys.exit(1)

        state = CalibrationState.load(name)
        confirm = input(f"Reset all calibration for '{name}'? [y/N]: ")
        if confirm.lower() == 'y':
            state.reset()
            print(f"Calibration reset for: {name}")
        else:
            print("Cancelled.")

    else:
        print(f"Unknown action: {action}")
        print("Available actions: start, gripper, direction, status, list, poses, record, apply, reset")
        sys.exit(1)
1590
+
1591
+
1592
def robot_label_command(
    port: Optional[str] = None,
    name: str = "robot",
    robot_type: str = "unknown",
    webcam_id: int = 0,
    camera_url: Optional[str] = None,
):
    """
    Interactive visual labeling session.

    Resolves a serial port (auto-discovering one when not given) and hands
    off to visual_label_command with all arguments forwarded.

    ate robot label --port /dev/cu.usbserial-10 --name my_mechdog
    ate robot label --port /dev/cu.usbserial-10 --camera http://192.168.50.98:80/capture
    """
    if not port:
        # No port supplied: fall back to discovery, or bail with usage help.
        discovered = discover_serial_robots()
        if not discovered:
            print("No serial port specified and none discovered.")
            print("Usage: ate robot label --port /dev/cu.usbserial-10")
            sys.exit(1)
        port = discovered[0].port
        print(f"Auto-detected: {port}")

    visual_label_command(
        port=port,
        name=name,
        robot_type=robot_type,
        webcam_id=webcam_id,
        camera_url=camera_url,
    )
1623
+
1624
+
1625
def robot_skills_command(
    action: str,
    name: Optional[str] = None,
    skill_name: Optional[str] = None,
):
    """
    Manage generated robot skills.

    Dispatches on `action`:
      list   - list saved skill libraries
      show   - show a library's servos, joint groups, and actions
      export - write generated skill code to ~/.ate/skills/<name>/

    Args:
        action: One of "list", "show", "export".
        name: Skill-library name (required for show/export).
        skill_name: Single action to export; all actions when omitted.

    ate robot skills list
    ate robot skills show my_mechdog
    ate robot skills export my_mechdog pickup
    """
    if action == "list":
        libraries = list_skill_libraries()
        if not libraries:
            print("No skill libraries saved.")
            print("\nRun: ate robot label --port <port> --name <name>")
            return

        print("Saved Skill Libraries:\n")
        for lib_name in libraries:
            lib = load_skill_library(lib_name)
            if lib:
                action_count = len(lib.actions)
                print(f" {lib_name}")
                print(f" Model: {lib.robot_model}, Actions: {action_count}")
            else:
                print(f" {lib_name} (error loading)")

    elif action == "show":
        if not name:
            print("Usage: ate robot skills show <library_name>")
            sys.exit(1)

        lib = load_skill_library(name)
        if not lib:
            print(f"Skill library not found: {name}")
            sys.exit(1)

        print(f"\nSkill Library: {lib.robot_name}")
        print(f"Model: {lib.robot_model}")
        print(f"Created: {lib.created_at}")

        if lib.servo_labels:
            print(f"\nLabeled Servos:")
            for sid, label in lib.servo_labels.items():
                print(f" Servo {sid}: {label}")

        if lib.joint_groups:
            print(f"\nJoint Groups:")
            for group, ids in lib.joint_groups.items():
                print(f" {group}: {ids}")

        if lib.actions:
            print(f"\nActions:")
            for action_name, act in lib.actions.items():
                print(f" {action_name}: {len(act.steps)} steps ({act.action_type.value})")
                if act.description:
                    print(f" {act.description}")

    elif action == "export":
        if not name:
            print("Usage: ate robot skills export <library_name> [skill_name]")
            sys.exit(1)

        lib = load_skill_library(name)
        if not lib:
            print(f"Skill library not found: {name}")
            sys.exit(1)

        # The calibration supplies the serial port and model needed by the
        # code generator.
        cal = load_calibration(name)
        if not cal:
            print(f"No calibration found for: {name}")
            sys.exit(1)

        labeler = DualCameraLabeler(
            serial_port=cal.serial_port or "",
            robot_name=name,
            robot_model=cal.robot_model,
        )
        labeler.calibrator.calibration = cal
        labeler.library = lib

        from pathlib import Path
        skills_dir = Path.home() / ".ate" / "skills" / name
        skills_dir.mkdir(parents=True, exist_ok=True)

        def _export_action(act_name, act):
            # Generate and write one skill file. Encoding is pinned to
            # UTF-8 so exports don't depend on the platform's locale
            # (the previous open(path, 'w') used the locale default).
            code = labeler.generate_skill_code(act)
            path = skills_dir / f"{act_name}.py"
            path.write_text(code, encoding="utf-8")
            print(f"Exported: {path}")

        if skill_name:
            # Export single skill
            act = lib.actions.get(skill_name)
            if not act:
                print(f"Action not found: {skill_name}")
                print(f"Available: {list(lib.actions.keys())}")
                sys.exit(1)
            _export_action(skill_name, act)
        else:
            # Export all skills
            for act_name, act in lib.actions.items():
                _export_action(act_name, act)

    else:
        print(f"Unknown action: {action}")
        print("Available actions: list, show, export")
        sys.exit(1)
1739
+
1740
+
1741
def robot_upload_command(
    name: str,
    project_id: Optional[str] = None,
    include_images: bool = True,
    dry_run: bool = False,
):
    """
    Upload skill library to FoodforThought.

    Creates artifacts with proper data lineage:
    raw (images) → processed (calibration) → labeled (poses) → skill (code)

    Args:
        name: Calibration / skill-library name to upload.
        project_id: Existing project to upload into; a new project is
            created when omitted.
        include_images: Also count/upload pose images from
            ~/.ate/skill_images/<name>.
        dry_run: Print what would be uploaded without making changes.

    ate robot upload mechdog
    ate robot upload mechdog --project-id abc123
    ate robot upload mechdog --no-images
    ate robot upload mechdog --dry-run
    """
    # Imported locally for consistency with robot_skills_command, which also
    # imports Path inside the function; this guards against a NameError if
    # the module top level never imports pathlib.
    from pathlib import Path

    # Check login status (token/config written by 'ate login').
    config_file = Path.home() / ".ate" / "config.json"
    if not config_file.exists():
        print("Not logged in. Run 'ate login' first.")
        sys.exit(1)

    # Load calibration
    cal = load_calibration(name)
    if not cal:
        print(f"No calibration found for: {name}")
        print("\nAvailable calibrations:")
        for cal_name in list_calibrations():
            print(f" - {cal_name}")
        sys.exit(1)

    # Load skill library
    lib = load_skill_library(name)
    if not lib:
        print(f"No skill library found for: {name}")
        print("Run 'ate robot label' first to create skills.")
        sys.exit(1)

    # Count images
    img_count = 0
    if include_images:
        img_dir = Path.home() / ".ate" / "skill_images" / name
        if img_dir.exists():
            img_count = len(list(img_dir.glob("*.jpg")))

    # Dry run preview
    if dry_run:
        print("=" * 60)
        print("DRY RUN - No changes will be made")
        print("=" * 60)
        print()
        print(f"Would upload skill library: {name}")
        print(f" Robot model: {lib.robot_model}")
        print(f" Target project: {project_id or '(new project)'}")
        print()
        print("Artifacts that would be created:")
        print()

        # Raw artifacts (images). img_dir is always bound here because
        # img_count > 0 implies the include_images branch above ran.
        if include_images and img_count > 0:
            print(f" [raw] {img_count} pose images")
            for img_path in list(img_dir.glob("*.jpg"))[:3]:
                print(f" - {img_path.name}")
            if img_count > 3:
                print(f" ... and {img_count - 3} more")

        # Processed artifact (calibration)
        print(f" [processed] {name}_calibration")
        print(f" - {len(cal.servos)} servos configured")

        # Labeled artifacts (poses); show at most 5.
        print(f" [labeled] {len(cal.poses)} poses:")
        for pose_name in list(cal.poses.keys())[:5]:
            print(f" - {pose_name}")
        if len(cal.poses) > 5:
            print(f" ... and {len(cal.poses) - 5} more")

        # Skill artifacts; show at most 5.
        print(f" [skill] {len(lib.actions)} skills:")
        for action_name in list(lib.actions.keys())[:5]:
            print(f" - {action_name}")
        if len(lib.actions) > 5:
            print(f" ... and {len(lib.actions) - 5} more")

        print()
        print("Total artifacts: ", end="")
        # images + 1 calibration + poses + skills
        total = img_count + 1 + len(cal.poses) + len(lib.actions)
        print(f"{total}")
        print()
        print("Run without --dry-run to perform the upload.")
        return

    print(f"Uploading skill library: {name}")
    print(f" Robot model: {lib.robot_model}")
    print(f" Actions: {len(lib.actions)}")
    print(f" Poses: {len(cal.poses)}")
    print(f" Servos: {len(cal.servos)}")
    if include_images and img_count > 0:
        print(f" Images: {img_count}")
    print()

    try:
        result = upload_skill_library(
            robot_name=name,
            project_id=project_id,
            include_images=include_images,
        )

        print("Upload complete!")
        print(f" Project ID: {result['project_id']}")
        print(f" Artifacts created: {len(result['artifacts'])}")
        print()
        print("Artifacts:")
        for art in result['artifacts']:
            print(f" [{art['stage']}] {art['name']}")

        print()
        print(f"View at: https://www.kindly.fyi/foodforthought/projects/{result['project_id']}")

    except ValueError as e:
        # Validation problems reported by the uploader.
        print(f"Error: {e}")
        sys.exit(1)
    except Exception as e:
        print(f"Upload failed: {e}")
        sys.exit(1)
1867
+
1868
+
1869
def robot_map_servos_command(
    port: Optional[str] = None,
    num_servos: int = 13,
    use_camera: bool = False,
    camera_index: int = 0,
    wifi_camera_ip: Optional[str] = None,
    output: Optional[str] = None,
    save_images: bool = False,
    upload: bool = False,
    project_id: Optional[str] = None,
) -> None:
    """
    Map all servos and generate primitive skills using LLM-guided exploration.

    ate robot map-servos --port /dev/cu.usbserial-10
    ate robot map-servos --port /dev/cu.usbserial-10 --camera --output ./my_robot
    ate robot map-servos --port /dev/cu.usbserial-10 --wifi-camera 192.168.4.1

    Args:
        port: Serial port of the robot; auto-discovered when omitted.
        num_servos: Number of servos to probe during mapping.
        use_camera: Enable local camera feedback during exploration.
        camera_index: OpenCV camera index when use_camera is set.
        wifi_camera_ip: Optional WiFi camera address instead of a local camera.
        output: Base path for the mapping output files (no extension).
        save_images: Persist captured frames alongside the mapping.
        upload: Push results to FoodforThought after mapping.
        project_id: Existing project to attach the upload to (optional).

    Exits with status 1 when no serial port can be found.
    """
    from .servo_mapper import run_servo_mapping

    if not port:
        # Try to discover
        serial_robots = discover_serial_robots()
        if serial_robots:
            # Use the first discovered robot's port.
            port = serial_robots[0].port
            print(f"Auto-detected: {port}")
        else:
            print("No serial port specified and none discovered.")
            print("Usage: ate robot map-servos --port /dev/cu.usbserial-10")
            sys.exit(1)

    # Default output name derives from the last path segment of the port
    # (e.g. /dev/cu.usbserial-10 -> servo_mappings_cu.usbserial-10).
    output_file = output or f"./servo_mappings_{port.split('/')[-1]}"

    results = run_servo_mapping(
        serial_port=port,
        num_servos=num_servos,
        use_camera=use_camera,
        camera_index=camera_index,
        wifi_camera_ip=wifi_camera_ip,
        use_proxy=True,  # Use metered LLM access
        output_file=output_file,
        save_images=save_images,
    )

    # Preview generated code, truncated so the terminal stays readable.
    if results.get("python_code"):
        print("\n" + "=" * 60)
        print("GENERATED PYTHON CODE:")
        print("=" * 60)
        print(results["python_code"][:2000])  # First 2000 chars
        if len(results["python_code"]) > 2000:
            print(f"\n... (see {output_file}.py for full code)")

    # Upload to FoodforThought if requested
    if upload:
        from .servo_mapper import upload_servo_mapping
        print("\n" + "=" * 60)
        print("UPLOADING TO FOODFORTHOUGHT")
        print("=" * 60)
        try:
            upload_result = upload_servo_mapping(
                results=results,
                output_file=output_file,
                project_id=project_id,
            )
            print(f"\nUpload complete!")
            print(f" Project ID: {upload_result['project_id']}")
            print(f" Artifacts created: {len(upload_result['artifacts'])}")
            for art in upload_result['artifacts']:
                print(f" [{art['stage']}] {art['name']}")
            print(f"\nView at: https://www.kindly.fyi/foodforthought/projects/{upload_result['project_id']}")
        except Exception as e:
            # Upload is best-effort: the mapping is already saved locally,
            # so report the failure and suggest a retry rather than exiting.
            print(f"Upload failed: {e}")
            print("Servo mapping saved locally. Run 'ate robot upload' to retry.")
1942
+
1943
+
1944
def robot_upload_calibration_command(
    calibration_file: str,
    robot_slug: Optional[str] = None,
    robot_name: Optional[str] = None,
    method: str = "llm_vision",
    publish: bool = False,
    dry_run: bool = False,
) -> None:
    """
    Upload a calibration to the community registry.

    ate robot upload-calibration ./servo_mappings.json --robot-slug mechdog-mini
    ate robot upload-calibration ./calibration.json --publish
    ate robot upload-calibration ./calibration.json --dry-run

    Args:
        calibration_file: Path to a JSON calibration file.
        robot_slug: Registry slug identifying the robot model; may instead
            be supplied as robot_slug/robot_id inside the file.
        robot_name: Accepted for CLI symmetry.
            NOTE(review): currently unused in this function body.
        method: Calibration method label stored with the upload.
        publish: Immediately publish instead of leaving as draft.
        dry_run: Print what would be uploaded and return without side effects.

    Exits with status 1 on missing file, missing robot identity, or upload error.
    """
    import json
    import requests
    from .skill_upload import SkillLibraryUploader

    # Load calibration file
    if not os.path.exists(calibration_file):
        print(f"Error: Calibration file not found: {calibration_file}")
        sys.exit(1)

    with open(calibration_file, 'r') as f:
        calibration_data = json.load(f)

    # Dry run preview
    if dry_run:
        print("=" * 60)
        print("DRY RUN - No changes will be made")
        print("=" * 60)
        print()
        print(f"Would upload calibration from: {calibration_file}")
        print()
        print("Calibration details:")
        print(f" Name: {calibration_data.get('name', f'Calibration from {calibration_file}')}")
        print(f" Version: {calibration_data.get('version', '1.0.0')}")
        print(f" Method: {method}")
        print(f" Robot: {robot_slug or calibration_data.get('robot_slug', 'Not specified')}")
        print()

        # Show what data would be uploaded
        if calibration_data.get("servos"):
            print(f" Servos: {len(calibration_data['servos'])}")
        if calibration_data.get("joint_mappings") or calibration_data.get("mappings"):
            # Either key name is accepted; prefer joint_mappings.
            mappings = calibration_data.get("joint_mappings") or calibration_data.get("mappings")
            print(f" Joint mappings: {len(mappings) if isinstance(mappings, (list, dict)) else 'present'}")
        if calibration_data.get("urdf"):
            print(f" URDF: included ({len(calibration_data['urdf'])} chars)")
        if calibration_data.get("python_code"):
            print(f" Python code: included ({len(calibration_data['python_code'])} chars)")

        print()
        print(f" Publish immediately: {publish}")
        print()
        print("Run without --dry-run to perform the upload.")
        return

    # Get API credentials
    try:
        uploader = SkillLibraryUploader()
    except Exception as e:
        print(f"Error initializing uploader: {e}")
        sys.exit(1)

    base_url = os.environ.get("FOODFORTHOUGHT_API_URL", "https://www.kindly.fyi")

    # Prepare calibration payload (JSON body uses camelCase API field names)
    payload = {
        "name": calibration_data.get("name", f"Calibration from {calibration_file}"),
        "description": calibration_data.get("description", "Community-contributed calibration"),
        "version": calibration_data.get("version", "1.0.0"),
        "method": method,
        "confidence": calibration_data.get("confidence", 0.8),
    }

    # Add robot identification: CLI flag wins, then file's slug, then file's id.
    if robot_slug:
        payload["robotSlug"] = robot_slug
    elif calibration_data.get("robot_slug"):
        payload["robotSlug"] = calibration_data["robot_slug"]
    elif calibration_data.get("robot_id"):
        payload["robotId"] = calibration_data["robot_id"]
    else:
        print("Error: Must specify --robot-slug or include robot_slug/robot_id in calibration file")
        sys.exit(1)

    # Add kinematic data (all optional; only forwarded when present)
    if calibration_data.get("urdf"):
        payload["urdfContent"] = calibration_data["urdf"]
    if calibration_data.get("dh_parameters"):
        payload["dhParameters"] = calibration_data["dh_parameters"]
    if calibration_data.get("joint_mappings") or calibration_data.get("mappings"):
        payload["jointMappings"] = calibration_data.get("joint_mappings") or calibration_data.get("mappings")
    if calibration_data.get("servo_mappings"):
        payload["servoMappings"] = calibration_data["servo_mappings"]
    if calibration_data.get("python_code"):
        payload["pythonCode"] = calibration_data["python_code"]
    if calibration_data.get("primitives"):
        payload["generatedPrimitives"] = calibration_data["primitives"]

    # Hardware fingerprint
    if calibration_data.get("firmware_version"):
        payload["firmwareVersion"] = calibration_data["firmware_version"]
    if calibration_data.get("port"):
        payload["serialPortName"] = calibration_data["port"]

    print("Uploading calibration to community registry...")
    print(f" Robot: {payload.get('robotSlug', payload.get('robotId', 'Unknown'))}")
    print(f" Method: {method}")
    print()

    try:
        # Use the uploader's pre-configured headers
        headers = uploader.headers
        response = requests.post(
            f"{base_url}/api/robot-calibrations",
            json=payload,
            headers=headers,
        )

        if not response.ok:
            # Parse error and provide helpful suggestions
            try:
                error_body = response.json()
            # NOTE(review): bare except - should be `except Exception:`
            except:
                error_body = {"raw": response.text}

            print(f"Error uploading calibration: {response.status_code} {response.reason}")

            error_msg = error_body.get("error") or error_body.get("message", "")
            if error_msg:
                print(f"Details: {error_msg}")

            # Provide specific suggestions based on error
            print("\nSuggestions:")
            if response.status_code == 404 and "robot" in str(error_body).lower():
                print(" - Robot slug may not exist in the database")
                print(" - Search for robots: ate robot identify --search <partial-name>")
                print(" - Try without --robot-slug to search by robot name")
                # Try to find similar robots
                if robot_slug:
                    print(f"\n Looking for robots similar to '{robot_slug}'...")
                    try:
                        search_resp = requests.get(
                            f"{base_url}/api/robots?search={robot_slug.replace('-', ' ')}",
                            headers=headers,
                        )
                        if search_resp.ok:
                            robots = search_resp.json().get("robots", [])
                            if robots:
                                print(" Found similar robots:")
                                for r in robots[:5]:
                                    print(f" - {r.get('slug')}: {r.get('name')}")
                    # NOTE(review): bare except silently drops network errors;
                    # acceptable for a best-effort suggestion, but should be
                    # narrowed to `except requests.RequestException:`.
                    except:
                        pass

            elif response.status_code == 400:
                print(" - Check calibration file contains required fields")
                print(" - Ensure robot_slug or robot_id is specified")
            elif response.status_code == 401:
                print(" - Session expired. Run: ate login")

            sys.exit(1)

        result = response.json()
    except requests.RequestException as e:
        print(f"Error uploading calibration: {e}")
        sys.exit(1)

    print("[OK] Calibration uploaded successfully!")
    print(f" ID: {result['id']}")
    print(f" Name: {result['name']}")
    print(f" Status: {result['status']}")
    print()

    if publish:
        # Flip the draft to published via a follow-up PATCH; a failure here
        # is non-fatal because the draft upload already succeeded.
        print("Publishing calibration...")
        try:
            publish_response = requests.patch(
                f"{base_url}/api/robot-calibrations/{result['id']}",
                json={"status": "published"},
                headers=headers,
            )
            publish_response.raise_for_status()
            print("[OK] Calibration published!")
        except requests.RequestException as e:
            print(f"Warning: Failed to publish: {e}")
            print("Calibration saved as draft. Publish manually when ready.")
    else:
        print("Calibration saved as draft. To publish:")
        print(f" ate robot publish-calibration {result['id']}")

    print()
    print("Other users can now find and use your calibration!")
    print("Thank you for contributing to the community.")
2141
+
2142
+
2143
def robot_identify_command(
    robot_slug: Optional[str] = None,
    robot_name: Optional[str] = None,
    port: Optional[str] = None,
) -> None:
    """
    Search the community calibration registry for matching calibrations.

    ate robot identify mechdog-mini
    ate robot identify --port /dev/cu.usbserial-10

    Args:
        robot_slug: Exact registry slug to filter by.
        robot_name: Free-text search term.
        port: Accepted for CLI symmetry.
            NOTE(review): currently unused in this function body.

    Exits with status 1 on unrecoverable network/API errors; returns
    normally (with guidance printed) when the registry is empty or the
    endpoint is not yet deployed.
    """
    import requests

    base_url = os.environ.get("FOODFORTHOUGHT_API_URL", "https://www.kindly.fyi")

    print("Searching community calibration registry...")
    print()

    # Build search query - only published calibrations are shown
    params = {"status": "published"}
    if robot_slug:
        params["robotSlug"] = robot_slug
    if robot_name:
        params["search"] = robot_name

    try:
        response = requests.get(f"{base_url}/api/robot-calibrations", params=params)
        response.raise_for_status()
        data = response.json()
    except requests.RequestException as e:
        # Handle 404 (endpoint not deployed yet) or network errors
        if hasattr(e, 'response') and e.response is not None and e.response.status_code == 404:
            print("The calibration registry is being set up.")
            print()
            print("Be the first to calibrate this robot!")
            print(" 1. Run: ate robot generate-markers")
            print(" 2. Print and attach markers to your robot")
            print(" 3. Run: ate robot calibrate --method aruco")
            print(" 4. Run: ate robot upload-calibration <file>")
            print()
            print("Your calibration will help everyone with this robot model.")
            return
        print(f"Error fetching calibrations: {e}")
        sys.exit(1)

    calibrations = data.get("calibrations", [])

    if not calibrations:
        # Empty result is not an error - encourage a first contribution.
        print("No matching calibrations found in the community registry.")
        print()
        print("Be the first to calibrate this robot!")
        print(" 1. Run: ate robot generate-markers")
        print(" 2. Print and attach markers to your robot")
        print(" 3. Run: ate robot calibrate --upload")
        print()
        print("Your calibration will help everyone with this robot model.")
        return

    print(f"Found {len(calibrations)} calibration(s):")
    print()

    # One numbered summary entry per calibration.
    for i, cal in enumerate(calibrations, 1):
        robot = cal.get("robot", {})
        author = cal.get("author", {})
        verifications = cal.get("_count", {}).get("verifications", 0)

        verified_badge = " [VERIFIED]" if cal.get("verified") else ""
        featured_badge = " [FEATURED]" if cal.get("featured") else ""

        print(f"{i}. {cal['name']}{verified_badge}{featured_badge}")
        print(f" Robot: {robot.get('name', 'Unknown')} ({robot.get('manufacturer', 'Unknown')})")
        print(f" Method: {cal['method']}")
        print(f" Confidence: {cal['confidence']:.0%}")
        print(f" Verified by: {verifications} user(s)")
        print(f" Author: {author.get('name', 'Unknown')}")
        print(f" ID: {cal['id']}")
        print()

    print("To download a calibration:")
    print(" ate robot download <calibration-id>")
    print()
    print("To verify a calibration works on your robot:")
    print(" ate robot verify <calibration-id>")
2226
+
2227
+
2228
def robot_generate_markers_command(
    output: str = "aruco_markers.pdf",
    count: int = 12,
    size: float = 30.0,
    robot_name: Optional[str] = None,
    page_size: str = "letter",
    preset: Optional[str] = None,
    list_presets: bool = False,
) -> None:
    """
    Generate printable ArUco markers for robot calibration.

    ate robot generate-markers
    ate robot generate-markers --count 6 --size 40
    ate robot generate-markers --robot-name "My MechDog" -o my_markers.pdf
    ate robot generate-markers --preset mechdog-mini

    Args:
        output: Destination PDF path.
        count: Number of markers when no preset is used.
        size: Uniform marker edge length in millimetres (no preset).
        robot_name: Label printed on the sheet; presets may supply one.
        page_size: "letter" selects US Letter, anything else selects A4.
        preset: Named robot preset with per-joint marker sizes.
        list_presets: Print available presets and return.

    Exits with status 1 on missing dependencies, unknown preset, or
    PDF generation failure.
    """
    try:
        from .marker_generator import generate_marker_pdf, list_presets as get_presets, get_preset_markers
        from reportlab.lib.pagesizes import LETTER, A4
    except ImportError as e:
        # Marker generation needs optional extras; explain how to install.
        print(f"Error: Missing dependencies for marker generation.")
        print(f"Install with: pip install opencv-contrib-python reportlab pillow")
        print(f"Details: {e}")
        sys.exit(1)

    # List presets and exit
    if list_presets:
        presets = get_presets()
        print("Available robot presets:")
        print()
        for p in presets:
            print(f" {p['name']:20} {p['robot_name']:30} ({p['marker_count']} markers)")
        print()
        print("Usage: ate robot generate-markers --preset mechdog-mini")
        return

    # Any page_size value other than "letter" falls back to A4.
    page = LETTER if page_size == "letter" else A4

    # Use preset if specified
    marker_specs = None
    if preset:
        try:
            marker_specs, preset_robot_name = get_preset_markers(preset)
            # Preset's robot name is only a fallback for an explicit one.
            if not robot_name:
                robot_name = preset_robot_name
            count = len(marker_specs)
            print(f"Using preset: {preset}")
            print(f" Robot: {robot_name}")
            print(f" Markers: {count} (with optimized sizes per joint)")
        except KeyError as e:
            print(f"Error: {e}")
            print("\nRun 'ate robot generate-markers --list-presets' to see available presets.")
            sys.exit(1)
    else:
        print(f"Generating {count} ArUco markers...")
        print(f" Size: {size}mm (uniform)")

    print(f" Page: {page_size}")
    print(f" Output: {output}")
    print()

    try:
        if marker_specs:
            # Use preset specs with variable sizes
            result = generate_marker_pdf(
                output_path=output,
                marker_specs=marker_specs,
                page_size=page,
                robot_name=robot_name,
                include_instructions=True,
            )
        else:
            # Use uniform size
            result = generate_marker_pdf(
                output_path=output,
                count=count,
                size_mm=size,
                page_size=page,
                robot_name=robot_name,
                include_instructions=True,
            )

        print(f"[OK] Markers saved to: {result}")
        print()
        print("Next steps:")
        print(" 1. Print the PDF at 100% scale (do not 'fit to page')")
        print(" 2. Cut out markers along the dashed lines")
        print(" 3. Attach to your robot's moving segments")
        print(" 4. Run: ate robot calibrate --method aruco")
        print()
        print("Your calibration can be shared with the community!")
        print("First user to calibrate a robot model helps everyone else.")
    except Exception as e:
        # Surface the full traceback: PDF generation failures can come from
        # deep inside reportlab/opencv and the message alone rarely helps.
        print(f"Error generating markers: {e}")
        import traceback
        traceback.print_exc()
        sys.exit(1)
2326
+
2327
+
2328
def robot_urdf_command(
    action: str = "generate",
    reset: bool = False,
    frames: int = 20,
    camera: int = 0,
    marker_size: float = 0.025,
    output: Optional[str] = None,
    ble_address: Optional[str] = None,
    robot_name: str = "mechdog",
    urdf_file: Optional[str] = None,
    bridge: bool = False,
    launch: bool = False,
) -> None:
    """
    URDF generation and Artifex integration command.

    ate robot urdf generate # Generate URDF from markers
    ate robot urdf generate --reset # Reset robot pose first
    ate robot urdf open # Open in Artifex Desktop
    ate robot urdf info # Show URDF details

    Args:
        action: One of "generate", "open", "info".
        reset: Drive the robot to a known pose over BLE before capture.
        frames: Number of camera frames to average marker poses over.
        camera: OpenCV camera index.
        marker_size: Physical ArUco marker edge length in metres.
        output: URDF output path (generate); defaults to the shared file.
        ble_address: Explicit BLE address; otherwise scans for "MechDog".
        robot_name: Name embedded in the generated URDF.
        urdf_file: URDF to open/inspect (open/info); defaults likewise.
        bridge: Prefer syncing to Artifex via the ATE bridge server.
        launch: Fall back to launching Artifex if the bridge sync fails.

    Exits with status 1 on camera failure, missing URDF, or bad action.
    """
    import asyncio
    import cv2
    import numpy as np
    import re
    import subprocess
    from datetime import datetime
    # NOTE(review): the "generate" branch calls json.dump below but `json`
    # is not imported here; sibling commands import it locally. Presumably
    # it is imported at module top level - verify.

    # Default paths
    urdf_dir = Path(__file__).parent.parent.parent / "urdf_generation"
    urdf_dir.mkdir(exist_ok=True)
    default_urdf = urdf_dir / "mechdog_calibrated.urdf"

    if action == "generate":
        print("=" * 60)
        print("ATE URDF GENERATION")
        print("=" * 60)
        print(f"\nSettings:")
        print(f" Marker size: {marker_size * 1000:.0f}mm")
        print(f" Camera index: {camera}")
        print(f" Frames: {frames}")
        print(f" Robot name: {robot_name}")

        # ArUco setup
        ARUCO_DICT = cv2.aruco.getPredefinedDictionary(cv2.aruco.DICT_4X4_50)
        ARUCO_PARAMS = cv2.aruco.DetectorParameters()

        # Reset robot pose if requested
        if reset:
            print("\n--- Resetting Robot Pose ---")
            try:
                async def do_reset():
                    # Connect over BLE and drive a sit/stand/arm-home
                    # sequence so markers are captured in a known pose.
                    from bleak import BleakClient, BleakScanner

                    address = ble_address
                    if not address:
                        # Auto-discover MechDog
                        print("Scanning for MechDog...")
                        devices = await BleakScanner.discover(timeout=5.0)
                        for d in devices:
                            if d.name and "MechDog" in d.name:
                                address = d.address
                                print(f"Found: {d.name} ({address})")
                                break

                    if not address:
                        print("! MechDog not found - skipping reset")
                        return False

                    # HM-10 style UART characteristic UUID
                    FFE1 = "0000ffe1-0000-1000-8000-00805f9b34fb"

                    async with BleakClient(address) as client:
                        print(f"Connected to {address}")

                        async def send(cmd: str, delay: float = 0.5):
                            # Newline-terminated command, then wait for the
                            # motion to complete before the next one.
                            await client.write_gatt_char(FFE1, (cmd + "\n").encode())
                            await asyncio.sleep(delay)

                        # Reset sequence
                        print(" Sit...")
                        await send("CMD|2|1|4|$", 2.0)
                        print(" Stand...")
                        await send("CMD|2|1|3|$", 2.0)
                        print(" Arm home...")
                        await send("CMD|7|8|$", 1.5)
                        print(" Done")
                        return True

                asyncio.run(do_reset())
                print("Waiting for robot to settle...")
                import time
                time.sleep(2.0)

            except ImportError:
                print("! bleak not installed - install with: pip install bleak")
            except Exception as e:
                # Reset is best-effort; capture proceeds with current pose.
                print(f"! Reset failed: {e}")
                print(" Continuing with current pose...")

        # Open camera
        print("\n--- Capturing Markers ---")
        cap = cv2.VideoCapture(camera)
        if not cap.isOpened():
            print("Error: Could not open webcam")
            sys.exit(1)

        cap.set(cv2.CAP_PROP_FRAME_WIDTH, 1280)
        cap.set(cv2.CAP_PROP_FRAME_HEIGHT, 720)
        import time
        time.sleep(1)

        ret, frame = cap.read()
        if not ret:
            print("Error: Could not read from camera")
            sys.exit(1)

        # Approximate pinhole intrinsics from frame size (no calibration:
        # focal length guessed as 0.8 * width, zero distortion).
        h, w = frame.shape[:2]
        focal_length = w * 0.8
        camera_matrix = np.array([
            [focal_length, 0, w / 2],
            [0, focal_length, h / 2],
            [0, 0, 1]
        ], dtype=np.float32)
        dist_coeffs = np.zeros(5, dtype=np.float32)

        print(f"Camera: {w}x{h}")

        # Capture markers over multiple frames
        all_markers = {}
        detector = cv2.aruco.ArucoDetector(ARUCO_DICT, ARUCO_PARAMS)

        for i in range(frames):
            # Drain two buffered frames so each sample is fresh.
            for _ in range(2):
                cap.read()

            ret, frame = cap.read()
            if not ret:
                continue

            gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
            corners, ids, _ = detector.detectMarkers(gray)

            if ids is not None:
                for j, marker_id in enumerate(ids.flatten()):
                    # Per-marker pose estimate in camera coordinates.
                    rvecs, tvecs, _ = cv2.aruco.estimatePoseSingleMarkers(
                        [corners[j]], marker_size, camera_matrix, dist_coeffs
                    )
                    mid = int(marker_id)
                    if mid not in all_markers:
                        all_markers[mid] = {"positions": [], "rotations": [], "corners": corners[j][0].tolist()}
                    all_markers[mid]["positions"].append(tvecs[0][0].tolist())
                    all_markers[mid]["rotations"].append(rvecs[0][0].tolist())

            print(f" Frame {i+1}/{frames}: {len(all_markers)} markers", end='\r')

        print()
        cap.release()

        # Calculate averages
        for mid, data in all_markers.items():
            positions = np.array(data["positions"])
            rotations = np.array(data["rotations"])
            data["avg_position"] = positions.mean(axis=0).tolist()
            data["std_position"] = positions.std(axis=0).tolist()
            data["avg_rotation"] = rotations.mean(axis=0).tolist()
            data["detection_count"] = len(positions)

        print(f"\nDetected {len(all_markers)} markers: {sorted(all_markers.keys())}")

        # Save marker data
        marker_data = {
            "timestamp": datetime.now().isoformat(),
            "marker_size_m": marker_size,
            "markers": all_markers,
            "camera_matrix": camera_matrix.tolist(),
        }
        marker_json = urdf_dir / "marker_data.json"
        with open(marker_json, "w") as f:
            json.dump(marker_data, f, indent=2)
        print(f"Saved: {marker_json}")

        # Generate URDF
        print("\n--- Generating URDF ---")
        urdf_path = Path(output) if output else default_urdf
        _generate_urdf_from_markers(all_markers, urdf_path, robot_name)
        print(f"\nURDF saved to: {urdf_path}")
        print("\nTo open in Artifex Desktop:")
        print(f" ate robot urdf open")

    elif action == "open":
        # Resolve URDF file
        urdf_path = Path(urdf_file) if urdf_file else default_urdf

        if not urdf_path.exists():
            print(f"Error: URDF not found: {urdf_path}")
            print("\nGenerate one first:")
            print(" ate robot urdf generate")
            sys.exit(1)

        # Show info
        _show_urdf_info(urdf_path)

        if bridge:
            # Try bridge sync
            print("\n--- Syncing via ATE Bridge ---")
            success = asyncio.run(_sync_urdf_bridge(urdf_path))
            # Launch the desktop app only as a fallback when requested.
            if not success and launch:
                _launch_artifex(urdf_path)
        else:
            # Direct launch
            _launch_artifex(urdf_path)

    elif action == "info":
        urdf_path = Path(urdf_file) if urdf_file else default_urdf

        if not urdf_path.exists():
            print(f"Error: URDF not found: {urdf_path}")
            sys.exit(1)

        _show_urdf_info(urdf_path)

    else:
        print("Usage: ate robot urdf <generate|open|info>")
        sys.exit(1)
2552
+
2553
+
2554
+ def _generate_urdf_from_markers(markers: dict, output_path: Path, robot_name: str = "mechdog"):
2555
+ """Generate URDF from detected marker positions."""
2556
+ from datetime import datetime
2557
+
2558
+ detected = list(markers.keys())
2559
+
2560
+ # Find body reference marker
2561
+ body_marker = 0 if 0 in markers else 7 if 7 in markers else detected[0] if detected else None
2562
+ if not body_marker:
2563
+ print("Error: No markers detected!")
2564
+ return
2565
+
2566
+ import numpy as np
2567
+ body_pos = np.array(markers[body_marker]["avg_position"])
2568
+
2569
+ # Calculate relative positions
2570
+ relative_positions = {}
2571
+ for mid, data in markers.items():
2572
+ pos = np.array(data["avg_position"])
2573
+ relative_positions[mid] = (pos - body_pos).tolist()
2574
+
2575
+ # Build URDF
2576
+ urdf = f'''<?xml version="1.0"?>
2577
+ <robot name="{robot_name}" xmlns:xacro="http://www.ros.org/wiki/xacro">
2578
+ <!--
2579
+ {robot_name.upper()} URDF - Generated by ATE CLI
2580
+ Date: {datetime.now().isoformat()}
2581
+ Markers detected: {detected}
2582
+
2583
+ Generated with: ate robot urdf generate
2584
+ -->
2585
+
2586
+ <!-- Materials -->
2587
+ <material name="orange"><color rgba="1.0 0.5 0.0 1.0"/></material>
2588
+ <material name="black"><color rgba="0.1 0.1 0.1 1.0"/></material>
2589
+ <material name="gray"><color rgba="0.5 0.5 0.5 1.0"/></material>
2590
+
2591
+ <!-- Base Link -->
2592
+ <link name="base_link">
2593
+ <visual><geometry><box size="0.01 0.01 0.01"/></geometry></visual>
2594
+ </link>
2595
+
2596
+ <!-- Body -->
2597
+ <link name="body">
2598
+ <visual>
2599
+ <geometry><box size="0.18 0.10 0.06"/></geometry>
2600
+ <material name="orange"/>
2601
+ </visual>
2602
+ <collision><geometry><box size="0.18 0.10 0.06"/></geometry></collision>
2603
+ <inertial>
2604
+ <mass value="0.8"/>
2605
+ <inertia ixx="0.002" ixy="0" ixz="0" iyy="0.003" iyz="0" izz="0.002"/>
2606
+ </inertial>
2607
+ </link>
2608
+
2609
+ <joint name="body_joint" type="fixed">
2610
+ <parent link="base_link"/>
2611
+ <child link="body"/>
2612
+ <origin xyz="0 0 0.12"/>
2613
+ </joint>
2614
+
2615
+ <!-- Arm -->
2616
+ '''
2617
+ # Arm configuration
2618
+ arm_offset = "0.09 0 0.02"
2619
+ if 1 in markers:
2620
+ rel = relative_positions[1]
2621
+ arm_offset = f"{rel[0]:.4f} {rel[1]:.4f} {rel[2] + 0.02:.4f}"
2622
+
2623
+ urdf += f''' <link name="arm_link1">
2624
+ <visual>
2625
+ <origin xyz="0.035 0 0"/>
2626
+ <geometry><box size="0.07 0.03 0.03"/></geometry>
2627
+ <material name="gray"/>
2628
+ </visual>
2629
+ <inertial><mass value="0.08"/><inertia ixx="0.0001" ixy="0" ixz="0" iyy="0.0001" iyz="0" izz="0.0001"/></inertial>
2630
+ </link>
2631
+
2632
+ <joint name="arm_shoulder_joint" type="revolute">
2633
+ <parent link="body"/>
2634
+ <child link="arm_link1"/>
2635
+ <origin xyz="{arm_offset}"/>
2636
+ <axis xyz="0 1 0"/>
2637
+ <limit lower="-1.57" upper="1.57" effort="5.0" velocity="2.0"/>
2638
+ </joint>
2639
+
2640
+ <link name="arm_link2">
2641
+ <visual>
2642
+ <origin xyz="0.04 0 0"/>
2643
+ <geometry><box size="0.08 0.025 0.025"/></geometry>
2644
+ <material name="gray"/>
2645
+ </visual>
2646
+ <inertial><mass value="0.06"/><inertia ixx="0.0001" ixy="0" ixz="0" iyy="0.0001" iyz="0" izz="0.0001"/></inertial>
2647
+ </link>
2648
+
2649
+ <joint name="arm_elbow_joint" type="revolute">
2650
+ <parent link="arm_link1"/>
2651
+ <child link="arm_link2"/>
2652
+ <origin xyz="0.07 0 0"/>
2653
+ <axis xyz="0 1 0"/>
2654
+ <limit lower="-1.57" upper="1.57" effort="3.0" velocity="2.0"/>
2655
+ </joint>
2656
+
2657
+ <link name="gripper_base">
2658
+ <visual><geometry><box size="0.03 0.04 0.02"/></geometry><material name="black"/></visual>
2659
+ <inertial><mass value="0.03"/><inertia ixx="0.00001" ixy="0" ixz="0" iyy="0.00001" iyz="0" izz="0.00001"/></inertial>
2660
+ </link>
2661
+
2662
+ <joint name="gripper_base_joint" type="fixed">
2663
+ <parent link="arm_link2"/>
2664
+ <child link="gripper_base"/>
2665
+ <origin xyz="0.08 0 0"/>
2666
+ </joint>
2667
+
2668
+ <link name="gripper_left_finger">
2669
+ <visual><origin xyz="0.015 0 0"/><geometry><box size="0.03 0.005 0.015"/></geometry><material name="black"/></visual>
2670
+ <inertial><mass value="0.01"/><inertia ixx="0.000001" ixy="0" ixz="0" iyy="0.000001" iyz="0" izz="0.000001"/></inertial>
2671
+ </link>
2672
+
2673
+ <joint name="gripper_left_joint" type="prismatic">
2674
+ <parent link="gripper_base"/>
2675
+ <child link="gripper_left_finger"/>
2676
+ <origin xyz="0.01 0.012 0"/>
2677
+ <axis xyz="0 1 0"/>
2678
+ <limit lower="-0.012" upper="0" effort="1.0" velocity="0.5"/>
2679
+ </joint>
2680
+
2681
+ <link name="gripper_right_finger">
2682
+ <visual><origin xyz="0.015 0 0"/><geometry><box size="0.03 0.005 0.015"/></geometry><material name="black"/></visual>
2683
+ <inertial><mass value="0.01"/><inertia ixx="0.000001" ixy="0" ixz="0" iyy="0.000001" iyz="0" izz="0.000001"/></inertial>
2684
+ </link>
2685
+
2686
+ <joint name="gripper_right_joint" type="prismatic">
2687
+ <parent link="gripper_base"/>
2688
+ <child link="gripper_right_finger"/>
2689
+ <origin xyz="0.01 -0.012 0"/>
2690
+ <axis xyz="0 -1 0"/>
2691
+ <limit lower="-0.012" upper="0" effort="1.0" velocity="0.5"/>
2692
+ </joint>
2693
+
2694
+ <!-- Legs -->
2695
+ '''
2696
+
2697
+ # Generate legs
2698
+ legs = [
2699
+ ("front_left", 0.07, 0.05, 13),
2700
+ ("front_right", 0.07, -0.05, 14),
2701
+ ("rear_left", -0.07, 0.05, 15),
2702
+ ("rear_right", -0.07, -0.05, 16),
2703
+ ]
2704
+
2705
+ for leg_name, x_def, y_def, marker_id in legs:
2706
+ x_off = relative_positions[marker_id][0] if marker_id in markers else x_def
2707
+ y_off = relative_positions[marker_id][1] if marker_id in markers else y_def
2708
+
2709
+ urdf += f''' <!-- {leg_name.replace("_", " ").title()} Leg -->
2710
+ <link name="{leg_name}_hip">
2711
+ <visual><geometry><cylinder radius="0.012" length="0.025"/></geometry><material name="gray"/></visual>
2712
+ <inertial><mass value="0.03"/><inertia ixx="0.00001" ixy="0" ixz="0" iyy="0.00001" iyz="0" izz="0.00001"/></inertial>
2713
+ </link>
2714
+ <joint name="{leg_name}_hip_joint" type="revolute">
2715
+ <parent link="body"/><child link="{leg_name}_hip"/>
2716
+ <origin xyz="{x_off:.4f} {y_off:.4f} -0.02" rpy="1.5708 0 0"/>
2717
+ <axis xyz="0 0 1"/><limit lower="-0.5" upper="0.5" effort="3.0" velocity="2.0"/>
2718
+ </joint>
2719
+
2720
+ <link name="{leg_name}_thigh">
2721
+ <visual><origin xyz="0 0 -0.03"/><geometry><box size="0.025 0.02 0.06"/></geometry><material name="orange"/></visual>
2722
+ <inertial><mass value="0.05"/><inertia ixx="0.00005" ixy="0" ixz="0" iyy="0.00005" iyz="0" izz="0.00001"/></inertial>
2723
+ </link>
2724
+ <joint name="{leg_name}_thigh_joint" type="revolute">
2725
+ <parent link="{leg_name}_hip"/><child link="{leg_name}_thigh"/>
2726
+ <origin xyz="0 0 -0.015" rpy="-1.5708 0 0"/>
2727
+ <axis xyz="0 1 0"/><limit lower="-1.57" upper="1.57" effort="5.0" velocity="2.0"/>
2728
+ </joint>
2729
+
2730
+ <link name="{leg_name}_shin">
2731
+ <visual><origin xyz="0 0 -0.025"/><geometry><box size="0.02 0.015 0.05"/></geometry><material name="orange"/></visual>
2732
+ <inertial><mass value="0.04"/><inertia ixx="0.00003" ixy="0" ixz="0" iyy="0.00003" iyz="0" izz="0.000005"/></inertial>
2733
+ </link>
2734
+ <joint name="{leg_name}_knee_joint" type="revolute">
2735
+ <parent link="{leg_name}_thigh"/><child link="{leg_name}_shin"/>
2736
+ <origin xyz="0 0 -0.06"/><axis xyz="0 1 0"/><limit lower="-2.5" upper="0" effort="4.0" velocity="2.0"/>
2737
+ </joint>
2738
+
2739
+ <link name="{leg_name}_foot">
2740
+ <visual><geometry><sphere radius="0.012"/></geometry><material name="black"/></visual>
2741
+ <collision><geometry><sphere radius="0.012"/></geometry></collision>
2742
+ <inertial><mass value="0.01"/><inertia ixx="0.000001" ixy="0" ixz="0" iyy="0.000001" iyz="0" izz="0.000001"/></inertial>
2743
+ </link>
2744
+ <joint name="{leg_name}_foot_joint" type="fixed">
2745
+ <parent link="{leg_name}_shin"/><child link="{leg_name}_foot"/>
2746
+ <origin xyz="0 0 -0.05"/>
2747
+ </joint>
2748
+
2749
+ '''
2750
+
2751
+ urdf += "</robot>\n"
2752
+
2753
+ # Write URDF
2754
+ output_path.parent.mkdir(parents=True, exist_ok=True)
2755
+ with open(output_path, "w") as f:
2756
+ f.write(urdf)
2757
+
2758
+ # Count elements
2759
+ import re
2760
+ links = len(re.findall(r'<link\s+name=', urdf))
2761
+ joints = len(re.findall(r'<joint\s+name=', urdf))
2762
+ print(f"Generated: {links} links, {joints} joints")
2763
+
2764
+
2765
+ def _show_urdf_info(urdf_path: Path):
2766
+ """Display information about a URDF file."""
2767
+ import re
2768
+
2769
+ with open(urdf_path, 'r') as f:
2770
+ content = f.read()
2771
+
2772
+ robot_match = re.search(r'<robot\s+name="([^"]+)"', content)
2773
+ robot_name = robot_match.group(1) if robot_match else "unknown"
2774
+
2775
+ joints = re.findall(r'<joint\s+name="([^"]+)"', content)
2776
+ links = re.findall(r'<link\s+name="([^"]+)"', content)
2777
+ revolute = len(re.findall(r'type="revolute"', content))
2778
+ prismatic = len(re.findall(r'type="prismatic"', content))
2779
+ fixed = len(re.findall(r'type="fixed"', content))
2780
+
2781
+ print(f"\n--- URDF Information ---")
2782
+ print(f"Robot: {robot_name}")
2783
+ print(f"File: {urdf_path}")
2784
+ print(f"Size: {urdf_path.stat().st_size} bytes")
2785
+ print(f"\nStructure:")
2786
+ print(f" Links: {len(links)}")
2787
+ print(f" Joints: {len(joints)}")
2788
+ print(f" - Revolute: {revolute}")
2789
+ print(f" - Prismatic: {prismatic}")
2790
+ print(f" - Fixed: {fixed}")
2791
+
2792
+
2793
async def _sync_urdf_bridge(urdf_path: Path) -> bool:
    """Sync a URDF file to Artifex via the local ATE bridge server.

    Connects to the bridge WebSocket on localhost:8765, sends the URDF
    content as a `sync_urdf` request, and waits for an acknowledgement.

    Args:
        urdf_path: Path to the URDF file to send.

    Returns:
        True if the bridge confirmed the sync, False on any failure
        (missing dependency, bridge not running, timeout, or error reply).
    """
    try:
        import websockets
    except ImportError:
        # Best-effort auto-install. Check the pip exit code: if the install
        # fails, return False like the other error paths instead of letting
        # the retry import raise an uncaught ImportError.
        print("Installing websockets...")
        import subprocess
        proc = subprocess.run([sys.executable, "-m", "pip", "install", "websockets", "-q"])
        if proc.returncode != 0:
            print("Failed to install websockets (try: pip install websockets)")
            return False
        import websockets

    try:
        with open(urdf_path, 'r') as f:
            urdf_content = f.read()

        # open_timeout bounds the initial handshake; wait_for bounds the reply.
        async with websockets.connect('ws://localhost:8765', open_timeout=5) as ws:
            print("Connected to ATE bridge server")

            request = {"id": 1, "action": "sync_urdf", "params": {"urdf": urdf_content}}
            await ws.send(json.dumps(request))
            response = await asyncio.wait_for(ws.recv(), timeout=10.0)
            result = json.loads(response)

            if "error" in result:
                print(f"Sync failed: {result['error']}")
                return False

            if result.get("result"):
                joints = result["result"].get("joints", [])
                print(f"URDF synced to Artifex! ({len(joints)} joints)")
                return True

    except ConnectionRefusedError:
        print("ATE bridge server not running")
        print(" Start with: python -m ate.bridge_server")
    except asyncio.TimeoutError:
        print("Connection timeout")
    except Exception as e:
        # Broad catch is deliberate here: this is a best-effort CLI helper
        # and any failure mode should degrade to a message + False.
        print(f"Error: {e}")

    return False
2833
+
2834
+
2835
+ def _launch_artifex(urdf_path: Optional[Path] = None) -> bool:
2836
+ """Launch Artifex Desktop with optional URDF file."""
2837
+ import subprocess
2838
+
2839
+ print("\n--- Launching Artifex Desktop ---")
2840
+
2841
+ # Find Artifex
2842
+ artifex_paths = [
2843
+ Path("/Applications/Artifex.app"),
2844
+ Path.home() / "Applications/Artifex.app",
2845
+ Path(__file__).parent.parent.parent.parent / "artifex-desktop/dist/mac-arm64/Artifex.app",
2846
+ Path(__file__).parent.parent.parent.parent / "artifex-desktop/dist/mac/Artifex.app",
2847
+ ]
2848
+
2849
+ artifex_app = None
2850
+ for p in artifex_paths:
2851
+ if p.exists():
2852
+ artifex_app = p
2853
+ break
2854
+
2855
+ if not artifex_app:
2856
+ print("Artifex Desktop not found")
2857
+ print("\nInstall from: https://kindly.fyi/artifex")
2858
+ if urdf_path:
2859
+ print(f"\nOr manually open: {urdf_path}")
2860
+ return False
2861
+
2862
+ print(f"Found: {artifex_app}")
2863
+
2864
+ try:
2865
+ if urdf_path:
2866
+ cmd = ["open", "-a", str(artifex_app), str(urdf_path.absolute())]
2867
+ print(f"Opening: {urdf_path.name}")
2868
+ else:
2869
+ cmd = ["open", str(artifex_app)]
2870
+
2871
+ subprocess.run(cmd, check=True)
2872
+ print("Artifex Desktop launched")
2873
+ return True
2874
+
2875
+ except subprocess.CalledProcessError as e:
2876
+ print(f"Launch failed: {e}")
2877
+ return False
2878
+
2879
+
2880
+ def register_robot_parser(subparsers):
2881
+ """
2882
+ Register the robot command with argparse.
2883
+
2884
+ Call this from the main CLI to add robot commands.
2885
+ """
2886
+ robot_parser = subparsers.add_parser("robot",
2887
+ help="Robot discovery, setup, and management",
2888
+ description="""Robot management commands.
2889
+
2890
+ Discover robots, set up configurations, and test capabilities.
2891
+
2892
+ EXAMPLES:
2893
+ ate robot discover # Find robots on network/USB
2894
+ ate robot list # List known robot types
2895
+ ate robot info hiwonder_mechdog # Show robot type info
2896
+ ate robot setup # Interactive setup wizard
2897
+ ate robot test my_mechdog # Test robot capabilities
2898
+ ate robot profiles list # List saved profiles
2899
+ """)
2900
+ robot_subparsers = robot_parser.add_subparsers(dest="robot_action", help="Robot action")
2901
+
2902
+ # robot discover
2903
+ discover_parser = robot_subparsers.add_parser("discover", help="Find robots on network and USB")
2904
+ discover_parser.add_argument("--subnet", help="Network subnet to scan (e.g., 192.168.1)")
2905
+ discover_parser.add_argument("--timeout", type=float, default=3.0, help="Scan timeout (default: 3s)")
2906
+ discover_parser.add_argument("--json", action="store_true", help="Output as JSON")
2907
+
2908
+ # robot ble - Bluetooth Low Energy discovery and connection
2909
+ ble_parser = robot_subparsers.add_parser("ble",
2910
+ help="Bluetooth Low Energy robot discovery and connection",
2911
+ description="""Discover and connect to robots via Bluetooth Low Energy.
2912
+
2913
+ BLE provides wireless control without WiFi infrastructure - useful for
2914
+ outdoor robots or when USB is inconvenient.
2915
+
2916
+ EXAMPLES:
2917
+ ate robot ble discover # Scan for BLE robots
2918
+ ate robot ble discover --filter MechDog # Filter by name
2919
+ ate robot ble connect AA:BB:CC:DD:EE:FF # Test connection
2920
+ ate robot ble info # Show BLE-capable robots
2921
+ """)
2922
+ ble_subparsers = ble_parser.add_subparsers(dest="ble_action", help="BLE action")
2923
+
2924
+ # ble discover
2925
+ ble_discover = ble_subparsers.add_parser("discover", help="Scan for BLE devices")
2926
+ ble_discover.add_argument("--timeout", type=float, default=10.0,
2927
+ help="Scan timeout in seconds (default: 10)")
2928
+ ble_discover.add_argument("--filter", dest="name_filter",
2929
+ help="Filter devices by name (e.g., MechDog, ESP32)")
2930
+ ble_discover.add_argument("--json", action="store_true", dest="json_output",
2931
+ help="Output as JSON")
2932
+
2933
+ # ble connect
2934
+ ble_connect = ble_subparsers.add_parser("connect", help="Test connection to BLE device")
2935
+ ble_connect.add_argument("address", help="BLE device address (e.g., AA:BB:CC:DD:EE:FF)")
2936
+ ble_connect.add_argument("-p", "--profile", dest="profile_name",
2937
+ help="Update profile with BLE settings")
2938
+
2939
+ # ble info
2940
+ ble_subparsers.add_parser("info", help="Show BLE-capable robots and UUIDs")
2941
+
2942
+ # ble capture - Capture traffic from phone app
2943
+ ble_capture = ble_subparsers.add_parser("capture",
2944
+ help="Capture BLE traffic from phone app for protocol reverse-engineering",
2945
+ description="""Capture BLE traffic while using a phone app to control your robot.
2946
+
2947
+ This automates the workflow for reverse-engineering undocumented BLE protocols:
2948
+ 1. Guides you through setting up PacketLogger (iOS) or HCI snoop (Android)
2949
+ 2. Captures traffic while you operate the phone app
2950
+ 3. Saves capture file for analysis
2951
+
2952
+ EXAMPLES:
2953
+ ate robot ble capture # Auto-detect platform
2954
+ ate robot ble capture --platform ios # Force iOS capture
2955
+ ate robot ble capture -o my_capture.pklg # Specify output file
2956
+ """)
2957
+ ble_capture.add_argument("-p", "--platform", choices=["ios", "android", "auto"],
2958
+ default="auto", help="Mobile platform (default: auto-detect)")
2959
+ ble_capture.add_argument("-o", "--output", default="capture.pklg",
2960
+ help="Output file path (default: capture.pklg)")
2961
+
2962
+ # ble analyze - Analyze capture file
2963
+ ble_analyze = ble_subparsers.add_parser("analyze",
2964
+ help="Analyze BLE capture file to decode protocol commands",
2965
+ description="""Analyze a BLE capture file (.pklg or .btsnoop) to decode the protocol.
2966
+
2967
+ Extracts write commands and notifications, identifies the protocol format,
2968
+ and optionally generates Python code to replay the discovered commands.
2969
+
2970
+ EXAMPLES:
2971
+ ate robot ble analyze capture.pklg # Decode and display commands
2972
+ ate robot ble analyze capture.pklg --generate # Also generate Python code
2973
+ ate robot ble analyze capture.pklg -o protocol.py # Save code to file
2974
+ """)
2975
+ ble_analyze.add_argument("capture_file", help="Path to capture file (.pklg or .btsnoop)")
2976
+ ble_analyze.add_argument("-g", "--generate", action="store_true",
2977
+ help="Generate Python replay code")
2978
+ ble_analyze.add_argument("-o", "--output", help="Output file for generated code")
2979
+
2980
+ # ble inspect - Inspect device characteristics
2981
+ ble_inspect = ble_subparsers.add_parser("inspect",
2982
+ help="Inspect BLE device services and characteristics",
2983
+ description="""Connect to a BLE device and show its service/characteristic map.
2984
+
2985
+ Shows which characteristics support write vs notify, helping you understand
2986
+ the correct configuration for communication.
2987
+
2988
+ EXAMPLES:
2989
+ ate robot ble inspect mechdog_00 # Inspect by device name
2990
+ ate robot ble inspect AA:BB:CC:DD:EE:FF # Inspect by address
2991
+ """)
2992
+ ble_inspect.add_argument("address", help="Device address or name to inspect")
2993
+
2994
+ # robot list
2995
+ list_parser = robot_subparsers.add_parser("list", help="List known robot types")
2996
+ list_parser.add_argument("--archetype", choices=["quadruped", "humanoid", "arm", "custom"],
2997
+ help="Filter by archetype")
2998
+
2999
+ # robot info
3000
+ info_parser = robot_subparsers.add_parser("info", help="Show robot type or profile info")
3001
+ info_parser.add_argument("robot_type", nargs="?", help="Robot type ID")
3002
+ info_parser.add_argument("-p", "--profile", help="Show profile instead of type")
3003
+
3004
+ # robot setup
3005
+ setup_parser = robot_subparsers.add_parser("setup", help="Interactive setup wizard")
3006
+ setup_parser.add_argument("--port", help="Serial port")
3007
+ setup_parser.add_argument("--camera-ip", help="Camera IP address")
3008
+ setup_parser.add_argument("--type", dest="robot_type", default="hiwonder_mechdog",
3009
+ help="Robot type (default: hiwonder_mechdog)")
3010
+ setup_parser.add_argument("--name", help="Profile name")
3011
+ setup_parser.add_argument("--non-interactive", action="store_true",
3012
+ help="Use defaults, no prompts")
3013
+
3014
+ # robot test
3015
+ test_parser = robot_subparsers.add_parser("test", help="Test robot capabilities")
3016
+ test_parser.add_argument("profile", help="Profile name")
3017
+ test_parser.add_argument("-c", "--capability", help="Specific capability to test")
3018
+ test_parser.add_argument("-v", "--verbose", action="store_true", help="Show method details")
3019
+
3020
+ # robot approach
3021
+ approach_parser = robot_subparsers.add_parser("approach",
3022
+ help="Approach detected targets",
3023
+ description="""Scan for targets and approach them using visual servoing.
3024
+
3025
+ Works with any robot that has camera and locomotion capabilities.
3026
+ Uses visual distance estimation when no hardware distance sensor is available.
3027
+
3028
+ EXAMPLES:
3029
+ ate robot approach my_mechdog # Basic approach
3030
+ ate robot approach my_mechdog --target-distance 0.2 # Stop 20cm from target
3031
+ ate robot approach my_mechdog --dry-run # Scan only, no movement
3032
+ ate robot approach my_mechdog --speed 0.5 # Faster approach
3033
+ """)
3034
+ approach_parser.add_argument("profile", help="Profile name")
3035
+ approach_parser.add_argument("-d", "--duration", type=float, default=30.0,
3036
+ help="Maximum duration in seconds (default: 30)")
3037
+ approach_parser.add_argument("-t", "--target-distance", type=float, default=0.15,
3038
+ help="Distance to stop from target in meters (default: 0.15)")
3039
+ approach_parser.add_argument("-s", "--speed", type=float, default=0.3,
3040
+ help="Approach speed (default: 0.3)")
3041
+ approach_parser.add_argument("--detect-colors", nargs="+",
3042
+ help="Colors to detect (red, blue, green, etc.)")
3043
+ approach_parser.add_argument("--dry-run", action="store_true",
3044
+ help="Scan and detect but don't move")
3045
+
3046
+ # robot profiles
3047
+ profiles_parser = robot_subparsers.add_parser("profiles", help="Manage saved profiles")
3048
+ profiles_subparsers = profiles_parser.add_subparsers(dest="profiles_action", help="Profile action")
3049
+
3050
+ profiles_subparsers.add_parser("list", help="List saved profiles")
3051
+
3052
+ profiles_show = profiles_subparsers.add_parser("show", help="Show profile details")
3053
+ profiles_show.add_argument("name", help="Profile name")
3054
+
3055
+ profiles_delete = profiles_subparsers.add_parser("delete", help="Delete profile")
3056
+ profiles_delete.add_argument("name", help="Profile name")
3057
+
3058
+ profiles_create = profiles_subparsers.add_parser("create", help="Create new profile")
3059
+ profiles_create.add_argument("name", help="Profile name")
3060
+ profiles_create.add_argument("-t", "--template", help="Template to use")
3061
+
3062
+ profiles_subparsers.add_parser("templates", help="List available templates")
3063
+
3064
+ # robot calibrate
3065
+ calibrate_parser = robot_subparsers.add_parser("calibrate",
3066
+ help="Visual calibration for robot servos and poses",
3067
+ description="""Visual calibration wizard with webcam feedback.
3068
+
3069
+ Interactively discover servo ranges, record named positions (poses),
3070
+ and build a calibration profile for your robot.
3071
+
3072
+ EXAMPLES:
3073
+ ate robot calibrate start --port /dev/cu.usbserial-10 --name my_mechdog
3074
+ ate robot calibrate gripper --port /dev/cu.usbserial-10 --servo 11
3075
+ ate robot calibrate list
3076
+ ate robot calibrate poses my_mechdog
3077
+ ate robot calibrate record my_mechdog --pose gripper_open
3078
+ ate robot calibrate apply my_mechdog --pose gripper_closed
3079
+ """)
3080
+ calibrate_subparsers = calibrate_parser.add_subparsers(dest="calibrate_action", help="Calibration action")
3081
+
3082
+ # calibrate start
3083
+ cal_start = calibrate_subparsers.add_parser("start", help="Run interactive calibration wizard")
3084
+ cal_start.add_argument("--port", help="Serial port")
3085
+ cal_start.add_argument("--camera", dest="camera_url", help="Camera URL for visual feedback")
3086
+ cal_start.add_argument("--name", default="robot", help="Calibration name (default: robot)")
3087
+ cal_start.add_argument("--type", dest="robot_type", default="unknown", help="Robot type")
3088
+
3089
+ # calibrate gripper
3090
+ cal_gripper = calibrate_subparsers.add_parser("gripper", help="Quick gripper-only calibration")
3091
+ cal_gripper.add_argument("--port", help="Serial port")
3092
+ cal_gripper.add_argument("--servo", type=int, default=11, help="Gripper servo ID (default: 11)")
3093
+
3094
+ # calibrate list
3095
+ calibrate_subparsers.add_parser("list", help="List saved calibrations")
3096
+
3097
+ # calibrate poses
3098
+ cal_poses = calibrate_subparsers.add_parser("poses", help="Show poses for a calibration")
3099
+ cal_poses.add_argument("name", help="Calibration name")
3100
+
3101
+ # calibrate record
3102
+ cal_record = calibrate_subparsers.add_parser("record", help="Record current position as a pose")
3103
+ cal_record.add_argument("name", help="Calibration name")
3104
+ cal_record.add_argument("--pose", required=True, help="Name for the pose")
3105
+
3106
+ # calibrate apply
3107
+ cal_apply = calibrate_subparsers.add_parser("apply", help="Apply a saved pose")
3108
+ cal_apply.add_argument("name", help="Calibration name")
3109
+ cal_apply.add_argument("--pose", required=True, help="Pose to apply")
3110
+
3111
+ # calibrate direction - THE CRITICAL STEP
3112
+ cal_direction = calibrate_subparsers.add_parser("direction",
3113
+ help="Direction calibration via Twitch Test (CRITICAL)",
3114
+ description="""Determine which servo direction moves TOWARD the target.
3115
+
3116
+ This is the CRITICAL step that was missing! Without direction calibration,
3117
+ the robot doesn't know if positive servo values move the gripper toward
3118
+ or away from the target.
3119
+
3120
+ Process:
3121
+ 1. Place a green ball in front of the robot
3122
+ 2. Ensure ArUco markers on arm are visible to webcam
3123
+ 3. For each servo, measure distance to ball before/after small movement
3124
+ 4. Record whether positive values move toward or away from target
3125
+
3126
+ EXAMPLES:
3127
+ ate robot calibrate direction --port /dev/cu.usbserial-10 --name my_mechdog
3128
+ ate robot calibrate direction --name my_mechdog
3129
+ """)
3130
+ cal_direction.add_argument("--port", help="Serial port")
3131
+ cal_direction.add_argument("--name", default="robot", help="Robot name")
3132
+
3133
+ # calibrate status - Show calibration progress
3134
+ cal_status = calibrate_subparsers.add_parser("status",
3135
+ help="Show calibration status and next steps",
3136
+ description="""Show the calibration status for a robot.
3137
+
3138
+ Displays which calibration stages are complete and what needs to be done next.
3139
+ This enforces the proper calibration workflow.
3140
+
3141
+ EXAMPLES:
3142
+ ate robot calibrate status # List all robots
3143
+ ate robot calibrate status my_mechdog # Show specific robot
3144
+ """)
3145
+ cal_status.add_argument("name", nargs="?", default="robot", help="Robot name")
3146
+
3147
+ # calibrate reset - Reset calibration state
3148
+ cal_reset = calibrate_subparsers.add_parser("reset",
3149
+ help="Reset calibration state for a robot",
3150
+ description="""Reset all calibration progress for a robot.
3151
+
3152
+ Use this if you need to recalibrate from scratch.
3153
+ """)
3154
+ cal_reset.add_argument("name", help="Robot name")
3155
+
3156
+ # robot label - Visual labeling with dual cameras
3157
+ label_parser = robot_subparsers.add_parser("label",
3158
+ help="Visual servo/pose/action labeling with dual cameras",
3159
+ description="""Interactive visual labeling session using webcam + robot camera.
3160
+
3161
+ Creates a "bedrock" of basic skills by:
3162
+ 1. Discovering and labeling servos by their physical effect
3163
+ 2. Recording named poses with visual confirmation
3164
+ 3. Sequencing poses into multi-step actions
3165
+ 4. Generating reusable Python skill code
3166
+
3167
+ EXAMPLES:
3168
+ ate robot label --name my_mechdog
3169
+ ate robot label --port /dev/cu.usbserial-10 --camera http://192.168.50.98:80/capture
3170
+ ate robot label --webcam 1 --name mechdog
3171
+ """)
3172
+ label_parser.add_argument("--port", help="Serial port (auto-detects if not specified)")
3173
+ label_parser.add_argument("--name", default="robot", help="Robot name for saving (default: robot)")
3174
+ label_parser.add_argument("--type", dest="robot_type", default="hiwonder_mechdog", help="Robot type")
3175
+ label_parser.add_argument("--webcam", type=int, default=0, dest="webcam_id",
3176
+ help="Webcam device ID (default: 0)")
3177
+ label_parser.add_argument("--camera", dest="camera_url",
3178
+ help="Robot camera URL (e.g., http://192.168.50.98:80/capture)")
3179
+
3180
+ # robot skills - Manage generated skills
3181
+ skills_parser = robot_subparsers.add_parser("skills",
3182
+ help="Manage generated robot skills",
3183
+ description="""Manage skill libraries created from visual labeling.
3184
+
3185
+ View, export, and manage the Python skill code generated from labeled
3186
+ poses and actions.
3187
+
3188
+ EXAMPLES:
3189
+ ate robot skills list
3190
+ ate robot skills show my_mechdog
3191
+ ate robot skills export my_mechdog
3192
+ ate robot skills export my_mechdog pickup
3193
+ """)
3194
+ skills_subparsers = skills_parser.add_subparsers(dest="skills_action", help="Skills action")
3195
+
3196
+ skills_subparsers.add_parser("list", help="List saved skill libraries")
3197
+
3198
+ skills_show = skills_subparsers.add_parser("show", help="Show skill library details")
3199
+ skills_show.add_argument("name", help="Library name")
3200
+
3201
+ skills_export = skills_subparsers.add_parser("export", help="Export skills as Python code")
3202
+ skills_export.add_argument("name", help="Library name")
3203
+ skills_export.add_argument("skill", nargs="?", help="Specific skill to export (exports all if omitted)")
3204
+
3205
+ # robot upload - Upload skill library to FoodforThought
3206
+ upload_parser = robot_subparsers.add_parser("upload",
3207
+ help="Upload skill library to FoodforThought",
3208
+ description="""Upload calibration, poses, and skills to FoodforThought platform.
3209
+
3210
+ Creates artifacts with full data lineage:
3211
+ raw (images) → processed (calibration) → labeled (poses) → skill (code)
3212
+
3213
+ Requires authentication. Run 'ate login' first.
3214
+
3215
+ EXAMPLES:
3216
+ ate robot upload mechdog
3217
+ ate robot upload mechdog --project-id abc123
3218
+ ate robot upload mechdog --no-images
3219
+ """)
3220
+ upload_parser.add_argument("name", help="Robot name (matches calibration/library)")
3221
+ upload_parser.add_argument("--project-id", dest="project_id", help="Existing project ID (creates new if omitted)")
3222
+ upload_parser.add_argument("--no-images", action="store_true", dest="no_images",
3223
+ help="Skip uploading pose images")
3224
+ upload_parser.add_argument("--dry-run", action="store_true",
3225
+ help="Preview what would be uploaded without making changes")
3226
+
3227
+ # robot teach - Fast keyboard-driven teaching mode
3228
+ teach_parser = robot_subparsers.add_parser("teach",
3229
+ help="Fast keyboard-driven teaching mode with live preview",
3230
+ description="""Real-time teaching interface for quick skill development.
3231
+
3232
+ Opens a live webcam preview with keyboard controls:
3233
+ ↑/↓ Select servo
3234
+ ←/→ Adjust position (hold Shift for fine control)
3235
+ S Save current position as a pose
3236
+ A Create action from saved poses
3237
+ P Playback last action
3238
+ C Clear recorded poses
3239
+ R Reset to center positions
3240
+ +/- Adjust step size
3241
+ Q Quit and save
3242
+
3243
+ Much faster iteration than menu-driven labeling.
3244
+
3245
+ EXAMPLES:
3246
+ ate robot teach --port /dev/cu.usbserial-10 --name mechdog
3247
+ ate robot teach --port /dev/cu.usbserial-10 --webcam 1
3248
+ """)
3249
+ teach_parser.add_argument("--port", required=True, help="Serial port")
3250
+ teach_parser.add_argument("--name", default="robot", help="Robot name for saving")
3251
+ teach_parser.add_argument("--type", dest="robot_type", default="hiwonder_mechdog", help="Robot type")
3252
+ teach_parser.add_argument("--webcam", type=int, default=0, dest="webcam_id", help="Webcam device ID")
3253
+ teach_parser.add_argument("--camera", dest="camera_url", help="Robot camera URL (optional)")
3254
+
3255
+ # robot primitives - List and inspect programmatic primitives
3256
+ primitives_parser = robot_subparsers.add_parser("primitives",
3257
+ help="List and inspect skill primitives",
3258
+ description="""View the programmatic skill library.
3259
+
3260
+ Shows all primitives, compound skills, and behaviors with their
3261
+ hardware requirements, servo targets, and step sequences.
3262
+
3263
+ EXAMPLES:
3264
+ ate robot primitives list
3265
+ ate robot primitives show gripper_open
3266
+ ate robot primitives show pickup
3267
+ ate robot primitives show fetch
3268
+ """)
3269
+ primitives_subparsers = primitives_parser.add_subparsers(dest="primitives_action", help="Primitives action")
3270
+
3271
+ primitives_subparsers.add_parser("list", help="List all primitives, compounds, and behaviors")
3272
+
3273
+ primitives_show = primitives_subparsers.add_parser("show", help="Show details of a specific skill")
3274
+ primitives_show.add_argument("name", help="Skill name")
3275
+
3276
+ # robot map-servos - LLM-guided servo mapping
3277
+ map_servos_parser = robot_subparsers.add_parser("map-servos",
3278
+ help="Map all servos and generate primitive skills using LLM",
3279
+ description="""LLM-guided servo mapping and primitive generation.
3280
+
3281
+ Uses an AI agent to systematically probe each servo, analyze its function
3282
+ (via camera or position feedback), and generate named positions and
3283
+ primitive skills.
3284
+
3285
+ This automates the tedious process of discovering what each servo does
3286
+ and creates reusable skill code.
3287
+
3288
+ EXAMPLES:
3289
+ ate robot map-servos --port /dev/cu.usbserial-10
3290
+ ate robot map-servos --port /dev/cu.usbserial-10 --camera
3291
+ ate robot map-servos --port /dev/cu.usbserial-10 --wifi-camera 192.168.4.1
3292
+ ate robot map-servos --num-servos 6 --output ./my_arm
3293
+ """)
3294
+ map_servos_parser.add_argument("--port", help="Serial port (auto-detects if not specified)")
3295
+ map_servos_parser.add_argument("--num-servos", type=int, default=13,
3296
+ help="Number of servos to probe (default: 13)")
3297
+ map_servos_parser.add_argument("--camera", action="store_true", dest="use_camera",
3298
+ help="Use webcam for visual verification")
3299
+ map_servos_parser.add_argument("--camera-index", type=int, default=0,
3300
+ help="Webcam device index (default: 0)")
3301
+ map_servos_parser.add_argument("--wifi-camera", dest="wifi_camera_ip",
3302
+ help="Use robot's WiFi camera at IP (e.g., 192.168.4.1)")
3303
+ map_servos_parser.add_argument("--output", "-o", help="Output file prefix (default: ./servo_mappings_<port>)")
3304
+ map_servos_parser.add_argument("--save-images", action="store_true",
3305
+ help="Save camera images during probing")
3306
+ map_servos_parser.add_argument("--upload", action="store_true",
3307
+ help="Upload servo mappings to FoodforThought after mapping")
3308
+ map_servos_parser.add_argument("--project-id", dest="project_id",
3309
+ help="Project ID for upload (creates new if not specified)")
3310
+
3311
+ # robot generate-markers - Generate printable ArUco markers
3312
+ markers_parser = robot_subparsers.add_parser("generate-markers",
3313
+ help="Generate printable ArUco markers for calibration",
3314
+ description="""Generate a PDF with printable ArUco markers for robot calibration.
3315
+
3316
+ These markers enable precise visual system identification:
3317
+ - Attach markers to each joint/segment of your robot
3318
+ - Run calibration to discover kinematics automatically
3319
+ - Share your calibration with the community!
3320
+
3321
+ The PDF includes:
3322
+ - Markers with cutting guides
3323
+ - Joint labels and IDs
3324
+ - Setup instructions
3325
+
3326
+ EXAMPLES:
3327
+ ate robot generate-markers
3328
+ ate robot generate-markers --count 6 --size 40
3329
+ ate robot generate-markers --robot-name "My MechDog" -o my_markers.pdf
3330
+ """)
3331
+ markers_parser.add_argument("--output", "-o", default="aruco_markers.pdf",
3332
+ help="Output PDF path (default: aruco_markers.pdf)")
3333
+ markers_parser.add_argument("--count", "-n", type=int, default=12,
3334
+ help="Number of markers to generate (default: 12)")
3335
+ markers_parser.add_argument("--size", type=float, default=30.0,
3336
+ help="Marker size in millimeters (default: 30)")
3337
+ markers_parser.add_argument("--robot-name",
3338
+ help="Optional robot name for labeling")
3339
+ markers_parser.add_argument("--page-size", choices=["letter", "a4"], default="letter",
3340
+ help="Page size (default: letter)")
3341
+ markers_parser.add_argument("--preset",
3342
+ help="Use robot-specific preset (e.g., mechdog-mini, unitree-go1, xarm-6)")
3343
+ markers_parser.add_argument("--list-presets", action="store_true",
3344
+ help="List available robot presets")
3345
+
3346
+ # robot identify - Search community calibration registry
3347
+ identify_parser = robot_subparsers.add_parser("identify",
3348
+ help="Search community calibration registry for your robot",
3349
+ description="""Search the FoodforThought community calibration registry.
3350
+
3351
+ Find calibrations shared by other users for your robot model.
3352
+ When a community member calibrates a robot, everyone benefits!
3353
+
3354
+ EXAMPLES:
3355
+ ate robot identify mechdog-mini
3356
+ ate robot identify --search "MechDog"
3357
+ ate robot identify --port /dev/cu.usbserial-10 # Auto-detect robot type
3358
+ """)
3359
+ identify_parser.add_argument("robot_slug", nargs="?",
3360
+ help="Robot model slug (e.g., mechdog-mini)")
3361
+ identify_parser.add_argument("--search", dest="robot_name",
3362
+ help="Search by robot name")
3363
+ identify_parser.add_argument("--port",
3364
+ help="Serial port (for auto-detection)")
3365
+
3366
+ # robot upload-calibration - Upload calibration to community registry
3367
+ upload_cal_parser = robot_subparsers.add_parser("upload-calibration",
3368
+ help="Upload a calibration to the community registry",
3369
+ description="""Upload your calibration to share with the community.
3370
+
3371
+ Your calibration helps everyone with the same robot model!
3372
+ First user to calibrate a robot benefits the entire community.
3373
+
3374
+ EXAMPLES:
3375
+ ate robot upload-calibration ./servo_mappings.json --robot-slug mechdog-mini
3376
+ ate robot upload-calibration ./calibration.json --publish
3377
+ """)
3378
+ upload_cal_parser.add_argument("calibration_file",
3379
+ help="Path to calibration JSON file")
3380
+ upload_cal_parser.add_argument("--robot-slug",
3381
+ help="Robot model slug")
3382
+ upload_cal_parser.add_argument("--robot-name",
3383
+ help="Robot name (for search)")
3384
+ upload_cal_parser.add_argument("--method",
3385
+ choices=["aruco_marker", "llm_vision", "manual", "urdf_import", "twitch_test", "direction_calibration"],
3386
+ default="llm_vision",
3387
+ help="Calibration method (default: llm_vision)")
3388
+ upload_cal_parser.add_argument("--dry-run", action="store_true",
3389
+ help="Preview what would be uploaded without making changes")
3390
+ upload_cal_parser.add_argument("--publish", action="store_true",
3391
+ help="Publish immediately (default: save as draft)")
3392
+
3393
+ # robot behavior - Run high-level behaviors with perception
3394
+ behavior_parser = robot_subparsers.add_parser("behavior",
3395
+ help="Execute high-level behaviors with perception",
3396
+ description="""Run complex behaviors that combine primitives with perception.
3397
+
3398
+ Behaviors include closed-loop control using camera feedback:
3399
+ - Detect targets
3400
+ - Align to center them in view
3401
+ - Approach until close enough
3402
+ - Execute manipulation sequences
3403
+ - Verify success
3404
+
3405
+ EXAMPLES:
3406
+ ate robot behavior pickup_green_ball --simulate
3407
+ ate robot behavior pickup_green_ball --profile my_mechdog
3408
+ ate robot behavior pickup_green_ball --camera-ip 192.168.4.1
3409
+ """)
3410
+ behavior_parser.add_argument("behavior", nargs="?", default="pickup_green_ball",
3411
+ help="Behavior to execute (default: pickup_green_ball)")
3412
+ behavior_parser.add_argument("-p", "--profile", dest="profile_name",
3413
+ help="Robot profile for real execution")
3414
+ behavior_parser.add_argument("--camera-ip", dest="camera_ip",
3415
+ help="WiFi camera IP address")
3416
+ behavior_parser.add_argument("--simulate", action="store_true",
3417
+ help="Run in simulation mode (no real robot)")
3418
+
3419
+ # robot urdf - URDF generation and Artifex integration
3420
+ urdf_parser = robot_subparsers.add_parser("urdf",
3421
+ help="Generate URDF from ArUco markers and open in Artifex",
3422
+ description="""Generate URDF from visual ArUco marker detection.
3423
+
3424
+ This command captures ArUco markers attached to robot links via webcam,
3425
+ computes their 3D positions, and generates a calibrated URDF file.
3426
+ The URDF can then be opened directly in Artifex Desktop for visualization.
3427
+
3428
+ WORKFLOW:
3429
+ 1. Attach ArUco markers (4x4_50 dictionary, 25mm) to robot links
3430
+ 2. Run 'ate robot urdf generate' to capture and create URDF
3431
+ 3. Run 'ate robot urdf open' to view in Artifex Desktop
3432
+
3433
+ EXAMPLES:
3434
+ ate robot urdf generate # Capture markers and generate URDF
3435
+ ate robot urdf generate --reset # Reset robot pose first (BLE)
3436
+ ate robot urdf generate --frames 50 # More frames for accuracy
3437
+ ate robot urdf open # Open in Artifex Desktop
3438
+ ate robot urdf open --bridge # Sync via ATE bridge server
3439
+ ate robot urdf info # Show URDF details
3440
+ """)
3441
+ urdf_subparsers = urdf_parser.add_subparsers(dest="urdf_action", help="URDF action")
3442
+
3443
+ # urdf generate
3444
+ urdf_generate = urdf_subparsers.add_parser("generate",
3445
+ help="Generate URDF from ArUco marker detection")
3446
+ urdf_generate.add_argument("--reset", action="store_true",
3447
+ help="Reset robot pose via BLE before capture")
3448
+ urdf_generate.add_argument("--frames", type=int, default=20,
3449
+ help="Number of frames to capture (default: 20)")
3450
+ urdf_generate.add_argument("--camera", type=int, default=0,
3451
+ help="Camera index (default: 0)")
3452
+ urdf_generate.add_argument("--marker-size", dest="marker_size", type=float, default=0.025,
3453
+ help="Marker size in meters (default: 0.025)")
3454
+ urdf_generate.add_argument("--output", "-o",
3455
+ help="Output URDF path (default: urdf_generation/mechdog_calibrated.urdf)")
3456
+ urdf_generate.add_argument("--ble-address", dest="ble_address",
3457
+ help="BLE address for robot reset (default: auto-detect)")
3458
+ urdf_generate.add_argument("--robot-name", dest="robot_name", default="mechdog",
3459
+ help="Robot name in URDF (default: mechdog)")
3460
+
3461
+ # urdf open
3462
+ urdf_open = urdf_subparsers.add_parser("open",
3463
+ help="Open URDF in Artifex Desktop")
3464
+ urdf_open.add_argument("urdf_file", nargs="?",
3465
+ help="URDF file to open (default: last generated)")
3466
+ urdf_open.add_argument("--bridge", action="store_true",
3467
+ help="Sync via ATE bridge server instead of direct launch")
3468
+ urdf_open.add_argument("--launch", action="store_true",
3469
+ help="Force launch Artifex even if bridge fails")
3470
+
3471
+ # urdf info
3472
+ urdf_info = urdf_subparsers.add_parser("info",
3473
+ help="Show information about a URDF file")
3474
+ urdf_info.add_argument("urdf_file", nargs="?",
3475
+ help="URDF file to inspect (default: last generated)")
3476
+
3477
+ return robot_parser
3478
+
3479
+
3480
def _handle_robot_profiles(args):
    """Dispatch 'ate robot profiles' sub-actions to robot_profiles_command."""
    action = getattr(args, "profiles_action", None)
    if action == "list":
        robot_profiles_command("list")
    elif action == "show":
        robot_profiles_command("show", name=args.name)
    elif action == "delete":
        robot_profiles_command("delete", name=args.name)
    elif action == "create":
        robot_profiles_command("create", name=args.name, template=getattr(args, "template", None))
    elif action == "templates":
        robot_profiles_command("templates")
    else:
        print("Usage: ate robot profiles <list|show|delete|create|templates>")


def _handle_robot_calibrate(args):
    """Dispatch 'ate robot calibrate' sub-actions to robot_calibrate_command."""
    action = getattr(args, "calibrate_action", None)
    if action == "start":
        robot_calibrate_command(
            action="start",
            port=args.port,
            camera_url=args.camera_url,
            name=args.name,
            robot_type=args.robot_type,
        )
    elif action == "gripper":
        robot_calibrate_command(
            action="gripper",
            port=args.port,
            servo_id=args.servo,
        )
    elif action == "list":
        robot_calibrate_command(action="list")
    elif action == "poses":
        robot_calibrate_command(action="poses", name=args.name)
    elif action == "record":
        robot_calibrate_command(
            action="record",
            name=args.name,
            pose_name=args.pose,
        )
    elif action == "apply":
        robot_calibrate_command(
            action="apply",
            name=args.name,
            pose_name=args.pose,
        )
    elif action == "direction":
        robot_calibrate_command(
            action="direction",
            port=getattr(args, "port", None),
            name=getattr(args, "name", "robot"),
        )
    elif action == "status":
        robot_calibrate_command(
            action="status",
            name=getattr(args, "name", "robot"),
        )
    elif action == "reset":
        robot_calibrate_command(
            action="reset",
            name=args.name,
        )
    else:
        print("Usage: ate robot calibrate <start|gripper|direction|status|list|poses|record|apply|reset>")


def _handle_robot_skills(args):
    """Dispatch 'ate robot skills' sub-actions to robot_skills_command."""
    action = getattr(args, "skills_action", None)
    if action == "list":
        robot_skills_command("list")
    elif action == "show":
        robot_skills_command("show", name=args.name)
    elif action == "export":
        robot_skills_command("export", name=args.name, skill_name=getattr(args, "skill", None))
    else:
        print("Usage: ate robot skills <list|show|export>")


def _handle_robot_urdf(args):
    """Dispatch 'ate robot urdf' sub-actions to robot_urdf_command."""
    action = getattr(args, "urdf_action", None)
    if action == "generate":
        robot_urdf_command(
            action="generate",
            reset=getattr(args, "reset", False),
            frames=getattr(args, "frames", 20),
            camera=getattr(args, "camera", 0),
            marker_size=getattr(args, "marker_size", 0.025),
            output=getattr(args, "output", None),
            ble_address=getattr(args, "ble_address", None),
            robot_name=getattr(args, "robot_name", "mechdog"),
        )
    elif action == "open":
        robot_urdf_command(
            action="open",
            urdf_file=getattr(args, "urdf_file", None),
            bridge=getattr(args, "bridge", False),
            launch=getattr(args, "launch", False),
        )
    elif action == "info":
        robot_urdf_command(
            action="info",
            urdf_file=getattr(args, "urdf_file", None),
        )
    else:
        print("Usage: ate robot urdf <generate|open|info>")
        print("\nExamples:")
        print("  ate robot urdf generate          # Generate URDF from markers")
        print("  ate robot urdf generate --reset  # Reset robot pose first")
        print("  ate robot urdf open              # Open in Artifex Desktop")
        print("  ate robot urdf info              # Show URDF details")


def handle_robot_command(args):
    """
    Handle robot command dispatch.

    Call this from the main CLI command handler.

    Args:
        args: Parsed argparse namespace. `args.robot_action` selects the
            subcommand; each branch reads only the attributes its own
            subparser defines. Optional attributes are read with getattr()
            so a partially-populated namespace never raises AttributeError.

    Returns:
        None. Unknown actions print a usage message instead of raising.
    """
    action = getattr(args, "robot_action", None)

    if action == "discover":
        robot_discover_command(
            subnet=args.subnet,
            timeout=args.timeout,
            json_output=args.json
        )

    elif action == "ble":
        # Default to a harmless read-only action when no sub-action is given.
        ble_action = getattr(args, "ble_action", None) or "info"
        robot_ble_command(
            action=ble_action,
            timeout=getattr(args, "timeout", 10.0),
            address=getattr(args, "address", None),
            name_filter=getattr(args, "name_filter", None),
            profile_name=getattr(args, "profile_name", None),
            json_output=getattr(args, "json_output", False),
            # New capture/analyze arguments
            platform=getattr(args, "platform", "auto"),
            output=getattr(args, "output", "capture.pklg"),
            capture_file=getattr(args, "capture_file", None),
            generate_code=getattr(args, "generate", False),
        )

    elif action == "list":
        robot_list_command(archetype=args.archetype)

    elif action == "info":
        robot_info_command(
            robot_type=args.robot_type,
            profile_name=args.profile
        )

    elif action == "setup":
        robot_setup_command(
            port=args.port,
            camera_ip=args.camera_ip,
            robot_type=args.robot_type,
            name=args.name,
            non_interactive=args.non_interactive
        )

    elif action == "test":
        robot_test_command(
            profile_name=args.profile,
            capability=args.capability,
            verbose=args.verbose
        )

    elif action == "approach":
        robot_approach_command(
            profile_name=args.profile,
            duration=args.duration,
            target_distance=args.target_distance,
            speed=args.speed,
            detect_colors=args.detect_colors,
            dry_run=args.dry_run
        )

    elif action == "profiles":
        _handle_robot_profiles(args)

    elif action == "calibrate":
        _handle_robot_calibrate(args)

    elif action == "label":
        robot_label_command(
            port=args.port,
            name=args.name,
            robot_type=args.robot_type,
            webcam_id=args.webcam_id,
            camera_url=args.camera_url,
        )

    elif action == "skills":
        _handle_robot_skills(args)

    elif action == "upload":
        robot_upload_command(
            name=args.name,
            project_id=getattr(args, "project_id", None),
            # --no-images flag inverts to include_images.
            include_images=not getattr(args, "no_images", False),
            dry_run=getattr(args, "dry_run", False),
        )

    elif action == "teach":
        teach_command(
            port=args.port,
            name=args.name,
            robot_type=args.robot_type,
            webcam_id=args.webcam_id,
            camera_url=getattr(args, "camera_url", None),
        )

    elif action == "behavior":
        robot_behavior_command(
            profile_name=getattr(args, "profile_name", None),
            behavior=args.behavior,
            camera_ip=getattr(args, "camera_ip", None),
            simulate=getattr(args, "simulate", False),
        )

    elif action == "primitives":
        robot_primitives_command(
            action=getattr(args, "primitives_action", None) or "list",
            name=getattr(args, "name", None),
        )

    elif action == "map-servos":
        robot_map_servos_command(
            port=getattr(args, "port", None),
            num_servos=getattr(args, "num_servos", 13),
            use_camera=getattr(args, "use_camera", False),
            camera_index=getattr(args, "camera_index", 0),
            wifi_camera_ip=getattr(args, "wifi_camera_ip", None),
            output=getattr(args, "output", None),
            save_images=getattr(args, "save_images", False),
            upload=getattr(args, "upload", False),
            project_id=getattr(args, "project_id", None),
        )

    elif action == "generate-markers":
        robot_generate_markers_command(
            output=getattr(args, "output", "aruco_markers.pdf"),
            count=getattr(args, "count", 12),
            size=getattr(args, "size", 30.0),
            robot_name=getattr(args, "robot_name", None),
            page_size=getattr(args, "page_size", "letter"),
            preset=getattr(args, "preset", None),
            list_presets=getattr(args, "list_presets", False),
        )

    elif action == "identify":
        robot_identify_command(
            robot_slug=getattr(args, "robot_slug", None),
            robot_name=getattr(args, "robot_name", None),
            port=getattr(args, "port", None),
        )

    elif action == "upload-calibration":
        robot_upload_calibration_command(
            calibration_file=args.calibration_file,
            robot_slug=getattr(args, "robot_slug", None),
            robot_name=getattr(args, "robot_name", None),
            method=getattr(args, "method", "llm_vision"),
            publish=getattr(args, "publish", False),
            dry_run=getattr(args, "dry_run", False),
        )

    elif action == "urdf":
        _handle_robot_urdf(args)

    else:
        print("Usage: ate robot <discover|ble|list|info|setup|test|approach|profiles|calibrate|label|skills|upload|teach|behavior|primitives|map-servos|generate-markers|identify|upload-calibration|urdf>")
        print("\nRun 'ate robot --help' for more information.")