foodforthought-cli 0.2.8__py3-none-any.whl → 0.3.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (116)
  1. ate/__init__.py +6 -0
  2. ate/__main__.py +16 -0
  3. ate/auth/__init__.py +1 -0
  4. ate/auth/device_flow.py +141 -0
  5. ate/auth/token_store.py +96 -0
  6. ate/behaviors/__init__.py +12 -0
  7. ate/behaviors/approach.py +399 -0
  8. ate/cli.py +855 -4551
  9. ate/client.py +90 -0
  10. ate/commands/__init__.py +168 -0
  11. ate/commands/auth.py +389 -0
  12. ate/commands/bridge.py +448 -0
  13. ate/commands/data.py +185 -0
  14. ate/commands/deps.py +111 -0
  15. ate/commands/generate.py +384 -0
  16. ate/commands/memory.py +907 -0
  17. ate/commands/parts.py +166 -0
  18. ate/commands/primitive.py +399 -0
  19. ate/commands/protocol.py +288 -0
  20. ate/commands/recording.py +524 -0
  21. ate/commands/repo.py +154 -0
  22. ate/commands/simulation.py +291 -0
  23. ate/commands/skill.py +303 -0
  24. ate/commands/skills.py +487 -0
  25. ate/commands/team.py +147 -0
  26. ate/commands/workflow.py +271 -0
  27. ate/detection/__init__.py +38 -0
  28. ate/detection/base.py +142 -0
  29. ate/detection/color_detector.py +399 -0
  30. ate/detection/trash_detector.py +322 -0
  31. ate/drivers/__init__.py +18 -6
  32. ate/drivers/ble_transport.py +405 -0
  33. ate/drivers/mechdog.py +360 -24
  34. ate/drivers/wifi_camera.py +477 -0
  35. ate/interfaces/__init__.py +16 -0
  36. ate/interfaces/base.py +2 -0
  37. ate/interfaces/sensors.py +247 -0
  38. ate/llm_proxy.py +239 -0
  39. ate/memory/__init__.py +35 -0
  40. ate/memory/cloud.py +244 -0
  41. ate/memory/context.py +269 -0
  42. ate/memory/embeddings.py +184 -0
  43. ate/memory/export.py +26 -0
  44. ate/memory/merge.py +146 -0
  45. ate/memory/migrate/__init__.py +34 -0
  46. ate/memory/migrate/base.py +89 -0
  47. ate/memory/migrate/pipeline.py +189 -0
  48. ate/memory/migrate/sources/__init__.py +13 -0
  49. ate/memory/migrate/sources/chroma.py +170 -0
  50. ate/memory/migrate/sources/pinecone.py +120 -0
  51. ate/memory/migrate/sources/qdrant.py +110 -0
  52. ate/memory/migrate/sources/weaviate.py +160 -0
  53. ate/memory/reranker.py +353 -0
  54. ate/memory/search.py +26 -0
  55. ate/memory/store.py +548 -0
  56. ate/recording/__init__.py +42 -3
  57. ate/recording/session.py +12 -2
  58. ate/recording/visual.py +416 -0
  59. ate/robot/__init__.py +142 -0
  60. ate/robot/agentic_servo.py +856 -0
  61. ate/robot/behaviors.py +493 -0
  62. ate/robot/ble_capture.py +1000 -0
  63. ate/robot/ble_enumerate.py +506 -0
  64. ate/robot/calibration.py +88 -3
  65. ate/robot/calibration_state.py +388 -0
  66. ate/robot/commands.py +143 -11
  67. ate/robot/direction_calibration.py +554 -0
  68. ate/robot/discovery.py +104 -2
  69. ate/robot/llm_system_id.py +654 -0
  70. ate/robot/locomotion_calibration.py +508 -0
  71. ate/robot/marker_generator.py +611 -0
  72. ate/robot/perception.py +502 -0
  73. ate/robot/primitives.py +614 -0
  74. ate/robot/profiles.py +6 -0
  75. ate/robot/registry.py +5 -2
  76. ate/robot/servo_mapper.py +1153 -0
  77. ate/robot/skill_upload.py +285 -3
  78. ate/robot/target_calibration.py +500 -0
  79. ate/robot/teach.py +515 -0
  80. ate/robot/types.py +242 -0
  81. ate/robot/visual_labeler.py +9 -0
  82. ate/robot/visual_servo_loop.py +494 -0
  83. ate/robot/visual_servoing.py +570 -0
  84. ate/robot/visual_system_id.py +906 -0
  85. ate/transports/__init__.py +121 -0
  86. ate/transports/base.py +394 -0
  87. ate/transports/ble.py +405 -0
  88. ate/transports/hybrid.py +444 -0
  89. ate/transports/serial.py +345 -0
  90. ate/urdf/__init__.py +30 -0
  91. ate/urdf/capture.py +582 -0
  92. ate/urdf/cloud.py +491 -0
  93. ate/urdf/collision.py +271 -0
  94. ate/urdf/commands.py +708 -0
  95. ate/urdf/depth.py +360 -0
  96. ate/urdf/inertial.py +312 -0
  97. ate/urdf/kinematics.py +330 -0
  98. ate/urdf/lifting.py +415 -0
  99. ate/urdf/meshing.py +300 -0
  100. ate/urdf/models/__init__.py +110 -0
  101. ate/urdf/models/depth_anything.py +253 -0
  102. ate/urdf/models/sam2.py +324 -0
  103. ate/urdf/motion_analysis.py +396 -0
  104. ate/urdf/pipeline.py +468 -0
  105. ate/urdf/scale.py +256 -0
  106. ate/urdf/scan_session.py +411 -0
  107. ate/urdf/segmentation.py +299 -0
  108. ate/urdf/synthesis.py +319 -0
  109. ate/urdf/topology.py +336 -0
  110. ate/urdf/validation.py +371 -0
  111. {foodforthought_cli-0.2.8.dist-info → foodforthought_cli-0.3.0.dist-info}/METADATA +1 -1
  112. foodforthought_cli-0.3.0.dist-info/RECORD +166 -0
  113. {foodforthought_cli-0.2.8.dist-info → foodforthought_cli-0.3.0.dist-info}/WHEEL +1 -1
  114. foodforthought_cli-0.2.8.dist-info/RECORD +0 -73
  115. {foodforthought_cli-0.2.8.dist-info → foodforthought_cli-0.3.0.dist-info}/entry_points.txt +0 -0
  116. {foodforthought_cli-0.2.8.dist-info → foodforthought_cli-0.3.0.dist-info}/top_level.txt +0 -0
ate/robot/direction_calibration.py ADDED
@@ -0,0 +1,554 @@
+ """
+ Direction Calibration via Twitch Test.
+
+ This module implements the CRITICAL missing step in robot calibration:
+ determining whether positive servo commands move TOWARD or AWAY from targets.
+
+ The Problem:
+ -----------
+ Previous calibration discovered:
+ - Which servos exist and their ranges
+ - Which servo controls which joint (kinematic structure)
+
+ But NOT:
+ - Does servo 8 at 1700 move the gripper TOWARD the ball or AWAY from it?
+
+ Without this, behaviors like pickup use wrong directions and fail catastrophically.
+
+ The Solution - Twitch Test:
+ --------------------------
+ 1. Place target (ball) in front of robot
+ 2. Detect gripper position via ArUco marker
+ 3. For each servo:
+    a. Measure distance(gripper, ball) BEFORE
+    b. Move servo by +100 (small perturbation)
+    c. Measure distance(gripper, ball) AFTER
+    d. If distance decreased → positive = toward target
+    e. If distance increased → positive = away from target
+ 4. Store direction mapping
+ 5. Behaviors now use correct signs!
+
+ Usage:
+     from ate.robot.direction_calibration import run_direction_calibration
+
+     # Run interactive calibration
+     result = run_direction_calibration(
+         serial_port="/dev/cu.usbserial-10",
+         robot_name="my_mechdog",
+     )
+
+     # Result contains direction mappings for all arm servos
+ """
+
+ import cv2
+ import numpy as np
+ import serial
+ import time
+ import json
+ import sys
+ from dataclasses import dataclass
+ from datetime import datetime
+ from pathlib import Path
+ from typing import Optional, Dict, List, Tuple, Any
+
+ from .calibration_state import (
+     CalibrationState,
+     DirectionMapping,
+     require_direction_calibration,
+ )
+
+
+ @dataclass
+ class TwitchResult:
+     """Result of a single twitch test for one servo."""
+     servo_id: int
+     initial_distance: float  # pixels
+     final_distance: float  # pixels
+     delta: float  # final - initial (negative = got closer)
+     perturbation: int  # PWM units moved
+     direction: str  # "toward" or "away"
+     confidence: float  # based on magnitude of delta
+
+
+ class DirectionCalibrator:
+     """
+     Automated direction calibration using ArUco + ball detection.
+
+     The POINT of ArUco markers + webcam:
+     - Camera sees ball position
+     - Camera sees gripper marker
+     - Move servo → measure if gripper got closer or further from ball
+     - NO HUMAN OBSERVATION NEEDED
+     """
+
+     def __init__(
+         self,
+         serial_port: str,
+         robot_name: str = "robot",
+         gripper_marker_id: int = 15,
+         camera_index: int = 0,
+         arm_servos: Optional[List[int]] = None,
+     ):
+         self.serial_port = serial_port
+         self.robot_name = robot_name
+         self.gripper_marker_id = gripper_marker_id
+         self.camera_index = camera_index
+         self.arm_servos = arm_servos or [8, 9, 10, 11]  # Default MechDog arm servos
+
+         self.cap: Optional[cv2.VideoCapture] = None
+         self.ser: Optional[serial.Serial] = None
+         self.aruco_detector = None
+
+         # Results
+         self.results: Dict[int, TwitchResult] = {}
+
+     def setup_camera(self) -> bool:
+         """Initialize webcam and ArUco detector."""
+         print("Initializing camera...")
+
+         self.cap = cv2.VideoCapture(self.camera_index)
+         if not self.cap.isOpened():
+             print(f"Error: Could not open camera {self.camera_index}")
+             return False
+
+         self.cap.set(cv2.CAP_PROP_FRAME_WIDTH, 1280)
+         self.cap.set(cv2.CAP_PROP_FRAME_HEIGHT, 720)
+
+         # Warm up camera
+         for _ in range(10):
+             self.cap.read()
+
+         # Setup ArUco detector
+         aruco_dict = cv2.aruco.getPredefinedDictionary(cv2.aruco.DICT_4X4_50)
+         aruco_params = cv2.aruco.DetectorParameters()
+         self.aruco_detector = cv2.aruco.ArucoDetector(aruco_dict, aruco_params)
+
+         print("Camera initialized.")
+         return True
+
+     def setup_robot(self) -> bool:
+         """Connect to robot via serial."""
+         print(f"Connecting to robot on {self.serial_port}...")
+
+         try:
+             self.ser = serial.Serial(self.serial_port, 115200, timeout=2)
+             time.sleep(0.5)
+             self.ser.read(self.ser.in_waiting)  # Clear buffer
+
+             # Initialize MechDog
+             self.send_command("from HW_MechDog import MechDog", wait=1.5)
+             self.send_command("dog = MechDog()", wait=1.5)
+
+             print("Robot connected.")
+             return True
+
+         except Exception as e:
+             print(f"Error connecting to robot: {e}")
+             return False
+
+     def send_command(self, command: str, wait: float = 0.5) -> str:
+         """Send command to robot."""
+         if not self.ser:
+             return ""
+
+         try:
+             self.ser.write(f"{command}\r\n".encode())
+             time.sleep(wait)
+             return self.ser.read(self.ser.in_waiting).decode("utf-8", errors="ignore")
+         except Exception as e:
+             print(f"Command error: {e}")
+             return ""
+
+     def set_servo(self, servo_id: int, position: int, duration: int = 500) -> None:
+         """Set a servo to a specific position."""
+         self.send_command(f"dog.set_servo({servo_id}, {position}, {duration})", wait=duration/1000 + 0.3)
+
+     def detect_ball(self, frame: np.ndarray) -> Optional[Tuple[int, int]]:
+         """Detect green/cyan ball, return center (x, y) or None."""
+         hsv = cv2.cvtColor(frame, cv2.COLOR_BGR2HSV)
+
+         # Blue-green ball detection (handles green, cyan, teal balls)
+         # Expanded range to handle different lighting conditions
+         lower = np.array([70, 50, 50])
+         upper = np.array([110, 255, 255])
+         mask = cv2.inRange(hsv, lower, upper)
+
+         contours, _ = cv2.findContours(mask, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE)
+
+         if contours:
+             largest = max(contours, key=cv2.contourArea)
+             if cv2.contourArea(largest) > 500:
+                 M = cv2.moments(largest)
+                 if M["m00"] > 0:
+                     cx = int(M["m10"] / M["m00"])
+                     cy = int(M["m01"] / M["m00"])
+                     return (cx, cy)
+
+         return None
+
+     def detect_markers(self, frame: np.ndarray) -> Dict[int, Tuple[float, float]]:
+         """Detect ArUco markers, return dict of marker_id -> (x, y)."""
+         if self.aruco_detector is None:
+             return {}
+
+         gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
+         corners, ids, _ = self.aruco_detector.detectMarkers(gray)
+
+         markers = {}
+         if ids is not None:
+             for i, mid in enumerate(ids.flatten()):
+                 c = corners[i][0]
+                 center = (float(np.mean(c[:, 0])), float(np.mean(c[:, 1])))
+                 markers[int(mid)] = center
+
+         return markers
+
+     def capture_state(self) -> Tuple[Optional[Tuple[int, int]], Dict[int, Tuple[float, float]], np.ndarray]:
+         """Capture frame and return (ball_pos, markers, frame)."""
+         if not self.cap:
+             return None, {}, np.zeros((480, 640, 3), dtype=np.uint8)
+
+         # Flush old frames
+         for _ in range(3):
+             self.cap.read()
+
+         ret, frame = self.cap.read()
+         if not ret:
+             return None, {}, np.zeros((480, 640, 3), dtype=np.uint8)
+
+         ball = self.detect_ball(frame)
+         markers = self.detect_markers(frame)
+
+         return ball, markers, frame
+
+     def distance(self, p1: Optional[Tuple[float, float]], p2: Optional[Tuple[float, float]]) -> float:
+         """Euclidean distance between two points."""
+         if p1 is None or p2 is None:
+             return float("inf")
+         return np.sqrt((p1[0] - p2[0])**2 + (p1[1] - p2[1])**2)
+
+     def twitch_test(
+         self,
+         servo_id: int,
+         neutral: int = 1500,
+         perturbation: int = 100,
+     ) -> Optional[TwitchResult]:
+         """
+         Perform a single twitch test on a servo.
+
+         1. Move to neutral
+         2. Measure distance(gripper, ball)
+         3. Move by +perturbation
+         4. Measure distance again
+         5. Determine if we got closer or further
+         """
+         print(f"\n Testing servo {servo_id}...")
+
+         # Move to neutral first
+         self.set_servo(servo_id, neutral, duration=600)
+         time.sleep(0.5)
+
+         # Capture baseline
+         ball_before, markers_before, _ = self.capture_state()
+
+         if ball_before is None:
+             print(f" Warning: Ball not detected")
+             return None
+
+         gripper_before = markers_before.get(self.gripper_marker_id)
+         if gripper_before is None:
+             # Try other arm markers as fallback
+             for marker_id in [15, 14, 13]:
+                 if marker_id in markers_before:
+                     gripper_before = markers_before[marker_id]
+                     print(f" Using marker M{marker_id} as reference")
+                     break
+
+         if gripper_before is None:
+             print(f" Warning: No arm markers detected")
+             return None
+
+         dist_before = self.distance(gripper_before, ball_before)
+         print(f" Before: gripper at {gripper_before[0]:.0f},{gripper_before[1]:.0f}, dist={dist_before:.1f}px")
+
+         # Perturbation: move servo by +perturbation
+         self.set_servo(servo_id, neutral + perturbation, duration=400)
+         time.sleep(0.4)
+
+         # Capture after perturbation
+         ball_after, markers_after, frame = self.capture_state()
+
+         if ball_after is None:
+             print(f" Warning: Ball lost after move")
+             # Return to neutral
+             self.set_servo(servo_id, neutral, duration=400)
+             return None
+
+         # Find gripper marker (same ID as before if possible)
+         gripper_after = None
+         for marker_id in [15, 14, 13]:
+             if marker_id in markers_after:
+                 gripper_after = markers_after[marker_id]
+                 break
+
+         if gripper_after is None:
+             print(f" Warning: Gripper marker lost after move (significant movement)")
+             # Return to neutral
+             self.set_servo(servo_id, neutral, duration=400)
+             # If marker was lost, the movement was significant - treat as "toward" if likely
+             return TwitchResult(
+                 servo_id=servo_id,
+                 initial_distance=dist_before,
+                 final_distance=0,  # Unknown
+                 delta=-dist_before,  # Assume moved toward (marker closer = can't see)
+                 perturbation=perturbation,
+                 direction="toward",  # Tentative
+                 confidence=0.3,  # Low confidence since we can't verify
+             )
+
+         dist_after = self.distance(gripper_after, ball_after)
+         delta = dist_after - dist_before
+
+         print(f" After: gripper at {gripper_after[0]:.0f},{gripper_after[1]:.0f}, dist={dist_after:.1f}px")
+         print(f" Delta: {delta:+.1f}px")
+
+         # Return to neutral
+         self.set_servo(servo_id, neutral, duration=400)
+         time.sleep(0.3)
+
+         # Determine direction
+         # Negative delta = got closer = positive command moves toward target
+         if delta < -10:
+             direction = "toward"
+             confidence = min(1.0, abs(delta) / 50)
+         elif delta > 10:
+             direction = "away"
+             confidence = min(1.0, abs(delta) / 50)
+         else:
+             direction = "minimal"
+             confidence = 0.2
+
+         print(f" Result: +{perturbation} PWM → {direction} (confidence: {confidence:.0%})")
+
+         return TwitchResult(
+             servo_id=servo_id,
+             initial_distance=dist_before,
+             final_distance=dist_after,
+             delta=delta,
+             perturbation=perturbation,
+             direction=direction,
+             confidence=confidence,
+         )
+
+     def run_calibration(self) -> Dict[int, DirectionMapping]:
+         """
+         Run direction calibration for all arm servos.
+
+         Returns dict of servo_id -> DirectionMapping
+         """
+         print("\n" + "=" * 60)
+         print("DIRECTION CALIBRATION (Twitch Test)")
+         print("=" * 60)
+         print("\nPurpose: Determine if positive servo values move")
+         print(" TOWARD or AWAY from the target.\n")
+
+         if not self.setup_camera():
+             return {}
+
+         if not self.setup_robot():
+             self.cleanup()
+             return {}
+
+         # Verify we can see ball and markers
+         print("Verifying visibility...")
+         ball, markers, frame = self.capture_state()
+
+         if ball is None:
+             print("\nERROR: Cannot detect ball.")
+             print("Please position a green ball in front of the robot.")
+             cv2.imwrite("/tmp/direction_cal_no_ball.jpg", frame)
+             print("Saved debug image to /tmp/direction_cal_no_ball.jpg")
+             self.cleanup()
+             return {}
+
+         print(f" Ball detected at: {ball}")
+
+         gripper_marker = None
+         for m in [15, 14, 13]:
+             if m in markers:
+                 gripper_marker = m
+                 break
+
+         if gripper_marker is None:
+             print("\nERROR: No arm markers (M13, M14, M15) visible.")
+             print("Please position camera to see arm markers.")
+             cv2.imwrite("/tmp/direction_cal_no_markers.jpg", frame)
+             print("Saved debug image to /tmp/direction_cal_no_markers.jpg")
+             self.cleanup()
+             return {}
+
+         print(f" Using marker M{gripper_marker} as gripper reference")
+         print(f" Visible markers: {sorted(markers.keys())}")
+
+         # Save initial state image
+         cv2.circle(frame, ball, 20, (0, 255, 0), 3)
+         cv2.putText(frame, "BALL", (ball[0]+25, ball[1]),
+                     cv2.FONT_HERSHEY_SIMPLEX, 0.7, (0, 255, 0), 2)
+         gpos = markers[gripper_marker]
+         cv2.circle(frame, (int(gpos[0]), int(gpos[1])), 15, (255, 0, 255), 3)
+         cv2.putText(frame, f"M{gripper_marker}", (int(gpos[0])+20, int(gpos[1])),
+                     cv2.FONT_HERSHEY_SIMPLEX, 0.7, (255, 0, 255), 2)
+         cv2.imwrite("/tmp/direction_cal_initial.jpg", frame)
+
+         # Set neutral position for all arm servos
+         print("\nSetting neutral position...")
+         for servo_id in self.arm_servos:
+             self.set_servo(servo_id, 1500, duration=500)
+         time.sleep(1)
+
+         # Run twitch test on each servo
+         print("\nRunning twitch tests...")
+         direction_mappings: Dict[int, DirectionMapping] = {}
+
+         for servo_id in self.arm_servos:
+             result = self.twitch_test(servo_id)
+
+             if result:
+                 self.results[servo_id] = result
+
+                 # Convert to DirectionMapping
+                 toward_direction = "positive" if result.direction == "toward" else "negative"
+                 if result.direction == "minimal":
+                     toward_direction = "unknown"
+
+                 mapping = DirectionMapping(
+                     servo_id=servo_id,
+                     toward_direction=toward_direction,
+                     confidence=result.confidence,
+                     delta_pixels=result.delta,
+                     calibrated_at=datetime.now().isoformat(),
+                 )
+                 direction_mappings[servo_id] = mapping
+
+         self.cleanup()
+
+         # Print summary
+         print("\n" + "=" * 60)
+         print("DIRECTION CALIBRATION RESULTS")
+         print("=" * 60)
+
+         for servo_id, mapping in direction_mappings.items():
+             result = self.results.get(servo_id)
+             delta_str = f"Δ={result.delta:+.1f}px" if result else ""
+             print(f"\n Servo {servo_id}:")
+             print(f" To move TOWARD target: use {mapping.toward_direction} values")
+             print(f" Confidence: {mapping.confidence:.0%} {delta_str}")
+
+         # Save results
+         output_path = Path.home() / ".ate" / "direction_calibration" / f"{self.robot_name}.json"
+         output_path.parent.mkdir(parents=True, exist_ok=True)
+
+         output = {
+             "robot_name": self.robot_name,
+             "calibrated_at": datetime.now().isoformat(),
+             "gripper_marker_id": self.gripper_marker_id,
+             "mappings": {
+                 str(k): {
+                     "servo_id": v.servo_id,
+                     "toward_direction": v.toward_direction,
+                     "confidence": v.confidence,
+                     "delta_pixels": v.delta_pixels,
+                 }
+                 for k, v in direction_mappings.items()
+             },
+         }
+
+         with open(output_path, "w") as f:
+             json.dump(output, f, indent=2)
+
+         print(f"\nSaved to: {output_path}")
+
+         return direction_mappings
+
+     def cleanup(self) -> None:
+         """Release resources."""
+         if self.cap:
+             self.cap.release()
+         if self.ser:
+             self.ser.close()
+
+
+ def run_direction_calibration(
+     serial_port: str,
+     robot_name: str = "robot",
+     gripper_marker_id: int = 15,
+     camera_index: int = 0,
+     arm_servos: Optional[List[int]] = None,
+ ) -> Dict[int, DirectionMapping]:
+     """
+     Run direction calibration and update calibration state.
+
+     This is the main entry point for the 'ate robot calibrate direction' command.
+     """
+     calibrator = DirectionCalibrator(
+         serial_port=serial_port,
+         robot_name=robot_name,
+         gripper_marker_id=gripper_marker_id,
+         camera_index=camera_index,
+         arm_servos=arm_servos,
+     )
+
+     mappings = calibrator.run_calibration()
+
+     if mappings:
+         # Update calibration state
+         state = CalibrationState.load(robot_name)
+         state.complete_direction_calibration(mappings)
+
+         print("\n" + "=" * 60)
+         print("CALIBRATION STATE UPDATED")
+         print("=" * 60)
+         state.print_status()
+
+     return mappings
+
+
+ def load_direction_calibration(robot_name: str) -> Optional[Dict[int, DirectionMapping]]:
+     """Load saved direction calibration for a robot."""
+     path = Path.home() / ".ate" / "direction_calibration" / f"{robot_name}.json"
+
+     if not path.exists():
+         return None
+
+     try:
+         with open(path) as f:
+             data = json.load(f)
+
+         mappings = {}
+         for k, v in data.get("mappings", {}).items():
+             mappings[int(k)] = DirectionMapping(
+                 servo_id=v["servo_id"],
+                 toward_direction=v["toward_direction"],
+                 confidence=v["confidence"],
+                 delta_pixels=v["delta_pixels"],
+                 calibrated_at=v.get("calibrated_at", ""),
+             )
+
+         return mappings
+
+     except Exception as e:
+         print(f"Error loading direction calibration: {e}")
+         return None
+
+
+ def get_toward_direction(robot_name: str, servo_id: int) -> Optional[str]:
+     """
+     Get the direction that moves a servo toward the target.
+
+     Returns "positive" or "negative", or None if not calibrated.
+     """
+     mappings = load_direction_calibration(robot_name)
+     if not mappings or servo_id not in mappings:
+         return None
+
+     return mappings[servo_id].toward_direction
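
The point of persisting these mappings is that downstream behaviors can look up the correct sign instead of guessing it. A minimal sketch of that consumption pattern, assuming direction calibration has already been run (the `step_toward_target` helper below is hypothetical, not part of the package):

    from ate.robot.direction_calibration import get_toward_direction

    def step_toward_target(robot_name: str, servo_id: int, magnitude: int = 50) -> int:
        # Hypothetical helper: return a signed PWM delta that should close
        # the gap to the target, based on the saved twitch-test result.
        direction = get_toward_direction(robot_name, servo_id)
        if direction == "positive":
            return magnitude
        if direction == "negative":
            return -magnitude
        # None (never calibrated) or "unknown" (minimal movement in the twitch test)
        raise RuntimeError(f"Servo {servo_id} needs direction calibration; run the twitch test first")

    # e.g. delta = step_toward_target("my_mechdog", 8)  ->  +50 or -50
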
ate/robot/discovery.py CHANGED
@@ -4,6 +4,7 @@ Auto-discovery of robots on network and USB.
  Scans for:
  - Serial devices matching known patterns
  - Network cameras (ESP32-CAM, IP cameras)
+ - BLE devices (ESP32, HM-10, etc.)
  - ROS2 topics (if ROS2 available)
  - mDNS services
  """
@@ -30,6 +31,12 @@ try:
  except ImportError:
      HAS_REQUESTS = False

+ try:
+     from bleak import BleakScanner
+     HAS_BLEAK = True
+ except ImportError:
+     HAS_BLEAK = False
+
  from .registry import KNOWN_ROBOTS, RobotType, ConnectionType


@@ -67,15 +74,17 @@ def discover_robots(
      timeout: float = 5.0,
      scan_serial: bool = True,
      scan_network: bool = True,
+     scan_ble: bool = True,
      network_subnet: Optional[str] = None,
  ) -> List[DiscoveredRobot]:
      """
-     Discover all robots on network and USB.
+     Discover all robots on network, USB, and BLE.

      Args:
-         timeout: Timeout for network scans
+         timeout: Timeout for network/BLE scans
          scan_serial: Scan USB serial devices
          scan_network: Scan network for cameras/robots
+         scan_ble: Scan for BLE devices
          network_subnet: Subnet to scan (e.g., "192.168.1")

      Returns:
@@ -92,6 +101,9 @@ def discover_robots(
              subnet=network_subnet
          ))

+     if scan_ble:
+         discovered.extend(discover_ble_robots(timeout=timeout))
+
      return discovered


@@ -234,6 +246,83 @@ def discover_network_cameras(
      return discovered


+ def discover_ble_robots(
+     timeout: float = 10.0,
+     name_patterns: Optional[List[str]] = None,
+ ) -> List[DiscoveredRobot]:
+     """
+     Discover robots via Bluetooth Low Energy.
+
+     Scans for BLE devices matching known robot name patterns.
+     Common ESP32-based robots advertise names like "MechDog", "ESP32", etc.
+     """
+     if not HAS_BLEAK:
+         return []
+
+     import asyncio
+
+     # Default patterns for known robots
+     if name_patterns is None:
+         name_patterns = [
+             "MechDog",
+             "HiWonder",
+             "ESP32",
+             "ESP-",
+             "Unitree",
+             "Robot",
+         ]
+
+     async def scan():
+         devices = await BleakScanner.discover(timeout=timeout)
+         return devices
+
+     try:
+         # Run the async scan
+         devices = asyncio.run(scan())
+     except Exception as e:
+         print(f"BLE scan error: {e}")
+         return []
+
+     discovered = []
+     for device in devices:
+         name = device.name or ""
+
+         # Check if name matches any pattern
+         matched_pattern = None
+         for pattern in name_patterns:
+             if pattern.lower() in name.lower():
+                 matched_pattern = pattern
+                 break
+
+         if not matched_pattern:
+             continue
+
+         # Try to identify robot type
+         robot_type = None
+         for rt in KNOWN_ROBOTS.values():
+             if ConnectionType.BLUETOOTH not in rt.connection_types:
+                 continue
+             if rt.name.lower() in name.lower() or rt.manufacturer.lower() in name.lower():
+                 robot_type = rt.id
+                 break
+
+         robot = DiscoveredRobot(
+             robot_type=robot_type,
+             name=f"{name} ({device.address})",
+             status=DiscoveryStatus.IDENTIFIED if robot_type else DiscoveryStatus.FOUND,
+             connection=ConnectionType.BLUETOOTH,
+             port=device.address,  # BLE address used as "port"
+             raw_data={
+                 "ble_address": device.address,
+                 "ble_name": name,
+                 "rssi": device.rssi if hasattr(device, 'rssi') else None,
+             }
+         )
+         discovered.append(robot)
+
+     return discovered
+
+
  def probe_serial_device(port: str, baud_rate: int = 115200) -> Optional[Dict[str, Any]]:
      """
      Probe a serial device to identify what type of robot it is.
@@ -313,6 +402,7 @@ def quick_scan() -> Dict[str, Any]:
      result = {
          "serial_ports": [],
          "network_cameras": [],
+         "ble_devices": [],
          "identified_robots": [],
      }

@@ -336,4 +426,16 @@ def quick_scan() -> Dict[str, Any]:
              "ports": camera.ports,
          })

+     # Quick BLE scan
+     ble_robots = discover_ble_robots(timeout=3.0)
+     for robot in ble_robots:
+         result["ble_devices"].append({
+             "address": robot.port,
+             "name": robot.name,
+             "type": robot.robot_type,
+             "rssi": robot.raw_data.get("rssi"),
+         })
+         if robot.robot_type:
+             result["identified_robots"].append(robot)
+
      return result
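
Taken together, the BLE path plugs into the existing discovery entry points. A short usage sketch (assuming `bleak` is installed; the attribute names follow the `DiscoveredRobot` fields used in the diff above):

    from ate.robot.discovery import discover_robots, quick_scan

    # Full scan: serial + network + BLE (BLE is silently skipped when bleak is missing)
    robots = discover_robots(timeout=5.0, scan_ble=True)
    for r in robots:
        # For BLE hits, `port` carries the device address rather than a serial path
        print(r.connection, r.name, r.port)

    # Lightweight summary dict, including the new "ble_devices" key
    summary = quick_scan()
    print(summary["ble_devices"])  # [] when bleak is unavailable or nothing matched
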