foodforthought-cli 0.2.7__py3-none-any.whl → 0.3.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- ate/__init__.py +6 -0
- ate/__main__.py +16 -0
- ate/auth/__init__.py +1 -0
- ate/auth/device_flow.py +141 -0
- ate/auth/token_store.py +96 -0
- ate/behaviors/__init__.py +100 -0
- ate/behaviors/approach.py +399 -0
- ate/behaviors/common.py +686 -0
- ate/behaviors/tree.py +454 -0
- ate/cli.py +855 -3995
- ate/client.py +90 -0
- ate/commands/__init__.py +168 -0
- ate/commands/auth.py +389 -0
- ate/commands/bridge.py +448 -0
- ate/commands/data.py +185 -0
- ate/commands/deps.py +111 -0
- ate/commands/generate.py +384 -0
- ate/commands/memory.py +907 -0
- ate/commands/parts.py +166 -0
- ate/commands/primitive.py +399 -0
- ate/commands/protocol.py +288 -0
- ate/commands/recording.py +524 -0
- ate/commands/repo.py +154 -0
- ate/commands/simulation.py +291 -0
- ate/commands/skill.py +303 -0
- ate/commands/skills.py +487 -0
- ate/commands/team.py +147 -0
- ate/commands/workflow.py +271 -0
- ate/detection/__init__.py +38 -0
- ate/detection/base.py +142 -0
- ate/detection/color_detector.py +399 -0
- ate/detection/trash_detector.py +322 -0
- ate/drivers/__init__.py +39 -0
- ate/drivers/ble_transport.py +405 -0
- ate/drivers/mechdog.py +942 -0
- ate/drivers/wifi_camera.py +477 -0
- ate/interfaces/__init__.py +187 -0
- ate/interfaces/base.py +273 -0
- ate/interfaces/body.py +267 -0
- ate/interfaces/detection.py +282 -0
- ate/interfaces/locomotion.py +422 -0
- ate/interfaces/manipulation.py +408 -0
- ate/interfaces/navigation.py +389 -0
- ate/interfaces/perception.py +362 -0
- ate/interfaces/sensors.py +247 -0
- ate/interfaces/types.py +371 -0
- ate/llm_proxy.py +239 -0
- ate/mcp_server.py +387 -0
- ate/memory/__init__.py +35 -0
- ate/memory/cloud.py +244 -0
- ate/memory/context.py +269 -0
- ate/memory/embeddings.py +184 -0
- ate/memory/export.py +26 -0
- ate/memory/merge.py +146 -0
- ate/memory/migrate/__init__.py +34 -0
- ate/memory/migrate/base.py +89 -0
- ate/memory/migrate/pipeline.py +189 -0
- ate/memory/migrate/sources/__init__.py +13 -0
- ate/memory/migrate/sources/chroma.py +170 -0
- ate/memory/migrate/sources/pinecone.py +120 -0
- ate/memory/migrate/sources/qdrant.py +110 -0
- ate/memory/migrate/sources/weaviate.py +160 -0
- ate/memory/reranker.py +353 -0
- ate/memory/search.py +26 -0
- ate/memory/store.py +548 -0
- ate/recording/__init__.py +83 -0
- ate/recording/demonstration.py +378 -0
- ate/recording/session.py +415 -0
- ate/recording/upload.py +304 -0
- ate/recording/visual.py +416 -0
- ate/recording/wrapper.py +95 -0
- ate/robot/__init__.py +221 -0
- ate/robot/agentic_servo.py +856 -0
- ate/robot/behaviors.py +493 -0
- ate/robot/ble_capture.py +1000 -0
- ate/robot/ble_enumerate.py +506 -0
- ate/robot/calibration.py +668 -0
- ate/robot/calibration_state.py +388 -0
- ate/robot/commands.py +3735 -0
- ate/robot/direction_calibration.py +554 -0
- ate/robot/discovery.py +441 -0
- ate/robot/introspection.py +330 -0
- ate/robot/llm_system_id.py +654 -0
- ate/robot/locomotion_calibration.py +508 -0
- ate/robot/manager.py +270 -0
- ate/robot/marker_generator.py +611 -0
- ate/robot/perception.py +502 -0
- ate/robot/primitives.py +614 -0
- ate/robot/profiles.py +281 -0
- ate/robot/registry.py +322 -0
- ate/robot/servo_mapper.py +1153 -0
- ate/robot/skill_upload.py +675 -0
- ate/robot/target_calibration.py +500 -0
- ate/robot/teach.py +515 -0
- ate/robot/types.py +242 -0
- ate/robot/visual_labeler.py +1048 -0
- ate/robot/visual_servo_loop.py +494 -0
- ate/robot/visual_servoing.py +570 -0
- ate/robot/visual_system_id.py +906 -0
- ate/transports/__init__.py +121 -0
- ate/transports/base.py +394 -0
- ate/transports/ble.py +405 -0
- ate/transports/hybrid.py +444 -0
- ate/transports/serial.py +345 -0
- ate/urdf/__init__.py +30 -0
- ate/urdf/capture.py +582 -0
- ate/urdf/cloud.py +491 -0
- ate/urdf/collision.py +271 -0
- ate/urdf/commands.py +708 -0
- ate/urdf/depth.py +360 -0
- ate/urdf/inertial.py +312 -0
- ate/urdf/kinematics.py +330 -0
- ate/urdf/lifting.py +415 -0
- ate/urdf/meshing.py +300 -0
- ate/urdf/models/__init__.py +110 -0
- ate/urdf/models/depth_anything.py +253 -0
- ate/urdf/models/sam2.py +324 -0
- ate/urdf/motion_analysis.py +396 -0
- ate/urdf/pipeline.py +468 -0
- ate/urdf/scale.py +256 -0
- ate/urdf/scan_session.py +411 -0
- ate/urdf/segmentation.py +299 -0
- ate/urdf/synthesis.py +319 -0
- ate/urdf/topology.py +336 -0
- ate/urdf/validation.py +371 -0
- {foodforthought_cli-0.2.7.dist-info → foodforthought_cli-0.3.0.dist-info}/METADATA +9 -1
- foodforthought_cli-0.3.0.dist-info/RECORD +166 -0
- {foodforthought_cli-0.2.7.dist-info → foodforthought_cli-0.3.0.dist-info}/WHEEL +1 -1
- foodforthought_cli-0.2.7.dist-info/RECORD +0 -44
- {foodforthought_cli-0.2.7.dist-info → foodforthought_cli-0.3.0.dist-info}/entry_points.txt +0 -0
- {foodforthought_cli-0.2.7.dist-info → foodforthought_cli-0.3.0.dist-info}/top_level.txt +0 -0
|
@@ -0,0 +1,554 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Direction Calibration via Twitch Test.
|
|
3
|
+
|
|
4
|
+
This module implements the CRITICAL missing step in robot calibration:
|
|
5
|
+
determining whether positive servo commands move TOWARD or AWAY from targets.
|
|
6
|
+
|
|
7
|
+
The Problem:
|
|
8
|
+
-----------
|
|
9
|
+
Previous calibration discovered:
|
|
10
|
+
- Which servos exist and their ranges
|
|
11
|
+
- Which servo controls which joint (kinematic structure)
|
|
12
|
+
|
|
13
|
+
But NOT:
|
|
14
|
+
- Does servo 8 at 1700 move gripper TOWARD the ball or AWAY from it?
|
|
15
|
+
|
|
16
|
+
Without this, behaviors like pickup use wrong directions and fail catastrophically.
|
|
17
|
+
|
|
18
|
+
The Solution - Twitch Test:
|
|
19
|
+
--------------------------
|
|
20
|
+
1. Place target (ball) in front of robot
|
|
21
|
+
2. Detect gripper position via ArUco marker
|
|
22
|
+
3. For each servo:
|
|
23
|
+
a. Measure distance(gripper, ball) BEFORE
|
|
24
|
+
b. Move servo by +100 (small perturbation)
|
|
25
|
+
c. Measure distance(gripper, ball) AFTER
|
|
26
|
+
d. If distance decreased → positive = toward target
|
|
27
|
+
e. If distance increased → positive = away from target
|
|
28
|
+
4. Store direction mapping
|
|
29
|
+
5. Behaviors now use correct signs!
|
|
30
|
+
|
|
31
|
+
Usage:
|
|
32
|
+
from ate.robot.direction_calibration import run_direction_calibration
|
|
33
|
+
|
|
34
|
+
# Run interactive calibration
|
|
35
|
+
result = run_direction_calibration(
|
|
36
|
+
serial_port="/dev/cu.usbserial-10",
|
|
37
|
+
robot_name="my_mechdog",
|
|
38
|
+
)
|
|
39
|
+
|
|
40
|
+
# Result contains direction mappings for all arm servos
|
|
41
|
+
"""
|
|
42
|
+
|
|
43
|
+
import cv2
|
|
44
|
+
import numpy as np
|
|
45
|
+
import serial
|
|
46
|
+
import time
|
|
47
|
+
import json
|
|
48
|
+
import sys
|
|
49
|
+
from dataclasses import dataclass
|
|
50
|
+
from datetime import datetime
|
|
51
|
+
from pathlib import Path
|
|
52
|
+
from typing import Optional, Dict, List, Tuple, Any
|
|
53
|
+
|
|
54
|
+
from .calibration_state import (
|
|
55
|
+
CalibrationState,
|
|
56
|
+
DirectionMapping,
|
|
57
|
+
require_direction_calibration,
|
|
58
|
+
)
|
|
59
|
+
|
|
60
|
+
|
|
61
|
+
@dataclass
class TwitchResult:
    """Result of a single twitch test for one servo.

    A twitch test perturbs one servo by a small PWM offset and measures
    how the gripper-to-ball pixel distance changed.
    """
    servo_id: int  # servo that was perturbed
    initial_distance: float  # gripper-to-ball distance before the move, pixels
    final_distance: float  # gripper-to-ball distance after the move, pixels (0 if marker lost)
    delta: float  # final - initial (negative = got closer)
    perturbation: int  # PWM units moved
    direction: str  # "toward", "away", or "minimal" (|delta| <= 10px)
    confidence: float  # 0.0-1.0, based on magnitude of delta
|
|
71
|
+
|
|
72
|
+
|
|
73
|
+
class DirectionCalibrator:
    """
    Automated direction calibration using ArUco + ball detection.

    The POINT of ArUco markers + webcam:
    - Camera sees ball position
    - Camera sees gripper marker
    - Move servo → measure if gripper got closer or further from ball
    - NO HUMAN OBSERVATION NEEDED
    """

    def __init__(
        self,
        serial_port: str,
        robot_name: str = "robot",
        gripper_marker_id: int = 15,
        camera_index: int = 0,
        arm_servos: Optional[List[int]] = None,
    ):
        """
        Args:
            serial_port: Serial device path of the robot's REPL (e.g. "/dev/cu.usbserial-10").
            robot_name: Name used when saving the calibration JSON under ~/.ate.
            gripper_marker_id: ArUco marker ID expected on the gripper.
            camera_index: OpenCV camera index of the observing webcam.
            arm_servos: Servo IDs to twitch-test; None selects the MechDog defaults.
        """
        self.serial_port = serial_port
        self.robot_name = robot_name
        self.gripper_marker_id = gripper_marker_id
        self.camera_index = camera_index
        self.arm_servos = arm_servos or [8, 9, 10, 11]  # Default MechDog arm servos

        # Lazily opened in setup_camera()/setup_robot(); released in cleanup().
        self.cap: Optional[cv2.VideoCapture] = None
        self.ser: Optional[serial.Serial] = None
        self.aruco_detector = None

        # Results: servo_id -> TwitchResult, filled by run_calibration().
        self.results: Dict[int, TwitchResult] = {}

    def setup_camera(self) -> bool:
        """Initialize webcam and ArUco detector.

        Returns:
            True on success; False if the camera could not be opened.
        """
        print("Initializing camera...")

        self.cap = cv2.VideoCapture(self.camera_index)
        if not self.cap.isOpened():
            print(f"Error: Could not open camera {self.camera_index}")
            return False

        self.cap.set(cv2.CAP_PROP_FRAME_WIDTH, 1280)
        self.cap.set(cv2.CAP_PROP_FRAME_HEIGHT, 720)

        # Warm up camera — discard initial frames (presumably so auto-exposure
        # can settle before measurements; TODO confirm for this camera).
        for _ in range(10):
            self.cap.read()

        # Setup ArUco detector (4x4 dictionary, 50 markers)
        aruco_dict = cv2.aruco.getPredefinedDictionary(cv2.aruco.DICT_4X4_50)
        aruco_params = cv2.aruco.DetectorParameters()
        self.aruco_detector = cv2.aruco.ArucoDetector(aruco_dict, aruco_params)

        print("Camera initialized.")
        return True

    def setup_robot(self) -> bool:
        """Connect to robot via serial and instantiate the MechDog driver object.

        Returns:
            True on success; False on any serial/driver error.
        """
        print(f"Connecting to robot on {self.serial_port}...")

        try:
            self.ser = serial.Serial(self.serial_port, 115200, timeout=2)
            time.sleep(0.5)
            self.ser.read(self.ser.in_waiting)  # Clear buffer

            # Initialize MechDog on the robot's Python REPL; the 1.5s waits
            # give the firmware time to import/construct before the next command.
            self.send_command("from HW_MechDog import MechDog", wait=1.5)
            self.send_command("dog = MechDog()", wait=1.5)

            print("Robot connected.")
            return True

        except Exception as e:
            print(f"Error connecting to robot: {e}")
            return False

    def send_command(self, command: str, wait: float = 0.5) -> str:
        """Send a line to the robot REPL and return whatever it echoed back.

        Args:
            command: Python source line to execute on the robot.
            wait: Seconds to sleep before draining the serial buffer.

        Returns:
            Decoded response text; empty string if not connected or on error.
        """
        if not self.ser:
            return ""

        try:
            self.ser.write(f"{command}\r\n".encode())
            time.sleep(wait)
            return self.ser.read(self.ser.in_waiting).decode("utf-8", errors="ignore")
        except Exception as e:
            print(f"Command error: {e}")
            return ""

    def set_servo(self, servo_id: int, position: int, duration: int = 500) -> None:
        """Set a servo to a specific position (PWM units) over `duration` ms.

        Blocks for the move duration plus a 0.3s settle margin.
        """
        self.send_command(f"dog.set_servo({servo_id}, {position}, {duration})", wait=duration/1000 + 0.3)

    def detect_ball(self, frame: np.ndarray) -> Optional[Tuple[int, int]]:
        """Detect green/cyan ball, return center (x, y) or None.

        Uses an HSV threshold plus largest-contour centroid; contours smaller
        than 500 px^2 are rejected as noise.
        """
        hsv = cv2.cvtColor(frame, cv2.COLOR_BGR2HSV)

        # Blue-green ball detection (handles green, cyan, teal balls)
        # Expanded range to handle different lighting conditions
        lower = np.array([70, 50, 50])
        upper = np.array([110, 255, 255])
        mask = cv2.inRange(hsv, lower, upper)

        contours, _ = cv2.findContours(mask, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE)

        if contours:
            largest = max(contours, key=cv2.contourArea)
            if cv2.contourArea(largest) > 500:
                M = cv2.moments(largest)
                if M["m00"] > 0:
                    # Centroid from image moments.
                    cx = int(M["m10"] / M["m00"])
                    cy = int(M["m01"] / M["m00"])
                    return (cx, cy)

        return None

    def detect_markers(self, frame: np.ndarray) -> Dict[int, Tuple[float, float]]:
        """Detect ArUco markers, return dict of marker_id -> (x, y).

        The (x, y) is the mean of the four detected corner points.
        Returns {} if the detector has not been initialized yet.
        """
        if self.aruco_detector is None:
            return {}

        gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
        corners, ids, _ = self.aruco_detector.detectMarkers(gray)

        markers = {}
        if ids is not None:
            for i, mid in enumerate(ids.flatten()):
                c = corners[i][0]
                # Marker center = mean of its 4 corners.
                center = (float(np.mean(c[:, 0])), float(np.mean(c[:, 1])))
                markers[int(mid)] = center

        return markers

    def capture_state(self) -> Tuple[Optional[Tuple[int, int]], Dict[int, Tuple[float, float]], np.ndarray]:
        """Capture frame and return (ball_pos, markers, frame).

        Flushes three stale frames first so the measurement reflects the
        robot's current pose rather than a buffered image. On failure a
        black placeholder frame is returned so callers can still imwrite it.
        """
        if not self.cap:
            return None, {}, np.zeros((480, 640, 3), dtype=np.uint8)

        # Flush old frames
        for _ in range(3):
            self.cap.read()

        ret, frame = self.cap.read()
        if not ret:
            return None, {}, np.zeros((480, 640, 3), dtype=np.uint8)

        ball = self.detect_ball(frame)
        markers = self.detect_markers(frame)

        return ball, markers, frame

    def distance(self, p1: Optional[Tuple[float, float]], p2: Optional[Tuple[float, float]]) -> float:
        """Euclidean distance between two points; inf if either is missing."""
        if p1 is None or p2 is None:
            return float("inf")
        return np.sqrt((p1[0] - p2[0])**2 + (p1[1] - p2[1])**2)

    def twitch_test(
        self,
        servo_id: int,
        neutral: int = 1500,
        perturbation: int = 100,
    ) -> Optional[TwitchResult]:
        """
        Perform a single twitch test on a servo.

        1. Move to neutral
        2. Measure distance(gripper, ball)
        3. Move by +perturbation
        4. Measure distance again
        5. Determine if we got closer or further

        Returns:
            TwitchResult, or None if the ball / all arm markers were not
            visible in the baseline frame (or the ball vanished after the move).
        """
        print(f"\n Testing servo {servo_id}...")

        # Move to neutral first
        self.set_servo(servo_id, neutral, duration=600)
        time.sleep(0.5)

        # Capture baseline
        ball_before, markers_before, _ = self.capture_state()

        if ball_before is None:
            print(f" Warning: Ball not detected")
            return None

        gripper_before = markers_before.get(self.gripper_marker_id)
        if gripper_before is None:
            # Try other arm markers as fallback
            # NOTE(review): fallback list [15, 14, 13] is hard-coded and does not
            # prioritize self.gripper_marker_id — confirm this is intended when a
            # non-default gripper marker ID is configured.
            for marker_id in [15, 14, 13]:
                if marker_id in markers_before:
                    gripper_before = markers_before[marker_id]
                    print(f" Using marker M{marker_id} as reference")
                    break

        if gripper_before is None:
            print(f" Warning: No arm markers detected")
            return None

        dist_before = self.distance(gripper_before, ball_before)
        print(f" Before: gripper at {gripper_before[0]:.0f},{gripper_before[1]:.0f}, dist={dist_before:.1f}px")

        # Perturbation: move servo by +perturbation
        self.set_servo(servo_id, neutral + perturbation, duration=400)
        time.sleep(0.4)

        # Capture after perturbation (frame is captured but unused below)
        ball_after, markers_after, frame = self.capture_state()

        if ball_after is None:
            print(f" Warning: Ball lost after move")
            # Return to neutral
            self.set_servo(servo_id, neutral, duration=400)
            return None

        # Find gripper marker (same ID as before if possible)
        # NOTE(review): the "before" point may have come from a fallback marker
        # (e.g. M14) while this loop independently picks the first of [15, 14, 13]
        # visible now — if a different marker matches, the before/after distances
        # compare two different physical points. TODO confirm markers are
        # mutually exclusive in view, or track which ID was used for "before".
        gripper_after = None
        for marker_id in [15, 14, 13]:
            if marker_id in markers_after:
                gripper_after = markers_after[marker_id]
                break

        if gripper_after is None:
            print(f" Warning: Gripper marker lost after move (significant movement)")
            # Return to neutral
            self.set_servo(servo_id, neutral, duration=400)
            # If marker was lost, the movement was significant - treat as "toward" if likely
            return TwitchResult(
                servo_id=servo_id,
                initial_distance=dist_before,
                final_distance=0,  # Unknown
                delta=-dist_before,  # Assume moved toward (marker closer = can't see)
                perturbation=perturbation,
                direction="toward",  # Tentative
                confidence=0.3,  # Low confidence since we can't verify
            )

        dist_after = self.distance(gripper_after, ball_after)
        delta = dist_after - dist_before

        print(f" After: gripper at {gripper_after[0]:.0f},{gripper_after[1]:.0f}, dist={dist_after:.1f}px")
        print(f" Delta: {delta:+.1f}px")

        # Return to neutral
        self.set_servo(servo_id, neutral, duration=400)
        time.sleep(0.3)

        # Determine direction
        # Negative delta = got closer = positive command moves toward target
        # 10px dead-band filters out detection jitter; confidence saturates at 50px.
        if delta < -10:
            direction = "toward"
            confidence = min(1.0, abs(delta) / 50)
        elif delta > 10:
            direction = "away"
            confidence = min(1.0, abs(delta) / 50)
        else:
            direction = "minimal"
            confidence = 0.2

        print(f" Result: +{perturbation} PWM → {direction} (confidence: {confidence:.0%})")

        return TwitchResult(
            servo_id=servo_id,
            initial_distance=dist_before,
            final_distance=dist_after,
            delta=delta,
            perturbation=perturbation,
            direction=direction,
            confidence=confidence,
        )

    def run_calibration(self) -> Dict[int, DirectionMapping]:
        """
        Run direction calibration for all arm servos.

        Sets up camera and robot, verifies the ball and at least one arm
        marker are visible (saving debug images to /tmp on failure), twitch
        tests each servo in self.arm_servos, saves a JSON summary under
        ~/.ate/direction_calibration/, and releases resources.

        Returns dict of servo_id -> DirectionMapping ({} on setup/visibility failure).
        """
        print("\n" + "=" * 60)
        print("DIRECTION CALIBRATION (Twitch Test)")
        print("=" * 60)
        print("\nPurpose: Determine if positive servo values move")
        print(" TOWARD or AWAY from the target.\n")

        if not self.setup_camera():
            return {}

        if not self.setup_robot():
            self.cleanup()
            return {}

        # Verify we can see ball and markers
        print("Verifying visibility...")
        ball, markers, frame = self.capture_state()

        if ball is None:
            print("\nERROR: Cannot detect ball.")
            print("Please position a green ball in front of the robot.")
            cv2.imwrite("/tmp/direction_cal_no_ball.jpg", frame)
            print("Saved debug image to /tmp/direction_cal_no_ball.jpg")
            self.cleanup()
            return {}

        print(f" Ball detected at: {ball}")

        # Pick the first visible arm marker, preferring higher IDs.
        gripper_marker = None
        for m in [15, 14, 13]:
            if m in markers:
                gripper_marker = m
                break

        if gripper_marker is None:
            print("\nERROR: No arm markers (M13, M14, M15) visible.")
            print("Please position camera to see arm markers.")
            cv2.imwrite("/tmp/direction_cal_no_markers.jpg", frame)
            print("Saved debug image to /tmp/direction_cal_no_markers.jpg")
            self.cleanup()
            return {}

        print(f" Using marker M{gripper_marker} as gripper reference")
        print(f" Visible markers: {sorted(markers.keys())}")

        # Save initial state image (annotated with ball + gripper marker)
        cv2.circle(frame, ball, 20, (0, 255, 0), 3)
        cv2.putText(frame, "BALL", (ball[0]+25, ball[1]),
                    cv2.FONT_HERSHEY_SIMPLEX, 0.7, (0, 255, 0), 2)
        gpos = markers[gripper_marker]
        cv2.circle(frame, (int(gpos[0]), int(gpos[1])), 15, (255, 0, 255), 3)
        cv2.putText(frame, f"M{gripper_marker}", (int(gpos[0])+20, int(gpos[1])),
                    cv2.FONT_HERSHEY_SIMPLEX, 0.7, (255, 0, 255), 2)
        cv2.imwrite("/tmp/direction_cal_initial.jpg", frame)

        # Set neutral position for all arm servos
        print("\nSetting neutral position...")
        for servo_id in self.arm_servos:
            self.set_servo(servo_id, 1500, duration=500)
        time.sleep(1)

        # Run twitch test on each servo
        print("\nRunning twitch tests...")
        direction_mappings: Dict[int, DirectionMapping] = {}

        for servo_id in self.arm_servos:
            result = self.twitch_test(servo_id)

            if result:
                self.results[servo_id] = result

                # Convert to DirectionMapping
                toward_direction = "positive" if result.direction == "toward" else "negative"
                if result.direction == "minimal":
                    toward_direction = "unknown"

                mapping = DirectionMapping(
                    servo_id=servo_id,
                    toward_direction=toward_direction,
                    confidence=result.confidence,
                    delta_pixels=result.delta,
                    calibrated_at=datetime.now().isoformat(),
                )
                direction_mappings[servo_id] = mapping

        self.cleanup()

        # Print summary
        print("\n" + "=" * 60)
        print("DIRECTION CALIBRATION RESULTS")
        print("=" * 60)

        for servo_id, mapping in direction_mappings.items():
            result = self.results.get(servo_id)
            delta_str = f"Δ={result.delta:+.1f}px" if result else ""
            print(f"\n Servo {servo_id}:")
            print(f" To move TOWARD target: use {mapping.toward_direction} values")
            print(f" Confidence: {mapping.confidence:.0%} {delta_str}")

        # Save results
        output_path = Path.home() / ".ate" / "direction_calibration" / f"{self.robot_name}.json"
        output_path.parent.mkdir(parents=True, exist_ok=True)

        # NOTE(review): per-mapping "calibrated_at" is not serialized here (only
        # the top-level timestamp), while load_direction_calibration reads it
        # with a defensive .get() default — confirm whether it should be saved.
        output = {
            "robot_name": self.robot_name,
            "calibrated_at": datetime.now().isoformat(),
            "gripper_marker_id": self.gripper_marker_id,
            "mappings": {
                str(k): {
                    "servo_id": v.servo_id,
                    "toward_direction": v.toward_direction,
                    "confidence": v.confidence,
                    "delta_pixels": v.delta_pixels,
                }
                for k, v in direction_mappings.items()
            },
        }

        with open(output_path, "w") as f:
            json.dump(output, f, indent=2)

        print(f"\nSaved to: {output_path}")

        return direction_mappings

    def cleanup(self) -> None:
        """Release resources (camera and serial port), if open."""
        if self.cap:
            self.cap.release()
        if self.ser:
            self.ser.close()
|
|
479
|
+
|
|
480
|
+
|
|
481
|
+
def run_direction_calibration(
    serial_port: str,
    robot_name: str = "robot",
    gripper_marker_id: int = 15,
    camera_index: int = 0,
    arm_servos: Optional[List[int]] = None,
) -> Dict[int, DirectionMapping]:
    """
    Run direction calibration and update calibration state.

    This is the main entry point for the 'ate robot calibrate direction' command.

    Args:
        serial_port: Serial device path of the robot (e.g. "/dev/cu.usbserial-10").
        robot_name: Name used to load/save the per-robot calibration state.
        gripper_marker_id: ArUco marker ID attached to the gripper.
        camera_index: OpenCV camera index of the observing webcam.
        arm_servos: Servo IDs to test; None uses the calibrator's defaults.

    Returns:
        Dict of servo_id -> DirectionMapping; empty dict if calibration failed.
    """
    calibrator = DirectionCalibrator(
        serial_port=serial_port,
        robot_name=robot_name,
        gripper_marker_id=gripper_marker_id,
        camera_index=camera_index,
        arm_servos=arm_servos,
    )

    mappings = calibrator.run_calibration()

    if mappings:
        # Persist results into the shared calibration state so other
        # commands can require/consume direction calibration.
        state = CalibrationState.load(robot_name)
        state.complete_direction_calibration(mappings)

        print("\n" + "=" * 60)
        print("CALIBRATION STATE UPDATED")
        print("=" * 60)
        state.print_status()

    # Fix: always return the mapping dict (possibly empty) so the function
    # honors its Dict[...] annotation instead of implicitly returning None
    # on a failed calibration.
    return mappings
|
|
514
|
+
|
|
515
|
+
|
|
516
|
+
def load_direction_calibration(robot_name: str) -> Optional[Dict[int, DirectionMapping]]:
    """Load saved direction calibration for a robot.

    Reads ~/.ate/direction_calibration/<robot_name>.json and rebuilds the
    servo_id -> DirectionMapping dict. Returns None when the file is absent
    or cannot be parsed (a warning is printed in the latter case).
    """
    cal_file = Path.home() / ".ate" / "direction_calibration" / f"{robot_name}.json"

    if not cal_file.exists():
        return None

    try:
        data = json.loads(cal_file.read_text())
        return {
            int(servo_key): DirectionMapping(
                servo_id=entry["servo_id"],
                toward_direction=entry["toward_direction"],
                confidence=entry["confidence"],
                delta_pixels=entry["delta_pixels"],
                # Older files may omit the per-mapping timestamp.
                calibrated_at=entry.get("calibrated_at", ""),
            )
            for servo_key, entry in data.get("mappings", {}).items()
        }
    except Exception as e:
        # Best-effort load: report the problem, behave as "not calibrated".
        print(f"Error loading direction calibration: {e}")
        return None
|
|
542
|
+
|
|
543
|
+
|
|
544
|
+
def get_toward_direction(robot_name: str, servo_id: int) -> Optional[str]:
    """
    Get the direction that moves a servo toward the target.

    Returns "positive" or "negative", or None if not calibrated.
    """
    calibration = load_direction_calibration(robot_name)
    if not calibration:
        return None

    mapping = calibration.get(servo_id)
    return mapping.toward_direction if mapping is not None else None
|