foodforthought-cli 0.2.7__py3-none-any.whl → 0.3.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (131)
  1. ate/__init__.py +6 -0
  2. ate/__main__.py +16 -0
  3. ate/auth/__init__.py +1 -0
  4. ate/auth/device_flow.py +141 -0
  5. ate/auth/token_store.py +96 -0
  6. ate/behaviors/__init__.py +100 -0
  7. ate/behaviors/approach.py +399 -0
  8. ate/behaviors/common.py +686 -0
  9. ate/behaviors/tree.py +454 -0
  10. ate/cli.py +855 -3995
  11. ate/client.py +90 -0
  12. ate/commands/__init__.py +168 -0
  13. ate/commands/auth.py +389 -0
  14. ate/commands/bridge.py +448 -0
  15. ate/commands/data.py +185 -0
  16. ate/commands/deps.py +111 -0
  17. ate/commands/generate.py +384 -0
  18. ate/commands/memory.py +907 -0
  19. ate/commands/parts.py +166 -0
  20. ate/commands/primitive.py +399 -0
  21. ate/commands/protocol.py +288 -0
  22. ate/commands/recording.py +524 -0
  23. ate/commands/repo.py +154 -0
  24. ate/commands/simulation.py +291 -0
  25. ate/commands/skill.py +303 -0
  26. ate/commands/skills.py +487 -0
  27. ate/commands/team.py +147 -0
  28. ate/commands/workflow.py +271 -0
  29. ate/detection/__init__.py +38 -0
  30. ate/detection/base.py +142 -0
  31. ate/detection/color_detector.py +399 -0
  32. ate/detection/trash_detector.py +322 -0
  33. ate/drivers/__init__.py +39 -0
  34. ate/drivers/ble_transport.py +405 -0
  35. ate/drivers/mechdog.py +942 -0
  36. ate/drivers/wifi_camera.py +477 -0
  37. ate/interfaces/__init__.py +187 -0
  38. ate/interfaces/base.py +273 -0
  39. ate/interfaces/body.py +267 -0
  40. ate/interfaces/detection.py +282 -0
  41. ate/interfaces/locomotion.py +422 -0
  42. ate/interfaces/manipulation.py +408 -0
  43. ate/interfaces/navigation.py +389 -0
  44. ate/interfaces/perception.py +362 -0
  45. ate/interfaces/sensors.py +247 -0
  46. ate/interfaces/types.py +371 -0
  47. ate/llm_proxy.py +239 -0
  48. ate/mcp_server.py +387 -0
  49. ate/memory/__init__.py +35 -0
  50. ate/memory/cloud.py +244 -0
  51. ate/memory/context.py +269 -0
  52. ate/memory/embeddings.py +184 -0
  53. ate/memory/export.py +26 -0
  54. ate/memory/merge.py +146 -0
  55. ate/memory/migrate/__init__.py +34 -0
  56. ate/memory/migrate/base.py +89 -0
  57. ate/memory/migrate/pipeline.py +189 -0
  58. ate/memory/migrate/sources/__init__.py +13 -0
  59. ate/memory/migrate/sources/chroma.py +170 -0
  60. ate/memory/migrate/sources/pinecone.py +120 -0
  61. ate/memory/migrate/sources/qdrant.py +110 -0
  62. ate/memory/migrate/sources/weaviate.py +160 -0
  63. ate/memory/reranker.py +353 -0
  64. ate/memory/search.py +26 -0
  65. ate/memory/store.py +548 -0
  66. ate/recording/__init__.py +83 -0
  67. ate/recording/demonstration.py +378 -0
  68. ate/recording/session.py +415 -0
  69. ate/recording/upload.py +304 -0
  70. ate/recording/visual.py +416 -0
  71. ate/recording/wrapper.py +95 -0
  72. ate/robot/__init__.py +221 -0
  73. ate/robot/agentic_servo.py +856 -0
  74. ate/robot/behaviors.py +493 -0
  75. ate/robot/ble_capture.py +1000 -0
  76. ate/robot/ble_enumerate.py +506 -0
  77. ate/robot/calibration.py +668 -0
  78. ate/robot/calibration_state.py +388 -0
  79. ate/robot/commands.py +3735 -0
  80. ate/robot/direction_calibration.py +554 -0
  81. ate/robot/discovery.py +441 -0
  82. ate/robot/introspection.py +330 -0
  83. ate/robot/llm_system_id.py +654 -0
  84. ate/robot/locomotion_calibration.py +508 -0
  85. ate/robot/manager.py +270 -0
  86. ate/robot/marker_generator.py +611 -0
  87. ate/robot/perception.py +502 -0
  88. ate/robot/primitives.py +614 -0
  89. ate/robot/profiles.py +281 -0
  90. ate/robot/registry.py +322 -0
  91. ate/robot/servo_mapper.py +1153 -0
  92. ate/robot/skill_upload.py +675 -0
  93. ate/robot/target_calibration.py +500 -0
  94. ate/robot/teach.py +515 -0
  95. ate/robot/types.py +242 -0
  96. ate/robot/visual_labeler.py +1048 -0
  97. ate/robot/visual_servo_loop.py +494 -0
  98. ate/robot/visual_servoing.py +570 -0
  99. ate/robot/visual_system_id.py +906 -0
  100. ate/transports/__init__.py +121 -0
  101. ate/transports/base.py +394 -0
  102. ate/transports/ble.py +405 -0
  103. ate/transports/hybrid.py +444 -0
  104. ate/transports/serial.py +345 -0
  105. ate/urdf/__init__.py +30 -0
  106. ate/urdf/capture.py +582 -0
  107. ate/urdf/cloud.py +491 -0
  108. ate/urdf/collision.py +271 -0
  109. ate/urdf/commands.py +708 -0
  110. ate/urdf/depth.py +360 -0
  111. ate/urdf/inertial.py +312 -0
  112. ate/urdf/kinematics.py +330 -0
  113. ate/urdf/lifting.py +415 -0
  114. ate/urdf/meshing.py +300 -0
  115. ate/urdf/models/__init__.py +110 -0
  116. ate/urdf/models/depth_anything.py +253 -0
  117. ate/urdf/models/sam2.py +324 -0
  118. ate/urdf/motion_analysis.py +396 -0
  119. ate/urdf/pipeline.py +468 -0
  120. ate/urdf/scale.py +256 -0
  121. ate/urdf/scan_session.py +411 -0
  122. ate/urdf/segmentation.py +299 -0
  123. ate/urdf/synthesis.py +319 -0
  124. ate/urdf/topology.py +336 -0
  125. ate/urdf/validation.py +371 -0
  126. {foodforthought_cli-0.2.7.dist-info → foodforthought_cli-0.3.0.dist-info}/METADATA +9 -1
  127. foodforthought_cli-0.3.0.dist-info/RECORD +166 -0
  128. {foodforthought_cli-0.2.7.dist-info → foodforthought_cli-0.3.0.dist-info}/WHEEL +1 -1
  129. foodforthought_cli-0.2.7.dist-info/RECORD +0 -44
  130. {foodforthought_cli-0.2.7.dist-info → foodforthought_cli-0.3.0.dist-info}/entry_points.txt +0 -0
  131. {foodforthought_cli-0.2.7.dist-info → foodforthought_cli-0.3.0.dist-info}/top_level.txt +0 -0
ate/robot/target_calibration.py (new file, +500 lines)
@@ -0,0 +1,500 @@
+ """
+ Target Detection Calibration
+
+ Interactive tool to tune color detection for specific targets,
+ with robot self-masking to ignore the robot's own features.
+
+ The Problem:
+ -----------
+ Generic ball detection picked up:
+ - Blue LEDs on robot
+ - Cyan/teal reflections
+ - ArUco marker borders
+
+ This caused erratic position readings, breaking visual servoing.
+
+ The Solution:
+ ------------
+ 1. Interactive HSV tuning with live preview
+ 2. Robot self-mask using ArUco marker positions
+ 3. Target profiles saved for reuse
+ 4. Multiple target types supported
+
+ Usage:
+     from ate.robot.target_calibration import TargetCalibrator
+
+     cal = TargetCalibrator(camera_index=0)
+     profile = cal.calibrate_interactive("green_ball")
+     profile.save()  # defaults to ~/.ate/targets/green_ball.json
+
+     # Later use
+     profile = TargetProfile.load("green_ball")
+     position = profile.detect(frame)
+ """
+
+ import cv2
+ import numpy as np
+ import json
+ from dataclasses import dataclass, field, asdict
+ from datetime import datetime
+ from pathlib import Path
+ from typing import Optional, Dict, List, Tuple, Callable
+ import time
+
+
+ @dataclass
+ class HSVRange:
+     """HSV color range for detection."""
+     h_low: int = 0
+     h_high: int = 180
+     s_low: int = 0
+     s_high: int = 255
+     v_low: int = 0
+     v_high: int = 255
+
+     def to_arrays(self) -> Tuple[np.ndarray, np.ndarray]:
+         """Convert to OpenCV format arrays."""
+         lower = np.array([self.h_low, self.s_low, self.v_low])
+         upper = np.array([self.h_high, self.s_high, self.v_high])
+         return lower, upper
+
+     @classmethod
+     def from_dict(cls, d: dict) -> "HSVRange":
+         return cls(**d)
+
+
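As a rough usage sketch (not taken from the wheel; `frame` stands for any BGR image, e.g. one read from cv2.VideoCapture), an HSVRange plugs straight into cv2.inRange. OpenCV stores hue as 0-180 for 8-bit images, which is why the hue bounds above stop at 180 rather than 255:

    green = HSVRange(h_low=40, h_high=80, s_low=60, s_high=255, v_low=60, v_high=255)
    lower, upper = green.to_arrays()
    hsv = cv2.cvtColor(frame, cv2.COLOR_BGR2HSV)   # convert BGR -> HSV before thresholding
    mask = cv2.inRange(hsv, lower, upper)          # 255 where the pixel falls inside the range, 0 elsewhere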
+ @dataclass
+ class TargetProfile:
+     """
+     Calibrated detection profile for a target type.
+
+     Includes HSV ranges, size constraints, and optional mask regions.
+     """
+     name: str
+     hsv_range: HSVRange
+     min_area: int = 500
+     max_area: int = 50000
+     min_circularity: float = 0.3  # 0-1, 1 = perfect circle
+     mask_regions: List[Tuple[int, int, int, int]] = field(default_factory=list)  # [(x, y, w, h), ...]
+     robot_mask_enabled: bool = True
+     created_at: str = field(default_factory=lambda: datetime.now().isoformat())
+     samples_collected: int = 0
+
+     def detect(
+         self,
+         frame: np.ndarray,
+         robot_markers: Optional[Dict[int, Tuple[float, float]]] = None,
+     ) -> Optional[Tuple[int, int]]:
+         """
+         Detect target in frame, return (x, y) or None.
+
+         Args:
+             frame: BGR image
+             robot_markers: Optional dict of ArUco marker positions to mask
+         """
+         hsv = cv2.cvtColor(frame, cv2.COLOR_BGR2HSV)
+
+         # Create mask from HSV range
+         lower, upper = self.hsv_range.to_arrays()
+         mask = cv2.inRange(hsv, lower, upper)
+
+         # Apply static mask regions
+         for (x, y, w, h) in self.mask_regions:
+             mask[y:y+h, x:x+w] = 0
+
+         # Apply robot self-mask using ArUco markers
+         if self.robot_mask_enabled and robot_markers:
+             mask = self._apply_robot_mask(mask, robot_markers)
+
+         # Find contours
+         contours, _ = cv2.findContours(mask, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE)
+
+         if not contours:
+             return None
+
+         # Filter by area and circularity
+         valid_contours = []
+         for contour in contours:
+             area = cv2.contourArea(contour)
+             if area < self.min_area or area > self.max_area:
+                 continue
+
+             # Calculate circularity
+             perimeter = cv2.arcLength(contour, True)
+             if perimeter > 0:
+                 circularity = 4 * np.pi * area / (perimeter * perimeter)
+                 if circularity < self.min_circularity:
+                     continue
+
+             valid_contours.append((contour, area))
+
+         if not valid_contours:
+             return None
+
+         # Return center of largest valid contour
+         largest = max(valid_contours, key=lambda x: x[1])[0]
+         M = cv2.moments(largest)
+         if M["m00"] > 0:
+             cx = int(M["m10"] / M["m00"])
+             cy = int(M["m01"] / M["m00"])
+             return (cx, cy)
+
+         return None
+
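For reference, the circularity test above is the standard isoperimetric ratio 4*pi*area / perimeter^2. A perfect circle of radius r has area pi*r^2 and perimeter 2*pi*r, so the ratio is exactly 1; a 100x10 px streak (area 1000, perimeter 220) scores roughly 0.26 and is rejected by the default min_circularity of 0.3. That lets the detector keep a partially occluded ball while dropping elongated reflections and edge fragments.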
+     def _apply_robot_mask(
+         self,
+         mask: np.ndarray,
+         markers: Dict[int, Tuple[float, float]],
+         radius: int = 80,
+     ) -> np.ndarray:
+         """Mask out regions around detected ArUco markers (robot parts)."""
+         for marker_id, (mx, my) in markers.items():
+             # Mask a circular region around each marker
+             cv2.circle(mask, (int(mx), int(my)), radius, 0, -1)
+         return mask
+
+     def save(self, path: Optional[str] = None) -> str:
+         """Save profile to JSON."""
+         if path is None:
+             save_dir = Path.home() / ".ate" / "targets"
+             save_dir.mkdir(parents=True, exist_ok=True)
+             path = str(save_dir / f"{self.name}.json")
+
+         data = {
+             "name": self.name,
+             "hsv_range": asdict(self.hsv_range),
+             "min_area": self.min_area,
+             "max_area": self.max_area,
+             "min_circularity": self.min_circularity,
+             "mask_regions": self.mask_regions,
+             "robot_mask_enabled": self.robot_mask_enabled,
+             "created_at": self.created_at,
+             "samples_collected": self.samples_collected,
+         }
+
+         with open(path, "w") as f:
+             json.dump(data, f, indent=2)
+
+         return path
+
+     @classmethod
+     def load(cls, name_or_path: str) -> Optional["TargetProfile"]:
+         """Load profile from JSON."""
+         if "/" in name_or_path or name_or_path.endswith(".json"):
+             path = Path(name_or_path)
+         else:
+             path = Path.home() / ".ate" / "targets" / f"{name_or_path}.json"
+
+         if not path.exists():
+             return None
+
+         with open(path) as f:
+             data = json.load(f)
+
+         return cls(
+             name=data["name"],
+             hsv_range=HSVRange.from_dict(data["hsv_range"]),
+             min_area=data.get("min_area", 500),
+             max_area=data.get("max_area", 50000),
+             min_circularity=data.get("min_circularity", 0.3),
+             mask_regions=data.get("mask_regions", []),
+             robot_mask_enabled=data.get("robot_mask_enabled", True),
+             created_at=data.get("created_at", ""),
+             samples_collected=data.get("samples_collected", 0),
+         )
+
+
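A minimal sketch of how a saved profile is consumed outside the calibrator (the "green_ball" name comes from the module docstring; the camera index and the marker dictionary are placeholders, not values shipped in the package):

    cap = cv2.VideoCapture(0)
    ok, frame = cap.read()
    profile = TargetProfile.load("green_ball")      # reads ~/.ate/targets/green_ball.json
    if ok and profile is not None:
        # Marker centers would normally come from an ArUco detector
        # (see TargetCalibrator.detect_markers below); this dict is hypothetical.
        robot_markers = {3: (412.0, 268.0)}
        position = profile.detect(frame, robot_markers=robot_markers)
        print("target at", position)                # (x, y) in pixels, or None
    cap.release()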
+ class TargetCalibrator:
+     """
+     Interactive target calibration tool.
+
+     Provides live preview of detection with HSV tuning sliders.
+     """
+
+     def __init__(
+         self,
+         camera_index: int = 0,
+         aruco_dict: int = cv2.aruco.DICT_4X4_50,
+     ):
+         self.camera_index = camera_index
+         self.cap: Optional[cv2.VideoCapture] = None
+
+         # ArUco setup for robot masking
+         aruco_dictionary = cv2.aruco.getPredefinedDictionary(aruco_dict)
+         aruco_params = cv2.aruco.DetectorParameters()
+         self.aruco_detector = cv2.aruco.ArucoDetector(aruco_dictionary, aruco_params)
+
+         # Current HSV values (will be set by sliders)
+         self.hsv_range = HSVRange(
+             h_low=70, h_high=110,  # Blue-green default
+             s_low=50, s_high=255,
+             v_low=50, v_high=255,
+         )
+
+         # Samples for averaging
+         self.hsv_samples: List[Tuple[int, int, int]] = []
+
+     def setup_camera(self) -> bool:
+         """Initialize webcam."""
+         self.cap = cv2.VideoCapture(self.camera_index)
+         if not self.cap.isOpened():
+             return False
+
+         self.cap.set(cv2.CAP_PROP_FRAME_WIDTH, 1280)
+         self.cap.set(cv2.CAP_PROP_FRAME_HEIGHT, 720)
+
+         # Warm up
+         for _ in range(10):
+             self.cap.read()
+
+         return True
+
+     def detect_markers(self, frame: np.ndarray) -> Dict[int, Tuple[float, float]]:
+         """Detect ArUco markers for robot masking."""
+         gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
+         corners, ids, _ = self.aruco_detector.detectMarkers(gray)
+
+         markers = {}
+         if ids is not None:
+             for i, mid in enumerate(ids.flatten()):
+                 c = corners[i][0]
+                 center = (float(np.mean(c[:, 0])), float(np.mean(c[:, 1])))
+                 markers[int(mid)] = center
+
+         return markers
+
+     def sample_hsv_at_click(self, frame: np.ndarray, x: int, y: int, radius: int = 10):
+         """Sample HSV values from a region around click point."""
+         hsv = cv2.cvtColor(frame, cv2.COLOR_BGR2HSV)
+
+         # Get region
+         y1 = max(0, y - radius)
+         y2 = min(frame.shape[0], y + radius)
+         x1 = max(0, x - radius)
+         x2 = min(frame.shape[1], x + radius)
+
+         region = hsv[y1:y2, x1:x2]
+
+         # Calculate median HSV values
+         h_med = int(np.median(region[:, :, 0]))
+         s_med = int(np.median(region[:, :, 1]))
+         v_med = int(np.median(region[:, :, 2]))
+
+         self.hsv_samples.append((h_med, s_med, v_med))
+
+         return (h_med, s_med, v_med)
+
+     def compute_hsv_range_from_samples(self, margin: int = 15) -> HSVRange:
+         """Compute HSV range from collected samples."""
+         if not self.hsv_samples:
+             return self.hsv_range
+
+         h_vals = [s[0] for s in self.hsv_samples]
+         s_vals = [s[1] for s in self.hsv_samples]
+         v_vals = [s[2] for s in self.hsv_samples]
+
+         return HSVRange(
+             h_low=max(0, min(h_vals) - margin),
+             h_high=min(180, max(h_vals) + margin),
+             s_low=max(0, min(s_vals) - margin),
+             s_high=min(255, max(s_vals) + margin),
+             v_low=max(0, min(v_vals) - margin),
+             v_high=min(255, max(v_vals) + margin),
+         )
+
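To make the margin concrete, here is a hypothetical pair of sampled medians and the range they would produce with the default margin of 15:

    # Hypothetical clicks: medians (H, S, V) = (52, 180, 200) and (58, 210, 160)
    #   H: [min(52, 58) - 15, max(52, 58) + 15] -> [37, 73]
    #   S: [180 - 15, 210 + 15]                 -> [165, 225]
    #   V: [160 - 15, 200 + 15]                 -> [145, 215]
    # Hue is clamped to [0, 180]; saturation and value are clamped to [0, 255].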
+     def calibrate_interactive(self, target_name: str) -> Optional[TargetProfile]:
+         """
+         Run interactive calibration with OpenCV windows.
+
+         Instructions:
+         - Click on target object to sample HSV values
+         - Adjust sliders if needed
+         - Press 's' to save and exit
+         - Press 'c' to clear samples
+         - Press 'q' to quit without saving
+         """
+         if not self.setup_camera():
+             print("ERROR: Could not open camera")
+             return None
+
+         window_name = f"Target Calibration: {target_name}"
+         mask_window = "Detection Mask"
+
+         cv2.namedWindow(window_name)
+         cv2.namedWindow(mask_window)
+
+         # Mouse callback for sampling
+         current_frame = [None]
+
+         def mouse_callback(event, x, y, flags, param):
+             if event == cv2.EVENT_LBUTTONDOWN and current_frame[0] is not None:
+                 hsv = self.sample_hsv_at_click(current_frame[0], x, y)
+                 print(f"Sampled HSV at ({x}, {y}): H={hsv[0]}, S={hsv[1]}, V={hsv[2]}")
+                 # Update range from samples
+                 self.hsv_range = self.compute_hsv_range_from_samples()
+
+         cv2.setMouseCallback(window_name, mouse_callback)
+
+         # Create trackbars
+         def nothing(x):
+             pass
+
+         cv2.createTrackbar("H Low", window_name, self.hsv_range.h_low, 180, nothing)
+         cv2.createTrackbar("H High", window_name, self.hsv_range.h_high, 180, nothing)
+         cv2.createTrackbar("S Low", window_name, self.hsv_range.s_low, 255, nothing)
+         cv2.createTrackbar("S High", window_name, self.hsv_range.s_high, 255, nothing)
+         cv2.createTrackbar("V Low", window_name, self.hsv_range.v_low, 255, nothing)
+         cv2.createTrackbar("V High", window_name, self.hsv_range.v_high, 255, nothing)
+         cv2.createTrackbar("Robot Mask", window_name, 1, 1, nothing)
+
+         print(f"\nTarget Calibration: {target_name}")
+         print("=" * 50)
+         print("Click on the target object to sample HSV values")
+         print("Adjust sliders to fine-tune detection")
+         print("Press 's' to save, 'c' to clear samples, 'q' to quit")
+         print()
+
+         robot_mask_enabled = True
+
+         while True:
+             ret, frame = self.cap.read()
+             if not ret:
+                 continue
+
+             current_frame[0] = frame.copy()
+
+             # Read trackbar values
+             self.hsv_range.h_low = cv2.getTrackbarPos("H Low", window_name)
+             self.hsv_range.h_high = cv2.getTrackbarPos("H High", window_name)
+             self.hsv_range.s_low = cv2.getTrackbarPos("S Low", window_name)
+             self.hsv_range.s_high = cv2.getTrackbarPos("S High", window_name)
+             self.hsv_range.v_low = cv2.getTrackbarPos("V Low", window_name)
+             self.hsv_range.v_high = cv2.getTrackbarPos("V High", window_name)
+             robot_mask_enabled = cv2.getTrackbarPos("Robot Mask", window_name) == 1
+
+             # Detect markers for masking
+             markers = self.detect_markers(frame)
+
+             # Create profile for detection
+             profile = TargetProfile(
+                 name=target_name,
+                 hsv_range=self.hsv_range,
+                 robot_mask_enabled=robot_mask_enabled,
+                 samples_collected=len(self.hsv_samples),
+             )
+
+             # Detect target
+             target_pos = profile.detect(frame, markers if robot_mask_enabled else None)
+
+             # Create visualization
+             display = frame.copy()
+
+             # Draw marker masks
+             if robot_mask_enabled:
+                 for mid, (mx, my) in markers.items():
+                     cv2.circle(display, (int(mx), int(my)), 80, (128, 128, 128), 2)
+                     cv2.putText(display, f"MASK M{mid}", (int(mx)-30, int(my)-85),
+                                 cv2.FONT_HERSHEY_SIMPLEX, 0.5, (128, 128, 128), 1)
+
+             # Draw detected target
+             if target_pos:
+                 cv2.circle(display, target_pos, 30, (0, 255, 0), 3)
+                 cv2.putText(display, f"TARGET ({target_pos[0]}, {target_pos[1]})",
+                             (target_pos[0] + 35, target_pos[1]),
+                             cv2.FONT_HERSHEY_SIMPLEX, 0.7, (0, 255, 0), 2)
+
+             # Draw HSV info
+             info = f"HSV: H[{self.hsv_range.h_low}-{self.hsv_range.h_high}] "
+             info += f"S[{self.hsv_range.s_low}-{self.hsv_range.s_high}] "
+             info += f"V[{self.hsv_range.v_low}-{self.hsv_range.v_high}]"
+             cv2.putText(display, info, (10, 30),
+                         cv2.FONT_HERSHEY_SIMPLEX, 0.7, (255, 255, 255), 2)
+
+             cv2.putText(display, f"Samples: {len(self.hsv_samples)}", (10, 60),
+                         cv2.FONT_HERSHEY_SIMPLEX, 0.7, (255, 255, 255), 2)
+
+             # Show detection mask
+             hsv = cv2.cvtColor(frame, cv2.COLOR_BGR2HSV)
+             lower, upper = self.hsv_range.to_arrays()
+             mask = cv2.inRange(hsv, lower, upper)
+
+             if robot_mask_enabled:
+                 for mid, (mx, my) in markers.items():
+                     cv2.circle(mask, (int(mx), int(my)), 80, 0, -1)
+
+             cv2.imshow(window_name, display)
+             cv2.imshow(mask_window, mask)
+
+             key = cv2.waitKey(1) & 0xFF
+
+             if key == ord('s'):
+                 # Save and exit
+                 profile.samples_collected = len(self.hsv_samples)
+                 path = profile.save()
+                 print(f"\nSaved target profile to: {path}")
+                 self.cap.release()
+                 cv2.destroyAllWindows()
+                 return profile
+
+             elif key == ord('c'):
+                 # Clear samples
+                 self.hsv_samples = []
+                 print("Cleared HSV samples")
+
+             elif key == ord('q'):
+                 # Quit without saving
+                 print("\nCancelled - profile not saved")
+                 self.cap.release()
+                 cv2.destroyAllWindows()
+                 return None
+
+         self.cap.release()
+         cv2.destroyAllWindows()
+         return None
+
+     def calibrate_non_interactive(
+         self,
+         target_name: str,
+         hsv_range: HSVRange,
+         robot_mask_enabled: bool = True,
+     ) -> TargetProfile:
+         """Create target profile without interactive calibration."""
+         return TargetProfile(
+             name=target_name,
+             hsv_range=hsv_range,
+             robot_mask_enabled=robot_mask_enabled,
+         )
+
+
+ def run_target_calibration(target_name: str, camera_index: int = 0) -> Optional[TargetProfile]:
+     """
+     Run interactive target calibration.
+
+     Entry point for CLI command.
+     """
+     calibrator = TargetCalibrator(camera_index=camera_index)
+     return calibrator.calibrate_interactive(target_name)
+
+
+ def list_target_profiles() -> List[str]:
+     """List saved target profiles."""
+     target_dir = Path.home() / ".ate" / "targets"
+     if not target_dir.exists():
+         return []
+     return [p.stem for p in target_dir.glob("*.json")]
+
+
+ def detect_with_profile(
+     frame: np.ndarray,
+     profile_name: str,
+     robot_markers: Optional[Dict[int, Tuple[float, float]]] = None,
+ ) -> Optional[Tuple[int, int]]:
+     """
+     Detect target using saved profile.
+
+     Convenience function for use in behaviors.
+     """
+     profile = TargetProfile.load(profile_name)
+     if not profile:
+         return None
+     return profile.detect(frame, robot_markers)
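Taken together, the module-level helpers suggest a calibrate-once, detect-many workflow. The sketch below is illustrative only; the camera index and the "green_ball" profile name are placeholders:

    import cv2
    from ate.robot.target_calibration import (
        run_target_calibration, list_target_profiles, detect_with_profile,
    )

    # Calibrate once if no profile exists yet (opens the interactive tuner).
    if "green_ball" not in list_target_profiles():
        run_target_calibration("green_ball", camera_index=0)

    # Reuse the saved profile frame by frame.
    cap = cv2.VideoCapture(0)
    for _ in range(100):                                  # bounded loop, just for the sketch
        ok, frame = cap.read()
        if not ok:
            break
        pos = detect_with_profile(frame, "green_ball")    # (x, y) in pixels, or None
        if pos is not None:
            print("target at", pos)
    cap.release()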