foodforthought-cli 0.2.7__py3-none-any.whl → 0.3.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (131)
  1. ate/__init__.py +6 -0
  2. ate/__main__.py +16 -0
  3. ate/auth/__init__.py +1 -0
  4. ate/auth/device_flow.py +141 -0
  5. ate/auth/token_store.py +96 -0
  6. ate/behaviors/__init__.py +100 -0
  7. ate/behaviors/approach.py +399 -0
  8. ate/behaviors/common.py +686 -0
  9. ate/behaviors/tree.py +454 -0
  10. ate/cli.py +855 -3995
  11. ate/client.py +90 -0
  12. ate/commands/__init__.py +168 -0
  13. ate/commands/auth.py +389 -0
  14. ate/commands/bridge.py +448 -0
  15. ate/commands/data.py +185 -0
  16. ate/commands/deps.py +111 -0
  17. ate/commands/generate.py +384 -0
  18. ate/commands/memory.py +907 -0
  19. ate/commands/parts.py +166 -0
  20. ate/commands/primitive.py +399 -0
  21. ate/commands/protocol.py +288 -0
  22. ate/commands/recording.py +524 -0
  23. ate/commands/repo.py +154 -0
  24. ate/commands/simulation.py +291 -0
  25. ate/commands/skill.py +303 -0
  26. ate/commands/skills.py +487 -0
  27. ate/commands/team.py +147 -0
  28. ate/commands/workflow.py +271 -0
  29. ate/detection/__init__.py +38 -0
  30. ate/detection/base.py +142 -0
  31. ate/detection/color_detector.py +399 -0
  32. ate/detection/trash_detector.py +322 -0
  33. ate/drivers/__init__.py +39 -0
  34. ate/drivers/ble_transport.py +405 -0
  35. ate/drivers/mechdog.py +942 -0
  36. ate/drivers/wifi_camera.py +477 -0
  37. ate/interfaces/__init__.py +187 -0
  38. ate/interfaces/base.py +273 -0
  39. ate/interfaces/body.py +267 -0
  40. ate/interfaces/detection.py +282 -0
  41. ate/interfaces/locomotion.py +422 -0
  42. ate/interfaces/manipulation.py +408 -0
  43. ate/interfaces/navigation.py +389 -0
  44. ate/interfaces/perception.py +362 -0
  45. ate/interfaces/sensors.py +247 -0
  46. ate/interfaces/types.py +371 -0
  47. ate/llm_proxy.py +239 -0
  48. ate/mcp_server.py +387 -0
  49. ate/memory/__init__.py +35 -0
  50. ate/memory/cloud.py +244 -0
  51. ate/memory/context.py +269 -0
  52. ate/memory/embeddings.py +184 -0
  53. ate/memory/export.py +26 -0
  54. ate/memory/merge.py +146 -0
  55. ate/memory/migrate/__init__.py +34 -0
  56. ate/memory/migrate/base.py +89 -0
  57. ate/memory/migrate/pipeline.py +189 -0
  58. ate/memory/migrate/sources/__init__.py +13 -0
  59. ate/memory/migrate/sources/chroma.py +170 -0
  60. ate/memory/migrate/sources/pinecone.py +120 -0
  61. ate/memory/migrate/sources/qdrant.py +110 -0
  62. ate/memory/migrate/sources/weaviate.py +160 -0
  63. ate/memory/reranker.py +353 -0
  64. ate/memory/search.py +26 -0
  65. ate/memory/store.py +548 -0
  66. ate/recording/__init__.py +83 -0
  67. ate/recording/demonstration.py +378 -0
  68. ate/recording/session.py +415 -0
  69. ate/recording/upload.py +304 -0
  70. ate/recording/visual.py +416 -0
  71. ate/recording/wrapper.py +95 -0
  72. ate/robot/__init__.py +221 -0
  73. ate/robot/agentic_servo.py +856 -0
  74. ate/robot/behaviors.py +493 -0
  75. ate/robot/ble_capture.py +1000 -0
  76. ate/robot/ble_enumerate.py +506 -0
  77. ate/robot/calibration.py +668 -0
  78. ate/robot/calibration_state.py +388 -0
  79. ate/robot/commands.py +3735 -0
  80. ate/robot/direction_calibration.py +554 -0
  81. ate/robot/discovery.py +441 -0
  82. ate/robot/introspection.py +330 -0
  83. ate/robot/llm_system_id.py +654 -0
  84. ate/robot/locomotion_calibration.py +508 -0
  85. ate/robot/manager.py +270 -0
  86. ate/robot/marker_generator.py +611 -0
  87. ate/robot/perception.py +502 -0
  88. ate/robot/primitives.py +614 -0
  89. ate/robot/profiles.py +281 -0
  90. ate/robot/registry.py +322 -0
  91. ate/robot/servo_mapper.py +1153 -0
  92. ate/robot/skill_upload.py +675 -0
  93. ate/robot/target_calibration.py +500 -0
  94. ate/robot/teach.py +515 -0
  95. ate/robot/types.py +242 -0
  96. ate/robot/visual_labeler.py +1048 -0
  97. ate/robot/visual_servo_loop.py +494 -0
  98. ate/robot/visual_servoing.py +570 -0
  99. ate/robot/visual_system_id.py +906 -0
  100. ate/transports/__init__.py +121 -0
  101. ate/transports/base.py +394 -0
  102. ate/transports/ble.py +405 -0
  103. ate/transports/hybrid.py +444 -0
  104. ate/transports/serial.py +345 -0
  105. ate/urdf/__init__.py +30 -0
  106. ate/urdf/capture.py +582 -0
  107. ate/urdf/cloud.py +491 -0
  108. ate/urdf/collision.py +271 -0
  109. ate/urdf/commands.py +708 -0
  110. ate/urdf/depth.py +360 -0
  111. ate/urdf/inertial.py +312 -0
  112. ate/urdf/kinematics.py +330 -0
  113. ate/urdf/lifting.py +415 -0
  114. ate/urdf/meshing.py +300 -0
  115. ate/urdf/models/__init__.py +110 -0
  116. ate/urdf/models/depth_anything.py +253 -0
  117. ate/urdf/models/sam2.py +324 -0
  118. ate/urdf/motion_analysis.py +396 -0
  119. ate/urdf/pipeline.py +468 -0
  120. ate/urdf/scale.py +256 -0
  121. ate/urdf/scan_session.py +411 -0
  122. ate/urdf/segmentation.py +299 -0
  123. ate/urdf/synthesis.py +319 -0
  124. ate/urdf/topology.py +336 -0
  125. ate/urdf/validation.py +371 -0
  126. {foodforthought_cli-0.2.7.dist-info → foodforthought_cli-0.3.0.dist-info}/METADATA +9 -1
  127. foodforthought_cli-0.3.0.dist-info/RECORD +166 -0
  128. {foodforthought_cli-0.2.7.dist-info → foodforthought_cli-0.3.0.dist-info}/WHEEL +1 -1
  129. foodforthought_cli-0.2.7.dist-info/RECORD +0 -44
  130. {foodforthought_cli-0.2.7.dist-info → foodforthought_cli-0.3.0.dist-info}/entry_points.txt +0 -0
  131. {foodforthought_cli-0.2.7.dist-info → foodforthought_cli-0.3.0.dist-info}/top_level.txt +0 -0
ate/robot/visual_servo_loop.py (new file)
@@ -0,0 +1,494 @@
+ """
+ Visual Servoing Framework
+
+ A reusable feedback loop that uses direction calibration to iteratively
+ move toward a target. This is the "Model-Free Adaptive IBVS" approach
+ recommended in the visual servoing literature.
+
+ The Problem This Solves:
+ ------------------------
+ Direction calibration tells us WHICH WAY to move, but not HOW FAR.
+ Open-loop commands (set servo to X) fail if the target position is wrong.
+
+ Visual servoing solves this by:
+ 1. Measure the current distance to the target
+ 2. Move a small step in the "toward" direction
+ 3. Measure the new distance
+ 4. Repeat until close enough (or stuck)
+
+ This is robot-agnostic and task-agnostic. Any task that needs to
+ approach a visual target can use this framework.
+
+ Usage:
+     from ate.robot.visual_servo_loop import VisualServoLoop, ServoConfig
+
+     # Configure servos with their direction calibration
+     servos = [
+         ServoConfig(id=9, toward_direction="positive", min_value=800, max_value=2200, step_size=50),
+         ServoConfig(id=8, toward_direction="negative", min_value=800, max_value=2200, step_size=30),
+     ]
+
+     # Create the loop
+     loop = VisualServoLoop(
+         servos=servos,
+         target_detector=detect_ball,       # function(frame) -> (x, y) or None
+         effector_detector=detect_gripper,  # function(frame) -> (x, y) or None
+         move_servo=robot.set_servo,        # function(id, position, duration)
+         capture_frame=camera.read,         # function() -> frame
+     )
+
+     # Run until converged or max iterations
+     result = loop.run(
+         threshold_px=50,   # Stop when within 50 pixels
+         max_iterations=20,
+     )
+ """
+
+ import time
+ import numpy as np
+ from dataclasses import dataclass, field
+ from typing import Callable, Optional, List, Dict, Tuple, Any
+ from enum import Enum
+ from datetime import datetime
+ import json
+ from pathlib import Path
+
+
+ class ServoRole(Enum):
+     """Semantic role of a servo in approaching a target."""
+     HORIZONTAL = "horizontal"  # Moves effector left/right
+     VERTICAL = "vertical"      # Moves effector up/down
+     DEPTH = "depth"            # Moves effector forward/back
+     GRIPPER = "gripper"        # Opens/closes gripper
+     UNKNOWN = "unknown"
+
+
+ @dataclass
+ class ServoConfig:
+     """Configuration for a servo in the visual servo loop."""
+     id: int
+     toward_direction: str  # "positive" or "negative"
+     min_value: int = 500
+     max_value: int = 2500
+     current_value: int = 1500
+     step_size: int = 50  # PWM units per iteration
+     role: ServoRole = ServoRole.UNKNOWN
+     enabled: bool = True  # Can disable specific servos
+     confidence: float = 1.0  # From direction calibration
+
+
+ @dataclass
+ class ServoState:
+     """Tracks the state of a servo during servoing."""
+     config: ServoConfig
+     initial_value: int
+     current_value: int
+     moves_made: int = 0
+     last_delta: float = 0.0  # Last measured improvement
+     stuck_count: int = 0  # How many times we've made no progress
+
+
+ @dataclass
+ class ServoIteration:
+     """Record of a single iteration of the servo loop."""
+     iteration: int
+     timestamp: str
+     target_pos: Optional[Tuple[int, int]]
+     effector_pos: Optional[Tuple[int, int]]
+     distance_px: float
+     servo_moves: Dict[int, int]  # servo_id -> new_value
+     delta_distance: float  # Change from last iteration
+
+
+ @dataclass
+ class ServoResult:
+     """Result of a visual servo run."""
+     success: bool
+     reason: str  # "converged", "max_iterations", "target_lost", "stuck"
+     final_distance_px: float
+     iterations: int
+     total_time_s: float
+     history: List[ServoIteration] = field(default_factory=list)
+     servo_states: Dict[int, ServoState] = field(default_factory=dict)
+
+
+ class VisualServoLoop:
+     """
+     Feedback loop that uses direction calibration to approach a target.
+
+     This is the core visual servoing framework. It is robot-agnostic:
+     you provide the functions to detect targets, move servos, and capture frames.
+     """
+
+     def __init__(
+         self,
+         servos: List[ServoConfig],
+         target_detector: Callable[[np.ndarray], Optional[Tuple[int, int]]],
+         effector_detector: Callable[[np.ndarray], Optional[Tuple[int, int]]],
+         move_servo: Callable[[int, int, int], None],
+         capture_frame: Callable[[], np.ndarray],
+         frame_delay: float = 0.3,  # Seconds to wait after move
+     ):
+         """
+         Initialize the visual servo loop.
+
+         Args:
+             servos: List of servo configurations with direction calibration
+             target_detector: Function that takes a frame and returns target (x, y) or None
+             effector_detector: Function that takes a frame and returns effector (x, y) or None
+             move_servo: Function to move a servo: (id, position, duration_ms)
+             capture_frame: Function to capture a camera frame
+             frame_delay: Seconds to wait after moving before capturing
+         """
+         self.servos = {s.id: s for s in servos}
+         self.target_detector = target_detector
+         self.effector_detector = effector_detector
+         self.move_servo = move_servo
+         self.capture_frame = capture_frame
+         self.frame_delay = frame_delay
+
+         # State tracking
+         self.states: Dict[int, ServoState] = {}
+         self.history: List[ServoIteration] = []
+
+         # Initialize states
+         for servo in servos:
+             self.states[servo.id] = ServoState(
+                 config=servo,
+                 initial_value=servo.current_value,
+                 current_value=servo.current_value,
+             )
+
+     def distance(self, p1: Optional[Tuple], p2: Optional[Tuple]) -> float:
+         """Euclidean distance between two points."""
+         if p1 is None or p2 is None:
+             return float("inf")
+         # Return a plain Python float so distances serialize cleanly to JSON
+         return float(np.sqrt((p1[0] - p2[0])**2 + (p1[1] - p2[1])**2))
+
+     def capture_state(self) -> Tuple[Optional[Tuple], Optional[Tuple], np.ndarray]:
+         """Capture frame and detect target/effector positions."""
+         frame = self.capture_frame()
+         target = self.target_detector(frame)
+         effector = self.effector_detector(frame)
+         return target, effector, frame
+
+     def move_toward(self, servo_id: int, amount: Optional[int] = None) -> int:
+         """
+         Move a servo toward the target by one step.
+
+         Uses direction calibration to determine which way to move.
+         Returns the new servo position.
+         """
+         state = self.states[servo_id]
+         config = state.config
+
+         if not config.enabled:
+             return state.current_value
+
+         step = amount or config.step_size
+
+         # Use calibrated direction
+         if config.toward_direction == "positive":
+             new_value = state.current_value + step
+         else:
+             new_value = state.current_value - step
+
+         # Clamp to limits
+         new_value = max(config.min_value, min(config.max_value, new_value))
+
+         # Execute move
+         self.move_servo(servo_id, new_value, 300)
+         state.current_value = new_value
+         state.moves_made += 1
+
+         return new_value
+
+     def move_away(self, servo_id: int, amount: Optional[int] = None) -> int:
+         """Move a servo away from the target by one step."""
+         state = self.states[servo_id]
+         config = state.config
+
+         if not config.enabled:
+             return state.current_value
+
+         step = amount or config.step_size
+
+         # Opposite of toward direction
+         if config.toward_direction == "positive":
+             new_value = state.current_value - step
+         else:
+             new_value = state.current_value + step
+
+         new_value = max(config.min_value, min(config.max_value, new_value))
+         self.move_servo(servo_id, new_value, 300)
+         state.current_value = new_value
+         state.moves_made += 1
+
+         return new_value
+
+     def run(
+         self,
+         threshold_px: float = 50.0,
+         max_iterations: int = 30,
+         stuck_threshold: int = 3,
+         verbose: bool = True,
+     ) -> ServoResult:
+         """
+         Run the visual servo loop until convergence or failure.
+
+         Args:
+             threshold_px: Stop when distance is below this (pixels)
+             max_iterations: Maximum iterations before giving up
+             stuck_threshold: Give up if no progress for this many iterations
+             verbose: Print progress
+
+         Returns:
+             ServoResult with success/failure and history
+         """
+         start_time = time.time()
+         self.history = []
+
+         if verbose:
+             print("\n" + "=" * 60)
+             print("VISUAL SERVO LOOP")
+             print("=" * 60)
+             print(f"Target threshold: {threshold_px}px")
+             print(f"Max iterations: {max_iterations}")
+             print(f"Servos: {list(self.servos.keys())}")
+             print()
+
+         # Initial state
+         target, effector, frame = self.capture_state()
+
+         if target is None:
+             return ServoResult(
+                 success=False,
+                 reason="target_lost",
+                 final_distance_px=float("inf"),
+                 iterations=0,
+                 total_time_s=time.time() - start_time,
+                 history=self.history,
+                 servo_states=self.states,
+             )
+
+         if effector is None:
+             return ServoResult(
+                 success=False,
+                 reason="effector_lost",
+                 final_distance_px=float("inf"),
+                 iterations=0,
+                 total_time_s=time.time() - start_time,
+                 history=self.history,
+                 servo_states=self.states,
+             )
+
+         current_distance = self.distance(effector, target)
+         if verbose:
+             print(f"Initial: target={target}, effector={effector}, dist={current_distance:.1f}px")
+
+         global_stuck_count = 0
+
+         for iteration in range(max_iterations):
+             # Check if we've converged
+             if current_distance < threshold_px:
+                 if verbose:
+                     print(f"\n[CONVERGED] Distance {current_distance:.1f}px < threshold {threshold_px}px")
+
+                 return ServoResult(
+                     success=True,
+                     reason="converged",
+                     final_distance_px=current_distance,
+                     iterations=iteration,
+                     total_time_s=time.time() - start_time,
+                     history=self.history,
+                     servo_states=self.states,
+                 )
+
+             # Try moving each enabled servo toward target
+             moves_this_iteration = {}
+             best_improvement = 0
+             best_servo = None
+
+             for servo_id, state in self.states.items():
+                 if not state.config.enabled:
+                     continue
+
+                 # Store current position for potential rollback
+                 original_value = state.current_value
+
+                 # Try moving toward
+                 new_value = self.move_toward(servo_id)
+                 time.sleep(self.frame_delay)
+
+                 # Measure result
+                 new_target, new_effector, _ = self.capture_state()
+
+                 if new_target is None or new_effector is None:
+                     # Lost tracking - rollback
+                     self.move_servo(servo_id, original_value, 200)
+                     state.current_value = original_value
+                     time.sleep(self.frame_delay)
+                     continue
+
+                 new_distance = self.distance(new_effector, new_target)
+                 improvement = current_distance - new_distance
+
+                 if verbose:
+                     direction = "↓" if improvement > 0 else "↑"
+                     print(f" Servo {servo_id}: {original_value}→{new_value}, "
+                           f"dist {current_distance:.1f}→{new_distance:.1f}px "
+                           f"({direction}{abs(improvement):.1f}px)")
+
+                 if improvement > best_improvement:
+                     best_improvement = improvement
+                     best_servo = servo_id
+                     moves_this_iteration[servo_id] = new_value
+
+                 # Rollback this move - we'll commit the best one
+                 self.move_servo(servo_id, original_value, 200)
+                 state.current_value = original_value
+                 time.sleep(0.1)
+
+             # Commit the best move (if any improvement)
+             if best_servo is not None and best_improvement > 5:  # Minimum improvement threshold
+                 state = self.states[best_servo]
+                 new_value = moves_this_iteration[best_servo]
+                 self.move_servo(best_servo, new_value, 300)
+                 state.current_value = new_value
+                 time.sleep(self.frame_delay)
+
+                 # Update distance
+                 target, effector, frame = self.capture_state()
+                 if target and effector:
+                     current_distance = self.distance(effector, target)
+                 state.last_delta = best_improvement
+
+                 global_stuck_count = 0
+
+                 if verbose:
+                     print(f" → Committed servo {best_servo} to {new_value}, "
+                           f"new dist={current_distance:.1f}px")
+             else:
+                 global_stuck_count += 1
+                 if verbose:
+                     print(f" → No improvement found (stuck count: {global_stuck_count})")
+
+             # Record iteration
+             self.history.append(ServoIteration(
+                 iteration=iteration,
+                 timestamp=datetime.now().isoformat(),
+                 target_pos=target,
+                 effector_pos=effector,
+                 distance_px=current_distance,
+                 servo_moves=moves_this_iteration,
+                 delta_distance=best_improvement,
+             ))
+
+             # Check if stuck
+             if global_stuck_count >= stuck_threshold:
+                 if verbose:
+                     print(f"\n[STUCK] No progress for {stuck_threshold} iterations")
+
+                 return ServoResult(
+                     success=False,
+                     reason="stuck",
+                     final_distance_px=current_distance,
+                     iterations=iteration,
+                     total_time_s=time.time() - start_time,
+                     history=self.history,
+                     servo_states=self.states,
+                 )
+
+             # Re-acquire target for next iteration
+             target, effector, frame = self.capture_state()
+             if target is None:
+                 if verbose:
+                     print("\n[TARGET LOST]")
+                 return ServoResult(
+                     success=False,
+                     reason="target_lost",
+                     final_distance_px=current_distance,
+                     iterations=iteration,
+                     total_time_s=time.time() - start_time,
+                     history=self.history,
+                     servo_states=self.states,
+                 )
+
+         # Max iterations reached
+         if verbose:
+             print(f"\n[MAX ITERATIONS] Reached {max_iterations} iterations")
+
+         return ServoResult(
+             success=False,
+             reason="max_iterations",
+             final_distance_px=current_distance,
+             iterations=max_iterations,
+             total_time_s=time.time() - start_time,
+             history=self.history,
+             servo_states=self.states,
+         )
+
+     def save_history(self, path: str) -> None:
+         """Save servo history to JSON for analysis."""
+         data = {
+             "timestamp": datetime.now().isoformat(),
+             "servos": {
+                 str(sid): {
+                     "toward_direction": s.config.toward_direction,
+                     "initial_value": s.initial_value,
+                     "final_value": s.current_value,
+                     "moves_made": s.moves_made,
+                 }
+                 for sid, s in self.states.items()
+             },
+             "iterations": [
+                 {
+                     "iteration": it.iteration,
+                     "timestamp": it.timestamp,
+                     "target_pos": it.target_pos,
+                     "effector_pos": it.effector_pos,
+                     "distance_px": it.distance_px,
+                     "delta_distance": it.delta_distance,
+                 }
+                 for it in self.history
+             ],
+         }
+
+         Path(path).parent.mkdir(parents=True, exist_ok=True)
+         with open(path, "w") as f:
+             json.dump(data, f, indent=2)
+
+
+ def load_direction_calibration_for_servos(
+     robot_name: str,
+     servo_ids: List[int],
+ ) -> List[ServoConfig]:
+     """
+     Load direction calibration and create ServoConfig objects.
+
+     Convenience function to bridge direction calibration with servo loop.
+     """
+     from .direction_calibration import load_direction_calibration
+
+     calibration = load_direction_calibration(robot_name)
+     if not calibration:
+         raise ValueError(f"No direction calibration found for {robot_name}")
+
+     configs = []
+     for servo_id in servo_ids:
+         mapping = calibration.get(servo_id)
+         if mapping:
+             configs.append(ServoConfig(
+                 id=servo_id,
+                 toward_direction=mapping.toward_direction,
+                 confidence=mapping.confidence,
+             ))
+         else:
+             # No calibration - default to positive
+             configs.append(ServoConfig(
+                 id=servo_id,
+                 toward_direction="positive",
+                 confidence=0.0,
+             ))
+
+     return configs
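For orientation, here is a minimal sketch of how the pieces added in this hunk are meant to be wired together, following the contract stated in the module docstring above (detectors return (x, y) or None, move_servo takes (id, position, duration_ms), capture_frame returns a frame). The fake camera, toy detectors, and print-only move function are illustrative stand-ins for real hardware bindings, not APIs shipped by this package.

# Sketch only: exercising VisualServoLoop without hardware.
import numpy as np

from ate.robot.visual_servo_loop import ServoConfig, VisualServoLoop

def make_fake_frame() -> np.ndarray:
    """Synthetic 480x640 BGR frame with a small bright-red 'target' patch."""
    frame = np.zeros((480, 640, 3), dtype=np.uint8)
    frame[200:220, 400:420] = (0, 0, 255)
    return frame

def detect_target(frame: np.ndarray):
    """Toy detector: centroid of strongly red pixels, or None if none found."""
    ys, xs = np.where(frame[:, :, 2] > 200)
    if len(xs) == 0:
        return None
    return int(xs.mean()), int(ys.mean())

def detect_effector(frame: np.ndarray):
    """Stand-in effector detector; a real one would track the gripper."""
    return (320, 240)

def move_servo(servo_id: int, position: int, duration_ms: int) -> None:
    """Print-only stand-in for a real servo transport."""
    print(f"servo {servo_id} -> {position} over {duration_ms}ms")

# Hypothetical servo IDs and calibration, mirroring the docstring example.
servos = [
    ServoConfig(id=9, toward_direction="positive", step_size=50),
    ServoConfig(id=8, toward_direction="negative", step_size=30),
]

loop = VisualServoLoop(
    servos=servos,
    target_detector=detect_target,
    effector_detector=detect_effector,
    move_servo=move_servo,
    capture_frame=make_fake_frame,
    frame_delay=0.0,  # no real hardware, so no settling time needed
)

result = loop.run(threshold_px=40, max_iterations=5, verbose=True)
print(result.reason, round(result.final_distance_px, 1))
loop.save_history("runs/visual_servo_history.json")

Because the stub detectors never actually move the effector, this run exits with reason "stuck" after stuck_threshold iterations; the point of the sketch is only to show the callable signatures the class expects before plugging in real detectors and a servo transport.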