foodforthought-cli 0.2.4-py3-none-any.whl → 0.2.8-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (37)
  1. ate/__init__.py +1 -1
  2. ate/behaviors/__init__.py +88 -0
  3. ate/behaviors/common.py +686 -0
  4. ate/behaviors/tree.py +454 -0
  5. ate/cli.py +610 -54
  6. ate/drivers/__init__.py +27 -0
  7. ate/drivers/mechdog.py +606 -0
  8. ate/interfaces/__init__.py +171 -0
  9. ate/interfaces/base.py +271 -0
  10. ate/interfaces/body.py +267 -0
  11. ate/interfaces/detection.py +282 -0
  12. ate/interfaces/locomotion.py +422 -0
  13. ate/interfaces/manipulation.py +408 -0
  14. ate/interfaces/navigation.py +389 -0
  15. ate/interfaces/perception.py +362 -0
  16. ate/interfaces/types.py +371 -0
  17. ate/mcp_server.py +387 -0
  18. ate/recording/__init__.py +44 -0
  19. ate/recording/demonstration.py +378 -0
  20. ate/recording/session.py +405 -0
  21. ate/recording/upload.py +304 -0
  22. ate/recording/wrapper.py +95 -0
  23. ate/robot/__init__.py +79 -0
  24. ate/robot/calibration.py +583 -0
  25. ate/robot/commands.py +3603 -0
  26. ate/robot/discovery.py +339 -0
  27. ate/robot/introspection.py +330 -0
  28. ate/robot/manager.py +270 -0
  29. ate/robot/profiles.py +275 -0
  30. ate/robot/registry.py +319 -0
  31. ate/robot/skill_upload.py +393 -0
  32. ate/robot/visual_labeler.py +1039 -0
  33. {foodforthought_cli-0.2.4.dist-info → foodforthought_cli-0.2.8.dist-info}/METADATA +9 -1
  34. {foodforthought_cli-0.2.4.dist-info → foodforthought_cli-0.2.8.dist-info}/RECORD +37 -8
  35. {foodforthought_cli-0.2.4.dist-info → foodforthought_cli-0.2.8.dist-info}/WHEEL +0 -0
  36. {foodforthought_cli-0.2.4.dist-info → foodforthought_cli-0.2.8.dist-info}/entry_points.txt +0 -0
  37. {foodforthought_cli-0.2.4.dist-info → foodforthought_cli-0.2.8.dist-info}/top_level.txt +0 -0
ate/interfaces/detection.py (new file)
@@ -0,0 +1,282 @@
+"""
+Object detection interface for robot perception.
+
+This is a HIGHER-LEVEL interface that wraps camera interfaces
+and ML models to provide semantic understanding of the environment.
+
+Design principle: Models are pluggable - the interface abstracts
+away the specific ML framework (YOLO, Detectron2, etc.)
+"""
+
+from abc import ABC, abstractmethod
+from dataclasses import dataclass, field
+from typing import List, Optional, Dict, Any, Callable
+from enum import Enum, auto
+
+from .types import Vector3, Image, ActionResult
+
+
+@dataclass
+class BoundingBox:
+    """2D bounding box in image coordinates."""
+    x_min: float  # Left edge (pixels)
+    y_min: float  # Top edge (pixels)
+    x_max: float  # Right edge (pixels)
+    y_max: float  # Bottom edge (pixels)
+
+    @property
+    def width(self) -> float:
+        return self.x_max - self.x_min
+
+    @property
+    def height(self) -> float:
+        return self.y_max - self.y_min
+
+    @property
+    def center(self) -> tuple:
+        return ((self.x_min + self.x_max) / 2, (self.y_min + self.y_max) / 2)
+
+    @property
+    def area(self) -> float:
+        return self.width * self.height
+
+
+@dataclass
+class Detection:
+    """A detected object in an image."""
+    class_name: str    # e.g., "trash", "bottle", "can"
+    class_id: int      # Numeric class ID
+    confidence: float  # 0.0 to 1.0
+    bbox: BoundingBox  # 2D bounding box
+
+    # Optional 3D info (if depth available)
+    position_3d: Optional[Vector3] = None  # In camera frame
+    distance: Optional[float] = None       # Distance in meters
+
+    # Optional instance segmentation mask
+    mask: Optional[Any] = None
+
+    # Additional attributes (color, size estimates, etc.)
+    attributes: Dict[str, Any] = field(default_factory=dict)
+
+    def to_dict(self) -> dict:
+        return {
+            "class_name": self.class_name,
+            "class_id": self.class_id,
+            "confidence": self.confidence,
+            "bbox": {
+                "x_min": self.bbox.x_min,
+                "y_min": self.bbox.y_min,
+                "x_max": self.bbox.x_max,
+                "y_max": self.bbox.y_max,
+            },
+            "position_3d": self.position_3d.to_dict() if self.position_3d else None,
+            "distance": self.distance,
+            "attributes": self.attributes,
+        }
+
+
+@dataclass
+class DetectionResult:
+    """Result of running object detection on an image."""
+    detections: List[Detection]
+    image: Optional[Image] = None  # Original image (optional)
+    inference_time_ms: float = 0.0
+    model_name: str = ""
+
+    def filter_by_class(self, class_name: str) -> List[Detection]:
+        """Get detections of a specific class."""
+        return [d for d in self.detections if d.class_name == class_name]
+
+    def filter_by_confidence(self, min_confidence: float) -> List[Detection]:
+        """Get detections above confidence threshold."""
+        return [d for d in self.detections if d.confidence >= min_confidence]
+
+    def get_closest(self) -> Optional[Detection]:
+        """Get the closest detected object (requires 3D info)."""
+        with_distance = [d for d in self.detections if d.distance is not None]
+        if not with_distance:
+            return None
+        return min(with_distance, key=lambda d: d.distance)
+
+
+class ObjectDetectionInterface(ABC):
+    """
+    Interface for object detection capabilities.
+
+    This abstracts the specific ML model and camera hardware,
+    providing a unified API for detecting objects in the environment.
+
+    Use cases:
+    - Trash detection for cleanup tasks
+    - Object manipulation (find and grasp)
+    - Obstacle detection for navigation
+    - Person detection for social robots
+    """
+
+    @abstractmethod
+    def detect(self, image: Optional[Image] = None) -> DetectionResult:
+        """
+        Run object detection.
+
+        Args:
+            image: Image to process. If None, capture from camera.
+
+        Returns:
+            DetectionResult with all detections
+        """
+        pass
+
+    @abstractmethod
+    def get_classes(self) -> List[str]:
+        """
+        Get list of classes this detector can recognize.
+
+        Returns:
+            List of class names
+        """
+        pass
+
+    def detect_class(self, class_name: str, min_confidence: float = 0.5) -> List[Detection]:
+        """
+        Detect objects of a specific class.
+
+        Args:
+            class_name: Class to detect (e.g., "bottle", "trash")
+            min_confidence: Minimum confidence threshold
+
+        Returns:
+            List of detections of that class
+        """
+        result = self.detect()
+        return [
+            d for d in result.detections
+            if d.class_name == class_name and d.confidence >= min_confidence
+        ]
+
+    def detect_any(self, class_names: List[str], min_confidence: float = 0.5) -> List[Detection]:
+        """
+        Detect objects of any of the specified classes.
+
+        Args:
+            class_names: List of classes to detect
+            min_confidence: Minimum confidence threshold
+
+        Returns:
+            List of detections matching any class
+        """
+        result = self.detect()
+        return [
+            d for d in result.detections
+            if d.class_name in class_names and d.confidence >= min_confidence
+        ]
+
+    def find_nearest(self, class_name: str) -> Optional[Detection]:
+        """
+        Find the nearest object of a class.
+
+        Args:
+            class_name: Class to find
+
+        Returns:
+            Detection of nearest object, or None
+        """
+        detections = self.detect_class(class_name)
+        with_distance = [d for d in detections if d.distance is not None]
+        if not with_distance:
+            # Fall back to largest bounding box (likely closest)
+            if detections:
+                return max(detections, key=lambda d: d.bbox.area)
+            return None
+        return min(with_distance, key=lambda d: d.distance)
+
+    # =========================================================================
+    # Model management
+    # =========================================================================
+
+    def load_model(self, model_path: str) -> ActionResult:
+        """Load a specific detection model."""
+        return ActionResult.error("Custom model loading not supported")
+
+    def get_model_info(self) -> Dict[str, Any]:
+        """Get information about the current model."""
+        return {"classes": self.get_classes()}
+
+    # =========================================================================
+    # Streaming detection
+    # =========================================================================
+
+    def start_detection_stream(
+        self,
+        callback: Callable[[DetectionResult], None],
+        min_confidence: float = 0.5
+    ) -> ActionResult:
+        """
+        Start continuous detection with callbacks.
+
+        Args:
+            callback: Function called with each detection result
+            min_confidence: Minimum confidence for callbacks
+        """
+        return ActionResult.error("Streaming detection not supported")
+
+    def stop_detection_stream(self) -> ActionResult:
+        """Stop the detection stream."""
+        return ActionResult.error("Streaming detection not supported")
+
+
+class TrashDetectionInterface(ObjectDetectionInterface):
+    """
+    Specialized detector for trash/litter.
+
+    Recognizes common trash items:
+    - Bottles (plastic, glass)
+    - Cans
+    - Paper/cardboard
+    - Wrappers/packaging
+    - Cigarette butts
+    - General debris
+    """
+
+    TRASH_CLASSES = [
+        "plastic_bottle",
+        "glass_bottle",
+        "can",
+        "paper",
+        "cardboard",
+        "wrapper",
+        "cigarette_butt",
+        "debris",
+        "trash",  # Generic
+    ]
+
+    def get_classes(self) -> List[str]:
+        return self.TRASH_CLASSES
+
+    def detect_trash(self, min_confidence: float = 0.5) -> List[Detection]:
+        """
+        Detect all trash items.
+
+        Returns:
+            List of trash detections
+        """
+        return self.detect_any(self.TRASH_CLASSES, min_confidence)
+
+    def find_nearest_trash(self) -> Optional[Detection]:
+        """
+        Find the nearest trash item.
+
+        Returns:
+            Detection of nearest trash, or None
+        """
+        detections = self.detect_trash()
+        with_distance = [d for d in detections if d.distance is not None]
+        if not with_distance:
+            if detections:
+                return max(detections, key=lambda d: d.bbox.area)
+            return None
+        return min(with_distance, key=lambda d: d.distance)
+
+    def is_trash_visible(self) -> bool:
+        """Check if any trash is visible."""
+        return len(self.detect_trash()) > 0
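For orientation, here is a minimal usage sketch of the new detection API. It assumes only that the classes above are importable from ate.interfaces.detection once 0.2.8 is installed; CannedTrashDetector and its hard-coded detections are hypothetical stand-ins for a real camera- and model-backed implementation.

from ate.interfaces.detection import (
    BoundingBox,
    Detection,
    DetectionResult,
    TrashDetectionInterface,
)


class CannedTrashDetector(TrashDetectionInterface):
    """Hypothetical detector returning fixed detections (no camera, no model)."""

    def detect(self, image=None) -> DetectionResult:
        # Two fake detections; a real implementation would run inference here.
        detections = [
            Detection(
                class_name="plastic_bottle",
                class_id=0,
                confidence=0.91,
                bbox=BoundingBox(x_min=120, y_min=80, x_max=200, y_max=240),
                distance=1.4,
            ),
            Detection(
                class_name="can",
                class_id=2,
                confidence=0.67,
                bbox=BoundingBox(x_min=400, y_min=300, x_max=440, y_max=360),
                distance=0.8,
            ),
        ]
        return DetectionResult(detections=detections, model_name="canned")


detector = CannedTrashDetector()
nearest = detector.find_nearest_trash()
if nearest is not None:
    print(f"Nearest trash: {nearest.class_name} at {nearest.distance} m")  # -> can at 0.8 m

Because TrashDetectionInterface already provides get_classes(), overriding detect() is enough to satisfy the ABC; find_nearest_trash() then picks the detection with the smallest distance and falls back to the largest bounding box when no depth is available.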
ate/interfaces/locomotion.py (new file)
@@ -0,0 +1,422 @@
+"""
+Locomotion interfaces for mobile robots.
+
+Supports:
+- Quadrupeds (MechDog, Spot, Unitree, ANYmal)
+- Bipeds (Humanoids, Digit, Atlas)
+- Wheeled robots (TurtleBot, AMRs)
+- Aerial (Drones)
+
+Each interface defines normalized actions that are hardware-agnostic.
+A "walk forward 0.5m" command works the same on MechDog and Spot.
+"""
+
+from abc import ABC, abstractmethod
+from typing import Optional, List, Callable
+from enum import Enum, auto
+
+from .types import (
+    Vector3,
+    Quaternion,
+    Pose,
+    Twist,
+    GaitType,
+    GaitParameters,
+    JointState,
+    ActionResult,
+)
+
+
+class LocomotionInterface(ABC):
+    """
+    Base interface for all locomotion.
+
+    All mobile robots share these concepts:
+    - Current pose in some frame
+    - Velocity control
+    - Stop
+    """
+
+    @abstractmethod
+    def get_pose(self) -> Pose:
+        """
+        Get current pose (position + orientation) in odometry frame.
+
+        Returns:
+            Pose in odom frame
+        """
+        pass
+
+    @abstractmethod
+    def get_velocity(self) -> Twist:
+        """
+        Get current velocity (linear + angular).
+
+        Returns:
+            Twist with linear (m/s) and angular (rad/s) velocity
+        """
+        pass
+
+    @abstractmethod
+    def stop(self) -> ActionResult:
+        """
+        Immediately stop all locomotion.
+
+        Returns:
+            ActionResult
+        """
+        pass
+
+    @abstractmethod
+    def is_moving(self) -> bool:
+        """
+        Check if robot is currently moving.
+
+        Returns:
+            True if any locomotion is active
+        """
+        pass
+
+
+class QuadrupedLocomotion(LocomotionInterface):
+    """
+    Interface for quadruped (4-legged) robots.
+
+    Implemented by: MechDog, Spot, Unitree Go1/Go2, ANYmal, etc.
+
+    Coordinate frame conventions:
+    - X: Forward (positive = front of robot)
+    - Y: Left (positive = left side of robot)
+    - Z: Up (positive = above robot)
+
+    All distances in meters, angles in radians.
+    """
+
+    # =========================================================================
+    # High-level movement commands
+    # =========================================================================
+
+    @abstractmethod
+    def walk(self, direction: Vector3, speed: float = 0.5) -> ActionResult:
+        """
+        Walk in a direction at given speed.
+
+        This is a continuous command - robot keeps walking until stop() is called.
+
+        Args:
+            direction: Unit vector for direction (in robot frame)
+                Vector3(1, 0, 0) = forward
+                Vector3(0, 1, 0) = left
+                Vector3(-1, 0, 0) = backward
+            speed: Speed in m/s (clamped to robot's max)
+
+        Returns:
+            ActionResult
+        """
+        pass
+
+    @abstractmethod
+    def walk_to(self, target: Vector3, speed: float = 0.5) -> ActionResult:
+        """
+        Walk to a target position (blocking).
+
+        Args:
+            target: Target position in odometry frame
+            speed: Speed in m/s
+
+        Returns:
+            ActionResult (when target reached or failed)
+        """
+        pass
+
+    @abstractmethod
+    def turn(self, angle: float, speed: float = 0.5) -> ActionResult:
+        """
+        Turn in place by given angle (blocking).
+
+        Args:
+            angle: Angle in radians (positive = counterclockwise)
+            speed: Angular speed in rad/s
+
+        Returns:
+            ActionResult (when turn complete)
+        """
+        pass
+
+    @abstractmethod
+    def turn_continuous(self, angular_velocity: float) -> ActionResult:
+        """
+        Turn continuously at given angular velocity.
+
+        Args:
+            angular_velocity: rad/s (positive = counterclockwise)
+
+        Returns:
+            ActionResult
+        """
+        pass
+
+    # =========================================================================
+    # Posture commands
+    # =========================================================================
+
+    @abstractmethod
+    def stand(self) -> ActionResult:
+        """
+        Stand up from any position.
+
+        Returns:
+            ActionResult (when standing complete)
+        """
+        pass
+
+    @abstractmethod
+    def sit(self) -> ActionResult:
+        """
+        Sit down (lower body to ground).
+
+        Returns:
+            ActionResult (when sit complete)
+        """
+        pass
+
+    @abstractmethod
+    def lie_down(self) -> ActionResult:
+        """
+        Lie down completely (motors may turn off).
+
+        Returns:
+            ActionResult
+        """
+        pass
+
+    # =========================================================================
+    # Gait control
+    # =========================================================================
+
+    @abstractmethod
+    def set_gait(self, gait: GaitType) -> ActionResult:
+        """
+        Set the gait pattern.
+
+        Args:
+            gait: GaitType (WALK, TROT, BOUND, etc.)
+
+        Returns:
+            ActionResult
+        """
+        pass
+
+    @abstractmethod
+    def get_gait(self) -> GaitType:
+        """
+        Get current gait pattern.
+
+        Returns:
+            Current GaitType
+        """
+        pass
+
+    def set_gait_parameters(self, params: GaitParameters) -> ActionResult:
+        """
+        Set detailed gait parameters.
+
+        Default implementation just sets gait type.
+        Override for robots with fine-grained gait control.
+
+        Args:
+            params: GaitParameters with stride, step height, etc.
+
+        Returns:
+            ActionResult
+        """
+        return self.set_gait(params.gait_type)
+
+    # =========================================================================
+    # Leg control (for advanced use)
+    # =========================================================================
+
+    @abstractmethod
+    def get_foot_positions(self) -> List[Vector3]:
+        """
+        Get current foot positions relative to body.
+
+        Returns:
+            List of 4 Vector3 positions [front_left, front_right, back_left, back_right]
+        """
+        pass
+
+    def set_foot_position(self, leg_index: int, position: Vector3) -> ActionResult:
+        """
+        Set a single foot position using inverse kinematics.
+
+        Args:
+            leg_index: 0=front_left, 1=front_right, 2=back_left, 3=back_right
+            position: Target position relative to body
+
+        Returns:
+            ActionResult
+        """
+        # Default: not implemented
+        return ActionResult.error("set_foot_position not implemented for this robot")
+
+    # =========================================================================
+    # Joint-level access (for telemetry/recording)
+    # =========================================================================
+
+    @abstractmethod
+    def get_joint_state(self) -> JointState:
+        """
+        Get current state of all leg joints.
+
+        Returns:
+            JointState with positions, velocities, efforts
+        """
+        pass
+
+    def get_joint_names(self) -> List[str]:
+        """
+        Get names of all leg joints in order.
+
+        Default naming convention:
+        [FL_hip, FL_thigh, FL_calf, FR_hip, FR_thigh, FR_calf,
+         BL_hip, BL_thigh, BL_calf, BR_hip, BR_thigh, BR_calf]
+
+        Returns:
+            List of joint names
+        """
+        return [
+            "FL_hip", "FL_thigh", "FL_calf",
+            "FR_hip", "FR_thigh", "FR_calf",
+            "BL_hip", "BL_thigh", "BL_calf",
+            "BR_hip", "BR_thigh", "BR_calf",
+        ]
+
+
+class BipedLocomotion(LocomotionInterface):
+    """
+    Interface for biped (2-legged) robots.
+
+    Implemented by: Humanoids, Digit, Cassie, etc.
+    """
+
+    @abstractmethod
+    def walk(self, direction: Vector3, speed: float = 0.5) -> ActionResult:
+        """Walk in direction."""
+        pass
+
+    @abstractmethod
+    def walk_to(self, target: Vector3, speed: float = 0.5) -> ActionResult:
+        """Walk to target position."""
+        pass
+
+    @abstractmethod
+    def turn(self, angle: float, speed: float = 0.5) -> ActionResult:
+        """Turn by angle."""
+        pass
+
+    @abstractmethod
+    def stand(self) -> ActionResult:
+        """Stand up."""
+        pass
+
+    @abstractmethod
+    def crouch(self) -> ActionResult:
+        """Lower body while standing."""
+        pass
+
+    @abstractmethod
+    def get_joint_state(self) -> JointState:
+        """Get leg joint states."""
+        pass
+
+    # Biped-specific
+    @abstractmethod
+    def step_over(self, obstacle_height: float) -> ActionResult:
+        """Step over an obstacle of given height."""
+        pass
+
+    @abstractmethod
+    def climb_stairs(self, step_height: float, num_steps: int) -> ActionResult:
+        """Climb stairs."""
+        pass
+
+
+class WheeledLocomotion(LocomotionInterface):
+    """
+    Interface for wheeled robots.
+
+    Implemented by: TurtleBot, AMRs, differential drive, Ackermann, etc.
+    """
+
+    @abstractmethod
+    def drive(self, linear: float, angular: float) -> ActionResult:
+        """
+        Drive with given linear and angular velocity.
+
+        Args:
+            linear: Forward velocity in m/s
+            angular: Angular velocity in rad/s
+
+        Returns:
+            ActionResult
+        """
+        pass
+
+    @abstractmethod
+    def drive_to(self, target: Vector3, speed: float = 0.5) -> ActionResult:
+        """Drive to target position."""
+        pass
+
+    @abstractmethod
+    def turn(self, angle: float, speed: float = 0.5) -> ActionResult:
+        """Turn by angle."""
+        pass
+
+    @abstractmethod
+    def get_wheel_velocities(self) -> List[float]:
+        """Get current wheel velocities in rad/s."""
+        pass
+
+
+class AerialLocomotion(LocomotionInterface):
+    """
+    Interface for aerial robots (drones).
+
+    Implemented by: Quadcopters, fixed-wing, etc.
+    """
+
+    @abstractmethod
+    def takeoff(self, altitude: float = 1.0) -> ActionResult:
+        """Take off to given altitude."""
+        pass
+
+    @abstractmethod
+    def land(self) -> ActionResult:
+        """Land at current position."""
+        pass
+
+    @abstractmethod
+    def hover(self) -> ActionResult:
+        """Hover in place."""
+        pass
+
+    @abstractmethod
+    def fly_to(self, target: Vector3, speed: float = 1.0) -> ActionResult:
+        """Fly to target position."""
+        pass
+
+    @abstractmethod
+    def set_velocity(self, velocity: Vector3) -> ActionResult:
+        """Set 3D velocity."""
+        pass
+
+    @abstractmethod
+    def get_altitude(self) -> float:
+        """Get current altitude in meters."""
+        pass
+
+    @abstractmethod
+    def is_flying(self) -> bool:
+        """Check if currently airborne."""
+        pass
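The locomotion classes are likewise abstract, so here is a hedged sketch of how caller code is meant to stay hardware-agnostic. out_and_back() is a hypothetical helper, the time.sleep() timing is crude open-loop illustration only, and GaitType.TROT is assumed to exist because TROT is named in the set_gait() docstring; a concrete driver instance (for example whatever ate/drivers/mechdog.py exposes, whose class name is not shown in this diff) would be passed in as robot.

import math
import time

from ate.interfaces.locomotion import QuadrupedLocomotion
from ate.interfaces.types import GaitType, Vector3


def out_and_back(robot: QuadrupedLocomotion, distance_m: float = 0.5, speed: float = 0.25) -> None:
    """Walk forward a short distance, turn around, walk back, then sit."""
    robot.stand()                  # blocking: stand up from any posture
    robot.set_gait(GaitType.TROT)  # assumed enum member; TROT appears in set_gait() docs
    for _ in range(2):
        robot.walk(Vector3(1, 0, 0), speed=speed)  # continuous walk, robot frame, +X = forward
        time.sleep(distance_m / speed)             # open-loop: walk() runs until stop()
        robot.stop()
        robot.turn(math.pi, speed=0.5)             # blocking 180-degree counterclockwise turn
    robot.sit()

Nothing in the helper is MechDog-specific: the same calls should apply to any QuadrupedLocomotion implementation, which is the point of the normalized interface.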