foodforthought-cli 0.2.7-py3-none-any.whl → 0.3.0-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- ate/__init__.py +6 -0
- ate/__main__.py +16 -0
- ate/auth/__init__.py +1 -0
- ate/auth/device_flow.py +141 -0
- ate/auth/token_store.py +96 -0
- ate/behaviors/__init__.py +100 -0
- ate/behaviors/approach.py +399 -0
- ate/behaviors/common.py +686 -0
- ate/behaviors/tree.py +454 -0
- ate/cli.py +855 -3995
- ate/client.py +90 -0
- ate/commands/__init__.py +168 -0
- ate/commands/auth.py +389 -0
- ate/commands/bridge.py +448 -0
- ate/commands/data.py +185 -0
- ate/commands/deps.py +111 -0
- ate/commands/generate.py +384 -0
- ate/commands/memory.py +907 -0
- ate/commands/parts.py +166 -0
- ate/commands/primitive.py +399 -0
- ate/commands/protocol.py +288 -0
- ate/commands/recording.py +524 -0
- ate/commands/repo.py +154 -0
- ate/commands/simulation.py +291 -0
- ate/commands/skill.py +303 -0
- ate/commands/skills.py +487 -0
- ate/commands/team.py +147 -0
- ate/commands/workflow.py +271 -0
- ate/detection/__init__.py +38 -0
- ate/detection/base.py +142 -0
- ate/detection/color_detector.py +399 -0
- ate/detection/trash_detector.py +322 -0
- ate/drivers/__init__.py +39 -0
- ate/drivers/ble_transport.py +405 -0
- ate/drivers/mechdog.py +942 -0
- ate/drivers/wifi_camera.py +477 -0
- ate/interfaces/__init__.py +187 -0
- ate/interfaces/base.py +273 -0
- ate/interfaces/body.py +267 -0
- ate/interfaces/detection.py +282 -0
- ate/interfaces/locomotion.py +422 -0
- ate/interfaces/manipulation.py +408 -0
- ate/interfaces/navigation.py +389 -0
- ate/interfaces/perception.py +362 -0
- ate/interfaces/sensors.py +247 -0
- ate/interfaces/types.py +371 -0
- ate/llm_proxy.py +239 -0
- ate/mcp_server.py +387 -0
- ate/memory/__init__.py +35 -0
- ate/memory/cloud.py +244 -0
- ate/memory/context.py +269 -0
- ate/memory/embeddings.py +184 -0
- ate/memory/export.py +26 -0
- ate/memory/merge.py +146 -0
- ate/memory/migrate/__init__.py +34 -0
- ate/memory/migrate/base.py +89 -0
- ate/memory/migrate/pipeline.py +189 -0
- ate/memory/migrate/sources/__init__.py +13 -0
- ate/memory/migrate/sources/chroma.py +170 -0
- ate/memory/migrate/sources/pinecone.py +120 -0
- ate/memory/migrate/sources/qdrant.py +110 -0
- ate/memory/migrate/sources/weaviate.py +160 -0
- ate/memory/reranker.py +353 -0
- ate/memory/search.py +26 -0
- ate/memory/store.py +548 -0
- ate/recording/__init__.py +83 -0
- ate/recording/demonstration.py +378 -0
- ate/recording/session.py +415 -0
- ate/recording/upload.py +304 -0
- ate/recording/visual.py +416 -0
- ate/recording/wrapper.py +95 -0
- ate/robot/__init__.py +221 -0
- ate/robot/agentic_servo.py +856 -0
- ate/robot/behaviors.py +493 -0
- ate/robot/ble_capture.py +1000 -0
- ate/robot/ble_enumerate.py +506 -0
- ate/robot/calibration.py +668 -0
- ate/robot/calibration_state.py +388 -0
- ate/robot/commands.py +3735 -0
- ate/robot/direction_calibration.py +554 -0
- ate/robot/discovery.py +441 -0
- ate/robot/introspection.py +330 -0
- ate/robot/llm_system_id.py +654 -0
- ate/robot/locomotion_calibration.py +508 -0
- ate/robot/manager.py +270 -0
- ate/robot/marker_generator.py +611 -0
- ate/robot/perception.py +502 -0
- ate/robot/primitives.py +614 -0
- ate/robot/profiles.py +281 -0
- ate/robot/registry.py +322 -0
- ate/robot/servo_mapper.py +1153 -0
- ate/robot/skill_upload.py +675 -0
- ate/robot/target_calibration.py +500 -0
- ate/robot/teach.py +515 -0
- ate/robot/types.py +242 -0
- ate/robot/visual_labeler.py +1048 -0
- ate/robot/visual_servo_loop.py +494 -0
- ate/robot/visual_servoing.py +570 -0
- ate/robot/visual_system_id.py +906 -0
- ate/transports/__init__.py +121 -0
- ate/transports/base.py +394 -0
- ate/transports/ble.py +405 -0
- ate/transports/hybrid.py +444 -0
- ate/transports/serial.py +345 -0
- ate/urdf/__init__.py +30 -0
- ate/urdf/capture.py +582 -0
- ate/urdf/cloud.py +491 -0
- ate/urdf/collision.py +271 -0
- ate/urdf/commands.py +708 -0
- ate/urdf/depth.py +360 -0
- ate/urdf/inertial.py +312 -0
- ate/urdf/kinematics.py +330 -0
- ate/urdf/lifting.py +415 -0
- ate/urdf/meshing.py +300 -0
- ate/urdf/models/__init__.py +110 -0
- ate/urdf/models/depth_anything.py +253 -0
- ate/urdf/models/sam2.py +324 -0
- ate/urdf/motion_analysis.py +396 -0
- ate/urdf/pipeline.py +468 -0
- ate/urdf/scale.py +256 -0
- ate/urdf/scan_session.py +411 -0
- ate/urdf/segmentation.py +299 -0
- ate/urdf/synthesis.py +319 -0
- ate/urdf/topology.py +336 -0
- ate/urdf/validation.py +371 -0
- {foodforthought_cli-0.2.7.dist-info → foodforthought_cli-0.3.0.dist-info}/METADATA +9 -1
- foodforthought_cli-0.3.0.dist-info/RECORD +166 -0
- {foodforthought_cli-0.2.7.dist-info → foodforthought_cli-0.3.0.dist-info}/WHEEL +1 -1
- foodforthought_cli-0.2.7.dist-info/RECORD +0 -44
- {foodforthought_cli-0.2.7.dist-info → foodforthought_cli-0.3.0.dist-info}/entry_points.txt +0 -0
- {foodforthought_cli-0.2.7.dist-info → foodforthought_cli-0.3.0.dist-info}/top_level.txt +0 -0
ate/interfaces/detection.py
@@ -0,0 +1,282 @@
"""
Object detection interface for robot perception.

This is a HIGHER-LEVEL interface that wraps camera interfaces
and ML models to provide semantic understanding of the environment.

Design principle: Models are pluggable - the interface abstracts
away the specific ML framework (YOLO, Detectron2, etc.)
"""

from abc import ABC, abstractmethod
from dataclasses import dataclass, field
from typing import List, Optional, Dict, Any, Callable
from enum import Enum, auto

from .types import Vector3, Image, ActionResult


@dataclass
class BoundingBox:
    """2D bounding box in image coordinates."""
    x_min: float  # Left edge (pixels)
    y_min: float  # Top edge (pixels)
    x_max: float  # Right edge (pixels)
    y_max: float  # Bottom edge (pixels)

    @property
    def width(self) -> float:
        return self.x_max - self.x_min

    @property
    def height(self) -> float:
        return self.y_max - self.y_min

    @property
    def center(self) -> tuple:
        return ((self.x_min + self.x_max) / 2, (self.y_min + self.y_max) / 2)

    @property
    def area(self) -> float:
        return self.width * self.height


@dataclass
class Detection:
    """A detected object in an image."""
    class_name: str    # e.g., "trash", "bottle", "can"
    class_id: int      # Numeric class ID
    confidence: float  # 0.0 to 1.0
    bbox: BoundingBox  # 2D bounding box

    # Optional 3D info (if depth available)
    position_3d: Optional[Vector3] = None  # In camera frame
    distance: Optional[float] = None       # Distance in meters

    # Optional instance segmentation mask
    mask: Optional[Any] = None

    # Additional attributes (color, size estimates, etc.)
    attributes: Dict[str, Any] = field(default_factory=dict)

    def to_dict(self) -> dict:
        return {
            "class_name": self.class_name,
            "class_id": self.class_id,
            "confidence": self.confidence,
            "bbox": {
                "x_min": self.bbox.x_min,
                "y_min": self.bbox.y_min,
                "x_max": self.bbox.x_max,
                "y_max": self.bbox.y_max,
            },
            "position_3d": self.position_3d.to_dict() if self.position_3d else None,
            "distance": self.distance,
            "attributes": self.attributes,
        }


@dataclass
class DetectionResult:
    """Result of running object detection on an image."""
    detections: List[Detection]
    image: Optional[Image] = None  # Original image (optional)
    inference_time_ms: float = 0.0
    model_name: str = ""

    def filter_by_class(self, class_name: str) -> List[Detection]:
        """Get detections of a specific class."""
        return [d for d in self.detections if d.class_name == class_name]

    def filter_by_confidence(self, min_confidence: float) -> List[Detection]:
        """Get detections above confidence threshold."""
        return [d for d in self.detections if d.confidence >= min_confidence]

    def get_closest(self) -> Optional[Detection]:
        """Get the closest detected object (requires 3D info)."""
        with_distance = [d for d in self.detections if d.distance is not None]
        if not with_distance:
            return None
        return min(with_distance, key=lambda d: d.distance)


class ObjectDetectionInterface(ABC):
    """
    Interface for object detection capabilities.

    This abstracts the specific ML model and camera hardware,
    providing a unified API for detecting objects in the environment.

    Use cases:
    - Trash detection for cleanup tasks
    - Object manipulation (find and grasp)
    - Obstacle detection for navigation
    - Person detection for social robots
    """

    @abstractmethod
    def detect(self, image: Optional[Image] = None) -> DetectionResult:
        """
        Run object detection.

        Args:
            image: Image to process. If None, capture from camera.

        Returns:
            DetectionResult with all detections
        """
        pass

    @abstractmethod
    def get_classes(self) -> List[str]:
        """
        Get list of classes this detector can recognize.

        Returns:
            List of class names
        """
        pass

    def detect_class(self, class_name: str, min_confidence: float = 0.5) -> List[Detection]:
        """
        Detect objects of a specific class.

        Args:
            class_name: Class to detect (e.g., "bottle", "trash")
            min_confidence: Minimum confidence threshold

        Returns:
            List of detections of that class
        """
        result = self.detect()
        return [
            d for d in result.detections
            if d.class_name == class_name and d.confidence >= min_confidence
        ]

    def detect_any(self, class_names: List[str], min_confidence: float = 0.5) -> List[Detection]:
        """
        Detect objects of any of the specified classes.

        Args:
            class_names: List of classes to detect
            min_confidence: Minimum confidence threshold

        Returns:
            List of detections matching any class
        """
        result = self.detect()
        return [
            d for d in result.detections
            if d.class_name in class_names and d.confidence >= min_confidence
        ]

    def find_nearest(self, class_name: str) -> Optional[Detection]:
        """
        Find the nearest object of a class.

        Args:
            class_name: Class to find

        Returns:
            Detection of nearest object, or None
        """
        detections = self.detect_class(class_name)
        with_distance = [d for d in detections if d.distance is not None]
        if not with_distance:
            # Fall back to largest bounding box (likely closest)
            if detections:
                return max(detections, key=lambda d: d.bbox.area)
            return None
        return min(with_distance, key=lambda d: d.distance)

    # =========================================================================
    # Model management
    # =========================================================================

    def load_model(self, model_path: str) -> ActionResult:
        """Load a specific detection model."""
        return ActionResult.error("Custom model loading not supported")

    def get_model_info(self) -> Dict[str, Any]:
        """Get information about the current model."""
        return {"classes": self.get_classes()}

    # =========================================================================
    # Streaming detection
    # =========================================================================

    def start_detection_stream(
        self,
        callback: Callable[[DetectionResult], None],
        min_confidence: float = 0.5
    ) -> ActionResult:
        """
        Start continuous detection with callbacks.

        Args:
            callback: Function called with each detection result
            min_confidence: Minimum confidence for callbacks
        """
        return ActionResult.error("Streaming detection not supported")

    def stop_detection_stream(self) -> ActionResult:
        """Stop the detection stream."""
        return ActionResult.error("Streaming detection not supported")


class TrashDetectionInterface(ObjectDetectionInterface):
    """
    Specialized detector for trash/litter.

    Recognizes common trash items:
    - Bottles (plastic, glass)
    - Cans
    - Paper/cardboard
    - Wrappers/packaging
    - Cigarette butts
    - General debris
    """

    TRASH_CLASSES = [
        "plastic_bottle",
        "glass_bottle",
        "can",
        "paper",
        "cardboard",
        "wrapper",
        "cigarette_butt",
        "debris",
        "trash",  # Generic
    ]

    def get_classes(self) -> List[str]:
        return self.TRASH_CLASSES

    def detect_trash(self, min_confidence: float = 0.5) -> List[Detection]:
        """
        Detect all trash items.

        Returns:
            List of trash detections
        """
        return self.detect_any(self.TRASH_CLASSES, min_confidence)

    def find_nearest_trash(self) -> Optional[Detection]:
        """
        Find the nearest trash item.

        Returns:
            Detection of nearest trash, or None
        """
        detections = self.detect_trash()
        with_distance = [d for d in detections if d.distance is not None]
        if not with_distance:
            if detections:
                return max(detections, key=lambda d: d.bbox.area)
            return None
        return min(with_distance, key=lambda d: d.distance)

    def is_trash_visible(self) -> bool:
        """Check if any trash is visible."""
        return len(self.detect_trash()) > 0
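Usage note (illustrative, not part of the diff): a concrete detector only needs to implement detect() and get_classes(); the helpers defined on ObjectDetectionInterface (detect_class, detect_any, find_nearest) then work unchanged. A minimal sketch against the interface added in 0.3.0 — the DummyBottleDetector class and its hard-coded detection are invented for this example:

from typing import List

from ate.interfaces.detection import (
    BoundingBox,
    Detection,
    DetectionResult,
    ObjectDetectionInterface,
)


class DummyBottleDetector(ObjectDetectionInterface):
    """Toy detector that reports one fixed bottle per frame (wiring test only)."""

    def get_classes(self) -> List[str]:
        return ["bottle"]

    def detect(self, image=None) -> DetectionResult:
        # A real implementation would run its ML backend (YOLO, Detectron2, ...)
        # on `image`, or on a freshly captured camera frame when image is None.
        det = Detection(
            class_name="bottle",
            class_id=0,
            confidence=0.9,
            bbox=BoundingBox(x_min=100.0, y_min=50.0, x_max=180.0, y_max=210.0),
            distance=1.2,  # meters, only meaningful when depth is available
        )
        return DetectionResult(detections=[det], model_name="dummy")


detector = DummyBottleDetector()
nearest = detector.find_nearest("bottle")  # inherited helper; falls back to bbox area
if nearest is not None:
    print(nearest.to_dict())

The same pattern gives TrashDetectionInterface subclasses detect_trash(), find_nearest_trash() and is_trash_visible() without any extra code.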
ate/interfaces/locomotion.py
@@ -0,0 +1,422 @@
"""
Locomotion interfaces for mobile robots.

Supports:
- Quadrupeds (MechDog, Spot, Unitree, ANYmal)
- Bipeds (Humanoids, Digit, Atlas)
- Wheeled robots (TurtleBot, AMRs)
- Aerial (Drones)

Each interface defines normalized actions that are hardware-agnostic.
A "walk forward 0.5m" command works the same on MechDog and Spot.
"""

from abc import ABC, abstractmethod
from typing import Optional, List, Callable
from enum import Enum, auto

from .types import (
    Vector3,
    Quaternion,
    Pose,
    Twist,
    GaitType,
    GaitParameters,
    JointState,
    ActionResult,
)


class LocomotionInterface(ABC):
    """
    Base interface for all locomotion.

    All mobile robots share these concepts:
    - Current pose in some frame
    - Velocity control
    - Stop
    """

    @abstractmethod
    def get_pose(self) -> Pose:
        """
        Get current pose (position + orientation) in odometry frame.

        Returns:
            Pose in odom frame
        """
        pass

    @abstractmethod
    def get_velocity(self) -> Twist:
        """
        Get current velocity (linear + angular).

        Returns:
            Twist with linear (m/s) and angular (rad/s) velocity
        """
        pass

    @abstractmethod
    def stop(self) -> ActionResult:
        """
        Immediately stop all locomotion.

        Returns:
            ActionResult
        """
        pass

    @abstractmethod
    def is_moving(self) -> bool:
        """
        Check if robot is currently moving.

        Returns:
            True if any locomotion is active
        """
        pass


class QuadrupedLocomotion(LocomotionInterface):
    """
    Interface for quadruped (4-legged) robots.

    Implemented by: MechDog, Spot, Unitree Go1/Go2, ANYmal, etc.

    Coordinate frame conventions:
    - X: Forward (positive = front of robot)
    - Y: Left (positive = left side of robot)
    - Z: Up (positive = above robot)

    All distances in meters, angles in radians.
    """

    # =========================================================================
    # High-level movement commands
    # =========================================================================

    @abstractmethod
    def walk(self, direction: Vector3, speed: float = 0.5) -> ActionResult:
        """
        Walk in a direction at given speed.

        This is a continuous command - robot keeps walking until stop() is called.

        Args:
            direction: Unit vector for direction (in robot frame)
                Vector3(1, 0, 0) = forward
                Vector3(0, 1, 0) = left
                Vector3(-1, 0, 0) = backward
            speed: Speed in m/s (clamped to robot's max)

        Returns:
            ActionResult
        """
        pass

    @abstractmethod
    def walk_to(self, target: Vector3, speed: float = 0.5) -> ActionResult:
        """
        Walk to a target position (blocking).

        Args:
            target: Target position in odometry frame
            speed: Speed in m/s

        Returns:
            ActionResult (when target reached or failed)
        """
        pass

    @abstractmethod
    def turn(self, angle: float, speed: float = 0.5) -> ActionResult:
        """
        Turn in place by given angle (blocking).

        Args:
            angle: Angle in radians (positive = counterclockwise)
            speed: Angular speed in rad/s

        Returns:
            ActionResult (when turn complete)
        """
        pass

    @abstractmethod
    def turn_continuous(self, angular_velocity: float) -> ActionResult:
        """
        Turn continuously at given angular velocity.

        Args:
            angular_velocity: rad/s (positive = counterclockwise)

        Returns:
            ActionResult
        """
        pass

    # =========================================================================
    # Posture commands
    # =========================================================================

    @abstractmethod
    def stand(self) -> ActionResult:
        """
        Stand up from any position.

        Returns:
            ActionResult (when standing complete)
        """
        pass

    @abstractmethod
    def sit(self) -> ActionResult:
        """
        Sit down (lower body to ground).

        Returns:
            ActionResult (when sit complete)
        """
        pass

    @abstractmethod
    def lie_down(self) -> ActionResult:
        """
        Lie down completely (motors may turn off).

        Returns:
            ActionResult
        """
        pass

    # =========================================================================
    # Gait control
    # =========================================================================

    @abstractmethod
    def set_gait(self, gait: GaitType) -> ActionResult:
        """
        Set the gait pattern.

        Args:
            gait: GaitType (WALK, TROT, BOUND, etc.)

        Returns:
            ActionResult
        """
        pass

    @abstractmethod
    def get_gait(self) -> GaitType:
        """
        Get current gait pattern.

        Returns:
            Current GaitType
        """
        pass

    def set_gait_parameters(self, params: GaitParameters) -> ActionResult:
        """
        Set detailed gait parameters.

        Default implementation just sets gait type.
        Override for robots with fine-grained gait control.

        Args:
            params: GaitParameters with stride, step height, etc.

        Returns:
            ActionResult
        """
        return self.set_gait(params.gait_type)

    # =========================================================================
    # Leg control (for advanced use)
    # =========================================================================

    @abstractmethod
    def get_foot_positions(self) -> List[Vector3]:
        """
        Get current foot positions relative to body.

        Returns:
            List of 4 Vector3 positions [front_left, front_right, back_left, back_right]
        """
        pass

    def set_foot_position(self, leg_index: int, position: Vector3) -> ActionResult:
        """
        Set a single foot position using inverse kinematics.

        Args:
            leg_index: 0=front_left, 1=front_right, 2=back_left, 3=back_right
            position: Target position relative to body

        Returns:
            ActionResult
        """
        # Default: not implemented
        return ActionResult.error("set_foot_position not implemented for this robot")

    # =========================================================================
    # Joint-level access (for telemetry/recording)
    # =========================================================================

    @abstractmethod
    def get_joint_state(self) -> JointState:
        """
        Get current state of all leg joints.

        Returns:
            JointState with positions, velocities, efforts
        """
        pass

    def get_joint_names(self) -> List[str]:
        """
        Get names of all leg joints in order.

        Default naming convention:
        [FL_hip, FL_thigh, FL_calf, FR_hip, FR_thigh, FR_calf,
         BL_hip, BL_thigh, BL_calf, BR_hip, BR_thigh, BR_calf]

        Returns:
            List of joint names
        """
        return [
            "FL_hip", "FL_thigh", "FL_calf",
            "FR_hip", "FR_thigh", "FR_calf",
            "BL_hip", "BL_thigh", "BL_calf",
            "BR_hip", "BR_thigh", "BR_calf",
        ]


class BipedLocomotion(LocomotionInterface):
    """
    Interface for biped (2-legged) robots.

    Implemented by: Humanoids, Digit, Cassie, etc.
    """

    @abstractmethod
    def walk(self, direction: Vector3, speed: float = 0.5) -> ActionResult:
        """Walk in direction."""
        pass

    @abstractmethod
    def walk_to(self, target: Vector3, speed: float = 0.5) -> ActionResult:
        """Walk to target position."""
        pass

    @abstractmethod
    def turn(self, angle: float, speed: float = 0.5) -> ActionResult:
        """Turn by angle."""
        pass

    @abstractmethod
    def stand(self) -> ActionResult:
        """Stand up."""
        pass

    @abstractmethod
    def crouch(self) -> ActionResult:
        """Lower body while standing."""
        pass

    @abstractmethod
    def get_joint_state(self) -> JointState:
        """Get leg joint states."""
        pass

    # Biped-specific
    @abstractmethod
    def step_over(self, obstacle_height: float) -> ActionResult:
        """Step over an obstacle of given height."""
        pass

    @abstractmethod
    def climb_stairs(self, step_height: float, num_steps: int) -> ActionResult:
        """Climb stairs."""
        pass


class WheeledLocomotion(LocomotionInterface):
    """
    Interface for wheeled robots.

    Implemented by: TurtleBot, AMRs, differential drive, Ackermann, etc.
    """

    @abstractmethod
    def drive(self, linear: float, angular: float) -> ActionResult:
        """
        Drive with given linear and angular velocity.

        Args:
            linear: Forward velocity in m/s
            angular: Angular velocity in rad/s

        Returns:
            ActionResult
        """
        pass

    @abstractmethod
    def drive_to(self, target: Vector3, speed: float = 0.5) -> ActionResult:
        """Drive to target position."""
        pass

    @abstractmethod
    def turn(self, angle: float, speed: float = 0.5) -> ActionResult:
        """Turn by angle."""
        pass

    @abstractmethod
    def get_wheel_velocities(self) -> List[float]:
        """Get current wheel velocities in rad/s."""
        pass


class AerialLocomotion(LocomotionInterface):
    """
    Interface for aerial robots (drones).

    Implemented by: Quadcopters, fixed-wing, etc.
    """

    @abstractmethod
    def takeoff(self, altitude: float = 1.0) -> ActionResult:
        """Take off to given altitude."""
        pass

    @abstractmethod
    def land(self) -> ActionResult:
        """Land at current position."""
        pass

    @abstractmethod
    def hover(self) -> ActionResult:
        """Hover in place."""
        pass

    @abstractmethod
    def fly_to(self, target: Vector3, speed: float = 1.0) -> ActionResult:
        """Fly to target position."""
        pass

    @abstractmethod
    def set_velocity(self, velocity: Vector3) -> ActionResult:
        """Set 3D velocity."""
        pass

    @abstractmethod
    def get_altitude(self) -> float:
        """Get current altitude in meters."""
        pass

    @abstractmethod
    def is_flying(self) -> bool:
        """Check if currently airborne."""
        pass
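Usage note (illustrative, not part of the diff): code written against QuadrupedLocomotion should run on any robot whose driver implements it (MechDog, Spot, ...). A caller-side sketch; it assumes Vector3(x, y, z) construction with .x/.y/.z attributes, a Pose.position field, and a GaitType.TROT member (TROT is named in the set_gait docstring), none of which are shown in this hunk:

import math

from ate.interfaces.locomotion import QuadrupedLocomotion
from ate.interfaces.types import GaitType, Vector3


def patrol_square(robot: QuadrupedLocomotion, side_m: float = 0.5) -> None:
    """Walk the perimeter of a square in the odometry frame, then rest."""
    robot.stand()
    robot.set_gait(GaitType.TROT)
    start = robot.get_pose().position  # assumed Pose.position -> Vector3
    # Square corners in the odometry frame (X forward, Y left per the conventions above).
    corners = [
        Vector3(start.x + side_m, start.y, start.z),
        Vector3(start.x + side_m, start.y + side_m, start.z),
        Vector3(start.x, start.y + side_m, start.z),
        Vector3(start.x, start.y, start.z),
    ]
    for corner in corners:
        robot.walk_to(corner, speed=0.3)  # blocking: returns once the corner is reached
    robot.turn(math.pi / 2)               # in-place turn; radians, positive = counterclockwise
    robot.stop()
    robot.sit()

Because walk() is continuous (it runs until stop()) while walk_to() blocks until the target is reached, scripted routines like this one use walk_to(); a teleop loop would use walk() plus stop() instead.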