foodforthought-cli 0.2.7__py3-none-any.whl → 0.3.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (131)
  1. ate/__init__.py +6 -0
  2. ate/__main__.py +16 -0
  3. ate/auth/__init__.py +1 -0
  4. ate/auth/device_flow.py +141 -0
  5. ate/auth/token_store.py +96 -0
  6. ate/behaviors/__init__.py +100 -0
  7. ate/behaviors/approach.py +399 -0
  8. ate/behaviors/common.py +686 -0
  9. ate/behaviors/tree.py +454 -0
  10. ate/cli.py +855 -3995
  11. ate/client.py +90 -0
  12. ate/commands/__init__.py +168 -0
  13. ate/commands/auth.py +389 -0
  14. ate/commands/bridge.py +448 -0
  15. ate/commands/data.py +185 -0
  16. ate/commands/deps.py +111 -0
  17. ate/commands/generate.py +384 -0
  18. ate/commands/memory.py +907 -0
  19. ate/commands/parts.py +166 -0
  20. ate/commands/primitive.py +399 -0
  21. ate/commands/protocol.py +288 -0
  22. ate/commands/recording.py +524 -0
  23. ate/commands/repo.py +154 -0
  24. ate/commands/simulation.py +291 -0
  25. ate/commands/skill.py +303 -0
  26. ate/commands/skills.py +487 -0
  27. ate/commands/team.py +147 -0
  28. ate/commands/workflow.py +271 -0
  29. ate/detection/__init__.py +38 -0
  30. ate/detection/base.py +142 -0
  31. ate/detection/color_detector.py +399 -0
  32. ate/detection/trash_detector.py +322 -0
  33. ate/drivers/__init__.py +39 -0
  34. ate/drivers/ble_transport.py +405 -0
  35. ate/drivers/mechdog.py +942 -0
  36. ate/drivers/wifi_camera.py +477 -0
  37. ate/interfaces/__init__.py +187 -0
  38. ate/interfaces/base.py +273 -0
  39. ate/interfaces/body.py +267 -0
  40. ate/interfaces/detection.py +282 -0
  41. ate/interfaces/locomotion.py +422 -0
  42. ate/interfaces/manipulation.py +408 -0
  43. ate/interfaces/navigation.py +389 -0
  44. ate/interfaces/perception.py +362 -0
  45. ate/interfaces/sensors.py +247 -0
  46. ate/interfaces/types.py +371 -0
  47. ate/llm_proxy.py +239 -0
  48. ate/mcp_server.py +387 -0
  49. ate/memory/__init__.py +35 -0
  50. ate/memory/cloud.py +244 -0
  51. ate/memory/context.py +269 -0
  52. ate/memory/embeddings.py +184 -0
  53. ate/memory/export.py +26 -0
  54. ate/memory/merge.py +146 -0
  55. ate/memory/migrate/__init__.py +34 -0
  56. ate/memory/migrate/base.py +89 -0
  57. ate/memory/migrate/pipeline.py +189 -0
  58. ate/memory/migrate/sources/__init__.py +13 -0
  59. ate/memory/migrate/sources/chroma.py +170 -0
  60. ate/memory/migrate/sources/pinecone.py +120 -0
  61. ate/memory/migrate/sources/qdrant.py +110 -0
  62. ate/memory/migrate/sources/weaviate.py +160 -0
  63. ate/memory/reranker.py +353 -0
  64. ate/memory/search.py +26 -0
  65. ate/memory/store.py +548 -0
  66. ate/recording/__init__.py +83 -0
  67. ate/recording/demonstration.py +378 -0
  68. ate/recording/session.py +415 -0
  69. ate/recording/upload.py +304 -0
  70. ate/recording/visual.py +416 -0
  71. ate/recording/wrapper.py +95 -0
  72. ate/robot/__init__.py +221 -0
  73. ate/robot/agentic_servo.py +856 -0
  74. ate/robot/behaviors.py +493 -0
  75. ate/robot/ble_capture.py +1000 -0
  76. ate/robot/ble_enumerate.py +506 -0
  77. ate/robot/calibration.py +668 -0
  78. ate/robot/calibration_state.py +388 -0
  79. ate/robot/commands.py +3735 -0
  80. ate/robot/direction_calibration.py +554 -0
  81. ate/robot/discovery.py +441 -0
  82. ate/robot/introspection.py +330 -0
  83. ate/robot/llm_system_id.py +654 -0
  84. ate/robot/locomotion_calibration.py +508 -0
  85. ate/robot/manager.py +270 -0
  86. ate/robot/marker_generator.py +611 -0
  87. ate/robot/perception.py +502 -0
  88. ate/robot/primitives.py +614 -0
  89. ate/robot/profiles.py +281 -0
  90. ate/robot/registry.py +322 -0
  91. ate/robot/servo_mapper.py +1153 -0
  92. ate/robot/skill_upload.py +675 -0
  93. ate/robot/target_calibration.py +500 -0
  94. ate/robot/teach.py +515 -0
  95. ate/robot/types.py +242 -0
  96. ate/robot/visual_labeler.py +1048 -0
  97. ate/robot/visual_servo_loop.py +494 -0
  98. ate/robot/visual_servoing.py +570 -0
  99. ate/robot/visual_system_id.py +906 -0
  100. ate/transports/__init__.py +121 -0
  101. ate/transports/base.py +394 -0
  102. ate/transports/ble.py +405 -0
  103. ate/transports/hybrid.py +444 -0
  104. ate/transports/serial.py +345 -0
  105. ate/urdf/__init__.py +30 -0
  106. ate/urdf/capture.py +582 -0
  107. ate/urdf/cloud.py +491 -0
  108. ate/urdf/collision.py +271 -0
  109. ate/urdf/commands.py +708 -0
  110. ate/urdf/depth.py +360 -0
  111. ate/urdf/inertial.py +312 -0
  112. ate/urdf/kinematics.py +330 -0
  113. ate/urdf/lifting.py +415 -0
  114. ate/urdf/meshing.py +300 -0
  115. ate/urdf/models/__init__.py +110 -0
  116. ate/urdf/models/depth_anything.py +253 -0
  117. ate/urdf/models/sam2.py +324 -0
  118. ate/urdf/motion_analysis.py +396 -0
  119. ate/urdf/pipeline.py +468 -0
  120. ate/urdf/scale.py +256 -0
  121. ate/urdf/scan_session.py +411 -0
  122. ate/urdf/segmentation.py +299 -0
  123. ate/urdf/synthesis.py +319 -0
  124. ate/urdf/topology.py +336 -0
  125. ate/urdf/validation.py +371 -0
  126. {foodforthought_cli-0.2.7.dist-info → foodforthought_cli-0.3.0.dist-info}/METADATA +9 -1
  127. foodforthought_cli-0.3.0.dist-info/RECORD +166 -0
  128. {foodforthought_cli-0.2.7.dist-info → foodforthought_cli-0.3.0.dist-info}/WHEEL +1 -1
  129. foodforthought_cli-0.2.7.dist-info/RECORD +0 -44
  130. {foodforthought_cli-0.2.7.dist-info → foodforthought_cli-0.3.0.dist-info}/entry_points.txt +0 -0
  131. {foodforthought_cli-0.2.7.dist-info → foodforthought_cli-0.3.0.dist-info}/top_level.txt +0 -0
@@ -0,0 +1,362 @@
1
+ """
2
+ Perception interfaces for robot sensors.
3
+
4
+ Supports:
5
+ - RGB cameras
6
+ - Depth cameras (stereo, ToF, structured light)
7
+ - LiDAR (2D and 3D)
8
+ - IMU
9
+ - Force/Torque sensors
10
+
11
+ Design principle: Return raw sensor data in standard formats.
12
+ Processing (object detection, SLAM, etc.) happens at higher levels.
13
+ """
14
+
15
+ from abc import ABC, abstractmethod
16
+ from typing import Optional, List, Callable, Tuple
17
+ from enum import Enum, auto
18
+
19
+ from .types import (
20
+ Vector3,
21
+ Quaternion,
22
+ Pose,
23
+ Image,
24
+ DepthImage,
25
+ PointCloud,
26
+ IMUReading,
27
+ ForceTorqueReading,
28
+ ActionResult,
29
+ )
30
+
31
+
32
class CameraInterface(ABC):
    """
    Abstract base for RGB camera sources.

    Implementations may wrap a fixed head camera, a wrist camera on an
    end-effector, or an external device such as an Intel RealSense.
    """

    @abstractmethod
    def get_image(self) -> Image:
        """Capture and return the current RGB frame as an Image."""
        ...

    @abstractmethod
    def get_resolution(self) -> Tuple[int, int]:
        """Return the sensor resolution as a (width, height) pixel pair."""
        ...

    @abstractmethod
    def get_intrinsics(self) -> dict:
        """
        Return intrinsic calibration parameters.

        The dict holds the focal lengths (fx, fy), the principal point
        (cx, cy), and the distortion coefficients ("distortion").
        """
        ...

    def get_frame_id(self) -> str:
        """Return the coordinate frame ID associated with this camera."""
        return "camera"

    def set_resolution(self, width: int, height: int) -> ActionResult:
        """Request a new capture resolution; unsupported by default."""
        return ActionResult.error("Resolution change not supported")

    def set_exposure(self, exposure_ms: float) -> ActionResult:
        """Request an exposure time in milliseconds; unsupported by default."""
        return ActionResult.error("Exposure control not supported")

    # ------------------------------------------------------------------
    # Optional streaming support
    # ------------------------------------------------------------------

    def start_streaming(self, callback: Callable[[Image], None]) -> ActionResult:
        """
        Begin pushing frames to *callback* continuously.

        Subclasses that can stream should override this; the base
        implementation reports the capability as unavailable.
        """
        return ActionResult.error("Streaming not supported")

    def stop_streaming(self) -> ActionResult:
        """End continuous frame streaming; unsupported by default."""
        return ActionResult.error("Streaming not supported")

    def get_fps(self) -> float:
        """Return the current frame rate (the base implementation reports 0.0)."""
        return 0.0
107
+
108
+
109
class DepthCameraInterface(CameraInterface):
    """
    Interface for depth cameras.

    Extends CameraInterface with depth-specific methods. Implemented by
    RealSense, Kinect, stereo cameras, and ToF sensors.
    """

    @abstractmethod
    def get_depth_image(self) -> DepthImage:
        """
        Capture current depth frame.

        Returns:
            DepthImage with depth values
        """
        pass

    @abstractmethod
    def get_point_cloud(self) -> PointCloud:
        """
        Get 3D point cloud from depth data.

        Returns:
            PointCloud in camera frame
        """
        pass

    def get_rgbd(self) -> Tuple[Image, DepthImage]:
        """
        Get aligned RGB and depth images.

        Returns:
            (rgb_image, depth_image) tuple

        NOTE(review): the two frames are captured sequentially here, so
        exact temporal alignment depends on the implementation — confirm
        for time-sensitive uses.
        """
        return (self.get_image(), self.get_depth_image())

    def get_depth_range(self) -> Tuple[float, float]:
        """
        Get valid depth range.

        Returns:
            (min_depth, max_depth) in meters
        """
        return (0.1, 10.0)

    def point_to_3d(self, x: int, y: int) -> Optional[Vector3]:
        """
        Convert pixel coordinates to 3D point.

        Args:
            x, y: Pixel coordinates

        Returns:
            Vector3 in camera frame, or None when deprojection is not
            implemented

        The base class cannot deproject because the depth encoding is
        implementation-specific; subclasses should override this using
        get_intrinsics() and their depth format. (Bug fix: the previous
        default captured a depth frame into an unused local and discarded
        it — a wasted, potentially expensive sensor read with no effect
        on the result.)
        """
        return None
168
+
169
+
170
class LidarInterface(ABC):
    """
    Abstract base for LiDAR sensors.

    Covers both 2D scanners (RPLIDAR, Hokuyo) and 3D units
    (Velodyne, Ouster, Livox).
    """

    @abstractmethod
    def get_scan(self) -> PointCloud:
        """Return the current scan as a PointCloud in the sensor frame."""
        ...

    @abstractmethod
    def get_range(self) -> Tuple[float, float]:
        """Return the measurement range as (min_range, max_range) in meters."""
        ...

    def get_frame_id(self) -> str:
        """Return the coordinate frame ID for this LiDAR."""
        return "lidar"

    def is_3d(self) -> bool:
        """Report whether this unit produces 3D scans (assumed by default)."""
        return True

    def get_angular_resolution(self) -> float:
        """Return the angular resolution in radians."""
        return 0.01  # roughly 0.5 degrees

    # ------------------------------------------------------------------
    # 2D LiDAR specific
    # ------------------------------------------------------------------

    def get_scan_2d(self) -> List[Tuple[float, float]]:
        """
        Return the scan projected to 2D as (angle_rad, distance_m) pairs.

        The default implementation flattens the 3D cloud onto the XY plane.
        """
        import math
        return [
            (math.atan2(p.y, p.x), math.sqrt(p.x**2 + p.y**2))
            for p in self.get_scan().points
        ]
231
+
232
+
233
class IMUInterface(ABC):
    """
    Abstract base for Inertial Measurement Units.

    Exposes orientation (from gyro integration or sensor fusion),
    angular velocity from the gyroscope, and linear acceleration from
    the accelerometer.
    """

    @abstractmethod
    def get_reading(self) -> IMUReading:
        """Return the full current IMU sample (orientation, rates, acceleration)."""
        ...

    @abstractmethod
    def get_orientation(self) -> Quaternion:
        """Return the current orientation as a Quaternion."""
        ...

    def get_euler(self) -> Tuple[float, float, float]:
        """Return the orientation as (roll, pitch, yaw) Euler angles in radians."""
        return self.get_orientation().to_euler()

    def get_angular_velocity(self) -> Vector3:
        """Return the angular velocity vector in rad/s."""
        return self.get_reading().angular_velocity

    def get_linear_acceleration(self) -> Vector3:
        """Return the linear acceleration vector in m/s² (gravity included)."""
        return self.get_reading().linear_acceleration

    def calibrate(self) -> ActionResult:
        """
        Take the current orientation as the reference.

        The robot should be stationary and level; unsupported by default.
        """
        return ActionResult.error("Calibration not supported")

    def get_frame_id(self) -> str:
        """Return the coordinate frame ID for this IMU."""
        return "imu"
301
+
302
+
303
class ForceTorqueInterface(ABC):
    """
    Abstract base for force-torque sensors.

    These are typically mounted at the wrist, between arm and gripper.
    """

    @abstractmethod
    def get_reading(self) -> ForceTorqueReading:
        """Return the current combined force/torque sample."""
        ...

    @abstractmethod
    def get_force(self) -> Vector3:
        """Return the force vector in Newtons."""
        ...

    @abstractmethod
    def get_torque(self) -> Vector3:
        """Return the torque vector in Nm."""
        ...

    def zero(self) -> ActionResult:
        """
        Capture the current reading as the zero offset.

        Call only while no external load is applied; unsupported by default.
        """
        return ActionResult.error("Zeroing not supported")

    def get_frame_id(self) -> str:
        """Return the coordinate frame ID for this sensor."""
        return "ft_sensor"

    def is_in_contact(self, threshold: float = 1.0) -> bool:
        """
        Report whether contact is detected.

        Args:
            threshold: Force threshold in Newtons

        Returns:
            True when the measured force magnitude exceeds *threshold*
        """
        return self.get_force().magnitude() > threshold
@@ -0,0 +1,247 @@
1
+ """
2
+ Sensor interfaces for robot perception.
3
+
4
+ These interfaces abstract different sensor types so behaviors
5
+ can work across robots with different sensor configurations.
6
+
7
+ Design principle: A behavior that needs distance sensing can use
8
+ ANY implementation of DistanceSensorInterface - ultrasonic, lidar,
9
+ depth camera, or even visual estimation.
10
+ """
11
+
12
+ from abc import ABC, abstractmethod
13
+ from dataclasses import dataclass
14
+ from typing import Optional, List, Tuple
15
+ from enum import Enum, auto
16
+
17
+ from .types import ActionResult
18
+
19
+
20
class DistanceSensorType(Enum):
    """Enumerates the hardware (or virtual) source of a distance reading."""

    ULTRASONIC = auto()          # e.g. HC-SR04
    INFRARED = auto()            # IR proximity sensor
    LIDAR_SINGLE = auto()        # single-point lidar
    LIDAR_2D = auto()            # 2D scanning lidar
    DEPTH_CAMERA = auto()        # depth camera center point
    VISUAL_ESTIMATION = auto()   # inferred from an RGB camera image
28
+
29
+
30
@dataclass
class DistanceReading:
    """One distance measurement, in meters, with validity metadata."""

    # Distance in meters.
    distance: float
    # False when the sensor produced no usable return.
    valid: bool = True
    # Which kind of sensor produced this reading.
    sensor_type: DistanceSensorType = DistanceSensorType.ULTRASONIC
    # Seconds since the epoch at measurement time.
    timestamp: float = 0.0
    # Confidence in the reading, 0-1.
    confidence: float = 1.0

    @classmethod
    def invalid(cls) -> "DistanceReading":
        """Build a reading flagged as not usable."""
        return cls(distance=0.0, valid=False)
43
+
44
+
45
class DistanceSensorInterface(ABC):
    """
    Abstract base for distance/proximity sensing.

    Implementations include:
    - Ultrasonic sensors (HC-SR04, etc.)
    - IR proximity sensors
    - Single-point lidar
    - Depth camera (center pixel or ROI average)
    - Visual estimation (object size in image)

    Because behaviors depend only on this interface, something like
    "approach target" runs unchanged on any robot that can report a
    distance, whatever the underlying hardware.

    Example:
        # Works with any implementation
        sensor: DistanceSensorInterface = robot.get_distance_sensor()

        while True:
            reading = sensor.get_distance()
            if reading.valid and reading.distance < 0.15:
                robot.stop()
                break
            robot.walk_forward(speed=0.1)
    """

    @abstractmethod
    def get_distance(self) -> DistanceReading:
        """Return the current DistanceReading (distance in meters)."""
        ...

    @abstractmethod
    def get_min_range(self) -> float:
        """Return the minimum measurable range in meters; readings below it are unreliable."""
        ...

    @abstractmethod
    def get_max_range(self) -> float:
        """Return the maximum measurable range in meters; readings above it are unreliable."""
        ...

    def get_sensor_type(self) -> DistanceSensorType:
        """Return the kind of sensor backing this interface (ultrasonic by default)."""
        return DistanceSensorType.ULTRASONIC

    def is_obstacle_detected(self, threshold: float = 0.20) -> bool:
        """
        Report whether a valid reading falls within *threshold* meters.

        Args:
            threshold: Distance threshold in meters

        Returns:
            True when an obstacle is detected inside the threshold
        """
        current = self.get_distance()
        return current.valid and current.distance < threshold
115
+
116
+
117
class VisualDistanceEstimator(DistanceSensorInterface):
    """
    Estimate distance from visual object size.

    When no hardware distance sensor is available, we can estimate
    distance based on the apparent size of a detected object.

    This uses the pinhole camera model:
        distance = (known_size * focal_length) / apparent_size

    For unknown objects, we use heuristics based on object category
    (TYPICAL_SIZES).
    """

    # Typical object sizes in meters (width), keyed by detection category.
    TYPICAL_SIZES = {
        "bottle": 0.07,
        "can": 0.065,
        "wrapper": 0.10,
        "paper": 0.20,
        "plastic": 0.10,
        "metal": 0.08,
        "unknown": 0.10,
    }

    def __init__(
        self,
        image_width: int = 640,
        image_height: int = 480,
        fov_horizontal: float = 60.0,  # Degrees
    ):
        """
        Initialize visual distance estimator.

        Args:
            image_width: Camera image width in pixels
            image_height: Camera image height in pixels
            fov_horizontal: Horizontal field of view in degrees
        """
        self.image_width = image_width
        self.image_height = image_height
        self.fov_horizontal = fov_horizontal

        # Focal length in pixels, derived from the horizontal FOV
        # (pinhole model: f = W / (2 * tan(fov/2))).
        import math
        self.focal_length = image_width / (2 * math.tan(math.radians(fov_horizontal / 2)))

        self._last_reading: Optional[DistanceReading] = None

    def estimate_from_detection(
        self,
        bbox_width: int,
        object_type: str = "unknown",
        known_size: Optional[float] = None,
    ) -> DistanceReading:
        """
        Estimate distance from detected object bounding box.

        Args:
            bbox_width: Width of bounding box in pixels
            object_type: Type of object (for size lookup)
            known_size: Known real-world size in meters (overrides lookup)

        Returns:
            DistanceReading with estimated distance (also cached for
            get_distance())
        """
        import time

        if bbox_width <= 0:
            return DistanceReading.invalid()

        # Bug fix: this previously used `known_size or TYPICAL_SIZES.get(...)`,
        # which silently discarded a falsy explicit known_size (0.0). An
        # explicitly supplied size now always takes precedence over the
        # heuristic table.
        if known_size is not None:
            real_size = known_size
        else:
            real_size = self.TYPICAL_SIZES.get(object_type, 0.10)

        # Estimate distance using the pinhole model.
        distance = (real_size * self.focal_length) / bbox_width

        # Confidence decreases with smaller bounding boxes (more uncertainty).
        confidence = min(1.0, bbox_width / 50.0)

        reading = DistanceReading(
            distance=distance,
            valid=True,
            sensor_type=DistanceSensorType.VISUAL_ESTIMATION,
            timestamp=time.time(),
            confidence=confidence,
        )

        self._last_reading = reading
        return reading

    def get_distance(self) -> DistanceReading:
        """Return the last estimated distance, or an invalid reading if none yet."""
        # `is not None` rather than truthiness: a dataclass instance is always
        # truthy, so the explicit check states the actual intent.
        if self._last_reading is not None:
            return self._last_reading
        return DistanceReading.invalid()

    def get_min_range(self) -> float:
        """Minimum usable range: estimates below ~10 cm are unreliable."""
        return 0.1  # 10cm minimum

    def get_max_range(self) -> float:
        """Maximum usable range: estimates are very rough beyond 5 m."""
        return 5.0  # 5m maximum (very rough beyond this)

    def get_sensor_type(self) -> DistanceSensorType:
        """This estimator is a purely visual distance source."""
        return DistanceSensorType.VISUAL_ESTIMATION
221
+
222
+
223
@dataclass
class ProximitySensorReading:
    """Reading from a single proximity/bump sensor."""

    # True while the sensor detects contact or near-contact.
    triggered: bool
    # Identifier of the sensor that produced this reading.
    sensor_id: str
    # Seconds since the epoch at measurement time.
    timestamp: float = 0.0
229
+
230
+
231
class ProximitySensorInterface(ABC):
    """
    Abstract base for proximity/bump sensors.

    These are simple binary sensors that report contact or very close
    proximity; they are common on wheeled robots and robot vacuums.
    """

    @abstractmethod
    def get_proximity_sensors(self) -> List[ProximitySensorReading]:
        """Return the current reading of every proximity sensor."""
        ...

    @abstractmethod
    def is_any_triggered(self) -> bool:
        """Report whether at least one proximity sensor is triggered."""
        ...