simple-autonomous-car 0.1.2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (50) hide show
  1. simple_autonomous_car/__init__.py +96 -0
  2. simple_autonomous_car/alerts/__init__.py +5 -0
  3. simple_autonomous_car/alerts/track_bounds_alert.py +276 -0
  4. simple_autonomous_car/car/__init__.py +5 -0
  5. simple_autonomous_car/car/car.py +234 -0
  6. simple_autonomous_car/constants.py +112 -0
  7. simple_autonomous_car/control/__init__.py +7 -0
  8. simple_autonomous_car/control/base_controller.py +152 -0
  9. simple_autonomous_car/control/controller_viz.py +282 -0
  10. simple_autonomous_car/control/pid_controller.py +153 -0
  11. simple_autonomous_car/control/pure_pursuit_controller.py +578 -0
  12. simple_autonomous_car/costmap/__init__.py +12 -0
  13. simple_autonomous_car/costmap/base_costmap.py +187 -0
  14. simple_autonomous_car/costmap/grid_costmap.py +507 -0
  15. simple_autonomous_car/costmap/inflation.py +126 -0
  16. simple_autonomous_car/detection/__init__.py +5 -0
  17. simple_autonomous_car/detection/error_detector.py +165 -0
  18. simple_autonomous_car/filters/__init__.py +7 -0
  19. simple_autonomous_car/filters/base_filter.py +119 -0
  20. simple_autonomous_car/filters/kalman_filter.py +131 -0
  21. simple_autonomous_car/filters/particle_filter.py +162 -0
  22. simple_autonomous_car/footprint/__init__.py +7 -0
  23. simple_autonomous_car/footprint/base_footprint.py +128 -0
  24. simple_autonomous_car/footprint/circular_footprint.py +73 -0
  25. simple_autonomous_car/footprint/rectangular_footprint.py +123 -0
  26. simple_autonomous_car/frames/__init__.py +21 -0
  27. simple_autonomous_car/frames/frenet.py +267 -0
  28. simple_autonomous_car/maps/__init__.py +9 -0
  29. simple_autonomous_car/maps/frenet_map.py +97 -0
  30. simple_autonomous_car/maps/grid_ground_truth_map.py +83 -0
  31. simple_autonomous_car/maps/grid_map.py +361 -0
  32. simple_autonomous_car/maps/ground_truth_map.py +64 -0
  33. simple_autonomous_car/maps/perceived_map.py +169 -0
  34. simple_autonomous_car/perception/__init__.py +5 -0
  35. simple_autonomous_car/perception/perception.py +107 -0
  36. simple_autonomous_car/planning/__init__.py +7 -0
  37. simple_autonomous_car/planning/base_planner.py +184 -0
  38. simple_autonomous_car/planning/goal_planner.py +261 -0
  39. simple_autonomous_car/planning/track_planner.py +199 -0
  40. simple_autonomous_car/sensors/__init__.py +6 -0
  41. simple_autonomous_car/sensors/base_sensor.py +105 -0
  42. simple_autonomous_car/sensors/lidar_sensor.py +145 -0
  43. simple_autonomous_car/track/__init__.py +5 -0
  44. simple_autonomous_car/track/track.py +463 -0
  45. simple_autonomous_car/visualization/__init__.py +25 -0
  46. simple_autonomous_car/visualization/alert_viz.py +316 -0
  47. simple_autonomous_car/visualization/utils.py +169 -0
  48. simple_autonomous_car-0.1.2.dist-info/METADATA +324 -0
  49. simple_autonomous_car-0.1.2.dist-info/RECORD +50 -0
  50. simple_autonomous_car-0.1.2.dist-info/WHEEL +4 -0
@@ -0,0 +1,267 @@
1
+ """Frenet frame coordinate system utilities.
2
+
3
+ Frenet frame: (s, d) where:
4
+ - s: distance along the path (longitudinal)
5
+ - d: lateral offset from the path (positive = left, negative = right)
6
+ """
7
+
8
+ from typing import Any
9
+
10
+ import numpy as np
11
+
12
+ from simple_autonomous_car.car.car import CarState
13
+ from simple_autonomous_car.track.track import Track
14
+
15
+
16
class FrenetFrame:
    """Frenet frame representation along a path.

    The frame is defined by a reference polyline (the track centerline):
    s is the arc length along the polyline and d is the signed lateral
    offset from it (positive = left of the direction of travel).
    """

    def __init__(self, track: Track):
        """
        Initialize Frenet frame with a reference path (track centerline).

        Args:
            track: Track object providing the reference path
        """
        self.track = track
        self._compute_cumulative_distances()

    def _compute_cumulative_distances(self) -> None:
        """Precompute cumulative arc length at every centerline vertex."""
        # cumulative_distances[i] = path length from vertex 0 to vertex i.
        self.cumulative_distances = np.zeros(len(self.track.centerline))
        for i in range(1, len(self.track.centerline)):
            dist = np.linalg.norm(self.track.centerline[i] - self.track.centerline[i - 1])
            self.cumulative_distances[i] = self.cumulative_distances[i - 1] + dist
        self.total_length = self.cumulative_distances[-1]

    def _segment_index(self, s: float) -> int:
        """Return index i such that centerline segment (i-1, i) contains s.

        The index is clamped to [1, len(centerline) - 1] so that both
        segment endpoints are valid centerline indices.

        Args:
            s: Arc length along the path (assumed already wrapped).

        Returns:
            End-vertex index of the containing segment.
        """
        idx = int(np.searchsorted(self.cumulative_distances, s))
        return min(max(idx, 1), len(self.track.centerline) - 1)

    def get_closest_point(self, point: np.ndarray) -> tuple[int, float]:
        """
        Find closest point on centerline to given point.

        Args:
            point: Point in global frame [x, y]

        Returns:
            Tuple of (index, distance_along_path)
        """
        distances = np.linalg.norm(self.track.centerline - point, axis=1)
        closest_idx = int(np.argmin(distances))
        return closest_idx, float(self.cumulative_distances[closest_idx])

    def get_tangent_normal(self, s: float) -> tuple[np.ndarray, np.ndarray]:
        """
        Get tangent and normal vectors at distance s along path.

        Args:
            s: Distance along path

        Returns:
            Tuple of (tangent_vector, normal_vector)
        """
        s = s % self.total_length  # Wrap around (closed path assumed)

        idx = self._segment_index(s)

        # Unit direction of the segment containing s.
        direction = self.track.centerline[idx] - self.track.centerline[idx - 1]
        direction_norm = np.linalg.norm(direction)
        if direction_norm > 1e-6:
            tangent = direction / direction_norm
        else:
            # Degenerate (duplicate) vertices: fall back to the +x direction.
            tangent = np.array([1.0, 0.0])

        # Normal is the tangent rotated 90 degrees CCW, i.e. pointing left.
        normal = np.array([-tangent[1], tangent[0]])

        return tangent, normal

    def global_to_frenet(self, point: np.ndarray) -> tuple[float, float]:
        """
        Convert point from global frame to Frenet frame.

        Args:
            point: Point in global frame [x, y]

        Returns:
            Tuple of (s, d) where s is distance along path, d is lateral offset
        """
        closest_idx, s = self.get_closest_point(point)

        # Point on centerline plus local frame at that arc length.
        centerline_point = self.track.centerline[closest_idx]
        tangent, normal = self.get_tangent_normal(s)

        # Lateral offset is the projection onto the left-pointing normal.
        vector_to_point = point - centerline_point
        d = np.dot(vector_to_point, normal)

        return s, d

    def frenet_to_global(self, s: float, d: float) -> np.ndarray:
        """
        Convert point from Frenet frame to global frame.

        Args:
            s: Distance along path
            d: Lateral offset (positive = left)

        Returns:
            Point in global frame [x, y]
        """
        s = s % self.total_length  # Wrap around

        idx = self._segment_index(s)

        # Linear interpolation factor within the containing segment.
        segment_dist = s - self.cumulative_distances[idx - 1]
        segment_length = self.cumulative_distances[idx] - self.cumulative_distances[idx - 1]
        t = segment_dist / segment_length if segment_length > 1e-6 else 0.0

        centerline_point = self.track.centerline[idx - 1] + t * (
            self.track.centerline[idx] - self.track.centerline[idx - 1]
        )

        # Offset laterally along the (left-pointing) normal.
        tangent, normal = self.get_tangent_normal(s)
        global_point = centerline_point + d * normal

        return np.asarray(global_point, dtype=np.float64)
149
+
150
+
151
+ # Convenience functions
152
def global_to_frenet(point: np.ndarray, frenet_frame: FrenetFrame) -> tuple[float, float]:
    """Project a global-frame point into (s, d) Frenet coordinates."""
    s, d = frenet_frame.global_to_frenet(point)
    return s, d
155
+
156
+
157
def frenet_to_global(s: float, d: float, frenet_frame: FrenetFrame) -> np.ndarray:
    """Map Frenet coordinates (s, d) back to a global-frame point."""
    result = frenet_frame.frenet_to_global(s, d)
    return result
160
+
161
+
162
def ego_to_frenet(
    point_ego: np.ndarray, car_state: CarState, frenet_frame: FrenetFrame
) -> tuple[float, float]:
    """
    Transform an ego-frame point into Frenet (s, d) coordinates.

    Args:
        point_ego: Point in ego frame [x, y] (x=forward, y=left)
        car_state: Current car state
        frenet_frame: Frenet frame instance

    Returns:
        Tuple of (s, d) in Frenet frame
    """
    # Ego -> world, then world -> Frenet.
    world_point = car_state.transform_to_world_frame(point_ego)
    s, d = frenet_frame.global_to_frenet(world_point)
    return s, d
180
+
181
+
182
+ # Add method to FrenetFrame class for convenience
183
def _ego_to_frenet_method(
    self: Any, point_ego: np.ndarray, car_state: CarState
) -> tuple[float, float]:
    """Convert point from ego frame to Frenet frame (instance method)."""
    # Same pipeline as the module-level ego_to_frenet, with `self` as the frame:
    # ego -> world via the car pose, then world -> Frenet via this frame.
    world_point = car_state.transform_to_world_frame(point_ego)
    return self.global_to_frenet(world_point)


# Attach as a bound method so callers can write frame.ego_to_frenet(...).
FrenetFrame.ego_to_frenet = _ego_to_frenet_method  # type: ignore[attr-defined]
191
+
192
+
193
def frenet_to_ego(s: float, d: float, car_state: CarState, frenet_frame: FrenetFrame) -> np.ndarray:
    """
    Convert point from Frenet frame to ego frame.

    Args:
        s: Distance along path
        d: Lateral offset
        car_state: Current car state
        frenet_frame: Frenet frame instance

    Returns:
        Point in ego frame [x, y]
    """
    # Frenet -> world, then world -> ego.
    world_point = frenet_frame.frenet_to_global(s, d)
    ego_point = car_state.transform_to_car_frame(world_point)
    return ego_point
210
+
211
+
212
def sensor_to_ego(
    point_sensor: np.ndarray, sensor_pose_ego: np.ndarray | None = None
) -> np.ndarray:
    """
    Convert point from sensor frame to ego frame.

    Args:
        point_sensor: Point in sensor frame [x, y]
        sensor_pose_ego: Sensor pose in ego frame [x, y, heading].
            If None, assumes sensor is at ego origin.

    Returns:
        Point in ego frame [x, y]
    """
    if sensor_pose_ego is None:
        # Frames coincide: nothing to transform.
        return point_sensor

    offset_x, offset_y, heading = sensor_pose_ego
    c = np.cos(heading)
    s = np.sin(heading)

    # Rotate from sensor frame into ego frame, then translate by the
    # sensor's mounting offset.
    rot_sensor_to_ego = np.array([[c, -s], [s, c]])
    transformed = rot_sensor_to_ego @ point_sensor + np.array([offset_x, offset_y])
    return np.asarray(transformed, dtype=np.float64)
240
+
241
+
242
def ego_to_sensor(point_ego: np.ndarray, sensor_pose_ego: np.ndarray | None = None) -> np.ndarray:
    """
    Convert point from ego frame to sensor frame.

    Args:
        point_ego: Point in ego frame [x, y]
        sensor_pose_ego: Sensor pose in ego frame [x, y, heading].
            If None, assumes sensor is at ego origin.

    Returns:
        Point in sensor frame [x, y]
    """
    if sensor_pose_ego is None:
        # Frames coincide: nothing to transform.
        return point_ego

    offset_x, offset_y, heading = sensor_pose_ego
    c = np.cos(heading)
    s = np.sin(heading)

    # Undo the translation first, then apply the inverse rotation.
    # The inverse of the sensor->ego rotation is its transpose.
    rot_sensor_to_ego = np.array([[c, -s], [s, c]])
    shifted = point_ego - np.array([offset_x, offset_y])
    result = rot_sensor_to_ego.T @ shifted
    return np.asarray(result, dtype=np.float64)
@@ -0,0 +1,9 @@
1
+ """Map representations for ground truth and perception."""
2
+
3
+ from simple_autonomous_car.maps.frenet_map import FrenetMap
4
+ from simple_autonomous_car.maps.grid_ground_truth_map import GridGroundTruthMap
5
+ from simple_autonomous_car.maps.grid_map import GridMap
6
+ from simple_autonomous_car.maps.ground_truth_map import GroundTruthMap
7
+ from simple_autonomous_car.maps.perceived_map import PerceivedMap
8
+
9
+ __all__ = ["GroundTruthMap", "PerceivedMap", "FrenetMap", "GridMap", "GridGroundTruthMap"]
@@ -0,0 +1,97 @@
1
+ """Map representation in Frenet frame."""
2
+
3
+ import numpy as np
4
+
5
+ from simple_autonomous_car.frames.frenet import FrenetFrame
6
+ from simple_autonomous_car.track.track import Track
7
+
8
+
9
class FrenetMap:
    """Map representation in Frenet frame coordinates."""

    def __init__(self, track: Track):
        """
        Initialize Frenet map.

        Args:
            track: Track object
        """
        self.track = track
        self.frenet_frame = FrenetFrame(track)
        self._compute_frenet_bounds()

    def _compute_frenet_bounds(self) -> None:
        """Project the track boundaries into Frenet (s, d) coordinates."""
        to_frenet = self.frenet_frame.global_to_frenet
        n_points = len(self.track.centerline)

        # One (s, d) row per boundary vertex, paired with the centerline index.
        inner_coords = [list(to_frenet(self.track.inner_bound[i])) for i in range(n_points)]
        outer_coords = [list(to_frenet(self.track.outer_bound[i])) for i in range(n_points)]

        self.inner_bound_frenet = np.array(inner_coords)
        self.outer_bound_frenet = np.array(outer_coords)

    def get_bounds_at_s(self, s: float) -> tuple[float, float]:
        """
        Get inner and outer boundary lateral offsets at distance s.

        Args:
            s: Distance along path

        Returns:
            Tuple of (d_inner, d_outer) lateral offsets
        """
        s_wrapped = s % self.frenet_frame.total_length

        # Locate the containing segment, clamped to valid vertex indices.
        seg = int(np.searchsorted(self.frenet_frame.cumulative_distances, s_wrapped))
        seg = max(1, min(seg, len(self.track.centerline) - 1))

        return self.inner_bound_frenet[seg, 1], self.outer_bound_frenet[seg, 1]

    def get_bounds_in_range(self, s_start: float, s_end: float) -> tuple[np.ndarray, np.ndarray]:
        """
        Get inner and outer bounds in a range of s values.

        Args:
            s_start: Start distance along path
            s_end: End distance along path

        Returns:
            Tuple of (inner_bounds, outer_bounds) as arrays of shape (N, 2) with [s, d]
        """
        total = self.frenet_frame.total_length
        lo = s_start % total
        hi = s_end % total

        s_values = self.inner_bound_frenet[:, 0]
        if lo <= hi:
            mask = (s_values >= lo) & (s_values <= hi)
        else:
            # The requested range wraps past the start of the lap.
            mask = (s_values >= lo) | (s_values <= hi)

        return self.inner_bound_frenet[mask], self.outer_bound_frenet[mask]
@@ -0,0 +1,83 @@
1
+ """Ground truth map for grid-based environments."""
2
+
3
+ import numpy as np
4
+
5
+ from simple_autonomous_car.maps.grid_map import GridMap
6
+
7
+
8
class GridGroundTruthMap:
    """
    Ground truth map for grid-based environments with obstacles.

    Exposes the same interface as the track-based GroundTruthMap so that
    sensors can run against grid map environments unchanged.

    Parameters
    ----------
    grid_map : GridMap
        Grid map with obstacles.
    """

    def __init__(self, grid_map: GridMap):
        """Initialize ground truth map from grid map."""
        self.grid_map = grid_map
        self.track = None  # Grid environments carry no track geometry

    def get_visible_segments(
        self, car_position: np.ndarray, car_heading: float, horizon: float, fov: float = 2 * np.pi
    ) -> tuple[np.ndarray, np.ndarray, np.ndarray]:
        """
        Get visible obstacles within horizon and field of view.

        Parameters
        ----------
        car_position : np.ndarray
            Car position [x, y] in world frame.
        car_heading : float
            Car heading angle in radians.
        horizon : float
            Maximum distance to consider.
        fov : float, default=2*pi
            Field of view angle in radians.

        Returns
        -------
        Tuple[np.ndarray, np.ndarray, np.ndarray]
            (visible_obstacles, empty, empty) - compatible with track-based interface.
        """
        obstacles = self.grid_map.obstacles
        if len(obstacles) == 0:
            return np.empty((0, 2)), np.empty((0, 2)), np.empty((0, 2))

        # Vector and range from the car to every obstacle centre.
        offsets = obstacles - car_position
        ranges = np.linalg.norm(offsets, axis=1)

        # An obstacle counts while any part of it may lie inside the horizon,
        # hence the obstacle_size allowance on the range test.
        in_range = ranges <= (horizon + self.grid_map.obstacle_size)

        if fov < 2 * np.pi:
            # Relative bearing, renormalized to [-pi, pi].
            bearings = np.arctan2(offsets[:, 1], offsets[:, 0]) - car_heading
            bearings = np.arctan2(np.sin(bearings), np.cos(bearings))
            keep = in_range & (np.abs(bearings) <= fov / 2.0)
        else:
            # Full 360-degree view: the range test alone decides.
            keep = in_range

        # Obstacles play the role of the "centerline"; both bounds are empty.
        return obstacles[keep], np.empty((0, 2)), np.empty((0, 2))

    def get_all_bounds(self) -> tuple[np.ndarray, np.ndarray]:
        """Get map boundaries (for compatibility)."""
        bounds, _ = self.grid_map.get_bounds()
        return bounds, bounds.copy()