simple-autonomous-car 0.1.2__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- simple_autonomous_car/__init__.py +96 -0
- simple_autonomous_car/alerts/__init__.py +5 -0
- simple_autonomous_car/alerts/track_bounds_alert.py +276 -0
- simple_autonomous_car/car/__init__.py +5 -0
- simple_autonomous_car/car/car.py +234 -0
- simple_autonomous_car/constants.py +112 -0
- simple_autonomous_car/control/__init__.py +7 -0
- simple_autonomous_car/control/base_controller.py +152 -0
- simple_autonomous_car/control/controller_viz.py +282 -0
- simple_autonomous_car/control/pid_controller.py +153 -0
- simple_autonomous_car/control/pure_pursuit_controller.py +578 -0
- simple_autonomous_car/costmap/__init__.py +12 -0
- simple_autonomous_car/costmap/base_costmap.py +187 -0
- simple_autonomous_car/costmap/grid_costmap.py +507 -0
- simple_autonomous_car/costmap/inflation.py +126 -0
- simple_autonomous_car/detection/__init__.py +5 -0
- simple_autonomous_car/detection/error_detector.py +165 -0
- simple_autonomous_car/filters/__init__.py +7 -0
- simple_autonomous_car/filters/base_filter.py +119 -0
- simple_autonomous_car/filters/kalman_filter.py +131 -0
- simple_autonomous_car/filters/particle_filter.py +162 -0
- simple_autonomous_car/footprint/__init__.py +7 -0
- simple_autonomous_car/footprint/base_footprint.py +128 -0
- simple_autonomous_car/footprint/circular_footprint.py +73 -0
- simple_autonomous_car/footprint/rectangular_footprint.py +123 -0
- simple_autonomous_car/frames/__init__.py +21 -0
- simple_autonomous_car/frames/frenet.py +267 -0
- simple_autonomous_car/maps/__init__.py +9 -0
- simple_autonomous_car/maps/frenet_map.py +97 -0
- simple_autonomous_car/maps/grid_ground_truth_map.py +83 -0
- simple_autonomous_car/maps/grid_map.py +361 -0
- simple_autonomous_car/maps/ground_truth_map.py +64 -0
- simple_autonomous_car/maps/perceived_map.py +169 -0
- simple_autonomous_car/perception/__init__.py +5 -0
- simple_autonomous_car/perception/perception.py +107 -0
- simple_autonomous_car/planning/__init__.py +7 -0
- simple_autonomous_car/planning/base_planner.py +184 -0
- simple_autonomous_car/planning/goal_planner.py +261 -0
- simple_autonomous_car/planning/track_planner.py +199 -0
- simple_autonomous_car/sensors/__init__.py +6 -0
- simple_autonomous_car/sensors/base_sensor.py +105 -0
- simple_autonomous_car/sensors/lidar_sensor.py +145 -0
- simple_autonomous_car/track/__init__.py +5 -0
- simple_autonomous_car/track/track.py +463 -0
- simple_autonomous_car/visualization/__init__.py +25 -0
- simple_autonomous_car/visualization/alert_viz.py +316 -0
- simple_autonomous_car/visualization/utils.py +169 -0
- simple_autonomous_car-0.1.2.dist-info/METADATA +324 -0
- simple_autonomous_car-0.1.2.dist-info/RECORD +50 -0
- simple_autonomous_car-0.1.2.dist-info/WHEEL +4 -0
|
@@ -0,0 +1,361 @@
|
|
|
1
|
+
"""Grid-based map with obstacles for goal-based navigation."""
|
|
2
|
+
|
|
3
|
+
from typing import TYPE_CHECKING, Any, Optional
|
|
4
|
+
|
|
5
|
+
import matplotlib.patches as patches
|
|
6
|
+
import numpy as np
|
|
7
|
+
|
|
8
|
+
if TYPE_CHECKING:
|
|
9
|
+
from simple_autonomous_car.car.car import CarState
|
|
10
|
+
|
|
11
|
+
|
|
12
|
+
class GridMap:
|
|
13
|
+
"""
|
|
14
|
+
Represents a grid-based map with obstacles for goal-based navigation.
|
|
15
|
+
|
|
16
|
+
This is similar to Track but for grid-based environments with obstacles.
|
|
17
|
+
It provides a compatible interface for visualization and planning.
|
|
18
|
+
|
|
19
|
+
Parameters
|
|
20
|
+
----------
|
|
21
|
+
width : float
|
|
22
|
+
Width of the map in meters.
|
|
23
|
+
height : float
|
|
24
|
+
Height of the map in meters.
|
|
25
|
+
resolution : float, default=0.5
|
|
26
|
+
Resolution in meters per cell.
|
|
27
|
+
obstacles : np.ndarray, optional
|
|
28
|
+
Array of obstacle positions [[x1, y1], [x2, y2], ...] or None.
|
|
29
|
+
obstacle_size : float, default=1.0
|
|
30
|
+
Size of each obstacle (radius or half-width).
|
|
31
|
+
"""
|
|
32
|
+
|
|
33
|
+
def __init__(
|
|
34
|
+
self,
|
|
35
|
+
width: float,
|
|
36
|
+
height: float,
|
|
37
|
+
resolution: float = 0.5,
|
|
38
|
+
obstacles: np.ndarray | None = None,
|
|
39
|
+
obstacle_size: float = 1.0,
|
|
40
|
+
):
|
|
41
|
+
self.width = width
|
|
42
|
+
self.height = height
|
|
43
|
+
self.resolution = resolution
|
|
44
|
+
self.obstacle_size = obstacle_size
|
|
45
|
+
|
|
46
|
+
# Store obstacles
|
|
47
|
+
if obstacles is None:
|
|
48
|
+
self.obstacles = np.array([]).reshape(0, 2)
|
|
49
|
+
else:
|
|
50
|
+
self.obstacles = np.asarray(obstacles, dtype=np.float64)
|
|
51
|
+
if self.obstacles.ndim != 2 or self.obstacles.shape[1] != 2:
|
|
52
|
+
raise ValueError("obstacles must be shape (N, 2)")
|
|
53
|
+
|
|
54
|
+
# Create grid representation for fast collision checking
|
|
55
|
+
self._create_grid()
|
|
56
|
+
|
|
57
|
+
def _create_grid(self) -> None:
|
|
58
|
+
"""Create internal grid representation of obstacles."""
|
|
59
|
+
self.width_cells = int(self.width / self.resolution)
|
|
60
|
+
self.height_cells = int(self.height / self.resolution)
|
|
61
|
+
self.grid = np.zeros((self.height_cells, self.width_cells), dtype=bool)
|
|
62
|
+
|
|
63
|
+
# Mark obstacles in grid
|
|
64
|
+
for obstacle in self.obstacles:
|
|
65
|
+
x, y = obstacle
|
|
66
|
+
# Convert to grid coordinates
|
|
67
|
+
grid_x = int((x + self.width / 2) / self.resolution)
|
|
68
|
+
grid_y = int((y + self.height / 2) / self.resolution)
|
|
69
|
+
|
|
70
|
+
# Mark cells within obstacle_size
|
|
71
|
+
radius_cells = int(self.obstacle_size / self.resolution)
|
|
72
|
+
for dy in range(-radius_cells, radius_cells + 1):
|
|
73
|
+
for dx in range(-radius_cells, radius_cells + 1):
|
|
74
|
+
gx = grid_x + dx
|
|
75
|
+
gy = grid_y + dy
|
|
76
|
+
if 0 <= gx < self.width_cells and 0 <= gy < self.height_cells:
|
|
77
|
+
dist = np.sqrt(dx**2 + dy**2) * self.resolution
|
|
78
|
+
if dist <= self.obstacle_size:
|
|
79
|
+
self.grid[gy, gx] = True
|
|
80
|
+
|
|
81
|
+
def is_obstacle(self, position: np.ndarray) -> bool:
|
|
82
|
+
"""
|
|
83
|
+
Check if a position is occupied by an obstacle.
|
|
84
|
+
|
|
85
|
+
Parameters
|
|
86
|
+
----------
|
|
87
|
+
position : np.ndarray
|
|
88
|
+
Position [x, y] in world coordinates.
|
|
89
|
+
|
|
90
|
+
Returns
|
|
91
|
+
-------
|
|
92
|
+
bool
|
|
93
|
+
True if position is occupied, False otherwise.
|
|
94
|
+
"""
|
|
95
|
+
# Convert to grid coordinates
|
|
96
|
+
grid_x = int((position[0] + self.width / 2) / self.resolution)
|
|
97
|
+
grid_y = int((position[1] + self.height / 2) / self.resolution)
|
|
98
|
+
|
|
99
|
+
if 0 <= grid_x < self.width_cells and 0 <= grid_y < self.height_cells:
|
|
100
|
+
result = self.grid[grid_y, grid_x]
|
|
101
|
+
return bool(result)
|
|
102
|
+
return True # Out of bounds is considered obstacle
|
|
103
|
+
|
|
104
|
+
def is_valid_position(self, position: np.ndarray) -> bool:
|
|
105
|
+
"""
|
|
106
|
+
Check if a position is valid (within bounds and not obstacle).
|
|
107
|
+
|
|
108
|
+
Parameters
|
|
109
|
+
----------
|
|
110
|
+
position : np.ndarray
|
|
111
|
+
Position [x, y] in world coordinates.
|
|
112
|
+
|
|
113
|
+
Returns
|
|
114
|
+
-------
|
|
115
|
+
bool
|
|
116
|
+
True if position is valid, False otherwise.
|
|
117
|
+
"""
|
|
118
|
+
x, y = position
|
|
119
|
+
# Check bounds
|
|
120
|
+
if x < -self.width / 2 or x > self.width / 2:
|
|
121
|
+
return False
|
|
122
|
+
if y < -self.height / 2 or y > self.height / 2:
|
|
123
|
+
return False
|
|
124
|
+
|
|
125
|
+
# Check obstacles
|
|
126
|
+
return not self.is_obstacle(position)
|
|
127
|
+
|
|
128
|
+
def get_bounds(self) -> tuple[np.ndarray, np.ndarray]:
|
|
129
|
+
"""
|
|
130
|
+
Get map boundaries (for compatibility with Track interface).
|
|
131
|
+
|
|
132
|
+
Returns
|
|
133
|
+
-------
|
|
134
|
+
Tuple[np.ndarray, np.ndarray]
|
|
135
|
+
(inner_bound, outer_bound) - both are the same for grid maps.
|
|
136
|
+
"""
|
|
137
|
+
bounds = np.array(
|
|
138
|
+
[
|
|
139
|
+
[-self.width / 2, -self.height / 2],
|
|
140
|
+
[self.width / 2, -self.height / 2],
|
|
141
|
+
[self.width / 2, self.height / 2],
|
|
142
|
+
[-self.width / 2, self.height / 2],
|
|
143
|
+
[-self.width / 2, -self.height / 2], # Close the loop
|
|
144
|
+
]
|
|
145
|
+
)
|
|
146
|
+
return bounds, bounds.copy()
|
|
147
|
+
|
|
148
|
+
def get_boundary_obstacles(self, spacing: float | None = None) -> np.ndarray:
|
|
149
|
+
"""
|
|
150
|
+
Get boundary points as obstacles for costmap.
|
|
151
|
+
|
|
152
|
+
Samples points along the map boundaries at regular intervals.
|
|
153
|
+
|
|
154
|
+
Parameters
|
|
155
|
+
----------
|
|
156
|
+
spacing : float, optional
|
|
157
|
+
Spacing between boundary points in meters.
|
|
158
|
+
If None, uses resolution.
|
|
159
|
+
|
|
160
|
+
Returns
|
|
161
|
+
-------
|
|
162
|
+
np.ndarray
|
|
163
|
+
Array of boundary obstacle positions [[x1, y1], [x2, y2], ...]
|
|
164
|
+
"""
|
|
165
|
+
if spacing is None:
|
|
166
|
+
spacing = self.resolution
|
|
167
|
+
|
|
168
|
+
boundary_points = []
|
|
169
|
+
|
|
170
|
+
# Bottom edge (left to right)
|
|
171
|
+
num_points_x = int(self.width / spacing) + 1
|
|
172
|
+
for i in range(num_points_x):
|
|
173
|
+
x = -self.width / 2 + i * spacing
|
|
174
|
+
if x > self.width / 2:
|
|
175
|
+
x = self.width / 2
|
|
176
|
+
boundary_points.append([x, -self.height / 2])
|
|
177
|
+
|
|
178
|
+
# Right edge (bottom to top)
|
|
179
|
+
num_points_y = int(self.height / spacing) + 1
|
|
180
|
+
for i in range(1, num_points_y): # Skip corner already added
|
|
181
|
+
y = -self.height / 2 + i * spacing
|
|
182
|
+
if y > self.height / 2:
|
|
183
|
+
y = self.height / 2
|
|
184
|
+
boundary_points.append([self.width / 2, y])
|
|
185
|
+
|
|
186
|
+
# Top edge (right to left)
|
|
187
|
+
for i in range(1, num_points_x): # Skip corner already added
|
|
188
|
+
x = self.width / 2 - i * spacing
|
|
189
|
+
if x < -self.width / 2:
|
|
190
|
+
x = -self.width / 2
|
|
191
|
+
boundary_points.append([x, self.height / 2])
|
|
192
|
+
|
|
193
|
+
# Left edge (top to bottom)
|
|
194
|
+
for i in range(1, num_points_y - 1): # Skip both corners already added
|
|
195
|
+
y = self.height / 2 - i * spacing
|
|
196
|
+
if y < -self.height / 2:
|
|
197
|
+
y = -self.height / 2
|
|
198
|
+
boundary_points.append([-self.width / 2, y])
|
|
199
|
+
|
|
200
|
+
return np.array(boundary_points)
|
|
201
|
+
|
|
202
|
+
@classmethod
|
|
203
|
+
def create_random_map(
|
|
204
|
+
cls,
|
|
205
|
+
width: float = 50.0,
|
|
206
|
+
height: float = 50.0,
|
|
207
|
+
resolution: float = 0.5,
|
|
208
|
+
num_obstacles: int = 10,
|
|
209
|
+
obstacle_size: float = 2.0,
|
|
210
|
+
seed: int | None = None,
|
|
211
|
+
) -> "GridMap":
|
|
212
|
+
"""
|
|
213
|
+
Create a random grid map with obstacles.
|
|
214
|
+
|
|
215
|
+
Parameters
|
|
216
|
+
----------
|
|
217
|
+
width : float, default=50.0
|
|
218
|
+
Width of map in meters.
|
|
219
|
+
height : float, default=50.0
|
|
220
|
+
Height of map in meters.
|
|
221
|
+
resolution : float, default=0.5
|
|
222
|
+
Resolution in meters per cell.
|
|
223
|
+
num_obstacles : int, default=10
|
|
224
|
+
Number of obstacles to place.
|
|
225
|
+
obstacle_size : float, default=2.0
|
|
226
|
+
Size of each obstacle.
|
|
227
|
+
seed : int, optional
|
|
228
|
+
Random seed for reproducibility.
|
|
229
|
+
|
|
230
|
+
Returns
|
|
231
|
+
-------
|
|
232
|
+
GridMap
|
|
233
|
+
Random grid map instance.
|
|
234
|
+
"""
|
|
235
|
+
if seed is not None:
|
|
236
|
+
np.random.seed(seed)
|
|
237
|
+
|
|
238
|
+
# Generate random obstacle positions
|
|
239
|
+
obstacles = []
|
|
240
|
+
margin = obstacle_size + 2.0 # Keep obstacles away from edges
|
|
241
|
+
|
|
242
|
+
for _ in range(num_obstacles):
|
|
243
|
+
x = np.random.uniform(-width / 2 + margin, width / 2 - margin)
|
|
244
|
+
y = np.random.uniform(-height / 2 + margin, height / 2 - margin)
|
|
245
|
+
obstacles.append([x, y])
|
|
246
|
+
|
|
247
|
+
return cls(
|
|
248
|
+
width=width,
|
|
249
|
+
height=height,
|
|
250
|
+
resolution=resolution,
|
|
251
|
+
obstacles=np.array(obstacles),
|
|
252
|
+
obstacle_size=obstacle_size,
|
|
253
|
+
)
|
|
254
|
+
|
|
255
|
+
def visualize(
|
|
256
|
+
self,
|
|
257
|
+
ax: Any,
|
|
258
|
+
car_state: Optional["CarState"] = None,
|
|
259
|
+
frame: str = "global",
|
|
260
|
+
goal: np.ndarray | None = None,
|
|
261
|
+
horizon: float | None = None,
|
|
262
|
+
**kwargs: Any,
|
|
263
|
+
) -> None:
|
|
264
|
+
"""
|
|
265
|
+
Visualize the grid map.
|
|
266
|
+
|
|
267
|
+
Parameters
|
|
268
|
+
----------
|
|
269
|
+
ax : matplotlib.axes.Axes
|
|
270
|
+
Axes to plot on.
|
|
271
|
+
car_state : CarState, optional
|
|
272
|
+
Current car state (for frame transformation).
|
|
273
|
+
frame : str, default="global"
|
|
274
|
+
Frame to plot in: "global" or "ego".
|
|
275
|
+
goal : np.ndarray, optional
|
|
276
|
+
Goal position [x, y] to display.
|
|
277
|
+
horizon : float, optional
|
|
278
|
+
Maximum distance to display (for ego frame filtering).
|
|
279
|
+
**kwargs
|
|
280
|
+
Additional visualization arguments.
|
|
281
|
+
"""
|
|
282
|
+
# Extract visualization parameters
|
|
283
|
+
alpha = kwargs.pop("alpha", 0.3)
|
|
284
|
+
obstacle_color = kwargs.pop("obstacle_color", "red")
|
|
285
|
+
goal_color = kwargs.pop("goal_color", "green")
|
|
286
|
+
goal_size = kwargs.pop("goal_size", 1.0)
|
|
287
|
+
|
|
288
|
+
# Draw map boundaries
|
|
289
|
+
bounds, _ = self.get_bounds()
|
|
290
|
+
if frame == "ego" and horizon is not None and car_state is not None:
|
|
291
|
+
# Transform boundaries to ego frame and filter by horizon
|
|
292
|
+
bounds_ego = np.array([car_state.transform_to_car_frame(b) for b in bounds])
|
|
293
|
+
distances = np.linalg.norm(bounds_ego, axis=1)
|
|
294
|
+
# Show boundaries that are within horizon OR form a visible segment
|
|
295
|
+
# Keep at least 4 points to form a rectangle
|
|
296
|
+
if len(bounds_ego) >= 4:
|
|
297
|
+
# For a rectangle, show all points but clip to horizon if needed
|
|
298
|
+
# Draw segments that are within horizon
|
|
299
|
+
for i in range(len(bounds_ego)):
|
|
300
|
+
next_i = (i + 1) % len(bounds_ego)
|
|
301
|
+
if distances[i] <= horizon or distances[next_i] <= horizon:
|
|
302
|
+
# Draw segment if at least one endpoint is visible
|
|
303
|
+
ax.plot(
|
|
304
|
+
[bounds_ego[i, 0], bounds_ego[next_i, 0]],
|
|
305
|
+
[bounds_ego[i, 1], bounds_ego[next_i, 1]],
|
|
306
|
+
"k-",
|
|
307
|
+
linewidth=2,
|
|
308
|
+
label="Map Bounds" if i == 0 else "",
|
|
309
|
+
**kwargs,
|
|
310
|
+
)
|
|
311
|
+
else:
|
|
312
|
+
ax.plot(bounds[:, 0], bounds[:, 1], "k-", linewidth=2, label="Map Bounds", **kwargs)
|
|
313
|
+
|
|
314
|
+
# Draw obstacles (filter by horizon in ego frame)
|
|
315
|
+
for obstacle in self.obstacles:
|
|
316
|
+
if frame == "ego" and car_state is not None:
|
|
317
|
+
# Transform to ego frame
|
|
318
|
+
obstacle_ego = car_state.transform_to_car_frame(obstacle)
|
|
319
|
+
circle = patches.Circle(
|
|
320
|
+
(float(obstacle_ego[0]), float(obstacle_ego[1])),
|
|
321
|
+
self.obstacle_size,
|
|
322
|
+
color=obstacle_color,
|
|
323
|
+
alpha=alpha,
|
|
324
|
+
**kwargs,
|
|
325
|
+
)
|
|
326
|
+
else:
|
|
327
|
+
circle = patches.Circle(
|
|
328
|
+
(float(obstacle[0]), float(obstacle[1])),
|
|
329
|
+
self.obstacle_size,
|
|
330
|
+
color=obstacle_color,
|
|
331
|
+
alpha=alpha,
|
|
332
|
+
**kwargs,
|
|
333
|
+
)
|
|
334
|
+
ax.add_patch(circle)
|
|
335
|
+
|
|
336
|
+
# Draw goal if provided (filter by horizon in ego frame)
|
|
337
|
+
if goal is not None:
|
|
338
|
+
if frame == "ego" and car_state is not None:
|
|
339
|
+
goal_plot = car_state.transform_to_car_frame(goal)
|
|
340
|
+
# Filter by horizon
|
|
341
|
+
if horizon is not None:
|
|
342
|
+
distance = np.linalg.norm(goal_plot)
|
|
343
|
+
if distance > horizon:
|
|
344
|
+
return # Goal outside horizon, don't draw
|
|
345
|
+
else:
|
|
346
|
+
goal_plot = goal
|
|
347
|
+
|
|
348
|
+
ax.plot(
|
|
349
|
+
goal_plot[0],
|
|
350
|
+
goal_plot[1],
|
|
351
|
+
"o",
|
|
352
|
+
color=goal_color,
|
|
353
|
+
markersize=goal_size * 10,
|
|
354
|
+
label="Goal",
|
|
355
|
+
markeredgecolor="black",
|
|
356
|
+
markeredgewidth=2,
|
|
357
|
+
**kwargs,
|
|
358
|
+
)
|
|
359
|
+
|
|
360
|
+
ax.set_aspect("equal")
|
|
361
|
+
ax.grid(True, alpha=0.3)
|
|
@@ -0,0 +1,64 @@
|
|
|
1
|
+
"""Ground truth map representation."""
|
|
2
|
+
|
|
3
|
+
import numpy as np
|
|
4
|
+
|
|
5
|
+
from simple_autonomous_car.track.track import Track
|
|
6
|
+
|
|
7
|
+
|
|
8
|
+
class GroundTruthMap:
    """Ground truth map of the track in world coordinates."""

    def __init__(self, track: Track):
        """
        Initialize ground truth map.

        Args:
            track: Track object containing centerline and bounds
        """
        self.track = track
        # Defensive copies so later mutation of the track cannot alter
        # this map's geometry.
        self.centerline = np.copy(track.centerline)
        self.inner_bound = np.copy(track.inner_bound)
        self.outer_bound = np.copy(track.outer_bound)

    def get_visible_segments(
        self, car_position: np.ndarray, car_heading: float, horizon: float, fov: float = 2 * np.pi
    ) -> tuple[np.ndarray, np.ndarray, np.ndarray]:
        """
        Get visible segments of the track within horizon and field of view.

        Args:
            car_position: Car position [x, y] in world frame
            car_heading: Car heading angle in radians
            horizon: Maximum distance to consider
            fov: Field of view angle in radians (default: 2*pi = 360 degrees)

        Returns:
            Tuple of (visible_centerline, visible_inner_bound, visible_outer_bound)
        """
        # Offsets of every centerline sample relative to the car.
        offsets = self.centerline - car_position
        in_range = np.linalg.norm(offsets, axis=1) <= horizon

        if fov < 2 * np.pi:
            # Bearing of each point relative to the car heading, wrapped
            # into [-pi, pi] before comparing against half the FOV.
            bearings = np.arctan2(offsets[:, 1], offsets[:, 0]) - car_heading
            bearings = np.arctan2(np.sin(bearings), np.cos(bearings))
            visible = in_range & (np.abs(bearings) <= fov / 2.0)
        else:
            # Full 360-degree view: distance is the only constraint.
            visible = in_range

        return (
            self.centerline[visible],
            self.inner_bound[visible],
            self.outer_bound[visible],
        )

    def get_all_bounds(self) -> tuple[np.ndarray, np.ndarray]:
        """Get all inner and outer bounds."""
        return self.inner_bound, self.outer_bound
|
|
@@ -0,0 +1,169 @@
|
|
|
1
|
+
"""Perceived map with noise and reference frame transformations."""
|
|
2
|
+
|
|
3
|
+
from typing import TYPE_CHECKING
|
|
4
|
+
|
|
5
|
+
import numpy as np
|
|
6
|
+
|
|
7
|
+
if TYPE_CHECKING:
|
|
8
|
+
from simple_autonomous_car.car.car import CarState
|
|
9
|
+
|
|
10
|
+
from simple_autonomous_car.maps.ground_truth_map import GroundTruthMap
|
|
11
|
+
|
|
12
|
+
|
|
13
|
+
class PerceivedMap:
    """Perceived map with potential errors and noise.

    Wraps a :class:`GroundTruthMap` and simulates imperfect perception:
    a noisy localization estimate (perceived car state) and Gaussian
    measurement noise on every visible track point.
    """

    def __init__(
        self,
        ground_truth: GroundTruthMap,
        position_noise_std: float = 0.1,
        orientation_noise_std: float = 0.05,
        measurement_noise_std: float = 0.2,
    ):
        """
        Initialize perceived map.

        Args:
            ground_truth: Ground truth map
            position_noise_std: Standard deviation of position estimation error
            orientation_noise_std: Standard deviation of orientation estimation error
            measurement_noise_std: Standard deviation of measurement noise
        """
        self.ground_truth = ground_truth
        self.position_noise_std = position_noise_std
        self.orientation_noise_std = orientation_noise_std
        self.measurement_noise_std = measurement_noise_std

        # Perceived car state (with localization errors); populated by
        # update_perceived_state().
        self.perceived_car_state: CarState | None = None

    def update_perceived_state(self, true_car_state: "CarState") -> None:
        """
        Update perceived car state with localization errors.

        Gaussian noise is added to the true position and heading; velocity
        and steering angle are carried over unchanged.

        Args:
            true_car_state: True car state
        """
        # Add position noise
        position_noise = np.random.normal(0, self.position_noise_std, 2)
        perceived_x = true_car_state.x + position_noise[0]
        perceived_y = true_car_state.y + position_noise[1]

        # Add orientation noise
        orientation_noise = np.random.normal(0, self.orientation_noise_std)
        perceived_heading = true_car_state.heading + orientation_noise

        # Import here to avoid circular import
        from simple_autonomous_car.car.car import CarState

        self.perceived_car_state = CarState(
            x=perceived_x,
            y=perceived_y,
            heading=perceived_heading,
            velocity=true_car_state.velocity,
            steering_angle=true_car_state.steering_angle,
        )

    def get_perceived_segments(
        self, horizon: float, fov: float = 2 * np.pi
    ) -> tuple[np.ndarray, np.ndarray, np.ndarray]:
        """
        Get perceived track segments in perceived car frame.

        Args:
            horizon: Maximum distance to consider
            fov: Field of view angle in radians

        Returns:
            Tuple of (perceived_centerline_car_frame, perceived_inner_car_frame, perceived_outer_car_frame)

        Raises:
            ValueError: If update_perceived_state has not been called yet.
        """
        # REFACTOR: the visibility + noise pipeline was duplicated between
        # this method and get_perceived_segments_world_frame. Delegate to the
        # world-frame variant (same draws in the same order, so the RNG
        # stream is unchanged) and only do the frame transformation here.
        centerline_world, inner_world, outer_world = self.get_perceived_segments_world_frame(
            horizon, fov
        )

        if len(centerline_world) == 0:
            return centerline_world, inner_world, outer_world

        # perceived_car_state is guaranteed non-None here: the delegated
        # call above raises ValueError otherwise.
        to_car = self.perceived_car_state.transform_to_car_frame
        perceived_centerline_car = np.array([to_car(point) for point in centerline_world])
        perceived_inner_car = np.array([to_car(point) for point in inner_world])
        perceived_outer_car = np.array([to_car(point) for point in outer_world])

        return perceived_centerline_car, perceived_inner_car, perceived_outer_car

    def get_perceived_segments_world_frame(
        self, horizon: float, fov: float = 2 * np.pi
    ) -> tuple[np.ndarray, np.ndarray, np.ndarray]:
        """
        Get perceived track segments in world frame.

        Args:
            horizon: Maximum distance to consider
            fov: Field of view angle in radians

        Returns:
            Tuple of (perceived_centerline_world, perceived_inner_world, perceived_outer_world)

        Raises:
            ValueError: If update_perceived_state has not been called yet.
        """
        if self.perceived_car_state is None:
            raise ValueError("Perceived car state not set. Call update_perceived_state first.")

        # Get visible segments from ground truth, as seen from the (noisy)
        # perceived pose.
        visible_centerline, visible_inner, visible_outer = self.ground_truth.get_visible_segments(
            self.perceived_car_state.position(),
            self.perceived_car_state.heading,
            horizon,
            fov,
        )

        if len(visible_centerline) == 0:
            return (
                np.array([]).reshape(0, 2),
                np.array([]).reshape(0, 2),
                np.array([]).reshape(0, 2),
            )

        # Add i.i.d. Gaussian measurement noise to every sampled point.
        centerline_noise = np.random.normal(0, self.measurement_noise_std, visible_centerline.shape)
        inner_noise = np.random.normal(0, self.measurement_noise_std, visible_inner.shape)
        outer_noise = np.random.normal(0, self.measurement_noise_std, visible_outer.shape)

        perceived_centerline_world = visible_centerline + centerline_noise
        perceived_inner_world = visible_inner + inner_noise
        perceived_outer_world = visible_outer + outer_noise

        return perceived_centerline_world, perceived_inner_world, perceived_outer_world
|