simple-autonomous-car 0.1.2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (50)
  1. simple_autonomous_car/__init__.py +96 -0
  2. simple_autonomous_car/alerts/__init__.py +5 -0
  3. simple_autonomous_car/alerts/track_bounds_alert.py +276 -0
  4. simple_autonomous_car/car/__init__.py +5 -0
  5. simple_autonomous_car/car/car.py +234 -0
  6. simple_autonomous_car/constants.py +112 -0
  7. simple_autonomous_car/control/__init__.py +7 -0
  8. simple_autonomous_car/control/base_controller.py +152 -0
  9. simple_autonomous_car/control/controller_viz.py +282 -0
  10. simple_autonomous_car/control/pid_controller.py +153 -0
  11. simple_autonomous_car/control/pure_pursuit_controller.py +578 -0
  12. simple_autonomous_car/costmap/__init__.py +12 -0
  13. simple_autonomous_car/costmap/base_costmap.py +187 -0
  14. simple_autonomous_car/costmap/grid_costmap.py +507 -0
  15. simple_autonomous_car/costmap/inflation.py +126 -0
  16. simple_autonomous_car/detection/__init__.py +5 -0
  17. simple_autonomous_car/detection/error_detector.py +165 -0
  18. simple_autonomous_car/filters/__init__.py +7 -0
  19. simple_autonomous_car/filters/base_filter.py +119 -0
  20. simple_autonomous_car/filters/kalman_filter.py +131 -0
  21. simple_autonomous_car/filters/particle_filter.py +162 -0
  22. simple_autonomous_car/footprint/__init__.py +7 -0
  23. simple_autonomous_car/footprint/base_footprint.py +128 -0
  24. simple_autonomous_car/footprint/circular_footprint.py +73 -0
  25. simple_autonomous_car/footprint/rectangular_footprint.py +123 -0
  26. simple_autonomous_car/frames/__init__.py +21 -0
  27. simple_autonomous_car/frames/frenet.py +267 -0
  28. simple_autonomous_car/maps/__init__.py +9 -0
  29. simple_autonomous_car/maps/frenet_map.py +97 -0
  30. simple_autonomous_car/maps/grid_ground_truth_map.py +83 -0
  31. simple_autonomous_car/maps/grid_map.py +361 -0
  32. simple_autonomous_car/maps/ground_truth_map.py +64 -0
  33. simple_autonomous_car/maps/perceived_map.py +169 -0
  34. simple_autonomous_car/perception/__init__.py +5 -0
  35. simple_autonomous_car/perception/perception.py +107 -0
  36. simple_autonomous_car/planning/__init__.py +7 -0
  37. simple_autonomous_car/planning/base_planner.py +184 -0
  38. simple_autonomous_car/planning/goal_planner.py +261 -0
  39. simple_autonomous_car/planning/track_planner.py +199 -0
  40. simple_autonomous_car/sensors/__init__.py +6 -0
  41. simple_autonomous_car/sensors/base_sensor.py +105 -0
  42. simple_autonomous_car/sensors/lidar_sensor.py +145 -0
  43. simple_autonomous_car/track/__init__.py +5 -0
  44. simple_autonomous_car/track/track.py +463 -0
  45. simple_autonomous_car/visualization/__init__.py +25 -0
  46. simple_autonomous_car/visualization/alert_viz.py +316 -0
  47. simple_autonomous_car/visualization/utils.py +169 -0
  48. simple_autonomous_car-0.1.2.dist-info/METADATA +324 -0
  49. simple_autonomous_car-0.1.2.dist-info/RECORD +50 -0
  50. simple_autonomous_car-0.1.2.dist-info/WHEEL +4 -0
@@ -0,0 +1,199 @@
1
+ """Track-based path planner."""
2
+
3
+ from typing import TYPE_CHECKING, Any, Optional
4
+
5
+ import numpy as np
6
+
7
+ from simple_autonomous_car.car.car import CarState
8
+ from simple_autonomous_car.constants import (
9
+ DEFAULT_PLAN_LINEWIDTH,
10
+ DEFAULT_PLANNER_LOOKAHEAD_DISTANCE,
11
+ DEFAULT_WAYPOINT_SPACING,
12
+ MIN_SEGMENT_LENGTH,
13
+ )
14
+ from simple_autonomous_car.planning.base_planner import BasePlanner
15
+ from simple_autonomous_car.track.track import Track
16
+
17
+ if TYPE_CHECKING:
18
+ from simple_autonomous_car.costmap.base_costmap import BaseCostmap
19
+
20
+
21
class TrackPlanner(BasePlanner):
    """
    Simple planner that follows the track centerline.

    This planner generates a path along the track centerline ahead of the car.
    It's useful for basic track following scenarios.

    Parameters
    ----------
    track : Track
        Track to plan on.
    lookahead_distance : float, default=50.0
        Distance ahead to plan (meters).
    waypoint_spacing : float, default=2.0
        Spacing between waypoints in meters.
    name : str, default="track_planner"
        Planner name.
    """

    def __init__(
        self,
        track: Track,
        lookahead_distance: float = DEFAULT_PLANNER_LOOKAHEAD_DISTANCE,
        waypoint_spacing: float = DEFAULT_WAYPOINT_SPACING,
        name: str = "track_planner",
    ):
        super().__init__(name=name)
        self.track = track
        self.lookahead_distance = lookahead_distance
        self.waypoint_spacing = waypoint_spacing

        # Precompute cumulative arc lengths once so plan() only does lookups.
        self._compute_cumulative_distances()

    def _compute_cumulative_distances(self) -> None:
        """Precompute cumulative arc-length distances along the track centerline.

        ``cumulative_distances[i]`` is the arc length from the first centerline
        point to point ``i``; ``total_length`` is the full centerline length.
        """
        centerline = np.asarray(self.track.centerline)
        # Vectorized per-segment lengths instead of a Python loop.
        segment_lengths = np.linalg.norm(np.diff(centerline, axis=0), axis=1)
        self.cumulative_distances = np.concatenate(([0.0], np.cumsum(segment_lengths)))
        self.total_length = self.cumulative_distances[-1]

    def plan(
        self,
        car_state: CarState,
        perception_data: dict | None = None,
        costmap: Optional["BaseCostmap"] = None,
        goal: np.ndarray | None = None,
    ) -> np.ndarray:
        """
        Generate plan along track centerline.

        Parameters
        ----------
        car_state : CarState
            Current car state.
        perception_data : dict, optional
            Not used by track planner.
        costmap : BaseCostmap, optional
            Costmap for obstacle avoidance (not used by basic track planner).
        goal : np.ndarray, optional
            Not used by track planner.

        Returns
        -------
        np.ndarray
            Planned path as array of shape (N, 2) with [x, y] waypoints.
            Empty (0, 2) array if the planner is disabled or the track is
            degenerate (fewer than two points / zero length).
        """
        if not self.enabled:
            return np.array([]).reshape(0, 2)

        centerline = np.asarray(self.track.centerline)
        # Bug fix: a degenerate track used to trigger a modulo-by-zero below
        # (s % total_length with total_length == 0). Return an empty plan.
        if len(centerline) < 2 or self.total_length <= 0.0:
            return np.array([]).reshape(0, 2)

        # Find closest point on track and its arc-length coordinate.
        car_pos = car_state.position()
        distances = np.linalg.norm(centerline - car_pos, axis=1)
        closest_idx = int(np.argmin(distances))
        s_start = self.cumulative_distances[closest_idx]

        # Generate waypoints ahead.
        num_waypoints = int(self.lookahead_distance / self.waypoint_spacing) + 1
        # Bug fix: when lookahead_distance < waypoint_spacing there is exactly
        # one waypoint and (num_waypoints - 1) == 0 caused a ZeroDivisionError.
        # Clamp the denominator so the lone waypoint sits at s_start.
        denom = max(num_waypoints - 1, 1)

        waypoints = []
        for i in range(num_waypoints):
            s = s_start + (i / denom) * self.lookahead_distance
            s = s % self.total_length  # Wrap around closed tracks

            # Find the segment containing arc length s.
            idx = int(np.searchsorted(self.cumulative_distances, s))
            idx = min(max(idx, 1), len(centerline) - 1)

            # Linearly interpolate within the segment; near-zero-length
            # segments snap to the segment start to avoid dividing by ~0.
            segment_dist = s - self.cumulative_distances[idx - 1]
            segment_length = (
                self.cumulative_distances[idx] - self.cumulative_distances[idx - 1]
            )
            if segment_length > MIN_SEGMENT_LENGTH:
                t = segment_dist / segment_length
            else:
                t = 0.0

            point = centerline[idx - 1] + t * (centerline[idx] - centerline[idx - 1])
            waypoints.append(point)

        return np.array(waypoints)

    def visualize(
        self,
        ax: Any,
        car_state: CarState,
        plan: np.ndarray | None = None,
        frame: str = "global",
        **kwargs: Any,
    ) -> None:
        """
        Visualize planned path on the given axes.

        Parameters
        ----------
        ax : matplotlib.axes.Axes
            Axes to plot on.
        car_state : CarState
            Current car state.
        plan : np.ndarray, optional
            Planned path (if already computed, otherwise will be computed).
        frame : str, default="global"
            Frame to plot in: "global" or "ego".
        **kwargs
            Additional visualization arguments:
            - color: str, color for plan line
            - linewidth: float, linewidth for plan
            - linestyle: str, linestyle for plan
            - show_waypoints: bool, whether to show waypoint markers
        """
        # Compute plan if not provided
        if plan is None:
            plan = self.plan(car_state)

        if len(plan) == 0:
            return

        # Transform plan to desired frame
        if frame == "ego":
            plan_plot = np.array([car_state.transform_to_car_frame(point) for point in plan])
        else:
            plan_plot = plan

        # Extract visualization parameters
        color = kwargs.pop("color", "green")
        linewidth = kwargs.pop("linewidth", DEFAULT_PLAN_LINEWIDTH)
        # Bug fix: linestyle was previously popped and discarded, so a caller-
        # supplied linestyle never took effect. Honor it now; the default is
        # solid, matching the old hard-coded "-"/"o-" format strings.
        linestyle = kwargs.pop("linestyle", "-")
        show_waypoints = kwargs.pop("show_waypoints", frame == "global")
        alpha = kwargs.pop("alpha", 0.8)

        # Plot plan (with or without waypoint markers)
        if show_waypoints:
            ax.plot(
                plan_plot[:, 0],
                plan_plot[:, 1],
                color=color,
                linestyle=linestyle,
                linewidth=linewidth,
                marker="o",
                markersize=4,
                label="Plan",
                alpha=alpha,
                **kwargs,
            )
        else:
            ax.plot(
                plan_plot[:, 0],
                plan_plot[:, 1],
                color=color,
                linestyle=linestyle,
                linewidth=linewidth,
                label="Plan",
                alpha=alpha,
                **kwargs,
            )
@@ -0,0 +1,6 @@
1
+ """Sensor system for autonomous vehicles."""
2
+
3
+ from simple_autonomous_car.sensors.base_sensor import BaseSensor
4
+ from simple_autonomous_car.sensors.lidar_sensor import LiDARSensor
5
+
6
+ __all__ = ["BaseSensor", "LiDARSensor"]
@@ -0,0 +1,105 @@
1
+ """Base sensor class for modular sensor system."""
2
+
3
+ from abc import ABC, abstractmethod
4
+ from typing import TYPE_CHECKING
5
+
6
+ import numpy as np
7
+
8
+ if TYPE_CHECKING:
9
+ from simple_autonomous_car.car.car import CarState
10
+
11
+ from simple_autonomous_car.perception.perception import PerceptionPoints
12
+
13
+
14
class BaseSensor(ABC):
    """
    Base class for all sensors.

    This abstract class defines the interface that all sensors must implement.
    Sensors can be added to a car and will provide perception data in ego frame.

    Attributes
    ----------
    name : str
        Name/identifier of the sensor.
    pose_ego : np.ndarray
        Sensor pose in ego frame [x, y, heading]. Default is [0, 0, 0] (at car origin).
    max_range : float
        Maximum sensor range in meters.
    enabled : bool
        Whether the sensor is enabled.
    """

    def __init__(
        self,
        name: str = "sensor",
        pose_ego: np.ndarray | None = None,
        max_range: float = 50.0,
        enabled: bool = True,
    ):
        """
        Initialize base sensor.

        Parameters
        ----------
        name : str, default="sensor"
            Name/identifier for this sensor instance.
        pose_ego : np.ndarray, optional
            Sensor pose in ego frame [x, y, heading] in meters and radians.
            If None, sensor is at car origin with zero heading.
        max_range : float, default=50.0
            Maximum sensor range in meters.
        enabled : bool, default=True
            Whether the sensor is enabled. Disabled sensors return empty data.
        """
        self.name = name
        # Default mounting pose: at the car origin, facing forward.
        if pose_ego is None:
            pose_ego = np.array([0.0, 0.0, 0.0])
        self.pose_ego = pose_ego
        self.max_range = max_range
        self.enabled = enabled

    @abstractmethod
    def sense(self, car_state: "CarState", environment_data: dict) -> PerceptionPoints:
        """
        Sense the environment and return perception data.

        Concrete sensors implement this as their main entry point: given the
        current car state and whatever environment data the sensor consumes,
        produce perception points expressed in the ego frame.

        Parameters
        ----------
        car_state : CarState
            Current state of the car.
        environment_data : dict
            Dictionary containing environment data (e.g., ground truth map,
            obstacles, etc.). Structure depends on sensor type.

        Returns
        -------
        PerceptionPoints
            Perception data in ego frame. Returns empty points if sensor is disabled.
        """
        pass

    def is_enabled(self) -> bool:
        """Return True if the sensor is currently enabled."""
        return self.enabled

    def enable(self) -> None:
        """Turn the sensor on."""
        self.enabled = True

    def disable(self) -> None:
        """Turn the sensor off."""
        self.enabled = False

    def set_pose(self, pose_ego: np.ndarray) -> None:
        """
        Set sensor pose in ego frame.

        Parameters
        ----------
        pose_ego : np.ndarray
            Sensor pose [x, y, heading] in ego frame.
        """
        # Coerce to a float64 array so downstream math is type-stable.
        self.pose_ego = np.asarray(pose_ego, dtype=np.float64)
@@ -0,0 +1,145 @@
1
+ """LiDAR sensor implementation."""
2
+
3
+ import numpy as np
4
+
5
+ # Import CarState here to avoid circular import
6
+ from simple_autonomous_car.car.car import CarState
7
+ from simple_autonomous_car.frames.frenet import sensor_to_ego
8
+ from simple_autonomous_car.maps.ground_truth_map import GroundTruthMap
9
+ from simple_autonomous_car.maps.perceived_map import PerceivedMap
10
+ from simple_autonomous_car.perception.perception import PerceptionPoints
11
+ from simple_autonomous_car.sensors.base_sensor import BaseSensor
12
+
13
+
14
class LiDARSensor(BaseSensor):
    """
    LiDAR sensor that detects track boundaries.

    This sensor simulates a 360° LiDAR that detects track boundaries
    and returns a point cloud. It combines ground truth map data with
    perception errors and sensor noise.

    Parameters
    ----------
    ground_truth_map : GroundTruthMap
        Ground truth map for reference.
    perceived_map : PerceivedMap
        Perceived map that adds localization errors.
    angular_resolution : float, default=0.1
        Angular resolution in radians (spacing between rays).
    point_noise_std : float, default=0.1
        Standard deviation of point measurement noise in meters.
    name : str, default="lidar"
        Sensor name/identifier.
    pose_ego : np.ndarray, optional
        Sensor pose in ego frame [x, y, heading].
    max_range : float, default=50.0
        Maximum sensor range in meters.
    """

    def __init__(
        self,
        ground_truth_map: GroundTruthMap,
        perceived_map: PerceivedMap,
        angular_resolution: float = 0.1,
        point_noise_std: float = 0.1,
        name: str = "lidar",
        pose_ego: np.ndarray | None = None,
        max_range: float = 50.0,
    ):
        super().__init__(name=name, pose_ego=pose_ego, max_range=max_range)
        self.ground_truth_map = ground_truth_map
        self.perceived_map = perceived_map
        self.angular_resolution = angular_resolution
        self.point_noise_std = point_noise_std

    def sense(self, car_state: CarState, environment_data: dict | None = None) -> PerceptionPoints:
        """
        Sense track boundaries and return point cloud.

        Parameters
        ----------
        car_state : CarState
            Current car state.
        environment_data : dict, optional
            Additional environment data (not used for LiDAR).

        Returns
        -------
        PerceptionPoints
            Point cloud in ego frame. Returns empty if sensor is disabled.
        """
        if not self.enabled:
            return PerceptionPoints(np.array([]).reshape(0, 2), frame="ego")

        # Refresh the perceived (error-injected) car state first.
        self.perceived_map.update_perceived_state(car_state)

        # Query the perceived boundary segments over a full 360° field of view.
        _, inner_pts, outer_pts = self.perceived_map.get_perceived_segments(
            self.max_range, fov=2 * np.pi
        )

        # Stack whichever boundary sets are non-empty into one point cloud.
        nonempty = [pts for pts in (inner_pts, outer_pts) if len(pts) > 0]
        if nonempty:
            cloud = np.vstack(nonempty)
        else:
            cloud = np.array([]).reshape(0, 2)

        # Perturb each point with Gaussian measurement noise.
        if len(cloud) > 0:
            cloud = cloud + np.random.normal(0, self.point_noise_std, cloud.shape)

        # If the sensor is mounted away from the car origin, re-express the
        # points in the ego frame; otherwise they already coincide.
        if np.allclose(self.pose_ego, [0.0, 0.0, 0.0]):
            cloud_ego = cloud
        else:
            cloud_ego = np.array([sensor_to_ego(p, self.pose_ego) for p in cloud])

        return PerceptionPoints(cloud_ego, frame="ego")

    def get_map_lines_car_frame(self, car_state: CarState) -> tuple[np.ndarray, np.ndarray]:
        """
        Get ground truth map lines in car frame.

        Parameters
        ----------
        car_state : CarState
            Current car state.

        Returns
        -------
        tuple[np.ndarray, np.ndarray]
            Tuple of (map_lines_car_frame, map_lines_world_frame).
        """
        # Query all ground-truth boundary segments over a 360° field of view.
        _, gt_inner, gt_outer = self.ground_truth_map.get_visible_segments(
            car_state.position(), car_state.heading, self.max_range, fov=2 * np.pi
        )

        # Stack whichever boundary sets are non-empty.
        nonempty = [pts for pts in (gt_inner, gt_outer) if len(pts) > 0]
        if nonempty:
            lines_world = np.vstack(nonempty)
        else:
            lines_world = np.array([]).reshape(0, 2)

        # Express the same lines in the car frame.
        if len(lines_world) > 0:
            lines_car = np.array(
                [car_state.transform_to_car_frame(p) for p in lines_world]
            )
        else:
            lines_car = np.array([]).reshape(0, 2)

        return lines_car, lines_world
@@ -0,0 +1,5 @@
1
+ """Track generation and management."""
2
+
3
+ from simple_autonomous_car.track.track import Track
4
+
5
+ __all__ = ["Track"]