Commit 51dc439

Merge branch 'autowarefoundation:main' into fix/euclidean-cluster-params

beginningfan authored Jun 24, 2024
2 parents 4c732e5 + 2a3eaee commit 51dc439
Showing 90 changed files with 2,676 additions and 1,511 deletions.
3 changes: 2 additions & 1 deletion .cppcheck_suppressions
@@ -1,12 +1,12 @@
*:*/test/*

arrayIndexThenCheck
checkersReport
constParameterPointer
constParameterReference
constVariable
constVariableReference
containerOutOfBounds
// cspell: ignore cstyle
cstyleCast
duplicateBranch
duplicateBreak
@@ -28,6 +28,7 @@ shadowArgument
shadowFunction
shadowVariable
syntaxError
// cspell: ignore uninit
uninitMemberVar
unknownMacro
unmatchedSuppression
18 changes: 6 additions & 12 deletions .github/workflows/clang-tidy-differential.yaml
@@ -20,14 +20,10 @@ jobs:
runs-on: ubuntu-latest
container: ghcr.io/autowarefoundation/autoware:latest-prebuilt-cuda
steps:
- name: Set PR fetch depth
run: echo "PR_FETCH_DEPTH=$(( ${{ github.event.pull_request.commits }} + 1 ))" >> "${GITHUB_ENV}"

- name: Checkout PR branch and all PR commits
uses: actions/checkout@v4
with:
ref: ${{ github.event.pull_request.head.sha }}
fetch-depth: ${{ env.PR_FETCH_DEPTH }}
fetch-depth: 0

- name: Show disk space before the tasks
run: df -h
@@ -41,19 +37,17 @@ jobs:

- name: Get modified files
id: get-modified-files
uses: tj-actions/changed-files@v42
with:
files: |
**/*.cpp
**/*.hpp
run: |
echo "changed_files=$(git diff --name-only "origin/${{ github.base_ref }}"...HEAD | grep -E '\.(cpp|hpp)$' || true)" >> $GITHUB_OUTPUT
shell: bash

- name: Run clang-tidy
if: ${{ steps.get-modified-files.outputs.all_changed_files != '' }}
if: ${{ steps.get-modified-files.outputs.changed_files != '' }}
uses: autowarefoundation/autoware-github-actions/clang-tidy@v1
with:
rosdistro: humble
target-packages: ${{ steps.get-modified-packages.outputs.modified-packages }}
target-files: ${{ steps.get-modified-files.outputs.all_changed_files }}
target-files: ${{ steps.get-modified-files.outputs.changed_files }}
clang-tidy-config-url: https://raw.githubusercontent.com/autowarefoundation/autoware/main/.clang-tidy
build-depends-repos: build_depends.repos

2 changes: 1 addition & 1 deletion .github/workflows/cppcheck-daily.yaml
@@ -22,7 +22,7 @@ jobs:
continue-on-error: true
id: cppcheck
run: |
cppcheck --enable=all --inconclusive --check-level=exhaustive --error-exitcode=1 --xml . 2> cppcheck-report.xml
cppcheck --enable=all --inconclusive --check-level=exhaustive --suppress=*:*/test/* --error-exitcode=1 --xml . 2> cppcheck-report.xml
shell: bash

- name: Count errors by error ID and severity
30 changes: 25 additions & 5 deletions common/autoware_test_utils/src/autoware_test_utils.cpp
@@ -19,6 +19,8 @@
#include <rclcpp/logging.hpp>
#include <rclcpp/node.hpp>

#include <lanelet2_core/geometry/LineString.h>

#include <utility>

namespace autoware::test_utils
@@ -54,14 +56,32 @@ lanelet::LaneletMapPtr loadMap(const std::string & lanelet2_filename)
lanelet::ErrorMessages errors{};
lanelet::projection::MGRSProjector projector{};
lanelet::LaneletMapPtr map = lanelet::load(lanelet2_filename, projector, &errors);
if (errors.empty()) {
return map;
if (!errors.empty()) {
for (const auto & error : errors) {
RCLCPP_ERROR_STREAM(rclcpp::get_logger("map_loader"), error);
}
return nullptr;
}

for (const auto & error : errors) {
RCLCPP_ERROR_STREAM(rclcpp::get_logger("map_loader"), error);
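// apply the optional local_x / local_y attributes so points use local map coordinates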
for (lanelet::Point3d point : map->pointLayer) {
if (point.hasAttribute("local_x")) {
point.x() = point.attribute("local_x").asDouble().value();
}
if (point.hasAttribute("local_y")) {
point.y() = point.attribute("local_y").asDouble().value();
}
}
return nullptr;

// realign lanelet borders using updated points
for (lanelet::Lanelet lanelet : map->laneletLayer) {
auto left = lanelet.leftBound();
auto right = lanelet.rightBound();
std::tie(left, right) = lanelet::geometry::align(left, right);
lanelet.setLeftBound(left);
lanelet.setRightBound(right);
}

return map;
}

LaneletMapBin convertToMapBinMsg(
@@ -15,7 +15,9 @@
#ifndef TENSORRT_COMMON__TENSORRT_COMMON_HPP_
#define TENSORRT_COMMON__TENSORRT_COMMON_HPP_

#ifndef YOLOX_STANDALONE
#include <rclcpp/rclcpp.hpp>
#endif

#include <NvInfer.h>
#include <NvOnnxParser.h>
@@ -86,6 +88,7 @@ struct BuildConfig
profile_per_layer(profile_per_layer),
clip_value(clip_value)
{
#ifndef YOLOX_STANDALONE
if (
std::find(valid_calib_type.begin(), valid_calib_type.end(), calib_type_str) ==
valid_calib_type.end()) {
@@ -95,6 +98,7 @@
<< "Default calibration type will be used: MinMax" << std::endl;
std::cerr << message.str();
}
#endif
}
};

@@ -72,7 +72,7 @@ class BirdEyeViewController : public rviz_common::FramePositionTrackingViewContr

void onInitialize() override;

void handleMouseEvent(rviz_common::ViewportMouseEvent & evt) override;
void handleMouseEvent(rviz_common::ViewportMouseEvent & event) override;

void lookAt(const Ogre::Vector3 & point) override;

@@ -96,7 +96,7 @@ class BirdEyeViewController : public rviz_common::FramePositionTrackingViewContr
void orientCamera();

void setPosition(const Ogre::Vector3 & pos_rel_target);
void move_camera(float x, float y);
void move_camera(float dx, float dy);
void updateCamera();
Ogre::SceneNode * getCameraParent(Ogre::Camera * camera);

@@ -68,7 +68,7 @@ class ThirdPersonViewController : public rviz_default_plugins::view_controllers:
public:
void onInitialize() override;

void handleMouseEvent(rviz_common::ViewportMouseEvent & evt) override;
void handleMouseEvent(rviz_common::ViewportMouseEvent & event) override;

void lookAt(const Ogre::Vector3 & point) override;

60 changes: 43 additions & 17 deletions control/autoware_autonomous_emergency_braking/README.md
@@ -12,7 +12,9 @@ This module has following assumptions.

- The predicted path of the ego vehicle can be made from either the path created from sensors or the path created from a control module, or both.

- The current speed and angular velocity can be obtained from the sensors of the ego vehicle, and it uses point cloud as obstacles.
- The current speed and angular velocity can be obtained from the sensors of the ego vehicle, and the module uses points as obstacles.

- The AEB's target obstacles are 2D points that can be obtained from the input point cloud or by obtaining the intersection points between the predicted ego footprint path and a predicted object's shape.

### IMU path generation: steering angle vs IMU's angular velocity

@@ -40,11 +42,13 @@ AEB has the following steps before it outputs the emergency stop signal.

2. Generate a predicted path of the ego vehicle.

3. Get target obstacles from the input point cloud.
3. Get target obstacles from the input point cloud and/or predicted object data.

4. Estimate the closest obstacle speed.

4. Collision check with target obstacles.
5. Collision check with target obstacles.

5. Send emergency stop signals to `/diagnostics`.
6. Send emergency stop signals to `/diagnostics`.

We give more details of each section below.

@@ -58,7 +62,7 @@ We do not activate AEB module if it satisfies the following conditions.

### 2. Generate a predicted path of the ego vehicle

AEB generates a predicted path based on current velocity and current angular velocity obtained from attached sensors. Note that if `use_imu_path` is `false`, it skips this step. This predicted path is generated as:
AEB generates a predicted footprint path based on current velocity and current angular velocity obtained from attached sensors. Note that if `use_imu_path` is `false`, it skips this step. This predicted path is generated as:

$$
x_{k+1} = x_k + v \cos(\theta_k) dt \\
y_{k+1} = y_k + v \sin(\theta_k) dt \\
\theta_{k+1} = \theta_k + \omega dt
$$
where $v$ and $\omega$ are the current longitudinal velocity and angular velocity, respectively, and $dt$ is a time interval that users can define in advance.
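
For illustration, the update above can be implemented as a short forward-integration loop. This is a minimal sketch rather than the module's actual implementation; `Pose2d`, `predict_imu_path`, and the `horizon` parameter are illustrative names:

```cpp
#include <cmath>
#include <vector>

// Illustrative 2D pose; the actual module works with geometry_msgs poses.
struct Pose2d
{
  double x{0.0};
  double y{0.0};
  double theta{0.0};
};

// Forward-integrate the constant velocity / constant yaw rate model above,
// starting from the current ego pose.
std::vector<Pose2d> predict_imu_path(
  const Pose2d & initial, const double v, const double omega, const double dt,
  const double horizon)
{
  std::vector<Pose2d> path{initial};
  for (double t = 0.0; t < horizon; t += dt) {
    const auto & p = path.back();
    Pose2d next;
    next.x = p.x + v * std::cos(p.theta) * dt;
    next.y = p.y + v * std::sin(p.theta) * dt;
    next.theta = p.theta + omega * dt;
    path.push_back(next);
  }
  return path;
}
```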

### 3. Get target obstacles from the input point cloud
On the other hand, if `use_predicted_trajectory` is set to true, the AEB module will use the predicted path from the MPC as a base to generate a footprint path. Both the IMU footprint path and the MPC footprint path can be used at the same time.

### 3. Get target obstacles

After generating the ego footprint path(s), the target obstacles are identified. There are two methods to find target obstacles: using the input point cloud, or using the predicted object information coming from perception modules.

After generating the ego predicted path, we select target obstacles from the input point cloud. This obstacle filtering has three major steps, which are rough filtering, noise filtering with clustering and rigorous filtering.
#### Pointcloud obstacle filtering

#### Rough filtering
The AEB module can filter the input pointcloud to find target obstacles with which the ego vehicle might collide. This method can be enabled if the `use_pointcloud_data` parameter is set to true. The pointcloud obstacle filtering has three major steps, which are rough filtering, noise filtering with clustering, and rigorous filtering.

In rough filtering step, we select target obstacle with simple filter. Create a search area up to a certain distance (default is half of the ego vehicle width plus the `path_footprint_extra_margin` parameter) away from the predicted path of the ego vehicle and ignore the point cloud that are not within it. The image of the rough filtering is illustrated below.
##### Rough filtering

In the rough filtering step, we select target obstacles with a simple filter: a search area is created up to a certain distance (by default, half of the ego vehicle width plus the `path_footprint_extra_margin` parameter) away from the predicted path of the ego vehicle, and points that are not within it are ignored. The rough filtering step is illustrated below.

![rough_filtering](./image/obstacle_filtering_1.drawio.svg)

#### Noise filtering with clustering and convex hulls
##### Noise filtering with clustering and convex hulls

To prevent the AEB from considering noisy points, euclidean clustering is performed on the filtered point cloud. The points in the point cloud that are not close enough to other points to form a cluster are discarded. The parameters `cluster_tolerance`, `minimum_cluster_size` and `maximum_cluster_size` can be used to tune the clustering and the size of objects to be ignored, for more information about the clustering method used by the AEB module, please check the official documentation on euclidean clustering of the PCL library: <https://pcl.readthedocs.io/projects/tutorials/en/master/cluster_extraction.html>.
To prevent the AEB from considering noisy points, Euclidean clustering is performed on the filtered point cloud, and points that are not close enough to other points to form a cluster are discarded. Furthermore, each point in a cluster is compared against the `cluster_minimum_height` parameter: if no point inside a cluster has a height/z value greater than `cluster_minimum_height`, the whole cluster is discarded. The parameters `cluster_tolerance`, `minimum_cluster_size` and `maximum_cluster_size` can be used to tune the clustering and the size of objects to be ignored. For more information about the clustering method used by the AEB module, please check the official PCL documentation on Euclidean clustering: <https://pcl.readthedocs.io/projects/tutorials/en/master/cluster_extraction.html>.
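
Since the module relies on PCL's Euclidean clustering, the noise-filtering step might look like the following sketch. The function and variable names are illustrative, and the example values in the comments are the defaults from the configuration file shown later in this diff:

```cpp
#include <pcl/point_cloud.h>
#include <pcl/point_types.h>
#include <pcl/search/kdtree.h>
#include <pcl/segmentation/extract_clusters.h>

#include <algorithm>
#include <memory>
#include <vector>

// Cluster the rough-filtered cloud and drop clusters whose highest point stays
// below cluster_minimum_height, as described above (illustrative sketch).
std::vector<pcl::PointIndices> cluster_obstacle_points(
  const pcl::PointCloud<pcl::PointXYZ>::Ptr & cloud, const double cluster_tolerance,
  const int minimum_cluster_size, const int maximum_cluster_size,
  const double cluster_minimum_height)
{
  const auto tree = std::make_shared<pcl::search::KdTree<pcl::PointXYZ>>();
  tree->setInputCloud(cloud);

  std::vector<pcl::PointIndices> clusters;
  pcl::EuclideanClusterExtraction<pcl::PointXYZ> extraction;
  extraction.setClusterTolerance(cluster_tolerance);   // e.g. 0.1 [m]
  extraction.setMinClusterSize(minimum_cluster_size);  // e.g. 10 points
  extraction.setMaxClusterSize(maximum_cluster_size);  // e.g. 10000 points
  extraction.setSearchMethod(tree);
  extraction.setInputCloud(cloud);
  extraction.extract(clusters);

  // Discard clusters in which no point is taller than cluster_minimum_height.
  const auto too_low = [&](const pcl::PointIndices & indices) {
    return std::none_of(
      indices.indices.begin(), indices.indices.end(),
      [&](const int i) { return cloud->points[i].z > cluster_minimum_height; });
  };
  clusters.erase(std::remove_if(clusters.begin(), clusters.end(), too_low), clusters.end());
  return clusters;
}
```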

Furthermore, a 2D convex hull is created around each detected cluster; the vertices of each hull represent the most extreme/outside points of the cluster. These vertices are then checked in the next step.

#### Rigorous filtering
##### Rigorous filtering

After Noise filtering, the module performs a geometric collision check to determine whether the filtered obstacles/hull vertices actually have possibility to collide with the ego vehicle. In this check, the ego vehicle is represented as a rectangle, and the point cloud obstacles are represented as points. Only the vertices with a possibility of collision are kept. Finally, the vertex that is closest to the ego vehicle is chosen as the candidate for collision checking: Since rss distance is used to judge if a collision will happen or not, if the closest vertex to the ego is deemed to be safe, the rest of the vertices (and the points in the clusters) will also be safe.
After noise filtering, the module performs a geometric collision check to determine whether the filtered obstacles/hull vertices actually have a possibility of colliding with the ego vehicle. In this check, the ego vehicle is represented as a rectangle, and the point cloud obstacles are represented as points. Only the vertices with a possibility of collision are kept.

![rigorous_filtering](./image/obstacle_filtering_2.drawio.svg)
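
Conceptually, this check amounts to testing each hull vertex against the ego footprint rectangles placed along the predicted path. A minimal sketch of a single point-vs-rectangle test, with illustrative names:

```cpp
#include <cmath>

struct Point2d
{
  double x;
  double y;
};

// Return true if point p lies inside a rectangle of the given length and width,
// centered at rect_center and rotated by yaw (one footprint pose of the path).
bool is_inside_footprint(
  const Point2d & p, const Point2d & rect_center, const double yaw, const double length,
  const double width)
{
  // Express the point in the rectangle's local frame.
  const double dx = p.x - rect_center.x;
  const double dy = p.y - rect_center.y;
  const double local_x = std::cos(yaw) * dx + std::sin(yaw) * dy;
  const double local_y = -std::sin(yaw) * dx + std::cos(yaw) * dy;
  return std::abs(local_x) <= length / 2.0 && std::abs(local_y) <= width / 2.0;
}
```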

#### Obstacle velocity estimation
#### Using predicted objects to get target obstacles

If the `use_predicted_object_data` parameter is set to true, the AEB can use predicted object data coming from the perception modules to get target obstacle points. This is done by obtaining the 2D intersection points between the ego's predicted footprint path and each predicted object's enveloping polygon or bounding box.

![predicted_object_and_path_intersection](./image/using-predicted-objects.drawio.svg)
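
A sketch of how such intersection points could be obtained with `boost::geometry`, assuming both the footprint path and the object shape are available as 2D polygons (names are illustrative, not the module's API):

```cpp
#include <boost/geometry.hpp>
#include <boost/geometry/geometries/point_xy.hpp>
#include <boost/geometry/geometries/polygon.hpp>

#include <vector>

namespace bg = boost::geometry;
using Point2d = bg::model::d2::point_xy<double>;
using Polygon2d = bg::model::polygon<Point2d>;

// Collect candidate collision points from the overlap between the ego footprint
// path polygon and a predicted object's shape polygon.
std::vector<Point2d> collision_candidates(
  const Polygon2d & footprint_path, const Polygon2d & object_shape)
{
  std::vector<Polygon2d> overlap;
  bg::intersection(footprint_path, object_shape, overlap);

  std::vector<Point2d> candidates;
  for (const auto & polygon : overlap) {
    for (const auto & vertex : polygon.outer()) {
      candidates.push_back(vertex);
    }
  }
  return candidates;
}
```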

### Finding the closest target obstacle

Once all target obstacles have been identified, the AEB module chooses the point that is closest to the ego vehicle as the candidate for collision checking. Only the closest point is considered because RSS distance is used to judge if a collision will happen or not, and if the closest vertex to the ego is deemed to be safe from collision, the rest of the target obstacles will also be safe.

![closest_object](./image/closest-point.drawio.svg)

### 4. Obstacle velocity estimation

Once the position of the closest obstacle/point is determined, the AEB module uses the history of previously detected objects to estimate the closest object's relative speed using the following equations:

@@ -110,7 +132,9 @@

Where $t_{1}$ and $t_{0}$ are the timestamps of the point clouds used to detect the current closest obstacle and the previous closest obstacle, respectively.

![relative_speed](./image/object_relative_speed.drawio.svg)

Finally, the velocity vector is compared against the ego's predicted path to get the longitudinal velocity $v_{obj}$:
Note that when the closest obstacle/point comes from using predicted object data, $v_{norm}$ is calculated by directly computing the norm of the predicted object's velocity in the x and y axes.

The velocity vector is then compared against the ego's predicted path to get the longitudinal velocity $v_{obj}$:

$$
v_{obj} = v_{norm} \cos(yaw_{diff}) + v_{ego}
Expand All @@ -120,7 +144,9 @@ where $yaw_{diff}$ is the difference in yaw between the ego path and the displac

Note that the object velocity is calculated against the ego's current movement direction. If the object moves in the opposite direction to the ego's movement, the object velocity is set to 0 m/s. That is because the RSS distance calculation assumes that the ego and the object move in the same direction, and it cannot deal with negative velocities.

### 4. Collision check with target obstacles using RSS distance
The resulting estimated object speed is added to a queue of speeds with timestamps. The AEB then checks for expiration of past speed estimations and eliminates expired speed measurements from the queue; an estimation is considered expired when the time elapsed since it was first added to the queue is larger than the `previous_obstacle_keep_time` parameter. Finally, the median speed of the queue is calculated. The median speed will be used to calculate the RSS distance used for collision checking.
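
A sketch of this bookkeeping, assuming stamped speeds and the expiration rule described above (the types and names are illustrative, not the module's API):

```cpp
#include <algorithm>
#include <deque>
#include <vector>

struct StampedSpeed
{
  double stamp_sec;
  double speed_mps;
};

class ObstacleSpeedHistory
{
public:
  // keep_time_sec corresponds to the previous_obstacle_keep_time parameter.
  explicit ObstacleSpeedHistory(const double keep_time_sec) : keep_time_sec_(keep_time_sec) {}

  void add(const double now_sec, const double speed_mps)
  {
    history_.push_back({now_sec, speed_mps});
    // Eliminate expired speed estimations from the queue.
    while (!history_.empty() && now_sec - history_.front().stamp_sec > keep_time_sec_) {
      history_.pop_front();
    }
  }

  // Median of the remaining speeds (upper median for even-sized queues).
  double median_speed() const
  {
    if (history_.empty()) return 0.0;
    std::vector<double> speeds;
    speeds.reserve(history_.size());
    for (const auto & s : history_) speeds.push_back(s.speed_mps);
    const auto middle = speeds.begin() + speeds.size() / 2;
    std::nth_element(speeds.begin(), middle, speeds.end());
    return *middle;
  }

private:
  double keep_time_sec_;
  std::deque<StampedSpeed> history_;
};
```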

### 5. Collision check with target obstacles using RSS distance

In this step, the module checks for a collision with the closest obstacle point using the RSS distance. The RSS distance is formulated as:

@@ -132,7 +158,7 @@

where $v_{ego}$ and $v_{obj}$ are the current ego and obstacle velocities, $a_{min}$ and $a_{obj}$ are the minimum accelerations (maximum decelerations) of the ego and the obstacle, and $t_{response}$ is the response time of the ego vehicle.

![rss_check](./image/rss_check.drawio.svg)
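
As a reference, a commonly used simplified form of the RSS distance combines the ego reaction distance, the ego braking distance, the obstacle braking distance, and a constant margin. The sketch below assumes that form and illustrative parameter names; it is not the module's exact code:

```cpp
#include <algorithm>

// Simplified RSS stopping-distance check. v_ego and v_obj are in [m/s];
// a_min_ego and a_min_obj are maximum decelerations in [m/s^2] (positive);
// t_response is the ego response time in [s]; margin is an extra offset in [m].
bool will_collide(
  const double distance_to_obstacle, const double v_ego, const double v_obj,
  const double a_min_ego, const double a_min_obj, const double t_response, const double margin)
{
  const double ego_stop = v_ego * t_response + (v_ego * v_ego) / (2.0 * a_min_ego);
  const double obj_stop = (v_obj * v_obj) / (2.0 * a_min_obj);
  const double rss_distance = std::max(ego_stop - obj_stop, 0.0) + margin;
  return distance_to_obstacle < rss_distance;
}
```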

### 5. Send emergency stop signals to `/diagnostics`
### 6. Send emergency stop signals to `/diagnostics`

If AEB detects a collision with point cloud obstacles in the previous step, it sends an emergency signal to `/diagnostics` in this step. Note that in order to enable the emergency stop, it has to send an ERROR level emergency. Moreover, the AEB user should modify the setting file to keep the emergency level; otherwise, Autoware does not hold the emergency state.

@@ -28,6 +28,7 @@

# Point cloud clustering
cluster_tolerance: 0.1 #[m]
cluster_minimum_height: 0.0
minimum_cluster_size: 10
maximum_cluster_size: 10000

