From ccad585a77762a5876480275291511434f0af1c0 Mon Sep 17 00:00:00 2001 From: Taekjin LEE Date: Thu, 31 Oct 2024 10:52:49 +0900 Subject: [PATCH] fix: predicted path generation logic (#1610) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * feat(lane_change): cancel hysteresis (#6288) * feat(lane_change): cancel hysteresis Signed-off-by: Muhammad Zulfaqar Azmi * Update documentation Signed-off-by: Muhammad Zulfaqar Azmi * fix the explanation of the hysteresis count Signed-off-by: Muhammad Zulfaqar Azmi --------- Signed-off-by: Muhammad Zulfaqar Azmi * Add parked parked RSS Signed-off-by: Zulfaqar Azmi * support new perception_reproducer Signed-off-by: temkei.kem <1041084556@qq.com> * move files Signed-off-by: temkei.kem <1041084556@qq.com> * remove old files. Signed-off-by: temkei.kem <1041084556@qq.com> * fix pre-commit err Signed-off-by: temkei.kem <1041084556@qq.com> * style(pre-commit): autofix * feat(autoware_behavior_path_planner_common): disable feature of turning off blinker at low velocity (#1571) Refactor turn signal decider logic and add support for detecting turn signals in turn lanes Signed-off-by: Kyoichi Sugahara * fix a small bug about perception reproducer * style(pre-commit): autofix * feat(out_of_lane): ignore lanelets beyond the last path point (#1554) * feat(out_of_lane): ignore lanelets beyond the last path point Signed-off-by: Maxime CLEMENT * style(pre-commit): autofix --------- Signed-off-by: Maxime CLEMENT Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> * feat(behavior_velocity_run_out_module): exclude obstacle crossing ego… (#1574) feat(behavior_velocity_run_out_module): exclude obstacle crossing ego back line (#6680) * add method to ignore target obstacles that cross ego cut lane * WIP add debug support * add params and finish debug marker * change lane to line * use autoware utils to get the cut line * simplify code wit calcOffsetPose * Update readme and eliminate unused code * update readme * eliminate unused function * readme * comments and readme * eliminate unused include * typo * rename param for consistency * change lane to line for consistency * rename for clarity, add brief * fix indentation * update description * lane ->line * lane -> line --------- Signed-off-by: Daniel Sanchez * fix(lane_change): change stopping logic (RT0-33761) (#1581) * RT0-33761 fix lane change stopping logic Signed-off-by: Zulfaqar Azmi * copied from awf main tested implementation Signed-off-by: Zulfaqar Azmi * doxygen comment Signed-off-by: Zulfaqar Azmi * Update planning/behavior_path_lane_change_module/src/utils/utils.cpp Co-authored-by: mkquda <168697710+mkquda@users.noreply.github.com> --------- Signed-off-by: Zulfaqar Azmi Co-authored-by: mkquda <168697710+mkquda@users.noreply.github.com> * perf: PR 7237 https://github.com/autowarefoundation/autoware.universe/pull/7237 * perf RP8406 https://github.com/autowarefoundation/autoware.universe/pull/8406 * perf PR 8416 * perf PR 8427 * perf PR 8413 * tool PR 8456 * perf PR 8461 * perf PR 8388 * perf PR 8467 * perf PR 8471 * perf PR 8490 * perf PR 8751 * chore: fix format * perf PR 8657 * feat: improve lanelet search logic in getPredictedReferencePath() * sp develop remove non approved change (#1611) Revert "feat: improve lanelet search logic in getPredictedReferencePath()" This reverts commit 5de95b01c51cabadc42cf11f83a534ff7cf4f11e. 
* feat PR 8811 * fix PR 8973 * feat: improve lanelet search logic in getPredictedReferencePath() --------- Signed-off-by: Muhammad Zulfaqar Azmi Signed-off-by: Zulfaqar Azmi Signed-off-by: temkei.kem <1041084556@qq.com> Signed-off-by: Kyoichi Sugahara Signed-off-by: Maxime CLEMENT Signed-off-by: Daniel Sanchez Co-authored-by: Zulfaqar Azmi <93502286+zulfaqar-azmi-t4@users.noreply.github.com> Co-authored-by: Zulfaqar Azmi Co-authored-by: temkei.kem <1041084556@qq.com> Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> Co-authored-by: Kyoichi Sugahara Co-authored-by: Takayuki Murooka Co-authored-by: Maxime CLEMENT <78338830+maxime-clem@users.noreply.github.com> Co-authored-by: danielsanchezaran Co-authored-by: mkquda <168697710+mkquda@users.noreply.github.com> Co-authored-by: Shohei Sakai --- .../tier4_autoware_utils/system/lru_cache.hpp | 142 +++++++ .../test/src/system/test_lru_cache.cpp | 78 ++++ .../map_based_prediction_node.hpp | 39 +- .../map_based_prediction/path_generator.hpp | 31 +- .../src/map_based_prediction_node.cpp | 358 +++++++++++++----- .../src/path_generator.cpp | 306 +++++++++++---- .../multi_object_tracker_core.hpp | 8 +- perception/multi_object_tracker/package.xml | 1 - .../src/multi_object_tracker_core.cpp | 44 ++- .../README.md | 24 ++ .../config/lane_change.param.yaml | 1 + .../scene.hpp | 7 +- .../utils/base_class.hpp | 4 + .../utils/data_structs.hpp | 6 + .../utils/utils.hpp | 49 +++ .../src/interface.cpp | 3 +- .../src/manager.cpp | 19 + .../src/scene.cpp | 243 ++++++------ .../src/utils/utils.cpp | 86 +++++ .../src/turn_signal_decider.cpp | 25 +- .../src/lanelets_selection.cpp | 10 + .../README.md | 8 + .../config/run_out.param.yaml | 2 + .../docs/ego_cut_line.svg | 129 +++++++ .../src/debug.cpp | 19 + .../src/debug.hpp | 2 + .../src/manager.cpp | 2 + .../src/scene.cpp | 38 +- .../src/scene.hpp | 11 + .../src/utils.cpp | 22 ++ .../src/utils.hpp | 8 + .../perception_replayer_common.py | 49 ++- .../perception_reproducer.py | 261 ++++++++++--- .../scripts/perception_replayer/utils.py | 5 + 34 files changed, 1649 insertions(+), 391 deletions(-) create mode 100644 common/tier4_autoware_utils/include/tier4_autoware_utils/system/lru_cache.hpp create mode 100644 common/tier4_autoware_utils/test/src/system/test_lru_cache.cpp create mode 100644 planning/behavior_velocity_run_out_module/docs/ego_cut_line.svg diff --git a/common/tier4_autoware_utils/include/tier4_autoware_utils/system/lru_cache.hpp b/common/tier4_autoware_utils/include/tier4_autoware_utils/system/lru_cache.hpp new file mode 100644 index 0000000000000..3734f8ecd84ed --- /dev/null +++ b/common/tier4_autoware_utils/include/tier4_autoware_utils/system/lru_cache.hpp @@ -0,0 +1,142 @@ +// Copyright 2024 TIER IV, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+#ifndef TIER4_AUTOWARE_UTILS__SYSTEM__LRU_CACHE_HPP_
+#define TIER4_AUTOWARE_UTILS__SYSTEM__LRU_CACHE_HPP_
+
+#include <cstddef>
+#include <list>
+#include <optional>
+#include <unordered_map>
+#include <utility>
+
+namespace tier4_autoware_utils
+{
+
+/**
+ * @brief A template class for LRU (Least Recently Used) Cache.
+ *
+ * This class implements a simple LRU cache using a combination of a list and a hash map.
+ *
+ * @tparam Key The type of keys.
+ * @tparam Value The type of values.
+ * @tparam Map The type of underlying map, defaulted to std::unordered_map.
+ */
+template <typename Key, typename Value, template <typename...> class Map = std::unordered_map>
+class LRUCache
+{
+private:
+  size_t capacity_;  ///< The maximum capacity of the cache.
+  std::list<std::pair<Key, Value>> cache_list_;  ///< List to maintain the order of elements.
+  Map<Key, typename std::list<std::pair<Key, Value>>::iterator>
+    cache_map_;  ///< Map for fast access to elements.
+
+public:
+  /**
+   * @brief Construct a new LRUCache object.
+   *
+   * @param size The capacity of the cache.
+   */
+  explicit LRUCache(size_t size) : capacity_(size) {}
+
+  /**
+   * @brief Get the capacity of the cache.
+   *
+   * @return The capacity of the cache.
+   */
+  [[nodiscard]] size_t capacity() const { return capacity_; }
+
+  /**
+   * @brief Insert a key-value pair into the cache.
+   *
+   * If the key already exists, its value is updated and it is moved to the front.
+   * If the cache exceeds its capacity, the least recently used element is removed.
+   *
+   * @param key The key to insert.
+   * @param value The value to insert.
+   */
+  void put(const Key & key, const Value & value)
+  {
+    auto it = cache_map_.find(key);
+    if (it != cache_map_.end()) {
+      cache_list_.erase(it->second);
+    }
+    cache_list_.push_front({key, value});
+    cache_map_[key] = cache_list_.begin();
+
+    if (cache_map_.size() > capacity_) {
+      auto last = cache_list_.back();
+      cache_map_.erase(last.first);
+      cache_list_.pop_back();
+    }
+  }
+
+  /**
+   * @brief Retrieve a value from the cache.
+   *
+   * If the key does not exist in the cache, std::nullopt is returned.
+   * If the key exists, the value is returned and the element is moved to the front.
+   *
+   * @param key The key to retrieve.
+   * @return The value associated with the key, or std::nullopt if the key does not exist.
+   */
+  std::optional<Value> get(const Key & key)
+  {
+    auto it = cache_map_.find(key);
+    if (it == cache_map_.end()) {
+      return std::nullopt;
+    }
+    cache_list_.splice(cache_list_.begin(), cache_list_, it->second);
+    return it->second->second;
+  }
+
+  /**
+   * @brief Clear the cache.
+   *
+   * This removes all elements from the cache.
+   */
+  void clear()
+  {
+    cache_list_.clear();
+    cache_map_.clear();
+  }
+
+  /**
+   * @brief Get the current size of the cache.
+   *
+   * @return The number of elements in the cache.
+   */
+  [[nodiscard]] size_t size() const { return cache_map_.size(); }
+
+  /**
+   * @brief Check if the cache is empty.
+   *
+   * @return True if the cache is empty, false otherwise.
+   */
+  [[nodiscard]] bool empty() const { return cache_map_.empty(); }
+
+  /**
+   * @brief Check if a key exists in the cache.
+   *
+   * @param key The key to check.
+   * @return True if the key exists, false otherwise.
+   */
+  [[nodiscard]] bool contains(const Key & key) const
+  {
+    return cache_map_.find(key) != cache_map_.end();
+  }
+};
+
+}  // namespace tier4_autoware_utils
+
+#endif  // TIER4_AUTOWARE_UTILS__SYSTEM__LRU_CACHE_HPP_
diff --git a/common/tier4_autoware_utils/test/src/system/test_lru_cache.cpp b/common/tier4_autoware_utils/test/src/system/test_lru_cache.cpp
new file mode 100644
index 0000000000000..f1cd2414f50fe
--- /dev/null
+++ b/common/tier4_autoware_utils/test/src/system/test_lru_cache.cpp
@@ -0,0 +1,78 @@
+// Copyright 2024 TIER IV, Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+#include "tier4_autoware_utils/system/lru_cache.hpp"
+
+#include <gtest/gtest.h>
+
+#include <chrono>
+#include <cstdint>
+#include <iostream>
+#include <utility>
+
+using tier4_autoware_utils::LRUCache;
+
+// Fibonacci calculation with LRU cache
+int64_t fibonacci_with_cache(int n, LRUCache<int, int64_t> * cache)
+{
+  if (n <= 1) return n;
+
+  if (cache->contains(n)) {
+    return *cache->get(n);
+  }
+  int64_t result = fibonacci_with_cache(n - 1, cache) + fibonacci_with_cache(n - 2, cache);
+  cache->put(n, result);
+  return result;
+}
+
+// Fibonacci calculation without cache
+int64_t fibonacci_no_cache(int n)
+{
+  if (n <= 1) return n;
+  return fibonacci_no_cache(n - 1) + fibonacci_no_cache(n - 2);
+}
+
+// Helper function to measure execution time
+template <typename Func, typename... Args>
+std::pair<int64_t, decltype(std::declval<Func>()(std::declval<Args>()...))> measure_time(
+  Func func, Args &&...
args) +{ + auto start = std::chrono::high_resolution_clock::now(); + auto result = func(std::forward(args)...); + auto end = std::chrono::high_resolution_clock::now(); + return {std::chrono::duration_cast(end - start).count(), result}; +} + +// Test case to verify Fibonacci calculation results with and without cache +TEST(FibonacciLRUCacheTest, CompareResultsAndPerformance) +{ + const int max_n = 40; // Test range + LRUCache cache(max_n); // Cache with capacity set to max_n + + for (int i = 0; i <= max_n; ++i) { + // Measure time for performance comparison + auto [time_with_cache, result_with_cache] = + measure_time([i, &cache]() { return fibonacci_with_cache(i, &cache); }); + auto [time_without_cache, result_without_cache] = + measure_time([i]() { return fibonacci_no_cache(i); }); + + EXPECT_EQ(result_with_cache, result_without_cache) << "Mismatch at n = " << i; + + // Print the calculation time + std::cout << "n = " << i << ": With Cache = " << time_with_cache + << " μs, Without Cache = " << time_without_cache << " μs\n"; + + // // Clear the cache after each iteration to ensure fair comparison + cache.clear(); + } +} diff --git a/perception/map_based_prediction/include/map_based_prediction/map_based_prediction_node.hpp b/perception/map_based_prediction/include/map_based_prediction/map_based_prediction_node.hpp index bdd9ad89bc025..e1005720c0423 100644 --- a/perception/map_based_prediction/include/map_based_prediction/map_based_prediction_node.hpp +++ b/perception/map_based_prediction/include/map_based_prediction/map_based_prediction_node.hpp @@ -19,6 +19,7 @@ #include #include +#include #include #include @@ -31,7 +32,9 @@ #include #include +#include #include +#include #include #include @@ -41,6 +44,28 @@ #include #include +namespace std +{ +template <> +struct hash +{ + // 0x9e3779b9 is a magic number. 
See + // https://stackoverflow.com/questions/4948780/magic-number-in-boosthash-combine + size_t operator()(const lanelet::routing::LaneletPaths & paths) const + { + size_t seed = 0; + for (const auto & path : paths) { + for (const auto & lanelet : path) { + seed ^= hash{}(lanelet.id()) + 0x9e3779b9 + (seed << 6U) + (seed >> 2U); + } + // Add a separator between paths + seed ^= hash{}(0) + 0x9e3779b9 + (seed << 6U) + (seed >> 2U); + } + return seed; + } +}; +} // namespace std + namespace map_based_prediction { struct LateralKinematicsToLanelet @@ -84,6 +109,7 @@ struct LaneletData struct PredictedRefPath { float probability; + double width; PosePath path; Maneuver maneuver; }; @@ -132,6 +158,9 @@ class MapBasedPredictionNode : public rclcpp::Node // Crosswalk Entry Points lanelet::ConstLanelets crosswalks_; + // Fences + lanelet::LaneletMapUPtr fence_layer_{nullptr}; + // Parameters bool enable_delay_compensation_; double prediction_time_horizon_; @@ -194,7 +223,8 @@ class MapBasedPredictionNode : public rclcpp::Node void updateObjectsHistory( const std_msgs::msg::Header & header, const TrackedObject & object, const LaneletsData & current_lanelets_data); - + std::optional searchProperStartingRefPathIndex( + const TrackedObject & object, const PosePath & pose_path) const; std::vector getPredictedReferencePath( const TrackedObject & object, const LaneletsData & current_lanelets_data, const double object_detected_time); @@ -217,7 +247,12 @@ class MapBasedPredictionNode : public rclcpp::Node const TrackedObject & object, const lanelet::routing::LaneletPaths & candidate_paths, const float path_probability, const ManeuverProbability & maneuver_probability, const Maneuver & maneuver, std::vector & reference_paths); - std::vector convertPathType(const lanelet::routing::LaneletPaths & paths); + + mutable tier4_autoware_utils::LRUCache< + lanelet::routing::LaneletPaths, std::vector>> + lru_cache_of_convert_path_type_{1000}; + std::vector> convertPathType( + const lanelet::routing::LaneletPaths & paths) const; void updateFuturePossibleLanelets( const TrackedObject & object, const lanelet::routing::LaneletPaths & paths); diff --git a/perception/map_based_prediction/include/map_based_prediction/path_generator.hpp b/perception/map_based_prediction/include/map_based_prediction/path_generator.hpp index 4da4f62be2ede..7ac9c6311d842 100644 --- a/perception/map_based_prediction/include/map_based_prediction/path_generator.hpp +++ b/perception/map_based_prediction/include/map_based_prediction/path_generator.hpp @@ -22,7 +22,11 @@ #include #include #include +#include #include +#include + +#include #include #include @@ -91,7 +95,7 @@ class PathGenerator PredictedPath generatePathForOffLaneVehicle(const TrackedObject & object); PredictedPath generatePathForOnLaneVehicle( - const TrackedObject & object, const PosePath & ref_paths); + const TrackedObject & object, const PosePath & ref_path, const double path_width = 0.0); PredictedPath generatePathForCrosswalkUser( const TrackedObject & object, const CrosswalkEdgePoints & reachable_crosswalk) const; @@ -109,23 +113,34 @@ class PathGenerator // Member functions PredictedPath generateStraightPath(const TrackedObject & object) const; - PredictedPath generatePolynomialPath(const TrackedObject & object, const PosePath & ref_path); + PredictedPath generatePolynomialPath( + const TrackedObject & object, const PosePath & ref_path, const double path_width, + const double backlash_width) const; FrenetPath generateFrenetPath( - const FrenetPoint & current_point, const 
FrenetPoint & target_point, const double max_length); + const FrenetPoint & current_point, const FrenetPoint & target_point, + const double max_length) const; Eigen::Vector3d calcLatCoefficients( - const FrenetPoint & current_point, const FrenetPoint & target_point, const double T); + const FrenetPoint & current_point, const FrenetPoint & target_point, const double T) const; Eigen::Vector2d calcLonCoefficients( - const FrenetPoint & current_point, const FrenetPoint & target_point, const double T); + const FrenetPoint & current_point, const FrenetPoint & target_point, const double T) const; + + std::vector interpolationLerp( + const std::vector & base_keys, const std::vector & base_values, + const std::vector & query_keys) const; + std::vector interpolationLerp( + const std::vector & base_keys, const std::vector & base_values, + const std::vector & query_keys) const; PosePath interpolateReferencePath( - const PosePath & base_path, const FrenetPath & frenet_predicted_path); + const PosePath & base_path, const FrenetPath & frenet_predicted_path) const; PredictedPath convertToPredictedPath( const TrackedObject & object, const FrenetPath & frenet_predicted_path, - const PosePath & ref_path); + const PosePath & ref_path) const; - FrenetPoint getFrenetPoint(const TrackedObject & object, const PosePath & ref_path); + FrenetPoint getFrenetPoint( + const TrackedObject & object, const geometry_msgs::msg::Pose & ref_pose) const; }; } // namespace map_based_prediction diff --git a/perception/map_based_prediction/src/map_based_prediction_node.cpp b/perception/map_based_prediction/src/map_based_prediction_node.cpp index 85535df4babfe..7100dc2070389 100644 --- a/perception/map_based_prediction/src/map_based_prediction_node.cpp +++ b/perception/map_based_prediction/src/map_based_prediction_node.cpp @@ -14,6 +14,7 @@ #include "map_based_prediction/map_based_prediction_node.hpp" +#include #include #include #include @@ -33,6 +34,7 @@ #include #include +#include #include #include #include @@ -359,46 +361,14 @@ CrosswalkEdgePoints getCrosswalkEdgePoints(const lanelet::ConstLanelet & crosswa back_center_point, r_p_back, l_p_back}; } -bool withinLanelet( - const TrackedObject & object, const lanelet::ConstLanelet & lanelet, - const bool use_yaw_information = false, const float yaw_threshold = 0.6) -{ - using Point = boost::geometry::model::d2::point_xy; - - const auto & obj_pos = object.kinematics.pose_with_covariance.pose.position; - const Point p_object{obj_pos.x, obj_pos.y}; - - auto polygon = lanelet.polygon2d().basicPolygon(); - boost::geometry::correct(polygon); - bool with_in_polygon = boost::geometry::within(p_object, polygon); - - if (!use_yaw_information) return with_in_polygon; - - // use yaw angle to compare - const double abs_yaw_diff = calcAbsYawDiffBetweenLaneletAndObject(object, lanelet); - if (abs_yaw_diff < yaw_threshold) return with_in_polygon; - - return false; -} - bool withinRoadLanelet( - const TrackedObject & object, const lanelet::LaneletMapPtr & lanelet_map_ptr, + const TrackedObject & object, + const std::vector> & surrounding_lanelets_with_dist, const bool use_yaw_information = false) { - using Point = boost::geometry::model::d2::point_xy; - - const auto & obj_pos = object.kinematics.pose_with_covariance.pose.position; - const Point p_object{obj_pos.x, obj_pos.y}; - - lanelet::BasicPoint2d search_point(obj_pos.x, obj_pos.y); - // nearest lanelet - constexpr double search_radius = 10.0; // [m] - const auto surrounding_lanelets = - 
lanelet::geometry::findNearest(lanelet_map_ptr->laneletLayer, search_point, search_radius); - - for (const auto & lanelet : surrounding_lanelets) { - if (lanelet.second.hasAttribute(lanelet::AttributeName::Subtype)) { - lanelet::Attribute attr = lanelet.second.attribute(lanelet::AttributeName::Subtype); + for (const auto & [dist, lanelet] : surrounding_lanelets_with_dist) { + if (lanelet.hasAttribute(lanelet::AttributeName::Subtype)) { + lanelet::Attribute attr = lanelet.attribute(lanelet::AttributeName::Subtype); if ( attr.value() == lanelet::AttributeValueString::Crosswalk || attr.value() == lanelet::AttributeValueString::Walkway) { @@ -406,7 +376,13 @@ bool withinRoadLanelet( } } - if (withinLanelet(object, lanelet.second, use_yaw_information)) { + constexpr float yaw_threshold = 0.6; + bool within_lanelet = std::abs(dist) < 1e-5; + if (use_yaw_information) { + within_lanelet = + within_lanelet && calcAbsYawDiffBetweenLaneletAndObject(object, lanelet) < yaw_threshold; + } + if (within_lanelet) { return true; } } @@ -414,9 +390,23 @@ bool withinRoadLanelet( return false; } +bool withinRoadLanelet( + const TrackedObject & object, const lanelet::LaneletMapPtr & lanelet_map_ptr, + const bool use_yaw_information = false) +{ + const auto & obj_pos = object.kinematics.pose_with_covariance.pose.position; + lanelet::BasicPoint2d search_point(obj_pos.x, obj_pos.y); + // nearest lanelet + constexpr double search_radius = 10.0; // [m] + const auto surrounding_lanelets_with_dist = + lanelet::geometry::findWithin2d(lanelet_map_ptr->laneletLayer, search_point, search_radius); + + return withinRoadLanelet(object, surrounding_lanelets_with_dist, use_yaw_information); +} + boost::optional isReachableCrosswalkEdgePoints( - const TrackedObject & object, const lanelet::ConstLanelet & target_crosswalk, - const CrosswalkEdgePoints & edge_points, const lanelet::LaneletMapPtr & lanelet_map_ptr, + const TrackedObject & object, const lanelet::ConstLanelets & surrounding_lanelets, + const lanelet::ConstLanelets & surrounding_crosswalks, const CrosswalkEdgePoints & edge_points, const double time_horizon, const double min_object_vel) { using Point = boost::geometry::model::d2::point_xy; @@ -425,11 +415,6 @@ boost::optional isReachableCrosswalkEdgePoints( const auto & obj_vel = object.kinematics.twist_with_covariance.twist.linear; const auto yaw = tier4_autoware_utils::getRPY(object.kinematics.pose_with_covariance.pose).z; - lanelet::BasicPoint2d obj_pos_as_lanelet(obj_pos.x, obj_pos.y); - if (boost::geometry::within(obj_pos_as_lanelet, target_crosswalk.polygon2d().basicPolygon())) { - return {}; - } - const auto & p1 = edge_points.front_center_point; const auto & p2 = edge_points.back_center_point; @@ -446,17 +431,12 @@ boost::optional isReachableCrosswalkEdgePoints( const auto estimated_velocity = std::hypot(obj_vel.x, obj_vel.y); const auto is_stop_object = estimated_velocity < stop_velocity_th; const auto velocity = std::max(min_object_vel, estimated_velocity); - const auto surrounding_lanelets = lanelet::geometry::findNearest( - lanelet_map_ptr->laneletLayer, obj_pos_as_lanelet, time_horizon * velocity); - const auto isAcrossAnyRoad = [&surrounding_lanelets](const Point & p_src, const Point & p_dst) { - const auto withinAnyCrosswalk = [&surrounding_lanelets](const Point & p) { - for (const auto & lanelet : surrounding_lanelets) { - const lanelet::Attribute attr = lanelet.second.attribute(lanelet::AttributeName::Subtype); - if ( - (attr.value() == lanelet::AttributeValueString::Crosswalk || - attr.value() == 
lanelet::AttributeValueString::Walkway) && - boost::geometry::within(p, lanelet.second.polygon2d().basicPolygon())) { + const auto isAcrossAnyRoad = [&surrounding_lanelets, &surrounding_crosswalks]( + const Point & p_src, const Point & p_dst) { + const auto withinAnyCrosswalk = [&surrounding_crosswalks](const Point & p) { + for (const auto & crosswalk : surrounding_crosswalks) { + if (boost::geometry::within(p, crosswalk.polygon2d().basicPolygon())) { return true; } } @@ -475,14 +455,13 @@ boost::optional isReachableCrosswalkEdgePoints( std::vector points_of_intersect; const boost::geometry::model::linestring line{p_src, p_dst}; for (const auto & lanelet : surrounding_lanelets) { - const lanelet::Attribute attr = lanelet.second.attribute(lanelet::AttributeName::Subtype); + const lanelet::Attribute attr = lanelet.attribute(lanelet::AttributeName::Subtype); if (attr.value() != lanelet::AttributeValueString::Road) { continue; } std::vector tmp_intersects; - boost::geometry::intersection( - line, lanelet.second.polygon2d().basicPolygon(), tmp_intersects); + boost::geometry::intersection(line, lanelet.polygon2d().basicPolygon(), tmp_intersects); for (const auto & p : tmp_intersects) { if (isExist(p, points_of_intersect) || withinAnyCrosswalk(p)) { continue; @@ -828,6 +807,7 @@ void MapBasedPredictionNode::mapCallback(const HADMapBin::ConstSharedPtr msg) lanelet_map_ptr_ = std::make_shared(); lanelet::utils::conversion::fromBinMsg( *msg, lanelet_map_ptr_, &traffic_rules_ptr_, &routing_graph_ptr_); + lru_cache_of_convert_path_type_.clear(); // clear cache RCLCPP_INFO(get_logger(), "[Map Based Prediction]: Map is loaded"); const auto all_lanelets = lanelet::utils::query::laneletLayer(lanelet_map_ptr_); @@ -835,6 +815,16 @@ void MapBasedPredictionNode::mapCallback(const HADMapBin::ConstSharedPtr msg) const auto walkways = lanelet::utils::query::walkwayLanelets(all_lanelets); crosswalks_.insert(crosswalks_.end(), crosswalks.begin(), crosswalks.end()); crosswalks_.insert(crosswalks_.end(), walkways.begin(), walkways.end()); + + lanelet::LineStrings3d fences; + for (const auto & linestring : lanelet_map_ptr_->lineStringLayer) { + if (const std::string type = linestring.attributeOr(lanelet::AttributeName::Type, "none"); + type == "fence") { + fences.push_back(lanelet::LineString3d( + std::const_pointer_cast(linestring.constData()))); + } + } + fence_layer_ = lanelet::utils::createMap(fences); } void MapBasedPredictionNode::objectsCallback(const TrackedObjects::ConstSharedPtr in_objects) @@ -845,23 +835,6 @@ void MapBasedPredictionNode::objectsCallback(const TrackedObjects::ConstSharedPt return; } - auto world2map_transform = transform_listener_.getTransform( - "map", // target - in_objects->header.frame_id, // src - in_objects->header.stamp, rclcpp::Duration::from_seconds(0.1)); - auto map2world_transform = transform_listener_.getTransform( - in_objects->header.frame_id, // target - "map", // src - in_objects->header.stamp, rclcpp::Duration::from_seconds(0.1)); - auto debug_map2lidar_transform = transform_listener_.getTransform( - "base_link", // target - "map", // src - rclcpp::Time(), rclcpp::Duration::from_seconds(0.1)); - - if (!world2map_transform || !map2world_transform || !debug_map2lidar_transform) { - return; - } - // Remove old objects information in object history const double objects_detected_time = rclcpp::Time(in_objects->header.stamp).seconds(); removeOldObjectsHistory(objects_detected_time); @@ -874,12 +847,24 @@ void MapBasedPredictionNode::objectsCallback(const 
TrackedObjects::ConstSharedPt // result debug visualization_msgs::msg::MarkerArray debug_markers; + // get world to map transform + geometry_msgs::msg::TransformStamped::ConstSharedPtr world2map_transform; + + bool is_object_not_in_map_frame = in_objects->header.frame_id != "map"; + if (is_object_not_in_map_frame) { + world2map_transform = transform_listener_.getTransform( + "map", // target + in_objects->header.frame_id, // src + in_objects->header.stamp, rclcpp::Duration::from_seconds(0.1)); + if (!world2map_transform) return; + } + for (const auto & object : in_objects->objects) { std::string object_id = tier4_autoware_utils::toHexString(object.object_id); TrackedObject transformed_object = object; // transform object frame if it's based on map frame - if (in_objects->header.frame_id != "map") { + if (is_object_not_in_map_frame) { geometry_msgs::msg::PoseStamped pose_in_map; geometry_msgs::msg::PoseStamped pose_orig; pose_orig.pose = object.kinematics.pose_with_covariance.pose; @@ -950,7 +935,7 @@ void MapBasedPredictionNode::objectsCallback(const TrackedObjects::ConstSharedPt // If predicted reference path is empty, assume this object is out of the lane if (ref_paths.empty()) { PredictedPath predicted_path = - path_generator_->generatePathForLowSpeedVehicle(transformed_object); + path_generator_->generatePathForOffLaneVehicle(transformed_object); predicted_path.confidence = 1.0; if (predicted_path.path.empty()) break; @@ -982,7 +967,7 @@ void MapBasedPredictionNode::objectsCallback(const TrackedObjects::ConstSharedPt std::vector predicted_paths; for (const auto & ref_path : ref_paths) { PredictedPath predicted_path = path_generator_->generatePathForOnLaneVehicle( - yaw_fixed_transformed_object, ref_path.path); + yaw_fixed_transformed_object, ref_path.path, ref_path.width); if (predicted_path.path.empty()) { continue; } @@ -1031,10 +1016,13 @@ void MapBasedPredictionNode::objectsCallback(const TrackedObjects::ConstSharedPt bool MapBasedPredictionNode::doesPathCrossAnyFence(const PredictedPath & predicted_path) { - const lanelet::ConstLineStrings3d & all_fences = - lanelet::utils::query::getAllFences(lanelet_map_ptr_); - for (const auto & fence_line : all_fences) { - if (doesPathCrossFence(predicted_path, fence_line)) { + lanelet::BasicLineString2d predicted_path_ls; + for (const auto & p : predicted_path.path) + predicted_path_ls.emplace_back(p.position.x, p.position.y); + const auto candidates = + fence_layer_->lineStringLayer.search(lanelet::geometry::boundingBox2d(predicted_path_ls)); + for (const auto & candidate : candidates) { + if (doesPathCrossFence(predicted_path, candidate)) { return true; } } @@ -1081,10 +1069,26 @@ PredictedObject MapBasedPredictionNode::getPredictedObjectAsCrosswalkUser( } boost::optional crossing_crosswalk{boost::none}; - for (const auto & crosswalk : crosswalks_) { - if (withinLanelet(object, crosswalk)) { - crossing_crosswalk = crosswalk; - break; + const auto & obj_pos = object.kinematics.pose_with_covariance.pose.position; + const auto & obj_vel = object.kinematics.twist_with_covariance.twist.linear; + const auto estimated_velocity = std::hypot(obj_vel.x, obj_vel.y); + const auto velocity = std::max(min_crosswalk_user_velocity_, estimated_velocity); + const auto surrounding_lanelets_with_dist = lanelet::geometry::findWithin2d( + lanelet_map_ptr_->laneletLayer, lanelet::BasicPoint2d{obj_pos.x, obj_pos.y}, + prediction_time_horizon_ * velocity); + lanelet::ConstLanelets surrounding_lanelets; + lanelet::ConstLanelets surrounding_crosswalks; + for (const 
auto & [dist, lanelet] : surrounding_lanelets_with_dist) { + surrounding_lanelets.push_back(lanelet); + const auto attr = lanelet.attribute(lanelet::AttributeName::Subtype); + if ( + attr.value() == lanelet::AttributeValueString::Crosswalk || + attr.value() == lanelet::AttributeValueString::Walkway) { + const auto & crosswalk = lanelet; + surrounding_crosswalks.push_back(crosswalk); + if (std::abs(dist) < 1e-5) { + crossing_crosswalk = crosswalk; + } } } @@ -1114,7 +1118,7 @@ PredictedObject MapBasedPredictionNode::getPredictedObjectAsCrosswalkUser( // If the object is not crossing the crosswalk, in the road lanelets, try to find the closest // crosswalk and generate path to the crosswalk edge - } else if (withinRoadLanelet(object, lanelet_map_ptr_)) { + } else if (withinRoadLanelet(object, surrounding_lanelets_with_dist)) { lanelet::ConstLanelet closest_crosswalk{}; const auto & obj_pose = object.kinematics.pose_with_covariance.pose; const auto found_closest_crosswalk = @@ -1144,8 +1148,14 @@ PredictedObject MapBasedPredictionNode::getPredictedObjectAsCrosswalkUser( } } } - // try to find the edge points for all crosswalks and generate path to the crosswalk edge - for (const auto & crosswalk : crosswalks_) { + + // try to find the edge points for other surrounding crosswalks and generate path to the crosswalk + // edge + for (const auto & crosswalk : surrounding_crosswalks) { + if (crossing_crosswalk && crossing_crosswalk.get() == crosswalk) { + continue; + } + const auto edge_points = getCrosswalkEdgePoints(crosswalk); const auto reachable_first = hasPotentialToReach( @@ -1162,7 +1172,7 @@ PredictedObject MapBasedPredictionNode::getPredictedObjectAsCrosswalkUser( } const auto reachable_crosswalk = isReachableCrosswalkEdgePoints( - object, crosswalk, edge_points, lanelet_map_ptr_, prediction_time_horizon_, + object, surrounding_lanelets, surrounding_crosswalks, edge_points, prediction_time_horizon_, min_crosswalk_user_velocity_); if (!reachable_crosswalk) { @@ -1485,6 +1495,90 @@ void MapBasedPredictionNode::updateObjectsHistory( } } +std::optional MapBasedPredictionNode::searchProperStartingRefPathIndex( + const TrackedObject & object, const PosePath & pose_path) const +{ + bool is_position_found = false; + std::optional opt_index{std::nullopt}; + auto & index = opt_index.emplace(); + + // starting segment index is a segment close enough to the object + const auto obj_point = object.kinematics.pose_with_covariance.pose.position; + { + double min_dist_sq = std::numeric_limits::max(); + constexpr double acceptable_dist_sq = 1.0; // [m2] + for (size_t i = 0; i < pose_path.size(); i++) { + const double dx = pose_path.at(i).position.x - obj_point.x; + const double dy = pose_path.at(i).position.y - obj_point.y; + const double dist_sq = dx * dx + dy * dy; + if (dist_sq < min_dist_sq) { + min_dist_sq = dist_sq; + index = i; + } + if (dist_sq < acceptable_dist_sq) { + break; + } + } + } + + // calculate score that object can reach the target path smoothly, and search the + // starting segment index that have the best score + size_t idx = 0; + { // find target segmentation index + constexpr double search_distance = 22.0; // [m] + constexpr double yaw_diff_limit = M_PI / 3.0; // 60 degrees + + const double obj_yaw = tf2::getYaw(object.kinematics.pose_with_covariance.pose.orientation); + const size_t search_segment_count = + static_cast(std::floor(search_distance / reference_path_resolution_)); + const size_t search_segment_num = + std::min(search_segment_count, static_cast(pose_path.size() - 
index)); + + // search for the best score, which is the smallest + double best_score = 1e9; // initial value is large enough + for (size_t i = 0; i < search_segment_num; ++i) { + const auto & path_pose = pose_path.at(index + i); + // yaw difference + const double path_yaw = tf2::getYaw(path_pose.orientation); + const double relative_path_yaw = autoware_utils::normalize_radian(path_yaw - obj_yaw); + if (std::abs(relative_path_yaw) > yaw_diff_limit) { + continue; + } + + const double dx = path_pose.position.x - obj_point.x; + const double dy = path_pose.position.y - obj_point.y; + const double dx_cp = std::cos(obj_yaw) * dx + std::sin(obj_yaw) * dy; + const double dy_cp = -std::sin(obj_yaw) * dx + std::cos(obj_yaw) * dy; + const double neutral_yaw = std::atan2(dy_cp, dx_cp) * 2.0; + const double delta_yaw = autoware_utils::normalize_radian(path_yaw - obj_yaw - neutral_yaw); + if (std::abs(delta_yaw) > yaw_diff_limit) { + continue; + } + + // objective function score + constexpr double weight_ratio = 0.01; + double score = delta_yaw * delta_yaw + weight_ratio * neutral_yaw * neutral_yaw; + constexpr double acceptable_score = 1e-3; + + if (score < best_score) { + best_score = score; + idx = i; + is_position_found = true; + if (score < acceptable_score) { + // if the score is small enough, we can break the loop + break; + } + } + } + } + + // update starting segment index + index += idx; + index = std::clamp(index, 0ul, pose_path.size() - 1); + + return is_position_found ? opt_index : std::nullopt; +} + std::vector MapBasedPredictionNode::getPredictedReferencePath( const TrackedObject & object, const LaneletsData & current_lanelets_data, const double object_detected_time) @@ -1541,6 +1635,15 @@ std::vector MapBasedPredictionNode::getPredictedReferencePath( if (!unconnected_lanelets.empty()) { return unconnected_lanelets.front(); } + // search side of the next lanelet + const lanelet::ConstLanelets next_lanelet = routing_graph_ptr_->following(lanelet); + if (!next_lanelet.empty()) { + const auto next = get_left ? routing_graph_ptr_->left(next_lanelet.front()) + : routing_graph_ptr_->right(next_lanelet.front()); + if (!!next) { + return *next; + } + } } // if no candidate lanelet found, return empty @@ -1589,6 +1692,26 @@ std::vector MapBasedPredictionNode::getPredictedReferencePath( addReferencePathsLocal(center_paths, Maneuver::LANE_FOLLOW); } + // Step 3. 
Search starting point for each reference path + for (auto it = all_ref_paths.begin(); it != all_ref_paths.end();) { + auto & pose_path = it->path; + if (pose_path.empty()) { + continue; + } + + const std::optional opt_starting_idx = + searchProperStartingRefPathIndex(object, pose_path); + + if (opt_starting_idx.has_value()) { + // Trim the reference path + pose_path.erase(pose_path.begin(), pose_path.begin() + opt_starting_idx.value()); + ++it; + } else { + // Proper starting point is not found, remove the reference path + it = all_ref_paths.erase(it); + } + } + return all_ref_paths; } @@ -1912,7 +2035,8 @@ void MapBasedPredictionNode::addReferencePaths( for (const auto & converted_path : converted_paths) { PredictedRefPath predicted_path; predicted_path.probability = maneuver_probability.at(maneuver) * path_probability; - predicted_path.path = converted_path; + predicted_path.path = converted_path.first; + predicted_path.width = converted_path.second; predicted_path.maneuver = maneuver; reference_paths.push_back(predicted_path); } @@ -1983,12 +2107,17 @@ ManeuverProbability MapBasedPredictionNode::calculateManeuverProbability( return maneuver_prob; } -std::vector MapBasedPredictionNode::convertPathType( - const lanelet::routing::LaneletPaths & paths) +std::vector> MapBasedPredictionNode::convertPathType( + const lanelet::routing::LaneletPaths & paths) const { - std::vector converted_paths; + if (lru_cache_of_convert_path_type_.contains(paths)) { + return *lru_cache_of_convert_path_type_.get(paths); + } + + std::vector> converted_paths; for (const auto & path : paths) { PosePath converted_path; + double width = 10.0; // Initialize with a large value // Insert Positions. Note that we start inserting points from previous lanelet if (!path.empty()) { @@ -2008,7 +2137,13 @@ std::vector MapBasedPredictionNode::convertPathType( const double lane_yaw = std::atan2( current_p.position.y - prev_p.position.y, current_p.position.x - prev_p.position.x); - current_p.orientation = tier4_autoware_utils::createQuaternionFromYaw(lane_yaw); + const double sin_yaw_half = std::sin(lane_yaw / 2.0); + const double cos_yaw_half = std::cos(lane_yaw / 2.0); + current_p.orientation.x = 0.0; + current_p.orientation.y = 0.0; + current_p.orientation.z = sin_yaw_half; + current_p.orientation.w = cos_yaw_half; + converted_path.push_back(current_p); prev_p = current_p; } @@ -2039,18 +2174,39 @@ std::vector MapBasedPredictionNode::convertPathType( const double lane_yaw = std::atan2( current_p.position.y - prev_p.position.y, current_p.position.x - prev_p.position.x); - current_p.orientation = tier4_autoware_utils::createQuaternionFromYaw(lane_yaw); + const double sin_yaw_half = std::sin(lane_yaw / 2.0); + const double cos_yaw_half = std::cos(lane_yaw / 2.0); + current_p.orientation.x = 0.0; + current_p.orientation.y = 0.0; + current_p.orientation.z = sin_yaw_half; + current_p.orientation.w = cos_yaw_half; + converted_path.push_back(current_p); prev_p = current_p; } + + // Update minimum width + const auto left_bound = lanelet.leftBound2d(); + const auto right_bound = lanelet.rightBound2d(); + const double lanelet_width_front = std::hypot( + left_bound.front().x() - right_bound.front().x(), + left_bound.front().y() - right_bound.front().y()); + width = std::min(width, lanelet_width_front); } // Resample Path - const auto resampled_converted_path = - motion_utils::resamplePoseVector(converted_path, reference_path_resolution_); - converted_paths.push_back(resampled_converted_path); - } - + const bool use_akima_spline_for_xy = 
true; + const bool use_lerp_for_z = true; + // the options use_akima_spline_for_xy and use_lerp_for_z are set to true + // but the implementation of use_akima_spline_for_xy in resamplePoseVector and + // resamplePointVector is opposite to the options so the options are set to true to use linear + // interpolation for xy + const auto resampled_converted_path = motion_utils::resamplePoseVector( + converted_path, reference_path_resolution_, use_akima_spline_for_xy, use_lerp_for_z); + converted_paths.push_back(std::make_pair(resampled_converted_path, width)); + } + + lru_cache_of_convert_path_type_.put(paths, converted_paths); return converted_paths; } diff --git a/perception/map_based_prediction/src/path_generator.cpp b/perception/map_based_prediction/src/path_generator.cpp index 5cb7e186b7cc4..1c3e23bfe68f1 100644 --- a/perception/map_based_prediction/src/path_generator.cpp +++ b/perception/map_based_prediction/src/path_generator.cpp @@ -14,7 +14,7 @@ #include "map_based_prediction/path_generator.hpp" -#include +#include #include #include @@ -149,13 +149,32 @@ PredictedPath PathGenerator::generatePathForOffLaneVehicle(const TrackedObject & } PredictedPath PathGenerator::generatePathForOnLaneVehicle( - const TrackedObject & object, const PosePath & ref_paths) + const TrackedObject & object, const PosePath & ref_path, const double path_width) { - if (ref_paths.size() < 2) { + if (ref_path.size() < 2) { return generateStraightPath(object); } - return generatePolynomialPath(object, ref_paths); + // if the object is moving backward, we generate a straight path + if (object.kinematics.twist_with_covariance.twist.linear.x < 0.0) { + return generateStraightPath(object); + } + + // get object width + double object_width = 5.0; // a large number + if (object.shape.type == autoware_auto_perception_msgs::msg::Shape::BOUNDING_BOX) { + object_width = object.shape.dimensions.y; + } else if (object.shape.type == autoware_auto_perception_msgs::msg::Shape::CYLINDER) { + object_width = object.shape.dimensions.x; + } + // Calculate the backlash width, which represents the maximum distance the object can be biased + // from the reference path + constexpr double margin = + 0.5; // Set a safety margin of 0.5m for the object to stay away from the edge of the lane + double backlash_width = (path_width - object_width) / 2.0 - margin; + backlash_width = std::max(backlash_width, 0.0); // minimum is 0.0 + + return generatePolynomialPath(object, ref_path, path_width, backlash_width); } PredictedPath PathGenerator::generateStraightPath(const TrackedObject & object) const @@ -178,22 +197,63 @@ PredictedPath PathGenerator::generateStraightPath(const TrackedObject & object) } PredictedPath PathGenerator::generatePolynomialPath( - const TrackedObject & object, const PosePath & ref_path) + const TrackedObject & object, const PosePath & ref_path, const double path_width, + const double backlash_width) const { // Get current Frenet Point const double ref_path_len = motion_utils::calcArcLength(ref_path); - const auto current_point = getFrenetPoint(object, ref_path); + const auto current_point = getFrenetPoint(object, ref_path.at(0)); // Step1. 
Set Target Frenet Point // Note that we do not set position s, // since we don't know the target longitudinal position FrenetPoint terminal_point; - terminal_point.s_vel = current_point.s_vel; + terminal_point.s_vel = std::hypot(current_point.s_vel, current_point.d_vel); terminal_point.s_acc = 0.0; terminal_point.d = 0.0; terminal_point.d_vel = 0.0; terminal_point.d_acc = 0.0; + // if the object is behind of the reference path adjust the lateral_time_horizon_ to reach the + // start of the reference path + double lateral_duration_adjusted = lateral_time_horizon_; + if (current_point.s < 0.0) { + const double distance_to_start = -current_point.s; + const double duration_to_reach = distance_to_start / terminal_point.s_vel; + lateral_duration_adjusted = std::max(lateral_duration_adjusted, duration_to_reach); + } + + // calculate terminal d position, based on backlash width + { + if (backlash_width < 0.01 /*m*/) { + // If the backlash width is less than 0.01m, do not consider the backlash width and reduce + // calculation cost + terminal_point.d = 0.0; + } else { + const double return_width = path_width / 2.0; // [m] + const double current_momentum_d = + current_point.d + 0.5 * current_point.d_vel * lateral_duration_adjusted; + const double momentum_d_abs = std::abs(current_momentum_d); + + if (momentum_d_abs < backlash_width) { + // If the object momentum is within the backlash width, we set the target d position to the + // current momentum + terminal_point.d = current_momentum_d; + } else if ( + momentum_d_abs >= backlash_width && momentum_d_abs < backlash_width + return_width) { + // If the object momentum is within the return zone, we set the target d position close to + // the zero gradually + terminal_point.d = + (backlash_width + return_width - momentum_d_abs) * backlash_width / return_width; + terminal_point.d *= (current_momentum_d > 0) ? 1 : -1; + } else { + // If the object momentum is outside the backlash width + return zone, we set the target d + // position to 0 + terminal_point.d = 0.0; + } + } + } + // Step2. Generate Predicted Path on a Frenet coordinate const auto frenet_predicted_path = generateFrenetPath(current_point, terminal_point, ref_path_len); @@ -210,7 +270,8 @@ PredictedPath PathGenerator::generatePolynomialPath( } FrenetPath PathGenerator::generateFrenetPath( - const FrenetPoint & current_point, const FrenetPoint & target_point, const double max_length) + const FrenetPoint & current_point, const FrenetPoint & target_point, + const double max_length) const { FrenetPath path; const double duration = time_horizon_; @@ -223,28 +284,26 @@ FrenetPath PathGenerator::generateFrenetPath( path.reserve(static_cast(duration / sampling_time_interval_)); for (double t = 0.0; t <= duration; t += sampling_time_interval_) { + const auto t2 = t * t; + const auto t3 = t2 * t; + const auto t4 = t3 * t; + const auto t5 = t4 * t; const double current_acc = 0.0; // Currently we assume the object is traveling at a constant speed - const double d_next_ = current_point.d + current_point.d_vel * t + - current_acc * 2.0 * std::pow(t, 2) + lat_coeff(0) * std::pow(t, 3) + - lat_coeff(1) * std::pow(t, 4) + lat_coeff(2) * std::pow(t, 5); - // t > lateral_duration: 0.0, else d_next_ - const double d_next = t > lateral_duration ? 
0.0 : d_next_; - const double s_next = current_point.s + current_point.s_vel * t + - 2.0 * current_acc * std::pow(t, 2) + lon_coeff(0) * std::pow(t, 3) + - lon_coeff(1) * std::pow(t, 4); + const double d_next_ = current_point.d + current_point.d_vel * t + current_acc * 2.0 * t2 + + lat_coeff(0) * t3 + lat_coeff(1) * t4 + lat_coeff(2) * t5; + // t > lateral_duration: target_point.d, else d_next_ + const double d_next = t > lateral_duration ? target_point.d : d_next_; + const double s_next = current_point.s + current_point.s_vel * t + 2.0 * current_acc * t2 + + lon_coeff(0) * t3 + lon_coeff(1) * t4; if (s_next > max_length) { break; } // We assume the object is traveling at a constant speed along s direction FrenetPoint point; - point.s = std::max(s_next, 0.0); - point.s_vel = current_point.s_vel; - point.s_acc = current_point.s_acc; + point.s = s_next; point.d = d_next; - point.d_vel = current_point.d_vel; - point.d_acc = current_point.d_acc; path.push_back(point); } @@ -252,7 +311,7 @@ FrenetPath PathGenerator::generateFrenetPath( } Eigen::Vector3d PathGenerator::calcLatCoefficients( - const FrenetPoint & current_point, const FrenetPoint & target_point, const double T) + const FrenetPoint & current_point, const FrenetPoint & target_point, const double T) const { // Lateral Path Calculation // Quintic polynomial for d @@ -265,10 +324,14 @@ Eigen::Vector3d PathGenerator::calcLatCoefficients( // b = np.matrix([[xe - self.a0 - self.a1 * T - self.a2 * T**2], // [vxe - self.a1 - 2 * self.a2 * T], // [axe - 2 * self.a2]]) + const auto T2 = T * T; + const auto T3 = T2 * T; + const auto T4 = T3 * T; + const auto T5 = T4 * T; + Eigen::Matrix3d A_lat_inv; - A_lat_inv << 10 / std::pow(T, 3), -4 / std::pow(T, 2), 1 / (2 * T), -15 / std::pow(T, 4), - 7 / std::pow(T, 3), -1 / std::pow(T, 2), 6 / std::pow(T, 5), -3 / std::pow(T, 4), - 1 / (2 * std::pow(T, 3)); + A_lat_inv << 10 / T3, -4 / T2, 1 / (2 * T), -15 / T4, 7 / T3, -1 / T2, 6 / T5, -3 / T4, + 1 / (2 * T3); Eigen::Vector3d b_lat; b_lat[0] = target_point.d - current_point.d - current_point.d_vel * T; b_lat[1] = target_point.d_vel - current_point.d_vel; @@ -278,7 +341,7 @@ Eigen::Vector3d PathGenerator::calcLatCoefficients( } Eigen::Vector2d PathGenerator::calcLonCoefficients( - const FrenetPoint & current_point, const FrenetPoint & target_point, const double T) + const FrenetPoint & current_point, const FrenetPoint & target_point, const double T) const { // Longitudinal Path Calculation // Quadric polynomial @@ -286,17 +349,112 @@ Eigen::Vector2d PathGenerator::calcLonCoefficients( // [-1/(2*T**3), 1/(4*T**2)]]) // b = np.matrix([[vxe - self.a1 - 2 * self.a2 * T], // [axe - 2 * self.a2]]) + const auto T2 = T * T; + const auto T3 = T2 * T; + Eigen::Matrix2d A_lon_inv; - A_lon_inv << 1 / std::pow(T, 2), -1 / (3 * T), -1 / (2 * std::pow(T, 3)), - 1 / (4 * std::pow(T, 2)); + A_lon_inv << 1 / T2, -1 / (3 * T), -1 / (2 * T3), 1 / (4 * T2); Eigen::Vector2d b_lon; b_lon[0] = target_point.s_vel - current_point.s_vel; b_lon[1] = 0.0; return A_lon_inv * b_lon; } +std::vector PathGenerator::interpolationLerp( + const std::vector & base_keys, const std::vector & base_values, + const std::vector & query_keys) const +{ + // calculate linear interpolation + // extrapolate the value if the query key is out of the base key range + std::vector query_values; + size_t key_index = 0; + double last_query_key = query_keys.at(0); + for (const auto query_key : query_keys) { + // search for the closest key index + // if current query key is larger than the last query key, 
search base_keys increasing order + if (query_key >= last_query_key) { + while (base_keys.at(key_index + 1) < query_key) { + if (key_index == base_keys.size() - 2) { + break; + } + ++key_index; + } + } else { + // if current query key is smaller than the last query key, search base_keys decreasing order + while (base_keys.at(key_index) > query_key) { + if (key_index == 0) { + break; + } + --key_index; + } + } + last_query_key = query_key; + + const double & src_val = base_values.at(key_index); + const double & dst_val = base_values.at(key_index + 1); + const double ratio = (query_key - base_keys.at(key_index)) / + (base_keys.at(key_index + 1) - base_keys.at(key_index)); + + const double interpolated_val = src_val + (dst_val - src_val) * ratio; + query_values.push_back(interpolated_val); + } + + return query_values; +} + +std::vector PathGenerator::interpolationLerp( + const std::vector & base_keys, const std::vector & base_values, + const std::vector & query_keys) const +{ + // calculate linear interpolation + // extrapolate the value if the query key is out of the base key range + std::vector query_values; + size_t key_index = 0; + double last_query_key = query_keys.at(0); + for (const auto query_key : query_keys) { + // search for the closest key index + // if current query key is larger than the last query key, search base_keys increasing order + if (query_key >= last_query_key) { + while (base_keys.at(key_index + 1) < query_key) { + if (key_index == base_keys.size() - 2) { + break; + } + ++key_index; + } + } else { + // if current query key is smaller than the last query key, search base_keys decreasing order + while (base_keys.at(key_index) > query_key) { + if (key_index == 0) { + break; + } + --key_index; + } + } + last_query_key = query_key; + + const tf2::Quaternion & src_val = base_values.at(key_index); + const tf2::Quaternion & dst_val = base_values.at(key_index + 1); + const double ratio = (query_key - base_keys.at(key_index)) / + (base_keys.at(key_index + 1) - base_keys.at(key_index)); + + // in case of extrapolation, export the nearest quaternion + if (ratio < 0.0) { + query_values.push_back(src_val); + continue; + } + if (ratio > 1.0) { + query_values.push_back(dst_val); + continue; + } + const auto interpolated_quat = tf2::slerp(src_val, dst_val, ratio); + query_values.push_back(interpolated_quat); + } + + return query_values; +} + PosePath PathGenerator::interpolateReferencePath( - const PosePath & base_path, const FrenetPath & frenet_predicted_path) + const PosePath & base_path, const FrenetPath & frenet_predicted_path) const { PosePath interpolated_path; const size_t interpolate_num = frenet_predicted_path.size(); @@ -308,100 +466,102 @@ PosePath PathGenerator::interpolateReferencePath( std::vector base_path_x(base_path.size()); std::vector base_path_y(base_path.size()); std::vector base_path_z(base_path.size()); + std::vector base_path_orientation(base_path.size()); std::vector base_path_s(base_path.size(), 0.0); for (size_t i = 0; i < base_path.size(); ++i) { base_path_x.at(i) = base_path.at(i).position.x; base_path_y.at(i) = base_path.at(i).position.y; base_path_z.at(i) = base_path.at(i).position.z; + tf2::Quaternion src_tf; + tf2::fromMsg(base_path.at(i).orientation, src_tf); + base_path_orientation.at(i) = src_tf; if (i > 0) { base_path_s.at(i) = base_path_s.at(i - 1) + tier4_autoware_utils::calcDistance2d( base_path.at(i - 1), base_path.at(i)); } } + // Prepare resampled s vector std::vector resampled_s(frenet_predicted_path.size()); for (size_t i = 0; i < 
frenet_predicted_path.size(); ++i) { resampled_s.at(i) = frenet_predicted_path.at(i).s; } - if (resampled_s.front() > resampled_s.back()) { - std::reverse(resampled_s.begin(), resampled_s.end()); - } - // Spline Interpolation - std::vector spline_ref_path_x = - interpolation::spline(base_path_s, base_path_x, resampled_s); - std::vector spline_ref_path_y = - interpolation::spline(base_path_s, base_path_y, resampled_s); - std::vector spline_ref_path_z = - interpolation::spline(base_path_s, base_path_z, resampled_s); + // Linear Interpolation for x, y, z, and orientation + std::vector lerp_ref_path_x = interpolationLerp(base_path_s, base_path_x, resampled_s); + std::vector lerp_ref_path_y = interpolationLerp(base_path_s, base_path_y, resampled_s); + std::vector lerp_ref_path_z = interpolationLerp(base_path_s, base_path_z, resampled_s); + std::vector lerp_ref_path_orientation = + interpolationLerp(base_path_s, base_path_orientation, resampled_s); interpolated_path.resize(interpolate_num); - for (size_t i = 0; i < interpolate_num - 1; ++i) { + for (size_t i = 0; i < interpolate_num; ++i) { geometry_msgs::msg::Pose interpolated_pose; - const auto current_point = - tier4_autoware_utils::createPoint(spline_ref_path_x.at(i), spline_ref_path_y.at(i), 0.0); - const auto next_point = tier4_autoware_utils::createPoint( - spline_ref_path_x.at(i + 1), spline_ref_path_y.at(i + 1), 0.0); - const double yaw = tier4_autoware_utils::calcAzimuthAngle(current_point, next_point); interpolated_pose.position = tier4_autoware_utils::createPoint( - spline_ref_path_x.at(i), spline_ref_path_y.at(i), spline_ref_path_z.at(i)); - interpolated_pose.orientation = tier4_autoware_utils::createQuaternionFromYaw(yaw); + lerp_ref_path_x.at(i), lerp_ref_path_y.at(i), lerp_ref_path_z.at(i)); + interpolated_pose.orientation = tf2::toMsg(lerp_ref_path_orientation.at(i)); interpolated_path.at(i) = interpolated_pose; } - interpolated_path.back().position = tier4_autoware_utils::createPoint( - spline_ref_path_x.back(), spline_ref_path_y.back(), spline_ref_path_z.back()); - interpolated_path.back().orientation = interpolated_path.at(interpolate_num - 2).orientation; - return interpolated_path; } PredictedPath PathGenerator::convertToPredictedPath( - const TrackedObject & object, const FrenetPath & frenet_predicted_path, const PosePath & ref_path) + const TrackedObject & object, const FrenetPath & frenet_predicted_path, + const PosePath & ref_path) const { + // Object position + const auto & object_pose = object.kinematics.pose_with_covariance.pose; + const double object_height = object.shape.dimensions.z / 2.0; + + // Convert Frenet Path to Cartesian Path PredictedPath predicted_path; predicted_path.time_step = rclcpp::Duration::from_seconds(sampling_time_interval_); predicted_path.path.resize(ref_path.size()); - for (size_t i = 0; i < predicted_path.path.size(); ++i) { + + // Set the first point as the object's current position + predicted_path.path.at(0) = object_pose; + + for (size_t i = 1; i < predicted_path.path.size(); ++i) { // Reference Point from interpolated reference path const auto & ref_pose = ref_path.at(i); // Frenet Point from frenet predicted path const auto & frenet_point = frenet_predicted_path.at(i); + double d_offset = frenet_point.d; // Converted Pose - auto predicted_pose = tier4_autoware_utils::calcOffsetPose(ref_pose, 0.0, frenet_point.d, 0.0); - predicted_pose.position.z = object.kinematics.pose_with_covariance.pose.position.z; - if (i == 0) { - predicted_pose.orientation = 
object.kinematics.pose_with_covariance.pose.orientation; - } else { - const double yaw = tier4_autoware_utils::calcAzimuthAngle( - predicted_path.path.at(i - 1).position, predicted_pose.position); - predicted_pose.orientation = tier4_autoware_utils::createQuaternionFromYaw(yaw); - } + auto predicted_pose = tier4_autoware_utils::calcOffsetPose(ref_pose, 0.0, d_offset, 0.0); + predicted_pose.position.z += object_height; + + const double yaw = tier4_autoware_utils::calcAzimuthAngle( + predicted_path.path.at(i - 1).position, predicted_pose.position); + predicted_pose.orientation = tier4_autoware_utils::createQuaternionFromYaw(yaw); + predicted_path.path.at(i) = predicted_pose; } return predicted_path; } -FrenetPoint PathGenerator::getFrenetPoint(const TrackedObject & object, const PosePath & ref_path) +FrenetPoint PathGenerator::getFrenetPoint( + const TrackedObject & object, const geometry_msgs::msg::Pose & ref_pose) const { FrenetPoint frenet_point; + + // 1. Position const auto obj_point = object.kinematics.pose_with_covariance.pose.position; + const float obj_yaw = + static_cast(tf2::getYaw(object.kinematics.pose_with_covariance.pose.orientation)); + const float lane_yaw = static_cast(tf2::getYaw(ref_pose.orientation)); + frenet_point.s = (obj_point.x - ref_pose.position.x) * cos(lane_yaw) + + (obj_point.y - ref_pose.position.y) * sin(lane_yaw); + frenet_point.d = -(obj_point.x - ref_pose.position.x) * sin(lane_yaw) + + (obj_point.y - ref_pose.position.y) * cos(lane_yaw); - const size_t nearest_segment_idx = motion_utils::findNearestSegmentIndex(ref_path, obj_point); - const double l = - motion_utils::calcLongitudinalOffsetToSegment(ref_path, nearest_segment_idx, obj_point); + // 2. Velocity const float vx = static_cast(object.kinematics.twist_with_covariance.twist.linear.x); const float vy = static_cast(object.kinematics.twist_with_covariance.twist.linear.y); - const float obj_yaw = - static_cast(tf2::getYaw(object.kinematics.pose_with_covariance.pose.orientation)); - const float lane_yaw = - static_cast(tf2::getYaw(ref_path.at(nearest_segment_idx).orientation)); const float delta_yaw = obj_yaw - lane_yaw; - - frenet_point.s = motion_utils::calcSignedArcLength(ref_path, 0, nearest_segment_idx) + l; - frenet_point.d = motion_utils::calcLateralOffset(ref_path, obj_point); frenet_point.s_vel = vx * std::cos(delta_yaw) - vy * std::sin(delta_yaw); frenet_point.d_vel = vx * std::sin(delta_yaw) + vy * std::cos(delta_yaw); frenet_point.s_acc = 0.0; diff --git a/perception/multi_object_tracker/include/multi_object_tracker/multi_object_tracker_core.hpp b/perception/multi_object_tracker/include/multi_object_tracker/multi_object_tracker_core.hpp index 95d33b78ff184..7e9f3a8754695 100644 --- a/perception/multi_object_tracker/include/multi_object_tracker/multi_object_tracker_core.hpp +++ b/perception/multi_object_tracker/include/multi_object_tracker/multi_object_tracker_core.hpp @@ -98,6 +98,11 @@ class MultiObjectTracker : public rclcpp::Node rclcpp::Subscription::SharedPtr detected_object_sub_; rclcpp::TimerBase::SharedPtr publish_timer_; // publish timer + rclcpp::Time last_published_time_; + rclcpp::Time last_updated_time_; + double publisher_period_; + static constexpr double minimum_publish_interval_ratio = 0.85; + static constexpr double maximum_publish_interval_ratio = 1.05; // debugger class std::unique_ptr debugger_; @@ -116,8 +121,7 @@ class MultiObjectTracker : public rclcpp::Node std::unique_ptr data_association_; void checkTrackerLifeCycle( - std::list> & list_tracker, const rclcpp::Time 
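The reworked `getFrenetPoint` no longer searches the whole reference path; it projects the object onto a single reference pose and rotates the body-frame twist by the heading difference to obtain the Frenet velocities. A standalone sketch of that projection with plain 2-D structs (names and numbers are illustrative):

```cpp
#include <cmath>
#include <cstdio>

struct Pose2D { double x, y, yaw; };
struct Frenet { double s, d, s_vel, d_vel; };

// Project an object pose and body-frame velocity into the Frenet frame of a
// single reference pose: s/d are the longitudinal/lateral offsets, and the
// velocity is rotated by the heading difference.
Frenet to_frenet(const Pose2D & ref, const Pose2D & obj, double vx, double vy)
{
  Frenet f{};
  const double dx = obj.x - ref.x;
  const double dy = obj.y - ref.y;
  f.s = dx * std::cos(ref.yaw) + dy * std::sin(ref.yaw);
  f.d = -dx * std::sin(ref.yaw) + dy * std::cos(ref.yaw);
  const double delta_yaw = obj.yaw - ref.yaw;
  f.s_vel = vx * std::cos(delta_yaw) - vy * std::sin(delta_yaw);
  f.d_vel = vx * std::sin(delta_yaw) + vy * std::cos(delta_yaw);
  return f;
}

int main()
{
  const Pose2D ref{0.0, 0.0, M_PI / 2.0};        // reference pose heading north
  const Pose2D obj{1.0, 3.0, M_PI / 2.0};        // object 3 m ahead, 1 m to the right
  const auto f = to_frenet(ref, obj, 2.0, 0.0);  // 2 m/s along the object heading
  std::printf("s=%.1f d=%.1f s_vel=%.1f d_vel=%.1f\n", f.s, f.d, f.s_vel, f.d_vel);
  return 0;
}
```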
& time, - const geometry_msgs::msg::Transform & self_transform); + std::list> & list_tracker, const rclcpp::Time & time); void sanitizeTracker( std::list> & list_tracker, const rclcpp::Time & time); std::shared_ptr createNewTracker( diff --git a/perception/multi_object_tracker/package.xml b/perception/multi_object_tracker/package.xml index 992b1f5e2a123..119919b5f076c 100644 --- a/perception/multi_object_tracker/package.xml +++ b/perception/multi_object_tracker/package.xml @@ -25,7 +25,6 @@ tier4_autoware_utils tier4_perception_msgs unique_identifier_msgs - ament_lint_auto autoware_lint_common diff --git a/perception/multi_object_tracker/src/multi_object_tracker_core.cpp b/perception/multi_object_tracker/src/multi_object_tracker_core.cpp index d28833241bd5f..2acc4f447cb50 100644 --- a/perception/multi_object_tracker/src/multi_object_tracker_core.cpp +++ b/perception/multi_object_tracker/src/multi_object_tracker_core.cpp @@ -174,6 +174,8 @@ void TrackerDebugger::startMeasurementTime(const rclcpp::Time & measurement_head MultiObjectTracker::MultiObjectTracker(const rclcpp::NodeOptions & node_options) : rclcpp::Node("multi_object_tracker", node_options), + last_published_time_(this->now()), + last_updated_time_(this->now()), tf_buffer_(this->get_clock()), tf_listener_(tf_buffer_) { @@ -196,11 +198,15 @@ MultiObjectTracker::MultiObjectTracker(const rclcpp::NodeOptions & node_options) this->get_node_base_interface(), this->get_node_timers_interface()); tf_buffer_.setCreateTimerInterface(cti); - // Create ROS time based timer + // Create ROS time based timer. + // If the delay compensation is enabled, the timer is used to publish the output at the correct + // time. if (enable_delay_compensation) { - const auto period_ns = rclcpp::Rate(publish_rate).period(); + publisher_period_ = 1.0 / publish_rate; // [s] + constexpr double timer_multiplier = 10.0; // 10 times frequent for publish timing check + const auto timer_period = rclcpp::Rate(publish_rate * timer_multiplier).period(); publish_timer_ = rclcpp::create_timer( - this, get_clock(), period_ns, std::bind(&MultiObjectTracker::onTimer, this)); + this, get_clock(), timer_period, std::bind(&MultiObjectTracker::onTimer, this)); } const auto tmp = this->declare_parameter>("can_assign_matrix"); @@ -236,6 +242,8 @@ MultiObjectTracker::MultiObjectTracker(const rclcpp::NodeOptions & node_options) void MultiObjectTracker::onMeasurement( const autoware_auto_perception_msgs::msg::DetectedObjects::ConstSharedPtr input_objects_msg) { + last_updated_time_ = this->now(); + /* keep the latest input stamp and check transform*/ debugger_->startMeasurementTime(rclcpp::Time(input_objects_msg->header.stamp)); const auto self_transform = getTransformAnonymous( @@ -277,7 +285,7 @@ void MultiObjectTracker::onMeasurement( } /* life cycle check */ - checkTrackerLifeCycle(list_tracker_, measurement_time, *self_transform); + checkTrackerLifeCycle(list_tracker_, measurement_time); /* sanitize trackers */ sanitizeTracker(list_tracker_, measurement_time); @@ -326,14 +334,28 @@ std::shared_ptr MultiObjectTracker::createNewTracker( void MultiObjectTracker::onTimer() { rclcpp::Time current_time = this->now(); - const auto self_transform = - getTransformAnonymous(tf_buffer_, world_frame_id_, "base_link", current_time); - if (!self_transform) { + + // ensure minimum interval: room for the next process(prediction) + const double minimum_publish_interval = publisher_period_ * minimum_publish_interval_ratio; + const auto elapsed_time = (current_time - 
last_published_time_).seconds(); + if (elapsed_time < minimum_publish_interval) { + return; + } + + // if there was update after publishing, publish new messages + bool should_publish = last_published_time_ < last_updated_time_; + + // if there was no update, publish if the elapsed time is longer than the maximum publish latency + // in this case, it will perform extrapolate/remove old objects + const double maximum_publish_interval = publisher_period_ * maximum_publish_interval_ratio; + should_publish = should_publish || elapsed_time > maximum_publish_interval; + + if (!should_publish) { return; } /* life cycle check */ - checkTrackerLifeCycle(list_tracker_, current_time, *self_transform); + checkTrackerLifeCycle(list_tracker_, current_time); /* sanitize trackers */ sanitizeTracker(list_tracker_, current_time); @@ -342,8 +364,7 @@ void MultiObjectTracker::onTimer() } void MultiObjectTracker::checkTrackerLifeCycle( - std::list> & list_tracker, const rclcpp::Time & time, - [[maybe_unused]] const geometry_msgs::msg::Transform & self_transform) + std::list> & list_tracker, const rclcpp::Time & time) { /* params */ constexpr float max_elapsed_time = 1.0; @@ -464,6 +485,9 @@ void MultiObjectTracker::publish(const rclcpp::Time & time) // Publish tracked_objects_pub_->publish(output_msg); + // Update last published time + last_published_time_ = this->now(); + // Debugger Publish if enabled debugger_->publishProcessingTime(); debugger_->publishTentativeObjects(tentative_objects_msg); diff --git a/planning/behavior_path_lane_change_module/README.md b/planning/behavior_path_lane_change_module/README.md index 564ec0a7e0abf..dc077ab02cae6 100644 --- a/planning/behavior_path_lane_change_module/README.md +++ b/planning/behavior_path_lane_change_module/README.md @@ -284,6 +284,29 @@ detach @enduml ``` +To preventive measure for lane change path oscillations caused by alternating safe and unsafe conditions, an additional hysteresis count check is implemented before executing an abort or cancel maneuver. If unsafe, the `unsafe_hysteresis_count_` is incremented and compared against `unsafe_hysteresis_threshold`; exceeding it prompts an abort condition check, ensuring decisions are made with consideration to recent safety assessments as shown in flow chart above. This mechanism stabilizes decision-making, preventing abrupt changes due to transient unsafe conditions. + +```plantuml +@startuml +skinparam defaultTextAlignment center +skinparam backgroundColor #WHITE + +title Abort Lane Change + +if (Perform collision check?) then (SAFE) + :Reset unsafe_hysteresis_count_; +else (UNSAFE) + :Increase unsafe_hysteresis_count_; + if (unsafe_hysteresis_count_ > unsafe_hysteresis_threshold?) then (FALSE) + else (TRUE) + #LightPink:Check abort condition; + stop + endif +endif +:Continue lane changing; +@enduml +``` + #### Cancel Suppose the lane change trajectory is evaluated as unsafe. In that case, if the ego vehicle has not departed from the current lane yet, the trajectory will be reset, and the ego vehicle will resume the lane following the maneuver. @@ -433,6 +456,7 @@ The following parameters are configurable in `lane_change.param.yaml`. | `cancel.duration` | [s] | double | The time taken to complete returning to the center line. 
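With delay compensation, the timer now ticks roughly ten times per publish period and each tick decides whether to publish, so the output follows measurement arrival instead of a fixed timer phase. A condensed sketch of that decision, assuming the same 0.85/1.05 interval ratios as in the hunk above (`should_publish` is an illustrative name):

```cpp
#include <cstdio>

// Decide whether the tracker should publish on this timer tick.
//  - never publish before ~85 % of the nominal period has elapsed
//  - publish as soon as a new measurement arrived since the last publish
//  - otherwise publish anyway once ~105 % of the period has elapsed, so old
//    tracks still get extrapolated/pruned without new input
bool should_publish(double now, double last_published, double last_updated, double period)
{
  constexpr double kMinRatio = 0.85;
  constexpr double kMaxRatio = 1.05;
  const double elapsed = now - last_published;
  if (elapsed < period * kMinRatio) {
    return false;
  }
  const bool updated_since_publish = last_updated > last_published;
  return updated_since_publish || elapsed > period * kMaxRatio;
}

int main()
{
  const double period = 0.1;  // 10 Hz output
  std::printf("%d\n", should_publish(10.05, 10.00, 10.04, period));  // 0: too early
  std::printf("%d\n", should_publish(10.09, 10.00, 10.04, period));  // 1: new measurement
  std::printf("%d\n", should_publish(10.11, 10.00, 9.98, period));   // 1: max interval exceeded
  return 0;
}
```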
| 3.0 | | `cancel.max_lateral_jerk` | [m/sss] | double | The maximum lateral jerk for abort path | 1000.0 | | `cancel.overhang_tolerance` | [m] | double | Lane change cancel is prohibited if the vehicle head exceeds the lane boundary more than this tolerance distance | 0.0 | +| `unsafe_hysteresis_threshold` | [-] | int | threshold that helps prevent frequent switching between safe and unsafe decisions | 10 | ### Debug diff --git a/planning/behavior_path_lane_change_module/config/lane_change.param.yaml b/planning/behavior_path_lane_change_module/config/lane_change.param.yaml index 228307a51268b..1ab33514c5f24 100644 --- a/planning/behavior_path_lane_change_module/config/lane_change.param.yaml +++ b/planning/behavior_path_lane_change_module/config/lane_change.param.yaml @@ -106,6 +106,7 @@ duration: 5.0 # [s] max_lateral_jerk: 1000.0 # [m/s3] overhang_tolerance: 0.0 # [m] + unsafe_hysteresis_threshold: 10 # [/] finish_judge_lateral_threshold: 0.2 # [m] diff --git a/planning/behavior_path_lane_change_module/include/behavior_path_lane_change_module/scene.hpp b/planning/behavior_path_lane_change_module/include/behavior_path_lane_change_module/scene.hpp index bd4f19848fbdc..04f21f5249240 100644 --- a/planning/behavior_path_lane_change_module/include/behavior_path_lane_change_module/scene.hpp +++ b/planning/behavior_path_lane_change_module/include/behavior_path_lane_change_module/scene.hpp @@ -15,6 +15,7 @@ #define BEHAVIOR_PATH_LANE_CHANGE_MODULE__SCENE_HPP_ #include "behavior_path_lane_change_module/utils/base_class.hpp" +#include "behavior_path_lane_change_module/utils/data_structs.hpp" #include #include @@ -59,6 +60,8 @@ class NormalLaneChange : public LaneChangeBase void insertStopPoint(const lanelet::ConstLanelets & lanelets, PathWithLaneId & path) override; + void insert_stop_point_on_current_lanes(PathWithLaneId & path); + PathWithLaneId getReferencePath() const override; std::optional extendPath() override; @@ -72,6 +75,8 @@ class NormalLaneChange : public LaneChangeBase PathSafetyStatus isApprovedPathSafe() const override; bool isRequiredStop(const bool is_object_coming_from_rear) const override; + PathSafetyStatus evaluateApprovedPathWithUnsafeHysteresis( + PathSafetyStatus approved_path_safety_status) override; bool isNearEndOfCurrentLanes( const lanelet::ConstLanelets & current_lanes, const lanelet::ConstLanelets & target_lanes, @@ -175,7 +180,7 @@ class NormalLaneChange : public LaneChangeBase std::pair calcCurrentMinMaxAcceleration() const; - void setStopPose(const Pose & stop_pose); + void set_stop_pose(const double arc_length_to_stop_pose, PathWithLaneId & path); void updateStopTime(); diff --git a/planning/behavior_path_lane_change_module/include/behavior_path_lane_change_module/utils/base_class.hpp b/planning/behavior_path_lane_change_module/include/behavior_path_lane_change_module/utils/base_class.hpp index d7a559bb54cf6..9c891389c4571 100644 --- a/planning/behavior_path_lane_change_module/include/behavior_path_lane_change_module/utils/base_class.hpp +++ b/planning/behavior_path_lane_change_module/include/behavior_path_lane_change_module/utils/base_class.hpp @@ -94,6 +94,9 @@ class LaneChangeBase virtual PathSafetyStatus isApprovedPathSafe() const = 0; + virtual PathSafetyStatus evaluateApprovedPathWithUnsafeHysteresis( + PathSafetyStatus approve_path_safety_status) = 0; + virtual bool isNearEndOfCurrentLanes( const lanelet::ConstLanelets & current_lanes, const lanelet::ConstLanelets & target_lanes, const double threshold) const = 0; @@ -248,6 +251,7 @@ class LaneChangeBase 
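For reference, a condensed sketch of the counting scheme behind `unsafe_hysteresis_threshold`: the count grows on consecutive unsafe evaluations, resets on the first safe one, and the unsafe verdict is only passed through once the threshold is exceeded. Class and variable names here are illustrative; the module's actual logic is in the `scene.cpp` hunk further below.

```cpp
#include <cstdio>

// Debounce an unsafe safety verdict: report "unsafe" only after it has been
// observed more than `threshold` times in a row, otherwise keep reporting safe.
class UnsafeHysteresis
{
public:
  explicit UnsafeHysteresis(int threshold) : threshold_(threshold) {}

  bool filter(bool is_safe)
  {
    count_ = is_safe ? 0 : count_ + 1;  // reset on safe, accumulate on unsafe
    return count_ <= threshold_;        // stay "safe" until the threshold is exceeded
  }

private:
  int threshold_;
  int count_{0};
};

int main()
{
  UnsafeHysteresis hysteresis(2);  // e.g. cancel.unsafe_hysteresis_threshold: 2
  const bool verdicts[] = {false, false, true, false, false, false};
  for (const bool v : verdicts) {
    std::printf("raw=%d filtered=%d\n", v, hysteresis.filter(v));  // unsafe only on the last tick
  }
  return 0;
}
```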
PathWithLaneId prev_approved_path_{}; + int unsafe_hysteresis_count_{0}; bool is_abort_path_approved_{false}; bool is_abort_approval_requested_{false}; bool is_activated_{false}; diff --git a/planning/behavior_path_lane_change_module/include/behavior_path_lane_change_module/utils/data_structs.hpp b/planning/behavior_path_lane_change_module/include/behavior_path_lane_change_module/utils/data_structs.hpp index 9b1814a396574..5039dea7c0db6 100644 --- a/planning/behavior_path_lane_change_module/include/behavior_path_lane_change_module/utils/data_structs.hpp +++ b/planning/behavior_path_lane_change_module/include/behavior_path_lane_change_module/utils/data_structs.hpp @@ -75,6 +75,11 @@ struct LaneChangeCancelParameters double duration{5.0}; double max_lateral_jerk{10.0}; double overhang_tolerance{0.0}; + + // unsafe_hysteresis_threshold will be compare with the number of detected unsafe instance. If the + // number of unsafe exceeds unsafe_hysteresis_threshold, the lane change will be cancelled or + // aborted. + int unsafe_hysteresis_threshold{2}; }; struct LaneChangeParameters @@ -132,6 +137,7 @@ struct LaneChangeParameters // safety check bool allow_loose_check_for_cancel{true}; utils::path_safety_checker::RSSparams rss_params{}; + utils::path_safety_checker::RSSparams rss_params_for_parked{}; utils::path_safety_checker::RSSparams rss_params_for_abort{}; utils::path_safety_checker::RSSparams rss_params_for_stuck{}; diff --git a/planning/behavior_path_lane_change_module/include/behavior_path_lane_change_module/utils/utils.hpp b/planning/behavior_path_lane_change_module/include/behavior_path_lane_change_module/utils/utils.hpp index a84acf4decd8d..48b940093c2cd 100644 --- a/planning/behavior_path_lane_change_module/include/behavior_path_lane_change_module/utils/utils.hpp +++ b/planning/behavior_path_lane_change_module/include/behavior_path_lane_change_module/utils/utils.hpp @@ -289,5 +289,54 @@ bool isWithinTurnDirectionLanes(const lanelet::ConstLanelet & lanelet, const Pol double calcPhaseLength( const double velocity, const double maximum_velocity, const double acceleration, const double time); + +/** + * @brief Calculates the minimum distance to a stationary object in the current lanes. + * + * This function determines the closest distance from the ego vehicle to a stationary object + * in the current lanes. It checks if the object is within the stopping criteria based on its + * velocity and calculates the distance while accounting for the object's size. Only objects + * positioned after the specified distance to the target lane's start are considered. + * + * @param filtered_objects A collection of lane change target objects, including those in the + * current lane. + * @param bpp_param Parameters for the behavior path planner, such as vehicle dimensions. + * @param lc_param Parameters for the lane change process, including the velocity threshold for + * stopping. + * @param dist_to_target_lane_start The distance from the ego vehicle to the start of the target + * lane. + * @param ego_pose The current pose of the ego vehicle. + * @param path The current path of the ego vehicle, containing path points and lane information. + * @return The minimum distance to a stationary object in the current lanes. If no valid object is + * found, returns the maximum possible double value. 
+ */ +double get_min_dist_to_current_lanes_obj( + const LaneChangeTargetObjects & filtered_objects, const BehaviorPathPlannerParameters & bpp_param, + const LaneChangeParameters & lc_param, const double dist_to_target_lane_start, const Pose & pose, + const PathWithLaneId & path); + +/** + * @brief Checks if there is any stationary object in the target lanes that would affect the lane + * change stop decision. + * + * This function determines whether there are any stationary objects in the target lanes that could + * impact the decision to insert a stop point for the ego vehicle. It checks each object's velocity, + * position relative to the ego vehicle, and overlap with the target lanes to identify if any object + * meets the criteria for being a blocking obstacle. + * + * @param target_lanes A collection of lanelets representing the target lanes for the lane change. + * @param filtered_objects A collection of lane change target objects, including those in the target + * lanes. + * @param lc_param Parameters for the lane change process, such as the stop velocity threshold. + * @param stop_arc_length The arc length at which the ego vehicle is expected to stop. + * @param ego_pose The current pose of the ego vehicle. + * @param path The current path of the ego vehicle, containing path points and lane information. + * @return true if there is a stationary object in the target lanes that meets the criteria for + * being a blocking object; otherwise, false. + */ +bool has_blocking_target_object( + const lanelet::ConstLanelets & target_lanes, const LaneChangeTargetObjects & filtered_objects, + const LaneChangeParameters & lc_param, const double stop_arc_length, const Pose & ego_pose, + const PathWithLaneId & path); } // namespace behavior_path_planner::utils::lane_change #endif // BEHAVIOR_PATH_LANE_CHANGE_MODULE__UTILS__UTILS_HPP_ diff --git a/planning/behavior_path_lane_change_module/src/interface.cpp b/planning/behavior_path_lane_change_module/src/interface.cpp index 00793532023b9..61699a8b35bdc 100644 --- a/planning/behavior_path_lane_change_module/src/interface.cpp +++ b/planning/behavior_path_lane_change_module/src/interface.cpp @@ -116,7 +116,8 @@ ModuleStatus LaneChangeInterface::updateState() return isWaitingApproval() ? 
ModuleStatus::RUNNING : ModuleStatus::SUCCESS; } - const auto [is_safe, is_object_coming_from_rear] = module_type_->isApprovedPathSafe(); + const auto [is_safe, is_object_coming_from_rear] = + module_type_->evaluateApprovedPathWithUnsafeHysteresis(module_type_->isApprovedPathSafe()); setObjectDebugVisualization(); if (is_safe) { diff --git a/planning/behavior_path_lane_change_module/src/manager.cpp b/planning/behavior_path_lane_change_module/src/manager.cpp index f4fd6fafca0e5..a854a4984d261 100644 --- a/planning/behavior_path_lane_change_module/src/manager.cpp +++ b/planning/behavior_path_lane_change_module/src/manager.cpp @@ -116,6 +116,23 @@ void LaneChangeModuleManager::init(rclcpp::Node * node) p.rss_params.lateral_distance_max_threshold = getOrDeclareParameter( *node, parameter("safety_check.execution.lateral_distance_max_threshold")); + p.rss_params_for_parked.longitudinal_distance_min_threshold = getOrDeclareParameter( + *node, parameter("safety_check.parked.longitudinal_distance_min_threshold")); + p.rss_params_for_parked.longitudinal_distance_min_threshold = getOrDeclareParameter( + *node, parameter("safety_check.parked.longitudinal_distance_min_threshold")); + p.rss_params_for_parked.longitudinal_velocity_delta_time = getOrDeclareParameter( + *node, parameter("safety_check.parked.longitudinal_velocity_delta_time")); + p.rss_params_for_parked.front_vehicle_deceleration = getOrDeclareParameter( + *node, parameter("safety_check.parked.expected_front_deceleration")); + p.rss_params_for_parked.rear_vehicle_deceleration = getOrDeclareParameter( + *node, parameter("safety_check.parked.expected_rear_deceleration")); + p.rss_params_for_parked.rear_vehicle_reaction_time = getOrDeclareParameter( + *node, parameter("safety_check.parked.rear_vehicle_reaction_time")); + p.rss_params_for_parked.rear_vehicle_safety_time_margin = getOrDeclareParameter( + *node, parameter("safety_check.parked.rear_vehicle_safety_time_margin")); + p.rss_params_for_parked.lateral_distance_max_threshold = getOrDeclareParameter( + *node, parameter("safety_check.parked.lateral_distance_max_threshold")); + p.rss_params_for_abort.longitudinal_distance_min_threshold = getOrDeclareParameter( *node, parameter("safety_check.cancel.longitudinal_distance_min_threshold")); p.rss_params_for_abort.longitudinal_velocity_delta_time = getOrDeclareParameter( @@ -215,6 +232,8 @@ void LaneChangeModuleManager::init(rclcpp::Node * node) getOrDeclareParameter(*node, parameter("cancel.max_lateral_jerk")); p.cancel.overhang_tolerance = getOrDeclareParameter(*node, parameter("cancel.overhang_tolerance")); + p.cancel.unsafe_hysteresis_threshold = + getOrDeclareParameter(*node, parameter("cancel.unsafe_hysteresis_threshold")); p.finish_judge_lateral_threshold = getOrDeclareParameter(*node, parameter("finish_judge_lateral_threshold")); diff --git a/planning/behavior_path_lane_change_module/src/scene.cpp b/planning/behavior_path_lane_change_module/src/scene.cpp index 43acb159c1799..284ea9409e924 100644 --- a/planning/behavior_path_lane_change_module/src/scene.cpp +++ b/planning/behavior_path_lane_change_module/src/scene.cpp @@ -165,8 +165,7 @@ BehaviorModuleOutput NormalLaneChange::generateOutput() output.path.points, output.path.points.front().point.pose.position, getEgoPosition()); const auto stop_dist = -(current_velocity * current_velocity / (2.0 * planner_data_->parameters.min_acc)); - const auto stop_point = utils::insertStopPoint(stop_dist + current_dist, output.path); - setStopPose(stop_point.point.pose); + set_stop_pose(stop_dist + 
current_dist, output.path); } else { insertStopPoint(status_.target_lanes, output.path); } @@ -204,7 +203,7 @@ void NormalLaneChange::extendOutputDrivableArea(BehaviorModuleOutput & output) void NormalLaneChange::insertStopPoint( const lanelet::ConstLanelets & lanelets, PathWithLaneId & path) { - if (lanelets.empty()) { + if (lanelets.empty() || status_.current_lanes.empty()) { return; } @@ -214,133 +213,119 @@ void NormalLaneChange::insertStopPoint( return; } - const auto shift_intervals = route_handler->getLateralIntervalsToPreferredLane(lanelets.back()); - const double lane_change_buffer = - utils::lane_change::calcMinimumLaneChangeLength(*lane_change_parameters_, shift_intervals, 0.0); + const auto & current_lanes = status_.current_lanes; + const auto is_current_lane = lanelets.front().id() == current_lanes.front().id() && + lanelets.back().id() == current_lanes.back().id(); + + // if input is not current lane, we can just insert the points at terminal. + if (!is_current_lane) { + const auto shift_intervals = route_handler->getLateralIntervalsToPreferredLane(lanelets.back()); + const auto min_dist_buffer = utils::lane_change::calcMinimumLaneChangeLength( + *lane_change_parameters_, shift_intervals, 0.0); + const auto backward_length_buffer = + lane_change_parameters_->backward_length_buffer_for_end_of_lane; + const auto arc_length_to_stop_pose = + motion_utils::calcArcLength(path.points) - backward_length_buffer + min_dist_buffer; + set_stop_pose(arc_length_to_stop_pose, path); + return; + } - const auto getDistanceAlongLanelet = [&](const geometry_msgs::msg::Pose & target) { - return utils::getSignedDistance(path.points.front().point.pose, target, lanelets); - }; + insert_stop_point_on_current_lanes(path); +} - // If lanelets.back() is in goal route section, get distance to goal. - // Otherwise, get distance to end of lane. 
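The stop distance inserted above is the usual constant-deceleration estimate v²/(2·|a_min|), measured from the current ego position along the path. A quick numeric check with made-up values:

```cpp
#include <cstdio>

int main()
{
  const double current_velocity = 5.0;  // [m/s]
  const double min_acc = -1.0;          // [m/s^2] strongest allowed deceleration
  // same expression as in generateOutput(): -(v^2 / (2 * a_min))
  const double stop_dist = -(current_velocity * current_velocity / (2.0 * min_acc));
  std::printf("stop distance: %.1f m\n", stop_dist);  // 12.5 m
  return 0;
}
```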
- double distance_to_terminal = 0.0; - if (route_handler->isInGoalRouteSection(lanelets.back())) { - const auto goal = route_handler->getGoalPose(); - distance_to_terminal = getDistanceAlongLanelet(goal); - } else { - distance_to_terminal = utils::getDistanceToEndOfLane(path.points.front().point.pose, lanelets); - } +void NormalLaneChange::insert_stop_point_on_current_lanes(PathWithLaneId & path) +{ + const auto & path_front_pose = path.points.front().point.pose; + const auto & route_handler_ptr = getRouteHandler(); + const auto & current_lanes = status_.current_lanes; + const auto current_path = + route_handler_ptr->getCenterLinePath(current_lanes, 0.0, std::numeric_limits::max()); + const auto & center_line = current_path.points; + const auto get_arc_length_along_lanelet = [&](const geometry_msgs::msg::Pose & target) { + return motion_utils::calcSignedArcLength( + center_line, path_front_pose.position, target.position); + }; - const double stop_point_buffer = lane_change_parameters_->backward_length_buffer_for_end_of_lane; - const auto target_objects = getTargetObjects(status_.current_lanes, status_.target_lanes); - double stopping_distance = distance_to_terminal - lane_change_buffer - stop_point_buffer; - - const auto is_valid_start_point = std::invoke([&]() -> bool { - auto lc_start_point = lanelet::utils::conversion::toLaneletPoint( - status_.lane_change_path.info.lane_changing_start.position); - const auto target_neighbor_preferred_lane_poly_2d = - utils::lane_change::getTargetNeighborLanesPolygon( - *route_handler, status_.current_lanes, type_); - return boost::geometry::covered_by( - lanelet::traits::to2D(lc_start_point), target_neighbor_preferred_lane_poly_2d); + const auto dist_to_terminal = std::invoke([&]() -> double { + const auto target_pose = route_handler_ptr->isInGoalRouteSection(current_lanes.back()) + ? route_handler_ptr->getGoalPose() + : center_line.back().point.pose; + return get_arc_length_along_lanelet(target_pose); }); - if (!is_valid_start_point) { - const auto stop_point = utils::insertStopPoint(stopping_distance, path); - setStopPose(stop_point.point.pose); + const auto shift_intervals = + route_handler_ptr->getLateralIntervalsToPreferredLane(current_lanes.back()); + const auto min_dist_buffer = + utils::lane_change::calcMinimumLaneChangeLength(*lane_change_parameters_, shift_intervals, 0.0); + const auto backward_length_buffer = + lane_change_parameters_->backward_length_buffer_for_end_of_lane; + const auto dist_to_terminal_start = dist_to_terminal - min_dist_buffer - backward_length_buffer; + const auto filtered_objects = getTargetObjects(status_.current_lanes, status_.target_lanes); + if (filtered_objects.current_lane.empty()) { + set_stop_pose(dist_to_terminal_start, path); return; } - // calculate minimum distance from path front to the stationary object on the ego lane. - const auto distance_to_ego_lane_obj = [&]() -> double { - double distance_to_obj = distance_to_terminal; - const double distance_to_ego = getDistanceAlongLanelet(getEgoPose()); - - for (const auto & object : target_objects.current_lane) { - // check if stationary - const auto obj_v = std::abs(object.initial_twist.twist.linear.x); - if (obj_v > lane_change_parameters_->stop_velocity_threshold) { - continue; - } - - // calculate distance from path front to the stationary object polygon on the ego lane. 
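`dist_to_terminal` above is computed with an immediately invoked lambda (`std::invoke`) so the goal-pose/lane-end branching stays inside a single `const` initialization. A minimal sketch of the idiom with placeholder values:

```cpp
#include <cstdio>
#include <functional>

int main()
{
  const bool lane_contains_goal = true;
  const double arc_length_to_goal = 42.0;      // [m] illustrative value
  const double arc_length_to_lane_end = 80.0;  // [m] illustrative value

  // Initialize a const value from a branch without a mutable temporary.
  const double dist_to_terminal = std::invoke([&]() -> double {
    return lane_contains_goal ? arc_length_to_goal : arc_length_to_lane_end;
  });

  std::printf("distance to terminal: %.1f m\n", dist_to_terminal);  // 42.0 m
  return 0;
}
```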
- const auto polygon = - tier4_autoware_utils::toPolygon2d(object.initial_pose.pose, object.shape).outer(); - for (const auto & polygon_p : polygon) { - const auto p_fp = tier4_autoware_utils::toMsg(polygon_p.to_3d()); - const auto lateral_fp = motion_utils::calcLateralOffset(path.points, p_fp); + const auto dist_to_target_lane_start = std::invoke([&]() -> double { + const auto & front_lane = status_.target_lanes.front(); + const auto & ll_front_pt = front_lane.centerline2d().front(); + const auto front_pt = lanelet::utils::conversion::toGeomMsgPt(ll_front_pt); + const auto yaw = lanelet::utils::getLaneletAngle(front_lane, front_pt); + Pose target_front; + target_front.position = front_pt; + tf2::Quaternion tf_quat; + tf_quat.setRPY(0, 0, yaw); + target_front.orientation = tf2::toMsg(tf_quat); - // ignore if the point is around the ego path - if (std::abs(lateral_fp) > planner_data_->parameters.vehicle_width) { - continue; - } + return get_arc_length_along_lanelet(target_front); + }); - const double current_distance_to_obj = calcSignedArcLength(path.points, 0, p_fp); + const auto arc_length_to_current_obj = utils::lane_change::get_min_dist_to_current_lanes_obj( + filtered_objects, getCommonParam(), *lane_change_parameters_, dist_to_target_lane_start, + getEgoPose(), path); - // ignore backward object - if (current_distance_to_obj < distance_to_ego) { - continue; - } - distance_to_obj = std::min(distance_to_obj, current_distance_to_obj); - } - } - return distance_to_obj; - }(); - - // Need to stop before blocking obstacle - if (distance_to_ego_lane_obj < distance_to_terminal) { - // consider rss distance when the LC need to avoid obstacles - const auto rss_dist = calcRssDistance( - 0.0, lane_change_parameters_->minimum_lane_changing_velocity, - lane_change_parameters_->rss_params); - const double lane_change_buffer_for_blocking_object = - utils::lane_change::calcMinimumLaneChangeLength(*lane_change_parameters_, shift_intervals); - - const auto stopping_distance_for_obj = - distance_to_ego_lane_obj - lane_change_buffer_for_blocking_object - - lane_change_parameters_->backward_length_buffer_for_blocking_object - rss_dist - - getCommonParam().base_link2front; - - // If the target lane in the lane change section is blocked by a stationary obstacle, there - // is no reason for stopping with a lane change margin. Instead, stop right behind the - // obstacle. 
- // ---------------------------------------------------------- - // [obj]> - // ---------------------------------------------------------- - // [ego]> | <--- lane change margin ---> [obj]> - // ---------------------------------------------------------- - const bool has_blocking_target_lane_obj = std::any_of( - target_objects.target_lane.begin(), target_objects.target_lane.end(), [&](const auto & o) { - const auto v = std::abs(o.initial_twist.twist.linear.x); - if (v > lane_change_parameters_->stop_velocity_threshold) { - return false; - } + // margin with leading vehicle + // consider rss distance when the LC need to avoid obstacles + const auto rss_dist = calcRssDistance( + 0.0, lane_change_parameters_->minimum_lane_changing_velocity, + lane_change_parameters_->rss_params); - // target_objects includes objects out of target lanes, so filter them out - if (!boost::geometry::intersects( - tier4_autoware_utils::toPolygon2d(o.initial_pose.pose, o.shape).outer(), - lanelet::utils::combineLaneletsShape(status_.target_lanes) - .polygon2d() - .basicPolygon())) { - return false; - } + const auto min_single_lc_length = utils::lane_change::calcMinimumLaneChangeLength( + *lane_change_parameters_, {shift_intervals.front()}, 0.0); + const auto stop_margin = min_single_lc_length + + lane_change_parameters_->backward_length_buffer_for_blocking_object + + rss_dist + getCommonParam().base_link2front; + const auto stop_arc_length_behind_obj = arc_length_to_current_obj - stop_margin; - const double distance_to_target_lane_obj = getDistanceAlongLanelet(o.initial_pose.pose); - return stopping_distance_for_obj < distance_to_target_lane_obj && - distance_to_target_lane_obj < distance_to_ego_lane_obj; - }); + if (stop_arc_length_behind_obj < dist_to_target_lane_start) { + set_stop_pose(dist_to_target_lane_start, path); + return; + } - if (!has_blocking_target_lane_obj) { - stopping_distance = stopping_distance_for_obj; - } + if (stop_arc_length_behind_obj > dist_to_terminal_start) { + set_stop_pose(dist_to_terminal_start, path); + return; } - if (stopping_distance > 0.0) { - const auto stop_point = utils::insertStopPoint(stopping_distance, path); - setStopPose(stop_point.point.pose); + // If the target lane in the lane change section is blocked by a stationary obstacle, there + // is no reason for stopping with a lane change margin. Instead, stop right behind the + // obstacle. 
+ // ---------------------------------------------------------- + // [obj]> + // ---------------------------------------------------------- + // [ego]> | <--- stop margin ---> [obj]> + // ---------------------------------------------------------- + const auto has_blocking_target_lane_obj = utils::lane_change::has_blocking_target_object( + status_.target_lanes, filtered_objects, *lane_change_parameters_, stop_arc_length_behind_obj, + getEgoPose(), path); + + if (has_blocking_target_lane_obj || stop_arc_length_behind_obj <= 0.0) { + set_stop_pose(dist_to_terminal_start, path); + return; } + + set_stop_pose(stop_arc_length_behind_obj, path); } PathWithLaneId NormalLaneChange::getReferencePath() const @@ -420,6 +405,7 @@ void NormalLaneChange::resetParameters() is_abort_approval_requested_ = false; current_lane_change_state_ = LaneChangeStates::Normal; abort_path_ = nullptr; + unsafe_hysteresis_count_ = 0; object_debug_.clear(); } @@ -1451,6 +1437,28 @@ PathSafetyStatus NormalLaneChange::isApprovedPathSafe() const return safety_status; } +PathSafetyStatus NormalLaneChange::evaluateApprovedPathWithUnsafeHysteresis( + PathSafetyStatus approved_path_safety_status) +{ + if (!approved_path_safety_status.is_safe) { + ++unsafe_hysteresis_count_; + RCLCPP_DEBUG( + logger_, "%s: Increasing hysteresis count to %d.", __func__, unsafe_hysteresis_count_); + } else { + if (unsafe_hysteresis_count_ > 0) { + RCLCPP_DEBUG(logger_, "%s: Lane change is now SAFE. Resetting hysteresis count.", __func__); + } + unsafe_hysteresis_count_ = 0; + } + if (unsafe_hysteresis_count_ > lane_change_parameters_->cancel.unsafe_hysteresis_threshold) { + RCLCPP_DEBUG( + logger_, "%s: hysteresis count exceed threshold. lane change is now %s", __func__, + (approved_path_safety_status.is_safe ? "safe" : "UNSAFE")); + return approved_path_safety_status; + } + return {}; +} + bool NormalLaneChange::isValidPath(const PathWithLaneId & path) const { const auto & route_handler = planner_data_->route_handler; @@ -1692,9 +1700,13 @@ PathSafetyStatus NormalLaneChange::isLaneChangePathSafe( const auto obj_predicted_paths = utils::path_safety_checker::getPredictedPathFromObj( obj, lane_change_parameters_->use_all_predicted_path); auto is_safe = true; + const auto selected_rss_param = + (obj.initial_twist.twist.linear.x <= lane_change_parameters_->stop_velocity_threshold) + ? 
lane_change_parameters_->rss_params_for_parked + : rss_params; for (const auto & obj_path : obj_predicted_paths) { const auto collided_polygons = utils::path_safety_checker::getCollidedPolygons( - path, ego_predicted_path, obj, obj_path, common_parameters, rss_params, 1.0, + path, ego_predicted_path, obj, obj_path, common_parameters, selected_rss_param, 1.0, current_debug_data.second); if (collided_polygons.empty()) { @@ -1815,9 +1827,10 @@ bool NormalLaneChange::isVehicleStuck(const lanelet::ConstLanelets & current_lan return isVehicleStuck(current_lanes, detection_distance); } -void NormalLaneChange::setStopPose(const Pose & stop_pose) +void NormalLaneChange::set_stop_pose(const double arc_length_to_stop_pose, PathWithLaneId & path) { - lane_change_stop_pose_ = stop_pose; + const auto stop_point = utils::insertStopPoint(arc_length_to_stop_pose, path); + lane_change_stop_pose_ = stop_point.point.pose; } void NormalLaneChange::updateStopTime() diff --git a/planning/behavior_path_lane_change_module/src/utils/utils.cpp b/planning/behavior_path_lane_change_module/src/utils/utils.cpp index 60d7c699dc6bb..0d5b20db3c0c8 100644 --- a/planning/behavior_path_lane_change_module/src/utils/utils.cpp +++ b/planning/behavior_path_lane_change_module/src/utils/utils.cpp @@ -1219,4 +1219,90 @@ double calcPhaseLength( const auto length_with_max_velocity = maximum_velocity * duration; return std::min(length_with_acceleration, length_with_max_velocity); } + +double get_min_dist_to_current_lanes_obj( + const LaneChangeTargetObjects & filtered_objects, const BehaviorPathPlannerParameters & bpp_param, + const LaneChangeParameters & lc_param, const double dist_to_target_lane_start, + const Pose & ego_pose, const PathWithLaneId & path) +{ + const auto & path_points = path.points; + + auto min_dist_to_obj = std::numeric_limits::max(); + for (const auto & object : filtered_objects.current_lane) { + // check if stationary + const auto obj_v = std::abs(object.initial_twist.twist.linear.x); + if (obj_v > lc_param.stop_velocity_threshold) { + continue; + } + + // provide "estimation" based on size of object + const auto dist_to_obj = + motion_utils::calcSignedArcLength( + path_points, path_points.front().point.pose.position, object.initial_pose.pose.position) - + (object.shape.dimensions.x / 2); + + const auto dist_to_ego = motion_utils::calcSignedArcLength( + path_points, ego_pose.position, object.initial_pose.pose.position) - + (object.shape.dimensions.x / 2); + + if (dist_to_obj < dist_to_target_lane_start || dist_to_obj < dist_to_ego) { + continue; + } + + // calculate distance from path front to the stationary object polygon on the ego lane. 
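Putting the pieces of the hunk above together: the stop pose behind a stationary object on the current lane sits one full margin ahead of it, and the result is kept between the start of the lane-change section and the terminal stop position. A condensed sketch of that arithmetic, treating the RSS distance as a precomputed input and folding the two boundary branches into a clamp (names and numbers are illustrative; the real code also falls back to the terminal stop when a target-lane object blocks the section):

```cpp
#include <algorithm>
#include <cstdio>

// Arc length (from the path start) at which to insert the stop point when a
// stationary object blocks the current lane during a lane change.
double stop_arc_length_behind_object(
  double arc_length_to_object, double min_single_lc_length,
  double backward_buffer_for_blocking_object, double rss_dist, double base_link2front,
  double dist_to_target_lane_start, double dist_to_terminal_start)
{
  const double stop_margin =
    min_single_lc_length + backward_buffer_for_blocking_object + rss_dist + base_link2front;
  const double behind_obj = arc_length_to_object - stop_margin;
  // never stop before the lane-change section starts, never past the terminal stop
  return std::clamp(behind_obj, dist_to_target_lane_start, dist_to_terminal_start);
}

int main()
{
  const double stop_s = stop_arc_length_behind_object(
    60.0 /*object*/, 20.0 /*min LC length*/, 3.0 /*blocking buffer*/, 5.0 /*RSS*/,
    4.0 /*base_link2front*/, 10.0 /*target lane start*/, 70.0 /*terminal start*/);
  std::printf("stop arc length: %.1f m\n", stop_s);  // 60 - 32 = 28 m
  return 0;
}
```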
+ const auto obj_poly = + tier4_autoware_utils::toPolygon2d(object.initial_pose.pose, object.shape).outer(); + for (const auto & polygon_p : obj_poly) { + const auto p_fp = tier4_autoware_utils::toMsg(polygon_p.to_3d()); + const auto lateral_fp = motion_utils::calcLateralOffset(path_points, p_fp); + + // ignore if the point is not on ego path + if (std::abs(lateral_fp) > (bpp_param.vehicle_width / 2)) { + continue; + } + + const auto current_distance_to_obj = motion_utils::calcSignedArcLength(path_points, 0, p_fp); + min_dist_to_obj = std::min(min_dist_to_obj, current_distance_to_obj); + } + } + return min_dist_to_obj; +} + +bool has_blocking_target_object( + const lanelet::ConstLanelets & target_lanes, const LaneChangeTargetObjects & filtered_objects, + const LaneChangeParameters & lc_param, const double stop_arc_length, const Pose & ego_pose, + const PathWithLaneId & path) +{ + const auto target_lane_poly = + lanelet::utils::combineLaneletsShape(target_lanes).polygon2d().basicPolygon(); + return std::any_of( + filtered_objects.target_lane.begin(), filtered_objects.target_lane.end(), + [&](const auto & object) { + const auto v = std::abs(object.initial_twist.twist.linear.x); + if (v > lc_param.stop_velocity_threshold) { + return false; + } + + const auto arc_length_to_ego = + motion_utils::calcSignedArcLength( + path.points, ego_pose.position, object.initial_pose.pose.position) - + (object.shape.dimensions.x / 2); + + if (arc_length_to_ego < 0.0) { + return false; + } + + const auto obj_poly = + tier4_autoware_utils::toPolygon2d(object.initial_pose.pose, object.shape); + // filtered_objects includes objects out of target lanes, so filter them out + if (boost::geometry::disjoint(obj_poly, target_lane_poly)) { + return false; + } + + const auto arc_length_to_target_lane_obj = motion_utils::calcSignedArcLength( + path.points, path.points.front().point.pose.position, object.initial_pose.pose.position); + const auto width_margin = object.shape.dimensions.x / 2; + return (arc_length_to_target_lane_obj - width_margin) >= stop_arc_length; + }); +} } // namespace behavior_path_planner::utils::lane_change diff --git a/planning/behavior_path_planner_common/src/turn_signal_decider.cpp b/planning/behavior_path_planner_common/src/turn_signal_decider.cpp index f808aee2bde2e..16537620a29a6 100644 --- a/planning/behavior_path_planner_common/src/turn_signal_decider.cpp +++ b/planning/behavior_path_planner_common/src/turn_signal_decider.cpp @@ -136,6 +136,13 @@ std::optional TurnSignalDecider::getIntersectionTurnSignalInfo( const size_t current_seg_idx, const RouteHandler & route_handler, const double nearest_dist_threshold, const double nearest_yaw_threshold) { + const auto requires_turn_signal = [¤t_vel]( + const auto & turn_direction, const bool is_in_turn_lane) { + constexpr double stop_velocity_threshold = 0.1; + return ( + turn_direction == "right" || turn_direction == "left" || + (turn_direction == "straight" && current_vel < stop_velocity_threshold && !is_in_turn_lane)); + }; // base search distance const double base_search_distance = intersection_search_time_ * current_vel + intersection_search_distance_; @@ -152,6 +159,19 @@ std::optional TurnSignalDecider::getIntersectionTurnSignalInfo( } } + bool is_in_turn_lane = false; + for (const auto & lane_id : unique_lane_ids) { + const auto lanelet = route_handler.getLaneletsFromId(lane_id); + const std::string turn_direction = lanelet.attributeOr("turn_direction", "none"); + if (turn_direction == "left" || turn_direction == "right") { + const auto & 
position = current_pose.position; + const lanelet::BasicPoint2d point(position.x, position.y); + if (lanelet::geometry::inside(lanelet, point)) { + is_in_turn_lane = true; + break; + } + } + } // combine consecutive lanes of the same turn direction // stores lanes that have already been combine std::set processed_lanes; @@ -258,11 +278,8 @@ std::optional TurnSignalDecider::getIntersectionTurnSignalInfo( continue; } - constexpr double stop_velocity_threshold = 0.1; if (dist_to_front_point < search_distance) { - if ( - lane_attribute == "right" || lane_attribute == "left" || - (lane_attribute == "straight" && current_vel < stop_velocity_threshold)) { + if (requires_turn_signal(lane_attribute, is_in_turn_lane)) { // update map if necessary if (desired_start_point_map_.find(lane_id) == desired_start_point_map_.end()) { desired_start_point_map_.emplace(lane_id, current_pose); diff --git a/planning/behavior_velocity_out_of_lane_module/src/lanelets_selection.cpp b/planning/behavior_velocity_out_of_lane_module/src/lanelets_selection.cpp index 738ea22106b29..25bc2a9718de3 100644 --- a/planning/behavior_velocity_out_of_lane_module/src/lanelets_selection.cpp +++ b/planning/behavior_velocity_out_of_lane_module/src/lanelets_selection.cpp @@ -93,6 +93,16 @@ lanelet::ConstLanelets calculate_ignored_lanelets( const auto is_path_lanelet = contains_lanelet(path_lanelets, l.second.id()); if (!is_path_lanelet) ignored_lanelets.push_back(l.second); } + // ignore lanelets beyond the last path pose + const auto beyond = planning_utils::calculateOffsetPoint2d( + ego_data.path.points.back().point.pose, params.front_offset, 0.0); + const lanelet::BasicPoint2d beyond_point(beyond.x(), beyond.y()); + const auto beyond_lanelets = lanelet::geometry::findWithin2d( + route_handler.getLaneletMapPtr()->laneletLayer, beyond_point, 0.0); + for (const auto & l : beyond_lanelets) { + const auto is_path_lanelet = contains_lanelet(path_lanelets, l.second.id()); + if (!is_path_lanelet) ignored_lanelets.push_back(l.second); + } return ignored_lanelets; } diff --git a/planning/behavior_velocity_run_out_module/README.md b/planning/behavior_velocity_run_out_module/README.md index c48f2a951cd55..5cd4b2708fd2d 100644 --- a/planning/behavior_velocity_run_out_module/README.md +++ b/planning/behavior_velocity_run_out_module/README.md @@ -108,6 +108,14 @@ You can choose whether to use this feature by parameter of `use_partition_lanele ![brief](./docs/exclude_obstacles_by_partition.svg) +##### Exclude obstacles that cross the ego vehicle's "cut line" + +This module can exclude obstacles that have predicted paths that will cross the back side of the ego vehicle. It excludes obstacles if their predicted path crosses the ego's "cut line". The "cut line" is a virtual line segment that is perpendicular to the ego vehicle and that passes through the ego's base link. + +You can choose whether to use this feature by setting the parameter `use_ego_cut_line` to `true` or `false`. The width of the line can be tuned with the parameter `ego_cut_line_length`. 
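A condensed sketch of the check described above: the cut line is the lateral segment of length `ego_cut_line_length` through the ego base link, and an obstacle is excluded as soon as any segment of its predicted path crosses it. Types and function names are illustrative; the module's actual helper is added to `utils.cpp` further below.

```cpp
#include <cmath>
#include <cstdio>
#include <vector>

struct Pt { double x, y; };

// Standard 2-D segment intersection test via orientation (signed-area) checks;
// collinear touch cases are ignored for brevity.
bool segments_intersect(const Pt & a, const Pt & b, const Pt & c, const Pt & d)
{
  const auto cross = [](const Pt & o, const Pt & p, const Pt & q) {
    return (p.x - o.x) * (q.y - o.y) - (p.y - o.y) * (q.x - o.x);
  };
  const double d1 = cross(c, d, a), d2 = cross(c, d, b);
  const double d3 = cross(a, b, c), d4 = cross(a, b, d);
  return ((d1 > 0) != (d2 > 0)) && ((d3 > 0) != (d4 > 0));
}

// True if any segment of the predicted path crosses the ego cut line: the
// lateral segment of half-width `half_length` through the ego base link.
bool path_crosses_cut_line(
  const std::vector<Pt> & predicted_path, const Pt & ego_pos, double ego_yaw, double half_length)
{
  // endpoints of the cut line, offset purely laterally from the base link
  const Pt left{ego_pos.x - std::sin(ego_yaw) * half_length,
                ego_pos.y + std::cos(ego_yaw) * half_length};
  const Pt right{ego_pos.x + std::sin(ego_yaw) * half_length,
                 ego_pos.y - std::cos(ego_yaw) * half_length};
  for (size_t i = 1; i < predicted_path.size(); ++i) {
    if (segments_intersect(left, right, predicted_path[i - 1], predicted_path[i])) {
      return true;
    }
  }
  return false;
}

int main()
{
  const Pt ego{0.0, 0.0};
  const double ego_yaw = 0.0;      // heading +x
  const double half_length = 1.5;  // ego_cut_line_length: 3.0

  const std::vector<Pt> crossing{{2.0, 0.5}, {-2.0, 0.5}};    // cuts through the ego's line
  const std::vector<Pt> passing_by{{2.0, 3.0}, {-2.0, 3.0}};  // stays clear of the line

  std::printf(
    "%d %d\n", path_crosses_cut_line(crossing, ego, ego_yaw, half_length),
    path_crosses_cut_line(passing_by, ego, ego_yaw, half_length));  // 1 0
  return 0;
}
```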
+ +![brief](./docs/ego_cut_line.svg) + #### Collision detection ##### Detect collision with dynamic obstacles diff --git a/planning/behavior_velocity_run_out_module/config/run_out.param.yaml b/planning/behavior_velocity_run_out_module/config/run_out.param.yaml index 5534228c1b86f..3a4a1d1e1a89d 100644 --- a/planning/behavior_velocity_run_out_module/config/run_out.param.yaml +++ b/planning/behavior_velocity_run_out_module/config/run_out.param.yaml @@ -5,12 +5,14 @@ use_partition_lanelet: true # [-] whether to use partition lanelet map data suppress_on_crosswalk: true # [-] whether to suppress on crosswalk lanelet: specify_decel_jerk: false # [-] whether to specify jerk when ego decelerates + use_ego_cut_line: true # [-] filter obstacles that pass the backside of ego: if a dynamic obstacle's predicted path intersects this line, it is ignored stop_margin: 2.5 # [m] the vehicle decelerates to be able to stop with this margin passing_margin: 1.1 # [m] the vehicle begins to accelerate if the vehicle's front in predicted position is ahead of the obstacle + this margin deceleration_jerk: -0.3 # [m/s^3] ego decelerates with this jerk when stopping for obstacles detection_distance: 45.0 # [m] ahead distance from ego to detect the obstacles detection_span: 1.0 # [m] calculate collision with this span to reduce calculation time min_vel_ego_kmph: 3.6 # [km/h] min velocity to calculate time to collision + ego_cut_line_length: 3.0 # The width of the ego's cut line detection_area: margin_behind: 0.5 # [m] ahead margin for detection area length diff --git a/planning/behavior_velocity_run_out_module/docs/ego_cut_line.svg b/planning/behavior_velocity_run_out_module/docs/ego_cut_line.svg new file mode 100644 index 0000000000000..de6bec5e172df --- /dev/null +++ b/planning/behavior_velocity_run_out_module/docs/ego_cut_line.svg @@ -0,0 +1,129 @@ + + + + + + + + + + diff --git a/planning/behavior_velocity_run_out_module/src/debug.cpp b/planning/behavior_velocity_run_out_module/src/debug.cpp index c0d026b5ffaf4..23764bc73fbff 100644 --- a/planning/behavior_velocity_run_out_module/src/debug.cpp +++ b/planning/behavior_velocity_run_out_module/src/debug.cpp @@ -86,6 +86,14 @@ void RunOutDebug::pushCollisionPoints(const geometry_msgs::msg::Point & point) collision_points_.push_back(point_with_height); } +void RunOutDebug::pushEgoCutLine(const std::vector & line) +{ + for (const auto & point : line) { + const auto point_with_height = createPoint(point.x, point.y, height_); + ego_cut_line_.push_back(point_with_height); + } +} + void RunOutDebug::pushCollisionPoints(const std::vector & points) { for (const auto & p : points) { @@ -160,6 +168,7 @@ void RunOutDebug::clearDebugMarker() predicted_obstacle_polygons_.clear(); collision_obstacle_polygons_.clear(); travel_time_texts_.clear(); + ego_cut_line_.clear(); } visualization_msgs::msg::MarkerArray RunOutDebug::createVisualizationMarkerArray() @@ -265,6 +274,16 @@ visualization_msgs::msg::MarkerArray RunOutDebug::createVisualizationMarkerArray &msg); } + if (!ego_cut_line_.empty()) { + auto marker = createDefaultMarker( + "map", current_time, "ego_cut_line", 0, visualization_msgs::msg::Marker::LINE_LIST, + createMarkerScale(0.2, 0.2, 0.2), createMarkerColor(0.7, 0.0, 0.7, 0.999)); + for (const auto & p : ego_cut_line_) { + marker.points.push_back(p); + } + msg.markers.push_back(marker); + } + if (!travel_time_texts_.empty()) { auto marker = createDefaultMarker( "map", current_time, "travel_time_texts", 0, diff --git 
a/planning/behavior_velocity_run_out_module/src/debug.hpp b/planning/behavior_velocity_run_out_module/src/debug.hpp index b28725a92628e..e9b269ee437f0 100644 --- a/planning/behavior_velocity_run_out_module/src/debug.hpp +++ b/planning/behavior_velocity_run_out_module/src/debug.hpp @@ -109,6 +109,7 @@ class RunOutDebug void pushPredictedVehiclePolygons(const std::vector & polygon); void pushPredictedObstaclePolygons(const std::vector & polygon); void pushCollisionObstaclePolygons(const std::vector & polygon); + void pushEgoCutLine(const std::vector & line); void pushDetectionAreaPolygons(const Polygon2d & debug_polygon); void pushMandatoryDetectionAreaPolygons(const Polygon2d & debug_polygon); void pushTravelTimeTexts( @@ -134,6 +135,7 @@ class RunOutDebug rclcpp::Publisher::SharedPtr pub_debug_pointcloud_; std::vector collision_points_; std::vector nearest_collision_point_; + std::vector ego_cut_line_; std::vector stop_pose_; std::vector> predicted_vehicle_polygons_; std::vector> predicted_obstacle_polygons_; diff --git a/planning/behavior_velocity_run_out_module/src/manager.cpp b/planning/behavior_velocity_run_out_module/src/manager.cpp index 3ba9bf8bf52e6..e583393edc15e 100644 --- a/planning/behavior_velocity_run_out_module/src/manager.cpp +++ b/planning/behavior_velocity_run_out_module/src/manager.cpp @@ -58,6 +58,7 @@ RunOutModuleManager::RunOutModuleManager(rclcpp::Node & node) auto & p = planner_param_.run_out; p.detection_method = getOrDeclareParameter(node, ns + ".detection_method"); p.use_partition_lanelet = getOrDeclareParameter(node, ns + ".use_partition_lanelet"); + p.use_ego_cut_line = getOrDeclareParameter(node, ns + ".use_ego_cut_line"); p.suppress_on_crosswalk = getOrDeclareParameter(node, ns + ".suppress_on_crosswalk"); p.specify_decel_jerk = getOrDeclareParameter(node, ns + ".specify_decel_jerk"); p.stop_margin = getOrDeclareParameter(node, ns + ".stop_margin"); @@ -66,6 +67,7 @@ RunOutModuleManager::RunOutModuleManager(rclcpp::Node & node) p.detection_distance = getOrDeclareParameter(node, ns + ".detection_distance"); p.detection_span = getOrDeclareParameter(node, ns + ".detection_span"); p.min_vel_ego_kmph = getOrDeclareParameter(node, ns + ".min_vel_ego_kmph"); + p.ego_cut_line_length = getOrDeclareParameter(node, ns + ".ego_cut_line_length"); } { diff --git a/planning/behavior_velocity_run_out_module/src/scene.cpp b/planning/behavior_velocity_run_out_module/src/scene.cpp index 92516e7b4424b..f64266790b997 100644 --- a/planning/behavior_velocity_run_out_module/src/scene.cpp +++ b/planning/behavior_velocity_run_out_module/src/scene.cpp @@ -102,9 +102,18 @@ bool RunOutModule::modifyPathVelocity( const auto dynamic_obstacles = dynamic_obstacle_creator_->createDynamicObstacles(); debug_ptr_->setDebugValues(DebugValues::TYPE::NUM_OBSTACLES, dynamic_obstacles.size()); - // extract obstacles using lanelet information - const auto partition_excluded_obstacles = - excludeObstaclesOutSideOfPartition(dynamic_obstacles, extended_smoothed_path, current_pose); + const auto filtered_obstacles = std::invoke([&]() { + // extract obstacles using lanelet information + const auto partition_excluded_obstacles = + excludeObstaclesOutSideOfPartition(dynamic_obstacles, extended_smoothed_path, current_pose); + + if (!planner_param_.run_out.use_ego_cut_line) return partition_excluded_obstacles; + + // extract obstacles that cross the ego's cut line + const auto ego_cut_line_excluded_obstacles = + excludeObstaclesCrossingEgoCutLine(partition_excluded_obstacles, current_pose); + return 
ego_cut_line_excluded_obstacles; + }); // record time for obstacle creation const auto t_obstacle_creation = std::chrono::system_clock::now(); @@ -122,7 +131,7 @@ bool RunOutModule::modifyPathVelocity( planner_data_->route_handler_->getOverallGraphPtr()) : std::vector>(); const auto dynamic_obstacle = - detectCollision(partition_excluded_obstacles, extended_smoothed_path, crosswalk_lanelets); + detectCollision(filtered_obstacles, extended_smoothed_path, crosswalk_lanelets); // record time for collision check const auto t_collision_check = std::chrono::system_clock::now(); @@ -147,8 +156,7 @@ bool RunOutModule::modifyPathVelocity( applyMaxJerkLimit(current_pose, current_vel, current_acc, *path); } - publishDebugValue( - extended_smoothed_path, partition_excluded_obstacles, dynamic_obstacle, current_pose); + publishDebugValue(extended_smoothed_path, filtered_obstacles, dynamic_obstacle, current_pose); // record time for collision check const auto t_path_planning = std::chrono::system_clock::now(); @@ -801,6 +809,24 @@ void RunOutModule::applyMaxJerkLimit( run_out_utils::insertPathVelocityFromIndex(stop_point_idx.value(), jerk_limited_vel, path.points); } +std::vector RunOutModule::excludeObstaclesCrossingEgoCutLine( + const std::vector & dynamic_obstacles, + const geometry_msgs::msg::Pose & current_pose) const +{ + std::vector extracted_obstacles; + std::vector ego_cut_line; + const double ego_cut_line_half_width = planner_param_.run_out.ego_cut_line_length / 2.0; + std::for_each(dynamic_obstacles.begin(), dynamic_obstacles.end(), [&](const auto & o) { + const auto predicted_path = run_out_utils::getHighestConfidencePath(o.predicted_paths); + if (!run_out_utils::pathIntersectsEgoCutLine( + predicted_path, current_pose, ego_cut_line_half_width, ego_cut_line)) { + extracted_obstacles.push_back(o); + } + }); + debug_ptr_->pushEgoCutLine(ego_cut_line); + return extracted_obstacles; +} + std::vector RunOutModule::excludeObstaclesOutSideOfPartition( const std::vector & dynamic_obstacles, const PathWithLaneId & path, const geometry_msgs::msg::Pose & current_pose) const diff --git a/planning/behavior_velocity_run_out_module/src/scene.hpp b/planning/behavior_velocity_run_out_module/src/scene.hpp index def90f036c440..02b17783ab6c1 100644 --- a/planning/behavior_velocity_run_out_module/src/scene.hpp +++ b/planning/behavior_velocity_run_out_module/src/scene.hpp @@ -136,6 +136,17 @@ class RunOutModule : public SceneModuleInterface const geometry_msgs::msg::Pose & current_pose, const float current_vel, const float current_acc, PathWithLaneId & path) const; + /** + * @brief Creates a virtual line segment that is perpendicular to the ego vehicle and that passes + * through the ego's base link and excludes objects with paths that intersect that line segment. + * @param [in] dynamic_obstacles obstacles to be filtered. + * @param [in] current_pose ego vehicle's current pose. + * @return a vector of dynamic obstacles that don't intersect the line segment. 
+ */ + std::vector excludeObstaclesCrossingEgoCutLine( + const std::vector & dynamic_obstacles, + const geometry_msgs::msg::Pose & current_pose) const; + std::vector excludeObstaclesOutSideOfPartition( const std::vector & dynamic_obstacles, const PathWithLaneId & path, const geometry_msgs::msg::Pose & current_pose) const; diff --git a/planning/behavior_velocity_run_out_module/src/utils.cpp b/planning/behavior_velocity_run_out_module/src/utils.cpp index db989f90fafc9..f17f4c7251ea4 100644 --- a/planning/behavior_velocity_run_out_module/src/utils.cpp +++ b/planning/behavior_velocity_run_out_module/src/utils.cpp @@ -176,6 +176,28 @@ std::optional findFirstStopPointIdx(PathPointsWithLaneId & path_points) return {}; } +bool pathIntersectsEgoCutLine( + const std::vector & path, const geometry_msgs::msg::Pose & ego_pose, + const double half_line_length, std::vector & ego_cut_line) +{ + if (path.size() < 2) return false; + const auto p1 = + tier4_autoware_utils::calcOffsetPose(ego_pose, 0.0, half_line_length, 0.0).position; + const auto p2 = + tier4_autoware_utils::calcOffsetPose(ego_pose, 0.0, -half_line_length, 0.0).position; + ego_cut_line = {p1, p2}; + + for (size_t i = 1; i < path.size(); ++i) { + const auto & p3 = path.at(i).position; + const auto & p4 = path.at(i - 1).position; + const auto intersection = tier4_autoware_utils::intersect(p1, p2, p3, p4); + if (intersection.has_value()) { + return true; + } + } + return false; +} + LineString2d createLineString2d(const lanelet::BasicPolygon2d & poly) { LineString2d line_string; diff --git a/planning/behavior_velocity_run_out_module/src/utils.hpp b/planning/behavior_velocity_run_out_module/src/utils.hpp index 5524c0f76049d..2924d59586392 100644 --- a/planning/behavior_velocity_run_out_module/src/utils.hpp +++ b/planning/behavior_velocity_run_out_module/src/utils.hpp @@ -15,6 +15,8 @@ #ifndef UTILS_HPP_ #define UTILS_HPP_ +#include "tier4_autoware_utils/geometry/geometry.hpp" + #include #include #include @@ -56,9 +58,11 @@ struct RunOutParam bool use_partition_lanelet; bool suppress_on_crosswalk; bool specify_decel_jerk; + bool use_ego_cut_line; double stop_margin; double passing_margin; double deceleration_jerk; + double ego_cut_line_length; float detection_distance; float detection_span; float min_vel_ego_kmph; @@ -194,6 +198,10 @@ struct DynamicObstacleData Polygon2d createBoostPolyFromMsg(const std::vector & input_poly); +bool pathIntersectsEgoCutLine( + const std::vector & path, const geometry_msgs::msg::Pose & ego_pose, + const double half_line_length, std::vector & ego_cut_line); + std::uint8_t getHighestProbLabel(const std::vector & classification); std::vector getHighestConfidencePath( diff --git a/planning/planning_debug_tools/scripts/perception_replayer/perception_replayer_common.py b/planning/planning_debug_tools/scripts/perception_replayer/perception_replayer_common.py index 7bf54c0278a27..2d876e2c93a0a 100644 --- a/planning/planning_debug_tools/scripts/perception_replayer/perception_replayer_common.py +++ b/planning/planning_debug_tools/scripts/perception_replayer/perception_replayer_common.py @@ -22,8 +22,8 @@ from autoware_auto_perception_msgs.msg import DetectedObjects from autoware_auto_perception_msgs.msg import PredictedObjects from autoware_auto_perception_msgs.msg import TrackedObjects -from autoware_auto_perception_msgs.msg import TrafficSignalArray as AutoTrafficSignalArray from autoware_perception_msgs.msg import TrafficSignalArray +from geometry_msgs.msg import PoseStamped from geometry_msgs.msg import 
PoseWithCovarianceStamped from nav_msgs.msg import Odometry import psutil @@ -32,6 +32,7 @@ from rosbag2_py import StorageFilter from rosidl_runtime_py.utilities import get_message from sensor_msgs.msg import PointCloud2 +from utils import get_starting_time from utils import open_reader @@ -40,11 +41,10 @@ def __init__(self, args, name): super().__init__(name) self.args = args - self.ego_pose = None + self.ego_odom = None self.rosbag_objects_data = [] self.rosbag_ego_odom_data = [] self.rosbag_traffic_signals_data = [] - self.is_auto_traffic_signals = False # subscriber self.sub_odom = self.create_subscription( @@ -76,31 +76,33 @@ def __init__(self, args, name): Odometry, "/perception_reproducer/rosbag_ego_odom", 1 ) + self.goal_pose_publisher = self.create_publisher( + PoseStamped, "/planning/mission_planning/goal", 1 + ) + + self.traffic_signals_pub = self.create_publisher( + TrafficSignalArray, "/perception/traffic_light_recognition/traffic_signals", 1 + ) + # load rosbag print("Stared loading rosbag") if os.path.isdir(args.bag): - for bag_file in sorted(os.listdir(args.bag)): - if bag_file.endswith(self.args.rosbag_format): - self.load_rosbag(args.bag + "/" + bag_file) + bags = [ + os.path.join(args.bag, base_name) + for base_name in os.listdir(args.bag) + if base_name.endswith(args.rosbag_format) + ] + for bag_file in sorted(bags, key=get_starting_time): + self.load_rosbag(bag_file) else: self.load_rosbag(args.bag) print("Ended loading rosbag") - # temporary support old auto msgs - if self.is_auto_traffic_signals: - self.auto_traffic_signals_pub = self.create_publisher( - AutoTrafficSignalArray, "/perception/traffic_light_recognition/traffic_signals", 1 - ) - else: - self.traffic_signals_pub = self.create_publisher( - TrafficSignalArray, "/perception/traffic_light_recognition/traffic_signals", 1 - ) - # wait for ready to publish/subscribe time.sleep(1.0) def on_odom(self, odom): - self.ego_pose = odom.pose.pose + self.ego_odom = odom def load_rosbag(self, rosbag2_path: str): reader = open_reader(str(rosbag2_path)) @@ -126,14 +128,18 @@ def load_rosbag(self, rosbag2_path: str): msg_type = get_message(type_map[topic]) msg = deserialize_message(data, msg_type) if topic == objects_topic: + assert isinstance( + msg, self.objects_pub.msg_type + ), f"Unsupported conversion from {type(msg)}" self.rosbag_objects_data.append((stamp, msg)) if topic == ego_odom_topic: + assert isinstance(msg, Odometry), f"Unsupported conversion from {type(msg)}" self.rosbag_ego_odom_data.append((stamp, msg)) if topic == traffic_signals_topic: + assert isinstance( + msg, TrafficSignalArray + ), f"Unsupported conversion from {type(msg)}" self.rosbag_traffic_signals_data.append((stamp, msg)) - self.is_auto_traffic_signals = ( - "autoware_auto_perception_msgs" in type(msg).__module__ - ) def kill_online_perception_node(self): # kill node if required @@ -153,6 +159,9 @@ def kill_online_perception_node(self): pass def binary_search(self, data, timestamp): + if not data: + return None + if data[-1][0] < timestamp: return data[-1][1] elif data[0][0] > timestamp: diff --git a/planning/planning_debug_tools/scripts/perception_replayer/perception_reproducer.py b/planning/planning_debug_tools/scripts/perception_replayer/perception_reproducer.py index b2b6a3c0ef38e..3f63321b0fba2 100755 --- a/planning/planning_debug_tools/scripts/perception_replayer/perception_reproducer.py +++ b/planning/planning_debug_tools/scripts/perception_replayer/perception_reproducer.py @@ -15,8 +15,10 @@ # limitations under the License. 
import argparse +from collections import deque import pickle +from geometry_msgs.msg import PoseWithCovarianceStamped import numpy as np from perception_replayer_common import PerceptionReplayerCommon import rclpy @@ -27,16 +29,44 @@ class PerceptionReproducer(PerceptionReplayerCommon): def __init__(self, args): + self.rosbag_ego_odom_search_radius = args.search_radius + self.ego_odom_search_radius = self.rosbag_ego_odom_search_radius + self.reproduce_cool_down = args.reproduce_cool_down if args.search_radius != 0.0 else 0.0 + super().__init__(args, "perception_reproducer") - self.prev_traffic_signals_msg = None self.stopwatch = StopWatch(self.args.verbose) # for debug + # refresh cool down for setting initial pose in psim. + self.sub_init_pos = self.create_subscription( + PoseWithCovarianceStamped, "/initialpose", lambda msg: self.cool_down_indices.clear(), 1 + ) + # to make some data to accelerate computation self.preprocess_data() + self.reproduce_sequence_indices = deque() # contains ego_odom_idx + self.cool_down_indices = deque() # contains ego_odom_idx + self.ego_odom_id2last_published_timestamp = {} # for checking last published timestamp. + self.last_sequenced_ego_pose = None + + pose_timestamp, self.prev_ego_odom_msg = self.rosbag_ego_odom_data[0] + self.perv_objects_msg, self.prev_traffic_signals_msg = self.find_topics_by_timestamp( + pose_timestamp + ) + self.memorized_original_objects_msg = ( + self.memorized_noised_objects_msg + ) = self.perv_objects_msg + # start main timer callback - self.timer = self.create_timer(0.1, self.on_timer) + + average_ego_odom_interval = np.mean( + [ + (self.rosbag_ego_odom_data[i][0] - self.rosbag_ego_odom_data[i - 1][0]) / 1e9 + for i in range(1, len(self.rosbag_ego_odom_data)) + ] + ) + self.timer = self.create_timer(average_ego_odom_interval, self.on_timer) # kill perception process to avoid a conflict of the perception topics self.timer_check_perception_process = self.create_timer(3.0, self.on_timer_kill_perception) @@ -60,84 +90,206 @@ def on_timer(self): print("\n-- on_timer start --") self.stopwatch.tic("total on_timer") - timestamp = self.get_clock().now().to_msg() + timestamp = self.get_clock().now() + timestamp_msg = timestamp.to_msg() if self.args.detected_object: - pointcloud_msg = create_empty_pointcloud(timestamp) + pointcloud_msg = create_empty_pointcloud(timestamp_msg) self.pointcloud_pub.publish(pointcloud_msg) - if not self.ego_pose: - print("No ego pose found.") + if not self.ego_odom: + print("No ego odom found.") return - # find nearest ego odom by simulation observation - self.stopwatch.tic("find_nearest_ego_odom_by_observation") - ego_odom = self.find_nearest_ego_odom_by_observation(self.ego_pose) - pose_timestamp = ego_odom[0] - log_ego_pose = ego_odom[1].pose.pose - self.stopwatch.toc("find_nearest_ego_odom_by_observation") + ego_pose = self.ego_odom.pose.pose + dist_moved = ( + np.sqrt( + (ego_pose.position.x - self.last_sequenced_ego_pose.position.x) ** 2 + + (ego_pose.position.y - self.last_sequenced_ego_pose.position.y) ** 2 + ) + if self.last_sequenced_ego_pose + else 999 + ) - # extract message by the nearest ego odom timestamp - self.stopwatch.tic("find_topics_by_timestamp") - msgs_orig = self.find_topics_by_timestamp(pose_timestamp) - self.stopwatch.toc("find_topics_by_timestamp") + # Update the reproduce sequence if the distance moved is greater than the search radius. 
+ if dist_moved > self.ego_odom_search_radius: + self.last_sequenced_ego_pose = ego_pose - # copy the messages - self.stopwatch.tic("message deepcopy") - if self.args.detected_object: - msgs = pickle.loads(pickle.dumps(msgs_orig)) # this is x5 faster than deepcopy - objects_msg = msgs[0] - traffic_signals_msg = msgs[1] + # find the nearest ego odom by simulation observation + self.stopwatch.tic("find_nearest_ego_odom_by_observation") + nearby_ego_odom_indies = self.find_nearby_ego_odom_indies( + [ego_pose], self.ego_odom_search_radius + ) + nearby_ego_odom_indies = [ + self.rosbag_ego_odom_data[idx][1].pose.pose for idx in nearby_ego_odom_indies + ] + if not nearby_ego_odom_indies: + nearest_ego_odom_ind = self.find_nearest_ego_odom_index(ego_pose) + nearby_ego_odom_indies += [ + self.rosbag_ego_odom_data[nearest_ego_odom_ind][1].pose.pose + ] + self.stopwatch.toc("find_nearest_ego_odom_by_observation") + + # find the list of rosbag ego odom indices around the nearby ego odom poses. + self.stopwatch.tic("find_nearby_ego_odom_indies") + ego_odom_indices = self.find_nearby_ego_odom_indies( + nearby_ego_odom_indies, self.rosbag_ego_odom_search_radius + ) + self.stopwatch.toc("find_nearby_ego_odom_indies") + + # update reproduce_sequence with the indices that are not in the cool down list. + while self.cool_down_indices: + last_timestamp = self.ego_odom_id2last_published_timestamp[ + self.cool_down_indices[0] + ] + if (timestamp - last_timestamp).nanoseconds / 1e9 > self.reproduce_cool_down: + self.cool_down_indices.popleft() + else: + break + + self.stopwatch.tic("update reproduce_sequence") + ego_odom_indices = [ + idx for idx in ego_odom_indices if idx not in self.cool_down_indices + ] + ego_odom_indices = sorted(ego_odom_indices) + self.reproduce_sequence_indices = deque(ego_odom_indices) + self.stopwatch.toc("update reproduce_sequence") + + if self.args.verbose: + print("reproduce_sequence_indices: ", list(self.reproduce_sequence_indices)[:20]) + + # Get messages + repeat_flag = len(self.reproduce_sequence_indices) == 0 + + # Add an additional constraint to avoid publishing too fast when there is a speed gap between the ego and the rosbag's ego, e.g. when the ego is departing or stopping while the rosbag's ego is moving. + if not repeat_flag: + ego_speed = np.sqrt( + self.ego_odom.twist.twist.linear.x**2 + self.ego_odom.twist.twist.linear.y**2 + ) + ego_odom_idx = self.reproduce_sequence_indices[0] + _, ego_odom_msg = self.rosbag_ego_odom_data[ego_odom_idx] + ego_rosbag_speed = np.sqrt( + ego_odom_msg.twist.twist.linear.x**2 + ego_odom_msg.twist.twist.linear.y**2 + ) + + ego_rosbag_dist = np.sqrt( + (ego_pose.position.x - ego_odom_msg.pose.pose.position.x) ** 2 + + (ego_pose.position.y - ego_odom_msg.pose.pose.position.y) ** 2 + ) + repeat_flag = ( + ego_rosbag_speed > ego_speed * 2 + and ego_rosbag_speed > 3.0 + and ego_rosbag_dist > self.ego_odom_search_radius + ) + # if ego_rosbag_speed is much faster than ego_speed, stop publishing the rosbag's ego odom messages temporarily. + + if not repeat_flag: + self.stopwatch.tic("find_topics_by_timestamp") + ego_odom_idx = self.reproduce_sequence_indices.popleft() + # extract messages by the nearest ego odom timestamp + pose_timestamp, ego_odom_msg = self.rosbag_ego_odom_data[ego_odom_idx] + objects_msg, traffic_signals_msg = self.find_topics_by_timestamp(pose_timestamp) + self.stopwatch.toc("find_topics_by_timestamp") + # update cool down info.
+ self.ego_odom_id2last_published_timestamp[ego_odom_idx] = timestamp + self.cool_down_indices.append(ego_odom_idx) else: - # NOTE: No need to deepcopy since only timestamp will be changed and it will be changed every time correctly. - objects_msg = msgs_orig[0] - traffic_signals_msg = msgs_orig[1] - self.stopwatch.toc("message deepcopy") + ego_odom_msg = self.prev_ego_odom_msg + objects_msg = self.perv_objects_msg + traffic_signals_msg = self.prev_traffic_signals_msg + # Transform and publish messages. self.stopwatch.tic("transform and publish") + # ego odom + if ego_odom_msg: + self.prev_ego_odom_msg = ego_odom_msg + self.recorded_ego_pub.publish(ego_odom_msg) # objects + objects_msg = objects_msg if objects_msg else self.perv_objects_msg if objects_msg: - objects_msg.header.stamp = timestamp + self.perv_objects_msg = objects_msg + objects_msg.header.stamp = timestamp_msg + + # add noise to repeat published objects + if repeat_flag and self.args.noise: + objects_msg = self.add_perception_noise(objects_msg) + if self.args.detected_object: - translate_objects_coordinate(self.ego_pose, log_ego_pose, objects_msg) - self.objects_pub.publish(objects_msg) + objects_msg = self.copy_message(objects_msg) + translate_objects_coordinate(ego_pose, ego_odom_msg.pose.pose, objects_msg) - # ego odom - self.recorded_ego_pub.publish(ego_odom[1]) + self.objects_pub.publish(objects_msg) # traffic signals - # temporary support old auto msgs + traffic_signals_msg = ( + traffic_signals_msg if traffic_signals_msg else self.prev_traffic_signals_msg + ) if traffic_signals_msg: - if self.is_auto_traffic_signals: - traffic_signals_msg.header.stamp = timestamp - self.auto_traffic_signals_pub.publish(traffic_signals_msg) - else: - traffic_signals_msg.stamp = timestamp - self.traffic_signals_pub.publish(traffic_signals_msg) + traffic_signals_msg.stamp = timestamp_msg self.prev_traffic_signals_msg = traffic_signals_msg - elif self.prev_traffic_signals_msg: - if self.is_auto_traffic_signals: - self.prev_traffic_signals_msg.header.stamp = timestamp - self.auto_traffic_signals_pub.publish(self.prev_traffic_signals_msg) - else: - self.prev_traffic_signals_msg.stamp = timestamp - self.traffic_signals_pub.publish(self.prev_traffic_signals_msg) + self.traffic_signals_pub.publish(traffic_signals_msg) + self.stopwatch.toc("transform and publish") self.stopwatch.toc("total on_timer") - def find_nearest_ego_odom_by_observation(self, ego_pose): + def find_nearest_ego_odom_index(self, ego_pose): # nearest search with numpy format is much (~ x100) faster than regular for loop self_pose = np.array([ego_pose.position.x, ego_pose.position.y]) dists_squared = np.sum((self.rosbag_ego_odom_data_numpy - self_pose) ** 2, axis=1) nearest_idx = np.argmin(dists_squared) + return nearest_idx + + def find_nearby_ego_odom_indies(self, ego_poses: list, search_radius: float): + ego_poses_np = np.array([[pose.position.x, pose.position.y] for pose in ego_poses]) + dists_squared = np.sum( + (self.rosbag_ego_odom_data_numpy[:, None] - ego_poses_np) ** 2, axis=2 + ) + nearby_indices = np.where(np.any(dists_squared <= search_radius**2, axis=1))[0] + + return nearby_indices - return self.rosbag_ego_odom_data[nearest_idx] + def copy_message(self, msg): + self.stopwatch.tic("message deepcopy") + objects_msg_copied = pickle.loads(pickle.dumps(msg)) # this is x5 faster than deepcopy + self.stopwatch.toc("message deepcopy") + return objects_msg_copied + + def add_perception_noise( + self, objects_msg, update_rate=0.03, x_noise_std=0.1, y_noise_std=0.05 + ): + 
if self.memorized_original_objects_msg != objects_msg: + self.memorized_noised_objects_msg = self.memorized_original_objects_msg = objects_msg + + if np.random.rand() < update_rate: + self.stopwatch.tic("add noise") + self.memorized_noised_objects_msg = self.copy_message( + self.memorized_original_objects_msg + ) + for obj in self.memorized_noised_objects_msg.objects: + noise_x = np.random.normal(0, x_noise_std) + noise_y = np.random.normal(0, y_noise_std) + if self.args.detected_object or self.args.tracked_object: + obj.kinematics.pose_with_covariance.pose.position.x += noise_x + obj.kinematics.pose_with_covariance.pose.position.y += noise_y + else: + obj.kinematics.initial_pose_with_covariance.pose.position.x += noise_x + obj.kinematics.initial_pose_with_covariance.pose.position.y += noise_y + self.stopwatch.toc("add noise") + + return self.memorized_noised_objects_msg if __name__ == "__main__": parser = argparse.ArgumentParser() parser.add_argument("-b", "--bag", help="rosbag", default=None) + parser.add_argument( + "-n", + "--noise", + help="apply perception noise to the objects when publishing repeated messages", + action="store_true", + default=True, + ) parser.add_argument( "-d", "--detected-object", help="publish detected object", action="store_true" ) @@ -150,6 +302,21 @@ def find_nearest_ego_odom_by_observation(self, ego_pose): parser.add_argument( "-v", "--verbose", help="output debug data", action="store_true", default=False ) + parser.add_argument( + "-r", + "--search-radius", + help="the search radius for searching the rosbag's ego odom messages around the nearest ego odom pose (default: 1.5 m). If the search radius is set to 0, the closest message is always published, just as the old reproducer did.", + type=float, + default=1.5, + ) + parser.add_argument( + "-c", + "--reproduce-cool-down", + help="the cool down time before an already published message can be republished (default: 80.0 s). Make sure that it is greater than the ego's stopping time.", + type=float, + default=80.0, + ) + args = parser.parse_args() rclpy.init() diff --git a/planning/planning_debug_tools/scripts/perception_replayer/perception_replayer/utils.py b/planning/planning_debug_tools/scripts/perception_replayer/utils.py index 7e8e0a18b4b7c..22b4296bdef94 100644 --- a/planning/planning_debug_tools/scripts/perception_replayer/utils.py +++ b/planning/planning_debug_tools/scripts/perception_replayer/utils.py @@ -26,6 +26,11 @@ from tf_transformations import quaternion_from_euler +def get_starting_time(uri: str): + info = rosbag2_py.Info().read_metadata(uri, "sqlite3") + return info.starting_time + + def get_rosbag_options(path, serialization_format="cdr"): storage_options = rosbag2_py.StorageOptions(uri=path, storage_id="sqlite3")
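# --- Illustration only, not part of the patch above: a minimal standalone sketch of the
# radius-based lookup that find_nearby_ego_odom_indies performs with numpy broadcasting.
# Squared distances from every recorded rosbag ego odom position to every query pose are
# computed at once, and an index is kept when it lies within the search radius of at least
# one query pose. The variable names and toy coordinates below are assumptions for the
# example, not code from the repository.
import numpy as np

# toy stand-in for rosbag_ego_odom_data_numpy: recorded ego odom xy positions, shape (N, 2)
recorded_xy = np.array([[0.0, 0.0], [1.0, 0.0], [5.0, 0.0], [9.0, 0.0]])
# query positions (here a single current ego position), shape (M, 2)
query_xy = np.array([[0.5, 0.0]])
search_radius = 1.5  # meters, mirroring the --search-radius default

# broadcast to shape (N, M): squared distance from each recorded point to each query point
dists_squared = np.sum((recorded_xy[:, None] - query_xy) ** 2, axis=2)
# keep the indices whose distance to any query point is within the radius
nearby_indices = np.where(np.any(dists_squared <= search_radius**2, axis=1))[0]
print(nearby_indices)  # [0 1]: only the first two recorded poses lie within 1.5 m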