From 821d14a17d8fa05e62c3f545867c58de8fe60f6c Mon Sep 17 00:00:00 2001 From: Go Sakayori Date: Fri, 21 Jun 2024 17:08:24 +0900 Subject: [PATCH 01/29] refactor(static_obstacle_avoidance): change logger name for utils (#7617) change logger name for static avoidance utils Signed-off-by: Go Sakayori --- .../behavior_path_static_obstacle_avoidance_module/utils.hpp | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/planning/behavior_path_planner/autoware_behavior_path_static_obstacle_avoidance_module/include/autoware/behavior_path_static_obstacle_avoidance_module/utils.hpp b/planning/behavior_path_planner/autoware_behavior_path_static_obstacle_avoidance_module/include/autoware/behavior_path_static_obstacle_avoidance_module/utils.hpp index 1f68ef7d49c47..3797fe0d55147 100644 --- a/planning/behavior_path_planner/autoware_behavior_path_static_obstacle_avoidance_module/include/autoware/behavior_path_static_obstacle_avoidance_module/utils.hpp +++ b/planning/behavior_path_planner/autoware_behavior_path_static_obstacle_avoidance_module/include/autoware/behavior_path_static_obstacle_avoidance_module/utils.hpp @@ -34,7 +34,8 @@ using autoware::behavior_path_planner::utils::path_safety_checker::PoseWithVeloc using autoware::behavior_path_planner::utils::path_safety_checker::PredictedPathWithPolygon; static constexpr const char * logger_namespace = - "planning.scenario_planning.lane_driving.behavior_planning.behavior_path_planner.avoidance.utils"; + "planning.scenario_planning.lane_driving.behavior_planning.behavior_path_planner.static_obstacle_" + "avoidance.utils"; bool isOnRight(const ObjectData & obj); From 84994174e1d9bf08279802c324305c7fc0efd02a Mon Sep 17 00:00:00 2001 From: Zulfaqar Azmi <93502286+zulfaqar-azmi-t4@users.noreply.github.com> Date: Fri, 21 Jun 2024 17:59:05 +0900 Subject: [PATCH 02/29] refactor(lane_change): use lane change namespace for structs (#7508) * refactor(lane_change): use lane change namespace for structs Signed-off-by: Zulfaqar Azmi * Move lane change namespace to bottom level Signed-off-by: Zulfaqar Azmi --------- Signed-off-by: Zulfaqar Azmi --- .../behavior_path_lane_change_module/scene.hpp | 2 +- .../utils/base_class.hpp | 6 +++--- .../utils/data_structs.hpp | 11 ++--------- .../utils/debug_structs.hpp | 4 ++-- .../utils/markers.hpp | 2 +- .../behavior_path_lane_change_module/utils/utils.hpp | 4 ++-- 6 files changed, 11 insertions(+), 18 deletions(-) diff --git a/planning/behavior_path_planner/autoware_behavior_path_lane_change_module/include/autoware/behavior_path_lane_change_module/scene.hpp b/planning/behavior_path_planner/autoware_behavior_path_lane_change_module/include/autoware/behavior_path_lane_change_module/scene.hpp index 69e9e90a236b9..be62492b7c2cc 100644 --- a/planning/behavior_path_planner/autoware_behavior_path_lane_change_module/include/autoware/behavior_path_lane_change_module/scene.hpp +++ b/planning/behavior_path_planner/autoware_behavior_path_lane_change_module/include/autoware/behavior_path_lane_change_module/scene.hpp @@ -31,10 +31,10 @@ using autoware::behavior_path_planner::utils::path_safety_checker:: using autoware::behavior_path_planner::utils::path_safety_checker::PoseWithVelocityStamped; using autoware::behavior_path_planner::utils::path_safety_checker::PredictedPathWithPolygon; using autoware::route_handler::Direction; -using data::lane_change::LanesPolygon; using geometry_msgs::msg::Point; using geometry_msgs::msg::Pose; using geometry_msgs::msg::Twist; +using lane_change::LanesPolygon; using 
tier4_planning_msgs::msg::PathWithLaneId; using utils::path_safety_checker::ExtendedPredictedObjects; diff --git a/planning/behavior_path_planner/autoware_behavior_path_lane_change_module/include/autoware/behavior_path_lane_change_module/utils/base_class.hpp b/planning/behavior_path_planner/autoware_behavior_path_lane_change_module/include/autoware/behavior_path_lane_change_module/utils/base_class.hpp index 229f46f89377f..e8e83a0703a76 100644 --- a/planning/behavior_path_planner/autoware_behavior_path_lane_change_module/include/autoware/behavior_path_lane_change_module/utils/base_class.hpp +++ b/planning/behavior_path_planner/autoware_behavior_path_lane_change_module/include/autoware/behavior_path_lane_change_module/utils/base_class.hpp @@ -38,10 +38,10 @@ namespace autoware::behavior_path_planner { using autoware::route_handler::Direction; using autoware::universe_utils::StopWatch; -using data::lane_change::PathSafetyStatus; using geometry_msgs::msg::Point; using geometry_msgs::msg::Pose; using geometry_msgs::msg::Twist; +using lane_change::PathSafetyStatus; using tier4_planning_msgs::msg::PathWithLaneId; class LaneChangeBase @@ -126,7 +126,7 @@ class LaneChangeBase const LaneChangeStatus & getLaneChangeStatus() const { return status_; } - const data::lane_change::Debug & getDebugData() const { return lane_change_debug_; } + const lane_change::Debug & getDebugData() const { return lane_change_debug_; } const Pose & getEgoPose() const { return planner_data_->self_odometry->pose.pose; } @@ -233,7 +233,7 @@ class LaneChangeBase LaneChangeModuleType type_{LaneChangeModuleType::NORMAL}; mutable StopWatch stop_watch_; - mutable data::lane_change::Debug lane_change_debug_; + mutable lane_change::Debug lane_change_debug_; rclcpp::Logger logger_ = utils::lane_change::getLogger(getModuleTypeStr()); mutable rclcpp::Clock clock_{RCL_ROS_TIME}; diff --git a/planning/behavior_path_planner/autoware_behavior_path_lane_change_module/include/autoware/behavior_path_lane_change_module/utils/data_structs.hpp b/planning/behavior_path_planner/autoware_behavior_path_lane_change_module/include/autoware/behavior_path_lane_change_module/utils/data_structs.hpp index 0facd8c077505..e2afb6abeec03 100644 --- a/planning/behavior_path_planner/autoware_behavior_path_lane_change_module/include/autoware/behavior_path_lane_change_module/utils/data_structs.hpp +++ b/planning/behavior_path_planner/autoware_behavior_path_lane_change_module/include/autoware/behavior_path_lane_change_module/utils/data_structs.hpp @@ -189,13 +189,6 @@ struct LaneChangeInfo double terminal_lane_changing_velocity{0.0}; }; -struct LaneChangeTargetObjectIndices -{ - std::vector current_lane{}; - std::vector target_lane{}; - std::vector other_lane{}; -}; - struct LaneChangeLanesFilteredObjects { utils::path_safety_checker::ExtendedPredictedObjects current_lane{}; @@ -210,7 +203,7 @@ enum class LaneChangeModuleType { }; } // namespace autoware::behavior_path_planner -namespace autoware::behavior_path_planner::data::lane_change +namespace autoware::behavior_path_planner::lane_change { struct PathSafetyStatus { @@ -224,6 +217,6 @@ struct LanesPolygon std::optional target; std::vector target_backward; }; -} // namespace autoware::behavior_path_planner::data::lane_change +} // namespace autoware::behavior_path_planner::lane_change #endif // AUTOWARE__BEHAVIOR_PATH_LANE_CHANGE_MODULE__UTILS__DATA_STRUCTS_HPP_ diff --git 
a/planning/behavior_path_planner/autoware_behavior_path_lane_change_module/include/autoware/behavior_path_lane_change_module/utils/debug_structs.hpp b/planning/behavior_path_planner/autoware_behavior_path_lane_change_module/include/autoware/behavior_path_lane_change_module/utils/debug_structs.hpp index 3b8159dc3ad47..1890b9f308e8e 100644 --- a/planning/behavior_path_planner/autoware_behavior_path_lane_change_module/include/autoware/behavior_path_lane_change_module/utils/debug_structs.hpp +++ b/planning/behavior_path_planner/autoware_behavior_path_lane_change_module/include/autoware/behavior_path_lane_change_module/utils/debug_structs.hpp @@ -26,7 +26,7 @@ #include #include -namespace autoware::behavior_path_planner::data::lane_change +namespace autoware::behavior_path_planner::lane_change { using utils::path_safety_checker::CollisionCheckDebugMap; struct Debug @@ -71,6 +71,6 @@ struct Debug is_abort = false; } }; -} // namespace autoware::behavior_path_planner::data::lane_change +} // namespace autoware::behavior_path_planner::lane_change #endif // AUTOWARE__BEHAVIOR_PATH_LANE_CHANGE_MODULE__UTILS__DEBUG_STRUCTS_HPP_ diff --git a/planning/behavior_path_planner/autoware_behavior_path_lane_change_module/include/autoware/behavior_path_lane_change_module/utils/markers.hpp b/planning/behavior_path_planner/autoware_behavior_path_lane_change_module/include/autoware/behavior_path_lane_change_module/utils/markers.hpp index c40db47adf280..91d1f1db15cbc 100644 --- a/planning/behavior_path_planner/autoware_behavior_path_lane_change_module/include/autoware/behavior_path_lane_change_module/utils/markers.hpp +++ b/planning/behavior_path_planner/autoware_behavior_path_lane_change_module/include/autoware/behavior_path_lane_change_module/utils/markers.hpp @@ -30,7 +30,7 @@ namespace marker_utils::lane_change_markers { using autoware::behavior_path_planner::LaneChangePath; -using autoware::behavior_path_planner::data::lane_change::Debug; +using autoware::behavior_path_planner::lane_change::Debug; using autoware::behavior_path_planner::utils::path_safety_checker::ExtendedPredictedObjects; using visualization_msgs::msg::MarkerArray; MarkerArray showAllValidLaneChangePath( diff --git a/planning/behavior_path_planner/autoware_behavior_path_lane_change_module/include/autoware/behavior_path_lane_change_module/utils/utils.hpp b/planning/behavior_path_planner/autoware_behavior_path_lane_change_module/include/autoware/behavior_path_lane_change_module/utils/utils.hpp index 4071c39568987..e25b67bdb73d8 100644 --- a/planning/behavior_path_planner/autoware_behavior_path_lane_change_module/include/autoware/behavior_path_lane_change_module/utils/utils.hpp +++ b/planning/behavior_path_planner/autoware_behavior_path_lane_change_module/include/autoware/behavior_path_lane_change_module/utils/utils.hpp @@ -48,8 +48,8 @@ using autoware::universe_utils::Polygon2d; using autoware_perception_msgs::msg::PredictedObject; using autoware_perception_msgs::msg::PredictedObjects; using autoware_perception_msgs::msg::PredictedPath; -using data::lane_change::LanesPolygon; -using data::lane_change::PathSafetyStatus; +using behavior_path_planner::lane_change::LanesPolygon; +using behavior_path_planner::lane_change::PathSafetyStatus; using geometry_msgs::msg::Point; using geometry_msgs::msg::Pose; using geometry_msgs::msg::Twist; From a91bfeb0d4f4a33f1e52400099daf764b288d65c Mon Sep 17 00:00:00 2001 From: Ryuta Kambe Date: Fri, 21 Jun 2024 18:10:05 +0900 Subject: [PATCH 03/29] fix(pointcloud_preprocessor): fix arrayIndexThenCheck warning 
(#7547) * fix(pointcloud_preprocessor): fix arrayIndexThenCheck waarning Signed-off-by: Ryuta Kambe * fix deleted_azimuths's index Signed-off-by: Ryuta Kambe * style(pre-commit): autofix * fix typo Signed-off-by: Ryuta Kambe --------- Signed-off-by: Ryuta Kambe Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- .../outlier_filter/dual_return_outlier_filter_nodelet.cpp | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/sensing/pointcloud_preprocessor/src/outlier_filter/dual_return_outlier_filter_nodelet.cpp b/sensing/pointcloud_preprocessor/src/outlier_filter/dual_return_outlier_filter_nodelet.cpp index ccceae9ab5186..ffbf322e669dc 100644 --- a/sensing/pointcloud_preprocessor/src/outlier_filter/dual_return_outlier_filter_nodelet.cpp +++ b/sensing/pointcloud_preprocessor/src/outlier_filter/dual_return_outlier_filter_nodelet.cpp @@ -231,10 +231,10 @@ void DualReturnOutlierFilterComponent::filter( if (deleted_azimuths.size() == 0) { continue; } - while ((uint)deleted_azimuths[current_deleted_index] < + while (current_deleted_index < deleted_azimuths.size() && + (uint)deleted_azimuths[current_deleted_index] < ((i + static_cast(min_azimuth / horizontal_resolution) + 1) * - horizontal_resolution) && - current_deleted_index < (deleted_azimuths.size() - 1)) { + horizontal_resolution)) { noise_frequency[i] = noise_frequency[i] + 1; current_deleted_index++; } From b91298ba044c20b2cb64bc350bfc1be4350e0383 Mon Sep 17 00:00:00 2001 From: danielsanchezaran Date: Sat, 22 Jun 2024 09:59:15 +0900 Subject: [PATCH 04/29] feat(autonomous_emergency_braking): add cluster min height for aeb (#7605) * add minimum cluster height threshold Signed-off-by: Daniel Sanchez * add update param option Signed-off-by: Daniel Sanchez * use param Signed-off-by: Daniel Sanchez * avoid the float check if cluster_surpasses_threshold_height is already true Signed-off-by: Daniel Sanchez * update README Signed-off-by: Daniel Sanchez * add cluster height description Signed-off-by: Daniel Sanchez --------- Signed-off-by: Daniel Sanchez --- .../README.md | 60 +- .../autonomous_emergency_braking.param.yaml | 1 + .../image/closest-point.drawio.svg | 245 ++++++++ .../image/obstacle_filtering_2.drawio.svg | 575 +++++++----------- .../image/using-predicted-objects.drawio.svg | 114 ++++ .../autonomous_emergency_braking/node.hpp | 1 + .../src/node.cpp | 10 +- 7 files changed, 645 insertions(+), 361 deletions(-) create mode 100644 control/autoware_autonomous_emergency_braking/image/closest-point.drawio.svg create mode 100644 control/autoware_autonomous_emergency_braking/image/using-predicted-objects.drawio.svg diff --git a/control/autoware_autonomous_emergency_braking/README.md b/control/autoware_autonomous_emergency_braking/README.md index 2f3f5f08d4156..003611b83ae88 100644 --- a/control/autoware_autonomous_emergency_braking/README.md +++ b/control/autoware_autonomous_emergency_braking/README.md @@ -12,7 +12,9 @@ This module has following assumptions. - The predicted path of the ego vehicle can be made from either the path created from sensors or the path created from a control module, or both. -- The current speed and angular velocity can be obtained from the sensors of the ego vehicle, and it uses point cloud as obstacles. +- The current speed and angular velocity can be obtained from the sensors of the ego vehicle, and it uses points as obstacles. 
+ +- The AEBs target obstacles are 2D points that can be obtained from the input point cloud or by obtaining the intersection points between the predicted ego footprint path and a predicted object's shape. ### IMU path generation: steering angle vs IMU's angular velocity @@ -40,11 +42,13 @@ AEB has the following steps before it outputs the emergency stop signal. 2. Generate a predicted path of the ego vehicle. -3. Get target obstacles from the input point cloud. +3. Get target obstacles from the input point cloud and/or predicted object data. + +4. Estimate the closest obstacle speed. -4. Collision check with target obstacles. +5. Collision check with target obstacles. -5. Send emergency stop signals to `/diagnostics`. +6. Send emergency stop signals to `/diagnostics`. We give more details of each section below. @@ -58,7 +62,7 @@ We do not activate AEB module if it satisfies the following conditions. ### 2. Generate a predicted path of the ego vehicle -AEB generates a predicted path based on current velocity and current angular velocity obtained from attached sensors. Note that if `use_imu_path` is `false`, it skips this step. This predicted path is generated as: +AEB generates a predicted footprint path based on current velocity and current angular velocity obtained from attached sensors. Note that if `use_imu_path` is `false`, it skips this step. This predicted path is generated as: $$ x_{k+1} = x_k + v cos(\theta_k) dt \\ @@ -68,29 +72,47 @@ $$ where $v$ and $\omega$ are current longitudinal velocity and angular velocity respectively. $dt$ is time interval that users can define in advance. -### 3. Get target obstacles from the input point cloud +On the other hand, if `use_predicted_trajectory` is set to true, the AEB module will use the predicted path from the MPC as a base to generate a footprint path. Both the IMU footprint path and the MPC footprint path can be used at the same time. + +### 3. Get target obstacles + +After generating the ego footprint path(s), the target obstacles are identified. There are two methods to find target obstacles: using the input point cloud, or using the predicted object information coming from perception modules. -After generating the ego predicted path, we select target obstacles from the input point cloud. This obstacle filtering has three major steps, which are rough filtering, noise filtering with clustering and rigorous filtering. +#### Pointcloud obstacle filtering -#### Rough filtering +The AEB module can filter the input pointcloud to find target obstacles with which the ego vehicle might collide. This method can be enable if the `use_pointcloud_data` parameter is set to true. The pointcloud obstacle filtering has three major steps, which are rough filtering, noise filtering with clustering and rigorous filtering. -In rough filtering step, we select target obstacle with simple filter. Create a search area up to a certain distance (default is half of the ego vehicle width plus the `path_footprint_extra_margin` parameter) away from the predicted path of the ego vehicle and ignore the point cloud that are not within it. The image of the rough filtering is illustrated below. +##### Rough filtering + +In rough filtering step, we select target obstacle with simple filter. Create a search area up to a certain distance (default is half of the ego vehicle width plus the `path_footprint_extra_margin` parameter) away from the predicted path of the ego vehicle and ignore the point cloud that are not within it. The rough filtering step is illustrated below. 
![rough_filtering](./image/obstacle_filtering_1.drawio.svg) -#### Noise filtering with clustering and convex hulls +##### Noise filtering with clustering and convex hulls -To prevent the AEB from considering noisy points, euclidean clustering is performed on the filtered point cloud. The points in the point cloud that are not close enough to other points to form a cluster are discarded. The parameters `cluster_tolerance`, `minimum_cluster_size` and `maximum_cluster_size` can be used to tune the clustering and the size of objects to be ignored, for more information about the clustering method used by the AEB module, please check the official documentation on euclidean clustering of the PCL library: . +To prevent the AEB from considering noisy points, euclidean clustering is performed on the filtered point cloud. The points in the point cloud that are not close enough to other points to form a cluster are discarded. Furthermore, each point in a cluster is compared against the `cluster_minimum_height` parameter, if no point inside a cluster has a height/z value greater than `cluster_minimum_height`, the whole cluster of points is discarded. The parameters `cluster_tolerance`, `minimum_cluster_size` and `maximum_cluster_size` can be used to tune the clustering and the size of objects to be ignored, for more information about the clustering method used by the AEB module, please check the official documentation on euclidean clustering of the PCL library: . Furthermore, a 2D convex hull is created around each detected cluster, the vertices of each hull represent the most extreme/outside points of the cluster. These vertices are then checked in the next step. -#### Rigorous filtering +##### Rigorous filtering -After Noise filtering, the module performs a geometric collision check to determine whether the filtered obstacles/hull vertices actually have possibility to collide with the ego vehicle. In this check, the ego vehicle is represented as a rectangle, and the point cloud obstacles are represented as points. Only the vertices with a possibility of collision are kept. Finally, the vertex that is closest to the ego vehicle is chosen as the candidate for collision checking: Since rss distance is used to judge if a collision will happen or not, if the closest vertex to the ego is deemed to be safe, the rest of the vertices (and the points in the clusters) will also be safe. +After Noise filtering, the module performs a geometric collision check to determine whether the filtered obstacles/hull vertices actually have possibility to collide with the ego vehicle. In this check, the ego vehicle is represented as a rectangle, and the point cloud obstacles are represented as points. Only the vertices with a possibility of collision are kept. ![rigorous_filtering](./image/obstacle_filtering_2.drawio.svg) -#### Obstacle velocity estimation +#### Using predicted objects to get target obstacles + +If the `use_predicted_object_data` parameter is set to true, the AEB can use predicted object data coming from the perception modules, to get target obstacle points. This is done by obtaining the 2D intersection points between the ego's predicted footprint path and each of the predicted objects enveloping polygon or bounding box. + +![predicted_object_and_path_intersection](./image/using-predicted-objects.drawio.svg) + +### Finding the closest target obstacle + +Once all target obstacles have been identified, the AEB module chooses the point that is closest to the ego vehicle as the candidate for collision checking. 
Only the closest point is considered because RSS distance is used to judge if a collision will happen or not, and if the closest vertex to the ego is deemed to be safe from collision, the rest of the target obstacles will also be safe. + +![closest_object](./image/closest-point.drawio.svg) + +### 4. Obstacle velocity estimation Once the position of the closest obstacle/point is determined, the AEB modules uses the history of previously detected objects to estimate the closest object relative speed using the following equations: @@ -110,7 +132,9 @@ Where $t_{1}$ and $t_{0}$ are the timestamps of the point clouds used to detect ![relative_speed](./image/object_relative_speed.drawio.svg) -Finally, the velocity vector is compared against the ego's predicted path to get the longitudinal velocity $v_{obj}$: +Note that, when the closest obstacle/point comes from using predicted object data, $v_{norm}$ is calculated by directly computing the norm of the predicted object's velocity in the x and y axes. + +The velocity vector is then compared against the ego's predicted path to get the longitudinal velocity $v_{obj}$: $$ v_{obj} = v_{norm} * Cos(yaw_{diff}) + v_{ego} @@ -120,7 +144,9 @@ where $yaw_{diff}$ is the difference in yaw between the ego path and the displac Note that, the object velocity is calculated against the ego's current movement direction. If the object moves in the opposite direction to the ego's movement, the object velocity is set to 0 m/s. That is because the RSS distance calculation assumes the ego and the object move in the same direction and it cannot deal with negative velocities. -### 4. Collision check with target obstacles using RSS distance +The resulting estimated object speed is added to a queue of speeds with timestamps. The AEB then checks for expiration of past speed estimations and eliminates expired speed measurements from the queue, the object expiration is determined by checking if the time elapsed since the speed was first added to the queue is larger than the parameter `previous_obstacle_keep_time`. Finally, the median speed of the queue is calculated. The median speed will be used to calculate the RSS distance used for collision checking. + +### 5. Collision check with target obstacles using RSS distance In the fourth step, it checks the collision with the closest obstacle point using RSS distance. RSS distance is formulated as: @@ -132,7 +158,7 @@ where $v_{ego}$ and $v_{obj}$ is current ego and obstacle velocity, $a_{min}$ an ![rss_check](./image/rss_check.drawio.svg) -### 5. Send emergency stop signals to `/diagnostics` +### 6. Send emergency stop signals to `/diagnostics` If AEB detects collision with point cloud obstacles in the previous step, it sends emergency signal to `/diagnostics` in this step. Note that in order to enable emergency stop, it has to send ERROR level emergency. Moreover, AEB user should modify the setting file to keep the emergency level, otherwise Autoware does not hold the emergency state. 
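The RSS-based collision check that the README hunks above describe can be illustrated with a short sketch. This is not the module's code: the exact expression used by `autoware_autonomous_emergency_braking` is not visible in this hunk, so the formula below is a generic RSS-style stopping-distance comparison, and the struct and parameter names (`t_response`, `a_ego_decel`, `a_obj_decel`, `longitudinal_offset`) are assumptions made for illustration only.

```cpp
// Illustrative sketch only -- a generic RSS-style check of the kind described
// in the README above, not the actual AEB module implementation.
#include <algorithm>
#include <cstdio>

struct RssParams
{
  double t_response;           // [s] assumed ego response time before braking starts
  double a_ego_decel;          // [m/s^2] assumed ego deceleration capability (positive)
  double a_obj_decel;          // [m/s^2] assumed obstacle deceleration capability (positive)
  double longitudinal_offset;  // [m] assumed extra margin added to the RSS distance
};

// Both velocities are assumed to point in the ego's travel direction; as the
// README notes, opposing object motion is clamped to 0 m/s before this check.
bool is_emergency(
  const double v_ego, const double v_obj, const double distance_to_obstacle, const RssParams & p)
{
  const double d_rss = v_ego * p.t_response + (v_ego * v_ego) / (2.0 * p.a_ego_decel) -
                       (v_obj * v_obj) / (2.0 * p.a_obj_decel) + p.longitudinal_offset;
  return distance_to_obstacle < std::max(d_rss, 0.0);
}

int main()
{
  const RssParams p{1.0, 3.0, 3.0, 1.0};
  // Ego at 10 m/s, obstacle at 2 m/s, closest obstacle point 15 m ahead.
  std::printf("emergency stop required: %s\n", is_emergency(10.0, 2.0, 15.0, p) ? "yes" : "no");
  return 0;
}
```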
diff --git a/control/autoware_autonomous_emergency_braking/config/autonomous_emergency_braking.param.yaml b/control/autoware_autonomous_emergency_braking/config/autonomous_emergency_braking.param.yaml index 344d076197170..482ddc50766f8 100644 --- a/control/autoware_autonomous_emergency_braking/config/autonomous_emergency_braking.param.yaml +++ b/control/autoware_autonomous_emergency_braking/config/autonomous_emergency_braking.param.yaml @@ -28,6 +28,7 @@ # Point cloud clustering cluster_tolerance: 0.1 #[m] + cluster_minimum_height: 0.0 minimum_cluster_size: 10 maximum_cluster_size: 10000 diff --git a/control/autoware_autonomous_emergency_braking/image/closest-point.drawio.svg b/control/autoware_autonomous_emergency_braking/image/closest-point.drawio.svg new file mode 100644 index 0000000000000..154bba1fc9152 --- /dev/null +++ b/control/autoware_autonomous_emergency_braking/image/closest-point.drawio.svg @@ -0,0 +1,245 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
[closest-point.drawio.svg: new draw.io figure for the README's "Finding the closest target obstacle" step. The only text labels in the figure are "Closest Point" and "Find the closest target obstacle"; the rest of the SVG markup is omitted here.]
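The figure above illustrates the closest-point selection described in the README hunk earlier in this patch: only the target point nearest to the ego is kept, because if it is safe under the RSS check the remaining points are safe as well. A minimal sketch of that selection follows; it is illustrative only, and the `Point2d` type and function name are assumptions rather than the module's API.

```cpp
// Illustrative sketch only: keep the single target point closest to the ego.
#include <algorithm>
#include <optional>
#include <vector>

struct Point2d
{
  double x{0.0};
  double y{0.0};
};

std::optional<Point2d> find_closest_target(
  const std::vector<Point2d> & target_points, const Point2d & ego_position)
{
  if (target_points.empty()) {
    return std::nullopt;
  }
  // Squared distance is enough for comparison; no need for a square root.
  const auto squared_distance = [&ego_position](const Point2d & p) {
    const double dx = p.x - ego_position.x;
    const double dy = p.y - ego_position.y;
    return dx * dx + dy * dy;
  };
  return *std::min_element(
    target_points.begin(), target_points.end(),
    [&squared_distance](const Point2d & a, const Point2d & b) {
      return squared_distance(a) < squared_distance(b);
    });
}
```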
diff --git a/control/autoware_autonomous_emergency_braking/image/obstacle_filtering_2.drawio.svg b/control/autoware_autonomous_emergency_braking/image/obstacle_filtering_2.drawio.svg
index 1d12fe233ad8e..137872daebace 100644
--- a/control/autoware_autonomous_emergency_braking/image/obstacle_filtering_2.drawio.svg
+++ b/control/autoware_autonomous_emergency_braking/image/obstacle_filtering_2.drawio.svg
[obstacle_filtering_2.drawio.svg: updated draw.io figure for the README's "Rigorous filtering" step; the embedded mxfile and base64 image data are omitted here.]
source="ty0ni0eQC3Mwg9WxKvAH-42" target="ty0ni0eQC3Mwg9WxKvAH-39"> <mxGeometry width="50" height="50" relative="1" as="geometry"> <mxPoint x="353.5" y="949" as="sourcePoint" /> <mxPoint x="403.5" y="899" as="targetPoint" /> </mxGeometry> </mxCell> <mxCell id="ty0ni0eQC3Mwg9WxKvAH-52" value="&lt;font style=&quot;font-size: 14px;&quot;&gt;Closest Point&lt;/font&gt;" style="edgeLabel;html=1;align=center;verticalAlign=middle;resizable=0;points=[];labelBackgroundColor=none;fontColor=#000000;" vertex="1" connectable="0" parent="ty0ni0eQC3Mwg9WxKvAH-51"> <mxGeometry x="-0.084" y="1" relative="1" as="geometry"> <mxPoint x="3" y="-7" as="offset" /> </mxGeometry> </mxCell> <mxCell id="ty0ni0eQC3Mwg9WxKvAH-53" value="Filter out points outside clusters and get a 2D convex hull of each cluster" style="text;html=1;align=center;verticalAlign=middle;resizable=0;points=[];autosize=1;strokeColor=none;fillColor=none;fontSize=18;" vertex="1" parent="1"> <mxGeometry x="159" y="564" width="600" height="40" as="geometry" /> </mxCell> <mxCell id="ty0ni0eQC3Mwg9WxKvAH-54" value="Find the closest vertex of the convex hull(s)" style="text;html=1;align=center;verticalAlign=middle;resizable=0;points=[];autosize=1;strokeColor=none;fillColor=none;fontSize=18;" vertex="1" parent="1"> <mxGeometry x="301" y="832" width="370" height="40" as="geometry" /> </mxCell> <mxCell id="ty0ni0eQC3Mwg9WxKvAH-55" value="" style="ellipse;whiteSpace=wrap;html=1;aspect=fixed;fillColor=#e1d5e7;strokeColor=#9673a6;" vertex="1" parent="1"> <mxGeometry x="535" y="673" width="8" height="8" as="geometry" /> </mxCell> <mxCell id="ty0ni0eQC3Mwg9WxKvAH-56" value="" style="verticalLabelPosition=bottom;verticalAlign=top;html=1;shape=mxgraph.basic.x;fillColor=#FF0000;opacity=40;" vertex="1" parent="1"> <mxGeometry x="529" y="668" width="20" height="19" as="geometry" /> </mxCell> </root> </mxGraphModel> </diagram> </mxfile> " + height="265px" + viewBox="-0.5 -0.5 616 265" + content="<mxfile host="app.diagrams.net" modified="2024-06-21T02:42:01.489Z" agent="Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/121.0.0.0 Safari/537.36" etag="3pgNd0JrTgvgM2V0BwGE" version="24.6.0" type="google" scale="1" border="0"> <diagram name="Page-1" id="nTw8mlOSOdTxmldlJuHK"> <mxGraphModel dx="1137" dy="783" grid="1" gridSize="10" guides="1" tooltips="1" connect="1" arrows="1" fold="1" page="1" pageScale="1" pageWidth="850" pageHeight="1100" math="0" shadow="0"> <root> <mxCell id="0" /> <mxCell id="1" parent="0" /> <mxCell id="ty0ni0eQC3Mwg9WxKvAH-1" value="" style="group;opacity=30;" parent="1" connectable="0" vertex="1"> <mxGeometry x="340" y="602" width="420" height="155" as="geometry" /> </mxCell> <mxCell id="ty0ni0eQC3Mwg9WxKvAH-2" value="" style="group;fillColor=#dae8fc;strokeColor=#6c8ebf;container=0;" parent="ty0ni0eQC3Mwg9WxKvAH-1" connectable="0" vertex="1"> <mxGeometry width="420" height="155" as="geometry" /> </mxCell> <mxCell id="ty0ni0eQC3Mwg9WxKvAH-3" value="" style="rounded=0;whiteSpace=wrap;html=1;fillColor=#dae8fc;strokeColor=#6c8ebf;opacity=40;strokeWidth=5;" parent="ty0ni0eQC3Mwg9WxKvAH-1" vertex="1"> <mxGeometry x="280" width="140" height="155" as="geometry" /> </mxCell> <mxCell id="ty0ni0eQC3Mwg9WxKvAH-4" value="" style="rounded=0;whiteSpace=wrap;html=1;fillColor=#dae8fc;strokeColor=#6c8ebf;opacity=40;strokeWidth=5;" parent="ty0ni0eQC3Mwg9WxKvAH-1" vertex="1"> <mxGeometry x="140" width="140" height="155" as="geometry" /> </mxCell> <mxCell id="ty0ni0eQC3Mwg9WxKvAH-5" value="" 
style="rounded=0;whiteSpace=wrap;html=1;fillColor=#dae8fc;strokeColor=#6c8ebf;opacity=40;strokeWidth=5;" parent="ty0ni0eQC3Mwg9WxKvAH-1" vertex="1"> <mxGeometry width="140" height="155" as="geometry" /> </mxCell> <mxCell id="ty0ni0eQC3Mwg9WxKvAH-6" value="" style="group" parent="1" connectable="0" vertex="1"> <mxGeometry x="340" y="647" width="420" height="85" as="geometry" /> </mxCell> <mxCell id="ty0ni0eQC3Mwg9WxKvAH-7" value="" style="rounded=0;whiteSpace=wrap;html=1;fillColor=#d5e8d4;strokeColor=#82b366;opacity=40;strokeWidth=5;" parent="ty0ni0eQC3Mwg9WxKvAH-6" vertex="1"> <mxGeometry x="280" width="140" height="60" as="geometry" /> </mxCell> <mxCell id="ty0ni0eQC3Mwg9WxKvAH-8" value="" style="rounded=0;whiteSpace=wrap;html=1;fillColor=#d5e8d4;strokeColor=#82b366;opacity=40;strokeWidth=5;" parent="ty0ni0eQC3Mwg9WxKvAH-6" vertex="1"> <mxGeometry x="140" width="140" height="60" as="geometry" /> </mxCell> <mxCell id="ty0ni0eQC3Mwg9WxKvAH-9" value="" style="rounded=0;whiteSpace=wrap;html=1;fillColor=#d5e8d4;strokeColor=#82b366;opacity=40;strokeWidth=5;" parent="ty0ni0eQC3Mwg9WxKvAH-6" vertex="1"> <mxGeometry width="140" height="60" as="geometry" /> </mxCell> <mxCell id="ty0ni0eQC3Mwg9WxKvAH-10" value="" style="shape=image;verticalLabelPosition=bottom;labelBackgroundColor=default;verticalAlign=top;aspect=fixed;imageAspect=0;image=data:image/svg+xml,PHN2ZyB4bWxuczp4bGluaz0iaHR0cDovL3d3dy53My5vcmcvMTk5OS94bGluayIgeG1sbnM9Imh0dHA6Ly93d3cudzMub3JnLzIwMDAvc3ZnIiB2aWV3Qm94PSIwIDAgMTE5LjQ2IDQwIiBpZD0iTGF5ZXJfMiI+PGRlZnM+PHN0eWxlPi5jbHMtMXtjbGlwLXBhdGg6dXJsKCNjbGlwcGF0aCk7fS5jbHMtMntmaWxsOm5vbmU7fS5jbHMtMiwuY2xzLTN7c3Ryb2tlLXdpZHRoOjBweDt9LmNscy0ze2ZpbGw6IzIzMWYyMDt9PC9zdHlsZT48Y2xpcFBhdGggaWQ9ImNsaXBwYXRoIj48cmVjdCBoZWlnaHQ9IjQwIiB3aWR0aD0iMTE1LjExIiB4PSIuNSIgY2xhc3M9ImNscy0yIi8+PC9jbGlwUGF0aD48L2RlZnM+PGcgaWQ9IlJvYWRzIj48cmVjdCBoZWlnaHQ9IjEiIHdpZHRoPSIzLjUiIHk9IjE5LjUiIHg9Ii41IiBjbGFzcz0iY2xzLTMiLz48cGF0aCBkPSJtMTA0LjksMjAuNWgtNy4yMXYtMWg3LjIxdjFabS0xNC40MSwwaC03LjIxdi0xaDcuMjF2MVptLTE0LjQxLDBoLTcuMjF2LTFoNy4yMXYxWm0tMTQuNDEsMGgtNy4yMXYtMWg3LjIxdjFabS0xNC40MSwwaC03LjIxdi0xaDcuMjF2MVptLTE0LjQxLDBoLTcuMjF2LTFoNy4yMXYxWm0tMTQuNDEsMGgtNy4yMXYtMWg3LjIxdjFaIiBjbGFzcz0iY2xzLTMiLz48cmVjdCBoZWlnaHQ9IjEiIHdpZHRoPSIzLjUiIHk9IjE5LjUiIHg9IjExMi4xMSIgY2xhc3M9ImNscy0zIi8+PGcgY2xhc3M9ImNscy0xIj48cG9seWdvbiBwb2ludHM9IjAgMi41OSAwIDMuNTIgMS4wMiAzLjUyIDEuMDIgMy41OSAxMTguNDYgMy41OSAxMTguNDYgMzYuNDEgMSAzNi40MSAxIDM2LjQgMCAzNi40IDAgMzcuNDEgMTE5LjQ2IDM3LjQxIDExOS40NiAyLjU5IDAgMi41OSIgY2xhc3M9ImNscy0zIi8+PC9nPjwvZz48L3N2Zz4=;" parent="1" vertex="1"> <mxGeometry x="180" y="618" width="595" height="200" as="geometry" /> </mxCell> <mxCell id="ty0ni0eQC3Mwg9WxKvAH-11" value="" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;" parent="1" edge="1"> <mxGeometry relative="1" as="geometry"> <mxPoint x="456.7649999999999" y="677.5" as="targetPoint" /> <mxPoint x="350.0000000000002" y="677.4867690685733" as="sourcePoint" /> </mxGeometry> </mxCell> <mxCell id="ty0ni0eQC3Mwg9WxKvAH-12" value="" 
style="shape=image;aspect=fixed;image=data:image/svg+xml,PD94bWwgdmVyc2lvbj0iMS4wIiBlbmNvZGluZz0iVVRGLTgiIHN0YW5kYWxvbmU9Im5vIj8+CjwhLS0gQ3JlYXRlZCB3aXRoIElua3NjYXBlIChodHRwOi8vd3d3Lmlua3NjYXBlLm9yZy8pIC0tPgoKPHN2ZwogICB3aWR0aD0iNy4zMDU0NjA5bW0iCiAgIGhlaWdodD0iMy4xODI4MzIybW0iCiAgIHZpZXdCb3g9IjAgMCA3LjMwNTQ2MDkgMy4xODI4MzIyIgogICB2ZXJzaW9uPSIxLjEiCiAgIGlkPSJzdmcxIgogICB4bWw6c3BhY2U9InByZXNlcnZlIgogICBpbmtzY2FwZTp2ZXJzaW9uPSIxLjMgKDE6MS4zKzIwMjMwNzIzMTQ1OSswZTE1MGVkNmM0KSIKICAgc29kaXBvZGk6ZG9jbmFtZT0iYmVoYXZpb3JfcGF0aF9wbGFubmVyLnN2ZyIKICAgeG1sbnM6aW5rc2NhcGU9Imh0dHA6Ly93d3cuaW5rc2NhcGUub3JnL25hbWVzcGFjZXMvaW5rc2NhcGUiCiAgIHhtbG5zOnNvZGlwb2RpPSJodHRwOi8vc29kaXBvZGkuc291cmNlZm9yZ2UubmV0L0RURC9zb2RpcG9kaS0wLmR0ZCIKICAgeG1sbnM9Imh0dHA6Ly93d3cudzMub3JnLzIwMDAvc3ZnIgogICB4bWxuczpzdmc9Imh0dHA6Ly93d3cudzMub3JnLzIwMDAvc3ZnIj48c29kaXBvZGk6bmFtZWR2aWV3CiAgICAgaWQ9Im5hbWVkdmlldzEiCiAgICAgcGFnZWNvbG9yPSIjZmZmZmZmIgogICAgIGJvcmRlcmNvbG9yPSIjMDAwMDAwIgogICAgIGJvcmRlcm9wYWNpdHk9IjAuMjUiCiAgICAgaW5rc2NhcGU6c2hvd3BhZ2VzaGFkb3c9IjIiCiAgICAgaW5rc2NhcGU6cGFnZW9wYWNpdHk9IjAuMCIKICAgICBpbmtzY2FwZTpwYWdlY2hlY2tlcmJvYXJkPSIwIgogICAgIGlua3NjYXBlOmRlc2tjb2xvcj0iI2QxZDFkMSIKICAgICBpbmtzY2FwZTpkb2N1bWVudC11bml0cz0ibW0iCiAgICAgaW5rc2NhcGU6em9vbT0iMTkuMTA5NTAxIgogICAgIGlua3NjYXBlOmN4PSIxNzkuODMyMDEiCiAgICAgaW5rc2NhcGU6Y3k9IjQ4MC40NDE2MyIKICAgICBpbmtzY2FwZTp3aW5kb3ctd2lkdGg9IjM3NzAiCiAgICAgaW5rc2NhcGU6d2luZG93LWhlaWdodD0iMjA5NiIKICAgICBpbmtzY2FwZTp3aW5kb3cteD0iNzAiCiAgICAgaW5rc2NhcGU6d2luZG93LXk9IjI3IgogICAgIGlua3NjYXBlOndpbmRvdy1tYXhpbWl6ZWQ9IjEiCiAgICAgaW5rc2NhcGU6Y3VycmVudC1sYXllcj0ibGF5ZXIxIiAvPjxkZWZzCiAgICAgaWQ9ImRlZnMxIiAvPjxnCiAgICAgaW5rc2NhcGU6bGFiZWw9IkxheWVyIDEiCiAgICAgaW5rc2NhcGU6Z3JvdXBtb2RlPSJsYXllciIKICAgICBpZD0ibGF5ZXIxIgogICAgIHRyYW5zZm9ybT0idHJhbnNsYXRlKC01MS4zMjMzMjMsLTEzMi4wNTM0MikiPjxyZWN0CiAgICAgICBzdHlsZT0iZmlsbDojZjhjZWNjO2ZpbGwtb3BhY2l0eToxO3N0cm9rZTojMDAwMDAwO3N0cm9rZS13aWR0aDowLjE1O3N0cm9rZS1kYXNoYXJyYXk6bm9uZSIKICAgICAgIGlkPSJyZWN0MSIKICAgICAgIHdpZHRoPSI3LjE1NTQ2MzciCiAgICAgICBoZWlnaHQ9IjMuMDMyODMxNCIKICAgICAgIHg9IjUxLjM5ODMyMyIKICAgICAgIHk9IjEzMi4xMjg0MiIKICAgICAgIHJ5PSIwLjkzNTU2ODM5IiAvPjxwYXRoCiAgICAgICBpZD0icGF0aDkiCiAgICAgICBzdHlsZT0iZmlsbDojZjhjZWNjO2ZpbGwtb3BhY2l0eToxO3N0cm9rZTojMDAwMDAwO3N0cm9rZS13aWR0aDowLjE1O3N0cm9rZS1kYXNoYXJyYXk6bm9uZSIKICAgICAgIGQ9Im0gNTYuNjM1MjgyLDEzNC43OTk4NCAtMS4xNDEzNjUsLTAuMjQ1NSBtIDEuMTQxMzY1LC0yLjA1ODEgLTEuMTQxMzY1LDAuMjQ1NSBtIC0yLjYzNzE1NiwyLjA4NDY3IDEuMjM4Mzg3LC0wLjI5ODYzIG0gLTEuMjM4Mzg3LC0yLjA1ODExIDEuMjM4Mzg3LDAuMjk4NjQgbSAwLjczOTM5MSwxLjc3MDY4IHYgMC4zNTcyNyBtIDAsLTIuNDkzOTYgdiAwLjM1NzI3IG0gLTAuNzgyMjUxLC0wLjAxNDEgaCAxLjQ1ODQwMSB2IDEuODA3ODkgaCAtMS40NTg0MDEgeiBtIC0wLjg0MDE1NCwtMC4zMzA4MyBoIDIuOTE2Mzk4IGMgMC41ODkwMTksMCAxLjA2MzIxMiwwLjMzOTM5IDEuMDYzMjEyLDAuNzYwOTYgdiAwLjk0NDg4IGMgMCwwLjQyMTU3IC0wLjQ3NDE5MywwLjc2MDk2IC0xLjA2MzIxMiwwLjc2MDk2IGggLTIuOTE2Mzk4IGMgLTAuNTg5MDE5LDAgLTEuMDYzMjEyLC0wLjMzOTM5IC0xLjA2MzIxMiwtMC43NjA5NiB2IC0wLjk0NDg4IGMgMCwtMC40MjE1NyAwLjQ3NDE5MywtMC43NjA5NiAxLjA2MzIxMiwtMC43NjA5NiB6IiAvPjwvZz48L3N2Zz4K;points=[[0,0.25,0,0,0],[0,0.5,0,0,0],[0,0.75,0,0,0],[0.1,0,0,0,0],[0.1,1,0,0,0],[0.25,0,0,0,0],[0.25,1,0,0,0],[0.5,0,0,0,0],[0.5,1,0,0,0],[0.75,0,0,0,0],[0.75,1,0,0,0],[0.89,0,0,0,0],[0.89,1,0,0,0],[1,0.25,0,0,0],[1,0.5,0,0,0],[1,0.75,0,0,0]];imageBackground=none;imageAspect=0;perimeter=none;" parent="1" vertex="1"> <mxGeometry x="466.34" y="655" width="107.33" height="46" as="geometry" /> </mxCell> <mxCell id="ty0ni0eQC3Mwg9WxKvAH-13" value="" 
style="ellipse;whiteSpace=wrap;html=1;aspect=fixed;fillColor=#60a917;strokeColor=#2D7600;fontColor=#ffffff;" parent="1" vertex="1"> <mxGeometry x="464" y="671" width="8" height="8" as="geometry" /> </mxCell> <mxCell id="ty0ni0eQC3Mwg9WxKvAH-14" value="" style="ellipse;whiteSpace=wrap;html=1;aspect=fixed;fillColor=#60a917;strokeColor=#2D7600;fontColor=#ffffff;" parent="1" vertex="1"> <mxGeometry x="468.34000000000003" y="685" width="8" height="8" as="geometry" /> </mxCell> <mxCell id="ty0ni0eQC3Mwg9WxKvAH-15" value="" style="ellipse;whiteSpace=wrap;html=1;aspect=fixed;fillColor=#60a917;strokeColor=#2D7600;fontColor=#ffffff;" parent="1" vertex="1"> <mxGeometry x="480" y="655" width="8" height="8" as="geometry" /> </mxCell> <mxCell id="ty0ni0eQC3Mwg9WxKvAH-16" value="" style="ellipse;whiteSpace=wrap;html=1;aspect=fixed;fillColor=#e1d5e7;strokeColor=#9673a6;" parent="1" vertex="1"> <mxGeometry x="472" y="665" width="8" height="8" as="geometry" /> </mxCell> <mxCell id="ty0ni0eQC3Mwg9WxKvAH-17" value="" style="shape=image;verticalLabelPosition=bottom;labelBackgroundColor=default;verticalAlign=top;aspect=fixed;imageAspect=0;image=data:image/png,iVBORw0KGgoAAAANSUhEUgAAAKwAAABKCAYAAAAmLL9HAAAACXBIWXMAADT9AAA1CgHHClLTAAAJN0lEQVR4nO2dQW7bOBSGfxezLJDMbnbJADWjXdwTVD1B3BNEPcGkJxjNCeKeYJQT1DlBlBPU2QlEgbF33U0MdJ9Z8KmRnkhTylgSJfMDhIC2adHML/LxkXycPD09oQmBEHMAIYAZXUeNvsBz6GwBrOhKMymXTTJP6gg2EOIUQAxgDi9Qz37ZAlgCiDMp17YP7xRsIMQxlFD/2FPhPJ5dfIYS7qPpA0bBBkLMoJR/0k7ZPB4tGwDzTMqV7k2tYAMhIgAL6Lv/vAlPAaxMX+wpEwhRquhMyklfZekTaghnUOMgk4m5BXCVSZnwNyqCJbH+bfiSBYDFribbo8cLtgqZnFd06YT7kYu2JFhSf6rJfAsg8kJ9OV6wZki4CYAL9tYWQFjsxX8KljKtULVZbzIpo7YKeyh4wdoJhEgAXLKXNwBmeWP5S+GNGAMUK/UKIYBj+jsIAiHSvsvQEo8o+1lr98qZlFEgBFAW7QmUyRAD1MKSn/Uflv82k3L+8nK3RyBECCCC9wsPgVsAS90AykQgxBJV8+D3TMr1K0rE7M0tlCCcIhAiCoRYA7iDegq9WN3nAsDfgRBrmiWtQwSlwSIxAOSC5V/klCcgEGIWCLGC8l54v/AwOQHwJRBiSeMlI6S9BXt5DgCTs+l0DuBL4Y0tgFNXBBsIcQXg2vKxeyjvxpqutnkD5Ut8DeA3umycs/TDvgvVAt/p+gFlk36rkafoZzU1Lg9QXiejD59EvUa5F/0wOZtOFyhPvToz0DKMGnPuASRNbKP/WZYQ3m7OJ42STMrU9mGqsxjAO8N3hRbRJij//z9PzqbTlH1hxVnbBzSK1v3Qe6j55rSjcsyguiddWQ6Ze6jZKOtMJwk3QbXFLbmsNPkilCex7l9BNd9Fep9qpSdLJ5BPmZRhh2JdAPhqKMuh8w7AV6qjndD/a4aqGXQC1WKb4FqcTc6mU6cc2oEQMYA/2cvW7mPPZTiGsom53Vkkt5tzv+MYCQt/dz20D1D/H+u4x9Bzfsqk1AqfT7g4JVjqOu7Yyy6JdQNlky1dGZR2BdXLHPoJJqCmaA31axzoc8G+4h/omYSlXRHrFsq2P82kTA5NrIByNdFvPwXwEVU/6TmAtKbLKmIvH0HNZllxRrBkCvAnt5ZRv0cW0LufZi4MRF2B6kJnk56j6j/V5V8BuGEvR3Xu7YRgC8vMitx2KRKaheEutLybW3dVjqFAdRKiKtrLmjNaMUufkEdmJ04IFnrfZq0uYo/wliE3Rw6u+68L1U2IqnlQp5VdQ60zKGIVuiuC5eK86bJVI38fN0fmXqx2qI640E6oTm2kLB3aMvQuWFopxu1G6xO6Z2KWvunK1zsGqK64TRrXyMrHJ6e2DL0LFtWn6qHLgRbZTbx1jbu6/4iIWdpqk2oaBevCJhcFm3Z8f96dPfhBVnOozvgAbO/rqV0Q7ClLpx3fn7cCScf3HxMJS1tH/U1xQbD8R607vv8pS491mrULGtukTfnF/pHWKbmzeohzwAd8Tgu2sMwxxLPNt4HqmWot+2sRXne71mK8CBdaWKdw1ZUVCHFMe53y7UHFAcoJvXZXZ0V/W3RRd16wA6CwxoFvzNNxgRpz+kPFC3YYJGjWvZ5jpINHL1jHIZuVt6wbqNVjEwC/Qq2e2rDPXFDeUeEF6z4RS+fbShLgedkflLeFi5bnHTxesO4TsrQ2fiq9FlvyDh4vWPfh05XGPVCa5Ziji+HgBTswdrmOxuoZKOIF6z4lu9SybI/P3XObdvB4wbpPytKxriUtnEexK+/gcWFq1rObBNXwkyvaA5fbs6bdrEm7Rese38I6Dq0N4FtJTqAiovxLly5I3u0YF6F7wQ6DCM2Cxz1ghD5YwAt2EBQ2+/GWVsctRrx50tuwAyHf7Of48sLW8YIdGCTItOdi9IY3CRiH4Hxviy7qzgXBloIw1In+sWf4YKbr+48JXnd7jzLugmBb3wdkYc3SXrAvp/X9eS4Ids3SYcf35w9M1PH9x0TE0nvfH+eCYFOWDju+P1/9dE7RaDwNMETw2RVd+0W4KNjzLu1Y2qXLF4nEXd1/RMQsvbHtgNbsiLAu1uldsIaIIV1HLoxZ+nKM20vaguqKhyqNa2RtbPP2LliCB3+77LJbpoXP/Onubbv0kKA64l3/pmZs35ClU1sGVwS7xAtijO4Z3qofYcTbpfdBYft549i+1CDxzZVWm9cJwRqOaryoGWN0X2VYohoyMo/bf9pVOYYC1UmK6kDrhurSRszSVpsXcESwAJBJGaPaLS86nki4gj5u/4qOEPXg53GqK+jPg6jTus5QtXmTOvd2RrBExNJ
5t9yJaAurorhojwBc04nU0SGaCRQqKT9N/RpVM6DJsUcJe3mLmiagU+d0AYM8WA4Y72KUGYBjOHaw3CPKT8vbHiIIlthxKLLxh7VUjgXKB0d7qnzOpKxjBpgagftMytCQZwZ1dGrO9hU053nWLmpL0Gni95q3rgMh0q58pPSPeGsoy6FzD9W41RFrCL3Nu8HuKN1ciyudYENbAbqAnjo+agdUd3JHwo06KMeKyvKeysPdb4fEFqoO3tMh1daZLDIB7lDdc7aF/aSekKVXk7PpdA7gC/si7bmffUAj0mvLx3J7co1uIni/gXr6XwP4jS4buhG163yn6wdUw/atRp4ZXSHMkWceAES7BE8mxBplc/XD5OnpCYEQ3I79i9xMTkC2TIIWIjp7OucWSqw2b0KM8uB7m0l5nLu1uKP3yiXXDXXLM+jDSnqGwQbAh0xK64F9hqNcl8CzHzZmbx7BwSAMhdOkvT05HG7xfBJ63eWGCap+3hgAJk9Pys1lcOHc0IjdWchcCPHsL3QV7nscq+fhEcreXQFIm46FDC7Nn66zomCP6SbcUHZetEOAO8D7nqBxEYNY8wDOj0BharZwyC3vZi/7PJnEM34KJ+RwsVZcX6W1BORm0DmCLwCsAyG0kfM8npdAQo2h3Fe6E3KuuOvrp0nAviiCWozADV9AqX4J5fdc9T2NOxS8SaCgMUc+7pjDrLEr3SJwrWALX7zECMOOe5xmA2UGaBtC4/JCyjAD8Lmlgnk8nL+gBljGXtvYwhah1eUxzE24x/NSchMzpg2pO6kl2CKBEHMo+yO3RbyAPU3YouynbRS74D+7OeKwBtsqIwAAAABJRU5ErkJggg==;direction=west;imageBackground=default;rotation=0;" parent="1" vertex="1"> <mxGeometry x="235.47000000000003" y="651.5" width="123.19" height="53" as="geometry" /> </mxCell> <mxCell id="ty0ni0eQC3Mwg9WxKvAH-18" value="" style="ellipse;whiteSpace=wrap;html=1;aspect=fixed;fillColor=#e1d5e7;strokeColor=#9673a6;" parent="1" vertex="1"> <mxGeometry x="482" y="673" width="8" height="8" as="geometry" /> </mxCell> <mxCell id="ty0ni0eQC3Mwg9WxKvAH-19" value="" style="ellipse;whiteSpace=wrap;html=1;aspect=fixed;fillColor=#60a917;strokeColor=#2D7600;fontColor=#ffffff;" parent="1" vertex="1"> <mxGeometry x="478.34000000000003" y="695" width="8" height="8" as="geometry" /> </mxCell> <mxCell id="ty0ni0eQC3Mwg9WxKvAH-20" value="" style="ellipse;whiteSpace=wrap;html=1;aspect=fixed;fillColor=#60a917;strokeColor=#2D7600;fontColor=#ffffff;" parent="1" vertex="1"> <mxGeometry x="492" y="683" width="8" height="8" as="geometry" /> </mxCell> <mxCell id="ty0ni0eQC3Mwg9WxKvAH-21" value="" 
style="shape=image;aspect=fixed;image=data:image/svg+xml,PD94bWwgdmVyc2lvbj0iMS4wIiBlbmNvZGluZz0iVVRGLTgiIHN0YW5kYWxvbmU9Im5vIj8+CjwhLS0gQ3JlYXRlZCB3aXRoIElua3NjYXBlIChodHRwOi8vd3d3Lmlua3NjYXBlLm9yZy8pIC0tPgoKPHN2ZwogICB3aWR0aD0iNy4zMDU0NjA5bW0iCiAgIGhlaWdodD0iMy4xODI4MzIybW0iCiAgIHZpZXdCb3g9IjAgMCA3LjMwNTQ2MDkgMy4xODI4MzIyIgogICB2ZXJzaW9uPSIxLjEiCiAgIGlkPSJzdmcxIgogICB4bWw6c3BhY2U9InByZXNlcnZlIgogICBpbmtzY2FwZTp2ZXJzaW9uPSIxLjMgKDE6MS4zKzIwMjMwNzIzMTQ1OSswZTE1MGVkNmM0KSIKICAgc29kaXBvZGk6ZG9jbmFtZT0iYmVoYXZpb3JfcGF0aF9wbGFubmVyLnN2ZyIKICAgeG1sbnM6aW5rc2NhcGU9Imh0dHA6Ly93d3cuaW5rc2NhcGUub3JnL25hbWVzcGFjZXMvaW5rc2NhcGUiCiAgIHhtbG5zOnNvZGlwb2RpPSJodHRwOi8vc29kaXBvZGkuc291cmNlZm9yZ2UubmV0L0RURC9zb2RpcG9kaS0wLmR0ZCIKICAgeG1sbnM9Imh0dHA6Ly93d3cudzMub3JnLzIwMDAvc3ZnIgogICB4bWxuczpzdmc9Imh0dHA6Ly93d3cudzMub3JnLzIwMDAvc3ZnIj48c29kaXBvZGk6bmFtZWR2aWV3CiAgICAgaWQ9Im5hbWVkdmlldzEiCiAgICAgcGFnZWNvbG9yPSIjZmZmZmZmIgogICAgIGJvcmRlcmNvbG9yPSIjMDAwMDAwIgogICAgIGJvcmRlcm9wYWNpdHk9IjAuMjUiCiAgICAgaW5rc2NhcGU6c2hvd3BhZ2VzaGFkb3c9IjIiCiAgICAgaW5rc2NhcGU6cGFnZW9wYWNpdHk9IjAuMCIKICAgICBpbmtzY2FwZTpwYWdlY2hlY2tlcmJvYXJkPSIwIgogICAgIGlua3NjYXBlOmRlc2tjb2xvcj0iI2QxZDFkMSIKICAgICBpbmtzY2FwZTpkb2N1bWVudC11bml0cz0ibW0iCiAgICAgaW5rc2NhcGU6em9vbT0iMTkuMTA5NTAxIgogICAgIGlua3NjYXBlOmN4PSIxNzkuODMyMDEiCiAgICAgaW5rc2NhcGU6Y3k9IjQ4MC40NDE2MyIKICAgICBpbmtzY2FwZTp3aW5kb3ctd2lkdGg9IjM3NzAiCiAgICAgaW5rc2NhcGU6d2luZG93LWhlaWdodD0iMjA5NiIKICAgICBpbmtzY2FwZTp3aW5kb3cteD0iNzAiCiAgICAgaW5rc2NhcGU6d2luZG93LXk9IjI3IgogICAgIGlua3NjYXBlOndpbmRvdy1tYXhpbWl6ZWQ9IjEiCiAgICAgaW5rc2NhcGU6Y3VycmVudC1sYXllcj0ibGF5ZXIxIiAvPjxkZWZzCiAgICAgaWQ9ImRlZnMxIiAvPjxnCiAgICAgaW5rc2NhcGU6bGFiZWw9IkxheWVyIDEiCiAgICAgaW5rc2NhcGU6Z3JvdXBtb2RlPSJsYXllciIKICAgICBpZD0ibGF5ZXIxIgogICAgIHRyYW5zZm9ybT0idHJhbnNsYXRlKC01MS4zMjMzMjMsLTEzMi4wNTM0MikiPjxyZWN0CiAgICAgICBzdHlsZT0iZmlsbDojZjhjZWNjO2ZpbGwtb3BhY2l0eToxO3N0cm9rZTojMDAwMDAwO3N0cm9rZS13aWR0aDowLjE1O3N0cm9rZS1kYXNoYXJyYXk6bm9uZSIKICAgICAgIGlkPSJyZWN0MSIKICAgICAgIHdpZHRoPSI3LjE1NTQ2MzciCiAgICAgICBoZWlnaHQ9IjMuMDMyODMxNCIKICAgICAgIHg9IjUxLjM5ODMyMyIKICAgICAgIHk9IjEzMi4xMjg0MiIKICAgICAgIHJ5PSIwLjkzNTU2ODM5IiAvPjxwYXRoCiAgICAgICBpZD0icGF0aDkiCiAgICAgICBzdHlsZT0iZmlsbDojZjhjZWNjO2ZpbGwtb3BhY2l0eToxO3N0cm9rZTojMDAwMDAwO3N0cm9rZS13aWR0aDowLjE1O3N0cm9rZS1kYXNoYXJyYXk6bm9uZSIKICAgICAgIGQ9Im0gNTYuNjM1MjgyLDEzNC43OTk4NCAtMS4xNDEzNjUsLTAuMjQ1NSBtIDEuMTQxMzY1LC0yLjA1ODEgLTEuMTQxMzY1LDAuMjQ1NSBtIC0yLjYzNzE1NiwyLjA4NDY3IDEuMjM4Mzg3LC0wLjI5ODYzIG0gLTEuMjM4Mzg3LC0yLjA1ODExIDEuMjM4Mzg3LDAuMjk4NjQgbSAwLjczOTM5MSwxLjc3MDY4IHYgMC4zNTcyNyBtIDAsLTIuNDkzOTYgdiAwLjM1NzI3IG0gLTAuNzgyMjUxLC0wLjAxNDEgaCAxLjQ1ODQwMSB2IDEuODA3ODkgaCAtMS40NTg0MDEgeiBtIC0wLjg0MDE1NCwtMC4zMzA4MyBoIDIuOTE2Mzk4IGMgMC41ODkwMTksMCAxLjA2MzIxMiwwLjMzOTM5IDEuMDYzMjEyLDAuNzYwOTYgdiAwLjk0NDg4IGMgMCwwLjQyMTU3IC0wLjQ3NDE5MywwLjc2MDk2IC0xLjA2MzIxMiwwLjc2MDk2IGggLTIuOTE2Mzk4IGMgLTAuNTg5MDE5LDAgLTEuMDYzMjEyLC0wLjMzOTM5IC0xLjA2MzIxMiwtMC43NjA5NiB2IC0wLjk0NDg4IGMgMCwtMC40MjE1NyAwLjQ3NDE5MywtMC43NjA5NiAxLjA2MzIxMiwtMC43NjA5NiB6IiAvPjwvZz48L3N2Zz4K;points=[[0,0.25,0,0,0],[0,0.5,0,0,0],[0,0.75,0,0,0],[0.1,0,0,0,0],[0.1,1,0,0,0],[0.25,0,0,0,0],[0.25,1,0,0,0],[0.5,0,0,0,0],[0.5,1,0,0,0],[0.75,0,0,0,0],[0.75,1,0,0,0],[0.89,0,0,0,0],[0.89,1,0,0,0],[1,0.25,0,0,0],[1,0.5,0,0,0],[1,0.75,0,0,0]];imageBackground=none;imageAspect=0;perimeter=none;" parent="1" vertex="1"> <mxGeometry x="392.66999999999996" y="782" width="107.33" height="46" as="geometry" /> </mxCell> <mxCell id="ty0ni0eQC3Mwg9WxKvAH-22" value="" style="endArrow=none;html=1;rounded=0;fillColor=#60a917;strokeColor=#2D7600;" 
parent="1" edge="1"> <mxGeometry width="50" height="50" relative="1" as="geometry"> <mxPoint x="468" y="676" as="sourcePoint" /> <mxPoint x="484" y="659" as="targetPoint" /> </mxGeometry> </mxCell> <mxCell id="ty0ni0eQC3Mwg9WxKvAH-23" value="" style="endArrow=none;html=1;rounded=0;fillColor=#60a917;strokeColor=#2D7600;entryX=0;entryY=0.875;entryDx=0;entryDy=0;entryPerimeter=0;exitX=0.369;exitY=0.304;exitDx=0;exitDy=0;exitPerimeter=0;" parent="1" source="ty0ni0eQC3Mwg9WxKvAH-20" edge="1"> <mxGeometry width="50" height="50" relative="1" as="geometry"> <mxPoint x="482" y="683" as="sourcePoint" /> <mxPoint x="484" y="659" as="targetPoint" /> </mxGeometry> </mxCell> <mxCell id="ty0ni0eQC3Mwg9WxKvAH-24" value="" style="ellipse;whiteSpace=wrap;html=1;aspect=fixed;fillColor=#e1d5e7;strokeColor=#9673a6;" parent="1" vertex="1"> <mxGeometry x="481" y="684" width="8" height="8" as="geometry" /> </mxCell> <mxCell id="ty0ni0eQC3Mwg9WxKvAH-25" value="" style="endArrow=none;html=1;rounded=0;fillColor=#60a917;strokeColor=#2D7600;entryX=0.406;entryY=0.732;entryDx=0;entryDy=0;entryPerimeter=0;" parent="1" edge="1"> <mxGeometry width="50" height="50" relative="1" as="geometry"> <mxPoint x="473" y="689" as="sourcePoint" /> <mxPoint x="468.24800000000005" y="675.856" as="targetPoint" /> </mxGeometry> </mxCell> <mxCell id="ty0ni0eQC3Mwg9WxKvAH-26" value="" style="endArrow=none;html=1;rounded=0;fillColor=#60a917;strokeColor=#2D7600;entryX=0.716;entryY=0.513;entryDx=0;entryDy=0;entryPerimeter=0;" parent="1" edge="1"> <mxGeometry width="50" height="50" relative="1" as="geometry"> <mxPoint x="482" y="699" as="sourcePoint" /> <mxPoint x="473.068" y="689.104" as="targetPoint" /> </mxGeometry> </mxCell> <mxCell id="ty0ni0eQC3Mwg9WxKvAH-27" value="" style="endArrow=none;html=1;rounded=0;fillColor=#60a917;strokeColor=#2D7600;entryX=0.716;entryY=0.513;entryDx=0;entryDy=0;entryPerimeter=0;exitX=0.366;exitY=0.246;exitDx=0;exitDy=0;exitPerimeter=0;" parent="1" source="ty0ni0eQC3Mwg9WxKvAH-20" edge="1"> <mxGeometry width="50" height="50" relative="1" as="geometry"> <mxPoint x="491" y="709" as="sourcePoint" /> <mxPoint x="482.068" y="699.104" as="targetPoint" /> </mxGeometry> </mxCell> <mxCell id="ty0ni0eQC3Mwg9WxKvAH-53" value="Filter out points outside clusters and get a 2D convex hull of each cluster" style="text;html=1;align=center;verticalAlign=middle;resizable=0;points=[];autosize=1;strokeColor=none;fillColor=none;fontSize=18;" parent="1" vertex="1"> <mxGeometry x="159" y="564" width="600" height="40" as="geometry" /> </mxCell> <mxCell id="ty0ni0eQC3Mwg9WxKvAH-55" value="" style="ellipse;whiteSpace=wrap;html=1;aspect=fixed;fillColor=#e1d5e7;strokeColor=#9673a6;" parent="1" vertex="1"> <mxGeometry x="535" y="673" width="8" height="8" as="geometry" /> </mxCell> <mxCell id="ty0ni0eQC3Mwg9WxKvAH-56" value="" style="verticalLabelPosition=bottom;verticalAlign=top;html=1;shape=mxgraph.basic.x;fillColor=#FF0000;opacity=40;" parent="1" vertex="1"> <mxGeometry x="529" y="668" width="20" height="19" as="geometry" /> </mxCell> </root> </mxGraphModel> </diagram> </mxfile> " style="background-color: rgb(255, 255, 255);" > - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + + + + + + + + + + + + + + + + + + + + + + + - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + + + + + + + + + + + + + + + + - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 
- - - - - - - - - -
+ + -
-
- Closest Point -
-
-
-
- -
-
-
- - - - - - - -
-
-
- Filter out points outside clusters and get a 2D convex hull of each cluster -
-
-
-
- -
-
-
- - - - - - - -
-
-
- Find the closest vertex of the convex hull(s) -
-
-
-
- -
+ + + + + + + + + + + + + + + +
+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+
+
+ Filter out points outside clusters and get a 2D convex hull of each cluster +
+
+
+
+ +
+
+
+
+ + + + + + + + + +
- - - - - -
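The diagram above outlines the cluster-processing steps this patch touches: raw obstacle points are grouped into clusters, a 2D convex hull is taken for each cluster, and the closest hull vertex is used downstream. As a rough standalone sketch of that hull/closest-vertex step (PCL types and the function name are assumptions for illustration, not the module's exact code):

  // Sketch: build a 2D convex hull for one cluster and return the hull vertex
  // closest to a reference point (e.g. the ego position projected to the ground).
  #include <pcl/point_cloud.h>
  #include <pcl/point_types.h>
  #include <pcl/surface/convex_hull.h>

  #include <limits>

  pcl::PointXYZ closestHullVertex(
    const pcl::PointCloud<pcl::PointXYZ>::Ptr & cluster, const pcl::PointXYZ & reference)
  {
    pcl::ConvexHull<pcl::PointXYZ> hull;
    hull.setDimension(2);  // hull on the x-y plane, as in the diagram
    hull.setInputCloud(cluster);
    pcl::PointCloud<pcl::PointXYZ> hull_points;
    hull.reconstruct(hull_points);

    pcl::PointXYZ closest{};
    double min_squared_distance = std::numeric_limits<double>::max();
    for (const auto & p : hull_points.points) {
      const double dx = p.x - reference.x;
      const double dy = p.y - reference.y;
      const double squared_distance = dx * dx + dy * dy;
      if (squared_distance < min_squared_distance) {
        min_squared_distance = squared_distance;
        closest = p;
      }
    }
    return closest;
  }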
diff --git a/control/autoware_autonomous_emergency_braking/image/using-predicted-objects.drawio.svg b/control/autoware_autonomous_emergency_braking/image/using-predicted-objects.drawio.svg new file mode 100644 index 0000000000000..244b609c7f009 --- /dev/null +++ b/control/autoware_autonomous_emergency_braking/image/using-predicted-objects.drawio.svg @@ -0,0 +1,114 @@
[New draw.io SVG diagram (114 added lines) omitted: SVG markup and embedded image data. Text label recoverable from the diagram: "Ego path footprint and object polygon intersection points".]
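The node.hpp/node.cpp hunks that follow add a cluster_minimum_height parameter and skip any point-cloud cluster whose points all lie below that height, before the per-cluster convex hulls are generated. A minimal standalone sketch of that gate (assumed PCL types and a hypothetical helper name; the actual hunk inlines the check inside createObjectDataUsingPointCloudClusters):

  // Sketch: a cluster is kept only if at least one of its points is higher than
  // the configured minimum height; otherwise it is dropped before hull generation.
  #include <pcl/point_cloud.h>
  #include <pcl/point_types.h>

  bool clusterSurpassesMinimumHeight(
    const pcl::PointCloud<pcl::PointXYZ> & cluster, const double cluster_minimum_height)
  {
    for (const auto & p : cluster.points) {
      if (p.z > cluster_minimum_height) {
        return true;  // one sufficiently tall point is enough to keep the cluster
      }
    }
    return false;  // every point is below the threshold, so the cluster is skipped
  }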
diff --git a/control/autoware_autonomous_emergency_braking/include/autoware/autonomous_emergency_braking/node.hpp b/control/autoware_autonomous_emergency_braking/include/autoware/autonomous_emergency_braking/node.hpp index 8c454d5d583b6..23c2214cf0282 100644 --- a/control/autoware_autonomous_emergency_braking/include/autoware/autonomous_emergency_braking/node.hpp +++ b/control/autoware_autonomous_emergency_braking/include/autoware/autonomous_emergency_braking/node.hpp @@ -347,6 +347,7 @@ class AEB : public rclcpp::Node double a_ego_min_; double a_obj_min_; double cluster_tolerance_; + double cluster_minimum_height_; int minimum_cluster_size_; int maximum_cluster_size_; double imu_prediction_time_horizon_; diff --git a/control/autoware_autonomous_emergency_braking/src/node.cpp b/control/autoware_autonomous_emergency_braking/src/node.cpp index fca3341680997..674467810747d 100644 --- a/control/autoware_autonomous_emergency_braking/src/node.cpp +++ b/control/autoware_autonomous_emergency_braking/src/node.cpp @@ -147,6 +147,7 @@ AEB::AEB(const rclcpp::NodeOptions & node_options) a_obj_min_ = declare_parameter("a_obj_min"); cluster_tolerance_ = declare_parameter("cluster_tolerance"); + cluster_minimum_height_ = declare_parameter("cluster_minimum_height"); minimum_cluster_size_ = declare_parameter("minimum_cluster_size"); maximum_cluster_size_ = declare_parameter("maximum_cluster_size"); @@ -198,6 +199,7 @@ rcl_interfaces::msg::SetParametersResult AEB::onParameter( updateParam(parameters, "a_obj_min", a_obj_min_); updateParam(parameters, "cluster_tolerance", cluster_tolerance_); + updateParam(parameters, "cluster_minimum_height", cluster_minimum_height_); updateParam(parameters, "minimum_cluster_size", minimum_cluster_size_); updateParam(parameters, "maximum_cluster_size", maximum_cluster_size_); @@ -725,9 +727,15 @@ void AEB::createObjectDataUsingPointCloudClusters( PointCloud::Ptr points_belonging_to_cluster_hulls(new PointCloud); for (const auto & indices : cluster_indices) { PointCloud::Ptr cluster(new PointCloud); + bool cluster_surpasses_threshold_height{false}; for (const auto & index : indices.indices) { - cluster->push_back((*obstacle_points_ptr)[index]); + const auto & p = (*obstacle_points_ptr)[index]; + cluster_surpasses_threshold_height = (cluster_surpasses_threshold_height) + ? 
cluster_surpasses_threshold_height + : (p.z > cluster_minimum_height_); + cluster->push_back(p); } + if (!cluster_surpasses_threshold_height) continue; // Make a 2d convex hull for the objects pcl::ConvexHull hull; hull.setDimension(2); From c3fe17d64734a32d7861f33f0f9e1f4c81ce04a0 Mon Sep 17 00:00:00 2001 From: Ryuta Kambe Date: Sat, 22 Jun 2024 12:06:16 +0900 Subject: [PATCH 05/29] fix(tracking_object_merger): fix shadowArgument warning (#7622) Signed-off-by: Ryuta Kambe --- .../include/tracking_object_merger/utils/utils.hpp | 2 +- perception/tracking_object_merger/src/utils/utils.cpp | 10 +++++----- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/perception/tracking_object_merger/include/tracking_object_merger/utils/utils.hpp b/perception/tracking_object_merger/include/tracking_object_merger/utils/utils.hpp index 1778e09da5cd9..03352512d5545 100644 --- a/perception/tracking_object_merger/include/tracking_object_merger/utils/utils.hpp +++ b/perception/tracking_object_merger/include/tracking_object_merger/utils/utils.hpp @@ -93,7 +93,7 @@ TrackedObject linearInterpolationForTrackedObject( TrackedObject predictPastOrFutureTrackedObject(const TrackedObject & obj, const double dt); TrackedObjects predictPastOrFutureTrackedObjects( - const TrackedObjects & obj, const std_msgs::msg::Header & header); + const TrackedObjects & input_objects, const std_msgs::msg::Header & header); // predict tracked objects TrackedObjects interpolateTrackedObjects( diff --git a/perception/tracking_object_merger/src/utils/utils.cpp b/perception/tracking_object_merger/src/utils/utils.cpp index e981ef09a21b0..90437d3ebd7aa 100644 --- a/perception/tracking_object_merger/src/utils/utils.cpp +++ b/perception/tracking_object_merger/src/utils/utils.cpp @@ -153,20 +153,20 @@ TrackedObject predictPastOrFutureTrackedObject(const TrackedObject & obj, const /** * @brief predict past or future tracked objects * - * @param obj + * @param input_objects * @param header * @return TrackedObjects */ TrackedObjects predictPastOrFutureTrackedObjects( - const TrackedObjects & obj, const std_msgs::msg::Header & header) + const TrackedObjects & input_objects, const std_msgs::msg::Header & header) { // for each object, predict past or future TrackedObjects output_objects; - output_objects.header = obj.header; + output_objects.header = input_objects.header; output_objects.header.stamp = header.stamp; - const auto dt = (rclcpp::Time(header.stamp) - rclcpp::Time(obj.header.stamp)).seconds(); - for (const auto & obj : obj.objects) { + const auto dt = (rclcpp::Time(header.stamp) - rclcpp::Time(input_objects.header.stamp)).seconds(); + for (const auto & obj : input_objects.objects) { output_objects.objects.push_back(predictPastOrFutureTrackedObject(obj, dt)); } return output_objects; From 60c66b816b9be53ac660d6c334d82316052653fb Mon Sep 17 00:00:00 2001 From: Ryuta Kambe Date: Sat, 22 Jun 2024 12:06:48 +0900 Subject: [PATCH 06/29] fix(autoware_behavior_path_planner_common): fix shadowArgument warning (#7623) Signed-off-by: Ryuta Kambe --- .../utils/drivable_area_expansion/static_drivable_area.cpp | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/planning/behavior_path_planner/autoware_behavior_path_planner_common/src/utils/drivable_area_expansion/static_drivable_area.cpp b/planning/behavior_path_planner/autoware_behavior_path_planner_common/src/utils/drivable_area_expansion/static_drivable_area.cpp index f304fff9ce450..4eff51d8a7806 100644 --- 
a/planning/behavior_path_planner/autoware_behavior_path_planner_common/src/utils/drivable_area_expansion/static_drivable_area.cpp +++ b/planning/behavior_path_planner/autoware_behavior_path_planner_common/src/utils/drivable_area_expansion/static_drivable_area.cpp @@ -745,9 +745,9 @@ std::vector cutOverlappedLanes( } // Step2. pick up only path points within drivable lanes - for (const auto & lanes : shorten_lanes) { + for (const auto & drivable_lanes : shorten_lanes) { for (size_t i = start_point_idx; i < original_points.size(); ++i) { - if (is_point_in_drivable_lanes(lanes, original_points.at(i))) { + if (is_point_in_drivable_lanes(drivable_lanes, original_points.at(i))) { path.points.push_back(original_points.at(i)); continue; } From 8b24d9bd3b58c4fa6f62ecdc703ae42779bda6b4 Mon Sep 17 00:00:00 2001 From: Ryuta Kambe Date: Sat, 22 Jun 2024 14:32:41 +0900 Subject: [PATCH 07/29] fix(tier4_camera_view_rviz_plugin): fix funcArgNamesDifferent warnings (#7621) Signed-off-by: Ryuta Kambe --- .../src/bird_eye_view_controller.hpp | 4 ++-- .../src/third_person_view_controller.hpp | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/common/tier4_camera_view_rviz_plugin/src/bird_eye_view_controller.hpp b/common/tier4_camera_view_rviz_plugin/src/bird_eye_view_controller.hpp index 3c0091740bd59..c25787af2f47b 100644 --- a/common/tier4_camera_view_rviz_plugin/src/bird_eye_view_controller.hpp +++ b/common/tier4_camera_view_rviz_plugin/src/bird_eye_view_controller.hpp @@ -72,7 +72,7 @@ class BirdEyeViewController : public rviz_common::FramePositionTrackingViewContr void onInitialize() override; - void handleMouseEvent(rviz_common::ViewportMouseEvent & evt) override; + void handleMouseEvent(rviz_common::ViewportMouseEvent & event) override; void lookAt(const Ogre::Vector3 & point) override; @@ -96,7 +96,7 @@ class BirdEyeViewController : public rviz_common::FramePositionTrackingViewContr void orientCamera(); void setPosition(const Ogre::Vector3 & pos_rel_target); - void move_camera(float x, float y); + void move_camera(float dx, float dy); void updateCamera(); Ogre::SceneNode * getCameraParent(Ogre::Camera * camera); diff --git a/common/tier4_camera_view_rviz_plugin/src/third_person_view_controller.hpp b/common/tier4_camera_view_rviz_plugin/src/third_person_view_controller.hpp index fef036ceccda3..7f2cf6a8d17ee 100644 --- a/common/tier4_camera_view_rviz_plugin/src/third_person_view_controller.hpp +++ b/common/tier4_camera_view_rviz_plugin/src/third_person_view_controller.hpp @@ -68,7 +68,7 @@ class ThirdPersonViewController : public rviz_default_plugins::view_controllers: public: void onInitialize() override; - void handleMouseEvent(rviz_common::ViewportMouseEvent & evt) override; + void handleMouseEvent(rviz_common::ViewportMouseEvent & event) override; void lookAt(const Ogre::Vector3 & point) override; From 3900cbedd705e5e754e9e4121657eed974418924 Mon Sep 17 00:00:00 2001 From: Ryuta Kambe Date: Sat, 22 Jun 2024 17:55:09 +0900 Subject: [PATCH 08/29] fix(autoware_behavior_path_goal_planner_module): fix lateral_offset related warnings (#7624) Signed-off-by: Ryuta Kambe --- .../src/goal_searcher.cpp | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/planning/behavior_path_planner/autoware_behavior_path_goal_planner_module/src/goal_searcher.cpp b/planning/behavior_path_planner/autoware_behavior_path_goal_planner_module/src/goal_searcher.cpp index 04ee5278912ac..83e4c2857b3ac 100644 --- 
a/planning/behavior_path_planner/autoware_behavior_path_goal_planner_module/src/goal_searcher.cpp +++ b/planning/behavior_path_planner/autoware_behavior_path_goal_planner_module/src/goal_searcher.cpp @@ -161,9 +161,7 @@ GoalCandidates GoalSearcher::search( original_search_poses.push_back(original_search_pose); // for createAreaPolygon Pose search_pose{}; // search goal_pose in lateral direction - double lateral_offset = 0.0; for (double dy = 0; dy <= max_lateral_offset; dy += lateral_offset_interval) { - lateral_offset = dy; search_pose = calcOffsetPose(original_search_pose, 0, sign * dy, 0); const auto transformed_vehicle_footprint = @@ -185,7 +183,7 @@ GoalCandidates GoalSearcher::search( GoalCandidate goal_candidate{}; goal_candidate.goal_pose = search_pose; - goal_candidate.lateral_offset = lateral_offset; + goal_candidate.lateral_offset = dy; goal_candidate.id = goal_id; goal_id++; // use longitudinal_distance as distance_from_original_goal From a54b73e553067bae84ce2e2cc940eddbdd1519d3 Mon Sep 17 00:00:00 2001 From: Ryuta Kambe Date: Sat, 22 Jun 2024 23:53:26 +0900 Subject: [PATCH 09/29] fix(probabilistic_occupancy_grid_map): fix knownConditionTrueFalse warning (#7619) Signed-off-by: Ryuta Kambe --- .../occupancy_grid_map_projective.cpp | 13 +++---------- 1 file changed, 3 insertions(+), 10 deletions(-) diff --git a/perception/probabilistic_occupancy_grid_map/src/pointcloud_based_occupancy_grid_map/occupancy_grid_map_projective.cpp b/perception/probabilistic_occupancy_grid_map/src/pointcloud_based_occupancy_grid_map/occupancy_grid_map_projective.cpp index 79fc6e8c40b5e..3846b3a7d16e7 100644 --- a/perception/probabilistic_occupancy_grid_map/src/pointcloud_based_occupancy_grid_map/occupancy_grid_map_projective.cpp +++ b/perception/probabilistic_occupancy_grid_map/src/pointcloud_based_occupancy_grid_map/occupancy_grid_map_projective.cpp @@ -225,11 +225,9 @@ void OccupancyGridMapProjectiveBlindSpot::updateWithPointCloud( if (dist_index + 1 == obstacle_pointcloud_angle_bin.size()) { const auto & source = obstacle_pointcloud_angle_bin.at(dist_index); - if (!no_visible_point_beyond) { - raytrace( - source.wx, source.wy, source.projected_wx, source.projected_wy, - occupancy_cost_value::NO_INFORMATION); - } + raytrace( + source.wx, source.wy, source.projected_wx, source.projected_wy, + occupancy_cost_value::NO_INFORMATION); continue; } @@ -238,11 +236,6 @@ void OccupancyGridMapProjectiveBlindSpot::updateWithPointCloud( obstacle_pointcloud_angle_bin.at(dist_index).range); if (next_obstacle_point_distance <= obstacle_separation_threshold_) { continue; - } else if (no_visible_point_beyond) { - const auto & source = obstacle_pointcloud_angle_bin.at(dist_index); - const auto & target = obstacle_pointcloud_angle_bin.at(dist_index + 1); - raytrace(source.wx, source.wy, target.wx, target.wy, occupancy_cost_value::NO_INFORMATION); - continue; } auto next_raw_distance = From 6c2e48465c8ce3896525959c403976b503dcb03f Mon Sep 17 00:00:00 2001 From: Ryuta Kambe Date: Sat, 22 Jun 2024 23:54:21 +0900 Subject: [PATCH 10/29] fix(pointcloud_preprocessor): fix constVariableReference warnings (#7628) Signed-off-by: Ryuta Kambe --- .../src/concatenate_data/concatenate_and_time_sync_nodelet.cpp | 2 +- .../src/concatenate_data/concatenate_pointclouds.cpp | 2 +- .../src/time_synchronizer/time_synchronizer_nodelet.cpp | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/sensing/pointcloud_preprocessor/src/concatenate_data/concatenate_and_time_sync_nodelet.cpp 
b/sensing/pointcloud_preprocessor/src/concatenate_data/concatenate_and_time_sync_nodelet.cpp index e998a79b27f77..4fd28e02cc74f 100644 --- a/sensing/pointcloud_preprocessor/src/concatenate_data/concatenate_and_time_sync_nodelet.cpp +++ b/sensing/pointcloud_preprocessor/src/concatenate_data/concatenate_and_time_sync_nodelet.cpp @@ -149,7 +149,7 @@ PointCloudConcatenateDataSynchronizerComponent::PointCloudConcatenateDataSynchro { RCLCPP_DEBUG_STREAM( get_logger(), "Subscribing to " << input_topics_.size() << " user given topics as inputs:"); - for (auto & input_topic : input_topics_) { + for (const auto & input_topic : input_topics_) { RCLCPP_DEBUG_STREAM(get_logger(), " - " << input_topic); } diff --git a/sensing/pointcloud_preprocessor/src/concatenate_data/concatenate_pointclouds.cpp b/sensing/pointcloud_preprocessor/src/concatenate_data/concatenate_pointclouds.cpp index 3586b7c999776..29f0bf61a1ab7 100644 --- a/sensing/pointcloud_preprocessor/src/concatenate_data/concatenate_pointclouds.cpp +++ b/sensing/pointcloud_preprocessor/src/concatenate_data/concatenate_pointclouds.cpp @@ -105,7 +105,7 @@ PointCloudConcatenationComponent::PointCloudConcatenationComponent( { RCLCPP_INFO_STREAM( get_logger(), "Subscribing to " << input_topics_.size() << " user given topics as inputs:"); - for (auto & input_topic : input_topics_) { + for (const auto & input_topic : input_topics_) { RCLCPP_INFO_STREAM(get_logger(), " - " << input_topic); } diff --git a/sensing/pointcloud_preprocessor/src/time_synchronizer/time_synchronizer_nodelet.cpp b/sensing/pointcloud_preprocessor/src/time_synchronizer/time_synchronizer_nodelet.cpp index 099e5757ffc0a..890dcdb974cf3 100644 --- a/sensing/pointcloud_preprocessor/src/time_synchronizer/time_synchronizer_nodelet.cpp +++ b/sensing/pointcloud_preprocessor/src/time_synchronizer/time_synchronizer_nodelet.cpp @@ -118,7 +118,7 @@ PointCloudDataSynchronizerComponent::PointCloudDataSynchronizerComponent( { RCLCPP_INFO_STREAM( get_logger(), "Subscribing to " << input_topics_.size() << " user given topics as inputs:"); - for (auto & input_topic : input_topics_) { + for (const auto & input_topic : input_topics_) { RCLCPP_INFO_STREAM(get_logger(), " - " << input_topic); } From dc3751f8f88f67ab80fff9769c9a6bcbe7fedafc Mon Sep 17 00:00:00 2001 From: Ryuta Kambe Date: Sun, 23 Jun 2024 12:49:28 +0900 Subject: [PATCH 11/29] fix(autoware_motion_velocity_obstacle_velocity_limiter_module): fix unreadVariable warning (#7625) Signed-off-by: Ryuta Kambe --- .../src/trajectory_preprocessing.cpp | 2 -- 1 file changed, 2 deletions(-) diff --git a/planning/motion_velocity_planner/autoware_motion_velocity_obstacle_velocity_limiter_module/src/trajectory_preprocessing.cpp b/planning/motion_velocity_planner/autoware_motion_velocity_obstacle_velocity_limiter_module/src/trajectory_preprocessing.cpp index 7498568be99fb..f4b1304d67e5e 100644 --- a/planning/motion_velocity_planner/autoware_motion_velocity_obstacle_velocity_limiter_module/src/trajectory_preprocessing.cpp +++ b/planning/motion_velocity_planner/autoware_motion_velocity_obstacle_velocity_limiter_module/src/trajectory_preprocessing.cpp @@ -68,7 +68,6 @@ TrajectoryPoints downsampleTrajectory( void calculateSteeringAngles(TrajectoryPoints & trajectory, const double wheel_base) { - auto t = 0.0; auto prev_point = trajectory.front(); auto prev_heading = tf2::getYaw(prev_point.pose.orientation); for (auto i = 1ul; i < trajectory.size(); ++i) { @@ -76,7 +75,6 @@ void calculateSteeringAngles(TrajectoryPoints & trajectory, const double wheel_b auto & 
point = trajectory[i]; const auto dt = autoware::universe_utils::calcDistance2d(prev_point, point) / prev_point.longitudinal_velocity_mps; - t += dt; const auto heading = tf2::getYaw(point.pose.orientation); const auto d_heading = heading - prev_heading; prev_heading = heading; From 2461903b4023ae281c83b5d83a2466f7a4c7a841 Mon Sep 17 00:00:00 2001 From: Ryuta Kambe Date: Sun, 23 Jun 2024 12:50:41 +0900 Subject: [PATCH 12/29] fix(pointcloud_preprocessor): fix redundantInitialization warning (#7629) Signed-off-by: Ryuta Kambe --- .../dual_return_outlier_filter_nodelet.cpp | 17 +++-------------- 1 file changed, 3 insertions(+), 14 deletions(-) diff --git a/sensing/pointcloud_preprocessor/src/outlier_filter/dual_return_outlier_filter_nodelet.cpp b/sensing/pointcloud_preprocessor/src/outlier_filter/dual_return_outlier_filter_nodelet.cpp index ffbf322e669dc..275c56eaefcc7 100644 --- a/sensing/pointcloud_preprocessor/src/outlier_filter/dual_return_outlier_filter_nodelet.cpp +++ b/sensing/pointcloud_preprocessor/src/outlier_filter/dual_return_outlier_filter_nodelet.cpp @@ -85,28 +85,17 @@ void DualReturnOutlierFilterComponent::onVisibilityChecker(DiagnosticStatusWrapp // Add values stat.add("value", std::to_string(visibility_)); - // Judge level auto level = DiagnosticStatus::OK; + std::string msg = "OK"; if (visibility_ < 0) { level = DiagnosticStatus::STALE; + msg = "STALE"; } else if (visibility_ < visibility_error_threshold_) { level = DiagnosticStatus::ERROR; + msg = "ERROR: low visibility in dual outlier filter"; } else if (visibility_ < visibility_warn_threshold_) { level = DiagnosticStatus::WARN; - } else { - level = DiagnosticStatus::OK; - } - - // Set message - std::string msg; - if (level == DiagnosticStatus::OK) { - msg = "OK"; - } else if (level == DiagnosticStatus::WARN) { msg = "WARNING: low visibility in dual outlier filter"; - } else if (level == DiagnosticStatus::ERROR) { - msg = "ERROR: low visibility in dual outlier filter"; - } else if (level == DiagnosticStatus::STALE) { - msg = "STALE"; } stat.summary(level, msg); } From ca8cb30fe90a61d80fae8ddffd3bbffa78a96cf8 Mon Sep 17 00:00:00 2001 From: Ryuta Kambe Date: Sun, 23 Jun 2024 13:32:25 +0900 Subject: [PATCH 13/29] fix(autoware_obstacle_cruise_planner): fix unreadVariable warning (#7627) Signed-off-by: Ryuta Kambe --- .../optimization_based_planner/optimization_based_planner.cpp | 1 - 1 file changed, 1 deletion(-) diff --git a/planning/autoware_obstacle_cruise_planner/src/optimization_based_planner/optimization_based_planner.cpp b/planning/autoware_obstacle_cruise_planner/src/optimization_based_planner/optimization_based_planner.cpp index d256a64984667..0e9a27622fbd9 100644 --- a/planning/autoware_obstacle_cruise_planner/src/optimization_based_planner/optimization_based_planner.cpp +++ b/planning/autoware_obstacle_cruise_planner/src/optimization_based_planner/optimization_based_planner.cpp @@ -702,7 +702,6 @@ void OptimizationBasedPlanner::publishDebugTrajectory( const SBoundaries & s_boundaries, const VelocityOptimizer::OptimizationResult & opt_result) { const auto & current_time = planner_data.current_time; - const std::vector time = opt_result.t; // Publish optimized result and boundary Trajectory boundary_traj; boundary_traj.header.stamp = current_time; From 30ecaad79293215d0d46aa4b42d7bdfe18fc698f Mon Sep 17 00:00:00 2001 From: Ryuta Kambe Date: Sun, 23 Jun 2024 15:37:06 +0900 Subject: [PATCH 14/29] fix(autoware_behavior_velocity_no_drivable_lane_module): fix containerOutOfBounds wawrning (#7631) * 
fix(autoware_behavior_velocity_no_drivable_lane_module): fix containerOutOfBounds wawrning Signed-off-by: Ryuta Kambe * style(pre-commit): autofix --------- Signed-off-by: Ryuta Kambe Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- .../src/util.cpp | 50 +++++++++++-------- 1 file changed, 29 insertions(+), 21 deletions(-) diff --git a/planning/behavior_velocity_planner/autoware_behavior_velocity_no_drivable_lane_module/src/util.cpp b/planning/behavior_velocity_planner/autoware_behavior_velocity_no_drivable_lane_module/src/util.cpp index 1339fee207416..25315fd397aa4 100644 --- a/planning/behavior_velocity_planner/autoware_behavior_velocity_no_drivable_lane_module/src/util.cpp +++ b/planning/behavior_velocity_planner/autoware_behavior_velocity_no_drivable_lane_module/src/util.cpp @@ -85,29 +85,37 @@ PathWithNoDrivableLanePolygonIntersection getPathIntersectionWithNoDrivableLaneP bg::within(first_path_point, polygon); auto const & is_last_path_point_inside_polygon = bg::within(last_path_point, polygon); - if ( - intersects.empty() && - path_no_drivable_lane_polygon_intersection.is_first_path_point_inside_polygon && - is_last_path_point_inside_polygon) { - path_no_drivable_lane_polygon_intersection.is_path_inside_of_polygon = true; - } else { - // classify first and second intersection points - for (size_t i = 0; i < intersects.size(); ++i) { - const auto & p = intersects.at(i); - if ( - (intersects.size() == 2 && i == 0) || - (intersects.size() == 1 && is_last_path_point_inside_polygon)) { - path_no_drivable_lane_polygon_intersection.first_intersection_point = - createPoint(p.x(), p.y(), ego_pos.z); - } else if ( - (intersects.size() == 2 && i == 1) || - (intersects.size() == 1 && - path_no_drivable_lane_polygon_intersection.is_first_path_point_inside_polygon)) { - path_no_drivable_lane_polygon_intersection.second_intersection_point = - createPoint(p.x(), p.y(), ego_pos.z); - } + if (intersects.empty()) { + if ( + path_no_drivable_lane_polygon_intersection.is_first_path_point_inside_polygon && + is_last_path_point_inside_polygon) { + path_no_drivable_lane_polygon_intersection.is_path_inside_of_polygon = true; + } else { + // do nothing + } + } else if (intersects.size() == 1) { + const auto & p = intersects.at(0); + if (is_last_path_point_inside_polygon) { + path_no_drivable_lane_polygon_intersection.first_intersection_point = + createPoint(p.x(), p.y(), ego_pos.z); + } else if (path_no_drivable_lane_polygon_intersection.is_first_path_point_inside_polygon) { + path_no_drivable_lane_polygon_intersection.second_intersection_point = + createPoint(p.x(), p.y(), ego_pos.z); + } else { + // do nothing } + } else if (intersects.size() == 2) { + // classify first and second intersection points + const auto & p0 = intersects.at(0); + const auto & p1 = intersects.at(1); + path_no_drivable_lane_polygon_intersection.first_intersection_point = + createPoint(p0.x(), p0.y(), ego_pos.z); + path_no_drivable_lane_polygon_intersection.second_intersection_point = + createPoint(p1.x(), p1.y(), ego_pos.z); + } else { + // do nothing } + return path_no_drivable_lane_polygon_intersection; } } // namespace autoware::behavior_velocity_planner From 6ac4b24ca3e623e0ee384763e92817392a87898d Mon Sep 17 00:00:00 2001 From: Taekjin LEE Date: Mon, 24 Jun 2024 08:52:34 +0900 Subject: [PATCH 15/29] feat(multi_object_tracker): tracker refactoring (#7271) * feat: separate filters Signed-off-by: Taekjin LEE * fix: object validator to modular Signed-off-by: Taekjin LEE * fix: replace 
xyzrpy covariance index Signed-off-by: Taekjin LEE * chore: refactor tracker class members Signed-off-by: Taekjin LEE * chore: refactor Signed-off-by: Taekjin LEE * chore: refactoring destructor Signed-off-by: Taekjin LEE * chore: refactoring getUpdatingObject Signed-off-by: Taekjin LEE * chore: refactoring object size checker Signed-off-by: Taekjin LEE * fix: debugger delay calculation order is fixed Signed-off-by: Taekjin LEE * fix: bicycle size update bug fix Signed-off-by: Taekjin LEE * fix: renamed message package Signed-off-by: Taekjin LEE * chore: refactoring Signed-off-by: Taekjin LEE * feat: object parameter class Signed-off-by: Taekjin LEE * style(pre-commit): autofix Signed-off-by: Taekjin LEE * chore: refactor includes Signed-off-by: Taekjin LEE * feat: initial impl. of object model Signed-off-by: Taekjin LEE * feat: impl. object model to bicycle, big, normal Signed-off-by: Taekjin LEE * feat: impl. object model to pedestrian Signed-off-by: Taekjin LEE * feat: impl. cont Signed-off-by: Taekjin LEE * style(pre-commit): autofix Signed-off-by: Taekjin LEE * chore: fix missing refactoring Signed-off-by: Taekjin LEE * fix: align unit of angles, yaw rates Signed-off-by: Taekjin LEE * fix: rename tier4_autoware_utils to autoware_universe_utils Signed-off-by: Taekjin LEE * fix: missing unit conversion Signed-off-by: Taekjin LEE * style(pre-commit): autofix Signed-off-by: Taekjin LEE * fix: sources Signed-off-by: Taekjin LEE * chore: Update include and import statements Signed-off-by: Taekjin LEE * fix: unit convert bug Signed-off-by: Taekjin LEE * fix: update to autoware::universe_utils Signed-off-by: Taekjin LEE * fix: mis-implementation of process noise Signed-off-by: Taekjin LEE --------- Signed-off-by: Taekjin LEE Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- .../data_association/data_association.hpp | 13 +- .../debugger/debug_object.hpp | 8 +- .../debugger/debugger.hpp | 8 +- .../multi_object_tracker_core.hpp | 4 +- .../processor/processor.hpp | 4 +- .../tracker/model/bicycle_tracker.hpp | 23 +- .../tracker/model/big_vehicle_tracker.hpp | 25 +- .../model/multiple_vehicle_tracker.hpp | 2 +- .../tracker/model/normal_vehicle_tracker.hpp | 25 +- .../tracker/model/pass_through_tracker.hpp | 4 +- .../model/pedestrian_and_bicycle_tracker.hpp | 3 +- .../tracker/model/pedestrian_tracker.hpp | 23 +- .../tracker/model/tracker_base.hpp | 6 +- .../tracker/model/unknown_tracker.hpp | 7 +- .../motion_model/bicycle_motion_model.hpp | 4 +- .../motion_model/ctrv_motion_model.hpp | 2 +- .../tracker/motion_model/cv_motion_model.hpp | 2 +- .../motion_model/motion_model_base.hpp | 3 +- .../tracker/object_model/object_model.hpp | 305 ++++++++++++++++++ .../multi_object_tracker/utils/utils.hpp | 141 ++------ .../src/debugger/debugger.cpp | 2 +- .../src/multi_object_tracker_core.cpp | 19 +- .../src/processor/processor.cpp | 2 +- .../src/tracker/model/bicycle_tracker.cpp | 188 +++++------ .../src/tracker/model/big_vehicle_tracker.cpp | 213 ++++++------ .../tracker/model/normal_vehicle_tracker.cpp | 214 ++++++------ .../tracker/model/pass_through_tracker.cpp | 46 +-- .../src/tracker/model/pedestrian_tracker.cpp | 204 ++++++------ .../src/tracker/model/unknown_tracker.cpp | 63 ++-- .../motion_model/bicycle_motion_model.cpp | 158 ++++----- .../motion_model/ctrv_motion_model.cpp | 124 +++---- .../tracker/motion_model/cv_motion_model.cpp | 87 ++--- 32 files changed, 1036 insertions(+), 896 deletions(-) create mode 100644 
perception/multi_object_tracker/include/multi_object_tracker/tracker/object_model/object_model.hpp diff --git a/perception/multi_object_tracker/include/multi_object_tracker/data_association/data_association.hpp b/perception/multi_object_tracker/include/multi_object_tracker/data_association/data_association.hpp index 56f4c3ac45e52..c30423db02c26 100644 --- a/perception/multi_object_tracker/include/multi_object_tracker/data_association/data_association.hpp +++ b/perception/multi_object_tracker/include/multi_object_tracker/data_association/data_association.hpp @@ -19,19 +19,20 @@ #ifndef MULTI_OBJECT_TRACKER__DATA_ASSOCIATION__DATA_ASSOCIATION_HPP_ #define MULTI_OBJECT_TRACKER__DATA_ASSOCIATION__DATA_ASSOCIATION_HPP_ -#include -#include -#include -#include - #define EIGEN_MPL2_ONLY + #include "multi_object_tracker/data_association/solver/gnn_solver.hpp" #include "multi_object_tracker/tracker/tracker.hpp" #include #include -#include +#include "autoware_perception_msgs/msg/detected_objects.hpp" + +#include +#include +#include +#include class DataAssociation { diff --git a/perception/multi_object_tracker/include/multi_object_tracker/debugger/debug_object.hpp b/perception/multi_object_tracker/include/multi_object_tracker/debugger/debug_object.hpp index 7c1b814258bcf..08c9eb975d8e6 100644 --- a/perception/multi_object_tracker/include/multi_object_tracker/debugger/debug_object.hpp +++ b/perception/multi_object_tracker/include/multi_object_tracker/debugger/debug_object.hpp @@ -15,15 +15,15 @@ #ifndef MULTI_OBJECT_TRACKER__DEBUGGER__DEBUG_OBJECT_HPP_ #define MULTI_OBJECT_TRACKER__DEBUGGER__DEBUG_OBJECT_HPP_ +#include "autoware/universe_utils/ros/uuid_helper.hpp" #include "multi_object_tracker/tracker/model/tracker_base.hpp" -#include #include -#include "unique_identifier_msgs/msg/uuid.hpp" -#include -#include +#include "autoware_perception_msgs/msg/detected_objects.hpp" +#include "autoware_perception_msgs/msg/tracked_objects.hpp" #include +#include #include #include diff --git a/perception/multi_object_tracker/include/multi_object_tracker/debugger/debugger.hpp b/perception/multi_object_tracker/include/multi_object_tracker/debugger/debugger.hpp index 3019f16ada7c9..a1c516147b220 100644 --- a/perception/multi_object_tracker/include/multi_object_tracker/debugger/debugger.hpp +++ b/perception/multi_object_tracker/include/multi_object_tracker/debugger/debugger.hpp @@ -15,16 +15,16 @@ #ifndef MULTI_OBJECT_TRACKER__DEBUGGER__DEBUGGER_HPP_ #define MULTI_OBJECT_TRACKER__DEBUGGER__DEBUGGER_HPP_ +#include "autoware/universe_utils/ros/debug_publisher.hpp" +#include "autoware/universe_utils/ros/published_time_publisher.hpp" #include "multi_object_tracker/debugger/debug_object.hpp" -#include -#include #include #include #include -#include -#include +#include "autoware_perception_msgs/msg/detected_objects.hpp" +#include "autoware_perception_msgs/msg/tracked_objects.hpp" #include #include diff --git a/perception/multi_object_tracker/include/multi_object_tracker/multi_object_tracker_core.hpp b/perception/multi_object_tracker/include/multi_object_tracker/multi_object_tracker_core.hpp index 4c1ccced1bcf7..aff3cbd00eabe 100644 --- a/perception/multi_object_tracker/include/multi_object_tracker/multi_object_tracker_core.hpp +++ b/perception/multi_object_tracker/include/multi_object_tracker/multi_object_tracker_core.hpp @@ -27,8 +27,8 @@ #include -#include -#include +#include "autoware_perception_msgs/msg/detected_objects.hpp" +#include "autoware_perception_msgs/msg/tracked_objects.hpp" #include #include diff --git 
a/perception/multi_object_tracker/include/multi_object_tracker/processor/processor.hpp b/perception/multi_object_tracker/include/multi_object_tracker/processor/processor.hpp index b2f5f41c81f0a..6d0dbcd036e53 100644 --- a/perception/multi_object_tracker/include/multi_object_tracker/processor/processor.hpp +++ b/perception/multi_object_tracker/include/multi_object_tracker/processor/processor.hpp @@ -19,8 +19,8 @@ #include -#include -#include +#include "autoware_perception_msgs/msg/detected_objects.hpp" +#include "autoware_perception_msgs/msg/tracked_objects.hpp" #include #include diff --git a/perception/multi_object_tracker/include/multi_object_tracker/tracker/model/bicycle_tracker.hpp b/perception/multi_object_tracker/include/multi_object_tracker/tracker/model/bicycle_tracker.hpp index 36176b8202e72..63ad496b70ed9 100644 --- a/perception/multi_object_tracker/include/multi_object_tracker/tracker/model/bicycle_tracker.hpp +++ b/perception/multi_object_tracker/include/multi_object_tracker/tracker/model/bicycle_tracker.hpp @@ -19,10 +19,10 @@ #ifndef MULTI_OBJECT_TRACKER__TRACKER__MODEL__BICYCLE_TRACKER_HPP_ #define MULTI_OBJECT_TRACKER__TRACKER__MODEL__BICYCLE_TRACKER_HPP_ +#include "kalman_filter/kalman_filter.hpp" #include "multi_object_tracker/tracker/model/tracker_base.hpp" #include "multi_object_tracker/tracker/motion_model/bicycle_motion_model.hpp" - -#include +#include "multi_object_tracker/tracker/object_model/object_model.hpp" class BicycleTracker : public Tracker { @@ -30,13 +30,7 @@ class BicycleTracker : public Tracker autoware_perception_msgs::msg::DetectedObject object_; rclcpp::Logger logger_; -private: - struct EkfParams - { - double r_cov_x; - double r_cov_y; - double r_cov_yaw; - } ekf_params_; + object_model::ObjectModel object_model_ = object_model::bicycle; double z_; @@ -48,9 +42,7 @@ class BicycleTracker : public Tracker }; BoundingBox bounding_box_; -private: BicycleMotionModel motion_model_; - const char DIM = motion_model_.DIM; using IDX = BicycleMotionModel::IDX; public: @@ -63,15 +55,16 @@ class BicycleTracker : public Tracker bool measure( const autoware_perception_msgs::msg::DetectedObject & object, const rclcpp::Time & time, const geometry_msgs::msg::Transform & self_transform) override; - autoware_perception_msgs::msg::DetectedObject getUpdatingObject( - const autoware_perception_msgs::msg::DetectedObject & object, - const geometry_msgs::msg::Transform & self_transform); bool measureWithPose(const autoware_perception_msgs::msg::DetectedObject & object); bool measureWithShape(const autoware_perception_msgs::msg::DetectedObject & object); bool getTrackedObject( const rclcpp::Time & time, autoware_perception_msgs::msg::TrackedObject & object) const override; - virtual ~BicycleTracker() {} + +private: + autoware_perception_msgs::msg::DetectedObject getUpdatingObject( + const autoware_perception_msgs::msg::DetectedObject & object, + const geometry_msgs::msg::Transform & self_transform) const; }; #endif // MULTI_OBJECT_TRACKER__TRACKER__MODEL__BICYCLE_TRACKER_HPP_ diff --git a/perception/multi_object_tracker/include/multi_object_tracker/tracker/model/big_vehicle_tracker.hpp b/perception/multi_object_tracker/include/multi_object_tracker/tracker/model/big_vehicle_tracker.hpp index 8eec03c5dc847..7112e6a08ade1 100644 --- a/perception/multi_object_tracker/include/multi_object_tracker/tracker/model/big_vehicle_tracker.hpp +++ b/perception/multi_object_tracker/include/multi_object_tracker/tracker/model/big_vehicle_tracker.hpp @@ -19,10 +19,10 @@ #ifndef 
MULTI_OBJECT_TRACKER__TRACKER__MODEL__BIG_VEHICLE_TRACKER_HPP_ #define MULTI_OBJECT_TRACKER__TRACKER__MODEL__BIG_VEHICLE_TRACKER_HPP_ +#include "kalman_filter/kalman_filter.hpp" #include "multi_object_tracker/tracker/model/tracker_base.hpp" #include "multi_object_tracker/tracker/motion_model/bicycle_motion_model.hpp" - -#include +#include "multi_object_tracker/tracker/object_model/object_model.hpp" class BigVehicleTracker : public Tracker { @@ -30,14 +30,8 @@ class BigVehicleTracker : public Tracker autoware_perception_msgs::msg::DetectedObject object_; rclcpp::Logger logger_; -private: - struct EkfParams - { - double r_cov_x; - double r_cov_y; - double r_cov_yaw; - double r_cov_vel; - } ekf_params_; + object_model::ObjectModel object_model_ = object_model::big_vehicle; + double velocity_deviation_threshold_; double z_; @@ -51,9 +45,7 @@ class BigVehicleTracker : public Tracker BoundingBox bounding_box_; Eigen::Vector2d tracking_offset_; -private: BicycleMotionModel motion_model_; - const char DIM = motion_model_.DIM; using IDX = BicycleMotionModel::IDX; public: @@ -66,15 +58,16 @@ class BigVehicleTracker : public Tracker bool measure( const autoware_perception_msgs::msg::DetectedObject & object, const rclcpp::Time & time, const geometry_msgs::msg::Transform & self_transform) override; - autoware_perception_msgs::msg::DetectedObject getUpdatingObject( - const autoware_perception_msgs::msg::DetectedObject & object, - const geometry_msgs::msg::Transform & self_transform); bool measureWithPose(const autoware_perception_msgs::msg::DetectedObject & object); bool measureWithShape(const autoware_perception_msgs::msg::DetectedObject & object); bool getTrackedObject( const rclcpp::Time & time, autoware_perception_msgs::msg::TrackedObject & object) const override; - virtual ~BigVehicleTracker() {} + +private: + autoware_perception_msgs::msg::DetectedObject getUpdatingObject( + const autoware_perception_msgs::msg::DetectedObject & object, + const geometry_msgs::msg::Transform & self_transform); }; #endif // MULTI_OBJECT_TRACKER__TRACKER__MODEL__BIG_VEHICLE_TRACKER_HPP_ diff --git a/perception/multi_object_tracker/include/multi_object_tracker/tracker/model/multiple_vehicle_tracker.hpp b/perception/multi_object_tracker/include/multi_object_tracker/tracker/model/multiple_vehicle_tracker.hpp index b3d088da54721..75aec0b06d6b8 100644 --- a/perception/multi_object_tracker/include/multi_object_tracker/tracker/model/multiple_vehicle_tracker.hpp +++ b/perception/multi_object_tracker/include/multi_object_tracker/tracker/model/multiple_vehicle_tracker.hpp @@ -19,11 +19,11 @@ #ifndef MULTI_OBJECT_TRACKER__TRACKER__MODEL__MULTIPLE_VEHICLE_TRACKER_HPP_ #define MULTI_OBJECT_TRACKER__TRACKER__MODEL__MULTIPLE_VEHICLE_TRACKER_HPP_ +#include "kalman_filter/kalman_filter.hpp" #include "multi_object_tracker/tracker/model/big_vehicle_tracker.hpp" #include "multi_object_tracker/tracker/model/normal_vehicle_tracker.hpp" #include "multi_object_tracker/tracker/model/tracker_base.hpp" -#include #include class MultipleVehicleTracker : public Tracker diff --git a/perception/multi_object_tracker/include/multi_object_tracker/tracker/model/normal_vehicle_tracker.hpp b/perception/multi_object_tracker/include/multi_object_tracker/tracker/model/normal_vehicle_tracker.hpp index 107a3ef194afb..1165cecab258b 100644 --- a/perception/multi_object_tracker/include/multi_object_tracker/tracker/model/normal_vehicle_tracker.hpp +++ b/perception/multi_object_tracker/include/multi_object_tracker/tracker/model/normal_vehicle_tracker.hpp @@ -19,10 
+19,10 @@ #ifndef MULTI_OBJECT_TRACKER__TRACKER__MODEL__NORMAL_VEHICLE_TRACKER_HPP_ #define MULTI_OBJECT_TRACKER__TRACKER__MODEL__NORMAL_VEHICLE_TRACKER_HPP_ +#include "kalman_filter/kalman_filter.hpp" #include "multi_object_tracker/tracker/model/tracker_base.hpp" #include "multi_object_tracker/tracker/motion_model/bicycle_motion_model.hpp" - -#include +#include "multi_object_tracker/tracker/object_model/object_model.hpp" class NormalVehicleTracker : public Tracker { @@ -30,14 +30,8 @@ class NormalVehicleTracker : public Tracker autoware_perception_msgs::msg::DetectedObject object_; rclcpp::Logger logger_; -private: - struct EkfParams - { - double r_cov_x; - double r_cov_y; - double r_cov_yaw; - double r_cov_vel; - } ekf_params_; + object_model::ObjectModel object_model_ = object_model::normal_vehicle; + double velocity_deviation_threshold_; double z_; @@ -51,9 +45,7 @@ class NormalVehicleTracker : public Tracker BoundingBox bounding_box_; Eigen::Vector2d tracking_offset_; -private: BicycleMotionModel motion_model_; - const char DIM = motion_model_.DIM; using IDX = BicycleMotionModel::IDX; public: @@ -66,15 +58,16 @@ class NormalVehicleTracker : public Tracker bool measure( const autoware_perception_msgs::msg::DetectedObject & object, const rclcpp::Time & time, const geometry_msgs::msg::Transform & self_transform) override; - autoware_perception_msgs::msg::DetectedObject getUpdatingObject( - const autoware_perception_msgs::msg::DetectedObject & object, - const geometry_msgs::msg::Transform & self_transform); bool measureWithPose(const autoware_perception_msgs::msg::DetectedObject & object); bool measureWithShape(const autoware_perception_msgs::msg::DetectedObject & object); bool getTrackedObject( const rclcpp::Time & time, autoware_perception_msgs::msg::TrackedObject & object) const override; - virtual ~NormalVehicleTracker() {} + +private: + autoware_perception_msgs::msg::DetectedObject getUpdatingObject( + const autoware_perception_msgs::msg::DetectedObject & object, + const geometry_msgs::msg::Transform & self_transform); }; #endif // MULTI_OBJECT_TRACKER__TRACKER__MODEL__NORMAL_VEHICLE_TRACKER_HPP_ diff --git a/perception/multi_object_tracker/include/multi_object_tracker/tracker/model/pass_through_tracker.hpp b/perception/multi_object_tracker/include/multi_object_tracker/tracker/model/pass_through_tracker.hpp index 605b3ae462cec..cd661dab52c6e 100644 --- a/perception/multi_object_tracker/include/multi_object_tracker/tracker/model/pass_through_tracker.hpp +++ b/perception/multi_object_tracker/include/multi_object_tracker/tracker/model/pass_through_tracker.hpp @@ -19,10 +19,9 @@ #ifndef MULTI_OBJECT_TRACKER__TRACKER__MODEL__PASS_THROUGH_TRACKER_HPP_ #define MULTI_OBJECT_TRACKER__TRACKER__MODEL__PASS_THROUGH_TRACKER_HPP_ +#include "kalman_filter/kalman_filter.hpp" #include "tracker_base.hpp" -#include - class PassThroughTracker : public Tracker { private: @@ -43,7 +42,6 @@ class PassThroughTracker : public Tracker bool getTrackedObject( const rclcpp::Time & time, autoware_perception_msgs::msg::TrackedObject & object) const override; - virtual ~PassThroughTracker() {} }; #endif // MULTI_OBJECT_TRACKER__TRACKER__MODEL__PASS_THROUGH_TRACKER_HPP_ diff --git a/perception/multi_object_tracker/include/multi_object_tracker/tracker/model/pedestrian_and_bicycle_tracker.hpp b/perception/multi_object_tracker/include/multi_object_tracker/tracker/model/pedestrian_and_bicycle_tracker.hpp index ad81c504d3719..3c3ac038b085e 100644 --- 
a/perception/multi_object_tracker/include/multi_object_tracker/tracker/model/pedestrian_and_bicycle_tracker.hpp +++ b/perception/multi_object_tracker/include/multi_object_tracker/tracker/model/pedestrian_and_bicycle_tracker.hpp @@ -19,12 +19,11 @@ #ifndef MULTI_OBJECT_TRACKER__TRACKER__MODEL__PEDESTRIAN_AND_BICYCLE_TRACKER_HPP_ #define MULTI_OBJECT_TRACKER__TRACKER__MODEL__PEDESTRIAN_AND_BICYCLE_TRACKER_HPP_ +#include "kalman_filter/kalman_filter.hpp" #include "multi_object_tracker/tracker/model/bicycle_tracker.hpp" #include "multi_object_tracker/tracker/model/pedestrian_tracker.hpp" #include "multi_object_tracker/tracker/model/tracker_base.hpp" -#include - class PedestrianAndBicycleTracker : public Tracker { private: diff --git a/perception/multi_object_tracker/include/multi_object_tracker/tracker/model/pedestrian_tracker.hpp b/perception/multi_object_tracker/include/multi_object_tracker/tracker/model/pedestrian_tracker.hpp index e5d718c4b15ee..c5be57f656eb5 100644 --- a/perception/multi_object_tracker/include/multi_object_tracker/tracker/model/pedestrian_tracker.hpp +++ b/perception/multi_object_tracker/include/multi_object_tracker/tracker/model/pedestrian_tracker.hpp @@ -19,10 +19,10 @@ #ifndef MULTI_OBJECT_TRACKER__TRACKER__MODEL__PEDESTRIAN_TRACKER_HPP_ #define MULTI_OBJECT_TRACKER__TRACKER__MODEL__PEDESTRIAN_TRACKER_HPP_ +#include "kalman_filter/kalman_filter.hpp" #include "multi_object_tracker/tracker/model/tracker_base.hpp" #include "multi_object_tracker/tracker/motion_model/ctrv_motion_model.hpp" - -#include +#include "multi_object_tracker/tracker/object_model/object_model.hpp" // cspell: ignore CTRV @@ -32,13 +32,7 @@ class PedestrianTracker : public Tracker autoware_perception_msgs::msg::DetectedObject object_; rclcpp::Logger logger_; -private: - struct EkfParams - { - double r_cov_x; - double r_cov_y; - double r_cov_yaw; - } ekf_params_; + object_model::ObjectModel object_model_ = object_model::pedestrian; double z_; @@ -56,9 +50,7 @@ class PedestrianTracker : public Tracker BoundingBox bounding_box_; Cylinder cylinder_; -private: CTRVMotionModel motion_model_; - const char DIM = motion_model_.DIM; using IDX = CTRVMotionModel::IDX; public: @@ -71,15 +63,16 @@ class PedestrianTracker : public Tracker bool measure( const autoware_perception_msgs::msg::DetectedObject & object, const rclcpp::Time & time, const geometry_msgs::msg::Transform & self_transform) override; - autoware_perception_msgs::msg::DetectedObject getUpdatingObject( - const autoware_perception_msgs::msg::DetectedObject & object, - const geometry_msgs::msg::Transform & self_transform); bool measureWithPose(const autoware_perception_msgs::msg::DetectedObject & object); bool measureWithShape(const autoware_perception_msgs::msg::DetectedObject & object); bool getTrackedObject( const rclcpp::Time & time, autoware_perception_msgs::msg::TrackedObject & object) const override; - virtual ~PedestrianTracker() {} + +private: + autoware_perception_msgs::msg::DetectedObject getUpdatingObject( + const autoware_perception_msgs::msg::DetectedObject & object, + const geometry_msgs::msg::Transform & self_transform) const; }; #endif // MULTI_OBJECT_TRACKER__TRACKER__MODEL__PEDESTRIAN_TRACKER_HPP_ diff --git a/perception/multi_object_tracker/include/multi_object_tracker/tracker/model/tracker_base.hpp b/perception/multi_object_tracker/include/multi_object_tracker/tracker/model/tracker_base.hpp index 56670e9b54b7e..44b3884c392e6 100644 --- a/perception/multi_object_tracker/include/multi_object_tracker/tracker/model/tracker_base.hpp +++ 
b/perception/multi_object_tracker/include/multi_object_tracker/tracker/model/tracker_base.hpp @@ -28,8 +28,8 @@ #include "autoware_perception_msgs/msg/detected_object.hpp" #include "autoware_perception_msgs/msg/tracked_object.hpp" -#include "geometry_msgs/msg/point.hpp" -#include "unique_identifier_msgs/msg/uuid.hpp" +#include +#include #include @@ -54,7 +54,7 @@ class Tracker const rclcpp::Time & time, const std::vector & classification, const size_t & channel_size); - virtual ~Tracker() {} + virtual ~Tracker() = default; void initializeExistenceProbabilities( const uint & channel_index, const float & existence_probability); diff --git a/perception/multi_object_tracker/include/multi_object_tracker/tracker/model/unknown_tracker.hpp b/perception/multi_object_tracker/include/multi_object_tracker/tracker/model/unknown_tracker.hpp index 43a1f2fd14842..ca8ecba160bd8 100644 --- a/perception/multi_object_tracker/include/multi_object_tracker/tracker/model/unknown_tracker.hpp +++ b/perception/multi_object_tracker/include/multi_object_tracker/tracker/model/unknown_tracker.hpp @@ -19,18 +19,16 @@ #ifndef MULTI_OBJECT_TRACKER__TRACKER__MODEL__UNKNOWN_TRACKER_HPP_ #define MULTI_OBJECT_TRACKER__TRACKER__MODEL__UNKNOWN_TRACKER_HPP_ +#include "kalman_filter/kalman_filter.hpp" #include "multi_object_tracker/tracker/model/tracker_base.hpp" #include "multi_object_tracker/tracker/motion_model/cv_motion_model.hpp" -#include - class UnknownTracker : public Tracker { private: autoware_perception_msgs::msg::DetectedObject object_; rclcpp::Logger logger_; -private: struct EkfParams { double r_cov_x; @@ -41,9 +39,7 @@ class UnknownTracker : public Tracker double z_; -private: CVMotionModel motion_model_; - const char DIM = motion_model_.DIM; using IDX = CVMotionModel::IDX; public: @@ -64,7 +60,6 @@ class UnknownTracker : public Tracker bool getTrackedObject( const rclcpp::Time & time, autoware_perception_msgs::msg::TrackedObject & object) const override; - virtual ~UnknownTracker() {} }; #endif // MULTI_OBJECT_TRACKER__TRACKER__MODEL__UNKNOWN_TRACKER_HPP_ diff --git a/perception/multi_object_tracker/include/multi_object_tracker/tracker/motion_model/bicycle_motion_model.hpp b/perception/multi_object_tracker/include/multi_object_tracker/tracker/motion_model/bicycle_motion_model.hpp index 5127d0448835c..f6ce2842388c6 100644 --- a/perception/multi_object_tracker/include/multi_object_tracker/tracker/motion_model/bicycle_motion_model.hpp +++ b/perception/multi_object_tracker/include/multi_object_tracker/tracker/motion_model/bicycle_motion_model.hpp @@ -19,10 +19,10 @@ #ifndef MULTI_OBJECT_TRACKER__TRACKER__MOTION_MODEL__BICYCLE_MOTION_MODEL_HPP_ #define MULTI_OBJECT_TRACKER__TRACKER__MOTION_MODEL__BICYCLE_MOTION_MODEL_HPP_ +#include "kalman_filter/kalman_filter.hpp" #include "multi_object_tracker/tracker/motion_model/motion_model_base.hpp" #include -#include #include #ifdef ROS_DISTRO_GALACTIC @@ -47,6 +47,8 @@ class BicycleMotionModel : public MotionModel { double q_stddev_acc_long; double q_stddev_acc_lat; + double q_cov_acc_long; + double q_cov_acc_lat; double q_stddev_yaw_rate_min; double q_stddev_yaw_rate_max; double q_cov_slip_rate_min; diff --git a/perception/multi_object_tracker/include/multi_object_tracker/tracker/motion_model/ctrv_motion_model.hpp b/perception/multi_object_tracker/include/multi_object_tracker/tracker/motion_model/ctrv_motion_model.hpp index 6b071eddec7a9..657048079c46c 100644 --- a/perception/multi_object_tracker/include/multi_object_tracker/tracker/motion_model/ctrv_motion_model.hpp +++ 
b/perception/multi_object_tracker/include/multi_object_tracker/tracker/motion_model/ctrv_motion_model.hpp @@ -19,10 +19,10 @@ #ifndef MULTI_OBJECT_TRACKER__TRACKER__MOTION_MODEL__CTRV_MOTION_MODEL_HPP_ #define MULTI_OBJECT_TRACKER__TRACKER__MOTION_MODEL__CTRV_MOTION_MODEL_HPP_ +#include "kalman_filter/kalman_filter.hpp" #include "multi_object_tracker/tracker/motion_model/motion_model_base.hpp" #include -#include #include #ifdef ROS_DISTRO_GALACTIC diff --git a/perception/multi_object_tracker/include/multi_object_tracker/tracker/motion_model/cv_motion_model.hpp b/perception/multi_object_tracker/include/multi_object_tracker/tracker/motion_model/cv_motion_model.hpp index 59032706b00d6..421e413d7aab7 100644 --- a/perception/multi_object_tracker/include/multi_object_tracker/tracker/motion_model/cv_motion_model.hpp +++ b/perception/multi_object_tracker/include/multi_object_tracker/tracker/motion_model/cv_motion_model.hpp @@ -19,10 +19,10 @@ #ifndef MULTI_OBJECT_TRACKER__TRACKER__MOTION_MODEL__CV_MOTION_MODEL_HPP_ #define MULTI_OBJECT_TRACKER__TRACKER__MOTION_MODEL__CV_MOTION_MODEL_HPP_ +#include "kalman_filter/kalman_filter.hpp" #include "multi_object_tracker/tracker/motion_model/motion_model_base.hpp" #include -#include #include #ifdef ROS_DISTRO_GALACTIC diff --git a/perception/multi_object_tracker/include/multi_object_tracker/tracker/motion_model/motion_model_base.hpp b/perception/multi_object_tracker/include/multi_object_tracker/tracker/motion_model/motion_model_base.hpp index 1aca602ed66a3..e740612e96d4f 100644 --- a/perception/multi_object_tracker/include/multi_object_tracker/tracker/motion_model/motion_model_base.hpp +++ b/perception/multi_object_tracker/include/multi_object_tracker/tracker/motion_model/motion_model_base.hpp @@ -19,8 +19,9 @@ #ifndef MULTI_OBJECT_TRACKER__TRACKER__MOTION_MODEL__MOTION_MODEL_BASE_HPP_ #define MULTI_OBJECT_TRACKER__TRACKER__MOTION_MODEL__MOTION_MODEL_BASE_HPP_ +#include "kalman_filter/kalman_filter.hpp" + #include -#include #include #ifdef ROS_DISTRO_GALACTIC diff --git a/perception/multi_object_tracker/include/multi_object_tracker/tracker/object_model/object_model.hpp b/perception/multi_object_tracker/include/multi_object_tracker/tracker/object_model/object_model.hpp new file mode 100644 index 0000000000000..6a771344bb36b --- /dev/null +++ b/perception/multi_object_tracker/include/multi_object_tracker/tracker/object_model/object_model.hpp @@ -0,0 +1,305 @@ +// Copyright 2024 TIER IV, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+// +// +// Author: v1.0 Taekjin Lee +// + +#ifndef MULTI_OBJECT_TRACKER__TRACKER__OBJECT_MODEL__OBJECT_MODEL_HPP_ +#define MULTI_OBJECT_TRACKER__TRACKER__OBJECT_MODEL__OBJECT_MODEL_HPP_ + +#include + +namespace +{ +constexpr double const_g = 9.81; + +template <typename T> +constexpr T sq(const T x) +{ + return x * x; +} + +template <typename T> +constexpr T deg2rad(const T deg) +{ + return deg * static_cast<T>(M_PI) / static_cast<T>(180.0); +} + +// cspell: ignore kmph +template <typename T> +constexpr T kmph2mps(const T kmph) +{ + return kmph * static_cast<T>(1000.0) / static_cast<T>(3600.0); +} + +} // namespace + +namespace object_model +{ + +enum class ObjectModelType { NormalVehicle, BigVehicle, Bicycle, Pedestrian, Unknown }; +struct ObjectSize +{ + double length{0.0}; // [m] + double width{0.0}; // [m] + double height{0.0}; // [m] +}; +struct ObjectSizeLimit +{ + double length_min{0.0}; // [m] + double length_max{0.0}; // [m] + double width_min{0.0}; // [m] + double width_max{0.0}; // [m] + double height_min{0.0}; // [m] + double height_max{0.0}; // [m] +}; +struct MotionProcessNoise +{ + double vel_long{0.0}; // [m/s] uncertain longitudinal velocity + double vel_lat{0.0}; // [m/s] uncertain lateral velocity + double yaw_rate{0.0}; // [rad/s] uncertain yaw rate + double yaw_rate_min{0.0}; // [rad/s] uncertain yaw rate, minimum + double yaw_rate_max{0.0}; // [rad/s] uncertain yaw rate, maximum + double acc_long{0.0}; // [m/s^2] uncertain longitudinal acceleration + double acc_lat{0.0}; // [m/s^2] uncertain lateral acceleration + double acc_turn{0.0}; // [rad/s^2] uncertain rotation acceleration +}; +struct MotionProcessLimit +{ + double acc_long_max{0.0}; // [m/s^2] + double acc_lat_max{0.0}; // [m/s^2] + double vel_long_max{0.0}; // [m/s] + double vel_lat_max{0.0}; // [m/s] + double yaw_rate_max{0.0}; // [rad/s] +}; +struct StateCovariance +{ + double pos_x{0.0}; // [m^2] + double pos_y{0.0}; // [m^2] + double yaw{0.0}; // [rad^2] + double yaw_rate{0.0}; // [rad^2/s^2] + double vel_long{0.0}; // [m^2/s^2] + double vel_lat{0.0}; // [m^2/s^2] + double acc_long{0.0}; // [m^2/s^4] + double acc_lat{0.0}; // [m^2/s^4] +}; +struct BicycleModelState +{ + double init_slip_angle_cov{0.0}; // [rad^2/s^2] initial slip angle covariance + double slip_angle_max{0.0}; // [rad] max slip angle + double slip_rate_stddev_min{0.0}; // [rad/s] uncertain slip angle change rate, minimum + double slip_rate_stddev_max{0.0}; // [rad/s] uncertain slip angle change rate, maximum + double wheel_pos_ratio_front{0.0}; // [-] + double wheel_pos_ratio_rear{0.0}; // [-] + double wheel_pos_front_min{0.0}; // [m] + double wheel_pos_rear_min{0.0}; // [m] +}; + +class ObjectModel +{ +public: + ObjectSize init_size; + ObjectSizeLimit size_limit; + MotionProcessNoise process_noise; + MotionProcessLimit process_limit; + StateCovariance initial_covariance; + StateCovariance measurement_covariance; + BicycleModelState bicycle_state; + + explicit ObjectModel(const ObjectModelType & type) + { + switch (type) { + case ObjectModelType::NormalVehicle: + init_size.length = 3.0; + init_size.width = 2.0; + init_size.height = 1.8; + size_limit.length_min = 1.0; + size_limit.length_max = 20.0; + size_limit.width_min = 1.0; + size_limit.width_max = 5.0; + size_limit.height_min = 1.0; + size_limit.height_max = 5.0; + + process_noise.acc_long = const_g * 0.35; + process_noise.acc_lat = const_g * 0.15; + process_noise.yaw_rate_min = deg2rad(1.5); + process_noise.yaw_rate_max = deg2rad(15.0); + + process_limit.acc_long_max = const_g; + process_limit.acc_lat_max = const_g; +
process_limit.vel_long_max = kmph2mps(140.0); + + // initial covariance + initial_covariance.pos_x = sq(1.0); + initial_covariance.pos_y = sq(0.3); + initial_covariance.yaw = sq(deg2rad(25.0)); + initial_covariance.vel_long = sq(kmph2mps(1000.0)); + initial_covariance.vel_lat = sq(0.2); + + // measurement noise model + measurement_covariance.pos_x = sq(0.5); + measurement_covariance.pos_y = sq(0.4); + measurement_covariance.yaw = sq(deg2rad(20.0)); + measurement_covariance.vel_long = sq(1.0); + + // bicycle motion model + bicycle_state.init_slip_angle_cov = sq(deg2rad(5.0)); + bicycle_state.slip_angle_max = deg2rad(30.0); + bicycle_state.slip_rate_stddev_min = deg2rad(0.3); + bicycle_state.slip_rate_stddev_max = deg2rad(10.0); + bicycle_state.wheel_pos_ratio_front = 0.3; + bicycle_state.wheel_pos_ratio_rear = 0.25; + bicycle_state.wheel_pos_front_min = 1.0; + bicycle_state.wheel_pos_rear_min = 1.0; + break; + + case ObjectModelType::BigVehicle: + init_size.length = 6.0; + init_size.width = 2.0; + init_size.height = 2.0; + size_limit.length_min = 1.0; + size_limit.length_max = 35.0; + size_limit.width_min = 1.0; + size_limit.width_max = 10.0; + size_limit.height_min = 1.0; + size_limit.height_max = 10.0; + + process_noise.acc_long = const_g * 0.35; + process_noise.acc_lat = const_g * 0.15; + process_noise.yaw_rate_min = deg2rad(1.5); + process_noise.yaw_rate_max = deg2rad(15.0); + + process_limit.acc_long_max = const_g; + process_limit.acc_lat_max = const_g; + process_limit.vel_long_max = kmph2mps(140.0); + + // initial covariance + initial_covariance.pos_x = sq(1.5); + initial_covariance.pos_y = sq(0.5); + initial_covariance.yaw = sq(deg2rad(25.0)); + initial_covariance.vel_long = sq(kmph2mps(1000.0)); + initial_covariance.vel_lat = sq(0.2); + + // measurement noise model + measurement_covariance.pos_x = sq(0.5); + measurement_covariance.pos_y = sq(0.4); + measurement_covariance.yaw = sq(deg2rad(20.0)); + measurement_covariance.vel_long = sq(kmph2mps(10.0)); + + // bicycle motion model + bicycle_state.init_slip_angle_cov = sq(deg2rad(5.0)); + bicycle_state.slip_angle_max = deg2rad(30.0); + bicycle_state.slip_rate_stddev_min = deg2rad(0.3); + bicycle_state.slip_rate_stddev_max = deg2rad(10.0); + bicycle_state.wheel_pos_ratio_front = 0.3; + bicycle_state.wheel_pos_ratio_rear = 0.25; + bicycle_state.wheel_pos_front_min = 1.5; + bicycle_state.wheel_pos_rear_min = 1.5; + break; + + case ObjectModelType::Bicycle: + init_size.length = 1.0; + init_size.width = 0.5; + init_size.height = 1.7; + size_limit.length_min = 0.5; + size_limit.length_max = 8.0; + size_limit.width_min = 0.5; + size_limit.width_max = 3.0; + size_limit.height_min = 0.5; + size_limit.height_max = 2.5; + + process_noise.acc_long = const_g * 0.35; + process_noise.acc_lat = const_g * 0.15; + process_noise.yaw_rate_min = deg2rad(5.0); + process_noise.yaw_rate_max = deg2rad(15.0); + + process_limit.acc_long_max = const_g; + process_limit.acc_lat_max = const_g; + process_limit.vel_long_max = kmph2mps(120.0); + + // initial covariance + initial_covariance.pos_x = sq(0.8); + initial_covariance.pos_y = sq(0.5); + initial_covariance.yaw = sq(deg2rad(25.0)); + initial_covariance.vel_long = sq(kmph2mps(1000.0)); + initial_covariance.vel_lat = sq(0.2); + + // measurement noise model + measurement_covariance.pos_x = sq(0.5); + measurement_covariance.pos_y = sq(0.4); + measurement_covariance.yaw = sq(deg2rad(30.0)); + measurement_covariance.vel_long = sq(kmph2mps(10.0)); + + // bicycle motion model + bicycle_state.init_slip_angle_cov = 
sq(deg2rad(5.0)); + bicycle_state.slip_angle_max = deg2rad(30.0); + bicycle_state.slip_rate_stddev_min = deg2rad(1.0); + bicycle_state.slip_rate_stddev_max = deg2rad(10.0); + bicycle_state.wheel_pos_ratio_front = 0.3; + bicycle_state.wheel_pos_ratio_rear = 0.3; + bicycle_state.wheel_pos_front_min = 0.3; + bicycle_state.wheel_pos_rear_min = 0.3; + break; + + case ObjectModelType::Pedestrian: + init_size.length = 0.5; + init_size.width = 0.5; + init_size.height = 1.7; + size_limit.length_min = 0.3; + size_limit.length_max = 2.0; + size_limit.width_min = 0.3; + size_limit.width_max = 1.0; + size_limit.height_min = 0.6; + size_limit.height_max = 2.0; + + process_noise.vel_long = 0.5; + process_noise.vel_lat = 0.5; + process_noise.yaw_rate = deg2rad(20.0); + process_noise.acc_long = const_g * 0.3; + process_noise.acc_turn = deg2rad(30.0); + + process_limit.vel_long_max = kmph2mps(100.0); + process_limit.yaw_rate_max = deg2rad(30.0); + + // initial covariance + initial_covariance.pos_x = sq(2.0); + initial_covariance.pos_y = sq(2.0); + initial_covariance.yaw = sq(deg2rad(1000.0)); + initial_covariance.vel_long = sq(kmph2mps(120.0)); + initial_covariance.yaw_rate = sq(deg2rad(360.0)); + + // measurement noise model + measurement_covariance.pos_x = sq(0.4); + measurement_covariance.pos_y = sq(0.4); + measurement_covariance.yaw = sq(deg2rad(30.0)); + + break; + + default: + break; + } + } + virtual ~ObjectModel() = default; +}; + +// create static objects by using ObjectModel class +static const ObjectModel normal_vehicle(ObjectModelType::NormalVehicle); +static const ObjectModel big_vehicle(ObjectModelType::BigVehicle); +static const ObjectModel bicycle(ObjectModelType::Bicycle); +static const ObjectModel pedestrian(ObjectModelType::Pedestrian); + +} // namespace object_model + +#endif // MULTI_OBJECT_TRACKER__TRACKER__OBJECT_MODEL__OBJECT_MODEL_HPP_ diff --git a/perception/multi_object_tracker/include/multi_object_tracker/utils/utils.hpp b/perception/multi_object_tracker/include/multi_object_tracker/utils/utils.hpp index 259a58342c369..0bda7870ae2b9 100644 --- a/perception/multi_object_tracker/include/multi_object_tracker/utils/utils.hpp +++ b/perception/multi_object_tracker/include/multi_object_tracker/utils/utils.hpp @@ -22,9 +22,9 @@ #include #include -#include -#include -#include +#include "autoware_perception_msgs/msg/detected_object.hpp" +#include "autoware_perception_msgs/msg/shape.hpp" +#include "autoware_perception_msgs/msg/tracked_object.hpp" #include #include #include @@ -38,45 +38,6 @@ namespace utils { -enum MSG_COV_IDX { - X_X = 0, - X_Y = 1, - X_Z = 2, - X_ROLL = 3, - X_PITCH = 4, - X_YAW = 5, - Y_X = 6, - Y_Y = 7, - Y_Z = 8, - Y_ROLL = 9, - Y_PITCH = 10, - Y_YAW = 11, - Z_X = 12, - Z_Y = 13, - Z_Z = 14, - Z_ROLL = 15, - Z_PITCH = 16, - Z_YAW = 17, - ROLL_X = 18, - ROLL_Y = 19, - ROLL_Z = 20, - ROLL_ROLL = 21, - ROLL_PITCH = 22, - ROLL_YAW = 23, - PITCH_X = 24, - PITCH_Y = 25, - PITCH_Z = 26, - PITCH_ROLL = 27, - PITCH_PITCH = 28, - PITCH_YAW = 29, - YAW_X = 30, - YAW_Y = 31, - YAW_Z = 32, - YAW_ROLL = 33, - YAW_PITCH = 34, - YAW_YAW = 35 -}; - enum BBOX_IDX { FRONT_SURFACE = 0, RIGHT_SURFACE = 1, @@ -132,7 +93,8 @@ inline int getNearestCornerOrSurface( // (left) | | (right) // -- // x- (rear) - int xgrid, ygrid; + int xgrid = 0; + int ygrid = 0; const int labels[3][3] = { {BBOX_IDX::FRONT_L_CORNER, BBOX_IDX::FRONT_SURFACE, BBOX_IDX::FRONT_R_CORNER}, {BBOX_IDX::LEFT_SURFACE, BBOX_IDX::INSIDE, BBOX_IDX::RIGHT_SURFACE}, @@ -155,34 +117,6 @@ inline int getNearestCornerOrSurface( 
return labels[xgrid][ygrid]; // 0 to 7 + 1(null) value } -/** - * @brief Get the Nearest Corner or Surface from detected object - * @param object: input object - * @param yaw: object yaw angle (after solved front and back uncertainty) - * @param self_transform - * @return nearest corner or surface index - */ -inline int getNearestCornerOrSurfaceFromObject( - const autoware_perception_msgs::msg::DetectedObject & object, const double & yaw, - const geometry_msgs::msg::Transform & self_transform) -{ - // only work for BBOX shape - if (object.shape.type != autoware_perception_msgs::msg::Shape::BOUNDING_BOX) { - return BBOX_IDX::INVALID; - } - - // extract necessary information from input object - double x, y, width, length; - x = object.kinematics.pose_with_covariance.pose.position.x; - y = object.kinematics.pose_with_covariance.pose.position.y; - // yaw = tf2::getYaw(object.kinematics.pose_with_covariance.pose.orientation); //not use raw yaw - // now - width = object.shape.dimensions.y; - length = object.shape.dimensions.x; - - return getNearestCornerOrSurface(x, y, yaw, width, length, self_transform); -} - /** * @brief Calc bounding box center offset caused by shape change * @param dw: width update [m] = w_new - w_old @@ -225,30 +159,6 @@ inline Eigen::Vector2d calcOffsetVectorFromShapeChange( return offset; // do nothing if indx == INVALID or INSIDE } -/** - * @brief post-processing to recover bounding box center from tracking point and offset vector - * @param x: x of tracking point estimated with ekf - * @param y: y of tracking point estimated with ekf - * @param yaw: yaw of tracking point estimated with ekf - * @param dw: diff of width: w_estimated - w_input - * @param dl: diff of length: l_estimated - l_input - * @param indx: closest corner or surface index - * @param tracking_offset: tracking offset between bounding box center and tracking point - */ -inline Eigen::Vector2d recoverFromTrackingPoint( - const double x, const double y, const double yaw, const double dw, const double dl, - const int indx, const Eigen::Vector2d & tracking_offset) -{ - const Eigen::Vector2d tracking_point{x, y}; - const Eigen::Matrix2d R = Eigen::Rotation2Dd(yaw).toRotationMatrix(); - - const Eigen::Vector2d shape_change_offset = calcOffsetVectorFromShapeChange(dw, dl, indx); - - Eigen::Vector2d output_center = tracking_point - R * tracking_offset - R * shape_change_offset; - - return output_center; -} - /** * @brief Convert input object center to tracking point based on nearest corner information * 1. update anchor offset vector, 2. 
offset input bbox based on tracking_offset vector and @@ -274,9 +184,8 @@ inline void calcAnchorPointOffset( } // current object width and height - double w_n, l_n; - l_n = input_object.shape.dimensions.x; - w_n = input_object.shape.dimensions.y; + const double w_n = input_object.shape.dimensions.y; + const double l_n = input_object.shape.dimensions.x; // update offset const Eigen::Vector2d offset = calcOffsetVectorFromShapeChange(w_n - w, l_n - l, indx); @@ -304,27 +213,19 @@ inline bool convertConvexHullToBoundingBox( } // look for bounding box boundary - double max_x = 0; - double max_y = 0; - double min_x = 0; - double min_y = 0; - double max_z = 0; - for (size_t i = 0; i < input_object.shape.footprint.points.size(); ++i) { - const double foot_x = input_object.shape.footprint.points.at(i).x; - const double foot_y = input_object.shape.footprint.points.at(i).y; - const double foot_z = input_object.shape.footprint.points.at(i).z; - max_x = std::max(max_x, foot_x); - max_y = std::max(max_y, foot_y); - min_x = std::min(min_x, foot_x); - min_y = std::min(min_y, foot_y); - max_z = std::max(max_z, foot_z); + float max_x = 0; + float max_y = 0; + float min_x = 0; + float min_y = 0; + float max_z = 0; + for (const auto & point : input_object.shape.footprint.points) { + max_x = std::max(max_x, point.x); + max_y = std::max(max_y, point.y); + min_x = std::min(min_x, point.x); + min_y = std::min(min_y, point.y); + max_z = std::max(max_z, point.z); } - // calc bounding box state - const double length = max_x - min_x; - const double width = max_y - min_y; - const double height = max_z; - // calc new center const Eigen::Vector2d center{ input_object.kinematics.pose_with_covariance.pose.position.x, @@ -340,9 +241,9 @@ inline bool convertConvexHullToBoundingBox( output_object.kinematics.pose_with_covariance.pose.position.y = new_center.y(); output_object.shape.type = autoware_perception_msgs::msg::Shape::BOUNDING_BOX; - output_object.shape.dimensions.x = length; - output_object.shape.dimensions.y = width; - output_object.shape.dimensions.z = height; + output_object.shape.dimensions.x = max_x - min_x; + output_object.shape.dimensions.y = max_y - min_y; + output_object.shape.dimensions.z = max_z; return true; } diff --git a/perception/multi_object_tracker/src/debugger/debugger.cpp b/perception/multi_object_tracker/src/debugger/debugger.cpp index a3ece17eab112..e692ae76468e4 100644 --- a/perception/multi_object_tracker/src/debugger/debugger.cpp +++ b/perception/multi_object_tracker/src/debugger/debugger.cpp @@ -98,7 +98,7 @@ void TrackerDebugger::checkDelay(diagnostic_updater::DiagnosticStatusWrapper & s stat.summary(diagnostic_msgs::msg::DiagnosticStatus::ERROR, "Measurement time is not set."); return; } - const double & delay = pipeline_latency_ms_; // [s] + const double & delay = pipeline_latency_ms_ / 1e3; // [s] if (delay == 0.0) { stat.summary(diagnostic_msgs::msg::DiagnosticStatus::OK, "Detection delay is not calculated."); diff --git a/perception/multi_object_tracker/src/multi_object_tracker_core.cpp b/perception/multi_object_tracker/src/multi_object_tracker_core.cpp index 54255e706e35a..97c3d93d191b2 100644 --- a/perception/multi_object_tracker/src/multi_object_tracker_core.cpp +++ b/perception/multi_object_tracker/src/multi_object_tracker_core.cpp @@ -13,6 +13,15 @@ // limitations under the License. 
// // +#define EIGEN_MPL2_ONLY + +#include "multi_object_tracker/multi_object_tracker_core.hpp" + +#include "multi_object_tracker/utils/utils.hpp" + +#include +#include +#include #include @@ -28,14 +37,6 @@ #include #include -#define EIGEN_MPL2_ONLY -#include "multi_object_tracker/multi_object_tracker_core.hpp" -#include "multi_object_tracker/utils/utils.hpp" - -#include -#include -#include - namespace { // Function to get the transform between two frames @@ -207,7 +208,7 @@ void MultiObjectTracker::onTrigger() { const rclcpp::Time current_time = this->now(); // get objects from the input manager and run process - std::vector> objects_list; + ObjectsList objects_list; const bool is_objects_ready = input_manager_->getObjects(current_time, objects_list); if (!is_objects_ready) return; diff --git a/perception/multi_object_tracker/src/processor/processor.cpp b/perception/multi_object_tracker/src/processor/processor.cpp index ff14f7849b161..50d1a021c5838 100644 --- a/perception/multi_object_tracker/src/processor/processor.cpp +++ b/perception/multi_object_tracker/src/processor/processor.cpp @@ -17,7 +17,7 @@ #include "multi_object_tracker/tracker/tracker.hpp" #include "object_recognition_utils/object_recognition_utils.hpp" -#include +#include "autoware_perception_msgs/msg/tracked_objects.hpp" #include diff --git a/perception/multi_object_tracker/src/tracker/model/bicycle_tracker.cpp b/perception/multi_object_tracker/src/tracker/model/bicycle_tracker.cpp index 05e06107c2247..a438c830596d7 100644 --- a/perception/multi_object_tracker/src/tracker/model/bicycle_tracker.cpp +++ b/perception/multi_object_tracker/src/tracker/model/bicycle_tracker.cpp @@ -15,14 +15,19 @@ // // Author: v1.0 Yukihiro Saito // +#define EIGEN_MPL2_ONLY #include "multi_object_tracker/tracker/model/bicycle_tracker.hpp" +#include "autoware/universe_utils/geometry/boost_polygon_utils.hpp" +#include "autoware/universe_utils/math/normalization.hpp" +#include "autoware/universe_utils/math/unit_conversion.hpp" +#include "autoware/universe_utils/ros/msg_covariance.hpp" #include "multi_object_tracker/utils/utils.hpp" +#include "object_recognition_utils/object_recognition_utils.hpp" -#include -#include -#include +#include +#include #include #include @@ -34,11 +39,6 @@ #else #include #endif -#include "object_recognition_utils/object_recognition_utils.hpp" - -#define EIGEN_MPL2_ONLY -#include -#include using Label = autoware_perception_msgs::msg::ObjectClassification; @@ -55,45 +55,36 @@ BicycleTracker::BicycleTracker( // initialize existence probability initializeExistenceProbabilities(channel_index, object.existence_probability); - // Initialize parameters - // measurement noise covariance: detector uncertainty + ego vehicle motion uncertainty - double r_stddev_x = 0.5; // in vehicle coordinate [m] - double r_stddev_y = 0.4; // in vehicle coordinate [m] - double r_stddev_yaw = autoware::universe_utils::deg2rad(30); // in map coordinate [rad] - ekf_params_.r_cov_x = std::pow(r_stddev_x, 2.0); - ekf_params_.r_cov_y = std::pow(r_stddev_y, 2.0); - ekf_params_.r_cov_yaw = std::pow(r_stddev_yaw, 2.0); - // OBJECT SHAPE MODEL if (object.shape.type == autoware_perception_msgs::msg::Shape::BOUNDING_BOX) { bounding_box_ = { object.shape.dimensions.x, object.shape.dimensions.y, object.shape.dimensions.z}; } else { - bounding_box_ = {1.0, 0.5, 1.7}; + bounding_box_ = { + object_model_.init_size.length, object_model_.init_size.width, + object_model_.init_size.height}; // default value } // set maximum and minimum size - constexpr double max_size 
= 5.0; - constexpr double min_size = 0.3; - bounding_box_.length = std::min(std::max(bounding_box_.length, min_size), max_size); - bounding_box_.width = std::min(std::max(bounding_box_.width, min_size), max_size); - bounding_box_.height = std::min(std::max(bounding_box_.height, min_size), max_size); + bounding_box_.length = std::clamp( + bounding_box_.length, object_model_.size_limit.length_min, object_model_.size_limit.length_max); + bounding_box_.width = std::clamp( + bounding_box_.width, object_model_.size_limit.width_min, object_model_.size_limit.width_max); + bounding_box_.height = std::clamp( + bounding_box_.height, object_model_.size_limit.height_min, object_model_.size_limit.height_max); // Set motion model parameters { - constexpr double q_stddev_acc_long = - 9.8 * 0.35; // [m/(s*s)] uncertain longitudinal acceleration - constexpr double q_stddev_acc_lat = 9.8 * 0.15; // [m/(s*s)] uncertain lateral acceleration - constexpr double q_stddev_yaw_rate_min = 5.0; // [deg/s] uncertain yaw change rate, minimum - constexpr double q_stddev_yaw_rate_max = 15.0; // [deg/s] uncertain yaw change rate, maximum - constexpr double q_stddev_slip_rate_min = - 1.0; // [deg/s] uncertain slip angle change rate, minimum - constexpr double q_stddev_slip_rate_max = - 10.0; // [deg/s] uncertain slip angle change rate, maximum - constexpr double q_max_slip_angle = 30; // [deg] max slip angle - constexpr double lf_ratio = 0.3; // [-] ratio of front wheel position - constexpr double lf_min = 0.3; // [m] minimum front wheel position - constexpr double lr_ratio = 0.3; // [-] ratio of rear wheel position - constexpr double lr_min = 0.3; // [m] minimum rear wheel position + const double q_stddev_acc_long = object_model_.process_noise.acc_long; + const double q_stddev_acc_lat = object_model_.process_noise.acc_lat; + const double q_stddev_yaw_rate_min = object_model_.process_noise.yaw_rate_min; + const double q_stddev_yaw_rate_max = object_model_.process_noise.yaw_rate_max; + const double q_stddev_slip_rate_min = object_model_.bicycle_state.slip_rate_stddev_min; + const double q_stddev_slip_rate_max = object_model_.bicycle_state.slip_rate_stddev_max; + const double q_max_slip_angle = object_model_.bicycle_state.slip_angle_max; + const double lf_ratio = object_model_.bicycle_state.wheel_pos_ratio_front; + const double lf_min = object_model_.bicycle_state.wheel_pos_front_min; + const double lr_ratio = object_model_.bicycle_state.wheel_pos_ratio_rear; + const double lr_min = object_model_.bicycle_state.wheel_pos_rear_min; motion_model_.setMotionParams( q_stddev_acc_long, q_stddev_acc_lat, q_stddev_yaw_rate_min, q_stddev_yaw_rate_max, q_stddev_slip_rate_min, q_stddev_slip_rate_max, q_max_slip_angle, lf_ratio, lf_min, lr_ratio, @@ -102,59 +93,47 @@ BicycleTracker::BicycleTracker( // Set motion limits { - constexpr double max_vel = autoware::universe_utils::kmph2mps(80); // [m/s] maximum velocity - constexpr double max_slip = 30; // [deg] maximum slip angle + const double max_vel = object_model_.process_limit.vel_long_max; + const double max_slip = object_model_.bicycle_state.slip_angle_max; motion_model_.setMotionLimits(max_vel, max_slip); // maximum velocity and slip angle } // Set initial state { + using autoware::universe_utils::xyzrpy_covariance_index::XYZRPY_COV_IDX; const double x = object.kinematics.pose_with_covariance.pose.position.x; const double y = object.kinematics.pose_with_covariance.pose.position.y; const double yaw = tf2::getYaw(object.kinematics.pose_with_covariance.pose.orientation); - auto pose_cov 
= object.kinematics.pose_with_covariance.covariance; - double vel = 0.0; - double vel_cov; - const double & length = bounding_box_.length; - - if (object.kinematics.has_twist) { - vel = object.kinematics.twist_with_covariance.twist.linear.x; - } + auto pose_cov = object.kinematics.pose_with_covariance.covariance; if (!object.kinematics.has_position_covariance) { // initial state covariance - constexpr double p0_stddev_x = 0.8; // in object coordinate [m] - constexpr double p0_stddev_y = 0.5; // in object coordinate [m] - constexpr double p0_stddev_yaw = - autoware::universe_utils::deg2rad(25); // in map coordinate [rad] - constexpr double p0_cov_x = p0_stddev_x * p0_stddev_x; - constexpr double p0_cov_y = p0_stddev_y * p0_stddev_y; - constexpr double p0_cov_yaw = p0_stddev_yaw * p0_stddev_yaw; + const auto & p0_cov_x = object_model_.initial_covariance.pos_x; + const auto & p0_cov_y = object_model_.initial_covariance.pos_y; + const auto & p0_cov_yaw = object_model_.initial_covariance.yaw; const double cos_yaw = std::cos(yaw); const double sin_yaw = std::sin(yaw); const double sin_2yaw = std::sin(2.0 * yaw); - pose_cov[utils::MSG_COV_IDX::X_X] = - p0_cov_x * cos_yaw * cos_yaw + p0_cov_y * sin_yaw * sin_yaw; - pose_cov[utils::MSG_COV_IDX::X_Y] = 0.5 * (p0_cov_x - p0_cov_y) * sin_2yaw; - pose_cov[utils::MSG_COV_IDX::Y_Y] = - p0_cov_x * sin_yaw * sin_yaw + p0_cov_y * cos_yaw * cos_yaw; - pose_cov[utils::MSG_COV_IDX::Y_X] = pose_cov[utils::MSG_COV_IDX::X_Y]; - pose_cov[utils::MSG_COV_IDX::YAW_YAW] = p0_cov_yaw; + pose_cov[XYZRPY_COV_IDX::X_X] = p0_cov_x * cos_yaw * cos_yaw + p0_cov_y * sin_yaw * sin_yaw; + pose_cov[XYZRPY_COV_IDX::X_Y] = 0.5 * (p0_cov_x - p0_cov_y) * sin_2yaw; + pose_cov[XYZRPY_COV_IDX::Y_Y] = p0_cov_x * sin_yaw * sin_yaw + p0_cov_y * cos_yaw * cos_yaw; + pose_cov[XYZRPY_COV_IDX::Y_X] = pose_cov[XYZRPY_COV_IDX::X_Y]; + pose_cov[XYZRPY_COV_IDX::YAW_YAW] = p0_cov_yaw; } - if (!object.kinematics.has_twist_covariance) { - constexpr double p0_stddev_vel = - autoware::universe_utils::kmph2mps(1000); // in object coordinate [m/s] - vel_cov = std::pow(p0_stddev_vel, 2.0); - } else { - vel_cov = object.kinematics.twist_with_covariance.covariance[utils::MSG_COV_IDX::X_X]; + double vel = 0.0; + double vel_cov = object_model_.initial_covariance.vel_long; + if (object.kinematics.has_twist) { + vel = object.kinematics.twist_with_covariance.twist.linear.x; + } + if (object.kinematics.has_twist_covariance) { + vel_cov = object.kinematics.twist_with_covariance.covariance[XYZRPY_COV_IDX::X_X]; } const double slip = 0.0; - const double p0_stddev_slip = - autoware::universe_utils::deg2rad(5); // in object coordinate [rad/s] - const double slip_cov = std::pow(p0_stddev_slip, 2.0); + const double slip_cov = object_model_.bicycle_state.init_slip_angle_cov; + const double & length = bounding_box_.length; // initialize motion model motion_model_.initialize(time, x, y, yaw, pose_cov, vel, vel_cov, slip, slip_cov, length); @@ -168,9 +147,9 @@ bool BicycleTracker::predict(const rclcpp::Time & time) autoware_perception_msgs::msg::DetectedObject BicycleTracker::getUpdatingObject( const autoware_perception_msgs::msg::DetectedObject & object, - const geometry_msgs::msg::Transform & /*self_transform*/) + const geometry_msgs::msg::Transform & /*self_transform*/) const { - autoware_perception_msgs::msg::DetectedObject updating_object; + autoware_perception_msgs::msg::DetectedObject updating_object = object; // OBJECT SHAPE MODEL // convert to bounding box if input is convex shape @@ -178,19 +157,42 @@ 
autoware_perception_msgs::msg::DetectedObject BicycleTracker::getUpdatingObject( if (!utils::convertConvexHullToBoundingBox(object, updating_object)) { updating_object = object; } - } else { - updating_object = object; } // UNCERTAINTY MODEL if (!object.kinematics.has_position_covariance) { + // measurement noise covariance + auto r_cov_x = object_model_.measurement_covariance.pos_x; + auto r_cov_y = object_model_.measurement_covariance.pos_y; + + // yaw angle fix + const double pose_yaw = tf2::getYaw(object.kinematics.pose_with_covariance.pose.orientation); + const bool is_yaw_available = + object.kinematics.orientation_availability != + autoware_perception_msgs::msg::DetectedObjectKinematics::UNAVAILABLE; + // fill covariance matrix + using autoware::universe_utils::xyzrpy_covariance_index::XYZRPY_COV_IDX; auto & pose_cov = updating_object.kinematics.pose_with_covariance.covariance; - pose_cov[utils::MSG_COV_IDX::X_X] = ekf_params_.r_cov_x; // x - x - pose_cov[utils::MSG_COV_IDX::X_Y] = 0; // x - y - pose_cov[utils::MSG_COV_IDX::Y_X] = 0; // y - x - pose_cov[utils::MSG_COV_IDX::Y_Y] = ekf_params_.r_cov_y; // y - y - pose_cov[utils::MSG_COV_IDX::YAW_YAW] = ekf_params_.r_cov_yaw; // yaw - yaw + const double cos_yaw = std::cos(pose_yaw); + const double sin_yaw = std::sin(pose_yaw); + const double sin_2yaw = std::sin(2.0 * pose_yaw); + pose_cov[XYZRPY_COV_IDX::X_X] = + r_cov_x * cos_yaw * cos_yaw + r_cov_y * sin_yaw * sin_yaw; // x - x + pose_cov[XYZRPY_COV_IDX::X_Y] = 0.5 * (r_cov_x - r_cov_y) * sin_2yaw; // x - y + pose_cov[XYZRPY_COV_IDX::Y_Y] = + r_cov_x * sin_yaw * sin_yaw + r_cov_y * cos_yaw * cos_yaw; // y - y + pose_cov[XYZRPY_COV_IDX::Y_X] = pose_cov[XYZRPY_COV_IDX::X_Y]; // y - x + pose_cov[XYZRPY_COV_IDX::X_YAW] = 0.0; // x - yaw + pose_cov[XYZRPY_COV_IDX::Y_YAW] = 0.0; // y - yaw + pose_cov[XYZRPY_COV_IDX::YAW_X] = 0.0; // yaw - x + pose_cov[XYZRPY_COV_IDX::YAW_Y] = 0.0; // yaw - y + pose_cov[XYZRPY_COV_IDX::YAW_YAW] = object_model_.measurement_covariance.yaw; // yaw - yaw + if (!is_yaw_available) { + pose_cov[XYZRPY_COV_IDX::YAW_YAW] *= 1e3; // yaw is not available, multiply large value + } + auto & twist_cov = updating_object.kinematics.twist_with_covariance.covariance; + twist_cov[XYZRPY_COV_IDX::X_X] = object_model_.measurement_covariance.vel_long; // vel - vel } return updating_object; @@ -229,8 +231,7 @@ bool BicycleTracker::measureWithPose(const autoware_perception_msgs::msg::Detect bool BicycleTracker::measureWithShape(const autoware_perception_msgs::msg::DetectedObject & object) { - autoware_perception_msgs::msg::DetectedObject bbox_object; - if (!object.shape.type == autoware_perception_msgs::msg::Shape::BOUNDING_BOX) { + if (object.shape.type != autoware_perception_msgs::msg::Shape::BOUNDING_BOX) { // do not update shape if the input is not a bounding box return false; } @@ -239,28 +240,29 @@ bool BicycleTracker::measureWithShape(const autoware_perception_msgs::msg::Detec constexpr double size_max = 30.0; // [m] constexpr double size_min = 0.1; // [m] if ( - bbox_object.shape.dimensions.x > size_max || bbox_object.shape.dimensions.y > size_max || - bbox_object.shape.dimensions.z > size_max) { + object.shape.dimensions.x > size_max || object.shape.dimensions.y > size_max || + object.shape.dimensions.z > size_max) { return false; } else if ( - bbox_object.shape.dimensions.x < size_min || bbox_object.shape.dimensions.y < size_min || - bbox_object.shape.dimensions.z < size_min) { + object.shape.dimensions.x < size_min || object.shape.dimensions.y < size_min || + 
object.shape.dimensions.z < size_min) { return false; } // update object size constexpr double gain = 0.1; constexpr double gain_inv = 1.0 - gain; - bounding_box_.length = gain_inv * bounding_box_.length + gain * bbox_object.shape.dimensions.x; - bounding_box_.width = gain_inv * bounding_box_.width + gain * bbox_object.shape.dimensions.y; - bounding_box_.height = gain_inv * bounding_box_.height + gain * bbox_object.shape.dimensions.z; + bounding_box_.length = gain_inv * bounding_box_.length + gain * object.shape.dimensions.x; + bounding_box_.width = gain_inv * bounding_box_.width + gain * object.shape.dimensions.y; + bounding_box_.height = gain_inv * bounding_box_.height + gain * object.shape.dimensions.z; // set maximum and minimum size - constexpr double max_size = 5.0; - constexpr double min_size = 0.3; - bounding_box_.length = std::min(std::max(bounding_box_.length, min_size), max_size); - bounding_box_.width = std::min(std::max(bounding_box_.width, min_size), max_size); - bounding_box_.height = std::min(std::max(bounding_box_.height, min_size), max_size); + bounding_box_.length = std::clamp( + bounding_box_.length, object_model_.size_limit.length_min, object_model_.size_limit.length_max); + bounding_box_.width = std::clamp( + bounding_box_.width, object_model_.size_limit.width_min, object_model_.size_limit.width_max); + bounding_box_.height = std::clamp( + bounding_box_.height, object_model_.size_limit.height_min, object_model_.size_limit.height_max); // update motion model motion_model_.updateExtendedState(bounding_box_.length); diff --git a/perception/multi_object_tracker/src/tracker/model/big_vehicle_tracker.cpp b/perception/multi_object_tracker/src/tracker/model/big_vehicle_tracker.cpp index 4414d8a21ca01..90d33e65b46bd 100644 --- a/perception/multi_object_tracker/src/tracker/model/big_vehicle_tracker.cpp +++ b/perception/multi_object_tracker/src/tracker/model/big_vehicle_tracker.cpp @@ -15,14 +15,19 @@ // // Author: v1.0 Yukihiro Saito // +#define EIGEN_MPL2_ONLY #include "multi_object_tracker/tracker/model/big_vehicle_tracker.hpp" +#include "autoware/universe_utils/geometry/boost_polygon_utils.hpp" +#include "autoware/universe_utils/math/normalization.hpp" +#include "autoware/universe_utils/math/unit_conversion.hpp" +#include "autoware/universe_utils/ros/msg_covariance.hpp" #include "multi_object_tracker/utils/utils.hpp" +#include "object_recognition_utils/object_recognition_utils.hpp" -#include -#include -#include +#include +#include #include #include @@ -34,11 +39,6 @@ #else #include #endif -#include "object_recognition_utils/object_recognition_utils.hpp" - -#define EIGEN_MPL2_ONLY -#include -#include using Label = autoware_perception_msgs::msg::ObjectClassification; @@ -56,17 +56,6 @@ BigVehicleTracker::BigVehicleTracker( // initialize existence probability initializeExistenceProbabilities(channel_index, object.existence_probability); - // Initialize parameters - // measurement noise covariance: detector uncertainty + ego vehicle motion uncertainty - float r_stddev_x = 0.5; // in vehicle coordinate [m] - float r_stddev_y = 0.4; // in vehicle coordinate [m] - float r_stddev_yaw = autoware::universe_utils::deg2rad(20); // in map coordinate [rad] - float r_stddev_vel = 1.0; // in object coordinate [m/s] - ekf_params_.r_cov_x = std::pow(r_stddev_x, 2.0); - ekf_params_.r_cov_y = std::pow(r_stddev_y, 2.0); - ekf_params_.r_cov_yaw = std::pow(r_stddev_yaw, 2.0); - ekf_params_.r_cov_vel = std::pow(r_stddev_vel, 2.0); - // velocity deviation threshold // if the predicted velocity is 
close to the observed velocity, // the observed velocity is used as the measurement. @@ -82,7 +71,9 @@ BigVehicleTracker::BigVehicleTracker( RCLCPP_WARN( logger_, "BigVehicleTracker::BigVehicleTracker: Failed to convert convex hull to bounding box."); - bounding_box_ = {6.0, 2.0, 2.0}; // default value + bounding_box_ = { + object_model_.init_size.length, object_model_.init_size.width, + object_model_.init_size.height}; // default value } else { bounding_box_ = { bbox_object.shape.dimensions.x, bbox_object.shape.dimensions.y, @@ -90,28 +81,26 @@ BigVehicleTracker::BigVehicleTracker( } } // set maximum and minimum size - constexpr double max_size = 30.0; - constexpr double min_size = 1.0; - bounding_box_.length = std::min(std::max(bounding_box_.length, min_size), max_size); - bounding_box_.width = std::min(std::max(bounding_box_.width, min_size), max_size); - bounding_box_.height = std::min(std::max(bounding_box_.height, min_size), max_size); + bounding_box_.length = std::clamp( + bounding_box_.length, object_model_.size_limit.length_min, object_model_.size_limit.length_max); + bounding_box_.width = std::clamp( + bounding_box_.width, object_model_.size_limit.width_min, object_model_.size_limit.width_max); + bounding_box_.height = std::clamp( + bounding_box_.height, object_model_.size_limit.height_min, object_model_.size_limit.height_max); // Set motion model parameters { - constexpr double q_stddev_acc_long = - 9.8 * 0.35; // [m/(s*s)] uncertain longitudinal acceleration - constexpr double q_stddev_acc_lat = 9.8 * 0.15; // [m/(s*s)] uncertain lateral acceleration - constexpr double q_stddev_yaw_rate_min = 1.5; // [deg/s] uncertain yaw change rate, minimum - constexpr double q_stddev_yaw_rate_max = 15.0; // [deg/s] uncertain yaw change rate, maximum - constexpr double q_stddev_slip_rate_min = - 0.3; // [deg/s] uncertain slip angle change rate, minimum - constexpr double q_stddev_slip_rate_max = - 10.0; // [deg/s] uncertain slip angle change rate, maximum - constexpr double q_max_slip_angle = 30; // [deg] max slip angle - constexpr double lf_ratio = 0.3; // [-] ratio of front wheel position - constexpr double lf_min = 1.5; // [m] minimum front wheel position - constexpr double lr_ratio = 0.25; // [-] ratio of rear wheel position - constexpr double lr_min = 1.5; // [m] minimum rear wheel position + const double q_stddev_acc_long = object_model_.process_noise.acc_long; + const double q_stddev_acc_lat = object_model_.process_noise.acc_lat; + const double q_stddev_yaw_rate_min = object_model_.process_noise.yaw_rate_min; + const double q_stddev_yaw_rate_max = object_model_.process_noise.yaw_rate_max; + const double q_stddev_slip_rate_min = object_model_.bicycle_state.slip_rate_stddev_min; + const double q_stddev_slip_rate_max = object_model_.bicycle_state.slip_rate_stddev_max; + const double q_max_slip_angle = object_model_.bicycle_state.slip_angle_max; + const double lf_ratio = object_model_.bicycle_state.wheel_pos_ratio_front; + const double lf_min = object_model_.bicycle_state.wheel_pos_front_min; + const double lr_ratio = object_model_.bicycle_state.wheel_pos_ratio_rear; + const double lr_min = object_model_.bicycle_state.wheel_pos_rear_min; motion_model_.setMotionParams( q_stddev_acc_long, q_stddev_acc_lat, q_stddev_yaw_rate_min, q_stddev_yaw_rate_max, q_stddev_slip_rate_min, q_stddev_slip_rate_max, q_max_slip_angle, lf_ratio, lf_min, lr_ratio, @@ -120,59 +109,47 @@ BigVehicleTracker::BigVehicleTracker( // Set motion limits { - constexpr double max_vel = 
autoware::universe_utils::kmph2mps(100); // [m/s] maximum velocity - constexpr double max_slip = 30; // [deg] maximum slip angle + const double max_vel = object_model_.process_limit.vel_long_max; + const double max_slip = object_model_.bicycle_state.slip_angle_max; motion_model_.setMotionLimits(max_vel, max_slip); // maximum velocity and slip angle } // Set initial state { + using autoware::universe_utils::xyzrpy_covariance_index::XYZRPY_COV_IDX; const double x = object.kinematics.pose_with_covariance.pose.position.x; const double y = object.kinematics.pose_with_covariance.pose.position.y; const double yaw = tf2::getYaw(object.kinematics.pose_with_covariance.pose.orientation); - auto pose_cov = object.kinematics.pose_with_covariance.covariance; - double vel = 0.0; - double vel_cov; - const double & length = bounding_box_.length; - - if (object.kinematics.has_twist) { - vel = object.kinematics.twist_with_covariance.twist.linear.x; - } + auto pose_cov = object.kinematics.pose_with_covariance.covariance; if (!object.kinematics.has_position_covariance) { // initial state covariance - constexpr double p0_stddev_x = 1.5; // in object coordinate [m] - constexpr double p0_stddev_y = 0.5; // in object coordinate [m] - constexpr double p0_stddev_yaw = - autoware::universe_utils::deg2rad(25); // in map coordinate [rad] - constexpr double p0_cov_x = p0_stddev_x * p0_stddev_x; - constexpr double p0_cov_y = p0_stddev_y * p0_stddev_y; - constexpr double p0_cov_yaw = p0_stddev_yaw * p0_stddev_yaw; + const auto & p0_cov_x = object_model_.initial_covariance.pos_x; + const auto & p0_cov_y = object_model_.initial_covariance.pos_y; + const auto & p0_cov_yaw = object_model_.initial_covariance.yaw; const double cos_yaw = std::cos(yaw); const double sin_yaw = std::sin(yaw); const double sin_2yaw = std::sin(2.0 * yaw); - pose_cov[utils::MSG_COV_IDX::X_X] = - p0_cov_x * cos_yaw * cos_yaw + p0_cov_y * sin_yaw * sin_yaw; - pose_cov[utils::MSG_COV_IDX::X_Y] = 0.5 * (p0_cov_x - p0_cov_y) * sin_2yaw; - pose_cov[utils::MSG_COV_IDX::Y_Y] = - p0_cov_x * sin_yaw * sin_yaw + p0_cov_y * cos_yaw * cos_yaw; - pose_cov[utils::MSG_COV_IDX::Y_X] = pose_cov[utils::MSG_COV_IDX::X_Y]; - pose_cov[utils::MSG_COV_IDX::YAW_YAW] = p0_cov_yaw; + pose_cov[XYZRPY_COV_IDX::X_X] = p0_cov_x * cos_yaw * cos_yaw + p0_cov_y * sin_yaw * sin_yaw; + pose_cov[XYZRPY_COV_IDX::X_Y] = 0.5 * (p0_cov_x - p0_cov_y) * sin_2yaw; + pose_cov[XYZRPY_COV_IDX::Y_Y] = p0_cov_x * sin_yaw * sin_yaw + p0_cov_y * cos_yaw * cos_yaw; + pose_cov[XYZRPY_COV_IDX::Y_X] = pose_cov[XYZRPY_COV_IDX::X_Y]; + pose_cov[XYZRPY_COV_IDX::YAW_YAW] = p0_cov_yaw; } - if (!object.kinematics.has_twist_covariance) { - constexpr double p0_stddev_vel = - autoware::universe_utils::kmph2mps(1000); // in object coordinate [m/s] - vel_cov = std::pow(p0_stddev_vel, 2.0); - } else { - vel_cov = object.kinematics.twist_with_covariance.covariance[utils::MSG_COV_IDX::X_X]; + double vel = 0.0; + double vel_cov = object_model_.initial_covariance.vel_long; + if (object.kinematics.has_twist) { + vel = object.kinematics.twist_with_covariance.twist.linear.x; + } + if (object.kinematics.has_twist_covariance) { + vel_cov = object.kinematics.twist_with_covariance.covariance[XYZRPY_COV_IDX::X_X]; } const double slip = 0.0; - const double p0_stddev_slip = - autoware::universe_utils::deg2rad(5); // in object coordinate [rad/s] - const double slip_cov = std::pow(p0_stddev_slip, 2.0); + const double slip_cov = object_model_.bicycle_state.init_slip_angle_cov; + const double & length = bounding_box_.length; // 
initialize motion model motion_model_.initialize(time, x, y, yaw, pose_cov, vel, vel_cov, slip, slip_cov, length); @@ -190,14 +167,9 @@ autoware_perception_msgs::msg::DetectedObject BigVehicleTracker::getUpdatingObje { autoware_perception_msgs::msg::DetectedObject updating_object = object; - // current (predicted) state - const double tracked_x = motion_model_.getStateElement(IDX::X); - const double tracked_y = motion_model_.getStateElement(IDX::Y); - const double tracked_yaw = motion_model_.getStateElement(IDX::YAW); - // OBJECT SHAPE MODEL // convert to bounding box if input is convex shape - autoware_perception_msgs::msg::DetectedObject bbox_object; + autoware_perception_msgs::msg::DetectedObject bbox_object = object; if (object.shape.type != autoware_perception_msgs::msg::Shape::BOUNDING_BOX) { if (!utils::convertConvexHullToBoundingBox(object, bbox_object)) { RCLCPP_WARN( @@ -205,10 +177,13 @@ autoware_perception_msgs::msg::DetectedObject BigVehicleTracker::getUpdatingObje "BigVehicleTracker::getUpdatingObject: Failed to convert convex hull to bounding box."); bbox_object = object; } - } else { - bbox_object = object; } + // current (predicted) state + const double tracked_x = motion_model_.getStateElement(IDX::X); + const double tracked_y = motion_model_.getStateElement(IDX::Y); + const double tracked_yaw = motion_model_.getStateElement(IDX::YAW); + // get offset measurement const int nearest_corner_index = utils::getNearestCornerOrSurface( tracked_x, tracked_y, tracked_yaw, bounding_box_.width, bounding_box_.length, self_transform); @@ -219,50 +194,45 @@ autoware_perception_msgs::msg::DetectedObject BigVehicleTracker::getUpdatingObje // UNCERTAINTY MODEL if (!object.kinematics.has_position_covariance) { // measurement noise covariance - float r_cov_x; - float r_cov_y; - using Label = autoware_perception_msgs::msg::ObjectClassification; + auto r_cov_x = object_model_.measurement_covariance.pos_x; + auto r_cov_y = object_model_.measurement_covariance.pos_y; const uint8_t label = object_recognition_utils::getHighestProbLabel(object.classification); - if (utils::isLargeVehicleLabel(label)) { - r_cov_x = ekf_params_.r_cov_x; - r_cov_y = ekf_params_.r_cov_y; - } else if (label == Label::CAR) { + if (label == autoware_perception_msgs::msg::ObjectClassification::CAR) { // if label is changed, enlarge the measurement noise covariance constexpr float r_stddev_x = 2.0; // [m] constexpr float r_stddev_y = 2.0; // [m] - r_cov_x = std::pow(r_stddev_x, 2.0); - r_cov_y = std::pow(r_stddev_y, 2.0); - } else { - r_cov_x = ekf_params_.r_cov_x; - r_cov_y = ekf_params_.r_cov_y; + r_cov_x = r_stddev_x * r_stddev_x; + r_cov_y = r_stddev_y * r_stddev_y; } // yaw angle fix - double pose_yaw = tf2::getYaw(object.kinematics.pose_with_covariance.pose.orientation); - bool is_yaw_available = object.kinematics.orientation_availability != - autoware_perception_msgs::msg::DetectedObjectKinematics::UNAVAILABLE; + const double pose_yaw = tf2::getYaw(object.kinematics.pose_with_covariance.pose.orientation); + const bool is_yaw_available = + object.kinematics.orientation_availability != + autoware_perception_msgs::msg::DetectedObjectKinematics::UNAVAILABLE; // fill covariance matrix + using autoware::universe_utils::xyzrpy_covariance_index::XYZRPY_COV_IDX; auto & pose_cov = updating_object.kinematics.pose_with_covariance.covariance; const double cos_yaw = std::cos(pose_yaw); const double sin_yaw = std::sin(pose_yaw); - const double sin_2yaw = std::sin(2.0f * pose_yaw); - pose_cov[utils::MSG_COV_IDX::X_X] = - r_cov_x * 
cos_yaw * cos_yaw + r_cov_y * sin_yaw * sin_yaw; // x - x - pose_cov[utils::MSG_COV_IDX::X_Y] = 0.5f * (r_cov_x - r_cov_y) * sin_2yaw; // x - y - pose_cov[utils::MSG_COV_IDX::Y_Y] = - r_cov_x * sin_yaw * sin_yaw + r_cov_y * cos_yaw * cos_yaw; // y - y - pose_cov[utils::MSG_COV_IDX::Y_X] = pose_cov[utils::MSG_COV_IDX::X_Y]; // y - x - pose_cov[utils::MSG_COV_IDX::X_YAW] = 0.0; // x - yaw - pose_cov[utils::MSG_COV_IDX::Y_YAW] = 0.0; // y - yaw - pose_cov[utils::MSG_COV_IDX::YAW_X] = 0.0; // yaw - x - pose_cov[utils::MSG_COV_IDX::YAW_Y] = 0.0; // yaw - y - pose_cov[utils::MSG_COV_IDX::YAW_YAW] = ekf_params_.r_cov_yaw; // yaw - yaw + const double sin_2yaw = std::sin(2.0 * pose_yaw); + pose_cov[XYZRPY_COV_IDX::X_X] = + r_cov_x * cos_yaw * cos_yaw + r_cov_y * sin_yaw * sin_yaw; // x - x + pose_cov[XYZRPY_COV_IDX::X_Y] = 0.5 * (r_cov_x - r_cov_y) * sin_2yaw; // x - y + pose_cov[XYZRPY_COV_IDX::Y_Y] = + r_cov_x * sin_yaw * sin_yaw + r_cov_y * cos_yaw * cos_yaw; // y - y + pose_cov[XYZRPY_COV_IDX::Y_X] = pose_cov[XYZRPY_COV_IDX::X_Y]; // y - x + pose_cov[XYZRPY_COV_IDX::X_YAW] = 0.0; // x - yaw + pose_cov[XYZRPY_COV_IDX::Y_YAW] = 0.0; // y - yaw + pose_cov[XYZRPY_COV_IDX::YAW_X] = 0.0; // yaw - x + pose_cov[XYZRPY_COV_IDX::YAW_Y] = 0.0; // yaw - y + pose_cov[XYZRPY_COV_IDX::YAW_YAW] = object_model_.measurement_covariance.yaw; // yaw - yaw if (!is_yaw_available) { - pose_cov[utils::MSG_COV_IDX::YAW_YAW] *= 1e3; // yaw is not available, multiply large value + pose_cov[XYZRPY_COV_IDX::YAW_YAW] *= 1e3; // yaw is not available, multiply large value } auto & twist_cov = updating_object.kinematics.twist_with_covariance.covariance; - twist_cov[utils::MSG_COV_IDX::X_X] = ekf_params_.r_cov_vel; // vel - vel + twist_cov[XYZRPY_COV_IDX::X_X] = object_model_.measurement_covariance.vel_long; // vel - vel } return updating_object; @@ -314,34 +284,35 @@ bool BigVehicleTracker::measureWithPose( bool BigVehicleTracker::measureWithShape( const autoware_perception_msgs::msg::DetectedObject & object) { - if (!object.shape.type == autoware_perception_msgs::msg::Shape::BOUNDING_BOX) { + if (object.shape.type != autoware_perception_msgs::msg::Shape::BOUNDING_BOX) { // do not update shape if the input is not a bounding box return false; } // check object size abnormality - constexpr double size_max = 40.0; // [m] + constexpr double size_max = 35.0; // [m] constexpr double size_min = 1.0; // [m] - if (object.shape.dimensions.x > size_max || object.shape.dimensions.y > size_max) { - return false; - } else if (object.shape.dimensions.x < size_min || object.shape.dimensions.y < size_min) { + bool is_size_valid = + (object.shape.dimensions.x <= size_max && object.shape.dimensions.y <= size_max && + object.shape.dimensions.x >= size_min && object.shape.dimensions.y >= size_min); + if (!is_size_valid) { return false; } + // update object size constexpr double gain = 0.5; constexpr double gain_inv = 1.0 - gain; - - // update object size bounding_box_.length = gain_inv * bounding_box_.length + gain * object.shape.dimensions.x; bounding_box_.width = gain_inv * bounding_box_.width + gain * object.shape.dimensions.y; bounding_box_.height = gain_inv * bounding_box_.height + gain * object.shape.dimensions.z; // set maximum and minimum size - constexpr double max_size = 30.0; - constexpr double min_size = 1.0; - bounding_box_.length = std::min(std::max(bounding_box_.length, min_size), max_size); - bounding_box_.width = std::min(std::max(bounding_box_.width, min_size), max_size); - bounding_box_.height = 
std::min(std::max(bounding_box_.height, min_size), max_size); + bounding_box_.length = std::clamp( + bounding_box_.length, object_model_.size_limit.length_min, object_model_.size_limit.length_max); + bounding_box_.width = std::clamp( + bounding_box_.width, object_model_.size_limit.width_min, object_model_.size_limit.width_max); + bounding_box_.height = std::clamp( + bounding_box_.height, object_model_.size_limit.height_min, object_model_.size_limit.height_max); // update motion model motion_model_.updateExtendedState(bounding_box_.length); diff --git a/perception/multi_object_tracker/src/tracker/model/normal_vehicle_tracker.cpp b/perception/multi_object_tracker/src/tracker/model/normal_vehicle_tracker.cpp index 1fff1a13add65..67282893083c1 100644 --- a/perception/multi_object_tracker/src/tracker/model/normal_vehicle_tracker.cpp +++ b/perception/multi_object_tracker/src/tracker/model/normal_vehicle_tracker.cpp @@ -15,14 +15,19 @@ // // Author: v1.0 Yukihiro Saito // +#define EIGEN_MPL2_ONLY #include "multi_object_tracker/tracker/model/normal_vehicle_tracker.hpp" +#include "autoware/universe_utils/geometry/boost_polygon_utils.hpp" +#include "autoware/universe_utils/math/normalization.hpp" +#include "autoware/universe_utils/math/unit_conversion.hpp" +#include "autoware/universe_utils/ros/msg_covariance.hpp" #include "multi_object_tracker/utils/utils.hpp" +#include "object_recognition_utils/object_recognition_utils.hpp" -#include -#include -#include +#include +#include #include #include @@ -34,11 +39,6 @@ #else #include #endif -#include "object_recognition_utils/object_recognition_utils.hpp" - -#define EIGEN_MPL2_ONLY -#include -#include using Label = autoware_perception_msgs::msg::ObjectClassification; @@ -56,17 +56,6 @@ NormalVehicleTracker::NormalVehicleTracker( // initialize existence probability initializeExistenceProbabilities(channel_index, object.existence_probability); - // Initialize parameters - // measurement noise covariance: detector uncertainty + ego vehicle motion uncertainty - float r_stddev_x = 0.5; // in vehicle coordinate [m] - float r_stddev_y = 0.4; // in vehicle coordinate [m] - float r_stddev_yaw = autoware::universe_utils::deg2rad(20); // in map coordinate [rad] - float r_stddev_vel = 1.0; // in object coordinate [m/s] - ekf_params_.r_cov_x = std::pow(r_stddev_x, 2.0); - ekf_params_.r_cov_y = std::pow(r_stddev_y, 2.0); - ekf_params_.r_cov_yaw = std::pow(r_stddev_yaw, 2.0); - ekf_params_.r_cov_vel = std::pow(r_stddev_vel, 2.0); - // velocity deviation threshold // if the predicted velocity is close to the observed velocity, // the observed velocity is used as the measurement. 
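// ---------------------------------------------------------------------------
// [Illustrative sketch, not part of the patch] The surrounding hunks replace
// per-tracker hard-coded tuning constants (default sizes, size limits,
// measurement noise) with values read from object_model_, and collapse the
// nested std::min(std::max(...)) calls into std::clamp. Below is a minimal,
// self-contained sketch of that clamping pattern; SizeLimit, BoundingBox and
// the numeric limits are hypothetical stand-ins for the object-model types,
// which are not shown in this patch.
#include <algorithm>

namespace sketch
{
struct SizeLimit
{
  double length_min{1.0};   // [m] assumed for illustration only
  double length_max{35.0};  // [m] assumed for illustration only
};

struct BoundingBox
{
  double length{0.0};
  double width{0.0};
  double height{0.0};
};

// old: bbox.length = std::min(std::max(bbox.length, min_size), max_size);
// new: bbox.length = std::clamp(bbox.length, limit.length_min, limit.length_max);
inline void clampLength(BoundingBox & bbox, const SizeLimit & limit)
{
  bbox.length = std::clamp(bbox.length, limit.length_min, limit.length_max);
}
}  // namespace sketch

// Usage: a 60 m detection is truncated to the assumed 35 m maximum.
//   sketch::BoundingBox box{60.0, 2.5, 3.5};
//   sketch::clampLength(box, sketch::SizeLimit{});   // box.length == 35.0
// ---------------------------------------------------------------------------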
@@ -83,7 +72,9 @@ NormalVehicleTracker::NormalVehicleTracker( logger_, "NormalVehicleTracker::NormalVehicleTracker: Failed to convert convex hull to bounding " "box."); - bounding_box_ = {3.0, 2.0, 1.8}; // default value + bounding_box_ = { + object_model_.init_size.length, object_model_.init_size.width, + object_model_.init_size.height}; // default value } else { bounding_box_ = { bbox_object.shape.dimensions.x, bbox_object.shape.dimensions.y, @@ -91,28 +82,26 @@ NormalVehicleTracker::NormalVehicleTracker( } } // set maximum and minimum size - constexpr double max_size = 20.0; - constexpr double min_size = 1.0; - bounding_box_.length = std::min(std::max(bounding_box_.length, min_size), max_size); - bounding_box_.width = std::min(std::max(bounding_box_.width, min_size), max_size); - bounding_box_.height = std::min(std::max(bounding_box_.height, min_size), max_size); + bounding_box_.length = std::clamp( + bounding_box_.length, object_model_.size_limit.length_min, object_model_.size_limit.length_max); + bounding_box_.width = std::clamp( + bounding_box_.width, object_model_.size_limit.width_min, object_model_.size_limit.width_max); + bounding_box_.height = std::clamp( + bounding_box_.height, object_model_.size_limit.height_min, object_model_.size_limit.height_max); // Set motion model parameters { - constexpr double q_stddev_acc_long = - 9.8 * 0.35; // [m/(s*s)] uncertain longitudinal acceleration - constexpr double q_stddev_acc_lat = 9.8 * 0.15; // [m/(s*s)] uncertain lateral acceleration - constexpr double q_stddev_yaw_rate_min = 1.5; // [deg/s] uncertain yaw change rate, minimum - constexpr double q_stddev_yaw_rate_max = 15.0; // [deg/s] uncertain yaw change rate, maximum - constexpr double q_stddev_slip_rate_min = - 0.3; // [deg/s] uncertain slip angle change rate, minimum - constexpr double q_stddev_slip_rate_max = - 10.0; // [deg/s] uncertain slip angle change rate, maximum - constexpr double q_max_slip_angle = 30; // [deg] max slip angle - constexpr double lf_ratio = 0.3; // [-] ratio of front wheel position - constexpr double lf_min = 1.0; // [m] minimum front wheel position - constexpr double lr_ratio = 0.25; // [-] ratio of rear wheel position - constexpr double lr_min = 1.0; // [m] minimum rear wheel position + const double q_stddev_acc_long = object_model_.process_noise.acc_long; + const double q_stddev_acc_lat = object_model_.process_noise.acc_lat; + const double q_stddev_yaw_rate_min = object_model_.process_noise.yaw_rate_min; + const double q_stddev_yaw_rate_max = object_model_.process_noise.yaw_rate_max; + const double q_stddev_slip_rate_min = object_model_.bicycle_state.slip_rate_stddev_min; + const double q_stddev_slip_rate_max = object_model_.bicycle_state.slip_rate_stddev_max; + const double q_max_slip_angle = object_model_.bicycle_state.slip_angle_max; + const double lf_ratio = object_model_.bicycle_state.wheel_pos_ratio_front; + const double lf_min = object_model_.bicycle_state.wheel_pos_front_min; + const double lr_ratio = object_model_.bicycle_state.wheel_pos_ratio_rear; + const double lr_min = object_model_.bicycle_state.wheel_pos_rear_min; motion_model_.setMotionParams( q_stddev_acc_long, q_stddev_acc_lat, q_stddev_yaw_rate_min, q_stddev_yaw_rate_max, q_stddev_slip_rate_min, q_stddev_slip_rate_max, q_max_slip_angle, lf_ratio, lf_min, lr_ratio, @@ -121,59 +110,47 @@ NormalVehicleTracker::NormalVehicleTracker( // Set motion limits { - constexpr double max_vel = autoware::universe_utils::kmph2mps(100); // [m/s] maximum velocity - constexpr double max_slip = 30; // [deg] 
maximum slip angle + const double max_vel = object_model_.process_limit.vel_long_max; + const double max_slip = object_model_.bicycle_state.slip_angle_max; motion_model_.setMotionLimits(max_vel, max_slip); // maximum velocity and slip angle } // Set initial state { + using autoware::universe_utils::xyzrpy_covariance_index::XYZRPY_COV_IDX; const double x = object.kinematics.pose_with_covariance.pose.position.x; const double y = object.kinematics.pose_with_covariance.pose.position.y; const double yaw = tf2::getYaw(object.kinematics.pose_with_covariance.pose.orientation); - auto pose_cov = object.kinematics.pose_with_covariance.covariance; - double vel = 0.0; - double vel_cov; - const double & length = bounding_box_.length; - - if (object.kinematics.has_twist) { - vel = object.kinematics.twist_with_covariance.twist.linear.x; - } + auto pose_cov = object.kinematics.pose_with_covariance.covariance; if (!object.kinematics.has_position_covariance) { // initial state covariance - constexpr double p0_stddev_x = 1.0; // in object coordinate [m] - constexpr double p0_stddev_y = 0.3; // in object coordinate [m] - constexpr double p0_stddev_yaw = - autoware::universe_utils::deg2rad(25); // in map coordinate [rad] - constexpr double p0_cov_x = p0_stddev_x * p0_stddev_x; - constexpr double p0_cov_y = p0_stddev_y * p0_stddev_y; - constexpr double p0_cov_yaw = p0_stddev_yaw * p0_stddev_yaw; + const auto & p0_cov_x = object_model_.initial_covariance.pos_x; + const auto & p0_cov_y = object_model_.initial_covariance.pos_y; + const auto & p0_cov_yaw = object_model_.initial_covariance.yaw; const double cos_yaw = std::cos(yaw); const double sin_yaw = std::sin(yaw); const double sin_2yaw = std::sin(2.0 * yaw); - pose_cov[utils::MSG_COV_IDX::X_X] = - p0_cov_x * cos_yaw * cos_yaw + p0_cov_y * sin_yaw * sin_yaw; - pose_cov[utils::MSG_COV_IDX::X_Y] = 0.5 * (p0_cov_x - p0_cov_y) * sin_2yaw; - pose_cov[utils::MSG_COV_IDX::Y_Y] = - p0_cov_x * sin_yaw * sin_yaw + p0_cov_y * cos_yaw * cos_yaw; - pose_cov[utils::MSG_COV_IDX::Y_X] = pose_cov[utils::MSG_COV_IDX::X_Y]; - pose_cov[utils::MSG_COV_IDX::YAW_YAW] = p0_cov_yaw; + pose_cov[XYZRPY_COV_IDX::X_X] = p0_cov_x * cos_yaw * cos_yaw + p0_cov_y * sin_yaw * sin_yaw; + pose_cov[XYZRPY_COV_IDX::X_Y] = 0.5 * (p0_cov_x - p0_cov_y) * sin_2yaw; + pose_cov[XYZRPY_COV_IDX::Y_Y] = p0_cov_x * sin_yaw * sin_yaw + p0_cov_y * cos_yaw * cos_yaw; + pose_cov[XYZRPY_COV_IDX::Y_X] = pose_cov[XYZRPY_COV_IDX::X_Y]; + pose_cov[XYZRPY_COV_IDX::YAW_YAW] = p0_cov_yaw; } - if (!object.kinematics.has_twist_covariance) { - constexpr double p0_stddev_vel = - autoware::universe_utils::kmph2mps(1000); // in object coordinate [m/s] - vel_cov = std::pow(p0_stddev_vel, 2.0); - } else { - vel_cov = object.kinematics.twist_with_covariance.covariance[utils::MSG_COV_IDX::X_X]; + double vel = 0.0; + double vel_cov = object_model_.initial_covariance.vel_long; + if (object.kinematics.has_twist) { + vel = object.kinematics.twist_with_covariance.twist.linear.x; + } + if (object.kinematics.has_twist_covariance) { + vel_cov = object.kinematics.twist_with_covariance.covariance[XYZRPY_COV_IDX::X_X]; } const double slip = 0.0; - const double p0_stddev_slip = - autoware::universe_utils::deg2rad(5); // in object coordinate [rad/s] - const double slip_cov = std::pow(p0_stddev_slip, 2.0); + const double slip_cov = object_model_.bicycle_state.init_slip_angle_cov; + const double & length = bounding_box_.length; // initialize motion model motion_model_.initialize(time, x, y, yaw, pose_cov, vel, vel_cov, slip, slip_cov, length); 
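// ---------------------------------------------------------------------------
// [Illustrative sketch, not part of the patch] The initial-state hunk above
// rotates the object-frame position covariances (p0_cov_x along the body axis,
// p0_cov_y across it) into the map frame. The entries written into pose_cov
// are the expansion of R(yaw) * diag(cov_x, cov_y) * R(yaw)^T, with the
// off-diagonal term expressed as 0.5 * (cov_x - cov_y) * sin(2 * yaw). The
// helper names below are hypothetical; only Eigen and <cmath> are assumed.
#define EIGEN_MPL2_ONLY
#include <Eigen/Core>
#include <Eigen/Geometry>

#include <cmath>

// Closed-form version, mirroring the expressions used in the patch.
inline Eigen::Matrix2d rotateCovarianceExpanded(
  const double cov_x, const double cov_y, const double yaw)
{
  const double cos_yaw = std::cos(yaw);
  const double sin_yaw = std::sin(yaw);
  const double sin_2yaw = std::sin(2.0 * yaw);
  Eigen::Matrix2d cov;
  cov(0, 0) = cov_x * cos_yaw * cos_yaw + cov_y * sin_yaw * sin_yaw;  // X_X
  cov(0, 1) = 0.5 * (cov_x - cov_y) * sin_2yaw;                       // X_Y
  cov(1, 0) = cov(0, 1);                                              // Y_X
  cov(1, 1) = cov_x * sin_yaw * sin_yaw + cov_y * cos_yaw * cos_yaw;  // Y_Y
  return cov;
}

// Matrix form of the same rotation; both functions return identical matrices,
// which is a quick way to check the closed-form expansion above.
inline Eigen::Matrix2d rotateCovarianceMatrixForm(
  const double cov_x, const double cov_y, const double yaw)
{
  const Eigen::Matrix2d R = Eigen::Rotation2Dd(yaw).toRotationMatrix();
  return R * Eigen::Vector2d(cov_x, cov_y).asDiagonal() * R.transpose();
}
// ---------------------------------------------------------------------------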
@@ -191,14 +168,9 @@ autoware_perception_msgs::msg::DetectedObject NormalVehicleTracker::getUpdatingO { autoware_perception_msgs::msg::DetectedObject updating_object = object; - // current (predicted) state - const double tracked_x = motion_model_.getStateElement(IDX::X); - const double tracked_y = motion_model_.getStateElement(IDX::Y); - const double tracked_yaw = motion_model_.getStateElement(IDX::YAW); - // OBJECT SHAPE MODEL // convert to bounding box if input is convex shape - autoware_perception_msgs::msg::DetectedObject bbox_object; + autoware_perception_msgs::msg::DetectedObject bbox_object = object; if (object.shape.type != autoware_perception_msgs::msg::Shape::BOUNDING_BOX) { if (!utils::convertConvexHullToBoundingBox(object, bbox_object)) { RCLCPP_WARN( @@ -206,11 +178,13 @@ autoware_perception_msgs::msg::DetectedObject NormalVehicleTracker::getUpdatingO "NormalVehicleTracker::getUpdatingObject: Failed to convert convex hull to bounding box."); bbox_object = object; } - - } else { - bbox_object = object; } + // current (predicted) state + const double tracked_x = motion_model_.getStateElement(IDX::X); + const double tracked_y = motion_model_.getStateElement(IDX::Y); + const double tracked_yaw = motion_model_.getStateElement(IDX::YAW); + // get offset measurement const int nearest_corner_index = utils::getNearestCornerOrSurface( tracked_x, tracked_y, tracked_yaw, bounding_box_.width, bounding_box_.length, self_transform); @@ -221,50 +195,45 @@ autoware_perception_msgs::msg::DetectedObject NormalVehicleTracker::getUpdatingO // UNCERTAINTY MODEL if (!object.kinematics.has_position_covariance) { // measurement noise covariance - float r_cov_x; - float r_cov_y; - using Label = autoware_perception_msgs::msg::ObjectClassification; + auto r_cov_x = object_model_.measurement_covariance.pos_x; + auto r_cov_y = object_model_.measurement_covariance.pos_y; const uint8_t label = object_recognition_utils::getHighestProbLabel(object.classification); - if (label == Label::CAR) { - r_cov_x = ekf_params_.r_cov_x; - r_cov_y = ekf_params_.r_cov_y; - } else if (utils::isLargeVehicleLabel(label)) { + if (utils::isLargeVehicleLabel(label)) { // if label is changed, enlarge the measurement noise covariance constexpr float r_stddev_x = 2.0; // [m] constexpr float r_stddev_y = 2.0; // [m] - r_cov_x = std::pow(r_stddev_x, 2.0); - r_cov_y = std::pow(r_stddev_y, 2.0); - } else { - r_cov_x = ekf_params_.r_cov_x; - r_cov_y = ekf_params_.r_cov_y; + r_cov_x = r_stddev_x * r_stddev_x; + r_cov_y = r_stddev_y * r_stddev_y; } // yaw angle fix - double pose_yaw = tf2::getYaw(object.kinematics.pose_with_covariance.pose.orientation); - bool is_yaw_available = object.kinematics.orientation_availability != - autoware_perception_msgs::msg::DetectedObjectKinematics::UNAVAILABLE; + const double pose_yaw = tf2::getYaw(object.kinematics.pose_with_covariance.pose.orientation); + const bool is_yaw_available = + object.kinematics.orientation_availability != + autoware_perception_msgs::msg::DetectedObjectKinematics::UNAVAILABLE; // fill covariance matrix + using autoware::universe_utils::xyzrpy_covariance_index::XYZRPY_COV_IDX; auto & pose_cov = updating_object.kinematics.pose_with_covariance.covariance; const double cos_yaw = std::cos(pose_yaw); const double sin_yaw = std::sin(pose_yaw); - const double sin_2yaw = std::sin(2.0f * pose_yaw); - pose_cov[utils::MSG_COV_IDX::X_X] = - r_cov_x * cos_yaw * cos_yaw + r_cov_y * sin_yaw * sin_yaw; // x - x - pose_cov[utils::MSG_COV_IDX::X_Y] = 0.5f * (r_cov_x - r_cov_y) * sin_2yaw; // x - 
y - pose_cov[utils::MSG_COV_IDX::Y_Y] = - r_cov_x * sin_yaw * sin_yaw + r_cov_y * cos_yaw * cos_yaw; // y - y - pose_cov[utils::MSG_COV_IDX::Y_X] = pose_cov[utils::MSG_COV_IDX::X_Y]; // y - x - pose_cov[utils::MSG_COV_IDX::X_YAW] = 0.0; // x - yaw - pose_cov[utils::MSG_COV_IDX::Y_YAW] = 0.0; // y - yaw - pose_cov[utils::MSG_COV_IDX::YAW_X] = 0.0; // yaw - x - pose_cov[utils::MSG_COV_IDX::YAW_Y] = 0.0; // yaw - y - pose_cov[utils::MSG_COV_IDX::YAW_YAW] = ekf_params_.r_cov_yaw; // yaw - yaw + const double sin_2yaw = std::sin(2.0 * pose_yaw); + pose_cov[XYZRPY_COV_IDX::X_X] = + r_cov_x * cos_yaw * cos_yaw + r_cov_y * sin_yaw * sin_yaw; // x - x + pose_cov[XYZRPY_COV_IDX::X_Y] = 0.5 * (r_cov_x - r_cov_y) * sin_2yaw; // x - y + pose_cov[XYZRPY_COV_IDX::Y_Y] = + r_cov_x * sin_yaw * sin_yaw + r_cov_y * cos_yaw * cos_yaw; // y - y + pose_cov[XYZRPY_COV_IDX::Y_X] = pose_cov[XYZRPY_COV_IDX::X_Y]; // y - x + pose_cov[XYZRPY_COV_IDX::X_YAW] = 0.0; // x - yaw + pose_cov[XYZRPY_COV_IDX::Y_YAW] = 0.0; // y - yaw + pose_cov[XYZRPY_COV_IDX::YAW_X] = 0.0; // yaw - x + pose_cov[XYZRPY_COV_IDX::YAW_Y] = 0.0; // yaw - y + pose_cov[XYZRPY_COV_IDX::YAW_YAW] = object_model_.measurement_covariance.yaw; // yaw - yaw if (!is_yaw_available) { - pose_cov[utils::MSG_COV_IDX::YAW_YAW] *= 1e3; // yaw is not available, multiply large value + pose_cov[XYZRPY_COV_IDX::YAW_YAW] *= 1e3; // yaw is not available, multiply large value } auto & twist_cov = updating_object.kinematics.twist_with_covariance.covariance; - twist_cov[utils::MSG_COV_IDX::X_X] = ekf_params_.r_cov_vel; // vel - vel + twist_cov[XYZRPY_COV_IDX::X_X] = object_model_.measurement_covariance.vel_long; // vel - vel } return updating_object; @@ -316,34 +285,35 @@ bool NormalVehicleTracker::measureWithPose( bool NormalVehicleTracker::measureWithShape( const autoware_perception_msgs::msg::DetectedObject & object) { - if (!object.shape.type == autoware_perception_msgs::msg::Shape::BOUNDING_BOX) { + if (object.shape.type != autoware_perception_msgs::msg::Shape::BOUNDING_BOX) { // do not update shape if the input is not a bounding box return false; } // check object size abnormality - constexpr double size_max = 30.0; // [m] + constexpr double size_max = 35.0; // [m] constexpr double size_min = 1.0; // [m] - if (object.shape.dimensions.x > size_max || object.shape.dimensions.y > size_max) { - return false; - } else if (object.shape.dimensions.x < size_min || object.shape.dimensions.y < size_min) { + bool is_size_valid = + (object.shape.dimensions.x <= size_max && object.shape.dimensions.y <= size_max && + object.shape.dimensions.x >= size_min && object.shape.dimensions.y >= size_min); + if (!is_size_valid) { return false; } + // update object size constexpr double gain = 0.5; constexpr double gain_inv = 1.0 - gain; - - // update object size bounding_box_.length = gain_inv * bounding_box_.length + gain * object.shape.dimensions.x; bounding_box_.width = gain_inv * bounding_box_.width + gain * object.shape.dimensions.y; bounding_box_.height = gain_inv * bounding_box_.height + gain * object.shape.dimensions.z; // set maximum and minimum size - constexpr double max_size = 20.0; - constexpr double min_size = 1.0; - bounding_box_.length = std::min(std::max(bounding_box_.length, min_size), max_size); - bounding_box_.width = std::min(std::max(bounding_box_.width, min_size), max_size); - bounding_box_.height = std::min(std::max(bounding_box_.height, min_size), max_size); + bounding_box_.length = std::clamp( + bounding_box_.length, object_model_.size_limit.length_min, 
object_model_.size_limit.length_max); + bounding_box_.width = std::clamp( + bounding_box_.width, object_model_.size_limit.width_min, object_model_.size_limit.width_max); + bounding_box_.height = std::clamp( + bounding_box_.height, object_model_.size_limit.height_min, object_model_.size_limit.height_max); // update motion model motion_model_.updateExtendedState(bounding_box_.length); diff --git a/perception/multi_object_tracker/src/tracker/model/pass_through_tracker.cpp b/perception/multi_object_tracker/src/tracker/model/pass_through_tracker.cpp index 3e4f23cd440bc..17be415480360 100644 --- a/perception/multi_object_tracker/src/tracker/model/pass_through_tracker.cpp +++ b/perception/multi_object_tracker/src/tracker/model/pass_through_tracker.cpp @@ -15,6 +15,15 @@ // // Author: v1.0 Yutaka Shimizu // +#define EIGEN_MPL2_ONLY +#include "multi_object_tracker/tracker/model/pass_through_tracker.hpp" + +#include "autoware/universe_utils/ros/msg_covariance.hpp" +#include "multi_object_tracker/utils/utils.hpp" +#include "object_recognition_utils/object_recognition_utils.hpp" + +#include +#include #include #include @@ -27,14 +36,6 @@ #include #endif -#define EIGEN_MPL2_ONLY -#include "multi_object_tracker/tracker/model/pass_through_tracker.hpp" -#include "multi_object_tracker/utils/utils.hpp" -#include "object_recognition_utils/object_recognition_utils.hpp" - -#include -#include - PassThroughTracker::PassThroughTracker( const rclcpp::Time & time, const autoware_perception_msgs::msg::DetectedObject & object, const geometry_msgs::msg::Transform & /*self_transform*/, const size_t channel_size, @@ -86,25 +87,26 @@ bool PassThroughTracker::measure( bool PassThroughTracker::getTrackedObject( const rclcpp::Time & time, autoware_perception_msgs::msg::TrackedObject & object) const { + using autoware::universe_utils::xyzrpy_covariance_index::XYZRPY_COV_IDX; object = object_recognition_utils::toTrackedObject(object_); object.object_id = getUUID(); object.classification = getClassification(); - object.kinematics.pose_with_covariance.covariance[utils::MSG_COV_IDX::X_X] = 0.0; - object.kinematics.pose_with_covariance.covariance[utils::MSG_COV_IDX::X_Y] = 0.0; - object.kinematics.pose_with_covariance.covariance[utils::MSG_COV_IDX::Y_X] = 0.0; - object.kinematics.pose_with_covariance.covariance[utils::MSG_COV_IDX::Y_Y] = 0.0; - object.kinematics.pose_with_covariance.covariance[utils::MSG_COV_IDX::Z_Z] = 0.0; - object.kinematics.pose_with_covariance.covariance[utils::MSG_COV_IDX::ROLL_ROLL] = 0.0; - object.kinematics.pose_with_covariance.covariance[utils::MSG_COV_IDX::PITCH_PITCH] = 0.0; - object.kinematics.pose_with_covariance.covariance[utils::MSG_COV_IDX::YAW_YAW] = 0.0; + object.kinematics.pose_with_covariance.covariance[XYZRPY_COV_IDX::X_X] = 0.0; + object.kinematics.pose_with_covariance.covariance[XYZRPY_COV_IDX::X_Y] = 0.0; + object.kinematics.pose_with_covariance.covariance[XYZRPY_COV_IDX::Y_X] = 0.0; + object.kinematics.pose_with_covariance.covariance[XYZRPY_COV_IDX::Y_Y] = 0.0; + object.kinematics.pose_with_covariance.covariance[XYZRPY_COV_IDX::Z_Z] = 0.0; + object.kinematics.pose_with_covariance.covariance[XYZRPY_COV_IDX::ROLL_ROLL] = 0.0; + object.kinematics.pose_with_covariance.covariance[XYZRPY_COV_IDX::PITCH_PITCH] = 0.0; + object.kinematics.pose_with_covariance.covariance[XYZRPY_COV_IDX::YAW_YAW] = 0.0; // twist covariance - object.kinematics.twist_with_covariance.covariance[utils::MSG_COV_IDX::X_X] = 0.0; - object.kinematics.twist_with_covariance.covariance[utils::MSG_COV_IDX::Y_Y] = 0.0; - 
object.kinematics.twist_with_covariance.covariance[utils::MSG_COV_IDX::Z_Z] = 0.0; - object.kinematics.twist_with_covariance.covariance[utils::MSG_COV_IDX::ROLL_ROLL] = 0.0; - object.kinematics.twist_with_covariance.covariance[utils::MSG_COV_IDX::PITCH_PITCH] = 0.0; - object.kinematics.twist_with_covariance.covariance[utils::MSG_COV_IDX::YAW_YAW] = 0.0; + object.kinematics.twist_with_covariance.covariance[XYZRPY_COV_IDX::X_X] = 0.0; + object.kinematics.twist_with_covariance.covariance[XYZRPY_COV_IDX::Y_Y] = 0.0; + object.kinematics.twist_with_covariance.covariance[XYZRPY_COV_IDX::Z_Z] = 0.0; + object.kinematics.twist_with_covariance.covariance[XYZRPY_COV_IDX::ROLL_ROLL] = 0.0; + object.kinematics.twist_with_covariance.covariance[XYZRPY_COV_IDX::PITCH_PITCH] = 0.0; + object.kinematics.twist_with_covariance.covariance[XYZRPY_COV_IDX::YAW_YAW] = 0.0; const double dt = (time - last_update_time_).seconds(); if (0.5 /*500msec*/ < dt) { diff --git a/perception/multi_object_tracker/src/tracker/model/pedestrian_tracker.cpp b/perception/multi_object_tracker/src/tracker/model/pedestrian_tracker.cpp index 887810066f038..d9412c3b5739c 100644 --- a/perception/multi_object_tracker/src/tracker/model/pedestrian_tracker.cpp +++ b/perception/multi_object_tracker/src/tracker/model/pedestrian_tracker.cpp @@ -15,14 +15,19 @@ // // Author: v1.0 Yukihiro Saito // +#define EIGEN_MPL2_ONLY #include "multi_object_tracker/tracker/model/pedestrian_tracker.hpp" +#include "autoware/universe_utils/geometry/boost_polygon_utils.hpp" +#include "autoware/universe_utils/math/normalization.hpp" +#include "autoware/universe_utils/math/unit_conversion.hpp" +#include "autoware/universe_utils/ros/msg_covariance.hpp" #include "multi_object_tracker/utils/utils.hpp" +#include "object_recognition_utils/object_recognition_utils.hpp" -#include -#include -#include +#include +#include #include #include @@ -34,11 +39,6 @@ #else #include #endif -#include "object_recognition_utils/object_recognition_utils.hpp" - -#define EIGEN_MPL2_ONLY -#include -#include using Label = autoware_perception_msgs::msg::ObjectClassification; @@ -55,18 +55,11 @@ PedestrianTracker::PedestrianTracker( // initialize existence probability initializeExistenceProbabilities(channel_index, object.existence_probability); - // Initialize parameters - // measurement noise covariance - float r_stddev_x = 0.4; // [m] - float r_stddev_y = 0.4; // [m] - float r_stddev_yaw = autoware::universe_utils::deg2rad(30); // [rad] - ekf_params_.r_cov_x = std::pow(r_stddev_x, 2.0); - ekf_params_.r_cov_y = std::pow(r_stddev_y, 2.0); - ekf_params_.r_cov_yaw = std::pow(r_stddev_yaw, 2.0); - // OBJECT SHAPE MODEL - bounding_box_ = {0.5, 0.5, 1.7}; - cylinder_ = {0.5, 1.7}; + bounding_box_ = { + object_model_.init_size.length, object_model_.init_size.width, + object_model_.init_size.height}; // default value + cylinder_ = {object_model_.init_size.length, object_model_.init_size.height}; // default value if (object.shape.type == autoware_perception_msgs::msg::Shape::BOUNDING_BOX) { bounding_box_ = { object.shape.dimensions.x, object.shape.dimensions.y, object.shape.dimensions.z}; @@ -76,78 +69,70 @@ PedestrianTracker::PedestrianTracker( // do not update polygon shape } // set maximum and minimum size - constexpr double max_size = 5.0; - constexpr double min_size = 0.3; - bounding_box_.length = std::min(std::max(bounding_box_.length, min_size), max_size); - bounding_box_.width = std::min(std::max(bounding_box_.width, min_size), max_size); - bounding_box_.height = 
std::min(std::max(bounding_box_.height, min_size), max_size); - cylinder_.width = std::min(std::max(cylinder_.width, min_size), max_size); - cylinder_.height = std::min(std::max(cylinder_.height, min_size), max_size); + bounding_box_.length = std::clamp( + bounding_box_.length, object_model_.size_limit.length_min, object_model_.size_limit.length_max); + bounding_box_.width = std::clamp( + bounding_box_.width, object_model_.size_limit.width_min, object_model_.size_limit.width_max); + bounding_box_.height = std::clamp( + bounding_box_.height, object_model_.size_limit.height_min, object_model_.size_limit.height_max); + cylinder_.width = std::clamp( + cylinder_.width, object_model_.size_limit.length_min, object_model_.size_limit.length_max); + cylinder_.height = std::clamp( + cylinder_.height, object_model_.size_limit.height_min, object_model_.size_limit.height_max); // Set motion model parameters { - constexpr double q_stddev_x = 0.5; // [m/s] - constexpr double q_stddev_y = 0.5; // [m/s] - constexpr double q_stddev_yaw = autoware::universe_utils::deg2rad(20); // [rad/s] - constexpr double q_stddev_vx = 9.8 * 0.3; // [m/(s*s)] - constexpr double q_stddev_wz = autoware::universe_utils::deg2rad(30); // [rad/(s*s)] + const double q_stddev_x = object_model_.process_noise.vel_long; + const double q_stddev_y = object_model_.process_noise.vel_lat; + const double q_stddev_yaw = object_model_.process_noise.yaw_rate; + const double q_stddev_vx = object_model_.process_noise.acc_long; + const double q_stddev_wz = object_model_.process_noise.acc_turn; motion_model_.setMotionParams(q_stddev_x, q_stddev_y, q_stddev_yaw, q_stddev_vx, q_stddev_wz); } // Set motion limits - motion_model_.setMotionLimits( - autoware::universe_utils::kmph2mps(100), /* [m/s] maximum velocity */ - 30.0 /* [deg/s] maximum turn rate */ - ); + { + const double max_vel = object_model_.process_limit.vel_long_max; + const double max_turn_rate = object_model_.process_limit.yaw_rate_max; + motion_model_.setMotionLimits(max_vel, max_turn_rate); // maximum velocity and slip angle + } // Set initial state { + using autoware::universe_utils::xyzrpy_covariance_index::XYZRPY_COV_IDX; const double x = object.kinematics.pose_with_covariance.pose.position.x; const double y = object.kinematics.pose_with_covariance.pose.position.y; const double yaw = tf2::getYaw(object.kinematics.pose_with_covariance.pose.orientation); - auto pose_cov = object.kinematics.pose_with_covariance.covariance; - double vel = 0.0; - double wz = 0.0; - double vel_cov; - double wz_cov; - - if (object.kinematics.has_twist) { - vel = object.kinematics.twist_with_covariance.twist.linear.x; - wz = object.kinematics.twist_with_covariance.twist.angular.z; - } + auto pose_cov = object.kinematics.pose_with_covariance.covariance; if (!object.kinematics.has_position_covariance) { // initial state covariance - constexpr double p0_stddev_x = 2.0; // in object coordinate [m] - constexpr double p0_stddev_y = 2.0; // in object coordinate [m] - constexpr double p0_stddev_yaw = - autoware::universe_utils::deg2rad(1000); // in map coordinate [rad] - constexpr double p0_cov_x = p0_stddev_x * p0_stddev_x; - constexpr double p0_cov_y = p0_stddev_y * p0_stddev_y; - constexpr double p0_cov_yaw = p0_stddev_yaw * p0_stddev_yaw; + const auto & p0_cov_x = object_model_.initial_covariance.pos_x; + const auto & p0_cov_y = object_model_.initial_covariance.pos_y; + const auto & p0_cov_yaw = object_model_.initial_covariance.yaw; const double cos_yaw = std::cos(yaw); const double sin_yaw = std::sin(yaw); 
const double sin_2yaw = std::sin(2.0 * yaw); - pose_cov[utils::MSG_COV_IDX::X_X] = - p0_cov_x * cos_yaw * cos_yaw + p0_cov_y * sin_yaw * sin_yaw; - pose_cov[utils::MSG_COV_IDX::X_Y] = 0.5 * (p0_cov_x - p0_cov_y) * sin_2yaw; - pose_cov[utils::MSG_COV_IDX::Y_Y] = - p0_cov_x * sin_yaw * sin_yaw + p0_cov_y * cos_yaw * cos_yaw; - pose_cov[utils::MSG_COV_IDX::Y_X] = pose_cov[utils::MSG_COV_IDX::X_Y]; - pose_cov[utils::MSG_COV_IDX::YAW_YAW] = p0_cov_yaw; + pose_cov[XYZRPY_COV_IDX::X_X] = p0_cov_x * cos_yaw * cos_yaw + p0_cov_y * sin_yaw * sin_yaw; + pose_cov[XYZRPY_COV_IDX::X_Y] = 0.5 * (p0_cov_x - p0_cov_y) * sin_2yaw; + pose_cov[XYZRPY_COV_IDX::Y_Y] = p0_cov_x * sin_yaw * sin_yaw + p0_cov_y * cos_yaw * cos_yaw; + pose_cov[XYZRPY_COV_IDX::Y_X] = pose_cov[XYZRPY_COV_IDX::X_Y]; + pose_cov[XYZRPY_COV_IDX::YAW_YAW] = p0_cov_yaw; + } + + double vel = 0.0; + double wz = 0.0; + if (object.kinematics.has_twist) { + vel = object.kinematics.twist_with_covariance.twist.linear.x; + wz = object.kinematics.twist_with_covariance.twist.angular.z; } - if (!object.kinematics.has_twist_covariance) { - constexpr double p0_stddev_vel = - autoware::universe_utils::kmph2mps(120); // in object coordinate [m/s] - constexpr double p0_stddev_wz = - autoware::universe_utils::deg2rad(360); // in object coordinate [rad/s] - vel_cov = std::pow(p0_stddev_vel, 2.0); - wz_cov = std::pow(p0_stddev_wz, 2.0); - } else { - vel_cov = object.kinematics.twist_with_covariance.covariance[utils::MSG_COV_IDX::X_X]; - wz_cov = object.kinematics.twist_with_covariance.covariance[utils::MSG_COV_IDX::YAW_YAW]; + double vel_cov = object_model_.initial_covariance.vel_long; + double wz_cov = object_model_.initial_covariance.yaw_rate; + if (object.kinematics.has_twist_covariance) { + vel_cov = object.kinematics.twist_with_covariance.covariance[XYZRPY_COV_IDX::X_X]; + wz_cov = object.kinematics.twist_with_covariance.covariance[XYZRPY_COV_IDX::YAW_YAW]; } // initialize motion model @@ -162,26 +147,46 @@ bool PedestrianTracker::predict(const rclcpp::Time & time) autoware_perception_msgs::msg::DetectedObject PedestrianTracker::getUpdatingObject( const autoware_perception_msgs::msg::DetectedObject & object, - const geometry_msgs::msg::Transform & /*self_transform*/) + const geometry_msgs::msg::Transform & /*self_transform*/) const { autoware_perception_msgs::msg::DetectedObject updating_object = object; // UNCERTAINTY MODEL if (!object.kinematics.has_position_covariance) { - const double & r_cov_x = ekf_params_.r_cov_x; - const double & r_cov_y = ekf_params_.r_cov_y; - auto & pose_cov = updating_object.kinematics.pose_with_covariance.covariance; + // measurement noise covariance + auto r_cov_x = object_model_.measurement_covariance.pos_x; + auto r_cov_y = object_model_.measurement_covariance.pos_y; + + // yaw angle fix const double pose_yaw = tf2::getYaw(object.kinematics.pose_with_covariance.pose.orientation); + const bool is_yaw_available = + object.kinematics.orientation_availability != + autoware_perception_msgs::msg::DetectedObjectKinematics::UNAVAILABLE; + + // fill covariance matrix + using autoware::universe_utils::xyzrpy_covariance_index::XYZRPY_COV_IDX; + auto & pose_cov = updating_object.kinematics.pose_with_covariance.covariance; const double cos_yaw = std::cos(pose_yaw); const double sin_yaw = std::sin(pose_yaw); - const double sin_2yaw = std::sin(2.0f * pose_yaw); - pose_cov[utils::MSG_COV_IDX::X_X] = - r_cov_x * cos_yaw * cos_yaw + r_cov_y * sin_yaw * sin_yaw; // x - x - pose_cov[utils::MSG_COV_IDX::X_Y] = 0.5f * (r_cov_x - r_cov_y) * 
sin_2yaw; // x - y - pose_cov[utils::MSG_COV_IDX::Y_Y] = - r_cov_x * sin_yaw * sin_yaw + r_cov_y * cos_yaw * cos_yaw; // y - y - pose_cov[utils::MSG_COV_IDX::Y_X] = pose_cov[utils::MSG_COV_IDX::X_Y]; // y - x + const double sin_2yaw = std::sin(2.0 * pose_yaw); + pose_cov[XYZRPY_COV_IDX::X_X] = + r_cov_x * cos_yaw * cos_yaw + r_cov_y * sin_yaw * sin_yaw; // x - x + pose_cov[XYZRPY_COV_IDX::X_Y] = 0.5 * (r_cov_x - r_cov_y) * sin_2yaw; // x - y + pose_cov[XYZRPY_COV_IDX::Y_Y] = + r_cov_x * sin_yaw * sin_yaw + r_cov_y * cos_yaw * cos_yaw; // y - y + pose_cov[XYZRPY_COV_IDX::Y_X] = pose_cov[XYZRPY_COV_IDX::X_Y]; // y - x + pose_cov[XYZRPY_COV_IDX::X_YAW] = 0.0; // x - yaw + pose_cov[XYZRPY_COV_IDX::Y_YAW] = 0.0; // y - yaw + pose_cov[XYZRPY_COV_IDX::YAW_X] = 0.0; // yaw - x + pose_cov[XYZRPY_COV_IDX::YAW_Y] = 0.0; // yaw - y + pose_cov[XYZRPY_COV_IDX::YAW_YAW] = object_model_.measurement_covariance.yaw; // yaw - yaw + if (!is_yaw_available) { + pose_cov[XYZRPY_COV_IDX::YAW_YAW] *= 1e3; // yaw is not available, multiply large value + } + auto & twist_cov = updating_object.kinematics.twist_with_covariance.covariance; + twist_cov[XYZRPY_COV_IDX::X_X] = object_model_.measurement_covariance.vel_long; // vel - vel } + return updating_object; } @@ -216,43 +221,48 @@ bool PedestrianTracker::measureWithShape( // check bound box size abnormality constexpr double size_max = 30.0; // [m] constexpr double size_min = 0.1; // [m] - if ( - object.shape.dimensions.x > size_max || object.shape.dimensions.y > size_max || - object.shape.dimensions.z > size_max) { - return false; - } else if ( - object.shape.dimensions.x < size_min || object.shape.dimensions.y < size_min || - object.shape.dimensions.z < size_min) { + bool is_size_valid = + (object.shape.dimensions.x <= size_max && object.shape.dimensions.y <= size_max && + object.shape.dimensions.z <= size_max && object.shape.dimensions.x >= size_min && + object.shape.dimensions.y >= size_min && object.shape.dimensions.z >= size_min); + if (!is_size_valid) { return false; } // update bounding box size bounding_box_.length = gain_inv * bounding_box_.length + gain * object.shape.dimensions.x; bounding_box_.width = gain_inv * bounding_box_.width + gain * object.shape.dimensions.y; bounding_box_.height = gain_inv * bounding_box_.height + gain * object.shape.dimensions.z; + } else if (object.shape.type == autoware_perception_msgs::msg::Shape::CYLINDER) { // check cylinder size abnormality constexpr double size_max = 30.0; // [m] constexpr double size_min = 0.1; // [m] - if (object.shape.dimensions.x > size_max || object.shape.dimensions.z > size_max) { - return false; - } else if (object.shape.dimensions.x < size_min || object.shape.dimensions.z < size_min) { + bool is_size_valid = + (object.shape.dimensions.x <= size_max && object.shape.dimensions.z <= size_max && + object.shape.dimensions.x >= size_min && object.shape.dimensions.z >= size_min); + if (!is_size_valid) { return false; } + // update cylinder size cylinder_.width = gain_inv * cylinder_.width + gain * object.shape.dimensions.x; cylinder_.height = gain_inv * cylinder_.height + gain * object.shape.dimensions.z; + } else { // do not update polygon shape return false; } // set maximum and minimum size - constexpr double max_size = 5.0; - constexpr double min_size = 0.3; - bounding_box_.length = std::min(std::max(bounding_box_.length, min_size), max_size); - bounding_box_.width = std::min(std::max(bounding_box_.width, min_size), max_size); - bounding_box_.height = std::min(std::max(bounding_box_.height, 
min_size), max_size); - cylinder_.width = std::min(std::max(cylinder_.width, min_size), max_size); - cylinder_.height = std::min(std::max(cylinder_.height, min_size), max_size); + bounding_box_.length = std::clamp( + bounding_box_.length, object_model_.size_limit.length_min, object_model_.size_limit.length_max); + bounding_box_.width = std::clamp( + bounding_box_.width, object_model_.size_limit.width_min, object_model_.size_limit.width_max); + bounding_box_.height = std::clamp( + bounding_box_.height, object_model_.size_limit.height_min, object_model_.size_limit.height_max); + cylinder_.width = std::clamp( + cylinder_.width, object_model_.size_limit.length_min, object_model_.size_limit.length_max); + cylinder_.height = std::clamp( + cylinder_.height, object_model_.size_limit.height_min, object_model_.size_limit.height_max); return true; } diff --git a/perception/multi_object_tracker/src/tracker/model/unknown_tracker.cpp b/perception/multi_object_tracker/src/tracker/model/unknown_tracker.cpp index 8e484623ef0aa..6905d7c3b8ced 100644 --- a/perception/multi_object_tracker/src/tracker/model/unknown_tracker.cpp +++ b/perception/multi_object_tracker/src/tracker/model/unknown_tracker.cpp @@ -11,14 +11,18 @@ // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. +#define EIGEN_MPL2_ONLY #include "multi_object_tracker/tracker/model/unknown_tracker.hpp" +#include "autoware/universe_utils/geometry/boost_polygon_utils.hpp" +#include "autoware/universe_utils/math/normalization.hpp" +#include "autoware/universe_utils/math/unit_conversion.hpp" +#include "autoware/universe_utils/ros/msg_covariance.hpp" #include "multi_object_tracker/utils/utils.hpp" -#include -#include -#include +#include +#include #include #include @@ -32,10 +36,6 @@ #endif #include "object_recognition_utils/object_recognition_utils.hpp" -#define EIGEN_MPL2_ONLY -#include -#include - UnknownTracker::UnknownTracker( const rclcpp::Time & time, const autoware_perception_msgs::msg::DetectedObject & object, const geometry_msgs::msg::Transform & /*self_transform*/, const size_t channel_size, @@ -73,6 +73,7 @@ UnknownTracker::UnknownTracker( // Set initial state { + using autoware::universe_utils::xyzrpy_covariance_index::XYZRPY_COV_IDX; const double x = object.kinematics.pose_with_covariance.pose.position.x; const double y = object.kinematics.pose_with_covariance.pose.position.y; auto pose_cov = object.kinematics.pose_with_covariance.covariance; @@ -98,12 +99,10 @@ UnknownTracker::UnknownTracker( const double cos_yaw = std::cos(yaw); const double sin_yaw = std::sin(yaw); const double sin_2yaw = std::sin(2.0 * yaw); - pose_cov[utils::MSG_COV_IDX::X_X] = - p0_cov_x * cos_yaw * cos_yaw + p0_cov_y * sin_yaw * sin_yaw; - pose_cov[utils::MSG_COV_IDX::X_Y] = 0.5 * (p0_cov_x - p0_cov_y) * sin_2yaw; - pose_cov[utils::MSG_COV_IDX::Y_Y] = - p0_cov_x * sin_yaw * sin_yaw + p0_cov_y * cos_yaw * cos_yaw; - pose_cov[utils::MSG_COV_IDX::Y_X] = pose_cov[utils::MSG_COV_IDX::X_Y]; + pose_cov[XYZRPY_COV_IDX::X_X] = p0_cov_x * cos_yaw * cos_yaw + p0_cov_y * sin_yaw * sin_yaw; + pose_cov[XYZRPY_COV_IDX::X_Y] = 0.5 * (p0_cov_x - p0_cov_y) * sin_2yaw; + pose_cov[XYZRPY_COV_IDX::Y_Y] = p0_cov_x * sin_yaw * sin_yaw + p0_cov_y * cos_yaw * cos_yaw; + pose_cov[XYZRPY_COV_IDX::Y_X] = pose_cov[XYZRPY_COV_IDX::X_Y]; } if (!object.kinematics.has_twist_covariance) { @@ -111,24 +110,24 @@ UnknownTracker::UnknownTracker( constexpr double p0_stddev_vy = 
autoware::universe_utils::kmph2mps(10); // [m/s] const double p0_cov_vx = std::pow(p0_stddev_vx, 2.0); const double p0_cov_vy = std::pow(p0_stddev_vy, 2.0); - twist_cov[utils::MSG_COV_IDX::X_X] = p0_cov_vx; - twist_cov[utils::MSG_COV_IDX::X_Y] = 0.0; - twist_cov[utils::MSG_COV_IDX::Y_X] = 0.0; - twist_cov[utils::MSG_COV_IDX::Y_Y] = p0_cov_vy; + twist_cov[XYZRPY_COV_IDX::X_X] = p0_cov_vx; + twist_cov[XYZRPY_COV_IDX::X_Y] = 0.0; + twist_cov[XYZRPY_COV_IDX::Y_X] = 0.0; + twist_cov[XYZRPY_COV_IDX::Y_Y] = p0_cov_vy; } // rotate twist covariance matrix, since it is in the vehicle coordinate system Eigen::MatrixXd twist_cov_rotate(2, 2); - twist_cov_rotate(0, 0) = twist_cov[utils::MSG_COV_IDX::X_X]; - twist_cov_rotate(0, 1) = twist_cov[utils::MSG_COV_IDX::X_Y]; - twist_cov_rotate(1, 0) = twist_cov[utils::MSG_COV_IDX::Y_X]; - twist_cov_rotate(1, 1) = twist_cov[utils::MSG_COV_IDX::Y_Y]; + twist_cov_rotate(0, 0) = twist_cov[XYZRPY_COV_IDX::X_X]; + twist_cov_rotate(0, 1) = twist_cov[XYZRPY_COV_IDX::X_Y]; + twist_cov_rotate(1, 0) = twist_cov[XYZRPY_COV_IDX::Y_X]; + twist_cov_rotate(1, 1) = twist_cov[XYZRPY_COV_IDX::Y_Y]; Eigen::MatrixXd R_yaw = Eigen::Rotation2Dd(-yaw).toRotationMatrix(); Eigen::MatrixXd twist_cov_rotated = R_yaw * twist_cov_rotate * R_yaw.transpose(); - twist_cov[utils::MSG_COV_IDX::X_X] = twist_cov_rotated(0, 0); - twist_cov[utils::MSG_COV_IDX::X_Y] = twist_cov_rotated(0, 1); - twist_cov[utils::MSG_COV_IDX::Y_X] = twist_cov_rotated(1, 0); - twist_cov[utils::MSG_COV_IDX::Y_Y] = twist_cov_rotated(1, 1); + twist_cov[XYZRPY_COV_IDX::X_X] = twist_cov_rotated(0, 0); + twist_cov[XYZRPY_COV_IDX::X_Y] = twist_cov_rotated(0, 1); + twist_cov[XYZRPY_COV_IDX::Y_X] = twist_cov_rotated(1, 0); + twist_cov[XYZRPY_COV_IDX::Y_Y] = twist_cov_rotated(1, 1); // initialize motion model motion_model_.initialize(time, x, y, pose_cov, vx, vy, twist_cov); @@ -148,6 +147,8 @@ autoware_perception_msgs::msg::DetectedObject UnknownTracker::getUpdatingObject( // UNCERTAINTY MODEL if (!object.kinematics.has_position_covariance) { + // fill covariance matrix + using autoware::universe_utils::xyzrpy_covariance_index::XYZRPY_COV_IDX; const double & r_cov_x = ekf_params_.r_cov_x; const double & r_cov_y = ekf_params_.r_cov_y; auto & pose_cov = updating_object.kinematics.pose_with_covariance.covariance; @@ -155,12 +156,12 @@ autoware_perception_msgs::msg::DetectedObject UnknownTracker::getUpdatingObject( const double cos_yaw = std::cos(pose_yaw); const double sin_yaw = std::sin(pose_yaw); const double sin_2yaw = std::sin(2.0f * pose_yaw); - pose_cov[utils::MSG_COV_IDX::X_X] = - r_cov_x * cos_yaw * cos_yaw + r_cov_y * sin_yaw * sin_yaw; // x - x - pose_cov[utils::MSG_COV_IDX::X_Y] = 0.5f * (r_cov_x - r_cov_y) * sin_2yaw; // x - y - pose_cov[utils::MSG_COV_IDX::Y_Y] = - r_cov_x * sin_yaw * sin_yaw + r_cov_y * cos_yaw * cos_yaw; // y - y - pose_cov[utils::MSG_COV_IDX::Y_X] = pose_cov[utils::MSG_COV_IDX::X_Y]; // y - x + pose_cov[XYZRPY_COV_IDX::X_X] = + r_cov_x * cos_yaw * cos_yaw + r_cov_y * sin_yaw * sin_yaw; // x - x + pose_cov[XYZRPY_COV_IDX::X_Y] = 0.5f * (r_cov_x - r_cov_y) * sin_2yaw; // x - y + pose_cov[XYZRPY_COV_IDX::Y_Y] = + r_cov_x * sin_yaw * sin_yaw + r_cov_y * cos_yaw * cos_yaw; // y - y + pose_cov[XYZRPY_COV_IDX::Y_X] = pose_cov[XYZRPY_COV_IDX::X_Y]; // y - x } return updating_object; } diff --git a/perception/multi_object_tracker/src/tracker/motion_model/bicycle_motion_model.cpp b/perception/multi_object_tracker/src/tracker/motion_model/bicycle_motion_model.cpp index f21855c2356ef..466108a2bef0e 100644 --- 
a/perception/multi_object_tracker/src/tracker/motion_model/bicycle_motion_model.cpp +++ b/perception/multi_object_tracker/src/tracker/motion_model/bicycle_motion_model.cpp @@ -15,25 +15,25 @@ // // Author: v1.0 Taekjin Lee // +#define EIGEN_MPL2_ONLY #include "multi_object_tracker/tracker/motion_model/bicycle_motion_model.hpp" +#include "autoware/universe_utils/math/normalization.hpp" +#include "autoware/universe_utils/math/unit_conversion.hpp" +#include "autoware/universe_utils/ros/msg_covariance.hpp" #include "multi_object_tracker/tracker/motion_model/motion_model_base.hpp" #include "multi_object_tracker/utils/utils.hpp" -#include -#include - -#define EIGEN_MPL2_ONLY #include #include // cspell: ignore CTRV // Bicycle CTRV motion model // CTRV : Constant Turn Rate and constant Velocity +using autoware::universe_utils::xyzrpy_covariance_index::XYZRPY_COV_IDX; -BicycleMotionModel::BicycleMotionModel() -: MotionModel(), logger_(rclcpp::get_logger("BicycleMotionModel")) +BicycleMotionModel::BicycleMotionModel() : logger_(rclcpp::get_logger("BicycleMotionModel")) { // Initialize motion parameters setDefaultParams(); @@ -44,11 +44,16 @@ void BicycleMotionModel::setDefaultParams() // set default motion parameters constexpr double q_stddev_acc_long = 9.8 * 0.35; // [m/(s*s)] uncertain longitudinal acceleration constexpr double q_stddev_acc_lat = 9.8 * 0.15; // [m/(s*s)] uncertain lateral acceleration - constexpr double q_stddev_yaw_rate_min = 1.5; // [deg/s] uncertain yaw change rate - constexpr double q_stddev_yaw_rate_max = 15.0; // [deg/s] uncertain yaw change rate - constexpr double q_stddev_slip_rate_min = 0.3; // [deg/s] uncertain slip angle change rate - constexpr double q_stddev_slip_rate_max = 10.0; // [deg/s] uncertain slip angle change rate - constexpr double q_max_slip_angle = 30.0; // [deg] max slip angle + constexpr double q_stddev_yaw_rate_min = + autoware::universe_utils::deg2rad(1.5); // [rad/s] uncertain yaw change rate + constexpr double q_stddev_yaw_rate_max = + autoware::universe_utils::deg2rad(15.0); // [rad/s] uncertain yaw change rate + constexpr double q_stddev_slip_rate_min = + autoware::universe_utils::deg2rad(0.3); // [rad/s] uncertain slip angle change rate + constexpr double q_stddev_slip_rate_max = + autoware::universe_utils::deg2rad(10.0); // [rad/s] uncertain slip angle change rate + constexpr double q_max_slip_angle = + autoware::universe_utils::deg2rad(30.0); // [rad] max slip angle // extended state parameters constexpr double lf_ratio = 0.3; // 30% front from the center constexpr double lf_min = 1.0; // minimum of 1.0m @@ -79,13 +84,13 @@ void BicycleMotionModel::setMotionParams( // set process noise covariance parameters motion_params_.q_stddev_acc_long = q_stddev_acc_long; motion_params_.q_stddev_acc_lat = q_stddev_acc_lat; - motion_params_.q_stddev_yaw_rate_min = autoware::universe_utils::deg2rad(q_stddev_yaw_rate_min); - motion_params_.q_stddev_yaw_rate_max = autoware::universe_utils::deg2rad(q_stddev_yaw_rate_max); - motion_params_.q_cov_slip_rate_min = - std::pow(autoware::universe_utils::deg2rad(q_stddev_slip_rate_min), 2.0); - motion_params_.q_cov_slip_rate_max = - std::pow(autoware::universe_utils::deg2rad(q_stddev_slip_rate_max), 2.0); - motion_params_.q_max_slip_angle = autoware::universe_utils::deg2rad(q_max_slip_angle); + motion_params_.q_cov_acc_long = q_stddev_acc_long * q_stddev_acc_long; + motion_params_.q_cov_acc_lat = q_stddev_acc_lat * q_stddev_acc_lat; + motion_params_.q_stddev_yaw_rate_min = q_stddev_yaw_rate_min; + 
motion_params_.q_stddev_yaw_rate_max = q_stddev_yaw_rate_max; + motion_params_.q_cov_slip_rate_min = q_stddev_slip_rate_min * q_stddev_slip_rate_min; + motion_params_.q_cov_slip_rate_max = q_stddev_slip_rate_max * q_stddev_slip_rate_max; + motion_params_.q_max_slip_angle = q_max_slip_angle; constexpr double minimum_wheel_pos = 0.01; // minimum of 0.01m if (lf_min < minimum_wheel_pos || lr_min < minimum_wheel_pos) { @@ -103,7 +108,7 @@ void BicycleMotionModel::setMotionLimits(const double & max_vel, const double & { // set motion limitations motion_params_.max_vel = max_vel; - motion_params_.max_slip = autoware::universe_utils::deg2rad(max_slip); + motion_params_.max_slip = max_slip; } bool BicycleMotionModel::initialize( @@ -117,9 +122,9 @@ bool BicycleMotionModel::initialize( // initialize covariance matrix P Eigen::MatrixXd P = Eigen::MatrixXd::Zero(DIM, DIM); - P(IDX::X, IDX::X) = pose_cov[utils::MSG_COV_IDX::X_X]; - P(IDX::Y, IDX::Y) = pose_cov[utils::MSG_COV_IDX::Y_Y]; - P(IDX::YAW, IDX::YAW) = pose_cov[utils::MSG_COV_IDX::YAW_YAW]; + P(IDX::X, IDX::X) = pose_cov[XYZRPY_COV_IDX::X_X]; + P(IDX::Y, IDX::Y) = pose_cov[XYZRPY_COV_IDX::Y_Y]; + P(IDX::YAW, IDX::YAW) = pose_cov[XYZRPY_COV_IDX::YAW_YAW]; P(IDX::VEL, IDX::VEL) = vel_cov; P(IDX::SLIP, IDX::SLIP) = slip_cov; @@ -147,10 +152,10 @@ bool BicycleMotionModel::updateStatePose( C(1, IDX::Y) = 1.0; Eigen::MatrixXd R = Eigen::MatrixXd::Zero(DIM_Y, DIM_Y); - R(0, 0) = pose_cov[utils::MSG_COV_IDX::X_X]; - R(0, 1) = pose_cov[utils::MSG_COV_IDX::X_Y]; - R(1, 0) = pose_cov[utils::MSG_COV_IDX::Y_X]; - R(1, 1) = pose_cov[utils::MSG_COV_IDX::Y_Y]; + R(0, 0) = pose_cov[XYZRPY_COV_IDX::X_X]; + R(0, 1) = pose_cov[XYZRPY_COV_IDX::X_Y]; + R(1, 0) = pose_cov[XYZRPY_COV_IDX::Y_X]; + R(1, 1) = pose_cov[XYZRPY_COV_IDX::Y_Y]; return ekf_.update(Y, C, R); } @@ -186,15 +191,15 @@ bool BicycleMotionModel::updateStatePoseHead( C(2, IDX::YAW) = 1.0; Eigen::MatrixXd R = Eigen::MatrixXd::Zero(DIM_Y, DIM_Y); - R(0, 0) = pose_cov[utils::MSG_COV_IDX::X_X]; - R(0, 1) = pose_cov[utils::MSG_COV_IDX::X_Y]; - R(1, 0) = pose_cov[utils::MSG_COV_IDX::Y_X]; - R(1, 1) = pose_cov[utils::MSG_COV_IDX::Y_Y]; - R(0, 2) = pose_cov[utils::MSG_COV_IDX::X_YAW]; - R(1, 2) = pose_cov[utils::MSG_COV_IDX::Y_YAW]; - R(2, 0) = pose_cov[utils::MSG_COV_IDX::YAW_X]; - R(2, 1) = pose_cov[utils::MSG_COV_IDX::YAW_Y]; - R(2, 2) = pose_cov[utils::MSG_COV_IDX::YAW_YAW]; + R(0, 0) = pose_cov[XYZRPY_COV_IDX::X_X]; + R(0, 1) = pose_cov[XYZRPY_COV_IDX::X_Y]; + R(1, 0) = pose_cov[XYZRPY_COV_IDX::Y_X]; + R(1, 1) = pose_cov[XYZRPY_COV_IDX::Y_Y]; + R(0, 2) = pose_cov[XYZRPY_COV_IDX::X_YAW]; + R(1, 2) = pose_cov[XYZRPY_COV_IDX::Y_YAW]; + R(2, 0) = pose_cov[XYZRPY_COV_IDX::YAW_X]; + R(2, 1) = pose_cov[XYZRPY_COV_IDX::YAW_Y]; + R(2, 2) = pose_cov[XYZRPY_COV_IDX::YAW_YAW]; return ekf_.update(Y, C, R); } @@ -232,16 +237,16 @@ bool BicycleMotionModel::updateStatePoseHeadVel( C(3, IDX::VEL) = 1.0; Eigen::MatrixXd R = Eigen::MatrixXd::Zero(DIM_Y, DIM_Y); - R(0, 0) = pose_cov[utils::MSG_COV_IDX::X_X]; - R(0, 1) = pose_cov[utils::MSG_COV_IDX::X_Y]; - R(1, 0) = pose_cov[utils::MSG_COV_IDX::Y_X]; - R(1, 1) = pose_cov[utils::MSG_COV_IDX::Y_Y]; - R(0, 2) = pose_cov[utils::MSG_COV_IDX::X_YAW]; - R(1, 2) = pose_cov[utils::MSG_COV_IDX::Y_YAW]; - R(2, 0) = pose_cov[utils::MSG_COV_IDX::YAW_X]; - R(2, 1) = pose_cov[utils::MSG_COV_IDX::YAW_Y]; - R(2, 2) = pose_cov[utils::MSG_COV_IDX::YAW_YAW]; - R(3, 3) = twist_cov[utils::MSG_COV_IDX::X_X]; + R(0, 0) = pose_cov[XYZRPY_COV_IDX::X_X]; + R(0, 1) = pose_cov[XYZRPY_COV_IDX::X_Y]; + 
R(1, 0) = pose_cov[XYZRPY_COV_IDX::Y_X]; + R(1, 1) = pose_cov[XYZRPY_COV_IDX::Y_Y]; + R(0, 2) = pose_cov[XYZRPY_COV_IDX::X_YAW]; + R(1, 2) = pose_cov[XYZRPY_COV_IDX::Y_YAW]; + R(2, 0) = pose_cov[XYZRPY_COV_IDX::YAW_X]; + R(2, 1) = pose_cov[XYZRPY_COV_IDX::YAW_Y]; + R(2, 2) = pose_cov[XYZRPY_COV_IDX::YAW_YAW]; + R(3, 3) = twist_cov[XYZRPY_COV_IDX::X_X]; return ekf_.update(Y, C, R); } @@ -354,10 +359,8 @@ bool BicycleMotionModel::predictStateStep(const double dt, KalmanFilter & ekf) c A(IDX::YAW, IDX::SLIP) = vel / lr_ * cos_slip * dt; // Process noise covariance Q - double q_stddev_yaw_rate{0.0}; - if (vel <= 0.01) { - q_stddev_yaw_rate = motion_params_.q_stddev_yaw_rate_min; - } else { + double q_stddev_yaw_rate = motion_params_.q_stddev_yaw_rate_min; + if (vel > 0.01) { /* uncertainty of the yaw rate is limited by the following: * - centripetal acceleration a_lat : d(yaw)/dt = w = a_lat/v * - or maximum slip angle slip_max : w = v*sin(slip_max)/l_r @@ -365,8 +368,9 @@ bool BicycleMotionModel::predictStateStep(const double dt, KalmanFilter & ekf) c q_stddev_yaw_rate = std::min( motion_params_.q_stddev_acc_lat / vel, vel * std::sin(motion_params_.q_max_slip_angle) / lr_); // [rad/s] - q_stddev_yaw_rate = std::min(q_stddev_yaw_rate, motion_params_.q_stddev_yaw_rate_max); - q_stddev_yaw_rate = std::max(q_stddev_yaw_rate, motion_params_.q_stddev_yaw_rate_min); + q_stddev_yaw_rate = std::clamp( + q_stddev_yaw_rate, motion_params_.q_stddev_yaw_rate_min, + motion_params_.q_stddev_yaw_rate_max); } double q_cov_slip_rate{0.0}; if (vel <= 0.01) { @@ -384,11 +388,13 @@ bool BicycleMotionModel::predictStateStep(const double dt, KalmanFilter & ekf) c q_cov_slip_rate = std::min(q_cov_slip_rate, motion_params_.q_cov_slip_rate_max); q_cov_slip_rate = std::max(q_cov_slip_rate, motion_params_.q_cov_slip_rate_min); } - const double q_cov_x = std::pow(0.5 * motion_params_.q_stddev_acc_long * dt * dt, 2); - const double q_cov_y = std::pow(0.5 * motion_params_.q_stddev_acc_lat * dt * dt, 2); - const double q_cov_yaw = std::pow(q_stddev_yaw_rate * dt, 2); - const double q_cov_vel = std::pow(motion_params_.q_stddev_acc_long * dt, 2); - const double q_cov_slip = q_cov_slip_rate * dt * dt; + const double dt2 = dt * dt; + const double dt4 = dt2 * dt2; + const double q_cov_x = 0.25 * motion_params_.q_cov_acc_long * dt4; + const double q_cov_y = 0.25 * motion_params_.q_cov_acc_lat * dt4; + const double q_cov_yaw = q_stddev_yaw_rate * q_stddev_yaw_rate * dt2; + const double q_cov_vel = motion_params_.q_cov_acc_long * dt2; + const double q_cov_slip = q_cov_slip_rate * dt2; Eigen::MatrixXd Q = Eigen::MatrixXd::Zero(DIM, DIM); // Rotate the covariance matrix according to the vehicle yaw @@ -445,14 +451,14 @@ bool BicycleMotionModel::getPredictedState( constexpr double zz_cov = 0.1 * 0.1; // TODO(yukkysaito) Currently tentative constexpr double rr_cov = 0.1 * 0.1; // TODO(yukkysaito) Currently tentative constexpr double pp_cov = 0.1 * 0.1; // TODO(yukkysaito) Currently tentative - pose_cov[utils::MSG_COV_IDX::X_X] = P(IDX::X, IDX::X); - pose_cov[utils::MSG_COV_IDX::X_Y] = P(IDX::X, IDX::Y); - pose_cov[utils::MSG_COV_IDX::Y_X] = P(IDX::Y, IDX::X); - pose_cov[utils::MSG_COV_IDX::Y_Y] = P(IDX::Y, IDX::Y); - pose_cov[utils::MSG_COV_IDX::YAW_YAW] = P(IDX::YAW, IDX::YAW); - pose_cov[utils::MSG_COV_IDX::Z_Z] = zz_cov; - pose_cov[utils::MSG_COV_IDX::ROLL_ROLL] = rr_cov; - pose_cov[utils::MSG_COV_IDX::PITCH_PITCH] = pp_cov; + pose_cov[XYZRPY_COV_IDX::X_X] = P(IDX::X, IDX::X); + pose_cov[XYZRPY_COV_IDX::X_Y] = P(IDX::X, IDX::Y); + 
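The rewritten process-noise terms above follow the standard discretization of white acceleration noise over a step `dt`: an acceleration `a` held for `dt` shifts position by `0.5 * a * dt^2` and velocity by `a * dt`, so with acceleration standard deviation sigma the increment variances are `0.25 * sigma^2 * dt^4` for position and `sigma^2 * dt^2` for velocity; yaw follows the same pattern with the bounded yaw-rate standard deviation computed just above. A small sketch of that relation, as an illustration rather than project code:

// Variances of the position/velocity increments caused by white acceleration noise over one step.
struct ProcessNoise1D
{
  double q_pos;  // variance of the position increment
  double q_vel;  // variance of the velocity increment
};

inline ProcessNoise1D discretize_accel_noise(const double sigma_a, const double dt)
{
  const double dt2 = dt * dt;
  const double dt4 = dt2 * dt2;
  return {0.25 * sigma_a * sigma_a * dt4, sigma_a * sigma_a * dt2};
}

For the bicycle model this is an algebraic restatement of the previous `std::pow` expressions, with `q_cov_acc_long` presumably holding the squared `q_stddev_acc_long` so the squaring no longer happens inside the per-step prediction.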
pose_cov[XYZRPY_COV_IDX::Y_X] = P(IDX::Y, IDX::X); + pose_cov[XYZRPY_COV_IDX::Y_Y] = P(IDX::Y, IDX::Y); + pose_cov[XYZRPY_COV_IDX::YAW_YAW] = P(IDX::YAW, IDX::YAW); + pose_cov[XYZRPY_COV_IDX::Z_Z] = zz_cov; + pose_cov[XYZRPY_COV_IDX::ROLL_ROLL] = rr_cov; + pose_cov[XYZRPY_COV_IDX::PITCH_PITCH] = pp_cov; // set twist covariance Eigen::MatrixXd cov_jacob(3, 2); @@ -466,18 +472,18 @@ bool BicycleMotionModel::getPredictedState( constexpr double vz_cov = 0.1 * 0.1; // TODO(yukkysaito) Currently tentative constexpr double wx_cov = 0.1 * 0.1; // TODO(yukkysaito) Currently tentative constexpr double wy_cov = 0.1 * 0.1; // TODO(yukkysaito) Currently tentative - twist_cov[utils::MSG_COV_IDX::X_X] = twist_cov_mat(0, 0); - twist_cov[utils::MSG_COV_IDX::X_Y] = twist_cov_mat(0, 1); - twist_cov[utils::MSG_COV_IDX::X_YAW] = twist_cov_mat(0, 2); - twist_cov[utils::MSG_COV_IDX::Y_X] = twist_cov_mat(1, 0); - twist_cov[utils::MSG_COV_IDX::Y_Y] = twist_cov_mat(1, 1); - twist_cov[utils::MSG_COV_IDX::Y_YAW] = twist_cov_mat(1, 2); - twist_cov[utils::MSG_COV_IDX::YAW_X] = twist_cov_mat(2, 0); - twist_cov[utils::MSG_COV_IDX::YAW_Y] = twist_cov_mat(2, 1); - twist_cov[utils::MSG_COV_IDX::YAW_YAW] = twist_cov_mat(2, 2); - twist_cov[utils::MSG_COV_IDX::Z_Z] = vz_cov; - twist_cov[utils::MSG_COV_IDX::ROLL_ROLL] = wx_cov; - twist_cov[utils::MSG_COV_IDX::PITCH_PITCH] = wy_cov; + twist_cov[XYZRPY_COV_IDX::X_X] = twist_cov_mat(0, 0); + twist_cov[XYZRPY_COV_IDX::X_Y] = twist_cov_mat(0, 1); + twist_cov[XYZRPY_COV_IDX::X_YAW] = twist_cov_mat(0, 2); + twist_cov[XYZRPY_COV_IDX::Y_X] = twist_cov_mat(1, 0); + twist_cov[XYZRPY_COV_IDX::Y_Y] = twist_cov_mat(1, 1); + twist_cov[XYZRPY_COV_IDX::Y_YAW] = twist_cov_mat(1, 2); + twist_cov[XYZRPY_COV_IDX::YAW_X] = twist_cov_mat(2, 0); + twist_cov[XYZRPY_COV_IDX::YAW_Y] = twist_cov_mat(2, 1); + twist_cov[XYZRPY_COV_IDX::YAW_YAW] = twist_cov_mat(2, 2); + twist_cov[XYZRPY_COV_IDX::Z_Z] = vz_cov; + twist_cov[XYZRPY_COV_IDX::ROLL_ROLL] = wx_cov; + twist_cov[XYZRPY_COV_IDX::PITCH_PITCH] = wy_cov; return true; } diff --git a/perception/multi_object_tracker/src/tracker/motion_model/ctrv_motion_model.cpp b/perception/multi_object_tracker/src/tracker/motion_model/ctrv_motion_model.cpp index 79ce6e5dbb61c..b19af1d26d162 100644 --- a/perception/multi_object_tracker/src/tracker/motion_model/ctrv_motion_model.cpp +++ b/perception/multi_object_tracker/src/tracker/motion_model/ctrv_motion_model.cpp @@ -15,23 +15,24 @@ // // Author: v1.0 Taekjin Lee // +#define EIGEN_MPL2_ONLY #include "multi_object_tracker/tracker/motion_model/ctrv_motion_model.hpp" +#include "autoware/universe_utils/math/normalization.hpp" +#include "autoware/universe_utils/math/unit_conversion.hpp" +#include "autoware/universe_utils/ros/msg_covariance.hpp" #include "multi_object_tracker/tracker/motion_model/motion_model_base.hpp" #include "multi_object_tracker/utils/utils.hpp" -#include -#include - -#define EIGEN_MPL2_ONLY #include #include // cspell: ignore CTRV // Constant Turn Rate and constant Velocity (CTRV) motion model +using autoware::universe_utils::xyzrpy_covariance_index::XYZRPY_COV_IDX; -CTRVMotionModel::CTRVMotionModel() : MotionModel(), logger_(rclcpp::get_logger("CTRVMotionModel")) +CTRVMotionModel::CTRVMotionModel() : logger_(rclcpp::get_logger("CTRVMotionModel")) { // Initialize motion parameters setDefaultParams(); @@ -63,11 +64,11 @@ void CTRVMotionModel::setMotionParams( const double & q_stddev_vel, const double & q_stddev_wz) { // set process noise covariance parameters - motion_params_.q_cov_x = std::pow(q_stddev_x, 
2.0); - motion_params_.q_cov_y = std::pow(q_stddev_y, 2.0); - motion_params_.q_cov_yaw = std::pow(q_stddev_yaw, 2.0); - motion_params_.q_cov_vel = std::pow(q_stddev_vel, 2.0); - motion_params_.q_cov_wz = std::pow(q_stddev_wz, 2.0); + motion_params_.q_cov_x = q_stddev_x * q_stddev_x; + motion_params_.q_cov_y = q_stddev_y * q_stddev_y; + motion_params_.q_cov_yaw = q_stddev_yaw * q_stddev_yaw; + motion_params_.q_cov_vel = q_stddev_vel * q_stddev_vel; + motion_params_.q_cov_wz = q_stddev_wz * q_stddev_wz; } void CTRVMotionModel::setMotionLimits(const double & max_vel, const double & max_wz) @@ -88,9 +89,9 @@ bool CTRVMotionModel::initialize( // initialize covariance matrix P Eigen::MatrixXd P = Eigen::MatrixXd::Zero(DIM, DIM); - P(IDX::X, IDX::X) = pose_cov[utils::MSG_COV_IDX::X_X]; - P(IDX::Y, IDX::Y) = pose_cov[utils::MSG_COV_IDX::Y_Y]; - P(IDX::YAW, IDX::YAW) = pose_cov[utils::MSG_COV_IDX::YAW_YAW]; + P(IDX::X, IDX::X) = pose_cov[XYZRPY_COV_IDX::X_X]; + P(IDX::Y, IDX::Y) = pose_cov[XYZRPY_COV_IDX::Y_Y]; + P(IDX::YAW, IDX::YAW) = pose_cov[XYZRPY_COV_IDX::YAW_YAW]; P(IDX::VEL, IDX::VEL) = vel_cov; P(IDX::WZ, IDX::WZ) = wz_cov; @@ -115,10 +116,10 @@ bool CTRVMotionModel::updateStatePose( C(1, IDX::Y) = 1.0; Eigen::MatrixXd R = Eigen::MatrixXd::Zero(DIM_Y, DIM_Y); - R(0, 0) = pose_cov[utils::MSG_COV_IDX::X_X]; - R(0, 1) = pose_cov[utils::MSG_COV_IDX::X_Y]; - R(1, 0) = pose_cov[utils::MSG_COV_IDX::Y_X]; - R(1, 1) = pose_cov[utils::MSG_COV_IDX::Y_Y]; + R(0, 0) = pose_cov[XYZRPY_COV_IDX::X_X]; + R(0, 1) = pose_cov[XYZRPY_COV_IDX::X_Y]; + R(1, 0) = pose_cov[XYZRPY_COV_IDX::Y_X]; + R(1, 1) = pose_cov[XYZRPY_COV_IDX::Y_Y]; return ekf_.update(Y, C, R); } @@ -154,15 +155,15 @@ bool CTRVMotionModel::updateStatePoseHead( C(2, IDX::YAW) = 1.0; Eigen::MatrixXd R = Eigen::MatrixXd::Zero(DIM_Y, DIM_Y); - R(0, 0) = pose_cov[utils::MSG_COV_IDX::X_X]; - R(0, 1) = pose_cov[utils::MSG_COV_IDX::X_Y]; - R(1, 0) = pose_cov[utils::MSG_COV_IDX::Y_X]; - R(1, 1) = pose_cov[utils::MSG_COV_IDX::Y_Y]; - R(0, 2) = pose_cov[utils::MSG_COV_IDX::X_YAW]; - R(1, 2) = pose_cov[utils::MSG_COV_IDX::Y_YAW]; - R(2, 0) = pose_cov[utils::MSG_COV_IDX::YAW_X]; - R(2, 1) = pose_cov[utils::MSG_COV_IDX::YAW_Y]; - R(2, 2) = pose_cov[utils::MSG_COV_IDX::YAW_YAW]; + R(0, 0) = pose_cov[XYZRPY_COV_IDX::X_X]; + R(0, 1) = pose_cov[XYZRPY_COV_IDX::X_Y]; + R(1, 0) = pose_cov[XYZRPY_COV_IDX::Y_X]; + R(1, 1) = pose_cov[XYZRPY_COV_IDX::Y_Y]; + R(0, 2) = pose_cov[XYZRPY_COV_IDX::X_YAW]; + R(1, 2) = pose_cov[XYZRPY_COV_IDX::Y_YAW]; + R(2, 0) = pose_cov[XYZRPY_COV_IDX::YAW_X]; + R(2, 1) = pose_cov[XYZRPY_COV_IDX::YAW_Y]; + R(2, 2) = pose_cov[XYZRPY_COV_IDX::YAW_YAW]; return ekf_.update(Y, C, R); } @@ -200,16 +201,16 @@ bool CTRVMotionModel::updateStatePoseHeadVel( C(3, IDX::VEL) = 1.0; Eigen::MatrixXd R = Eigen::MatrixXd::Zero(DIM_Y, DIM_Y); - R(0, 0) = pose_cov[utils::MSG_COV_IDX::X_X]; - R(0, 1) = pose_cov[utils::MSG_COV_IDX::X_Y]; - R(1, 0) = pose_cov[utils::MSG_COV_IDX::Y_X]; - R(1, 1) = pose_cov[utils::MSG_COV_IDX::Y_Y]; - R(0, 2) = pose_cov[utils::MSG_COV_IDX::X_YAW]; - R(1, 2) = pose_cov[utils::MSG_COV_IDX::Y_YAW]; - R(2, 0) = pose_cov[utils::MSG_COV_IDX::YAW_X]; - R(2, 1) = pose_cov[utils::MSG_COV_IDX::YAW_Y]; - R(2, 2) = pose_cov[utils::MSG_COV_IDX::YAW_YAW]; - R(3, 3) = twist_cov[utils::MSG_COV_IDX::X_X]; + R(0, 0) = pose_cov[XYZRPY_COV_IDX::X_X]; + R(0, 1) = pose_cov[XYZRPY_COV_IDX::X_Y]; + R(1, 0) = pose_cov[XYZRPY_COV_IDX::Y_X]; + R(1, 1) = pose_cov[XYZRPY_COV_IDX::Y_Y]; + R(0, 2) = pose_cov[XYZRPY_COV_IDX::X_YAW]; + R(1, 2) = 
pose_cov[XYZRPY_COV_IDX::Y_YAW]; + R(2, 0) = pose_cov[XYZRPY_COV_IDX::YAW_X]; + R(2, 1) = pose_cov[XYZRPY_COV_IDX::YAW_Y]; + R(2, 2) = pose_cov[XYZRPY_COV_IDX::YAW_YAW]; + R(3, 3) = twist_cov[XYZRPY_COV_IDX::X_X]; return ekf_.update(Y, C, R); } @@ -295,11 +296,12 @@ bool CTRVMotionModel::predictStateStep(const double dt, KalmanFilter & ekf) cons A(IDX::YAW, IDX::WZ) = dt; // Process noise covariance Q - const double q_cov_x = std::pow(0.5 * motion_params_.q_cov_x * dt, 2); - const double q_cov_y = std::pow(0.5 * motion_params_.q_cov_y * dt, 2); - const double q_cov_yaw = std::pow(motion_params_.q_cov_yaw * dt, 2); - const double q_cov_vel = std::pow(motion_params_.q_cov_vel * dt, 2); - const double q_cov_wz = std::pow(motion_params_.q_cov_wz * dt, 2); + const double dt2 = dt * dt; + const double q_cov_x = motion_params_.q_cov_x * dt2; + const double q_cov_y = motion_params_.q_cov_y * dt2; + const double q_cov_yaw = motion_params_.q_cov_yaw * dt2; + const double q_cov_vel = motion_params_.q_cov_vel * dt2; + const double q_cov_wz = motion_params_.q_cov_wz * dt2; Eigen::MatrixXd Q = Eigen::MatrixXd::Zero(DIM, DIM); // Rotate the covariance matrix according to the vehicle yaw // because q_cov_x and y are in the vehicle coordinate system. @@ -355,32 +357,32 @@ bool CTRVMotionModel::getPredictedState( constexpr double zz_cov = 0.1 * 0.1; // TODO(yukkysaito) Currently tentative constexpr double rr_cov = 0.1 * 0.1; // TODO(yukkysaito) Currently tentative constexpr double pp_cov = 0.1 * 0.1; // TODO(yukkysaito) Currently tentative - pose_cov[utils::MSG_COV_IDX::X_X] = P(IDX::X, IDX::X); - pose_cov[utils::MSG_COV_IDX::X_Y] = P(IDX::X, IDX::Y); - pose_cov[utils::MSG_COV_IDX::Y_X] = P(IDX::Y, IDX::X); - pose_cov[utils::MSG_COV_IDX::Y_Y] = P(IDX::Y, IDX::Y); - pose_cov[utils::MSG_COV_IDX::YAW_YAW] = P(IDX::YAW, IDX::YAW); - pose_cov[utils::MSG_COV_IDX::Z_Z] = zz_cov; - pose_cov[utils::MSG_COV_IDX::ROLL_ROLL] = rr_cov; - pose_cov[utils::MSG_COV_IDX::PITCH_PITCH] = pp_cov; + pose_cov[XYZRPY_COV_IDX::X_X] = P(IDX::X, IDX::X); + pose_cov[XYZRPY_COV_IDX::X_Y] = P(IDX::X, IDX::Y); + pose_cov[XYZRPY_COV_IDX::Y_X] = P(IDX::Y, IDX::X); + pose_cov[XYZRPY_COV_IDX::Y_Y] = P(IDX::Y, IDX::Y); + pose_cov[XYZRPY_COV_IDX::YAW_YAW] = P(IDX::YAW, IDX::YAW); + pose_cov[XYZRPY_COV_IDX::Z_Z] = zz_cov; + pose_cov[XYZRPY_COV_IDX::ROLL_ROLL] = rr_cov; + pose_cov[XYZRPY_COV_IDX::PITCH_PITCH] = pp_cov; // set twist covariance constexpr double vy_cov = 0.1 * 0.1; // TODO(yukkysaito) Currently tentative constexpr double vz_cov = 0.1 * 0.1; // TODO(yukkysaito) Currently tentative constexpr double wx_cov = 0.1 * 0.1; // TODO(yukkysaito) Currently tentative constexpr double wy_cov = 0.1 * 0.1; // TODO(yukkysaito) Currently tentative - twist_cov[utils::MSG_COV_IDX::X_X] = P(IDX::VEL, IDX::VEL); - twist_cov[utils::MSG_COV_IDX::X_Y] = 0.0; - twist_cov[utils::MSG_COV_IDX::X_YAW] = P(IDX::VEL, IDX::WZ); - twist_cov[utils::MSG_COV_IDX::Y_X] = 0.0; - twist_cov[utils::MSG_COV_IDX::Y_Y] = vy_cov; - twist_cov[utils::MSG_COV_IDX::Y_YAW] = 0.0; - twist_cov[utils::MSG_COV_IDX::YAW_X] = P(IDX::WZ, IDX::VEL); - twist_cov[utils::MSG_COV_IDX::YAW_Y] = 0.0; - twist_cov[utils::MSG_COV_IDX::YAW_YAW] = P(IDX::WZ, IDX::WZ); - twist_cov[utils::MSG_COV_IDX::Z_Z] = vz_cov; - twist_cov[utils::MSG_COV_IDX::ROLL_ROLL] = wx_cov; - twist_cov[utils::MSG_COV_IDX::PITCH_PITCH] = wy_cov; + twist_cov[XYZRPY_COV_IDX::X_X] = P(IDX::VEL, IDX::VEL); + twist_cov[XYZRPY_COV_IDX::X_Y] = 0.0; + twist_cov[XYZRPY_COV_IDX::X_YAW] = P(IDX::VEL, IDX::WZ); + 
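As the comments above note, `q_cov_x` and `q_cov_y` are defined in the vehicle frame, so the process noise is rotated by the current yaw before being added to the state covariance (the CV model further below applies the same similarity transform to its predicted twist covariance). A minimal sketch of that transform, Sigma_world = R(yaw) * Sigma_body * R(yaw)^T, using Eigen as these files already do:

// Rotate a planar covariance expressed in the vehicle frame into the world frame.
#include <Eigen/Core>
#include <Eigen/Geometry>

inline Eigen::Matrix2d rotate_planar_covariance(const Eigen::Matrix2d & cov_body, const double yaw)
{
  const Eigen::Matrix2d rot = Eigen::Rotation2Dd(yaw).toRotationMatrix();
  return rot * cov_body * rot.transpose();
}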
twist_cov[XYZRPY_COV_IDX::Y_X] = 0.0; + twist_cov[XYZRPY_COV_IDX::Y_Y] = vy_cov; + twist_cov[XYZRPY_COV_IDX::Y_YAW] = 0.0; + twist_cov[XYZRPY_COV_IDX::YAW_X] = P(IDX::WZ, IDX::VEL); + twist_cov[XYZRPY_COV_IDX::YAW_Y] = 0.0; + twist_cov[XYZRPY_COV_IDX::YAW_YAW] = P(IDX::WZ, IDX::WZ); + twist_cov[XYZRPY_COV_IDX::Z_Z] = vz_cov; + twist_cov[XYZRPY_COV_IDX::ROLL_ROLL] = wx_cov; + twist_cov[XYZRPY_COV_IDX::PITCH_PITCH] = wy_cov; return true; } diff --git a/perception/multi_object_tracker/src/tracker/motion_model/cv_motion_model.cpp b/perception/multi_object_tracker/src/tracker/motion_model/cv_motion_model.cpp index 8b3e9014f52d7..231ba73796ed9 100644 --- a/perception/multi_object_tracker/src/tracker/motion_model/cv_motion_model.cpp +++ b/perception/multi_object_tracker/src/tracker/motion_model/cv_motion_model.cpp @@ -15,25 +15,26 @@ // // Author: v1.0 Taekjin Lee // +#define EIGEN_MPL2_ONLY #include "multi_object_tracker/tracker/motion_model/cv_motion_model.hpp" +#include "autoware/universe_utils/math/normalization.hpp" +#include "autoware/universe_utils/math/unit_conversion.hpp" +#include "autoware/universe_utils/ros/msg_covariance.hpp" #include "multi_object_tracker/tracker/motion_model/motion_model_base.hpp" #include "multi_object_tracker/utils/utils.hpp" -#include -#include - -#include - -#define EIGEN_MPL2_ONLY #include #include +#include + // cspell: ignore CV // Constant Velocity (CV) motion model +using autoware::universe_utils::xyzrpy_covariance_index::XYZRPY_COV_IDX; -CVMotionModel::CVMotionModel() : MotionModel(), logger_(rclcpp::get_logger("CVMotionModel")) +CVMotionModel::CVMotionModel() : logger_(rclcpp::get_logger("CVMotionModel")) { // Initialize motion parameters setDefaultParams(); @@ -63,10 +64,10 @@ void CVMotionModel::setMotionParams( const double & q_stddev_vy) { // set process noise covariance parameters - motion_params_.q_cov_x = std::pow(q_stddev_x, 2.0); - motion_params_.q_cov_y = std::pow(q_stddev_y, 2.0); - motion_params_.q_cov_vx = std::pow(q_stddev_vx, 2.0); - motion_params_.q_cov_vy = std::pow(q_stddev_vy, 2.0); + motion_params_.q_cov_x = q_stddev_x * q_stddev_x; + motion_params_.q_cov_y = q_stddev_y * q_stddev_y; + motion_params_.q_cov_vx = q_stddev_vx * q_stddev_vx; + motion_params_.q_cov_vy = q_stddev_vy * q_stddev_vy; } void CVMotionModel::setMotionLimits(const double & max_vx, const double & max_vy) @@ -87,10 +88,10 @@ bool CVMotionModel::initialize( // initialize covariance matrix P Eigen::MatrixXd P = Eigen::MatrixXd::Zero(DIM, DIM); - P(IDX::X, IDX::X) = pose_cov[utils::MSG_COV_IDX::X_X]; - P(IDX::Y, IDX::Y) = pose_cov[utils::MSG_COV_IDX::Y_Y]; - P(IDX::VX, IDX::VX) = twist_cov[utils::MSG_COV_IDX::X_X]; - P(IDX::VY, IDX::VY) = twist_cov[utils::MSG_COV_IDX::Y_Y]; + P(IDX::X, IDX::X) = pose_cov[XYZRPY_COV_IDX::X_X]; + P(IDX::Y, IDX::Y) = pose_cov[XYZRPY_COV_IDX::Y_Y]; + P(IDX::VX, IDX::VX) = twist_cov[XYZRPY_COV_IDX::X_X]; + P(IDX::VY, IDX::VY) = twist_cov[XYZRPY_COV_IDX::Y_Y]; return MotionModel::initialize(time, X, P); } @@ -113,10 +114,10 @@ bool CVMotionModel::updateStatePose( C(1, IDX::Y) = 1.0; Eigen::MatrixXd R = Eigen::MatrixXd::Zero(DIM_Y, DIM_Y); - R(0, 0) = pose_cov[utils::MSG_COV_IDX::X_X]; - R(0, 1) = pose_cov[utils::MSG_COV_IDX::X_Y]; - R(1, 0) = pose_cov[utils::MSG_COV_IDX::Y_X]; - R(1, 1) = pose_cov[utils::MSG_COV_IDX::Y_Y]; + R(0, 0) = pose_cov[XYZRPY_COV_IDX::X_X]; + R(0, 1) = pose_cov[XYZRPY_COV_IDX::X_Y]; + R(1, 0) = pose_cov[XYZRPY_COV_IDX::Y_X]; + R(1, 1) = pose_cov[XYZRPY_COV_IDX::Y_Y]; return ekf_.update(Y, C, R); } @@ -142,14 +143,14 
@@ bool CVMotionModel::updateStatePoseVel( C(3, IDX::VY) = 1.0; Eigen::MatrixXd R = Eigen::MatrixXd::Zero(DIM_Y, DIM_Y); - R(0, 0) = pose_cov[utils::MSG_COV_IDX::X_X]; - R(0, 1) = pose_cov[utils::MSG_COV_IDX::X_Y]; - R(1, 0) = pose_cov[utils::MSG_COV_IDX::Y_X]; - R(1, 1) = pose_cov[utils::MSG_COV_IDX::Y_Y]; - R(2, 2) = twist_cov[utils::MSG_COV_IDX::X_X]; - R(2, 3) = twist_cov[utils::MSG_COV_IDX::X_Y]; - R(3, 2) = twist_cov[utils::MSG_COV_IDX::Y_X]; - R(3, 3) = twist_cov[utils::MSG_COV_IDX::Y_Y]; + R(0, 0) = pose_cov[XYZRPY_COV_IDX::X_X]; + R(0, 1) = pose_cov[XYZRPY_COV_IDX::X_Y]; + R(1, 0) = pose_cov[XYZRPY_COV_IDX::Y_X]; + R(1, 1) = pose_cov[XYZRPY_COV_IDX::Y_Y]; + R(2, 2) = twist_cov[XYZRPY_COV_IDX::X_X]; + R(2, 3) = twist_cov[XYZRPY_COV_IDX::X_Y]; + R(3, 2) = twist_cov[XYZRPY_COV_IDX::Y_X]; + R(3, 3) = twist_cov[XYZRPY_COV_IDX::Y_Y]; return ekf_.update(Y, C, R); } @@ -272,14 +273,14 @@ bool CVMotionModel::getPredictedState( constexpr double rr_cov = 0.1 * 0.1; // TODO(yukkysaito) Currently tentative constexpr double pp_cov = 0.1 * 0.1; // TODO(yukkysaito) Currently tentative constexpr double yaw_cov = 0.1 * 0.1; // TODO(yukkysaito) Currently tentative - pose_cov[utils::MSG_COV_IDX::X_X] = P(IDX::X, IDX::X); - pose_cov[utils::MSG_COV_IDX::X_Y] = P(IDX::X, IDX::Y); - pose_cov[utils::MSG_COV_IDX::Y_X] = P(IDX::Y, IDX::X); - pose_cov[utils::MSG_COV_IDX::Y_Y] = P(IDX::Y, IDX::Y); - pose_cov[utils::MSG_COV_IDX::Z_Z] = zz_cov; - pose_cov[utils::MSG_COV_IDX::ROLL_ROLL] = rr_cov; - pose_cov[utils::MSG_COV_IDX::PITCH_PITCH] = pp_cov; - pose_cov[utils::MSG_COV_IDX::YAW_YAW] = yaw_cov; + pose_cov[XYZRPY_COV_IDX::X_X] = P(IDX::X, IDX::X); + pose_cov[XYZRPY_COV_IDX::X_Y] = P(IDX::X, IDX::Y); + pose_cov[XYZRPY_COV_IDX::Y_X] = P(IDX::Y, IDX::X); + pose_cov[XYZRPY_COV_IDX::Y_Y] = P(IDX::Y, IDX::Y); + pose_cov[XYZRPY_COV_IDX::Z_Z] = zz_cov; + pose_cov[XYZRPY_COV_IDX::ROLL_ROLL] = rr_cov; + pose_cov[XYZRPY_COV_IDX::PITCH_PITCH] = pp_cov; + pose_cov[XYZRPY_COV_IDX::YAW_YAW] = yaw_cov; // set twist covariance constexpr double vz_cov = 0.1 * 0.1; // TODO(yukkysaito) Currently tentative @@ -294,14 +295,14 @@ bool CVMotionModel::getPredictedState( twist_cov_rotate(1, 1) = P(IDX::VY, IDX::VY); Eigen::MatrixXd R_yaw = Eigen::Rotation2Dd(-yaw).toRotationMatrix(); Eigen::MatrixXd twist_cov_rotated = R_yaw * twist_cov_rotate * R_yaw.transpose(); - twist_cov[utils::MSG_COV_IDX::X_X] = twist_cov_rotated(0, 0); - twist_cov[utils::MSG_COV_IDX::X_Y] = twist_cov_rotated(0, 1); - twist_cov[utils::MSG_COV_IDX::Y_X] = twist_cov_rotated(1, 0); - twist_cov[utils::MSG_COV_IDX::Y_Y] = twist_cov_rotated(1, 1); - twist_cov[utils::MSG_COV_IDX::Z_Z] = vz_cov; - twist_cov[utils::MSG_COV_IDX::ROLL_ROLL] = wx_cov; - twist_cov[utils::MSG_COV_IDX::PITCH_PITCH] = wy_cov; - twist_cov[utils::MSG_COV_IDX::YAW_YAW] = wz_cov; + twist_cov[XYZRPY_COV_IDX::X_X] = twist_cov_rotated(0, 0); + twist_cov[XYZRPY_COV_IDX::X_Y] = twist_cov_rotated(0, 1); + twist_cov[XYZRPY_COV_IDX::Y_X] = twist_cov_rotated(1, 0); + twist_cov[XYZRPY_COV_IDX::Y_Y] = twist_cov_rotated(1, 1); + twist_cov[XYZRPY_COV_IDX::Z_Z] = vz_cov; + twist_cov[XYZRPY_COV_IDX::ROLL_ROLL] = wx_cov; + twist_cov[XYZRPY_COV_IDX::PITCH_PITCH] = wy_cov; + twist_cov[XYZRPY_COV_IDX::YAW_YAW] = wz_cov; return true; } From 480d97a4db8748154bb0f379a08d971568d783e7 Mon Sep 17 00:00:00 2001 From: Zulfaqar Azmi <93502286+zulfaqar-azmi-t4@users.noreply.github.com> Date: Mon, 24 Jun 2024 09:57:02 +0900 Subject: [PATCH 16/29] feat(route_handler): add unit test for lane change related functions (#7504) * 
RT1-6230 feat(route_handler): add unit test for lane change related functions Signed-off-by: Zulfaqar Azmi * fix spell check Signed-off-by: Zulfaqar Azmi * fix spellcheck Signed-off-by: Zulfaqar Azmi --------- Signed-off-by: Zulfaqar Azmi --- .../src/autoware_test_utils.cpp | 30 ++- .../test/test_route_handler.cpp | 238 +++++++++++++----- .../test/test_route_handler.hpp | 43 +++- 3 files changed, 234 insertions(+), 77 deletions(-) diff --git a/common/autoware_test_utils/src/autoware_test_utils.cpp b/common/autoware_test_utils/src/autoware_test_utils.cpp index 563350dbe53cc..d870bcf9974a1 100644 --- a/common/autoware_test_utils/src/autoware_test_utils.cpp +++ b/common/autoware_test_utils/src/autoware_test_utils.cpp @@ -19,6 +19,8 @@ #include #include +#include + #include namespace autoware::test_utils @@ -54,14 +56,32 @@ lanelet::LaneletMapPtr loadMap(const std::string & lanelet2_filename) lanelet::ErrorMessages errors{}; lanelet::projection::MGRSProjector projector{}; lanelet::LaneletMapPtr map = lanelet::load(lanelet2_filename, projector, &errors); - if (errors.empty()) { - return map; + if (!errors.empty()) { + for (const auto & error : errors) { + RCLCPP_ERROR_STREAM(rclcpp::get_logger("map_loader"), error); + } + return nullptr; } - for (const auto & error : errors) { - RCLCPP_ERROR_STREAM(rclcpp::get_logger("map_loader"), error); + for (lanelet::Point3d point : map->pointLayer) { + if (point.hasAttribute("local_x")) { + point.x() = point.attribute("local_x").asDouble().value(); + } + if (point.hasAttribute("local_y")) { + point.y() = point.attribute("local_y").asDouble().value(); + } } - return nullptr; + + // realign lanelet borders using updated points + for (lanelet::Lanelet lanelet : map->laneletLayer) { + auto left = lanelet.leftBound(); + auto right = lanelet.rightBound(); + std::tie(left, right) = lanelet::geometry::align(left, right); + lanelet.setLeftBound(left); + lanelet.setRightBound(right); + } + + return map; } LaneletMapBin convertToMapBinMsg( diff --git a/planning/autoware_route_handler/test/test_route_handler.cpp b/planning/autoware_route_handler/test/test_route_handler.cpp index 35a2fe3ef45da..9a4081c51be3b 100644 --- a/planning/autoware_route_handler/test/test_route_handler.cpp +++ b/planning/autoware_route_handler/test/test_route_handler.cpp @@ -49,10 +49,11 @@ TEST_F(TestRouteHandler, getGoalLaneId) TEST_F(TestRouteHandler, getLaneletSequenceWhenOverlappingRoute) { - set_route_handler("/test_map/overlap_map.osm"); + set_route_handler("overlap_map.osm"); ASSERT_FALSE(route_handler_->isHandlerReady()); - geometry_msgs::msg::Pose start_pose, goal_pose; + geometry_msgs::msg::Pose start_pose; + geometry_msgs::msg::Pose goal_pose; start_pose.position = autoware::universe_utils::createPoint(3728.870361, 73739.281250, 0); start_pose.orientation = autoware::universe_utils::createQuaternion(0, 0, -0.513117, 0.858319); goal_pose.position = autoware::universe_utils::createPoint(3729.961182, 73727.328125, 0); @@ -79,11 +80,12 @@ TEST_F(TestRouteHandler, getLaneletSequenceWhenOverlappingRoute) TEST_F(TestRouteHandler, getClosestRouteLaneletFromLaneletWhenOverlappingRoute) { - set_route_handler("/test_map/overlap_map.osm"); - set_test_route("/test_route/overlap_test_route.yaml"); + set_route_handler("overlap_map.osm"); + set_test_route("overlap_test_route.yaml"); ASSERT_TRUE(route_handler_->isHandlerReady()); - geometry_msgs::msg::Pose reference_pose, search_pose; + geometry_msgs::msg::Pose reference_pose; + geometry_msgs::msg::Pose search_pose; lanelet::ConstLanelet 
reference_lanelet; reference_pose.position = autoware::universe_utils::createPoint(3730.88, 73735.3, 0); @@ -102,71 +104,189 @@ TEST_F(TestRouteHandler, getClosestRouteLaneletFromLaneletWhenOverlappingRoute) ASSERT_EQ(closest_lanelet.id(), 345); found_lanelet = route_handler_->getClosestRouteLaneletFromLanelet( - search_pose, reference_lanelet, &closest_lanelet, 3.0, 1.046); + search_pose, reference_lanelet, &closest_lanelet, dist_threshold, yaw_threshold); ASSERT_TRUE(found_lanelet); ASSERT_EQ(closest_lanelet.id(), 277); } -// TEST_F(TestRouteHandler, getClosestLaneletWithinRouteWhenPointsInRoute) -// { -// lanelet::ConstLanelet closest_lane; - -// Pose search_pose; - -// search_pose.position = autoware::universe_utils::createPoint(-1.0, 1.75, 0); -// search_pose.orientation = autoware::universe_utils::createQuaternion(0.0, 0.0, 0.0, 1.0); -// const auto closest_lane_obtained7 = -// route_handler_->getClosestLaneletWithinRoute(search_pose, &closest_lane); - -// ASSERT_TRUE(closest_lane_obtained7); -// ASSERT_EQ(closest_lane.id(), 4775); - -// search_pose.position = autoware::universe_utils::createPoint(-0.5, 1.75, 0); -// const auto closest_lane_obtained = -// search_pose.orientation = autoware::universe_utils::createQuaternion(0.0, 0.0, 0.0, 1.0); -// route_handler_->getClosestLaneletWithinRoute(search_pose, &closest_lane); - -// ASSERT_TRUE(closest_lane_obtained); -// ASSERT_EQ(closest_lane.id(), 4775); - -// search_pose.position = autoware::universe_utils::createPoint(-.01, 1.75, 0); -// search_pose.orientation = autoware::universe_utils::createQuaternion(0.0, 0.0, 0.0, 1.0); -// const auto closest_lane_obtained3 = -// route_handler_->getClosestLaneletWithinRoute(search_pose, &closest_lane); +TEST_F(TestRouteHandler, CheckLaneIsInGoalRouteSection) +{ + const auto lane = route_handler_->getLaneletsFromId(4785); + const auto is_lane_in_goal_route_section = route_handler_->isInGoalRouteSection(lane); + ASSERT_TRUE(is_lane_in_goal_route_section); +} -// ASSERT_TRUE(closest_lane_obtained3); -// ASSERT_EQ(closest_lane.id(), 4775); +TEST_F(TestRouteHandler, CheckLaneIsNotInGoalRouteSection) +{ + const auto lane = route_handler_->getLaneletsFromId(4780); + const auto is_lane_in_goal_route_section = route_handler_->isInGoalRouteSection(lane); + ASSERT_FALSE(is_lane_in_goal_route_section); +} -// search_pose.position = autoware::universe_utils::createPoint(0.0, 1.75, 0); -// search_pose.orientation = autoware::universe_utils::createQuaternion(0.0, 0.0, 0.0, 1.0); -// const auto closest_lane_obtained1 = -// route_handler_->getClosestLaneletWithinRoute(search_pose, &closest_lane); +TEST_F(TestRouteHandler, checkGetLaneletSequence) +{ + const auto current_pose = autoware::test_utils::createPose(-50.0, 1.75, 0.0, 0.0, 0.0, 0.0); -// ASSERT_TRUE(closest_lane_obtained1); -// ASSERT_EQ(closest_lane.id(), 4775); + lanelet::ConstLanelet closest_lanelet; + const auto found_closest_lanelet = route_handler_->getClosestLaneletWithConstrainsWithinRoute( + current_pose, &closest_lanelet, dist_threshold, yaw_threshold); + ASSERT_TRUE(found_closest_lanelet); + ASSERT_EQ(closest_lanelet.id(), 4765ul); + + const auto current_lanes = route_handler_->getLaneletSequence( + closest_lanelet, current_pose, backward_path_length, forward_path_length); + + ASSERT_EQ(current_lanes.size(), 6ul); + ASSERT_EQ(current_lanes.at(0).id(), 4765ul); + ASSERT_EQ(current_lanes.at(1).id(), 4770ul); + ASSERT_EQ(current_lanes.at(2).id(), 4775ul); + ASSERT_EQ(current_lanes.at(3).id(), 4424ul); + ASSERT_EQ(current_lanes.at(4).id(), 
4780ul); + ASSERT_EQ(current_lanes.at(5).id(), 4785ul); +} -// search_pose.position = autoware::universe_utils::createPoint(0.01, 1.75, 0); -// search_pose.orientation = autoware::universe_utils::createQuaternion(0.0, 0.0, 0.0, 1.0); -// const auto closest_lane_obtained2 = -// route_handler_->getClosestLaneletWithinRoute(search_pose, &closest_lane); +TEST_F(TestRouteHandler, checkLateralIntervalToPreferredLaneWhenLaneChangeToRight) +{ + const auto current_lanes = get_current_lanes(); + + // The input is within expectation. + // this lane is of preferred lane type + std::for_each(current_lanes.begin(), current_lanes.begin() + 3, [&](const auto & lane) { + const auto result = route_handler_->getLateralIntervalsToPreferredLane(lane, Direction::RIGHT); + ASSERT_EQ(result.size(), 0ul); + }); + + // The input is within expectation. + // this alternative lane is a subset of preferred lane route section + std::for_each(current_lanes.begin() + 3, current_lanes.end(), [&](const auto & lane) { + const auto result = route_handler_->getLateralIntervalsToPreferredLane(lane, Direction::RIGHT); + ASSERT_EQ(result.size(), 1ul); + EXPECT_DOUBLE_EQ(result.at(0), -3.5); + }); + + // The input is within expectation. + // Although Direction::NONE, the function should still return result similar to + // Direction::RIGHT. + std::for_each(current_lanes.begin(), current_lanes.begin() + 3, [&](const auto & lane) { + const auto result = route_handler_->getLateralIntervalsToPreferredLane(lane, Direction::NONE); + ASSERT_EQ(result.size(), 0ul); + }); + + // The input is within expectation. + // Although Direction::NONE is provided, the function should behave similarly to + // Direction::RIGHT. + std::for_each(current_lanes.begin() + 3, current_lanes.end(), [&](const auto & lane) { + const auto result = route_handler_->getLateralIntervalsToPreferredLane(lane, Direction::NONE); + ASSERT_EQ(result.size(), 1ul); + EXPECT_DOUBLE_EQ(result.at(0), -3.5); + }); +} -// ASSERT_TRUE(closest_lane_obtained2); -// ASSERT_EQ(closest_lane.id(), 4424); +TEST_F(TestRouteHandler, checkLateralIntervalToPreferredLaneUsingUnexpectedResults) +{ + const auto current_lanes = get_current_lanes(); -// search_pose.position = autoware::universe_utils::createPoint(0.5, 1.75, 0); -// search_pose.orientation = autoware::universe_utils::createQuaternion(0.0, 0.0, 0.0, 1.0); -// const auto closest_lane_obtained4 = -// route_handler_->getClosestLaneletWithinRoute(search_pose, &closest_lane); + std::for_each(current_lanes.begin(), current_lanes.end(), [&](const auto & lane) { + const auto result = route_handler_->getLateralIntervalsToPreferredLane(lane, Direction::LEFT); + ASSERT_EQ(result.size(), 0ul); + }); +} -// ASSERT_TRUE(closest_lane_obtained4); -// ASSERT_EQ(closest_lane.id(), 4424); +TEST_F(TestRouteHandler, testGetCenterLinePath) +{ + const auto current_lanes = route_handler_->getLaneletsFromIds({4424, 4780, 4785}); + { + // The input is within expectation. + const auto center_line_path = route_handler_->getCenterLinePath(current_lanes, 0.0, 50.0); + ASSERT_EQ(center_line_path.points.size(), 51); // 26 + 26 - 1(overlapped) + ASSERT_EQ(center_line_path.points.back().lane_ids.size(), 2); + ASSERT_EQ(center_line_path.points.back().lane_ids.at(0), 4780); + ASSERT_EQ(center_line_path.points.back().lane_ids.at(1), 4785); + } + { + // The input is broken. + // s_start is negative, and s_end is over the boundary. 
+ const auto center_line_path = route_handler_->getCenterLinePath(current_lanes, -1.0, 200.0); + ASSERT_EQ(center_line_path.points.size(), 76); // 26 + 26 + 26 - 2(overlapped) + ASSERT_EQ(center_line_path.points.front().lane_ids.size(), 1); + ASSERT_EQ(center_line_path.points.front().lane_ids.at(0), 4424); + ASSERT_EQ(center_line_path.points.back().lane_ids.size(), 1); + ASSERT_EQ(center_line_path.points.back().lane_ids.at(0), 4785); + } +} +TEST_F(TestRouteHandler, DISABLED_testGetCenterLinePathWhenLanesIsNotConnected) +{ + // broken current lanes. 4424 and 4785 are not connected directly. + const auto current_lanes = route_handler_->getLaneletsFromIds({4424, 4780, 4785}); + + // The input is broken. Test is disabled because it doesn't pass. + const auto center_line_path = route_handler_->getCenterLinePath(current_lanes, 0.0, 75.0); + ASSERT_EQ(center_line_path.points.size(), 26); // 26 + 26 + 26 - 2(overlapped) + ASSERT_EQ(center_line_path.points.front().lane_ids.size(), 1); + ASSERT_EQ(center_line_path.points.front().lane_ids.at(0), 4424); + ASSERT_EQ(center_line_path.points.back().lane_ids.size(), 1); + ASSERT_EQ(center_line_path.points.back().lane_ids.at(0), 4424); +} -// search_pose.position = autoware::universe_utils::createPoint(1.0, 1.75, 0); -// search_pose.orientation = autoware::universe_utils::createQuaternion(0.0, 0.0, 0.0, 1.0); -// const auto closest_lane_obtained5 = -// route_handler_->getClosestLaneletWithinRoute(search_pose, &closest_lane); +TEST_F(TestRouteHandler, getClosestLaneletWithinRouteWhenPointsInRoute) +{ + auto get_closest_lanelet_within_route = + [&](double x, double y, double z) -> std::optional { + const auto pose = autoware::test_utils::createPose(x, y, z, 0.0, 0.0, 0.0); + lanelet::ConstLanelet closest_lanelet; + const auto closest_lane_obtained = + route_handler_->getClosestLaneletWithinRoute(pose, &closest_lanelet); + if (!closest_lane_obtained) { + return std::nullopt; + } + return closest_lanelet.id(); + }; + + ASSERT_TRUE(get_closest_lanelet_within_route(-0.5, 1.75, 0).has_value()); + ASSERT_EQ(get_closest_lanelet_within_route(-0.5, 1.75, 0).value(), 4775ul); + + ASSERT_TRUE(get_closest_lanelet_within_route(-0.01, 1.75, 0).has_value()); + ASSERT_EQ(get_closest_lanelet_within_route(-0.01, 1.75, 0).value(), 4775ul); + + ASSERT_TRUE(get_closest_lanelet_within_route(0.0, 1.75, 0).has_value()); + ASSERT_EQ(get_closest_lanelet_within_route(0.0, 1.75, 0).value(), 4775ul); + + ASSERT_TRUE(get_closest_lanelet_within_route(0.01, 1.75, 0).has_value()); + ASSERT_EQ(get_closest_lanelet_within_route(0.01, 1.75, 0).value(), 4424ul); + + ASSERT_TRUE(get_closest_lanelet_within_route(0.5, 1.75, 0).has_value()); + ASSERT_EQ(get_closest_lanelet_within_route(0.5, 1.75, 0).value(), 4424ul); +} -// ASSERT_TRUE(closest_lane_obtained5); -// ASSERT_EQ(closest_lane.id(), 4424); -// } +TEST_F(TestRouteHandler, testGetLaneChangeTargetLanes) +{ + { + // The input is within expectation. + // There exist no lane changing lane since both 4770 and 4775 are preferred lane. + const auto current_lanes = route_handler_->getLaneletsFromIds({4770, 4775}); + const auto lane_change_lane = + route_handler_->getLaneChangeTarget(current_lanes, Direction::RIGHT); + ASSERT_FALSE(lane_change_lane.has_value()); + } + + { + // The input is within expectation. + // There exist lane changing lane since 4424 is subset of preferred lane 9598. 
+ const auto current_lanes = route_handler_->getLaneletsFromIds({4775, 4424}); + const auto lane_change_lane = + route_handler_->getLaneChangeTarget(current_lanes, Direction::RIGHT); + EXPECT_TRUE(lane_change_lane.has_value()); + ASSERT_EQ(lane_change_lane.value().id(), 9598ul); + } + + { + // The input is within expectation. + // There is a lane-changing lane. Within the maximum current lanes, there is an alternative lane + // to the preferred lane. Therefore, the lane-changing lane exists. + const auto current_lanes = get_current_lanes(); + const auto lane_change_lane = route_handler_->getLaneChangeTarget(current_lanes); + ASSERT_TRUE(lane_change_lane.has_value()); + ASSERT_EQ(lane_change_lane.value().id(), 9598ul); + } +} } // namespace autoware::route_handler::test diff --git a/planning/autoware_route_handler/test/test_route_handler.hpp b/planning/autoware_route_handler/test/test_route_handler.hpp index 86f7461fc7538..ec3368809df99 100644 --- a/planning/autoware_route_handler/test/test_route_handler.hpp +++ b/planning/autoware_route_handler/test/test_route_handler.hpp @@ -15,7 +15,6 @@ #ifndef TEST_ROUTE_HANDLER_HPP_ #define TEST_ROUTE_HANDLER_HPP_ -#include "ament_index_cpp/get_package_share_directory.hpp" #include "autoware_test_utils/autoware_test_utils.hpp" #include "autoware_test_utils/mock_data_parser.hpp" #include "gtest/gtest.h" @@ -37,16 +36,16 @@ namespace autoware::route_handler::test { +using autoware::test_utils::get_absolute_path_to_lanelet_map; +using autoware::test_utils::get_absolute_path_to_route; using autoware_map_msgs::msg::LaneletMapBin; - class TestRouteHandler : public ::testing::Test { public: TestRouteHandler() { - autoware_test_utils_dir = ament_index_cpp::get_package_share_directory("autoware_test_utils"); - set_route_handler("/test_map/2km_test.osm"); - set_test_route("/test_route/lane_change_test_route.yaml"); + set_route_handler("2km_test.osm"); + set_test_route(lane_change_right_test_route_filename); } TestRouteHandler(const TestRouteHandler &) = delete; @@ -55,26 +54,44 @@ class TestRouteHandler : public ::testing::Test TestRouteHandler & operator=(TestRouteHandler &&) = delete; ~TestRouteHandler() override = default; - void set_route_handler(const std::string & relative_path) + void set_route_handler(const std::string & lanelet_map_filename) { route_handler_.reset(); - const auto lanelet2_path = autoware_test_utils_dir + relative_path; + const auto lanelet2_path = + get_absolute_path_to_lanelet_map(autoware_test_utils_dir, lanelet_map_filename); const auto map_bin_msg = autoware::test_utils::make_map_bin_msg(lanelet2_path, center_line_resolution); route_handler_ = std::make_shared(map_bin_msg); } - void set_test_route(const std::string & route_path) + void set_test_route(const std::string & route_filename) { - const auto route_handler_dir = - ament_index_cpp::get_package_share_directory("autoware_route_handler"); - const auto rh_test_route = route_handler_dir + route_path; + const auto rh_test_route = + get_absolute_path_to_route(autoware_route_handler_dir, route_filename); route_handler_->setRoute(autoware::test_utils::parse_lanelet_route_file(rh_test_route)); } + lanelet::ConstLanelets get_current_lanes() + { + const auto current_pose = autoware::test_utils::createPose(-50.0, 1.75, 0.0, 0.0, 0.0, 0.0); + lanelet::ConstLanelet closest_lanelet; + [[maybe_unused]] const auto found_closest_lanelet = + route_handler_->getClosestLaneletWithConstrainsWithinRoute( + current_pose, &closest_lanelet, dist_threshold, yaw_threshold); + return 
route_handler_->getLaneletSequence( + closest_lanelet, current_pose, backward_path_length, forward_path_length); + } + std::shared_ptr route_handler_; - std::string autoware_test_utils_dir; - static constexpr double center_line_resolution = 5.0; + std::string autoware_test_utils_dir{"autoware_test_utils"}; + std::string autoware_route_handler_dir{"autoware_route_handler"}; + std::string lane_change_right_test_route_filename{"lane_change_test_route.yaml"}; + + static constexpr double center_line_resolution{5.0}; + static constexpr double dist_threshold{3.0}; + static constexpr double yaw_threshold{1.045}; + static constexpr double backward_path_length{5.0}; + static constexpr double forward_path_length{300.0}; }; } // namespace autoware::route_handler::test From 60b05b07efc3682bf2096b4218ceb09634f0c3c1 Mon Sep 17 00:00:00 2001 From: Maxime CLEMENT <78338830+maxime-clem@users.noreply.github.com> Date: Mon, 24 Jun 2024 10:12:00 +0900 Subject: [PATCH 17/29] feat(motion_velocity_planner): publish processing times (#7633) Signed-off-by: Maxime CLEMENT --- .../src/dynamic_obstacle_stop_module.cpp | 9 +++++++++ .../src/obstacle_velocity_limiter_module.cpp | 9 +++++++++ .../src/out_of_lane_module.cpp | 17 +++++++++++++++-- .../plugin_module_interface.hpp | 2 ++ .../src/node.cpp | 11 +++++++++++ .../src/node.hpp | 1 + 6 files changed, 47 insertions(+), 2 deletions(-) diff --git a/planning/motion_velocity_planner/autoware_motion_velocity_dynamic_obstacle_stop_module/src/dynamic_obstacle_stop_module.cpp b/planning/motion_velocity_planner/autoware_motion_velocity_dynamic_obstacle_stop_module/src/dynamic_obstacle_stop_module.cpp index fde54a7041672..bf9dc30145cd3 100644 --- a/planning/motion_velocity_planner/autoware_motion_velocity_dynamic_obstacle_stop_module/src/dynamic_obstacle_stop_module.cpp +++ b/planning/motion_velocity_planner/autoware_motion_velocity_dynamic_obstacle_stop_module/src/dynamic_obstacle_stop_module.cpp @@ -29,6 +29,7 @@ #include #include +#include #include #include #include @@ -48,6 +49,8 @@ void DynamicObstacleStopModule::init(rclcpp::Node & node, const std::string & mo node.create_publisher("~/" + ns_ + "/debug_markers", 1); virtual_wall_publisher_ = node.create_publisher("~/" + ns_ + "/virtual_walls", 1); + processing_time_publisher_ = std::make_shared( + &node, "~/debug/" + ns_ + "/processing_time_ms"); using autoware::universe_utils::getOrDeclareParameter; auto & p = params_; @@ -179,6 +182,12 @@ VelocityPlanningResult DynamicObstacleStopModule::plan( debug_data_.ego_footprints = ego_data.trajectory_footprints; debug_data_.obstacle_footprints = obstacle_forward_footprints; debug_data_.z = ego_data.pose.position.z; + std::map processing_times; + processing_times["preprocessing"] = preprocessing_duration_us / 1000; + processing_times["footprints"] = footprints_duration_us / 1000; + processing_times["collisions"] = collisions_duration_us / 1000; + processing_times["Total"] = total_time_us / 1000; + processing_time_publisher_->publish(processing_times); return result; } diff --git a/planning/motion_velocity_planner/autoware_motion_velocity_obstacle_velocity_limiter_module/src/obstacle_velocity_limiter_module.cpp b/planning/motion_velocity_planner/autoware_motion_velocity_obstacle_velocity_limiter_module/src/obstacle_velocity_limiter_module.cpp index 2b78a2f1495bf..5a4f03f6f20a6 100644 --- a/planning/motion_velocity_planner/autoware_motion_velocity_obstacle_velocity_limiter_module/src/obstacle_velocity_limiter_module.cpp +++ 
b/planning/motion_velocity_planner/autoware_motion_velocity_obstacle_velocity_limiter_module/src/obstacle_velocity_limiter_module.cpp @@ -34,6 +34,7 @@ #include #include +#include namespace autoware::motion_velocity_planner { @@ -52,6 +53,8 @@ void ObstacleVelocityLimiterModule::init(rclcpp::Node & node, const std::string node.create_publisher("~/" + ns_ + "/debug_markers", 1); virtual_wall_publisher_ = node.create_publisher("~/" + ns_ + "/virtual_walls", 1); + processing_time_publisher_ = std::make_shared( + &node, "~/debug/" + ns_ + "/processing_time_ms"); const auto vehicle_info = vehicle_info_utils::VehicleInfoUtils(node).getVehicleInfo(); vehicle_lateral_offset_ = static_cast(vehicle_info.max_lateral_offset_m); @@ -218,6 +221,12 @@ VelocityPlanningResult ObstacleVelocityLimiterModule::plan( safe_footprint_polygons, obstacle_masks, planner_data->current_odometry.pose.pose.position.z)); } + std::map processing_times; + processing_times["preprocessing"] = preprocessing_us / 1000; + processing_times["obstacles"] = obstacles_us / 1000; + processing_times["slowdowns"] = slowdowns_us / 1000; + processing_times["Total"] = total_us / 1000; + processing_time_publisher_->publish(processing_times); return result; } } // namespace autoware::motion_velocity_planner diff --git a/planning/motion_velocity_planner/autoware_motion_velocity_out_of_lane_module/src/out_of_lane_module.cpp b/planning/motion_velocity_planner/autoware_motion_velocity_out_of_lane_module/src/out_of_lane_module.cpp index f6d81f6f036d0..18a37e74df9aa 100644 --- a/planning/motion_velocity_planner/autoware_motion_velocity_out_of_lane_module/src/out_of_lane_module.cpp +++ b/planning/motion_velocity_planner/autoware_motion_velocity_out_of_lane_module/src/out_of_lane_module.cpp @@ -33,6 +33,7 @@ #include +#include #include #include #include @@ -56,6 +57,8 @@ void OutOfLaneModule::init(rclcpp::Node & node, const std::string & module_name) node.create_publisher("~/" + ns_ + "/debug_markers", 1); virtual_wall_publisher_ = node.create_publisher("~/" + ns_ + "/virtual_walls", 1); + processing_time_publisher_ = std::make_shared( + &node, "~/debug/" + ns_ + "/processing_time_ms"); } void OutOfLaneModule::init_parameters(rclcpp::Node & node) { @@ -207,7 +210,7 @@ VelocityPlanningResult OutOfLaneModule::plan( inputs.ego_data = ego_data; stopwatch.tic("filter_predicted_objects"); inputs.objects = out_of_lane::filter_predicted_objects(planner_data, ego_data, params_); - const auto filter_predicted_objects_ms = stopwatch.toc("filter_predicted_objects"); + const auto filter_predicted_objects_us = stopwatch.toc("filter_predicted_objects"); inputs.route_handler = planner_data->route_handler; inputs.lanelets = other_lanelets; stopwatch.tic("calculate_decisions"); @@ -288,12 +291,22 @@ VelocityPlanningResult OutOfLaneModule::plan( "\tcalc_slowdown_points = %2.0fus\n" "\tinsert_slowdown_points = %2.0fus\n", total_time_us, calculate_lanelets_us, calculate_trajectory_footprints_us, - calculate_overlapping_ranges_us, filter_predicted_objects_ms, calculate_decisions_us, + calculate_overlapping_ranges_us, filter_predicted_objects_us, calculate_decisions_us, calc_slowdown_points_us, insert_slowdown_points_us); debug_publisher_->publish(out_of_lane::debug::create_debug_marker_array(debug_data_)); virtual_wall_marker_creator.add_virtual_walls( out_of_lane::debug::create_virtual_walls(debug_data_, params_)); virtual_wall_publisher_->publish(virtual_wall_marker_creator.create_markers(clock_->now())); + std::map processing_times; + 
processing_times["calculate_lanelets"] = calculate_lanelets_us / 1000; + processing_times["calculate_trajectory_footprints"] = calculate_trajectory_footprints_us / 1000; + processing_times["calculate_overlapping_ranges"] = calculate_overlapping_ranges_us / 1000; + processing_times["filter_pred_objects"] = filter_predicted_objects_us / 1000; + processing_times["calculate_decision"] = calculate_decisions_us / 1000; + processing_times["calc_slowdown_points"] = calc_slowdown_points_us / 1000; + processing_times["insert_slowdown_points"] = insert_slowdown_points_us / 1000; + processing_times["Total"] = total_time_us / 1000; + processing_time_publisher_->publish(processing_times); return result; } diff --git a/planning/motion_velocity_planner/autoware_motion_velocity_planner_common/include/autoware/motion_velocity_planner_common/plugin_module_interface.hpp b/planning/motion_velocity_planner/autoware_motion_velocity_planner_common/include/autoware/motion_velocity_planner_common/plugin_module_interface.hpp index 535510453b8ba..bf0be64a0880d 100644 --- a/planning/motion_velocity_planner/autoware_motion_velocity_planner_common/include/autoware/motion_velocity_planner_common/plugin_module_interface.hpp +++ b/planning/motion_velocity_planner/autoware_motion_velocity_planner_common/include/autoware/motion_velocity_planner_common/plugin_module_interface.hpp @@ -19,6 +19,7 @@ #include "velocity_planning_result.hpp" #include +#include #include #include @@ -44,6 +45,7 @@ class PluginModuleInterface rclcpp::Logger logger_ = rclcpp::get_logger(""); rclcpp::Publisher::SharedPtr debug_publisher_; rclcpp::Publisher::SharedPtr virtual_wall_publisher_; + std::shared_ptr processing_time_publisher_; autoware::motion_utils::VirtualWallMarkerCreator virtual_wall_marker_creator{}; }; diff --git a/planning/motion_velocity_planner/autoware_motion_velocity_planner_node/src/node.cpp b/planning/motion_velocity_planner/autoware_motion_velocity_planner_node/src/node.cpp index 714b6fd948a32..da997dcfbc6e4 100644 --- a/planning/motion_velocity_planner/autoware_motion_velocity_planner_node/src/node.cpp +++ b/planning/motion_velocity_planner/autoware_motion_velocity_planner_node/src/node.cpp @@ -17,6 +17,7 @@ #include #include #include +#include #include #include #include @@ -31,6 +32,7 @@ #include #include +#include #include #include @@ -252,9 +254,14 @@ void MotionVelocityPlannerNode::on_trajectory( { std::unique_lock lk(mutex_); + autoware::universe_utils::StopWatch stop_watch; + std::map processing_times; + stop_watch.tic("Total"); + if (!update_planner_data()) { return; } + processing_times["update_planner_data"] = stop_watch.toc(true); if (input_trajectory_msg->points.empty()) { RCLCPP_WARN(get_logger(), "Input trajectory message is empty"); @@ -264,6 +271,7 @@ void MotionVelocityPlannerNode::on_trajectory( if (has_received_map_) { planner_data_.route_handler = std::make_shared(*map_ptr_); has_received_map_ = false; + processing_times["make_RouteHandler"] = stop_watch.toc(true); } autoware::motion_velocity_planner::TrajectoryPoints input_trajectory_points{ @@ -271,12 +279,15 @@ void MotionVelocityPlannerNode::on_trajectory( auto output_trajectory_msg = generate_trajectory(input_trajectory_points); output_trajectory_msg.header = input_trajectory_msg->header; + processing_times["generate_trajectory"] = stop_watch.toc(true); lk.unlock(); trajectory_pub_->publish(output_trajectory_msg); published_time_publisher_->publish_if_subscribed( trajectory_pub_, output_trajectory_msg.header.stamp); + processing_times["Total"] = 
stop_watch.toc("Total"); + processing_time_publisher_.publish(processing_times); } void MotionVelocityPlannerNode::insert_stop( diff --git a/planning/motion_velocity_planner/autoware_motion_velocity_planner_node/src/node.hpp b/planning/motion_velocity_planner/autoware_motion_velocity_planner_node/src/node.hpp index 7b771d0b04442..04e3e6b02c7b0 100644 --- a/planning/motion_velocity_planner/autoware_motion_velocity_planner_node/src/node.hpp +++ b/planning/motion_velocity_planner/autoware_motion_velocity_planner_node/src/node.hpp @@ -96,6 +96,7 @@ class MotionVelocityPlannerNode : public rclcpp::Node rclcpp::Publisher::SharedPtr debug_viz_pub_; rclcpp::Publisher::SharedPtr velocity_factor_publisher_; + autoware::universe_utils::ProcessingTimePublisher processing_time_publisher_{this}; // parameters rclcpp::node_interfaces::OnSetParametersCallbackHandle::SharedPtr set_param_callback_; From 2ca276fdb0f3d8188131318b4ee295d7f6784d77 Mon Sep 17 00:00:00 2001 From: badai nguyen <94814556+badai-nguyen@users.noreply.github.com> Date: Mon, 24 Jun 2024 10:38:28 +0900 Subject: [PATCH 18/29] feat(tensorrt yolox): inference and publish mask image from yolox model with semantic segmentation header (#5553) * add segmentation model Signed-off-by: badai-nguyen fix: add multitask for segment Signed-off-by: badai-nguyen * fix: publish mask Signed-off-by: badai-nguyen * feat: publish colorized mask Signed-off-by: badai-nguyen * fix: resize yolox mask Signed-off-by: badai-nguyen * fix: add memory allocate operations Signed-off-by: Manato HIRABAYASHI * refactor: remove underscore for a local variable Signed-off-by: Manato HIRABAYASHI * chore: add condition to check the number of subscriber for newly added topics Signed-off-by: Manato HIRABAYASHI * chore: pre-commit Signed-off-by: badai-nguyen * fix: add roi overlapping segment Signed-off-by: badai-nguyen * fix: roi overlay segment Signed-off-by: badai-nguyen * chore: refactor Signed-off-by: badai-nguyen * docs: update readme Signed-off-by: badai-nguyen * fix: update model name Signed-off-by: badai-nguyen * fix: add utils into tensorrt_yolox Signed-off-by: badai-nguyen * fix: launch file Signed-off-by: badai-nguyen * chore: remove unnecessary depend Signed-off-by: badai-nguyen * chore: fix yaml file Signed-off-by: badai-nguyen * chore: remove duplicated param in launch Signed-off-by: badai-nguyen * fix: semantic class Signed-off-by: badai-nguyen * docs: update readme Signed-off-by: badai-nguyen * chore: update default param Signed-off-by: badai-nguyen * fix: add processing time topic Signed-off-by: badai-nguyen * chore: typo Signed-off-by: badai-nguyen * style(pre-commit): autofix * chore: fix cspell error Signed-off-by: badai-nguyen * fix: yolox default param Signed-off-by: badai-nguyen * chore: rename debug topics Signed-off-by: badai-nguyen * chore: rename debug topics Signed-off-by: badai-nguyen * docs: update model description Signed-off-by: badai-nguyen * style(pre-commit): autofix * fix: launch Signed-off-by: badai-nguyen * refactor: unpublish mask for single task Signed-off-by: badai-nguyen * Update perception/tensorrt_yolox/src/tensorrt_yolox.cpp Co-authored-by: Manato Hirabayashi <3022416+manato@users.noreply.github.com> * Update perception/tensorrt_yolox/src/tensorrt_yolox.cpp Co-authored-by: Manato Hirabayashi <3022416+manato@users.noreply.github.com> * Update perception/tensorrt_yolox/src/tensorrt_yolox.cpp Co-authored-by: Manato Hirabayashi <3022416+manato@users.noreply.github.com> * style(pre-commit): autofix * docs: update reamde Signed-off-by: 
badai-nguyen * style(pre-commit): autofix * fix: skip mask size as yolox output Signed-off-by: badai-nguyen --------- Signed-off-by: badai-nguyen Signed-off-by: Manato HIRABAYASHI Co-authored-by: Manato HIRABAYASHI Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> Co-authored-by: Manato Hirabayashi <3022416+manato@users.noreply.github.com> --- .../tensorrt_common/tensorrt_common.hpp | 4 + perception/tensorrt_yolox/README.md | 84 ++++-- .../config/yolox_s_plus_opt.param.yaml | 22 ++ .../include/tensorrt_yolox/preprocess.hpp | 17 +- .../include/tensorrt_yolox/tensorrt_yolox.hpp | 67 ++++- .../tensorrt_yolox/tensorrt_yolox_node.hpp | 47 ++- .../launch/yolox_s_plus_opt.launch.xml | 12 +- perception/tensorrt_yolox/src/preprocess.cu | 35 +++ .../tensorrt_yolox/src/tensorrt_yolox.cpp | 269 +++++++++++++++++- .../src/tensorrt_yolox_node.cpp | 96 ++++++- .../src/yolox_single_image_inference_node.cpp | 4 +- 11 files changed, 614 insertions(+), 43 deletions(-) diff --git a/common/tensorrt_common/include/tensorrt_common/tensorrt_common.hpp b/common/tensorrt_common/include/tensorrt_common/tensorrt_common.hpp index aabaea7ca6339..6691c1fb9e97d 100644 --- a/common/tensorrt_common/include/tensorrt_common/tensorrt_common.hpp +++ b/common/tensorrt_common/include/tensorrt_common/tensorrt_common.hpp @@ -15,7 +15,9 @@ #ifndef TENSORRT_COMMON__TENSORRT_COMMON_HPP_ #define TENSORRT_COMMON__TENSORRT_COMMON_HPP_ +#ifndef YOLOX_STANDALONE #include +#endif #include #include @@ -86,6 +88,7 @@ struct BuildConfig profile_per_layer(profile_per_layer), clip_value(clip_value) { +#ifndef YOLOX_STANDALONE if ( std::find(valid_calib_type.begin(), valid_calib_type.end(), calib_type_str) == valid_calib_type.end()) { @@ -95,6 +98,7 @@ struct BuildConfig << "Default calibration type will be used: MinMax" << std::endl; std::cerr << message.str(); } +#endif } }; diff --git a/perception/tensorrt_yolox/README.md b/perception/tensorrt_yolox/README.md index ca407b1ff6811..af88e73cc04a6 100644 --- a/perception/tensorrt_yolox/README.md +++ b/perception/tensorrt_yolox/README.md @@ -2,13 +2,13 @@ ## Purpose -This package detects target objects e.g., cars, trucks, bicycles, and pedestrians on a image based on [YOLOX](https://github.com/Megvii-BaseDetection/YOLOX) model. +This package detects target objects e.g., cars, trucks, bicycles, and pedestrians and segment target objects such as cars, trucks, buses and pedestrian, building, vegetation, road, sidewalk on a image based on [YOLOX](https://github.com/Megvii-BaseDetection/YOLOX) model with multi-header structure. 
## Inner-workings / Algorithms ### Cite - + Zheng Ge, Songtao Liu, Feng Wang, Zeming Li, Jian Sun, "YOLOX: Exceeding YOLO Series in 2021", arXiv preprint arXiv:2107.08430, 2021 [[ref](https://arxiv.org/abs/2107.08430)] @@ -22,10 +22,12 @@ Zheng Ge, Songtao Liu, Feng Wang, Zeming Li, Jian Sun, "YOLOX: Exceeding YOLO Se ### Output -| Name | Type | Description | -| ------------- | -------------------------------------------------- | -------------------------------------------------- | -| `out/objects` | `tier4_perception_msgs/DetectedObjectsWithFeature` | The detected objects with 2D bounding boxes | -| `out/image` | `sensor_msgs/Image` | The image with 2D bounding boxes for visualization | +| Name | Type | Description | +| ---------------- | -------------------------------------------------- | ------------------------------------------------------------------- | +| `out/objects` | `tier4_perception_msgs/DetectedObjectsWithFeature` | The detected objects with 2D bounding boxes | +| `out/image` | `sensor_msgs/Image` | The image with 2D bounding boxes for visualization | +| `out/mask` | `sensor_msgs/Image` | The semantic segmentation mask | +| `out/color_mask` | `sensor_msgs/Image` | The colorized image of semantic segmentation mask for visualization | ## Parameters @@ -40,20 +42,33 @@ Zheng Ge, Songtao Liu, Feng Wang, Zeming Li, Jian Sun, "YOLOX: Exceeding YOLO Se ### Node Parameters -| Name | Type | Default Value | Description | -| ----------------------------- | ------ | ------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -| `model_path` | string | "" | The onnx file name for yolox model | -| `label_path` | string | "" | The label file with label names for detected objects written on it | -| `precision` | string | "fp16" | The inference mode: "fp32", "fp16", "int8" | -| `build_only` | bool | false | shutdown node after TensorRT engine file is built | -| `calibration_algorithm` | string | "MinMax" | Calibration algorithm to be used for quantization when precision==int8. Valid value is one of: Entropy",("Legacy" \| "Percentile"), "MinMax"] | -| `dla_core_id` | int | -1 | If positive ID value is specified, the node assign inference task to the DLA core | -| `quantize_first_layer` | bool | false | If true, set the operating precision for the first (input) layer to be fp16. This option is valid only when precision==int8 | -| `quantize_last_layer` | bool | false | If true, set the operating precision for the last (output) layer to be fp16. This option is valid only when precision==int8 | -| `profile_per_layer` | bool | false | If true, profiler function will be enabled. Since the profile function may affect execution speed, it is recommended to set this flag true only for development purpose. | -| `clip_value` | double | 0.0 | If positive value is specified, the value of each layer output will be clipped between [0.0, clip_value]. This option is valid only when precision==int8 and used to manually specify the dynamic range instead of using any calibration | -| `preprocess_on_gpu` | bool | true | If true, pre-processing is performed on GPU | -| `calibration_image_list_path` | string | "" | Path to a file which contains path to images. Those images will be used for int8 quantization. 
| +| Name | Type | Default Value | Description | +| -------------------------------------- | ------ | ------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| `model_path` | string | "" | The onnx file name for yolox model | +| `model_name` | string | "" | The yolox model name:
"yolox-sPlus-T4-960x960-pseudo-finetune" for detection only, could reduce resource and processing_time
"yolox-sPlus-opt-pseudoV2-T4-960x960-T4-seg16cls" for multi-task including semantic segmentation | +| `label_path` | string | "" | The label file with label names for detected objects written on it | +| `precision` | string | "fp16" | The inference mode: "fp32", "fp16", "int8" | +| `build_only` | bool | false | shutdown node after TensorRT engine file is built | +| `calibration_algorithm` | string | "MinMax" | Calibration algorithm to be used for quantization when precision==int8. Valid value is one of: Entropy",("Legacy" \| "Percentile"), "MinMax"] | +| `dla_core_id` | int | -1 | If positive ID value is specified, the node assign inference task to the DLA core | +| `quantize_first_layer` | bool | false | If true, set the operating precision for the first (input) layer to be fp16. This option is valid only when precision==int8 | +| `quantize_last_layer` | bool | false | If true, set the operating precision for the last (output) layer to be fp16. This option is valid only when precision==int8 | +| `profile_per_layer` | bool | false | If true, profiler function will be enabled. Since the profile function may affect execution speed, it is recommended to set this flag true only for development purpose. | +| `clip_value` | double | 0.0 | If positive value is specified, the value of each layer output will be clipped between [0.0, clip_value]. This option is valid only when precision==int8 and used to manually specify the dynamic range instead of using any calibration | +| `preprocess_on_gpu` | bool | true | If true, pre-processing is performed on GPU | +| `calibration_image_list_path` | string | "" | Path to a file which contains path to images. Those images will be used for int8 quantization. | +| `yolox_s_plus_opt_param_path` | string | "" | Path to parameter file | +| `is_publish_color_mask` | bool | false | If true, publish color mask for result visualization | +| `is_roi_overlap_segment` | bool | true | If true, overlay detected object roi onto semantic segmentation to avoid over-filtering pointcloud especially small size objects | +| `overlap_roi_score_threshold` | float | 0.3 | minimum existence_probability of detected roi considered to replace segmentation | +| `roi_overlay_segment_label.UNKNOWN` | bool | true | If true, unknown objects roi will be overlaid onto sematic segmentation mask. | +| `roi_overlay_segment_label.CAR` | bool | false | If true, car objects roi will be overlaid onto sematic segmentation mask. | +| `roi_overlay_segment_label.TRUCK` | bool | false | If true, truck objects roi will be overlaid onto sematic segmentation mask. | +| `roi_overlay_segment_label.BUS` | bool | false | If true, bus objects roi will be overlaid onto sematic segmentation mask. | +| `roi_overlay_segment_label.MOTORCYCLE` | bool | true | If true, motorcycle objects roi will be overlaid onto sematic segmentation mask. | +| `roi_overlay_segment_label.BICYCLE` | bool | true | If true, bicycle objects roi will be overlaid onto sematic segmentation mask. | +| `roi_overlay_segment_label.PEDESTRIAN` | bool | true | If true, pedestrian objects roi will be overlaid onto sematic segmentation mask. | +| `roi_overlay_segment_label.ANIMAL` | bool | true | If true, animal objects roi will be overlaid onto sematic segmentation mask. 
| ## Assumptions / Known limits @@ -69,6 +84,27 @@ The label contained in detected 2D bounding boxes (i.e., `out/objects`) will be If other labels (case insensitive) are contained in the file specified via the `label_file` parameter, those are labeled as `UNKNOWN`, while detected rectangles are drawn in the visualization result (`out/image`). +The semantic segmentation mask is a gray image whose each pixel is index of one following class: + +| index | semantic name | +| ----- | ---------------- | +| 0 | road | +| 1 | building | +| 2 | wall | +| 3 | obstacle | +| 4 | traffic_light | +| 5 | traffic_sign | +| 6 | person | +| 7 | vehicle | +| 8 | bike | +| 9 | road | +| 10 | sidewalk | +| 11 | roadPaint | +| 12 | curbstone | +| 13 | crosswalk_others | +| 14 | vegetation | +| 15 | sky | + ## Onnx model A sample model (named `yolox-tiny.onnx`) is downloaded by ansible script on env preparation stage, if not, please, follow [Manual downloading of artifacts](https://github.com/autowarefoundation/autoware/tree/main/ansible/roles/artifacts). @@ -79,11 +115,12 @@ hence these parameters are ignored when users specify ONNX models including this This package accepts both `EfficientNMS_TRT` attached ONNXs and [models published from the official YOLOX repository](https://github.com/Megvii-BaseDetection/YOLOX/tree/main/demo/ONNXRuntime#download-onnx-models) (we referred to them as "plain" models). -In addition to `yolox-tiny.onnx`, a custom model named `yolox-sPlus-opt.onnx` is either available. -This model is based on YOLOX-s and tuned to perform more accurate detection with almost comparable execution speed with `yolox-tiny`. +In addition to `yolox-tiny.onnx`, a custom model named `yolox-sPlus-opt-pseudoV2-T4-960x960-T4-seg16cls` is either available. +This model is multi-header structure model which is based on YOLOX-s and tuned to perform more accurate detection with almost comparable execution speed with `yolox-tiny`. To get better results with this model, users are recommended to use some specific running arguments such as `precision:=int8`, `calibration_algorithm:=Entropy`, `clip_value:=6.0`. Users can refer `launch/yolox_sPlus_opt.launch.xml` to see how this model can be used. +Beside detection result, this model also output image semantic segmentation result for pointcloud filtering purpose. All models are automatically converted to TensorRT format. These converted files will be saved in the same directory as specified ONNX files @@ -146,7 +183,7 @@ Please refer [the official document](https://github.com/Megvii-BaseDetection/YOL ## Label file -A sample label file (named `label.txt`)is also downloaded automatically during env preparation process +A sample label file (named `label.txt`) and semantic segmentation color map file (name `semseg_color_map.csv`) are also downloaded automatically during env preparation process (**NOTE:** This file is incompatible with models that output labels for the COCO dataset (e.g., models from the official YOLOX repository)). This file represents the correspondence between class index (integer outputted from YOLOX network) and @@ -157,3 +194,4 @@ with labels according to the order in this file. 
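As an illustrative aside, the gray mask can be sliced per class with a couple of OpenCV lines; the class index used below comes from the class-index table above and is an assumption about the bundled color map, not part of this package's API.

```cpp
// Illustrative sketch (not part of this package): extract a binary mask for a
// single class from the mono8 segmentation image published on `out/mask`.
// Class index 6 ("person") is taken from the class-index table above; adjust
// it if the color map bundled with your model differs.
#include <opencv2/core.hpp>

cv::Mat extract_class(const cv::Mat & seg_mask, const int class_index = 6)
{
  CV_Assert(seg_mask.type() == CV_8UC1);
  // The comparison yields 255 where the pixel equals class_index and 0 elsewhere.
  return seg_mask == class_index;
}
```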
- - +- diff --git a/perception/tensorrt_yolox/config/yolox_s_plus_opt.param.yaml b/perception/tensorrt_yolox/config/yolox_s_plus_opt.param.yaml index bc67173442094..57c1b40c44a4d 100644 --- a/perception/tensorrt_yolox/config/yolox_s_plus_opt.param.yaml +++ b/perception/tensorrt_yolox/config/yolox_s_plus_opt.param.yaml @@ -1,7 +1,29 @@ +# cspell: ignore semseg /**: ros__parameters: + # refine segmentation mask by overlay roi class + # disable when sematic segmentation accuracy is good enough + is_roi_overlap_segment: true + + # minimum existence_probability of detected roi considered to replace segmentation + overlap_roi_score_threshold: 0.3 + + # publish color mask for result visualization + is_publish_color_mask: false + + roi_overlay_segment_label: + UNKNOWN : true + CAR : false + TRUCK : false + BUS : false + MOTORCYCLE : true + BICYCLE : true + PEDESTRIAN : true + ANIMAL: true + model_path: "$(var data_path)/tensorrt_yolox/$(var model_name).onnx" label_path: "$(var data_path)/tensorrt_yolox/label.txt" + color_map_path: "$(var data_path)/tensorrt_yolox/semseg_color_map.csv" score_threshold: 0.35 nms_threshold: 0.7 precision: "int8" # Operation precision to be used on inference. Valid value is one of: [fp32, fp16, int8]. diff --git a/perception/tensorrt_yolox/include/tensorrt_yolox/preprocess.hpp b/perception/tensorrt_yolox/include/tensorrt_yolox/preprocess.hpp index 3549ae35e70ea..faac6de4e3284 100644 --- a/perception/tensorrt_yolox/include/tensorrt_yolox/preprocess.hpp +++ b/perception/tensorrt_yolox/include/tensorrt_yolox/preprocess.hpp @@ -179,6 +179,21 @@ extern void crop_resize_bilinear_letterbox_nhwc_to_nchw32_batch_gpu( extern void multi_scale_resize_bilinear_letterbox_nhwc_to_nchw32_batch_gpu( float * dst, unsigned char * src, int d_w, int d_h, int d_c, Roi * d_roi, int s_w, int s_h, int s_c, int batch, float norm, cudaStream_t stream); -} // namespace tensorrt_yolox +/** + * @brief Argmax on GPU + * @param[out] dst processed image + * @param[in] src probability map + * @param[in] d_w width for output + * @param[in] d_h height for output + * @param[in] s_w width for input + * @param[in] s_h height for input + * @param[in] s_c channel for input + * @param[in] batch batch size + * @param[in] stream cuda stream + */ +extern void argmax_gpu( + unsigned char * dst, float * src, int d_w, int d_h, int s_w, int s_h, int s_c, int batch, + cudaStream_t stream); +} // namespace tensorrt_yolox #endif // TENSORRT_YOLOX__PREPROCESS_HPP_ diff --git a/perception/tensorrt_yolox/include/tensorrt_yolox/tensorrt_yolox.hpp b/perception/tensorrt_yolox/include/tensorrt_yolox/tensorrt_yolox.hpp index c42222a70c96b..d287c8a44d4cf 100644 --- a/perception/tensorrt_yolox/include/tensorrt_yolox/tensorrt_yolox.hpp +++ b/perception/tensorrt_yolox/include/tensorrt_yolox/tensorrt_yolox.hpp @@ -52,6 +52,13 @@ struct GridAndStride int stride; }; +typedef struct Colormap_ +{ + int id; + std::string name; + std::vector color; +} Colormap; + /** * @class TrtYoloX * @brief TensorRT YOLOX for faster inference @@ -85,7 +92,7 @@ class TrtYoloX const bool use_gpu_preprocess = false, std::string calibration_image_list_file = std::string(), const double norm_factor = 1.0, [[maybe_unused]] const std::string & cache_dir = "", const tensorrt_common::BatchConfig & batch_config = {1, 1, 1}, - const size_t max_workspace_size = (1 << 30)); + const size_t max_workspace_size = (1 << 30), const std::string & color_map_path = ""); /** * @brief Deconstruct TrtYoloX */ @@ -96,7 +103,9 @@ class TrtYoloX * @param[out] objects results for 
object detection * @param[in] images batched images */ - bool doInference(const std::vector & images, ObjectArrays & objects); + bool doInference( + const std::vector & images, ObjectArrays & objects, std::vector & masks, + std::vector & color_masks); /** * @brief run inference including pre-process and post-process @@ -130,6 +139,22 @@ class TrtYoloX */ void printProfiling(void); + /** + * @brief get num for multitask heads + */ + int getMultitaskNum(void); + + /** + * @brief get colorized masks from index using specific colormap + * @param[out] cmask colorized mask + * @param[in] index multitask index + * @param[in] colormap colormap for masks + */ + void getColorizedMask( + const std::vector & colormap, const cv::Mat & mask, + cv::Mat & colorized_mask); + inline std::vector getColorMap() { return sematic_color_map_; } + private: /** * @brief run preprocess including resizing, letterbox, NHWC2NCHW and toFloat on CPU @@ -177,7 +202,9 @@ class TrtYoloX const cv::Mat & images, int batch_size, ObjectArrays & objects); bool feedforward(const std::vector & images, ObjectArrays & objects); - bool feedforwardAndDecode(const std::vector & images, ObjectArrays & objects); + bool feedforwardAndDecode( + const std::vector & images, ObjectArrays & objects, std::vector & masks, + std::vector & color_masks); void decodeOutputs(float * prob, ObjectArray & objects, float scale, cv::Size & img_size) const; void generateGridsAndStride( const int target_w, const int target_h, const std::vector & strides, @@ -206,6 +233,26 @@ class TrtYoloX void nmsSortedBboxes( const ObjectArray & face_objects, std::vector & picked, float nms_threshold) const; + /** + * @brief get a mask image for a segmentation head + * @param[out] argmax argmax results + * @param[in] prob probability map + * @param[in] dims dimension for probability map + * @param[in] out_w mask width excluding letterbox + * @param[in] out_h mask height excluding letterbox + */ + cv::Mat getMaskImage(float * prob, nvinfer1::Dims dims, int out_w, int out_h); + + /** + * @brief get a mask image on GPUs for a segmentation head + * @param[out] mask image + * @param[in] prob probability map on device + * @param[in] out_w mask width excluding letterbox + * @param[in] out_h mask height excluding letterbox + * @param[in] b current batch + */ + cv::Mat getMaskImageGpu(float * d_prob, nvinfer1::Dims dims, int out_w, int out_h, int b); + std::unique_ptr trt_common_; std::vector input_h_; @@ -249,6 +296,20 @@ class TrtYoloX CudaUniquePtrHost roi_h_; // device pointer for ROI CudaUniquePtr roi_d_; + + // flag whether model has multitasks + int multitask_; + // buff size for segmentation heads + CudaUniquePtr segmentation_out_prob_d_; + CudaUniquePtrHost segmentation_out_prob_h_; + size_t segmentation_out_elem_num_; + size_t segmentation_out_elem_num_per_batch_; + std::vector segmentation_masks_; + // host buffer for argmax postprocessing on GPU + CudaUniquePtrHost argmax_buf_h_; + // device buffer for argmax postprocessing on GPU + CudaUniquePtr argmax_buf_d_; + std::vector sematic_color_map_; }; } // namespace tensorrt_yolox diff --git a/perception/tensorrt_yolox/include/tensorrt_yolox/tensorrt_yolox_node.hpp b/perception/tensorrt_yolox/include/tensorrt_yolox/tensorrt_yolox_node.hpp index 6044148a932ae..48ffedb98441d 100644 --- a/perception/tensorrt_yolox/include/tensorrt_yolox/tensorrt_yolox_node.hpp +++ b/perception/tensorrt_yolox/include/tensorrt_yolox/tensorrt_yolox_node.hpp @@ -15,6 +15,8 @@ #ifndef TENSORRT_YOLOX__TENSORRT_YOLOX_NODE_HPP_ #define 
TENSORRT_YOLOX__TENSORRT_YOLOX_NODE_HPP_ +#include "object_recognition_utils/object_recognition_utils.hpp" + #include #include #include @@ -25,6 +27,7 @@ #include #include #include +#include #if __has_include() #include @@ -41,10 +44,30 @@ namespace tensorrt_yolox { +// cspell: ignore Semseg using LabelMap = std::map; - +using Label = tier4_perception_msgs::msg::Semantic; class TrtYoloXNode : public rclcpp::Node { + struct RoiOverlaySemsegLabel + { + bool UNKNOWN; + bool CAR; + bool TRUCK; + bool BUS; + bool MOTORCYCLE; + bool BICYCLE; + bool PEDESTRIAN; + bool ANIMAL; + bool isOverlay(const uint8_t label) const + { + return (label == Label::UNKNOWN && UNKNOWN) || (label == Label::CAR && CAR) || + (label == Label::TRUCK && TRUCK) || (label == Label::BUS && BUS) || + (label == Label::ANIMAL && ANIMAL) || (label == Label::MOTORBIKE && MOTORCYCLE) || + (label == Label::BICYCLE && BICYCLE) || (label == Label::PEDESTRIAN && PEDESTRIAN); + }; + }; // struct RoiOverlaySemsegLabel + public: explicit TrtYoloXNode(const rclcpp::NodeOptions & node_options); @@ -53,8 +76,13 @@ class TrtYoloXNode : public rclcpp::Node void onImage(const sensor_msgs::msg::Image::ConstSharedPtr msg); bool readLabelFile(const std::string & label_path); void replaceLabelMap(); - + void overlapSegmentByRoi( + const tensorrt_yolox::Object & object, cv::Mat & mask, const int width, const int height); + int mapRoiLabel2SegLabel(const int32_t roi_label_index); image_transport::Publisher image_pub_; + image_transport::Publisher mask_pub_; + image_transport::Publisher color_mask_pub_; + rclcpp::Publisher::SharedPtr objects_pub_; image_transport::Subscriber image_sub_; @@ -63,6 +91,21 @@ class TrtYoloXNode : public rclcpp::Node LabelMap label_map_; std::unique_ptr trt_yolox_; + bool is_roi_overlap_segment_; + bool is_publish_color_mask_; + float overlap_roi_score_threshold_; + // TODO(badai-nguyen): change to function + std::map remap_roi_to_semantic_ = { + {"UNKNOWN", 3}, // other + {"ANIMAL", 0}, // other + {"PEDESTRIAN", 6}, // person + {"CAR", 7}, // car + {"TRUCK", 7}, // truck + {"BUS", 7}, // bus + {"BICYCLE", 8}, // bicycle + {"MOTORBIKE", 8}, // motorcycle + }; + RoiOverlaySemsegLabel roi_overlay_segment_labels_; std::unique_ptr> stop_watch_ptr_; std::unique_ptr debug_publisher_; }; diff --git a/perception/tensorrt_yolox/launch/yolox_s_plus_opt.launch.xml b/perception/tensorrt_yolox/launch/yolox_s_plus_opt.launch.xml index dd15eda2913ce..e4436a0424be5 100644 --- a/perception/tensorrt_yolox/launch/yolox_s_plus_opt.launch.xml +++ b/perception/tensorrt_yolox/launch/yolox_s_plus_opt.launch.xml @@ -1,22 +1,30 @@ - + - + + + + + diff --git a/perception/tensorrt_yolox/src/preprocess.cu b/perception/tensorrt_yolox/src/preprocess.cu index 3c3087c536f10..f384de2975aa4 100644 --- a/perception/tensorrt_yolox/src/preprocess.cu +++ b/perception/tensorrt_yolox/src/preprocess.cu @@ -594,4 +594,39 @@ void multi_scale_resize_bilinear_letterbox_nhwc_to_nchw32_batch_gpu( cuda_gridsize(N), block, 0, stream>>>(N, dst, src, d_h, d_w, s_h, s_w, d_roi, norm, batch); } +__global__ void argmax_gpu_kernel( + int N, unsigned char * dst, float * src, int dst_h, int dst_w, int src_c, int src_h, int src_w, + int batch) +{ + // NHWC + int index = (blockIdx.x + blockIdx.y * gridDim.x) * blockDim.x + threadIdx.x; + + if (index >= N) return; + int c = 0; + int w = index % dst_w; + int h = index / (dst_w); + + int b; + for (b = 0; b < batch; b++) { + float max_prob = 0.0; + int max_index = 0; + int dst_index = w + dst_w * h + b * dst_h * dst_w; + for (c = 0; 
c < src_c; c++) { + int src_index = w + src_w * h + c * src_h * src_w + b * src_c * src_h * src_w; + max_index = max_prob < src[src_index] ? c : max_index; + max_prob = max_prob < src[src_index] ? src[src_index] : max_prob; + } + dst[dst_index] = max_index; + } +} + +void argmax_gpu( + unsigned char * dst, float * src, int d_w, int d_h, int s_w, int s_h, int s_c, int batch, + cudaStream_t stream) +{ + int N = d_w * d_h; + argmax_gpu_kernel<<>>( + N, dst, src, d_h, d_w, s_c, s_h, s_w, batch); +} + } // namespace tensorrt_yolox diff --git a/perception/tensorrt_yolox/src/tensorrt_yolox.cpp b/perception/tensorrt_yolox/src/tensorrt_yolox.cpp index 11f68f580acc2..af6cecd2eff7d 100644 --- a/perception/tensorrt_yolox/src/tensorrt_yolox.cpp +++ b/perception/tensorrt_yolox/src/tensorrt_yolox.cpp @@ -15,12 +15,18 @@ #include "cuda_utils/cuda_check_error.hpp" #include "cuda_utils/cuda_unique_ptr.hpp" +#include #include #include #include +#include + #include +#include #include +#include +#include #include #include #include @@ -97,6 +103,53 @@ std::vector loadImageList(const std::string & filename, const std:: } return fileList; } + +std::vector get_seg_colormap(const std::string & filename) +{ + std::vector seg_cmap; + if (filename != "not-specified") { + std::vector color_list = loadListFromTextFile(filename); + for (int i = 0; i < static_cast(color_list.size()); i++) { + if (i == 0) { + // Skip header + continue; + } + std::string colormapString = color_list[i]; + tensorrt_yolox::Colormap cmap; + std::vector rgb; + size_t npos = colormapString.find_first_of(','); + assert(npos != std::string::npos); + std::string substr = colormapString.substr(0, npos); + int id = static_cast(std::stoi(trim(substr))); + colormapString.erase(0, npos + 1); + + npos = colormapString.find_first_of(','); + assert(npos != std::string::npos); + substr = colormapString.substr(0, npos); + std::string name = (trim(substr)); + cmap.id = id; + cmap.name = name; + colormapString.erase(0, npos + 1); + while (!colormapString.empty()) { + size_t npos = colormapString.find_first_of(','); + if (npos != std::string::npos) { + substr = colormapString.substr(0, npos); + unsigned char c = (unsigned char)std::stoi(trim(substr)); + cmap.color.push_back(c); + colormapString.erase(0, npos + 1); + } else { + unsigned char c = (unsigned char)std::stoi(trim(colormapString)); + cmap.color.push_back(c); + break; + } + } + + seg_cmap.push_back(cmap); + } + } + return seg_cmap; +} + } // anonymous namespace namespace tensorrt_yolox @@ -106,12 +159,14 @@ TrtYoloX::TrtYoloX( const float score_threshold, const float nms_threshold, tensorrt_common::BuildConfig build_config, const bool use_gpu_preprocess, std::string calibration_image_list_path, const double norm_factor, [[maybe_unused]] const std::string & cache_dir, const tensorrt_common::BatchConfig & batch_config, - const size_t max_workspace_size) + const size_t max_workspace_size, const std::string & color_map_path) { src_width_ = -1; src_height_ = -1; norm_factor_ = norm_factor; batch_size_ = batch_config[2]; + multitask_ = 0; + sematic_color_map_ = get_seg_colormap(color_map_path); if (precision == "int8") { if (build_config.clip_value <= 0.0) { if (calibration_image_list_path.empty()) { @@ -196,9 +251,18 @@ TrtYoloX::TrtYoloX( needs_output_decode_ = false; break; default: + needs_output_decode_ = true; + // The following three values are considered only if the specified model is plain one + num_class_ = num_class; + score_threshold_ = score_threshold; + nms_threshold_ = nms_threshold; + // Todo 
: Support multiple segmentation heads + multitask_++; + /* std::stringstream s; s << "\"" << model_path << "\" is unsupported format"; throw std::runtime_error{s.str()}; + */ } // GPU memory allocation @@ -234,10 +298,31 @@ TrtYoloX::TrtYoloX( out_scores_d_ = cuda_utils::make_unique(batch_config[2] * max_detections_); out_classes_d_ = cuda_utils::make_unique(batch_config[2] * max_detections_); } + if (multitask_) { + // Allocate buffer for segmentation + segmentation_out_elem_num_ = 0; + for (int m = 0; m < multitask_; m++) { + const auto output_dims = + trt_common_->getBindingDimensions(m + 2); // 0 : input, 1 : output for detections + size_t out_elem_num = std::accumulate( + output_dims.d + 1, output_dims.d + output_dims.nbDims, 1, std::multiplies()); + out_elem_num = out_elem_num * batch_config[2]; + segmentation_out_elem_num_ += out_elem_num; + } + segmentation_out_elem_num_per_batch_ = + static_cast(segmentation_out_elem_num_ / batch_config[2]); + segmentation_out_prob_d_ = cuda_utils::make_unique(segmentation_out_elem_num_); + segmentation_out_prob_h_ = + cuda_utils::make_unique_host(segmentation_out_elem_num_, cudaHostAllocPortable); + } if (use_gpu_preprocess) { use_gpu_preprocess_ = true; image_buf_h_ = nullptr; image_buf_d_ = nullptr; + if (multitask_) { + argmax_buf_h_ = nullptr; + argmax_buf_d_ = nullptr; + } } else { use_gpu_preprocess_ = false; } @@ -252,6 +337,9 @@ TrtYoloX::~TrtYoloX() if (image_buf_d_) { image_buf_d_.reset(); } + if (argmax_buf_d_) { + argmax_buf_d_.reset(); + } } } @@ -294,6 +382,26 @@ void TrtYoloX::initPreprocessBuffer(int width, int height) width * height * 3 * batch_size_, cudaHostAllocWriteCombined); image_buf_d_ = cuda_utils::make_unique(width * height * 3 * batch_size_); } + if (multitask_) { + size_t argmax_out_elem_num = 0; + for (int m = 0; m < multitask_; m++) { + const auto output_dims = + trt_common_->getBindingDimensions(m + 2); // 0 : input, 1 : output for detections + const float scale = std::min( + output_dims.d[3] / static_cast(width), + output_dims.d[2] / static_cast(height)); + int out_w = static_cast(width * scale); + int out_h = static_cast(height * scale); + // size_t out_elem_num = std::accumulate( + // output_dims.d + 1, output_dims.d + output_dims.nbDims, 1, std::multiplies()); + // out_elem_num = out_elem_num * batch_size_; + size_t out_elem_num = out_w * out_h * batch_size_; + argmax_out_elem_num += out_elem_num; + } + argmax_buf_h_ = + cuda_utils::make_unique_host(argmax_out_elem_num, cudaHostAllocPortable); + argmax_buf_d_ = cuda_utils::make_unique(argmax_out_elem_num); + } } } @@ -321,6 +429,12 @@ void TrtYoloX::preprocessGpu(const std::vector & images) if (image_buf_d_) { image_buf_d_.reset(); } + if (argmax_buf_h_) { + argmax_buf_h_.reset(); + } + if (argmax_buf_d_) { + argmax_buf_d_.reset(); + } } } src_width_ = width; @@ -333,6 +447,7 @@ void TrtYoloX::preprocessGpu(const std::vector & images) const float input_height = static_cast(input_dims.d[2]); const float input_width = static_cast(input_dims.d[3]); int b = 0; + size_t argmax_out_elem_num = 0; for (const auto & image : images) { if (!image_buf_h_) { const float scale = std::min(input_width / image.cols, input_height / image.rows); @@ -348,7 +463,31 @@ void TrtYoloX::preprocessGpu(const std::vector & images) image_buf_h_.get() + index, &image.data[0], image.cols * image.rows * 3 * sizeof(unsigned char)); b++; + + if (multitask_) { + for (int m = 0; m < multitask_; m++) { + const auto output_dims = + trt_common_->getBindingDimensions(m + 2); // 0: input, 1: output for 
detections + const float scale = std::min( + output_dims.d[3] / static_cast(image.cols), + output_dims.d[2] / static_cast(image.rows)); + int out_w = static_cast(image.cols * scale); + int out_h = static_cast(image.rows * scale); + argmax_out_elem_num += out_w * out_h * batch_size; + } + } + } + + if (multitask_) { + if (!argmax_buf_h_) { + argmax_buf_h_ = + cuda_utils::make_unique_host(argmax_out_elem_num, cudaHostAllocPortable); + } + if (!argmax_buf_d_) { + argmax_buf_d_ = cuda_utils::make_unique(argmax_out_elem_num); + } } + // Copy into device memory CHECK_CUDA_ERROR(cudaMemcpyAsync( image_buf_d_.get(), image_buf_h_.get(), @@ -406,7 +545,9 @@ void TrtYoloX::preprocess(const std::vector & images) // No Need for Sync } -bool TrtYoloX::doInference(const std::vector & images, ObjectArrays & objects) +bool TrtYoloX::doInference( + const std::vector & images, ObjectArrays & objects, std::vector & masks, + [[maybe_unused]] std::vector & color_masks) { if (!trt_common_->isInitialized()) { return false; @@ -419,7 +560,7 @@ bool TrtYoloX::doInference(const std::vector & images, ObjectArrays & o } if (needs_output_decode_) { - return feedforwardAndDecode(images, objects); + return feedforwardAndDecode(images, objects, masks, color_masks); } else { return feedforward(images, objects); } @@ -659,6 +800,8 @@ void TrtYoloX::multiScalePreprocess(const cv::Mat & image, const std::vector & images, ObjectArrays & objects, const std::vector & rois) { + std::vector masks; + std::vector color_masks; if (!trt_common_->isInitialized()) { return false; } @@ -669,7 +812,7 @@ bool TrtYoloX::doInferenceWithRoi( } if (needs_output_decode_) { - return feedforwardAndDecode(images, objects); + return feedforwardAndDecode(images, objects, masks, color_masks); } else { return feedforward(images, objects); } @@ -747,10 +890,14 @@ bool TrtYoloX::feedforward(const std::vector & images, ObjectArrays & o return true; } -bool TrtYoloX::feedforwardAndDecode(const std::vector & images, ObjectArrays & objects) +bool TrtYoloX::feedforwardAndDecode( + const std::vector & images, ObjectArrays & objects, std::vector & out_masks, + [[maybe_unused]] std::vector & color_masks) { std::vector buffers = {input_d_.get(), out_prob_d_.get()}; - + if (multitask_) { + buffers = {input_d_.get(), out_prob_d_.get(), segmentation_out_prob_d_.get()}; + } trt_common_->enqueueV2(buffers.data(), *stream_, nullptr); const auto batch_size = images.size(); @@ -758,6 +905,11 @@ bool TrtYoloX::feedforwardAndDecode(const std::vector & images, ObjectA CHECK_CUDA_ERROR(cudaMemcpyAsync( out_prob_h_.get(), out_prob_d_.get(), sizeof(float) * out_elem_num_, cudaMemcpyDeviceToHost, *stream_)); + if (multitask_ && !use_gpu_preprocess_) { + CHECK_CUDA_ERROR(cudaMemcpyAsync( + segmentation_out_prob_h_.get(), segmentation_out_prob_d_.get(), + sizeof(float) * segmentation_out_elem_num_, cudaMemcpyDeviceToHost, *stream_)); + } cudaStreamSynchronize(*stream_); objects.clear(); @@ -766,7 +918,43 @@ bool TrtYoloX::feedforwardAndDecode(const std::vector & images, ObjectA float * batch_prob = out_prob_h_.get() + (i * out_elem_num_per_batch_); ObjectArray object_array; decodeOutputs(batch_prob, object_array, scales_[i], image_size); + // add refine mask using object objects.emplace_back(object_array); + if (multitask_) { + segmentation_masks_.clear(); + + size_t counter = 0; + int batch = + static_cast(segmentation_out_elem_num_ / segmentation_out_elem_num_per_batch_); + for (int m = 0; m < multitask_; m++) { + const auto output_dims = + trt_common_->getBindingDimensions(m 
+ 2); // 0 : input, 1 : output for detections + size_t out_elem_num = std::accumulate( + output_dims.d + 1, output_dims.d + output_dims.nbDims, 1, std::multiplies()); + out_elem_num = out_elem_num * batch; + const float scale = std::min( + output_dims.d[3] / static_cast(image_size.width), + output_dims.d[2] / static_cast(image_size.height)); + int out_w = static_cast(image_size.width * scale); + int out_h = static_cast(image_size.height * scale); + cv::Mat mask; + if (use_gpu_preprocess_) { + float * d_segmentation_results = + segmentation_out_prob_d_.get() + (i * segmentation_out_elem_num_per_batch_); + mask = getMaskImageGpu(&(d_segmentation_results[counter]), output_dims, out_w, out_h, i); + } else { + float * segmentation_results = + segmentation_out_prob_h_.get() + (i * segmentation_out_elem_num_per_batch_); + mask = getMaskImage(&(segmentation_results[counter]), output_dims, out_w, out_h); + } + segmentation_masks_.emplace_back(std::move(mask)); + counter += out_elem_num; + } + // semantic segmentation was fixed as first task + out_masks.at(i) = segmentation_masks_.at(0); + } else { + continue; + } } return true; } @@ -1036,4 +1224,73 @@ void TrtYoloX::nmsSortedBboxes( } } +cv::Mat TrtYoloX::getMaskImageGpu(float * d_prob, nvinfer1::Dims dims, int out_w, int out_h, int b) +{ + // NCHW + int classes = dims.d[1]; + int height = dims.d[2]; + int width = dims.d[3]; + cv::Mat mask = cv::Mat::zeros(out_h, out_w, CV_8UC1); + int index = b * out_w * out_h; + argmax_gpu( + (unsigned char *)argmax_buf_d_.get() + index, d_prob, out_w, out_h, width, height, classes, 1, + *stream_); + CHECK_CUDA_ERROR(cudaMemcpyAsync( + argmax_buf_h_.get(), argmax_buf_d_.get(), sizeof(unsigned char) * 1 * out_w * out_h, + cudaMemcpyDeviceToHost, *stream_)); + cudaStreamSynchronize(*stream_); + std::memcpy(mask.data, argmax_buf_h_.get() + index, sizeof(unsigned char) * 1 * out_w * out_h); + return mask; +} + +cv::Mat TrtYoloX::getMaskImage(float * prob, nvinfer1::Dims dims, int out_w, int out_h) +{ + // NCHW + int classes = dims.d[1]; + int height = dims.d[2]; + int width = dims.d[3]; + cv::Mat mask = cv::Mat::zeros(out_h, out_w, CV_8UC1); + // argmax + // #pragma omp parallel for + for (int y = 0; y < out_h; y++) { + for (int x = 0; x < out_w; x++) { + float max = 0.0; + int index = 0; + for (int c = 0; c < classes; c++) { + float value = prob[c * height * width + y * width + x]; + if (max < value) { + max = value; + index = c; + } + } + mask.at(y, x) = index; + } + } + return mask; +} + +int TrtYoloX::getMultitaskNum(void) +{ + return multitask_; +} + +void TrtYoloX::getColorizedMask( + const std::vector & colormap, const cv::Mat & mask, cv::Mat & cmask) +{ + int width = mask.cols; + int height = mask.rows; + if ((cmask.cols != mask.cols) || (cmask.rows != mask.rows)) { + throw std::runtime_error("input and output image have difference size."); + return; + } + for (int y = 0; y < height; y++) { + for (int x = 0; x < width; x++) { + unsigned char id = mask.at(y, x); + cmask.at(y, x)[0] = colormap[id].color[2]; + cmask.at(y, x)[1] = colormap[id].color[1]; + cmask.at(y, x)[2] = colormap[id].color[0]; + } + } +} + } // namespace tensorrt_yolox diff --git a/perception/tensorrt_yolox/src/tensorrt_yolox_node.cpp b/perception/tensorrt_yolox/src/tensorrt_yolox_node.cpp index 4ee18b99e4bc6..c13384918bd9c 100644 --- a/perception/tensorrt_yolox/src/tensorrt_yolox_node.cpp +++ b/perception/tensorrt_yolox/src/tensorrt_yolox_node.cpp @@ -33,7 +33,7 @@ TrtYoloXNode::TrtYoloXNode(const rclcpp::NodeOptions & node_options) 
stop_watch_ptr_ = std::make_unique>(); debug_publisher_ = - std::make_unique(this, "tensorrt_yolox"); + std::make_unique(this, this->get_name()); stop_watch_ptr_->tic("cyclic_time"); stop_watch_ptr_->tic("processing_time"); } @@ -96,25 +96,51 @@ TrtYoloXNode::TrtYoloXNode(const rclcpp::NodeOptions & node_options) ("Path to a file which contains path to images." "Those images will be used for int8 quantization.")); + std::string color_map_path = declare_parameter_with_description( + "color_map_path", "", ("Path to a file which contains path to color map.")); if (!readLabelFile(label_path)) { RCLCPP_ERROR(this->get_logger(), "Could not find label file"); rclcpp::shutdown(); } + + is_roi_overlap_segment_ = declare_parameter("is_roi_overlap_segment"); + is_publish_color_mask_ = declare_parameter("is_publish_color_mask"); + overlap_roi_score_threshold_ = declare_parameter("overlap_roi_score_threshold"); + roi_overlay_segment_labels_.UNKNOWN = + declare_parameter("roi_overlay_segment_label.UNKNOWN"); + roi_overlay_segment_labels_.CAR = declare_parameter("roi_overlay_segment_label.CAR"); + roi_overlay_segment_labels_.TRUCK = declare_parameter("roi_overlay_segment_label.TRUCK"); + roi_overlay_segment_labels_.BUS = declare_parameter("roi_overlay_segment_label.BUS"); + roi_overlay_segment_labels_.MOTORCYCLE = + declare_parameter("roi_overlay_segment_label.MOTORCYCLE"); + roi_overlay_segment_labels_.BICYCLE = + declare_parameter("roi_overlay_segment_label.BICYCLE"); + roi_overlay_segment_labels_.PEDESTRIAN = + declare_parameter("roi_overlay_segment_label.PEDESTRIAN"); + roi_overlay_segment_labels_.ANIMAL = declare_parameter("roi_overlay_segment_label.ANIMAL"); replaceLabelMap(); tensorrt_common::BuildConfig build_config( calibration_algorithm, dla_core_id, quantize_first_layer, quantize_last_layer, profile_per_layer, clip_value); + const double norm_factor = 1.0; + const std::string cache_dir = ""; + const tensorrt_common::BatchConfig batch_config{1, 1, 1}; + const size_t max_workspace_size = (1 << 30); + trt_yolox_ = std::make_unique( model_path, precision, label_map_.size(), score_threshold, nms_threshold, build_config, - preprocess_on_gpu, calibration_image_list_path); + preprocess_on_gpu, calibration_image_list_path, norm_factor, cache_dir, batch_config, + max_workspace_size, color_map_path); timer_ = rclcpp::create_timer(this, get_clock(), 100ms, std::bind(&TrtYoloXNode::onConnect, this)); objects_pub_ = this->create_publisher( "~/out/objects", 1); + mask_pub_ = image_transport::create_publisher(this, "~/out/mask"); + color_mask_pub_ = image_transport::create_publisher(this, "~/out/color_mask"); image_pub_ = image_transport::create_publisher(this, "~/out/image"); if (declare_parameter("build_only", false)) { @@ -129,7 +155,8 @@ void TrtYoloXNode::onConnect() if ( objects_pub_->get_subscription_count() == 0 && objects_pub_->get_intra_process_subscription_count() == 0 && - image_pub_.getNumSubscribers() == 0) { + image_pub_.getNumSubscribers() == 0 && mask_pub_.getNumSubscribers() == 0 && + color_mask_pub_.getNumSubscribers() == 0) { image_sub_.shutdown(); } else if (!image_sub_) { image_sub_ = image_transport::create_subscription( @@ -154,10 +181,16 @@ void TrtYoloXNode::onImage(const sensor_msgs::msg::Image::ConstSharedPtr msg) const auto height = in_image_ptr->image.rows; tensorrt_yolox::ObjectArrays objects; - if (!trt_yolox_->doInference({in_image_ptr->image}, objects)) { + std::vector masks = {cv::Mat(cv::Size(height, width), CV_8UC1, cv::Scalar(0))}; + std::vector color_masks = { + 
cv::Mat(cv::Size(height, width), CV_8UC3, cv::Scalar(0, 0, 0))}; + + if (!trt_yolox_->doInference({in_image_ptr->image}, objects, masks, color_masks)) { RCLCPP_WARN(this->get_logger(), "Fail to inference"); return; } + auto & mask = masks.at(0); + for (const auto & yolox_object : objects.at(0)) { tier4_perception_msgs::msg::DetectedObjectWithFeature object; object.feature.roi.x_offset = yolox_object.x_offset; @@ -177,9 +210,21 @@ void TrtYoloXNode::onImage(const sensor_msgs::msg::Image::ConstSharedPtr msg) cv::rectangle( in_image_ptr->image, cv::Point(left, top), cv::Point(right, bottom), cv::Scalar(0, 0, 255), 3, 8, 0); + // Refine mask: replacing segmentation mask by roi class + // This should remove when the segmentation accuracy is high + if (is_roi_overlap_segment_ && trt_yolox_->getMultitaskNum() > 0) { + overlapSegmentByRoi(yolox_object, mask, width, height); + } + } + // TODO(badai-nguyen): consider to change to 4bits data transfer + if (trt_yolox_->getMultitaskNum() > 0) { + sensor_msgs::msg::Image::SharedPtr out_mask_msg = + cv_bridge::CvImage(std_msgs::msg::Header(), sensor_msgs::image_encodings::MONO8, mask) + .toImageMsg(); + out_mask_msg->header = msg->header; + mask_pub_.publish(out_mask_msg); } image_pub_.publish(in_image_ptr->toImageMsg()); - out_objects.header = msg->header; objects_pub_->publish(out_objects); @@ -198,6 +243,16 @@ void TrtYoloXNode::onImage(const sensor_msgs::msg::Image::ConstSharedPtr msg) debug_publisher_->publish( "debug/pipeline_latency_ms", pipeline_latency_ms); } + + if (is_publish_color_mask_ && trt_yolox_->getMultitaskNum() > 0) { + cv::Mat color_mask = cv::Mat::zeros(mask.rows, mask.cols, CV_8UC3); + trt_yolox_->getColorizedMask(trt_yolox_->getColorMap(), mask, color_mask); + sensor_msgs::msg::Image::SharedPtr output_color_mask_msg = + cv_bridge::CvImage(std_msgs::msg::Header(), sensor_msgs::image_encodings::BGR8, color_mask) + .toImageMsg(); + output_color_mask_msg->header = msg->header; + color_mask_pub_.publish(output_color_mask_msg); + } } bool TrtYoloXNode::readLabelFile(const std::string & label_path) @@ -234,6 +289,37 @@ void TrtYoloXNode::replaceLabelMap() } } +int TrtYoloXNode::mapRoiLabel2SegLabel(const int32_t roi_label_index) +{ + if (roi_overlay_segment_labels_.isOverlay(static_cast(roi_label_index))) { + std::string label = label_map_[roi_label_index]; + + return remap_roi_to_semantic_[label]; + } + return -1; +} + +void TrtYoloXNode::overlapSegmentByRoi( + const tensorrt_yolox::Object & roi_object, cv::Mat & mask, const int orig_width, + const int orig_height) +{ + if (roi_object.score < overlap_roi_score_threshold_) return; + int seg_class_index = mapRoiLabel2SegLabel(roi_object.type); + if (seg_class_index < 0) return; + + const float scale_x = static_cast(mask.cols) / static_cast(orig_width); + const float scale_y = static_cast(mask.rows) / static_cast(orig_height); + const int roi_width = static_cast(roi_object.width * scale_x); + const int roi_height = static_cast(roi_object.height * scale_y); + const int roi_x_offset = static_cast(roi_object.x_offset * scale_x); + const int roi_y_offset = static_cast(roi_object.y_offset * scale_y); + + cv::Mat replace_roi( + cv::Size(roi_width, roi_height), mask.type(), static_cast(seg_class_index)); + replace_roi.copyTo(mask.colRange(roi_x_offset, roi_x_offset + roi_width) + .rowRange(roi_y_offset, roi_y_offset + roi_height)); +} + } // namespace tensorrt_yolox #include "rclcpp_components/register_node_macro.hpp" diff --git 
a/perception/tensorrt_yolox/src/yolox_single_image_inference_node.cpp b/perception/tensorrt_yolox/src/yolox_single_image_inference_node.cpp index 0657f0096b07e..360f41e470e38 100644 --- a/perception/tensorrt_yolox/src/yolox_single_image_inference_node.cpp +++ b/perception/tensorrt_yolox/src/yolox_single_image_inference_node.cpp @@ -47,7 +47,9 @@ class YoloXSingleImageInferenceNode : public rclcpp::Node auto trt_yolox = std::make_unique(model_path, precision); auto image = cv::imread(image_path); tensorrt_yolox::ObjectArrays objects; - trt_yolox->doInference({image}, objects); + std::vector masks; + std::vector color_masks; + trt_yolox->doInference({image}, objects, masks, color_masks); for (const auto & object : objects[0]) { const auto left = object.x_offset; const auto top = object.y_offset; From 8df02f53ea15c5e7ef1f00ae47bb644d2b86c2cb Mon Sep 17 00:00:00 2001 From: Ryuta Kambe Date: Mon, 24 Jun 2024 11:18:50 +0900 Subject: [PATCH 19/29] fix(tensorrt_yolox): fix unusedVariable warning (#7586) fix unusedVariable warning Signed-off-by: Ryuta Kambe --- perception/tensorrt_yolox/src/tensorrt_yolox.cpp | 1 - 1 file changed, 1 deletion(-) diff --git a/perception/tensorrt_yolox/src/tensorrt_yolox.cpp b/perception/tensorrt_yolox/src/tensorrt_yolox.cpp index af6cecd2eff7d..12229ad313bc4 100644 --- a/perception/tensorrt_yolox/src/tensorrt_yolox.cpp +++ b/perception/tensorrt_yolox/src/tensorrt_yolox.cpp @@ -821,7 +821,6 @@ bool TrtYoloX::doInferenceWithRoi( bool TrtYoloX::doMultiScaleInference( const cv::Mat & image, ObjectArrays & objects, const std::vector & rois) { - std::vector images; if (!trt_common_->isInitialized()) { return false; } From e5be1bef9223fc8cece55052561bd6f443f8e184 Mon Sep 17 00:00:00 2001 From: Ryuta Kambe Date: Mon, 24 Jun 2024 11:19:23 +0900 Subject: [PATCH 20/29] chore(ci): remove arrayIndexThenCheck (#7618) Signed-off-by: Ryuta Kambe --- .cppcheck_suppressions | 1 - 1 file changed, 1 deletion(-) diff --git a/.cppcheck_suppressions b/.cppcheck_suppressions index d6048f59d473f..6f64bd278f8a4 100644 --- a/.cppcheck_suppressions +++ b/.cppcheck_suppressions @@ -1,6 +1,5 @@ *:*/test/* -arrayIndexThenCheck checkersReport constParameterPointer constParameterReference From 5bead38619a69668828e67f4e610cb189b887748 Mon Sep 17 00:00:00 2001 From: Ryuta Kambe Date: Mon, 24 Jun 2024 11:24:14 +0900 Subject: [PATCH 21/29] fix(autoware_behavior_path_dynamic_obstacle_avoidance_module): fix bitwiseOnBoolean warning (#7636) Signed-off-by: veqcc --- .../src/scene.cpp | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/planning/behavior_path_planner/autoware_behavior_path_dynamic_obstacle_avoidance_module/src/scene.cpp b/planning/behavior_path_planner/autoware_behavior_path_dynamic_obstacle_avoidance_module/src/scene.cpp index 8b411340e4ddc..2bd52850d0a3f 100644 --- a/planning/behavior_path_planner/autoware_behavior_path_dynamic_obstacle_avoidance_module/src/scene.cpp +++ b/planning/behavior_path_planner/autoware_behavior_path_dynamic_obstacle_avoidance_module/src/scene.cpp @@ -1471,7 +1471,7 @@ DynamicObstacleAvoidanceModule::calcMinMaxLateralOffsetToAvoidRegulatedObject( return prev_object->lat_offset_to_avoid->min_value; }(); const double filtered_min_bound_lat_offset = - (prev_min_lat_avoid_to_offset.has_value() & enable_lowpass_filter) + (prev_min_lat_avoid_to_offset.has_value() && enable_lowpass_filter) ? 
signal_processing::lowpassFilter( min_bound_lat_offset, *prev_min_lat_avoid_to_offset, parameters_->lpf_gain_for_lat_avoid_to_offset) @@ -1556,7 +1556,7 @@ DynamicObstacleAvoidanceModule::calcMinMaxLateralOffsetToAvoidUnregulatedObject( return prev_object->lat_offset_to_avoid->min_value; }(); const double filtered_min_bound_pos = - (prev_min_lat_avoid_to_offset.has_value() & enable_lowpass_filter) + (prev_min_lat_avoid_to_offset.has_value() && enable_lowpass_filter) ? signal_processing::lowpassFilter( bound_pos.min_value, *prev_min_lat_avoid_to_offset, parameters_->lpf_gain_for_lat_avoid_to_offset) From ed0c45dd74b3257cc581826c29ec0b45eb522d02 Mon Sep 17 00:00:00 2001 From: Ryuta Kambe Date: Mon, 24 Jun 2024 13:00:59 +0900 Subject: [PATCH 22/29] fix(bytetrack): fix cppcheck warnings (#7576) * fix unreadVariable Signed-off-by: Ryuta Kambe * fix variableScope Signed-off-by: Ryuta Kambe * fix unusedVariable Signed-off-by: Ryuta Kambe * fix constParameterPointer Signed-off-by: Ryuta Kambe * fix unreadVariable Signed-off-by: Ryuta Kambe * style(pre-commit): autofix --------- Signed-off-by: Ryuta Kambe Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- perception/bytetrack/lib/src/lapjv.cpp | 5 +++-- perception/bytetrack/lib/src/strack.cpp | 9 --------- perception/bytetrack/lib/src/utils.cpp | 5 ++--- perception/bytetrack/src/bytetrack_visualizer_node.cpp | 4 ++-- 4 files changed, 7 insertions(+), 16 deletions(-) diff --git a/perception/bytetrack/lib/src/lapjv.cpp b/perception/bytetrack/lib/src/lapjv.cpp index ecf4c31b81ff8..1b8b39ccbb9f7 100644 --- a/perception/bytetrack/lib/src/lapjv.cpp +++ b/perception/bytetrack/lib/src/lapjv.cpp @@ -181,7 +181,8 @@ int_t _carr_dense( /** Find columns with minimum d[j] and put them on the SCAN list. */ -uint_t _find_dense(const uint_t n, uint_t lo, cost_t * d, int_t * cols, [[maybe_unused]] int_t * y) +uint_t _find_dense( + const uint_t n, uint_t lo, const cost_t * d, int_t * cols, [[maybe_unused]] int_t * y) { uint_t hi = lo + 1; cost_t mind = d[cols[lo]]; @@ -203,7 +204,7 @@ uint_t _find_dense(const uint_t n, uint_t lo, cost_t * d, int_t * cols, [[maybe_ // and try to decrease d of the TODO columns using the SCAN column. 
int_t _scan_dense( const uint_t n, cost_t * cost[], uint_t * plo, uint_t * phi, cost_t * d, int_t * cols, - int_t * pred, int_t * y, cost_t * v) + int_t * pred, const int_t * y, const cost_t * v) { uint_t lo = *plo; uint_t hi = *phi; diff --git a/perception/bytetrack/lib/src/strack.cpp b/perception/bytetrack/lib/src/strack.cpp index f2c5a40ad3c2e..81ba438faee44 100644 --- a/perception/bytetrack/lib/src/strack.cpp +++ b/perception/bytetrack/lib/src/strack.cpp @@ -112,12 +112,6 @@ void STrack::activate(int frame_id) this->track_id = this->next_id(); this->unique_id = boost::uuids::random_generator()(); - std::vector _tlwh_tmp(4); - _tlwh_tmp[0] = this->original_tlwh[0]; - _tlwh_tmp[1] = this->original_tlwh[1]; - _tlwh_tmp[2] = this->original_tlwh[2]; - _tlwh_tmp[3] = this->original_tlwh[3]; - std::vector xyah = tlwh_to_xyah(_tlwh_tmp); // init kf init_kalman_filter(); // reflect state @@ -132,7 +126,6 @@ void STrack::activate(int frame_id) void STrack::re_activate(STrack & new_track, int frame_id, bool new_id) { - std::vector xyah = tlwh_to_xyah(new_track.tlwh); // TODO(me): write kf update Eigen::MatrixXd measurement = Eigen::MatrixXd::Zero(_kf_parameters.dim_z, 1); measurement << new_track.tlwh[0], new_track.tlwh[1], new_track.tlwh[2], new_track.tlwh[3]; @@ -156,8 +149,6 @@ void STrack::update(STrack & new_track, int frame_id) this->frame_id = frame_id; this->tracklet_len++; - std::vector xyah = tlwh_to_xyah(new_track.tlwh); - // update // TODO(me): write update Eigen::MatrixXd measurement = Eigen::MatrixXd::Zero(_kf_parameters.dim_z, 1); diff --git a/perception/bytetrack/lib/src/utils.cpp b/perception/bytetrack/lib/src/utils.cpp index 64ad27cf36eba..e987bd36b8064 100644 --- a/perception/bytetrack/lib/src/utils.cpp +++ b/perception/bytetrack/lib/src/utils.cpp @@ -190,7 +190,6 @@ std::vector> ByteTracker::ious( // bbox_ious for (size_t k = 0; k < btlbrs.size(); k++) { - std::vector ious_tmp; float box_area = (btlbrs[k][2] - btlbrs[k][0] + 1) * (btlbrs[k][3] - btlbrs[k][1] + 1); for (size_t n = 0; n < atlbrs.size(); n++) { float iw = std::min(atlbrs[n][2], btlbrs[k][2]) - std::max(atlbrs[n][0], btlbrs[k][0]) + 1; @@ -277,8 +276,6 @@ double ByteTracker::lapjv( std::vector> cost_c; cost_c.assign(cost.begin(), cost.end()); - std::vector> cost_c_extended; - int n_rows = cost.size(); int n_cols = cost[0].size(); rowsol.resize(n_rows); @@ -296,6 +293,8 @@ double ByteTracker::lapjv( } if (extend_cost || cost_limit < LONG_MAX) { + std::vector> cost_c_extended; + n = n_rows + n_cols; cost_c_extended.resize(n); for (size_t i = 0; i < cost_c_extended.size(); i++) cost_c_extended[i].resize(n); diff --git a/perception/bytetrack/src/bytetrack_visualizer_node.cpp b/perception/bytetrack/src/bytetrack_visualizer_node.cpp index ce350b8c288da..cdb767568a510 100644 --- a/perception/bytetrack/src/bytetrack_visualizer_node.cpp +++ b/perception/bytetrack/src/bytetrack_visualizer_node.cpp @@ -128,14 +128,14 @@ void ByteTrackVisualizerNode::callback( } std::vector bboxes; - for (auto & feat_obj : rect_msg->feature_objects) { + for (const auto & feat_obj : rect_msg->feature_objects) { auto roi_msg = feat_obj.feature.roi; cv::Rect rect(roi_msg.x_offset, roi_msg.y_offset, roi_msg.width, roi_msg.height); bboxes.push_back(rect); } std::vector uuids; - for (auto & dynamic_obj : uuid_msg->objects) { + for (const auto & dynamic_obj : uuid_msg->objects) { auto uuid_msg = dynamic_obj.id.uuid; boost::uuids::uuid uuid_raw; std::copy(uuid_msg.begin(), uuid_msg.end(), uuid_raw.begin()); From 
22aaa47d6c5109c48b6241f1e6978c32b15bea4b Mon Sep 17 00:00:00 2001 From: Kotaro Yoshimoto Date: Mon, 24 Jun 2024 15:17:59 +0900 Subject: [PATCH 23/29] chore(typo): temoporary => temporary (#7645) Signed-off-by: Kotaro Yoshimoto --- sensing/gnss_poser/src/gnss_poser_core.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sensing/gnss_poser/src/gnss_poser_core.cpp b/sensing/gnss_poser/src/gnss_poser_core.cpp index 049063a72cd5c..7a3b40336ff3d 100644 --- a/sensing/gnss_poser/src/gnss_poser_core.cpp +++ b/sensing/gnss_poser/src/gnss_poser_core.cpp @@ -61,7 +61,7 @@ GNSSPoser::GNSSPoser(const rclcpp::NodeOptions & node_options) "gnss_pose_cov", rclcpp::QoS{1}); fixed_pub_ = create_publisher("gnss_fixed", rclcpp::QoS{1}); - // Set msg_gnss_ins_orientation_stamped_ with temoporary values (not to publish zero value + // Set msg_gnss_ins_orientation_stamped_ with temporary values (not to publish zero value // covariances) msg_gnss_ins_orientation_stamped_->orientation.rmse_rotation_x = 1.0; msg_gnss_ins_orientation_stamped_->orientation.rmse_rotation_y = 1.0; From d46a059902e7d3b8de26cd2c4307902395dc3c28 Mon Sep 17 00:00:00 2001 From: Ryuta Kambe Date: Mon, 24 Jun 2024 15:34:22 +0900 Subject: [PATCH 24/29] fix(autoware_obstacle_cruise_planner): fix knownConditionTrueFalse warnings (#7620) * fix(autoware_obstacle_cruise_planner): fix knownConditionTrueFalse warnings Signed-off-by: Ryuta Kambe * fix Signed-off-by: Ryuta Kambe --------- Signed-off-by: Ryuta Kambe --- .../optimization_based_planner.cpp | 15 ++++++++++----- .../src/planner_interface.cpp | 2 +- 2 files changed, 11 insertions(+), 6 deletions(-) diff --git a/planning/autoware_obstacle_cruise_planner/src/optimization_based_planner/optimization_based_planner.cpp b/planning/autoware_obstacle_cruise_planner/src/optimization_based_planner/optimization_based_planner.cpp index 0e9a27622fbd9..b3a0fc014c410 100644 --- a/planning/autoware_obstacle_cruise_planner/src/optimization_based_planner/optimization_based_planner.cpp +++ b/planning/autoware_obstacle_cruise_planner/src/optimization_based_planner/optimization_based_planner.cpp @@ -339,16 +339,21 @@ std::tuple OptimizationBasedPlanner::calcInitialMotion( const auto stop_dist = autoware::motion_utils::calcDistanceToForwardStopPoint( input_traj_points, ego_pose, ego_nearest_param_.dist_threshold, ego_nearest_param_.yaw_threshold); - if ((stop_dist && *stop_dist > stop_dist_to_prohibit_engage_) || !stop_dist) { - initial_vel = engage_velocity_; - initial_acc = engage_acceleration_; + if (!stop_dist.has_value()) { + RCLCPP_DEBUG( + rclcpp::get_logger("ObstacleCruisePlanner::OptimizationBasedPlanner"), + "calcInitialMotion : vehicle speed is low (%.3f), and desired speed is high (%.3f). Use " + "engage speed (%.3f) until vehicle speed reaches engage_vel_thr (%.3f)", + vehicle_speed, target_vel, engage_velocity_, engage_vel_thr); + return std::make_tuple(engage_velocity_, engage_acceleration_); + } else if (stop_dist.value() > stop_dist_to_prohibit_engage_) { RCLCPP_DEBUG( rclcpp::get_logger("ObstacleCruisePlanner::OptimizationBasedPlanner"), "calcInitialMotion : vehicle speed is low (%.3f), and desired speed is high (%.3f). Use " "engage speed (%.3f) until vehicle speed reaches engage_vel_thr (%.3f). 
stop_dist = %.3f", vehicle_speed, target_vel, engage_velocity_, engage_vel_thr, stop_dist.value()); - return std::make_tuple(initial_vel, initial_acc); - } else if (stop_dist) { + return std::make_tuple(engage_velocity_, engage_acceleration_); + } else { RCLCPP_DEBUG( rclcpp::get_logger("ObstacleCruisePlanner::OptimizationBasedPlanner"), "calcInitialMotion : stop point is close (%.3f[m]). no engage.", stop_dist.value()); diff --git a/planning/autoware_obstacle_cruise_planner/src/planner_interface.cpp b/planning/autoware_obstacle_cruise_planner/src/planner_interface.cpp index 18e0fc7f5bb03..a3fe0948782a1 100644 --- a/planning/autoware_obstacle_cruise_planner/src/planner_interface.cpp +++ b/planning/autoware_obstacle_cruise_planner/src/planner_interface.cpp @@ -637,7 +637,7 @@ std::vector PlannerInterface::generateSlowDownTrajectory( slow_down_debug_multi_array_.data.push_back(feasible_slow_down_vel); slow_down_debug_multi_array_.data.push_back(stable_slow_down_vel); slow_down_debug_multi_array_.data.push_back(slow_down_start_idx ? *slow_down_start_idx : -1.0); - slow_down_debug_multi_array_.data.push_back(slow_down_end_idx ? *slow_down_end_idx : -1.0); + slow_down_debug_multi_array_.data.push_back(*slow_down_end_idx); // add virtual wall if (slow_down_start_idx && slow_down_end_idx) { From cfdd868cbbd5c67632224272459176b39fe5b902 Mon Sep 17 00:00:00 2001 From: Ryuta Kambe Date: Mon, 24 Jun 2024 16:18:59 +0900 Subject: [PATCH 25/29] chore(ci): update cppcheck-daily (#7648) Signed-off-by: veqcc --- .github/workflows/cppcheck-daily.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/cppcheck-daily.yaml b/.github/workflows/cppcheck-daily.yaml index df845b2154929..03ea75105b360 100644 --- a/.github/workflows/cppcheck-daily.yaml +++ b/.github/workflows/cppcheck-daily.yaml @@ -22,7 +22,7 @@ jobs: continue-on-error: true id: cppcheck run: | - cppcheck --enable=all --inconclusive --check-level=exhaustive --error-exitcode=1 --xml . 2> cppcheck-report.xml + cppcheck --enable=all --inconclusive --check-level=exhaustive --suppress=*:*/test/* --error-exitcode=1 --xml . 
2> cppcheck-report.xml shell: bash - name: Count errors by error ID and severity From 953d9293f0e4e857b11274126fe75eb845820e01 Mon Sep 17 00:00:00 2001 From: Kotaro Yoshimoto Date: Mon, 24 Jun 2024 16:19:49 +0900 Subject: [PATCH 26/29] chore(ci): change way to modified files in clang-tidy-differential (#7639) * chore(ci): fetch full depth in clang-tidy-differential Signed-off-by: Kotaro Yoshimoto * chore(ci): use raw git to get modified files Signed-off-by: Kotaro Yoshimoto * chore(ci): fix deprecated GitHub actions variable operation Signed-off-by: Kotaro Yoshimoto * chore(ci): delete unnecessary steps Signed-off-by: Kotaro Yoshimoto * refactor(ci): unite multiple command line into one line Signed-off-by: Kotaro Yoshimoto --------- Signed-off-by: Kotaro Yoshimoto --- .github/workflows/clang-tidy-differential.yaml | 18 ++++++------------ 1 file changed, 6 insertions(+), 12 deletions(-) diff --git a/.github/workflows/clang-tidy-differential.yaml b/.github/workflows/clang-tidy-differential.yaml index a59287a6d9b37..e2e4fc478e0ee 100644 --- a/.github/workflows/clang-tidy-differential.yaml +++ b/.github/workflows/clang-tidy-differential.yaml @@ -20,14 +20,10 @@ jobs: runs-on: ubuntu-latest container: ghcr.io/autowarefoundation/autoware:latest-prebuilt-cuda steps: - - name: Set PR fetch depth - run: echo "PR_FETCH_DEPTH=$(( ${{ github.event.pull_request.commits }} + 1 ))" >> "${GITHUB_ENV}" - - name: Checkout PR branch and all PR commits uses: actions/checkout@v4 with: - ref: ${{ github.event.pull_request.head.sha }} - fetch-depth: ${{ env.PR_FETCH_DEPTH }} + fetch-depth: 0 - name: Show disk space before the tasks run: df -h @@ -41,19 +37,17 @@ jobs: - name: Get modified files id: get-modified-files - uses: tj-actions/changed-files@v42 - with: - files: | - **/*.cpp - **/*.hpp + run: | + echo "changed_files=$(git diff --name-only "origin/${{ github.base_ref }}"...HEAD | grep -E '\.(cpp|hpp)$' || true)" >> $GITHUB_OUTPUT + shell: bash - name: Run clang-tidy - if: ${{ steps.get-modified-files.outputs.all_changed_files != '' }} + if: ${{ steps.get-modified-files.outputs.changed_files != '' }} uses: autowarefoundation/autoware-github-actions/clang-tidy@v1 with: rosdistro: humble target-packages: ${{ steps.get-modified-packages.outputs.modified-packages }} - target-files: ${{ steps.get-modified-files.outputs.all_changed_files }} + target-files: ${{ steps.get-modified-files.outputs.changed_files }} clang-tidy-config-url: https://raw.githubusercontent.com/autowarefoundation/autoware/main/.clang-tidy build-depends-repos: build_depends.repos From f15e5d6539931345d8120723220aec796311a832 Mon Sep 17 00:00:00 2001 From: Kotaro Yoshimoto Date: Mon, 24 Jun 2024 16:22:48 +0900 Subject: [PATCH 27/29] chore(cspell): ignore some words in .cppcheck_suppressions (#7647) Signed-off-by: Kotaro Yoshimoto --- .cppcheck_suppressions | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.cppcheck_suppressions b/.cppcheck_suppressions index 6f64bd278f8a4..f8ab02136d38c 100644 --- a/.cppcheck_suppressions +++ b/.cppcheck_suppressions @@ -6,6 +6,7 @@ constParameterReference constVariable constVariableReference containerOutOfBounds +// cspell: ignore cstyle cstyleCast duplicateBranch duplicateBreak @@ -27,6 +28,7 @@ shadowArgument shadowFunction shadowVariable syntaxError +// cspell: ignore uninit uninitMemberVar unknownMacro unmatchedSuppression From d563f1dd813591d66083a65505e6ca8ab093818d Mon Sep 17 00:00:00 2001 From: Koichi98 <45482193+Koichi98@users.noreply.github.com> Date: Mon, 24 Jun 2024 17:54:03 +0900 Subject: 
[PATCH 28/29] fix(autoware_obstacle_cruise_planner): fix shadowVariable warning (#7656) * fix(autoware_obstacle_cruise_planner): fix shadowVariable warning Signed-off-by: Koichi Imai * fix Signed-off-by: Koichi Imai * style(pre-commit): autofix --------- Signed-off-by: Koichi Imai Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- .../src/pid_based_planner/pid_based_planner.cpp | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/planning/autoware_obstacle_cruise_planner/src/pid_based_planner/pid_based_planner.cpp b/planning/autoware_obstacle_cruise_planner/src/pid_based_planner/pid_based_planner.cpp index 345265b1d20b0..d0736c6158a7a 100644 --- a/planning/autoware_obstacle_cruise_planner/src/pid_based_planner/pid_based_planner.cpp +++ b/planning/autoware_obstacle_cruise_planner/src/pid_based_planner/pid_based_planner.cpp @@ -509,7 +509,6 @@ std::vector PIDBasedPlanner::getAccelerationLimitedTrajectory( const double v0, const double a0, const double target_acc, const double target_jerk_ratio) const { // calculate vt function - const auto & i = longitudinal_info_; const auto vt = [&]( const double v0, const double a0, const double jerk, const double t, const double target_acc) { @@ -541,8 +540,8 @@ std::vector PIDBasedPlanner::getAccelerationLimitedTrajectory( // Therefore, we calculate v1 (velocity at next step) and use it for initial velocity. const double v1 = v0; // + 0.1; // a0 * 0.1; // + jerk * 0.1 * 0.1 / 2.0; const double a1 = a0; // + jerk * 0.1; - const double jerk = - target_acc > a1 ? i.max_jerk * target_jerk_ratio : i.min_jerk * target_jerk_ratio; + const double jerk = target_acc > a1 ? longitudinal_info_.max_jerk * target_jerk_ratio + : longitudinal_info_.min_jerk * target_jerk_ratio; double t_current = 0.0; std::vector s_vec{0.0}; From 2a3eaee333e6437743b9b9a352e91d51455f07c4 Mon Sep 17 00:00:00 2001 From: Koichi98 <45482193+Koichi98@users.noreply.github.com> Date: Mon, 24 Jun 2024 18:37:23 +0900 Subject: [PATCH 29/29] fix(autoware_obstacle_cruise_planner): fix shadowVariable warning in generateSlowDownTrajectory (#7659) Signed-off-by: Koichi Imai --- .../src/planner_interface.cpp | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/planning/autoware_obstacle_cruise_planner/src/planner_interface.cpp b/planning/autoware_obstacle_cruise_planner/src/planner_interface.cpp index a3fe0948782a1..516bf5579b82c 100644 --- a/planning/autoware_obstacle_cruise_planner/src/planner_interface.cpp +++ b/planning/autoware_obstacle_cruise_planner/src/planner_interface.cpp @@ -623,9 +623,9 @@ std::vector PlannerInterface::generateSlowDownTrajectory( }(); // insert slow down velocity between slow start and end - for (size_t i = (slow_down_start_idx ? *slow_down_start_idx : 0); i <= *slow_down_end_idx; - ++i) { - auto & traj_point = slow_down_traj_points.at(i); + for (size_t j = (slow_down_start_idx ? *slow_down_start_idx : 0); j <= *slow_down_end_idx; + ++j) { + auto & traj_point = slow_down_traj_points.at(j); traj_point.longitudinal_velocity_mps = std::min(traj_point.longitudinal_velocity_mps, static_cast(stable_slow_down_vel)); }
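For readers unfamiliar with the cppcheck diagnostic targeted by the last two patches, a minimal, self-contained illustration of the `shadowVariable` pattern (not taken from the Autoware code base) is shown below; the fix mirrors the change above, giving the inner index its own name such as `j`.

```cpp
#include <cstddef>
#include <vector>

// The inner loop reuses the outer index name, silently hiding it; cppcheck
// reports this as "shadowVariable" even though the code compiles and runs.
int count_pairs(const std::vector<int> & values)
{
  int pairs = 0;
  for (std::size_t i = 0; i < values.size(); ++i) {
    for (std::size_t i = 0; i < values.size(); ++i) {  // shadows the outer `i`
      ++pairs;
    }
  }
  return pairs;
}
```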