Merge pull request koide3#214 from TirelessDev/map-loading
Map loading and Transform/Odom topic refactoring
koide3 authored Feb 28, 2023
2 parents 1e7a70b + 8f6f68a commit 6247b2b
Showing 8 changed files with 182 additions and 23 deletions.
1 change: 1 addition & 0 deletions CMakeLists.txt
@@ -69,6 +69,7 @@ add_message_files(FILES

add_service_files(FILES
SaveMap.srv
LoadGraph.srv
DumpGraph.srv
)

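The new LoadGraph.srv file itself is not shown in this diff; judging from how the nodelet below reads req.path and sets res.success, a minimal definition would look roughly like the following sketch (field layout assumed, not confirmed by the diff):

    # LoadGraph.srv (sketch)
    string path
    ---
    bool success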
131 changes: 128 additions & 3 deletions apps/hdl_graph_slam_nodelet.cpp
@@ -35,6 +35,7 @@
#include <hdl_graph_slam/FloorCoeffs.h>

#include <hdl_graph_slam/SaveMap.h>
#include <hdl_graph_slam/LoadGraph.h>
#include <hdl_graph_slam/DumpGraph.h>

#include <nodelet/nodelet.h>
@@ -75,6 +76,7 @@ class HdlGraphSlamNodelet : public nodelet::Nodelet {
private_nh = getPrivateNodeHandle();

// init parameters
published_odom_topic = private_nh.param<std::string>("published_odom_topic", "/odom");
map_frame_id = private_nh.param<std::string>("map_frame_id", "map");
odom_frame_id = private_nh.param<std::string>("odom_frame_id", "odom");
map_cloud_resolution = private_nh.param<double>("map_cloud_resolution", 0.05);
@@ -107,7 +109,7 @@ class HdlGraphSlamNodelet : public nodelet::Nodelet {
points_topic = private_nh.param<std::string>("points_topic", "/velodyne_points");

// subscribers
odom_sub.reset(new message_filters::Subscriber<nav_msgs::Odometry>(mt_nh, "/odom", 256));
odom_sub.reset(new message_filters::Subscriber<nav_msgs::Odometry>(mt_nh, published_odom_topic, 256));
cloud_sub.reset(new message_filters::Subscriber<sensor_msgs::PointCloud2>(mt_nh, "/filtered_points", 32));
sync.reset(new message_filters::Synchronizer<ApproxSyncPolicy>(ApproxSyncPolicy(32), *odom_sub, *cloud_sub));
sync->registerCallback(boost::bind(&HdlGraphSlamNodelet::cloud_callback, this, _1, _2));
@@ -126,6 +128,8 @@ class HdlGraphSlamNodelet : public nodelet::Nodelet {
map_points_pub = mt_nh.advertise<sensor_msgs::PointCloud2>("/hdl_graph_slam/map_points", 1, true);
read_until_pub = mt_nh.advertise<std_msgs::Header>("/hdl_graph_slam/read_until", 32);


load_service_server = mt_nh.advertiseService("/hdl_graph_slam/load", &HdlGraphSlamNodelet::load_service, this);
dump_service_server = mt_nh.advertiseService("/hdl_graph_slam/dump", &HdlGraphSlamNodelet::dump_service, this);
save_map_service_server = mt_nh.advertiseService("/hdl_graph_slam/save_map", &HdlGraphSlamNodelet::save_map_service, this);

@@ -188,6 +192,7 @@ class HdlGraphSlamNodelet : public nodelet::Nodelet {
Eigen::Isometry3d odom2map(trans_odom2map.cast<double>());
trans_odom2map_mutex.unlock();

std::cout << "flush_keyframe_queue - keyframes len:"<< keyframes.size() << std::endl;
int num_processed = 0;
for(int i = 0; i < std::min<int>(keyframe_queue.size(), max_keyframes_per_update); i++) {
num_processed = i;
@@ -203,6 +208,7 @@ class HdlGraphSlamNodelet : public nodelet::Nodelet {

// fix the first node
if(keyframes.empty() && new_keyframes.size() == 1) {

if(private_nh.param<bool>("fix_first_node", false)) {
Eigen::MatrixXd inf = Eigen::MatrixXd::Identity(6, 6);
std::stringstream sst(private_nh.param<std::string>("fix_first_node_stddev", "1 1 1 1 1 1"));
@@ -802,6 +808,121 @@ class HdlGraphSlamNodelet : public nodelet::Nodelet {
return markers;
}


/**
* @brief load all data from a directory
* @param req
* @param res
* @return
*/
bool load_service(hdl_graph_slam::LoadGraphRequest& req, hdl_graph_slam::LoadGraphResponse& res) {
std::lock_guard<std::mutex> lock(main_thread_mutex);

std::string directory = req.path;

std::cout << "loading data from:" << directory << std::endl;

// Load graph.
graph_slam->load(directory + "/graph.g2o");

// Iterate over the items in this directory and count how many subdirectories there are.
// This gives an upper limit on how many keyframe indices we can expect to find.
boost::filesystem::directory_iterator begin(directory), end;
int max_directory_count = std::count_if(begin, end,
[](const boost::filesystem::directory_entry & d) {
return boost::filesystem::is_directory(d.path()); // only return true if a directory
});

// Load keyframes by looping through key frame indexes that we expect to see.
for(int i = 0; i < max_directory_count; i++) {
std::stringstream sst;
sst << boost::format("%s/%06d") % directory % i;
std::string key_frame_directory = sst.str();

// If key_frame_directory doesn't exist, then we have run out of keyframes, so stop looking.
if(!boost::filesystem::is_directory(key_frame_directory)) {
break;
}

KeyFrame::Ptr keyframe(new KeyFrame(key_frame_directory, graph_slam->graph.get()));
keyframes.push_back(keyframe);
}
std::cout << "loaded " << keyframes.size() << " keyframes" <<std::endl;

// Load special nodes.
std::ifstream ifs(directory + "/special_nodes.csv");
if(!ifs) {
return false;
}
while(!ifs.eof()) {
std::string token;
ifs >> token;
if(token == "anchor_node") {
int id = 0;
ifs >> id;
anchor_node = static_cast<g2o::VertexSE3*>(graph_slam->graph->vertex(id));
} else if(token == "anchor_edge") {
int id = 0;
ifs >> id;
// We have no way of directly pulling the edge based on the edge ID that we have just read in.
// Fortunately, anchor edges are always attached to the anchor node, so we can iterate over
// the edges listed against the node until we find the one that matches our ID.
if(anchor_node){
auto edges = anchor_node->edges();

for(auto e : edges) {
int edgeID = e->id();
if (edgeID == id){
anchor_edge = static_cast<g2o::EdgeSE3*>(e);

break;
}
}
}
} else if(token == "floor_node") {
int id = 0;
ifs >> id;
floor_plane_node = static_cast<g2o::VertexPlane*>(graph_slam->graph->vertex(id));
}
}

// Check whether any of the special nodes are non-null; if all are null, there is nothing to report.
if(anchor_node || anchor_edge || floor_plane_node) {
std::cout << "loaded special nodes - ";

// check that a node exists before printing information about it
if(anchor_node) {
std::cout << " anchor_node: " << anchor_node->id();
}
if(anchor_edge) {
std::cout << " anchor_edge: " << anchor_edge->id();
}
if(floor_plane_node) {
std::cout << " floor_node: " << floor_plane_node->id();
}

// finish with a new line
std::cout << std::endl;
}

// Update the keyframe snapshot so a map update can be published; setting graph_updated = true triggers it.
std::vector<KeyFrameSnapshot::Ptr> snapshot(keyframes.size());

std::transform(keyframes.begin(), keyframes.end(), snapshot.begin(), [=](const KeyFrame::Ptr& k) { return std::make_shared<KeyFrameSnapshot>(k); });

keyframes_snapshot_mutex.lock();
keyframes_snapshot.swap(snapshot);
keyframes_snapshot_mutex.unlock();
graph_updated = true;

res.success = true;

std::cout << "snapshot updated" << std::endl << "loading successful" <<std::endl;

return true;
}
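
// For reference, the parsing loop above expects special_nodes.csv to contain whitespace-separated
// token/ID pairs, one per special node, along the lines of the sketch below (IDs illustrative;
// the exact file written by the dump side is assumed rather than shown in this diff):
//   anchor_node 0
//   anchor_edge 1
//   floor_node 2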


/**
* @brief dump all data to the current directory
* @param req
@@ -826,9 +947,11 @@ class HdlGraphSlamNodelet : public nodelet::Nodelet {
boost::filesystem::create_directory(directory);
}

std::cout << "all data dumped to:" << directory << std::endl;

std::cout << "dumping data to:" << directory << std::endl;
// save graph
graph_slam->save(directory + "/graph.g2o");

// save keyframes
for(int i = 0; i < keyframes.size(); i++) {
std::stringstream sst;
sst << boost::format("%s/%06d") % directory % i;
@@ -910,6 +1033,7 @@ class HdlGraphSlamNodelet : public nodelet::Nodelet {

ros::Publisher markers_pub;

std::string published_odom_topic;
std::string map_frame_id;
std::string odom_frame_id;

@@ -923,6 +1047,7 @@ class HdlGraphSlamNodelet : public nodelet::Nodelet {

tf::TransformListener tf_listener;

ros::ServiceServer load_service_server;
ros::ServiceServer dump_service_server;
ros::ServiceServer save_map_service_server;

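With this change a previously dumped graph can be restored into a running instance through the new service. A typical round trip might look like the sketch below (the dump service's destination field name and the directory path are assumptions; only the load service's path field is visible in this diff):

    # dump the current pose graph and keyframes to a directory
    rosservice call /hdl_graph_slam/dump "destination: '/tmp/hdl_dump'"
    # restore that directory into a (re)started hdl_graph_slam instance
    rosservice call /hdl_graph_slam/load "path: '/tmp/hdl_dump'"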
4 changes: 3 additions & 1 deletion apps/scan_matching_odometry_nodelet.cpp
@@ -52,7 +52,7 @@ class ScanMatchingOdometryNodelet : public nodelet::Nodelet {

points_sub = nh.subscribe("/filtered_points", 256, &ScanMatchingOdometryNodelet::cloud_callback, this);
read_until_pub = nh.advertise<std_msgs::Header>("/scan_matching_odometry/read_until", 32);
odom_pub = nh.advertise<nav_msgs::Odometry>("/odom", 32);
odom_pub = nh.advertise<nav_msgs::Odometry>(published_odom_topic, 32);
trans_pub = nh.advertise<geometry_msgs::TransformStamped>("/scan_matching_odometry/transform", 32);
status_pub = private_nh.advertise<ScanMatchingStatus>("/scan_matching_odometry/status", 8);
aligned_points_pub = nh.advertise<sensor_msgs::PointCloud2>("/aligned_points", 32);
@@ -64,6 +64,7 @@
*/
void initialize_params() {
auto& pnh = private_nh;
published_odom_topic = private_nh.param<std::string>("published_odom_topic", "/odom");
points_topic = pnh.param<std::string>("points_topic", "/velodyne_points");
odom_frame_id = pnh.param<std::string>("odom_frame_id", "odom");
robot_odom_frame_id = pnh.param<std::string>("robot_odom_frame_id", "robot_odom");
@@ -350,6 +351,7 @@ class ScanMatchingOdometryNodelet : public nodelet::Nodelet {
tf::TransformBroadcaster odom_broadcaster;
tf::TransformBroadcaster keyframe_broadcaster;

std::string published_odom_topic;
std::string points_topic;
std::string odom_frame_id;
std::string robot_odom_frame_id;
40 changes: 30 additions & 10 deletions launch/hdl_graph_slam.launch
@@ -7,34 +7,46 @@
<arg name="enable_imu_acc" default="false" />
<arg name="enable_imu_ori" default="false" />

<!-- hdl_graph_slam publishes its estimated pose as transforms with the following structure -->
<!-- ***** map -> odom -> base_link -> velodyne ***** -->
<!-- the frame names can be set with the following arguments -->
<!-- topic on which hdl_graph_slam listens for the velodyne points -->
<arg name="points_topic" default="/velodyne_points" />
<!-- what should the map frame be called -->
<arg name="map_frame_id" default="map" />
<!-- what should the base link frame be called -->
<arg name="base_link_frame_id" default="base_link" />
<!-- what should the odom frame be called -->
<arg name="lidar_odom_frame_id" default="odom" />

<!-- hdl_graph_slam will also publish an odometry message -->
<!-- topic on which the odometry messages published by hdl_graph_slam will appear -->
<arg name="published_odom_topic" default="/odom" />

<!-- optional arguments -->
<arg name="enable_robot_odometry_init_guess" default="false" />
<arg name="robot_odom_frame_id" default="robot_odom" />

<!-- transformation between lidar and base_link -->
<node pkg="tf" type="static_transform_publisher" name="lidar2base_publisher" args="0 0 0 0 0 0 base_link velodyne 10" />
<node pkg="tf" type="static_transform_publisher" name="lidar2base_publisher" args="0 0 0 0 0 0 $(arg base_link_frame_id) velodyne 10" />

<!-- in case you use velodyne_driver, comment out the following line -->
<node pkg="nodelet" type="nodelet" name="$(arg nodelet_manager)" args="manager" output="screen"/>
<node pkg="nodelet" type="nodelet" name="$(arg nodelet_manager)" args="manager" output="screen"/>

<!-- prefiltering_nodelet -->
<node pkg="nodelet" type="nodelet" name="prefiltering_nodelet" args="load hdl_graph_slam/PrefilteringNodelet $(arg nodelet_manager)">
<remap from="/velodyne_points" to="$(arg points_topic)" />
<!-- in case base_link_frame is blank, mapping will be performed in the lidar frame -->
<param name="base_link_frame" value="base_link" />
<param name="base_link_frame" value="$(arg base_link_frame_id)" />
<!-- distance filter -->
<param name="use_distance_filter" value="true" />
<param name="use_distance_filter" value="false" />
<param name="distance_near_thresh" value="0.1" />
<param name="distance_far_thresh" value="100.0" />
<param name="distance_far_thresh" value="1000.0" />
<!-- NONE, VOXELGRID, or APPROX_VOXELGRID -->
<param name="downsample_method" value="VOXELGRID" />
<param name="downsample_resolution" value="0.1" />
<!-- NONE, RADIUS, or STATISTICAL -->
<param name="outlier_removal_method" value="RADIUS" />
<param name="outlier_removal_method" value="NONE" />
<param name="statistical_mean_k" value="30" />
<param name="statistical_stddev" value="1.2" />
<param name="radius_radius" value="0.5" />
@@ -44,6 +56,8 @@
<!-- scan_matching_odometry_nodelet -->
<node pkg="nodelet" type="nodelet" name="scan_matching_odometry_nodelet" args="load hdl_graph_slam/ScanMatchingOdometryNodelet $(arg nodelet_manager)">
<param name="points_topic" value="$(arg points_topic)" />
<!-- published_odom_topic -->
<param name="published_odom_topic" value="$(arg published_odom_topic)" />
<param name="odom_frame_id" value="$(arg lidar_odom_frame_id)" />
<param name="robot_odom_frame_id" value="$(arg robot_odom_frame_id)" />
<param name="keyframe_delta_trans" value="1.0" />
@@ -82,6 +96,8 @@
<!-- hdl_graph_slam_nodelet -->
<node pkg="nodelet" type="nodelet" name="hdl_graph_slam_nodelet" args="load hdl_graph_slam/HdlGraphSlamNodelet $(arg nodelet_manager)">
<param name="points_topic" value="$(arg points_topic)" />
<!-- published_odom_topic -->
<param name="published_odom_topic" value="$(arg published_odom_topic)" />
<!-- frame settings -->
<param name="map_frame_id" value="$(arg map_frame_id)" />
<param name="odom_frame_id" value="$(arg lidar_odom_frame_id)" />
@@ -102,13 +118,14 @@
<param name="fix_first_node_stddev" value="10 10 10 1 1 1"/>
<param name="fix_first_node_adaptive" value="true"/>
<!-- loop closure params -->
<param name="distance_thresh" value="10.0" />
<param name="accum_distance_thresh" value="15.0" />
<param name="distance_thresh" value="20.0" />
<param name="accum_distance_thresh" value="35.0" />
<param name="min_edge_interval" value="5.0" />
<param name="fitness_score_thresh" value="0.5" />

<!-- scan matching params -->
<param name="registration_method" value="FAST_GICP" />
<param name="reg_num_threads" value="0" />
<param name="reg_num_threads" value="4" />
<param name="reg_transformation_epsilon" value="0.01"/>
<param name="reg_maximum_iterations" value="64"/>
<param name="reg_max_correspondence_distance" value="2.5"/>
@@ -152,5 +169,8 @@
<param name="map_cloud_resolution" value="0.05" />
</node>

<node pkg="hdl_graph_slam" type="map2odom_publisher.py" name="map2odom_publisher" />
<node pkg="hdl_graph_slam" type="map2odom_publisher.py" name="map2odom_publisher" >
<param name="map_frame_id" value="$(arg map_frame_id)" />
<param name="odom_frame_id" value="$(arg lidar_odom_frame_id)" />
</node>
</launch>
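Because published_odom_topic is now a launch argument, the odometry topic shared by the scan matching and graph SLAM nodelets can be renamed without editing the launch file, for example (topic name purely illustrative):

    roslaunch hdl_graph_slam hdl_graph_slam.launch published_odom_topic:=/hdl_odom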
4 changes: 2 additions & 2 deletions launch/hdl_graph_slam_imu.launch
@@ -66,13 +66,13 @@
<param name="downsample_method" value="NONE" />
<param name="downsample_resolution" value="0.1" />
<!-- ICP, GICP, NDT, GICP_OMP, or NDT_OMP(recommended) -->
<param name="registration_method" value="GICP" />
<param name="registration_method" value="NDT_OMP" />
<param name="transformation_epsilon" value="0.01"/>
<param name="maximum_iterations" value="64"/>
<param name="use_reciprocal_correspondences" value="false"/>
<param name="gicp_correspondence_randomness" value="20"/>
<param name="gicp_max_optimizer_iterations" value="20"/>
<param name="ndt_resolution" value="1.0" />
<param name="ndt_resolution" value="10.0" />
<param name="ndt_num_threads" value="0" />
<param name="ndt_nn_search_method" value="DIRECT7" />
</node>
8 changes: 4 additions & 4 deletions launch/hdl_graph_slam_kitti.launch
@@ -3,7 +3,7 @@
<!-- arguments -->
<arg name="nodelet_manager" default="velodyne_nodelet_manager" />
<arg name="enable_floor_detection" default="true" />
<arg name="enable_gps" default="false" />
<arg name="enable_gps" default="true" />
<arg name="enable_imu_acc" default="false" />
<arg name="enable_imu_ori" default="false" />
<arg name="points_topic" default="/velodyne_points" />
@@ -66,7 +66,7 @@
<param name="sensor_height" value="3.0" />
<param name="height_clip_range" value="1.0" />
<param name="floor_pts_thresh" value="512" />
<param name="use_normal_filtering" value="true" />
<param name="use_normal_filtering" value="false" />
<param name="normal_filter_thresh" value="20.0" />
</node>

@@ -138,8 +138,8 @@
<param name="min_stddev_q" value="0.05" />
<param name="max_stddev_q" value="0.2" />
<!-- update params -->
<param name="graph_update_interval" value="3.0" />
<param name="map_cloud_update_interval" value="10.0" />
<param name="graph_update_interval" value="1.5" />
<param name="map_cloud_update_interval" value="5.0" />
<param name="map_cloud_resolution" value="0.05" />
</node>
