// SPDX-License-Identifier: BSD-2-Clause

#include <memory>
#include <iostream>

#include <ros/ros.h>
#include <ros/time.h>
#include <ros/duration.h>
#include <pcl_ros/point_cloud.h>
#include <tf_conversions/tf_eigen.h>
#include <tf/transform_listener.h>
#include <tf/transform_broadcaster.h>

#include <std_msgs/Time.h>
#include <nav_msgs/Odometry.h>
#include <sensor_msgs/PointCloud2.h>
#include <geometry_msgs/TransformStamped.h>
#include <geometry_msgs/PoseWithCovarianceStamped.h>

#include <nodelet/nodelet.h>
#include <pluginlib/class_list_macros.h>

#include <pcl/filters/voxel_grid.h>
#include <pcl/filters/passthrough.h>
#include <pcl/filters/approximate_voxel_grid.h>

#include <hdl_graph_slam/ros_utils.hpp>
#include <hdl_graph_slam/registrations.hpp>
#include "hdl_graph_slam/ScanMatchingStatus.h"
#include "sensor_msgs/LaserScan.h"

namespace hdl_graph_slam {

class ScanMatchingOdometryNodelet : public nodelet::Nodelet {
public:
  typedef pcl::PointXYZI PointT;
  EIGEN_MAKE_ALIGNED_OPERATOR_NEW

  ScanMatchingOdometryNodelet() {}
  virtual ~ScanMatchingOdometryNodelet() {}

  virtual void onInit() {  // initialization entry point
    NODELET_DEBUG("initializing scan_matching_odometry_nodelet...");
    nh = getNodeHandle();                 // node handle
    private_nh = getPrivateNodeHandle();  // private node handle
    initialize_params();                  // parameter initialization

    if(private_nh.param("enable_imu_frontend", false)) {  // set to true in the .launch file, i.e. use the IMU information (default: false)
      msf_pose_sub = nh.subscribe<geometry_msgs::PoseWithCovarianceStamped>("/msf_core/pose", 1, boost::bind(&ScanMatchingOdometryNodelet::msf_pose_callback, this, _1, false));  // published continuously?
      msf_pose_after_update_sub = nh.subscribe<geometry_msgs::PoseWithCovarianceStamped>("/msf_core/pose_after_update", 1, boost::bind(&ScanMatchingOdometryNodelet::msf_pose_callback, this, _1, true));  // published after each keyframe update?
    }

    points_sub = nh.subscribe("/filtered_points", 256, &ScanMatchingOdometryNodelet::cloud_callback, this);  // receive the filtered point cloud
    // points_sub = nh.subscribe("/scan", 256, &ScanMatchingOdometryNodelet::cloud_callback, this);  // receive the filtered point cloud
    read_until_pub = nh.advertise<std_msgs::Header>("/scan_matching_odometry/read_until", 32);
    odom_pub = nh.advertise<nav_msgs::Odometry>(published_odom_topic, 32);  // publish the robot odometry
    trans_pub = nh.advertise<geometry_msgs::TransformStamped>("/scan_matching_odometry/transform", 32);
    status_pub = private_nh.advertise<ScanMatchingStatus>("/scan_matching_odometry/status", 8);
    aligned_points_pub = nh.advertise<pcl::PointCloud<PointT>>("/aligned_points", 32);  // the aligned point cloud
  }
  // Only three kinds of topics are published: the processed scan, the scan aligned to the keyframe, and the odometry.
  // A DEBUG macro could gate the publishing: if true, publish the messages; otherwise do not.
  // #define DEBUG 0
  // if(DEBUG){
  //   // the lidar vendor usually ships a driver that publishes the raw data on a topic
  // }
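  // The two /msf_core subscriptions above share a single callback; boost::bind fixes the extra
  // `after_update` flag at subscription time. A minimal self-contained sketch of the same pattern
  // (the Demo class, topic "in", and the callback are made up for illustration):
  //
  //   #include <ros/ros.h>
  //   #include <std_msgs/String.h>
  //   #include <boost/bind.hpp>
  //
  //   class Demo {
  //   public:
  //     void init(ros::NodeHandle& nh) {
  //       // _1 is the incoming message; `true` is baked into this particular subscription
  //       sub_ = nh.subscribe<std_msgs::String>("in", 1, boost::bind(&Demo::cb, this, _1, true));
  //     }
  //     void cb(const std_msgs::String::ConstPtr& msg, bool flag) {
  //       ROS_INFO("flag=%d data=%s", flag, msg->data.c_str());
  //     }
  //   private:
  //     ros::Subscriber sub_;
  //   };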
private:
  /**
   * @brief initialize parameters
   */
  void initialize_params() {
    auto& pnh = private_nh;
    published_odom_topic = private_nh.param<std::string>("published_odom_topic", "/odom");
    points_topic = pnh.param<std::string>("points_topic", "/velodyne_points");
    odom_frame_id = pnh.param<std::string>("odom_frame_id", "odom");
    robot_odom_frame_id = pnh.param<std::string>("robot_odom_frame_id", "robot_odom");

    // The minimum translational distance and rotation angle between keyframes.
    // If these values are zero, frames are always compared with the previous frame.
    keyframe_delta_trans = pnh.param<double>("keyframe_delta_trans", 0.25);  // if any one of the three thresholds is exceeded, the keyframe is updated
    keyframe_delta_angle = pnh.param<double>("keyframe_delta_angle", 0.15);
    keyframe_delta_time = pnh.param<double>("keyframe_delta_time", 1.0);

    // Registration validation by thresholding
    transform_thresholding = pnh.param<bool>("transform_thresholding", false);  // also false in the .launch file
    max_acceptable_trans = pnh.param<double>("max_acceptable_trans", 1.0);
    max_acceptable_angle = pnh.param<double>("max_acceptable_angle", 1.0);

    // select a downsample method (VOXELGRID, APPROX_VOXELGRID, NONE)
    std::string downsample_method = pnh.param<std::string>("downsample_method", "VOXELGRID");  // the .launch file passes VOXELGRID
    double downsample_resolution = pnh.param<double>("downsample_resolution", 0.1);  // voxel size, i.e. the edge length of each voxel cube; 0.1 in the .launch file

    if(downsample_method == "VOXELGRID") {  // the method selected here
      std::cout << "downsample: VOXELGRID " << downsample_resolution << std::endl;
      auto voxelgrid = new pcl::VoxelGrid<PointT>();  // create the voxel-grid downsampling object
      // Set the voxel edge lengths. The voxel size determines how fine the grid is:
      // the smaller the voxel, the closer the downsampled cloud is to the original.
      voxelgrid->setLeafSize(downsample_resolution, downsample_resolution, downsample_resolution);
      downsample_filter.reset(voxelgrid);
    } else if(downsample_method == "APPROX_VOXELGRID") {
      std::cout << "downsample: APPROX_VOXELGRID " << downsample_resolution << std::endl;
      pcl::ApproximateVoxelGrid<PointT>::Ptr approx_voxelgrid(new pcl::ApproximateVoxelGrid<PointT>());
      approx_voxelgrid->setLeafSize(downsample_resolution, downsample_resolution, downsample_resolution);
      downsample_filter = approx_voxelgrid;
    } else {
      if(downsample_method != "NONE") {
        std::cerr << "warning: unknown downsampling type (" << downsample_method << ")" << std::endl;
        std::cerr << "       : use passthrough filter" << std::endl;
      }
      std::cout << "downsample: NONE" << std::endl;
      pcl::PassThrough<PointT>::Ptr passthrough(new pcl::PassThrough<PointT>());
      downsample_filter = passthrough;
    }

    registration = select_registration_method(pnh);

    /*
      How the three downsampling options work:

      VOXELGRID:
        Principle: voxel-grid downsampling partitions 3D space into a grid of voxels (3D pixels)
        and keeps one representative point per voxel (typically the centroid, or the point closest
        to it); all points falling into the same voxel are replaced by that representative point.
        Characteristics: simple and effective, reduces the point count uniformly, but may lose detail.

      APPROX_VOXELGRID:
        Principle: similar to the voxel grid, but chooses the representative point of each voxel
        with a cheaper approximation (e.g. an approximate average position) instead of an exact
        centroid, trading accuracy for speed.
        Characteristics: faster than VOXELGRID at lower accuracy; suited to speed-critical scenarios.

      PASSTHROUGH:
        Principle: instead of partitioning space, a pass-through filter selects points by attribute
        (e.g. position or intensity), for example keeping only points within a given height range.
        Characteristics: allows more flexible filtering conditions but does not reduce the point
        count as uniformly as a voxel grid. It serves as the fallback when no downsampling method,
        or an unrecognized one, is specified.
    */
  }
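  // A minimal standalone sketch of the VOXELGRID path selected above, runnable outside the
  // nodelet (the 0.1 m leaf size mirrors the .launch default; the input points are made up):
  //
  //   #include <pcl/point_types.h>
  //   #include <pcl/filters/voxel_grid.h>
  //
  //   pcl::PointCloud<pcl::PointXYZI>::Ptr input(new pcl::PointCloud<pcl::PointXYZI>());
  //   pcl::PointXYZI p;
  //   p.x = 0.05f; p.y = 0.02f; p.z = 0.0f; p.intensity = 1.0f;
  //   input->push_back(p);  // fill with real sensor data in practice
  //
  //   pcl::VoxelGrid<pcl::PointXYZI> voxelgrid;
  //   voxelgrid.setInputCloud(input);
  //   voxelgrid.setLeafSize(0.1f, 0.1f, 0.1f);  // one representative point per 0.1 m cube
  //
  //   pcl::PointCloud<pcl::PointXYZI> filtered;
  //   voxelgrid.filter(filtered);  // filtered.size() <= input->size()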
  /**
   * @brief callback for point clouds
   * @param cloud_msg  point cloud msg
   */
  void cloud_callback(const sensor_msgs::PointCloud2ConstPtr& cloud_msg) {  // cloud_msg: the filtered point cloud
    if(!ros::ok()) {
      return;
    }

    pcl::PointCloud<PointT>::Ptr cloud(new pcl::PointCloud<PointT>());  // allocate a pcl::PointCloud
    pcl::fromROSMsg(*cloud_msg, *cloud);  // convert the ROS message (*cloud_msg) into a PCL (Point Cloud Library) cloud (*cloud)

    Eigen::Matrix4f pose = matching(cloud_msg->header.stamp, cloud);  // scan matching; returns the estimated pose
    publish_odometry(cloud_msg->header.stamp, cloud_msg->header.frame_id, pose);  // publish the odometry

    // In offline estimation, point clouds until the published time will be supplied
    std_msgs::HeaderPtr read_until(new std_msgs::Header());
    read_until->frame_id = points_topic;
    read_until->stamp = cloud_msg->header.stamp + ros::Duration(1, 0);
    read_until_pub.publish(read_until);

    read_until->frame_id = "/filtered_points";
    read_until_pub.publish(read_until);
  }

  void msf_pose_callback(const geometry_msgs::PoseWithCovarianceStampedConstPtr& pose_msg, bool after_update) {
    // callback for the MSF (multi-sensor fusion) odometry: stores the received pose in one of two
    // members depending on whether it was produced after a filter update
    if(after_update) {
      msf_pose_after_update = pose_msg;
    } else {
      msf_pose = pose_msg;
    }
  }

  /**
   * @brief downsample a point cloud
   * @param cloud  input cloud
   * @return downsampled point cloud
   */
  pcl::PointCloud<PointT>::ConstPtr downsample(const pcl::PointCloud<PointT>::ConstPtr& cloud) const {
    // downsample the cloud to reduce the number of points and speed up processing
    if(!downsample_filter) {
      return cloud;
    }

    pcl::PointCloud<PointT>::Ptr filtered(new pcl::PointCloud<PointT>());  // holds the downsampled cloud
    downsample_filter->setInputCloud(cloud);
    downsample_filter->filter(*filtered);

    return filtered;
  }

  /**
   * @brief estimate the relative pose between an input cloud and a keyframe cloud
   * @param stamp  the timestamp of the input cloud
   * @param cloud  the input cloud
   * @return the relative pose between the input cloud and the keyframe cloud
   */
  Eigen::Matrix4f matching(const ros::Time& stamp, const pcl::PointCloud<PointT>::ConstPtr& cloud) {
    // run the scan-matching algorithm to estimate the pose of the input cloud relative to the keyframe
    if(!keyframe) {  // a null pointer means no keyframe has been initialized yet
      prev_time = ros::Time();
      prev_trans.setIdentity();      // identity: no motion relative to the previous frame yet
      keyframe_pose.setIdentity();   // initial keyframe pose
      keyframe_stamp = stamp;        // keyframe timestamp
      keyframe = downsample(cloud);  // downsample to reduce the point count and improve efficiency
      registration->setInputTarget(keyframe);  // use the keyframe as the registration target
      return Eigen::Matrix4f::Identity();  // no previous pose information: assume no motion and return identity
      // The registration object estimates the relative transform between the input cloud and the
      // existing keyframe; this estimate is what tells the system how the sensor (e.g. the lidar)
      // moved between two scans.
    }

    auto filtered = downsample(cloud);       // downsample
    registration->setInputSource(filtered);  // feed the cloud to the registration object

    std::string msf_source;  // records the source of the initial pose guess ("imu" or "odometry")
    Eigen::Isometry3f msf_delta = Eigen::Isometry3f::Identity();  // 3D transform holding the motion from the previous frame to the current one
    // Scaling and rotation are linear transforms; linear transforms plus translation are affine transforms.

    if(private_nh.param("enable_imu_frontend", false)) {  // true in the .launch file
      if(msf_pose && msf_pose->header.stamp > keyframe_stamp && msf_pose_after_update && msf_pose_after_update->header.stamp > keyframe_stamp) {
        // requires that msf_pose (the IMU-fused pose) is not null,
        // that msf_pose is newer than the current keyframe,
        // and that msf_pose_after_update is not null
        Eigen::Isometry3d pose0 = pose2isometry(msf_pose_after_update->pose.pose);  // pose2isometry() converts a position/orientation in the world frame into the corresponding transform matrix
        Eigen::Isometry3d pose1 = pose2isometry(msf_pose->pose.pose);
        Eigen::Isometry3d delta = pose0.inverse() * pose1;  // the 3D transform taking pose0 to pose1

        msf_source = "imu";
        msf_delta = delta.cast<float>();  // convert double to float
      } else {
        std::cerr << "msf data is too old" << std::endl;
      }
    }
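    // The delta above is standard relative-pose composition: if pose0 is the pose at the last
    // update and pose1 the latest pose (both in the world frame), then pose0.inverse() * pose1 is
    // the motion accumulated since pose0, expressed in pose0's frame. A tiny numeric check with
    // made-up values:
    //
    //   Eigen::Isometry3d pose0 = Eigen::Isometry3d::Identity();
    //   pose0.translate(Eigen::Vector3d(1.0, 0.0, 0.0));  // robot was at x = 1
    //   Eigen::Isometry3d pose1 = Eigen::Isometry3d::Identity();
    //   pose1.translate(Eigen::Vector3d(1.0, 2.0, 0.0));  // robot is now at x = 1, y = 2
    //   Eigen::Isometry3d delta = pose0.inverse() * pose1;
    //   // delta.translation() == (0, 2, 0): it moved 2 m in +y since pose0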
    // else
    if(private_nh.param("enable_robot_odometry_init_guess", false) && !prev_time.isZero()) {
      // false in the .launch file; !prev_time.isZero() checks that the previous timestamp is valid
      tf::StampedTransform transform;  // holds the transform between the two frames
      if(tf_listener.waitForTransform(cloud->header.frame_id, stamp, cloud->header.frame_id, prev_time, robot_odom_frame_id, ros::Duration(0))) {
        // relate cloud->header.frame_id at the current stamp to cloud->header.frame_id at prev_time,
        // fixed in robot_odom_frame_id; waitForTransform waits until the transform can be computed
        // or the timeout expires (a duration of 0 means no waiting); stamp is the current timestamp
        tf_listener.lookupTransform(cloud->header.frame_id, stamp, cloud->header.frame_id, prev_time, robot_odom_frame_id, transform);
        // if waitForTransform succeeded, lookupTransform retrieves the actual transform into `transform`
      } else if(tf_listener.waitForTransform(cloud->header.frame_id, ros::Time(0), cloud->header.frame_id, prev_time, robot_odom_frame_id, ros::Duration(0))) {
        // if the lookup above failed, retry once with ros::Time(0) (the latest available transform) as the reference time
        tf_listener.lookupTransform(cloud->header.frame_id, ros::Time(0), cloud->header.frame_id, prev_time, robot_odom_frame_id, transform);
      }

      if(transform.stamp_.isZero()) {
        NODELET_WARN_STREAM("failed to look up transform between " << cloud->header.frame_id << " and " << robot_odom_frame_id);
      } else {
        msf_source = "odometry";
        msf_delta = tf2isometry(transform).cast<float>();  // convert the tf transform into an Eigen::Isometry3f (3D transform, float)
      }
    }

    pcl::PointCloud<PointT>::Ptr aligned(new pcl::PointCloud<PointT>());  // holds the aligned cloud
    registration->align(*aligned, prev_trans * msf_delta.matrix());
    // align the observed cloud to the keyframe; without IMU or odometry information, msf_delta stays identity

    publish_scan_matching_status(stamp, cloud->header.frame_id, aligned, msf_source, msf_delta);
    // publish the scan-matching status: timestamp, frame id, aligned cloud, prediction source, and predicted delta

    if(!registration->hasConverged()) {  // if scan matching did not converge, report it and return the previous pose
      NODELET_INFO_STREAM("scan matching has not converged!!");
      NODELET_INFO_STREAM("ignore this frame(" << stamp << ")");
      return keyframe_pose * prev_trans;
    }

    Eigen::Matrix4f trans = registration->getFinalTransformation();  // the transform between the current cloud and the keyframe
    Eigen::Matrix4f odom = keyframe_pose * trans;                    // compose to obtain the odometry pose

    if(transform_thresholding) {  // false in the .launch file and by default
      // If transform thresholding is enabled, compute the translation and rotation of this update
      // and compare them against the maximum acceptable values; if either is exceeded, report it
      // and return the previous pose. In other words, if two consecutive frames differ wildly,
      // the new match is ignored and the odometry of the previous pose is kept.
      Eigen::Matrix4f delta = prev_trans.inverse() * trans;
      double dx = delta.block<3, 1>(0, 3).norm();
      double da = std::acos(Eigen::Quaternionf(delta.block<3, 3>(0, 0)).w());

      if(dx > max_acceptable_trans || da > max_acceptable_angle) {
        NODELET_INFO_STREAM("too large transform!! " << dx << "[m] " << da << "[rad]");
        NODELET_INFO_STREAM("ignore this frame(" << stamp << ")");
        return keyframe_pose * prev_trans;
      }
    }
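    // Note on the rotation check above: for a unit quaternion q representing a rotation by angle
    // theta, q.w() = cos(theta / 2), so std::acos(q.w()) measures the half-angle, not theta. The
    // threshold therefore effectively acts on the half-angle (the same expression is used for
    // delta_angle below). A numerically safe sketch of the exact angle, for comparison:
    //
    //   Eigen::Matrix4f delta = Eigen::Matrix4f::Identity();             // relative transform
    //   Eigen::Quaternionf q(Eigen::Matrix3f(delta.block<3, 3>(0, 0)));  // rotation part as quaternion
    //   float half_angle = std::acos(std::min(1.0f, std::abs(q.w())));   // what the code thresholds
    //   float full_angle = 2.0f * half_angle;                            // the actual rotation angle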
    prev_time = stamp;   // timestamp of the current frame
    prev_trans = trans;  // transform of the current frame

    auto keyframe_trans = matrix2transform(stamp, keyframe_pose, odom_frame_id, "keyframe");
    // convert the matrix into a tf transform so the keyframe pose can be broadcast
    keyframe_broadcaster.sendTransform(keyframe_trans);  // broadcast the keyframe transform (on /tf)

    double delta_trans = trans.block<3, 1>(0, 3).norm();  // translational distance between the current frame and the keyframe
    double delta_angle = std::acos(Eigen::Quaternionf(trans.block<3, 3>(0, 0)).w());  // rotation angle between the current frame and the keyframe
    double delta_time = (stamp - keyframe_stamp).toSec();  // time difference between the current frame and the keyframe
    if(delta_trans > keyframe_delta_trans || delta_angle > keyframe_delta_angle || delta_time > keyframe_delta_time) {
      // if any one of the thresholds is exceeded, update the keyframe
      keyframe = filtered;
      registration->setInputTarget(keyframe);

      keyframe_pose = odom;
      keyframe_stamp = stamp;
      prev_time = stamp;
      prev_trans.setIdentity();
    }

    if(aligned_points_pub.getNumSubscribers() > 0) {  // if anyone subscribes to the aligned cloud, transform it and publish
      pcl::transformPointCloud(*cloud, *aligned, odom);
      aligned->header.frame_id = odom_frame_id;
      aligned_points_pub.publish(*aligned);
    }

    // std::cout << "The matrix odom is: \n" << odom << std::endl;
    return odom;  // return the odometry pose
  }

  /**
   * @brief publish odometry
   * @param stamp  timestamp
   * @param pose   odometry pose to be published
   */
  void publish_odometry(const ros::Time& stamp, const std::string& base_frame_id, const Eigen::Matrix4f& pose) {
    // publish transform stamped for IMU integration
    geometry_msgs::TransformStamped odom_trans = matrix2transform(stamp, pose, odom_frame_id, base_frame_id);
    trans_pub.publish(odom_trans);

    // broadcast the transform over tf
    odom_broadcaster.sendTransform(odom_trans);

    // publish the transform
    nav_msgs::Odometry odom;
    odom.header.stamp = stamp;
    odom.header.frame_id = odom_frame_id;

    odom.pose.pose.position.x = pose(0, 3);
    odom.pose.pose.position.y = pose(1, 3);
    odom.pose.pose.position.z = pose(2, 3);
    odom.pose.pose.orientation = odom_trans.transform.rotation;

    odom.child_frame_id = base_frame_id;
    odom.twist.twist.linear.x = 0.0;
    odom.twist.twist.linear.y = 0.0;
    odom.twist.twist.angular.z = 0.0;

    odom_pub.publish(odom);
  }

  /**
   * @brief publish scan matching status
   */
  void publish_scan_matching_status(const ros::Time& stamp, const std::string& frame_id, pcl::PointCloud<PointT>::ConstPtr aligned, const std::string& msf_source, const Eigen::Isometry3f& msf_delta) {
    // publish the matching status: whether it converged, the matching error, the inlier fraction,
    // the relative pose, and so on
    if(!status_pub.getNumSubscribers()) {
      return;
    }

    ScanMatchingStatus status;
    status.header.frame_id = frame_id;
    status.header.stamp = stamp;
    status.has_converged = registration->hasConverged();
    status.matching_error = registration->getFitnessScore();

    const double max_correspondence_dist = 0.5;

    int num_inliers = 0;
    std::vector<int> k_indices;
    std::vector<float> k_sq_dists;
    for(int i = 0; i < aligned->size(); i++) {
      const auto& pt = aligned->at(i);
      registration->getSearchMethodTarget()->nearestKSearch(pt, 1, k_indices, k_sq_dists);
      if(k_sq_dists[0] < max_correspondence_dist * max_correspondence_dist) {
        num_inliers++;
      }
    }
    status.inlier_fraction = static_cast<float>(num_inliers) / aligned->size();

    status.relative_pose = isometry2pose(Eigen::Isometry3f(registration->getFinalTransformation()).cast<double>());

    if(!msf_source.empty()) {
      status.prediction_labels.resize(1);
      status.prediction_labels[0].data = msf_source;

      status.prediction_errors.resize(1);
      Eigen::Isometry3f error = Eigen::Isometry3f(registration->getFinalTransformation()).inverse() * msf_delta;
      status.prediction_errors[0] = isometry2pose(error.cast<double>());
    }

    status_pub.publish(status);
  }
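  // The inlier count above reuses the registration target's search structure. A standalone
  // equivalent with an explicit KD-tree (keyframe_cloud and aligned_cloud are placeholders)
  // would look like this:
  //
  //   #include <pcl/kdtree/kdtree_flann.h>
  //
  //   pcl::KdTreeFLANN<pcl::PointXYZI> kdtree;
  //   kdtree.setInputCloud(keyframe_cloud);  // target: the keyframe
  //
  //   int num_inliers = 0;
  //   std::vector<int> k_indices(1);
  //   std::vector<float> k_sq_dists(1);
  //   for(const auto& pt : aligned_cloud->points) {  // source: the aligned scan
  //     // a point is an inlier if its nearest keyframe neighbor is within 0.5 m
  //     if(kdtree.nearestKSearch(pt, 1, k_indices, k_sq_dists) > 0 && k_sq_dists[0] < 0.5 * 0.5) {
  //       num_inliers++;
  //     }
  //   }
  //   float inlier_fraction = static_cast<float>(num_inliers) / aligned_cloud->size();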
private:
  // ROS topics
  ros::NodeHandle nh;
  ros::NodeHandle private_nh;

  ros::Subscriber points_sub;
  ros::Subscriber msf_pose_sub;
  ros::Subscriber msf_pose_after_update_sub;

  ros::Publisher odom_pub;
  ros::Publisher trans_pub;
  ros::Publisher aligned_points_pub;
  ros::Publisher status_pub;
  tf::TransformListener tf_listener;
  tf::TransformBroadcaster odom_broadcaster;
  tf::TransformBroadcaster keyframe_broadcaster;

  std::string published_odom_topic;
  std::string points_topic;
  std::string odom_frame_id;
  std::string robot_odom_frame_id;
  ros::Publisher read_until_pub;

  // keyframe parameters
  double keyframe_delta_trans;  // minimum distance between keyframes
  double keyframe_delta_angle;  // minimum rotation between keyframes
  double keyframe_delta_time;   // minimum time between keyframes

  // registration validation by thresholding
  bool transform_thresholding;
  double max_acceptable_trans;  // maximum translation accepted per update
  double max_acceptable_angle;  // maximum rotation accepted per update

  // odometry calculation
  geometry_msgs::PoseWithCovarianceStampedConstPtr msf_pose;
  geometry_msgs::PoseWithCovarianceStampedConstPtr msf_pose_after_update;

  ros::Time prev_time;                         // timestamp of the previous frame (reset when the keyframe is updated)
  Eigen::Matrix4f prev_trans;                  // transform from the current keyframe to the most recent frame (reset to identity on keyframe update)
  Eigen::Matrix4f keyframe_pose;               // transform from the map origin to the current keyframe
  ros::Time keyframe_stamp;                    // keyframe timestamp
  pcl::PointCloud<PointT>::ConstPtr keyframe;  // keyframe point cloud

  pcl::Filter<PointT>::Ptr downsample_filter;
  pcl::Registration<PointT, PointT>::Ptr registration;
};

}  // namespace hdl_graph_slam

PLUGINLIB_EXPORT_CLASS(hdl_graph_slam::ScanMatchingOdometryNodelet, nodelet::Nodelet)
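// PLUGINLIB_EXPORT_CLASS registers ScanMatchingOdometryNodelet with pluginlib so that a nodelet
// manager can load it at runtime by its class name (the class must also be declared in the
// package's nodelet plugin description XML to be discoverable). Running it in-process alongside
// the other hdl_graph_slam nodelets lets point clouds be passed by pointer instead of being
// serialized between separate nodes.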