// scan_matching_odometry_nodelet.cpp
// SPDX-License-Identifier: BSD-2-Clause
#include <algorithm>
#include <cmath>
#include <iostream>
#include <memory>

#include <ros/ros.h>
#include <ros/time.h>
#include <ros/duration.h>
#include <std_msgs/Time.h>
#include <nav_msgs/Odometry.h>
#include <sensor_msgs/PointCloud2.h>
#include <sensor_msgs/LaserScan.h>
#include <geometry_msgs/TransformStamped.h>
#include <geometry_msgs/PoseWithCovarianceStamped.h>
#include <tf_conversions/tf_eigen.h>
#include <tf/transform_listener.h>
#include <tf/transform_broadcaster.h>
#include <nodelet/nodelet.h>
#include <pluginlib/class_list_macros.h>

#include <pcl_ros/point_cloud.h>
#include <pcl/filters/voxel_grid.h>
#include <pcl/filters/passthrough.h>
#include <pcl/filters/approximate_voxel_grid.h>

#include <hdl_graph_slam/ros_utils.hpp>
#include <hdl_graph_slam/registrations.hpp>
#include "hdl_graph_slam/ScanMatchingStatus.h"
  25. namespace hdl_graph_slam {
  26. class ScanMatchingOdometryNodelet : public nodelet::Nodelet {
  27. public:
  28. typedef pcl::PointXYZI PointT;
  29. EIGEN_MAKE_ALIGNED_OPERATOR_NEW
  30. ScanMatchingOdometryNodelet() {}
  31. virtual ~ScanMatchingOdometryNodelet() {}
  32. virtual void onInit() { // 初始化函数
  33. NODELET_DEBUG("initializing scan_matching_odometry_nodelet...");
  34. nh = getNodeHandle(); // 节点句柄
  35. private_nh = getPrivateNodeHandle(); // 私有节点句柄
  36. initialize_params(); // 参数初始化函数i
  37. if(private_nh.param<bool>("enable_imu_frontend", false)) { // .launch中是true,考虑imu的信息,默认是false
  38. msf_pose_sub = nh.subscribe<geometry_msgs::PoseWithCovarianceStamped>("/msf_core/pose", 1, boost::bind(&ScanMatchingOdometryNodelet::msf_pose_callback, this, _1, false));
  39. // 这个是实时发布的?
  40. msf_pose_after_update_sub = nh.subscribe<geometry_msgs::PoseWithCovarianceStamped>("/msf_core/pose_after_update", 1, boost::bind(&ScanMatchingOdometryNodelet::msf_pose_callback, this, _1, true));
  41. // 这个是某个关键帧更新后发布的?
  42. }
  43. points_sub = nh.subscribe("/filtered_points", 256, &ScanMatchingOdometryNodelet::cloud_callback, this); // 接收过滤后的点云数据
  44. // points_sub = nh.subscribe("/scan", 256, &ScanMatchingOdometryNodelet::cloud_callback, this); // 接收过滤后的点云数据
  45. read_until_pub = nh.advertise<std_msgs::Header>("/scan_matching_odometry/read_until", 32); //
  46. odom_pub = nh.advertise<nav_msgs::Odometry>(published_odom_topic, 32); // 发布机器人的里程计信息
  47. trans_pub = nh.advertise<geometry_msgs::TransformStamped>("/scan_matching_odometry/transform", 32); //
  48. status_pub = private_nh.advertise<ScanMatchingStatus>("/scan_matching_odometry/status", 8); //
  49. aligned_points_pub = nh.advertise<sensor_msgs::PointCloud2>("/aligned_points", 32); // 对齐后的点云
  50. }
  51. // 只发布三个话题:处理后的激光数据 前后帧合并的激光数据 odom
  52. // 定义一个debug 宏(定义宏),如果是ture,则发布消息,否则不发布
  53. // #define DEBUG 0
  54. // if(DEBUG){
  55. // 雷达厂家会给一个程序,会用一个topic发数据出来
  56. // }
  57. private:
  58. /**
  59. * @brief initialize parameters
  60. */
  61. void initialize_params() {
  62. auto& pnh = private_nh;
  63. published_odom_topic = private_nh.param<std::string>("published_odom_topic", "/odom");
  64. points_topic = pnh.param<std::string>("points_topic", "/velodyne_points");
  65. odom_frame_id = pnh.param<std::string>("odom_frame_id", "odom");
  66. robot_odom_frame_id = pnh.param<std::string>("robot_odom_frame_id", "robot_odom");
  67. // The minimum tranlational distance and rotation angle between keyframes.
  68. // If this value is zero, frames are always compared with the previous frame
  69. keyframe_delta_trans = pnh.param<double>("keyframe_delta_trans", 0.25); // 三个参数有一个满足,则更新关键帧 keyframe
  70. keyframe_delta_angle = pnh.param<double>("keyframe_delta_angle", 0.15);
  71. keyframe_delta_time = pnh.param<double>("keyframe_delta_time", 1.0);
  72. // Registration validation by thresholding
  73. transform_thresholding = pnh.param<bool>("transform_thresholding", false); // .launch 中也是给false
  74. max_acceptable_trans = pnh.param<double>("max_acceptable_trans", 1.0);
  75. max_acceptable_angle = pnh.param<double>("max_acceptable_angle", 1.0);
  76. // select a downsample method (VOXELGRID, APPROX_VOXELGRID, NONE)
  77. std::string downsample_method = pnh.param<std::string>("downsample_method", "VOXELGRID"); // launch 给的参数是 VOXELGRID
  78. double downsample_resolution = pnh.param<double>("downsample_resolution", 0.1); // 体素的大小,决定每个提速立方体的边长 .launch 中给的参数是0.1
  79. if(downsample_method == "VOXELGRID") {
  80. // 选用的是这个方法
  81. std::cout << "downsample: VOXELGRID " << downsample_resolution << std::endl;
  82. auto voxelgrid = new pcl::VoxelGrid<PointT>(); // 创建体素网格下采样对象
  83. // 设置体素的长宽高。 体素的大小决定了网络的精细程度,体素越小,下采样后的点云越接近原始点云
  84. voxelgrid->setLeafSize(downsample_resolution, downsample_resolution, downsample_resolution);
  85. downsample_filter.reset(voxelgrid);
  86. } else if(downsample_method == "APPROX_VOXELGRID") {
  87. std::cout << "downsample: APPROX_VOXELGRID " << downsample_resolution << std::endl;
  88. pcl::ApproximateVoxelGrid<PointT>::Ptr approx_voxelgrid(new pcl::ApproximateVoxelGrid<PointT>());
  89. approx_voxelgrid->setLeafSize(downsample_resolution, downsample_resolution, downsample_resolution);
  90. downsample_filter = approx_voxelgrid;
  91. } else {
  92. if(downsample_method != "NONE") {
  93. std::cerr << "warning: unknown downsampling type (" << downsample_method << ")" << std::endl;
  94. std::cerr << " : use passthrough filter" << std::endl;
  95. }
  96. std::cout << "downsample: NONE" << std::endl;
  97. pcl::PassThrough<PointT>::Ptr passthrough(new pcl::PassThrough<PointT>());
  98. downsample_filter = passthrough;
  99. }
  100. registration = select_registration_method(pnh);
  101. /*
  102. 三种下采样方法的基本原理:
  103. VOXELGRID:
  104. 基本原理: 体素网格(VoxelGrid)下采样是一种常见的点云下采样方法。它将三维空间划分为一个由体素(三维像素)组成的网格,并在每个体素内部选择一个代表性点(通常是中心点或最接近中心的点),从而减少点的数量。所有在同一个体素内的点都被这个代表性点替代。
  105. 特点: 这种方法简单有效,可以均匀地减少点数,但可能会丢失一些细节。
  106. APPROX_VOXELGRID:
  107. 基本原理: 近似体素网格(ApproximateVoxelGrid)下采样与体素网格类似,但它在处理每个体素内的点时采用更简单的方法来选择代表性点,例如选择包含在体素内的最大点或平均位置点。这种方法的计算效率更高,但可能不如体素网格下采样精确。
  108. 特点: 计算速度比VOXELGRID快,但精度较低,适用于对处理速度要求较高的场景。
  109. PASSTHROUGH:
  110. 基本原理: 通道(PassThrough)下采样不是通过空间划分来减少点数,而是根据点的某些属性(如位置或强度)来筛选点。例如,可以设置一个过滤条件,只保留在特定高度范围内的点,或者只保留某个方向上的点。
  111. 特点: 允许用户定义更灵活的筛选条件,但不会像体素网格方法那样均匀地减少点数。如果没有指定下采样方法,或者指定的方法不被识别,就会使用通道过滤器作为默认方法。
  112. */
  113. }
  114. /**
  115. * @brief callback for point clouds
  116. * @param cloud_msg point cloud msg
  117. */
  118. void cloud_callback(const sensor_msgs::PointCloud2ConstPtr& cloud_msg) { // cloud_msg:滤波后的点云数据
  119. if(!ros::ok()) {
  120. return;
  121. }
  122. pcl::PointCloud<PointT>::Ptr cloud(new pcl::PointCloud<PointT>()); // 创建一个 pcl::PointCloud<PointT> 类型的指针
  123. pcl::fromROSMsg(*cloud_msg, *cloud); // 将ROS消息格式的点云(*cloud_msg)转换为PCL(Point Cloud Library)格式的点云(*cloud)
  124. Eigen::Matrix4f pose = matching(cloud_msg->header.stamp, cloud); // 点云匹配函数,返回
  125. publish_odometry(cloud_msg->header.stamp, cloud_msg->header.frame_id, pose); // 发布里程计数据
  126. // In offline estimation, point clouds until the published time will be supplied
  127. std_msgs::HeaderPtr read_until(new std_msgs::Header());
  128. read_until->frame_id = points_topic;
  129. read_until->stamp = cloud_msg->header.stamp + ros::Duration(1, 0);
  130. read_until_pub.publish(read_until);
  131. read_until->frame_id = "/filtered_points";
  132. read_until_pub.publish(read_until);
  133. }
  134. void msf_pose_callback(const geometry_msgs::PoseWithCovarianceStampedConstPtr& pose_msg, bool after_update) { // 多状态 MSF 的回调函数,接受MSF里程计的位置信息,并根据是否为更新后的位置,存储在不同变量中
  135. if(after_update) {
  136. msf_pose_after_update = pose_msg;
  137. } else {
  138. msf_pose = pose_msg;
  139. }
  140. }
  141. /**
  142. * @brief downsample a point cloud
  143. * @param cloud input cloud
  144. * @return downsampled point cloud
  145. */
  146. pcl::PointCloud<PointT>::ConstPtr downsample(const pcl::PointCloud<PointT>::ConstPtr& cloud) const { // 对点云数据进行向下采样,减少点的数量以提高处理速度
  147. if(!downsample_filter) {
  148. return cloud;
  149. }
  150. pcl::PointCloud<PointT>::Ptr filtered(new pcl::PointCloud<PointT>()); // 创建一个新的点云对象,用来存储下采样后的点云数据
  151. downsample_filter->setInputCloud(cloud);
  152. downsample_filter->filter(*filtered);
  153. return filtered;
  154. }
  155. /**
  156. * @brief estimate the relative pose between an input cloud and a keyframe cloud
  157. * @param stamp the timestamp of the input cloud
  158. * @param cloud the input cloud
  159. * @return the relative pose between the input cloud and the keyframe cloud
  160. */
  161. Eigen::Matrix4f matching(const ros::Time& stamp, const pcl::PointCloud<PointT>::ConstPtr& cloud) { // 执行扫描匹配算法,估计输入点云和关键帧之间的相对位置
  162. if(!keyframe) { // 判断 keyframe 是否为空指针,是的话说明还没有初始化关键真
  163. prev_time = ros::Time(); //
  164. prev_trans.setIdentity(); // 设置为单位矩阵,表示:与上一次的位资没有发生变化
  165. keyframe_pose.setIdentity(); // 关键帧的初始位资
  166. keyframe_stamp = stamp; // 关键帧的时间戳
  167. keyframe = downsample(cloud); // 对点云数据进行下采样,减少点的数量,提高处理效率和精度
  168. registration->setInputTarget(keyframe); // 将 keyframe 设置成关键帧
  169. return Eigen::Matrix4f::Identity(); // 没有之前的位资信息,假设与上次位资没有发生变化,返回单位矩阵
  170. // registration 对象负责计算输入点云(cloud)与已有关键帧(keyframe)之间的相对变换。这种变换估算允许系统理解传感器(如激光雷达)在两次扫描之间的移动
  171. }
  172. auto filtered = downsample(cloud); // 下采样
  173. registration->setInputSource(filtered); // 把点云数据给到 registration
  174. std::string msf_source; // 记录初始位资估计的来源(imu 或者 odometry)
  175. Eigen::Isometry3f msf_delta = Eigen::Isometry3f::Identity(); // 三维仿射变换的单位矩阵,用于存储上一帧到当前帧的位资变化,3表示三维
  176. // 缩放变换和旋转变换称为线性变换(linear transform) 线性变换和平移变换统称为仿射变换(affine transfrom)
  177. if(private_nh.param<bool>("enable_imu_frontend", false)) { // .launch 中是 true
  178. if(msf_pose && msf_pose->header.stamp > keyframe_stamp && msf_pose_after_update && msf_pose_after_update->header.stamp > keyframe_stamp) {
  179. // 如果 msf_pose 不是空指针, msf_pose:imu 数据
  180. // 如果 msf_pose 的数据比当前关键帧的数据更新
  181. // 如果 msf_pose_after_update 不是空指针
  182. Eigen::Isometry3d pose0 = pose2isometry(msf_pose_after_update->pose.pose); // 函数pose2isometry() 将机器人在世界坐标系中的位置和方向 转换成对应的 仿射变换矩阵
  183. Eigen::Isometry3d pose1 = pose2isometry(msf_pose->pose.pose);
  184. Eigen::Isometry3d delta = pose0.inverse() * pose1; // 相乘得到一个描述从pose0到pose1的变换的3D仿射变换
  185. msf_source = "imu";
  186. msf_delta = delta.cast<float>(); // 将double转换成float
  187. } else {
  188. std::cerr << "msf data is too old" << std::endl;
  189. }
  190. } //
  191. else if(private_nh.param<bool>("enable_robot_odometry_init_guess", false) && !prev_time.isZero()) // .launch 给的参数是 false !prev_time.isZero() 判断是不是有效的时间戳
  192. {
  193. tf::StampedTransform transform; // 声明一个变量,用来存储两个坐标帧之间的变换信息
  194. if(tf_listener.waitForTransform(cloud->header.frame_id, stamp, cloud->header.frame_id, prev_time, robot_odom_frame_id, ros::Duration(0)))
  195. // 将cloud->header.frame_id 坐标帧变换到 robot_odom_frame_id 坐标帧
  196. // waitForTransform 方法等待直到可以计算出这个变换,或者直到超时(这里设置的超时时间为0,意味着无限等待) stamp 是当前的时间戳
  197. {
  198. tf_listener.lookupTransform(cloud->header.frame_id, stamp, cloud->header.frame_id, prev_time, robot_odom_frame_id, transform);
  199. // 如果waitForTransform成功,那么调用lookupTransform方法来获取实际的变换,并将其存储在transform变量中
  200. }
  201. else if(tf_listener.waitForTransform(cloud->header.frame_id, ros::Time(0), cloud->header.frame_id, prev_time, robot_odom_frame_id, ros::Duration(0)))
  202. // 如果上面的变换失败了,再尝试一遍,但参考时间改成 ros::Time(0)
  203. {
  204. tf_listener.lookupTransform(cloud->header.frame_id, ros::Time(0), cloud->header.frame_id, prev_time, robot_odom_frame_id, transform);
  205. }
  206. if(transform.stamp_.isZero()) {
  207. NODELET_WARN_STREAM("failed to look up transform between " << cloud->header.frame_id << " and " << robot_odom_frame_id);
  208. } else {
  209. msf_source = "odometry";
  210. msf_delta = tf2isometry(transform).cast<float>(); // 将 transform 转换为 Eigen::Isometry3f 类型(3D仿射变换,使用float类型)
  211. }
  212. }
  213. pcl::PointCloud<PointT>::Ptr aligned(new pcl::PointCloud<PointT>()); // 创建一个新的点云对象aligned,用于存储对齐后的点云数据
  214. registration->align(*aligned, prev_trans * msf_delta.matrix()); // 用 registration 来对齐点云, 如果没有imu或者odom信息, msf_delta 是单位矩阵, 即将观测到的点云数据对齐到关键帧?
  215. publish_scan_matching_status(stamp, cloud->header.frame_id, aligned, msf_source, msf_delta); // 发布扫描匹配的状态信息,包括时间戳、坐标帧ID、对齐后的点云、位姿来源和位姿更新
  216. if(!registration->hasConverged()) { // 检查扫描匹配是否收敛(是否匹配成功?)。如果没有收敛,输出信息并返回上一次的位姿
  217. NODELET_INFO_STREAM("scan matching has not converged!!");
  218. NODELET_INFO_STREAM("ignore this frame(" << stamp << ")");
  219. return keyframe_pose * prev_trans;
  220. }
  221. Eigen::Matrix4f trans = registration->getFinalTransformation(); // 获得当前点云和上一帧点云 关键帧 的仿射变换
  222. Eigen::Matrix4f odom = keyframe_pose * trans; // 算出来 odom
  223. if(transform_thresholding) { // .launch 设置为false 默认为 false
  224. // 如果启用了变换阈值判断,计算本次变换的平移和旋转,并与最大可接受值进行比较。如果超出阈值,输出信息并返回上一次的位姿
  225. // 即如果某两帧的点云差别特别大,忽略后面这一帧的匹配,返回上一个姿态的 odom
  226. Eigen::Matrix4f delta = prev_trans.inverse() * trans;
  227. double dx = delta.block<3, 1>(0, 3).norm();
  228. double da = std::acos(Eigen::Quaternionf(delta.block<3, 3>(0, 0)).w());
  229. if(dx > max_acceptable_trans || da > max_acceptable_angle) {
  230. NODELET_INFO_STREAM("too large transform!! " << dx << "[m] " << da << "[rad]");
  231. NODELET_INFO_STREAM("ignore this frame(" << stamp << ")");
  232. return keyframe_pose * prev_trans;
  233. }
  234. }
  235. prev_time = stamp; // 当前帧的时间戳
  236. prev_trans = trans; // 当前帧的仿射变换
  237. auto keyframe_trans = matrix2transform(stamp, keyframe_pose, odom_frame_id, "keyframe"); // 将变换矩阵转换为tf::Transform对象,用于发布关键帧的变换
  238. keyframe_broadcaster.sendTransform(keyframe_trans); // 发布关键帧的变换(这个是发送到哪里? )
  239. double delta_trans = trans.block<3, 1>(0, 3).norm(); // 计算 当前帧 与 关键帧 变换的 平移距离
  240. double delta_angle = std::acos(Eigen::Quaternionf(trans.block<3, 3>(0, 0)).w()); // 计算 当前帧 与 关键帧 变换的 旋转角度
  241. double delta_time = (stamp - keyframe_stamp).toSec(); // 计算 当前帧 与 关键帧 变换的 时间差
  242. if(delta_trans > keyframe_delta_trans || delta_angle > keyframe_delta_angle || delta_time > keyframe_delta_time) { // 如果有一个超过阈值,更新关键帧
  243. keyframe = filtered;
  244. registration->setInputTarget(keyframe);
  245. keyframe_pose = odom;
  246. keyframe_stamp = stamp;
  247. prev_time = stamp;
  248. prev_trans.setIdentity();
  249. }
  250. if(aligned_points_pub.getNumSubscribers() > 0) { // 如果有节点订阅了对齐后的点云,进行变换并发布
  251. pcl::transformPointCloud(*cloud, *aligned, odom);
  252. aligned->header.frame_id = odom_frame_id;
  253. aligned_points_pub.publish(*aligned);
  254. }
  255. // std::cout << "The matrix odom is: \n" << odom << std::endl;
  256. return odom; // 返回里程计
  257. }
  258. /**
  259. * @brief publish odometry
  260. * @param stamp timestamp
  261. * @param pose odometry pose to be published
  262. */
  263. void publish_odometry(const ros::Time& stamp, const std::string& base_frame_id, const Eigen::Matrix4f& pose) { // 发布里程计数据
  264. // publish transform stamped for IMU integration
  265. geometry_msgs::TransformStamped odom_trans = matrix2transform(stamp, pose, odom_frame_id, base_frame_id);
  266. trans_pub.publish(odom_trans);
  267. // broadcast the transform over tf
  268. odom_broadcaster.sendTransform(odom_trans);
  269. // publish the transform
  270. nav_msgs::Odometry odom;
  271. odom.header.stamp = stamp;
  272. odom.header.frame_id = odom_frame_id;
  273. odom.pose.pose.position.x = pose(0, 3);
  274. odom.pose.pose.position.y = pose(1, 3);
  275. odom.pose.pose.position.z = pose(2, 3);
  276. odom.pose.pose.orientation = odom_trans.transform.rotation;
  277. odom.child_frame_id = base_frame_id;
  278. odom.twist.twist.linear.x = 0.0;
  279. odom.twist.twist.linear.y = 0.0;
  280. odom.twist.twist.angular.z = 0.0;
  281. odom_pub.publish(odom);
  282. }
  283. /**
  284. * @brief publish scan matching status
  285. */
  286. void publish_scan_matching_status(const ros::Time& stamp, const std::string& frame_id, pcl::PointCloud<pcl::PointXYZI>::ConstPtr aligned, const std::string& msf_source, const Eigen::Isometry3f& msf_delta) {
  287. // 发布扫描的状态,包括匹配是否收敛、匹配误差、内点比例、相对位置等信息
  288. if(!status_pub.getNumSubscribers()) {
  289. return;
  290. }
  291. ScanMatchingStatus status;
  292. status.header.frame_id = frame_id;
  293. status.header.stamp = stamp;
  294. status.has_converged = registration->hasConverged();
  295. status.matching_error = registration->getFitnessScore();
  296. const double max_correspondence_dist = 0.5;
  297. int num_inliers = 0;
  298. std::vector<int> k_indices;
  299. std::vector<float> k_sq_dists;
  300. for(int i = 0; i < aligned->size(); i++) {
  301. const auto& pt = aligned->at(i);
  302. registration->getSearchMethodTarget()->nearestKSearch(pt, 1, k_indices, k_sq_dists);
  303. if(k_sq_dists[0] < max_correspondence_dist * max_correspondence_dist) {
  304. num_inliers++;
  305. }
  306. }
  307. status.inlier_fraction = static_cast<float>(num_inliers) / aligned->size();
  308. status.relative_pose = isometry2pose(Eigen::Isometry3f(registration->getFinalTransformation()).cast<double>());
  309. if(!msf_source.empty()) {
  310. status.prediction_labels.resize(1);
  311. status.prediction_labels[0].data = msf_source;
  312. status.prediction_errors.resize(1);
  313. Eigen::Isometry3f error = Eigen::Isometry3f(registration->getFinalTransformation()).inverse() * msf_delta;
  314. status.prediction_errors[0] = isometry2pose(error.cast<double>());
  315. }
  316. status_pub.publish(status);
  317. }
  318. private:
  319. // ROS topics
  320. ros::NodeHandle nh;
  321. ros::NodeHandle private_nh;
  322. ros::Subscriber points_sub;
  323. ros::Subscriber msf_pose_sub;
  324. ros::Subscriber msf_pose_after_update_sub;
  325. ros::Publisher odom_pub;
  326. ros::Publisher trans_pub;
  327. ros::Publisher aligned_points_pub;
  328. ros::Publisher status_pub;
  329. tf::TransformListener tf_listener;
  330. tf::TransformBroadcaster odom_broadcaster;
  331. tf::TransformBroadcaster keyframe_broadcaster;
  332. std::string published_odom_topic;
  333. std::string points_topic;
  334. std::string odom_frame_id;
  335. std::string robot_odom_frame_id;
  336. ros::Publisher read_until_pub;
  337. // keyframe parameters
  338. double keyframe_delta_trans; // minimum distance between keyframes
  339. double keyframe_delta_angle; //
  340. double keyframe_delta_time; //
  341. // registration validation by thresholding
  342. bool transform_thresholding; //
  343. double max_acceptable_trans; //
  344. double max_acceptable_angle;
  345. // odometry calculation
  346. geometry_msgs::PoseWithCovarianceStampedConstPtr msf_pose;
  347. geometry_msgs::PoseWithCovarianceStampedConstPtr msf_pose_after_update;
  348. ros::Time prev_time; // 当前关键帧的时间戳?
  349. Eigen::Matrix4f prev_trans; // 地图起点到当前 帧 的放射变换
  350. Eigen::Matrix4f keyframe_pose; // 地图起点到当前 关键帧 的仿射变换
  351. ros::Time keyframe_stamp; // 关键帧的时间戳
  352. pcl::PointCloud<PointT>::ConstPtr keyframe; // 关键帧
  353. //
  354. pcl::Filter<PointT>::Ptr downsample_filter;
  355. pcl::Registration<PointT, PointT>::Ptr registration;
  356. };
  357. } // namespace hdl_graph_slam
  358. PLUGINLIB_EXPORT_CLASS(hdl_graph_slam::ScanMatchingOdometryNodelet, nodelet::Nodelet)