// scan_matching_odometry_nodelet.cpp
  1. // SPDX-License-Identifier: BSD-2-Clause
  2. #include <memory>
  3. #include <iostream>
  4. #include <ros/ros.h>
  5. #include <ros/time.h>
  6. #include <ros/duration.h>
  7. #include <pcl_ros/point_cloud.h>
  8. #include <tf_conversions/tf_eigen.h>
  9. #include <tf/transform_listener.h>
  10. #include <tf/transform_broadcaster.h>
  11. #include <std_msgs/Time.h>
  12. #include <nav_msgs/Odometry.h>
  13. #include <sensor_msgs/PointCloud2.h>
  14. #include <geometry_msgs/TransformStamped.h>
  15. #include <geometry_msgs/PoseWithCovarianceStamped.h>
  16. #include <nodelet/nodelet.h>
  17. #include <pluginlib/class_list_macros.h>
  18. #include <pcl/filters/voxel_grid.h>
  19. #include <pcl/filters/passthrough.h>
  20. #include <pcl/filters/approximate_voxel_grid.h>
  21. #include <hdl_graph_slam/ros_utils.hpp>
  22. #include <hdl_graph_slam/registrations.hpp>
  23. #include "hdl_graph_slam/ScanMatchingStatus.h"
  24. namespace hdl_graph_slam {
  25. class ScanMatchingOdometryNodelet : public nodelet::Nodelet {
  26. public:
  27. typedef pcl::PointXYZI PointT;
  28. EIGEN_MAKE_ALIGNED_OPERATOR_NEW
  29. ScanMatchingOdometryNodelet() {}
  30. virtual ~ScanMatchingOdometryNodelet() {}
  31. virtual void onInit() { // 初始化函数
  32. NODELET_DEBUG("initializing scan_matching_odometry_nodelet...");
  33. nh = getNodeHandle(); // 节点句柄
  34. private_nh = getPrivateNodeHandle(); // 私有节点句柄
  35. initialize_params(); // 参数初始化函数i
  36. if(private_nh.param<bool>("enable_imu_frontend", false)) { // .launch中是true,考虑imu的信息,默认是false
  37. msf_pose_sub = nh.subscribe<geometry_msgs::PoseWithCovarianceStamped>("/msf_core/pose", 1, boost::bind(&ScanMatchingOdometryNodelet::msf_pose_callback, this, _1, false));
  38. // 这个是实时发布的?
  39. msf_pose_after_update_sub = nh.subscribe<geometry_msgs::PoseWithCovarianceStamped>("/msf_core/pose_after_update", 1, boost::bind(&ScanMatchingOdometryNodelet::msf_pose_callback, this, _1, true));
  40. // 这个是某个关键帧更新后发布的?
  41. }
  42. points_sub = nh.subscribe("/filtered_points", 256, &ScanMatchingOdometryNodelet::cloud_callback, this); // 接收过滤后的点云数据
  43. read_until_pub = nh.advertise<std_msgs::Header>("/scan_matching_odometry/read_until", 32); //
  44. odom_pub = nh.advertise<nav_msgs::Odometry>(published_odom_topic, 32); // 发布机器人的里程计信息
  45. trans_pub = nh.advertise<geometry_msgs::TransformStamped>("/scan_matching_odometry/transform", 32); //
  46. status_pub = private_nh.advertise<ScanMatchingStatus>("/scan_matching_odometry/status", 8); //
  47. aligned_points_pub = nh.advertise<sensor_msgs::PointCloud2>("/aligned_points", 32); // 对齐后的点云
  48. }
  49. // 只发布三个话题:处理后的激光数据 前后帧合并的激光数据 odom
  50. // 定义一个debug 宏(定义宏),如果是ture,则发布消息,否则不发布
  51. // #define DEBUG 0
  52. // if(DEBUG){
  53. // 雷达厂家会给一个程序,会用一个topic发数据出来
  54. // }
  55. private:
  56. /**
  57. * @brief initialize parameters
  58. */
  59. void initialize_params() {
  60. auto& pnh = private_nh;
  61. published_odom_topic = private_nh.param<std::string>("published_odom_topic", "/odom");
  62. points_topic = pnh.param<std::string>("points_topic", "/velodyne_points");
  63. odom_frame_id = pnh.param<std::string>("odom_frame_id", "odom");
  64. robot_odom_frame_id = pnh.param<std::string>("robot_odom_frame_id", "robot_odom");
  65. // The minimum tranlational distance and rotation angle between keyframes.
  66. // If this value is zero, frames are always compared with the previous frame
  67. keyframe_delta_trans = pnh.param<double>("keyframe_delta_trans", 0.25); // 三个参数有一个满足,则更新关键帧 keyframe
  68. keyframe_delta_angle = pnh.param<double>("keyframe_delta_angle", 0.15);
  69. keyframe_delta_time = pnh.param<double>("keyframe_delta_time", 1.0);
  70. // Registration validation by thresholding
  71. transform_thresholding = pnh.param<bool>("transform_thresholding", false); // .launch 中也是给false
  72. max_acceptable_trans = pnh.param<double>("max_acceptable_trans", 1.0);
  73. max_acceptable_angle = pnh.param<double>("max_acceptable_angle", 1.0);
  74. // select a downsample method (VOXELGRID, APPROX_VOXELGRID, NONE)
  75. std::string downsample_method = pnh.param<std::string>("downsample_method", "VOXELGRID"); // launch 给的参数是 VOXELGRID
  76. double downsample_resolution = pnh.param<double>("downsample_resolution", 0.1); // 体素的大小,决定每个提速立方体的边长 .launch 中给的参数是0.1
  77. if(downsample_method == "VOXELGRID") {
  78. // 选用的是这个方法
  79. std::cout << "downsample: VOXELGRID " << downsample_resolution << std::endl;
  80. auto voxelgrid = new pcl::VoxelGrid<PointT>(); // 创建体素网格下采样对象
  81. // 设置体素的长宽高。 体素的大小决定了网络的精细程度,体素越小,下采样后的点云越接近原始点云
  82. voxelgrid->setLeafSize(downsample_resolution, downsample_resolution, downsample_resolution);
  83. downsample_filter.reset(voxelgrid);
  84. } else if(downsample_method == "APPROX_VOXELGRID") {
  85. std::cout << "downsample: APPROX_VOXELGRID " << downsample_resolution << std::endl;
  86. pcl::ApproximateVoxelGrid<PointT>::Ptr approx_voxelgrid(new pcl::ApproximateVoxelGrid<PointT>());
  87. approx_voxelgrid->setLeafSize(downsample_resolution, downsample_resolution, downsample_resolution);
  88. downsample_filter = approx_voxelgrid;
  89. } else {
  90. if(downsample_method != "NONE") {
  91. std::cerr << "warning: unknown downsampling type (" << downsample_method << ")" << std::endl;
  92. std::cerr << " : use passthrough filter" << std::endl;
  93. }
  94. std::cout << "downsample: NONE" << std::endl;
  95. pcl::PassThrough<PointT>::Ptr passthrough(new pcl::PassThrough<PointT>());
  96. downsample_filter = passthrough;
  97. }
  98. registration = select_registration_method(pnh);
  99. /*
  100. 三种下采样方法的基本原理:
  101. VOXELGRID:
  102. 基本原理: 体素网格(VoxelGrid)下采样是一种常见的点云下采样方法。它将三维空间划分为一个由体素(三维像素)组成的网格,并在每个体素内部选择一个代表性点(通常是中心点或最接近中心的点),从而减少点的数量。所有在同一个体素内的点都被这个代表性点替代。
  103. 特点: 这种方法简单有效,可以均匀地减少点数,但可能会丢失一些细节。
  104. APPROX_VOXELGRID:
  105. 基本原理: 近似体素网格(ApproximateVoxelGrid)下采样与体素网格类似,但它在处理每个体素内的点时采用更简单的方法来选择代表性点,例如选择包含在体素内的最大点或平均位置点。这种方法的计算效率更高,但可能不如体素网格下采样精确。
  106. 特点: 计算速度比VOXELGRID快,但精度较低,适用于对处理速度要求较高的场景。
  107. PASSTHROUGH:
  108. 基本原理: 通道(PassThrough)下采样不是通过空间划分来减少点数,而是根据点的某些属性(如位置或强度)来筛选点。例如,可以设置一个过滤条件,只保留在特定高度范围内的点,或者只保留某个方向上的点。
  109. 特点: 允许用户定义更灵活的筛选条件,但不会像体素网格方法那样均匀地减少点数。如果没有指定下采样方法,或者指定的方法不被识别,就会使用通道过滤器作为默认方法。
  110. */
  111. }
  112. /**
  113. * @brief callback for point clouds
  114. * @param cloud_msg point cloud msg
  115. */
  116. void cloud_callback(const sensor_msgs::PointCloud2ConstPtr& cloud_msg) { // cloud_msg:滤波后的点云数据
  117. if(!ros::ok()) {
  118. return;
  119. }
  120. pcl::PointCloud<PointT>::Ptr cloud(new pcl::PointCloud<PointT>()); // 创建一个 pcl::PointCloud<PointT> 类型的指针
  121. pcl::fromROSMsg(*cloud_msg, *cloud); // 将ROS消息格式的点云(*cloud_msg)转换为PCL(Point Cloud Library)格式的点云(*cloud)
  122. Eigen::Matrix4f pose = matching(cloud_msg->header.stamp, cloud); // 点云匹配函数,返回
  123. publish_odometry(cloud_msg->header.stamp, cloud_msg->header.frame_id, pose); // 发布里程计数据
  124. // In offline estimation, point clouds until the published time will be supplied
  125. std_msgs::HeaderPtr read_until(new std_msgs::Header());
  126. read_until->frame_id = points_topic;
  127. read_until->stamp = cloud_msg->header.stamp + ros::Duration(1, 0);
  128. read_until_pub.publish(read_until);
  129. read_until->frame_id = "/filtered_points";
  130. read_until_pub.publish(read_until);
  131. }
  132. void msf_pose_callback(const geometry_msgs::PoseWithCovarianceStampedConstPtr& pose_msg, bool after_update) { // 多状态 MSF 的回调函数,接受MSF里程计的位置信息,并根据是否为更新后的位置,存储在不同变量中
  133. if(after_update) {
  134. msf_pose_after_update = pose_msg;
  135. } else {
  136. msf_pose = pose_msg;
  137. }
  138. }
  139. /**
  140. * @brief downsample a point cloud
  141. * @param cloud input cloud
  142. * @return downsampled point cloud
  143. */
  144. pcl::PointCloud<PointT>::ConstPtr downsample(const pcl::PointCloud<PointT>::ConstPtr& cloud) const { // 对点云数据进行向下采样,减少点的数量以提高处理速度
  145. if(!downsample_filter) {
  146. return cloud;
  147. }
  148. pcl::PointCloud<PointT>::Ptr filtered(new pcl::PointCloud<PointT>()); // 创建一个新的点云对象,用来存储下采样后的点云数据
  149. downsample_filter->setInputCloud(cloud);
  150. downsample_filter->filter(*filtered);
  151. return filtered;
  152. }
  153. /**
  154. * @brief estimate the relative pose between an input cloud and a keyframe cloud
  155. * @param stamp the timestamp of the input cloud
  156. * @param cloud the input cloud
  157. * @return the relative pose between the input cloud and the keyframe cloud
  158. */
  159. Eigen::Matrix4f matching(const ros::Time& stamp, const pcl::PointCloud<PointT>::ConstPtr& cloud) { // 执行扫描匹配算法,估计输入点云和关键帧之间的相对位置
  160. if(!keyframe) { // 判断 keyframe 是否为空指针,是的话说明还没有初始化关键真
  161. prev_time = ros::Time(); //
  162. prev_trans.setIdentity(); // 设置为单位矩阵,表示:与上一次的位资没有发生变化
  163. keyframe_pose.setIdentity(); // 关键帧的初始位资
  164. keyframe_stamp = stamp; // 关键帧的时间戳
  165. keyframe = downsample(cloud); // 对点云数据进行下采样,减少点的数量,提高处理效率和精度
  166. registration->setInputTarget(keyframe); // 将 keyframe 设置成关键帧
  167. return Eigen::Matrix4f::Identity(); // 没有之前的位资信息,假设与上次位资没有发生变化,返回单位矩阵
  168. // registration 对象负责计算输入点云(cloud)与已有关键帧(keyframe)之间的相对变换。这种变换估算允许系统理解传感器(如激光雷达)在两次扫描之间的移动
  169. }
  170. auto filtered = downsample(cloud); // 下采样
  171. registration->setInputSource(filtered); // 把点云数据给到 registration
  172. std::string msf_source; // 记录初始位资估计的来源(imu 或者 odometry)
  173. Eigen::Isometry3f msf_delta = Eigen::Isometry3f::Identity(); // 三维仿射变换的单位矩阵,用于存储上一帧到当前帧的位资变化,3表示三维
  174. // 缩放变换和旋转变换称为线性变换(linear transform) 线性变换和平移变换统称为仿射变换(affine transfrom)
  175. if(private_nh.param<bool>("enable_imu_frontend", false)) { // .launch 中是 true
  176. if(msf_pose && msf_pose->header.stamp > keyframe_stamp && msf_pose_after_update && msf_pose_after_update->header.stamp > keyframe_stamp) {
  177. // 如果 msf_pose 不是空指针, msf_pose:imu 数据
  178. // 如果 msf_pose 的数据比当前关键帧的数据更新
  179. // 如果 msf_pose_after_update 不是空指针
  180. Eigen::Isometry3d pose0 = pose2isometry(msf_pose_after_update->pose.pose); // 函数pose2isometry() 将机器人在世界坐标系中的位置和方向 转换成对应的 仿射变换矩阵
  181. Eigen::Isometry3d pose1 = pose2isometry(msf_pose->pose.pose);
  182. Eigen::Isometry3d delta = pose0.inverse() * pose1; // 相乘得到一个描述从pose0到pose1的变换的3D仿射变换
  183. msf_source = "imu";
  184. msf_delta = delta.cast<float>(); // 将double转换成float
  185. } else {
  186. std::cerr << "msf data is too old" << std::endl;
  187. }
  188. } //
  189. else if(private_nh.param<bool>("enable_robot_odometry_init_guess", false) && !prev_time.isZero()) // .launch 给的参数是 false !prev_time.isZero() 判断是不是有效的时间戳
  190. {
  191. tf::StampedTransform transform; // 声明一个变量,用来存储两个坐标帧之间的变换信息
  192. if(tf_listener.waitForTransform(cloud->header.frame_id, stamp, cloud->header.frame_id, prev_time, robot_odom_frame_id, ros::Duration(0)))
  193. // 将cloud->header.frame_id 坐标帧变换到 robot_odom_frame_id 坐标帧
  194. // waitForTransform 方法等待直到可以计算出这个变换,或者直到超时(这里设置的超时时间为0,意味着无限等待) stamp 是当前的时间戳
  195. {
  196. tf_listener.lookupTransform(cloud->header.frame_id, stamp, cloud->header.frame_id, prev_time, robot_odom_frame_id, transform);
  197. // 如果waitForTransform成功,那么调用lookupTransform方法来获取实际的变换,并将其存储在transform变量中
  198. }
  199. else if(tf_listener.waitForTransform(cloud->header.frame_id, ros::Time(0), cloud->header.frame_id, prev_time, robot_odom_frame_id, ros::Duration(0)))
  200. // 如果上面的变换失败了,再尝试一遍,但参考时间改成 ros::Time(0)
  201. {
  202. tf_listener.lookupTransform(cloud->header.frame_id, ros::Time(0), cloud->header.frame_id, prev_time, robot_odom_frame_id, transform);
  203. }
  204. if(transform.stamp_.isZero()) {
  205. NODELET_WARN_STREAM("failed to look up transform between " << cloud->header.frame_id << " and " << robot_odom_frame_id);
  206. } else {
  207. msf_source = "odometry";
  208. msf_delta = tf2isometry(transform).cast<float>(); // 将 transform 转换为 Eigen::Isometry3f 类型(3D仿射变换,使用float类型)
  209. }
  210. }
  211. pcl::PointCloud<PointT>::Ptr aligned(new pcl::PointCloud<PointT>()); // 创建一个新的点云对象aligned,用于存储对齐后的点云数据
  212. registration->align(*aligned, prev_trans * msf_delta.matrix()); // 用 registration 来对齐点云, 如果没有imu或者odom信息, msf_delta 是单位矩阵, 即将观测到的点云数据对齐到关键帧?
  213. publish_scan_matching_status(stamp, cloud->header.frame_id, aligned, msf_source, msf_delta); // 发布扫描匹配的状态信息,包括时间戳、坐标帧ID、对齐后的点云、位姿来源和位姿更新
  214. if(!registration->hasConverged()) { // 检查扫描匹配是否收敛(是否匹配成功?)。如果没有收敛,输出信息并返回上一次的位姿
  215. NODELET_INFO_STREAM("scan matching has not converged!!");
  216. NODELET_INFO_STREAM("ignore this frame(" << stamp << ")");
  217. return keyframe_pose * prev_trans;
  218. }
  219. Eigen::Matrix4f trans = registration->getFinalTransformation(); // 获得当前点云和上一帧点云 关键帧 的仿射变换
  220. Eigen::Matrix4f odom = keyframe_pose * trans; // 算出来 odom
  221. if(transform_thresholding) { // .launch 设置为false 默认为 false
  222. // 如果启用了变换阈值判断,计算本次变换的平移和旋转,并与最大可接受值进行比较。如果超出阈值,输出信息并返回上一次的位姿
  223. // 即如果某两帧的点云差别特别大,忽略后面这一帧的匹配,返回上一个姿态的 odom
  224. Eigen::Matrix4f delta = prev_trans.inverse() * trans;
  225. double dx = delta.block<3, 1>(0, 3).norm();
  226. double da = std::acos(Eigen::Quaternionf(delta.block<3, 3>(0, 0)).w());
  227. if(dx > max_acceptable_trans || da > max_acceptable_angle) {
  228. NODELET_INFO_STREAM("too large transform!! " << dx << "[m] " << da << "[rad]");
  229. NODELET_INFO_STREAM("ignore this frame(" << stamp << ")");
  230. return keyframe_pose * prev_trans;
  231. }
  232. }
  233. prev_time = stamp; // 当前帧的时间戳
  234. prev_trans = trans; // 当前帧的仿射变换
  235. auto keyframe_trans = matrix2transform(stamp, keyframe_pose, odom_frame_id, "keyframe"); // 将变换矩阵转换为tf::Transform对象,用于发布关键帧的变换
  236. keyframe_broadcaster.sendTransform(keyframe_trans); // 发布关键帧的变换(这个是发送到哪里? )
  237. double delta_trans = trans.block<3, 1>(0, 3).norm(); // 计算 当前帧 与 关键帧 变换的 平移距离
  238. double delta_angle = std::acos(Eigen::Quaternionf(trans.block<3, 3>(0, 0)).w()); // 计算 当前帧 与 关键帧 变换的 旋转角度
  239. double delta_time = (stamp - keyframe_stamp).toSec(); // 计算 当前帧 与 关键帧 变换的 时间差
  240. if(delta_trans > keyframe_delta_trans || delta_angle > keyframe_delta_angle || delta_time > keyframe_delta_time) { // 如果有一个超过阈值,更新关键帧
  241. keyframe = filtered;
  242. registration->setInputTarget(keyframe);
  243. keyframe_pose = odom;
  244. keyframe_stamp = stamp;
  245. prev_time = stamp;
  246. prev_trans.setIdentity();
  247. }
  248. if(aligned_points_pub.getNumSubscribers() > 0) { // 如果有节点订阅了对齐后的点云,进行变换并发布
  249. pcl::transformPointCloud(*cloud, *aligned, odom);
  250. aligned->header.frame_id = odom_frame_id;
  251. aligned_points_pub.publish(*aligned);
  252. }
  253. std::cout << "The matrix odom is: \n" << odom << std::endl;
  254. return odom; // 返回里程计
  255. }
  256. /**
  257. * @brief publish odometry
  258. * @param stamp timestamp
  259. * @param pose odometry pose to be published
  260. */
  261. void publish_odometry(const ros::Time& stamp, const std::string& base_frame_id, const Eigen::Matrix4f& pose) { // 发布里程计数据
  262. // publish transform stamped for IMU integration
  263. geometry_msgs::TransformStamped odom_trans = matrix2transform(stamp, pose, odom_frame_id, base_frame_id);
  264. trans_pub.publish(odom_trans);
  265. // broadcast the transform over tf
  266. odom_broadcaster.sendTransform(odom_trans);
  267. // publish the transform
  268. nav_msgs::Odometry odom;
  269. odom.header.stamp = stamp;
  270. odom.header.frame_id = odom_frame_id;
  271. odom.pose.pose.position.x = pose(0, 3);
  272. odom.pose.pose.position.y = pose(1, 3);
  273. odom.pose.pose.position.z = pose(2, 3);
  274. odom.pose.pose.orientation = odom_trans.transform.rotation;
  275. odom.child_frame_id = base_frame_id;
  276. odom.twist.twist.linear.x = 0.0;
  277. odom.twist.twist.linear.y = 0.0;
  278. odom.twist.twist.angular.z = 0.0;
  279. odom_pub.publish(odom);
  280. }
  281. /**
  282. * @brief publish scan matching status
  283. */
  284. void publish_scan_matching_status(const ros::Time& stamp, const std::string& frame_id, pcl::PointCloud<pcl::PointXYZI>::ConstPtr aligned, const std::string& msf_source, const Eigen::Isometry3f& msf_delta) {
  285. // 发布扫描的状态,包括匹配是否收敛、匹配误差、内点比例、相对位置等信息
  286. if(!status_pub.getNumSubscribers()) {
  287. return;
  288. }
  289. ScanMatchingStatus status;
  290. status.header.frame_id = frame_id;
  291. status.header.stamp = stamp;
  292. status.has_converged = registration->hasConverged();
  293. status.matching_error = registration->getFitnessScore();
  294. const double max_correspondence_dist = 0.5;
  295. int num_inliers = 0;
  296. std::vector<int> k_indices;
  297. std::vector<float> k_sq_dists;
  298. for(int i = 0; i < aligned->size(); i++) {
  299. const auto& pt = aligned->at(i);
  300. registration->getSearchMethodTarget()->nearestKSearch(pt, 1, k_indices, k_sq_dists);
  301. if(k_sq_dists[0] < max_correspondence_dist * max_correspondence_dist) {
  302. num_inliers++;
  303. }
  304. }
  305. status.inlier_fraction = static_cast<float>(num_inliers) / aligned->size();
  306. status.relative_pose = isometry2pose(Eigen::Isometry3f(registration->getFinalTransformation()).cast<double>());
  307. if(!msf_source.empty()) {
  308. status.prediction_labels.resize(1);
  309. status.prediction_labels[0].data = msf_source;
  310. status.prediction_errors.resize(1);
  311. Eigen::Isometry3f error = Eigen::Isometry3f(registration->getFinalTransformation()).inverse() * msf_delta;
  312. status.prediction_errors[0] = isometry2pose(error.cast<double>());
  313. }
  314. status_pub.publish(status);
  315. }
  316. private:
  317. // ROS topics
  318. ros::NodeHandle nh;
  319. ros::NodeHandle private_nh;
  320. ros::Subscriber points_sub;
  321. ros::Subscriber msf_pose_sub;
  322. ros::Subscriber msf_pose_after_update_sub;
  323. ros::Publisher odom_pub;
  324. ros::Publisher trans_pub;
  325. ros::Publisher aligned_points_pub;
  326. ros::Publisher status_pub;
  327. tf::TransformListener tf_listener;
  328. tf::TransformBroadcaster odom_broadcaster;
  329. tf::TransformBroadcaster keyframe_broadcaster;
  330. std::string published_odom_topic;
  331. std::string points_topic;
  332. std::string odom_frame_id;
  333. std::string robot_odom_frame_id;
  334. ros::Publisher read_until_pub;
  335. // keyframe parameters
  336. double keyframe_delta_trans; // minimum distance between keyframes
  337. double keyframe_delta_angle; //
  338. double keyframe_delta_time; //
  339. // registration validation by thresholding
  340. bool transform_thresholding; //
  341. double max_acceptable_trans; //
  342. double max_acceptable_angle;
  343. // odometry calculation
  344. geometry_msgs::PoseWithCovarianceStampedConstPtr msf_pose;
  345. geometry_msgs::PoseWithCovarianceStampedConstPtr msf_pose_after_update;
  346. ros::Time prev_time; // 当前关键帧的时间戳?
  347. Eigen::Matrix4f prev_trans; // 地图起点到当前 帧 的放射变换
  348. Eigen::Matrix4f keyframe_pose; // 地图起点到当前 关键帧 的仿射变换
  349. ros::Time keyframe_stamp; // 关键帧的时间戳
  350. pcl::PointCloud<PointT>::ConstPtr keyframe; // 关键帧
  351. //
  352. pcl::Filter<PointT>::Ptr downsample_filter;
  353. pcl::Registration<PointT, PointT>::Ptr registration;
  354. };
  355. } // namespace hdl_graph_slam
  356. PLUGINLIB_EXPORT_CLASS(hdl_graph_slam::ScanMatchingOdometryNodelet, nodelet::Nodelet)