V-LOAM Source Code Analysis (Part 3)

Please credit the source when reposting: this article comes from zhch_pan's blog, http://www.cnblogs.com/zhchp-blog/

 

This post is the source-code reading work I did for an earlier project. I am posting it online in the hope that it helps others better understand the engineering implementation of V-LOAM. Some of the code has been modified and may differ from the original project, but this does not affect the understanding of the overall workflow.

Source code download link: https://github.com/Jinqiang/demo_lidar

 


 

Node: processDepthmap

Function: using the visual-odometry topic ("/cam_to_init") and the point cloud data ("/sync_scan_cloud_filtered"), transform both the newly arrived point cloud and the accumulated point cloud into the current camera coordinate frame, so that the image and the point cloud data are aligned.
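To make the geometry in the listing easier to follow, here is a minimal stand-alone sketch of the frame-to-frame update that both callbacks apply over and over (the helper name and the Point3 struct are illustrative only, not from the project): given the VO increment rx, ry, rz and tx, ty, tz of the current frame relative to the previous one, a point expressed in the previous camera frame is rotated about the y, x and z axes by the negated angles and the translation is then subtracted.

#include <cmath>

// Illustrative point type; the real code uses pcl::PointXYZI.
struct Point3 { double x, y, z; };

// Map a point from the previous camera frame into the current one, given the VO increment
// (rx, ry, rz, tx, ty, tz) of the current frame relative to the previous frame.
Point3 transformToCurrentFrame(const Point3& p,
                               double rx, double ry, double rz,
                               double tx, double ty, double tz) {
  // rotate about the y axis by -ry
  double x1 = std::cos(ry) * p.x - std::sin(ry) * p.z;
  double y1 = p.y;
  double z1 = std::sin(ry) * p.x + std::cos(ry) * p.z;

  // rotate about the x axis by -rx
  double x2 = x1;
  double y2 = std::cos(rx) * y1 + std::sin(rx) * z1;
  double z2 = -std::sin(rx) * y1 + std::cos(rx) * z1;

  // rotate about the z axis by -rz, then subtract the translation increment
  Point3 q;
  q.x = std::cos(rz) * x2 + std::sin(rz) * y2 - tx;
  q.y = -std::sin(rz) * x2 + std::cos(rz) * y2 - ty;
  q.z = z2 - tz;
  return q;
}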

 

/*
 * The arrays voRx[]~voTz[] store a window of consecutive camera pose increments. depthCloud
 * keeps transforming the previously accumulated points into the most recent camera frame.
 * When a new point cloud arrives, syncCloudHandler() first transforms that cloud into the
 * latest camera frame and then appends it to depthCloud. When a new camera pose arrives,
 * voDataHandler() transforms depthCloud into the new camera frame, downsamples it, and
 * publishes the result.
 */
#include <math.h>
#include <stdio.h>
#include <stdlib.h>
#include <ros/ros.h>

#include <nav_msgs/Odometry.h>

#include <tf/transform_datatypes.h>
#include <tf/transform_listener.h>
#include <tf/transform_broadcaster.h>

#include "pointDefinition.h"

const double PI = 3.1415926;

const int keepVoDataNum = 30;
double voDataTime[keepVoDataNum] = {0};
double voRx[keepVoDataNum] = {0};
double voRy[keepVoDataNum] = {0};
double voRz[keepVoDataNum] = {0};
double voTx[keepVoDataNum] = {0};
double voTy[keepVoDataNum] = {0};
double voTz[keepVoDataNum] = {0};
int voDataInd = -1;
int voRegInd = 0;

pcl::PointCloud<pcl::PointXYZI>::Ptr depthCloud(new pcl::PointCloud<pcl::PointXYZI>());
pcl::PointCloud<pcl::PointXYZ>::Ptr syncCloud(new pcl::PointCloud<pcl::PointXYZ>());
pcl::PointCloud<pcl::PointXYZI>::Ptr tempCloud(new pcl::PointCloud<pcl::PointXYZI>());
pcl::PointCloud<pcl::PointXYZI>::Ptr tempCloud2(new pcl::PointCloud<pcl::PointXYZI>());

double timeRec = 0;
double rxRec = 0, ryRec = 0, rzRec = 0;
double txRec = 0, tyRec = 0, tzRec = 0;

bool systemInited = false;
double initTime;

int startCount = -1;
const int startSkipNum = 5;

ros::Publisher *depthCloudPubPointer = NULL;

void voDataHandler(const nav_msgs::Odometry::ConstPtr& voData)
{
  double time = voData->header.stamp.toSec();

  double roll, pitch, yaw;
  geometry_msgs::Quaternion geoQuat = voData->pose.pose.orientation;
  tf::Matrix3x3(tf::Quaternion(geoQuat.z, -geoQuat.x, -geoQuat.y, geoQuat.w)).getRPY(roll, pitch, yaw);

  double rx = voData->twist.twist.angular.x - rxRec;
  double ry = voData->twist.twist.angular.y - ryRec;
  double rz = voData->twist.twist.angular.z - rzRec;

  if (ry < -PI) {
    ry += 2 * PI;
  } else if (ry > PI) {
    ry -= 2 * PI;
  }

  double tx = voData->pose.pose.position.x - txRec;
  double ty = voData->pose.pose.position.y - tyRec;
  double tz = voData->pose.pose.position.z - tzRec;

  rxRec = voData->twist.twist.angular.x;
  ryRec = voData->twist.twist.angular.y;
  rzRec = voData->twist.twist.angular.z;

  txRec = voData->pose.pose.position.x;
  tyRec = voData->pose.pose.position.y;
  tzRec = voData->pose.pose.position.z;

  // Since we rotate the world frame into the current frame, roll, pitch and yaw should be
  // negated. The angles about the x and y axes were already negated when the odometry was
  // published and received (note the sign flips on geoQuat.x and geoQuat.y above), so their
  // rotation matrices stay unchanged; the angle about the z axis was not negated, so it is
  // negated inside the rotation matrix here.
  double x1 = cos(yaw) * tx + sin(yaw) * tz;
  double y1 = ty;
  double z1 = -sin(yaw) * tx + cos(yaw) * tz;

  double x2 = x1;
  double y2 = cos(pitch) * y1 - sin(pitch) * z1;
  double z2 = sin(pitch) * y1 + cos(pitch) * z1;

  tx = cos(roll) * x2 + sin(roll) * y2;
  ty = -sin(roll) * x2 + cos(roll) * y2;
  tz = z2;

  // voDataInd ranges over 0 ~ keepVoDataNum - 1 (i.e. 0 ~ 29)
  voDataInd = (voDataInd + 1) % keepVoDataNum;
  voDataTime[voDataInd] = time;
  /*
   * rx~rz hold the rotation angles of R_lc, applied in the order z->x->y with the previous
   * frame as the reference; that is, rotating the previous frame by R_lc = ry*rx*rz (applied
   * right to left) yields the current frame. In visualOdometry.cpp, transform[0]~[2] actually
   * store the rotation angles of R_cl, while the angles published in the vo twist are
   * accumulated as angleSum[0]~[2] -= transform[0]~[2], i.e. with a sign flip; after that
   * negation they are the rotation angles of R_lc. The relation between R_lc and R_cl is:
   * R_lc = ry * rx * rz
   * R_cl = (-rz) * (-rx) * (-ry)   (rotations applied from right to left)
   * tx~tz hold the translation of T_lc: the incremental translation of the current frame
   * relative to the previous frame, expressed in the current frame.
   */
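  // The two sets of angles are simply negatives of each other because each elementary rotation
  // satisfies R_axis(a)^-1 = R_axis(-a), so
  //   R_cl = R_lc^-1 = (Ry(ry) * Rx(rx) * Rz(rz))^-1 = Rz(-rz) * Rx(-rx) * Ry(-ry),
  // which is exactly the y -> x -> z sequence of negated angles applied to each point below.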
  voRx[voDataInd] = rx;
  voRy[voDataInd] = ry;
  voRz[voDataInd] = rz;
  voTx[voDataInd] = tx;
  voTy[voDataInd] = ty;
  voTz[voDataInd] = tz;

  double cosrx = cos(rx);
  double sinrx = sin(rx);
  double cosry = cos(ry);
  double sinry = sin(ry);
  double cosrz = cos(rz);
  double sinrz = sin(rz);

  if (time - timeRec < 0.5) {
    pcl::PointXYZI point;
    tempCloud->clear();
    double x1, y1, z1, x2, y2, z2;
    int depthCloudNum = depthCloud->points.size();
    for (int i = 0; i < depthCloudNum; i++) {
      point = depthCloud->points[i];

      x1 = cosry * point.x - sinry * point.z;
      y1 = point.y;
      z1 = sinry * point.x + cosry * point.z;

      x2 = x1;
      y2 = cosrx * y1 + sinrx * z1;
      z2 = -sinrx * y1 + cosrx * z1;

      /*
       * tx~tz hold T_lc, the incremental translation of the current frame relative to the
       * previous frame, expressed in the current frame. Because
       * rx = voData->twist.twist.angular.x - rxRec, the reference frame is the previous one
       * (rxRec). A point P_l in the previous frame is therefore mapped into the current frame
       * by P_c = R_cl * P_l + T_cl, with R_cl = (-rz) * (-rx) * (-ry) and T_cl = -T_lc
       * (i.e. -tx, -ty, -tz), which is why tx, ty, tz are subtracted here.
       */
      point.x = cosrz * x2 + sinrz * y2 - tx;
      point.y = -sinrz * x2 + cosrz * y2 - ty;
      point.z = z2 - tz;

      double pointDis = sqrt(point.x * point.x + point.y * point.y + point.z * point.z);
      double timeDis = time - initTime - point.intensity;
      if (fabs(point.x / point.z) < 2 && fabs(point.y / point.z) < 1 && point.z > 0.5 && pointDis < 15 &&
          timeDis < 5.0) {
        tempCloud->push_back(point);
      }
    }

    depthCloud->clear();
    pcl::VoxelGrid<pcl::PointXYZI> downSizeFilter;
    downSizeFilter.setInputCloud(tempCloud);
    downSizeFilter.setLeafSize(0.05, 0.05, 0.05);
    downSizeFilter.filter(*depthCloud);
    depthCloudNum = depthCloud->points.size();

    tempCloud->clear();
    for (int i = 0; i < depthCloudNum; i++) {
      point = depthCloud->points[i];

      if (fabs(point.x / point.z) < 1 && fabs(point.y / point.z) < 0.6) {
        point.intensity = depthCloud->points[i].z;
        point.x *= 10 / depthCloud->points[i].z;
        point.y *= 10 / depthCloud->points[i].z;
        point.z = 10;

        tempCloud->push_back(point);
      }
    }

    tempCloud2->clear();
    downSizeFilter.setInputCloud(tempCloud);
    downSizeFilter.setLeafSize(0.1, 0.1, 0.1);
    downSizeFilter.filter(*tempCloud2);
    int tempCloud2Num = tempCloud2->points.size();

    for (int i = 0; i < tempCloud2Num; i++) {
      tempCloud2->points[i].z = tempCloud2->points[i].intensity;
      tempCloud2->points[i].x *= tempCloud2->points[i].z / 10;
      tempCloud2->points[i].y *= tempCloud2->points[i].z / 10;
      tempCloud2->points[i].intensity = 10;
    }
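    // The two loops around the 0.1 m voxel filter project each point onto the fixed plane
    // z = 10 (x *= 10/z, y *= 10/z, original depth parked in intensity) and then restore the
    // depth afterwards, so the leaf size effectively bins points by image-plane (angular)
    // position rather than by 3D distance. For example, a point at (1, 0.5, 5) is projected to
    // (2, 1, 10) with intensity 5, and mapped back to (1, 0.5, 5) with intensity 10 at the end.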

    sensor_msgs::PointCloud2 depthCloud2;
    pcl::toROSMsg(*tempCloud2, depthCloud2);
    depthCloud2.header.frame_id = "camera2";
    depthCloud2.header.stamp = voData->header.stamp;
    depthCloudPubPointer->publish(depthCloud2);
  }

  timeRec = time;
}

void syncCloudHandler(const sensor_msgs::PointCloud2ConstPtr& syncCloud2)
{
  if (startCount < startSkipNum) {
    startCount++;
    return;
  }

  if (!systemInited) {
    initTime = syncCloud2->header.stamp.toSec();
    systemInited = true;
  }
  double time = syncCloud2->header.stamp.toSec();
  double timeLasted = time - initTime;

  syncCloud->clear();
  pcl::fromROSMsg(*syncCloud2, *syncCloud);

  double scale = 0;
  int voPreInd = keepVoDataNum - 1;
  if (voDataInd >= 0) {
    while (voDataTime[voRegInd] <= time && voRegInd != voDataInd) {
      voRegInd = (voRegInd + 1) % keepVoDataNum;
    }

    voPreInd = (voRegInd + keepVoDataNum - 1) % keepVoDataNum;
    double voTimePre = voDataTime[voPreInd];
    double voTimeReg = voDataTime[voRegInd];

    if (voTimeReg - voTimePre < 0.5) {
      //modified at 2018/01/09
      //double scale =  (voTimeReg - time) / (voTimeReg - voTimePre);
      scale =  (voTimeReg - time) / (voTimeReg - voTimePre);
      if (scale > 1) {
        scale = 1;
      } else if (scale < 0) {
        scale = 0;
      }
    }
  }
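  // scale measures how far the cloud timestamp lies before the VO frame at voRegInd. For
  // example, voTimePre = 10.00 s, voTimeReg = 10.10 s and time = 10.07 s give
  // scale = (10.10 - 10.07) / 0.10 = 0.3, so 30% of the increment stored at voRegInd is applied
  // below to carry the cloud from its own (interpolated) frame into the voRegInd frame.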

  // Interpolation yields the camera frame that corresponds to the point cloud timestamp; the
  // interpolated rx2~rz2 and tx2~tz2 describe the R, T between the cloud's frame and the frame
  // indexed by voRegInd.
  double rx2 = voRx[voRegInd] * scale;
  double ry2 = voRy[voRegInd] * scale;
  double rz2 = voRz[voRegInd] * scale;

  double tx2 = voTx[voRegInd] * scale;
  double ty2 = voTy[voRegInd] * scale;
  double tz2 = voTz[voRegInd] * scale;

  double cosrx2 = cos(rx2);
  double sinrx2 = sin(rx2);
  double cosry2 = cos(ry2);
  double sinry2 = sin(ry2);
  double cosrz2 = cos(rz2);
  double sinrz2 = sin(rz2);

  pcl::PointXYZI point;
  double x1, y1, z1, x2, y2, z2;
  int syncCloudNum = syncCloud->points.size();
  for (int i = 0; i < syncCloudNum; i++) {
    point.x = syncCloud->points[i].x;
    point.y = syncCloud->points[i].y;
    point.z = syncCloud->points[i].z;
    point.intensity = timeLasted;

    // Transform the point from the interpolated frame into the frame indexed by voRegInd
    x1 = cosry2 * point.x - sinry2 * point.z;
    y1 = point.y;
    z1 = sinry2 * point.x + cosry2 * point.z;

    x2 = x1;
    y2 = cosrx2 * y1 + sinrx2 * z1;
    z2 = -sinrx2 * y1 + cosrx2 * z1;

    point.x = cosrz2 * x2 + sinrz2 * y2 - tx2;
    point.y = -sinrz2 * x2 + cosrz2 * y2 - ty2;
    point.z = z2 - tz2;

    // Keep chaining the stored VO increments so the point ends up in the latest camera frame
    if (voDataInd >= 0) {
      int voAftInd = (voRegInd + 1) % keepVoDataNum;
      while (voAftInd != (voDataInd + 1) % keepVoDataNum) {
        double rx = voRx[voAftInd];
        double ry = voRy[voAftInd];
        double rz = voRz[voAftInd];

        double tx = voTx[voAftInd];
        double ty = voTy[voAftInd];
        double tz = voTz[voAftInd];

        double cosrx = cos(rx);
        double sinrx = sin(rx);
        double cosry = cos(ry);
        double sinry = sin(ry);
        double cosrz = cos(rz);
        double sinrz = sin(rz);

        x1 = cosry * point.x - sinry * point.z;
        y1 = point.y;
        z1 = sinry * point.x + cosry * point.z;

        x2 = x1;
        y2 = cosrx * y1 + sinrx * z1;
        z2 = -sinrx * y1 + cosrx * z1;

        point.x = cosrz * x2 + sinrz * y2 - tx;
        point.y = -sinrz * x2 + cosrz * y2 - ty;
        point.z = z2 - tz;

        voAftInd = (voAftInd + 1) % keepVoDataNum;
      }
    }
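    // The while loop above applied the per-frame increments stored at indices
    // voRegInd+1 ... voDataInd one after another (e.g. with voRegInd = 5, voDataInd = 8 and no
    // wrap-around, the increments at indices 6, 7 and 8 are applied in turn), instead of
    // composing them into a single accumulated transform.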

    double pointDis = sqrt(point.x * point.x + point.y * point.y + point.z * point.z);
    if (fabs(point.x / point.z) < 2 && fabs(point.y / point.z) < 1.5 && point.z > 0.5 && pointDis < 15) {
      depthCloud->push_back(point);
    }
  }
}

int main(int argc, char** argv)
{
  ros::init(argc, argv, "processDepthmap");
  ros::NodeHandle nh;

  ros::Subscriber voDataSub = nh.subscribe<nav_msgs::Odometry> ("/cam_to_init", 5, voDataHandler);

  ros::Subscriber syncCloudSub = nh.subscribe<sensor_msgs::PointCloud2>
                                 ("/sync_scan_cloud_filtered", 5, syncCloudHandler);

  ros::Publisher depthCloudPub = nh.advertise<sensor_msgs::PointCloud2> ("/depth_cloud", 5);
  depthCloudPubPointer = &depthCloudPub;

  ros::spin();

  return 0;
}
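
As a sanity check on the R_lc / R_cl convention described in the comments, the following stand-alone snippet (not part of the original project; it assumes Eigen is available) builds both rotation matrices and verifies that they are inverses of each other:

#include <iostream>
#include <Eigen/Geometry>

int main() {
  // Arbitrary test angles standing in for one VO increment (rx, ry, rz).
  double rx = 0.1, ry = -0.2, rz = 0.05;

  // R_lc = ry * rx * rz (rotations applied right to left), as stated in the comments.
  Eigen::Matrix3d R_lc;
  R_lc = Eigen::AngleAxisd(ry, Eigen::Vector3d::UnitY())
       * Eigen::AngleAxisd(rx, Eigen::Vector3d::UnitX())
       * Eigen::AngleAxisd(rz, Eigen::Vector3d::UnitZ());

  // R_cl = (-rz) * (-rx) * (-ry), the per-axis form the point-transform loops implement.
  Eigen::Matrix3d R_cl;
  R_cl = Eigen::AngleAxisd(-rz, Eigen::Vector3d::UnitZ())
       * Eigen::AngleAxisd(-rx, Eigen::Vector3d::UnitX())
       * Eigen::AngleAxisd(-ry, Eigen::Vector3d::UnitY());

  // Both residuals should be ~0: R_cl is the inverse (transpose) of R_lc.
  std::cout << (R_cl * R_lc - Eigen::Matrix3d::Identity()).norm() << std::endl;
  std::cout << (R_cl - R_lc.transpose()).norm() << std::endl;
  return 0;
}

On a typical Linux install this compiles with something like g++ -I/usr/include/eigen3 (the exact include path depends on how Eigen was installed), and both printed residuals come out on the order of 1e-16.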

 

posted on 2018-04-07 22:15 by zhch_pan