ORBSLAM2+PCL:基于RGBD摄像机的实时稠密重构
在ORBSLAM2的定位基础上,结合摄像机点云数据实现三维环境的稠密重构。该项目源码编译后运行出现错误,经过分析为存储每帧点云信息的pointcloud容器出现问题,取消存储位姿T即可;因此不再使用其在回环线程的点云模型重置功能,只是在ORBSLAM2的基础上,在tracking线程中添加点云提取和融合功能。
在ORBSLAM2的定位基础上,结合摄像机点云数据实现三维环境的稠密重构,代码在下面的项目上进行修改
https://github.com/tiantiandabaojian/ORB-SLAM2_RGBD_DENSE_MAP.git
修改后的代码
https://github.com/huang-xiaoliang/ORB-SLAM2_RGBD_DENSE_LOOP
该项目源码编译后运行出现错误,经过分析为存储每帧点云信息的pointcloud容器出现问题,取消存储位姿T即可
(后来发现可以是代码版本问题,里面eigen版本太高了,使用旧版本就不会存在这个问题)
因此不再使用其在回环线程的点云模型重置功能,只是在ORBSLAM2的基础上,在tracking线程中创建关键帧处添加点云提取和融合功能
融合后点云在view线程中使用pcl的cloudview进行实时的稠密点云模型显示
如下图所示,融合模型相比原生的点云模型模糊(未回环),这是在低速运动下扫描得到的模型,高速时由于笔记本cpu运算能力有限处理的帧数少,定位误差大
作者回环后重构模型
PART1:添加新的稠密点云模型类pointcloudmapping
// Builds and displays a dense colored point-cloud map from ORB-SLAM2 keyframes.
// A dedicated viewer thread (started in the constructor) renders the fused map
// with PCL's CloudViewer; the tracking thread feeds it via insertKeyFrame2().
class PointCloudMapping
{
public:
typedef pcl::PointXYZRGBA PointT;
typedef pcl::PointCloud<PointT> PointCloud;
// resolution_: voxel-grid leaf size; meank_/thresh_: statistical outlier filter params.
PointCloudMapping( double resolution_,double meank_,double thresh_ );
// Write the current global map to "result.pcd".
void save();
// Insert a keyframe; its RGB-D data is converted to a cloud and fused into the map.
void insertKeyFrame2( KeyFrame* kf, cv::Mat& color, cv::Mat& depth,int idk);
// Signal the viewer thread to stop and join it.
void shutdown();
// Viewer-thread entry point: waits for updates and redraws the global map.
void viewer();
protected:
// Back-project one keyframe's RGB-D images into a camera-frame point cloud.
PointCloud::Ptr generatePointCloud(KeyFrame* kf, cv::Mat& color, cv::Mat& depth);
PointCloud::Ptr globalMap;            // fused dense map (shared with viewer thread)
shared_ptr<thread> viewerThread;      // runs viewer()
bool shutDownFlag =false;             // set by shutdown(); guarded by shutDownMutex
mutex shutDownMutex;
condition_variable keyFrameUpdated;   // wakes viewer() when the map changed
mutex keyFrameUpdateMutex;
// data to generate point clouds
mutex keyframeMutex;
double resolution = 0.01;             // voxel leaf size in meters
double meank = 50;                    // neighbors considered by the outlier filter
double thresh = 1;                    // stddev multiplier for the outlier filter
pcl::VoxelGrid<PointT> voxel;
pcl::StatisticalOutlierRemoval<PointT> statistical_filter;
};
// Constructor: configures the filters and starts the viewer thread that
// displays the dense point-cloud model.
// resolution_: voxel-grid leaf size (meters); meank_: MeanK for the statistical
// outlier filter; thresh_: its stddev-multiplier threshold.
PointCloudMapping::PointCloudMapping(double resolution_, double meank_, double thresh_)
{
    this->resolution = resolution_;
    // BUG FIX: was `this->meank = thresh_;` — the MeanK parameter was silently
    // overwritten with the stddev threshold, misconfiguring the outlier filter.
    this->meank = meank_;
    this->thresh = thresh_;
    statistical_filter.setMeanK(meank);
    statistical_filter.setStddevMulThresh(thresh);
    voxel.setLeafSize(resolution, resolution, resolution);
    globalMap = boost::make_shared<PointCloud>();
    // Spawn the display thread; it outlives this call and is joined in shutdown().
    viewerThread = make_shared<thread>(bind(&PointCloudMapping::viewer, this));
}
void PointCloudMapping::shutdown()
{
{
unique_lock<mutex> lck(shutDownMutex);
shutDownFlag = true;
keyFrameUpdated.notify_one();
}
viewerThread->join();
}
// Convert a keyframe's RGB-D data to a point cloud, transform it into the world
// frame with the keyframe pose, and fuse it into globalMap through a
// statistical-outlier filter followed by voxel-grid downsampling.
// NOTE(review): globalMap is also read by viewer() without synchronization —
// keyframeMutex exists but is unused; confirm whether a lock is needed here.
void PointCloudMapping::insertKeyFrame2(KeyFrame* kf, cv::Mat& color, cv::Mat& depth,int idk)
{
    cout<<"receive a keyframe, id = "<<idk<<" 第"<<kf->mnId<<"个"<<endl;
    PointCloude pointcloude;
    pointcloude.pcID = idk;
    pointcloude.T = ORB_SLAM2::Converter::toSE3Quat( kf->GetPose() );
    pointcloude.pcE = generatePointCloud(kf,color,depth);
    // Move the camera-frame cloud into the world frame (T is world->camera,
    // hence the inverse).
    PointCloud::Ptr p (new PointCloud);
    pcl::transformPointCloud( *(pointcloude.pcE), *p, pointcloude.T.inverse().matrix());
    *globalMap += *p;
    // Outlier removal over the accumulated map.
    PointCloud::Ptr tmp1 ( new PointCloud );
    statistical_filter.setInputCloud(globalMap);
    statistical_filter.filter( *tmp1 );
    // Voxel-grid fusion back into globalMap.
    // (Removed an unused `PointCloud::Ptr tmp` allocation that was never consumed.)
    voxel.setInputCloud( tmp1 );
    voxel.filter( *globalMap );
    //pointcloud2.push_back(pointcloude);
    cout<<"当前关键帧点云数量为:"<<pointcloude.pcE->points.size()<<endl;
    cout<<"融合后关键帧点云数量为:"<<globalMap->points.size()<<endl;
    //keyFrameUpdated.notify_one();
    // Periodically reset the map (every 20 keyframes, at phase 10) to bound memory.
    if((kf->mnId)%20==10)
    {
        globalMap->clear();
        cout<<"清空global点云数据,×××××××××××××××××××××××××××××××××××××××××××"<<endl;
    }
    // Wake the viewer thread every 20 keyframes (at phase 5) to refresh the display.
    if((kf->mnId)%20==5)
    {
        cout<<"唤醒view线程更新点云模型,×××××××××××××××××××××××××××××××××××××××××××"<<endl;
        keyFrameUpdated.notify_one();
    }
}
// Back-project one keyframe's RGB-D pair into a colored point cloud in the
// camera frame, sampling every 3rd pixel in each direction to keep it small.
// Assumes depth is CV_32F in meters and color is 3-channel BGR — TODO confirm
// against the caller's image conversion.
pcl::PointCloud< PointCloudMapping::PointT >::Ptr PointCloudMapping::generatePointCloud(KeyFrame* kf, cv::Mat& color, cv::Mat& depth)
{
    PointCloud::Ptr cloud( new PointCloud() );
    const int step = 3;
    for ( int row = 0; row < depth.rows; row += step )
    {
        const float* depthRow = depth.ptr<float>(row);
        const uchar* colorRow = color.ptr<uchar>(row);
        for ( int col = 0; col < depth.cols; col += step )
        {
            const float d = depthRow[col];
            // Discard invalid / out-of-range depths (accepted window: 1 cm .. 5 m).
            if ( d < 0.01 || d > 5 )
                continue;
            PointT pt;
            pt.z = d;
            // Pinhole back-projection using the keyframe intrinsics.
            pt.x = ( col - kf->cx ) * pt.z / kf->fx;
            pt.y = ( row - kf->cy ) * pt.z / kf->fy;
            // OpenCV stores pixels as BGR.
            pt.b = colorRow[col*3];
            pt.g = colorRow[col*3+1];
            pt.r = colorRow[col*3+2];
            cloud->points.push_back(pt);
        }
    }
    return cloud;
}
// Viewer-thread loop: redraw the global map whenever the tracking thread
// signals an update, until shutdown() raises shutDownFlag.
void PointCloudMapping::viewer()
{
    pcl::visualization::CloudViewer viewer("viewer");
    while(1)
    {
        {
            unique_lock<mutex> lck_shutdown( shutDownMutex );
            if (shutDownFlag)
            {
                break;
            }
        }
        {
            cout<<"view线程进入睡眠××××××××××××××××××××××××××××××××××××"<<endl;
            unique_lock<mutex> lck_keyframeUpdated( keyFrameUpdateMutex );
            // BUG FIX: the original unconditional wait() could hang forever:
            // shutDownFlag is set (and notify_one fired) under shutDownMutex, a
            // *different* mutex from the one this wait uses, so a notification
            // arriving between the flag check above and this wait was lost and
            // shutdown() then blocked in join(). A bounded wait re-checks the
            // flag periodically; spurious wakeups only cause an extra redraw.
            keyFrameUpdated.wait_for( lck_keyframeUpdated, std::chrono::milliseconds(500) );
        }
        // keyframe is updated (or timeout) — refresh the display.
        cout<<"在view线程中显示点云模型×××××××××××××××××××××××××"<<endl;
        viewer.showCloud( globalMap );
    }
}
// Persist the fused global map as a PCD file in the working directory.
void PointCloudMapping::save()
{
    pcl::io::savePCDFile( "result.pcd", *globalMap );
    cout << "globalMap save finished" << endl;
}
PART2:在system类的构造函数中初始化PointCloudMapping并传入tracking线程
// Initialize pointcloud mapping — the PointCloudMapping constructor also
// spawns the PCL viewer thread that displays the dense model.
mpPointCloudMapping = make_shared<PointCloudMapping>( resolution,meank,thresh );
//Initialize the Tracking thread
//(it will live in the main thread of execution, the one that called this constructor)
// The mapper is passed to Tracking so new keyframes can be fed into the dense map.
mpTracker = new Tracking(this, mpVocabulary, mpFrameDrawer, mpMapDrawer,
mpMap, mpPointCloudMapping, mpKeyFrameDatabase, strSettingsFile, mSensor);
PART3:tracking线程中存储关键帧点云数据
// Insert the new keyframe into mpPointCloudMapping: its RGB/depth images are
// converted to a point cloud and fused into the global dense model.
// (Original note: if the keyframe id matches the loop-closure frame id, it is
// added to the point-cloud model.)
mpPointCloudMapping->insertKeyFrame2( pKF, this->mImRGB, this->mImDepth ,idk);
点云模型的显示线程view在PointCloudMapping的构造函数中实现,此处不再单独列出,由于计算机性能有限,点云模型融合后仍会影响orbslam2的追踪性能,后期进行回环融合后,再对建模效果进行进一步评价。
更多推荐
所有评论(0)