
Cplusplus RGBD Mapping


Introduction

This tutorial shows an example of using the RTAB-Map library in C++ for RGB-D mapping.

Details

Project configuration (example with CMake)

Here is our project directory:

MyProject
|
-->CMakeLists.txt
-->main.cpp
-->MapBuilder.h

Files:

In your CMakeLists.txt:

cmake_minimum_required(VERSION 3.5)
PROJECT( MyProject )

FIND_PACKAGE(RTABMap REQUIRED)
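# If RTAB-Map is installed in a non-standard prefix, you may need to point CMake
# to it, e.g. with -DRTABMap_DIR=<prefix>/lib/cmake/rtabmap (exact path may vary).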

SET(moc_srcs MapBuilder.h)

ADD_EXECUTABLE(rgbd_mapping main.cpp ${moc_srcs})
 
TARGET_LINK_LIBRARIES(rgbd_mapping rtabmap::gui)
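# rtabmap::gui transitively brings in rtabmap::core and the required Qt libraries.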

SET_TARGET_PROPERTIES(
    rgbd_mapping
  PROPERTIES
    AUTOUIC ON
    AUTOMOC ON
    AUTORCC ON
)

Code

The main.cpp:

#include <rtabmap/core/Odometry.h>
#include "rtabmap/core/Rtabmap.h"
#include "rtabmap/core/RtabmapThread.h"
#include "rtabmap/core/CameraRGBD.h"
#include "rtabmap/core/CameraStereo.h"
#include "rtabmap/core/CameraThread.h"
#include "rtabmap/core/OdometryThread.h"
#include "rtabmap/core/Graph.h"
#include "rtabmap/utilite/UEventsManager.h"
#include <QApplication>
#include <stdio.h>
#include <pcl/io/pcd_io.h>
#include <pcl/io/ply_io.h>
#include <pcl/filters/filter.h>

#ifdef RTABMAP_PYTHON
#include "rtabmap/core/PythonInterface.h"
#endif

#include "MapBuilder.h"

void showUsage()
{
   printf("\nUsage:\n"
   		"rtabmap-rgbd_mapping driver\n"
   		"  driver       Driver number to use: 0=OpenNI-PCL, 1=OpenNI2, 2=Freenect, 3=OpenNI-CV, 4=OpenNI-CV-ASUS, 5=Freenect2, 6=ZED SDK, 7=RealSense, 8=RealSense2, 9=Kinect for Azure SDK, 10=MYNT EYE S\n\n");
   exit(1);
}

using namespace rtabmap;
int main(int argc, char * argv[])
{
   ULogger::setType(ULogger::kTypeConsole);
   ULogger::setLevel(ULogger::kWarning);

#ifdef RTABMAP_PYTHON
   PythonInterface python; // Make sure we initialize python in main thread
#endif

   int driver = 0;
   if(argc < 2)
   {
   	showUsage();
   }
   else
   {
   	driver = atoi(argv[argc-1]);
   	if(driver < 0 || driver > 10)
   	{
   		UERROR("driver should be between 0 and 10.");
   		showUsage();
   	}
   }

   // Here is the pipeline that we will use:
   // CameraOpenni -> "CameraEvent" -> OdometryThread -> "OdometryEvent" -> RtabmapThread -> "RtabmapEvent"

   // Create the camera; it will send a CameraEvent at the specified rate.
   // Its local transform is set so that z is up, y is left and x is forward.
   Camera * camera = 0;
   if(driver == 1)
   {
   	if(!CameraOpenNI2::available())
   	{
   		UERROR("Not built with OpenNI2 support...");
   		exit(-1);
   	}
   	camera = new CameraOpenNI2();
   }
   else if(driver == 2)
   {
   	if(!CameraFreenect::available())
   	{
   		UERROR("Not built with Freenect support...");
   		exit(-1);
   	}
   	camera = new CameraFreenect();
   }
   else if(driver == 3)
   {
   	if(!CameraOpenNICV::available())
   	{
   		UERROR("Not built with OpenNI from OpenCV support...");
   		exit(-1);
   	}
   	camera = new CameraOpenNICV();
   }
   else if(driver == 4)
   {
   	if(!CameraOpenNICV::available())
   	{
   		UERROR("Not built with OpenNI from OpenCV support...");
   		exit(-1);
   	}
   	camera = new CameraOpenNICV(true);
   }
   else if (driver == 5)
   {
   	if (!CameraFreenect2::available())
   	{
   		UERROR("Not built with Freenect2 support...");
   		exit(-1);
   	}
   	camera = new CameraFreenect2(0, CameraFreenect2::kTypeColor2DepthSD);
   }
   else if (driver == 6)
   {
   	if (!CameraStereoZed::available())
   	{
   		UERROR("Not built with ZED SDK support...");
   		exit(-1);
   	}
   	camera = new CameraStereoZed(0, 2, 1, 1, 100, false);
   }
   else if (driver == 7)
   {
   	if (!CameraRealSense::available())
   	{
   		UERROR("Not built with RealSense support...");
   		exit(-1);
   	}
   	camera = new CameraRealSense();
   }
   else if (driver == 8)
   {
   	if (!CameraRealSense2::available())
   	{
   		UERROR("Not built with RealSense2 support...");
   		exit(-1);
   	}
   	camera = new CameraRealSense2();
   }
   else if (driver == 9)
   {
   	if (!rtabmap::CameraK4A::available())
   	{
   		UERROR("Not built with Kinect for Azure SDK support...");
   		exit(-1);
   	}
   	camera = new rtabmap::CameraK4A(1);
   }
   else if (driver == 10)
   {
   	if (!rtabmap::CameraMyntEye::available())
   	{
   		UERROR("Not built with Mynt Eye S support...");
   		exit(-1);
   	}
   	camera = new rtabmap::CameraMyntEye();
   }
   else
   {
   	camera = new rtabmap::CameraOpenni();
   }

   if(!camera->init())
   {
   	UERROR("Camera init failed!");
   	exit(1);
   }

   CameraThread cameraThread(camera);


   // GUI stuff: the MapBuilder handler will receive RtabmapEvent and construct the map.
   // We give it the camera thread so the GUI can pause/resume the camera.
   QApplication app(argc, argv);
   MapBuilder mapBuilder(&cameraThread);

   // Create an odometry thread to process camera events, it will send OdometryEvent.
   OdometryThread odomThread(Odometry::create());


   ParametersMap params;
   //params.insert(ParametersPair(Parameters::kRGBDCreateOccupancyGrid(), "true")); // uncomment to create local occupancy grids
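   // Other RTAB-Map parameters can be set the same way; for example (parameter
   // names follow the Parameters class, values are always strings):
   //params.insert(ParametersPair(Parameters::kRtabmapDetectionRate(), "1"));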

   // Create RTAB-Map to process OdometryEvent
   Rtabmap * rtabmap = new Rtabmap();
   rtabmap->init(params);
   RtabmapThread rtabmapThread(rtabmap); // ownership is transferred

   // Setup handlers
   odomThread.registerToEventsManager();
   rtabmapThread.registerToEventsManager();
   mapBuilder.registerToEventsManager();

   // The RTAB-Map is subscribed by default to CameraEvent, but we want
   // RTAB-Map to process OdometryEvent instead, ignoring the CameraEvent.
   // We can do that by creating a "pipe" between the camera and odometry, then
   // only the odometry will receive CameraEvent from that camera. RTAB-Map is
   // also subscribed to OdometryEvent by default, so no need to create a pipe between
   // odometry and RTAB-Map.
   UEventsManager::createPipe(&cameraThread, &odomThread, "CameraEvent");

   // Let's start the threads
   rtabmapThread.start();
   odomThread.start();
   cameraThread.start();

   mapBuilder.show();
   app.exec(); // main loop

   // remove handlers
   mapBuilder.unregisterFromEventsManager();
   rtabmapThread.unregisterFromEventsManager();
   odomThread.unregisterFromEventsManager();

   // Kill all threads
   cameraThread.kill();
   odomThread.join(true);
   rtabmapThread.join(true);

   // Save 3D map
   printf("Saving rtabmap_cloud.pcd...\n");
   std::map<int, Signature> nodes;
   std::map<int, Transform> optimizedPoses;
   std::multimap<int, Link> links;
   rtabmap->getGraph(optimizedPoses, links, true, true, &nodes, true, true, true, true); // get the optimized global graph, with node data
   pcl::PointCloud<pcl::PointXYZRGB>::Ptr cloud(new pcl::PointCloud<pcl::PointXYZRGB>);
   for(std::map<int, Transform>::iterator iter=optimizedPoses.begin(); iter!=optimizedPoses.end(); ++iter)
   {
   	Signature node = nodes.find(iter->first)->second;

   	// uncompress data
   	node.sensorData().uncompressData();

   	pcl::PointCloud<pcl::PointXYZRGB>::Ptr tmp = util3d::cloudRGBFromSensorData(
   			node.sensorData(),
   			4,           // image decimation before creating the clouds
   			4.0f,        // maximum depth of the cloud
   			0.0f);       // minimum depth of the cloud (0=no minimum)
   	pcl::PointCloud<pcl::PointXYZRGB>::Ptr tmpNoNaN(new pcl::PointCloud<pcl::PointXYZRGB>);
   	std::vector<int> index;
   	pcl::removeNaNFromPointCloud(*tmp, *tmpNoNaN, index);
   	if(!tmpNoNaN->empty())
   	{
   		*cloud += *util3d::transformPointCloud(tmpNoNaN, iter->second); // transform the point cloud to its pose
   	}
   }
   if(cloud->size())
   {
   	printf("Voxel grid filtering of the assembled cloud (voxel=%f, %d points)\n", 0.01f, (int)cloud->size());
   	cloud = util3d::voxelize(cloud, 0.01f);

   	printf("Saving rtabmap_cloud.pcd... done! (%d points)\n", (int)cloud->size());
   	pcl::io::savePCDFile("rtabmap_cloud.pcd", *cloud);
   	//pcl::io::savePLYFile("rtabmap_cloud.ply", *cloud); // to save in PLY format
   }
   else
   {
   	printf("Saving rtabmap_cloud.pcd... failed! The cloud is empty.\n");
   }

   // Save trajectory
   printf("Saving rtabmap_trajectory.txt ...\n");
   if(optimizedPoses.size() && graph::exportPoses("rtabmap_trajectory.txt", 0, optimizedPoses, links)) // format 0 = raw
   {
   	printf("Saving rtabmap_trajectory.txt... done!\n");
   }
   else
   {
   	printf("Saving rtabmap_trajectory.txt... failed!\n");
   }

   rtabmap->close(false); // close without saving the database

   return 0;
}
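Once the example exits, the assembled cloud can be inspected with any PCL tool (e.g. pcl_viewer rtabmap_cloud.pcd). Below is a minimal sketch, assuming only PCL is available, that loads the saved file back and prints its size:

#include <pcl/io/pcd_io.h>
#include <pcl/point_types.h>
#include <cstdio>

int main()
{
   // Load the cloud saved by the mapping example above
   pcl::PointCloud<pcl::PointXYZRGB>::Ptr cloud(new pcl::PointCloud<pcl::PointXYZRGB>);
   if(pcl::io::loadPCDFile("rtabmap_cloud.pcd", *cloud) != 0)
   {
      printf("Could not load rtabmap_cloud.pcd\n");
      return 1;
   }
   printf("Loaded %d points\n", (int)cloud->size());
   return 0;
}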

The MapBuilder class (for visualization):

#ifndef MAPBUILDER_H_
#define MAPBUILDER_H_

#include <QVBoxLayout>
#include <QtCore/QMetaType>
#include <QAction>

#ifndef Q_MOC_RUN // Mac OS X issue
#include "rtabmap/gui/CloudViewer.h"
#include "rtabmap/core/util3d.h"
#include "rtabmap/core/util3d_filtering.h"
#include "rtabmap/core/util3d_transforms.h"
#include "rtabmap/core/util3d_mapping.h"
#include "rtabmap/core/RtabmapEvent.h"
#include "rtabmap/core/global_map/OccupancyGrid.h"
#endif
#include "rtabmap/utilite/UStl.h"
#include "rtabmap/utilite/UConversion.h"
#include "rtabmap/utilite/UEventsHandler.h"
#include "rtabmap/utilite/ULogger.h"
#include "rtabmap/core/OdometryEvent.h"
#include "rtabmap/core/CameraThread.h"

using namespace rtabmap;

// This class receives RtabmapEvent and construct/update a 3D Map
class MapBuilder : public QWidget, public UEventsHandler
{
   Q_OBJECT
public:
   //Camera ownership is not transferred!
   MapBuilder(CameraThread * camera = 0) :
   	camera_(camera),
   	odometryCorrection_(Transform::getIdentity()),
   	processingStatistics_(false),
   	lastOdometryProcessed_(true),
   	grid_(&localGrids_)
   {
   	this->setWindowFlags(Qt::Dialog);
   	this->setWindowTitle(tr("3D Map"));
   	this->setMinimumWidth(800);
   	this->setMinimumHeight(600);

   	cloudViewer_ = new CloudViewer(this);

   	QVBoxLayout *layout = new QVBoxLayout();
   	layout->addWidget(cloudViewer_);
   	this->setLayout(layout);

   	qRegisterMetaType<rtabmap::OdometryEvent>("rtabmap::OdometryEvent");
   	qRegisterMetaType<rtabmap::Statistics>("rtabmap::Statistics");

   	QAction * pause = new QAction(this);
   	this->addAction(pause);
   	pause->setShortcut(Qt::Key_Space);
   	connect(pause, SIGNAL(triggered()), this, SLOT(pauseDetection()));
   }

   virtual ~MapBuilder()
   {
   	this->unregisterFromEventsManager();
   }

protected Q_SLOTS:
   virtual void pauseDetection()
   {
   	UWARN("");
   	if(camera_)
   	{
   		if(camera_->isCapturing())
   		{
   			camera_->join(true);
   		}
   		else
   		{
   			camera_->start();
   		}
   	}
   }

   virtual void processOdometry(const rtabmap::OdometryEvent & odom)
   {
   	if(!this->isVisible())
   	{
   		return;
   	}

   	Transform pose = odom.pose();
   	if(pose.isNull())
   	{
   		//Odometry lost
   		cloudViewer_->setBackgroundColor(Qt::darkRed);

   		pose = lastOdomPose_;
   	}
   	else
   	{
   		cloudViewer_->setBackgroundColor(cloudViewer_->getDefaultBackgroundColor());
   	}
   	if(!pose.isNull())
   	{
   		lastOdomPose_ = pose;

   		// 3d cloud
   		if(odom.data().depthOrRightRaw().cols == odom.data().imageRaw().cols &&
   		   odom.data().depthOrRightRaw().rows == odom.data().imageRaw().rows &&
   		   !odom.data().depthOrRightRaw().empty() &&
   		   (odom.data().stereoCameraModels().size() || odom.data().cameraModels().size()))
   		{
   			pcl::PointCloud<pcl::PointXYZRGB>::Ptr cloud = util3d::cloudRGBFromSensorData(
   				odom.data(),
   				2,     // decimation
   				4.0f); // max depth
   			if(cloud->size())
   			{
   				if(!cloudViewer_->addCloud("cloudOdom", cloud, odometryCorrection_*pose))
   				{
   					UERROR("Adding cloudOdom to viewer failed!");
   				}
   			}
   			else
   			{
   				cloudViewer_->setCloudVisibility("cloudOdom", false);
   				UWARN("Empty cloudOdom!");
   			}
   		}

   		if(!odom.pose().isNull())
   		{
   			// update camera position
   			cloudViewer_->updateCameraTargetPosition(odometryCorrection_*odom.pose());
   		}
   	}
   	cloudViewer_->update();

   	lastOdometryProcessed_ = true;
   }


   virtual void processStatistics(const rtabmap::Statistics & stats)
   {
   	processingStatistics_ = true;

   	//============================
   	// Add RGB-D clouds
   	//============================
   	const std::map<int, Transform> & poses = stats.poses();
   	QMap<std::string, Transform> clouds = cloudViewer_->getAddedClouds();
   	for(std::map<int, Transform>::const_iterator iter = poses.lower_bound(1); iter!=poses.end(); ++iter)
   	{
   		if(!iter->second.isNull())
   		{
   			std::string cloudName = uFormat("cloud%d", iter->first);

   			// 3d point cloud
   			if(clouds.contains(cloudName))
   			{
   				// Update only if the pose has changed
   				Transform tCloud;
   				cloudViewer_->getPose(cloudName, tCloud);
   				if(tCloud.isNull() || iter->second != tCloud)
   				{
   					if(!cloudViewer_->updateCloudPose(cloudName, iter->second))
   					{
   						UERROR("Updating pose cloud %d failed!", iter->first);
   					}
   				}
   				cloudViewer_->setCloudVisibility(cloudName, true);
   			}
   			else if(iter->first == stats.getLastSignatureData().id())
   			{
   				Signature s = stats.getLastSignatureData();
   				s.sensorData().uncompressData(); // make sure data is uncompressed
   				// Add the new cloud
   				pcl::PointCloud<pcl::PointXYZRGB>::Ptr cloud = util3d::cloudRGBFromSensorData(
   						s.sensorData(),
   					    4,     // decimation
   					    4.0f); // max depth
   				if(cloud->size())
   				{
   					if(!cloudViewer_->addCloud(cloudName, cloud, iter->second))
   					{
   						UERROR("Adding cloud %d to viewer failed!", iter->first);
   					}
   				}
   				else
   				{
   					UWARN("Empty cloud %d!", iter->first);
   				}
   			}
   		}
   		else
   		{
   			UWARN("Null pose for %d ?!?", iter->first);
   		}
   	}

   	//============================
   	// Add 3D graph (show all poses)
   	//============================
   	cloudViewer_->removeAllGraphs();
   	cloudViewer_->removeCloud("graph_nodes");
   	if(poses.size())
   	{
   		// Set graph
   		pcl::PointCloud<pcl::PointXYZ>::Ptr graph(new pcl::PointCloud<pcl::PointXYZ>);
   		pcl::PointCloud<pcl::PointXYZ>::Ptr graphNodes(new pcl::PointCloud<pcl::PointXYZ>);
   		for(std::map<int, Transform>::const_iterator iter=poses.lower_bound(1); iter!=poses.end(); ++iter)
   		{
   			graph->push_back(pcl::PointXYZ(iter->second.x(), iter->second.y(), iter->second.z()));
   		}
   		*graphNodes = *graph;


   		// add graph
   		cloudViewer_->addOrUpdateGraph("graph", graph, Qt::gray);
   		cloudViewer_->addCloud("graph_nodes", graphNodes, Transform::getIdentity(), Qt::green);
   		cloudViewer_->setCloudPointSize("graph_nodes", 5);
   	}

   	//============================
   	// Update/add occupancy grid (when RGBD/CreateOccupancyGrid is true)
   	//============================
   	if(grid_.addedNodes().find(stats.getLastSignatureData().id()) == grid_.addedNodes().end())
   	{
   		if(stats.getLastSignatureData().sensorData().gridCellSize() > 0.0f)
   		{
   			cv::Mat groundCells, obstacleCells, emptyCells;
   			stats.getLastSignatureData().sensorData().uncompressDataConst(0, 0, 0, 0, &groundCells, &obstacleCells, &emptyCells);
   			localGrids_.add(stats.getLastSignatureData().id(), groundCells, obstacleCells, emptyCells, stats.getLastSignatureData().sensorData().gridCellSize(), stats.getLastSignatureData().sensorData().gridViewPoint());
   		}
   	}

   	if(grid_.addedNodes().size() || localGrids_.size())
   	{
   		grid_.update(stats.poses());
   	}
   	if(grid_.addedNodes().size())
   	{
   		float xMin, yMin;
   		cv::Mat map8S = grid_.getMap(xMin, yMin);
   		if(!map8S.empty())
   		{
   			//convert to gray scaled map
   			cv::Mat map8U = util3d::convertMap2Image8U(map8S);
   			cloudViewer_->addOccupancyGridMap(map8U, grid_.getCellSize(), xMin, yMin, 0.75);
   		}
   	}

   	odometryCorrection_ = stats.mapCorrection();

   	cloudViewer_->update();

   	processingStatistics_ = false;
   }

   virtual bool handleEvent(UEvent * event)
   {
   	if(event->getClassName().compare("RtabmapEvent") == 0)
   	{
   		RtabmapEvent * rtabmapEvent = (RtabmapEvent *)event;
   		const Statistics & stats = rtabmapEvent->getStats();
   		// Statistics must be processed in the Qt thread
   		if(this->isVisible())
   		{
   			QMetaObject::invokeMethod(this, "processStatistics", Q_ARG(rtabmap::Statistics, stats));
   		}
   	}
   	else if(event->getClassName().compare("OdometryEvent") == 0)
   	{
   		OdometryEvent * odomEvent = (OdometryEvent *)event;
   		// Odometry must be processed in the Qt thread
   		if(this->isVisible() &&
   		   lastOdometryProcessed_ &&
   		   !processingStatistics_)
   		{
   		lastOdometryProcessed_ = false; // ignore subsequent odometry events until this one is processed
   			QMetaObject::invokeMethod(this, "processOdometry", Q_ARG(rtabmap::OdometryEvent, *odomEvent));
   		}
   	}
   	return false; // do not absorb the event, so other handlers can also receive it
   }

protected:
   CloudViewer * cloudViewer_;
   CameraThread * camera_;
   Transform lastOdomPose_;
   Transform odometryCorrection_;
   bool processingStatistics_;
   bool lastOdometryProcessed_;
   LocalGridCache localGrids_;
   OccupancyGrid grid_;
};


#endif /* MAPBUILDER_H_ */
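
The handleEvent()/QMetaObject::invokeMethod() pattern above is the important part: events arrive on background threads, so they must be forwarded to the Qt thread before touching any widget. The same pattern can be reused for non-GUI consumers; here is a minimal sketch (StatsLogger is a hypothetical class, not part of RTAB-Map) that only logs statistics:

#include <QObject>
#include "rtabmap/core/RtabmapEvent.h"
#include "rtabmap/utilite/UEventsHandler.h"
#include "rtabmap/utilite/ULogger.h"

// Hypothetical example: receives RtabmapEvent on a background thread and
// forwards the statistics to the Qt thread before using them.
class StatsLogger : public QObject, public UEventsHandler
{
   Q_OBJECT
public:
   StatsLogger()
   {
   	qRegisterMetaType<rtabmap::Statistics>("rtabmap::Statistics");
   }
protected Q_SLOTS:
   void printStats(const rtabmap::Statistics & stats)
   {
   	UINFO("Node %d added (loop closure id=%d)", stats.refImageId(), stats.loopClosureId());
   }
protected:
   virtual bool handleEvent(UEvent * event)
   {
   	if(event->getClassName().compare("RtabmapEvent") == 0)
   	{
   		const rtabmap::Statistics & stats = ((rtabmap::RtabmapEvent *)event)->getStats();
   		QMetaObject::invokeMethod(this, "printStats", Q_ARG(rtabmap::Statistics, stats));
   	}
   	return false; // let other handlers also receive the event
   }
};

Like MapBuilder, such a handler must be registered with registerToEventsManager() before the threads start.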

Build and run

In your directory:

$ cmake .
$ make
$ ./rgbd_mapping 0

You should see a window with the 3D map being constructed (the argument is the camera driver number shown in the usage above; 0 selects the OpenNI-PCL driver). If the background turns red, odometry could not be computed: visit the Lost Odometry section for more information.

Example without threads and events

The previous approach based on threads and events is preferred. However, if you want to see step-by-step how images are processed by the different modules, it is easier to remove the threading code, so an example without any threads or events is also provided. The code is here and the dataset used can be downloaded here in the stereo tutorial. The example is also built when RTAB-Map is built from source, under the name rtabmap-noEventsExample. A minimal sketch of its single-threaded loop is shown below.
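The sketch below assumes camera is an initialized rtabmap::Camera as created in main.cpp above, with error handling and visualization omitted (API names follow the rtabmap::Camera/Odometry/Rtabmap classes used earlier; check the rtabmap-noEventsExample source for the exact current API):

#include <rtabmap/core/Camera.h>
#include <rtabmap/core/Odometry.h>
#include <rtabmap/core/OdometryInfo.h>
#include <rtabmap/core/Rtabmap.h>

void singleThreadedLoop(rtabmap::Camera & camera)
{
   rtabmap::Odometry * odometry = rtabmap::Odometry::create();
   rtabmap::Rtabmap rtabmap;
   rtabmap.init();

   rtabmap::SensorData data = camera.takeImage();
   while(data.isValid())
   {
   	rtabmap::OdometryInfo odomInfo;
   	// Compute the pose of the new frame with visual odometry
   	rtabmap::Transform pose = odometry->process(data, &odomInfo);
   	// Update the map with the frame and its pose
   	rtabmap.process(data, pose);
   	data = camera.takeImage(); // next frame
   }
   delete odometry;
   rtabmap.close();
}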

Example (with unzipped stereo_20Hz.zip images):

./rtabmap-noEventsExample 20 2 10 stereo_20Hz stereo_20Hz stereo_20Hz/left stereo_20Hz/right