Added marker data to frames and keyframes & added viewer marker visuals
Each marker is made up of 4 corner points. These points are undistorted and
can either be used as another source of mapping data or matched with ORB
features in the frame (e.g. using nearest neighbour). The markers provide
the distance and pose of the camera with respect to the marker board plane.
SamuelDudley committed Jan 29, 2017
1 parent 59d2cd1 commit a8da87c
Showing 11 changed files with 183 additions and 64 deletions.
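The commit message suggests matching the undistorted marker corners against ORB features using a nearest-neighbour search. Below is a minimal sketch of that association, assuming the new Frame members introduced in this commit (mvMarkersUn, mvKeysUn); the helper itself is hypothetical and not part of the commit:

```cpp
#include <opencv2/core.hpp>
#include <limits>
#include <vector>

// Hypothetical helper: index of the ORB keypoint closest to a marker corner.
// A caller would still gate the result on a pixel-radius threshold.
int NearestKeyPoint(const cv::KeyPoint &corner,
                    const std::vector<cv::KeyPoint> &keysUn)
{
    int best = -1;
    float bestDist2 = std::numeric_limits<float>::max();
    for (size_t i = 0; i < keysUn.size(); i++)
    {
        const cv::Point2f d = keysUn[i].pt - corner.pt;
        const float dist2 = d.dot(d);   // squared pixel distance
        if (dist2 < bestDist2) { bestDist2 = dist2; best = (int)i; }
    }
    return best;
}
```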
89 changes: 39 additions & 50 deletions Examples/Monocular/mono_cland_live2.cc
@@ -207,7 +207,7 @@ int main(int argc, char **argv)
vector<float> eulerb(3);
vector<float> eulertr(3);

// aruco setup



cv::Mat cameraMatrix = (cv::Mat_<float>(3,3) << fSettings["Camera.fx"],0,fSettings["Camera.cx"],
@@ -216,6 +216,7 @@ int main(int argc, char **argv)

cv::Mat distorsionCoeff = (cv::Mat_<float>(4,1) << fSettings["Camera.k1"],fSettings["Camera.k2"],fSettings["Camera.p1"],fSettings["Camera.p2"]);

// aruco setup
MarkerDetector MDetector;
double MarkerSize = 0.287;
MDetector.setThresholdParams(7, 7);
@@ -224,74 +225,60 @@ int main(int argc, char **argv)
std::map<uint32_t,MarkerPoseTracker> MTracker; // use a map so that for each id, we use a different pose tracker
CameraParameters CamParam;
CamParam.setParams(cameraMatrix, distorsionCoeff, cv::Size(fSettings["Camera.width"],fSettings["Camera.height"]));
cout << "valid?: " << CamParam.isValid() << endl;
// cameraMatrix 3x3 matrix (fx 0 cx, 0 fy cy, 0 0 1)
// distorsionCoeff 4x1 matrix (k1,k2,p1,p2)
// size image size

bool liveTracking = true;
bool captureImages = false;

// Sample for XIMEA OpenCV
xiAPIplusCameraOcv cam;
cam.OpenFirst();
// cam.SetDownsamplingType(XI_SKIPPING); //XI_BINNING
// cam.SetDownsampling(XI_DWN_2x2);
cam.StartAcquisition();
cam.EnableAutoExposureAutoGain();
cv::Mat src;
int frameCount = 0;

// cam.EnableHDR();
// cam.EnableWhiteBalanceAuto();
if (liveTracking) {
// XIMEA OpenCV
cam.OpenFirst();
// cam.SetDownsamplingType(XI_SKIPPING); //XI_BINNING
// cam.SetDownsampling(XI_DWN_2x2);
cam.StartAcquisition();
cam.EnableAutoExposureAutoGain();

// cam.EnableHDR();
// cam.EnableWhiteBalanceAuto();

cam.SetAutoExposureAutoGainExposurePriority(0.8f);
}

cam.SetAutoExposureAutoGainExposurePriority(0.8f);
// cam.SetWidth(1024);
// cam.SetHeight(1024);

cv::Mat src;
// cv::Mat im;
int count = 0;
cv::Mat markerRot = cv::Mat::eye(3,3,CV_32F);

for(int ni=0; ni<nImages; ni++)
{
// Read image from file
// im = cv::imread(vstrImageFilenames[ni],CV_LOAD_IMAGE_UNCHANGED);

src = cam.GetNextImageOcvMat();
cv::resize(src, im, cv::Size(1024, 1024), 0, 0, cv::INTER_CUBIC); // resize to 1024x768 resolution

// cv::imwrite( "/home/uas/Documents/ximea_cal/images/"+std::to_string(count)+".jpg", im );
// count++;
// grab the image from the camera

if (liveTracking) {
src = cam.GetNextImageOcvMat();
cv::resize(src, im, cv::Size(1024, 1024), 0, 0, cv::INTER_CUBIC); // resize to 1024x1024
cout << "FPS = " << cam.GetFrameRate() << endl;
if (captureImages){
cv::imwrite( "/home/uas/Documents/ximea_cal/images/"+std::to_string(frameCount)+".jpg", im );
frameCount++;
}
} else {
// Read image from file
im = cv::imread(vstrImageFilenames[ni],CV_LOAD_IMAGE_UNCHANGED);
}

// Detect markers
vector< Marker > Markers=MDetector.detect(im);
vector<Marker> Markers=MDetector.detect(im);
// Do pose estimation for each marker
for(auto & marker:Markers)
MTracker[marker.id].estimatePose(marker,CamParam,MarkerSize);
// If a marker id is specified then look for just that id; otherwise lock on to the closest marker

for (unsigned int i = 0; i < Markers.size(); i++) {
cv::Rodrigues(Markers[i].Rvec,markerRot);
cout << "Detected target id:" << Markers[i].id << endl << " Tvec:" << Markers[i].Tvec << endl <<"Rvec:" << markerRot << endl;
cout << "Detected target id:" << Markers[i].id << endl << "Tvec:" << Markers[i].Tvec << endl << "Rvec:" << markerRot << endl;
vector<float> eulertr = ORB_SLAM2::Converter::toEuler(ORB_SLAM2::Converter::toQuaternion(markerRot));
cout << "marker roll =" << eulertr[0]*radToDeg << ", pitch=" << eulertr[1]*radToDeg << ", yaw=" << eulertr[2]*radToDeg << endl;
cout << "roll: " << eulertr[0]*radToDeg << ", pitch: " << eulertr[1]*radToDeg << ", yaw: " << eulertr[2]*radToDeg << endl;
cout << "top_left: " << Markers[i][0] << " top_right: " << Markers[i][1] << " btm_right: " << Markers[i][2] << " btm_left: " << Markers[i][3] << endl << endl;
}

// Loop through each detected marker
for (unsigned int i = 0; i < Markers.size(); i++) {
// If marker id was found, draw a green marker
Markers[i].draw(im, cv::Scalar(0, 255, 0), 2, false);
// If pose estimation was successful, draw AR cube and distance
if (CamParam.isValid() && MarkerSize != -1){
// cout << Markers[i].id << ":" << Markers[i].Tvec.at<float>(0,0) << ":" << Markers[i].Tvec.at<float>(0,1) << ":" << Markers[i].Tvec.at<float>(0,2) << endl;
CvDrawingUtils::draw3dAxis(im, Markers[i], CamParam);

// Otherwise draw a red marker
} else {
Markers[i].draw(im, cv::Scalar(0, 0, 255), 2, false);
}
}

// Supply optional inter-frame rotation and translation
// TODO: pull this info from the Autopilot EKF and produce the matrix
@@ -307,7 +294,7 @@ int main(int argc, char **argv)
float currentEast = vNEDPPositions[ni][1];
float currentDown = vNEDPPositions[ni][2];

cout << "time= " << tframe <<" FPS= " << cam.GetFrameRate() << endl;
cout << "time= " << tframe << endl;
// cout << "AP roll 1 =" << currentRoll*radToDeg << ", pitch=" << currentPitch*radToDeg << ", yaw=" << currentYaw*radToDeg << endl;

eulerAutopilotCurrent = {currentRoll, currentPitch, currentYaw}; // we apply negative values as the ORB world & camera frames use -ve rotation conventions
@@ -444,15 +431,17 @@ int main(int argc, char **argv)

if (SLAM.GetStepping()){
// if in stepping mode each frame will require a key press before continuing
cam.DisableAutoExposureAutoGain();
if (liveTracking) {
cam.DisableAutoExposureAutoGain();
}
getchar();
}


}

// Pass the image to the SLAM system
cameraPose = SLAM.TrackMonocular(im, tframe, mAPPoseNED, autopilotPoseCurrentCameraFrame);
cameraPose = SLAM.TrackMonocular(im, tframe, mAPPoseNED, autopilotPoseCurrentCameraFrame, Markers);
trackingStatus = SLAM.GetStatus();
cameraHasJumped = SLAM.CameraHasJumped();

18 changes: 15 additions & 3 deletions include/Frame.h
@@ -30,6 +30,8 @@
#include "KeyFrame.h"
#include "ORBextractor.h"

#include<aruco.h>

#include <opencv2/opencv.hpp>

namespace ORB_SLAM2
@@ -55,7 +57,7 @@ class Frame
Frame(const cv::Mat &imGray, const cv::Mat &imDepth, const double &timeStamp, ORBextractor* extractor,ORBVocabulary* voc, cv::Mat &K, cv::Mat &distCoef, const float &bf, const float &thDepth);

// Constructor for Monocular cameras.
Frame(const cv::Mat &imGray, const double &timeStamp, ORBextractor* extractor,ORBVocabulary* voc, cv::Mat &K, cv::Mat &distCoef, const float &bf, const float &thDepth, cv::Mat &mAPPoseNED, cv::Mat &mIFrameTransRot);
Frame(const cv::Mat &imGray, const double &timeStamp, ORBextractor* extractor,ORBVocabulary* voc, cv::Mat &K, cv::Mat &distCoef, const float &bf, const float &thDepth, cv::Mat &mAPPoseNED, cv::Mat &mIFrameTransRot, vector<aruco::Marker> &vMarkers);

// Extract ORB on the image. 0 for left image and 1 for right image.
void ExtractORB(int flag, const cv::Mat &im);
@@ -66,6 +68,9 @@ class Frame
// Set the AP pose. Body-frame rotation follows the right-handed aircraft standard (x out the nose, y out the right wing, z down). Translation is in the global NED (North East Down) frame
void SetAPPose(cv::Mat mAPPoseNED_);

// Extract the marker points and insert them into a cv::KeyPoint structure to remain consistent with ORB-SLAM
void ExtractMarkerPoints();

// Set the camera pose.
void SetPose(cv::Mat Tcw);

@@ -142,15 +147,20 @@ class Frame
cv::Mat mAPPoseNED;
// Ardupilot EKF interframe translation and rotation matrix
cv::Mat mIFrameTransRot;
// Markers (aruco)
vector<aruco::Marker> vMarkers;

// Number of marker points (for aruco markers this will be a multiple of 4)
int M;

// Number of KeyPoints.
int N;

// Vector of keypoints (original for visualization) and undistorted (actually used by the system).
// In the stereo case, mvKeysUn is redundant as images must be rectified.
// In the RGB-D case, RGB images can be distorted.
std::vector<cv::KeyPoint> mvKeys, mvKeysRight;
std::vector<cv::KeyPoint> mvKeysUn;
std::vector<cv::KeyPoint> mvKeys, mvKeysRight, mvMarkers; //mvMarkers = pixel coords of marker(s)
std::vector<cv::KeyPoint> mvKeysUn, mvMarkersUn; // mvMarkersUn = undistorted pixel coords of marker(s)

// Corresponding stereo coordinate and depth for each keypoint.
// "Monocular" keypoints have a negative value.
@@ -210,6 +220,8 @@ class Frame
// (called in the constructor).
void UndistortKeyPoints();

void UndistortMarkerPoints(); // aruco markers

// Computes image bounds for the undistorted image (called in the constructor).
void ComputeImageBounds(const cv::Mat &imLeft);

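The bodies of ExtractMarkerPoints and UndistortMarkerPoints are not shown in this excerpt. A plausible sketch, assuming they mirror the existing UndistortKeyPoints pattern (an illustration, not the commit's actual implementation):

```cpp
// Sketch only: copy the 4 corners of each aruco::Marker into cv::KeyPoints,
// then undistort them the same way UndistortKeyPoints handles ORB features.
void Frame::ExtractMarkerPoints()
{
    mvMarkers.clear();
    for (const aruco::Marker &marker : vMarkers)
        for (int j = 0; j < 4; j++)              // 4 corner points per marker
            mvMarkers.push_back(cv::KeyPoint(marker[j], 1.0f));
    M = (int)mvMarkers.size();
    UndistortMarkerPoints();
}

void Frame::UndistortMarkerPoints()
{
    if (M == 0 || mDistCoef.at<float>(0) == 0.0f)
    {
        mvMarkersUn = mvMarkers;                 // nothing to undistort
        return;
    }
    cv::Mat mat(M, 2, CV_32F);                   // pack corner coordinates
    for (int i = 0; i < M; i++)
    {
        mat.at<float>(i, 0) = mvMarkers[i].pt.x;
        mat.at<float>(i, 1) = mvMarkers[i].pt.y;
    }
    mat = mat.reshape(2);
    cv::undistortPoints(mat, mat, mK, mDistCoef, cv::Mat(), mK);
    mat = mat.reshape(1);
    mvMarkersUn.resize(M);
    for (int i = 0; i < M; i++)
    {
        cv::KeyPoint kp = mvMarkers[i];
        kp.pt.x = mat.at<float>(i, 0);
        kp.pt.y = mat.at<float>(i, 1);
        mvMarkersUn[i] = kp;
    }
}
```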
7 changes: 7 additions & 0 deletions include/FrameDrawer.h
@@ -25,6 +25,8 @@
#include "MapPoint.h"
#include "Map.h"

#include<aruco.h>

#include<opencv2/core.hpp>
#include<opencv2/features2d.hpp>

@@ -51,11 +53,16 @@ class FrameDrawer
protected:

void DrawTextInfo(cv::Mat &im, int nState, cv::Mat &imText);
void DrawMarkers(cv::Mat &im);

aruco::CameraParameters camParam;
// Info of the frame to be drawn
cv::Mat mIm;
cv::Mat mK;
cv::Mat mDistCoef;
int N;
vector<cv::KeyPoint> mvCurrentKeys;
vector<aruco::Marker> vCurrentMarkers;
vector<bool> mvbMap, mvbVO;
bool mbOnlyTracking;
int mnTracked, mnTrackedVO;
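The body of DrawMarkers is likewise not shown. A hedged sketch reusing aruco's own drawing helpers, as the updated mono_cland_live2.cc does (assumed, not the commit's code):

```cpp
// Sketch only: outline each tracked marker and, when the camera parameters
// are valid, overlay a 3D axis on the marker plane.
void FrameDrawer::DrawMarkers(cv::Mat &im)
{
    for (aruco::Marker &marker : vCurrentMarkers)
    {
        marker.draw(im, cv::Scalar(0, 255, 0), 2, false);  // green outline
        if (camParam.isValid())
            aruco::CvDrawingUtils::draw3dAxis(im, marker, camParam);
    }
}
```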
12 changes: 12 additions & 0 deletions include/KeyFrame.h
@@ -28,6 +28,7 @@
#include "ORBextractor.h"
#include "Frame.h"
#include "KeyFrameDatabase.h"
#include <aruco.h>

#include <mutex>

@@ -174,6 +175,17 @@ class KeyFrame
const std::vector<float> mvDepth; // negative value for monocular points
const cv::Mat mDescriptors;

// Number of MarkerPoints
const int M;

// MarkerPoints
const std::vector<cv::KeyPoint> mvMarkers;
const std::vector<cv::KeyPoint> mvMarkersUn;

// Markers
const std::vector<aruco::Marker> vMarkers;


//BoW
DBoW2::BowVector mBowVec;
DBoW2::FeatureVector mFeatVec;
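Since the new marker members above are const, they can only be set in the KeyFrame constructor's initializer list, copied from the source Frame like the existing const members. An abridged, hypothetical sketch:

```cpp
// Sketch only (abridged): const members must be initialized in the
// constructor's initializer list, mirroring mDescriptors and mvDepth.
KeyFrame::KeyFrame(Frame &F, Map *pMap, KeyFrameDatabase *pKFDB) :
    M(F.M),
    mvMarkers(F.mvMarkers),
    mvMarkersUn(F.mvMarkersUn),
    vMarkers(F.vMarkers)
    // ... remaining initializers elided ...
{
}
```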
4 changes: 3 additions & 1 deletion include/System.h
@@ -36,6 +36,8 @@
#include "ORBVocabulary.h"
#include "Viewer.h"

#include<aruco.h>

namespace ORB_SLAM2
{

@@ -77,7 +79,7 @@ class System
// Process the given monocular frame
// Input images: RGB (CV_8UC3) or grayscale (CV_8U). RGB is converted to grayscale.
// Returns the camera pose (empty if tracking fails).
cv::Mat TrackMonocular(const cv::Mat &im, const double &timestamp, cv::Mat &mAPPoseNED, cv::Mat &mIFrameTransRot);
cv::Mat TrackMonocular(const cv::Mat &im, const double &timestamp, cv::Mat &mAPPoseNED, cv::Mat &mIFrameTransRot, vector<aruco::Marker> &vMarkers);

// This stops local mapping thread (map building) and performs only camera tracking.
void ActivateLocalizationMode();
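With the extra vMarkers parameter, a caller detects markers first and hands them to the tracker together with the image, as the updated example in mono_cland_live2.cc does:

```cpp
// Per frame: detect aruco markers, then pass them into the SLAM system
// together with the image and the autopilot data.
std::vector<aruco::Marker> Markers = MDetector.detect(im);
cv::Mat cameraPose = SLAM.TrackMonocular(im, tframe, mAPPoseNED,
                                         autopilotPoseCurrentCameraFrame,
                                         Markers);
```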
2 changes: 1 addition & 1 deletion include/Tracking.h
@@ -60,7 +60,7 @@ class Tracking
// Preprocess the input and call Track(). Extracts features and performs stereo matching.
cv::Mat GrabImageStereo(const cv::Mat &imRectLeft,const cv::Mat &imRectRight, const double &timestamp);
cv::Mat GrabImageRGBD(const cv::Mat &imRGB,const cv::Mat &imD, const double &timestamp);
cv::Mat GrabImageMonocular(const cv::Mat &im, const double &timestamp, cv::Mat &mAPPoseNED, cv::Mat &mIFrameTransRot);
cv::Mat GrabImageMonocular(const cv::Mat &im, const double &timestamp, cv::Mat &mAPPoseNED, cv::Mat &mIFrameTransRot, vector<aruco::Marker> &vMarkers);

void SetLocalMapper(LocalMapping* pLocalMapper);
void SetLoopClosing(LoopClosing* pLoopClosing);
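GrabImageMonocular presumably forwards the marker vector into the new monocular Frame constructor declared in Frame.h. A sketch assuming the stock ORB-SLAM2 member names (mImGray, mpORBextractorLeft, mpORBVocabulary, mK, mDistCoef, mbf, mThDepth):

```cpp
// Sketch only: thread the markers through to the Frame, matching the
// monocular Frame constructor declared in Frame.h above.
mCurrentFrame = Frame(mImGray, timestamp, mpORBextractorLeft, mpORBVocabulary,
                      mK, mDistCoef, mbf, mThDepth,
                      mAPPoseNED, mIFrameTransRot, vMarkers);
```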
