From 8502699db8dbbf99faebc617ca6956c140385ea1 Mon Sep 17 00:00:00 2001
From: Samuel
Date: Mon, 23 Jan 2017 15:16:13 +1030
Subject: [PATCH] added autopilot pose to viewer

---
 include/MapDrawer.h |  8 +++++
 src/MapDrawer.cc    | 84 ++++++++++++++++++++++++++++++++++++++++++++++++++
 src/Tracking.cc     | 61 ++++++++++++++++++++++++++++---------------
 src/Viewer.cc       |  8 +++++
 4 files changed, 143 insertions(+), 18 deletions(-)
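Notes (not part of the commit message):

This patch overlays a second wireframe frustum, drawn in red by
MapDrawer::DrawCurrentAutopilot(), next to the usual camera frustum so the
externally supplied autopilot pose can be compared against the ORB-SLAM2
estimate in the map viewer. GetCurrentOpenGLAutopilotMatrix() inverts the
stored matrix the same way as the camera pose (Rwc = R^T, twc = -R^T * t),
so SetCurrentAutopilotPose() is expected to receive a 4x4 CV_32F
world-to-camera transform. A minimal sketch of assembling such a matrix is
shown below; MakeTcwAP, R and t are illustrative names that do not appear in
this patch.

    // Sketch only: build the Tcw-style pose handed to MapDrawer::SetCurrentAutopilotPose().
    // R is assumed to be a 3x3 CV_32F rotation (world -> camera), t a 3x1 CV_32F translation.
    cv::Mat MakeTcwAP(const cv::Mat &R, const cv::Mat &t)
    {
        cv::Mat TcwAP = cv::Mat::eye(4,4,CV_32F);        // bottom row stays [0 0 0 1]
        R.copyTo(TcwAP.rowRange(0,3).colRange(0,3));     // rotation block
        t.copyTo(TcwAP.rowRange(0,3).col(3));            // translation column
        return TcwAP;
    }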
diff --git a/include/MapDrawer.h b/include/MapDrawer.h
index 83b282676d..8859b215a3 100644
--- a/include/MapDrawer.h
+++ b/include/MapDrawer.h
@@ -45,6 +45,12 @@ class MapDrawer
     void SetReferenceKeyFrame(KeyFrame *pKF);
     void GetCurrentOpenGLCameraMatrix(pangolin::OpenGlMatrix &M);

+    void SetCurrentAutopilotPose(const cv::Mat &TcwAP);
+    void DrawCurrentAutopilot(pangolin::OpenGlMatrix &TcwAP);
+    void GetCurrentOpenGLAutopilotMatrix(pangolin::OpenGlMatrix &M);
+
+
+
 private:

     float mKeyFrameSize;
@@ -56,6 +62,8 @@ class MapDrawer

     cv::Mat mCameraPose;

+    cv::Mat mAutopilotPose;
+
     std::mutex mMutexCamera;
 };

diff --git a/src/MapDrawer.cc b/src/MapDrawer.cc
index 4d9990bc8a..3cf0199197 100644
--- a/src/MapDrawer.cc
+++ b/src/MapDrawer.cc
@@ -219,12 +219,60 @@ void MapDrawer::DrawCurrentCamera(pangolin::OpenGlMatrix &Twc)
 }


+void MapDrawer::DrawCurrentAutopilot(pangolin::OpenGlMatrix &TwcAP)
+{
+    const float &w = mCameraSize;
+    const float h = w*0.75;
+    const float z = w*0.6;
+
+    glPushMatrix();
+
+#ifdef HAVE_GLES
+        glMultMatrixf(TwcAP.m);
+#else
+        glMultMatrixd(TwcAP.m);
+#endif
+
+    glLineWidth(mCameraLineWidth);
+    glColor3f(1.0f,0.0f,0.0f); //R,G,B
+    glBegin(GL_LINES);
+    glVertex3f(0,0,0);
+    glVertex3f(w,h,z);
+    glVertex3f(0,0,0);
+    glVertex3f(w,-h,z);
+    glVertex3f(0,0,0);
+    glVertex3f(-w,-h,z);
+    glVertex3f(0,0,0);
+    glVertex3f(-w,h,z);
+
+    glVertex3f(w,h,z);
+    glVertex3f(w,-h,z);
+
+    glVertex3f(-w,h,z);
+    glVertex3f(-w,-h,z);
+
+    glVertex3f(-w,h,z);
+    glVertex3f(w,h,z);
+
+    glVertex3f(-w,-h,z);
+    glVertex3f(w,-h,z);
+    glEnd();
+
+    glPopMatrix();
+}
+
 void MapDrawer::SetCurrentCameraPose(const cv::Mat &Tcw)
 {
     unique_lock<mutex> lock(mMutexCamera);
     mCameraPose = Tcw.clone();
 }

+void MapDrawer::SetCurrentAutopilotPose(const cv::Mat &TcwAP)
+{
+    unique_lock<mutex> lock(mMutexCamera);
+    mAutopilotPose = TcwAP.clone();
+}
+
 void MapDrawer::GetCurrentOpenGLCameraMatrix(pangolin::OpenGlMatrix &M)
 {
     if(!mCameraPose.empty())
@@ -261,4 +309,40 @@ void MapDrawer::GetCurrentOpenGLCameraMatrix(pangolin::OpenGlMatrix &M)
         M.SetIdentity();
 }

+void MapDrawer::GetCurrentOpenGLAutopilotMatrix(pangolin::OpenGlMatrix &M)
+{
+    if(!mAutopilotPose.empty())
+    {
+        cv::Mat Rwc(3,3,CV_32F);
+        cv::Mat twc(3,1,CV_32F);
+        {
+            unique_lock<mutex> lock(mMutexCamera);
+            Rwc = mAutopilotPose.rowRange(0,3).colRange(0,3).t();
+            twc = -Rwc*mAutopilotPose.rowRange(0,3).col(3);
+        }
+
+        M.m[0] = Rwc.at<float>(0,0);
+        M.m[1] = Rwc.at<float>(1,0);
+        M.m[2] = Rwc.at<float>(2,0);
+        M.m[3] = 0.0;
+
+        M.m[4] = Rwc.at<float>(0,1);
+        M.m[5] = Rwc.at<float>(1,1);
+        M.m[6] = Rwc.at<float>(2,1);
+        M.m[7] = 0.0;
+
+        M.m[8] = Rwc.at<float>(0,2);
+        M.m[9] = Rwc.at<float>(1,2);
+        M.m[10] = Rwc.at<float>(2,2);
+        M.m[11] = 0.0;
+
+        M.m[12] = twc.at<float>(0);
+        M.m[13] = twc.at<float>(1);
+        M.m[14] = twc.at<float>(2);
+        M.m[15] = 1.0;
+    }
+    else
+        M.SetIdentity();
+}
+
 } //namespace ORB_SLAM
diff --git a/src/Tracking.cc b/src/Tracking.cc
index 787c204c84..b5884a0652 100644
--- a/src/Tracking.cc
+++ b/src/Tracking.cc
@@ -429,28 +429,50 @@ void Tracking::Track()
             // Update motion model
             if(!mLastFrame.mTcw.empty())
             {
-                if(!mCurrentFrame.mIFrameTransRot.empty()) // use the supplied inter-frame velocity info if available
-                {
-                    mVelocity = mCurrentFrame.mIFrameTransRot;
-
-                }
-                else // assume linear rotation and translation velocity model
-                {
-                    cv::Mat LastTwc = cv::Mat::eye(4,4,CV_32F);
+
+                cv::Mat LastTwc = cv::Mat::eye(4,4,CV_32F);
+
+                if(!mCurrentFrame.mIFrameTransRot.empty()) // use the supplied inter-frame velocity info if available
+                {
+//                    cv::Mat lastAPPose = mLastFrame.mIFrameTransRot;
+//                    cv::Mat lastAPPoseRotation = cv::Mat::eye(3,3,CV_32F);
+//                    lastAPPose.rowRange(0,3).colRange(0,3).copyTo(lastAPPoseRotation.rowRange(0,3).colRange(0,3));
+//                    lastAPPoseRotation = lastAPPoseRotation.t();
+//                    lastAPPoseRotation.copyTo(LastTwc.rowRange(0,3).colRange(0,3));
+//                    mLastFrame.GetCameraCenter().copyTo(LastTwc.rowRange(0,3).col(3));
+//                    mVelocity = mCurrentFrame.mIFrameTransRot*LastTwc;
+
                     mLastFrame.GetRotationInverse().copyTo(LastTwc.rowRange(0,3).colRange(0,3));
                     mLastFrame.GetCameraCenter().copyTo(LastTwc.rowRange(0,3).col(3));
+
+//                    cv::Mat currentAPPoseRotation = cv::Mat::eye(3,3,CV_32F);
+//                    mCurrentFrame.mIFrameTransRot.rowRange(0,3).colRange(0,3).copyTo(mCurrentFrame.mTcw.rowRange(0,3).colRange(0,3));
+
+//                    mVelocity = mCurrentFrame.mIFrameTransRot*LastTwc;
                     mVelocity = mCurrentFrame.mTcw*LastTwc;
-//                    cv::Mat tmp = cv::Mat::eye(4,4,CV_32F);
-//                    tmp.row(2).colRange(0,4).copyTo(mVelocity.row(2).colRange(0,4));
-//                    cout << endl << "mVelocity: " << mVelocity << endl << endl;
-//                    cout << "mLastFrame.mTcw: " << mLastFrame.mTcw << endl << endl;
-                }
+
+
+
+                    cout << endl << "using supplied mVelocity: " << mVelocity << endl << endl;
+                }
+                else // assume linear rotation and translation velocity model
+                {
+                    mLastFrame.GetRotationInverse().copyTo(LastTwc.rowRange(0,3).colRange(0,3));
+                    mLastFrame.GetCameraCenter().copyTo(LastTwc.rowRange(0,3).col(3));
+                    mVelocity = mCurrentFrame.mTcw*LastTwc;
+                }
+//                cv::Mat tmp = cv::Mat::eye(4,4,CV_32F);
+//                tmp.row(2).colRange(0,4).copyTo(mVelocity.row(2).colRange(0,4));
+//                cout << endl << "mVelocity: " << mVelocity << endl << endl;
+//                cout << "mLastFrame.mTcw: " << mLastFrame.mTcw << endl << endl;
+
             }
             else // this occurs on loop closure
                 mVelocity = cv::Mat();


-            mpMapDrawer->SetCurrentCameraPose(mCurrentFrame.mTcw); //set the camera pose
+            mpMapDrawer->SetCurrentAutopilotPose(mCurrentFrame.mIFrameTransRot); //set the Autopilot pose in the viewer app
+            mpMapDrawer->SetCurrentCameraPose(mCurrentFrame.mTcw); //set the camera pose in the viewer app

             // Clean temporal point matches
             for(int i=0; i<mCurrentFrame.N; i++)
@@ -952,23 +975,25 @@ bool Tracking::TrackWithMotionModel()
         th=7;

     int nmatches = matcher.SearchByProjection(mCurrentFrame,mLastFrame,th,mSensor==System::MONOCULAR);
+    cout << "tracking with motion model :" << nmatches << endl << endl;

     // If few matches, uses a wider window search
     if(nmatches<20)
     {
         fill(mCurrentFrame.mvpMapPoints.begin(),mCurrentFrame.mvpMapPoints.end(),static_cast<MapPoint*>(NULL));
         nmatches = matcher.SearchByProjection(mCurrentFrame,mLastFrame,2*th,mSensor==System::MONOCULAR);
+
+        cout << "tracking with motion model wide :" << nmatches << endl << endl;
     }

     if(nmatches<20)
+    {
+        cout << "tracking with motion model failed (<20)" << endl << endl;
         return false;
+    }

     // Optimize frame pose with all matches
     Optimizer::PoseOptimization(&mCurrentFrame);

-    /////// trying to stop the world z from going spastic... does not allow initilisation
-//    cv::Mat tmp = cv::Mat::eye(4,4,CV_32F);
-//    tmp.row(2).col(3).copyTo(mCurrentFrame.mTcw.row(2).col(3));
-    //////

     // Discard outliers
     int nmatchesMap = 0;
diff --git a/src/Viewer.cc b/src/Viewer.cc
index 5f3a424168..1d4e42d3e8 100644
--- a/src/Viewer.cc
+++ b/src/Viewer.cc
@@ -85,7 +85,11 @@ void Viewer::Run()
             .SetHandler(new pangolin::Handler3D(s_cam));

     pangolin::OpenGlMatrix Twc;
+    pangolin::OpenGlMatrix TwcAP; // autopilot pose
+
     Twc.SetIdentity();
+    TwcAP.SetIdentity();
+

     cv::namedWindow("ORB-SLAM2: Current Frame");

@@ -97,6 +101,7 @@ void Viewer::Run()
         glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);

         mpMapDrawer->GetCurrentOpenGLCameraMatrix(Twc);
+        mpMapDrawer->GetCurrentOpenGLAutopilotMatrix(TwcAP);

         if(menuFollowCamera && bFollow)
         {
@@ -126,6 +131,9 @@

         d_cam.Activate(s_cam);
         glClearColor(1.0f,1.0f,1.0f,1.0f);
+
+        mpMapDrawer->DrawCurrentAutopilot(TwcAP);
+
         mpMapDrawer->DrawCurrentCamera(Twc);
         if(menuShowKeyFrames || menuShowGraph)
             mpMapDrawer->DrawKeyFrames(menuShowKeyFrames,menuShowGraph);
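
Note on the Tracking.cc change (editorial, not part of the patch): with either
branch of the rewritten block, the motion model is still the relative transform
between the last two frames, mVelocity = Tcw_current * Twc_last, where Twc_last
is the inverse of the previous pose rebuilt from GetRotationInverse() and
GetCameraCenter(). A minimal sketch of that composition, using illustrative
local names (lastFrame/currentFrame stand in for mLastFrame/mCurrentFrame):

    // Sketch only: the constant-velocity motion model used by Track().
    cv::Mat LastTwc = cv::Mat::eye(4,4,CV_32F);
    lastFrame.GetRotationInverse().copyTo(LastTwc.rowRange(0,3).colRange(0,3)); // Rwc of the last frame
    lastFrame.GetCameraCenter().copyTo(LastTwc.rowRange(0,3).col(3));           // camera centre twc
    cv::Mat velocity  = currentFrame.mTcw * LastTwc;  // maps the last camera frame to the current one
    cv::Mat predicted = velocity * lastFrame.mTcw;    // pose prediction of the kind TrackWithMotionModel() uses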