diff --git a/.gitignore b/.gitignore
index 52de1ff5de..627674fd28 100644
--- a/.gitignore
+++ b/.gitignore
@@ -19,9 +19,11 @@ Examples/Stereo-Inertial/stereo_inertial_euroc
Examples/Stereo-Inertial/stereo_inertial_tum_vi
Examples/ROS/ORB_SLAM3/Mono
+Examples/ROS/ORB_SLAM3/Mono_Inertial
Examples/ROS/ORB_SLAM3/MonoAR
Examples/ROS/ORB_SLAM3/RGBD
Examples/ROS/ORB_SLAM3/Stereo
+Examples/ROS/ORB_SLAM3/Stereo_Inertial
Examples/ROS/ORB_SLAM3/CMakeLists.txt.user
Examples/ROS/ORB_SLAM3/build/
diff --git a/Examples/RGB-D/TUM1.yaml b/Examples/RGB-D/TUM1.yaml
index 717bc36de0..7100039eab 100644
--- a/Examples/RGB-D/TUM1.yaml
+++ b/Examples/RGB-D/TUM1.yaml
@@ -33,7 +33,7 @@ Camera.RGB: 1
ThDepth: 40.0
# Deptmap values factor
-DepthMapFactor: 5000.0
+DepthMapFactor: 5000.0 # 1.0 for ROS bags (depth already published in meters)
#--------------------------------------------------------------------------------------------
# ORB Parameters
diff --git a/Examples/ROS/ORB_SLAM3/CMakeLists.txt b/Examples/ROS/ORB_SLAM3/CMakeLists.txt
index 1e2000b422..12792fe44b 100644
--- a/Examples/ROS/ORB_SLAM3/CMakeLists.txt
+++ b/Examples/ROS/ORB_SLAM3/CMakeLists.txt
@@ -45,6 +45,7 @@ include_directories(
${PROJECT_SOURCE_DIR}
${PROJECT_SOURCE_DIR}/../../../
${PROJECT_SOURCE_DIR}/../../../include
+${PROJECT_SOURCE_DIR}/../../../include/CameraModels
${Pangolin_INCLUDE_DIRS}
)
@@ -55,6 +56,7 @@ ${Pangolin_LIBRARIES}
${PROJECT_SOURCE_DIR}/../../../Thirdparty/DBoW2/lib/libDBoW2.so
${PROJECT_SOURCE_DIR}/../../../Thirdparty/g2o/lib/libg2o.so
${PROJECT_SOURCE_DIR}/../../../lib/libORB_SLAM3.so
+-lboost_system
)
# Node for monocular camera
@@ -95,3 +97,20 @@ target_link_libraries(RGBD
${LIBS}
)
+# Node for monocular-inertial camera
+rosbuild_add_executable(Mono_Inertial
+src/ros_mono_inertial.cc
+)
+
+target_link_libraries(Mono_Inertial
+${LIBS}
+)
+
+# Node for stereo-inertial camera
+rosbuild_add_executable(Stereo_Inertial
+src/ros_stereo_inertial.cc
+)
+
+target_link_libraries(Stereo_Inertial
+${LIBS}
+)
diff --git a/Examples/ROS/ORB_SLAM3/manifest.xml b/Examples/ROS/ORB_SLAM3/manifest.xml
index b40564f796..79e6e6fce9 100644
--- a/Examples/ROS/ORB_SLAM3/manifest.xml
+++ b/Examples/ROS/ORB_SLAM3/manifest.xml
@@ -1,18 +1,14 @@
-<description brief="ORB_SLAM2">
-
- ORB_SLAM2
-
+<description brief="ORB_SLAM3">
+ ORB_SLAM3
-<author>Raul Mur-Artal</author>
+<author>Carlos Campos, Richard Elvira, Juan J. Gomez, Jose M.M. Montiel, Juan D. Tardos</author>
 <license>GPLv3</license>
-
-
diff --git a/Examples/ROS/ORB_SLAM3/src/AR/ViewerAR.cc b/Examples/ROS/ORB_SLAM3/src/AR/ViewerAR.cc
index 86a300a6aa..b0baa6f0be 100644
--- a/Examples/ROS/ORB_SLAM3/src/AR/ViewerAR.cc
+++ b/Examples/ROS/ORB_SLAM3/src/AR/ViewerAR.cc
@@ -26,7 +26,7 @@
using namespace std;
-namespace ORB_SLAM2
+namespace ORB_SLAM3
{
const float eps = 1e-4;
@@ -233,7 +233,7 @@ void ViewerAR::Run()
}
-void ViewerAR::SetImagePose(const cv::Mat &im, const cv::Mat &Tcw, const int &status, const vector<cv::KeyPoint> &vKeys, const vector<ORB_SLAM2::MapPoint*> &vMPs)
+void ViewerAR::SetImagePose(const cv::Mat &im, const cv::Mat &Tcw, const int &status, const vector<cv::KeyPoint> &vKeys, const vector<ORB_SLAM3::MapPoint*> &vMPs)
{
unique_lock<mutex> lock(mMutexPoseImage);
mImage = im.clone();
diff --git a/Examples/ROS/ORB_SLAM3/src/AR/ViewerAR.h b/Examples/ROS/ORB_SLAM3/src/AR/ViewerAR.h
index 85071a23ce..675d301fa0 100644
--- a/Examples/ROS/ORB_SLAM3/src/AR/ViewerAR.h
+++ b/Examples/ROS/ORB_SLAM3/src/AR/ViewerAR.h
@@ -26,7 +26,7 @@
#include
#include"../../../include/System.h"
-namespace ORB_SLAM2
+namespace ORB_SLAM3
{
class Plane
@@ -62,7 +62,7 @@ class ViewerAR
mT=1e3/fps;
}
- void SetSLAM(ORB_SLAM2::System* pSystem){
+ void SetSLAM(ORB_SLAM3::System* pSystem){
mpSystem = pSystem;
}
@@ -82,7 +82,7 @@ class ViewerAR
private:
//SLAM
- ORB_SLAM2::System* mpSystem;
+ ORB_SLAM3::System* mpSystem;
void PrintStatus(const int &status, const bool &bLocMode, cv::Mat &im);
void AddTextToImage(const std::string &s, cv::Mat &im, const int r=0, const int g=0, const int b=0);
diff --git a/Examples/ROS/ORB_SLAM3/src/AR/ros_mono_ar.cc b/Examples/ROS/ORB_SLAM3/src/AR/ros_mono_ar.cc
index 5d896280b7..334403a100 100644
--- a/Examples/ROS/ORB_SLAM3/src/AR/ros_mono_ar.cc
+++ b/Examples/ROS/ORB_SLAM3/src/AR/ros_mono_ar.cc
@@ -35,7 +35,7 @@
using namespace std;
-ORB_SLAM2::ViewerAR viewerAR;
+ORB_SLAM3::ViewerAR viewerAR;
bool bRGB = true;
cv::Mat K;
@@ -45,11 +45,11 @@ cv::Mat DistCoef;
class ImageGrabber
{
public:
- ImageGrabber(ORB_SLAM2::System* pSLAM):mpSLAM(pSLAM){}
+ ImageGrabber(ORB_SLAM3::System* pSLAM):mpSLAM(pSLAM){}
void GrabImage(const sensor_msgs::ImageConstPtr& msg);
- ORB_SLAM2::System* mpSLAM;
+ ORB_SLAM3::System* mpSLAM;
};
int main(int argc, char **argv)
@@ -59,13 +59,13 @@ int main(int argc, char **argv)
if(argc != 3)
{
- cerr << endl << "Usage: rosrun ORB_SLAM2 Mono path_to_vocabulary path_to_settings" << endl;
+ cerr << endl << "Usage: rosrun ORB_SLAM3 Mono path_to_vocabulary path_to_settings" << endl;
ros::shutdown();
return 1;
}
// Create SLAM system. It initializes all system threads and gets ready to process frames.
- ORB_SLAM2::System SLAM(argv[1],argv[2],ORB_SLAM2::System::MONOCULAR,false);
+ ORB_SLAM3::System SLAM(argv[1],argv[2],ORB_SLAM3::System::MONOCULAR,false);
cout << endl << endl;
@@ -118,7 +118,7 @@ int main(int argc, char **argv)
DistCoef.at<float>(4) = k3;
}
- thread tViewer = thread(&ORB_SLAM2::ViewerAR::Run,&viewerAR);
+ thread tViewer = thread(&ORB_SLAM3::ViewerAR::Run,&viewerAR);
ros::spin();
@@ -150,7 +150,7 @@ void ImageGrabber::GrabImage(const sensor_msgs::ImageConstPtr& msg)
cv::Mat imu;
cv::Mat Tcw = mpSLAM->TrackMonocular(cv_ptr->image,cv_ptr->header.stamp.toSec());
int state = mpSLAM->GetTrackingState();
- vector<ORB_SLAM2::MapPoint*> vMPs = mpSLAM->GetTrackedMapPoints();
+ vector<ORB_SLAM3::MapPoint*> vMPs = mpSLAM->GetTrackedMapPoints();
vector<cv::KeyPoint> vKeys = mpSLAM->GetTrackedKeyPointsUn();
cv::undistort(im,imu,K,DistCoef);
diff --git a/Examples/ROS/ORB_SLAM3/src/ros_mono.cc b/Examples/ROS/ORB_SLAM3/src/ros_mono.cc
index 95ce36f246..2cdfde0a14 100644
--- a/Examples/ROS/ORB_SLAM3/src/ros_mono.cc
+++ b/Examples/ROS/ORB_SLAM3/src/ros_mono.cc
@@ -34,11 +34,11 @@ using namespace std;
class ImageGrabber
{
public:
- ImageGrabber(ORB_SLAM2::System* pSLAM):mpSLAM(pSLAM){}
+ ImageGrabber(ORB_SLAM3::System* pSLAM):mpSLAM(pSLAM){}
void GrabImage(const sensor_msgs::ImageConstPtr& msg);
- ORB_SLAM2::System* mpSLAM;
+ ORB_SLAM3::System* mpSLAM;
};
int main(int argc, char **argv)
@@ -48,18 +48,18 @@ int main(int argc, char **argv)
if(argc != 3)
{
- cerr << endl << "Usage: rosrun ORB_SLAM2 Mono path_to_vocabulary path_to_settings" << endl;
+ cerr << endl << "Usage: rosrun ORB_SLAM3 Mono path_to_vocabulary path_to_settings" << endl;
ros::shutdown();
return 1;
}
// Create SLAM system. It initializes all system threads and gets ready to process frames.
- ORB_SLAM2::System SLAM(argv[1],argv[2],ORB_SLAM2::System::MONOCULAR,true);
+ ORB_SLAM3::System SLAM(argv[1],argv[2],ORB_SLAM3::System::MONOCULAR,true);
ImageGrabber igb(&SLAM);
ros::NodeHandle nodeHandler;
- ros::Subscriber sub = nodeHandler.subscribe("/camera/image_raw", 1, &ImageGrabber::GrabImage,&igb);
+ ros::Subscriber sub = nodeHandler.subscribe("/camera/image_raw", 100, &ImageGrabber::GrabImage,&igb);
ros::spin();
diff --git a/Examples/ROS/ORB_SLAM3/src/ros_mono_inertial.cc b/Examples/ROS/ORB_SLAM3/src/ros_mono_inertial.cc
new file mode 100644
index 0000000000..85d02925ed
--- /dev/null
+++ b/Examples/ROS/ORB_SLAM3/src/ros_mono_inertial.cc
@@ -0,0 +1,194 @@
+/**
+* This file is part of ORB-SLAM3
+*
+* Copyright (C) 2017-2020 Carlos Campos, Richard Elvira, Juan J. Gómez Rodríguez, José M.M. Montiel and Juan D. Tardós, University of Zaragoza.
+* Copyright (C) 2014-2016 Raúl Mur-Artal, José M.M. Montiel and Juan D. Tardós, University of Zaragoza.
+*
+* ORB-SLAM3 is free software: you can redistribute it and/or modify it under the terms of the GNU General Public
+* License as published by the Free Software Foundation, either version 3 of the License, or
+* (at your option) any later version.
+*
+* ORB-SLAM3 is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even
+* the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+* GNU General Public License for more details.
+*
+* You should have received a copy of the GNU General Public License along with ORB-SLAM3.
+* If not, see <http://www.gnu.org/licenses/>.
+*/
+
+#include<iostream>
+#include<algorithm>
+#include<fstream>
+#include<chrono>
+#include<vector>
+#include<queue>
+#include<thread>
+#include<mutex>
+
+#include<ros/ros.h>
+#include<cv_bridge/cv_bridge.h>
+#include<sensor_msgs/Imu.h>
+
+#include<opencv2/core/core.hpp>
+
+#include"../../../include/System.h"
+#include"../include/ImuTypes.h"
+
+using namespace std;
+
+class ImuGrabber
+{
+public:
+ ImuGrabber(){};
+ void GrabImu(const sensor_msgs::ImuConstPtr &imu_msg);
+
+ queue<sensor_msgs::ImuConstPtr> imuBuf;
+ std::mutex mBufMutex;
+};
+
+class ImageGrabber
+{
+public:
+ ImageGrabber(ORB_SLAM3::System* pSLAM, ImuGrabber *pImuGb, const bool bClahe): mpSLAM(pSLAM), mpImuGb(pImuGb), mbClahe(bClahe){}
+
+ void GrabImage(const sensor_msgs::ImageConstPtr& msg);
+ cv::Mat GetImage(const sensor_msgs::ImageConstPtr &img_msg);
+ void SyncWithImu();
+
+ queue<sensor_msgs::ImageConstPtr> img0Buf;
+ std::mutex mBufMutex;
+
+ ORB_SLAM3::System* mpSLAM;
+ ImuGrabber *mpImuGb;
+
+ const bool mbClahe;
+ cv::Ptr<cv::CLAHE> mClahe = cv::createCLAHE(3.0, cv::Size(8, 8));
+};
+
+
+
+int main(int argc, char **argv)
+{
+ ros::init(argc, argv, "Mono_Inertial");
+ ros::NodeHandle n("~");
+ ros::console::set_logger_level(ROSCONSOLE_DEFAULT_NAME, ros::console::levels::Info);
+ bool bEqual = false;
+ if(argc < 3 || argc > 4)
+ {
+ cerr << endl << "Usage: rosrun ORB_SLAM3 Mono_Inertial path_to_vocabulary path_to_settings [do_equalize]" << endl;
+ ros::shutdown();
+ return 1;
+ }
+
+
+ if(argc==4)
+ {
+ std::string sbEqual(argv[3]);
+ if(sbEqual == "true")
+ bEqual = true;
+ }
+
+ // Create SLAM system. It initializes all system threads and gets ready to process frames.
+ ORB_SLAM3::System SLAM(argv[1],argv[2],ORB_SLAM3::System::IMU_MONOCULAR,true);
+
+ ImuGrabber imugb;
+ ImageGrabber igb(&SLAM,&imugb,bEqual); // TODO
+
+ // Maximum delay, 5 seconds
+ ros::Subscriber sub_imu = n.subscribe("/imu", 1000, &ImuGrabber::GrabImu, &imugb);
+ ros::Subscriber sub_img0 = n.subscribe("/camera/image_raw", 100, &ImageGrabber::GrabImage,&igb);
+
+ std::thread sync_thread(&ImageGrabber::SyncWithImu,&igb);
+
+ ros::spin();
+
+ return 0;
+}
+
+
+
+void ImageGrabber::GrabImage(const sensor_msgs::ImageConstPtr &img_msg)
+{
+ mBufMutex.lock();
+ img0Buf.push(img_msg);
+ mBufMutex.unlock();
+}
+
+cv::Mat ImageGrabber::GetImage(const sensor_msgs::ImageConstPtr &img_msg)
+{
+ // Copy the ros image message to cv::Mat.
+ cv_bridge::CvImageConstPtr cv_ptr;
+ try
+ {
+ cv_ptr = cv_bridge::toCvShare(img_msg, sensor_msgs::image_encodings::MONO8);
+ }
+ catch (cv_bridge::Exception& e)
+ {
+ ROS_ERROR("cv_bridge exception: %s", e.what());
+ }
+
+ if(cv_ptr->image.type()==0)
+ {
+ return cv_ptr->image.clone();
+ }
+ else
+ {
+ std::cout << "Error type" << std::endl;
+ return cv_ptr->image.clone();
+ }
+}
+
+void ImageGrabber::SyncWithImu()
+{
+ while(1)
+ {
+ cv::Mat im;
+ double tIm = 0;
+ if (!img0Buf.empty()&&!mpImuGb->imuBuf.empty())
+ {
+ tIm = img0Buf.front()->header.stamp.toSec();
+ if(tIm>mpImuGb->imuBuf.back()->header.stamp.toSec())
+ continue;
+ {
+ this->mBufMutex.lock();
+ im = GetImage(img0Buf.front());
+ img0Buf.pop();
+ this->mBufMutex.unlock();
+ }
+
+ vector<ORB_SLAM3::IMU::Point> vImuMeas;
+ mpImuGb->mBufMutex.lock();
+ if(!mpImuGb->imuBuf.empty())
+ {
+ // Load imu measurements from buffer
+ vImuMeas.clear();
+ while(!mpImuGb->imuBuf.empty() && mpImuGb->imuBuf.front()->header.stamp.toSec()<=tIm)
+ {
+ double t = mpImuGb->imuBuf.front()->header.stamp.toSec();
+ cv::Point3f acc(mpImuGb->imuBuf.front()->linear_acceleration.x, mpImuGb->imuBuf.front()->linear_acceleration.y, mpImuGb->imuBuf.front()->linear_acceleration.z);
+ cv::Point3f gyr(mpImuGb->imuBuf.front()->angular_velocity.x, mpImuGb->imuBuf.front()->angular_velocity.y, mpImuGb->imuBuf.front()->angular_velocity.z);
+ vImuMeas.push_back(ORB_SLAM3::IMU::Point(acc,gyr,t));
+ mpImuGb->imuBuf.pop();
+ }
+ }
+ mpImuGb->mBufMutex.unlock();
+ if(mbClahe)
+ mClahe->apply(im,im);
+
+ mpSLAM->TrackMonocular(im,tIm,vImuMeas);
+ }
+
+ std::chrono::milliseconds tSleep(1);
+ std::this_thread::sleep_for(tSleep);
+ }
+}
+
+void ImuGrabber::GrabImu(const sensor_msgs::ImuConstPtr &imu_msg)
+{
+ mBufMutex.lock();
+ imuBuf.push(imu_msg);
+ mBufMutex.unlock();
+ return;
+}
+
+
diff --git a/Examples/ROS/ORB_SLAM3/src/ros_rgbd.cc b/Examples/ROS/ORB_SLAM3/src/ros_rgbd.cc
index ae18150f70..cb68d76b6e 100644
--- a/Examples/ROS/ORB_SLAM3/src/ros_rgbd.cc
+++ b/Examples/ROS/ORB_SLAM3/src/ros_rgbd.cc
@@ -37,11 +37,11 @@ using namespace std;
class ImageGrabber
{
public:
- ImageGrabber(ORB_SLAM2::System* pSLAM):mpSLAM(pSLAM){}
+ ImageGrabber(ORB_SLAM3::System* pSLAM):mpSLAM(pSLAM){}
void GrabRGBD(const sensor_msgs::ImageConstPtr& msgRGB,const sensor_msgs::ImageConstPtr& msgD);
- ORB_SLAM2::System* mpSLAM;
+ ORB_SLAM3::System* mpSLAM;
};
int main(int argc, char **argv)
@@ -51,20 +51,20 @@ int main(int argc, char **argv)
if(argc != 3)
{
- cerr << endl << "Usage: rosrun ORB_SLAM2 RGBD path_to_vocabulary path_to_settings" << endl;
+ cerr << endl << "Usage: rosrun ORB_SLAM3 RGBD path_to_vocabulary path_to_settings" << endl;
ros::shutdown();
return 1;
}
// Create SLAM system. It initializes all system threads and gets ready to process frames.
- ORB_SLAM2::System SLAM(argv[1],argv[2],ORB_SLAM2::System::RGBD,true);
+ ORB_SLAM3::System SLAM(argv[1],argv[2],ORB_SLAM3::System::RGBD,true);
ImageGrabber igb(&SLAM);
ros::NodeHandle nh;
- message_filters::Subscriber<sensor_msgs::Image> rgb_sub(nh, "/camera/rgb/image_raw", 1);
- message_filters::Subscriber<sensor_msgs::Image> depth_sub(nh, "camera/depth_registered/image_raw", 1);
+ message_filters::Subscriber<sensor_msgs::Image> rgb_sub(nh, "/camera/rgb/image_raw", 100);
+ message_filters::Subscriber<sensor_msgs::Image> depth_sub(nh, "camera/depth_registered/image_raw", 100);
typedef message_filters::sync_policies::ApproximateTime<sensor_msgs::Image, sensor_msgs::Image> sync_pol;
message_filters::Synchronizer<sync_pol> sync(sync_pol(10), rgb_sub,depth_sub);
sync.registerCallback(boost::bind(&ImageGrabber::GrabRGBD,&igb,_1,_2));
@@ -106,7 +106,6 @@ void ImageGrabber::GrabRGBD(const sensor_msgs::ImageConstPtr& msgRGB,const senso
ROS_ERROR("cv_bridge exception: %s", e.what());
return;
}
-
mpSLAM->TrackRGBD(cv_ptrRGB->image,cv_ptrD->image,cv_ptrRGB->header.stamp.toSec());
}
diff --git a/Examples/ROS/ORB_SLAM3/src/ros_stereo.cc b/Examples/ROS/ORB_SLAM3/src/ros_stereo.cc
index 11063ce61a..574dc0a8c4 100644
--- a/Examples/ROS/ORB_SLAM3/src/ros_stereo.cc
+++ b/Examples/ROS/ORB_SLAM3/src/ros_stereo.cc
@@ -37,11 +37,11 @@ using namespace std;
class ImageGrabber
{
public:
- ImageGrabber(ORB_SLAM2::System* pSLAM):mpSLAM(pSLAM){}
+ ImageGrabber(ORB_SLAM3::System* pSLAM):mpSLAM(pSLAM){}
void GrabStereo(const sensor_msgs::ImageConstPtr& msgLeft,const sensor_msgs::ImageConstPtr& msgRight);
- ORB_SLAM2::System* mpSLAM;
+ ORB_SLAM3::System* mpSLAM;
bool do_rectify;
cv::Mat M1l,M2l,M1r,M2r;
};
@@ -53,13 +53,13 @@ int main(int argc, char **argv)
if(argc != 4)
{
- cerr << endl << "Usage: rosrun ORB_SLAM2 Stereo path_to_vocabulary path_to_settings do_rectify" << endl;
+ cerr << endl << "Usage: rosrun ORB_SLAM3 Stereo path_to_vocabulary path_to_settings do_rectify" << endl;
ros::shutdown();
return 1;
}
// Create SLAM system. It initializes all system threads and gets ready to process frames.
- ORB_SLAM2::System SLAM(argv[1],argv[2],ORB_SLAM2::System::STEREO,true);
+ ORB_SLAM3::System SLAM(argv[1],argv[2],ORB_SLAM3::System::STEREO,true);
ImageGrabber igb(&SLAM);
@@ -107,8 +107,8 @@ int main(int argc, char **argv)
ros::NodeHandle nh;
- message_filters::Subscriber<sensor_msgs::Image> left_sub(nh, "/camera/left/image_raw", 1);
- message_filters::Subscriber<sensor_msgs::Image> right_sub(nh, "camera/right/image_raw", 1);
+ message_filters::Subscriber<sensor_msgs::Image> left_sub(nh, "/camera/left/image_raw", 100);
+ message_filters::Subscriber<sensor_msgs::Image> right_sub(nh, "/camera/right/image_raw", 100);
typedef message_filters::sync_policies::ApproximateTime<sensor_msgs::Image, sensor_msgs::Image> sync_pol;
message_filters::Synchronizer<sync_pol> sync(sync_pol(10), left_sub,right_sub);
sync.registerCallback(boost::bind(&ImageGrabber::GrabStereo,&igb,_1,_2));
diff --git a/Examples/ROS/ORB_SLAM3/src/ros_stereo_inertial.cc b/Examples/ROS/ORB_SLAM3/src/ros_stereo_inertial.cc
new file mode 100644
index 0000000000..07cd68c715
--- /dev/null
+++ b/Examples/ROS/ORB_SLAM3/src/ros_stereo_inertial.cc
@@ -0,0 +1,282 @@
+/**
+* This file is part of ORB-SLAM3
+*
+* Copyright (C) 2017-2020 Carlos Campos, Richard Elvira, Juan J. Gómez Rodríguez, José M.M. Montiel and Juan D. Tardós, University of Zaragoza.
+* Copyright (C) 2014-2016 Raúl Mur-Artal, José M.M. Montiel and Juan D. Tardós, University of Zaragoza.
+*
+* ORB-SLAM3 is free software: you can redistribute it and/or modify it under the terms of the GNU General Public
+* License as published by the Free Software Foundation, either version 3 of the License, or
+* (at your option) any later version.
+*
+* ORB-SLAM3 is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even
+* the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+* GNU General Public License for more details.
+*
+* You should have received a copy of the GNU General Public License along with ORB-SLAM3.
+* If not, see <http://www.gnu.org/licenses/>.
+*/
+
+#include<iostream>
+#include<algorithm>
+#include<fstream>
+#include<chrono>
+#include<vector>
+#include<queue>
+#include<thread>
+#include<mutex>
+
+#include<ros/ros.h>
+#include<cv_bridge/cv_bridge.h>
+#include<sensor_msgs/Imu.h>
+
+#include<opencv2/core/core.hpp>
+
+#include"../../../include/System.h"
+#include"../include/ImuTypes.h"
+
+using namespace std;
+
+class ImuGrabber
+{
+public:
+ ImuGrabber(){};
+ void GrabImu(const sensor_msgs::ImuConstPtr &imu_msg);
+
+ queue<sensor_msgs::ImuConstPtr> imuBuf;
+ std::mutex mBufMutex;
+};
+
+class ImageGrabber
+{
+public:
+ ImageGrabber(ORB_SLAM3::System* pSLAM, ImuGrabber *pImuGb, const bool bRect, const bool bClahe): mpSLAM(pSLAM), mpImuGb(pImuGb), do_rectify(bRect), mbClahe(bClahe){}
+
+ void GrabImageLeft(const sensor_msgs::ImageConstPtr& msg);
+ void GrabImageRight(const sensor_msgs::ImageConstPtr& msg);
+ cv::Mat GetImage(const sensor_msgs::ImageConstPtr &img_msg);
+ void SyncWithImu();
+
+ queue<sensor_msgs::ImageConstPtr> imgLeftBuf, imgRightBuf;
+ std::mutex mBufMutexLeft,mBufMutexRight;
+
+ ORB_SLAM3::System* mpSLAM;
+ ImuGrabber *mpImuGb;
+
+ const bool do_rectify;
+ cv::Mat M1l,M2l,M1r,M2r;
+
+ const bool mbClahe;
+ cv::Ptr<cv::CLAHE> mClahe = cv::createCLAHE(3.0, cv::Size(8, 8));
+};
+
+
+
+int main(int argc, char **argv)
+{
+ ros::init(argc, argv, "Stereo_Inertial");
+ ros::NodeHandle n("~");
+ ros::console::set_logger_level(ROSCONSOLE_DEFAULT_NAME, ros::console::levels::Info);
+ bool bEqual = false;
+ if(argc < 4 || argc > 5)
+ {
+ cerr << endl << "Usage: rosrun ORB_SLAM3 Stereo_Inertial path_to_vocabulary path_to_settings do_rectify [do_equalize]" << endl;
+ ros::shutdown();
+ return 1;
+ }
+
+ std::string sbRect(argv[3]);
+ if(argc==5)
+ {
+ std::string sbEqual(argv[4]);
+ if(sbEqual == "true")
+ bEqual = true;
+ }
+
+ // Create SLAM system. It initializes all system threads and gets ready to process frames.
+ ORB_SLAM3::System SLAM(argv[1],argv[2],ORB_SLAM3::System::IMU_STEREO,true);
+
+ ImuGrabber imugb;
+ ImageGrabber igb(&SLAM,&imugb,sbRect == "true",bEqual);
+
+ if(igb.do_rectify)
+ {
+ // Load settings related to stereo calibration
+ cv::FileStorage fsSettings(argv[2], cv::FileStorage::READ);
+ if(!fsSettings.isOpened())
+ {
+ cerr << "ERROR: Wrong path to settings" << endl;
+ return -1;
+ }
+
+ cv::Mat K_l, K_r, P_l, P_r, R_l, R_r, D_l, D_r;
+ fsSettings["LEFT.K"] >> K_l;
+ fsSettings["RIGHT.K"] >> K_r;
+
+ fsSettings["LEFT.P"] >> P_l;
+ fsSettings["RIGHT.P"] >> P_r;
+
+ fsSettings["LEFT.R"] >> R_l;
+ fsSettings["RIGHT.R"] >> R_r;
+
+ fsSettings["LEFT.D"] >> D_l;
+ fsSettings["RIGHT.D"] >> D_r;
+
+ int rows_l = fsSettings["LEFT.height"];
+ int cols_l = fsSettings["LEFT.width"];
+ int rows_r = fsSettings["RIGHT.height"];
+ int cols_r = fsSettings["RIGHT.width"];
+
+ if(K_l.empty() || K_r.empty() || P_l.empty() || P_r.empty() || R_l.empty() || R_r.empty() || D_l.empty() || D_r.empty() ||
+ rows_l==0 || rows_r==0 || cols_l==0 || cols_r==0)
+ {
+ cerr << "ERROR: Calibration parameters to rectify stereo are missing!" << endl;
+ return -1;
+ }
+
+ cv::initUndistortRectifyMap(K_l,D_l,R_l,P_l.rowRange(0,3).colRange(0,3),cv::Size(cols_l,rows_l),CV_32F,igb.M1l,igb.M2l);
+ cv::initUndistortRectifyMap(K_r,D_r,R_r,P_r.rowRange(0,3).colRange(0,3),cv::Size(cols_r,rows_r),CV_32F,igb.M1r,igb.M2r);
+ }
+
+ // Maximum delay, 5 seconds
+ ros::Subscriber sub_imu = n.subscribe("/imu", 1000, &ImuGrabber::GrabImu, &imugb);
+ ros::Subscriber sub_img_left = n.subscribe("/camera/left/image_raw", 100, &ImageGrabber::GrabImageLeft,&igb);
+ ros::Subscriber sub_img_right = n.subscribe("/camera/right/image_raw", 100, &ImageGrabber::GrabImageRight,&igb);
+
+ std::thread sync_thread(&ImageGrabber::SyncWithImu,&igb);
+
+ ros::spin();
+
+ return 0;
+}
+
+
+
+void ImageGrabber::GrabImageLeft(const sensor_msgs::ImageConstPtr &img_msg)
+{
+ mBufMutexLeft.lock();
+ imgLeftBuf.push(img_msg);
+ mBufMutexLeft.unlock();
+}
+
+void ImageGrabber::GrabImageRight(const sensor_msgs::ImageConstPtr &img_msg)
+{
+ mBufMutexRight.lock();
+ imgRightBuf.push(img_msg);
+ mBufMutexRight.unlock();
+}
+
+cv::Mat ImageGrabber::GetImage(const sensor_msgs::ImageConstPtr &img_msg)
+{
+ // Copy the ros image message to cv::Mat.
+ cv_bridge::CvImageConstPtr cv_ptr;
+ try
+ {
+ cv_ptr = cv_bridge::toCvShare(img_msg, sensor_msgs::image_encodings::MONO8);
+ }
+ catch (cv_bridge::Exception& e)
+ {
+ ROS_ERROR("cv_bridge exception: %s", e.what());
+ }
+
+ if(cv_ptr->image.type()==0)
+ {
+ return cv_ptr->image.clone();
+ }
+ else
+ {
+ std::cout << "Error type" << std::endl;
+ return cv_ptr->image.clone();
+ }
+}
+
+void ImageGrabber::SyncWithImu()
+{
+ const double maxTimeDiff = 0.01;
+ while(1)
+ {
+ cv::Mat imLeft, imRight;
+ double tImLeft = 0, tImRight = 0;
+ if (!imgLeftBuf.empty()&&!imgRightBuf.empty()&&!mpImuGb->imuBuf.empty())
+ {
+ tImLeft = imgLeftBuf.front()->header.stamp.toSec();
+ tImRight = imgRightBuf.front()->header.stamp.toSec();
+
+ this->mBufMutexRight.lock();
+ while((tImLeft-tImRight)>maxTimeDiff && imgRightBuf.size()>1)
+ {
+ imgRightBuf.pop();
+ tImRight = imgRightBuf.front()->header.stamp.toSec();
+ }
+ this->mBufMutexRight.unlock();
+
+ this->mBufMutexLeft.lock();
+ while((tImRight-tImLeft)>maxTimeDiff && imgLeftBuf.size()>1)
+ {
+ imgLeftBuf.pop();
+ tImLeft = imgLeftBuf.front()->header.stamp.toSec();
+ }
+ this->mBufMutexLeft.unlock();
+
+ if((tImLeft-tImRight)>maxTimeDiff || (tImRight-tImLeft)>maxTimeDiff)
+ {
+ // std::cout << "big time difference" << std::endl;
+ continue;
+ }
+ if(tImLeft>mpImuGb->imuBuf.back()->header.stamp.toSec())
+ continue;
+
+ this->mBufMutexLeft.lock();
+ imLeft = GetImage(imgLeftBuf.front());
+ imgLeftBuf.pop();
+ this->mBufMutexLeft.unlock();
+
+ this->mBufMutexRight.lock();
+ imRight = GetImage(imgRightBuf.front());
+ imgRightBuf.pop();
+ this->mBufMutexRight.unlock();
+
+ vector<ORB_SLAM3::IMU::Point> vImuMeas;
+ mpImuGb->mBufMutex.lock();
+ if(!mpImuGb->imuBuf.empty())
+ {
+ // Load imu measurements from buffer
+ vImuMeas.clear();
+ while(!mpImuGb->imuBuf.empty() && mpImuGb->imuBuf.front()->header.stamp.toSec()<=tImLeft)
+ {
+ double t = mpImuGb->imuBuf.front()->header.stamp.toSec();
+ cv::Point3f acc(mpImuGb->imuBuf.front()->linear_acceleration.x, mpImuGb->imuBuf.front()->linear_acceleration.y, mpImuGb->imuBuf.front()->linear_acceleration.z);
+ cv::Point3f gyr(mpImuGb->imuBuf.front()->angular_velocity.x, mpImuGb->imuBuf.front()->angular_velocity.y, mpImuGb->imuBuf.front()->angular_velocity.z);
+ vImuMeas.push_back(ORB_SLAM3::IMU::Point(acc,gyr,t));
+ mpImuGb->imuBuf.pop();
+ }
+ }
+ mpImuGb->mBufMutex.unlock();
+ if(mbClahe)
+ {
+ mClahe->apply(imLeft,imLeft);
+ mClahe->apply(imRight,imRight);
+ }
+
+ if(do_rectify)
+ {
+ cv::remap(imLeft,imLeft,M1l,M2l,cv::INTER_LINEAR);
+ cv::remap(imRight,imRight,M1r,M2r,cv::INTER_LINEAR);
+ }
+
+ mpSLAM->TrackStereo(imLeft,imRight,tImLeft,vImuMeas);
+
+ std::chrono::milliseconds tSleep(1);
+ std::this_thread::sleep_for(tSleep);
+ }
+ }
+}
+
+void ImuGrabber::GrabImu(const sensor_msgs::ImuConstPtr &imu_msg)
+{
+ mBufMutex.lock();
+ imuBuf.push(imu_msg);
+ mBufMutex.unlock();
+ return;
+}
+
+
diff --git a/README.md b/README.md
index ae626351fb..4a2f6bb4bd 100644
--- a/README.md
+++ b/README.md
@@ -1,6 +1,6 @@
# ORB-SLAM3
-### V0.2: Beta version, 21 Jul 2020
+### V0.3: Beta version, 7 Aug 2020
**Authors:** Carlos Campos, Richard Elvira, Juan J. Gómez Rodríguez, [José M. M. Montiel](http://webdiis.unizar.es/~josemari/), [Juan D. Tardos](http://webdiis.unizar.es/~jdtardos/).
ORB-SLAM3 is the first real-time SLAM library able to perform **Visual, Visual-Inertial and Multi-Map SLAM** with **monocular, stereo and RGB-D** cameras, using **pin-hole and fisheye** lens models. In all sensor configurations, ORB-SLAM3 is as robust as the best systems available in the literature, and significantly more accurate.
@@ -10,7 +10,7 @@ We provide examples to run ORB-SLAM3 in the [EuRoC dataset](http://projects.asl.
This software is based on [ORB-SLAM2](https://github.com/raulmur/ORB_SLAM2) developed by [Raul Mur-Artal](http://webdiis.unizar.es/~raulmur/), [Juan D. Tardos](http://webdiis.unizar.es/~jdtardos/), [J. M. M. Montiel](http://webdiis.unizar.es/~josemari/) and [Dorian Galvez-Lopez](http://doriangalvez.com/) ([DBoW2](https://github.com/dorian3d/DBoW2)).
+alt="ORB-SLAM3" width="240" height="180" border="10" />
### Related Publications:
@@ -70,6 +70,10 @@ Required to calculate the alignment of the trajectory with the ground truth. **R
* (deb) `sudo apt install libpython2.7-dev`
* (mac) preinstalled with osx
+## ROS (optional)
+
+We provide some examples to process the live input of a monocular, monocular-inertial, stereo, stereo-inertial or RGB-D camera using ROS. Building these examples is optional. They have been tested with ROS Melodic under Ubuntu 18.04.
+
# 3. Building ORB-SLAM3 library and examples
Clone the repository:
@@ -126,3 +130,82 @@ Execute the following script to process sequences and compute the RMS ATE:
./tum_vi_eval_examples
```
+# 6. ROS Examples
+
+### Building the nodes for mono, mono-inertial, stereo, stereo-inertial and RGB-D
+Tested with ROS Melodic and Ubuntu 18.04.
+
+1. Add the path including *Examples/ROS/ORB_SLAM3* to the ROS_PACKAGE_PATH environment variable. Open .bashrc file:
+ ```
+ gedit ~/.bashrc
+ ```
+and add the following line at the end. Replace PATH with the folder where you cloned ORB_SLAM3:
+
+ ```
+ export ROS_PACKAGE_PATH=${ROS_PACKAGE_PATH}:PATH/ORB_SLAM3/Examples/ROS
+ ```
+
+2. Execute `build_ros.sh` script:
+
+ ```
+ chmod +x build_ros.sh
+ ./build_ros.sh
+ ```
+
+### Running Monocular Node
+For a monocular input from topic `/camera/image_raw`, run node ORB_SLAM3/Mono. You will need to provide the vocabulary file and a settings file. See the monocular examples above.
+
+ ```
+ rosrun ORB_SLAM3 Mono PATH_TO_VOCABULARY PATH_TO_SETTINGS_FILE
+ ```
+
+### Running Monocular-Inertial Node
+For a monocular input from topic `/camera/image_raw` and an inertial input from topic `/imu`, run node ORB_SLAM3/Mono_Inertial. Setting the optional third argument to true will apply CLAHE equalization to the images (mainly for the TUM-VI dataset).
+
+ ```
+ rosrun ORB_SLAM3 Mono_Inertial PATH_TO_VOCABULARY PATH_TO_SETTINGS_FILE [EQUALIZATION]
+ ```
+
+### Running Stereo Node
+For a stereo input from topics `/camera/left/image_raw` and `/camera/right/image_raw`, run node ORB_SLAM3/Stereo. You will need to provide the vocabulary file and a settings file. For the Pinhole camera model, if you **provide rectification matrices** (see the Examples/Stereo/EuRoC.yaml example and the settings sketch below), the node will rectify the images online, **otherwise the images must be pre-rectified**. For the FishEye camera model, rectification is not required since the system works with the original images:
+
+ ```
+ rosrun ORB_SLAM3 Stereo PATH_TO_VOCABULARY PATH_TO_SETTINGS_FILE ONLINE_RECTIFICATION
+ ```
+
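+The rectification block that the Stereo and Stereo_Inertial nodes read from the settings file uses the LEFT.* / RIGHT.* keys shown below (key names taken from the node source in this patch). This is only a minimal sketch with placeholder values; copy the real numbers from your own calibration, as in Examples/Stereo/EuRoC.yaml:
+
+ ```
+ # Stereo rectification (Pinhole model only). Placeholder values, not a real calibration.
+ LEFT.height: 480
+ LEFT.width: 752
+ LEFT.D: !!opencv-matrix
+    rows: 1
+    cols: 5
+    dt: d
+    data: [0.0, 0.0, 0.0, 0.0, 0.0]
+ LEFT.K: !!opencv-matrix
+    rows: 3
+    cols: 3
+    dt: d
+    data: [458.0, 0.0, 367.0, 0.0, 457.0, 248.0, 0.0, 0.0, 1.0]
+ # LEFT.R (3x3), LEFT.P (3x4) and the corresponding RIGHT.* entries follow the same pattern.
+ ```
+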
+### Running Stereo-Inertial Node
+For a stereo input from topics `/camera/left/image_raw` and `/camera/right/image_raw`, and an inertial input from topic `/imu`, run node ORB_SLAM3/Stereo_Inertial. You will need to provide the vocabulary file and a settings file, including rectification matrices if required, in a similar way to the Stereo case:
+
+ ```
+ rosrun ORB_SLAM3 Stereo_Inertial PATH_TO_VOCABULARY PATH_TO_SETTINGS_FILE ONLINE_RECTIFICATION [EQUALIZATION]
+ ```
+
+### Running RGB-D Node
+For an RGB-D input from topics `/camera/rgb/image_raw` and `/camera/depth_registered/image_raw`, run node ORB_SLAM3/RGBD. You will need to provide the vocabulary file and a settings file. See the RGB-D example above.
+
+ ```
+ rosrun ORB_SLAM3 RGBD PATH_TO_VOCABULARY PATH_TO_SETTINGS_FILE
+ ```
+
+**Running ROS example:** Download a rosbag (e.g. V1_02_medium.bag) from the EuRoC dataset (http://projects.asl.ethz.ch/datasets/doku.php?id=kmavvisualinertialdatasets). Open three terminal tabs and, for a Stereo-Inertial configuration, run one of the following commands in each tab:
+ ```
+ roscore
+ ```
+
+ ```
+ rosrun ORB_SLAM3 Stereo_Inertial Vocabulary/ORBvoc.txt Examples/Stereo-Inertial/EuRoC.yaml true
+ ```
+
+ ```
+ rosbag play --pause V1_02_medium.bag /cam0/image_raw:=/camera/left/image_raw /cam1/image_raw:=/camera/right/image_raw /imu0:=/imu
+ ```
+
+Once ORB-SLAM3 has loaded the vocabulary, press space in the rosbag tab.
+
+**Remark:** For rosbags from the TUM-VI dataset, some playback issues may appear due to the chunk size. One possible solution is to re-bag them with the default chunk size, for example:
+ ```
+ rosrun rosbag fastrebag.py dataset-room1_512_16.bag dataset-room1_512_16_small_chunks.bag
+ ```
+
+
+
diff --git a/Thirdparty/g2o/g2o/core/optimization_algorithm_levenberg.cpp b/Thirdparty/g2o/g2o/core/optimization_algorithm_levenberg.cpp
index 1fd5e42af8..067678f4a4 100644
--- a/Thirdparty/g2o/g2o/core/optimization_algorithm_levenberg.cpp
+++ b/Thirdparty/g2o/g2o/core/optimization_algorithm_levenberg.cpp
@@ -44,11 +44,11 @@ namespace g2o {
OptimizationAlgorithmWithHessian(solver)
{
_currentLambda = -1.;
- _tau = 1e-50; // Carlos: originally 1e-5
+ _tau = 1e-5; // Carlos: originally 1e-5
_goodStepUpperScale = 2./3.;
_goodStepLowerScale = 1./3.;
_userLambdaInit = _properties.makeProperty<Property<double> >("initialLambda", 0.);
- _maxTrialsAfterFailure = _properties.makeProperty<Property<int> >("maxTrialsAfterFailure", 100); // Carlos: Originally 10 iterations
+ _maxTrialsAfterFailure = _properties.makeProperty<Property<int> >("maxTrialsAfterFailure", 10); // Carlos: Originally 10 iterations
_ni=2.;
_levenbergIterations = 0;
_nBad = 0;
diff --git a/include/G2oTypes.h b/include/G2oTypes.h
index d9ad707393..3f7113fbc1 100644
--- a/include/G2oTypes.h
+++ b/include/G2oTypes.h
@@ -676,9 +676,9 @@ class EdgeAccRW : public g2o::BaseBinaryEdge<3,Eigen::Vector3d,VertexAccBias,Ver
virtual bool write(std::ostream& os) const{return false;}
void computeError(){
- const VertexGyroBias* VG1= static_cast<const VertexGyroBias*>(_vertices[0]);
- const VertexGyroBias* VG2= static_cast<const VertexGyroBias*>(_vertices[1]);
- _error = VG2->estimate()-VG1->estimate();
+ const VertexAccBias* VA1= static_cast<const VertexAccBias*>(_vertices[0]);
+ const VertexAccBias* VA2= static_cast<const VertexAccBias*>(_vertices[1]);
+ _error = VA2->estimate()-VA1->estimate();
}
virtual void linearizeOplus(){
diff --git a/src/Frame.cc b/src/Frame.cc
index 4ed93939f8..87814cd9cc 100644
--- a/src/Frame.cc
+++ b/src/Frame.cc
@@ -118,7 +118,6 @@ Frame::Frame(const cv::Mat &imLeft, const cv::Mat &imRight, const double &timeSt
N = mvKeys.size();
-
if(mvKeys.empty())
return;
@@ -281,11 +280,6 @@ Frame::Frame(const cv::Mat &imGray, const double &timeStamp, ORBextractor* extra
// Frame ID
mnId=nNextId++;
- // imGray(cv::Range(0,175),cv::Range(0,512)) = cv::Mat::zeros(cv::Size(512, 175),CV_8UC1);
- // imGray(cv::Range(350,512),cv::Range(260,360)) = cv::Mat::zeros(cv::Size(100, 162),CV_8UC1);
-
- // imGray = imGray(cv::Range(250,512),cv::Range(0,512)).clone();
-
// Scale Level Info
mnScaleLevels = mpORBextractorLeft->GetLevels();
mfScaleFactor = mpORBextractorLeft->GetScaleFactor();
@@ -308,7 +302,6 @@ Frame::Frame(const cv::Mat &imGray, const double &timeStamp, ORBextractor* extra
N = mvKeys.size();
-
if(mvKeys.empty())
return;
@@ -344,6 +337,7 @@ Frame::Frame(const cv::Mat &imGray, const double &timeStamp, ORBextractor* extra
mbInitialComputations=false;
}
+
mb = mbf/fx;
//Set no stereo fisheye information
diff --git a/src/Optimizer.cc b/src/Optimizer.cc
index e32ed01cbb..c0624c916b 100644
--- a/src/Optimizer.cc
+++ b/src/Optimizer.cc
@@ -1678,11 +1678,6 @@ void Optimizer::LocalBundleAdjustment(KeyFrame *pKF, bool* pbStopFlag, vector::iterator lit=lLocalMapPoints.begin(), lend=lLocalMapPoints.end(); lit!=lend; lit++)
@@ -1733,29 +1728,12 @@ void Optimizer::LocalBundleAdjustment(KeyFrame *pKF, bool* pbStopFlag, Map* pMap
if(pMP)
if(!pMP->isBad() && pMP->GetMap() == pCurrentMap)
{
- /*if(sNumObsMP.find(pMP) == sNumObsMP.end())
+
+ if(pMP->mnBALocalForKF!=pKF->mnId)
{
- sNumObsMP.insert(pMP);
- int index_mp = pMP->GetIndexInKeyFrame(pKFi);
- if(pKFi->mvuRight[index_mp]>=0)
- {
- // Stereo, it has at least 2 observations by pKFi
- if(pMP->mnBALocalForKF!=pKF->mnId)
- {
- lLocalMapPoints.push_back(pMP);
- pMP->mnBALocalForKF=pKF->mnId;
- }
- }
+ lLocalMapPoints.push_back(pMP);
+ pMP->mnBALocalForKF=pKF->mnId;
}
- else
- {*/
- if(pMP->mnBALocalForKF!=pKF->mnId)
- {
- lLocalMapPoints.push_back(pMP);
- pMP->mnBALocalForKF=pKF->mnId;
- }
- //}
-
}
}
}
@@ -1872,10 +1850,6 @@ void Optimizer::LocalBundleAdjustment(KeyFrame *pKF, bool* pbStopFlag, Map* pMap
if(pKFi->mnId>maxKFid)
maxKFid=pKFi->mnId;
}
- //Verbose::PrintMess("LM-LBA: Fixed KFs added", Verbose::VERBOSITY_DEBUG);
-
- //Verbose::PrintMess("LM-LBA: opt/fixed KFs: " + to_string(lLocalKeyFrames.size()) + "/" + to_string(lFixedCameras.size()), Verbose::VERBOSITY_DEBUG);
- //Verbose::PrintMess("LM-LBA: local MPs: " + to_string(lLocalMapPoints.size()), Verbose::VERBOSITY_DEBUG);
// Set MapPoint vertices
const int nExpectedSize = (lLocalKeyFrames.size()+lFixedCameras.size())*lLocalMapPoints.size();
@@ -2182,21 +2156,8 @@ void Optimizer::LocalBundleAdjustment(KeyFrame *pKF, bool* pbStopFlag, Map* pMap
bRedrawError = true;
string folder_name = "test_LBA";
string name = "_PreLM_LBA";
- //pMap->printReprojectionError(lLocalKeyFrames, pKF, name, folder_name);
name = "_PreLM_LBA_Fixed";
- //pMap->printReprojectionError(lFixedCameras, pKF, name, folder_name);
- /*for(size_t i=0, iend=vpEdgesMono.size(); iisBad())
- continue;
-
- Verbose::PrintMess("ERROR CHI2: " + to_string(e->chi2()) + "; DEPTH POSITIVE: " + to_string(e->isDepthPositive()), Verbose::VERBOSITY_NORMAL);
- }*/
- //return;
}
@@ -2315,11 +2276,6 @@ void Optimizer::LocalBundleAdjustment(KeyFrame *pKF, bool* pbStopFlag, Map* pMap
Verbose::PrintMess("LM-LBA: Bad observations in mono " + to_string(numBadMonoMP) + " and stereo " + to_string(numBadStereoMP), Verbose::VERBOSITY_DEBUG);
}
}
- //Verbose::PrintMess("LM-LBA: Num fixed cameras " + to_string(num_fixedKF), Verbose::VERBOSITY_DEBUG);
- //Verbose::PrintMess("LM-LBA: Num Points " + to_string(lLocalMapPoints.size()), Verbose::VERBOSITY_DEBUG);
- //Verbose::PrintMess("LM-LBA: Num optimized cameras " + to_string(lLocalKeyFrames.size()), Verbose::VERBOSITY_DEBUG);
- //Verbose::PrintMess("----------", Verbose::VERBOSITY_DEBUG);
-
//Points
for(list<MapPoint*>::iterator lit=lLocalMapPoints.begin(), lend=lLocalMapPoints.end(); lit!=lend; lit++)
@@ -3932,8 +3888,6 @@ int Optimizer::OptimizeSim3(KeyFrame *pKF1, KeyFrame *pKF2, vector &
int Optimizer::OptimizeSim3(KeyFrame *pKF1, KeyFrame *pKF2, vector<MapPoint *> &vpMatches1, g2o::Sim3 &g2oS12, const float th2,
                            const bool bFixScale, Eigen::Matrix<double,7,7> &mAcumHessian, const bool bAllPoints)
{
- bool bShowImages = false;
-
g2o::SparseOptimizer optimizer;
g2o::BlockSolverX::LinearSolverType * linearSolver;
@@ -3944,12 +3898,6 @@ int Optimizer::OptimizeSim3(KeyFrame *pKF1, KeyFrame *pKF2, vector &
g2o::OptimizationAlgorithmLevenberg* solver = new g2o::OptimizationAlgorithmLevenberg(solver_ptr);
optimizer.setAlgorithm(solver);
- // Calibration
- const cv::Mat &K1 = pKF1->mK;
- const cv::Mat &K2 = pKF2->mK;
-
- //const cv::Mat &DistCoeff2 = pKF2->mDistCoef;
-
// Camera poses
const cv::Mat R1w = pKF1->GetRotation();
const cv::Mat t1w = pKF1->GetTranslation();
@@ -3979,16 +3927,6 @@ int Optimizer::OptimizeSim3(KeyFrame *pKF1, KeyFrame *pKF2, vector &
vpEdges21.reserve(2*N);
vbIsInKF2.reserve(2*N);
- float cx1 = K1.at<float>(0,2);
- float cy1 = K1.at<float>(1,2);
- float fx1 = K1.at<float>(0,0);
- float fy1 = K1.at<float>(1,1);
-
- float cx2 = K2.at<float>(0,2);
- float cy2 = K2.at<float>(1,2);
- float fx2 = K2.at<float>(0,0);
- float fy2 = K2.at<float>(1,1);
-
const float deltaHuber = sqrt(th2);
int nCorrespondences = 0;
@@ -3997,11 +3935,6 @@ int Optimizer::OptimizeSim3(KeyFrame *pKF1, KeyFrame *pKF2, vector &
int nOutKF2 = 0;
int nMatchWithoutMP = 0;
- cv::Mat img1 = cv::imread(pKF1->mNameFile, CV_LOAD_IMAGE_UNCHANGED);
- cv::cvtColor(img1, img1, CV_GRAY2BGR);
- cv::Mat img2 = cv::imread(pKF2->mNameFile, CV_LOAD_IMAGE_UNCHANGED);
- cv::cvtColor(img2, img2, CV_GRAY2BGR);
-
vector<int> vIdsOnlyInKF2;
for(int i=0; i<N; i++)
vIdsOnlyInKF2.push_back(id2);
}
-
- cv::circle(img1, pKF1->mvKeys[i].pt, 1, cv::Scalar(0, 0, 255));
-
continue;
-
}
if(i2<0 && !bAllPoints)
@@ -4093,42 +4022,6 @@ int Optimizer::OptimizeSim3(KeyFrame *pKF1, KeyFrame *pKF2, vector &
obs1 << kpUn1.pt.x, kpUn1.pt.y;
ORB_SLAM3::EdgeSim3ProjectXYZ* e12 = new ORB_SLAM3::EdgeSim3ProjectXYZ();
- /*bool inKF1;
- if(pMP1)
- {
- const cv::KeyPoint &kpUn1 = pKF1->mvKeysUn[i];
- obs1 << kpUn1.pt.x, kpUn1.pt.y;
-
- inKF1 = true;
- }
- else
- {
- float invz = 1/P3D1c.at<float>(2);
- float x = P3D1c.at<float>(0)*invz;
- float y = P3D1c.at<float>(1)*invz;
-
- obs1 << x, y;
- kpUn1 = cv::KeyPoint(cv::Point2f(x, y), pMP1->mnTrackScaleLevel);
-
- inKF1 = false;
- }*/
-
- if(bShowImages) //TODO test to project the matched points in the image
- {
- cv::circle(img1, pKF1->mvKeys[i].pt, 1, cv::Scalar(0, 255, 0));
-
- Eigen::Matrix<double,3,1> eigP3D2c = Converter::toVector3d(P3D2c);
- Eigen::Matrix<double,3,1> eigP3D1c = g2oS12.map(eigP3D2c);
- cv::Mat cvP3D1c = Converter::toCvMat(eigP3D1c);
-
- float invz = 1/cvP3D1c.at<float>(2);
- float x = fx1 * cvP3D1c.at<float>(0)*invz + cx1;
- float y = fy1 * cvP3D1c.at<float>(1)*invz + cy1;
-
- cv::Point2f ptProjPoint(x, y);
- cv::line(img1, pKF1->mvKeys[i].pt, ptProjPoint, cv::Scalar(255, 0, 0), 1);
- }
-
e12->setVertex(0, dynamic_cast<g2o::OptimizableGraph::Vertex*>(optimizer.vertex(id2)));
e12->setVertex(1, dynamic_cast<g2o::OptimizableGraph::Vertex*>(optimizer.vertex(0)));
@@ -4152,11 +4045,6 @@ int Optimizer::OptimizeSim3(KeyFrame *pKF1, KeyFrame *pKF2, vector &
inKF2 = true;
nInKF2++;
-
- if(bShowImages)
- {
- cv::circle(img2, pKF2->mvKeys[i2].pt, 1, cv::Scalar(0, 255, 0));
- }
}
else
{
@@ -4164,52 +4052,11 @@ int Optimizer::OptimizeSim3(KeyFrame *pKF1, KeyFrame *pKF2, vector &
float x = P3D2c.at<float>(0)*invz;
float y = P3D2c.at<float>(1)*invz;
- /*cv::Mat mat(1,2,CV_32F);
- mat.at<float>(0,0) = x;
- mat.at<float>(0,1) = y;
- mat=mat.reshape(2);
- cv::undistortPoints(mat,mat,K2,DistCoeff2,cv::Mat(),K2);
- mat=mat.reshape(1);
-
- x = mat.at<float>(0,0);
- y = mat.at<float>(0,1);*/
-
obs2 << x, y;
kpUn2 = cv::KeyPoint(cv::Point2f(x, y), pMP2->mnTrackScaleLevel);
inKF2 = false;
nOutKF2++;
-
- if(bShowImages)
- {
- cv::circle(img2, kpUn2.pt, 1, cv::Scalar(0, 0, 255));
- }
-
- //TODO print projection, because all of them become in outliers
-
-
-
- // Project in image and check it is not outside
- //float u = pKF2->fx * x + pKF2->cx;
- //float v = pKF2->fy * y + pKF2->cy;
- //obs2 << u, v;
- //kpUn2 = cv::KeyPoint(cv::Point2f(u, v), pMP2->mnTrackScaleLevel);
- }
-
- {
- Eigen::Matrix<double,3,1> eigP3D1c = Converter::toVector3d(P3D1c);
- Eigen::Matrix<double,3,1> eigP3D2c = g2oS12.inverse().map(eigP3D1c);
- cv::Mat cvP3D2c = Converter::toCvMat(eigP3D2c);
-
- float invz = 1/cvP3D2c.at<float>(2);
- float x = fx2 * cvP3D2c.at<float>(0)*invz + cx2;
- float y = fy2 * cvP3D2c.at<float>(1)*invz + cy2;
-
- if(bShowImages)
- {
- cv::Point2f ptProjPoint(x, y);
- cv::line(img2, kpUn2.pt, ptProjPoint, cv::Scalar(255, 0, 0), 1);
- }
}
ORB_SLAM3::EdgeInverseSim3ProjectXYZ* e21 = new ORB_SLAM3::EdgeInverseSim3ProjectXYZ();
@@ -4269,13 +4116,6 @@ int Optimizer::OptimizeSim3(KeyFrame *pKF1, KeyFrame *pKF2, vector &
e12->setRobustKernel(0);
e21->setRobustKernel(0);
}
- if(bShowImages)
- {
- string pathImg1 = "./test_OptSim3/KF_" + to_string(pKF1->mnId) + "_Main.jpg";
- cv::imwrite(pathImg1, img1);
- string pathImg2 = "./test_OptSim3/KF_" + to_string(pKF1->mnId) + "_Matched.jpg";
- cv::imwrite(pathImg2, img2);
- }
Verbose::PrintMess("Sim3: First Opt -> Correspondences: " + to_string(nCorrespondences) + "; nBad: " + to_string(nBad) + "; nBadOutKF2: " + to_string(nBadOutKF2), Verbose::VERBOSITY_DEBUG);
@@ -4321,7 +4161,6 @@ int Optimizer::OptimizeSim3(KeyFrame *pKF1, KeyFrame *pKF2, vector &
//Verbose::PrintMess("Sim3: Initial seed " + g2oS12, Verbose::VERBOSITY_DEBUG);
g2o::VertexSim3Expmap* vSim3_recov = static_cast<g2o::VertexSim3Expmap*>(optimizer.vertex(0));
g2oS12= vSim3_recov->estimate();
-
//Verbose::PrintMess("Sim3: Optimized solution " + g2oS12, Verbose::VERBOSITY_DEBUG);
return nIn;
@@ -5388,6 +5227,7 @@ void Optimizer::InertialOptimization(Map *pMap, Eigen::Matrix3d &Rwg, double &sc
vpei.reserve(vpKFs.size());
vector<pair<KeyFrame*,KeyFrame*> > vppUsedKF;
vppUsedKF.reserve(vpKFs.size());
+ std::cout << "build optimization graph" << std::endl;
for(size_t i=0;iisBad() || pKFi->mPrevKF->mnId>maxKFid)
continue;
+ if(!pKFi->mpImuPreintegrated)
+ std::cout << "Not preintegrated measurement" << std::endl;
pKFi->mpImuPreintegrated->SetNewBias(pKFi->mPrevKF->GetImuBias());
g2o::HyperGraph::Vertex* VP1 = optimizer.vertex(pKFi->mPrevKF->mnId);
@@ -5434,9 +5276,13 @@ void Optimizer::InertialOptimization(Map *pMap, Eigen::Matrix3d &Rwg, double &sc
// Compute error for different scales
std::set<g2o::HyperGraph::Edge*> setEdges = optimizer.edges();
+ std::cout << "start optimization" << std::endl;
optimizer.setVerbose(false);
optimizer.initializeOptimization();
optimizer.optimize(its);
+
+ std::cout << "end optimization" << std::endl;
+
scale = VS->estimate();
// Recover optimized data
@@ -5456,6 +5302,8 @@ void Optimizer::InertialOptimization(Map *pMap, Eigen::Matrix3d &Rwg, double &sc
cv::Mat cvbg = Converter::toCvMat(bg);
//Keyframes velocities and biases
+ std::cout << "update Keyframes velocities and biases" << std::endl;
+
const int N = vpKFs.size();
for(size_t i=0; i<N; i++)
diff --git a/src/System.cc b/src/System.cc
--- a/src/System.cc
+++ b/src/System.cc
    cv::Mat Tcw = mpTracker->GrabImageMonocular(im,timestamp,filename);
- /*if(mpLocalMapper->mbNewInit)
- {
- // Save data
- SaveDebugData(mpLocalMapper->mIdxInit);
- mpLocalMapper->mbNewInit=false;
- // Check if reset
- {
- unique_lock<mutex> lock(mMutexReset);
- if(mpLocalMapper->mInitTime>10.0)
- {
- mpTracker->Reset();
- mbReset = false;
- mbResetActiveMap = false;
- mpLocalMapper->mInitSect++;
- }
- }
- }*/
-
unique_lock<mutex> lock2(mMutexState);
mTrackingState = mpTracker->mState;
mTrackedMapPoints = mpTracker->mCurrentFrame.mvpMapPoints;
diff --git a/src/Tracking.cc b/src/Tracking.cc
index 495ea06fb9..bfb8348d2c 100644
--- a/src/Tracking.cc
+++ b/src/Tracking.cc
@@ -86,6 +86,11 @@ Tracking::Tracking(System *pSys, ORBVocabulary* pVoc, FrameDrawer *pFrameDrawer,
float K3 = fSettings["Camera.k3"];
float K4 = fSettings["Camera.k4"];
+ std::cout << "K1 = " << K1 << std::endl;
+ std::cout << "K2 = " << K2 << std::endl;
+ std::cout << "K3 = " << K3 << std::endl;
+ std::cout << "K4 = " << K4 << std::endl;
+
vector<float> vCamCalib{fx,fy,cx,cy,K1,K2,K3,K4};
mpCamera = new KannalaBrandt8(vCamCalib);
@@ -514,7 +519,6 @@ cv::Mat Tracking::GrabImageMonocular(const cv::Mat &im, const double ×tamp,
{
if(mState==NOT_INITIALIZED || mState==NO_IMAGES_YET)
{
- cout << "init extractor" << endl;
mCurrentFrame = Frame(mImGray,timestamp,mpIniORBextractor,mpORBVocabulary,mpCamera,mDistCoef,mbf,mThDepth,&mLastFrame,*mpImuCalib);
}
else
@@ -668,10 +672,6 @@ void Tracking::PreintegrateIMU()
mCurrentFrame.mpImuPreintegrated = mpImuPreintegratedFromLastKF;
mCurrentFrame.mpLastKeyFrame = mpLastKeyFrame;
- if(!mpLastKeyFrame)
- {
- cout << "last KF is empty!" << endl;
- }
mCurrentFrame.setIntegrated();
Verbose::PrintMess("Preintegration is finished!! ", Verbose::VERBOSITY_DEBUG);
@@ -943,8 +943,6 @@ void Tracking::Track()
mbMapUpdated = true;
}
- //std::cout << "T2" << std::endl;
-
if(mState==NOT_INITIALIZED)
{
@@ -1374,7 +1372,6 @@ void Tracking::StereoInitialization()
if (cv::norm(mCurrentFrame.mpImuPreintegratedFrame->avgA-mLastFrame.mpImuPreintegratedFrame->avgA)<0.5)
{
- cout << cv::norm(mCurrentFrame.mpImuPreintegratedFrame->avgA) << endl;
cout << "not enough acceleration" << endl;
return;
}
@@ -1477,6 +1474,7 @@ void Tracking::MonocularInitialization()
// Set Reference Frame
if(mCurrentFrame.mvKeys.size()>100)
{
+
mInitialFrame = Frame(mCurrentFrame);
mLastFrame = Frame(mCurrentFrame);
mvbPrevMatched.resize(mCurrentFrame.mvKeysUn.size());
@@ -2939,7 +2937,7 @@ void Tracking::ResetActiveMap(bool bLocMap)
index++;
}
- cout << num_lost << " Frames had been set to lost" << endl;
+ cout << num_lost << " Frames set to lost" << endl;
mlbLost = lbLost;