From f2be24bdafd835a6d9ca315719eeb251eb9b6c28 Mon Sep 17 00:00:00 2001 From: nindanaoto Date: Sun, 26 Dec 2021 17:22:47 +0000 Subject: [PATCH 1/7] copy Example_old/ROS --- Examples/ROS/ORB_SLAM3/Asus.yaml | 70 ++ Examples/ROS/ORB_SLAM3/CMakeLists.txt | 116 ++++ Examples/ROS/ORB_SLAM3/manifest.xml | 15 + Examples/ROS/ORB_SLAM3/src/AR/ViewerAR.cc | 640 ++++++++++++++++++ Examples/ROS/ORB_SLAM3/src/AR/ViewerAR.h | 118 ++++ Examples/ROS/ORB_SLAM3/src/AR/ros_mono_ar.cc | 167 +++++ Examples/ROS/ORB_SLAM3/src/ros_mono.cc | 94 +++ .../ROS/ORB_SLAM3/src/ros_mono_inertial.cc | 194 ++++++ Examples/ROS/ORB_SLAM3/src/ros_rgbd.cc | 112 +++ Examples/ROS/ORB_SLAM3/src/ros_stereo.cc | 170 +++++ .../ROS/ORB_SLAM3/src/ros_stereo_inertial.cc | 286 ++++++++ 11 files changed, 1982 insertions(+) create mode 100644 Examples/ROS/ORB_SLAM3/Asus.yaml create mode 100644 Examples/ROS/ORB_SLAM3/CMakeLists.txt create mode 100644 Examples/ROS/ORB_SLAM3/manifest.xml create mode 100644 Examples/ROS/ORB_SLAM3/src/AR/ViewerAR.cc create mode 100644 Examples/ROS/ORB_SLAM3/src/AR/ViewerAR.h create mode 100644 Examples/ROS/ORB_SLAM3/src/AR/ros_mono_ar.cc create mode 100644 Examples/ROS/ORB_SLAM3/src/ros_mono.cc create mode 100644 Examples/ROS/ORB_SLAM3/src/ros_mono_inertial.cc create mode 100644 Examples/ROS/ORB_SLAM3/src/ros_rgbd.cc create mode 100644 Examples/ROS/ORB_SLAM3/src/ros_stereo.cc create mode 100644 Examples/ROS/ORB_SLAM3/src/ros_stereo_inertial.cc diff --git a/Examples/ROS/ORB_SLAM3/Asus.yaml b/Examples/ROS/ORB_SLAM3/Asus.yaml new file mode 100644 index 0000000000..4d0c08ec7d --- /dev/null +++ b/Examples/ROS/ORB_SLAM3/Asus.yaml @@ -0,0 +1,70 @@ +%YAML:1.0 + +#-------------------------------------------------------------------------------------------- +# Camera Parameters. Adjust them! +#-------------------------------------------------------------------------------------------- +Camera.type: "PinHole" + +# Camera calibration and distortion parameters (OpenCV) +Camera.fx: 535.4 +Camera.fy: 539.2 +Camera.cx: 320.1 +Camera.cy: 247.6 + +Camera.k1: 0.0 +Camera.k2: 0.0 +Camera.p1: 0.0 +Camera.p2: 0.0 + +Camera.width: 640 +Camera.height: 480 + +# Camera frames per second +Camera.fps: 30.0 + +# IR projector baseline times fx (aprox.) +Camera.bf: 40.0 + +# Color order of the images (0: BGR, 1: RGB. It is ignored if images are grayscale) +Camera.RGB: 1 + +# Close/Far threshold. Baseline times. +ThDepth: 40.0 + +# Deptmap values factor +DepthMapFactor: 1.0 + +#-------------------------------------------------------------------------------------------- +# ORB Parameters +#-------------------------------------------------------------------------------------------- + +# ORB Extractor: Number of features per image +ORBextractor.nFeatures: 1000 + +# ORB Extractor: Scale factor between levels in the scale pyramid +ORBextractor.scaleFactor: 1.2 + +# ORB Extractor: Number of levels in the scale pyramid +ORBextractor.nLevels: 8 + +# ORB Extractor: Fast threshold +# Image is divided in a grid. At each cell FAST are extracted imposing a minimum response. +# Firstly we impose iniThFAST. 
If no corners are detected we impose a lower value minThFAST +# You can lower these values if your images have low contrast +ORBextractor.iniThFAST: 20 +ORBextractor.minThFAST: 7 + +#-------------------------------------------------------------------------------------------- +# Viewer Parameters +#-------------------------------------------------------------------------------------------- +Viewer.KeyFrameSize: 0.05 +Viewer.KeyFrameLineWidth: 1 +Viewer.GraphLineWidth: 0.9 +Viewer.PointSize:2 +Viewer.CameraSize: 0.08 +Viewer.CameraLineWidth: 3 +Viewer.ViewpointX: 0 +Viewer.ViewpointY: -0.7 +Viewer.ViewpointZ: -1.8 +Viewer.ViewpointF: 500 + diff --git a/Examples/ROS/ORB_SLAM3/CMakeLists.txt b/Examples/ROS/ORB_SLAM3/CMakeLists.txt new file mode 100644 index 0000000000..12792fe44b --- /dev/null +++ b/Examples/ROS/ORB_SLAM3/CMakeLists.txt @@ -0,0 +1,116 @@ +cmake_minimum_required(VERSION 2.4.6) +include($ENV{ROS_ROOT}/core/rosbuild/rosbuild.cmake) + +rosbuild_init() + +IF(NOT ROS_BUILD_TYPE) + SET(ROS_BUILD_TYPE Release) +ENDIF() + +MESSAGE("Build type: " ${ROS_BUILD_TYPE}) + +set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -Wall -O3 -march=native ") +set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wall -O3 -march=native") + +# Check C++11 or C++0x support +include(CheckCXXCompilerFlag) +CHECK_CXX_COMPILER_FLAG("-std=c++11" COMPILER_SUPPORTS_CXX11) +CHECK_CXX_COMPILER_FLAG("-std=c++0x" COMPILER_SUPPORTS_CXX0X) +if(COMPILER_SUPPORTS_CXX11) + set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=c++11") + add_definitions(-DCOMPILEDWITHC11) + message(STATUS "Using flag -std=c++11.") +elseif(COMPILER_SUPPORTS_CXX0X) + set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=c++0x") + add_definitions(-DCOMPILEDWITHC0X) + message(STATUS "Using flag -std=c++0x.") +else() + message(FATAL_ERROR "The compiler ${CMAKE_CXX_COMPILER} has no C++11 support. 
Please use a different C++ compiler.") +endif() + +LIST(APPEND CMAKE_MODULE_PATH ${PROJECT_SOURCE_DIR}/../../../cmake_modules) + +find_package(OpenCV 3.0 QUIET) +if(NOT OpenCV_FOUND) + find_package(OpenCV 2.4.3 QUIET) + if(NOT OpenCV_FOUND) + message(FATAL_ERROR "OpenCV > 2.4.3 not found.") + endif() +endif() + +find_package(Eigen3 3.1.0 REQUIRED) +find_package(Pangolin REQUIRED) + +include_directories( +${PROJECT_SOURCE_DIR} +${PROJECT_SOURCE_DIR}/../../../ +${PROJECT_SOURCE_DIR}/../../../include +${PROJECT_SOURCE_DIR}/../../../include/CameraModels +${Pangolin_INCLUDE_DIRS} +) + +set(LIBS +${OpenCV_LIBS} +${EIGEN3_LIBS} +${Pangolin_LIBRARIES} +${PROJECT_SOURCE_DIR}/../../../Thirdparty/DBoW2/lib/libDBoW2.so +${PROJECT_SOURCE_DIR}/../../../Thirdparty/g2o/lib/libg2o.so +${PROJECT_SOURCE_DIR}/../../../lib/libORB_SLAM3.so +-lboost_system +) + +# Node for monocular camera +rosbuild_add_executable(Mono +src/ros_mono.cc +) + +target_link_libraries(Mono +${LIBS} +) + +# Node for monocular camera (Augmented Reality Demo) +rosbuild_add_executable(MonoAR +src/AR/ros_mono_ar.cc +src/AR/ViewerAR.h +src/AR/ViewerAR.cc +) + +target_link_libraries(MonoAR +${LIBS} +) + +# Node for stereo camera +rosbuild_add_executable(Stereo +src/ros_stereo.cc +) + +target_link_libraries(Stereo +${LIBS} +) + +# Node for RGB-D camera +rosbuild_add_executable(RGBD +src/ros_rgbd.cc +) + +target_link_libraries(RGBD +${LIBS} +) + +# Node for monocular-inertial camera +rosbuild_add_executable(Mono_Inertial +src/ros_mono_inertial.cc +) + +target_link_libraries(Mono_Inertial +${LIBS} +) + +# Node for stereo-inertial camera +rosbuild_add_executable(Stereo_Inertial +src/ros_stereo_inertial.cc +) + +target_link_libraries(Stereo_Inertial +${LIBS} +) diff --git a/Examples/ROS/ORB_SLAM3/manifest.xml b/Examples/ROS/ORB_SLAM3/manifest.xml new file mode 100644 index 0000000000..79e6e6fce9 --- /dev/null +++ b/Examples/ROS/ORB_SLAM3/manifest.xml @@ -0,0 +1,15 @@ + + + ORB_SLAM3 + + Carlos Campos, Richard Elvira, Juan J. Gomez, Jose M.M. Montiel, Juan D. Tardos + GPLv3 + + + + + + + + + diff --git a/Examples/ROS/ORB_SLAM3/src/AR/ViewerAR.cc b/Examples/ROS/ORB_SLAM3/src/AR/ViewerAR.cc new file mode 100644 index 0000000000..dc9f18b7e3 --- /dev/null +++ b/Examples/ROS/ORB_SLAM3/src/AR/ViewerAR.cc @@ -0,0 +1,640 @@ +/** +* This file is part of ORB-SLAM3 +* +* Copyright (C) 2017-2021 Carlos Campos, Richard Elvira, Juan J. Gómez Rodríguez, José M.M. Montiel and Juan D. Tardós, University of Zaragoza. +* Copyright (C) 2014-2016 Raúl Mur-Artal, José M.M. Montiel and Juan D. Tardós, University of Zaragoza. +* +* ORB-SLAM3 is free software: you can redistribute it and/or modify it under the terms of the GNU General Public +* License as published by the Free Software Foundation, either version 3 of the License, or +* (at your option) any later version. +* +* ORB-SLAM3 is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even +* the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +* GNU General Public License for more details. +* +* You should have received a copy of the GNU General Public License along with ORB-SLAM3. +* If not, see . 
+*/ + +#include "ViewerAR.h" + +#include + +#include +#include +#include + +using namespace std; + +namespace ORB_SLAM3 +{ + +const float eps = 1e-4; + +cv::Mat ExpSO3(const float &x, const float &y, const float &z) +{ + cv::Mat I = cv::Mat::eye(3,3,CV_32F); + const float d2 = x*x+y*y+z*z; + const float d = sqrt(d2); + cv::Mat W = (cv::Mat_(3,3) << 0, -z, y, + z, 0, -x, + -y, x, 0); + if(d(0),v.at(1),v.at(2)); +} + +ViewerAR::ViewerAR(){} + +void ViewerAR::Run() +{ + int w,h,wui; + + cv::Mat im, Tcw; + int status; + vector vKeys; + vector vMPs; + + while(1) + { + GetImagePose(im,Tcw,status,vKeys,vMPs); + if(im.empty()) + cv::waitKey(mT); + else + { + w = im.cols; + h = im.rows; + break; + } + } + + wui=200; + + pangolin::CreateWindowAndBind("Viewer",w+wui,h); + + glEnable(GL_DEPTH_TEST); + glEnable (GL_BLEND); + + pangolin::CreatePanel("menu").SetBounds(0.0,1.0,0.0,pangolin::Attach::Pix(wui)); + pangolin::Var menu_detectplane("menu.Insert Cube",false,false); + pangolin::Var menu_clear("menu.Clear All",false,false); + pangolin::Var menu_drawim("menu.Draw Image",true,true); + pangolin::Var menu_drawcube("menu.Draw Cube",true,true); + pangolin::Var menu_cubesize("menu. Cube Size",0.05,0.01,0.3); + pangolin::Var menu_drawgrid("menu.Draw Grid",true,true); + pangolin::Var menu_ngrid("menu. Grid Elements",3,1,10); + pangolin::Var menu_sizegrid("menu. Element Size",0.05,0.01,0.3); + pangolin::Var menu_drawpoints("menu.Draw Points",false,true); + + pangolin::Var menu_LocalizationMode("menu.Localization Mode",false,true); + bool bLocalizationMode = false; + + pangolin::View& d_image = pangolin::Display("image") + .SetBounds(0,1.0f,pangolin::Attach::Pix(wui),1.0f,(float)w/h) + .SetLock(pangolin::LockLeft, pangolin::LockTop); + + pangolin::GlTexture imageTexture(w,h,GL_RGB,false,0,GL_RGB,GL_UNSIGNED_BYTE); + + pangolin::OpenGlMatrixSpec P = pangolin::ProjectionMatrixRDF_TopLeft(w,h,fx,fy,cx,cy,0.001,1000); + + vector vpPlane; + + while(1) + { + + if(menu_LocalizationMode && !bLocalizationMode) + { + mpSystem->ActivateLocalizationMode(); + bLocalizationMode = true; + } + else if(!menu_LocalizationMode && bLocalizationMode) + { + mpSystem->DeactivateLocalizationMode(); + bLocalizationMode = false; + } + + glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT); + + // Activate camera view + d_image.Activate(); + glColor3f(1.0,1.0,1.0); + + // Get last image and its computed pose from SLAM + GetImagePose(im,Tcw,status,vKeys,vMPs); + + // Add text to image + PrintStatus(status,bLocalizationMode,im); + + if(menu_drawpoints) + DrawTrackedPoints(vKeys,vMPs,im); + + // Draw image + if(menu_drawim) + DrawImageTexture(imageTexture,im); + + glClear(GL_DEPTH_BUFFER_BIT); + + // Load camera projection + glMatrixMode(GL_PROJECTION); + P.Load(); + + glMatrixMode(GL_MODELVIEW); + + // Load camera pose + LoadCameraPose(Tcw); + + // Draw virtual things + if(status==2) + { + if(menu_clear) + { + if(!vpPlane.empty()) + { + for(size_t i=0; iMapChanged()) + { + cout << "Map changed. All virtual elements are recomputed!" 
<< endl; + bRecompute = true; + } + } + + for(size_t i=0; iRecompute(); + } + glPushMatrix(); + pPlane->glTpw.Multiply(); + + // Draw cube + if(menu_drawcube) + { + DrawCube(menu_cubesize); + } + + // Draw grid plane + if(menu_drawgrid) + { + DrawPlane(menu_ngrid,menu_sizegrid); + } + + glPopMatrix(); + } + } + } + + + } + + pangolin::FinishFrame(); + usleep(mT*1000); + } + +} + +void ViewerAR::SetImagePose(const cv::Mat &im, const cv::Mat &Tcw, const int &status, const vector &vKeys, const vector &vMPs) +{ + unique_lock lock(mMutexPoseImage); + mImage = im.clone(); + mTcw = Tcw.clone(); + mStatus = status; + mvKeys = vKeys; + mvMPs = vMPs; +} + +void ViewerAR::GetImagePose(cv::Mat &im, cv::Mat &Tcw, int &status, std::vector &vKeys, std::vector &vMPs) +{ + unique_lock lock(mMutexPoseImage); + im = mImage.clone(); + Tcw = mTcw.clone(); + status = mStatus; + vKeys = mvKeys; + vMPs = mvMPs; +} + +void ViewerAR::LoadCameraPose(const cv::Mat &Tcw) +{ + if(!Tcw.empty()) + { + pangolin::OpenGlMatrix M; + + M.m[0] = Tcw.at(0,0); + M.m[1] = Tcw.at(1,0); + M.m[2] = Tcw.at(2,0); + M.m[3] = 0.0; + + M.m[4] = Tcw.at(0,1); + M.m[5] = Tcw.at(1,1); + M.m[6] = Tcw.at(2,1); + M.m[7] = 0.0; + + M.m[8] = Tcw.at(0,2); + M.m[9] = Tcw.at(1,2); + M.m[10] = Tcw.at(2,2); + M.m[11] = 0.0; + + M.m[12] = Tcw.at(0,3); + M.m[13] = Tcw.at(1,3); + M.m[14] = Tcw.at(2,3); + M.m[15] = 1.0; + + M.Load(); + } +} + +void ViewerAR::PrintStatus(const int &status, const bool &bLocMode, cv::Mat &im) +{ + if(!bLocMode) + { + switch(status) + { + case 1: {AddTextToImage("SLAM NOT INITIALIZED",im,255,0,0); break;} + case 2: {AddTextToImage("SLAM ON",im,0,255,0); break;} + case 3: {AddTextToImage("SLAM LOST",im,255,0,0); break;} + } + } + else + { + switch(status) + { + case 1: {AddTextToImage("SLAM NOT INITIALIZED",im,255,0,0); break;} + case 2: {AddTextToImage("LOCALIZATION ON",im,0,255,0); break;} + case 3: {AddTextToImage("LOCALIZATION LOST",im,255,0,0); break;} + } + } +} + +void ViewerAR::AddTextToImage(const string &s, cv::Mat &im, const int r, const int g, const int b) +{ + int l = 10; + //imText.rowRange(im.rows-imText.rows,imText.rows) = cv::Mat::zeros(textSize.height+10,im.cols,im.type()); + cv::putText(im,s,cv::Point(l,im.rows-l),cv::FONT_HERSHEY_PLAIN,1.5,cv::Scalar(255,255,255),2,8); + cv::putText(im,s,cv::Point(l-1,im.rows-l),cv::FONT_HERSHEY_PLAIN,1.5,cv::Scalar(255,255,255),2,8); + cv::putText(im,s,cv::Point(l+1,im.rows-l),cv::FONT_HERSHEY_PLAIN,1.5,cv::Scalar(255,255,255),2,8); + cv::putText(im,s,cv::Point(l-1,im.rows-(l-1)),cv::FONT_HERSHEY_PLAIN,1.5,cv::Scalar(255,255,255),2,8); + cv::putText(im,s,cv::Point(l,im.rows-(l-1)),cv::FONT_HERSHEY_PLAIN,1.5,cv::Scalar(255,255,255),2,8); + cv::putText(im,s,cv::Point(l+1,im.rows-(l-1)),cv::FONT_HERSHEY_PLAIN,1.5,cv::Scalar(255,255,255),2,8); + cv::putText(im,s,cv::Point(l-1,im.rows-(l+1)),cv::FONT_HERSHEY_PLAIN,1.5,cv::Scalar(255,255,255),2,8); + cv::putText(im,s,cv::Point(l,im.rows-(l+1)),cv::FONT_HERSHEY_PLAIN,1.5,cv::Scalar(255,255,255),2,8); + cv::putText(im,s,cv::Point(l+1,im.rows-(l+1)),cv::FONT_HERSHEY_PLAIN,1.5,cv::Scalar(255,255,255),2,8); + + cv::putText(im,s,cv::Point(l,im.rows-l),cv::FONT_HERSHEY_PLAIN,1.5,cv::Scalar(r,g,b),2,8); +} + +void ViewerAR::DrawImageTexture(pangolin::GlTexture &imageTexture, cv::Mat &im) +{ + if(!im.empty()) + { + imageTexture.Upload(im.data,GL_RGB,GL_UNSIGNED_BYTE); + imageTexture.RenderToViewportFlipY(); + } +} + +void ViewerAR::DrawCube(const float &size,const float x, const float y, const float z) +{ + pangolin::OpenGlMatrix M = 
pangolin::OpenGlMatrix::Translate(-x,-size-y,-z); + glPushMatrix(); + M.Multiply(); + pangolin::glDrawColouredCube(-size,size); + glPopMatrix(); +} + +void ViewerAR::DrawPlane(Plane *pPlane, int ndivs, float ndivsize) +{ + glPushMatrix(); + pPlane->glTpw.Multiply(); + DrawPlane(ndivs,ndivsize); + glPopMatrix(); +} + +void ViewerAR::DrawPlane(int ndivs, float ndivsize) +{ + // Plane parallel to x-z at origin with normal -y + const float minx = -ndivs*ndivsize; + const float minz = -ndivs*ndivsize; + const float maxx = ndivs*ndivsize; + const float maxz = ndivs*ndivsize; + + + glLineWidth(2); + glColor3f(0.7f,0.7f,1.0f); + glBegin(GL_LINES); + + for(int n = 0; n<=2*ndivs; n++) + { + glVertex3f(minx+ndivsize*n,0,minz); + glVertex3f(minx+ndivsize*n,0,maxz); + glVertex3f(minx,0,minz+ndivsize*n); + glVertex3f(maxx,0,minz+ndivsize*n); + } + + glEnd(); + +} + +void ViewerAR::DrawTrackedPoints(const std::vector &vKeys, const std::vector &vMPs, cv::Mat &im) +{ + const int N = vKeys.size(); + + + for(int i=0; i &vMPs, const int iterations) +{ + // Retrieve 3D points + vector vPoints; + vPoints.reserve(vMPs.size()); + vector vPointMP; + vPointMP.reserve(vMPs.size()); + + for(size_t i=0; iObservations()>5) + { + vPoints.push_back(pMP->GetWorldPos()); + vPointMP.push_back(pMP); + } + } + } + + const int N = vPoints.size(); + + if(N<50) + return NULL; + + + // Indices for minimum set selection + vector vAllIndices; + vAllIndices.reserve(N); + vector vAvailableIndices; + + for(int i=0; i bestvDist; + + //RANSAC + for(int n=0; n(3,0); + const float b = vt.at(3,1); + const float c = vt.at(3,2); + const float d = vt.at(3,3); + + vector vDistances(N,0); + + const float f = 1.0f/sqrt(a*a+b*b+c*c+d*d); + + for(int i=0; i(0)*a+vPoints[i].at(1)*b+vPoints[i].at(2)*c+d)*f; + } + + vector vSorted = vDistances; + sort(vSorted.begin(),vSorted.end()); + + int nth = max((int)(0.2*N),20); + const float medianDist = vSorted[nth]; + + if(medianDist vbInliers(N,false); + int nInliers = 0; + for(int i=0; i vInlierMPs(nInliers,NULL); + int nin = 0; + for(int i=0; i &vMPs, const cv::Mat &Tcw):mvMPs(vMPs),mTcw(Tcw.clone()) +{ + rang = -3.14f/2+((float)rand()/RAND_MAX)*3.14f; + Recompute(); +} + +void Plane::Recompute() +{ + const int N = mvMPs.size(); + + // Recompute plane with all points + cv::Mat A = cv::Mat(N,4,CV_32F); + A.col(3) = cv::Mat::ones(N,1,CV_32F); + + o = cv::Mat::zeros(3,1,CV_32F); + + int nPoints = 0; + for(int i=0; iisBad()) + { + cv::Mat Xw = pMP->GetWorldPos(); + o+=Xw; + A.row(nPoints).colRange(0,3) = Xw.t(); + nPoints++; + } + } + A.resize(nPoints); + + cv::Mat u,w,vt; + cv::SVDecomp(A,w,u,vt,cv::SVD::MODIFY_A | cv::SVD::FULL_UV); + + float a = vt.at(3,0); + float b = vt.at(3,1); + float c = vt.at(3,2); + + o = o*(1.0f/nPoints); + const float f = 1.0f/sqrt(a*a+b*b+c*c); + + // Compute XC just the first time + if(XC.empty()) + { + cv::Mat Oc = -mTcw.colRange(0,3).rowRange(0,3).t()*mTcw.rowRange(0,3).col(3); + XC = Oc-o; + } + + if((XC.at(0)*a+XC.at(1)*b+XC.at(2)*c)>0) + { + a=-a; + b=-b; + c=-c; + } + + const float nx = a*f; + const float ny = b*f; + const float nz = c*f; + + n = (cv::Mat_(3,1)<(3,1) << 0.0f, 1.0f, 0.0f); + + cv::Mat v = up.cross(n); + const float sa = cv::norm(v); + const float ca = up.dot(n); + const float ang = atan2(sa,ca); + Tpw = cv::Mat::eye(4,4,CV_32F); + + + Tpw.rowRange(0,3).colRange(0,3) = ExpSO3(v*ang/sa)*ExpSO3(up*rang); + o.copyTo(Tpw.col(3).rowRange(0,3)); + + glTpw.m[0] = Tpw.at(0,0); + glTpw.m[1] = Tpw.at(1,0); + glTpw.m[2] = Tpw.at(2,0); + glTpw.m[3] = 0.0; + + glTpw.m[4] 
= Tpw.at(0,1); + glTpw.m[5] = Tpw.at(1,1); + glTpw.m[6] = Tpw.at(2,1); + glTpw.m[7] = 0.0; + + glTpw.m[8] = Tpw.at(0,2); + glTpw.m[9] = Tpw.at(1,2); + glTpw.m[10] = Tpw.at(2,2); + glTpw.m[11] = 0.0; + + glTpw.m[12] = Tpw.at(0,3); + glTpw.m[13] = Tpw.at(1,3); + glTpw.m[14] = Tpw.at(2,3); + glTpw.m[15] = 1.0; + +} + +Plane::Plane(const float &nx, const float &ny, const float &nz, const float &ox, const float &oy, const float &oz) +{ + n = (cv::Mat_(3,1)<(3,1)<(3,1) << 0.0f, 1.0f, 0.0f); + + cv::Mat v = up.cross(n); + const float s = cv::norm(v); + const float c = up.dot(n); + const float a = atan2(s,c); + Tpw = cv::Mat::eye(4,4,CV_32F); + const float rang = -3.14f/2+((float)rand()/RAND_MAX)*3.14f; + cout << rang; + Tpw.rowRange(0,3).colRange(0,3) = ExpSO3(v*a/s)*ExpSO3(up*rang); + o.copyTo(Tpw.col(3).rowRange(0,3)); + + glTpw.m[0] = Tpw.at(0,0); + glTpw.m[1] = Tpw.at(1,0); + glTpw.m[2] = Tpw.at(2,0); + glTpw.m[3] = 0.0; + + glTpw.m[4] = Tpw.at(0,1); + glTpw.m[5] = Tpw.at(1,1); + glTpw.m[6] = Tpw.at(2,1); + glTpw.m[7] = 0.0; + + glTpw.m[8] = Tpw.at(0,2); + glTpw.m[9] = Tpw.at(1,2); + glTpw.m[10] = Tpw.at(2,2); + glTpw.m[11] = 0.0; + + glTpw.m[12] = Tpw.at(0,3); + glTpw.m[13] = Tpw.at(1,3); + glTpw.m[14] = Tpw.at(2,3); + glTpw.m[15] = 1.0; +} + +} diff --git a/Examples/ROS/ORB_SLAM3/src/AR/ViewerAR.h b/Examples/ROS/ORB_SLAM3/src/AR/ViewerAR.h new file mode 100644 index 0000000000..ef4c578d46 --- /dev/null +++ b/Examples/ROS/ORB_SLAM3/src/AR/ViewerAR.h @@ -0,0 +1,118 @@ +/** +* This file is part of ORB-SLAM3 +* +* Copyright (C) 2017-2021 Carlos Campos, Richard Elvira, Juan J. Gómez Rodríguez, José M.M. Montiel and Juan D. Tardós, University of Zaragoza. +* Copyright (C) 2014-2016 Raúl Mur-Artal, José M.M. Montiel and Juan D. Tardós, University of Zaragoza. +* +* ORB-SLAM3 is free software: you can redistribute it and/or modify it under the terms of the GNU General Public +* License as published by the Free Software Foundation, either version 3 of the License, or +* (at your option) any later version. +* +* ORB-SLAM3 is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even +* the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +* GNU General Public License for more details. +* +* You should have received a copy of the GNU General Public License along with ORB-SLAM3. +* If not, see . +*/ + + +#ifndef VIEWERAR_H +#define VIEWERAR_H + +#include +#include +#include +#include +#include"../../../include/System.h" + +namespace ORB_SLAM3 +{ + +class Plane +{ +public: + Plane(const std::vector &vMPs, const cv::Mat &Tcw); + Plane(const float &nx, const float &ny, const float &nz, const float &ox, const float &oy, const float &oz); + + void Recompute(); + + //normal + cv::Mat n; + //origin + cv::Mat o; + //arbitrary orientation along normal + float rang; + //transformation from world to the plane + cv::Mat Tpw; + pangolin::OpenGlMatrix glTpw; + //MapPoints that define the plane + std::vector mvMPs; + //camera pose when the plane was first observed (to compute normal direction) + cv::Mat mTcw, XC; +}; + +class ViewerAR +{ +public: + ViewerAR(); + + void SetFPS(const float fps){ + mFPS = fps; + mT=1e3/fps; + } + + void SetSLAM(ORB_SLAM3::System* pSystem){ + mpSystem = pSystem; + } + + // Main thread function. 
+ void Run(); + + void SetCameraCalibration(const float &fx_, const float &fy_, const float &cx_, const float &cy_){ + fx = fx_; fy = fy_; cx = cx_; cy = cy_; + } + + void SetImagePose(const cv::Mat &im, const cv::Mat &Tcw, const int &status, + const std::vector &vKeys, const std::vector &vMPs); + + void GetImagePose(cv::Mat &im, cv::Mat &Tcw, int &status, + std::vector &vKeys, std::vector &vMPs); + +private: + + //SLAM + ORB_SLAM3::System* mpSystem; + + void PrintStatus(const int &status, const bool &bLocMode, cv::Mat &im); + void AddTextToImage(const std::string &s, cv::Mat &im, const int r=0, const int g=0, const int b=0); + void LoadCameraPose(const cv::Mat &Tcw); + void DrawImageTexture(pangolin::GlTexture &imageTexture, cv::Mat &im); + void DrawCube(const float &size, const float x=0, const float y=0, const float z=0); + void DrawPlane(int ndivs, float ndivsize); + void DrawPlane(Plane* pPlane, int ndivs, float ndivsize); + void DrawTrackedPoints(const std::vector &vKeys, const std::vector &vMPs, cv::Mat &im); + + Plane* DetectPlane(const cv::Mat Tcw, const std::vector &vMPs, const int iterations=50); + + // frame rate + float mFPS, mT; + float fx,fy,cx,cy; + + // Last processed image and computed pose by the SLAM + std::mutex mMutexPoseImage; + cv::Mat mTcw; + cv::Mat mImage; + int mStatus; + std::vector mvKeys; + std::vector mvMPs; + +}; + + +} + + +#endif // VIEWERAR_H + + diff --git a/Examples/ROS/ORB_SLAM3/src/AR/ros_mono_ar.cc b/Examples/ROS/ORB_SLAM3/src/AR/ros_mono_ar.cc new file mode 100644 index 0000000000..e26fbca66a --- /dev/null +++ b/Examples/ROS/ORB_SLAM3/src/AR/ros_mono_ar.cc @@ -0,0 +1,167 @@ +/** +* This file is part of ORB-SLAM3 +* +* Copyright (C) 2017-2021 Carlos Campos, Richard Elvira, Juan J. Gómez Rodríguez, José M.M. Montiel and Juan D. Tardós, University of Zaragoza. +* Copyright (C) 2014-2016 Raúl Mur-Artal, José M.M. Montiel and Juan D. Tardós, University of Zaragoza. +* +* ORB-SLAM3 is free software: you can redistribute it and/or modify it under the terms of the GNU General Public +* License as published by the Free Software Foundation, either version 3 of the License, or +* (at your option) any later version. +* +* ORB-SLAM3 is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even +* the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +* GNU General Public License for more details. +* +* You should have received a copy of the GNU General Public License along with ORB-SLAM3. +* If not, see . +*/ + + +#include +#include +#include +#include + +#include +#include + +#include +#include + +#include"../../../include/System.h" + +#include"ViewerAR.h" + +using namespace std; + + +ORB_SLAM3::ViewerAR viewerAR; +bool bRGB = true; + +cv::Mat K; +cv::Mat DistCoef; + + +class ImageGrabber +{ +public: + ImageGrabber(ORB_SLAM3::System* pSLAM):mpSLAM(pSLAM){} + + void GrabImage(const sensor_msgs::ImageConstPtr& msg); + + ORB_SLAM3::System* mpSLAM; +}; + +int main(int argc, char **argv) +{ + ros::init(argc, argv, "Mono"); + ros::start(); + + if(argc != 3) + { + cerr << endl << "Usage: rosrun ORB_SLAM3 Mono path_to_vocabulary path_to_settings" << endl; + ros::shutdown(); + return 1; + } + + // Create SLAM system. It initializes all system threads and gets ready to process frames. 
+ ORB_SLAM3::System SLAM(argv[1],argv[2],ORB_SLAM3::System::MONOCULAR,false); + + + cout << endl << endl; + cout << "-----------------------" << endl; + cout << "Augmented Reality Demo" << endl; + cout << "1) Translate the camera to initialize SLAM." << endl; + cout << "2) Look at a planar region and translate the camera." << endl; + cout << "3) Press Insert Cube to place a virtual cube in the plane. " << endl; + cout << endl; + cout << "You can place several cubes in different planes." << endl; + cout << "-----------------------" << endl; + cout << endl; + + + viewerAR.SetSLAM(&SLAM); + + ImageGrabber igb(&SLAM); + + ros::NodeHandle nodeHandler; + ros::Subscriber sub = nodeHandler.subscribe("/camera/image_raw", 1, &ImageGrabber::GrabImage,&igb); + + + cv::FileStorage fSettings(argv[2], cv::FileStorage::READ); + bRGB = static_cast((int)fSettings["Camera.RGB"]); + float fps = fSettings["Camera.fps"]; + viewerAR.SetFPS(fps); + + float fx = fSettings["Camera.fx"]; + float fy = fSettings["Camera.fy"]; + float cx = fSettings["Camera.cx"]; + float cy = fSettings["Camera.cy"]; + + viewerAR.SetCameraCalibration(fx,fy,cx,cy); + + K = cv::Mat::eye(3,3,CV_32F); + K.at(0,0) = fx; + K.at(1,1) = fy; + K.at(0,2) = cx; + K.at(1,2) = cy; + + DistCoef = cv::Mat::zeros(4,1,CV_32F); + DistCoef.at(0) = fSettings["Camera.k1"]; + DistCoef.at(1) = fSettings["Camera.k2"]; + DistCoef.at(2) = fSettings["Camera.p1"]; + DistCoef.at(3) = fSettings["Camera.p2"]; + const float k3 = fSettings["Camera.k3"]; + if(k3!=0) + { + DistCoef.resize(5); + DistCoef.at(4) = k3; + } + + thread tViewer = thread(&ORB_SLAM3::ViewerAR::Run,&viewerAR); + + ros::spin(); + + // Stop all threads + SLAM.Shutdown(); + + // Save camera trajectory + SLAM.SaveKeyFrameTrajectoryTUM("KeyFrameTrajectory.txt"); + + ros::shutdown(); + + return 0; +} + +void ImageGrabber::GrabImage(const sensor_msgs::ImageConstPtr& msg) +{ + // Copy the ros image message to cv::Mat. + cv_bridge::CvImageConstPtr cv_ptr; + try + { + cv_ptr = cv_bridge::toCvShare(msg); + } + catch (cv_bridge::Exception& e) + { + ROS_ERROR("cv_bridge exception: %s", e.what()); + return; + } + cv::Mat im = cv_ptr->image.clone(); + cv::Mat imu; + cv::Mat Tcw = mpSLAM->TrackMonocular(cv_ptr->image,cv_ptr->header.stamp.toSec()); + int state = mpSLAM->GetTrackingState(); + vector vMPs = mpSLAM->GetTrackedMapPoints(); + vector vKeys = mpSLAM->GetTrackedKeyPointsUn(); + + cv::undistort(im,imu,K,DistCoef); + + if(bRGB) + viewerAR.SetImagePose(imu,Tcw,state,vKeys,vMPs); + else + { + cv::cvtColor(imu,imu,CV_RGB2BGR); + viewerAR.SetImagePose(imu,Tcw,state,vKeys,vMPs); + } +} + + diff --git a/Examples/ROS/ORB_SLAM3/src/ros_mono.cc b/Examples/ROS/ORB_SLAM3/src/ros_mono.cc new file mode 100644 index 0000000000..7e1adb3009 --- /dev/null +++ b/Examples/ROS/ORB_SLAM3/src/ros_mono.cc @@ -0,0 +1,94 @@ +/** +* This file is part of ORB-SLAM3 +* +* Copyright (C) 2017-2021 Carlos Campos, Richard Elvira, Juan J. Gómez Rodríguez, José M.M. Montiel and Juan D. Tardós, University of Zaragoza. +* Copyright (C) 2014-2016 Raúl Mur-Artal, José M.M. Montiel and Juan D. Tardós, University of Zaragoza. +* +* ORB-SLAM3 is free software: you can redistribute it and/or modify it under the terms of the GNU General Public +* License as published by the Free Software Foundation, either version 3 of the License, or +* (at your option) any later version. 
+* +* ORB-SLAM3 is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even +* the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +* GNU General Public License for more details. +* +* You should have received a copy of the GNU General Public License along with ORB-SLAM3. +* If not, see . +*/ + + +#include +#include +#include +#include + +#include +#include + +#include + +#include"../../../include/System.h" + +using namespace std; + +class ImageGrabber +{ +public: + ImageGrabber(ORB_SLAM3::System* pSLAM):mpSLAM(pSLAM){} + + void GrabImage(const sensor_msgs::ImageConstPtr& msg); + + ORB_SLAM3::System* mpSLAM; +}; + +int main(int argc, char **argv) +{ + ros::init(argc, argv, "Mono"); + ros::start(); + + if(argc != 3) + { + cerr << endl << "Usage: rosrun ORB_SLAM3 Mono path_to_vocabulary path_to_settings" << endl; + ros::shutdown(); + return 1; + } + + // Create SLAM system. It initializes all system threads and gets ready to process frames. + ORB_SLAM3::System SLAM(argv[1],argv[2],ORB_SLAM3::System::MONOCULAR,true); + + ImageGrabber igb(&SLAM); + + ros::NodeHandle nodeHandler; + ros::Subscriber sub = nodeHandler.subscribe("/camera/image_raw", 1, &ImageGrabber::GrabImage,&igb); + + ros::spin(); + + // Stop all threads + SLAM.Shutdown(); + + // Save camera trajectory + SLAM.SaveKeyFrameTrajectoryTUM("KeyFrameTrajectory.txt"); + + ros::shutdown(); + + return 0; +} + +void ImageGrabber::GrabImage(const sensor_msgs::ImageConstPtr& msg) +{ + // Copy the ros image message to cv::Mat. + cv_bridge::CvImageConstPtr cv_ptr; + try + { + cv_ptr = cv_bridge::toCvShare(msg); + } + catch (cv_bridge::Exception& e) + { + ROS_ERROR("cv_bridge exception: %s", e.what()); + return; + } + + mpSLAM->TrackMonocular(cv_ptr->image,cv_ptr->header.stamp.toSec()); +} + + diff --git a/Examples/ROS/ORB_SLAM3/src/ros_mono_inertial.cc b/Examples/ROS/ORB_SLAM3/src/ros_mono_inertial.cc new file mode 100644 index 0000000000..fa3630285c --- /dev/null +++ b/Examples/ROS/ORB_SLAM3/src/ros_mono_inertial.cc @@ -0,0 +1,194 @@ +/** +* This file is part of ORB-SLAM3 +* +* Copyright (C) 2017-2021 Carlos Campos, Richard Elvira, Juan J. Gómez Rodríguez, José M.M. Montiel and Juan D. Tardós, University of Zaragoza. +* Copyright (C) 2014-2016 Raúl Mur-Artal, José M.M. Montiel and Juan D. Tardós, University of Zaragoza. +* +* ORB-SLAM3 is free software: you can redistribute it and/or modify it under the terms of the GNU General Public +* License as published by the Free Software Foundation, either version 3 of the License, or +* (at your option) any later version. +* +* ORB-SLAM3 is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even +* the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +* GNU General Public License for more details. +* +* You should have received a copy of the GNU General Public License along with ORB-SLAM3. +* If not, see . 
+*/ + +#include +#include +#include +#include +#include +#include +#include +#include + +#include +#include +#include + +#include + +#include"../../../include/System.h" +#include"../include/ImuTypes.h" + +using namespace std; + +class ImuGrabber +{ +public: + ImuGrabber(){}; + void GrabImu(const sensor_msgs::ImuConstPtr &imu_msg); + + queue imuBuf; + std::mutex mBufMutex; +}; + +class ImageGrabber +{ +public: + ImageGrabber(ORB_SLAM3::System* pSLAM, ImuGrabber *pImuGb, const bool bClahe): mpSLAM(pSLAM), mpImuGb(pImuGb), mbClahe(bClahe){} + + void GrabImage(const sensor_msgs::ImageConstPtr& msg); + cv::Mat GetImage(const sensor_msgs::ImageConstPtr &img_msg); + void SyncWithImu(); + + queue img0Buf; + std::mutex mBufMutex; + + ORB_SLAM3::System* mpSLAM; + ImuGrabber *mpImuGb; + + const bool mbClahe; + cv::Ptr mClahe = cv::createCLAHE(3.0, cv::Size(8, 8)); +}; + + + +int main(int argc, char **argv) +{ + ros::init(argc, argv, "Mono_Inertial"); + ros::NodeHandle n("~"); + ros::console::set_logger_level(ROSCONSOLE_DEFAULT_NAME, ros::console::levels::Info); + bool bEqual = false; + if(argc < 3 || argc > 4) + { + cerr << endl << "Usage: rosrun ORB_SLAM3 Mono_Inertial path_to_vocabulary path_to_settings [do_equalize]" << endl; + ros::shutdown(); + return 1; + } + + + if(argc==4) + { + std::string sbEqual(argv[3]); + if(sbEqual == "true") + bEqual = true; + } + + // Create SLAM system. It initializes all system threads and gets ready to process frames. + ORB_SLAM3::System SLAM(argv[1],argv[2],ORB_SLAM3::System::IMU_MONOCULAR,true); + + ImuGrabber imugb; + ImageGrabber igb(&SLAM,&imugb,bEqual); // TODO + + // Maximum delay, 5 seconds + ros::Subscriber sub_imu = n.subscribe("/imu", 1000, &ImuGrabber::GrabImu, &imugb); + ros::Subscriber sub_img0 = n.subscribe("/camera/image_raw", 100, &ImageGrabber::GrabImage,&igb); + + std::thread sync_thread(&ImageGrabber::SyncWithImu,&igb); + + ros::spin(); + + return 0; +} + +void ImageGrabber::GrabImage(const sensor_msgs::ImageConstPtr &img_msg) +{ + mBufMutex.lock(); + if (!img0Buf.empty()) + img0Buf.pop(); + img0Buf.push(img_msg); + mBufMutex.unlock(); +} + +cv::Mat ImageGrabber::GetImage(const sensor_msgs::ImageConstPtr &img_msg) +{ + // Copy the ros image message to cv::Mat. 
+ cv_bridge::CvImageConstPtr cv_ptr; + try + { + cv_ptr = cv_bridge::toCvShare(img_msg, sensor_msgs::image_encodings::MONO8); + } + catch (cv_bridge::Exception& e) + { + ROS_ERROR("cv_bridge exception: %s", e.what()); + } + + if(cv_ptr->image.type()==0) + { + return cv_ptr->image.clone(); + } + else + { + std::cout << "Error type" << std::endl; + return cv_ptr->image.clone(); + } +} + +void ImageGrabber::SyncWithImu() +{ + while(1) + { + cv::Mat im; + double tIm = 0; + if (!img0Buf.empty()&&!mpImuGb->imuBuf.empty()) + { + tIm = img0Buf.front()->header.stamp.toSec(); + if(tIm>mpImuGb->imuBuf.back()->header.stamp.toSec()) + continue; + { + this->mBufMutex.lock(); + im = GetImage(img0Buf.front()); + img0Buf.pop(); + this->mBufMutex.unlock(); + } + + vector vImuMeas; + mpImuGb->mBufMutex.lock(); + if(!mpImuGb->imuBuf.empty()) + { + // Load imu measurements from buffer + vImuMeas.clear(); + while(!mpImuGb->imuBuf.empty() && mpImuGb->imuBuf.front()->header.stamp.toSec()<=tIm) + { + double t = mpImuGb->imuBuf.front()->header.stamp.toSec(); + cv::Point3f acc(mpImuGb->imuBuf.front()->linear_acceleration.x, mpImuGb->imuBuf.front()->linear_acceleration.y, mpImuGb->imuBuf.front()->linear_acceleration.z); + cv::Point3f gyr(mpImuGb->imuBuf.front()->angular_velocity.x, mpImuGb->imuBuf.front()->angular_velocity.y, mpImuGb->imuBuf.front()->angular_velocity.z); + vImuMeas.push_back(ORB_SLAM3::IMU::Point(acc,gyr,t)); + mpImuGb->imuBuf.pop(); + } + } + mpImuGb->mBufMutex.unlock(); + if(mbClahe) + mClahe->apply(im,im); + + mpSLAM->TrackMonocular(im,tIm,vImuMeas); + } + + std::chrono::milliseconds tSleep(1); + std::this_thread::sleep_for(tSleep); + } +} + +void ImuGrabber::GrabImu(const sensor_msgs::ImuConstPtr &imu_msg) +{ + mBufMutex.lock(); + imuBuf.push(imu_msg); + mBufMutex.unlock(); + return; +} + + diff --git a/Examples/ROS/ORB_SLAM3/src/ros_rgbd.cc b/Examples/ROS/ORB_SLAM3/src/ros_rgbd.cc new file mode 100644 index 0000000000..faf6131541 --- /dev/null +++ b/Examples/ROS/ORB_SLAM3/src/ros_rgbd.cc @@ -0,0 +1,112 @@ +/** +* This file is part of ORB-SLAM3 +* +* Copyright (C) 2017-2021 Carlos Campos, Richard Elvira, Juan J. Gómez Rodríguez, José M.M. Montiel and Juan D. Tardós, University of Zaragoza. +* Copyright (C) 2014-2016 Raúl Mur-Artal, José M.M. Montiel and Juan D. Tardós, University of Zaragoza. +* +* ORB-SLAM3 is free software: you can redistribute it and/or modify it under the terms of the GNU General Public +* License as published by the Free Software Foundation, either version 3 of the License, or +* (at your option) any later version. +* +* ORB-SLAM3 is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even +* the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +* GNU General Public License for more details. +* +* You should have received a copy of the GNU General Public License along with ORB-SLAM3. +* If not, see . 
+*/ + + +#include +#include +#include +#include + +#include +#include +#include +#include +#include + +#include + +#include"../../../include/System.h" + +using namespace std; + +class ImageGrabber +{ +public: + ImageGrabber(ORB_SLAM3::System* pSLAM):mpSLAM(pSLAM){} + + void GrabRGBD(const sensor_msgs::ImageConstPtr& msgRGB,const sensor_msgs::ImageConstPtr& msgD); + + ORB_SLAM3::System* mpSLAM; +}; + +int main(int argc, char **argv) +{ + ros::init(argc, argv, "RGBD"); + ros::start(); + + if(argc != 3) + { + cerr << endl << "Usage: rosrun ORB_SLAM3 RGBD path_to_vocabulary path_to_settings" << endl; + ros::shutdown(); + return 1; + } + + // Create SLAM system. It initializes all system threads and gets ready to process frames. + ORB_SLAM3::System SLAM(argv[1],argv[2],ORB_SLAM3::System::RGBD,true); + + ImageGrabber igb(&SLAM); + + ros::NodeHandle nh; + + message_filters::Subscriber rgb_sub(nh, "/camera/rgb/image_raw", 100); + message_filters::Subscriber depth_sub(nh, "camera/depth_registered/image_raw", 100); + typedef message_filters::sync_policies::ApproximateTime sync_pol; + message_filters::Synchronizer sync(sync_pol(10), rgb_sub,depth_sub); + sync.registerCallback(boost::bind(&ImageGrabber::GrabRGBD,&igb,_1,_2)); + + ros::spin(); + + // Stop all threads + SLAM.Shutdown(); + + // Save camera trajectory + SLAM.SaveKeyFrameTrajectoryTUM("KeyFrameTrajectory.txt"); + + ros::shutdown(); + + return 0; +} + +void ImageGrabber::GrabRGBD(const sensor_msgs::ImageConstPtr& msgRGB,const sensor_msgs::ImageConstPtr& msgD) +{ + // Copy the ros image message to cv::Mat. + cv_bridge::CvImageConstPtr cv_ptrRGB; + try + { + cv_ptrRGB = cv_bridge::toCvShare(msgRGB); + } + catch (cv_bridge::Exception& e) + { + ROS_ERROR("cv_bridge exception: %s", e.what()); + return; + } + + cv_bridge::CvImageConstPtr cv_ptrD; + try + { + cv_ptrD = cv_bridge::toCvShare(msgD); + } + catch (cv_bridge::Exception& e) + { + ROS_ERROR("cv_bridge exception: %s", e.what()); + return; + } + mpSLAM->TrackRGBD(cv_ptrRGB->image,cv_ptrD->image,cv_ptrRGB->header.stamp.toSec()); +} + + diff --git a/Examples/ROS/ORB_SLAM3/src/ros_stereo.cc b/Examples/ROS/ORB_SLAM3/src/ros_stereo.cc new file mode 100644 index 0000000000..030fdb6af8 --- /dev/null +++ b/Examples/ROS/ORB_SLAM3/src/ros_stereo.cc @@ -0,0 +1,170 @@ +/** +* This file is part of ORB-SLAM3 +* +* Copyright (C) 2017-2021 Carlos Campos, Richard Elvira, Juan J. Gómez Rodríguez, José M.M. Montiel and Juan D. Tardós, University of Zaragoza. +* Copyright (C) 2014-2016 Raúl Mur-Artal, José M.M. Montiel and Juan D. Tardós, University of Zaragoza. +* +* ORB-SLAM3 is free software: you can redistribute it and/or modify it under the terms of the GNU General Public +* License as published by the Free Software Foundation, either version 3 of the License, or +* (at your option) any later version. +* +* ORB-SLAM3 is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even +* the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +* GNU General Public License for more details. +* +* You should have received a copy of the GNU General Public License along with ORB-SLAM3. +* If not, see . 
+*/ + + +#include +#include +#include +#include + +#include +#include +#include +#include +#include + +#include + +#include"../../../include/System.h" + +using namespace std; + +class ImageGrabber +{ +public: + ImageGrabber(ORB_SLAM3::System* pSLAM):mpSLAM(pSLAM){} + + void GrabStereo(const sensor_msgs::ImageConstPtr& msgLeft,const sensor_msgs::ImageConstPtr& msgRight); + + ORB_SLAM3::System* mpSLAM; + bool do_rectify; + cv::Mat M1l,M2l,M1r,M2r; +}; + +int main(int argc, char **argv) +{ + ros::init(argc, argv, "RGBD"); + ros::start(); + + if(argc != 4) + { + cerr << endl << "Usage: rosrun ORB_SLAM3 Stereo path_to_vocabulary path_to_settings do_rectify" << endl; + ros::shutdown(); + return 1; + } + + // Create SLAM system. It initializes all system threads and gets ready to process frames. + ORB_SLAM3::System SLAM(argv[1],argv[2],ORB_SLAM3::System::STEREO,true); + + ImageGrabber igb(&SLAM); + + stringstream ss(argv[3]); + ss >> boolalpha >> igb.do_rectify; + + if(igb.do_rectify) + { + // Load settings related to stereo calibration + cv::FileStorage fsSettings(argv[2], cv::FileStorage::READ); + if(!fsSettings.isOpened()) + { + cerr << "ERROR: Wrong path to settings" << endl; + return -1; + } + + cv::Mat K_l, K_r, P_l, P_r, R_l, R_r, D_l, D_r; + fsSettings["LEFT.K"] >> K_l; + fsSettings["RIGHT.K"] >> K_r; + + fsSettings["LEFT.P"] >> P_l; + fsSettings["RIGHT.P"] >> P_r; + + fsSettings["LEFT.R"] >> R_l; + fsSettings["RIGHT.R"] >> R_r; + + fsSettings["LEFT.D"] >> D_l; + fsSettings["RIGHT.D"] >> D_r; + + int rows_l = fsSettings["LEFT.height"]; + int cols_l = fsSettings["LEFT.width"]; + int rows_r = fsSettings["RIGHT.height"]; + int cols_r = fsSettings["RIGHT.width"]; + + if(K_l.empty() || K_r.empty() || P_l.empty() || P_r.empty() || R_l.empty() || R_r.empty() || D_l.empty() || D_r.empty() || + rows_l==0 || rows_r==0 || cols_l==0 || cols_r==0) + { + cerr << "ERROR: Calibration parameters to rectify stereo are missing!" << endl; + return -1; + } + + cv::initUndistortRectifyMap(K_l,D_l,R_l,P_l.rowRange(0,3).colRange(0,3),cv::Size(cols_l,rows_l),CV_32F,igb.M1l,igb.M2l); + cv::initUndistortRectifyMap(K_r,D_r,R_r,P_r.rowRange(0,3).colRange(0,3),cv::Size(cols_r,rows_r),CV_32F,igb.M1r,igb.M2r); + } + + ros::NodeHandle nh; + + message_filters::Subscriber left_sub(nh, "/camera/left/image_raw", 1); + message_filters::Subscriber right_sub(nh, "/camera/right/image_raw", 1); + typedef message_filters::sync_policies::ApproximateTime sync_pol; + message_filters::Synchronizer sync(sync_pol(10), left_sub,right_sub); + sync.registerCallback(boost::bind(&ImageGrabber::GrabStereo,&igb,_1,_2)); + + ros::spin(); + + // Stop all threads + SLAM.Shutdown(); + + // Save camera trajectory + SLAM.SaveKeyFrameTrajectoryTUM("KeyFrameTrajectory_TUM_Format.txt"); + SLAM.SaveTrajectoryTUM("FrameTrajectory_TUM_Format.txt"); + SLAM.SaveTrajectoryKITTI("FrameTrajectory_KITTI_Format.txt"); + + ros::shutdown(); + + return 0; +} + +void ImageGrabber::GrabStereo(const sensor_msgs::ImageConstPtr& msgLeft,const sensor_msgs::ImageConstPtr& msgRight) +{ + // Copy the ros image message to cv::Mat. 
+ cv_bridge::CvImageConstPtr cv_ptrLeft; + try + { + cv_ptrLeft = cv_bridge::toCvShare(msgLeft); + } + catch (cv_bridge::Exception& e) + { + ROS_ERROR("cv_bridge exception: %s", e.what()); + return; + } + + cv_bridge::CvImageConstPtr cv_ptrRight; + try + { + cv_ptrRight = cv_bridge::toCvShare(msgRight); + } + catch (cv_bridge::Exception& e) + { + ROS_ERROR("cv_bridge exception: %s", e.what()); + return; + } + + if(do_rectify) + { + cv::Mat imLeft, imRight; + cv::remap(cv_ptrLeft->image,imLeft,M1l,M2l,cv::INTER_LINEAR); + cv::remap(cv_ptrRight->image,imRight,M1r,M2r,cv::INTER_LINEAR); + mpSLAM->TrackStereo(imLeft,imRight,cv_ptrLeft->header.stamp.toSec()); + } + else + { + mpSLAM->TrackStereo(cv_ptrLeft->image,cv_ptrRight->image,cv_ptrLeft->header.stamp.toSec()); + } + +} + + diff --git a/Examples/ROS/ORB_SLAM3/src/ros_stereo_inertial.cc b/Examples/ROS/ORB_SLAM3/src/ros_stereo_inertial.cc new file mode 100644 index 0000000000..231ad72c07 --- /dev/null +++ b/Examples/ROS/ORB_SLAM3/src/ros_stereo_inertial.cc @@ -0,0 +1,286 @@ +/** +* This file is part of ORB-SLAM3 +* +* Copyright (C) 2017-2021 Carlos Campos, Richard Elvira, Juan J. Gómez Rodríguez, José M.M. Montiel and Juan D. Tardós, University of Zaragoza. +* Copyright (C) 2014-2016 Raúl Mur-Artal, José M.M. Montiel and Juan D. Tardós, University of Zaragoza. +* +* ORB-SLAM3 is free software: you can redistribute it and/or modify it under the terms of the GNU General Public +* License as published by the Free Software Foundation, either version 3 of the License, or +* (at your option) any later version. +* +* ORB-SLAM3 is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even +* the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +* GNU General Public License for more details. +* +* You should have received a copy of the GNU General Public License along with ORB-SLAM3. +* If not, see . 
+*/ + +#include +#include +#include +#include +#include +#include +#include +#include + +#include +#include +#include + +#include + +#include"../../../include/System.h" +#include"../include/ImuTypes.h" + +using namespace std; + +class ImuGrabber +{ +public: + ImuGrabber(){}; + void GrabImu(const sensor_msgs::ImuConstPtr &imu_msg); + + queue imuBuf; + std::mutex mBufMutex; +}; + +class ImageGrabber +{ +public: + ImageGrabber(ORB_SLAM3::System* pSLAM, ImuGrabber *pImuGb, const bool bRect, const bool bClahe): mpSLAM(pSLAM), mpImuGb(pImuGb), do_rectify(bRect), mbClahe(bClahe){} + + void GrabImageLeft(const sensor_msgs::ImageConstPtr& msg); + void GrabImageRight(const sensor_msgs::ImageConstPtr& msg); + cv::Mat GetImage(const sensor_msgs::ImageConstPtr &img_msg); + void SyncWithImu(); + + queue imgLeftBuf, imgRightBuf; + std::mutex mBufMutexLeft,mBufMutexRight; + + ORB_SLAM3::System* mpSLAM; + ImuGrabber *mpImuGb; + + const bool do_rectify; + cv::Mat M1l,M2l,M1r,M2r; + + const bool mbClahe; + cv::Ptr mClahe = cv::createCLAHE(3.0, cv::Size(8, 8)); +}; + + + +int main(int argc, char **argv) +{ + ros::init(argc, argv, "Stereo_Inertial"); + ros::NodeHandle n("~"); + ros::console::set_logger_level(ROSCONSOLE_DEFAULT_NAME, ros::console::levels::Info); + bool bEqual = false; + if(argc < 4 || argc > 5) + { + cerr << endl << "Usage: rosrun ORB_SLAM3 Stereo_Inertial path_to_vocabulary path_to_settings do_rectify [do_equalize]" << endl; + ros::shutdown(); + return 1; + } + + std::string sbRect(argv[3]); + if(argc==5) + { + std::string sbEqual(argv[4]); + if(sbEqual == "true") + bEqual = true; + } + + // Create SLAM system. It initializes all system threads and gets ready to process frames. + ORB_SLAM3::System SLAM(argv[1],argv[2],ORB_SLAM3::System::IMU_STEREO,true); + + ImuGrabber imugb; + ImageGrabber igb(&SLAM,&imugb,sbRect == "true",bEqual); + + if(igb.do_rectify) + { + // Load settings related to stereo calibration + cv::FileStorage fsSettings(argv[2], cv::FileStorage::READ); + if(!fsSettings.isOpened()) + { + cerr << "ERROR: Wrong path to settings" << endl; + return -1; + } + + cv::Mat K_l, K_r, P_l, P_r, R_l, R_r, D_l, D_r; + fsSettings["LEFT.K"] >> K_l; + fsSettings["RIGHT.K"] >> K_r; + + fsSettings["LEFT.P"] >> P_l; + fsSettings["RIGHT.P"] >> P_r; + + fsSettings["LEFT.R"] >> R_l; + fsSettings["RIGHT.R"] >> R_r; + + fsSettings["LEFT.D"] >> D_l; + fsSettings["RIGHT.D"] >> D_r; + + int rows_l = fsSettings["LEFT.height"]; + int cols_l = fsSettings["LEFT.width"]; + int rows_r = fsSettings["RIGHT.height"]; + int cols_r = fsSettings["RIGHT.width"]; + + if(K_l.empty() || K_r.empty() || P_l.empty() || P_r.empty() || R_l.empty() || R_r.empty() || D_l.empty() || D_r.empty() || + rows_l==0 || rows_r==0 || cols_l==0 || cols_r==0) + { + cerr << "ERROR: Calibration parameters to rectify stereo are missing!" 
<< endl; + return -1; + } + + cv::initUndistortRectifyMap(K_l,D_l,R_l,P_l.rowRange(0,3).colRange(0,3),cv::Size(cols_l,rows_l),CV_32F,igb.M1l,igb.M2l); + cv::initUndistortRectifyMap(K_r,D_r,R_r,P_r.rowRange(0,3).colRange(0,3),cv::Size(cols_r,rows_r),CV_32F,igb.M1r,igb.M2r); + } + + // Maximum delay, 5 seconds + ros::Subscriber sub_imu = n.subscribe("/imu", 1000, &ImuGrabber::GrabImu, &imugb); + ros::Subscriber sub_img_left = n.subscribe("/camera/left/image_raw", 100, &ImageGrabber::GrabImageLeft,&igb); + ros::Subscriber sub_img_right = n.subscribe("/camera/right/image_raw", 100, &ImageGrabber::GrabImageRight,&igb); + + std::thread sync_thread(&ImageGrabber::SyncWithImu,&igb); + + ros::spin(); + + return 0; +} + + + +void ImageGrabber::GrabImageLeft(const sensor_msgs::ImageConstPtr &img_msg) +{ + mBufMutexLeft.lock(); + if (!imgLeftBuf.empty()) + imgLeftBuf.pop(); + imgLeftBuf.push(img_msg); + mBufMutexLeft.unlock(); +} + +void ImageGrabber::GrabImageRight(const sensor_msgs::ImageConstPtr &img_msg) +{ + mBufMutexRight.lock(); + if (!imgRightBuf.empty()) + imgRightBuf.pop(); + imgRightBuf.push(img_msg); + mBufMutexRight.unlock(); +} + +cv::Mat ImageGrabber::GetImage(const sensor_msgs::ImageConstPtr &img_msg) +{ + // Copy the ros image message to cv::Mat. + cv_bridge::CvImageConstPtr cv_ptr; + try + { + cv_ptr = cv_bridge::toCvShare(img_msg, sensor_msgs::image_encodings::MONO8); + } + catch (cv_bridge::Exception& e) + { + ROS_ERROR("cv_bridge exception: %s", e.what()); + } + + if(cv_ptr->image.type()==0) + { + return cv_ptr->image.clone(); + } + else + { + std::cout << "Error type" << std::endl; + return cv_ptr->image.clone(); + } +} + +void ImageGrabber::SyncWithImu() +{ + const double maxTimeDiff = 0.01; + while(1) + { + cv::Mat imLeft, imRight; + double tImLeft = 0, tImRight = 0; + if (!imgLeftBuf.empty()&&!imgRightBuf.empty()&&!mpImuGb->imuBuf.empty()) + { + tImLeft = imgLeftBuf.front()->header.stamp.toSec(); + tImRight = imgRightBuf.front()->header.stamp.toSec(); + + this->mBufMutexRight.lock(); + while((tImLeft-tImRight)>maxTimeDiff && imgRightBuf.size()>1) + { + imgRightBuf.pop(); + tImRight = imgRightBuf.front()->header.stamp.toSec(); + } + this->mBufMutexRight.unlock(); + + this->mBufMutexLeft.lock(); + while((tImRight-tImLeft)>maxTimeDiff && imgLeftBuf.size()>1) + { + imgLeftBuf.pop(); + tImLeft = imgLeftBuf.front()->header.stamp.toSec(); + } + this->mBufMutexLeft.unlock(); + + if((tImLeft-tImRight)>maxTimeDiff || (tImRight-tImLeft)>maxTimeDiff) + { + // std::cout << "big time difference" << std::endl; + continue; + } + if(tImLeft>mpImuGb->imuBuf.back()->header.stamp.toSec()) + continue; + + this->mBufMutexLeft.lock(); + imLeft = GetImage(imgLeftBuf.front()); + imgLeftBuf.pop(); + this->mBufMutexLeft.unlock(); + + this->mBufMutexRight.lock(); + imRight = GetImage(imgRightBuf.front()); + imgRightBuf.pop(); + this->mBufMutexRight.unlock(); + + vector vImuMeas; + mpImuGb->mBufMutex.lock(); + if(!mpImuGb->imuBuf.empty()) + { + // Load imu measurements from buffer + vImuMeas.clear(); + while(!mpImuGb->imuBuf.empty() && mpImuGb->imuBuf.front()->header.stamp.toSec()<=tImLeft) + { + double t = mpImuGb->imuBuf.front()->header.stamp.toSec(); + cv::Point3f acc(mpImuGb->imuBuf.front()->linear_acceleration.x, mpImuGb->imuBuf.front()->linear_acceleration.y, mpImuGb->imuBuf.front()->linear_acceleration.z); + cv::Point3f gyr(mpImuGb->imuBuf.front()->angular_velocity.x, mpImuGb->imuBuf.front()->angular_velocity.y, mpImuGb->imuBuf.front()->angular_velocity.z); + 
vImuMeas.push_back(ORB_SLAM3::IMU::Point(acc,gyr,t)); + mpImuGb->imuBuf.pop(); + } + } + mpImuGb->mBufMutex.unlock(); + if(mbClahe) + { + mClahe->apply(imLeft,imLeft); + mClahe->apply(imRight,imRight); + } + + if(do_rectify) + { + cv::remap(imLeft,imLeft,M1l,M2l,cv::INTER_LINEAR); + cv::remap(imRight,imRight,M1r,M2r,cv::INTER_LINEAR); + } + + mpSLAM->TrackStereo(imLeft,imRight,tImLeft,vImuMeas); + + std::chrono::milliseconds tSleep(1); + std::this_thread::sleep_for(tSleep); + } + } +} + +void ImuGrabber::GrabImu(const sensor_msgs::ImuConstPtr &imu_msg) +{ + mBufMutex.lock(); + imuBuf.push(imu_msg); + mBufMutex.unlock(); + return; +} + + From d451e9c3f688186fc1bc8d4695282746c8a7c878 Mon Sep 17 00:00:00 2001 From: nindanaoto Date: Mon, 27 Dec 2021 01:05:56 +0000 Subject: [PATCH 2/7] compilable ROS Example --- Examples/ROS/ORB_SLAM3/CMakeLists.txt | 3 ++- Examples/ROS/ORB_SLAM3/src/AR/ViewerAR.cc | 10 ++++++++-- Examples/ROS/ORB_SLAM3/src/AR/ros_mono_ar.cc | 10 ++++++++-- 3 files changed, 18 insertions(+), 5 deletions(-) diff --git a/Examples/ROS/ORB_SLAM3/CMakeLists.txt b/Examples/ROS/ORB_SLAM3/CMakeLists.txt index 12792fe44b..dd5861d057 100644 --- a/Examples/ROS/ORB_SLAM3/CMakeLists.txt +++ b/Examples/ROS/ORB_SLAM3/CMakeLists.txt @@ -30,7 +30,7 @@ endif() LIST(APPEND CMAKE_MODULE_PATH ${PROJECT_SOURCE_DIR}/../../../cmake_modules) -find_package(OpenCV 3.0 QUIET) +find_package(OpenCV 4.5 QUIET) if(NOT OpenCV_FOUND) find_package(OpenCV 2.4.3 QUIET) if(NOT OpenCV_FOUND) @@ -46,6 +46,7 @@ ${PROJECT_SOURCE_DIR} ${PROJECT_SOURCE_DIR}/../../../ ${PROJECT_SOURCE_DIR}/../../../include ${PROJECT_SOURCE_DIR}/../../../include/CameraModels +${PROJECT_SOURCE_DIR}/../../../Thirdparty/Sophus ${Pangolin_INCLUDE_DIRS} ) diff --git a/Examples/ROS/ORB_SLAM3/src/AR/ViewerAR.cc b/Examples/ROS/ORB_SLAM3/src/AR/ViewerAR.cc index dc9f18b7e3..4a8973f5e5 100644 --- a/Examples/ROS/ORB_SLAM3/src/AR/ViewerAR.cc +++ b/Examples/ROS/ORB_SLAM3/src/AR/ViewerAR.cc @@ -20,6 +20,9 @@ #include +#include +#include + #include #include #include @@ -402,7 +405,9 @@ Plane* ViewerAR::DetectPlane(const cv::Mat Tcw, const std::vector &vM { if(pMP->Observations()>5) { - vPoints.push_back(pMP->GetWorldPos()); + cv::Mat WorldPos; + cv::eigen2cv(pMP->GetWorldPos(), WorldPos); + vPoints.push_back(WorldPos); vPointMP.push_back(pMP); } } @@ -527,7 +532,8 @@ void Plane::Recompute() MapPoint* pMP = mvMPs[i]; if(!pMP->isBad()) { - cv::Mat Xw = pMP->GetWorldPos(); + cv::Mat Xw; + cv::eigen2cv(pMP->GetWorldPos(), Xw); o+=Xw; A.row(nPoints).colRange(0,3) = Xw.t(); nPoints++; diff --git a/Examples/ROS/ORB_SLAM3/src/AR/ros_mono_ar.cc b/Examples/ROS/ORB_SLAM3/src/AR/ros_mono_ar.cc index e26fbca66a..0a0b36bbea 100644 --- a/Examples/ROS/ORB_SLAM3/src/AR/ros_mono_ar.cc +++ b/Examples/ROS/ORB_SLAM3/src/AR/ros_mono_ar.cc @@ -23,9 +23,12 @@ #include #include -#include +#include + +#include #include +#include #include #include"../../../include/System.h" @@ -148,7 +151,10 @@ void ImageGrabber::GrabImage(const sensor_msgs::ImageConstPtr& msg) } cv::Mat im = cv_ptr->image.clone(); cv::Mat imu; - cv::Mat Tcw = mpSLAM->TrackMonocular(cv_ptr->image,cv_ptr->header.stamp.toSec()); + cv::Mat Tcw; + Sophus::SE3f Tcw_SE3f = mpSLAM->TrackMonocular(cv_ptr->image,cv_ptr->header.stamp.toSec()); + Eigen::Matrix4f Tcw_Matrix = Tcw_SE3f.matrix(); + cv::eigen2cv(Tcw_Matrix, Tcw); int state = mpSLAM->GetTrackingState(); vector vMPs = mpSLAM->GetTrackedMapPoints(); vector vKeys = mpSLAM->GetTrackedKeyPointsUn(); From ec9ea0a24b4c5e2181a912751ad01bd17d31ea46 Mon Sep 17 00:00:00 
2001 From: nindanaoto Date: Mon, 27 Dec 2021 06:30:08 +0000 Subject: [PATCH 3/7] Use OpenCV 4.4 --- Examples/ROS/ORB_SLAM3/CMakeLists.txt | 7 ++----- 1 file changed, 2 insertions(+), 5 deletions(-) diff --git a/Examples/ROS/ORB_SLAM3/CMakeLists.txt b/Examples/ROS/ORB_SLAM3/CMakeLists.txt index dd5861d057..a1cc580a17 100644 --- a/Examples/ROS/ORB_SLAM3/CMakeLists.txt +++ b/Examples/ROS/ORB_SLAM3/CMakeLists.txt @@ -30,12 +30,9 @@ endif() LIST(APPEND CMAKE_MODULE_PATH ${PROJECT_SOURCE_DIR}/../../../cmake_modules) -find_package(OpenCV 4.5 QUIET) +find_package(OpenCV 4.4 QUIET) if(NOT OpenCV_FOUND) - find_package(OpenCV 2.4.3 QUIET) - if(NOT OpenCV_FOUND) - message(FATAL_ERROR "OpenCV > 2.4.3 not found.") - endif() + message(FATAL_ERROR "OpenCV > 4.4 not found.") endif() find_package(Eigen3 3.1.0 REQUIRED) From 5abe9fdaa2228ea7241e0152419021d792d49456 Mon Sep 17 00:00:00 2001 From: Ubuntu Date: Mon, 6 Jun 2022 18:11:41 +0000 Subject: [PATCH 4/7] publish as ROS topic --- .../ROS/ORB_SLAM3/src/ros_stereo_inertial.cc | 100 ++++++++++++++++-- src/System.cc | 14 ++- 2 files changed, 106 insertions(+), 8 deletions(-) diff --git a/Examples/ROS/ORB_SLAM3/src/ros_stereo_inertial.cc b/Examples/ROS/ORB_SLAM3/src/ros_stereo_inertial.cc index 231ad72c07..5b7ff8870f 100644 --- a/Examples/ROS/ORB_SLAM3/src/ros_stereo_inertial.cc +++ b/Examples/ROS/ORB_SLAM3/src/ros_stereo_inertial.cc @@ -28,6 +28,8 @@ #include #include #include +#include +#include #include @@ -36,6 +38,9 @@ using namespace std; +Sophus::SE3f Tcw = {}; +nav_msgs::Odometry odom; + class ImuGrabber { public: @@ -74,26 +79,33 @@ class ImageGrabber int main(int argc, char **argv) { ros::init(argc, argv, "Stereo_Inertial"); - ros::NodeHandle n("~"); + ros::NodeHandle n("orb_slam3"); ros::console::set_logger_level(ROSCONSOLE_DEFAULT_NAME, ros::console::levels::Info); bool bEqual = false; - if(argc < 4 || argc > 5) + bool bView = true; + if(argc < 4 || argc > 6) { - cerr << endl << "Usage: rosrun ORB_SLAM3 Stereo_Inertial path_to_vocabulary path_to_settings do_rectify [do_equalize]" << endl; + cerr << endl << "Usage: rosrun ORB_SLAM3 Stereo_Inertial path_to_vocabulary path_to_settings do_rectify [do_equalize] [enable_view]" << endl; ros::shutdown(); return 1; } std::string sbRect(argv[3]); - if(argc==5) + if(argc>=5) { std::string sbEqual(argv[4]); if(sbEqual == "true") bEqual = true; } + if(argc==6) + { + std::string sbView(argv[5]); + if(sbView == "false") + bView = false; + } // Create SLAM system. It initializes all system threads and gets ready to process frames. 
-  ORB_SLAM3::System SLAM(argv[1],argv[2],ORB_SLAM3::System::IMU_STEREO,true);
+  ORB_SLAM3::System SLAM(argv[1],argv[2],ORB_SLAM3::System::IMU_STEREO,bView);
 
   ImuGrabber imugb;
   ImageGrabber igb(&SLAM,&imugb,sbRect == "true",bEqual);
 
@@ -144,7 +156,78 @@ int main(int argc, char **argv)
 
   std::thread sync_thread(&ImageGrabber::SyncWithImu,&igb);
 
-  ros::spin();
+  ros::Rate loop_rate(80);
+
+  ros::Publisher odom_pub = n.advertise<nav_msgs::Odometry>("odom", 100);
+  tf::TransformBroadcaster odom_broadcaster;
+
+  Sophus::SE3f LastTwc = {};
+
+  ros::Time current_time, last_time;
+  current_time = ros::Time::now();
+  last_time = ros::Time::now();
+
+  while (n.ok())
+  {
+    ros::spinOnce();               // check for incoming messages
+    current_time = ros::Time::now();
+
+    const Sophus::SE3f twist = Tcw * LastTwc;
+
+    //first, we'll publish the transform over tf
+    geometry_msgs::TransformStamped odom_trans;
+    odom_trans.header.stamp = current_time;
+    odom_trans.header.frame_id = "odom";
+    odom_trans.child_frame_id = "base_link";
+
+    odom_trans.transform.translation.x = Tcw.translation()[0];
+    odom_trans.transform.translation.y = Tcw.translation()[1];
+    odom_trans.transform.translation.z = Tcw.translation()[2];
+    odom_trans.transform.rotation.w = Tcw.data()[0];
+    odom_trans.transform.rotation.x = Tcw.data()[1];
+    odom_trans.transform.rotation.y = Tcw.data()[2];
+    odom_trans.transform.rotation.z = Tcw.data()[3];
+
+    //send the transform
+    odom_broadcaster.sendTransform(odom_trans);
+
+    /**
+     * This is a message object. You stuff it with data, and then publish it.
+     */
+
+
+    odom.header.stamp = current_time;
+    odom.header.frame_id = "odom";
+    odom.child_frame_id = "base_link";
+
+    odom.pose.pose.position.x = Tcw.translation()[0];
+    odom.pose.pose.position.y = Tcw.translation()[1];
+    odom.pose.pose.position.z = Tcw.translation()[2];
+
+    odom.pose.pose.orientation.x = Tcw.so3(). unit_quaternion().x();
+    odom.pose.pose.orientation.y = Tcw.so3(). unit_quaternion().y();
+    odom.pose.pose.orientation.z = Tcw.so3(). unit_quaternion().z();
+    odom.pose.pose.orientation.w = Tcw.so3(). unit_quaternion().w();
+
+    double dt = (current_time - last_time).toSec();
+
+    odom.twist.twist.linear.x = twist.translation()[0]/dt;
+    odom.twist.twist.linear.y = twist.translation()[1]/dt;
+    odom.twist.twist.linear.z = twist.translation()[2]/dt;
+
+    /**
+     * The publish() function is how you send messages. The parameter
+     * is the message object. The type of this object must agree with the type
+     * given as a template parameter to the advertise<>() call, as was done
+     * in the constructor above.
+     */
+    odom_pub.publish(odom);
+
+    LastTwc = Tcw.inverse();
+    last_time = current_time;
+
+    loop_rate.sleep();
+  }
 
   return 0;
 }
@@ -251,6 +334,9 @@ void ImageGrabber::SyncWithImu()
        cv::Point3f acc(mpImuGb->imuBuf.front()->linear_acceleration.x, mpImuGb->imuBuf.front()->linear_acceleration.y, mpImuGb->imuBuf.front()->linear_acceleration.z);
        cv::Point3f gyr(mpImuGb->imuBuf.front()->angular_velocity.x, mpImuGb->imuBuf.front()->angular_velocity.y, mpImuGb->imuBuf.front()->angular_velocity.z);
        vImuMeas.push_back(ORB_SLAM3::IMU::Point(acc,gyr,t));
+        odom.twist.twist.angular.x = mpImuGb->imuBuf.front()->angular_velocity.x;
+        odom.twist.twist.angular.y = mpImuGb->imuBuf.front()->angular_velocity.y;
+        odom.twist.twist.angular.z = mpImuGb->imuBuf.front()->angular_velocity.z;
        mpImuGb->imuBuf.pop();
      }
    }
@@ -267,7 +353,7 @@ void ImageGrabber::SyncWithImu()
      cv::remap(imRight,imRight,M1r,M2r,cv::INTER_LINEAR);
    }
 
-    mpSLAM->TrackStereo(imLeft,imRight,tImLeft,vImuMeas);
+    Tcw = mpSLAM->TrackStereo(imLeft,imRight,tImLeft,vImuMeas);
 
    std::chrono::milliseconds tSleep(1);
    std::this_thread::sleep_for(tSleep);
diff --git a/src/System.cc b/src/System.cc
index 60d9c5185a..1c71de68ca 100644
--- a/src/System.cc
+++ b/src/System.cc
@@ -313,7 +313,9 @@ Sophus::SE3f System::TrackStereo(const cv::Mat &imLeft, const cv::Mat &imRight,
        mpTracker->GrabImuData(vImuMeas[i_imu]);
 
    // std::cout << "start GrabImageStereo" << std::endl;
-    Sophus::SE3f Tcw = mpTracker->GrabImageStereo(imLeftToFeed,imRightToFeed,timestamp,filename);
+    mpTracker->GrabImageStereo(imLeftToFeed,imRightToFeed,timestamp,filename);
+    Eigen::Matrix4f M;
+    pangolin::OpenGlMatrix Twc, Ow;
 
    // std::cout << "out grabber" << std::endl;
 
@@ -322,6 +324,16 @@ Sophus::SE3f System::TrackStereo(const cv::Mat &imLeft, const cv::Mat &imRight,
    mTrackedMapPoints = mpTracker->mCurrentFrame.mvpMapPoints;
    mTrackedKeyPointsUn = mpTracker->mCurrentFrame.mvKeysUn;
 
+    mpMapDrawer->GetCurrentOpenGLCameraMatrix(Twc,Ow);
+
+    for (int i = 0; i<4; i++){
+        M(0,i) = Twc.m[4*i];
+        M(1,i) = Twc.m[4*i+1];
+        M(2,i) = Twc.m[4*i+2];
+        M(3,i) = Twc.m[4*i+3];
+    }
+    Sophus::SE3f Tcw(M);
+
    return Tcw;
 }
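Patch 5 below replaces the raw Tcw.data() indexing used above with the explicit unit_quaternion() accessors. The reason can be checked in isolation: as far as I can tell, Sophus::SE3f stores its quaternion coefficients in Eigen's (x, y, z, w) order, ahead of the translation, so data()[0] is qx rather than qw. A small self-contained check; the rotation and translation values are arbitrary:

#include <iostream>
#include <Eigen/Geometry>
#include <sophus/se3.hpp>

int main()
{
    // Arbitrary pose: half a radian about z, translation (1, 2, 3).
    const Eigen::Quaternionf q(Eigen::AngleAxisf(0.5f, Eigen::Vector3f::UnitZ()));
    const Sophus::SE3f Tcw(q, Eigen::Vector3f(1.f, 2.f, 3.f));

    // Raw storage: quaternion coefficients first, in (x, y, z, w) order.
    std::cout << "data()[0..3]   : " << Tcw.data()[0] << " " << Tcw.data()[1] << " "
              << Tcw.data()[2] << " " << Tcw.data()[3] << std::endl;

    // Explicit accessors, which is what the next patch switches the ROS message to.
    const Eigen::Quaternionf uq = Tcw.so3().unit_quaternion();
    std::cout << "unit_quaternion: " << uq.x() << " " << uq.y() << " "
              << uq.z() << " " << uq.w() << std::endl;
    return 0;
}

Both lines print the same four numbers in x, y, z, w order, which suggests that mapping data()[0] to rotation.w, as patch 4 does, swaps the quaternion fields.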
From d7f90508f972b553d60162b67b6998b3d098d22a Mon Sep 17 00:00:00 2001
From: nindanaoto
Date: Sun, 12 Jun 2022 16:45:42 +0000
Subject: [PATCH 5/7] use frame to extract velocity

---
 .../ROS/ORB_SLAM3/src/ros_stereo_inertial.cc | 54 ++++++++++++-------
 include/System.h                             |  2 +-
 src/System.cc                                | 16 ++----
 3 files changed, 39 insertions(+), 33 deletions(-)

diff --git a/Examples/ROS/ORB_SLAM3/src/ros_stereo_inertial.cc b/Examples/ROS/ORB_SLAM3/src/ros_stereo_inertial.cc
index 5b7ff8870f..5a8e888885 100644
--- a/Examples/ROS/ORB_SLAM3/src/ros_stereo_inertial.cc
+++ b/Examples/ROS/ORB_SLAM3/src/ros_stereo_inertial.cc
@@ -38,7 +38,8 @@
 
 using namespace std;
 
-Sophus::SE3f Tcw = {};
+ORB_SLAM3::Frame frame = {};
+
 nav_msgs::Odometry odom;
 
 class ImuGrabber
@@ -156,12 +157,11 @@ int main(int argc, char **argv)
 
   std::thread sync_thread(&ImageGrabber::SyncWithImu,&igb);
 
-  ros::Rate loop_rate(80);
+  ros::Rate loop_rate(45);
 
   ros::Publisher odom_pub = n.advertise<nav_msgs::Odometry>("odom", 100);
   tf::TransformBroadcaster odom_broadcaster;
 
-  Sophus::SE3f LastTwc = {};
 
   ros::Time current_time, last_time;
   current_time = ros::Time::now();
   last_time = ros::Time::now();
@@ -172,7 +172,7 @@ int main(int argc, char **argv)
    ros::spinOnce();               // check for incoming messages
    current_time = ros::Time::now();
 
-    const Sophus::SE3f twist = Tcw * LastTwc;
+    const Sophus::SE3f Tcw = frame.GetPose();
 
    //first, we'll publish the transform over tf
    geometry_msgs::TransformStamped odom_trans;
@@ -183,10 +183,10 @@ int main(int argc, char **argv)
    odom_trans.transform.translation.x = Tcw.translation()[0];
    odom_trans.transform.translation.y = Tcw.translation()[1];
    odom_trans.transform.translation.z = Tcw.translation()[2];
-    odom_trans.transform.rotation.w = Tcw.data()[0];
-    odom_trans.transform.rotation.x = Tcw.data()[1];
-    odom_trans.transform.rotation.y = Tcw.data()[2];
-    odom_trans.transform.rotation.z = Tcw.data()[3];
+    odom_trans.transform.rotation.x = Tcw.so3().unit_quaternion().x();
+    odom_trans.transform.rotation.y = Tcw.so3().unit_quaternion().y();
+    odom_trans.transform.rotation.z = Tcw.so3().unit_quaternion().z();
+    odom_trans.transform.rotation.w = Tcw.so3().unit_quaternion().w();
 
    //send the transform
    odom_broadcaster.sendTransform(odom_trans);
@@ -204,16 +204,16 @@ int main(int argc, char **argv)
    odom.pose.pose.position.y = Tcw.translation()[1];
    odom.pose.pose.position.z = Tcw.translation()[2];
 
-    odom.pose.pose.orientation.x = Tcw.so3(). unit_quaternion().x();
-    odom.pose.pose.orientation.y = Tcw.so3(). unit_quaternion().y();
-    odom.pose.pose.orientation.z = Tcw.so3(). unit_quaternion().z();
-    odom.pose.pose.orientation.w = Tcw.so3(). unit_quaternion().w();
+    odom.pose.pose.orientation.x = Tcw.so3().unit_quaternion().x();
+    odom.pose.pose.orientation.y = Tcw.so3().unit_quaternion().y();
+    odom.pose.pose.orientation.z = Tcw.so3().unit_quaternion().z();
+    odom.pose.pose.orientation.w = Tcw.so3().unit_quaternion().w();
 
    double dt = (current_time - last_time).toSec();
 
-    odom.twist.twist.linear.x = twist.translation()[0]/dt;
-    odom.twist.twist.linear.y = twist.translation()[1]/dt;
-    odom.twist.twist.linear.z = twist.translation()[2]/dt;
+    odom.twist.twist.linear.x = frame.GetVelocity()[0];
+    odom.twist.twist.linear.y = frame.GetVelocity()[1];
+    odom.twist.twist.linear.z = frame.GetVelocity()[2];
 
    /**
     * The publish() function is how you send messages. The parameter
@@ -223,9 +223,6 @@ int main(int argc, char **argv)
     */
    odom_pub.publish(odom);
 
-    LastTwc = Tcw.inverse();
-    last_time = current_time;
-
    loop_rate.sleep();
  }
 
@@ -353,7 +350,26 @@ void ImageGrabber::SyncWithImu()
      cv::remap(imRight,imRight,M1r,M2r,cv::INTER_LINEAR);
    }
 
-    Tcw = mpSLAM->TrackStereo(imLeft,imRight,tImLeft,vImuMeas);
+    frame = mpSLAM->TrackStereo(imLeft,imRight,tImLeft,vImuMeas);
+
+    if(frame.imuIsPreintegrated()){
+      const Eigen::Vector3f twb1 = frame.GetImuPosition();
+      const Eigen::Matrix3f Rwb1 = frame.GetImuRotation();
+      const Eigen::Vector3f Vwb1 = frame.GetVelocity();
+      const Eigen::Vector3f Gz(0, 0, -ORB_SLAM3::IMU::GRAVITY_VALUE);
+      const float t12 = frame.mpImuPreintegratedFrame->dT;
+
+      std::cout<<"Here:"<<frame.mpImuPreintegratedFrame->JRg<<std::endl;
+      Eigen::Matrix3f Rwb2 = ORB_SLAM3::IMU::NormalizeRotation(Rwb1 * frame.mpImuPreintegratedFrame->GetDeltaRotation(frame.mImuBias));
+      std::cout<<"Here0"<<std::endl;
+      Eigen::Vector3f twb2 = twb1 + Vwb1*t12 + 0.5f*t12*t12*Gz + Rwb1 * frame.mpImuPreintegratedFrame->GetDeltaPosition(frame.mImuBias);
+      std::cout<<"Here1"<<std::endl;
+      Eigen::Vector3f Vwb2 = Vwb1 + t12*Gz + Rwb1 * frame.mpImuPreintegratedFrame->GetDeltaVelocity(frame.mImuBias);
+      std::cout<<"Here2"<<std::endl;
+    }
 
    std::chrono::milliseconds tSleep(1);
    std::this_thread::sleep_for(tSleep);
diff --git a/include/System.h b/include/System.h
--- a/include/System.h
+++ b/include/System.h
@@ ... @@ class System
-    Sophus::SE3f TrackStereo(const cv::Mat &imLeft, const cv::Mat &imRight, const double &timestamp, const vector<IMU::Point>& vImuMeas = vector<IMU::Point>(), string filename="");
+    Frame TrackStereo(const cv::Mat &imLeft, const cv::Mat &imRight, const double &timestamp, const vector<IMU::Point>& vImuMeas = vector<IMU::Point>(), string filename="");
 
    // Process the given rgbd frame. Depthmap must be registered to the RGB frame.
    // Input image: RGB (CV_8UC3) or grayscale (CV_8U). RGB is converted to grayscale.
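With the System.h change above, TrackStereo hands the whole current Frame back to the caller, so both the pose and the tracker's velocity estimate can be read from it directly. A usage sketch of the patched interface; the helper name and the include paths are my own assumptions and it only builds against this patched ORB_SLAM3 tree:

#include <iostream>
#include <vector>
#include <Eigen/Core>
#include <opencv2/core.hpp>
#include <sophus/se3.hpp>
#include "System.h"   // patched: TrackStereo returns ORB_SLAM3::Frame

// Hypothetical helper: run one stereo(-inertial) step and print pose and velocity.
ORB_SLAM3::Frame TrackAndReport(ORB_SLAM3::System &SLAM,
                                const cv::Mat &imLeft, const cv::Mat &imRight,
                                double t,
                                const std::vector<ORB_SLAM3::IMU::Point> &vImuMeas)
{
    ORB_SLAM3::Frame frame = SLAM.TrackStereo(imLeft, imRight, t, vImuMeas);

    const Sophus::SE3f Tcw = frame.GetPose();               // world -> camera
    const Eigen::Vector3f Vw = frame.GetVelocity();         // estimated velocity
    const Eigen::Vector3f twc = Tcw.inverse().translation();// camera position in world

    std::cout << "p = " << twc.transpose() << "  v = " << Vw.transpose() << std::endl;
    return frame;
}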
diff --git a/src/System.cc b/src/System.cc
index 1c71de68ca..9afc7a4101 100644
--- a/src/System.cc
+++ b/src/System.cc
@@ -241,7 +241,7 @@ System::System(const string &strVocFile, const string &strSettingsFile, const eS
 }
 
-Sophus::SE3f System::TrackStereo(const cv::Mat &imLeft, const cv::Mat &imRight, const double &timestamp, const vector<IMU::Point>& vImuMeas, string filename)
+Frame System::TrackStereo(const cv::Mat &imLeft, const cv::Mat &imRight, const double &timestamp, const vector<IMU::Point>& vImuMeas, string filename)
 {
    if(mSensor!=STEREO && mSensor!=IMU_STEREO)
    {
@@ -314,8 +314,6 @@ Sophus::SE3f System::TrackStereo(const cv::Mat &imLeft, const cv::Mat &imRight,
    // std::cout << "start GrabImageStereo" << std::endl;
    mpTracker->GrabImageStereo(imLeftToFeed,imRightToFeed,timestamp,filename);
-    Eigen::Matrix4f M;
-    pangolin::OpenGlMatrix Twc, Ow;
 
    // std::cout << "out grabber" << std::endl;
 
@@ -324,17 +322,9 @@ Sophus::SE3f System::TrackStereo(const cv::Mat &imLeft, const cv::Mat &imRight,
    mTrackedMapPoints = mpTracker->mCurrentFrame.mvpMapPoints;
    mTrackedKeyPointsUn = mpTracker->mCurrentFrame.mvKeysUn;
 
-    mpMapDrawer->GetCurrentOpenGLCameraMatrix(Twc,Ow);
+    Frame frame = mpTracker->mCurrentFrame;
 
-    for (int i = 0; i<4; i++){
-        M(0,i) = Twc.m[4*i];
-        M(1,i) = Twc.m[4*i+1];
-        M(2,i) = Twc.m[4*i+2];
-        M(3,i) = Twc.m[4*i+3];
-    }
-    Sophus::SE3f Tcw(M);
-
-    return Tcw;
+    return frame;
 }
 
 Sophus::SE3f System::TrackRGBD(const cv::Mat &im, const cv::Mat &depthmap, const double &timestamp, const vector<IMU::Point>& vImuMeas, string filename)
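The block that patch 5 added to SyncWithImu (and that the next patch removes again) evaluates the usual forward propagation of a state by an IMU-preintegrated delta: Rwb2 = Rwb1*dR, twb2 = twb1 + Vwb1*t12 + (1/2)*Gz*t12^2 + Rwb1*dP, Vwb2 = Vwb1 + Gz*t12 + Rwb1*dV. A self-contained numeric sketch with plain Eigen; the delta values are invented, and ORB_SLAM3's Preintegrated class additionally handles bias and noise terms that are omitted here:

#include <iostream>
#include <Eigen/Dense>

int main()
{
    // State at the last frame: pose (Rwb1, twb1) and velocity Vwb1 in the world frame.
    const Eigen::Matrix3f Rwb1 = Eigen::Matrix3f::Identity();
    const Eigen::Vector3f twb1(0.f, 0.f, 0.f);
    const Eigen::Vector3f Vwb1(1.f, 0.f, 0.f);        // moving at 1 m/s along x

    // Invented body-frame preintegrated deltas over t12 seconds, plus gravity.
    const float t12 = 0.1f;
    const Eigen::Matrix3f dR = Eigen::Matrix3f::Identity();
    const Eigen::Vector3f dP(0.1f, 0.f, 0.f);
    const Eigen::Vector3f dV(0.f, 0.f, 0.f);
    const Eigen::Vector3f Gz(0.f, 0.f, -9.81f);

    // Same update order as the debug block above.
    const Eigen::Matrix3f Rwb2 = Rwb1 * dR;
    const Eigen::Vector3f twb2 = twb1 + Vwb1*t12 + 0.5f*t12*t12*Gz + Rwb1*dP;
    const Eigen::Vector3f Vwb2 = Vwb1 + t12*Gz + Rwb1*dV;

    std::cout << "twb2 = " << twb2.transpose() << std::endl;  // ~ (0.2, 0, -0.049)
    std::cout << "Vwb2 = " << Vwb2.transpose() << std::endl;  // ~ (1, 0, -0.981)
    (void)Rwb2;
    return 0;
}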
From 5af8aae35f2ac477d0d79bb5ceb8a5a58e5438f5 Mon Sep 17 00:00:00 2001
From: nindanaoto
Date: Thu, 16 Jun 2022 18:19:06 +0000
Subject: [PATCH 6/7] seems to be able to integrate IMU but a bit unstable for init

---
 .../ROS/ORB_SLAM3/src/ros_stereo_inertial.cc | 44 ++++++++++---------
 1 file changed, 24 insertions(+), 20 deletions(-)

diff --git a/Examples/ROS/ORB_SLAM3/src/ros_stereo_inertial.cc b/Examples/ROS/ORB_SLAM3/src/ros_stereo_inertial.cc
index 5a8e888885..0306689f5c 100644
--- a/Examples/ROS/ORB_SLAM3/src/ros_stereo_inertial.cc
+++ b/Examples/ROS/ORB_SLAM3/src/ros_stereo_inertial.cc
@@ -39,6 +39,7 @@
 using namespace std;
 
 ORB_SLAM3::Frame frame = {};
+bool initialized = false;
 
 nav_msgs::Odometry odom;
 
@@ -157,7 +158,7 @@ int main(int argc, char **argv)
 
   std::thread sync_thread(&ImageGrabber::SyncWithImu,&igb);
 
-  ros::Rate loop_rate(45);
+  ros::Rate loop_rate(200);
 
   ros::Publisher odom_pub = n.advertise<nav_msgs::Odometry>("odom", 100);
   tf::TransformBroadcaster odom_broadcaster;
 
@@ -351,25 +352,7 @@ void ImageGrabber::SyncWithImu()
    }
 
    frame = mpSLAM->TrackStereo(imLeft,imRight,tImLeft,vImuMeas);
-
-    if(frame.imuIsPreintegrated()){
-      const Eigen::Vector3f twb1 = frame.GetImuPosition();
-      const Eigen::Matrix3f Rwb1 = frame.GetImuRotation();
-      const Eigen::Vector3f Vwb1 = frame.GetVelocity();
-      const Eigen::Vector3f Gz(0, 0, -ORB_SLAM3::IMU::GRAVITY_VALUE);
-      const float t12 = frame.mpImuPreintegratedFrame->dT;
-
-      std::cout<<"Here:"<<frame.mpImuPreintegratedFrame->JRg<<std::endl;
-      Eigen::Matrix3f Rwb2 = ORB_SLAM3::IMU::NormalizeRotation(Rwb1 * frame.mpImuPreintegratedFrame->GetDeltaRotation(frame.mImuBias));
-      std::cout<<"Here0"<<std::endl;
-      Eigen::Vector3f twb2 = twb1 + Vwb1*t12 + 0.5f*t12*t12*Gz + Rwb1 * frame.mpImuPreintegratedFrame->GetDeltaPosition(frame.mImuBias);
-      std::cout<<"Here1"<<std::endl;
-      Eigen::Vector3f Vwb2 = Vwb1 + t12*Gz + Rwb1 * frame.mpImuPreintegratedFrame->GetDeltaVelocity(frame.mImuBias);
-      std::cout<<"Here2"<<std::endl;
-    }
+    if(frame.imuIsPreintegrated()) initialized = true;
 
    std::chrono::milliseconds tSleep(1);
    std::this_thread::sleep_for(tSleep);
@@ ... @@ void ImuGrabber::GrabImu(const sensor_msgs::ImuConstPtr &imu_msg)
  mBufMutex.unlock();
+  if(initialized)
+  {
+    ORB_SLAM3::IMU::Point imudata(imu_msg->linear_acceleration.x,imu_msg->linear_acceleration.y,imu_msg->linear_acceleration.z,imu_msg->angular_velocity.x,imu_msg->angular_velocity.y,imu_msg->angular_velocity.z,imu_msg->header.stamp.toSec());
+    mpImuPreintegratedFromLastFrame.IntegrateNewMeasurement(imudata.a,imudata.w,imu_msg->header.stamp.toSec()-frame.mTimeStamp);
+
+    const Eigen::Vector3f twb1 = frame.GetImuPosition();
+    const Eigen::Matrix3f Rwb1 = frame.GetImuRotation();
+    const Eigen::Vector3f Vwb1 = frame.GetVelocity();
+    const Eigen::Vector3f Gz(0, 0, -ORB_SLAM3::IMU::GRAVITY_VALUE);
+    const float t12 = mpImuPreintegratedFromLastFrame.dT;
+
+    Eigen::Matrix3f Rwb2 = ORB_SLAM3::IMU::NormalizeRotation(Rwb1 * mpImuPreintegratedFromLastFrame.GetDeltaRotation(frame.mImuBias));
+    Eigen::Vector3f twb2 = twb1 + Vwb1*t12 + 0.5f*t12*t12*Gz+ Rwb1 * mpImuPreintegratedFromLastFrame.GetDeltaPosition(frame.mImuBias);
+    Eigen::Vector3f Vwb2 = Vwb1 + t12*Gz + Rwb1 * mpImuPreintegratedFromLastFrame.GetDeltaVelocity(frame.mImuBias);
+    frame.mTimeStamp = imu_msg->header.stamp.toSec();
+    auto old = frame.GetPose().translation()[0];
+    frame.SetImuPoseVelocity(Rwb2,twb2,Vwb2);
+    odom.twist.twist.angular.x = imu_msg->angular_velocity.x;
+    odom.twist.twist.angular.y = imu_msg->angular_velocity.y;
+    odom.twist.twist.angular.z = imu_msg->angular_velocity.z;
+  }
  return;
 }
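Patch 6 feeds every incoming IMU sample into a preintegration object between camera frames and then propagates the last tracked frame by the accumulated delta. A much simplified, self-contained sketch of what such an accumulation does conceptually; it has no bias correction or covariance propagation, so it illustrates the idea rather than ORB_SLAM3's IMU::Preintegrated implementation:

#include <iostream>
#include <Eigen/Dense>
#include <Eigen/Geometry>

// Minimal preintegration state: body-frame rotation, velocity and position deltas
// relative to the first sample, plus the accumulated time.
struct SimplePreintegration {
    Eigen::Matrix3f dR = Eigen::Matrix3f::Identity();
    Eigen::Vector3f dV = Eigen::Vector3f::Zero();
    Eigen::Vector3f dP = Eigen::Vector3f::Zero();
    float dT = 0.f;

    void IntegrateNewMeasurement(const Eigen::Vector3f &acc, const Eigen::Vector3f &gyr, float dt)
    {
        // Position and velocity first (using the rotation accumulated so far), then rotation.
        dP += dV*dt + 0.5f*dR*acc*dt*dt;
        dV += dR*acc*dt;
        const Eigen::Vector3f w = gyr*dt;
        const float angle = w.norm();
        const Eigen::Matrix3f incr = (angle > 1e-8f)
            ? Eigen::AngleAxisf(angle, w/angle).toRotationMatrix()
            : Eigen::Matrix3f::Identity();
        dR = dR * incr;   // exponential-map style update on SO(3)
        dT += dt;
    }
};

int main()
{
    SimplePreintegration pre;
    // 100 samples at 200 Hz of a constant 1 m/s^2 forward acceleration, no rotation.
    for (int i = 0; i < 100; ++i)
        pre.IntegrateNewMeasurement(Eigen::Vector3f(1.f, 0.f, 0.f), Eigen::Vector3f::Zero(), 0.005f);

    std::cout << "dT=" << pre.dT << "  dV=" << pre.dV.transpose()
              << "  dP=" << pre.dP.transpose() << std::endl;  // dT=0.5, dV=(0.5,0,0), dP=(0.125,0,0)
    return 0;
}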
From 9736d1213f1d2c1401c0115ecb3843ca6f47b327 Mon Sep 17 00:00:00 2001
From: nindanaoto
Date: Tue, 5 Jul 2022 13:10:19 +0900
Subject: [PATCH 7/7] fast moving lose track

---
 .../ROS/ORB_SLAM3/src/ros_stereo_inertial.cc | 52 ++++++++++++-------
 1 file changed, 33 insertions(+), 19 deletions(-)

diff --git a/Examples/ROS/ORB_SLAM3/src/ros_stereo_inertial.cc b/Examples/ROS/ORB_SLAM3/src/ros_stereo_inertial.cc
index 0306689f5c..b83c0b632e 100644
--- a/Examples/ROS/ORB_SLAM3/src/ros_stereo_inertial.cc
+++ b/Examples/ROS/ORB_SLAM3/src/ros_stereo_inertial.cc
@@ -163,17 +163,21 @@ int main(int argc, char **argv)
 
   ros::Publisher odom_pub = n.advertise<nav_msgs::Odometry>("odom", 100);
   tf::TransformBroadcaster odom_broadcaster;
 
+  Eigen::Matrix3f rotm;
+  rotm << 0,1,0,-1,0,0,0,0,1;
+  const Eigen::Quaternionf rotq(rotm);
 
   ros::Time current_time, last_time;
   current_time = ros::Time::now();
   last_time = ros::Time::now();
+  Sophus::SE3f prevTcw;
 
   while (n.ok())
   {
    ros::spinOnce();               // check for incoming messages
    current_time = ros::Time::now();
 
-    const Sophus::SE3f Tcw = frame.GetPose();
+    const Sophus::SE3f Tcw = frame.GetPose().inverse();
 
    //first, we'll publish the transform over tf
    geometry_msgs::TransformStamped odom_trans;
@@ -181,13 +185,14 @@ int main(int argc, char **argv)
    odom_trans.header.frame_id = "odom";
    odom_trans.child_frame_id = "base_link";
 
-    odom_trans.transform.translation.x = Tcw.translation()[0];
-    odom_trans.transform.translation.y = Tcw.translation()[1];
+    odom_trans.transform.translation.x = Tcw.translation()[1];
+    odom_trans.transform.translation.y = -Tcw.translation()[0];
    odom_trans.transform.translation.z = Tcw.translation()[2];
-    odom_trans.transform.rotation.x = Tcw.so3().unit_quaternion().x();
-    odom_trans.transform.rotation.y = Tcw.so3().unit_quaternion().y();
-    odom_trans.transform.rotation.z = Tcw.so3().unit_quaternion().z();
-    odom_trans.transform.rotation.w = Tcw.so3().unit_quaternion().w();
+    const Eigen::Quaternionf rotted = rotq*Tcw.so3().unit_quaternion();
+    odom_trans.transform.rotation.x = rotted.x();
+    odom_trans.transform.rotation.y = rotted.y();
+    odom_trans.transform.rotation.z = rotted.z();
+    odom_trans.transform.rotation.w = rotted.w();
 
    //send the transform
    odom_broadcaster.sendTransform(odom_trans);
@@ -201,21 +206,29 @@ int main(int argc, char **argv)
    odom.header.frame_id = "odom";
    odom.child_frame_id = "base_link";
 
-    odom.pose.pose.position.x = Tcw.translation()[0];
-    odom.pose.pose.position.y = Tcw.translation()[1];
+    odom.pose.pose.position.x = Tcw.translation()[1];
+    odom.pose.pose.position.y = -Tcw.translation()[0];
    odom.pose.pose.position.z = Tcw.translation()[2];
 
-    odom.pose.pose.orientation.x = Tcw.so3().unit_quaternion().x();
-    odom.pose.pose.orientation.y = Tcw.so3().unit_quaternion().y();
-    odom.pose.pose.orientation.z = Tcw.so3().unit_quaternion().z();
-    odom.pose.pose.orientation.w = Tcw.so3().unit_quaternion().w();
+    odom.pose.pose.orientation.x = rotted.x();
+    odom.pose.pose.orientation.y = rotted.y();
+    odom.pose.pose.orientation.z = rotted.z();
+    odom.pose.pose.orientation.w = rotted.w();
 
    double dt = (current_time - last_time).toSec();
+    const Sophus::SE3f diff = Tcw * prevTcw;
 
-    odom.twist.twist.linear.x = frame.GetVelocity()[0];
-    odom.twist.twist.linear.y = frame.GetVelocity()[1];
+    // odom.twist.twist.linear.x = diff.translation()[1]/dt;
+    // odom.twist.twist.linear.y = -diff.translation()[0]/dt;
+    // odom.twist.twist.linear.z = diff.translation()[2]/dt;
+    odom.twist.twist.linear.x = frame.GetVelocity()[1];
+    odom.twist.twist.linear.y = -frame.GetVelocity()[0];
    odom.twist.twist.linear.z = frame.GetVelocity()[2];
 
+    prevTcw = frame.GetPose();
+
+    last_time = current_time;
+
    /**
     * The publish() function is how you send messages. The parameter
     * is the message object. The type of this object must agree with the type
@@ -352,7 +365,7 @@ void ImageGrabber::SyncWithImu()
    }
 
    frame = mpSLAM->TrackStereo(imLeft,imRight,tImLeft,vImuMeas);
-    if(frame.imuIsPreintegrated()) initialized = true;
+    initialized = frame.imuIsPreintegrated();
 
    std::chrono::milliseconds tSleep(1);
    std::this_thread::sleep_for(tSleep);
@@ -379,12 +392,13 @@ void ImuGrabber::GrabImu(const sensor_msgs::ImuConstPtr &imu_msg)
    Eigen::Matrix3f Rwb2 = ORB_SLAM3::IMU::NormalizeRotation(Rwb1 * mpImuPreintegratedFromLastFrame.GetDeltaRotation(frame.mImuBias));
    Eigen::Vector3f twb2 = twb1 + Vwb1*t12 + 0.5f*t12*t12*Gz+ Rwb1 * mpImuPreintegratedFromLastFrame.GetDeltaPosition(frame.mImuBias);
    Eigen::Vector3f Vwb2 = Vwb1 + t12*Gz + Rwb1 * mpImuPreintegratedFromLastFrame.GetDeltaVelocity(frame.mImuBias);
+    if(Rwb2.determinant()>0){
    frame.mTimeStamp = imu_msg->header.stamp.toSec();
-    auto old = frame.GetPose().translation()[0];
    frame.SetImuPoseVelocity(Rwb2,twb2,Vwb2);
-    odom.twist.twist.angular.x = imu_msg->angular_velocity.x;
+    }
+    odom.twist.twist.angular.x = -imu_msg->angular_velocity.z;
    odom.twist.twist.angular.y = imu_msg->angular_velocity.y;
-    odom.twist.twist.angular.z = imu_msg->angular_velocity.z;
+    odom.twist.twist.angular.z = -imu_msg->angular_velocity.x;
  }
  return;
 }
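The hard-coded rotm in patch 7 (rows 0,1,0 / -1,0,0 / 0,0,1) is the same axis swap that the translation lines apply by hand: it maps (x, y, z) to (y, -x, z), i.e. a quarter turn about the z axis, presumably to express the camera-centric pose in the odometry frame convention. A quick standalone check:

#include <iostream>
#include <Eigen/Dense>
#include <Eigen/Geometry>

int main()
{
    Eigen::Matrix3f rotm;
    rotm << 0, 1, 0,
           -1, 0, 0,
            0, 0, 1;

    const Eigen::Vector3f t(1.f, 2.f, 3.f);            // stand-in for Tcw.translation()
    std::cout << (rotm * t).transpose() << std::endl;  // prints "2 -1 3", i.e. (y, -x, z)

    // The same rotation as a quaternion, as the patch applies it to the orientation.
    const Eigen::Quaternionf rotq(rotm);
    std::cout << (rotq * t).transpose() << std::endl;  // same result
    return 0;
}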