merge the newest code from author
parent 43c195710b
commit feedb9bdd8
@@ -1,6 +1,6 @@
CMakeLists.txt.user
CMakeLists_modified.txt
Examples/

Examples/RGB-D/rgbd_tum

Examples/Monocular/mono_euroc

@@ -47,7 +47,3 @@ cmake-build-debug/
*.pyc
*.osa

.vscode/
CMakeLists.txt
build.sh
@@ -4,16 +4,16 @@ project(ORB_SLAM3)
IF(NOT CMAKE_BUILD_TYPE)
SET(CMAKE_BUILD_TYPE Release)
ENDIF()
# set(CMAKE_BUILD_TYPE "Debug")

MESSAGE("Build type: " ${CMAKE_BUILD_TYPE})

set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -Wall -O3")
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wall -O3")
# set(CMAKE_C_FLAGS_RELEASE "${CMAKE_C_FLAGS_RELEASE} -march=native")
# set(CMAKE_CXX_FLAGS_RELEASE "${CMAKE_CXX_FLAGS_RELEASE} -march=native")
set(CMAKE_C_FLAGS_RELEASE "${CMAKE_C_FLAGS_RELEASE} -march=native")
set(CMAKE_CXX_FLAGS_RELEASE "${CMAKE_CXX_FLAGS_RELEASE} -march=native")

set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -Wall -Wno-deprecated -O3 -march=native ")
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wall -Wno-deprecated -O3 -march=native")
# set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -Wall -Wno-deprecated -O3 -march=native ")
# set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wall -Wno-deprecated -O3 -march=native")

# Check C++11 or C++0x support
include(CheckCXXCompilerFlag)

@@ -33,12 +33,12 @@ endif()
LIST(APPEND CMAKE_MODULE_PATH ${PROJECT_SOURCE_DIR}/cmake_modules)

find_package(OpenCV 3)
find_package(OpenCV 4.0)
if(NOT OpenCV_FOUND)
find_package(OpenCV 2.4.3 QUIET)
if(NOT OpenCV_FOUND)
message(FATAL_ERROR "OpenCV > 2.4.3 not found.")
endif()
find_package(OpenCV 3.0)
if(NOT OpenCV_FOUND)
message(FATAL_ERROR "OpenCV > 3.0 not found.")
endif()
endif()

MESSAGE("OPENCV VERSION:")

@@ -46,6 +46,7 @@ MESSAGE(${OpenCV_VERSION})
find_package(Eigen3 3.1.0 REQUIRED)
find_package(Pangolin REQUIRED)
find_package(realsense2)

include_directories(
${PROJECT_SOURCE_DIR}

@@ -72,7 +73,6 @@ src/Atlas.cc
src/Map.cc
src/MapDrawer.cc
src/Optimizer.cc
src/PnPsolver.cc
src/Frame.cc
src/KeyFrameDatabase.cc
src/Sim3Solver.cc

@@ -84,6 +84,7 @@ src/CameraModels/Pinhole.cpp
src/CameraModels/KannalaBrandt8.cpp
src/OptimizableTypes.cpp
src/MLPnPsolver.cpp
src/TwoViewReconstruction.cc
include/System.h
include/Tracking.h
include/LocalMapping.h

@@ -98,7 +99,6 @@ include/Atlas.h
include/Map.h
include/MapDrawer.h
include/Optimizer.h
include/PnPsolver.h
include/Frame.h
include/KeyFrameDatabase.h
include/Sim3Solver.h

@@ -112,7 +112,8 @@ include/CameraModels/KannalaBrandt8.h
include/OptimizableTypes.h
include/MLPnPsolver.h
include/TwoViewReconstruction.h
src/TwoViewReconstruction.cc)
include/Config.h
)

add_subdirectory(Thirdparty/g2o)

@@ -127,14 +128,17 @@ ${PROJECT_SOURCE_DIR}/Thirdparty/g2o/lib/libg2o.so
)

# Build examples
### Build examples

# RGB-D examples
set(CMAKE_RUNTIME_OUTPUT_DIRECTORY ${PROJECT_SOURCE_DIR}/Examples/RGB-D)

add_executable(rgbd_tum
Examples/RGB-D/rgbd_tum.cc)
target_link_libraries(rgbd_tum ${PROJECT_NAME})

# Stereo examples
set(CMAKE_RUNTIME_OUTPUT_DIRECTORY ${PROJECT_SOURCE_DIR}/Examples/Stereo)

add_executable(stereo_kitti

@@ -149,7 +153,7 @@ add_executable(stereo_tum_vi
Examples/Stereo/stereo_tum_vi.cc)
target_link_libraries(stereo_tum_vi ${PROJECT_NAME})

# Monocular examples
set(CMAKE_RUNTIME_OUTPUT_DIRECTORY ${PROJECT_SOURCE_DIR}/Examples/Monocular)

add_executable(mono_tum

@@ -168,7 +172,7 @@ add_executable(mono_tum_vi
Examples/Monocular/mono_tum_vi.cc)
target_link_libraries(mono_tum_vi ${PROJECT_NAME})

# Monocular-Inertial examples
set(CMAKE_RUNTIME_OUTPUT_DIRECTORY ${PROJECT_SOURCE_DIR}/Examples/Monocular-Inertial)

add_executable(mono_inertial_euroc

@@ -179,16 +183,14 @@ add_executable(mono_inertial_tum_vi
Examples/Monocular-Inertial/mono_inertial_tum_vi.cc)
target_link_libraries(mono_inertial_tum_vi ${PROJECT_NAME})

# Stereo-Inertial examples
set(CMAKE_RUNTIME_OUTPUT_DIRECTORY ${PROJECT_SOURCE_DIR}/Examples/Stereo-Inertial)

add_executable(stereo_inertial_euroc
Examples/Stereo-Inertial/stereo_inertial_euroc.cc)
target_link_libraries(stereo_inertial_euroc ${PROJECT_NAME})

add_executable(stereo_inertial_tum_vi
Examples/Stereo-Inertial/stereo_inertial_tum_vi.cc)
target_link_libraries(stereo_inertial_tum_vi ${PROJECT_NAME})
Changelog.md

@@ -1,15 +1,26 @@
# ORB-SLAM3
Details of changes between the different versions.

### V0.4: Beta version, 21 April 2021

- Changed OpenCV dynamic matrices to static matrices to speed up the code.

- Capability to measure running time of the system threads.

- Compatibility with OpenCV 4.0 (Requires at least OpenCV 3.0)

- Fixed minor bugs.

### V0.3: Beta version, 4 Sep 2020

- RGB-D compatibility, the RGB-D examples had been adapted to the new version.
- RGB-D compatibility, the RGB-D examples have been adapted to the new version.

- Kitti and TUM dataset compatibility, these examples had been adapted to the new version.
- Kitti and TUM dataset compatibility, these examples have been adapted to the new version.

- ROS compatibility, It had been updated the old references in the code to work with this version.
- ROS compatibility, updated the old references in the code to work with this version.

- Config file parser, the YAML file contains the session configuration, a wrong parametrization may break the execution without any information to solve it. This version parses the file to read all the fields and give a proper answer if one of the fields have been wrong deffined or don't exist.
- Config file parser: the YAML file contains the session configuration, and a wrong parametrization may break the execution without any information to solve it. This version parses the file to read all the fields and gives a proper answer if one of the fields has been wrongly defined or does not exist.

- Fixed minor bugs.

@@ -19,7 +30,7 @@ Initial release. It has these capabilities:

- Multiple-Maps capabilities, it is able to handle multiple maps in the same session and merge them when a common area is detected with a seamless fusion.

- Inertial sensor, the IMU initialization takes a 2 seconds to achieve a scale error less than 5\% and it is reffined in the next 10 seconds until is around 1\%. Inertial measures are integrated at frame rate to estimate the scale, gravity and velocity in order to improve the visual features detection and make the system robust to temporal occlusions.
- Inertial sensor, the IMU initialization takes 2 seconds to achieve a scale error less than 5\% and it is refined in the next 10 seconds until it is around 1\%. Inertial measures are integrated at frame rate to estimate the scale, gravity and velocity in order to improve the visual feature detection and make the system robust to temporal occlusions.

- Fisheye sensor, the fisheye sensors are now fully supported in monocular and stereo.
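The config file parser described in the V0.3 changelog entry above validates the YAML settings before the session starts. The sketch below illustrates that idea only; it is not the project's actual parser. It uses OpenCV's `cv::FileStorage` (which ORB-SLAM3 uses for its settings files), and the helper name `CheckSettingsField` and the field name `Camera.fps` are illustrative assumptions.

```cpp
#include <iostream>
#include <string>
#include <opencv2/core/persistence.hpp>

// Minimal sketch: open the settings YAML and fail early with a clear message
// if a required field is missing, instead of breaking later with no information.
// The helper name and the checked field are examples, not the real parser API.
bool CheckSettingsField(const cv::FileStorage &fs, const std::string &name)
{
    cv::FileNode node = fs[name];
    if (node.empty())
    {
        std::cerr << "Settings error: required field '" << name << "' is missing." << std::endl;
        return false;
    }
    return true;
}

int main(int argc, char **argv)
{
    if (argc < 2)
        return 1;
    cv::FileStorage fsSettings(argv[1], cv::FileStorage::READ);
    if (!fsSettings.isOpened())
    {
        std::cerr << "Could not open settings file: " << argv[1] << std::endl;
        return 1;
    }
    // Illustrative field name; a full parser would check every documented field.
    return CheckSettingsField(fsSettings, "Camera.fps") ? 0 : 1;
}
```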
@@ -116,25 +116,19 @@ int main(int argc, char *argv[])

cout.precision(17);

/*cout << "Start processing sequence ..." << endl;
cout << "Images in the sequence: " << nImages << endl;
cout << "IMU data in the sequence: " << nImu << endl << endl;*/

// Create SLAM system. It initializes all system threads and gets ready to process frames.
ORB_SLAM3::System SLAM(argv[1],argv[2],ORB_SLAM3::System::IMU_MONOCULAR, true);

int proccIm=0;
for (seq = 0; seq<num_seq; seq++)
{

// Main loop
cv::Mat im;
vector<ORB_SLAM3::IMU::Point> vImuMeas;
proccIm = 0;
for(int ni=0; ni<nImages[seq]; ni++, proccIm++)
{
// Read image from file
im = cv::imread(vstrImageFilenames[seq][ni],CV_LOAD_IMAGE_UNCHANGED);
im = cv::imread(vstrImageFilenames[seq][ni],cv::IMREAD_UNCHANGED);

double tframe = vTimestampsCam[seq][ni];

@@ -150,8 +144,6 @@ int main(int argc, char *argv[])

if(ni>0)
{
// cout << "t_cam " << tframe << endl;

while(vTimestampsImu[seq][first_imu[seq]]<=vTimestampsCam[seq][ni])
{
vImuMeas.push_back(ORB_SLAM3::IMU::Point(vAcc[seq][first_imu[seq]].x,vAcc[seq][first_imu[seq]].y,vAcc[seq][first_imu[seq]].z,

@@ -161,9 +153,7 @@ int main(int argc, char *argv[])
}
}

/*cout << "first imu: " << first_imu << endl;
cout << "first imu time: " << fixed << vTimestampsImu[first_imu] << endl;
cout << "size vImu: " << vImuMeas.size() << endl;*/

#ifdef COMPILEDWITHC11
std::chrono::steady_clock::time_point t1 = std::chrono::steady_clock::now();
#else

@@ -171,8 +161,7 @@ int main(int argc, char *argv[])
#endif

// Pass the image to the SLAM system
// cout << "tframe = " << tframe << endl;
SLAM.TrackMonocular(im,tframe,vImuMeas); // TODO change to monocular_inertial
SLAM.TrackMonocular(im,tframe,vImuMeas);

#ifdef COMPILEDWITHC11
std::chrono::steady_clock::time_point t2 = std::chrono::steady_clock::now();

@@ -180,9 +169,13 @@ int main(int argc, char *argv[])
std::chrono::monotonic_clock::time_point t2 = std::chrono::monotonic_clock::now();
#endif

#ifdef REGISTER_TIMES
double t_track = std::chrono::duration_cast<std::chrono::duration<double,std::milli> >(t2 - t1).count();
SLAM.InsertTrackTime(t_track);
#endif

double ttrack= std::chrono::duration_cast<std::chrono::duration<double> >(t2 - t1).count();
ttrack_tot += ttrack;
// std::cout << "ttrack: " << ttrack << std::endl;

vTimesTrack[ni]=ttrack;

@@ -194,7 +187,7 @@ int main(int argc, char *argv[])
T = tframe-vTimestampsCam[seq][ni-1];

if(ttrack<T)
usleep((T-ttrack)*1e6); // 1e6
usleep((T-ttrack)*1e6);
}
if(seq < num_seq - 1)
{
@@ -105,7 +105,6 @@ int main(int argc, char **argv)
// Step 3: IMU data is assumed to start before the images; find the IMU timestamp index closest to the first image timestamp and store it in first_imu[seq]
while(vTimestampsImu[seq][first_imu[seq]]<=vTimestampsCam[seq][0]){
first_imu[seq]++;
cout << "first_imu[seq] = " << first_imu[seq] << endl;
}
// The while loop exits once the IMU timestamp has just passed the image timestamp, so step the index back by one
first_imu[seq]--; // first imu measurement to be considered
@@ -91,7 +91,7 @@ int main(int argc, char **argv)
{

// Read image from file
im = cv::imread(vstrImageFilenames[seq][ni],CV_LOAD_IMAGE_UNCHANGED);
im = cv::imread(vstrImageFilenames[seq][ni],cv::IMREAD_UNCHANGED);
double tframe = vTimestampsCam[seq][ni];

if(im.empty())

@@ -108,8 +108,7 @@ int main(int argc, char **argv)
#endif

// Pass the image to the SLAM system
// cout << "tframe = " << tframe << endl;
SLAM.TrackMonocular(im,tframe); // TODO change to monocular_inertial
SLAM.TrackMonocular(im,tframe);

#ifdef COMPILEDWITHC11
std::chrono::steady_clock::time_point t2 = std::chrono::steady_clock::now();

@@ -117,6 +116,11 @@ int main(int argc, char **argv)
std::chrono::monotonic_clock::time_point t2 = std::chrono::monotonic_clock::now();
#endif

#ifdef REGISTER_TIMES
double t_track = std::chrono::duration_cast<std::chrono::duration<double,std::milli> >(t2 - t1).count();
SLAM.InsertTrackTime(t_track);
#endif

double ttrack= std::chrono::duration_cast<std::chrono::duration<double> >(t2 - t1).count();

vTimesTrack[ni]=ttrack;

@@ -129,7 +133,7 @@ int main(int argc, char **argv)
T = tframe-vTimestampsCam[seq][ni-1];

if(ttrack<T)
usleep((T-ttrack)*1e6); // 1e6
usleep((T-ttrack)*1e6);
}

if(seq < num_seq - 1)
@@ -53,10 +53,6 @@ int main(int argc, char **argv)
vector<float> vTimesTrack;
vTimesTrack.resize(nImages);

cout << endl << "-------" << endl;
cout << "Start processing sequence ..." << endl;
cout << "Images in the sequence: " << nImages << endl << endl;

// Main loop
cv::Mat im;
for(int ni=0; ni<nImages; ni++)
@@ -55,10 +55,6 @@ int main(int argc, char **argv)
vector<float> vTimesTrack;
vTimesTrack.resize(nImages);

cout << endl << "-------" << endl;
cout << "Start processing sequence ..." << endl;
cout << "Images in the sequence: " << nImages << endl << endl;

// Main loop
cv::Mat im;
for(int ni=0; ni<nImages; ni++)
@@ -103,7 +103,7 @@ int main(int argc, char **argv)
{

// Read image from file
im = cv::imread(vstrImageFilenames[seq][ni],cv::IMREAD_GRAYSCALE);
im = cv::imread(vstrImageFilenames[seq][ni],cv::IMREAD_UNCHANGED);

// clahe
clahe->apply(im,im);

@@ -146,7 +146,7 @@ int main(int argc, char **argv)
T = tframe-vTimestampsCam[seq][ni-1];

if(ttrack<T)
usleep((T-ttrack)*1e6); // 1e6
usleep((T-ttrack)*1e6);

}
if(seq < num_seq - 1)

@@ -158,13 +158,9 @@ int main(int argc, char **argv)

}

// cout << "ttrack_tot = " << ttrack_tot << std::endl;
// Stop all threads
SLAM.Shutdown();

// Tracking time statistics

// Save camera trajectory

if (bFileName)
@@ -65,17 +65,13 @@ int main(int argc, char **argv)
vector<float> vTimesTrack;
vTimesTrack.resize(nImages);

cout << endl << "-------" << endl;
cout << "Start processing sequence ..." << endl;
cout << "Images in the sequence: " << nImages << endl << endl;

// Main loop
cv::Mat imRGB, imD;
for(int ni=0; ni<nImages; ni++)
{
// Read image and depthmap from file
imRGB = cv::imread(string(argv[3])+"/"+vstrImageFilenamesRGB[ni],cv::IMREAD_UNCHANGED);
imD = cv::imread(string(argv[3])+"/"+vstrImageFilenamesD[ni],cv::IMREAD_UNCHANGED);
imRGB = cv::imread(string(argv[3])+"/"+vstrImageFilenamesRGB[ni],cv::IMREAD_UNCHANGED); //,cv::IMREAD_UNCHANGED);
imD = cv::imread(string(argv[3])+"/"+vstrImageFilenamesD[ni],cv::IMREAD_UNCHANGED); //,cv::IMREAD_UNCHANGED);
double tframe = vTimestamps[ni];

if(imRGB.empty())
@@ -167,6 +167,7 @@ int main(int argc, char **argv)
// Seq loop
vector<ORB_SLAM3::IMU::Point> vImuMeas;
double t_rect = 0;
double t_track = 0;
int num_rect = 0;
int proccIm = 0;
for(int ni=0; ni<nImages[seq]; ni++, proccIm++)

@@ -190,28 +191,32 @@ int main(int argc, char **argv)
}

#ifdef REGISTER_TIMES
#ifdef COMPILEDWITHC11
std::chrono::steady_clock::time_point t_Start_Rect = std::chrono::steady_clock::now();
#else
std::chrono::monotonic_clock::time_point t_Start_Rect = std::chrono::monotonic_clock::now();
#endif
#endif
cv::remap(imLeft,imLeftRect,M1l,M2l,cv::INTER_LINEAR);
cv::remap(imRight,imRightRect,M1r,M2r,cv::INTER_LINEAR);

#ifdef REGISTER_TIMES
#ifdef COMPILEDWITHC11
std::chrono::steady_clock::time_point t_End_Rect = std::chrono::steady_clock::now();
#else
std::chrono::monotonic_clock::time_point t_End_Rect = std::chrono::monotonic_clock::now();
#endif

t_rect = std::chrono::duration_cast<std::chrono::duration<double> >(t_End_Rect - t_Start_Rect).count();
t_rect = std::chrono::duration_cast<std::chrono::duration<double,std::milli> >(t_End_Rect - t_Start_Rect).count();
SLAM.InsertRectTime(t_rect);
#endif
double tframe = vTimestampsCam[seq][ni];

// Load imu measurements from previous frame
vImuMeas.clear();

if(ni>0)
while(vTimestampsImu[seq][first_imu[seq]]<=vTimestampsCam[seq][ni]) // while(vTimestampsImu[first_imu]<=vTimestampsCam[ni])
while(vTimestampsImu[seq][first_imu[seq]]<=vTimestampsCam[seq][ni])
{
vImuMeas.push_back(ORB_SLAM3::IMU::Point(vAcc[seq][first_imu[seq]].x,vAcc[seq][first_imu[seq]].y,vAcc[seq][first_imu[seq]].z,
vGyro[seq][first_imu[seq]].x,vGyro[seq][first_imu[seq]].y,vGyro[seq][first_imu[seq]].z,

@@ -234,6 +239,11 @@ int main(int argc, char **argv)
std::chrono::monotonic_clock::time_point t2 = std::chrono::monotonic_clock::now();
#endif

#ifdef REGISTER_TIMES
t_track = t_rect + std::chrono::duration_cast<std::chrono::duration<double,std::milli> >(t2 - t1).count();
SLAM.InsertTrackTime(t_track);
#endif

double ttrack= std::chrono::duration_cast<std::chrono::duration<double> >(t2 - t1).count();

vTimesTrack[ni]=ttrack;
@@ -114,10 +114,6 @@ int main(int argc, char **argv)
cout << endl << "-------" << endl;
cout.precision(17);

/*cout << "Start processing sequence ..." << endl;
cout << "Images in the sequence: " << nImages << endl;
cout << "IMU data in the sequence: " << nImu << endl << endl;*/

// Create SLAM system. It initializes all system threads and gets ready to process frames.
ORB_SLAM3::System SLAM(argv[1],argv[2],ORB_SLAM3::System::IMU_STEREO, true, 0, file_name);

@@ -156,23 +152,16 @@ int main(int argc, char **argv)

if(ni>0)
{
// cout << "t_cam " << tframe << endl;

while(vTimestampsImu[seq][first_imu[seq]]<=vTimestampsCam[seq][ni])
{
// vImuMeas.push_back(ORB_SLAM3::IMU::Point(vAcc[first_imu],vGyro[first_imu],vTimestampsImu[first_imu]));
vImuMeas.push_back(ORB_SLAM3::IMU::Point(vAcc[seq][first_imu[seq]].x,vAcc[seq][first_imu[seq]].y,vAcc[seq][first_imu[seq]].z,
vGyro[seq][first_imu[seq]].x,vGyro[seq][first_imu[seq]].y,vGyro[seq][first_imu[seq]].z,
vTimestampsImu[seq][first_imu[seq]]));
// cout << "t_imu = " << fixed << vImuMeas.back().t << endl;
first_imu[seq]++;
}
}

/*cout << "first imu: " << first_imu[seq] << endl;
cout << "first imu time: " << fixed << vTimestampsImu[seq][0] << endl;
cout << "size vImu: " << vImuMeas.size() << endl;*/

#ifdef COMPILEDWITHC11
std::chrono::steady_clock::time_point t1 = std::chrono::steady_clock::now();
#else

@@ -190,7 +179,6 @@ int main(int argc, char **argv)

double ttrack= std::chrono::duration_cast<std::chrono::duration<double> >(t2 - t1).count();
ttrack_tot += ttrack;
// std::cout << "ttrack: " << ttrack << std::endl;

vTimesTrack[ni]=ttrack;

@@ -202,7 +190,7 @@ int main(int argc, char **argv)
T = tframe-vTimestampsCam[seq][ni-1];

if(ttrack<T)
usleep((T-ttrack)*1e6); // 1e6
usleep((T-ttrack)*1e6);
}
if(seq < num_seq - 1)
{

@@ -216,8 +204,6 @@ int main(int argc, char **argv)
// Stop all threads
SLAM.Shutdown();

// Tracking time statistics

// Save camera trajectory
std::chrono::system_clock::time_point scNow = std::chrono::system_clock::now();
std::time_t now = std::chrono::system_clock::to_time_t(scNow);
@@ -135,6 +135,7 @@ int main(int argc, char **argv)
// Seq loop

double t_rect = 0;
double t_track = 0;
int num_rect = 0;
int proccIm = 0;
for(int ni=0; ni<nImages[seq]; ni++, proccIm++)

@@ -157,25 +158,26 @@ int main(int argc, char **argv)
return 1;
}

#ifdef REGISTER_TIMES
#ifdef COMPILEDWITHC11
std::chrono::steady_clock::time_point t_Start_Rect = std::chrono::steady_clock::now();
#else
std::chrono::monotonic_clock::time_point t_Start_Rect = std::chrono::monotonic_clock::now();
#endif
#endif
cv::remap(imLeft,imLeftRect,M1l,M2l,cv::INTER_LINEAR);
cv::remap(imRight,imRightRect,M1r,M2r,cv::INTER_LINEAR);

#ifdef REGISTER_TIMES
#ifdef COMPILEDWITHC11
std::chrono::steady_clock::time_point t_End_Rect = std::chrono::steady_clock::now();
#else
std::chrono::monotonic_clock::time_point t_End_Rect = std::chrono::monotonic_clock::now();
#endif

t_rect = std::chrono::duration_cast<std::chrono::duration<double> >(t_End_Rect - t_Start_Rect).count();
t_rect = std::chrono::duration_cast<std::chrono::duration<double,std::milli> >(t_End_Rect - t_Start_Rect).count();
SLAM.InsertRectTime(t_rect);
#endif
double tframe = vTimestampsCam[seq][ni];

#ifdef COMPILEDWITHC11
std::chrono::steady_clock::time_point t1 = std::chrono::steady_clock::now();
#else

@@ -191,6 +193,11 @@ int main(int argc, char **argv)
std::chrono::monotonic_clock::time_point t2 = std::chrono::monotonic_clock::now();
#endif

#ifdef REGISTER_TIMES
t_track = t_rect + std::chrono::duration_cast<std::chrono::duration<double,std::milli> >(t2 - t1).count();
SLAM.InsertTrackTime(t_track);
#endif

double ttrack= std::chrono::duration_cast<std::chrono::duration<double> >(t2 - t1).count();

vTimesTrack[ni]=ttrack;

@@ -203,7 +210,7 @@ int main(int argc, char **argv)
T = tframe-vTimestampsCam[seq][ni-1];

if(ttrack<T)
usleep((T-ttrack)*1e6); // 1e6
usleep((T-ttrack)*1e6);
}

if(seq < num_seq - 1)
@@ -52,11 +52,7 @@ int main(int argc, char **argv)

// Vector for tracking time statistics
vector<float> vTimesTrack;
vTimesTrack.resize(nImages);

cout << endl << "-------" << endl;
cout << "Start processing sequence ..." << endl;
cout << "Images in the sequence: " << nImages << endl << endl;
vTimesTrack.resize(nImages);

// Main loop
cv::Mat imLeft, imRight;
@@ -137,7 +137,6 @@ int main(int argc, char **argv)

double ttrack= std::chrono::duration_cast<std::chrono::duration<double> >(t2 - t1).count();
ttrack_tot += ttrack;
// std::cout << "ttrack: " << ttrack << std::endl;

vTimesTrack[ni]=ttrack;

@@ -149,7 +148,7 @@ int main(int argc, char **argv)
T = tframe-vTimestampsCam[seq][ni-1];

if(ttrack<T)
usleep((T-ttrack)*1e6); // 1e6
usleep((T-ttrack)*1e6);
}
if(seq < num_seq - 1)
{

@@ -163,8 +162,6 @@ int main(int argc, char **argv)
// Stop all threads
SLAM.Shutdown();

// Tracking time statistics

// Save camera trajectory
std::chrono::system_clock::time_point scNow = std::chrono::system_clock::now();
std::time_t now = std::chrono::system_clock::to_time_t(scNow);
@@ -12,10 +12,10 @@

# ORB-SLAM3

### V0.3: Beta version, 4 Sep 2020
### V0.4: Beta version, 21 April 2021
**Authors:** Carlos Campos, Richard Elvira, Juan J. Gómez Rodríguez, [José M. M. Montiel](http://webdiis.unizar.es/~josemari/), [Juan D. Tardos](http://webdiis.unizar.es/~jdtardos/).

The [Changelog](https://github.com/UZ-SLAMLab/ORB_SLAM3/Changelog.md) describes the features of each version.
The [Changelog](https://github.com/UZ-SLAMLab/ORB_SLAM3/blob/master/Changelog.md) describes the features of each version.

ORB-SLAM3 is the first real-time SLAM library able to perform **Visual, Visual-Inertial and Multi-Map SLAM** with **monocular, stereo and RGB-D** cameras, using **pin-hole and fisheye** lens models. In all sensor configurations, ORB-SLAM3 is as robust as the best systems available in the literature, and significantly more accurate.

@@ -28,7 +28,7 @@ alt="ORB-SLAM3" width="240" height="180" border="10" /></a>

### Related Publications:

[ORB-SLAM3] Carlos Campos, Richard Elvira, Juan J. Gómez Rodríguez, José M. M. Montiel and Juan D. Tardós, **ORB-SLAM3: An Accurate Open-Source Library for Visual, Visual-Inertial and Multi-Map SLAM**, Under review. **[PDF](https://arxiv.org/pdf/2007.11898.pdf)**.
[ORB-SLAM3] Carlos Campos, Richard Elvira, Juan J. Gómez Rodríguez, José M. M. Montiel and Juan D. Tardós, **ORB-SLAM3: An Accurate Open-Source Library for Visual, Visual-Inertial and Multi-Map SLAM**, *IEEE Transactions on Robotics, 2021* **[PDF](https://arxiv.org/abs/2007.11898)**.

[IMU-Initialization] Carlos Campos, J. M. M. Montiel and Juan D. Tardós, **Inertial-Only Optimization for Visual-Inertial Initialization**, *ICRA 2020*. **[PDF](https://arxiv.org/pdf/2003.05766.pdf)**

@@ -221,5 +221,6 @@ Once ORB-SLAM3 has loaded the vocabulary, press space in the rosbag tab.
rosrun rosbag fastrebag.py dataset-room1_512_16.bag dataset-room1_512_16_small_chunks.bag
```

# 7. Time analysis
A flag in `include/Config.h` activates time measurements. It is necessary to uncomment the line `#define REGISTER_TIMES` to obtain the time statistics of one execution, which are shown in the terminal and stored in a text file (`ExecTimeMean.txt`).
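The new README section above corresponds to the `REGISTER_TIMES` guards added throughout the example programs in this commit: each tracking call is wrapped with `std::chrono` timestamps and the elapsed milliseconds are handed back to the system via `SLAM.InsertTrackTime(...)`. A condensed, self-contained sketch of that pattern follows; the helper `TimeTrackingCall` is an illustrative name introduced here, not part of the ORB-SLAM3 API.

```cpp
#include <chrono>
#include <functional>
#include <iostream>

// Condensed version of the timing pattern used in the examples above:
// measure one tracking call in milliseconds, which the examples report
// when REGISTER_TIMES is enabled in Config.h.
double TimeTrackingCall(const std::function<void()> &trackOnce)
{
    std::chrono::steady_clock::time_point t1 = std::chrono::steady_clock::now();
    trackOnce();  // e.g. SLAM.TrackMonocular(im, tframe, vImuMeas);
    std::chrono::steady_clock::time_point t2 = std::chrono::steady_clock::now();
    return std::chrono::duration_cast<std::chrono::duration<double, std::milli>>(t2 - t1).count();
}

int main()
{
    double t_track = TimeTrackingCall([] { /* stand-in for one tracking call */ });
#ifdef REGISTER_TIMES
    // In the real examples this value is passed to SLAM.InsertTrackTime(t_track)
    // and the accumulated statistics end up in ExecTimeMean.txt.
#endif
    std::cout << "t_track = " << t_track << " ms" << std::endl;
    return 0;
}
```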
@@ -24,11 +24,11 @@ set(SRCS_DUTILS
DUtils/Random.cpp
DUtils/Timestamp.cpp)

find_package(OpenCV 3.0 QUIET)
find_package(OpenCV 4.0 QUIET)
if(NOT OpenCV_FOUND)
find_package(OpenCV 2.4.3 QUIET)
find_package(OpenCV 3.0 QUIET)
if(NOT OpenCV_FOUND)
message(FATAL_ERROR "OpenCV > 2.4.3 not found.")
message(FATAL_ERROR "OpenCV > 3.0 not found.")
endif()
endif()
@@ -1333,13 +1333,7 @@ int TemplatedVocabulary<TDescriptor,F>::stopWords(double minWeight)
}

// --------------------------------------------------------------------------
/**
 * @brief Load a pre-trained ORB vocabulary stored in TXT format
 *
 * @param[in] filename  File name
 * @return true   Loaded successfully
 * @return false  Loading failed
 */

template<class TDescriptor, class F>
bool TemplatedVocabulary<TDescriptor,F>::loadFromTextFile(const std::string &filename)
{

@@ -1347,7 +1341,7 @@ bool TemplatedVocabulary<TDescriptor,F>::loadFromTextFile(const std::string &fil
f.open(filename.c_str());

if(f.eof())
return false;
return false;

m_words.clear();
m_nodes.clear();

@@ -1356,8 +1350,8 @@ bool TemplatedVocabulary<TDescriptor,F>::loadFromTextFile(const std::string &fil
getline(f,s);
stringstream ss;
ss << s;
ss >> m_k; // branching factor of the tree
ss >> m_L; // depth of the tree
ss >> m_k;
ss >> m_L;
int n1, n2;
ss >> n1;
ss >> n2;

@@ -1365,22 +1359,20 @@ bool TemplatedVocabulary<TDescriptor,F>::loadFromTextFile(const std::string &fil
if(m_k<0 || m_k>20 || m_L<1 || m_L>10 || n1<0 || n1>5 || n2<0 || n2>3)
{
std::cerr << "Vocabulary loading failure: This is not a correct text file!" << endl;
return false;
return false;
}

m_scoring = (ScoringType)n1; // scoring type
m_weighting = (WeightingType)n2; // weighting type
m_scoring = (ScoringType)n1;
m_weighting = (WeightingType)n2;
createScoringObject();

// Total number of nodes: the sum of a geometric series
//! bug: this does not include the final leaf level; it should be ((pow((double)m_k, (double)m_L + 2) - 1)/(m_k - 1))
//! It has no effect, however, because this is only a reserve; the actual storage grows step by step via resize
// nodes
int expected_nodes =
(int)((pow((double)m_k, (double)m_L + 1) - 1)/(m_k - 1));
m_nodes.reserve(expected_nodes);
// Pre-allocate space for the number of words (leaves)

m_words.reserve(pow((double)m_k, (double)m_L + 1));
// The first node is the root; its id is set to 0

m_nodes.resize(1);
m_nodes[0].id = 0;
while(!f.eof())

@@ -1389,53 +1381,40 @@ bool TemplatedVocabulary<TDescriptor,F>::loadFromTextFile(const std::string &fil
getline(f,snode);
stringstream ssnode;
ssnode << snode;
// nid is the current node id; in practice it is the read order, starting at 0
int nid = m_nodes.size();
// grow the node container by one
m_nodes.resize(m_nodes.size()+1);
m_nodes[nid].id = nid;

// The first number on each line is the parent node id
int nid = m_nodes.size();
m_nodes.resize(m_nodes.size()+1);
m_nodes[nid].id = nid;

int pid ;
ssnode >> pid;
// Record the parent/child relationship between node ids
m_nodes[nid].parent = pid;
m_nodes[pid].children.push_back(nid);

// The second number indicates whether the node is a leaf (Word)
int nIsLeaf;
ssnode >> nIsLeaf;

// Each feature descriptor is 256 bits; one byte holds 8 bits, so one descriptor needs 32 bytes of storage.
// Here F::L = 32, i.e. 32 bytes are read and finally stored as a string in ssd
stringstream ssd;
for(int iD=0;iD<F::L;iD++)
{
string sElement;
ssnode >> sElement;
ssd << sElement << " ";
}
// Store ssd as this node's descriptor
}
F::fromString(m_nodes[nid].descriptor, ssd.str());

// The last number is the node weight (only Words have one)
ssnode >> m_nodes[nid].weight;

if(nIsLeaf>0)
{
// If it is a leaf (Word), store it in m_words
int wid = m_words.size();
m_words.resize(wid+1);

// Store the Word id, which is unique
m_nodes[nid].word_id = wid;

// Build vector<Node*> m_words, storing a pointer to the node that holds each word
m_words[wid] = &m_nodes[nid];
}
else
{
// Not a leaf: reserve m_k children directly
m_nodes[nid].children.reserve(m_k);
}
}
build.sh

@@ -27,5 +27,5 @@ echo "Configuring and building ORB_SLAM3 ..."

mkdir build
cd build
cmake .. -DCMAKE_BUILD_TYPE=Debug
make
cmake .. -DCMAKE_BUILD_TYPE=Release
make -j
@@ -43,33 +43,8 @@ class Frame;
class KannalaBrandt8;
class Pinhole;

//BOOST_CLASS_EXPORT_GUID(Pinhole, "Pinhole")
//BOOST_CLASS_EXPORT_GUID(KannalaBrandt8, "KannalaBrandt8")

class Atlas
{
friend class boost::serialization::access;

template<class Archive>
void serialize(Archive &ar, const unsigned int version)
{
//ar.template register_type<Pinhole>();
//ar.template register_type<KannalaBrandt8>();

// Save/load the set of maps, the set is broken in libboost 1.58 for ubuntu 16.04
//ar & mspMaps;
ar & mvpBackupMaps;
ar & mvpCameras;
//ar & mvpBackupCamPin;
//ar & mvpBackupCamKan;
// Need to save/load the static Id from Frame, KeyFrame, MapPoint and Map
ar & Map::nNextId;
ar & Frame::nNextId;
ar & KeyFrame::nNextId;
ar & MapPoint::nNextId;
ar & GeometricCamera::nNextId;
ar & mnLastInitKFidMap;
}

public:
Atlas();

@@ -86,8 +61,6 @@ public:
// Method for change components in the current map
void AddKeyFrame(KeyFrame* pKF);
void AddMapPoint(MapPoint* pMP);
//void EraseMapPoint(MapPoint* pMP);
//void EraseKeyFrame(KeyFrame* pKF);

void AddCamera(GeometricCamera* pCam);

@@ -122,10 +95,6 @@ public:
void SetImuInitialized();
bool isImuInitialized();

// Function for garantee the correction of serialization of this object
void PreSave();
void PostLoad();

void SetKeyFrameDababase(KeyFrameDatabase* pKFDB);
KeyFrameDatabase* GetKeyFrameDatabase();

@@ -140,15 +109,12 @@ protected:

std::set<Map*> mspMaps;
std::set<Map*> mspBadMaps;
// Its necessary change the container from set to vector because libboost 1.58 and Ubuntu 16.04 have an error with this cointainer
std::vector<Map*> mvpBackupMaps;
Map* mpCurrentMap;

std::vector<GeometricCamera*> mvpCameras;
std::vector<KannalaBrandt8*> mvpBackupCamKan;
std::vector<Pinhole*> mvpBackupCamPin;

//Pinhole testCam;
std::mutex mMutexAtlas;

unsigned long int mnLastInitKFidMap;
@@ -36,23 +36,13 @@
namespace ORB_SLAM3 {
class GeometricCamera {

friend class boost::serialization::access;

template<class Archive>
void serialize(Archive& ar, const unsigned int version)
{
ar & mnId;
ar & mnType;
ar & mvParameters;
}

public:
GeometricCamera() {}
GeometricCamera(const std::vector<float> &_vParameters) : mvParameters(_vParameters) {}
~GeometricCamera() {}

virtual cv::Point2f project(const cv::Point3f &p3D) = 0;
virtual cv::Point2f project(const cv::Matx31f &m3D) = 0;
virtual cv::Point2f project(const cv::Mat& m3D) = 0;
virtual Eigen::Vector2d project(const Eigen::Vector3d & v3D) = 0;
virtual cv::Mat projectMat(const cv::Point3f& p3D) = 0;

@@ -61,6 +51,7 @@ namespace ORB_SLAM3 {

virtual cv::Point3f unproject(const cv::Point2f &p2D) = 0;
virtual cv::Mat unprojectMat(const cv::Point2f &p2D) = 0;
virtual cv::Matx31f unprojectMat_(const cv::Point2f &p2D) = 0;

virtual cv::Mat projectJac(const cv::Point3f &p3D) = 0;
virtual Eigen::Matrix<double,2,3> projectJac(const Eigen::Vector3d& v3D) = 0;

@@ -71,8 +62,10 @@ namespace ORB_SLAM3 {
cv::Mat &R21, cv::Mat &t21, std::vector<cv::Point3f> &vP3D, std::vector<bool> &vbTriangulated) = 0;

virtual cv::Mat toK() = 0;
virtual cv::Matx33f toK_() = 0;

virtual bool epipolarConstrain(GeometricCamera* otherCamera, const cv::KeyPoint& kp1, const cv::KeyPoint& kp2, const cv::Mat& R12, const cv::Mat& t12, const float sigmaLevel, const float unc) = 0;
virtual bool epipolarConstrain_(GeometricCamera* otherCamera, const cv::KeyPoint& kp1, const cv::KeyPoint& kp2, const cv::Matx33f& R12, const cv::Matx31f& t12, const float sigmaLevel, const float unc) = 0;

float getParameter(const int i){return mvParameters[i];}
void setParameter(const float p, const size_t i){mvParameters[i] = p;}
@@ -35,15 +35,6 @@
namespace ORB_SLAM3 {
class KannalaBrandt8 final : public GeometricCamera {

friend class boost::serialization::access;

template<class Archive>
void serialize(Archive& ar, const unsigned int version)
{
ar & boost::serialization::base_object<GeometricCamera>(*this);
ar & const_cast<float&>(precision);
}

public:
KannalaBrandt8() : precision(1e-6) {
mvParameters.resize(8);

@@ -69,6 +60,7 @@ namespace ORB_SLAM3 {
}

cv::Point2f project(const cv::Point3f &p3D);
cv::Point2f project(const cv::Matx31f &m3D);
cv::Point2f project(const cv::Mat& m3D);
Eigen::Vector2d project(const Eigen::Vector3d & v3D);
cv::Mat projectMat(const cv::Point3f& p3D);

@@ -77,6 +69,7 @@ namespace ORB_SLAM3 {

cv::Point3f unproject(const cv::Point2f &p2D);
cv::Mat unprojectMat(const cv::Point2f &p2D);
cv::Matx31f unprojectMat_(const cv::Point2f &p2D);

cv::Mat projectJac(const cv::Point3f &p3D);
Eigen::Matrix<double,2,3> projectJac(const Eigen::Vector3d& v3D);

@@ -87,10 +80,14 @@ namespace ORB_SLAM3 {
cv::Mat &R21, cv::Mat &t21, std::vector<cv::Point3f> &vP3D, std::vector<bool> &vbTriangulated);

cv::Mat toK();
cv::Matx33f toK_();

bool epipolarConstrain(GeometricCamera* pCamera2, const cv::KeyPoint& kp1, const cv::KeyPoint& kp2, const cv::Mat& R12, const cv::Mat& t12, const float sigmaLevel, const float unc);
bool epipolarConstrain_(GeometricCamera* pCamera2, const cv::KeyPoint& kp1, const cv::KeyPoint& kp2, const cv::Matx33f& R12, const cv::Matx31f& t12, const float sigmaLevel, const float unc);

float TriangulateMatches(GeometricCamera* pCamera2, const cv::KeyPoint& kp1, const cv::KeyPoint& kp2, const cv::Mat& R12, const cv::Mat& t12, const float sigmaLevel, const float unc, cv::Mat& p3D);
float TriangulateMatches_(GeometricCamera* pCamera2, const cv::KeyPoint& kp1, const cv::KeyPoint& kp2, const cv::Matx33f& R12, const cv::Matx31f& t12, const float sigmaLevel, const float unc, cv::Matx31f& p3D);

std::vector<int> mvLappingArea;

@@ -110,6 +107,7 @@ namespace ORB_SLAM3 {
TwoViewReconstruction* tvr;

void Triangulate(const cv::Point2f &p1, const cv::Point2f &p2, const cv::Mat &Tcw1, const cv::Mat &Tcw2,cv::Mat &x3D);
void Triangulate_(const cv::Point2f &p1, const cv::Point2f &p2, const cv::Matx44f &Tcw1, const cv::Matx44f &Tcw2,cv::Matx31f &x3D);
};
}
@@ -35,14 +35,6 @@
namespace ORB_SLAM3 {
class Pinhole : public GeometricCamera {

friend class boost::serialization::access;

template<class Archive>
void serialize(Archive& ar, const unsigned int version)
{
ar & boost::serialization::base_object<GeometricCamera>(*this);
}

public:
Pinhole() {
mvParameters.resize(4);

@@ -67,6 +59,7 @@ namespace ORB_SLAM3 {
}

cv::Point2f project(const cv::Point3f &p3D);
cv::Point2f project(const cv::Matx31f &m3D);
cv::Point2f project(const cv::Mat &m3D);
Eigen::Vector2d project(const Eigen::Vector3d & v3D);
cv::Mat projectMat(const cv::Point3f& p3D);

@@ -75,6 +68,8 @@ namespace ORB_SLAM3 {

cv::Point3f unproject(const cv::Point2f &p2D);
cv::Mat unprojectMat(const cv::Point2f &p2D);
cv::Matx31f unprojectMat_(const cv::Point2f &p2D);

cv::Mat projectJac(const cv::Point3f &p3D);
Eigen::Matrix<double,2,3> projectJac(const Eigen::Vector3d& v3D);

@@ -85,8 +80,10 @@ namespace ORB_SLAM3 {
cv::Mat &R21, cv::Mat &t21, std::vector<cv::Point3f> &vP3D, std::vector<bool> &vbTriangulated);

cv::Mat toK();
cv::Matx33f toK_();

bool epipolarConstrain(GeometricCamera* pCamera2, const cv::KeyPoint& kp1, const cv::KeyPoint& kp2, const cv::Mat& R12, const cv::Mat& t12, const float sigmaLevel, const float unc);
bool epipolarConstrain_(GeometricCamera* pCamera2, const cv::KeyPoint& kp1, const cv::KeyPoint& kp2, const cv::Matx33f& R12, const cv::Matx31f& t12, const float sigmaLevel, const float unc);

bool matchAndtriangulate(const cv::KeyPoint& kp1, const cv::KeyPoint& kp2, GeometricCamera* pOther,
cv::Mat& Tcw1, cv::Mat& Tcw2,

@@ -97,6 +94,7 @@ namespace ORB_SLAM3 {
friend std::istream& operator>>(std::istream& os, Pinhole& ph);
private:
cv::Mat SkewSymmetricMatrix(const cv::Mat &v);
cv::Matx33f SkewSymmetricMatrix_(const cv::Matx31f &v);

//Parameters vector corresponds to
// [fx, fy, cx, cy]
@@ -20,8 +20,6 @@
#ifndef FRAME_H
#define FRAME_H

//#define SAVE_TIMES

#include<vector>

#include "Thirdparty/DBoW2/DBoW2/BowVector.h"

@@ -29,6 +27,7 @@

#include "ImuTypes.h"
#include "ORBVocabulary.h"
#include "Config.h"

#include <mutex>
#include <opencv2/opencv.hpp>

@@ -61,9 +60,6 @@ public:
// Constructor for Monocular cameras.
Frame(const cv::Mat &imGray, const double &timeStamp, ORBextractor* extractor,ORBVocabulary* voc, GeometricCamera* pCamera, cv::Mat &distCoef, const float &bf, const float &thDepth, Frame* pPrevF = static_cast<Frame*>(NULL), const IMU::Calib &ImuCalib = IMU::Calib());

// Destructor
// ~Frame();

// Extract ORB on the image. 0 for left image and 1 for right image.
void ExtractORB(int flag, const cv::Mat &im, const int x0, const int x1);

@@ -247,9 +243,10 @@ public:

int mnDataset;

double mTimeStereoMatch;
#ifdef REGISTER_TIMES
double mTimeORB_Ext;

double mTimeStereoMatch;
#endif

private:

@@ -269,6 +266,10 @@ private:
cv::Mat mtcw;
//==mtwc

cv::Matx31f mOwx;
cv::Matx33f mRcwx;
cv::Matx31f mtcwx;

bool mbImuPreintegrated;

std::mutex *mpMutexImu;

@@ -295,6 +296,7 @@ public:
std::vector<std::size_t> mGridRight[FRAME_GRID_COLS][FRAME_GRID_ROWS];

cv::Mat mTlr, mRlr, mtlr, mTrl;
cv::Matx34f mTrlx, mTlrx;

Frame(const cv::Mat &imLeft, const cv::Mat &imRight, const double &timeStamp, ORBextractor* extractorLeft, ORBextractor* extractorRight, ORBVocabulary* voc, cv::Mat &K, cv::Mat &distCoef, const float &bf, const float &thDepth, GeometricCamera* pCamera, GeometricCamera* pCamera2, cv::Mat& Tlr,Frame* pPrevF = static_cast<Frame*>(NULL), const IMU::Calib &ImuCalib = IMU::Calib());
@@ -51,233 +51,6 @@ class GeometricCamera;
class KeyFrame
{

template<class Archive>
void serializeMatrix(Archive& ar, cv::Mat& mat, const unsigned int version)
{
int cols, rows, type;
bool continuous;

if (Archive::is_saving::value) {
cols = mat.cols; rows = mat.rows; type = mat.type();
continuous = mat.isContinuous();
}

ar & cols & rows & type & continuous;

if (Archive::is_loading::value)
mat.create(rows, cols, type);

if (continuous) {
const unsigned int data_size = rows * cols * mat.elemSize();
ar & boost::serialization::make_array(mat.ptr(), data_size);
} else {
const unsigned int row_size = cols*mat.elemSize();
for (int i = 0; i < rows; i++) {
ar & boost::serialization::make_array(mat.ptr(i), row_size);
}
}
}

template<class Archive>
void serializeMatrix(Archive& ar, const cv::Mat& mat, const unsigned int version)
{
cv::Mat matAux = mat;

serializeMatrix(ar, matAux,version);

if (Archive::is_loading::value)
{
cv::Mat* ptr;
ptr = (cv::Mat*)( &mat );
*ptr = matAux;
}
}

friend class boost::serialization::access;
template<class Archive>
void serializeVectorKeyPoints(Archive& ar, const vector<cv::KeyPoint>& vKP, const unsigned int version)
{
int NumEl;

if (Archive::is_saving::value) {
NumEl = vKP.size();
}

ar & NumEl;

vector<cv::KeyPoint> vKPaux = vKP;
if (Archive::is_loading::value)
vKPaux.reserve(NumEl);

for(int i=0; i < NumEl; ++i)
{
cv::KeyPoint KPi;

if (Archive::is_loading::value)
KPi = cv::KeyPoint();

if (Archive::is_saving::value)
KPi = vKPaux[i];

ar & KPi.angle;
ar & KPi.response;
ar & KPi.size;
ar & KPi.pt.x;
ar & KPi.pt.y;
ar & KPi.class_id;
ar & KPi.octave;

if (Archive::is_loading::value)
vKPaux.push_back(KPi);
}

if (Archive::is_loading::value)
{
vector<cv::KeyPoint> *ptr;
ptr = (vector<cv::KeyPoint>*)( &vKP );
*ptr = vKPaux;
}
}

template<class Archive>
void serialize(Archive& ar, const unsigned int version)
{
ar & mnId;
ar & const_cast<long unsigned int&>(mnFrameId);
ar & const_cast<double&>(mTimeStamp);
// Grid
ar & const_cast<int&>(mnGridCols);
ar & const_cast<int&>(mnGridRows);
ar & const_cast<float&>(mfGridElementWidthInv);
ar & const_cast<float&>(mfGridElementHeightInv);
// Variables of tracking
ar & mnTrackReferenceForFrame;
ar & mnFuseTargetForKF;
// Variables of local mapping
ar & mnBALocalForKF;
ar & mnBAFixedForKF;
ar & mnNumberOfOpt;
// Variables used by KeyFrameDatabase
ar & mnLoopQuery;
ar & mnLoopWords;
ar & mLoopScore;
ar & mnRelocQuery;
ar & mnRelocWords;
ar & mRelocScore;
ar & mnMergeQuery;
ar & mnMergeWords;
ar & mMergeScore;
ar & mnPlaceRecognitionQuery;
ar & mnPlaceRecognitionWords;
ar & mPlaceRecognitionScore;
ar & mbCurrentPlaceRecognition;
// Variables of loop closing
serializeMatrix(ar,mTcwGBA,version);
serializeMatrix(ar,mTcwBefGBA,version);
serializeMatrix(ar,mVwbGBA,version);
serializeMatrix(ar,mVwbBefGBA,version);
ar & mBiasGBA;
ar & mnBAGlobalForKF;
// Variables of Merging
serializeMatrix(ar,mTcwMerge,version);
serializeMatrix(ar,mTcwBefMerge,version);
serializeMatrix(ar,mTwcBefMerge,version);
serializeMatrix(ar,mVwbMerge,version);
serializeMatrix(ar,mVwbBefMerge,version);
ar & mBiasMerge;
ar & mnMergeCorrectedForKF;
ar & mnMergeForKF;
ar & mfScaleMerge;
ar & mnBALocalForMerge;
// Scale
ar & mfScale;
// Calibration parameters
ar & const_cast<float&>(fx);
ar & const_cast<float&>(fy);
ar & const_cast<float&>(invfx);
ar & const_cast<float&>(invfy);
ar & const_cast<float&>(cx);
ar & const_cast<float&>(cy);
ar & const_cast<float&>(mbf);
ar & const_cast<float&>(mb);
ar & const_cast<float&>(mThDepth);
serializeMatrix(ar,mDistCoef,version);
// Number of Keypoints
ar & const_cast<int&>(N);
// KeyPoints
serializeVectorKeyPoints(ar,mvKeys,version);
serializeVectorKeyPoints(ar,mvKeysUn,version);
ar & const_cast<vector<float>& >(mvuRight);
ar & const_cast<vector<float>& >(mvDepth);
serializeMatrix(ar,mDescriptors,version);
// BOW
ar & mBowVec;
ar & mFeatVec;
// Pose relative to parent
serializeMatrix(ar,mTcp,version);
// Scale
ar & const_cast<int&>(mnScaleLevels);
ar & const_cast<float&>(mfScaleFactor);
ar & const_cast<float&>(mfLogScaleFactor);
ar & const_cast<vector<float>& >(mvScaleFactors);
ar & const_cast<vector<float>& >(mvLevelSigma2);
ar & const_cast<vector<float>& >(mvInvLevelSigma2);
// Image bounds and calibration
ar & const_cast<int&>(mnMinX);
ar & const_cast<int&>(mnMinY);
ar & const_cast<int&>(mnMaxX);
ar & const_cast<int&>(mnMaxY);
serializeMatrix(ar,mK,version);
// Pose
serializeMatrix(ar,Tcw,version);
// MapPointsId associated to keypoints
ar & mvBackupMapPointsId;
// Grid
ar & mGrid;
// Connected KeyFrameWeight
ar & mBackupConnectedKeyFrameIdWeights;
// Spanning Tree and Loop Edges
ar & mbFirstConnection;
ar & mBackupParentId;
ar & mvBackupChildrensId;
ar & mvBackupLoopEdgesId;
ar & mvBackupMergeEdgesId;
// Bad flags
ar & mbNotErase;
ar & mbToBeErased;
ar & mbBad;

ar & mHalfBaseline;

// Camera variables
ar & mnBackupIdCamera;
ar & mnBackupIdCamera2;

// Fisheye variables
/*ar & mvLeftToRightMatch;
ar & mvRightToLeftMatch;
ar & NLeft;
ar & NRight;
serializeMatrix(ar, mTlr, version);
//serializeMatrix(ar, mTrl, version);
serializeVectorKeyPoints(ar, mvKeysRight, version);
ar & mGridRight;

// Inertial variables
ar & mImuBias;
ar & mBackupImuPreintegrated;
ar & mImuCalib;
ar & mBackupPrevKFId;
ar & mBackupNextKFId;
ar & bImu;
serializeMatrix(ar, Vw, version);
serializeMatrix(ar, Owb, version);*/

}

public:
KeyFrame();
KeyFrame(Frame &F, Map* pMap, KeyFrameDatabase* pKFDB);

@@ -297,6 +70,16 @@ public:
cv::Mat GetTranslation();
cv::Mat GetVelocity();

cv::Matx33f GetRotation_();
cv::Matx31f GetTranslation_();
cv::Matx31f GetCameraCenter_();
cv::Matx33f GetRightRotation_();
cv::Matx31f GetRightTranslation_();
cv::Matx44f GetRightPose_();
cv::Matx31f GetRightCameraCenter_();
cv::Matx44f GetPose_();

// Bag of Words Representation
void ComputeBoW();

@@ -343,6 +126,7 @@ public:
// KeyPoint functions
std::vector<size_t> GetFeaturesInArea(const float &x, const float &y, const float &r, const bool bRight = false) const;
cv::Mat UnprojectStereo(int i);
cv::Matx31f UnprojectStereo_(int i);

// Image
bool IsInImage(const float &x, const float &y) const;

@@ -377,10 +161,6 @@ public:
bool ProjectPointDistort(MapPoint* pMP, cv::Point2f &kp, float &u, float &v);
bool ProjectPointUnDistort(MapPoint* pMP, cv::Point2f &kp, float &u, float &v);

void PreSave(set<KeyFrame*>& spKF,set<MapPoint*>& spMP, set<GeometricCamera*>& spCam);
void PostLoad(map<long unsigned int, KeyFrame*>& mpKFid, map<long unsigned int, MapPoint*>& mpMPid, map<unsigned int, GeometricCamera*>& mpCamId);

void SetORBVocabulary(ORBVocabulary* pORBVoc);
void SetKeyFrameDatabase(KeyFrameDatabase* pKFDB);

@@ -516,6 +296,9 @@ protected:
cv::Mat Ow;
cv::Mat Cw; // Stereo middel point. Only for visualization

cv::Matx44f Tcw_, Twc_, Tlr_;
cv::Matx31f Ow_;

// IMU position
cv::Mat Owb;

@@ -527,8 +310,6 @@ protected:

// MapPoints associated to keypoints
std::vector<MapPoint*> mvpMapPoints;
// For save relation without pointer, this is necessary for save/load function
std::vector<long long int> mvBackupMapPointsId;

// BoW
KeyFrameDatabase* mpKeyFrameDB;

@@ -540,8 +321,6 @@ protected:
std::map<KeyFrame*,int> mConnectedKeyFrameWeights;
std::vector<KeyFrame*> mvpOrderedConnectedKeyFrames;
std::vector<int> mvOrderedWeights;
// For save relation without pointer, this is necessary for save/load function
std::map<long unsigned int, int> mBackupConnectedKeyFrameIdWeights;

// Spanning Tree and Loop Edges
bool mbFirstConnection;

@@ -549,11 +328,6 @@ protected:
std::set<KeyFrame*> mspChildrens;
std::set<KeyFrame*> mspLoopEdges;
std::set<KeyFrame*> mspMergeEdges;
// For save relation without pointer, this is necessary for save/load function
long long int mBackupParentId;
std::vector<long unsigned int> mvBackupChildrensId;
std::vector<long unsigned int> mvBackupLoopEdgesId;
std::vector<long unsigned int> mvBackupMergeEdgesId;

// Bad flags
bool mbNotErase;

@@ -569,14 +343,6 @@ protected:
std::mutex mMutexFeatures;
std::mutex mMutexMap;

// Backup variables for inertial
long long int mBackupPrevKFId;
long long int mBackupNextKFId;
IMU::Preintegrated mBackupImuPreintegrated;

// Backup for Cameras
unsigned int mnBackupIdCamera, mnBackupIdCamera2;

public:
GeometricCamera* mpCamera, *mpCamera2;

@@ -601,7 +367,7 @@ public:
cv::Mat GetRightRotation();
cv::Mat GetRightTranslation();

cv::Mat imgLeft, imgRight; //TODO Backup??
cv::Mat imgLeft, imgRight;

void PrintPointDistribution(){
int left = 0, right = 0;
@@ -46,13 +46,6 @@ class Map;

class KeyFrameDatabase
{
friend class boost::serialization::access;

template<class Archive>
void serialize(Archive& ar, const unsigned int version)
{
ar & mvBackupInvertedFileId;
}

public:

@@ -76,8 +69,6 @@ public:
// Relocalization
std::vector<KeyFrame*> DetectRelocalizationCandidates(Frame* F, Map* pMap);

void PreSave();
void PostLoad(map<long unsigned int, KeyFrame*> mpKFid);
void SetORBVocabulary(ORBVocabulary* pORBVoc);

protected:

@@ -88,9 +79,6 @@ protected:
// Inverted file
std::vector<list<KeyFrame*> > mvInvertedFile;

// For save relation without pointer, this is necessary for save/load function
std::vector<list<long unsigned int> > mvBackupInvertedFileId;

// Mutex
std::mutex mMutex;
};
@@ -95,11 +95,6 @@ public:
double mFirstTs;
int mnMatchesInliers;

// For debugging (erase in normal mode)
int mInitFr;
int mIdxIteration;
string strSequence;

bool mbNotBA1;
bool mbNotBA2;
bool mbBadImu;

@@ -109,6 +104,25 @@ public:
// not consider far points (clouds)
bool mbFarPoints;
float mThFarPoints;

#ifdef REGISTER_TIMES
vector<double> vdKFInsert_ms;
vector<double> vdMPCulling_ms;
vector<double> vdMPCreation_ms;
vector<double> vdLBA_ms;
vector<double> vdKFCulling_ms;
vector<double> vdLMTotal_ms;

vector<double> vdLBASync_ms;
vector<double> vdKFCullingSync_ms;
vector<int> vnLBA_edges;
vector<int> vnLBA_KFopt;
vector<int> vnLBA_KFfixed;
vector<int> vnLBA_MPs;
int nLBA_exec;
int nLBA_abort;
#endif
protected:

bool CheckNewKeyFrames();

@@ -120,8 +134,10 @@ protected:
void KeyFrameCulling();

cv::Mat ComputeF12(KeyFrame* &pKF1, KeyFrame* &pKF2);
cv::Matx33f ComputeF12_(KeyFrame* &pKF1, KeyFrame* &pKF2);

cv::Mat SkewSymmetricMatrix(const cv::Mat &v);
cv::Matx33f SkewSymmetricMatrix_(const cv::Matx31f &v);

System *mpSystem;
@@ -25,6 +25,7 @@
#include "Atlas.h"
#include "ORBVocabulary.h"
#include "Tracking.h"
#include "Config.h"

#include "KeyFrameDatabase.h"

@@ -48,7 +49,7 @@ public:

typedef pair<set<KeyFrame*>,int> ConsistentGroup;
typedef map<KeyFrame*,g2o::Sim3,std::less<KeyFrame*>,
Eigen::aligned_allocator<std::pair<const KeyFrame*, g2o::Sim3> > > KeyFrameAndPose;
Eigen::aligned_allocator<std::pair<KeyFrame* const, g2o::Sim3> > > KeyFrameAndPose;

public:

@@ -86,6 +87,26 @@ public:

EIGEN_MAKE_ALIGNED_OPERATOR_NEW

#ifdef REGISTER_TIMES
double timeDetectBoW;

std::vector<double> vTimeBoW_ms;
std::vector<double> vTimeSE3_ms;
std::vector<double> vTimePRTotal_ms;

std::vector<double> vTimeLoopFusion_ms;
std::vector<double> vTimeLoopEssent_ms;
std::vector<double> vTimeLoopTotal_ms;

std::vector<double> vTimeMergeFusion_ms;
std::vector<double> vTimeMergeBA_ms;
std::vector<double> vTimeMergeTotal_ms;

std::vector<double> vTimeFullGBA_ms;
std::vector<double> vTimeMapUpdate_ms;
std::vector<double> vTimeGBATotal_ms;
#endif

protected:

bool CheckNewKeyFrames();

@@ -112,9 +133,6 @@ protected:
void MergeLocal();
void MergeLocal2();

void CheckObservations(set<KeyFrame*> &spKFsMap1, set<KeyFrame*> &spKFsMap2);
void printReprojectionError(set<KeyFrame*> &spLocalWindowKFs, KeyFrame* mpCurrentKF, string &name);

void ResetIfRequested();
bool mbResetRequested;
bool mbResetActiveMapRequested;
@ -65,8 +65,6 @@ namespace ORB_SLAM3{
|
|||
void SetRansacParameters(double probability = 0.99, int minInliers = 8, int maxIterations = 300, int minSet = 6, float epsilon = 0.4,
|
||||
float th2 = 5.991);
|
||||
|
||||
//Is the Find method necessary?
|
||||
|
||||
cv::Mat iterate(int nIterations, bool &bNoMore, vector<bool> &vbInliers, int &nInliers);
|
||||
|
||||
//Type definitions needed by the original code
|
||||
|
|
|
@ -37,39 +37,10 @@ class MapPoint;
|
|||
class KeyFrame;
|
||||
class Atlas;
|
||||
class KeyFrameDatabase;
|
||||
class GeometricCamera;
|
||||
|
||||
class Map
|
||||
{
|
||||
friend class boost::serialization::access;
|
||||
|
||||
template<class Archive>
|
||||
void serialize(Archive &ar, const unsigned int version)
|
||||
{
|
||||
ar & mnId;
|
||||
ar & mnInitKFid;
|
||||
ar & mnMaxKFid;
|
||||
ar & mnBigChangeIdx;
|
||||
// Set of KeyFrames and MapPoints, in this version the set serializator is not working
|
||||
//ar & mspKeyFrames;
|
||||
//ar & mspMapPoints;
|
||||
|
||||
ar & mvpBackupKeyFrames;
|
||||
ar & mvpBackupMapPoints;
|
||||
|
||||
ar & mvBackupKeyFrameOriginsId;
|
||||
|
||||
ar & mnBackupKFinitialID;
|
||||
ar & mnBackupKFlowerID;
|
||||
|
||||
ar & mbImuInitialized;
|
||||
ar & mbIsInertial;
|
||||
ar & mbIMU_BA1;
|
||||
ar & mbIMU_BA2;
|
||||
|
||||
ar & mnInitKFid;
|
||||
ar & mnMaxKFid;
|
||||
ar & mnLastLoopKFid;
|
||||
}
|
||||
|
||||
public:
|
||||
Map();
|
||||
|
@ -134,11 +105,6 @@ public:
|
|||
|
||||
unsigned int GetLowerKFID();
|
||||
|
||||
void PreSave(std::set<GeometricCamera*> &spCams);
|
||||
void PostLoad(KeyFrameDatabase* pKFDB, ORBVocabulary* pORBVoc, map<long unsigned int, KeyFrame*>& mpKeyFrameId, map<unsigned int, GeometricCamera*> &mpCams);
|
||||
|
||||
void printReprojectionError(list<KeyFrame*> &lpLocalWindowKFs, KeyFrame* mpCurrentKF, string &name, string &name_folder);
|
||||
|
||||
vector<KeyFrame*> mvpKeyFrameOrigins;
|
||||
vector<unsigned long int> mvBackupKeyFrameOriginsId;
|
||||
KeyFrame* mpFirstRegionKF;
|
||||
|
@ -162,15 +128,9 @@ protected:
|
|||
std::set<MapPoint*> mspMapPoints;
|
||||
std::set<KeyFrame*> mspKeyFrames;
|
||||
|
||||
std::vector<MapPoint*> mvpBackupMapPoints;
|
||||
std::vector<KeyFrame*> mvpBackupKeyFrames;
|
||||
|
||||
KeyFrame* mpKFinitial;
|
||||
KeyFrame* mpKFlowerID;
|
||||
|
||||
unsigned long int mnBackupKFinitialID;
|
||||
unsigned long int mnBackupKFlowerID;
|
||||
|
||||
std::vector<MapPoint*> mvpReferenceMapPoints;
|
||||
|
||||
bool mbImuInitialized;
|
||||
|
@ -193,7 +153,7 @@ protected:
bool mHasTumbnail;
bool mbBad = false;

bool mbIsInertial; // flag indicating whether the map uses an IMU
bool mbIsInertial;
bool mbIMU_BA1;
bool mbIMU_BA2;

@ -40,89 +40,6 @@ class Frame;
|
|||
|
||||
class MapPoint
|
||||
{
|
||||
template<class Archive>
|
||||
void serializeMatrix(Archive &ar, cv::Mat& mat, const unsigned int version)
|
||||
{
|
||||
int cols, rows, type;
|
||||
bool continuous;
|
||||
|
||||
if (Archive::is_saving::value) {
|
||||
cols = mat.cols; rows = mat.rows; type = mat.type();
|
||||
continuous = mat.isContinuous();
|
||||
}
|
||||
|
||||
ar & cols & rows & type & continuous;
|
||||
if (Archive::is_loading::value)
|
||||
mat.create(rows, cols, type);
|
||||
|
||||
if (continuous) {
|
||||
const unsigned int data_size = rows * cols * mat.elemSize();
|
||||
ar & boost::serialization::make_array(mat.ptr(), data_size);
|
||||
} else {
|
||||
const unsigned int row_size = cols*mat.elemSize();
|
||||
for (int i = 0; i < rows; i++) {
|
||||
ar & boost::serialization::make_array(mat.ptr(i), row_size);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
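The helper above streams a `cv::Mat` element by element through Boost.Serialization. For readers unfamiliar with the `ar & member` idiom it builds on, here is a minimal, self-contained round trip with a toy type (the struct and file name are illustrative only, not part of ORB_SLAM3):

```cpp
#include <fstream>
#include <boost/archive/text_iarchive.hpp>
#include <boost/archive/text_oarchive.hpp>

struct DemoPose {                 // illustrative type
    unsigned long id;
    double x, y, z;

    template<class Archive>
    void serialize(Archive &ar, const unsigned int /*version*/) {
        ar & id;                  // operator& writes when saving, reads when loading
        ar & x; ar & y; ar & z;
    }
};

int main() {
    {
        std::ofstream ofs("pose.txt");
        boost::archive::text_oarchive oa(ofs);
        DemoPose p{42, 1.0, 2.0, 3.0};
        oa << p;                  // save
    }
    std::ifstream ifs("pose.txt");
    boost::archive::text_iarchive ia(ifs);
    DemoPose q{};
    ia >> q;                      // load
    return 0;
}
```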
friend class boost::serialization::access;
|
||||
template<class Archive>
|
||||
void serialize(Archive & ar, const unsigned int version)
|
||||
{
|
||||
ar & mnId;
|
||||
ar & mnFirstKFid;
|
||||
ar & mnFirstFrame;
|
||||
ar & nObs;
|
||||
// Variables used by the tracking
|
||||
ar & mTrackProjX;
|
||||
ar & mTrackProjY;
|
||||
ar & mTrackDepth;
|
||||
ar & mTrackDepthR;
|
||||
ar & mTrackProjXR;
|
||||
ar & mTrackProjYR;
|
||||
ar & mbTrackInView;
|
||||
ar & mbTrackInViewR;
|
||||
ar & mnTrackScaleLevel;
|
||||
ar & mnTrackScaleLevelR;
|
||||
ar & mTrackViewCos;
|
||||
ar & mTrackViewCosR;
|
||||
ar & mnTrackReferenceForFrame;
|
||||
ar & mnLastFrameSeen;
|
||||
|
||||
// Variables used by local mapping
|
||||
ar & mnBALocalForKF;
|
||||
ar & mnFuseCandidateForKF;
|
||||
|
||||
// Variables used by loop closing and merging
|
||||
ar & mnLoopPointForKF;
|
||||
ar & mnCorrectedByKF;
|
||||
ar & mnCorrectedReference;
|
||||
serializeMatrix(ar,mPosGBA,version);
|
||||
ar & mnBAGlobalForKF;
|
||||
ar & mnBALocalForMerge;
|
||||
serializeMatrix(ar,mPosMerge,version);
|
||||
serializeMatrix(ar,mNormalVectorMerge,version);
|
||||
|
||||
// Protected variables
|
||||
serializeMatrix(ar,mWorldPos,version);
|
||||
//ar & BOOST_SERIALIZATION_NVP(mBackupObservationsId);
|
||||
ar & mBackupObservationsId1;
|
||||
ar & mBackupObservationsId2;
|
||||
serializeMatrix(ar,mNormalVector,version);
|
||||
serializeMatrix(ar,mDescriptor,version);
|
||||
ar & mBackupRefKFId;
|
||||
ar & mnVisible;
|
||||
ar & mnFound;
|
||||
|
||||
ar & mbBad;
|
||||
ar & mBackupReplacedId;
|
||||
|
||||
ar & mfMinDistance;
|
||||
ar & mfMaxDistance;
|
||||
|
||||
}
|
||||
|
||||
|
||||
public:
|
||||
MapPoint();
|
||||
|
@ -136,6 +53,11 @@ public:
|
|||
cv::Mat GetWorldPos();
|
||||
|
||||
cv::Mat GetNormal();
|
||||
|
||||
cv::Matx31f GetWorldPos2();
|
||||
|
||||
cv::Matx31f GetNormal2();
|
||||
|
||||
KeyFrame* GetReferenceKeyFrame();
|
||||
|
||||
std::map<KeyFrame*,std::tuple<int,int>> GetObservations();
|
||||
|
@ -175,11 +97,6 @@ public:
|
|||
Map* GetMap();
|
||||
void UpdateMap(Map* pMap);
|
||||
|
||||
void PrintObservations();
|
||||
|
||||
void PreSave(set<KeyFrame*>& spKF,set<MapPoint*>& spMP);
|
||||
void PostLoad(map<long unsigned int, KeyFrame*>& mpKFid, map<long unsigned int, MapPoint*>& mpMPid);
|
||||
|
||||
public:
|
||||
long unsigned int mnId;
|
||||
static long unsigned int nNextId;
|
||||
|
@ -231,22 +148,20 @@ protected:
|
|||
|
||||
// Position in absolute coordinates
|
||||
cv::Mat mWorldPos;
|
||||
cv::Matx31f mWorldPosx;
|
||||
|
||||
// Keyframes observing the point and associated index in keyframe
|
||||
std::map<KeyFrame*,std::tuple<int,int> > mObservations;
|
||||
// For save relation without pointer, this is necessary for save/load function
|
||||
std::map<long unsigned int, int> mBackupObservationsId1;
|
||||
std::map<long unsigned int, int> mBackupObservationsId2;
|
||||
|
||||
// Mean viewing direction
|
||||
cv::Mat mNormalVector;
|
||||
cv::Matx31f mNormalVectorx;
|
||||
|
||||
// Best descriptor to fast matching
|
||||
cv::Mat mDescriptor;
|
||||
|
||||
// Reference KeyFrame
|
||||
KeyFrame* mpRefKF;
|
||||
long unsigned int mBackupRefKFId;
|
||||
|
||||
// Tracking counters
|
||||
int mnVisible;
|
||||
|
@ -255,8 +170,6 @@ protected:
|
|||
// Bad flag (we do not currently erase MapPoint from memory)
|
||||
bool mbBad;
|
||||
MapPoint* mpReplaced;
|
||||
// For save relation without pointer, this is necessary for save/load function
|
||||
long long int mBackupReplacedId;
|
||||
|
||||
// Scale invariance distances
|
||||
float mfMinDistance;
|
||||
|
|
|
@ -21,7 +21,7 @@
|
|||
|
||||
#include <vector>
|
||||
#include <list>
|
||||
#include <opencv/cv.h>
|
||||
#include <opencv2/opencv.hpp>
|
||||
|
||||
|
||||
namespace ORB_SLAM3
|
||||
|
|
|
@ -74,6 +74,9 @@ public:
|
|||
int SearchForTriangulation(KeyFrame *pKF1, KeyFrame* pKF2, cv::Mat F12,
|
||||
std::vector<pair<size_t, size_t> > &vMatchedPairs, const bool bOnlyStereo, const bool bCoarse = false);
|
||||
|
||||
int SearchForTriangulation_(KeyFrame *pKF1, KeyFrame* pKF2, cv::Matx33f F12,
|
||||
std::vector<pair<size_t, size_t> > &vMatchedPairs, const bool bOnlyStereo, const bool bCoarse = false);
|
||||
|
||||
int SearchForTriangulation(KeyFrame *pKF1, KeyFrame *pKF2, cv::Mat F12,
|
||||
vector<pair<size_t, size_t> > &vMatchedPairs, const bool bOnlyStereo, vector<cv::Mat> &vMatchedPoints);
|
||||
|
||||
|
|
|
@ -55,7 +55,7 @@ public:
|
|||
void static FullInertialBA(Map *pMap, int its, const bool bFixLocal=false, const unsigned long nLoopKF=0, bool *pbStopFlag=NULL, bool bInit=false, float priorG = 1e2, float priorA=1e6, Eigen::VectorXd *vSingVal = NULL, bool *bHess=NULL);
|
||||
|
||||
void static LocalBundleAdjustment(KeyFrame* pKF, bool *pbStopFlag, vector<KeyFrame*> &vpNonEnoughOptKFs);
|
||||
void static LocalBundleAdjustment(KeyFrame* pKF, bool *pbStopFlag, Map *pMap, int& num_fixedKF);
|
||||
void static LocalBundleAdjustment(KeyFrame* pKF, bool *pbStopFlag, Map *pMap, int& num_fixedKF, int& num_OptKF, int& num_MPs, int& num_edges);
|
||||
|
||||
void static MergeBundleAdjustmentVisual(KeyFrame* pCurrentKF, vector<KeyFrame*> vpWeldingKFs, vector<KeyFrame*> vpFixedKFs, bool *pbStopFlag);
|
||||
|
||||
|
@ -96,7 +96,7 @@ public:
|
|||
|
||||
// For inertial systems
|
||||
|
||||
void static LocalInertialBA(KeyFrame* pKF, bool *pbStopFlag, Map *pMap, bool bLarge = false, bool bRecInit = false);
|
||||
void static LocalInertialBA(KeyFrame* pKF, bool *pbStopFlag, Map *pMap, int& num_fixedKF, int& num_OptKF, int& num_MPs, int& num_edges, bool bLarge = false, bool bRecInit = false);
|
||||
|
||||
void static MergeInertialBA(KeyFrame* pCurrKF, KeyFrame* pMergeKF, bool *pbStopFlag, Map *pMap, LoopClosing::KeyFrameAndPose &corrPoses);
|
||||
|
||||
|
|
|
@ -20,8 +20,6 @@
|
|||
#ifndef SYSTEM_H
|
||||
#define SYSTEM_H
|
||||
|
||||
//#define SAVE_TIMES
|
||||
|
||||
#include <unistd.h>
|
||||
#include<stdio.h>
|
||||
#include<stdlib.h>
|
||||
|
@ -39,6 +37,7 @@
|
|||
#include "ORBVocabulary.h"
|
||||
#include "Viewer.h"
|
||||
#include "ImuTypes.h"
|
||||
#include "Config.h"
|
||||
|
||||
|
||||
namespace ORB_SLAM3
|
||||
|
@ -162,9 +161,6 @@ public:
|
|||
void SaveTrajectoryEuRoC(const string &filename);
|
||||
void SaveKeyFrameTrajectoryEuRoC(const string &filename);
|
||||
|
||||
// Save data used for initialization debug
|
||||
void SaveDebugData(const int &iniIdx);
|
||||
|
||||
// Save camera trajectory in the KITTI dataset format.
|
||||
// Only for stereo and RGB-D. This method does not work for monocular.
|
||||
// Call first Shutdown()
|
||||
|
@ -188,14 +184,14 @@ public:
|
|||
|
||||
void ChangeDataset();
|
||||
|
||||
//void SaveAtlas(int type);
|
||||
#ifdef REGISTER_TIMES
|
||||
void InsertRectTime(double& time);
|
||||
|
||||
void InsertTrackTime(double& time);
|
||||
#endif
|
||||
|
||||
private:
|
||||
|
||||
//bool LoadAtlas(string filename, int type);
|
||||
|
||||
//string CalculateCheckSum(string filename, int type);
|
||||
|
||||
// Input sensor
|
||||
eSensor mSensor;
|
||||
|
||||
|
@ -205,8 +201,7 @@ private:
|
|||
// KeyFrame database for place recognition (relocalization and loop detection).
|
||||
KeyFrameDatabase* mpKeyFrameDatabase;
|
||||
|
||||
// Map structure that stores the pointers to all KeyFrames and MapPoints.
|
||||
//Map* mpMap;
|
||||
// Atlas structure that stores the pointers to all KeyFrames and MapPoints.
|
||||
Atlas* mpAtlas;
|
||||
|
||||
// Tracker. It receives a frame and computes the associated camera pose.
|
||||
|
|
|
@ -159,14 +159,30 @@ public:
|
|||
|
||||
vector<MapPoint*> GetLocalMapMPS();
|
||||
|
||||
|
||||
//TEST--
|
||||
bool mbNeedRectify;
|
||||
//cv::Mat M1l, M2l;
|
||||
//cv::Mat M1r, M2r;
|
||||
|
||||
bool mbWriteStats;
|
||||
|
||||
#ifdef REGISTER_TIMES
|
||||
void LocalMapStats2File();
|
||||
void TrackStats2File();
|
||||
void PrintTimeStats();
|
||||
|
||||
vector<double> vdRectStereo_ms;
|
||||
vector<double> vdORBExtract_ms;
|
||||
vector<double> vdStereoMatch_ms;
|
||||
vector<double> vdIMUInteg_ms;
|
||||
vector<double> vdPosePred_ms;
|
||||
vector<double> vdLMTrack_ms;
|
||||
vector<double> vdNewKF_ms;
|
||||
vector<double> vdTrackTotal_ms;
|
||||
|
||||
vector<double> vdUpdatedLM_ms;
|
||||
vector<double> vdSearchLP_ms;
|
||||
vector<double> vdPoseOpt_ms;
|
||||
#endif
|
||||
|
||||
vector<int> vnKeyFramesLM;
|
||||
vector<int> vnMapPointsLM;
|
||||
|
||||
protected:
|
||||
|
||||
// Main tracking function. It is independent of the input sensor.
|
||||
|
@ -204,7 +220,6 @@ protected:
|
|||
void PreintegrateIMU();
|
||||
|
||||
// Reset IMU biases and compute frame velocity
|
||||
void ResetFrameIMU();
|
||||
void ComputeGyroBias(const vector<Frame*> &vpFs, float &bwx, float &bwy, float &bwz);
|
||||
void ComputeVelocitiesAccBias(const vector<Frame*> &vpFs, float &bax, float &bay, float &baz);
|
||||
|
||||
|
@ -296,6 +311,7 @@ protected:
|
|||
unsigned int mnLastRelocFrameId;
|
||||
double mTimeStampLost;
|
||||
double time_recently_lost;
|
||||
double time_recently_lost_visual;
|
||||
|
||||
unsigned int mnFirstFrameId;
|
||||
unsigned int mnInitialFrameId;
|
||||
|
|
src/Atlas.cc
|
@ -260,81 +260,6 @@ bool Atlas::isImuInitialized()
|
|||
return mpCurrentMap->isImuInitialized();
|
||||
}
|
||||
|
||||
void Atlas::PreSave()
|
||||
{
|
||||
if(mpCurrentMap){
|
||||
if(!mspMaps.empty() && mnLastInitKFidMap < mpCurrentMap->GetMaxKFid())
|
||||
mnLastInitKFidMap = mpCurrentMap->GetMaxKFid()+1; //The init KF is the next of current maximum
|
||||
}
|
||||
|
||||
struct compFunctor
|
||||
{
|
||||
inline bool operator()(Map* elem1 ,Map* elem2)
|
||||
{
|
||||
return elem1->GetId() < elem2->GetId();
|
||||
}
|
||||
};
|
||||
std::copy(mspMaps.begin(), mspMaps.end(), std::back_inserter(mvpBackupMaps));
|
||||
sort(mvpBackupMaps.begin(), mvpBackupMaps.end(), compFunctor());
|
||||
|
||||
std::set<GeometricCamera*> spCams(mvpCameras.begin(), mvpCameras.end());
|
||||
cout << "There are " << spCams.size() << " cameras in the atlas" << endl;
|
||||
for(Map* pMi : mvpBackupMaps)
|
||||
{
|
||||
cout << "Pre-save of map " << pMi->GetId() << endl;
|
||||
pMi->PreSave(spCams);
|
||||
}
|
||||
cout << "Maps stored" << endl;
|
||||
for(GeometricCamera* pCam : mvpCameras)
|
||||
{
|
||||
cout << "Pre-save of camera " << pCam->GetId() << endl;
|
||||
if(pCam->GetType() == pCam->CAM_PINHOLE)
|
||||
{
|
||||
mvpBackupCamPin.push_back((Pinhole*) pCam);
|
||||
}
|
||||
else if(pCam->GetType() == pCam->CAM_FISHEYE)
|
||||
{
|
||||
mvpBackupCamKan.push_back((KannalaBrandt8*) pCam);
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
void Atlas::PostLoad()
|
||||
{
|
||||
mvpCameras.clear();
|
||||
map<unsigned int,GeometricCamera*> mpCams;
|
||||
for(Pinhole* pCam : mvpBackupCamPin)
|
||||
{
|
||||
//mvpCameras.push_back((GeometricCamera*)pCam);
|
||||
mvpCameras.push_back(pCam);
|
||||
mpCams[pCam->GetId()] = pCam;
|
||||
}
|
||||
for(KannalaBrandt8* pCam : mvpBackupCamKan)
|
||||
{
|
||||
//mvpCameras.push_back((GeometricCamera*)pCam);
|
||||
mvpCameras.push_back(pCam);
|
||||
mpCams[pCam->GetId()] = pCam;
|
||||
}
|
||||
|
||||
mspMaps.clear();
|
||||
unsigned long int numKF = 0, numMP = 0;
|
||||
map<long unsigned int, KeyFrame*> mpAllKeyFrameId;
|
||||
for(Map* pMi : mvpBackupMaps)
|
||||
{
|
||||
cout << "Map id:" << pMi->GetId() << endl;
|
||||
mspMaps.insert(pMi);
|
||||
map<long unsigned int, KeyFrame*> mpKeyFrameId;
|
||||
pMi->PostLoad(mpKeyFrameDB, mpORBVocabulary, mpKeyFrameId, mpCams);
|
||||
mpAllKeyFrameId.insert(mpKeyFrameId.begin(), mpKeyFrameId.end());
|
||||
numKF += pMi->GetAllKeyFrames().size();
|
||||
numMP += pMi->GetAllMapPoints().size();
|
||||
}
|
||||
|
||||
cout << "Number KF:" << numKF << "; number MP:" << numMP << endl;
|
||||
mvpBackupMaps.clear();
|
||||
}
|
||||
|
||||
void Atlas::SetKeyFrameDababase(KeyFrameDatabase* pKFDB)
|
||||
{
|
||||
mpKeyFrameDB = pKFDB;
|
||||
|
|
|
@ -20,10 +20,7 @@
|
|||
|
||||
#include <boost/serialization/export.hpp>
|
||||
|
||||
//BOOST_CLASS_EXPORT_IMPLEMENT(ORB_SLAM3::KannalaBrandt8)
|
||||
|
||||
namespace ORB_SLAM3 {
|
||||
//BOOST_CLASS_EXPORT_GUID(KannalaBrandt8, "KannalaBrandt8")
|
||||
|
||||
cv::Point2f KannalaBrandt8::project(const cv::Point3f &p3D) {
|
||||
const float x2_plus_y2 = p3D.x * p3D.x + p3D.y * p3D.y;
|
||||
|
@ -43,6 +40,10 @@ namespace ORB_SLAM3 {
|
|||
|
||||
}
|
||||
|
||||
cv::Point2f KannalaBrandt8::project(const cv::Matx31f &m3D) {
|
||||
return this->project(cv::Point3f(m3D(0),m3D(1),m3D(2)));
|
||||
}
|
||||
|
||||
cv::Point2f KannalaBrandt8::project(const cv::Mat &m3D) {
|
||||
const float* p3D = m3D.ptr<float>();
|
||||
|
||||
|
@ -67,14 +68,6 @@ namespace ORB_SLAM3 {
|
|||
res[1] = mvParameters[1] * r * sin(psi) + mvParameters[3];
|
||||
|
||||
return res;
|
||||
|
||||
/*cv::Point2f cvres = this->project(cv::Point3f(v3D[0],v3D[1],v3D[2]));
|
||||
|
||||
Eigen::Vector2d res;
|
||||
res[0] = cvres.x;
|
||||
res[1] = cvres.y;
|
||||
|
||||
return res;*/
|
||||
}
|
||||
|
||||
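For reference, the projection these methods implement follows the Kannala-Brandt fisheye model. Summarizing the standard form of that model (with theta the angle between the incoming ray and the optical axis and psi = atan2(y, x)), rather than anything taken verbatim from this file:

```latex
r(\theta) = \theta + k_1\theta^3 + k_2\theta^5 + k_3\theta^7 + k_4\theta^9, \qquad
u = f_x\, r(\theta)\cos\psi + c_x, \qquad
v = f_y\, r(\theta)\sin\psi + c_y
```

with `mvParameters` holding f_x, f_y, c_x, c_y, k_1..k_4, consistent with the `res[1] = mvParameters[1] * r * sin(psi) + mvParameters[3]` line shown above.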
cv::Mat KannalaBrandt8::projectMat(const cv::Point3f &p3D) {
|
||||
|
@ -85,12 +78,6 @@ namespace ORB_SLAM3 {
|
|||
|
||||
float KannalaBrandt8::uncertainty2(const Eigen::Matrix<double,2,1> &p2D)
|
||||
{
|
||||
/*Eigen::Matrix<double,2,1> c;
|
||||
c << mvParameters[2], mvParameters[3];
|
||||
if ((p2D-c).squaredNorm()>57600) // 240*240 (256)
|
||||
return 100.f;
|
||||
else
|
||||
return 1.0f;*/
|
||||
return 1.f;
|
||||
}
|
||||
|
||||
|
@ -100,6 +87,12 @@ namespace ORB_SLAM3 {
|
|||
return ret.clone();
|
||||
}
|
||||
|
||||
cv::Matx31f KannalaBrandt8::unprojectMat_(const cv::Point2f &p2D) {
|
||||
cv::Point3f ray = this->unproject(p2D);
|
||||
cv::Matx31f r{ray.x, ray.y, ray.z};
|
||||
return r;
|
||||
}
|
||||
|
||||
cv::Point3f KannalaBrandt8::unproject(const cv::Point2f &p2D) {
|
||||
//Use Newton's method to solve for theta with good precision (err ~ e-6)
|
||||
cv::Point2f pw((p2D.x - mvParameters[2]) / mvParameters[0], (p2D.y - mvParameters[3]) / mvParameters[1]);
|
||||
|
@ -232,11 +225,22 @@ namespace ORB_SLAM3 {
|
|||
return K;
|
||||
}
|
||||
|
||||
cv::Matx33f KannalaBrandt8::toK_() {
|
||||
cv::Matx33f K{mvParameters[0], 0.f, mvParameters[2], 0.f, mvParameters[1], mvParameters[3], 0.f, 0.f, 1.f};
|
||||
|
||||
return K;
|
||||
}
|
||||
|
||||
bool KannalaBrandt8::epipolarConstrain(GeometricCamera* pCamera2, const cv::KeyPoint &kp1, const cv::KeyPoint &kp2, const cv::Mat &R12, const cv::Mat &t12, const float sigmaLevel, const float unc) {
|
||||
cv::Mat p3D;
|
||||
return this->TriangulateMatches(pCamera2,kp1,kp2,R12,t12,sigmaLevel,unc,p3D) > 0.0001f;
|
||||
}
|
||||
|
||||
bool KannalaBrandt8::epipolarConstrain_(GeometricCamera* pCamera2, const cv::KeyPoint &kp1, const cv::KeyPoint &kp2, const cv::Matx33f &R12, const cv::Matx31f &t12, const float sigmaLevel, const float unc) {
|
||||
cv::Matx31f p3D;
|
||||
return this->TriangulateMatches_(pCamera2,kp1,kp2,R12,t12,sigmaLevel,unc,p3D) > 0.0001f;
|
||||
}
|
||||
|
||||
bool KannalaBrandt8::matchAndtriangulate(const cv::KeyPoint& kp1, const cv::KeyPoint& kp2, GeometricCamera* pOther,
|
||||
cv::Mat& Tcw1, cv::Mat& Tcw2,
|
||||
const float sigmaLevel1, const float sigmaLevel2,
|
||||
|
@ -357,8 +361,8 @@ namespace ORB_SLAM3 {
|
|||
|
||||
cv::Mat x3D;
|
||||
cv::Mat Tcw1 = (cv::Mat_<float>(3,4) << 1.f,0.f,0.f,0.f,
|
||||
0.f,1.f,0.f,0.f,
|
||||
0.f,0.f,1.f,0.f);
|
||||
0.f,1.f,0.f,0.f,
|
||||
0.f,0.f,1.f,0.f);
|
||||
cv::Mat Tcw2;
|
||||
cv::Mat R21 = R12.t();
|
||||
cv::Mat t21 = -R21*t12;
|
||||
|
@ -402,6 +406,79 @@ namespace ORB_SLAM3 {
|
|||
return z1;
|
||||
}
|
||||
|
||||
float KannalaBrandt8::TriangulateMatches_(GeometricCamera *pCamera2, const cv::KeyPoint &kp1, const cv::KeyPoint &kp2, const cv::Matx33f &R12, const cv::Matx31f &t12, const float sigmaLevel, const float unc, cv::Matx31f& p3D) {
|
||||
cv::Matx31f r1 = this->unprojectMat_(kp1.pt);
|
||||
cv::Matx31f r2 = pCamera2->unprojectMat_(kp2.pt);
|
||||
|
||||
//Check parallax
|
||||
cv::Matx31f r21 = R12*r2;
|
||||
|
||||
const float cosParallaxRays = r1.dot(r21)/(cv::norm(r1)*cv::norm(r21));
|
||||
|
||||
if(cosParallaxRays > 0.9998){
|
||||
return -1;
|
||||
}
|
||||
|
||||
//Parallax is good, so we try to triangulate
|
||||
cv::Point2f p11,p22;
|
||||
|
||||
p11.x = r1(0);
|
||||
p11.y = r1(1);
|
||||
|
||||
p22.x = r2(0);
|
||||
p22.y = r2(1);
|
||||
|
||||
cv::Matx31f x3D;
|
||||
cv::Matx44f Tcw1{1.f,0.f,0.f,0.f,
|
||||
0.f,1.f,0.f,0.f,
|
||||
0.f,0.f,1.f,0.f};
|
||||
|
||||
cv::Matx33f R21 = R12.t();
|
||||
cv::Matx31f t21 = -R21*t12;
|
||||
|
||||
cv::Matx44f Tcw2{R21(0,0),R21(0,1),R21(0,2),t21(0),
|
||||
R21(1,0),R21(1,1),R21(1,2),t21(1),
|
||||
R21(2,0),R21(2,1),R21(2,2),t21(2),
|
||||
0.f,0.f,0.f,1.f};
|
||||
|
||||
Triangulate_(p11,p22,Tcw1,Tcw2,x3D);
|
||||
cv::Matx13f x3Dt = x3D.t();
|
||||
|
||||
float z1 = x3D(2);
|
||||
if(z1 <= 0){
|
||||
return -1;
|
||||
}
|
||||
|
||||
float z2 = R21.row(2).dot(x3Dt)+t21(2);
|
||||
if(z2<=0){
|
||||
return -1;
|
||||
}
|
||||
|
||||
//Check reprojection error
|
||||
cv::Point2f uv1 = this->project(x3D);
|
||||
|
||||
float errX1 = uv1.x - kp1.pt.x;
|
||||
float errY1 = uv1.y - kp1.pt.y;
|
||||
|
||||
if((errX1*errX1+errY1*errY1)>5.991 * sigmaLevel){ //Reprojection error is high
|
||||
return -1;
|
||||
}
|
||||
|
||||
cv::Matx31f x3D2 = R21 * x3D + t21;
|
||||
cv::Point2f uv2 = pCamera2->project(x3D2);
|
||||
|
||||
float errX2 = uv2.x - kp2.pt.x;
|
||||
float errY2 = uv2.y - kp2.pt.y;
|
||||
|
||||
if((errX2*errX2+errY2*errY2)>5.991 * unc){ //Reprojection error is high
|
||||
return -1;
|
||||
}
|
||||
|
||||
p3D = x3D;
|
||||
|
||||
return z1;
|
||||
}
|
||||
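Two constants in `TriangulateMatches_` above deserve a note: the parallax gate discards nearly parallel rays, and 5.991 is the 95% quantile of a chi-square distribution with two degrees of freedom, scaled by the keypoint's pyramid-level variance. Written out:

```latex
\frac{r_1 \cdot (R_{12} r_2)}{\lVert r_1\rVert\,\lVert R_{12} r_2\rVert} > 0.9998
\;\Rightarrow\; \text{reject (rays within} \approx 1.15^\circ\text{)},
\qquad
\lVert \hat{u} - u \rVert^2 \le 5.991\,\sigma^2 \quad (\chi^2_{0.95,\,2})
```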
|
||||
std::ostream & operator<<(std::ostream &os, const KannalaBrandt8 &kb) {
|
||||
os << kb.mvParameters[0] << " " << kb.mvParameters[1] << " " << kb.mvParameters[2] << " " << kb.mvParameters[3] << " "
|
||||
<< kb.mvParameters[4] << " " << kb.mvParameters[5] << " " << kb.mvParameters[6] << " " << kb.mvParameters[7];
|
||||
|
@ -433,4 +510,27 @@ namespace ORB_SLAM3 {
|
|||
x3D = vt.row(3).t();
|
||||
x3D = x3D.rowRange(0,3)/x3D.at<float>(3);
|
||||
}
|
||||
|
||||
void KannalaBrandt8::Triangulate_(const cv::Point2f &p1, const cv::Point2f &p2, const cv::Matx44f &Tcw1, const cv::Matx44f &Tcw2, cv::Matx31f &x3D)
{
cv::Matx14f A0, A1, A2, A3;

A0 = p1.x * Tcw1.row(2) - Tcw1.row(0);
A1 = p1.y * Tcw1.row(2) - Tcw1.row(1);
A2 = p2.x * Tcw2.row(2) - Tcw2.row(0);
A3 = p2.y * Tcw2.row(2) - Tcw2.row(1);
cv::Matx44f A{A0(0), A0(1), A0(2), A0(3),
A1(0), A1(1), A1(2), A1(3),
A2(0), A2(1), A2(2), A2(3),
A3(0), A3(1), A3(2), A3(3)};

// cv::Mat u,w,vt;
cv::Matx44f u, vt;
cv::Matx41f w;

cv::SVD::compute(A,w,u,vt,cv::SVD::MODIFY_A| cv::SVD::FULL_UV);
cv::Matx41f x3D_h = vt.row(3).t();
x3D = x3D_h.get_minor<3,1>(0,0) / x3D_h(3);
}
}
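`Triangulate_` above is the standard linear (DLT) triangulation: each view contributes two rows built from its projection matrix, the stacked 4x4 system is solved in the least-squares sense via SVD, and the homogeneous solution is dehomogenized:

```latex
A =
\begin{bmatrix}
x_1 P_1^{(3)} - P_1^{(1)} \\
y_1 P_1^{(3)} - P_1^{(2)} \\
x_2 P_2^{(3)} - P_2^{(1)} \\
y_2 P_2^{(3)} - P_2^{(2)}
\end{bmatrix},
\qquad
A\,\tilde{X} \approx 0,\qquad
X = \frac{1}{\tilde{X}_4}\bigl(\tilde{X}_1,\ \tilde{X}_2,\ \tilde{X}_3\bigr)^{\top}
```

where P_i^{(k)} denotes the k-th row of the i-th pose matrix (here Tcw1 and Tcw2, acting on normalized image coordinates) and the homogeneous solution is the right singular vector of A with the smallest singular value.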
|
|
|
@ -20,10 +20,7 @@
|
|||
|
||||
#include <boost/serialization/export.hpp>
|
||||
|
||||
//BOOST_CLASS_EXPORT_IMPLEMENT(ORB_SLAM3::Pinhole)
|
||||
|
||||
namespace ORB_SLAM3 {
|
||||
//BOOST_CLASS_EXPORT_GUID(Pinhole, "Pinhole")
|
||||
|
||||
long unsigned int GeometricCamera::nNextId=0;
|
||||
|
||||
|
@ -32,6 +29,10 @@ namespace ORB_SLAM3 {
|
|||
mvParameters[1] * p3D.y / p3D.z + mvParameters[3]);
|
||||
}
|
||||
|
||||
cv::Point2f Pinhole::project(const cv::Matx31f &m3D) {
|
||||
return this->project(cv::Point3f(m3D(0),m3D(1),m3D(2)));
|
||||
}
|
||||
|
||||
cv::Point2f Pinhole::project(const cv::Mat &m3D) {
|
||||
const float* p3D = m3D.ptr<float>();
|
||||
|
||||
|
@ -66,6 +67,12 @@ namespace ORB_SLAM3 {
|
|||
return (cv::Mat_<float>(3,1) << ray.x, ray.y, ray.z);
|
||||
}
|
||||
|
||||
cv::Matx31f Pinhole::unprojectMat_(const cv::Point2f &p2D) {
|
||||
cv::Point3f ray = this->unproject(p2D);
|
||||
cv::Matx31f r{ray.x, ray.y, ray.z};
|
||||
return r;
|
||||
}
|
||||
|
||||
cv::Mat Pinhole::projectJac(const cv::Point3f &p3D) {
|
||||
cv::Mat Jac(2, 3, CV_32F);
|
||||
Jac.at<float>(0, 0) = mvParameters[0] / p3D.z;
|
||||
|
@ -119,6 +126,12 @@ namespace ORB_SLAM3 {
|
|||
return K;
|
||||
}
|
||||
|
||||
cv::Matx33f Pinhole::toK_() {
|
||||
cv::Matx33f K{mvParameters[0], 0.f, mvParameters[2], 0.f, mvParameters[1], mvParameters[3], 0.f, 0.f, 1.f};
|
||||
|
||||
return K;
|
||||
}
|
||||
|
||||
bool Pinhole::epipolarConstrain(GeometricCamera* pCamera2, const cv::KeyPoint &kp1, const cv::KeyPoint &kp2, const cv::Mat &R12, const cv::Mat &t12, const float sigmaLevel, const float unc) {
|
||||
//Compute Fundamental Matrix
|
||||
cv::Mat t12x = SkewSymmetricMatrix(t12);
|
||||
|
@ -143,6 +156,30 @@ namespace ORB_SLAM3 {
|
|||
return dsqr<3.84*unc;
|
||||
}
|
||||
|
||||
bool Pinhole::epipolarConstrain_(GeometricCamera *pCamera2, const cv::KeyPoint &kp1, const cv::KeyPoint &kp2, const cv::Matx33f &R12, const cv::Matx31f &t12, const float sigmaLevel, const float unc) {
//Compute Fundamental Matrix
auto t12x = SkewSymmetricMatrix_(t12);
auto K1 = this->toK_();
auto K2 = pCamera2->toK_();
cv::Matx33f F12 = K1.t().inv()*t12x*R12*K2.inv();

// Epipolar line in second image l = x1'F12 = [a b c]
const float a = kp1.pt.x*F12(0,0)+kp1.pt.y*F12(1,0)+F12(2,0);
const float b = kp1.pt.x*F12(0,1)+kp1.pt.y*F12(1,1)+F12(2,1);
const float c = kp1.pt.x*F12(0,2)+kp1.pt.y*F12(1,2)+F12(2,2);

const float num = a*kp2.pt.x+b*kp2.pt.y+c;

const float den = a*a+b*b;

if(den==0)
return false;

const float dsqr = num*num/den;

return dsqr<3.84*unc;
}

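Spelled out, the fundamental matrix and the point-to-epipolar-line test computed above are (3.84 being the 95% quantile of chi-square with one degree of freedom, scaled by the keypoint uncertainty `unc`):

```latex
F_{12} = K_1^{-\top}\,[t_{12}]_\times\,R_{12}\,K_2^{-1},\qquad
l_2 = F_{12}^{\top} x_1 = (a,\, b,\, c)^{\top},\qquad
d^2 = \frac{(a u_2 + b v_2 + c)^2}{a^2 + b^2} < 3.84\,\sigma^2
```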
std::ostream & operator<<(std::ostream &os, const Pinhole &ph) {
|
||||
os << ph.mvParameters[0] << " " << ph.mvParameters[1] << " " << ph.mvParameters[2] << " " << ph.mvParameters[3];
|
||||
return os;
|
||||
|
@ -165,4 +202,13 @@ namespace ORB_SLAM3 {
|
|||
v.at<float>(2), 0,-v.at<float>(0),
|
||||
-v.at<float>(1), v.at<float>(0), 0);
|
||||
}
|
||||
|
||||
cv::Matx33f Pinhole::SkewSymmetricMatrix_(const cv::Matx31f &v)
|
||||
{
|
||||
cv::Matx33f skew{0.f, -v(2), v(1),
|
||||
v(2), 0.f, -v(0),
|
||||
-v(1), v(0), 0.f};
|
||||
|
||||
return skew;
|
||||
}
|
||||
}
|
||||
|
|
src/Frame.cc
|
@ -49,6 +49,10 @@ cv::BFMatcher Frame::BFmatcher = cv::BFMatcher(cv::NORM_HAMMING);
|
|||
|
||||
Frame::Frame(): mpcpi(NULL), mpImuPreintegrated(NULL), mpPrevFrame(NULL), mpImuPreintegratedFrame(NULL), mpReferenceKF(static_cast<KeyFrame*>(NULL)), mbImuPreintegrated(false)
|
||||
{
|
||||
#ifdef REGISTER_TIMES
|
||||
mTimeStereoMatch = 0;
|
||||
mTimeORB_Ext = 0;
|
||||
#endif
|
||||
}
|
||||
|
||||
|
||||
|
@ -69,7 +73,8 @@ Frame::Frame(const Frame &frame)
|
|||
mpCamera(frame.mpCamera), mpCamera2(frame.mpCamera2), Nleft(frame.Nleft), Nright(frame.Nright),
|
||||
monoLeft(frame.monoLeft), monoRight(frame.monoRight), mvLeftToRightMatch(frame.mvLeftToRightMatch),
|
||||
mvRightToLeftMatch(frame.mvRightToLeftMatch), mvStereo3Dpoints(frame.mvStereo3Dpoints),
|
||||
mTlr(frame.mTlr.clone()), mRlr(frame.mRlr.clone()), mtlr(frame.mtlr.clone()), mTrl(frame.mTrl.clone()), mTimeStereoMatch(frame.mTimeStereoMatch), mTimeORB_Ext(frame.mTimeORB_Ext)
|
||||
mTlr(frame.mTlr.clone()), mRlr(frame.mRlr.clone()), mtlr(frame.mtlr.clone()), mTrl(frame.mTrl.clone()),
|
||||
mTrlx(frame.mTrlx), mTlrx(frame.mTlrx), mOwx(frame.mOwx), mRcwx(frame.mRcwx), mtcwx(frame.mtcwx)
|
||||
{
|
||||
for(int i=0;i<FRAME_GRID_COLS;i++)
|
||||
for(int j=0; j<FRAME_GRID_ROWS; j++){
|
||||
|
@ -87,13 +92,18 @@ Frame::Frame(const Frame &frame)
|
|||
|
||||
mmProjectPoints = frame.mmProjectPoints;
|
||||
mmMatchedInImage = frame.mmMatchedInImage;
|
||||
|
||||
#ifdef REGISTER_TIMES
|
||||
mTimeStereoMatch = frame.mTimeStereoMatch;
|
||||
mTimeORB_Ext = frame.mTimeORB_Ext;
|
||||
#endif
|
||||
}
|
||||
|
||||
|
||||
Frame::Frame(const cv::Mat &imLeft, const cv::Mat &imRight, const double &timeStamp, ORBextractor* extractorLeft, ORBextractor* extractorRight, ORBVocabulary* voc, cv::Mat &K, cv::Mat &distCoef, const float &bf, const float &thDepth, GeometricCamera* pCamera, Frame* pPrevF, const IMU::Calib &ImuCalib)
|
||||
:mpcpi(NULL), mpORBvocabulary(voc),mpORBextractorLeft(extractorLeft),mpORBextractorRight(extractorRight), mTimeStamp(timeStamp), mK(K.clone()), mDistCoef(distCoef.clone()), mbf(bf), mThDepth(thDepth),
|
||||
mImuCalib(ImuCalib), mpImuPreintegrated(NULL), mpPrevFrame(pPrevF),mpImuPreintegratedFrame(NULL), mpReferenceKF(static_cast<KeyFrame*>(NULL)), mbImuPreintegrated(false),
|
||||
mpCamera(pCamera) ,mpCamera2(nullptr), mTimeStereoMatch(0), mTimeORB_Ext(0)
|
||||
mpCamera(pCamera) ,mpCamera2(nullptr)
|
||||
{
|
||||
// Step 1: the frame ID auto-increments
|
||||
mnId=nNextId++;
|
||||
|
@ -115,13 +125,21 @@ Frame::Frame(const cv::Mat &imLeft, const cv::Mat &imRight, const double &timeSt
|
|||
mvInvLevelSigma2 = mpORBextractorLeft->GetInverseScaleSigmaSquares();
|
||||
|
||||
// ORB extraction
// Step 3: extract ORB features from the left and right images (first argument: 0 = left, 1 = right); two threads are launched so both extractions run in parallel
#ifdef REGISTER_TIMES
std::chrono::steady_clock::time_point time_StartExtORB = std::chrono::steady_clock::now();
#endif
thread threadLeft(&Frame::ExtractORB,this,0,imLeft,0,0);
// extract ORB features from the right image
thread threadRight(&Frame::ExtractORB,this,1,imRight,0,0);
// wait for both extraction threads to finish
threadLeft.join();
threadRight.join();
#ifdef REGISTER_TIMES
std::chrono::steady_clock::time_point time_EndExtORB = std::chrono::steady_clock::now();

mTimeORB_Ext = std::chrono::duration_cast<std::chrono::duration<double,std::milli> >(time_EndExtORB - time_StartExtORB).count();
#endif

// mvKeys holds the keypoints of the left image; here we take the number of left-image keypoints
N = mvKeys.size();

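All of the `REGISTER_TIMES` blocks introduced in this patch follow the same std::chrono pattern: take a `steady_clock` time point before and after the section and convert the difference to milliseconds. A self-contained illustration of that idiom as an RAII helper (not code from this patch):

```cpp
#include <chrono>
#include <iostream>
#include <string>
#include <thread>

// Prints the lifetime of a scope in milliseconds, mirroring the
// time_Start*/time_End* pairs guarded by REGISTER_TIMES above.
class ScopedTimerMs {
public:
    explicit ScopedTimerMs(std::string label)
        : mLabel(std::move(label)), mStart(std::chrono::steady_clock::now()) {}
    ~ScopedTimerMs() {
        auto end = std::chrono::steady_clock::now();
        double ms = std::chrono::duration_cast<
            std::chrono::duration<double, std::milli>>(end - mStart).count();
        std::cout << mLabel << ": " << ms << " ms" << std::endl;
    }
private:
    std::string mLabel;
    std::chrono::steady_clock::time_point mStart;
};

int main() {
    ScopedTimerMs timer("dummy work");
    std::this_thread::sleep_for(std::chrono::milliseconds(5));
    return 0;
}
```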
@ -133,13 +151,20 @@ Frame::Frame(const cv::Mat &imLeft, const cv::Mat &imRight, const double &timeSt
// Step 4: undistort the extracted keypoints using OpenCV's undistortion functions and the camera intrinsics
UndistortKeyPoints();

// Step 5: compute stereo matches between the two images; only successfully matched keypoints get a depth, stored in mvDepth
#ifdef REGISTER_TIMES
std::chrono::steady_clock::time_point time_StartStereoMatches = std::chrono::steady_clock::now();
#endif
// Step 5: compute stereo matches between the two images; only successfully matched keypoints get a depth, stored in mvDepth
// mvuRight stores, for each left-image keypoint, the horizontal coordinate of its match in the right image (the vertical coordinate is the same)
ComputeStereoMatches();
#ifdef REGISTER_TIMES
std::chrono::steady_clock::time_point time_EndStereoMatches = std::chrono::steady_clock::now();

// initialize the map points of this frame
mvpMapPoints = vector<MapPoint*>(N,static_cast<MapPoint*>(NULL));
// record whether each map point is an outlier; initialized to false for all
mTimeStereoMatch = std::chrono::duration_cast<std::chrono::duration<double,std::milli> >(time_EndStereoMatches - time_StartStereoMatches).count();
#endif

// initialize the map points of this frame
mvpMapPoints = vector<MapPoint*>(N,static_cast<MapPoint*>(NULL));
mvbOutlier = vector<bool>(N,false);
mmProjectPoints.clear();// = map<long unsigned int, cv::Point2f>(N, static_cast<cv::Point2f>(NULL));
mmMatchedInImage.clear();

@ -204,7 +229,7 @@ Frame::Frame(const cv::Mat &imGray, const cv::Mat &imDepth, const double &timeSt
|
|||
:mpcpi(NULL),mpORBvocabulary(voc),mpORBextractorLeft(extractor),mpORBextractorRight(static_cast<ORBextractor*>(NULL)),
|
||||
mTimeStamp(timeStamp), mK(K.clone()),mDistCoef(distCoef.clone()), mbf(bf), mThDepth(thDepth),
|
||||
mImuCalib(ImuCalib), mpImuPreintegrated(NULL), mpPrevFrame(pPrevF), mpImuPreintegratedFrame(NULL), mpReferenceKF(static_cast<KeyFrame*>(NULL)), mbImuPreintegrated(false),
|
||||
mpCamera(pCamera),mpCamera2(nullptr), mTimeStereoMatch(0), mTimeORB_Ext(0)
|
||||
mpCamera(pCamera),mpCamera2(nullptr)
|
||||
{
|
||||
// Step 1: the frame ID auto-increments
|
||||
mnId=nNextId++;
|
||||
|
@ -225,11 +250,11 @@ Frame::Frame(const cv::Mat &imGray, const cv::Mat &imDepth, const double &timeSt
|
|||
mvInvLevelSigma2 = mpORBextractorLeft->GetInverseScaleSigmaSquares();
|
||||
|
||||
// ORB extraction
|
||||
#ifdef SAVE_TIMES
|
||||
#ifdef REGISTER_TIMES
|
||||
std::chrono::steady_clock::time_point time_StartExtORB = std::chrono::steady_clock::now();
|
||||
#endif
|
||||
ExtractORB(0,imGray,0,0);
|
||||
#ifdef SAVE_TIMES
|
||||
#ifdef REGISTER_TIMES
|
||||
std::chrono::steady_clock::time_point time_EndExtORB = std::chrono::steady_clock::now();
|
||||
|
||||
mTimeORB_Ext = std::chrono::duration_cast<std::chrono::duration<double,std::milli> >(time_EndExtORB - time_StartExtORB).count();
|
||||
|
@ -305,7 +330,7 @@ Frame::Frame(const cv::Mat &imGray, const double &timeStamp, ORBextractor* extra
|
|||
:mpcpi(NULL),mpORBvocabulary(voc),mpORBextractorLeft(extractor),mpORBextractorRight(static_cast<ORBextractor*>(NULL)),
|
||||
mTimeStamp(timeStamp), mK(static_cast<Pinhole*>(pCamera)->toK()), mDistCoef(distCoef.clone()), mbf(bf), mThDepth(thDepth),
|
||||
mImuCalib(ImuCalib), mpImuPreintegrated(NULL),mpPrevFrame(pPrevF),mpImuPreintegratedFrame(NULL), mpReferenceKF(static_cast<KeyFrame*>(NULL)), mbImuPreintegrated(false), mpCamera(pCamera),
|
||||
mpCamera2(nullptr), mTimeStereoMatch(0), mTimeORB_Ext(0)
|
||||
mpCamera2(nullptr)
|
||||
{
|
||||
// Frame ID
|
||||
// Step 1: the frame ID auto-increments
|
||||
|
@ -329,12 +354,12 @@ Frame::Frame(const cv::Mat &imGray, const double &timeStamp, ORBextractor* extra
|
|||
mvInvLevelSigma2 = mpORBextractorLeft->GetInverseScaleSigmaSquares();
|
||||
|
||||
// ORB extraction
|
||||
#ifdef SAVE_TIMES
|
||||
#ifdef REGISTER_TIMES
|
||||
std::chrono::steady_clock::time_point time_StartExtORB = std::chrono::steady_clock::now();
|
||||
#endif
|
||||
// Step 3: extract ORB features from this monocular image (first argument: 0 = left image, 1 = right image)
|
||||
ExtractORB(0,imGray,0,1000);
|
||||
#ifdef SAVE_TIMES
|
||||
#ifdef REGISTER_TIMES
|
||||
std::chrono::steady_clock::time_point time_EndExtORB = std::chrono::steady_clock::now();
|
||||
|
||||
mTimeORB_Ext = std::chrono::duration_cast<std::chrono::duration<double,std::milli> >(time_EndExtORB - time_StartExtORB).count();
|
||||
|
@ -532,6 +557,14 @@ void Frame::UpdatePoseMatrices()

// Inverting mTcw gives the transform from the current camera frame to the world frame; the camera centre in world coordinates is the translation part of that inverse
mOw = -mRcw.t()*mtcw;

// Static matrix
mOwx = cv::Matx31f(mOw.at<float>(0), mOw.at<float>(1), mOw.at<float>(2));
mRcwx = cv::Matx33f(mRcw.at<float>(0,0), mRcw.at<float>(0,1), mRcw.at<float>(0,2),
mRcw.at<float>(1,0), mRcw.at<float>(1,1), mRcw.at<float>(1,2),
mRcw.at<float>(2,0), mRcw.at<float>(2,1), mRcw.at<float>(2,2));
mtcwx = cv::Matx31f(mtcw.at<float>(0), mtcw.at<float>(1), mtcw.at<float>(2));

}

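The `mOwx`, `mRcwx` and `mtcwx` members introduced above mirror the pose in fixed-size `cv::Matx` types, so the per-point visibility tests further down can use stack-allocated arithmetic instead of `cv::Mat`'s dynamically typed, heap-backed expressions. A small sketch of the difference with made-up values:

```cpp
#include <opencv2/core.hpp>

int main() {
    // Dynamically sized, heap-backed matrices.
    cv::Mat R = cv::Mat::eye(3, 3, CV_32F);
    cv::Mat t = (cv::Mat_<float>(3, 1) << 0.1f, 0.2f, 0.3f);
    cv::Mat P = (cv::Mat_<float>(3, 1) << 1.f, 2.f, 3.f);
    cv::Mat Pc = R * P + t;                 // goes through MatExpr, allocates

    // Fixed-size, stack-allocated equivalents (what the *_x members hold).
    cv::Matx33f Rx = cv::Matx33f::eye();
    cv::Matx31f tx(0.1f, 0.2f, 0.3f);
    cv::Matx31f Px(1.f, 2.f, 3.f);
    cv::Matx31f Pcx = Rx * Px + tx;         // plain value arithmetic, no heap

    return (Pc.at<float>(2) > 0 && Pcx(2) > 0) ? 0 : 1;
}
```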
cv::Mat Frame::GetImuPosition()
|
||||
|
@ -579,17 +612,17 @@ bool Frame::isInFrustum(MapPoint *pMP, float viewingCosLimit)
|
|||
|
||||
// 3D in absolute coordinates
|
||||
// Step 1: get the world position of this map point
|
||||
cv::Mat P = pMP->GetWorldPos();
|
||||
cv::Matx31f Px = pMP->GetWorldPos2();
|
||||
|
||||
// cout << "b";
|
||||
|
||||
// 3D in camera coordinates
|
||||
// transform the point into the current camera frame using the current (rough) frame pose, giving Pc
|
||||
const cv::Mat Pc = mRcw*P+mtcw;
|
||||
const cv::Matx31f Pc = mRcwx * Px + mtcwx;
|
||||
const float Pc_dist = cv::norm(Pc);
|
||||
|
||||
// Check positive depth
|
||||
const float &PcZ = Pc.at<float>(2);
|
||||
const float &PcZ = Pc(2);
|
||||
const float invz = 1.0f/PcZ;
|
||||
// Step 2, check 1: verify the map point has positive depth in the current camera frame; a negative depth means something is wrong, so return false
|
||||
if(PcZ<0.0f)
|
||||
|
@ -615,7 +648,7 @@ bool Frame::isInFrustum(MapPoint *pMP, float viewingCosLimit)
|
|||
const float minDistance = pMP->GetMinDistanceInvariance();
|
||||
// compute the distance from the map point to the current camera centre; note that P and mOw must be expressed in the same coordinate frame
// mOw: current camera centre in world coordinates
|
||||
const cv::Mat PO = P-mOw;
|
||||
const cv::Matx31f PO = Px-mOwx;
|
||||
// the norm gives the distance
|
||||
const float dist = cv::norm(PO);
|
||||
|
||||
|
@ -627,11 +660,11 @@ bool Frame::isInFrustum(MapPoint *pMP, float viewingCosLimit)
|
|||
|
||||
// Check viewing angle
|
||||
// Step 5, check 4: compute the cosine of the angle between the camera-to-point vector and the point's mean viewing direction; if it is below cos(viewingCosLimit), i.e. the angle exceeds viewingCosLimit, reject the point
|
||||
cv::Mat Pn = pMP->GetNormal();
|
||||
cv::Matx31f Pnx = pMP->GetNormal2();
|
||||
|
||||
// cout << "f";
|
||||
// cosine of the angle between the camera-to-point vector and the mean viewing direction; the mean viewing direction is a unit vector
|
||||
const float viewCos = PO.dot(Pn)/dist;
|
||||
const float viewCos = PO.dot(Pnx)/dist;
|
||||
|
||||
// if the angle exceeds the threshold (cos(60 deg) = 0.5), the viewing direction is too far off and the reprojection is unreliable, so return false
|
||||
if(viewCos<viewingCosLimit)
|
||||
|
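The viewing-angle gate a few lines above compares the cosine between the camera-to-point ray and the point's mean viewing direction. With O the camera centre, X the map point and n the unit mean normal:

```latex
\cos\theta = \frac{(X - O)\cdot n}{\lVert X - O\rVert},\qquad
\text{reject if } \cos\theta < \texttt{viewingCosLimit}
```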
@ -1133,11 +1166,7 @@ void Frame::ComputeStereoMatches()
|
|||
const int w = 5;
|
||||
// extract from the left image a patch of radius w centred on the keypoint (scaleduL, scaledvL)
|
||||
cv::Mat IL = mpORBextractorLeft->mvImagePyramid[kpL.octave].rowRange(scaledvL-w,scaledvL+w+1).colRange(scaleduL-w,scaleduL+w+1);
|
||||
//IL.convertTo(IL,CV_32F);
|
||||
// normalize the patch to reduce the influence of brightness changes on the similarity computation
|
||||
//IL = IL - IL.at<float>(w,w) *cv::Mat::ones(IL.rows,IL.cols,CV_32F);
|
||||
IL.convertTo(IL,CV_16S);
|
||||
IL = IL - IL.at<short>(w,w);
|
||||
|
||||
// initialize the best similarity score
int bestDist = INT_MAX;
// column offset obtained by the sliding-window search
|
||||
|
@ -1163,11 +1192,7 @@ void Frame::ComputeStereoMatches()
|
|||
{
|
||||
// extract the corresponding candidate patch of radius w from the right image
|
||||
cv::Mat IR = mpORBextractorRight->mvImagePyramid[kpL.octave].rowRange(scaledvL-w,scaledvL+w+1).colRange(scaleduR0+incR-w,scaleduR0+incR+w+1);
|
||||
//IR.convertTo(IR,CV_32F);
|
||||
// normalize the patch to reduce the influence of brightness changes on the similarity computation
|
||||
//IR = IR - IR.at<float>(w,w) *cv::Mat::ones(IR.rows,IR.cols,CV_32F);
|
||||
IR.convertTo(IR,CV_16S);
|
||||
IR = IR - IR.at<short>(w,w);
|
||||
|
||||
// SAD computation
|
||||
float dist = cv::norm(IL,IR,cv::NORM_L1);
|
||||
// keep track of the minimum SAD and its column offset
|
||||
|
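The stereo refinement in the two hunks above is a sliding-window SAD search: both (2w+1)x(2w+1) patches are offset by their centre pixel value as a cheap brightness normalization and then compared with an L1 norm over candidate column offsets:

```latex
\mathrm{SAD}(d) = \sum_{(u,v) \in W}
\bigl|\, \bigl(I_L(u,v) - I_L(c)\bigr) - \bigl(I_R(u+d,\,v) - I_R(c_d)\bigr) \,\bigr|,
\qquad d^\ast = \arg\min_d \mathrm{SAD}(d)
```

with c and c_d the respective patch centres; the best offset is then refined to sub-pixel precision by a parabola fit over the neighbouring scores (not shown in this excerpt).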
@ -1316,10 +1341,8 @@ Frame::Frame(const cv::Mat &imLeft, const cv::Mat &imRight, const double &timeSt
|
|||
:mpcpi(NULL), mpORBvocabulary(voc),mpORBextractorLeft(extractorLeft),mpORBextractorRight(extractorRight), mTimeStamp(timeStamp), mK(K.clone()), mDistCoef(distCoef.clone()), mbf(bf), mThDepth(thDepth),
|
||||
mImuCalib(ImuCalib), mpImuPreintegrated(NULL), mpPrevFrame(pPrevF),mpImuPreintegratedFrame(NULL), mpReferenceKF(static_cast<KeyFrame*>(NULL)), mbImuPreintegrated(false), mpCamera(pCamera), mpCamera2(pCamera2), mTlr(Tlr)
|
||||
{
|
||||
std::chrono::steady_clock::time_point t0 = std::chrono::steady_clock::now();
|
||||
imgLeft = imLeft.clone();
|
||||
imgRight = imRight.clone();
|
||||
std::chrono::steady_clock::time_point t1 = std::chrono::steady_clock::now();
|
||||
|
||||
// Frame ID
|
||||
mnId=nNextId++;
|
||||
|
@ -1334,11 +1357,18 @@ Frame::Frame(const cv::Mat &imLeft, const cv::Mat &imRight, const double &timeSt
|
|||
mvInvLevelSigma2 = mpORBextractorLeft->GetInverseScaleSigmaSquares();
|
||||
|
||||
// ORB extraction
|
||||
#ifdef REGISTER_TIMES
|
||||
std::chrono::steady_clock::time_point time_StartExtORB = std::chrono::steady_clock::now();
|
||||
#endif
|
||||
thread threadLeft(&Frame::ExtractORB,this,0,imLeft,static_cast<KannalaBrandt8*>(mpCamera)->mvLappingArea[0],static_cast<KannalaBrandt8*>(mpCamera)->mvLappingArea[1]);
|
||||
thread threadRight(&Frame::ExtractORB,this,1,imRight,static_cast<KannalaBrandt8*>(mpCamera2)->mvLappingArea[0],static_cast<KannalaBrandt8*>(mpCamera2)->mvLappingArea[1]);
|
||||
threadLeft.join();
|
||||
threadRight.join();
|
||||
std::chrono::steady_clock::time_point t2 = std::chrono::steady_clock::now();
|
||||
#ifdef REGISTER_TIMES
|
||||
std::chrono::steady_clock::time_point time_EndExtORB = std::chrono::steady_clock::now();
|
||||
|
||||
mTimeORB_Ext = std::chrono::duration_cast<std::chrono::duration<double,std::milli> >(time_EndExtORB - time_StartExtORB).count();
|
||||
#endif
|
||||
|
||||
Nleft = mvKeys.size();
|
||||
Nright = mvKeysRight.size();
|
||||
|
@ -1375,8 +1405,22 @@ Frame::Frame(const cv::Mat &imLeft, const cv::Mat &imRight, const double &timeSt
|
|||
|
||||
cv::hconcat(Rrl,trl,mTrl);
|
||||
|
||||
mTrlx = cv::Matx34f(Rrl.at<float>(0,0), Rrl.at<float>(0,1), Rrl.at<float>(0,2), trl.at<float>(0),
|
||||
Rrl.at<float>(1,0), Rrl.at<float>(1,1), Rrl.at<float>(1,2), trl.at<float>(1),
|
||||
Rrl.at<float>(2,0), Rrl.at<float>(2,1), Rrl.at<float>(2,2), trl.at<float>(2));
|
||||
mTlrx = cv::Matx34f(mRlr.at<float>(0,0), mRlr.at<float>(0,1), mRlr.at<float>(0,2), mtlr.at<float>(0),
|
||||
mRlr.at<float>(1,0), mRlr.at<float>(1,1), mRlr.at<float>(1,2), mtlr.at<float>(1),
|
||||
mRlr.at<float>(2,0), mRlr.at<float>(2,1), mRlr.at<float>(2,2), mtlr.at<float>(2));
|
||||
|
||||
#ifdef REGISTER_TIMES
|
||||
std::chrono::steady_clock::time_point time_StartStereoMatches = std::chrono::steady_clock::now();
|
||||
#endif
|
||||
ComputeStereoFishEyeMatches();
|
||||
std::chrono::steady_clock::time_point t3 = std::chrono::steady_clock::now();
|
||||
#ifdef REGISTER_TIMES
|
||||
std::chrono::steady_clock::time_point time_EndStereoMatches = std::chrono::steady_clock::now();
|
||||
|
||||
mTimeStereoMatch = std::chrono::duration_cast<std::chrono::duration<double,std::milli> >(time_EndStereoMatches - time_StartStereoMatches).count();
|
||||
#endif
|
||||
|
||||
//Put all descriptors in the same matrix
|
||||
cv::vconcat(mDescriptors,mDescriptorsRight,mDescriptors);
|
||||
|
@ -1385,25 +1429,10 @@ Frame::Frame(const cv::Mat &imLeft, const cv::Mat &imRight, const double &timeSt
|
|||
mvbOutlier = vector<bool>(N,false);
|
||||
|
||||
AssignFeaturesToGrid();
|
||||
std::chrono::steady_clock::time_point t4 = std::chrono::steady_clock::now();
|
||||
|
||||
mpMutexImu = new std::mutex();
|
||||
|
||||
UndistortKeyPoints();
|
||||
std::chrono::steady_clock::time_point t5 = std::chrono::steady_clock::now();
|
||||
|
||||
double t_read = std::chrono::duration_cast<std::chrono::duration<double,std::milli> >(t1 - t0).count();
|
||||
double t_orbextract = std::chrono::duration_cast<std::chrono::duration<double,std::milli> >(t2 - t1).count();
|
||||
double t_stereomatches = std::chrono::duration_cast<std::chrono::duration<double,std::milli> >(t3 - t2).count();
|
||||
double t_assign = std::chrono::duration_cast<std::chrono::duration<double,std::milli> >(t4 - t3).count();
|
||||
double t_undistort = std::chrono::duration_cast<std::chrono::duration<double,std::milli> >(t5 - t4).count();
|
||||
|
||||
/*cout << "Reading time: " << t_read << endl;
|
||||
cout << "Extraction time: " << t_orbextract << endl;
|
||||
cout << "Matching time: " << t_stereomatches << endl;
|
||||
cout << "Assignment time: " << t_assign << endl;
|
||||
cout << "Undistortion time: " << t_undistort << endl;*/
|
||||
|
||||
}
|
||||
|
||||
void Frame::ComputeStereoFishEyeMatches() {
|
||||
|
@ -1450,26 +1479,33 @@ void Frame::ComputeStereoFishEyeMatches() {
|
|||
|
||||
bool Frame::isInFrustumChecks(MapPoint *pMP, float viewingCosLimit, bool bRight) {
|
||||
// 3D in absolute coordinates
|
||||
cv::Mat P = pMP->GetWorldPos();
|
||||
cv::Matx31f Px = pMP->GetWorldPos2();
|
||||
|
||||
cv::Matx33f mRx;
|
||||
cv::Matx31f mtx, twcx;
|
||||
|
||||
cv::Matx33f Rcw = mRcwx;
|
||||
cv::Matx33f Rwc = mRcwx.t();
|
||||
cv::Matx31f tcw = mOwx;
|
||||
|
||||
cv::Mat mR, mt, twc;
|
||||
if(bRight){
|
||||
cv::Mat Rrl = mTrl.colRange(0,3).rowRange(0,3);
|
||||
cv::Mat trl = mTrl.col(3);
|
||||
mR = Rrl * mRcw;
|
||||
mt = Rrl * mtcw + trl;
|
||||
twc = mRwc * mTlr.rowRange(0,3).col(3) + mOw;
|
||||
cv::Matx33f Rrl = mTrlx.get_minor<3,3>(0,0);
|
||||
cv::Matx31f trl = mTrlx.get_minor<3,1>(0,3);
|
||||
mRx = Rrl * Rcw;
|
||||
mtx = Rrl * tcw + trl;
|
||||
twcx = Rwc * mTlrx.get_minor<3,1>(0,3) + tcw;
|
||||
}
|
||||
else{
|
||||
mR = mRcw;
|
||||
mt = mtcw;
|
||||
twc = mOw;
|
||||
mRx = mRcwx;
|
||||
mtx = mtcwx;
|
||||
twcx = mOwx;
|
||||
}
|
||||
|
||||
// 3D in camera coordinates
|
||||
cv::Mat Pc = mR*P+mt;
|
||||
const float Pc_dist = cv::norm(Pc);
|
||||
const float &PcZ = Pc.at<float>(2);
|
||||
|
||||
cv::Matx31f Pcx = mRx * Px + mtx;
|
||||
const float Pc_dist = cv::norm(Pcx);
|
||||
const float &PcZ = Pcx(2);
|
||||
|
||||
// Check positive depth
|
||||
if(PcZ<0.0f)
|
||||
|
@ -1477,8 +1513,8 @@ bool Frame::isInFrustumChecks(MapPoint *pMP, float viewingCosLimit, bool bRight)
|
|||
|
||||
// Project in image and check it is not outside
|
||||
cv::Point2f uv;
|
||||
if(bRight) uv = mpCamera2->project(Pc);
|
||||
else uv = mpCamera->project(Pc);
|
||||
if(bRight) uv = mpCamera2->project(Pcx);
|
||||
else uv = mpCamera->project(Pcx);
|
||||
|
||||
if(uv.x<mnMinX || uv.x>mnMaxX)
|
||||
return false;
|
||||
|
@ -1488,16 +1524,16 @@ bool Frame::isInFrustumChecks(MapPoint *pMP, float viewingCosLimit, bool bRight)
|
|||
// Check distance is in the scale invariance region of the MapPoint
|
||||
const float maxDistance = pMP->GetMaxDistanceInvariance();
|
||||
const float minDistance = pMP->GetMinDistanceInvariance();
|
||||
const cv::Mat PO = P-twc;
|
||||
const float dist = cv::norm(PO);
|
||||
const cv::Matx31f POx = Px - twcx;
|
||||
const float dist = cv::norm(POx);
|
||||
|
||||
if(dist<minDistance || dist>maxDistance)
|
||||
return false;
|
||||
|
||||
// Check viewing angle
|
||||
cv::Mat Pn = pMP->GetNormal();
|
||||
cv::Matx31f Pnx = pMP->GetNormal2();
|
||||
|
||||
const float viewCos = PO.dot(Pn)/dist;
|
||||
const float viewCos = POx.dot(Pnx)/dist;
|
||||
|
||||
if(viewCos<viewingCosLimit)
|
||||
return false;
|
||||
|
|
|
@ -38,7 +38,6 @@ FrameDrawer::FrameDrawer(Atlas* pAtlas):both(false),mpAtlas(pAtlas)
|
|||
|
||||
cv::Mat FrameDrawer::DrawFrame(bool bOldFeatures)
|
||||
{
|
||||
// std::cout << "0" << std::endl;
|
||||
cv::Mat im;
|
||||
vector<cv::KeyPoint> vIniKeys; // Initialization: KeyPoints in reference frame
|
||||
vector<int> vMatches; // Initialization: correspondeces with reference keypoints
|
||||
|
@ -47,7 +46,6 @@ cv::Mat FrameDrawer::DrawFrame(bool bOldFeatures)
|
|||
vector<pair<cv::Point2f, cv::Point2f> > vTracks;
|
||||
int state; // Tracking state
|
||||
|
||||
//
|
||||
Frame currentFrame;
|
||||
vector<MapPoint*> vpLocalMap;
|
||||
vector<cv::KeyPoint> vMatchesKeys;
|
||||
|
@ -73,7 +71,7 @@ cv::Mat FrameDrawer::DrawFrame(bool bOldFeatures)
|
|||
vMatches = mvIniMatches;
|
||||
vTracks = mvTracks;
|
||||
}
|
||||
else if(mState==Tracking::OK /*&& bOldFeatures*/)
|
||||
else if(mState==Tracking::OK)
|
||||
{
|
||||
vCurrentKeys = mvCurrentKeys;
|
||||
vbVO = mvbVO;
|
||||
|
@ -96,7 +94,7 @@ cv::Mat FrameDrawer::DrawFrame(bool bOldFeatures)
|
|||
}
|
||||
|
||||
if(im.channels()<3) //this should be always true
|
||||
cvtColor(im,im,CV_GRAY2BGR);
|
||||
cvtColor(im,im,cv::COLOR_GRAY2BGR);
|
||||
|
||||
//Draw
|
||||
if(state==Tracking::NOT_INITIALIZED)
|
||||
|
@ -143,12 +141,8 @@ cv::Mat FrameDrawer::DrawFrame(bool bOldFeatures)
|
|||
mnTrackedVO++;
|
||||
}
|
||||
}
|
||||
/*else
|
||||
{
|
||||
cv::circle(im,vCurrentKeys[i].pt,2,cv::Scalar(0,0,255),-1);
|
||||
}*/
|
||||
|
||||
}
|
||||
// std::cout << "2.3" << std::endl;
|
||||
}
|
||||
else if(state==Tracking::OK && !bOldFeatures)
|
||||
{
|
||||
|
@ -157,9 +151,6 @@ cv::Mat FrameDrawer::DrawFrame(bool bOldFeatures)
|
|||
mnTrackedVO=0;
|
||||
int n = vCurrentKeys.size();
|
||||
|
||||
// cout << "----------------------" << endl;
|
||||
// cout << "Number of matches in old method: " << n << endl;
|
||||
|
||||
for(int i=0; i < n; ++i)
|
||||
{
|
||||
|
||||
|
@ -170,10 +161,6 @@ cv::Mat FrameDrawer::DrawFrame(bool bOldFeatures)
|
|||
}
|
||||
}
|
||||
|
||||
n = mProjectPoints.size();
|
||||
//cout << "Number of projected points: " << n << endl;
|
||||
n = mMatchedInImage.size();
|
||||
//cout << "Number of matched points: " << n << endl;
|
||||
map<long unsigned int, cv::Point2f>::iterator it_match = mMatchedInImage.begin();
|
||||
while(it_match != mMatchedInImage.end())
|
||||
{
|
||||
|
@ -193,37 +180,9 @@ cv::Mat FrameDrawer::DrawFrame(bool bOldFeatures)
|
|||
|
||||
|
||||
it_match++;
|
||||
//it_proj = mProjectPoints.erase(it_proj);
|
||||
}
|
||||
//for(int i=0; i < n; ++i)
|
||||
//{
|
||||
/*if(!vpMatchedMPs[i])
|
||||
continue;*/
|
||||
|
||||
//cv::circle(im,vProjectPoints[i],2,cv::Scalar(255,0,0),-1);
|
||||
/*cv::Point2f point3d_proy;
|
||||
float u, v;
|
||||
bool bIsInImage = currentFrame.ProjectPointDistort(vpMatchedMPs[i] , point3d_proy, u, v);
|
||||
if(bIsInImage)
|
||||
{
|
||||
//cout << "-Point is out of the image" << point3d_proy.x << ", " << point3d_proy.y << endl;
|
||||
cv::circle(im,vMatchesKeys[i].pt,2,cv::Scalar(255,0,0),-1);
|
||||
continue;
|
||||
}
|
||||
|
||||
//cout << "+Point CV " << point3d_proy.x << ", " << point3d_proy.y << endl;
|
||||
//cout << "+Point coord " << u << ", " << v << endl;
|
||||
cv::Point2f point_im = vMatchesKeys[i].pt;
|
||||
|
||||
cv::line(im,cv::Point2f(u, v), point_im,cv::Scalar(0, 255, 0), 1);*/
|
||||
|
||||
//}
|
||||
|
||||
/*cout << "Number of tracker in old method: " << mnTracked << endl;
|
||||
cout << "Number of tracker in new method: " << nTracked2 << endl;*/
|
||||
|
||||
n = vOutlierKeys.size();
|
||||
//cout << "Number of outliers: " << n << endl;
|
||||
for(int i=0; i < n; ++i)
|
||||
{
|
||||
cv::Point2f point3d_proy;
|
||||
|
@ -235,34 +194,7 @@ cv::Mat FrameDrawer::DrawFrame(bool bOldFeatures)
|
|||
cv::line(im,cv::Point2f(u, v), point_im,cv::Scalar(0, 0, 255), 1);
|
||||
}
|
||||
|
||||
// for(int i=0;i<n;i++)
|
||||
// {
|
||||
// if(vbVO[i] || vbMap[i])
|
||||
// {
|
||||
// cv::Point2f pt1,pt2;
|
||||
// pt1.x=vCurrentKeys[i].pt.x-r;
|
||||
// pt1.y=vCurrentKeys[i].pt.y-r;
|
||||
// pt2.x=vCurrentKeys[i].pt.x+r;
|
||||
// pt2.y=vCurrentKeys[i].pt.y+r;
|
||||
|
||||
// // This is a match to a MapPoint in the map
|
||||
// if(vbMap[i])
|
||||
// {
|
||||
// cv::rectangle(im,pt1,pt2,cv::Scalar(0,255,0));
|
||||
// cv::circle(im,vCurrentKeys[i].pt,2,cv::Scalar(0,255,0),-1);
|
||||
// mnTracked++;
|
||||
// }
|
||||
// else // This is match to a "visual odometry" MapPoint created in the last frame
|
||||
// {
|
||||
// cv::rectangle(im,pt1,pt2,cv::Scalar(255,0,0));
|
||||
// cv::circle(im,vCurrentKeys[i].pt,2,cv::Scalar(255,0,0),-1);
|
||||
// mnTrackedVO++;
|
||||
// }
|
||||
// }
|
||||
// }
|
||||
|
||||
}
|
||||
// std::cout << "3" << std::endl;
|
||||
|
||||
cv::Mat imWithInfo;
|
||||
DrawTextInfo(im,state, imWithInfo);
|
||||
|
@ -307,7 +239,7 @@ cv::Mat FrameDrawer::DrawRightFrame()
|
|||
} // destroy scoped mutex -> release mutex
|
||||
|
||||
if(im.channels()<3) //this should be always true
|
||||
cvtColor(im,im,CV_GRAY2BGR);
|
||||
cvtColor(im,im,cv::COLOR_GRAY2BGR);
|
||||
|
||||
//Draw
|
||||
if(state==Tracking::NOT_INITIALIZED) //INITIALIZING
|
||||
|
@ -418,8 +350,6 @@ void FrameDrawer::Update(Tracking *pTracker)
|
|||
N = mvCurrentKeys.size();
|
||||
}
|
||||
|
||||
//cout << "Number of matches in frame: " << N << endl;
|
||||
// cout << "Number of matches in frame: " << N << endl;
|
||||
mvbVO = vector<bool>(N,false);
|
||||
mvbMap = vector<bool>(N,false);
|
||||
mbOnlyTracking = pTracker->mbOnlyTracking;
|
||||
|
@ -427,7 +357,6 @@ void FrameDrawer::Update(Tracking *pTracker)
|
|||
//Variables for the new visualization
|
||||
mCurrentFrame = pTracker->mCurrentFrame;
|
||||
mmProjectPoints = mCurrentFrame.mmProjectPoints;
|
||||
//mmMatchedInImage = mCurrentFrame.mmMatchedInImage;
|
||||
mmMatchedInImage.clear();
|
||||
|
||||
mvpLocalMap = pTracker->GetLocalMapMPS();
|
||||
|
@ -439,8 +368,6 @@ void FrameDrawer::Update(Tracking *pTracker)
|
|||
mvOutlierKeys.reserve(N);
|
||||
mvpOutlierMPs.clear();
|
||||
mvpOutlierMPs.reserve(N);
|
||||
//mvProjectPoints.clear();
|
||||
//mvProjectPoints.reserve(N);
|
||||
|
||||
if(pTracker->mLastProcessedState==Tracking::NOT_INITIALIZED)
|
||||
{
|
||||
|
@ -461,18 +388,8 @@ void FrameDrawer::Update(Tracking *pTracker)
|
|||
else
|
||||
mvbVO[i]=true;
|
||||
|
||||
//mvpMatchedMPs.push_back(pMP);
|
||||
//mvMatchedKeys.push_back(mvCurrentKeys[i]);
|
||||
mmMatchedInImage[pMP->mnId] = mvCurrentKeys[i].pt;
|
||||
|
||||
//cv::Point2f point3d_proy;
|
||||
//float u, v;
|
||||
//bool bIsInImage = mCurrentFrame.ProjectPointDistort(pMP, point3d_proy, u, v);
|
||||
//if(bIsInImage)
|
||||
//{
|
||||
//mvMatchedKeys.push_back(mvCurrentKeys[i]);
|
||||
//mvProjectPoints.push_back(cv::Point2f(u, v));
|
||||
//}
|
||||
}
|
||||
else
|
||||
{
|
||||
|
|
src/G2oTypes.cc
|
@ -424,61 +424,6 @@ void EdgeStereo::linearizeOplus()
|
|||
y , -x , 0.0, 0.0, 0.0, 1.0;
|
||||
|
||||
_jacobianOplusXj = proj_jac * Rcb * SE3deriv;
|
||||
|
||||
|
||||
/*const VertexPose* VPose = static_cast<const VertexPose*>(_vertices[1]);
|
||||
const g2o::VertexSBAPointXYZ* VPoint = static_cast<const g2o::VertexSBAPointXYZ*>(_vertices[0]);
|
||||
|
||||
const Eigen::Matrix3d &Rcw = VPose->estimate().Rcw;
|
||||
const Eigen::Vector3d &tcw = VPose->estimate().tcw;
|
||||
const Eigen::Vector3d Xc = Rcw*VPoint->estimate() + tcw;
|
||||
const double &xc = Xc[0];
|
||||
const double &yc = Xc[1];
|
||||
const double invzc = 1.0/Xc[2];
|
||||
const double invzc2 = invzc*invzc;
|
||||
const double &fx = VPose->estimate().fx;
|
||||
const double &fy = VPose->estimate().fy;
|
||||
const double &bf = VPose->estimate().bf;
|
||||
const Eigen::Matrix3d &Rcb = VPose->estimate().Rcb;
|
||||
|
||||
// Jacobian wrt Point
|
||||
_jacobianOplusXi(0,0) = -fx*invzc*Rcw(0,0)+fx*xc*invzc2*Rcw(2,0);
|
||||
_jacobianOplusXi(0,1) = -fx*invzc*Rcw(0,1)+fx*xc*invzc2*Rcw(2,1);
|
||||
_jacobianOplusXi(0,2) = -fx*invzc*Rcw(0,2)+fx*xc*invzc2*Rcw(2,2);
|
||||
|
||||
_jacobianOplusXi(1,0) = -fy*invzc*Rcw(1,0)+fy*yc*invzc2*Rcw(2,0);
|
||||
_jacobianOplusXi(1,1) = -fy*invzc*Rcw(1,1)+fy*yc*invzc2*Rcw(2,1);
|
||||
_jacobianOplusXi(1,2) = -fy*invzc*Rcw(1,2)+fy*yc*invzc2*Rcw(2,2);
|
||||
|
||||
_jacobianOplusXi(2,0) = _jacobianOplusXi(0,0)-bf*invzc2*Rcw(2,0);
|
||||
_jacobianOplusXi(2,1) = _jacobianOplusXi(0,1)-bf*invzc2*Rcw(2,1);
|
||||
_jacobianOplusXi(2,2) = _jacobianOplusXi(0,2)-bf*invzc2*Rcw(2,2);
|
||||
|
||||
const Eigen::Vector3d Xb = VPose->estimate().Rbc*Xc + VPose->estimate().tbc;
|
||||
const Eigen::Matrix3d RS = VPose->estimate().Rcb*Skew(Xb);
|
||||
|
||||
// Jacobian wrt Imu Pose
|
||||
_jacobianOplusXj(0,0) = -fx*invzc*RS(0,0)+fx*xc*invzc2*RS(2,0);
|
||||
_jacobianOplusXj(0,1) = -fx*invzc*RS(0,1)+fx*xc*invzc2*RS(2,1);
|
||||
_jacobianOplusXj(0,2) = -fx*invzc*RS(0,2)+fx*xc*invzc2*RS(2,2);
|
||||
_jacobianOplusXj(0,3) = fx*invzc*Rcb(0,0)-fx*xc*invzc2*Rcb(2,0);
|
||||
_jacobianOplusXj(0,4) = fx*invzc*Rcb(0,1)-fx*xc*invzc2*Rcb(2,1);
|
||||
_jacobianOplusXj(0,5) = fx*invzc*Rcb(0,2)-fx*xc*invzc2*Rcb(2,2);
|
||||
|
||||
_jacobianOplusXj(1,0) = -fy*invzc*RS(1,0)+fy*yc*invzc2*RS(2,0);
|
||||
_jacobianOplusXj(1,1) = -fy*invzc*RS(1,1)+fy*yc*invzc2*RS(2,1);
|
||||
_jacobianOplusXj(1,2) = -fy*invzc*RS(1,2)+fy*yc*invzc2*RS(2,2);
|
||||
_jacobianOplusXj(1,3) = fy*invzc*Rcb(1,0)-fy*yc*invzc2*Rcb(2,0);
|
||||
_jacobianOplusXj(1,4) = fy*invzc*Rcb(1,1)-fy*yc*invzc2*Rcb(2,1);
|
||||
_jacobianOplusXj(1,5) = fy*invzc*Rcb(1,2)-fy*yc*invzc2*Rcb(2,2);
|
||||
|
||||
_jacobianOplusXj(2,0) = _jacobianOplusXj(0,0) - bf*invzc2*RS(2,0);
|
||||
_jacobianOplusXj(2,1) = _jacobianOplusXj(0,1) - bf*invzc2*RS(2,1);
|
||||
_jacobianOplusXj(2,2) = _jacobianOplusXj(0,2) - bf*invzc2*RS(2,2);
|
||||
_jacobianOplusXj(2,3) = _jacobianOplusXj(0,3) + bf*invzc2*Rcb(2,0);
|
||||
_jacobianOplusXj(2,4) = _jacobianOplusXj(0,4) + bf*invzc2*Rcb(2,1);
|
||||
_jacobianOplusXj(2,5) = _jacobianOplusXj(0,5) + bf*invzc2*Rcb(2,2);
|
||||
*/
|
||||
}
|
||||
|
||||
void EdgeStereoOnlyPose::linearizeOplus()
|
||||
|
@ -506,156 +451,8 @@ void EdgeStereoOnlyPose::linearizeOplus()
|
|||
-z , 0.0, x, 0.0, 1.0, 0.0,
|
||||
y , -x , 0.0, 0.0, 0.0, 1.0;
|
||||
_jacobianOplusXi = proj_jac * Rcb * SE3deriv;
|
||||
|
||||
/*const VertexPose* VPose = static_cast<const VertexPose*>(_vertices[0]);
|
||||
const Eigen::Matrix3d &Rcw = VPose->estimate().Rcw;
|
||||
const Eigen::Vector3d &tcw = VPose->estimate().tcw;
|
||||
const Eigen::Vector3d Xc = Rcw*Xw + tcw;
|
||||
const double &xc = Xc[0];
|
||||
const double &yc = Xc[1];
|
||||
const double invzc = 1.0/Xc[2];
|
||||
const double invzc2 = invzc*invzc;
|
||||
const double &fx = VPose->estimate().fx;
|
||||
const double &fy = VPose->estimate().fy;
|
||||
const double &bf = VPose->estimate().bf;
|
||||
const Eigen::Matrix3d &Rcb = VPose->estimate().Rcb;
|
||||
|
||||
const Eigen::Vector3d Xb = VPose->estimate().Rbc*Xc + VPose->estimate().tbc;
|
||||
const Eigen::Matrix3d RS = VPose->estimate().Rcb*Skew(Xb);
|
||||
|
||||
// Jacobian wrt Imu Pose
|
||||
_jacobianOplusXi(0,0) = -fx*invzc*RS(0,0)+fx*xc*invzc2*RS(2,0);
|
||||
_jacobianOplusXi(0,1) = -fx*invzc*RS(0,1)+fx*xc*invzc2*RS(2,1);
|
||||
_jacobianOplusXi(0,2) = -fx*invzc*RS(0,2)+fx*xc*invzc2*RS(2,2);
|
||||
_jacobianOplusXi(0,3) = fx*invzc*Rcb(0,0)-fx*xc*invzc2*Rcb(2,0);
|
||||
_jacobianOplusXi(0,4) = fx*invzc*Rcb(0,1)-fx*xc*invzc2*Rcb(2,1);
|
||||
_jacobianOplusXi(0,5) = fx*invzc*Rcb(0,2)-fx*xc*invzc2*Rcb(2,2);
|
||||
|
||||
_jacobianOplusXi(1,0) = -fy*invzc*RS(1,0)+fy*yc*invzc2*RS(2,0);
|
||||
_jacobianOplusXi(1,1) = -fy*invzc*RS(1,1)+fy*yc*invzc2*RS(2,1);
|
||||
_jacobianOplusXi(1,2) = -fy*invzc*RS(1,2)+fy*yc*invzc2*RS(2,2);
|
||||
_jacobianOplusXi(1,3) = fy*invzc*Rcb(1,0)-fy*yc*invzc2*Rcb(2,0);
|
||||
_jacobianOplusXi(1,4) = fy*invzc*Rcb(1,1)-fy*yc*invzc2*Rcb(2,1);
|
||||
_jacobianOplusXi(1,5) = fy*invzc*Rcb(1,2)-fy*yc*invzc2*Rcb(2,2);
|
||||
|
||||
_jacobianOplusXi(2,0) = _jacobianOplusXi(0,0) - bf*invzc2*RS(2,0);
|
||||
_jacobianOplusXi(2,1) = _jacobianOplusXi(0,1) - bf*invzc2*RS(2,1);
|
||||
_jacobianOplusXi(2,2) = _jacobianOplusXi(0,2) - bf*invzc2*RS(2,2);
|
||||
_jacobianOplusXi(2,3) = _jacobianOplusXi(0,3) + bf*invzc2*Rcb(2,0);
|
||||
_jacobianOplusXi(2,4) = _jacobianOplusXi(0,4) + bf*invzc2*Rcb(2,1);
|
||||
_jacobianOplusXi(2,5) = _jacobianOplusXi(0,5) + bf*invzc2*Rcb(2,2);
|
||||
*/
|
||||
}
|
||||
|
||||
/*Eigen::Vector2d EdgeMonoInvdepth::cam_project(const Eigen::Vector3d & trans_xyz) const{
|
||||
double invZ = 1./trans_xyz[2];
|
||||
Eigen::Vector2d res;
|
||||
res[0] = invZ*trans_xyz[0]*fx + cx;
|
||||
res[1] = invZ*trans_xyz[1]*fy + cy;
|
||||
return res;
|
||||
}
|
||||
|
||||
Eigen::Vector3d EdgeMonoInvdepth::cam_unproject(const double u, const double v, const double invDepth) const{
|
||||
Eigen::Vector3d res;
|
||||
res[2] = 1./invDepth;
|
||||
double z_x=res[2]/fx;
|
||||
double z_y=res[2]/fy;
|
||||
res[0] = (u-cx)*z_x;
|
||||
res[1] = (v-cy)*z_y;
|
||||
|
||||
return res;
|
||||
}
|
||||
|
||||
void EdgeMonoInvdepth::linearizeOplus()
|
||||
{
|
||||
VertexInvDepth *vPt = static_cast<VertexInvDepth*>(_vertices[0]);
|
||||
g2o::VertexSE3Expmap *vHost = static_cast<g2o::VertexSE3Expmap*>(_vertices[1]);
|
||||
g2o::VertexSE3Expmap *vObs = static_cast<g2o::VertexSE3Expmap*>(_vertices[2]);
|
||||
|
||||
//
|
||||
g2o::SE3Quat Tiw(vObs->estimate());
|
||||
g2o::SE3Quat T0w(vHost->estimate());
|
||||
g2o::SE3Quat Ti0 = Tiw*T0w.inverse();
|
||||
double o_rho_j = vPt->estimate().rho;
|
||||
Eigen::Vector3d o_X_j = cam_unproject(vPt->estimate().u,vPt->estimate().v,o_rho_j);
|
||||
Eigen::Vector3d i_X_j = Ti0.map(o_X_j);
|
||||
double i_rho_j = 1./i_X_j[2];
|
||||
Eigen::Vector2d i_proj_j = cam_project(i_X_j);
|
||||
|
||||
// i_rho_j*C_ij matrix
|
||||
Eigen::Matrix<double,2,3> rhoC_ij;
|
||||
rhoC_ij(0,0) = i_rho_j*fx;
|
||||
rhoC_ij(0,1) = 0.0;
|
||||
rhoC_ij(0,2) = i_rho_j*(cx-i_proj_j[0]);
|
||||
rhoC_ij(1,0) = 0.0;
|
||||
rhoC_ij(1,1) = i_rho_j*fy;
|
||||
rhoC_ij(1,2) = i_rho_j*(cy-i_proj_j[1]);
|
||||
|
||||
// o_rho_j^{-2}*K^{-1}*0_proj_j vector
|
||||
Eigen::Matrix3d Ri0 = Ti0.rotation().toRotationMatrix();
|
||||
Eigen::Matrix<double, 2, 3> tmp;
|
||||
tmp = rhoC_ij*Ri0;
|
||||
|
||||
// Skew matrices
|
||||
Eigen::Matrix3d skew_0_X_j;
|
||||
Eigen::Matrix3d skew_i_X_j;
|
||||
|
||||
skew_0_X_j=Eigen::MatrixXd::Zero(3,3);
|
||||
skew_i_X_j=Eigen::MatrixXd::Zero(3,3);
|
||||
|
||||
skew_0_X_j(0,1) = -o_X_j[2];
|
||||
skew_0_X_j(1,0) = -skew_0_X_j(0,1);
|
||||
skew_0_X_j(0,2) = o_X_j[1];
|
||||
skew_0_X_j(2,0) = -skew_0_X_j(0,2);
|
||||
skew_0_X_j(1,2) = -o_X_j[0];
|
||||
skew_0_X_j(2,1) = -skew_0_X_j(1,2);
|
||||
|
||||
skew_i_X_j(0,1) = -i_X_j[2];
|
||||
skew_i_X_j(1,0) = -skew_i_X_j(0,1);
|
||||
skew_i_X_j(0,2) = i_X_j[1];
|
||||
skew_i_X_j(2,0) = -skew_i_X_j(0,2);
|
||||
skew_i_X_j(1,2) = -i_X_j[0];
|
||||
skew_i_X_j(2,1) = -skew_i_X_j(1,2);
|
||||
|
||||
// Jacobians w.r.t inverse depth in the host frame
|
||||
_jacobianOplus[0].setZero();
|
||||
_jacobianOplus[0] = (1./o_rho_j)*rhoC_ij*Ri0*o_X_j;
|
||||
|
||||
// Jacobians w.r.t pose host frame
|
||||
_jacobianOplus[1].setZero();
|
||||
// Rotation
|
||||
_jacobianOplus[1].block<2,3>(0,0) = -tmp*skew_0_X_j;
|
||||
// translation
|
||||
_jacobianOplus[1].block<2,3>(0,3) = tmp;
|
||||
|
||||
// Jacobians w.r.t pose observer frame
|
||||
_jacobianOplus[2].setZero();
|
||||
// Rotation
|
||||
_jacobianOplus[2].block<2,3>(0,0) = rhoC_ij*skew_i_X_j;
|
||||
// translation
|
||||
_jacobianOplus[2].block<2,3>(0,3) = -rhoC_ij;
|
||||
}
|
||||
|
||||
|
||||
Eigen::Vector2d EdgeMonoInvdepthBody::cam_project(const Eigen::Vector3d & trans_xyz) const{
|
||||
double invZ = 1./trans_xyz[2];
|
||||
Eigen::Vector2d res;
|
||||
res[0] = invZ*trans_xyz[0]*fx + cx;
|
||||
res[1] = invZ*trans_xyz[1]*fy + cy;
|
||||
return res;
|
||||
}
|
||||
|
||||
Eigen::Vector3d EdgeMonoInvdepthBody::cam_unproject(const double u, const double v, const double invDepth) const{
|
||||
Eigen::Vector3d res;
|
||||
res[2] = 1./invDepth;
|
||||
double z_x=res[2]/fx;
|
||||
double z_y=res[2]/fy;
|
||||
res[0] = (u-cx)*z_x;
|
||||
res[1] = (v-cy)*z_y;
|
||||
|
||||
return res;
|
||||
}*/
|
||||
|
||||
VertexVelocity::VertexVelocity(KeyFrame* pKF)
{
setEstimate(Converter::toVector3d(pKF->GetVelocity()));

@@ -31,7 +31,6 @@ cv::Mat NormalizeRotation(const cv::Mat &R)
{
cv::Mat U,w,Vt;
cv::SVDecomp(R,w,U,Vt,cv::SVD::FULL_UV);
// assert(cv::determinant(U*Vt)>0);
return U*Vt;
}
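A short aside on the hunk above (not part of the diff): NormalizeRotation re-orthonormalizes a
drifting rotation estimate by projecting it back onto SO(3). With the SVD R = U \Sigma V^T, the
closest orthogonal matrix in the Frobenius sense is

    \hat{R} = U V^T,

which is exactly what the function returns; the commented-out assert would only verify that
det(U V^T) > 0, i.e. that the result is a proper rotation rather than a reflection.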
@@ -399,10 +398,10 @@ IMU::Bias Preintegrated::GetDeltaBias(const Bias &b_)
cv::Mat Preintegrated::GetDeltaRotation(const Bias &b_)
{
std::unique_lock<std::mutex> lock(mMutex);
// ? Compute the change in the bias
// Compute the change in the bias
cv::Mat dbg = (cv::Mat_<float>(3,1) << b_.bwx-b.bwx,b_.bwy-b.bwy,b_.bwz-b.bwz);
// ? After accounting for the bias, dR is corrected by a first-order linearization with respect to the bias; see Qiu Xiaochen, "Summary and Derivation of the Preintegration Formulas", pp. 13-14
// ? The same result appears (without derivation) as Eq. (44) on p. 17 of Forster's paper; the two functions below, GetDeltaVelocity and GetDeltaPosition, are based on the same derivation
// After accounting for the bias, dR is corrected by a first-order linearization with respect to the bias; see Qiu Xiaochen, "Summary and Derivation of the Preintegration Formulas", pp. 13-14
// The same result appears (without derivation) as Eq. (44) on p. 17 of Forster's paper; the two functions below, GetDeltaVelocity and GetDeltaPosition, are based on the same derivation
return NormalizeRotation(dR*ExpSO3(JRg*dbg));
}

@@ -411,7 +410,7 @@ cv::Mat Preintegrated::GetDeltaVelocity(const Bias &b_)
std::unique_lock<std::mutex> lock(mMutex);
cv::Mat dbg = (cv::Mat_<float>(3,1) << b_.bwx-b.bwx,b_.bwy-b.bwy,b_.bwz-b.bwz);
cv::Mat dba = (cv::Mat_<float>(3,1) << b_.bax-b.bax,b_.bay-b.bay,b_.baz-b.baz);
// ? After accounting for the bias, dV is corrected by a first-order linearization with respect to the bias (Qiu Xiaochen, p. 13); JVg and JVa are updated during preintegration
// After accounting for the bias, dV is corrected by a first-order linearization with respect to the bias (Qiu Xiaochen, p. 13); JVg and JVa are updated during preintegration
return dV + JVg*dbg + JVa*dba;
}

@@ -420,7 +419,7 @@ cv::Mat Preintegrated::GetDeltaPosition(const Bias &b_)
std::unique_lock<std::mutex> lock(mMutex);
cv::Mat dbg = (cv::Mat_<float>(3,1) << b_.bwx-b.bwx,b_.bwy-b.bwy,b_.bwz-b.bwz);
cv::Mat dba = (cv::Mat_<float>(3,1) << b_.bax-b.bax,b_.bay-b.bay,b_.baz-b.baz);
// ? After accounting for the bias, dP is corrected by a first-order linearization with respect to the bias (Qiu Xiaochen, p. 13); JPg and JPa are updated during preintegration
// After accounting for the bias, dP is corrected by a first-order linearization with respect to the bias (Qiu Xiaochen, p. 13); JPg and JPa are updated during preintegration
return dP + JPg*dbg + JPa*dba;
}
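For reference (not part of the diff), the three accessors above implement the standard first-order
bias update of the preintegrated measurements, Eq. (44) in Forster et al., "On-Manifold
Preintegration for Real-Time Visual-Inertial Odometry":

    \Delta\tilde{R} \approx \Delta R \, \mathrm{Exp}(J^{g}_{\Delta R}\,\delta b_g)
    \Delta\tilde{v} \approx \Delta v + J^{g}_{\Delta v}\,\delta b_g + J^{a}_{\Delta v}\,\delta b_a
    \Delta\tilde{p} \approx \Delta p + J^{g}_{\Delta p}\,\delta b_g + J^{a}_{\Delta p}\,\delta b_a

where \delta b_g and \delta b_a are the bias changes (dbg, dba) and the J terms are the Jacobians
JRg, JVg, JVa, JPg, JPa accumulated during preintegration.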
@@ -196,11 +196,11 @@ bool Initializer::Initialize(const Frame &CurrentFrame, const vector<int> &vMatc
// The scene is closer to planar, so the motion is recovered from the homography; ReconstructH returns a bool
return ReconstructH(vbMatchesInliersH, // input: inlier flags of the successfully matched feature pairs
H, // input: the homography computed by the RANSAC step above
mK, // input: the camera intrinsic matrix
K, // input: the camera intrinsic matrix
R21,t21, // output: the rotation and translation of the camera from reference frame 1 to current frame 2
vP3D, // spatial coordinates of the feature pairs after triangulation, i.e. the map points
vbTriangulated, // flags marking whether each feature pair was successfully triangulated
1.0, // corresponds to the parameter minParallax: for a triangulated measurement of a feature pair to be considered valid,
minParallax, // corresponds to the parameter minParallax: for a triangulated measurement of a feature pair to be considered valid,
// this minimum parallax angle must be met (too small a parallax angle causes very large observation errors), in degrees
50); // minimum number of successfully triangulated points required to recover the motion
}
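Context for the hunk above (an aside, not part of the diff): ReconstructH follows the usual
planar two-view model. For points on a plane with normal n at distance d from the first camera,
the image correspondences satisfy

    x_2 ~ H x_1,   with   H = K (R_21 - t_21 n^T / d) K^{-1},

and the function is meant to pick the (R_21, t_21) hypothesis that triangulates the most points in
front of both cameras with sufficient parallax, which is why minParallax and the minimum point
count are passed in.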
src/KeyFrame.cc (298 changed lines)
@ -96,6 +96,12 @@ KeyFrame::KeyFrame(Frame &F, Map *pMap, KeyFrameDatabase *pKFDB):
|
|||
SetPose(F.mTcw);
|
||||
|
||||
mnOriginMapId = pMap->GetId();
|
||||
|
||||
this->Tlr_ = cv::Matx44f(mTlr.at<float>(0,0),mTlr.at<float>(0,1),mTlr.at<float>(0,2),mTlr.at<float>(0,3),
|
||||
mTlr.at<float>(1,0),mTlr.at<float>(1,1),mTlr.at<float>(1,2),mTlr.at<float>(1,3),
|
||||
mTlr.at<float>(2,0),mTlr.at<float>(2,1),mTlr.at<float>(2,2),mTlr.at<float>(2,3),
|
||||
mTlr.at<float>(3,0),mTlr.at<float>(3,1),mTlr.at<float>(3,2),mTlr.at<float>(3,3));
|
||||
|
||||
}
|
||||
|
||||
void KeyFrame::ComputeBoW()
|
||||
|
@@ -133,6 +139,19 @@ void KeyFrame::SetPose(const cv::Mat &Tcw_)
cv::Mat center = (cv::Mat_<float>(4,1) << mHalfBaseline, 0 , 0, 1);
// In the world frame, the vector from the left camera center to the center of the stereo rig, pointing from the left camera towards the stereo center
Cw = Twc*center;

//Static matrices
this->Tcw_ = cv::Matx44f(Tcw.at<float>(0,0),Tcw.at<float>(0,1),Tcw.at<float>(0,2),Tcw.at<float>(0,3),
Tcw.at<float>(1,0),Tcw.at<float>(1,1),Tcw.at<float>(1,2),Tcw.at<float>(1,3),
Tcw.at<float>(2,0),Tcw.at<float>(2,1),Tcw.at<float>(2,2),Tcw.at<float>(2,3),
Tcw.at<float>(3,0),Tcw.at<float>(3,1),Tcw.at<float>(3,2),Tcw.at<float>(3,3));

this->Twc_ = cv::Matx44f(Twc.at<float>(0,0),Twc.at<float>(0,1),Twc.at<float>(0,2),Twc.at<float>(0,3),
Twc.at<float>(1,0),Twc.at<float>(1,1),Twc.at<float>(1,2),Twc.at<float>(1,3),
Twc.at<float>(2,0),Twc.at<float>(2,1),Twc.at<float>(2,2),Twc.at<float>(2,3),
Twc.at<float>(3,0),Twc.at<float>(3,1),Twc.at<float>(3,2),Twc.at<float>(3,3));

this->Ow_ = cv::Matx31f(Ow.at<float>(0),Ow.at<float>(1),Ow.at<float>(2));
}
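A quick check on Cw above (aside, not part of the diff): assuming the usual layout
T_wc = [R_wc | O_w; 0 1] and mHalfBaseline = b/2, the product Cw = Twc * (b/2, 0, 0, 1)^T expands to

    C_w = O_w + (b/2) * r_1,

i.e. the left camera center O_w shifted by half the baseline along the first column r_1 of R_wc
(the camera x-axis expressed in world coordinates), which is the stereo-rig center.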
|
||||
|
||||
void KeyFrame::SetVelocity(const cv::Mat &Vw_)
|
||||
|
@@ -248,9 +267,12 @@ void KeyFrame::UpdateBestCovisibles()
list<int> lWs; // weight
for(size_t i=0, iend=vPairs.size(); i<iend;i++)
{
// push_front reverses the order, so the list ends up sorted from largest to smallest
lKFs.push_front(vPairs[i].second);
lWs.push_front(vPairs[i].first);
if(!vPairs[i].second->isBad())
{
// push_front reverses the order, so the list ends up sorted from largest to smallest
lKFs.push_front(vPairs[i].second);
lWs.push_front(vPairs[i].first);
}
}

// weights sorted from largest to smallest
|
@ -739,11 +761,6 @@ void KeyFrame::SetBadFlag()
|
|||
mbToBeErased = true;
|
||||
return;
|
||||
}
|
||||
if(!mpParent)
|
||||
{
|
||||
//cout << "KF.BADFLAG-> There is not parent, but it is not the first KF in the map" << endl;
|
||||
//cout << "KF.BADFLAG-> KF: " << mnId << "; first KF: " << mpMap->GetInitKFid() << endl;
|
||||
}
|
||||
}
|
||||
|
||||
// Step 2 遍历所有和当前关键帧共视的关键帧,删除他们与当前关键帧的联系
|
||||
|
@ -830,12 +847,6 @@ void KeyFrame::SetBadFlag()
|
|||
// 回答:不需要!这里while循环还没退出,会使用更新的sParentCandidates
|
||||
if(bContinue)
|
||||
{
|
||||
if(pC->mnId == pP->mnId)
|
||||
{
|
||||
/*cout << "ERROR: The parent and son can't be the same KF. ID: " << pC->mnId << endl;
|
||||
cout << "Current KF: " << mnId << endl;
|
||||
cout << "Parent of the map: " << endl;*/
|
||||
}
|
||||
// 因为父节点死了,并且子节点找到了新的父节点,就把它更新为自己的父节点
|
||||
pC->ChangeParent(pP);
|
||||
// 因为子节点找到了新的父节点并更新了父节点,那么该子节点升级,作为其它子节点的备选父节点
|
||||
|
@ -865,10 +876,6 @@ void KeyFrame::SetBadFlag()
|
|||
// 如果当前的关键帧要被删除的话就要计算这个,表示原父关键帧到当前关键帧的位姿变换
|
||||
// 注意在这个删除的过程中,其实并没有将当前关键帧中存储的父关键帧的指针删除掉
|
||||
mTcp = Tcw*mpParent->GetPoseInverse();
|
||||
}
|
||||
else
|
||||
{
|
||||
//cout << "Error: KF haven't got a parent, it is imposible reach this code point without him" << endl;
|
||||
}
|
||||
// 标记当前关键帧已经死了
|
||||
mbBad = true;
|
||||
|
@ -1062,171 +1069,6 @@ void KeyFrame::UpdateMap(Map* pMap)
|
|||
mpMap = pMap;
|
||||
}
|
||||
|
||||
void KeyFrame::PreSave(set<KeyFrame*>& spKF,set<MapPoint*>& spMP, set<GeometricCamera*>& spCam)
|
||||
{
|
||||
// Save the id of each MapPoint in this KF, there can be null pointer in the vector
|
||||
mvBackupMapPointsId.clear();
|
||||
mvBackupMapPointsId.reserve(N);
|
||||
for(int i = 0; i < N; ++i)
|
||||
{
|
||||
|
||||
if(mvpMapPoints[i] && spMP.find(mvpMapPoints[i]) != spMP.end()) // Checks if the element is not null
|
||||
mvBackupMapPointsId.push_back(mvpMapPoints[i]->mnId);
|
||||
else // If the element is null his value is -1 because all the id are positives
|
||||
mvBackupMapPointsId.push_back(-1);
|
||||
}
|
||||
//cout << "KeyFrame: ID from MapPoints stored" << endl;
|
||||
// Save the id of each connected KF with it weight
|
||||
mBackupConnectedKeyFrameIdWeights.clear();
|
||||
for(std::map<KeyFrame*,int>::const_iterator it = mConnectedKeyFrameWeights.begin(), end = mConnectedKeyFrameWeights.end(); it != end; ++it)
|
||||
{
|
||||
if(spKF.find(it->first) != spKF.end())
|
||||
mBackupConnectedKeyFrameIdWeights[it->first->mnId] = it->second;
|
||||
}
|
||||
//cout << "KeyFrame: ID from connected KFs stored" << endl;
|
||||
// Save the parent id
|
||||
mBackupParentId = -1;
|
||||
if(mpParent && spKF.find(mpParent) != spKF.end())
|
||||
mBackupParentId = mpParent->mnId;
|
||||
//cout << "KeyFrame: ID from Parent KF stored" << endl;
|
||||
// Save the id of the childrens KF
|
||||
mvBackupChildrensId.clear();
|
||||
mvBackupChildrensId.reserve(mspChildrens.size());
|
||||
for(KeyFrame* pKFi : mspChildrens)
|
||||
{
|
||||
if(spKF.find(pKFi) != spKF.end())
|
||||
mvBackupChildrensId.push_back(pKFi->mnId);
|
||||
}
|
||||
//cout << "KeyFrame: ID from Children KFs stored" << endl;
|
||||
// Save the id of the loop edge KF
|
||||
mvBackupLoopEdgesId.clear();
|
||||
mvBackupLoopEdgesId.reserve(mspLoopEdges.size());
|
||||
for(KeyFrame* pKFi : mspLoopEdges)
|
||||
{
|
||||
if(spKF.find(pKFi) != spKF.end())
|
||||
mvBackupLoopEdgesId.push_back(pKFi->mnId);
|
||||
}
|
||||
//cout << "KeyFrame: ID from Loop KFs stored" << endl;
|
||||
// Save the id of the merge edge KF
|
||||
mvBackupMergeEdgesId.clear();
|
||||
mvBackupMergeEdgesId.reserve(mspMergeEdges.size());
|
||||
for(KeyFrame* pKFi : mspMergeEdges)
|
||||
{
|
||||
if(spKF.find(pKFi) != spKF.end())
|
||||
mvBackupMergeEdgesId.push_back(pKFi->mnId);
|
||||
}
|
||||
//cout << "KeyFrame: ID from Merge KFs stored" << endl;
|
||||
|
||||
//Camera data
|
||||
mnBackupIdCamera = -1;
|
||||
if(mpCamera && spCam.find(mpCamera) != spCam.end())
|
||||
mnBackupIdCamera = mpCamera->GetId();
|
||||
//cout << "KeyFrame: ID from Camera1 stored; " << mnBackupIdCamera << endl;
|
||||
|
||||
mnBackupIdCamera2 = -1;
|
||||
if(mpCamera2 && spCam.find(mpCamera2) != spCam.end())
|
||||
mnBackupIdCamera2 = mpCamera2->GetId();
|
||||
//cout << "KeyFrame: ID from Camera2 stored; " << mnBackupIdCamera2 << endl;
|
||||
|
||||
//Inertial data
|
||||
mBackupPrevKFId = -1;
|
||||
if(mPrevKF && spKF.find(mPrevKF) != spKF.end())
|
||||
mBackupPrevKFId = mPrevKF->mnId;
|
||||
//cout << "KeyFrame: ID from Prev KF stored" << endl;
|
||||
mBackupNextKFId = -1;
|
||||
if(mNextKF && spKF.find(mNextKF) != spKF.end())
|
||||
mBackupNextKFId = mNextKF->mnId;
|
||||
//cout << "KeyFrame: ID from NextKF stored" << endl;
|
||||
if(mpImuPreintegrated)
|
||||
mBackupImuPreintegrated.CopyFrom(mpImuPreintegrated);
|
||||
//cout << "KeyFrame: Imu Preintegrated stored" << endl;
|
||||
}
|
||||
|
||||
void KeyFrame::PostLoad(map<long unsigned int, KeyFrame*>& mpKFid, map<long unsigned int, MapPoint*>& mpMPid, map<unsigned int, GeometricCamera*>& mpCamId){
|
||||
// Rebuild the empty variables
|
||||
|
||||
// Pose
|
||||
SetPose(Tcw);
|
||||
|
||||
// Reference reconstruction
|
||||
// Each MapPoint sight from this KeyFrame
|
||||
mvpMapPoints.clear();
|
||||
mvpMapPoints.resize(N);
|
||||
for(int i=0; i<N; ++i)
|
||||
{
|
||||
if(mvBackupMapPointsId[i] != -1)
|
||||
mvpMapPoints[i] = mpMPid[mvBackupMapPointsId[i]];
|
||||
else
|
||||
mvpMapPoints[i] = static_cast<MapPoint*>(NULL);
|
||||
}
|
||||
|
||||
// Conected KeyFrames with him weight
|
||||
mConnectedKeyFrameWeights.clear();
|
||||
for(map<long unsigned int, int>::const_iterator it = mBackupConnectedKeyFrameIdWeights.begin(), end = mBackupConnectedKeyFrameIdWeights.end();
|
||||
it != end; ++it)
|
||||
{
|
||||
KeyFrame* pKFi = mpKFid[it->first];
|
||||
mConnectedKeyFrameWeights[pKFi] = it->second;
|
||||
}
|
||||
|
||||
// Restore parent KeyFrame
|
||||
if(mBackupParentId>=0)
|
||||
mpParent = mpKFid[mBackupParentId];
|
||||
|
||||
// KeyFrame childrens
|
||||
mspChildrens.clear();
|
||||
for(vector<long unsigned int>::const_iterator it = mvBackupChildrensId.begin(), end = mvBackupChildrensId.end(); it!=end; ++it)
|
||||
{
|
||||
mspChildrens.insert(mpKFid[*it]);
|
||||
}
|
||||
|
||||
// Loop edge KeyFrame
|
||||
mspLoopEdges.clear();
|
||||
for(vector<long unsigned int>::const_iterator it = mvBackupLoopEdgesId.begin(), end = mvBackupLoopEdgesId.end(); it != end; ++it)
|
||||
{
|
||||
mspLoopEdges.insert(mpKFid[*it]);
|
||||
}
|
||||
|
||||
// Merge edge KeyFrame
|
||||
mspMergeEdges.clear();
|
||||
for(vector<long unsigned int>::const_iterator it = mvBackupMergeEdgesId.begin(), end = mvBackupMergeEdgesId.end(); it != end; ++it)
|
||||
{
|
||||
mspMergeEdges.insert(mpKFid[*it]);
|
||||
}
|
||||
|
||||
//Camera data
|
||||
if(mnBackupIdCamera >= 0)
|
||||
{
|
||||
mpCamera = mpCamId[mnBackupIdCamera];
|
||||
}
|
||||
if(mnBackupIdCamera2 >= 0)
|
||||
{
|
||||
mpCamera2 = mpCamId[mnBackupIdCamera2];
|
||||
}
|
||||
|
||||
//Inertial data
|
||||
if(mBackupPrevKFId != -1)
|
||||
{
|
||||
mPrevKF = mpKFid[mBackupPrevKFId];
|
||||
}
|
||||
if(mBackupNextKFId != -1)
|
||||
{
|
||||
mNextKF = mpKFid[mBackupNextKFId];
|
||||
}
|
||||
mpImuPreintegrated = &mBackupImuPreintegrated;
|
||||
|
||||
|
||||
// Remove all backup container
|
||||
mvBackupMapPointsId.clear();
|
||||
mBackupConnectedKeyFrameIdWeights.clear();
|
||||
mvBackupChildrensId.clear();
|
||||
mvBackupLoopEdgesId.clear();
|
||||
|
||||
UpdateBestCovisibles();
|
||||
|
||||
//ComputeSceneMedianDepth();
|
||||
}
|
||||
|
||||
bool KeyFrame::ProjectPointDistort(MapPoint* pMP, cv::Point2f &kp, float &u, float &v)
|
||||
{
|
||||
|
||||
|
@ -1427,4 +1269,94 @@ void KeyFrame::SetKeyFrameDatabase(KeyFrameDatabase* pKFDB)
|
|||
mpKeyFrameDB = pKFDB;
|
||||
}
|
||||
|
||||
cv::Matx33f KeyFrame::GetRotation_() {
|
||||
unique_lock<mutex> lock(mMutexPose);
|
||||
return Tcw_.get_minor<3,3>(0,0);
|
||||
}
|
||||
|
||||
cv::Matx31f KeyFrame::GetTranslation_() {
|
||||
unique_lock<mutex> lock(mMutexPose);
|
||||
return Tcw_.get_minor<3,1>(0,3);
|
||||
}
|
||||
|
||||
cv::Matx31f KeyFrame::GetCameraCenter_() {
|
||||
unique_lock<mutex> lock(mMutexPose);
|
||||
return Ow_;
|
||||
}
|
||||
|
||||
cv::Matx33f KeyFrame::GetRightRotation_() {
|
||||
unique_lock<mutex> lock(mMutexPose);
|
||||
cv::Matx33f Rrl = Tlr_.get_minor<3,3>(0,0).t();
|
||||
cv::Matx33f Rlw = Tcw_.get_minor<3,3>(0,0);
|
||||
cv::Matx33f Rrw = Rrl * Rlw;
|
||||
|
||||
return Rrw;
|
||||
}
|
||||
|
||||
cv::Matx31f KeyFrame::GetRightTranslation_() {
|
||||
unique_lock<mutex> lock(mMutexPose);
|
||||
cv::Matx33f Rrl = Tlr_.get_minor<3,3>(0,0).t();
|
||||
cv::Matx31f tlw = Tcw_.get_minor<3,1>(0,3);
|
||||
cv::Matx31f trl = - Rrl * Tlr_.get_minor<3,1>(0,3);
|
||||
|
||||
cv::Matx31f trw = Rrl * tlw + trl;
|
||||
|
||||
return trw;
|
||||
}
|
||||
|
||||
cv::Matx44f KeyFrame::GetRightPose_() {
|
||||
unique_lock<mutex> lock(mMutexPose);
|
||||
|
||||
cv::Matx33f Rrl = Tlr_.get_minor<3,3>(0,0).t();
|
||||
cv::Matx33f Rlw = Tcw_.get_minor<3,3>(0,0);
|
||||
cv::Matx33f Rrw = Rrl * Rlw;
|
||||
|
||||
cv::Matx31f tlw = Tcw_.get_minor<3,1>(0,3);
|
||||
cv::Matx31f trl = - Rrl * Tlr_.get_minor<3,1>(0,3);
|
||||
|
||||
cv::Matx31f trw = Rrl * tlw + trl;
|
||||
|
||||
cv::Matx44f Trw{Rrw(0,0),Rrw(0,1),Rrw(0,2),trw(0),
|
||||
Rrw(1,0),Rrw(1,1),Rrw(1,2),trw(1),
|
||||
Rrw(2,0),Rrw(2,1),Rrw(2,2),trw(2),
|
||||
0.f,0.f,0.f,1.f};
|
||||
|
||||
return Trw;
|
||||
}
|
||||
|
||||
cv::Matx31f KeyFrame::GetRightCameraCenter_() {
|
||||
unique_lock<mutex> lock(mMutexPose);
|
||||
cv::Matx33f Rwl = Tcw_.get_minor<3,3>(0,0).t();
|
||||
cv::Matx31f tlr = Tlr_.get_minor<3,1>(0,3);
|
||||
|
||||
cv::Matx31f twr = Rwl * tlr + Ow_;
|
||||
|
||||
return twr;
|
||||
}
|
||||
|
||||
cv::Matx31f KeyFrame::UnprojectStereo_(int i) {
|
||||
const float z = mvDepth[i];
|
||||
if(z>0)
|
||||
{
|
||||
const float u = mvKeys[i].pt.x;
|
||||
const float v = mvKeys[i].pt.y;
|
||||
const float x = (u-cx)*z*invfx;
|
||||
const float y = (v-cy)*z*invfy;
|
||||
cv::Matx31f x3Dc(x,y,z);
|
||||
|
||||
unique_lock<mutex> lock(mMutexPose);
|
||||
return Twc_.get_minor<3,3>(0,0) * x3Dc + Twc_.get_minor<3,1>(0,3);
|
||||
}
|
||||
else
|
||||
return cv::Matx31f::zeros();
|
||||
}
|
||||
|
||||
cv::Matx44f KeyFrame::GetPose_()
|
||||
{
|
||||
unique_lock<mutex> lock(mMutexPose);
|
||||
return Tcw_;
|
||||
}
|
||||
|
||||
|
||||
|
||||
} //namespace ORB_SLAM
|
||||
|
|
|
@ -934,36 +934,6 @@ vector<KeyFrame*> KeyFrameDatabase::DetectRelocalizationCandidates(Frame *F, Map
|
|||
return vpRelocCandidates;
|
||||
}
|
||||
|
||||
void KeyFrameDatabase::PreSave()
|
||||
{
|
||||
//Save the information about the inverted index of KF to node
|
||||
mvBackupInvertedFileId.resize(mvInvertedFile.size());
|
||||
for(int i = 0, numEl = mvInvertedFile.size(); i < numEl; ++i)
|
||||
{
|
||||
for(std::list<KeyFrame*>::const_iterator it = mvInvertedFile[i].begin(), end = mvInvertedFile[i].end(); it != end; ++it)
|
||||
{
|
||||
mvBackupInvertedFileId[i].push_back((*it)->mnId);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
void KeyFrameDatabase::PostLoad(map<long unsigned int, KeyFrame*> mpKFid)
|
||||
{
|
||||
mvInvertedFile.clear();
|
||||
mvInvertedFile.resize(mpVoc->size());
|
||||
for(unsigned int i = 0; i < mvBackupInvertedFileId.size(); ++i)
|
||||
{
|
||||
for(long unsigned int KFid : mvBackupInvertedFileId[i])
|
||||
{
|
||||
if(mpKFid.find(KFid) != mpKFid.end())
|
||||
{
|
||||
mvInvertedFile[i].push_back(mpKFid[KFid]);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
void KeyFrameDatabase::SetORBVocabulary(ORBVocabulary* pORBVoc)
|
||||
{
|
||||
ORBVocabulary** ptr;
|
||||
|
|
|
@ -22,6 +22,7 @@
|
|||
#include "ORBmatcher.h"
|
||||
#include "Optimizer.h"
|
||||
#include "Converter.h"
|
||||
#include "Config.h"
|
||||
|
||||
#include<mutex>
|
||||
#include<chrono>
|
||||
|
@ -32,7 +33,7 @@ namespace ORB_SLAM3
|
|||
LocalMapping::LocalMapping(System* pSys, Atlas *pAtlas, const float bMonocular, bool bInertial, const string &_strSeqName):
|
||||
mpSystem(pSys), mbMonocular(bMonocular), mbInertial(bInertial), mbResetRequested(false), mbResetRequestedActiveMap(false), mbFinishRequested(false), mbFinished(true), mpAtlas(pAtlas), bInitializing(false),
|
||||
mbAbortBA(false), mbStopped(false), mbStopRequested(false), mbNotStop(false), mbAcceptKeyFrames(true),
|
||||
mbNewInit(false), mIdxInit(0), mScale(1.0), mInitSect(0), mbNotBA1(true), mbNotBA2(true), mIdxIteration(0), infoInertial(Eigen::MatrixXd::Zero(9,9))
|
||||
mbNewInit(false), mIdxInit(0), mScale(1.0), mInitSect(0), mbNotBA1(true), mbNotBA2(true), infoInertial(Eigen::MatrixXd::Zero(9,9))
|
||||
{
|
||||
/*
|
||||
* mbStopRequested: 外部线程调用,为true,表示外部线程请求停止 local mapping
|
||||
|
@ -53,15 +54,11 @@ LocalMapping::LocalMapping(System* pSys, Atlas *pAtlas, const float bMonocular,
|
|||
mNumLM = 0;
|
||||
mNumKFCulling=0;
|
||||
|
||||
//DEBUG: times and data from LocalMapping in each frame
|
||||
#ifdef REGISTER_TIMES
|
||||
nLBA_exec = 0;
|
||||
nLBA_abort = 0;
|
||||
#endif
|
||||
|
||||
strSequence = "";//_strSeqName;
|
||||
|
||||
//f_lm.open("localMapping_times" + strSequence + ".txt");
|
||||
/*f_lm.open("localMapping_times.txt");
|
||||
|
||||
f_lm << "# Timestamp KF, Num CovKFs, Num KFs, Num RecentMPs, Num MPs, processKF, MPCulling, CreateMP, SearchNeigh, BA, KFCulling, [numFixKF_LBA]" << endl;
|
||||
f_lm << fixed;*/
|
||||
}
|
||||
|
||||
// 设置回环检测线程句柄
|
||||
|
@ -94,31 +91,34 @@ void LocalMapping::Run()
|
|||
// 等待处理的关键帧列表不为空 并且imu正常
|
||||
if(CheckNewKeyFrames() && !mbBadImu)
|
||||
{
|
||||
// std::cout << "LM" << std::endl;
|
||||
std::chrono::steady_clock::time_point t0 = std::chrono::steady_clock::now();
|
||||
|
||||
#ifdef REGISTER_TIMES
|
||||
double timeLBA_ms = 0;
|
||||
double timeKFCulling_ms = 0;
|
||||
|
||||
std::chrono::steady_clock::time_point time_StartProcessKF = std::chrono::steady_clock::now();
|
||||
#endif
|
||||
// BoW conversion and insertion in Map
|
||||
// Step 2 处理列表中的关键帧,包括计算BoW、更新观测、描述子、共视图,插入到地图等
|
||||
ProcessNewKeyFrame();
|
||||
std::chrono::steady_clock::time_point t1 = std::chrono::steady_clock::now();
|
||||
#ifdef REGISTER_TIMES
|
||||
std::chrono::steady_clock::time_point time_EndProcessKF = std::chrono::steady_clock::now();
|
||||
|
||||
double timeProcessKF = std::chrono::duration_cast<std::chrono::duration<double,std::milli> >(time_EndProcessKF - time_StartProcessKF).count();
|
||||
vdKFInsert_ms.push_back(timeProcessKF);
|
||||
#endif
|
||||
// Check recent MapPoints
|
||||
// Step 3 根据地图点的观测情况剔除质量不好的地图点
|
||||
MapPointCulling();
|
||||
std::chrono::steady_clock::time_point t2 = std::chrono::steady_clock::now();
|
||||
#ifdef REGISTER_TIMES
|
||||
std::chrono::steady_clock::time_point time_EndMPCulling = std::chrono::steady_clock::now();
|
||||
|
||||
double timeMPCulling = std::chrono::duration_cast<std::chrono::duration<double,std::milli> >(time_EndMPCulling - time_EndProcessKF).count();
|
||||
vdMPCulling_ms.push_back(timeMPCulling);
|
||||
#endif
|
||||
// Triangulate new MapPoints
|
||||
// Step 4 当前关键帧与相邻关键帧通过三角化产生新的地图点,使得跟踪更稳
|
||||
CreateNewMapPoints();
|
||||
std::chrono::steady_clock::time_point t3 = std::chrono::steady_clock::now();
|
||||
|
||||
// Save here:
|
||||
// # Cov KFs
|
||||
// # tot Kfs
|
||||
// # recent added MPs
|
||||
// # tot MPs
|
||||
// # localMPs in LBA
|
||||
// # fixedKFs in LBA
|
||||
|
||||
mbAbortBA = false;
|
||||
// 已经处理完队列中的最后的一个关键帧
|
||||
|
@ -129,20 +129,19 @@ void LocalMapping::Run()
|
|||
SearchInNeighbors();
|
||||
}
|
||||
|
||||
std::chrono::steady_clock::time_point t4 = std::chrono::steady_clock::now();
|
||||
std::chrono::steady_clock::time_point t5 = t4, t6 = t4;
|
||||
// mbAbortBA = false;
|
||||
#ifdef REGISTER_TIMES
|
||||
std::chrono::steady_clock::time_point time_EndMPCreation = std::chrono::steady_clock::now();
|
||||
|
||||
//DEBUG--
|
||||
/*f_lm << setprecision(0);
|
||||
f_lm << mpCurrentKeyFrame->mTimeStamp*1e9 << ",";
|
||||
f_lm << mpCurrentKeyFrame->GetVectorCovisibleKeyFrames().size() << ",";
|
||||
f_lm << mpCurrentKeyFrame->GetMap()->GetAllKeyFrames().size() << ",";
|
||||
f_lm << mlpRecentAddedMapPoints.size() << ",";
|
||||
f_lm << mpCurrentKeyFrame->GetMap()->GetAllMapPoints().size() << ",";*/
|
||||
//--
|
||||
double timeMPCreation = std::chrono::duration_cast<std::chrono::duration<double,std::milli> >(time_EndMPCreation - time_EndMPCulling).count();
|
||||
vdMPCreation_ms.push_back(timeMPCreation);
|
||||
#endif
|
||||
|
||||
bool b_doneLBA = false;
|
||||
int num_FixedKF_BA = 0;
|
||||
// 已经处理完队列中的最后的一个关键帧,并且闭环检测没有请求停止LocalMapping
|
||||
int num_OptKF_BA = 0;
|
||||
int num_MPs_BA = 0;
|
||||
int num_edges_BA = 0;
|
||||
// 已经处理完队列中的最后的一个关键帧,并且闭环检测没有请求停止LocalMapping
|
||||
if(!CheckNewKeyFrames() && !stopRequested())
|
||||
{
|
||||
// 地图中关键帧数目大于2个
|
||||
|
@ -172,21 +171,39 @@ void LocalMapping::Run()
|
|||
}
|
||||
// 条件---------1.1、跟踪成功的内点数目大于75-----1.2、并且是单目--或--2.1、跟踪成功的内点数目大于100-----2.2、并且不是单目
|
||||
bool bLarge = ((mpTracker->GetMatchesInliers()>75)&&mbMonocular)||((mpTracker->GetMatchesInliers()>100)&&!mbMonocular);
|
||||
// BA优化局部地图IMU
|
||||
Optimizer::LocalInertialBA(mpCurrentKeyFrame, &mbAbortBA, mpCurrentKeyFrame->GetMap(), bLarge, !mpCurrentKeyFrame->GetMap()->GetIniertialBA2());
|
||||
Optimizer::LocalInertialBA(mpCurrentKeyFrame, &mbAbortBA, mpCurrentKeyFrame->GetMap(),num_FixedKF_BA,num_OptKF_BA,num_MPs_BA,num_edges_BA, bLarge, !mpCurrentKeyFrame->GetMap()->GetIniertialBA2());
|
||||
b_doneLBA = true;
|
||||
}
|
||||
else
|
||||
{
|
||||
// Step 6.2 不是IMU模式或者当前关键帧所在的地图还未完成IMU初始化
|
||||
std::chrono::steady_clock::time_point begin = std::chrono::steady_clock::now();
|
||||
// 局部地图BA,不包括IMU数据
|
||||
// Step 6.2 不是IMU模式或者当前关键帧所在的地图还未完成IMU初始化
|
||||
// 局部地图BA,不包括IMU数据
|
||||
// 注意这里的第二个参数是按地址传递的,当这里的 mbAbortBA 状态发生变化时,能够及时执行/停止BA
|
||||
Optimizer::LocalBundleAdjustment(mpCurrentKeyFrame,&mbAbortBA, mpCurrentKeyFrame->GetMap(),num_FixedKF_BA);
|
||||
std::chrono::steady_clock::time_point end = std::chrono::steady_clock::now();
|
||||
Optimizer::LocalBundleAdjustment(mpCurrentKeyFrame,&mbAbortBA, mpCurrentKeyFrame->GetMap(),num_FixedKF_BA,num_OptKF_BA,num_MPs_BA,num_edges_BA);
|
||||
b_doneLBA = true;
|
||||
}
|
||||
|
||||
}
|
||||
#ifdef REGISTER_TIMES
|
||||
std::chrono::steady_clock::time_point time_EndLBA = std::chrono::steady_clock::now();
|
||||
|
||||
if(b_doneLBA)
|
||||
{
|
||||
timeLBA_ms = std::chrono::duration_cast<std::chrono::duration<double,std::milli> >(time_EndLBA - time_EndMPCreation).count();
|
||||
vdLBASync_ms.push_back(timeLBA_ms);
|
||||
|
||||
nLBA_exec += 1;
|
||||
if(mbAbortBA)
|
||||
{
|
||||
nLBA_abort += 1;
|
||||
}
|
||||
vnLBA_edges.push_back(num_edges_BA);
|
||||
vnLBA_KFopt.push_back(num_OptKF_BA);
|
||||
vnLBA_KFfixed.push_back(num_FixedKF_BA);
|
||||
vnLBA_MPs.push_back(num_MPs_BA);
|
||||
}
|
||||
|
||||
t5 = std::chrono::steady_clock::now();
|
||||
#endif
|
||||
|
||||
// Initialize IMU here
|
||||
// Step 7 当前关键帧所在地图的IMU初始化
|
||||
|
@ -204,8 +221,13 @@ void LocalMapping::Run()
|
|||
// 冗余的判定:该关键帧的90%的地图点可以被其它关键帧观测到
|
||||
KeyFrameCulling();
|
||||
|
||||
t6 = std::chrono::steady_clock::now();
|
||||
// Step 9 如果累计时间差小于100s 并且 是IMU模式,进行VIBA
|
||||
#ifdef REGISTER_TIMES
|
||||
std::chrono::steady_clock::time_point time_EndKFCulling = std::chrono::steady_clock::now();
|
||||
|
||||
timeKFCulling_ms = std::chrono::duration_cast<std::chrono::duration<double,std::milli> >(time_EndKFCulling - time_EndLBA).count();
|
||||
vdKFCullingSync_ms.push_back(timeKFCulling_ms);
|
||||
#endif
|
||||
// Step 9 如果累计时间差小于100s 并且 是IMU模式,进行VIBA
|
||||
if ((mTinit<100.0f) && mbInertial)
|
||||
{
|
||||
// Step 9.1 根据条件判断是否进行VIBA1
|
||||
|
@ -262,31 +284,21 @@ void LocalMapping::Run()
|
|||
}
|
||||
}
|
||||
|
||||
std::chrono::steady_clock::time_point t7 = std::chrono::steady_clock::now();
|
||||
#ifdef REGISTER_TIMES
|
||||
vdLBA_ms.push_back(timeLBA_ms);
|
||||
vdKFCulling_ms.push_back(timeKFCulling_ms);
|
||||
#endif
|
||||
|
||||
// Step 10 将当前帧加入到闭环检测队列中
|
||||
mpLoopCloser->InsertKeyFrame(mpCurrentKeyFrame);
|
||||
std::chrono::steady_clock::time_point t8 = std::chrono::steady_clock::now();
|
||||
|
||||
double t_procKF = std::chrono::duration_cast<std::chrono::duration<double,std::milli> >(t1 - t0).count();
|
||||
double t_MPcull = std::chrono::duration_cast<std::chrono::duration<double,std::milli> >(t2 - t1).count();
|
||||
double t_CheckMP = std::chrono::duration_cast<std::chrono::duration<double,std::milli> >(t3 - t2).count();
|
||||
double t_searchNeigh = std::chrono::duration_cast<std::chrono::duration<double,std::milli> >(t4 - t3).count();
|
||||
double t_Opt = std::chrono::duration_cast<std::chrono::duration<double,std::milli> >(t5 - t4).count();
|
||||
double t_KF_cull = std::chrono::duration_cast<std::chrono::duration<double,std::milli> >(t6 - t5).count();
|
||||
double t_Insert = std::chrono::duration_cast<std::chrono::duration<double,std::milli> >(t8 - t7).count();
|
||||
|
||||
//DEBUG--
|
||||
/*f_lm << setprecision(6);
|
||||
f_lm << t_procKF << ",";
|
||||
f_lm << t_MPcull << ",";
|
||||
f_lm << t_CheckMP << ",";
|
||||
f_lm << t_searchNeigh << ",";
|
||||
f_lm << t_Opt << ",";
|
||||
f_lm << t_KF_cull << ",";
|
||||
f_lm << setprecision(0) << num_FixedKF_BA << "\n";*/
|
||||
//--
|
||||
#ifdef REGISTER_TIMES
|
||||
std::chrono::steady_clock::time_point time_EndLocalMap = std::chrono::steady_clock::now();
|
||||
|
||||
double timeLocalMap = std::chrono::duration_cast<std::chrono::duration<double,std::milli> >(time_EndLocalMap - time_StartProcessKF).count();
|
||||
vdLMTotal_ms.push_back(timeLocalMap);
|
||||
#endif
|
||||
}
|
||||
else if(Stop() && !mbBadImu) // 当要终止当前线程的时候
|
||||
{
|
||||
|
@ -493,15 +505,16 @@ void LocalMapping::CreateNewMapPoints()
|
|||
ORBmatcher matcher(th,false);
|
||||
|
||||
// 取出当前帧从世界坐标系到相机坐标系的变换矩阵
|
||||
cv::Mat Rcw1 = mpCurrentKeyFrame->GetRotation();
|
||||
cv::Mat Rwc1 = Rcw1.t();
|
||||
cv::Mat tcw1 = mpCurrentKeyFrame->GetTranslation();
|
||||
cv::Mat Tcw1(3,4,CV_32F);
|
||||
Rcw1.copyTo(Tcw1.colRange(0,3));
|
||||
tcw1.copyTo(Tcw1.col(3));
|
||||
auto Rcw1 = mpCurrentKeyFrame->GetRotation_();
|
||||
auto Rwc1 = Rcw1.t();
|
||||
auto tcw1 = mpCurrentKeyFrame->GetTranslation_();
|
||||
cv::Matx44f Tcw1{Rcw1(0,0),Rcw1(0,1),Rcw1(0,2),tcw1(0),
|
||||
Rcw1(1,0),Rcw1(1,1),Rcw1(1,2),tcw1(1),
|
||||
Rcw1(2,0),Rcw1(2,1),Rcw1(2,2),tcw1(2),
|
||||
0.f,0.f,0.f,1.f};
|
||||
|
||||
// 得到当前关键帧(左目)光心在世界坐标系中的坐标、内参
|
||||
cv::Mat Ow1 = mpCurrentKeyFrame->GetCameraCenter();
|
||||
auto Ow1 = mpCurrentKeyFrame->GetCameraCenter_();
|
||||
|
||||
const float &fx1 = mpCurrentKeyFrame->fx;
|
||||
const float &fy1 = mpCurrentKeyFrame->fy;
|
||||
|
@ -527,9 +540,9 @@ void LocalMapping::CreateNewMapPoints()
|
|||
|
||||
// Check first that baseline is not too short
|
||||
// 邻接的关键帧光心在世界坐标系中的坐标
|
||||
cv::Mat Ow2 = pKF2->GetCameraCenter();
|
||||
auto Ow2 = pKF2->GetCameraCenter_();
|
||||
// 基线向量,两个关键帧间的相机位移
|
||||
cv::Mat vBaseline = Ow2-Ow1;
|
||||
auto vBaseline = Ow2-Ow1;
|
||||
// 基线长度
|
||||
const float baseline = cv::norm(vBaseline);
|
||||
|
||||
|
@ -555,7 +568,7 @@ void LocalMapping::CreateNewMapPoints()
|
|||
|
||||
// Compute Fundamental Matrix
|
||||
// Step 4:根据两个关键帧的位姿计算它们之间的基础矩阵
|
||||
cv::Mat F12 = ComputeF12(mpCurrentKeyFrame,pKF2);
|
||||
auto F12 = ComputeF12_(mpCurrentKeyFrame,pKF2);
|
||||
|
||||
// Search matches that fullfil epipolar constraint
|
||||
// Step 5:通过BoW对两关键帧的未匹配的特征点快速匹配,用极线约束抑制离群点,生成新的匹配点对
|
||||
|
@ -563,14 +576,16 @@ void LocalMapping::CreateNewMapPoints()
|
|||
bool bCoarse = mbInertial &&
|
||||
((!mpCurrentKeyFrame->GetMap()->GetIniertialBA2() && mpCurrentKeyFrame->GetMap()->GetIniertialBA1())||
|
||||
mpTracker->mState==Tracking::RECENTLY_LOST);
|
||||
matcher.SearchForTriangulation(mpCurrentKeyFrame,pKF2,F12,vMatchedIndices,false,bCoarse);
|
||||
|
||||
cv::Mat Rcw2 = pKF2->GetRotation();
|
||||
cv::Mat Rwc2 = Rcw2.t();
|
||||
cv::Mat tcw2 = pKF2->GetTranslation();
|
||||
cv::Mat Tcw2(3,4,CV_32F);
|
||||
Rcw2.copyTo(Tcw2.colRange(0,3));
|
||||
tcw2.copyTo(Tcw2.col(3));
|
||||
matcher.SearchForTriangulation_(mpCurrentKeyFrame,pKF2,F12,vMatchedIndices,false,bCoarse);
|
||||
|
||||
auto Rcw2 = pKF2->GetRotation_();
|
||||
auto Rwc2 = Rcw2.t();
|
||||
auto tcw2 = pKF2->GetTranslation_();
|
||||
cv::Matx44f Tcw2{Rcw2(0,0),Rcw2(0,1),Rcw2(0,2),tcw2(0),
|
||||
Rcw2(1,0),Rcw2(1,1),Rcw2(1,2),tcw2(1),
|
||||
Rcw2(2,0),Rcw2(2,1),Rcw2(2,2),tcw2(2),
|
||||
0.f,0.f,0.f,1.f};
|
||||
|
||||
const float &fx2 = pKF2->fx;
|
||||
const float &fy2 = pKF2->fy;
|
||||
|
@ -612,65 +627,65 @@ void LocalMapping::CreateNewMapPoints()
|
|||
|
||||
if(mpCurrentKeyFrame->mpCamera2 && pKF2->mpCamera2){
|
||||
if(bRight1 && bRight2){
|
||||
Rcw1 = mpCurrentKeyFrame->GetRightRotation();
|
||||
Rcw1 = mpCurrentKeyFrame->GetRightRotation_();
|
||||
Rwc1 = Rcw1.t();
|
||||
tcw1 = mpCurrentKeyFrame->GetRightTranslation();
|
||||
Tcw1 = mpCurrentKeyFrame->GetRightPose();
|
||||
Ow1 = mpCurrentKeyFrame->GetRightCameraCenter();
|
||||
tcw1 = mpCurrentKeyFrame->GetRightTranslation_();
|
||||
Tcw1 = mpCurrentKeyFrame->GetRightPose_();
|
||||
Ow1 = mpCurrentKeyFrame->GetRightCameraCenter_();
|
||||
|
||||
Rcw2 = pKF2->GetRightRotation();
|
||||
Rcw2 = pKF2->GetRightRotation_();
|
||||
Rwc2 = Rcw2.t();
|
||||
tcw2 = pKF2->GetRightTranslation();
|
||||
Tcw2 = pKF2->GetRightPose();
|
||||
Ow2 = pKF2->GetRightCameraCenter();
|
||||
tcw2 = pKF2->GetRightTranslation_();
|
||||
Tcw2 = pKF2->GetRightPose_();
|
||||
Ow2 = pKF2->GetRightCameraCenter_();
|
||||
|
||||
pCamera1 = mpCurrentKeyFrame->mpCamera2;
|
||||
pCamera2 = pKF2->mpCamera2;
|
||||
}
|
||||
else if(bRight1 && !bRight2){
|
||||
Rcw1 = mpCurrentKeyFrame->GetRightRotation();
|
||||
Rcw1 = mpCurrentKeyFrame->GetRightRotation_();
|
||||
Rwc1 = Rcw1.t();
|
||||
tcw1 = mpCurrentKeyFrame->GetRightTranslation();
|
||||
Tcw1 = mpCurrentKeyFrame->GetRightPose();
|
||||
Ow1 = mpCurrentKeyFrame->GetRightCameraCenter();
|
||||
tcw1 = mpCurrentKeyFrame->GetRightTranslation_();
|
||||
Tcw1 = mpCurrentKeyFrame->GetRightPose_();
|
||||
Ow1 = mpCurrentKeyFrame->GetRightCameraCenter_();
|
||||
|
||||
Rcw2 = pKF2->GetRotation();
|
||||
Rcw2 = pKF2->GetRotation_();
|
||||
Rwc2 = Rcw2.t();
|
||||
tcw2 = pKF2->GetTranslation();
|
||||
Tcw2 = pKF2->GetPose();
|
||||
Ow2 = pKF2->GetCameraCenter();
|
||||
tcw2 = pKF2->GetTranslation_();
|
||||
Tcw2 = pKF2->GetPose_();
|
||||
Ow2 = pKF2->GetCameraCenter_();
|
||||
|
||||
pCamera1 = mpCurrentKeyFrame->mpCamera2;
|
||||
pCamera2 = pKF2->mpCamera;
|
||||
}
|
||||
else if(!bRight1 && bRight2){
|
||||
Rcw1 = mpCurrentKeyFrame->GetRotation();
|
||||
Rcw1 = mpCurrentKeyFrame->GetRotation_();
|
||||
Rwc1 = Rcw1.t();
|
||||
tcw1 = mpCurrentKeyFrame->GetTranslation();
|
||||
Tcw1 = mpCurrentKeyFrame->GetPose();
|
||||
Ow1 = mpCurrentKeyFrame->GetCameraCenter();
|
||||
tcw1 = mpCurrentKeyFrame->GetTranslation_();
|
||||
Tcw1 = mpCurrentKeyFrame->GetPose_();
|
||||
Ow1 = mpCurrentKeyFrame->GetCameraCenter_();
|
||||
|
||||
Rcw2 = pKF2->GetRightRotation();
|
||||
Rcw2 = pKF2->GetRightRotation_();
|
||||
Rwc2 = Rcw2.t();
|
||||
tcw2 = pKF2->GetRightTranslation();
|
||||
Tcw2 = pKF2->GetRightPose();
|
||||
Ow2 = pKF2->GetRightCameraCenter();
|
||||
tcw2 = pKF2->GetRightTranslation_();
|
||||
Tcw2 = pKF2->GetRightPose_();
|
||||
Ow2 = pKF2->GetRightCameraCenter_();
|
||||
|
||||
pCamera1 = mpCurrentKeyFrame->mpCamera;
|
||||
pCamera2 = pKF2->mpCamera2;
|
||||
}
|
||||
else{
|
||||
Rcw1 = mpCurrentKeyFrame->GetRotation();
|
||||
Rcw1 = mpCurrentKeyFrame->GetRotation_();
|
||||
Rwc1 = Rcw1.t();
|
||||
tcw1 = mpCurrentKeyFrame->GetTranslation();
|
||||
Tcw1 = mpCurrentKeyFrame->GetPose();
|
||||
Ow1 = mpCurrentKeyFrame->GetCameraCenter();
|
||||
tcw1 = mpCurrentKeyFrame->GetTranslation_();
|
||||
Tcw1 = mpCurrentKeyFrame->GetPose_();
|
||||
Ow1 = mpCurrentKeyFrame->GetCameraCenter_();
|
||||
|
||||
Rcw2 = pKF2->GetRotation();
|
||||
Rcw2 = pKF2->GetRotation_();
|
||||
Rwc2 = Rcw2.t();
|
||||
tcw2 = pKF2->GetTranslation();
|
||||
Tcw2 = pKF2->GetPose();
|
||||
Ow2 = pKF2->GetCameraCenter();
|
||||
tcw2 = pKF2->GetTranslation_();
|
||||
Tcw2 = pKF2->GetPose_();
|
||||
Ow2 = pKF2->GetCameraCenter_();
|
||||
|
||||
pCamera1 = mpCurrentKeyFrame->mpCamera;
|
||||
pCamera2 = pKF2->mpCamera;
|
||||
|
@@ -680,11 +695,11 @@ void LocalMapping::CreateNewMapPoints()
// Check parallax between rays
// Step 6.2: back-project the matched keypoints to obtain the parallax angle
// Back-projecting a keypoint yields an unnormalized direction vector in each camera frame, collinear with the back-projection ray of the point
cv::Mat xn1 = pCamera1->unprojectMat(kp1.pt);
cv::Mat xn2 = pCamera2->unprojectMat(kp2.pt);
auto xn1 = pCamera1->unprojectMat_(kp1.pt);
auto xn2 = pCamera2->unprojectMat_(kp2.pt);
// Rotate from the camera frame to the world frame (this gives a vector along the back-projection ray expressed in world coordinates, still only a direction), then take the cosine of the parallax angle
cv::Mat ray1 = Rwc1*xn1;
cv::Mat ray2 = Rwc2*xn2;
auto ray1 = Rwc1*xn1;
auto ray2 = Rwc2*xn2;
// Standard formula for the angle between two vectors
const float cosParallaxRays = ray1.dot(ray2)/(cv::norm(ray1)*cv::norm(ray2));
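For completeness (aside, not part of the diff): the line above is simply

    cos(theta) = (r_1 . r_2) / (||r_1|| * ||r_2||),

and since cos(1 deg) ~= 0.99985, the 0.9998 threshold used in the triangulation test just below
corresponds to requiring roughly one degree of parallax between the two rays.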
@@ -706,68 +721,70 @@ void LocalMapping::CreateNewMapPoints()

// Parallax angle of the stereo observation
cosParallaxStereo = min(cosParallaxStereo1,cosParallaxStereo2);

// Step 6.4: recover the 3D point by triangulation
cv::Mat x3D;
// cosParallaxRays>0 && (bStereo1 || bStereo2 || cosParallaxRays<0.9998) means the parallax angle is acceptable; 0.9998 corresponds to 1 degree
// cosParallaxRays < cosParallaxStereo means the parallax angle is very small
// ? With small parallax the 3D point is recovered by triangulation; with large parallax it is recovered from the stereo measurement (stereo/depth must be valid)
// Reference: https://github.com/raulmur/ORB_SLAM2/issues/345
// Step 6.4: recover the 3D point by triangulation
cv::Matx31f x3D;
bool bEstimated = false;
if(cosParallaxRays<cosParallaxStereo && cosParallaxRays>0 && (bStereo1 || bStereo2 ||
(cosParallaxRays<0.9998 && mbInertial) || (cosParallaxRays<0.9998 && !mbInertial)))
{
// Linear Triangulation Method
// See the Triangulate function in Initializer.cc; the implementation is the same, except the projection matrices are replaced by the full transformation matrices
cv::Mat A(4,4,CV_32F);
A.row(0) = xn1.at<float>(0)*Tcw1.row(2)-Tcw1.row(0);
A.row(1) = xn1.at<float>(1)*Tcw1.row(2)-Tcw1.row(1);
A.row(2) = xn2.at<float>(0)*Tcw2.row(2)-Tcw2.row(0);
A.row(3) = xn2.at<float>(1)*Tcw2.row(2)-Tcw2.row(1);
cv::Matx14f A_r0 = xn1(0) * Tcw1.row(2) - Tcw1.row(0);
cv::Matx14f A_r1 = xn1(1) * Tcw1.row(2) - Tcw1.row(1);
cv::Matx14f A_r2 = xn2(0) * Tcw2.row(2) - Tcw2.row(0);
cv::Matx14f A_r3 = xn2(1) * Tcw2.row(2) - Tcw2.row(1);
cv::Matx44f A{A_r0(0), A_r0(1), A_r0(2), A_r0(3),
A_r1(0), A_r1(1), A_r1(2), A_r1(3),
A_r2(0), A_r2(1), A_r2(2), A_r2(3),
A_r3(0), A_r3(1), A_r3(2), A_r3(3)};

cv::Mat w,u,vt;
cv::Matx44f u,vt;
cv::Matx41f w;
cv::SVD::compute(A,w,u,vt,cv::SVD::MODIFY_A| cv::SVD::FULL_UV);

x3D = vt.row(3).t();
// Check before normalization
if(x3D.at<float>(3)==0)
cv::Matx41f x3D_h = vt.row(3).t();

if(x3D_h(3)==0)
continue;
// Normalize the homogeneous coordinates and keep the first three components as the Euclidean coordinates
// Euclidean coordinates
x3D = x3D.rowRange(0,3)/x3D.at<float>(3);
x3D = x3D_h.get_minor<3,1>(0,0) / x3D_h(3);
bEstimated = true;
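// Editor's note (aside, not part of the diff): the 4x4 system built above is the standard DLT.
// Each view contributes two rows derived from x_i x (T_i * X) = 0, using the unprojected
// direction xn_i and the top 3x4 block of the world-to-camera transform T_i:
//   xn_i(0) * T_i.row(2) - T_i.row(0)   and   xn_i(1) * T_i.row(2) - T_i.row(1).
// The homogeneous point is the right singular vector of A with the smallest singular value,
// i.e. the last row of vt returned by cv::SVD::compute, which is then dehomogenized above.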
}
else if(bStereo1 && cosParallaxStereo1<cosParallaxStereo2)
|
||||
{
|
||||
// 如果是双目,用视差角更大的那个双目信息来恢复,直接用已知3D点反投影了
|
||||
x3D = mpCurrentKeyFrame->UnprojectStereo(idx1);
|
||||
x3D = mpCurrentKeyFrame->UnprojectStereo_(idx1);
|
||||
bEstimated = true;
|
||||
}
|
||||
else if(bStereo2 && cosParallaxStereo2<cosParallaxStereo1)
|
||||
{
|
||||
x3D = pKF2->UnprojectStereo(idx2);
|
||||
x3D = pKF2->UnprojectStereo_(idx2);
|
||||
bEstimated = true;
|
||||
}
|
||||
else
|
||||
{
|
||||
continue; //No stereo and very low parallax
|
||||
}
|
||||
// 为方便后续计算,转换成为了行向量
|
||||
cv::Mat x3Dt = x3D.t();
|
||||
cv::Matx13f x3Dt = x3D.t();
|
||||
|
||||
if(x3Dt.empty()) continue;
|
||||
if(!bEstimated) continue;
|
||||
//Check triangulation in front of cameras
|
||||
// Step 6.5:检测生成的3D点是否在相机前方,不在的话就放弃这个点
|
||||
float z1 = Rcw1.row(2).dot(x3Dt)+tcw1.at<float>(2);
|
||||
float z1 = Rcw1.row(2).dot(x3Dt)+tcw1(2);
|
||||
if(z1<=0)
|
||||
continue;
|
||||
|
||||
float z2 = Rcw2.row(2).dot(x3Dt)+tcw2.at<float>(2);
|
||||
float z2 = Rcw2.row(2).dot(x3Dt)+tcw2(2);
|
||||
if(z2<=0)
|
||||
continue;
|
||||
|
||||
//Check reprojection error in first keyframe
|
||||
// Step 6.6:计算3D点在当前关键帧下的重投影误差
|
||||
const float &sigmaSquare1 = mpCurrentKeyFrame->mvLevelSigma2[kp1.octave];
|
||||
const float x1 = Rcw1.row(0).dot(x3Dt)+tcw1.at<float>(0);
|
||||
const float y1 = Rcw1.row(1).dot(x3Dt)+tcw1.at<float>(1);
|
||||
const float x1 = Rcw1.row(0).dot(x3Dt)+tcw1(0);
|
||||
const float y1 = Rcw1.row(1).dot(x3Dt)+tcw1(1);
|
||||
const float invz1 = 1.0/z1;
|
||||
|
||||
if(!bStereo1)
|
||||
|
@ -799,8 +816,8 @@ void LocalMapping::CreateNewMapPoints()
|
|||
//Check reprojection error in second keyframe
|
||||
// 计算3D点在另一个关键帧下的重投影误差,操作同上
|
||||
const float sigmaSquare2 = pKF2->mvLevelSigma2[kp2.octave];
|
||||
const float x2 = Rcw2.row(0).dot(x3Dt)+tcw2.at<float>(0);
|
||||
const float y2 = Rcw2.row(1).dot(x3Dt)+tcw2.at<float>(1);
|
||||
const float x2 = Rcw2.row(0).dot(x3Dt)+tcw2(0);
|
||||
const float y2 = Rcw2.row(1).dot(x3Dt)+tcw2(1);
|
||||
const float invz2 = 1.0/z2;
|
||||
if(!bStereo2)
|
||||
{
|
||||
|
@ -826,10 +843,10 @@ void LocalMapping::CreateNewMapPoints()
|
|||
// Step 6.7:检查尺度连续性
|
||||
|
||||
// 世界坐标系下,3D点与相机间的向量,方向由相机指向3D点
|
||||
cv::Mat normal1 = x3D-Ow1;
|
||||
auto normal1 = x3D-Ow1;
|
||||
float dist1 = cv::norm(normal1);
|
||||
|
||||
cv::Mat normal2 = x3D-Ow2;
|
||||
auto normal2 = x3D-Ow2;
|
||||
float dist2 = cv::norm(normal2);
|
||||
|
||||
if(dist1==0 || dist2==0)
|
||||
|
@ -847,8 +864,9 @@ void LocalMapping::CreateNewMapPoints()
|
|||
continue;
|
||||
|
||||
// Triangulation is succesfull
|
||||
// Step 6.8:三角化生成3D点成功,构造成MapPoint
|
||||
MapPoint* pMP = new MapPoint(x3D,mpCurrentKeyFrame,mpAtlas->GetCurrentMap());
|
||||
// Step 6.8:三角化生成3D点成功,构造成MapPoint
|
||||
cv::Mat x3D_(x3D);
|
||||
MapPoint* pMP = new MapPoint(x3D_,mpCurrentKeyFrame,mpAtlas->GetCurrentMap());
|
||||
|
||||
// Step 6.9:为该MapPoint添加属性:
|
||||
// a.观测到该MapPoint的关键帧
|
||||
|
@@ -1044,8 +1062,26 @@ cv::Mat LocalMapping::ComputeF12(KeyFrame *&pKF1, KeyFrame *&pKF2)
const cv::Mat &K1 = pKF1->mpCamera->toK();
const cv::Mat &K2 = pKF2->mpCamera->toK();

// Essential Matrix: t12 cross R12, i.e. E = [t12]_x * R12
// Fundamental Matrix: inv(K1^T)*E*inv(K2)

return K1.t().inv()*t12x*R12*K2.inv();
}

cv::Matx33f LocalMapping::ComputeF12_(KeyFrame *&pKF1, KeyFrame *&pKF2)
{
auto R1w = pKF1->GetRotation_();
auto t1w = pKF1->GetTranslation_();
auto R2w = pKF2->GetRotation_();
auto t2w = pKF2->GetTranslation_();

auto R12 = R1w*R2w.t();
auto t12 = -R1w*R2w.t()*t2w+t1w;

auto t12x = SkewSymmetricMatrix_(t12);

const auto &K1 = pKF1->mpCamera->toK_();
const auto &K2 = pKF2->mpCamera->toK_();

return K1.t().inv()*t12x*R12*K2.inv();
}
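To spell out the derivation behind both versions of ComputeF12 (aside, not part of the diff): with
world-to-camera poses (R_1w, t_1w) and (R_2w, t_2w), the relative pose mapping points from camera 2
into camera 1 is

    R_12 = R_1w R_2w^T,        t_12 = -R_1w R_2w^T t_2w + t_1w,

so the essential and fundamental matrices between the two keyframes are

    E_12 = [t_12]_x R_12,      F_12 = K_1^{-T} E_12 K_2^{-1},

which is exactly the expression K1.t().inv() * t12x * R12 * K2.inv() returned above.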
@@ -1322,7 +1358,15 @@ cv::Mat LocalMapping::SkewSymmetricMatrix(const cv::Mat &v)
-v.at<float>(1), v.at<float>(0), 0);
}

// Request a reset of this thread; called from an external thread, blocking
cv::Matx33f LocalMapping::SkewSymmetricMatrix_(const cv::Matx31f &v)
{
cv::Matx33f skew{0.f, -v(2), v(1),
v(2), 0.f, -v(0),
-v(1), v(0), 0.f};

return skew;
}

void LocalMapping::RequestReset()
{
{
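For reference (aside, not part of the diff): SkewSymmetricMatrix_ builds

    [v]_x = [   0   -v_3   v_2 ]
            [  v_3    0   -v_1 ]
            [ -v_2   v_1    0  ],

so that [v]_x * u = v x u; this is the [t12]_x factor used by ComputeF12_ above when forming the
essential matrix.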
@ -73,7 +73,25 @@ void LoopClosing::Run()
|
|||
mpLastCurrentKF->mvpLoopCandKFs.clear();
|
||||
mpLastCurrentKF->mvpMergeCandKFs.clear();
|
||||
}
|
||||
if(NewDetectCommonRegions())
|
||||
#ifdef REGISTER_TIMES
|
||||
timeDetectBoW = 0;
|
||||
std::chrono::steady_clock::time_point time_StartDetectBoW = std::chrono::steady_clock::now();
|
||||
#endif
|
||||
bool bDetected = NewDetectCommonRegions();
|
||||
#ifdef REGISTER_TIMES
|
||||
std::chrono::steady_clock::time_point time_EndDetectBoW = std::chrono::steady_clock::now();
|
||||
double timeDetect = std::chrono::duration_cast<std::chrono::duration<double,std::milli> >(time_EndDetectBoW - time_StartDetectBoW).count();
|
||||
double timeDetectSE3 = timeDetect - timeDetectBoW;
|
||||
|
||||
if(timeDetectBoW > 0)
|
||||
{
|
||||
vTimeBoW_ms.push_back(timeDetectBoW);
|
||||
}
|
||||
vTimeSE3_ms.push_back(timeDetectSE3);
|
||||
vTimePRTotal_ms.push_back(timeDetect);
|
||||
#endif
|
||||
|
||||
if(bDetected)
|
||||
{
|
||||
if(mbMergeDetected)
|
||||
{
|
||||
|
@ -119,21 +137,22 @@ void LoopClosing::Run()
|
|||
}
|
||||
}
|
||||
|
||||
|
||||
// cout << "tw2w1: " << mSold_new.translation() << endl;
|
||||
// cout << "Rw2w1: " << mSold_new.rotation().toRotationMatrix() << endl;
|
||||
// cout << "Angle Rw2w1: " << 180*LogSO3(mSold_new.rotation().toRotationMatrix())/3.14 << endl;
|
||||
// cout << "scale w2w1: " << mSold_new.scale() << endl;
|
||||
|
||||
mg2oMergeSmw = gSmw2 * gSw2c * gScw1;
|
||||
|
||||
mg2oMergeScw = mg2oMergeSlw;
|
||||
|
||||
// TODO UNCOMMENT
|
||||
#ifdef REGISTER_TIMES
|
||||
std::chrono::steady_clock::time_point time_StartMerge = std::chrono::steady_clock::now();
|
||||
#endif
|
||||
if (mpTracker->mSensor==System::IMU_MONOCULAR ||mpTracker->mSensor==System::IMU_STEREO)
|
||||
MergeLocal2();
|
||||
else
|
||||
MergeLocal();
|
||||
#ifdef REGISTER_TIMES
|
||||
std::chrono::steady_clock::time_point time_EndMerge = std::chrono::steady_clock::now();
|
||||
double timeMerge = std::chrono::duration_cast<std::chrono::duration<double,std::milli> >(time_EndMerge - time_StartMerge).count();
|
||||
vTimeMergeTotal_ms.push_back(timeMerge);
|
||||
#endif
|
||||
}
|
||||
|
||||
vdPR_CurrentTime.push_back(mpCurrentKF->mTimeStamp);
|
||||
|
@ -201,7 +220,16 @@ void LoopClosing::Run()
|
|||
}
|
||||
|
||||
mvpLoopMapPoints = mvpLoopMPs;//*mvvpLoopMapPoints[nCurrentIndex];
|
||||
|
||||
#ifdef REGISTER_TIMES
|
||||
std::chrono::steady_clock::time_point time_StartLoop = std::chrono::steady_clock::now();
|
||||
#endif
|
||||
CorrectLoop();
|
||||
#ifdef REGISTER_TIMES
|
||||
std::chrono::steady_clock::time_point time_EndLoop = std::chrono::steady_clock::now();
|
||||
double timeLoop = std::chrono::duration_cast<std::chrono::duration<double,std::milli> >(time_EndLoop - time_StartLoop).count();
|
||||
vTimeLoopTotal_ms.push_back(timeLoop);
|
||||
#endif
|
||||
}
|
||||
else
|
||||
{
|
||||
|
@ -211,7 +239,16 @@ void LoopClosing::Run()
|
|||
else
|
||||
{
|
||||
mvpLoopMapPoints = mvpLoopMPs;
|
||||
#ifdef REGISTER_TIMES
|
||||
std::chrono::steady_clock::time_point time_StartLoop = std::chrono::steady_clock::now();
|
||||
#endif
|
||||
CorrectLoop();
|
||||
|
||||
#ifdef REGISTER_TIMES
|
||||
std::chrono::steady_clock::time_point time_EndLoop = std::chrono::steady_clock::now();
|
||||
double timeLoop = std::chrono::duration_cast<std::chrono::duration<double,std::milli> >(time_EndLoop - time_StartLoop).count();
|
||||
vTimeLoopTotal_ms.push_back(timeLoop);
|
||||
#endif
|
||||
}
|
||||
|
||||
// Reset all variables
|
||||
|
@ -475,8 +512,15 @@ bool LoopClosing::NewDetectCommonRegions()
|
|||
//cout << "LC: bMergeDetectedInKF: " << bMergeDetectedInKF << " bLoopDetectedInKF: " << bLoopDetectedInKF << endl;
|
||||
if(!bMergeDetectedInKF || !bLoopDetectedInKF)
|
||||
{
|
||||
#ifdef REGISTER_TIMES
|
||||
std::chrono::steady_clock::time_point time_StartDetectBoW = std::chrono::steady_clock::now();
|
||||
#endif
|
||||
// Search in BoW
|
||||
mpKeyFrameDB->DetectNBestCandidates(mpCurrentKF, vpLoopBowCand, vpMergeBowCand,3);
|
||||
#ifdef REGISTER_TIMES
|
||||
std::chrono::steady_clock::time_point time_EndDetectBoW = std::chrono::steady_clock::now();
|
||||
timeDetectBoW = std::chrono::duration_cast<std::chrono::duration<double,std::milli> >(time_EndDetectBoW - time_StartDetectBoW).count();
|
||||
#endif
|
||||
}
|
||||
|
||||
// Check the BoW candidates if the geometric candidate list is empty
|
||||
|
@ -520,7 +564,6 @@ bool LoopClosing::DetectAndReffineSim3FromLastKF(KeyFrame* pCurrentKF, KeyFrame*
|
|||
{
|
||||
set<MapPoint*> spAlreadyMatchedMPs;
|
||||
nNumProjMatches = FindMatchesByProjection(pCurrentKF, pMatchedKF, gScw, spAlreadyMatchedMPs, vpMPs, vpMatchedMPs);
|
||||
cout << "REFFINE-SIM3: Projection from last KF with " << nNumProjMatches << " matches" << endl;
|
||||
|
||||
|
||||
int nProjMatches = 30;
|
||||
|
@ -546,7 +589,6 @@ bool LoopClosing::DetectAndReffineSim3FromLastKF(KeyFrame* pCurrentKF, KeyFrame*
|
|||
if(mpTracker->mSensor==System::IMU_MONOCULAR && !pCurrentKF->GetMap()->GetIniertialBA2())
|
||||
bFixedScale=false;
|
||||
int numOptMatches = Optimizer::OptimizeSim3(mpCurrentKF, pMatchedKF, vpMatchedMPs, gScm, 10, bFixedScale, mHessian7x7, true);
|
||||
cout << "REFFINE-SIM3: Optimize Sim3 from last KF with " << numOptMatches << " inliers" << endl;
|
||||
|
||||
|
||||
|
||||
|
@ -744,7 +786,6 @@ bool LoopClosing::DetectCommonRegionsFromBoW(std::vector<KeyFrame*> &vpBowCand,
|
|||
vector<KeyFrame*> vpMatchedKF;
|
||||
vpMatchedKF.resize(mpCurrentKF->GetMapPointMatches().size(), static_cast<KeyFrame*>(NULL));
|
||||
int numProjMatches = matcher.SearchByProjection(mpCurrentKF, mScw, vpMapPoints, vpKeyFrames, vpMatchedMP, vpMatchedKF, 8, 1.5);
|
||||
cout <<"BoW: " << numProjMatches << " matches between " << vpMapPoints.size() << " points with coarse Sim3" << endl;
|
||||
|
||||
if(numProjMatches >= nProjMatches)
|
||||
{
|
||||
|
@ -773,44 +814,6 @@ bool LoopClosing::DetectCommonRegionsFromBoW(std::vector<KeyFrame*> &vpBowCand,
|
|||
|
||||
if(numProjOptMatches >= nProjOptMatches)
|
||||
{
|
||||
cout << "BoW: Current KF " << mpCurrentKF->mnId << "; candidate KF " << pKFi->mnId << endl;
|
||||
cout << "BoW: There are " << numProjOptMatches << " matches between them with the optimized Sim3" << endl;
|
||||
int max_x = -1, min_x = 1000000;
|
||||
int max_y = -1, min_y = 1000000;
|
||||
for(MapPoint* pMPi : vpMatchedMP)
|
||||
{
|
||||
if(!pMPi || pMPi->isBad())
|
||||
{
|
||||
continue;
|
||||
}
|
||||
|
||||
tuple<size_t,size_t> indexes = pMPi->GetIndexInKeyFrame(pKFi);
|
||||
int index = get<0>(indexes);
|
||||
if(index >= 0)
|
||||
{
|
||||
int coord_x = pKFi->mvKeysUn[index].pt.x;
|
||||
if(coord_x < min_x)
|
||||
{
|
||||
min_x = coord_x;
|
||||
}
|
||||
if(coord_x > max_x)
|
||||
{
|
||||
max_x = coord_x;
|
||||
}
|
||||
int coord_y = pKFi->mvKeysUn[index].pt.y;
|
||||
if(coord_y < min_y)
|
||||
{
|
||||
min_y = coord_y;
|
||||
}
|
||||
if(coord_y > max_y)
|
||||
{
|
||||
max_y = coord_y;
|
||||
}
|
||||
}
|
||||
}
|
||||
//cout << "BoW: Coord in X -> " << min_x << ", " << max_x << "; and Y -> " << min_y << ", " << max_y << endl;
|
||||
//cout << "BoW: features area in X -> " << (max_x - min_x) << " and Y -> " << (max_y - min_y) << endl;
|
||||
|
||||
int nNumKFs = 0;
|
||||
//vpMatchedMPs = vpMatchedMP;
|
||||
//vpMPs = vpMapPoints;
|
||||
|
@ -832,27 +835,7 @@ bool LoopClosing::DetectCommonRegionsFromBoW(std::vector<KeyFrame*> &vpBowCand,
|
|||
|
||||
if(bValid)
|
||||
{
|
||||
//cout << "BoW: KF " << pKFj->mnId << " has " << numProjMatches_j << " matches" << endl;
|
||||
cv::Mat Tc_w = mpCurrentKF->GetPose();
|
||||
cv::Mat Tw_cj = pKFj->GetPoseInverse();
|
||||
cv::Mat Tc_cj = Tc_w * Tw_cj;
|
||||
cv::Vec3d vector_dist = Tc_cj.rowRange(0, 3).col(3);
|
||||
cv::Mat Rc_cj = Tc_cj.rowRange(0, 3).colRange(0, 3);
|
||||
double dist = cv::norm(vector_dist);
|
||||
cout << "BoW: KF " << pKFi->mnId << " to KF " << pKFj->mnId << " is separated by " << dist << " meters" << endl;
|
||||
cout << "BoW: Rotation between KF -> " << Rc_cj << endl;
|
||||
vector<float> v_euler = Converter::toEuler(Rc_cj);
|
||||
v_euler[0] *= 180 /3.1415;
|
||||
v_euler[1] *= 180 /3.1415;
|
||||
v_euler[2] *= 180 /3.1415;
|
||||
cout << "BoW: Rotation in angles (x, y, z) -> (" << v_euler[0] << ", " << v_euler[1] << ", " << v_euler[2] << ")" << endl;
|
||||
nNumKFs++;
|
||||
/*if(numProjMatches_j > numProjOptMatches)
|
||||
{
|
||||
pLastCurrentKF = pKFj;
|
||||
g2oScw = gSjw;
|
||||
vpMatchedMPs = vpMatchedMPs_j;
|
||||
}*/
|
||||
}
|
||||
|
||||
j++;
|
||||
|
@ -1067,9 +1050,7 @@ void LoopClosing::CorrectLoop()
|
|||
// Step 1:根据共视关系更新当前关键帧与其它关键帧之间的连接关系
|
||||
// 因为之前闭环检测、计算Sim3中改变了该关键帧的地图点,所以需要更新
|
||||
cout << "start updating connections" << endl;
|
||||
assert(mpCurrentKF->GetMap()->CheckEssentialGraph());
|
||||
mpCurrentKF->UpdateConnections();
|
||||
assert(mpCurrentKF->GetMap()->CheckEssentialGraph());
|
||||
|
||||
// Retrive keyframes connected to the current keyframe and compute corrected Sim3 pose by propagation
|
||||
// Step 2:通过位姿传播,得到Sim3优化后,与当前帧相连的关键帧的位姿,以及它们的地图点
|
||||
|
@ -1129,8 +1110,6 @@ void LoopClosing::CorrectLoop()
|
|||
}
|
||||
|
||||
// Correct all MapPoints obsrved by current keyframe and neighbors, so that they align with the other side of the loop
|
||||
cout << "LC: start correcting KeyFrames" << endl;
|
||||
cout << "LC: there are " << CorrectedSim3.size() << " KFs in the local window" << endl;
|
||||
// Step 2.2:得到矫正的当前关键帧的共视关键帧位姿后,修正这些关键帧的地图点
|
||||
// 遍历待矫正的共视关键帧(不包括当前关键帧)
|
||||
for(KeyFrameAndPose::iterator mit=CorrectedSim3.begin(), mend=CorrectedSim3.end(); mit!=mend; mit++)
|
||||
|
@ -1201,13 +1180,11 @@ void LoopClosing::CorrectLoop()
|
|||
pKFi->UpdateConnections();
|
||||
}
|
||||
// TODO Check this index increasement
|
||||
mpAtlas->GetCurrentMap()->IncreaseChangeIndex();
|
||||
cout << "LC: end correcting KeyFrames" << endl;
|
||||
pLoopMap->IncreaseChangeIndex();
|
||||
|
||||
|
||||
// Start Loop Fusion
|
||||
// Update matched map points and replace if duplicated
|
||||
cout << "LC: start replacing duplicated" << endl;
|
||||
// Step 3:检查当前帧的地图点与经过闭环匹配后该帧的地图点是否存在冲突,对冲突的进行替换或填补
|
||||
// mvpCurrentMatchedPoints 是当前关键帧和闭环关键帧组的所有地图点进行投影得到的匹配点
|
||||
for(size_t i=0; i<mvpLoopMatchedMPs.size(); i++)
|
||||
|
@ -1232,7 +1209,6 @@ void LoopClosing::CorrectLoop()
|
|||
}
|
||||
}
|
||||
}
|
||||
cout << "LC: end replacing duplicated" << endl;
|
||||
}
|
||||
|
||||
// Project MapPoints observed in the neighborhood of the loop keyframe
|
||||
|
@ -1330,8 +1306,7 @@ void LoopClosing::CorrectLoop()
|
|||
|
||||
void LoopClosing::MergeLocal()
|
||||
{
|
||||
Verbose::PrintMess("MERGE-VISUAL: Merge Visual detected!!!!", Verbose::VERBOSITY_NORMAL);
|
||||
//mpTracker->SetStepByStep(true);
|
||||
Verbose::PrintMess("MERGE: Merge Visual detected!!!!", Verbose::VERBOSITY_NORMAL);
|
||||
|
||||
int numTemporalKFs = 15; //TODO (set by parameter): Temporal KFs in the local window if the map is inertial.
|
||||
|
||||
|
@ -1345,7 +1320,7 @@ void LoopClosing::MergeLocal()
|
|||
// Flag that is true only when we stopped a running BA, in this case we need relaunch at the end of the merge
|
||||
bool bRelaunchBA = false;
|
||||
|
||||
Verbose::PrintMess("MERGE-VISUAL: Check Full Bundle Adjustment", Verbose::VERBOSITY_DEBUG);
|
||||
Verbose::PrintMess("MERGE: Check Full Bundle Adjustment", Verbose::VERBOSITY_DEBUG);
|
||||
// If a Global Bundle Adjustment is running, abort it
|
||||
if(isRunningGBA())
|
||||
{
|
||||
|
@ -1362,14 +1337,14 @@ void LoopClosing::MergeLocal()
|
|||
bRelaunchBA = true;
|
||||
}
|
||||
|
||||
Verbose::PrintMess("MERGE-VISUAL: Request Stop Local Mapping", Verbose::VERBOSITY_DEBUG);
|
||||
Verbose::PrintMess("MERGE: Request Stop Local Mapping", Verbose::VERBOSITY_DEBUG);
|
||||
mpLocalMapper->RequestStop();
|
||||
// Wait until Local Mapping has effectively stopped
|
||||
while(!mpLocalMapper->isStopped())
|
||||
{
|
||||
usleep(1000);
|
||||
}
|
||||
Verbose::PrintMess("MERGE-VISUAL: Local Map stopped", Verbose::VERBOSITY_DEBUG);
|
||||
Verbose::PrintMess("MERGE: Local Map stopped", Verbose::VERBOSITY_DEBUG);
|
||||
|
||||
mpLocalMapper->EmptyQueue();
|
||||
|
||||
|
@ -1378,12 +1353,6 @@ void LoopClosing::MergeLocal()
|
|||
Map* pCurrentMap = mpCurrentKF->GetMap();
|
||||
Map* pMergeMap = mpMergeMatchedKF->GetMap();
|
||||
|
||||
Verbose::PrintMess("MERGE-VISUAL: Initially there are " + to_string(pCurrentMap->KeyFramesInMap()) + " KFs and " + to_string(pCurrentMap->MapPointsInMap()) + " MPs in the active map ", Verbose::VERBOSITY_DEBUG);
|
||||
Verbose::PrintMess("MERGE-VISUAL: Initially there are " + to_string(pMergeMap->KeyFramesInMap()) + " KFs and " + to_string(pMergeMap->MapPointsInMap()) + " MPs in the matched map ", Verbose::VERBOSITY_DEBUG);
|
||||
//vector<KeyFrame*> vpMergeKFs = pMergeMap->GetAllKeyFrames();
|
||||
|
||||
////
|
||||
|
||||
// Ensure current keyframe is updated
|
||||
mpCurrentKF->UpdateConnections();
|
||||
|
||||
|
@ -1421,7 +1390,6 @@ void LoopClosing::MergeLocal()
|
|||
|
||||
vector<KeyFrame*> vpCovisibleKFs = mpCurrentKF->GetBestCovisibilityKeyFrames(numTemporalKFs);
|
||||
spLocalWindowKFs.insert(vpCovisibleKFs.begin(), vpCovisibleKFs.end());
|
||||
Verbose::PrintMess("MERGE-VISUAL: Initial number of KFs in local window from active map: " + to_string(spLocalWindowKFs.size()), Verbose::VERBOSITY_DEBUG);
|
||||
const int nMaxTries = 3;
|
||||
int nNumTries = 0;
|
||||
while(spLocalWindowKFs.size() < numTemporalKFs && nNumTries < nMaxTries)
|
||||
|
@ -1444,11 +1412,6 @@ void LoopClosing::MergeLocal()
|
|||
spLocalWindowKFs.insert(vpNewCovKFs.begin(), vpNewCovKFs.end());
|
||||
nNumTries++;
|
||||
}
|
||||
Verbose::PrintMess("MERGE-VISUAL: Last number of KFs in local window from the active map: " + to_string(spLocalWindowKFs.size()), Verbose::VERBOSITY_DEBUG);
|
||||
|
||||
//TODO TEST
|
||||
//vector<KeyFrame*> vpTestKFs = pCurrentMap->GetAllKeyFrames();
|
||||
//spLocalWindowKFs.insert(vpTestKFs.begin(), vpTestKFs.end());
|
||||
|
||||
for(KeyFrame* pKFi : spLocalWindowKFs)
|
||||
{
|
||||
|
@ -1458,10 +1421,7 @@ void LoopClosing::MergeLocal()
|
|||
set<MapPoint*> spMPs = pKFi->GetMapPoints();
|
||||
spLocalWindowMPs.insert(spMPs.begin(), spMPs.end());
|
||||
}
|
||||
Verbose::PrintMess("MERGE-VISUAL: Number of MPs in local window from active map: " + to_string(spLocalWindowMPs.size()), Verbose::VERBOSITY_DEBUG);
|
||||
Verbose::PrintMess("MERGE-VISUAL: Number of MPs in the active map: " + to_string(pCurrentMap->GetAllMapPoints().size()), Verbose::VERBOSITY_DEBUG);
|
||||
|
||||
Verbose::PrintMess("-------", Verbose::VERBOSITY_DEBUG);
|
||||
set<KeyFrame*> spMergeConnectedKFs;
|
||||
if(pCurrentMap->IsInertial() && pMergeMap->IsInertial()) //TODO Check the correct initialization
|
||||
{
|
||||
|
@ -1486,7 +1446,6 @@ void LoopClosing::MergeLocal()
|
|||
}
|
||||
vpCovisibleKFs = mpMergeMatchedKF->GetBestCovisibilityKeyFrames(numTemporalKFs);
|
||||
spMergeConnectedKFs.insert(vpCovisibleKFs.begin(), vpCovisibleKFs.end());
|
||||
Verbose::PrintMess("MERGE-VISUAL: Initial number of KFs in the local window from matched map: " + to_string(spMergeConnectedKFs.size()), Verbose::VERBOSITY_DEBUG);
|
||||
nNumTries = 0;
|
||||
while(spMergeConnectedKFs.size() < numTemporalKFs && nNumTries < nMaxTries)
|
||||
{
|
||||
|
@ -1507,7 +1466,6 @@ void LoopClosing::MergeLocal()
|
|||
spMergeConnectedKFs.insert(vpNewCovKFs.begin(), vpNewCovKFs.end());
|
||||
nNumTries++;
|
||||
}
|
||||
Verbose::PrintMess("MERGE-VISUAL: Last number of KFs in the localwindow from matched map: " + to_string(spMergeConnectedKFs.size()), Verbose::VERBOSITY_DEBUG);
|
||||
|
||||
set<MapPoint*> spMapPointMerge;
|
||||
for(KeyFrame* pKFi : spMergeConnectedKFs)
|
||||
|
@ -1533,24 +1491,13 @@ void LoopClosing::MergeLocal()
|
|||
vCorrectedSim3[mpCurrentKF]=g2oCorrectedScw;
|
||||
vNonCorrectedSim3[mpCurrentKF]=g2oNonCorrectedScw;
|
||||
|
||||
|
||||
//TODO Time test
|
||||
#ifdef COMPILEDWITHC11
|
||||
std::chrono::steady_clock::time_point timeStartTransfMerge = std::chrono::steady_clock::now();
|
||||
#else
|
||||
std::chrono::monotonic_clock::time_point timeStartTransfMerge = std::chrono::monotonic_clock::now();
|
||||
#endif
|
||||
for(KeyFrame* pKFi : spLocalWindowKFs)
|
||||
{
|
||||
if(!pKFi || pKFi->isBad())
|
||||
{
|
||||
Verbose::PrintMess("Bad KF in correction", Verbose::VERBOSITY_DEBUG);
|
||||
continue;
|
||||
}
|
||||
|
||||
if(pKFi->GetMap() != pCurrentMap)
|
||||
Verbose::PrintMess("Other map KF, this should't happen", Verbose::VERBOSITY_DEBUG);
|
||||
|
||||
g2o::Sim3 g2oCorrectedSiw;
|
||||
|
||||
if(pKFi!=mpCurrentKF)
|
||||
|
@ -1583,32 +1530,16 @@ void LoopClosing::MergeLocal()
|
|||
pKFi->mfScale = s;
|
||||
eigt *=(1./s); //[R t/s;0 1]
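// The scale s is removed from the rotation and absorbed into the translation, so the
// corrected pose built below is a rigid SE3 transform, following the [sR t] -> [R t/s; 0 1]
// convention noted above, expressed at the scale of the merged map.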
|
||||
|
||||
//cout << "R: " << mg2oMergeScw.rotation().toRotationMatrix() << endl;
|
||||
//cout << "angle: " << 180*LogSO3(mg2oMergeScw.rotation().toRotationMatrix())/3.14 << endl;
|
||||
//cout << "t: " << mg2oMergeScw.translation() << endl;
|
||||
|
||||
cv::Mat correctedTiw = Converter::toCvSE3(eigR,eigt);
|
||||
|
||||
pKFi->mTcwMerge = correctedTiw;
|
||||
|
||||
//pKFi->SetPose(correctedTiw);
|
||||
|
||||
// Make sure connections are updated
|
||||
//pKFi->UpdateMap(pMergeMap);
|
||||
//pMergeMap->AddKeyFrame(pKFi);
|
||||
//pCurrentMap->EraseKeyFrame(pKFi);
|
||||
|
||||
//cout << "After -> Map current: " << pCurrentMap << "; New map: " << pKFi->GetMap() << endl;
|
||||
|
||||
if(pCurrentMap->isImuInitialized())
|
||||
{
|
||||
Eigen::Matrix3d Rcor = eigR.transpose()*vNonCorrectedSim3[pKFi].rotation().toRotationMatrix();
|
||||
pKFi->mVwbMerge = Converter::toCvMat(Rcor)*pKFi->GetVelocity();
|
||||
//pKFi->SetVelocity(Converter::toCvMat(Rcor)*pKFi->GetVelocity()); // TODO: should add here scale s
|
||||
}
|
||||
|
||||
//TODO DEBUG to know which are the KFs that had been moved to the other map
|
||||
//pKFi->mnOriginMapId = 5;
|
||||
}
|
||||
|
||||
for(MapPoint* pMPi : spLocalWindowMPs)
|
||||
|
@ -1630,30 +1561,9 @@ void LoopClosing::MergeLocal()
|
|||
cv::Mat cvCorrectedP3Dw = Converter::toCvMat(eigCorrectedP3Dw);
|
||||
|
||||
pMPi->mPosMerge = cvCorrectedP3Dw;
|
||||
//cout << "Rcor: " << Rcor << endl;
|
||||
//cout << "Normal: " << pMPi->GetNormal() << endl;
|
||||
pMPi->mNormalVectorMerge = Converter::toCvMat(Rcor) * pMPi->GetNormal();
|
||||
//pMPi->SetWorldPos(cvCorrectedP3Dw);
|
||||
//pMPi->UpdateMap(pMergeMap);
|
||||
//pMergeMap->AddMapPoint(pMPi);
|
||||
//pCurrentMap->EraseMapPoint(pMPi);
|
||||
//pMPi->UpdateNormalAndDepth();
|
||||
}
|
||||
#ifdef COMPILEDWITHC11
|
||||
std::chrono::steady_clock::time_point timeFinishTransfMerge = std::chrono::steady_clock::now();
|
||||
#else
|
||||
std::chrono::monotonic_clock::time_point timeFinishTransfMerge = std::chrono::monotonic_clock::now();
|
||||
#endif
|
||||
std::chrono::duration<double,std::milli> timeTransfMerge = timeFinishTransfMerge - timeStartTransfMerge; // Time in milliseconds
|
||||
Verbose::PrintMess("MERGE-VISUAL: TRANSF ms: " + to_string(timeTransfMerge.count()), Verbose::VERBOSITY_DEBUG);
|
||||
|
||||
|
||||
//TODO Time test
|
||||
#ifdef COMPILEDWITHC11
|
||||
std::chrono::steady_clock::time_point timeStartCritMerge = std::chrono::steady_clock::now();
|
||||
#else
|
||||
std::chrono::monotonic_clock::time_point timeStartCritMerge = std::chrono::monotonic_clock::now();
|
||||
#endif
|
||||
{
|
||||
unique_lock<mutex> currentLock(pCurrentMap->mMutexMapUpdate); // We update the current map with the Merge information
|
||||
unique_lock<mutex> mergeLock(pMergeMap->mMutexMapUpdate); // We remove the Kfs and MPs in the merged area from the old map
|
||||
|
@ -1692,25 +1602,13 @@ void LoopClosing::MergeLocal()
|
|||
pMPi->UpdateMap(pMergeMap);
|
||||
pMergeMap->AddMapPoint(pMPi);
|
||||
pCurrentMap->EraseMapPoint(pMPi);
|
||||
//pMPi->UpdateNormalAndDepth();
|
||||
}
|
||||
|
||||
mpAtlas->ChangeMap(pMergeMap);
|
||||
mpAtlas->SetMapBad(pCurrentMap);
|
||||
pMergeMap->IncreaseChangeIndex();
|
||||
//TODO for debug
|
||||
pMergeMap->ChangeId(pCurrentMap->GetId());
|
||||
}
|
||||
|
||||
#ifdef COMPILEDWITHC11
|
||||
std::chrono::steady_clock::time_point timeFinishCritMerge = std::chrono::steady_clock::now();
|
||||
#else
|
||||
std::chrono::monotonic_clock::time_point timeFinishCritMerge = std::chrono::monotonic_clock::now();
|
||||
#endif
|
||||
std::chrono::duration<double,std::milli> timeCritMerge = timeFinishCritMerge - timeStartCritMerge; // Time in milliseconds
|
||||
Verbose::PrintMess("MERGE-VISUAL: New current map: " + to_string(pMergeMap->GetId()), Verbose::VERBOSITY_DEBUG);
|
||||
Verbose::PrintMess("MERGE-VISUAL: CRITICAL ms: " + to_string(timeCritMerge.count()), Verbose::VERBOSITY_DEBUG);
|
||||
Verbose::PrintMess("MERGE-VISUAL: LOCAL MAPPING number of KFs: " + to_string(mpLocalMapper->KeyframesInQueue()), Verbose::VERBOSITY_DEBUG);
|
||||
|
||||
//Rebuild the essential graph in the local window
|
||||
pCurrentMap->GetOriginKF()->SetFirstConnection(false);
|
||||
|
@ -1741,29 +1639,12 @@ void LoopClosing::MergeLocal()
|
|||
vpCheckFuseMapPoint.reserve(spMapPointMerge.size());
|
||||
std::copy(spMapPointMerge.begin(), spMapPointMerge.end(), std::back_inserter(vpCheckFuseMapPoint));
|
||||
|
||||
|
||||
//TODO Time test
|
||||
#ifdef COMPILEDWITHC11
|
||||
std::chrono::steady_clock::time_point timeStartFuseMerge = std::chrono::steady_clock::now();
|
||||
#else
|
||||
std::chrono::monotonic_clock::time_point timeStartFuseMerge = std::chrono::monotonic_clock::now();
|
||||
#endif
|
||||
|
||||
// Project MapPoints observed in the neighborhood of the merge keyframe
|
||||
// into the current keyframe and neighbors using corrected poses.
|
||||
// Fuse duplications.
|
||||
SearchAndFuse(vCorrectedSim3, vpCheckFuseMapPoint);
|
||||
|
||||
#ifdef COMPILEDWITHC11
|
||||
std::chrono::steady_clock::time_point timeFinishFuseMerge = std::chrono::steady_clock::now();
|
||||
#else
|
||||
std::chrono::monotonic_clock::time_point timeFinishFuseMerge = std::chrono::monotonic_clock::now();
|
||||
#endif
|
||||
std::chrono::duration<double,std::milli> timeFuseMerge = timeFinishFuseMerge - timeStartFuseMerge; // Time in milliseconds
|
||||
Verbose::PrintMess("MERGE-VISUAL: FUSE DUPLICATED ms: " + to_string(timeFuseMerge.count()), Verbose::VERBOSITY_DEBUG);
|
||||
|
||||
// Update connectivity
|
||||
Verbose::PrintMess("MERGE-VISUAL: Init to update connections in the welding area", Verbose::VERBOSITY_DEBUG);
|
||||
for(KeyFrame* pKFi : spLocalWindowKFs)
|
||||
{
|
||||
if(!pKFi || pKFi->isBad())
|
||||
|
@ -1779,34 +1660,25 @@ void LoopClosing::MergeLocal()
|
|||
pKFi->UpdateConnections();
|
||||
}
|
||||
|
||||
//CheckObservations(spLocalWindowKFs, spMergeConnectedKFs);
|
||||
|
||||
Verbose::PrintMess("MERGE-VISUAL: Finish to update connections in the welding area", Verbose::VERBOSITY_DEBUG);
|
||||
|
||||
bool bStop = false;
|
||||
Verbose::PrintMess("MERGE-VISUAL: Start local BA ", Verbose::VERBOSITY_DEBUG);
|
||||
Verbose::PrintMess("MERGE: Start local BA ", Verbose::VERBOSITY_DEBUG);
|
||||
vpLocalCurrentWindowKFs.clear();
|
||||
vpMergeConnectedKFs.clear();
|
||||
std::copy(spLocalWindowKFs.begin(), spLocalWindowKFs.end(), std::back_inserter(vpLocalCurrentWindowKFs));
|
||||
std::copy(spMergeConnectedKFs.begin(), spMergeConnectedKFs.end(), std::back_inserter(vpMergeConnectedKFs));
|
||||
if (mpTracker->mSensor==System::IMU_MONOCULAR || mpTracker->mSensor==System::IMU_STEREO)
|
||||
{
|
||||
Verbose::PrintMess("MERGE-VISUAL: Visual-Inertial", Verbose::VERBOSITY_DEBUG);
|
||||
Optimizer::MergeInertialBA(mpLocalMapper->GetCurrKF(),mpMergeMatchedKF,&bStop, mpCurrentKF->GetMap(),vCorrectedSim3);
|
||||
}
|
||||
else
|
||||
{
|
||||
Verbose::PrintMess("MERGE-VISUAL: Visual", Verbose::VERBOSITY_DEBUG);
|
||||
Verbose::PrintMess("MERGE-VISUAL: Local current window->" + to_string(vpLocalCurrentWindowKFs.size()) + "; Local merge window->" + to_string(vpMergeConnectedKFs.size()), Verbose::VERBOSITY_DEBUG);
|
||||
Optimizer::LocalBundleAdjustment(mpCurrentKF, vpLocalCurrentWindowKFs, vpMergeConnectedKFs,&bStop);
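// Editor's note (assumption based on the upstream ORB-SLAM3 signature): in this overload the
// active-map window keyframes (second argument) are optimized, while the merge-window
// keyframes (third argument) act as fixed anchors of the welding bundle adjustment.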
|
||||
}
|
||||
|
||||
// Loop closed. Release Local Mapping.
|
||||
mpLocalMapper->Release();
|
||||
|
||||
|
||||
//return;
|
||||
Verbose::PrintMess("MERGE-VISUAL: Finish the LBA", Verbose::VERBOSITY_DEBUG);
|
||||
Verbose::PrintMess("MERGE: Finish the LBA", Verbose::VERBOSITY_DEBUG);
|
||||
|
||||
|
||||
////
|
||||
|
@ -1816,11 +1688,11 @@ void LoopClosing::MergeLocal()
|
|||
|
||||
if(vpCurrentMapKFs.size() == 0)
|
||||
{
|
||||
Verbose::PrintMess("MERGE-VISUAL: There are not KFs outside of the welding area", Verbose::VERBOSITY_DEBUG);
|
||||
Verbose::PrintMess("MERGE: There are not KFs outside of the welding area", Verbose::VERBOSITY_DEBUG);
|
||||
}
|
||||
else
|
||||
{
|
||||
Verbose::PrintMess("MERGE-VISUAL: Calculate the new position of the elements outside of the window", Verbose::VERBOSITY_DEBUG);
|
||||
Verbose::PrintMess("MERGE: Calculate the new position of the elements outside of the window", Verbose::VERBOSITY_DEBUG);
|
||||
//Apply the transformation
|
||||
{
|
||||
if(mpTracker->mSensor == System::MONOCULAR)
|
||||
|
@ -1893,7 +1765,6 @@ void LoopClosing::MergeLocal()
|
|||
}
|
||||
}
|
||||
}
|
||||
Verbose::PrintMess("MERGE-VISUAL: Apply transformation to all elements of the old map", Verbose::VERBOSITY_DEBUG);
|
||||
|
||||
mpLocalMapper->RequestStop();
|
||||
// Wait until Local Mapping has effectively stopped
|
||||
|
@ -1901,7 +1772,6 @@ void LoopClosing::MergeLocal()
|
|||
{
|
||||
usleep(1000);
|
||||
}
|
||||
Verbose::PrintMess("MERGE-VISUAL: Local Map stopped", Verbose::VERBOSITY_DEBUG);
|
||||
|
||||
// Optimize graph (and update the loop position for each element from the beginning to the end)
|
||||
if(mpTracker->mSensor != System::MONOCULAR)
|
||||
|
@ -1915,9 +1785,6 @@ void LoopClosing::MergeLocal()
|
|||
unique_lock<mutex> currentLock(pCurrentMap->mMutexMapUpdate); // We update the current map with the Merge information
|
||||
unique_lock<mutex> mergeLock(pMergeMap->mMutexMapUpdate); // We remove the Kfs and MPs in the merged area from the old map
|
||||
|
||||
Verbose::PrintMess("MERGE-VISUAL: There are " + to_string(pMergeMap->KeyFramesInMap()) + " KFs in the map", Verbose::VERBOSITY_DEBUG);
|
||||
Verbose::PrintMess("MERGE-VISUAL: It will be inserted " + to_string(vpCurrentMapKFs.size()) + " KFs in the map", Verbose::VERBOSITY_DEBUG);
|
||||
|
||||
for(KeyFrame* pKFi : vpCurrentMapKFs)
|
||||
{
|
||||
if(!pKFi || pKFi->isBad() || pKFi->GetMap() != pCurrentMap)
|
||||
|
@ -1930,8 +1797,6 @@ void LoopClosing::MergeLocal()
|
|||
pMergeMap->AddKeyFrame(pKFi);
|
||||
pCurrentMap->EraseKeyFrame(pKFi);
|
||||
}
|
||||
Verbose::PrintMess("MERGE-VISUAL: There are " + to_string(pMergeMap->MapPointsInMap()) + " MPs in the map", Verbose::VERBOSITY_DEBUG);
|
||||
Verbose::PrintMess("MERGE-VISUAL: It will be inserted " + to_string(vpCurrentMapMPs.size()) + " MPs in the map", Verbose::VERBOSITY_DEBUG);
|
||||
|
||||
for(MapPoint* pMPi : vpCurrentMapMPs)
|
||||
{
|
||||
|
@ -1942,19 +1807,12 @@ void LoopClosing::MergeLocal()
|
|||
pMergeMap->AddMapPoint(pMPi);
|
||||
pCurrentMap->EraseMapPoint(pMPi);
|
||||
}
|
||||
Verbose::PrintMess("MERGE-VISUAL: There are " + to_string(pMergeMap->MapPointsInMap()) + " MPs in the map", Verbose::VERBOSITY_DEBUG);
|
||||
}
|
||||
|
||||
Verbose::PrintMess("MERGE-VISUAL: Optimaze the essential graph", Verbose::VERBOSITY_DEBUG);
|
||||
}
|
||||
|
||||
|
||||
|
||||
mpLocalMapper->Release();
|
||||
|
||||
|
||||
Verbose::PrintMess("MERGE-VISUAL: Finally there are " + to_string(pMergeMap->KeyFramesInMap()) + "KFs and " + to_string(pMergeMap->MapPointsInMap()) + " MPs in the complete map ", Verbose::VERBOSITY_DEBUG);
|
||||
Verbose::PrintMess("MERGE-VISUAL:Completed!!!!!", Verbose::VERBOSITY_DEBUG);
|
||||
Verbose::PrintMess("MERGE:Completed!!!!!", Verbose::VERBOSITY_DEBUG);
|
||||
|
||||
if(bRelaunchBA && (!pCurrentMap->isImuInitialized() || (pCurrentMap->KeyFramesInMap()<200 && mpAtlas->CountMaps()==1)))
|
||||
{
|
||||
|
@ -1976,52 +1834,6 @@ void LoopClosing::MergeLocal()
|
|||
|
||||
}
|
||||
|
||||
void LoopClosing::printReprojectionError(set<KeyFrame*> &spLocalWindowKFs, KeyFrame* mpCurrentKF, string &name)
|
||||
{
|
||||
string path_imgs = "./test_Reproj/";
|
||||
for(KeyFrame* pKFi : spLocalWindowKFs)
|
||||
{
|
||||
//cout << "KF " << pKFi->mnId << endl;
|
||||
cv::Mat img_i = cv::imread(pKFi->mNameFile, CV_LOAD_IMAGE_UNCHANGED);
|
||||
//cout << "Image -> " << img_i.cols << ", " << img_i.rows << endl;
|
||||
cv::cvtColor(img_i, img_i, CV_GRAY2BGR);
|
||||
//cout << "Change of color in the image " << endl;
|
||||
|
||||
vector<MapPoint*> vpMPs = pKFi->GetMapPointMatches();
|
||||
int num_points = 0;
|
||||
for(int j=0; j<vpMPs.size(); ++j)
|
||||
{
|
||||
MapPoint* pMPij = vpMPs[j];
|
||||
if(!pMPij || pMPij->isBad())
|
||||
{
|
||||
continue;
|
||||
}
|
||||
|
||||
cv::KeyPoint point_img = pKFi->mvKeysUn[j];
|
||||
cv::Point2f reproj_p;
|
||||
float u, v;
|
||||
bool bIsInImage = pKFi->ProjectPointUnDistort(pMPij, reproj_p, u, v);
|
||||
if(bIsInImage){
|
||||
//cout << "Reproj in the image" << endl;
|
||||
cv::circle(img_i, point_img.pt, 1/*point_img.octave*/, cv::Scalar(0, 255, 0));
|
||||
cv::line(img_i, point_img.pt, reproj_p, cv::Scalar(0, 0, 255));
|
||||
num_points++;
|
||||
}
|
||||
else
|
||||
{
|
||||
//cout << "Reproj out of the image" << endl;
|
||||
cv::circle(img_i, point_img.pt, point_img.octave, cv::Scalar(0, 0, 255));
|
||||
}
|
||||
|
||||
}
|
||||
//cout << "Image painted" << endl;
|
||||
string filename_img = path_imgs + "KF" + to_string(mpCurrentKF->mnId) + "_" + to_string(pKFi->mnId) + name + "points" + to_string(num_points) + ".png";
|
||||
cv::imwrite(filename_img, img_i);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
|
||||
void LoopClosing::MergeLocal2()
|
||||
{
|
||||
cout << "Merge detected!!!!" << endl;
|
||||
|
@ -2078,14 +1890,9 @@ void LoopClosing::MergeLocal2()
|
|||
|
||||
unique_lock<mutex> lock(mpAtlas->GetCurrentMap()->mMutexMapUpdate);
|
||||
|
||||
cout << "KFs before empty: " << mpAtlas->GetCurrentMap()->KeyFramesInMap() << endl;
|
||||
mpLocalMapper->EmptyQueue();
|
||||
cout << "KFs after empty: " << mpAtlas->GetCurrentMap()->KeyFramesInMap() << endl;
|
||||
|
||||
std::chrono::steady_clock::time_point t2 = std::chrono::steady_clock::now();
|
||||
cout << "updating active map to merge reference" << endl;
|
||||
cout << "curr merge KF id: " << mpCurrentKF->mnId << endl;
|
||||
cout << "curr tracking KF id: " << mpTracker->GetLastKeyFrame()->mnId << endl;
|
||||
bool bScaleVel=false;
|
||||
if(s_on!=1)
|
||||
bScaleVel=true;
|
||||
|
@ -2114,11 +1921,7 @@ void LoopClosing::MergeLocal2()
|
|||
|
||||
}
|
||||
|
||||
|
||||
cout << "MergeMap init ID: " << pMergeMap->GetInitKFid() << " CurrMap init ID: " << pCurrentMap->GetInitKFid() << endl;
|
||||
|
||||
// Load KFs and MPs from merge map
|
||||
cout << "updating current map" << endl;
|
||||
{
|
||||
// Get Merge Map Mutex (This section stops tracking!!)
|
||||
unique_lock<mutex> currentLock(pCurrentMap->mMutexMapUpdate); // We update the current map with the Merge information
|
||||
|
@ -2164,17 +1967,6 @@ void LoopClosing::MergeLocal2()
|
|||
}
|
||||
}
|
||||
|
||||
cout << "MergeMap init ID: " << pMergeMap->GetInitKFid() << " CurrMap init ID: " << pCurrentMap->GetInitKFid() << endl;
|
||||
|
||||
cout << "end updating current map" << endl;
|
||||
|
||||
// Critical zone
|
||||
bool good = pCurrentMap->CheckEssentialGraph();
|
||||
/*if(!good)
|
||||
cout << "BAD ESSENTIAL GRAPH!!" << endl;*/
|
||||
|
||||
cout << "Update essential graph" << endl;
|
||||
// mpCurrentKF->UpdateConnections(); // to set mbFirstConnection to false
|
||||
pMergeMap->GetOriginKF()->SetFirstConnection(false);
|
||||
pNewChild = mpMergeMatchedKF->GetParent(); // Old parent, it will be the new child of this KF
|
||||
pNewParent = mpMergeMatchedKF; // Old child, now it will be the parent of its own parent (we need to eliminate this KF from the children list of its old parent)
|
||||
|
@ -2189,33 +1981,17 @@ void LoopClosing::MergeLocal2()
|
|||
|
||||
}
|
||||
|
||||
|
||||
cout << "MergeMap init ID: " << pMergeMap->GetInitKFid() << " CurrMap init ID: " << pCurrentMap->GetInitKFid() << endl;
|
||||
|
||||
cout << "end update essential graph" << endl;
|
||||
|
||||
good = pCurrentMap->CheckEssentialGraph();
|
||||
if(!good)
|
||||
cout << "BAD ESSENTIAL GRAPH 1!!" << endl;
|
||||
|
||||
cout << "Update relationship between KFs" << endl;
|
||||
vector<MapPoint*> vpCheckFuseMapPoint; // MapPoint vector from current map to allow to fuse duplicated points with the old map (merge)
|
||||
vector<KeyFrame*> vpCurrentConnectedKFs;
|
||||
|
||||
|
||||
|
||||
mvpMergeConnectedKFs.push_back(mpMergeMatchedKF);
|
||||
vector<KeyFrame*> aux = mpMergeMatchedKF->GetVectorCovisibleKeyFrames();
|
||||
mvpMergeConnectedKFs.insert(mvpMergeConnectedKFs.end(), aux.begin(), aux.end());
|
||||
if (mvpMergeConnectedKFs.size()>6)
|
||||
mvpMergeConnectedKFs.erase(mvpMergeConnectedKFs.begin()+6,mvpMergeConnectedKFs.end());
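// Keep at most six keyframes in the merge window: mpMergeMatchedKF itself plus its five best
// covisible neighbours.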
|
||||
/*mvpMergeConnectedKFs = mpMergeMatchedKF->GetVectorCovisibleKeyFrames();
|
||||
mvpMergeConnectedKFs.push_back(mpMergeMatchedKF);*/
|
||||
|
||||
mpCurrentKF->UpdateConnections();
|
||||
vpCurrentConnectedKFs.push_back(mpCurrentKF);
|
||||
/*vpCurrentConnectedKFs = mpCurrentKF->GetVectorCovisibleKeyFrames();
|
||||
vpCurrentConnectedKFs.push_back(mpCurrentKF);*/
|
||||
aux = mpCurrentKF->GetVectorCovisibleKeyFrames();
|
||||
vpCurrentConnectedKFs.insert(vpCurrentConnectedKFs.end(), aux.begin(), aux.end());
|
||||
if (vpCurrentConnectedKFs.size()>6)
|
||||
|
@ -2230,33 +2006,10 @@ void LoopClosing::MergeLocal2()
|
|||
break;
|
||||
}
|
||||
|
||||
cout << "vpCurrentConnectedKFs.size() " << vpCurrentConnectedKFs.size() << endl;
|
||||
cout << "mvpMergeConnectedKFs.size() " << mvpMergeConnectedKFs.size() << endl;
|
||||
cout << "spMapPointMerge.size() " << spMapPointMerge.size() << endl;
|
||||
|
||||
|
||||
vpCheckFuseMapPoint.reserve(spMapPointMerge.size());
|
||||
std::copy(spMapPointMerge.begin(), spMapPointMerge.end(), std::back_inserter(vpCheckFuseMapPoint));
|
||||
cout << "Finished to update relationship between KFs" << endl;
|
||||
|
||||
cout << "MergeMap init ID: " << pMergeMap->GetInitKFid() << " CurrMap init ID: " << pCurrentMap->GetInitKFid() << endl;
|
||||
|
||||
good = pCurrentMap->CheckEssentialGraph();
|
||||
if(!good)
|
||||
cout << "BAD ESSENTIAL GRAPH 2!!" << endl;
|
||||
|
||||
cout << "start SearchAndFuse" << endl;
|
||||
SearchAndFuse(vpCurrentConnectedKFs, vpCheckFuseMapPoint);
|
||||
cout << "end SearchAndFuse" << endl;
|
||||
|
||||
cout << "MergeMap init ID: " << pMergeMap->GetInitKFid() << " CurrMap init ID: " << pCurrentMap->GetInitKFid() << endl;
|
||||
|
||||
good = pCurrentMap->CheckEssentialGraph();
|
||||
if(!good)
|
||||
cout << "BAD ESSENTIAL GRAPH 3!!" << endl;
|
||||
|
||||
cout << "Init to update connections" << endl;
|
||||
|
||||
|
||||
for(KeyFrame* pKFi : vpCurrentConnectedKFs)
|
||||
{
|
||||
|
@ -2272,34 +2025,16 @@ void LoopClosing::MergeLocal2()
|
|||
|
||||
pKFi->UpdateConnections();
|
||||
}
|
||||
cout << "end update connections" << endl;
|
||||
|
||||
cout << "MergeMap init ID: " << pMergeMap->GetInitKFid() << " CurrMap init ID: " << pCurrentMap->GetInitKFid() << endl;
|
||||
|
||||
good = pCurrentMap->CheckEssentialGraph();
|
||||
if(!good)
|
||||
cout << "BAD ESSENTIAL GRAPH 4!!" << endl;
|
||||
|
||||
// TODO Check: If the new map is too small, we assume that no information can be propagated from the new map to the old one
|
||||
if (numKFnew<10){
|
||||
mpLocalMapper->Release();
|
||||
return;
|
||||
}
|
||||
|
||||
good = pCurrentMap->CheckEssentialGraph();
|
||||
if(!good)
|
||||
cout << "BAD ESSENTIAL GRAPH 5!!" << endl;
|
||||
|
||||
// Perform BA
|
||||
bool bStopFlag=false;
|
||||
KeyFrame* pCurrKF = mpTracker->GetLastKeyFrame();
|
||||
cout << "start MergeInertialBA" << endl;
|
||||
Optimizer::MergeInertialBA(pCurrKF, mpMergeMatchedKF, &bStopFlag, pCurrentMap,CorrectedSim3);
|
||||
cout << "end MergeInertialBA" << endl;
|
||||
|
||||
good = pCurrentMap->CheckEssentialGraph();
|
||||
if(!good)
|
||||
cout << "BAD ESSENTIAL GRAPH 6!!" << endl;
|
||||
|
||||
// Release Local Mapping.
|
||||
mpLocalMapper->Release();
|
||||
|
@ -2308,64 +2043,12 @@ void LoopClosing::MergeLocal2()
|
|||
return;
|
||||
}
|
||||
|
||||
void LoopClosing::CheckObservations(set<KeyFrame*> &spKFsMap1, set<KeyFrame*> &spKFsMap2)
|
||||
{
|
||||
cout << "----------------------" << endl;
|
||||
for(KeyFrame* pKFi1 : spKFsMap1)
|
||||
{
|
||||
map<KeyFrame*, int> mMatchedMP;
|
||||
set<MapPoint*> spMPs = pKFi1->GetMapPoints();
|
||||
|
||||
for(MapPoint* pMPij : spMPs)
|
||||
{
|
||||
if(!pMPij || pMPij->isBad())
|
||||
{
|
||||
continue;
|
||||
}
|
||||
|
||||
map<KeyFrame*, tuple<int,int>> mMPijObs = pMPij->GetObservations();
|
||||
for(KeyFrame* pKFi2 : spKFsMap2)
|
||||
{
|
||||
if(mMPijObs.find(pKFi2) != mMPijObs.end())
|
||||
{
|
||||
if(mMatchedMP.find(pKFi2) != mMatchedMP.end())
|
||||
{
|
||||
mMatchedMP[pKFi2] = mMatchedMP[pKFi2] + 1;
|
||||
}
|
||||
else
|
||||
{
|
||||
mMatchedMP[pKFi2] = 1;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
if(mMatchedMP.size() == 0)
|
||||
{
|
||||
cout << "CHECK-OBS: KF " << pKFi1->mnId << " has not any matched MP with the other map" << endl;
|
||||
}
|
||||
else
|
||||
{
|
||||
cout << "CHECK-OBS: KF " << pKFi1->mnId << " has matched MP with " << mMatchedMP.size() << " KF from the other map" << endl;
|
||||
for(pair<KeyFrame*, int> matchedKF : mMatchedMP)
|
||||
{
|
||||
cout << " -KF: " << matchedKF.first->mnId << ", Number of matches: " << matchedKF.second << endl;
|
||||
}
|
||||
}
|
||||
}
|
||||
cout << "----------------------" << endl;
|
||||
}
|
||||
|
||||
|
||||
void LoopClosing::SearchAndFuse(const KeyFrameAndPose &CorrectedPosesMap, vector<MapPoint*> &vpMapPoints)
|
||||
{
|
||||
ORBmatcher matcher(0.8);
|
||||
|
||||
int total_replaces = 0;
|
||||
|
||||
cout << "FUSE: Initially there are " << vpMapPoints.size() << " MPs" << endl;
|
||||
cout << "FUSE: Intially there are " << CorrectedPosesMap.size() << " KFs" << endl;
|
||||
for(KeyFrameAndPose::const_iterator mit=CorrectedPosesMap.begin(), mend=CorrectedPosesMap.end(); mit!=mend;mit++)
|
||||
{
|
||||
int num_replaces = 0;
|
||||
|
@ -2396,7 +2079,6 @@ void LoopClosing::SearchAndFuse(const KeyFrameAndPose &CorrectedPosesMap, vector
|
|||
|
||||
total_replaces += num_replaces;
|
||||
}
|
||||
cout << "FUSE: " << total_replaces << " MPs had been fused" << endl;
|
||||
}
|
||||
|
||||
|
||||
|
@ -2406,8 +2088,6 @@ void LoopClosing::SearchAndFuse(const vector<KeyFrame*> &vConectedKFs, vector<Ma
|
|||
|
||||
int total_replaces = 0;
|
||||
|
||||
cout << "FUSE-POSE: Initially there are " << vpMapPoints.size() << " MPs" << endl;
|
||||
cout << "FUSE-POSE: Intially there are " << vConectedKFs.size() << " KFs" << endl;
|
||||
for(auto mit=vConectedKFs.begin(), mend=vConectedKFs.end(); mit!=mend;mit++)
|
||||
{
|
||||
int num_replaces = 0;
|
||||
|
@ -2430,10 +2110,8 @@ void LoopClosing::SearchAndFuse(const vector<KeyFrame*> &vConectedKFs, vector<Ma
|
|||
pRep->Replace(vpMapPoints[i]);
|
||||
}
|
||||
}
|
||||
cout << "FUSE-POSE: KF " << pKF->mnId << " ->" << num_replaces << " MPs fused" << endl;
|
||||
total_replaces += num_replaces;
|
||||
}
|
||||
cout << "FUSE-POSE: " << total_replaces << " MPs had been fused" << endl;
|
||||
}
|
||||
|
||||
// Called by an external thread to request a reset of this thread
|
||||
|
@ -2513,11 +2191,22 @@ void LoopClosing::RunGlobalBundleAdjustment(Map* pActiveMap, unsigned long nLoop
|
|||
|
||||
const bool bImuInit = pActiveMap->isImuInitialized();
|
||||
|
||||
#ifdef REGISTER_TIMES
|
||||
std::chrono::steady_clock::time_point time_StartFGBA = std::chrono::steady_clock::now();
|
||||
#endif
|
||||
|
||||
if(!bImuInit)
|
||||
Optimizer::GlobalBundleAdjustemnt(pActiveMap,10,&mbStopGBA,nLoopKF,false);
|
||||
else
|
||||
Optimizer::FullInertialBA(pActiveMap,7,false,nLoopKF,&mbStopGBA);
|
||||
// Record how many iterations the GBA has already run, used to check whether the global BA terminated unexpectedly
|
||||
|
||||
#ifdef REGISTER_TIMES
|
||||
std::chrono::steady_clock::time_point time_StartMapUpdate = std::chrono::steady_clock::now();
|
||||
|
||||
double timeFullGBA = std::chrono::duration_cast<std::chrono::duration<double,std::milli> >(time_StartMapUpdate - time_StartFGBA).count();
|
||||
vTimeFullGBA_ms.push_back(timeFullGBA);
|
||||
#endif
|
||||
|
||||
|
||||
int idx = mnFullBAIdx;
|
||||
// Optimizer::GlobalBundleAdjustemnt(mpMap,10,&mbStopGBA,nLoopKF,false);
|
||||
|
@ -2733,6 +2422,16 @@ void LoopClosing::RunGlobalBundleAdjustment(Map* pActiveMap, unsigned long nLoop
|
|||
mbFinishedGBA = true;
|
||||
mbRunningGBA = false;
|
||||
}
|
||||
|
||||
#ifdef REGISTER_TIMES
|
||||
std::chrono::steady_clock::time_point time_EndMapUpdate = std::chrono::steady_clock::now();
|
||||
|
||||
double timeMapUpdate = std::chrono::duration_cast<std::chrono::duration<double,std::milli> >(time_EndMapUpdate - time_StartMapUpdate).count();
|
||||
vTimeMapUpdate_ms.push_back(timeMapUpdate);
|
||||
|
||||
double timeGBA = std::chrono::duration_cast<std::chrono::duration<double,std::milli> >(time_EndMapUpdate - time_StartFGBA).count();
|
||||
vTimeGBATotal_ms.push_back(timeGBA);
|
||||
#endif
|
||||
}
|
||||
|
||||
// Called by an external thread to request termination of this thread
|
||||
|
|
src/Map.cc (149 lines changed)
|
@ -502,154 +502,5 @@ void Map::SetLastMapChange(int currentChangeId)
|
|||
mnMapChangeNotified = currentChangeId;
|
||||
}
|
||||
|
||||
void Map::printReprojectionError(list<KeyFrame*> &lpLocalWindowKFs, KeyFrame* mpCurrentKF, string &name, string &name_folder)
|
||||
{
|
||||
string path_imgs = "./" + name_folder + "/";
|
||||
for(KeyFrame* pKFi : lpLocalWindowKFs)
|
||||
{
|
||||
//cout << "KF " << pKFi->mnId << endl;
|
||||
cv::Mat img_i = cv::imread(pKFi->mNameFile, CV_LOAD_IMAGE_UNCHANGED);
|
||||
//cout << "Image -> " << img_i.cols << ", " << img_i.rows << endl;
|
||||
cv::cvtColor(img_i, img_i, CV_GRAY2BGR);
|
||||
//cout << "Change of color in the image " << endl;
|
||||
|
||||
vector<MapPoint*> vpMPs = pKFi->GetMapPointMatches();
|
||||
int num_points = 0;
|
||||
for(int j=0; j<vpMPs.size(); ++j)
|
||||
{
|
||||
MapPoint* pMPij = vpMPs[j];
|
||||
if(!pMPij || pMPij->isBad())
|
||||
{
|
||||
continue;
|
||||
}
|
||||
|
||||
cv::KeyPoint point_img = pKFi->mvKeysUn[j];
|
||||
cv::Point2f reproj_p;
|
||||
float u, v;
|
||||
bool bIsInImage = pKFi->ProjectPointUnDistort(pMPij, reproj_p, u, v);
|
||||
if(bIsInImage){
|
||||
//cout << "Reproj in the image" << endl;
|
||||
cv::circle(img_i, point_img.pt, 1/*point_img.octave*/, cv::Scalar(0, 255, 0));
|
||||
cv::line(img_i, point_img.pt, reproj_p, cv::Scalar(0, 0, 255));
|
||||
num_points++;
|
||||
}
|
||||
else
|
||||
{
|
||||
//cout << "Reproj out of the image" << endl;
|
||||
cv::circle(img_i, point_img.pt, point_img.octave, cv::Scalar(0, 0, 255));
|
||||
}
|
||||
|
||||
}
|
||||
//cout << "Image painted" << endl;
|
||||
string filename_img = path_imgs + "KF" + to_string(mpCurrentKF->mnId) + "_" + to_string(pKFi->mnId) + name + "points" + to_string(num_points) + ".png";
|
||||
cv::imwrite(filename_img, img_i);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
void Map::PreSave(std::set<GeometricCamera*> &spCams)
|
||||
{
|
||||
int nMPWithoutObs = 0;
|
||||
for(MapPoint* pMPi : mspMapPoints)
|
||||
{
|
||||
if(pMPi->GetObservations().size() == 0)
|
||||
{
|
||||
nMPWithoutObs++;
|
||||
}
|
||||
map<KeyFrame*, std::tuple<int,int>> mpObs = pMPi->GetObservations();
|
||||
for(map<KeyFrame*, std::tuple<int,int>>::iterator it= mpObs.begin(), end=mpObs.end(); it!=end; ++it)
|
||||
{
|
||||
if(it->first->GetMap() != this)
|
||||
{
|
||||
pMPi->EraseObservation(it->first); //We need to find where the KF is set as Bad but the observation is not removed
|
||||
}
|
||||
|
||||
}
|
||||
}
|
||||
cout << " Bad MapPoints removed" << endl;
|
||||
|
||||
// Saves the id of KF origins
|
||||
mvBackupKeyFrameOriginsId.reserve(mvpKeyFrameOrigins.size());
|
||||
for(int i = 0, numEl = mvpKeyFrameOrigins.size(); i < numEl; ++i)
|
||||
{
|
||||
mvBackupKeyFrameOriginsId.push_back(mvpKeyFrameOrigins[i]->mnId);
|
||||
}
|
||||
|
||||
mvpBackupMapPoints.clear();
|
||||
// Backup of set container to vector
|
||||
//std::copy(mspMapPoints.begin(), mspMapPoints.end(), std::back_inserter(mvpBackupMapPoints));
|
||||
for(MapPoint* pMPi : mspMapPoints)
|
||||
{
|
||||
//cout << "Pre-save of mappoint " << pMPi->mnId << endl;
|
||||
mvpBackupMapPoints.push_back(pMPi);
|
||||
pMPi->PreSave(mspKeyFrames,mspMapPoints);
|
||||
}
|
||||
cout << " MapPoints back up done!!" << endl;
|
||||
|
||||
mvpBackupKeyFrames.clear();
|
||||
//std::copy(mspKeyFrames.begin(), mspKeyFrames.end(), std::back_inserter(mvpBackupKeyFrames));
|
||||
for(KeyFrame* pKFi : mspKeyFrames)
|
||||
{
|
||||
mvpBackupKeyFrames.push_back(pKFi);
|
||||
pKFi->PreSave(mspKeyFrames,mspMapPoints, spCams);
|
||||
}
|
||||
cout << " KeyFrames back up done!!" << endl;
|
||||
|
||||
mnBackupKFinitialID = -1;
|
||||
if(mpKFinitial)
|
||||
{
|
||||
mnBackupKFinitialID = mpKFinitial->mnId;
|
||||
}
|
||||
|
||||
mnBackupKFlowerID = -1;
|
||||
if(mpKFlowerID)
|
||||
{
|
||||
mnBackupKFlowerID = mpKFlowerID->mnId;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
void Map::PostLoad(KeyFrameDatabase* pKFDB, ORBVocabulary* pORBVoc, map<long unsigned int, KeyFrame*>& mpKeyFrameId, map<unsigned int, GeometricCamera*> &mpCams)
|
||||
{
|
||||
std::copy(mvpBackupMapPoints.begin(), mvpBackupMapPoints.end(), std::inserter(mspMapPoints, mspMapPoints.begin()));
|
||||
std::copy(mvpBackupKeyFrames.begin(), mvpBackupKeyFrames.end(), std::inserter(mspKeyFrames, mspKeyFrames.begin()));
|
||||
|
||||
map<long unsigned int,MapPoint*> mpMapPointId;
|
||||
for(MapPoint* pMPi : mspMapPoints)
|
||||
{
|
||||
pMPi->UpdateMap(this);
|
||||
mpMapPointId[pMPi->mnId] = pMPi;
|
||||
}
|
||||
|
||||
//map<long unsigned int, KeyFrame*> mpKeyFrameId;
|
||||
for(KeyFrame* pKFi : mspKeyFrames)
|
||||
{
|
||||
pKFi->UpdateMap(this);
|
||||
pKFi->SetORBVocabulary(pORBVoc);
|
||||
pKFi->SetKeyFrameDatabase(pKFDB);
|
||||
mpKeyFrameId[pKFi->mnId] = pKFi;
|
||||
}
|
||||
cout << "Number of KF: " << mspKeyFrames.size() << endl;
|
||||
cout << "Number of MP: " << mspMapPoints.size() << endl;
|
||||
|
||||
// References reconstruction between different instances
|
||||
for(MapPoint* pMPi : mspMapPoints)
|
||||
{
|
||||
//cout << "Post-Load of mappoint " << pMPi->mnId << endl;
|
||||
pMPi->PostLoad(mpKeyFrameId, mpMapPointId);
|
||||
}
|
||||
cout << "End to rebuild MapPoint references" << endl;
|
||||
|
||||
for(KeyFrame* pKFi : mspKeyFrames)
|
||||
{
|
||||
pKFi->PostLoad(mpKeyFrameId, mpMapPointId, mpCams);
|
||||
pKFDB->add(pKFi);
|
||||
}
|
||||
|
||||
cout << "End to rebuild KeyFrame references" << endl;
|
||||
|
||||
mvpBackupMapPoints.clear();
|
||||
}
|
||||
|
||||
|
||||
} //namespace ORB_SLAM3
|
||||
|
|
|
@ -186,14 +186,10 @@ void MapDrawer::DrawKeyFrames(const bool bDrawKF, const bool bDrawGraph, const b
|
|||
glLineWidth(mKeyFrameLineWidth*5);
|
||||
glColor3f(1.0f,0.0f,0.0f);
|
||||
glBegin(GL_LINES);
|
||||
|
||||
//cout << "Initial KF: " << mpAtlas->GetCurrentMap()->GetOriginKF()->mnId << endl;
|
||||
//cout << "Parent KF: " << vpKFs[i]->mnId << endl;
|
||||
}
|
||||
else
|
||||
{
|
||||
glLineWidth(mKeyFrameLineWidth);
|
||||
//glColor3f(0.0f,0.0f,1.0f);
|
||||
glColor3f(mfFrameColors[index_color][0],mfFrameColors[index_color][1],mfFrameColors[index_color][2]);
|
||||
glBegin(GL_LINES);
|
||||
}
|
||||
|
@ -222,32 +218,6 @@ void MapDrawer::DrawKeyFrames(const bool bDrawKF, const bool bDrawGraph, const b
|
|||
|
||||
glPopMatrix();
|
||||
|
||||
//Draw lines with Loop and Merge candidates
|
||||
/*glLineWidth(mGraphLineWidth);
|
||||
glColor4f(1.0f,0.6f,0.0f,1.0f);
|
||||
glBegin(GL_LINES);
|
||||
cv::Mat Ow = pKF->GetCameraCenter();
|
||||
const vector<KeyFrame*> vpLoopCandKFs = pKF->mvpLoopCandKFs;
|
||||
if(!vpLoopCandKFs.empty())
|
||||
{
|
||||
for(vector<KeyFrame*>::const_iterator vit=vpLoopCandKFs.begin(), vend=vpLoopCandKFs.end(); vit!=vend; vit++)
|
||||
{
|
||||
cv::Mat Ow2 = (*vit)->GetCameraCenter();
|
||||
glVertex3f(Ow.at<float>(0),Ow.at<float>(1),Ow.at<float>(2));
|
||||
glVertex3f(Ow2.at<float>(0),Ow2.at<float>(1),Ow2.at<float>(2));
|
||||
}
|
||||
}
|
||||
const vector<KeyFrame*> vpMergeCandKFs = pKF->mvpMergeCandKFs;
|
||||
if(!vpMergeCandKFs.empty())
|
||||
{
|
||||
for(vector<KeyFrame*>::const_iterator vit=vpMergeCandKFs.begin(), vend=vpMergeCandKFs.end(); vit!=vend; vit++)
|
||||
{
|
||||
cv::Mat Ow2 = (*vit)->GetCameraCenter();
|
||||
glVertex3f(Ow.at<float>(0),Ow.at<float>(1),Ow.at<float>(2));
|
||||
glVertex3f(Ow2.at<float>(0),Ow2.at<float>(1),Ow2.at<float>(2));
|
||||
}
|
||||
}*/
|
||||
|
||||
glEnd();
|
||||
}
|
||||
}
|
||||
|
@ -258,7 +228,6 @@ void MapDrawer::DrawKeyFrames(const bool bDrawKF, const bool bDrawGraph, const b
|
|||
glColor4f(0.0f,1.0f,0.0f,0.6f);
|
||||
glBegin(GL_LINES);
|
||||
|
||||
// cout << "-----------------Draw graph-----------------" << endl;
|
||||
for(size_t i=0; i<vpKFs.size(); i++)
|
||||
{
|
||||
// Covisibility Graph
|
||||
|
@ -353,7 +322,6 @@ void MapDrawer::DrawKeyFrames(const bool bDrawKF, const bool bDrawGraph, const b
|
|||
else
|
||||
{
|
||||
glLineWidth(mKeyFrameLineWidth);
|
||||
//glColor3f(0.0f,0.0f,1.0f);
|
||||
glColor3f(mfFrameColors[index_color][0],mfFrameColors[index_color][1],mfFrameColors[index_color][2]);
|
||||
glBegin(GL_LINES);
|
||||
}
|
||||
|
|
|
@ -44,7 +44,9 @@ MapPoint::MapPoint(const cv::Mat &Pos, KeyFrame *pRefKF, Map* pMap):
|
|||
mnOriginMapId(pMap->GetId())
|
||||
{
|
||||
Pos.copyTo(mWorldPos);
|
||||
mWorldPosx = cv::Matx31f(Pos.at<float>(0), Pos.at<float>(1), Pos.at<float>(2));
|
||||
mNormalVector = cv::Mat::zeros(3,1,CV_32F);
|
||||
mNormalVectorx = cv::Matx31f::zeros();
|
||||
|
||||
mbTrackInViewR = false;
|
||||
mbTrackInView = false;
|
||||
|
@ -67,6 +69,7 @@ MapPoint::MapPoint(const double invDepth, cv::Point2f uv_init, KeyFrame* pRefKF,
|
|||
mpHostKF = pHostKF;
|
||||
|
||||
mNormalVector = cv::Mat::zeros(3,1,CV_32F);
|
||||
mNormalVectorx = cv::Matx31f::zeros();
|
||||
|
||||
// Worldpos is not set
|
||||
// MapPoints can be created from Tracking and Local Mapping. This mutex avoid conflicts with id.
|
||||
|
@ -81,6 +84,8 @@ MapPoint::MapPoint(const cv::Mat &Pos, Map* pMap, Frame* pFrame, const int &idxF
|
|||
mnFound(1), mbBad(false), mpReplaced(NULL), mpMap(pMap), mnOriginMapId(pMap->GetId())
|
||||
{
|
||||
Pos.copyTo(mWorldPos);
|
||||
mWorldPosx = cv::Matx31f(Pos.at<float>(0), Pos.at<float>(1), Pos.at<float>(2));
|
||||
|
||||
cv::Mat Ow;
|
||||
if(pFrame -> Nleft == -1 || idxF < pFrame -> Nleft){
|
||||
Ow = pFrame->GetCameraCenter();
|
||||
|
@ -94,6 +99,8 @@ MapPoint::MapPoint(const cv::Mat &Pos, Map* pMap, Frame* pFrame, const int &idxF
|
|||
}
|
||||
mNormalVector = mWorldPos - Ow;
|
||||
mNormalVector = mNormalVector/cv::norm(mNormalVector);
|
||||
mNormalVectorx = cv::Matx31f(mNormalVector.at<float>(0), mNormalVector.at<float>(1), mNormalVector.at<float>(2));
|
||||
|
||||
|
||||
cv::Mat PC = Pos - Ow;
|
||||
const float dist = cv::norm(PC);
|
||||
|
@ -118,6 +125,7 @@ void MapPoint::SetWorldPos(const cv::Mat &Pos)
|
|||
unique_lock<mutex> lock2(mGlobalMutex);
|
||||
unique_lock<mutex> lock(mMutexPos);
|
||||
Pos.copyTo(mWorldPos);
|
||||
mWorldPosx = cv::Matx31f(Pos.at<float>(0), Pos.at<float>(1), Pos.at<float>(2));
|
||||
}
|
||||
|
||||
cv::Mat MapPoint::GetWorldPos()
|
||||
|
@ -132,6 +140,18 @@ cv::Mat MapPoint::GetNormal()
|
|||
return mNormalVector.clone();
|
||||
}
|
||||
|
||||
cv::Matx31f MapPoint::GetWorldPos2()
|
||||
{
|
||||
unique_lock<mutex> lock(mMutexPos);
|
||||
return mWorldPosx;
|
||||
}
|
||||
|
||||
cv::Matx31f MapPoint::GetNormal2()
|
||||
{
|
||||
unique_lock<mutex> lock(mMutexPos);
|
||||
return mNormalVectorx;
|
||||
}
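// Editor's illustration (not part of the original source): example use of the fixed-size
// accessors added above. The helper below is hypothetical; it only shows that cv::Matx31f
// results can be combined without the heap allocation and clone() of the cv::Mat getters.
static float ExampleDepthFromMatx(MapPoint* pMP, const cv::Matx31f &cameraCenter)
{
    cv::Matx31f Xw = pMP->GetWorldPos2();                   // fixed-size copy of the position
    return static_cast<float>(cv::norm(Xw - cameraCenter)); // Euclidean camera-to-point distance
}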
|
||||
|
||||
KeyFrame* MapPoint::GetReferenceKeyFrame()
|
||||
{
|
||||
unique_lock<mutex> lock(mMutexFeatures);
|
||||
|
@ -184,7 +204,6 @@ void MapPoint::EraseObservation(KeyFrame* pKF)
|
|||
// Look up the observation to be removed and, depending on whether it is monocular or stereo, decrease this map point's observation count accordingly
|
||||
if(mObservations.count(pKF))
|
||||
{
|
||||
//int idx = mObservations[pKF];
|
||||
tuple<int,int> indexes = mObservations[pKF];
|
||||
int leftIndex = get<0>(indexes), rightIndex = get<1>(indexes);
|
||||
|
||||
|
@ -566,6 +585,7 @@ void MapPoint::UpdateNormalAndDepth()
|
|||
mfMaxDistance = dist*levelScaleFactor; // Upper bound of the distance at which this point can be observed
|
||||
mfMinDistance = mfMaxDistance/pRefKF->mvScaleFactors[nLevels-1]; // Lower bound of the distance at which this point can be observed
|
||||
mNormalVector = normal/n; // Mean viewing direction of the map point
|
||||
mNormalVectorx = cv::Matx31f(mNormalVector.at<float>(0), mNormalVector.at<float>(1), mNormalVector.at<float>(2));
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -573,6 +593,7 @@ void MapPoint::SetNormalVector(cv::Mat& normal)
|
|||
{
|
||||
unique_lock<mutex> lock3(mMutexPos);
|
||||
mNormalVector = normal;
|
||||
mNormalVectorx = cv::Matx31f(mNormalVector.at<float>(0), mNormalVector.at<float>(1), mNormalVector.at<float>(2));
|
||||
}
|
||||
|
||||
float MapPoint::GetMinDistanceInvariance()
|
||||
|
@ -646,18 +667,6 @@ int MapPoint::PredictScale(const float ¤tDist, Frame* pF)
|
|||
return nScale;
|
||||
}
|
||||
|
||||
void MapPoint::PrintObservations()
|
||||
{
|
||||
cout << "MP_OBS: MP " << mnId << endl;
|
||||
for(map<KeyFrame*,tuple<int,int>>::iterator mit=mObservations.begin(), mend=mObservations.end(); mit!=mend; mit++)
|
||||
{
|
||||
KeyFrame* pKFi = mit->first;
|
||||
tuple<int,int> indexes = mit->second;
|
||||
int leftIndex = get<0>(indexes), rightIndex = get<1>(indexes);
|
||||
cout << "--OBS in KF " << pKFi->mnId << " in map " << pKFi->GetMap()->GetId() << endl;
|
||||
}
|
||||
}
|
||||
|
||||
Map* MapPoint::GetMap()
|
||||
{
|
||||
unique_lock<mutex> lock(mMutexMap);
|
||||
|
@ -670,66 +679,4 @@ void MapPoint::UpdateMap(Map* pMap)
|
|||
mpMap = pMap;
|
||||
}
|
||||
|
||||
void MapPoint::PreSave(set<KeyFrame*>& spKF,set<MapPoint*>& spMP)
|
||||
{
|
||||
mBackupReplacedId = -1;
|
||||
if(mpReplaced && spMP.find(mpReplaced) != spMP.end())
|
||||
mBackupReplacedId = mpReplaced->mnId;
|
||||
|
||||
mBackupObservationsId1.clear();
|
||||
mBackupObservationsId2.clear();
|
||||
// Save the id and position in each KF who view it
|
||||
for(std::map<KeyFrame*,std::tuple<int,int> >::const_iterator it = mObservations.begin(), end = mObservations.end(); it != end; ++it)
|
||||
{
|
||||
KeyFrame* pKFi = it->first;
|
||||
if(spKF.find(pKFi) != spKF.end())
|
||||
{
|
||||
mBackupObservationsId1[it->first->mnId] = get<0>(it->second);
|
||||
mBackupObservationsId2[it->first->mnId] = get<1>(it->second);
|
||||
}
|
||||
else
|
||||
{
|
||||
EraseObservation(pKFi);
|
||||
}
|
||||
}
|
||||
|
||||
// Save the id of the reference KF
|
||||
if(spKF.find(mpRefKF) != spKF.end())
|
||||
{
|
||||
mBackupRefKFId = mpRefKF->mnId;
|
||||
}
|
||||
}
|
||||
|
||||
void MapPoint::PostLoad(map<long unsigned int, KeyFrame*>& mpKFid, map<long unsigned int, MapPoint*>& mpMPid)
|
||||
{
|
||||
mpRefKF = mpKFid[mBackupRefKFId];
|
||||
if(!mpRefKF)
|
||||
{
|
||||
cout << "MP without KF reference " << mBackupRefKFId << "; Num obs: " << nObs << endl;
|
||||
}
|
||||
mpReplaced = static_cast<MapPoint*>(NULL);
|
||||
if(mBackupReplacedId>=0)
|
||||
{
|
||||
map<long unsigned int, MapPoint*>::iterator it = mpMPid.find(mBackupReplacedId);
|
||||
if (it != mpMPid.end())
|
||||
mpReplaced = it->second;
|
||||
}
|
||||
|
||||
mObservations.clear();
|
||||
|
||||
for(map<long unsigned int, int>::const_iterator it = mBackupObservationsId1.begin(), end = mBackupObservationsId1.end(); it != end; ++it)
|
||||
{
|
||||
KeyFrame* pKFi = mpKFid[it->first];
|
||||
map<long unsigned int, int>::const_iterator it2 = mBackupObservationsId2.find(it->first);
|
||||
std::tuple<int, int> indexes = tuple<int,int>(it->second,it2->second);
|
||||
if(pKFi)
|
||||
{
|
||||
mObservations[pKFi] = indexes;
|
||||
}
|
||||
}
|
||||
|
||||
mBackupObservationsId1.clear();
|
||||
mBackupObservationsId2.clear();
|
||||
}
|
||||
|
||||
} //namespace ORB_SLAM
|
||||
|
|
|
@ -1043,7 +1043,7 @@ void ORBextractor::ComputeKeyPointsOctTree(
|
|||
allKeypoints.resize(nlevels);
|
||||
|
||||
// Size of an image cell: a square, understood as its edge length in pixel coordinates
|
||||
const float W = 30;
|
||||
const float W = 35;
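// W is the nominal edge length (in pixels) of the square grid cells over which FAST corners
// are extracted at every pyramid level; raising it from 30 to 35 yields fewer, larger cells.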
|
||||
|
||||
// Process every pyramid level
|
||||
// Iterate over all image levels
|
||||
|
|
|
@ -1385,6 +1385,250 @@ int ORBmatcher::SearchForTriangulation(KeyFrame *pKF1, KeyFrame *pKF2, cv::Mat F
|
|||
return nmatches;
|
||||
}
|
||||
|
||||
int ORBmatcher::SearchForTriangulation_(KeyFrame *pKF1, KeyFrame *pKF2, cv::Matx33f F12,
|
||||
vector<pair<size_t, size_t> > &vMatchedPairs, const bool bOnlyStereo, const bool bCoarse)
|
||||
{
|
||||
const DBoW2::FeatureVector &vFeatVec1 = pKF1->mFeatVec;
|
||||
const DBoW2::FeatureVector &vFeatVec2 = pKF2->mFeatVec;
|
||||
|
||||
//Compute epipole in second image
|
||||
auto Cw = pKF1->GetCameraCenter_();
|
||||
auto R2w = pKF2->GetRotation_();
|
||||
auto t2w = pKF2->GetTranslation_();
|
||||
auto C2 = R2w*Cw+t2w;
|
||||
|
||||
cv::Point2f ep = pKF2->mpCamera->project(C2);
|
||||
|
||||
auto R1w = pKF1->GetRotation_();
|
||||
auto t1w = pKF1->GetTranslation_();
|
||||
|
||||
cv::Matx33f R12;
|
||||
cv::Matx31f t12;
|
||||
|
||||
cv::Matx33f Rll,Rlr,Rrl,Rrr;
|
||||
cv::Matx31f tll,tlr,trl,trr;
|
||||
|
||||
GeometricCamera* pCamera1 = pKF1->mpCamera, *pCamera2 = pKF2->mpCamera;
|
||||
|
||||
if(!pKF1->mpCamera2 && !pKF2->mpCamera2){
|
||||
R12 = R1w*R2w.t();
|
||||
t12 = -R1w*R2w.t()*t2w+t1w;
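// This is the relative transformation T12 = T1w * T2w^-1, i.e. R12 = R1w * R2w^T and
// t12 = t1w - R1w * R2w^T * t2w, written here with cv::Matx types.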
|
||||
}
|
||||
else{
|
||||
Rll = pKF1->GetRotation_() * pKF2->GetRotation_().t();
|
||||
Rlr = pKF1->GetRotation_() * pKF2->GetRightRotation_().t();
|
||||
Rrl = pKF1->GetRightRotation_() * pKF2->GetRotation_().t();
|
||||
Rrr = pKF1->GetRightRotation_() * pKF2->GetRightRotation_().t();
|
||||
|
||||
tll = pKF1->GetRotation_() * (-pKF2->GetRotation_().t() * pKF2->GetTranslation_()) + pKF1->GetTranslation_();
|
||||
tlr = pKF1->GetRotation_() * (-pKF2->GetRightRotation_().t() * pKF2->GetRightTranslation_()) + pKF1->GetTranslation_();
|
||||
trl = pKF1->GetRightRotation_() * (-pKF2->GetRotation_().t() * pKF2->GetTranslation_()) + pKF1->GetRightTranslation_();
|
||||
trr = pKF1->GetRightRotation_() * (-pKF2->GetRightRotation_().t() * pKF2->GetRightTranslation_()) + pKF1->GetRightTranslation_();
|
||||
}
|
||||
|
||||
// Find matches between not tracked keypoints
|
||||
// Matching speed-up by ORB Vocabulary
|
||||
// Compare only ORB that share the same node
|
||||
|
||||
int nmatches=0;
|
||||
vector<bool> vbMatched2(pKF2->N,false);
|
||||
vector<int> vMatches12(pKF1->N,-1);
|
||||
|
||||
vector<int> rotHist[HISTO_LENGTH];
|
||||
for(int i=0;i<HISTO_LENGTH;i++)
|
||||
rotHist[i].reserve(500);
|
||||
|
||||
const float factor = 1.0f/HISTO_LENGTH;
|
||||
|
||||
DBoW2::FeatureVector::const_iterator f1it = vFeatVec1.begin();
|
||||
DBoW2::FeatureVector::const_iterator f2it = vFeatVec2.begin();
|
||||
DBoW2::FeatureVector::const_iterator f1end = vFeatVec1.end();
|
||||
DBoW2::FeatureVector::const_iterator f2end = vFeatVec2.end();
|
||||
|
||||
while(f1it!=f1end && f2it!=f2end)
|
||||
{
|
||||
if(f1it->first == f2it->first)
|
||||
{
|
||||
for(size_t i1=0, iend1=f1it->second.size(); i1<iend1; i1++)
|
||||
{
|
||||
const size_t idx1 = f1it->second[i1];
|
||||
|
||||
MapPoint* pMP1 = pKF1->GetMapPoint(idx1);
|
||||
|
||||
// If there is already a MapPoint skip
|
||||
if(pMP1)
|
||||
{
|
||||
continue;
|
||||
}
|
||||
|
||||
const bool bStereo1 = (!pKF1->mpCamera2 && pKF1->mvuRight[idx1]>=0);
|
||||
|
||||
if(bOnlyStereo)
|
||||
if(!bStereo1)
|
||||
continue;
|
||||
|
||||
|
||||
const cv::KeyPoint &kp1 = (pKF1 -> NLeft == -1) ? pKF1->mvKeysUn[idx1]
|
||||
: (idx1 < pKF1 -> NLeft) ? pKF1 -> mvKeys[idx1]
|
||||
: pKF1 -> mvKeysRight[idx1 - pKF1 -> NLeft];
|
||||
|
||||
const bool bRight1 = (pKF1 -> NLeft == -1 || idx1 < pKF1 -> NLeft) ? false
|
||||
: true;
|
||||
//if(bRight1) continue;
|
||||
const cv::Mat &d1 = pKF1->mDescriptors.row(idx1);
|
||||
|
||||
int bestDist = TH_LOW;
|
||||
int bestIdx2 = -1;
|
||||
|
||||
for(size_t i2=0, iend2=f2it->second.size(); i2<iend2; i2++)
|
||||
{
|
||||
size_t idx2 = f2it->second[i2];
|
||||
|
||||
MapPoint* pMP2 = pKF2->GetMapPoint(idx2);
|
||||
|
||||
// If we have already matched or there is a MapPoint skip
|
||||
if(vbMatched2[idx2] || pMP2)
|
||||
continue;
|
||||
|
||||
const bool bStereo2 = (!pKF2->mpCamera2 && pKF2->mvuRight[idx2]>=0);
|
||||
|
||||
if(bOnlyStereo)
|
||||
if(!bStereo2)
|
||||
continue;
|
||||
|
||||
const cv::Mat &d2 = pKF2->mDescriptors.row(idx2);
|
||||
|
||||
const int dist = DescriptorDistance(d1,d2);
|
||||
|
||||
if(dist>TH_LOW || dist>bestDist)
|
||||
continue;
|
||||
|
||||
const cv::KeyPoint &kp2 = (pKF2 -> NLeft == -1) ? pKF2->mvKeysUn[idx2]
|
||||
: (idx2 < pKF2 -> NLeft) ? pKF2 -> mvKeys[idx2]
|
||||
: pKF2 -> mvKeysRight[idx2 - pKF2 -> NLeft];
|
||||
const bool bRight2 = (pKF2 -> NLeft == -1 || idx2 < pKF2 -> NLeft) ? false
|
||||
: true;
|
||||
|
||||
if(!bStereo1 && !bStereo2 && !pKF1->mpCamera2)
|
||||
{
|
||||
const float distex = ep.x-kp2.pt.x;
|
||||
const float distey = ep.y-kp2.pt.y;
|
||||
if(distex*distex+distey*distey<100*pKF2->mvScaleFactors[kp2.octave])
|
||||
{
|
||||
continue;
|
||||
}
|
||||
}
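// For purely monocular candidate pairs, matches whose keypoint in KF2 lies too close to the
// epipole (squared pixel distance below 100 times the keypoint's scale factor) are discarded:
// the epipolar constraint is uninformative there and triangulation would be ill-conditioned.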
|
||||
|
||||
if(pKF1->mpCamera2 && pKF2->mpCamera2){
|
||||
if(bRight1 && bRight2){
|
||||
R12 = Rrr;
|
||||
t12 = trr;
|
||||
|
||||
pCamera1 = pKF1->mpCamera2;
|
||||
pCamera2 = pKF2->mpCamera2;
|
||||
}
|
||||
else if(bRight1 && !bRight2){
|
||||
R12 = Rrl;
|
||||
t12 = trl;
|
||||
|
||||
pCamera1 = pKF1->mpCamera2;
|
||||
pCamera2 = pKF2->mpCamera;
|
||||
}
|
||||
else if(!bRight1 && bRight2){
|
||||
R12 = Rlr;
|
||||
t12 = tlr;
|
||||
|
||||
pCamera1 = pKF1->mpCamera;
|
||||
pCamera2 = pKF2->mpCamera2;
|
||||
}
|
||||
else{
|
||||
R12 = Rll;
|
||||
t12 = tll;
|
||||
|
||||
pCamera1 = pKF1->mpCamera;
|
||||
pCamera2 = pKF2->mpCamera;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
|
||||
if(pCamera1->epipolarConstrain_(pCamera2,kp1,kp2,R12,t12,pKF1->mvLevelSigma2[kp1.octave],pKF2->mvLevelSigma2[kp2.octave])||bCoarse) // MODIFICATION_2
|
||||
{
|
||||
bestIdx2 = idx2;
|
||||
bestDist = dist;
|
||||
}
|
||||
}
|
||||
|
||||
if(bestIdx2>=0)
|
||||
{
|
||||
const cv::KeyPoint &kp2 = (pKF2 -> NLeft == -1) ? pKF2->mvKeysUn[bestIdx2]
|
||||
: (bestIdx2 < pKF2 -> NLeft) ? pKF2 -> mvKeys[bestIdx2]
|
||||
: pKF2 -> mvKeysRight[bestIdx2 - pKF2 -> NLeft];
|
||||
vMatches12[idx1]=bestIdx2;
|
||||
nmatches++;
|
||||
|
||||
if(mbCheckOrientation)
|
||||
{
|
||||
float rot = kp1.angle-kp2.angle;
|
||||
if(rot<0.0)
|
||||
rot+=360.0f;
|
||||
int bin = round(rot*factor);
|
||||
if(bin==HISTO_LENGTH)
|
||||
bin=0;
|
||||
assert(bin>=0 && bin<HISTO_LENGTH);
|
||||
rotHist[bin].push_back(idx1);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
f1it++;
|
||||
f2it++;
|
||||
}
|
||||
else if(f1it->first < f2it->first)
|
||||
{
|
||||
f1it = vFeatVec1.lower_bound(f2it->first);
|
||||
}
|
||||
else
|
||||
{
|
||||
f2it = vFeatVec2.lower_bound(f1it->first);
|
||||
}
|
||||
}
|
||||
|
||||
if(mbCheckOrientation)
|
||||
{
|
||||
int ind1=-1;
|
||||
int ind2=-1;
|
||||
int ind3=-1;
|
||||
|
||||
ComputeThreeMaxima(rotHist,HISTO_LENGTH,ind1,ind2,ind3);
|
||||
|
||||
for(int i=0; i<HISTO_LENGTH; i++)
|
||||
{
|
||||
if(i==ind1 || i==ind2 || i==ind3)
|
||||
continue;
|
||||
for(size_t j=0, jend=rotHist[i].size(); j<jend; j++)
|
||||
{
|
||||
vMatches12[rotHist[i][j]]=-1;
|
||||
nmatches--;
|
||||
}
|
||||
}
|
||||
|
||||
}
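// Orientation consistency check: each accepted match votes the angular difference between its
// two keypoints into HISTO_LENGTH bins; only matches in the three most populated bins survive,
// the rest are discarded as rotation-inconsistent.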
|
||||
|
||||
vMatchedPairs.clear();
|
||||
vMatchedPairs.reserve(nmatches);
|
||||
|
||||
for(size_t i=0, iend=vMatches12.size(); i<iend; i++)
|
||||
{
|
||||
if(vMatches12[i]<0)
|
||||
continue;
|
||||
vMatchedPairs.push_back(make_pair(i,vMatches12[i]));
|
||||
}
|
||||
|
||||
return nmatches;
|
||||
}
|
||||
|
||||
|
||||
int ORBmatcher::SearchForTriangulation(KeyFrame *pKF1, KeyFrame *pKF2, cv::Mat F12,
|
||||
vector<pair<size_t, size_t> > &vMatchedPairs, const bool bOnlyStereo, vector<cv::Mat> &vMatchedPoints)
|
||||
{
|
||||
|
@ -1620,8 +1864,6 @@ int ORBmatcher::Fuse(KeyFrame *pKF, const vector<MapPoint *> &vpMapPoints, const
|
|||
continue;
|
||||
}
|
||||
// Skip if the map point is invalid or already belongs to this keyframe (no fusion needed)
|
||||
/*if(pMP->isBad() || pMP->IsInKeyFrame(pKF))
|
||||
continue;*/
|
||||
if(pMP->isBad())
|
||||
{
|
||||
count_bad++;
|
||||
|
@ -1792,16 +2034,6 @@ int ORBmatcher::Fuse(KeyFrame *pKF, const vector<MapPoint *> &vpMapPoints, const
|
|||
|
||||
}
|
||||
|
||||
/*cout << "count_notMP = " << count_notMP << endl;
|
||||
cout << "count_bad = " << count_bad << endl;
|
||||
cout << "count_isinKF = " << count_isinKF << endl;
|
||||
cout << "count_negdepth = " << count_negdepth << endl;
|
||||
cout << "count_notinim = " << count_notinim << endl;
|
||||
cout << "count_dist = " << count_dist << endl;
|
||||
cout << "count_normal = " << count_normal << endl;
|
||||
cout << "count_notidx = " << count_notidx << endl;
|
||||
cout << "count_thcheck = " << count_thcheck << endl;
|
||||
cout << "tot fused points: " << nFused << endl;*/
|
||||
return nFused;
|
||||
}
|
||||
|
||||
|
|
src/Optimizer.cc (828 lines changed; diff suppressed because it is too large)
|
@ -381,7 +381,7 @@ cv::Mat Sim3Solver::find(vector<bool> &vbInliers12, int &nInliers)
|
|||
void Sim3Solver::ComputeCentroid(cv::Mat &P, cv::Mat &Pr, cv::Mat &C)
|
||||
{
|
||||
// Sum each row of matrix P and store the result in C. These two lines could also be implemented with the CV_REDUCE_AVG option
|
||||
cv::reduce(P,C,1,CV_REDUCE_SUM);
|
||||
cv::reduce(P,C,1,cv::REDUCE_SUM);
|
||||
C = C/P.cols; // Take the average
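// With each 3-D point stored as a column of P, cv::reduce(..., 1, cv::REDUCE_SUM) sums across
// the columns, so after dividing by P.cols, C holds the centroid; the loop below then subtracts
// C from every column to obtain the centred coordinates Pr.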
|
||||
|
||||
for(int i=0; i<P.cols; i++)
|
||||
|
@ -458,7 +458,7 @@ void Sim3Solver::ComputeSim3(cv::Mat &P1, cv::Mat &P2)
|
|||
double ang=atan2(norm(vec),evec.at<float>(0,0));
|
||||
|
||||
// vec/norm(vec) normalizes the rotation axis; multiplying by the angle then gives the rotation vector vec, which encodes both the rotation axis and the rotation angle
|
||||
vec = 2*ang*vec/norm(vec); //Angle-axis x. quaternion angle is the half
|
||||
vec = 2*ang*vec/norm(vec); //Angle-axis representation. quaternion angle is the half
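// For a unit quaternion q = (w, v) the rotation angle is theta = 2*atan2(|v|, w) and the axis
// is v/|v|; that is why ang is doubled here, turning vec into the axis-angle vector theta * v/|v|.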
|
||||
|
||||
mR12i.create(3,3,P1.type());
|
||||
// Convert the rotation vector (axis-angle) into a rotation matrix
|
||||
|
|
src/System.cc (320 lines changed)
|
@ -47,10 +47,10 @@ System::System(const string &strVocFile, // path of the bag-of-words (vocabulary) file
|
|||
const string &strLoadingFile // A parameter the author apparently intended for a map reloading feature
|
||||
):
|
||||
mSensor(sensor), // Initialize the sensor type
|
||||
mpViewer(static_cast<Viewer*>(NULL)), // ? Null... object pointer? TODO
|
||||
mpViewer(static_cast<Viewer*>(NULL)), // Null object pointer
|
||||
mbReset(false), mbResetActiveMap(false), // ? Reset the active map
|
||||
mbActivateLocalizationMode(false), // ? Switch that enables localization-only mode
|
||||
mbDeactivateLocalizationMode(false) // ? There is no such mode-switch flag
|
||||
mbActivateLocalizationMode(false), // Switch that enables localization-only mode
|
||||
mbDeactivateLocalizationMode(false) //
|
||||
{
|
||||
// Output welcome message
|
||||
cout << endl <<
|
||||
|
@ -84,7 +84,7 @@ System::System(const string &strVocFile, // path of the bag-of-words (vocabulary) file
|
|||
exit(-1);
|
||||
}
|
||||
|
||||
// ? New multi-map management feature in ORB-SLAM3; this seems to load the Atlas identifier
|
||||
// New multi-map management feature in ORB-SLAM3; the Atlas identifier is loaded here
|
||||
bool loadedAtlas = false;
|
||||
|
||||
//----
|
||||
|
@ -112,75 +112,7 @@ System::System(const string &strVocFile, //词袋文件所在路
|
|||
//Create the Atlas
|
||||
// Step 5: Create the multi-map Atlas; the argument 0 means the initial keyframe id is 0
|
||||
mpAtlas = new Atlas(0);
|
||||
|
||||
// The commented-out block below suggests the author intended to add a map reloading feature; something to look forward to
|
||||
/*if(strLoadingFile.empty())
|
||||
{
|
||||
//Load ORB Vocabulary
|
||||
cout << endl << "Loading ORB Vocabulary. This could take a while..." << endl;
|
||||
|
||||
mpVocabulary = new ORBVocabulary();
|
||||
bool bVocLoad = mpVocabulary->loadFromTextFile(strVocFile);
|
||||
if(!bVocLoad)
|
||||
{
|
||||
cerr << "Wrong path to vocabulary. " << endl;
|
||||
cerr << "Falied to open at: " << strVocFile << endl;
|
||||
exit(-1);
|
||||
}
|
||||
cout << "Vocabulary loaded!" << endl << endl;
|
||||
|
||||
//Create KeyFrame Database
|
||||
mpKeyFrameDatabase = new KeyFrameDatabase(*mpVocabulary);
|
||||
|
||||
//Create the Atlas
|
||||
//mpMap = new Map();
|
||||
mpAtlas = new Atlas(0);
|
||||
}
|
||||
else
|
||||
{
|
||||
//Load ORB Vocabulary
|
||||
cout << endl << "Loading ORB Vocabulary. This could take a while..." << endl;
|
||||
|
||||
mpVocabulary = new ORBVocabulary();
|
||||
bool bVocLoad = mpVocabulary->loadFromTextFile(strVocFile);
|
||||
if(!bVocLoad)
|
||||
{
|
||||
cerr << "Wrong path to vocabulary. " << endl;
|
||||
cerr << "Falied to open at: " << strVocFile << endl;
|
||||
exit(-1);
|
||||
}
|
||||
cout << "Vocabulary loaded!" << endl << endl;
|
||||
|
||||
cout << "Load File" << endl;
|
||||
|
||||
// Load the file with an earlier session
|
||||
//clock_t start = clock();
|
||||
bool isRead = LoadAtlas(strLoadingFile,BINARY_FILE);
|
||||
|
||||
if(!isRead)
|
||||
{
|
||||
cout << "Error to load the file, please try with other session file or vocabulary file" << endl;
|
||||
exit(-1);
|
||||
}
|
||||
mpKeyFrameDatabase = new KeyFrameDatabase(*mpVocabulary);
|
||||
|
||||
mpAtlas->SetKeyFrameDababase(mpKeyFrameDatabase);
|
||||
mpAtlas->SetORBVocabulary(mpVocabulary);
|
||||
mpAtlas->PostLoad();
|
||||
//cout << "KF in DB: " << mpKeyFrameDatabase->mnNumKFs << "; words: " << mpKeyFrameDatabase->mnNumWords << endl;
|
||||
|
||||
loadedAtlas = true;
|
||||
|
||||
mpAtlas->CreateNewMap();
|
||||
|
||||
//clock_t timeElapsed = clock() - start;
|
||||
//unsigned msElapsed = timeElapsed / (CLOCKS_PER_SEC / 1000);
|
||||
//cout << "Binary file read in " << msElapsed << " ms" << endl;
|
||||
|
||||
//usleep(10*1000*1000);
|
||||
}*/
|
||||
|
||||
// 设置Atlas中的传感器类型
|
||||
if (mSensor==IMU_STEREO || mSensor==IMU_MONOCULAR)
|
||||
// ? 如果是有imu的传感器类型,将mbIsInertial设置为imu属性,以后的跟踪和预积分将和这个标志有关
|
||||
mpAtlas->SetInertialSensor();
|
||||
|
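The constructor annotated above is normally driven from the example programs rather than called directly; a sketch of a typical caller, not taken from this commit (the vocabulary and settings paths are placeholders), showing where the sensor type that decides SetInertialSensor() comes from:

```cpp
// Sketch of a typical caller of ORB_SLAM3::System (paths are placeholders).
#include "System.h"

int main()
{
    // The sensor argument decides whether the Atlas above is marked inertial.
    ORB_SLAM3::System SLAM("ORBvoc.txt", "EuRoC.yaml",
                           ORB_SLAM3::System::IMU_MONOCULAR, /*bUseViewer=*/true);

    // ... feed images and IMU measurements in a loop, then:
    SLAM.Shutdown();
    return 0;
}
```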
@ -203,8 +135,6 @@ System::System(const string &strVocFile, // path to the vocabulary file
mpLocalMapper = new LocalMapping(this, mpAtlas, mSensor==MONOCULAR || mSensor==IMU_MONOCULAR, mSensor==IMU_MONOCULAR || mSensor==IMU_STEREO, strSequence);
mptLocalMapping = new thread(&ORB_SLAM3::LocalMapping::Run,mpLocalMapper);

// initFr is the id of the initialization frame; the code sets it to 0
mpLocalMapper->mInitFr = initFr;
// Maximum depth allowed for a 3D map point; beyond this threshold the triangulation is likely inaccurate and the point is discarded
mpLocalMapper->mThFarPoints = fsSettings["thFarPoints"];
// ? Open question: in C++, should a floating-point value be compared to 0 exactly?
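The question in the last comment (whether a float should be compared to 0 exactly) is usually answered with a tolerance rather than `==`. A generic sketch, not part of this commit; the commented usage line only mirrors the mThFarPoints/mbFarPoints pair seen elsewhere in this diff:

```cpp
// Generic sketch: compare a float against zero with a tolerance instead of ==.
#include <cmath>

inline bool IsNearlyZero(float x, float eps = 1e-6f)
{
    return std::fabs(x) < eps;
}

// e.g. only enable the far-point culling when the setting is actually non-zero:
// if(!IsNearlyZero(mpLocalMapper->mThFarPoints)) mpLocalMapper->mbFarPoints = true;
```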
@ -302,11 +232,8 @@ cv::Mat System::TrackStereo(const cv::Mat &imLeft, const cv::Mat &imRight, const
for(size_t i_imu = 0; i_imu < vImuMeas.size(); i_imu++)
mpTracker->GrabImuData(vImuMeas[i_imu]);

// std::cout << "start GrabImageStereo" << std::endl;
cv::Mat Tcw = mpTracker->GrabImageStereo(imLeft,imRight,timestamp,filename);

// std::cout << "out grabber" << std::endl;

unique_lock<mutex> lock2(mMutexState);
mTrackingState = mpTracker->mState;
mTrackedMapPoints = mpTracker->mCurrentFrame.mvpMapPoints;
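TrackStereo receives the IMU samples accumulated since the previous frame and forwards them one by one through GrabImuData, as the loop above shows. A hedged caller-side sketch (the image paths and sample values are invented, and the IMU::Point constructor is assumed to match the upstream ImuTypes.h):

```cpp
// Sketch of feeding one stereo frame plus its IMU batch into the system
// (placeholder paths/values; IMU::Point signature assumed from upstream ORB-SLAM3).
#include "System.h"
#include "ImuTypes.h"
#include <opencv2/imgcodecs.hpp>
#include <vector>

void FeedOneStereoFrame(ORB_SLAM3::System &SLAM, double tFrame)
{
    cv::Mat imLeft  = cv::imread("left.png",  cv::IMREAD_UNCHANGED);
    cv::Mat imRight = cv::imread("right.png", cv::IMREAD_UNCHANGED);

    // All IMU samples between the previous frame and this one, oldest first.
    std::vector<ORB_SLAM3::IMU::Point> vImuMeas;
    vImuMeas.push_back(ORB_SLAM3::IMU::Point(0.01f, 0.02f, 9.81f,    // accelerometer [m/s^2]
                                             0.001f, 0.002f, 0.003f, // gyroscope [rad/s]
                                             tFrame - 0.005));       // sample timestamp [s]

    // Internally this loops over vImuMeas calling GrabImuData(), as in the hunk above.
    SLAM.TrackStereo(imLeft, imRight, tFrame, vImuMeas);
}
```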
@ -518,6 +445,10 @@ void System::Shutdown()

if(mpViewer)
pangolin::BindToContext("ORB-SLAM2: Map Viewer");

#ifdef REGISTER_TIMES
mpTracker->PrintTimeStats();
#endif
}

@ -650,7 +581,6 @@ void System::SaveTrajectoryEuRoC(const string &filename)

ofstream f;
f.open(filename.c_str());
// cout << "file open" << endl;
f << fixed;

// Frame pose is stored relative to its reference keyframe (which is optimized by BA and pose graph).
@ -663,56 +593,34 @@ void System::SaveTrajectoryEuRoC(const string &filename)
|
|||
list<double>::iterator lT = mpTracker->mlFrameTimes.begin();
|
||||
list<bool>::iterator lbL = mpTracker->mlbLost.begin();
|
||||
|
||||
//cout << "size mlpReferences: " << mpTracker->mlpReferences.size() << endl;
|
||||
//cout << "size mlRelativeFramePoses: " << mpTracker->mlRelativeFramePoses.size() << endl;
|
||||
//cout << "size mpTracker->mlFrameTimes: " << mpTracker->mlFrameTimes.size() << endl;
|
||||
//cout << "size mpTracker->mlbLost: " << mpTracker->mlbLost.size() << endl;
|
||||
|
||||
|
||||
for(list<cv::Mat>::iterator lit=mpTracker->mlRelativeFramePoses.begin(),
|
||||
lend=mpTracker->mlRelativeFramePoses.end();lit!=lend;lit++, lRit++, lT++, lbL++)
|
||||
{
|
||||
//cout << "1" << endl;
|
||||
if(*lbL)
|
||||
continue;
|
||||
|
||||
|
||||
KeyFrame* pKF = *lRit;
|
||||
//cout << "KF: " << pKF->mnId << endl;
|
||||
|
||||
cv::Mat Trw = cv::Mat::eye(4,4,CV_32F);
|
||||
|
||||
/*cout << "2" << endl;
|
||||
cout << "KF id: " << pKF->mnId << endl;*/
|
||||
|
||||
// If the reference keyframe was culled, traverse the spanning tree to get a suitable keyframe.
|
||||
if (!pKF)
|
||||
continue;
|
||||
|
||||
//cout << "2.5" << endl;
|
||||
|
||||
while(pKF->isBad())
|
||||
{
|
||||
//cout << " 2.bad" << endl;
|
||||
Trw = Trw*pKF->mTcp;
|
||||
pKF = pKF->GetParent();
|
||||
//cout << "--Parent KF: " << pKF->mnId << endl;
|
||||
}
|
||||
|
||||
if(!pKF || pKF->GetMap() != pBiggerMap)
|
||||
{
|
||||
//cout << "--Parent KF is from another map" << endl;
|
||||
/*if(pKF)
|
||||
cout << "--Parent KF " << pKF->mnId << " is from another map " << pKF->GetMap()->GetId() << endl;*/
|
||||
continue;
|
||||
}
|
||||
|
||||
//cout << "3" << endl;
|
||||
|
||||
Trw = Trw*pKF->GetPose()*Twb; // Tcp*Tpw*Twb0=Tcb0 where b0 is the new world reference
|
||||
|
||||
// cout << "4" << endl;
|
||||
|
||||
if (mSensor == IMU_MONOCULAR || mSensor == IMU_STEREO)
|
||||
{
|
||||
cv::Mat Tbw = pKF->mImuCalib.Tbc*(*lit)*Trw;
|
||||
|
@ -730,7 +638,6 @@ void System::SaveTrajectoryEuRoC(const string &filename)
f << setprecision(6) << 1e9*(*lT) << " " << setprecision(9) << twc.at<float>(0) << " " << twc.at<float>(1) << " " << twc.at<float>(2) << " " << q[0] << " " << q[1] << " " << q[2] << " " << q[3] << endl;
}

// cout << "5" << endl;
}
//cout << "end saving trajectory" << endl;
f.close();
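The line written above is the usual "timestamp tx ty tz qx qy qz qw" trajectory record, with the timestamp scaled to nanoseconds for EuRoC. A self-contained sketch of the same conversion done with Eigen instead of the project's Converter helper (not part of this commit):

```cpp
// Standalone sketch: turn a 4x4 camera-to-world pose into a trajectory line.
#include <Eigen/Dense>
#include <cstdio>

void PrintTrajectoryLine(double timestamp_s, const Eigen::Matrix4f &Twc)
{
    Eigen::Matrix3f Rwc = Twc.block<3,3>(0,0);
    Eigen::Vector3f twc = Twc.block<3,1>(0,3);
    Eigen::Quaternionf q(Rwc);                  // built from the rotation block

    std::printf("%.6f %.9f %.9f %.9f %.9f %.9f %.9f %.9f\n",
                1e9 * timestamp_s,              // EuRoC convention: nanoseconds
                twc.x(), twc.y(), twc.z(),
                q.x(), q.y(), q.z(), q.w());
}
```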
@ -767,8 +674,6 @@ void System::SaveKeyFrameTrajectoryEuRoC(const string &filename)
{
KeyFrame* pKF = vpKFs[i];

// pKF->SetPose(pKF->GetPose()*Two);

if(pKF->isBad())
continue;
if (mSensor == IMU_MONOCULAR || mSensor == IMU_STEREO)
@ -843,63 +748,6 @@ void System::SaveTrajectoryKITTI(const string &filename)
|
|||
f.close();
|
||||
}
|
||||
|
||||
|
||||
void System::SaveDebugData(const int &initIdx)
|
||||
{
|
||||
// 0. Save initialization trajectory
|
||||
SaveTrajectoryEuRoC("init_FrameTrajectoy_" +to_string(mpLocalMapper->mInitSect)+ "_" + to_string(initIdx)+".txt");
|
||||
|
||||
// 1. Save scale
|
||||
ofstream f;
|
||||
f.open("init_Scale_" + to_string(mpLocalMapper->mInitSect) + ".txt", ios_base::app);
|
||||
f << fixed;
|
||||
f << mpLocalMapper->mScale << endl;
|
||||
f.close();
|
||||
|
||||
// 2. Save gravity direction
|
||||
f.open("init_GDir_" +to_string(mpLocalMapper->mInitSect)+ ".txt", ios_base::app);
|
||||
f << fixed;
|
||||
f << mpLocalMapper->mRwg(0,0) << "," << mpLocalMapper->mRwg(0,1) << "," << mpLocalMapper->mRwg(0,2) << endl;
|
||||
f << mpLocalMapper->mRwg(1,0) << "," << mpLocalMapper->mRwg(1,1) << "," << mpLocalMapper->mRwg(1,2) << endl;
|
||||
f << mpLocalMapper->mRwg(2,0) << "," << mpLocalMapper->mRwg(2,1) << "," << mpLocalMapper->mRwg(2,2) << endl;
|
||||
f.close();
|
||||
|
||||
// 3. Save computational cost
|
||||
f.open("init_CompCost_" +to_string(mpLocalMapper->mInitSect)+ ".txt", ios_base::app);
|
||||
f << fixed;
|
||||
f << mpLocalMapper->mCostTime << endl;
|
||||
f.close();
|
||||
|
||||
// 4. Save biases
|
||||
f.open("init_Biases_" +to_string(mpLocalMapper->mInitSect)+ ".txt", ios_base::app);
|
||||
f << fixed;
|
||||
f << mpLocalMapper->mbg(0) << "," << mpLocalMapper->mbg(1) << "," << mpLocalMapper->mbg(2) << endl;
|
||||
f << mpLocalMapper->mba(0) << "," << mpLocalMapper->mba(1) << "," << mpLocalMapper->mba(2) << endl;
|
||||
f.close();
|
||||
|
||||
// 5. Save covariance matrix
|
||||
f.open("init_CovMatrix_" +to_string(mpLocalMapper->mInitSect)+ "_" +to_string(initIdx)+".txt", ios_base::app);
|
||||
f << fixed;
|
||||
for(int i=0; i<mpLocalMapper->mcovInertial.rows(); i++)
|
||||
{
|
||||
for(int j=0; j<mpLocalMapper->mcovInertial.cols(); j++)
|
||||
{
|
||||
if(j!=0)
|
||||
f << ",";
|
||||
f << setprecision(15) << mpLocalMapper->mcovInertial(i,j);
|
||||
}
|
||||
f << endl;
|
||||
}
|
||||
f.close();
|
||||
|
||||
// 6. Save initialization time
|
||||
f.open("init_Time_" +to_string(mpLocalMapper->mInitSect)+ ".txt", ios_base::app);
|
||||
f << fixed;
|
||||
f << mpLocalMapper->mInitTime << endl;
|
||||
f.close();
|
||||
}
|
||||
|
||||
|
||||
int System::GetTrackingState()
|
||||
{
|
||||
unique_lock<mutex> lock(mMutexState);
|
||||
|
@ -933,7 +781,7 @@ bool System::isLost()
return false;
else
{
if ((mpTracker->mState==Tracking::LOST)) //||(mpTracker->mState==Tracking::RECENTLY_LOST))
if ((mpTracker->mState==Tracking::LOST))
return true;
else
return false;
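Instead of isLost(), callers can also poll the tracking state directly. A hedged sketch (it assumes GetTrackingState() and the Tracking::eTrackingState constants are exposed as in upstream ORB-SLAM3):

```cpp
// Sketch: polling the tracking state from the host application (assumed public API).
#include "System.h"
#include "Tracking.h"

bool TrackingIsHealthy(ORB_SLAM3::System &SLAM)
{
    int state = SLAM.GetTrackingState();
    // OK means tracked normally; RECENTLY_LOST is still trying to relocalize.
    return state == ORB_SLAM3::Tracking::OK ||
           state == ORB_SLAM3::Tracking::RECENTLY_LOST;
}
```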
@ -960,154 +808,18 @@ void System::ChangeDataset()
|
|||
mpTracker->NewDataset();
|
||||
}
|
||||
|
||||
/*void System::SaveAtlas(int type){
|
||||
cout << endl << "Enter the name of the file if you want to save the current Atlas session. To exit press ENTER: ";
|
||||
string saveFileName;
|
||||
getline(cin,saveFileName);
|
||||
if(!saveFileName.empty())
|
||||
{
|
||||
//clock_t start = clock();
|
||||
|
||||
// Save the current session
|
||||
mpAtlas->PreSave();
|
||||
mpKeyFrameDatabase->PreSave();
|
||||
|
||||
string pathSaveFileName = "./";
|
||||
pathSaveFileName = pathSaveFileName.append(saveFileName);
|
||||
pathSaveFileName = pathSaveFileName.append(".osa");
|
||||
|
||||
string strVocabularyChecksum = CalculateCheckSum(mStrVocabularyFilePath,TEXT_FILE);
|
||||
std::size_t found = mStrVocabularyFilePath.find_last_of("/\\");
|
||||
string strVocabularyName = mStrVocabularyFilePath.substr(found+1);
|
||||
|
||||
if(type == TEXT_FILE) // File text
|
||||
{
|
||||
cout << "Starting to write the save text file " << endl;
|
||||
std::remove(pathSaveFileName.c_str());
|
||||
std::ofstream ofs(pathSaveFileName, std::ios::binary);
|
||||
boost::archive::text_oarchive oa(ofs);
|
||||
|
||||
oa << strVocabularyName;
|
||||
oa << strVocabularyChecksum;
|
||||
oa << mpAtlas;
|
||||
oa << mpKeyFrameDatabase;
|
||||
cout << "End to write the save text file" << endl;
|
||||
}
|
||||
else if(type == BINARY_FILE) // File binary
|
||||
{
|
||||
cout << "Starting to write the save binary file" << endl;
|
||||
std::remove(pathSaveFileName.c_str());
|
||||
std::ofstream ofs(pathSaveFileName, std::ios::binary);
|
||||
boost::archive::binary_oarchive oa(ofs);
|
||||
oa << strVocabularyName;
|
||||
oa << strVocabularyChecksum;
|
||||
oa << mpAtlas;
|
||||
oa << mpKeyFrameDatabase;
|
||||
cout << "End to write save binary file" << endl;
|
||||
}
|
||||
|
||||
//clock_t timeElapsed = clock() - start;
|
||||
//unsigned msElapsed = timeElapsed / (CLOCKS_PER_SEC / 1000);
|
||||
//cout << "Binary file saved in " << msElapsed << " ms" << endl;
|
||||
}
|
||||
#ifdef REGISTER_TIMES
|
||||
void System::InsertRectTime(double& time)
|
||||
{
|
||||
mpTracker->vdRectStereo_ms.push_back(time);
|
||||
}
|
||||
|
||||
bool System::LoadAtlas(string filename, int type)
|
||||
void System::InsertTrackTime(double& time)
|
||||
{
|
||||
string strFileVoc, strVocChecksum;
|
||||
bool isRead = false;
|
||||
|
||||
if(type == TEXT_FILE) // File text
|
||||
{
|
||||
cout << "Starting to read the save text file " << endl;
|
||||
std::ifstream ifs(filename, std::ios::binary);
|
||||
if(!ifs.good())
|
||||
{
|
||||
cout << "Load file not found" << endl;
|
||||
return false;
|
||||
}
|
||||
boost::archive::text_iarchive ia(ifs);
|
||||
ia >> strFileVoc;
|
||||
ia >> strVocChecksum;
|
||||
ia >> mpAtlas;
|
||||
//ia >> mpKeyFrameDatabase;
|
||||
cout << "End to load the save text file " << endl;
|
||||
isRead = true;
|
||||
}
|
||||
else if(type == BINARY_FILE) // File binary
|
||||
{
|
||||
cout << "Starting to read the save binary file" << endl;
|
||||
std::ifstream ifs(filename, std::ios::binary);
|
||||
if(!ifs.good())
|
||||
{
|
||||
cout << "Load file not found" << endl;
|
||||
return false;
|
||||
}
|
||||
boost::archive::binary_iarchive ia(ifs);
|
||||
ia >> strFileVoc;
|
||||
ia >> strVocChecksum;
|
||||
ia >> mpAtlas;
|
||||
//ia >> mpKeyFrameDatabase;
|
||||
cout << "End to load the save binary file" << endl;
|
||||
isRead = true;
|
||||
}
|
||||
|
||||
if(isRead)
|
||||
{
|
||||
//Check if the vocabulary is the same
|
||||
string strInputVocabularyChecksum = CalculateCheckSum(mStrVocabularyFilePath,TEXT_FILE);
|
||||
|
||||
if(strInputVocabularyChecksum.compare(strVocChecksum) != 0)
|
||||
{
|
||||
cout << "The vocabulary load isn't the same which the load session was created " << endl;
|
||||
cout << "-Vocabulary name: " << strFileVoc << endl;
|
||||
return false; // Both are differents
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
return false;
|
||||
mpTracker->vdTrackTotal_ms.push_back(time);
|
||||
}
|
||||
#endif
|
||||
|
||||
string System::CalculateCheckSum(string filename, int type)
|
||||
{
|
||||
string checksum = "";
|
||||
|
||||
unsigned char c[MD5_DIGEST_LENGTH];
|
||||
|
||||
std::ios_base::openmode flags = std::ios::in;
|
||||
if(type == BINARY_FILE) // Binary file
|
||||
flags = std::ios::in | std::ios::binary;
|
||||
|
||||
ifstream f(filename.c_str(), flags);
|
||||
if ( !f.is_open() )
|
||||
{
|
||||
cout << "[E] Unable to open the in file " << filename << " for Md5 hash." << endl;
|
||||
return checksum;
|
||||
}
|
||||
|
||||
MD5_CTX md5Context;
|
||||
char buffer[1024];
|
||||
|
||||
MD5_Init (&md5Context);
|
||||
while ( int count = f.readsome(buffer, sizeof(buffer)))
|
||||
{
|
||||
MD5_Update(&md5Context, buffer, count);
|
||||
}
|
||||
|
||||
f.close();
|
||||
|
||||
MD5_Final(c, &md5Context );
|
||||
|
||||
for(int i = 0; i < MD5_DIGEST_LENGTH; i++)
|
||||
{
|
||||
char aux[10];
|
||||
sprintf(aux,"%02x", c[i]);
|
||||
checksum = checksum + aux;
|
||||
}
|
||||
|
||||
return checksum;
|
||||
}*/
|
||||
|
||||
} //namespace ORB_SLAM
|
||||
|
||||
|
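The commented-out SaveAtlas/LoadAtlas code earlier in this file serializes the Atlas and keyframe database with Boost.Serialization archives and guards the session with an MD5 checksum of the vocabulary. A minimal self-contained sketch of the archive pattern only (MapStub is an invented placeholder type standing in for Atlas/KeyFrameDatabase):

```cpp
// Minimal Boost.Serialization sketch (MapStub is an invented illustration type).
#include <boost/archive/binary_oarchive.hpp>
#include <boost/archive/binary_iarchive.hpp>
#include <boost/serialization/string.hpp>
#include <fstream>
#include <string>

struct MapStub
{
    std::string name;
    int nKeyFrames = 0;

    template<class Archive>
    void serialize(Archive &ar, const unsigned int /*version*/)
    {
        ar & name;
        ar & nKeyFrames;
    }
};

int main()
{
    MapStub out{"session01", 42};
    {
        std::ofstream ofs("session.osa", std::ios::binary);
        boost::archive::binary_oarchive oa(ofs);
        oa << out;                      // same pattern as "oa << mpAtlas;" above
    }

    MapStub in;
    {
        std::ifstream ifs("session.osa", std::ios::binary);
        boost::archive::binary_iarchive ia(ifs);
        ia >> in;                       // same pattern as "ia >> mpAtlas;" above
    }
    return 0;
}
```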
704
src/Tracking.cc
@ -19,21 +19,20 @@

#include "Tracking.h"

#include<opencv2/core/core.hpp>
#include<opencv2/features2d/features2d.hpp>
#include <opencv2/core/core.hpp>
#include <opencv2/features2d/features2d.hpp>

#include"ORBmatcher.h"
#include"FrameDrawer.h"
#include"Converter.h"
#include"Initializer.h"
#include"G2oTypes.h"
#include"Optimizer.h"
#include"PnPsolver.h"
#include "ORBmatcher.h"
#include "FrameDrawer.h"
#include "Converter.h"
#include "Initializer.h"
#include "G2oTypes.h"
#include "Optimizer.h"

#include<iostream>
#include <iostream>

#include<mutex>
#include<chrono>
#include <mutex>
#include <chrono>
#include <include/CameraModels/Pinhole.h>
#include <include/CameraModels/KannalaBrandt8.h>
#include <include/MLPnPsolver.h>
@ -49,7 +48,7 @@ Tracking::Tracking(System *pSys, ORBVocabulary* pVoc, FrameDrawer *pFrameDrawer,
mState(NO_IMAGES_YET), mSensor(sensor), mTrackedFr(0), mbStep(false),
mbOnlyTracking(false), mbMapUpdated(false), mbVO(false), mpORBVocabulary(pVoc), mpKeyFrameDB(pKFDB),
mpInitializer(static_cast<Initializer*>(NULL)), mpSystem(pSys), mpViewer(NULL),
mpFrameDrawer(pFrameDrawer), mpMapDrawer(pMapDrawer), mpAtlas(pAtlas), mnLastRelocFrameId(0), time_recently_lost(5.0),
mpFrameDrawer(pFrameDrawer), mpMapDrawer(pMapDrawer), mpAtlas(pAtlas), mnLastRelocFrameId(0), time_recently_lost(5.0), time_recently_lost_visual(2.0),
mnInitialFrameId(0), mbCreatedMap(false), mnFirstFrameId(0), mpCamera2(nullptr)
{
// Load camera parameters from settings file
@ -85,52 +84,6 @@ Tracking::Tracking(System *pSys, ORBVocabulary* pVoc, FrameDrawer *pFrameDrawer,
|
|||
|
||||
mbInitWith3KFs = false;
|
||||
|
||||
|
||||
//Rectification parameters
|
||||
/*mbNeedRectify = false;
|
||||
if((mSensor == System::STEREO || mSensor == System::IMU_STEREO) && sCameraName == "PinHole")
|
||||
{
|
||||
cv::Mat K_l, K_r, P_l, P_r, R_l, R_r, D_l, D_r;
|
||||
fSettings["LEFT.K"] >> K_l;
|
||||
fSettings["RIGHT.K"] >> K_r;
|
||||
|
||||
fSettings["LEFT.P"] >> P_l;
|
||||
fSettings["RIGHT.P"] >> P_r;
|
||||
|
||||
fSettings["LEFT.R"] >> R_l;
|
||||
fSettings["RIGHT.R"] >> R_r;
|
||||
|
||||
fSettings["LEFT.D"] >> D_l;
|
||||
fSettings["RIGHT.D"] >> D_r;
|
||||
|
||||
int rows_l = fSettings["LEFT.height"];
|
||||
int cols_l = fSettings["LEFT.width"];
|
||||
int rows_r = fSettings["RIGHT.height"];
|
||||
int cols_r = fSettings["RIGHT.width"];
|
||||
|
||||
if(K_l.empty() || K_r.empty() || P_l.empty() || P_r.empty() || R_l.empty() || R_r.empty() || D_l.empty() || D_r.empty()
|
||||
|| rows_l==0 || cols_l==0 || rows_r==0 || cols_r==0)
|
||||
{
|
||||
mbNeedRectify = false;
|
||||
}
|
||||
else
|
||||
{
|
||||
mbNeedRectify = true;
|
||||
// M1r y M2r son los outputs (igual para l)
|
||||
// M1r y M2r son las matrices relativas al mapeo correspondiente a la rectificación de mapa en el eje X e Y respectivamente
|
||||
//cv::initUndistortRectifyMap(K_l,D_l,R_l,P_l.rowRange(0,3).colRange(0,3),cv::Size(cols_l,rows_l),CV_32F,M1l,M2l);
|
||||
//cv::initUndistortRectifyMap(K_r,D_r,R_r,P_r.rowRange(0,3).colRange(0,3),cv::Size(cols_r,rows_r),CV_32F,M1r,M2r);
|
||||
}
|
||||
|
||||
|
||||
}
|
||||
else
|
||||
{
|
||||
int cols = 752;
|
||||
int rows = 480;
|
||||
cv::Mat R_l = cv::Mat::eye(3, 3, CV_32F);
|
||||
}*/
|
||||
|
||||
mnNumDataset = 0;
|
||||
|
||||
if(!b_parse_cam || !b_parse_orb || !b_parse_imu)
|
||||
|
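The commented-out block in the hunk above reads the LEFT./RIGHT. calibration entries and would precompute rectification maps with cv::initUndistortRectifyMap. A standalone sketch of that preparation for one camera, not part of this commit (the settings keys follow the EuRoC-style yaml used here; the helper name is invented):

```cpp
// Standalone sketch: build and apply a rectification map for the left camera.
#include <opencv2/core.hpp>
#include <opencv2/calib3d.hpp>   // cv::initUndistortRectifyMap
#include <opencv2/imgproc.hpp>   // cv::remap

void RectifyLeft(const cv::FileStorage &fs, const cv::Mat &raw, cv::Mat &rectified)
{
    cv::Mat K, D, R, P;
    fs["LEFT.K"] >> K;
    fs["LEFT.D"] >> D;
    fs["LEFT.R"] >> R;
    fs["LEFT.P"] >> P;
    int rows = fs["LEFT.height"];
    int cols = fs["LEFT.width"];

    cv::Mat M1, M2;   // the M1l/M2l maps of the commented-out code
    cv::initUndistortRectifyMap(K, D, R, P.rowRange(0,3).colRange(0,3),
                                cv::Size(cols, rows), CV_32F, M1, M2);
    cv::remap(raw, rectified, M1, M2, cv::INTER_LINEAR);
}
```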
@ -146,24 +99,399 @@ Tracking::Tracking(System *pSys, ORBVocabulary* pVoc, FrameDrawer *pFrameDrawer,
|
|||
}
|
||||
}
|
||||
|
||||
//f_track_stats.open("tracking_stats"+ _nameSeq + ".txt");
|
||||
/*f_track_stats.open("tracking_stats.txt");
|
||||
f_track_stats << "# timestamp, Num KF local, Num MP local, time" << endl;
|
||||
f_track_stats << fixed ;*/
|
||||
#ifdef REGISTER_TIMES
|
||||
vdRectStereo_ms.clear();
|
||||
vdORBExtract_ms.clear();
|
||||
vdStereoMatch_ms.clear();
|
||||
vdIMUInteg_ms.clear();
|
||||
vdPosePred_ms.clear();
|
||||
vdLMTrack_ms.clear();
|
||||
vdNewKF_ms.clear();
|
||||
vdTrackTotal_ms.clear();
|
||||
|
||||
#ifdef SAVE_TIMES
|
||||
f_track_times.open("tracking_times.txt");
|
||||
f_track_times << "# ORB_Ext(ms), Stereo matching(ms), Preintegrate_IMU(ms), Pose pred(ms), LocalMap_track(ms), NewKF_dec(ms)" << endl;
|
||||
f_track_times << fixed ;
|
||||
vdUpdatedLM_ms.clear();
|
||||
vdSearchLP_ms.clear();
|
||||
vdPoseOpt_ms.clear();
|
||||
#endif
|
||||
|
||||
vnKeyFramesLM.clear();
|
||||
vnMapPointsLM.clear();
|
||||
}
|
||||
|
||||
#ifdef REGISTER_TIMES
|
||||
double calcAverage(vector<double> v_times)
|
||||
{
|
||||
double accum = 0;
|
||||
for(double value : v_times)
|
||||
{
|
||||
accum += value;
|
||||
}
|
||||
|
||||
return accum / v_times.size();
|
||||
}
|
||||
|
||||
double calcDeviation(vector<double> v_times, double average)
|
||||
{
|
||||
double accum = 0;
|
||||
for(double value : v_times)
|
||||
{
|
||||
accum += pow(value - average, 2);
|
||||
}
|
||||
return sqrt(accum / v_times.size());
|
||||
}
|
||||
|
||||
double calcAverage(vector<int> v_values)
|
||||
{
|
||||
double accum = 0;
|
||||
int total = 0;
|
||||
for(double value : v_values)
|
||||
{
|
||||
if(value == 0)
|
||||
continue;
|
||||
accum += value;
|
||||
total++;
|
||||
}
|
||||
|
||||
return accum / total;
|
||||
}
|
||||
|
||||
double calcDeviation(vector<int> v_values, double average)
|
||||
{
|
||||
double accum = 0;
|
||||
int total = 0;
|
||||
for(double value : v_values)
|
||||
{
|
||||
if(value == 0)
|
||||
continue;
|
||||
accum += pow(value - average, 2);
|
||||
total++;
|
||||
}
|
||||
return sqrt(accum / total);
|
||||
}
|
||||
|
||||
void Tracking::LocalMapStats2File()
|
||||
{
|
||||
ofstream f;
|
||||
f.open("LocalMapTimeStats.txt");
|
||||
f << fixed << setprecision(6);
|
||||
f << "#Stereo rect[ms], MP culling[ms], MP creation[ms], LBA[ms], KF culling[ms], Total[ms]" << endl;
|
||||
for(int i=0; i<mpLocalMapper->vdLMTotal_ms.size(); ++i)
|
||||
{
|
||||
f << mpLocalMapper->vdKFInsert_ms[i] << "," << mpLocalMapper->vdMPCulling_ms[i] << ","
|
||||
<< mpLocalMapper->vdMPCreation_ms[i] << "," << mpLocalMapper->vdLBA_ms[i] << ","
|
||||
<< mpLocalMapper->vdKFCulling_ms[i] << "," << mpLocalMapper->vdLMTotal_ms[i] << endl;
|
||||
}
|
||||
|
||||
f.close();
|
||||
|
||||
f.open("LBA_Stats.txt");
|
||||
f << fixed << setprecision(6);
|
||||
f << "#LBA time[ms], KF opt[#], KF fixed[#], MP[#], Edges[#]" << endl;
|
||||
for(int i=0; i<mpLocalMapper->vdLBASync_ms.size(); ++i)
|
||||
{
|
||||
f << mpLocalMapper->vdLBASync_ms[i] << "," << mpLocalMapper->vnLBA_KFopt[i] << ","
|
||||
<< mpLocalMapper->vnLBA_KFfixed[i] << "," << mpLocalMapper->vnLBA_MPs[i] << ","
|
||||
<< mpLocalMapper->vnLBA_edges[i] << endl;
|
||||
}
|
||||
|
||||
|
||||
f.close();
|
||||
}
|
||||
|
||||
void Tracking::TrackStats2File()
|
||||
{
|
||||
ofstream f;
|
||||
f.open("SessionInfo.txt");
|
||||
f << fixed;
|
||||
f << "Number of KFs: " << mpAtlas->GetAllKeyFrames().size() << endl;
|
||||
f << "Number of MPs: " << mpAtlas->GetAllMapPoints().size() << endl;
|
||||
|
||||
f << "OpenCV version: " << CV_VERSION << endl;
|
||||
|
||||
f.close();
|
||||
|
||||
f.open("TrackingTimeStats.txt");
|
||||
f << fixed << setprecision(6);
|
||||
|
||||
f << "#KF insert[ms], ORB ext[ms], Stereo match[ms], IMU preint[ms], Pose pred[ms], LM track[ms], KF dec[ms], Total[ms]" << endl;
|
||||
|
||||
for(int i=0; i<vdTrackTotal_ms.size(); ++i)
|
||||
{
|
||||
double stereo_rect = 0.0;
|
||||
if(!vdRectStereo_ms.empty())
|
||||
{
|
||||
stereo_rect = vdStereoMatch_ms[i];
|
||||
}
|
||||
|
||||
double stereo_match = 0.0;
|
||||
if(!vdStereoMatch_ms.empty())
|
||||
{
|
||||
stereo_match = vdStereoMatch_ms[i];
|
||||
}
|
||||
|
||||
double imu_preint = 0.0;
|
||||
if(!vdIMUInteg_ms.empty())
|
||||
{
|
||||
imu_preint = vdIMUInteg_ms[i];
|
||||
}
|
||||
|
||||
f << stereo_rect << "," << vdORBExtract_ms[i] << "," << stereo_match << "," << imu_preint << ","
|
||||
<< vdPosePred_ms[i] << "," << vdLMTrack_ms[i] << "," << vdNewKF_ms[i] << "," << vdTrackTotal_ms[i] << endl;
|
||||
}
|
||||
|
||||
f.close();
|
||||
|
||||
f.open("TrackLocalMapStats.txt");
|
||||
f << fixed << setprecision(6);
|
||||
|
||||
f << "# number of KF, number of MP, UpdateLM[ms], SearchLP[ms], PoseOpt[ms]" << endl;
|
||||
|
||||
for(int i=0; i<vnKeyFramesLM.size(); ++i)
|
||||
{
|
||||
|
||||
f << vnKeyFramesLM[i] << "," << vnMapPointsLM[i] << "," << vdUpdatedLM_ms[i] << "," << vdSearchLP_ms[i] << "," << vdPoseOpt_ms[i] << endl;
|
||||
}
|
||||
|
||||
f.close();
|
||||
}
|
||||
|
||||
void Tracking::PrintTimeStats()
|
||||
{
|
||||
// Save data in files
|
||||
TrackStats2File();
|
||||
LocalMapStats2File();
|
||||
|
||||
|
||||
ofstream f;
|
||||
f.open("ExecTimeMean.txt");
|
||||
f << fixed;
|
||||
//Report the mean and std of each one
|
||||
std::cout << std::endl << " TIME STATS in ms (mean$\\pm$std)" << std::endl;
|
||||
f << " TIME STATS in ms (mean$\\pm$std)" << std::endl;
|
||||
cout << "OpenCV version: " << CV_VERSION << endl;
|
||||
f << "OpenCV version: " << CV_VERSION << endl;
|
||||
std::cout << "---------------------------" << std::endl;
|
||||
std::cout << "Tracking" << std::setprecision(5) << std::endl << std::endl;
|
||||
f << "---------------------------" << std::endl;
|
||||
f << "Tracking" << std::setprecision(5) << std::endl << std::endl;
|
||||
double average, deviation;
|
||||
if(!vdRectStereo_ms.empty())
|
||||
{
|
||||
average = calcAverage(vdRectStereo_ms);
|
||||
deviation = calcDeviation(vdRectStereo_ms, average);
|
||||
std::cout << "Stereo Rectification: " << average << "$\\pm$" << deviation << std::endl;
|
||||
f << "Stereo Rectification: " << average << "$\\pm$" << deviation << std::endl;
|
||||
}
|
||||
|
||||
average = calcAverage(vdORBExtract_ms);
|
||||
deviation = calcDeviation(vdORBExtract_ms, average);
|
||||
std::cout << "ORB Extraction: " << average << "$\\pm$" << deviation << std::endl;
|
||||
f << "ORB Extraction: " << average << "$\\pm$" << deviation << std::endl;
|
||||
|
||||
if(!vdStereoMatch_ms.empty())
|
||||
{
|
||||
average = calcAverage(vdStereoMatch_ms);
|
||||
deviation = calcDeviation(vdStereoMatch_ms, average);
|
||||
std::cout << "Stereo Matching: " << average << "$\\pm$" << deviation << std::endl;
|
||||
f << "Stereo Matching: " << average << "$\\pm$" << deviation << std::endl;
|
||||
}
|
||||
|
||||
if(!vdIMUInteg_ms.empty())
|
||||
{
|
||||
average = calcAverage(vdIMUInteg_ms);
|
||||
deviation = calcDeviation(vdIMUInteg_ms, average);
|
||||
std::cout << "IMU Preintegration: " << average << "$\\pm$" << deviation << std::endl;
|
||||
f << "IMU Preintegration: " << average << "$\\pm$" << deviation << std::endl;
|
||||
}
|
||||
|
||||
average = calcAverage(vdPosePred_ms);
|
||||
deviation = calcDeviation(vdPosePred_ms, average);
|
||||
std::cout << "Pose Prediction: " << average << "$\\pm$" << deviation << std::endl;
|
||||
f << "Pose Prediction: " << average << "$\\pm$" << deviation << std::endl;
|
||||
|
||||
average = calcAverage(vdLMTrack_ms);
|
||||
deviation = calcDeviation(vdLMTrack_ms, average);
|
||||
std::cout << "LM Track: " << average << "$\\pm$" << deviation << std::endl;
|
||||
f << "LM Track: " << average << "$\\pm$" << deviation << std::endl;
|
||||
|
||||
average = calcAverage(vdNewKF_ms);
|
||||
deviation = calcDeviation(vdNewKF_ms, average);
|
||||
std::cout << "New KF decision: " << average << "$\\pm$" << deviation << std::endl;
|
||||
f << "New KF decision: " << average << "$\\pm$" << deviation << std::endl;
|
||||
|
||||
average = calcAverage(vdTrackTotal_ms);
|
||||
deviation = calcDeviation(vdTrackTotal_ms, average);
|
||||
std::cout << "Total Tracking: " << average << "$\\pm$" << deviation << std::endl;
|
||||
f << "Total Tracking: " << average << "$\\pm$" << deviation << std::endl;
|
||||
|
||||
// Local Map Tracking complexity
|
||||
std::cout << "---------------------------" << std::endl;
|
||||
std::cout << std::endl << "Local Map Tracking complexity (mean$\\pm$std)" << std::endl;
|
||||
f << "---------------------------" << std::endl;
|
||||
f << std::endl << "Local Map Tracking complexity (mean$\\pm$std)" << std::endl;
|
||||
|
||||
average = calcAverage(vnKeyFramesLM);
|
||||
deviation = calcDeviation(vnKeyFramesLM, average);
|
||||
std::cout << "Local KFs: " << average << "$\\pm$" << deviation << std::endl;
|
||||
f << "Local KFs: " << average << "$\\pm$" << deviation << std::endl;
|
||||
|
||||
average = calcAverage(vnMapPointsLM);
|
||||
deviation = calcDeviation(vnMapPointsLM, average);
|
||||
std::cout << "Local MPs: " << average << "$\\pm$" << deviation << std::endl;
|
||||
f << "Local MPs: " << average << "$\\pm$" << deviation << std::endl;
|
||||
|
||||
// Local Mapping time stats
|
||||
std::cout << std::endl << std::endl << std::endl;
|
||||
std::cout << "Local Mapping" << std::endl << std::endl;
|
||||
f << std::endl << "Local Mapping" << std::endl << std::endl;
|
||||
|
||||
average = calcAverage(mpLocalMapper->vdKFInsert_ms);
|
||||
deviation = calcDeviation(mpLocalMapper->vdKFInsert_ms, average);
|
||||
std::cout << "KF Insertion: " << average << "$\\pm$" << deviation << std::endl;
|
||||
f << "KF Insertion: " << average << "$\\pm$" << deviation << std::endl;
|
||||
|
||||
average = calcAverage(mpLocalMapper->vdMPCulling_ms);
|
||||
deviation = calcDeviation(mpLocalMapper->vdMPCulling_ms, average);
|
||||
std::cout << "MP Culling: " << average << "$\\pm$" << deviation << std::endl;
|
||||
f << "MP Culling: " << average << "$\\pm$" << deviation << std::endl;
|
||||
|
||||
average = calcAverage(mpLocalMapper->vdMPCreation_ms);
|
||||
deviation = calcDeviation(mpLocalMapper->vdMPCreation_ms, average);
|
||||
std::cout << "MP Creation: " << average << "$\\pm$" << deviation << std::endl;
|
||||
f << "MP Creation: " << average << "$\\pm$" << deviation << std::endl;
|
||||
|
||||
average = calcAverage(mpLocalMapper->vdLBASync_ms);
|
||||
deviation = calcDeviation(mpLocalMapper->vdLBASync_ms, average);
|
||||
std::cout << "LBA: " << average << "$\\pm$" << deviation << std::endl;
|
||||
f << "LBA: " << average << "$\\pm$" << deviation << std::endl;
|
||||
|
||||
average = calcAverage(mpLocalMapper->vdKFCullingSync_ms);
|
||||
deviation = calcDeviation(mpLocalMapper->vdKFCullingSync_ms, average);
|
||||
std::cout << "KF Culling: " << average << "$\\pm$" << deviation << std::endl;
|
||||
f << "KF Culling: " << average << "$\\pm$" << deviation << std::endl;
|
||||
|
||||
average = calcAverage(mpLocalMapper->vdLMTotal_ms);
|
||||
deviation = calcDeviation(mpLocalMapper->vdLMTotal_ms, average);
|
||||
std::cout << "Total Local Mapping: " << average << "$\\pm$" << deviation << std::endl;
|
||||
f << "Total Local Mapping: " << average << "$\\pm$" << deviation << std::endl;
|
||||
|
||||
// Local Mapping LBA complexity
|
||||
std::cout << "---------------------------" << std::endl;
|
||||
std::cout << std::endl << "LBA complexity (mean$\\pm$std)" << std::endl;
|
||||
f << "---------------------------" << std::endl;
|
||||
f << std::endl << "LBA complexity (mean$\\pm$std)" << std::endl;
|
||||
|
||||
average = calcAverage(mpLocalMapper->vnLBA_edges);
|
||||
deviation = calcDeviation(mpLocalMapper->vnLBA_edges, average);
|
||||
std::cout << "LBA Edges: " << average << "$\\pm$" << deviation << std::endl;
|
||||
f << "LBA Edges: " << average << "$\\pm$" << deviation << std::endl;
|
||||
|
||||
average = calcAverage(mpLocalMapper->vnLBA_KFopt);
|
||||
deviation = calcDeviation(mpLocalMapper->vnLBA_KFopt, average);
|
||||
std::cout << "LBA KF optimized: " << average << "$\\pm$" << deviation << std::endl;
|
||||
f << "LBA KF optimized: " << average << "$\\pm$" << deviation << std::endl;
|
||||
|
||||
average = calcAverage(mpLocalMapper->vnLBA_KFfixed);
|
||||
deviation = calcDeviation(mpLocalMapper->vnLBA_KFfixed, average);
|
||||
std::cout << "LBA KF fixed: " << average << "$\\pm$" << deviation << std::endl;
|
||||
f << "LBA KF fixed: " << average << "$\\pm$" << deviation << std::endl;
|
||||
|
||||
average = calcAverage(mpLocalMapper->vnLBA_MPs);
|
||||
deviation = calcDeviation(mpLocalMapper->vnLBA_MPs, average);
|
||||
std::cout << "LBA MP: " << average << "$\\pm$" << deviation << std::endl << std::endl;
|
||||
f << "LBA MP: " << average << "$\\pm$" << deviation << std::endl << std::endl;
|
||||
|
||||
std::cout << "LBA executions: " << mpLocalMapper->nLBA_exec << std::endl;
|
||||
std::cout << "LBA aborts: " << mpLocalMapper->nLBA_abort << std::endl;
|
||||
f << "LBA executions: " << mpLocalMapper->nLBA_exec << std::endl;
|
||||
f << "LBA aborts: " << mpLocalMapper->nLBA_abort << std::endl;
|
||||
|
||||
// Map complexity
|
||||
std::cout << "---------------------------" << std::endl;
|
||||
std::cout << std::endl << "Map complexity" << std::endl;
|
||||
std::cout << "KFs in map: " << mpAtlas->GetAllMaps()[0]->GetAllKeyFrames().size() << std::endl;
|
||||
std::cout << "MPs in map: " << mpAtlas->GetAllMaps()[0]->GetAllMapPoints().size() << std::endl;
|
||||
f << "---------------------------" << std::endl;
|
||||
f << std::endl << "Map complexity" << std::endl;
|
||||
f << "KFs in map: " << mpAtlas->GetAllMaps()[0]->GetAllKeyFrames().size() << std::endl;
|
||||
f << "MPs in map: " << mpAtlas->GetAllMaps()[0]->GetAllMapPoints().size() << std::endl;
|
||||
|
||||
|
||||
// Place recognition time stats
|
||||
std::cout << std::endl << std::endl << std::endl;
|
||||
std::cout << "Place Recognition (mean$\\pm$std)" << std::endl << std::endl;
|
||||
f << std::endl << "Place Recognition (mean$\\pm$std)" << std::endl << std::endl;
|
||||
|
||||
average = calcAverage(mpLoopClosing->vTimeBoW_ms);
|
||||
deviation = calcDeviation(mpLoopClosing->vTimeBoW_ms, average);
|
||||
std::cout << "Database Query: " << average << "$\\pm$" << deviation << std::endl;
|
||||
f << "Database Query: " << average << "$\\pm$" << deviation << std::endl;
|
||||
average = calcAverage(mpLoopClosing->vTimeSE3_ms);
|
||||
deviation = calcDeviation(mpLoopClosing->vTimeSE3_ms, average);
|
||||
std::cout << "SE3 estimation: " << average << "$\\pm$" << deviation << std::endl;
|
||||
f << "SE3 estimation: " << average << "$\\pm$" << deviation << std::endl;
|
||||
average = calcAverage(mpLoopClosing->vTimePRTotal_ms);
|
||||
deviation = calcDeviation(mpLoopClosing->vTimePRTotal_ms, average);
|
||||
std::cout << "Total Place Recognition: " << average << "$\\pm$" << deviation << std::endl;
|
||||
f << "Total Place Recognition: " << average << "$\\pm$" << deviation << std::endl;
|
||||
|
||||
// Loop Closing time stats
|
||||
if(mpLoopClosing->vTimeLoopTotal_ms.size() > 0)
|
||||
{
|
||||
std::cout << std::endl << std::endl << std::endl;
|
||||
std::cout << "Loop Closing (mean$\\pm$std)" << std::endl << std::endl;
|
||||
f << std::endl << "Loop Closing (mean$\\pm$std)" << std::endl << std::endl;
|
||||
|
||||
average = calcAverage(mpLoopClosing->vTimeLoopTotal_ms);
|
||||
deviation = calcDeviation(mpLoopClosing->vTimeLoopTotal_ms, average);
|
||||
std::cout << "Total Loop Closing: " << average << "$\\pm$" << deviation << std::endl;
|
||||
f << "Total Loop Closing: " << average << "$\\pm$" << deviation << std::endl;
|
||||
}
|
||||
|
||||
if(mpLoopClosing->vTimeMergeTotal_ms.size() > 0)
|
||||
{
|
||||
// Map Merging time stats
|
||||
std::cout << std::endl << std::endl << std::endl;
|
||||
std::cout << "Map Merging (mean$\\pm$std)" << std::endl << std::endl;
|
||||
f << std::endl << "Map Merging (mean$\\pm$std)" << std::endl << std::endl;
|
||||
|
||||
average = calcAverage(mpLoopClosing->vTimeMergeTotal_ms);
|
||||
deviation = calcDeviation(mpLoopClosing->vTimeMergeTotal_ms, average);
|
||||
std::cout << "Total Map Merging: " << average << "$\\pm$" << deviation << std::endl;
|
||||
f << "Total Map Merging: " << average << "$\\pm$" << deviation << std::endl;
|
||||
}
|
||||
|
||||
|
||||
if(mpLoopClosing->vTimeGBATotal_ms.size() > 0)
|
||||
{
|
||||
// Full GBA time stats
|
||||
std::cout << std::endl << std::endl << std::endl;
|
||||
std::cout << "Full GBA (mean$\\pm$std)" << std::endl << std::endl;
|
||||
f << std::endl << "Full GBA (mean$\\pm$std)" << std::endl << std::endl;
|
||||
|
||||
average = calcAverage(mpLoopClosing->vTimeFullGBA_ms);
|
||||
deviation = calcDeviation(mpLoopClosing->vTimeFullGBA_ms, average);
|
||||
std::cout << "GBA: " << average << "$\\pm$" << deviation << std::endl;
|
||||
f << "GBA: " << average << "$\\pm$" << deviation << std::endl;
|
||||
average = calcAverage(mpLoopClosing->vTimeMapUpdate_ms);
|
||||
deviation = calcDeviation(mpLoopClosing->vTimeMapUpdate_ms, average);
|
||||
std::cout << "Map Update: " << average << "$\\pm$" << deviation << std::endl;
|
||||
f << "Map Update: " << average << "$\\pm$" << deviation << std::endl;
|
||||
average = calcAverage(mpLoopClosing->vTimeGBATotal_ms);
|
||||
deviation = calcDeviation(mpLoopClosing->vTimeGBATotal_ms, average);
|
||||
std::cout << "Total Full GBA: " << average << "$\\pm$" << deviation << std::endl;
|
||||
f << "Total Full GBA: " << average << "$\\pm$" << deviation << std::endl;
|
||||
}
|
||||
|
||||
f.close();
|
||||
|
||||
}
|
||||
|
||||
#endif
|
||||
|
||||
Tracking::~Tracking()
|
||||
{
|
||||
//f_track_stats.close();
|
||||
#ifdef SAVE_TIMES
|
||||
f_track_times.close();
|
||||
#endif
|
||||
|
||||
}
|
||||
|
||||
bool Tracking::ParseCamParamFile(cv::FileStorage &fSettings)
|
||||
|
@ -918,26 +1246,26 @@ cv::Mat Tracking::GrabImageStereo(const cv::Mat &imRectLeft, const cv::Mat &imRe
{
if(mbRGB)
{
cvtColor(mImGray,mImGray,CV_RGB2GRAY);
cvtColor(imGrayRight,imGrayRight,CV_RGB2GRAY);
cvtColor(mImGray,mImGray,cv::COLOR_RGB2GRAY);
cvtColor(imGrayRight,imGrayRight,cv::COLOR_RGB2GRAY);
}
else
{
cvtColor(mImGray,mImGray,CV_BGR2GRAY);
cvtColor(imGrayRight,imGrayRight,CV_BGR2GRAY);
cvtColor(mImGray,mImGray,cv::COLOR_BGR2GRAY);
cvtColor(imGrayRight,imGrayRight,cv::COLOR_BGR2GRAY);
}
}
else if(mImGray.channels()==4)
{
if(mbRGB)
{
cvtColor(mImGray,mImGray,CV_RGBA2GRAY);
cvtColor(imGrayRight,imGrayRight,CV_RGBA2GRAY);
cvtColor(mImGray,mImGray,cv::COLOR_RGBA2GRAY);
cvtColor(imGrayRight,imGrayRight,cv::COLOR_RGBA2GRAY);
}
else
{
cvtColor(mImGray,mImGray,CV_BGRA2GRAY);
cvtColor(imGrayRight,imGrayRight,CV_BGRA2GRAY);
cvtColor(mImGray,mImGray,cv::COLOR_BGRA2GRAY);
cvtColor(imGrayRight,imGrayRight,cv::COLOR_BGRA2GRAY);
}
}

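This hunk replaces the legacy CV_*2GRAY constants with the cv::COLOR_* names required once the project builds against OpenCV 4 (matching the find_package(OpenCV 4.0) change in CMakeLists.txt). A minimal sketch of the modern call, independent of this file:

```cpp
// Minimal sketch: grayscale conversion with the OpenCV 4 cv::COLOR_* constants.
#include <opencv2/imgproc.hpp>

cv::Mat ToGray(const cv::Mat &im, bool isRGB)
{
    cv::Mat gray;
    if(im.channels() == 3)
        cv::cvtColor(im, gray, isRGB ? cv::COLOR_RGB2GRAY : cv::COLOR_BGR2GRAY);
    else if(im.channels() == 4)
        cv::cvtColor(im, gray, isRGB ? cv::COLOR_RGBA2GRAY : cv::COLOR_BGRA2GRAY);
    else
        gray = im;      // already single channel
    return gray;
}
```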
@ -950,32 +1278,16 @@ cv::Mat Tracking::GrabImageStereo(const cv::Mat &imRectLeft, const cv::Mat &imRe
else if(mSensor == System::IMU_STEREO && mpCamera2)
mCurrentFrame = Frame(mImGray,imGrayRight,timestamp,mpORBextractorLeft,mpORBextractorRight,mpORBVocabulary,mK,mDistCoef,mbf,mThDepth,mpCamera,mpCamera2,mTlr,&mLastFrame,*mpImuCalib);

std::chrono::steady_clock::time_point t0 = std::chrono::steady_clock::now();
mCurrentFrame.mNameFile = filename;
mCurrentFrame.mnDataset = mnNumDataset;

Track();

std::chrono::steady_clock::time_point t1 = std::chrono::steady_clock::now();

double t_track = std::chrono::duration_cast<std::chrono::duration<double,std::milli> >(t1 - t0).count();

/*cout << "trracking time: " << t_track << endl;
f_track_stats << setprecision(0) << mCurrentFrame.mTimeStamp*1e9 << ",";
f_track_stats << mvpLocalKeyFrames.size() << ",";
f_track_stats << mvpLocalMapPoints.size() << ",";
f_track_stats << setprecision(6) << t_track << endl;*/

#ifdef SAVE_TIMES
f_track_times << mCurrentFrame.mTimeORB_Ext << ",";
f_track_times << mCurrentFrame.mTimeStereoMatch << ",";
f_track_times << mTime_PreIntIMU << ",";
f_track_times << mTime_PosePred << ",";
f_track_times << mTime_LocalMapTrack << ",";
f_track_times << mTime_NewKF_Dec << ",";
f_track_times << t_track << endl;
#ifdef REGISTER_TIMES
vdORBExtract_ms.push_back(mCurrentFrame.mTimeORB_Ext);
vdStereoMatch_ms.push_back(mCurrentFrame.mTimeStereoMatch);
#endif

Track();

return mCurrentFrame.mTcw.clone();
}

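The commit replaces the ad-hoc SAVE_TIMES t0/t1 bookkeeping with per-stage vectors filled under REGISTER_TIMES. A generic sketch of the same measurement expressed as a small RAII helper; ScopedTimerMs is not part of the project, it only illustrates the pattern:

```cpp
// Sketch of the REGISTER_TIMES timing pattern as an RAII helper (illustration only).
#include <chrono>
#include <vector>

class ScopedTimerMs
{
public:
    explicit ScopedTimerMs(std::vector<double> &sink)
        : mSink(sink), mStart(std::chrono::steady_clock::now()) {}

    ~ScopedTimerMs()
    {
        auto end = std::chrono::steady_clock::now();
        mSink.push_back(
            std::chrono::duration_cast<std::chrono::duration<double, std::milli>>(end - mStart).count());
    }

private:
    std::vector<double> &mSink;
    std::chrono::steady_clock::time_point mStart;
};

// Usage, equivalent to the explicit time_point pairs above:
// {
//     ScopedTimerMs timer(vdTrackTotal_ms);
//     Track();
// }   // elapsed milliseconds pushed automatically here
```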
@ -988,49 +1300,32 @@ cv::Mat Tracking::GrabImageRGBD(const cv::Mat &imRGB,const cv::Mat &imD, const d
|
|||
if(mImGray.channels()==3)
|
||||
{
|
||||
if(mbRGB)
|
||||
cvtColor(mImGray,mImGray,CV_RGB2GRAY);
|
||||
cvtColor(mImGray,mImGray,cv::COLOR_RGB2GRAY);
|
||||
else
|
||||
cvtColor(mImGray,mImGray,CV_BGR2GRAY);
|
||||
cvtColor(mImGray,mImGray,cv::COLOR_BGR2GRAY);
|
||||
}
|
||||
else if(mImGray.channels()==4)
|
||||
{
|
||||
if(mbRGB)
|
||||
cvtColor(mImGray,mImGray,CV_RGBA2GRAY);
|
||||
cvtColor(mImGray,mImGray,cv::COLOR_RGBA2GRAY);
|
||||
else
|
||||
cvtColor(mImGray,mImGray,CV_BGRA2GRAY);
|
||||
cvtColor(mImGray,mImGray,cv::COLOR_BGRA2GRAY);
|
||||
}
|
||||
|
||||
if((fabs(mDepthMapFactor-1.0f)>1e-5) || imDepth.type()!=CV_32F)
|
||||
imDepth.convertTo(imDepth,CV_32F,mDepthMapFactor);
|
||||
|
||||
std::chrono::steady_clock::time_point t0 = std::chrono::steady_clock::now();
|
||||
mCurrentFrame = Frame(mImGray,imDepth,timestamp,mpORBextractorLeft,mpORBVocabulary,mK,mDistCoef,mbf,mThDepth,mpCamera);
|
||||
|
||||
mCurrentFrame.mNameFile = filename;
|
||||
mCurrentFrame.mnDataset = mnNumDataset;
|
||||
|
||||
#ifdef REGISTER_TIMES
|
||||
vdORBExtract_ms.push_back(mCurrentFrame.mTimeORB_Ext);
|
||||
#endif
|
||||
|
||||
Track();
|
||||
|
||||
std::chrono::steady_clock::time_point t1 = std::chrono::steady_clock::now();
|
||||
|
||||
double t_track = std::chrono::duration_cast<std::chrono::duration<double,std::milli> >(t1 - t0).count();
|
||||
|
||||
/*f_track_stats << setprecision(0) << mCurrentFrame.mTimeStamp*1e9 << ",";
|
||||
f_track_stats << mvpLocalKeyFrames.size() << ",";
|
||||
f_track_stats << mvpLocalMapPoints.size() << ",";
|
||||
f_track_stats << setprecision(6) << t_track << endl;*/
|
||||
|
||||
#ifdef SAVE_TIMES
|
||||
f_track_times << mCurrentFrame.mTimeORB_Ext << ",";
|
||||
f_track_times << mCurrentFrame.mTimeStereoMatch << ",";
|
||||
f_track_times << mTime_PreIntIMU << ",";
|
||||
f_track_times << mTime_PosePred << ",";
|
||||
f_track_times << mTime_LocalMapTrack << ",";
|
||||
f_track_times << mTime_NewKF_Dec << ",";
|
||||
f_track_times << t_track << endl;
|
||||
#endif
|
||||
|
||||
return mCurrentFrame.mTcw.clone();
|
||||
}
|
||||
|
||||
|
@ -1051,16 +1346,16 @@ cv::Mat Tracking::GrabImageMonocular(const cv::Mat &im, const double &timestamp,
if(mImGray.channels()==3)
{
if(mbRGB)
cvtColor(mImGray,mImGray,CV_RGB2GRAY);
cvtColor(mImGray,mImGray,cv::COLOR_RGB2GRAY);
else
cvtColor(mImGray,mImGray,CV_BGR2GRAY);
cvtColor(mImGray,mImGray,cv::COLOR_BGR2GRAY);
}
else if(mImGray.channels()==4)
{
if(mbRGB)
cvtColor(mImGray,mImGray,CV_RGBA2GRAY);
cvtColor(mImGray,mImGray,cv::COLOR_RGBA2GRAY);
else
cvtColor(mImGray,mImGray,CV_BGRA2GRAY);
cvtColor(mImGray,mImGray,cv::COLOR_BGRA2GRAY);
}

// Step 2: construct the Frame
@ -1086,36 +1381,18 @@ cv::Mat Tracking::GrabImageMonocular(const cv::Mat &im, const double &timestamp,
if (mState==NO_IMAGES_YET)
t0=timestamp;

std::chrono::steady_clock::time_point t0 = std::chrono::steady_clock::now();

mCurrentFrame.mNameFile = filename;
mCurrentFrame.mnDataset = mnNumDataset;

#ifdef REGISTER_TIMES
vdORBExtract_ms.push_back(mCurrentFrame.mTimeORB_Ext);
#endif

lastID = mCurrentFrame.mnId;

// Step 3: track
Track();

std::chrono::steady_clock::time_point t1 = std::chrono::steady_clock::now();

double t_track = std::chrono::duration_cast<std::chrono::duration<double,std::milli> >(t1 - t0).count();

/*f_track_stats << setprecision(0) << mCurrentFrame.mTimeStamp*1e9 << ",";
f_track_stats << mvpLocalKeyFrames.size() << ",";
f_track_stats << mvpLocalMapPoints.size() << ",";
f_track_stats << setprecision(6) << t_track << endl;*/

// Data-saving mode
#ifdef SAVE_TIMES
f_track_times << mCurrentFrame.mTimeORB_Ext << ",";
f_track_times << mCurrentFrame.mTimeStereoMatch << ",";
f_track_times << mTime_PreIntIMU << ",";
f_track_times << mTime_PosePred << ",";
f_track_times << mTime_LocalMapTrack << ",";
f_track_times << mTime_NewKF_Dec << ",";
f_track_times << t_track << endl;
#endif

// Return the pose of the current frame
return mCurrentFrame.mTcw.clone();
}
@ -1454,10 +1731,6 @@ void Tracking::ComputeVelocitiesAccBias(const vector<Frame*> &vpFs, float &bax,
}
}

void Tracking::ResetFrameIMU()
{
// TODO To implement...
}

/**
* @brief The tracking process: constant-velocity-model tracking, reference-keyframe tracking, and local-map tracking
@ -1469,13 +1742,7 @@ void Tracking::ResetFrameIMU()
*/
void Tracking::Track()
{
#ifdef SAVE_TIMES
mTime_PreIntIMU = 0;
mTime_PosePred = 0;
mTime_LocalMapTrack = 0;
mTime_NewKF_Dec = 0;
#endif
// ? Still unclear what this flag is for; related to drawing?

if (bStepByStep)
{
while(!mbStep)
@ -1556,15 +1823,16 @@ void Tracking::Track()
// Step 4: in IMU mode, preintegrate the IMU data -> // ? only when no map has just been created
if ((mSensor == System::IMU_MONOCULAR || mSensor == System::IMU_STEREO) && !mbCreatedMap)
{
#ifdef SAVE_TIMES
std::chrono::steady_clock::time_point t0 = std::chrono::steady_clock::now();
#ifdef REGISTER_TIMES
std::chrono::steady_clock::time_point time_StartPreIMU = std::chrono::steady_clock::now();
#endif
// Preintegrate the IMU data
PreintegrateIMU();
#ifdef SAVE_TIMES
std::chrono::steady_clock::time_point t1 = std::chrono::steady_clock::now();
#ifdef REGISTER_TIMES
std::chrono::steady_clock::time_point time_EndPreIMU = std::chrono::steady_clock::now();

mTime_PreIntIMU = std::chrono::duration_cast<std::chrono::duration<double,std::milli> >(t1 - t0).count();
double timePreImu = std::chrono::duration_cast<std::chrono::duration<double,std::milli> >(time_EndPreIMU - time_StartPreIMU).count();
vdIMUInteg_ms.push_back(timePreImu);
#endif

@ -1625,14 +1893,15 @@ void Tracking::Track()
// Step 6: the system has been initialized; the actual tracking procedure follows
bool bOK;

#ifdef REGISTER_TIMES
std::chrono::steady_clock::time_point time_StartPosePred = std::chrono::steady_clock::now();
#endif

// Initial camera pose estimation using motion model or relocalization (if tracking is lost)
// mbOnlyTracking == false is the normal SLAM mode (localization + map update); mbOnlyTracking == true is localization-only mode
// It defaults to false when the Tracking class is constructed; the viewer exposes a switch, ActivateLocalizationMode, that controls mbOnlyTracking
if(!mbOnlyTracking)
{
#ifdef SAVE_TIMES
std::chrono::steady_clock::time_point timeStartPosePredict = std::chrono::steady_clock::now();
#endif

// State OK
// Local Mapping is activated. This is the normal behaviour, unless
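As the comment above notes, mbOnlyTracking is normally driven from the viewer through ActivateLocalizationMode. A sketch of doing the same switch programmatically from the host application (it assumes the System methods of the public API; the surrounding logic is illustrative):

```cpp
// Sketch: toggling localization-only mode without the viewer (assumed public API).
#include "System.h"

void RunLocalizationOnlyPhase(ORB_SLAM3::System &SLAM)
{
    SLAM.ActivateLocalizationMode();    // tracking continues, local mapping is paused
    // ... feed frames while only localizing against the existing map ...
    SLAM.DeactivateLocalizationMode();  // back to full SLAM (localization + mapping)
}
```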
@ -1761,13 +2030,6 @@ void Tracking::Track()
}
}

#ifdef SAVE_TIMES
std::chrono::steady_clock::time_point timeEndPosePredict = std::chrono::steady_clock::now();

mTime_PosePred = std::chrono::duration_cast<std::chrono::duration<double,std::milli> >(timeEndPosePredict - timeStartPosePredict).count();
#endif

}
else // localization-only mode; skipped for now for lack of time TOSEE
{
@ -1871,7 +2133,18 @@ void Tracking::Track()
if(!mCurrentFrame.mpReferenceKF)
mCurrentFrame.mpReferenceKF = mpReferenceKF;

// If we have an initial estimation of the camera pose and matching. Track the local map.
#ifdef REGISTER_TIMES
std::chrono::steady_clock::time_point time_EndPosePred = std::chrono::steady_clock::now();

double timePosePred = std::chrono::duration_cast<std::chrono::duration<double,std::milli> >(time_EndPosePred - time_StartPosePred).count();
vdPosePred_ms.push_back(timePosePred);
#endif

#ifdef REGISTER_TIMES
std::chrono::steady_clock::time_point time_StartLMTrack = std::chrono::steady_clock::now();
#endif

// Step 7: after an initial pose for the current frame has been obtained, track the local map to get more matches and optimize the current pose
// The previous step tracked a single frame for an initial pose; here local keyframes and local map points are gathered and matched against the current frame by projection, and the pose is optimized with the additional matched MapPoints
// After frame-to-frame matching gives the initial pose, track the local map to obtain more matches and optimize the current pose
@ -1883,17 +2156,8 @@ void Tracking::Track()
{
if(bOK)
{
#ifdef SAVE_TIMES
std::chrono::steady_clock::time_point time_StartTrackLocalMap = std::chrono::steady_clock::now();
#endif
// Local map tracking
bOK = TrackLocalMap();
#ifdef SAVE_TIMES
std::chrono::steady_clock::time_point time_EndTrackLocalMap = std::chrono::steady_clock::now();

mTime_LocalMapTrack = std::chrono::duration_cast<std::chrono::duration<double,std::milli> >(time_EndTrackLocalMap - time_StartTrackLocalMap).count();
#endif

}
if(!bOK)
@ -1978,13 +2242,19 @@ void Tracking::Track()
if(mCurrentFrame.mnId==(mnLastRelocFrameId+mnFramesToResetIMU))
{
cout << "RESETING FRAME!!!" << endl;
ResetFrameIMU();
}
else if(mCurrentFrame.mnId>(mnLastRelocFrameId+30))
mLastBias = mCurrentFrame.mImuBias; // not really used here; it is reassigned later and then passed on to ordinary frames
}
}

#ifdef REGISTER_TIMES
std::chrono::steady_clock::time_point time_EndLMTrack = std::chrono::steady_clock::now();

double timeLMTrack = std::chrono::duration_cast<std::chrono::duration<double,std::milli> >(time_EndLMTrack - time_StartLMTrack).count();
vdLMTrack_ms.push_back(timeLMTrack);
#endif

// Update drawer
// Update the image, keypoints, map points, etc. shown by the viewer thread
mpFrameDrawer->Update(this);
@ -2051,16 +2321,12 @@ void Tracking::Track()
// This list cannot simply be cleared earlier because it stores raw pointers; the preceding operations exist to avoid memory leaks
mlpTemporalPoints.clear();

#ifdef SAVE_TIMES
std::chrono::steady_clock::time_point timeStartNewKF = std::chrono::steady_clock::now();
#ifdef REGISTER_TIMES
std::chrono::steady_clock::time_point time_StartNewKF = std::chrono::steady_clock::now();
#endif
// Decide whether a new keyframe needs to be inserted
bool bNeedKF = NeedNewKeyFrame();
#ifdef SAVE_TIMES
std::chrono::steady_clock::time_point timeEndNewKF = std::chrono::steady_clock::now();

mTime_NewKF_Dec = std::chrono::duration_cast<std::chrono::duration<double,std::milli> >(timeEndNewKF - timeStartNewKF).count();
#endif

@ -2073,6 +2339,13 @@ void Tracking::Track()
// Create the keyframe; for stereo or RGB-D this also creates new map points
CreateNewKeyFrame();

#ifdef REGISTER_TIMES
std::chrono::steady_clock::time_point time_EndNewKF = std::chrono::steady_clock::now();

double timeNewKF = std::chrono::duration_cast<std::chrono::duration<double,std::milli> >(time_EndNewKF - time_StartNewKF).count();
vdNewKF_ms.push_back(timeNewKF);
#endif

// We allow points with high innovation (considererd outliers by the Huber Function)
// pass to the new keyframe, so that bundle adjustment will finally decide
// if they are outliers or not. We don't want next frame to estimate its position
@ -2343,8 +2616,6 @@ void Tracking::MonocularInitialization()

CreateInitialMapMonocular();

// Just for video
// bStepByStep = true;
}
}
}
@ -2581,7 +2852,6 @@ bool Tracking::TrackReferenceKeyFrame()
int nmatchesMap = 0;
for(int i =0; i<mCurrentFrame.N; i++)
{
//if(i >= mCurrentFrame.Nleft) break;
if(mCurrentFrame.mvpMapPoints[i])
{
if(mCurrentFrame.mvbOutlier[i])
@ -2781,8 +3051,24 @@ bool Tracking::TrackLocalMap()
// We retrieve the local map and try to find matches to points in the local map.
mTrackedFr++;

#ifdef REGISTER_TIMES
std::chrono::steady_clock::time_point time_StartLMUpdate = std::chrono::steady_clock::now();
#endif
UpdateLocalMap();
#ifdef REGISTER_TIMES
std::chrono::steady_clock::time_point time_StartSearchLP = std::chrono::steady_clock::now();

double timeUpdatedLM_ms = std::chrono::duration_cast<std::chrono::duration<double,std::milli> >(time_StartSearchLP - time_StartLMUpdate).count();
vdUpdatedLM_ms.push_back(timeUpdatedLM_ms);
#endif

SearchLocalPoints();
#ifdef REGISTER_TIMES
std::chrono::steady_clock::time_point time_StartPoseOpt = std::chrono::steady_clock::now();

double timeSearchLP_ms = std::chrono::duration_cast<std::chrono::duration<double,std::milli> >(time_StartPoseOpt - time_StartSearchLP).count();
vdSearchLP_ms.push_back(timeSearchLP_ms);
#endif

// TOO check outliers before PO
int aux1 = 0, aux2=0;
@ -2806,8 +3092,7 @@ bool Tracking::TrackLocalMap()
}
else
{
// if(!mbMapUpdated && mState == OK) // && (mnMatchesInliers>30))
if(!mbMapUpdated) // && (mnMatchesInliers>30))
if(!mbMapUpdated)
{
Verbose::PrintMess("TLM: PoseInertialOptimizationLastFrame ", Verbose::VERBOSITY_DEBUG);
inliers = Optimizer::PoseInertialOptimizationLastFrame(&mCurrentFrame); // , !mpLastKeyFrame->GetMap()->GetIniertialBA1());
@ -2819,6 +3104,15 @@ bool Tracking::TrackLocalMap()
}
}
}
#ifdef REGISTER_TIMES
std::chrono::steady_clock::time_point time_EndPoseOpt = std::chrono::steady_clock::now();

double timePoseOpt_ms = std::chrono::duration_cast<std::chrono::duration<double,std::milli> >(time_EndPoseOpt - time_StartPoseOpt).count();
vdPoseOpt_ms.push_back(timePoseOpt_ms);
#endif

vnKeyFramesLM.push_back(mvpLocalKeyFrames.size());
vnMapPointsLM.push_back(mvpLocalMapPoints.size());

aux1 = 0, aux2 = 0;
for(int i=0; i<mCurrentFrame.N; i++)
@ -3134,7 +3428,7 @@ void Tracking::CreateNewKeyFrame()
}
else
{
nPoints++; // TODO check ???
nPoints++;
}

if(vDepthIdx[j].first>mThDepth && nPoints>maxPoint)
@ -3226,7 +3520,7 @@ void Tracking::SearchLocalPoints()
th=5;

if(mState==LOST || mState==RECENTLY_LOST) // Lost for less than 1 second
th=15; // 15
th=15;

int matches = matcher.SearchByProjection(mCurrentFrame, mvpLocalMapPoints, th, mpLocalMapper->mbFarPoints, mpLocalMapper->mThFarPoints);
}
@ -3314,7 +3608,6 @@ void Tracking::UpdateLocalKeyFrames()
}
else
{
// MODIFICATION
mLastFrame.mvpMapPoints[i]=NULL;
}
}
@ -3350,7 +3643,7 @@ void Tracking::UpdateLocalKeyFrames()
for(vector<KeyFrame*>::const_iterator itKF=mvpLocalKeyFrames.begin(), itEndKF=mvpLocalKeyFrames.end(); itKF!=itEndKF; itKF++)
{
// Limit the number of keyframes
if(mvpLocalKeyFrames.size()>80) // 80
if(mvpLocalKeyFrames.size()>80)
break;

KeyFrame* pKF = *itKF;
@ -3747,12 +4040,9 @@ void Tracking::ResetActiveMap(bool bLocMap)
// Clear Map (this erase MapPoints and KeyFrames)
mpAtlas->clearMap();

//KeyFrame::nNextId = mpAtlas->GetLastInitKFid();
//Frame::nNextId = mnLastInitFrameId;
mnLastInitFrameId = Frame::nNextId;
mnLastRelocFrameId = mnLastInitFrameId;
mState = NO_IMAGES_YET; //NOT_INITIALIZED;
mState = NO_IMAGES_YET;

if(mpInitializer)
{
@ -3761,7 +4051,6 @@ void Tracking::ResetActiveMap(bool bLocMap)
}

list<bool> lbLost;
// lbLost.reserve(mlbLost.size());
unsigned int index = mnFirstFrameId;
cout << "mnFirstFrameId = " << mnFirstFrameId << endl;
for(Map* pMap : mpAtlas->GetAllMaps())
@ -3773,7 +4062,6 @@ void Tracking::ResetActiveMap(bool bLocMap)
}
}

//cout << "First Frame id: " << index << endl;
int num_lost = 0;
cout << "mnInitialFrameId = " << mnInitialFrameId << endl;
|
||||
|
|
|
@ -145,15 +145,12 @@ void Viewer::Run()
pangolin::Var<bool> menuFollowCamera("menu.Follow Camera",true,true);
pangolin::Var<bool> menuCamView("menu.Camera View",false,false);
pangolin::Var<bool> menuTopView("menu.Top View",false,false);
// pangolin::Var<bool> menuSideView("menu.Side View",false,false);
pangolin::Var<bool> menuShowPoints("menu.Show Points",true,true);
pangolin::Var<bool> menuShowKeyFrames("menu.Show KeyFrames",true,true);
pangolin::Var<bool> menuShowGraph("menu.Show Graph",false,true);
pangolin::Var<bool> menuShowInertialGraph("menu.Show Inertial Graph",true,true);
pangolin::Var<bool> menuLocalizationMode("menu.Localization Mode",false,true);
pangolin::Var<bool> menuReset("menu.Reset",false,false);
pangolin::Var<bool> menuStepByStep("menu.Step By Step",false,true); // false, true
pangolin::Var<bool> menuStep("menu.Step",false,false);

// Define Camera Render Object (for view / scene browsing)
pangolin::OpenGlRenderState s_cam(
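pangolin::Var entries like the ones declared above only appear once a panel has been created, and are then polled every frame inside the render loop. A trimmed-down sketch of that pattern, independent of the project's viewer (window size and variable names are arbitrary):

```cpp
// Trimmed-down Pangolin UI sketch (not the project's viewer; names/sizes are arbitrary).
#include <pangolin/pangolin.h>

int main()
{
    pangolin::CreateWindowAndBind("Menu Demo", 640, 480);
    pangolin::CreatePanel("menu").SetBounds(0.0, 1.0, 0.0, pangolin::Attach::Pix(175));

    pangolin::Var<bool> menuFollow("menu.Follow Camera", true, true);   // toggle (checkbox)
    pangolin::Var<bool> menuReset("menu.Reset", false, false);          // push button

    while(!pangolin::ShouldQuit())
    {
        if(menuReset)               // a button reads true once when pressed
        {
            // ... reset whatever needs resetting ...
            menuReset = false;
        }
        bool follow = menuFollow;   // toggles are read like plain bools
        (void)follow;
        pangolin::FinishFrame();
    }
    return 0;
}
```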
@ -192,7 +189,6 @@ void Viewer::Run()

if(mbStopTrack)
{
menuStepByStep = true;
mbStopTrack = false;
}

@ -237,21 +233,11 @@ void Viewer::Run()
{
menuTopView = false;
bCameraView = false;
/*s_cam.SetProjectionMatrix(pangolin::ProjectionMatrix(1024,768,3000,3000,512,389,0.1,1000));
s_cam.SetModelViewMatrix(pangolin::ModelViewLookAt(0,0.01,10, 0,0,0,0.0,0.0, 1.0));*/
s_cam.SetProjectionMatrix(pangolin::ProjectionMatrix(1024,768,3000,3000,512,389,0.1,10000));
s_cam.SetModelViewMatrix(pangolin::ModelViewLookAt(0,0.01,50, 0,0,0,0.0,0.0, 1.0));
s_cam.Follow(Ow);
}

/*if(menuSideView && mpMapDrawer->mpAtlas->isImuInitialized())
{
s_cam.SetProjectionMatrix(pangolin::ProjectionMatrix(1024,768,3000,3000,512,389,0.1,10000));
s_cam.SetModelViewMatrix(pangolin::ModelViewLookAt(0.0,0.1,30.0,0,0,0,0.0,0.0,1.0));
s_cam.Follow(Twwp);
}*/

if(menuLocalizationMode && !bLocalizationMode)
{
mpSystem->ActivateLocalizationMode();
@ -263,24 +249,6 @@ void Viewer::Run()
bLocalizationMode = false;
}

if(menuStepByStep && !bStepByStep)
{
mpTracker->SetStepByStep(true);
bStepByStep = true;
}
else if(!menuStepByStep && bStepByStep)
{
mpTracker->SetStepByStep(false);
bStepByStep = false;
}

if(menuStep)
{
mpTracker->mbStep = true;
menuStep = false;
}

d_cam.Activate(s_cam);
glClearColor(1.0f,1.0f,1.0f,1.0f);
mpMapDrawer->DrawCurrentCamera(Twc);
@ -317,7 +285,6 @@ void Viewer::Run()
bLocalizationMode = false;
bFollow = true;
menuFollowCamera = true;
//mpSystem->Reset();
mpSystem->ResetActiveMap();
menuReset = false;
}