master
electech6 2021-09-03 14:24:01 +08:00
parent feedb9bdd8
commit d10c678558
26 changed files with 4184 additions and 1007 deletions

View File

@ -64,9 +64,9 @@ int main(int argc, char **argv)
vector< vector<double> > vTimestampsCam; //图像时间戳
vector< vector<cv::Point3f> > vAcc, vGyro; //加速度计,陀螺仪
vector< vector<double> > vTimestampsImu; //IMU时间戳
vector<int> nImages;
vector<int> nImages; //图像序列
vector<int> nImu;
vector<int> first_imu(num_seq,0);
vector<int> first_imu(num_seq,0); //记录和第一帧图像时间戳最接近的imu时间戳索引
vstrImageFilenames.resize(num_seq);
vTimestampsCam.resize(num_seq);
@ -168,13 +168,16 @@ int main(int argc, char **argv)
{
// cout << "t_cam " << tframe << endl;
// Step 6 把上一图像帧和当前图像帧之间的imu信息存储在vImuMeas里
// 注意第一个图像帧没有对应的imu数据 //?是否存在一帧,因为之前是从最接近图像第一帧的imu算起,可能无效
// 注意第一个图像帧没有对应的imu数据
// seq: 数据集序列索引first_imu[seq]当前数据集中和当前帧图像最接近的imu时间戳索引ni图像的索引
while(vTimestampsImu[seq][first_imu[seq]]<=vTimestampsCam[seq][ni])
{
// 存储IMU的加速度计信息vAcc、陀螺仪信息vGyro、时间戳信息vTimestampsImu
vImuMeas.push_back(ORB_SLAM3::IMU::Point(vAcc[seq][first_imu[seq]].x,vAcc[seq][first_imu[seq]].y,vAcc[seq][first_imu[seq]].z,
vGyro[seq][first_imu[seq]].x,vGyro[seq][first_imu[seq]].y,vGyro[seq][first_imu[seq]].z,
vTimestampsImu[seq][first_imu[seq]]));
// cout << "t_imu = " << fixed << vImuMeas.back().t << endl;
// 更新
first_imu[seq]++;
}
}

View File

@ -0,0 +1,36 @@
#!/bin/bash
pathDatasetEuroc='/Datasets/EuRoC' # Example: it is necessary to change it to the dataset path

# Single Session Example (Pure visual)
echo "Launching MH01 with Stereo sensor"
./Stereo/stereo_euroc ../Vocabulary/ORBvoc.txt ./Stereo/EuRoC.yaml "$pathDatasetEuroc"/MH01 ./Stereo/EuRoC_TimeStamps/MH01.txt dataset-MH01_stereo
echo "------------------------------------"
echo "Evaluation of MH01 trajectory with Stereo sensor"
python ../evaluation/evaluate_ate_scale.py ../evaluation/Ground_truth/EuRoC_left_cam/MH01_GT.txt f_dataset-MH01_stereo.txt --plot MH01_stereo.pdf

# MultiSession Example (Pure visual)
echo "Launching Machine Hall with Stereo sensor"
./Stereo/stereo_euroc ../Vocabulary/ORBvoc.txt ./Stereo/EuRoC.yaml "$pathDatasetEuroc"/MH01 ./Stereo/EuRoC_TimeStamps/MH01.txt "$pathDatasetEuroc"/MH02 ./Stereo/EuRoC_TimeStamps/MH02.txt "$pathDatasetEuroc"/MH03 ./Stereo/EuRoC_TimeStamps/MH03.txt "$pathDatasetEuroc"/MH04 ./Stereo/EuRoC_TimeStamps/MH04.txt "$pathDatasetEuroc"/MH05 ./Stereo/EuRoC_TimeStamps/MH05.txt dataset-MH01_to_MH05_stereo
echo "------------------------------------"
# Fixed typo: "MAchine Hall" -> "Machine Hall"
echo "Evaluation of Machine Hall trajectory with Stereo sensor"
python ../evaluation/evaluate_ate_scale.py ../evaluation/Ground_truth/EuRoC_left_cam/MH_GT.txt f_dataset-MH01_to_MH05_stereo.txt --plot MH01_to_MH05_stereo.pdf

# Single Session Example (Visual-Inertial)
echo "Launching V102 with Monocular-Inertial sensor"
./Monocular-Inertial/mono_inertial_euroc ../Vocabulary/ORBvoc.txt ./Monocular-Inertial/EuRoC.yaml "$pathDatasetEuroc"/V102 ./Monocular-Inertial/EuRoC_TimeStamps/V102.txt dataset-V102_monoi
echo "------------------------------------"
echo "Evaluation of V102 trajectory with Monocular-Inertial sensor"
python ../evaluation/evaluate_ate_scale.py "$pathDatasetEuroc"/V102/mav0/state_groundtruth_estimate0/data.csv f_dataset-V102_monoi.txt --plot V102_monoi.pdf

# MultiSession Monocular Examples
echo "Launching Vicon Room 2 with Monocular-Inertial sensor"
./Monocular-Inertial/mono_inertial_euroc ../Vocabulary/ORBvoc.txt ./Monocular-Inertial/EuRoC.yaml "$pathDatasetEuroc"/V201 ./Monocular-Inertial/EuRoC_TimeStamps/V201.txt "$pathDatasetEuroc"/V202 ./Monocular-Inertial/EuRoC_TimeStamps/V202.txt "$pathDatasetEuroc"/V203 ./Monocular-Inertial/EuRoC_TimeStamps/V203.txt dataset-V201_to_V203_monoi
echo "------------------------------------"
# Fixed label: the run above is Monocular-Inertial, not Stereo
echo "Evaluation of Vicon Room 2 trajectory with Monocular-Inertial sensor"
python ../evaluation/evaluate_ate_scale.py ../evaluation/Ground_truth/EuRoC_imu/V2_GT.txt f_dataset-V201_to_V203_monoi.txt --plot V201_to_V203_monoi.pdf

182
Examples/euroc_examples.sh Normal file
View File

@ -0,0 +1,182 @@
#!/bin/bash
pathDatasetEuroc='/Datasets/EuRoC' # Root of the EuRoC dataset; it is necessary to change it to your dataset path
#------------------------------------
# Monocular Examples: one pure-visual session per EuRoC sequence.
for seq in MH01 MH02 MH03 MH04 MH05 V101 V102 V103 V201 V202 V203; do
    echo "Launching $seq with Monocular sensor"
    ./Monocular/mono_euroc ../Vocabulary/ORBvoc.txt ./Monocular/EuRoC.yaml "$pathDatasetEuroc"/"$seq" ./Monocular/EuRoC_TimeStamps/"$seq".txt dataset-"$seq"_mono
done
# MultiSession Monocular Examples: several sequences processed in a single SLAM session.
echo "Launching Machine Hall with Monocular sensor"
./Monocular/mono_euroc ../Vocabulary/ORBvoc.txt ./Monocular/EuRoC.yaml "$pathDatasetEuroc"/MH01 ./Monocular/EuRoC_TimeStamps/MH01.txt "$pathDatasetEuroc"/MH02 ./Monocular/EuRoC_TimeStamps/MH02.txt "$pathDatasetEuroc"/MH03 ./Monocular/EuRoC_TimeStamps/MH03.txt "$pathDatasetEuroc"/MH04 ./Monocular/EuRoC_TimeStamps/MH04.txt "$pathDatasetEuroc"/MH05 ./Monocular/EuRoC_TimeStamps/MH05.txt dataset-MH01_to_MH05_mono
echo "Launching Vicon Room 1 with Monocular sensor"
./Monocular/mono_euroc ../Vocabulary/ORBvoc.txt ./Monocular/EuRoC.yaml "$pathDatasetEuroc"/V101 ./Monocular/EuRoC_TimeStamps/V101.txt "$pathDatasetEuroc"/V102 ./Monocular/EuRoC_TimeStamps/V102.txt "$pathDatasetEuroc"/V103 ./Monocular/EuRoC_TimeStamps/V103.txt dataset-V101_to_V103_mono
echo "Launching Vicon Room 2 with Monocular sensor"
./Monocular/mono_euroc ../Vocabulary/ORBvoc.txt ./Monocular/EuRoC.yaml "$pathDatasetEuroc"/V201 ./Monocular/EuRoC_TimeStamps/V201.txt "$pathDatasetEuroc"/V202 ./Monocular/EuRoC_TimeStamps/V202.txt "$pathDatasetEuroc"/V203 ./Monocular/EuRoC_TimeStamps/V203.txt dataset-V201_to_V203_mono
#------------------------------------
# Stereo Examples: one pure-visual stereo session per EuRoC sequence.
for seq in MH01 MH02 MH03 MH04 MH05 V101 V102 V103 V201 V202 V203; do
    echo "Launching $seq with Stereo sensor"
    ./Stereo/stereo_euroc ../Vocabulary/ORBvoc.txt ./Stereo/EuRoC.yaml "$pathDatasetEuroc"/"$seq" ./Stereo/EuRoC_TimeStamps/"$seq".txt dataset-"$seq"_stereo
done
# MultiSession Stereo Examples: several sequences processed in a single SLAM session.
echo "Launching Machine Hall with Stereo sensor"
./Stereo/stereo_euroc ../Vocabulary/ORBvoc.txt ./Stereo/EuRoC.yaml "$pathDatasetEuroc"/MH01 ./Stereo/EuRoC_TimeStamps/MH01.txt "$pathDatasetEuroc"/MH02 ./Stereo/EuRoC_TimeStamps/MH02.txt "$pathDatasetEuroc"/MH03 ./Stereo/EuRoC_TimeStamps/MH03.txt "$pathDatasetEuroc"/MH04 ./Stereo/EuRoC_TimeStamps/MH04.txt "$pathDatasetEuroc"/MH05 ./Stereo/EuRoC_TimeStamps/MH05.txt dataset-MH01_to_MH05_stereo
echo "Launching Vicon Room 1 with Stereo sensor"
./Stereo/stereo_euroc ../Vocabulary/ORBvoc.txt ./Stereo/EuRoC.yaml "$pathDatasetEuroc"/V101 ./Stereo/EuRoC_TimeStamps/V101.txt "$pathDatasetEuroc"/V102 ./Stereo/EuRoC_TimeStamps/V102.txt "$pathDatasetEuroc"/V103 ./Stereo/EuRoC_TimeStamps/V103.txt dataset-V101_to_V103_stereo
echo "Launching Vicon Room 2 with Stereo sensor"
./Stereo/stereo_euroc ../Vocabulary/ORBvoc.txt ./Stereo/EuRoC.yaml "$pathDatasetEuroc"/V201 ./Stereo/EuRoC_TimeStamps/V201.txt "$pathDatasetEuroc"/V202 ./Stereo/EuRoC_TimeStamps/V202.txt "$pathDatasetEuroc"/V203 ./Stereo/EuRoC_TimeStamps/V203.txt dataset-V201_to_V203_stereo
#------------------------------------
# Monocular-Inertial Examples: one visual-inertial session per EuRoC sequence.
for seq in MH01 MH02 MH03 MH04 MH05 V101 V102 V103 V201 V202 V203; do
    echo "Launching $seq with Monocular-Inertial sensor"
    ./Monocular-Inertial/mono_inertial_euroc ../Vocabulary/ORBvoc.txt ./Monocular-Inertial/EuRoC.yaml "$pathDatasetEuroc"/"$seq" ./Monocular-Inertial/EuRoC_TimeStamps/"$seq".txt dataset-"$seq"_monoi
done
# MultiSession Monocular-Inertial Examples: several sequences processed in a single SLAM session.
echo "Launching Machine Hall with Monocular-Inertial sensor"
./Monocular-Inertial/mono_inertial_euroc ../Vocabulary/ORBvoc.txt ./Monocular-Inertial/EuRoC.yaml "$pathDatasetEuroc"/MH01 ./Monocular-Inertial/EuRoC_TimeStamps/MH01.txt "$pathDatasetEuroc"/MH02 ./Monocular-Inertial/EuRoC_TimeStamps/MH02.txt "$pathDatasetEuroc"/MH03 ./Monocular-Inertial/EuRoC_TimeStamps/MH03.txt "$pathDatasetEuroc"/MH04 ./Monocular-Inertial/EuRoC_TimeStamps/MH04.txt "$pathDatasetEuroc"/MH05 ./Monocular-Inertial/EuRoC_TimeStamps/MH05.txt dataset-MH01_to_MH05_monoi
echo "Launching Vicon Room 1 with Monocular-Inertial sensor"
./Monocular-Inertial/mono_inertial_euroc ../Vocabulary/ORBvoc.txt ./Monocular-Inertial/EuRoC.yaml "$pathDatasetEuroc"/V101 ./Monocular-Inertial/EuRoC_TimeStamps/V101.txt "$pathDatasetEuroc"/V102 ./Monocular-Inertial/EuRoC_TimeStamps/V102.txt "$pathDatasetEuroc"/V103 ./Monocular-Inertial/EuRoC_TimeStamps/V103.txt dataset-V101_to_V103_monoi
echo "Launching Vicon Room 2 with Monocular-Inertial sensor"
./Monocular-Inertial/mono_inertial_euroc ../Vocabulary/ORBvoc.txt ./Monocular-Inertial/EuRoC.yaml "$pathDatasetEuroc"/V201 ./Monocular-Inertial/EuRoC_TimeStamps/V201.txt "$pathDatasetEuroc"/V202 ./Monocular-Inertial/EuRoC_TimeStamps/V202.txt "$pathDatasetEuroc"/V203 ./Monocular-Inertial/EuRoC_TimeStamps/V203.txt dataset-V201_to_V203_monoi
#------------------------------------
# Stereo-Inertial Examples: one stereo visual-inertial session per EuRoC sequence.
for seq in MH01 MH02 MH03 MH04 MH05 V101 V102 V103 V201 V202 V203; do
    echo "Launching $seq with Stereo-Inertial sensor"
    ./Stereo-Inertial/stereo_inertial_euroc ../Vocabulary/ORBvoc.txt ./Stereo-Inertial/EuRoC.yaml "$pathDatasetEuroc"/"$seq" ./Stereo-Inertial/EuRoC_TimeStamps/"$seq".txt dataset-"$seq"_stereoi
done
# MultiSession Stereo-Inertial Examples: several sequences processed in a single SLAM session.
echo "Launching Machine Hall with Stereo-Inertial sensor"
./Stereo-Inertial/stereo_inertial_euroc ../Vocabulary/ORBvoc.txt ./Stereo-Inertial/EuRoC.yaml "$pathDatasetEuroc"/MH01 ./Stereo-Inertial/EuRoC_TimeStamps/MH01.txt "$pathDatasetEuroc"/MH02 ./Stereo-Inertial/EuRoC_TimeStamps/MH02.txt "$pathDatasetEuroc"/MH03 ./Stereo-Inertial/EuRoC_TimeStamps/MH03.txt "$pathDatasetEuroc"/MH04 ./Stereo-Inertial/EuRoC_TimeStamps/MH04.txt "$pathDatasetEuroc"/MH05 ./Stereo-Inertial/EuRoC_TimeStamps/MH05.txt dataset-MH01_to_MH05_stereoi
echo "Launching Vicon Room 1 with Stereo-Inertial sensor"
./Stereo-Inertial/stereo_inertial_euroc ../Vocabulary/ORBvoc.txt ./Stereo-Inertial/EuRoC.yaml "$pathDatasetEuroc"/V101 ./Stereo-Inertial/EuRoC_TimeStamps/V101.txt "$pathDatasetEuroc"/V102 ./Stereo-Inertial/EuRoC_TimeStamps/V102.txt "$pathDatasetEuroc"/V103 ./Stereo-Inertial/EuRoC_TimeStamps/V103.txt dataset-V101_to_V103_stereoi
echo "Launching Vicon Room 2 with Stereo-Inertial sensor"
./Stereo-Inertial/stereo_inertial_euroc ../Vocabulary/ORBvoc.txt ./Stereo-Inertial/EuRoC.yaml "$pathDatasetEuroc"/V201 ./Stereo-Inertial/EuRoC_TimeStamps/V201.txt "$pathDatasetEuroc"/V202 ./Stereo-Inertial/EuRoC_TimeStamps/V202.txt "$pathDatasetEuroc"/V203 ./Stereo-Inertial/EuRoC_TimeStamps/V203.txt dataset-V201_to_V203_stereoi

View File

@ -0,0 +1,11 @@
#!/bin/bash
pathDatasetTUM_VI='/Datasets/TUM_VI' # Example: it is necessary to change it to the dataset path

# Single Session Example
echo "Launching Magistrale 1 with Stereo-Inertial sensor"
./Stereo-Inertial/stereo_inertial_tum_vi ../Vocabulary/ORBvoc.txt ./Stereo-Inertial/TUM_512.yaml "$pathDatasetTUM_VI"/dataset-magistrale1_512_16/mav0/cam0/data "$pathDatasetTUM_VI"/dataset-magistrale1_512_16/mav0/cam1/data ./Stereo-Inertial/TUM_TimeStamps/dataset-magistrale1_512.txt ./Stereo-Inertial/TUM_IMU/dataset-magistrale1_512.txt dataset-magistrale1_512_stereoi
echo "------------------------------------"
echo "Evaluation of Magistrale 1 trajectory with Stereo-Inertial sensor"
# Ground-truth path made consistent with the sequence directory used above
# (was "magistrale1_512_16", which lacks the "dataset-" prefix every other path uses).
python ../evaluation/evaluate_ate_scale.py "$pathDatasetTUM_VI"/dataset-magistrale1_512_16/mav0/mocap0/data.csv f_dataset-magistrale1_512_stereoi.txt --plot magistrale1_512_stereoi.pdf

240
Examples/tum_vi_examples.sh Normal file
View File

@ -0,0 +1,240 @@
#!/bin/bash
pathDatasetTUM_VI='/Datasets/TUM_VI' # Root of the TUM-VI dataset; it is necessary to change it to your dataset path
#------------------------------------
# Monocular Examples: one pure-visual session per TUM-VI room sequence.
for i in 1 2 3 4 5 6; do
    echo "Launching Room $i with Monocular sensor"
    ./Monocular/mono_tum_vi ../Vocabulary/ORBvoc.txt Monocular/TUM_512.yaml "$pathDatasetTUM_VI"/dataset-room"$i"_512_16/mav0/cam0/data Monocular/TUM_TimeStamps/dataset-room"$i"_512.txt dataset-room"$i"_512_mono
done
#------------------------------------
# Stereo Examples: one pure-visual stereo session per TUM-VI room sequence.
for i in 1 2 3 4 5 6; do
    echo "Launching Room $i with Stereo sensor"
    ./Stereo/stereo_tum_vi ../Vocabulary/ORBvoc.txt Stereo/TUM_512.yaml "$pathDatasetTUM_VI"/dataset-room"$i"_512_16/mav0/cam0/data "$pathDatasetTUM_VI"/dataset-room"$i"_512_16/mav0/cam1/data Stereo/TUM_TimeStamps/dataset-room"$i"_512.txt dataset-room"$i"_512_stereo
done
#------------------------------------
# Monocular-Inertial Examples: one visual-inertial session per TUM-VI sequence.
# Fixed: the Magistrale 2 run previously wrote "dataset-magistrale2_512" (missing
# the "_monoi" suffix every other run uses); all outputs are now uniform.
#
# run_monoi LABEL SEQ YAML — launch mono_inertial_tum_vi on sequence SEQ with
# config YAML, echoing LABEL first and writing results to dataset-SEQ_512_monoi.
run_monoi() {
    local label=$1 seq=$2 yaml=$3
    echo "Launching $label with Monocular-Inertial sensor"
    ./Monocular-Inertial/mono_inertial_tum_vi ../Vocabulary/ORBvoc.txt Monocular-Inertial/"$yaml" "$pathDatasetTUM_VI"/dataset-"$seq"_512_16/mav0/cam0/data Monocular-Inertial/TUM_TimeStamps/dataset-"$seq"_512.txt Monocular-Inertial/TUM_IMU/dataset-"$seq"_512.txt dataset-"$seq"_512_monoi
}
for i in 1 2 3 4 5; do run_monoi "Corridor $i" "corridor$i" TUM_512.yaml; done
for i in 1 2 3 4 5 6; do run_monoi "Magistrale $i" "magistrale$i" TUM_512.yaml; done
# Outdoor sequences use the far-focus calibration file.
for i in 1 2 3 4 5 6 7 8; do run_monoi "Outdoor $i" "outdoors$i" TUM_512_far.yaml; done
for i in 1 2 3 4 5 6; do run_monoi "Room $i" "room$i" TUM_512.yaml; done
for i in 1 2 3; do run_monoi "Slides $i" "slides$i" TUM_512.yaml; done
# MultiSession Monocular-Inertial Examples
# Each run processes several TUM-VI sequences in one SLAM session; for every
# sequence the triplet (cam0 image dir, timestamp file, IMU file) is appended,
# and the final argument is the output trajectory name.
# NOTE(review): the timestamp/IMU files are taken from the Stereo-Inertial
# folders — presumably identical copies; confirm against the repo layout.
echo "Launching Room 1, Magistrale 1, Magistrale 5 and Slides 1 in the same session with Monocular-Inertial sensor"
./Monocular-Inertial/mono_inertial_tum_vi ../Vocabulary/ORBvoc.txt Monocular-Inertial/TUM_512.yaml "$pathDatasetTUM_VI"/dataset-room1_512_16/mav0/cam0/data Stereo-Inertial/TUM_TimeStamps/dataset-room1_512.txt Stereo-Inertial/TUM_IMU/dataset-room1_512.txt "$pathDatasetTUM_VI"/dataset-magistrale1_512_16/mav0/cam0/data Stereo-Inertial/TUM_TimeStamps/dataset-magistrale1_512.txt Stereo-Inertial/TUM_IMU/dataset-magistrale1_512.txt "$pathDatasetTUM_VI"/dataset-magistrale5_512_16/mav0/cam0/data Stereo-Inertial/TUM_TimeStamps/dataset-magistrale5_512.txt Stereo-Inertial/TUM_IMU/dataset-magistrale5_512.txt "$pathDatasetTUM_VI"/dataset-slides1_512_16/mav0/cam0/data Stereo-Inertial/TUM_TimeStamps/dataset-slides1_512.txt Stereo-Inertial/TUM_IMU/dataset-slides1_512.txt dataset-room1_mag1_mag5_slides1_monoi
echo "Launching all Rooms (1-6) in the same session with Monocular-Inertial sensor"
./Monocular-Inertial/mono_inertial_tum_vi ../Vocabulary/ORBvoc.txt Monocular-Inertial/TUM_512.yaml "$pathDatasetTUM_VI"/dataset-room1_512_16/mav0/cam0/data Stereo-Inertial/TUM_TimeStamps/dataset-room1_512.txt Stereo-Inertial/TUM_IMU/dataset-room1_512.txt "$pathDatasetTUM_VI"/dataset-room2_512_16/mav0/cam0/data Stereo-Inertial/TUM_TimeStamps/dataset-room2_512.txt Stereo-Inertial/TUM_IMU/dataset-room2_512.txt "$pathDatasetTUM_VI"/dataset-room3_512_16/mav0/cam0/data Stereo-Inertial/TUM_TimeStamps/dataset-room3_512.txt Stereo-Inertial/TUM_IMU/dataset-room3_512.txt "$pathDatasetTUM_VI"/dataset-room4_512_16/mav0/cam0/data Stereo-Inertial/TUM_TimeStamps/dataset-room4_512.txt Stereo-Inertial/TUM_IMU/dataset-room4_512.txt "$pathDatasetTUM_VI"/dataset-room5_512_16/mav0/cam0/data Stereo-Inertial/TUM_TimeStamps/dataset-room5_512.txt Stereo-Inertial/TUM_IMU/dataset-room5_512.txt "$pathDatasetTUM_VI"/dataset-room6_512_16/mav0/cam0/data Stereo-Inertial/TUM_TimeStamps/dataset-room6_512.txt Stereo-Inertial/TUM_IMU/dataset-room6_512.txt dataset-rooms123456_monoi
#------------------------------------
# Stereo-Inertial Examples
# Single-session Stereo-Inertial runs. Arguments: vocabulary, settings yaml,
# cam0 image dir, cam1 image dir, timestamp file, IMU file, output name.
# "corridor" sequences (1-5)
echo "Launching Corridor 1 with Stereo-Inertial sensor"
./Stereo-Inertial/stereo_inertial_tum_vi ../Vocabulary/ORBvoc.txt Stereo-Inertial/TUM_512.yaml "$pathDatasetTUM_VI"/dataset-corridor1_512_16/mav0/cam0/data "$pathDatasetTUM_VI"/dataset-corridor1_512_16/mav0/cam1/data Stereo-Inertial/TUM_TimeStamps/dataset-corridor1_512.txt Stereo-Inertial/TUM_IMU/dataset-corridor1_512.txt dataset-corridor1_512_stereoi
echo "Launching Corridor 2 with Stereo-Inertial sensor"
./Stereo-Inertial/stereo_inertial_tum_vi ../Vocabulary/ORBvoc.txt Stereo-Inertial/TUM_512.yaml "$pathDatasetTUM_VI"/dataset-corridor2_512_16/mav0/cam0/data "$pathDatasetTUM_VI"/dataset-corridor2_512_16/mav0/cam1/data Stereo-Inertial/TUM_TimeStamps/dataset-corridor2_512.txt Stereo-Inertial/TUM_IMU/dataset-corridor2_512.txt dataset-corridor2_512_stereoi
echo "Launching Corridor 3 with Stereo-Inertial sensor"
./Stereo-Inertial/stereo_inertial_tum_vi ../Vocabulary/ORBvoc.txt Stereo-Inertial/TUM_512.yaml "$pathDatasetTUM_VI"/dataset-corridor3_512_16/mav0/cam0/data "$pathDatasetTUM_VI"/dataset-corridor3_512_16/mav0/cam1/data Stereo-Inertial/TUM_TimeStamps/dataset-corridor3_512.txt Stereo-Inertial/TUM_IMU/dataset-corridor3_512.txt dataset-corridor3_512_stereoi
echo "Launching Corridor 4 with Stereo-Inertial sensor"
./Stereo-Inertial/stereo_inertial_tum_vi ../Vocabulary/ORBvoc.txt Stereo-Inertial/TUM_512.yaml "$pathDatasetTUM_VI"/dataset-corridor4_512_16/mav0/cam0/data "$pathDatasetTUM_VI"/dataset-corridor4_512_16/mav0/cam1/data Stereo-Inertial/TUM_TimeStamps/dataset-corridor4_512.txt Stereo-Inertial/TUM_IMU/dataset-corridor4_512.txt dataset-corridor4_512_stereoi
echo "Launching Corridor 5 with Stereo-Inertial sensor"
./Stereo-Inertial/stereo_inertial_tum_vi ../Vocabulary/ORBvoc.txt Stereo-Inertial/TUM_512.yaml "$pathDatasetTUM_VI"/dataset-corridor5_512_16/mav0/cam0/data "$pathDatasetTUM_VI"/dataset-corridor5_512_16/mav0/cam1/data Stereo-Inertial/TUM_TimeStamps/dataset-corridor5_512.txt Stereo-Inertial/TUM_IMU/dataset-corridor5_512.txt dataset-corridor5_512_stereoi
# "magistrale" sequences (1-6)
echo "Launching Magistrale 1 with Stereo-Inertial sensor"
./Stereo-Inertial/stereo_inertial_tum_vi ../Vocabulary/ORBvoc.txt Stereo-Inertial/TUM_512.yaml "$pathDatasetTUM_VI"/dataset-magistrale1_512_16/mav0/cam0/data "$pathDatasetTUM_VI"/dataset-magistrale1_512_16/mav0/cam1/data Stereo-Inertial/TUM_TimeStamps/dataset-magistrale1_512.txt Stereo-Inertial/TUM_IMU/dataset-magistrale1_512.txt dataset-magistrale1_512_stereoi
echo "Launching Magistrale 2 with Stereo-Inertial sensor"
./Stereo-Inertial/stereo_inertial_tum_vi ../Vocabulary/ORBvoc.txt Stereo-Inertial/TUM_512.yaml "$pathDatasetTUM_VI"/dataset-magistrale2_512_16/mav0/cam0/data "$pathDatasetTUM_VI"/dataset-magistrale2_512_16/mav0/cam1/data Stereo-Inertial/TUM_TimeStamps/dataset-magistrale2_512.txt Stereo-Inertial/TUM_IMU/dataset-magistrale2_512.txt dataset-magistrale2_512_stereoi
echo "Launching Magistrale 3 with Stereo-Inertial sensor"
./Stereo-Inertial/stereo_inertial_tum_vi ../Vocabulary/ORBvoc.txt Stereo-Inertial/TUM_512.yaml "$pathDatasetTUM_VI"/dataset-magistrale3_512_16/mav0/cam0/data "$pathDatasetTUM_VI"/dataset-magistrale3_512_16/mav0/cam1/data Stereo-Inertial/TUM_TimeStamps/dataset-magistrale3_512.txt Stereo-Inertial/TUM_IMU/dataset-magistrale3_512.txt dataset-magistrale3_512_stereoi
echo "Launching Magistrale 4 with Stereo-Inertial sensor"
./Stereo-Inertial/stereo_inertial_tum_vi ../Vocabulary/ORBvoc.txt Stereo-Inertial/TUM_512.yaml "$pathDatasetTUM_VI"/dataset-magistrale4_512_16/mav0/cam0/data "$pathDatasetTUM_VI"/dataset-magistrale4_512_16/mav0/cam1/data Stereo-Inertial/TUM_TimeStamps/dataset-magistrale4_512.txt Stereo-Inertial/TUM_IMU/dataset-magistrale4_512.txt dataset-magistrale4_512_stereoi
echo "Launching Magistrale 5 with Stereo-Inertial sensor"
./Stereo-Inertial/stereo_inertial_tum_vi ../Vocabulary/ORBvoc.txt Stereo-Inertial/TUM_512.yaml "$pathDatasetTUM_VI"/dataset-magistrale5_512_16/mav0/cam0/data "$pathDatasetTUM_VI"/dataset-magistrale5_512_16/mav0/cam1/data Stereo-Inertial/TUM_TimeStamps/dataset-magistrale5_512.txt Stereo-Inertial/TUM_IMU/dataset-magistrale5_512.txt dataset-magistrale5_512_stereoi
echo "Launching Magistrale 6 with Stereo-Inertial sensor"
./Stereo-Inertial/stereo_inertial_tum_vi ../Vocabulary/ORBvoc.txt Stereo-Inertial/TUM_512.yaml "$pathDatasetTUM_VI"/dataset-magistrale6_512_16/mav0/cam0/data "$pathDatasetTUM_VI"/dataset-magistrale6_512_16/mav0/cam1/data Stereo-Inertial/TUM_TimeStamps/dataset-magistrale6_512.txt Stereo-Inertial/TUM_IMU/dataset-magistrale6_512.txt dataset-magistrale6_512_stereoi
# "outdoors" sequences (1-8); these use the dedicated outdoor settings file.
# NOTE(review): outdoors3 and outdoors4 output names lack the "_stereoi"
# suffix the other runs use — looks like an inconsistency; confirm before
# relying on the output file names downstream.
echo "Launching Outdoor 1 with Stereo-Inertial sensor"
./Stereo-Inertial/stereo_inertial_tum_vi ../Vocabulary/ORBvoc.txt Stereo-Inertial/TUM_512_outdoors.yaml "$pathDatasetTUM_VI"/dataset-outdoors1_512_16/mav0/cam0/data "$pathDatasetTUM_VI"/dataset-outdoors1_512_16/mav0/cam1/data Stereo-Inertial/TUM_TimeStamps/dataset-outdoors1_512.txt Stereo-Inertial/TUM_IMU/dataset-outdoors1_512.txt outdoors1_512_stereoi
echo "Launching Outdoor 2 with Stereo-Inertial sensor"
./Stereo-Inertial/stereo_inertial_tum_vi ../Vocabulary/ORBvoc.txt Stereo-Inertial/TUM_512_outdoors.yaml "$pathDatasetTUM_VI"/dataset-outdoors2_512_16/mav0/cam0/data "$pathDatasetTUM_VI"/dataset-outdoors2_512_16/mav0/cam1/data Stereo-Inertial/TUM_TimeStamps/dataset-outdoors2_512.txt Stereo-Inertial/TUM_IMU/dataset-outdoors2_512.txt outdoors2_512_stereoi
echo "Launching Outdoor 3 with Stereo-Inertial sensor"
./Stereo-Inertial/stereo_inertial_tum_vi ../Vocabulary/ORBvoc.txt Stereo-Inertial/TUM_512_outdoors.yaml "$pathDatasetTUM_VI"/dataset-outdoors3_512_16/mav0/cam0/data "$pathDatasetTUM_VI"/dataset-outdoors3_512_16/mav0/cam1/data Stereo-Inertial/TUM_TimeStamps/dataset-outdoors3_512.txt Stereo-Inertial/TUM_IMU/dataset-outdoors3_512.txt outdoors3_512
echo "Launching Outdoor 4 with Stereo-Inertial sensor"
./Stereo-Inertial/stereo_inertial_tum_vi ../Vocabulary/ORBvoc.txt Stereo-Inertial/TUM_512_outdoors.yaml "$pathDatasetTUM_VI"/dataset-outdoors4_512_16/mav0/cam0/data "$pathDatasetTUM_VI"/dataset-outdoors4_512_16/mav0/cam1/data Stereo-Inertial/TUM_TimeStamps/dataset-outdoors4_512.txt Stereo-Inertial/TUM_IMU/dataset-outdoors4_512.txt outdoors4_512
echo "Launching Outdoor 5 with Stereo-Inertial sensor"
./Stereo-Inertial/stereo_inertial_tum_vi ../Vocabulary/ORBvoc.txt Stereo-Inertial/TUM_512_outdoors.yaml "$pathDatasetTUM_VI"/dataset-outdoors5_512_16/mav0/cam0/data "$pathDatasetTUM_VI"/dataset-outdoors5_512_16/mav0/cam1/data Stereo-Inertial/TUM_TimeStamps/dataset-outdoors5_512.txt Stereo-Inertial/TUM_IMU/dataset-outdoors5_512.txt outdoors5_512_stereoi
echo "Launching Outdoor 6 with Stereo-Inertial sensor"
./Stereo-Inertial/stereo_inertial_tum_vi ../Vocabulary/ORBvoc.txt Stereo-Inertial/TUM_512_outdoors.yaml "$pathDatasetTUM_VI"/dataset-outdoors6_512_16/mav0/cam0/data "$pathDatasetTUM_VI"/dataset-outdoors6_512_16/mav0/cam1/data Stereo-Inertial/TUM_TimeStamps/dataset-outdoors6_512.txt Stereo-Inertial/TUM_IMU/dataset-outdoors6_512.txt outdoors6_512_stereoi
echo "Launching Outdoor 7 with Stereo-Inertial sensor"
./Stereo-Inertial/stereo_inertial_tum_vi ../Vocabulary/ORBvoc.txt Stereo-Inertial/TUM_512_outdoors.yaml "$pathDatasetTUM_VI"/dataset-outdoors7_512_16/mav0/cam0/data "$pathDatasetTUM_VI"/dataset-outdoors7_512_16/mav0/cam1/data Stereo-Inertial/TUM_TimeStamps/dataset-outdoors7_512.txt Stereo-Inertial/TUM_IMU/dataset-outdoors7_512.txt outdoors7_512_stereoi
echo "Launching Outdoor 8 with Stereo-Inertial sensor"
./Stereo-Inertial/stereo_inertial_tum_vi ../Vocabulary/ORBvoc.txt Stereo-Inertial/TUM_512_outdoors.yaml "$pathDatasetTUM_VI"/dataset-outdoors8_512_16/mav0/cam0/data "$pathDatasetTUM_VI"/dataset-outdoors8_512_16/mav0/cam1/data Stereo-Inertial/TUM_TimeStamps/dataset-outdoors8_512.txt Stereo-Inertial/TUM_IMU/dataset-outdoors8_512.txt outdoors8_512_stereoi
# "room" sequences (1-6)
echo "Launching Room 1 with Stereo-Inertial sensor"
./Stereo-Inertial/stereo_inertial_tum_vi ../Vocabulary/ORBvoc.txt Stereo-Inertial/TUM_512.yaml "$pathDatasetTUM_VI"/dataset-room1_512_16/mav0/cam0/data "$pathDatasetTUM_VI"/dataset-room1_512_16/mav0/cam1/data Stereo-Inertial/TUM_TimeStamps/dataset-room1_512.txt Stereo-Inertial/TUM_IMU/dataset-room1_512.txt dataset-room1_512_stereoi
echo "Launching Room 2 with Stereo-Inertial sensor"
./Stereo-Inertial/stereo_inertial_tum_vi ../Vocabulary/ORBvoc.txt Stereo-Inertial/TUM_512.yaml "$pathDatasetTUM_VI"/dataset-room2_512_16/mav0/cam0/data "$pathDatasetTUM_VI"/dataset-room2_512_16/mav0/cam1/data Stereo-Inertial/TUM_TimeStamps/dataset-room2_512.txt Stereo-Inertial/TUM_IMU/dataset-room2_512.txt dataset-room2_512_stereoi
echo "Launching Room 3 with Stereo-Inertial sensor"
./Stereo-Inertial/stereo_inertial_tum_vi ../Vocabulary/ORBvoc.txt Stereo-Inertial/TUM_512.yaml "$pathDatasetTUM_VI"/dataset-room3_512_16/mav0/cam0/data "$pathDatasetTUM_VI"/dataset-room3_512_16/mav0/cam1/data Stereo-Inertial/TUM_TimeStamps/dataset-room3_512.txt Stereo-Inertial/TUM_IMU/dataset-room3_512.txt dataset-room3_512_stereoi
echo "Launching Room 4 with Stereo-Inertial sensor"
./Stereo-Inertial/stereo_inertial_tum_vi ../Vocabulary/ORBvoc.txt Stereo-Inertial/TUM_512.yaml "$pathDatasetTUM_VI"/dataset-room4_512_16/mav0/cam0/data "$pathDatasetTUM_VI"/dataset-room4_512_16/mav0/cam1/data Stereo-Inertial/TUM_TimeStamps/dataset-room4_512.txt Stereo-Inertial/TUM_IMU/dataset-room4_512.txt dataset-room4_512_stereoi
echo "Launching Room 5 with Stereo-Inertial sensor"
./Stereo-Inertial/stereo_inertial_tum_vi ../Vocabulary/ORBvoc.txt Stereo-Inertial/TUM_512.yaml "$pathDatasetTUM_VI"/dataset-room5_512_16/mav0/cam0/data "$pathDatasetTUM_VI"/dataset-room5_512_16/mav0/cam1/data Stereo-Inertial/TUM_TimeStamps/dataset-room5_512.txt Stereo-Inertial/TUM_IMU/dataset-room5_512.txt dataset-room5_512_stereoi
echo "Launching Room 6 with Stereo-Inertial sensor"
./Stereo-Inertial/stereo_inertial_tum_vi ../Vocabulary/ORBvoc.txt Stereo-Inertial/TUM_512.yaml "$pathDatasetTUM_VI"/dataset-room6_512_16/mav0/cam0/data "$pathDatasetTUM_VI"/dataset-room6_512_16/mav0/cam1/data Stereo-Inertial/TUM_TimeStamps/dataset-room6_512.txt Stereo-Inertial/TUM_IMU/dataset-room6_512.txt dataset-room6_512_stereoi
# "slides" sequences (1-3)
echo "Launching Slides 1 with Stereo-Inertial sensor"
./Stereo-Inertial/stereo_inertial_tum_vi ../Vocabulary/ORBvoc.txt Stereo-Inertial/TUM_512.yaml "$pathDatasetTUM_VI"/dataset-slides1_512_16/mav0/cam0/data "$pathDatasetTUM_VI"/dataset-slides1_512_16/mav0/cam1/data Stereo-Inertial/TUM_TimeStamps/dataset-slides1_512.txt Stereo-Inertial/TUM_IMU/dataset-slides1_512.txt dataset-slides1_512_stereoi
echo "Launching Slides 2 with Stereo-Inertial sensor"
./Stereo-Inertial/stereo_inertial_tum_vi ../Vocabulary/ORBvoc.txt Stereo-Inertial/TUM_512.yaml "$pathDatasetTUM_VI"/dataset-slides2_512_16/mav0/cam0/data "$pathDatasetTUM_VI"/dataset-slides2_512_16/mav0/cam1/data Stereo-Inertial/TUM_TimeStamps/dataset-slides2_512.txt Stereo-Inertial/TUM_IMU/dataset-slides2_512.txt dataset-slides2_512_stereoi
echo "Launching Slides 3 with Stereo-Inertial sensor"
./Stereo-Inertial/stereo_inertial_tum_vi ../Vocabulary/ORBvoc.txt Stereo-Inertial/TUM_512.yaml "$pathDatasetTUM_VI"/dataset-slides3_512_16/mav0/cam0/data "$pathDatasetTUM_VI"/dataset-slides3_512_16/mav0/cam1/data Stereo-Inertial/TUM_TimeStamps/dataset-slides3_512.txt Stereo-Inertial/TUM_IMU/dataset-slides3_512.txt dataset-slides3_512_stereoi
# MultiSession Stereo-Inertial Examples
# Each run processes several sequences in one SLAM session; per sequence the
# quadruple (cam0 dir, cam1 dir, timestamp file, IMU file) is appended, and
# the final argument is the output trajectory name.
echo "Launching Room 1, Magistrale 1, Magistrale 5 and Slides 1 in the same session with Stereo-Inertial sensor"
./Stereo-Inertial/stereo_inertial_tum_vi ../Vocabulary/ORBvoc.txt Stereo-Inertial/TUM_512.yaml "$pathDatasetTUM_VI"/dataset-room1_512_16/mav0/cam0/data "$pathDatasetTUM_VI"/dataset-room1_512_16/mav0/cam1/data Stereo-Inertial/TUM_TimeStamps/dataset-room1_512.txt Stereo-Inertial/TUM_IMU/dataset-room1_512.txt "$pathDatasetTUM_VI"/dataset-magistrale1_512_16/mav0/cam0/data "$pathDatasetTUM_VI"/dataset-magistrale1_512_16/mav0/cam1/data Stereo-Inertial/TUM_TimeStamps/dataset-magistrale1_512.txt Stereo-Inertial/TUM_IMU/dataset-magistrale1_512.txt "$pathDatasetTUM_VI"/dataset-magistrale5_512_16/mav0/cam0/data "$pathDatasetTUM_VI"/dataset-magistrale5_512_16/mav0/cam1/data Stereo-Inertial/TUM_TimeStamps/dataset-magistrale5_512.txt Stereo-Inertial/TUM_IMU/dataset-magistrale5_512.txt "$pathDatasetTUM_VI"/dataset-slides1_512_16/mav0/cam0/data "$pathDatasetTUM_VI"/dataset-slides1_512_16/mav0/cam1/data Stereo-Inertial/TUM_TimeStamps/dataset-slides1_512.txt Stereo-Inertial/TUM_IMU/dataset-slides1_512.txt dataset-room1_mag1_mag5_slides1_stereoi
echo "Launching all Rooms (1-6) in the same session with Stereo-Inertial sensor"
./Stereo-Inertial/stereo_inertial_tum_vi ../Vocabulary/ORBvoc.txt Stereo-Inertial/TUM_512.yaml "$pathDatasetTUM_VI"/dataset-room1_512_16/mav0/cam0/data "$pathDatasetTUM_VI"/dataset-room1_512_16/mav0/cam1/data Stereo-Inertial/TUM_TimeStamps/dataset-room1_512.txt Stereo-Inertial/TUM_IMU/dataset-room1_512.txt "$pathDatasetTUM_VI"/dataset-room2_512_16/mav0/cam0/data "$pathDatasetTUM_VI"/dataset-room2_512_16/mav0/cam1/data Stereo-Inertial/TUM_TimeStamps/dataset-room2_512.txt Stereo-Inertial/TUM_IMU/dataset-room2_512.txt "$pathDatasetTUM_VI"/dataset-room3_512_16/mav0/cam0/data "$pathDatasetTUM_VI"/dataset-room3_512_16/mav0/cam1/data Stereo-Inertial/TUM_TimeStamps/dataset-room3_512.txt Stereo-Inertial/TUM_IMU/dataset-room3_512.txt "$pathDatasetTUM_VI"/dataset-room4_512_16/mav0/cam0/data "$pathDatasetTUM_VI"/dataset-room4_512_16/mav0/cam1/data Stereo-Inertial/TUM_TimeStamps/dataset-room4_512.txt Stereo-Inertial/TUM_IMU/dataset-room4_512.txt "$pathDatasetTUM_VI"/dataset-room5_512_16/mav0/cam0/data "$pathDatasetTUM_VI"/dataset-room5_512_16/mav0/cam1/data Stereo-Inertial/TUM_TimeStamps/dataset-room5_512.txt Stereo-Inertial/TUM_IMU/dataset-room5_512.txt "$pathDatasetTUM_VI"/dataset-room6_512_16/mav0/cam0/data "$pathDatasetTUM_VI"/dataset-room6_512_16/mav0/cam1/data Stereo-Inertial/TUM_TimeStamps/dataset-room6_512.txt Stereo-Inertial/TUM_IMU/dataset-room6_512.txt dataset-rooms123456_stereoi

View File

@ -50,7 +50,7 @@ public:
Atlas();
Atlas(int initKFid); // When its initialization the first map is created
~Atlas();
// 创建新地图
void CreateNewMap();
void ChangeMap(Map* pMap);
@ -107,7 +107,7 @@ public:
protected:
std::set<Map*> mspMaps;
std::set<Map*> mspMaps; //存放所有的地图
std::set<Map*> mspBadMaps;
Map* mpCurrentMap;

6
include/Config.h Normal file
View File

@ -0,0 +1,6 @@
// Global compile-time configuration switches, guarded so the header can be
// included from multiple translation units.
#ifndef CONFIG_H
#define CONFIG_H
// REGISTER_TIMES: when defined, compiles in timing instrumentation (the
// chrono-based measurements guarded elsewhere in the codebase, e.g. the
// ORB-extraction timing in Frame). Left commented out, so timing is disabled
// by default.
//#define REGISTER_TIMES
#endif // CONFIG_H

View File

@ -336,6 +336,13 @@ public:
}
};
/**
* @brief
* OptimizableTypes.h
* imu
* imu姿imu姿
* imu使
*/
class EdgeMono : public g2o::BaseBinaryEdge<2,Eigen::Vector2d,g2o::VertexSBAPointXYZ,VertexPose>
{
public:
@ -489,6 +496,7 @@ public:
const int cam_idx;
};
// TO COMMENT
class EdgeInertial : public g2o::BaseMultiEdge<9,Vector9d>
{
public:
@ -858,6 +866,6 @@ public:
Eigen::Vector3d dtij;
};
} //namespace ORB_SLAM2
} //namespace ORB_SLAM3
#endif // G2OTYPES_H

View File

@ -280,6 +280,6 @@ cv::Mat NormalizeRotation(const cv::Mat &R);
}
} //namespace ORB_SLAM2
} //namespace ORB_SLAM3
#endif // IMUTYPES_H

View File

@ -43,9 +43,11 @@
namespace ORB_SLAM3
{
// 打印中间信息
class Verbose
{
public:
// 显示信息量程度
enum eLevel
{
VERBOSITY_QUIET=0,

View File

@ -109,9 +109,9 @@ public:
NO_IMAGES_YET=0, //当前无图像
NOT_INITIALIZED=1, //有图像但是没有完成初始化
OK=2, //正常跟踪状态
RECENTLY_LOST=3, //IMU模式当前地图中的KF>10,且丢失时间<5秒。纯视觉模式没有该状态
RECENTLY_LOST=3, //IMU模式当前地图中的KF>10,且丢失时间<5秒。纯视觉模式没有该状态
LOST=4, //IMU模式当前帧跟丢超过5s。纯视觉模式重定位失败
OK_KLT=5
OK_KLT=5 //未使用
};
eTrackingState mState;

View File

@ -55,15 +55,23 @@ Atlas::~Atlas()
}
}
/**
* @brief
*
*/
void Atlas::CreateNewMap()
{
// 锁住地图集
unique_lock<mutex> lock(mMutexAtlas);
cout << "Creation of new map with id: " << Map::nNextId << endl;
// 如果当前活跃地图有效,先存储当前地图为不活跃地图后退出
if(mpCurrentMap){
cout << "Exits current map " << endl;
// mnLastInitKFidMap为当前地图创建时第1个关键帧的id它是在上一个地图最大关键帧id的基础上增加1
if(!mspMaps.empty() && mnLastInitKFidMap < mpCurrentMap->GetMaxKFid())
mnLastInitKFidMap = mpCurrentMap->GetMaxKFid()+1; //The init KF is the next of current maximum
// 将当前地图储存起来其实就是把mIsInUse标记为false
mpCurrentMap->SetStoredMap();
cout << "Saved map with ID: " << mpCurrentMap->GetId() << endl;
@ -72,9 +80,9 @@ void Atlas::CreateNewMap()
}
cout << "Creation of new map with last KF id: " << mnLastInitKFidMap << endl;
mpCurrentMap = new Map(mnLastInitKFidMap);
mpCurrentMap->SetCurrentMap();
mspMaps.insert(mpCurrentMap);
mpCurrentMap = new Map(mnLastInitKFidMap); //新建地图
mpCurrentMap->SetCurrentMap(); //设置为活跃地图
mspMaps.insert(mpCurrentMap); //插入地图集
}
void Atlas::ChangeMap(Map* pMap)

View File

@ -364,8 +364,8 @@ Frame::Frame(const cv::Mat &imGray, const double &timeStamp, ORBextractor* extra
mTimeORB_Ext = std::chrono::duration_cast<std::chrono::duration<double,std::milli> >(time_EndExtORB - time_StartExtORB).count();
#endif
//求出特征点的个数
//提取特征点的个数
N = mvKeys.size();
//如果没有能够成功提取出特征点,那么就直接返回了
if(mvKeys.empty())
@ -1369,9 +1369,11 @@ Frame::Frame(const cv::Mat &imLeft, const cv::Mat &imRight, const double &timeSt
mTimeORB_Ext = std::chrono::duration_cast<std::chrono::duration<double,std::milli> >(time_EndExtORB - time_StartExtORB).count();
#endif
// 左图中提取的特征点数目
Nleft = mvKeys.size();
// 右图中提取的特征点数目
Nright = mvKeysRight.size();
// 特征点总数
N = Nleft + Nright;
if(N == 0)

View File

@ -487,35 +487,46 @@ VertexAccBias::VertexAccBias(Frame *pF)
setEstimate(ba);
}
/**
* @brief imu
* @param pInt
*/
EdgeInertial::EdgeInertial(IMU::Preintegrated *pInt):JRg(Converter::toMatrix3d(pInt->JRg)),
JVg(Converter::toMatrix3d(pInt->JVg)), JPg(Converter::toMatrix3d(pInt->JPg)), JVa(Converter::toMatrix3d(pInt->JVa)),
JPa(Converter::toMatrix3d(pInt->JPa)), mpInt(pInt), dt(pInt->dT)
{
// 准备工作把预积分类里面的值先取出来包含信息的是两帧之间n多个imu信息预积分来的
// This edge links 6 vertices
// 6元边
resize(6);
// 1. 定义重力
g << 0, 0, -IMU::GRAVITY_VALUE;
// 2. 读取协方差矩阵的前9*9部分的逆矩阵该部分表示的是预积分测量噪声的协方差矩阵
cv::Mat cvInfo = pInt->C.rowRange(0, 9).colRange(0, 9).inv(cv::DECOMP_SVD);
// 转成eigen Matrix9d
Matrix9d Info;
for (int r = 0; r < 9; r++)
for (int c = 0; c < 9; c++)
Info(r, c) = cvInfo.at<float>(r, c);
// 3. 强制让其成为对角矩阵
Info = (Info + Info.transpose()) / 2;
// 4. 让特征值很小的时候置为0再重新计算信息矩阵暂不知这么操作的目的是什么先搞清楚操作流程吧
Eigen::SelfAdjointEigenSolver<Eigen::Matrix<double, 9, 9>> es(Info);
Eigen::Matrix<double,9,1> eigs = es.eigenvalues();
Eigen::Matrix<double, 9, 1> eigs = es.eigenvalues(); // 矩阵特征值
for (int i = 0; i < 9; i++)
if (eigs[i] < 1e-12)
eigs[i] = 0;
// asDiagonal 生成对角矩阵
Info = es.eigenvectors() * eigs.asDiagonal() * es.eigenvectors().transpose();
setInformation(Info);
}
/**
* @brief
*/
void EdgeInertial::computeError()
{
// 计算残差
// TODO Maybe Reintegrate inertial measurments when difference between linearization point and current estimate is too big
const VertexPose* VP1 = static_cast<const VertexPose*>(_vertices[0]);
const VertexVelocity* VV1= static_cast<const VertexVelocity*>(_vertices[1]);
@ -536,6 +547,7 @@ void EdgeInertial::computeError()
_error << er, ev, ep;
}
// 计算雅克比矩阵
void EdgeInertial::linearizeOplus()
{
const VertexPose* VP1 = static_cast<const VertexPose*>(_vertices[0]);
@ -549,42 +561,52 @@ void EdgeInertial::linearizeOplus()
Eigen::Vector3d dbg;
dbg << db.bwx, db.bwy, db.bwz;
const Eigen::Matrix3d Rwb1 = VP1->estimate().Rwb;
const Eigen::Matrix3d Rbw1 = Rwb1.transpose();
const Eigen::Matrix3d Rwb2 = VP2->estimate().Rwb;
const Eigen::Matrix3d Rwb1 = VP1->estimate().Rwb; // Ri
const Eigen::Matrix3d Rbw1 = Rwb1.transpose(); // Ri.t()
const Eigen::Matrix3d Rwb2 = VP2->estimate().Rwb; // Rj
const Eigen::Matrix3d dR = Converter::toMatrix3d(mpInt->GetDeltaRotation(b1));
const Eigen::Matrix3d eR = dR.transpose()*Rbw1*Rwb2;
const Eigen::Vector3d er = LogSO3(eR);
const Eigen::Matrix3d invJr = InverseRightJacobianSO3(er);
const Eigen::Matrix3d eR = dR.transpose() * Rbw1 * Rwb2; // r△Rij
const Eigen::Vector3d er = LogSO3(eR); // r△φij
const Eigen::Matrix3d invJr = InverseRightJacobianSO3(er); // Jr^-1(log(△Rij))
// 就很神奇_jacobianOplus个数等于边的个数里面的大小等于观测值维度也就是残差× 每个节点待优化值的维度
// Jacobians wrt Pose 1
// _jacobianOplus[0] 9*6矩阵 总体来说就是三个残差分别对pose1的旋转与平移p求导
_jacobianOplus[0].setZero();
// rotation
// (0,0)起点的3*3块表示旋转残差对pose1的旋转求导
_jacobianOplus[0].block<3,3>(0,0) = -invJr*Rwb2.transpose()*Rwb1; // OK
// (3,0)起点的3*3块表示速度残差对pose1的旋转求导
_jacobianOplus[0].block<3,3>(3,0) = Skew(Rbw1*(VV2->estimate() - VV1->estimate() - g*dt)); // OK
// (6,0)起点的3*3块表示位置残差对pose1的旋转求导
_jacobianOplus[0].block<3,3>(6,0) = Skew(Rbw1*(VP2->estimate().twb - VP1->estimate().twb
- VV1->estimate()*dt - 0.5*g*dt*dt)); // OK
// translation
// (6,3)起点的3*3块表示位置残差对pose1的位置求导
_jacobianOplus[0].block<3,3>(6,3) = -Eigen::Matrix3d::Identity(); // OK
// Jacobians wrt Velocity 1
// _jacobianOplus[1] 9*3矩阵 总体来说就是三个残差分别对pose1的速度求导
_jacobianOplus[1].setZero();
_jacobianOplus[1].block<3,3>(3,0) = -Rbw1; // OK
_jacobianOplus[1].block<3,3>(6,0) = -Rbw1*dt; // OK
// Jacobians wrt Gyro 1
// _jacobianOplus[2] 9*3矩阵 总体来说就是三个残差分别对陀螺仪偏置的速度求导
_jacobianOplus[2].setZero();
_jacobianOplus[2].block<3,3>(0,0) = -invJr*eR.transpose()*RightJacobianSO3(JRg*dbg)*JRg; // OK
_jacobianOplus[2].block<3,3>(3,0) = -JVg; // OK
_jacobianOplus[2].block<3,3>(6,0) = -JPg; // OK
// Jacobians wrt Accelerometer 1
// _jacobianOplus[3] 9*3矩阵 总体来说就是三个残差分别对加速度计偏置的速度求导
_jacobianOplus[3].setZero();
_jacobianOplus[3].block<3,3>(3,0) = -JVa; // OK
_jacobianOplus[3].block<3,3>(6,0) = -JPa; // OK
// Jacobians wrt Pose 2
// _jacobianOplus[3] 9*6矩阵 总体来说就是三个残差分别对pose2的旋转与平移p求导
_jacobianOplus[4].setZero();
// rotation
_jacobianOplus[4].block<3,3>(0,0) = invJr; // OK
@ -592,33 +614,43 @@ void EdgeInertial::linearizeOplus()
_jacobianOplus[4].block<3,3>(6,3) = Rbw1*Rwb2; // OK
// Jacobians wrt Velocity 2
// _jacobianOplus[3] 9*3矩阵 总体来说就是三个残差分别对pose2的速度求导
_jacobianOplus[5].setZero();
_jacobianOplus[5].block<3,3>(3,0) = Rbw1; // OK
}
// localmapping中imu初始化所用的边除了正常的几个优化变量外还优化了重力方向与尺度
EdgeInertialGS::EdgeInertialGS(IMU::Preintegrated *pInt):JRg(Converter::toMatrix3d(pInt->JRg)),
JVg(Converter::toMatrix3d(pInt->JVg)), JPg(Converter::toMatrix3d(pInt->JPg)), JVa(Converter::toMatrix3d(pInt->JVa)),
JPa(Converter::toMatrix3d(pInt->JPa)), mpInt(pInt), dt(pInt->dT)
{
// 准备工作把预积分类里面的值先取出来包含信息的是两帧之间n多个imu信息预积分来的
// This edge links 8 vertices
// 8元边
resize(8);
// 1. 定义重力
gI << 0, 0, -IMU::GRAVITY_VALUE;
// 2. 读取协方差矩阵的前9*9部分的逆矩阵该部分表示的是预积分测量噪声的协方差矩阵
cv::Mat cvInfo = pInt->C.rowRange(0, 9).colRange(0, 9).inv(cv::DECOMP_SVD);
// 转成eigen Matrix9d
Matrix9d Info;
for(int r=0;r<9;r++)
for(int c=0;c<9;c++)
Info(r,c)=cvInfo.at<float>(r,c);
// 3. 强制让其成为对角矩阵
Info = (Info+Info.transpose())/2;
// 4. 让特征值很小的时候置为0再重新计算信息矩阵暂不知这么操作的目的是什么先搞清楚操作流程吧
Eigen::SelfAdjointEigenSolver<Eigen::Matrix<double,9,9> > es(Info);
Eigen::Matrix<double,9,1> eigs = es.eigenvalues();
Eigen::Matrix<double, 9, 1> eigs = es.eigenvalues(); // 矩阵特征值
for(int i=0;i<9;i++)
if(eigs[i]<1e-12)
eigs[i]=0;
// asDiagonal 生成对角矩阵
Info = es.eigenvectors()*eigs.asDiagonal()*es.eigenvectors().transpose();
setInformation(Info);
}
// 计算误差
void EdgeInertialGS::computeError()
{
@ -638,6 +670,8 @@ void EdgeInertialGS::computeError()
const Eigen::Vector3d dV = Converter::toVector3d(mpInt->GetDeltaVelocity(b));
const Eigen::Vector3d dP = Converter::toVector3d(mpInt->GetDeltaPosition(b));
// 计算残差。广义上讲都是真实值 = 残差 + imu旋转为imu*残差=真实值
// dR.transpose() 为imu预积分的值VP1->estimate().Rwb.transpose() * VP2->estimate().Rwb 为相机的Rwc在乘上相机与imu的标定外参矩阵
const Eigen::Vector3d er = LogSO3(dR.transpose()*VP1->estimate().Rwb.transpose()*VP2->estimate().Rwb);
const Eigen::Vector3d ev = VP1->estimate().Rwb.transpose()*(s*(VV2->estimate() - VV1->estimate()) - g*dt) - dV;
const Eigen::Vector3d ep = VP1->estimate().Rwb.transpose()*(s*(VP2->estimate().twb - VP1->estimate().twb - VV1->estimate()*dt) - g*dt*dt/2) - dP;
@ -645,6 +679,7 @@ void EdgeInertialGS::computeError()
_error << er, ev, ep;
}
// 计算雅克比矩阵
void EdgeInertialGS::linearizeOplus()
{
const VertexPose* VP1 = static_cast<const VertexPose*>(_vertices[0]);
@ -655,53 +690,66 @@ void EdgeInertialGS::linearizeOplus()
const VertexVelocity* VV2 = static_cast<const VertexVelocity*>(_vertices[5]);
const VertexGDir* VGDir = static_cast<const VertexGDir*>(_vertices[6]);
const VertexScale* VS = static_cast<const VertexScale*>(_vertices[7]);
// 1. 获取偏置的该变量,因为要对这个东西求导
const IMU::Bias b(VA->estimate()[0],VA->estimate()[1],VA->estimate()[2],VG->estimate()[0],VG->estimate()[1],VG->estimate()[2]);
const IMU::Bias db = mpInt->GetDeltaBias(b);
// 陀螺仪的偏置改变量
Eigen::Vector3d dbg;
dbg << db.bwx, db.bwy, db.bwz;
const Eigen::Matrix3d Rwb1 = VP1->estimate().Rwb;
const Eigen::Matrix3d Rbw1 = Rwb1.transpose();
const Eigen::Matrix3d Rwb2 = VP2->estimate().Rwb;
const Eigen::Matrix3d Rwg = VGDir->estimate().Rwg;
const Eigen::Matrix3d Rwb1 = VP1->estimate().Rwb; // Ri
const Eigen::Matrix3d Rbw1 = Rwb1.transpose(); // Ri.t()
const Eigen::Matrix3d Rwb2 = VP2->estimate().Rwb; // Rj
const Eigen::Matrix3d Rwg = VGDir->estimate().Rwg; // Rwg
Eigen::MatrixXd Gm = Eigen::MatrixXd::Zero(3, 2);
Gm(0, 1) = -IMU::GRAVITY_VALUE;
Gm(1, 0) = IMU::GRAVITY_VALUE;
const double s = VS->estimate();
const Eigen::MatrixXd dGdTheta = Rwg*Gm;
// 预积分得来的dR
const Eigen::Matrix3d dR = Converter::toMatrix3d(mpInt->GetDeltaRotation(b));
const Eigen::Matrix3d eR = dR.transpose()*Rbw1*Rwb2;
const Eigen::Vector3d er = LogSO3(eR);
const Eigen::Matrix3d invJr = InverseRightJacobianSO3(er);
const Eigen::Matrix3d eR = dR.transpose() * Rbw1 * Rwb2; // r△Rij
const Eigen::Vector3d er = LogSO3(eR); // r△φij
const Eigen::Matrix3d invJr = InverseRightJacobianSO3(er); // Jr^-1(log(△Rij))
// 就很神奇_jacobianOplus个数等于边的个数里面的大小等于观测值维度也就是残差× 每个节点待优化值的维度
// Jacobians wrt Pose 1
// _jacobianOplus[0] 9*6矩阵 总体来说就是三个残差分别对pose1的旋转与平移p求导
_jacobianOplus[0].setZero();
// rotation
// (0,0)起点的3*3块表示旋转残差对pose1的旋转求导
_jacobianOplus[0].block<3,3>(0,0) = -invJr*Rwb2.transpose()*Rwb1;
// (3,0)起点的3*3块表示速度残差对pose1的旋转求导
_jacobianOplus[0].block<3,3>(3,0) = Skew(Rbw1*(s*(VV2->estimate() - VV1->estimate()) - g*dt));
// (6,0)起点的3*3块表示位置残差对pose1的旋转求导
_jacobianOplus[0].block<3,3>(6,0) = Skew(Rbw1*(s*(VP2->estimate().twb - VP1->estimate().twb
- VV1->estimate()*dt) - 0.5*g*dt*dt));
// translation
// (6,3)起点的3*3块表示位置残差对pose1的位置求导
_jacobianOplus[0].block<3,3>(6,3) = -s*Eigen::Matrix3d::Identity();
// Jacobians wrt Velocity 1
// _jacobianOplus[1] 9*3矩阵 总体来说就是三个残差分别对pose1的速度求导
_jacobianOplus[1].setZero();
_jacobianOplus[1].block<3,3>(3,0) = -s*Rbw1;
_jacobianOplus[1].block<3,3>(6,0) = -s*Rbw1*dt;
// Jacobians wrt Gyro bias
// _jacobianOplus[2] 9*3矩阵 总体来说就是三个残差分别对陀螺仪偏置的速度求导
_jacobianOplus[2].setZero();
_jacobianOplus[2].block<3,3>(0,0) = -invJr*eR.transpose()*RightJacobianSO3(JRg*dbg)*JRg;
_jacobianOplus[2].block<3,3>(3,0) = -JVg;
_jacobianOplus[2].block<3,3>(6,0) = -JPg;
// Jacobians wrt Accelerometer bias
// _jacobianOplus[3] 9*3矩阵 总体来说就是三个残差分别对加速度计偏置的速度求导
_jacobianOplus[3].setZero();
_jacobianOplus[3].block<3,3>(3,0) = -JVa;
_jacobianOplus[3].block<3,3>(6,0) = -JPa;
// Jacobians wrt Pose 2
// _jacobianOplus[3] 9*6矩阵 总体来说就是三个残差分别对pose2的旋转与平移p求导
_jacobianOplus[4].setZero();
// rotation
_jacobianOplus[4].block<3,3>(0,0) = invJr;
@ -709,15 +757,18 @@ void EdgeInertialGS::linearizeOplus()
_jacobianOplus[4].block<3,3>(6,3) = s*Rbw1*Rwb2;
// Jacobians wrt Velocity 2
// _jacobianOplus[3] 9*3矩阵 总体来说就是三个残差分别对pose2的速度求导
_jacobianOplus[5].setZero();
_jacobianOplus[5].block<3,3>(3,0) = s*Rbw1;
// Jacobians wrt Gravity direction
// _jacobianOplus[3] 9*2矩阵 总体来说就是三个残差分别对重力方向求导
_jacobianOplus[6].setZero();
_jacobianOplus[6].block<3,2>(3,0) = -Rbw1*dGdTheta*dt;
_jacobianOplus[6].block<3,2>(6,0) = -0.5*Rbw1*dGdTheta*dt*dt;
// Jacobians wrt scale factor
// _jacobianOplus[3] 9*1矩阵 总体来说就是三个残差分别对尺度求导
_jacobianOplus[7].setZero();
_jacobianOplus[7].block<3,1>(3,0) = Rbw1*(VV2->estimate()-VV1->estimate());
_jacobianOplus[7].block<3,1>(6,0) = Rbw1*(VP2->estimate().twb-VP1->estimate().twb-VV1->estimate()*dt);
@ -754,9 +805,13 @@ void EdgePriorPoseImu::linearizeOplus()
{
const VertexPose* VP = static_cast<const VertexPose*>(_vertices[0]);
const Eigen::Vector3d er = LogSO3(Rwb.transpose()*VP->estimate().Rwb);
// 就很神奇_jacobianOplus个数等于边的个数里面的大小等于观测值维度也就是3旋转3平移3速度6偏置× 每个节点待优化值的维度
// 源码可读性太差了。。。里面会自动分配矩阵大小,计算改变量时按照对应位置来
_jacobianOplus[0].setZero();
_jacobianOplus[0].block<3,3>(0,0) = InverseRightJacobianSO3(er);
_jacobianOplus[0].block<3,3>(3,3) = Rwb.transpose()*VP->estimate().Rwb;
// LOG(Rbw*R*EXP(φ)) = LOG(EXP(LOG(Rbw*R) + Jr(-1)*φ)) = LOG(Rbw*R) + Jr(-1)*φ
_jacobianOplus[0].block<3, 3>(0, 0) = InverseRightJacobianSO3(er); // Jr(-1)
// Rbw*(t + R*δt - twb) = Rbw*(t - twb) + Rbw*R*δt
_jacobianOplus[0].block<3, 3>(3, 3) = Rwb.transpose() * VP->estimate().Rwb; // Rbw*R
_jacobianOplus[1].setZero();
_jacobianOplus[1].block<3,3>(6,0) = Eigen::Matrix3d::Identity();
_jacobianOplus[2].setZero();
@ -859,6 +914,7 @@ Eigen::Matrix3d RightJacobianSO3(const double x, const double y, const double z)
}
}
// 反对称矩阵
Eigen::Matrix3d Skew(const Eigen::Vector3d &w)
{
Eigen::Matrix3d W;

View File

@ -27,6 +27,11 @@ namespace IMU
const float eps = 1e-4;
/**
* @brief R
* @param R
* @return
*/
cv::Mat NormalizeRotation(const cv::Mat &R)
{
cv::Mat U,w,Vt;
@ -34,6 +39,11 @@ cv::Mat NormalizeRotation(const cv::Mat &R)
return U*Vt;
}
/**
* @brief
* @param v 3
* @return
*/
cv::Mat Skew(const cv::Mat &v)
{
const float x = v.at<float>(0);
@ -44,6 +54,11 @@ cv::Mat Skew(const cv::Mat &v)
-y, x, 0);
}
/**
* @brief SO3
* @param xyz
* @return SO3
*/
cv::Mat ExpSO3(const float &x, const float &y, const float &z)
{
cv::Mat I = cv::Mat::eye(3,3,CV_32F);
@ -58,6 +73,11 @@ cv::Mat ExpSO3(const float &x, const float &y, const float &z)
return (I + W*sin(d)/d + W*W*(1.0f-cos(d))/d2);
}
/**
* @brief SO3
* @param xyz
* @return SO3
*/
Eigen::Matrix<double,3,3> ExpSO3(const double &x, const double &y, const double &z)
{
Eigen::Matrix<double,3,3> I = Eigen::MatrixXd::Identity(3,3);
@ -80,11 +100,21 @@ Eigen::Matrix<double,3,3> ExpSO3(const double &x, const double &y, const double
return (I + W*sin(d)/d + W*W*(1.0-cos(d))/d2);
}
/**
 * @brief Exponential map from so3 to SO3 (vector overload).
 * @param v 3x1 rotation vector (axis * angle), CV_32F
 * @return 3x3 rotation matrix
 */
cv::Mat ExpSO3(const cv::Mat &v)
{
    // Unpack the components and delegate to the scalar overload.
    const float rx = v.at<float>(0);
    const float ry = v.at<float>(1);
    const float rz = v.at<float>(2);
    return ExpSO3(rx, ry, rz);
}
/**
* @brief so3
* @param R SO3
* @return so3
*/
cv::Mat LogSO3(const cv::Mat &R)
{
const float tr = R.at<float>(0,0)+R.at<float>(1,1)+R.at<float>(2,2);
@ -102,6 +132,11 @@ cv::Mat LogSO3(const cv::Mat &R)
return theta*w/s;
}
/**
* @brief
* @param xyz
* @return Jr
*/
cv::Mat RightJacobianSO3(const float &x, const float &y, const float &z)
{
cv::Mat I = cv::Mat::eye(3,3,CV_32F);
@ -120,11 +155,21 @@ cv::Mat RightJacobianSO3(const float &x, const float &y, const float &z)
}
}
/**
 * @brief Right Jacobian of SO3 (vector overload).
 * @param v 3x1 so3 rotation vector, CV_32F
 * @return 3x3 right Jacobian Jr
 */
cv::Mat RightJacobianSO3(const cv::Mat &v)
{
    // Unpack the components and delegate to the scalar overload.
    const float rx = v.at<float>(0);
    const float ry = v.at<float>(1);
    const float rz = v.at<float>(2);
    return RightJacobianSO3(rx, ry, rz);
}
/**
* @brief
* @param xyz so3
* @return Jr^-1
*/
cv::Mat InverseRightJacobianSO3(const float &x, const float &y, const float &z)
{
cv::Mat I = cv::Mat::eye(3,3,CV_32F);
@ -143,6 +188,11 @@ cv::Mat InverseRightJacobianSO3(const float &x, const float &y, const float &z)
}
}
/**
* @brief
* @param v so3
* @return Jr^-1
*/
cv::Mat InverseRightJacobianSO3(const cv::Mat &v)
{
return InverseRightJacobianSO3(v.at<float>(0),v.at<float>(1),v.at<float>(2));
@ -190,6 +240,11 @@ IntegratedRotation::IntegratedRotation(const cv::Point3f &angVel, const Bias &im
}
}
/**
* @brief
* @param b_
* @param calib imu
*/
Preintegrated::Preintegrated(const Bias &b_, const Calib &calib)
{
Nga = calib.Cov.clone();
@ -206,6 +261,10 @@ Preintegrated::Preintegrated(Preintegrated* pImuPre): dT(pImuPre->dT), C(pImuPre
}
/**
* @brief
* @param pImuPre
*/
void Preintegrated::CopyFrom(Preintegrated* pImuPre)
{
std::cout << "Preintegrated: start clone" << std::endl;
@ -219,6 +278,7 @@ void Preintegrated::CopyFrom(Preintegrated* pImuPre)
dR = pImuPre->dR.clone();
dV = pImuPre->dV.clone();
dP = pImuPre->dP.clone();
// 旋转关于陀螺仪偏置变化的雅克比,以此类推
JRg = pImuPre->JRg.clone();
JVg = pImuPre->JVg.clone();
JVa = pImuPre->JVa.clone();
@ -234,7 +294,10 @@ void Preintegrated::CopyFrom(Preintegrated* pImuPre)
std::cout << "Preintegrated: end clone" << std::endl;
}
/**
* @brief
* @param b_
*/
void Preintegrated::Initialize(const Bias &b_)
{
dR = cv::Mat::eye(3, 3, CV_32F);
@ -249,13 +312,16 @@ void Preintegrated::Initialize(const Bias &b_)
Info = cv::Mat();
db = cv::Mat::zeros(6, 1, CV_32F);
b = b_;
bu=b_;
avgA = cv::Mat::zeros(3,1,CV_32F);
avgW = cv::Mat::zeros(3,1,CV_32F);
bu = b_; // 更新后的偏置
avgA = cv::Mat::zeros(3, 1, CV_32F); // 平均加速度
avgW = cv::Mat::zeros(3, 1, CV_32F); // 平均角速度
dT = 0.0f;
mvMeasurements.clear();
mvMeasurements.clear(); // 存放imu数据及dt
}
/**
* @brief mvMeasurements Optimizer::InertialOptimization
*/
void Preintegrated::Reintegrate()
{
std::unique_lock<std::mutex> lock(mMutex);
@ -299,8 +365,8 @@ void Preintegrated::IntegrateNewMeasurement(const cv::Point3f &acceleration, con
// Update delta position dP and velocity dV (rely on no-updated delta rotation)
// 根据没有更新的dR来更新dP与dV eq.(38)
dP = dP + dV*dt + 0.5f*dR*acc*dt*dt;
dV = dV + dR*acc*dt;
dP = dP + dV*dt + 0.5f*dR*acc*dt*dt; // 对应viorb论文的公式2的第三个位移积分
dV = dV + dR*acc*dt; // 对应viorb论文的公式2的第二个速度积分
// Compute velocity and position parts of matrices A and B (rely on non-updated delta rotation)
// 根据η_ij = A * η_i,j-1 + B_j-1 * η_j-1中的矩阵和矩阵对速度和位移进行更新
@ -335,9 +401,9 @@ void Preintegrated::IntegrateNewMeasurement(const cv::Point3f &acceleration, con
// 小量delta初始为0更新后通常也为0故省略了小量的更新
// Update covariance
// Step 3.更新协方差frost经典预积分论文的第63个公式推导了噪声ηa, ηg对dR dV dP 的影响
C.rowRange(0,9).colRange(0,9) = A*C.rowRange(0,9).colRange(0,9)*A.t() + B*Nga*B.t();
C.rowRange(0,9).colRange(0,9) = A*C.rowRange(0,9).colRange(0,9)*A.t() + B*Nga*B.t(); // B矩阵为9*6矩阵 Nga 6*6对角矩阵3个陀螺仪噪声的平方3个加速度计噪声的平方
// 这一部分最开始是0矩阵随着积分次数增加每次都加上随机游走偏置的信息矩阵
C.rowRange(9,15).colRange(9,15) = C.rowRange(9,15).colRange(9,15) + NgaWalk;
C.rowRange(9,15).colRange(9,15) = C.rowRange(9,15).colRange(9,15) + NgaWalk; // NgaWalk 6*6 随机游走对角矩阵
// Update rotation jacobian wrt bias correction
// 计算偏置的雅克比矩阵r对bg的导数∂ΔRij/∂bg = (ΔRjj-1) * ∂ΔRij-1/∂bg - Jr(j-1)*t
@ -350,6 +416,10 @@ void Preintegrated::IntegrateNewMeasurement(const cv::Point3f &acceleration, con
dT += dt;
}
/**
* @brief 32
* @param pPrev
*/
void Preintegrated::MergePrevious(Preintegrated* pPrev)
{
if (pPrev==this)
@ -376,6 +446,10 @@ void Preintegrated::MergePrevious(Preintegrated* pPrev)
}
/**
* @brief
* @param bu_
*/
void Preintegrated::SetNewBias(const Bias &bu_)
{
std::unique_lock<std::mutex> lock(mMutex);
@ -389,12 +463,22 @@ void Preintegrated::SetNewBias(const Bias &bu_)
db.at<float>(5) = bu_.baz-b.baz;
}
/**
 * @brief Difference between a given bias and the bias used during preintegration.
 * @param b_ new bias
 * @return component-wise difference b_ - b (accelerometer terms first, then gyroscope)
 */
IMU::Bias Preintegrated::GetDeltaBias(const Bias &b_)
{
    std::unique_lock<std::mutex> lock(mMutex);
    // Accelerometer bias increments.
    const float dbax = b_.bax - b.bax;
    const float dbay = b_.bay - b.bay;
    const float dbaz = b_.baz - b.baz;
    // Gyroscope bias increments.
    const float dbwx = b_.bwx - b.bwx;
    const float dbwy = b_.bwy - b.bwy;
    const float dbwz = b_.bwz - b.bwz;
    return IMU::Bias(dbax, dbay, dbaz, dbwx, dbwy, dbwz);
}
/**
 * @brief Preintegrated rotation corrected for a new gyroscope bias b_.
 * @param b_ new bias
 * @return bias-corrected delta rotation dR
 */
cv::Mat Preintegrated::GetDeltaRotation(const Bias &b_)
{
std::unique_lock<std::mutex> lock(mMutex);
@ -405,6 +489,11 @@ cv::Mat Preintegrated::GetDeltaRotation(const Bias &b_)
return NormalizeRotation(dR*ExpSO3(JRg*dbg));
}
/**
 * @brief Preintegrated velocity corrected for a new bias b_.
 * @param b_ new bias
 * @return bias-corrected delta velocity dV
 */
cv::Mat Preintegrated::GetDeltaVelocity(const Bias &b_)
{
std::unique_lock<std::mutex> lock(mMutex);
@ -414,6 +503,11 @@ cv::Mat Preintegrated::GetDeltaVelocity(const Bias &b_)
return dV + JVg*dbg + JVa*dba;
}
/**
 * @brief Preintegrated position corrected for a new bias b_.
 * @param b_ new bias
 * @return bias-corrected delta position dP
 */
cv::Mat Preintegrated::GetDeltaPosition(const Bias &b_)
{
std::unique_lock<std::mutex> lock(mMutex);
@ -423,60 +517,100 @@ cv::Mat Preintegrated::GetDeltaPosition(const Bias &b_)
return dP + JPg*dbg + JPa*dba;
}
/**
 * @brief Preintegrated rotation corrected with the stored bias update db.
 *        Applies the first-order correction dR * Exp(JRg * δbg) on the manifold.
 * @return corrected (and re-normalized) delta rotation
 */
cv::Mat Preintegrated::GetUpdatedDeltaRotation()
{
    std::unique_lock<std::mutex> lock(mMutex);
    // Rows 0..2 of db hold the gyroscope bias increment δbg.
    const cv::Mat dbg = db.rowRange(0, 3);
    return NormalizeRotation(dR * ExpSO3(JRg * dbg));
}
/**
 * @brief Preintegrated velocity corrected with the stored bias update db.
 *        First-order correction: dV + JVg*δbg + JVa*δba.
 * @return corrected delta velocity
 */
cv::Mat Preintegrated::GetUpdatedDeltaVelocity()
{
    std::unique_lock<std::mutex> lock(mMutex);
    // Rows 0..2: gyroscope increment δbg; rows 3..5: accelerometer increment δba.
    const cv::Mat dbg = db.rowRange(0, 3);
    const cv::Mat dba = db.rowRange(3, 6);
    return dV + JVg * dbg + JVa * dba;
}
/**
 * @brief Preintegrated position corrected with the stored bias update db.
 *        First-order correction: dP + JPg*δbg + JPa*δba.
 * @return corrected delta position
 */
cv::Mat Preintegrated::GetUpdatedDeltaPosition()
{
    std::unique_lock<std::mutex> lock(mMutex);
    // Rows 0..2: gyroscope increment δbg; rows 3..5: accelerometer increment δba.
    const cv::Mat dbg = db.rowRange(0, 3);
    const cv::Mat dba = db.rowRange(3, 6);
    return dP + JPg * dbg + JPa * dba;
}
/**
 * @brief Uncorrected preintegrated rotation (no bias update applied).
 * @return deep copy of dR
 */
cv::Mat Preintegrated::GetOriginalDeltaRotation()
{
    std::unique_lock<std::mutex> lock(mMutex);
    // Clone so the caller cannot alias the internal state.
    cv::Mat rotationCopy = dR.clone();
    return rotationCopy;
}
/**
 * @brief Uncorrected preintegrated velocity (no bias update applied).
 * @return deep copy of dV
 */
cv::Mat Preintegrated::GetOriginalDeltaVelocity()
{
    std::unique_lock<std::mutex> lock(mMutex);
    // Clone so the caller cannot alias the internal state.
    cv::Mat velocityCopy = dV.clone();
    return velocityCopy;
}
/**
 * @brief Uncorrected preintegrated position (no bias update applied).
 * @return deep copy of dP
 */
cv::Mat Preintegrated::GetOriginalDeltaPosition()
{
    std::unique_lock<std::mutex> lock(mMutex);
    // Clone so the caller cannot alias the internal state.
    cv::Mat positionCopy = dP.clone();
    return positionCopy;
}
/**
 * @brief Bias that was used when the measurements were integrated.
 * @return copy of b
 */
Bias Preintegrated::GetOriginalBias()
{
    std::unique_lock<std::mutex> lock(mMutex);
    Bias biasCopy = b;
    return biasCopy;
}
/**
 * @brief Most recent bias set via SetNewBias.
 * @return copy of bu
 */
Bias Preintegrated::GetUpdatedBias()
{
    std::unique_lock<std::mutex> lock(mMutex);
    Bias biasCopy = bu;
    return biasCopy;
}
/**
 * @brief Stored bias update vector db (δbg stacked over δba, 6x1).
 * @return deep copy of db
 */
cv::Mat Preintegrated::GetDeltaBias()
{
    std::unique_lock<std::mutex> lock(mMutex);
    // Clone so the caller cannot alias the internal state.
    cv::Mat dbCopy = db.clone();
    return dbCopy;
}
/**
* @brief C,9~15
* @return
*/
Eigen::Matrix<double, 15, 15> Preintegrated::GetInformationMatrix()
{
std::unique_lock<std::mutex> lock(mMutex);
@ -495,6 +629,10 @@ Eigen::Matrix<double,15,15> Preintegrated::GetInformationMatrix()
return EI;
}
/**
* @brief
* @param b
*/
void Bias::CopyFrom(Bias &b)
{
bax = b.bax;
@ -529,21 +667,34 @@ std::ostream& operator<< (std::ostream &out, const Bias &b)
return out;
}
/**
* @brief
* @param Tbc_ 姿
* @param ng
* @param na
* @param ngw
* @param naw
*/
void Calib::Set(const cv::Mat &Tbc_, const float &ng, const float &na, const float &ngw, const float &naw)
{
Tbc = Tbc_.clone();
Tcb = cv::Mat::eye(4,4,CV_32F);
Tcb.rowRange(0,3).colRange(0,3) = Tbc.rowRange(0,3).colRange(0,3).t();
Tcb.rowRange(0,3).col(3) = -Tbc.rowRange(0,3).colRange(0,3).t()*Tbc.rowRange(0,3).col(3);
// 噪声协方差
Cov = cv::Mat::eye(6,6,CV_32F);
const float ng2 = ng*ng;
const float na2 = na*na;
Cov.at<float>(0,0) = ng2;
Cov.at<float>(1,1) = ng2;
Cov.at<float>(2,2) = ng2;
Cov.at<float>(3,3) = na2;
Cov.at<float>(4,4) = na2;
Cov.at<float>(5,5) = na2;
// 随机游走协方差
CovWalk = cv::Mat::eye(6,6,CV_32F);
const float ngw2 = ngw*ngw;
const float naw2 = naw*naw;
@ -555,6 +706,10 @@ void Calib::Set(const cv::Mat &Tbc_, const float &ng, const float &na, const flo
CovWalk.at<float>(5,5) = naw2;
}
/**
* @brief imu
* @param calib imu
*/
Calib::Calib(const Calib &calib)
{
Tbc = calib.Tbc.clone();
@ -565,4 +720,4 @@ Calib::Calib(const Calib &calib)
} //namespace IMU
} //namespace ORB_SLAM2
} //namespace ORB_SLAM3

View File

@ -649,22 +649,33 @@ bool compFirst(const pair<float, KeyFrame*> & a, const pair<float, KeyFrame*> &
}
/**
 * @brief Find the N best loop-closing and map-merging candidate keyframes for pKF,
 *        using BoW word overlap and similarity scores accumulated over covisibility groups.
 *
 * @param[in]  pKF            query keyframe (its covisible keyframes are excluded from the search)
 * @param[out] vpLoopCand     loop candidates (keyframes in the same map as pKF)
 * @param[out] vpMergeCand    merge candidates (keyframes in a different map than pKF)
 * @param[in]  nNumCandidates maximum number of candidates of each kind to return
 */
void KeyFrameDatabase::DetectNBestCandidates(KeyFrame *pKF, vector<KeyFrame*> &vpLoopCand, vector<KeyFrame*> &vpMergeCand, int nNumCandidates)
{
// Step 1统计与当前关键帧有相同单词的关键帧
list<KeyFrame*> lKFsSharingWords;
//set<KeyFrame*> spInsertedKFsSharing;
// 当前关键帧的共视关键帧(避免将当前关键帧的共视关键帧加入回环检测)
set<KeyFrame*> spConnectedKF;
// Search all keyframes that share a word with current frame
{
unique_lock<mutex> lock(mMutex);
// 拿到当前关键帧的共视关键帧
spConnectedKF = pKF->GetConnectedKeyFrames();
// 遍历当前关键帧bow向量的每个单词
for(DBoW2::BowVector::const_iterator vit=pKF->mBowVec.begin(), vend=pKF->mBowVec.end(); vit != vend; vit++)
{
{ // 拿到当前单词的逆向索引(所有有当前单词的关键帧)
list<KeyFrame*> &lKFs = mvInvertedFile[vit->first];
// 遍历每个有该单词的关键帧
for(list<KeyFrame*>::iterator lit=lKFs.begin(), lend= lKFs.end(); lit!=lend; lit++)
{
KeyFrame* pKFi=*lit;
@ -672,16 +683,21 @@ void KeyFrameDatabase::DetectNBestCandidates(KeyFrame *pKF, vector<KeyFrame*> &v
{
continue;
}*/
// 如果此关键帧没有被当前关键帧访问过(防止重复添加)
if(pKFi->mnPlaceRecognitionQuery!=pKF->mnId)
{
// 初始化公共单词数为0
pKFi->mnPlaceRecognitionWords=0;
// 如果该关键帧不是当前关键帧的共视关键帧
if(!spConnectedKF.count(pKFi))
{
// 标记改关键帧被当前关键帧访问到
pKFi->mnPlaceRecognitionQuery=pKF->mnId;
// 把当前关键帧添加到有公共单词的关键帧列表中
lKFsSharingWords.push_back(pKFi);
}
}
// 递增该关键帧与当前关键帧的公共单词数
pKFi->mnPlaceRecognitionWords++;
/*if(spInsertedKFsSharing.find(pKFi) == spInsertedKFsSharing.end())
{
@ -692,59 +708,77 @@ void KeyFrameDatabase::DetectNBestCandidates(KeyFrame *pKF, vector<KeyFrame*> &v
}
}
}
// 如果没有有公共单词的关键帧,直接返回
if(lKFsSharingWords.empty())
return;
// Only compare against those keyframes that share enough words
// Step 2 统计所有候选帧中与当前关键帧的公共单词数最多的单词数,并筛选
int maxCommonWords=0;
for(list<KeyFrame*>::iterator lit=lKFsSharingWords.begin(), lend= lKFsSharingWords.end(); lit!=lend; lit++)
{
if((*lit)->mnPlaceRecognitionWords>maxCommonWords)
maxCommonWords=(*lit)->mnPlaceRecognitionWords;
}
// 取0.8倍为阀值
int minCommonWords = maxCommonWords*0.8f;
// 这里的pair是 <相似度,候选帧的指针> : 记录所有大于minCommonWords的候选帧与当前关键帧的相似度
list<pair<float,KeyFrame*> > lScoreAndMatch;
// 只是个统计变量,貌似没有用到
int nscores=0;
// Compute similarity score.
// 对所有大于minCommonWords的候选帧计算相似度
// 遍历所有有公共单词的候选帧
for(list<KeyFrame*>::iterator lit=lKFsSharingWords.begin(), lend= lKFsSharingWords.end(); lit!=lend; lit++)
{
KeyFrame* pKFi = *lit;
// 如果当前帧的公共单词数大于minCommonWords
if(pKFi->mnPlaceRecognitionWords>minCommonWords)
{
nscores++;
// 计算相似度
float si = mpVoc->score(pKF->mBowVec,pKFi->mBowVec);
// 记录该候选帧与当前帧的相似度
pKFi->mPlaceRecognitionScore=si;
// 记录到容器里, 每个元素是<相似度,候选帧的指针>
lScoreAndMatch.push_back(make_pair(si,pKFi));
}
}
// 如果为空,直接返回,表示没有符合上述条件的关键帧
if(lScoreAndMatch.empty())
return;
// Step 3 : 用小组得分排序得到top3总分里最高分的关键帧,作为候选帧
// 统计以组为单位的累计相似度和组内相似度最高的关键帧, 每个pair为<小组总相似度,组内相似度最高的关键帧指针>
list<pair<float,KeyFrame*> > lAccScoreAndMatch;
float bestAccScore = 0;
// Lets now accumulate score by covisibility
// 变量所有被lScoreAndMatch记录的pair <相似度,候选关键帧>
for(list<pair<float,KeyFrame*> >::iterator it=lScoreAndMatch.begin(), itend=lScoreAndMatch.end(); it!=itend; it++)
{
// 候选关键帧
KeyFrame* pKFi = it->second;
// 与候选关键帧共视关系最好的10个关键帧
vector<KeyFrame*> vpNeighs = pKFi->GetBestCovisibilityKeyFrames(10);
// 初始化最大相似度为该候选关键帧自己的相似度
float bestScore = it->first;
// 初始化小组累计得分为改候选关键帧自己的相似度
float accScore = bestScore;
// 初始化组内相似度最高的帧为该候选关键帧本身
KeyFrame* pBestKF = pKFi;
// 遍历与当前关键帧共视关系最好的10帧
for(vector<KeyFrame*>::iterator vit=vpNeighs.begin(), vend=vpNeighs.end(); vit!=vend; vit++)
{
KeyFrame* pKF2 = *vit;
// 如果改关键帧没有被当前关键帧访问过(这里标记的是有没有公共单词)则跳过
if(pKF2->mnPlaceRecognitionQuery!=pKF->mnId)
continue;
// 累加小组总分
accScore+=pKF2->mPlaceRecognitionScore;
// 如果大与组内最高分,则重新记录
if(pKF2->mPlaceRecognitionScore>bestScore)
{
pBestKF=pKF2;
@ -752,39 +786,51 @@ void KeyFrameDatabase::DetectNBestCandidates(KeyFrame *pKF, vector<KeyFrame*> &v
}
}
// 统计以组为单位的累计相似度和组内相似度最高的关键帧, 每个pair为<小组总相似度,组内相似度最高的关键帧指针>
lAccScoreAndMatch.push_back(make_pair(accScore,pBestKF));
// 统计最高得分, 这个bestAccSocre没有用到
if(accScore>bestAccScore)
bestAccScore=accScore;
}
//cout << "Amount of candidates: " << lAccScoreAndMatch.size() << endl;
// 按相似度从大到小排序
lAccScoreAndMatch.sort(compFirst);
// 最后返回的变量, 记录回环的候选帧
vpLoopCand.reserve(nNumCandidates);
// 最后返回的变量, 记录融合候选帧
vpMergeCand.reserve(nNumCandidates);
// 避免重复添加
set<KeyFrame*> spAlreadyAddedKF;
//cout << "Candidates in score order " << endl;
//for(list<pair<float,KeyFrame*> >::iterator it=lAccScoreAndMatch.begin(), itend=lAccScoreAndMatch.end(); it!=itend; it++)
int i = 0;
list<pair<float,KeyFrame*> >::iterator it=lAccScoreAndMatch.begin();
// 遍历lAccScoreAndMatch中所有的pair, 每个pair为<小组总相似度,组内相似度最高的关键帧指针>
while(i < lAccScoreAndMatch.size() && (vpLoopCand.size() < nNumCandidates || vpMergeCand.size() < nNumCandidates))
{
//cout << "Accum score: " << it->first << endl;
// 拿到候选关键帧的指针
KeyFrame* pKFi = it->second;
if(pKFi->isBad())
continue;
// 如果没有被重复添加
if(!spAlreadyAddedKF.count(pKFi))
{
// 如果候选帧与当前关键帧在同一个地图里,且候选者数量还不足够
if(pKF->GetMap() == pKFi->GetMap() && vpLoopCand.size() < nNumCandidates)
{
// 添加到回环候选帧里
vpLoopCand.push_back(pKFi);
}
// 如果候选者与当前关键帧不再同一个地图里, 且候选者数量还不足够, 且候选者所在地图不是bad
else if(pKF->GetMap() != pKFi->GetMap() && vpMergeCand.size() < nNumCandidates && !pKFi->GetMap()->IsBad())
{
// 添加到融合候选帧里
vpMergeCand.push_back(pKFi);
}
// 防止重复添加
spAlreadyAddedKF.insert(pKFi);
}
i++;

View File

@ -120,12 +120,14 @@ void LocalMapping::Run()
// Step 4 当前关键帧与相邻关键帧通过三角化产生新的地图点,使得跟踪更稳
CreateNewMapPoints();
mbAbortBA = false;
mbAbortBA = false; // 注意orbslam2中放在了函数SearchInNeighbors用到了mbAbortBA后面应该放这里更合适
// 已经处理完队列中的最后的一个关键帧
if(!CheckNewKeyFrames())
{
// Find more matches in neighbor keyframes and fuse point duplications
// Step 5 检查并融合当前关键帧与相邻关键帧帧(两级相邻)中重复的地图点
// 先完成相邻关键帧与当前关键帧的地图点的融合(在相邻关键帧中查找当前关键帧的地图点),
// 再完成当前关键帧与相邻关键帧的地图点的融合(在当前关键帧中查找当前相邻关键帧的地图点)
SearchInNeighbors();
}
@ -144,19 +146,19 @@ void LocalMapping::Run()
// 已经处理完队列中的最后的一个关键帧并且闭环检测没有请求停止LocalMapping
if(!CheckNewKeyFrames() && !stopRequested())
{
// 地图中关键帧数目大于2个
// 当前地图中关键帧数目大于2个
if(mpAtlas->KeyFramesInMap()>2)
{
// Step 6.1 处于IMU模式并且当前关键帧所在的地图已经完成IMU初始化
if(mbInertial && mpCurrentKeyFrame->GetMap()->isImuInitialized())
{
// 计算上一帧到当前帧相机光心的距离 + 上上帧到上一帧相机光心的距离
// 计算上一关键帧到当前关键帧相机光心的距离 + 上上关键帧到上一关键帧相机光心的距离
float dist = cv::norm(mpCurrentKeyFrame->mPrevKF->GetCameraCenter() - mpCurrentKeyFrame->GetCameraCenter()) +
cv::norm(mpCurrentKeyFrame->mPrevKF->mPrevKF->GetCameraCenter() - mpCurrentKeyFrame->mPrevKF->GetCameraCenter());
// 如果距离大于5厘米记录当前KF和上一KF时间戳的差累加到mTinit
if(dist>0.05)
mTinit += mpCurrentKeyFrame->mTimeStamp - mpCurrentKeyFrame->mPrevKF->mTimeStamp;
// 当前关键帧所在的地图已经完成IMU BA2
// 当前关键帧所在的地图尚未完成IMU BA2IMU第三阶段初始化
if(!mpCurrentKeyFrame->GetMap()->GetIniertialBA2())
{
// 如果累计时间差小于10s 并且 距离小于2厘米认为运动幅度太小不足以初始化IMU将mbBadImu设置为true
@ -166,9 +168,10 @@ void LocalMapping::Run()
unique_lock<mutex> lock(mMutexReset);
mbResetRequestedActiveMap = true;
mpMapToReset = mpCurrentKeyFrame->GetMap();
mbBadImu = true;
mbBadImu = true; //在跟踪线程里会重置当前活跃地图
}
}
// 判断成功跟踪匹配的点数是否足够多
// 条件---------1.1、跟踪成功的内点数目大于75-----1.2、并且是单目--或--2.1、跟踪成功的内点数目大于100-----2.2、并且不是单目
bool bLarge = ((mpTracker->GetMatchesInliers()>75)&&mbMonocular)||((mpTracker->GetMatchesInliers()>100)&&!mbMonocular);
Optimizer::LocalInertialBA(mpCurrentKeyFrame, &mbAbortBA, mpCurrentKeyFrame->GetMap(),num_FixedKF_BA,num_OptKF_BA,num_MPs_BA,num_edges_BA, bLarge, !mpCurrentKeyFrame->GetMap()->GetIniertialBA2());
@ -206,9 +209,11 @@ void LocalMapping::Run()
#endif
// Initialize IMU here
// Step 7 当前关键帧所在地图的IMU初始化
// Step 7 当前关键帧所在地图未完成IMU初始化第一阶段
if(!mpCurrentKeyFrame->GetMap()->isImuInitialized() && mbInertial)
{
// 在函数InitializeIMU里设置IMU成功初始化标志 SetImuInitialized
// IMU第一阶段初始化
if (mbMonocular)
InitializeIMU(1e2, 1e10, true);
else
@ -217,6 +222,7 @@ void LocalMapping::Run()
// Check redundant local Keyframes
// 跟踪中关键帧插入条件比较松交给LocalMapping线程的关键帧会比较密这里再删除冗余
// Step 8 检测并剔除当前帧相邻的关键帧中冗余的关键帧
// 冗余的判定该关键帧的90%的地图点可以被其它关键帧观测到
KeyFrameCulling();
@ -227,16 +233,16 @@ void LocalMapping::Run()
timeKFCulling_ms = std::chrono::duration_cast<std::chrono::duration<double,std::milli> >(time_EndKFCulling - time_EndLBA).count();
vdKFCullingSync_ms.push_back(timeKFCulling_ms);
#endif
// Step 9 如果累计时间差小于100s 并且 是IMU模式进行VIBA
// Step 9 如果距离初始化成功累计时间差小于100s 并且 是IMU模式进行VIBA
if ((mTinit<100.0f) && mbInertial)
{
// Step 9.1 根据条件判断是否进行VIBA1
// Step 9.1 根据条件判断是否进行VIBA1IMU第二阶段初始化
// 条件1、当前关键帧所在的地图还未完成IMU初始化---并且--------2、正常跟踪状态----------
if(mpCurrentKeyFrame->GetMap()->isImuInitialized() && mpTracker->mState==Tracking::OK) // Enter here everytime local-mapping is called
{
// 当前关键帧所在的地图还未完成VIBA 1
if(!mpCurrentKeyFrame->GetMap()->GetIniertialBA1()){
// 如果累计时间差大于5s开始VIBA 1
// 如果累计时间差大于5s开始VIBA1IMU第二阶段初始化
if (mTinit>5.0f)
{
cout << "start VIBA 1" << endl;
@ -250,10 +256,10 @@ void LocalMapping::Run()
}
}
//else if (mbNotBA2){
// Step 9.2 根据条件判断是否进行VIBA2
// Step 9.2 根据条件判断是否进行VIBA2IMU第三阶段初始化
// 当前关键帧所在的地图还未完成VIBA 2
else if(!mpCurrentKeyFrame->GetMap()->GetIniertialBA2()){
// 如果累计时间差大于15s开始VIBA 2
// 如果累计时间差大于15s开始VIBA2IMU第三阶段初始化
if (mTinit>15.0f){ // 15.0f
cout << "start VIBA 2" << endl;
mpCurrentKeyFrame->GetMap()->SetIniertialBA2();
@ -267,7 +273,7 @@ void LocalMapping::Run()
}
// scale refinement
// Step 9.3 尺度优化
// Step 9.3 在关键帧小于100时会在满足一定时间间隔后多次进行尺度、重力方向优化
if (((mpAtlas->KeyFramesInMap())<=100) &&
((mTinit>25.0f && mTinit<25.5f)||
(mTinit>35.0f && mTinit<35.5f)||
@ -409,6 +415,9 @@ void LocalMapping::ProcessNewKeyFrame()
mpAtlas->AddKeyFrame(mpCurrentKeyFrame);
}
/**
 * @brief Drain the new-keyframe queue, processing every pending keyframe.
 */
void LocalMapping::EmptyQueue()
{
while(CheckNewKeyFrames())
@ -487,10 +496,12 @@ void LocalMapping::CreateNewMapPoints()
// Step 1在当前关键帧的共视关键帧中找到共视程度最高的nn帧相邻关键帧
vector<KeyFrame*> vpNeighKFs = mpCurrentKeyFrame->GetBestCovisibilityKeyFrames(nn);
// imu模式下在附近添加更多的帧进来
if (mbInertial)
{
KeyFrame* pKF = mpCurrentKeyFrame;
int count=0;
// 在总数不够且上一关键帧存在,且添加的帧没有超过总数时
while((vpNeighKFs.size()<=nn)&&(pKF->mPrevKF)&&(count++<nn))
{
vector<KeyFrame*>::iterator it = std::find(vpNeighKFs.begin(), vpNeighKFs.end(), pKF->mPrevKF);
@ -561,7 +572,7 @@ void LocalMapping::CreateNewMapPoints()
const float medianDepthKF2 = pKF2->ComputeSceneMedianDepth(2);
// baseline与景深的比例
const float ratioBaselineDepth = baseline/medianDepthKF2;
// 如果比例特别小基线太短恢复3D点不准那么跳过当前邻接的关键帧不生成3D点
// 如果特别远(比例特别小)基线太短恢复3D点不准那么跳过当前邻接的关键帧不生成3D点
if(ratioBaselineDepth<0.01)
continue;
}
@ -573,10 +584,11 @@ void LocalMapping::CreateNewMapPoints()
// Search matches that fullfil epipolar constraint
// Step 5通过BoW对两关键帧的未匹配的特征点快速匹配用极线约束抑制离群点生成新的匹配点对
vector<pair<size_t,size_t> > vMatchedIndices;
// imu相关非imu时为false
bool bCoarse = mbInertial &&
((!mpCurrentKeyFrame->GetMap()->GetIniertialBA2() && mpCurrentKeyFrame->GetMap()->GetIniertialBA1())||
mpTracker->mState==Tracking::RECENTLY_LOST);
// 通过极线约束的方式找到匹配点且该点还没有成为MP注意非单目已经生成的MP这里直接跳过不做匹配所以最后并不会覆盖掉特征点对应的MP
matcher.SearchForTriangulation_(mpCurrentKeyFrame,pKF2,F12,vMatchedIndices,false,bCoarse);
auto Rcw2 = pKF2->GetRotation_();
@ -611,8 +623,10 @@ void LocalMapping::CreateNewMapPoints()
const cv::KeyPoint &kp1 = (mpCurrentKeyFrame -> NLeft == -1) ? mpCurrentKeyFrame->mvKeysUn[idx1]
: (idx1 < mpCurrentKeyFrame -> NLeft) ? mpCurrentKeyFrame -> mvKeys[idx1]
: mpCurrentKeyFrame -> mvKeysRight[idx1 - mpCurrentKeyFrame -> NLeft];
// mvuRight中存放着极限校准后双目特征点在右目对应的像素横坐标如果不是基线校准的双目或者没有找到匹配点其值将为-1或者rgbd
const float kp1_ur=mpCurrentKeyFrame->mvuRight[idx1];
bool bStereo1 = (!mpCurrentKeyFrame->mpCamera2 && kp1_ur>=0);
// 查看点idx1是否为右目的点
const bool bRight1 = (mpCurrentKeyFrame -> NLeft == -1 || idx1 < mpCurrentKeyFrame -> NLeft) ? false
: true;
// 当前匹配在邻接关键帧中的特征点
@ -620,11 +634,13 @@ void LocalMapping::CreateNewMapPoints()
: (idx2 < pKF2 -> NLeft) ? pKF2 -> mvKeys[idx2]
: pKF2 -> mvKeysRight[idx2 - pKF2 -> NLeft];
// mvuRight中存放着双目的深度值如果不是双目其值将为-1
// mvuRight中存放着极限校准后双目特征点在右目对应的像素横坐标如果不是基线校准的双目或者没有找到匹配点其值将为-1或者rgbd
const float kp2_ur = pKF2->mvuRight[idx2];
bool bStereo2 = (!pKF2->mpCamera2 && kp2_ur>=0);
// 查看点idx2是否为右目的点
const bool bRight2 = (pKF2 -> NLeft == -1 || idx2 < pKF2 -> NLeft) ? false
: true;
// 当目前为左右目时,确定两个点所在相机之间的位姿关系
if(mpCurrentKeyFrame->mpCamera2 && pKF2->mpCamera2){
if(bRight1 && bRight2){
Rcw1 = mpCurrentKeyFrame->GetRightRotation_();
@ -724,10 +740,20 @@ void LocalMapping::CreateNewMapPoints()
// Step 6.4三角化恢复3D点
cv::Matx31f x3D;
bool bEstimated = false;
// cosParallaxRays>0 && (bStereo1 || bStereo2 || cosParallaxRays<0.9998)表明视线角正常
// cosParallaxRays<cosParallaxStereo表明前后帧视线角比双目视线角大所以用前后帧三角化而来反之使用双目的如果没有双目则跳过
// 视差角度小时用三角法恢复3D点视差角大时离相机近用双目恢复3D点双目以及深度有效
if(cosParallaxRays<cosParallaxStereo && cosParallaxRays>0 && (bStereo1 || bStereo2 ||
(cosParallaxRays<0.9998 && mbInertial) || (cosParallaxRays<0.9998 && !mbInertial)))
{
// Linear Triangulation Method
// 见Initializer.cpp的Triangulate函数,A矩阵构建的方式类似不同的是乘的反对称矩阵那个是像素坐标构成的而这个是归一化坐标构成的
// Pc = Tcw*Pw 此处Tcw行数默认为3因为不会计算第4行所以没有去掉
// 左右两面乘Pc的反对称矩阵 [Pc]x * Tcw *Pw = 0 构成了A矩阵中间涉及一个尺度a因为都是归一化平面但右面是0所以直接可以约掉不影响最后的尺度
// 0 -1 y Tcw.row(0) -Tcw.row(1) + y*Tcw.row(2)
// 1 0 -x * Tcw.row(1) = Tcw.row(0) - x*Tcw.row(2)
// -y x 0 Tcw.row(2) x*Tcw.row(1) - y*Tcw.row(0)
// 发现上述矩阵线性相关所以取前两维两个点构成了4行的矩阵就是如下的操作求出的是4维的结果[X,Y,Z,A]所以需要除以最后一维使之为1就成了[X,Y,Z,1]这种齐次形式
cv::Matx14f A_r0 = xn1(0) * Tcw1.row(2) - Tcw1.row(0);
cv::Matx14f A_r1 = xn1(1) * Tcw1.row(2) - Tcw1.row(1);
cv::Matx14f A_r2 = xn2(0) * Tcw2.row(2) - Tcw2.row(0);
@ -1044,7 +1070,12 @@ void LocalMapping::SearchInNeighbors()
mpCurrentKeyFrame->UpdateConnections();
}
// 根据两关键帧的姿态计算两个关键帧之间的基本矩阵
/**
 * @brief Compute the fundamental matrix between two keyframes from their poses.
 * @param pKF1 keyframe 1
 * @param pKF2 keyframe 2
 * @return fundamental matrix F12
 */
cv::Mat LocalMapping::ComputeF12(KeyFrame *&pKF1, KeyFrame *&pKF2)
{
// 先构造两帧之间的R12,t12
@ -1139,7 +1170,9 @@ void LocalMapping::Release()
cout << "Local Mapping RELEASE" << endl;
}
// 查看当前是否允许接受关键帧
/**
 * @brief Whether LocalMapping currently accepts new keyframes (queried by the tracking thread).
 */
bool LocalMapping::AcceptKeyFrames()
{
unique_lock<mutex> lock(mMutexAccept);
@ -1153,7 +1186,9 @@ void LocalMapping::SetAcceptKeyFrames(bool flag)
mbAcceptKeyFrames=flag;
}
// 设置 mbnotStop标志的状态
/**
 * @brief Set the state of the mbNotStop flag, which prevents LocalMapping from being stopped while set.
 */
bool LocalMapping::SetNotStop(bool flag)
{
unique_lock<mutex> lock(mMutexStop);
@ -1166,7 +1201,9 @@ bool LocalMapping::SetNotStop(bool flag)
return true;
}
// 终止BA
/**
 * @brief Abort the currently running bundle adjustment by raising the mbAbortBA flag.
 */
void LocalMapping::InterruptBA()
{
mbAbortBA = true;
@ -1192,15 +1229,17 @@ void LocalMapping::KeyFrameCulling()
// scaleLevelipKFi的金字塔尺度
// scaleLevelpKF的金字塔尺度
const int Nd = 21; // MODIFICATION_STEREO_IMU 20 This should be the same than that one from LIBA
mpCurrentKeyFrame->UpdateBestCovisibles();
mpCurrentKeyFrame->UpdateBestCovisibles(); // 更新共视关系
// 1. 根据Covisibility Graph提取当前帧的共视关键帧
vector<KeyFrame*> vpLocalKeyFrames = mpCurrentKeyFrame->GetVectorCovisibleKeyFrames();
float redundant_th;
// 非IMU时
if(!mbInertial)
redundant_th = 0.9;
else if (mbMonocular)
else if (mbMonocular) // imu 且单目时
redundant_th = 0.9;
else
else // 其他imu时
redundant_th = 0.5;
const bool bInitImu = mpAtlas->isImuInitialized();
@ -1212,6 +1251,7 @@ void LocalMapping::KeyFrameCulling()
{
int count = 0;
KeyFrame* aux_KF = mpCurrentKeyFrame;
// 找到第前21个关键帧的关键帧id
while(count<Nd && aux_KF->mPrevKF)
{
aux_KF = aux_KF->mPrevKF;
@ -1306,18 +1346,22 @@ void LocalMapping::KeyFrameCulling()
// Step 4该关键帧90%以上的有效地图点被判断为冗余的,则删除该关键帧
if(nRedundantObservations>redundant_th*nMPs)
{
// imu模式下需要更改前后关键帧的连续性且预积分要叠加起来
if (mbInertial)
{
// 关键帧少于Nd个跳过不删
if (mpAtlas->KeyFramesInMap()<=Nd)
continue;
// 关键帧与当前关键帧id差一个跳过不删
if(pKF->mnId>(mpCurrentKeyFrame->mnId-2))
continue;
// 关键帧具有前后关键帧
if(pKF->mPrevKF && pKF->mNextKF)
{
const float t = pKF->mNextKF->mTimeStamp-pKF->mPrevKF->mTimeStamp;
// 下面两个括号里的内容一模一样
// imu初始化了且距当前帧的ID超过21且前后两个关键帧时间间隔小于3s
// 或者时间间隔小于0.5s
if((bInitImu && (pKF->mnId<last_ID) && t<3.) || (t<0.5))
{
pKF->mNextKF->mpImuPreintegrated->MergePrevious(pKF->mpImuPreintegrated);
@ -1327,6 +1371,7 @@ void LocalMapping::KeyFrameCulling()
pKF->mPrevKF = NULL;
pKF->SetBadFlag();
}
// 没经过imu初始化的第三阶段且关键帧与其前一个关键帧的距离小于0.02m且前后两个关键帧时间间隔小于3s
else if(!mpCurrentKeyFrame->GetMap()->GetIniertialBA2() && (cv::norm(pKF->GetImuPosition()-pKF->mPrevKF->GetImuPosition())<0.02) && (t<3))
{
pKF->mNextKF->mpImuPreintegrated->MergePrevious(pKF->mpImuPreintegrated);
@ -1338,11 +1383,13 @@ void LocalMapping::KeyFrameCulling()
}
}
}
// 非imu就没那么多事儿了直接干掉
else
{
pKF->SetBadFlag();
}
}
// 遍历共视关键帧个数超过一定,就不弄了
if((count > 20 && mbAbortBA) || count>100) // MODIFICATION originally 20 for mbabortBA check just 10 keyframes
{
break;
@ -1350,7 +1397,11 @@ void LocalMapping::KeyFrameCulling()
}
}
// 计算三维向量v的反对称矩阵
/**
* @brief
* @param v
* @return v
*/
cv::Mat LocalMapping::SkewSymmetricMatrix(const cv::Mat &v)
{
return (cv::Mat_<float>(3,3) << 0, -v.at<float>(2), v.at<float>(1),
@ -1388,6 +1439,9 @@ void LocalMapping::RequestReset()
cout << "LM: Map reset, Done!!!" << endl;
}
/**
 * @brief Request a reset of the specified active map.
 */
void LocalMapping::RequestResetActiveMap(Map* pMap)
{
{
@ -1465,14 +1519,18 @@ void LocalMapping::RequestFinish()
mbFinishRequested = true;
}
// 检查是否已经有外部线程请求终止当前线程
/**
* @brief while
*/
bool LocalMapping::CheckFinish()
{
unique_lock<mutex> lock(mMutexFinish);
return mbFinishRequested;
}
// 设置当前线程已经真正地结束了
/**
 * @brief Mark this thread as truly finished (sets the finished/stopped flags).
 */
void LocalMapping::SetFinish()
{
unique_lock<mutex> lock(mMutexFinish);
@ -1481,20 +1539,30 @@ void LocalMapping::SetFinish()
mbStopped = true;
}
/**
 * @brief Reports whether this thread's run loop has terminated.
 * @return true once mbFinished has been set
 */
bool LocalMapping::isFinished()
{
    unique_lock<mutex> lock(mMutexFinish);
    const bool finished = mbFinished;
    return finished;
}
/**
 * @brief IMU initialization: estimate gravity direction, scale, velocities and biases.
 * @param priorG prior information weight for the gyroscope bias in the inertial-only optimization
 * @param priorA prior information weight for the accelerometer bias in the inertial-only optimization
 * @param bFIBA  whether to run a full (visual + inertial) BA afterwards; callers pass true
 */
void LocalMapping::InitializeIMU(float priorG, float priorA, bool bFIBA)
{
// 1. 将所有关键帧放入列表及向量里,且查看是否满足初始化条件
if (mbResetRequested)
return;
float minTime;
int nMinKF;
// 从时间及帧数上限制初始化,不满足下面的不进行初始化
if (mbMonocular)
{
minTime = 2.0;
@ -1506,11 +1574,12 @@ void LocalMapping::InitializeIMU(float priorG, float priorA, bool bFIBA)
nMinKF = 10;
}
// 当前地图大于10帧才进行初始化
if(mpAtlas->KeyFramesInMap()<nMinKF)
return;
// Retrieve all keyframe in temporal order
// 按照顺序存放目前地图里的关键帧,顺序按照前后顺序来,包括当前关键帧
list<KeyFrame*> lpKF;
KeyFrame* pKF = mpCurrentKeyFrame;
while(pKF->mPrevKF)
@ -1519,8 +1588,10 @@ void LocalMapping::InitializeIMU(float priorG, float priorA, bool bFIBA)
pKF = pKF->mPrevKF;
}
lpKF.push_front(pKF);
// 以相同内容再构建一个vector
vector<KeyFrame*> vpKF(lpKF.begin(),lpKF.end());
// TODO 跟上面重复?
if(vpKF.size()<nMinKF)
return;
@ -1530,17 +1601,19 @@ void LocalMapping::InitializeIMU(float priorG, float priorA, bool bFIBA)
bInitializing = true;
// 先处理新关键帧,防止堆积且保证数据量充足
while(CheckNewKeyFrames())
{
ProcessNewKeyFrame();
vpKF.push_back(mpCurrentKeyFrame);
lpKF.push_back(mpCurrentKeyFrame);
}
// 2. 正式IMU初始化
const int N = vpKF.size();
IMU::Bias b(0,0,0,0,0,0);
// Compute and KF velocities mRwg estimation
// 在IMU没有初始化情况下
if (!mpCurrentKeyFrame->GetMap()->isImuInitialized())
{
cv::Mat cvRwg;
@ -1551,20 +1624,27 @@ void LocalMapping::InitializeIMU(float priorG, float priorA, bool bFIBA)
continue;
if (!(*itKF)->mPrevKF)
continue;
// Rwbimu坐标转到初始化前世界坐标系下的坐标*更新偏置后的速度,可以理解为在世界坐标系下的速度矢量
dirG -= (*itKF)->mPrevKF->GetImuRotation()*(*itKF)->mpImuPreintegrated->GetUpdatedDeltaVelocity();
// 求取实际的速度,位移/时间
cv::Mat _vel = ((*itKF)->GetImuPosition() - (*itKF)->mPrevKF->GetImuPosition())/(*itKF)->mpImuPreintegrated->dT;
(*itKF)->SetVelocity(_vel);
(*itKF)->mPrevKF->SetVelocity(_vel);
}
// 归一化
dirG = dirG/cv::norm(dirG);
// 原本的重力方向
cv::Mat gI = (cv::Mat_<float>(3,1) << 0.0f, 0.0f, -1.0f);
// 求速度方向与重力方向的角轴
cv::Mat v = gI.cross(dirG);
// 求角轴长度
const float nv = cv::norm(v);
// 求转角大小
const float cosg = gI.dot(dirG);
const float ang = acos(cosg);
// 先计算旋转向量,在除去角轴大小
cv::Mat vzg = v * ang / nv;
// 获得重力方向到当前速度方向的旋转向量
cvRwg = IMU::ExpSO3(vzg);
mRwg = Converter::toMatrix3d(cvRwg);
mTinit = mpCurrentKeyFrame->mTimeStamp-mFirstTs;
@ -1578,9 +1658,11 @@ void LocalMapping::InitializeIMU(float priorG, float priorA, bool bFIBA)
mScale=1.0;
// 暂时没发现在别的地方出现过
mInitTime = mpTracker->mLastFrame.mTimeStamp-vpKF.front()->mTimeStamp;
std::chrono::steady_clock::time_point t0 = std::chrono::steady_clock::now();
// 计算残差及偏置差优化尺度重力方向及Ri Rj Vi Vj Pi Pj
Optimizer::InertialOptimization(mpAtlas->GetCurrentMap(), mRwg, mScale, mbg, mba, mbMonocular, infoInertial, false, false, priorG, priorA);
std::chrono::steady_clock::time_point t1 = std::chrono::steady_clock::now();
@ -1588,7 +1670,7 @@ void LocalMapping::InitializeIMU(float priorG, float priorA, bool bFIBA)
cout << "bg after inertial-only optimization: " << mbg << endl;
cout << "ba after inertial-only optimization: " << mba << endl;*/
// 尺度太小的话初始化认为失败
if (mScale<1e-1)
{
cout << "scale too small" << endl;
@ -1596,20 +1678,26 @@ void LocalMapping::InitializeIMU(float priorG, float priorA, bool bFIBA)
return;
}
// 到此时为止前面做的东西没有改变map
// Before this line we are not changing the map
unique_lock<mutex> lock(mpAtlas->GetCurrentMap()->mMutexMapUpdate);
std::chrono::steady_clock::time_point t2 = std::chrono::steady_clock::now();
// 尺度变化超过设定值或者非单目时无论带不带imu但这个函数只在带imu时才执行所以这个可以理解为双目imu
if ((fabs(mScale-1.f)>0.00001)||!mbMonocular)
{
// 恢复重力方向与尺度信息
mpAtlas->GetCurrentMap()->ApplyScaledRotation(Converter::toCvMat(mRwg).t(),mScale,true);
// 更新普通帧的位姿,主要是当前帧与上一帧
mpTracker->UpdateFrameIMU(mScale,vpKF[0]->GetImuBias(),mpCurrentKeyFrame);
}
std::chrono::steady_clock::time_point t3 = std::chrono::steady_clock::now();
// Check if initialization OK
// 即使初始化成功后面还会执行这个函数重新初始化
// 在之前没有初始化成功情况下此时刚刚初始化成功对每一帧都标记后面的kf全部都在tracking里面标记为true
// 也就是初始化之前的那些关键帧即使有imu信息也不算
if (!mpAtlas->isImuInitialized())
for(int i=0;i<N;i++)
{
@ -1622,8 +1710,10 @@ void LocalMapping::InitializeIMU(float priorG, float priorA, bool bFIBA)
cout << "bg: " << mpCurrentKeyFrame->GetGyroBias() << endl;*/
std::chrono::steady_clock::time_point t4 = std::chrono::steady_clock::now();
// 代码里都为true
if (bFIBA)
{
// 承接上一步纯imu优化按照之前的结果更新了尺度信息及适应重力方向所以要结合地图进行一次视觉加imu的全局优化这次带了MP等信息
if (priorA!=0.f)
Optimizer::FullInertialBA(mpAtlas->GetCurrentMap(), 100, false, 0, NULL, true, priorG, priorA);
else
@ -1637,6 +1727,7 @@ void LocalMapping::InitializeIMU(float priorG, float priorA, bool bFIBA)
if (!mpAtlas->isImuInitialized())
{
cout << "IMU in Map " << mpAtlas->GetCurrentMap()->GetId() << " is initialized" << endl;
// ! 重要!标记初始化成功
mpAtlas->SetImuInitialized();
mpTracker->t0IMU = mpTracker->mCurrentFrame.mTimeStamp;
mpCurrentKeyFrame->bImu = true;
@ -1671,6 +1762,9 @@ void LocalMapping::InitializeIMU(float priorG, float priorA, bool bFIBA)
return;
}
/**
 * @brief Refine scale and gravity direction via inertial-only optimization;
 *        invoked by Run() at fixed time windows while the map has at most 100 keyframes.
 */
void LocalMapping::ScaleRefinement()
{
// Minimum number of keyframes to compute a solution
@ -1680,6 +1774,7 @@ void LocalMapping::ScaleRefinement()
return;
// Retrieve all keyframes in temporal order
// 1. 检索所有的关键帧(当前地图)
list<KeyFrame*> lpKF;
KeyFrame* pKF = mpCurrentKeyFrame;
while(pKF->mPrevKF)
@ -1689,7 +1784,7 @@ void LocalMapping::ScaleRefinement()
}
lpKF.push_front(pKF);
vector<KeyFrame*> vpKF(lpKF.begin(),lpKF.end());
// 加入新添加的帧
while(CheckNewKeyFrames())
{
ProcessNewKeyFrame();
@ -1698,11 +1793,12 @@ void LocalMapping::ScaleRefinement()
}
const int N = vpKF.size();
// 2. 更新旋转与尺度
mRwg = Eigen::Matrix3d::Identity();
mScale=1.0;
std::chrono::steady_clock::time_point t0 = std::chrono::steady_clock::now();
// 优化重力方向与尺度
Optimizer::InertialOptimization(mpAtlas->GetCurrentMap(), mRwg, mScale);
std::chrono::steady_clock::time_point t1 = std::chrono::steady_clock::now();
@ -1714,8 +1810,10 @@ void LocalMapping::ScaleRefinement()
}
// Before this line we are not changing the map
// 3. 开始更新地图
unique_lock<mutex> lock(mpAtlas->GetCurrentMap()->mMutexMapUpdate);
std::chrono::steady_clock::time_point t2 = std::chrono::steady_clock::now();
// 3.1 如果尺度更新较多或是在双目imu情况下更新地图
if ((fabs(mScale-1.f)>0.00001)||!mbMonocular)
{
mpAtlas->GetCurrentMap()->ApplyScaledRotation(Converter::toCvMat(mRwg).t(),mScale,true);
@ -1723,6 +1821,7 @@ void LocalMapping::ScaleRefinement()
}
std::chrono::steady_clock::time_point t3 = std::chrono::steady_clock::now();
// 3.2 优化的这段时间新进来的kf全部清空不要
for(list<KeyFrame*>::iterator lit = mlNewKeyFrames.begin(), lend=mlNewKeyFrames.end(); lit!=lend; lit++)
{
(*lit)->SetBadFlag();
@ -1738,14 +1837,17 @@ void LocalMapping::ScaleRefinement()
return;
}
/**
 * @brief Query whether IMU initialization is currently in progress.
 *
 * Simply reports the bInitializing flag. NOTE(review): the original (garbled)
 * comment suggests this is polled by the Tracking thread so that it can hold
 * off while InitializeIMU() runs — confirm against the caller.
 *
 * @return true while IMU initialization is running, false otherwise.
 */
bool LocalMapping::IsInitializing()
{
return bInitializing;
}
/**
 * @brief Timestamp of the current keyframe; used by System::GetTimeFromIMUInit().
 */
double LocalMapping::GetCurrKFTime()
{
@ -1757,9 +1859,12 @@ double LocalMapping::GetCurrKFTime()
return 0.0;
}
/**
 * @brief Get the keyframe currently being processed by local mapping.
 * @return Pointer to mpCurrentKeyFrame (may be stale once processing moves on;
 *         ownership stays with the map — callers must not delete it).
 */
KeyFrame *LocalMapping::GetCurrKF()
{
return mpCurrentKeyFrame;
}
} //namespace ORB_SLAM
} // namespace ORB_SLAM3

File diff suppressed because it is too large Load Diff

View File

@ -49,6 +49,8 @@
#include <Eigen/Sparse>
// MLPnP算法极大似然PnP算法解决PnP问题用在重定位中不用运动的先验知识来估计相机位姿
// 基于论文《MLPNP - A REAL-TIME MAXIMUM LIKELIHOOD SOLUTION TO THE PERSPECTIVE-N-POINT PROBLEM》
namespace ORB_SLAM3 {
/**
@ -62,7 +64,9 @@ namespace ORB_SLAM3 {
* @param[in] N 2D
* @param[in] mpCamera 3D
*/
MLPnPsolver::MLPnPsolver(const Frame &F, const vector<MapPoint *> &vpMapPointMatches):
MLPnPsolver::MLPnPsolver(const Frame &F, // 输入帧的数据
const vector<MapPoint *> &vpMapPointMatches // 待匹配的特征点是当前帧和候选关键帧用BoW进行快速匹配的结果
) :
mnInliersi(0), // 内点的个数
mnIterations(0), // Ransac迭代次数
mnBestInliers(0), // 最佳内点数
@ -70,7 +74,7 @@ namespace ORB_SLAM3 {
mpCamera(F.mpCamera) // 相机模型利用该变量对3D点进行投影
{
mvpMapPointMatches = vpMapPointMatches; // 待匹配的特征点是当前帧和候选关键帧用BoW进行快速匹配的结果
mvBearingVecs.reserve(F.mvpMapPoints.size()); // ? 初始化3D点的单位向量
mvBearingVecs.reserve(F.mvpMapPoints.size()); // 初始化3D点的单位向量
mvP2D.reserve(F.mvpMapPoints.size()); // 初始化3D点的投影点
mvSigma2.reserve(F.mvpMapPoints.size()); // 初始化卡方检验中的sigma值
mvP3Dw.reserve(F.mvpMapPoints.size()); // 初始化3D点坐标
@ -120,6 +124,7 @@ namespace ORB_SLAM3 {
}
}
// 设置RANSAC参数
SetRansacParameters();
}
@ -141,8 +146,8 @@ namespace ORB_SLAM3 {
// Step 1: 判断如果2D点个数不足以启动RANSAC迭代过程的最小下限则退出
if(N<mRansacMinInliers)
{
bNoMore = true;
return cv::Mat();
bNoMore = true; // 已经达到最大迭代次数的标志
return cv::Mat(); // 函数退出
}
// mvAllIndices为所有参与PnP的2D点的索引
@ -278,6 +283,8 @@ namespace ORB_SLAM3 {
}
}
// step 4 相机位姿估计失败,返回零值
// 程序运行到这里,那说明没有满足条件的相机位姿估计结果,位姿估计失败了
return cv::Mat();
}
@ -328,32 +335,41 @@ namespace ORB_SLAM3 {
mRansacMaxIts = max(1,min(nIterations,mRansacMaxIts));
// 计算不同图层上的特征点在进行内点检验的时候,所使用的不同判断误差阈值
mvMaxError.resize(mvSigma2.size());
mvMaxError.resize(mvSigma2.size()); // 层数
for (size_t i = 0; i < mvSigma2.size(); i++)
mvMaxError[i] = mvSigma2[i]*th2;
mvMaxError[i] = mvSigma2[i] * th2; // 不同的尺度,设置不同的最大偏差
}
/**
 * @brief Apply the current pose estimate (R, t) to the matched 3D points,
 *        project them into the image, and count as inliers the matches whose
 *        squared reprojection error is below the per-octave threshold.
 */
void MLPnPsolver::CheckInliers() {
mnInliersi = 0;
// 遍历当前帧中所有的匹配点
for(int i=0; i<N; i++)
{
// 取出对应的3D点和2D点
point_t p = mvP3Dw[i];
cv::Point3f P3Dw(p(0),p(1),p(2));
cv::Point2f P2D = mvP2D[i];
// 将3D点由世界坐标系旋转到相机坐标系
float xc = mRi[0][0]*P3Dw.x+mRi[0][1]*P3Dw.y+mRi[0][2]*P3Dw.z+mti[0];
float yc = mRi[1][0]*P3Dw.x+mRi[1][1]*P3Dw.y+mRi[1][2]*P3Dw.z+mti[1];
float zc = mRi[2][0]*P3Dw.x+mRi[2][1]*P3Dw.y+mRi[2][2]*P3Dw.z+mti[2];
// 将相机坐标系下的3D进行投影
cv::Point3f P3Dc(xc,yc,zc);
cv::Point2f uv = mpCamera->project(P3Dc);
// 计算残差
float distX = P2D.x-uv.x;
float distY = P2D.y-uv.y;
float error2 = distX*distX+distY*distY;
// 判定是不是内点
if(error2<mvMaxError[i])
{
mvbInliersi[i]=true;
@ -366,7 +382,11 @@ namespace ORB_SLAM3 {
}
}
/**
 * @brief Re-estimate the camera pose using all inliers of the best RANSAC
 *        hypothesis (instead of the minimal set) for a more accurate solution.
 */
bool MLPnPsolver::Refine() {
// 记录内点的索引值
vector<int> vIndices;
vIndices.reserve(mvbBestInliers.size());
@ -379,10 +399,17 @@ namespace ORB_SLAM3 {
}
//Bearing vectors and 3D points used for this ransac iteration
// 因为是重定义所以要另外定义局部变量和iterate里面一样
// 初始化单位向量和3D点给当前重定义函数使用
bearingVectors_t bearingVecs;
points_t p3DS;
vector<int> indexes;
// 注意这里是用所有内点vIndices.size()来进行相机位姿估计
// 而iterate里面是用最小集mRansacMinSet(6个)来进行相机位姿估计
// TODO:有什么区别呢肯定有啦mRansacMinSet只是粗略解
// 这里之所以要Refine就是要用所有满足模型的内点来更加精确地近似表达模型
// 这样求出来的解才会更加准确
for(size_t i=0; i<vIndices.size(); i++)
{
int idx = vIndices[i];
@ -391,6 +418,7 @@ namespace ORB_SLAM3 {
p3DS.push_back(mvP3Dw[idx]);
indexes.push_back(i);
}
// 后面操作和iterate类似就不赘述了
//By the moment, we are using MLPnP without covariance info
cov3_mats_t covs(1);
@ -507,6 +535,8 @@ namespace ORB_SLAM3 {
// 令S = M * M^T其中M = [p1,p2,...,pi],即 points3 空间点矩阵
// 如果矩阵S的秩为3且最小特征值不接近于0则不属于平面条件否则需要解决一下
Eigen::Matrix3d planarTest = points3 * points3.transpose();
// 为了计算矩阵S的秩需要对矩阵S进行满秩的QR分解通过其结果来判断矩阵S的秩从而判断是否是平面条件
Eigen::FullPivHouseholderQR<Eigen::Matrix3d> rankTest(planarTest);
//int r, c;
//double minEigenVal = abs(eigen_solver.eigenvalues().real().minCoeff(&r, &c));
@ -718,7 +748,7 @@ namespace ORB_SLAM3 {
// |r13 r23 r33|
tmp.transposeInPlace();
// 平移部分 t 只表示了正确的方向,没有尺度,需要求解 scale, 先求系数
double scale = 1.0 / std::sqrt(std::abs(tmp.col(1).norm() * tmp.col(2).norm()));
// find best rotation matrix in frobenius sense
// 利用Frobenious范数计算最佳的旋转矩阵利用公式(19) R = U_R*V_R^T
@ -734,7 +764,7 @@ namespace ORB_SLAM3 {
Rout1 *= -1.0;
// rotate this matrix back using the eigen frame
// ? 因为是在平面情况下计算的,估计出来的旋转矩阵是要做一个转换的,根据公式(21)R = Rs*R
// 因为是在平面情况下计算的,估计出来的旋转矩阵是要做一个转换的,根据公式(21)R = Rs*R
// 其中Rs表示特征向量的旋转矩阵
// 注意eigenRot之前已经转置过了transposeInPlace()所以这里Rout1在之前也转置了即tmp.transposeInPlace()
Rout1 = eigenRot.transpose() * Rout1;
@ -941,16 +971,25 @@ namespace ORB_SLAM3 {
result.block<3, 1>(0, 3) = tout;//-result.block<3, 3>(0, 0) * tout;
}
/**
 * @brief Rodrigues formula: rotation vector (so3) -> rotation matrix (SO3).
 * @param[in] omega rotation vector (axis scaled by the rotation angle)
 * @return the corresponding rotation matrix R
 */
Eigen::Matrix3d MLPnPsolver::rodrigues2rot(const Eigen::Vector3d &omega) {
// 初始化旋转矩阵
rotation_t R = Eigen::Matrix3d::Identity();
// 求旋转向量的反对称矩阵
Eigen::Matrix3d skewW;
skewW << 0.0, -omega(2), omega(1),
omega(2), 0.0, -omega(0),
-omega(1), omega(0), 0.0;
// 求旋转向量的角度
double omega_norm = omega.norm();
// 通过罗德里格斯公式把旋转向量转换成旋转矩阵
if (omega_norm > std::numeric_limits<double>::epsilon())
R = R + sin(omega_norm) / omega_norm * skewW
+ (1 - cos(omega_norm)) / (omega_norm * omega_norm) * (skewW * skewW);
@ -958,17 +997,42 @@ namespace ORB_SLAM3 {
return R;
}
/**
 * @brief Logarithm map: rotation matrix (SO3) -> rotation vector (so3), i.e. ln(R).
 *
 * The rotation angle is recovered from the trace of R and the axis from the
 * antisymmetric part of R. When the angle is below machine epsilon the zero
 * vector is returned, avoiding the division by sin(theta) ~ 0.
 *
 * @param[in] R rotation matrix
 * @return the corresponding rotation vector omega
 */
Eigen::Vector3d MLPnPsolver::rot2rodrigues(const Eigen::Matrix3d &R) {
rodrigues_t omega;
omega << 0.0, 0.0, 0.0;
// R.trace() 矩阵的迹,即该矩阵的特征值总和
double trace = R.trace() - 1.0;
// 李群(SO3) -> 李代数(so3)的对数映射
// 对数映射ln(R)∨将一个旋转矩阵转换为一个李代数,但由于对数的泰勒展开不是很优雅所以用本式
// wnorm是求解出来的角度
double wnorm = acos(trace / 2.0);
// 如果wnorm大于运行编译程序的计算机所能识别的最小非零浮点数则可以生成向量否则为0
if (wnorm > std::numeric_limits<double>::epsilon())
{
// |r11 r21 r31|
// R = |r12 r22 r32|
// |r13 r23 r33|
omega[0] = (R(2, 1) - R(1, 2));
omega[1] = (R(0, 2) - R(2, 0));
omega[2] = (R(1, 0) - R(0, 1));
// theta |r23 - r32|
// ln(R) = ------------ * |r31 - r13|
// 2*sin(theta) |r12 - r21|
double sc = wnorm / (2.0 * sin(wnorm));
omega *= sc;
}
@ -1149,75 +1213,181 @@ namespace ORB_SLAM3 {
double t5 = w1 * w1;
double t6 = w2 * w2;
double t7 = w3 * w3;
// t8 = theta^2 = w1^2+w2^2+w3^2
double t8 = t5 + t6 + t7;
// t9 = sqrt(t8) = sqrt(theta^2) = theta
double t9 = sqrt(t8);
// t10 = sin(theta)
double t10 = sin(t9);
// t11 = 1 / theta
double t11 = 1.0 / sqrt(t8);
// t12 = cos(theta)
double t12 = cos(t9);
// t13 = cos(theta) - 1
double t13 = t12 - 1.0;
// t14 = 1 / theta^2
double t14 = 1.0 / t8;
// t16 = sin(theta)/theta*w3
// 令 A = sin(theta)/theta = t10*t11
// t16 = A*w3
double t16 = t10 * t11 * w3;
// t17 = (cos(theta) - 1)/theta^2 * w1 * w2
// 令 B = (cos(theta) - 1)/theta^2 = t13*t14
// t17 = B*w1*w2
double t17 = t13 * t14 * w1 * w2;
// t19 = sin(theta)/theta*w2
// = A*w2
double t19 = t10 * t11 * w2;
// t20 = (cos(theta) - 1) / theta^2 * w1 * w3
// = B*w1*w3
double t20 = t13 * t14 * w1 * w3;
// t24 = w2^2+w3^2
double t24 = t6 + t7;
// t27 = A*w3 + B*w1*w2
// = -r12
double t27 = t16 + t17;
// t28 = (A*w3 + B*w1*w2)*Y1
// = -r12*Y1
double t28 = Y1 * t27;
// t29 = A*w2 - B*w1*w3
// = r13
double t29 = t19 - t20;
// t30 = (A*w2 - B*w1*w3) * Z1
// = r13 * Z1
double t30 = Z1 * t29;
// t31 = (cos(theta) - 1) /theta^2 * (w2^2+w3^2)
// = B * (w2^2+w3^2)
double t31 = t13 * t14 * t24;
// t32 = B * (w2^2+w3^2) + 1
// = r11
double t32 = t31 + 1.0;
// t33 = (B * (w2^2+w3^2) + 1) * X1
// = r11 * X1
double t33 = X1 * t32;
// t15 = t1 - (A*w3 + B*w1*w2)*Y1 + (A*w2 - B*w1*w3) * Z1 + (B * (w2^2+w3^2) + 1) * X1
// = t1 + r11*X1 + r12*Y1 + r13*Z1
double t15 = t1 - t28 + t30 + t33;
// t21 = t10 * t11 * w1 = sin(theta) / theta * w1
// = A*w1
double t21 = t10 * t11 * w1;
// t22 = t13 * t14 * w2 * w3 = (cos(theta) - 1) / theta^2 * w2 * w3
// = B*w2*w3
double t22 = t13 * t14 * w2 * w3;
// t45 = t5 + t7
// = w1^2 + w3^2
double t45 = t5 + t7;
// t53 = t16 - t17
// = A*w3 - B*w1*w2 = r21
double t53 = t16 - t17;
// t54 = r21*X1
double t54 = X1 * t53;
// t55 = A*w1 + B*w2*w3
// = -r23
double t55 = t21 + t22;
// t56 = -r23*Z1
double t56 = Z1 * t55;
// t57 = t13 * t14 * t45 = (cos(theta) - 1) / theta^2 * (w1^2 + w3^2)
// = B8(w1^2+w3^2)
double t57 = t13 * t14 * t45;
// t58 = B8(w1^2+w3^2) + 1.0
// = r22
double t58 = t57 + 1.0;
// t59 = r22*Y1
double t59 = Y1 * t58;
// t18 = t2 + t54 - t56 + t59
// = t2 + r21*X1 + r22*Y1 + r23*Z1
double t18 = t2 + t54 - t56 + t59;
// t34 = t5 + t6
// = w1^2+w2^2
double t34 = t5 + t6;
// t38 = t19 + t20 = A*w2+B*w1*w3
// = -r31
double t38 = t19 + t20;
// t39 = -r31*X1
double t39 = X1 * t38;
// t40 = A*w1 - B*w2*w3
// = r32
double t40 = t21 - t22;
// t41 = r32*Y1
double t41 = Y1 * t40;
// t42 = t13 * t14 * t34 = (cos(theta) - 1) / theta^2 * (w1^2+w2^2)
// = B*(w1^2+w2^2)
double t42 = t13 * t14 * t34;
// t43 = B*(w1^2+w2^2) + 1
// = r33
double t43 = t42 + 1.0;
// t44 = r33*Z1
double t44 = Z1 * t43;
// t23 = t3 - t39 + t41 + t44 = t3 + r31*X1 + r32*Y1 + r33*Z1
double t23 = t3 - t39 + t41 + t44;
// t25 = 1.0 / pow(theta^2, 3.0 / 2.0)
// = 1 / theta^3
double t25 = 1.0 / pow(t8, 3.0 / 2.0);
// t26 = 1.0 / (t8 * t8)
// = 1 / theta^4
double t26 = 1.0 / (t8 * t8);
// t35 = t12 * t14 * w1 * w2 = cos(theta) / theta^2 * w1 * w2
// 令 cos(theta) / theta^2 = E = t12*t14
// t35 = E*w1*w2
double t35 = t12 * t14 * w1 * w2;
// t36 = t5 * t10 * t25 * w3 = w1^2 * sin(theta) / theta^3 * w3
// 令 sin(theta) / theta^3 = F = t10*t25
// t36 = F*w1^2*w3
double t36 = t5 * t10 * t25 * w3;
// t37 = t5 * t13 * t26 * w3 * 2.0 = w1^2 * (cos(theta) - 1) / theta^4 * w3
// 令 (cos(theta) - 1) / theta^4 = G = t13*t26
// t37 = G*w1^2*w3
double t37 = t5 * t13 * t26 * w3 * 2.0;
// t46 = t10 * t25 * w1 * w3 = sin(theta) / theta^3 * w1 * w3 = F*w1*w3
double t46 = t10 * t25 * w1 * w3;
// t47 = t5 * t10 * t25 * w2 = w1^2 * sin(theta) / theta^3 * w2 = F*w1^2*w2
double t47 = t5 * t10 * t25 * w2;
// t48 = t5 * t13 * t26 * w2 * 2.0 = w1^2 * (cos(theta) - 1) / theta^4 * 2 * w2 = G*w1^2*w2
double t48 = t5 * t13 * t26 * w2 * 2.0;
// t49 = t10 * t11 = sin(theta) / theta = A
double t49 = t10 * t11;
// t50 = t5 * t12 * t14 = w1^2 * cos(theta) / theta^2 = E*w1^2
double t50 = t5 * t12 * t14;
// t51 = t13 * t26 * w1 * w2 * w3 * 2.0 = (cos(theta) - 1) / theta^4 * w1 * w2 * w3 * 2 = 2*G*w1*w2*w3
double t51 = t13 * t26 * w1 * w2 * w3 * 2.0;
// t52 = t10 * t25 * w1 * w2 * w3 = sin(theta) / theta^3 * w1 * w2 * w3 = F*w1*w2*w3
double t52 = t10 * t25 * w1 * w2 * w3;
// t60 = t15 * t15 = (t1 + r11*X1 + r12*Y1 + r13*Z1)^2
double t60 = t15 * t15;
// t61 = t18 * t18 = (t2 + r21*X1 + r22*Y1 + r23*Z1)^2
double t61 = t18 * t18;
// t62 = t23 * t23 = (t3 + r31*X1 + r32*Y1 + r33*Z1)^2
double t62 = t23 * t23;
// t63 = t60 + t61 + t62 = (t1 + r11*X1 + r12*Y1 + r13*Z1)^2 + (t2 + r21*X1 + r22*Y1 + r23*Z1)^2 + (t3 + r31*X1 + r32*Y1 + r33*Z1)^2
double t63 = t60 + t61 + t62;
// t64 = t5 * t10 * t25 = w1^2 * sin(theta) / theta^3 = F*w1^2
double t64 = t5 * t10 * t25;
// t65 = 1 / sqrt((t1 + r11*X1 + r12*Y1 + r13*Z1)^2 + (t2 + r21*X1 + r22*Y1 + r23*Z1)^2 + (t3 + r31*X1 + r32*Y1 + r33*Z1)^2)
double t65 = 1.0 / sqrt(t63);
// t66 = Y1 * r2 * t6 = Y1 * r2 * w2^2
double t66 = Y1 * r2 * t6;
// t67 = Z1 * r3 * t7 = Z1 * r3 * w3^2
double t67 = Z1 * r3 * t7;
// t68 = r1 * t1 * t5 = r1 * t1 * w1^2
double t68 = r1 * t1 * t5;
// t69 = r1 * t1 * t6 = r1 * t1 * w2^2
double t69 = r1 * t1 * t6;
// t70 = r1 * t1 * t7 = r1 * t1 * w3^2
double t70 = r1 * t1 * t7;
// t71 = r2 * t2 * t5 = r2 * t2 * w1^2
double t71 = r2 * t2 * t5;
// t72 = r2 * t2 * t6 = r2 * t2 * w2^2
double t72 = r2 * t2 * t6;
// t73 = r2 * t2 * t7 = r2 * t2 * w3^2
double t73 = r2 * t2 * t7;
// t74 = r3 * t3 * t5 = r3 * t3 * w1^2
double t74 = r3 * t3 * t5;
// t75 = r3 * t3 * t6 = r3 * t3 * w2^2
double t75 = r3 * t3 * t6;
// t76 = r3 * t3 * t7 = r3 * t3 * w3^2
double t76 = r3 * t3 * t7;
// t77 = X1 * r1 * t5 = X1 * r1 * w1^2
double t77 = X1 * r1 * t5;
double t78 = X1 * r2 * w1 * w2;
double t79 = X1 * r3 * w1 * w3;
@ -1225,62 +1395,141 @@ namespace ORB_SLAM3 {
double t81 = Y1 * r3 * w2 * w3;
double t82 = Z1 * r1 * w1 * w3;
double t83 = Z1 * r2 * w2 * w3;
// t84 = X1 * r1 * t6 * t12 = X1 * r1 * w2^2 * cos(theta)
double t84 = X1 * r1 * t6 * t12;
// t85 = X1 * r1 * t7 * t12 = X1 * r1 * w3^2 * cos(theta)
double t85 = X1 * r1 * t7 * t12;
// t86 = Y1 * r2 * t5 * t12 = Y1 * r2 * w1^2 * cos(theta)
double t86 = Y1 * r2 * t5 * t12;
// t87 = Y1 * r2 * t7 * t12 = Y1 * r2 * w3^2 * cos(theta)
double t87 = Y1 * r2 * t7 * t12;
// t88 = Z1 * r3 * t5 * t12 = Z1 * r3 * w1^2 * cos(theta)
double t88 = Z1 * r3 * t5 * t12;
// t89 = Z1 * r3 * t6 * t12 = Z1 * r3 * w2^2 * cos(theta)
double t89 = Z1 * r3 * t6 * t12;
// t90 = X1 * r2 * t9 * t10 * w3 = X1 * r2 * theta * sin(theta) * w3
double t90 = X1 * r2 * t9 * t10 * w3;
// t91 = Y1 * r3 * t9 * t10 * w1 = Y1 * r3 * theta * sin(theta) * w1
double t91 = Y1 * r3 * t9 * t10 * w1;
// t92 = Z1 * r1 * t9 * t10 * w2 = Z1 * r1 * theta * sin(theta) * w2
double t92 = Z1 * r1 * t9 * t10 * w2;
// t102 = X1 * r3 * t9 * t10 * w2 = X1 * r3 * theta * sin(theta) * w2
double t102 = X1 * r3 * t9 * t10 * w2;
// t103 = Y1 * r1 * t9 * t10 * w3 = Y1 * r1 * theta * sin(theta) * w3
double t103 = Y1 * r1 * t9 * t10 * w3;
// t104 = Z1 * r2 * t9 * t10 * w1 = Z1 * r2 * theta * sin(theta) * w1
double t104 = Z1 * r2 * t9 * t10 * w1;
// t105 = X1 * r2 * t12 * w1 * w2 = X1 * r2 * cos(theta) * w1 * w2
double t105 = X1 * r2 * t12 * w1 * w2;
// t106 = X1 * r3 * t12 * w1 * w3 = X1 * r3 * cos(theta) * w1 * w3
double t106 = X1 * r3 * t12 * w1 * w3;
// t107 = Y1 * r1 * t12 * w1 * w2 = Y1 * r1 * cos(theta) * w1 * w2
double t107 = Y1 * r1 * t12 * w1 * w2;
// t108 = Y1 * r3 * t12 * w2 * w3 = Y1 * r3 * cos(theta) * w2 * w3
double t108 = Y1 * r3 * t12 * w2 * w3;
// t109 = Z1 * r1 * t12 * w1 * w3 = Z1 * r1 * cos(theta) * w1 * w3
double t109 = Z1 * r1 * t12 * w1 * w3;
// t110 = Z1 * r2 * t12 * w2 * w3 = Z1 * r2 * cos(theta) * w2 * w3
double t110 = Z1 * r2 * t12 * w2 * w3;
double t93 = t66+t67+t68+t69+t70+t71+t72+t73+t74+t75+t76+t77+t78+t79+t80+t81+t82+t83+t84+t85+t86+t87+t88+t89+t90+t91+t92-t102-t103-t104-t105-t106-t107-t108-t109-t110;
// t93 = t66 + t67 + t68 + t69
// + t70 + t71 + t72 + t73 + t74
// + t75 + t76 + t77 + t78 + t79
// + t80 + t81 + t82 + t83 + t84
// + t85 + t86 + t87 + t88 + t89
// + t90 + t91 + t92 - t102 - t103
// - t104 - t105 - t106 - t107
// - t108 - t109 - t110
//
// = (Y1 * r2 * w2^2) + (Z1 * r3 * w3^2) + (r1 * t1 * w1^2) + (r1 * t1 * w2^2)
// + (r1 * t1 * w3^2) + (r2 * t2 * w1^2) + (r2 * t2 * w2^2) + (r2 * t2 * w3^2) + (r3 * t3 * w1^2)
// + (r3 * t3 * w2^2) + (r3 * t3 * w3^2) + (X1 * r1 * w1^2) + (X1 * r2 * w1 * w2) + (X1 * r3 * w1 * w3)
// + (Y1 * r1 * w1 * w2) + (Y1 * r3 * w2 * w3) + (Z1 * r1 * w1 * w3) + (Z1 * r2 * w2 * w3) + (X1 * r1 * w2^2 * cos(theta))
// + (X1 * r1 * w3^2 * cos(theta)) + (Y1 * r2 * w1^2 * cos(theta)) + (Y1 * r2 * w3^2 * cos(theta)) + (Z1 * r3 * w1^2 * cos(theta)) + (Z1 * r3 * w2^2 * cos(theta))
// + (X1 * r2 * theta * sin(theta) * w3) + (Y1 * r3 * theta * sin(theta) * w1) + (Z1 * r1 * theta * sin(theta) * w2) - (X1 * r3 * theta * sin(theta) * w2) - (Y1 * r1 * theta * sin(theta) * w3)
// - (Z1 * r2 * theta * sin(theta) * w1) - (X1 * r2 * cos(theta) * w1 * w2) - (X1 * r3 * cos(theta) * w1 * w3) - (Y1 * r1 * cos(theta) * w1 * w2) - ()
// - (Y1 * r3 * cos(theta) * w2 * w3) - (Z1 * r1 * cos(theta) * w1 * w3) - (Z1 * r2 * cos(theta) * w2 * w3)
double t93 =
t66 + t67 + t68 + t69 + t70 + t71 + t72 + t73 + t74 + t75 + t76 + t77 + t78 + t79 + t80 + t81 + t82 +
t83 + t84 + t85 + t86 + t87 + t88 + t89 + t90 + t91 + t92 - t102 - t103 - t104 - t105 - t106 - t107 -
t108 - t109 - t110;
// t94 = sin(theta)/theta^3* w1 * w2 = F*w1*w2
double t94 = t10 * t25 * w1 * w2;
// t95 = w2^2*sin(theta)/theta^3*w3 = F*w2^2*w3
double t95 = t6 * t10 * t25 * w3;
// t96 = 2 * w2^2 * (cos(theta) - 1) / theta^4 * w3 = 2*G*w2^2*w3
double t96 = t6 * t13 * t26 * w3 * 2.0;
// t97 = cos(theta) / theta^2 * w2 * w3 = E*w2*w3
double t97 = t12 * t14 * w2 * w3;
// t98 = w2^2 * sin(theta) / theta^3 * w1 = F*w2^2*w1
double t98 = t6 * t10 * t25 * w1;
// t99 = 2 * w2^2 * (cos(theta) - 1) / theta^4 * w1 = 2*G*w2^2*w1
double t99 = t6 * t13 * t26 * w1 * 2.0;
// t100 = w2^2 * sin(theta) / theta^3 = F*w2^2
double t100 = t6 * t10 * t25;
// t101 = 1.0 / pow(sqrt((t1 + r11*X1 + r12*Y1 + r13*Z1)^2 + (t2 + r21*X1 + r22*Y1 + r23*Z1)^2 + (t3 + r31*X1 + r32*Y1 + r33*Z1)^2), 3.0 / 2.0)
// = (t1 + r11*X1 + r12*Y1 + r13*Z1)^2 + (t2 + r21*X1 + r22*Y1 + r23*Z1)^2 + (t3 + r31*X1 + r32*Y1 + r33*Z1)^3
double t101 = 1.0 / pow(t63, 3.0 / 2.0);
// t111 = w2^2 * cos(theta) / theta^2 = E*w2^2
double t111 = t6 * t12 * t14;
// t112 = sin(theta) / theta^3 *w2 * w3 = F*w2*w3
double t112 = t10 * t25 * w2 * w3;
// t113 = cos(theta) / theta^2 * w1 * w3 = E*w1*w3
double t113 = t12 * t14 * w1 * w3;
// t114 = w3^2 * sin(theta) / theta^3 * w2 = F*w3^2*w2
double t114 = t7 * t10 * t25 * w2;
// t115 = t7 * t13 * t26 * w2 * 2.0 = 2*w3^2*(cos(theta) - 1) / theta^4*w2
double t115 = t7 * t13 * t26 * w2 * 2.0;
// t116 = w3^2 * sin(theta) / theta^3 * w1 = F*w3^2*w1
double t116 = t7 * t10 * t25 * w1;
// t117 = 2 * w3^2 * (cos(theta) - 1) / theta^4 * w1 = 2*G*w3^2*w1
double t117 = t7 * t13 * t26 * w1 * 2.0;
// t118 = E*w3^2
double t118 = t7 * t12 * t14;
// t119 = 2*w1*G*(w2^2+w3^2)
double t119 = t13 * t24 * t26 * w1 * 2.0;
// t120 = F*w1*(w2^2+w3^2)
double t120 = t10 * t24 * t25 * w1;
// t121 = (2*G+F)*w1*(w2^2+w3^2)
double t121 = t119 + t120;
// t122 = 2*G*w1*(w1^2+w2^2)
double t122 = t13 * t26 * t34 * w1 * 2.0;
// t123 = F*w1*(w1^2+w2^2)
double t123 = t10 * t25 * t34 * w1;
// t131 = 2*(cos(theta) - 1) / theta^2*w1 = 2*B*w1
double t131 = t13 * t14 * w1 * 2.0;
// t124 = t122 + t123 - t131 = 2*G*w1*(w1^2+w2^2)+F*w1*(w1^2+w2^2)-2*B*w1
// = (2*G+F)*w1*(w1^2+w2^2)-2*B*w1
double t124 = t122 + t123 - t131;
// t139 = t13 * t14 * w3 = B*w3
double t139 = t13 * t14 * w3;
// t125 = -t35 + t36 + t37 + t94 - t139 = -E*w1*w2+F*w1^2*w3+G*w1^2*w3+F*w1*w2-B*w3
double t125 = -t35 + t36 + t37 + t94 - t139;
// t126 = (-E*w1*w2+F*w1^2*w3+G*w1^2*w3+F*w1*w2-B*w3)*X1
double t126 = X1 * t125;
// t127 = t49 + t50 + t51 + t52 - t64 = A + E*w1^2 + (2*G+F)*w1*w2*w3 - F*w1^2
double t127 = t49 + t50 + t51 + t52 - t64;
// t128 = (A + E*w1^2 + (2*G+F)*w1*w2*w3 - F*w1^2)*Y1
double t128 = Y1 * t127;
// t129 = (-E*w1*w2+F*w1^2*w3+G*w1^2*w3+F*w1*w2-B*w3)*X1 + (A + E*w1^2 + (2*G+F)*w1*w2*w3 - F*w1^2)*Y1 - ((2*G+F)*w1*(w1^2+w2^2)-2*B*w1)Z1
double t129 = t126 + t128 - Z1 * t124;
// t130 = 2 * (t3 + r31*X1 + r32*Y1 + r33*Z1)
// * ((-E*w1*w2+F*w1^2*w3+G*w1^2*w3+F*w1*w2-B*w3)*X1 + (A + E*w1^2 + (2*G+F)*w1*w2*w3 - F*w1^2)*Y1 - ((2*G+F)*w1*(w1^2+w2^2)-2*B*w1)Z1)
double t130 = t23 * t129 * 2.0;
// t132 = t13 * t26 * t45 * w1 * 2.0 = 2*w1*G*(w1^2+w3^2)
double t132 = t13 * t26 * t45 * w1 * 2.0;
// t133 = t10 * t25 * t45 * w1 = F*w1*(w1^2+w3^2)
double t133 = t10 * t25 * t45 * w1;
// t138 = t13 * t14 * w2 = B*w2
double t138 = t13 * t14 * w2;
// t134 = -t46 + t47 + t48 + t113 - t138 = -F*w1*w3+F*w1^2*w2+G*w1^2*w2+E*w1*w3-B*w2
double t134 = -t46 + t47 + t48 + t113 - t138;
// t135 = (-F*w1*w3+F*w1^2*w2+G*w1^2*w2+E*w1*w3-B*w2)*X1
double t135 = X1 * t134;
// t136 = -t49 - t50 + t51 + t52 + t64 = -A-E*w1^2+2*G*w1*w2*w3+F*w1*w2*w3+F*w1^2
double t136 = -t49 - t50 + t51 + t52 + t64;
// t137 = (-A-E*w1^2+2*G*w1*w2*w3+F*w1*w2*w3+F*w1^2)*Z1
double t137 = Z1 * t136;
double t140 = X1 * s1 * t5;
double t141 = Y1 * s2 * t6;
double t142 = Z1 * s3 * t7;
@ -1299,76 +1548,334 @@ namespace ORB_SLAM3 {
double t155 = Y1 * s3 * w2 * w3;
double t156 = Z1 * s1 * w1 * w3;
double t157 = Z1 * s2 * w2 * w3;
// t12 = cos(theta)
double t158 = X1 * s1 * t6 * t12;
double t159 = X1 * s1 * t7 * t12;
double t160 = Y1 * s2 * t5 * t12;
double t161 = Y1 * s2 * t7 * t12;
double t162 = Z1 * s3 * t5 * t12;
double t163 = Z1 * s3 * t6 * t12;
// t9 = theta, t10 = sin(theta)
double t164 = X1 * s2 * t9 * t10 * w3;
double t165 = Y1 * s3 * t9 * t10 * w1;
double t166 = Z1 * s1 * t9 * t10 * w2;
double t183 = X1 * s3 * t9 * t10 * w2;
double t184 = Y1 * s1 * t9 * t10 * w3;
double t185 = Z1 * s2 * t9 * t10 * w1;
// t12 = cos(theta)
double t186 = X1 * s2 * t12 * w1 * w2;
double t187 = X1 * s3 * t12 * w1 * w3;
double t188 = Y1 * s1 * t12 * w1 * w2;
double t189 = Y1 * s3 * t12 * w2 * w3;
double t190 = Z1 * s1 * t12 * w1 * w3;
double t191 = Z1 * s2 * t12 * w2 * w3;
double t167 = t140+t141+t142+t143+t144+t145+t146+t147+t148+t149+t150+t151+t152+t153+t154+t155+t156+t157+t158+t159+t160+t161+t162+t163+t164+t165+t166-t183-t184-t185-t186-t187-t188-t189-t190-t191;
// t167 = t140 + t141 + t142 + t143 + t144
// + t145 + t146 + t147 + t148 + t149
// + t150 + t151 + t152 + t153 + t154
// + t155 + t156 + t157 + t158 + t159
// + t160 + t161 + t162 + t163 + t164
// + t165 + t166 - t183 - t184 - t185
// - t186 - t187 - t188
// - t189 - t190 - t191
//
// = (X1 * s1 * t5) + (Y1 * s2 * t6) + (Z1 * s3 * t7) + (s1 * t1 * t5) + (s1 * t1 * t6)
// + (s1 * t1 * t7) + (s2 * t2 * t5) + (s2 * t2 * t6) + (s2 * t2 * t7) + (s3 * t3 * t5)
// + (s3 * t3 * t6) + (s3 * t3 * t7) + (X1 * s2 * w1 * w2) + (X1 * s3 * w1 * w3) + (Y1 * s1 * w1 * w2)
// + (Y1 * s3 * w2 * w3) + (Z1 * s1 * w1 * w3) + (Z1 * s2 * w2 * w3) + (X1 * s1 * t6 * t12) + (X1 * s1 * t7 * t12)
// + (Y1 * s2 * t5 * t12) + (Y1 * s2 * t7 * t12) + (Z1 * s3 * t5 * t12) + (Z1 * s3 * t6 * t12) + (X1 * s2 * t9 * t10 * w3)
// + (Y1 * s3 * t9 * t10 * w1) + (Z1 * s1 * t9 * t10 * w2) - (X1 * s3 * t9 * t10 * w2) - (Y1 * s1 * t9 * t10 * w3) - (Z1 * s2 * t9 * t10 * w1)
// - (X1 * s2 * t12 * w1 * w2) - (X1 * s3 * t12 * w1 * w3) - (Y1 * s1 * t12 * w1 * w2)
// - (Y1 * s3 * t12 * w2 * w3) - (Z1 * s1 * t12 * w1 * w3) - (Z1 * s2 * t12 * w2 * w3)
double t167 =
t140 + t141 + t142 + t143 + t144 + t145 + t146 + t147 + t148 + t149 + t150 + t151 + t152 + t153 + t154 +
t155 + t156 + t157 + t158 + t159 + t160 + t161 + t162 + t163 + t164 + t165 + t166 - t183 - t184 - t185 -
t186 - t187 - t188 - t189 - t190 - t191;
// t168 = t13 * t26 * t45 * w2 * 2.0 = 2*G*w2*(w1^2+w3^2)
double t168 = t13 * t26 * t45 * w2 * 2.0;
// t169 = t10 * t25 * t45 * w2 = F*w2*(w1^2+w3^2)
double t169 = t10 * t25 * t45 * w2;
// t170 = t168 + t169 = (2*G+F)*w2*(w1^2+w3^2)
double t170 = t168 + t169;
// t171 = t13 * t26 * t34 * w2 * 2.0 = 2*G*w2*(w1^2+w2^2)
double t171 = t13 * t26 * t34 * w2 * 2.0;
// t172 = t10 * t25 * t34 * w2 = F*w2*(w1^2+w2^2)
double t172 = t10 * t25 * t34 * w2;
// t176 = t13 * t14 * w2 * 2.0 = 2*B*w2
double t176 = t13 * t14 * w2 * 2.0;
// t173 = t171 + t172 - t176 = (2*G+F)*w2*(w1^2+w2^2) - 2*B*w2
double t173 = t171 + t172 - t176;
// t174 = -t49 + t51 + t52 + t100 - t111 = -A+2*G*w1*w2*w3+F*w1*w2*w3+F*w2^2-E*w2^2
double t174 = -t49 + t51 + t52 + t100 - t111;
// t175 = X1 * t174 = (-A+2*G*w1*w2*w3+F*w1*w2*w3+F*w2^2-E*w2^2)*X1
double t175 = X1 * t174;
// t177 = t13 * t24 * t26 * w2 * 2.0 = 2*w2*G*(w2^2+w3^2)
double t177 = t13 * t24 * t26 * w2 * 2.0;
// t178 = t10 * t24 * t25 * w2 = F*w2*(w2^2+w3^2)
double t178 = t10 * t24 * t25 * w2;
// t192 = t13 * t14 * w1 = B*w1
double t192 = t13 * t14 * w1;
// t179 = -t97 + t98 + t99 + t112 - t192 = -E*w2*w3+F*w2^2*w1+2*G*w2^2*w1+F*w2*w3-B*w1
double t179 = -t97 + t98 + t99 + t112 - t192;
// t180 = (-E*w2*w3+F*w2^2*w1+2*G*w2^2*w1+F*w2*w3-B*w1)*Y1
double t180 = Y1 * t179;
// t181 = A+2*G*w1*w2*w3+F*w1*w2*w3-F*w2^2+E*w2^2
double t181 = t49 + t51 + t52 - t100 + t111;
// t182 = (A+2*G*w1*w2*w3+F*w1*w2*w3-F*w2^2+E*w2^2)*Z1
double t182 = Z1 * t181;
// t193 = 2*G*w3*(w2^2+w3^2)
double t193 = t13 * t26 * t34 * w3 * 2.0;
// t194 = F*w3*(w2^2+w3^2)
double t194 = t10 * t25 * t34 * w3;
// t195 = (2*G+F)*w3*(w2^2+w3^2)
double t195 = t193 + t194;
// t196 = 2*G*w3*(w1^2+w3^2)
double t196 = t13 * t26 * t45 * w3 * 2.0;
// t197 = F*w3*(w1^2+w3^2)
double t197 = t10 * t25 * t45 * w3;
// t200 = 2*B*w3
double t200 = t13 * t14 * w3 * 2.0;
// t198 = (2*G+F)*w3*(w1^2+w3^2) - 2*B*w3
double t198 = t196 + t197 - t200;
// t199 = F*w3^2
double t199 = t7 * t10 * t25;
// t201 = 2*w3*G*(w2^2+w3^2)
double t201 = t13 * t24 * t26 * w3 * 2.0;
// t202 = F*w3*(w2^2+w3^2)
double t202 = t10 * t24 * t25 * w3;
// t203 = -t49 + t51 + t52 - t118 + t199 = -A+2*G*w1*w2*w3+F*w1*w2*w3-E*w3^2+F*w3^2
double t203 = -t49 + t51 + t52 - t118 + t199;
// t204 = (-t49 + t51 + t52 - t118 + t199 = -A+2*G*w1*w2*w3+F*w1*w2*w3-E*w3^2+F*w3^2)*Y1
double t204 = Y1 * t203;
// t205 = 2*t1
double t205 = t1 * 2.0;
// t206 = 2*r13*Z1
double t206 = Z1 * t29 * 2.0;
// t207 = 2*r11*X1
double t207 = X1 * t32 * 2.0;
// t208 = 2*t1 + 2*r13*Z1 + 2*r11*X1 + 2*r12*Y1
double t208 = t205 + t206 + t207 - Y1 * t27 * 2.0;
// t209 = 2*t2
double t209 = t2 * 2.0;
// t210 = 2*r21*X1
double t210 = X1 * t53 * 2.0;
// t211 = 2*r22*Y1
double t211 = Y1 * t58 * 2.0;
// t212 = 2*t2 + 2*r21*X1 + 2*r22*Y1 + 2*r23*Z1
double t212 = t209 + t210 + t211 - Z1 * t55 * 2.0;
double t213 = t3 * 2.0;
double t214 = Y1 * t40 * 2.0;
double t215 = Z1 * t43 * 2.0;
double t216 = t213 + t214 + t215 - X1 * t38 * 2.0;
jacs(0, 0) = t14*t65*(X1*r1*w1*2.0+X1*r2*w2+X1*r3*w3+Y1*r1*w2+Z1*r1*w3+r1*t1*w1*2.0+r2*t2*w1*2.0+r3*t3*w1*2.0+Y1*r3*t5*t12+Y1*r3*t9*t10-Z1*r2*t5*t12-Z1*r2*t9*t10-X1*r2*t12*w2-X1*r3*t12*w3-Y1*r1*t12*w2+Y1*r2*t12*w1*2.0-Z1*r1*t12*w3+Z1*r3*t12*w1*2.0+Y1*r3*t5*t10*t11-Z1*r2*t5*t10*t11+X1*r2*t12*w1*w3-X1*r3*t12*w1*w2-Y1*r1*t12*w1*w3+Z1*r1*t12*w1*w2-Y1*r1*t10*t11*w1*w3+Z1*r1*t10*t11*w1*w2-X1*r1*t6*t10*t11*w1-X1*r1*t7*t10*t11*w1+X1*r2*t5*t10*t11*w2+X1*r3*t5*t10*t11*w3+Y1*r1*t5*t10*t11*w2-Y1*r2*t5*t10*t11*w1-Y1*r2*t7*t10*t11*w1+Z1*r1*t5*t10*t11*w3-Z1*r3*t5*t10*t11*w1-Z1*r3*t6*t10*t11*w1+X1*r2*t10*t11*w1*w3-X1*r3*t10*t11*w1*w2+Y1*r3*t10*t11*w1*w2*w3+Z1*r2*t10*t11*w1*w2*w3)-t26*t65*t93*w1*2.0-t14*t93*t101*(t130+t15*(-X1*t121+Y1*(t46+t47+t48-t13*t14*w2-t12*t14*w1*w3)+Z1*(t35+t36+t37-t13*t14*w3-t10*t25*w1*w2))*2.0+t18*(t135+t137-Y1*(t132+t133-t13*t14*w1*2.0))*2.0)*(1.0/2.0);
jacs(0, 1) = t14*t65*(X1*r2*w1+Y1*r1*w1+Y1*r2*w2*2.0+Y1*r3*w3+Z1*r2*w3+r1*t1*w2*2.0+r2*t2*w2*2.0+r3*t3*w2*2.0-X1*r3*t6*t12-X1*r3*t9*t10+Z1*r1*t6*t12+Z1*r1*t9*t10+X1*r1*t12*w2*2.0-X1*r2*t12*w1-Y1*r1*t12*w1-Y1*r3*t12*w3-Z1*r2*t12*w3+Z1*r3*t12*w2*2.0-X1*r3*t6*t10*t11+Z1*r1*t6*t10*t11+X1*r2*t12*w2*w3-Y1*r1*t12*w2*w3+Y1*r3*t12*w1*w2-Z1*r2*t12*w1*w2-Y1*r1*t10*t11*w2*w3+Y1*r3*t10*t11*w1*w2-Z1*r2*t10*t11*w1*w2-X1*r1*t6*t10*t11*w2+X1*r2*t6*t10*t11*w1-X1*r1*t7*t10*t11*w2+Y1*r1*t6*t10*t11*w1-Y1*r2*t5*t10*t11*w2-Y1*r2*t7*t10*t11*w2+Y1*r3*t6*t10*t11*w3-Z1*r3*t5*t10*t11*w2+Z1*r2*t6*t10*t11*w3-Z1*r3*t6*t10*t11*w2+X1*r2*t10*t11*w2*w3+X1*r3*t10*t11*w1*w2*w3+Z1*r1*t10*t11*w1*w2*w3)-t26*t65*t93*w2*2.0-t14*t93*t101*(t18*(Z1*(-t35+t94+t95+t96-t13*t14*w3)-Y1*t170+X1*(t97+t98+t99-t13*t14*w1-t10*t25*w2*w3))*2.0+t15*(t180+t182-X1*(t177+t178-t13*t14*w2*2.0))*2.0+t23*(t175+Y1*(t35-t94+t95+t96-t13*t14*w3)-Z1*t173)*2.0)*(1.0/2.0);
jacs(0, 2) = t14*t65*(X1*r3*w1+Y1*r3*w2+Z1*r1*w1+Z1*r2*w2+Z1*r3*w3*2.0+r1*t1*w3*2.0+r2*t2*w3*2.0+r3*t3*w3*2.0+X1*r2*t7*t12+X1*r2*t9*t10-Y1*r1*t7*t12-Y1*r1*t9*t10+X1*r1*t12*w3*2.0-X1*r3*t12*w1+Y1*r2*t12*w3*2.0-Y1*r3*t12*w2-Z1*r1*t12*w1-Z1*r2*t12*w2+X1*r2*t7*t10*t11-Y1*r1*t7*t10*t11-X1*r3*t12*w2*w3+Y1*r3*t12*w1*w3+Z1*r1*t12*w2*w3-Z1*r2*t12*w1*w3+Y1*r3*t10*t11*w1*w3+Z1*r1*t10*t11*w2*w3-Z1*r2*t10*t11*w1*w3-X1*r1*t6*t10*t11*w3-X1*r1*t7*t10*t11*w3+X1*r3*t7*t10*t11*w1-Y1*r2*t5*t10*t11*w3-Y1*r2*t7*t10*t11*w3+Y1*r3*t7*t10*t11*w2+Z1*r1*t7*t10*t11*w1+Z1*r2*t7*t10*t11*w2-Z1*r3*t5*t10*t11*w3-Z1*r3*t6*t10*t11*w3-X1*r3*t10*t11*w2*w3+X1*r2*t10*t11*w1*w2*w3+Y1*r1*t10*t11*w1*w2*w3)-t26*t65*t93*w3*2.0-t14*t93*t101*(t18*(Z1*(t46-t113+t114+t115-t13*t14*w2)-Y1*t198+X1*(t49+t51+t52+t118-t7*t10*t25))*2.0+t23*(X1*(-t97+t112+t116+t117-t13*t14*w1)+Y1*(-t46+t113+t114+t115-t13*t14*w2)-Z1*t195)*2.0+t15*(t204+Z1*(t97-t112+t116+t117-t13*t14*w1)-X1*(t201+t202-t13*t14*w3*2.0))*2.0)*(1.0/2.0);
// NOTE(review): the reformatted assignments below recompute the same jacs(0,0..2)
// values as the compact expressions above — in the original commit the compact
// lines were removed; here both appear because this is a diff rendering.
jacs(0, 0) = t14 * t65 * (X1 * r1 * w1 * 2.0 + X1 * r2 * w2 + X1 * r3 * w3 + Y1 * r1 * w2 + Z1 * r1 * w3 +
r1 * t1 * w1 * 2.0 + r2 * t2 * w1 * 2.0 + r3 * t3 * w1 * 2.0 + Y1 * r3 * t5 * t12 +
Y1 * r3 * t9 * t10 - Z1 * r2 * t5 * t12 - Z1 * r2 * t9 * t10 - X1 * r2 * t12 * w2 -
X1 * r3 * t12 * w3 - Y1 * r1 * t12 * w2 + Y1 * r2 * t12 * w1 * 2.0 -
Z1 * r1 * t12 * w3 + Z1 * r3 * t12 * w1 * 2.0 + Y1 * r3 * t5 * t10 * t11 -
Z1 * r2 * t5 * t10 * t11 + X1 * r2 * t12 * w1 * w3 - X1 * r3 * t12 * w1 * w2 -
Y1 * r1 * t12 * w1 * w3 + Z1 * r1 * t12 * w1 * w2 - Y1 * r1 * t10 * t11 * w1 * w3 +
Z1 * r1 * t10 * t11 * w1 * w2 - X1 * r1 * t6 * t10 * t11 * w1 -
X1 * r1 * t7 * t10 * t11 * w1 + X1 * r2 * t5 * t10 * t11 * w2 +
X1 * r3 * t5 * t10 * t11 * w3 + Y1 * r1 * t5 * t10 * t11 * w2 -
Y1 * r2 * t5 * t10 * t11 * w1 - Y1 * r2 * t7 * t10 * t11 * w1 +
Z1 * r1 * t5 * t10 * t11 * w3 - Z1 * r3 * t5 * t10 * t11 * w1 -
Z1 * r3 * t6 * t10 * t11 * w1 + X1 * r2 * t10 * t11 * w1 * w3 -
X1 * r3 * t10 * t11 * w1 * w2 + Y1 * r3 * t10 * t11 * w1 * w2 * w3 +
Z1 * r2 * t10 * t11 * w1 * w2 * w3)
- t26 * t65 * t93 * w1 * 2.0
- t14 * t93 * t101 *(t130 + t15 *(-X1 * t121 + Y1 * (t46 +t47 +t48 -t13 *t14 *w2 -t12 *t14 *w1 *w3) +Z1 *(t35 +t36 +t37 -t13 *t14 *w3 -t10 *t25 *w1 *w2)) *2.0
+ t18 *(t135+ t137 - Y1 *(t132 +t133 -t13 *t14 *w1 *2.0)) *2.0) *(1.0 / 2.0);
jacs(0, 1) = t14 * t65 * (X1 * r2 * w1 + Y1 * r1 * w1 + Y1 * r2 * w2 * 2.0 + Y1 * r3 * w3 + Z1 * r2 * w3 +
r1 * t1 * w2 * 2.0 + r2 * t2 * w2 * 2.0 + r3 * t3 * w2 * 2.0 - X1 * r3 * t6 * t12 -
X1 * r3 * t9 * t10 + Z1 * r1 * t6 * t12 + Z1 * r1 * t9 * t10 +
X1 * r1 * t12 * w2 * 2.0 - X1 * r2 * t12 * w1 - Y1 * r1 * t12 * w1 -
Y1 * r3 * t12 * w3 - Z1 * r2 * t12 * w3 + Z1 * r3 * t12 * w2 * 2.0 -
X1 * r3 * t6 * t10 * t11 + Z1 * r1 * t6 * t10 * t11 + X1 * r2 * t12 * w2 * w3 -
Y1 * r1 * t12 * w2 * w3 + Y1 * r3 * t12 * w1 * w2 - Z1 * r2 * t12 * w1 * w2 -
Y1 * r1 * t10 * t11 * w2 * w3 + Y1 * r3 * t10 * t11 * w1 * w2 -
Z1 * r2 * t10 * t11 * w1 * w2 - X1 * r1 * t6 * t10 * t11 * w2 +
X1 * r2 * t6 * t10 * t11 * w1 - X1 * r1 * t7 * t10 * t11 * w2 +
Y1 * r1 * t6 * t10 * t11 * w1 - Y1 * r2 * t5 * t10 * t11 * w2 -
Y1 * r2 * t7 * t10 * t11 * w2 + Y1 * r3 * t6 * t10 * t11 * w3 -
Z1 * r3 * t5 * t10 * t11 * w2 + Z1 * r2 * t6 * t10 * t11 * w3 -
Z1 * r3 * t6 * t10 * t11 * w2 + X1 * r2 * t10 * t11 * w2 * w3 +
X1 * r3 * t10 * t11 * w1 * w2 * w3 + Z1 * r1 * t10 * t11 * w1 * w2 * w3) -
t26 * t65 * t93 * w2 * 2.0 - t14 * t93 * t101 * (t18 *
(Z1 * (-t35 + t94 + t95 + t96 - t13 * t14 * w3) -
Y1 * t170 + X1 *
(t97 + t98 + t99 - t13 * t14 * w1 -
t10 * t25 * w2 * w3)) * 2.0 + t15 *
(t180 +
t182 -
X1 *
(t177 +
t178 -
t13 *
t14 *
w2 *
2.0)) *
2.0 +
t23 * (t175 + Y1 * (t35 - t94 + t95 + t96 -
t13 * t14 * w3) - Z1 * t173) *
2.0) * (1.0 / 2.0);
jacs(0, 2) = t14 * t65 * (X1 * r3 * w1 + Y1 * r3 * w2 + Z1 * r1 * w1 + Z1 * r2 * w2 + Z1 * r3 * w3 * 2.0 +
r1 * t1 * w3 * 2.0 + r2 * t2 * w3 * 2.0 + r3 * t3 * w3 * 2.0 + X1 * r2 * t7 * t12 +
X1 * r2 * t9 * t10 - Y1 * r1 * t7 * t12 - Y1 * r1 * t9 * t10 +
X1 * r1 * t12 * w3 * 2.0 - X1 * r3 * t12 * w1 + Y1 * r2 * t12 * w3 * 2.0 -
Y1 * r3 * t12 * w2 - Z1 * r1 * t12 * w1 - Z1 * r2 * t12 * w2 +
X1 * r2 * t7 * t10 * t11 - Y1 * r1 * t7 * t10 * t11 - X1 * r3 * t12 * w2 * w3 +
Y1 * r3 * t12 * w1 * w3 + Z1 * r1 * t12 * w2 * w3 - Z1 * r2 * t12 * w1 * w3 +
Y1 * r3 * t10 * t11 * w1 * w3 + Z1 * r1 * t10 * t11 * w2 * w3 -
Z1 * r2 * t10 * t11 * w1 * w3 - X1 * r1 * t6 * t10 * t11 * w3 -
X1 * r1 * t7 * t10 * t11 * w3 + X1 * r3 * t7 * t10 * t11 * w1 -
Y1 * r2 * t5 * t10 * t11 * w3 - Y1 * r2 * t7 * t10 * t11 * w3 +
Y1 * r3 * t7 * t10 * t11 * w2 + Z1 * r1 * t7 * t10 * t11 * w1 +
Z1 * r2 * t7 * t10 * t11 * w2 - Z1 * r3 * t5 * t10 * t11 * w3 -
Z1 * r3 * t6 * t10 * t11 * w3 - X1 * r3 * t10 * t11 * w2 * w3 +
X1 * r2 * t10 * t11 * w1 * w2 * w3 + Y1 * r1 * t10 * t11 * w1 * w2 * w3) -
t26 * t65 * t93 * w3 * 2.0 - t14 * t93 * t101 * (t18 * (Z1 * (t46 - t113 + t114 + t115 -
t13 * t14 * w2) - Y1 * t198 + X1 *
(t49 +
t51 +
t52 +
t118 -
t7 *
t10 *
t25)) *
2.0 + t23 * (X1 * (-t97 + t112 + t116 + t117 -
t13 * t14 * w1) + Y1 * (-t46 +
t113 +
t114 +
t115 -
t13 *
t14 *
w2) -
Z1 * t195) * 2.0 + t15 * (t204 + Z1 *
(t97 -
t112 +
t116 +
t117 -
t13 *
t14 *
w1) -
X1 *
(t201 +
t202 -
t13 *
t14 * w3 *
2.0)) *
2.0) *
(1.0 / 2.0);
// = 1/2 * r1 * t65
// - t14 * t93
// * t101 * t208
// = 1/2 * r1 * 1 / sqrt((t1 + r11*X1 + r12*Y1 + r13*Z1)^2 + (t2 + r21*X1 + r22*Y1 + r23*Z1)^2 + (t3 + r31*X1 + r32*Y1 + r33*Z1)^2)
// - 1/theta^2 * ((Y1 * r2 * w2^2) + (Z1 * r3 * w3^2) + (r1 * t1 * w1^2) + (r1 * t1 * w2^2)
// + (r1 * t1 * w3^2) + (r2 * t2 * w1^2) + (r2 * t2 * w2^2) + (r2 * t2 * w3^2) + (r3 * t3 * w1^2)
// + (r3 * t3 * w2^2) + (r3 * t3 * w3^2) + (X1 * r1 * w1^2) + (X1 * r2 * w1 * w2) + (X1 * r3 * w1 * w3)
// + (Y1 * r1 * w1 * w2) + (Y1 * r3 * w2 * w3) + (Z1 * r1 * w1 * w3) + (Z1 * r2 * w2 * w3) + (X1 * r1 * w2^2 * cos(theta))
// + (X1 * r1 * w3^2 * cos(theta)) + (Y1 * r2 * w1^2 * cos(theta)) + (Y1 * r2 * w3^2 * cos(theta)) + (Z1 * r3 * w1^2 * cos(theta)) + (Z1 * r3 * w2^2 * cos(theta))
// + (X1 * r2 * theta * sin(theta) * w3) + (Y1 * r3 * theta * sin(theta) * w1) + (Z1 * r1 * theta * sin(theta) * w2) - (X1 * r3 * theta * sin(theta) * w2) - (Y1 * r1 * theta * sin(theta) * w3)
// - (Z1 * r2 * theta * sin(theta) * w1) - (X1 * r2 * cos(theta) * w1 * w2) - (X1 * r3 * cos(theta) * w1 * w3) - (Y1 * r1 * cos(theta) * w1 * w2) - ()
// - (Y1 * r3 * cos(theta) * w2 * w3) - (Z1 * r1 * cos(theta) * w1 * w3) - (Z1 * r2 * cos(theta) * w2 * w3))
// * (pow(((t1 + r11*X1 + r12*Y1 + r13*Z1)^2 + (t2 + r21*X1 + r22*Y1 + r23*Z1)^2 + (t3 + r31*X1 + r32*Y1 + r33*Z1)^2), 3/2))
// * (2*t1 + 2*r11*X1 + 2*r12*Y1 + 2*r13*Z1)
jacs(0, 3) = r1 * t65 - t14 * t93 * t101 * t208 * (1.0 / 2.0);
jacs(0, 4) = r2 * t65 - t14 * t93 * t101 * t212 * (1.0 / 2.0);
jacs(0, 5) = r3 * t65 - t14 * t93 * t101 * t216 * (1.0 / 2.0);
jacs(1, 0) = t14*t65*(X1*s1*w1*2.0+X1*s2*w2+X1*s3*w3+Y1*s1*w2+Z1*s1*w3+s1*t1*w1*2.0+s2*t2*w1*2.0+s3*t3*w1*2.0+Y1*s3*t5*t12+Y1*s3*t9*t10-Z1*s2*t5*t12-Z1*s2*t9*t10-X1*s2*t12*w2-X1*s3*t12*w3-Y1*s1*t12*w2+Y1*s2*t12*w1*2.0-Z1*s1*t12*w3+Z1*s3*t12*w1*2.0+Y1*s3*t5*t10*t11-Z1*s2*t5*t10*t11+X1*s2*t12*w1*w3-X1*s3*t12*w1*w2-Y1*s1*t12*w1*w3+Z1*s1*t12*w1*w2+X1*s2*t10*t11*w1*w3-X1*s3*t10*t11*w1*w2-Y1*s1*t10*t11*w1*w3+Z1*s1*t10*t11*w1*w2-X1*s1*t6*t10*t11*w1-X1*s1*t7*t10*t11*w1+X1*s2*t5*t10*t11*w2+X1*s3*t5*t10*t11*w3+Y1*s1*t5*t10*t11*w2-Y1*s2*t5*t10*t11*w1-Y1*s2*t7*t10*t11*w1+Z1*s1*t5*t10*t11*w3-Z1*s3*t5*t10*t11*w1-Z1*s3*t6*t10*t11*w1+Y1*s3*t10*t11*w1*w2*w3+Z1*s2*t10*t11*w1*w2*w3)-t14*t101*t167*(t130+t15*(Y1*(t46+t47+t48-t113-t138)+Z1*(t35+t36+t37-t94-t139)-X1*t121)*2.0+t18*(t135+t137-Y1*(-t131+t132+t133))*2.0)*(1.0/2.0)-t26*t65*t167*w1*2.0;
jacs(1, 1) = t14*t65*(X1*s2*w1+Y1*s1*w1+Y1*s2*w2*2.0+Y1*s3*w3+Z1*s2*w3+s1*t1*w2*2.0+s2*t2*w2*2.0+s3*t3*w2*2.0-X1*s3*t6*t12-X1*s3*t9*t10+Z1*s1*t6*t12+Z1*s1*t9*t10+X1*s1*t12*w2*2.0-X1*s2*t12*w1-Y1*s1*t12*w1-Y1*s3*t12*w3-Z1*s2*t12*w3+Z1*s3*t12*w2*2.0-X1*s3*t6*t10*t11+Z1*s1*t6*t10*t11+X1*s2*t12*w2*w3-Y1*s1*t12*w2*w3+Y1*s3*t12*w1*w2-Z1*s2*t12*w1*w2+X1*s2*t10*t11*w2*w3-Y1*s1*t10*t11*w2*w3+Y1*s3*t10*t11*w1*w2-Z1*s2*t10*t11*w1*w2-X1*s1*t6*t10*t11*w2+X1*s2*t6*t10*t11*w1-X1*s1*t7*t10*t11*w2+Y1*s1*t6*t10*t11*w1-Y1*s2*t5*t10*t11*w2-Y1*s2*t7*t10*t11*w2+Y1*s3*t6*t10*t11*w3-Z1*s3*t5*t10*t11*w2+Z1*s2*t6*t10*t11*w3-Z1*s3*t6*t10*t11*w2+X1*s3*t10*t11*w1*w2*w3+Z1*s1*t10*t11*w1*w2*w3)-t26*t65*t167*w2*2.0-t14*t101*t167*(t18*(X1*(t97+t98+t99-t112-t192)+Z1*(-t35+t94+t95+t96-t139)-Y1*t170)*2.0+t15*(t180+t182-X1*(-t176+t177+t178))*2.0+t23*(t175+Y1*(t35-t94+t95+t96-t139)-Z1*t173)*2.0)*(1.0/2.0);
jacs(1, 2) = t14*t65*(X1*s3*w1+Y1*s3*w2+Z1*s1*w1+Z1*s2*w2+Z1*s3*w3*2.0+s1*t1*w3*2.0+s2*t2*w3*2.0+s3*t3*w3*2.0+X1*s2*t7*t12+X1*s2*t9*t10-Y1*s1*t7*t12-Y1*s1*t9*t10+X1*s1*t12*w3*2.0-X1*s3*t12*w1+Y1*s2*t12*w3*2.0-Y1*s3*t12*w2-Z1*s1*t12*w1-Z1*s2*t12*w2+X1*s2*t7*t10*t11-Y1*s1*t7*t10*t11-X1*s3*t12*w2*w3+Y1*s3*t12*w1*w3+Z1*s1*t12*w2*w3-Z1*s2*t12*w1*w3-X1*s3*t10*t11*w2*w3+Y1*s3*t10*t11*w1*w3+Z1*s1*t10*t11*w2*w3-Z1*s2*t10*t11*w1*w3-X1*s1*t6*t10*t11*w3-X1*s1*t7*t10*t11*w3+X1*s3*t7*t10*t11*w1-Y1*s2*t5*t10*t11*w3-Y1*s2*t7*t10*t11*w3+Y1*s3*t7*t10*t11*w2+Z1*s1*t7*t10*t11*w1+Z1*s2*t7*t10*t11*w2-Z1*s3*t5*t10*t11*w3-Z1*s3*t6*t10*t11*w3+X1*s2*t10*t11*w1*w2*w3+Y1*s1*t10*t11*w1*w2*w3)-t26*t65*t167*w3*2.0-t14*t101*t167*(t18*(Z1*(t46-t113+t114+t115-t138)-Y1*t198+X1*(t49+t51+t52+t118-t199))*2.0+t23*(X1*(-t97+t112+t116+t117-t192)+Y1*(-t46+t113+t114+t115-t138)-Z1*t195)*2.0+t15*(t204+Z1*(t97-t112+t116+t117-t192)-X1*(-t200+t201+t202))*2.0)*(1.0/2.0);
jacs(1, 0) = t14 * t65 * (X1 * s1 * w1 * 2.0 + X1 * s2 * w2 + X1 * s3 * w3 + Y1 * s1 * w2 + Z1 * s1 * w3 +
s1 * t1 * w1 * 2.0 + s2 * t2 * w1 * 2.0 + s3 * t3 * w1 * 2.0 + Y1 * s3 * t5 * t12 +
Y1 * s3 * t9 * t10 - Z1 * s2 * t5 * t12 - Z1 * s2 * t9 * t10 - X1 * s2 * t12 * w2 -
X1 * s3 * t12 * w3 - Y1 * s1 * t12 * w2 + Y1 * s2 * t12 * w1 * 2.0 -
Z1 * s1 * t12 * w3 + Z1 * s3 * t12 * w1 * 2.0 + Y1 * s3 * t5 * t10 * t11 -
Z1 * s2 * t5 * t10 * t11 + X1 * s2 * t12 * w1 * w3 - X1 * s3 * t12 * w1 * w2 -
Y1 * s1 * t12 * w1 * w3 + Z1 * s1 * t12 * w1 * w2 + X1 * s2 * t10 * t11 * w1 * w3 -
X1 * s3 * t10 * t11 * w1 * w2 - Y1 * s1 * t10 * t11 * w1 * w3 +
Z1 * s1 * t10 * t11 * w1 * w2 - X1 * s1 * t6 * t10 * t11 * w1 -
X1 * s1 * t7 * t10 * t11 * w1 + X1 * s2 * t5 * t10 * t11 * w2 +
X1 * s3 * t5 * t10 * t11 * w3 + Y1 * s1 * t5 * t10 * t11 * w2 -
Y1 * s2 * t5 * t10 * t11 * w1 - Y1 * s2 * t7 * t10 * t11 * w1 +
Z1 * s1 * t5 * t10 * t11 * w3 - Z1 * s3 * t5 * t10 * t11 * w1 -
Z1 * s3 * t6 * t10 * t11 * w1 + Y1 * s3 * t10 * t11 * w1 * w2 * w3 +
Z1 * s2 * t10 * t11 * w1 * w2 * w3) - t14 * t101 * t167 * (t130 + t15 * (Y1 *
(t46 + t47 +
t48 - t113 -
t138) + Z1 *
(t35 +
t36 +
t37 -
t94 -
t139) -
X1 * t121) *
2.0 + t18 *
(t135 + t137 -
Y1 * (-t131 +
t132 +
t133)) *
2.0) *
(1.0 / 2.0) - t26 * t65 * t167 * w1 * 2.0;
jacs(1, 1) = t14 * t65 * (X1 * s2 * w1 + Y1 * s1 * w1 + Y1 * s2 * w2 * 2.0 + Y1 * s3 * w3 + Z1 * s2 * w3 +
s1 * t1 * w2 * 2.0 + s2 * t2 * w2 * 2.0 + s3 * t3 * w2 * 2.0 - X1 * s3 * t6 * t12 -
X1 * s3 * t9 * t10 + Z1 * s1 * t6 * t12 + Z1 * s1 * t9 * t10 +
X1 * s1 * t12 * w2 * 2.0 - X1 * s2 * t12 * w1 - Y1 * s1 * t12 * w1 -
Y1 * s3 * t12 * w3 - Z1 * s2 * t12 * w3 + Z1 * s3 * t12 * w2 * 2.0 -
X1 * s3 * t6 * t10 * t11 + Z1 * s1 * t6 * t10 * t11 + X1 * s2 * t12 * w2 * w3 -
Y1 * s1 * t12 * w2 * w3 + Y1 * s3 * t12 * w1 * w2 - Z1 * s2 * t12 * w1 * w2 +
X1 * s2 * t10 * t11 * w2 * w3 - Y1 * s1 * t10 * t11 * w2 * w3 +
Y1 * s3 * t10 * t11 * w1 * w2 - Z1 * s2 * t10 * t11 * w1 * w2 -
X1 * s1 * t6 * t10 * t11 * w2 + X1 * s2 * t6 * t10 * t11 * w1 -
X1 * s1 * t7 * t10 * t11 * w2 + Y1 * s1 * t6 * t10 * t11 * w1 -
Y1 * s2 * t5 * t10 * t11 * w2 - Y1 * s2 * t7 * t10 * t11 * w2 +
Y1 * s3 * t6 * t10 * t11 * w3 - Z1 * s3 * t5 * t10 * t11 * w2 +
Z1 * s2 * t6 * t10 * t11 * w3 - Z1 * s3 * t6 * t10 * t11 * w2 +
X1 * s3 * t10 * t11 * w1 * w2 * w3 + Z1 * s1 * t10 * t11 * w1 * w2 * w3) -
t26 * t65 * t167 * w2 * 2.0 - t14 * t101 * t167 * (t18 * (X1 * (t97 + t98 + t99 - t112 - t192) +
Z1 * (-t35 + t94 + t95 + t96 - t139) -
Y1 * t170) * 2.0 + t15 * (t180 + t182 -
X1 *
(-t176 + t177 +
t178)) * 2.0 +
t23 *
(t175 + Y1 * (t35 - t94 + t95 + t96 - t139) -
Z1 * t173) * 2.0) * (1.0 / 2.0);
jacs(1, 2) = t14 * t65 * (X1 * s3 * w1 + Y1 * s3 * w2 + Z1 * s1 * w1 + Z1 * s2 * w2 + Z1 * s3 * w3 * 2.0 +
s1 * t1 * w3 * 2.0 + s2 * t2 * w3 * 2.0 + s3 * t3 * w3 * 2.0 + X1 * s2 * t7 * t12 +
X1 * s2 * t9 * t10 - Y1 * s1 * t7 * t12 - Y1 * s1 * t9 * t10 +
X1 * s1 * t12 * w3 * 2.0 - X1 * s3 * t12 * w1 + Y1 * s2 * t12 * w3 * 2.0 -
Y1 * s3 * t12 * w2 - Z1 * s1 * t12 * w1 - Z1 * s2 * t12 * w2 +
X1 * s2 * t7 * t10 * t11 - Y1 * s1 * t7 * t10 * t11 - X1 * s3 * t12 * w2 * w3 +
Y1 * s3 * t12 * w1 * w3 + Z1 * s1 * t12 * w2 * w3 - Z1 * s2 * t12 * w1 * w3 -
X1 * s3 * t10 * t11 * w2 * w3 + Y1 * s3 * t10 * t11 * w1 * w3 +
Z1 * s1 * t10 * t11 * w2 * w3 - Z1 * s2 * t10 * t11 * w1 * w3 -
X1 * s1 * t6 * t10 * t11 * w3 - X1 * s1 * t7 * t10 * t11 * w3 +
X1 * s3 * t7 * t10 * t11 * w1 - Y1 * s2 * t5 * t10 * t11 * w3 -
Y1 * s2 * t7 * t10 * t11 * w3 + Y1 * s3 * t7 * t10 * t11 * w2 +
Z1 * s1 * t7 * t10 * t11 * w1 + Z1 * s2 * t7 * t10 * t11 * w2 -
Z1 * s3 * t5 * t10 * t11 * w3 - Z1 * s3 * t6 * t10 * t11 * w3 +
X1 * s2 * t10 * t11 * w1 * w2 * w3 + Y1 * s1 * t10 * t11 * w1 * w2 * w3) -
t26 * t65 * t167 * w3 * 2.0 - t14 * t101 * t167 * (t18 * (Z1 * (t46 - t113 + t114 + t115 - t138) -
Y1 * t198 +
X1 * (t49 + t51 + t52 + t118 - t199)) *
2.0 + t23 *
(X1 * (-t97 + t112 + t116 + t117 - t192) +
Y1 * (-t46 + t113 + t114 + t115 - t138) -
Z1 * t195) * 2.0 + t15 * (t204 + Z1 *
(t97 -
t112 +
t116 +
t117 -
t192) -
X1 *
(-t200 + t201 +
t202)) *
2.0) * (1.0 / 2.0);
jacs(1, 3) = s1 * t65 - t14 * t101 * t167 * t208 * (1.0 / 2.0);
jacs(1, 4) = s2 * t65 - t14 * t101 * t167 * t212 * (1.0 / 2.0);
jacs(1, 5) = s3 * t65 - t14 * t101 * t167 * t216 * (1.0 / 2.0);
}
}//End namespace ORB_SLAM2
}//End namespace ORB_SLAM3

View File

@ -27,7 +27,9 @@ namespace ORB_SLAM3
long unsigned int MapPoint::nNextId=0;
mutex MapPoint::mGlobalMutex;
MapPoint::MapPoint():
/**
* @brief
*/
mnFirstKFid(0), mnFirstFrame(0), nObs(0), mnTrackReferenceForFrame(0),
mnLastFrameSeen(0), mnBALocalForKF(0), mnFuseCandidateForKF(0), mnLoopPointForKF(0), mnCorrectedByKF(0),
mnCorrectedReference(0), mnBAGlobalForKF(0), mnVisible(1), mnFound(1), mbBad(false),
@ -36,6 +38,9 @@ MapPoint::MapPoint():
mpReplaced = static_cast<MapPoint*>(NULL);
}
/**
* @brief
*/
MapPoint::MapPoint(const cv::Mat &Pos, KeyFrame *pRefKF, Map* pMap):
mnFirstKFid(pRefKF->mnId), mnFirstFrame(pRefKF->mnFrameId), nObs(0), mnTrackReferenceForFrame(0),
mnLastFrameSeen(0), mnBALocalForKF(0), mnFuseCandidateForKF(0), mnLoopPointForKF(0), mnCorrectedByKF(0),
@ -56,6 +61,9 @@ MapPoint::MapPoint(const cv::Mat &Pos, KeyFrame *pRefKF, Map* pMap):
mnId=nNextId++;
}
/**
* @brief
*/
MapPoint::MapPoint(const double invDepth, cv::Point2f uv_init, KeyFrame* pRefKF, KeyFrame* pHostKF, Map* pMap):
mnFirstKFid(pRefKF->mnId), mnFirstFrame(pRefKF->mnFrameId), nObs(0), mnTrackReferenceForFrame(0),
mnLastFrameSeen(0), mnBALocalForKF(0), mnFuseCandidateForKF(0), mnLoopPointForKF(0), mnCorrectedByKF(0),
@ -77,6 +85,9 @@ MapPoint::MapPoint(const double invDepth, cv::Point2f uv_init, KeyFrame* pRefKF,
mnId=nNextId++;
}
/**
* @brief
*/
MapPoint::MapPoint(const cv::Mat &Pos, Map* pMap, Frame* pFrame, const int &idxF):
mnFirstKFid(-1), mnFirstFrame(pFrame->mnId), nObs(0), mnTrackReferenceForFrame(0), mnLastFrameSeen(0),
mnBALocalForKF(0), mnFuseCandidateForKF(0),mnLoopPointForKF(0), mnCorrectedByKF(0),
@ -120,6 +131,10 @@ MapPoint::MapPoint(const cv::Mat &Pos, Map* pMap, Frame* pFrame, const int &idxF
mnId=nNextId++;
}
/**
* @brief mp
* @param Pos
*/
void MapPoint::SetWorldPos(const cv::Mat &Pos)
{
unique_lock<mutex> lock2(mGlobalMutex);
@ -128,12 +143,18 @@ void MapPoint::SetWorldPos(const cv::Mat &Pos)
mWorldPosx = cv::Matx31f(Pos.at<float>(0), Pos.at<float>(1), Pos.at<float>(2));
}
/**
* @brief mp
*/
cv::Mat MapPoint::GetWorldPos()
{
unique_lock<mutex> lock(mMutexPos);
return mWorldPos.clone();
}
/**
* @brief
*/
cv::Mat MapPoint::GetNormal()
{
unique_lock<mutex> lock(mMutexPos);
@ -242,7 +263,10 @@ std::map<KeyFrame*, std::tuple<int,int>> MapPoint::GetObservations()
return mObservations;
}
// 被观测到的相机数目,单目+1双目或RGB-D则+2
/**
* @brief
* @return nObs
*/
int MapPoint::Observations()
{
unique_lock<mutex> lock(mMutexFeatures);
@ -280,6 +304,10 @@ void MapPoint::SetBadFlag()
mpMap->EraseMapPoint(this);
}
/**
* @brief
* @return
*/
MapPoint* MapPoint::GetReplaced()
{
unique_lock<mutex> lock1(mMutexFeatures);
@ -329,9 +357,10 @@ void MapPoint::Replace(MapPoint* pMP)
tuple<int,int> indexes = mit -> second;
int leftIndex = get<0>(indexes), rightIndex = get<1>(indexes);
// 2.1 判断新点是否已经在pKF里面
if(!pMP->IsInKeyFrame(pKF))
{
// 如果不在替换特征点与mp的匹配关系
if(leftIndex != -1){
pKF->ReplaceMapPointMatch(leftIndex, pMP);
pMP->AddObservation(pKF,leftIndex);
@ -341,6 +370,8 @@ void MapPoint::Replace(MapPoint* pMP)
pMP->AddObservation(pKF,rightIndex);
}
}
// 如果新的MP在之前MP对应的关键帧里面就撞车了。
// 本来目的想新旧MP融为一个这样以来一个点有可能对应两个特征点这样是决不允许的所以删除旧的不动新的
else
{
if(leftIndex != -1){
@ -361,6 +392,9 @@ void MapPoint::Replace(MapPoint* pMP)
mpMap->EraseMapPoint(this);
}
/**
* @brief bad
*/
bool MapPoint::isBad()
{
unique_lock<mutex> lock1(mMutexFeatures,std::defer_lock);
@ -370,19 +404,37 @@ bool MapPoint::isBad()
return mbBad;
}
/**
* @brief Increase Visible
*
* Visible
* 1. MapPointFrame::isInFrustum()
* 2. MapPoint
* MapPointMF
* MF
*/
void MapPoint::IncreaseVisible(int n)
{
unique_lock<mutex> lock(mMutexFeatures);
mnVisible+=n;
}
/**
* @brief Increase Found
*
* +nn1
* @see Tracking::TrackLocalMap()
*/
void MapPoint::IncreaseFound(int n)
{
unique_lock<mutex> lock(mMutexFeatures);
mnFound+=n;
}
// 计算被找到的比例
/**
* @brief /
* @return /
*/
float MapPoint::GetFoundRatio()
{
unique_lock<mutex> lock(mMutexFeatures);
@ -478,18 +530,25 @@ void MapPoint::ComputeDistinctiveDescriptors()
{
unique_lock<mutex> lock(mMutexFeatures);
// 最好的描述子,该描述子相对于其他描述子有最小的距离中值
// 简化来讲,中值代表了这个描述子到其它描述子的平均距离
// 最好的描述子就是和其它描述子的平均距离最小
mDescriptor = vDescriptors[BestIdx].clone();
}
}
//获取当前地图点的描述子
/**
* @brief
*/
cv::Mat MapPoint::GetDescriptor()
{
unique_lock<mutex> lock(mMutexFeatures);
return mDescriptor.clone();
}
//获取当前地图点在某个关键帧的观测中对应的特征点的ID
/**
* @brief id
*/
tuple<int,int> MapPoint::GetIndexInKeyFrame(KeyFrame *pKF)
{
unique_lock<mutex> lock(mMutexFeatures);
@ -499,6 +558,9 @@ tuple<int,int> MapPoint::GetIndexInKeyFrame(KeyFrame *pKF)
return tuple<int,int>(-1,-1);
}
/**
* @brief return (mObservations.count(pKF));
*/
bool MapPoint::IsInKeyFrame(KeyFrame *pKF)
{
unique_lock<mutex> lock(mMutexFeatures);
@ -589,6 +651,10 @@ void MapPoint::UpdateNormalAndDepth()
}
}
/**
* @brief
* @param normal
*/
void MapPoint::SetNormalVector(cv::Mat& normal)
{
unique_lock<mutex> lock3(mMutexPos);
@ -596,12 +662,18 @@ void MapPoint::SetNormalVector(cv::Mat& normal)
mNormalVectorx = cv::Matx31f(mNormalVector.at<float>(0), mNormalVector.at<float>(1), mNormalVector.at<float>(2));
}
/**
* @brief
*/
float MapPoint::GetMinDistanceInvariance()
{
unique_lock<mutex> lock(mMutexPos);
return 0.8f*mfMinDistance;
}
/**
* @brief
*/
float MapPoint::GetMaxDistanceInvariance()
{
unique_lock<mutex> lock(mMutexPos);

View File

@ -1640,7 +1640,7 @@ static void computeDescriptors(const Mat& image, vector<KeyPoint>& keypoints, Ma
// 特征点本身直接乘缩放倍数就可以了
keypoint->pt *= scale;
}
// TODO(review): document vLappingArea — per the check below, [vLappingArea[0], vLappingArea[1]]
// is the horizontal pixel range (stereo overlap region) used to reorder keypoints for stereo matching.
if(keypoint->pt.x >= vLappingArea[0] && keypoint->pt.x <= vLappingArea[1]){
_keypoints.at(stereoIndex) = (*keypoint);
desc.row(i).copyTo(descriptors.row(stereoIndex));

View File

@ -45,7 +45,9 @@ namespace ORB_SLAM3 {
return os.good();
}
/**
* @brief 姿 _jacobianOplusXi
*/
void EdgeSE3ProjectXYZOnlyPose::linearizeOplus() {
g2o::VertexSE3Expmap * vi = static_cast<g2o::VertexSE3Expmap *>(_vertices[0]);
Eigen::Vector3d xyz_trans = vi->estimate().map(Xw);
@ -88,7 +90,11 @@ namespace ORB_SLAM3 {
return os.good();
}
/**
* @brief 姿 _jacobianOplusXi
*/
void EdgeSE3ProjectXYZOnlyPoseToBody::linearizeOplus() {
// 获得三维点在右相机坐标系下的坐标
g2o::VertexSE3Expmap * vi = static_cast<g2o::VertexSE3Expmap *>(_vertices[0]);
g2o::SE3Quat T_lw(vi->estimate());
Eigen::Vector3d X_l = T_lw.map(Xw);
@ -103,6 +109,12 @@ namespace ORB_SLAM3 {
-z_w , 0.f, x_w, 0.f, 1.f, 0.f,
y_w , -x_w , 0.f, 0.f, 0.f, 1.f;
/*
ρlw != tlw 使Pl = Rlw*Pw + tlw
Pl = EXP(ξlw)*Pw Pr = Rrl * EXP(ξlw) * Pw + trl
Pr ξlw
Rrl*(Pl ξlw)
*/
_jacobianOplusXi = -pCamera->projectJac(X_r) * mTrl.rotation().toRotationMatrix() * SE3deriv;
}
@ -135,7 +147,9 @@ namespace ORB_SLAM3 {
return os.good();
}
/**
* @brief 姿 _jacobianOplusXj _jacobianOplusXi
*/
void EdgeSE3ProjectXYZ::linearizeOplus() {
g2o::VertexSE3Expmap * vj = static_cast<g2o::VertexSE3Expmap *>(_vertices[1]);
g2o::SE3Quat T(vj->estimate());
@ -148,7 +162,7 @@ namespace ORB_SLAM3 {
double z = xyz_trans[2];
auto projectJac = -pCamera->projectJac(xyz_trans);
// Pc = Rcw*Pw + tcw 先求Pw改变对Pc的影响所以直接为Rcw前面再乘Pc对像素的影响
_jacobianOplusXi = projectJac * T.rotation().toRotationMatrix();
Eigen::Matrix<double,3,6> SE3deriv;
@ -188,7 +202,9 @@ namespace ORB_SLAM3 {
return os.good();
}
/**
* @brief 姿 _jacobianOplusXj _jacobianOplusXi
*/
void EdgeSE3ProjectXYZToBody::linearizeOplus() {
g2o::VertexSE3Expmap * vj = static_cast<g2o::VertexSE3Expmap *>(_vertices[1]);
g2o::SE3Quat T_lw(vj->estimate());
@ -208,7 +224,12 @@ namespace ORB_SLAM3 {
SE3deriv << 0.f, z, -y, 1.f, 0.f, 0.f,
-z , 0.f, x, 0.f, 1.f, 0.f,
y , -x , 0.f, 0.f, 0.f, 1.f;
/*
ρlw != tlw 使Pl = Rlw*Pw + tlw
Pl = EXP(ξlw)*Pw Pr = Rrl * EXP(ξlw) * Pw + trl
Pr ξlw
Rrl*(Pl ξlw)
*/
_jacobianOplusXj = -pCamera->projectJac(X_r) * mTrl.rotation().toRotationMatrix() * SE3deriv;
}

File diff suppressed because it is too large Load Diff

View File

@ -114,7 +114,7 @@ System::System(const string &strVocFile, //词袋文件所在路
mpAtlas = new Atlas(0);
if (mSensor==IMU_STEREO || mSensor==IMU_MONOCULAR)
// ? 如果是有imu的传感器类型将mbIsInertial设置为imu属性,以后的跟踪和预积分将和这个标志有关
// 如果是有imu的传感器类型设置mbIsInertial = true;以后的跟踪和预积分将和这个标志有关
mpAtlas->SetInertialSensor();
// Step 6 依次创建跟踪、局部建图、闭环、显示线程
@ -175,6 +175,7 @@ System::System(const string &strVocFile, //词袋文件所在路
mpLoopCloser->SetLocalMapper(mpLocalMapper);
// Fix verbosity
// 打印输出中间的信息,设置为安静模式
Verbose::SetTh(Verbose::VERBOSITY_QUIET);
}
@ -354,7 +355,7 @@ cv::Mat System::TrackMonocular(const cv::Mat &im, const double &timestamp, const
mbReset = false;
mbResetActiveMap = false;
}
//如果检测到重置活动地图,讲重置地图设置
//如果检测到重置活动地图的标志为true,将重置地图
else if(mbResetActiveMap)
{
cout << "SYSTEM-> Reseting active map in monocular case" << endl;
@ -362,14 +363,14 @@ cv::Mat System::TrackMonocular(const cv::Mat &im, const double &timestamp, const
mbResetActiveMap = false;
}
}
// 如果是单目VIO模式把IMU数据存储到mlQueueImuData
// 如果是单目VIO模式把IMU数据存储到队列mlQueueImuData
if (mSensor == System::IMU_MONOCULAR)
for(size_t i_imu = 0; i_imu < vImuMeas.size(); i_imu++)
mpTracker->GrabImuData(vImuMeas[i_imu]);
// 计算相机位姿
cv::Mat Tcw = mpTracker->GrabImageMonocular(im,timestamp,filename);
// 更新跟踪状态和参数
unique_lock<mutex> lock2(mMutexState);
mTrackingState = mpTracker->mState;
mTrackedMapPoints = mpTracker->mCurrentFrame.mvpMapPoints;

File diff suppressed because it is too large Load Diff

View File

@ -22,7 +22,6 @@
#include <thread>
using namespace std;
namespace ORB_SLAM3
{
@ -36,9 +35,20 @@ TwoViewReconstruction::TwoViewReconstruction(cv::Mat& K, float sigma, int iterat
mMaxIterations = iterations;
}
/**
* @brief rt3d
* @param vKeys1
* @param vKeys2
* @param vMatches12 vKeys1vKeys2
* @param R21
* @param t21
* @param vP3D
* @param vbTriangulated
*/
bool TwoViewReconstruction::Reconstruct(const std::vector<cv::KeyPoint> &vKeys1, const std::vector<cv::KeyPoint> &vKeys2, const vector<int> &vMatches12,
cv::Mat &R21, cv::Mat &t21, vector<cv::Point3f> &vP3D, vector<bool> &vbTriangulated)
{
// 1. 准备工作提取匹配关系及准备RANSAC
mvKeys1.clear();
mvKeys2.clear();
@ -47,7 +57,7 @@ bool TwoViewReconstruction::Reconstruct(const std::vector<cv::KeyPoint>& vKeys1,
// Fill structures with current keypoints and matches with reference frame
// Reference Frame: 1, Current Frame: 2
mvMatches12.clear();
mvMatches12.clear(); // 存放匹配点的id
mvMatches12.reserve(mvKeys2.size());
mvbMatched1.resize(mvKeys1.size());
for (size_t i = 0, iend = vMatches12.size(); i < iend; i++)
@ -68,16 +78,18 @@ bool TwoViewReconstruction::Reconstruct(const std::vector<cv::KeyPoint>& vKeys1,
vAllIndices.reserve(N);
vector<size_t> vAvailableIndices;
// 使用vAllIndices为了保证选不到同一个点
for (int i = 0; i < N; i++)
{
vAllIndices.push_back(i);
}
// Generate sets of 8 points for each RANSAC iteration
// 默认200次
mvSets = vector<vector<size_t>>(mMaxIterations, vector<size_t>(8, 0));
DUtils::Random::SeedRandOnce(0);
// 2. 先遍历把200次先取好
for (int it = 0; it < mMaxIterations; it++)
{
vAvailableIndices = vAllIndices;
@ -86,10 +98,11 @@ bool TwoViewReconstruction::Reconstruct(const std::vector<cv::KeyPoint>& vKeys1,
for (size_t j = 0; j < 8; j++)
{
int randi = DUtils::Random::RandomInt(0, vAvailableIndices.size() - 1);
int idx = vAvailableIndices[randi];
int idx = vAvailableIndices[randi]; // 这句不多余,防止重复选择
mvSets[it][j] = idx;
// 保证选不到同一个点
vAvailableIndices[randi] = vAvailableIndices.back();
vAvailableIndices.pop_back();
}
@ -100,6 +113,8 @@ bool TwoViewReconstruction::Reconstruct(const std::vector<cv::KeyPoint>& vKeys1,
float SH, SF;
cv::Mat H, F;
// 3. 双线程分别计算
// 加ref为了提供引用
thread threadH(&TwoViewReconstruction::FindHomography, this, ref(vbMatchesInliersH), ref(SH), ref(H));
thread threadF(&TwoViewReconstruction::FindFundamental, this, ref(vbMatchesInliersF), ref(SF), ref(F));
@ -108,12 +123,15 @@ bool TwoViewReconstruction::Reconstruct(const std::vector<cv::KeyPoint>& vKeys1,
threadF.join();
// Compute ratio of scores
if(SH+SF == 0.f) return false;
if (SH + SF == 0.f)
return false;
float RH = SH / (SH + SF);
float minParallax = 1.0;
// Try to reconstruct from homography or fundamental depending on the ratio (0.40-0.45)
// 4. 看哪个分高用哪个分别是通过H重建与通过F重建
// NOTE(review): ORB-SLAM2 used a threshold of 0.40 here; ORB-SLAM3 raised it to 0.50
// (see the commented-out `if(RH>0.40)` on the next line) — confirm the rationale.
if (RH > 0.50) // if(RH>0.40)
{
//cout << "Initialization from Homography" << endl;
@ -126,14 +144,22 @@ bool TwoViewReconstruction::Reconstruct(const std::vector<cv::KeyPoint>& vKeys1,
}
}
/**
* @brief H
* @param vbMatchesInliers HmvMatches12
* @param score
* @param H21 12H
*/
void TwoViewReconstruction::FindHomography(vector<bool> &vbMatchesInliers, float &score, cv::Mat &H21)
{
// Number of putative matches
// 匹配成功的个数
const int N = mvMatches12.size();
// Normalize coordinates
vector<cv::Point2f> vPn1, vPn2;
cv::Mat T1, T2;
// 像素坐标标准化,先去均值,再除均长
Normalize(mvKeys1, vPn1, T1);
Normalize(mvKeys2, vPn2, T2);
cv::Mat T2inv = T2.inv();
@ -150,6 +176,7 @@ void TwoViewReconstruction::FindHomography(vector<bool> &vbMatchesInliers, float
float currentScore;
// Perform all RANSAC iterations and save the solution with highest score
// 计算归一化后的H矩阵 p2 = H21*p1
for (int it = 0; it < mMaxIterations; it++)
{
// Select a minimum set
@ -160,12 +187,13 @@ void TwoViewReconstruction::FindHomography(vector<bool> &vbMatchesInliers, float
vPn1i[j] = vPn1[mvMatches12[idx].first];
vPn2i[j] = vPn2[mvMatches12[idx].second];
}
// 计算标准化后的H矩阵
cv::Mat Hn = ComputeH21(vPn1i, vPn2i);
// 恢复正常H
H21i = T2inv * Hn * T1;
H12i = H21i.inv();
currentScore = CheckHomography(H21i, H12i, vbCurrentInliers, mSigma);
currentScore = CheckHomography(H21i, H12i, vbCurrentInliers, mSigma); // mSigma默认为1
if (currentScore > score)
{
@ -176,7 +204,12 @@ void TwoViewReconstruction::FindHomography(vector<bool> &vbMatchesInliers, float
}
}
/**
* @brief F
* @param vbMatchesInliers FmvMatches12
* @param score
* @param F21 12F
*/
void TwoViewReconstruction::FindFundamental(vector<bool> &vbMatchesInliers, float &score, cv::Mat &F21)
{
// Number of putative matches
@ -214,6 +247,7 @@ void TwoViewReconstruction::FindFundamental(vector<bool> &vbMatchesInliers, floa
cv::Mat Fn = ComputeF21(vPn1i, vPn2i);
// FN得到的是基于归一化坐标的F矩阵必须乘上归一化过程矩阵才是最后的基于像素坐标的F
F21i = T2t * Fn * T1;
currentScore = CheckFundamental(F21i, vbCurrentInliers, mSigma);
@ -227,7 +261,23 @@ void TwoViewReconstruction::FindFundamental(vector<bool> &vbMatchesInliers, floa
}
}
/**
* @brief homographynormalized DLT
* |x'| | h1 h2 h3 ||x|
* |y'| = a | h4 h5 h6 ||y| : x' = a H x, a
* |1 | | h7 h8 h9 ||1|
* 使DLT(direct linear tranform)
* x' = a H x
* ---> (x') (H x) = 0
* ---> Ah = 0
* A = | 0 0 0 -x -y -1 xy' yy' y'| h = | h1 h2 h3 h4 h5 h6 h7 h8 h9 |
* |-x -y -1 0 0 0 xx' yx' x'|
* SVDAh = 0A'A
* @param vP1 , in reference frame
* @param vP2 , in current frame
* @return
* @see Multiple View Geometry in Computer Vision - Algorithm 4.2 p109
*/
cv::Mat TwoViewReconstruction::ComputeH21(const vector<cv::Point2f> &vP1, const vector<cv::Point2f> &vP2)
{
const int N = vP1.size();
@ -260,7 +310,6 @@ cv::Mat TwoViewReconstruction::ComputeH21(const vector<cv::Point2f> &vP1, const
A.at<float>(2 * i + 1, 6) = -u2 * u1;
A.at<float>(2 * i + 1, 7) = -u2 * v1;
A.at<float>(2 * i + 1, 8) = -u2;
}
cv::Mat u, w, vt;
@ -270,6 +319,16 @@ cv::Mat TwoViewReconstruction::ComputeH21(const vector<cv::Point2f> &vP1, const
return vt.row(8).reshape(0, 3);
}
/**
* @brief fundamental matrixnormalized 8
* x'Fx = 0 Af = 0
* A = | x'x x'y x' y'x y'y y' x y 1 |, f = | f1 f2 f3 f4 f5 f6 f7 f8 f9 |
* SVDAf = 0A'A
* @param vP1 , in reference frame
* @param vP2 , in current frame
* @return
* @see Multiple View Geometry in Computer Vision - Algorithm 11.1 p282 ( p191)
*/
cv::Mat TwoViewReconstruction::ComputeF21(const vector<cv::Point2f> &vP1, const vector<cv::Point2f> &vP2)
{
const int N = vP1.size();
@ -300,6 +359,7 @@ cv::Mat TwoViewReconstruction::ComputeF21(const vector<cv::Point2f> &vP1,const v
cv::Mat Fpre = vt.row(8).reshape(0, 3);
// 这里注意计算完要强制让第三个奇异值为0
cv::SVDecomp(Fpre, w, u, vt, cv::SVD::MODIFY_A | cv::SVD::FULL_UV);
w.at<float>(2) = 0;
@ -307,6 +367,13 @@ cv::Mat TwoViewReconstruction::ComputeF21(const vector<cv::Point2f> &vP1,const v
return u * cv::Mat::diag(w) * vt;
}
/**
* @brief
* @param H21
* @param H12
* @param vbMatchesInliers mvMatches12
* @param sigma 1
*/
float TwoViewReconstruction::CheckHomography(const cv::Mat &H21, const cv::Mat &H12, vector<bool> &vbMatchesInliers, float sigma)
{
const int N = mvMatches12.size();
@ -334,7 +401,7 @@ float TwoViewReconstruction::CheckHomography(const cv::Mat &H21, const cv::Mat &
vbMatchesInliers.resize(N);
float score = 0;
// 基于卡方检验计算出的阈值 自由度为2的卡方分布显著性水平为0.05,对应的临界阈值
const float th = 5.991;
const float invSigmaSquare = 1.0 / (sigma * sigma);
@ -353,7 +420,7 @@ float TwoViewReconstruction::CheckHomography(const cv::Mat &H21, const cv::Mat &
// Reprojection error in first image
// x2in1 = H12*x2
// 计算投影误差2投1 1投2这么做计算累计的卡方检验分数分数越高证明内点与误差越优这么做为了平衡误差与内点个数不是说内点个数越高越好也不是说误差越小越好
const float w2in1inv = 1.0 / (h31inv * u2 + h32inv * v2 + h33inv);
const float u2in1 = (h11inv * u2 + h12inv * v2 + h13inv) * w2in1inv;
const float v2in1 = (h21inv * u2 + h22inv * v2 + h23inv) * w2in1inv;
@ -392,6 +459,12 @@ float TwoViewReconstruction::CheckHomography(const cv::Mat &H21, const cv::Mat &
return score;
}
/**
* @brief
* @param F21
* @param vbMatchesInliers mvMatches12
* @param sigma 1
*/
float TwoViewReconstruction::CheckFundamental(const cv::Mat &F21, vector<bool> &vbMatchesInliers, float sigma)
{
const int N = mvMatches12.size();
@ -410,7 +483,9 @@ float TwoViewReconstruction::CheckFundamental(const cv::Mat &F21, vector<bool> &
float score = 0;
// 基于卡方检验计算出的阈值 自由度为1的卡方分布显著性水平为0.05,对应的临界阈值
const float th = 3.841;
// 基于卡方检验计算出的阈值 自由度为2的卡方分布显著性水平为0.05,对应的临界阈值
const float thScore = 5.991;
const float invSigmaSquare = 1.0 / (sigma * sigma);
@ -429,17 +504,18 @@ float TwoViewReconstruction::CheckFundamental(const cv::Mat &F21, vector<bool> &
// Reprojection error in second image
// l2=F21x1=(a2,b2,c2)
// 计算 img1 上的点在 img2 上投影得到的极线 l2 = F21 * p1 = (a2,b2,c2)
const float a2 = f11 * u1 + f12 * v1 + f13;
const float b2 = f21 * u1 + f22 * v1 + f23;
const float c2 = f31 * u1 + f32 * v1 + f33;
// 计算误差 e = (a * p2.x + b * p2.y + c) / sqrt(a * a + b * b)
const float num2 = a2 * u2 + b2 * v2 + c2;
const float squareDist1 = num2 * num2 / (a2 * a2 + b2 * b2);
const float chiSquare1 = squareDist1 * invSigmaSquare;
// 自由度为1是因为这里的计算是点到线的距离判定分数自由度为2的原因可能是为了与H矩阵持平
if (chiSquare1 > th)
bIn = false;
else
@ -447,7 +523,7 @@ float TwoViewReconstruction::CheckFundamental(const cv::Mat &F21, vector<bool> &
// Reprojection error in second image
// l1 =x2tF21=(a1,b1,c1)
// 与上面相同只不过反过来了
const float a1 = f11 * u2 + f21 * v2 + f31;
const float b1 = f12 * u2 + f22 * v2 + f32;
const float c1 = f13 * u2 + f23 * v2 + f33;
@ -472,26 +548,43 @@ float TwoViewReconstruction::CheckFundamental(const cv::Mat &F21, vector<bool> &
return score;
}
/**
* @brief
* @param vbMatchesInliers mvMatches12
* @param F21
* @param K
* @param R21
* @param t21
* @param vP3D
* @param vbTriangulated mvKeys1
* @param minParallax 1
* @param minTriangulated 50
*/
bool TwoViewReconstruction::ReconstructF(vector<bool> &vbMatchesInliers, cv::Mat &F21, cv::Mat &K,
cv::Mat &R21, cv::Mat &t21, vector<cv::Point3f> &vP3D, vector<bool> &vbTriangulated, float minParallax, int minTriangulated)
cv::Mat &R21, cv::Mat &t21, vector<cv::Point3f> &vP3D,
vector<bool> &vbTriangulated, float minParallax, int minTriangulated)
{
// 统计了合法的匹配,后面用于对比重建出的点数
int N = 0;
for (size_t i = 0, iend = vbMatchesInliers.size(); i < iend; i++)
if (vbMatchesInliers[i])
N++;
// Compute Essential Matrix from Fundamental Matrix
// 1. 计算本质矩阵
cv::Mat E21 = K.t() * F21 * K;
cv::Mat R1, R2, t;
// Recover the 4 motion hypotheses
// 2. 分解本质矩阵得到4对rt
DecomposeE(E21, R1, R2, t);
cv::Mat t1 = t;
cv::Mat t2 = -t;
// Reconstruct with the 4 hyphoteses and check
// 3. 使用四对结果分别重建
vector<cv::Point3f> vP3D1, vP3D2, vP3D3, vP3D4;
vector<bool> vbTriangulated1, vbTriangulated2, vbTriangulated3, vbTriangulated4;
float parallax1, parallax2, parallax3, parallax4;
@ -500,14 +593,16 @@ bool TwoViewReconstruction::ReconstructF(vector<bool> &vbMatchesInliers, cv::Mat
int nGood2 = CheckRT(R2, t1, mvKeys1, mvKeys2, mvMatches12, vbMatchesInliers, K, vP3D2, 4.0 * mSigma2, vbTriangulated2, parallax2);
int nGood3 = CheckRT(R1, t2, mvKeys1, mvKeys2, mvMatches12, vbMatchesInliers, K, vP3D3, 4.0 * mSigma2, vbTriangulated3, parallax3);
int nGood4 = CheckRT(R2, t2, mvKeys1, mvKeys2, mvMatches12, vbMatchesInliers, K, vP3D4, 4.0 * mSigma2, vbTriangulated4, parallax4);
// 统计重建出点的数量最大值
int maxGood = max(nGood1, max(nGood2, max(nGood3, nGood4)));
R21 = cv::Mat();
t21 = cv::Mat();
// 起码要重建出超过百分之90的匹配点
int nMinGood = max(static_cast<int>(0.9 * N), minTriangulated);
// 4. 看看有没有脱颖而出的结果且最大值要高于要求的最低值如果大家都差不多说明有问题因为4个结果中只有一个会正常
// 大家都很多的话反而不正常了。。。
int nsimilar = 0;
if (nGood1 > 0.7 * maxGood)
nsimilar++;
@ -525,6 +620,7 @@ bool TwoViewReconstruction::ReconstructF(vector<bool> &vbMatchesInliers, cv::Mat
}
// If best reconstruction has enough parallax initialize
// 5. 使用最好的结果输出
if (maxGood == nGood1)
{
if (parallax1 > minParallax)
@ -536,7 +632,8 @@ bool TwoViewReconstruction::ReconstructF(vector<bool> &vbMatchesInliers, cv::Mat
t1.copyTo(t21);
return true;
}
}else if(maxGood==nGood2)
}
else if (maxGood == nGood2)
{
if (parallax2 > minParallax)
{
@ -547,7 +644,8 @@ bool TwoViewReconstruction::ReconstructF(vector<bool> &vbMatchesInliers, cv::Mat
t1.copyTo(t21);
return true;
}
}else if(maxGood==nGood3)
}
else if (maxGood == nGood3)
{
if (parallax3 > minParallax)
{
@ -558,7 +656,8 @@ bool TwoViewReconstruction::ReconstructF(vector<bool> &vbMatchesInliers, cv::Mat
t2.copyTo(t21);
return true;
}
}else if(maxGood==nGood4)
}
else if (maxGood == nGood4)
{
if (parallax4 > minParallax)
{
@ -574,9 +673,82 @@ bool TwoViewReconstruction::ReconstructF(vector<bool> &vbMatchesInliers, cv::Mat
return false;
}
/**
* @brief HR t
* HFaugeras SVD-based decomposition Zhang SVD-based decomposition
* Motion and structure from motion in a piecewise plannar environment
* 使Faugeras SVD-based decomposition
* @see
* - Faugeras et al, Motion and structure from motion in a piecewise planar environment. International Journal of Pattern Recognition and Artificial Intelligence, 1988.
* - Deeper understanding of the homography decomposition for vision-based control
* n = (a, b, c)^t (x, y, z)ax + by + cz = d 1/d * n^t * x = 1 d
* x' = R*x + t x x'
* λ2*x' = R*(λ1*x) + t = R*(λ1*x) + t * 1/d * n^t * (λ1*x)
* x' = λ*(R + t * 1/d * n^t) * x = H^ * x
* u' = G * u G = KH^K.inv
* H^ ~= d*R + t * n^t = UV^t = U^t * H^ * V = d*U^t * R * V + (U^t * t) * (V^t * n)^t
* s = det(U) * det(V) s 1 -1 = s^2 * d*U^t * R * V + (U^t * t) * (V^t * n)^t = (s*d) * s * U^t * R * V + (U^t * t) * (V^t * n)^t
* R' = s * U^t * R * V t' = U^t * t n' = V^t * n d' = s * d
* = d' * R' + t' * n'^t s
* = | d1, 0, 0 | e1 = (1, 0, 0)^t e2 = (0, 1, 0)^t e3 = (0, 0, 1)^t
* | 0, d2, 0 |
* | 0, 0, d3 |
* n' = (a1, 0, 0)^t + (0, b1, 0)^t + (0, 0, c1)^t = a1*e1 + b1*e2 + c1*e3
*
* = [d1*e1, d2*e2, d3*e3] = [d' * R' * e1, d' * R' * e2, d' * R' * e3] + [t' * a1, t' * b1, t' * c1]
* ==> d1*e1 = d' * R' * e1 + t' * a1
* d2*e2 = d' * R' * e2 + t' * b1
* d3*e3 = d' * R' * e3 + t' * c1
*
*
* t
* d'R'(b1e1 - a1e2) = d1b1e1 - d2a1e2
* d'R'(c1e2 - b1e3) = d2c1e1 - d3b1e3
* d'R'(a1e3 - c1e1) = d3a1e3 - d1c1e1
*
* (d'^2 - d2^2)*a1^2 + (d'^2 - d1^2)*b1^2 = 0
* (d'^2 - d3^2)*b1^2 + (d'^2 - d2^2)*c1^2 = 0 d'^2 - d1^2 = x1 d'^2 - d2^2 = x2 d'^2 - d3^2 = x3
* (d'^2 - d1^2)*c1^2 + (d'^2 - d3^2)*a1^2 = 0
*
*
* | x2 x1 0 | | a1^2 |
* | 0 x3 x2 | * | b1^2 | = 0 ===> x1 * x2 * x3 = 0 (d'^2 - d1^2) * (d'^2 - d2^2) * (d'^2 - d3^2) = 0
* | x3 0 x1 | | c1^2 |
* d1 >= d2 >= d3 d' = d2 or -d2
*
* -----------------------------------------------------------------------------------------------------------------------------------------------------------------
     * 由于 d' > 0,且法向量为单位向量,有 a1^2 + b1^2 + c1^2 = 1,据此可解出 a1, b1, c1:
* a1 = ε1 * sqrt((d1^2 - d2^2) / (d1^2 - d3^2))
* b1 = 0
* c1 = ε2 * sqrt((d2^2 - d3^2) / (d1^2 - d3^2)) ε1 ε2 1
* d2*e2 = d' * R' * e2 + t' * b1 => R' * e2 = e2
* | cosθ 0 -sinθ |
* ==> R' = | 0 1 0 | n' R' d'R'(a1e3 - c1e1) = d3a1e3 - d1c1e1
* | sinθ 0 cosθ |
* | cosθ 0 -sinθ | | -c1 | | -d1c1 |
* d' * | 0 1 0 | * | 0 | = | 0 | sinθ cosθ
* | sinθ 0 cosθ | | a1 | | d3a1 |
*
* R' d1*e1 = d' * R' * e1 + t' * a1
* d2*e2 = d' * R' * e2 + t' * b1
* d3*e3 = d' * R' * e3 + t' * c1
*
* t' = (d1 - d3) * (a1, 0, c1)^t
* @param vbMatchesInliers mvMatches12
* @param H21
* @param K
* @param R21
* @param t21
* @param vP3D
* @param vbTriangulated vbMatchesInliers
* @param minParallax 1
* @param minTriangulated 50
*/
bool TwoViewReconstruction::ReconstructH(vector<bool> &vbMatchesInliers, cv::Mat &H21, cv::Mat &K,
cv::Mat &R21, cv::Mat &t21, vector<cv::Point3f> &vP3D, vector<bool> &vbTriangulated, float minParallax, int minTriangulated)
cv::Mat &R21, cv::Mat &t21, vector<cv::Point3f> &vP3D,
vector<bool> &vbTriangulated, float minParallax, int minTriangulated)
{
// 统计了合法的匹配,后面用于对比重建出的点数
int N = 0;
for (size_t i = 0, iend = vbMatchesInliers.size(); i < iend; i++)
if (vbMatchesInliers[i])
@ -585,6 +757,8 @@ bool TwoViewReconstruction::ReconstructH(vector<bool> &vbMatchesInliers, cv::Mat
// We recover 8 motion hypotheses using the method of Faugeras et al.
// Motion and structure from motion in a piecewise planar environment.
// International Journal of Pattern Recognition and Artificial Intelligence, 1988
// step1SVD分解Homography
// 因为特征点是图像坐标系所以讲H矩阵由相机坐标系换算到图像坐标系
cv::Mat invK = K.inv();
cv::Mat A = invK * H21 * K;
@ -597,7 +771,7 @@ bool TwoViewReconstruction::ReconstructH(vector<bool> &vbMatchesInliers, cv::Mat
float d1 = w.at<float>(0);
float d2 = w.at<float>(1);
float d3 = w.at<float>(2);
// SVD分解的正常情况是特征值降序排列
if (d1 / d2 < 1.00001 || d2 / d3 < 1.00001)
{
return false;
@ -609,17 +783,39 @@ bool TwoViewReconstruction::ReconstructH(vector<bool> &vbMatchesInliers, cv::Mat
vn.reserve(8);
// n'=[x1 0 x3] 4 posibilities e1=e3=1, e1=1 e3=-1, e1=-1 e3=1, e1=e3=-1
// step2计算法向量
// n'=[x1 0 x3] 4 posibilities e1=e3=1, e1=1 e3=-1, e1=-1 e3=1, e1=e3=-1
// 法向量n'= [x1 0 x3]
float aux1 = sqrt((d1 * d1 - d2 * d2) / (d1 * d1 - d3 * d3));
float aux3 = sqrt((d2 * d2 - d3 * d3) / (d1 * d1 - d3 * d3));
float x1[] = {aux1, aux1, -aux1, -aux1};
float x3[] = {aux3, -aux3, aux3, -aux3};
// case d'=d2
// step3恢复旋转矩阵
// step3.1:计算 sin(theta)和cos(theta)case d'=d2
float aux_stheta = sqrt((d1 * d1 - d2 * d2) * (d2 * d2 - d3 * d3)) / ((d1 + d3) * d2);
float ctheta = (d2 * d2 + d1 * d3) / ((d1 + d3) * d2);
float stheta[] = {aux_stheta, -aux_stheta, -aux_stheta, aux_stheta};
// step3.2计算四种旋转矩阵Rt
// 计算旋转矩阵 R计算ppt中公式18
// | ctheta 0 -aux_stheta| | aux1|
// Rp = | 0 1 0 | tp = | 0 |
// | aux_stheta 0 ctheta | |-aux3|
// | ctheta 0 aux_stheta| | aux1|
// Rp = | 0 1 0 | tp = | 0 |
// |-aux_stheta 0 ctheta | | aux3|
// | ctheta 0 aux_stheta| |-aux1|
// Rp = | 0 1 0 | tp = | 0 |
// |-aux_stheta 0 ctheta | |-aux3|
// | ctheta 0 -aux_stheta| |-aux1|
// Rp = | 0 1 0 | tp = | 0 |
// | aux_stheta 0 ctheta | | aux3|
for (int i = 0; i < 4; i++)
{
cv::Mat Rp = cv::Mat::eye(3, 3, CV_32F);
@ -637,6 +833,8 @@ bool TwoViewReconstruction::ReconstructH(vector<bool> &vbMatchesInliers, cv::Mat
tp.at<float>(2) = -x3[i];
tp *= d1 - d3;
// 这里虽然对t有归一化并没有决定单目整个SLAM过程的尺度
// 因为CreateInitialMapMonocular函数对3D点深度会缩放然后反过来对 t 有改变
cv::Mat t = U * tp;
vt.push_back(t / cv::norm(t));
@ -652,11 +850,14 @@ bool TwoViewReconstruction::ReconstructH(vector<bool> &vbMatchesInliers, cv::Mat
}
//case d'=-d2
// step3.3:计算 sin(theta)和cos(theta)case d'=-d2
float aux_sphi = sqrt((d1 * d1 - d2 * d2) * (d2 * d2 - d3 * d3)) / ((d1 - d3) * d2);
float cphi = (d1 * d3 - d2 * d2) / ((d1 - d3) * d2);
float sphi[] = {aux_sphi, -aux_sphi, -aux_sphi, aux_sphi};
// step3.4计算四种旋转矩阵Rt
// 计算旋转矩阵 R
for (int i = 0; i < 4; i++)
{
cv::Mat Rp = cv::Mat::eye(3, 3, CV_32F);
@ -689,7 +890,6 @@ bool TwoViewReconstruction::ReconstructH(vector<bool> &vbMatchesInliers, cv::Mat
vn.push_back(n);
}
int bestGood = 0;
int secondBestGood = 0;
int bestSolutionIdx = -1;
@ -699,6 +899,7 @@ bool TwoViewReconstruction::ReconstructH(vector<bool> &vbMatchesInliers, cv::Mat
// Instead of applying the visibility constraints proposed in the Faugeras' paper (which could fail for points seen with low parallax)
// We reconstruct all hypotheses and check in terms of triangulated points and parallax
// step4d'=d2和d'=-d2分别对应8组(R t)通过恢复3D点并判断是否在相机正前方的方法来确定最优解
for (size_t i = 0; i < 8; i++)
{
float parallaxi;
@ -706,6 +907,7 @@ bool TwoViewReconstruction::ReconstructH(vector<bool> &vbMatchesInliers, cv::Mat
vector<bool> vbTriangulatedi;
int nGood = CheckRT(vR[i], vt[i], mvKeys1, mvKeys2, mvMatches12, vbMatchesInliers, K, vP3Di, 4.0 * mSigma2, vbTriangulatedi, parallaxi);
// 保留最优的和次优的
if (nGood > bestGood)
{
secondBestGood = bestGood;
@ -721,7 +923,7 @@ bool TwoViewReconstruction::ReconstructH(vector<bool> &vbMatchesInliers, cv::Mat
}
}
// step5通过判断最优是否明显好于次优从而判断该次Homography分解是否成功
if (secondBestGood < 0.75 * bestGood && bestParallax >= minParallax && bestGood > minTriangulated && bestGood > 0.9 * N)
{
vR[bestSolutionIdx].copyTo(R21);
@ -735,10 +937,22 @@ bool TwoViewReconstruction::ReconstructH(vector<bool> &vbMatchesInliers, cv::Mat
return false;
}
/**
 * @brief 用 DLT 方法三角化恢复三维点
 * @param kp1 帧1的特征点
 * @param kp2 帧2的特征点
 * @param P1 帧1的投影矩阵,形如 [K*R | K*t]
 * @param P2 帧2的投影矩阵
 * @param x3D 输出的三维点
 */
void TwoViewReconstruction::Triangulate(const cv::KeyPoint &kp1, const cv::KeyPoint &kp2, const cv::Mat &P1, const cv::Mat &P2, cv::Mat &x3D)
{
cv::Mat A(4, 4, CV_32F);
// x = a*P*X 左右两面乘Pc的反对称矩阵 a*[x]^ * P *X = 0 构成了A矩阵中间涉及一个尺度a因为都是归一化平面但右面是0所以直接可以约掉不影响最后的尺度
// 0 -1 v P(0) -P.row(1) + v*P.row(2)
// 1 0 -u * P(1) = P.row(0) - u*P.row(2)
// -v u 0 P(2) u*P.row(1) - v*P.row(0)
// 发现上述矩阵线性相关所以取前两维两个点构成了4行的矩阵就是如下的操作求出的是4维的结果[X,Y,Z,A]所以需要除以最后一维使之为1就成了[X,Y,Z,1]这种齐次形式
A.row(0) = kp1.pt.x * P1.row(2) - P1.row(0);
A.row(1) = kp1.pt.y * P1.row(2) - P1.row(1);
A.row(2) = kp2.pt.x * P2.row(2) - P2.row(0);
@ -750,6 +964,12 @@ void TwoViewReconstruction::Triangulate(const cv::KeyPoint &kp1, const cv::KeyPo
x3D = x3D.rowRange(0, 3) / x3D.at<float>(3);
}
/**
 * @brief 对特征点坐标做去均值、尺度归一化,并输出对应的归一化变换矩阵 T
 * @param vKeys 输入的特征点
 * @param vNormalizedPoints 输出的归一化后坐标
 * @param T 归一化变换矩阵
 */
void TwoViewReconstruction::Normalize(const vector<cv::KeyPoint> &vKeys, vector<cv::Point2f> &vNormalizedPoints, cv::Mat &T)
{
float meanX = 0;
@ -763,7 +983,7 @@ void TwoViewReconstruction::Normalize(const vector<cv::KeyPoint> &vKeys, vector<
meanX += vKeys[i].pt.x;
meanY += vKeys[i].pt.y;
}
// 1. 求均值
meanX = meanX / N;
meanY = meanY / N;
@ -778,10 +998,10 @@ void TwoViewReconstruction::Normalize(const vector<cv::KeyPoint> &vKeys, vector<
meanDevX += fabs(vNormalizedPoints[i].x);
meanDevY += fabs(vNormalizedPoints[i].y);
}
// 2. 确定新原点后计算与新原点的距离均值
meanDevX = meanDevX / N;
meanDevY = meanDevY / N;
    // 3. 尺度归一化:使各点到新原点的平均绝对偏差为 1
float sX = 1.0 / meanDevX;
float sY = 1.0 / meanDevY;
@ -790,7 +1010,7 @@ void TwoViewReconstruction::Normalize(const vector<cv::KeyPoint> &vKeys, vector<
vNormalizedPoints[i].x = vNormalizedPoints[i].x * sX;
vNormalizedPoints[i].y = vNormalizedPoints[i].y * sY;
}
// 4. 计算变化矩阵
T = cv::Mat::eye(3, 3, CV_32F);
T.at<float>(0, 0) = sX;
T.at<float>(1, 1) = sY;
@ -798,7 +1018,20 @@ void TwoViewReconstruction::Normalize(const vector<cv::KeyPoint> &vKeys, vector<
T.at<float>(1, 2) = -meanY * sY;
}
/**
 * @brief 进行 cheirality check(正深度检查),从分解得到的多组解中筛选最合适的 R, t
 * @param R 待检验的旋转矩阵
 * @param t 待检验的平移向量
 * @param vKeys1 帧1的特征点
 * @param vKeys2 帧2的特征点
 * @param vMatches12 两帧特征点的匹配关系
 * @param vbMatchesInliers 匹配点对的内点标记
 * @param K 相机内参矩阵
 * @param vP3D 三角化恢复出的三维点
 * @param th2 重投影误差的卡方阈值
 * @param vbGood 对应 mvKeys1,标记各点是否通过全部检验
 * @param parallax 输出的较大视差角(度)
 */
int TwoViewReconstruction::CheckRT(const cv::Mat &R, const cv::Mat &t, const vector<cv::KeyPoint> &vKeys1, const vector<cv::KeyPoint> &vKeys2,
const vector<Match> &vMatches12, vector<bool> &vbMatchesInliers,
const cv::Mat &K, vector<cv::Point3f> &vP3D, float th2, vector<bool> &vbGood, float &parallax)
@ -816,17 +1049,21 @@ int TwoViewReconstruction::CheckRT(const cv::Mat &R, const cv::Mat &t, const vec
vCosParallax.reserve(vKeys1.size());
// Camera 1 Projection Matrix K[I|0]
// 步骤1得到一个相机的投影矩阵
// 以第一个相机的光心作为世界坐标系
cv::Mat P1(3, 4, CV_32F, cv::Scalar(0));
K.copyTo(P1.rowRange(0, 3).colRange(0, 3));
cv::Mat O1 = cv::Mat::zeros(3, 1, CV_32F);
// Camera 2 Projection Matrix K[R|t]
// 步骤2得到第二个相机的投影矩阵
cv::Mat P2(3, 4, CV_32F);
R.copyTo(P2.rowRange(0, 3).colRange(0, 3));
t.copyTo(P2.rowRange(0, 3).col(3));
P2 = K * P2;
// 第二个相机的光心在世界坐标系下的坐标
cv::Mat O2 = -R.t() * t;
int nGood = 0;
@ -836,10 +1073,12 @@ int TwoViewReconstruction::CheckRT(const cv::Mat &R, const cv::Mat &t, const vec
if (!vbMatchesInliers[i])
continue;
// kp1和kp2是匹配特征点
const cv::KeyPoint &kp1 = vKeys1[vMatches12[i].first];
const cv::KeyPoint &kp2 = vKeys2[vMatches12[i].second];
cv::Mat p3dC1;
// 步骤3利用三角法恢复三维点p3dC1
Triangulate(kp1, kp2, P1, P2, p3dC1);
if (!isfinite(p3dC1.at<float>(0)) || !isfinite(p3dC1.at<float>(1)) || !isfinite(p3dC1.at<float>(2)))
@ -849,6 +1088,7 @@ int TwoViewReconstruction::CheckRT(const cv::Mat &R, const cv::Mat &t, const vec
}
// Check parallax
// 步骤4计算视差角余弦值
cv::Mat normal1 = p3dC1 - O1;
float dist1 = cv::norm(normal1);
@ -857,17 +1097,22 @@ int TwoViewReconstruction::CheckRT(const cv::Mat &R, const cv::Mat &t, const vec
float cosParallax = normal1.dot(normal2) / (dist1 * dist2);
// 步骤5判断3D点是否在两个摄像头前方
// Check depth in front of first camera (only if enough parallax, as "infinite" points can easily go to negative depth)
// 步骤5.13D点深度为负在第一个摄像头后方淘汰
if (p3dC1.at<float>(2) <= 0 && cosParallax < 0.99998)
continue;
// Check depth in front of second camera (only if enough parallax, as "infinite" points can easily go to negative depth)
// 步骤5.23D点深度为负在第二个摄像头后方淘汰
cv::Mat p3dC2 = R * p3dC1 + t;
if (p3dC2.at<float>(2) <= 0 && cosParallax < 0.99998)
continue;
// 步骤6计算重投影误差
// Check reprojection error in first image
// 计算3D点在第一个图像上的投影误差
float im1x, im1y;
float invZ1 = 1.0 / p3dC1.at<float>(2);
im1x = fx * p3dC1.at<float>(0) * invZ1 + cx;
@ -875,10 +1120,13 @@ int TwoViewReconstruction::CheckRT(const cv::Mat &R, const cv::Mat &t, const vec
float squareError1 = (im1x - kp1.pt.x) * (im1x - kp1.pt.x) + (im1y - kp1.pt.y) * (im1y - kp1.pt.y);
// 步骤6.1:重投影误差太大,跳过淘汰
// 一般视差角比较小时重投影误差比较大
if (squareError1 > th2)
continue;
// Check reprojection error in second image
// 计算3D点在第二个图像上的投影误差
float im2x, im2y;
float invZ2 = 1.0 / p3dC2.at<float>(2);
im2x = fx * p3dC2.at<float>(0) * invZ2 + cx;
@ -886,9 +1134,12 @@ int TwoViewReconstruction::CheckRT(const cv::Mat &R, const cv::Mat &t, const vec
float squareError2 = (im2x - kp2.pt.x) * (im2x - kp2.pt.x) + (im2y - kp2.pt.y) * (im2y - kp2.pt.y);
// 步骤6.2:重投影误差太大,跳过淘汰
// 一般视差角比较小时重投影误差比较大
if (squareError2 > th2)
continue;
// 步骤7统计经过检验的3D点个数记录3D点视差角
vCosParallax.push_back(cosParallax);
vP3D[vMatches12[i].first] = cv::Point3f(p3dC1.at<float>(0), p3dC1.at<float>(1), p3dC1.at<float>(2));
nGood++;
@ -896,11 +1147,14 @@ int TwoViewReconstruction::CheckRT(const cv::Mat &R, const cv::Mat &t, const vec
if (cosParallax < 0.99998)
vbGood[vMatches12[i].first] = true;
}
// 步骤8得到3D点中较大的视差角
if (nGood > 0)
{
// 从小到大排序
sort(vCosParallax.begin(), vCosParallax.end());
// trick! 排序后并没有取最大的视差角
// 取一个较大的视差角
size_t idx = min(50, int(vCosParallax.size() - 1));
parallax = acos(vCosParallax[idx]) * 180 / CV_PI;
}
@ -910,11 +1164,50 @@ int TwoViewReconstruction::CheckRT(const cv::Mat &R, const cv::Mat &t, const vec
return nGood;
}
/**
 * @brief 分解 Essential 矩阵,得到候选的旋转与平移
 * 参考:https://blog.csdn.net/weixin_44580210/article/details/90344511
 * 从基础矩阵 F 恢复本质矩阵 E 后,对 E 做 SVD 分解可以得到 4 组运动假设
 * 这 4 组解分别为 [R1,t], [R1,-t], [R2,t], [R2,-t]
* ##
* 3×30
* E=[t]×R=SRS
* 1 S S=UBU^T B diag(a1Za2Z...amZ00...0) Z = [0, 1; -1, 0]
*
* S S=kUZU^ Z
* | 0, 1, 0 |
* |-1, 0, 0 |
* | 0, 0, 0 |
* Z = diag(1, 1, 0) * W W
* | 0,-1, 0 |
* | 1, 0, 0 |
* | 0, 0, 1 |
* E=SR=Udiag(1,1,0)(WU^R) E
*
* ##
* P=[I0]P E SR
* 1 S=UZU^TUXV^TUXV^Tsvd
* UVE
* Udiag(1,1,0)V^T = E = SR = (UZU^T)(UXV^) = U(ZX)V^T
* ZX = diag(1,1,0) x=W X=W^T
* E SVD Udiag(1,1,0)V^E = SR S = UZU^ R = UWVTor UW^TV^
* St=00t=1线 t = U(0,0,1)^T = u3
* U StR
* P=[UWV^T +u3] or [UWV^T u3] or [UW^TV^T +u3] or [UW^TV^T u3]
* @param E Essential Matrix
* @param R1 Rotation Matrix 1
* @param R2 Rotation Matrix 2
* @param t Translation
* @see Multiple View Geometry in Computer Vision - Result 9.19 p259
*/
void TwoViewReconstruction::DecomposeE(const cv::Mat &E, cv::Mat &R1, cv::Mat &R2, cv::Mat &t)
{
cv::Mat u, w, vt;
cv::SVD::compute(E, w, u, vt);
// 对 t 有归一化但是这个地方并没有决定单目整个SLAM过程的尺度
// 因为CreateInitialMapMonocular函数对3D点深度会缩放然后反过来对 t 有改变
u.col(2).copyTo(t);
t = t / cv::norm(t);
@ -924,7 +1217,7 @@ void TwoViewReconstruction::DecomposeE(const cv::Mat &E, cv::Mat &R1, cv::Mat &R
W.at<float>(2, 2) = 1;
R1 = u * W * vt;
if(cv::determinant(R1)<0)
if (cv::determinant(R1) < 0) // 旋转矩阵有行列式为1的约束
R1 = -R1;
R2 = u * W.t() * vt;
@ -932,4 +1225,4 @@ void TwoViewReconstruction::DecomposeE(const cv::Mat &E, cv::Mat &R1, cv::Mat &R
R2 = -R2;
}
} //namespace ORB_SLAM
} // namespace ORB_SLAM3