Add comments to the main function and the Tracking code

master
cxl 2020-12-05 22:50:31 +08:00
parent 2fb9d9046b
commit d90c628de4
5 changed files with 279 additions and 68 deletions

View File

@ -38,6 +38,7 @@ void LoadIMU(const string &strImuPath, vector<double> &vTimeStamps, vector<cv::P
double ttrack_tot = 0;
int main(int argc, char **argv)
{
// Number of sequences to run
const int num_seq = (argc-3)/3;
cout << "num_seq = " << num_seq << endl;
bool bFileName= ((argc % 3) == 1);
@ -48,7 +49,7 @@ int main(int argc, char **argv)
cout << "file name: " << file_name << endl;
// At least 6 arguments must be provided, following the usage message below
if(argc < 6)
{
cerr << endl << "Usage: ./mono_inertial_tum_vi path_to_vocabulary path_to_settings path_to_image_folder_1 path_to_times_file_1 path_to_imu_data_1 (path_to_image_folder_2 path_to_times_file_2 path_to_imu_data_2 ... path_to_image_folder_N path_to_times_file_N path_to_imu_data_N) (trajectory_file_name)" << endl;
@ -57,11 +58,12 @@ int main(int argc, char **argv)
// Load all sequences:
// Prepare to load the data of all sequences
int seq;
vector< vector<string> > vstrImageFilenames; // image file names
vector< vector<double> > vTimestampsCam;     // image timestamps
vector< vector<cv::Point3f> > vAcc, vGyro;   // accelerometer and gyroscope measurements
vector< vector<double> > vTimestampsImu;     // IMU timestamps
vector<int> nImages;
vector<int> nImu;
vector<int> first_imu(num_seq,0);
@ -75,12 +77,15 @@ int main(int argc, char **argv)
nImu.resize(num_seq);
int tot_images = 0;
// Iterate over each sequence
for (seq = 0; seq<num_seq; seq++)
{
// Step 1: load the image filenames and the corresponding image timestamps
cout << "Loading images for sequence " << seq << "...";
LoadImages(string(argv[3*(seq+1)]), string(argv[3*(seq+1)+1]), vstrImageFilenames[seq], vTimestampsCam[seq]);
cout << "LOADED!" << endl;
// Step 2: load the IMU data
cout << "Loading IMU for sequence " << seq << "...";
LoadIMU(string(argv[3*(seq+1)+2]), vTimestampsImu[seq], vAcc[seq], vGyro[seq]);
cout << "LOADED!" << endl;
@ -89,6 +94,7 @@ int main(int argc, char **argv)
tot_images += nImages[seq];
nImu[seq] = vTimestampsImu[seq].size();
// Check that a valid number of images and IMU measurements was loaded
if((nImages[seq]<=0)||(nImu[seq]<=0))
{
cerr << "ERROR: Failed to load images or IMU for sequence" << seq << endl;
@ -96,9 +102,10 @@ int main(int argc, char **argv)
}
// Find first imu to be considered, supposing imu measurements start first
// Step 3: assuming the IMU recording starts before the images, find the index of the IMU timestamp closest to the first image timestamp and store it in first_imu[seq]
while(vTimestampsImu[seq][first_imu[seq]]<=vTimestampsCam[seq][0])
first_imu[seq]++;
// The while loop exits once the IMU timestamp has just passed the first image timestamp, so step back one index
first_imu[seq]--; // first imu measurement to be considered
}
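Step 3 above is easy to read in isolation; as a minimal standalone sketch of the same alignment (the helper name FindFirstImuIndex is hypothetical, and both timestamp vectors are assumed sorted in ascending order):

#include <vector>
#include <cstddef>

// Illustrative sketch, not part of this patch: return the index of the last IMU sample
// whose timestamp is not later than the first camera timestamp.
static size_t FindFirstImuIndex(const std::vector<double> &vTimestampsImu,
                                const std::vector<double> &vTimestampsCam)
{
    size_t idx = 0;
    // Advance while the IMU sample is not newer than the first image...
    while (idx < vTimestampsImu.size() && vTimestampsImu[idx] <= vTimestampsCam[0])
        idx++;
    // ...then step back one sample, exactly like the first_imu[seq]-- above.
    return idx > 0 ? idx - 1 : 0;
}

Preintegration for the first tracked frame can then start from that sample without a gap between IMU and camera data.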
@ -115,12 +122,12 @@ int main(int argc, char **argv)
cout << "IMU data in the sequence: " << nImu << endl << endl;*/
// Create SLAM system. It initializes all system threads and gets ready to process frames.
// Step 4: initialize the SLAM system: read the settings file and the vocabulary, and create the tracking, local mapping, loop closing and viewer threads
ORB_SLAM3::System SLAM(argv[1],argv[2],ORB_SLAM3::System::IMU_MONOCULAR, true, 0, file_name);
int proccIm = 0;
for (seq = 0; seq<num_seq; seq++)
{
// Main loop
cv::Mat im;
vector<ORB_SLAM3::IMU::Point> vImuMeas;
@ -128,15 +135,16 @@ int main(int argc, char **argv)
cv::Ptr<cv::CLAHE> clahe = cv::createCLAHE(3.0, cv::Size(8, 8));
for(int ni=0; ni<nImages[seq]; ni++, proccIm++)
{
// Read image from file
// Step 5: read each image as grayscale and store it in im
im = cv::imread(vstrImageFilenames[seq][ni],cv::IMREAD_GRAYSCALE);
// clahe
// ? What does this do? (CLAHE: contrast-limited adaptive histogram equalization, which enhances local contrast before feature extraction)
clahe->apply(im,im);
// Get the corresponding image timestamp
double tframe = vTimestampsCam[seq][ni];
if(im.empty())
@ -148,12 +156,14 @@ int main(int argc, char **argv)
// Load imu measurements from previous frame
// Clear the IMU measurement buffer
vImuMeas.clear();
if(ni>0)
{
// cout << "t_cam " << tframe << endl;
// Step 6: store the IMU measurements between the previous image frame and the current one in vImuMeas
// Note that the first image frame has no associated IMU data
while(vTimestampsImu[seq][first_imu[seq]]<=vTimestampsCam[seq][ni])
{
vImuMeas.push_back(ORB_SLAM3::IMU::Point(vAcc[seq][first_imu[seq]].x,vAcc[seq][first_imu[seq]].y,vAcc[seq][first_imu[seq]].z,
@ -175,6 +185,7 @@ int main(int argc, char **argv)
// Pass the image to the SLAM system
// cout << "tframe = " << tframe << endl;
// Step 7: the tracking thread runs as the main thread
SLAM.TrackMonocular(im,tframe,vImuMeas); // TODO change to monocular_inertial
#ifdef COMPILEDWITHC11
@ -190,6 +201,7 @@ int main(int argc, char **argv)
vTimesTrack[ni]=ttrack;
// Wait to load the next frame
double T=0;
if(ni<nImages[seq]-1)
T = vTimestampsCam[seq][ni+1]-tframe;
@ -203,7 +215,7 @@ int main(int argc, char **argv)
if(seq < num_seq - 1)
{
cout << "Changing the dataset" << endl;
// Step 8: switch to the next dataset
SLAM.ChangeDataset();
}
@ -211,13 +223,14 @@ int main(int argc, char **argv)
// cout << "ttrack_tot = " << ttrack_tot << std::endl;
// Stop all threads
// Step 9: shut down all SLAM threads
SLAM.Shutdown();
// Tracking time statistics
// Save camera trajectory
// Step 10: save the camera poses (trajectory)
if (bFileName)
{
const string kf_file = "kf_" + string(argv[argc-1]) + ".txt";
@ -250,6 +263,14 @@ int main(int argc, char **argv)
return 0;
}
/**
 * @brief Load the image paths and timestamps
 *
 * @param[in]  strImagePath path to the folder containing the images
 * @param[in]  strPathTimes path to the file with the image timestamps
 * @param[out] vstrImages   full paths of the images
 * @param[out] vTimeStamps  timestamps corresponding to vstrImages
 */
void LoadImages(const string &strImagePath, const string &strPathTimes,
vector<string> &vstrImages, vector<double> &vTimeStamps)
{
@ -276,10 +297,19 @@ void LoadImages(const string &strImagePath, const string &strPathTimes,
}
}
/**
 * @brief Load the IMU data
 *
 * @param[in]  strImuPath  path to the IMU data file
 * @param[out] vTimeStamps IMU timestamps
 * @param[out] vAcc        accelerometer measurements
 * @param[out] vGyro       gyroscope measurements
 */
void LoadIMU(const string &strImuPath, vector<double> &vTimeStamps, vector<cv::Point3f> &vAcc, vector<cv::Point3f> &vGyro)
{
ifstream fImu;
fImu.open(strImuPath.c_str());
// Reserve space for 5000 entries; the vectors grow automatically if more are needed
vTimeStamps.reserve(5000);
vAcc.reserve(5000);
vGyro.reserve(5000);
@ -288,6 +318,7 @@ void LoadIMU(const string &strImuPath, vector<double> &vTimeStamps, vector<cv::P
{
string s;
getline(fImu,s);
// Skip comment lines and invalid data
if (s[0] == '#')
continue;
@ -305,6 +336,7 @@ void LoadIMU(const string &strImuPath, vector<double> &vTimeStamps, vector<cv::P
item = s.substr(0, pos);
data[6] = stod(item);
// Note: the timestamp is divided by 1e9 to convert from nanoseconds to seconds
vTimeStamps.push_back(data[0]/1e9);
vAcc.push_back(cv::Point3f(data[4],data[5],data[6]));
vGyro.push_back(cv::Point3f(data[1],data[2],data[3]));
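The nanosecond-to-second conversion noted above is easy to get wrong; a self-contained sketch of parsing one IMU line with the same comma-separated layout as the TUM-VI/EuRoC csv files (timestamp[ns], wx, wy, wz, ax, ay, az). The struct and function names are hypothetical:

#include <sstream>
#include <string>
#include <iostream>

struct ImuSample { double t; double w[3]; double a[3]; };

// Illustrative sketch: parse "timestamp_ns,wx,wy,wz,ax,ay,az" into seconds plus gyro/accel values
static bool ParseImuLine(const std::string &line, ImuSample &out)
{
    if (line.empty() || line[0] == '#') return false;   // skip comments, like LoadIMU does
    std::stringstream ss(line);
    std::string item;
    double v[7];
    for (int i = 0; i < 7; i++)
    {
        if (!std::getline(ss, item, ',')) return false;
        v[i] = std::stod(item);
    }
    out.t = v[0] / 1e9;                                  // nanoseconds -> seconds
    out.w[0] = v[1]; out.w[1] = v[2]; out.w[2] = v[3];   // gyroscope comes first in the file
    out.a[0] = v[4]; out.a[1] = v[5]; out.a[2] = v[6];   // accelerometer comes last
    return true;
}

int main()
{
    ImuSample s;
    if (ParseImuLine("1520531829794121563,0.01,-0.02,0.005,9.78,0.1,-0.2", s))
        std::cout << "t = " << s.t << " s" << std::endl;  // timestamp in seconds (~1.52e9)
    return 0;
}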

View File

@ -1333,7 +1333,13 @@ int TemplatedVocabulary<TDescriptor,F>::stopWords(double minWeight)
}
// --------------------------------------------------------------------------
/**
 * @brief Load the ORB vocabulary from a text file
 *
 * @param[in] filename path to the vocabulary text file
 * @return true  loaded successfully
 * @return false loading failed
 */
template<class TDescriptor, class F>
bool TemplatedVocabulary<TDescriptor,F>::loadFromTextFile(const std::string &filename)
{
@ -1341,7 +1347,7 @@ bool TemplatedVocabulary<TDescriptor,F>::loadFromTextFile(const std::string &fil
f.open(filename.c_str());
if(f.eof())
return false;
m_words.clear();
m_nodes.clear();
@ -1350,8 +1356,8 @@ bool TemplatedVocabulary<TDescriptor,F>::loadFromTextFile(const std::string &fil
getline(f,s);
stringstream ss;
ss << s;
ss >> m_k; // branching factor of the vocabulary tree
ss >> m_L; // depth of the vocabulary tree
int n1, n2;
ss >> n1;
ss >> n2;
@ -1359,20 +1365,22 @@ bool TemplatedVocabulary<TDescriptor,F>::loadFromTextFile(const std::string &fil
if(m_k<0 || m_k>20 || m_L<1 || m_L>10 || n1<0 || n1>5 || n2<0 || n2>3)
{
std::cerr << "Vocabulary loading failure: This is not a correct text file!" << endl;
return false;
}
m_scoring = (ScoringType)n1;     // scoring type
m_weighting = (WeightingType)n2; // weighting type
createScoringObject();
// The total number of nodes is the sum of a geometric series
//! Bug note: this does not include the final level of leaf nodes; it should arguably be ((pow((double)m_k, (double)m_L + 2) - 1)/(m_k - 1))
//! It has no real effect, though: this is only a reserve, and the actual storage grows step by step through resize
int expected_nodes =
(int)((pow((double)m_k, (double)m_L + 1) - 1)/(m_k - 1));
m_nodes.reserve(expected_nodes);
// Pre-allocate space for the words (leaf nodes)
m_words.reserve(pow((double)m_k, (double)m_L + 1));
// The first node is the root; its id is set to 0
m_nodes.resize(1);
m_nodes[0].id = 0;
while(!f.eof())
@ -1381,40 +1389,53 @@ bool TemplatedVocabulary<TDescriptor,F>::loadFromTextFile(const std::string &fil
getline(f,snode);
stringstream ssnode;
ssnode << snode;
// nid is the id of the current node; it simply follows the reading order, starting from 0
int nid = m_nodes.size();
// Grow the node array by one
m_nodes.resize(m_nodes.size()+1);
m_nodes[nid].id = nid;
// The first number in each line is the parent node id
int pid ;
ssnode >> pid;
// Record the parent/child relationship between the node ids
m_nodes[nid].parent = pid;
m_nodes[pid].children.push_back(nid);
// The second number indicates whether this node is a leaf (a word)
int nIsLeaf;
ssnode >> nIsLeaf;
// Each feature descriptor is 256 bits; one byte holds 8 bits, so a descriptor takes 32 bytes to store.
// Here F::L = 32, so 32 byte values are read and concatenated into the string ssd
stringstream ssd;
for(int iD=0;iD<F::L;iD++)
{
string sElement;
ssnode >> sElement;
ssd << sElement << " ";
}
// Store ssd as the descriptor of this node
F::fromString(m_nodes[nid].descriptor, ssd.str());
// Read the last number: the node weight (only meaningful for words)
ssnode >> m_nodes[nid].weight;
if(nIsLeaf>0)
{
// If it is a leaf (a word), store it in m_words
int wid = m_words.size();
m_words.resize(wid+1);
// Store the word id, which is unique
m_nodes[nid].word_id = wid;
// Build vector<Node*> m_words, which stores a pointer to the node holding each word
m_words[wid] = &m_nodes[nid];
}
else
{
// Non-leaf node: reserve space for its m_k children
m_nodes[nid].children.reserve(m_k);
}
}
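The reserve sizes discussed above come from the size of a full m_k-ary tree. A small standalone check of the geometric-series identity, using k = 10 and L = 6 as assumed example values and showing both the count the code uses and the count the bug note proposes:

#include <cmath>
#include <cstdio>

// Nodes of a full k-ary tree whose deepest level is D: sum_{i=0}^{D} k^i = (k^(D+1) - 1)/(k - 1)
static double FullTreeNodes(double k, double D)
{
    return (std::pow(k, D + 1.0) - 1.0) / (k - 1.0);
}

int main()
{
    const double k = 10.0, L = 6.0;  // assumed branching factor and depth
    std::printf("reserve used in the code (D = L):     %.0f\n", FullTreeNodes(k, L));
    std::printf("value from the bug note  (D = L + 1): %.0f\n", FullTreeNodes(k, L + 1.0));
    return 0;
}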

View File

@ -103,13 +103,14 @@ public:
public:
// Tracking states
enum eTrackingState{
SYSTEM_NOT_READY=-1, // System not ready yet, typically while loading the settings and vocabulary files right after startup
NO_IMAGES_YET=0,     // No images received yet
NOT_INITIALIZED=1,   // Images received but initialization not completed
OK=2,                // Tracking normally
RECENTLY_LOST=3,     // IMU modes only: the current map has more than 10 KFs and tracking has been lost for less than 5 s; pure visual modes never use this state
LOST=4,              // IMU modes: tracking lost for more than 5 s; pure visual modes: relocalization failed
OK_KLT=5
};
@ -275,6 +276,7 @@ protected:
int mMaxFrames;
int mnFirstImuFrameId;
// Number of frames after which the IMU can be reset; usually set equal to the frame rate, i.e. about 1 s of data
int mnFramesToResetIMU;
// Threshold close/far points
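The RECENTLY_LOST / LOST distinction described above boils down to two thresholds (more than 10 keyframes in the map, lost for less than 5 s). A toy, self-contained sketch of that decision, using a mirrored enum and a hypothetical helper rather than the real Tracking::eTrackingState:

#include <cstdio>

// Mirrors the states above for illustration only; not the ORB_SLAM3 type.
enum class TrackState { OK, RECENTLY_LOST, LOST };

// Assumed illustrative logic: IMU modes may stay RECENTLY_LOST for a short while,
// pure visual modes (or long outages / small maps) go straight to LOST.
static TrackState NextStateAfterTrackingFailure(bool inertial, int kfsInMap, double secondsSinceLost)
{
    if (inertial && kfsInMap > 10 && secondsSinceLost < 5.0)
        return TrackState::RECENTLY_LOST;
    return TrackState::LOST;
}

int main()
{
    std::printf("%d\n", (int)NextStateAfterTrackingFailure(true, 25, 1.2));   // 1 -> RECENTLY_LOST
    std::printf("%d\n", (int)NextStateAfterTrackingFailure(false, 25, 1.2));  // 2 -> LOST
    return 0;
}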

View File

@ -52,7 +52,7 @@ System::System(const string &strVocFile, const string &strSettingsFile, const eS
"under certain conditions. See LICENSE.txt." << endl << endl;
cout << "Input sensor was set to: ";
// Step 1: print the current sensor type
if(mSensor==MONOCULAR)
cout << "Monocular" << endl;
else if(mSensor==STEREO)
@ -65,9 +65,10 @@ System::System(const string &strVocFile, const string &strSettingsFile, const eS
cout << "Stereo-Inertial" << endl;
//Check settings file
// Step 2: read the settings file
cv::FileStorage fsSettings(strSettingsFile.c_str(), // convert the settings file name to a C string
cv::FileStorage::READ); // read-only
// If it cannot be opened, print an error message
if(!fsSettings.isOpened())
{
cerr << "Failed to open settings file at: " << strSettingsFile << endl;
@ -78,13 +79,14 @@ System::System(const string &strVocFile, const string &strSettingsFile, const eS
//----
//Load ORB Vocabulary
// Step 3: load the ORB vocabulary
cout << endl << "Loading ORB Vocabulary. This could take a while..." << endl;
// Create a new ORB vocabulary
mpVocabulary = new ORBVocabulary();
// Load the pre-trained ORB vocabulary and return a success/failure flag
bool bVocLoad = mpVocabulary->loadFromTextFile(strVocFile);
// If loading failed, print an error message
if(!bVocLoad)
{
cerr << "Wrong path to vocabulary. " << endl;
@ -94,13 +96,14 @@ System::System(const string &strVocFile, const string &strSettingsFile, const eS
cout << "Vocabulary loaded!" << endl << endl;
//Create KeyFrame Database
// Step 4: create the keyframe database
mpKeyFrameDatabase = new KeyFrameDatabase(*mpVocabulary);
//Create the Atlas
// Step 5: create the Atlas (multi-map); the argument 0 means the initial keyframe id is 0
mpAtlas = new Atlas(0);
// The commented-out block below suggests the author plans to add map reloading; something to look forward to
/*if(strLoadingFile.empty())
{
//Load ORB Vocabulary
@ -167,24 +170,31 @@ System::System(const string &strVocFile, const string &strSettingsFile, const eS
//usleep(10*1000*1000);
}*/
// Set the sensor type in the Atlas
if (mSensor==IMU_STEREO || mSensor==IMU_MONOCULAR)
mpAtlas->SetInertialSensor();
// Step 6: create the tracking, local mapping, loop closing and viewer threads in turn
//Create Drawers. These are used by the Viewer
// Create the classes that draw frames and the map; they are used by the Viewer
mpFrameDrawer = new FrameDrawer(mpAtlas);
mpMapDrawer = new MapDrawer(mpAtlas, strSettingsFile);
//Initialize the Tracking thread
//(it will live in the main thread of execution, the one that called this constructor)
// Create the tracking thread (it runs in the main thread)
cout << "Seq. Name: " << strSequence << endl;
mpTracker = new Tracking(this, mpVocabulary, mpFrameDrawer, mpMapDrawer,
mpAtlas, mpKeyFrameDatabase, strSettingsFile, mSensor, strSequence);
//Initialize the Local Mapping thread and launch
// Create and launch the local mapping thread
mpLocalMapper = new LocalMapping(this, mpAtlas, mSensor==MONOCULAR || mSensor==IMU_MONOCULAR, mSensor==IMU_MONOCULAR || mSensor==IMU_STEREO, strSequence);
mptLocalMapping = new thread(&ORB_SLAM3::LocalMapping::Run,mpLocalMapper);
// initFr is the id of the initialization frame; it is set to 0 in the code
mpLocalMapper->mInitFr = initFr;
// Maximum depth allowed for 3D map points; points beyond this threshold are likely poorly triangulated and are discarded
mpLocalMapper->mThFarPoints = fsSettings["thFarPoints"];
if(mpLocalMapper->mThFarPoints!=0)
{
@ -196,10 +206,12 @@ System::System(const string &strVocFile, const string &strSettingsFile, const eS
//Initialize the Loop Closing thread and launch
// mSensor!=MONOCULAR && mSensor!=IMU_MONOCULAR
// Create and launch the loop closing thread
mpLoopCloser = new LoopClosing(mpAtlas, mpKeyFrameDatabase, mpVocabulary, mSensor!=MONOCULAR); // mSensor!=MONOCULAR);
mptLoopClosing = new thread(&ORB_SLAM3::LoopClosing::Run, mpLoopCloser);
//Initialize the Viewer thread and launch
// Create and launch the viewer thread
if(bUseViewer)
{
mpViewer = new Viewer(this, mpFrameDrawer,mpMapDrawer,mpTracker,strSettingsFile);
@ -210,6 +222,7 @@ System::System(const string &strVocFile, const string &strSettingsFile, const eS
}
//Set pointers between threads
// Set the pointers between the threads
mpTracker->SetLocalMapper(mpLocalMapper);
mpTracker->SetLoopClosing(mpLoopCloser);
@ -348,8 +361,18 @@ cv::Mat System::TrackRGBD(const cv::Mat &im, const cv::Mat &depthmap, const doub
return Tcw;
}
/**
 * @brief Track a frame in monocular / monocular-VIO mode
 *
 * @param[in] im        input image
 * @param[in] timestamp image timestamp
 * @param[in] vImuMeas  IMU measurements since the previous frame
 * @param[in] filename  debug file name
 * @return cv::Mat      camera pose Tcw of the current frame
 */
cv::Mat System::TrackMonocular(const cv::Mat &im, const double &timestamp, const vector<IMU::Point>& vImuMeas, string filename)
{
// Make sure the sensor is monocular or monocular-inertial
if(mSensor!=MONOCULAR && mSensor!=IMU_MONOCULAR)
{
cerr << "ERROR: you called TrackMonocular but input sensor was not set to Monocular nor Monocular-Inertial." << endl;
@ -359,6 +382,7 @@ cv::Mat System::TrackMonocular(const cv::Mat &im, const double &timestamp, const
// Check mode change
{
unique_lock<mutex> lock(mMutexMode);
// When mbActivateLocalizationMode is true, stop the local mapping thread (tracking-only mode)
if(mbActivateLocalizationMode)
{
mpLocalMapper->RequestStop();
@ -368,7 +392,7 @@ cv::Mat System::TrackMonocular(const cv::Mat &im, const double &timestamp, const
{
usleep(1000);
}
// Once local mapping is stopped, only tracking runs: it computes the camera pose but does not update the local map
mpTracker->InformOnlyTracking(true);
mbActivateLocalizationMode = false;
}
@ -396,11 +420,12 @@ cv::Mat System::TrackMonocular(const cv::Mat &im, const double &timestamp, const
mbResetActiveMap = false;
}
}
// In monocular-VIO mode, push the IMU measurements into mlQueueImuData
if (mSensor == System::IMU_MONOCULAR)
for(size_t i_imu = 0; i_imu < vImuMeas.size(); i_imu++)
mpTracker->GrabImuData(vImuMeas[i_imu]);
// Compute the camera pose
cv::Mat Tcw = mpTracker->GrabImageMonocular(im,timestamp,filename);
unique_lock<mutex> lock2(mMutexState);
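Putting the two sides together, a caller feeds TrackMonocular one image plus every IMU sample recorded since the previous image, as mono_inertial_tum_vi.cc does. A compressed sketch of that calling pattern, where the function name and the pre-filled containers are assumptions for illustration:

#include <opencv2/core.hpp>
#include <opencv2/imgcodecs.hpp>
#include <vector>
#include <string>
#include "System.h"  // ORB_SLAM3

// Illustrative sketch of the per-frame loop; the containers are assumed to be
// filled beforehand (e.g. by LoadImages / LoadIMU).
void RunSequence(ORB_SLAM3::System &SLAM,
                 const std::vector<std::string> &imgPaths,
                 const std::vector<double> &imgTs,
                 const std::vector<cv::Point3f> &accs,
                 const std::vector<cv::Point3f> &gyros,
                 const std::vector<double> &imuTs,
                 size_t firstImu)
{
    std::vector<ORB_SLAM3::IMU::Point> vImuMeas;
    for (size_t ni = 0; ni < imgPaths.size(); ni++)
    {
        cv::Mat im = cv::imread(imgPaths[ni], cv::IMREAD_GRAYSCALE);
        vImuMeas.clear();
        // Collect all IMU samples up to and including the current image timestamp
        if (ni > 0)
            while (firstImu < imuTs.size() && imuTs[firstImu] <= imgTs[ni])
            {
                vImuMeas.push_back(ORB_SLAM3::IMU::Point(
                    accs[firstImu].x, accs[firstImu].y, accs[firstImu].z,
                    gyros[firstImu].x, gyros[firstImu].y, gyros[firstImu].z,
                    imuTs[firstImu]));
                firstImu++;
            }
        // The returned 4x4 matrix is the camera pose Tcw for this frame
        cv::Mat Tcw = SLAM.TrackMonocular(im, imgTs[ni], vImuMeas);
        (void)Tcw;
    }
}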

View File

@ -1034,11 +1034,20 @@ cv::Mat Tracking::GrabImageRGBD(const cv::Mat &imRGB,const cv::Mat &imD, const d
return mCurrentFrame.mTcw.clone();
}
/**
 * @brief Process an input monocular frame
 *
 * @param[in] im        input image
 * @param[in] timestamp image timestamp
 * @param[in] filename  debug file name
 * @return cv::Mat      pose Tcw of the current frame
 */
cv::Mat Tracking::GrabImageMonocular(const cv::Mat &im, const double &timestamp, string filename)
{
mImGray = im;
// Step 1: convert the color image to grayscale
// If the image has 3 or 4 channels, it must be converted to a single-channel grayscale image
if(mImGray.channels()==3)
{
if(mbRGB)
@ -1054,6 +1063,7 @@ cv::Mat Tracking::GrabImageMonocular(const cv::Mat &im, const double &timestamp,
cvtColor(mImGray,mImGray,CV_BGRA2GRAY);
}
// Step 2: construct the Frame
if (mSensor == System::MONOCULAR)
{
if(mState==NOT_INITIALIZED || mState==NO_IMAGES_YET ||(lastID - initID) < mMaxFrames)
@ -1071,6 +1081,7 @@ cv::Mat Tracking::GrabImageMonocular(const cv::Mat &im, const double &timestamp,
mCurrentFrame = Frame(mImGray,timestamp,mpORBextractorLeft,mpORBVocabulary,mpCamera,mDistCoef,mbf,mThDepth,&mLastFrame,*mpImuCalib);
}
// t0 stores the timestamp of the first frame received before initialization
if (mState==NO_IMAGES_YET)
t0=timestamp;
@ -1080,6 +1091,8 @@ cv::Mat Tracking::GrabImageMonocular(const cv::Mat &im, const double &timestamp,
mCurrentFrame.mnDataset = mnNumDataset;
lastID = mCurrentFrame.mnId;
// Step 3: track
Track();
std::chrono::steady_clock::time_point t1 = std::chrono::steady_clock::now();
@ -1101,6 +1114,7 @@ cv::Mat Tracking::GrabImageMonocular(const cv::Mat &im, const double &timestamp,
f_track_times << t_track << endl;
#endif
// Return the pose of the current frame
return mCurrentFrame.mTcw.clone();
}
@ -1383,7 +1397,10 @@ void Tracking::ResetFrameIMU()
// TODO To implement...
}
/**
 * @brief Main tracking function, independent of the input sensor
 */
void Tracking::Track()
{
#ifdef SAVE_TIMES
@ -1400,6 +1417,7 @@ void Tracking::Track()
mbStep = false;
}
// Step 1: if local mapping flags the IMU as bad, reset the active map
if(mpLocalMapper->mbBadImu)
{
cout << "TRACK: Reset map because local mapper set the bad imu flag " << endl;
@ -1407,12 +1425,16 @@ void Tracking::Track()
return;
}
// Get the currently active map from the Atlas
Map* pCurrentMap = mpAtlas->GetCurrentMap();
// Step 2: handle abnormal timestamps
if(mState!=NO_IMAGES_YET)
{
// Both branches below are abnormal cases: do not track, just return
if(mLastFrame.mTimeStamp>mCurrentFrame.mTimeStamp)
{
// If the current image timestamp is older than the previous one, something went wrong: clear the IMU queue and create a new sub-map
cerr << "ERROR: Frame with a timestamp older than previous frame detected!" << endl;
unique_lock<mutex> lock(mMutexImuQueue);
mlQueueImuData.clear();
@ -1421,34 +1443,38 @@ void Tracking::Track()
}
else if(mCurrentFrame.mTimeStamp>mLastFrame.mTimeStamp+1.0)
{
// If the current timestamp is more than 1 s ahead of the previous frame, there is an obvious timestamp jump: reset the map and return
cout << "id last: " << mLastFrame.mnId << " id curr: " << mCurrentFrame.mnId << endl;
if(mpAtlas->isInertial())
{
// If the IMU of the current map has been initialized
if(mpAtlas->isImuInitialized())
{
cout << "Timestamp jump detected. State set to LOST. Reseting IMU integration..." << endl;
if(!pCurrentMap->GetIniertialBA2())
{
// If the IMU of the current sub-map has not gone through BA2, reset the active map
mpSystem->ResetActiveMap();
}
else
{
// If the IMU of the current sub-map has gone through BA2, create a new sub-map
CreateMapInAtlas();
}
}
else
{
// If the IMU of the current sub-map has not been initialized, reset the active map
cout << "Timestamp jump detected, before IMU initialization. Reseting..." << endl;
mpSystem->ResetActiveMap();
}
}
// Do not track; return directly
return;
}
}
// Step 3: in IMU modes, set the IMU bias of the current frame from the last keyframe
if ((mSensor == System::IMU_MONOCULAR || mSensor == System::IMU_STEREO) && mpLastKeyFrame)
mCurrentFrame.SetNewBias(mpLastKeyFrame->GetImuBias());
@ -1458,12 +1484,13 @@ void Tracking::Track()
}
mLastProcessedState=mState;
// Step 4: in IMU modes, preintegrate the IMU measurements
if ((mSensor == System::IMU_MONOCULAR || mSensor == System::IMU_STEREO) && !mbCreatedMap)
{
#ifdef SAVE_TIMES
std::chrono::steady_clock::time_point t0 = std::chrono::steady_clock::now();
#endif
// Preintegrate the IMU data
PreintegrateIMU();
#ifdef SAVE_TIMES
std::chrono::steady_clock::time_point t1 = std::chrono::steady_clock::now();
@ -1476,10 +1503,14 @@ void Tracking::Track()
mbCreatedMap = false;
// Get Map Mutex -> Map cannot be changed
// Lock the map during the update so it cannot change
// Question: does this hurt real-time map updates?
// Answer: most of the time is spent on feature extraction and matching while constructing the frame, and the map is not locked then, so there is enough time to update it
unique_lock<mutex> lock(pCurrentMap->mMutexMapUpdate);
mbMapUpdated = false;
// Check whether the map change index has been updated
int nCurMapChangeIndex = pCurrentMap->GetMapChangeIndex();
int nMapChangeIndex = pCurrentMap->GetLastMapChange();
if(nCurMapChangeIndex>nMapChangeIndex)
@ -1492,10 +1523,15 @@ void Tracking::Track()
if(mState==NOT_INITIALIZED)
{
// Step 5: initialization
if(mSensor==System::STEREO || mSensor==System::RGBD || mSensor==System::IMU_STEREO)
{
// Stereo and RGB-D cameras share the same initialization function
StereoInitialization();
}
else
{
// Monocular initialization
MonocularInitialization();
}
@ -1503,21 +1539,26 @@ void Tracking::Track()
if(mState!=OK) // If rightly initialized, mState=OK
{
// If initialization did not succeed, return directly
mLastFrame = Frame(mCurrentFrame);
return;
}
if(mpAtlas->GetAllMaps().size() == 1)
{
// If the current map is the first map, record the current frame id as the first frame id
mnFirstFrameId = mCurrentFrame.mnId;
}
}
else
{
// System is initialized. Track Frame.
// Step 6: the system is initialized; the actual tracking happens below
bool bOK;
// Initial camera pose estimation using motion model or relocalization (if tracking is lost)
// mbOnlyTracking == false means normal SLAM mode (localization + map update); true means localization-only mode
// It defaults to false when Tracking is constructed; the viewer has a switch, ActivateLocalizationMode, that toggles mbOnlyTracking
if(!mbOnlyTracking)
{
#ifdef SAVE_TIMES
@ -1527,12 +1568,22 @@ void Tracking::Track()
// State OK
// Local Mapping is activated. This is the normal behaviour, unless
// you explicitly activate the "only tracking" mode.
// Tracking runs in normal SLAM mode, with map updates
// If tracking is currently in the OK state
if(mState==OK)
{
// Local Mapping might have changed some MapPoints tracked in last frame
// Step 6.1: check and update the MapPoints of the last frame that were replaced
// The local mapping thread may have replaced some of the map points tracked in the last frame; check for that here
CheckReplacedInLastFrame();
// Step 6.2: if the motion model is empty and the IMU is not initialized, or relocalization just happened, track the reference keyframe; otherwise track with the constant-velocity model
// First condition: the motion model is empty and the IMU is not initialized, meaning tracking has just started with the first frame, or tracking was lost
// Second condition: the current frame closely follows a relocalization, so the relocalized frame is used to recover the pose
// mnLastRelocFrameId is the frame of the last relocalization
if((mVelocity.empty() && !pCurrentMap->isImuInitialized()) || mCurrentFrame.mnId<mnLastRelocFrameId+2)
{
//Verbose::PrintMess("TRACK: Track with respect to the reference KF ", Verbose::VERBOSITY_DEBUG);
@ -1541,14 +1592,21 @@ void Tracking::Track()
else
{
//Verbose::PrintMess("TRACK: Track with motion model", Verbose::VERBOSITY_DEBUG);
// Track with the constant-velocity model: assume the motion from the frame before last to the last frame equals the motion from the last frame to the current frame
// The constant-velocity model provides the initial pose of the current frame, which is tracked against the most recent ordinary frame
// Matches are found by projecting map points into the current frame; optimizing the reprojection error of the matched 3D points yields the pose
bOK = TrackWithMotionModel();
if(!bOK)
// The constant-velocity model failed, so fall back to tracking the reference keyframe
bOK = TrackReferenceKeyFrame();
}
// Step 6.3: if both constant-velocity and reference-keyframe tracking failed, and certain conditions hold, mark the state as LOST
if (!bOK)
{
// Condition 1: the current frame is within about 1 s of the last relocalization frame
// mnFramesToResetIMU is the number of frames after which the IMU can be reset, usually the frame rate, i.e. about 1 s
// Condition 2: monocular-inertial or stereo-inertial mode
if ( mCurrentFrame.mnId<=(mnLastRelocFrameId+mnFramesToResetIMU) &&
(mSensor==System::IMU_MONOCULAR || mSensor==System::IMU_STEREO))
{
@ -1556,6 +1614,8 @@ void Tracking::Track()
}
else if(pCurrentMap->KeyFramesInMap()>10)
{
// The current map has more than 10 keyframes and the current frame is more than 1 s past the last relocalization frame (implicit condition)
// Mark the state as RECENTLY_LOST
cout << "KF in map: " << pCurrentMap->KeyFramesInMap() << endl;
mState = RECENTLY_LOST;
mTimeStampLost = mCurrentFrame.mTimeStamp;
@ -1567,21 +1627,26 @@ void Tracking::Track()
}
}
}
else // tracking is not in the OK state; handle it as follows
{
if (mState == RECENTLY_LOST)
{
// RECENTLY_LOST only exists in IMU modes; pure visual modes never reach it
Verbose::PrintMess("Lost for a short time", Verbose::VERBOSITY_NORMAL);
// Set bOK to true for now
bOK = true;
if((mSensor == System::IMU_MONOCULAR || mSensor == System::IMU_STEREO)) // IMU modes
{
// Step 6.4: if the IMU of the current map is already initialized, predict the pose from the IMU data
if(pCurrentMap->isImuInitialized())
PredictStateIMU();
else
bOK = false;
// If more than 5 s have passed since tracking was lost (time_recently_lost defaults to 5),
// change the state from RECENTLY_LOST to LOST
if (mCurrentFrame.mTimeStamp-mTimeStampLost>time_recently_lost)
{
mState = LOST;
@ -1591,10 +1656,12 @@ void Tracking::Track()
}
else
{
// Step 6.5: in pure visual mode, relocalize, mainly via BoW search and EPnP pose estimation
// TODO fix relocalization
bOK = Relocalization();
if(!bOK)
{
// In pure visual mode, a failed relocalization means the state becomes LOST
mState = LOST;
Verbose::PrintMess("Track Lost...", Verbose::VERBOSITY_NORMAL);
bOK=false;
@ -1603,14 +1670,16 @@ void Tracking::Track()
}
else if (mState == LOST)
{
// Step 6.6: the state is LOST
Verbose::PrintMess("A new map is started...", Verbose::VERBOSITY_NORMAL);
if (pCurrentMap->KeyFramesInMap()<10)
{
// The current map has fewer than 10 keyframes: reset the current map
mpSystem->ResetActiveMap();
cout << "Reseting current map..." << endl;
}else
// The current map has 10 or more keyframes: create a new map
CreateMapInAtlas();
if(mpLastKeyFrame)
@ -1633,23 +1702,31 @@ void Tracking::Track()
else
{
// Localization Mode: Local Mapping is deactivated (TODO Not available in inertial mode)
// Tracking only; the local mapping thread does not run
if(mState==LOST)
{
if(mSensor == System::IMU_MONOCULAR || mSensor == System::IMU_STEREO)
Verbose::PrintMess("IMU. State LOST", Verbose::VERBOSITY_NORMAL);
// Step 6.1: in the LOST state, relocalize
bOK = Relocalization();
}
else
{
// mbVO only exists when mbOnlyTracking is true
// mbVO == false means this frame matched plenty of MapPoints and tracking is healthy (note this is slightly counter-intuitive)
// mbVO == true means this frame matched very few MapPoints (fewer than 10) and tracking is about to be lost
if(!mbVO)
{
// In last frame we tracked enough MapPoints in the map
// Step 6.2: tracking is healthy, so use the constant-velocity model or the reference keyframe
if(!mVelocity.empty())
{
// Prefer the constant-velocity model
bOK = TrackWithMotionModel();
}
else
{
// The constant-velocity model is not available, so track against the reference keyframe
bOK = TrackReferenceKeyFrame();
}
}
@ -1661,32 +1738,49 @@ void Tracking::Track()
// If relocalization is successful we choose that solution, otherwise we retain
// the "visual odometry" solution.
// mbVO == true means this frame matched very few (fewer than 10) map points and is about to be lost, so both tracking and relocalization are attempted
// MM = Motion Model: the result of tracking with the constant-velocity motion model
bool bOKMM = false;
// The result of tracking via relocalization
bool bOKReloc = false;
// Map points obtained with the motion model
vector<MapPoint*> vpMPsMM;
// Outliers found while tracking with the motion model
vector<bool> vbOutMM;
// Pose obtained with the motion model
cv::Mat TcwMM;
// Step 6.3: when the motion model is available, compute the pose from it
if(!mVelocity.empty())
{
bOKMM = TrackWithMotionModel();
// Save the constant-velocity tracking results in these variables, because the relocalization below overwrites them
vpMPsMM = mCurrentFrame.mvpMapPoints;
vbOutMM = mCurrentFrame.mvbOutlier;
TcwMM = mCurrentFrame.mTcw.clone();
}
// Step 6.4: use relocalization to obtain the pose of the current frame
bOKReloc = Relocalization();
// Step 6.5: update the state according to the constant-velocity and relocalization results
if(bOKMM && !bOKReloc)
{
// The motion model succeeded and relocalization failed: restore the saved motion-model results
mCurrentFrame.SetPose(TcwMM);
mCurrentFrame.mvpMapPoints = vpMPsMM;
mCurrentFrame.mvbOutlier = vbOutMM;
//? Possible bug: this seems to increase the observation count twice, since TrackLocalMap later does the same
// If the current frame matched very few 3D points, increase the found count of the visible map points
if(mbVO)
{
// Update the found count of the map points of the current frame
for(int i =0; i<mCurrentFrame.N; i++)
{
// If this feature has an associated map point and it is not an outlier
if(mCurrentFrame.mvpMapPoints[i] && !mCurrentFrame.mvbOutlier[i])
{
// Increase the number of frames in which this map point was found
mCurrentFrame.mvpMapPoints[i]->IncreaseFound();
}
}
@ -1694,18 +1788,21 @@ void Tracking::Track()
}
else if(bOKReloc)
{
// As long as relocalization succeeded, tracking continues normally (relocalization is trusted over tracking)
mbVO = false;
}
// If either one succeeded, the step is considered successful
bOK = bOKReloc || bOKMM;
}
}
}
// Use the latest keyframe as the reference keyframe of the current frame if it has none
if(!mCurrentFrame.mpReferenceKF)
mCurrentFrame.mpReferenceKF = mpReferenceKF;
// If we have an initial estimation of the camera pose and matching. Track the local map.
// Step 7: with the initial pose of the current frame available, track the local map to get more matches and optimize the pose
// The previous steps tracked only one frame for an initial pose; here local keyframes and local map points are matched against the current frame by projection, and the pose is optimized with the extra MapPoint matches
if(!mbOnlyTracking)
{
if(bOK)
@ -1713,6 +1810,7 @@ void Tracking::Track()
#ifdef SAVE_TIMES
std::chrono::steady_clock::time_point time_StartTrackLocalMap = std::chrono::steady_clock::now();
#endif
// Track the local map
bOK = TrackLocalMap();
#ifdef SAVE_TIMES
std::chrono::steady_clock::time_point time_EndTrackLocalMap = std::chrono::steady_clock::now();
@ -1734,7 +1832,9 @@ void Tracking::Track()
bOK = TrackLocalMap();
}
// Step 8: determine from the above whether tracking succeeded
if(bOK)
// Only if things are still OK at this point did tracking really succeed
mState = OK;
else if (mState == OK)
{
@ -1743,6 +1843,7 @@ void Tracking::Track()
Verbose::PrintMess("Track lost for less than one second...", Verbose::VERBOSITY_NORMAL);
if(!pCurrentMap->isImuInitialized() || !pCurrentMap->GetIniertialBA2())
{
// In IMU modes, if the IMU is not initialized or the IMU BA has not finished, reset the current map
cout << "IMU is not or recently initialized. Reseting active map..." << endl;
mpSystem->ResetActiveMap();
}
@ -1752,6 +1853,7 @@ void Tracking::Track()
else
mState=LOST; // visual to lost
// If the current frame is more than 1 s past the last relocalization frame, update the lost timestamp with the current frame timestamp
if(mCurrentFrame.mnId>mnLastRelocFrameId+mMaxFrames)
{
mTimeStampLost = mCurrentFrame.mTimeStamp;
@ -1759,7 +1861,10 @@ void Tracking::Track()
}
// Save frame if recent relocalization, since they are used for IMU reset (as we are making a copy, it should be done once mCurrFrame is completely modified)
if((mCurrentFrame.mnId<(mnLastRelocFrameId+mnFramesToResetIMU)) && // the current frame is within 1 s of the last relocalization frame
(mCurrentFrame.mnId > mnFramesToResetIMU) &&                       // the system has been running for more than 1 s
((mSensor == System::IMU_MONOCULAR) || (mSensor == System::IMU_STEREO)) && // IMU mode
pCurrentMap->isImuInitialized())                                   // the IMU has been successfully initialized
{
// TODO check this situation
Verbose::PrintMess("Saving pointer to frame. imu needs reset...", Verbose::VERBOSITY_NORMAL);
@ -1770,10 +1875,11 @@ void Tracking::Track()
pF->mpImuPreintegratedFrame = new IMU::Preintegrated(mCurrentFrame.mpImuPreintegratedFrame);
}
if(pCurrentMap->isImuInitialized()) // the IMU has been initialized successfully
{
if(bOK) // tracking succeeded
{
// If the current frame is exactly 1 s (mnFramesToResetIMU frames) past the last relocalization, reset the IMU
if(mCurrentFrame.mnId==(mnLastRelocFrameId+mnFramesToResetIMU))
{
cout << "RESETING FRAME!!!" << endl;
@ -1785,27 +1891,33 @@ void Tracking::Track()
}
// Update drawer
// Update the image, keypoints and map points shown by the viewer
mpFrameDrawer->Update(this);
if(!mCurrentFrame.mTcw.empty())
mpMapDrawer->SetCurrentCameraPose(mCurrentFrame.mTcw);
// Step 9: if tracking succeeded or was only recently lost, update the velocity, clean up invalid map points and create a keyframe if needed
if(bOK || mState==RECENTLY_LOST)
{
// Update motion model
// Step 9.1: update mVelocity, the constant-velocity motion model used by TrackWithMotionModel
if(!mLastFrame.mTcw.empty() && !mCurrentFrame.mTcw.empty())
{
cv::Mat LastTwc = cv::Mat::eye(4,4,CV_32F);
mLastFrame.GetRotationInverse().copyTo(LastTwc.rowRange(0,3).colRange(0,3));
mLastFrame.GetCameraCenter().copyTo(LastTwc.rowRange(0,3).col(3));
// mVelocity = Tcl = Tcw * Twl, the transform from the last frame to the current frame, where Twl = LastTwc
mVelocity = mCurrentFrame.mTcw*LastTwc;
}
else
// Otherwise the velocity is empty
mVelocity = cv::Mat();
if(mSensor == System::IMU_MONOCULAR || mSensor == System::IMU_STEREO)
mpMapDrawer->SetCurrentCameraPose(mCurrentFrame.mTcw);
// Clean VO matches
// Step 9.2: remove map points that cannot be observed
for(int i=0; i<mCurrentFrame.N; i++)
{
MapPoint* pMP = mCurrentFrame.mvpMapPoints[i];
@ -1818,16 +1930,22 @@ void Tracking::Track()
}
// Delete temporal MapPoints
// Step 9.3: delete the temporary MapPoints added to the current frame in UpdateLastFrame during constant-velocity tracking (stereo and RGB-D only)
// The step above only removed these MapPoints from the current frame; here they are deleted from the MapPoint database
// The temporary map points only exist to improve frame-to-frame tracking for stereo/RGB-D cameras; they are discarded afterwards and never added to the map
for(list<MapPoint*>::iterator lit = mlpTemporalPoints.begin(), lend = mlpTemporalPoints.end(); lit!=lend; lit++)
{
MapPoint* pMP = *lit;
delete pMP;
}
// This not only clears mlpTemporalPoints but, through delete pMP above, also destroys the MapPoints the pointers refer to
// Clearing the list alone would not be enough, since it only stores pointers; the deletes above avoid a memory leak
mlpTemporalPoints.clear();
#ifdef SAVE_TIMES
std::chrono::steady_clock::time_point timeStartNewKF = std::chrono::steady_clock::now();
#endif
// Decide whether a new keyframe needs to be inserted
bool bNeedKF = NeedNewKeyFrame();
#ifdef SAVE_TIMES
std::chrono::steady_clock::time_point timeEndNewKF = std::chrono::steady_clock::now();
@ -1838,23 +1956,35 @@ void Tracking::Track()
// Check if we need to insert a new keyframe
// Step 9.4: decide whether to insert a keyframe
// Both of the following conditions must hold:
// Condition 1: bNeedKF == true, a keyframe is needed
// Condition 2: bOK == true, tracking succeeded, or the state is RECENTLY_LOST in an IMU mode
if(bNeedKF && (bOK|| (mState==RECENTLY_LOST && (mSensor == System::IMU_MONOCULAR || mSensor == System::IMU_STEREO))))
// Create a keyframe; for stereo or RGB-D this also creates new map points
CreateNewKeyFrame();
// We allow points with high innovation (considered outliers by the Huber Function)
// pass to the new keyframe, so that bundle adjustment will finally decide
// if they are outliers or not. We don't want next frame to estimate its position
// with those points so we discard them in the frame. Only has effect if lastframe is tracked
// In short: outliers flagged by the Huber kernel are kept for the new keyframe so later BA can judge them, but they are removed from the current frame so the next pose estimate does not use them
// Step 9.5: delete the map points detected as outliers in BA
for(int i=0; i<mCurrentFrame.N;i++)
{
// The first condition is checked again because earlier steps may already have removed some of these map points
if(mCurrentFrame.mvpMapPoints[i] && mCurrentFrame.mvbOutlier[i])
mCurrentFrame.mvpMapPoints[i]=static_cast<MapPoint*>(NULL);
}
}
// Reset if the camera gets lost soon after initialization
// Step 10: if tracking was lost, reset
if(mState==LOST)
{
// If the map has 5 or fewer keyframes, reset the current map and exit tracking
if(pCurrentMap->KeyFramesInMap()<=5)
{
mpSystem->ResetActiveMap();
@ -1863,28 +1993,28 @@ void Tracking::Track()
if ((mSensor == System::IMU_MONOCULAR) || (mSensor == System::IMU_STEREO))
if (!pCurrentMap->isImuInitialized())
{
// In IMU modes, if the IMU has not been initialized yet, reset the current map and exit tracking
Verbose::PrintMess("Track lost before IMU initialisation, reseting...", Verbose::VERBOSITY_QUIET);
mpSystem->ResetActiveMap();
return;
}
// The map has more than 5 keyframes, and either the mode is purely visual or the IMU is already initialized: create a new map
CreateMapInAtlas();
}
// Make sure a reference keyframe has been set
if(!mCurrentFrame.mpReferenceKF)
mCurrentFrame.mpReferenceKF = mpReferenceKF;
// Save the data of the current frame; it becomes the last frame
mLastFrame = Frame(mCurrentFrame);
}
// Step 11: record the pose information, used at the end to save the full trajectory
if(mState==OK || mState==RECENTLY_LOST)
{
// Store frame pose information to retrieve the complete camera trajectory afterwards.
if(!mCurrentFrame.mTcw.empty())
{
// Compute the relative pose Tcr = Tcw * Twr, where Twr = Trw^-1 is the inverse pose of the reference keyframe
cv::Mat Tcr = mCurrentFrame.mTcw*mCurrentFrame.mpReferenceKF->GetPoseInverse();
mlRelativeFramePoses.push_back(Tcr);
mlpReferences.push_back(mCurrentFrame.mpReferenceKF);
@ -1894,6 +2024,7 @@ void Tracking::Track()
else
{
// This can happen if tracking is lost
// If tracking was lost, reuse the values from the previous frame for the relative pose
mlRelativeFramePoses.push_back(mlRelativeFramePoses.back());
mlpReferences.push_back(mlpReferences.back());
mlFrameTimes.push_back(mlFrameTimes.back());
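The two pose products commented above (mVelocity = Tcw * Twl for the constant-velocity model, and Tcr = Tcw * Twr for trajectory saving) follow the same pattern: compose a world-to-camera pose with the inverse of another one. A standalone sketch with 4x4 cv::Mat poses; the function names are illustrative, not ORB_SLAM3 API:

#include <opencv2/core.hpp>

// Invert a 4x4 rigid-body transform [R t; 0 1] without a generic matrix inverse.
static cv::Mat InvertSE3(const cv::Mat &T)
{
    cv::Mat R = T.rowRange(0,3).colRange(0,3);
    cv::Mat t = T.rowRange(0,3).col(3);
    cv::Mat Tinv = cv::Mat::eye(4,4,CV_32F);
    cv::Mat Rt = R.t();
    Rt.copyTo(Tinv.rowRange(0,3).colRange(0,3));
    cv::Mat tinv = -Rt*t;
    tinv.copyTo(Tinv.rowRange(0,3).col(3));
    return Tinv;
}

// Constant-velocity model: Tcl = Tcw_current * Twc_last; the next pose is then predicted as Tcl * Tcw_current.
static cv::Mat PredictNextPose(const cv::Mat &TcwLast, const cv::Mat &TcwCurrent)
{
    cv::Mat velocity = TcwCurrent * InvertSE3(TcwLast);   // same product as mVelocity above
    return velocity * TcwCurrent;
}

// Trajectory saving: Tcr = Tcw * Twr; the absolute pose can later be recovered as Tcw = Tcr * Trw.
static cv::Mat RelativeToReference(const cv::Mat &Tcw, const cv::Mat &TrwReferenceKF)
{
    return Tcw * InvertSE3(TrwReferenceKF);
}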