Add comments

master
小白逆袭 2021-10-31 16:41:01 +08:00
parent 8dcefb5567
commit 3fd2fcccb7
8 changed files with 92 additions and 31 deletions

View File

@@ -41,35 +41,44 @@ namespace ORB_SLAM3 {
GeometricCamera(const std::vector<float> &_vParameters) : mvParameters(_vParameters) {}
~GeometricCamera() {}
// Projection functions
virtual cv::Point2f project(const cv::Point3f &p3D) = 0;
virtual cv::Point2f project(const cv::Matx31f &m3D) = 0;
virtual cv::Point2f project(const cv::Mat& m3D) = 0;
virtual Eigen::Vector2d project(const Eigen::Vector3d & v3D) = 0;
virtual cv::Mat projectMat(const cv::Point3f& p3D) = 0;
// Returns the uncertainty of a pixel; always returns 1.0f
virtual float uncertainty2(const Eigen::Matrix<double,2,1> &p2D) = 0;
// Unprojection
virtual cv::Point3f unproject(const cv::Point2f &p2D) = 0;
virtual cv::Mat unprojectMat(const cv::Point2f &p2D) = 0;
virtual cv::Matx31f unprojectMat_(const cv::Point2f &p2D) = 0;
// Projection Jacobian
virtual cv::Mat projectJac(const cv::Point3f &p3D) = 0;
virtual Eigen::Matrix<double,2,3> projectJac(const Eigen::Vector3d& v3D) = 0;
// Unprojection Jacobian (unused)
virtual cv::Mat unprojectJac(const cv::Point2f &p2D) = 0;
// Triangulate to recover 3D points; mainly used during monocular initialization
virtual bool ReconstructWithTwoViews(const std::vector<cv::KeyPoint>& vKeys1, const std::vector<cv::KeyPoint>& vKeys2, const std::vector<int> &vMatches12,
cv::Mat &R21, cv::Mat &t21, std::vector<cv::Point3f> &vP3D, std::vector<bool> &vbTriangulated) = 0;
// Return the calibration matrix K
virtual cv::Mat toK() = 0;
virtual cv::Matx33f toK_() = 0;
// Epipolar constraint; used when triangulating MapPoints
virtual bool epipolarConstrain(GeometricCamera* otherCamera, const cv::KeyPoint& kp1, const cv::KeyPoint& kp2, const cv::Mat& R12, const cv::Mat& t12, const float sigmaLevel, const float unc) = 0;
virtual bool epipolarConstrain_(GeometricCamera* otherCamera, const cv::KeyPoint& kp1, const cv::KeyPoint& kp2, const cv::Matx33f& R12, const cv::Matx31f& t12, const float sigmaLevel, const float unc) = 0;
// Get and set camera intrinsic parameters
float getParameter(const int i){return mvParameters[i];}
void setParameter(const float p, const size_t i){mvParameters[i] = p;}
// Number of parameters
size_t size(){return mvParameters.size();}
virtual bool matchAndtriangulate(const cv::KeyPoint& kp1, const cv::KeyPoint& kp2, GeometricCamera* pOther,
@@ -77,8 +86,8 @@ namespace ORB_SLAM3 {
const float sigmaLevel1, const float sigmaLevel2,
cv::Mat& x3Dtriangulated) = 0;
// Getters for id and type (not used elsewhere in the code)
unsigned int GetId() { return mnId; }
unsigned int GetType() { return mnType; }
const unsigned int CAM_PINHOLE = 0;
@@ -87,7 +96,7 @@ namespace ORB_SLAM3 {
static long unsigned int nNextId;
protected:
std::vector<float> mvParameters; // Stores the parameters: the intrinsics (K) and the distortion coefficients
unsigned int mnId;
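
As a usage note (not part of the commit): the abstract interface above is what the concrete models Pinhole and KannalaBrandt8 implement. Below is a minimal sketch of driving it polymorphically; the include path and the Pinhole constructor taking a std::vector<float> of [fx, fy, cx, cy] are assumptions based on this header, and the intrinsic values are made up.

#include "CameraModels/Pinhole.h"
#include <vector>

void demoProjectUnproject()
{
    std::vector<float> params = {458.654f, 457.296f, 367.215f, 248.375f}; // hypothetical fx, fy, cx, cy
    ORB_SLAM3::Pinhole cam(params);
    ORB_SLAM3::GeometricCamera& rCam = cam;    // use through the abstract interface

    cv::Point3f p3D(1.0f, 0.5f, 4.0f);         // a point in the camera frame
    cv::Point2f uv  = rCam.project(p3D);       // 3D -> pixel
    cv::Point3f ray = rCam.unproject(uv);      // pixel -> normalized ray
}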

View File

@@ -59,38 +59,47 @@ namespace ORB_SLAM3 {
mnType = CAM_FISHEYE;
}
// Projection functions
cv::Point2f project(const cv::Point3f &p3D);
cv::Point2f project(const cv::Matx31f &m3D);
cv::Point2f project(const cv::Mat& m3D);
Eigen::Vector2d project(const Eigen::Vector3d & v3D);
cv::Mat projectMat(const cv::Point3f& p3D);
// Returns the uncertainty of a pixel; always returns 1.0f
float uncertainty2(const Eigen::Matrix<double,2,1> &p2D);
// Unprojection
cv::Point3f unproject(const cv::Point2f &p2D);
cv::Mat unprojectMat(const cv::Point2f &p2D);
cv::Matx31f unprojectMat_(const cv::Point2f &p2D);
// Projection Jacobian
cv::Mat projectJac(const cv::Point3f &p3D);
Eigen::Matrix<double,2,3> projectJac(const Eigen::Vector3d& v3D);
// Unprojection Jacobian (unused)
cv::Mat unprojectJac(const cv::Point2f &p2D);
// Triangulate to recover 3D points
bool ReconstructWithTwoViews(const std::vector<cv::KeyPoint>& vKeys1, const std::vector<cv::KeyPoint>& vKeys2, const std::vector<int> &vMatches12,
cv::Mat &R21, cv::Mat &t21, std::vector<cv::Point3f> &vP3D, std::vector<bool> &vbTriangulated);
// Return the calibration matrix K
cv::Mat toK();
cv::Matx33f toK_();
// Epipolar constraint
bool epipolarConstrain(GeometricCamera* pCamera2, const cv::KeyPoint& kp1, const cv::KeyPoint& kp2, const cv::Mat& R12, const cv::Mat& t12, const float sigmaLevel, const float unc);
bool epipolarConstrain_(GeometricCamera* pCamera2, const cv::KeyPoint& kp1, const cv::KeyPoint& kp2, const cv::Matx33f& R12, const cv::Matx31f& t12, const float sigmaLevel, const float unc);
// Triangulate matched keypoints
float TriangulateMatches(GeometricCamera* pCamera2, const cv::KeyPoint& kp1, const cv::KeyPoint& kp2, const cv::Mat& R12, const cv::Mat& t12, const float sigmaLevel, const float unc, cv::Mat& p3D);
float TriangulateMatches_(GeometricCamera* pCamera2, const cv::KeyPoint& kp1, const cv::KeyPoint& kp2, const cv::Matx33f& R12, const cv::Matx31f& t12, const float sigmaLevel, const float unc, cv::Matx31f& p3D);
std::vector<int> mvLappingArea;
// Match and triangulate
bool matchAndtriangulate(const cv::KeyPoint& kp1, const cv::KeyPoint& kp2, GeometricCamera* pOther,
cv::Mat& Tcw1, cv::Mat& Tcw2,
const float sigmaLevel1, const float sigmaLevel2,
@@ -99,13 +108,14 @@ namespace ORB_SLAM3 {
friend std::ostream& operator<<(std::ostream& os, const KannalaBrandt8& kb);
friend std::istream& operator>>(std::istream& is, KannalaBrandt8& kb);
private:
const float precision; // used in the iterative undistortion
//Parameters vector corresponds to
//[fx, fy, cx, cy, k0, k1, k2, k3]
TwoViewReconstruction* tvr;
// Triangulation
void Triangulate(const cv::Point2f &p1, const cv::Point2f &p2, const cv::Mat &Tcw1, const cv::Mat &Tcw2,cv::Mat &x3D);
void Triangulate_(const cv::Point2f &p1, const cv::Point2f &p2, const cv::Matx44f &Tcw1, const cv::Matx44f &Tcw2,cv::Matx31f &x3D);
};

View File

@@ -58,30 +58,37 @@ namespace ORB_SLAM3 {
if(tvr) delete tvr;
}
// Projection functions
cv::Point2f project(const cv::Point3f &p3D);
cv::Point2f project(const cv::Matx31f &m3D);
cv::Point2f project(const cv::Mat &m3D);
Eigen::Vector2d project(const Eigen::Vector3d & v3D);
cv::Mat projectMat(const cv::Point3f& p3D);
// Returns the uncertainty of a pixel; always returns 1.0f
float uncertainty2(const Eigen::Matrix<double,2,1> &p2D);
// Unprojection
cv::Point3f unproject(const cv::Point2f &p2D);
cv::Mat unprojectMat(const cv::Point2f &p2D);
cv::Matx31f unprojectMat_(const cv::Point2f &p2D);
// Projection Jacobian
cv::Mat projectJac(const cv::Point3f &p3D);
Eigen::Matrix<double,2,3> projectJac(const Eigen::Vector3d& v3D);
// Unprojection Jacobian (unused)
cv::Mat unprojectJac(const cv::Point2f &p2D);
// Triangulate to recover 3D points
bool ReconstructWithTwoViews(const std::vector<cv::KeyPoint>& vKeys1, const std::vector<cv::KeyPoint>& vKeys2, const std::vector<int> &vMatches12,
cv::Mat &R21, cv::Mat &t21, std::vector<cv::Point3f> &vP3D, std::vector<bool> &vbTriangulated);
// Return the calibration matrix K
cv::Mat toK();
cv::Matx33f toK_();
// Epipolar constraint
bool epipolarConstrain(GeometricCamera* pCamera2, const cv::KeyPoint& kp1, const cv::KeyPoint& kp2, const cv::Mat& R12, const cv::Mat& t12, const float sigmaLevel, const float unc);
bool epipolarConstrain_(GeometricCamera* pCamera2, const cv::KeyPoint& kp1, const cv::KeyPoint& kp2, const cv::Matx33f& R12, const cv::Matx31f& t12, const float sigmaLevel, const float unc);

View File

@@ -141,8 +141,18 @@ cv::Matx31f KannalaBrandt8::unprojectMat_(const cv::Point2f &p2D)
/**
* @brief Unprojection
* Projection model (with k0..k3 = mvParameters[4..7]):
* xc = Xc/Zc, yc = Yc/Zc
* r^2 = xc^2 + yc^2
* θ = arctan(r)
* θd = θ + k0*θ^3 + k1*θ^5 + k2*θ^7 + k3*θ^9
* xd = θd/r * xc   yd = θd/r * yc
* u = fx*xd + cx   v = fy*yd + cy
*
* Unprojection: given the pixel coordinates (u, v)
* xd = (u - cx) / fx   yd = (v - cy) / fy
* xc = xd * r / θd     yc = yd * r / θd
* i.e. xc = xd * tan(θ) / θd   yc = yd * tan(θ) / θd
* where θd satisfies
* xd^2 + yd^2 = θd^2/r^2 * (xc^2 + yc^2)
@@ -172,17 +182,22 @@ cv::Point3f KannalaBrandt8::unproject(const cv::Point2f &p2D)
if (theta_d > 1e-8)
{
//Compensate distortion iteratively
float theta = theta_d; // θ is initialized to θd
// Start iterating
for (int j = 0; j < 10; j++)
{
float theta2 = theta * theta,
theta4 = theta2 * theta2,
theta6 = theta4 * theta2,
theta8 = theta4 * theta4;
float k0_theta2 = mvParameters[4] * theta2,
k1_theta4 = mvParameters[5] * theta4;
float k2_theta6 = mvParameters[6] * theta6,
k3_theta8 = mvParameters[7] * theta8;
float theta_fix = (theta * (1 + k0_theta2 + k1_theta4 + k2_theta6 + k3_theta8) - theta_d) /
(1 + 3 * k0_theta2 + 5 * k1_theta4 + 7 * k2_theta6 + 9 * k3_theta8);
theta = theta - theta_fix;
if (fabsf(theta_fix) < precision) // once the update becomes very small we are close to the final value
break;
}
//scale = theta - theta_d;
@@ -194,7 +209,7 @@ cv::Point3f KannalaBrandt8::unproject(const cv::Point2f &p2D)
}
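
Below is a minimal standalone sketch (not part of this commit) of the same Newton iteration: given θd, it solves θd = θ·(1 + k0·θ² + k1·θ⁴ + k2·θ⁶ + k3·θ⁸) for θ, exactly as the loop in unproject() above does. The coefficient values are made up for illustration.

#include <cmath>
#include <cstdio>

float solveTheta(float theta_d, const float k[4], float precision = 1e-6f)
{
    float theta = theta_d; // initial guess: θ = θd
    for (int j = 0; j < 10; j++)
    {
        float t2 = theta * theta, t4 = t2 * t2, t6 = t4 * t2, t8 = t4 * t4;
        float f  = theta * (1 + k[0] * t2 + k[1] * t4 + k[2] * t6 + k[3] * t8) - theta_d; // residual
        float df = 1 + 3 * k[0] * t2 + 5 * k[1] * t4 + 7 * k[2] * t6 + 9 * k[3] * t8;     // derivative dθd/dθ
        float step = f / df;                                                              // Newton update
        theta -= step;
        if (std::fabs(step) < precision) // stop once the update is negligible
            break;
    }
    return theta;
}

int main()
{
    const float k[4] = {-0.01f, 0.02f, -0.01f, 0.002f}; // hypothetical k0..k3
    printf("theta = %f\n", solveTheta(0.8f, k));
    return 0;
}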
/**
* @brief
* Projection model:
* u = fx * θd * x / sqrt(x^2 + y^2) + cx
* v = fy * θd * y / sqrt(x^2 + y^2) + cy
* Input: the 3D point xyz
@@ -376,23 +391,27 @@ bool KannalaBrandt8::matchAndtriangulate(const cv::KeyPoint &kp1, const cv::KeyP
cv::Point3f ray1c = this->unproject(kp1.pt);
cv::Point3f ray2c = pOther->unproject(kp2.pt);
// Normalized coordinates of point 1 in frame 1
cv::Mat r1(3, 1, CV_32F);
r1.at<float>(0) = ray1c.x;
r1.at<float>(1) = ray1c.y;
r1.at<float>(2) = ray1c.z;
// Normalized coordinates of point 2 in frame 2
cv::Mat r2(3, 1, CV_32F);
r2.at<float>(0) = ray2c.x;
r2.at<float>(1) = ray2c.y;
r2.at<float>(2) = ray2c.z;
//Check parallax between rays
// The rays expressed in the world coordinate frame
cv::Mat ray1 = Rwc1 * r1;
cv::Mat ray2 = Rwc2 * r2;
const float cosParallaxRays = ray1.dot(ray2) / (cv::norm(ray1) * cv::norm(ray2));
//If parallax is lower than 0.9998, reject this match
// Return when the angle between the rays is almost 0: the point is too far away and triangulating it would introduce large errors
if (cosParallaxRays > 0.9998)
{
return false;
@@ -409,10 +428,12 @@ bool KannalaBrandt8::matchAndtriangulate(const cv::KeyPoint &kp1, const cv::KeyP
cv::Mat x3D;
// Triangulate
Triangulate(p11, p22, Tcw1, Tcw2, x3D);
cv::Mat x3Dt = x3D.t();
// Check whether the point is in front of the cameras
//Check triangulation in front of cameras
float z1 = Rcw1.row(2).dot(x3Dt) + tcw1.at<float>(2);
if (z1 <= 0)
@@ -426,6 +447,7 @@ bool KannalaBrandt8::matchAndtriangulate(const cv::KeyPoint &kp1, const cv::KeyP
return false;
}
// Check the reprojection error
//Check reprojection error in first keyframe
// -Transform point into camera reference system
cv::Mat x3D1 = Rcw1 * x3D + tcw1;
@@ -480,7 +502,7 @@ float KannalaBrandt8::TriangulateMatches(GeometricCamera *pCamera2, const cv::Ke
//Check parallax
// 2. Check the angle between the rays
// This looks a bit like an epipolar constraint, but it is not: r2 is rotated by R12 into the coordinate frame aligned with r1,
// and then the angle between them is computed and checked against 1.14°
cv::Mat r21 = R12 * r2;
const float cosParallaxRays = r1.dot(r21) / (cv::norm(r1) * cv::norm(r21));
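
A quick numeric check (not from the commit) of how the 1.14° in the comment relates to the 0.9998 threshold applied to cosParallaxRays: rejecting matches with cosParallaxRays > 0.9998 is the same as requiring the two rays to subtend an angle of at least roughly arccos(0.9998) ≈ 1.14°.

#include <cmath>
#include <cstdio>

int main()
{
    const double pi = std::acos(-1.0);
    double thresholdDeg = std::acos(0.9998) * 180.0 / pi; // ≈ 1.146 degrees
    printf("angle threshold: %.3f deg\n", thresholdDeg);
    return 0;
}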

View File

@@ -26,7 +26,7 @@ namespace ORB_SLAM3
long unsigned int GeometricCamera::nNextId = 0;
/**
* @brief
* @param p3D
* @return
*/
@@ -37,7 +37,7 @@ cv::Point2f Pinhole::project(const cv::Point3f &p3D)
}
/**
* @brief
* @param m3D
* @return
*/
@@ -47,7 +47,7 @@ cv::Point2f Pinhole::project(const cv::Matx31f &m3D)
}
/**
* @brief
* @param v3D
* @return
*/
@@ -59,7 +59,7 @@ cv::Point2f Pinhole::project(const cv::Mat &m3D)
}
/**
* @brief
* @param p3D
* @return
*/
@@ -73,7 +73,7 @@ Eigen::Vector2d Pinhole::project(const Eigen::Vector3d &v3D)
}
/**
* @brief
* @param p3D
* @return
*/
@@ -94,7 +94,7 @@ float Pinhole::uncertainty2(const Eigen::Matrix<double, 2, 1> &p2D)
/**
* @brief
* @param p2D
* @return
*/
cv::Point3f Pinhole::unproject(const cv::Point2f &p2D)
{
@@ -105,7 +105,7 @@ cv::Point3f Pinhole::unproject(const cv::Point2f &p2D)
/**
* @brief
* @param p2D
* @return
*/
cv::Mat Pinhole::unprojectMat(const cv::Point2f &p2D)
{
@@ -116,7 +116,7 @@ cv::Mat Pinhole::unprojectMat(const cv::Point2f &p2D)
/**
* @brief
* @param p2D
* @return
*/
cv::Matx31f Pinhole::unprojectMat_(const cv::Point2f &p2D)
{
@@ -126,7 +126,7 @@ cv::Matx31f Pinhole::unprojectMat_(const cv::Point2f &p2D)
}
/**
* @brief
* @param p3D
* @return
*/
@@ -144,7 +144,7 @@ cv::Mat Pinhole::projectJac(const cv::Point3f &p3D)
}
/**
* @brief
* @param v3D
* @return
*/
@@ -162,7 +162,13 @@ Eigen::Matrix<double, 2, 3> Pinhole::projectJac(const Eigen::Vector3d &v3D)
}
/**
* @brief Unprojection Jacobian
* u = fx*x + cx  -->  x = (u - cx)/fx
* v = fy*y + cy  -->  y = (v - cy)/fy
* Jacobian of (x, y, 1) with respect to (u, v):
* 1/fx    0
*    0 1/fy
*    0    0
* @param p2D
* @return
*/
@@ -179,7 +185,7 @@ cv::Mat Pinhole::unprojectJac(const cv::Point2f &p2D)
return Jac;
}
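
For reference, a hedged sketch (not from the commit) of what a 3x2 unprojection Jacobian like the one documented above could look like when built as a cv::Mat. The actual body of Pinhole::unprojectJac is not shown in this diff, so this only illustrates the matrix written in the comment; fx and fy are placeholders.

#include <opencv2/core.hpp>

// Jacobian of the normalized coordinates (x, y, 1) with respect to (u, v) for a pinhole model.
cv::Mat unprojectJacSketch(float fx, float fy)
{
    cv::Mat Jac = cv::Mat::zeros(3, 2, CV_32F);
    Jac.at<float>(0, 0) = 1.0f / fx; // dx/du
    Jac.at<float>(1, 1) = 1.0f / fy; // dy/dv
    // third row stays zero: z = 1 does not depend on (u, v)
    return Jac;
}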
/** Triangulate to recover 3D points; used during monocular initialization
* @param vKeys1
* @param vKeys2
* @param vMatches12 matches between vKeys1 and vKeys2
@@ -231,7 +237,7 @@ cv::Matx33f Pinhole::toK_()
* @param R12 rotation from camera 2 to camera 1
* @param t12 translation from camera 2 to camera 1
* @param sigmaLevel pyramid-level sigma of keypoint 1
* @param unc pyramid-level uncertainty of keypoint 2, i.e. 1.2^(2n)
* @return
*/
bool Pinhole::epipolarConstrain(GeometricCamera *pCamera2, const cv::KeyPoint &kp1, const cv::KeyPoint &kp2, const cv::Mat &R12, const cv::Mat &t12, const float sigmaLevel, const float unc)
@@ -243,10 +249,15 @@ bool Pinhole::epipolarConstrain(GeometricCamera *pCamera2, const cv::KeyPoint &k
cv::Mat F12 = K1.t().inv() * t12x * R12 * K2.inv();
// Epipolar line in second image l = x1'F12 = [a b c]
// (u1, v1, 1) * F12 * (u2, v2, 1)^T = 0  -->  (a, b, c) * (u2, v2, 1)^T = 0  -->  a*u2 + b*v2 + c = 0
const float a = kp1.pt.x * F12.at<float>(0, 0) + kp1.pt.y * F12.at<float>(1, 0) + F12.at<float>(2, 0);
const float b = kp1.pt.x * F12.at<float>(0, 1) + kp1.pt.y * F12.at<float>(1, 1) + F12.at<float>(2, 1);
const float c = kp1.pt.x * F12.at<float>(0, 2) + kp1.pt.y * F12.at<float>(1, 2) + F12.at<float>(2, 2);
// Point-to-line distance formula:
// d = |a*u2 + b*v2 + c| / sqrt(a^2 + b^2)
const float num = a * kp2.pt.x + b * kp2.pt.y + c;
const float den = a * a + b * b;
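
Below is a standalone sketch (not from the commit) of the epipolar check performed above: build the epipolar line l = p1^T * F12 in image 2 and measure the squared point-to-line distance of p2. The chi-square threshold 3.84 * unc mirrors how ORB_SLAM-style code typically gates this distance; F12 and the points are hypothetical inputs.

#include <opencv2/core.hpp>

bool epipolarCheck(const cv::Mat& F12, const cv::Point2f& p1, const cv::Point2f& p2, float unc)
{
    // Epipolar line l = (a, b, c) = (u1, v1, 1) * F12
    const float a = p1.x * F12.at<float>(0, 0) + p1.y * F12.at<float>(1, 0) + F12.at<float>(2, 0);
    const float b = p1.x * F12.at<float>(0, 1) + p1.y * F12.at<float>(1, 1) + F12.at<float>(2, 1);
    const float c = p1.x * F12.at<float>(0, 2) + p1.y * F12.at<float>(1, 2) + F12.at<float>(2, 2);

    const float num = a * p2.x + b * p2.y + c;   // numerator of the point-to-line distance
    const float den = a * a + b * b;
    if (den == 0)
        return false;

    const float dsqr = num * num / den;          // squared distance of p2 to the line
    return dsqr < 3.84f * unc;                   // 95% chi-square threshold for 1 DoF (assumed gating rule)
}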

View File

@@ -947,6 +947,7 @@ void Frame::UndistortKeyPoints()
// Undistort points
// reshape(int cn, int rows=0): cn is the new number of channels, and rows=0 means the number of rows stays the same
// To call OpenCV's undistortion function directly, the matrix must first be reshaped to 2 channels (the x, y coordinates)
// When the last matrix passed to cv::undistortPoints is empty, the returned points are in normalized coordinates
mat=mat.reshape(2);
cv::undistortPoints(mat,mat, static_cast<Pinhole*>(mpCamera)->toK(),mDistCoef,cv::Mat(),mK);
// Reshape back to a single channel, returning to our usual representation
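
A small sketch (not from the commit) of the behaviour described in the comments above: when P (the last argument) is left empty, cv::undistortPoints returns normalized coordinates; when P = K is passed, the points come back as undistorted pixel coordinates. The K and distortion values below are hypothetical.

#include <opencv2/opencv.hpp>

void demoUndistort()
{
    cv::Mat K = (cv::Mat_<float>(3, 3) << 458.f, 0, 367.f,
                                          0, 457.f, 248.f,
                                          0, 0, 1);
    cv::Mat dist = (cv::Mat_<float>(1, 4) << -0.28f, 0.07f, 0.0002f, 0.00002f);

    cv::Mat pts(1, 2, CV_32F);        // one keypoint stored as (x, y)
    pts.at<float>(0) = 400.f;
    pts.at<float>(1) = 250.f;

    pts = pts.reshape(2);             // 2 channels so OpenCV sees a list of 2D points
    cv::Mat normalized, pixel;
    cv::undistortPoints(pts, normalized, K, dist);              // P empty -> normalized coordinates
    cv::undistortPoints(pts, pixel, K, dist, cv::Mat(), K);     // P = K   -> undistorted pixel coordinates
    pts = pts.reshape(1);             // back to a single channel
}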

View File

@@ -1628,7 +1628,7 @@ int ORBmatcher::SearchForTriangulation(KeyFrame *pKF1, KeyFrame *pKF2, cv::Mat F
return nmatches;
}
// This overload does not appear to be used anywhere
int ORBmatcher::SearchForTriangulation(KeyFrame *pKF1, KeyFrame *pKF2, cv::Mat F12,
vector<pair<size_t, size_t> > &vMatchedPairs, const bool bOnlyStereo, vector<cv::Mat> &vMatchedPoints)
{

View File

@@ -57,9 +57,10 @@ bool TwoViewReconstruction::Reconstruct(const std::vector<cv::KeyPoint> &vKeys1,
// Fill structures with current keypoints and matches with reference frame
// Reference Frame: 1, Current Frame: 2
// Fill mvMatches12, which stores the indices of matched points between vKeys1 and vKeys2
mvMatches12.clear(); // stores the ids of matched points
mvMatches12.reserve(mvKeys2.size());
mvbMatched1.resize(mvKeys1.size()); // same length as vKeys1; marks whether each point in vKeys1 has a match
for (size_t i = 0, iend = vMatches12.size(); i < iend; i++)
{
if (vMatches12[i] >= 0)
@@ -78,7 +79,7 @@ bool TwoViewReconstruction::Reconstruct(const std::vector<cv::KeyPoint> &vKeys1,
vAllIndices.reserve(N);
vector<size_t> vAvailableIndices;
// vAllIndices is used to guarantee that the 8 sampled points are all distinct
for (int i = 0; i < N; i++)
{
vAllIndices.push_back(i);
@@ -102,7 +103,7 @@ bool TwoViewReconstruction::Reconstruct(const std::vector<cv::KeyPoint> &vKeys1,
mvSets[it][j] = idx;
// Guarantees the same point cannot be picked twice: the index of an already selected point is removed from vAvailableIndices
vAvailableIndices[randi] = vAvailableIndices.back();
vAvailableIndices.pop_back();
}
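
Below is a minimal sketch (not from the commit) of the swap-and-pop trick used above: drawing k distinct indices without replacement by overwriting the chosen slot with the last element and shrinking the pool, so the same point can never be drawn twice within one RANSAC set. The function and variable names are illustrative only.

#include <vector>
#include <cstdlib>
#include <cstdio>

std::vector<size_t> sampleWithoutReplacement(size_t n, size_t k)
{
    std::vector<size_t> pool(n), picked;
    for (size_t i = 0; i < n; i++)
        pool[i] = i;
    for (size_t j = 0; j < k && !pool.empty(); j++)
    {
        size_t randi = static_cast<size_t>(rand()) % pool.size();
        picked.push_back(pool[randi]);
        pool[randi] = pool.back();   // overwrite the chosen slot with the last index
        pool.pop_back();             // shrink the pool so this index cannot be drawn again
    }
    return picked;
}

int main()
{
    for (size_t idx : sampleWithoutReplacement(20, 8)) // 8 points per set, as in the RANSAC loop above
        printf("%zu ", idx);
    printf("\n");
    return 0;
}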