Add the BATA translation factor, with unit tests
parent e3dd4e1704
commit 9021c888ef
@@ -18,6 +18,7 @@
  * @brief Binary factor for a relative translation direction measurement.
  */
 
+#include <gtsam/base/Vector.h>
 #include <gtsam/geometry/Point3.h>
 #include <gtsam/geometry/Unit3.h>
 #include <gtsam/linear/NoiseModel.h>
@@ -36,8 +37,6 @@ namespace gtsam {
  * normalized(Tb - Ta) - w_aZb.point3()
  *
  * @ingroup sfm
- *
- *
  */
 class TranslationFactor : public NoiseModelFactorN<Point3, Point3> {
  private:
@@ -60,7 +59,7 @@ class TranslationFactor : public NoiseModelFactorN<Point3, Point3> {
   * @brief Caclulate error: (norm(Tb - Ta) - measurement)
   * where Tb and Ta are Point3 translations and measurement is
   * the Unit3 translation direction from a to b.
   *
   * @param Ta translation for key a
   * @param Tb translation for key b
   * @param H1 optional jacobian in Ta
@@ -73,7 +72,8 @@ class TranslationFactor : public NoiseModelFactorN<Point3, Point3> {
       OptionalMatrixType H2) const override {
     const Point3 dir = Tb - Ta;
     Matrix33 H_predicted_dir;
-    const Point3 predicted = normalize(dir, H1 || H2 ? &H_predicted_dir : nullptr);
+    const Point3 predicted =
+        normalize(dir, H1 || H2 ? &H_predicted_dir : nullptr);
     if (H1) *H1 = -H_predicted_dir;
     if (H2) *H2 = H_predicted_dir;
     return predicted - measured_w_aZb_;
@@ -89,4 +89,72 @@ class TranslationFactor : public NoiseModelFactorN<Point3, Point3> {
   }
 #endif
 };  // \ TranslationFactor
+
+/**
+ * Binary factor for a relative translation direction measurement
+ * w_aZb. The measurement equation is
+ *     w_aZb = scale * (Tb - Ta)
+ * i.e., w_aZb is the translation direction from frame A to B, in world
+ * coordinates.
+ *
+ * Instead of normalizing the Tb - Ta vector, we multiply it by a scalar
+ * which is also jointly optimized. This is inspired by the work on
+ * BATA (Zhuang et al, CVPR 2018).
+ *
+ * @ingroup sfm
+ */
+class BilinearAngleTranslationFactor
+    : public NoiseModelFactorN<Point3, Point3, Vector1> {
+ private:
+  typedef NoiseModelFactorN<Point3, Point3, Vector1> Base;
+  Point3 measured_w_aZb_;
+
+ public:
+  /// default constructor
+  BilinearAngleTranslationFactor() {}
+
+  BilinearAngleTranslationFactor(Key a, Key b, Key scale_key, const Unit3& w_aZb,
+                                 const SharedNoiseModel& noiseModel)
+      : Base(noiseModel, a, b, scale_key),
+        measured_w_aZb_(w_aZb.point3()) {}
+
+  /**
+   * @brief Calculate error: (scale * (Tb - Ta) - measurement)
+   * where Tb and Ta are Point3 translations and measurement is
+   * the Unit3 translation direction from a to b.
+   *
+   * @param Ta translation for key a
+   * @param Tb translation for key b
+   * @param H1 optional jacobian in Ta
+   * @param H2 optional jacobian in Tb
+   * @return Vector
+   */
+  Vector evaluateError(
+      const Point3& Ta, const Point3& Tb, const Vector1& scale,
+      boost::optional<Matrix&> H1 = boost::none,
+      boost::optional<Matrix&> H2 = boost::none,
+      boost::optional<Matrix&> H3 = boost::none) const override {
+    // Ideally we should use a positive real valued scalar datatype for scale.
+    const double abs_scale = abs(scale[0]);
+    const Point3 predicted = (Tb - Ta) * abs_scale;
+    if (H1) {
+      *H1 = -Matrix3::Identity() * abs_scale;
+    }
+    if (H2) {
+      *H2 = Matrix3::Identity() * abs_scale;
+    }
+    if (H3) {
+      *H3 = scale[0] >= 0 ? (Tb - Ta) : (Ta - Tb);
+    }
+    return predicted - measured_w_aZb_;
+  }
+
+ private:
+  friend class boost::serialization::access;
+  template <class ARCHIVE>
+  void serialize(ARCHIVE& ar, const unsigned int /*version*/) {
+    ar& boost::serialization::make_nvp(
+        "Base", boost::serialization::base_object<Base>(*this));
+  }
+};  // \ BilinearAngleTranslationFactor
 }  // namespace gtsam
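Aside (not part of the commit): the residual above is bilinear in the scale and the translation difference, so its Jacobians can be written down directly. A minimal standalone sketch in plain Eigen, with a hypothetical helper name, for checking the math independently of the factor machinery:

#include <Eigen/Dense>
#include <cmath>

// e(Ta, Tb, s) = |s| * (Tb - Ta) - w_aZb, with
//   de/dTa = -|s| * I,   de/dTb = |s| * I,   de/ds = sign(s) * (Tb - Ta).
Eigen::Vector3d bilinearAngleError(const Eigen::Vector3d& Ta,
                                   const Eigen::Vector3d& Tb, double s,
                                   const Eigen::Vector3d& w_aZb,
                                   Eigen::Matrix3d* H1 = nullptr,
                                   Eigen::Matrix3d* H2 = nullptr,
                                   Eigen::Vector3d* H3 = nullptr) {
  const double abs_s = std::abs(s);
  if (H1) *H1 = -abs_s * Eigen::Matrix3d::Identity();
  if (H2) *H2 = abs_s * Eigen::Matrix3d::Identity();
  if (H3) *H3 = (s >= 0 ? 1.0 : -1.0) * (Tb - Ta);
  return abs_s * (Tb - Ta) - w_aZb;
}

These expressions match the H1, H2, and H3 blocks in evaluateError above.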
@@ -101,9 +101,17 @@ NonlinearFactorGraph TranslationRecovery::buildGraph(
   NonlinearFactorGraph graph;
 
   // Add translation factors for input translation directions.
+  uint i = 0;
   for (auto edge : relativeTranslations) {
-    graph.emplace_shared<TranslationFactor>(edge.key1(), edge.key2(),
-                                            edge.measured(), edge.noiseModel());
+    if (use_bilinear_translation_factor_) {
+      graph.emplace_shared<BilinearAngleTranslationFactor>(
+          edge.key1(), edge.key2(), Symbol('S', i), edge.measured(),
+          edge.noiseModel());
+    } else {
+      graph.emplace_shared<TranslationFactor>(
+          edge.key1(), edge.key2(), edge.measured(), edge.noiseModel());
+    }
+    i++;
   }
   return graph;
 }
@@ -163,6 +171,12 @@ Values TranslationRecovery::initializeRandomly(
     insert(edge.key1());
     insert(edge.key2());
   }
 
+  if (use_bilinear_translation_factor_) {
+    for (uint i = 0; i < relativeTranslations.size(); i++) {
+      initial.insert<Vector1>(Symbol('S', i), Vector1(1.0));
+    }
+  }
   return initial;
 }
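A note on the bookkeeping these two hunks add (a hedged summary, not a new API): with the bilinear factor enabled, every measurement edge i gets one extra Vector1 scale variable keyed Symbol('S', i), shared with the corresponding BilinearAngleTranslationFactor and initialized to 1.0. A small sketch of just that initialization step, mirroring the loop in initializeRandomly (the helper name is hypothetical):

#include <gtsam/base/Vector.h>
#include <gtsam/inference/Symbol.h>
#include <gtsam/nonlinear/Values.h>

// Hypothetical helper: insert one unit scale variable per measurement edge,
// keyed 'S' plus the edge index, just as initializeRandomly does above.
void insertEdgeScales(size_t numEdges, gtsam::Values* initial) {
  for (size_t i = 0; i < numEdges; i++) {
    initial->insert<gtsam::Vector1>(gtsam::Symbol('S', i), gtsam::Vector1(1.0));
  }
}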
@@ -60,14 +60,18 @@ class GTSAM_EXPORT TranslationRecovery {
   // Parameters.
   LevenbergMarquardtParams lmParams_;
 
+  const bool use_bilinear_translation_factor_ = false;
+
  public:
   /**
    * @brief Construct a new Translation Recovery object
    *
    * @param lmParams parameters for optimization.
    */
-  TranslationRecovery(const LevenbergMarquardtParams &lmParams)
-      : lmParams_(lmParams) {}
+  TranslationRecovery(const LevenbergMarquardtParams &lmParams,
+                      bool use_bilinear_translation_factor)
+      : lmParams_(lmParams),
+        use_bilinear_translation_factor_(use_bilinear_translation_factor) {}
 
   /**
    * @brief Default constructor.
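For context, a hedged usage sketch (not taken from the diff): the new member is initialized to false, so a default-constructed TranslationRecovery keeps the original normalized TranslationFactor, while the two-argument constructor lets callers opt in to the bilinear-angle formulation explicitly:

#include <gtsam/nonlinear/LevenbergMarquardtParams.h>
#include <gtsam/sfm/TranslationRecovery.h>

// Hypothetical helper: build a recovery object that uses the BATA-style factor.
gtsam::TranslationRecovery makeBataRecovery() {
  gtsam::LevenbergMarquardtParams params;
  // The second argument selects BilinearAngleTranslationFactor in buildGraph().
  return gtsam::TranslationRecovery(params,
                                    /*use_bilinear_translation_factor=*/true);
}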
@@ -23,7 +23,7 @@ virtual class SfmTrack : gtsam::SfmTrack2d {
   SfmTrack();
   SfmTrack(const gtsam::Point3& pt);
   const Point3& point3() const;
 
   Point3 p;
 
   double r;
@@ -37,8 +37,8 @@ virtual class SfmTrack : gtsam::SfmTrack2d {
   bool equals(const gtsam::SfmTrack& expected, double tol) const;
 };
 
-#include <gtsam/nonlinear/Values.h>
 #include <gtsam/nonlinear/NonlinearFactorGraph.h>
+#include <gtsam/nonlinear/Values.h>
 #include <gtsam/sfm/SfmData.h>
 class SfmData {
   SfmData();
@@ -268,7 +268,8 @@ class MFAS {
 #include <gtsam/sfm/TranslationRecovery.h>
 
 class TranslationRecovery {
-  TranslationRecovery(const gtsam::LevenbergMarquardtParams& lmParams);
+  TranslationRecovery(const gtsam::LevenbergMarquardtParams& lmParams,
+                      const bool use_bilinear_translation_factor);
   TranslationRecovery();  // default params.
   void addPrior(const gtsam::BinaryMeasurementsUnit3& relativeTranslations,
                 const double scale,
@@ -30,7 +30,7 @@ using namespace gtsam;
 static SharedNoiseModel model(noiseModel::Isotropic::Sigma(3, 0.05));
 
 // Keys are deliberately *not* in sorted order to test that case.
-static const Key kKey1(2), kKey2(1);
+static const Key kKey1(2), kKey2(1), kEdgeKey(3);
 static const Unit3 kMeasured(1, 0, 0);
 
 /* ************************************************************************* */
@@ -99,6 +99,79 @@ TEST(TranslationFactor, Jacobian) {
   EXPECT(assert_equal(H2Expected, H2Actual, 1e-9));
 }
 
+
+/* ************************************************************************* */
+TEST(BilinearAngleTranslationFactor, Constructor) {
+  BilinearAngleTranslationFactor factor(kKey1, kKey2, kEdgeKey, kMeasured, model);
+}
+
+/* ************************************************************************* */
+TEST(BilinearAngleTranslationFactor, ZeroError) {
+  // Create a factor
+  BilinearAngleTranslationFactor factor(kKey1, kKey2, kEdgeKey, kMeasured, model);
+
+  // Set the linearization
+  Point3 T1(1, 0, 0), T2(2, 0, 0);
+  Vector1 scale(1.0);
+
+  // Use the factor to calculate the error
+  Vector actualError(factor.evaluateError(T1, T2, scale));
+
+  // Verify we get the expected error
+  Vector expected = (Vector3() << 0, 0, 0).finished();
+  EXPECT(assert_equal(expected, actualError, 1e-9));
+}
+
+/* ************************************************************************* */
+TEST(BilinearAngleTranslationFactor, NonZeroError) {
+  // create a factor
+  BilinearAngleTranslationFactor factor(kKey1, kKey2, kEdgeKey, kMeasured, model);
+
+  // set the linearization
+  Point3 T1(0, 1, 1), T2(0, 2, 2);
+  Vector1 scale(1.0 / sqrt(2));
+
+  // use the factor to calculate the error
+  Vector actualError(factor.evaluateError(T1, T2, scale));
+
+  // verify we get the expected error
+  Vector expected = (Vector3() << -1, 1 / sqrt(2), 1 / sqrt(2)).finished();
+  EXPECT(assert_equal(expected, actualError, 1e-9));
+}
+
+/* ************************************************************************* */
+Vector bilinearAngleFactorError(const Point3 &T1, const Point3 &T2, const Vector1 &scale,
+                                const BilinearAngleTranslationFactor &factor) {
+  return factor.evaluateError(T1, T2, scale);
+}
+
+TEST(BilinearAngleTranslationFactor, Jacobian) {
+  // Create a factor
+  BilinearAngleTranslationFactor factor(kKey1, kKey2, kEdgeKey, kMeasured, model);
+
+  // Set the linearization
+  Point3 T1(1, 0, 0), T2(2, 0, 0);
+  Vector1 scale(1.0);
+
+  // Use the factor to calculate the Jacobians
+  Matrix H1Actual, H2Actual, H3Actual;
+  factor.evaluateError(T1, T2, scale, H1Actual, H2Actual, H3Actual);
+
+  // Use numerical derivatives to calculate the Jacobians
+  Matrix H1Expected, H2Expected, H3Expected;
+  H1Expected = numericalDerivative11<Vector, Point3>(
+      std::bind(&bilinearAngleFactorError, std::placeholders::_1, T2, scale, factor), T1);
+  H2Expected = numericalDerivative11<Vector, Point3>(
+      std::bind(&bilinearAngleFactorError, T1, std::placeholders::_1, scale, factor), T2);
+  H3Expected = numericalDerivative11<Vector, Vector1>(
+      std::bind(&bilinearAngleFactorError, T1, T2, std::placeholders::_1, factor), scale);
+
+  // Verify the Jacobians are correct
+  EXPECT(assert_equal(H1Expected, H1Actual, 1e-9));
+  EXPECT(assert_equal(H2Expected, H2Actual, 1e-9));
+  EXPECT(assert_equal(H3Expected, H3Actual, 1e-9));
+}
+
 /* ************************************************************************* */
 int main() {
   TestResult tr;
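A quick arithmetic check of the NonZeroError expectation above: with Ta = (0, 1, 1), Tb = (0, 2, 2), scale = 1/sqrt(2), and measured direction (1, 0, 0), the residual is scale * (Tb - Ta) - w_aZb = (0, 1/sqrt(2), 1/sqrt(2)) - (1, 0, 0) = (-1, 1/sqrt(2), 1/sqrt(2)), which is exactly the expected vector in the test. The same computation, standalone in plain Eigen:

#include <Eigen/Dense>
#include <cmath>
#include <iostream>

int main() {
  const Eigen::Vector3d Ta(0, 1, 1), Tb(0, 2, 2), w_aZb(1, 0, 0);
  const double scale = 1.0 / std::sqrt(2.0);
  // Bilinear residual used by the factor: scale * (Tb - Ta) - w_aZb.
  const Eigen::Vector3d residual = scale * (Tb - Ta) - w_aZb;
  std::cout << residual.transpose() << "\n";  // -1 0.707107 0.707107
  return 0;
}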
@@ -353,6 +353,340 @@ TEST(TranslationRecovery, NodeWithBetweenFactorAndNoMeasurements) {
   EXPECT(assert_equal(Point3(1, 2, 1), result.at<Point3>(4), 1e-4));
 }
 
+/* *************************************************************************
+ * Repeat all tests, but with the Bilinear Angle Translation Factor.
+ * ************************************************************************* */
+
+
+/* ************************************************************************* */
+// We read the BAL file, which has 3 cameras in it, with poses. We then assume
+// the rotations are correct, but translations have to be estimated from
+// translation directions only. Since we have 3 cameras, A, B, and C, we can at
+// most create three relative measurements, let's call them w_aZb, w_aZc, and
+// bZc. These will be of type Unit3. We then call `recoverTranslations` which
+// sets up an optimization problem for the three unknown translations.
+TEST(TranslationRecovery, BALBATA) {
+  const string filename = findExampleDataFile("dubrovnik-3-7-pre");
+  SfmData db = SfmData::FromBalFile(filename);
+
+  // Get camera poses, as Values
+  size_t j = 0;
+  Values poses;
+  for (auto camera : db.cameras) {
+    poses.insert(j++, camera.pose());
+  }
+
+  // Simulate measurements
+  const auto relativeTranslations = TranslationRecovery::SimulateMeasurements(
+      poses, {{0, 1}, {0, 2}, {1, 2}});
+
+  // Check simulated measurements.
+  for (auto& unitTranslation : relativeTranslations) {
+    EXPECT(assert_equal(GetDirectionFromPoses(poses, unitTranslation),
+                        unitTranslation.measured()));
+  }
+
+  LevenbergMarquardtParams params;
+  TranslationRecovery algorithm(params, true);
+  const auto graph = algorithm.buildGraph(relativeTranslations);
+  EXPECT_LONGS_EQUAL(3, graph.size());
+
+  // Run translation recovery
+  const double scale = 2.0;
+  const auto result = algorithm.run(relativeTranslations, scale);
+
+  // Check result for first two translations, determined by prior
+  EXPECT(assert_equal(Point3(0, 0, 0), result.at<Point3>(0)));
+  EXPECT(assert_equal(
+      Point3(2 * GetDirectionFromPoses(poses, relativeTranslations[0])),
+      result.at<Point3>(1)));
+
+  // Check that the third translation is correct
+  Point3 Ta = poses.at<Pose3>(0).translation();
+  Point3 Tb = poses.at<Pose3>(1).translation();
+  Point3 Tc = poses.at<Pose3>(2).translation();
+  Point3 expected = (Tc - Ta) * (scale / (Tb - Ta).norm());
+  EXPECT(assert_equal(expected, result.at<Point3>(2), 1e-4));
+
+  // TODO(frank): how to get stats back?
+  // EXPECT_DOUBLES_EQUAL(0.0199833, actualError, 1e-5);
+}
+
+TEST(TranslationRecovery, TwoPoseTestBATA) {
+  // Create a dataset with 2 poses.
+  //   __ __
+  //   \/  \/
+  //   0 _____ 1
+  //
+  // 0 and 1 face in the same direction but have a translation offset.
+  Values poses;
+  poses.insert<Pose3>(0, Pose3(Rot3(), Point3(0, 0, 0)));
+  poses.insert<Pose3>(1, Pose3(Rot3(), Point3(2, 0, 0)));
+
+  auto relativeTranslations =
+      TranslationRecovery::SimulateMeasurements(poses, {{0, 1}});
+
+  // Check simulated measurements.
+  for (auto& unitTranslation : relativeTranslations) {
+    EXPECT(assert_equal(GetDirectionFromPoses(poses, unitTranslation),
+                        unitTranslation.measured()));
+  }
+
+  LevenbergMarquardtParams params;
+  TranslationRecovery algorithm(params, true);
+  const auto graph = algorithm.buildGraph(relativeTranslations);
+  EXPECT_LONGS_EQUAL(1, graph.size());
+
+  // Run translation recovery
+  const auto result = algorithm.run(relativeTranslations, /*scale=*/3.0);
+
+  // Check result for first two translations, determined by prior
+  EXPECT(assert_equal(Point3(0, 0, 0), result.at<Point3>(0), 1e-8));
+  EXPECT(assert_equal(Point3(3, 0, 0), result.at<Point3>(1), 1e-8));
+}
+
+TEST(TranslationRecovery, ThreePoseTestBATA) {
+  // Create a dataset with 3 poses.
+  //   __ __
+  //   \/  \/
+  //   0 _____ 1
+  //    \ __ /
+  //     \\//
+  //      3
+  //
+  // 0 and 1 face in the same direction but have a translation offset. 3 is in
+  // the same direction as 0 and 1, in between 0 and 1, with some Y axis offset.
+
+  Values poses;
+  poses.insert<Pose3>(0, Pose3(Rot3(), Point3(0, 0, 0)));
+  poses.insert<Pose3>(1, Pose3(Rot3(), Point3(2, 0, 0)));
+  poses.insert<Pose3>(3, Pose3(Rot3(), Point3(1, -1, 0)));
+
+  auto relativeTranslations = TranslationRecovery::SimulateMeasurements(
+      poses, {{0, 1}, {1, 3}, {3, 0}});
+
+  // Check simulated measurements.
+  for (auto& unitTranslation : relativeTranslations) {
+    EXPECT(assert_equal(GetDirectionFromPoses(poses, unitTranslation),
+                        unitTranslation.measured()));
+  }
+
+  LevenbergMarquardtParams params;
+  TranslationRecovery algorithm(params, true);
+  const auto graph = algorithm.buildGraph(relativeTranslations);
+  EXPECT_LONGS_EQUAL(3, graph.size());
+
+  const auto result = algorithm.run(relativeTranslations, /*scale=*/3.0);
+
+  // Check result
+  EXPECT(assert_equal(Point3(0, 0, 0), result.at<Point3>(0), 1e-8));
+  EXPECT(assert_equal(Point3(3, 0, 0), result.at<Point3>(1), 1e-8));
+  EXPECT(assert_equal(Point3(1.5, -1.5, 0), result.at<Point3>(3), 1e-8));
+}
+
+TEST(TranslationRecovery, ThreePosesIncludingZeroTranslationBATA) {
+  // Create a dataset with 3 poses.
+  //   __ __
+  //   \/  \/
+  //   0 _____ 1
+  //           2 <|
+  //
+  // 0 and 1 face in the same direction but have a translation offset. 2 is at
+  // the same point as 1 but is rotated, with little FOV overlap.
+  Values poses;
+  poses.insert<Pose3>(0, Pose3(Rot3(), Point3(0, 0, 0)));
+  poses.insert<Pose3>(1, Pose3(Rot3(), Point3(2, 0, 0)));
+  poses.insert<Pose3>(2, Pose3(Rot3::RzRyRx(-M_PI / 2, 0, 0), Point3(2, 0, 0)));
+
+  auto relativeTranslations =
+      TranslationRecovery::SimulateMeasurements(poses, {{0, 1}, {1, 2}});
+
+  // Check simulated measurements.
+  for (auto& unitTranslation : relativeTranslations) {
+    EXPECT(assert_equal(GetDirectionFromPoses(poses, unitTranslation),
+                        unitTranslation.measured()));
+  }
+
+  LevenbergMarquardtParams params;
+  TranslationRecovery algorithm(params, true);
+  // Run translation recovery
+  const auto result = algorithm.run(relativeTranslations, /*scale=*/3.0);
+
+  // Check result
+  EXPECT(assert_equal(Point3(0, 0, 0), result.at<Point3>(0), 1e-8));
+  EXPECT(assert_equal(Point3(3, 0, 0), result.at<Point3>(1), 1e-8));
+  EXPECT(assert_equal(Point3(3, 0, 0), result.at<Point3>(2), 1e-8));
+}
+
+TEST(TranslationRecovery, FourPosesIncludingZeroTranslationBATA) {
+  // Create a dataset with 4 poses.
+  //   __ __
+  //   \/  \/
+  //   0 _____ 1
+  //    \ __   2 <|
+  //     \\//
+  //      3
+  //
+  // 0 and 1 face in the same direction but have a translation offset. 2 is at
+  // the same point as 1 but is rotated, with very little FOV overlap. 3 is in
+  // the same direction as 0 and 1, in between 0 and 1, with some Y axis offset.
+
+  Values poses;
+  poses.insert<Pose3>(0, Pose3(Rot3(), Point3(0, 0, 0)));
+  poses.insert<Pose3>(1, Pose3(Rot3(), Point3(2, 0, 0)));
+  poses.insert<Pose3>(2, Pose3(Rot3::RzRyRx(-M_PI / 2, 0, 0), Point3(2, 0, 0)));
+  poses.insert<Pose3>(3, Pose3(Rot3(), Point3(1, -1, 0)));
+
+  auto relativeTranslations = TranslationRecovery::SimulateMeasurements(
+      poses, {{0, 1}, {1, 2}, {1, 3}, {3, 0}});
+
+  // Check simulated measurements.
+  for (auto& unitTranslation : relativeTranslations) {
+    EXPECT(assert_equal(GetDirectionFromPoses(poses, unitTranslation),
+                        unitTranslation.measured()));
+  }
+
+  LevenbergMarquardtParams params;
+  TranslationRecovery algorithm(params, true);
+
+  // Run translation recovery
+  const auto result = algorithm.run(relativeTranslations, /*scale=*/4.0);
+
+  // Check result
+  EXPECT(assert_equal(Point3(0, 0, 0), result.at<Point3>(0), 1e-8));
+  EXPECT(assert_equal(Point3(4, 0, 0), result.at<Point3>(1), 1e-8));
+  EXPECT(assert_equal(Point3(4, 0, 0), result.at<Point3>(2), 1e-8));
+  EXPECT(assert_equal(Point3(2, -2, 0), result.at<Point3>(3), 1e-8));
+}
+
+TEST(TranslationRecovery, ThreePosesWithZeroTranslationBATA) {
+  Values poses;
+  poses.insert<Pose3>(0, Pose3(Rot3::RzRyRx(-M_PI / 6, 0, 0), Point3(0, 0, 0)));
+  poses.insert<Pose3>(1, Pose3(Rot3(), Point3(0, 0, 0)));
+  poses.insert<Pose3>(2, Pose3(Rot3::RzRyRx(M_PI / 6, 0, 0), Point3(0, 0, 0)));
+
+  auto relativeTranslations = TranslationRecovery::SimulateMeasurements(
+      poses, {{0, 1}, {1, 2}, {2, 0}});
+
+  // Check simulated measurements.
+  for (auto& unitTranslation : relativeTranslations) {
+    EXPECT(assert_equal(GetDirectionFromPoses(poses, unitTranslation),
+                        unitTranslation.measured()));
+  }
+
+  LevenbergMarquardtParams params;
+  TranslationRecovery algorithm(params, true);
+  // Run translation recovery
+  const auto result = algorithm.run(relativeTranslations, /*scale=*/4.0);
+
+  // Check result
+  EXPECT(assert_equal(Point3(0, 0, 0), result.at<Point3>(0), 1e-8));
+  EXPECT(assert_equal(Point3(0, 0, 0), result.at<Point3>(1), 1e-8));
+  EXPECT(assert_equal(Point3(0, 0, 0), result.at<Point3>(2), 1e-8));
+}
+
+TEST(TranslationRecovery, ThreePosesWithOneSoftConstraintBATA) {
+  // Create a dataset with 3 poses.
+  //   __ __
+  //   \/  \/
+  //   0 _____ 1
+  //    \ __ /
+  //     \\//
+  //      3
+  //
+  // 0 and 1 face in the same direction but have a translation offset. 3 is in
+  // the same direction as 0 and 1, in between 0 and 1, with some Y axis offset.
+
+  Values poses;
+  poses.insert<Pose3>(0, Pose3(Rot3(), Point3(0, 0, 0)));
+  poses.insert<Pose3>(1, Pose3(Rot3(), Point3(2, 0, 0)));
+  poses.insert<Pose3>(3, Pose3(Rot3(), Point3(1, -1, 0)));
+
+  auto relativeTranslations = TranslationRecovery::SimulateMeasurements(
+      poses, {{0, 1}, {0, 3}, {1, 3}});
+
+  std::vector<BinaryMeasurement<Point3>> betweenTranslations;
+  betweenTranslations.emplace_back(0, 3, Point3(1, -1, 0),
+                                   noiseModel::Isotropic::Sigma(3, 1e-2));
+
+  LevenbergMarquardtParams params;
+  TranslationRecovery algorithm(params, true);
+  auto result =
+      algorithm.run(relativeTranslations, /*scale=*/0.0, betweenTranslations);
+
+  // Check result
+  EXPECT(assert_equal(Point3(0, 0, 0), result.at<Point3>(0), 1e-4));
+  EXPECT(assert_equal(Point3(2, 0, 0), result.at<Point3>(1), 1e-4));
+  EXPECT(assert_equal(Point3(1, -1, 0), result.at<Point3>(3), 1e-4));
+}
+
+TEST(TranslationRecovery, ThreePosesWithOneHardConstraintBATA) {
+  // Create a dataset with 3 poses.
+  //   __ __
+  //   \/  \/
+  //   0 _____ 1
+  //    \ __ /
+  //     \\//
+  //      3
+  //
+  // 0 and 1 face in the same direction but have a translation offset. 3 is in
+  // the same direction as 0 and 1, in between 0 and 1, with some Y axis offset.
+
+  Values poses;
+  poses.insert<Pose3>(0, Pose3(Rot3(), Point3(0, 0, 0)));
+  poses.insert<Pose3>(1, Pose3(Rot3(), Point3(2, 0, 0)));
+  poses.insert<Pose3>(3, Pose3(Rot3(), Point3(1, -1, 0)));
+
+  auto relativeTranslations = TranslationRecovery::SimulateMeasurements(
+      poses, {{0, 1}, {0, 3}, {1, 3}});
+
+  std::vector<BinaryMeasurement<Point3>> betweenTranslations;
+  betweenTranslations.emplace_back(0, 1, Point3(2, 0, 0),
+                                   noiseModel::Constrained::All(3, 1e2));
+
+  LevenbergMarquardtParams params;
+  TranslationRecovery algorithm(params, true);
+  auto result =
+      algorithm.run(relativeTranslations, /*scale=*/0.0, betweenTranslations);
+
+  // Check result
+  EXPECT(assert_equal(Point3(0, 0, 0), result.at<Point3>(0), 1e-4));
+  EXPECT(assert_equal(Point3(2, 0, 0), result.at<Point3>(1), 1e-4));
+  EXPECT(assert_equal(Point3(1, -1, 0), result.at<Point3>(3), 1e-4));
+}
+
+TEST(TranslationRecovery, NodeWithBetweenFactorAndNoMeasurementsBATA) {
+  // Checks that valid results are obtained when a between translation edge is
+  // provided with a node that does not have any other relative translations.
+  Values poses;
+  poses.insert<Pose3>(0, Pose3(Rot3(), Point3(0, 0, 0)));
+  poses.insert<Pose3>(1, Pose3(Rot3(), Point3(2, 0, 0)));
+  poses.insert<Pose3>(3, Pose3(Rot3(), Point3(1, -1, 0)));
+  poses.insert<Pose3>(4, Pose3(Rot3(), Point3(1, 2, 1)));
+
+  auto relativeTranslations = TranslationRecovery::SimulateMeasurements(
+      poses, {{0, 1}, {0, 3}, {1, 3}});
+
+  std::vector<BinaryMeasurement<Point3>> betweenTranslations;
+  betweenTranslations.emplace_back(0, 1, Point3(2, 0, 0),
+                                   noiseModel::Constrained::All(3, 1e2));
+  // Node 4 only has this between translation prior, no relative translations.
+  betweenTranslations.emplace_back(0, 4, Point3(1, 2, 1));
+
+  LevenbergMarquardtParams params;
+  TranslationRecovery algorithm(params, true);
+  auto result =
+      algorithm.run(relativeTranslations, /*scale=*/0.0, betweenTranslations);
+
+  // Check result
+  EXPECT(assert_equal(Point3(0, 0, 0), result.at<Point3>(0), 1e-4));
+  EXPECT(assert_equal(Point3(2, 0, 0), result.at<Point3>(1), 1e-4));
+  EXPECT(assert_equal(Point3(1, -1, 0), result.at<Point3>(3), 1e-4));
+  EXPECT(assert_equal(Point3(1, 2, 1), result.at<Point3>(4), 1e-4));
+}
+
+
 /* ************************************************************************* */
 int main() {
   TestResult tr;
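One remark on the BALBATA expectation above (a reading of the test's own checks, not of run() internals): the checks fix camera 0 at the origin and place camera 1 `scale` units along the measured 0-to-1 direction, so any other ground-truth translation Ti maps to (Ti - Ta) * scale / ||Tb - Ta|| in the recovered frame; that is exactly the formula used for the camera 2 check. A small helper expressing the same arithmetic (the name expectedInGauge is hypothetical):

#include <Eigen/Dense>

// Expected recovered position of camera i when camera a is fixed at the
// origin and the a-to-b baseline is rescaled to `scale`.
Eigen::Vector3d expectedInGauge(const Eigen::Vector3d& Ta,
                                const Eigen::Vector3d& Tb,
                                const Eigen::Vector3d& Ti, double scale) {
  return (Ti - Ta) * (scale / (Tb - Ta).norm());
}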