diff --git a/gtsam/sfm/TranslationRecovery.cpp b/gtsam/sfm/TranslationRecovery.cpp
index 2e81c2d56..7938d6b93 100644
--- a/gtsam/sfm/TranslationRecovery.cpp
+++ b/gtsam/sfm/TranslationRecovery.cpp
@@ -21,6 +21,7 @@
 #include <gtsam/base/DSFMap.h>
 #include <gtsam/geometry/Point3.h>
 #include <gtsam/linear/NoiseModel.h>
+#include <gtsam/nonlinear/ExpressionFactor.h>
 #include <gtsam/nonlinear/LevenbergMarquardtOptimizer.h>
 #include <gtsam/nonlinear/NonlinearFactorGraph.h>
 #include <gtsam/sfm/TranslationFactor.h>
@@ -28,6 +29,7 @@
 #include <gtsam/sfm/TranslationRecovery.h>
 #include <gtsam/slam/BetweenFactor.h>
 #include <gtsam/slam/PriorFactor.h>
+#include <gtsam/slam/expressions.h>
 #include <set>
 #include <utility>
@@ -81,7 +83,7 @@ NonlinearFactorGraph TranslationRecovery::buildGraph() const {
 }
 
 void TranslationRecovery::addPrior(
-    const double scale, NonlinearFactorGraph *graph,
+    const double scale, const boost::shared_ptr<NonlinearFactorGraph> graph,
     const SharedNoiseModel &priorNoiseModel) const {
   auto edge = relativeTranslations_.begin();
   if (edge == relativeTranslations_.end()) return;
@@ -91,6 +93,32 @@ void TranslationRecovery::addPrior(
       edge->key2(), scale * edge->measured().point3(), edge->noiseModel());
 }
 
+void TranslationRecovery::addPrior(
+    Key i, const Point3 &prior,
+    const boost::shared_ptr<NonlinearFactorGraph> graph,
+    const SharedNoiseModel &priorNoiseModel) const {
+  graph->addPrior(getUniqueKey(i), prior, priorNoiseModel);
+}
+
+Key TranslationRecovery::getUniqueKey(const Key i) const {
+  for (const auto &optimizedAndDuplicateKeys : sameTranslationNodes_) {
+    Key optimizedKey = optimizedAndDuplicateKeys.first;
+    std::set<Key> duplicateKeys = optimizedAndDuplicateKeys.second;
+    if (i == optimizedKey || duplicateKeys.count(i)) return optimizedKey;
+  }
+  // Unlikely case, when i is not in the graph.
+  return i;
+}
+
+void TranslationRecovery::addRelativeHardConstraint(
+    Key i, Key j, const Point3 &w_itj,
+    const boost::shared_ptr<NonlinearFactorGraph> graph) const {
+  Point3_ wti_(getUniqueKey(i)), wtj_(getUniqueKey(j));
+  Expression<Point3> w_itj_ = wtj_ - wti_;
+  graph->addExpressionFactor(noiseModel::Constrained::All(3, 1e9), w_itj,
+                             w_itj_);
+}
+
 Values TranslationRecovery::initializeRandomly(std::mt19937 *rng) const {
   uniform_real_distribution<double> randomVal(-1, 1);
   // Create a lambda expression that checks whether value exists and randomly
@@ -125,12 +153,17 @@ Values TranslationRecovery::initializeRandomly() const {
 }
 
 Values TranslationRecovery::run(const double scale) const {
-  auto graph = buildGraph();
-  addPrior(scale, &graph);
+  boost::shared_ptr<NonlinearFactorGraph> graph_ptr =
+      boost::make_shared<NonlinearFactorGraph>(buildGraph());
+  addPrior(scale, graph_ptr);
   const Values initial = initializeRandomly();
-  LevenbergMarquardtOptimizer lm(graph, initial, params_);
+  LevenbergMarquardtOptimizer lm(*graph_ptr, initial, params_);
   Values result = lm.optimize();
+  return addDuplicateNodes(result);
+}
 
+Values TranslationRecovery::addDuplicateNodes(const Values &result) const {
+  Values final_result = result;
   // Nodes that were not optimized are stored in sameTranslationNodes_ as a map
   // from a key that was optimized to keys that were not optimized. Iterate over
   // map and add results for keys not optimized.
     for (const Key duplicateKey : duplicateKeys) {
-      if (result.exists(duplicateKey)) continue;
-      result.insert(duplicateKey, result.at<Point3>(optimizedKey));
+      if (final_result.exists(duplicateKey)) continue;
+      final_result.insert(duplicateKey,
+                          final_result.at<Point3>(optimizedKey));
     }
   }
-  return result;
+  return final_result;
 }
 
 TranslationRecovery::TranslationEdges TranslationRecovery::SimulateMeasurements(
diff --git a/gtsam/sfm/TranslationRecovery.h b/gtsam/sfm/TranslationRecovery.h
index 30c9a14e3..a0bcb5dd4 100644
--- a/gtsam/sfm/TranslationRecovery.h
+++ b/gtsam/sfm/TranslationRecovery.h
@@ -93,10 +93,20 @@ class TranslationRecovery {
    * @param graph factor graph to which prior is added.
    * @param priorNoiseModel the noise model to use with the prior.
    */
-  void addPrior(const double scale, NonlinearFactorGraph *graph,
+  void addPrior(const double scale,
+                const boost::shared_ptr<NonlinearFactorGraph> graph,
                 const SharedNoiseModel &priorNoiseModel =
                     noiseModel::Isotropic::Sigma(3, 0.01)) const;
 
+  void addPrior(Key i, const Point3 &prior,
+                const boost::shared_ptr<NonlinearFactorGraph> graph,
+                const SharedNoiseModel &priorNoiseModel =
+                    noiseModel::Isotropic::Sigma(3, 0.01)) const;
+
+  void addRelativeHardConstraint(
+      Key i, Key j, const Point3 &w_itj,
+      const boost::shared_ptr<NonlinearFactorGraph> graph) const;
+
   /**
    * @brief Create random initial translations.
   *
@@ -120,6 +130,8 @@ class TranslationRecovery {
    */
   Values run(const double scale = 1.0) const;
 
+  Values addDuplicateNodes(const Values &result) const;
+
   /**
    * @brief Simulate translation direction measurements
    *
@@ -131,5 +143,8 @@ class TranslationRecovery {
    */
   static TranslationEdges SimulateMeasurements(
       const Values &poses, const std::vector<KeyPair> &edges);
+
+ private:
+  Key getUniqueKey(const Key i) const;
 };
 }  // namespace gtsam
diff --git a/gtsam/sfm/sfm.i b/gtsam/sfm/sfm.i
index bf9a73ac5..36a73b792 100644
--- a/gtsam/sfm/sfm.i
+++ b/gtsam/sfm/sfm.i
@@ -4,6 +4,8 @@
 
 namespace gtsam {
 
+#include <gtsam/nonlinear/NonlinearFactorGraph.h>
+#include <gtsam/nonlinear/Values.h>
 #include <gtsam/sfm/SfmTrack.h>
 class SfmTrack {
   SfmTrack();
@@ -142,8 +144,8 @@ class ShonanAveraging2 {
   ShonanAveraging2(string g2oFile);
   ShonanAveraging2(string g2oFile,
                    const gtsam::ShonanAveragingParameters2& parameters);
-  ShonanAveraging2(const gtsam::BetweenFactorPose2s &factors,
-                   const gtsam::ShonanAveragingParameters2 &parameters);
+  ShonanAveraging2(const gtsam::BetweenFactorPose2s& factors,
+                   const gtsam::ShonanAveragingParameters2& parameters);
 
   // Query properties
   size_t nrUnknowns() const;
@@ -259,6 +261,16 @@ class TranslationRecovery {
   TranslationRecovery(
      const gtsam::BinaryMeasurementsUnit3& relativeTranslations);  // default LevenbergMarquardtParams
+  gtsam::NonlinearFactorGraph buildGraph() const;
+  gtsam::Values initializeRandomly() const;
+  void addPrior(gtsam::Key i, const gtsam::Point3& prior,
+                gtsam::NonlinearFactorGraph* graph,
+                const gtsam::SharedNoiseModel& model =
+                    gtsam::noiseModel::Isotropic::Sigma(3, 0.01)) const;
+  void addRelativeHardConstraint(gtsam::Key i, gtsam::Key j,
+                                 const gtsam::Point3& w_itj,
+                                 gtsam::NonlinearFactorGraph* graph) const;
+  gtsam::Values addDuplicateNodes(const gtsam::Values& result) const;
   gtsam::Values run(const double scale) const;
   gtsam::Values run() const;  // default scale = 1.0
 };
diff --git a/tests/testTranslationRecovery.cpp b/tests/testTranslationRecovery.cpp
index 833f11355..006750252 100644
--- a/tests/testTranslationRecovery.cpp
+++ b/tests/testTranslationRecovery.cpp
@@ -17,8 +17,8 @@
  */
 
 #include <CppUnitLite/TestHarness.h>
-#include <gtsam/slam/BetweenFactor.h>
 #include <gtsam/sfm/TranslationRecovery.h>
+#include <gtsam/slam/BetweenFactor.h>
 #include <gtsam/slam/dataset.h>
 
 using namespace std;
@@ -265,6 +265,43 @@ TEST(TranslationRecovery, ThreePosesWithZeroTranslation) {
   EXPECT(assert_equal(Point3(0, 0, 0), result.at<Point3>(2), 1e-8));
 }
 
+TEST(TranslationRecovery, ThreePosesWithOneHardConstraint) {
+  // Create a dataset with 3 poses.
+  // __ __
+  // \/ \/
+  // 0 _____ 1
+  //  \ __ /
+  //   \\//
+  //    3
+  //
+  // 0 and 1 face in the same direction but have a translation offset. 3 is in
+  // the same direction as 0 and 1, in between 0 and 1, with some Y axis offset.
+
+  Values poses;
+  poses.insert(0, Pose3(Rot3(), Point3(0, 0, 0)));
+  poses.insert(1, Pose3(Rot3(), Point3(2, 0, 0)));
+  poses.insert(3, Pose3(Rot3(), Point3(1, -1, 0)));
+
+  auto relativeTranslations =
+      TranslationRecovery::SimulateMeasurements(poses, {{0, 1}, {3, 0}});
+
+  TranslationRecovery algorithm(relativeTranslations);
+  boost::shared_ptr<NonlinearFactorGraph> graph_ptr =
+      boost::make_shared<NonlinearFactorGraph>(algorithm.buildGraph());
+  algorithm.addPrior(0, Point3(), graph_ptr);
+  algorithm.addRelativeHardConstraint(0, 1, Point3(2, 0, 0), graph_ptr);
+  const Values initial = algorithm.initializeRandomly();
+  LevenbergMarquardtParams params;
+  LevenbergMarquardtOptimizer lm(*graph_ptr, initial, params);
+  auto result = algorithm.addDuplicateNodes(lm.optimize());
+  EXPECT_LONGS_EQUAL(4, graph_ptr->size());
+
+  // Check result
+  EXPECT(assert_equal(Point3(0, 0, 0), result.at<Point3>(0), 1e-4));
+  EXPECT(assert_equal(Point3(2, 0, 0), result.at<Point3>(1), 1e-4));
+  EXPECT(assert_equal(Point3(1, -1, 0), result.at<Point3>(3), 1e-4));
+}
+
 /* ************************************************************************* */
 int main() {
   TestResult tr;