diff --git a/gtsam/sfm/TranslationRecovery.cpp b/gtsam/sfm/TranslationRecovery.cpp
index 8d27136e3..d4100b00a 100644
--- a/gtsam/sfm/TranslationRecovery.cpp
+++ b/gtsam/sfm/TranslationRecovery.cpp
@@ -39,9 +39,13 @@ TranslationRecovery::TranslationRecovery(
     const TranslationRecovery::TranslationEdges &relativeTranslations,
     const LevenbergMarquardtParams &lmParams)
     : params_(lmParams) {
-  TranslationEdges tempRelativeTranslations;
-  DSFMap<Key> sameTranslationDSF;
+  // Some relative translations may be zero. We treat nodes that have a zero
+  // relativeTranslation as a single node.
+  // A DSFMap is used to find sets of nodes that have a zero relative
+  // translation. Add the nodes in each edge to the DSFMap, and merge nodes that
+  // are connected by a zero relative translation.
+  DSFMap<Key> sameTranslationDSF;
   for (const auto &edge : relativeTranslations) {
     Key key1 = sameTranslationDSF.find(edge.key1());
     Key key2 = sameTranslationDSF.find(edge.key2());
@@ -49,6 +53,7 @@ TranslationRecovery::TranslationRecovery(
       sameTranslationDSF.merge(key1, key2);
     }
   }
+  // Use only those edges for which two keys have a distinct root in the DSFMap.
   for (const auto &edge : relativeTranslations) {
     Key key1 = sameTranslationDSF.find(edge.key1());
     Key key2 = sameTranslationDSF.find(edge.key2());
@@ -56,6 +61,7 @@ TranslationRecovery::TranslationRecovery(
     relativeTranslations_.emplace_back(key1, key2, edge.measured(),
                                        edge.noiseModel());
   }
 
+  // Store the DSF map for post-processing results.
   sameTranslationNodes_ = sameTranslationDSF.sets();
 }
@@ -106,9 +112,13 @@ Values TranslationRecovery::run(const double scale) const {
   LevenbergMarquardtOptimizer lm(graph, initial, params_);
   Values result = lm.optimize();
 
-  for (const auto &sameTranslationKeys : sameTranslationNodes_) {
-    Key optimizedKey = sameTranslationKeys.first;
-    std::set<Key> duplicateKeys = sameTranslationKeys.second;
+  // Nodes that were not optimized are stored in sameTranslationNodes_ as a map
+  // from a key that was optimized to keys that were not optimized. Iterate over
+  // map and add results for keys not optimized.
+  for (const auto &optimizedAndDuplicateKeys : sameTranslationNodes_) {
+    Key optimizedKey = optimizedAndDuplicateKeys.first;
+    std::set<Key> duplicateKeys = optimizedAndDuplicateKeys.second;
+    // Add the result for the duplicate key if it does not already exist.
     for (const Key duplicateKey : duplicateKeys) {
       if (result.exists(duplicateKey)) continue;
       result.insert(duplicateKey, result.at<Point3>(optimizedKey));
diff --git a/tests/testTranslationRecovery.cpp b/tests/testTranslationRecovery.cpp
index e4fbd9219..7260fd5af 100644
--- a/tests/testTranslationRecovery.cpp
+++ b/tests/testTranslationRecovery.cpp
@@ -89,7 +89,7 @@ TEST(TranslationRecovery, BAL) {
   // EXPECT_DOUBLES_EQUAL(0.0199833, actualError, 1e-5);
 }
 
-TEST(TranslationRecovery, TwoPointTest) {
+TEST(TranslationRecovery, TwoPoseTest) {
   // Create a dataset with 2 poses.
   //  __ __
   //  \/ \/
@@ -114,14 +114,14 @@ TEST(TranslationRecovery, TwoPointTest) {
   EXPECT_LONGS_EQUAL(1, graph.size());
 
   // Run translation recovery
-  const auto result = algorithm.run(/*scale=*/2.0);
+  const auto result = algorithm.run(/*scale=*/3.0);
 
   // Check result for first two translations, determined by prior
   EXPECT(assert_equal(Point3(0, 0, 0), result.at<Point3>(0)));
-  EXPECT(assert_equal(Point3(2, 0, 0), result.at<Point3>(1)));
+  EXPECT(assert_equal(Point3(3, 0, 0), result.at<Point3>(1)));
 }
 
-TEST(TranslationRecovery, ThreePointTest) {
+TEST(TranslationRecovery, ThreePoseTest) {
   // Create a dataset with 3 poses.
   //  __ __
   //  \/ \/
@@ -151,15 +151,15 @@ TEST(TranslationRecovery, ThreePointTest) {
   const auto graph = algorithm.buildGraph();
   EXPECT_LONGS_EQUAL(3, graph.size());
 
-  const auto result = algorithm.run(/*scale=*/2.0);
+  const auto result = algorithm.run(/*scale=*/3.0);
 
   // Check result
   EXPECT(assert_equal(Point3(0, 0, 0), result.at<Point3>(0)));
-  EXPECT(assert_equal(Point3(2, 0, 0), result.at<Point3>(1)));
-  EXPECT(assert_equal(Point3(1, -1, 0), result.at<Point3>(3)));
+  EXPECT(assert_equal(Point3(3, 0, 0), result.at<Point3>(1)));
+  EXPECT(assert_equal(Point3(1.5, -1.5, 0), result.at<Point3>(3)));
 }
 
-TEST(TranslationRecovery, TwoPointsAndZeroTranslation) {
+TEST(TranslationRecovery, ThreePosesIncludingZeroTranslation) {
   // Create a dataset with 3 poses.
   //  __ __
   //  \/ \/
@@ -188,15 +188,15 @@ TEST(TranslationRecovery, TwoPointsAndZeroTranslation) {
   EXPECT_LONGS_EQUAL(1, graph.size());
 
   // Run translation recovery
-  const auto result = algorithm.run(/*scale=*/2.0);
+  const auto result = algorithm.run(/*scale=*/3.0);
 
   // Check result
   EXPECT(assert_equal(Point3(0, 0, 0), result.at<Point3>(0)));
-  EXPECT(assert_equal(Point3(2, 0, 0), result.at<Point3>(1)));
-  EXPECT(assert_equal(Point3(2, 0, 0), result.at<Point3>(2)));
+  EXPECT(assert_equal(Point3(3, 0, 0), result.at<Point3>(1)));
+  EXPECT(assert_equal(Point3(3, 0, 0), result.at<Point3>(2)));
 }
 
-TEST(TranslationRecovery, ThreePointsAndZeroTranslation) {
+TEST(TranslationRecovery, FourPosesIncludingZeroTranslation) {
   // Create a dataset with 4 poses.
   //  __ __
   //  \/ \/
@@ -229,13 +229,13 @@ TEST(TranslationRecovery, ThreePointsAndZeroTranslation) {
   EXPECT_LONGS_EQUAL(3, graph.size());
 
   // Run translation recovery
-  const auto result = algorithm.run(/*scale=*/2.0);
+  const auto result = algorithm.run(/*scale=*/4.0);
 
   // Check result
   EXPECT(assert_equal(Point3(0, 0, 0), result.at<Point3>(0)));
-  EXPECT(assert_equal(Point3(2, 0, 0), result.at<Point3>(1)));
-  EXPECT(assert_equal(Point3(2, 0, 0), result.at<Point3>(2)));
-  EXPECT(assert_equal(Point3(1, -1, 0), result.at<Point3>(3)));
+  EXPECT(assert_equal(Point3(4, 0, 0), result.at<Point3>(1)));
+  EXPECT(assert_equal(Point3(4, 0, 0), result.at<Point3>(2)));
+  EXPECT(assert_equal(Point3(2, -2, 0), result.at<Point3>(3)));
 }
 
 /* ************************************************************************* */
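
Side note on the DSFMap bookkeeping introduced above: the listing below is not part of the patch. It is a minimal, standalone sketch showing how DSFMap<Key>, using only the find/merge/sets calls already exercised in the constructor, collapses nodes connected by zero relative translations into a single representative key. The key values and the "zero edge" list are made up for illustration.

#include <gtsam/base/DSFMap.h>
#include <gtsam/inference/Key.h>

#include <iostream>
#include <utility>
#include <vector>

int main() {
  using gtsam::DSFMap;
  using gtsam::Key;

  // Hypothetical pairs of keys whose measured relative translation is zero.
  const std::vector<std::pair<Key, Key>> zeroEdges = {{1, 2}, {2, 5}};

  DSFMap<Key> dsf;
  for (const auto &edge : zeroEdges) {
    const Key root1 = dsf.find(edge.first);
    const Key root2 = dsf.find(edge.second);
    if (root1 != root2) dsf.merge(root1, root2);
  }

  // Keys 1, 2 and 5 now share a single representative.
  std::cout << std::boolalpha
            << "find(1) == find(5): " << (dsf.find(1) == dsf.find(5)) << "\n";

  // sets() maps each representative key to the set of keys merged into it.
  // This is the structure the constructor stores in sameTranslationNodes_,
  // and what run() later walks to copy the optimized translation of the
  // representative to every duplicate key.
  for (const auto &keyAndSet : dsf.sets()) {
    std::cout << "representative " << keyAndSet.first << " covers "
              << keyAndSet.second.size() << " key(s)\n";
  }
  return 0;
}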