From aa486586264e082c872f9277092590060fea30f8 Mon Sep 17 00:00:00 2001
From: Varun Agrawal
Date: Fri, 12 Aug 2022 16:18:40 -0400
Subject: [PATCH 1/9] more tests running

---
 gtsam/hybrid/tests/testHybridIncremental.cpp | 91 ++++++++++----------
 1 file changed, 44 insertions(+), 47 deletions(-)

diff --git a/gtsam/hybrid/tests/testHybridIncremental.cpp b/gtsam/hybrid/tests/testHybridIncremental.cpp
index 9afdb9886..eec260cbc 100644
--- a/gtsam/hybrid/tests/testHybridIncremental.cpp
+++ b/gtsam/hybrid/tests/testHybridIncremental.cpp
@@ -116,12 +116,6 @@ TEST(HybridGaussianElimination, IncrementalInference) {
   graph1.push_back(switching.linearizedFactorGraph.at(3));  // P(X2)
   graph1.push_back(switching.linearizedFactorGraph.at(5));  // P(M1)

-  //TODO(Varun) we cannot enforce ordering
-  // // Create ordering.
-  // Ordering ordering1;
-  // ordering1 += X(1);
-  // ordering1 += X(2);
-
   // Run update step
   isam.update(graph1);

@@ -133,14 +127,7 @@ TEST(HybridGaussianElimination, IncrementalInference) {
   graph2.push_back(switching.linearizedFactorGraph.at(4));  // P(X3)
   graph2.push_back(switching.linearizedFactorGraph.at(6));  // P(M1, M2)

-  //TODO(Varun) we cannot enforce ordering
-  // // Create ordering.
-  // Ordering ordering2;
-  // ordering2 += X(2);
-  // ordering2 += X(3);
-
   isam.update(graph2);
-  GTSAM_PRINT(isam);

   /********************************************************/
   // Run batch elimination so we can compare results.
@@ -150,68 +137,78 @@ TEST(HybridGaussianElimination, IncrementalInference) {
   ordering += X(3);

   // Now we calculate the actual factors using full elimination
-  HybridBayesNet::shared_ptr expectedHybridBayesNet;
+  HybridBayesTree::shared_ptr expectedHybridBayesTree;
   HybridGaussianFactorGraph::shared_ptr expectedRemainingGraph;
-  std::tie(expectedHybridBayesNet, expectedRemainingGraph) =
-      switching.linearizedFactorGraph.eliminatePartialSequential(ordering);
+  std::tie(expectedHybridBayesTree, expectedRemainingGraph) =
+      switching.linearizedFactorGraph.eliminatePartialMultifrontal(ordering);

   // The densities on X(1) should be the same
   auto x1_conditional = dynamic_pointer_cast<GaussianConditional>(
       isam[X(1)]->conditional()->inner());
-  EXPECT(
-      assert_equal(*x1_conditional, *(expectedHybridBayesNet->atGaussian(0))));
+  auto actual_x1_conditional = dynamic_pointer_cast<GaussianConditional>(
+      (*expectedHybridBayesTree)[X(1)]->conditional()->inner());
+  EXPECT(assert_equal(*x1_conditional, *actual_x1_conditional));

   // The densities on X(2) should be the same
   auto x2_conditional = dynamic_pointer_cast<GaussianConditional>(
       isam[X(2)]->conditional()->inner());
-  EXPECT(
-      assert_equal(*x2_conditional, *(expectedHybridBayesNet->atGaussian(1))));
+  auto actual_x2_conditional = dynamic_pointer_cast<GaussianConditional>(
+      (*expectedHybridBayesTree)[X(2)]->conditional()->inner());
+  EXPECT(assert_equal(*x2_conditional, *actual_x2_conditional));

-  // // The densities on X(3) should be the same
-  // auto x3_conditional =
-  //     dynamic_pointer_cast<GaussianConditional>(isam[X(3)]->conditional()->inner());
-  // EXPECT(
-  //     assert_equal(*x3_conditional, *(expectedHybridBayesNet->atGaussian(2))));
+  // The densities on X(3) should be the same
+  auto x3_conditional = dynamic_pointer_cast<GaussianConditional>(
+      isam[X(3)]->conditional()->inner());
+  auto actual_x3_conditional = dynamic_pointer_cast<GaussianConditional>(
+      (*expectedHybridBayesTree)[X(2)]->conditional()->inner());
+  EXPECT(assert_equal(*x3_conditional, *actual_x3_conditional));

-  GTSAM_PRINT(*expectedHybridBayesNet);
-
-  // we only do the manual continuous elimination for 0,0
-  // the other discrete probabilities on M(2) are calculated the same way
+  // We only perform manual continuous elimination for 0,0.
+  // The other discrete probabilities on M(2) are calculated the same way
   auto m00_prob = [&]() {
     GaussianFactorGraph gf;
-    // gf.add(switching.linearizedFactorGraph.gaussianGraph().at(3));
+    auto x2_prior = boost::dynamic_pointer_cast<HybridGaussianFactor>(
+        switching.linearizedFactorGraph.at(3))->inner();
+    gf.add(x2_prior);

     DiscreteValues m00;
     m00[M(1)] = 0, m00[M(2)] = 0;
-    // auto dcMixture =
-    //     dynamic_pointer_cast(graph2.dcGraph().at(0));
-    // gf.add(dcMixture->factors()(m00));
-    // auto x2_mixed =
-    //     boost::dynamic_pointer_cast(hybridBayesNet.at(1));
-    // gf.add(x2_mixed->factors()(m00));
+    // P(X2, X3 | M2)
+    auto dcMixture =
+        dynamic_pointer_cast<GaussianMixtureFactor>(graph2.at(0));
+    gf.add(dcMixture->factors()(m00));
+
+    auto x2_mixed = boost::dynamic_pointer_cast<GaussianMixture>(
+        isam[X(2)]->conditional()->inner());
+    // Perform explicit cast so we can add the conditional to `gf`.
+    auto x2_cond = boost::dynamic_pointer_cast<GaussianConditional>(
+        x2_mixed->conditionals()(m00));
+    gf.add(x2_cond);
+
     auto result_gf = gf.eliminateSequential();
     return gf.probPrime(result_gf->optimize());
   }();

-  /// Test if the probability values are as expected with regression tests.
-// DiscreteValues assignment;
-// EXPECT(assert_equal(m00_prob, 0.60656, 1e-5));
-// assignment[M(1)] = 0;
-// assignment[M(2)] = 0;
-// EXPECT(assert_equal(m00_prob, (*discreteFactor)(assignment), 1e-5));
+  auto discreteConditional = isam[M(1)]->conditional()->asDiscreteConditional();
+
+  // Test if the probability values are as expected with regression tests.
+  // DiscreteValues assignment;
+  // EXPECT(assert_equal(m00_prob, 0.60656, 1e-5));
+  // assignment[M(1)] = 0;
+  // assignment[M(2)] = 0;
+  // EXPECT(assert_equal(m00_prob, (*discreteConditional)(assignment), 1e-5));
 // assignment[M(1)] = 1;
 // assignment[M(2)] = 0;
-// EXPECT(assert_equal(0.612477, (*discreteFactor)(assignment), 1e-5));
+// EXPECT(assert_equal(0.612477, (*discreteConditional)(assignment), 1e-5));
 // assignment[M(1)] = 0;
 // assignment[M(2)] = 1;
-// EXPECT(assert_equal(0.999952, (*discreteFactor)(assignment), 1e-5));
+// EXPECT(assert_equal(0.999952, (*discreteConditional)(assignment), 1e-5));
 // assignment[M(1)] = 1;
 // assignment[M(2)] = 1;
-// EXPECT(assert_equal(1.0, (*discreteFactor)(assignment), 1e-5));
+// EXPECT(assert_equal(1.0, (*discreteConditional)(assignment), 1e-5));

 // DiscreteFactorGraph dfg;
-// dfg.add(*discreteFactor);
-// dfg.add(discreteFactor_m1);
+// dfg.add(*discreteConditional);
+// dfg.add(discreteConditional_m1);
 // dfg.add_factors(switching.linearizedFactorGraph.discreteGraph());

 // // Check if the chordal graph generated from incremental elimination

From 77bea319ddc727d41fffb8f26fc1907cbaeae61d Mon Sep 17 00:00:00 2001
From: Varun Agrawal
Date: Sat, 13 Aug 2022 15:11:21 -0400
Subject: [PATCH 2/9] one more test passing

---
 gtsam/hybrid/tests/testHybridIncremental.cpp | 109 +++++++++----------
 1 file changed, 49 insertions(+), 60 deletions(-)

diff --git a/gtsam/hybrid/tests/testHybridIncremental.cpp b/gtsam/hybrid/tests/testHybridIncremental.cpp
index eec260cbc..7ab64462c 100644
--- a/gtsam/hybrid/tests/testHybridIncremental.cpp
+++ b/gtsam/hybrid/tests/testHybridIncremental.cpp
@@ -18,7 +18,9 @@
 #include
 #include
+#include
 #include
+#include
 #include
 #include
 #include
@@ -108,7 +110,7 @@ TEST(HybridGaussianElimination, IncrementalInference) {
   // Create initial factor graph
   //    *        *        *
   //    |        |        |
-  // *- X1 -*- X2 -*- X3
+  //    X1 -*- X2 -*- X3
   //        |        |
   //      *-M1 - * - M2
   graph1.push_back(switching.linearizedFactorGraph.at(0));  // P(X1)
@@ -119,6 +121,10 @@ TEST(HybridGaussianElimination, IncrementalInference) {
   // Run update step
   isam.update(graph1);

+  auto discreteConditional_m1 =
+      isam[M(1)]->conditional()->asDiscreteConditional();
+  EXPECT(discreteConditional_m1->keys() == KeyVector({M(1)}));
+
   /********************************************************/
   // New factor graph for incremental update.
   HybridGaussianFactorGraph graph2;
@@ -165,65 +171,48 @@ TEST(HybridGaussianElimination, IncrementalInference) {

   // We only perform manual continuous elimination for 0,0.
   // The other discrete probabilities on M(2) are calculated the same way
-  auto m00_prob = [&]() {
-    GaussianFactorGraph gf;
-    auto x2_prior = boost::dynamic_pointer_cast<HybridGaussianFactor>(
-        switching.linearizedFactorGraph.at(3))->inner();
-    gf.add(x2_prior);
+  Ordering discrete_ordering;
+  discrete_ordering += M(1);
+  discrete_ordering += M(2);
+  HybridBayesTree::shared_ptr discreteBayesTree =
+      expectedRemainingGraph->eliminateMultifrontal(discrete_ordering);

-    DiscreteValues m00;
-    m00[M(1)] = 0, m00[M(2)] = 0;
-    // P(X2, X3 | M2)
-    auto dcMixture =
-        dynamic_pointer_cast<GaussianMixtureFactor>(graph2.at(0));
-    gf.add(dcMixture->factors()(m00));
+  DiscreteValues m00;
+  m00[M(1)] = 0, m00[M(2)] = 0;
+  DiscreteConditional decisionTree = *(*discreteBayesTree)[M(2)]->conditional()->asDiscreteConditional();
+  double m00_prob = decisionTree(m00);

-    auto x2_mixed = boost::dynamic_pointer_cast<GaussianMixture>(
-        isam[X(2)]->conditional()->inner());
-    // Perform explicit cast so we can add the conditional to `gf`.
-    auto x2_cond = boost::dynamic_pointer_cast<GaussianConditional>(
-        x2_mixed->conditionals()(m00));
-    gf.add(x2_cond);
+  auto discreteConditional = isam[M(2)]->conditional()->asDiscreteConditional();

-    auto result_gf = gf.eliminateSequential();
-    return gf.probPrime(result_gf->optimize());
-  }();
-
-  auto discreteConditional = isam[M(1)]->conditional()->asDiscreteConditional();
   // Test if the probability values are as expected with regression tests.
-  // DiscreteValues assignment;
-  // EXPECT(assert_equal(m00_prob, 0.60656, 1e-5));
-  // assignment[M(1)] = 0;
-  // assignment[M(2)] = 0;
-  // EXPECT(assert_equal(m00_prob, (*discreteConditional)(assignment), 1e-5));
-// assignment[M(1)] = 1;
-// assignment[M(2)] = 0;
-// EXPECT(assert_equal(0.612477, (*discreteConditional)(assignment), 1e-5));
-// assignment[M(1)] = 0;
-// assignment[M(2)] = 1;
-// EXPECT(assert_equal(0.999952, (*discreteConditional)(assignment), 1e-5));
-// assignment[M(1)] = 1;
-// assignment[M(2)] = 1;
-// EXPECT(assert_equal(1.0, (*discreteConditional)(assignment), 1e-5));
+  DiscreteValues assignment;
+  EXPECT(assert_equal(m00_prob, 0.0619233, 1e-5));
+  assignment[M(1)] = 0;
+  assignment[M(2)] = 0;
+  EXPECT(assert_equal(m00_prob, (*discreteConditional)(assignment), 1e-5));
+  assignment[M(1)] = 1;
+  assignment[M(2)] = 0;
+  EXPECT(assert_equal(0.183743, (*discreteConditional)(assignment), 1e-5));
+  assignment[M(1)] = 0;
+  assignment[M(2)] = 1;
+  EXPECT(assert_equal(0.204159, (*discreteConditional)(assignment), 1e-5));
+  assignment[M(1)] = 1;
+  assignment[M(2)] = 1;
+  EXPECT(assert_equal(0.2, (*discreteConditional)(assignment), 1e-5));

-// DiscreteFactorGraph dfg;
-// dfg.add(*discreteConditional);
-// dfg.add(discreteConditional_m1);
-// dfg.add_factors(switching.linearizedFactorGraph.discreteGraph());
-
-// // Check if the chordal graph generated from incremental elimination
-// matches
-// // that of batch elimination.
-// auto chordal = dfg.eliminateSequential(); -// auto expectedChordal = -// expectedRemainingGraph->discreteGraph().eliminateSequential(); - -// EXPECT(assert_equal(*expectedChordal, *chordal, 1e-6)); + // Check if the clique conditional generated from incremental elimination matches + // that of batch elimination. + auto expectedChordal = expectedRemainingGraph->eliminateMultifrontal(); + auto expectedConditional = dynamic_pointer_cast( + (*expectedChordal)[M(2)]->conditional()->inner()); + auto actualConditional = dynamic_pointer_cast( + isam[M(2)]->conditional()->inner()); + EXPECT(assert_equal(*actualConditional, *expectedConditional, 1e-6)); } /* ****************************************************************************/ -// // Test if we can approximately do the inference -// TEST(DCGaussianElimination, Approx_inference) { +// Test if we can approximately do the inference +TEST(HybridGaussianElimination, Approx_inference) { // Switching switching(4); // IncrementalHybrid incrementalHybrid; // HybridGaussianFactorGraph graph1; @@ -339,11 +328,11 @@ TEST(HybridGaussianElimination, IncrementalInference) { // *lastDensity(assignment))); // } // } -// } +} /* ****************************************************************************/ -// // Test approximate inference with an additional pruning step. -// TEST(DCGaussianElimination, Incremental_approximate) { +// Test approximate inference with an additional pruning step. +TEST(HybridGaussianElimination, Incremental_approximate) { // Switching switching(5); // IncrementalHybrid incrementalHybrid; // HybridGaussianFactorGraph graph1; @@ -395,12 +384,12 @@ TEST(HybridGaussianElimination, IncrementalInference) { // CHECK_EQUAL(2, actualBayesNet.size()); // EXPECT_LONGS_EQUAL(10, actualBayesNet.atGaussian(0)->nrComponents()); // EXPECT_LONGS_EQUAL(5, actualBayesNet.atGaussian(1)->nrComponents()); -// } +} /* ************************************************************************/ -// // Test for figuring out the optimal ordering to ensure we get -// // a discrete graph after elimination. -// TEST(IncrementalHybrid, NonTrivial) { +// Test for figuring out the optimal ordering to ensure we get +// a discrete graph after elimination. +TEST(IncrementalHybrid, NonTrivial) { // // This is a GTSAM-only test for running inference on a single legged // robot. 
// // The leg links are represented by the chain X-Y-Z-W, where X is the base @@ -637,7 +626,7 @@ TEST(HybridGaussianElimination, IncrementalInference) { // auto lastConditional = boost::dynamic_pointer_cast( // inc.hybridBayesNet().at(inc.hybridBayesNet().size() - 1)); // EXPECT_LONGS_EQUAL(3, lastConditional->nrComponents()); -// } +} /* ************************************************************************* */ int main() { From ac20cff710fbecd0305625ed1d63575c8d84dce9 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Tue, 16 Aug 2022 17:23:52 -0400 Subject: [PATCH 3/9] add incremental pruning to HybridGaussianISAM --- gtsam/hybrid/GaussianMixture.cpp | 27 +- gtsam/hybrid/GaussianMixture.h | 12 +- gtsam/hybrid/HybridGaussianISAM.cpp | 54 +- gtsam/hybrid/HybridGaussianISAM.h | 10 + gtsam/hybrid/tests/testHybridIncremental.cpp | 709 +++++++++---------- 5 files changed, 449 insertions(+), 363 deletions(-) diff --git a/gtsam/hybrid/GaussianMixture.cpp b/gtsam/hybrid/GaussianMixture.cpp index b04800d21..6816dfbf6 100644 --- a/gtsam/hybrid/GaussianMixture.cpp +++ b/gtsam/hybrid/GaussianMixture.cpp @@ -119,11 +119,36 @@ void GaussianMixture::print(const std::string &s, "", [&](Key k) { return formatter(k); }, [&](const GaussianConditional::shared_ptr &gf) -> std::string { RedirectCout rd; - if (!gf->empty()) + if (gf && !gf->empty()) gf->print("", formatter); else return {"nullptr"}; return rd.str(); }); } + +/* *******************************************************************************/ +void GaussianMixture::prune(const DecisionTreeFactor &decisionTree) { + // Functional which loops over all assignments and create a set of + // GaussianConditionals + auto pruner = [&decisionTree]( + const Assignment &choices, + const GaussianConditional::shared_ptr &conditional) + -> GaussianConditional::shared_ptr { + // typecast so we can use this to get probability value + DiscreteValues values(choices); + + if (decisionTree(values) == 0.0) { + // empty aka null pointer + boost::shared_ptr null; + return null; + } else { + return conditional; + } + }; + + auto pruned_conditionals = conditionals_.apply(pruner); + conditionals_.root_ = pruned_conditionals.root_; +} + } // namespace gtsam diff --git a/gtsam/hybrid/GaussianMixture.h b/gtsam/hybrid/GaussianMixture.h index 49aa31c4c..75deb4d55 100644 --- a/gtsam/hybrid/GaussianMixture.h +++ b/gtsam/hybrid/GaussianMixture.h @@ -21,6 +21,7 @@ #include #include +#include #include #include #include @@ -121,7 +122,7 @@ class GTSAM_EXPORT GaussianMixture /// Test equality with base HybridFactor bool equals(const HybridFactor &lf, double tol = 1e-9) const override; - /* print utility */ + /// Print utility void print( const std::string &s = "GaussianMixture\n", const KeyFormatter &formatter = DefaultKeyFormatter) const override; @@ -131,6 +132,15 @@ class GTSAM_EXPORT GaussianMixture /// Getter for the underlying Conditionals DecisionTree const Conditionals &conditionals(); + /** + * @brief Prune the decision tree of Gaussian factors as per the discrete + * `decisionTree`. + * + * @param decisionTree A pruned decision tree of discrete keys where the + * leaves are probabilities. + */ + void prune(const DecisionTreeFactor &decisionTree); + /** * @brief Merge the Gaussian Factor Graphs in `this` and `sum` while * maintaining the decision tree structure. 
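
A note on how the two prune() methods compose: DecisionTreeFactor::prune (used
by HybridGaussianISAM::prune below) first reduces the discrete factor to a leaf
budget, and GaussianMixture::prune (added above) then nulls out the Gaussian
components whose assignments were zeroed. A minimal sketch of a caller, where
`discreteFactor` and `mixture` are assumed to be instances over the same
discrete keys (illustrative names, not code from the patch):

    DecisionTreeFactor pruned = discreteFactor.prune(5);  // keep at most 5 leaves
    mixture.prune(pruned);  // zero-probability assignments become nullptr
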
diff --git a/gtsam/hybrid/HybridGaussianISAM.cpp b/gtsam/hybrid/HybridGaussianISAM.cpp
index 7783a88dd..df7803722 100644
--- a/gtsam/hybrid/HybridGaussianISAM.cpp
+++ b/gtsam/hybrid/HybridGaussianISAM.cpp
@@ -41,6 +41,7 @@ HybridGaussianISAM::HybridGaussianISAM(const HybridBayesTree& bayesTree)
 void HybridGaussianISAM::updateInternal(
     const HybridGaussianFactorGraph& newFactors,
     HybridBayesTree::Cliques* orphans,
+    const boost::optional<Ordering>& ordering,
     const HybridBayesTree::Eliminate& function) {
   // Remove the contaminated part of the Bayes tree
   BayesNetType bn;
@@ -78,12 +79,19 @@ void HybridGaussianISAM::updateInternal(

   // Get an ordering where the new keys are eliminated last
   const VariableIndex index(factors);
-  const Ordering ordering = Ordering::ColamdConstrainedLast(
-      index, KeyVector(newKeysDiscreteLast.begin(), newKeysDiscreteLast.end()),
-      true);
+  Ordering elimination_ordering;
+  if (ordering) {
+    elimination_ordering = *ordering;
+  } else {
+    elimination_ordering = Ordering::ColamdConstrainedLast(
+        index,
+        KeyVector(newKeysDiscreteLast.begin(), newKeysDiscreteLast.end()),
+        true);
+  }

   // eliminate all factors (top, added, orphans) into a new Bayes tree
-  auto bayesTree = factors.eliminateMultifrontal(ordering, function, index);
+  HybridBayesTree::shared_ptr bayesTree =
+      factors.eliminateMultifrontal(elimination_ordering, function, index);

   // Re-add into Bayes tree data structures
   this->roots_.insert(this->roots_.end(), bayesTree->roots().begin(),
@@ -93,9 +101,45 @@ void HybridGaussianISAM::updateInternal(

 /* ************************************************************************* */
 void HybridGaussianISAM::update(const HybridGaussianFactorGraph& newFactors,
+                                const boost::optional<Ordering>& ordering,
                                 const HybridBayesTree::Eliminate& function) {
   Cliques orphans;
-  this->updateInternal(newFactors, &orphans, function);
+  this->updateInternal(newFactors, &orphans, ordering, function);
+}
+
+void HybridGaussianISAM::prune(const Key& root, const size_t maxNrLeaves) {
+  auto decisionTree = boost::dynamic_pointer_cast<DecisionTreeFactor>(
+      this->clique(root)->conditional()->inner());
+  DecisionTreeFactor prunedDiscreteFactor = decisionTree->prune(maxNrLeaves);
+  decisionTree->root_ = prunedDiscreteFactor.root_;
+
+  std::vector<Key> prunedKeys;
+  for (auto&& clique : nodes()) {
+    // The cliques can be repeated for each frontal so we record it in
+    // prunedKeys and check if we have already pruned a particular clique.
+    if (std::find(prunedKeys.begin(), prunedKeys.end(), clique.first) !=
+        prunedKeys.end()) {
+      continue;
+    }
+
+    // Add all the keys of the current clique to be pruned to prunedKeys
+    for (auto&& key : clique.second->conditional()->frontals()) {
+      prunedKeys.push_back(key);
+    }
+
+    // Convert parents() to a KeyVector for comparison
+    KeyVector parents;
+    for (auto&& parent : clique.second->conditional()->parents()) {
+      parents.push_back(parent);
+    }
+
+    if (parents == decisionTree->keys()) {
+      auto gaussianMixture = boost::dynamic_pointer_cast<GaussianMixture>(
+          clique.second->conditional()->inner());
+
+      gaussianMixture->prune(prunedDiscreteFactor);
+    }
+  }
 }

 }  // namespace gtsam
diff --git a/gtsam/hybrid/HybridGaussianISAM.h b/gtsam/hybrid/HybridGaussianISAM.h
index d5b6271da..4fc206582 100644
--- a/gtsam/hybrid/HybridGaussianISAM.h
+++ b/gtsam/hybrid/HybridGaussianISAM.h
@@ -48,6 +48,7 @@ class GTSAM_EXPORT HybridGaussianISAM : public ISAM<HybridBayesTree> {
   void updateInternal(
       const HybridGaussianFactorGraph& newFactors,
       HybridBayesTree::Cliques* orphans,
+      const boost::optional<Ordering>& ordering = boost::none,
       const HybridBayesTree::Eliminate& function =
           HybridBayesTree::EliminationTraitsType::DefaultEliminate);

@@ -59,8 +60,17 @@ class GTSAM_EXPORT HybridGaussianISAM : public ISAM<HybridBayesTree> {
    * @param function Elimination function.
    */
   void update(const HybridGaussianFactorGraph& newFactors,
+              const boost::optional<Ordering>& ordering = boost::none,
               const HybridBayesTree::Eliminate& function =
                   HybridBayesTree::EliminationTraitsType::DefaultEliminate);
+
+  /**
+   * @brief Prune the discrete conditional at `root` to at most `maxNrLeaves`
+   * leaves, and prune the Gaussian mixtures that depend on it accordingly.
+   *
+   * @param root The root key in the discrete conditional decision tree.
+   * @param maxNrLeaves The maximum number of leaves to keep after pruning.
+   */
+  void prune(const Key& root, const size_t maxNrLeaves);
 };

 /// traits
diff --git a/gtsam/hybrid/tests/testHybridIncremental.cpp b/gtsam/hybrid/tests/testHybridIncremental.cpp
index 7ab64462c..afd0d0a70 100644
--- a/gtsam/hybrid/tests/testHybridIncremental.cpp
+++ b/gtsam/hybrid/tests/testHybridIncremental.cpp
@@ -52,10 +52,10 @@ TEST(HybridGaussianElimination, IncrementalElimination) {
   HybridGaussianFactorGraph graph1;

   // Create initial factor graph
-  //    *        *        *
-  //    |        |        |
-  // *- X1 -*- X2 -*- X3
-  //      \*-M1-*/
+  //    *        *        *
+  //    |        |        |
+  //    X1 -*- X2 -*- X3
+  //      \*-M1-*/
   graph1.push_back(switching.linearizedFactorGraph.at(0));  // P(X1)
   graph1.push_back(switching.linearizedFactorGraph.at(1));  // P(X1, X2 | M1)
   graph1.push_back(switching.linearizedFactorGraph.at(2));  // P(X2, X3 | M2)
@@ -179,7 +179,8 @@ TEST(HybridGaussianElimination, IncrementalInference) {

   DiscreteValues m00;
   m00[M(1)] = 0, m00[M(2)] = 0;
-  DiscreteConditional decisionTree = *(*discreteBayesTree)[M(2)]->conditional()->asDiscreteConditional();
+  DiscreteConditional decisionTree =
+      *(*discreteBayesTree)[M(2)]->conditional()->asDiscreteConditional();
   double m00_prob = decisionTree(m00);

   auto discreteConditional = isam[M(2)]->conditional()->asDiscreteConditional();
@@ -200,8 +201,8 @@ TEST(HybridGaussianElimination, IncrementalInference) {
   assignment[M(2)] = 1;
   EXPECT(assert_equal(0.2, (*discreteConditional)(assignment), 1e-5));

-  // Check if the clique conditional generated from incremental elimination matches
-  // that of batch elimination.
+  // Check if the clique conditional generated from incremental elimination
+  // matches that of batch elimination.
auto expectedChordal = expectedRemainingGraph->eliminateMultifrontal(); auto expectedConditional = dynamic_pointer_cast( (*expectedChordal)[M(2)]->conditional()->inner()); @@ -213,419 +214,415 @@ TEST(HybridGaussianElimination, IncrementalInference) { /* ****************************************************************************/ // Test if we can approximately do the inference TEST(HybridGaussianElimination, Approx_inference) { -// Switching switching(4); -// IncrementalHybrid incrementalHybrid; -// HybridGaussianFactorGraph graph1; + Switching switching(4); + HybridGaussianISAM incrementalHybrid; + HybridGaussianFactorGraph graph1; -// // Add the 3 DC factors, x1-x2, x2-x3, x3-x4 -// for (size_t i = 0; i < 3; i++) { -// graph1.push_back(switching.linearizedFactorGraph.dcGraph().at(i)); -// } + // Add the 3 hybrid factors, x1-x2, x2-x3, x3-x4 + for (size_t i = 1; i < 4; i++) { + graph1.push_back(switching.linearizedFactorGraph.at(i)); + } -// // Add the Gaussian factors, 1 prior on X(1), 4 measurements -// for (size_t i = 0; i <= 4; i++) { -// graph1.push_back(switching.linearizedFactorGraph.gaussianGraph().at(i)); -// } + // Add the Gaussian factors, 1 prior on X(1), + // 3 measurements on X(2), X(3), X(4) + graph1.push_back(switching.linearizedFactorGraph.at(0)); + for (size_t i = 4; i <= 7; i++) { + graph1.push_back(switching.linearizedFactorGraph.at(i)); + } -// // Create ordering. -// Ordering ordering; -// for (size_t j = 1; j <= 4; j++) { -// ordering += X(j); -// } + // Create ordering. + Ordering ordering; + for (size_t j = 1; j <= 4; j++) { + ordering += X(j); + } -// // Now we calculate the actual factors using full elimination -// HybridBayesNet::shared_ptr unprunedHybridBayesNet; -// HybridGaussianFactorGraph::shared_ptr unprunedRemainingGraph; -// std::tie(unprunedHybridBayesNet, unprunedRemainingGraph) = -// switching.linearizedFactorGraph.eliminatePartialSequential(ordering); + // Now we calculate the actual factors using full elimination + HybridBayesTree::shared_ptr unprunedHybridBayesTree; + HybridGaussianFactorGraph::shared_ptr unprunedRemainingGraph; + std::tie(unprunedHybridBayesTree, unprunedRemainingGraph) = + switching.linearizedFactorGraph.eliminatePartialMultifrontal(ordering); -// size_t maxComponents = 5; -// incrementalHybrid.update(graph1, ordering, maxComponents); + size_t maxNrLeaves = 5; + incrementalHybrid.update(graph1); -// /* -// unpruned factor is: -// Choice(m3) -// 0 Choice(m2) -// 0 0 Choice(m1) -// 0 0 0 Leaf 0.2248 - -// 0 0 1 Leaf 0.3715 - -// 0 1 Choice(m1) -// 0 1 0 Leaf 0.3742 * -// 0 1 1 Leaf 0.6125 * -// 1 Choice(m2) -// 1 0 Choice(m1) -// 1 0 0 Leaf 0.3706 - -// 1 0 1 Leaf 0.6124 * -// 1 1 Choice(m1) -// 1 1 0 Leaf 0.611 * -// 1 1 1 Leaf 1 * + incrementalHybrid.prune(M(3), maxNrLeaves); -// pruned factors is: -// Choice(m3) -// 0 Choice(m2) -// 0 0 Leaf 0 -// 0 1 Choice(m1) -// 0 1 0 Leaf 0.3742 -// 0 1 1 Leaf 0.6125 -// 1 Choice(m2) -// 1 0 Choice(m1) -// 1 0 0 Leaf 0 -// 1 0 1 Leaf 0.6124 -// 1 1 Choice(m1) -// 1 1 0 Leaf 0.611 -// 1 1 1 Leaf 1 -// */ + /* + unpruned factor is: + Choice(m3) + 0 Choice(m2) + 0 0 Choice(m1) + 0 0 0 Leaf 0.11267528 + 0 0 1 Leaf 0.18576102 + 0 1 Choice(m1) + 0 1 0 Leaf 0.18754662 + 0 1 1 Leaf 0.30623871 + 1 Choice(m2) + 1 0 Choice(m1) + 1 0 0 Leaf 0.18576102 + 1 0 1 Leaf 0.30622428 + 1 1 Choice(m1) + 1 1 0 Leaf 0.30623871 + 1 1 1 Leaf 0.5 -// // Test that the remaining factor graph has one -// // DecisionTreeFactor on {M3, M2, M1}. 
-// auto remainingFactorGraph = incrementalHybrid.remainingFactorGraph(); -// EXPECT_LONGS_EQUAL(1, remainingFactorGraph.size()); + pruned factors is: + Choice(m3) + 0 Choice(m2) + 0 0 Leaf 0 + 0 1 Choice(m1) + 0 1 0 Leaf 0.18754662 + 0 1 1 Leaf 0.30623871 + 1 Choice(m2) + 1 0 Choice(m1) + 1 0 0 Leaf 0 + 1 0 1 Leaf 0.30622428 + 1 1 Choice(m1) + 1 1 0 Leaf 0.30623871 + 1 1 1 Leaf 0.5 + */ -// auto discreteFactor_m1 = *dynamic_pointer_cast( -// incrementalHybrid.remainingDiscreteGraph().at(0)); -// EXPECT(discreteFactor_m1.keys() == KeyVector({M(3), M(2), M(1)})); + auto discreteConditional_m1 = *dynamic_pointer_cast( + incrementalHybrid[M(1)]->conditional()->inner()); + EXPECT(discreteConditional_m1.keys() == KeyVector({M(1), M(2), M(3)})); -// // Get the number of elements which are equal to 0. -// auto count = [](const double &value, int count) { -// return value > 0 ? count + 1 : count; -// }; -// // Check that the number of leaves after pruning is 5. -// EXPECT_LONGS_EQUAL(5, discreteFactor_m1.fold(count, 0)); + // Get the number of elements which are greater than 0. + auto count = [](const double &value, int count) { + return value > 0 ? count + 1 : count; + }; + // Check that the number of leaves after pruning is 5. + EXPECT_LONGS_EQUAL(5, discreteConditional_m1.fold(count, 0)); -// /* Expected hybrid Bayes net -// * factor 0: [x1 | x2 m1 ], 2 components -// * factor 1: [x2 | x3 m2 m1 ], 4 components -// * factor 2: [x3 | x4 m3 m2 m1 ], 8 components -// * factor 3: [x4 | m3 m2 m1 ], 8 components -// */ -// auto hybridBayesNet = incrementalHybrid.hybridBayesNet(); + // Check that the hybrid nodes of the bayes net match those of the pre-pruning + // bayes net, at the same positions. + auto &unprunedLastDensity = *dynamic_pointer_cast( + unprunedHybridBayesTree->clique(X(4))->conditional()->inner()); + auto &lastDensity = *dynamic_pointer_cast( + incrementalHybrid[X(4)]->conditional()->inner()); -// // Check if we have a bayes net with 4 hybrid nodes, -// // each with 2, 4, 5 (pruned), and 5 (pruned) leaves respetively. -// EXPECT_LONGS_EQUAL(4, hybridBayesNet.size()); -// EXPECT_LONGS_EQUAL(2, hybridBayesNet.atGaussian(0)->nrComponents()); -// EXPECT_LONGS_EQUAL(4, hybridBayesNet.atGaussian(1)->nrComponents()); -// EXPECT_LONGS_EQUAL(5, hybridBayesNet.atGaussian(2)->nrComponents()); -// EXPECT_LONGS_EQUAL(5, hybridBayesNet.atGaussian(3)->nrComponents()); + std::vector> assignments = + discreteConditional_m1.enumerate(); + // Loop over all assignments and check the pruned components + for (auto &&av : assignments) { + const DiscreteValues &assignment = av.first; + const double value = av.second; -// // Check that the hybrid nodes of the bayes net match those of the bayes -// net -// // before pruning, at the same positions. 
-// auto &lastDensity = *(hybridBayesNet.atGaussian(3)); -// auto &unprunedLastDensity = *(unprunedHybridBayesNet->atGaussian(3)); -// std::vector> assignments = -// discreteFactor_m1.enumerate(); -// // Loop over all assignments and check the pruned components -// for (auto &&av : assignments) { -// const DiscreteValues &assignment = av.first; -// const double value = av.second; - -// if (value == 0.0) { -// EXPECT(lastDensity(assignment) == nullptr); -// } else { -// CHECK(lastDensity(assignment)); -// EXPECT(assert_equal(*unprunedLastDensity(assignment), -// *lastDensity(assignment))); -// } -// } + if (value == 0.0) { + EXPECT(lastDensity(assignment) == nullptr); + } else { + CHECK(lastDensity(assignment)); + EXPECT(assert_equal(*unprunedLastDensity(assignment), + *lastDensity(assignment))); + } + } } /* ****************************************************************************/ // Test approximate inference with an additional pruning step. TEST(HybridGaussianElimination, Incremental_approximate) { -// Switching switching(5); -// IncrementalHybrid incrementalHybrid; -// HybridGaussianFactorGraph graph1; + Switching switching(5); + HybridGaussianISAM incrementalHybrid; + HybridGaussianFactorGraph graph1; -// // Add the 3 DC factors, x1-x2, x2-x3, x3-x4 -// for (size_t i = 0; i < 3; i++) { -// graph1.push_back(switching.linearizedFactorGraph.dcGraph().at(i)); -// } + /***** Run Round 1 *****/ + // Add the 3 hybrid factors, x1-x2, x2-x3, x3-x4 + for (size_t i = 1; i < 4; i++) { + graph1.push_back(switching.linearizedFactorGraph.at(i)); + } -// // Add the Gaussian factors, 1 prior on X(1), 4 measurements -// for (size_t i = 0; i <= 4; i++) { -// graph1.push_back(switching.linearizedFactorGraph.gaussianGraph().at(i)); -// } + // Add the Gaussian factors, 1 prior on X(1), + // 3 measurements on X(2), X(3), X(4) + graph1.push_back(switching.linearizedFactorGraph.at(0)); + for (size_t i = 5; i <= 7; i++) { + graph1.push_back(switching.linearizedFactorGraph.at(i)); + } -// // Create ordering. -// Ordering ordering; -// for (size_t j = 1; j <= 4; j++) { -// ordering += X(j); -// } + // Create ordering. + Ordering ordering; + for (size_t j = 1; j <= 4; j++) { + ordering += X(j); + } -// // Run update with pruning -// size_t maxComponents = 5; -// incrementalHybrid.update(graph1, ordering, maxComponents); + // Run update with pruning + size_t maxComponents = 5; + incrementalHybrid.update(graph1); + incrementalHybrid.prune(M(3), maxComponents); -// // Check if we have a bayes net with 4 hybrid nodes, -// // each with 2, 4, 8, and 5 (pruned) leaves respetively. -// auto actualBayesNet1 = incrementalHybrid.hybridBayesNet(); -// CHECK_EQUAL(4, actualBayesNet1.size()); -// EXPECT_LONGS_EQUAL(2, actualBayesNet1.atGaussian(0)->nrComponents()); -// EXPECT_LONGS_EQUAL(4, actualBayesNet1.atGaussian(1)->nrComponents()); -// EXPECT_LONGS_EQUAL(8, actualBayesNet1.atGaussian(2)->nrComponents()); -// EXPECT_LONGS_EQUAL(5, actualBayesNet1.atGaussian(3)->nrComponents()); + // Check if we have a bayes tree with 4 hybrid nodes, + // each with 2, 4, 8, and 5 (pruned) leaves respetively. 
+ EXPECT_LONGS_EQUAL(4, incrementalHybrid.size()); + EXPECT_LONGS_EQUAL( + 2, incrementalHybrid[X(1)]->conditional()->asMixture()->nrComponents()); + EXPECT_LONGS_EQUAL( + 4, incrementalHybrid[X(2)]->conditional()->asMixture()->nrComponents()); + EXPECT_LONGS_EQUAL( + 5, incrementalHybrid[X(3)]->conditional()->asMixture()->nrComponents()); + EXPECT_LONGS_EQUAL( + 5, incrementalHybrid[X(4)]->conditional()->asMixture()->nrComponents()); -// /***** Run Round 2 *****/ -// HybridGaussianFactorGraph graph2; -// graph2.push_back(switching.linearizedFactorGraph.dcGraph().at(3)); -// graph2.push_back(switching.linearizedFactorGraph.gaussianGraph().at(5)); + /***** Run Round 2 *****/ + HybridGaussianFactorGraph graph2; + graph2.push_back(switching.linearizedFactorGraph.at(4)); + graph2.push_back(switching.linearizedFactorGraph.at(8)); -// Ordering ordering2; -// ordering2 += X(4); -// ordering2 += X(5); + Ordering ordering2; + ordering2 += X(4); + ordering2 += X(5); -// // Run update with pruning a second time. -// incrementalHybrid.update(graph2, ordering2, maxComponents); + // Run update with pruning a second time. + incrementalHybrid.update(graph2); + incrementalHybrid.prune(M(4), maxComponents); -// // Check if we have a bayes net with 2 hybrid nodes, -// // each with 10 (pruned), and 5 (pruned) leaves respetively. -// auto actualBayesNet = incrementalHybrid.hybridBayesNet(); -// CHECK_EQUAL(2, actualBayesNet.size()); -// EXPECT_LONGS_EQUAL(10, actualBayesNet.atGaussian(0)->nrComponents()); -// EXPECT_LONGS_EQUAL(5, actualBayesNet.atGaussian(1)->nrComponents()); + // Check if we have a bayes tree with pruned hybrid nodes, + // with 5 (pruned) leaves. + CHECK_EQUAL(5, incrementalHybrid.size()); + EXPECT_LONGS_EQUAL( + 5, incrementalHybrid[X(4)]->conditional()->asMixture()->nrComponents()); + EXPECT_LONGS_EQUAL( + 5, incrementalHybrid[X(5)]->conditional()->asMixture()->nrComponents()); } /* ************************************************************************/ // Test for figuring out the optimal ordering to ensure we get // a discrete graph after elimination. -TEST(IncrementalHybrid, NonTrivial) { -// // This is a GTSAM-only test for running inference on a single legged -// robot. -// // The leg links are represented by the chain X-Y-Z-W, where X is the base -// and -// // W is the foot. -// // We use BetweenFactor as constraints between each of the poses. +TEST(HybridGaussianISAM, NonTrivial) { + // This is a GTSAM-only test for running inference on a single legged robot. + // The leg links are represented by the chain X-Y-Z-W, where X is the base and + // W is the foot. + // We use BetweenFactor as constraints between each of the poses. -// /*************** Run Round 1 ***************/ -// NonlinearHybridFactorGraph fg; + /*************** Run Round 1 ***************/ + HybridNonlinearFactorGraph fg; -// // Add a prior on pose x1 at the origin. -// // A prior factor consists of a mean and -// // a noise model (covariance matrix) -// Pose2 prior(0.0, 0.0, 0.0); // prior mean is at origin -// auto priorNoise = noiseModel::Diagonal::Sigmas( -// Vector3(0.3, 0.3, 0.1)); // 30cm std on x,y, 0.1 rad on theta -// fg.emplace_nonlinear>(X(0), prior, priorNoise); + // // Add a prior on pose x1 at the origin. 
+ // // A prior factor consists of a mean and + // // a noise model (covariance matrix) + // Pose2 prior(0.0, 0.0, 0.0); // prior mean is at origin + // auto priorNoise = noiseModel::Diagonal::Sigmas( + // Vector3(0.3, 0.3, 0.1)); // 30cm std on x,y, 0.1 rad on theta + // fg.emplace_nonlinear>(X(0), prior, priorNoise); -// // create a noise model for the landmark measurements -// auto poseNoise = noiseModel::Isotropic::Sigma(3, 0.1); + // // create a noise model for the landmark measurements + // auto poseNoise = noiseModel::Isotropic::Sigma(3, 0.1); -// // We model a robot's single leg as X - Y - Z - W -// // where X is the base link and W is the foot link. + // // We model a robot's single leg as X - Y - Z - W + // // where X is the base link and W is the foot link. -// // Add connecting poses similar to PoseFactors in GTD -// fg.emplace_nonlinear>(X(0), Y(0), Pose2(0, 1.0, 0), -// poseNoise); -// fg.emplace_nonlinear>(Y(0), Z(0), Pose2(0, 1.0, 0), -// poseNoise); -// fg.emplace_nonlinear>(Z(0), W(0), Pose2(0, 1.0, 0), -// poseNoise); + // // Add connecting poses similar to PoseFactors in GTD + // fg.emplace_nonlinear>(X(0), Y(0), Pose2(0, 1.0, 0), + // poseNoise); + // fg.emplace_nonlinear>(Y(0), Z(0), Pose2(0, 1.0, 0), + // poseNoise); + // fg.emplace_nonlinear>(Z(0), W(0), Pose2(0, 1.0, 0), + // poseNoise); -// // Create initial estimate -// Values initial; -// initial.insert(X(0), Pose2(0.0, 0.0, 0.0)); -// initial.insert(Y(0), Pose2(0.0, 1.0, 0.0)); -// initial.insert(Z(0), Pose2(0.0, 2.0, 0.0)); -// initial.insert(W(0), Pose2(0.0, 3.0, 0.0)); + // // Create initial estimate + // Values initial; + // initial.insert(X(0), Pose2(0.0, 0.0, 0.0)); + // initial.insert(Y(0), Pose2(0.0, 1.0, 0.0)); + // initial.insert(Z(0), Pose2(0.0, 2.0, 0.0)); + // initial.insert(W(0), Pose2(0.0, 3.0, 0.0)); -// HybridGaussianFactorGraph gfg = fg.linearize(initial); -// fg = NonlinearHybridFactorGraph(); + // HybridGaussianFactorGraph gfg = fg.linearize(initial); + // fg = HybridNonlinearFactorGraph(); -// IncrementalHybrid inc; + // HybridGaussianISAM inc; -// // Regular ordering going up the chain. -// Ordering ordering; -// ordering += W(0); -// ordering += Z(0); -// ordering += Y(0); -// ordering += X(0); + // // Regular ordering going up the chain. + // Ordering ordering; + // ordering += W(0); + // ordering += Z(0); + // ordering += Y(0); + // ordering += X(0); -// // Update without pruning -// // The result is a HybridBayesNet with no discrete variables -// // (equivalent to a GaussianBayesNet). -// // Factorization is: -// // `P(X | measurements) = P(W0|Z0) P(Z0|Y0) P(Y0|X0) P(X0)` -// inc.update(gfg, ordering); + // // Update without pruning + // // The result is a HybridBayesNet with no discrete variables + // // (equivalent to a GaussianBayesNet). + // // Factorization is: + // // `P(X | measurements) = P(W0|Z0) P(Z0|Y0) P(Y0|X0) P(X0)` + // inc.update(gfg, ordering); -// /*************** Run Round 2 ***************/ -// using PlanarMotionModel = BetweenFactor; + // /*************** Run Round 2 ***************/ + // using PlanarMotionModel = BetweenFactor; -// // Add odometry factor with discrete modes. 
-// Pose2 odometry(1.0, 0.0, 0.0); -// KeyVector contKeys = {W(0), W(1)}; -// auto noise_model = noiseModel::Isotropic::Sigma(3, 1.0); -// auto still = boost::make_shared(W(0), W(1), Pose2(0, 0, -// 0), -// noise_model), -// moving = boost::make_shared(W(0), W(1), odometry, -// noise_model); -// std::vector components = {moving, still}; -// auto dcFactor = boost::make_shared>( -// contKeys, DiscreteKeys{gtsam::DiscreteKey(M(1), 2)}, components); -// fg.push_back(dcFactor); + // // Add odometry factor with discrete modes. + // Pose2 odometry(1.0, 0.0, 0.0); + // KeyVector contKeys = {W(0), W(1)}; + // auto noise_model = noiseModel::Isotropic::Sigma(3, 1.0); + // auto still = boost::make_shared(W(0), W(1), Pose2(0, + // 0, 0), + // noise_model), + // moving = boost::make_shared(W(0), W(1), odometry, + // noise_model); + // std::vector components = {moving, still}; + // auto dcFactor = boost::make_shared>( + // contKeys, DiscreteKeys{gtsam::DiscreteKey(M(1), 2)}, components); + // fg.push_back(dcFactor); -// // Add equivalent of ImuFactor -// fg.emplace_nonlinear>(X(0), X(1), Pose2(1.0, 0.0, 0), -// poseNoise); -// // PoseFactors-like at k=1 -// fg.emplace_nonlinear>(X(1), Y(1), Pose2(0, 1, 0), -// poseNoise); -// fg.emplace_nonlinear>(Y(1), Z(1), Pose2(0, 1, 0), -// poseNoise); -// fg.emplace_nonlinear>(Z(1), W(1), Pose2(-1, 1, 0), -// poseNoise); + // // Add equivalent of ImuFactor + // fg.emplace_nonlinear>(X(0), X(1), Pose2(1.0, 0.0, + // 0), + // poseNoise); + // // PoseFactors-like at k=1 + // fg.emplace_nonlinear>(X(1), Y(1), Pose2(0, 1, 0), + // poseNoise); + // fg.emplace_nonlinear>(Y(1), Z(1), Pose2(0, 1, 0), + // poseNoise); + // fg.emplace_nonlinear>(Z(1), W(1), Pose2(-1, 1, 0), + // poseNoise); -// initial.insert(X(1), Pose2(1.0, 0.0, 0.0)); -// initial.insert(Y(1), Pose2(1.0, 1.0, 0.0)); -// initial.insert(Z(1), Pose2(1.0, 2.0, 0.0)); -// // The leg link did not move so we set the expected pose accordingly. -// initial.insert(W(1), Pose2(0.0, 3.0, 0.0)); + // initial.insert(X(1), Pose2(1.0, 0.0, 0.0)); + // initial.insert(Y(1), Pose2(1.0, 1.0, 0.0)); + // initial.insert(Z(1), Pose2(1.0, 2.0, 0.0)); + // // The leg link did not move so we set the expected pose accordingly. + // initial.insert(W(1), Pose2(0.0, 3.0, 0.0)); -// // Ordering for k=1. -// // This ordering follows the intuition that we eliminate the previous -// // timestep, and then the current timestep. -// ordering = Ordering(); -// ordering += W(0); -// ordering += Z(0); -// ordering += Y(0); -// ordering += X(0); -// ordering += W(1); -// ordering += Z(1); -// ordering += Y(1); -// ordering += X(1); + // // Ordering for k=1. + // // This ordering follows the intuition that we eliminate the previous + // // timestep, and then the current timestep. + // ordering = Ordering(); + // ordering += W(0); + // ordering += Z(0); + // ordering += Y(0); + // ordering += X(0); + // ordering += W(1); + // ordering += Z(1); + // ordering += Y(1); + // ordering += X(1); -// gfg = fg.linearize(initial); -// fg = NonlinearHybridFactorGraph(); + // gfg = fg.linearize(initial); + // fg = HybridNonlinearFactorGraph(); -// // Update without pruning -// // The result is a HybridBayesNet with 1 discrete variable M(1). -// // P(X | measurements) = P(W0|Z0, W1, M1) P(Z0|Y0, W1, M1) P(Y0|X0, W1, M1) -// // P(X0 | X1, W1, M1) P(W1|Z1, X1, M1) P(Z1|Y1, X1, -// M1) -// // P(Y1 | X1, M1)P(X1 | M1)P(M1) -// // The MHS tree is a 2 level tree for time indices (0, 1) with 2 leaves. 
-// inc.update(gfg, ordering); + // // Update without pruning + // // The result is a HybridBayesNet with 1 discrete variable M(1). + // // P(X | measurements) = P(W0|Z0, W1, M1) P(Z0|Y0, W1, M1) P(Y0|X0, W1, + // M1) + // // P(X0 | X1, W1, M1) P(W1|Z1, X1, M1) P(Z1|Y1, X1, + // M1) + // // P(Y1 | X1, M1)P(X1 | M1)P(M1) + // // The MHS tree is a 2 level tree for time indices (0, 1) with 2 leaves. + // inc.update(gfg, ordering); -// /*************** Run Round 3 ***************/ -// // Add odometry factor with discrete modes. -// contKeys = {W(1), W(2)}; -// still = boost::make_shared(W(1), W(2), Pose2(0, 0, 0), -// noise_model); -// moving = -// boost::make_shared(W(1), W(2), odometry, -// noise_model); -// components = {moving, still}; -// dcFactor = boost::make_shared>( -// contKeys, DiscreteKeys{gtsam::DiscreteKey(M(2), 2)}, components); -// fg.push_back(dcFactor); + // /*************** Run Round 3 ***************/ + // // Add odometry factor with discrete modes. + // contKeys = {W(1), W(2)}; + // still = boost::make_shared(W(1), W(2), Pose2(0, 0, 0), + // noise_model); + // moving = + // boost::make_shared(W(1), W(2), odometry, + // noise_model); + // components = {moving, still}; + // dcFactor = boost::make_shared>( + // contKeys, DiscreteKeys{gtsam::DiscreteKey(M(2), 2)}, components); + // fg.push_back(dcFactor); -// // Add equivalent of ImuFactor -// fg.emplace_nonlinear>(X(1), X(2), Pose2(1.0, 0.0, 0), -// poseNoise); -// // PoseFactors-like at k=1 -// fg.emplace_nonlinear>(X(2), Y(2), Pose2(0, 1, 0), -// poseNoise); -// fg.emplace_nonlinear>(Y(2), Z(2), Pose2(0, 1, 0), -// poseNoise); -// fg.emplace_nonlinear>(Z(2), W(2), Pose2(-2, 1, 0), -// poseNoise); + // // Add equivalent of ImuFactor + // fg.emplace_nonlinear>(X(1), X(2), Pose2(1.0, 0.0, + // 0), + // poseNoise); + // // PoseFactors-like at k=1 + // fg.emplace_nonlinear>(X(2), Y(2), Pose2(0, 1, 0), + // poseNoise); + // fg.emplace_nonlinear>(Y(2), Z(2), Pose2(0, 1, 0), + // poseNoise); + // fg.emplace_nonlinear>(Z(2), W(2), Pose2(-2, 1, 0), + // poseNoise); -// initial.insert(X(2), Pose2(2.0, 0.0, 0.0)); -// initial.insert(Y(2), Pose2(2.0, 1.0, 0.0)); -// initial.insert(Z(2), Pose2(2.0, 2.0, 0.0)); -// initial.insert(W(2), Pose2(0.0, 3.0, 0.0)); + // initial.insert(X(2), Pose2(2.0, 0.0, 0.0)); + // initial.insert(Y(2), Pose2(2.0, 1.0, 0.0)); + // initial.insert(Z(2), Pose2(2.0, 2.0, 0.0)); + // initial.insert(W(2), Pose2(0.0, 3.0, 0.0)); -// // Ordering at k=2. Same intuition as before. -// ordering = Ordering(); -// ordering += W(1); -// ordering += Z(1); -// ordering += Y(1); -// ordering += X(1); -// ordering += W(2); -// ordering += Z(2); -// ordering += Y(2); -// ordering += X(2); + // // Ordering at k=2. Same intuition as before. + // ordering = Ordering(); + // ordering += W(1); + // ordering += Z(1); + // ordering += Y(1); + // ordering += X(1); + // ordering += W(2); + // ordering += Z(2); + // ordering += Y(2); + // ordering += X(2); -// gfg = fg.linearize(initial); -// fg = NonlinearHybridFactorGraph(); + // gfg = fg.linearize(initial); + // fg = HybridNonlinearFactorGraph(); -// // Now we prune! -// // P(X | measurements) = P(W0|Z0, W1, M1) P(Z0|Y0, W1, M1) P(Y0|X0, W1, M1) -// P(X0 | X1, W1, M1) -// // P(W1|W2, Z1, X1, M1, M2) P(Z1| W2, Y1, X1, M1, M2) -// P(Y1 | W2, X1, M1, M2) -// // P(X1 | W2, X2, M1, M2) P(W2|Z2, X2, M1, M2) -// P(Z2|Y2, X2, M1, M2) -// // P(Y2 | X2, M1, M2)P(X2 | M1, M2) P(M1, M2) -// // The MHS at this point should be a 3 level tree on (0, 1, 2). 
-// // 0 has 2 choices, 1 has 4 choices and 2 has 4 choices. -// inc.update(gfg, ordering, 2); + // // Now we prune! + // // P(X | measurements) = P(W0|Z0, W1, M1) P(Z0|Y0, W1, M1) P(Y0|X0, W1, + // M1) P(X0 | X1, W1, M1) + // // P(W1|W2, Z1, X1, M1, M2) P(Z1| W2, Y1, X1, M1, + // M2) P(Y1 | W2, X1, M1, M2) + // // P(X1 | W2, X2, M1, M2) P(W2|Z2, X2, M1, M2) + // P(Z2|Y2, X2, M1, M2) + // // P(Y2 | X2, M1, M2)P(X2 | M1, M2) P(M1, M2) + // // The MHS at this point should be a 3 level tree on (0, 1, 2). + // // 0 has 2 choices, 1 has 4 choices and 2 has 4 choices. + // inc.update(gfg, ordering, 2); -// /*************** Run Round 4 ***************/ -// // Add odometry factor with discrete modes. -// contKeys = {W(2), W(3)}; -// still = boost::make_shared(W(2), W(3), Pose2(0, 0, 0), -// noise_model); -// moving = -// boost::make_shared(W(2), W(3), odometry, -// noise_model); -// components = {moving, still}; -// dcFactor = boost::make_shared>( -// contKeys, DiscreteKeys{gtsam::DiscreteKey(M(3), 2)}, components); -// fg.push_back(dcFactor); + // /*************** Run Round 4 ***************/ + // // Add odometry factor with discrete modes. + // contKeys = {W(2), W(3)}; + // still = boost::make_shared(W(2), W(3), Pose2(0, 0, 0), + // noise_model); + // moving = + // boost::make_shared(W(2), W(3), odometry, + // noise_model); + // components = {moving, still}; + // dcFactor = boost::make_shared>( + // contKeys, DiscreteKeys{gtsam::DiscreteKey(M(3), 2)}, components); + // fg.push_back(dcFactor); -// // Add equivalent of ImuFactor -// fg.emplace_nonlinear>(X(2), X(3), Pose2(1.0, 0.0, 0), -// poseNoise); -// // PoseFactors-like at k=3 -// fg.emplace_nonlinear>(X(3), Y(3), Pose2(0, 1, 0), -// poseNoise); -// fg.emplace_nonlinear>(Y(3), Z(3), Pose2(0, 1, 0), -// poseNoise); -// fg.emplace_nonlinear>(Z(3), W(3), Pose2(-3, 1, 0), -// poseNoise); + // // Add equivalent of ImuFactor + // fg.emplace_nonlinear>(X(2), X(3), Pose2(1.0, 0.0, + // 0), + // poseNoise); + // // PoseFactors-like at k=3 + // fg.emplace_nonlinear>(X(3), Y(3), Pose2(0, 1, 0), + // poseNoise); + // fg.emplace_nonlinear>(Y(3), Z(3), Pose2(0, 1, 0), + // poseNoise); + // fg.emplace_nonlinear>(Z(3), W(3), Pose2(-3, 1, 0), + // poseNoise); -// initial.insert(X(3), Pose2(3.0, 0.0, 0.0)); -// initial.insert(Y(3), Pose2(3.0, 1.0, 0.0)); -// initial.insert(Z(3), Pose2(3.0, 2.0, 0.0)); -// initial.insert(W(3), Pose2(0.0, 3.0, 0.0)); + // initial.insert(X(3), Pose2(3.0, 0.0, 0.0)); + // initial.insert(Y(3), Pose2(3.0, 1.0, 0.0)); + // initial.insert(Z(3), Pose2(3.0, 2.0, 0.0)); + // initial.insert(W(3), Pose2(0.0, 3.0, 0.0)); -// // Ordering at k=3. Same intuition as before. -// ordering = Ordering(); -// ordering += W(2); -// ordering += Z(2); -// ordering += Y(2); -// ordering += X(2); -// ordering += W(3); -// ordering += Z(3); -// ordering += Y(3); -// ordering += X(3); + // // Ordering at k=3. Same intuition as before. + // ordering = Ordering(); + // ordering += W(2); + // ordering += Z(2); + // ordering += Y(2); + // ordering += X(2); + // ordering += W(3); + // ordering += Z(3); + // ordering += Y(3); + // ordering += X(3); -// gfg = fg.linearize(initial); -// fg = NonlinearHybridFactorGraph(); + // gfg = fg.linearize(initial); + // fg = HybridNonlinearFactorGraph(); -// // Keep pruning! -// inc.update(gfg, ordering, 3); + // // Keep pruning! + // inc.update(gfg, ordering, 3); -// // The final discrete graph should not be empty since we have eliminated -// // all continuous variables. 
-// EXPECT(!inc.remainingDiscreteGraph().empty()); + // // The final discrete graph should not be empty since we have eliminated + // // all continuous variables. + // EXPECT(!inc.remainingDiscreteGraph().empty()); -// // Test if the optimal discrete mode assignment is (1, 1, 1). -// DiscreteValues optimal_assignment = -// inc.remainingDiscreteGraph().optimize(); DiscreteValues -// expected_assignment; expected_assignment[M(1)] = 1; -// expected_assignment[M(2)] = 1; -// expected_assignment[M(3)] = 1; -// EXPECT(assert_equal(expected_assignment, optimal_assignment)); + // // Test if the optimal discrete mode assignment is (1, 1, 1). + // DiscreteValues optimal_assignment = + // inc.remainingDiscreteGraph().optimize(); DiscreteValues + // expected_assignment; expected_assignment[M(1)] = 1; + // expected_assignment[M(2)] = 1; + // expected_assignment[M(3)] = 1; + // EXPECT(assert_equal(expected_assignment, optimal_assignment)); -// // Test if pruning worked correctly by checking that we only have 3 leaves -// in -// // the last node. -// auto lastConditional = boost::dynamic_pointer_cast( -// inc.hybridBayesNet().at(inc.hybridBayesNet().size() - 1)); -// EXPECT_LONGS_EQUAL(3, lastConditional->nrComponents()); + // // Test if pruning worked correctly by checking that we only have 3 + // leaves in + // // the last node. + // auto lastConditional = boost::dynamic_pointer_cast( + // inc.hybridBayesNet().at(inc.hybridBayesNet().size() - 1)); + // EXPECT_LONGS_EQUAL(3, lastConditional->nrComponents()); } /* ************************************************************************* */ From 83b8103db30e6e3426f542563021ebadb63db5e8 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Wed, 17 Aug 2022 16:28:47 -0400 Subject: [PATCH 4/9] last test to get running --- gtsam/hybrid/HybridGaussianISAM.cpp | 2 - gtsam/hybrid/tests/testHybridIncremental.cpp | 370 +++++++++---------- 2 files changed, 183 insertions(+), 189 deletions(-) diff --git a/gtsam/hybrid/HybridGaussianISAM.cpp b/gtsam/hybrid/HybridGaussianISAM.cpp index df7803722..afcda8c20 100644 --- a/gtsam/hybrid/HybridGaussianISAM.cpp +++ b/gtsam/hybrid/HybridGaussianISAM.cpp @@ -75,8 +75,6 @@ void HybridGaussianISAM::updateInternal( std::copy(allDiscrete.begin(), allDiscrete.end(), std::back_inserter(newKeysDiscreteLast)); - // KeyVector new - // Get an ordering where the new keys are eliminated last const VariableIndex index(factors); Ordering elimination_ordering; diff --git a/gtsam/hybrid/tests/testHybridIncremental.cpp b/gtsam/hybrid/tests/testHybridIncremental.cpp index afd0d0a70..8feab5221 100644 --- a/gtsam/hybrid/tests/testHybridIncremental.cpp +++ b/gtsam/hybrid/tests/testHybridIncremental.cpp @@ -393,221 +393,217 @@ TEST(HybridGaussianISAM, NonTrivial) { /*************** Run Round 1 ***************/ HybridNonlinearFactorGraph fg; - // // Add a prior on pose x1 at the origin. - // // A prior factor consists of a mean and - // // a noise model (covariance matrix) - // Pose2 prior(0.0, 0.0, 0.0); // prior mean is at origin - // auto priorNoise = noiseModel::Diagonal::Sigmas( - // Vector3(0.3, 0.3, 0.1)); // 30cm std on x,y, 0.1 rad on theta - // fg.emplace_nonlinear>(X(0), prior, priorNoise); + // Add a prior on pose x1 at the origin. 
+ // A prior factor consists of a mean and + // a noise model (covariance matrix) + Pose2 prior(0.0, 0.0, 0.0); // prior mean is at origin + auto priorNoise = noiseModel::Diagonal::Sigmas( + Vector3(0.3, 0.3, 0.1)); // 30cm std on x,y, 0.1 rad on theta + fg.emplace_nonlinear>(X(0), prior, priorNoise); - // // create a noise model for the landmark measurements - // auto poseNoise = noiseModel::Isotropic::Sigma(3, 0.1); + // create a noise model for the landmark measurements + auto poseNoise = noiseModel::Isotropic::Sigma(3, 0.1); - // // We model a robot's single leg as X - Y - Z - W - // // where X is the base link and W is the foot link. + // We model a robot's single leg as X - Y - Z - W + // where X is the base link and W is the foot link. - // // Add connecting poses similar to PoseFactors in GTD - // fg.emplace_nonlinear>(X(0), Y(0), Pose2(0, 1.0, 0), - // poseNoise); - // fg.emplace_nonlinear>(Y(0), Z(0), Pose2(0, 1.0, 0), - // poseNoise); - // fg.emplace_nonlinear>(Z(0), W(0), Pose2(0, 1.0, 0), - // poseNoise); + // Add connecting poses similar to PoseFactors in GTD + fg.emplace_nonlinear>(X(0), Y(0), Pose2(0, 1.0, 0), + poseNoise); + fg.emplace_nonlinear>(Y(0), Z(0), Pose2(0, 1.0, 0), + poseNoise); + fg.emplace_nonlinear>(Z(0), W(0), Pose2(0, 1.0, 0), + poseNoise); - // // Create initial estimate - // Values initial; - // initial.insert(X(0), Pose2(0.0, 0.0, 0.0)); - // initial.insert(Y(0), Pose2(0.0, 1.0, 0.0)); - // initial.insert(Z(0), Pose2(0.0, 2.0, 0.0)); - // initial.insert(W(0), Pose2(0.0, 3.0, 0.0)); + // Create initial estimate + Values initial; + initial.insert(X(0), Pose2(0.0, 0.0, 0.0)); + initial.insert(Y(0), Pose2(0.0, 1.0, 0.0)); + initial.insert(Z(0), Pose2(0.0, 2.0, 0.0)); + initial.insert(W(0), Pose2(0.0, 3.0, 0.0)); - // HybridGaussianFactorGraph gfg = fg.linearize(initial); - // fg = HybridNonlinearFactorGraph(); + HybridGaussianFactorGraph gfg = fg.linearize(initial); + fg = HybridNonlinearFactorGraph(); - // HybridGaussianISAM inc; + HybridGaussianISAM inc; - // // Regular ordering going up the chain. - // Ordering ordering; - // ordering += W(0); - // ordering += Z(0); - // ordering += Y(0); - // ordering += X(0); + // Regular ordering going up the chain. + Ordering ordering; + ordering += W(0); + ordering += Z(0); + ordering += Y(0); + ordering += X(0); - // // Update without pruning - // // The result is a HybridBayesNet with no discrete variables - // // (equivalent to a GaussianBayesNet). - // // Factorization is: - // // `P(X | measurements) = P(W0|Z0) P(Z0|Y0) P(Y0|X0) P(X0)` - // inc.update(gfg, ordering); + // Update without pruning + // The result is a HybridBayesNet with no discrete variables + // (equivalent to a GaussianBayesNet). + // Factorization is: + // `P(X | measurements) = P(W0|Z0) P(Z0|Y0) P(Y0|X0) P(X0)` + // TODO(Varun) ClusterTree-inst.h L202 segfaults with custom ordering. + inc.update(gfg); - // /*************** Run Round 2 ***************/ - // using PlanarMotionModel = BetweenFactor; + /*************** Run Round 2 ***************/ + using PlanarMotionModel = BetweenFactor; - // // Add odometry factor with discrete modes. 
- // Pose2 odometry(1.0, 0.0, 0.0); - // KeyVector contKeys = {W(0), W(1)}; - // auto noise_model = noiseModel::Isotropic::Sigma(3, 1.0); - // auto still = boost::make_shared(W(0), W(1), Pose2(0, - // 0, 0), - // noise_model), - // moving = boost::make_shared(W(0), W(1), odometry, - // noise_model); - // std::vector components = {moving, still}; - // auto dcFactor = boost::make_shared>( - // contKeys, DiscreteKeys{gtsam::DiscreteKey(M(1), 2)}, components); - // fg.push_back(dcFactor); + // Add odometry factor with discrete modes. + Pose2 odometry(1.0, 0.0, 0.0); + KeyVector contKeys = {W(0), W(1)}; + auto noise_model = noiseModel::Isotropic::Sigma(3, 1.0); + auto still = boost::make_shared(W(0), W(1), Pose2(0, 0, 0), + noise_model), + moving = boost::make_shared(W(0), W(1), odometry, + noise_model); + std::vector components = {moving, still}; + auto mixtureFactor = boost::make_shared( + contKeys, DiscreteKeys{gtsam::DiscreteKey(M(1), 2)}, components); + fg.push_back(mixtureFactor); - // // Add equivalent of ImuFactor - // fg.emplace_nonlinear>(X(0), X(1), Pose2(1.0, 0.0, - // 0), - // poseNoise); - // // PoseFactors-like at k=1 - // fg.emplace_nonlinear>(X(1), Y(1), Pose2(0, 1, 0), - // poseNoise); - // fg.emplace_nonlinear>(Y(1), Z(1), Pose2(0, 1, 0), - // poseNoise); - // fg.emplace_nonlinear>(Z(1), W(1), Pose2(-1, 1, 0), - // poseNoise); + // Add equivalent of ImuFactor + fg.emplace_nonlinear>(X(0), X(1), Pose2(1.0, 0.0, 0), + poseNoise); + // PoseFactors-like at k=1 + fg.emplace_nonlinear>(X(1), Y(1), Pose2(0, 1, 0), + poseNoise); + fg.emplace_nonlinear>(Y(1), Z(1), Pose2(0, 1, 0), + poseNoise); + fg.emplace_nonlinear>(Z(1), W(1), Pose2(-1, 1, 0), + poseNoise); - // initial.insert(X(1), Pose2(1.0, 0.0, 0.0)); - // initial.insert(Y(1), Pose2(1.0, 1.0, 0.0)); - // initial.insert(Z(1), Pose2(1.0, 2.0, 0.0)); - // // The leg link did not move so we set the expected pose accordingly. - // initial.insert(W(1), Pose2(0.0, 3.0, 0.0)); + initial.insert(X(1), Pose2(1.0, 0.0, 0.0)); + initial.insert(Y(1), Pose2(1.0, 1.0, 0.0)); + initial.insert(Z(1), Pose2(1.0, 2.0, 0.0)); + // The leg link did not move so we set the expected pose accordingly. + initial.insert(W(1), Pose2(0.0, 3.0, 0.0)); - // // Ordering for k=1. - // // This ordering follows the intuition that we eliminate the previous - // // timestep, and then the current timestep. - // ordering = Ordering(); - // ordering += W(0); - // ordering += Z(0); - // ordering += Y(0); - // ordering += X(0); - // ordering += W(1); - // ordering += Z(1); - // ordering += Y(1); - // ordering += X(1); + gfg = fg.linearize(initial); + fg = HybridNonlinearFactorGraph(); - // gfg = fg.linearize(initial); - // fg = HybridNonlinearFactorGraph(); + // Ordering for k=1. + // This ordering follows the intuition that we eliminate the previous + // timestep, and then the current timestep. + ordering = Ordering(); + ordering += W(0); + ordering += Z(0); + ordering += Y(0); + ordering += X(0); + ordering += W(1); + ordering += Z(1); + ordering += Y(1); + ordering += X(1); - // // Update without pruning - // // The result is a HybridBayesNet with 1 discrete variable M(1). - // // P(X | measurements) = P(W0|Z0, W1, M1) P(Z0|Y0, W1, M1) P(Y0|X0, W1, - // M1) - // // P(X0 | X1, W1, M1) P(W1|Z1, X1, M1) P(Z1|Y1, X1, - // M1) - // // P(Y1 | X1, M1)P(X1 | M1)P(M1) - // // The MHS tree is a 2 level tree for time indices (0, 1) with 2 leaves. - // inc.update(gfg, ordering); + // Update without pruning + // The result is a HybridBayesNet with 1 discrete variable M(1). 
+  // P(X | measurements) = P(W0|Z0, W1, M1) P(Z0|Y0, W1, M1) P(Y0|X0, W1, M1)
+  //                       P(X0 | X1, W1, M1) P(W1|Z1, X1, M1) P(Z1|Y1, X1, M1)
+  //                       P(Y1 | X1, M1)P(X1 | M1)P(M1)
+  // The MHS tree is a 1 level tree for time indices (1,) with 2 leaves.
+  // TODO(Varun) ClusterTree-inst.h L202 segfaults with custom ordering.
+  inc.update(gfg);
 
-  // /*************** Run Round 3 ***************/
-  // // Add odometry factor with discrete modes.
-  // contKeys = {W(1), W(2)};
-  // still = boost::make_shared<PlanarMotionModel>(W(1), W(2), Pose2(0, 0, 0),
-  //                                               noise_model);
-  // moving =
-  //     boost::make_shared<PlanarMotionModel>(W(1), W(2), odometry, noise_model);
-  // components = {moving, still};
-  // dcFactor = boost::make_shared<DCMixtureFactor<PlanarMotionModel>>(
-  //     contKeys, DiscreteKeys{gtsam::DiscreteKey(M(2), 2)}, components);
-  // fg.push_back(dcFactor);
+  /*************** Run Round 3 ***************/
+  // Add odometry factor with discrete modes.
+  contKeys = {W(1), W(2)};
+  still = boost::make_shared<PlanarMotionModel>(W(1), W(2), Pose2(0, 0, 0),
+                                                noise_model);
+  moving =
+      boost::make_shared<PlanarMotionModel>(W(1), W(2), odometry, noise_model);
+  components = {moving, still};
+  mixtureFactor = boost::make_shared<MixtureFactor>(
+      contKeys, DiscreteKeys{gtsam::DiscreteKey(M(2), 2)}, components);
+  fg.push_back(mixtureFactor);
 
-  // // Add equivalent of ImuFactor
-  // fg.emplace_nonlinear<BetweenFactor<Pose2>>(X(1), X(2), Pose2(1.0, 0.0, 0),
-  //                                            poseNoise);
-  // // PoseFactors-like at k=1
-  // fg.emplace_nonlinear<BetweenFactor<Pose2>>(X(2), Y(2), Pose2(0, 1, 0),
-  //                                            poseNoise);
-  // fg.emplace_nonlinear<BetweenFactor<Pose2>>(Y(2), Z(2), Pose2(0, 1, 0),
-  //                                            poseNoise);
-  // fg.emplace_nonlinear<BetweenFactor<Pose2>>(Z(2), W(2), Pose2(-2, 1, 0),
-  //                                            poseNoise);
+  // Add equivalent of ImuFactor
+  fg.emplace_nonlinear<BetweenFactor<Pose2>>(X(1), X(2), Pose2(1.0, 0.0, 0),
+                                             poseNoise);
+  // PoseFactors-like at k=2
+  fg.emplace_nonlinear<BetweenFactor<Pose2>>(X(2), Y(2), Pose2(0, 1, 0),
+                                             poseNoise);
+  fg.emplace_nonlinear<BetweenFactor<Pose2>>(Y(2), Z(2), Pose2(0, 1, 0),
+                                             poseNoise);
+  fg.emplace_nonlinear<BetweenFactor<Pose2>>(Z(2), W(2), Pose2(-2, 1, 0),
+                                             poseNoise);
 
-  // initial.insert(X(2), Pose2(2.0, 0.0, 0.0));
-  // initial.insert(Y(2), Pose2(2.0, 1.0, 0.0));
-  // initial.insert(Z(2), Pose2(2.0, 2.0, 0.0));
-  // initial.insert(W(2), Pose2(0.0, 3.0, 0.0));
+  initial.insert(X(2), Pose2(2.0, 0.0, 0.0));
+  initial.insert(Y(2), Pose2(2.0, 1.0, 0.0));
+  initial.insert(Z(2), Pose2(2.0, 2.0, 0.0));
+  initial.insert(W(2), Pose2(0.0, 3.0, 0.0));
 
-  // // Ordering at k=2. Same intuition as before.
-  // ordering = Ordering();
-  // ordering += W(1);
-  // ordering += Z(1);
-  // ordering += Y(1);
-  // ordering += X(1);
-  // ordering += W(2);
-  // ordering += Z(2);
-  // ordering += Y(2);
-  // ordering += X(2);
+  // Ordering at k=2. Same intuition as before.
+  ordering = Ordering();
+  ordering += W(1);
+  ordering += Z(1);
+  ordering += Y(1);
+  ordering += X(1);
+  ordering += W(2);
+  ordering += Z(2);
+  ordering += Y(2);
+  ordering += X(2);
 
-  // gfg = fg.linearize(initial);
-  // fg = HybridNonlinearFactorGraph();
+  gfg = fg.linearize(initial);
+  fg = HybridNonlinearFactorGraph();
 
-  // // Now we prune!
-  // // P(X | measurements) = P(W0|Z0, W1, M1) P(Z0|Y0, W1, M1) P(Y0|X0, W1, M1) P(X0 | X1, W1, M1)
-  // //                       P(W1|W2, Z1, X1, M1, M2) P(Z1| W2, Y1, X1, M1, M2) P(Y1 | W2, X1, M1, M2)
-  // //                       P(X1 | W2, X2, M1, M2) P(W2|Z2, X2, M1, M2) P(Z2|Y2, X2, M1, M2)
-  // //                       P(Y2 | X2, M1, M2)P(X2 | M1, M2) P(M1, M2)
-  // // The MHS at this point should be a 3 level tree on (0, 1, 2).
-  // // 0 has 2 choices, 1 has 4 choices and 2 has 4 choices.
-  // inc.update(gfg, ordering, 2);
+  // Now we prune!
+  // P(X | measurements) = P(W0|Z0, W1, M1) P(Z0|Y0, W1, M1) P(Y0|X0, W1, M1)
+  //                       P(X0 | X1, W1, M1) P(W1|W2, Z1, X1, M1, M2)
+  //                       P(Z1| W2, Y1, X1, M1, M2) P(Y1 | W2, X1, M1, M2)
+  //                       P(X1 | W2, X2, M1, M2) P(W2|Z2, X2, M1, M2)
+  //                       P(Z2|Y2, X2, M1, M2) P(Y2 | X2, M1, M2)
+  //                       P(X2 | M1, M2) P(M1, M2)
+  // The MHS at this point should be a 2 level tree on (1, 2).
+  // 1 has 2 choices, and 2 has 4 choices.
+  inc.update(gfg);
+  inc.prune(M(2), 2);
 
-  // /*************** Run Round 4 ***************/
-  // // Add odometry factor with discrete modes.
-  // contKeys = {W(2), W(3)};
-  // still = boost::make_shared<PlanarMotionModel>(W(2), W(3), Pose2(0, 0, 0),
-  //                                               noise_model);
-  // moving =
-  //     boost::make_shared<PlanarMotionModel>(W(2), W(3), odometry, noise_model);
-  // components = {moving, still};
-  // dcFactor = boost::make_shared<DCMixtureFactor<PlanarMotionModel>>(
-  //     contKeys, DiscreteKeys{gtsam::DiscreteKey(M(3), 2)}, components);
-  // fg.push_back(dcFactor);
+  /*************** Run Round 4 ***************/
+  // Add odometry factor with discrete modes.
+  contKeys = {W(2), W(3)};
+  still = boost::make_shared<PlanarMotionModel>(W(2), W(3), Pose2(0, 0, 0),
+                                                noise_model);
+  moving =
+      boost::make_shared<PlanarMotionModel>(W(2), W(3), odometry, noise_model);
+  components = {moving, still};
+  mixtureFactor = boost::make_shared<MixtureFactor>(
+      contKeys, DiscreteKeys{gtsam::DiscreteKey(M(3), 2)}, components);
+  fg.push_back(mixtureFactor);
 
-  // // Add equivalent of ImuFactor
-  // fg.emplace_nonlinear<BetweenFactor<Pose2>>(X(2), X(3), Pose2(1.0, 0.0, 0),
-  //                                            poseNoise);
-  // // PoseFactors-like at k=3
-  // fg.emplace_nonlinear<BetweenFactor<Pose2>>(X(3), Y(3), Pose2(0, 1, 0),
-  //                                            poseNoise);
-  // fg.emplace_nonlinear<BetweenFactor<Pose2>>(Y(3), Z(3), Pose2(0, 1, 0),
-  //                                            poseNoise);
-  // fg.emplace_nonlinear<BetweenFactor<Pose2>>(Z(3), W(3), Pose2(-3, 1, 0),
-  //                                            poseNoise);
+  // Add equivalent of ImuFactor
+  fg.emplace_nonlinear<BetweenFactor<Pose2>>(X(2), X(3), Pose2(1.0, 0.0, 0),
+                                             poseNoise);
+  // PoseFactors-like at k=3
+  fg.emplace_nonlinear<BetweenFactor<Pose2>>(X(3), Y(3), Pose2(0, 1, 0),
+                                             poseNoise);
+  fg.emplace_nonlinear<BetweenFactor<Pose2>>(Y(3), Z(3), Pose2(0, 1, 0),
+                                             poseNoise);
+  fg.emplace_nonlinear<BetweenFactor<Pose2>>(Z(3), W(3), Pose2(-3, 1, 0),
+                                             poseNoise);
 
-  // initial.insert(X(3), Pose2(3.0, 0.0, 0.0));
-  // initial.insert(Y(3), Pose2(3.0, 1.0, 0.0));
-  // initial.insert(Z(3), Pose2(3.0, 2.0, 0.0));
-  // initial.insert(W(3), Pose2(0.0, 3.0, 0.0));
+  initial.insert(X(3), Pose2(3.0, 0.0, 0.0));
+  initial.insert(Y(3), Pose2(3.0, 1.0, 0.0));
+  initial.insert(Z(3), Pose2(3.0, 2.0, 0.0));
+  initial.insert(W(3), Pose2(0.0, 3.0, 0.0));
 
-  // // Ordering at k=3. Same intuition as before.
-  // ordering = Ordering();
-  // ordering += W(2);
-  // ordering += Z(2);
-  // ordering += Y(2);
-  // ordering += X(2);
-  // ordering += W(3);
-  // ordering += Z(3);
-  // ordering += Y(3);
-  // ordering += X(3);
+  gfg = fg.linearize(initial);
+  fg = HybridNonlinearFactorGraph();
 
-  // gfg = fg.linearize(initial);
-  // fg = HybridNonlinearFactorGraph();
+  // Ordering at k=3. Same intuition as before.
+  ordering = Ordering();
+  ordering += W(2);
+  ordering += Z(2);
+  ordering += Y(2);
+  ordering += X(2);
+  ordering += W(3);
+  ordering += Z(3);
+  ordering += Y(3);
+  ordering += X(3);
 
-  // // Keep pruning!
-  // inc.update(gfg, ordering, 3);
+  // Keep pruning!
+  inc.update(gfg);
+  inc.prune(M(3), 3);
+  inc.print();
 
-  // // The final discrete graph should not be empty since we have eliminated
-  // // all continuous variables.
+  // The final discrete graph should not be empty since we have eliminated
+  // all continuous variables.
+  // EXPECT(!inc.remainingDiscreteGraph().empty());
 
   // // Test if the optimal discrete mode assignment is (1, 1, 1).
   // DiscreteValues optimal_assignment =
From 07f0101db7401be00917c1f2c099cd6326e3369c Mon Sep 17 00:00:00 2001
From: Varun Agrawal
Date: Sun, 21 Aug 2022 12:48:33 -0400
Subject: [PATCH 5/9] check subset rather than equality for GaussianISAM
 pruning

---
 gtsam/hybrid/HybridGaussianISAM.cpp | 18 +++++++++++++++++-
 1 file changed, 17 insertions(+), 1 deletion(-)

diff --git a/gtsam/hybrid/HybridGaussianISAM.cpp b/gtsam/hybrid/HybridGaussianISAM.cpp
index afcda8c20..23a95c021 100644
--- a/gtsam/hybrid/HybridGaussianISAM.cpp
+++ b/gtsam/hybrid/HybridGaussianISAM.cpp
@@ -105,6 +105,22 @@ void HybridGaussianISAM::update(const HybridGaussianFactorGraph& newFactors,
   this->updateInternal(newFactors, &orphans, ordering, function);
 }
 
+/* ************************************************************************* */
+/**
+ * @brief Check if `b` is a subset of `a`.
+ * Non-const since the arguments need to be sorted.
+ *
+ * @param a KeyVector
+ * @param b KeyVector
+ * @return True if the keys of b are a subset of a, else false.
+ */
+bool IsSubset(KeyVector a, KeyVector b) {
+  std::sort(a.begin(), a.end());
+  std::sort(b.begin(), b.end());
+  return std::includes(a.begin(), a.end(), b.begin(), b.end());
+}
+
+/* ************************************************************************* */
 void HybridGaussianISAM::prune(const Key& root, const size_t maxNrLeaves) {
   auto decisionTree = boost::dynamic_pointer_cast<DecisionTreeFactor>(
       this->clique(root)->conditional()->inner());
@@ -131,7 +147,7 @@ void HybridGaussianISAM::prune(const Key& root, const size_t maxNrLeaves) {
     parents.push_back(parent);
   }
 
-  if (parents == decisionTree->keys()) {
+  if (IsSubset(parents, decisionTree->keys())) {
     auto gaussianMixture = boost::dynamic_pointer_cast<GaussianMixture>(
         clique.second->conditional()->inner());
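The subset check above fires whenever the clique's parent set contains the decision
tree's keys, where the old equality test required an exact match. A self-contained
illustration of the helper's semantics (plain C++, with a stand-in KeyVector alias):

    #include <algorithm>
    #include <cassert>
    #include <vector>

    using KeyVector = std::vector<unsigned long long>;  // stand-in for gtsam::KeyVector

    bool IsSubset(KeyVector a, KeyVector b) {  // by value, so we can sort locally
      std::sort(a.begin(), a.end());
      std::sort(b.begin(), b.end());
      // std::includes requires both ranges sorted; true iff every key of b is in a.
      return std::includes(a.begin(), a.end(), b.begin(), b.end());
    }

    int main() {
      assert(IsSubset({3, 1, 2}, {2, 3}));  // subset holds regardless of order
      assert(!IsSubset({1, 2}, {2, 4}));    // fails: 4 is not in {1, 2}
      return 0;
    }
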
From 29c19ee77b5fda682fabdf110baf1a7f320c87b3 Mon Sep 17 00:00:00 2001
From: Varun Agrawal
Date: Sun, 21 Aug 2022 12:49:13 -0400
Subject: [PATCH 6/9] handle HybridConditional and explicitly set Gaussian
 Factor Graphs to empty

---
 gtsam/hybrid/HybridGaussianFactorGraph.cpp | 31 +++++++++++++++++++---
 1 file changed, 27 insertions(+), 4 deletions(-)

diff --git a/gtsam/hybrid/HybridGaussianFactorGraph.cpp b/gtsam/hybrid/HybridGaussianFactorGraph.cpp
index 55fa9a908..af381de04 100644
--- a/gtsam/hybrid/HybridGaussianFactorGraph.cpp
+++ b/gtsam/hybrid/HybridGaussianFactorGraph.cpp
@@ -96,8 +96,15 @@ GaussianMixtureFactor::Sum sumFrontals(
     }
 
   } else if (f->isContinuous()) {
-    deferredFactors.push_back(
-        boost::dynamic_pointer_cast<HybridGaussianFactor>(f)->inner());
+    // Check if f is a HybridConditional or a HybridGaussianFactor.
+    if (auto hc = boost::dynamic_pointer_cast<HybridConditional>(f)) {
+      auto conditional =
+          boost::dynamic_pointer_cast<GaussianConditional>(hc->inner());
+      deferredFactors.push_back(conditional);
+    } else if (auto gf = boost::dynamic_pointer_cast<HybridGaussianFactor>(f)
+                             ->inner()) {
+      deferredFactors.push_back(gf);
+    }
 
   } else if (f->isDiscrete()) {
     // Don't do anything for discrete-only factors
@@ -184,6 +191,19 @@ hybridElimination(const HybridGaussianFactorGraph &factors,
   // sum out frontals, this is the factor on the separator
   GaussianMixtureFactor::Sum sum = sumFrontals(factors);
 
+  // If a tree leaf contains nullptr,
+  // convert that leaf to an empty GaussianFactorGraph.
+  // Needed since the DecisionTree will otherwise create
+  // a GFG with a single (null) factor.
+  auto emptyGaussian = [](const GaussianFactorGraph &gfg) {
+    bool hasNull =
+        std::any_of(gfg.begin(), gfg.end(),
+                    [](const GaussianFactor::shared_ptr &ptr) { return !ptr; });
+
+    return hasNull ? GaussianFactorGraph() : gfg;
+  };
+  sum = GaussianMixtureFactor::Sum(sum, emptyGaussian);
+
   using EliminationPair = GaussianFactorGraph::EliminationResult;
 
   KeyVector keysOfEliminated;  // Not the ordering
 
   if (graph.empty()) {
     return {nullptr, nullptr};
   }
-  auto result = EliminatePreferCholesky(graph, frontalKeys);
+  std::pair<boost::shared_ptr<GaussianConditional>,
+            boost::shared_ptr<GaussianFactor>>
+      result = EliminatePreferCholesky(graph, frontalKeys);
+
   if (keysOfEliminated.empty()) {
     keysOfEliminated =
         result.first->keys();  // Initialize the keysOfEliminated to be the
         boost::make_shared<HybridDiscreteFactor>(discreteFactor)};
   } else {
-    // Create a resulting DCGaussianMixture on the separator.
+    // Create a resulting GaussianMixtureFactor on the separator.
     auto factor = boost::make_shared<GaussianMixtureFactor>(
         KeyVector(continuousSeparator.begin(), continuousSeparator.end()),
         discreteSeparator, separatorFactors);
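The `emptyGaussian` lambda above normalizes decision-tree leaves: a leaf holding a
null factor pointer is replaced by a genuinely empty graph, so the downstream
`graph.empty()` check works. The null-detection step in isolation, as a runnable
plain-C++ sketch (the `int` payload stands in for GaussianFactor):

    #include <algorithm>
    #include <iostream>
    #include <memory>
    #include <vector>

    int main() {
      // A "leaf" whose container holds one valid factor and one nullptr.
      std::vector<std::shared_ptr<int>> leaf = {std::make_shared<int>(1), nullptr};
      bool hasNull =
          std::any_of(leaf.begin(), leaf.end(),
                      [](const std::shared_ptr<int>& p) { return !p; });
      // Mirroring the patch: any leaf containing a null factor is replaced
      // wholesale by an empty container rather than a container of nulls.
      if (hasNull) leaf.clear();
      std::cout << leaf.size() << "\n";  // prints 0
      return 0;
    }
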
From f6df641b191ef7b1e4bd8b867c707abdc86a4549 Mon Sep 17 00:00:00 2001
From: Varun Agrawal
Date: Sun, 21 Aug 2022 12:56:14 -0400
Subject: [PATCH 7/9] remove custom orderings, let it happen automatically

---
 gtsam/hybrid/tests/testHybridIncremental.cpp | 107 ++++---------------
 1 file changed, 21 insertions(+), 86 deletions(-)

diff --git a/gtsam/hybrid/tests/testHybridIncremental.cpp b/gtsam/hybrid/tests/testHybridIncremental.cpp
index 8feab5221..4449aba0b 100644
--- a/gtsam/hybrid/tests/testHybridIncremental.cpp
+++ b/gtsam/hybrid/tests/testHybridIncremental.cpp
@@ -61,11 +61,6 @@ TEST(HybridGaussianElimination, IncrementalElimination) {
   graph1.push_back(switching.linearizedFactorGraph.at(2));  // P(X2, X3 | M2)
   graph1.push_back(switching.linearizedFactorGraph.at(5));  // P(M1)
 
-  // Create ordering.
-  Ordering ordering;
-  ordering += X(1);
-  ordering += X(2);
-
   // Run update step
   isam.update(graph1);
 
@@ -85,11 +80,6 @@ TEST(HybridGaussianElimination, IncrementalElimination) {
   graph2.push_back(switching.linearizedFactorGraph.at(4));  // P(X3)
   graph2.push_back(switching.linearizedFactorGraph.at(6));  // P(M1, M2)
 
-  // Create ordering.
-  Ordering ordering2;
-  ordering2 += X(2);
-  ordering2 += X(3);
-
   isam.update(graph2);
 
   // Check that after the second update we have
@@ -336,12 +326,6 @@ TEST(HybridGaussianElimination, Incremental_approximate) {
     graph1.push_back(switching.linearizedFactorGraph.at(i));
   }
 
-  // Create ordering.
-  Ordering ordering;
-  for (size_t j = 1; j <= 4; j++) {
-    ordering += X(j);
-  }
-
   // Run update with pruning
   size_t maxComponents = 5;
   incrementalHybrid.update(graph1);
@@ -364,10 +348,6 @@ TEST(HybridGaussianElimination, Incremental_approximate) {
   graph2.push_back(switching.linearizedFactorGraph.at(4));
   graph2.push_back(switching.linearizedFactorGraph.at(8));
 
-  Ordering ordering2;
-  ordering2 += X(4);
-  ordering2 += X(5);
-
   // Run update with pruning a second time.
   incrementalHybrid.update(graph2);
   incrementalHybrid.prune(M(4), maxComponents);
@@ -382,14 +362,11 @@ TEST(HybridGaussianElimination, Incremental_approximate) {
 }
 
 /* ************************************************************************/
-// Test for figuring out the optimal ordering to ensure we get
-// a discrete graph after elimination.
+// A GTSAM-only test for running inference on a single-legged robot.
+// The leg links are represented by the chain X-Y-Z-W, where X is the base and
+// W is the foot.
+// We use BetweenFactor<Pose2> as constraints between each of the poses.
 TEST(HybridGaussianISAM, NonTrivial) {
-  // This is a GTSAM-only test for running inference on a single legged robot.
-  // The leg links are represented by the chain X-Y-Z-W, where X is the base and
-  // W is the foot.
-  // We use BetweenFactor<Pose2> as constraints between each of the poses.
-
   /*************** Run Round 1 ***************/
   HybridNonlinearFactorGraph fg;
@@ -427,19 +404,11 @@ TEST(HybridGaussianISAM, NonTrivial) {
 
   HybridGaussianISAM inc;
 
-  // Regular ordering going up the chain.
-  Ordering ordering;
-  ordering += W(0);
-  ordering += Z(0);
-  ordering += Y(0);
-  ordering += X(0);
-
   // Update without pruning
   // The result is a HybridBayesNet with no discrete variables
   // (equivalent to a GaussianBayesNet).
   // Factorization is:
   // `P(X | measurements) = P(W0|Z0) P(Z0|Y0) P(Y0|X0) P(X0)`
-  // TODO(Varun) ClusterTree-inst.h L202 segfaults with custom ordering.
   inc.update(gfg);
 
   /*************** Run Round 2 ***************/
@@ -478,26 +447,12 @@ TEST(HybridGaussianISAM, NonTrivial) {
   gfg = fg.linearize(initial);
   fg = HybridNonlinearFactorGraph();
 
-  // Ordering for k=1.
-  // This ordering follows the intuition that we eliminate the previous
-  // timestep, and then the current timestep.
-  ordering = Ordering();
-  ordering += W(0);
-  ordering += Z(0);
-  ordering += Y(0);
-  ordering += X(0);
-  ordering += W(1);
-  ordering += Z(1);
-  ordering += Y(1);
-  ordering += X(1);
-
   // Update without pruning
   // The result is a HybridBayesNet with 1 discrete variable M(1).
   // P(X | measurements) = P(W0|Z0, W1, M1) P(Z0|Y0, W1, M1) P(Y0|X0, W1, M1)
   //                       P(X0 | X1, W1, M1) P(W1|Z1, X1, M1) P(Z1|Y1, X1, M1)
   //                       P(Y1 | X1, M1)P(X1 | M1)P(M1)
   // The MHS tree is a 1 level tree for time indices (1,) with 2 leaves.
-  // TODO(Varun) ClusterTree-inst.h L202 segfaults with custom ordering.
   inc.update(gfg);
 
   /*************** Run Round 3 ***************/
@@ -528,17 +483,6 @@ TEST(HybridGaussianISAM, NonTrivial) {
   initial.insert(Z(2), Pose2(2.0, 2.0, 0.0));
   initial.insert(W(2), Pose2(0.0, 3.0, 0.0));
 
-  // Ordering at k=2. Same intuition as before.
-  ordering = Ordering();
-  ordering += W(1);
-  ordering += Z(1);
-  ordering += Y(1);
-  ordering += X(1);
-  ordering += W(2);
-  ordering += Z(2);
-  ordering += Y(2);
-  ordering += X(2);
-
   gfg = fg.linearize(initial);
   fg = HybridNonlinearFactorGraph();
@@ -585,40 +529,31 @@ TEST(HybridGaussianISAM, NonTrivial) {
   gfg = fg.linearize(initial);
   fg = HybridNonlinearFactorGraph();
 
-  // Ordering at k=3. Same intuition as before.
-  ordering = Ordering();
-  ordering += W(2);
-  ordering += Z(2);
-  ordering += Y(2);
-  ordering += X(2);
-  ordering += W(3);
-  ordering += Z(3);
-  ordering += Y(3);
-  ordering += X(3);
-
   // Keep pruning!
   inc.update(gfg);
   inc.prune(M(3), 3);
-  inc.print();
 
   // The final discrete graph should not be empty since we have eliminated
   // all continuous variables.
-  // EXPECT(!inc.remainingDiscreteGraph().empty());
+  auto discreteTree = inc[M(3)]->conditional()->asDiscreteConditional();
+  EXPECT_LONGS_EQUAL(3, discreteTree->size());
 
-  // // Test if the optimal discrete mode assignment is (1, 1, 1).
-  // DiscreteValues optimal_assignment =
-  //     inc.remainingDiscreteGraph().optimize();
-  // DiscreteValues expected_assignment;
-  // expected_assignment[M(1)] = 1;
-  // expected_assignment[M(2)] = 1;
-  // expected_assignment[M(3)] = 1;
-  // EXPECT(assert_equal(expected_assignment, optimal_assignment));
+  // Test if the optimal discrete mode assignment is (1, 1, 1).
+  DiscreteFactorGraph discreteGraph;
+  discreteGraph.push_back(discreteTree);
+  DiscreteValues optimal_assignment = discreteGraph.optimize();
 
-  // // Test if pruning worked correctly by checking that we only have 3 leaves in
-  // // the last node.
-  // auto lastConditional = boost::dynamic_pointer_cast<GaussianMixture>(
-  //     inc.hybridBayesNet().at(inc.hybridBayesNet().size() - 1));
-  // EXPECT_LONGS_EQUAL(3, lastConditional->nrComponents());
+  DiscreteValues expected_assignment;
+  expected_assignment[M(1)] = 1;
+  expected_assignment[M(2)] = 1;
+  expected_assignment[M(3)] = 1;
+
+  EXPECT(assert_equal(expected_assignment, optimal_assignment));
+
+  // Test if pruning worked correctly by checking that we only have 3 leaves in
+  // the last node.
+  auto lastConditional = inc[X(3)]->conditional()->asMixture();
+  EXPECT_LONGS_EQUAL(3, lastConditional->nrComponents());
 }
 
 /* ************************************************************************* */
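The end of the NonTrivial test now verifies the mode sequence by optimizing a small
discrete graph built from the root conditional. That check, factored into a helper
as a hedged sketch (`modesAreAllOne` is illustrative; the GTSAM calls are the ones
used in the test above):

    #include <gtsam/discrete/DiscreteConditional.h>
    #include <gtsam/discrete/DiscreteFactorGraph.h>
    #include <gtsam/inference/Symbol.h>

    using namespace gtsam;
    using symbol_shorthand::M;

    bool modesAreAllOne(const DiscreteConditional::shared_ptr& discreteTree) {
      DiscreteFactorGraph dfg;
      dfg.push_back(discreteTree);
      // Max-product over the discrete modes, exactly as in the test.
      DiscreteValues best = dfg.optimize();
      return best[M(1)] == 1 && best[M(2)] == 1 && best[M(3)] == 1;
    }
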
From 6b792c0a4ca19b750a9c259c1dd84468786884ff Mon Sep 17 00:00:00 2001
From: Varun Agrawal
Date: Sun, 21 Aug 2022 12:56:59 -0400
Subject: [PATCH 8/9] add note about sumFrontals

---
 gtsam/hybrid/HybridGaussianFactorGraph.cpp | 13 +++++++++++++
 1 file changed, 13 insertions(+)

diff --git a/gtsam/hybrid/HybridGaussianFactorGraph.cpp b/gtsam/hybrid/HybridGaussianFactorGraph.cpp
index af381de04..b57d495d8 100644
--- a/gtsam/hybrid/HybridGaussianFactorGraph.cpp
+++ b/gtsam/hybrid/HybridGaussianFactorGraph.cpp
@@ -77,6 +77,19 @@ static GaussianMixtureFactor::Sum &addGaussian(
 }
 
 /* ************************************************************************ */
+/* Function to eliminate variables **under the following assumptions**:
+ * 1. When the ordering is fully continuous, and the graph only contains
+ * continuous and hybrid factors
+ * 2. When the ordering is fully discrete, and the graph only contains discrete
+ * factors
+ *
+ * Any usage outside of this is considered incorrect.
+ *
+ * \warning This function is not meant to be used with arbitrary hybrid factor
+ * graphs. For example, if there exist continuous parents, and one tries to
+ * eliminate a discrete variable (as specified in the ordering), the result
+ * will be INCORRECT and there will be NO error raised.
+ */
 GaussianMixtureFactor::Sum sumFrontals(
     const HybridGaussianFactorGraph &factors) {
   // sum out frontals, this is the factor on the separator
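The new comment pins down when hybrid elimination is valid: eliminate all continuous
variables first, and only then the discrete modes. A sketch of the legal call
pattern, assuming the `switching` test fixture and the partial-elimination API used
elsewhere in this series:

    // Continuous-only ordering over a graph of continuous + hybrid factors.
    Ordering continuousOrdering;
    for (size_t k = 1; k <= 3; k++) continuousOrdering += X(k);

    HybridBayesNet::shared_ptr bayesNet;
    HybridGaussianFactorGraph::shared_ptr remaining;
    std::tie(bayesNet, remaining) =
        switching.linearizedFactorGraph.eliminatePartialSequential(
            continuousOrdering);

    // Only now, with no continuous parents left, is a fully discrete
    // ordering over M(1), M(2) safe to eliminate.
    Ordering discreteOrdering;
    discreteOrdering += M(1);
    discreteOrdering += M(2);
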
From 893c5f77f827b6c21af6b04b9c2e236010e7298e Mon Sep 17 00:00:00 2001
From: Varun Agrawal
Date: Mon, 22 Aug 2022 08:29:32 -0400
Subject: [PATCH 9/9] cast to only HybridGaussianFactor

---
 gtsam/hybrid/HybridGaussianFactorGraph.cpp | 39 +++++++++------------
 1 file changed, 16 insertions(+), 23 deletions(-)

diff --git a/gtsam/hybrid/HybridGaussianFactorGraph.cpp b/gtsam/hybrid/HybridGaussianFactorGraph.cpp
index b57d495d8..c024c1255 100644
--- a/gtsam/hybrid/HybridGaussianFactorGraph.cpp
+++ b/gtsam/hybrid/HybridGaussianFactorGraph.cpp
@@ -77,19 +77,6 @@ static GaussianMixtureFactor::Sum &addGaussian(
 }
 
 /* ************************************************************************ */
-/* Function to eliminate variables **under the following assumptions**:
- * 1. When the ordering is fully continuous, and the graph only contains
- * continuous and hybrid factors
- * 2. When the ordering is fully discrete, and the graph only contains discrete
- * factors
- *
- * Any usage outside of this is considered incorrect.
- *
- * \warning This function is not meant to be used with arbitrary hybrid factor
- * graphs. For example, if there exist continuous parents, and one tries to
- * eliminate a discrete variable (as specified in the ordering), the result
- * will be INCORRECT and there will be NO error raised.
- */
 GaussianMixtureFactor::Sum sumFrontals(
     const HybridGaussianFactorGraph &factors) {
   // sum out frontals, this is the factor on the separator
@@ -109,15 +96,8 @@ GaussianMixtureFactor::Sum sumFrontals(
     }
 
   } else if (f->isContinuous()) {
-    // Check if f is a HybridConditional or a HybridGaussianFactor.
-    if (auto hc = boost::dynamic_pointer_cast<HybridConditional>(f)) {
-      auto conditional =
-          boost::dynamic_pointer_cast<GaussianConditional>(hc->inner());
-      deferredFactors.push_back(conditional);
-    } else if (auto gf = boost::dynamic_pointer_cast<HybridGaussianFactor>(f)
-                             ->inner()) {
-      deferredFactors.push_back(gf);
-    }
+    deferredFactors.push_back(
+        boost::dynamic_pointer_cast<HybridGaussianFactor>(f)->inner());
 
   } else if (f->isDiscrete()) {
     // Don't do anything for discrete-only factors
@@ -278,7 +258,20 @@ hybridElimination(const HybridGaussianFactorGraph &factors,
     return {boost::make_shared<HybridConditional>(conditional), factor};
   }
 }
-/* ************************************************************************ */
+/* ************************************************************************
+ * Function to eliminate variables **under the following assumptions**:
+ * 1. When the ordering is fully continuous, and the graph only contains
+ * continuous and hybrid factors
+ * 2. When the ordering is fully discrete, and the graph only contains discrete
+ * factors
+ *
+ * Any usage outside of this is considered incorrect.
+ *
+ * \warning This function is not meant to be used with arbitrary hybrid factor
+ * graphs. For example, if there exist continuous parents, and one tries to
+ * eliminate a discrete variable (as specified in the ordering), the result
+ * will be INCORRECT and there will be NO error raised.
+ */
std::pair<HybridConditional::shared_ptr, HybridFactor::shared_ptr>  //
EliminateHybrid(const HybridGaussianFactorGraph &factors,
                const Ordering &frontalKeys) {
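The moved comment now documents EliminateHybrid itself, the dense elimination kernel
that callers normally reach through the graph's elimination methods rather than
invoking directly. A minimal direct-call sketch, assuming only the signature shown
above (the `eliminateOne` wrapper is illustrative, not part of GTSAM):

    std::pair<HybridConditional::shared_ptr, HybridFactor::shared_ptr>
    eliminateOne(const HybridGaussianFactorGraph& graph, Key frontalKey) {
      Ordering frontal;
      frontal += frontalKey;
      // Produces P(frontal | separator) plus the induced factor on the
      // separator: hybrid, Gaussian, or discrete, depending on what was
      // eliminated and what parents remain.
      return EliminateHybrid(graph, frontal);
    }
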