From 8c10a8089e2417996af9a5119e8488761fa4d3a5 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Mon, 12 Sep 2022 19:55:38 -0400 Subject: [PATCH 1/5] return shared pointer for HybridNonlinearFactorGraph::linearize --- gtsam/hybrid/HybridNonlinearFactorGraph.cpp | 21 +++++++++---------- gtsam/hybrid/HybridNonlinearFactorGraph.h | 3 ++- gtsam/hybrid/tests/Switching.h | 2 +- gtsam/hybrid/tests/testHybridIncremental.cpp | 8 +++---- .../tests/testHybridNonlinearFactorGraph.cpp | 8 +++---- 5 files changed, 21 insertions(+), 21 deletions(-) diff --git a/gtsam/hybrid/HybridNonlinearFactorGraph.cpp b/gtsam/hybrid/HybridNonlinearFactorGraph.cpp index a4218593b..3a3bf720b 100644 --- a/gtsam/hybrid/HybridNonlinearFactorGraph.cpp +++ b/gtsam/hybrid/HybridNonlinearFactorGraph.cpp @@ -27,8 +27,7 @@ void HybridNonlinearFactorGraph::add( } /* ************************************************************************* */ -void HybridNonlinearFactorGraph::add( - boost::shared_ptr factor) { +void HybridNonlinearFactorGraph::add(boost::shared_ptr factor) { FactorGraph::add(boost::make_shared(factor)); } @@ -49,12 +48,12 @@ void HybridNonlinearFactorGraph::print(const std::string& s, } /* ************************************************************************* */ -HybridGaussianFactorGraph HybridNonlinearFactorGraph::linearize( +HybridGaussianFactorGraph::shared_ptr HybridNonlinearFactorGraph::linearize( const Values& continuousValues) const { // create an empty linear FG - HybridGaussianFactorGraph linearFG; + auto linearFG = boost::make_shared(); - linearFG.reserve(size()); + linearFG->reserve(size()); // linearize all hybrid factors for (auto&& factor : factors_) { @@ -66,9 +65,9 @@ HybridGaussianFactorGraph HybridNonlinearFactorGraph::linearize( if (factor->isHybrid()) { // Check if it is a nonlinear mixture factor if (auto nlmf = boost::dynamic_pointer_cast(factor)) { - linearFG.push_back(nlmf->linearize(continuousValues)); + linearFG->push_back(nlmf->linearize(continuousValues)); } 
else { - linearFG.push_back(factor); + linearFG->push_back(factor); } // Now check if the factor is a continuous only factor. @@ -80,18 +79,18 @@ HybridGaussianFactorGraph HybridNonlinearFactorGraph::linearize( boost::dynamic_pointer_cast(nlhf->inner())) { auto hgf = boost::make_shared( nlf->linearize(continuousValues)); - linearFG.push_back(hgf); + linearFG->push_back(hgf); } else { - linearFG.push_back(factor); + linearFG->push_back(factor); } // Finally if nothing else, we are discrete-only which doesn't need // lineariztion. } else { - linearFG.push_back(factor); + linearFG->push_back(factor); } } else { - linearFG.push_back(GaussianFactor::shared_ptr()); + linearFG->push_back(GaussianFactor::shared_ptr()); } } return linearFG; diff --git a/gtsam/hybrid/HybridNonlinearFactorGraph.h b/gtsam/hybrid/HybridNonlinearFactorGraph.h index 7a19c7755..afa410318 100644 --- a/gtsam/hybrid/HybridNonlinearFactorGraph.h +++ b/gtsam/hybrid/HybridNonlinearFactorGraph.h @@ -127,7 +127,8 @@ class GTSAM_EXPORT HybridNonlinearFactorGraph : public HybridFactorGraph { * @param continuousValues: Dictionary of continuous values. 
* @return HybridGaussianFactorGraph::shared_ptr */ - HybridGaussianFactorGraph linearize(const Values& continuousValues) const; + HybridGaussianFactorGraph::shared_ptr linearize( + const Values& continuousValues) const; }; template <> diff --git a/gtsam/hybrid/tests/Switching.h b/gtsam/hybrid/tests/Switching.h index ecd90e234..8bcb26c92 100644 --- a/gtsam/hybrid/tests/Switching.h +++ b/gtsam/hybrid/tests/Switching.h @@ -166,7 +166,7 @@ struct Switching { linearizationPoint.insert(X(k), static_cast(k)); } - linearizedFactorGraph = nonlinearFactorGraph.linearize(linearizationPoint); + linearizedFactorGraph = *nonlinearFactorGraph.linearize(linearizationPoint); } // Create motion models for a given time step diff --git a/gtsam/hybrid/tests/testHybridIncremental.cpp b/gtsam/hybrid/tests/testHybridIncremental.cpp index 4449aba0b..8e16d02b9 100644 --- a/gtsam/hybrid/tests/testHybridIncremental.cpp +++ b/gtsam/hybrid/tests/testHybridIncremental.cpp @@ -399,7 +399,7 @@ TEST(HybridGaussianISAM, NonTrivial) { initial.insert(Z(0), Pose2(0.0, 2.0, 0.0)); initial.insert(W(0), Pose2(0.0, 3.0, 0.0)); - HybridGaussianFactorGraph gfg = fg.linearize(initial); + HybridGaussianFactorGraph gfg = *fg.linearize(initial); fg = HybridNonlinearFactorGraph(); HybridGaussianISAM inc; @@ -444,7 +444,7 @@ TEST(HybridGaussianISAM, NonTrivial) { // The leg link did not move so we set the expected pose accordingly. initial.insert(W(1), Pose2(0.0, 3.0, 0.0)); - gfg = fg.linearize(initial); + gfg = *fg.linearize(initial); fg = HybridNonlinearFactorGraph(); // Update without pruning @@ -483,7 +483,7 @@ TEST(HybridGaussianISAM, NonTrivial) { initial.insert(Z(2), Pose2(2.0, 2.0, 0.0)); initial.insert(W(2), Pose2(0.0, 3.0, 0.0)); - gfg = fg.linearize(initial); + gfg = *fg.linearize(initial); fg = HybridNonlinearFactorGraph(); // Now we prune! 
@@ -526,7 +526,7 @@ TEST(HybridGaussianISAM, NonTrivial) { initial.insert(Z(3), Pose2(3.0, 2.0, 0.0)); initial.insert(W(3), Pose2(0.0, 3.0, 0.0)); - gfg = fg.linearize(initial); + gfg = *fg.linearize(initial); fg = HybridNonlinearFactorGraph(); // Keep pruning! diff --git a/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp b/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp index 6b689b4e3..9e93eaba3 100644 --- a/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp +++ b/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp @@ -60,7 +60,7 @@ TEST(HybridFactorGraph, GaussianFactorGraph) { Values linearizationPoint; linearizationPoint.insert(X(0), 0); - HybridGaussianFactorGraph ghfg = fg.linearize(linearizationPoint); + HybridGaussianFactorGraph ghfg = *fg.linearize(linearizationPoint); // Add a factor to the GaussianFactorGraph ghfg.add(JacobianFactor(X(0), I_1x1, Vector1(5))); @@ -139,7 +139,7 @@ TEST(HybridGaussianFactorGraph, Resize) { linearizationPoint.insert(X(1), 1); // Generate `HybridGaussianFactorGraph` by linearizing - HybridGaussianFactorGraph gfg = nhfg.linearize(linearizationPoint); + HybridGaussianFactorGraph gfg = *nhfg.linearize(linearizationPoint); EXPECT_LONGS_EQUAL(gfg.size(), 3); @@ -250,7 +250,7 @@ TEST(HybridFactorGraph, Linearization) { // Linearize here: HybridGaussianFactorGraph actualLinearized = - self.nonlinearFactorGraph.linearize(self.linearizationPoint); + *self.nonlinearFactorGraph.linearize(self.linearizationPoint); EXPECT_LONGS_EQUAL(7, actualLinearized.size()); } @@ -718,7 +718,7 @@ TEST(HybridFactorGraph, DefaultDecisionTree) { ordering += X(0); ordering += X(1); - HybridGaussianFactorGraph linearized = fg.linearize(initialEstimate); + HybridGaussianFactorGraph linearized = *fg.linearize(initialEstimate); gtsam::HybridBayesNet::shared_ptr hybridBayesNet; gtsam::HybridGaussianFactorGraph::shared_ptr remainingFactorGraph; From 5a7801583265f0f88e656c7d648d9a21a1d9d8aa Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Mon, 
12 Sep 2022 19:55:51 -0400 Subject: [PATCH 2/5] add push_back for containers --- gtsam/hybrid/HybridNonlinearFactorGraph.h | 17 +++++++++++++++++ 1 file changed, 17 insertions(+) diff --git a/gtsam/hybrid/HybridNonlinearFactorGraph.h b/gtsam/hybrid/HybridNonlinearFactorGraph.h index afa410318..5f04851d4 100644 --- a/gtsam/hybrid/HybridNonlinearFactorGraph.h +++ b/gtsam/hybrid/HybridNonlinearFactorGraph.h @@ -109,6 +109,23 @@ class GTSAM_EXPORT HybridNonlinearFactorGraph : public HybridFactorGraph { } } + /** + * Push back many factors as shared_ptr's in a container (factors are not + * copied) + */ + template + Base::Base::HasDerivedElementType push_back( + const CONTAINER& container) { + Base::push_back(container.begin(), container.end()); + } + + /// Push back non-pointer objects in a container (factors are copied). + template + Base::Base::HasDerivedValueType push_back( + const CONTAINER& container) { + Base::push_back(container.begin(), container.end()); + } + /// Add a nonlinear factor as a shared ptr. 
void add(boost::shared_ptr factor); From 83dd813bff53a596128de567b5dd871317e95782 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Tue, 13 Sep 2022 13:01:27 -0400 Subject: [PATCH 3/5] HybridNonlinearISAM class --- gtsam/hybrid/HybridGaussianISAM.h | 2 +- gtsam/hybrid/HybridNonlinearISAM.cpp | 111 ++++ gtsam/hybrid/HybridNonlinearISAM.h | 132 ++++ .../hybrid/tests/testHybridNonlinearISAM.cpp | 589 ++++++++++++++++++ 4 files changed, 833 insertions(+), 1 deletion(-) create mode 100644 gtsam/hybrid/HybridNonlinearISAM.cpp create mode 100644 gtsam/hybrid/HybridNonlinearISAM.h create mode 100644 gtsam/hybrid/tests/testHybridNonlinearISAM.cpp diff --git a/gtsam/hybrid/HybridGaussianISAM.h b/gtsam/hybrid/HybridGaussianISAM.h index 4fc206582..ff0978729 100644 --- a/gtsam/hybrid/HybridGaussianISAM.h +++ b/gtsam/hybrid/HybridGaussianISAM.h @@ -65,7 +65,7 @@ class GTSAM_EXPORT HybridGaussianISAM : public ISAM { HybridBayesTree::EliminationTraitsType::DefaultEliminate); /** - * @brief + * @brief Prune the underlying Bayes tree. * * @param root The root key in the discrete conditional decision tree. * @param maxNumberLeaves diff --git a/gtsam/hybrid/HybridNonlinearISAM.cpp b/gtsam/hybrid/HybridNonlinearISAM.cpp new file mode 100644 index 000000000..36cda4e80 --- /dev/null +++ b/gtsam/hybrid/HybridNonlinearISAM.cpp @@ -0,0 +1,111 @@ +/* ---------------------------------------------------------------------------- + + * GTSAM Copyright 2010, Georgia Tech Research Corporation, + * Atlanta, Georgia 30332-0415 + * All Rights Reserved + * Authors: Frank Dellaert, et al. 
(see THANKS for the full author list) + + * See LICENSE for the license information + + * -------------------------------------------------------------------------- */ + +/** + * @file HybridNonlinearISAM.cpp + * @date Sep 12, 2022 + * @author Varun Agrawal + */ + +#include +#include +#include + +#include + +using namespace std; + +namespace gtsam { + +/* ************************************************************************* */ +void HybridNonlinearISAM::saveGraph(const string& s, + const KeyFormatter& keyFormatter) const { + isam_.saveGraph(s, keyFormatter); +} + +/* ************************************************************************* */ +void HybridNonlinearISAM::update(const HybridNonlinearFactorGraph& newFactors, + const Values& initialValues) { + if (newFactors.size() > 0) { + // Reorder and relinearize every reorderInterval updates + if (reorderInterval_ > 0 && ++reorderCounter_ >= reorderInterval_) { + reorder_relinearize(); + reorderCounter_ = 0; + } + + factors_.push_back(newFactors); + + // Linearize new factors and insert them + // TODO: optimize for whole config? 
+ linPoint_.insert(initialValues); + + boost::shared_ptr linearizedNewFactors = + newFactors.linearize(linPoint_); + + // Update ISAM + isam_.update(*linearizedNewFactors, boost::none, eliminationFunction_); + } +} + +/* ************************************************************************* */ +void HybridNonlinearISAM::reorder_relinearize() { + if (factors_.size() > 0) { + // Obtain the new linearization point + const Values newLinPoint = estimate(); + + isam_.clear(); + + // Just recreate the whole BayesTree + // TODO: allow for constrained ordering here + // TODO: decouple relinearization and reordering to avoid + isam_.update(*factors_.linearize(newLinPoint), boost::none, + eliminationFunction_); + + // Update linearization point + linPoint_ = newLinPoint; + } +} + +/* ************************************************************************* */ +Values HybridNonlinearISAM::estimate() { + Values result; + if (isam_.size() > 0) { + HybridValues values = isam_.optimize(); + assignment_ = values.discrete(); + return linPoint_.retract(values.continuous()); + } else { + return linPoint_; + } +} + +// /* ************************************************************************* +// */ Matrix HybridNonlinearISAM::marginalCovariance(Key key) const { +// return isam_.marginalCovariance(key); +// } + +/* ************************************************************************* */ +void HybridNonlinearISAM::print(const string& s, + const KeyFormatter& keyFormatter) const { + cout << s << "ReorderInterval: " << reorderInterval_ + << " Current Count: " << reorderCounter_ << endl; + isam_.print("HybridGaussianISAM:\n"); + linPoint_.print("Linearization Point:\n", keyFormatter); + factors_.print("Nonlinear Graph:\n", keyFormatter); +} + +/* ************************************************************************* */ +void HybridNonlinearISAM::printStats() const { + isam_.getCliqueData().getStats().print(); +} + +/* 
************************************************************************* */ + +} // namespace gtsam diff --git a/gtsam/hybrid/HybridNonlinearISAM.h b/gtsam/hybrid/HybridNonlinearISAM.h new file mode 100644 index 000000000..d96485fff --- /dev/null +++ b/gtsam/hybrid/HybridNonlinearISAM.h @@ -0,0 +1,132 @@ +/* ---------------------------------------------------------------------------- + + * GTSAM Copyright 2010, Georgia Tech Research Corporation, + * Atlanta, Georgia 30332-0415 + * All Rights Reserved + * Authors: Frank Dellaert, et al. (see THANKS for the full author list) + + * See LICENSE for the license information + + * -------------------------------------------------------------------------- */ + +/** + * @file HybridNonlinearISAM.h + * @date Sep 12, 2022 + * @author Varun Agrawal + */ + +#pragma once + +#include +#include + +namespace gtsam { +/** + * Wrapper class to manage ISAM in a nonlinear context + */ +class GTSAM_EXPORT HybridNonlinearISAM { + protected: + /** The internal iSAM object */ + gtsam::HybridGaussianISAM isam_; + + /** The current linearization point */ + Values linPoint_; + + /// The discrete assignment + DiscreteValues assignment_; + + /** The original factors, used when relinearizing */ + HybridNonlinearFactorGraph factors_; + + /** The reordering interval and counter */ + int reorderInterval_; + int reorderCounter_; + + /** The elimination function */ + HybridGaussianFactorGraph::Eliminate eliminationFunction_; + + public: + /// @name Standard Constructors + /// @{ + + /** + * Periodically reorder and relinearize + * @param reorderInterval is the number of updates between reorderings, + * 0 never reorders (and is dangerous for memory consumption) + * 1 (default) reorders every time, in worse case is batch every update + * typical values are 50 or 100 + */ + HybridNonlinearISAM( + int reorderInterval = 1, + const HybridGaussianFactorGraph::Eliminate& eliminationFunction = + 
HybridGaussianFactorGraph::EliminationTraitsType::DefaultEliminate) + : reorderInterval_(reorderInterval), + reorderCounter_(0), + eliminationFunction_(eliminationFunction) {} + + /// @} + /// @name Standard Interface + /// @{ + + /** Return the current solution estimate */ + Values estimate(); + + // /** find the marginal covariance for a single variable */ + // Matrix marginalCovariance(Key key) const; + + // access + + /** access the underlying bayes tree */ + const HybridGaussianISAM& bayesTree() const { return isam_; } + + /** + * @brief Prune the underlying Bayes tree. + * + * @param root The root key in the discrete conditional decision tree. + * @param maxNumberLeaves + */ + void prune(const Key& root, const size_t maxNumberLeaves) { + isam_.prune(root, maxNumberLeaves); + } + + /** Return the current linearization point */ + const Values& getLinearizationPoint() const { return linPoint_; } + + /** Return the current discrete assignment */ + const DiscreteValues& getAssignment() const { return assignment_; } + + /** get underlying nonlinear graph */ + const HybridNonlinearFactorGraph& getFactorsUnsafe() const { + return factors_; + } + + /** get counters */ + int reorderInterval() const { return reorderInterval_; } ///< TODO: comment + int reorderCounter() const { return reorderCounter_; } ///< TODO: comment + + /** prints out all contents of the system */ + void print(const std::string& s = "", + const KeyFormatter& keyFormatter = DefaultKeyFormatter) const; + + /** prints out clique statistics */ + void printStats() const; + + /** saves the Tree to a text file in GraphViz format */ + void saveGraph(const std::string& s, + const KeyFormatter& keyFormatter = DefaultKeyFormatter) const; + + /// @} + /// @name Advanced Interface + /// @{ + + /** Add new factors along with their initial linearization points */ + void update(const HybridNonlinearFactorGraph& newFactors, + const Values& initialValues); + + /** Relinearization and reordering of variables */ + 
void reorder_relinearize(); + + /// @} +}; + +} // namespace gtsam diff --git a/gtsam/hybrid/tests/testHybridNonlinearISAM.cpp b/gtsam/hybrid/tests/testHybridNonlinearISAM.cpp new file mode 100644 index 000000000..bed90fe2c --- /dev/null +++ b/gtsam/hybrid/tests/testHybridNonlinearISAM.cpp @@ -0,0 +1,589 @@ +/* ---------------------------------------------------------------------------- + + * GTSAM Copyright 2010, Georgia Tech Research Corporation, + * Atlanta, Georgia 30332-0415 + * All Rights Reserved + * Authors: Frank Dellaert, et al. (see THANKS for the full author list) + + * See LICENSE for the license information + + * -------------------------------------------------------------------------- */ + +/** + * @file testHybridNonlinearISAM.cpp + * @brief Unit tests for nonlinear incremental inference + * @author Fan Jiang, Varun Agrawal, Frank Dellaert + * @date Jan 2021 + */ + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#include + +#include "Switching.h" + +// Include for test suite +#include + +using namespace std; +using namespace gtsam; +using noiseModel::Isotropic; +using symbol_shorthand::L; +using symbol_shorthand::M; +using symbol_shorthand::W; +using symbol_shorthand::X; +using symbol_shorthand::Y; +using symbol_shorthand::Z; + +/* ****************************************************************************/ +// Test if we can perform elimination incrementally. 
+TEST(HybridNonlinearISAM, IncrementalElimination) { + Switching switching(3); + HybridNonlinearISAM isam; + HybridNonlinearFactorGraph graph1; + Values initial; + + // Create initial factor graph + // * * * + // | | | + // X1 -*- X2 -*- X3 + // \*-M1-*/ + graph1.push_back(switching.nonlinearFactorGraph.at(0)); // P(X1) + graph1.push_back(switching.nonlinearFactorGraph.at(1)); // P(X1, X2 | M1) + graph1.push_back(switching.nonlinearFactorGraph.at(2)); // P(X2, X3 | M2) + graph1.push_back(switching.nonlinearFactorGraph.at(5)); // P(M1) + + initial.insert(X(1), 1); + initial.insert(X(2), 2); + initial.insert(X(3), 3); + + // Run update step + isam.update(graph1, initial); + + // Check that after update we have 3 hybrid Bayes net nodes: + // P(X1 | X2, M1) and P(X2, X3 | M1, M2), P(M1, M2) + HybridGaussianISAM bayesTree = isam.bayesTree(); + EXPECT_LONGS_EQUAL(3, bayesTree.size()); + EXPECT(bayesTree[X(1)]->conditional()->frontals() == KeyVector{X(1)}); + EXPECT(bayesTree[X(1)]->conditional()->parents() == KeyVector({X(2), M(1)})); + EXPECT(bayesTree[X(2)]->conditional()->frontals() == KeyVector({X(2), X(3)})); + EXPECT(bayesTree[X(2)]->conditional()->parents() == KeyVector({M(1), M(2)})); + + /********************************************************/ + // New factor graph for incremental update. 
+ HybridNonlinearFactorGraph graph2; + initial = Values(); + + graph1.push_back(switching.nonlinearFactorGraph.at(3)); // P(X2) + graph2.push_back(switching.nonlinearFactorGraph.at(4)); // P(X3) + graph2.push_back(switching.nonlinearFactorGraph.at(6)); // P(M1, M2) + + isam.update(graph2, initial); + + bayesTree = isam.bayesTree(); + // Check that after the second update we have + // 1 additional hybrid Bayes net node: + // P(X2, X3 | M1, M2) + EXPECT_LONGS_EQUAL(3, bayesTree.size()); + EXPECT(bayesTree[X(3)]->conditional()->frontals() == KeyVector({X(2), X(3)})); + EXPECT(bayesTree[X(3)]->conditional()->parents() == KeyVector({M(1), M(2)})); +} + +/* ****************************************************************************/ +// Test if we can incrementally do the inference +TEST(HybridNonlinearISAM, IncrementalInference) { + Switching switching(3); + HybridNonlinearISAM isam; + HybridNonlinearFactorGraph graph1; + Values initial; + + // Create initial factor graph + // * * * + // | | | + // X1 -*- X2 -*- X3 + // | | + // *-M1 - * - M2 + graph1.push_back(switching.nonlinearFactorGraph.at(0)); // P(X1) + graph1.push_back(switching.nonlinearFactorGraph.at(1)); // P(X1, X2 | M1) + graph1.push_back(switching.nonlinearFactorGraph.at(3)); // P(X2) + graph1.push_back(switching.nonlinearFactorGraph.at(5)); // P(M1) + + initial.insert(X(1), 1); + initial.insert(X(2), 2); + + // Run update step + isam.update(graph1, initial); + HybridGaussianISAM bayesTree = isam.bayesTree(); + + auto discreteConditional_m1 = + bayesTree[M(1)]->conditional()->asDiscreteConditional(); + EXPECT(discreteConditional_m1->keys() == KeyVector({M(1)})); + + /********************************************************/ + // New factor graph for incremental update. 
+ HybridNonlinearFactorGraph graph2; + initial = Values(); + + initial.insert(X(3), 3); + + graph2.push_back(switching.nonlinearFactorGraph.at(2)); // P(X2, X3 | M2) + graph2.push_back(switching.nonlinearFactorGraph.at(4)); // P(X3) + graph2.push_back(switching.nonlinearFactorGraph.at(6)); // P(M1, M2) + + isam.update(graph2, initial); + bayesTree = isam.bayesTree(); + + /********************************************************/ + // Run batch elimination so we can compare results. + Ordering ordering; + ordering += X(1); + ordering += X(2); + ordering += X(3); + + // Now we calculate the actual factors using full elimination + HybridBayesTree::shared_ptr expectedHybridBayesTree; + HybridGaussianFactorGraph::shared_ptr expectedRemainingGraph; + std::tie(expectedHybridBayesTree, expectedRemainingGraph) = + switching.linearizedFactorGraph.eliminatePartialMultifrontal(ordering); + + // The densities on X(1) should be the same + auto x1_conditional = dynamic_pointer_cast( + bayesTree[X(1)]->conditional()->inner()); + auto actual_x1_conditional = dynamic_pointer_cast( + (*expectedHybridBayesTree)[X(1)]->conditional()->inner()); + EXPECT(assert_equal(*x1_conditional, *actual_x1_conditional)); + + // The densities on X(2) should be the same + auto x2_conditional = dynamic_pointer_cast( + bayesTree[X(2)]->conditional()->inner()); + auto actual_x2_conditional = dynamic_pointer_cast( + (*expectedHybridBayesTree)[X(2)]->conditional()->inner()); + EXPECT(assert_equal(*x2_conditional, *actual_x2_conditional)); + + // The densities on X(3) should be the same + auto x3_conditional = dynamic_pointer_cast( + bayesTree[X(3)]->conditional()->inner()); + auto actual_x3_conditional = dynamic_pointer_cast( + (*expectedHybridBayesTree)[X(2)]->conditional()->inner()); + EXPECT(assert_equal(*x3_conditional, *actual_x3_conditional)); + + // We only perform manual continuous elimination for 0,0. 
+ // The other discrete probabilities on M(2) are calculated the same way + Ordering discrete_ordering; + discrete_ordering += M(1); + discrete_ordering += M(2); + HybridBayesTree::shared_ptr discreteBayesTree = + expectedRemainingGraph->eliminateMultifrontal(discrete_ordering); + + DiscreteValues m00; + m00[M(1)] = 0, m00[M(2)] = 0; + DiscreteConditional decisionTree = + *(*discreteBayesTree)[M(2)]->conditional()->asDiscreteConditional(); + double m00_prob = decisionTree(m00); + + auto discreteConditional = + bayesTree[M(2)]->conditional()->asDiscreteConditional(); + + // Test if the probability values are as expected with regression tests. + DiscreteValues assignment; + EXPECT(assert_equal(m00_prob, 0.0619233, 1e-5)); + assignment[M(1)] = 0; + assignment[M(2)] = 0; + EXPECT(assert_equal(m00_prob, (*discreteConditional)(assignment), 1e-5)); + assignment[M(1)] = 1; + assignment[M(2)] = 0; + EXPECT(assert_equal(0.183743, (*discreteConditional)(assignment), 1e-5)); + assignment[M(1)] = 0; + assignment[M(2)] = 1; + EXPECT(assert_equal(0.204159, (*discreteConditional)(assignment), 1e-5)); + assignment[M(1)] = 1; + assignment[M(2)] = 1; + EXPECT(assert_equal(0.2, (*discreteConditional)(assignment), 1e-5)); + + // Check if the clique conditional generated from incremental elimination + // matches that of batch elimination. 
+ auto expectedChordal = expectedRemainingGraph->eliminateMultifrontal(); + auto expectedConditional = dynamic_pointer_cast( + (*expectedChordal)[M(2)]->conditional()->inner()); + auto actualConditional = dynamic_pointer_cast( + bayesTree[M(2)]->conditional()->inner()); + EXPECT(assert_equal(*actualConditional, *expectedConditional, 1e-6)); +} + +/* ****************************************************************************/ +// Test if we can approximately do the inference +TEST(HybridNonlinearISAM, Approx_inference) { + Switching switching(4); + HybridNonlinearISAM incrementalHybrid; + HybridNonlinearFactorGraph graph1; + Values initial; + + // Add the 3 hybrid factors, x1-x2, x2-x3, x3-x4 + for (size_t i = 1; i < 4; i++) { + graph1.push_back(switching.nonlinearFactorGraph.at(i)); + } + + // Add the Gaussian factors, 1 prior on X(1), + // 3 measurements on X(2), X(3), X(4) + graph1.push_back(switching.nonlinearFactorGraph.at(0)); + for (size_t i = 4; i <= 7; i++) { + graph1.push_back(switching.nonlinearFactorGraph.at(i)); + initial.insert(X(i - 3), i - 3); + } + + // Create ordering. 
+ Ordering ordering; + for (size_t j = 1; j <= 4; j++) { + ordering += X(j); + } + + // Now we calculate the actual factors using full elimination + HybridBayesTree::shared_ptr unprunedHybridBayesTree; + HybridGaussianFactorGraph::shared_ptr unprunedRemainingGraph; + std::tie(unprunedHybridBayesTree, unprunedRemainingGraph) = + switching.linearizedFactorGraph.eliminatePartialMultifrontal(ordering); + + size_t maxNrLeaves = 5; + incrementalHybrid.update(graph1, initial); + HybridGaussianISAM bayesTree = incrementalHybrid.bayesTree(); + + bayesTree.prune(M(3), maxNrLeaves); + + /* + unpruned factor is: + Choice(m3) + 0 Choice(m2) + 0 0 Choice(m1) + 0 0 0 Leaf 0.11267528 + 0 0 1 Leaf 0.18576102 + 0 1 Choice(m1) + 0 1 0 Leaf 0.18754662 + 0 1 1 Leaf 0.30623871 + 1 Choice(m2) + 1 0 Choice(m1) + 1 0 0 Leaf 0.18576102 + 1 0 1 Leaf 0.30622428 + 1 1 Choice(m1) + 1 1 0 Leaf 0.30623871 + 1 1 1 Leaf 0.5 + + pruned factors is: + Choice(m3) + 0 Choice(m2) + 0 0 Leaf 0 + 0 1 Choice(m1) + 0 1 0 Leaf 0.18754662 + 0 1 1 Leaf 0.30623871 + 1 Choice(m2) + 1 0 Choice(m1) + 1 0 0 Leaf 0 + 1 0 1 Leaf 0.30622428 + 1 1 Choice(m1) + 1 1 0 Leaf 0.30623871 + 1 1 1 Leaf 0.5 + */ + + auto discreteConditional_m1 = *dynamic_pointer_cast( + bayesTree[M(1)]->conditional()->inner()); + EXPECT(discreteConditional_m1.keys() == KeyVector({M(1), M(2), M(3)})); + + // Get the number of elements which are greater than 0. + auto count = [](const double &value, int count) { + return value > 0 ? count + 1 : count; + }; + // Check that the number of leaves after pruning is 5. + EXPECT_LONGS_EQUAL(5, discreteConditional_m1.fold(count, 0)); + + // Check that the hybrid nodes of the bayes net match those of the pre-pruning + // bayes net, at the same positions. 
+ auto &unprunedLastDensity = *dynamic_pointer_cast( + unprunedHybridBayesTree->clique(X(4))->conditional()->inner()); + auto &lastDensity = *dynamic_pointer_cast( + bayesTree[X(4)]->conditional()->inner()); + + std::vector> assignments = + discreteConditional_m1.enumerate(); + // Loop over all assignments and check the pruned components + for (auto &&av : assignments) { + const DiscreteValues &assignment = av.first; + const double value = av.second; + + if (value == 0.0) { + EXPECT(lastDensity(assignment) == nullptr); + } else { + CHECK(lastDensity(assignment)); + EXPECT(assert_equal(*unprunedLastDensity(assignment), + *lastDensity(assignment))); + } + } +} + +/* ****************************************************************************/ +// Test approximate inference with an additional pruning step. +TEST(HybridNonlinearISAM, Incremental_approximate) { + Switching switching(5); + HybridNonlinearISAM incrementalHybrid; + HybridNonlinearFactorGraph graph1; + Values initial; + + /***** Run Round 1 *****/ + // Add the 3 hybrid factors, x1-x2, x2-x3, x3-x4 + for (size_t i = 1; i < 4; i++) { + graph1.push_back(switching.nonlinearFactorGraph.at(i)); + } + + // Add the Gaussian factors, 1 prior on X(1), + // 3 measurements on X(2), X(3), X(4) + graph1.push_back(switching.nonlinearFactorGraph.at(0)); + initial.insert(X(1), 1); + for (size_t i = 5; i <= 7; i++) { + graph1.push_back(switching.nonlinearFactorGraph.at(i)); + initial.insert(X(i - 3), i - 3); + } + + // Run update with pruning + size_t maxComponents = 5; + incrementalHybrid.update(graph1, initial); + HybridGaussianISAM bayesTree = incrementalHybrid.bayesTree(); + + bayesTree.prune(M(3), maxComponents); + + // Check if we have a bayes tree with 4 hybrid nodes, + // each with 2, 4, 8, and 5 (pruned) leaves respetively. 
+ EXPECT_LONGS_EQUAL(4, bayesTree.size()); + EXPECT_LONGS_EQUAL( + 2, bayesTree[X(1)]->conditional()->asMixture()->nrComponents()); + EXPECT_LONGS_EQUAL( + 4, bayesTree[X(2)]->conditional()->asMixture()->nrComponents()); + EXPECT_LONGS_EQUAL( + 5, bayesTree[X(3)]->conditional()->asMixture()->nrComponents()); + EXPECT_LONGS_EQUAL( + 5, bayesTree[X(4)]->conditional()->asMixture()->nrComponents()); + + /***** Run Round 2 *****/ + HybridGaussianFactorGraph graph2; + graph2.push_back(switching.nonlinearFactorGraph.at(4)); // x4-x5 + graph2.push_back(switching.nonlinearFactorGraph.at(8)); // x5 measurement + initial = Values(); + initial.insert(X(5), 5); + + // Run update with pruning a second time. + incrementalHybrid.update(graph2, initial); + bayesTree = incrementalHybrid.bayesTree(); + + bayesTree.prune(M(4), maxComponents); + + // Check if we have a bayes tree with pruned hybrid nodes, + // with 5 (pruned) leaves. + CHECK_EQUAL(5, bayesTree.size()); + EXPECT_LONGS_EQUAL( + 5, bayesTree[X(4)]->conditional()->asMixture()->nrComponents()); + EXPECT_LONGS_EQUAL( + 5, bayesTree[X(5)]->conditional()->asMixture()->nrComponents()); +} + +/* ************************************************************************/ +// A GTSAM-only test for running inference on a single-legged robot. +// The leg links are represented by the chain X-Y-Z-W, where X is the base and +// W is the foot. +// We use BetweenFactor as constraints between each of the poses. +TEST(HybridGaussianISAM, NonTrivial) { + /*************** Run Round 1 ***************/ + HybridNonlinearFactorGraph fg; + HybridNonlinearISAM inc; + + // Add a prior on pose x1 at the origin. 
+ // A prior factor consists of a mean and + // a noise model (covariance matrix) + Pose2 prior(0.0, 0.0, 0.0); // prior mean is at origin + auto priorNoise = noiseModel::Diagonal::Sigmas( + Vector3(0.3, 0.3, 0.1)); // 30cm std on x,y, 0.1 rad on theta + fg.emplace_nonlinear>(X(0), prior, priorNoise); + + // create a noise model for the landmark measurements + auto poseNoise = noiseModel::Isotropic::Sigma(3, 0.1); + + // We model a robot's single leg as X - Y - Z - W + // where X is the base link and W is the foot link. + + // Add connecting poses similar to PoseFactors in GTD + fg.emplace_nonlinear>(X(0), Y(0), Pose2(0, 1.0, 0), + poseNoise); + fg.emplace_nonlinear>(Y(0), Z(0), Pose2(0, 1.0, 0), + poseNoise); + fg.emplace_nonlinear>(Z(0), W(0), Pose2(0, 1.0, 0), + poseNoise); + + // Create initial estimate + Values initial; + initial.insert(X(0), Pose2(0.0, 0.0, 0.0)); + initial.insert(Y(0), Pose2(0.0, 1.0, 0.0)); + initial.insert(Z(0), Pose2(0.0, 2.0, 0.0)); + initial.insert(W(0), Pose2(0.0, 3.0, 0.0)); + + // Don't run update now since we don't have discrete variables involved. + + /*************** Run Round 2 ***************/ + using PlanarMotionModel = BetweenFactor; + + // Add odometry factor with discrete modes. 
+ Pose2 odometry(1.0, 0.0, 0.0); + KeyVector contKeys = {W(0), W(1)}; + auto noise_model = noiseModel::Isotropic::Sigma(3, 1.0); + auto still = boost::make_shared(W(0), W(1), Pose2(0, 0, 0), + noise_model), + moving = boost::make_shared(W(0), W(1), odometry, + noise_model); + std::vector components = {moving, still}; + auto mixtureFactor = boost::make_shared( + contKeys, DiscreteKeys{gtsam::DiscreteKey(M(1), 2)}, components); + fg.push_back(mixtureFactor); + + // Add equivalent of ImuFactor + fg.emplace_nonlinear>(X(0), X(1), Pose2(1.0, 0.0, 0), + poseNoise); + // PoseFactors-like at k=1 + fg.emplace_nonlinear>(X(1), Y(1), Pose2(0, 1, 0), + poseNoise); + fg.emplace_nonlinear>(Y(1), Z(1), Pose2(0, 1, 0), + poseNoise); + fg.emplace_nonlinear>(Z(1), W(1), Pose2(-1, 1, 0), + poseNoise); + + initial.insert(X(1), Pose2(1.0, 0.0, 0.0)); + initial.insert(Y(1), Pose2(1.0, 1.0, 0.0)); + initial.insert(Z(1), Pose2(1.0, 2.0, 0.0)); + // The leg link did not move so we set the expected pose accordingly. + initial.insert(W(1), Pose2(0.0, 3.0, 0.0)); + + // Update without pruning + // The result is a HybridBayesNet with 1 discrete variable M(1). + // P(X | measurements) = P(W0|Z0, W1, M1) P(Z0|Y0, W1, M1) P(Y0|X0, W1, M1) + // P(X0 | X1, W1, M1) P(W1|Z1, X1, M1) P(Z1|Y1, X1, M1) + // P(Y1 | X1, M1)P(X1 | M1)P(M1) + // The MHS tree is a 1 level tree for time indices (1,) with 2 leaves. + inc.update(fg, initial); + + fg = HybridNonlinearFactorGraph(); + initial = Values(); + + /*************** Run Round 3 ***************/ + // Add odometry factor with discrete modes. 
+  contKeys = {W(1), W(2)};
+  still = boost::make_shared<PlanarMotionModel>(W(1), W(2), Pose2(0, 0, 0),
+                                                noise_model);
+  moving =
+      boost::make_shared<PlanarMotionModel>(W(1), W(2), odometry,
+                                            noise_model);
+  components = {moving, still};
+  mixtureFactor = boost::make_shared<MixtureFactor>(
+      contKeys, DiscreteKeys{gtsam::DiscreteKey(M(2), 2)}, components);
+  fg.push_back(mixtureFactor);
+
+  // Add equivalent of ImuFactor
+  fg.emplace_nonlinear<BetweenFactor<Pose2>>(X(1), X(2), Pose2(1.0, 0.0, 0),
+                                             poseNoise);
+  // PoseFactors-like at k=1
+  fg.emplace_nonlinear<BetweenFactor<Pose2>>(X(2), Y(2), Pose2(0, 1, 0),
+                                             poseNoise);
+  fg.emplace_nonlinear<BetweenFactor<Pose2>>(Y(2), Z(2), Pose2(0, 1, 0),
+                                             poseNoise);
+  fg.emplace_nonlinear<BetweenFactor<Pose2>>(Z(2), W(2), Pose2(-2, 1, 0),
+                                             poseNoise);
+
+  initial.insert(X(2), Pose2(2.0, 0.0, 0.0));
+  initial.insert(Y(2), Pose2(2.0, 1.0, 0.0));
+  initial.insert(Z(2), Pose2(2.0, 2.0, 0.0));
+  initial.insert(W(2), Pose2(0.0, 3.0, 0.0));
+
+  // Now we prune!
+  // P(X | measurements) = P(W0|Z0, W1, M1) P(Z0|Y0, W1, M1) P(Y0|X0, W1, M1)
+  // P(X0 | X1, W1, M1) P(W1|W2, Z1, X1, M1, M2)
+  // P(Z1| W2, Y1, X1, M1, M2) P(Y1 | W2, X1, M1, M2)
+  // P(X1 | W2, X2, M1, M2) P(W2|Z2, X2, M1, M2)
+  // P(Z2|Y2, X2, M1, M2) P(Y2 | X2, M1, M2)
+  // P(X2 | M1, M2) P(M1, M2)
+  // The MHS at this point should be a 2 level tree on (1, 2).
+  // 1 has 2 choices, and 2 has 4 choices.
+  inc.update(fg, initial);
+  inc.prune(M(2), 2);
+
+  fg = HybridNonlinearFactorGraph();
+  initial = Values();
+
+  /*************** Run Round 4 ***************/
+  // Add odometry factor with discrete modes.
+  contKeys = {W(2), W(3)};
+  still = boost::make_shared<PlanarMotionModel>(W(2), W(3), Pose2(0, 0, 0),
+                                                noise_model);
+  moving =
+      boost::make_shared<PlanarMotionModel>(W(2), W(3), odometry,
+                                            noise_model);
+  components = {moving, still};
+  mixtureFactor = boost::make_shared<MixtureFactor>(
+      contKeys, DiscreteKeys{gtsam::DiscreteKey(M(3), 2)}, components);
+  fg.push_back(mixtureFactor);
+
+  // Add equivalent of ImuFactor
+  fg.emplace_nonlinear<BetweenFactor<Pose2>>(X(2), X(3), Pose2(1.0, 0.0, 0),
+                                             poseNoise);
+  // PoseFactors-like at k=3
+  fg.emplace_nonlinear<BetweenFactor<Pose2>>(X(3), Y(3), Pose2(0, 1, 0),
+                                             poseNoise);
+  fg.emplace_nonlinear<BetweenFactor<Pose2>>(Y(3), Z(3), Pose2(0, 1, 0),
+                                             poseNoise);
+  fg.emplace_nonlinear<BetweenFactor<Pose2>>(Z(3), W(3), Pose2(-3, 1, 0),
+                                             poseNoise);
+
+  initial.insert(X(3), Pose2(3.0, 0.0, 0.0));
+  initial.insert(Y(3), Pose2(3.0, 1.0, 0.0));
+  initial.insert(Z(3), Pose2(3.0, 2.0, 0.0));
+  initial.insert(W(3), Pose2(0.0, 3.0, 0.0));
+
+  // Keep pruning!
+  inc.update(fg, initial);
+  inc.prune(M(3), 3);
+
+  fg = HybridNonlinearFactorGraph();
+  initial = Values();
+
+  HybridGaussianISAM bayesTree = inc.bayesTree();
+
+  // The final discrete graph should not be empty since we have eliminated
+  // all continuous variables.
+  auto discreteTree =
+      bayesTree[M(3)]->conditional()->asDiscreteConditional();
+  EXPECT_LONGS_EQUAL(3, discreteTree->size());
+
+  // Test if the optimal discrete mode assignment is (1, 1, 1).
+  DiscreteFactorGraph discreteGraph;
+  discreteGraph.push_back(discreteTree);
+  DiscreteValues optimal_assignment = discreteGraph.optimize();
+
+  DiscreteValues expected_assignment;
+  expected_assignment[M(1)] = 1;
+  expected_assignment[M(2)] = 1;
+  expected_assignment[M(3)] = 1;
+
+  EXPECT(assert_equal(expected_assignment, optimal_assignment));
+
+  // Test if pruning worked correctly by checking that
+  // we only have 3 leaves in the last node.
+  auto lastConditional = bayesTree[X(3)]->conditional()->asMixture();
+  EXPECT_LONGS_EQUAL(3, lastConditional->nrComponents());
+}
+
+/* ************************************************************************* */
+int main() {
+  TestResult tr;
+  return TestRegistry::runAllTests(tr);
+}

From 4f22a0651916cd4493ea9fd974001698fe825168 Mon Sep 17 00:00:00 2001
From: Varun Agrawal <varagrawal@gmail.com>
Date: Wed, 14 Sep 2022 14:49:48 -0400
Subject: [PATCH 4/5] add container SFINAE for hybrid nonlinear factor graph
 push_back

---
 gtsam/hybrid/HybridNonlinearFactorGraph.h | 16 ++++++++++++----
 1 file changed, 12 insertions(+), 4 deletions(-)

diff --git a/gtsam/hybrid/HybridNonlinearFactorGraph.h b/gtsam/hybrid/HybridNonlinearFactorGraph.h
index 5f04851d4..b48e8bb5c 100644
--- a/gtsam/hybrid/HybridNonlinearFactorGraph.h
+++ b/gtsam/hybrid/HybridNonlinearFactorGraph.h
@@ -42,6 +42,16 @@ class GTSAM_EXPORT HybridNonlinearFactorGraph : public HybridFactorGraph {
   using IsNonlinear = typename std::enable_if<
       std::is_base_of<NonlinearFactor, FACTOR>::value>::type;
 
+  /// Check if T has a value_type derived from FactorType.
+  template <typename T>
+  using HasDerivedValueType = typename std::enable_if<
+      std::is_base_of<FactorType, typename T::value_type>::value>::type;
+
+  /// Check if T has a pointer type derived from FactorType.
+  template <typename T>
+  using HasDerivedElementType = typename std::enable_if<std::is_base_of<
+      FactorType, typename T::value_type::element_type>::value>::type;
+
  public:
   using Base = HybridFactorGraph;
   using This = HybridNonlinearFactorGraph;  ///< this class
@@ -114,15 +124,13 @@ class GTSAM_EXPORT HybridNonlinearFactorGraph : public HybridFactorGraph {
    * copied)
    */
   template <typename CONTAINER>
-  Base::Base::HasDerivedElementType<CONTAINER> push_back(
-      const CONTAINER& container) {
+  HasDerivedElementType<CONTAINER> push_back(const CONTAINER& container) {
     Base::push_back(container.begin(), container.end());
   }
 
   /// Push back non-pointer objects in a container (factors are copied).
   template <typename CONTAINER>
-  Base::Base::HasDerivedValueType<CONTAINER> push_back(
-      const CONTAINER& container) {
+  HasDerivedValueType<CONTAINER> push_back(const CONTAINER& container) {
     Base::push_back(container.begin(), container.end());
   }

From 8c0648582eb3bd55bfa1cf0359b02fb7d679ffef Mon Sep 17 00:00:00 2001
From: Varun Agrawal <varagrawal@gmail.com>
Date: Wed, 14 Sep 2022 15:32:38 -0400
Subject: [PATCH 5/5] fix test name for HybridNonlinearISAM

---
 gtsam/hybrid/tests/testHybridNonlinearISAM.cpp | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/gtsam/hybrid/tests/testHybridNonlinearISAM.cpp b/gtsam/hybrid/tests/testHybridNonlinearISAM.cpp
index bed90fe2c..46076b330 100644
--- a/gtsam/hybrid/tests/testHybridNonlinearISAM.cpp
+++ b/gtsam/hybrid/tests/testHybridNonlinearISAM.cpp
@@ -396,7 +396,7 @@ TEST(HybridNonlinearISAM, Incremental_approximate) {
 // The leg links are represented by the chain X-Y-Z-W, where X is the base and
 // W is the foot.
 // We use BetweenFactor<Pose2> as constraints between each of the poses.
-TEST(HybridGaussianISAM, NonTrivial) {
+TEST(HybridNonlinearISAM, NonTrivial) {
   /*************** Run Round 1 ***************/
   HybridNonlinearFactorGraph fg;
   HybridNonlinearISAM inc;