Merge pull request #1277 from borglab/feature/nonlinear-hybrid

release/4.3a0
Varun Agrawal 2022-08-22 17:32:35 -04:00 committed by GitHub
commit 05b117429d
6 changed files with 506 additions and 460 deletions

gtsam/hybrid/GaussianMixture.cpp

@@ -119,11 +119,36 @@ void GaussianMixture::print(const std::string &s,
       "", [&](Key k) { return formatter(k); },
       [&](const GaussianConditional::shared_ptr &gf) -> std::string {
         RedirectCout rd;
-        if (!gf->empty())
+        if (gf && !gf->empty())
           gf->print("", formatter);
         else
           return {"nullptr"};
         return rd.str();
       });
 }
+
+/* *******************************************************************************/
+void GaussianMixture::prune(const DecisionTreeFactor &decisionTree) {
+  // Functional which loops over all assignments and creates a set of
+  // GaussianConditionals
+  auto pruner = [&decisionTree](
+                    const Assignment<Key> &choices,
+                    const GaussianConditional::shared_ptr &conditional)
+      -> GaussianConditional::shared_ptr {
+    // Typecast so we can use this to get the probability value
+    DiscreteValues values(choices);
+
+    if (decisionTree(values) == 0.0) {
+      // empty aka null pointer
+      boost::shared_ptr<GaussianConditional> null;
+      return null;
+    } else {
+      return conditional;
+    }
+  };
+
+  auto pruned_conditionals = conditionals_.apply(pruner);
+  conditionals_.root_ = pruned_conditionals.root_;
+}
+
 } // namespace gtsam
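For orientation, here is a minimal sketch of how the new GaussianMixture::prune is meant to be driven; the mixture, mode key, and probability table below are illustrative assumptions, not part of this diff. A DecisionTreeFactor whose pruned leaves are zero is passed in, and the components under those leaves become null:

#include <gtsam/discrete/DecisionTreeFactor.h>
#include <gtsam/hybrid/GaussianMixture.h>

using namespace gtsam;

// Hypothetical: null out the components of `mixture` whose assignment of the
// binary mode key was pruned to probability 0.
void pruneExample(GaussianMixture &mixture, const DiscreteKey &mode) {
  DecisionTreeFactor probabilities(mode, "0.0 1.0");  // leaf for mode=0 pruned
  mixture.prune(probabilities);  // conditional for mode=0 becomes nullptr
}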

gtsam/hybrid/GaussianMixture.h

@@ -21,6 +21,7 @@
 #include <gtsam/discrete/DecisionTree-inl.h>
 #include <gtsam/discrete/DecisionTree.h>
+#include <gtsam/discrete/DecisionTreeFactor.h>
 #include <gtsam/discrete/DiscreteKey.h>
 #include <gtsam/hybrid/HybridFactor.h>
 #include <gtsam/inference/Conditional.h>
@@ -121,7 +122,7 @@ class GTSAM_EXPORT GaussianMixture
   /// Test equality with base HybridFactor
   bool equals(const HybridFactor &lf, double tol = 1e-9) const override;
 
-  /* print utility */
+  /// Print utility
   void print(
       const std::string &s = "GaussianMixture\n",
       const KeyFormatter &formatter = DefaultKeyFormatter) const override;
@@ -131,6 +132,15 @@ class GTSAM_EXPORT GaussianMixture
   /// Getter for the underlying Conditionals DecisionTree
   const Conditionals &conditionals();
 
+  /**
+   * @brief Prune the decision tree of Gaussian factors as per the discrete
+   * `decisionTree`.
+   *
+   * @param decisionTree A pruned decision tree of discrete keys where the
+   * leaves are probabilities.
+   */
+  void prune(const DecisionTreeFactor &decisionTree);
+
   /**
    * @brief Merge the Gaussian Factor Graphs in `this` and `sum` while
    * maintaining the decision tree structure.
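As a usage note, the `decisionTree` argument is typically obtained from `DecisionTreeFactor::prune`, as `HybridGaussianISAM::prune` does later in this PR; a sketch with hypothetical names:

#include <gtsam/discrete/DecisionTreeFactor.h>
#include <gtsam/hybrid/GaussianMixture.h>

using namespace gtsam;

// Hypothetical: cap `mixture` at the 5 most probable components of `discrete`.
void pruneToTopFive(GaussianMixture &mixture,
                    const DecisionTreeFactor &discrete) {
  DecisionTreeFactor pruned = discrete.prune(5);  // keep the 5 largest leaves
  mixture.prune(pruned);  // null out components whose leaf went to zero
}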

gtsam/hybrid/HybridGaussianFactorGraph.cpp

@@ -184,6 +184,19 @@ hybridElimination(const HybridGaussianFactorGraph &factors,
   // sum out frontals, this is the factor on the separator
   GaussianMixtureFactor::Sum sum = sumFrontals(factors);
 
+  // If a tree leaf contains nullptr,
+  // convert that leaf to an empty GaussianFactorGraph.
+  // Needed since the DecisionTree will otherwise create
+  // a GFG with a single (null) factor.
+  auto emptyGaussian = [](const GaussianFactorGraph &gfg) {
+    bool hasNull =
+        std::any_of(gfg.begin(), gfg.end(),
+                    [](const GaussianFactor::shared_ptr &ptr) { return !ptr; });
+    return hasNull ? GaussianFactorGraph() : gfg;
+  };
+  sum = GaussianMixtureFactor::Sum(sum, emptyGaussian);
+
   using EliminationPair = GaussianFactorGraph::EliminationResult;
 
   KeyVector keysOfEliminated;  // Not the ordering
@@ -195,7 +208,10 @@ hybridElimination(const HybridGaussianFactorGraph &factors,
     if (graph.empty()) {
       return {nullptr, nullptr};
     }
-    auto result = EliminatePreferCholesky(graph, frontalKeys);
+    std::pair<boost::shared_ptr<GaussianConditional>,
+              boost::shared_ptr<GaussianFactor>>
+        result = EliminatePreferCholesky(graph, frontalKeys);
 
     if (keysOfEliminated.empty()) {
       keysOfEliminated =
           result.first->keys();  // Initialize the keysOfEliminated to be the
@@ -235,14 +251,27 @@ hybridElimination(const HybridGaussianFactorGraph &factors,
             boost::make_shared<HybridDiscreteFactor>(discreteFactor)};
   } else {
-    // Create a resulting DCGaussianMixture on the separator.
+    // Create a resulting GaussianMixtureFactor on the separator.
     auto factor = boost::make_shared<GaussianMixtureFactor>(
         KeyVector(continuousSeparator.begin(), continuousSeparator.end()),
         discreteSeparator, separatorFactors);
     return {boost::make_shared<HybridConditional>(conditional), factor};
   }
 }
 
-/* ************************************************************************ */
+/* ************************************************************************
+ * Function to eliminate variables **under the following assumptions**:
+ * 1. When the ordering is fully continuous, and the graph only contains
+ * continuous and hybrid factors
+ * 2. When the ordering is fully discrete, and the graph only contains discrete
+ * factors
+ *
+ * Any usage outside of this is considered incorrect.
+ *
+ * \warning This function is not meant to be used with arbitrary hybrid factor
+ * graphs. For example, if there exist continuous parents and one tries to
+ * eliminate a discrete variable (as specified in the ordering), the result
+ * will be INCORRECT and NO error will be raised.
+ */
 std::pair<HybridConditional::shared_ptr, HybridFactor::shared_ptr>  //
 EliminateHybrid(const HybridGaussianFactorGraph &factors,
                 const Ordering &frontalKeys) {
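Given that warning, a sketch of the supported two-stage pattern (mirroring the tests later in this PR; the graph and keys are assumptions): eliminate a fully continuous ordering first, then eliminate the now discrete-only remainder.

#include <gtsam/hybrid/HybridBayesTree.h>
#include <gtsam/hybrid/HybridGaussianFactorGraph.h>
#include <gtsam/inference/Symbol.h>

#include <tuple>

using namespace gtsam;
using symbol_shorthand::M;
using symbol_shorthand::X;

// Assumes `graph` ranges over continuous keys X(1..3) and discrete keys M(1..2).
void eliminateInTwoStages(const HybridGaussianFactorGraph &graph) {
  // Stage 1: fully continuous ordering over continuous + hybrid factors
  // (assumption 1 above).
  Ordering continuous;
  continuous += X(1);
  continuous += X(2);
  continuous += X(3);
  HybridBayesTree::shared_ptr bayesTree;
  HybridGaussianFactorGraph::shared_ptr remaining;
  std::tie(bayesTree, remaining) =
      graph.eliminatePartialMultifrontal(continuous);

  // Stage 2: fully discrete ordering on the discrete-only remainder
  // (assumption 2 above).
  Ordering discrete;
  discrete += M(1);
  discrete += M(2);
  auto discreteBayesTree = remaining->eliminateMultifrontal(discrete);
}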

gtsam/hybrid/HybridGaussianISAM.cpp

@@ -41,6 +41,7 @@ HybridGaussianISAM::HybridGaussianISAM(const HybridBayesTree& bayesTree)
 void HybridGaussianISAM::updateInternal(
     const HybridGaussianFactorGraph& newFactors,
     HybridBayesTree::Cliques* orphans,
+    const boost::optional<Ordering>& ordering,
     const HybridBayesTree::Eliminate& function) {
   // Remove the contaminated part of the Bayes tree
   BayesNetType bn;
@@ -74,16 +75,21 @@ void HybridGaussianISAM::updateInternal(
   std::copy(allDiscrete.begin(), allDiscrete.end(),
             std::back_inserter(newKeysDiscreteLast));
 
-  // KeyVector new
-
   // Get an ordering where the new keys are eliminated last
   const VariableIndex index(factors);
-  const Ordering ordering = Ordering::ColamdConstrainedLast(
-      index, KeyVector(newKeysDiscreteLast.begin(), newKeysDiscreteLast.end()),
-      true);
+  Ordering elimination_ordering;
+  if (ordering) {
+    elimination_ordering = *ordering;
+  } else {
+    elimination_ordering = Ordering::ColamdConstrainedLast(
+        index,
+        KeyVector(newKeysDiscreteLast.begin(), newKeysDiscreteLast.end()),
+        true);
+  }
 
   // eliminate all factors (top, added, orphans) into a new Bayes tree
-  auto bayesTree = factors.eliminateMultifrontal(ordering, function, index);
+  HybridBayesTree::shared_ptr bayesTree =
+      factors.eliminateMultifrontal(elimination_ordering, function, index);
 
   // Re-add into Bayes tree data structures
   this->roots_.insert(this->roots_.end(), bayesTree->roots().begin(),
@@ -93,9 +99,61 @@ void HybridGaussianISAM::updateInternal(
 
 /* ************************************************************************* */
 void HybridGaussianISAM::update(const HybridGaussianFactorGraph& newFactors,
+                                const boost::optional<Ordering>& ordering,
                                 const HybridBayesTree::Eliminate& function) {
   Cliques orphans;
-  this->updateInternal(newFactors, &orphans, function);
+  this->updateInternal(newFactors, &orphans, ordering, function);
+}
+
+/* ************************************************************************* */
+/**
+ * @brief Check if `b` is a subset of `a`.
+ * Non-const since they need to be sorted.
+ *
+ * @param a KeyVector
+ * @param b KeyVector
+ * @return True if the keys of b are a subset of a, else false.
+ */
+bool IsSubset(KeyVector a, KeyVector b) {
+  std::sort(a.begin(), a.end());
+  std::sort(b.begin(), b.end());
+  return std::includes(a.begin(), a.end(), b.begin(), b.end());
+}
+
+/* ************************************************************************* */
+void HybridGaussianISAM::prune(const Key& root, const size_t maxNrLeaves) {
+  auto decisionTree = boost::dynamic_pointer_cast<DecisionTreeFactor>(
+      this->clique(root)->conditional()->inner());
+  DecisionTreeFactor prunedDiscreteFactor = decisionTree->prune(maxNrLeaves);
+  decisionTree->root_ = prunedDiscreteFactor.root_;
+
+  std::vector<gtsam::Key> prunedKeys;
+  for (auto&& clique : nodes()) {
+    // The cliques can be repeated for each frontal, so we record them in
+    // prunedKeys and check if we have already pruned a particular clique.
+    if (std::find(prunedKeys.begin(), prunedKeys.end(), clique.first) !=
+        prunedKeys.end()) {
+      continue;
+    }
+
+    // Add all the keys of the current clique to be pruned to prunedKeys
+    for (auto&& key : clique.second->conditional()->frontals()) {
+      prunedKeys.push_back(key);
+    }
+
+    // Convert parents() to a KeyVector for comparison
+    KeyVector parents;
+    for (auto&& parent : clique.second->conditional()->parents()) {
+      parents.push_back(parent);
+    }
+
+    if (IsSubset(parents, decisionTree->keys())) {
+      auto gaussianMixture = boost::dynamic_pointer_cast<GaussianMixture>(
+          clique.second->conditional()->inner());
+
+      gaussianMixture->prune(prunedDiscreteFactor);
+    }
+  }
+}
 
 } // namespace gtsam
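Taken together, the new optional ordering and the prune hook give the incremental loop sketched below; `stepFactors` and the leaf cap of 5 are illustrative assumptions:

#include <gtsam/hybrid/HybridGaussianISAM.h>
#include <gtsam/inference/Symbol.h>

#include <functional>

using namespace gtsam;
using symbol_shorthand::M;

// Hypothetical driver: `stepFactors(k)` returns the new factors at step k.
void incrementalLoop(
    HybridGaussianISAM &isam,
    const std::function<HybridGaussianFactorGraph(size_t)> &stepFactors) {
  for (size_t k = 1; k <= 3; k++) {
    isam.update(stepFactors(k));  // default: constrained COLAMD ordering
    isam.prune(M(k), 5);          // cap the discrete tree at 5 leaves
  }
}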

gtsam/hybrid/HybridGaussianISAM.h

@@ -48,6 +48,7 @@ class GTSAM_EXPORT HybridGaussianISAM : public ISAM<HybridBayesTree> {
   void updateInternal(
       const HybridGaussianFactorGraph& newFactors,
       HybridBayesTree::Cliques* orphans,
+      const boost::optional<Ordering>& ordering = boost::none,
       const HybridBayesTree::Eliminate& function =
           HybridBayesTree::EliminationTraitsType::DefaultEliminate);
@ -59,8 +60,17 @@ class GTSAM_EXPORT HybridGaussianISAM : public ISAM<HybridBayesTree> {
* @param function Elimination function. * @param function Elimination function.
*/ */
void update(const HybridGaussianFactorGraph& newFactors, void update(const HybridGaussianFactorGraph& newFactors,
const boost::optional<Ordering>& ordering = boost::none,
const HybridBayesTree::Eliminate& function = const HybridBayesTree::Eliminate& function =
HybridBayesTree::EliminationTraitsType::DefaultEliminate); HybridBayesTree::EliminationTraitsType::DefaultEliminate);
/**
* @brief
*
* @param root The root key in the discrete conditional decision tree.
* @param maxNumberLeaves
*/
void prune(const Key& root, const size_t maxNumberLeaves);
}; };
/// traits /// traits
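A short sketch of the new override point (graph and keys are illustrative): passing an Ordering forces the elimination order, while omitting it keeps the previous constrained-COLAMD behavior.

#include <gtsam/hybrid/HybridGaussianISAM.h>
#include <gtsam/inference/Symbol.h>

using namespace gtsam;
using symbol_shorthand::X;

void updateWithCustomOrdering(HybridGaussianISAM &isam,
                              const HybridGaussianFactorGraph &graph) {
  Ordering ordering;
  ordering += X(1);
  ordering += X(2);
  isam.update(graph, ordering);  // converts to boost::optional<Ordering>
  // isam.update(graph);         // or: default COLAMD-constrained ordering
}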

gtsam/hybrid/tests/testHybridGaussianISAM.cpp

@@ -18,7 +18,9 @@
 
 #include <gtsam/discrete/DiscreteBayesNet.h>
 #include <gtsam/discrete/DiscreteDistribution.h>
+#include <gtsam/discrete/DiscreteFactorGraph.h>
 #include <gtsam/geometry/Pose2.h>
+#include <gtsam/hybrid/HybridConditional.h>
 #include <gtsam/hybrid/HybridGaussianISAM.h>
 #include <gtsam/linear/GaussianBayesNet.h>
 #include <gtsam/linear/GaussianFactorGraph.h>
@@ -50,20 +52,15 @@ TEST(HybridGaussianElimination, IncrementalElimination) {
   HybridGaussianFactorGraph graph1;
 
   // Create initial factor graph
   // *        *        *
   // |        |        |
-  // *- X1 -*- X2 -*- X3
+  //  X1 -*- X2 -*- X3
   //        \*-M1-*/
   graph1.push_back(switching.linearizedFactorGraph.at(0));  // P(X1)
   graph1.push_back(switching.linearizedFactorGraph.at(1));  // P(X1, X2 | M1)
   graph1.push_back(switching.linearizedFactorGraph.at(2));  // P(X2, X3 | M2)
   graph1.push_back(switching.linearizedFactorGraph.at(5));  // P(M1)
 
-  // Create ordering.
-  Ordering ordering;
-  ordering += X(1);
-  ordering += X(2);
-
   // Run update step
   isam.update(graph1);
@@ -83,11 +80,6 @@ TEST(HybridGaussianElimination, IncrementalElimination) {
   graph2.push_back(switching.linearizedFactorGraph.at(4));  // P(X3)
   graph2.push_back(switching.linearizedFactorGraph.at(6));  // P(M1, M2)
 
-  // Create ordering.
-  Ordering ordering2;
-  ordering2 += X(2);
-  ordering2 += X(3);
-
   isam.update(graph2);
 
   // Check that after the second update we have
@@ -108,7 +100,7 @@ TEST(HybridGaussianElimination, IncrementalInference) {
   // Create initial factor graph
   // *        *        *
   // |        |        |
-  // *- X1 -*- X2 -*- X3
+  //  X1 -*- X2 -*- X3
   //  |        |
   //  *-M1 - * - M2
   graph1.push_back(switching.linearizedFactorGraph.at(0));  // P(X1)
@@ -116,15 +108,13 @@ TEST(HybridGaussianElimination, IncrementalInference) {
   graph1.push_back(switching.linearizedFactorGraph.at(3));  // P(X2)
   graph1.push_back(switching.linearizedFactorGraph.at(5));  // P(M1)
 
-  // TODO(Varun) we cannot enforce ordering
-  // // Create ordering.
-  // Ordering ordering1;
-  // ordering1 += X(1);
-  // ordering1 += X(2);
-
   // Run update step
   isam.update(graph1);
 
+  auto discreteConditional_m1 =
+      isam[M(1)]->conditional()->asDiscreteConditional();
+  EXPECT(discreteConditional_m1->keys() == KeyVector({M(1)}));
+
   /********************************************************/
   // New factor graph for incremental update.
   HybridGaussianFactorGraph graph2;
@@ -133,14 +123,7 @@ TEST(HybridGaussianElimination, IncrementalInference) {
   graph2.push_back(switching.linearizedFactorGraph.at(4));  // P(X3)
   graph2.push_back(switching.linearizedFactorGraph.at(6));  // P(M1, M2)
 
-  // TODO(Varun) we cannot enforce ordering
-  // // Create ordering.
-  // Ordering ordering2;
-  // ordering2 += X(2);
-  // ordering2 += X(3);
-
   isam.update(graph2);
 
-  GTSAM_PRINT(isam);
-
   /********************************************************/
   // Run batch elimination so we can compare results.
@@ -150,497 +133,428 @@ TEST(HybridGaussianElimination, IncrementalInference) {
   ordering += X(3);
 
   // Now we calculate the actual factors using full elimination
-  HybridBayesNet::shared_ptr expectedHybridBayesNet;
+  HybridBayesTree::shared_ptr expectedHybridBayesTree;
   HybridGaussianFactorGraph::shared_ptr expectedRemainingGraph;
-  std::tie(expectedHybridBayesNet, expectedRemainingGraph) =
-      switching.linearizedFactorGraph.eliminatePartialSequential(ordering);
+  std::tie(expectedHybridBayesTree, expectedRemainingGraph) =
+      switching.linearizedFactorGraph.eliminatePartialMultifrontal(ordering);
 
   // The densities on X(1) should be the same
   auto x1_conditional =
       dynamic_pointer_cast<GaussianMixture>(isam[X(1)]->conditional()->inner());
-  EXPECT(
-      assert_equal(*x1_conditional, *(expectedHybridBayesNet->atGaussian(0))));
+  auto actual_x1_conditional = dynamic_pointer_cast<GaussianMixture>(
+      (*expectedHybridBayesTree)[X(1)]->conditional()->inner());
+  EXPECT(assert_equal(*x1_conditional, *actual_x1_conditional));
 
   // The densities on X(2) should be the same
   auto x2_conditional =
       dynamic_pointer_cast<GaussianMixture>(isam[X(2)]->conditional()->inner());
-  EXPECT(
-      assert_equal(*x2_conditional, *(expectedHybridBayesNet->atGaussian(1))));
+  auto actual_x2_conditional = dynamic_pointer_cast<GaussianMixture>(
+      (*expectedHybridBayesTree)[X(2)]->conditional()->inner());
+  EXPECT(assert_equal(*x2_conditional, *actual_x2_conditional));
 
-  // // The densities on X(3) should be the same
-  // auto x3_conditional =
-  //     dynamic_pointer_cast<GaussianMixture>(isam[X(3)]->conditional()->inner());
-  // EXPECT(
-  //     assert_equal(*x3_conditional, *(expectedHybridBayesNet->atGaussian(2))));
-
-  GTSAM_PRINT(*expectedHybridBayesNet);
+  // The densities on X(3) should be the same
+  auto x3_conditional =
+      dynamic_pointer_cast<GaussianMixture>(isam[X(3)]->conditional()->inner());
+  auto actual_x3_conditional = dynamic_pointer_cast<GaussianMixture>(
+      (*expectedHybridBayesTree)[X(2)]->conditional()->inner());
+  EXPECT(assert_equal(*x3_conditional, *actual_x3_conditional));
 
-  // we only do the manual continuous elimination for 0,0
-  // the other discrete probabilities on M(2) are calculated the same way
-  auto m00_prob = [&]() {
-    GaussianFactorGraph gf;
-    // gf.add(switching.linearizedFactorGraph.gaussianGraph().at(3));
-
-    DiscreteValues m00;
-    m00[M(1)] = 0, m00[M(2)] = 0;
-    // auto dcMixture =
-    //     dynamic_pointer_cast<DCGaussianMixtureFactor>(graph2.dcGraph().at(0));
-    // gf.add(dcMixture->factors()(m00));
-    // auto x2_mixed =
-    //     boost::dynamic_pointer_cast<GaussianMixture>(hybridBayesNet.at(1));
-    // gf.add(x2_mixed->factors()(m00));
-    auto result_gf = gf.eliminateSequential();
-    return gf.probPrime(result_gf->optimize());
-  }();
+  // We only perform manual continuous elimination for 0,0.
+  // The other discrete probabilities on M(2) are calculated the same way
+  Ordering discrete_ordering;
+  discrete_ordering += M(1);
+  discrete_ordering += M(2);
+  HybridBayesTree::shared_ptr discreteBayesTree =
+      expectedRemainingGraph->eliminateMultifrontal(discrete_ordering);
+
+  DiscreteValues m00;
+  m00[M(1)] = 0, m00[M(2)] = 0;
+  DiscreteConditional decisionTree =
+      *(*discreteBayesTree)[M(2)]->conditional()->asDiscreteConditional();
+  double m00_prob = decisionTree(m00);
+
+  auto discreteConditional = isam[M(2)]->conditional()->asDiscreteConditional();
 
-  /// Test if the probability values are as expected with regression tests.
-  // DiscreteValues assignment;
-  // EXPECT(assert_equal(m00_prob, 0.60656, 1e-5));
-  // assignment[M(1)] = 0;
-  // assignment[M(2)] = 0;
-  // EXPECT(assert_equal(m00_prob, (*discreteFactor)(assignment), 1e-5));
-  // assignment[M(1)] = 1;
-  // assignment[M(2)] = 0;
-  // EXPECT(assert_equal(0.612477, (*discreteFactor)(assignment), 1e-5));
-  // assignment[M(1)] = 0;
-  // assignment[M(2)] = 1;
-  // EXPECT(assert_equal(0.999952, (*discreteFactor)(assignment), 1e-5));
-  // assignment[M(1)] = 1;
-  // assignment[M(2)] = 1;
-  // EXPECT(assert_equal(1.0, (*discreteFactor)(assignment), 1e-5));
+  // Test if the probability values are as expected with regression tests.
+  DiscreteValues assignment;
+  EXPECT(assert_equal(m00_prob, 0.0619233, 1e-5));
+  assignment[M(1)] = 0;
+  assignment[M(2)] = 0;
+  EXPECT(assert_equal(m00_prob, (*discreteConditional)(assignment), 1e-5));
+  assignment[M(1)] = 1;
+  assignment[M(2)] = 0;
+  EXPECT(assert_equal(0.183743, (*discreteConditional)(assignment), 1e-5));
+  assignment[M(1)] = 0;
+  assignment[M(2)] = 1;
+  EXPECT(assert_equal(0.204159, (*discreteConditional)(assignment), 1e-5));
+  assignment[M(1)] = 1;
+  assignment[M(2)] = 1;
+  EXPECT(assert_equal(0.2, (*discreteConditional)(assignment), 1e-5));
 
-  // DiscreteFactorGraph dfg;
-  // dfg.add(*discreteFactor);
-  // dfg.add(discreteFactor_m1);
-  // dfg.add_factors(switching.linearizedFactorGraph.discreteGraph());
-
-  // // Check if the chordal graph generated from incremental elimination
-  // // matches that of batch elimination.
-  // auto chordal = dfg.eliminateSequential();
-  // auto expectedChordal =
-  //     expectedRemainingGraph->discreteGraph().eliminateSequential();
-  // EXPECT(assert_equal(*expectedChordal, *chordal, 1e-6));
+  // Check if the clique conditional generated from incremental elimination
+  // matches that of batch elimination.
+  auto expectedChordal = expectedRemainingGraph->eliminateMultifrontal();
+  auto expectedConditional = dynamic_pointer_cast<DecisionTreeFactor>(
+      (*expectedChordal)[M(2)]->conditional()->inner());
+  auto actualConditional = dynamic_pointer_cast<DecisionTreeFactor>(
+      isam[M(2)]->conditional()->inner());
+  EXPECT(assert_equal(*actualConditional, *expectedConditional, 1e-6));
 }
 /* ****************************************************************************/
+// Test if we can approximately do the inference
+TEST(HybridGaussianElimination, Approx_inference) {
+  Switching switching(4);
+  HybridGaussianISAM incrementalHybrid;
+  HybridGaussianFactorGraph graph1;
+
+  // Add the 3 hybrid factors, x1-x2, x2-x3, x3-x4
+  for (size_t i = 1; i < 4; i++) {
+    graph1.push_back(switching.linearizedFactorGraph.at(i));
+  }
+
+  // Add the Gaussian factors, 1 prior on X(1),
+  // 3 measurements on X(2), X(3), X(4)
+  graph1.push_back(switching.linearizedFactorGraph.at(0));
+  for (size_t i = 4; i <= 7; i++) {
+    graph1.push_back(switching.linearizedFactorGraph.at(i));
+  }
+
+  // Create ordering.
+  Ordering ordering;
+  for (size_t j = 1; j <= 4; j++) {
+    ordering += X(j);
+  }
+
+  // Now we calculate the actual factors using full elimination
+  HybridBayesTree::shared_ptr unprunedHybridBayesTree;
+  HybridGaussianFactorGraph::shared_ptr unprunedRemainingGraph;
+  std::tie(unprunedHybridBayesTree, unprunedRemainingGraph) =
+      switching.linearizedFactorGraph.eliminatePartialMultifrontal(ordering);
+
+  size_t maxNrLeaves = 5;
+  incrementalHybrid.update(graph1);
+  incrementalHybrid.prune(M(3), maxNrLeaves);
+
+  /*
+  unpruned factor is:
+  Choice(m3)
+  0 Choice(m2)
+  0 0 Choice(m1)
+  0 0 0 Leaf 0.11267528
+  0 0 1 Leaf 0.18576102
+  0 1 Choice(m1)
+  0 1 0 Leaf 0.18754662
+  0 1 1 Leaf 0.30623871
+  1 Choice(m2)
+  1 0 Choice(m1)
+  1 0 0 Leaf 0.18576102
+  1 0 1 Leaf 0.30622428
+  1 1 Choice(m1)
+  1 1 0 Leaf 0.30623871
+  1 1 1 Leaf 0.5
+
+  pruned factor is:
+  Choice(m3)
+  0 Choice(m2)
+  0 0 Leaf 0
+  0 1 Choice(m1)
+  0 1 0 Leaf 0.18754662
+  0 1 1 Leaf 0.30623871
+  1 Choice(m2)
+  1 0 Choice(m1)
+  1 0 0 Leaf 0
+  1 0 1 Leaf 0.30622428
+  1 1 Choice(m1)
+  1 1 0 Leaf 0.30623871
+  1 1 1 Leaf 0.5
+  */
+
+  auto discreteConditional_m1 = *dynamic_pointer_cast<DiscreteConditional>(
+      incrementalHybrid[M(1)]->conditional()->inner());
+  EXPECT(discreteConditional_m1.keys() == KeyVector({M(1), M(2), M(3)}));
+
+  // Get the number of elements which are greater than 0.
+  auto count = [](const double &value, int count) {
+    return value > 0 ? count + 1 : count;
+  };
+  // Check that the number of leaves after pruning is 5.
+  EXPECT_LONGS_EQUAL(5, discreteConditional_m1.fold(count, 0));
+
+  // Check that the hybrid nodes of the bayes net match those of the
+  // pre-pruning bayes net, at the same positions.
+  auto &unprunedLastDensity = *dynamic_pointer_cast<GaussianMixture>(
+      unprunedHybridBayesTree->clique(X(4))->conditional()->inner());
+  auto &lastDensity = *dynamic_pointer_cast<GaussianMixture>(
+      incrementalHybrid[X(4)]->conditional()->inner());
+
+  std::vector<std::pair<DiscreteValues, double>> assignments =
+      discreteConditional_m1.enumerate();
+  // Loop over all assignments and check the pruned components
+  for (auto &&av : assignments) {
+    const DiscreteValues &assignment = av.first;
+    const double value = av.second;
+
+    if (value == 0.0) {
+      EXPECT(lastDensity(assignment) == nullptr);
+    } else {
+      CHECK(lastDensity(assignment));
+      EXPECT(assert_equal(*unprunedLastDensity(assignment),
+                          *lastDensity(assignment)));
+    }
+  }
+}
 /* ****************************************************************************/
+// Test approximate inference with an additional pruning step.
+TEST(HybridGaussianElimination, Incremental_approximate) {
+  Switching switching(5);
+  HybridGaussianISAM incrementalHybrid;
+  HybridGaussianFactorGraph graph1;
+
+  /***** Run Round 1 *****/
+  // Add the 3 hybrid factors, x1-x2, x2-x3, x3-x4
+  for (size_t i = 1; i < 4; i++) {
+    graph1.push_back(switching.linearizedFactorGraph.at(i));
+  }
+
+  // Add the Gaussian factors, 1 prior on X(1),
+  // 3 measurements on X(2), X(3), X(4)
+  graph1.push_back(switching.linearizedFactorGraph.at(0));
+  for (size_t i = 5; i <= 7; i++) {
+    graph1.push_back(switching.linearizedFactorGraph.at(i));
+  }
+
+  // Run update with pruning
+  size_t maxComponents = 5;
+  incrementalHybrid.update(graph1);
+  incrementalHybrid.prune(M(3), maxComponents);
+
+  // Check if we have a bayes tree with 4 hybrid nodes,
+  // each with 2, 4, 5 (pruned), and 5 (pruned) leaves respectively.
+  EXPECT_LONGS_EQUAL(4, incrementalHybrid.size());
+  EXPECT_LONGS_EQUAL(
+      2, incrementalHybrid[X(1)]->conditional()->asMixture()->nrComponents());
+  EXPECT_LONGS_EQUAL(
+      4, incrementalHybrid[X(2)]->conditional()->asMixture()->nrComponents());
+  EXPECT_LONGS_EQUAL(
+      5, incrementalHybrid[X(3)]->conditional()->asMixture()->nrComponents());
+  EXPECT_LONGS_EQUAL(
+      5, incrementalHybrid[X(4)]->conditional()->asMixture()->nrComponents());
+
+  /***** Run Round 2 *****/
+  HybridGaussianFactorGraph graph2;
+  graph2.push_back(switching.linearizedFactorGraph.at(4));
+  graph2.push_back(switching.linearizedFactorGraph.at(8));
+
+  // Run update with pruning a second time.
+  incrementalHybrid.update(graph2);
+  incrementalHybrid.prune(M(4), maxComponents);
+
+  // Check if we have a bayes tree with pruned hybrid nodes,
+  // with 5 (pruned) leaves.
+  CHECK_EQUAL(5, incrementalHybrid.size());
+  EXPECT_LONGS_EQUAL(
+      5, incrementalHybrid[X(4)]->conditional()->asMixture()->nrComponents());
+  EXPECT_LONGS_EQUAL(
+      5, incrementalHybrid[X(5)]->conditional()->asMixture()->nrComponents());
+}
 /* ************************************************************************/
+// A GTSAM-only test for running inference on a single-legged robot.
+// The leg links are represented by the chain X-Y-Z-W, where X is the base and
+// W is the foot.
+// We use BetweenFactor<Pose2> as constraints between each of the poses.
+TEST(HybridGaussianISAM, NonTrivial) {
+  /*************** Run Round 1 ***************/
+  HybridNonlinearFactorGraph fg;
+
+  // Add a prior on pose x1 at the origin.
+  // A prior factor consists of a mean and
+  // a noise model (covariance matrix)
+  Pose2 prior(0.0, 0.0, 0.0);  // prior mean is at origin
+  auto priorNoise = noiseModel::Diagonal::Sigmas(
+      Vector3(0.3, 0.3, 0.1));  // 30cm std on x,y, 0.1 rad on theta
+  fg.emplace_nonlinear<PriorFactor<Pose2>>(X(0), prior, priorNoise);
+
+  // create a noise model for the landmark measurements
+  auto poseNoise = noiseModel::Isotropic::Sigma(3, 0.1);
+
+  // We model a robot's single leg as X - Y - Z - W
+  // where X is the base link and W is the foot link.
+
+  // Add connecting poses similar to PoseFactors in GTD
+  fg.emplace_nonlinear<BetweenFactor<Pose2>>(X(0), Y(0), Pose2(0, 1.0, 0),
+                                             poseNoise);
+  fg.emplace_nonlinear<BetweenFactor<Pose2>>(Y(0), Z(0), Pose2(0, 1.0, 0),
+                                             poseNoise);
+  fg.emplace_nonlinear<BetweenFactor<Pose2>>(Z(0), W(0), Pose2(0, 1.0, 0),
+                                             poseNoise);
+
+  // Create initial estimate
+  Values initial;
+  initial.insert(X(0), Pose2(0.0, 0.0, 0.0));
+  initial.insert(Y(0), Pose2(0.0, 1.0, 0.0));
+  initial.insert(Z(0), Pose2(0.0, 2.0, 0.0));
+  initial.insert(W(0), Pose2(0.0, 3.0, 0.0));
+
+  HybridGaussianFactorGraph gfg = fg.linearize(initial);
+  fg = HybridNonlinearFactorGraph();
+
+  HybridGaussianISAM inc;
+
+  // Update without pruning
+  // The result is a HybridBayesNet with no discrete variables
+  // (equivalent to a GaussianBayesNet).
+  // Factorization is:
+  // `P(X | measurements) = P(W0|Z0) P(Z0|Y0) P(Y0|X0) P(X0)`
+  inc.update(gfg);
+
+  /*************** Run Round 2 ***************/
+  using PlanarMotionModel = BetweenFactor<Pose2>;
+
+  // Add odometry factor with discrete modes.
+  Pose2 odometry(1.0, 0.0, 0.0);
+  KeyVector contKeys = {W(0), W(1)};
+  auto noise_model = noiseModel::Isotropic::Sigma(3, 1.0);
+  auto still = boost::make_shared<PlanarMotionModel>(W(0), W(1), Pose2(0, 0, 0),
+                                                     noise_model),
+       moving = boost::make_shared<PlanarMotionModel>(W(0), W(1), odometry,
+                                                      noise_model);
+  std::vector<PlanarMotionModel::shared_ptr> components = {moving, still};
+  auto mixtureFactor = boost::make_shared<MixtureFactor>(
+      contKeys, DiscreteKeys{gtsam::DiscreteKey(M(1), 2)}, components);
+  fg.push_back(mixtureFactor);
+
+  // Add equivalent of ImuFactor
+  fg.emplace_nonlinear<BetweenFactor<Pose2>>(X(0), X(1), Pose2(1.0, 0.0, 0),
+                                             poseNoise);
+  // PoseFactors-like at k=1
+  fg.emplace_nonlinear<BetweenFactor<Pose2>>(X(1), Y(1), Pose2(0, 1, 0),
+                                             poseNoise);
+  fg.emplace_nonlinear<BetweenFactor<Pose2>>(Y(1), Z(1), Pose2(0, 1, 0),
+                                             poseNoise);
+  fg.emplace_nonlinear<BetweenFactor<Pose2>>(Z(1), W(1), Pose2(-1, 1, 0),
+                                             poseNoise);
+
+  initial.insert(X(1), Pose2(1.0, 0.0, 0.0));
+  initial.insert(Y(1), Pose2(1.0, 1.0, 0.0));
+  initial.insert(Z(1), Pose2(1.0, 2.0, 0.0));
+  // The leg link did not move so we set the expected pose accordingly.
+  initial.insert(W(1), Pose2(0.0, 3.0, 0.0));
+
+  gfg = fg.linearize(initial);
+  fg = HybridNonlinearFactorGraph();
+
+  // Update without pruning
+  // The result is a HybridBayesNet with 1 discrete variable M(1).
+  // P(X | measurements) = P(W0|Z0, W1, M1) P(Z0|Y0, W1, M1) P(Y0|X0, W1, M1)
+  //                       P(X0 | X1, W1, M1) P(W1|Z1, X1, M1) P(Z1|Y1, X1, M1)
+  //                       P(Y1 | X1, M1)P(X1 | M1)P(M1)
+  // The MHS tree is a 1 level tree for time indices (1,) with 2 leaves.
+  inc.update(gfg);
+
+  /*************** Run Round 3 ***************/
+  // Add odometry factor with discrete modes.
+  contKeys = {W(1), W(2)};
+  still = boost::make_shared<PlanarMotionModel>(W(1), W(2), Pose2(0, 0, 0),
+                                                noise_model);
+  moving =
+      boost::make_shared<PlanarMotionModel>(W(1), W(2), odometry, noise_model);
+  components = {moving, still};
+  mixtureFactor = boost::make_shared<MixtureFactor>(
+      contKeys, DiscreteKeys{gtsam::DiscreteKey(M(2), 2)}, components);
+  fg.push_back(mixtureFactor);
+
+  // Add equivalent of ImuFactor
+  fg.emplace_nonlinear<BetweenFactor<Pose2>>(X(1), X(2), Pose2(1.0, 0.0, 0),
+                                             poseNoise);
+  // PoseFactors-like at k=2
+  fg.emplace_nonlinear<BetweenFactor<Pose2>>(X(2), Y(2), Pose2(0, 1, 0),
+                                             poseNoise);
+  fg.emplace_nonlinear<BetweenFactor<Pose2>>(Y(2), Z(2), Pose2(0, 1, 0),
+                                             poseNoise);
+  fg.emplace_nonlinear<BetweenFactor<Pose2>>(Z(2), W(2), Pose2(-2, 1, 0),
+                                             poseNoise);
+
+  initial.insert(X(2), Pose2(2.0, 0.0, 0.0));
+  initial.insert(Y(2), Pose2(2.0, 1.0, 0.0));
+  initial.insert(Z(2), Pose2(2.0, 2.0, 0.0));
+  initial.insert(W(2), Pose2(0.0, 3.0, 0.0));
+
+  gfg = fg.linearize(initial);
+  fg = HybridNonlinearFactorGraph();
+
+  // Now we prune!
+  // P(X | measurements) = P(W0|Z0, W1, M1) P(Z0|Y0, W1, M1) P(Y0|X0, W1, M1)
+  //                       P(X0 | X1, W1, M1) P(W1|W2, Z1, X1, M1, M2)
+  //                       P(Z1| W2, Y1, X1, M1, M2) P(Y1 | W2, X1, M1, M2)
+  //                       P(X1 | W2, X2, M1, M2) P(W2|Z2, X2, M1, M2)
+  //                       P(Z2|Y2, X2, M1, M2) P(Y2 | X2, M1, M2)
+  //                       P(X2 | M1, M2) P(M1, M2)
+  // The MHS at this point should be a 2 level tree on (1, 2).
+  // 1 has 2 choices, and 2 has 4 choices.
+  inc.update(gfg);
+  inc.prune(M(2), 2);
+
+  /*************** Run Round 4 ***************/
+  // Add odometry factor with discrete modes.
+  contKeys = {W(2), W(3)};
+  still = boost::make_shared<PlanarMotionModel>(W(2), W(3), Pose2(0, 0, 0),
+                                                noise_model);
+  moving =
+      boost::make_shared<PlanarMotionModel>(W(2), W(3), odometry, noise_model);
+  components = {moving, still};
+  mixtureFactor = boost::make_shared<MixtureFactor>(
+      contKeys, DiscreteKeys{gtsam::DiscreteKey(M(3), 2)}, components);
+  fg.push_back(mixtureFactor);
+
+  // Add equivalent of ImuFactor
+  fg.emplace_nonlinear<BetweenFactor<Pose2>>(X(2), X(3), Pose2(1.0, 0.0, 0),
+                                             poseNoise);
+  // PoseFactors-like at k=3
+  fg.emplace_nonlinear<BetweenFactor<Pose2>>(X(3), Y(3), Pose2(0, 1, 0),
+                                             poseNoise);
+  fg.emplace_nonlinear<BetweenFactor<Pose2>>(Y(3), Z(3), Pose2(0, 1, 0),
+                                             poseNoise);
+  fg.emplace_nonlinear<BetweenFactor<Pose2>>(Z(3), W(3), Pose2(-3, 1, 0),
+                                             poseNoise);
+
+  initial.insert(X(3), Pose2(3.0, 0.0, 0.0));
+  initial.insert(Y(3), Pose2(3.0, 1.0, 0.0));
+  initial.insert(Z(3), Pose2(3.0, 2.0, 0.0));
+  initial.insert(W(3), Pose2(0.0, 3.0, 0.0));
+
+  gfg = fg.linearize(initial);
+  fg = HybridNonlinearFactorGraph();
+
+  // Keep pruning!
+  inc.update(gfg);
+  inc.prune(M(3), 3);
+
+  // The final discrete graph should not be empty since we have eliminated
+  // all continuous variables.
+  auto discreteTree = inc[M(3)]->conditional()->asDiscreteConditional();
+  EXPECT_LONGS_EQUAL(3, discreteTree->size());
+
+  // Test if the optimal discrete mode assignment is (1, 1, 1).
+  DiscreteFactorGraph discreteGraph;
+  discreteGraph.push_back(discreteTree);
+  DiscreteValues optimal_assignment = discreteGraph.optimize();
+
+  DiscreteValues expected_assignment;
+  expected_assignment[M(1)] = 1;
+  expected_assignment[M(2)] = 1;
+  expected_assignment[M(3)] = 1;
+
+  EXPECT(assert_equal(expected_assignment, optimal_assignment));
+
+  // Test if pruning worked correctly by checking that we only have 3 leaves
+  // in the last node.
+  auto lastConditional = inc[X(3)]->conditional()->asMixture();
+  EXPECT_LONGS_EQUAL(3, lastConditional->nrComponents());
+}
 /* ************************************************************************* */
 int main() {