Merge pull request #1883 from borglab/feature/hybridISAM

Some Hybrid iSAM cleanup
release/4.3a0
Frank Dellaert 2024-10-23 10:36:30 -07:00 committed by GitHub
commit 0ffe6c9303
9 changed files with 174 additions and 223 deletions

View File

@@ -132,6 +132,14 @@ class GTSAM_EXPORT HybridGaussianFactorGraph
   explicit HybridGaussianFactorGraph(const CONTAINER& factors)
       : Base(factors) {}
 
+  /**
+   * Construct from an initializer list of GaussianFactor shared pointers.
+   * Example:
+   *   HybridGaussianFactorGraph graph = { factor1, factor2, factor3 };
+   */
+  HybridGaussianFactorGraph(std::initializer_list<sharedFactor> factors)
+      : Base(factors) {}
+
   /**
    * Implicit copy/downcast constructor to override explicit template container
    * constructor. In BayesTree this is used for:
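
For illustration (not part of this diff): a minimal sketch of how the new initializer-list constructor could be used. The two Jacobian factors and the function name are placeholders invented for the example, not code from this PR.

// Sketch only: exercises the initializer_list constructor added above.
// The factors are made-up placeholders.
#include <gtsam/hybrid/HybridGaussianFactorGraph.h>
#include <gtsam/linear/JacobianFactor.h>

#include <memory>

using namespace gtsam;

HybridGaussianFactorGraph makeSmallGraph() {
  auto prior = std::make_shared<JacobianFactor>(0, I_1x1, Vector1(0.0));
  auto between =
      std::make_shared<JacobianFactor>(0, -I_1x1, 1, I_1x1, Vector1(1.0));

  // Brace-initialize directly from shared factor pointers.
  HybridGaussianFactorGraph graph = {prior, between};
  return graph;
}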

View File

@@ -10,7 +10,7 @@
  * -------------------------------------------------------------------------- */
 
 /**
- * @file    HybridGaussianISAM.h
+ * @file    HybridGaussianISAM.cpp
  * @date    March 31, 2022
  * @author  Fan Jiang
  * @author  Frank Dellaert

View File

@@ -39,8 +39,8 @@ void HybridNonlinearISAM::update(const HybridNonlinearFactorGraph& newFactors,
   if (newFactors.size() > 0) {
     // Reorder and relinearize every reorderInterval updates
     if (reorderInterval_ > 0 && ++reorderCounter_ >= reorderInterval_) {
-      // TODO(Varun) Relinearization doesn't take into account pruning
-      reorder_relinearize();
+      // TODO(Varun) Re-linearization doesn't take into account pruning
+      reorderRelinearize();
       reorderCounter_ = 0;
     }
@@ -60,7 +60,7 @@ void HybridNonlinearISAM::update(const HybridNonlinearFactorGraph& newFactors,
 }
 
 /* ************************************************************************* */
-void HybridNonlinearISAM::reorder_relinearize() {
+void HybridNonlinearISAM::reorderRelinearize() {
   if (factors_.size() > 0) {
     // Obtain the new linearization point
     const Values newLinPoint = estimate();
@@ -69,7 +69,7 @@ void HybridNonlinearISAM::reorder_relinearize() {
     // Just recreate the whole BayesTree
     // TODO: allow for constrained ordering here
-    // TODO: decouple relinearization and reordering to avoid
+    // TODO: decouple re-linearization and reordering to avoid
     isam_.update(*factors_.linearize(newLinPoint), {}, {},
                  eliminationFunction_);

View File

@@ -37,7 +37,7 @@ class GTSAM_EXPORT HybridNonlinearISAM {
   /// The discrete assignment
   DiscreteValues assignment_;
 
-  /** The original factors, used when relinearizing */
+  /** The original factors, used when re-linearizing */
   HybridNonlinearFactorGraph factors_;
 
   /** The reordering interval and counter */
@@ -52,8 +52,8 @@ class GTSAM_EXPORT HybridNonlinearISAM {
   /// @{
 
   /**
-   * Periodically reorder and relinearize
-   * @param reorderInterval is the number of updates between reorderings,
+   * Periodically reorder and re-linearize
+   * @param reorderInterval is the number of updates between re-orderings,
    *   0 never reorders (and is dangerous for memory consumption)
    *   1 (default) reorders every time, in worse case is batch every update
    *   typical values are 50 or 100
@@ -124,8 +124,8 @@ class GTSAM_EXPORT HybridNonlinearISAM {
                 const std::optional<size_t>& maxNrLeaves = {},
                 const std::optional<Ordering>& ordering = {});
 
-  /** Relinearization and reordering of variables */
-  void reorder_relinearize();
+  /** Re-linearization and reordering of variables */
+  void reorderRelinearize();
 
   /// @}
 };
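
Usage note, not part of the diff: a hedged sketch of how this class is typically driven, assuming the constructor takes the reorder interval documented above and that update accepts a factor graph plus initial values; treat the exact signatures as illustrative.

// Illustrative sketch only; constructor and update signatures are assumed
// from the documentation above, not verified against this PR.
#include <gtsam/hybrid/HybridNonlinearISAM.h>
#include <gtsam/nonlinear/Values.h>

#include <vector>

using namespace gtsam;

void runIncrementally(const std::vector<HybridNonlinearFactorGraph>& newFactors,
                      const std::vector<Values>& newValues) {
  HybridNonlinearISAM isam(50);  // reorder and re-linearize every 50 updates
  for (size_t k = 0; k < newFactors.size(); k++) {
    isam.update(newFactors[k], newValues[k]);
  }
  // The renamed method can also be called explicitly to force a batch
  // re-linearization at the current estimate.
  isam.reorderRelinearize();
}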

View File

@@ -120,12 +120,21 @@ using MotionModel = BetweenFactor<double>;
 // Test fixture with switching network.
 /// ϕ(X(0)) .. ϕ(X(k),X(k+1)) .. ϕ(X(k);z_k) .. ϕ(M(0)) .. ϕ(M(K-3),M(K-2))
 struct Switching {
+ private:
+  HybridNonlinearFactorGraph nonlinearFactorGraph_;
+
+ public:
   size_t K;
   DiscreteKeys modes;
-  HybridNonlinearFactorGraph nonlinearFactorGraph;
+  HybridNonlinearFactorGraph unaryFactors, binaryFactors, modeChain;
   HybridGaussianFactorGraph linearizedFactorGraph;
   Values linearizationPoint;
 
+  // Access the flat nonlinear factor graph.
+  const HybridNonlinearFactorGraph &nonlinearFactorGraph() const {
+    return nonlinearFactorGraph_;
+  }
+
   /**
    * @brief Create with given number of time steps.
    *
@@ -136,7 +145,7 @@ struct Switching {
    */
   Switching(size_t K, double between_sigma = 1.0, double prior_sigma = 0.1,
             std::vector<double> measurements = {},
-            std::string discrete_transition_prob = "1/2 3/2")
+            std::string transitionProbabilityTable = "1/2 3/2")
       : K(K) {
     using noiseModel::Isotropic;
 
@@ -155,32 +164,36 @@ struct Switching {
     // Create hybrid factor graph.
     // Add a prior ϕ(X(0)) on X(0).
-    nonlinearFactorGraph.emplace_shared<PriorFactor<double>>(
+    nonlinearFactorGraph_.emplace_shared<PriorFactor<double>>(
         X(0), measurements.at(0), Isotropic::Sigma(1, prior_sigma));
+    unaryFactors.push_back(nonlinearFactorGraph_.back());
 
     // Add "motion models" ϕ(X(k),X(k+1),M(k)).
     for (size_t k = 0; k < K - 1; k++) {
       auto motion_models = motionModels(k, between_sigma);
-      nonlinearFactorGraph.emplace_shared<HybridNonlinearFactor>(modes[k],
-                                                                 motion_models);
+      nonlinearFactorGraph_.emplace_shared<HybridNonlinearFactor>(modes[k],
+                                                                  motion_models);
+      binaryFactors.push_back(nonlinearFactorGraph_.back());
     }
 
     // Add measurement factors ϕ(X(k);z_k).
     auto measurement_noise = Isotropic::Sigma(1, prior_sigma);
     for (size_t k = 1; k < K; k++) {
-      nonlinearFactorGraph.emplace_shared<PriorFactor<double>>(
+      nonlinearFactorGraph_.emplace_shared<PriorFactor<double>>(
           X(k), measurements.at(k), measurement_noise);
+      unaryFactors.push_back(nonlinearFactorGraph_.back());
     }
 
     // Add "mode chain" ϕ(M(0)) ϕ(M(0),M(1)) ... ϕ(M(K-3),M(K-2))
-    addModeChain(&nonlinearFactorGraph, discrete_transition_prob);
+    modeChain = createModeChain(transitionProbabilityTable);
+    nonlinearFactorGraph_ += modeChain;
 
     // Create the linearization point.
     for (size_t k = 0; k < K; k++) {
       linearizationPoint.insert<double>(X(k), static_cast<double>(k + 1));
     }
 
-    linearizedFactorGraph = *nonlinearFactorGraph.linearize(linearizationPoint);
+    linearizedFactorGraph = *nonlinearFactorGraph_.linearize(linearizationPoint);
   }
 
   // Create motion models for a given time step
@@ -200,15 +213,16 @@ struct Switching {
    *
    * @param fg The factor graph to which the mode chain is added.
    */
-  template <typename FACTORGRAPH>
-  void addModeChain(FACTORGRAPH *fg,
-                    std::string discrete_transition_prob = "1/2 3/2") {
-    fg->template emplace_shared<DiscreteDistribution>(modes[0], "1/1");
+  HybridNonlinearFactorGraph createModeChain(
+      std::string transitionProbabilityTable = "1/2 3/2") {
+    HybridNonlinearFactorGraph chain;
+    chain.emplace_shared<DiscreteDistribution>(modes[0], "1/1");
     for (size_t k = 0; k < K - 2; k++) {
       auto parents = {modes[k]};
-      fg->template emplace_shared<DiscreteConditional>(
-          modes[k + 1], parents, discrete_transition_prob);
+      chain.emplace_shared<DiscreteConditional>(modes[k + 1], parents,
+                                                transitionProbabilityTable);
     }
+    return chain;
   }
 };
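
As context, not part of the diff: splitting the fixture into unaryFactors, binaryFactors, and modeChain lets the tests feed factors one time step at a time instead of indexing into a flat graph. A minimal sketch of that per-time-step pattern, mirroring the updated test loops below; the fixture size K and the loop body are illustrative.

// Sketch of the per-time-step pattern used by the updated tests below.
// Assumes the Switching fixture above and the HybridNonlinearISAM interface
// shown elsewhere in this PR; details are illustrative.
#include <gtsam/hybrid/HybridNonlinearISAM.h>
#include "Switching.h"

using namespace gtsam;
using symbol_shorthand::X;

void incrementalLoop(size_t K) {
  Switching switching(K);
  HybridNonlinearISAM isam;
  HybridNonlinearFactorGraph graph;
  Values initial;

  // Time step 0: prior on X(0) and prior on M(0).
  graph.push_back(switching.unaryFactors.at(0));
  graph.push_back(switching.modeChain.at(0));
  initial.insert(X(0), switching.linearizationPoint.at<double>(X(0)));
  isam.update(graph, initial);

  for (size_t k = 1; k < K; k++) {
    graph.resize(0);
    initial.clear();
    if (k > 1) graph.push_back(switching.modeChain.at(k - 1));  // ϕ(M(k-2),M(k-1))
    graph.push_back(switching.binaryFactors.at(k - 1));         // ϕ(X(k-1),X(k);M(k-1))
    graph.push_back(switching.unaryFactors.at(k));              // ϕ(X(k);z_k)
    initial.insert(X(k), switching.linearizationPoint.at<double>(X(k)));
    isam.update(graph, initial);
  }
}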

View File

@@ -37,6 +37,8 @@
 // Include for test suite
 #include <CppUnitLite/TestHarness.h>
 
+#include <string>
+
 #include "Switching.h"
 
 using namespace std;
@@ -55,13 +57,16 @@ std::vector<size_t> discrete_seq = {1, 1, 0, 0, 0, 1, 1, 1, 1, 0,
 Switching InitializeEstimationProblem(
     const size_t K, const double between_sigma, const double measurement_sigma,
     const std::vector<double>& measurements,
-    const std::string& discrete_transition_prob,
+    const std::string& transitionProbabilityTable,
     HybridNonlinearFactorGraph& graph, Values& initial) {
   Switching switching(K, between_sigma, measurement_sigma, measurements,
-                      discrete_transition_prob);
+                      transitionProbabilityTable);
+
+  // Add prior on M(0)
+  graph.push_back(switching.modeChain.at(0));
 
   // Add the X(0) prior
-  graph.push_back(switching.nonlinearFactorGraph.at(0));
+  graph.push_back(switching.unaryFactors.at(0));
   initial.insert(X(0), switching.linearizationPoint.at<double>(X(0)));
 
   return switching;
@@ -128,10 +133,9 @@ TEST(HybridEstimation, IncrementalSmoother) {
   constexpr size_t maxNrLeaves = 3;
   for (size_t k = 1; k < K; k++) {
-    // Motion Model
-    graph.push_back(switching.nonlinearFactorGraph.at(k));
-    // Measurement
-    graph.push_back(switching.nonlinearFactorGraph.at(k + K - 1));
+    if (k > 1) graph.push_back(switching.modeChain.at(k - 1));  // Mode chain
+    graph.push_back(switching.binaryFactors.at(k - 1));         // Motion Model
+    graph.push_back(switching.unaryFactors.at(k));              // Measurement
 
     initial.insert(X(k), switching.linearizationPoint.at<double>(X(k)));
@@ -176,10 +180,9 @@ TEST(HybridEstimation, ValidPruningError) {
   constexpr size_t maxNrLeaves = 3;
   for (size_t k = 1; k < K; k++) {
-    // Motion Model
-    graph.push_back(switching.nonlinearFactorGraph.at(k));
-    // Measurement
-    graph.push_back(switching.nonlinearFactorGraph.at(k + K - 1));
+    if (k > 1) graph.push_back(switching.modeChain.at(k - 1));  // Mode chain
+    graph.push_back(switching.binaryFactors.at(k - 1));         // Motion Model
+    graph.push_back(switching.unaryFactors.at(k));              // Measurement
 
     initial.insert(X(k), switching.linearizationPoint.at<double>(X(k)));
@@ -225,15 +228,17 @@ TEST(HybridEstimation, ISAM) {
   HybridGaussianFactorGraph linearized;
 
+  const size_t maxNrLeaves = 3;
   for (size_t k = 1; k < K; k++) {
-    // Motion Model
-    graph.push_back(switching.nonlinearFactorGraph.at(k));
-    // Measurement
-    graph.push_back(switching.nonlinearFactorGraph.at(k + K - 1));
+    if (k > 1) graph.push_back(switching.modeChain.at(k - 1));  // Mode chain
+    graph.push_back(switching.binaryFactors.at(k - 1));         // Motion Model
+    graph.push_back(switching.unaryFactors.at(k));              // Measurement
 
     initial.insert(X(k), switching.linearizationPoint.at<double>(X(k)));
 
-    isam.update(graph, initial, 3);
-    // isam.saveGraph("NLiSAM" + std::to_string(k) + ".dot");
-    // GTSAM_PRINT(isam);
+    isam.update(graph, initial, maxNrLeaves);
 
     graph.resize(0);
     initial.clear();
@@ -339,12 +344,8 @@ TEST(HybridEstimation, Probability) {
   HybridValues hybrid_values = bayesNet->optimize();
 
   // This is the correct sequence as designed
-  DiscreteValues discrete_seq;
-  discrete_seq[M(0)] = 1;
-  discrete_seq[M(1)] = 1;
-  discrete_seq[M(2)] = 0;
-  EXPECT(assert_equal(discrete_seq, hybrid_values.discrete()));
+  DiscreteValues expectedSequence{{M(0), 1}, {M(1), 1}, {M(2), 0}};
+  EXPECT(assert_equal(expectedSequence, hybrid_values.discrete()));
 }
 
 /****************************************************************************/
@@ -411,12 +412,8 @@ TEST(HybridEstimation, ProbabilityMultifrontal) {
   HybridValues hybrid_values = discreteBayesTree->optimize();
 
   // This is the correct sequence as designed
-  DiscreteValues discrete_seq;
-  discrete_seq[M(0)] = 1;
-  discrete_seq[M(1)] = 1;
-  discrete_seq[M(2)] = 0;
-  EXPECT(assert_equal(discrete_seq, hybrid_values.discrete()));
+  DiscreteValues expectedSequence{{M(0), 1}, {M(1), 1}, {M(2), 0}};
+  EXPECT(assert_equal(expectedSequence, hybrid_values.discrete()));
 }
 
 /*********************************************************************************

View File

@@ -10,7 +10,7 @@
  * -------------------------------------------------------------------------- */
 
 /**
- * @file    testHybridIncremental.cpp
+ * @file    testHybridGaussianISAM.cpp
  * @brief   Unit tests for incremental inference
  * @author  Fan Jiang, Varun Agrawal, Frank Dellaert
 * @date    Jan 2021
@@ -27,8 +27,6 @@
 #include <gtsam/nonlinear/PriorFactor.h>
 #include <gtsam/sam/BearingRangeFactor.h>
 
-#include <numeric>
-
 #include "Switching.h"
 
 // Include for test suite
@@ -36,77 +34,63 @@
 using namespace std;
 using namespace gtsam;
-using noiseModel::Isotropic;
-using symbol_shorthand::L;
 using symbol_shorthand::M;
 using symbol_shorthand::W;
 using symbol_shorthand::X;
 using symbol_shorthand::Y;
 using symbol_shorthand::Z;
 
+/* ****************************************************************************/
+namespace switching3 {
+// ϕ(x0) ϕ(x0,x1,m0) ϕ(x1,x2,m1) ϕ(x1;z1) ϕ(x2;z2) ϕ(m0) ϕ(m0,m1)
+const Switching switching(3);
+const HybridGaussianFactorGraph &lfg = switching.linearizedFactorGraph;
+
+// First update graph: ϕ(x0) ϕ(x0,x1,m0) ϕ(m0)
+const HybridGaussianFactorGraph graph1{lfg.at(0), lfg.at(1), lfg.at(5)};
+
+// Second update graph: ϕ(x1,x2,m1) ϕ(x1;z1) ϕ(x2;z2) ϕ(m0,m1)
+const HybridGaussianFactorGraph graph2{lfg.at(2), lfg.at(3), lfg.at(4),
+                                       lfg.at(6)};
+}  // namespace switching3
+
 /* ****************************************************************************/
 // Test if we can perform elimination incrementally.
 TEST(HybridGaussianElimination, IncrementalElimination) {
-  Switching switching(3);
+  using namespace switching3;
   HybridGaussianISAM isam;
-  HybridGaussianFactorGraph graph1;
 
-  // Create initial factor graph
-  // * * *
-  // | | |
-  // X0 -*- X1 -*- X2
-  //  \*-M0-*/
-  graph1.push_back(switching.linearizedFactorGraph.at(0));  // P(X0)
-  graph1.push_back(switching.linearizedFactorGraph.at(1));  // P(X0, X1 | M0)
-  graph1.push_back(switching.linearizedFactorGraph.at(2));  // P(X1, X2 | M1)
-  graph1.push_back(switching.linearizedFactorGraph.at(5));  // P(M0)
-
-  // Run update step
+  // Run first update step
   isam.update(graph1);
 
   // Check that after update we have 2 hybrid Bayes net nodes:
-  // P(X0 | X1, M0) and P(X1, X2 | M0, M1), P(M0, M1)
-  EXPECT_LONGS_EQUAL(3, isam.size());
-  EXPECT(isam[X(0)]->conditional()->frontals() == KeyVector{X(0)});
-  EXPECT(isam[X(0)]->conditional()->parents() == KeyVector({X(1), M(0)}));
-  EXPECT(isam[X(1)]->conditional()->frontals() == KeyVector({X(1), X(2)}));
-  EXPECT(isam[X(1)]->conditional()->parents() == KeyVector({M(0), M(1)}));
+  // P(M0) and P(X0, X1 | M0)
+  EXPECT_LONGS_EQUAL(2, isam.size());
+  EXPECT(isam[M(0)]->conditional()->frontals() == KeyVector({M(0)}));
+  EXPECT(isam[M(0)]->conditional()->parents() == KeyVector());
+  EXPECT(isam[X(0)]->conditional()->frontals() == KeyVector({X(0), X(1)}));
+  EXPECT(isam[X(0)]->conditional()->parents() == KeyVector({M(0)}));
 
   /********************************************************/
-  // New factor graph for incremental update.
-  HybridGaussianFactorGraph graph2;
-
-  graph1.push_back(switching.linearizedFactorGraph.at(3));  // P(X1)
-  graph2.push_back(switching.linearizedFactorGraph.at(4));  // P(X2)
-  graph2.push_back(switching.linearizedFactorGraph.at(6));  // P(M0, M1)
-
+  // Run second update step
   isam.update(graph2);
 
-  // Check that after the second update we have
-  // 1 additional hybrid Bayes net node:
-  // P(X1, X2 | M0, M1)
+  // Check that after update we have 3 hybrid Bayes net nodes:
+  // P(M0, M1), P(X0 | X1, M0), and P(X1, X2 | M0, M1)
   EXPECT_LONGS_EQUAL(3, isam.size());
-  EXPECT(isam[X(2)]->conditional()->frontals() == KeyVector({X(1), X(2)}));
-  EXPECT(isam[X(2)]->conditional()->parents() == KeyVector({M(0), M(1)}));
+  EXPECT(isam[M(0)]->conditional()->frontals() == KeyVector({M(0), M(1)}));
+  EXPECT(isam[M(0)]->conditional()->parents() == KeyVector());
+  EXPECT(isam[X(1)]->conditional()->frontals() == KeyVector({X(1), X(2)}));
+  EXPECT(isam[X(1)]->conditional()->parents() == KeyVector({M(0), M(1)}));
+  EXPECT(isam[X(0)]->conditional()->frontals() == KeyVector{X(0)});
+  EXPECT(isam[X(0)]->conditional()->parents() == KeyVector({X(1), M(0)}));
 }
 /* ****************************************************************************/
 // Test if we can incrementally do the inference
 TEST(HybridGaussianElimination, IncrementalInference) {
-  Switching switching(3);
+  using namespace switching3;
   HybridGaussianISAM isam;
-  HybridGaussianFactorGraph graph1;
-
-  // Create initial factor graph
-  // * * *
-  // | | |
-  // X0 -*- X1 -*- X2
-  //       |  |
-  //      *-M0 - * - M1
-  graph1.push_back(switching.linearizedFactorGraph.at(0));  // P(X0)
-  graph1.push_back(switching.linearizedFactorGraph.at(1));  // P(X0, X1 | M0)
-  graph1.push_back(switching.linearizedFactorGraph.at(3));  // P(X1)
-  graph1.push_back(switching.linearizedFactorGraph.at(5));  // P(M0)
 
   // Run update step
   isam.update(graph1);
@@ -115,13 +99,7 @@ TEST(HybridGaussianElimination, IncrementalInference) {
   EXPECT(discreteConditional_m0->keys() == KeyVector({M(0)}));
 
   /********************************************************/
-  // New factor graph for incremental update.
-  HybridGaussianFactorGraph graph2;
-
-  graph2.push_back(switching.linearizedFactorGraph.at(2));  // P(X1, X2 | M1)
-  graph2.push_back(switching.linearizedFactorGraph.at(4));  // P(X2)
-  graph2.push_back(switching.linearizedFactorGraph.at(6));  // P(M0, M1)
-
+  // Second incremental update.
   isam.update(graph2);
 
   /********************************************************/
@@ -160,44 +138,19 @@ TEST(HybridGaussianElimination, IncrementalInference) {
   // The other discrete probabilities on M(2) are calculated the same way
   const Ordering discreteOrdering{M(0), M(1)};
   HybridBayesTree::shared_ptr discreteBayesTree =
-      expectedRemainingGraph->BaseEliminateable::eliminateMultifrontal(
-          discreteOrdering);
-
-  DiscreteValues m00;
-  m00[M(0)] = 0, m00[M(1)] = 0;
-  DiscreteConditional decisionTree =
-      *(*discreteBayesTree)[M(1)]->conditional()->asDiscrete();
-  double m00_prob = decisionTree(m00);
-
-  auto discreteConditional = isam[M(1)]->conditional()->asDiscrete();
+      expectedRemainingGraph->eliminateMultifrontal(discreteOrdering);
 
   // Test the probability values with regression tests.
-  DiscreteValues assignment;
-  EXPECT(assert_equal(0.0952922, m00_prob, 1e-5));
-  assignment[M(0)] = 0;
-  assignment[M(1)] = 0;
-  EXPECT(assert_equal(0.0952922, (*discreteConditional)(assignment), 1e-5));
-  assignment[M(0)] = 1;
-  assignment[M(1)] = 0;
-  EXPECT(assert_equal(0.282758, (*discreteConditional)(assignment), 1e-5));
-  assignment[M(0)] = 0;
-  assignment[M(1)] = 1;
-  EXPECT(assert_equal(0.314175, (*discreteConditional)(assignment), 1e-5));
-  assignment[M(0)] = 1;
-  assignment[M(1)] = 1;
-  EXPECT(assert_equal(0.307775, (*discreteConditional)(assignment), 1e-5));
+  auto discrete = isam[M(1)]->conditional()->asDiscrete();
+  EXPECT(assert_equal(0.095292, (*discrete)({{M(0), 0}, {M(1), 0}}), 1e-5));
+  EXPECT(assert_equal(0.282758, (*discrete)({{M(0), 1}, {M(1), 0}}), 1e-5));
+  EXPECT(assert_equal(0.314175, (*discrete)({{M(0), 0}, {M(1), 1}}), 1e-5));
+  EXPECT(assert_equal(0.307775, (*discrete)({{M(0), 1}, {M(1), 1}}), 1e-5));
 
-  // Check if the clique conditional generated from incremental elimination
+  // Check that the clique conditional generated from incremental elimination
   // matches that of batch elimination.
-  auto expectedChordal =
-      expectedRemainingGraph->BaseEliminateable::eliminateMultifrontal();
-  auto actualConditional = dynamic_pointer_cast<DecisionTreeFactor>(
-      isam[M(1)]->conditional()->inner());
-  // Account for the probability terms from evaluating continuous FGs
-  DiscreteKeys discrete_keys = {{M(0), 2}, {M(1), 2}};
-  vector<double> probs = {0.095292197, 0.31417524, 0.28275772, 0.30777485};
-  auto expectedConditional =
-      std::make_shared<DecisionTreeFactor>(discrete_keys, probs);
+  auto expectedConditional = (*discreteBayesTree)[M(1)]->conditional();
+  auto actualConditional = isam[M(1)]->conditional();
   EXPECT(assert_equal(*expectedConditional, *actualConditional, 1e-6));
 }
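
Aside, not part of the diff: the rewritten regression checks evaluate a discrete conditional directly at a brace-initialized DiscreteValues assignment. A small standalone sketch of that pattern; the conditional and its "4/1 1/4" table are invented for illustration.

// Standalone sketch; only the evaluation pattern mirrors the tests above.
#include <gtsam/discrete/DiscreteConditional.h>
#include <gtsam/inference/Symbol.h>

using namespace gtsam;
using symbol_shorthand::M;

double evaluateExample() {
  DiscreteKey m0(M(0), 2), m1(M(1), 2);
  // P(M1 | M0), one column of probabilities per parent value.
  DiscreteConditional conditional(m1, {m0}, "4/1 1/4");

  // Evaluate at a braced assignment, as the regression tests do.
  return conditional({{M(0), 0}, {M(1), 1}});  // expected 0.2 for this table
}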
@@ -227,7 +180,7 @@ TEST(HybridGaussianElimination, Approx_inference) {
   }
 
   // Now we calculate the actual factors using full elimination
-  const auto [unprunedHybridBayesTree, unprunedRemainingGraph] =
+  const auto [unPrunedHybridBayesTree, unPrunedRemainingGraph] =
       switching.linearizedFactorGraph.eliminatePartialMultifrontal(ordering);
 
   size_t maxNrLeaves = 5;
@@ -236,7 +189,7 @@ TEST(HybridGaussianElimination, Approx_inference) {
   incrementalHybrid.prune(maxNrLeaves);
 
   /*
-  unpruned factor is:
+  unPruned factor is:
   Choice(m3)
   0 Choice(m2)
   0 0 Choice(m1)
@@ -282,8 +235,8 @@ TEST(HybridGaussianElimination, Approx_inference) {
   // Check that the hybrid nodes of the bayes net match those of the pre-pruning
   // bayes net, at the same positions.
-  auto &unprunedLastDensity = *dynamic_pointer_cast<HybridGaussianConditional>(
-      unprunedHybridBayesTree->clique(X(3))->conditional()->inner());
+  auto &unPrunedLastDensity = *dynamic_pointer_cast<HybridGaussianConditional>(
+      unPrunedHybridBayesTree->clique(X(3))->conditional()->inner());
   auto &lastDensity = *dynamic_pointer_cast<HybridGaussianConditional>(
       incrementalHybrid[X(3)]->conditional()->inner());
@@ -298,7 +251,7 @@ TEST(HybridGaussianElimination, Approx_inference) {
       EXPECT(lastDensity(assignment) == nullptr);
     } else {
       CHECK(lastDensity(assignment));
-      EXPECT(assert_equal(*unprunedLastDensity(assignment),
+      EXPECT(assert_equal(*unPrunedLastDensity(assignment),
                           *lastDensity(assignment)));
     }
   }
@@ -306,7 +259,7 @@
 /* ****************************************************************************/
 // Test approximate inference with an additional pruning step.
-TEST(HybridGaussianElimination, Incremental_approximate) {
+TEST(HybridGaussianElimination, IncrementalApproximate) {
   Switching switching(5);
   HybridGaussianISAM incrementalHybrid;
   HybridGaussianFactorGraph graph1;
@@ -330,7 +283,7 @@
   incrementalHybrid.prune(maxComponents);
 
   // Check if we have a bayes tree with 4 hybrid nodes,
-  // each with 2, 4, 8, and 5 (pruned) leaves respetively.
+  // each with 2, 4, 8, and 5 (pruned) leaves respectively.
   EXPECT_LONGS_EQUAL(4, incrementalHybrid.size());
   EXPECT_LONGS_EQUAL(
       2, incrementalHybrid[X(0)]->conditional()->asHybrid()->nrComponents());

View File

@@ -216,8 +216,8 @@ TEST(HybridNonlinearFactorGraph, PushBack) {
 TEST(HybridNonlinearFactorGraph, ErrorTree) {
   Switching s(3);
 
-  HybridNonlinearFactorGraph graph = s.nonlinearFactorGraph;
-  Values values = s.linearizationPoint;
+  const HybridNonlinearFactorGraph &graph = s.nonlinearFactorGraph();
+  const Values &values = s.linearizationPoint;
 
   auto error_tree = graph.errorTree(s.linearizationPoint);
@@ -248,7 +248,7 @@ TEST(HybridNonlinearFactorGraph, ErrorTree) {
 TEST(HybridNonlinearFactorGraph, Switching) {
   Switching self(3);
 
-  EXPECT_LONGS_EQUAL(7, self.nonlinearFactorGraph.size());
+  EXPECT_LONGS_EQUAL(7, self.nonlinearFactorGraph().size());
   EXPECT_LONGS_EQUAL(7, self.linearizedFactorGraph.size());
 }
@@ -260,7 +260,7 @@ TEST(HybridNonlinearFactorGraph, Linearization) {
   // Linearize here:
   HybridGaussianFactorGraph actualLinearized =
-      *self.nonlinearFactorGraph.linearize(self.linearizationPoint);
+      *self.nonlinearFactorGraph().linearize(self.linearizationPoint);
 
   EXPECT_LONGS_EQUAL(7, actualLinearized.size());
 }
@@ -409,7 +409,7 @@ TEST(HybridNonlinearFactorGraph, Partial_Elimination) {
 /* ****************************************************************************/
 TEST(HybridNonlinearFactorGraph, Error) {
   Switching self(3);
 
-  HybridNonlinearFactorGraph fg = self.nonlinearFactorGraph;
+  HybridNonlinearFactorGraph fg = self.nonlinearFactorGraph();
 
   {
     HybridValues values(VectorValues(), DiscreteValues{{M(0), 0}, {M(1), 0}},
@@ -441,8 +441,9 @@ TEST(HybridNonlinearFactorGraph, Error) {
 TEST(HybridNonlinearFactorGraph, PrintErrors) {
   Switching self(3);
 
-  // Get nonlinear factor graph and add linear factors to be holistic
-  HybridNonlinearFactorGraph fg = self.nonlinearFactorGraph;
+  // Get nonlinear factor graph and add linear factors to be holistic.
+  // TODO(Frank): ???
+  HybridNonlinearFactorGraph fg = self.nonlinearFactorGraph();
   fg.add(self.linearizedFactorGraph);
 
   // Optimize to get HybridValues

View File

@@ -57,10 +57,10 @@ TEST(HybridNonlinearISAM, IncrementalElimination) {
   // | | |
   // X0 -*- X1 -*- X2
   //  \*-M0-*/
-  graph1.push_back(switching.nonlinearFactorGraph.at(0));  // P(X0)
-  graph1.push_back(switching.nonlinearFactorGraph.at(1));  // P(X0, X1 | M0)
-  graph1.push_back(switching.nonlinearFactorGraph.at(2));  // P(X1, X2 | M1)
-  graph1.push_back(switching.nonlinearFactorGraph.at(5));  // P(M0)
+  graph1.push_back(switching.unaryFactors.at(0));   // P(X0)
+  graph1.push_back(switching.binaryFactors.at(0));  // P(X0, X1 | M0)
+  graph1.push_back(switching.binaryFactors.at(1));  // P(X1, X2 | M1)
+  graph1.push_back(switching.modeChain.at(0));      // P(M0)
 
   initial.insert<double>(X(0), 1);
   initial.insert<double>(X(1), 2);
@@ -83,9 +83,9 @@ TEST(HybridNonlinearISAM, IncrementalElimination) {
   HybridNonlinearFactorGraph graph2;
   initial = Values();
 
-  graph1.push_back(switching.nonlinearFactorGraph.at(3));  // P(X1)
-  graph2.push_back(switching.nonlinearFactorGraph.at(4));  // P(X2)
-  graph2.push_back(switching.nonlinearFactorGraph.at(6));  // P(M0, M1)
+  graph1.push_back(switching.unaryFactors.at(1));  // P(X1)
+  graph2.push_back(switching.unaryFactors.at(2));  // P(X2)
+  graph2.push_back(switching.modeChain.at(1));     // P(M0, M1)
 
   isam.update(graph2, initial);
@@ -112,10 +112,10 @@ TEST(HybridNonlinearISAM, IncrementalInference) {
   // X0 -*- X1 -*- X2
   //       |  |
   //      *-M0 - * - M1
-  graph1.push_back(switching.nonlinearFactorGraph.at(0));  // P(X0)
-  graph1.push_back(switching.nonlinearFactorGraph.at(1));  // P(X0, X1 | M0)
-  graph1.push_back(switching.nonlinearFactorGraph.at(3));  // P(X1)
-  graph1.push_back(switching.nonlinearFactorGraph.at(5));  // P(M0)
+  graph1.push_back(switching.unaryFactors.at(0));   // P(X0)
+  graph1.push_back(switching.binaryFactors.at(0));  // P(X0, X1 | M0)
+  graph1.push_back(switching.unaryFactors.at(1));   // P(X1)
+  graph1.push_back(switching.modeChain.at(0));      // P(M0)
 
   initial.insert<double>(X(0), 1);
   initial.insert<double>(X(1), 2);
@@ -134,9 +134,9 @@ TEST(HybridNonlinearISAM, IncrementalInference) {
   initial.insert<double>(X(2), 3);
 
-  graph2.push_back(switching.nonlinearFactorGraph.at(2));  // P(X1, X2 | M1)
-  graph2.push_back(switching.nonlinearFactorGraph.at(4));  // P(X2)
-  graph2.push_back(switching.nonlinearFactorGraph.at(6));  // P(M0, M1)
+  graph2.push_back(switching.binaryFactors.at(1));  // P(X1, X2 | M1)
+  graph2.push_back(switching.unaryFactors.at(2));   // P(X2)
+  graph2.push_back(switching.modeChain.at(1));      // P(M0, M1)
 
   isam.update(graph2, initial);
   bayesTree = isam.bayesTree();
@@ -175,46 +175,22 @@ TEST(HybridNonlinearISAM, IncrementalInference) {
   EXPECT(assert_equal(*x2_conditional, *expected_x2_conditional));
 
   // We only perform manual continuous elimination for 0,0.
-  // The other discrete probabilities on M(1) are calculated the same way
+  // The other discrete probabilities on M(2) are calculated the same way
   const Ordering discreteOrdering{M(0), M(1)};
   HybridBayesTree::shared_ptr discreteBayesTree =
-      expectedRemainingGraph->BaseEliminateable::eliminateMultifrontal(
-          discreteOrdering);
-
-  DiscreteValues m00;
-  m00[M(0)] = 0, m00[M(1)] = 0;
-  DiscreteConditional decisionTree =
-      *(*discreteBayesTree)[M(1)]->conditional()->asDiscrete();
-  double m00_prob = decisionTree(m00);
-
-  auto discreteConditional = bayesTree[M(1)]->conditional()->asDiscrete();
+      expectedRemainingGraph->eliminateMultifrontal(discreteOrdering);
 
   // Test the probability values with regression tests.
-  DiscreteValues assignment;
-  EXPECT(assert_equal(0.0952922, m00_prob, 1e-5));
-  assignment[M(0)] = 0;
-  assignment[M(1)] = 0;
-  EXPECT(assert_equal(0.0952922, (*discreteConditional)(assignment), 1e-5));
-  assignment[M(0)] = 1;
-  assignment[M(1)] = 0;
-  EXPECT(assert_equal(0.282758, (*discreteConditional)(assignment), 1e-5));
-  assignment[M(0)] = 0;
-  assignment[M(1)] = 1;
-  EXPECT(assert_equal(0.314175, (*discreteConditional)(assignment), 1e-5));
-  assignment[M(0)] = 1;
-  assignment[M(1)] = 1;
-  EXPECT(assert_equal(0.307775, (*discreteConditional)(assignment), 1e-5));
+  auto discrete = bayesTree[M(1)]->conditional()->asDiscrete();
+  EXPECT(assert_equal(0.095292, (*discrete)({{M(0), 0}, {M(1), 0}}), 1e-5));
+  EXPECT(assert_equal(0.282758, (*discrete)({{M(0), 1}, {M(1), 0}}), 1e-5));
+  EXPECT(assert_equal(0.314175, (*discrete)({{M(0), 0}, {M(1), 1}}), 1e-5));
+  EXPECT(assert_equal(0.307775, (*discrete)({{M(0), 1}, {M(1), 1}}), 1e-5));
 
-  // Check if the clique conditional generated from incremental elimination
+  // Check that the clique conditional generated from incremental elimination
   // matches that of batch elimination.
-  auto expectedChordal = expectedRemainingGraph->eliminateMultifrontal();
-  auto actualConditional = dynamic_pointer_cast<DecisionTreeFactor>(
-      bayesTree[M(1)]->conditional()->inner());
-  // Account for the probability terms from evaluating continuous FGs
-  DiscreteKeys discrete_keys = {{M(0), 2}, {M(1), 2}};
-  vector<double> probs = {0.095292197, 0.31417524, 0.28275772, 0.30777485};
-  auto expectedConditional =
-      std::make_shared<DecisionTreeFactor>(discrete_keys, probs);
+  auto expectedConditional = (*discreteBayesTree)[M(1)]->conditional();
+  auto actualConditional = bayesTree[M(1)]->conditional();
   EXPECT(assert_equal(*expectedConditional, *actualConditional, 1e-6));
 }
@@ -227,18 +203,19 @@ TEST(HybridNonlinearISAM, Approx_inference) {
   Values initial;
 
   // Add the 3 hybrid factors, x0-x1, x1-x2, x2-x3
-  for (size_t i = 1; i < 4; i++) {
-    graph1.push_back(switching.nonlinearFactorGraph.at(i));
+  for (size_t i = 0; i < 3; i++) {
+    graph1.push_back(switching.binaryFactors.at(i));
   }
 
   // Add the Gaussian factors, 1 prior on X(0),
   // 3 measurements on X(1), X(2), X(3)
-  graph1.push_back(switching.nonlinearFactorGraph.at(0));
-  for (size_t i = 4; i <= 7; i++) {
-    graph1.push_back(switching.nonlinearFactorGraph.at(i));
-    initial.insert<double>(X(i - 4), i - 3);
+  for (size_t i = 0; i < 4; i++) {
+    graph1.push_back(switching.unaryFactors.at(i));
+    initial.insert<double>(X(i), i + 1);
   }
 
+  // TODO(Frank): no mode chain?
+
   // Create ordering.
   Ordering ordering;
   for (size_t j = 0; j < 4; j++) {
@@ -246,7 +223,7 @@ TEST(HybridNonlinearISAM, Approx_inference) {
   }
 
   // Now we calculate the actual factors using full elimination
-  const auto [unprunedHybridBayesTree, unprunedRemainingGraph] =
+  const auto [unPrunedHybridBayesTree, unPrunedRemainingGraph] =
       switching.linearizedFactorGraph
           .BaseEliminateable::eliminatePartialMultifrontal(ordering);
@@ -257,7 +234,7 @@ TEST(HybridNonlinearISAM, Approx_inference) {
   bayesTree.prune(maxNrLeaves);
 
   /*
-  unpruned factor is:
+  unPruned factor is:
   Choice(m3)
   0 Choice(m2)
   0 0 Choice(m1)
@@ -303,8 +280,8 @@ TEST(HybridNonlinearISAM, Approx_inference) {
   // Check that the hybrid nodes of the bayes net match those of the pre-pruning
   // bayes net, at the same positions.
-  auto &unprunedLastDensity = *dynamic_pointer_cast<HybridGaussianConditional>(
-      unprunedHybridBayesTree->clique(X(3))->conditional()->inner());
+  auto &unPrunedLastDensity = *dynamic_pointer_cast<HybridGaussianConditional>(
+      unPrunedHybridBayesTree->clique(X(3))->conditional()->inner());
   auto &lastDensity = *dynamic_pointer_cast<HybridGaussianConditional>(
       bayesTree[X(3)]->conditional()->inner());
@@ -319,7 +296,7 @@ TEST(HybridNonlinearISAM, Approx_inference) {
       EXPECT(lastDensity(assignment) == nullptr);
     } else {
       CHECK(lastDensity(assignment));
-      EXPECT(assert_equal(*unprunedLastDensity(assignment),
+      EXPECT(assert_equal(*unPrunedLastDensity(assignment),
                           *lastDensity(assignment)));
     }
   }
@@ -335,19 +312,20 @@ TEST(HybridNonlinearISAM, Incremental_approximate) {
   /***** Run Round 1 *****/
   // Add the 3 hybrid factors, x0-x1, x1-x2, x2-x3
-  for (size_t i = 1; i < 4; i++) {
-    graph1.push_back(switching.nonlinearFactorGraph.at(i));
+  for (size_t i = 0; i < 3; i++) {
+    graph1.push_back(switching.binaryFactors.at(i));
   }
 
   // Add the Gaussian factors, 1 prior on X(0),
   // 3 measurements on X(1), X(2), X(3)
-  graph1.push_back(switching.nonlinearFactorGraph.at(0));
-  initial.insert<double>(X(0), 1);
-  for (size_t i = 5; i <= 7; i++) {
-    graph1.push_back(switching.nonlinearFactorGraph.at(i));
-    initial.insert<double>(X(i - 4), i - 3);
+  for (size_t i = 0; i < 4; i++) {
+    graph1.push_back(switching.unaryFactors.at(i));
+    initial.insert<double>(X(i), i + 1);
   }
 
+  // TODO(Frank): no mode chain?
+
   // Run update with pruning
   size_t maxComponents = 5;
   incrementalHybrid.update(graph1, initial);
@@ -368,8 +346,8 @@ TEST(HybridNonlinearISAM, Incremental_approximate) {
   /***** Run Round 2 *****/
   HybridGaussianFactorGraph graph2;
-  graph2.push_back(switching.nonlinearFactorGraph.at(4));  // x3-x4
-  graph2.push_back(switching.nonlinearFactorGraph.at(8));  // x4 measurement
+  graph2.push_back(switching.binaryFactors.at(3));  // x3-x4
+  graph2.push_back(switching.unaryFactors.at(4));   // x4 measurement
 
   initial = Values();
   initial.insert<double>(X(4), 5);