Merge pull request #1894 from borglab/check-isam

release/4.3a0
Varun Agrawal 2024-11-05 14:48:54 -05:00 committed by GitHub
commit 2bd2d82f19
14 changed files with 306 additions and 255 deletions

View File

@@ -148,6 +148,7 @@ class GTSAM_EXPORT HybridGaussianFactor : public HybridFactor {
/// Getter for GaussianFactor decision tree
const FactorValuePairs &factors() const { return factors_; }
/**
* @brief Helper function to return factors and functional to create a
* DecisionTree of Gaussian Factor Graphs.

View File

@@ -581,4 +581,15 @@ GaussianFactorGraph HybridGaussianFactorGraph::choose(
return gfg;
}
/* ************************************************************************ */
DiscreteFactorGraph HybridGaussianFactorGraph::discreteFactors() const {
DiscreteFactorGraph dfg;
for (auto &&f : factors_) {
if (auto discreteFactor = std::dynamic_pointer_cast<DiscreteFactor>(f)) {
dfg.push_back(discreteFactor);
}
}
return dfg;
}
} // namespace gtsam
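A minimal usage sketch for the new discreteFactors() helper; the graph contents below are illustrative, not part of this PR:

#include <gtsam/discrete/DecisionTreeFactor.h>
#include <gtsam/discrete/DiscreteFactorGraph.h>
#include <gtsam/hybrid/HybridGaussianFactorGraph.h>
#include <gtsam/inference/Symbol.h>

using namespace gtsam;
using symbol_shorthand::M;

int main() {
  DiscreteKey m0(M(0), 2), m1(M(1), 2);
  HybridGaussianFactorGraph hfg;
  hfg.add(DecisionTreeFactor({m0, m1}, "1 2 3 4"));  // purely discrete factor
  // Collect only the DiscreteFactor entries; all other factors are skipped.
  DiscreteFactorGraph dfg = hfg.discreteFactors();
  DiscreteValues mpe = dfg.optimize();  // most probable discrete assignment
  return 0;
}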

View File

@@ -18,6 +18,7 @@
#pragma once
#include <gtsam/discrete/DiscreteFactorGraph.h>
#include <gtsam/discrete/DiscreteKey.h>
#include <gtsam/hybrid/HybridFactor.h>
#include <gtsam/hybrid/HybridFactorGraph.h>
@@ -254,6 +255,14 @@ class GTSAM_EXPORT HybridGaussianFactorGraph
GaussianFactorGraph operator()(const DiscreteValues& assignment) const {
return choose(assignment);
}
/**
* @brief Helper method to get all the discrete factors
* as a DiscreteFactorGraph.
*
* @return DiscreteFactorGraph
*/
DiscreteFactorGraph discreteFactors() const;
};
// traits

View File

@@ -16,6 +16,7 @@
* @date Sep 12, 2024
*/
#include <gtsam/discrete/DecisionTreeFactor.h>
#include <gtsam/hybrid/HybridNonlinearFactor.h>
#include <gtsam/linear/NoiseModel.h>
#include <gtsam/nonlinear/NonlinearFactor.h>
@@ -184,6 +185,11 @@ std::shared_ptr<HybridGaussianFactor> HybridNonlinearFactor::linearize(
[continuousValues](
const std::pair<sharedFactor, double>& f) -> GaussianFactorValuePair {
auto [factor, val] = f;
// Check if valid factor. If not, return null and infinite error.
if (!factor) {
return {nullptr, std::numeric_limits<double>::infinity()};
}
if (auto gaussian = std::dynamic_pointer_cast<noiseModel::Gaussian>(
factor->noiseModel())) {
return {factor->linearize(continuousValues),
@@ -202,4 +208,34 @@ std::shared_ptr<HybridGaussianFactor> HybridNonlinearFactor::linearize(
linearized_factors);
}
/* *******************************************************************************/
HybridNonlinearFactor::shared_ptr HybridNonlinearFactor::prune(
const DecisionTreeFactor& discreteProbs) const {
// Find keys in discreteProbs.keys() but not in this->keys():
std::set<Key> mine(this->keys().begin(), this->keys().end());
std::set<Key> theirs(discreteProbs.keys().begin(),
discreteProbs.keys().end());
std::vector<Key> diff;
std::set_difference(theirs.begin(), theirs.end(), mine.begin(), mine.end(),
std::back_inserter(diff));
// Find maximum probability value for every combination of our keys.
Ordering keys(diff);
auto max = discreteProbs.max(keys);
// Check the max value for every combination of our keys.
// If the max value is 0.0, we can prune the corresponding conditional.
auto pruner =
[&](const Assignment<Key>& choices,
const NonlinearFactorValuePair& pair) -> NonlinearFactorValuePair {
if (max->evaluate(choices) == 0.0)
return {nullptr, std::numeric_limits<double>::infinity()};
else
return pair;
};
FactorValuePairs prunedFactors = factors().apply(pruner);
return std::make_shared<HybridNonlinearFactor>(discreteKeys(), prunedFactors);
}
} // namespace gtsam
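A minimal sketch of driving the new prune method; the mixture factor and probability table here are hypothetical:

// Assume `mixture` is a HybridNonlinearFactor over discrete key m0.
DiscreteKey m0(M(0), 2);
DecisionTreeFactor discreteProbs(m0, "0 1");  // branch m0 == 0 has probability 0
HybridNonlinearFactor::shared_ptr pruned = mixture.prune(discreteProbs);
// The m0 == 0 component becomes {nullptr, +infinity error}, so it drops
// out of subsequent linearization and elimination.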

View File

@@ -166,6 +166,9 @@ class GTSAM_EXPORT HybridNonlinearFactor : public HybridFactor {
/// @}
/// Getter for NonlinearFactor decision tree
const FactorValuePairs& factors() const { return factors_; }
/// Linearize specific nonlinear factors based on the assignment in
/// discreteValues.
GaussianFactor::shared_ptr linearize(
@@ -176,6 +179,10 @@ class GTSAM_EXPORT HybridNonlinearFactor : public HybridFactor {
std::shared_ptr<HybridGaussianFactor> linearize(
const Values& continuousValues) const;
/// Prune this factor based on the discrete probabilities.
HybridNonlinearFactor::shared_ptr prune(
const DecisionTreeFactor& discreteProbs) const;
private:
/// Helper struct to assist private constructor below.
struct ConstructorHelper;

View File

@@ -16,6 +16,7 @@
*/
#include <gtsam/hybrid/HybridGaussianFactorGraph.h>
#include <gtsam/hybrid/HybridNonlinearFactor.h>
#include <gtsam/hybrid/HybridNonlinearISAM.h>
#include <gtsam/inference/Ordering.h>
@@ -39,7 +40,6 @@ void HybridNonlinearISAM::update(const HybridNonlinearFactorGraph& newFactors,
if (newFactors.size() > 0) {
// Reorder and relinearize every reorderInterval updates
if (reorderInterval_ > 0 && ++reorderCounter_ >= reorderInterval_) {
// TODO(Varun) Re-linearization doesn't take into account pruning
reorderRelinearize();
reorderCounter_ = 0;
}
@@ -65,8 +65,21 @@ void HybridNonlinearISAM::reorderRelinearize() {
// Obtain the new linearization point
const Values newLinPoint = estimate();
auto discreteProbs = *(isam_.roots().at(0)->conditional()->asDiscrete());
isam_.clear();
// Prune nonlinear factors based on discrete conditional probabilities
HybridNonlinearFactorGraph pruned_factors;
for (auto&& factor : factors_) {
if (auto nf = std::dynamic_pointer_cast<HybridNonlinearFactor>(factor)) {
pruned_factors.push_back(nf->prune(discreteProbs));
} else {
pruned_factors.push_back(factor);
}
}
factors_ = pruned_factors;
// Just recreate the whole BayesTree
// TODO: allow for constrained ordering here
// TODO: decouple re-linearization and reordering to avoid
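For orientation, a sketch of the call pattern this change affects; the reorderInterval constructor argument is assumed to behave as in NonlinearISAM (relinearize, and now also prune, every N updates):

HybridNonlinearISAM isam(3);  // assumed ctor, mirroring NonlinearISAM
for (size_t k = 0; k < K; k++) {
  // Every third update triggers reorderRelinearize(), which now first
  // prunes the hybrid nonlinear factors using the root discrete conditional.
  isam.update(newFactors[k], newValues[k]);  // hypothetical per-step inputs
}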

View File

@@ -52,7 +52,8 @@ using symbol_shorthand::X;
* @return HybridGaussianFactorGraph::shared_ptr
*/
inline HybridGaussianFactorGraph::shared_ptr makeSwitchingChain(
size_t K, std::function<Key(int)> x = X, std::function<Key(int)> m = M) {
size_t K, std::function<Key(int)> x = X, std::function<Key(int)> m = M,
const std::string &transitionProbabilityTable = "0 1 1 3") {
HybridGaussianFactorGraph hfg;
hfg.add(JacobianFactor(x(1), I_3x3, Z_3x1));
@@ -68,7 +69,8 @@ inline HybridGaussianFactorGraph::shared_ptr makeSwitchingChain(
hfg.add(HybridGaussianFactor({m(k), 2}, components));
if (k > 1) {
hfg.add(DecisionTreeFactor({{m(k - 1), 2}, {m(k), 2}}, "0 1 1 3"));
hfg.add(DecisionTreeFactor({{m(k - 1), 2}, {m(k), 2}},
transitionProbabilityTable));
}
}
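The new parameter lets callers vary the mode-transition potential; a short sketch with an illustrative uniform table:

// Build a 5-step switching chain whose DecisionTreeFactors use a uniform
// transition table instead of the default "0 1 1 3".
auto hfg = makeSwitchingChain(5, X, M, "1 1 1 1");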

View File

@@ -74,9 +74,7 @@ TEST(HybridGaussianFactorGraph,
hfg.add(HybridGaussianFactor(m1, two::components(X(1))));
hfg.add(DecisionTreeFactor(m1, {2, 8}));
// TODO(Varun) Adding extra discrete variable not connected to continuous
// variable throws segfault
// hfg.add(DecisionTreeFactor({m1, m2, "1 2 3 4"));
hfg.add(DecisionTreeFactor({m1, m2}, "1 2 3 4"));
HybridBayesTree::shared_ptr result = hfg.eliminateMultifrontal();
@@ -176,7 +174,7 @@ TEST(HybridGaussianFactorGraph, Switching) {
// Ordering(KeyVector{X(1), X(4), X(2), X(6), X(9), X(8), X(3), X(7),
// X(5),
// M(1), M(4), M(2), M(6), M(8), M(3), M(7), M(5)});
KeyVector ordering;
Ordering ordering;
{
std::vector<int> naturalX(N);
@@ -187,10 +185,6 @@ TEST(HybridGaussianFactorGraph, Switching) {
auto [ndX, lvls] = makeBinaryOrdering(ordX);
std::copy(ndX.begin(), ndX.end(), std::back_inserter(ordering));
// TODO(dellaert): this has no effect!
for (auto& l : lvls) {
l = -l;
}
}
{
std::vector<int> naturalC(N - 1);
@@ -199,14 +193,11 @@ TEST(HybridGaussianFactorGraph, Switching) {
std::transform(naturalC.begin(), naturalC.end(), std::back_inserter(ordC),
[](int x) { return M(x); });
// std::copy(ordC.begin(), ordC.end(), std::back_inserter(ordering));
const auto [ndC, lvls] = makeBinaryOrdering(ordC);
std::copy(ndC.begin(), ndC.end(), std::back_inserter(ordering));
}
auto ordering_full = Ordering(ordering);
const auto [hbt, remaining] =
hfg->eliminatePartialMultifrontal(ordering_full);
const auto [hbt, remaining] = hfg->eliminatePartialMultifrontal(ordering);
// 12 cliques in the bayes tree and 0 remaining variables to eliminate.
EXPECT_LONGS_EQUAL(12, hbt->size());
@@ -230,7 +221,7 @@ TEST(HybridGaussianFactorGraph, SwitchingISAM) {
// Ordering(KeyVector{X(1), X(4), X(2), X(6), X(9), X(8), X(3), X(7),
// X(5),
// M(1), M(4), M(2), M(6), M(8), M(3), M(7), M(5)});
KeyVector ordering;
Ordering ordering;
{
std::vector<int> naturalX(N);
@@ -241,10 +232,6 @@ TEST(HybridGaussianFactorGraph, SwitchingISAM) {
auto [ndX, lvls] = makeBinaryOrdering(ordX);
std::copy(ndX.begin(), ndX.end(), std::back_inserter(ordering));
// TODO(dellaert): this has no effect!
for (auto& l : lvls) {
l = -l;
}
}
{
std::vector<int> naturalC(N - 1);
@@ -257,10 +244,8 @@ TEST(HybridGaussianFactorGraph, SwitchingISAM) {
const auto [ndC, lvls] = makeBinaryOrdering(ordC);
std::copy(ndC.begin(), ndC.end(), std::back_inserter(ordering));
}
auto ordering_full = Ordering(ordering);
const auto [hbt, remaining] =
hfg->eliminatePartialMultifrontal(ordering_full);
const auto [hbt, remaining] = hfg->eliminatePartialMultifrontal(ordering);
auto new_fg = makeSwitchingChain(12);
auto isam = HybridGaussianISAM(*hbt);
@@ -460,12 +445,7 @@ TEST(HybridBayesTree, Optimize) {
const auto [hybridBayesNet, remainingFactorGraph] =
s.linearizedFactorGraph().eliminatePartialSequential(ordering);
DiscreteFactorGraph dfg;
for (auto&& f : *remainingFactorGraph) {
auto discreteFactor = dynamic_pointer_cast<DiscreteFactor>(f);
assert(discreteFactor);
dfg.push_back(discreteFactor);
}
DiscreteFactorGraph dfg = remainingFactorGraph->discreteFactors();
// Add the probabilities for each branch
DiscreteKeys discrete_keys = {{M(0), 2}, {M(1), 2}, {M(2), 2}};

View File

@@ -194,164 +194,6 @@ TEST(HybridGaussianFactor, Error) {
4.0, hybridFactor.error({continuousValues, discreteValues}), 1e-9);
}
/* ************************************************************************* */
namespace test_direct_factor_graph {
/**
* @brief Create a Factor Graph by directly specifying all
* the factors instead of creating conditionals first.
* This way we can directly provide the likelihoods and
* then perform linearization.
*
* @param values Initial values to linearize around.
* @param means The means of the HybridGaussianFactor components.
* @param sigmas The standard deviations of the HybridGaussianFactor components.
* @param m1 The discrete key.
* @return HybridGaussianFactorGraph
*/
static HybridGaussianFactorGraph CreateFactorGraph(
const gtsam::Values &values, const std::vector<double> &means,
const std::vector<double> &sigmas, DiscreteKey &m1,
double measurement_noise = 1e-3) {
auto model0 = noiseModel::Isotropic::Sigma(1, sigmas[0]);
auto model1 = noiseModel::Isotropic::Sigma(1, sigmas[1]);
auto prior_noise = noiseModel::Isotropic::Sigma(1, measurement_noise);
auto f0 =
std::make_shared<BetweenFactor<double>>(X(0), X(1), means[0], model0)
->linearize(values);
auto f1 =
std::make_shared<BetweenFactor<double>>(X(0), X(1), means[1], model1)
->linearize(values);
// Create HybridGaussianFactor
// We take the negative since we want
// the underlying scalar to be log(\sqrt(|2πΣ|))
std::vector<GaussianFactorValuePair> factors{{f0, model0->negLogConstant()},
{f1, model1->negLogConstant()}};
HybridGaussianFactor motionFactor(m1, factors);
HybridGaussianFactorGraph hfg;
hfg.push_back(motionFactor);
hfg.push_back(PriorFactor<double>(X(0), values.at<double>(X(0)), prior_noise)
.linearize(values));
return hfg;
}
} // namespace test_direct_factor_graph
/* ************************************************************************* */
/**
* @brief Test components with differing means but the same covariances.
* The factor graph is
* *-X1-*-X2
* |
* M1
*/
TEST(HybridGaussianFactor, DifferentMeansFG) {
using namespace test_direct_factor_graph;
DiscreteKey m1(M(1), 2);
Values values;
double x1 = 0.0, x2 = 1.75;
values.insert(X(0), x1);
values.insert(X(1), x2);
std::vector<double> means = {0.0, 2.0}, sigmas = {1e-0, 1e-0};
HybridGaussianFactorGraph hfg = CreateFactorGraph(values, means, sigmas, m1);
{
auto bn = hfg.eliminateSequential();
HybridValues actual = bn->optimize();
HybridValues expected(
VectorValues{{X(0), Vector1(0.0)}, {X(1), Vector1(-1.75)}},
DiscreteValues{{M(1), 0}});
EXPECT(assert_equal(expected, actual));
DiscreteValues dv0{{M(1), 0}};
VectorValues cont0 = bn->optimize(dv0);
double error0 = bn->error(HybridValues(cont0, dv0));
// regression
EXPECT_DOUBLES_EQUAL(0.69314718056, error0, 1e-9);
DiscreteValues dv1{{M(1), 1}};
VectorValues cont1 = bn->optimize(dv1);
double error1 = bn->error(HybridValues(cont1, dv1));
EXPECT_DOUBLES_EQUAL(error0, error1, 1e-9);
}
{
auto prior_noise = noiseModel::Isotropic::Sigma(1, 1e-3);
hfg.push_back(
PriorFactor<double>(X(1), means[1], prior_noise).linearize(values));
auto bn = hfg.eliminateSequential();
HybridValues actual = bn->optimize();
HybridValues expected(
VectorValues{{X(0), Vector1(0.0)}, {X(1), Vector1(0.25)}},
DiscreteValues{{M(1), 1}});
EXPECT(assert_equal(expected, actual));
{
DiscreteValues dv{{M(1), 0}};
VectorValues cont = bn->optimize(dv);
double error = bn->error(HybridValues(cont, dv));
// regression
EXPECT_DOUBLES_EQUAL(2.12692448787, error, 1e-9);
}
{
DiscreteValues dv{{M(1), 1}};
VectorValues cont = bn->optimize(dv);
double error = bn->error(HybridValues(cont, dv));
// regression
EXPECT_DOUBLES_EQUAL(0.126928487854, error, 1e-9);
}
}
}
/* ************************************************************************* */
/**
* @brief Test components with differing covariances but the same means.
* The factor graph is
* *-X1-*-X2
* |
* M1
*/
TEST(HybridGaussianFactor, DifferentCovariancesFG) {
using namespace test_direct_factor_graph;
DiscreteKey m1(M(1), 2);
Values values;
double x1 = 1.0, x2 = 1.0;
values.insert(X(0), x1);
values.insert(X(1), x2);
std::vector<double> means = {0.0, 0.0}, sigmas = {1e2, 1e-2};
// Create FG with HybridGaussianFactor and prior on X1
HybridGaussianFactorGraph fg = CreateFactorGraph(values, means, sigmas, m1);
auto hbn = fg.eliminateSequential();
VectorValues cv;
cv.insert(X(0), Vector1(0.0));
cv.insert(X(1), Vector1(0.0));
DiscreteValues dv0{{M(1), 0}};
DiscreteValues dv1{{M(1), 1}};
DiscreteConditional expected_m1(m1, "0.5/0.5");
DiscreteConditional actual_m1 = *(hbn->at(2)->asDiscrete());
EXPECT(assert_equal(expected_m1, actual_m1));
}
/* ************************************************************************* */
int main() {
TestResult tr;

View File

@@ -545,7 +545,7 @@ TEST(HybridGaussianFactorGraph, IncrementalErrorTree) {
/* ****************************************************************************/
// Check that collectProductFactor works correctly.
TEST(HybridGaussianFactorGraph, collectProductFactor) {
TEST(HybridGaussianFactorGraph, CollectProductFactor) {
const int num_measurements = 1;
VectorValues vv{{Z(0), Vector1(5.0)}};
auto fg = tiny::createHybridGaussianFactorGraph(num_measurements, vv);

View File

@@ -56,7 +56,7 @@ const HybridGaussianFactorGraph graph2{lfg.at(4), lfg.at(1), lfg.at(2),
/* ****************************************************************************/
// Test if we can perform elimination incrementally.
TEST(HybridGaussianElimination, IncrementalElimination) {
TEST(HybridGaussianISAM, IncrementalElimination) {
using namespace switching3;
HybridGaussianISAM isam;
@@ -88,7 +88,7 @@ TEST(HybridGaussianElimination, IncrementalElimination) {
/* ****************************************************************************/
// Test if we can incrementally do the inference
TEST(HybridGaussianElimination, IncrementalInference) {
TEST(HybridGaussianISAM, IncrementalInference) {
using namespace switching3;
HybridGaussianISAM isam;
@@ -156,7 +156,7 @@ TEST(HybridGaussianElimination, IncrementalInference) {
/* ****************************************************************************/
// Test if we can approximately do the inference
TEST(HybridGaussianElimination, Approx_inference) {
TEST(HybridGaussianISAM, ApproxInference) {
Switching switching(4);
HybridGaussianISAM incrementalHybrid;
HybridGaussianFactorGraph graph1;
@@ -258,27 +258,26 @@ TEST(HybridGaussianElimination, Approx_inference) {
/* ****************************************************************************/
// Test approximate inference with an additional pruning step.
TEST(HybridGaussianElimination, IncrementalApproximate) {
TEST(HybridGaussianISAM, IncrementalApproximate) {
Switching switching(5);
HybridGaussianISAM incrementalHybrid;
HybridGaussianFactorGraph graph1;
HybridGaussianFactorGraph graph;
/***** Run Round 1 *****/
// Add the 3 hybrid factors, x0-x1, x1-x2, x2-x3
for (size_t i = 0; i < 3; i++) {
graph1.push_back(switching.linearBinaryFactors.at(i));
graph.push_back(switching.linearBinaryFactors.at(i));
}
// Add the Gaussian factors, 1 prior on x0,
// 3 measurements on x1, x2, x3
for (size_t i = 0; i <= 3; i++) {
graph1.push_back(switching.linearUnaryFactors.at(i));
graph.push_back(switching.linearUnaryFactors.at(i));
}
// Run update with pruning
size_t maxComponents = 5;
incrementalHybrid.update(graph1);
incrementalHybrid.prune(maxComponents);
incrementalHybrid.update(graph, maxComponents);
// Check if we have a bayes tree with 4 hybrid nodes,
// each with 2, 4, 8, and 5 (pruned) leaves respectively.
@@ -293,13 +292,12 @@ TEST(HybridGaussianElimination, IncrementalApproximate) {
5, incrementalHybrid[X(3)]->conditional()->asHybrid()->nrComponents());
/***** Run Round 2 *****/
HybridGaussianFactorGraph graph2;
graph2.push_back(switching.linearBinaryFactors.at(3)); // x3-x4
graph2.push_back(switching.linearUnaryFactors.at(4)); // x4
graph = HybridGaussianFactorGraph();
graph.push_back(switching.linearBinaryFactors.at(3)); // hybrid x3-x4
graph.push_back(switching.linearUnaryFactors.at(4)); // x4
// Run update with pruning a second time.
incrementalHybrid.update(graph2);
incrementalHybrid.prune(maxComponents);
incrementalHybrid.update(graph, maxComponents);
// Check if we have a bayes tree with pruned hybrid nodes,
// with 5 (pruned) leaves.
@@ -470,8 +468,7 @@ TEST(HybridGaussianISAM, NonTrivial) {
fg = HybridNonlinearFactorGraph();
// Keep pruning!
inc.update(gfg);
inc.prune(3);
inc.update(gfg, 3);
// The final discrete graph should not be empty since we have eliminated
// all continuous variables.
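The fused call above replaces the earlier two-step pattern; a sketch of the assumed equivalence, with maxNrLeaves passed straight to update:

// Previously: inc.update(gfg); inc.prune(3);
// Now pruning is folded into the update call itself:
inc.update(gfg, /*maxNrLeaves=*/3);  // assumed optional second argument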

View File

@@ -124,7 +124,7 @@ std::pair<double, double> approximateDiscreteMarginal(
* the posterior probability of m1 should be 0.5/0.5.
* Getting a measurement on z1 gives us more information.
*/
TEST(HybridGaussianFactor, TwoStateModel) {
TEST(HybridGaussianFactorGraph, TwoStateModel) {
double mu0 = 1.0, mu1 = 3.0;
double sigma = 0.5;
auto hybridMotionModel = CreateHybridMotionModel(mu0, mu1, sigma, sigma);
@@ -178,7 +178,7 @@ TEST(HybridGaussianFactor, TwoStateModel) {
* the P(m1) should be 0.5/0.5.
* Getting a measurement on z1 gives us more information.
*/
TEST(HybridGaussianFactor, TwoStateModel2) {
TEST(HybridGaussianFactorGraph, TwoStateModel2) {
double mu0 = 1.0, mu1 = 3.0;
double sigma0 = 0.5, sigma1 = 2.0;
auto hybridMotionModel = CreateHybridMotionModel(mu0, mu1, sigma0, sigma1);
@@ -281,7 +281,7 @@ TEST(HybridGaussianFactor, TwoStateModel2) {
* the p(m1) should be 0.5/0.5.
* Getting a measurement on z1 gives us more information.
*/
TEST(HybridGaussianFactor, TwoStateModel3) {
TEST(HybridGaussianFactorGraph, TwoStateModel3) {
double mu = 1.0;
double sigma0 = 0.5, sigma1 = 2.0;
auto hybridMotionModel = CreateHybridMotionModel(mu, mu, sigma0, sigma1);
@@ -366,7 +366,7 @@ TEST(HybridGaussianFactor, TwoStateModel3) {
* measurements and vastly different motion model: either stand still or move
* far. This yields a very informative posterior.
*/
TEST(HybridGaussianFactor, TwoStateModel4) {
TEST(HybridGaussianFactorGraph, TwoStateModel4) {
double mu0 = 0.0, mu1 = 10.0;
double sigma0 = 0.2, sigma1 = 5.0;
auto hybridMotionModel = CreateHybridMotionModel(mu0, mu1, sigma0, sigma1);
@@ -385,6 +385,164 @@ TEST(HybridGaussianFactor, TwoStateModel4) {
EXPECT(assert_equal(expected, *(bn->at(2)->asDiscrete()), 0.002));
}
/* ************************************************************************* */
namespace test_direct_factor_graph {
/**
* @brief Create a Factor Graph by directly specifying all
* the factors instead of creating conditionals first.
* This way we can directly provide the likelihoods and
* then perform linearization.
*
* @param values Initial values to linearize around.
* @param means The means of the HybridGaussianFactor components.
* @param sigmas The standard deviations of the HybridGaussianFactor components.
* @param m1 The discrete key.
* @return HybridGaussianFactorGraph
*/
static HybridGaussianFactorGraph CreateFactorGraph(
const gtsam::Values &values, const std::vector<double> &means,
const std::vector<double> &sigmas, DiscreteKey &m1,
double measurement_noise = 1e-3) {
auto model0 = noiseModel::Isotropic::Sigma(1, sigmas[0]);
auto model1 = noiseModel::Isotropic::Sigma(1, sigmas[1]);
auto prior_noise = noiseModel::Isotropic::Sigma(1, measurement_noise);
auto f0 =
std::make_shared<BetweenFactor<double>>(X(0), X(1), means[0], model0)
->linearize(values);
auto f1 =
std::make_shared<BetweenFactor<double>>(X(0), X(1), means[1], model1)
->linearize(values);
// Create HybridGaussianFactor
// We take the negative since we want
// the underlying scalar to be log(\sqrt(|2πΣ|))
std::vector<GaussianFactorValuePair> factors{{f0, model0->negLogConstant()},
{f1, model1->negLogConstant()}};
HybridGaussianFactor motionFactor(m1, factors);
HybridGaussianFactorGraph hfg;
hfg.push_back(motionFactor);
hfg.push_back(PriorFactor<double>(X(0), values.at<double>(X(0)), prior_noise)
.linearize(values));
return hfg;
}
} // namespace test_direct_factor_graph
/* ************************************************************************* */
/**
* @brief Test components with differing means but the same covariances.
* The factor graph is
* *-X1-*-X2
* |
* M1
*/
TEST(HybridGaussianFactorGraph, DifferentMeans) {
using namespace test_direct_factor_graph;
DiscreteKey m1(M(1), 2);
Values values;
double x1 = 0.0, x2 = 1.75;
values.insert(X(0), x1);
values.insert(X(1), x2);
std::vector<double> means = {0.0, 2.0}, sigmas = {1e-0, 1e-0};
HybridGaussianFactorGraph hfg = CreateFactorGraph(values, means, sigmas, m1);
{
auto bn = hfg.eliminateSequential();
HybridValues actual = bn->optimize();
HybridValues expected(
VectorValues{{X(0), Vector1(0.0)}, {X(1), Vector1(-1.75)}},
DiscreteValues{{M(1), 0}});
EXPECT(assert_equal(expected, actual));
DiscreteValues dv0{{M(1), 0}};
VectorValues cont0 = bn->optimize(dv0);
double error0 = bn->error(HybridValues(cont0, dv0));
// regression
EXPECT_DOUBLES_EQUAL(0.69314718056, error0, 1e-9);
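// (0.69314718056 = ln 2, consistent with the even 50/50 mode split.)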
DiscreteValues dv1{{M(1), 1}};
VectorValues cont1 = bn->optimize(dv1);
double error1 = bn->error(HybridValues(cont1, dv1));
EXPECT_DOUBLES_EQUAL(error0, error1, 1e-9);
}
{
auto prior_noise = noiseModel::Isotropic::Sigma(1, 1e-3);
hfg.push_back(
PriorFactor<double>(X(1), means[1], prior_noise).linearize(values));
auto bn = hfg.eliminateSequential();
HybridValues actual = bn->optimize();
HybridValues expected(
VectorValues{{X(0), Vector1(0.0)}, {X(1), Vector1(0.25)}},
DiscreteValues{{M(1), 1}});
EXPECT(assert_equal(expected, actual));
{
DiscreteValues dv{{M(1), 0}};
VectorValues cont = bn->optimize(dv);
double error = bn->error(HybridValues(cont, dv));
// regression
EXPECT_DOUBLES_EQUAL(2.12692448787, error, 1e-9);
}
{
DiscreteValues dv{{M(1), 1}};
VectorValues cont = bn->optimize(dv);
double error = bn->error(HybridValues(cont, dv));
// regression
EXPECT_DOUBLES_EQUAL(0.126928487854, error, 1e-9);
}
}
}
/* ************************************************************************* */
/**
* @brief Test components with differing covariances but the same means.
* The factor graph is
* *-X1-*-X2
* |
* M1
*/
TEST(HybridGaussianFactorGraph, DifferentCovariances) {
using namespace test_direct_factor_graph;
DiscreteKey m1(M(1), 2);
Values values;
double x1 = 1.0, x2 = 1.0;
values.insert(X(0), x1);
values.insert(X(1), x2);
std::vector<double> means = {0.0, 0.0}, sigmas = {1e2, 1e-2};
// Create FG with HybridGaussianFactor and prior on X1
HybridGaussianFactorGraph fg = CreateFactorGraph(values, means, sigmas, m1);
auto hbn = fg.eliminateSequential();
VectorValues cv;
cv.insert(X(0), Vector1(0.0));
cv.insert(X(1), Vector1(0.0));
DiscreteValues dv0{{M(1), 0}};
DiscreteValues dv1{{M(1), 1}};
DiscreteConditional expected_m1(m1, "0.5/0.5");
DiscreteConditional actual_m1 = *(hbn->at(2)->asDiscrete());
EXPECT(assert_equal(expected_m1, actual_m1));
}
/* ************************************************************************* */
int main() {
TestResult tr;

View File

@@ -475,13 +475,8 @@ TEST(HybridNonlinearFactorGraph, Full_Elimination) {
const auto [hybridBayesNet_partial, remainingFactorGraph_partial] =
self.linearizedFactorGraph().eliminatePartialSequential(ordering);
DiscreteFactorGraph discrete_fg;
// TODO(Varun) Make this a function of HybridGaussianFactorGraph?
for (auto &factor : (*remainingFactorGraph_partial)) {
auto df = dynamic_pointer_cast<DiscreteFactor>(factor);
assert(df);
discrete_fg.push_back(df);
}
DiscreteFactorGraph discrete_fg =
remainingFactorGraph_partial->discreteFactors();
ordering.clear();
for (size_t k = 0; k < self.K - 1; k++) ordering.push_back(M(k));

View File

@@ -49,7 +49,7 @@ using symbol_shorthand::Z;
TEST(HybridNonlinearISAM, IncrementalElimination) {
Switching switching(3);
HybridNonlinearISAM isam;
HybridNonlinearFactorGraph graph1;
HybridNonlinearFactorGraph graph;
Values initial;
// Create initial factor graph
@@ -57,17 +57,17 @@ TEST(HybridNonlinearISAM, IncrementalElimination) {
// | | |
// X0 -*- X1 -*- X2
// \*-M0-*/
graph1.push_back(switching.unaryFactors.at(0)); // P(X0)
graph1.push_back(switching.binaryFactors.at(0)); // P(X0, X1 | M0)
graph1.push_back(switching.binaryFactors.at(1)); // P(X1, X2 | M1)
graph1.push_back(switching.modeChain.at(0)); // P(M0)
graph.push_back(switching.unaryFactors.at(0)); // P(X0)
graph.push_back(switching.binaryFactors.at(0)); // P(X0, X1 | M0)
graph.push_back(switching.binaryFactors.at(1)); // P(X1, X2 | M1)
graph.push_back(switching.modeChain.at(0)); // P(M0)
initial.insert<double>(X(0), 1);
initial.insert<double>(X(1), 2);
initial.insert<double>(X(2), 3);
// Run update step
isam.update(graph1, initial);
isam.update(graph, initial);
// Check that after update we have 3 hybrid Bayes net nodes:
// P(X0 | X1, M0) and P(X1, X2 | M0, M1), P(M0, M1)
@@ -80,14 +80,14 @@ TEST(HybridNonlinearISAM, IncrementalElimination) {
/********************************************************/
// New factor graph for incremental update.
HybridNonlinearFactorGraph graph2;
graph = HybridNonlinearFactorGraph();
initial = Values();
graph1.push_back(switching.unaryFactors.at(1)); // P(X1)
graph2.push_back(switching.unaryFactors.at(2)); // P(X2)
graph2.push_back(switching.modeChain.at(1)); // P(M0, M1)
graph.push_back(switching.unaryFactors.at(1)); // P(X1)
graph.push_back(switching.unaryFactors.at(2)); // P(X2)
graph.push_back(switching.modeChain.at(1)); // P(M0, M1)
isam.update(graph2, initial);
isam.update(graph, initial);
bayesTree = isam.bayesTree();
// Check that after the second update we have
@@ -103,7 +103,7 @@ TEST(HybridNonlinearISAM, IncrementalElimination) {
TEST(HybridNonlinearISAM, IncrementalInference) {
Switching switching(3);
HybridNonlinearISAM isam;
HybridNonlinearFactorGraph graph1;
HybridNonlinearFactorGraph graph;
Values initial;
// Create initial factor graph
@@ -112,16 +112,16 @@ TEST(HybridNonlinearISAM, IncrementalInference) {
// X0 -*- X1 -*- X2
// | |
// *-M0 - * - M1
graph1.push_back(switching.unaryFactors.at(0)); // P(X0)
graph1.push_back(switching.binaryFactors.at(0)); // P(X0, X1 | M0)
graph1.push_back(switching.unaryFactors.at(1)); // P(X1)
graph1.push_back(switching.modeChain.at(0)); // P(M0)
graph.push_back(switching.unaryFactors.at(0)); // P(X0)
graph.push_back(switching.binaryFactors.at(0)); // P(X0, X1 | M0)
graph.push_back(switching.unaryFactors.at(1)); // P(X1)
graph.push_back(switching.modeChain.at(0)); // P(M0)
initial.insert<double>(X(0), 1);
initial.insert<double>(X(1), 2);
// Run update step
isam.update(graph1, initial);
isam.update(graph, initial);
HybridGaussianISAM bayesTree = isam.bayesTree();
auto discreteConditional_m0 = bayesTree[M(0)]->conditional()->asDiscrete();
@@ -129,16 +129,16 @@ TEST(HybridNonlinearISAM, IncrementalInference) {
/********************************************************/
// New factor graph for incremental update.
HybridNonlinearFactorGraph graph2;
graph = HybridNonlinearFactorGraph();
initial = Values();
initial.insert<double>(X(2), 3);
graph2.push_back(switching.binaryFactors.at(1)); // P(X1, X2 | M1)
graph2.push_back(switching.unaryFactors.at(2)); // P(X2)
graph2.push_back(switching.modeChain.at(1)); // P(M0, M1)
graph.push_back(switching.binaryFactors.at(1)); // P(X1, X2 | M1)
graph.push_back(switching.unaryFactors.at(2)); // P(X2)
graph.push_back(switching.modeChain.at(1)); // P(M0, M1)
isam.update(graph2, initial);
isam.update(graph, initial);
bayesTree = isam.bayesTree();
/********************************************************/
@@ -195,22 +195,22 @@ TEST(HybridNonlinearISAM, IncrementalInference) {
}
/* ****************************************************************************/
// Test if we can approximately do the inference
TEST(HybridNonlinearISAM, Approx_inference) {
// Test if we can approximately do the inference (using pruning)
TEST(HybridNonlinearISAM, ApproxInference) {
Switching switching(4);
HybridNonlinearISAM incrementalHybrid;
HybridNonlinearFactorGraph graph1;
HybridNonlinearFactorGraph graph;
Values initial;
// Add the 3 hybrid factors, x0-x1, x1-x2, x2-x3
for (size_t i = 0; i < 3; i++) {
graph1.push_back(switching.binaryFactors.at(i));
graph.push_back(switching.binaryFactors.at(i));
}
// Add the Gaussian factors, 1 prior on X(0),
// 3 measurements on X(1), X(2), X(3)
for (size_t i = 0; i < 4; i++) {
graph1.push_back(switching.unaryFactors.at(i));
graph.push_back(switching.unaryFactors.at(i));
initial.insert<double>(X(i), i + 1);
}
@@ -226,7 +226,7 @@ TEST(HybridNonlinearISAM, Approx_inference) {
.BaseEliminateable::eliminatePartialMultifrontal(ordering);
size_t maxNrLeaves = 5;
incrementalHybrid.update(graph1, initial);
incrementalHybrid.update(graph, initial);
HybridGaussianISAM bayesTree = incrementalHybrid.bayesTree();
bayesTree.prune(maxNrLeaves);
@@ -302,22 +302,22 @@ TEST(HybridNonlinearISAM, Approx_inference) {
/* ****************************************************************************/
// Test approximate inference with an additional pruning step.
TEST(HybridNonlinearISAM, Incremental_approximate) {
TEST(HybridNonlinearISAM, IncrementalApproximate) {
Switching switching(5);
HybridNonlinearISAM incrementalHybrid;
HybridNonlinearFactorGraph graph1;
HybridNonlinearFactorGraph graph;
Values initial;
/***** Run Round 1 *****/
// Add the 3 hybrid factors, x0-x1, x1-x2, x2-x3
for (size_t i = 0; i < 3; i++) {
graph1.push_back(switching.binaryFactors.at(i));
graph.push_back(switching.binaryFactors.at(i));
}
// Add the Gaussian factors, 1 prior on X(0),
// 3 measurements on X(1), X(2), X(3)
for (size_t i = 0; i < 4; i++) {
graph1.push_back(switching.unaryFactors.at(i));
graph.push_back(switching.unaryFactors.at(i));
initial.insert<double>(X(i), i + 1);
}
@@ -325,7 +325,7 @@ TEST(HybridNonlinearISAM, Incremental_approximate) {
// Run update with pruning
size_t maxComponents = 5;
incrementalHybrid.update(graph1, initial);
incrementalHybrid.update(graph, initial);
incrementalHybrid.prune(maxComponents);
HybridGaussianISAM bayesTree = incrementalHybrid.bayesTree();
@@ -342,14 +342,14 @@ TEST(HybridNonlinearISAM, Incremental_approximate) {
5, bayesTree[X(3)]->conditional()->asHybrid()->nrComponents());
/***** Run Round 2 *****/
HybridGaussianFactorGraph graph2;
graph2.push_back(switching.binaryFactors.at(3)); // x3-x4
graph2.push_back(switching.unaryFactors.at(4)); // x4 measurement
graph = HybridGaussianFactorGraph();
graph.push_back(switching.binaryFactors.at(3)); // x3-x4
graph.push_back(switching.unaryFactors.at(4)); // x4 measurement
initial = Values();
initial.insert<double>(X(4), 5);
// Run update with pruning a second time.
incrementalHybrid.update(graph2, initial);
incrementalHybrid.update(graph, initial);
incrementalHybrid.prune(maxComponents);
bayesTree = incrementalHybrid.bayesTree();