Merge pull request #1290 from borglab/hybrid/improvements
commit
903d7c6a2e
|
@ -65,7 +65,7 @@ class GTSAM_EXPORT HybridGaussianISAM : public ISAM<HybridBayesTree> {
|
|||
HybridBayesTree::EliminationTraitsType::DefaultEliminate);
|
||||
|
||||
/**
|
||||
* @brief
|
||||
* @brief Prune the underlying Bayes tree.
|
||||
*
|
||||
* @param root The root key in the discrete conditional decision tree.
|
||||
* @param maxNumberLeaves
|
||||
|
|
|
@ -27,8 +27,7 @@ void HybridNonlinearFactorGraph::add(
|
|||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
void HybridNonlinearFactorGraph::add(
|
||||
boost::shared_ptr<DiscreteFactor> factor) {
|
||||
void HybridNonlinearFactorGraph::add(boost::shared_ptr<DiscreteFactor> factor) {
|
||||
FactorGraph::add(boost::make_shared<HybridDiscreteFactor>(factor));
|
||||
}
|
||||
|
||||
|
@ -49,12 +48,12 @@ void HybridNonlinearFactorGraph::print(const std::string& s,
|
|||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
HybridGaussianFactorGraph HybridNonlinearFactorGraph::linearize(
|
||||
HybridGaussianFactorGraph::shared_ptr HybridNonlinearFactorGraph::linearize(
|
||||
const Values& continuousValues) const {
|
||||
// create an empty linear FG
|
||||
HybridGaussianFactorGraph linearFG;
|
||||
auto linearFG = boost::make_shared<HybridGaussianFactorGraph>();
|
||||
|
||||
linearFG.reserve(size());
|
||||
linearFG->reserve(size());
|
||||
|
||||
// linearize all hybrid factors
|
||||
for (auto&& factor : factors_) {
|
||||
|
@ -66,9 +65,9 @@ HybridGaussianFactorGraph HybridNonlinearFactorGraph::linearize(
|
|||
if (factor->isHybrid()) {
|
||||
// Check if it is a nonlinear mixture factor
|
||||
if (auto nlmf = boost::dynamic_pointer_cast<MixtureFactor>(factor)) {
|
||||
linearFG.push_back(nlmf->linearize(continuousValues));
|
||||
linearFG->push_back(nlmf->linearize(continuousValues));
|
||||
} else {
|
||||
linearFG.push_back(factor);
|
||||
linearFG->push_back(factor);
|
||||
}
|
||||
|
||||
// Now check if the factor is a continuous only factor.
|
||||
|
@ -80,18 +79,18 @@ HybridGaussianFactorGraph HybridNonlinearFactorGraph::linearize(
|
|||
boost::dynamic_pointer_cast<NonlinearFactor>(nlhf->inner())) {
|
||||
auto hgf = boost::make_shared<HybridGaussianFactor>(
|
||||
nlf->linearize(continuousValues));
|
||||
linearFG.push_back(hgf);
|
||||
linearFG->push_back(hgf);
|
||||
} else {
|
||||
linearFG.push_back(factor);
|
||||
linearFG->push_back(factor);
|
||||
}
|
||||
// Finally if nothing else, we are discrete-only which doesn't need
|
||||
// lineariztion.
|
||||
} else {
|
||||
linearFG.push_back(factor);
|
||||
linearFG->push_back(factor);
|
||||
}
|
||||
|
||||
} else {
|
||||
linearFG.push_back(GaussianFactor::shared_ptr());
|
||||
linearFG->push_back(GaussianFactor::shared_ptr());
|
||||
}
|
||||
}
|
||||
return linearFG;
|
||||
|
|
|
@ -42,6 +42,16 @@ class GTSAM_EXPORT HybridNonlinearFactorGraph : public HybridFactorGraph {
|
|||
using IsNonlinear = typename std::enable_if<
|
||||
std::is_base_of<NonlinearFactor, FACTOR>::value>::type;
|
||||
|
||||
/// Check if T has a value_type derived from FactorType.
|
||||
template <typename T>
|
||||
using HasDerivedValueType = typename std::enable_if<
|
||||
std::is_base_of<HybridFactor, typename T::value_type>::value>::type;
|
||||
|
||||
/// Check if T has a pointer type derived from FactorType.
|
||||
template <typename T>
|
||||
using HasDerivedElementType = typename std::enable_if<std::is_base_of<
|
||||
HybridFactor, typename T::value_type::element_type>::value>::type;
|
||||
|
||||
public:
|
||||
using Base = HybridFactorGraph;
|
||||
using This = HybridNonlinearFactorGraph; ///< this class
|
||||
|
@ -109,6 +119,21 @@ class GTSAM_EXPORT HybridNonlinearFactorGraph : public HybridFactorGraph {
|
|||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Push back many factors as shared_ptr's in a container (factors are not
|
||||
* copied)
|
||||
*/
|
||||
template <typename CONTAINER>
|
||||
HasDerivedElementType<CONTAINER> push_back(const CONTAINER& container) {
|
||||
Base::push_back(container.begin(), container.end());
|
||||
}
|
||||
|
||||
/// Push back non-pointer objects in a container (factors are copied).
|
||||
template <typename CONTAINER>
|
||||
HasDerivedValueType<CONTAINER> push_back(const CONTAINER& container) {
|
||||
Base::push_back(container.begin(), container.end());
|
||||
}
|
||||
|
||||
/// Add a nonlinear factor as a shared ptr.
|
||||
void add(boost::shared_ptr<NonlinearFactor> factor);
|
||||
|
||||
|
@ -127,7 +152,8 @@ class GTSAM_EXPORT HybridNonlinearFactorGraph : public HybridFactorGraph {
|
|||
* @param continuousValues: Dictionary of continuous values.
|
||||
* @return HybridGaussianFactorGraph::shared_ptr
|
||||
*/
|
||||
HybridGaussianFactorGraph linearize(const Values& continuousValues) const;
|
||||
HybridGaussianFactorGraph::shared_ptr linearize(
|
||||
const Values& continuousValues) const;
|
||||
};
|
||||
|
||||
template <>
|
||||
|
|
|
@ -0,0 +1,111 @@
|
|||
/* ----------------------------------------------------------------------------
|
||||
|
||||
* GTSAM Copyright 2010, Georgia Tech Research Corporation,
|
||||
* Atlanta, Georgia 30332-0415
|
||||
* All Rights Reserved
|
||||
* Authors: Frank Dellaert, et al. (see THANKS for the full author list)
|
||||
|
||||
* See LICENSE for the license information
|
||||
|
||||
* -------------------------------------------------------------------------- */
|
||||
|
||||
/**
|
||||
* @file HybridNonlinearISAM.cpp
|
||||
* @date Sep 12, 2022
|
||||
* @author Varun Agrawal
|
||||
*/
|
||||
|
||||
#include <gtsam/hybrid/HybridGaussianFactorGraph.h>
|
||||
#include <gtsam/hybrid/HybridNonlinearISAM.h>
|
||||
#include <gtsam/inference/Ordering.h>
|
||||
|
||||
#include <iostream>
|
||||
|
||||
using namespace std;
|
||||
|
||||
namespace gtsam {
|
||||
|
||||
/* ************************************************************************* */
|
||||
void HybridNonlinearISAM::saveGraph(const string& s,
|
||||
const KeyFormatter& keyFormatter) const {
|
||||
isam_.saveGraph(s, keyFormatter);
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
void HybridNonlinearISAM::update(const HybridNonlinearFactorGraph& newFactors,
|
||||
const Values& initialValues) {
|
||||
if (newFactors.size() > 0) {
|
||||
// Reorder and relinearize every reorderInterval updates
|
||||
if (reorderInterval_ > 0 && ++reorderCounter_ >= reorderInterval_) {
|
||||
reorder_relinearize();
|
||||
reorderCounter_ = 0;
|
||||
}
|
||||
|
||||
factors_.push_back(newFactors);
|
||||
|
||||
// Linearize new factors and insert them
|
||||
// TODO: optimize for whole config?
|
||||
linPoint_.insert(initialValues);
|
||||
|
||||
boost::shared_ptr<HybridGaussianFactorGraph> linearizedNewFactors =
|
||||
newFactors.linearize(linPoint_);
|
||||
|
||||
// Update ISAM
|
||||
isam_.update(*linearizedNewFactors, boost::none, eliminationFunction_);
|
||||
}
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
void HybridNonlinearISAM::reorder_relinearize() {
|
||||
if (factors_.size() > 0) {
|
||||
// Obtain the new linearization point
|
||||
const Values newLinPoint = estimate();
|
||||
|
||||
isam_.clear();
|
||||
|
||||
// Just recreate the whole BayesTree
|
||||
// TODO: allow for constrained ordering here
|
||||
// TODO: decouple relinearization and reordering to avoid
|
||||
isam_.update(*factors_.linearize(newLinPoint), boost::none,
|
||||
eliminationFunction_);
|
||||
|
||||
// Update linearization point
|
||||
linPoint_ = newLinPoint;
|
||||
}
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
Values HybridNonlinearISAM::estimate() {
|
||||
Values result;
|
||||
if (isam_.size() > 0) {
|
||||
HybridValues values = isam_.optimize();
|
||||
assignment_ = values.discrete();
|
||||
return linPoint_.retract(values.continuous());
|
||||
} else {
|
||||
return linPoint_;
|
||||
}
|
||||
}
|
||||
|
||||
// /* *************************************************************************
|
||||
// */ Matrix HybridNonlinearISAM::marginalCovariance(Key key) const {
|
||||
// return isam_.marginalCovariance(key);
|
||||
// }
|
||||
|
||||
/* ************************************************************************* */
|
||||
void HybridNonlinearISAM::print(const string& s,
|
||||
const KeyFormatter& keyFormatter) const {
|
||||
cout << s << "ReorderInterval: " << reorderInterval_
|
||||
<< " Current Count: " << reorderCounter_ << endl;
|
||||
isam_.print("HybridGaussianISAM:\n");
|
||||
linPoint_.print("Linearization Point:\n", keyFormatter);
|
||||
factors_.print("Nonlinear Graph:\n", keyFormatter);
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
void HybridNonlinearISAM::printStats() const {
|
||||
isam_.getCliqueData().getStats().print();
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
|
||||
} // namespace gtsam
|
|
@ -0,0 +1,132 @@
|
|||
/* ----------------------------------------------------------------------------
|
||||
|
||||
* GTSAM Copyright 2010, Georgia Tech Research Corporation,
|
||||
* Atlanta, Georgia 30332-0415
|
||||
* All Rights Reserved
|
||||
* Authors: Frank Dellaert, et al. (see THANKS for the full author list)
|
||||
|
||||
* See LICENSE for the license information
|
||||
|
||||
* -------------------------------------------------------------------------- */
|
||||
|
||||
/**
|
||||
* @file HybridNonlinearISAM.h
|
||||
* @date Sep 12, 2022
|
||||
* @author Varun Agrawal
|
||||
*/
|
||||
|
||||
#pragma once
|
||||
|
||||
#include <gtsam/hybrid/HybridGaussianISAM.h>
|
||||
#include <gtsam/hybrid/HybridNonlinearFactorGraph.h>
|
||||
|
||||
namespace gtsam {
|
||||
/**
|
||||
* Wrapper class to manage ISAM in a nonlinear context
|
||||
*/
|
||||
class GTSAM_EXPORT HybridNonlinearISAM {
|
||||
protected:
|
||||
/** The internal iSAM object */
|
||||
gtsam::HybridGaussianISAM isam_;
|
||||
|
||||
/** The current linearization point */
|
||||
Values linPoint_;
|
||||
|
||||
/// The discrete assignment
|
||||
DiscreteValues assignment_;
|
||||
|
||||
/** The original factors, used when relinearizing */
|
||||
HybridNonlinearFactorGraph factors_;
|
||||
|
||||
/** The reordering interval and counter */
|
||||
int reorderInterval_;
|
||||
int reorderCounter_;
|
||||
|
||||
/** The elimination function */
|
||||
HybridGaussianFactorGraph::Eliminate eliminationFunction_;
|
||||
|
||||
public:
|
||||
/// @name Standard Constructors
|
||||
/// @{
|
||||
|
||||
/**
|
||||
* Periodically reorder and relinearize
|
||||
* @param reorderInterval is the number of updates between reorderings,
|
||||
* 0 never reorders (and is dangerous for memory consumption)
|
||||
* 1 (default) reorders every time, in worse case is batch every update
|
||||
* typical values are 50 or 100
|
||||
*/
|
||||
HybridNonlinearISAM(
|
||||
int reorderInterval = 1,
|
||||
const HybridGaussianFactorGraph::Eliminate& eliminationFunction =
|
||||
HybridGaussianFactorGraph::EliminationTraitsType::DefaultEliminate)
|
||||
: reorderInterval_(reorderInterval),
|
||||
reorderCounter_(0),
|
||||
eliminationFunction_(eliminationFunction) {}
|
||||
|
||||
/// @}
|
||||
/// @name Standard Interface
|
||||
/// @{
|
||||
|
||||
/** Return the current solution estimate */
|
||||
Values estimate();
|
||||
|
||||
// /** find the marginal covariance for a single variable */
|
||||
// Matrix marginalCovariance(Key key) const;
|
||||
|
||||
// access
|
||||
|
||||
/** access the underlying bayes tree */
|
||||
const HybridGaussianISAM& bayesTree() const { return isam_; }
|
||||
|
||||
/**
|
||||
* @brief Prune the underlying Bayes tree.
|
||||
*
|
||||
* @param root The root key in the discrete conditional decision tree.
|
||||
* @param maxNumberLeaves
|
||||
*/
|
||||
void prune(const Key& root, const size_t maxNumberLeaves) {
|
||||
isam_.prune(root, maxNumberLeaves);
|
||||
}
|
||||
|
||||
/** Return the current linearization point */
|
||||
const Values& getLinearizationPoint() const { return linPoint_; }
|
||||
|
||||
/** Return the current discrete assignment */
|
||||
const DiscreteValues& getAssignment() const { return assignment_; }
|
||||
|
||||
/** get underlying nonlinear graph */
|
||||
const HybridNonlinearFactorGraph& getFactorsUnsafe() const {
|
||||
return factors_;
|
||||
}
|
||||
|
||||
/** get counters */
|
||||
int reorderInterval() const { return reorderInterval_; } ///< TODO: comment
|
||||
int reorderCounter() const { return reorderCounter_; } ///< TODO: comment
|
||||
|
||||
/** prints out all contents of the system */
|
||||
void print(const std::string& s = "",
|
||||
const KeyFormatter& keyFormatter = DefaultKeyFormatter) const;
|
||||
|
||||
/** prints out clique statistics */
|
||||
void printStats() const;
|
||||
|
||||
/** saves the Tree to a text file in GraphViz format */
|
||||
void saveGraph(const std::string& s,
|
||||
const KeyFormatter& keyFormatter = DefaultKeyFormatter) const;
|
||||
|
||||
/// @}
|
||||
/// @name Advanced Interface
|
||||
/// @{
|
||||
|
||||
/** Add new factors along with their initial linearization points */
|
||||
void update(const HybridNonlinearFactorGraph& newFactors,
|
||||
const Values& initialValues);
|
||||
|
||||
/** Relinearization and reordering of variables */
|
||||
void reorder_relinearize();
|
||||
|
||||
/// @}
|
||||
};
|
||||
|
||||
} // namespace gtsam
|
|
@ -166,7 +166,7 @@ struct Switching {
|
|||
linearizationPoint.insert<double>(X(k), static_cast<double>(k));
|
||||
}
|
||||
|
||||
linearizedFactorGraph = nonlinearFactorGraph.linearize(linearizationPoint);
|
||||
linearizedFactorGraph = *nonlinearFactorGraph.linearize(linearizationPoint);
|
||||
}
|
||||
|
||||
// Create motion models for a given time step
|
||||
|
|
|
@ -399,7 +399,7 @@ TEST(HybridGaussianISAM, NonTrivial) {
|
|||
initial.insert(Z(0), Pose2(0.0, 2.0, 0.0));
|
||||
initial.insert(W(0), Pose2(0.0, 3.0, 0.0));
|
||||
|
||||
HybridGaussianFactorGraph gfg = fg.linearize(initial);
|
||||
HybridGaussianFactorGraph gfg = *fg.linearize(initial);
|
||||
fg = HybridNonlinearFactorGraph();
|
||||
|
||||
HybridGaussianISAM inc;
|
||||
|
@ -444,7 +444,7 @@ TEST(HybridGaussianISAM, NonTrivial) {
|
|||
// The leg link did not move so we set the expected pose accordingly.
|
||||
initial.insert(W(1), Pose2(0.0, 3.0, 0.0));
|
||||
|
||||
gfg = fg.linearize(initial);
|
||||
gfg = *fg.linearize(initial);
|
||||
fg = HybridNonlinearFactorGraph();
|
||||
|
||||
// Update without pruning
|
||||
|
@ -483,7 +483,7 @@ TEST(HybridGaussianISAM, NonTrivial) {
|
|||
initial.insert(Z(2), Pose2(2.0, 2.0, 0.0));
|
||||
initial.insert(W(2), Pose2(0.0, 3.0, 0.0));
|
||||
|
||||
gfg = fg.linearize(initial);
|
||||
gfg = *fg.linearize(initial);
|
||||
fg = HybridNonlinearFactorGraph();
|
||||
|
||||
// Now we prune!
|
||||
|
@ -526,7 +526,7 @@ TEST(HybridGaussianISAM, NonTrivial) {
|
|||
initial.insert(Z(3), Pose2(3.0, 2.0, 0.0));
|
||||
initial.insert(W(3), Pose2(0.0, 3.0, 0.0));
|
||||
|
||||
gfg = fg.linearize(initial);
|
||||
gfg = *fg.linearize(initial);
|
||||
fg = HybridNonlinearFactorGraph();
|
||||
|
||||
// Keep pruning!
|
||||
|
|
|
@ -60,7 +60,7 @@ TEST(HybridFactorGraph, GaussianFactorGraph) {
|
|||
Values linearizationPoint;
|
||||
linearizationPoint.insert<double>(X(0), 0);
|
||||
|
||||
HybridGaussianFactorGraph ghfg = fg.linearize(linearizationPoint);
|
||||
HybridGaussianFactorGraph ghfg = *fg.linearize(linearizationPoint);
|
||||
|
||||
// Add a factor to the GaussianFactorGraph
|
||||
ghfg.add(JacobianFactor(X(0), I_1x1, Vector1(5)));
|
||||
|
@ -139,7 +139,7 @@ TEST(HybridGaussianFactorGraph, Resize) {
|
|||
linearizationPoint.insert<double>(X(1), 1);
|
||||
|
||||
// Generate `HybridGaussianFactorGraph` by linearizing
|
||||
HybridGaussianFactorGraph gfg = nhfg.linearize(linearizationPoint);
|
||||
HybridGaussianFactorGraph gfg = *nhfg.linearize(linearizationPoint);
|
||||
|
||||
EXPECT_LONGS_EQUAL(gfg.size(), 3);
|
||||
|
||||
|
@ -250,7 +250,7 @@ TEST(HybridFactorGraph, Linearization) {
|
|||
|
||||
// Linearize here:
|
||||
HybridGaussianFactorGraph actualLinearized =
|
||||
self.nonlinearFactorGraph.linearize(self.linearizationPoint);
|
||||
*self.nonlinearFactorGraph.linearize(self.linearizationPoint);
|
||||
|
||||
EXPECT_LONGS_EQUAL(7, actualLinearized.size());
|
||||
}
|
||||
|
@ -718,7 +718,7 @@ TEST(HybridFactorGraph, DefaultDecisionTree) {
|
|||
ordering += X(0);
|
||||
ordering += X(1);
|
||||
|
||||
HybridGaussianFactorGraph linearized = fg.linearize(initialEstimate);
|
||||
HybridGaussianFactorGraph linearized = *fg.linearize(initialEstimate);
|
||||
gtsam::HybridBayesNet::shared_ptr hybridBayesNet;
|
||||
gtsam::HybridGaussianFactorGraph::shared_ptr remainingFactorGraph;
|
||||
|
||||
|
|
|
@ -0,0 +1,589 @@
|
|||
/* ----------------------------------------------------------------------------
|
||||
|
||||
* GTSAM Copyright 2010, Georgia Tech Research Corporation,
|
||||
* Atlanta, Georgia 30332-0415
|
||||
* All Rights Reserved
|
||||
* Authors: Frank Dellaert, et al. (see THANKS for the full author list)
|
||||
|
||||
* See LICENSE for the license information
|
||||
|
||||
* -------------------------------------------------------------------------- */
|
||||
|
||||
/**
|
||||
* @file testHybridNonlinearISAM.cpp
|
||||
* @brief Unit tests for nonlinear incremental inference
|
||||
* @author Fan Jiang, Varun Agrawal, Frank Dellaert
|
||||
* @date Jan 2021
|
||||
*/
|
||||
|
||||
#include <gtsam/discrete/DiscreteBayesNet.h>
|
||||
#include <gtsam/discrete/DiscreteDistribution.h>
|
||||
#include <gtsam/discrete/DiscreteFactorGraph.h>
|
||||
#include <gtsam/geometry/Pose2.h>
|
||||
#include <gtsam/hybrid/HybridConditional.h>
|
||||
#include <gtsam/hybrid/HybridNonlinearISAM.h>
|
||||
#include <gtsam/linear/GaussianBayesNet.h>
|
||||
#include <gtsam/linear/GaussianFactorGraph.h>
|
||||
#include <gtsam/nonlinear/PriorFactor.h>
|
||||
#include <gtsam/sam/BearingRangeFactor.h>
|
||||
|
||||
#include <numeric>
|
||||
|
||||
#include "Switching.h"
|
||||
|
||||
// Include for test suite
|
||||
#include <CppUnitLite/TestHarness.h>
|
||||
|
||||
using namespace std;
|
||||
using namespace gtsam;
|
||||
using noiseModel::Isotropic;
|
||||
using symbol_shorthand::L;
|
||||
using symbol_shorthand::M;
|
||||
using symbol_shorthand::W;
|
||||
using symbol_shorthand::X;
|
||||
using symbol_shorthand::Y;
|
||||
using symbol_shorthand::Z;
|
||||
|
||||
/* ****************************************************************************/
|
||||
// Test if we can perform elimination incrementally.
|
||||
TEST(HybridNonlinearISAM, IncrementalElimination) {
|
||||
Switching switching(3);
|
||||
HybridNonlinearISAM isam;
|
||||
HybridNonlinearFactorGraph graph1;
|
||||
Values initial;
|
||||
|
||||
// Create initial factor graph
|
||||
// * * *
|
||||
// | | |
|
||||
// X1 -*- X2 -*- X3
|
||||
// \*-M1-*/
|
||||
graph1.push_back(switching.nonlinearFactorGraph.at(0)); // P(X1)
|
||||
graph1.push_back(switching.nonlinearFactorGraph.at(1)); // P(X1, X2 | M1)
|
||||
graph1.push_back(switching.nonlinearFactorGraph.at(2)); // P(X2, X3 | M2)
|
||||
graph1.push_back(switching.nonlinearFactorGraph.at(5)); // P(M1)
|
||||
|
||||
initial.insert<double>(X(1), 1);
|
||||
initial.insert<double>(X(2), 2);
|
||||
initial.insert<double>(X(3), 3);
|
||||
|
||||
// Run update step
|
||||
isam.update(graph1, initial);
|
||||
|
||||
// Check that after update we have 3 hybrid Bayes net nodes:
|
||||
// P(X1 | X2, M1) and P(X2, X3 | M1, M2), P(M1, M2)
|
||||
HybridGaussianISAM bayesTree = isam.bayesTree();
|
||||
EXPECT_LONGS_EQUAL(3, bayesTree.size());
|
||||
EXPECT(bayesTree[X(1)]->conditional()->frontals() == KeyVector{X(1)});
|
||||
EXPECT(bayesTree[X(1)]->conditional()->parents() == KeyVector({X(2), M(1)}));
|
||||
EXPECT(bayesTree[X(2)]->conditional()->frontals() == KeyVector({X(2), X(3)}));
|
||||
EXPECT(bayesTree[X(2)]->conditional()->parents() == KeyVector({M(1), M(2)}));
|
||||
|
||||
/********************************************************/
|
||||
// New factor graph for incremental update.
|
||||
HybridNonlinearFactorGraph graph2;
|
||||
initial = Values();
|
||||
|
||||
graph1.push_back(switching.nonlinearFactorGraph.at(3)); // P(X2)
|
||||
graph2.push_back(switching.nonlinearFactorGraph.at(4)); // P(X3)
|
||||
graph2.push_back(switching.nonlinearFactorGraph.at(6)); // P(M1, M2)
|
||||
|
||||
isam.update(graph2, initial);
|
||||
|
||||
bayesTree = isam.bayesTree();
|
||||
// Check that after the second update we have
|
||||
// 1 additional hybrid Bayes net node:
|
||||
// P(X2, X3 | M1, M2)
|
||||
EXPECT_LONGS_EQUAL(3, bayesTree.size());
|
||||
EXPECT(bayesTree[X(3)]->conditional()->frontals() == KeyVector({X(2), X(3)}));
|
||||
EXPECT(bayesTree[X(3)]->conditional()->parents() == KeyVector({M(1), M(2)}));
|
||||
}
|
||||
|
||||
/* ****************************************************************************/
|
||||
// Test if we can incrementally do the inference
|
||||
TEST(HybridNonlinearISAM, IncrementalInference) {
|
||||
Switching switching(3);
|
||||
HybridNonlinearISAM isam;
|
||||
HybridNonlinearFactorGraph graph1;
|
||||
Values initial;
|
||||
|
||||
// Create initial factor graph
|
||||
// * * *
|
||||
// | | |
|
||||
// X1 -*- X2 -*- X3
|
||||
// | |
|
||||
// *-M1 - * - M2
|
||||
graph1.push_back(switching.nonlinearFactorGraph.at(0)); // P(X1)
|
||||
graph1.push_back(switching.nonlinearFactorGraph.at(1)); // P(X1, X2 | M1)
|
||||
graph1.push_back(switching.nonlinearFactorGraph.at(3)); // P(X2)
|
||||
graph1.push_back(switching.nonlinearFactorGraph.at(5)); // P(M1)
|
||||
|
||||
initial.insert<double>(X(1), 1);
|
||||
initial.insert<double>(X(2), 2);
|
||||
|
||||
// Run update step
|
||||
isam.update(graph1, initial);
|
||||
HybridGaussianISAM bayesTree = isam.bayesTree();
|
||||
|
||||
auto discreteConditional_m1 =
|
||||
bayesTree[M(1)]->conditional()->asDiscreteConditional();
|
||||
EXPECT(discreteConditional_m1->keys() == KeyVector({M(1)}));
|
||||
|
||||
/********************************************************/
|
||||
// New factor graph for incremental update.
|
||||
HybridNonlinearFactorGraph graph2;
|
||||
initial = Values();
|
||||
|
||||
initial.insert<double>(X(3), 3);
|
||||
|
||||
graph2.push_back(switching.nonlinearFactorGraph.at(2)); // P(X2, X3 | M2)
|
||||
graph2.push_back(switching.nonlinearFactorGraph.at(4)); // P(X3)
|
||||
graph2.push_back(switching.nonlinearFactorGraph.at(6)); // P(M1, M2)
|
||||
|
||||
isam.update(graph2, initial);
|
||||
bayesTree = isam.bayesTree();
|
||||
|
||||
/********************************************************/
|
||||
// Run batch elimination so we can compare results.
|
||||
Ordering ordering;
|
||||
ordering += X(1);
|
||||
ordering += X(2);
|
||||
ordering += X(3);
|
||||
|
||||
// Now we calculate the actual factors using full elimination
|
||||
HybridBayesTree::shared_ptr expectedHybridBayesTree;
|
||||
HybridGaussianFactorGraph::shared_ptr expectedRemainingGraph;
|
||||
std::tie(expectedHybridBayesTree, expectedRemainingGraph) =
|
||||
switching.linearizedFactorGraph.eliminatePartialMultifrontal(ordering);
|
||||
|
||||
// The densities on X(1) should be the same
|
||||
auto x1_conditional = dynamic_pointer_cast<GaussianMixture>(
|
||||
bayesTree[X(1)]->conditional()->inner());
|
||||
auto actual_x1_conditional = dynamic_pointer_cast<GaussianMixture>(
|
||||
(*expectedHybridBayesTree)[X(1)]->conditional()->inner());
|
||||
EXPECT(assert_equal(*x1_conditional, *actual_x1_conditional));
|
||||
|
||||
// The densities on X(2) should be the same
|
||||
auto x2_conditional = dynamic_pointer_cast<GaussianMixture>(
|
||||
bayesTree[X(2)]->conditional()->inner());
|
||||
auto actual_x2_conditional = dynamic_pointer_cast<GaussianMixture>(
|
||||
(*expectedHybridBayesTree)[X(2)]->conditional()->inner());
|
||||
EXPECT(assert_equal(*x2_conditional, *actual_x2_conditional));
|
||||
|
||||
// The densities on X(3) should be the same
|
||||
auto x3_conditional = dynamic_pointer_cast<GaussianMixture>(
|
||||
bayesTree[X(3)]->conditional()->inner());
|
||||
auto actual_x3_conditional = dynamic_pointer_cast<GaussianMixture>(
|
||||
(*expectedHybridBayesTree)[X(2)]->conditional()->inner());
|
||||
EXPECT(assert_equal(*x3_conditional, *actual_x3_conditional));
|
||||
|
||||
// We only perform manual continuous elimination for 0,0.
|
||||
// The other discrete probabilities on M(2) are calculated the same way
|
||||
Ordering discrete_ordering;
|
||||
discrete_ordering += M(1);
|
||||
discrete_ordering += M(2);
|
||||
HybridBayesTree::shared_ptr discreteBayesTree =
|
||||
expectedRemainingGraph->eliminateMultifrontal(discrete_ordering);
|
||||
|
||||
DiscreteValues m00;
|
||||
m00[M(1)] = 0, m00[M(2)] = 0;
|
||||
DiscreteConditional decisionTree =
|
||||
*(*discreteBayesTree)[M(2)]->conditional()->asDiscreteConditional();
|
||||
double m00_prob = decisionTree(m00);
|
||||
|
||||
auto discreteConditional =
|
||||
bayesTree[M(2)]->conditional()->asDiscreteConditional();
|
||||
|
||||
// Test if the probability values are as expected with regression tests.
|
||||
DiscreteValues assignment;
|
||||
EXPECT(assert_equal(m00_prob, 0.0619233, 1e-5));
|
||||
assignment[M(1)] = 0;
|
||||
assignment[M(2)] = 0;
|
||||
EXPECT(assert_equal(m00_prob, (*discreteConditional)(assignment), 1e-5));
|
||||
assignment[M(1)] = 1;
|
||||
assignment[M(2)] = 0;
|
||||
EXPECT(assert_equal(0.183743, (*discreteConditional)(assignment), 1e-5));
|
||||
assignment[M(1)] = 0;
|
||||
assignment[M(2)] = 1;
|
||||
EXPECT(assert_equal(0.204159, (*discreteConditional)(assignment), 1e-5));
|
||||
assignment[M(1)] = 1;
|
||||
assignment[M(2)] = 1;
|
||||
EXPECT(assert_equal(0.2, (*discreteConditional)(assignment), 1e-5));
|
||||
|
||||
// Check if the clique conditional generated from incremental elimination
|
||||
// matches that of batch elimination.
|
||||
auto expectedChordal = expectedRemainingGraph->eliminateMultifrontal();
|
||||
auto expectedConditional = dynamic_pointer_cast<DecisionTreeFactor>(
|
||||
(*expectedChordal)[M(2)]->conditional()->inner());
|
||||
auto actualConditional = dynamic_pointer_cast<DecisionTreeFactor>(
|
||||
bayesTree[M(2)]->conditional()->inner());
|
||||
EXPECT(assert_equal(*actualConditional, *expectedConditional, 1e-6));
|
||||
}
|
||||
|
||||
/* ****************************************************************************/
|
||||
// Test if we can approximately do the inference
|
||||
TEST(HybridNonlinearISAM, Approx_inference) {
|
||||
Switching switching(4);
|
||||
HybridNonlinearISAM incrementalHybrid;
|
||||
HybridNonlinearFactorGraph graph1;
|
||||
Values initial;
|
||||
|
||||
// Add the 3 hybrid factors, x1-x2, x2-x3, x3-x4
|
||||
for (size_t i = 1; i < 4; i++) {
|
||||
graph1.push_back(switching.nonlinearFactorGraph.at(i));
|
||||
}
|
||||
|
||||
// Add the Gaussian factors, 1 prior on X(1),
|
||||
// 3 measurements on X(2), X(3), X(4)
|
||||
graph1.push_back(switching.nonlinearFactorGraph.at(0));
|
||||
for (size_t i = 4; i <= 7; i++) {
|
||||
graph1.push_back(switching.nonlinearFactorGraph.at(i));
|
||||
initial.insert<double>(X(i - 3), i - 3);
|
||||
}
|
||||
|
||||
// Create ordering.
|
||||
Ordering ordering;
|
||||
for (size_t j = 1; j <= 4; j++) {
|
||||
ordering += X(j);
|
||||
}
|
||||
|
||||
// Now we calculate the actual factors using full elimination
|
||||
HybridBayesTree::shared_ptr unprunedHybridBayesTree;
|
||||
HybridGaussianFactorGraph::shared_ptr unprunedRemainingGraph;
|
||||
std::tie(unprunedHybridBayesTree, unprunedRemainingGraph) =
|
||||
switching.linearizedFactorGraph.eliminatePartialMultifrontal(ordering);
|
||||
|
||||
size_t maxNrLeaves = 5;
|
||||
incrementalHybrid.update(graph1, initial);
|
||||
HybridGaussianISAM bayesTree = incrementalHybrid.bayesTree();
|
||||
|
||||
bayesTree.prune(M(3), maxNrLeaves);
|
||||
|
||||
/*
|
||||
unpruned factor is:
|
||||
Choice(m3)
|
||||
0 Choice(m2)
|
||||
0 0 Choice(m1)
|
||||
0 0 0 Leaf 0.11267528
|
||||
0 0 1 Leaf 0.18576102
|
||||
0 1 Choice(m1)
|
||||
0 1 0 Leaf 0.18754662
|
||||
0 1 1 Leaf 0.30623871
|
||||
1 Choice(m2)
|
||||
1 0 Choice(m1)
|
||||
1 0 0 Leaf 0.18576102
|
||||
1 0 1 Leaf 0.30622428
|
||||
1 1 Choice(m1)
|
||||
1 1 0 Leaf 0.30623871
|
||||
1 1 1 Leaf 0.5
|
||||
|
||||
pruned factors is:
|
||||
Choice(m3)
|
||||
0 Choice(m2)
|
||||
0 0 Leaf 0
|
||||
0 1 Choice(m1)
|
||||
0 1 0 Leaf 0.18754662
|
||||
0 1 1 Leaf 0.30623871
|
||||
1 Choice(m2)
|
||||
1 0 Choice(m1)
|
||||
1 0 0 Leaf 0
|
||||
1 0 1 Leaf 0.30622428
|
||||
1 1 Choice(m1)
|
||||
1 1 0 Leaf 0.30623871
|
||||
1 1 1 Leaf 0.5
|
||||
*/
|
||||
|
||||
auto discreteConditional_m1 = *dynamic_pointer_cast<DiscreteConditional>(
|
||||
bayesTree[M(1)]->conditional()->inner());
|
||||
EXPECT(discreteConditional_m1.keys() == KeyVector({M(1), M(2), M(3)}));
|
||||
|
||||
// Get the number of elements which are greater than 0.
|
||||
auto count = [](const double &value, int count) {
|
||||
return value > 0 ? count + 1 : count;
|
||||
};
|
||||
// Check that the number of leaves after pruning is 5.
|
||||
EXPECT_LONGS_EQUAL(5, discreteConditional_m1.fold(count, 0));
|
||||
|
||||
// Check that the hybrid nodes of the bayes net match those of the pre-pruning
|
||||
// bayes net, at the same positions.
|
||||
auto &unprunedLastDensity = *dynamic_pointer_cast<GaussianMixture>(
|
||||
unprunedHybridBayesTree->clique(X(4))->conditional()->inner());
|
||||
auto &lastDensity = *dynamic_pointer_cast<GaussianMixture>(
|
||||
bayesTree[X(4)]->conditional()->inner());
|
||||
|
||||
std::vector<std::pair<DiscreteValues, double>> assignments =
|
||||
discreteConditional_m1.enumerate();
|
||||
// Loop over all assignments and check the pruned components
|
||||
for (auto &&av : assignments) {
|
||||
const DiscreteValues &assignment = av.first;
|
||||
const double value = av.second;
|
||||
|
||||
if (value == 0.0) {
|
||||
EXPECT(lastDensity(assignment) == nullptr);
|
||||
} else {
|
||||
CHECK(lastDensity(assignment));
|
||||
EXPECT(assert_equal(*unprunedLastDensity(assignment),
|
||||
*lastDensity(assignment)));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/* ****************************************************************************/
|
||||
// Test approximate inference with an additional pruning step.
|
||||
TEST(HybridNonlinearISAM, Incremental_approximate) {
|
||||
Switching switching(5);
|
||||
HybridNonlinearISAM incrementalHybrid;
|
||||
HybridNonlinearFactorGraph graph1;
|
||||
Values initial;
|
||||
|
||||
/***** Run Round 1 *****/
|
||||
// Add the 3 hybrid factors, x1-x2, x2-x3, x3-x4
|
||||
for (size_t i = 1; i < 4; i++) {
|
||||
graph1.push_back(switching.nonlinearFactorGraph.at(i));
|
||||
}
|
||||
|
||||
// Add the Gaussian factors, 1 prior on X(1),
|
||||
// 3 measurements on X(2), X(3), X(4)
|
||||
graph1.push_back(switching.nonlinearFactorGraph.at(0));
|
||||
initial.insert<double>(X(1), 1);
|
||||
for (size_t i = 5; i <= 7; i++) {
|
||||
graph1.push_back(switching.nonlinearFactorGraph.at(i));
|
||||
initial.insert<double>(X(i - 3), i - 3);
|
||||
}
|
||||
|
||||
// Run update with pruning
|
||||
size_t maxComponents = 5;
|
||||
incrementalHybrid.update(graph1, initial);
|
||||
HybridGaussianISAM bayesTree = incrementalHybrid.bayesTree();
|
||||
|
||||
bayesTree.prune(M(3), maxComponents);
|
||||
|
||||
// Check if we have a bayes tree with 4 hybrid nodes,
|
||||
// each with 2, 4, 8, and 5 (pruned) leaves respetively.
|
||||
EXPECT_LONGS_EQUAL(4, bayesTree.size());
|
||||
EXPECT_LONGS_EQUAL(
|
||||
2, bayesTree[X(1)]->conditional()->asMixture()->nrComponents());
|
||||
EXPECT_LONGS_EQUAL(
|
||||
4, bayesTree[X(2)]->conditional()->asMixture()->nrComponents());
|
||||
EXPECT_LONGS_EQUAL(
|
||||
5, bayesTree[X(3)]->conditional()->asMixture()->nrComponents());
|
||||
EXPECT_LONGS_EQUAL(
|
||||
5, bayesTree[X(4)]->conditional()->asMixture()->nrComponents());
|
||||
|
||||
/***** Run Round 2 *****/
|
||||
HybridGaussianFactorGraph graph2;
|
||||
graph2.push_back(switching.nonlinearFactorGraph.at(4)); // x4-x5
|
||||
graph2.push_back(switching.nonlinearFactorGraph.at(8)); // x5 measurement
|
||||
initial = Values();
|
||||
initial.insert<double>(X(5), 5);
|
||||
|
||||
// Run update with pruning a second time.
|
||||
incrementalHybrid.update(graph2, initial);
|
||||
bayesTree = incrementalHybrid.bayesTree();
|
||||
|
||||
bayesTree.prune(M(4), maxComponents);
|
||||
|
||||
// Check if we have a bayes tree with pruned hybrid nodes,
|
||||
// with 5 (pruned) leaves.
|
||||
CHECK_EQUAL(5, bayesTree.size());
|
||||
EXPECT_LONGS_EQUAL(
|
||||
5, bayesTree[X(4)]->conditional()->asMixture()->nrComponents());
|
||||
EXPECT_LONGS_EQUAL(
|
||||
5, bayesTree[X(5)]->conditional()->asMixture()->nrComponents());
|
||||
}
|
||||
|
||||
/* ************************************************************************/
// A GTSAM-only test for running inference on a single-legged robot.
// The leg links are represented by the chain X-Y-Z-W, where X is the base and
// W is the foot.
// We use BetweenFactor<Pose2> as constraints between each of the poses.
TEST(HybridNonlinearISAM, NonTrivial) {
  /*************** Run Round 1 ***************/
  HybridNonlinearFactorGraph fg;
  HybridNonlinearISAM inc;

  // Add a prior on pose x1 at the origin.
  // A prior factor consists of a mean and
  // a noise model (covariance matrix)
  Pose2 prior(0.0, 0.0, 0.0);  // prior mean is at origin
  auto priorNoise = noiseModel::Diagonal::Sigmas(
      Vector3(0.3, 0.3, 0.1));  // 30cm std on x,y, 0.1 rad on theta
  fg.emplace_nonlinear<PriorFactor<Pose2>>(X(0), prior, priorNoise);

  // create a noise model for the landmark measurements
  auto poseNoise = noiseModel::Isotropic::Sigma(3, 0.1);

  // We model a robot's single leg as X - Y - Z - W
  // where X is the base link and W is the foot link.

  // Add connecting poses similar to PoseFactors in GTD
  fg.emplace_nonlinear<BetweenFactor<Pose2>>(X(0), Y(0), Pose2(0, 1.0, 0),
                                             poseNoise);
  fg.emplace_nonlinear<BetweenFactor<Pose2>>(Y(0), Z(0), Pose2(0, 1.0, 0),
                                             poseNoise);
  fg.emplace_nonlinear<BetweenFactor<Pose2>>(Z(0), W(0), Pose2(0, 1.0, 0),
                                             poseNoise);

  // Create initial estimate
  Values initial;
  initial.insert(X(0), Pose2(0.0, 0.0, 0.0));
  initial.insert(Y(0), Pose2(0.0, 1.0, 0.0));
  initial.insert(Z(0), Pose2(0.0, 2.0, 0.0));
  initial.insert(W(0), Pose2(0.0, 3.0, 0.0));

  // Don't run update now since we don't have discrete variables involved.

  /*************** Run Round 2 ***************/
  using PlanarMotionModel = BetweenFactor<Pose2>;

  // Add odometry factor with discrete modes: M(1) selects between the robot
  // standing still and moving by `odometry`.
  Pose2 odometry(1.0, 0.0, 0.0);
  KeyVector contKeys = {W(0), W(1)};
  auto noise_model = noiseModel::Isotropic::Sigma(3, 1.0);
  auto still = boost::make_shared<PlanarMotionModel>(W(0), W(1), Pose2(0, 0, 0),
                                                     noise_model),
       moving = boost::make_shared<PlanarMotionModel>(W(0), W(1), odometry,
                                                      noise_model);
  std::vector<PlanarMotionModel::shared_ptr> components = {moving, still};
  auto mixtureFactor = boost::make_shared<MixtureFactor>(
      contKeys, DiscreteKeys{gtsam::DiscreteKey(M(1), 2)}, components);
  fg.push_back(mixtureFactor);

  // Add equivalent of ImuFactor
  fg.emplace_nonlinear<BetweenFactor<Pose2>>(X(0), X(1), Pose2(1.0, 0.0, 0),
                                             poseNoise);
  // PoseFactors-like at k=1
  fg.emplace_nonlinear<BetweenFactor<Pose2>>(X(1), Y(1), Pose2(0, 1, 0),
                                             poseNoise);
  fg.emplace_nonlinear<BetweenFactor<Pose2>>(Y(1), Z(1), Pose2(0, 1, 0),
                                             poseNoise);
  fg.emplace_nonlinear<BetweenFactor<Pose2>>(Z(1), W(1), Pose2(-1, 1, 0),
                                             poseNoise);

  initial.insert(X(1), Pose2(1.0, 0.0, 0.0));
  initial.insert(Y(1), Pose2(1.0, 1.0, 0.0));
  initial.insert(Z(1), Pose2(1.0, 2.0, 0.0));
  // The leg link did not move so we set the expected pose accordingly.
  initial.insert(W(1), Pose2(0.0, 3.0, 0.0));

  // Update without pruning
  // The result is a HybridBayesNet with 1 discrete variable M(1).
  // P(X | measurements) = P(W0|Z0, W1, M1) P(Z0|Y0, W1, M1) P(Y0|X0, W1, M1)
  // P(X0 | X1, W1, M1) P(W1|Z1, X1, M1) P(Z1|Y1, X1, M1)
  // P(Y1 | X1, M1)P(X1 | M1)P(M1)
  // The MHS tree is a 1 level tree for time indices (1,) with 2 leaves.
  inc.update(fg, initial);

  // Reset the graph and initial estimate for the next incremental round.
  fg = HybridNonlinearFactorGraph();
  initial = Values();

  /*************** Run Round 3 ***************/
  // Add odometry factor with discrete modes.
  contKeys = {W(1), W(2)};
  still = boost::make_shared<PlanarMotionModel>(W(1), W(2), Pose2(0, 0, 0),
                                                noise_model);
  moving =
      boost::make_shared<PlanarMotionModel>(W(1), W(2), odometry, noise_model);
  components = {moving, still};
  mixtureFactor = boost::make_shared<MixtureFactor>(
      contKeys, DiscreteKeys{gtsam::DiscreteKey(M(2), 2)}, components);
  fg.push_back(mixtureFactor);

  // Add equivalent of ImuFactor
  fg.emplace_nonlinear<BetweenFactor<Pose2>>(X(1), X(2), Pose2(1.0, 0.0, 0),
                                             poseNoise);
  // PoseFactors-like at k=2
  fg.emplace_nonlinear<BetweenFactor<Pose2>>(X(2), Y(2), Pose2(0, 1, 0),
                                             poseNoise);
  fg.emplace_nonlinear<BetweenFactor<Pose2>>(Y(2), Z(2), Pose2(0, 1, 0),
                                             poseNoise);
  fg.emplace_nonlinear<BetweenFactor<Pose2>>(Z(2), W(2), Pose2(-2, 1, 0),
                                             poseNoise);

  initial.insert(X(2), Pose2(2.0, 0.0, 0.0));
  initial.insert(Y(2), Pose2(2.0, 1.0, 0.0));
  initial.insert(Z(2), Pose2(2.0, 2.0, 0.0));
  initial.insert(W(2), Pose2(0.0, 3.0, 0.0));

  // Now we prune!
  // P(X | measurements) = P(W0|Z0, W1, M1) P(Z0|Y0, W1, M1) P(Y0|X0, W1, M1)
  // P(X0 | X1, W1, M1) P(W1|W2, Z1, X1, M1, M2)
  // P(Z1| W2, Y1, X1, M1, M2) P(Y1 | W2, X1, M1, M2)
  // P(X1 | W2, X2, M1, M2) P(W2|Z2, X2, M1, M2)
  // P(Z2|Y2, X2, M1, M2) P(Y2 | X2, M1, M2)
  // P(X2 | M1, M2) P(M1, M2)
  // The MHS at this point should be a 2 level tree on (1, 2).
  // 1 has 2 choices, and 2 has 4 choices.
  inc.update(fg, initial);
  inc.prune(M(2), 2);  // keep at most 2 leaves on the M(2) conditional

  fg = HybridNonlinearFactorGraph();
  initial = Values();

  /*************** Run Round 4 ***************/
  // Add odometry factor with discrete modes.
  contKeys = {W(2), W(3)};
  still = boost::make_shared<PlanarMotionModel>(W(2), W(3), Pose2(0, 0, 0),
                                                noise_model);
  moving =
      boost::make_shared<PlanarMotionModel>(W(2), W(3), odometry, noise_model);
  components = {moving, still};
  mixtureFactor = boost::make_shared<MixtureFactor>(
      contKeys, DiscreteKeys{gtsam::DiscreteKey(M(3), 2)}, components);
  fg.push_back(mixtureFactor);

  // Add equivalent of ImuFactor
  fg.emplace_nonlinear<BetweenFactor<Pose2>>(X(2), X(3), Pose2(1.0, 0.0, 0),
                                             poseNoise);
  // PoseFactors-like at k=3
  fg.emplace_nonlinear<BetweenFactor<Pose2>>(X(3), Y(3), Pose2(0, 1, 0),
                                             poseNoise);
  fg.emplace_nonlinear<BetweenFactor<Pose2>>(Y(3), Z(3), Pose2(0, 1, 0),
                                             poseNoise);
  fg.emplace_nonlinear<BetweenFactor<Pose2>>(Z(3), W(3), Pose2(-3, 1, 0),
                                             poseNoise);

  initial.insert(X(3), Pose2(3.0, 0.0, 0.0));
  initial.insert(Y(3), Pose2(3.0, 1.0, 0.0));
  initial.insert(Z(3), Pose2(3.0, 2.0, 0.0));
  initial.insert(W(3), Pose2(0.0, 3.0, 0.0));

  // Keep pruning!
  inc.update(fg, initial);
  inc.prune(M(3), 3);  // keep at most 3 leaves on the M(3) conditional

  fg = HybridNonlinearFactorGraph();
  initial = Values();

  HybridGaussianISAM bayesTree = inc.bayesTree();

  // The final discrete graph should not be empty since we have eliminated
  // all continuous variables.
  auto discreteTree = bayesTree[M(3)]->conditional()->asDiscreteConditional();
  EXPECT_LONGS_EQUAL(3, discreteTree->size());

  // Test if the optimal discrete mode assignment is (1, 1, 1),
  // i.e. the robot moved at every step.
  DiscreteFactorGraph discreteGraph;
  discreteGraph.push_back(discreteTree);
  DiscreteValues optimal_assignment = discreteGraph.optimize();

  DiscreteValues expected_assignment;
  expected_assignment[M(1)] = 1;
  expected_assignment[M(2)] = 1;
  expected_assignment[M(3)] = 1;

  EXPECT(assert_equal(expected_assignment, optimal_assignment));

  // Test if pruning worked correctly by checking that
  // we only have 3 leaves in the last node.
  auto lastConditional = bayesTree[X(3)]->conditional()->asMixture();
  EXPECT_LONGS_EQUAL(3, lastConditional->nrComponents());
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
int main() {
|
||||
TestResult tr;
|
||||
return TestRegistry::runAllTests(tr);
|
||||
}
|
Loading…
Reference in New Issue