Merge pull request #1263 from borglab/feature/nonlinear-hybrid

Nonlinear Hybrid
release/4.3a0
Varun Agrawal 2022-08-21 09:09:45 -04:00 committed by GitHub
commit 7977f77887
19 changed files with 2268 additions and 212 deletions

View File

@ -219,6 +219,16 @@ class DiscreteBayesTree {
};
#include <gtsam/discrete/DiscreteLookupDAG.h>
+ class DiscreteLookupTable : gtsam::DiscreteConditional {
+   DiscreteLookupTable(size_t nFrontals, const gtsam::DiscreteKeys& keys,
+                       const gtsam::DecisionTreeFactor::ADT& potentials);
+   void print(
+       const std::string& s = "Discrete Lookup Table: ",
+       const gtsam::KeyFormatter& keyFormatter = gtsam::DefaultKeyFormatter) const;
+   size_t argmax(const gtsam::DiscreteValues& parentsValues) const;
+ };
class DiscreteLookupDAG {
  DiscreteLookupDAG();
  void push_back(const gtsam::DiscreteLookupTable* table);
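For context, the wrapped DiscreteLookupTable supports max-product lookups: argmax returns the most probable assignment of the frontal variable given fixed parent values. A minimal sketch (the table, key, and assignment below are illustrative, not code from this PR):

  // Assume `table` is a DiscreteLookupTable whose single parent has key 1.
  gtsam::DiscreteValues parentsValues;
  parentsValues[1] = 0;                      // fix the parent's assignment
  size_t mpe = table.argmax(parentsValues);  // most probable frontal value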

View File

@ -51,7 +51,7 @@ GaussianMixtureFactor GaussianMixtureFactor::FromFactors(
void GaussianMixtureFactor::print(const std::string &s,
                                  const KeyFormatter &formatter) const {
  HybridFactor::print(s, formatter);
- std::cout << "]{\n";
+ std::cout << "{\n";
  factors_.print(
      "", [&](Key k) { return formatter(k); },
      [&](const GaussianFactor::shared_ptr &gf) -> std::string {

View File

@ -22,7 +22,7 @@
#include <gtsam/discrete/DecisionTree.h>
#include <gtsam/discrete/DiscreteKey.h>
- #include <gtsam/hybrid/HybridFactor.h>
+ #include <gtsam/hybrid/HybridGaussianFactor.h>
#include <gtsam/linear/GaussianFactor.h>
namespace gtsam {

View File

@ -76,7 +76,10 @@ class GTSAM_EXPORT HybridBayesTree : public BayesTree<HybridBayesTreeClique> {
/**
 * @brief Class for Hybrid Bayes tree orphan subtrees.
 *
- * This does special stuff for the hybrid case
+ * This object stores parent keys in our base type factor so that
+ * eliminating those parent keys will pull this subtree into the
+ * elimination.
+ * This does special stuff for the hybrid case.
 *
 * @tparam CLIQUE
 */

View File

@ -89,17 +89,23 @@ void HybridFactor::print(const std::string &s,
  if (isContinuous_) std::cout << "Continuous ";
  if (isDiscrete_) std::cout << "Discrete ";
  if (isHybrid_) std::cout << "Hybrid ";
+ std::cout << "[";
- for (size_t c=0; c<continuousKeys_.size(); c++) {
+ for (size_t c = 0; c < continuousKeys_.size(); c++) {
    std::cout << formatter(continuousKeys_.at(c));
    if (c < continuousKeys_.size() - 1) {
      std::cout << " ";
    } else {
-     std::cout << "; ";
+     std::cout << (discreteKeys_.size() > 0 ? "; " : "");
    }
  }
- for (auto&& discreteKey : discreteKeys_) {
-   std::cout << formatter(discreteKey.first) << " ";
- }
+ for (size_t d = 0; d < discreteKeys_.size(); d++) {
+   std::cout << formatter(discreteKeys_.at(d).first);
+   if (d < discreteKeys_.size() - 1) {
+     std::cout << " ";
+   }
+ }
+ std::cout << "]";
}
} // namespace gtsam

View File

@ -37,6 +37,8 @@ class GTSAM_EXPORT HybridGaussianFactor : public HybridFactor {
  using This = HybridGaussianFactor;
  using shared_ptr = boost::shared_ptr<This>;
+ HybridGaussianFactor() = default;
  // Explicit conversion from a shared ptr of GF
  explicit HybridGaussianFactor(GaussianFactor::shared_ptr other);
@ -52,7 +54,7 @@ class GTSAM_EXPORT HybridGaussianFactor : public HybridFactor {
  /// GTSAM print utility.
  void print(
-     const std::string &s = "HybridFactor\n",
+     const std::string &s = "HybridGaussianFactor\n",
      const KeyFormatter &formatter = DefaultKeyFormatter) const override;
  /// @}

View File

@ -98,6 +98,12 @@ GaussianMixtureFactor::Sum sumFrontals(
    } else if (f->isContinuous()) {
      deferredFactors.push_back(
          boost::dynamic_pointer_cast<HybridGaussianFactor>(f)->inner());
+   } else if (f->isDiscrete()) {
+     // Don't do anything for discrete-only factors
+     // since we want to eliminate continuous values only.
+     continue;
    } else {
      // We need to handle the case where the object is actually a
      // BayesTreeOrphanWrapper!
@ -106,8 +112,8 @@
      if (!orphan) {
        auto &fr = *f;
        throw std::invalid_argument(
-           std::string("factor is discrete in continuous elimination") +
-           typeid(fr).name());
+           std::string("factor is discrete in continuous elimination ") +
+           demangle(typeid(fr).name()));
      }
    }
  }
@ -158,7 +164,7 @@ discreteElimination(const HybridGaussianFactorGraph &factors,
    }
  }
- auto result = EliminateDiscrete(dfg, frontalKeys);
+ auto result = EliminateForMPE(dfg, frontalKeys);
  return {boost::make_shared<HybridConditional>(result.first),
          boost::make_shared<HybridDiscreteFactor>(result.second)};

View File

@ -24,6 +24,7 @@
#include <gtsam/inference/EliminateableFactorGraph.h>
#include <gtsam/inference/FactorGraph.h>
#include <gtsam/inference/Ordering.h>
+ #include <gtsam/linear/GaussianFactor.h>
namespace gtsam {

View File

@ -0,0 +1,41 @@
/* ----------------------------------------------------------------------------
* GTSAM Copyright 2010, Georgia Tech Research Corporation,
* Atlanta, Georgia 30332-0415
* All Rights Reserved
* Authors: Frank Dellaert, et al. (see THANKS for the full author list)
* See LICENSE for the license information
* -------------------------------------------------------------------------- */
/**
* @file HybridNonlinearFactor.cpp
* @date May 28, 2022
* @author Varun Agrawal
*/
#include <gtsam/hybrid/HybridNonlinearFactor.h>
#include <boost/make_shared.hpp>
namespace gtsam {
/* ************************************************************************* */
HybridNonlinearFactor::HybridNonlinearFactor(
const NonlinearFactor::shared_ptr &other)
: Base(other->keys()), inner_(other) {}
/* ************************************************************************* */
bool HybridNonlinearFactor::equals(const HybridFactor &lf, double tol) const {
return Base::equals(lf, tol);
}
/* ************************************************************************* */
void HybridNonlinearFactor::print(const std::string &s,
const KeyFormatter &formatter) const {
HybridFactor::print(s, formatter);
inner_->print("\n", formatter);
};
} // namespace gtsam

View File

@ -0,0 +1,62 @@
/* ----------------------------------------------------------------------------
* GTSAM Copyright 2010, Georgia Tech Research Corporation,
* Atlanta, Georgia 30332-0415
* All Rights Reserved
* Authors: Frank Dellaert, et al. (see THANKS for the full author list)
* See LICENSE for the license information
* -------------------------------------------------------------------------- */
/**
* @file HybridNonlinearFactor.h
* @date May 28, 2022
* @author Varun Agrawal
*/
#pragma once
#include <gtsam/hybrid/HybridFactor.h>
#include <gtsam/hybrid/HybridGaussianFactor.h>
#include <gtsam/nonlinear/NonlinearFactor.h>
namespace gtsam {
/**
* A HybridNonlinearFactor is a layer over NonlinearFactor so that we do not
* have a diamond inheritance.
*/
class HybridNonlinearFactor : public HybridFactor {
private:
NonlinearFactor::shared_ptr inner_;
public:
using Base = HybridFactor;
using This = HybridNonlinearFactor;
using shared_ptr = boost::shared_ptr<This>;
// Explicit conversion from a shared ptr of NonlinearFactor
explicit HybridNonlinearFactor(const NonlinearFactor::shared_ptr &other);
/// @name Testable
/// @{
/// Check equality.
virtual bool equals(const HybridFactor &lf, double tol) const override;
/// GTSAM print utility.
void print(
const std::string &s = "HybridNonlinearFactor\n",
const KeyFormatter &formatter = DefaultKeyFormatter) const override;
/// @}
NonlinearFactor::shared_ptr inner() const { return inner_; }
/// Linearize to a HybridGaussianFactor at the linearization point `c`.
boost::shared_ptr<HybridGaussianFactor> linearize(const Values &c) const {
return boost::make_shared<HybridGaussianFactor>(inner_->linearize(c));
}
};
} // namespace gtsam
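A usage sketch for the new wrapper (the keys, value, and noise model are illustrative; PriorFactor and Values are existing GTSAM types):

  // Wrap a nonlinear factor, then linearize it to a HybridGaussianFactor.
  auto prior = boost::make_shared<PriorFactor<double>>(
      X(1), 0.0, noiseModel::Isotropic::Sigma(1, 0.1));
  HybridNonlinearFactor hybridFactor(prior);  // type-erased hybrid wrapper
  Values values;
  values.insert<double>(X(1), 1.0);
  boost::shared_ptr<HybridGaussianFactor> hgf = hybridFactor.linearize(values);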

View File

@ -0,0 +1,94 @@
/* ----------------------------------------------------------------------------
* GTSAM Copyright 2010, Georgia Tech Research Corporation,
* Atlanta, Georgia 30332-0415
* All Rights Reserved
* Authors: Frank Dellaert, et al. (see THANKS for the full author list)
* See LICENSE for the license information
* -------------------------------------------------------------------------- */
/**
* @file HybridNonlinearFactorGraph.cpp
* @brief Nonlinear hybrid factor graph that uses type erasure
* @author Varun Agrawal
* @date May 28, 2022
*/
#include <gtsam/hybrid/HybridNonlinearFactorGraph.h>
namespace gtsam {
/* ************************************************************************* */
void HybridNonlinearFactorGraph::add(
boost::shared_ptr<NonlinearFactor> factor) {
FactorGraph::add(boost::make_shared<HybridNonlinearFactor>(factor));
}
/* ************************************************************************* */
void HybridNonlinearFactorGraph::print(const std::string& s,
const KeyFormatter& keyFormatter) const {
// Base::print(str, keyFormatter);
std::cout << (s.empty() ? "" : s + " ") << std::endl;
std::cout << "size: " << size() << std::endl;
for (size_t i = 0; i < factors_.size(); i++) {
std::stringstream ss;
ss << "factor " << i << ": ";
if (factors_[i]) {
factors_[i]->print(ss.str(), keyFormatter);
std::cout << std::endl;
}
}
}
/* ************************************************************************* */
HybridGaussianFactorGraph HybridNonlinearFactorGraph::linearize(
const Values& continuousValues) const {
// create an empty linear FG
HybridGaussianFactorGraph linearFG;
linearFG.reserve(size());
// linearize all hybrid factors
for (auto&& factor : factors_) {
// First check if it is a valid factor
if (factor) {
// Check if the factor is a hybrid factor.
// It can be either a nonlinear MixtureFactor or a linear
// GaussianMixtureFactor.
if (factor->isHybrid()) {
// Check if it is a nonlinear mixture factor
if (auto nlmf = boost::dynamic_pointer_cast<MixtureFactor>(factor)) {
linearFG.push_back(nlmf->linearize(continuousValues));
} else {
linearFG.push_back(factor);
}
// Now check if the factor is a continuous only factor.
} else if (factor->isContinuous()) {
// In this case, we check if factor->inner() is nonlinear since
// HybridFactors wrap over continuous factors.
auto nlhf = boost::dynamic_pointer_cast<HybridNonlinearFactor>(factor);
if (auto nlf =
boost::dynamic_pointer_cast<NonlinearFactor>(nlhf->inner())) {
auto hgf = boost::make_shared<HybridGaussianFactor>(
nlf->linearize(continuousValues));
linearFG.push_back(hgf);
} else {
linearFG.push_back(factor);
}
// Finally if nothing else, we are discrete-only which doesn't need
      // linearization.
} else {
linearFG.push_back(factor);
}
} else {
linearFG.push_back(GaussianFactor::shared_ptr());
}
}
return linearFG;
}
} // namespace gtsam
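The dispatch above lets callers linearize a mixed graph in one call. A hedged sketch (keys, value, and factor are illustrative):

  // Build a small hybrid nonlinear graph and linearize it.
  HybridNonlinearFactorGraph graph;
  graph.emplace_nonlinear<PriorFactor<double>>(
      X(1), 0.0, noiseModel::Isotropic::Sigma(1, 0.1));
  Values linearizationPoint;
  linearizationPoint.insert<double>(X(1), 0.5);
  HybridGaussianFactorGraph linearized = graph.linearize(linearizationPoint);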

View File

@ -0,0 +1,134 @@
/* ----------------------------------------------------------------------------
* GTSAM Copyright 2010, Georgia Tech Research Corporation,
* Atlanta, Georgia 30332-0415
* All Rights Reserved
* Authors: Frank Dellaert, et al. (see THANKS for the full author list)
* See LICENSE for the license information
* -------------------------------------------------------------------------- */
/**
* @file HybridNonlinearFactorGraph.h
* @brief Nonlinear hybrid factor graph that uses type erasure
* @author Varun Agrawal
* @date May 28, 2022
*/
#pragma once
#include <gtsam/hybrid/HybridFactor.h>
#include <gtsam/hybrid/HybridFactorGraph.h>
#include <gtsam/hybrid/HybridGaussianFactorGraph.h>
#include <gtsam/hybrid/HybridNonlinearFactor.h>
#include <gtsam/hybrid/MixtureFactor.h>
#include <gtsam/inference/Ordering.h>
#include <gtsam/nonlinear/NonlinearFactor.h>
#include <boost/format.hpp>
namespace gtsam {
/**
* Nonlinear Hybrid Factor Graph
* -----------------------
* This is the non-linear version of a hybrid factor graph.
* Everything inside needs to be hybrid factor or hybrid conditional.
*/
class GTSAM_EXPORT HybridNonlinearFactorGraph : public HybridFactorGraph {
protected:
/// Check if FACTOR type is derived from NonlinearFactor.
template <typename FACTOR>
using IsNonlinear = typename std::enable_if<
std::is_base_of<NonlinearFactor, FACTOR>::value>::type;
public:
using Base = HybridFactorGraph;
using This = HybridNonlinearFactorGraph; ///< this class
using shared_ptr = boost::shared_ptr<This>; ///< shared_ptr to This
using Values = gtsam::Values; ///< backwards compatibility
using Indices = KeyVector;  ///< vector of Keys
/// @name Constructors
/// @{
HybridNonlinearFactorGraph() = default;
/**
* Implicit copy/downcast constructor to override explicit template container
* constructor. In BayesTree this is used for:
* `cachedSeparatorMarginal_.reset(*separatorMarginal)`
* */
template <class DERIVEDFACTOR>
HybridNonlinearFactorGraph(const FactorGraph<DERIVEDFACTOR>& graph)
: Base(graph) {}
/// @}
// Allow use of selected FactorGraph methods:
using Base::empty;
using Base::reserve;
using Base::size;
using Base::operator[];
using Base::add;
using Base::resize;
/**
* Add a nonlinear factor *pointer* to the internal nonlinear factor graph
* @param nonlinearFactor - boost::shared_ptr to the factor to add
*/
template <typename FACTOR>
IsNonlinear<FACTOR> push_nonlinear(
const boost::shared_ptr<FACTOR>& nonlinearFactor) {
Base::push_back(boost::make_shared<HybridNonlinearFactor>(nonlinearFactor));
}
/// Construct a factor and add (shared pointer to it) to factor graph.
template <class FACTOR, class... Args>
IsNonlinear<FACTOR> emplace_nonlinear(Args&&... args) {
auto factor = boost::allocate_shared<FACTOR>(
Eigen::aligned_allocator<FACTOR>(), std::forward<Args>(args)...);
push_nonlinear(factor);
}
/**
* @brief Add a single factor shared pointer to the hybrid factor graph.
* Dynamically handles the factor type and assigns it to the correct
* underlying container.
*
* @tparam FACTOR The factor type template
* @param sharedFactor The factor to add to this factor graph.
*/
template <typename FACTOR>
void push_back(const boost::shared_ptr<FACTOR>& sharedFactor) {
if (auto p = boost::dynamic_pointer_cast<NonlinearFactor>(sharedFactor)) {
push_nonlinear(p);
} else {
Base::push_back(sharedFactor);
}
}
/// Add a nonlinear factor as a shared ptr.
void add(boost::shared_ptr<NonlinearFactor> factor);
/// Print the factor graph.
void print(
const std::string& s = "HybridNonlinearFactorGraph",
const KeyFormatter& keyFormatter = DefaultKeyFormatter) const override;
/**
* @brief Linearize all the continuous factors in the
* HybridNonlinearFactorGraph.
*
* @param continuousValues: Dictionary of continuous values.
* @return HybridGaussianFactorGraph
*/
HybridGaussianFactorGraph linearize(const Values& continuousValues) const;
};
template <>
struct traits<HybridNonlinearFactorGraph>
: public Testable<HybridNonlinearFactorGraph> {};
} // namespace gtsam
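A sketch of the typed entry points, modeled on how the Switching test fixture later in this PR uses them (factors and keys are illustrative):

  HybridNonlinearFactorGraph fg;
  // emplace_nonlinear constructs the factor in place and wraps it in a
  // HybridNonlinearFactor before adding it.
  fg.emplace_nonlinear<PriorFactor<double>>(
      X(1), 0.0, noiseModel::Isotropic::Sigma(1, 0.1));
  // push_nonlinear wraps an existing shared pointer the same way.
  auto between = boost::make_shared<BetweenFactor<double>>(
      X(1), X(2), 1.0, noiseModel::Isotropic::Sigma(1, 1.0));
  fg.push_nonlinear(between);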

View File

@ -0,0 +1,244 @@
/* ----------------------------------------------------------------------------
* GTSAM Copyright 2010, Georgia Tech Research Corporation,
* Atlanta, Georgia 30332-0415
* All Rights Reserved
* Authors: Frank Dellaert, et al. (see THANKS for the full author list)
* See LICENSE for the license information
* -------------------------------------------------------------------------- */
/**
* @file MixtureFactor.h
* @brief Nonlinear Mixture factor of continuous and discrete.
* @author Kevin Doherty, kdoherty@mit.edu
* @author Varun Agrawal
* @date December 2021
*/
#pragma once
#include <gtsam/discrete/DiscreteValues.h>
#include <gtsam/hybrid/GaussianMixtureFactor.h>
#include <gtsam/hybrid/HybridNonlinearFactor.h>
#include <gtsam/nonlinear/NonlinearFactor.h>
#include <gtsam/nonlinear/Symbol.h>
#include <algorithm>
#include <boost/format.hpp>
#include <cmath>
#include <limits>
#include <vector>
namespace gtsam {
/**
* @brief Implementation of a discrete conditional mixture factor.
*
* Implements a joint discrete-continuous factor where the discrete variable
* serves to "select" a mixture component corresponding to a NonlinearFactor
* type of measurement.
*
* This class stores all factors as HybridFactors which can then be typecast to
* one of (NonlinearFactor, GaussianFactor) which can then be checked to perform
* the correct operation.
*/
class MixtureFactor : public HybridFactor {
public:
using Base = HybridFactor;
using This = MixtureFactor;
using shared_ptr = boost::shared_ptr<MixtureFactor>;
using sharedFactor = boost::shared_ptr<NonlinearFactor>;
/**
* @brief typedef for DecisionTree which has Keys as node labels and
* NonlinearFactor as leaf nodes.
*/
using Factors = DecisionTree<Key, sharedFactor>;
private:
/// Decision tree of nonlinear factors indexed by discrete keys.
Factors factors_;
bool normalized_;
public:
MixtureFactor() = default;
/**
* @brief Construct from Decision tree.
*
* @param keys Vector of keys for continuous factors.
* @param discreteKeys Vector of discrete keys.
* @param factors Decision tree of shared factors.
* @param normalized Flag indicating if the factor error is already
* normalized.
*/
MixtureFactor(const KeyVector& keys, const DiscreteKeys& discreteKeys,
const Factors& factors, bool normalized = false)
: Base(keys, discreteKeys), factors_(factors), normalized_(normalized) {}
/**
* @brief Convenience constructor that generates the underlying factor
* decision tree for us.
*
* Here it is important that the vector of factors has the correct number of
* elements based on the number of discrete keys and the cardinality of the
* keys, so that the decision tree is constructed appropriately.
*
* @tparam FACTOR The type of the factor shared pointers being passed in. Will
* be typecast to NonlinearFactor shared pointers.
* @param keys Vector of keys for continuous factors.
* @param discreteKeys Vector of discrete keys.
* @param factors Vector of shared pointers to factors.
* @param normalized Flag indicating if the factor error is already
* normalized.
*/
template <typename FACTOR>
MixtureFactor(const KeyVector& keys, const DiscreteKeys& discreteKeys,
const std::vector<boost::shared_ptr<FACTOR>>& factors,
bool normalized = false)
: Base(keys, discreteKeys), normalized_(normalized) {
std::vector<NonlinearFactor::shared_ptr> nonlinear_factors;
for (auto&& f : factors) {
nonlinear_factors.push_back(
boost::dynamic_pointer_cast<NonlinearFactor>(f));
}
factors_ = Factors(discreteKeys, nonlinear_factors);
}
~MixtureFactor() = default;
/**
* @brief Compute error of factor given both continuous and discrete values.
*
* @param continuousVals The continuous Values.
* @param discreteVals The discrete Values.
* @return double The error of this factor.
*/
double error(const Values& continuousVals,
const DiscreteValues& discreteVals) const {
// Retrieve the factor corresponding to the assignment in discreteVals.
auto factor = factors_(discreteVals);
// Compute the error for the selected factor
const double factorError = factor->error(continuousVals);
if (normalized_) return factorError;
return factorError +
this->nonlinearFactorLogNormalizingConstant(factor, continuousVals);
}
size_t dim() const {
// TODO(Varun)
throw std::runtime_error("MixtureFactor::dim not implemented.");
}
/// Testable
/// @{
/// print to stdout
void print(
const std::string& s = "MixtureFactor",
const KeyFormatter& keyFormatter = DefaultKeyFormatter) const override {
std::cout << (s.empty() ? "" : s + " ");
Base::print("", keyFormatter);
std::cout << "\nMixtureFactor\n";
auto valueFormatter = [](const sharedFactor& v) {
if (v) {
return (boost::format("Nonlinear factor on %d keys") % v->size()).str();
} else {
return std::string("nullptr");
}
};
factors_.print("", keyFormatter, valueFormatter);
}
/// Check equality
bool equals(const HybridFactor& other, double tol = 1e-9) const override {
// We attempt a dynamic cast from HybridFactor to MixtureFactor. If it
// fails, return false.
if (!dynamic_cast<const MixtureFactor*>(&other)) return false;
// If the cast is successful, we'll properly construct a MixtureFactor
// object from `other`
const MixtureFactor& f(static_cast<const MixtureFactor&>(other));
// Ensure that this MixtureFactor and `f` have the same `factors_`.
auto compare = [tol](const sharedFactor& a, const sharedFactor& b) {
return traits<NonlinearFactor>::Equals(*a, *b, tol);
};
if (!factors_.equals(f.factors_, compare)) return false;
// If everything above passes, and the keys_, discreteKeys_ and normalized_
// member variables are identical, return true.
return (std::equal(keys_.begin(), keys_.end(), f.keys().begin()) &&
(discreteKeys_ == f.discreteKeys_) &&
(normalized_ == f.normalized_));
}
/// @}
/// Linearize specific nonlinear factors based on the assignment in
/// discreteValues.
GaussianFactor::shared_ptr linearize(
const Values& continuousVals, const DiscreteValues& discreteVals) const {
auto factor = factors_(discreteVals);
return factor->linearize(continuousVals);
}
/// Linearize all the continuous factors to get a GaussianMixtureFactor.
boost::shared_ptr<GaussianMixtureFactor> linearize(
const Values& continuousVals) const {
// functional to linearize each factor in the decision tree
auto linearizeDT = [continuousVals](const sharedFactor& factor) {
return factor->linearize(continuousVals);
};
DecisionTree<Key, GaussianFactor::shared_ptr> linearized_factors(
factors_, linearizeDT);
return boost::make_shared<GaussianMixtureFactor>(
continuousKeys_, discreteKeys_, linearized_factors);
}
/**
* If the component factors are not already normalized, we want to compute
* their normalizing constants so that the resulting joint distribution is
* appropriately computed. Remember, this is the _negative_ normalizing
* constant for the measurement likelihood (since we are minimizing the
* _negative_ log-likelihood).
*/
double nonlinearFactorLogNormalizingConstant(const sharedFactor& factor,
const Values& values) const {
// Information matrix (inverse covariance matrix) for the factor.
Matrix infoMat;
// If this is a NoiseModelFactor, we'll use its noiseModel to compute
// the information matrix; otherwise noiseModelFactor will be nullptr.
if (auto noiseModelFactor =
boost::dynamic_pointer_cast<NoiseModelFactor>(factor)) {
// If dynamic cast to NoiseModelFactor succeeded, see if the noise model
// is Gaussian
auto noiseModel = noiseModelFactor->noiseModel();
auto gaussianNoiseModel =
boost::dynamic_pointer_cast<noiseModel::Gaussian>(noiseModel);
if (gaussianNoiseModel) {
// If the noise model is Gaussian, retrieve the information matrix
infoMat = gaussianNoiseModel->information();
} else {
// If the factor is not a Gaussian factor, we'll linearize it to get
// something with a normalized noise model
// TODO(kevin): does this make sense to do? I think maybe not in
// general? Should we just yell at the user?
auto gaussianFactor = factor->linearize(values);
infoMat = gaussianFactor->information();
}
}
// Compute the (negative) log of the normalizing constant
return -(factor->dim() * log(2.0 * M_PI) / 2.0) -
(log(infoMat.determinant()) / 2.0);
}
};
} // namespace gtsam
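A usage sketch, mirroring the Switching fixture later in this PR (keys, noise model, and values are illustrative): a binary mode selects between a "still" and a "moving" motion model, and linearizing yields a GaussianMixtureFactor over the same discrete key.

  DiscreteKey m1(M(1), 2);
  auto noise = noiseModel::Isotropic::Sigma(1, 1.0);
  auto still = boost::make_shared<BetweenFactor<double>>(X(1), X(2), 0.0, noise);
  auto moving = boost::make_shared<BetweenFactor<double>>(X(1), X(2), 1.0, noise);
  std::vector<NonlinearFactor::shared_ptr> components = {still, moving};
  MixtureFactor mixture({X(1), X(2)}, {m1}, components);

  Values values;
  values.insert<double>(X(1), 0.0);
  values.insert<double>(X(2), 1.0);
  auto gmf = mixture.linearize(values);  // boost::shared_ptr<GaussianMixtureFactor>

Note that for unnormalized components with Gaussian noise models, nonlinearFactorLogNormalizingConstant returns -(d/2)·log(2π) - (1/2)·log det(Λ), where d is the factor dimension and Λ the information matrix.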

View File

@ -21,10 +21,16 @@
#include <gtsam/discrete/DiscreteDistribution.h>
#include <gtsam/hybrid/GaussianMixtureFactor.h>
#include <gtsam/hybrid/HybridGaussianFactorGraph.h>
+ #include <gtsam/hybrid/HybridNonlinearFactorGraph.h>
+ #include <gtsam/hybrid/MixtureFactor.h>
#include <gtsam/inference/Symbol.h>
#include <gtsam/linear/JacobianFactor.h>
+ #include <gtsam/linear/NoiseModel.h>
+ #include <gtsam/nonlinear/PriorFactor.h>
#include <gtsam/slam/BetweenFactor.h>
+ #include <vector>
#pragma once
namespace gtsam {
@ -33,111 +39,6 @@ using symbol_shorthand::C;
using symbol_shorthand::M;
using symbol_shorthand::X;
/* ****************************************************************************/
// Test fixture with switching network.
// TODO(Varun) Currently this is only linear. We need to add nonlinear support
// and then update to
// https://github.com/borglab/gtsam/pull/973/files#diff-58c02b3b197ebf731694946e87762d252e9eaa2f5c6c4ba22d618085b321ca23
struct Switching {
size_t K;
DiscreteKeys modes;
HybridGaussianFactorGraph linearizedFactorGraph;
Values linearizationPoint;
using MotionModel = BetweenFactor<double>;
// using MotionMixture = MixtureFactor<MotionModel>;
/// Create with given number of time steps.
Switching(size_t K, double between_sigma = 1.0, double prior_sigma = 0.1)
: K(K) {
// Create DiscreteKeys for binary K modes, modes[0] will not be used.
modes = addDiscreteModes(K);
// Create hybrid factor graph.
// Add a prior on X(1).
auto prior = boost::make_shared<JacobianFactor>(
X(1), Matrix11::Ones() / prior_sigma, Vector1::Zero());
linearizedFactorGraph.push_back(prior);
// Add "motion models".
linearizedFactorGraph = addMotionModels(K);
// Add measurement factors
for (size_t k = 1; k <= K; k++) {
linearizedFactorGraph.emplace_gaussian<JacobianFactor>(
X(k), Matrix11::Ones() / 0.1, Vector1::Zero());
}
// Add "mode chain"
linearizedFactorGraph = addModeChain(linearizedFactorGraph);
// Create the linearization point.
for (size_t k = 1; k <= K; k++) {
linearizationPoint.insert<double>(X(k), static_cast<double>(k));
}
}
/// Create DiscreteKeys for K binary modes.
DiscreteKeys addDiscreteModes(size_t K) {
DiscreteKeys m;
for (size_t k = 0; k <= K; k++) {
m.emplace_back(M(k), 2);
}
return m;
}
/// Helper function to add motion models for each [k, k+1] interval.
HybridGaussianFactorGraph addMotionModels(size_t K) {
HybridGaussianFactorGraph hgfg;
for (size_t k = 1; k < K; k++) {
auto keys = {X(k), X(k + 1)};
auto components = motionModels(k);
hgfg.emplace_hybrid<GaussianMixtureFactor>(keys, DiscreteKeys{modes[k]},
components);
}
return hgfg;
}
// Create motion models for a given time step
static std::vector<GaussianFactor::shared_ptr> motionModels(
size_t k, double sigma = 1.0) {
auto noise_model = noiseModel::Isotropic::Sigma(1, sigma);
auto still = boost::make_shared<JacobianFactor>(
X(k), -Matrix11::Ones() / sigma, X(k + 1),
Matrix11::Ones() / sigma, Vector1::Zero()),
moving = boost::make_shared<JacobianFactor>(
X(k), -Matrix11::Ones() / sigma, X(k + 1),
Matrix11::Ones() / sigma, -Vector1::Ones() / sigma);
return {boost::dynamic_pointer_cast<GaussianFactor>(still),
boost::dynamic_pointer_cast<GaussianFactor>(moving)};
}
// // Add "mode chain" to NonlinearHybridFactorGraph
// void addModeChain(HybridNonlinearFactorGraph& fg) {
// auto prior = boost::make_shared<DiscreteDistribution>(modes[1], "1/1");
// fg.push_discrete(prior);
// for (size_t k = 1; k < K - 1; k++) {
// auto parents = {modes[k]};
// auto conditional = boost::make_shared<DiscreteConditional>(
// modes[k + 1], parents, "1/2 3/2");
// fg.push_discrete(conditional);
// }
// }
// Add "mode chain" to GaussianHybridFactorGraph
HybridGaussianFactorGraph addModeChain(HybridGaussianFactorGraph& fg) {
auto prior = boost::make_shared<DiscreteDistribution>(modes[1], "1/1");
fg.push_discrete(prior);
for (size_t k = 1; k < K - 1; k++) {
auto parents = {modes[k]};
auto conditional = boost::make_shared<DiscreteConditional>(
modes[k + 1], parents, "1/2 3/2");
fg.push_discrete(conditional);
}
return fg;
}
};
/**
 * @brief Create a switching system chain. A switching system is a continuous
 * system which depends on a discrete mode at each time step of the chain.
@ -149,7 +50,7 @@ struct Switching {
 */
inline HybridGaussianFactorGraph::shared_ptr makeSwitchingChain(
    size_t n, std::function<Key(int)> keyFunc = X,
-   std::function<Key(int)> dKeyFunc = C) {
+   std::function<Key(int)> dKeyFunc = M) {
  HybridGaussianFactorGraph hfg;
  hfg.add(JacobianFactor(keyFunc(1), I_3x3, Z_3x1));
@ -182,7 +83,7 @@ inline HybridGaussianFactorGraph::shared_ptr makeSwitchingChain(
 * @return std::pair<KeyVector, std::vector<int>>
 */
inline std::pair<KeyVector, std::vector<int>> makeBinaryOrdering(
-   std::vector<Key>& input) {
+   std::vector<Key> &input) {
  KeyVector new_order;
  std::vector<int> levels(input.size());
@ -211,4 +112,103 @@ inline std::pair<KeyVector, std::vector<int>> makeBinaryOrdering(
  return {new_order, levels};
}
/* ***************************************************************************
*/
using MotionModel = BetweenFactor<double>;
// using MotionMixture = MixtureFactor<MotionModel>;
// Test fixture with switching network.
struct Switching {
size_t K;
DiscreteKeys modes;
HybridNonlinearFactorGraph nonlinearFactorGraph;
HybridGaussianFactorGraph linearizedFactorGraph;
Values linearizationPoint;
/// Create with given number of time steps.
Switching(size_t K, double between_sigma = 1.0, double prior_sigma = 0.1)
: K(K) {
using symbol_shorthand::M;
using symbol_shorthand::X;
// Create DiscreteKeys for binary K modes, modes[0] will not be used.
for (size_t k = 0; k <= K; k++) {
modes.emplace_back(M(k), 2);
}
// Create hybrid factor graph.
// Add a prior on X(1).
auto prior = boost::make_shared<PriorFactor<double>>(
X(1), 0, noiseModel::Isotropic::Sigma(1, prior_sigma));
nonlinearFactorGraph.push_nonlinear(prior);
// Add "motion models".
for (size_t k = 1; k < K; k++) {
KeyVector keys = {X(k), X(k + 1)};
auto motion_models = motionModels(k);
std::vector<NonlinearFactor::shared_ptr> components;
for (auto &&f : motion_models) {
components.push_back(boost::dynamic_pointer_cast<NonlinearFactor>(f));
}
nonlinearFactorGraph.emplace_hybrid<MixtureFactor>(
keys, DiscreteKeys{modes[k]}, components);
}
// Add measurement factors
auto measurement_noise = noiseModel::Isotropic::Sigma(1, 0.1);
for (size_t k = 2; k <= K; k++) {
nonlinearFactorGraph.emplace_nonlinear<PriorFactor<double>>(
X(k), 1.0 * (k - 1), measurement_noise);
}
// Add "mode chain"
addModeChain(&nonlinearFactorGraph);
// Create the linearization point.
for (size_t k = 1; k <= K; k++) {
linearizationPoint.insert<double>(X(k), static_cast<double>(k));
}
linearizedFactorGraph = nonlinearFactorGraph.linearize(linearizationPoint);
}
// Create motion models for a given time step
static std::vector<MotionModel::shared_ptr> motionModels(size_t k,
double sigma = 1.0) {
using symbol_shorthand::M;
using symbol_shorthand::X;
auto noise_model = noiseModel::Isotropic::Sigma(1, sigma);
auto still =
boost::make_shared<MotionModel>(X(k), X(k + 1), 0.0, noise_model),
moving =
boost::make_shared<MotionModel>(X(k), X(k + 1), 1.0, noise_model);
return {still, moving};
}
// Add "mode chain" to HybridNonlinearFactorGraph
void addModeChain(HybridNonlinearFactorGraph *fg) {
auto prior = boost::make_shared<DiscreteDistribution>(modes[1], "1/1");
fg->push_discrete(prior);
for (size_t k = 1; k < K - 1; k++) {
auto parents = {modes[k]};
auto conditional = boost::make_shared<DiscreteConditional>(
modes[k + 1], parents, "1/2 3/2");
fg->push_discrete(conditional);
}
}
// Add "mode chain" to HybridGaussianFactorGraph
void addModeChain(HybridGaussianFactorGraph *fg) {
auto prior = boost::make_shared<DiscreteDistribution>(modes[1], "1/1");
fg->push_discrete(prior);
for (size_t k = 1; k < K - 1; k++) {
auto parents = {modes[k]};
auto conditional = boost::make_shared<DiscreteConditional>(
modes[k + 1], parents, "1/2 3/2");
fg->push_discrete(conditional);
}
}
};
} // namespace gtsam
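A sketch of how the tests below consume this fixture (the ordering idiom follows testHybridIncremental in this PR):

  // Three time steps: X(1..3) continuous, M(1..2) binary modes.
  Switching switching(3);
  Ordering ordering;
  ordering += X(1);
  ordering += X(2);
  ordering += X(3);
  HybridBayesNet::shared_ptr bayesNet;
  HybridGaussianFactorGraph::shared_ptr remaining;
  std::tie(bayesNet, remaining) =
      switching.linearizedFactorGraph.eliminatePartialSequential(ordering);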

View File

@ -52,8 +52,8 @@ using namespace boost::assign;
using namespace std;
using namespace gtsam;
- using gtsam::symbol_shorthand::C;
using gtsam::symbol_shorthand::D;
+ using gtsam::symbol_shorthand::M;
using gtsam::symbol_shorthand::X;
using gtsam::symbol_shorthand::Y;
@ -67,9 +67,9 @@ TEST(HybridGaussianFactorGraph, Creation) {
  // Define a gaussian mixture conditional P(x0|x1, c0) and add it to the factor
  // graph
- GaussianMixture gm({X(0)}, {X(1)}, DiscreteKeys(DiscreteKey{C(0), 2}),
+ GaussianMixture gm({X(0)}, {X(1)}, DiscreteKeys(DiscreteKey{M(0), 2}),
      GaussianMixture::Conditionals(
-         C(0),
+         M(0),
          boost::make_shared<GaussianConditional>(
              X(0), Z_3x1, I_3x3, X(1), I_3x3),
          boost::make_shared<GaussianConditional>(
@ -96,11 +96,11 @@ TEST(HybridGaussianFactorGraph, EliminateMultifrontal) {
  // Test multifrontal elimination
  HybridGaussianFactorGraph hfg;
- DiscreteKey c(C(1), 2);
+ DiscreteKey m(M(1), 2);
  // Add priors on x0 and c1
  hfg.add(JacobianFactor(X(0), I_3x3, Z_3x1));
- hfg.add(HybridDiscreteFactor(DecisionTreeFactor(c, {2, 8})));
+ hfg.add(HybridDiscreteFactor(DecisionTreeFactor(m, {2, 8})));
  Ordering ordering;
  ordering.push_back(X(0));
@ -114,7 +114,7 @@ TEST(HybridGaussianFactorGraph, EliminateMultifrontal) {
TEST(HybridGaussianFactorGraph, eliminateFullSequentialEqualChance) {
  HybridGaussianFactorGraph hfg;
- DiscreteKey c1(C(1), 2);
+ DiscreteKey m1(M(1), 2);
  // Add prior on x0
  hfg.add(JacobianFactor(X(0), I_3x3, Z_3x1));
@ -123,45 +123,45 @@ TEST(HybridGaussianFactorGraph, eliminateFullSequentialEqualChance) {
  // Add a gaussian mixture factor ϕ(x1, c1)
  DecisionTree<Key, GaussianFactor::shared_ptr> dt(
-     C(1), boost::make_shared<JacobianFactor>(X(1), I_3x3, Z_3x1),
+     M(1), boost::make_shared<JacobianFactor>(X(1), I_3x3, Z_3x1),
      boost::make_shared<JacobianFactor>(X(1), I_3x3, Vector3::Ones()));
- hfg.add(GaussianMixtureFactor({X(1)}, {c1}, dt));
+ hfg.add(GaussianMixtureFactor({X(1)}, {m1}, dt));
  auto result =
-     hfg.eliminateSequential(Ordering::ColamdConstrainedLast(hfg, {C(1)}));
+     hfg.eliminateSequential(Ordering::ColamdConstrainedLast(hfg, {M(1)}));
  auto dc = result->at(2)->asDiscreteConditional();
- DiscreteValues mode;
- mode[C(1)] = 0;
- EXPECT_DOUBLES_EQUAL(0.6225, (*dc)(mode), 1e-3);
+ DiscreteValues dv;
+ dv[M(1)] = 0;
+ EXPECT_DOUBLES_EQUAL(1, dc->operator()(dv), 1e-3);
}
/* ************************************************************************* */
TEST(HybridGaussianFactorGraph, eliminateFullSequentialSimple) {
  HybridGaussianFactorGraph hfg;
- DiscreteKey c1(C(1), 2);
+ DiscreteKey m1(M(1), 2);
  // Add prior on x0
  hfg.add(JacobianFactor(X(0), I_3x3, Z_3x1));
  // Add factor between x0 and x1
  hfg.add(JacobianFactor(X(0), I_3x3, X(1), -I_3x3, Z_3x1));
- DecisionTree<Key, GaussianFactor::shared_ptr> dt(
-     C(1), boost::make_shared<JacobianFactor>(X(1), I_3x3, Z_3x1),
-     boost::make_shared<JacobianFactor>(X(1), I_3x3, Vector3::Ones()));
- hfg.add(GaussianMixtureFactor({X(1)}, {c1}, dt));
+ std::vector<GaussianFactor::shared_ptr> factors = {
+     boost::make_shared<JacobianFactor>(X(1), I_3x3, Z_3x1),
+     boost::make_shared<JacobianFactor>(X(1), I_3x3, Vector3::Ones())};
+ hfg.add(GaussianMixtureFactor({X(1)}, {m1}, factors));
  // Discrete probability table for c1
- hfg.add(HybridDiscreteFactor(DecisionTreeFactor(c1, {2, 8})));
+ hfg.add(DecisionTreeFactor(m1, {2, 8}));
  // Joint discrete probability table for c1, c2
- hfg.add(HybridDiscreteFactor(
-     DecisionTreeFactor({{C(1), 2}, {C(2), 2}}, "1 2 3 4")));
+ hfg.add(DecisionTreeFactor({{M(1), 2}, {M(2), 2}}, "1 2 3 4"));
- auto result = hfg.eliminateSequential(
-     Ordering::ColamdConstrainedLast(hfg, {C(1), C(2)}));
+ HybridBayesNet::shared_ptr result = hfg.eliminateSequential(
+     Ordering::ColamdConstrainedLast(hfg, {M(1), M(2)}));
+ // There are 4 variables (2 continuous + 2 discrete) in the bayes net.
  EXPECT_LONGS_EQUAL(4, result->size());
}
@ -169,31 +169,33 @@ TEST(HybridGaussianFactorGraph, eliminateFullSequentialSimple) {
TEST(HybridGaussianFactorGraph, eliminateFullMultifrontalSimple) {
  HybridGaussianFactorGraph hfg;
- DiscreteKey c1(C(1), 2);
+ DiscreteKey m1(M(1), 2);
  hfg.add(JacobianFactor(X(0), I_3x3, Z_3x1));
  hfg.add(JacobianFactor(X(0), I_3x3, X(1), -I_3x3, Z_3x1));
  hfg.add(GaussianMixtureFactor::FromFactors(
-     {X(1)}, {{C(1), 2}},
+     {X(1)}, {{M(1), 2}},
      {boost::make_shared<JacobianFactor>(X(1), I_3x3, Z_3x1),
       boost::make_shared<JacobianFactor>(X(1), I_3x3, Vector3::Ones())}));
- hfg.add(DecisionTreeFactor(c1, {2, 8}));
- hfg.add(DecisionTreeFactor({{C(1), 2}, {C(2), 2}}, "1 2 3 4"));
+ hfg.add(DecisionTreeFactor(m1, {2, 8}));
+ hfg.add(DecisionTreeFactor({{M(1), 2}, {M(2), 2}}, "1 2 3 4"));
- auto result = hfg.eliminateMultifrontal(
-     Ordering::ColamdConstrainedLast(hfg, {C(1), C(2)}));
+ HybridBayesTree::shared_ptr result = hfg.eliminateMultifrontal(
+     Ordering::ColamdConstrainedLast(hfg, {M(1), M(2)}));
+ // The bayes tree should have 3 cliques
+ EXPECT_LONGS_EQUAL(3, result->size());
  // GTSAM_PRINT(*result);
- // GTSAM_PRINT(*result->marginalFactor(C(2)));
+ // GTSAM_PRINT(*result->marginalFactor(M(2)));
}
/* ************************************************************************* */
TEST(HybridGaussianFactorGraph, eliminateFullMultifrontalCLG) {
  HybridGaussianFactorGraph hfg;
- DiscreteKey c(C(1), 2);
+ DiscreteKey m(M(1), 2);
  // Prior on x0
  hfg.add(JacobianFactor(X(0), I_3x3, Z_3x1));
@ -202,16 +204,16 @@ TEST(HybridGaussianFactorGraph, eliminateFullMultifrontalCLG) {
  // Decision tree with different modes on x1
  DecisionTree<Key, GaussianFactor::shared_ptr> dt(
-     C(1), boost::make_shared<JacobianFactor>(X(1), I_3x3, Z_3x1),
+     M(1), boost::make_shared<JacobianFactor>(X(1), I_3x3, Z_3x1),
      boost::make_shared<JacobianFactor>(X(1), I_3x3, Vector3::Ones()));
  // Hybrid factor P(x1|c1)
- hfg.add(GaussianMixtureFactor({X(1)}, {c}, dt));
+ hfg.add(GaussianMixtureFactor({X(1)}, {m}, dt));
  // Prior factor on c1
- hfg.add(HybridDiscreteFactor(DecisionTreeFactor(c, {2, 8})));
+ hfg.add(HybridDiscreteFactor(DecisionTreeFactor(m, {2, 8})));
  // Get a constrained ordering keeping c1 last
- auto ordering_full = Ordering::ColamdConstrainedLast(hfg, {C(1)});
+ auto ordering_full = Ordering::ColamdConstrainedLast(hfg, {M(1)});
  // Returns a Hybrid Bayes Tree with distribution P(x0|x1)P(x1|c1)P(c1)
  HybridBayesTree::shared_ptr hbt = hfg.eliminateMultifrontal(ordering_full);
@ -232,48 +234,48 @@ TEST(HybridGaussianFactorGraph, eliminateFullMultifrontalTwoClique) {
  {
    hfg.add(GaussianMixtureFactor::FromFactors(
-       {X(0)}, {{C(0), 2}},
+       {X(0)}, {{M(0), 2}},
        {boost::make_shared<JacobianFactor>(X(0), I_3x3, Z_3x1),
         boost::make_shared<JacobianFactor>(X(0), I_3x3, Vector3::Ones())}));
    DecisionTree<Key, GaussianFactor::shared_ptr> dt1(
-       C(1), boost::make_shared<JacobianFactor>(X(2), I_3x3, Z_3x1),
+       M(1), boost::make_shared<JacobianFactor>(X(2), I_3x3, Z_3x1),
        boost::make_shared<JacobianFactor>(X(2), I_3x3, Vector3::Ones()));
-   hfg.add(GaussianMixtureFactor({X(2)}, {{C(1), 2}}, dt1));
+   hfg.add(GaussianMixtureFactor({X(2)}, {{M(1), 2}}, dt1));
  }
  hfg.add(HybridDiscreteFactor(
-     DecisionTreeFactor({{C(1), 2}, {C(2), 2}}, "1 2 3 4")));
+     DecisionTreeFactor({{M(1), 2}, {M(2), 2}}, "1 2 3 4")));
  hfg.add(JacobianFactor(X(3), I_3x3, X(4), -I_3x3, Z_3x1));
  hfg.add(JacobianFactor(X(4), I_3x3, X(5), -I_3x3, Z_3x1));
  {
    DecisionTree<Key, GaussianFactor::shared_ptr> dt(
-       C(3), boost::make_shared<JacobianFactor>(X(3), I_3x3, Z_3x1),
+       M(3), boost::make_shared<JacobianFactor>(X(3), I_3x3, Z_3x1),
        boost::make_shared<JacobianFactor>(X(3), I_3x3, Vector3::Ones()));
-   hfg.add(GaussianMixtureFactor({X(3)}, {{C(3), 2}}, dt));
+   hfg.add(GaussianMixtureFactor({X(3)}, {{M(3), 2}}, dt));
    DecisionTree<Key, GaussianFactor::shared_ptr> dt1(
-       C(2), boost::make_shared<JacobianFactor>(X(5), I_3x3, Z_3x1),
+       M(2), boost::make_shared<JacobianFactor>(X(5), I_3x3, Z_3x1),
        boost::make_shared<JacobianFactor>(X(5), I_3x3, Vector3::Ones()));
-   hfg.add(GaussianMixtureFactor({X(5)}, {{C(2), 2}}, dt1));
+   hfg.add(GaussianMixtureFactor({X(5)}, {{M(2), 2}}, dt1));
  }
  auto ordering_full =
-     Ordering::ColamdConstrainedLast(hfg, {C(0), C(1), C(2), C(3)});
+     Ordering::ColamdConstrainedLast(hfg, {M(0), M(1), M(2), M(3)});
  HybridBayesTree::shared_ptr hbt;
  HybridGaussianFactorGraph::shared_ptr remaining;
  std::tie(hbt, remaining) = hfg.eliminatePartialMultifrontal(ordering_full);
- // GTSAM_PRINT(*hbt);
- // GTSAM_PRINT(*remaining);
- // hbt->marginalFactor(X(1))->print("HBT: ");
+ // 9 cliques in the bayes tree and 0 remaining variables to eliminate.
+ EXPECT_LONGS_EQUAL(9, hbt->size());
+ EXPECT_LONGS_EQUAL(0, remaining->size());
  /*
  (Fan) Explanation: the Junction tree will need to reeliminate to get to the
  marginal on X(1), which is not possible because it involves eliminating
@ -307,13 +309,13 @@ TEST(HybridGaussianFactorGraph, Switching) {
  // X(3), X(7)
  // X(2), X(8)
  // X(1), X(4), X(6), X(9)
- // C(5) will be the center, C(1-4), C(6-8)
- // C(3), C(7)
- // C(1), C(4), C(2), C(6), C(8)
+ // M(5) will be the center, M(1-4), M(6-8)
+ // M(3), M(7)
+ // M(1), M(4), M(2), M(6), M(8)
  // auto ordering_full =
  //     Ordering(KeyVector{X(1), X(4), X(2), X(6), X(9), X(8), X(3), X(7),
  //     X(5),
- //     C(1), C(4), C(2), C(6), C(8), C(3), C(7), C(5)});
+ //     M(1), M(4), M(2), M(6), M(8), M(3), M(7), M(5)});
  KeyVector ordering;
  {
@ -336,7 +338,7 @@ TEST(HybridGaussianFactorGraph, Switching) {
    std::iota(naturalC.begin(), naturalC.end(), 1);
    std::vector<Key> ordC;
    std::transform(naturalC.begin(), naturalC.end(), std::back_inserter(ordC),
-                  [](int x) { return C(x); });
+                  [](int x) { return M(x); });
    KeyVector ndC;
    std::vector<int> lvls;
@ -353,9 +355,9 @@ TEST(HybridGaussianFactorGraph, Switching) {
  HybridGaussianFactorGraph::shared_ptr remaining;
  std::tie(hbt, remaining) = hfg->eliminatePartialMultifrontal(ordering_full);
- // GTSAM_PRINT(*hbt);
- // GTSAM_PRINT(*remaining);
- // hbt->marginalFactor(C(11))->print("HBT: ");
+ // 12 cliques in the bayes tree and 0 remaining variables to eliminate.
+ EXPECT_LONGS_EQUAL(12, hbt->size());
+ EXPECT_LONGS_EQUAL(0, remaining->size());
}
/* ************************************************************************* */
@ -368,13 +370,13 @@ TEST(HybridGaussianFactorGraph, SwitchingISAM) {
  // X(3), X(7)
  // X(2), X(8)
  // X(1), X(4), X(6), X(9)
- // C(5) will be the center, C(1-4), C(6-8)
- // C(3), C(7)
- // C(1), C(4), C(2), C(6), C(8)
+ // M(5) will be the center, M(1-4), M(6-8)
+ // M(3), M(7)
+ // M(1), M(4), M(2), M(6), M(8)
  // auto ordering_full =
  //     Ordering(KeyVector{X(1), X(4), X(2), X(6), X(9), X(8), X(3), X(7),
  //     X(5),
- //     C(1), C(4), C(2), C(6), C(8), C(3), C(7), C(5)});
+ //     M(1), M(4), M(2), M(6), M(8), M(3), M(7), M(5)});
  KeyVector ordering;
  {
@ -397,7 +399,7 @@ TEST(HybridGaussianFactorGraph, SwitchingISAM) {
    std::iota(naturalC.begin(), naturalC.end(), 1);
    std::vector<Key> ordC;
    std::transform(naturalC.begin(), naturalC.end(), std::back_inserter(ordC),
-                  [](int x) { return C(x); });
+                  [](int x) { return M(x); });
    KeyVector ndC;
    std::vector<int> lvls;
@ -407,9 +409,6 @@ TEST(HybridGaussianFactorGraph, SwitchingISAM) {
  }
  auto ordering_full = Ordering(ordering);
- // GTSAM_PRINT(*hfg);
- // GTSAM_PRINT(ordering_full);
  HybridBayesTree::shared_ptr hbt;
  HybridGaussianFactorGraph::shared_ptr remaining;
  std::tie(hbt, remaining) = hfg->eliminatePartialMultifrontal(ordering_full);
@ -417,19 +416,18 @@ TEST(HybridGaussianFactorGraph, SwitchingISAM) {
  auto new_fg = makeSwitchingChain(12);
  auto isam = HybridGaussianISAM(*hbt);
- {
+ // Run an ISAM update.
  HybridGaussianFactorGraph factorGraph;
  factorGraph.push_back(new_fg->at(new_fg->size() - 2));
  factorGraph.push_back(new_fg->at(new_fg->size() - 1));
  isam.update(factorGraph);
- // std::cout << isam.dot();
- // isam.marginalFactor(C(11))->print();
- }
+ // ISAM should have 12 factors after the last update
+ EXPECT_LONGS_EQUAL(12, isam.size());
}
/* ************************************************************************* */
- // TODO(Varun) Actually test something!
- TEST_DISABLED(HybridGaussianFactorGraph, SwitchingTwoVar) {
+ TEST(HybridGaussianFactorGraph, SwitchingTwoVar) {
  const int N = 7;
  auto hfg = makeSwitchingChain(N, X);
  hfg->push_back(*makeSwitchingChain(N, Y, D));
@ -449,7 +447,7 @@ TEST_DISABLED(HybridGaussianFactorGraph, SwitchingTwoVar) {
  }
  for (size_t i = 1; i <= N - 1; i++) {
-   ordX.emplace_back(C(i));
+   ordX.emplace_back(M(i));
  }
  for (size_t i = 1; i <= N - 1; i++) {
    ordX.emplace_back(D(i));
@ -461,8 +459,8 @@ TEST_DISABLED(HybridGaussianFactorGraph, SwitchingTwoVar) {
    dw.positionHints['c'] = 0;
    dw.positionHints['d'] = 3;
    dw.positionHints['y'] = 2;
-   std::cout << hfg->dot(DefaultKeyFormatter, dw);
-   std::cout << "\n";
+   // std::cout << hfg->dot(DefaultKeyFormatter, dw);
+   // std::cout << "\n";
  }
  {
@ -471,10 +469,10 @@ TEST_DISABLED(HybridGaussianFactorGraph, SwitchingTwoVar) {
    // dw.positionHints['c'] = 0;
    // dw.positionHints['d'] = 3;
    dw.positionHints['x'] = 1;
-   std::cout << "\n";
+   // std::cout << "\n";
    // std::cout << hfg->eliminateSequential(Ordering(ordX))
    //               ->dot(DefaultKeyFormatter, dw);
-   hfg->eliminateMultifrontal(Ordering(ordX))->dot(std::cout);
+   // hfg->eliminateMultifrontal(Ordering(ordX))->dot(std::cout);
  }
  Ordering ordering_partial;
@ -482,22 +480,22 @@ TEST_DISABLED(HybridGaussianFactorGraph, SwitchingTwoVar) {
    ordering_partial.emplace_back(X(i));
    ordering_partial.emplace_back(Y(i));
  }
- {
  HybridBayesNet::shared_ptr hbn;
  HybridGaussianFactorGraph::shared_ptr remaining;
  std::tie(hbn, remaining) =
      hfg->eliminatePartialSequential(ordering_partial);
- // remaining->print();
+ EXPECT_LONGS_EQUAL(14, hbn->size());
+ EXPECT_LONGS_EQUAL(11, remaining->size());
  {
    DotWriter dw;
    dw.positionHints['x'] = 1;
    dw.positionHints['c'] = 0;
    dw.positionHints['d'] = 3;
    dw.positionHints['y'] = 2;
-   std::cout << remaining->dot(DefaultKeyFormatter, dw);
-   std::cout << "\n";
+   // std::cout << remaining->dot(DefaultKeyFormatter, dw);
+   // std::cout << "\n";
  }
- }
}

View File

@ -74,7 +74,7 @@ TEST(GaussianMixtureFactor, Sum) {
  // Check that number of keys is 3
  EXPECT_LONGS_EQUAL(3, mixtureFactorA.keys().size());
- // Check that number of discrete keys is 1 // TODO(Frank): should not exist?
+ // Check that number of discrete keys is 1
  EXPECT_LONGS_EQUAL(1, mixtureFactorA.discreteKeys().size());
  // Create sum of two mixture factors: it will be a decision tree now on both
@ -104,7 +104,7 @@ TEST(GaussianMixtureFactor, Printing) {
  GaussianMixtureFactor mixtureFactor({X(1), X(2)}, {m1}, factors);
  std::string expected =
-     R"(Hybrid x1 x2; 1 ]{
+     R"(Hybrid [x1 x2; 1]{
  Choice(1)
  0 Leaf :
  A[x1] = [

View File

@ -0,0 +1,45 @@
/* ----------------------------------------------------------------------------
* GTSAM Copyright 2010, Georgia Tech Research Corporation,
* Atlanta, Georgia 30332-0415
* All Rights Reserved
* Authors: Frank Dellaert, et al. (see THANKS for the full author list)
* See LICENSE for the license information
* -------------------------------------------------------------------------- */
/**
* @file testHybridFactorGraph.cpp
* @brief Unit tests for HybridFactorGraph
* @author Varun Agrawal
* @date May 2021
*/
#include <CppUnitLite/Test.h>
#include <CppUnitLite/TestHarness.h>
#include <gtsam/base/TestableAssertions.h>
#include <gtsam/base/utilities.h>
#include <gtsam/hybrid/HybridFactorGraph.h>
#include <gtsam/inference/Symbol.h>
#include <gtsam/nonlinear/PriorFactor.h>
using namespace std;
using namespace gtsam;
using noiseModel::Isotropic;
using symbol_shorthand::L;
using symbol_shorthand::M;
using symbol_shorthand::X;
/* ****************************************************************************
* Test that any linearizedFactorGraph gaussian factors are appended to the
* existing gaussian factor graph in the hybrid factor graph.
*/
TEST(HybridFactorGraph, Constructor) {
// Initialize the hybrid factor graph
HybridFactorGraph fg;
}
/* ************************************************************************* */
int main() {
TestResult tr;
return TestRegistry::runAllTests(tr);
}
/* ************************************************************************* */

View File

@ -0,0 +1,649 @@
/* ----------------------------------------------------------------------------
* GTSAM Copyright 2010, Georgia Tech Research Corporation,
* Atlanta, Georgia 30332-0415
* All Rights Reserved
* Authors: Frank Dellaert, et al. (see THANKS for the full author list)
* See LICENSE for the license information
* -------------------------------------------------------------------------- */
/**
* @file testHybridIncremental.cpp
* @brief Unit tests for incremental inference
* @author Fan Jiang, Varun Agrawal, Frank Dellaert
* @date Jan 2021
*/
#include <gtsam/discrete/DiscreteBayesNet.h>
#include <gtsam/discrete/DiscreteDistribution.h>
#include <gtsam/geometry/Pose2.h>
#include <gtsam/hybrid/HybridGaussianISAM.h>
#include <gtsam/linear/GaussianBayesNet.h>
#include <gtsam/linear/GaussianFactorGraph.h>
#include <gtsam/nonlinear/PriorFactor.h>
#include <gtsam/sam/BearingRangeFactor.h>
#include <numeric>
#include "Switching.h"
// Include for test suite
#include <CppUnitLite/TestHarness.h>
using namespace std;
using namespace gtsam;
using noiseModel::Isotropic;
using symbol_shorthand::L;
using symbol_shorthand::M;
using symbol_shorthand::W;
using symbol_shorthand::X;
using symbol_shorthand::Y;
using symbol_shorthand::Z;
/* ****************************************************************************/
// Test if we can perform elimination incrementally.
TEST(HybridGaussianElimination, IncrementalElimination) {
Switching switching(3);
HybridGaussianISAM isam;
HybridGaussianFactorGraph graph1;
// Create initial factor graph
// * * *
// | | |
// *- X1 -*- X2 -*- X3
// \*-M1-*/
graph1.push_back(switching.linearizedFactorGraph.at(0)); // P(X1)
graph1.push_back(switching.linearizedFactorGraph.at(1)); // P(X1, X2 | M1)
graph1.push_back(switching.linearizedFactorGraph.at(2)); // P(X2, X3 | M2)
graph1.push_back(switching.linearizedFactorGraph.at(5)); // P(M1)
// Create ordering.
Ordering ordering;
ordering += X(1);
ordering += X(2);
// Run update step
isam.update(graph1);
// Check that after update we have 3 hybrid Bayes net nodes:
  // P(X1 | X2, M1), P(X2, X3 | M1, M2), and P(M1, M2)
EXPECT_LONGS_EQUAL(3, isam.size());
EXPECT(isam[X(1)]->conditional()->frontals() == KeyVector{X(1)});
EXPECT(isam[X(1)]->conditional()->parents() == KeyVector({X(2), M(1)}));
EXPECT(isam[X(2)]->conditional()->frontals() == KeyVector({X(2), X(3)}));
EXPECT(isam[X(2)]->conditional()->parents() == KeyVector({M(1), M(2)}));
/********************************************************/
// New factor graph for incremental update.
HybridGaussianFactorGraph graph2;
  graph2.push_back(switching.linearizedFactorGraph.at(3));  // P(X2)
graph2.push_back(switching.linearizedFactorGraph.at(4)); // P(X3)
graph2.push_back(switching.linearizedFactorGraph.at(6)); // P(M1, M2)
// Create ordering.
Ordering ordering2;
ordering2 += X(2);
ordering2 += X(3);
isam.update(graph2);
  // Check that after the second update the tree still has 3 nodes,
  // now with the updated clique:
  // P(X2, X3 | M1, M2)
EXPECT_LONGS_EQUAL(3, isam.size());
EXPECT(isam[X(3)]->conditional()->frontals() == KeyVector({X(2), X(3)}));
EXPECT(isam[X(3)]->conditional()->parents() == KeyVector({M(1), M(2)}));
}
/* ****************************************************************************/
// Test if we can do inference incrementally.
TEST(HybridGaussianElimination, IncrementalInference) {
Switching switching(3);
HybridGaussianISAM isam;
HybridGaussianFactorGraph graph1;
// Create initial factor graph
// * * *
// | | |
// *- X1 -*- X2 -*- X3
// | |
// *-M1 - * - M2
graph1.push_back(switching.linearizedFactorGraph.at(0)); // P(X1)
graph1.push_back(switching.linearizedFactorGraph.at(1)); // P(X1, X2 | M1)
graph1.push_back(switching.linearizedFactorGraph.at(3)); // P(X2)
graph1.push_back(switching.linearizedFactorGraph.at(5)); // P(M1)
  // TODO(Varun) we cannot enforce ordering
// // Create ordering.
// Ordering ordering1;
// ordering1 += X(1);
// ordering1 += X(2);
// Run update step
isam.update(graph1);
/********************************************************/
// New factor graph for incremental update.
HybridGaussianFactorGraph graph2;
graph2.push_back(switching.linearizedFactorGraph.at(2)); // P(X2, X3 | M2)
graph2.push_back(switching.linearizedFactorGraph.at(4)); // P(X3)
graph2.push_back(switching.linearizedFactorGraph.at(6)); // P(M1, M2)
  // TODO(Varun) we cannot enforce ordering
// // Create ordering.
// Ordering ordering2;
// ordering2 += X(2);
// ordering2 += X(3);
isam.update(graph2);
GTSAM_PRINT(isam);
/********************************************************/
// Run batch elimination so we can compare results.
Ordering ordering;
ordering += X(1);
ordering += X(2);
ordering += X(3);
// Now we calculate the actual factors using full elimination
HybridBayesNet::shared_ptr expectedHybridBayesNet;
HybridGaussianFactorGraph::shared_ptr expectedRemainingGraph;
std::tie(expectedHybridBayesNet, expectedRemainingGraph) =
switching.linearizedFactorGraph.eliminatePartialSequential(ordering);
// The densities on X(1) should be the same
auto x1_conditional =
dynamic_pointer_cast<GaussianMixture>(isam[X(1)]->conditional()->inner());
EXPECT(
assert_equal(*x1_conditional, *(expectedHybridBayesNet->atGaussian(0))));
// The densities on X(2) should be the same
auto x2_conditional =
dynamic_pointer_cast<GaussianMixture>(isam[X(2)]->conditional()->inner());
EXPECT(
assert_equal(*x2_conditional, *(expectedHybridBayesNet->atGaussian(1))));
// // The densities on X(3) should be the same
// auto x3_conditional =
// dynamic_pointer_cast<GaussianMixture>(isam[X(3)]->conditional()->inner());
// EXPECT(
// assert_equal(*x3_conditional, *(expectedHybridBayesNet->atGaussian(2))));
GTSAM_PRINT(*expectedHybridBayesNet);
  // We only do the manual continuous elimination for the assignment (0, 0);
  // the other discrete probabilities on M(2) are calculated the same way.
auto m00_prob = [&]() {
GaussianFactorGraph gf;
// gf.add(switching.linearizedFactorGraph.gaussianGraph().at(3));
DiscreteValues m00;
    m00[M(1)] = 0;
    m00[M(2)] = 0;
// auto dcMixture =
// dynamic_pointer_cast<DCGaussianMixtureFactor>(graph2.dcGraph().at(0));
// gf.add(dcMixture->factors()(m00));
// auto x2_mixed =
// boost::dynamic_pointer_cast<GaussianMixture>(hybridBayesNet.at(1));
// gf.add(x2_mixed->factors()(m00));
auto result_gf = gf.eliminateSequential();
return gf.probPrime(result_gf->optimize());
}();
/// Test if the probability values are as expected with regression tests.
// DiscreteValues assignment;
// EXPECT(assert_equal(m00_prob, 0.60656, 1e-5));
// assignment[M(1)] = 0;
// assignment[M(2)] = 0;
// EXPECT(assert_equal(m00_prob, (*discreteFactor)(assignment), 1e-5));
// assignment[M(1)] = 1;
// assignment[M(2)] = 0;
// EXPECT(assert_equal(0.612477, (*discreteFactor)(assignment), 1e-5));
// assignment[M(1)] = 0;
// assignment[M(2)] = 1;
// EXPECT(assert_equal(0.999952, (*discreteFactor)(assignment), 1e-5));
// assignment[M(1)] = 1;
// assignment[M(2)] = 1;
// EXPECT(assert_equal(1.0, (*discreteFactor)(assignment), 1e-5));
// DiscreteFactorGraph dfg;
// dfg.add(*discreteFactor);
// dfg.add(discreteFactor_m1);
// dfg.add_factors(switching.linearizedFactorGraph.discreteGraph());
  // // Check if the chordal graph generated from incremental elimination
  // // matches that of batch elimination.
// auto chordal = dfg.eliminateSequential();
// auto expectedChordal =
// expectedRemainingGraph->discreteGraph().eliminateSequential();
// EXPECT(assert_equal(*expectedChordal, *chordal, 1e-6));
}
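/* ****************************************************************************/
// A minimal sketch of the incremental usage pattern exercised above: feed
// HybridGaussianISAM one factor-graph slice at a time and inspect the
// resulting cliques. This reuses only the Switching fixture and the
// isam.update / isam[key] calls from the tests above.
TEST(HybridGaussianElimination, IncrementalUsageSketch) {
  Switching switching(3);
  HybridGaussianISAM isam;

  // Slice 1: prior on X1 plus the X1-X2 motion model.
  HybridGaussianFactorGraph slice1;
  slice1.push_back(switching.linearizedFactorGraph.at(0));  // P(X1)
  slice1.push_back(switching.linearizedFactorGraph.at(1));  // P(X1, X2 | M1)
  isam.update(slice1);

  // Slice 2: the X2-X3 motion model arrives later.
  HybridGaussianFactorGraph slice2;
  slice2.push_back(switching.linearizedFactorGraph.at(2));  // P(X2, X3 | M2)
  isam.update(slice2);

  // All three continuous variables should now have cliques in the tree.
  CHECK(isam[X(1)]->conditional());
  CHECK(isam[X(2)]->conditional());
  CHECK(isam[X(3)]->conditional());
}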
/* ****************************************************************************/
// // Test if we can approximately do the inference
// TEST(DCGaussianElimination, Approx_inference) {
// Switching switching(4);
// IncrementalHybrid incrementalHybrid;
// HybridGaussianFactorGraph graph1;
// // Add the 3 DC factors, x1-x2, x2-x3, x3-x4
// for (size_t i = 0; i < 3; i++) {
// graph1.push_back(switching.linearizedFactorGraph.dcGraph().at(i));
// }
// // Add the Gaussian factors, 1 prior on X(1), 4 measurements
// for (size_t i = 0; i <= 4; i++) {
// graph1.push_back(switching.linearizedFactorGraph.gaussianGraph().at(i));
// }
// // Create ordering.
// Ordering ordering;
// for (size_t j = 1; j <= 4; j++) {
// ordering += X(j);
// }
// // Now we calculate the actual factors using full elimination
// HybridBayesNet::shared_ptr unprunedHybridBayesNet;
// HybridGaussianFactorGraph::shared_ptr unprunedRemainingGraph;
// std::tie(unprunedHybridBayesNet, unprunedRemainingGraph) =
// switching.linearizedFactorGraph.eliminatePartialSequential(ordering);
// size_t maxComponents = 5;
// incrementalHybrid.update(graph1, ordering, maxComponents);
// /*
// unpruned factor is:
// Choice(m3)
// 0 Choice(m2)
// 0 0 Choice(m1)
// 0 0 0 Leaf 0.2248 -
// 0 0 1 Leaf 0.3715 -
// 0 1 Choice(m1)
// 0 1 0 Leaf 0.3742 *
// 0 1 1 Leaf 0.6125 *
// 1 Choice(m2)
// 1 0 Choice(m1)
// 1 0 0 Leaf 0.3706 -
// 1 0 1 Leaf 0.6124 *
// 1 1 Choice(m1)
// 1 1 0 Leaf 0.611 *
// 1 1 1 Leaf 1 *
// pruned factors is:
// Choice(m3)
// 0 Choice(m2)
// 0 0 Leaf 0
// 0 1 Choice(m1)
// 0 1 0 Leaf 0.3742
// 0 1 1 Leaf 0.6125
// 1 Choice(m2)
// 1 0 Choice(m1)
// 1 0 0 Leaf 0
// 1 0 1 Leaf 0.6124
// 1 1 Choice(m1)
// 1 1 0 Leaf 0.611
// 1 1 1 Leaf 1
// */
// // Test that the remaining factor graph has one
// // DecisionTreeFactor on {M3, M2, M1}.
// auto remainingFactorGraph = incrementalHybrid.remainingFactorGraph();
// EXPECT_LONGS_EQUAL(1, remainingFactorGraph.size());
// auto discreteFactor_m1 = *dynamic_pointer_cast<DecisionTreeFactor>(
// incrementalHybrid.remainingDiscreteGraph().at(0));
// EXPECT(discreteFactor_m1.keys() == KeyVector({M(3), M(2), M(1)}));
  // // Count the number of non-zero elements (leaves kept after pruning).
// auto count = [](const double &value, int count) {
// return value > 0 ? count + 1 : count;
// };
// // Check that the number of leaves after pruning is 5.
// EXPECT_LONGS_EQUAL(5, discreteFactor_m1.fold(count, 0));
// /* Expected hybrid Bayes net
// * factor 0: [x1 | x2 m1 ], 2 components
// * factor 1: [x2 | x3 m2 m1 ], 4 components
// * factor 2: [x3 | x4 m3 m2 m1 ], 8 components
// * factor 3: [x4 | m3 m2 m1 ], 8 components
// */
// auto hybridBayesNet = incrementalHybrid.hybridBayesNet();
// // Check if we have a bayes net with 4 hybrid nodes,
  // // each with 2, 4, 5 (pruned), and 5 (pruned) leaves respectively.
// EXPECT_LONGS_EQUAL(4, hybridBayesNet.size());
// EXPECT_LONGS_EQUAL(2, hybridBayesNet.atGaussian(0)->nrComponents());
// EXPECT_LONGS_EQUAL(4, hybridBayesNet.atGaussian(1)->nrComponents());
// EXPECT_LONGS_EQUAL(5, hybridBayesNet.atGaussian(2)->nrComponents());
// EXPECT_LONGS_EQUAL(5, hybridBayesNet.atGaussian(3)->nrComponents());
  // // Check that the hybrid nodes of the Bayes net match those of the
  // // Bayes net before pruning, at the same positions.
// auto &lastDensity = *(hybridBayesNet.atGaussian(3));
// auto &unprunedLastDensity = *(unprunedHybridBayesNet->atGaussian(3));
// std::vector<std::pair<DiscreteValues, double>> assignments =
// discreteFactor_m1.enumerate();
// // Loop over all assignments and check the pruned components
// for (auto &&av : assignments) {
// const DiscreteValues &assignment = av.first;
// const double value = av.second;
// if (value == 0.0) {
// EXPECT(lastDensity(assignment) == nullptr);
// } else {
// CHECK(lastDensity(assignment));
// EXPECT(assert_equal(*unprunedLastDensity(assignment),
// *lastDensity(assignment)));
// }
// }
// }
/* ****************************************************************************/
// // Test approximate inference with an additional pruning step.
// TEST(DCGaussianElimination, Incremental_approximate) {
// Switching switching(5);
// IncrementalHybrid incrementalHybrid;
// HybridGaussianFactorGraph graph1;
// // Add the 3 DC factors, x1-x2, x2-x3, x3-x4
// for (size_t i = 0; i < 3; i++) {
// graph1.push_back(switching.linearizedFactorGraph.dcGraph().at(i));
// }
// // Add the Gaussian factors, 1 prior on X(1), 4 measurements
// for (size_t i = 0; i <= 4; i++) {
// graph1.push_back(switching.linearizedFactorGraph.gaussianGraph().at(i));
// }
// // Create ordering.
// Ordering ordering;
// for (size_t j = 1; j <= 4; j++) {
// ordering += X(j);
// }
// // Run update with pruning
// size_t maxComponents = 5;
// incrementalHybrid.update(graph1, ordering, maxComponents);
// // Check if we have a bayes net with 4 hybrid nodes,
  // // each with 2, 4, 8, and 5 (pruned) leaves respectively.
// auto actualBayesNet1 = incrementalHybrid.hybridBayesNet();
// CHECK_EQUAL(4, actualBayesNet1.size());
// EXPECT_LONGS_EQUAL(2, actualBayesNet1.atGaussian(0)->nrComponents());
// EXPECT_LONGS_EQUAL(4, actualBayesNet1.atGaussian(1)->nrComponents());
// EXPECT_LONGS_EQUAL(8, actualBayesNet1.atGaussian(2)->nrComponents());
// EXPECT_LONGS_EQUAL(5, actualBayesNet1.atGaussian(3)->nrComponents());
// /***** Run Round 2 *****/
// HybridGaussianFactorGraph graph2;
// graph2.push_back(switching.linearizedFactorGraph.dcGraph().at(3));
// graph2.push_back(switching.linearizedFactorGraph.gaussianGraph().at(5));
// Ordering ordering2;
// ordering2 += X(4);
// ordering2 += X(5);
// // Run update with pruning a second time.
// incrementalHybrid.update(graph2, ordering2, maxComponents);
// // Check if we have a bayes net with 2 hybrid nodes,
  // // each with 10 (pruned) and 5 (pruned) leaves respectively.
// auto actualBayesNet = incrementalHybrid.hybridBayesNet();
// CHECK_EQUAL(2, actualBayesNet.size());
// EXPECT_LONGS_EQUAL(10, actualBayesNet.atGaussian(0)->nrComponents());
// EXPECT_LONGS_EQUAL(5, actualBayesNet.atGaussian(1)->nrComponents());
// }
/* ************************************************************************/
// // Test for figuring out the optimal ordering to ensure we get
// // a discrete graph after elimination.
// TEST(IncrementalHybrid, NonTrivial) {
// // This is a GTSAM-only test for running inference on a single legged
// // robot. The leg links are represented by the chain X-Y-Z-W, where X is
// // the base and W is the foot.
// // We use BetweenFactor<Pose2> as constraints between each of the poses.
// /*************** Run Round 1 ***************/
// NonlinearHybridFactorGraph fg;
// // Add a prior on pose x1 at the origin.
// // A prior factor consists of a mean and
// // a noise model (covariance matrix)
// Pose2 prior(0.0, 0.0, 0.0); // prior mean is at origin
// auto priorNoise = noiseModel::Diagonal::Sigmas(
// Vector3(0.3, 0.3, 0.1)); // 30cm std on x,y, 0.1 rad on theta
// fg.emplace_nonlinear<PriorFactor<Pose2>>(X(0), prior, priorNoise);
// // create a noise model for the landmark measurements
// auto poseNoise = noiseModel::Isotropic::Sigma(3, 0.1);
// // We model a robot's single leg as X - Y - Z - W
// // where X is the base link and W is the foot link.
// // Add connecting poses similar to PoseFactors in GTD
// fg.emplace_nonlinear<BetweenFactor<Pose2>>(X(0), Y(0), Pose2(0, 1.0, 0),
// poseNoise);
// fg.emplace_nonlinear<BetweenFactor<Pose2>>(Y(0), Z(0), Pose2(0, 1.0, 0),
// poseNoise);
// fg.emplace_nonlinear<BetweenFactor<Pose2>>(Z(0), W(0), Pose2(0, 1.0, 0),
// poseNoise);
// // Create initial estimate
// Values initial;
// initial.insert(X(0), Pose2(0.0, 0.0, 0.0));
// initial.insert(Y(0), Pose2(0.0, 1.0, 0.0));
// initial.insert(Z(0), Pose2(0.0, 2.0, 0.0));
// initial.insert(W(0), Pose2(0.0, 3.0, 0.0));
// HybridGaussianFactorGraph gfg = fg.linearize(initial);
// fg = NonlinearHybridFactorGraph();
// IncrementalHybrid inc;
// // Regular ordering going up the chain.
// Ordering ordering;
// ordering += W(0);
// ordering += Z(0);
// ordering += Y(0);
// ordering += X(0);
// // Update without pruning
// // The result is a HybridBayesNet with no discrete variables
// // (equivalent to a GaussianBayesNet).
// // Factorization is:
// // `P(X | measurements) = P(W0|Z0) P(Z0|Y0) P(Y0|X0) P(X0)`
// inc.update(gfg, ordering);
// /*************** Run Round 2 ***************/
// using PlanarMotionModel = BetweenFactor<Pose2>;
// // Add odometry factor with discrete modes.
// Pose2 odometry(1.0, 0.0, 0.0);
// KeyVector contKeys = {W(0), W(1)};
// auto noise_model = noiseModel::Isotropic::Sigma(3, 1.0);
//   auto still = boost::make_shared<PlanarMotionModel>(W(0), W(1),
//                                                      Pose2(0, 0, 0), noise_model),
//        moving = boost::make_shared<PlanarMotionModel>(W(0), W(1), odometry,
//                                                       noise_model);
// std::vector<PlanarMotionModel::shared_ptr> components = {moving, still};
// auto dcFactor = boost::make_shared<DCMixtureFactor<PlanarMotionModel>>(
// contKeys, DiscreteKeys{gtsam::DiscreteKey(M(1), 2)}, components);
// fg.push_back(dcFactor);
// // Add equivalent of ImuFactor
// fg.emplace_nonlinear<BetweenFactor<Pose2>>(X(0), X(1), Pose2(1.0, 0.0, 0),
// poseNoise);
// // PoseFactors-like at k=1
// fg.emplace_nonlinear<BetweenFactor<Pose2>>(X(1), Y(1), Pose2(0, 1, 0),
// poseNoise);
// fg.emplace_nonlinear<BetweenFactor<Pose2>>(Y(1), Z(1), Pose2(0, 1, 0),
// poseNoise);
// fg.emplace_nonlinear<BetweenFactor<Pose2>>(Z(1), W(1), Pose2(-1, 1, 0),
// poseNoise);
// initial.insert(X(1), Pose2(1.0, 0.0, 0.0));
// initial.insert(Y(1), Pose2(1.0, 1.0, 0.0));
// initial.insert(Z(1), Pose2(1.0, 2.0, 0.0));
// // The leg link did not move so we set the expected pose accordingly.
// initial.insert(W(1), Pose2(0.0, 3.0, 0.0));
// // Ordering for k=1.
// // This ordering follows the intuition that we eliminate the previous
// // timestep, and then the current timestep.
// ordering = Ordering();
// ordering += W(0);
// ordering += Z(0);
// ordering += Y(0);
// ordering += X(0);
// ordering += W(1);
// ordering += Z(1);
// ordering += Y(1);
// ordering += X(1);
// gfg = fg.linearize(initial);
// fg = NonlinearHybridFactorGraph();
// // Update without pruning
// // The result is a HybridBayesNet with 1 discrete variable M(1).
//   // P(X | measurements) = P(W0|Z0, W1, M1) P(Z0|Y0, W1, M1) P(Y0|X0, W1, M1)
//   //                       P(X0|X1, W1, M1) P(W1|Z1, X1, M1) P(Z1|Y1, X1, M1)
//   //                       P(Y1|X1, M1) P(X1|M1) P(M1)
// // The MHS tree is a 2 level tree for time indices (0, 1) with 2 leaves.
// inc.update(gfg, ordering);
// /*************** Run Round 3 ***************/
// // Add odometry factor with discrete modes.
// contKeys = {W(1), W(2)};
// still = boost::make_shared<PlanarMotionModel>(W(1), W(2), Pose2(0, 0, 0),
// noise_model);
//   moving = boost::make_shared<PlanarMotionModel>(W(1), W(2), odometry,
//                                                  noise_model);
// components = {moving, still};
// dcFactor = boost::make_shared<DCMixtureFactor<PlanarMotionModel>>(
// contKeys, DiscreteKeys{gtsam::DiscreteKey(M(2), 2)}, components);
// fg.push_back(dcFactor);
// // Add equivalent of ImuFactor
// fg.emplace_nonlinear<BetweenFactor<Pose2>>(X(1), X(2), Pose2(1.0, 0.0, 0),
// poseNoise);
// // PoseFactors-like at k=1
// fg.emplace_nonlinear<BetweenFactor<Pose2>>(X(2), Y(2), Pose2(0, 1, 0),
// poseNoise);
// fg.emplace_nonlinear<BetweenFactor<Pose2>>(Y(2), Z(2), Pose2(0, 1, 0),
// poseNoise);
// fg.emplace_nonlinear<BetweenFactor<Pose2>>(Z(2), W(2), Pose2(-2, 1, 0),
// poseNoise);
// initial.insert(X(2), Pose2(2.0, 0.0, 0.0));
// initial.insert(Y(2), Pose2(2.0, 1.0, 0.0));
// initial.insert(Z(2), Pose2(2.0, 2.0, 0.0));
// initial.insert(W(2), Pose2(0.0, 3.0, 0.0));
// // Ordering at k=2. Same intuition as before.
// ordering = Ordering();
// ordering += W(1);
// ordering += Z(1);
// ordering += Y(1);
// ordering += X(1);
// ordering += W(2);
// ordering += Z(2);
// ordering += Y(2);
// ordering += X(2);
// gfg = fg.linearize(initial);
// fg = NonlinearHybridFactorGraph();
// // Now we prune!
//   // P(X | measurements) = P(W0|Z0, W1, M1) P(Z0|Y0, W1, M1) P(Y0|X0, W1, M1)
//   //                       P(X0|X1, W1, M1) P(W1|W2, Z1, X1, M1, M2)
//   //                       P(Z1|W2, Y1, X1, M1, M2) P(Y1|W2, X1, M1, M2)
//   //                       P(X1|W2, X2, M1, M2) P(W2|Z2, X2, M1, M2)
//   //                       P(Z2|Y2, X2, M1, M2) P(Y2|X2, M1, M2)
//   //                       P(X2|M1, M2) P(M1, M2)
// // The MHS at this point should be a 3 level tree on (0, 1, 2).
// // 0 has 2 choices, 1 has 4 choices and 2 has 4 choices.
// inc.update(gfg, ordering, 2);
// /*************** Run Round 4 ***************/
// // Add odometry factor with discrete modes.
// contKeys = {W(2), W(3)};
// still = boost::make_shared<PlanarMotionModel>(W(2), W(3), Pose2(0, 0, 0),
// noise_model);
//   moving = boost::make_shared<PlanarMotionModel>(W(2), W(3), odometry,
//                                                  noise_model);
// components = {moving, still};
// dcFactor = boost::make_shared<DCMixtureFactor<PlanarMotionModel>>(
// contKeys, DiscreteKeys{gtsam::DiscreteKey(M(3), 2)}, components);
// fg.push_back(dcFactor);
// // Add equivalent of ImuFactor
// fg.emplace_nonlinear<BetweenFactor<Pose2>>(X(2), X(3), Pose2(1.0, 0.0, 0),
// poseNoise);
// // PoseFactors-like at k=3
// fg.emplace_nonlinear<BetweenFactor<Pose2>>(X(3), Y(3), Pose2(0, 1, 0),
// poseNoise);
// fg.emplace_nonlinear<BetweenFactor<Pose2>>(Y(3), Z(3), Pose2(0, 1, 0),
// poseNoise);
// fg.emplace_nonlinear<BetweenFactor<Pose2>>(Z(3), W(3), Pose2(-3, 1, 0),
// poseNoise);
// initial.insert(X(3), Pose2(3.0, 0.0, 0.0));
// initial.insert(Y(3), Pose2(3.0, 1.0, 0.0));
// initial.insert(Z(3), Pose2(3.0, 2.0, 0.0));
// initial.insert(W(3), Pose2(0.0, 3.0, 0.0));
// // Ordering at k=3. Same intuition as before.
// ordering = Ordering();
// ordering += W(2);
// ordering += Z(2);
// ordering += Y(2);
// ordering += X(2);
// ordering += W(3);
// ordering += Z(3);
// ordering += Y(3);
// ordering += X(3);
// gfg = fg.linearize(initial);
// fg = NonlinearHybridFactorGraph();
// // Keep pruning!
// inc.update(gfg, ordering, 3);
// // The final discrete graph should not be empty since we have eliminated
// // all continuous variables.
// EXPECT(!inc.remainingDiscreteGraph().empty());
// // Test if the optimal discrete mode assignment is (1, 1, 1).
//   DiscreteValues optimal_assignment = inc.remainingDiscreteGraph().optimize();
//   DiscreteValues expected_assignment;
//   expected_assignment[M(1)] = 1;
//   expected_assignment[M(2)] = 1;
// expected_assignment[M(3)] = 1;
// EXPECT(assert_equal(expected_assignment, optimal_assignment));
//   // Test if pruning worked correctly by checking that we only have 3
//   // leaves in the last node.
// auto lastConditional = boost::dynamic_pointer_cast<GaussianMixture>(
// inc.hybridBayesNet().at(inc.hybridBayesNet().size() - 1));
// EXPECT_LONGS_EQUAL(3, lastConditional->nrComponents());
// }
/* ************************************************************************* */
int main() {
TestResult tr;
return TestRegistry::runAllTests(tr);
}

View File

@ -0,0 +1,761 @@
/* ----------------------------------------------------------------------------
* GTSAM Copyright 2010, Georgia Tech Research Corporation,
* Atlanta, Georgia 30332-0415
* All Rights Reserved
* Authors: Frank Dellaert, et al. (see THANKS for the full author list)
* See LICENSE for the license information
* -------------------------------------------------------------------------- */
/**
* @file testHybridNonlinearFactorGraph.cpp
* @brief Unit tests for HybridNonlinearFactorGraph
* @author Varun Agrawal
* @author Fan Jiang
* @author Frank Dellaert
* @date December 2021
*/
#include <gtsam/base/TestableAssertions.h>
#include <gtsam/base/utilities.h>
#include <gtsam/discrete/DiscreteBayesNet.h>
#include <gtsam/discrete/DiscreteDistribution.h>
#include <gtsam/discrete/DiscreteFactorGraph.h>
#include <gtsam/geometry/Pose2.h>
#include <gtsam/hybrid/HybridEliminationTree.h>
#include <gtsam/hybrid/HybridFactor.h>
#include <gtsam/hybrid/HybridNonlinearFactorGraph.h>
#include <gtsam/hybrid/MixtureFactor.h>
#include <gtsam/linear/GaussianBayesNet.h>
#include <gtsam/linear/GaussianFactorGraph.h>
#include <gtsam/nonlinear/NonlinearFactorGraph.h>
#include <gtsam/nonlinear/PriorFactor.h>
#include <gtsam/sam/BearingRangeFactor.h>
#include <gtsam/slam/BetweenFactor.h>
#include <numeric>
#include "Switching.h"
// Include for test suite
#include <CppUnitLite/TestHarness.h>
using namespace std;
using namespace gtsam;
using noiseModel::Isotropic;
using symbol_shorthand::L;
using symbol_shorthand::M;
using symbol_shorthand::X;
/* ****************************************************************************
 * Test that Gaussian factors from a linearized factor graph are appended to
 * the existing Gaussian factor graph in the hybrid factor graph.
*/
TEST(HybridFactorGraph, GaussianFactorGraph) {
HybridNonlinearFactorGraph fg;
// Add a simple prior factor to the nonlinear factor graph
fg.emplace_nonlinear<PriorFactor<double>>(X(0), 0, Isotropic::Sigma(1, 0.1));
// Linearization point
Values linearizationPoint;
linearizationPoint.insert<double>(X(0), 0);
HybridGaussianFactorGraph ghfg = fg.linearize(linearizationPoint);
// Add a factor to the GaussianFactorGraph
ghfg.add(JacobianFactor(X(0), I_1x1, Vector1(5)));
EXPECT_LONGS_EQUAL(2, ghfg.size());
}
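/* ****************************************************************************
 * A small sketch continuing the test above: with only continuous factors, the
 * hybrid graph behaves like a plain Gaussian graph and can be eliminated with
 * an explicit ordering. Assumes the eliminateSequential(ordering) overload
 * used elsewhere in this suite.
 */
TEST(HybridFactorGraph, GaussianOnlyElimination) {
  HybridNonlinearFactorGraph fg;
  fg.emplace_nonlinear<PriorFactor<double>>(X(0), 0, Isotropic::Sigma(1, 0.1));
  Values linearizationPoint;
  linearizationPoint.insert<double>(X(0), 0);
  HybridGaussianFactorGraph ghfg = fg.linearize(linearizationPoint);
  // Eliminate the single continuous variable.
  Ordering ordering;
  ordering += X(0);
  HybridBayesNet::shared_ptr bayesNet = ghfg.eliminateSequential(ordering);
  CHECK(bayesNet);
  EXPECT_LONGS_EQUAL(1, bayesNet->size());
}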
/***************************************************************************
* Test equality for Hybrid Nonlinear Factor Graph.
*/
TEST(HybridNonlinearFactorGraph, Equals) {
HybridNonlinearFactorGraph graph1;
HybridNonlinearFactorGraph graph2;
// Test empty factor graphs
EXPECT(assert_equal(graph1, graph2));
auto f0 = boost::make_shared<PriorFactor<Pose2>>(
1, Pose2(), noiseModel::Isotropic::Sigma(3, 0.001));
graph1.push_back(f0);
graph2.push_back(f0);
auto f1 = boost::make_shared<BetweenFactor<Pose2>>(
1, 2, Pose2(), noiseModel::Isotropic::Sigma(3, 0.1));
graph1.push_back(f1);
graph2.push_back(f1);
// Test non-empty graphs
EXPECT(assert_equal(graph1, graph2));
}
/***************************************************************************
* Test that the resize method works correctly for a HybridNonlinearFactorGraph.
*/
TEST(HybridNonlinearFactorGraph, Resize) {
HybridNonlinearFactorGraph fg;
auto nonlinearFactor = boost::make_shared<BetweenFactor<double>>();
fg.push_back(nonlinearFactor);
auto discreteFactor = boost::make_shared<DecisionTreeFactor>();
fg.push_back(discreteFactor);
auto dcFactor = boost::make_shared<MixtureFactor>();
fg.push_back(dcFactor);
EXPECT_LONGS_EQUAL(fg.size(), 3);
fg.resize(0);
EXPECT_LONGS_EQUAL(fg.size(), 0);
}
/***************************************************************************
* Test that the resize method works correctly for a
* HybridGaussianFactorGraph.
*/
TEST(HybridGaussianFactorGraph, Resize) {
HybridNonlinearFactorGraph nhfg;
auto nonlinearFactor = boost::make_shared<BetweenFactor<double>>(
X(0), X(1), 0.0, Isotropic::Sigma(1, 0.1));
nhfg.push_back(nonlinearFactor);
auto discreteFactor = boost::make_shared<DecisionTreeFactor>();
nhfg.push_back(discreteFactor);
KeyVector contKeys = {X(0), X(1)};
auto noise_model = noiseModel::Isotropic::Sigma(1, 1.0);
auto still = boost::make_shared<MotionModel>(X(0), X(1), 0.0, noise_model),
moving = boost::make_shared<MotionModel>(X(0), X(1), 1.0, noise_model);
std::vector<MotionModel::shared_ptr> components = {still, moving};
auto dcFactor = boost::make_shared<MixtureFactor>(
contKeys, DiscreteKeys{gtsam::DiscreteKey(M(1), 2)}, components);
nhfg.push_back(dcFactor);
Values linearizationPoint;
linearizationPoint.insert<double>(X(0), 0);
linearizationPoint.insert<double>(X(1), 1);
// Generate `HybridGaussianFactorGraph` by linearizing
HybridGaussianFactorGraph gfg = nhfg.linearize(linearizationPoint);
EXPECT_LONGS_EQUAL(gfg.size(), 3);
gfg.resize(0);
EXPECT_LONGS_EQUAL(gfg.size(), 0);
}
/*****************************************************************************
 * Test that push_back on HFG makes the correct distinction.
*/
TEST(HybridFactorGraph, PushBack) {
HybridNonlinearFactorGraph fg;
auto nonlinearFactor = boost::make_shared<BetweenFactor<double>>();
fg.push_back(nonlinearFactor);
EXPECT_LONGS_EQUAL(fg.size(), 1);
fg = HybridNonlinearFactorGraph();
auto discreteFactor = boost::make_shared<DecisionTreeFactor>();
fg.push_back(discreteFactor);
EXPECT_LONGS_EQUAL(fg.size(), 1);
fg = HybridNonlinearFactorGraph();
auto dcFactor = boost::make_shared<MixtureFactor>();
fg.push_back(dcFactor);
EXPECT_LONGS_EQUAL(fg.size(), 1);
// Now do the same with HybridGaussianFactorGraph
HybridGaussianFactorGraph ghfg;
auto gaussianFactor = boost::make_shared<JacobianFactor>();
ghfg.push_back(gaussianFactor);
EXPECT_LONGS_EQUAL(ghfg.size(), 1);
ghfg = HybridGaussianFactorGraph();
ghfg.push_back(discreteFactor);
EXPECT_LONGS_EQUAL(ghfg.size(), 1);
ghfg = HybridGaussianFactorGraph();
ghfg.push_back(dcFactor);
EXPECT_LONGS_EQUAL(ghfg.size(), 1);
}
/****************************************************************************
* Test construction of switching-like hybrid factor graph.
*/
TEST(HybridFactorGraph, Switching) {
Switching self(3);
EXPECT_LONGS_EQUAL(7, self.nonlinearFactorGraph.size());
EXPECT_LONGS_EQUAL(7, self.linearizedFactorGraph.size());
}
/****************************************************************************
* Test linearization on a switching-like hybrid factor graph.
*/
TEST(HybridFactorGraph, Linearization) {
Switching self(3);
// Linearize here:
HybridGaussianFactorGraph actualLinearized =
self.nonlinearFactorGraph.linearize(self.linearizationPoint);
EXPECT_LONGS_EQUAL(7, actualLinearized.size());
}
/****************************************************************************
* Test elimination tree construction
*/
TEST(HybridFactorGraph, EliminationTree) {
Switching self(3);
// Create ordering.
Ordering ordering;
for (size_t k = 1; k <= self.K; k++) ordering += X(k);
// Create elimination tree.
HybridEliminationTree etree(self.linearizedFactorGraph, ordering);
  EXPECT_LONGS_EQUAL(1, etree.roots().size());
}
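/****************************************************************************
 * A short sketch building on the elimination tree above: the tree can be
 * eliminated directly with EliminateHybrid, yielding a Bayes net and a
 * remaining factor graph. Assumes the standard EliminationTree::eliminate
 * interface; the expected size mirrors the Partial_Elimination test below.
 */
TEST(HybridFactorGraph, EliminationTreeEliminate) {
  Switching self(3);
  // Create ordering.
  Ordering ordering;
  for (size_t k = 1; k <= self.K; k++) ordering += X(k);
  // Create and eliminate the elimination tree.
  HybridEliminationTree etree(self.linearizedFactorGraph, ordering);
  HybridBayesNet::shared_ptr bayesNet;
  HybridGaussianFactorGraph::shared_ptr remaining;
  std::tie(bayesNet, remaining) = etree.eliminate(EliminateHybrid);
  CHECK(bayesNet);
  EXPECT_LONGS_EQUAL(3, bayesNet->size());
}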
/****************************************************************************
 * Test elimination function by eliminating x1 in the *-x1-*-x2 graph.
*/
TEST(GaussianElimination, Eliminate_x1) {
Switching self(3);
  // Gather factors on x1: a simple Gaussian prior and a mixture factor.
HybridGaussianFactorGraph factors;
// Add gaussian prior
factors.push_back(self.linearizedFactorGraph[0]);
// Add first hybrid factor
factors.push_back(self.linearizedFactorGraph[1]);
// TODO(Varun) remove this block since sum is no longer exposed.
// // Check that sum works:
// auto sum = factors.sum();
// Assignment<Key> mode;
// mode[M(1)] = 1;
// auto actual = sum(mode); // Selects one of 2 modes.
// EXPECT_LONGS_EQUAL(2, actual.size()); // Prior and motion model.
// Eliminate x1
Ordering ordering;
ordering += X(1);
auto result = EliminateHybrid(factors, ordering);
CHECK(result.first);
EXPECT_LONGS_EQUAL(1, result.first->nrFrontals());
CHECK(result.second);
// Has two keys, x2 and m1
EXPECT_LONGS_EQUAL(2, result.second->size());
}
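/****************************************************************************
 * A small follow-on sketch: the conditional returned by EliminateHybrid can
 * be unpacked into its GaussianMixture inner type, mirroring the
 * EliminateHybrid_2_Variable test below. Uses the same fixture and calls as
 * the test above.
 */
TEST(GaussianElimination, Eliminate_x1_Inner) {
  Switching self(3);
  HybridGaussianFactorGraph factors;
  factors.push_back(self.linearizedFactorGraph[0]);  // Gaussian prior on x1
  factors.push_back(self.linearizedFactorGraph[1]);  // mixture on x1, x2
  // Eliminate x1.
  Ordering ordering;
  ordering += X(1);
  auto result = EliminateHybrid(factors, ordering);
  auto mixture =
      dynamic_pointer_cast<GaussianMixture>(result.first->inner());
  CHECK(mixture);
  // One frontal (x1); the mode m1 selects between the two components.
  EXPECT_LONGS_EQUAL(1, mixture->nrFrontals());
}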
/****************************************************************************
* Test elimination function by eliminating x2 in x1-*-x2-*-x3 chain.
* m1/ \m2
*/
TEST(HybridGaussianElimination, Eliminate_x2) {
Switching self(3);
  // Gather factors on x2: two mixture factors (with x1 and x3, respectively).
HybridGaussianFactorGraph factors;
factors.push_back(self.linearizedFactorGraph[1]); // involves m1
factors.push_back(self.linearizedFactorGraph[2]); // involves m2
// TODO(Varun) remove this block since sum is no longer exposed.
// // Check that sum works:
// auto sum = factors.sum();
// Assignment<Key> mode;
// mode[M(1)] = 0;
// mode[M(2)] = 1;
  //   auto actual = sum(mode);  // Selects one of 4 mode combinations.
  //   EXPECT_LONGS_EQUAL(2, actual.size());  // 2 motion models.
// Eliminate x2
Ordering ordering;
ordering += X(2);
std::pair<HybridConditional::shared_ptr, HybridFactor::shared_ptr> result =
EliminateHybrid(factors, ordering);
CHECK(result.first);
EXPECT_LONGS_EQUAL(1, result.first->nrFrontals());
CHECK(result.second);
// Note: separator keys should include m1, m2.
EXPECT_LONGS_EQUAL(4, result.second->size());
}
/****************************************************************************
* Helper method to generate gaussian factor graphs with a specific mode.
*/
GaussianFactorGraph::shared_ptr batchGFG(double between,
Values linearizationPoint) {
NonlinearFactorGraph graph;
graph.addPrior<double>(X(1), 0, Isotropic::Sigma(1, 0.1));
auto between_x1_x2 = boost::make_shared<MotionModel>(
X(1), X(2), between, Isotropic::Sigma(1, 1.0));
graph.push_back(between_x1_x2);
return graph.linearize(linearizationPoint);
}
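/****************************************************************************
 * A usage sketch for the helper above: build the mode-specific Gaussian graph
 * at a linearization point, solve it, and evaluate the unnormalized
 * probability, as the incremental tests do. The positivity check is only a
 * sanity condition, not a regression value.
 */
TEST(HybridFactorGraph, BatchGFGSketch) {
  Values linearizationPoint;
  linearizationPoint.insert<double>(X(1), 0);
  linearizationPoint.insert<double>(X(2), 1);
  GaussianFactorGraph::shared_ptr gfg = batchGFG(1.0, linearizationPoint);
  auto bayesNet = gfg->eliminateSequential();
  double prob = gfg->probPrime(bayesNet->optimize());
  EXPECT(prob > 0.0);
}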
/****************************************************************************
* Test elimination function by eliminating x1 and x2 in graph.
*/
TEST(HybridGaussianElimination, EliminateHybrid_2_Variable) {
Switching self(2, 1.0, 0.1);
auto factors = self.linearizedFactorGraph;
// Eliminate x1
Ordering ordering;
ordering += X(1);
ordering += X(2);
HybridConditional::shared_ptr hybridConditionalMixture;
HybridFactor::shared_ptr factorOnModes;
std::tie(hybridConditionalMixture, factorOnModes) =
EliminateHybrid(factors, ordering);
auto gaussianConditionalMixture =
dynamic_pointer_cast<GaussianMixture>(hybridConditionalMixture->inner());
CHECK(gaussianConditionalMixture);
// Frontals = [x1, x2]
EXPECT_LONGS_EQUAL(2, gaussianConditionalMixture->nrFrontals());
// 1 parent, which is the mode
EXPECT_LONGS_EQUAL(1, gaussianConditionalMixture->nrParents());
// This is now a HybridDiscreteFactor
auto hybridDiscreteFactor =
dynamic_pointer_cast<HybridDiscreteFactor>(factorOnModes);
// Access the type-erased inner object and convert to DecisionTreeFactor
auto discreteFactor =
dynamic_pointer_cast<DecisionTreeFactor>(hybridDiscreteFactor->inner());
CHECK(discreteFactor);
EXPECT_LONGS_EQUAL(1, discreteFactor->discreteKeys().size());
EXPECT(discreteFactor->root_->isLeaf() == false);
}
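/****************************************************************************
 * A short sketch of consuming the discrete factor produced above: the
 * DecisionTreeFactor can be evaluated at a concrete mode assignment. Uses the
 * same fixture and calls as the test above; the positivity check is a sanity
 * condition, not a regression value.
 */
TEST(HybridGaussianElimination, EvaluateModeFactor) {
  Switching self(2, 1.0, 0.1);
  auto factors = self.linearizedFactorGraph;
  Ordering ordering;
  ordering += X(1);
  ordering += X(2);
  auto result = EliminateHybrid(factors, ordering);
  auto hybridDiscreteFactor =
      dynamic_pointer_cast<HybridDiscreteFactor>(result.second);
  CHECK(hybridDiscreteFactor);
  auto discreteFactor =
      dynamic_pointer_cast<DecisionTreeFactor>(hybridDiscreteFactor->inner());
  CHECK(discreteFactor);
  // Evaluate the unnormalized probability of mode m1 = 0.
  DiscreteValues mode;
  mode[M(1)] = 0;
  EXPECT((*discreteFactor)(mode) > 0.0);
}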
/* ****************************************************************************/
// /// Test the toDecisionTreeFactor method
// TEST(HybridFactorGraph, ToDecisionTreeFactor) {
// size_t K = 3;
// // Provide tight sigma values so that the errors are visibly different.
// double between_sigma = 5e-8, prior_sigma = 1e-7;
// Switching self(K, between_sigma, prior_sigma);
//   // Clear out discrete factors since sum() cannot handle those.
// HybridGaussianFactorGraph linearizedFactorGraph(
// self.linearizedFactorGraph.gaussianGraph(), DiscreteFactorGraph(),
// self.linearizedFactorGraph.dcGraph());
// auto decisionTreeFactor = linearizedFactorGraph.toDecisionTreeFactor();
// auto allAssignments =
// DiscreteValues::CartesianProduct(linearizedFactorGraph.discreteKeys());
// // Get the error of the discrete assignment m1=0, m2=1.
// double actual = (*decisionTreeFactor)(allAssignments[1]);
// /********************************************/
// // Create equivalent factor graph for m1=0, m2=1
// GaussianFactorGraph graph = linearizedFactorGraph.gaussianGraph();
// for (auto &p : linearizedFactorGraph.dcGraph()) {
// if (auto mixture =
// boost::dynamic_pointer_cast<DCGaussianMixtureFactor>(p)) {
// graph.add((*mixture)(allAssignments[1]));
// }
// }
// VectorValues values = graph.optimize();
// double expected = graph.probPrime(values);
// /********************************************/
// EXPECT_DOUBLES_EQUAL(expected, actual, 1e-12);
// // REGRESSION:
// EXPECT_DOUBLES_EQUAL(0.6125, actual, 1e-4);
// }
/****************************************************************************
* Test partial elimination
*/
TEST(HybridFactorGraph, Partial_Elimination) {
Switching self(3);
auto linearizedFactorGraph = self.linearizedFactorGraph;
// Create ordering.
Ordering ordering;
for (size_t k = 1; k <= self.K; k++) ordering += X(k);
// Eliminate partially.
HybridBayesNet::shared_ptr hybridBayesNet;
HybridGaussianFactorGraph::shared_ptr remainingFactorGraph;
std::tie(hybridBayesNet, remainingFactorGraph) =
linearizedFactorGraph.eliminatePartialSequential(ordering);
CHECK(hybridBayesNet);
// GTSAM_PRINT(*hybridBayesNet); // HybridBayesNet
EXPECT_LONGS_EQUAL(3, hybridBayesNet->size());
EXPECT(hybridBayesNet->at(0)->frontals() == KeyVector{X(1)});
EXPECT(hybridBayesNet->at(0)->parents() == KeyVector({X(2), M(1)}));
EXPECT(hybridBayesNet->at(1)->frontals() == KeyVector{X(2)});
EXPECT(hybridBayesNet->at(1)->parents() == KeyVector({X(3), M(1), M(2)}));
EXPECT(hybridBayesNet->at(2)->frontals() == KeyVector{X(3)});
EXPECT(hybridBayesNet->at(2)->parents() == KeyVector({M(1), M(2)}));
CHECK(remainingFactorGraph);
// GTSAM_PRINT(*remainingFactorGraph); // HybridFactorGraph
EXPECT_LONGS_EQUAL(3, remainingFactorGraph->size());
EXPECT(remainingFactorGraph->at(0)->keys() == KeyVector({M(1)}));
EXPECT(remainingFactorGraph->at(1)->keys() == KeyVector({M(2), M(1)}));
EXPECT(remainingFactorGraph->at(2)->keys() == KeyVector({M(1), M(2)}));
}
/****************************************************************************
* Test full elimination
*/
TEST(HybridFactorGraph, Full_Elimination) {
Switching self(3);
auto linearizedFactorGraph = self.linearizedFactorGraph;
// We first do a partial elimination
HybridBayesNet::shared_ptr hybridBayesNet_partial;
HybridGaussianFactorGraph::shared_ptr remainingFactorGraph_partial;
DiscreteBayesNet discreteBayesNet;
{
// Create ordering.
Ordering ordering;
for (size_t k = 1; k <= self.K; k++) ordering += X(k);
// Eliminate partially.
std::tie(hybridBayesNet_partial, remainingFactorGraph_partial) =
linearizedFactorGraph.eliminatePartialSequential(ordering);
DiscreteFactorGraph discrete_fg;
// TODO(Varun) Make this a function of HybridGaussianFactorGraph?
for (HybridFactor::shared_ptr& factor : (*remainingFactorGraph_partial)) {
auto df = dynamic_pointer_cast<HybridDiscreteFactor>(factor);
discrete_fg.push_back(df->inner());
}
ordering.clear();
for (size_t k = 1; k < self.K; k++) ordering += M(k);
discreteBayesNet =
*discrete_fg.eliminateSequential(ordering, EliminateForMPE);
}
// Create ordering.
Ordering ordering;
for (size_t k = 1; k <= self.K; k++) ordering += X(k);
for (size_t k = 1; k < self.K; k++) ordering += M(k);
// Eliminate partially.
HybridBayesNet::shared_ptr hybridBayesNet =
linearizedFactorGraph.eliminateSequential(ordering);
CHECK(hybridBayesNet);
EXPECT_LONGS_EQUAL(5, hybridBayesNet->size());
// p(x1 | x2, m1)
EXPECT(hybridBayesNet->at(0)->frontals() == KeyVector{X(1)});
EXPECT(hybridBayesNet->at(0)->parents() == KeyVector({X(2), M(1)}));
// p(x2 | x3, m1, m2)
EXPECT(hybridBayesNet->at(1)->frontals() == KeyVector{X(2)});
EXPECT(hybridBayesNet->at(1)->parents() == KeyVector({X(3), M(1), M(2)}));
// p(x3 | m1, m2)
EXPECT(hybridBayesNet->at(2)->frontals() == KeyVector{X(3)});
EXPECT(hybridBayesNet->at(2)->parents() == KeyVector({M(1), M(2)}));
// P(m1 | m2)
EXPECT(hybridBayesNet->at(3)->frontals() == KeyVector{M(1)});
EXPECT(hybridBayesNet->at(3)->parents() == KeyVector({M(2)}));
EXPECT(
dynamic_pointer_cast<DiscreteConditional>(hybridBayesNet->at(3)->inner())
->equals(*discreteBayesNet.at(0)));
// P(m2)
EXPECT(hybridBayesNet->at(4)->frontals() == KeyVector{M(2)});
EXPECT_LONGS_EQUAL(0, hybridBayesNet->at(4)->nrParents());
EXPECT(
dynamic_pointer_cast<DiscreteConditional>(hybridBayesNet->at(4)->inner())
->equals(*discreteBayesNet.at(1)));
}
/****************************************************************************
* Test printing
*/
TEST(HybridFactorGraph, Printing) {
Switching self(3);
auto linearizedFactorGraph = self.linearizedFactorGraph;
// Create ordering.
Ordering ordering;
for (size_t k = 1; k <= self.K; k++) ordering += X(k);
// Eliminate partially.
HybridBayesNet::shared_ptr hybridBayesNet;
HybridGaussianFactorGraph::shared_ptr remainingFactorGraph;
std::tie(hybridBayesNet, remainingFactorGraph) =
linearizedFactorGraph.eliminatePartialSequential(ordering);
string expected_hybridFactorGraph = R"(
size: 7
factor 0: Continuous [x1]
A[x1] = [
10
]
b = [ -10 ]
No noise model
factor 1: Hybrid [x1 x2; m1]{
Choice(m1)
0 Leaf :
A[x1] = [
-1
]
A[x2] = [
1
]
b = [ -1 ]
No noise model
1 Leaf :
A[x1] = [
-1
]
A[x2] = [
1
]
b = [ -0 ]
No noise model
}
factor 2: Hybrid [x2 x3; m2]{
Choice(m2)
0 Leaf :
A[x2] = [
-1
]
A[x3] = [
1
]
b = [ -1 ]
No noise model
1 Leaf :
A[x2] = [
-1
]
A[x3] = [
1
]
b = [ -0 ]
No noise model
}
factor 3: Continuous [x2]
A[x2] = [
10
]
b = [ -10 ]
No noise model
factor 4: Continuous [x3]
A[x3] = [
10
]
b = [ -10 ]
No noise model
factor 5: Discrete [m1]
P( m1 ):
Leaf 0.5
factor 6: Discrete [m2 m1]
P( m2 | m1 ):
Choice(m2)
0 Choice(m1)
0 0 Leaf 0.33333333
0 1 Leaf 0.6
1 Choice(m1)
1 0 Leaf 0.66666667
1 1 Leaf 0.4
)";
EXPECT(assert_print_equal(expected_hybridFactorGraph, linearizedFactorGraph));
// Expected output for hybridBayesNet.
string expected_hybridBayesNet = R"(
size: 3
factor 0: Hybrid P( x1 | x2 m1)
Discrete Keys = (m1, 2),
Choice(m1)
0 Leaf p(x1 | x2)
R = [ 10.0499 ]
S[x2] = [ -0.0995037 ]
d = [ -9.85087 ]
No noise model
1 Leaf p(x1 | x2)
R = [ 10.0499 ]
S[x2] = [ -0.0995037 ]
d = [ -9.95037 ]
No noise model
factor 1: Hybrid P( x2 | x3 m1 m2)
Discrete Keys = (m1, 2), (m2, 2),
Choice(m2)
0 Choice(m1)
0 0 Leaf p(x2 | x3)
R = [ 10.099 ]
S[x3] = [ -0.0990196 ]
d = [ -9.99901 ]
No noise model
0 1 Leaf p(x2 | x3)
R = [ 10.099 ]
S[x3] = [ -0.0990196 ]
d = [ -9.90098 ]
No noise model
1 Choice(m1)
1 0 Leaf p(x2 | x3)
R = [ 10.099 ]
S[x3] = [ -0.0990196 ]
d = [ -10.098 ]
No noise model
1 1 Leaf p(x2 | x3)
R = [ 10.099 ]
S[x3] = [ -0.0990196 ]
d = [ -10 ]
No noise model
factor 2: Hybrid P( x3 | m1 m2)
Discrete Keys = (m1, 2), (m2, 2),
Choice(m2)
0 Choice(m1)
0 0 Leaf p(x3)
R = [ 10.0494 ]
d = [ -10.1489 ]
No noise model
0 1 Leaf p(x3)
R = [ 10.0494 ]
d = [ -10.1479 ]
No noise model
1 Choice(m1)
1 0 Leaf p(x3)
R = [ 10.0494 ]
d = [ -10.0504 ]
No noise model
1 1 Leaf p(x3)
R = [ 10.0494 ]
d = [ -10.0494 ]
No noise model
)";
EXPECT(assert_print_equal(expected_hybridBayesNet, *hybridBayesNet));
}
/****************************************************************************
* Simple PlanarSLAM example test with 2 poses and 2 landmarks (each pose
* connects to 1 landmark) to expose issue with default decision tree creation
* in hybrid elimination. The hybrid factor is between the poses X0 and X1.
* The issue arises if we eliminate a landmark variable first since it is not
* connected to a HybridFactor.
*/
TEST(HybridFactorGraph, DefaultDecisionTree) {
HybridNonlinearFactorGraph fg;
// Add a prior on pose x1 at the origin.
// A prior factor consists of a mean and a noise model (covariance matrix)
Pose2 prior(0.0, 0.0, 0.0); // prior mean is at origin
auto priorNoise = noiseModel::Diagonal::Sigmas(
Vector3(0.3, 0.3, 0.1)); // 30cm std on x,y, 0.1 rad on theta
fg.emplace_nonlinear<PriorFactor<Pose2>>(X(0), prior, priorNoise);
using PlanarMotionModel = BetweenFactor<Pose2>;
// Add odometry factor
Pose2 odometry(2.0, 0.0, 0.0);
KeyVector contKeys = {X(0), X(1)};
auto noise_model = noiseModel::Isotropic::Sigma(3, 1.0);
auto still = boost::make_shared<PlanarMotionModel>(X(0), X(1), Pose2(0, 0, 0),
noise_model),
moving = boost::make_shared<PlanarMotionModel>(X(0), X(1), odometry,
noise_model);
std::vector<PlanarMotionModel::shared_ptr> motion_models = {still, moving};
  // TODO(Varun) Make a templated constructor for MixtureFactor which does
  // this? See the toNonlinear sketch after this test.
std::vector<NonlinearFactor::shared_ptr> components;
for (auto&& f : motion_models) {
components.push_back(boost::dynamic_pointer_cast<NonlinearFactor>(f));
}
fg.emplace_hybrid<MixtureFactor>(
contKeys, DiscreteKeys{gtsam::DiscreteKey(M(1), 2)}, components);
  // Add Range-Bearing measurements from X0 to L0 and from X1 to L1.
// create a noise model for the landmark measurements
auto measurementNoise = noiseModel::Diagonal::Sigmas(
Vector2(0.1, 0.2)); // 0.1 rad std on bearing, 20cm on range
// create the measurement values - indices are (pose id, landmark id)
Rot2 bearing11 = Rot2::fromDegrees(45), bearing22 = Rot2::fromDegrees(90);
double range11 = std::sqrt(4.0 + 4.0), range22 = 2.0;
// Add Bearing-Range factors
fg.emplace_nonlinear<BearingRangeFactor<Pose2, Point2>>(
X(0), L(0), bearing11, range11, measurementNoise);
fg.emplace_nonlinear<BearingRangeFactor<Pose2, Point2>>(
X(1), L(1), bearing22, range22, measurementNoise);
// Create (deliberately inaccurate) initial estimate
Values initialEstimate;
initialEstimate.insert(X(0), Pose2(0.5, 0.0, 0.2));
initialEstimate.insert(X(1), Pose2(2.3, 0.1, -0.2));
initialEstimate.insert(L(0), Point2(1.8, 2.1));
initialEstimate.insert(L(1), Point2(4.1, 1.8));
// We want to eliminate variables not connected to DCFactors first.
Ordering ordering;
ordering += L(0);
ordering += L(1);
ordering += X(0);
ordering += X(1);
HybridGaussianFactorGraph linearized = fg.linearize(initialEstimate);
gtsam::HybridBayesNet::shared_ptr hybridBayesNet;
gtsam::HybridGaussianFactorGraph::shared_ptr remainingFactorGraph;
// This should NOT fail
std::tie(hybridBayesNet, remainingFactorGraph) =
linearized.eliminatePartialSequential(ordering);
EXPECT_LONGS_EQUAL(4, hybridBayesNet->size());
EXPECT_LONGS_EQUAL(1, remainingFactorGraph->size());
}
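/* ************************************************************************* */
// A hypothetical helper for the TODO in the test above: upcast a vector of
// derived factor pointers to NonlinearFactor pointers before handing them to
// MixtureFactor. This is a sketch only, not part of the MixtureFactor API.
template <typename FACTOR>
std::vector<NonlinearFactor::shared_ptr> toNonlinear(
    const std::vector<boost::shared_ptr<FACTOR>>& factors) {
  std::vector<NonlinearFactor::shared_ptr> result;
  for (auto&& f : factors) {
    result.push_back(boost::dynamic_pointer_cast<NonlinearFactor>(f));
  }
  return result;
}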
/* ************************************************************************* */
int main() {
TestResult tr;
return TestRegistry::runAllTests(tr);
}
/* ************************************************************************* */