Merge branch 'hybrid-error-scalars' into direct-hybrid-fg

release/4.3a0
Varun Agrawal 2024-09-15 11:19:10 -04:00
commit 506cda86b3
51 changed files with 1660 additions and 767 deletions

View File

@ -191,17 +191,17 @@ E_{gc}(x,y)=\frac{1}{2}\|Rx+Sy-d\|_{\Sigma}^{2}.\label{eq:gc_error}
\end_layout \end_layout
\begin_layout Subsubsection* \begin_layout Subsubsection*
GaussianMixture HybridGaussianConditional
\end_layout \end_layout
\begin_layout Standard \begin_layout Standard
A A
\emph on \emph on
GaussianMixture HybridGaussianConditional
\emph default \emph default
(maybe to be renamed to (maybe to be renamed to
\emph on \emph on
GaussianMixtureComponent HybridGaussianConditionalComponent
\emph default \emph default
) just indexes into a number of ) just indexes into a number of
\emph on \emph on
@ -233,7 +233,7 @@ GaussianConditional
to a set of discrete variables. to a set of discrete variables.
As As
\emph on \emph on
GaussianMixture HybridGaussianConditional
\emph default \emph default
is a is a
\emph on \emph on
@ -324,7 +324,7 @@ The key point here is that
\color inherit \color inherit
is the log-normalization constant for the complete is the log-normalization constant for the complete
\emph on \emph on
GaussianMixture HybridGaussianConditional
\emph default \emph default
across all values of across all values of
\begin_inset Formula $m$ \begin_inset Formula $m$
@ -548,15 +548,15 @@ with
\end_layout \end_layout
\begin_layout Subsubsection* \begin_layout Subsubsection*
GaussianMixtureFactor HybridGaussianFactor
\end_layout \end_layout
\begin_layout Standard \begin_layout Standard
Analogously, a Analogously, a
\emph on \emph on
GaussianMixtureFactor HybridGaussianFactor
\emph default \emph default
typically results from a GaussianMixture by having known values typically results from a HybridGaussianConditional by having known values
\begin_inset Formula $\bar{x}$ \begin_inset Formula $\bar{x}$
\end_inset \end_inset
@ -817,7 +817,7 @@ E_{mf}(y,m)=\frac{1}{2}\|A_{m}y-b_{m}\|_{\Sigma_{mfm}}^{2}=E_{gcm}(\bar{x},y)+K_
\end_inset \end_inset
which is identical to the GaussianMixture error which is identical to the HybridGaussianConditional error
\begin_inset CommandInset ref \begin_inset CommandInset ref
LatexCommand eqref LatexCommand eqref
reference "eq:gm_error" reference "eq:gm_error"

View File

@ -168,11 +168,11 @@ HybridBayesNet HybridBayesNet::prune(size_t maxNrLeaves) {
DecisionTreeFactor prunedDiscreteProbs = DecisionTreeFactor prunedDiscreteProbs =
this->pruneDiscreteConditionals(maxNrLeaves); this->pruneDiscreteConditionals(maxNrLeaves);
/* To prune, we visitWith every leaf in the GaussianMixture. /* To prune, we visitWith every leaf in the HybridGaussianConditional.
* For each leaf, using the assignment we can check the discrete decision tree * For each leaf, using the assignment we can check the discrete decision tree
* for 0.0 probability, then just set the leaf to a nullptr. * for 0.0 probability, then just set the leaf to a nullptr.
* *
* We can later check the GaussianMixture for just nullptrs. * We can later check the HybridGaussianConditional for just nullptrs.
*/ */
HybridBayesNet prunedBayesNetFragment; HybridBayesNet prunedBayesNetFragment;
@ -182,14 +182,16 @@ HybridBayesNet HybridBayesNet::prune(size_t maxNrLeaves) {
for (auto &&conditional : *this) { for (auto &&conditional : *this) {
if (auto gm = conditional->asMixture()) { if (auto gm = conditional->asMixture()) {
// Make a copy of the Gaussian mixture and prune it! // Make a copy of the Gaussian mixture and prune it!
auto prunedGaussianMixture = std::make_shared<GaussianMixture>(*gm); auto prunedHybridGaussianConditional =
prunedGaussianMixture->prune(prunedDiscreteProbs); // imperative :-( std::make_shared<HybridGaussianConditional>(*gm);
prunedHybridGaussianConditional->prune(
prunedDiscreteProbs); // imperative :-(
// Type-erase and add to the pruned Bayes Net fragment. // Type-erase and add to the pruned Bayes Net fragment.
prunedBayesNetFragment.push_back(prunedGaussianMixture); prunedBayesNetFragment.push_back(prunedHybridGaussianConditional);
} else { } else {
// Add the non-GaussianMixture conditional // Add the non-HybridGaussianConditional conditional
prunedBayesNetFragment.push_back(conditional); prunedBayesNetFragment.push_back(conditional);
} }
} }

View File

@ -79,7 +79,7 @@ class GTSAM_EXPORT HybridBayesNet : public BayesNet<HybridConditional> {
* *
* Example: * Example:
* auto shared_ptr_to_a_conditional = * auto shared_ptr_to_a_conditional =
* std::make_shared<GaussianMixture>(...); * std::make_shared<HybridGaussianConditional>(...);
* hbn.push_back(shared_ptr_to_a_conditional); * hbn.push_back(shared_ptr_to_a_conditional);
*/ */
void push_back(HybridConditional &&conditional) { void push_back(HybridConditional &&conditional) {
@ -106,7 +106,7 @@ class GTSAM_EXPORT HybridBayesNet : public BayesNet<HybridConditional> {
* Preferred: Emplace a conditional directly using arguments. * Preferred: Emplace a conditional directly using arguments.
* *
* Examples: * Examples:
* hbn.emplace_shared<GaussianMixture>(...))); * hbn.emplace_shared<HybridGaussianConditional>(...)));
* hbn.emplace_shared<GaussianConditional>(...))); * hbn.emplace_shared<GaussianConditional>(...)));
* hbn.emplace_shared<DiscreteConditional>(...))); * hbn.emplace_shared<DiscreteConditional>(...)));
*/ */

View File

@ -55,7 +55,7 @@ HybridConditional::HybridConditional(
/* ************************************************************************ */ /* ************************************************************************ */
HybridConditional::HybridConditional( HybridConditional::HybridConditional(
const std::shared_ptr<GaussianMixture> &gaussianMixture) const std::shared_ptr<HybridGaussianConditional> &gaussianMixture)
: BaseFactor(KeyVector(gaussianMixture->keys().begin(), : BaseFactor(KeyVector(gaussianMixture->keys().begin(),
gaussianMixture->keys().begin() + gaussianMixture->keys().begin() +
gaussianMixture->nrContinuous()), gaussianMixture->nrContinuous()),
@ -157,10 +157,10 @@ double HybridConditional::logNormalizationConstant() const {
return gc->logNormalizationConstant(); return gc->logNormalizationConstant();
} }
if (auto gm = asMixture()) { if (auto gm = asMixture()) {
return gm->logNormalizationConstant(); // 0.0! return gm->logNormalizationConstant(); // 0.0!
} }
if (auto dc = asDiscrete()) { if (auto dc = asDiscrete()) {
return dc->logNormalizationConstant(); // 0.0! return dc->logNormalizationConstant(); // 0.0!
} }
throw std::runtime_error( throw std::runtime_error(
"HybridConditional::logProbability: conditional type not handled"); "HybridConditional::logProbability: conditional type not handled");

View File

@ -18,8 +18,8 @@
#pragma once #pragma once
#include <gtsam/discrete/DiscreteConditional.h> #include <gtsam/discrete/DiscreteConditional.h>
#include <gtsam/hybrid/GaussianMixture.h>
#include <gtsam/hybrid/HybridFactor.h> #include <gtsam/hybrid/HybridFactor.h>
#include <gtsam/hybrid/HybridGaussianConditional.h>
#include <gtsam/hybrid/HybridGaussianFactorGraph.h> #include <gtsam/hybrid/HybridGaussianFactorGraph.h>
#include <gtsam/inference/Conditional.h> #include <gtsam/inference/Conditional.h>
#include <gtsam/inference/Key.h> #include <gtsam/inference/Key.h>
@ -39,7 +39,7 @@ namespace gtsam {
* As a type-erased variant of: * As a type-erased variant of:
* - DiscreteConditional * - DiscreteConditional
* - GaussianConditional * - GaussianConditional
* - GaussianMixture * - HybridGaussianConditional
* *
* The reason why this is important is that `Conditional<T>` is a CRTP class. * The reason why this is important is that `Conditional<T>` is a CRTP class.
* CRTP is static polymorphism such that all CRTP classes, while bearing the * CRTP is static polymorphism such that all CRTP classes, while bearing the
@ -127,7 +127,8 @@ class GTSAM_EXPORT HybridConditional
* @param gaussianMixture Gaussian Mixture Conditional used to create the * @param gaussianMixture Gaussian Mixture Conditional used to create the
* HybridConditional. * HybridConditional.
*/ */
HybridConditional(const std::shared_ptr<GaussianMixture>& gaussianMixture); HybridConditional(
const std::shared_ptr<HybridGaussianConditional>& gaussianMixture);
/// @} /// @}
/// @name Testable /// @name Testable
@ -146,12 +147,12 @@ class GTSAM_EXPORT HybridConditional
/// @{ /// @{
/** /**
* @brief Return HybridConditional as a GaussianMixture * @brief Return HybridConditional as a HybridGaussianConditional
* @return nullptr if not a mixture * @return nullptr if not a mixture
* @return GaussianMixture::shared_ptr otherwise * @return HybridGaussianConditional::shared_ptr otherwise
*/ */
GaussianMixture::shared_ptr asMixture() const { HybridGaussianConditional::shared_ptr asMixture() const {
return std::dynamic_pointer_cast<GaussianMixture>(inner_); return std::dynamic_pointer_cast<HybridGaussianConditional>(inner_);
} }
/** /**
@ -222,8 +223,10 @@ class GTSAM_EXPORT HybridConditional
boost::serialization::void_cast_register<GaussianConditional, Factor>( boost::serialization::void_cast_register<GaussianConditional, Factor>(
static_cast<GaussianConditional*>(NULL), static_cast<Factor*>(NULL)); static_cast<GaussianConditional*>(NULL), static_cast<Factor*>(NULL));
} else { } else {
boost::serialization::void_cast_register<GaussianMixture, Factor>( boost::serialization::void_cast_register<HybridGaussianConditional,
static_cast<GaussianMixture*>(NULL), static_cast<Factor*>(NULL)); Factor>(
static_cast<HybridGaussianConditional*>(NULL),
static_cast<Factor*>(NULL));
} }
} }
#endif #endif

View File

@ -35,8 +35,8 @@ class GTSAM_EXPORT HybridEliminationTree
public: public:
typedef EliminationTree<HybridBayesNet, HybridGaussianFactorGraph> typedef EliminationTree<HybridBayesNet, HybridGaussianFactorGraph>
Base; ///< Base class Base; ///< Base class
typedef HybridEliminationTree This; ///< This class typedef HybridEliminationTree This; ///< This class
typedef std::shared_ptr<This> shared_ptr; ///< Shared pointer to this class typedef std::shared_ptr<This> shared_ptr; ///< Shared pointer to this class
/// @name Constructors /// @name Constructors

View File

@ -45,9 +45,9 @@ DiscreteKeys CollectDiscreteKeys(const DiscreteKeys &key1,
* Base class for *truly* hybrid probabilistic factors * Base class for *truly* hybrid probabilistic factors
* *
* Examples: * Examples:
* - MixtureFactor * - HybridNonlinearFactor
* - GaussianMixtureFactor * - HybridGaussianFactor
* - GaussianMixture * - HybridGaussianConditional
* *
* @ingroup hybrid * @ingroup hybrid
*/ */

View File

@ -10,7 +10,7 @@
* -------------------------------------------------------------------------- */ * -------------------------------------------------------------------------- */
/** /**
* @file GaussianMixture.cpp * @file HybridGaussianConditional.cpp
* @brief A hybrid conditional in the Conditional Linear Gaussian scheme * @brief A hybrid conditional in the Conditional Linear Gaussian scheme
* @author Fan Jiang * @author Fan Jiang
* @author Varun Agrawal * @author Varun Agrawal
@ -20,8 +20,8 @@
#include <gtsam/base/utilities.h> #include <gtsam/base/utilities.h>
#include <gtsam/discrete/DiscreteValues.h> #include <gtsam/discrete/DiscreteValues.h>
#include <gtsam/hybrid/GaussianMixture.h> #include <gtsam/hybrid/HybridGaussianConditional.h>
#include <gtsam/hybrid/GaussianMixtureFactor.h> #include <gtsam/hybrid/HybridGaussianFactor.h>
#include <gtsam/hybrid/HybridValues.h> #include <gtsam/hybrid/HybridValues.h>
#include <gtsam/inference/Conditional-inst.h> #include <gtsam/inference/Conditional-inst.h>
#include <gtsam/linear/GaussianBayesNet.h> #include <gtsam/linear/GaussianBayesNet.h>
@ -29,10 +29,10 @@
namespace gtsam { namespace gtsam {
GaussianMixture::GaussianMixture( HybridGaussianConditional::HybridGaussianConditional(
const KeyVector &continuousFrontals, const KeyVector &continuousParents, const KeyVector &continuousFrontals, const KeyVector &continuousParents,
const DiscreteKeys &discreteParents, const DiscreteKeys &discreteParents,
const GaussianMixture::Conditionals &conditionals) const HybridGaussianConditional::Conditionals &conditionals)
: BaseFactor(CollectKeys(continuousFrontals, continuousParents), : BaseFactor(CollectKeys(continuousFrontals, continuousParents),
discreteParents), discreteParents),
BaseConditional(continuousFrontals.size()), BaseConditional(continuousFrontals.size()),
@ -50,30 +50,33 @@ GaussianMixture::GaussianMixture(
} }
/* *******************************************************************************/ /* *******************************************************************************/
const GaussianMixture::Conditionals &GaussianMixture::conditionals() const { const HybridGaussianConditional::Conditionals &
HybridGaussianConditional::conditionals() const {
return conditionals_; return conditionals_;
} }
/* *******************************************************************************/ /* *******************************************************************************/
GaussianMixture::GaussianMixture( HybridGaussianConditional::HybridGaussianConditional(
KeyVector &&continuousFrontals, KeyVector &&continuousParents, KeyVector &&continuousFrontals, KeyVector &&continuousParents,
DiscreteKeys &&discreteParents, DiscreteKeys &&discreteParents,
std::vector<GaussianConditional::shared_ptr> &&conditionals) std::vector<GaussianConditional::shared_ptr> &&conditionals)
: GaussianMixture(continuousFrontals, continuousParents, discreteParents, : HybridGaussianConditional(continuousFrontals, continuousParents,
Conditionals(discreteParents, conditionals)) {} discreteParents,
Conditionals(discreteParents, conditionals)) {}
/* *******************************************************************************/ /* *******************************************************************************/
GaussianMixture::GaussianMixture( HybridGaussianConditional::HybridGaussianConditional(
const KeyVector &continuousFrontals, const KeyVector &continuousParents, const KeyVector &continuousFrontals, const KeyVector &continuousParents,
const DiscreteKeys &discreteParents, const DiscreteKeys &discreteParents,
const std::vector<GaussianConditional::shared_ptr> &conditionals) const std::vector<GaussianConditional::shared_ptr> &conditionals)
: GaussianMixture(continuousFrontals, continuousParents, discreteParents, : HybridGaussianConditional(continuousFrontals, continuousParents,
Conditionals(discreteParents, conditionals)) {} discreteParents,
Conditionals(discreteParents, conditionals)) {}
/* *******************************************************************************/ /* *******************************************************************************/
// TODO(dellaert): This is copy/paste: GaussianMixture should be derived from // TODO(dellaert): This is copy/paste: HybridGaussianConditional should be
// GaussianMixtureFactor, no? // derived from HybridGaussianFactor, no?
GaussianFactorGraphTree GaussianMixture::add( GaussianFactorGraphTree HybridGaussianConditional::add(
const GaussianFactorGraphTree &sum) const { const GaussianFactorGraphTree &sum) const {
using Y = GaussianFactorGraph; using Y = GaussianFactorGraph;
auto add = [](const Y &graph1, const Y &graph2) { auto add = [](const Y &graph1, const Y &graph2) {
@ -86,7 +89,8 @@ GaussianFactorGraphTree GaussianMixture::add(
} }
/* *******************************************************************************/ /* *******************************************************************************/
GaussianFactorGraphTree GaussianMixture::asGaussianFactorGraphTree() const { GaussianFactorGraphTree HybridGaussianConditional::asGaussianFactorGraphTree()
const {
auto wrap = [this](const GaussianConditional::shared_ptr &gc) { auto wrap = [this](const GaussianConditional::shared_ptr &gc) {
// First check if conditional has not been pruned // First check if conditional has not been pruned
if (gc) { if (gc) {
@ -109,7 +113,7 @@ GaussianFactorGraphTree GaussianMixture::asGaussianFactorGraphTree() const {
} }
/* *******************************************************************************/ /* *******************************************************************************/
size_t GaussianMixture::nrComponents() const { size_t HybridGaussianConditional::nrComponents() const {
size_t total = 0; size_t total = 0;
conditionals_.visit([&total](const GaussianFactor::shared_ptr &node) { conditionals_.visit([&total](const GaussianFactor::shared_ptr &node) {
if (node) total += 1; if (node) total += 1;
@ -118,7 +122,7 @@ size_t GaussianMixture::nrComponents() const {
} }
/* *******************************************************************************/ /* *******************************************************************************/
GaussianConditional::shared_ptr GaussianMixture::operator()( GaussianConditional::shared_ptr HybridGaussianConditional::operator()(
const DiscreteValues &discreteValues) const { const DiscreteValues &discreteValues) const {
auto &ptr = conditionals_(discreteValues); auto &ptr = conditionals_(discreteValues);
if (!ptr) return nullptr; if (!ptr) return nullptr;
@ -127,11 +131,12 @@ GaussianConditional::shared_ptr GaussianMixture::operator()(
return conditional; return conditional;
else else
throw std::logic_error( throw std::logic_error(
"A GaussianMixture unexpectedly contained a non-conditional"); "A HybridGaussianConditional unexpectedly contained a non-conditional");
} }
/* *******************************************************************************/ /* *******************************************************************************/
bool GaussianMixture::equals(const HybridFactor &lf, double tol) const { bool HybridGaussianConditional::equals(const HybridFactor &lf,
double tol) const {
const This *e = dynamic_cast<const This *>(&lf); const This *e = dynamic_cast<const This *>(&lf);
if (e == nullptr) return false; if (e == nullptr) return false;
@ -149,8 +154,8 @@ bool GaussianMixture::equals(const HybridFactor &lf, double tol) const {
} }
/* *******************************************************************************/ /* *******************************************************************************/
void GaussianMixture::print(const std::string &s, void HybridGaussianConditional::print(const std::string &s,
const KeyFormatter &formatter) const { const KeyFormatter &formatter) const {
std::cout << (s.empty() ? "" : s + "\n"); std::cout << (s.empty() ? "" : s + "\n");
if (isContinuous()) std::cout << "Continuous "; if (isContinuous()) std::cout << "Continuous ";
if (isDiscrete()) std::cout << "Discrete "; if (isDiscrete()) std::cout << "Discrete ";
@ -177,7 +182,7 @@ void GaussianMixture::print(const std::string &s,
} }
/* ************************************************************************* */ /* ************************************************************************* */
KeyVector GaussianMixture::continuousParents() const { KeyVector HybridGaussianConditional::continuousParents() const {
// Get all parent keys: // Get all parent keys:
const auto range = parents(); const auto range = parents();
KeyVector continuousParentKeys(range.begin(), range.end()); KeyVector continuousParentKeys(range.begin(), range.end());
@ -193,7 +198,8 @@ KeyVector GaussianMixture::continuousParents() const {
} }
/* ************************************************************************* */ /* ************************************************************************* */
bool GaussianMixture::allFrontalsGiven(const VectorValues &given) const { bool HybridGaussianConditional::allFrontalsGiven(
const VectorValues &given) const {
for (auto &&kv : given) { for (auto &&kv : given) {
if (given.find(kv.first) == given.end()) { if (given.find(kv.first) == given.end()) {
return false; return false;
@ -203,44 +209,21 @@ bool GaussianMixture::allFrontalsGiven(const VectorValues &given) const {
} }
/* ************************************************************************* */ /* ************************************************************************* */
std::shared_ptr<GaussianMixtureFactor> GaussianMixture::likelihood( std::shared_ptr<HybridGaussianFactor> HybridGaussianConditional::likelihood(
const VectorValues &given) const { const VectorValues &given) const {
if (!allFrontalsGiven(given)) { if (!allFrontalsGiven(given)) {
throw std::runtime_error( throw std::runtime_error(
"GaussianMixture::likelihood: given values are missing some frontals."); "HybridGaussianConditional::likelihood: given values are missing some "
"frontals.");
} }
const DiscreteKeys discreteParentKeys = discreteKeys(); const DiscreteKeys discreteParentKeys = discreteKeys();
const KeyVector continuousParentKeys = continuousParents(); const KeyVector continuousParentKeys = continuousParents();
const GaussianMixtureFactor::Factors likelihoods( const HybridGaussianFactor::FactorValuePairs likelihoods(
conditionals_, [&](const GaussianConditional::shared_ptr &conditional) { conditionals_,
[&](const GaussianConditional::shared_ptr &conditional)
-> GaussianFactorValuePair {
const auto likelihood_m = conditional->likelihood(given); const auto likelihood_m = conditional->likelihood(given);
return likelihood_m;
});
// First compute all the sqrt(|2 pi Sigma|) terms
auto computeLogNormalizers = [](const GaussianFactor::shared_ptr &gf) {
auto jf = std::dynamic_pointer_cast<JacobianFactor>(gf);
// If we have, say, a Hessian factor, then no need to do anything
if (!jf) return 0.0;
auto model = jf->get_model();
// If there is no noise model, there is nothing to do.
if (!model) {
return 0.0;
}
return ComputeLogNormalizer(model);
};
AlgebraicDecisionTree<Key> log_normalizers =
DecisionTree<Key, double>(likelihoods, computeLogNormalizers);
return std::make_shared<GaussianMixtureFactor>(
continuousParentKeys, discreteParentKeys, likelihoods, log_normalizers);
}
/* ************************************************************************* */
std::set<DiscreteKey> DiscreteKeysAsSet(const DiscreteKeys &discreteKeys) {
std::set<DiscreteKey> s;
s.insert(discreteKeys.begin(), discreteKeys.end()); s.insert(discreteKeys.begin(), discreteKeys.end());
return s; return s;
} }
@ -254,8 +237,7 @@ std::set<DiscreteKey> DiscreteKeysAsSet(const DiscreteKeys &discreteKeys) {
* const Assignment<Key> &, const GaussianConditional::shared_ptr &)> * const Assignment<Key> &, const GaussianConditional::shared_ptr &)>
*/ */
std::function<GaussianConditional::shared_ptr( std::function<GaussianConditional::shared_ptr(
const Assignment<Key> &, const GaussianConditional::shared_ptr &)> HybridGaussianConditional::prunerFunc(const DecisionTreeFactor &discreteProbs) {
GaussianMixture::prunerFunc(const DecisionTreeFactor &discreteProbs) {
// Get the discrete keys as sets for the decision tree // Get the discrete keys as sets for the decision tree
// and the gaussian mixture. // and the gaussian mixture.
auto discreteProbsKeySet = DiscreteKeysAsSet(discreteProbs.discreteKeys()); auto discreteProbsKeySet = DiscreteKeysAsSet(discreteProbs.discreteKeys());
@ -306,7 +288,7 @@ GaussianMixture::prunerFunc(const DecisionTreeFactor &discreteProbs) {
} }
/* *******************************************************************************/ /* *******************************************************************************/
void GaussianMixture::prune(const DecisionTreeFactor &discreteProbs) { void HybridGaussianConditional::prune(const DecisionTreeFactor &discreteProbs) {
// Functional which loops over all assignments and create a set of // Functional which loops over all assignments and create a set of
// GaussianConditionals // GaussianConditionals
auto pruner = prunerFunc(discreteProbs); auto pruner = prunerFunc(discreteProbs);
@ -316,7 +298,7 @@ void GaussianMixture::prune(const DecisionTreeFactor &discreteProbs) {
} }
/* *******************************************************************************/ /* *******************************************************************************/
AlgebraicDecisionTree<Key> GaussianMixture::logProbability( AlgebraicDecisionTree<Key> HybridGaussianConditional::logProbability(
const VectorValues &continuousValues) const { const VectorValues &continuousValues) const {
// functor to calculate (double) logProbability value from // functor to calculate (double) logProbability value from
// GaussianConditional. // GaussianConditional.
@ -334,7 +316,7 @@ AlgebraicDecisionTree<Key> GaussianMixture::logProbability(
} }
/* ************************************************************************* */ /* ************************************************************************* */
double GaussianMixture::conditionalError( double HybridGaussianConditional::conditionalError(
const GaussianConditional::shared_ptr &conditional, const GaussianConditional::shared_ptr &conditional,
const VectorValues &continuousValues) const { const VectorValues &continuousValues) const {
// Check if valid pointer // Check if valid pointer
@ -351,7 +333,7 @@ double GaussianMixture::conditionalError(
} }
/* *******************************************************************************/ /* *******************************************************************************/
AlgebraicDecisionTree<Key> GaussianMixture::errorTree( AlgebraicDecisionTree<Key> HybridGaussianConditional::errorTree(
const VectorValues &continuousValues) const { const VectorValues &continuousValues) const {
auto errorFunc = [&](const GaussianConditional::shared_ptr &conditional) { auto errorFunc = [&](const GaussianConditional::shared_ptr &conditional) {
return conditionalError(conditional, continuousValues); return conditionalError(conditional, continuousValues);
@ -361,20 +343,21 @@ AlgebraicDecisionTree<Key> GaussianMixture::errorTree(
} }
/* *******************************************************************************/ /* *******************************************************************************/
double GaussianMixture::error(const HybridValues &values) const { double HybridGaussianConditional::error(const HybridValues &values) const {
// Directly index to get the conditional, no need to build the whole tree. // Directly index to get the conditional, no need to build the whole tree.
auto conditional = conditionals_(values.discrete()); auto conditional = conditionals_(values.discrete());
return conditionalError(conditional, values.continuous()); return conditionalError(conditional, values.continuous());
} }
/* *******************************************************************************/ /* *******************************************************************************/
double GaussianMixture::logProbability(const HybridValues &values) const { double HybridGaussianConditional::logProbability(
const HybridValues &values) const {
auto conditional = conditionals_(values.discrete()); auto conditional = conditionals_(values.discrete());
return conditional->logProbability(values.continuous()); return conditional->logProbability(values.continuous());
} }
/* *******************************************************************************/ /* *******************************************************************************/
double GaussianMixture::evaluate(const HybridValues &values) const { double HybridGaussianConditional::evaluate(const HybridValues &values) const {
auto conditional = conditionals_(values.discrete()); auto conditional = conditionals_(values.discrete());
return conditional->evaluate(values.continuous()); return conditional->evaluate(values.continuous());
} }

View File

@ -10,7 +10,7 @@
* -------------------------------------------------------------------------- */ * -------------------------------------------------------------------------- */
/** /**
* @file GaussianMixture.h * @file HybridGaussianConditional.h
* @brief A hybrid conditional in the Conditional Linear Gaussian scheme * @brief A hybrid conditional in the Conditional Linear Gaussian scheme
* @author Fan Jiang * @author Fan Jiang
* @author Varun Agrawal * @author Varun Agrawal
@ -23,8 +23,8 @@
#include <gtsam/discrete/DecisionTree.h> #include <gtsam/discrete/DecisionTree.h>
#include <gtsam/discrete/DecisionTreeFactor.h> #include <gtsam/discrete/DecisionTreeFactor.h>
#include <gtsam/discrete/DiscreteKey.h> #include <gtsam/discrete/DiscreteKey.h>
#include <gtsam/hybrid/GaussianMixtureFactor.h>
#include <gtsam/hybrid/HybridFactor.h> #include <gtsam/hybrid/HybridFactor.h>
#include <gtsam/hybrid/HybridGaussianFactor.h>
#include <gtsam/inference/Conditional.h> #include <gtsam/inference/Conditional.h>
#include <gtsam/linear/GaussianConditional.h> #include <gtsam/linear/GaussianConditional.h>
@ -50,14 +50,14 @@ class HybridValues;
* *
* @ingroup hybrid * @ingroup hybrid
*/ */
class GTSAM_EXPORT GaussianMixture class GTSAM_EXPORT HybridGaussianConditional
: public HybridFactor, : public HybridFactor,
public Conditional<HybridFactor, GaussianMixture> { public Conditional<HybridFactor, HybridGaussianConditional> {
public: public:
using This = GaussianMixture; using This = HybridGaussianConditional;
using shared_ptr = std::shared_ptr<GaussianMixture>; using shared_ptr = std::shared_ptr<HybridGaussianConditional>;
using BaseFactor = HybridFactor; using BaseFactor = HybridFactor;
using BaseConditional = Conditional<HybridFactor, GaussianMixture>; using BaseConditional = Conditional<HybridFactor, HybridGaussianConditional>;
/// typedef for Decision Tree of Gaussian Conditionals /// typedef for Decision Tree of Gaussian Conditionals
using Conditionals = DecisionTree<Key, GaussianConditional::shared_ptr>; using Conditionals = DecisionTree<Key, GaussianConditional::shared_ptr>;
@ -67,7 +67,7 @@ class GTSAM_EXPORT GaussianMixture
double logConstant_; ///< log of the normalization constant. double logConstant_; ///< log of the normalization constant.
/** /**
* @brief Convert a GaussianMixture of conditionals into * @brief Convert a HybridGaussianConditional of conditionals into
* a DecisionTree of Gaussian factor graphs. * a DecisionTree of Gaussian factor graphs.
*/ */
GaussianFactorGraphTree asGaussianFactorGraphTree() const; GaussianFactorGraphTree asGaussianFactorGraphTree() const;
@ -88,10 +88,10 @@ class GTSAM_EXPORT GaussianMixture
/// @{ /// @{
/// Default constructor, mainly for serialization. /// Default constructor, mainly for serialization.
GaussianMixture() = default; HybridGaussianConditional() = default;
/** /**
* @brief Construct a new GaussianMixture object. * @brief Construct a new HybridGaussianConditional object.
* *
* @param continuousFrontals the continuous frontals. * @param continuousFrontals the continuous frontals.
* @param continuousParents the continuous parents. * @param continuousParents the continuous parents.
@ -101,10 +101,10 @@ class GTSAM_EXPORT GaussianMixture
* cardinality of the DiscreteKeys in discreteParents, since the * cardinality of the DiscreteKeys in discreteParents, since the
* discreteParents will be used as the labels in the decision tree. * discreteParents will be used as the labels in the decision tree.
*/ */
GaussianMixture(const KeyVector &continuousFrontals, HybridGaussianConditional(const KeyVector &continuousFrontals,
const KeyVector &continuousParents, const KeyVector &continuousParents,
const DiscreteKeys &discreteParents, const DiscreteKeys &discreteParents,
const Conditionals &conditionals); const Conditionals &conditionals);
/** /**
* @brief Make a Gaussian Mixture from a list of Gaussian conditionals * @brief Make a Gaussian Mixture from a list of Gaussian conditionals
@ -114,9 +114,10 @@ class GTSAM_EXPORT GaussianMixture
* @param discreteParents Discrete parents variables * @param discreteParents Discrete parents variables
* @param conditionals List of conditionals * @param conditionals List of conditionals
*/ */
GaussianMixture(KeyVector &&continuousFrontals, KeyVector &&continuousParents, HybridGaussianConditional(
DiscreteKeys &&discreteParents, KeyVector &&continuousFrontals, KeyVector &&continuousParents,
std::vector<GaussianConditional::shared_ptr> &&conditionals); DiscreteKeys &&discreteParents,
std::vector<GaussianConditional::shared_ptr> &&conditionals);
/** /**
* @brief Make a Gaussian Mixture from a list of Gaussian conditionals * @brief Make a Gaussian Mixture from a list of Gaussian conditionals
@ -126,7 +127,7 @@ class GTSAM_EXPORT GaussianMixture
* @param discreteParents Discrete parents variables * @param discreteParents Discrete parents variables
* @param conditionals List of conditionals * @param conditionals List of conditionals
*/ */
GaussianMixture( HybridGaussianConditional(
const KeyVector &continuousFrontals, const KeyVector &continuousParents, const KeyVector &continuousFrontals, const KeyVector &continuousParents,
const DiscreteKeys &discreteParents, const DiscreteKeys &discreteParents,
const std::vector<GaussianConditional::shared_ptr> &conditionals); const std::vector<GaussianConditional::shared_ptr> &conditionals);
@ -140,7 +141,7 @@ class GTSAM_EXPORT GaussianMixture
/// Print utility /// Print utility
void print( void print(
const std::string &s = "GaussianMixture\n", const std::string &s = "HybridGaussianConditional\n",
const KeyFormatter &formatter = DefaultKeyFormatter) const override; const KeyFormatter &formatter = DefaultKeyFormatter) const override;
/// @} /// @}
@ -165,14 +166,14 @@ class GTSAM_EXPORT GaussianMixture
* Create a likelihood factor for a Gaussian mixture, return a Mixture factor * Create a likelihood factor for a Gaussian mixture, return a Mixture factor
* on the parents. * on the parents.
*/ */
std::shared_ptr<GaussianMixtureFactor> likelihood( std::shared_ptr<HybridGaussianFactor> likelihood(
const VectorValues &given) const; const VectorValues &given) const;
/// Getter for the underlying Conditionals DecisionTree /// Getter for the underlying Conditionals DecisionTree
const Conditionals &conditionals() const; const Conditionals &conditionals() const;
/** /**
* @brief Compute logProbability of the GaussianMixture as a tree. * @brief Compute logProbability of the HybridGaussianConditional as a tree.
* *
* @param continuousValues The continuous VectorValues. * @param continuousValues The continuous VectorValues.
* @return AlgebraicDecisionTree<Key> A decision tree with the same keys * @return AlgebraicDecisionTree<Key> A decision tree with the same keys
@ -209,7 +210,7 @@ class GTSAM_EXPORT GaussianMixture
double error(const HybridValues &values) const override; double error(const HybridValues &values) const override;
/** /**
* @brief Compute error of the GaussianMixture as a tree. * @brief Compute error of the HybridGaussianConditional as a tree.
* *
* @param continuousValues The continuous VectorValues. * @param continuousValues The continuous VectorValues.
* @return AlgebraicDecisionTree<Key> A decision tree on the discrete keys * @return AlgebraicDecisionTree<Key> A decision tree on the discrete keys
@ -277,6 +278,7 @@ std::set<DiscreteKey> DiscreteKeysAsSet(const DiscreteKeys &discreteKeys);
// traits // traits
template <> template <>
struct traits<GaussianMixture> : public Testable<GaussianMixture> {}; struct traits<HybridGaussianConditional>
: public Testable<HybridGaussianConditional> {};
} // namespace gtsam } // namespace gtsam

View File

@ -10,7 +10,7 @@
* -------------------------------------------------------------------------- */ * -------------------------------------------------------------------------- */
/** /**
* @file GaussianMixtureFactor.cpp * @file HybridGaussianFactor.cpp
* @brief A set of Gaussian factors indexed by a set of discrete keys. * @brief A set of Gaussian factors indexed by a set of discrete keys.
* @author Fan Jiang * @author Fan Jiang
* @author Varun Agrawal * @author Varun Agrawal
@ -21,7 +21,7 @@
#include <gtsam/base/utilities.h> #include <gtsam/base/utilities.h>
#include <gtsam/discrete/DecisionTree-inl.h> #include <gtsam/discrete/DecisionTree-inl.h>
#include <gtsam/discrete/DecisionTree.h> #include <gtsam/discrete/DecisionTree.h>
#include <gtsam/hybrid/GaussianMixtureFactor.h> #include <gtsam/hybrid/HybridGaussianFactor.h>
#include <gtsam/hybrid/HybridValues.h> #include <gtsam/hybrid/HybridValues.h>
#include <gtsam/linear/GaussianFactor.h> #include <gtsam/linear/GaussianFactor.h>
#include <gtsam/linear/GaussianFactorGraph.h> #include <gtsam/linear/GaussianFactorGraph.h>
@ -34,52 +34,51 @@ namespace gtsam {
* This is done by storing the normalizer value in * This is done by storing the normalizer value in
* the `b` vector as an additional row. * the `b` vector as an additional row.
* *
* @param factors DecisionTree of GaussianFactor shared pointers. * @param factors DecisionTree of GaussianFactors and arbitrary scalars.
* @param logNormalizers Tree of log-normalizers corresponding to each
* Gaussian factor in factors. * Gaussian factor in factors.
* @return GaussianMixtureFactor::Factors * @return HybridGaussianFactor::Factors
*/ */
GaussianMixtureFactor::Factors augment( HybridGaussianFactor::Factors augment(
const GaussianMixtureFactor::Factors &factors, const HybridGaussianFactor::FactorValuePairs &factors) {
const AlgebraicDecisionTree<Key> &logNormalizers) {
// Find the minimum value so we can "proselytize" to positive values. // Find the minimum value so we can "proselytize" to positive values.
// Done because we can't have sqrt of negative numbers. // Done because we can't have sqrt of negative numbers.
double min_log_normalizer = logNormalizers.min(); auto unzipped_pair = unzip(factors);
AlgebraicDecisionTree<Key> log_normalizers = logNormalizers.apply( const HybridGaussianFactor::Factors gaussianFactors = unzipped_pair.first;
[&min_log_normalizer](double n) { return n - min_log_normalizer; }); const AlgebraicDecisionTree<Key> valueTree = unzipped_pair.second;
double min_value = valueTree.min();
AlgebraicDecisionTree<Key> values =
valueTree.apply([&min_value](double n) { return n - min_value; });
// Finally, update the [A|b] matrices. // Finally, update the [A|b] matrices.
auto update = [&log_normalizers]( auto update = [&values](const Assignment<Key> &assignment,
const Assignment<Key> &assignment, const HybridGaussianFactor::sharedFactor &gf) {
const GaussianMixtureFactor::sharedFactor &gf) {
auto jf = std::dynamic_pointer_cast<JacobianFactor>(gf); auto jf = std::dynamic_pointer_cast<JacobianFactor>(gf);
if (!jf) return gf; if (!jf) return gf;
// If the log_normalizer is 0, do nothing // If the log_normalizer is 0, do nothing
if (log_normalizers(assignment) == 0.0) return gf; if (values(assignment) == 0.0) return gf;
GaussianFactorGraph gfg; GaussianFactorGraph gfg;
gfg.push_back(jf); gfg.push_back(jf);
Vector c(1); Vector c(1);
c << std::sqrt(log_normalizers(assignment)); c << std::sqrt(values(assignment));
auto constantFactor = std::make_shared<JacobianFactor>(c); auto constantFactor = std::make_shared<JacobianFactor>(c);
gfg.push_back(constantFactor); gfg.push_back(constantFactor);
return std::dynamic_pointer_cast<GaussianFactor>( return std::dynamic_pointer_cast<GaussianFactor>(
std::make_shared<JacobianFactor>(gfg)); std::make_shared<JacobianFactor>(gfg));
}; };
return factors.apply(update); return gaussianFactors.apply(update);
} }
/* *******************************************************************************/ /* *******************************************************************************/
GaussianMixtureFactor::GaussianMixtureFactor( HybridGaussianFactor::HybridGaussianFactor(const KeyVector &continuousKeys,
const KeyVector &continuousKeys, const DiscreteKeys &discreteKeys, const DiscreteKeys &discreteKeys,
const Factors &factors, const AlgebraicDecisionTree<Key> &logNormalizers) const FactorValuePairs &factors)
: Base(continuousKeys, discreteKeys), : Base(continuousKeys, discreteKeys), factors_(augment(factors)) {}
factors_(augment(factors, logNormalizers)) {}
/* *******************************************************************************/ /* *******************************************************************************/
bool GaussianMixtureFactor::equals(const HybridFactor &lf, double tol) const { bool HybridGaussianFactor::equals(const HybridFactor &lf, double tol) const {
const This *e = dynamic_cast<const This *>(&lf); const This *e = dynamic_cast<const This *>(&lf);
if (e == nullptr) return false; if (e == nullptr) return false;
@ -96,10 +95,10 @@ bool GaussianMixtureFactor::equals(const HybridFactor &lf, double tol) const {
} }
/* *******************************************************************************/ /* *******************************************************************************/
void GaussianMixtureFactor::print(const std::string &s, void HybridGaussianFactor::print(const std::string &s,
const KeyFormatter &formatter) const { const KeyFormatter &formatter) const {
std::cout << (s.empty() ? "" : s + "\n"); std::cout << (s.empty() ? "" : s + "\n");
std::cout << "GaussianMixtureFactor" << std::endl; std::cout << "HybridGaussianFactor" << std::endl;
HybridFactor::print("", formatter); HybridFactor::print("", formatter);
std::cout << "{\n"; std::cout << "{\n";
if (factors_.empty()) { if (factors_.empty()) {
@ -122,13 +121,13 @@ void GaussianMixtureFactor::print(const std::string &s,
} }
/* *******************************************************************************/ /* *******************************************************************************/
GaussianMixtureFactor::sharedFactor GaussianMixtureFactor::operator()( HybridGaussianFactor::sharedFactor HybridGaussianFactor::operator()(
const DiscreteValues &assignment) const { const DiscreteValues &assignment) const {
return factors_(assignment); return factors_(assignment);
} }
/* *******************************************************************************/ /* *******************************************************************************/
GaussianFactorGraphTree GaussianMixtureFactor::add( GaussianFactorGraphTree HybridGaussianFactor::add(
const GaussianFactorGraphTree &sum) const { const GaussianFactorGraphTree &sum) const {
using Y = GaussianFactorGraph; using Y = GaussianFactorGraph;
auto add = [](const Y &graph1, const Y &graph2) { auto add = [](const Y &graph1, const Y &graph2) {
@ -141,14 +140,14 @@ GaussianFactorGraphTree GaussianMixtureFactor::add(
} }
/* *******************************************************************************/ /* *******************************************************************************/
GaussianFactorGraphTree GaussianMixtureFactor::asGaussianFactorGraphTree() GaussianFactorGraphTree HybridGaussianFactor::asGaussianFactorGraphTree()
const { const {
auto wrap = [](const sharedFactor &gf) { return GaussianFactorGraph{gf}; }; auto wrap = [](const sharedFactor &gf) { return GaussianFactorGraph{gf}; };
return {factors_, wrap}; return {factors_, wrap};
} }
/* *******************************************************************************/ /* *******************************************************************************/
AlgebraicDecisionTree<Key> GaussianMixtureFactor::errorTree( AlgebraicDecisionTree<Key> HybridGaussianFactor::errorTree(
const VectorValues &continuousValues) const { const VectorValues &continuousValues) const {
// functor to convert from sharedFactor to double error value. // functor to convert from sharedFactor to double error value.
auto errorFunc = [&continuousValues](const sharedFactor &gf) { auto errorFunc = [&continuousValues](const sharedFactor &gf) {
@ -159,7 +158,7 @@ AlgebraicDecisionTree<Key> GaussianMixtureFactor::errorTree(
} }
/* *******************************************************************************/ /* *******************************************************************************/
double GaussianMixtureFactor::error(const HybridValues &values) const { double HybridGaussianFactor::error(const HybridValues &values) const {
const sharedFactor gf = factors_(values.discrete()); const sharedFactor gf = factors_(values.discrete());
return gf->error(values.continuous()); return gf->error(values.continuous());
} }

View File

@ -10,7 +10,7 @@
* -------------------------------------------------------------------------- */ * -------------------------------------------------------------------------- */
/** /**
* @file GaussianMixtureFactor.h * @file HybridGaussianFactor.h
* @brief A set of GaussianFactors, indexed by a set of discrete keys. * @brief A set of GaussianFactors, indexed by a set of discrete keys.
* @author Fan Jiang * @author Fan Jiang
* @author Varun Agrawal * @author Varun Agrawal
@ -33,6 +33,9 @@ class HybridValues;
class DiscreteValues; class DiscreteValues;
class VectorValues; class VectorValues;
/// Alias for pair of GaussianFactor::shared_pointer and a double value.
using GaussianFactorValuePair = std::pair<GaussianFactor::shared_ptr, double>;
/** /**
* @brief Implementation of a discrete conditional mixture factor. * @brief Implementation of a discrete conditional mixture factor.
* Implements a joint discrete-continuous factor where the discrete variable * Implements a joint discrete-continuous factor where the discrete variable
@ -44,15 +47,17 @@ class VectorValues;
* *
* @ingroup hybrid * @ingroup hybrid
*/ */
class GTSAM_EXPORT GaussianMixtureFactor : public HybridFactor { class GTSAM_EXPORT HybridGaussianFactor : public HybridFactor {
public: public:
using Base = HybridFactor; using Base = HybridFactor;
using This = GaussianMixtureFactor; using This = HybridGaussianFactor;
using shared_ptr = std::shared_ptr<This>; using shared_ptr = std::shared_ptr<This>;
using sharedFactor = std::shared_ptr<GaussianFactor>; using sharedFactor = std::shared_ptr<GaussianFactor>;
/// typedef for Decision Tree of Gaussian factors and log-constant. /// typedef for Decision Tree of Gaussian factors and arbitrary value.
using FactorValuePairs = DecisionTree<Key, GaussianFactorValuePair>;
/// typedef for Decision Tree of Gaussian factors.
using Factors = DecisionTree<Key, sharedFactor>; using Factors = DecisionTree<Key, sharedFactor>;
private: private:
@ -72,7 +77,7 @@ class GTSAM_EXPORT GaussianMixtureFactor : public HybridFactor {
/// @{ /// @{
/// Default constructor, mainly for serialization. /// Default constructor, mainly for serialization.
GaussianMixtureFactor() = default; HybridGaussianFactor() = default;
/** /**
* @brief Construct a new Gaussian mixture factor. * @brief Construct a new Gaussian mixture factor.
@ -80,34 +85,26 @@ class GTSAM_EXPORT GaussianMixtureFactor : public HybridFactor {
* @param continuousKeys A vector of keys representing continuous variables. * @param continuousKeys A vector of keys representing continuous variables.
* @param discreteKeys A vector of keys representing discrete variables and * @param discreteKeys A vector of keys representing discrete variables and
* their cardinalities. * their cardinalities.
* @param factors The decision tree of Gaussian factors stored * @param factors The decision tree of Gaussian factors and arbitrary scalars.
* as the mixture density.
* @param logNormalizers Tree of log-normalizers corresponding to each
* Gaussian factor in factors.
*/ */
GaussianMixtureFactor(const KeyVector &continuousKeys, HybridGaussianFactor(const KeyVector &continuousKeys,
const DiscreteKeys &discreteKeys, const DiscreteKeys &discreteKeys,
const Factors &factors, const FactorValuePairs &factors);
const AlgebraicDecisionTree<Key> &logNormalizers =
AlgebraicDecisionTree<Key>(0.0));
/** /**
* @brief Construct a new GaussianMixtureFactor object using a vector of * @brief Construct a new HybridGaussianFactor object using a vector of
* GaussianFactor shared pointers. * GaussianFactor shared pointers.
* *
* @param continuousKeys Vector of keys for continuous factors. * @param continuousKeys Vector of keys for continuous factors.
* @param discreteKeys Vector of discrete keys. * @param discreteKeys Vector of discrete keys.
* @param factors Vector of gaussian factor shared pointers. * @param factors Vector of gaussian factor shared pointers
* @param logNormalizers Tree of log-normalizers corresponding to each * and arbitrary scalars.
* Gaussian factor in factors.
*/ */
GaussianMixtureFactor(const KeyVector &continuousKeys, HybridGaussianFactor(const KeyVector &continuousKeys,
const DiscreteKeys &discreteKeys, const DiscreteKeys &discreteKeys,
const std::vector<sharedFactor> &factors, const std::vector<GaussianFactorValuePair> &factors)
const AlgebraicDecisionTree<Key> &logNormalizers = : HybridGaussianFactor(continuousKeys, discreteKeys,
AlgebraicDecisionTree<Key>(0.0)) FactorValuePairs(discreteKeys, factors)) {}
: GaussianMixtureFactor(continuousKeys, discreteKeys,
Factors(discreteKeys, factors), logNormalizers) {}
/// @} /// @}
/// @name Testable /// @name Testable
@ -136,7 +133,7 @@ class GTSAM_EXPORT GaussianMixtureFactor : public HybridFactor {
GaussianFactorGraphTree add(const GaussianFactorGraphTree &sum) const; GaussianFactorGraphTree add(const GaussianFactorGraphTree &sum) const;
/** /**
* @brief Compute error of the GaussianMixtureFactor as a tree. * @brief Compute error of the HybridGaussianFactor as a tree.
* *
* @param continuousValues The continuous VectorValues. * @param continuousValues The continuous VectorValues.
* @return AlgebraicDecisionTree<Key> A decision tree with the same keys * @return AlgebraicDecisionTree<Key> A decision tree with the same keys
@ -154,9 +151,9 @@ class GTSAM_EXPORT GaussianMixtureFactor : public HybridFactor {
/// Getter for GaussianFactor decision tree /// Getter for GaussianFactor decision tree
const Factors &factors() const { return factors_; } const Factors &factors() const { return factors_; }
/// Add MixtureFactor to a Sum, syntactic sugar. /// Add HybridNonlinearFactor to a Sum, syntactic sugar.
friend GaussianFactorGraphTree &operator+=( friend GaussianFactorGraphTree &operator+=(
GaussianFactorGraphTree &sum, const GaussianMixtureFactor &factor) { GaussianFactorGraphTree &sum, const HybridGaussianFactor &factor) {
sum = factor.add(sum); sum = factor.add(sum);
return sum; return sum;
} }
@ -176,8 +173,7 @@ class GTSAM_EXPORT GaussianMixtureFactor : public HybridFactor {
// traits // traits
template <> template <>
struct traits<GaussianMixtureFactor> : public Testable<GaussianMixtureFactor> { struct traits<HybridGaussianFactor> : public Testable<HybridGaussianFactor> {};
};
/** /**
* @brief Helper function to compute the sqrt(|2πΣ|) normalizer values * @brief Helper function to compute the sqrt(|2πΣ|) normalizer values

View File

@ -23,11 +23,11 @@
#include <gtsam/discrete/DiscreteEliminationTree.h> #include <gtsam/discrete/DiscreteEliminationTree.h>
#include <gtsam/discrete/DiscreteFactorGraph.h> #include <gtsam/discrete/DiscreteFactorGraph.h>
#include <gtsam/discrete/DiscreteJunctionTree.h> #include <gtsam/discrete/DiscreteJunctionTree.h>
#include <gtsam/hybrid/GaussianMixture.h>
#include <gtsam/hybrid/GaussianMixtureFactor.h>
#include <gtsam/hybrid/HybridConditional.h> #include <gtsam/hybrid/HybridConditional.h>
#include <gtsam/hybrid/HybridEliminationTree.h> #include <gtsam/hybrid/HybridEliminationTree.h>
#include <gtsam/hybrid/HybridFactor.h> #include <gtsam/hybrid/HybridFactor.h>
#include <gtsam/hybrid/HybridGaussianConditional.h>
#include <gtsam/hybrid/HybridGaussianFactor.h>
#include <gtsam/hybrid/HybridGaussianFactorGraph.h> #include <gtsam/hybrid/HybridGaussianFactorGraph.h>
#include <gtsam/hybrid/HybridJunctionTree.h> #include <gtsam/hybrid/HybridJunctionTree.h>
#include <gtsam/inference/EliminateableFactorGraph-inst.h> #include <gtsam/inference/EliminateableFactorGraph-inst.h>
@ -92,13 +92,13 @@ void HybridGaussianFactorGraph::printErrors(
// Clear the stringstream // Clear the stringstream
ss.str(std::string()); ss.str(std::string());
if (auto gmf = std::dynamic_pointer_cast<GaussianMixtureFactor>(factor)) { if (auto hgf = std::dynamic_pointer_cast<HybridGaussianFactor>(factor)) {
if (factor == nullptr) { if (factor == nullptr) {
std::cout << "nullptr" std::cout << "nullptr"
<< "\n"; << "\n";
} else { } else {
gmf->operator()(values.discrete())->print(ss.str(), keyFormatter); hgf->operator()(values.discrete())->print(ss.str(), keyFormatter);
std::cout << "error = " << gmf->error(values) << std::endl; std::cout << "error = " << factor->error(values) << std::endl;
} }
} else if (auto hc = std::dynamic_pointer_cast<HybridConditional>(factor)) { } else if (auto hc = std::dynamic_pointer_cast<HybridConditional>(factor)) {
if (factor == nullptr) { if (factor == nullptr) {
@ -178,9 +178,9 @@ GaussianFactorGraphTree HybridGaussianFactorGraph::assembleGraphTree() const {
// TODO(dellaert): just use a virtual method defined in HybridFactor. // TODO(dellaert): just use a virtual method defined in HybridFactor.
if (auto gf = dynamic_pointer_cast<GaussianFactor>(f)) { if (auto gf = dynamic_pointer_cast<GaussianFactor>(f)) {
result = addGaussian(result, gf); result = addGaussian(result, gf);
} else if (auto gmf = dynamic_pointer_cast<GaussianMixtureFactor>(f)) { } else if (auto gmf = dynamic_pointer_cast<HybridGaussianFactor>(f)) {
result = gmf->add(result); result = gmf->add(result);
} else if (auto gm = dynamic_pointer_cast<GaussianMixture>(f)) { } else if (auto gm = dynamic_pointer_cast<HybridGaussianConditional>(f)) {
result = gm->add(result); result = gm->add(result);
} else if (auto hc = dynamic_pointer_cast<HybridConditional>(f)) { } else if (auto hc = dynamic_pointer_cast<HybridConditional>(f)) {
if (auto gm = hc->asMixture()) { if (auto gm = hc->asMixture()) {
@ -258,8 +258,8 @@ discreteElimination(const HybridGaussianFactorGraph &factors,
for (auto &f : factors) { for (auto &f : factors) {
if (auto df = dynamic_pointer_cast<DiscreteFactor>(f)) { if (auto df = dynamic_pointer_cast<DiscreteFactor>(f)) {
dfg.push_back(df); dfg.push_back(df);
} else if (auto gmf = dynamic_pointer_cast<GaussianMixtureFactor>(f)) { } else if (auto gmf = dynamic_pointer_cast<HybridGaussianFactor>(f)) {
// Case where we have a GaussianMixtureFactor with no continuous keys. // Case where we have a HybridGaussianFactor with no continuous keys.
// In this case, compute discrete probabilities. // In this case, compute discrete probabilities.
auto logProbability = auto logProbability =
[&](const GaussianFactor::shared_ptr &factor) -> double { [&](const GaussianFactor::shared_ptr &factor) -> double {
@ -309,7 +309,7 @@ GaussianFactorGraphTree removeEmpty(const GaussianFactorGraphTree &sum) {
/* ************************************************************************ */ /* ************************************************************************ */
using Result = std::pair<std::shared_ptr<GaussianConditional>, using Result = std::pair<std::shared_ptr<GaussianConditional>,
GaussianMixtureFactor::sharedFactor>; HybridGaussianFactor::sharedFactor>;
/** /**
* Compute the probability p(μ;m) = exp(-error(μ;m)) * sqrt(det(2π Σ_m) * Compute the probability p(μ;m) = exp(-error(μ;m)) * sqrt(det(2π Σ_m)
@ -341,14 +341,14 @@ static std::shared_ptr<Factor> createDiscreteFactor(
return std::make_shared<DecisionTreeFactor>(discreteSeparator, probabilities); return std::make_shared<DecisionTreeFactor>(discreteSeparator, probabilities);
} }
// Create GaussianMixtureFactor on the separator, taking care to correct // Create HybridGaussianFactor on the separator, taking care to correct
// for conditional constants. // for conditional constants.
static std::shared_ptr<Factor> createGaussianMixtureFactor( static std::shared_ptr<Factor> createHybridGaussianFactor(
const DecisionTree<Key, Result> &eliminationResults, const DecisionTree<Key, Result> &eliminationResults,
const KeyVector &continuousSeparator, const KeyVector &continuousSeparator,
const DiscreteKeys &discreteSeparator) { const DiscreteKeys &discreteSeparator) {
// Correct for the normalization constant used up by the conditional // Correct for the normalization constant used up by the conditional
auto correct = [&](const Result &pair) -> GaussianFactor::shared_ptr { auto correct = [&](const Result &pair) -> GaussianFactorValuePair {
const auto &[conditional, factor] = pair; const auto &[conditional, factor] = pair;
if (factor) { if (factor) {
auto hf = std::dynamic_pointer_cast<HessianFactor>(factor); auto hf = std::dynamic_pointer_cast<HessianFactor>(factor);
@ -357,13 +357,13 @@ static std::shared_ptr<Factor> createGaussianMixtureFactor(
// as per the Hessian definition // as per the Hessian definition
hf->constantTerm() += 2.0 * conditional->logNormalizationConstant(); hf->constantTerm() += 2.0 * conditional->logNormalizationConstant();
} }
return factor; return {factor, 0.0};
}; };
DecisionTree<Key, GaussianFactor::shared_ptr> newFactors(eliminationResults, DecisionTree<Key, GaussianFactorValuePair> newFactors(eliminationResults,
correct); correct);
return std::make_shared<GaussianMixtureFactor>(continuousSeparator, return std::make_shared<HybridGaussianFactor>(continuousSeparator,
discreteSeparator, newFactors); discreteSeparator, newFactors);
} }
static std::pair<HybridConditional::shared_ptr, std::shared_ptr<Factor>> static std::pair<HybridConditional::shared_ptr, std::shared_ptr<Factor>>
@ -400,18 +400,18 @@ hybridElimination(const HybridGaussianFactorGraph &factors,
DecisionTree<Key, Result> eliminationResults(factorGraphTree, eliminate); DecisionTree<Key, Result> eliminationResults(factorGraphTree, eliminate);
// If there are no more continuous parents we create a DiscreteFactor with the // If there are no more continuous parents we create a DiscreteFactor with the
// error for each discrete choice. Otherwise, create a GaussianMixtureFactor // error for each discrete choice. Otherwise, create a HybridGaussianFactor
// on the separator, taking care to correct for conditional constants. // on the separator, taking care to correct for conditional constants.
auto newFactor = auto newFactor =
continuousSeparator.empty() continuousSeparator.empty()
? createDiscreteFactor(eliminationResults, discreteSeparator) ? createDiscreteFactor(eliminationResults, discreteSeparator)
: createGaussianMixtureFactor(eliminationResults, continuousSeparator, : createHybridGaussianFactor(eliminationResults, continuousSeparator,
discreteSeparator); discreteSeparator);
// Create the GaussianMixture from the conditionals // Create the HybridGaussianConditional from the conditionals
GaussianMixture::Conditionals conditionals( HybridGaussianConditional::Conditionals conditionals(
eliminationResults, [](const Result &pair) { return pair.first; }); eliminationResults, [](const Result &pair) { return pair.first; });
auto gaussianMixture = std::make_shared<GaussianMixture>( auto gaussianMixture = std::make_shared<HybridGaussianConditional>(
frontalKeys, continuousSeparator, discreteSeparator, conditionals); frontalKeys, continuousSeparator, discreteSeparator, conditionals);
return {std::make_shared<HybridConditional>(gaussianMixture), newFactor}; return {std::make_shared<HybridConditional>(gaussianMixture), newFactor};
@ -458,7 +458,7 @@ EliminateHybrid(const HybridGaussianFactorGraph &factors,
// Because of all these reasons, we carefully consider how to // Because of all these reasons, we carefully consider how to
// implement the hybrid factors so that we do not get poor performance. // implement the hybrid factors so that we do not get poor performance.
// The first thing is how to represent the GaussianMixture. // The first thing is how to represent the HybridGaussianConditional.
// A very possible scenario is that the incoming factors will have different // A very possible scenario is that the incoming factors will have different
// levels of discrete keys. For example, imagine we are going to eliminate the // levels of discrete keys. For example, imagine we are going to eliminate the
// fragment: $\phi(x1,c1,c2)$, $\phi(x1,c2,c3)$, which is perfectly valid. // fragment: $\phi(x1,c1,c2)$, $\phi(x1,c2,c3)$, which is perfectly valid.
@ -549,7 +549,7 @@ AlgebraicDecisionTree<Key> HybridGaussianFactorGraph::errorTree(
f = hc->inner(); f = hc->inner();
} }
if (auto gaussianMixture = dynamic_pointer_cast<GaussianMixtureFactor>(f)) { if (auto gaussianMixture = dynamic_pointer_cast<HybridGaussianFactor>(f)) {
// Compute factor error and add it. // Compute factor error and add it.
error_tree = error_tree + gaussianMixture->errorTree(continuousValues); error_tree = error_tree + gaussianMixture->errorTree(continuousValues);
} else if (auto gaussian = dynamic_pointer_cast<GaussianFactor>(f)) { } else if (auto gaussian = dynamic_pointer_cast<GaussianFactor>(f)) {
@ -597,10 +597,10 @@ GaussianFactorGraph HybridGaussianFactorGraph::operator()(
gfg.push_back(gf); gfg.push_back(gf);
} else if (auto gc = std::dynamic_pointer_cast<GaussianConditional>(f)) { } else if (auto gc = std::dynamic_pointer_cast<GaussianConditional>(f)) {
gfg.push_back(gf); gfg.push_back(gf);
} else if (auto gmf = std::dynamic_pointer_cast<GaussianMixtureFactor>(f)) { } else if (auto hgf = std::dynamic_pointer_cast<HybridGaussianFactor>(f)) {
gfg.push_back((*gmf)(assignment)); gfg.push_back((*hgf)(assignment));
} else if (auto gm = dynamic_pointer_cast<GaussianMixture>(f)) { } else if (auto hgc = dynamic_pointer_cast<HybridGaussianConditional>(f)) {
gfg.push_back((*gm)(assignment)); gfg.push_back((*hgc)(assignment));
} else { } else {
continue; continue;
} }

View File

@ -18,9 +18,9 @@
#pragma once #pragma once
#include <gtsam/hybrid/GaussianMixtureFactor.h>
#include <gtsam/hybrid/HybridFactor.h> #include <gtsam/hybrid/HybridFactor.h>
#include <gtsam/hybrid/HybridFactorGraph.h> #include <gtsam/hybrid/HybridFactorGraph.h>
#include <gtsam/hybrid/HybridGaussianFactor.h>
#include <gtsam/inference/EliminateableFactorGraph.h> #include <gtsam/inference/EliminateableFactorGraph.h>
#include <gtsam/inference/FactorGraph.h> #include <gtsam/inference/FactorGraph.h>
#include <gtsam/inference/Ordering.h> #include <gtsam/inference/Ordering.h>
@ -221,7 +221,6 @@ class GTSAM_EXPORT HybridGaussianFactorGraph
/// Get the GaussianFactorGraph at a given discrete assignment. /// Get the GaussianFactorGraph at a given discrete assignment.
GaussianFactorGraph operator()(const DiscreteValues& assignment) const; GaussianFactorGraph operator()(const DiscreteValues& assignment) const;
}; };
} // namespace gtsam } // namespace gtsam

View File

@ -51,11 +51,10 @@ class HybridEliminationTree;
*/ */
class GTSAM_EXPORT HybridJunctionTree class GTSAM_EXPORT HybridJunctionTree
: public JunctionTree<HybridBayesTree, HybridGaussianFactorGraph> { : public JunctionTree<HybridBayesTree, HybridGaussianFactorGraph> {
public: public:
typedef JunctionTree<HybridBayesTree, HybridGaussianFactorGraph> typedef JunctionTree<HybridBayesTree, HybridGaussianFactorGraph>
Base; ///< Base class Base; ///< Base class
typedef HybridJunctionTree This; ///< This class typedef HybridJunctionTree This; ///< This class
typedef std::shared_ptr<This> shared_ptr; ///< Shared pointer to this class typedef std::shared_ptr<This> shared_ptr; ///< Shared pointer to this class
/** /**

View File

@ -10,7 +10,7 @@
* -------------------------------------------------------------------------- */ * -------------------------------------------------------------------------- */
/** /**
* @file MixtureFactor.h * @file HybridNonlinearFactor.h
* @brief Nonlinear Mixture factor of continuous and discrete. * @brief Nonlinear Mixture factor of continuous and discrete.
* @author Kevin Doherty, kdoherty@mit.edu * @author Kevin Doherty, kdoherty@mit.edu
* @author Varun Agrawal * @author Varun Agrawal
@ -20,7 +20,7 @@
#pragma once #pragma once
#include <gtsam/discrete/DiscreteValues.h> #include <gtsam/discrete/DiscreteValues.h>
#include <gtsam/hybrid/GaussianMixtureFactor.h> #include <gtsam/hybrid/HybridGaussianFactor.h>
#include <gtsam/hybrid/HybridValues.h> #include <gtsam/hybrid/HybridValues.h>
#include <gtsam/nonlinear/NonlinearFactor.h> #include <gtsam/nonlinear/NonlinearFactor.h>
#include <gtsam/nonlinear/NonlinearFactorGraph.h> #include <gtsam/nonlinear/NonlinearFactorGraph.h>
@ -33,6 +33,9 @@
namespace gtsam { namespace gtsam {
/// Alias for a NonlinearFactor shared pointer and double scalar pair.
using NonlinearFactorValuePair = std::pair<NonlinearFactor::shared_ptr, double>;
/** /**
* @brief Implementation of a discrete conditional mixture factor. * @brief Implementation of a discrete conditional mixture factor.
* *
@ -44,18 +47,18 @@ namespace gtsam {
* one of (NonlinearFactor, GaussianFactor) which can then be checked to perform * one of (NonlinearFactor, GaussianFactor) which can then be checked to perform
* the correct operation. * the correct operation.
*/ */
class MixtureFactor : public HybridFactor { class HybridNonlinearFactor : public HybridFactor {
public: public:
using Base = HybridFactor; using Base = HybridFactor;
using This = MixtureFactor; using This = HybridNonlinearFactor;
using shared_ptr = std::shared_ptr<MixtureFactor>; using shared_ptr = std::shared_ptr<HybridNonlinearFactor>;
using sharedFactor = std::shared_ptr<NonlinearFactor>; using sharedFactor = std::shared_ptr<NonlinearFactor>;
/** /**
* @brief typedef for DecisionTree which has Keys as node labels and * @brief typedef for DecisionTree which has Keys as node labels and
* NonlinearFactor as leaf nodes. * pairs of NonlinearFactor & an arbitrary scalar as leaf nodes.
*/ */
using Factors = DecisionTree<Key, sharedFactor>; using Factors = DecisionTree<Key, NonlinearFactorValuePair>;
private: private:
/// Decision tree of Gaussian factors indexed by discrete keys. /// Decision tree of Gaussian factors indexed by discrete keys.
@ -63,7 +66,7 @@ class MixtureFactor : public HybridFactor {
bool normalized_; bool normalized_;
public: public:
MixtureFactor() = default; HybridNonlinearFactor() = default;
/** /**
* @brief Construct from Decision tree. * @brief Construct from Decision tree.
@ -74,8 +77,8 @@ class MixtureFactor : public HybridFactor {
* @param normalized Flag indicating if the factor error is already * @param normalized Flag indicating if the factor error is already
* normalized. * normalized.
*/ */
MixtureFactor(const KeyVector& keys, const DiscreteKeys& discreteKeys, HybridNonlinearFactor(const KeyVector& keys, const DiscreteKeys& discreteKeys,
const Factors& factors, bool normalized = false) const Factors& factors, bool normalized = false)
: Base(keys, discreteKeys), factors_(factors), normalized_(normalized) {} : Base(keys, discreteKeys), factors_(factors), normalized_(normalized) {}
/** /**
@ -90,28 +93,29 @@ class MixtureFactor : public HybridFactor {
* Will be typecast to NonlinearFactor shared pointers. * Will be typecast to NonlinearFactor shared pointers.
* @param keys Vector of keys for continuous factors. * @param keys Vector of keys for continuous factors.
* @param discreteKeys Vector of discrete keys. * @param discreteKeys Vector of discrete keys.
* @param factors Vector of nonlinear factors. * @param factors Vector of nonlinear factor and scalar pairs.
* @param normalized Flag indicating if the factor error is already * @param normalized Flag indicating if the factor error is already
* normalized. * normalized.
*/ */
template <typename FACTOR> template <typename FACTOR>
MixtureFactor(const KeyVector& keys, const DiscreteKeys& discreteKeys, HybridNonlinearFactor(
const std::vector<std::shared_ptr<FACTOR>>& factors, const KeyVector& keys, const DiscreteKeys& discreteKeys,
bool normalized = false) const std::vector<std::pair<std::shared_ptr<FACTOR>, double>>& factors,
bool normalized = false)
: Base(keys, discreteKeys), normalized_(normalized) { : Base(keys, discreteKeys), normalized_(normalized) {
std::vector<NonlinearFactor::shared_ptr> nonlinear_factors; std::vector<NonlinearFactorValuePair> nonlinear_factors;
KeySet continuous_keys_set(keys.begin(), keys.end()); KeySet continuous_keys_set(keys.begin(), keys.end());
KeySet factor_keys_set; KeySet factor_keys_set;
for (auto&& f : factors) { for (auto&& [f, val] : factors) {
// Insert all factor continuous keys in the continuous keys set. // Insert all factor continuous keys in the continuous keys set.
std::copy(f->keys().begin(), f->keys().end(), std::copy(f->keys().begin(), f->keys().end(),
std::inserter(factor_keys_set, factor_keys_set.end())); std::inserter(factor_keys_set, factor_keys_set.end()));
if (auto nf = std::dynamic_pointer_cast<NonlinearFactor>(f)) { if (auto nf = std::dynamic_pointer_cast<NonlinearFactor>(f)) {
nonlinear_factors.push_back(nf); nonlinear_factors.emplace_back(nf, val);
} else { } else {
throw std::runtime_error( throw std::runtime_error(
"Factors passed into MixtureFactor need to be nonlinear!"); "Factors passed into HybridNonlinearFactor need to be nonlinear!");
} }
} }
factors_ = Factors(discreteKeys, nonlinear_factors); factors_ = Factors(discreteKeys, nonlinear_factors);
@ -124,7 +128,7 @@ class MixtureFactor : public HybridFactor {
} }
/** /**
* @brief Compute error of the MixtureFactor as a tree. * @brief Compute error of the HybridNonlinearFactor as a tree.
* *
* @param continuousValues The continuous values for which to compute the * @param continuousValues The continuous values for which to compute the
* error. * error.
@ -133,9 +137,11 @@ class MixtureFactor : public HybridFactor {
*/ */
AlgebraicDecisionTree<Key> errorTree(const Values& continuousValues) const { AlgebraicDecisionTree<Key> errorTree(const Values& continuousValues) const {
// functor to convert from sharedFactor to double error value. // functor to convert from sharedFactor to double error value.
auto errorFunc = [continuousValues](const sharedFactor& factor) { auto errorFunc =
return factor->error(continuousValues); [continuousValues](const std::pair<sharedFactor, double>& f) {
}; auto [factor, val] = f;
return factor->error(continuousValues) + (0.5 * val * val);
};
DecisionTree<Key, double> result(factors_, errorFunc); DecisionTree<Key, double> result(factors_, errorFunc);
return result; return result;
} }
@ -150,12 +156,10 @@ class MixtureFactor : public HybridFactor {
double error(const Values& continuousValues, double error(const Values& continuousValues,
const DiscreteValues& discreteValues) const { const DiscreteValues& discreteValues) const {
// Retrieve the factor corresponding to the assignment in discreteValues. // Retrieve the factor corresponding to the assignment in discreteValues.
auto factor = factors_(discreteValues); auto [factor, val] = factors_(discreteValues);
// Compute the error for the selected factor // Compute the error for the selected factor
const double factorError = factor->error(continuousValues); const double factorError = factor->error(continuousValues);
if (normalized_) return factorError; return factorError + (0.5 * val * val);
return factorError + this->nonlinearFactorLogNormalizingConstant(
factor, continuousValues);
} }
/** /**
@ -175,7 +179,7 @@ class MixtureFactor : public HybridFactor {
*/ */
size_t dim() const { size_t dim() const {
const auto assignments = DiscreteValues::CartesianProduct(discreteKeys_); const auto assignments = DiscreteValues::CartesianProduct(discreteKeys_);
auto factor = factors_(assignments.at(0)); auto [factor, val] = factors_(assignments.at(0));
return factor->dim(); return factor->dim();
} }
@ -188,10 +192,12 @@ class MixtureFactor : public HybridFactor {
const KeyFormatter& keyFormatter = DefaultKeyFormatter) const override { const KeyFormatter& keyFormatter = DefaultKeyFormatter) const override {
std::cout << (s.empty() ? "" : s + " "); std::cout << (s.empty() ? "" : s + " ");
Base::print("", keyFormatter); Base::print("", keyFormatter);
std::cout << "\nMixtureFactor\n"; std::cout << "\nHybridNonlinearFactor\n";
auto valueFormatter = [](const sharedFactor& v) { auto valueFormatter = [](const std::pair<sharedFactor, double>& v) {
if (v) { auto [factor, val] = v;
return "Nonlinear factor on " + std::to_string(v->size()) + " keys"; if (factor) {
return "Nonlinear factor on " + std::to_string(factor->size()) +
" keys";
} else { } else {
return std::string("nullptr"); return std::string("nullptr");
} }
@ -201,17 +207,20 @@ class MixtureFactor : public HybridFactor {
/// Check equality /// Check equality
bool equals(const HybridFactor& other, double tol = 1e-9) const override { bool equals(const HybridFactor& other, double tol = 1e-9) const override {
// We attempt a dynamic cast from HybridFactor to MixtureFactor. If it // We attempt a dynamic cast from HybridFactor to HybridNonlinearFactor. If
// fails, return false. // it fails, return false.
if (!dynamic_cast<const MixtureFactor*>(&other)) return false; if (!dynamic_cast<const HybridNonlinearFactor*>(&other)) return false;
// If the cast is successful, we'll properly construct a MixtureFactor // If the cast is successful, we'll properly construct a
// object from `other` // HybridNonlinearFactor object from `other`
const MixtureFactor& f(static_cast<const MixtureFactor&>(other)); const HybridNonlinearFactor& f(
static_cast<const HybridNonlinearFactor&>(other));
// Ensure that this MixtureFactor and `f` have the same `factors_`. // Ensure that this HybridNonlinearFactor and `f` have the same `factors_`.
auto compare = [tol](const sharedFactor& a, const sharedFactor& b) { auto compare = [tol](const std::pair<sharedFactor, double>& a,
return traits<NonlinearFactor>::Equals(*a, *b, tol); const std::pair<sharedFactor, double>& b) {
return traits<NonlinearFactor>::Equals(*a.first, *b.first, tol) &&
(a.second == b.second);
}; };
if (!factors_.equals(f.factors_, compare)) return false; if (!factors_.equals(f.factors_, compare)) return false;
@ -229,22 +238,25 @@ class MixtureFactor : public HybridFactor {
GaussianFactor::shared_ptr linearize( GaussianFactor::shared_ptr linearize(
const Values& continuousValues, const Values& continuousValues,
const DiscreteValues& discreteValues) const { const DiscreteValues& discreteValues) const {
auto factor = factors_(discreteValues); auto factor = factors_(discreteValues).first;
return factor->linearize(continuousValues); return factor->linearize(continuousValues);
} }
/// Linearize all the continuous factors to get a GaussianMixtureFactor. /// Linearize all the continuous factors to get a HybridGaussianFactor.
std::shared_ptr<GaussianMixtureFactor> linearize( std::shared_ptr<HybridGaussianFactor> linearize(
const Values& continuousValues) const { const Values& continuousValues) const {
// functional to linearize each factor in the decision tree // functional to linearize each factor in the decision tree
auto linearizeDT = [continuousValues](const sharedFactor& factor) { auto linearizeDT =
return factor->linearize(continuousValues); [continuousValues](const std::pair<sharedFactor, double>& f)
-> GaussianFactorValuePair {
auto [factor, val] = f;
return {factor->linearize(continuousValues), val};
}; };
DecisionTree<Key, GaussianFactor::shared_ptr> linearized_factors( DecisionTree<Key, std::pair<GaussianFactor::shared_ptr, double>>
factors_, linearizeDT); linearized_factors(factors_, linearizeDT);
return std::make_shared<GaussianMixtureFactor>( return std::make_shared<HybridGaussianFactor>(
continuousKeys_, discreteKeys_, linearized_factors); continuousKeys_, discreteKeys_, linearized_factors);
} }

View File

@ -18,10 +18,10 @@
#include <gtsam/discrete/DecisionTreeFactor.h> #include <gtsam/discrete/DecisionTreeFactor.h>
#include <gtsam/discrete/TableFactor.h> #include <gtsam/discrete/TableFactor.h>
#include <gtsam/hybrid/GaussianMixture.h> #include <gtsam/hybrid/HybridGaussianConditional.h>
#include <gtsam/hybrid/HybridGaussianFactorGraph.h> #include <gtsam/hybrid/HybridGaussianFactorGraph.h>
#include <gtsam/hybrid/HybridNonlinearFactor.h>
#include <gtsam/hybrid/HybridNonlinearFactorGraph.h> #include <gtsam/hybrid/HybridNonlinearFactorGraph.h>
#include <gtsam/hybrid/MixtureFactor.h>
#include <gtsam/nonlinear/NonlinearFactor.h> #include <gtsam/nonlinear/NonlinearFactor.h>
namespace gtsam { namespace gtsam {
@ -59,7 +59,7 @@ void HybridNonlinearFactorGraph::printErrors(
// Clear the stringstream // Clear the stringstream
ss.str(std::string()); ss.str(std::string());
if (auto mf = std::dynamic_pointer_cast<MixtureFactor>(factor)) { if (auto mf = std::dynamic_pointer_cast<HybridNonlinearFactor>(factor)) {
if (factor == nullptr) { if (factor == nullptr) {
std::cout << "nullptr" std::cout << "nullptr"
<< "\n"; << "\n";
@ -70,7 +70,7 @@ void HybridNonlinearFactorGraph::printErrors(
std::cout << std::endl; std::cout << std::endl;
} }
} else if (auto gmf = } else if (auto gmf =
std::dynamic_pointer_cast<GaussianMixtureFactor>(factor)) { std::dynamic_pointer_cast<HybridGaussianFactor>(factor)) {
if (factor == nullptr) { if (factor == nullptr) {
std::cout << "nullptr" std::cout << "nullptr"
<< "\n"; << "\n";
@ -80,7 +80,8 @@ void HybridNonlinearFactorGraph::printErrors(
gmf->errorTree(values.continuous()).print("", keyFormatter); gmf->errorTree(values.continuous()).print("", keyFormatter);
std::cout << std::endl; std::cout << std::endl;
} }
} else if (auto gm = std::dynamic_pointer_cast<GaussianMixture>(factor)) { } else if (auto gm = std::dynamic_pointer_cast<HybridGaussianConditional>(
factor)) {
if (factor == nullptr) { if (factor == nullptr) {
std::cout << "nullptr" std::cout << "nullptr"
<< "\n"; << "\n";
@ -151,8 +152,8 @@ HybridGaussianFactorGraph::shared_ptr HybridNonlinearFactorGraph::linearize(
continue; continue;
} }
// Check if it is a nonlinear mixture factor // Check if it is a nonlinear mixture factor
if (auto mf = dynamic_pointer_cast<MixtureFactor>(f)) { if (auto mf = dynamic_pointer_cast<HybridNonlinearFactor>(f)) {
const GaussianMixtureFactor::shared_ptr& gmf = const HybridGaussianFactor::shared_ptr& gmf =
mf->linearize(continuousValues); mf->linearize(continuousValues);
linearFG->push_back(gmf); linearFG->push_back(gmf);
} else if (auto nlf = dynamic_pointer_cast<NonlinearFactor>(f)) { } else if (auto nlf = dynamic_pointer_cast<NonlinearFactor>(f)) {
@ -161,9 +162,9 @@ HybridGaussianFactorGraph::shared_ptr HybridNonlinearFactorGraph::linearize(
} else if (dynamic_pointer_cast<DiscreteFactor>(f)) { } else if (dynamic_pointer_cast<DiscreteFactor>(f)) {
// If discrete-only: doesn't need linearization. // If discrete-only: doesn't need linearization.
linearFG->push_back(f); linearFG->push_back(f);
} else if (auto gmf = dynamic_pointer_cast<GaussianMixtureFactor>(f)) { } else if (auto gmf = dynamic_pointer_cast<HybridGaussianFactor>(f)) {
linearFG->push_back(gmf); linearFG->push_back(gmf);
} else if (auto gm = dynamic_pointer_cast<GaussianMixture>(f)) { } else if (auto gm = dynamic_pointer_cast<HybridGaussianConditional>(f)) {
linearFG->push_back(gm); linearFG->push_back(gm);
} else if (dynamic_pointer_cast<GaussianFactor>(f)) { } else if (dynamic_pointer_cast<GaussianFactor>(f)) {
linearFG->push_back(f); linearFG->push_back(f);

View File

@ -19,6 +19,7 @@
#include <gtsam/hybrid/HybridGaussianISAM.h> #include <gtsam/hybrid/HybridGaussianISAM.h>
#include <gtsam/hybrid/HybridNonlinearFactorGraph.h> #include <gtsam/hybrid/HybridNonlinearFactorGraph.h>
#include <optional> #include <optional>
namespace gtsam { namespace gtsam {

View File

@ -138,7 +138,7 @@ HybridSmoother::addConditionals(const HybridGaussianFactorGraph &originalGraph,
} }
/* ************************************************************************* */ /* ************************************************************************* */
GaussianMixture::shared_ptr HybridSmoother::gaussianMixture( HybridGaussianConditional::shared_ptr HybridSmoother::gaussianMixture(
size_t index) const { size_t index) const {
return hybridBayesNet_.at(index)->asMixture(); return hybridBayesNet_.at(index)->asMixture();
} }

View File

@ -69,7 +69,7 @@ class GTSAM_EXPORT HybridSmoother {
const HybridBayesNet& hybridBayesNet, const Ordering& ordering) const; const HybridBayesNet& hybridBayesNet, const Ordering& ordering) const;
/// Get the Gaussian Mixture from the Bayes Net posterior at `index`. /// Get the Gaussian Mixture from the Bayes Net posterior at `index`.
GaussianMixture::shared_ptr gaussianMixture(size_t index) const; HybridGaussianConditional::shared_ptr gaussianMixture(size_t index) const;
/// Return the Bayes Net posterior. /// Return the Bayes Net posterior.
const HybridBayesNet& hybridBayesNet() const; const HybridBayesNet& hybridBayesNet() const;

View File

@ -10,26 +10,26 @@ class HybridValues {
gtsam::DiscreteValues discrete() const; gtsam::DiscreteValues discrete() const;
HybridValues(); HybridValues();
HybridValues(const gtsam::VectorValues &cv, const gtsam::DiscreteValues &dv); HybridValues(const gtsam::VectorValues& cv, const gtsam::DiscreteValues& dv);
void print(string s = "HybridValues", void print(string s = "HybridValues",
const gtsam::KeyFormatter& keyFormatter = const gtsam::KeyFormatter& keyFormatter =
gtsam::DefaultKeyFormatter) const; gtsam::DefaultKeyFormatter) const;
bool equals(const gtsam::HybridValues& other, double tol) const; bool equals(const gtsam::HybridValues& other, double tol) const;
void insert(gtsam::Key j, int value); void insert(gtsam::Key j, int value);
void insert(gtsam::Key j, const gtsam::Vector& value); void insert(gtsam::Key j, const gtsam::Vector& value);
void insert_or_assign(gtsam::Key j, const gtsam::Vector& value); void insert_or_assign(gtsam::Key j, const gtsam::Vector& value);
void insert_or_assign(gtsam::Key j, size_t value); void insert_or_assign(gtsam::Key j, size_t value);
void insert(const gtsam::VectorValues& values); void insert(const gtsam::VectorValues& values);
void insert(const gtsam::DiscreteValues& values); void insert(const gtsam::DiscreteValues& values);
void insert(const gtsam::HybridValues& values); void insert(const gtsam::HybridValues& values);
void update(const gtsam::VectorValues& values); void update(const gtsam::VectorValues& values);
void update(const gtsam::DiscreteValues& values); void update(const gtsam::DiscreteValues& values);
void update(const gtsam::HybridValues& values); void update(const gtsam::HybridValues& values);
size_t& atDiscrete(gtsam::Key j); size_t& atDiscrete(gtsam::Key j);
gtsam::Vector& at(gtsam::Key j); gtsam::Vector& at(gtsam::Key j);
}; };
@ -42,7 +42,7 @@ virtual class HybridFactor : gtsam::Factor {
bool equals(const gtsam::HybridFactor& other, double tol = 1e-9) const; bool equals(const gtsam::HybridFactor& other, double tol = 1e-9) const;
// Standard interface: // Standard interface:
double error(const gtsam::HybridValues &values) const; double error(const gtsam::HybridValues& values) const;
bool isDiscrete() const; bool isDiscrete() const;
bool isContinuous() const; bool isContinuous() const;
bool isHybrid() const; bool isHybrid() const;
@ -65,39 +65,41 @@ virtual class HybridConditional {
double logProbability(const gtsam::HybridValues& values) const; double logProbability(const gtsam::HybridValues& values) const;
double evaluate(const gtsam::HybridValues& values) const; double evaluate(const gtsam::HybridValues& values) const;
double operator()(const gtsam::HybridValues& values) const; double operator()(const gtsam::HybridValues& values) const;
gtsam::GaussianMixture* asMixture() const; gtsam::HybridGaussianConditional* asMixture() const;
gtsam::GaussianConditional* asGaussian() const; gtsam::GaussianConditional* asGaussian() const;
gtsam::DiscreteConditional* asDiscrete() const; gtsam::DiscreteConditional* asDiscrete() const;
gtsam::Factor* inner(); gtsam::Factor* inner();
double error(const gtsam::HybridValues& values) const; double error(const gtsam::HybridValues& values) const;
}; };
#include <gtsam/hybrid/GaussianMixtureFactor.h> #include <gtsam/hybrid/HybridGaussianFactor.h>
class GaussianMixtureFactor : gtsam::HybridFactor { class HybridGaussianFactor : gtsam::HybridFactor {
GaussianMixtureFactor( HybridGaussianFactor(
const gtsam::KeyVector& continuousKeys, const gtsam::KeyVector& continuousKeys,
const gtsam::DiscreteKeys& discreteKeys, const gtsam::DiscreteKeys& discreteKeys,
const std::vector<gtsam::GaussianFactor::shared_ptr>& factorsList); const std::vector<std::pair<gtsam::GaussianFactor::shared_ptr, double>>&
factorsList);
void print(string s = "GaussianMixtureFactor\n", void print(string s = "HybridGaussianFactor\n",
const gtsam::KeyFormatter& keyFormatter = const gtsam::KeyFormatter& keyFormatter =
gtsam::DefaultKeyFormatter) const; gtsam::DefaultKeyFormatter) const;
}; };
#include <gtsam/hybrid/GaussianMixture.h> #include <gtsam/hybrid/HybridGaussianConditional.h>
class GaussianMixture : gtsam::HybridFactor { class HybridGaussianConditional : gtsam::HybridFactor {
GaussianMixture(const gtsam::KeyVector& continuousFrontals, HybridGaussianConditional(
const gtsam::KeyVector& continuousParents, const gtsam::KeyVector& continuousFrontals,
const gtsam::DiscreteKeys& discreteParents, const gtsam::KeyVector& continuousParents,
const std::vector<gtsam::GaussianConditional::shared_ptr>& const gtsam::DiscreteKeys& discreteParents,
conditionalsList); const std::vector<gtsam::GaussianConditional::shared_ptr>&
conditionalsList);
gtsam::GaussianMixtureFactor* likelihood( gtsam::HybridGaussianFactor* likelihood(
const gtsam::VectorValues& frontals) const; const gtsam::VectorValues& frontals) const;
double logProbability(const gtsam::HybridValues& values) const; double logProbability(const gtsam::HybridValues& values) const;
double evaluate(const gtsam::HybridValues& values) const; double evaluate(const gtsam::HybridValues& values) const;
void print(string s = "GaussianMixture\n", void print(string s = "HybridGaussianConditional\n",
const gtsam::KeyFormatter& keyFormatter = const gtsam::KeyFormatter& keyFormatter =
gtsam::DefaultKeyFormatter) const; gtsam::DefaultKeyFormatter) const;
}; };
@ -131,7 +133,7 @@ class HybridBayesTree {
#include <gtsam/hybrid/HybridBayesNet.h> #include <gtsam/hybrid/HybridBayesNet.h>
class HybridBayesNet { class HybridBayesNet {
HybridBayesNet(); HybridBayesNet();
void push_back(const gtsam::GaussianMixture* s); void push_back(const gtsam::HybridGaussianConditional* s);
void push_back(const gtsam::GaussianConditional* s); void push_back(const gtsam::GaussianConditional* s);
void push_back(const gtsam::DiscreteConditional* s); void push_back(const gtsam::DiscreteConditional* s);
@ -139,7 +141,7 @@ class HybridBayesNet {
size_t size() const; size_t size() const;
gtsam::KeySet keys() const; gtsam::KeySet keys() const;
const gtsam::HybridConditional* at(size_t i) const; const gtsam::HybridConditional* at(size_t i) const;
// Standard interface: // Standard interface:
double logProbability(const gtsam::HybridValues& values) const; double logProbability(const gtsam::HybridValues& values) const;
double evaluate(const gtsam::HybridValues& values) const; double evaluate(const gtsam::HybridValues& values) const;
@ -149,7 +151,7 @@ class HybridBayesNet {
const gtsam::VectorValues& measurements) const; const gtsam::VectorValues& measurements) const;
gtsam::HybridValues optimize() const; gtsam::HybridValues optimize() const;
gtsam::HybridValues sample(const gtsam::HybridValues &given) const; gtsam::HybridValues sample(const gtsam::HybridValues& given) const;
gtsam::HybridValues sample() const; gtsam::HybridValues sample() const;
void print(string s = "HybridBayesNet\n", void print(string s = "HybridBayesNet\n",
@ -177,7 +179,7 @@ class HybridGaussianFactorGraph {
void push_back(const gtsam::HybridGaussianFactorGraph& graph); void push_back(const gtsam::HybridGaussianFactorGraph& graph);
void push_back(const gtsam::HybridBayesNet& bayesNet); void push_back(const gtsam::HybridBayesNet& bayesNet);
void push_back(const gtsam::HybridBayesTree& bayesTree); void push_back(const gtsam::HybridBayesTree& bayesTree);
void push_back(const gtsam::GaussianMixtureFactor* gmm); void push_back(const gtsam::HybridGaussianFactor* gmm);
void push_back(gtsam::DecisionTreeFactor* factor); void push_back(gtsam::DecisionTreeFactor* factor);
void push_back(gtsam::TableFactor* factor); void push_back(gtsam::TableFactor* factor);
void push_back(gtsam::JacobianFactor* factor); void push_back(gtsam::JacobianFactor* factor);
@ -189,7 +191,8 @@ class HybridGaussianFactorGraph {
const gtsam::HybridFactor* at(size_t i) const; const gtsam::HybridFactor* at(size_t i) const;
void print(string s = "") const; void print(string s = "") const;
bool equals(const gtsam::HybridGaussianFactorGraph& fg, double tol = 1e-9) const; bool equals(const gtsam::HybridGaussianFactorGraph& fg,
double tol = 1e-9) const;
// evaluation // evaluation
double error(const gtsam::HybridValues& values) const; double error(const gtsam::HybridValues& values) const;
@ -222,7 +225,8 @@ class HybridNonlinearFactorGraph {
void push_back(gtsam::HybridFactor* factor); void push_back(gtsam::HybridFactor* factor);
void push_back(gtsam::NonlinearFactor* factor); void push_back(gtsam::NonlinearFactor* factor);
void push_back(gtsam::DiscreteFactor* factor); void push_back(gtsam::DiscreteFactor* factor);
gtsam::HybridGaussianFactorGraph linearize(const gtsam::Values& continuousValues) const; gtsam::HybridGaussianFactorGraph linearize(
const gtsam::Values& continuousValues) const;
bool empty() const; bool empty() const;
void remove(size_t i); void remove(size_t i);
@ -231,32 +235,32 @@ class HybridNonlinearFactorGraph {
const gtsam::HybridFactor* at(size_t i) const; const gtsam::HybridFactor* at(size_t i) const;
void print(string s = "HybridNonlinearFactorGraph\n", void print(string s = "HybridNonlinearFactorGraph\n",
const gtsam::KeyFormatter& keyFormatter = const gtsam::KeyFormatter& keyFormatter =
gtsam::DefaultKeyFormatter) const; gtsam::DefaultKeyFormatter) const;
}; };
#include <gtsam/hybrid/MixtureFactor.h> #include <gtsam/hybrid/HybridNonlinearFactor.h>
class MixtureFactor : gtsam::HybridFactor { class HybridNonlinearFactor : gtsam::HybridFactor {
MixtureFactor( HybridNonlinearFactor(
const gtsam::KeyVector& keys, const gtsam::DiscreteKeys& discreteKeys, const gtsam::KeyVector& keys, const gtsam::DiscreteKeys& discreteKeys,
const gtsam::DecisionTree<gtsam::Key, gtsam::NonlinearFactor*>& factors, const gtsam::DecisionTree<
gtsam::Key, std::pair<gtsam::NonlinearFactor*, double>>& factors,
bool normalized = false); bool normalized = false);
template <FACTOR = {gtsam::NonlinearFactor}> HybridNonlinearFactor(
MixtureFactor(const gtsam::KeyVector& keys, const gtsam::DiscreteKeys& discreteKeys, const gtsam::KeyVector& keys, const gtsam::DiscreteKeys& discreteKeys,
const std::vector<FACTOR*>& factors, const std::vector<std::pair<gtsam::NonlinearFactor*, double>>& factors,
bool normalized = false); bool normalized = false);
double error(const gtsam::Values& continuousValues, double error(const gtsam::Values& continuousValues,
const gtsam::DiscreteValues& discreteValues) const; const gtsam::DiscreteValues& discreteValues) const;
double nonlinearFactorLogNormalizingConstant(const gtsam::NonlinearFactor* factor, double nonlinearFactorLogNormalizingConstant(
const gtsam::Values& values) const; const gtsam::NonlinearFactor* factor, const gtsam::Values& values) const;
GaussianMixtureFactor* linearize( HybridGaussianFactor* linearize(const gtsam::Values& continuousValues) const;
const gtsam::Values& continuousValues) const;
void print(string s = "MixtureFactor\n", void print(string s = "HybridNonlinearFactor\n",
const gtsam::KeyFormatter& keyFormatter = const gtsam::KeyFormatter& keyFormatter =
gtsam::DefaultKeyFormatter) const; gtsam::DefaultKeyFormatter) const;
}; };

View File

@ -19,10 +19,10 @@
#include <gtsam/base/Matrix.h> #include <gtsam/base/Matrix.h>
#include <gtsam/discrete/DecisionTreeFactor.h> #include <gtsam/discrete/DecisionTreeFactor.h>
#include <gtsam/discrete/DiscreteDistribution.h> #include <gtsam/discrete/DiscreteDistribution.h>
#include <gtsam/hybrid/GaussianMixtureFactor.h> #include <gtsam/hybrid/HybridGaussianFactor.h>
#include <gtsam/hybrid/HybridGaussianFactorGraph.h> #include <gtsam/hybrid/HybridGaussianFactorGraph.h>
#include <gtsam/hybrid/HybridNonlinearFactor.h>
#include <gtsam/hybrid/HybridNonlinearFactorGraph.h> #include <gtsam/hybrid/HybridNonlinearFactorGraph.h>
#include <gtsam/hybrid/MixtureFactor.h>
#include <gtsam/inference/Symbol.h> #include <gtsam/inference/Symbol.h>
#include <gtsam/linear/JacobianFactor.h> #include <gtsam/linear/JacobianFactor.h>
#include <gtsam/linear/NoiseModel.h> #include <gtsam/linear/NoiseModel.h>
@ -57,12 +57,15 @@ inline HybridGaussianFactorGraph::shared_ptr makeSwitchingChain(
// keyFunc(1) to keyFunc(n+1) // keyFunc(1) to keyFunc(n+1)
for (size_t t = 1; t < n; t++) { for (size_t t = 1; t < n; t++) {
hfg.add(GaussianMixtureFactor( std::vector<GaussianFactorValuePair> components = {
{keyFunc(t), keyFunc(t + 1)}, {{dKeyFunc(t), 2}},
{std::make_shared<JacobianFactor>(keyFunc(t), I_3x3, keyFunc(t + 1), {std::make_shared<JacobianFactor>(keyFunc(t), I_3x3, keyFunc(t + 1),
I_3x3, Z_3x1), I_3x3, Z_3x1),
std::make_shared<JacobianFactor>(keyFunc(t), I_3x3, keyFunc(t + 1), 0.0},
I_3x3, Vector3::Ones())})); {std::make_shared<JacobianFactor>(keyFunc(t), I_3x3, keyFunc(t + 1),
I_3x3, Vector3::Ones()),
0.0}};
hfg.add(HybridGaussianFactor({keyFunc(t), keyFunc(t + 1)},
{{dKeyFunc(t), 2}}, components));
if (t > 1) { if (t > 1) {
hfg.add(DecisionTreeFactor({{dKeyFunc(t - 1), 2}, {dKeyFunc(t), 2}}, hfg.add(DecisionTreeFactor({{dKeyFunc(t - 1), 2}, {dKeyFunc(t), 2}},
@ -159,11 +162,12 @@ struct Switching {
for (size_t k = 0; k < K - 1; k++) { for (size_t k = 0; k < K - 1; k++) {
KeyVector keys = {X(k), X(k + 1)}; KeyVector keys = {X(k), X(k + 1)};
auto motion_models = motionModels(k, between_sigma); auto motion_models = motionModels(k, between_sigma);
std::vector<NonlinearFactor::shared_ptr> components; std::vector<NonlinearFactorValuePair> components;
for (auto &&f : motion_models) { for (auto &&f : motion_models) {
components.push_back(std::dynamic_pointer_cast<NonlinearFactor>(f)); components.push_back(
{std::dynamic_pointer_cast<NonlinearFactor>(f), 0.0});
} }
nonlinearFactorGraph.emplace_shared<MixtureFactor>( nonlinearFactorGraph.emplace_shared<HybridNonlinearFactor>(
keys, DiscreteKeys{modes[k]}, components); keys, DiscreteKeys{modes[k]}, components);
} }

View File

@ -43,7 +43,7 @@ inline HybridBayesNet createHybridBayesNet(size_t num_measurements = 1,
// Create Gaussian mixture z_i = x0 + noise for each measurement. // Create Gaussian mixture z_i = x0 + noise for each measurement.
for (size_t i = 0; i < num_measurements; i++) { for (size_t i = 0; i < num_measurements; i++) {
const auto mode_i = manyModes ? DiscreteKey{M(i), 2} : mode; const auto mode_i = manyModes ? DiscreteKey{M(i), 2} : mode;
bayesNet.emplace_shared<GaussianMixture>( bayesNet.emplace_shared<HybridGaussianConditional>(
KeyVector{Z(i)}, KeyVector{X(0)}, DiscreteKeys{mode_i}, KeyVector{Z(i)}, KeyVector{X(0)}, DiscreteKeys{mode_i},
std::vector{GaussianConditional::sharedMeanAndStddev(Z(i), I_1x1, X(0), std::vector{GaussianConditional::sharedMeanAndStddev(Z(i), I_1x1, X(0),
Z_1x1, 0.5), Z_1x1, 0.5),

View File

@ -107,7 +107,7 @@ TEST(HybridBayesNet, evaluateHybrid) {
// Create hybrid Bayes net. // Create hybrid Bayes net.
HybridBayesNet bayesNet; HybridBayesNet bayesNet;
bayesNet.push_back(continuousConditional); bayesNet.push_back(continuousConditional);
bayesNet.emplace_shared<GaussianMixture>( bayesNet.emplace_shared<HybridGaussianConditional>(
KeyVector{X(1)}, KeyVector{}, DiscreteKeys{Asia}, KeyVector{X(1)}, KeyVector{}, DiscreteKeys{Asia},
std::vector{conditional0, conditional1}); std::vector{conditional0, conditional1});
bayesNet.emplace_shared<DiscreteConditional>(Asia, "99/1"); bayesNet.emplace_shared<DiscreteConditional>(Asia, "99/1");
@ -168,7 +168,7 @@ TEST(HybridBayesNet, Error) {
conditional1 = std::make_shared<GaussianConditional>( conditional1 = std::make_shared<GaussianConditional>(
X(1), Vector1::Constant(2), I_1x1, model1); X(1), Vector1::Constant(2), I_1x1, model1);
auto gm = std::make_shared<GaussianMixture>( auto gm = std::make_shared<HybridGaussianConditional>(
KeyVector{X(1)}, KeyVector{}, DiscreteKeys{Asia}, KeyVector{X(1)}, KeyVector{}, DiscreteKeys{Asia},
std::vector{conditional0, conditional1}); std::vector{conditional0, conditional1});
// Create hybrid Bayes net. // Create hybrid Bayes net.
@ -387,9 +387,10 @@ TEST(HybridBayesNet, Sampling) {
std::make_shared<BetweenFactor<double>>(X(0), X(1), 0, noise_model); std::make_shared<BetweenFactor<double>>(X(0), X(1), 0, noise_model);
auto one_motion = auto one_motion =
std::make_shared<BetweenFactor<double>>(X(0), X(1), 1, noise_model); std::make_shared<BetweenFactor<double>>(X(0), X(1), 1, noise_model);
std::vector<NonlinearFactor::shared_ptr> factors = {zero_motion, one_motion}; std::vector<NonlinearFactorValuePair> factors = {{zero_motion, 0.0},
{one_motion, 0.0}};
nfg.emplace_shared<PriorFactor<double>>(X(0), 0.0, noise_model); nfg.emplace_shared<PriorFactor<double>>(X(0), 0.0, noise_model);
nfg.emplace_shared<MixtureFactor>( nfg.emplace_shared<HybridNonlinearFactor>(
KeyVector{X(0), X(1)}, DiscreteKeys{DiscreteKey(M(0), 2)}, factors); KeyVector{X(0), X(1)}, DiscreteKeys{DiscreteKey(M(0), 2)}, factors);
DiscreteKey mode(M(0), 2); DiscreteKey mode(M(0), 2);

View File

@ -43,9 +43,9 @@ TEST(HybridConditional, Invariants) {
auto hc0 = bn.at(0); auto hc0 = bn.at(0);
CHECK(hc0->isHybrid()); CHECK(hc0->isHybrid());
// Check invariants as a GaussianMixture. // Check invariants as a HybridGaussianConditional.
const auto mixture = hc0->asMixture(); const auto mixture = hc0->asMixture();
EXPECT(GaussianMixture::CheckInvariants(*mixture, values)); EXPECT(HybridGaussianConditional::CheckInvariants(*mixture, values));
// Check invariants as a HybridConditional. // Check invariants as a HybridConditional.
EXPECT(HybridConditional::CheckInvariants(*hc0, values)); EXPECT(HybridConditional::CheckInvariants(*hc0, values));

View File

@ -19,10 +19,10 @@
#include <gtsam/geometry/Pose2.h> #include <gtsam/geometry/Pose2.h>
#include <gtsam/geometry/Pose3.h> #include <gtsam/geometry/Pose3.h>
#include <gtsam/hybrid/HybridBayesNet.h> #include <gtsam/hybrid/HybridBayesNet.h>
#include <gtsam/hybrid/HybridNonlinearFactor.h>
#include <gtsam/hybrid/HybridNonlinearFactorGraph.h> #include <gtsam/hybrid/HybridNonlinearFactorGraph.h>
#include <gtsam/hybrid/HybridNonlinearISAM.h> #include <gtsam/hybrid/HybridNonlinearISAM.h>
#include <gtsam/hybrid/HybridSmoother.h> #include <gtsam/hybrid/HybridSmoother.h>
#include <gtsam/hybrid/MixtureFactor.h>
#include <gtsam/inference/Symbol.h> #include <gtsam/inference/Symbol.h>
#include <gtsam/linear/GaussianBayesNet.h> #include <gtsam/linear/GaussianBayesNet.h>
#include <gtsam/linear/GaussianBayesTree.h> #include <gtsam/linear/GaussianBayesTree.h>
@ -435,9 +435,10 @@ static HybridNonlinearFactorGraph createHybridNonlinearFactorGraph() {
std::make_shared<BetweenFactor<double>>(X(0), X(1), 0, noise_model); std::make_shared<BetweenFactor<double>>(X(0), X(1), 0, noise_model);
const auto one_motion = const auto one_motion =
std::make_shared<BetweenFactor<double>>(X(0), X(1), 1, noise_model); std::make_shared<BetweenFactor<double>>(X(0), X(1), 1, noise_model);
nfg.emplace_shared<MixtureFactor>( std::vector<NonlinearFactorValuePair> components = {{zero_motion, 0.0},
KeyVector{X(0), X(1)}, DiscreteKeys{m}, {one_motion, 0.0}};
std::vector<NonlinearFactor::shared_ptr>{zero_motion, one_motion}); nfg.emplace_shared<HybridNonlinearFactor>(KeyVector{X(0), X(1)},
DiscreteKeys{m}, components);
return nfg; return nfg;
} }
@ -531,10 +532,10 @@ TEST(HybridEstimation, CorrectnessViaSampling) {
* Helper function to add the constant term corresponding to * Helper function to add the constant term corresponding to
* the difference in noise models. * the difference in noise models.
*/ */
std::shared_ptr<GaussianMixtureFactor> mixedVarianceFactor( std::shared_ptr<HybridGaussianFactor> mixedVarianceFactor(
const MixtureFactor& mf, const Values& initial, const Key& mode, const HybridNonlinearFactor& mf, const Values& initial, const Key& mode,
double noise_tight, double noise_loose, size_t d, size_t tight_index) { double noise_tight, double noise_loose, size_t d, size_t tight_index) {
GaussianMixtureFactor::shared_ptr gmf = mf.linearize(initial); HybridGaussianFactor::shared_ptr gmf = mf.linearize(initial);
constexpr double log2pi = 1.8378770664093454835606594728112; constexpr double log2pi = 1.8378770664093454835606594728112;
// logConstant will be of the tighter model // logConstant will be of the tighter model
@ -560,8 +561,13 @@ std::shared_ptr<GaussianMixtureFactor> mixedVarianceFactor(
} }
}; };
auto updated_components = gmf->factors().apply(func); auto updated_components = gmf->factors().apply(func);
auto updated_gmf = std::make_shared<GaussianMixtureFactor>( auto updated_pairs = HybridGaussianFactor::FactorValuePairs(
gmf->continuousKeys(), gmf->discreteKeys(), updated_components); updated_components,
[](const GaussianFactor::shared_ptr& gf) -> GaussianFactorValuePair {
return {gf, 0.0};
});
auto updated_gmf = std::make_shared<HybridGaussianFactor>(
gmf->continuousKeys(), gmf->discreteKeys(), updated_pairs);
return updated_gmf; return updated_gmf;
} }
@ -589,10 +595,11 @@ TEST(HybridEstimation, ModeSelection) {
model1 = std::make_shared<MotionModel>( model1 = std::make_shared<MotionModel>(
X(0), X(1), 0.0, noiseModel::Isotropic::Sigma(d, noise_tight)); X(0), X(1), 0.0, noiseModel::Isotropic::Sigma(d, noise_tight));
std::vector<NonlinearFactor::shared_ptr> components = {model0, model1}; std::vector<NonlinearFactorValuePair> components = {{model0, 0.0},
{model1, 0.0}};
KeyVector keys = {X(0), X(1)}; KeyVector keys = {X(0), X(1)};
MixtureFactor mf(keys, modes, components); HybridNonlinearFactor mf(keys, modes, components);
initial.insert(X(0), 0.0); initial.insert(X(0), 0.0);
initial.insert(X(1), 0.0); initial.insert(X(1), 0.0);
@ -616,7 +623,7 @@ TEST(HybridEstimation, ModeSelection) {
GaussianConditional::sharedMeanAndStddev(Z(0), -I_1x1, X(0), Z_1x1, 0.1)); GaussianConditional::sharedMeanAndStddev(Z(0), -I_1x1, X(0), Z_1x1, 0.1));
bn.push_back( bn.push_back(
GaussianConditional::sharedMeanAndStddev(Z(0), -I_1x1, X(1), Z_1x1, 0.1)); GaussianConditional::sharedMeanAndStddev(Z(0), -I_1x1, X(1), Z_1x1, 0.1));
bn.emplace_shared<GaussianMixture>( bn.emplace_shared<HybridGaussianConditional>(
KeyVector{Z(0)}, KeyVector{X(0), X(1)}, DiscreteKeys{mode}, KeyVector{Z(0)}, KeyVector{X(0), X(1)}, DiscreteKeys{mode},
std::vector{GaussianConditional::sharedMeanAndStddev( std::vector{GaussianConditional::sharedMeanAndStddev(
Z(0), I_1x1, X(0), -I_1x1, X(1), Z_1x1, noise_loose), Z(0), I_1x1, X(0), -I_1x1, X(1), Z_1x1, noise_loose),
@ -647,7 +654,7 @@ TEST(HybridEstimation, ModeSelection2) {
GaussianConditional::sharedMeanAndStddev(Z(0), -I_3x3, X(0), Z_3x1, 0.1)); GaussianConditional::sharedMeanAndStddev(Z(0), -I_3x3, X(0), Z_3x1, 0.1));
bn.push_back( bn.push_back(
GaussianConditional::sharedMeanAndStddev(Z(0), -I_3x3, X(1), Z_3x1, 0.1)); GaussianConditional::sharedMeanAndStddev(Z(0), -I_3x3, X(1), Z_3x1, 0.1));
bn.emplace_shared<GaussianMixture>( bn.emplace_shared<HybridGaussianConditional>(
KeyVector{Z(0)}, KeyVector{X(0), X(1)}, DiscreteKeys{mode}, KeyVector{Z(0)}, KeyVector{X(0), X(1)}, DiscreteKeys{mode},
std::vector{GaussianConditional::sharedMeanAndStddev( std::vector{GaussianConditional::sharedMeanAndStddev(
Z(0), I_3x3, X(0), -I_3x3, X(1), Z_3x1, noise_loose), Z(0), I_3x3, X(0), -I_3x3, X(1), Z_3x1, noise_loose),
@ -680,10 +687,11 @@ TEST(HybridEstimation, ModeSelection2) {
model1 = std::make_shared<BetweenFactor<Vector3>>( model1 = std::make_shared<BetweenFactor<Vector3>>(
X(0), X(1), Z_3x1, noiseModel::Isotropic::Sigma(d, noise_tight)); X(0), X(1), Z_3x1, noiseModel::Isotropic::Sigma(d, noise_tight));
std::vector<NonlinearFactor::shared_ptr> components = {model0, model1}; std::vector<NonlinearFactorValuePair> components = {{model0, 0.0},
{model1, 0.0}};
KeyVector keys = {X(0), X(1)}; KeyVector keys = {X(0), X(1)};
MixtureFactor mf(keys, modes, components); HybridNonlinearFactor mf(keys, modes, components);
initial.insert<Vector3>(X(0), Z_3x1); initial.insert<Vector3>(X(0), Z_3x1);
initial.insert<Vector3>(X(1), Z_3x1); initial.insert<Vector3>(X(1), Z_3x1);

View File

@ -52,10 +52,10 @@ TEST(HybridFactorGraph, Keys) {
// Add a gaussian mixture factor ϕ(x1, c1) // Add a gaussian mixture factor ϕ(x1, c1)
DiscreteKey m1(M(1), 2); DiscreteKey m1(M(1), 2);
DecisionTree<Key, GaussianFactor::shared_ptr> dt( DecisionTree<Key, GaussianFactorValuePair> dt(
M(1), std::make_shared<JacobianFactor>(X(1), I_3x3, Z_3x1), M(1), {std::make_shared<JacobianFactor>(X(1), I_3x3, Z_3x1), 0.0},
std::make_shared<JacobianFactor>(X(1), I_3x3, Vector3::Ones())); {std::make_shared<JacobianFactor>(X(1), I_3x3, Vector3::Ones()), 0.0});
hfg.add(GaussianMixtureFactor({X(1)}, {m1}, dt)); hfg.add(HybridGaussianFactor({X(1)}, {m1}, dt));
KeySet expected_continuous{X(0), X(1)}; KeySet expected_continuous{X(0), X(1)};
EXPECT( EXPECT(

View File

@ -10,8 +10,8 @@
* -------------------------------------------------------------------------- */ * -------------------------------------------------------------------------- */
/** /**
* @file testGaussianMixture.cpp * @file testHybridGaussianConditional.cpp
* @brief Unit tests for GaussianMixture class * @brief Unit tests for HybridGaussianConditional class
* @author Varun Agrawal * @author Varun Agrawal
* @author Fan Jiang * @author Fan Jiang
* @author Frank Dellaert * @author Frank Dellaert
@ -19,8 +19,8 @@
*/ */
#include <gtsam/discrete/DiscreteValues.h> #include <gtsam/discrete/DiscreteValues.h>
#include <gtsam/hybrid/GaussianMixture.h> #include <gtsam/hybrid/HybridGaussianConditional.h>
#include <gtsam/hybrid/GaussianMixtureFactor.h> #include <gtsam/hybrid/HybridGaussianFactor.h>
#include <gtsam/hybrid/HybridValues.h> #include <gtsam/hybrid/HybridValues.h>
#include <gtsam/inference/Symbol.h> #include <gtsam/inference/Symbol.h>
#include <gtsam/linear/GaussianConditional.h> #include <gtsam/linear/GaussianConditional.h>
@ -31,7 +31,6 @@
#include <CppUnitLite/TestHarness.h> #include <CppUnitLite/TestHarness.h>
using namespace gtsam; using namespace gtsam;
using noiseModel::Isotropic;
using symbol_shorthand::M; using symbol_shorthand::M;
using symbol_shorthand::X; using symbol_shorthand::X;
using symbol_shorthand::Z; using symbol_shorthand::Z;
@ -46,19 +45,19 @@ static const HybridValues hv1{vv, assignment1};
/* ************************************************************************* */ /* ************************************************************************* */
namespace equal_constants { namespace equal_constants {
// Create a simple GaussianMixture // Create a simple HybridGaussianConditional
const double commonSigma = 2.0; const double commonSigma = 2.0;
const std::vector<GaussianConditional::shared_ptr> conditionals{ const std::vector<GaussianConditional::shared_ptr> conditionals{
GaussianConditional::sharedMeanAndStddev(Z(0), I_1x1, X(0), Vector1(0.0), GaussianConditional::sharedMeanAndStddev(Z(0), I_1x1, X(0), Vector1(0.0),
commonSigma), commonSigma),
GaussianConditional::sharedMeanAndStddev(Z(0), I_1x1, X(0), Vector1(0.0), GaussianConditional::sharedMeanAndStddev(Z(0), I_1x1, X(0), Vector1(0.0),
commonSigma)}; commonSigma)};
const GaussianMixture mixture({Z(0)}, {X(0)}, {mode}, conditionals); const HybridGaussianConditional mixture({Z(0)}, {X(0)}, {mode}, conditionals);
} // namespace equal_constants } // namespace equal_constants
/* ************************************************************************* */ /* ************************************************************************* */
/// Check that invariants hold /// Check that invariants hold
TEST(GaussianMixture, Invariants) { TEST(HybridGaussianConditional, Invariants) {
using namespace equal_constants; using namespace equal_constants;
// Check that the mixture normalization constant is the max of all constants // Check that the mixture normalization constant is the max of all constants
@ -67,13 +66,13 @@ TEST(GaussianMixture, Invariants) {
EXPECT_DOUBLES_EQUAL(K, conditionals[0]->logNormalizationConstant(), 1e-8); EXPECT_DOUBLES_EQUAL(K, conditionals[0]->logNormalizationConstant(), 1e-8);
EXPECT_DOUBLES_EQUAL(K, conditionals[1]->logNormalizationConstant(), 1e-8); EXPECT_DOUBLES_EQUAL(K, conditionals[1]->logNormalizationConstant(), 1e-8);
EXPECT(GaussianMixture::CheckInvariants(mixture, hv0)); EXPECT(HybridGaussianConditional::CheckInvariants(mixture, hv0));
EXPECT(GaussianMixture::CheckInvariants(mixture, hv1)); EXPECT(HybridGaussianConditional::CheckInvariants(mixture, hv1));
} }
/* ************************************************************************* */ /* ************************************************************************* */
/// Check LogProbability. /// Check LogProbability.
TEST(GaussianMixture, LogProbability) { TEST(HybridGaussianConditional, LogProbability) {
using namespace equal_constants; using namespace equal_constants;
auto actual = mixture.logProbability(vv); auto actual = mixture.logProbability(vv);
@ -95,7 +94,7 @@ TEST(GaussianMixture, LogProbability) {
/* ************************************************************************* */ /* ************************************************************************* */
/// Check error. /// Check error.
TEST(GaussianMixture, Error) { TEST(HybridGaussianConditional, Error) {
using namespace equal_constants; using namespace equal_constants;
auto actual = mixture.errorTree(vv); auto actual = mixture.errorTree(vv);
@ -118,7 +117,7 @@ TEST(GaussianMixture, Error) {
/* ************************************************************************* */ /* ************************************************************************* */
/// Check that the likelihood is proportional to the conditional density given /// Check that the likelihood is proportional to the conditional density given
/// the measurements. /// the measurements.
TEST(GaussianMixture, Likelihood) { TEST(HybridGaussianConditional, Likelihood) {
using namespace equal_constants; using namespace equal_constants;
// Compute likelihood // Compute likelihood
@ -147,19 +146,19 @@ TEST(GaussianMixture, Likelihood) {
/* ************************************************************************* */ /* ************************************************************************* */
namespace mode_dependent_constants { namespace mode_dependent_constants {
// Create a GaussianMixture with mode-dependent noise models. // Create a HybridGaussianConditional with mode-dependent noise models.
// 0 is low-noise, 1 is high-noise. // 0 is low-noise, 1 is high-noise.
const std::vector<GaussianConditional::shared_ptr> conditionals{ const std::vector<GaussianConditional::shared_ptr> conditionals{
GaussianConditional::sharedMeanAndStddev(Z(0), I_1x1, X(0), Vector1(0.0), GaussianConditional::sharedMeanAndStddev(Z(0), I_1x1, X(0), Vector1(0.0),
0.5), 0.5),
GaussianConditional::sharedMeanAndStddev(Z(0), I_1x1, X(0), Vector1(0.0), GaussianConditional::sharedMeanAndStddev(Z(0), I_1x1, X(0), Vector1(0.0),
3.0)}; 3.0)};
const GaussianMixture mixture({Z(0)}, {X(0)}, {mode}, conditionals); const HybridGaussianConditional mixture({Z(0)}, {X(0)}, {mode}, conditionals);
} // namespace mode_dependent_constants } // namespace mode_dependent_constants
/* ************************************************************************* */ /* ************************************************************************* */
// Create a test for continuousParents. // Create a test for continuousParents.
TEST(GaussianMixture, ContinuousParents) { TEST(HybridGaussianConditional, ContinuousParents) {
using namespace mode_dependent_constants; using namespace mode_dependent_constants;
const KeyVector continuousParentKeys = mixture.continuousParents(); const KeyVector continuousParentKeys = mixture.continuousParents();
// Check that the continuous parent keys are correct: // Check that the continuous parent keys are correct:
@ -170,7 +169,7 @@ TEST(GaussianMixture, ContinuousParents) {
/* ************************************************************************* */ /* ************************************************************************* */
/// Check that the likelihood is proportional to the conditional density given /// Check that the likelihood is proportional to the conditional density given
/// the measurements. /// the measurements.
TEST(GaussianMixture, Likelihood2) { TEST(HybridGaussianConditional, Likelihood2) {
using namespace mode_dependent_constants; using namespace mode_dependent_constants;
// Compute likelihood // Compute likelihood

View File

@ -10,50 +10,53 @@
* -------------------------------------------------------------------------- */ * -------------------------------------------------------------------------- */
/** /**
* @file testGaussianMixtureFactor.cpp * @file testHybridGaussianFactor.cpp
* @brief Unit tests for GaussianMixtureFactor * @brief Unit tests for HybridGaussianFactor
* @author Varun Agrawal * @author Varun Agrawal
* @author Fan Jiang * @author Fan Jiang
* @author Frank Dellaert * @author Frank Dellaert
* @date December 2021 * @date December 2021
*/ */
#include <gtsam/base/Testable.h>
#include <gtsam/base/TestableAssertions.h> #include <gtsam/base/TestableAssertions.h>
#include <gtsam/discrete/DiscreteConditional.h>
#include <gtsam/discrete/DiscreteValues.h> #include <gtsam/discrete/DiscreteValues.h>
#include <gtsam/hybrid/GaussianMixture.h>
#include <gtsam/hybrid/GaussianMixtureFactor.h>
#include <gtsam/hybrid/HybridBayesNet.h> #include <gtsam/hybrid/HybridBayesNet.h>
#include <gtsam/hybrid/HybridGaussianConditional.h>
#include <gtsam/hybrid/HybridGaussianFactor.h>
#include <gtsam/hybrid/HybridGaussianFactorGraph.h> #include <gtsam/hybrid/HybridGaussianFactorGraph.h>
#include <gtsam/hybrid/HybridValues.h> #include <gtsam/hybrid/HybridValues.h>
#include <gtsam/inference/Symbol.h> #include <gtsam/inference/Symbol.h>
#include <gtsam/linear/GaussianFactorGraph.h> #include <gtsam/linear/GaussianFactorGraph.h>
#include <gtsam/linear/VectorValues.h>
#include <gtsam/nonlinear/PriorFactor.h> #include <gtsam/nonlinear/PriorFactor.h>
#include <gtsam/slam/BetweenFactor.h> #include <gtsam/slam/BetweenFactor.h>
// Include for test suite // Include for test suite
#include <CppUnitLite/TestHarness.h> #include <CppUnitLite/TestHarness.h>
#include <memory>
using namespace std; using namespace std;
using namespace gtsam; using namespace gtsam;
using noiseModel::Isotropic;
using symbol_shorthand::F;
using symbol_shorthand::M; using symbol_shorthand::M;
using symbol_shorthand::X; using symbol_shorthand::X;
using symbol_shorthand::Z; using symbol_shorthand::Z;
/* ************************************************************************* */ /* ************************************************************************* */
// Check iterators of empty mixture. // Check iterators of empty mixture.
TEST(GaussianMixtureFactor, Constructor) { TEST(HybridGaussianFactor, Constructor) {
GaussianMixtureFactor factor; HybridGaussianFactor factor;
GaussianMixtureFactor::const_iterator const_it = factor.begin(); HybridGaussianFactor::const_iterator const_it = factor.begin();
CHECK(const_it == factor.end()); CHECK(const_it == factor.end());
GaussianMixtureFactor::iterator it = factor.begin(); HybridGaussianFactor::iterator it = factor.begin();
CHECK(it == factor.end()); CHECK(it == factor.end());
} }
/* ************************************************************************* */ /* ************************************************************************* */
// "Add" two mixture factors together. // "Add" two mixture factors together.
TEST(GaussianMixtureFactor, Sum) { TEST(HybridGaussianFactor, Sum) {
DiscreteKey m1(1, 2), m2(2, 3); DiscreteKey m1(1, 2), m2(2, 3);
auto A1 = Matrix::Zero(2, 1); auto A1 = Matrix::Zero(2, 1);
@ -68,14 +71,15 @@ TEST(GaussianMixtureFactor, Sum) {
auto f20 = std::make_shared<JacobianFactor>(X(1), A1, X(3), A3, b); auto f20 = std::make_shared<JacobianFactor>(X(1), A1, X(3), A3, b);
auto f21 = std::make_shared<JacobianFactor>(X(1), A1, X(3), A3, b); auto f21 = std::make_shared<JacobianFactor>(X(1), A1, X(3), A3, b);
auto f22 = std::make_shared<JacobianFactor>(X(1), A1, X(3), A3, b); auto f22 = std::make_shared<JacobianFactor>(X(1), A1, X(3), A3, b);
std::vector<GaussianFactor::shared_ptr> factorsA{f10, f11}; std::vector<GaussianFactorValuePair> factorsA{{f10, 0.0}, {f11, 0.0}};
std::vector<GaussianFactor::shared_ptr> factorsB{f20, f21, f22}; std::vector<GaussianFactorValuePair> factorsB{
{f20, 0.0}, {f21, 0.0}, {f22, 0.0}};
// TODO(Frank): why specify keys at all? And: keys in factor should be *all* // TODO(Frank): why specify keys at all? And: keys in factor should be *all*
// keys, deviating from Kevin's scheme. Should we index DT on DiscreteKey? // keys, deviating from Kevin's scheme. Should we index DT on DiscreteKey?
// Design review! // Design review!
GaussianMixtureFactor mixtureFactorA({X(1), X(2)}, {m1}, factorsA); HybridGaussianFactor mixtureFactorA({X(1), X(2)}, {m1}, factorsA);
GaussianMixtureFactor mixtureFactorB({X(1), X(3)}, {m2}, factorsB); HybridGaussianFactor mixtureFactorB({X(1), X(3)}, {m2}, factorsB);
// Check that number of keys is 3 // Check that number of keys is 3
EXPECT_LONGS_EQUAL(3, mixtureFactorA.keys().size()); EXPECT_LONGS_EQUAL(3, mixtureFactorA.keys().size());
@ -99,19 +103,19 @@ TEST(GaussianMixtureFactor, Sum) {
} }
/* ************************************************************************* */ /* ************************************************************************* */
TEST(GaussianMixtureFactor, Printing) { TEST(HybridGaussianFactor, Printing) {
DiscreteKey m1(1, 2); DiscreteKey m1(1, 2);
auto A1 = Matrix::Zero(2, 1); auto A1 = Matrix::Zero(2, 1);
auto A2 = Matrix::Zero(2, 2); auto A2 = Matrix::Zero(2, 2);
auto b = Matrix::Zero(2, 1); auto b = Matrix::Zero(2, 1);
auto f10 = std::make_shared<JacobianFactor>(X(1), A1, X(2), A2, b); auto f10 = std::make_shared<JacobianFactor>(X(1), A1, X(2), A2, b);
auto f11 = std::make_shared<JacobianFactor>(X(1), A1, X(2), A2, b); auto f11 = std::make_shared<JacobianFactor>(X(1), A1, X(2), A2, b);
std::vector<GaussianFactor::shared_ptr> factors{f10, f11}; std::vector<GaussianFactorValuePair> factors{{f10, 0.0}, {f11, 0.0}};
GaussianMixtureFactor mixtureFactor({X(1), X(2)}, {m1}, factors); HybridGaussianFactor mixtureFactor({X(1), X(2)}, {m1}, factors);
std::string expected = std::string expected =
R"(GaussianMixtureFactor R"(HybridGaussianFactor
Hybrid [x1 x2; 1]{ Hybrid [x1 x2; 1]{
Choice(1) Choice(1)
0 Leaf : 0 Leaf :
@ -144,7 +148,7 @@ Hybrid [x1 x2; 1]{
} }
/* ************************************************************************* */ /* ************************************************************************* */
TEST(GaussianMixtureFactor, GaussianMixture) { TEST(HybridGaussianFactor, HybridGaussianConditional) {
KeyVector keys; KeyVector keys;
keys.push_back(X(0)); keys.push_back(X(0));
keys.push_back(X(1)); keys.push_back(X(1));
@ -154,15 +158,15 @@ TEST(GaussianMixtureFactor, GaussianMixture) {
dKeys.emplace_back(M(1), 2); dKeys.emplace_back(M(1), 2);
auto gaussians = std::make_shared<GaussianConditional>(); auto gaussians = std::make_shared<GaussianConditional>();
GaussianMixture::Conditionals conditionals(gaussians); HybridGaussianConditional::Conditionals conditionals(gaussians);
GaussianMixture gm({}, keys, dKeys, conditionals); HybridGaussianConditional gm({}, keys, dKeys, conditionals);
EXPECT_LONGS_EQUAL(2, gm.discreteKeys().size()); EXPECT_LONGS_EQUAL(2, gm.discreteKeys().size());
} }
/* ************************************************************************* */ /* ************************************************************************* */
// Test the error of the GaussianMixtureFactor // Test the error of the HybridGaussianFactor
TEST(GaussianMixtureFactor, Error) { TEST(HybridGaussianFactor, Error) {
DiscreteKey m1(1, 2); DiscreteKey m1(1, 2);
auto A01 = Matrix2::Identity(); auto A01 = Matrix2::Identity();
@ -175,9 +179,9 @@ TEST(GaussianMixtureFactor, Error) {
auto f0 = std::make_shared<JacobianFactor>(X(1), A01, X(2), A02, b); auto f0 = std::make_shared<JacobianFactor>(X(1), A01, X(2), A02, b);
auto f1 = std::make_shared<JacobianFactor>(X(1), A11, X(2), A12, b); auto f1 = std::make_shared<JacobianFactor>(X(1), A11, X(2), A12, b);
std::vector<GaussianFactor::shared_ptr> factors{f0, f1}; std::vector<GaussianFactorValuePair> factors{{f0, 0.0}, {f1, 0.0}};
GaussianMixtureFactor mixtureFactor({X(1), X(2)}, {m1}, factors); HybridGaussianFactor mixtureFactor({X(1), X(2)}, {m1}, factors);
VectorValues continuousValues; VectorValues continuousValues;
continuousValues.insert(X(1), Vector2(0, 0)); continuousValues.insert(X(1), Vector2(0, 0));
@ -229,10 +233,10 @@ static HybridBayesNet GetGaussianMixtureModel(double mu0, double mu1,
c1 = make_shared<GaussianConditional>(z, Vector1(mu1), I_1x1, model1); c1 = make_shared<GaussianConditional>(z, Vector1(mu1), I_1x1, model1);
HybridBayesNet hbn; HybridBayesNet hbn;
hbn.emplace_shared<GaussianMixture>(KeyVector{z}, KeyVector{}, hbn.emplace_shared<HybridGaussianConditional>(
DiscreteKeys{m}, std::vector{c0, c1}); KeyVector{z}, KeyVector{}, DiscreteKeys{m}, std::vector{c0, c1});
auto mixing = make_shared<DiscreteConditional>(m, "0.5/0.5"); auto mixing = make_shared<DiscreteConditional>(m, "50/50");
hbn.push_back(mixing); hbn.push_back(mixing);
return hbn; return hbn;
@ -250,7 +254,7 @@ static HybridBayesNet GetGaussianMixtureModel(double mu0, double mu1,
* The resulting factor graph should eliminate to a Bayes net * The resulting factor graph should eliminate to a Bayes net
* which represents a sigmoid function. * which represents a sigmoid function.
*/ */
TEST(GaussianMixtureFactor, GaussianMixtureModel) { TEST(HybridGaussianFactor, GaussianMixtureModel) {
using namespace test_gmm; using namespace test_gmm;
double mu0 = 1.0, mu1 = 3.0; double mu0 = 1.0, mu1 = 3.0;
@ -278,7 +282,7 @@ TEST(GaussianMixtureFactor, GaussianMixtureModel) {
// At the halfway point between the means, we should get P(m|z)=0.5 // At the halfway point between the means, we should get P(m|z)=0.5
HybridBayesNet expected; HybridBayesNet expected;
expected.emplace_shared<DiscreteConditional>(m, "0.5/0.5"); expected.emplace_shared<DiscreteConditional>(m, "50/50");
EXPECT(assert_equal(expected, *bn)); EXPECT(assert_equal(expected, *bn));
} }
@ -322,7 +326,7 @@ TEST(GaussianMixtureFactor, GaussianMixtureModel) {
* which represents a Gaussian-like function * which represents a Gaussian-like function
* where m1>m0 close to 3.1333. * where m1>m0 close to 3.1333.
*/ */
TEST(GaussianMixtureFactor, GaussianMixtureModel2) { TEST(HybridGaussianFactor, GaussianMixtureModel2) {
using namespace test_gmm; using namespace test_gmm;
double mu0 = 1.0, mu1 = 3.0; double mu0 = 1.0, mu1 = 3.0;
@ -387,103 +391,132 @@ TEST(GaussianMixtureFactor, GaussianMixtureModel2) {
namespace test_two_state_estimation { namespace test_two_state_estimation {
/// Create Two State Bayes Network with measurements DiscreteKey m1(M(1), 2);
/// The Bayes network is P(z0|x0)P(x1|x0,m1)p(m1) and optionally p(z1|x1)
static HybridBayesNet CreateBayesNet(double mu0, double mu1, double sigma0,
double sigma1,
bool add_second_measurement = false,
double measurement_sigma = 3.0) {
DiscreteKey m1(M(1), 2);
Key z0 = Z(0), z1 = Z(1);
Key x0 = X(0), x1 = X(1);
HybridBayesNet hbn; void addMeasurement(HybridBayesNet& hbn, Key z_key, Key x_key, double sigma) {
auto measurement_model = noiseModel::Isotropic::Sigma(1, sigma);
hbn.emplace_shared<GaussianConditional>(z_key, Vector1(0.0), I_1x1, x_key,
-I_1x1, measurement_model);
}
auto measurement_model = noiseModel::Isotropic::Sigma(1, measurement_sigma); /// Create hybrid motion model p(x1 | x0, m1)
// Add measurement P(z0 | x0) static HybridGaussianConditional::shared_ptr CreateHybridMotionModel(
auto p_z0 = std::make_shared<GaussianConditional>( double mu0, double mu1, double sigma0, double sigma1) {
z0, Vector1(0.0), -I_1x1, x0, I_1x1, measurement_model);
hbn.push_back(p_z0);
// Add hybrid motion model
auto model0 = noiseModel::Isotropic::Sigma(1, sigma0); auto model0 = noiseModel::Isotropic::Sigma(1, sigma0);
auto model1 = noiseModel::Isotropic::Sigma(1, sigma1); auto model1 = noiseModel::Isotropic::Sigma(1, sigma1);
auto c0 = make_shared<GaussianConditional>(x1, Vector1(mu0), I_1x1, x0, auto c0 = make_shared<GaussianConditional>(X(1), Vector1(mu0), I_1x1, X(0),
-I_1x1, model0), -I_1x1, model0),
c1 = make_shared<GaussianConditional>(x1, Vector1(mu1), I_1x1, x0, c1 = make_shared<GaussianConditional>(X(1), Vector1(mu1), I_1x1, X(0),
-I_1x1, model1); -I_1x1, model1);
return std::make_shared<HybridGaussianConditional>(
KeyVector{X(1)}, KeyVector{X(0)}, DiscreteKeys{m1}, std::vector{c0, c1});
}
auto motion = std::make_shared<GaussianMixture>( /// Create two state Bayes network with 1 or two measurement models
KeyVector{x1}, KeyVector{x0}, DiscreteKeys{m1}, std::vector{c0, c1}); HybridBayesNet CreateBayesNet(
hbn.push_back(motion); const HybridGaussianConditional::shared_ptr& hybridMotionModel,
bool add_second_measurement = false) {
HybridBayesNet hbn;
// Add measurement model p(z0 | x0)
addMeasurement(hbn, Z(0), X(0), 3.0);
// Optionally add second measurement model p(z1 | x1)
if (add_second_measurement) { if (add_second_measurement) {
// Add second measurement addMeasurement(hbn, Z(1), X(1), 3.0);
auto p_z1 = std::make_shared<GaussianConditional>(
z1, Vector1(0.0), -I_1x1, x1, I_1x1, measurement_model);
hbn.push_back(p_z1);
} }
// Add hybrid motion model
hbn.push_back(hybridMotionModel);
// Discrete uniform prior. // Discrete uniform prior.
auto p_m1 = std::make_shared<DiscreteConditional>(m1, "0.5/0.5"); hbn.emplace_shared<DiscreteConditional>(m1, "50/50");
hbn.push_back(p_m1);
return hbn; return hbn;
} }
/// Approximate the discrete marginal P(m1) using importance sampling
std::pair<double, double> approximateDiscreteMarginal(
const HybridBayesNet& hbn,
const HybridGaussianConditional::shared_ptr& hybridMotionModel,
const VectorValues& given, size_t N = 100000) {
/// Create importance sampling network q(x0,x1,m) = p(x1|x0,m1) q(x0) P(m1),
/// using q(x0) = N(z0, sigmaQ) to sample x0.
HybridBayesNet q;
q.push_back(hybridMotionModel); // Add hybrid motion model
q.emplace_shared<GaussianConditional>(GaussianConditional::FromMeanAndStddev(
X(0), given.at(Z(0)), /* sigmaQ = */ 3.0)); // Add proposal q(x0) for x0
q.emplace_shared<DiscreteConditional>(m1, "50/50"); // Discrete prior.
// Do importance sampling
double w0 = 0.0, w1 = 0.0;
std::mt19937_64 rng(42);
for (int i = 0; i < N; i++) {
HybridValues sample = q.sample(&rng);
sample.insert(given);
double weight = hbn.evaluate(sample) / q.evaluate(sample);
(sample.atDiscrete(M(1)) == 0) ? w0 += weight : w1 += weight;
}
double pm1 = w1 / (w0 + w1);
std::cout << "p(m0) = " << 100 * (1.0 - pm1) << std::endl;
std::cout << "p(m1) = " << 100 * pm1 << std::endl;
return {1.0 - pm1, pm1};
}
} // namespace test_two_state_estimation } // namespace test_two_state_estimation
/* ************************************************************************* */ /* ************************************************************************* */
/** /**
* Test a model P(z0|x0)P(x1|x0,m1)P(z1|x1)P(m1). * Test a model p(z0|x0)p(z1|x1)p(x1|x0,m1)P(m1).
* *
* P(x1|x0,m1) has different means and same covariance. * p(x1|x0,m1) has mode-dependent mean but same covariance.
* *
* Converting to a factor graph gives us * Converting to a factor graph gives us ϕ(x0;z0)ϕ(x1;z1)ϕ(x1,x0,m1)P(m1)
* ϕ(x0)ϕ(x1,x0,m1)ϕ(x1)P(m1)
* *
* If we only have a measurement on z0, then * If we only have a measurement on x0, then
* the probability of m1 should be 0.5/0.5. * the posterior probability of m1 should be 0.5/0.5.
* Getting a measurement on z1 gives use more information. * Getting a measurement on z1 gives use more information.
*/ */
TEST(GaussianMixtureFactor, TwoStateModel) { TEST(HybridGaussianFactor, TwoStateModel) {
using namespace test_two_state_estimation; using namespace test_two_state_estimation;
double mu0 = 1.0, mu1 = 3.0; double mu0 = 1.0, mu1 = 3.0;
double sigma = 2.0; double sigma = 0.5;
auto hybridMotionModel = CreateHybridMotionModel(mu0, mu1, sigma, sigma);
DiscreteKey m1(M(1), 2);
Key z0 = Z(0), z1 = Z(1);
// Start with no measurement on x1, only on x0 // Start with no measurement on x1, only on x0
HybridBayesNet hbn = CreateBayesNet(mu0, mu1, sigma, sigma, false); const Vector1 z0(0.5);
VectorValues given; VectorValues given;
given.insert(z0, Vector1(0.5)); given.insert(Z(0), z0);
{ {
HybridBayesNet hbn = CreateBayesNet(hybridMotionModel);
HybridGaussianFactorGraph gfg = hbn.toFactorGraph(given); HybridGaussianFactorGraph gfg = hbn.toFactorGraph(given);
HybridBayesNet::shared_ptr bn = gfg.eliminateSequential(); HybridBayesNet::shared_ptr bn = gfg.eliminateSequential();
// Since no measurement on x1, we hedge our bets // Since no measurement on x1, we hedge our bets
DiscreteConditional expected(m1, "0.5/0.5"); // Importance sampling run with 100k samples gives 50.051/49.949
// approximateDiscreteMarginal(hbn, hybridMotionModel, given);
DiscreteConditional expected(m1, "50/50");
EXPECT(assert_equal(expected, *(bn->at(2)->asDiscrete()))); EXPECT(assert_equal(expected, *(bn->at(2)->asDiscrete())));
} }
{ {
// Now we add a measurement z1 on x1 // If we set z1=4.5 (>> 2.5 which is the halfway point),
hbn = CreateBayesNet(mu0, mu1, sigma, sigma, true); // probability of discrete mode should be leaning to m1==1.
const Vector1 z1(4.5);
given.insert(Z(1), z1);
// If we see z1=2.6 (> 2.5 which is the halfway point), HybridBayesNet hbn = CreateBayesNet(hybridMotionModel, true);
// discrete mode should say m1=1
given.insert(z1, Vector1(2.6));
HybridGaussianFactorGraph gfg = hbn.toFactorGraph(given); HybridGaussianFactorGraph gfg = hbn.toFactorGraph(given);
HybridBayesNet::shared_ptr bn = gfg.eliminateSequential(); HybridBayesNet::shared_ptr bn = gfg.eliminateSequential();
// Since we have a measurement on z2, we get a definite result // Since we have a measurement on x1, we get a definite result
DiscreteConditional expected(m1, "0.49772729/0.50227271"); // Values taken from an importance sampling run with 100k samples:
// regression // approximateDiscreteMarginal(hbn, hybridMotionModel, given);
EXPECT(assert_equal(expected, *(bn->at(2)->asDiscrete()), 1e-6)); DiscreteConditional expected(m1, "44.3854/55.6146");
EXPECT(assert_equal(expected, *(bn->at(2)->asDiscrete()), 0.002));
} }
} }
@ -500,89 +533,206 @@ TEST(GaussianMixtureFactor, TwoStateModel) {
* the P(m1) should be 0.5/0.5. * the P(m1) should be 0.5/0.5.
* Getting a measurement on z1 gives use more information. * Getting a measurement on z1 gives use more information.
*/ */
TEST(GaussianMixtureFactor, TwoStateModel2) { TEST(HybridGaussianFactor, TwoStateModel2) {
using namespace test_two_state_estimation; using namespace test_two_state_estimation;
double mu0 = 1.0, mu1 = 3.0; double mu0 = 1.0, mu1 = 3.0;
double sigma0 = 6.0, sigma1 = 4.0; double sigma0 = 0.5, sigma1 = 2.0;
auto model0 = noiseModel::Isotropic::Sigma(1, sigma0); auto hybridMotionModel = CreateHybridMotionModel(mu0, mu1, sigma0, sigma1);
auto model1 = noiseModel::Isotropic::Sigma(1, sigma1);
DiscreteKey m1(M(1), 2);
Key z0 = Z(0), z1 = Z(1);
// Start with no measurement on x1, only on x0 // Start with no measurement on x1, only on x0
HybridBayesNet hbn = CreateBayesNet(mu0, mu1, sigma0, sigma1, false); const Vector1 z0(0.5);
VectorValues given; VectorValues given;
given.insert(z0, Vector1(0.5)); given.insert(Z(0), z0);
{ {
// Start with no measurement on x1, only on x0 HybridBayesNet hbn = CreateBayesNet(hybridMotionModel);
HybridGaussianFactorGraph gfg = hbn.toFactorGraph(given); HybridGaussianFactorGraph gfg = hbn.toFactorGraph(given);
{ // Check that ratio of Bayes net and factor graph for different modes is
VectorValues vv{ // equal for several values of {x0,x1}.
{X(0), Vector1(0.0)}, {X(1), Vector1(1.0)}, {Z(0), Vector1(0.5)}}; for (VectorValues vv :
HybridValues hv0(vv, DiscreteValues{{M(1), 0}}), {VectorValues{{X(0), Vector1(0.0)}, {X(1), Vector1(1.0)}},
hv1(vv, DiscreteValues{{M(1), 1}}); VectorValues{{X(0), Vector1(0.5)}, {X(1), Vector1(3.0)}}}) {
EXPECT_DOUBLES_EQUAL(gfg.error(hv0) / hbn.error(hv0), vv.insert(given); // add measurements for HBN
gfg.error(hv1) / hbn.error(hv1), 1e-9); HybridValues hv0(vv, {{M(1), 0}}), hv1(vv, {{M(1), 1}});
}
{
VectorValues vv{
{X(0), Vector1(0.5)}, {X(1), Vector1(3.0)}, {Z(0), Vector1(0.5)}};
HybridValues hv0(vv, DiscreteValues{{M(1), 0}}),
hv1(vv, DiscreteValues{{M(1), 1}});
EXPECT_DOUBLES_EQUAL(gfg.error(hv0) / hbn.error(hv0), EXPECT_DOUBLES_EQUAL(gfg.error(hv0) / hbn.error(hv0),
gfg.error(hv1) / hbn.error(hv1), 1e-9); gfg.error(hv1) / hbn.error(hv1), 1e-9);
} }
HybridBayesNet::shared_ptr bn = gfg.eliminateSequential(); HybridBayesNet::shared_ptr bn = gfg.eliminateSequential();
// Importance sampling run with 100k samples gives 50.095/49.905
// approximateDiscreteMarginal(hbn, hybridMotionModel, given);
// Since no measurement on x1, we a 50/50 probability // Since no measurement on x1, we a 50/50 probability
auto p_m = bn->at(2)->asDiscrete(); auto p_m = bn->at(2)->asDiscrete();
EXPECT_DOUBLES_EQUAL(0.5, p_m->operator()(DiscreteValues{{m1.first, 0}}), EXPECT_DOUBLES_EQUAL(0.5, p_m->operator()({{M(1), 0}}), 1e-9);
1e-9); EXPECT_DOUBLES_EQUAL(0.5, p_m->operator()({{M(1), 1}}), 1e-9);
EXPECT_DOUBLES_EQUAL(0.5, p_m->operator()(DiscreteValues{{m1.first, 1}}),
1e-9);
} }
{ {
// Now we add a measurement z1 on x1 // Now we add a measurement z1 on x1
hbn = CreateBayesNet(mu0, mu1, sigma0, sigma1, true); const Vector1 z1(4.0); // favors m==1
given.insert(Z(1), z1);
given.insert(z1, Vector1(2.2)); HybridBayesNet hbn = CreateBayesNet(hybridMotionModel, true);
HybridGaussianFactorGraph gfg = hbn.toFactorGraph(given); HybridGaussianFactorGraph gfg = hbn.toFactorGraph(given);
{ // Check that ratio of Bayes net and factor graph for different modes is
VectorValues vv{{X(0), Vector1(0.0)}, // equal for several values of {x0,x1}.
{X(1), Vector1(1.0)}, for (VectorValues vv :
{Z(0), Vector1(0.5)}, {VectorValues{{X(0), Vector1(0.0)}, {X(1), Vector1(1.0)}},
{Z(1), Vector1(2.2)}}; VectorValues{{X(0), Vector1(0.5)}, {X(1), Vector1(3.0)}}}) {
HybridValues hv0(vv, DiscreteValues{{M(1), 0}}), vv.insert(given); // add measurements for HBN
hv1(vv, DiscreteValues{{M(1), 1}}); HybridValues hv0(vv, {{M(1), 0}}), hv1(vv, {{M(1), 1}});
EXPECT_DOUBLES_EQUAL(gfg.error(hv0) / hbn.error(hv0),
gfg.error(hv1) / hbn.error(hv1), 1e-9);
}
{
VectorValues vv{{X(0), Vector1(0.5)},
{X(1), Vector1(3.0)},
{Z(0), Vector1(0.5)},
{Z(1), Vector1(2.2)}};
HybridValues hv0(vv, DiscreteValues{{M(1), 0}}),
hv1(vv, DiscreteValues{{M(1), 1}});
EXPECT_DOUBLES_EQUAL(gfg.error(hv0) / hbn.error(hv0), EXPECT_DOUBLES_EQUAL(gfg.error(hv0) / hbn.error(hv0),
gfg.error(hv1) / hbn.error(hv1), 1e-9); gfg.error(hv1) / hbn.error(hv1), 1e-9);
} }
HybridBayesNet::shared_ptr bn = gfg.eliminateSequential(); HybridBayesNet::shared_ptr bn = gfg.eliminateSequential();
// Since we have a measurement on z2, we get a definite result // Values taken from an importance sampling run with 100k samples:
DiscreteConditional expected(m1, "0.44744586/0.55255414"); // approximateDiscreteMarginal(hbn, hybridMotionModel, given);
// regression DiscreteConditional expected(m1, "48.3158/51.6842");
EXPECT(assert_equal(expected, *(bn->at(2)->asDiscrete()), 1e-6)); EXPECT(assert_equal(expected, *(bn->at(2)->asDiscrete()), 0.002));
} }
{
// Add a different measurement z1 on x1 that favors m==0
const Vector1 z1(1.1);
given.insert_or_assign(Z(1), z1);
HybridBayesNet hbn = CreateBayesNet(hybridMotionModel, true);
HybridGaussianFactorGraph gfg = hbn.toFactorGraph(given);
HybridBayesNet::shared_ptr bn = gfg.eliminateSequential();
// Values taken from an importance sampling run with 100k samples:
// approximateDiscreteMarginal(hbn, hybridMotionModel, given);
DiscreteConditional expected(m1, "55.396/44.604");
EXPECT(assert_equal(expected, *(bn->at(2)->asDiscrete()), 0.002));
}
}
/* ************************************************************************* */
/**
* Test a model p(z0|x0)p(x1|x0,m1)p(z1|x1)p(m1).
*
* p(x1|x0,m1) has the same means but different covariances.
*
* Converting to a factor graph gives us
* ϕ(x0)ϕ(x1,x0,m1)ϕ(x1)p(m1)
*
* If we only have a measurement on z0, then
* the p(m1) should be 0.5/0.5.
* Getting a measurement on z1 gives use more information.
*/
TEST(HybridGaussianFactor, TwoStateModel3) {
using namespace test_two_state_estimation;
double mu = 1.0;
double sigma0 = 0.5, sigma1 = 2.0;
auto hybridMotionModel = CreateHybridMotionModel(mu, mu, sigma0, sigma1);
// Start with no measurement on x1, only on x0
const Vector1 z0(0.5);
VectorValues given;
given.insert(Z(0), z0);
{
HybridBayesNet hbn = CreateBayesNet(hybridMotionModel);
HybridGaussianFactorGraph gfg = hbn.toFactorGraph(given);
// Check that ratio of Bayes net and factor graph for different modes is
// equal for several values of {x0,x1}.
for (VectorValues vv :
{VectorValues{{X(0), Vector1(0.0)}, {X(1), Vector1(1.0)}},
VectorValues{{X(0), Vector1(0.5)}, {X(1), Vector1(3.0)}}}) {
vv.insert(given); // add measurements for HBN
HybridValues hv0(vv, {{M(1), 0}}), hv1(vv, {{M(1), 1}});
EXPECT_DOUBLES_EQUAL(gfg.error(hv0) / hbn.error(hv0),
gfg.error(hv1) / hbn.error(hv1), 1e-9);
}
HybridBayesNet::shared_ptr bn = gfg.eliminateSequential();
// Importance sampling run with 100k samples gives 50.095/49.905
// approximateDiscreteMarginal(hbn, hybridMotionModel, given);
// Since no measurement on x1, we a 50/50 probability
auto p_m = bn->at(2)->asDiscrete();
EXPECT_DOUBLES_EQUAL(0.5, p_m->operator()({{M(1), 0}}), 1e-9);
EXPECT_DOUBLES_EQUAL(0.5, p_m->operator()({{M(1), 1}}), 1e-9);
}
{
// Now we add a measurement z1 on x1
const Vector1 z1(4.0); // favors m==1
given.insert(Z(1), z1);
HybridBayesNet hbn = CreateBayesNet(hybridMotionModel, true);
HybridGaussianFactorGraph gfg = hbn.toFactorGraph(given);
// Check that ratio of Bayes net and factor graph for different modes is
// equal for several values of {x0,x1}.
for (VectorValues vv :
{VectorValues{{X(0), Vector1(0.0)}, {X(1), Vector1(1.0)}},
VectorValues{{X(0), Vector1(0.5)}, {X(1), Vector1(3.0)}}}) {
vv.insert(given); // add measurements for HBN
HybridValues hv0(vv, {{M(1), 0}}), hv1(vv, {{M(1), 1}});
EXPECT_DOUBLES_EQUAL(gfg.error(hv0) / hbn.error(hv0),
gfg.error(hv1) / hbn.error(hv1), 1e-9);
}
HybridBayesNet::shared_ptr bn = gfg.eliminateSequential();
// Values taken from an importance sampling run with 100k samples:
// approximateDiscreteMarginal(hbn, hybridMotionModel, given);
DiscreteConditional expected(m1, "51.7762/48.2238");
EXPECT(assert_equal(expected, *(bn->at(2)->asDiscrete()), 0.002));
}
{
// Add a different measurement z1 on x1 that favors m==1
const Vector1 z1(7.0);
given.insert_or_assign(Z(1), z1);
HybridBayesNet hbn = CreateBayesNet(hybridMotionModel, true);
HybridGaussianFactorGraph gfg = hbn.toFactorGraph(given);
HybridBayesNet::shared_ptr bn = gfg.eliminateSequential();
// Values taken from an importance sampling run with 100k samples:
// approximateDiscreteMarginal(hbn, hybridMotionModel, given);
DiscreteConditional expected(m1, "49.0762/50.9238");
EXPECT(assert_equal(expected, *(bn->at(2)->asDiscrete()), 0.005));
}
}
/* ************************************************************************* */
/**
* Same model, P(z0|x0)P(x1|x0,m1)P(z1|x1)P(m1), but now with very informative
* measurements and vastly different motion model: either stand still or move
* far. This yields a very informative posterior.
*/
TEST(HybridGaussianFactor, TwoStateModel4) {
using namespace test_two_state_estimation;
double mu0 = 0.0, mu1 = 10.0;
double sigma0 = 0.2, sigma1 = 5.0;
auto hybridMotionModel = CreateHybridMotionModel(mu0, mu1, sigma0, sigma1);
// We only check the 2-measurement case
const Vector1 z0(0.0), z1(10.0);
VectorValues given{{Z(0), z0}, {Z(1), z1}};
HybridBayesNet hbn = CreateBayesNet(hybridMotionModel, true);
HybridGaussianFactorGraph gfg = hbn.toFactorGraph(given);
HybridBayesNet::shared_ptr bn = gfg.eliminateSequential();
// Values taken from an importance sampling run with 100k samples:
// approximateDiscreteMarginal(hbn, hybridMotionModel, given);
DiscreteConditional expected(m1, "8.91527/91.0847");
EXPECT(assert_equal(expected, *(bn->at(2)->asDiscrete()), 0.002));
} }
/** /**

View File

@ -21,12 +21,12 @@
#include <gtsam/discrete/DecisionTreeFactor.h> #include <gtsam/discrete/DecisionTreeFactor.h>
#include <gtsam/discrete/DiscreteKey.h> #include <gtsam/discrete/DiscreteKey.h>
#include <gtsam/discrete/DiscreteValues.h> #include <gtsam/discrete/DiscreteValues.h>
#include <gtsam/hybrid/GaussianMixture.h>
#include <gtsam/hybrid/GaussianMixtureFactor.h>
#include <gtsam/hybrid/HybridBayesNet.h> #include <gtsam/hybrid/HybridBayesNet.h>
#include <gtsam/hybrid/HybridBayesTree.h> #include <gtsam/hybrid/HybridBayesTree.h>
#include <gtsam/hybrid/HybridConditional.h> #include <gtsam/hybrid/HybridConditional.h>
#include <gtsam/hybrid/HybridFactor.h> #include <gtsam/hybrid/HybridFactor.h>
#include <gtsam/hybrid/HybridGaussianConditional.h>
#include <gtsam/hybrid/HybridGaussianFactor.h>
#include <gtsam/hybrid/HybridGaussianFactorGraph.h> #include <gtsam/hybrid/HybridGaussianFactorGraph.h>
#include <gtsam/hybrid/HybridGaussianISAM.h> #include <gtsam/hybrid/HybridGaussianISAM.h>
#include <gtsam/hybrid/HybridValues.h> #include <gtsam/hybrid/HybridValues.h>
@ -71,13 +71,14 @@ TEST(HybridGaussianFactorGraph, Creation) {
// Define a gaussian mixture conditional P(x0|x1, c0) and add it to the factor // Define a gaussian mixture conditional P(x0|x1, c0) and add it to the factor
// graph // graph
GaussianMixture gm({X(0)}, {X(1)}, DiscreteKeys(DiscreteKey{M(0), 2}), HybridGaussianConditional gm(
GaussianMixture::Conditionals( {X(0)}, {X(1)}, DiscreteKeys(DiscreteKey{M(0), 2}),
M(0), HybridGaussianConditional::Conditionals(
std::make_shared<GaussianConditional>( M(0),
X(0), Z_3x1, I_3x3, X(1), I_3x3), std::make_shared<GaussianConditional>(X(0), Z_3x1, I_3x3, X(1),
std::make_shared<GaussianConditional>( I_3x3),
X(0), Vector3::Ones(), I_3x3, X(1), I_3x3))); std::make_shared<GaussianConditional>(X(0), Vector3::Ones(), I_3x3,
X(1), I_3x3)));
hfg.add(gm); hfg.add(gm);
EXPECT_LONGS_EQUAL(2, hfg.size()); EXPECT_LONGS_EQUAL(2, hfg.size());
@ -126,10 +127,10 @@ TEST(HybridGaussianFactorGraph, eliminateFullSequentialEqualChance) {
// Add a gaussian mixture factor ϕ(x1, c1) // Add a gaussian mixture factor ϕ(x1, c1)
DiscreteKey m1(M(1), 2); DiscreteKey m1(M(1), 2);
DecisionTree<Key, GaussianFactor::shared_ptr> dt( DecisionTree<Key, GaussianFactorValuePair> dt(
M(1), std::make_shared<JacobianFactor>(X(1), I_3x3, Z_3x1), M(1), {std::make_shared<JacobianFactor>(X(1), I_3x3, Z_3x1), 0.0},
std::make_shared<JacobianFactor>(X(1), I_3x3, Vector3::Ones())); {std::make_shared<JacobianFactor>(X(1), I_3x3, Vector3::Ones()), 0.0});
hfg.add(GaussianMixtureFactor({X(1)}, {m1}, dt)); hfg.add(HybridGaussianFactor({X(1)}, {m1}, dt));
auto result = hfg.eliminateSequential(); auto result = hfg.eliminateSequential();
@ -152,10 +153,10 @@ TEST(HybridGaussianFactorGraph, eliminateFullSequentialSimple) {
// Add factor between x0 and x1 // Add factor between x0 and x1
hfg.add(JacobianFactor(X(0), I_3x3, X(1), -I_3x3, Z_3x1)); hfg.add(JacobianFactor(X(0), I_3x3, X(1), -I_3x3, Z_3x1));
std::vector<GaussianFactor::shared_ptr> factors = { std::vector<GaussianFactorValuePair> factors = {
std::make_shared<JacobianFactor>(X(1), I_3x3, Z_3x1), {std::make_shared<JacobianFactor>(X(1), I_3x3, Z_3x1), 0.0},
std::make_shared<JacobianFactor>(X(1), I_3x3, Vector3::Ones())}; {std::make_shared<JacobianFactor>(X(1), I_3x3, Vector3::Ones()), 0.0}};
hfg.add(GaussianMixtureFactor({X(1)}, {m1}, factors)); hfg.add(HybridGaussianFactor({X(1)}, {m1}, factors));
// Discrete probability table for c1 // Discrete probability table for c1
hfg.add(DecisionTreeFactor(m1, {2, 8})); hfg.add(DecisionTreeFactor(m1, {2, 8}));
@ -177,10 +178,10 @@ TEST(HybridGaussianFactorGraph, eliminateFullMultifrontalSimple) {
hfg.add(JacobianFactor(X(0), I_3x3, Z_3x1)); hfg.add(JacobianFactor(X(0), I_3x3, Z_3x1));
hfg.add(JacobianFactor(X(0), I_3x3, X(1), -I_3x3, Z_3x1)); hfg.add(JacobianFactor(X(0), I_3x3, X(1), -I_3x3, Z_3x1));
hfg.add(GaussianMixtureFactor( std::vector<GaussianFactorValuePair> factors = {
{X(1)}, {{M(1), 2}}, {std::make_shared<JacobianFactor>(X(1), I_3x3, Z_3x1), 0.0},
{std::make_shared<JacobianFactor>(X(1), I_3x3, Z_3x1), {std::make_shared<JacobianFactor>(X(1), I_3x3, Vector3::Ones()), 0.0}};
std::make_shared<JacobianFactor>(X(1), I_3x3, Vector3::Ones())})); hfg.add(HybridGaussianFactor({X(1)}, {{M(1), 2}}, factors));
hfg.add(DecisionTreeFactor(m1, {2, 8})); hfg.add(DecisionTreeFactor(m1, {2, 8}));
// TODO(Varun) Adding extra discrete variable not connected to continuous // TODO(Varun) Adding extra discrete variable not connected to continuous
@ -207,12 +208,12 @@ TEST(HybridGaussianFactorGraph, eliminateFullMultifrontalCLG) {
hfg.add(JacobianFactor(X(0), I_3x3, X(1), -I_3x3, Z_3x1)); hfg.add(JacobianFactor(X(0), I_3x3, X(1), -I_3x3, Z_3x1));
// Decision tree with different modes on x1 // Decision tree with different modes on x1
DecisionTree<Key, GaussianFactor::shared_ptr> dt( DecisionTree<Key, GaussianFactorValuePair> dt(
M(1), std::make_shared<JacobianFactor>(X(1), I_3x3, Z_3x1), M(1), {std::make_shared<JacobianFactor>(X(1), I_3x3, Z_3x1), 0.0},
std::make_shared<JacobianFactor>(X(1), I_3x3, Vector3::Ones())); {std::make_shared<JacobianFactor>(X(1), I_3x3, Vector3::Ones()), 0.0});
// Hybrid factor P(x1|c1) // Hybrid factor P(x1|c1)
hfg.add(GaussianMixtureFactor({X(1)}, {m}, dt)); hfg.add(HybridGaussianFactor({X(1)}, {m}, dt));
// Prior factor on c1 // Prior factor on c1
hfg.add(DecisionTreeFactor(m, {2, 8})); hfg.add(DecisionTreeFactor(m, {2, 8}));
@ -237,16 +238,16 @@ TEST(HybridGaussianFactorGraph, eliminateFullMultifrontalTwoClique) {
hfg.add(JacobianFactor(X(1), I_3x3, X(2), -I_3x3, Z_3x1)); hfg.add(JacobianFactor(X(1), I_3x3, X(2), -I_3x3, Z_3x1));
{ {
hfg.add(GaussianMixtureFactor( std::vector<GaussianFactorValuePair> factors = {
{X(0)}, {{M(0), 2}}, {std::make_shared<JacobianFactor>(X(0), I_3x3, Z_3x1), 0.0},
{std::make_shared<JacobianFactor>(X(0), I_3x3, Z_3x1), {std::make_shared<JacobianFactor>(X(0), I_3x3, Vector3::Ones()), 0.0}};
std::make_shared<JacobianFactor>(X(0), I_3x3, Vector3::Ones())})); hfg.add(HybridGaussianFactor({X(0)}, {{M(0), 2}}, factors));
DecisionTree<Key, GaussianFactor::shared_ptr> dt1( DecisionTree<Key, GaussianFactorValuePair> dt1(
M(1), std::make_shared<JacobianFactor>(X(2), I_3x3, Z_3x1), M(1), {std::make_shared<JacobianFactor>(X(2), I_3x3, Z_3x1), 0.0},
std::make_shared<JacobianFactor>(X(2), I_3x3, Vector3::Ones())); {std::make_shared<JacobianFactor>(X(2), I_3x3, Vector3::Ones()), 0.0});
hfg.add(GaussianMixtureFactor({X(2)}, {{M(1), 2}}, dt1)); hfg.add(HybridGaussianFactor({X(2)}, {{M(1), 2}}, dt1));
} }
hfg.add(DecisionTreeFactor({{M(1), 2}, {M(2), 2}}, "1 2 3 4")); hfg.add(DecisionTreeFactor({{M(1), 2}, {M(2), 2}}, "1 2 3 4"));
@ -255,17 +256,17 @@ TEST(HybridGaussianFactorGraph, eliminateFullMultifrontalTwoClique) {
hfg.add(JacobianFactor(X(4), I_3x3, X(5), -I_3x3, Z_3x1)); hfg.add(JacobianFactor(X(4), I_3x3, X(5), -I_3x3, Z_3x1));
{ {
DecisionTree<Key, GaussianFactor::shared_ptr> dt( DecisionTree<Key, GaussianFactorValuePair> dt(
M(3), std::make_shared<JacobianFactor>(X(3), I_3x3, Z_3x1), M(3), {std::make_shared<JacobianFactor>(X(3), I_3x3, Z_3x1), 0.0},
std::make_shared<JacobianFactor>(X(3), I_3x3, Vector3::Ones())); {std::make_shared<JacobianFactor>(X(3), I_3x3, Vector3::Ones()), 0.0});
hfg.add(GaussianMixtureFactor({X(3)}, {{M(3), 2}}, dt)); hfg.add(HybridGaussianFactor({X(3)}, {{M(3), 2}}, dt));
DecisionTree<Key, GaussianFactor::shared_ptr> dt1( DecisionTree<Key, GaussianFactorValuePair> dt1(
M(2), std::make_shared<JacobianFactor>(X(5), I_3x3, Z_3x1), M(2), {std::make_shared<JacobianFactor>(X(5), I_3x3, Z_3x1), 0.0},
std::make_shared<JacobianFactor>(X(5), I_3x3, Vector3::Ones())); {std::make_shared<JacobianFactor>(X(5), I_3x3, Vector3::Ones()), 0.0});
hfg.add(GaussianMixtureFactor({X(5)}, {{M(2), 2}}, dt1)); hfg.add(HybridGaussianFactor({X(5)}, {{M(2), 2}}, dt1));
} }
auto ordering_full = auto ordering_full =
@ -551,11 +552,11 @@ TEST(HybridGaussianFactorGraph, optimize) {
hfg.add(JacobianFactor(X(0), I_3x3, Z_3x1)); hfg.add(JacobianFactor(X(0), I_3x3, Z_3x1));
hfg.add(JacobianFactor(X(0), I_3x3, X(1), -I_3x3, Z_3x1)); hfg.add(JacobianFactor(X(0), I_3x3, X(1), -I_3x3, Z_3x1));
DecisionTree<Key, GaussianFactor::shared_ptr> dt( DecisionTree<Key, GaussianFactorValuePair> dt(
C(1), std::make_shared<JacobianFactor>(X(1), I_3x3, Z_3x1), C(1), {std::make_shared<JacobianFactor>(X(1), I_3x3, Z_3x1), 0.0},
std::make_shared<JacobianFactor>(X(1), I_3x3, Vector3::Ones())); {std::make_shared<JacobianFactor>(X(1), I_3x3, Vector3::Ones()), 0.0});
hfg.add(GaussianMixtureFactor({X(1)}, {c1}, dt)); hfg.add(HybridGaussianFactor({X(1)}, {c1}, dt));
auto result = hfg.eliminateSequential(); auto result = hfg.eliminateSequential();
@ -681,8 +682,8 @@ TEST(HybridGaussianFactorGraph, ErrorTreeWithConditional) {
x0, -I_1x1, model0), x0, -I_1x1, model0),
c1 = make_shared<GaussianConditional>(f01, Vector1(mu), I_1x1, x1, I_1x1, c1 = make_shared<GaussianConditional>(f01, Vector1(mu), I_1x1, x1, I_1x1,
x0, -I_1x1, model1); x0, -I_1x1, model1);
hbn.emplace_shared<GaussianMixture>(KeyVector{f01}, KeyVector{x0, x1}, hbn.emplace_shared<HybridGaussianConditional>(
DiscreteKeys{m1}, std::vector{c0, c1}); KeyVector{f01}, KeyVector{x0, x1}, DiscreteKeys{m1}, std::vector{c0, c1});
// Discrete uniform prior. // Discrete uniform prior.
hbn.emplace_shared<DiscreteConditional>(m1, "0.5/0.5"); hbn.emplace_shared<DiscreteConditional>(m1, "0.5/0.5");
@ -717,7 +718,7 @@ TEST(HybridGaussianFactorGraph, assembleGraphTree) {
// Create expected decision tree with two factor graphs: // Create expected decision tree with two factor graphs:
// Get mixture factor: // Get mixture factor:
auto mixture = fg.at<GaussianMixtureFactor>(0); auto mixture = fg.at<HybridGaussianFactor>(0);
CHECK(mixture); CHECK(mixture);
// Get prior factor: // Get prior factor:
@ -805,7 +806,7 @@ TEST(HybridGaussianFactorGraph, EliminateTiny1) {
X(0), Vector1(14.1421), I_1x1 * 2.82843), X(0), Vector1(14.1421), I_1x1 * 2.82843),
conditional1 = std::make_shared<GaussianConditional>( conditional1 = std::make_shared<GaussianConditional>(
X(0), Vector1(10.1379), I_1x1 * 2.02759); X(0), Vector1(10.1379), I_1x1 * 2.02759);
expectedBayesNet.emplace_shared<GaussianMixture>( expectedBayesNet.emplace_shared<HybridGaussianConditional>(
KeyVector{X(0)}, KeyVector{}, DiscreteKeys{mode}, KeyVector{X(0)}, KeyVector{}, DiscreteKeys{mode},
std::vector{conditional0, conditional1}); std::vector{conditional0, conditional1});
@ -830,7 +831,7 @@ TEST(HybridGaussianFactorGraph, EliminateTiny1Swapped) {
HybridBayesNet bn; HybridBayesNet bn;
// Create Gaussian mixture z_0 = x0 + noise for each measurement. // Create Gaussian mixture z_0 = x0 + noise for each measurement.
auto gm = std::make_shared<GaussianMixture>( auto gm = std::make_shared<HybridGaussianConditional>(
KeyVector{Z(0)}, KeyVector{X(0)}, DiscreteKeys{mode}, KeyVector{Z(0)}, KeyVector{X(0)}, DiscreteKeys{mode},
std::vector{ std::vector{
GaussianConditional::sharedMeanAndStddev(Z(0), I_1x1, X(0), Z_1x1, 3), GaussianConditional::sharedMeanAndStddev(Z(0), I_1x1, X(0), Z_1x1, 3),
@ -862,7 +863,7 @@ TEST(HybridGaussianFactorGraph, EliminateTiny1Swapped) {
X(0), Vector1(10.1379), I_1x1 * 2.02759), X(0), Vector1(10.1379), I_1x1 * 2.02759),
conditional1 = std::make_shared<GaussianConditional>( conditional1 = std::make_shared<GaussianConditional>(
X(0), Vector1(14.1421), I_1x1 * 2.82843); X(0), Vector1(14.1421), I_1x1 * 2.82843);
expectedBayesNet.emplace_shared<GaussianMixture>( expectedBayesNet.emplace_shared<HybridGaussianConditional>(
KeyVector{X(0)}, KeyVector{}, DiscreteKeys{mode}, KeyVector{X(0)}, KeyVector{}, DiscreteKeys{mode},
std::vector{conditional0, conditional1}); std::vector{conditional0, conditional1});
@ -899,7 +900,7 @@ TEST(HybridGaussianFactorGraph, EliminateTiny2) {
X(0), Vector1(17.3205), I_1x1 * 3.4641), X(0), Vector1(17.3205), I_1x1 * 3.4641),
conditional1 = std::make_shared<GaussianConditional>( conditional1 = std::make_shared<GaussianConditional>(
X(0), Vector1(10.274), I_1x1 * 2.0548); X(0), Vector1(10.274), I_1x1 * 2.0548);
expectedBayesNet.emplace_shared<GaussianMixture>( expectedBayesNet.emplace_shared<HybridGaussianConditional>(
KeyVector{X(0)}, KeyVector{}, DiscreteKeys{mode}, KeyVector{X(0)}, KeyVector{}, DiscreteKeys{mode},
std::vector{conditional0, conditional1}); std::vector{conditional0, conditional1});
@ -946,7 +947,7 @@ TEST(HybridGaussianFactorGraph, EliminateSwitchingNetwork) {
for (size_t t : {0, 1, 2}) { for (size_t t : {0, 1, 2}) {
// Create Gaussian mixture on Z(t) conditioned on X(t) and mode N(t): // Create Gaussian mixture on Z(t) conditioned on X(t) and mode N(t):
const auto noise_mode_t = DiscreteKey{N(t), 2}; const auto noise_mode_t = DiscreteKey{N(t), 2};
bn.emplace_shared<GaussianMixture>( bn.emplace_shared<HybridGaussianConditional>(
KeyVector{Z(t)}, KeyVector{X(t)}, DiscreteKeys{noise_mode_t}, KeyVector{Z(t)}, KeyVector{X(t)}, DiscreteKeys{noise_mode_t},
std::vector{GaussianConditional::sharedMeanAndStddev(Z(t), I_1x1, X(t), std::vector{GaussianConditional::sharedMeanAndStddev(Z(t), I_1x1, X(t),
Z_1x1, 0.5), Z_1x1, 0.5),
@ -961,7 +962,7 @@ TEST(HybridGaussianFactorGraph, EliminateSwitchingNetwork) {
for (size_t t : {2, 1}) { for (size_t t : {2, 1}) {
// Create Gaussian mixture on X(t) conditioned on X(t-1) and mode M(t-1): // Create Gaussian mixture on X(t) conditioned on X(t-1) and mode M(t-1):
const auto motion_model_t = DiscreteKey{M(t), 2}; const auto motion_model_t = DiscreteKey{M(t), 2};
auto gm = std::make_shared<GaussianMixture>( auto gm = std::make_shared<HybridGaussianConditional>(
KeyVector{X(t)}, KeyVector{X(t - 1)}, DiscreteKeys{motion_model_t}, KeyVector{X(t)}, KeyVector{X(t - 1)}, DiscreteKeys{motion_model_t},
std::vector{GaussianConditional::sharedMeanAndStddev( std::vector{GaussianConditional::sharedMeanAndStddev(
X(t), I_1x1, X(t - 1), Z_1x1, 0.2), X(t), I_1x1, X(t - 1), Z_1x1, 0.2),

View File

@ -126,31 +126,34 @@ TEST(HybridGaussianElimination, IncrementalInference) {
/********************************************************/ /********************************************************/
// Run batch elimination so we can compare results. // Run batch elimination so we can compare results.
const Ordering ordering {X(0), X(1), X(2)}; const Ordering ordering{X(0), X(1), X(2)};
// Now we calculate the expected factors using full elimination // Now we calculate the expected factors using full elimination
const auto [expectedHybridBayesTree, expectedRemainingGraph] = const auto [expectedHybridBayesTree, expectedRemainingGraph] =
switching.linearizedFactorGraph.eliminatePartialMultifrontal(ordering); switching.linearizedFactorGraph.eliminatePartialMultifrontal(ordering);
// The densities on X(0) should be the same // The densities on X(0) should be the same
auto x0_conditional = auto x0_conditional = dynamic_pointer_cast<HybridGaussianConditional>(
dynamic_pointer_cast<GaussianMixture>(isam[X(0)]->conditional()->inner()); isam[X(0)]->conditional()->inner());
auto expected_x0_conditional = dynamic_pointer_cast<GaussianMixture>( auto expected_x0_conditional =
(*expectedHybridBayesTree)[X(0)]->conditional()->inner()); dynamic_pointer_cast<HybridGaussianConditional>(
(*expectedHybridBayesTree)[X(0)]->conditional()->inner());
EXPECT(assert_equal(*x0_conditional, *expected_x0_conditional)); EXPECT(assert_equal(*x0_conditional, *expected_x0_conditional));
// The densities on X(1) should be the same // The densities on X(1) should be the same
auto x1_conditional = auto x1_conditional = dynamic_pointer_cast<HybridGaussianConditional>(
dynamic_pointer_cast<GaussianMixture>(isam[X(1)]->conditional()->inner()); isam[X(1)]->conditional()->inner());
auto expected_x1_conditional = dynamic_pointer_cast<GaussianMixture>( auto expected_x1_conditional =
(*expectedHybridBayesTree)[X(1)]->conditional()->inner()); dynamic_pointer_cast<HybridGaussianConditional>(
(*expectedHybridBayesTree)[X(1)]->conditional()->inner());
EXPECT(assert_equal(*x1_conditional, *expected_x1_conditional)); EXPECT(assert_equal(*x1_conditional, *expected_x1_conditional));
// The densities on X(2) should be the same // The densities on X(2) should be the same
auto x2_conditional = auto x2_conditional = dynamic_pointer_cast<HybridGaussianConditional>(
dynamic_pointer_cast<GaussianMixture>(isam[X(2)]->conditional()->inner()); isam[X(2)]->conditional()->inner());
auto expected_x2_conditional = dynamic_pointer_cast<GaussianMixture>( auto expected_x2_conditional =
(*expectedHybridBayesTree)[X(2)]->conditional()->inner()); dynamic_pointer_cast<HybridGaussianConditional>(
(*expectedHybridBayesTree)[X(2)]->conditional()->inner());
EXPECT(assert_equal(*x2_conditional, *expected_x2_conditional)); EXPECT(assert_equal(*x2_conditional, *expected_x2_conditional));
// We only perform manual continuous elimination for 0,0. // We only perform manual continuous elimination for 0,0.
@ -279,9 +282,9 @@ TEST(HybridGaussianElimination, Approx_inference) {
// Check that the hybrid nodes of the bayes net match those of the pre-pruning // Check that the hybrid nodes of the bayes net match those of the pre-pruning
// bayes net, at the same positions. // bayes net, at the same positions.
auto &unprunedLastDensity = *dynamic_pointer_cast<GaussianMixture>( auto &unprunedLastDensity = *dynamic_pointer_cast<HybridGaussianConditional>(
unprunedHybridBayesTree->clique(X(3))->conditional()->inner()); unprunedHybridBayesTree->clique(X(3))->conditional()->inner());
auto &lastDensity = *dynamic_pointer_cast<GaussianMixture>( auto &lastDensity = *dynamic_pointer_cast<HybridGaussianConditional>(
incrementalHybrid[X(3)]->conditional()->inner()); incrementalHybrid[X(3)]->conditional()->inner());
std::vector<std::pair<DiscreteValues, double>> assignments = std::vector<std::pair<DiscreteValues, double>> assignments =
@ -381,11 +384,11 @@ TEST(HybridGaussianISAM, NonTrivial) {
// Add connecting poses similar to PoseFactors in GTD // Add connecting poses similar to PoseFactors in GTD
fg.emplace_shared<BetweenFactor<Pose2>>(X(0), Y(0), Pose2(0, 1.0, 0), fg.emplace_shared<BetweenFactor<Pose2>>(X(0), Y(0), Pose2(0, 1.0, 0),
poseNoise); poseNoise);
fg.emplace_shared<BetweenFactor<Pose2>>(Y(0), Z(0), Pose2(0, 1.0, 0), fg.emplace_shared<BetweenFactor<Pose2>>(Y(0), Z(0), Pose2(0, 1.0, 0),
poseNoise); poseNoise);
fg.emplace_shared<BetweenFactor<Pose2>>(Z(0), W(0), Pose2(0, 1.0, 0), fg.emplace_shared<BetweenFactor<Pose2>>(Z(0), W(0), Pose2(0, 1.0, 0),
poseNoise); poseNoise);
// Create initial estimate // Create initial estimate
Values initial; Values initial;
@ -414,24 +417,25 @@ TEST(HybridGaussianISAM, NonTrivial) {
KeyVector contKeys = {W(0), W(1)}; KeyVector contKeys = {W(0), W(1)};
auto noise_model = noiseModel::Isotropic::Sigma(3, 1.0); auto noise_model = noiseModel::Isotropic::Sigma(3, 1.0);
auto still = std::make_shared<PlanarMotionModel>(W(0), W(1), Pose2(0, 0, 0), auto still = std::make_shared<PlanarMotionModel>(W(0), W(1), Pose2(0, 0, 0),
noise_model), noise_model),
moving = std::make_shared<PlanarMotionModel>(W(0), W(1), odometry, moving = std::make_shared<PlanarMotionModel>(W(0), W(1), odometry,
noise_model); noise_model);
std::vector<PlanarMotionModel::shared_ptr> components = {moving, still}; std::vector<std::pair<PlanarMotionModel::shared_ptr, double>> components = {
auto mixtureFactor = std::make_shared<MixtureFactor>( {moving, 0.0}, {still, 0.0}};
auto mixtureFactor = std::make_shared<HybridNonlinearFactor>(
contKeys, DiscreteKeys{gtsam::DiscreteKey(M(1), 2)}, components); contKeys, DiscreteKeys{gtsam::DiscreteKey(M(1), 2)}, components);
fg.push_back(mixtureFactor); fg.push_back(mixtureFactor);
// Add equivalent of ImuFactor // Add equivalent of ImuFactor
fg.emplace_shared<BetweenFactor<Pose2>>(X(0), X(1), Pose2(1.0, 0.0, 0), fg.emplace_shared<BetweenFactor<Pose2>>(X(0), X(1), Pose2(1.0, 0.0, 0),
poseNoise); poseNoise);
// PoseFactors-like at k=1 // PoseFactors-like at k=1
fg.emplace_shared<BetweenFactor<Pose2>>(X(1), Y(1), Pose2(0, 1, 0), fg.emplace_shared<BetweenFactor<Pose2>>(X(1), Y(1), Pose2(0, 1, 0),
poseNoise); poseNoise);
fg.emplace_shared<BetweenFactor<Pose2>>(Y(1), Z(1), Pose2(0, 1, 0), fg.emplace_shared<BetweenFactor<Pose2>>(Y(1), Z(1), Pose2(0, 1, 0),
poseNoise); poseNoise);
fg.emplace_shared<BetweenFactor<Pose2>>(Z(1), W(1), Pose2(-1, 1, 0), fg.emplace_shared<BetweenFactor<Pose2>>(Z(1), W(1), Pose2(-1, 1, 0),
poseNoise); poseNoise);
initial.insert(X(1), Pose2(1.0, 0.0, 0.0)); initial.insert(X(1), Pose2(1.0, 0.0, 0.0));
initial.insert(Y(1), Pose2(1.0, 1.0, 0.0)); initial.insert(Y(1), Pose2(1.0, 1.0, 0.0));
@ -454,24 +458,24 @@ TEST(HybridGaussianISAM, NonTrivial) {
// Add odometry factor with discrete modes. // Add odometry factor with discrete modes.
contKeys = {W(1), W(2)}; contKeys = {W(1), W(2)};
still = std::make_shared<PlanarMotionModel>(W(1), W(2), Pose2(0, 0, 0), still = std::make_shared<PlanarMotionModel>(W(1), W(2), Pose2(0, 0, 0),
noise_model); noise_model);
moving = moving =
std::make_shared<PlanarMotionModel>(W(1), W(2), odometry, noise_model); std::make_shared<PlanarMotionModel>(W(1), W(2), odometry, noise_model);
components = {moving, still}; components = {{moving, 0.0}, {still, 0.0}};
mixtureFactor = std::make_shared<MixtureFactor>( mixtureFactor = std::make_shared<HybridNonlinearFactor>(
contKeys, DiscreteKeys{gtsam::DiscreteKey(M(2), 2)}, components); contKeys, DiscreteKeys{gtsam::DiscreteKey(M(2), 2)}, components);
fg.push_back(mixtureFactor); fg.push_back(mixtureFactor);
// Add equivalent of ImuFactor // Add equivalent of ImuFactor
fg.emplace_shared<BetweenFactor<Pose2>>(X(1), X(2), Pose2(1.0, 0.0, 0), fg.emplace_shared<BetweenFactor<Pose2>>(X(1), X(2), Pose2(1.0, 0.0, 0),
poseNoise); poseNoise);
// PoseFactors-like at k=1 // PoseFactors-like at k=1
fg.emplace_shared<BetweenFactor<Pose2>>(X(2), Y(2), Pose2(0, 1, 0), fg.emplace_shared<BetweenFactor<Pose2>>(X(2), Y(2), Pose2(0, 1, 0),
poseNoise); poseNoise);
fg.emplace_shared<BetweenFactor<Pose2>>(Y(2), Z(2), Pose2(0, 1, 0), fg.emplace_shared<BetweenFactor<Pose2>>(Y(2), Z(2), Pose2(0, 1, 0),
poseNoise); poseNoise);
fg.emplace_shared<BetweenFactor<Pose2>>(Z(2), W(2), Pose2(-2, 1, 0), fg.emplace_shared<BetweenFactor<Pose2>>(Z(2), W(2), Pose2(-2, 1, 0),
poseNoise); poseNoise);
initial.insert(X(2), Pose2(2.0, 0.0, 0.0)); initial.insert(X(2), Pose2(2.0, 0.0, 0.0));
initial.insert(Y(2), Pose2(2.0, 1.0, 0.0)); initial.insert(Y(2), Pose2(2.0, 1.0, 0.0));
@ -497,24 +501,24 @@ TEST(HybridGaussianISAM, NonTrivial) {
// Add odometry factor with discrete modes. // Add odometry factor with discrete modes.
contKeys = {W(2), W(3)}; contKeys = {W(2), W(3)};
still = std::make_shared<PlanarMotionModel>(W(2), W(3), Pose2(0, 0, 0), still = std::make_shared<PlanarMotionModel>(W(2), W(3), Pose2(0, 0, 0),
noise_model); noise_model);
moving = moving =
std::make_shared<PlanarMotionModel>(W(2), W(3), odometry, noise_model); std::make_shared<PlanarMotionModel>(W(2), W(3), odometry, noise_model);
components = {moving, still}; components = {{moving, 0.0}, {still, 0.0}};
mixtureFactor = std::make_shared<MixtureFactor>( mixtureFactor = std::make_shared<HybridNonlinearFactor>(
contKeys, DiscreteKeys{gtsam::DiscreteKey(M(3), 2)}, components); contKeys, DiscreteKeys{gtsam::DiscreteKey(M(3), 2)}, components);
fg.push_back(mixtureFactor); fg.push_back(mixtureFactor);
// Add equivalent of ImuFactor // Add equivalent of ImuFactor
fg.emplace_shared<BetweenFactor<Pose2>>(X(2), X(3), Pose2(1.0, 0.0, 0), fg.emplace_shared<BetweenFactor<Pose2>>(X(2), X(3), Pose2(1.0, 0.0, 0),
poseNoise); poseNoise);
// PoseFactors-like at k=3 // PoseFactors-like at k=3
fg.emplace_shared<BetweenFactor<Pose2>>(X(3), Y(3), Pose2(0, 1, 0), fg.emplace_shared<BetweenFactor<Pose2>>(X(3), Y(3), Pose2(0, 1, 0),
poseNoise); poseNoise);
fg.emplace_shared<BetweenFactor<Pose2>>(Y(3), Z(3), Pose2(0, 1, 0), fg.emplace_shared<BetweenFactor<Pose2>>(Y(3), Z(3), Pose2(0, 1, 0),
poseNoise); poseNoise);
fg.emplace_shared<BetweenFactor<Pose2>>(Z(3), W(3), Pose2(-3, 1, 0), fg.emplace_shared<BetweenFactor<Pose2>>(Z(3), W(3), Pose2(-3, 1, 0),
poseNoise); poseNoise);
initial.insert(X(3), Pose2(3.0, 0.0, 0.0)); initial.insert(X(3), Pose2(3.0, 0.0, 0.0));
initial.insert(Y(3), Pose2(3.0, 1.0, 0.0)); initial.insert(Y(3), Pose2(3.0, 1.0, 0.0));

View File

@ -10,8 +10,8 @@
* -------------------------------------------------------------------------- */ * -------------------------------------------------------------------------- */
/** /**
* @file testMixtureFactor.cpp * @file testHybridNonlinearFactor.cpp
* @brief Unit tests for MixtureFactor * @brief Unit tests for HybridNonlinearFactor
* @author Varun Agrawal * @author Varun Agrawal
* @date October 2022 * @date October 2022
*/ */
@ -20,8 +20,8 @@
#include <gtsam/discrete/DiscreteValues.h> #include <gtsam/discrete/DiscreteValues.h>
#include <gtsam/hybrid/HybridBayesNet.h> #include <gtsam/hybrid/HybridBayesNet.h>
#include <gtsam/hybrid/HybridGaussianFactorGraph.h> #include <gtsam/hybrid/HybridGaussianFactorGraph.h>
#include <gtsam/hybrid/HybridNonlinearFactor.h>
#include <gtsam/hybrid/HybridNonlinearFactorGraph.h> #include <gtsam/hybrid/HybridNonlinearFactorGraph.h>
#include <gtsam/hybrid/MixtureFactor.h>
#include <gtsam/inference/Symbol.h> #include <gtsam/inference/Symbol.h>
#include <gtsam/slam/BetweenFactor.h> #include <gtsam/slam/BetweenFactor.h>
@ -36,17 +36,17 @@ using symbol_shorthand::X;
/* ************************************************************************* */ /* ************************************************************************* */
// Check iterators of empty mixture. // Check iterators of empty mixture.
TEST(MixtureFactor, Constructor) { TEST(HybridNonlinearFactor, Constructor) {
MixtureFactor factor; HybridNonlinearFactor factor;
MixtureFactor::const_iterator const_it = factor.begin(); HybridNonlinearFactor::const_iterator const_it = factor.begin();
CHECK(const_it == factor.end()); CHECK(const_it == factor.end());
MixtureFactor::iterator it = factor.begin(); HybridNonlinearFactor::iterator it = factor.begin();
CHECK(it == factor.end()); CHECK(it == factor.end());
} }
/* ************************************************************************* */ /* ************************************************************************* */
// Test .print() output. // Test .print() output.
TEST(MixtureFactor, Printing) { TEST(HybridNonlinearFactor, Printing) {
DiscreteKey m1(1, 2); DiscreteKey m1(1, 2);
double between0 = 0.0; double between0 = 0.0;
double between1 = 1.0; double between1 = 1.0;
@ -58,13 +58,13 @@ TEST(MixtureFactor, Printing) {
std::make_shared<BetweenFactor<double>>(X(1), X(2), between0, model); std::make_shared<BetweenFactor<double>>(X(1), X(2), between0, model);
auto f1 = auto f1 =
std::make_shared<BetweenFactor<double>>(X(1), X(2), between1, model); std::make_shared<BetweenFactor<double>>(X(1), X(2), between1, model);
std::vector<NonlinearFactor::shared_ptr> factors{f0, f1}; std::vector<NonlinearFactorValuePair> factors{{f0, 0.0}, {f1, 0.0}};
MixtureFactor mixtureFactor({X(1), X(2)}, {m1}, factors); HybridNonlinearFactor mixtureFactor({X(1), X(2)}, {m1}, factors);
std::string expected = std::string expected =
R"(Hybrid [x1 x2; 1] R"(Hybrid [x1 x2; 1]
MixtureFactor HybridNonlinearFactor
Choice(1) Choice(1)
0 Leaf Nonlinear factor on 2 keys 0 Leaf Nonlinear factor on 2 keys
1 Leaf Nonlinear factor on 2 keys 1 Leaf Nonlinear factor on 2 keys
@ -73,7 +73,7 @@ MixtureFactor
} }
/* ************************************************************************* */ /* ************************************************************************* */
static MixtureFactor getMixtureFactor() { static HybridNonlinearFactor getHybridNonlinearFactor() {
DiscreteKey m1(1, 2); DiscreteKey m1(1, 2);
double between0 = 0.0; double between0 = 0.0;
@ -86,15 +86,15 @@ static MixtureFactor getMixtureFactor() {
std::make_shared<BetweenFactor<double>>(X(1), X(2), between0, model); std::make_shared<BetweenFactor<double>>(X(1), X(2), between0, model);
auto f1 = auto f1 =
std::make_shared<BetweenFactor<double>>(X(1), X(2), between1, model); std::make_shared<BetweenFactor<double>>(X(1), X(2), between1, model);
std::vector<NonlinearFactor::shared_ptr> factors{f0, f1}; std::vector<NonlinearFactorValuePair> factors{{f0, 0.0}, {f1, 0.0}};
return MixtureFactor({X(1), X(2)}, {m1}, factors); return HybridNonlinearFactor({X(1), X(2)}, {m1}, factors);
} }
/* ************************************************************************* */ /* ************************************************************************* */
// Test the error of the MixtureFactor // Test the error of the HybridNonlinearFactor
TEST(MixtureFactor, Error) { TEST(HybridNonlinearFactor, Error) {
auto mixtureFactor = getMixtureFactor(); auto mixtureFactor = getHybridNonlinearFactor();
Values continuousValues; Values continuousValues;
continuousValues.insert<double>(X(1), 0); continuousValues.insert<double>(X(1), 0);
@ -112,9 +112,9 @@ TEST(MixtureFactor, Error) {
} }
/* ************************************************************************* */ /* ************************************************************************* */
// Test dim of the MixtureFactor // Test dim of the HybridNonlinearFactor
TEST(MixtureFactor, Dim) { TEST(HybridNonlinearFactor, Dim) {
auto mixtureFactor = getMixtureFactor(); auto mixtureFactor = getHybridNonlinearFactor();
EXPECT_LONGS_EQUAL(1, mixtureFactor.dim()); EXPECT_LONGS_EQUAL(1, mixtureFactor.dim());
} }

View File

@ -23,10 +23,11 @@
#include <gtsam/geometry/Pose2.h> #include <gtsam/geometry/Pose2.h>
#include <gtsam/hybrid/HybridEliminationTree.h> #include <gtsam/hybrid/HybridEliminationTree.h>
#include <gtsam/hybrid/HybridFactor.h> #include <gtsam/hybrid/HybridFactor.h>
#include <gtsam/hybrid/HybridNonlinearFactor.h>
#include <gtsam/hybrid/HybridNonlinearFactorGraph.h> #include <gtsam/hybrid/HybridNonlinearFactorGraph.h>
#include <gtsam/hybrid/MixtureFactor.h>
#include <gtsam/linear/GaussianBayesNet.h> #include <gtsam/linear/GaussianBayesNet.h>
#include <gtsam/linear/GaussianFactorGraph.h> #include <gtsam/linear/GaussianFactorGraph.h>
#include <gtsam/linear/NoiseModel.h>
#include <gtsam/nonlinear/NonlinearFactorGraph.h> #include <gtsam/nonlinear/NonlinearFactorGraph.h>
#include <gtsam/nonlinear/PriorFactor.h> #include <gtsam/nonlinear/PriorFactor.h>
#include <gtsam/sam/BearingRangeFactor.h> #include <gtsam/sam/BearingRangeFactor.h>
@ -105,7 +106,7 @@ TEST(HybridNonlinearFactorGraph, Resize) {
auto discreteFactor = std::make_shared<DecisionTreeFactor>(); auto discreteFactor = std::make_shared<DecisionTreeFactor>();
fg.push_back(discreteFactor); fg.push_back(discreteFactor);
auto dcFactor = std::make_shared<MixtureFactor>(); auto dcFactor = std::make_shared<HybridNonlinearFactor>();
fg.push_back(dcFactor); fg.push_back(dcFactor);
EXPECT_LONGS_EQUAL(fg.size(), 3); EXPECT_LONGS_EQUAL(fg.size(), 3);
@ -131,8 +132,9 @@ TEST(HybridGaussianFactorGraph, Resize) {
auto still = std::make_shared<MotionModel>(X(0), X(1), 0.0, noise_model), auto still = std::make_shared<MotionModel>(X(0), X(1), 0.0, noise_model),
moving = std::make_shared<MotionModel>(X(0), X(1), 1.0, noise_model); moving = std::make_shared<MotionModel>(X(0), X(1), 1.0, noise_model);
std::vector<MotionModel::shared_ptr> components = {still, moving}; std::vector<std::pair<MotionModel::shared_ptr, double>> components = {
auto dcFactor = std::make_shared<MixtureFactor>( {still, 0.0}, {moving, 0.0}};
auto dcFactor = std::make_shared<HybridNonlinearFactor>(
contKeys, DiscreteKeys{gtsam::DiscreteKey(M(1), 2)}, components); contKeys, DiscreteKeys{gtsam::DiscreteKey(M(1), 2)}, components);
nhfg.push_back(dcFactor); nhfg.push_back(dcFactor);
@ -150,10 +152,10 @@ TEST(HybridGaussianFactorGraph, Resize) {
} }
/*************************************************************************** /***************************************************************************
* Test that the MixtureFactor reports correctly if the number of continuous * Test that the HybridNonlinearFactor reports correctly if the number of
* keys provided do not match the keys in the factors. * continuous keys provided do not match the keys in the factors.
*/ */
TEST(HybridGaussianFactorGraph, MixtureFactor) { TEST(HybridGaussianFactorGraph, HybridNonlinearFactor) {
auto nonlinearFactor = std::make_shared<BetweenFactor<double>>( auto nonlinearFactor = std::make_shared<BetweenFactor<double>>(
X(0), X(1), 0.0, Isotropic::Sigma(1, 0.1)); X(0), X(1), 0.0, Isotropic::Sigma(1, 0.1));
auto discreteFactor = std::make_shared<DecisionTreeFactor>(); auto discreteFactor = std::make_shared<DecisionTreeFactor>();
@ -162,16 +164,17 @@ TEST(HybridGaussianFactorGraph, MixtureFactor) {
auto still = std::make_shared<MotionModel>(X(0), X(1), 0.0, noise_model), auto still = std::make_shared<MotionModel>(X(0), X(1), 0.0, noise_model),
moving = std::make_shared<MotionModel>(X(0), X(1), 1.0, noise_model); moving = std::make_shared<MotionModel>(X(0), X(1), 1.0, noise_model);
std::vector<MotionModel::shared_ptr> components = {still, moving}; std::vector<std::pair<MotionModel::shared_ptr, double>> components = {
{still, 0.0}, {moving, 0.0}};
// Check for exception when number of continuous keys are under-specified. // Check for exception when number of continuous keys are under-specified.
KeyVector contKeys = {X(0)}; KeyVector contKeys = {X(0)};
THROWS_EXCEPTION(std::make_shared<MixtureFactor>( THROWS_EXCEPTION(std::make_shared<HybridNonlinearFactor>(
contKeys, DiscreteKeys{gtsam::DiscreteKey(M(1), 2)}, components)); contKeys, DiscreteKeys{gtsam::DiscreteKey(M(1), 2)}, components));
// Check for exception when number of continuous keys are too many. // Check for exception when number of continuous keys are too many.
contKeys = {X(0), X(1), X(2)}; contKeys = {X(0), X(1), X(2)};
THROWS_EXCEPTION(std::make_shared<MixtureFactor>( THROWS_EXCEPTION(std::make_shared<HybridNonlinearFactor>(
contKeys, DiscreteKeys{gtsam::DiscreteKey(M(1), 2)}, components)); contKeys, DiscreteKeys{gtsam::DiscreteKey(M(1), 2)}, components));
} }
@ -195,7 +198,7 @@ TEST(HybridFactorGraph, PushBack) {
fg = HybridNonlinearFactorGraph(); fg = HybridNonlinearFactorGraph();
auto dcFactor = std::make_shared<MixtureFactor>(); auto dcFactor = std::make_shared<HybridNonlinearFactor>();
fg.push_back(dcFactor); fg.push_back(dcFactor);
EXPECT_LONGS_EQUAL(fg.size(), 1); EXPECT_LONGS_EQUAL(fg.size(), 1);
@ -350,7 +353,8 @@ TEST(HybridGaussianElimination, EliminateHybrid_2_Variable) {
EliminateHybrid(factors, ordering); EliminateHybrid(factors, ordering);
auto gaussianConditionalMixture = auto gaussianConditionalMixture =
dynamic_pointer_cast<GaussianMixture>(hybridConditionalMixture->inner()); dynamic_pointer_cast<HybridGaussianConditional>(
hybridConditionalMixture->inner());
CHECK(gaussianConditionalMixture); CHECK(gaussianConditionalMixture);
// Frontals = [x0, x1] // Frontals = [x0, x1]
@ -413,7 +417,8 @@ TEST(HybridFactorGraph, PrintErrors) {
// fg.print(); // fg.print();
// std::cout << "\n\n\n" << std::endl; // std::cout << "\n\n\n" << std::endl;
// fg.printErrors( // fg.printErrors(
// HybridValues(hv.continuous(), DiscreteValues(), self.linearizationPoint)); // HybridValues(hv.continuous(), DiscreteValues(),
// self.linearizationPoint));
} }
/**************************************************************************** /****************************************************************************
@ -438,7 +443,7 @@ TEST(HybridFactorGraph, Full_Elimination) {
DiscreteFactorGraph discrete_fg; DiscreteFactorGraph discrete_fg;
// TODO(Varun) Make this a function of HybridGaussianFactorGraph? // TODO(Varun) Make this a function of HybridGaussianFactorGraph?
for (auto& factor : (*remainingFactorGraph_partial)) { for (auto &factor : (*remainingFactorGraph_partial)) {
auto df = dynamic_pointer_cast<DiscreteFactor>(factor); auto df = dynamic_pointer_cast<DiscreteFactor>(factor);
assert(df); assert(df);
discrete_fg.push_back(df); discrete_fg.push_back(df);
@ -510,7 +515,7 @@ factor 0:
b = [ -10 ] b = [ -10 ]
No noise model No noise model
factor 1: factor 1:
GaussianMixtureFactor HybridGaussianFactor
Hybrid [x0 x1; m0]{ Hybrid [x0 x1; m0]{
Choice(m0) Choice(m0)
0 Leaf : 0 Leaf :
@ -535,7 +540,7 @@ Hybrid [x0 x1; m0]{
} }
factor 2: factor 2:
GaussianMixtureFactor HybridGaussianFactor
Hybrid [x1 x2; m1]{ Hybrid [x1 x2; m1]{
Choice(m1) Choice(m1)
0 Leaf : 0 Leaf :
@ -799,8 +804,9 @@ TEST(HybridFactorGraph, DefaultDecisionTree) {
noise_model), noise_model),
moving = std::make_shared<PlanarMotionModel>(X(0), X(1), odometry, moving = std::make_shared<PlanarMotionModel>(X(0), X(1), odometry,
noise_model); noise_model);
std::vector<PlanarMotionModel::shared_ptr> motion_models = {still, moving}; std::vector<std::pair<PlanarMotionModel::shared_ptr, double>> motion_models =
fg.emplace_shared<MixtureFactor>( {{still, 0.0}, {moving, 0.0}};
fg.emplace_shared<HybridNonlinearFactor>(
contKeys, DiscreteKeys{gtsam::DiscreteKey(M(1), 2)}, motion_models); contKeys, DiscreteKeys{gtsam::DiscreteKey(M(1), 2)}, motion_models);
// Add Range-Bearing measurements to from X0 to L0 and X1 to L1. // Add Range-Bearing measurements to from X0 to L0 and X1 to L1.
@ -836,9 +842,174 @@ TEST(HybridFactorGraph, DefaultDecisionTree) {
EXPECT_LONGS_EQUAL(1, remainingFactorGraph->size()); EXPECT_LONGS_EQUAL(1, remainingFactorGraph->size());
} }
namespace test_relinearization {
/**
* @brief Create a Factor Graph by directly specifying all
* the factors instead of creating conditionals first.
* This way we can directly provide the likelihoods and
* then perform (re-)linearization.
*
* @param means The means of the GaussianMixtureFactor components.
* @param sigmas The covariances of the GaussianMixtureFactor components.
* @param m1 The discrete key.
* @param x0_measurement A measurement on X0
* @return HybridGaussianFactorGraph
*/
static HybridNonlinearFactorGraph CreateFactorGraph(
const std::vector<double> &means, const std::vector<double> &sigmas,
DiscreteKey &m1, double x0_measurement) {
auto model0 = noiseModel::Isotropic::Sigma(1, sigmas[0]);
auto model1 = noiseModel::Isotropic::Sigma(1, sigmas[1]);
auto prior_noise = noiseModel::Isotropic::Sigma(1, 1e-3);
auto f0 =
std::make_shared<BetweenFactor<double>>(X(0), X(1), means[0], model0);
auto f1 =
std::make_shared<BetweenFactor<double>>(X(0), X(1), means[1], model1);
// Create HybridNonlinearFactor
std::vector<NonlinearFactorValuePair> factors{
{f0, ComputeLogNormalizer(model0)}, {f1, ComputeLogNormalizer(model1)}};
HybridNonlinearFactor mixtureFactor({X(0), X(1)}, {m1}, factors);
HybridNonlinearFactorGraph hfg;
hfg.push_back(mixtureFactor);
hfg.push_back(PriorFactor<double>(X(0), x0_measurement, prior_noise));
return hfg;
}
} // namespace test_relinearization
/* ************************************************************************* */ /* ************************************************************************* */
/**
* @brief Test components with differing means but the same covariances.
* The factor graph is
* *-X1-*-X2
* |
* M1
*/
TEST(HybridNonlinearFactorGraph, DifferentMeans) {
using namespace test_relinearization;
DiscreteKey m1(M(1), 2);
Values values;
double x0 = 0.0, x1 = 1.75;
values.insert(X(0), x0);
values.insert(X(1), x1);
std::vector<double> means = {0.0, 2.0}, sigmas = {1e-0, 1e-0};
HybridNonlinearFactorGraph hfg = CreateFactorGraph(means, sigmas, m1, x0);
{
auto bn = hfg.linearize(values)->eliminateSequential();
HybridValues actual = bn->optimize();
HybridValues expected(
VectorValues{{X(0), Vector1(0.0)}, {X(1), Vector1(-1.75)}},
DiscreteValues{{M(1), 0}});
EXPECT(assert_equal(expected, actual));
DiscreteValues dv0{{M(1), 0}};
VectorValues cont0 = bn->optimize(dv0);
double error0 = bn->error(HybridValues(cont0, dv0));
// TODO(Varun) Perform importance sampling to estimate error?
// regression
EXPECT_DOUBLES_EQUAL(0.69314718056, error0, 1e-9);
DiscreteValues dv1{{M(1), 1}};
VectorValues cont1 = bn->optimize(dv1);
double error1 = bn->error(HybridValues(cont1, dv1));
EXPECT_DOUBLES_EQUAL(error0, error1, 1e-9);
}
{
// Add measurement on x1
auto prior_noise = noiseModel::Isotropic::Sigma(1, 1e-3);
hfg.push_back(PriorFactor<double>(X(1), means[1], prior_noise));
auto bn = hfg.linearize(values)->eliminateSequential();
HybridValues actual = bn->optimize();
HybridValues expected(
VectorValues{{X(0), Vector1(0.0)}, {X(1), Vector1(0.25)}},
DiscreteValues{{M(1), 1}});
EXPECT(assert_equal(expected, actual));
{
DiscreteValues dv{{M(1), 0}};
VectorValues cont = bn->optimize(dv);
double error = bn->error(HybridValues(cont, dv));
// regression
EXPECT_DOUBLES_EQUAL(2.12692448787, error, 1e-9);
}
{
DiscreteValues dv{{M(1), 1}};
VectorValues cont = bn->optimize(dv);
double error = bn->error(HybridValues(cont, dv));
// regression
EXPECT_DOUBLES_EQUAL(0.126928487854, error, 1e-9);
}
}
}
/* ************************************************************************* */
/**
* @brief Test components with differing covariances but the same means.
* The factor graph is
* *-X1-*-X2
* |
* M1
*/
TEST_DISABLED(HybridNonlinearFactorGraph, DifferentCovariances) {
using namespace test_relinearization;
DiscreteKey m1(M(1), 2);
Values values;
double x0 = 1.0, x1 = 1.0;
values.insert(X(0), x0);
values.insert(X(1), x1);
std::vector<double> means = {0.0, 0.0}, sigmas = {1e2, 1e-2};
// Create FG with HybridNonlinearFactor and prior on X1
HybridNonlinearFactorGraph hfg = CreateFactorGraph(means, sigmas, m1, x0);
// Linearize and eliminate
auto hbn = hfg.linearize(values)->eliminateSequential();
VectorValues cv;
cv.insert(X(0), Vector1(0.0));
cv.insert(X(1), Vector1(0.0));
// Check that the error values at the MLE point μ.
AlgebraicDecisionTree<Key> errorTree = hbn->errorTree(cv);
DiscreteValues dv0{{M(1), 0}};
DiscreteValues dv1{{M(1), 1}};
// regression
EXPECT_DOUBLES_EQUAL(9.90348755254, errorTree(dv0), 1e-9);
EXPECT_DOUBLES_EQUAL(0.69314718056, errorTree(dv1), 1e-9);
DiscreteConditional expected_m1(m1, "0.5/0.5");
DiscreteConditional actual_m1 = *(hbn->at(2)->asDiscrete());
EXPECT(assert_equal(expected_m1, actual_m1));
}
/* *************************************************************************
*/
int main() { int main() {
TestResult tr; TestResult tr;
return TestRegistry::runAllTests(tr); return TestRegistry::runAllTests(tr);
} }
/* ************************************************************************* */ /* *************************************************************************
*/

View File

@ -143,7 +143,7 @@ TEST(HybridNonlinearISAM, IncrementalInference) {
/********************************************************/ /********************************************************/
// Run batch elimination so we can compare results. // Run batch elimination so we can compare results.
const Ordering ordering {X(0), X(1), X(2)}; const Ordering ordering{X(0), X(1), X(2)};
// Now we calculate the actual factors using full elimination // Now we calculate the actual factors using full elimination
const auto [expectedHybridBayesTree, expectedRemainingGraph] = const auto [expectedHybridBayesTree, expectedRemainingGraph] =
@ -151,24 +151,27 @@ TEST(HybridNonlinearISAM, IncrementalInference) {
.BaseEliminateable::eliminatePartialMultifrontal(ordering); .BaseEliminateable::eliminatePartialMultifrontal(ordering);
// The densities on X(1) should be the same // The densities on X(1) should be the same
auto x0_conditional = dynamic_pointer_cast<GaussianMixture>( auto x0_conditional = dynamic_pointer_cast<HybridGaussianConditional>(
bayesTree[X(0)]->conditional()->inner()); bayesTree[X(0)]->conditional()->inner());
auto expected_x0_conditional = dynamic_pointer_cast<GaussianMixture>( auto expected_x0_conditional =
(*expectedHybridBayesTree)[X(0)]->conditional()->inner()); dynamic_pointer_cast<HybridGaussianConditional>(
(*expectedHybridBayesTree)[X(0)]->conditional()->inner());
EXPECT(assert_equal(*x0_conditional, *expected_x0_conditional)); EXPECT(assert_equal(*x0_conditional, *expected_x0_conditional));
// The densities on X(1) should be the same // The densities on X(1) should be the same
auto x1_conditional = dynamic_pointer_cast<GaussianMixture>( auto x1_conditional = dynamic_pointer_cast<HybridGaussianConditional>(
bayesTree[X(1)]->conditional()->inner()); bayesTree[X(1)]->conditional()->inner());
auto expected_x1_conditional = dynamic_pointer_cast<GaussianMixture>( auto expected_x1_conditional =
(*expectedHybridBayesTree)[X(1)]->conditional()->inner()); dynamic_pointer_cast<HybridGaussianConditional>(
(*expectedHybridBayesTree)[X(1)]->conditional()->inner());
EXPECT(assert_equal(*x1_conditional, *expected_x1_conditional)); EXPECT(assert_equal(*x1_conditional, *expected_x1_conditional));
// The densities on X(2) should be the same // The densities on X(2) should be the same
auto x2_conditional = dynamic_pointer_cast<GaussianMixture>( auto x2_conditional = dynamic_pointer_cast<HybridGaussianConditional>(
bayesTree[X(2)]->conditional()->inner()); bayesTree[X(2)]->conditional()->inner());
auto expected_x2_conditional = dynamic_pointer_cast<GaussianMixture>( auto expected_x2_conditional =
(*expectedHybridBayesTree)[X(2)]->conditional()->inner()); dynamic_pointer_cast<HybridGaussianConditional>(
(*expectedHybridBayesTree)[X(2)]->conditional()->inner());
EXPECT(assert_equal(*x2_conditional, *expected_x2_conditional)); EXPECT(assert_equal(*x2_conditional, *expected_x2_conditional));
// We only perform manual continuous elimination for 0,0. // We only perform manual continuous elimination for 0,0.
@ -300,9 +303,9 @@ TEST(HybridNonlinearISAM, Approx_inference) {
// Check that the hybrid nodes of the bayes net match those of the pre-pruning // Check that the hybrid nodes of the bayes net match those of the pre-pruning
// bayes net, at the same positions. // bayes net, at the same positions.
auto &unprunedLastDensity = *dynamic_pointer_cast<GaussianMixture>( auto &unprunedLastDensity = *dynamic_pointer_cast<HybridGaussianConditional>(
unprunedHybridBayesTree->clique(X(3))->conditional()->inner()); unprunedHybridBayesTree->clique(X(3))->conditional()->inner());
auto &lastDensity = *dynamic_pointer_cast<GaussianMixture>( auto &lastDensity = *dynamic_pointer_cast<HybridGaussianConditional>(
bayesTree[X(3)]->conditional()->inner()); bayesTree[X(3)]->conditional()->inner());
std::vector<std::pair<DiscreteValues, double>> assignments = std::vector<std::pair<DiscreteValues, double>> assignments =
@ -410,11 +413,11 @@ TEST(HybridNonlinearISAM, NonTrivial) {
// Add connecting poses similar to PoseFactors in GTD // Add connecting poses similar to PoseFactors in GTD
fg.emplace_shared<BetweenFactor<Pose2>>(X(0), Y(0), Pose2(0, 1.0, 0), fg.emplace_shared<BetweenFactor<Pose2>>(X(0), Y(0), Pose2(0, 1.0, 0),
poseNoise); poseNoise);
fg.emplace_shared<BetweenFactor<Pose2>>(Y(0), Z(0), Pose2(0, 1.0, 0), fg.emplace_shared<BetweenFactor<Pose2>>(Y(0), Z(0), Pose2(0, 1.0, 0),
poseNoise); poseNoise);
fg.emplace_shared<BetweenFactor<Pose2>>(Z(0), W(0), Pose2(0, 1.0, 0), fg.emplace_shared<BetweenFactor<Pose2>>(Z(0), W(0), Pose2(0, 1.0, 0),
poseNoise); poseNoise);
// Create initial estimate // Create initial estimate
Values initial; Values initial;
@ -433,24 +436,25 @@ TEST(HybridNonlinearISAM, NonTrivial) {
KeyVector contKeys = {W(0), W(1)}; KeyVector contKeys = {W(0), W(1)};
auto noise_model = noiseModel::Isotropic::Sigma(3, 1.0); auto noise_model = noiseModel::Isotropic::Sigma(3, 1.0);
auto still = std::make_shared<PlanarMotionModel>(W(0), W(1), Pose2(0, 0, 0), auto still = std::make_shared<PlanarMotionModel>(W(0), W(1), Pose2(0, 0, 0),
noise_model), noise_model),
moving = std::make_shared<PlanarMotionModel>(W(0), W(1), odometry, moving = std::make_shared<PlanarMotionModel>(W(0), W(1), odometry,
noise_model); noise_model);
std::vector<PlanarMotionModel::shared_ptr> components = {moving, still}; std::vector<std::pair<PlanarMotionModel::shared_ptr, double>> components = {
auto mixtureFactor = std::make_shared<MixtureFactor>( {moving, 0.0}, {still, 0.0}};
auto mixtureFactor = std::make_shared<HybridNonlinearFactor>(
contKeys, DiscreteKeys{gtsam::DiscreteKey(M(1), 2)}, components); contKeys, DiscreteKeys{gtsam::DiscreteKey(M(1), 2)}, components);
fg.push_back(mixtureFactor); fg.push_back(mixtureFactor);
// Add equivalent of ImuFactor // Add equivalent of ImuFactor
fg.emplace_shared<BetweenFactor<Pose2>>(X(0), X(1), Pose2(1.0, 0.0, 0), fg.emplace_shared<BetweenFactor<Pose2>>(X(0), X(1), Pose2(1.0, 0.0, 0),
poseNoise); poseNoise);
// PoseFactors-like at k=1 // PoseFactors-like at k=1
fg.emplace_shared<BetweenFactor<Pose2>>(X(1), Y(1), Pose2(0, 1, 0), fg.emplace_shared<BetweenFactor<Pose2>>(X(1), Y(1), Pose2(0, 1, 0),
poseNoise); poseNoise);
fg.emplace_shared<BetweenFactor<Pose2>>(Y(1), Z(1), Pose2(0, 1, 0), fg.emplace_shared<BetweenFactor<Pose2>>(Y(1), Z(1), Pose2(0, 1, 0),
poseNoise); poseNoise);
fg.emplace_shared<BetweenFactor<Pose2>>(Z(1), W(1), Pose2(-1, 1, 0), fg.emplace_shared<BetweenFactor<Pose2>>(Z(1), W(1), Pose2(-1, 1, 0),
poseNoise); poseNoise);
initial.insert(X(1), Pose2(1.0, 0.0, 0.0)); initial.insert(X(1), Pose2(1.0, 0.0, 0.0));
initial.insert(Y(1), Pose2(1.0, 1.0, 0.0)); initial.insert(Y(1), Pose2(1.0, 1.0, 0.0));
@ -473,24 +477,24 @@ TEST(HybridNonlinearISAM, NonTrivial) {
// Add odometry factor with discrete modes. // Add odometry factor with discrete modes.
contKeys = {W(1), W(2)}; contKeys = {W(1), W(2)};
still = std::make_shared<PlanarMotionModel>(W(1), W(2), Pose2(0, 0, 0), still = std::make_shared<PlanarMotionModel>(W(1), W(2), Pose2(0, 0, 0),
noise_model); noise_model);
moving = moving =
std::make_shared<PlanarMotionModel>(W(1), W(2), odometry, noise_model); std::make_shared<PlanarMotionModel>(W(1), W(2), odometry, noise_model);
components = {moving, still}; components = {{moving, 0.0}, {still, 0.0}};
mixtureFactor = std::make_shared<MixtureFactor>( mixtureFactor = std::make_shared<HybridNonlinearFactor>(
contKeys, DiscreteKeys{gtsam::DiscreteKey(M(2), 2)}, components); contKeys, DiscreteKeys{gtsam::DiscreteKey(M(2), 2)}, components);
fg.push_back(mixtureFactor); fg.push_back(mixtureFactor);
// Add equivalent of ImuFactor // Add equivalent of ImuFactor
fg.emplace_shared<BetweenFactor<Pose2>>(X(1), X(2), Pose2(1.0, 0.0, 0), fg.emplace_shared<BetweenFactor<Pose2>>(X(1), X(2), Pose2(1.0, 0.0, 0),
poseNoise); poseNoise);
// PoseFactors-like at k=1 // PoseFactors-like at k=1
fg.emplace_shared<BetweenFactor<Pose2>>(X(2), Y(2), Pose2(0, 1, 0), fg.emplace_shared<BetweenFactor<Pose2>>(X(2), Y(2), Pose2(0, 1, 0),
poseNoise); poseNoise);
fg.emplace_shared<BetweenFactor<Pose2>>(Y(2), Z(2), Pose2(0, 1, 0), fg.emplace_shared<BetweenFactor<Pose2>>(Y(2), Z(2), Pose2(0, 1, 0),
poseNoise); poseNoise);
fg.emplace_shared<BetweenFactor<Pose2>>(Z(2), W(2), Pose2(-2, 1, 0), fg.emplace_shared<BetweenFactor<Pose2>>(Z(2), W(2), Pose2(-2, 1, 0),
poseNoise); poseNoise);
initial.insert(X(2), Pose2(2.0, 0.0, 0.0)); initial.insert(X(2), Pose2(2.0, 0.0, 0.0));
initial.insert(Y(2), Pose2(2.0, 1.0, 0.0)); initial.insert(Y(2), Pose2(2.0, 1.0, 0.0));
@ -516,24 +520,24 @@ TEST(HybridNonlinearISAM, NonTrivial) {
// Add odometry factor with discrete modes. // Add odometry factor with discrete modes.
contKeys = {W(2), W(3)}; contKeys = {W(2), W(3)};
still = std::make_shared<PlanarMotionModel>(W(2), W(3), Pose2(0, 0, 0), still = std::make_shared<PlanarMotionModel>(W(2), W(3), Pose2(0, 0, 0),
noise_model); noise_model);
moving = moving =
std::make_shared<PlanarMotionModel>(W(2), W(3), odometry, noise_model); std::make_shared<PlanarMotionModel>(W(2), W(3), odometry, noise_model);
components = {moving, still}; components = {{moving, 0.0}, {still, 0.0}};
mixtureFactor = std::make_shared<MixtureFactor>( mixtureFactor = std::make_shared<HybridNonlinearFactor>(
contKeys, DiscreteKeys{gtsam::DiscreteKey(M(3), 2)}, components); contKeys, DiscreteKeys{gtsam::DiscreteKey(M(3), 2)}, components);
fg.push_back(mixtureFactor); fg.push_back(mixtureFactor);
// Add equivalent of ImuFactor // Add equivalent of ImuFactor
fg.emplace_shared<BetweenFactor<Pose2>>(X(2), X(3), Pose2(1.0, 0.0, 0), fg.emplace_shared<BetweenFactor<Pose2>>(X(2), X(3), Pose2(1.0, 0.0, 0),
poseNoise); poseNoise);
// PoseFactors-like at k=3 // PoseFactors-like at k=3
fg.emplace_shared<BetweenFactor<Pose2>>(X(3), Y(3), Pose2(0, 1, 0), fg.emplace_shared<BetweenFactor<Pose2>>(X(3), Y(3), Pose2(0, 1, 0),
poseNoise); poseNoise);
fg.emplace_shared<BetweenFactor<Pose2>>(Y(3), Z(3), Pose2(0, 1, 0), fg.emplace_shared<BetweenFactor<Pose2>>(Y(3), Z(3), Pose2(0, 1, 0),
poseNoise); poseNoise);
fg.emplace_shared<BetweenFactor<Pose2>>(Z(3), W(3), Pose2(-3, 1, 0), fg.emplace_shared<BetweenFactor<Pose2>>(Z(3), W(3), Pose2(-3, 1, 0),
poseNoise); poseNoise);
initial.insert(X(3), Pose2(3.0, 0.0, 0.0)); initial.insert(X(3), Pose2(3.0, 0.0, 0.0));
initial.insert(Y(3), Pose2(3.0, 1.0, 0.0)); initial.insert(Y(3), Pose2(3.0, 1.0, 0.0));

View File

@ -18,11 +18,11 @@
#include <gtsam/base/serializationTestHelpers.h> #include <gtsam/base/serializationTestHelpers.h>
#include <gtsam/discrete/DiscreteConditional.h> #include <gtsam/discrete/DiscreteConditional.h>
#include <gtsam/hybrid/GaussianMixture.h>
#include <gtsam/hybrid/GaussianMixtureFactor.h>
#include <gtsam/hybrid/HybridBayesNet.h> #include <gtsam/hybrid/HybridBayesNet.h>
#include <gtsam/hybrid/HybridBayesTree.h> #include <gtsam/hybrid/HybridBayesTree.h>
#include <gtsam/hybrid/HybridConditional.h> #include <gtsam/hybrid/HybridConditional.h>
#include <gtsam/hybrid/HybridGaussianConditional.h>
#include <gtsam/hybrid/HybridGaussianFactor.h>
#include <gtsam/inference/Symbol.h> #include <gtsam/inference/Symbol.h>
#include <gtsam/linear/GaussianConditional.h> #include <gtsam/linear/GaussianConditional.h>
@ -51,29 +51,30 @@ BOOST_CLASS_EXPORT_GUID(ADT, "gtsam_AlgebraicDecisionTree");
BOOST_CLASS_EXPORT_GUID(ADT::Leaf, "gtsam_AlgebraicDecisionTree_Leaf"); BOOST_CLASS_EXPORT_GUID(ADT::Leaf, "gtsam_AlgebraicDecisionTree_Leaf");
BOOST_CLASS_EXPORT_GUID(ADT::Choice, "gtsam_AlgebraicDecisionTree_Choice") BOOST_CLASS_EXPORT_GUID(ADT::Choice, "gtsam_AlgebraicDecisionTree_Choice")
BOOST_CLASS_EXPORT_GUID(GaussianMixtureFactor, "gtsam_GaussianMixtureFactor"); BOOST_CLASS_EXPORT_GUID(HybridGaussianFactor, "gtsam_HybridGaussianFactor");
BOOST_CLASS_EXPORT_GUID(GaussianMixtureFactor::Factors, BOOST_CLASS_EXPORT_GUID(HybridGaussianFactor::Factors,
"gtsam_GaussianMixtureFactor_Factors"); "gtsam_HybridGaussianFactor_Factors");
BOOST_CLASS_EXPORT_GUID(GaussianMixtureFactor::Factors::Leaf, BOOST_CLASS_EXPORT_GUID(HybridGaussianFactor::Factors::Leaf,
"gtsam_GaussianMixtureFactor_Factors_Leaf"); "gtsam_HybridGaussianFactor_Factors_Leaf");
BOOST_CLASS_EXPORT_GUID(GaussianMixtureFactor::Factors::Choice, BOOST_CLASS_EXPORT_GUID(HybridGaussianFactor::Factors::Choice,
"gtsam_GaussianMixtureFactor_Factors_Choice"); "gtsam_HybridGaussianFactor_Factors_Choice");
BOOST_CLASS_EXPORT_GUID(GaussianMixture, "gtsam_GaussianMixture"); BOOST_CLASS_EXPORT_GUID(HybridGaussianConditional,
BOOST_CLASS_EXPORT_GUID(GaussianMixture::Conditionals, "gtsam_HybridGaussianConditional");
"gtsam_GaussianMixture_Conditionals"); BOOST_CLASS_EXPORT_GUID(HybridGaussianConditional::Conditionals,
BOOST_CLASS_EXPORT_GUID(GaussianMixture::Conditionals::Leaf, "gtsam_HybridGaussianConditional_Conditionals");
"gtsam_GaussianMixture_Conditionals_Leaf"); BOOST_CLASS_EXPORT_GUID(HybridGaussianConditional::Conditionals::Leaf,
BOOST_CLASS_EXPORT_GUID(GaussianMixture::Conditionals::Choice, "gtsam_HybridGaussianConditional_Conditionals_Leaf");
"gtsam_GaussianMixture_Conditionals_Choice"); BOOST_CLASS_EXPORT_GUID(HybridGaussianConditional::Conditionals::Choice,
"gtsam_HybridGaussianConditional_Conditionals_Choice");
// Needed since GaussianConditional::FromMeanAndStddev uses it // Needed since GaussianConditional::FromMeanAndStddev uses it
BOOST_CLASS_EXPORT_GUID(noiseModel::Isotropic, "gtsam_noiseModel_Isotropic"); BOOST_CLASS_EXPORT_GUID(noiseModel::Isotropic, "gtsam_noiseModel_Isotropic");
BOOST_CLASS_EXPORT_GUID(HybridBayesNet, "gtsam_HybridBayesNet"); BOOST_CLASS_EXPORT_GUID(HybridBayesNet, "gtsam_HybridBayesNet");
/* ****************************************************************************/ /* ****************************************************************************/
// Test GaussianMixtureFactor serialization. // Test HybridGaussianFactor serialization.
TEST(HybridSerialization, GaussianMixtureFactor) { TEST(HybridSerialization, HybridGaussianFactor) {
KeyVector continuousKeys{X(0)}; KeyVector continuousKeys{X(0)};
DiscreteKeys discreteKeys{{M(0), 2}}; DiscreteKeys discreteKeys{{M(0), 2}};
@ -82,13 +83,13 @@ TEST(HybridSerialization, GaussianMixtureFactor) {
auto b1 = Matrix::Ones(2, 1); auto b1 = Matrix::Ones(2, 1);
auto f0 = std::make_shared<JacobianFactor>(X(0), A, b0); auto f0 = std::make_shared<JacobianFactor>(X(0), A, b0);
auto f1 = std::make_shared<JacobianFactor>(X(0), A, b1); auto f1 = std::make_shared<JacobianFactor>(X(0), A, b1);
std::vector<GaussianFactor::shared_ptr> factors{f0, f1}; std::vector<GaussianFactorValuePair> factors{{f0, 0.0}, {f1, 0.0}};
const GaussianMixtureFactor factor(continuousKeys, discreteKeys, factors); const HybridGaussianFactor factor(continuousKeys, discreteKeys, factors);
EXPECT(equalsObj<GaussianMixtureFactor>(factor)); EXPECT(equalsObj<HybridGaussianFactor>(factor));
EXPECT(equalsXML<GaussianMixtureFactor>(factor)); EXPECT(equalsXML<HybridGaussianFactor>(factor));
EXPECT(equalsBinary<GaussianMixtureFactor>(factor)); EXPECT(equalsBinary<HybridGaussianFactor>(factor));
} }
/* ****************************************************************************/ /* ****************************************************************************/
@ -106,20 +107,20 @@ TEST(HybridSerialization, HybridConditional) {
} }
/* ****************************************************************************/ /* ****************************************************************************/
// Test GaussianMixture serialization. // Test HybridGaussianConditional serialization.
TEST(HybridSerialization, GaussianMixture) { TEST(HybridSerialization, HybridGaussianConditional) {
const DiscreteKey mode(M(0), 2); const DiscreteKey mode(M(0), 2);
Matrix1 I = Matrix1::Identity(); Matrix1 I = Matrix1::Identity();
const auto conditional0 = std::make_shared<GaussianConditional>( const auto conditional0 = std::make_shared<GaussianConditional>(
GaussianConditional::FromMeanAndStddev(Z(0), I, X(0), Vector1(0), 0.5)); GaussianConditional::FromMeanAndStddev(Z(0), I, X(0), Vector1(0), 0.5));
const auto conditional1 = std::make_shared<GaussianConditional>( const auto conditional1 = std::make_shared<GaussianConditional>(
GaussianConditional::FromMeanAndStddev(Z(0), I, X(0), Vector1(0), 3)); GaussianConditional::FromMeanAndStddev(Z(0), I, X(0), Vector1(0), 3));
const GaussianMixture gm({Z(0)}, {X(0)}, {mode}, const HybridGaussianConditional gm({Z(0)}, {X(0)}, {mode},
{conditional0, conditional1}); {conditional0, conditional1});
EXPECT(equalsObj<GaussianMixture>(gm)); EXPECT(equalsObj<HybridGaussianConditional>(gm));
EXPECT(equalsXML<GaussianMixture>(gm)); EXPECT(equalsXML<HybridGaussianConditional>(gm));
EXPECT(equalsBinary<GaussianMixture>(gm)); EXPECT(equalsBinary<HybridGaussianConditional>(gm));
} }
/* ****************************************************************************/ /* ****************************************************************************/

View File

@ -46,7 +46,7 @@ namespace gtsam {
* Gaussian density over a set of continuous variables. * Gaussian density over a set of continuous variables.
* - \b Discrete conditionals, implemented in \class DiscreteConditional, which * - \b Discrete conditionals, implemented in \class DiscreteConditional, which
* represent a discrete conditional distribution over discrete variables. * represent a discrete conditional distribution over discrete variables.
* - \b Hybrid conditional densities, such as \class GaussianMixture, which is * - \b Hybrid conditional densities, such as \class HybridGaussianConditional, which is
* a density over continuous variables given discrete/continuous parents. * a density over continuous variables given discrete/continuous parents.
* - \b Symbolic factors, used to represent a graph structure, implemented in * - \b Symbolic factors, used to represent a graph structure, implemented in
* \class SymbolicConditional. Only used for symbolic elimination etc. * \class SymbolicConditional. Only used for symbolic elimination etc.

View File

@ -25,6 +25,7 @@
#include <boost/serialization/nvp.hpp> #include <boost/serialization/nvp.hpp>
#endif #endif
#include <memory> #include <memory>
#include <algorithm>
#include <gtsam/base/types.h> #include <gtsam/base/types.h>
#include <gtsam/base/FastVector.h> #include <gtsam/base/FastVector.h>

View File

@ -707,6 +707,22 @@ const RobustModel::shared_ptr &robust, const NoiseModel::shared_ptr noise){
} }
/* ************************************************************************* */ /* ************************************************************************* */
} // namespace noiseModel
/* *******************************************************************************/
double ComputeLogNormalizer(
const noiseModel::Gaussian::shared_ptr& noise_model) {
// Since noise models are Gaussian, we can get the logDeterminant using
// the same trick as in GaussianConditional
double logDetR = noise_model->R()
.diagonal()
.unaryExpr([](double x) { return log(x); })
.sum();
double logDeterminantSigma = -2.0 * logDetR;
size_t n = noise_model->dim();
constexpr double log2pi = 1.8378770664093454835606594728112;
return n * log2pi + logDeterminantSigma;
} }
} // gtsam } // gtsam

View File

@ -751,6 +751,18 @@ namespace gtsam {
template<> struct traits<noiseModel::Isotropic> : public Testable<noiseModel::Isotropic> {}; template<> struct traits<noiseModel::Isotropic> : public Testable<noiseModel::Isotropic> {};
template<> struct traits<noiseModel::Unit> : public Testable<noiseModel::Unit> {}; template<> struct traits<noiseModel::Unit> : public Testable<noiseModel::Unit> {};
/**
* @brief Helper function to compute the sqrt(|2πΣ|) normalizer values
* for a Gaussian noise model.
* We compute this in the log-space for numerical accuracy.
*
* @param noise_model The Gaussian noise model
* whose normalizer we wish to compute.
* @return double
*/
GTSAM_EXPORT double ComputeLogNormalizer(
const noiseModel::Gaussian::shared_ptr& noise_model);
} //\ namespace gtsam } //\ namespace gtsam

View File

@ -18,6 +18,7 @@
* @brief Binary factor for a relative translation direction measurement. * @brief Binary factor for a relative translation direction measurement.
*/ */
#include <gtsam/base/Vector.h>
#include <gtsam/geometry/Point3.h> #include <gtsam/geometry/Point3.h>
#include <gtsam/geometry/Unit3.h> #include <gtsam/geometry/Unit3.h>
#include <gtsam/linear/NoiseModel.h> #include <gtsam/linear/NoiseModel.h>
@ -36,8 +37,6 @@ namespace gtsam {
* normalized(Tb - Ta) - w_aZb.point3() * normalized(Tb - Ta) - w_aZb.point3()
* *
* @ingroup sfm * @ingroup sfm
*
*
*/ */
class TranslationFactor : public NoiseModelFactorN<Point3, Point3> { class TranslationFactor : public NoiseModelFactorN<Point3, Point3> {
private: private:
@ -45,7 +44,6 @@ class TranslationFactor : public NoiseModelFactorN<Point3, Point3> {
Point3 measured_w_aZb_; Point3 measured_w_aZb_;
public: public:
// Provide access to the Matrix& version of evaluateError: // Provide access to the Matrix& version of evaluateError:
using NoiseModelFactor2<Point3, Point3>::evaluateError; using NoiseModelFactor2<Point3, Point3>::evaluateError;
@ -60,20 +58,20 @@ class TranslationFactor : public NoiseModelFactorN<Point3, Point3> {
* @brief Caclulate error: (norm(Tb - Ta) - measurement) * @brief Caclulate error: (norm(Tb - Ta) - measurement)
* where Tb and Ta are Point3 translations and measurement is * where Tb and Ta are Point3 translations and measurement is
* the Unit3 translation direction from a to b. * the Unit3 translation direction from a to b.
* *
* @param Ta translation for key a * @param Ta translation for key a
* @param Tb translation for key b * @param Tb translation for key b
* @param H1 optional jacobian in Ta * @param H1 optional jacobian in Ta
* @param H2 optional jacobian in Tb * @param H2 optional jacobian in Tb
* @return * Vector * @return * Vector
*/ */
Vector evaluateError( Vector evaluateError(const Point3& Ta, const Point3& Tb,
const Point3& Ta, const Point3& Tb, OptionalMatrixType H1,
OptionalMatrixType H1, OptionalMatrixType H2) const override {
OptionalMatrixType H2) const override {
const Point3 dir = Tb - Ta; const Point3 dir = Tb - Ta;
Matrix33 H_predicted_dir; Matrix33 H_predicted_dir;
const Point3 predicted = normalize(dir, H1 || H2 ? &H_predicted_dir : nullptr); const Point3 predicted =
normalize(dir, H1 || H2 ? &H_predicted_dir : nullptr);
if (H1) *H1 = -H_predicted_dir; if (H1) *H1 = -H_predicted_dir;
if (H2) *H2 = H_predicted_dir; if (H2) *H2 = H_predicted_dir;
return predicted - measured_w_aZb_; return predicted - measured_w_aZb_;
@ -89,4 +87,73 @@ class TranslationFactor : public NoiseModelFactorN<Point3, Point3> {
} }
#endif #endif
}; // \ TranslationFactor }; // \ TranslationFactor
/**
* Binary factor for a relative translation direction measurement
* w_aZb. The measurement equation is
* w_aZb = scale * (Tb - Ta)
* i.e., w_aZb is the translation direction from frame A to B, in world
* coordinates.
*
* Instead of normalizing the Tb - Ta vector, we multiply it by a scalar
* which is also jointly optimized. This is inspired by the work on
* BATA (Zhuang et al, CVPR 2018).
*
* @ingroup sfm
*/
class BilinearAngleTranslationFactor
: public NoiseModelFactorN<Point3, Point3, Vector1> {
private:
typedef NoiseModelFactorN<Point3, Point3, Vector1> Base;
Point3 measured_w_aZb_;
public:
/// default constructor
BilinearAngleTranslationFactor() {}
BilinearAngleTranslationFactor(Key a, Key b, Key scale_key,
const Unit3& w_aZb,
const SharedNoiseModel& noiseModel)
: Base(noiseModel, a, b, scale_key), measured_w_aZb_(w_aZb.point3()) {}
// Provide access to the Matrix& version of evaluateError:
using NoiseModelFactor2<Point3, Point3, Vector1>::evaluateError;
/**
* @brief Caclulate error: (scale * (Tb - Ta) - measurement)
* where Tb and Ta are Point3 translations and measurement is
* the Unit3 translation direction from a to b.
*
* @param Ta translation for key a
* @param Tb translation for key b
* @param H1 optional jacobian in Ta
* @param H2 optional jacobian in Tb
* @return * Vector
*/
Vector evaluateError(const Point3& Ta, const Point3& Tb, const Vector1& scale,
OptionalMatrixType H1, OptionalMatrixType H2,
OptionalMatrixType H3) const override {
// Ideally we should use a positive real valued scalar datatype for scale.
const double abs_scale = abs(scale[0]);
const Point3 predicted = (Tb - Ta) * abs_scale;
if (H1) {
*H1 = -Matrix3::Identity() * abs_scale;
}
if (H2) {
*H2 = Matrix3::Identity() * abs_scale;
}
if (H3) {
*H3 = scale[0] >= 0 ? (Tb - Ta) : (Ta - Tb);
}
return predicted - measured_w_aZb_;
}
private:
friend class boost::serialization::access;
template <class ARCHIVE>
void serialize(ARCHIVE& ar, const unsigned int /*version*/) {
ar& boost::serialization::make_nvp(
"Base", boost::serialization::base_object<Base>(*this));
}
}; // \ BilinearAngleTranslationFactor
} // namespace gtsam } // namespace gtsam

View File

@ -101,9 +101,17 @@ NonlinearFactorGraph TranslationRecovery::buildGraph(
NonlinearFactorGraph graph; NonlinearFactorGraph graph;
// Add translation factors for input translation directions. // Add translation factors for input translation directions.
uint64_t i = 0;
for (auto edge : relativeTranslations) { for (auto edge : relativeTranslations) {
graph.emplace_shared<TranslationFactor>(edge.key1(), edge.key2(), if (use_bilinear_translation_factor_) {
edge.measured(), edge.noiseModel()); graph.emplace_shared<BilinearAngleTranslationFactor>(
edge.key1(), edge.key2(), Symbol('S', i), edge.measured(),
edge.noiseModel());
} else {
graph.emplace_shared<TranslationFactor>(
edge.key1(), edge.key2(), edge.measured(), edge.noiseModel());
}
i++;
} }
return graph; return graph;
} }
@ -163,6 +171,12 @@ Values TranslationRecovery::initializeRandomly(
insert(edge.key1()); insert(edge.key1());
insert(edge.key2()); insert(edge.key2());
} }
if (use_bilinear_translation_factor_) {
for (uint64_t i = 0; i < relativeTranslations.size(); i++) {
initial.insert<Vector1>(Symbol('S', i), Vector1(1.0));
}
}
return initial; return initial;
} }

View File

@ -60,14 +60,18 @@ class GTSAM_EXPORT TranslationRecovery {
// Parameters. // Parameters.
LevenbergMarquardtParams lmParams_; LevenbergMarquardtParams lmParams_;
const bool use_bilinear_translation_factor_ = false;
public: public:
/** /**
* @brief Construct a new Translation Recovery object * @brief Construct a new Translation Recovery object
* *
* @param lmParams parameters for optimization. * @param lmParams parameters for optimization.
*/ */
TranslationRecovery(const LevenbergMarquardtParams &lmParams) TranslationRecovery(const LevenbergMarquardtParams &lmParams,
: lmParams_(lmParams) {} bool use_bilinear_translation_factor = false)
: lmParams_(lmParams),
use_bilinear_translation_factor_(use_bilinear_translation_factor) {}
/** /**
* @brief Default constructor. * @brief Default constructor.

View File

@ -23,7 +23,7 @@ virtual class SfmTrack : gtsam::SfmTrack2d {
SfmTrack(); SfmTrack();
SfmTrack(const gtsam::Point3& pt); SfmTrack(const gtsam::Point3& pt);
const Point3& point3() const; const Point3& point3() const;
Point3 p; Point3 p;
double r; double r;
@ -37,8 +37,8 @@ virtual class SfmTrack : gtsam::SfmTrack2d {
bool equals(const gtsam::SfmTrack& expected, double tol) const; bool equals(const gtsam::SfmTrack& expected, double tol) const;
}; };
#include <gtsam/nonlinear/Values.h>
#include <gtsam/nonlinear/NonlinearFactorGraph.h> #include <gtsam/nonlinear/NonlinearFactorGraph.h>
#include <gtsam/nonlinear/Values.h>
#include <gtsam/sfm/SfmData.h> #include <gtsam/sfm/SfmData.h>
class SfmData { class SfmData {
SfmData(); SfmData();
@ -268,6 +268,8 @@ class MFAS {
#include <gtsam/sfm/TranslationRecovery.h> #include <gtsam/sfm/TranslationRecovery.h>
class TranslationRecovery { class TranslationRecovery {
TranslationRecovery(const gtsam::LevenbergMarquardtParams& lmParams,
const bool use_bilinear_translation_factor);
TranslationRecovery(const gtsam::LevenbergMarquardtParams& lmParams); TranslationRecovery(const gtsam::LevenbergMarquardtParams& lmParams);
TranslationRecovery(); // default params. TranslationRecovery(); // default params.
void addPrior(const gtsam::BinaryMeasurementsUnit3& relativeTranslations, void addPrior(const gtsam::BinaryMeasurementsUnit3& relativeTranslations,

View File

@ -30,7 +30,7 @@ using namespace gtsam;
static SharedNoiseModel model(noiseModel::Isotropic::Sigma(3, 0.05)); static SharedNoiseModel model(noiseModel::Isotropic::Sigma(3, 0.05));
// Keys are deliberately *not* in sorted order to test that case. // Keys are deliberately *not* in sorted order to test that case.
static const Key kKey1(2), kKey2(1); static const Key kKey1(2), kKey2(1), kEdgeKey(3);
static const Unit3 kMeasured(1, 0, 0); static const Unit3 kMeasured(1, 0, 0);
/* ************************************************************************* */ /* ************************************************************************* */
@ -99,6 +99,79 @@ TEST(TranslationFactor, Jacobian) {
EXPECT(assert_equal(H2Expected, H2Actual, 1e-9)); EXPECT(assert_equal(H2Expected, H2Actual, 1e-9));
} }
/* ************************************************************************* */
TEST(BilinearAngleTranslationFactor, Constructor) {
BilinearAngleTranslationFactor factor(kKey1, kKey2, kEdgeKey, kMeasured, model);
}
/* ************************************************************************* */
TEST(BilinearAngleTranslationFactor, ZeroError) {
// Create a factor
BilinearAngleTranslationFactor factor(kKey1, kKey2, kEdgeKey, kMeasured, model);
// Set the linearization
Point3 T1(1, 0, 0), T2(2, 0, 0);
Vector1 scale(1.0);
// Use the factor to calculate the error
Vector actualError(factor.evaluateError(T1, T2, scale));
// Verify we get the expected error
Vector expected = (Vector3() << 0, 0, 0).finished();
EXPECT(assert_equal(expected, actualError, 1e-9));
}
/* ************************************************************************* */
TEST(BilinearAngleTranslationFactor, NonZeroError) {
// create a factor
BilinearAngleTranslationFactor factor(kKey1, kKey2, kEdgeKey, kMeasured, model);
// set the linearization
Point3 T1(0, 1, 1), T2(0, 2, 2);
Vector1 scale(1.0 / sqrt(2));
// use the factor to calculate the error
Vector actualError(factor.evaluateError(T1, T2, scale));
// verify we get the expected error
Vector expected = (Vector3() << -1, 1 / sqrt(2), 1 / sqrt(2)).finished();
EXPECT(assert_equal(expected, actualError, 1e-9));
}
/* ************************************************************************* */
Vector bilinearAngleFactorError(const Point3 &T1, const Point3 &T2, const Vector1 &scale,
const BilinearAngleTranslationFactor &factor) {
return factor.evaluateError(T1, T2, scale);
}
TEST(BilinearAngleTranslationFactor, Jacobian) {
// Create a factor
BilinearAngleTranslationFactor factor(kKey1, kKey2, kEdgeKey, kMeasured, model);
// Set the linearization
Point3 T1(1, 0, 0), T2(2, 0, 0);
Vector1 scale(1.0);
// Use the factor to calculate the Jacobians
Matrix H1Actual, H2Actual, H3Actual;
factor.evaluateError(T1, T2, scale, H1Actual, H2Actual, H3Actual);
// Use numerical derivatives to calculate the Jacobians
Matrix H1Expected, H2Expected, H3Expected;
H1Expected = numericalDerivative11<Vector, Point3>(
std::bind(&bilinearAngleFactorError, std::placeholders::_1, T2, scale, factor), T1);
H2Expected = numericalDerivative11<Vector, Point3>(
std::bind(&bilinearAngleFactorError, T1, std::placeholders::_1, scale, factor), T2);
H3Expected = numericalDerivative11<Vector, Vector1>(
std::bind(&bilinearAngleFactorError, T1, T2, std::placeholders::_1, factor), scale);
// Verify the Jacobians are correct
EXPECT(assert_equal(H1Expected, H1Actual, 1e-9));
EXPECT(assert_equal(H2Expected, H2Actual, 1e-9));
EXPECT(assert_equal(H3Expected, H3Actual, 1e-9));
}
/* ************************************************************************* */ /* ************************************************************************* */
int main() { int main() {
TestResult tr; TestResult tr;

View File

@ -171,6 +171,7 @@ file(COPY "${GTSAM_SOURCE_DIR}/examples/Data" DESTINATION "${GTSAM_MODULE_PATH}"
# Add gtsam as a dependency to the install target # Add gtsam as a dependency to the install target
set(GTSAM_PYTHON_DEPENDENCIES ${GTSAM_PYTHON_TARGET}) set(GTSAM_PYTHON_DEPENDENCIES ${GTSAM_PYTHON_TARGET})
set(GTSAM_PYTHON_INSTALL_EXTRA "")
if(GTSAM_UNSTABLE_BUILD_PYTHON) if(GTSAM_UNSTABLE_BUILD_PYTHON)
set(ignore set(ignore
@ -250,6 +251,22 @@ if(GTSAM_UNSTABLE_BUILD_PYTHON)
VERBATIM VERBATIM
) )
endif() endif()
add_custom_target(
python-unstable-stubs
COMMAND
${CMAKE_COMMAND} -E env
"PYTHONPATH=${GTSAM_PYTHON_BUILD_DIRECTORY}/$ENV{PYTHONPATH}"
pybind11-stubgen -o . --enum-class-locations \"KernelFunctionType|NoiseFormat:gtsam.gtsam\" --enum-class-locations \"OrderingType:gtsam.gtsam.Ordering\" --numpy-array-use-type-var gtsam_unstable
DEPENDS ${GTSAM_PYTHON_DEPENDENCIES} ${GTSAM_PYTHON_TEST_FILES} ${GTSAM_PYTHON_UNSTABLE_TARGET}
WORKING_DIRECTORY "${GTSAM_PYTHON_BUILD_DIRECTORY}/"
)
if(NOT WIN32)
# Add the stubgen target as a dependency to the install target
list(APPEND GTSAM_PYTHON_INSTALL_EXTRA python-unstable-stubs)
endif()
# Custom make command to run all GTSAM_UNSTABLE Python tests # Custom make command to run all GTSAM_UNSTABLE Python tests
add_custom_target( add_custom_target(
python-test-unstable python-test-unstable
@ -262,26 +279,30 @@ if(GTSAM_UNSTABLE_BUILD_PYTHON)
) )
endif() endif()
add_custom_target(
python-stubs
COMMAND
${CMAKE_COMMAND} -E env
"PYTHONPATH=${GTSAM_PYTHON_BUILD_DIRECTORY}/$ENV{PYTHONPATH}"
pybind11-stubgen -o . --enum-class-locations \"KernelFunctionType|NoiseFormat:gtsam.gtsam\" --enum-class-locations \"OrderingType:gtsam.gtsam.Ordering\" --numpy-array-use-type-var gtsam
DEPENDS ${GTSAM_PYTHON_DEPENDENCIES} ${GTSAM_PYTHON_TEST_FILES} ${GTSAM_PYTHON_TARGET}
WORKING_DIRECTORY "${GTSAM_PYTHON_BUILD_DIRECTORY}/"
)
if(NOT WIN32)
# Add the stubgen target as a dependency to the install target
list(APPEND GTSAM_PYTHON_INSTALL_EXTRA python-stubs)
endif()
# Add custom target so we can install with `make python-install` # Add custom target so we can install with `make python-install`
# Note below we make sure to install with --user iff not in a virtualenv # Note below we make sure to install with --user iff not in a virtualenv
set(GTSAM_PYTHON_INSTALL_TARGET python-install) set(GTSAM_PYTHON_INSTALL_TARGET python-install)
#TODO(Varun) Maybe move the long command to script?
# https://stackoverflow.com/questions/49053544/how-do-i-run-a-python-script-every-time-in-a-cmake-build
if (NOT WIN32) # WIN32=1 is target platform is Windows
add_custom_target(${GTSAM_PYTHON_INSTALL_TARGET}
COMMAND stubgen -q -p gtsam && cp -a out/gtsam/ gtsam && ${PYTHON_EXECUTABLE} -c "import sys, subprocess; cmd = [sys.executable, '-m', 'pip', 'install']; has_venv = hasattr(sys, 'real_prefix') or (hasattr(sys, 'base_prefix') and sys.base_prefix != sys.prefix); cmd.append('--user' if not has_venv else ''); cmd.append('.'); subprocess.check_call([c for c in cmd if c])"
DEPENDS ${GTSAM_PYTHON_DEPENDENCIES}
WORKING_DIRECTORY ${GTSAM_PYTHON_BUILD_DIRECTORY}
VERBATIM)
else()
#TODO(Varun) Find equivalent cp on Windows
add_custom_target(${GTSAM_PYTHON_INSTALL_TARGET}
COMMAND ${PYTHON_EXECUTABLE} -c "import sys, subprocess; cmd = [sys.executable, '-m', 'pip', 'install']; has_venv = hasattr(sys, 'real_prefix') or (hasattr(sys, 'base_prefix') and sys.base_prefix != sys.prefix); cmd.append('--user' if not has_venv else ''); cmd.append('.'); subprocess.check_call([c for c in cmd if c])"
DEPENDS ${GTSAM_PYTHON_DEPENDENCIES}
WORKING_DIRECTORY ${GTSAM_PYTHON_BUILD_DIRECTORY}
VERBATIM)
endif()
add_custom_target(${GTSAM_PYTHON_INSTALL_TARGET}
COMMAND ${PYTHON_EXECUTABLE} -c "import sys, subprocess; cmd = [sys.executable, '-m', 'pip', 'install']; has_venv = hasattr(sys, 'real_prefix') or (hasattr(sys, 'base_prefix') and sys.base_prefix != sys.prefix); cmd.append('--user' if not has_venv else ''); cmd.append('.'); subprocess.check_call([c for c in cmd if c])"
DEPENDS ${GTSAM_PYTHON_DEPENDENCIES} ${GTSAM_PYTHON_INSTALL_EXTRA}
WORKING_DIRECTORY ${GTSAM_PYTHON_BUILD_DIRECTORY}
VERBATIM)
# Custom make command to run all GTSAM Python tests # Custom make command to run all GTSAM Python tests
add_custom_target( add_custom_target(

View File

@ -1,3 +1,3 @@
-r requirements.txt -r requirements.txt
pyparsing>=2.4.2 pyparsing>=2.4.2
mypy==1.4.1 #TODO(Varun) A bug in mypy>=1.5.0 causes an unresolved placeholder error when importing numpy>=2.0.0 (https://github.com/python/mypy/issues/17396) pybind11-stubgen>=2.5.1

View File

@ -39,12 +39,12 @@ namespace py = pybind11;
{module_def} {{ {module_def} {{
m_.doc() = "pybind11 wrapper of {module_name}"; m_.doc() = "pybind11 wrapper of {module_name}";
// Specializations for STL classes
#include "python/gtsam/specializations/{module_name}.h"
{submodules_init} {submodules_init}
{wrapped_namespace} {wrapped_namespace}
// Specializations for STL classes
#include "python/gtsam/specializations/{module_name}.h"
}} }}

View File

@ -18,8 +18,9 @@ from gtsam.symbol_shorthand import A, X
from gtsam.utils.test_case import GtsamTestCase from gtsam.utils.test_case import GtsamTestCase
from gtsam import (DiscreteConditional, DiscreteKeys, DiscreteValues, from gtsam import (DiscreteConditional, DiscreteKeys, DiscreteValues,
GaussianConditional, GaussianMixture, HybridBayesNet, GaussianConditional, HybridBayesNet,
HybridValues, VectorValues, noiseModel) HybridGaussianConditional, HybridValues, VectorValues,
noiseModel)
class TestHybridBayesNet(GtsamTestCase): class TestHybridBayesNet(GtsamTestCase):
@ -49,8 +50,8 @@ class TestHybridBayesNet(GtsamTestCase):
bayesNet = HybridBayesNet() bayesNet = HybridBayesNet()
bayesNet.push_back(conditional) bayesNet.push_back(conditional)
bayesNet.push_back( bayesNet.push_back(
GaussianMixture([X(1)], [], discrete_keys, HybridGaussianConditional([X(1)], [], discrete_keys,
[conditional0, conditional1])) [conditional0, conditional1]))
bayesNet.push_back(DiscreteConditional(Asia, "99/1")) bayesNet.push_back(DiscreteConditional(Asia, "99/1"))
# Create values at which to evaluate. # Create values at which to evaluate.

View File

@ -18,15 +18,16 @@ from gtsam.utils.test_case import GtsamTestCase
import gtsam import gtsam
from gtsam import (DiscreteConditional, DiscreteKeys, GaussianConditional, from gtsam import (DiscreteConditional, DiscreteKeys, GaussianConditional,
GaussianMixture, GaussianMixtureFactor, HybridBayesNet, HybridBayesNet, HybridGaussianConditional,
HybridGaussianFactorGraph, HybridValues, JacobianFactor, HybridGaussianFactor, HybridGaussianFactorGraph,
Ordering, noiseModel) HybridValues, JacobianFactor, Ordering, noiseModel)
DEBUG_MARGINALS = False DEBUG_MARGINALS = False
class TestHybridGaussianFactorGraph(GtsamTestCase): class TestHybridGaussianFactorGraph(GtsamTestCase):
"""Unit tests for HybridGaussianFactorGraph.""" """Unit tests for HybridGaussianFactorGraph."""
def test_create(self): def test_create(self):
"""Test construction of hybrid factor graph.""" """Test construction of hybrid factor graph."""
model = noiseModel.Unit.Create(3) model = noiseModel.Unit.Create(3)
@ -36,7 +37,7 @@ class TestHybridGaussianFactorGraph(GtsamTestCase):
jf1 = JacobianFactor(X(0), np.eye(3), np.zeros((3, 1)), model) jf1 = JacobianFactor(X(0), np.eye(3), np.zeros((3, 1)), model)
jf2 = JacobianFactor(X(0), np.eye(3), np.ones((3, 1)), model) jf2 = JacobianFactor(X(0), np.eye(3), np.ones((3, 1)), model)
gmf = GaussianMixtureFactor([X(0)], dk, [jf1, jf2]) gmf = HybridGaussianFactor([X(0)], dk, [(jf1, 0), (jf2, 0)])
hfg = HybridGaussianFactorGraph() hfg = HybridGaussianFactorGraph()
hfg.push_back(jf1) hfg.push_back(jf1)
@ -48,7 +49,7 @@ class TestHybridGaussianFactorGraph(GtsamTestCase):
self.assertEqual(hbn.size(), 2) self.assertEqual(hbn.size(), 2)
mixture = hbn.at(0).inner() mixture = hbn.at(0).inner()
self.assertIsInstance(mixture, GaussianMixture) self.assertIsInstance(mixture, HybridGaussianConditional)
self.assertEqual(len(mixture.keys()), 2) self.assertEqual(len(mixture.keys()), 2)
discrete_conditional = hbn.at(hbn.size() - 1).inner() discrete_conditional = hbn.at(hbn.size() - 1).inner()
@ -63,7 +64,7 @@ class TestHybridGaussianFactorGraph(GtsamTestCase):
jf1 = JacobianFactor(X(0), np.eye(3), np.zeros((3, 1)), model) jf1 = JacobianFactor(X(0), np.eye(3), np.zeros((3, 1)), model)
jf2 = JacobianFactor(X(0), np.eye(3), np.ones((3, 1)), model) jf2 = JacobianFactor(X(0), np.eye(3), np.ones((3, 1)), model)
gmf = GaussianMixtureFactor([X(0)], dk, [jf1, jf2]) gmf = HybridGaussianFactor([X(0)], dk, [(jf1, 0), (jf2, 0)])
hfg = HybridGaussianFactorGraph() hfg = HybridGaussianFactorGraph()
hfg.push_back(jf1) hfg.push_back(jf1)
@ -106,8 +107,9 @@ class TestHybridGaussianFactorGraph(GtsamTestCase):
I_1x1, I_1x1,
X(0), [0], X(0), [0],
sigma=3) sigma=3)
bayesNet.push_back(GaussianMixture([Z(i)], [X(0)], keys, bayesNet.push_back(
[conditional0, conditional1])) HybridGaussianConditional([Z(i)], [X(0)], keys,
[conditional0, conditional1]))
# Create prior on X(0). # Create prior on X(0).
prior_on_x0 = GaussianConditional.FromMeanAndStddev( prior_on_x0 = GaussianConditional.FromMeanAndStddev(
@ -219,9 +221,9 @@ class TestHybridGaussianFactorGraph(GtsamTestCase):
# Check ratio between unnormalized posterior and factor graph is the same for all modes: # Check ratio between unnormalized posterior and factor graph is the same for all modes:
for mode in [1, 0]: for mode in [1, 0]:
values.insert_or_assign(M(0), mode) values.insert_or_assign(M(0), mode)
self.assertAlmostEqual(bayesNet.evaluate(values) / self.assertAlmostEqual(
np.exp(-fg.error(values)), bayesNet.evaluate(values) / np.exp(-fg.error(values)),
0.6366197723675815) 0.6366197723675815)
self.assertAlmostEqual(bayesNet.error(values), fg.error(values)) self.assertAlmostEqual(bayesNet.error(values), fg.error(values))
# Test elimination. # Test elimination.

View File

@ -17,29 +17,30 @@ import unittest
import numpy as np import numpy as np
from gtsam.symbol_shorthand import C, X from gtsam.symbol_shorthand import C, X
from gtsam.utils.test_case import GtsamTestCase from gtsam.utils.test_case import GtsamTestCase
from gtsam import BetweenFactorPoint3, noiseModel, PriorFactorPoint3, Point3
import gtsam import gtsam
from gtsam import BetweenFactorPoint3, Point3, PriorFactorPoint3, noiseModel
class TestHybridGaussianFactorGraph(GtsamTestCase): class TestHybridGaussianFactorGraph(GtsamTestCase):
"""Unit tests for HybridGaussianFactorGraph.""" """Unit tests for HybridGaussianFactorGraph."""
def test_nonlinear_hybrid(self): def test_nonlinear_hybrid(self):
nlfg = gtsam.HybridNonlinearFactorGraph() nlfg = gtsam.HybridNonlinearFactorGraph()
dk = gtsam.DiscreteKeys() dk = gtsam.DiscreteKeys()
dk.push_back((10, 2)) dk.push_back((10, 2))
nlfg.push_back(BetweenFactorPoint3(1, 2, Point3( nlfg.push_back(
1, 2, 3), noiseModel.Diagonal.Variances([1, 1, 1]))) BetweenFactorPoint3(1, 2, Point3(1, 2, 3),
noiseModel.Diagonal.Variances([1, 1, 1])))
nlfg.push_back( nlfg.push_back(
PriorFactorPoint3(2, Point3(1, 2, 3), PriorFactorPoint3(2, Point3(1, 2, 3),
noiseModel.Diagonal.Variances([0.5, 0.5, 0.5]))) noiseModel.Diagonal.Variances([0.5, 0.5, 0.5])))
nlfg.push_back(
gtsam.MixtureFactor([1], dk, [ factors = [(PriorFactorPoint3(1, Point3(0, 0, 0),
PriorFactorPoint3(1, Point3(0, 0, 0), noiseModel.Unit.Create(3)), 0.0),
noiseModel.Unit.Create(3)), (PriorFactorPoint3(1, Point3(1, 2, 1),
PriorFactorPoint3(1, Point3(1, 2, 1), noiseModel.Unit.Create(3)), 0.0)]
noiseModel.Unit.Create(3)) nlfg.push_back(gtsam.HybridNonlinearFactor([1], dk, factors))
]))
nlfg.push_back(gtsam.DecisionTreeFactor((10, 2), "1 3")) nlfg.push_back(gtsam.DecisionTreeFactor((10, 2), "1 3"))
values = gtsam.Values() values = gtsam.Values()
values.insert_point3(1, Point3(0, 0, 0)) values.insert_point3(1, Point3(0, 0, 0))

View File

@ -14,11 +14,12 @@ from __future__ import print_function
import unittest import unittest
import gtsam
import numpy as np import numpy as np
from gtsam.symbol_shorthand import C, X from gtsam.symbol_shorthand import C, X
from gtsam.utils.test_case import GtsamTestCase from gtsam.utils.test_case import GtsamTestCase
import gtsam
class TestHybridValues(GtsamTestCase): class TestHybridValues(GtsamTestCase):
"""Unit tests for HybridValues.""" """Unit tests for HybridValues."""

View File

@ -353,6 +353,340 @@ TEST(TranslationRecovery, NodeWithBetweenFactorAndNoMeasurements) {
EXPECT(assert_equal(Point3(1, 2, 1), result.at<Point3>(4), 1e-4)); EXPECT(assert_equal(Point3(1, 2, 1), result.at<Point3>(4), 1e-4));
} }
/* *************************************************************************
* Repeat all tests, but with the Bilinear Angle Translation Factor.
* ************************************************************************* */
/* ************************************************************************* */
// We read the BAL file, which has 3 cameras in it, with poses. We then assume
// the rotations are correct, but translations have to be estimated from
// translation directions only. Since we have 3 cameras, A, B, and C, we can at
// most create three relative measurements, let's call them w_aZb, w_aZc, and
// bZc. These will be of type Unit3. We then call `recoverTranslations` which
// sets up an optimization problem for the three unknown translations.
TEST(TranslationRecovery, BALBATA) {
  // Load the 3-camera BAL dataset and keep only the ground-truth poses.
  const string balFile = findExampleDataFile("dubrovnik-3-7-pre");
  SfmData sfmData = SfmData::FromBalFile(balFile);

  Values gtPoses;
  size_t poseIndex = 0;
  for (auto camera : sfmData.cameras) {
    gtPoses.insert(poseIndex, camera.pose());
    poseIndex++;
  }

  // Simulate Unit3 relative-translation measurements between all three pairs.
  const auto measurements = TranslationRecovery::SimulateMeasurements(
      gtPoses, {{0, 1}, {0, 2}, {1, 2}});

  // Sanity-check each simulated direction against the ground-truth poses.
  for (auto& measurement : measurements) {
    EXPECT(assert_equal(GetDirectionFromPoses(gtPoses, measurement),
                        measurement.measured()));
  }

  LevenbergMarquardtParams lmParams;
  // Second argument `true` selects the bilinear-angle translation factor.
  TranslationRecovery recovery(lmParams, true);
  const auto graph = recovery.buildGraph(measurements);
  EXPECT_LONGS_EQUAL(3, graph.size());

  // Solve for the unknown translations at a fixed gauge scale.
  const double scale = 2.0;
  const auto estimate = recovery.run(measurements, scale);

  // The first two translations are pinned by the prior: origin, then the
  // first measured direction stretched to the requested scale.
  EXPECT(assert_equal(Point3(0, 0, 0), estimate.at<Point3>(0)));
  EXPECT(assert_equal(
      Point3(2 * GetDirectionFromPoses(gtPoses, measurements[0])),
      estimate.at<Point3>(1)));

  // The third translation should match ground truth up to that scale.
  Point3 Ta = gtPoses.at<Pose3>(0).translation();
  Point3 Tb = gtPoses.at<Pose3>(1).translation();
  Point3 Tc = gtPoses.at<Pose3>(2).translation();
  Point3 expected = (Tc - Ta) * (scale / (Tb - Ta).norm());
  EXPECT(assert_equal(expected, estimate.at<Point3>(2), 1e-4));

  // TODO(frank): how to get stats back?
  // EXPECT_DOUBLES_EQUAL(0.0199833, actualError, 1e-5);
}
TEST(TranslationRecovery, TwoPoseTestBATA) {
  // Two poses facing the same way, offset along X:
  //   __  __
  //   \/  \/
  //   0 __ 1
  Values gtPoses;
  gtPoses.insert<Pose3>(0, Pose3(Rot3(), Point3(0, 0, 0)));
  gtPoses.insert<Pose3>(1, Pose3(Rot3(), Point3(2, 0, 0)));

  auto measurements =
      TranslationRecovery::SimulateMeasurements(gtPoses, {{0, 1}});

  // Sanity-check the simulated direction against the ground truth.
  for (auto& measurement : measurements) {
    EXPECT(assert_equal(GetDirectionFromPoses(gtPoses, measurement),
                        measurement.measured()));
  }

  LevenbergMarquardtParams lmParams;
  // Second argument `true` selects the bilinear-angle translation factor.
  TranslationRecovery recovery(lmParams, true);
  const auto graph = recovery.buildGraph(measurements);
  EXPECT_LONGS_EQUAL(1, graph.size());

  // Recover translations at scale 3: pose 0 stays at the origin and pose 1
  // lands at the measured direction times the scale.
  const auto estimate = recovery.run(measurements, /*scale=*/3.0);
  EXPECT(assert_equal(Point3(0, 0, 0), estimate.at<Point3>(0), 1e-8));
  EXPECT(assert_equal(Point3(3, 0, 0), estimate.at<Point3>(1), 1e-8));
}
TEST(TranslationRecovery, ThreePoseTestBATA) {
  // Three poses, all facing the same way:
  //   __  __
  //   \/  \/
  //   0 __ 1
  //    \__/
  //     3
  // 0 and 1 are offset along X; 3 sits between them with a -Y offset.
  Values gtPoses;
  gtPoses.insert<Pose3>(0, Pose3(Rot3(), Point3(0, 0, 0)));
  gtPoses.insert<Pose3>(1, Pose3(Rot3(), Point3(2, 0, 0)));
  gtPoses.insert<Pose3>(3, Pose3(Rot3(), Point3(1, -1, 0)));

  auto measurements = TranslationRecovery::SimulateMeasurements(
      gtPoses, {{0, 1}, {1, 3}, {3, 0}});

  // Sanity-check each simulated direction against the ground truth.
  for (auto& measurement : measurements) {
    EXPECT(assert_equal(GetDirectionFromPoses(gtPoses, measurement),
                        measurement.measured()));
  }

  LevenbergMarquardtParams lmParams;
  // Second argument `true` selects the bilinear-angle translation factor.
  TranslationRecovery recovery(lmParams, true);
  const auto graph = recovery.buildGraph(measurements);
  EXPECT_LONGS_EQUAL(3, graph.size());

  // Recovered layout should be ground truth scaled by 3/2.
  const auto estimate = recovery.run(measurements, /*scale=*/3.0);
  EXPECT(assert_equal(Point3(0, 0, 0), estimate.at<Point3>(0), 1e-8));
  EXPECT(assert_equal(Point3(3, 0, 0), estimate.at<Point3>(1), 1e-8));
  EXPECT(assert_equal(Point3(1.5, -1.5, 0), estimate.at<Point3>(3), 1e-8));
}
TEST(TranslationRecovery, ThreePosesIncludingZeroTranslationBATA) {
  // Three poses where two coincide:
  //   __  __
  //   \/  \/
  //   0 __ 1
  //        2 <|
  // 0 and 1 are offset along X; 2 shares 1's position but is rotated.
  Values gtPoses;
  gtPoses.insert<Pose3>(0, Pose3(Rot3(), Point3(0, 0, 0)));
  gtPoses.insert<Pose3>(1, Pose3(Rot3(), Point3(2, 0, 0)));
  gtPoses.insert<Pose3>(2,
                        Pose3(Rot3::RzRyRx(-M_PI / 2, 0, 0), Point3(2, 0, 0)));

  auto measurements =
      TranslationRecovery::SimulateMeasurements(gtPoses, {{0, 1}, {1, 2}});

  // Sanity-check each simulated direction against the ground truth.
  for (auto& measurement : measurements) {
    EXPECT(assert_equal(GetDirectionFromPoses(gtPoses, measurement),
                        measurement.measured()));
  }

  LevenbergMarquardtParams lmParams;
  // Second argument `true` selects the bilinear-angle translation factor.
  TranslationRecovery recovery(lmParams, true);

  // Recover translations: 2 must land exactly on top of 1.
  const auto estimate = recovery.run(measurements, /*scale=*/3.0);
  EXPECT(assert_equal(Point3(0, 0, 0), estimate.at<Point3>(0), 1e-8));
  EXPECT(assert_equal(Point3(3, 0, 0), estimate.at<Point3>(1), 1e-8));
  EXPECT(assert_equal(Point3(3, 0, 0), estimate.at<Point3>(2), 1e-8));
}
TEST(TranslationRecovery, FourPosesIncludingZeroTranslationBATA) {
  // Four poses:
  //   __  __
  //   \/  \/
  //   0 __ 1
  //    \__ 2 <|
  //     \/
  //     3
  // 0 and 1 are offset along X; 2 shares 1's position but is rotated;
  // 3 sits between 0 and 1 with a -Y offset.
  Values gtPoses;
  gtPoses.insert<Pose3>(0, Pose3(Rot3(), Point3(0, 0, 0)));
  gtPoses.insert<Pose3>(1, Pose3(Rot3(), Point3(2, 0, 0)));
  gtPoses.insert<Pose3>(2,
                        Pose3(Rot3::RzRyRx(-M_PI / 2, 0, 0), Point3(2, 0, 0)));
  gtPoses.insert<Pose3>(3, Pose3(Rot3(), Point3(1, -1, 0)));

  auto measurements = TranslationRecovery::SimulateMeasurements(
      gtPoses, {{0, 1}, {1, 2}, {1, 3}, {3, 0}});

  // Sanity-check each simulated direction against the ground truth.
  for (auto& measurement : measurements) {
    EXPECT(assert_equal(GetDirectionFromPoses(gtPoses, measurement),
                        measurement.measured()));
  }

  LevenbergMarquardtParams lmParams;
  // Second argument `true` selects the bilinear-angle translation factor.
  TranslationRecovery recovery(lmParams, true);

  // Recovered layout should be ground truth scaled by 2, with 2 on top of 1.
  const auto estimate = recovery.run(measurements, /*scale=*/4.0);
  EXPECT(assert_equal(Point3(0, 0, 0), estimate.at<Point3>(0), 1e-8));
  EXPECT(assert_equal(Point3(4, 0, 0), estimate.at<Point3>(1), 1e-8));
  EXPECT(assert_equal(Point3(4, 0, 0), estimate.at<Point3>(2), 1e-8));
  EXPECT(assert_equal(Point3(2, -2, 0), estimate.at<Point3>(3), 1e-8));
}
TEST(TranslationRecovery, ThreePosesWithZeroTranslationBATA) {
  // Degenerate case: all three poses share the origin and differ only in
  // rotation, so every recovered translation must collapse to zero.
  Values gtPoses;
  gtPoses.insert<Pose3>(
      0, Pose3(Rot3::RzRyRx(-M_PI / 6, 0, 0), Point3(0, 0, 0)));
  gtPoses.insert<Pose3>(1, Pose3(Rot3(), Point3(0, 0, 0)));
  gtPoses.insert<Pose3>(2,
                        Pose3(Rot3::RzRyRx(M_PI / 6, 0, 0), Point3(0, 0, 0)));

  auto measurements = TranslationRecovery::SimulateMeasurements(
      gtPoses, {{0, 1}, {1, 2}, {2, 0}});

  // Sanity-check each simulated direction against the ground truth.
  for (auto& measurement : measurements) {
    EXPECT(assert_equal(GetDirectionFromPoses(gtPoses, measurement),
                        measurement.measured()));
  }

  LevenbergMarquardtParams lmParams;
  // Second argument `true` selects the bilinear-angle translation factor.
  TranslationRecovery recovery(lmParams, true);

  // Recover translations: all should remain at the origin.
  const auto estimate = recovery.run(measurements, /*scale=*/4.0);
  EXPECT(assert_equal(Point3(0, 0, 0), estimate.at<Point3>(0), 1e-8));
  EXPECT(assert_equal(Point3(0, 0, 0), estimate.at<Point3>(1), 1e-8));
  EXPECT(assert_equal(Point3(0, 0, 0), estimate.at<Point3>(2), 1e-8));
}
TEST(TranslationRecovery, ThreePosesWithOneSoftConstraintBATA) {
  // Three poses, all facing the same way:
  //   __  __
  //   \/  \/
  //   0 __ 1
  //    \__/
  //     3
  // 0 and 1 are offset along X; 3 sits between them with a -Y offset.
  Values gtPoses;
  gtPoses.insert<Pose3>(0, Pose3(Rot3(), Point3(0, 0, 0)));
  gtPoses.insert<Pose3>(1, Pose3(Rot3(), Point3(2, 0, 0)));
  gtPoses.insert<Pose3>(3, Pose3(Rot3(), Point3(1, -1, 0)));

  auto measurements = TranslationRecovery::SimulateMeasurements(
      gtPoses, {{0, 1}, {0, 3}, {1, 3}});

  // One soft metric prior on the 0->3 translation fixes the gauge scale,
  // so run() is called with scale=0.
  std::vector<BinaryMeasurement<Point3>> betweenTranslations;
  betweenTranslations.emplace_back(0, 3, Point3(1, -1, 0),
                                   noiseModel::Isotropic::Sigma(3, 1e-2));

  LevenbergMarquardtParams lmParams;
  // Second argument `true` selects the bilinear-angle translation factor.
  TranslationRecovery recovery(lmParams, true);
  auto estimate =
      recovery.run(measurements, /*scale=*/0.0, betweenTranslations);

  // With the soft constraint, the solution matches ground truth exactly.
  EXPECT(assert_equal(Point3(0, 0, 0), estimate.at<Point3>(0), 1e-4));
  EXPECT(assert_equal(Point3(2, 0, 0), estimate.at<Point3>(1), 1e-4));
  EXPECT(assert_equal(Point3(1, -1, 0), estimate.at<Point3>(3), 1e-4));
}
TEST(TranslationRecovery, ThreePosesWithOneHardConstraintBATA) {
  // Three poses, all facing the same way:
  //   __  __
  //   \/  \/
  //   0 __ 1
  //    \__/
  //     3
  // 0 and 1 are offset along X; 3 sits between them with a -Y offset.
  Values gtPoses;
  gtPoses.insert<Pose3>(0, Pose3(Rot3(), Point3(0, 0, 0)));
  gtPoses.insert<Pose3>(1, Pose3(Rot3(), Point3(2, 0, 0)));
  gtPoses.insert<Pose3>(3, Pose3(Rot3(), Point3(1, -1, 0)));

  auto measurements = TranslationRecovery::SimulateMeasurements(
      gtPoses, {{0, 1}, {0, 3}, {1, 3}});

  // A hard (constrained-noise) prior on the 0->1 translation fixes the
  // gauge scale, so run() is called with scale=0.
  std::vector<BinaryMeasurement<Point3>> betweenTranslations;
  betweenTranslations.emplace_back(0, 1, Point3(2, 0, 0),
                                   noiseModel::Constrained::All(3, 1e2));

  LevenbergMarquardtParams lmParams;
  // Second argument `true` selects the bilinear-angle translation factor.
  TranslationRecovery recovery(lmParams, true);
  auto estimate =
      recovery.run(measurements, /*scale=*/0.0, betweenTranslations);

  // With the hard constraint, the solution matches ground truth exactly.
  EXPECT(assert_equal(Point3(0, 0, 0), estimate.at<Point3>(0), 1e-4));
  EXPECT(assert_equal(Point3(2, 0, 0), estimate.at<Point3>(1), 1e-4));
  EXPECT(assert_equal(Point3(1, -1, 0), estimate.at<Point3>(3), 1e-4));
}
TEST(TranslationRecovery, NodeWithBetweenFactorAndNoMeasurementsBATA) {
  // A node (4) that has no relative-translation measurements must still be
  // recoverable when it is anchored by a between-translation prior alone.
  Values gtPoses;
  gtPoses.insert<Pose3>(0, Pose3(Rot3(), Point3(0, 0, 0)));
  gtPoses.insert<Pose3>(1, Pose3(Rot3(), Point3(2, 0, 0)));
  gtPoses.insert<Pose3>(3, Pose3(Rot3(), Point3(1, -1, 0)));
  gtPoses.insert<Pose3>(4, Pose3(Rot3(), Point3(1, 2, 1)));

  // Note: node 4 appears in no measurement pair.
  auto measurements = TranslationRecovery::SimulateMeasurements(
      gtPoses, {{0, 1}, {0, 3}, {1, 3}});

  std::vector<BinaryMeasurement<Point3>> betweenTranslations;
  // Hard prior on 0->1 fixes the gauge scale (so run() uses scale=0).
  betweenTranslations.emplace_back(0, 1, Point3(2, 0, 0),
                                   noiseModel::Constrained::All(3, 1e2));
  // Node 4 only has this between translation prior, no relative translations.
  betweenTranslations.emplace_back(0, 4, Point3(1, 2, 1));

  LevenbergMarquardtParams lmParams;
  // Second argument `true` selects the bilinear-angle translation factor.
  TranslationRecovery recovery(lmParams, true);
  auto estimate =
      recovery.run(measurements, /*scale=*/0.0, betweenTranslations);

  // All four nodes, including the measurement-less node 4, match ground truth.
  EXPECT(assert_equal(Point3(0, 0, 0), estimate.at<Point3>(0), 1e-4));
  EXPECT(assert_equal(Point3(2, 0, 0), estimate.at<Point3>(1), 1e-4));
  EXPECT(assert_equal(Point3(1, -1, 0), estimate.at<Point3>(3), 1e-4));
  EXPECT(assert_equal(Point3(1, 2, 1), estimate.at<Point3>(4), 1e-4));
}
/* ************************************************************************* */ /* ************************************************************************* */
int main() { int main() {
TestResult tr; TestResult tr;