Merge branch 'hybrid-error-scalars' into direct-hybrid-fg

release/4.3a0
Varun Agrawal 2024-09-15 11:19:10 -04:00
commit 506cda86b3
51 changed files with 1660 additions and 767 deletions

View File

@ -191,17 +191,17 @@ E_{gc}(x,y)=\frac{1}{2}\|Rx+Sy-d\|_{\Sigma}^{2}.\label{eq:gc_error}
\end_layout
\begin_layout Subsubsection*
GaussianMixture
HybridGaussianConditional
\end_layout
\begin_layout Standard
A
\emph on
GaussianMixture
HybridGaussianConditional
\emph default
(maybe to be renamed to
\emph on
GaussianMixtureComponent
HybridGaussianConditionalComponent
\emph default
) just indexes into a number of
\emph on
@ -233,7 +233,7 @@ GaussianConditional
to a set of discrete variables.
As
\emph on
GaussianMixture
HybridGaussianConditional
\emph default
is a
\emph on
@ -324,7 +324,7 @@ The key point here is that
\color inherit
is the log-normalization constant for the complete
\emph on
GaussianMixture
HybridGaussianConditional
\emph default
across all values of
\begin_inset Formula $m$
@ -548,15 +548,15 @@ with
\end_layout
\begin_layout Subsubsection*
GaussianMixtureFactor
HybridGaussianFactor
\end_layout
\begin_layout Standard
Analogously, a
\emph on
GaussianMixtureFactor
HybridGaussianFactor
\emph default
typically results from a GaussianMixture by having known values
typically results from a HybridGaussianConditional by having known values
\begin_inset Formula $\bar{x}$
\end_inset
@ -817,7 +817,7 @@ E_{mf}(y,m)=\frac{1}{2}\|A_{m}y-b_{m}\|_{\Sigma_{mfm}}^{2}=E_{gcm}(\bar{x},y)+K_
\end_inset
which is identical to the GaussianMixture error
which is identical to the HybridGaussianConditional error
\begin_inset CommandInset ref
LatexCommand eqref
reference "eq:gm_error"

View File

@ -168,11 +168,11 @@ HybridBayesNet HybridBayesNet::prune(size_t maxNrLeaves) {
DecisionTreeFactor prunedDiscreteProbs =
this->pruneDiscreteConditionals(maxNrLeaves);
/* To prune, we visitWith every leaf in the GaussianMixture.
/* To prune, we visitWith every leaf in the HybridGaussianConditional.
* For each leaf, using the assignment we can check the discrete decision tree
* for 0.0 probability, then just set the leaf to a nullptr.
*
* We can later check the GaussianMixture for just nullptrs.
* We can later check the HybridGaussianConditional for just nullptrs.
*/
HybridBayesNet prunedBayesNetFragment;
@ -182,14 +182,16 @@ HybridBayesNet HybridBayesNet::prune(size_t maxNrLeaves) {
for (auto &&conditional : *this) {
if (auto gm = conditional->asMixture()) {
// Make a copy of the hybrid Gaussian conditional and prune it!
auto prunedGaussianMixture = std::make_shared<GaussianMixture>(*gm);
prunedGaussianMixture->prune(prunedDiscreteProbs); // imperative :-(
auto prunedHybridGaussianConditional =
std::make_shared<HybridGaussianConditional>(*gm);
prunedHybridGaussianConditional->prune(
prunedDiscreteProbs); // imperative :-(
// Type-erase and add to the pruned Bayes Net fragment.
prunedBayesNetFragment.push_back(prunedGaussianMixture);
prunedBayesNetFragment.push_back(prunedHybridGaussianConditional);
} else {
// Add the non-GaussianMixture conditional
// Add the non-HybridGaussianConditional conditional
prunedBayesNetFragment.push_back(conditional);
}
}
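A minimal usage sketch of the pruning entry point shown above; the Bayes net hbn and the leaf budget are hypothetical:

// Keep only the 4 most probable discrete leaves; pruned-away
// HybridGaussianConditional components become nullptr internally.
HybridBayesNet pruned = hbn.prune(4);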

View File

@ -79,7 +79,7 @@ class GTSAM_EXPORT HybridBayesNet : public BayesNet<HybridConditional> {
*
* Example:
* auto shared_ptr_to_a_conditional =
* std::make_shared<GaussianMixture>(...);
* std::make_shared<HybridGaussianConditional>(...);
* hbn.push_back(shared_ptr_to_a_conditional);
*/
void push_back(HybridConditional &&conditional) {
@ -106,7 +106,7 @@ class GTSAM_EXPORT HybridBayesNet : public BayesNet<HybridConditional> {
* Preferred: Emplace a conditional directly using arguments.
*
* Examples:
* hbn.emplace_shared<GaussianMixture>(...);
* hbn.emplace_shared<HybridGaussianConditional>(...);
* hbn.emplace_shared<GaussianConditional>(...);
* hbn.emplace_shared<DiscreteConditional>(...);
*/
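A concrete sketch of both insertion styles (hypothetical key, using gtsam::symbol_shorthand::M):

DiscreteKey mode(M(1), 2);
HybridBayesNet hbn;
// Preferred: construct the conditional in place.
hbn.emplace_shared<DiscreteConditional>(mode, "6/4");
// Equivalent: type-erase an existing shared pointer.
auto dc = std::make_shared<DiscreteConditional>(mode, "6/4");
hbn.push_back(dc);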

View File

@ -55,7 +55,7 @@ HybridConditional::HybridConditional(
/* ************************************************************************ */
HybridConditional::HybridConditional(
const std::shared_ptr<GaussianMixture> &gaussianMixture)
const std::shared_ptr<HybridGaussianConditional> &gaussianMixture)
: BaseFactor(KeyVector(gaussianMixture->keys().begin(),
gaussianMixture->keys().begin() +
gaussianMixture->nrContinuous()),
@ -157,10 +157,10 @@ double HybridConditional::logNormalizationConstant() const {
return gc->logNormalizationConstant();
}
if (auto gm = asMixture()) {
return gm->logNormalizationConstant(); // 0.0!
return gm->logNormalizationConstant(); // 0.0!
}
if (auto dc = asDiscrete()) {
return dc->logNormalizationConstant(); // 0.0!
return dc->logNormalizationConstant(); // 0.0!
}
throw std::runtime_error(
"HybridConditional::logProbability: conditional type not handled");

View File

@ -18,8 +18,8 @@
#pragma once
#include <gtsam/discrete/DiscreteConditional.h>
#include <gtsam/hybrid/GaussianMixture.h>
#include <gtsam/hybrid/HybridFactor.h>
#include <gtsam/hybrid/HybridGaussianConditional.h>
#include <gtsam/hybrid/HybridGaussianFactorGraph.h>
#include <gtsam/inference/Conditional.h>
#include <gtsam/inference/Key.h>
@ -39,7 +39,7 @@ namespace gtsam {
* As a type-erased variant of:
* - DiscreteConditional
* - GaussianConditional
* - GaussianMixture
* - HybridGaussianConditional
*
* The reason why this is important is that `Conditional<T>` is a CRTP class.
* CRTP is static polymorphism such that all CRTP classes, while bearing the
@ -127,7 +127,8 @@ class GTSAM_EXPORT HybridConditional
* @param gaussianMixture Gaussian Mixture Conditional used to create the
* HybridConditional.
*/
HybridConditional(const std::shared_ptr<GaussianMixture>& gaussianMixture);
HybridConditional(
const std::shared_ptr<HybridGaussianConditional>& gaussianMixture);
/// @}
/// @name Testable
@ -146,12 +147,12 @@ class GTSAM_EXPORT HybridConditional
/// @{
/**
* @brief Return HybridConditional as a GaussianMixture
* @brief Return HybridConditional as a HybridGaussianConditional
* @return nullptr if not a mixture
* @return GaussianMixture::shared_ptr otherwise
* @return HybridGaussianConditional::shared_ptr otherwise
*/
GaussianMixture::shared_ptr asMixture() const {
return std::dynamic_pointer_cast<GaussianMixture>(inner_);
HybridGaussianConditional::shared_ptr asMixture() const {
return std::dynamic_pointer_cast<HybridGaussianConditional>(inner_);
}
/**
@ -222,8 +223,10 @@ class GTSAM_EXPORT HybridConditional
boost::serialization::void_cast_register<GaussianConditional, Factor>(
static_cast<GaussianConditional*>(NULL), static_cast<Factor*>(NULL));
} else {
boost::serialization::void_cast_register<GaussianMixture, Factor>(
static_cast<GaussianMixture*>(NULL), static_cast<Factor*>(NULL));
boost::serialization::void_cast_register<HybridGaussianConditional,
Factor>(
static_cast<HybridGaussianConditional*>(NULL),
static_cast<Factor*>(NULL));
}
}
#endif

View File

@ -35,8 +35,8 @@ class GTSAM_EXPORT HybridEliminationTree
public:
typedef EliminationTree<HybridBayesNet, HybridGaussianFactorGraph>
Base; ///< Base class
typedef HybridEliminationTree This; ///< This class
Base; ///< Base class
typedef HybridEliminationTree This; ///< This class
typedef std::shared_ptr<This> shared_ptr; ///< Shared pointer to this class
/// @name Constructors

View File

@ -45,9 +45,9 @@ DiscreteKeys CollectDiscreteKeys(const DiscreteKeys &key1,
* Base class for *truly* hybrid probabilistic factors
*
* Examples:
* - MixtureFactor
* - GaussianMixtureFactor
* - GaussianMixture
* - HybridNonlinearFactor
* - HybridGaussianFactor
* - HybridGaussianConditional
*
* @ingroup hybrid
*/

View File

@ -10,7 +10,7 @@
* -------------------------------------------------------------------------- */
/**
* @file GaussianMixture.cpp
* @file HybridGaussianConditional.cpp
* @brief A hybrid conditional in the Conditional Linear Gaussian scheme
* @author Fan Jiang
* @author Varun Agrawal
@ -20,8 +20,8 @@
#include <gtsam/base/utilities.h>
#include <gtsam/discrete/DiscreteValues.h>
#include <gtsam/hybrid/GaussianMixture.h>
#include <gtsam/hybrid/GaussianMixtureFactor.h>
#include <gtsam/hybrid/HybridGaussianConditional.h>
#include <gtsam/hybrid/HybridGaussianFactor.h>
#include <gtsam/hybrid/HybridValues.h>
#include <gtsam/inference/Conditional-inst.h>
#include <gtsam/linear/GaussianBayesNet.h>
@ -29,10 +29,10 @@
namespace gtsam {
GaussianMixture::GaussianMixture(
HybridGaussianConditional::HybridGaussianConditional(
const KeyVector &continuousFrontals, const KeyVector &continuousParents,
const DiscreteKeys &discreteParents,
const GaussianMixture::Conditionals &conditionals)
const HybridGaussianConditional::Conditionals &conditionals)
: BaseFactor(CollectKeys(continuousFrontals, continuousParents),
discreteParents),
BaseConditional(continuousFrontals.size()),
@ -50,30 +50,33 @@ GaussianMixture::GaussianMixture(
}
/* *******************************************************************************/
const GaussianMixture::Conditionals &GaussianMixture::conditionals() const {
const HybridGaussianConditional::Conditionals &
HybridGaussianConditional::conditionals() const {
return conditionals_;
}
/* *******************************************************************************/
GaussianMixture::GaussianMixture(
HybridGaussianConditional::HybridGaussianConditional(
KeyVector &&continuousFrontals, KeyVector &&continuousParents,
DiscreteKeys &&discreteParents,
std::vector<GaussianConditional::shared_ptr> &&conditionals)
: GaussianMixture(continuousFrontals, continuousParents, discreteParents,
Conditionals(discreteParents, conditionals)) {}
: HybridGaussianConditional(continuousFrontals, continuousParents,
discreteParents,
Conditionals(discreteParents, conditionals)) {}
/* *******************************************************************************/
GaussianMixture::GaussianMixture(
HybridGaussianConditional::HybridGaussianConditional(
const KeyVector &continuousFrontals, const KeyVector &continuousParents,
const DiscreteKeys &discreteParents,
const std::vector<GaussianConditional::shared_ptr> &conditionals)
: GaussianMixture(continuousFrontals, continuousParents, discreteParents,
Conditionals(discreteParents, conditionals)) {}
: HybridGaussianConditional(continuousFrontals, continuousParents,
discreteParents,
Conditionals(discreteParents, conditionals)) {}
/* *******************************************************************************/
// TODO(dellaert): This is copy/paste: GaussianMixture should be derived from
// GaussianMixtureFactor, no?
GaussianFactorGraphTree GaussianMixture::add(
// TODO(dellaert): This is copy/paste: HybridGaussianConditional should be
// derived from HybridGaussianFactor, no?
GaussianFactorGraphTree HybridGaussianConditional::add(
const GaussianFactorGraphTree &sum) const {
using Y = GaussianFactorGraph;
auto add = [](const Y &graph1, const Y &graph2) {
@ -86,7 +89,8 @@ GaussianFactorGraphTree GaussianMixture::add(
}
/* *******************************************************************************/
GaussianFactorGraphTree GaussianMixture::asGaussianFactorGraphTree() const {
GaussianFactorGraphTree HybridGaussianConditional::asGaussianFactorGraphTree()
const {
auto wrap = [this](const GaussianConditional::shared_ptr &gc) {
// First check if conditional has not been pruned
if (gc) {
@ -109,7 +113,7 @@ GaussianFactorGraphTree GaussianMixture::asGaussianFactorGraphTree() const {
}
/* *******************************************************************************/
size_t GaussianMixture::nrComponents() const {
size_t HybridGaussianConditional::nrComponents() const {
size_t total = 0;
conditionals_.visit([&total](const GaussianFactor::shared_ptr &node) {
if (node) total += 1;
@ -118,7 +122,7 @@ size_t GaussianMixture::nrComponents() const {
}
/* *******************************************************************************/
GaussianConditional::shared_ptr GaussianMixture::operator()(
GaussianConditional::shared_ptr HybridGaussianConditional::operator()(
const DiscreteValues &discreteValues) const {
auto &ptr = conditionals_(discreteValues);
if (!ptr) return nullptr;
@ -127,11 +131,12 @@ GaussianConditional::shared_ptr GaussianMixture::operator()(
return conditional;
else
throw std::logic_error(
"A GaussianMixture unexpectedly contained a non-conditional");
"A HybridGaussianConditional unexpectedly contained a non-conditional");
}
/* *******************************************************************************/
bool GaussianMixture::equals(const HybridFactor &lf, double tol) const {
bool HybridGaussianConditional::equals(const HybridFactor &lf,
double tol) const {
const This *e = dynamic_cast<const This *>(&lf);
if (e == nullptr) return false;
@ -149,8 +154,8 @@ bool GaussianMixture::equals(const HybridFactor &lf, double tol) const {
}
/* *******************************************************************************/
void GaussianMixture::print(const std::string &s,
const KeyFormatter &formatter) const {
void HybridGaussianConditional::print(const std::string &s,
const KeyFormatter &formatter) const {
std::cout << (s.empty() ? "" : s + "\n");
if (isContinuous()) std::cout << "Continuous ";
if (isDiscrete()) std::cout << "Discrete ";
@ -177,7 +182,7 @@ void GaussianMixture::print(const std::string &s,
}
/* ************************************************************************* */
KeyVector GaussianMixture::continuousParents() const {
KeyVector HybridGaussianConditional::continuousParents() const {
// Get all parent keys:
const auto range = parents();
KeyVector continuousParentKeys(range.begin(), range.end());
@ -193,7 +198,8 @@ KeyVector GaussianMixture::continuousParents() const {
}
/* ************************************************************************* */
bool GaussianMixture::allFrontalsGiven(const VectorValues &given) const {
bool HybridGaussianConditional::allFrontalsGiven(
const VectorValues &given) const {
for (Key key : frontals()) {
if (given.find(key) == given.end()) {
return false;
@ -203,44 +209,21 @@ bool GaussianMixture::allFrontalsGiven(const VectorValues &given) const {
}
/* ************************************************************************* */
std::shared_ptr<GaussianMixtureFactor> GaussianMixture::likelihood(
std::shared_ptr<HybridGaussianFactor> HybridGaussianConditional::likelihood(
const VectorValues &given) const {
if (!allFrontalsGiven(given)) {
throw std::runtime_error(
"GaussianMixture::likelihood: given values are missing some frontals.");
"HybridGaussianConditional::likelihood: given values are missing some "
"frontals.");
}
const DiscreteKeys discreteParentKeys = discreteKeys();
const KeyVector continuousParentKeys = continuousParents();
const GaussianMixtureFactor::Factors likelihoods(
conditionals_, [&](const GaussianConditional::shared_ptr &conditional) {
const HybridGaussianFactor::FactorValuePairs likelihoods(
conditionals_,
[&](const GaussianConditional::shared_ptr &conditional)
-> GaussianFactorValuePair {
const auto likelihood_m = conditional->likelihood(given);
return {likelihood_m, 0.0};
});
// First compute all the sqrt(|2 pi Sigma|) terms
auto computeLogNormalizers = [](const GaussianFactor::shared_ptr &gf) {
auto jf = std::dynamic_pointer_cast<JacobianFactor>(gf);
// If we have, say, a Hessian factor, then no need to do anything
if (!jf) return 0.0;
auto model = jf->get_model();
// If there is no noise model, there is nothing to do.
if (!model) {
return 0.0;
}
return ComputeLogNormalizer(model);
};
AlgebraicDecisionTree<Key> log_normalizers =
DecisionTree<Key, double>(likelihoods, computeLogNormalizers);
return std::make_shared<GaussianMixtureFactor>(
continuousParentKeys, discreteParentKeys, likelihoods, log_normalizers);
return std::make_shared<HybridGaussianFactor>(
continuousParentKeys, discreteParentKeys, likelihoods);
}
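A usage sketch for likelihood; the conditional hgc, the key X(0), and the value are assumptions:

// Fix the frontal x0 to a known value; the result is a
// HybridGaussianFactor on the continuous and discrete parents.
VectorValues given;
given.insert(X(0), Vector1(0.0));
auto factorOnParents = hgc.likelihood(given);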
/* ************************************************************************* */
std::set<DiscreteKey> DiscreteKeysAsSet(const DiscreteKeys &discreteKeys) {
std::set<DiscreteKey> s;
s.insert(discreteKeys.begin(), discreteKeys.end());
return s;
}
@ -254,8 +237,7 @@ std::set<DiscreteKey> DiscreteKeysAsSet(const DiscreteKeys &discreteKeys) {
* const Assignment<Key> &, const GaussianConditional::shared_ptr &)>
*/
std::function<GaussianConditional::shared_ptr(
const Assignment<Key> &, const GaussianConditional::shared_ptr &)>
GaussianMixture::prunerFunc(const DecisionTreeFactor &discreteProbs) {
HybridGaussianConditional::prunerFunc(const DecisionTreeFactor &discreteProbs) {
// Get the discrete keys as sets for the decision tree
// and the hybrid Gaussian conditional.
auto discreteProbsKeySet = DiscreteKeysAsSet(discreteProbs.discreteKeys());
@ -306,7 +288,7 @@ GaussianMixture::prunerFunc(const DecisionTreeFactor &discreteProbs) {
}
/* *******************************************************************************/
void GaussianMixture::prune(const DecisionTreeFactor &discreteProbs) {
void HybridGaussianConditional::prune(const DecisionTreeFactor &discreteProbs) {
// Functional which loops over all assignments and creates a set of
// GaussianConditionals
auto pruner = prunerFunc(discreteProbs);
@ -316,7 +298,7 @@ void GaussianMixture::prune(const DecisionTreeFactor &discreteProbs) {
}
/* *******************************************************************************/
AlgebraicDecisionTree<Key> GaussianMixture::logProbability(
AlgebraicDecisionTree<Key> HybridGaussianConditional::logProbability(
const VectorValues &continuousValues) const {
// functor to calculate (double) logProbability value from
// GaussianConditional.
@ -334,7 +316,7 @@ AlgebraicDecisionTree<Key> GaussianMixture::logProbability(
}
/* ************************************************************************* */
double GaussianMixture::conditionalError(
double HybridGaussianConditional::conditionalError(
const GaussianConditional::shared_ptr &conditional,
const VectorValues &continuousValues) const {
// Check if valid pointer
@ -351,7 +333,7 @@ double GaussianMixture::conditionalError(
}
/* *******************************************************************************/
AlgebraicDecisionTree<Key> GaussianMixture::errorTree(
AlgebraicDecisionTree<Key> HybridGaussianConditional::errorTree(
const VectorValues &continuousValues) const {
auto errorFunc = [&](const GaussianConditional::shared_ptr &conditional) {
return conditionalError(conditional, continuousValues);
@ -361,20 +343,21 @@ AlgebraicDecisionTree<Key> GaussianMixture::errorTree(
}
/* *******************************************************************************/
double GaussianMixture::error(const HybridValues &values) const {
double HybridGaussianConditional::error(const HybridValues &values) const {
// Directly index to get the conditional, no need to build the whole tree.
auto conditional = conditionals_(values.discrete());
return conditionalError(conditional, values.continuous());
}
/* *******************************************************************************/
double GaussianMixture::logProbability(const HybridValues &values) const {
double HybridGaussianConditional::logProbability(
const HybridValues &values) const {
auto conditional = conditionals_(values.discrete());
return conditional->logProbability(values.continuous());
}
/* *******************************************************************************/
double GaussianMixture::evaluate(const HybridValues &values) const {
double HybridGaussianConditional::evaluate(const HybridValues &values) const {
auto conditional = conditionals_(values.discrete());
return conditional->evaluate(values.continuous());
}

View File

@ -10,7 +10,7 @@
* -------------------------------------------------------------------------- */
/**
* @file GaussianMixture.h
* @file HybridGaussianConditional.h
* @brief A hybrid conditional in the Conditional Linear Gaussian scheme
* @author Fan Jiang
* @author Varun Agrawal
@ -23,8 +23,8 @@
#include <gtsam/discrete/DecisionTree.h>
#include <gtsam/discrete/DecisionTreeFactor.h>
#include <gtsam/discrete/DiscreteKey.h>
#include <gtsam/hybrid/GaussianMixtureFactor.h>
#include <gtsam/hybrid/HybridFactor.h>
#include <gtsam/hybrid/HybridGaussianFactor.h>
#include <gtsam/inference/Conditional.h>
#include <gtsam/linear/GaussianConditional.h>
@ -50,14 +50,14 @@ class HybridValues;
*
* @ingroup hybrid
*/
class GTSAM_EXPORT GaussianMixture
class GTSAM_EXPORT HybridGaussianConditional
: public HybridFactor,
public Conditional<HybridFactor, GaussianMixture> {
public Conditional<HybridFactor, HybridGaussianConditional> {
public:
using This = GaussianMixture;
using shared_ptr = std::shared_ptr<GaussianMixture>;
using This = HybridGaussianConditional;
using shared_ptr = std::shared_ptr<HybridGaussianConditional>;
using BaseFactor = HybridFactor;
using BaseConditional = Conditional<HybridFactor, GaussianMixture>;
using BaseConditional = Conditional<HybridFactor, HybridGaussianConditional>;
/// typedef for Decision Tree of Gaussian Conditionals
using Conditionals = DecisionTree<Key, GaussianConditional::shared_ptr>;
@ -67,7 +67,7 @@ class GTSAM_EXPORT GaussianMixture
double logConstant_; ///< log of the normalization constant.
/**
* @brief Convert a GaussianMixture of conditionals into
* @brief Convert a HybridGaussianConditional of conditionals into
* a DecisionTree of Gaussian factor graphs.
*/
GaussianFactorGraphTree asGaussianFactorGraphTree() const;
@ -88,10 +88,10 @@ class GTSAM_EXPORT GaussianMixture
/// @{
/// Default constructor, mainly for serialization.
GaussianMixture() = default;
HybridGaussianConditional() = default;
/**
* @brief Construct a new GaussianMixture object.
* @brief Construct a new HybridGaussianConditional object.
*
* @param continuousFrontals the continuous frontals.
* @param continuousParents the continuous parents.
@ -101,10 +101,10 @@ class GTSAM_EXPORT GaussianMixture
* cardinality of the DiscreteKeys in discreteParents, since the
* discreteParents will be used as the labels in the decision tree.
*/
GaussianMixture(const KeyVector &continuousFrontals,
const KeyVector &continuousParents,
const DiscreteKeys &discreteParents,
const Conditionals &conditionals);
HybridGaussianConditional(const KeyVector &continuousFrontals,
const KeyVector &continuousParents,
const DiscreteKeys &discreteParents,
const Conditionals &conditionals);
/**
* @brief Make a HybridGaussianConditional from a list of Gaussian conditionals
@ -114,9 +114,10 @@ class GTSAM_EXPORT GaussianMixture
* @param discreteParents Discrete parents variables
* @param conditionals List of conditionals
*/
GaussianMixture(KeyVector &&continuousFrontals, KeyVector &&continuousParents,
DiscreteKeys &&discreteParents,
std::vector<GaussianConditional::shared_ptr> &&conditionals);
HybridGaussianConditional(
KeyVector &&continuousFrontals, KeyVector &&continuousParents,
DiscreteKeys &&discreteParents,
std::vector<GaussianConditional::shared_ptr> &&conditionals);
/**
* @brief Make a HybridGaussianConditional from a list of Gaussian conditionals
@ -126,7 +127,7 @@ class GTSAM_EXPORT GaussianMixture
* @param discreteParents Discrete parents variables
* @param conditionals List of conditionals
*/
GaussianMixture(
HybridGaussianConditional(
const KeyVector &continuousFrontals, const KeyVector &continuousParents,
const DiscreteKeys &discreteParents,
const std::vector<GaussianConditional::shared_ptr> &conditionals);
@ -140,7 +141,7 @@ class GTSAM_EXPORT GaussianMixture
/// Print utility
void print(
const std::string &s = "GaussianMixture\n",
const std::string &s = "HybridGaussianConditional\n",
const KeyFormatter &formatter = DefaultKeyFormatter) const override;
/// @}
@ -165,14 +166,14 @@ class GTSAM_EXPORT GaussianMixture
* Create a likelihood factor for a hybrid Gaussian conditional; returns a
* HybridGaussianFactor on the parents.
*/
std::shared_ptr<GaussianMixtureFactor> likelihood(
std::shared_ptr<HybridGaussianFactor> likelihood(
const VectorValues &given) const;
/// Getter for the underlying Conditionals DecisionTree
const Conditionals &conditionals() const;
/**
* @brief Compute logProbability of the GaussianMixture as a tree.
* @brief Compute logProbability of the HybridGaussianConditional as a tree.
*
* @param continuousValues The continuous VectorValues.
* @return AlgebraicDecisionTree<Key> A decision tree with the same keys
@ -209,7 +210,7 @@ class GTSAM_EXPORT GaussianMixture
double error(const HybridValues &values) const override;
/**
* @brief Compute error of the GaussianMixture as a tree.
* @brief Compute error of the HybridGaussianConditional as a tree.
*
* @param continuousValues The continuous VectorValues.
* @return AlgebraicDecisionTree<Key> A decision tree on the discrete keys
@ -277,6 +278,7 @@ std::set<DiscreteKey> DiscreteKeysAsSet(const DiscreteKeys &discreteKeys);
// traits
template <>
struct traits<GaussianMixture> : public Testable<GaussianMixture> {};
struct traits<HybridGaussianConditional>
: public Testable<HybridGaussianConditional> {};
} // namespace gtsam

View File

@ -10,7 +10,7 @@
* -------------------------------------------------------------------------- */
/**
* @file GaussianMixtureFactor.cpp
* @file HybridGaussianFactor.cpp
* @brief A set of Gaussian factors indexed by a set of discrete keys.
* @author Fan Jiang
* @author Varun Agrawal
@ -21,7 +21,7 @@
#include <gtsam/base/utilities.h>
#include <gtsam/discrete/DecisionTree-inl.h>
#include <gtsam/discrete/DecisionTree.h>
#include <gtsam/hybrid/GaussianMixtureFactor.h>
#include <gtsam/hybrid/HybridGaussianFactor.h>
#include <gtsam/hybrid/HybridValues.h>
#include <gtsam/linear/GaussianFactor.h>
#include <gtsam/linear/GaussianFactorGraph.h>
@ -34,52 +34,51 @@ namespace gtsam {
* This is done by storing the normalizer value in
* the `b` vector as an additional row.
*
* @param factors DecisionTree of GaussianFactor shared pointers.
* @param logNormalizers Tree of log-normalizers corresponding to each
* @param factors DecisionTree of GaussianFactors and arbitrary scalars,
* one scalar per Gaussian factor in factors.
* @return GaussianMixtureFactor::Factors
* @return HybridGaussianFactor::Factors
*/
GaussianMixtureFactor::Factors augment(
const GaussianMixtureFactor::Factors &factors,
const AlgebraicDecisionTree<Key> &logNormalizers) {
HybridGaussianFactor::Factors augment(
const HybridGaussianFactor::FactorValuePairs &factors) {
// Find the minimum value so we can shift all values to be non-negative.
// Done because we cannot take the square root of negative numbers.
double min_log_normalizer = logNormalizers.min();
AlgebraicDecisionTree<Key> log_normalizers = logNormalizers.apply(
[&min_log_normalizer](double n) { return n - min_log_normalizer; });
auto unzipped_pair = unzip(factors);
const HybridGaussianFactor::Factors gaussianFactors = unzipped_pair.first;
const AlgebraicDecisionTree<Key> valueTree = unzipped_pair.second;
double min_value = valueTree.min();
AlgebraicDecisionTree<Key> values =
valueTree.apply([&min_value](double n) { return n - min_value; });
// Finally, update the [A|b] matrices.
auto update = [&log_normalizers](
const Assignment<Key> &assignment,
const GaussianMixtureFactor::sharedFactor &gf) {
auto update = [&values](const Assignment<Key> &assignment,
const HybridGaussianFactor::sharedFactor &gf) {
auto jf = std::dynamic_pointer_cast<JacobianFactor>(gf);
if (!jf) return gf;
// If the log_normalizer is 0, do nothing
if (log_normalizers(assignment) == 0.0) return gf;
if (values(assignment) == 0.0) return gf;
GaussianFactorGraph gfg;
gfg.push_back(jf);
Vector c(1);
c << std::sqrt(log_normalizers(assignment));
c << std::sqrt(values(assignment));
auto constantFactor = std::make_shared<JacobianFactor>(c);
gfg.push_back(constantFactor);
return std::dynamic_pointer_cast<GaussianFactor>(
std::make_shared<JacobianFactor>(gfg));
};
return factors.apply(update);
return gaussianFactors.apply(update);
}
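A quick sanity check of the augmentation above, assuming the standard JacobianFactor convention error(x) = 0.5 * ||Ax - b||^2:

// A variable-free row with rhs c contributes 0.5 * c * c to the error.
// With c = sqrt(v - v_min) as computed above, the augmented factor's
// error becomes error(x) + 0.5 * (v - v_min), so each leaf's scalar
// enters the error up to the shared constant v_min.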
/* *******************************************************************************/
GaussianMixtureFactor::GaussianMixtureFactor(
const KeyVector &continuousKeys, const DiscreteKeys &discreteKeys,
const Factors &factors, const AlgebraicDecisionTree<Key> &logNormalizers)
: Base(continuousKeys, discreteKeys),
factors_(augment(factors, logNormalizers)) {}
HybridGaussianFactor::HybridGaussianFactor(const KeyVector &continuousKeys,
const DiscreteKeys &discreteKeys,
const FactorValuePairs &factors)
: Base(continuousKeys, discreteKeys), factors_(augment(factors)) {}
/* *******************************************************************************/
bool GaussianMixtureFactor::equals(const HybridFactor &lf, double tol) const {
bool HybridGaussianFactor::equals(const HybridFactor &lf, double tol) const {
const This *e = dynamic_cast<const This *>(&lf);
if (e == nullptr) return false;
@ -96,10 +95,10 @@ bool GaussianMixtureFactor::equals(const HybridFactor &lf, double tol) const {
}
/* *******************************************************************************/
void GaussianMixtureFactor::print(const std::string &s,
const KeyFormatter &formatter) const {
void HybridGaussianFactor::print(const std::string &s,
const KeyFormatter &formatter) const {
std::cout << (s.empty() ? "" : s + "\n");
std::cout << "GaussianMixtureFactor" << std::endl;
std::cout << "HybridGaussianFactor" << std::endl;
HybridFactor::print("", formatter);
std::cout << "{\n";
if (factors_.empty()) {
@ -122,13 +121,13 @@ void GaussianMixtureFactor::print(const std::string &s,
}
/* *******************************************************************************/
GaussianMixtureFactor::sharedFactor GaussianMixtureFactor::operator()(
HybridGaussianFactor::sharedFactor HybridGaussianFactor::operator()(
const DiscreteValues &assignment) const {
return factors_(assignment);
}
/* *******************************************************************************/
GaussianFactorGraphTree GaussianMixtureFactor::add(
GaussianFactorGraphTree HybridGaussianFactor::add(
const GaussianFactorGraphTree &sum) const {
using Y = GaussianFactorGraph;
auto add = [](const Y &graph1, const Y &graph2) {
@ -141,14 +140,14 @@ GaussianFactorGraphTree GaussianMixtureFactor::add(
}
/* *******************************************************************************/
GaussianFactorGraphTree GaussianMixtureFactor::asGaussianFactorGraphTree()
GaussianFactorGraphTree HybridGaussianFactor::asGaussianFactorGraphTree()
const {
auto wrap = [](const sharedFactor &gf) { return GaussianFactorGraph{gf}; };
return {factors_, wrap};
}
/* *******************************************************************************/
AlgebraicDecisionTree<Key> GaussianMixtureFactor::errorTree(
AlgebraicDecisionTree<Key> HybridGaussianFactor::errorTree(
const VectorValues &continuousValues) const {
// functor to convert from sharedFactor to double error value.
auto errorFunc = [&continuousValues](const sharedFactor &gf) {
@ -159,7 +158,7 @@ AlgebraicDecisionTree<Key> GaussianMixtureFactor::errorTree(
}
/* *******************************************************************************/
double GaussianMixtureFactor::error(const HybridValues &values) const {
double HybridGaussianFactor::error(const HybridValues &values) const {
const sharedFactor gf = factors_(values.discrete());
return gf->error(values.continuous());
}

View File

@ -10,7 +10,7 @@
* -------------------------------------------------------------------------- */
/**
* @file GaussianMixtureFactor.h
* @file HybridGaussianFactor.h
* @brief A set of GaussianFactors, indexed by a set of discrete keys.
* @author Fan Jiang
* @author Varun Agrawal
@ -33,6 +33,9 @@ class HybridValues;
class DiscreteValues;
class VectorValues;
/// Alias for a pair of GaussianFactor::shared_ptr and a double value.
using GaussianFactorValuePair = std::pair<GaussianFactor::shared_ptr, double>;
/**
* @brief Implementation of a discrete conditional mixture factor.
* Implements a joint discrete-continuous factor where the discrete variable
@ -44,15 +47,17 @@ class VectorValues;
*
* @ingroup hybrid
*/
class GTSAM_EXPORT GaussianMixtureFactor : public HybridFactor {
class GTSAM_EXPORT HybridGaussianFactor : public HybridFactor {
public:
using Base = HybridFactor;
using This = GaussianMixtureFactor;
using This = HybridGaussianFactor;
using shared_ptr = std::shared_ptr<This>;
using sharedFactor = std::shared_ptr<GaussianFactor>;
/// typedef for Decision Tree of Gaussian factors and log-constant.
/// typedef for Decision Tree of Gaussian factors and arbitrary value.
using FactorValuePairs = DecisionTree<Key, GaussianFactorValuePair>;
/// typedef for Decision Tree of Gaussian factors.
using Factors = DecisionTree<Key, sharedFactor>;
private:
@ -72,7 +77,7 @@ class GTSAM_EXPORT GaussianMixtureFactor : public HybridFactor {
/// @{
/// Default constructor, mainly for serialization.
GaussianMixtureFactor() = default;
HybridGaussianFactor() = default;
/**
* @brief Construct a new hybrid Gaussian factor.
@ -80,34 +85,26 @@ class GTSAM_EXPORT GaussianMixtureFactor : public HybridFactor {
* @param continuousKeys A vector of keys representing continuous variables.
* @param discreteKeys A vector of keys representing discrete variables and
* their cardinalities.
* @param factors The decision tree of Gaussian factors stored
* as the mixture density.
* @param logNormalizers Tree of log-normalizers corresponding to each
* Gaussian factor in factors.
* @param factors The decision tree of Gaussian factors and arbitrary scalars.
*/
GaussianMixtureFactor(const KeyVector &continuousKeys,
const DiscreteKeys &discreteKeys,
const Factors &factors,
const AlgebraicDecisionTree<Key> &logNormalizers =
AlgebraicDecisionTree<Key>(0.0));
HybridGaussianFactor(const KeyVector &continuousKeys,
const DiscreteKeys &discreteKeys,
const FactorValuePairs &factors);
/**
* @brief Construct a new GaussianMixtureFactor object using a vector of
* @brief Construct a new HybridGaussianFactor object using a vector of
* GaussianFactor shared pointers.
*
* @param continuousKeys Vector of keys for continuous factors.
* @param discreteKeys Vector of discrete keys.
* @param factors Vector of gaussian factor shared pointers.
* @param logNormalizers Tree of log-normalizers corresponding to each
* Gaussian factor in factors.
* @param factors Vector of Gaussian factor shared pointers
* and arbitrary scalars.
*/
GaussianMixtureFactor(const KeyVector &continuousKeys,
const DiscreteKeys &discreteKeys,
const std::vector<sharedFactor> &factors,
const AlgebraicDecisionTree<Key> &logNormalizers =
AlgebraicDecisionTree<Key>(0.0))
: GaussianMixtureFactor(continuousKeys, discreteKeys,
Factors(discreteKeys, factors), logNormalizers) {}
HybridGaussianFactor(const KeyVector &continuousKeys,
const DiscreteKeys &discreteKeys,
const std::vector<GaussianFactorValuePair> &factors)
: HybridGaussianFactor(continuousKeys, discreteKeys,
FactorValuePairs(discreteKeys, factors)) {}
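A minimal construction sketch for this overload; keys, matrices, and scalars are hypothetical, with X/M from gtsam::symbol_shorthand:

DiscreteKey mode(M(1), 2);
auto f0 = std::make_shared<JacobianFactor>(X(1), I_1x1, Vector1(0.0));
auto f1 = std::make_shared<JacobianFactor>(X(1), I_1x1, Vector1(2.0));
std::vector<GaussianFactorValuePair> pairs{{f0, 0.0}, {f1, 0.0}};
HybridGaussianFactor hgf({X(1)}, {mode}, pairs);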
/// @}
/// @name Testable
@ -136,7 +133,7 @@ class GTSAM_EXPORT GaussianMixtureFactor : public HybridFactor {
GaussianFactorGraphTree add(const GaussianFactorGraphTree &sum) const;
/**
* @brief Compute error of the GaussianMixtureFactor as a tree.
* @brief Compute error of the HybridGaussianFactor as a tree.
*
* @param continuousValues The continuous VectorValues.
* @return AlgebraicDecisionTree<Key> A decision tree with the same keys
@ -154,9 +151,9 @@ class GTSAM_EXPORT GaussianMixtureFactor : public HybridFactor {
/// Getter for GaussianFactor decision tree
const Factors &factors() const { return factors_; }
/// Add MixtureFactor to a Sum, syntactic sugar.
/// Add HybridNonlinearFactor to a Sum, syntactic sugar.
friend GaussianFactorGraphTree &operator+=(
GaussianFactorGraphTree &sum, const GaussianMixtureFactor &factor) {
GaussianFactorGraphTree &sum, const HybridGaussianFactor &factor) {
sum = factor.add(sum);
return sum;
}
@ -176,8 +173,7 @@ class GTSAM_EXPORT GaussianMixtureFactor : public HybridFactor {
// traits
template <>
struct traits<GaussianMixtureFactor> : public Testable<GaussianMixtureFactor> {
};
struct traits<HybridGaussianFactor> : public Testable<HybridGaussianFactor> {};
/**
* @brief Helper function to compute the sqrt(|2πΣ|) normalizer values

View File

@ -23,11 +23,11 @@
#include <gtsam/discrete/DiscreteEliminationTree.h>
#include <gtsam/discrete/DiscreteFactorGraph.h>
#include <gtsam/discrete/DiscreteJunctionTree.h>
#include <gtsam/hybrid/GaussianMixture.h>
#include <gtsam/hybrid/GaussianMixtureFactor.h>
#include <gtsam/hybrid/HybridConditional.h>
#include <gtsam/hybrid/HybridEliminationTree.h>
#include <gtsam/hybrid/HybridFactor.h>
#include <gtsam/hybrid/HybridGaussianConditional.h>
#include <gtsam/hybrid/HybridGaussianFactor.h>
#include <gtsam/hybrid/HybridGaussianFactorGraph.h>
#include <gtsam/hybrid/HybridJunctionTree.h>
#include <gtsam/inference/EliminateableFactorGraph-inst.h>
@ -92,13 +92,13 @@ void HybridGaussianFactorGraph::printErrors(
// Clear the stringstream
ss.str(std::string());
if (auto gmf = std::dynamic_pointer_cast<GaussianMixtureFactor>(factor)) {
if (auto hgf = std::dynamic_pointer_cast<HybridGaussianFactor>(factor)) {
if (factor == nullptr) {
std::cout << "nullptr"
<< "\n";
} else {
gmf->operator()(values.discrete())->print(ss.str(), keyFormatter);
std::cout << "error = " << gmf->error(values) << std::endl;
hgf->operator()(values.discrete())->print(ss.str(), keyFormatter);
std::cout << "error = " << factor->error(values) << std::endl;
}
} else if (auto hc = std::dynamic_pointer_cast<HybridConditional>(factor)) {
if (factor == nullptr) {
@ -178,9 +178,9 @@ GaussianFactorGraphTree HybridGaussianFactorGraph::assembleGraphTree() const {
// TODO(dellaert): just use a virtual method defined in HybridFactor.
if (auto gf = dynamic_pointer_cast<GaussianFactor>(f)) {
result = addGaussian(result, gf);
} else if (auto gmf = dynamic_pointer_cast<GaussianMixtureFactor>(f)) {
} else if (auto gmf = dynamic_pointer_cast<HybridGaussianFactor>(f)) {
result = gmf->add(result);
} else if (auto gm = dynamic_pointer_cast<GaussianMixture>(f)) {
} else if (auto gm = dynamic_pointer_cast<HybridGaussianConditional>(f)) {
result = gm->add(result);
} else if (auto hc = dynamic_pointer_cast<HybridConditional>(f)) {
if (auto gm = hc->asMixture()) {
@ -258,8 +258,8 @@ discreteElimination(const HybridGaussianFactorGraph &factors,
for (auto &f : factors) {
if (auto df = dynamic_pointer_cast<DiscreteFactor>(f)) {
dfg.push_back(df);
} else if (auto gmf = dynamic_pointer_cast<GaussianMixtureFactor>(f)) {
// Case where we have a GaussianMixtureFactor with no continuous keys.
} else if (auto gmf = dynamic_pointer_cast<HybridGaussianFactor>(f)) {
// Case where we have a HybridGaussianFactor with no continuous keys.
// In this case, compute discrete probabilities.
auto logProbability =
[&](const GaussianFactor::shared_ptr &factor) -> double {
@ -309,7 +309,7 @@ GaussianFactorGraphTree removeEmpty(const GaussianFactorGraphTree &sum) {
/* ************************************************************************ */
using Result = std::pair<std::shared_ptr<GaussianConditional>,
GaussianMixtureFactor::sharedFactor>;
HybridGaussianFactor::sharedFactor>;
/**
* Compute the probability p(μ;m) = exp(-error(μ;m)) * sqrt(det(2π Σ_m)
@ -341,14 +341,14 @@ static std::shared_ptr<Factor> createDiscreteFactor(
return std::make_shared<DecisionTreeFactor>(discreteSeparator, probabilities);
}
// Create GaussianMixtureFactor on the separator, taking care to correct
// Create HybridGaussianFactor on the separator, taking care to correct
// for conditional constants.
static std::shared_ptr<Factor> createGaussianMixtureFactor(
static std::shared_ptr<Factor> createHybridGaussianFactor(
const DecisionTree<Key, Result> &eliminationResults,
const KeyVector &continuousSeparator,
const DiscreteKeys &discreteSeparator) {
// Correct for the normalization constant used up by the conditional
auto correct = [&](const Result &pair) -> GaussianFactor::shared_ptr {
auto correct = [&](const Result &pair) -> GaussianFactorValuePair {
const auto &[conditional, factor] = pair;
if (factor) {
auto hf = std::dynamic_pointer_cast<HessianFactor>(factor);
@ -357,13 +357,13 @@ static std::shared_ptr<Factor> createGaussianMixtureFactor(
// as per the Hessian definition
hf->constantTerm() += 2.0 * conditional->logNormalizationConstant();
}
return factor;
return {factor, 0.0};
};
DecisionTree<Key, GaussianFactor::shared_ptr> newFactors(eliminationResults,
correct);
DecisionTree<Key, GaussianFactorValuePair> newFactors(eliminationResults,
correct);
return std::make_shared<GaussianMixtureFactor>(continuousSeparator,
discreteSeparator, newFactors);
return std::make_shared<HybridGaussianFactor>(continuousSeparator,
discreteSeparator, newFactors);
}
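Why the factor of 2.0 above is right, assuming the usual HessianFactor convention error(x) = 0.5 x'Gx - x'g + 0.5 f:

// The constant term f enters the error as 0.5 * f, so adding
// 2.0 * logNormalizationConstant() to f raises the error by exactly
// logNormalizationConstant(), compensating for the constant consumed
// by the newly created conditional.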
static std::pair<HybridConditional::shared_ptr, std::shared_ptr<Factor>>
@ -400,18 +400,18 @@ hybridElimination(const HybridGaussianFactorGraph &factors,
DecisionTree<Key, Result> eliminationResults(factorGraphTree, eliminate);
// If there are no more continuous parents we create a DiscreteFactor with the
// error for each discrete choice. Otherwise, create a GaussianMixtureFactor
// error for each discrete choice. Otherwise, create a HybridGaussianFactor
// on the separator, taking care to correct for conditional constants.
auto newFactor =
continuousSeparator.empty()
? createDiscreteFactor(eliminationResults, discreteSeparator)
: createGaussianMixtureFactor(eliminationResults, continuousSeparator,
discreteSeparator);
: createHybridGaussianFactor(eliminationResults, continuousSeparator,
discreteSeparator);
// Create the GaussianMixture from the conditionals
GaussianMixture::Conditionals conditionals(
// Create the HybridGaussianConditional from the conditionals
HybridGaussianConditional::Conditionals conditionals(
eliminationResults, [](const Result &pair) { return pair.first; });
auto gaussianMixture = std::make_shared<GaussianMixture>(
auto gaussianMixture = std::make_shared<HybridGaussianConditional>(
frontalKeys, continuousSeparator, discreteSeparator, conditionals);
return {std::make_shared<HybridConditional>(gaussianMixture), newFactor};
@ -458,7 +458,7 @@ EliminateHybrid(const HybridGaussianFactorGraph &factors,
// Because of all these reasons, we carefully consider how to
// implement the hybrid factors so that we do not get poor performance.
// The first thing is how to represent the GaussianMixture.
// The first thing is how to represent the HybridGaussianConditional.
// A very possible scenario is that the incoming factors will have different
// levels of discrete keys. For example, imagine we are going to eliminate the
// fragment: $\phi(x1,c1,c2)$, $\phi(x1,c2,c3)$, which is perfectly valid.
@ -549,7 +549,7 @@ AlgebraicDecisionTree<Key> HybridGaussianFactorGraph::errorTree(
f = hc->inner();
}
if (auto gaussianMixture = dynamic_pointer_cast<GaussianMixtureFactor>(f)) {
if (auto hgf = dynamic_pointer_cast<HybridGaussianFactor>(f)) {
// Compute factor error and add it.
error_tree = error_tree + hgf->errorTree(continuousValues);
} else if (auto gaussian = dynamic_pointer_cast<GaussianFactor>(f)) {
@ -597,10 +597,10 @@ GaussianFactorGraph HybridGaussianFactorGraph::operator()(
gfg.push_back(gf);
} else if (auto gc = std::dynamic_pointer_cast<GaussianConditional>(f)) {
gfg.push_back(gc);
} else if (auto gmf = std::dynamic_pointer_cast<GaussianMixtureFactor>(f)) {
gfg.push_back((*gmf)(assignment));
} else if (auto gm = dynamic_pointer_cast<GaussianMixture>(f)) {
gfg.push_back((*gm)(assignment));
} else if (auto hgf = std::dynamic_pointer_cast<HybridGaussianFactor>(f)) {
gfg.push_back((*hgf)(assignment));
} else if (auto hgc = dynamic_pointer_cast<HybridGaussianConditional>(f)) {
gfg.push_back((*hgc)(assignment));
} else {
continue;
}
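A sketch of how this assignment-indexed view might be used; the graph hgfg and the key M(1) are hypothetical:

DiscreteValues assignment;
assignment[M(1)] = 0;  // select the first mode
GaussianFactorGraph gfg = hgfg(assignment);
VectorValues delta = gfg.optimize();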

View File

@ -18,9 +18,9 @@
#pragma once
#include <gtsam/hybrid/GaussianMixtureFactor.h>
#include <gtsam/hybrid/HybridFactor.h>
#include <gtsam/hybrid/HybridFactorGraph.h>
#include <gtsam/hybrid/HybridGaussianFactor.h>
#include <gtsam/inference/EliminateableFactorGraph.h>
#include <gtsam/inference/FactorGraph.h>
#include <gtsam/inference/Ordering.h>
@ -221,7 +221,6 @@ class GTSAM_EXPORT HybridGaussianFactorGraph
/// Get the GaussianFactorGraph at a given discrete assignment.
GaussianFactorGraph operator()(const DiscreteValues& assignment) const;
};
} // namespace gtsam

View File

@ -51,11 +51,10 @@ class HybridEliminationTree;
*/
class GTSAM_EXPORT HybridJunctionTree
: public JunctionTree<HybridBayesTree, HybridGaussianFactorGraph> {
public:
typedef JunctionTree<HybridBayesTree, HybridGaussianFactorGraph>
Base; ///< Base class
typedef HybridJunctionTree This; ///< This class
Base; ///< Base class
typedef HybridJunctionTree This; ///< This class
typedef std::shared_ptr<This> shared_ptr; ///< Shared pointer to this class
/**

View File

@ -10,7 +10,7 @@
* -------------------------------------------------------------------------- */
/**
* @file MixtureFactor.h
* @file HybridNonlinearFactor.h
* @brief A nonlinear mixture factor over continuous and discrete variables.
* @author Kevin Doherty, kdoherty@mit.edu
* @author Varun Agrawal
@ -20,7 +20,7 @@
#pragma once
#include <gtsam/discrete/DiscreteValues.h>
#include <gtsam/hybrid/GaussianMixtureFactor.h>
#include <gtsam/hybrid/HybridGaussianFactor.h>
#include <gtsam/hybrid/HybridValues.h>
#include <gtsam/nonlinear/NonlinearFactor.h>
#include <gtsam/nonlinear/NonlinearFactorGraph.h>
@ -33,6 +33,9 @@
namespace gtsam {
/// Alias for a NonlinearFactor shared pointer and double scalar pair.
using NonlinearFactorValuePair = std::pair<NonlinearFactor::shared_ptr, double>;
/**
* @brief Implementation of a discrete conditional mixture factor.
*
@ -44,18 +47,18 @@ namespace gtsam {
* one of (NonlinearFactor, GaussianFactor) which can then be checked to perform
* the correct operation.
*/
class MixtureFactor : public HybridFactor {
class HybridNonlinearFactor : public HybridFactor {
public:
using Base = HybridFactor;
using This = MixtureFactor;
using shared_ptr = std::shared_ptr<MixtureFactor>;
using This = HybridNonlinearFactor;
using shared_ptr = std::shared_ptr<HybridNonlinearFactor>;
using sharedFactor = std::shared_ptr<NonlinearFactor>;
/**
* @brief typedef for DecisionTree which has Keys as node labels and
* NonlinearFactor as leaf nodes.
* pairs of NonlinearFactor & an arbitrary scalar as leaf nodes.
*/
using Factors = DecisionTree<Key, sharedFactor>;
using Factors = DecisionTree<Key, NonlinearFactorValuePair>;
private:
/// Decision tree of Gaussian factors indexed by discrete keys.
@ -63,7 +66,7 @@ class MixtureFactor : public HybridFactor {
bool normalized_;
public:
MixtureFactor() = default;
HybridNonlinearFactor() = default;
/**
* @brief Construct from Decision tree.
@ -74,8 +77,8 @@ class MixtureFactor : public HybridFactor {
* @param normalized Flag indicating if the factor error is already
* normalized.
*/
MixtureFactor(const KeyVector& keys, const DiscreteKeys& discreteKeys,
const Factors& factors, bool normalized = false)
HybridNonlinearFactor(const KeyVector& keys, const DiscreteKeys& discreteKeys,
const Factors& factors, bool normalized = false)
: Base(keys, discreteKeys), factors_(factors), normalized_(normalized) {}
/**
@ -90,28 +93,29 @@ class MixtureFactor : public HybridFactor {
* Will be typecast to NonlinearFactor shared pointers.
* @param keys Vector of keys for continuous factors.
* @param discreteKeys Vector of discrete keys.
* @param factors Vector of nonlinear factors.
* @param factors Vector of nonlinear factor and scalar pairs.
* @param normalized Flag indicating if the factor error is already
* normalized.
*/
template <typename FACTOR>
MixtureFactor(const KeyVector& keys, const DiscreteKeys& discreteKeys,
const std::vector<std::shared_ptr<FACTOR>>& factors,
bool normalized = false)
HybridNonlinearFactor(
const KeyVector& keys, const DiscreteKeys& discreteKeys,
const std::vector<std::pair<std::shared_ptr<FACTOR>, double>>& factors,
bool normalized = false)
: Base(keys, discreteKeys), normalized_(normalized) {
std::vector<NonlinearFactor::shared_ptr> nonlinear_factors;
std::vector<NonlinearFactorValuePair> nonlinear_factors;
KeySet continuous_keys_set(keys.begin(), keys.end());
KeySet factor_keys_set;
for (auto&& f : factors) {
for (auto&& [f, val] : factors) {
// Insert all factor continuous keys in the continuous keys set.
std::copy(f->keys().begin(), f->keys().end(),
std::inserter(factor_keys_set, factor_keys_set.end()));
if (auto nf = std::dynamic_pointer_cast<NonlinearFactor>(f)) {
nonlinear_factors.push_back(nf);
nonlinear_factors.emplace_back(nf, val);
} else {
throw std::runtime_error(
"Factors passed into MixtureFactor need to be nonlinear!");
"Factors passed into HybridNonlinearFactor need to be nonlinear!");
}
}
factors_ = Factors(discreteKeys, nonlinear_factors);
@ -124,7 +128,7 @@ class MixtureFactor : public HybridFactor {
}
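A hypothetical two-mode example of this constructor, assuming <gtsam/slam/BetweenFactor.h> and gtsam::symbol_shorthand X/M; zero scalars leave the error untouched:

DiscreteKey mode(M(1), 2);
auto noise = noiseModel::Isotropic::Sigma(1, 1.0);
auto still = std::make_shared<BetweenFactor<double>>(X(1), X(2), 0.0, noise);
auto moving = std::make_shared<BetweenFactor<double>>(X(1), X(2), 1.0, noise);
std::vector<std::pair<std::shared_ptr<BetweenFactor<double>>, double>> modes{
    {still, 0.0}, {moving, 0.0}};
HybridNonlinearFactor motionMixture({X(1), X(2)}, {mode}, modes);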
/**
* @brief Compute error of the MixtureFactor as a tree.
* @brief Compute error of the HybridNonlinearFactor as a tree.
*
* @param continuousValues The continuous values for which to compute the
* error.
@ -133,9 +137,11 @@ class MixtureFactor : public HybridFactor {
*/
AlgebraicDecisionTree<Key> errorTree(const Values& continuousValues) const {
// functor to convert a (factor, scalar) pair to a double error value.
auto errorFunc = [continuousValues](const sharedFactor& factor) {
return factor->error(continuousValues);
};
auto errorFunc =
[continuousValues](const std::pair<sharedFactor, double>& f) {
auto [factor, val] = f;
return factor->error(continuousValues) + (0.5 * val * val);
};
DecisionTree<Key, double> result(factors_, errorFunc);
return result;
}
@ -150,12 +156,10 @@ class MixtureFactor : public HybridFactor {
double error(const Values& continuousValues,
const DiscreteValues& discreteValues) const {
// Retrieve the factor corresponding to the assignment in discreteValues.
auto factor = factors_(discreteValues);
auto [factor, val] = factors_(discreteValues);
// Compute the error for the selected factor
const double factorError = factor->error(continuousValues);
if (normalized_) return factorError;
return factorError + this->nonlinearFactorLogNormalizingConstant(
factor, continuousValues);
return factorError + (0.5 * val * val);
}
/**
@ -175,7 +179,7 @@ class MixtureFactor : public HybridFactor {
*/
size_t dim() const {
const auto assignments = DiscreteValues::CartesianProduct(discreteKeys_);
auto factor = factors_(assignments.at(0));
auto [factor, val] = factors_(assignments.at(0));
return factor->dim();
}
@ -188,10 +192,12 @@ class MixtureFactor : public HybridFactor {
const KeyFormatter& keyFormatter = DefaultKeyFormatter) const override {
std::cout << (s.empty() ? "" : s + " ");
Base::print("", keyFormatter);
std::cout << "\nMixtureFactor\n";
auto valueFormatter = [](const sharedFactor& v) {
if (v) {
return "Nonlinear factor on " + std::to_string(v->size()) + " keys";
std::cout << "\nHybridNonlinearFactor\n";
auto valueFormatter = [](const std::pair<sharedFactor, double>& v) {
auto [factor, val] = v;
if (factor) {
return "Nonlinear factor on " + std::to_string(factor->size()) +
" keys";
} else {
return std::string("nullptr");
}
@ -201,17 +207,20 @@ class MixtureFactor : public HybridFactor {
/// Check equality
bool equals(const HybridFactor& other, double tol = 1e-9) const override {
// We attempt a dynamic cast from HybridFactor to MixtureFactor. If it
// fails, return false.
if (!dynamic_cast<const MixtureFactor*>(&other)) return false;
// We attempt a dynamic cast from HybridFactor to HybridNonlinearFactor. If
// it fails, return false.
if (!dynamic_cast<const HybridNonlinearFactor*>(&other)) return false;
// If the cast is successful, we'll properly construct a MixtureFactor
// object from `other`
const MixtureFactor& f(static_cast<const MixtureFactor&>(other));
// If the cast is successful, we'll properly construct a
// HybridNonlinearFactor object from `other`
const HybridNonlinearFactor& f(
static_cast<const HybridNonlinearFactor&>(other));
// Ensure that this MixtureFactor and `f` have the same `factors_`.
auto compare = [tol](const sharedFactor& a, const sharedFactor& b) {
return traits<NonlinearFactor>::Equals(*a, *b, tol);
// Ensure that this HybridNonlinearFactor and `f` have the same `factors_`.
auto compare = [tol](const std::pair<sharedFactor, double>& a,
const std::pair<sharedFactor, double>& b) {
return traits<NonlinearFactor>::Equals(*a.first, *b.first, tol) &&
(a.second == b.second);
};
if (!factors_.equals(f.factors_, compare)) return false;
@ -229,22 +238,25 @@ class MixtureFactor : public HybridFactor {
GaussianFactor::shared_ptr linearize(
const Values& continuousValues,
const DiscreteValues& discreteValues) const {
auto factor = factors_(discreteValues);
auto factor = factors_(discreteValues).first;
return factor->linearize(continuousValues);
}
/// Linearize all the continuous factors to get a GaussianMixtureFactor.
std::shared_ptr<GaussianMixtureFactor> linearize(
/// Linearize all the continuous factors to get a HybridGaussianFactor.
std::shared_ptr<HybridGaussianFactor> linearize(
const Values& continuousValues) const {
// functional to linearize each factor in the decision tree
auto linearizeDT = [continuousValues](const sharedFactor& factor) {
return factor->linearize(continuousValues);
auto linearizeDT =
[continuousValues](const std::pair<sharedFactor, double>& f)
-> GaussianFactorValuePair {
auto [factor, val] = f;
return {factor->linearize(continuousValues), val};
};
DecisionTree<Key, GaussianFactor::shared_ptr> linearized_factors(
factors_, linearizeDT);
DecisionTree<Key, std::pair<GaussianFactor::shared_ptr, double>>
linearized_factors(factors_, linearizeDT);
return std::make_shared<GaussianMixtureFactor>(
return std::make_shared<HybridGaussianFactor>(
continuousKeys_, discreteKeys_, linearized_factors);
}
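Continuing the hypothetical sketch above, the whole mixture linearizes in one call:

Values values;
values.insert(X(1), 0.0);
values.insert(X(2), 1.0);
auto hgf = motionMixture.linearize(values);  // one Gaussian factor per mode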

View File

@ -18,10 +18,10 @@
#include <gtsam/discrete/DecisionTreeFactor.h>
#include <gtsam/discrete/TableFactor.h>
#include <gtsam/hybrid/GaussianMixture.h>
#include <gtsam/hybrid/HybridGaussianConditional.h>
#include <gtsam/hybrid/HybridGaussianFactorGraph.h>
#include <gtsam/hybrid/HybridNonlinearFactor.h>
#include <gtsam/hybrid/HybridNonlinearFactorGraph.h>
#include <gtsam/hybrid/MixtureFactor.h>
#include <gtsam/nonlinear/NonlinearFactor.h>
namespace gtsam {
@ -59,7 +59,7 @@ void HybridNonlinearFactorGraph::printErrors(
// Clear the stringstream
ss.str(std::string());
if (auto mf = std::dynamic_pointer_cast<MixtureFactor>(factor)) {
if (auto mf = std::dynamic_pointer_cast<HybridNonlinearFactor>(factor)) {
if (factor == nullptr) {
std::cout << "nullptr"
<< "\n";
@ -70,7 +70,7 @@ void HybridNonlinearFactorGraph::printErrors(
std::cout << std::endl;
}
} else if (auto gmf =
std::dynamic_pointer_cast<GaussianMixtureFactor>(factor)) {
std::dynamic_pointer_cast<HybridGaussianFactor>(factor)) {
if (factor == nullptr) {
std::cout << "nullptr"
<< "\n";
@ -80,7 +80,8 @@ void HybridNonlinearFactorGraph::printErrors(
gmf->errorTree(values.continuous()).print("", keyFormatter);
std::cout << std::endl;
}
} else if (auto gm = std::dynamic_pointer_cast<GaussianMixture>(factor)) {
} else if (auto gm = std::dynamic_pointer_cast<HybridGaussianConditional>(
factor)) {
if (factor == nullptr) {
std::cout << "nullptr"
<< "\n";
@ -151,8 +152,8 @@ HybridGaussianFactorGraph::shared_ptr HybridNonlinearFactorGraph::linearize(
continue;
}
// Check if it is a nonlinear mixture factor
if (auto mf = dynamic_pointer_cast<MixtureFactor>(f)) {
const GaussianMixtureFactor::shared_ptr& gmf =
if (auto mf = dynamic_pointer_cast<HybridNonlinearFactor>(f)) {
const HybridGaussianFactor::shared_ptr& gmf =
mf->linearize(continuousValues);
linearFG->push_back(gmf);
} else if (auto nlf = dynamic_pointer_cast<NonlinearFactor>(f)) {
@ -161,9 +162,9 @@ HybridGaussianFactorGraph::shared_ptr HybridNonlinearFactorGraph::linearize(
} else if (dynamic_pointer_cast<DiscreteFactor>(f)) {
// If discrete-only: doesn't need linearization.
linearFG->push_back(f);
} else if (auto gmf = dynamic_pointer_cast<GaussianMixtureFactor>(f)) {
} else if (auto gmf = dynamic_pointer_cast<HybridGaussianFactor>(f)) {
linearFG->push_back(gmf);
} else if (auto gm = dynamic_pointer_cast<GaussianMixture>(f)) {
} else if (auto gm = dynamic_pointer_cast<HybridGaussianConditional>(f)) {
linearFG->push_back(gm);
} else if (dynamic_pointer_cast<GaussianFactor>(f)) {
linearFG->push_back(f);

View File

@ -19,6 +19,7 @@
#include <gtsam/hybrid/HybridGaussianISAM.h>
#include <gtsam/hybrid/HybridNonlinearFactorGraph.h>
#include <optional>
namespace gtsam {

View File

@ -138,7 +138,7 @@ HybridSmoother::addConditionals(const HybridGaussianFactorGraph &originalGraph,
}
/* ************************************************************************* */
GaussianMixture::shared_ptr HybridSmoother::gaussianMixture(
HybridGaussianConditional::shared_ptr HybridSmoother::gaussianMixture(
size_t index) const {
return hybridBayesNet_.at(index)->asMixture();
}

View File

@ -69,7 +69,7 @@ class GTSAM_EXPORT HybridSmoother {
const HybridBayesNet& hybridBayesNet, const Ordering& ordering) const;
/// Get the HybridGaussianConditional from the Bayes Net posterior at `index`.
GaussianMixture::shared_ptr gaussianMixture(size_t index) const;
HybridGaussianConditional::shared_ptr gaussianMixture(size_t index) const;
/// Return the Bayes Net posterior.
const HybridBayesNet& hybridBayesNet() const;

View File

@ -10,26 +10,26 @@ class HybridValues {
gtsam::DiscreteValues discrete() const;
HybridValues();
HybridValues(const gtsam::VectorValues &cv, const gtsam::DiscreteValues &dv);
HybridValues(const gtsam::VectorValues& cv, const gtsam::DiscreteValues& dv);
void print(string s = "HybridValues",
const gtsam::KeyFormatter& keyFormatter =
gtsam::DefaultKeyFormatter) const;
bool equals(const gtsam::HybridValues& other, double tol) const;
void insert(gtsam::Key j, int value);
void insert(gtsam::Key j, const gtsam::Vector& value);
void insert_or_assign(gtsam::Key j, const gtsam::Vector& value);
void insert_or_assign(gtsam::Key j, size_t value);
void insert(const gtsam::VectorValues& values);
void insert(const gtsam::DiscreteValues& values);
void insert(const gtsam::HybridValues& values);
void update(const gtsam::VectorValues& values);
void update(const gtsam::DiscreteValues& values);
void update(const gtsam::HybridValues& values);
size_t& atDiscrete(gtsam::Key j);
gtsam::Vector& at(gtsam::Key j);
};
@ -42,7 +42,7 @@ virtual class HybridFactor : gtsam::Factor {
bool equals(const gtsam::HybridFactor& other, double tol = 1e-9) const;
// Standard interface:
double error(const gtsam::HybridValues &values) const;
double error(const gtsam::HybridValues& values) const;
bool isDiscrete() const;
bool isContinuous() const;
bool isHybrid() const;
@ -65,39 +65,41 @@ virtual class HybridConditional {
double logProbability(const gtsam::HybridValues& values) const;
double evaluate(const gtsam::HybridValues& values) const;
double operator()(const gtsam::HybridValues& values) const;
gtsam::GaussianMixture* asMixture() const;
gtsam::HybridGaussianConditional* asMixture() const;
gtsam::GaussianConditional* asGaussian() const;
gtsam::DiscreteConditional* asDiscrete() const;
gtsam::Factor* inner();
double error(const gtsam::HybridValues& values) const;
};
#include <gtsam/hybrid/GaussianMixtureFactor.h>
class GaussianMixtureFactor : gtsam::HybridFactor {
GaussianMixtureFactor(
#include <gtsam/hybrid/HybridGaussianFactor.h>
class HybridGaussianFactor : gtsam::HybridFactor {
HybridGaussianFactor(
const gtsam::KeyVector& continuousKeys,
const gtsam::DiscreteKeys& discreteKeys,
const std::vector<gtsam::GaussianFactor::shared_ptr>& factorsList);
const std::vector<std::pair<gtsam::GaussianFactor::shared_ptr, double>>&
factorsList);
void print(string s = "GaussianMixtureFactor\n",
void print(string s = "HybridGaussianFactor\n",
const gtsam::KeyFormatter& keyFormatter =
gtsam::DefaultKeyFormatter) const;
};
#include <gtsam/hybrid/GaussianMixture.h>
class GaussianMixture : gtsam::HybridFactor {
GaussianMixture(const gtsam::KeyVector& continuousFrontals,
const gtsam::KeyVector& continuousParents,
const gtsam::DiscreteKeys& discreteParents,
const std::vector<gtsam::GaussianConditional::shared_ptr>&
conditionalsList);
#include <gtsam/hybrid/HybridGaussianConditional.h>
class HybridGaussianConditional : gtsam::HybridFactor {
HybridGaussianConditional(
const gtsam::KeyVector& continuousFrontals,
const gtsam::KeyVector& continuousParents,
const gtsam::DiscreteKeys& discreteParents,
const std::vector<gtsam::GaussianConditional::shared_ptr>&
conditionalsList);
gtsam::GaussianMixtureFactor* likelihood(
gtsam::HybridGaussianFactor* likelihood(
const gtsam::VectorValues& frontals) const;
double logProbability(const gtsam::HybridValues& values) const;
double evaluate(const gtsam::HybridValues& values) const;
void print(string s = "GaussianMixture\n",
void print(string s = "HybridGaussianConditional\n",
const gtsam::KeyFormatter& keyFormatter =
gtsam::DefaultKeyFormatter) const;
};
@ -131,7 +133,7 @@ class HybridBayesTree {
#include <gtsam/hybrid/HybridBayesNet.h>
class HybridBayesNet {
HybridBayesNet();
void push_back(const gtsam::GaussianMixture* s);
void push_back(const gtsam::HybridGaussianConditional* s);
void push_back(const gtsam::GaussianConditional* s);
void push_back(const gtsam::DiscreteConditional* s);
@ -139,7 +141,7 @@ class HybridBayesNet {
size_t size() const;
gtsam::KeySet keys() const;
const gtsam::HybridConditional* at(size_t i) const;
// Standard interface:
double logProbability(const gtsam::HybridValues& values) const;
double evaluate(const gtsam::HybridValues& values) const;
@ -149,7 +151,7 @@ class HybridBayesNet {
const gtsam::VectorValues& measurements) const;
gtsam::HybridValues optimize() const;
gtsam::HybridValues sample(const gtsam::HybridValues &given) const;
gtsam::HybridValues sample(const gtsam::HybridValues& given) const;
gtsam::HybridValues sample() const;
void print(string s = "HybridBayesNet\n",
@ -177,7 +179,7 @@ class HybridGaussianFactorGraph {
void push_back(const gtsam::HybridGaussianFactorGraph& graph);
void push_back(const gtsam::HybridBayesNet& bayesNet);
void push_back(const gtsam::HybridBayesTree& bayesTree);
void push_back(const gtsam::GaussianMixtureFactor* gmm);
void push_back(const gtsam::HybridGaussianFactor* gmm);
void push_back(gtsam::DecisionTreeFactor* factor);
void push_back(gtsam::TableFactor* factor);
void push_back(gtsam::JacobianFactor* factor);
@ -189,7 +191,8 @@ class HybridGaussianFactorGraph {
const gtsam::HybridFactor* at(size_t i) const;
void print(string s = "") const;
bool equals(const gtsam::HybridGaussianFactorGraph& fg, double tol = 1e-9) const;
bool equals(const gtsam::HybridGaussianFactorGraph& fg,
double tol = 1e-9) const;
// evaluation
double error(const gtsam::HybridValues& values) const;
@ -222,7 +225,8 @@ class HybridNonlinearFactorGraph {
void push_back(gtsam::HybridFactor* factor);
void push_back(gtsam::NonlinearFactor* factor);
void push_back(gtsam::DiscreteFactor* factor);
gtsam::HybridGaussianFactorGraph linearize(const gtsam::Values& continuousValues) const;
gtsam::HybridGaussianFactorGraph linearize(
const gtsam::Values& continuousValues) const;
bool empty() const;
void remove(size_t i);
@ -231,32 +235,32 @@ class HybridNonlinearFactorGraph {
const gtsam::HybridFactor* at(size_t i) const;
void print(string s = "HybridNonlinearFactorGraph\n",
const gtsam::KeyFormatter& keyFormatter =
gtsam::DefaultKeyFormatter) const;
const gtsam::KeyFormatter& keyFormatter =
gtsam::DefaultKeyFormatter) const;
};
#include <gtsam/hybrid/MixtureFactor.h>
class MixtureFactor : gtsam::HybridFactor {
MixtureFactor(
#include <gtsam/hybrid/HybridNonlinearFactor.h>
class HybridNonlinearFactor : gtsam::HybridFactor {
HybridNonlinearFactor(
const gtsam::KeyVector& keys, const gtsam::DiscreteKeys& discreteKeys,
const gtsam::DecisionTree<gtsam::Key, gtsam::NonlinearFactor*>& factors,
const gtsam::DecisionTree<
gtsam::Key, std::pair<gtsam::NonlinearFactor*, double>>& factors,
bool normalized = false);
template <FACTOR = {gtsam::NonlinearFactor}>
MixtureFactor(const gtsam::KeyVector& keys, const gtsam::DiscreteKeys& discreteKeys,
const std::vector<FACTOR*>& factors,
bool normalized = false);
HybridNonlinearFactor(
const gtsam::KeyVector& keys, const gtsam::DiscreteKeys& discreteKeys,
const std::vector<std::pair<gtsam::NonlinearFactor*, double>>& factors,
bool normalized = false);
double error(const gtsam::Values& continuousValues,
const gtsam::DiscreteValues& discreteValues) const;
double nonlinearFactorLogNormalizingConstant(const gtsam::NonlinearFactor* factor,
const gtsam::Values& values) const;
double nonlinearFactorLogNormalizingConstant(
const gtsam::NonlinearFactor* factor, const gtsam::Values& values) const;
GaussianMixtureFactor* linearize(
const gtsam::Values& continuousValues) const;
HybridGaussianFactor* linearize(const gtsam::Values& continuousValues) const;
void print(string s = "MixtureFactor\n",
void print(string s = "HybridNonlinearFactor\n",
const gtsam::KeyFormatter& keyFormatter =
gtsam::DefaultKeyFormatter) const;
};
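For orientation, the renamed classes compose as before: conditioning a HybridGaussianConditional on its frontal measurement yields a HybridGaussianFactor via the likelihood method declared above. A minimal sketch assuming only signatures shown in this diff (the measurement value is illustrative; the shared-pointer return of likelihood follows the existing API):

#include <gtsam/base/Matrix.h>
#include <gtsam/hybrid/HybridGaussianConditional.h>
#include <gtsam/inference/Symbol.h>
#include <gtsam/linear/GaussianConditional.h>
#include <gtsam/linear/VectorValues.h>

using namespace gtsam;
using symbol_shorthand::M;
using symbol_shorthand::X;
using symbol_shorthand::Z;

int main() {
  // Two-component conditional p(z0 | x0, m0): low-noise vs. high-noise mode.
  const DiscreteKey mode(M(0), 2);
  const HybridGaussianConditional gm(
      KeyVector{Z(0)}, KeyVector{X(0)}, DiscreteKeys{mode},
      std::vector{GaussianConditional::sharedMeanAndStddev(Z(0), I_1x1, X(0),
                                                           Z_1x1, 0.5),
                  GaussianConditional::sharedMeanAndStddev(Z(0), I_1x1, X(0),
                                                           Z_1x1, 3.0)});

  // Conditioning on a known z0 yields a hybrid factor on x0 and m0.
  VectorValues measurement;
  measurement.insert(Z(0), Vector1(1.0));
  auto likelihoodFactor = gm.likelihood(measurement);
  return likelihoodFactor ? 0 : 1;
}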

View File

@ -19,10 +19,10 @@
#include <gtsam/base/Matrix.h>
#include <gtsam/discrete/DecisionTreeFactor.h>
#include <gtsam/discrete/DiscreteDistribution.h>
#include <gtsam/hybrid/GaussianMixtureFactor.h>
#include <gtsam/hybrid/HybridGaussianFactor.h>
#include <gtsam/hybrid/HybridGaussianFactorGraph.h>
#include <gtsam/hybrid/HybridNonlinearFactor.h>
#include <gtsam/hybrid/HybridNonlinearFactorGraph.h>
#include <gtsam/hybrid/MixtureFactor.h>
#include <gtsam/inference/Symbol.h>
#include <gtsam/linear/JacobianFactor.h>
#include <gtsam/linear/NoiseModel.h>
@ -57,12 +57,15 @@ inline HybridGaussianFactorGraph::shared_ptr makeSwitchingChain(
// keyFunc(1) to keyFunc(n+1)
for (size_t t = 1; t < n; t++) {
hfg.add(GaussianMixtureFactor(
{keyFunc(t), keyFunc(t + 1)}, {{dKeyFunc(t), 2}},
{std::make_shared<JacobianFactor>(keyFunc(t), I_3x3, keyFunc(t + 1),
I_3x3, Z_3x1),
std::make_shared<JacobianFactor>(keyFunc(t), I_3x3, keyFunc(t + 1),
I_3x3, Vector3::Ones())}));
std::vector<GaussianFactorValuePair> components = {
{std::make_shared<JacobianFactor>(keyFunc(t), I_3x3, keyFunc(t + 1),
I_3x3, Z_3x1),
0.0},
{std::make_shared<JacobianFactor>(keyFunc(t), I_3x3, keyFunc(t + 1),
I_3x3, Vector3::Ones()),
0.0}};
hfg.add(HybridGaussianFactor({keyFunc(t), keyFunc(t + 1)},
{{dKeyFunc(t), 2}}, components));
if (t > 1) {
hfg.add(DecisionTreeFactor({{dKeyFunc(t - 1), 2}, {dKeyFunc(t), 2}},
@ -159,11 +162,12 @@ struct Switching {
for (size_t k = 0; k < K - 1; k++) {
KeyVector keys = {X(k), X(k + 1)};
auto motion_models = motionModels(k, between_sigma);
std::vector<NonlinearFactor::shared_ptr> components;
std::vector<NonlinearFactorValuePair> components;
for (auto &&f : motion_models) {
components.push_back(std::dynamic_pointer_cast<NonlinearFactor>(f));
components.push_back(
{std::dynamic_pointer_cast<NonlinearFactor>(f), 0.0});
}
nonlinearFactorGraph.emplace_shared<MixtureFactor>(
nonlinearFactorGraph.emplace_shared<HybridNonlinearFactor>(
keys, DiscreteKeys{modes[k]}, components);
}
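Every call site in this diff passes 0.0 as the scalar in each (factor, value) pair. From the constructor signatures, the scalar reads as an additive constant on that component's error, i.e., presumably component $m$ contributes

\[
E_{m}(x)=\frac{1}{2}\|A_{m}x-b_{m}\|_{\Sigma_{m}}^{2}+c_{m},
\]

so $c_m = 0$ reproduces the previous per-component error.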

View File

@ -43,7 +43,7 @@ inline HybridBayesNet createHybridBayesNet(size_t num_measurements = 1,
// Create Gaussian mixture z_i = x0 + noise for each measurement.
for (size_t i = 0; i < num_measurements; i++) {
const auto mode_i = manyModes ? DiscreteKey{M(i), 2} : mode;
bayesNet.emplace_shared<GaussianMixture>(
bayesNet.emplace_shared<HybridGaussianConditional>(
KeyVector{Z(i)}, KeyVector{X(0)}, DiscreteKeys{mode_i},
std::vector{GaussianConditional::sharedMeanAndStddev(Z(i), I_1x1, X(0),
Z_1x1, 0.5),

View File

@ -107,7 +107,7 @@ TEST(HybridBayesNet, evaluateHybrid) {
// Create hybrid Bayes net.
HybridBayesNet bayesNet;
bayesNet.push_back(continuousConditional);
bayesNet.emplace_shared<GaussianMixture>(
bayesNet.emplace_shared<HybridGaussianConditional>(
KeyVector{X(1)}, KeyVector{}, DiscreteKeys{Asia},
std::vector{conditional0, conditional1});
bayesNet.emplace_shared<DiscreteConditional>(Asia, "99/1");
@ -168,7 +168,7 @@ TEST(HybridBayesNet, Error) {
conditional1 = std::make_shared<GaussianConditional>(
X(1), Vector1::Constant(2), I_1x1, model1);
auto gm = std::make_shared<GaussianMixture>(
auto gm = std::make_shared<HybridGaussianConditional>(
KeyVector{X(1)}, KeyVector{}, DiscreteKeys{Asia},
std::vector{conditional0, conditional1});
// Create hybrid Bayes net.
@ -387,9 +387,10 @@ TEST(HybridBayesNet, Sampling) {
std::make_shared<BetweenFactor<double>>(X(0), X(1), 0, noise_model);
auto one_motion =
std::make_shared<BetweenFactor<double>>(X(0), X(1), 1, noise_model);
std::vector<NonlinearFactor::shared_ptr> factors = {zero_motion, one_motion};
std::vector<NonlinearFactorValuePair> factors = {{zero_motion, 0.0},
{one_motion, 0.0}};
nfg.emplace_shared<PriorFactor<double>>(X(0), 0.0, noise_model);
nfg.emplace_shared<MixtureFactor>(
nfg.emplace_shared<HybridNonlinearFactor>(
KeyVector{X(0), X(1)}, DiscreteKeys{DiscreteKey(M(0), 2)}, factors);
DiscreteKey mode(M(0), 2);

View File

@ -43,9 +43,9 @@ TEST(HybridConditional, Invariants) {
auto hc0 = bn.at(0);
CHECK(hc0->isHybrid());
// Check invariants as a GaussianMixture.
// Check invariants as a HybridGaussianConditional.
const auto mixture = hc0->asMixture();
EXPECT(GaussianMixture::CheckInvariants(*mixture, values));
EXPECT(HybridGaussianConditional::CheckInvariants(*mixture, values));
// Check invariants as a HybridConditional.
EXPECT(HybridConditional::CheckInvariants(*hc0, values));

View File

@ -19,10 +19,10 @@
#include <gtsam/geometry/Pose2.h>
#include <gtsam/geometry/Pose3.h>
#include <gtsam/hybrid/HybridBayesNet.h>
#include <gtsam/hybrid/HybridNonlinearFactor.h>
#include <gtsam/hybrid/HybridNonlinearFactorGraph.h>
#include <gtsam/hybrid/HybridNonlinearISAM.h>
#include <gtsam/hybrid/HybridSmoother.h>
#include <gtsam/hybrid/MixtureFactor.h>
#include <gtsam/inference/Symbol.h>
#include <gtsam/linear/GaussianBayesNet.h>
#include <gtsam/linear/GaussianBayesTree.h>
@ -435,9 +435,10 @@ static HybridNonlinearFactorGraph createHybridNonlinearFactorGraph() {
std::make_shared<BetweenFactor<double>>(X(0), X(1), 0, noise_model);
const auto one_motion =
std::make_shared<BetweenFactor<double>>(X(0), X(1), 1, noise_model);
nfg.emplace_shared<MixtureFactor>(
KeyVector{X(0), X(1)}, DiscreteKeys{m},
std::vector<NonlinearFactor::shared_ptr>{zero_motion, one_motion});
std::vector<NonlinearFactorValuePair> components = {{zero_motion, 0.0},
{one_motion, 0.0}};
nfg.emplace_shared<HybridNonlinearFactor>(KeyVector{X(0), X(1)},
DiscreteKeys{m}, components);
return nfg;
}
@ -531,10 +532,10 @@ TEST(HybridEstimation, CorrectnessViaSampling) {
* Helper function to add the constant term corresponding to
* the difference in noise models.
*/
std::shared_ptr<GaussianMixtureFactor> mixedVarianceFactor(
const MixtureFactor& mf, const Values& initial, const Key& mode,
std::shared_ptr<HybridGaussianFactor> mixedVarianceFactor(
const HybridNonlinearFactor& mf, const Values& initial, const Key& mode,
double noise_tight, double noise_loose, size_t d, size_t tight_index) {
GaussianMixtureFactor::shared_ptr gmf = mf.linearize(initial);
HybridGaussianFactor::shared_ptr gmf = mf.linearize(initial);
constexpr double log2pi = 1.8378770664093454835606594728112;
// logConstant will be of the tighter model
@ -560,8 +561,13 @@ std::shared_ptr<GaussianMixtureFactor> mixedVarianceFactor(
}
};
auto updated_components = gmf->factors().apply(func);
auto updated_gmf = std::make_shared<GaussianMixtureFactor>(
gmf->continuousKeys(), gmf->discreteKeys(), updated_components);
auto updated_pairs = HybridGaussianFactor::FactorValuePairs(
updated_components,
[](const GaussianFactor::shared_ptr& gf) -> GaussianFactorValuePair {
return {gf, 0.0};
});
auto updated_gmf = std::make_shared<HybridGaussianFactor>(
gmf->continuousKeys(), gmf->discreteKeys(), updated_pairs);
return updated_gmf;
}
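The FactorValuePairs construction above is the general recipe for lifting a plain decision tree of Gaussian factors into (factor, scalar) pairs. In isolation, a sketch built only from types used in this diff:

#include <gtsam/base/Matrix.h>
#include <gtsam/discrete/DecisionTree.h>
#include <gtsam/hybrid/HybridGaussianFactor.h>
#include <gtsam/inference/Symbol.h>
#include <gtsam/linear/JacobianFactor.h>

using namespace gtsam;
using symbol_shorthand::M;
using symbol_shorthand::X;

int main() {
  // A plain decision tree of Gaussian factors, indexed by mode M(1).
  const DecisionTree<Key, GaussianFactor::shared_ptr> factors(
      M(1), std::make_shared<JacobianFactor>(X(1), I_3x3, Z_3x1),
      std::make_shared<JacobianFactor>(X(1), I_3x3, Vector3::Ones()));

  // Attach a scalar to every leaf via the DecisionTree converting constructor.
  auto pairs = HybridGaussianFactor::FactorValuePairs(
      factors,
      [](const GaussianFactor::shared_ptr& gf) -> GaussianFactorValuePair {
        return {gf, 0.0};  // zero scalar leaves the component error unchanged
      });
  const HybridGaussianFactor hgf({X(1)}, {{M(1), 2}}, pairs);
  return hgf.keys().empty() ? 1 : 0;
}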
@ -589,10 +595,11 @@ TEST(HybridEstimation, ModeSelection) {
model1 = std::make_shared<MotionModel>(
X(0), X(1), 0.0, noiseModel::Isotropic::Sigma(d, noise_tight));
std::vector<NonlinearFactor::shared_ptr> components = {model0, model1};
std::vector<NonlinearFactorValuePair> components = {{model0, 0.0},
{model1, 0.0}};
KeyVector keys = {X(0), X(1)};
MixtureFactor mf(keys, modes, components);
HybridNonlinearFactor mf(keys, modes, components);
initial.insert(X(0), 0.0);
initial.insert(X(1), 0.0);
@ -616,7 +623,7 @@ TEST(HybridEstimation, ModeSelection) {
GaussianConditional::sharedMeanAndStddev(Z(0), -I_1x1, X(0), Z_1x1, 0.1));
bn.push_back(
GaussianConditional::sharedMeanAndStddev(Z(0), -I_1x1, X(1), Z_1x1, 0.1));
bn.emplace_shared<GaussianMixture>(
bn.emplace_shared<HybridGaussianConditional>(
KeyVector{Z(0)}, KeyVector{X(0), X(1)}, DiscreteKeys{mode},
std::vector{GaussianConditional::sharedMeanAndStddev(
Z(0), I_1x1, X(0), -I_1x1, X(1), Z_1x1, noise_loose),
@ -647,7 +654,7 @@ TEST(HybridEstimation, ModeSelection2) {
GaussianConditional::sharedMeanAndStddev(Z(0), -I_3x3, X(0), Z_3x1, 0.1));
bn.push_back(
GaussianConditional::sharedMeanAndStddev(Z(0), -I_3x3, X(1), Z_3x1, 0.1));
bn.emplace_shared<GaussianMixture>(
bn.emplace_shared<HybridGaussianConditional>(
KeyVector{Z(0)}, KeyVector{X(0), X(1)}, DiscreteKeys{mode},
std::vector{GaussianConditional::sharedMeanAndStddev(
Z(0), I_3x3, X(0), -I_3x3, X(1), Z_3x1, noise_loose),
@ -680,10 +687,11 @@ TEST(HybridEstimation, ModeSelection2) {
model1 = std::make_shared<BetweenFactor<Vector3>>(
X(0), X(1), Z_3x1, noiseModel::Isotropic::Sigma(d, noise_tight));
std::vector<NonlinearFactor::shared_ptr> components = {model0, model1};
std::vector<NonlinearFactorValuePair> components = {{model0, 0.0},
{model1, 0.0}};
KeyVector keys = {X(0), X(1)};
MixtureFactor mf(keys, modes, components);
HybridNonlinearFactor mf(keys, modes, components);
initial.insert<Vector3>(X(0), Z_3x1);
initial.insert<Vector3>(X(1), Z_3x1);

View File

@ -52,10 +52,10 @@ TEST(HybridFactorGraph, Keys) {
// Add a gaussian mixture factor ϕ(x1, c1)
DiscreteKey m1(M(1), 2);
DecisionTree<Key, GaussianFactor::shared_ptr> dt(
M(1), std::make_shared<JacobianFactor>(X(1), I_3x3, Z_3x1),
std::make_shared<JacobianFactor>(X(1), I_3x3, Vector3::Ones()));
hfg.add(GaussianMixtureFactor({X(1)}, {m1}, dt));
DecisionTree<Key, GaussianFactorValuePair> dt(
M(1), {std::make_shared<JacobianFactor>(X(1), I_3x3, Z_3x1), 0.0},
{std::make_shared<JacobianFactor>(X(1), I_3x3, Vector3::Ones()), 0.0});
hfg.add(HybridGaussianFactor({X(1)}, {m1}, dt));
KeySet expected_continuous{X(0), X(1)};
EXPECT(

View File

@ -10,8 +10,8 @@
* -------------------------------------------------------------------------- */
/**
* @file testGaussianMixture.cpp
* @brief Unit tests for GaussianMixture class
* @file testHybridGaussianConditional.cpp
* @brief Unit tests for HybridGaussianConditional class
* @author Varun Agrawal
* @author Fan Jiang
* @author Frank Dellaert
@ -19,8 +19,8 @@
*/
#include <gtsam/discrete/DiscreteValues.h>
#include <gtsam/hybrid/GaussianMixture.h>
#include <gtsam/hybrid/GaussianMixtureFactor.h>
#include <gtsam/hybrid/HybridGaussianConditional.h>
#include <gtsam/hybrid/HybridGaussianFactor.h>
#include <gtsam/hybrid/HybridValues.h>
#include <gtsam/inference/Symbol.h>
#include <gtsam/linear/GaussianConditional.h>
@ -31,7 +31,6 @@
#include <CppUnitLite/TestHarness.h>
using namespace gtsam;
using noiseModel::Isotropic;
using symbol_shorthand::M;
using symbol_shorthand::X;
using symbol_shorthand::Z;
@ -46,19 +45,19 @@ static const HybridValues hv1{vv, assignment1};
/* ************************************************************************* */
namespace equal_constants {
// Create a simple GaussianMixture
// Create a simple HybridGaussianConditional
const double commonSigma = 2.0;
const std::vector<GaussianConditional::shared_ptr> conditionals{
GaussianConditional::sharedMeanAndStddev(Z(0), I_1x1, X(0), Vector1(0.0),
commonSigma),
GaussianConditional::sharedMeanAndStddev(Z(0), I_1x1, X(0), Vector1(0.0),
commonSigma)};
const GaussianMixture mixture({Z(0)}, {X(0)}, {mode}, conditionals);
const HybridGaussianConditional mixture({Z(0)}, {X(0)}, {mode}, conditionals);
} // namespace equal_constants
/* ************************************************************************* */
/// Check that invariants hold
TEST(GaussianMixture, Invariants) {
TEST(HybridGaussianConditional, Invariants) {
using namespace equal_constants;
// Check that the mixture normalization constant is the max of all constants
@ -67,13 +66,13 @@ TEST(GaussianMixture, Invariants) {
EXPECT_DOUBLES_EQUAL(K, conditionals[0]->logNormalizationConstant(), 1e-8);
EXPECT_DOUBLES_EQUAL(K, conditionals[1]->logNormalizationConstant(), 1e-8);
EXPECT(GaussianMixture::CheckInvariants(mixture, hv0));
EXPECT(GaussianMixture::CheckInvariants(mixture, hv1));
EXPECT(HybridGaussianConditional::CheckInvariants(mixture, hv0));
EXPECT(HybridGaussianConditional::CheckInvariants(mixture, hv1));
}
/* ************************************************************************* */
/// Check LogProbability.
TEST(GaussianMixture, LogProbability) {
TEST(HybridGaussianConditional, LogProbability) {
using namespace equal_constants;
auto actual = mixture.logProbability(vv);
@ -95,7 +94,7 @@ TEST(GaussianMixture, LogProbability) {
/* ************************************************************************* */
/// Check error.
TEST(GaussianMixture, Error) {
TEST(HybridGaussianConditional, Error) {
using namespace equal_constants;
auto actual = mixture.errorTree(vv);
@ -118,7 +117,7 @@ TEST(GaussianMixture, Error) {
/* ************************************************************************* */
/// Check that the likelihood is proportional to the conditional density given
/// the measurements.
TEST(GaussianMixture, Likelihood) {
TEST(HybridGaussianConditional, Likelihood) {
using namespace equal_constants;
// Compute likelihood
@ -147,19 +146,19 @@ TEST(GaussianMixture, Likelihood) {
/* ************************************************************************* */
namespace mode_dependent_constants {
// Create a GaussianMixture with mode-dependent noise models.
// Create a HybridGaussianConditional with mode-dependent noise models.
// 0 is low-noise, 1 is high-noise.
const std::vector<GaussianConditional::shared_ptr> conditionals{
GaussianConditional::sharedMeanAndStddev(Z(0), I_1x1, X(0), Vector1(0.0),
0.5),
GaussianConditional::sharedMeanAndStddev(Z(0), I_1x1, X(0), Vector1(0.0),
3.0)};
const GaussianMixture mixture({Z(0)}, {X(0)}, {mode}, conditionals);
const HybridGaussianConditional mixture({Z(0)}, {X(0)}, {mode}, conditionals);
} // namespace mode_dependent_constants
/* ************************************************************************* */
// Create a test for continuousParents.
TEST(GaussianMixture, ContinuousParents) {
TEST(HybridGaussianConditional, ContinuousParents) {
using namespace mode_dependent_constants;
const KeyVector continuousParentKeys = mixture.continuousParents();
// Check that the continuous parent keys are correct:
@ -170,7 +169,7 @@ TEST(GaussianMixture, ContinuousParents) {
/* ************************************************************************* */
/// Check that the likelihood is proportional to the conditional density given
/// the measurements.
TEST(GaussianMixture, Likelihood2) {
TEST(HybridGaussianConditional, Likelihood2) {
using namespace mode_dependent_constants;
// Compute likelihood

View File

@ -10,50 +10,53 @@
* -------------------------------------------------------------------------- */
/**
* @file testGaussianMixtureFactor.cpp
* @brief Unit tests for GaussianMixtureFactor
* @file testHybridGaussianFactor.cpp
* @brief Unit tests for HybridGaussianFactor
* @author Varun Agrawal
* @author Fan Jiang
* @author Frank Dellaert
* @date December 2021
*/
#include <gtsam/base/Testable.h>
#include <gtsam/base/TestableAssertions.h>
#include <gtsam/discrete/DiscreteConditional.h>
#include <gtsam/discrete/DiscreteValues.h>
#include <gtsam/hybrid/GaussianMixture.h>
#include <gtsam/hybrid/GaussianMixtureFactor.h>
#include <gtsam/hybrid/HybridBayesNet.h>
#include <gtsam/hybrid/HybridGaussianConditional.h>
#include <gtsam/hybrid/HybridGaussianFactor.h>
#include <gtsam/hybrid/HybridGaussianFactorGraph.h>
#include <gtsam/hybrid/HybridValues.h>
#include <gtsam/inference/Symbol.h>
#include <gtsam/linear/GaussianFactorGraph.h>
#include <gtsam/linear/VectorValues.h>
#include <gtsam/nonlinear/PriorFactor.h>
#include <gtsam/slam/BetweenFactor.h>
// Include for test suite
#include <CppUnitLite/TestHarness.h>
#include <memory>
using namespace std;
using namespace gtsam;
using noiseModel::Isotropic;
using symbol_shorthand::F;
using symbol_shorthand::M;
using symbol_shorthand::X;
using symbol_shorthand::Z;
/* ************************************************************************* */
// Check iterators of empty mixture.
TEST(GaussianMixtureFactor, Constructor) {
GaussianMixtureFactor factor;
GaussianMixtureFactor::const_iterator const_it = factor.begin();
TEST(HybridGaussianFactor, Constructor) {
HybridGaussianFactor factor;
HybridGaussianFactor::const_iterator const_it = factor.begin();
CHECK(const_it == factor.end());
GaussianMixtureFactor::iterator it = factor.begin();
HybridGaussianFactor::iterator it = factor.begin();
CHECK(it == factor.end());
}
/* ************************************************************************* */
// "Add" two mixture factors together.
TEST(GaussianMixtureFactor, Sum) {
TEST(HybridGaussianFactor, Sum) {
DiscreteKey m1(1, 2), m2(2, 3);
auto A1 = Matrix::Zero(2, 1);
@ -68,14 +71,15 @@ TEST(GaussianMixtureFactor, Sum) {
auto f20 = std::make_shared<JacobianFactor>(X(1), A1, X(3), A3, b);
auto f21 = std::make_shared<JacobianFactor>(X(1), A1, X(3), A3, b);
auto f22 = std::make_shared<JacobianFactor>(X(1), A1, X(3), A3, b);
std::vector<GaussianFactor::shared_ptr> factorsA{f10, f11};
std::vector<GaussianFactor::shared_ptr> factorsB{f20, f21, f22};
std::vector<GaussianFactorValuePair> factorsA{{f10, 0.0}, {f11, 0.0}};
std::vector<GaussianFactorValuePair> factorsB{
{f20, 0.0}, {f21, 0.0}, {f22, 0.0}};
// TODO(Frank): why specify keys at all? And: keys in factor should be *all*
// keys, deviating from Kevin's scheme. Should we index DT on DiscreteKey?
// Design review!
GaussianMixtureFactor mixtureFactorA({X(1), X(2)}, {m1}, factorsA);
GaussianMixtureFactor mixtureFactorB({X(1), X(3)}, {m2}, factorsB);
HybridGaussianFactor mixtureFactorA({X(1), X(2)}, {m1}, factorsA);
HybridGaussianFactor mixtureFactorB({X(1), X(3)}, {m2}, factorsB);
// Check that number of keys is 3
EXPECT_LONGS_EQUAL(3, mixtureFactorA.keys().size());
@ -99,19 +103,19 @@ TEST(GaussianMixtureFactor, Sum) {
}
/* ************************************************************************* */
TEST(GaussianMixtureFactor, Printing) {
TEST(HybridGaussianFactor, Printing) {
DiscreteKey m1(1, 2);
auto A1 = Matrix::Zero(2, 1);
auto A2 = Matrix::Zero(2, 2);
auto b = Matrix::Zero(2, 1);
auto f10 = std::make_shared<JacobianFactor>(X(1), A1, X(2), A2, b);
auto f11 = std::make_shared<JacobianFactor>(X(1), A1, X(2), A2, b);
std::vector<GaussianFactor::shared_ptr> factors{f10, f11};
std::vector<GaussianFactorValuePair> factors{{f10, 0.0}, {f11, 0.0}};
GaussianMixtureFactor mixtureFactor({X(1), X(2)}, {m1}, factors);
HybridGaussianFactor mixtureFactor({X(1), X(2)}, {m1}, factors);
std::string expected =
R"(GaussianMixtureFactor
R"(HybridGaussianFactor
Hybrid [x1 x2; 1]{
Choice(1)
0 Leaf :
@ -144,7 +148,7 @@ Hybrid [x1 x2; 1]{
}
/* ************************************************************************* */
TEST(GaussianMixtureFactor, GaussianMixture) {
TEST(HybridGaussianFactor, HybridGaussianConditional) {
KeyVector keys;
keys.push_back(X(0));
keys.push_back(X(1));
@ -154,15 +158,15 @@ TEST(GaussianMixtureFactor, GaussianMixture) {
dKeys.emplace_back(M(1), 2);
auto gaussians = std::make_shared<GaussianConditional>();
GaussianMixture::Conditionals conditionals(gaussians);
GaussianMixture gm({}, keys, dKeys, conditionals);
HybridGaussianConditional::Conditionals conditionals(gaussians);
HybridGaussianConditional gm({}, keys, dKeys, conditionals);
EXPECT_LONGS_EQUAL(2, gm.discreteKeys().size());
}
/* ************************************************************************* */
// Test the error of the GaussianMixtureFactor
TEST(GaussianMixtureFactor, Error) {
// Test the error of the HybridGaussianFactor
TEST(HybridGaussianFactor, Error) {
DiscreteKey m1(1, 2);
auto A01 = Matrix2::Identity();
@ -175,9 +179,9 @@ TEST(GaussianMixtureFactor, Error) {
auto f0 = std::make_shared<JacobianFactor>(X(1), A01, X(2), A02, b);
auto f1 = std::make_shared<JacobianFactor>(X(1), A11, X(2), A12, b);
std::vector<GaussianFactor::shared_ptr> factors{f0, f1};
std::vector<GaussianFactorValuePair> factors{{f0, 0.0}, {f1, 0.0}};
GaussianMixtureFactor mixtureFactor({X(1), X(2)}, {m1}, factors);
HybridGaussianFactor mixtureFactor({X(1), X(2)}, {m1}, factors);
VectorValues continuousValues;
continuousValues.insert(X(1), Vector2(0, 0));
@ -229,10 +233,10 @@ static HybridBayesNet GetGaussianMixtureModel(double mu0, double mu1,
c1 = make_shared<GaussianConditional>(z, Vector1(mu1), I_1x1, model1);
HybridBayesNet hbn;
hbn.emplace_shared<GaussianMixture>(KeyVector{z}, KeyVector{},
DiscreteKeys{m}, std::vector{c0, c1});
hbn.emplace_shared<HybridGaussianConditional>(
KeyVector{z}, KeyVector{}, DiscreteKeys{m}, std::vector{c0, c1});
auto mixing = make_shared<DiscreteConditional>(m, "0.5/0.5");
auto mixing = make_shared<DiscreteConditional>(m, "50/50");
hbn.push_back(mixing);
return hbn;
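On the "0.5/0.5" to "50/50" change in this hunk: DiscreteConditional spec strings are ratios that are normalized on construction, so the two specs should define the same table and the switch reads as cosmetic. A sanity-check sketch (assuming equals with a tolerance, as in the base DiscreteFactor API):

#include <gtsam/discrete/DiscreteConditional.h>
#include <gtsam/inference/Symbol.h>

using namespace gtsam;

int main() {
  const DiscreteKey m(symbol_shorthand::M(1), 2);
  const DiscreteConditional a(m, "0.5/0.5"), b(m, "50/50");
  return a.equals(b, 1e-9) ? 0 : 1;  // both should normalize to {0.5, 0.5}
}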
@ -250,7 +254,7 @@ static HybridBayesNet GetGaussianMixtureModel(double mu0, double mu1,
* The resulting factor graph should eliminate to a Bayes net
* which represents a sigmoid function.
*/
TEST(GaussianMixtureFactor, GaussianMixtureModel) {
TEST(HybridGaussianFactor, GaussianMixtureModel) {
using namespace test_gmm;
double mu0 = 1.0, mu1 = 3.0;
@ -278,7 +282,7 @@ TEST(GaussianMixtureFactor, GaussianMixtureModel) {
// At the halfway point between the means, we should get P(m|z)=0.5
HybridBayesNet expected;
expected.emplace_shared<DiscreteConditional>(m, "0.5/0.5");
expected.emplace_shared<DiscreteConditional>(m, "50/50");
EXPECT(assert_equal(expected, *bn));
}
@ -322,7 +326,7 @@ TEST(GaussianMixtureFactor, GaussianMixtureModel) {
* which represents a Gaussian-like function
* where m1>m0 close to 3.1333.
*/
TEST(GaussianMixtureFactor, GaussianMixtureModel2) {
TEST(HybridGaussianFactor, GaussianMixtureModel2) {
using namespace test_gmm;
double mu0 = 1.0, mu1 = 3.0;
@ -387,103 +391,132 @@ TEST(GaussianMixtureFactor, GaussianMixtureModel2) {
namespace test_two_state_estimation {
/// Create Two State Bayes Network with measurements
/// The Bayes network is P(z0|x0)P(x1|x0,m1)p(m1) and optionally p(z1|x1)
static HybridBayesNet CreateBayesNet(double mu0, double mu1, double sigma0,
double sigma1,
bool add_second_measurement = false,
double measurement_sigma = 3.0) {
DiscreteKey m1(M(1), 2);
Key z0 = Z(0), z1 = Z(1);
Key x0 = X(0), x1 = X(1);
DiscreteKey m1(M(1), 2);
HybridBayesNet hbn;
void addMeasurement(HybridBayesNet& hbn, Key z_key, Key x_key, double sigma) {
auto measurement_model = noiseModel::Isotropic::Sigma(1, sigma);
hbn.emplace_shared<GaussianConditional>(z_key, Vector1(0.0), I_1x1, x_key,
-I_1x1, measurement_model);
}
auto measurement_model = noiseModel::Isotropic::Sigma(1, measurement_sigma);
// Add measurement P(z0 | x0)
auto p_z0 = std::make_shared<GaussianConditional>(
z0, Vector1(0.0), -I_1x1, x0, I_1x1, measurement_model);
hbn.push_back(p_z0);
// Add hybrid motion model
/// Create hybrid motion model p(x1 | x0, m1)
static HybridGaussianConditional::shared_ptr CreateHybridMotionModel(
double mu0, double mu1, double sigma0, double sigma1) {
auto model0 = noiseModel::Isotropic::Sigma(1, sigma0);
auto model1 = noiseModel::Isotropic::Sigma(1, sigma1);
auto c0 = make_shared<GaussianConditional>(x1, Vector1(mu0), I_1x1, x0,
auto c0 = make_shared<GaussianConditional>(X(1), Vector1(mu0), I_1x1, X(0),
-I_1x1, model0),
c1 = make_shared<GaussianConditional>(x1, Vector1(mu1), I_1x1, x0,
c1 = make_shared<GaussianConditional>(X(1), Vector1(mu1), I_1x1, X(0),
-I_1x1, model1);
return std::make_shared<HybridGaussianConditional>(
KeyVector{X(1)}, KeyVector{X(0)}, DiscreteKeys{m1}, std::vector{c0, c1});
}
auto motion = std::make_shared<GaussianMixture>(
KeyVector{x1}, KeyVector{x0}, DiscreteKeys{m1}, std::vector{c0, c1});
hbn.push_back(motion);
/// Create a two-state Bayes network with one or two measurement models
HybridBayesNet CreateBayesNet(
const HybridGaussianConditional::shared_ptr& hybridMotionModel,
bool add_second_measurement = false) {
HybridBayesNet hbn;
// Add measurement model p(z0 | x0)
addMeasurement(hbn, Z(0), X(0), 3.0);
// Optionally add second measurement model p(z1 | x1)
if (add_second_measurement) {
// Add second measurement
auto p_z1 = std::make_shared<GaussianConditional>(
z1, Vector1(0.0), -I_1x1, x1, I_1x1, measurement_model);
hbn.push_back(p_z1);
addMeasurement(hbn, Z(1), X(1), 3.0);
}
// Add hybrid motion model
hbn.push_back(hybridMotionModel);
// Discrete uniform prior.
auto p_m1 = std::make_shared<DiscreteConditional>(m1, "0.5/0.5");
hbn.push_back(p_m1);
hbn.emplace_shared<DiscreteConditional>(m1, "50/50");
return hbn;
}
/// Approximate the discrete marginal P(m1) using importance sampling
std::pair<double, double> approximateDiscreteMarginal(
const HybridBayesNet& hbn,
const HybridGaussianConditional::shared_ptr& hybridMotionModel,
const VectorValues& given, size_t N = 100000) {
/// Create importance sampling network q(x0,x1,m) = p(x1|x0,m1) q(x0) P(m1),
/// using q(x0) = N(z0, sigmaQ) to sample x0.
HybridBayesNet q;
q.push_back(hybridMotionModel); // Add hybrid motion model
q.emplace_shared<GaussianConditional>(GaussianConditional::FromMeanAndStddev(
X(0), given.at(Z(0)), /* sigmaQ = */ 3.0)); // Add proposal q(x0) for x0
q.emplace_shared<DiscreteConditional>(m1, "50/50"); // Discrete prior.
// Do importance sampling
double w0 = 0.0, w1 = 0.0;
std::mt19937_64 rng(42);
for (int i = 0; i < N; i++) {
HybridValues sample = q.sample(&rng);
sample.insert(given);
double weight = hbn.evaluate(sample) / q.evaluate(sample);
(sample.atDiscrete(M(1)) == 0) ? w0 += weight : w1 += weight;
}
double pm1 = w1 / (w0 + w1);
std::cout << "p(m0) = " << 100 * (1.0 - pm1) << std::endl;
std::cout << "p(m1) = " << 100 * pm1 << std::endl;
return {1.0 - pm1, pm1};
}
} // namespace test_two_state_estimation
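As a reading aid, approximateDiscreteMarginal implements self-normalized importance sampling: with samples $(x^{(i)},m^{(i)})\sim q$ and weights

\[
w_{i}=\frac{p(x^{(i)},m^{(i)},z)}{q(x^{(i)},m^{(i)})},\qquad
\hat{P}(m_{1}=1\mid z)=\frac{\sum_{i:\,m^{(i)}=1}w_{i}}{\sum_{i}w_{i}},
\]

which is exactly the $w_1/(w_0+w_1)$ computed in the loop; the measurements $z$ enter through sample.insert(given).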
/* ************************************************************************* */
/**
* Test a model P(z0|x0)P(x1|x0,m1)P(z1|x1)P(m1).
* Test a model p(z0|x0)p(z1|x1)p(x1|x0,m1)P(m1).
*
* P(x1|x0,m1) has different means and same covariance.
* p(x1|x0,m1) has mode-dependent mean but same covariance.
*
* Converting to a factor graph gives us
* ϕ(x0)ϕ(x1,x0,m1)ϕ(x1)P(m1)
* Converting to a factor graph gives us ϕ(x0;z0)ϕ(x1;z1)ϕ(x1,x0,m1)P(m1)
*
* If we only have a measurement on z0, then
* the probability of m1 should be 0.5/0.5.
* If we only have a measurement on x0, then
* the posterior probability of m1 should be 0.5/0.5.
* Getting a measurement on z1 gives us more information.
*/
TEST(GaussianMixtureFactor, TwoStateModel) {
TEST(HybridGaussianFactor, TwoStateModel) {
using namespace test_two_state_estimation;
double mu0 = 1.0, mu1 = 3.0;
double sigma = 2.0;
DiscreteKey m1(M(1), 2);
Key z0 = Z(0), z1 = Z(1);
double sigma = 0.5;
auto hybridMotionModel = CreateHybridMotionModel(mu0, mu1, sigma, sigma);
// Start with no measurement on x1, only on x0
HybridBayesNet hbn = CreateBayesNet(mu0, mu1, sigma, sigma, false);
const Vector1 z0(0.5);
VectorValues given;
given.insert(z0, Vector1(0.5));
given.insert(Z(0), z0);
{
HybridBayesNet hbn = CreateBayesNet(hybridMotionModel);
HybridGaussianFactorGraph gfg = hbn.toFactorGraph(given);
HybridBayesNet::shared_ptr bn = gfg.eliminateSequential();
// Since no measurement on x1, we hedge our bets
DiscreteConditional expected(m1, "0.5/0.5");
// Importance sampling run with 100k samples gives 50.051/49.949
// approximateDiscreteMarginal(hbn, hybridMotionModel, given);
DiscreteConditional expected(m1, "50/50");
EXPECT(assert_equal(expected, *(bn->at(2)->asDiscrete())));
}
{
// Now we add a measurement z1 on x1
hbn = CreateBayesNet(mu0, mu1, sigma, sigma, true);
// If we set z1=4.5 (>> 2.5 which is the halfway point),
// probability of discrete mode should be leaning to m1==1.
const Vector1 z1(4.5);
given.insert(Z(1), z1);
// If we see z1=2.6 (> 2.5 which is the halfway point),
// discrete mode should say m1=1
given.insert(z1, Vector1(2.6));
HybridBayesNet hbn = CreateBayesNet(hybridMotionModel, true);
HybridGaussianFactorGraph gfg = hbn.toFactorGraph(given);
HybridBayesNet::shared_ptr bn = gfg.eliminateSequential();
// Since we have a measurement on z2, we get a definite result
DiscreteConditional expected(m1, "0.49772729/0.50227271");
// regression
EXPECT(assert_equal(expected, *(bn->at(2)->asDiscrete()), 1e-6));
// Since we have a measurement on x1, we get a definite result
// Values taken from an importance sampling run with 100k samples:
// approximateDiscreteMarginal(hbn, hybridMotionModel, given);
DiscreteConditional expected(m1, "44.3854/55.6146");
EXPECT(assert_equal(expected, *(bn->at(2)->asDiscrete()), 0.002));
}
}
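The ratio checks in the tests that follow assert that toFactorGraph preserves relative mode information: for any fixed continuous values (with the measurements substituted), the quotient of factor-graph error and Bayes-net error must be the same for both modes,

\[
\frac{E_{fg}(x,\,m{=}0)}{E_{bn}(x,\,m{=}0,\,z)}=\frac{E_{fg}(x,\,m{=}1)}{E_{bn}(x,\,m{=}1,\,z)},
\]

i.e., the conversion may rescale errors, but not in a mode-dependent way.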
@ -500,89 +533,206 @@ TEST(GaussianMixtureFactor, TwoStateModel) {
* the P(m1) should be 0.5/0.5.
* Getting a measurement on z1 gives us more information.
*/
TEST(GaussianMixtureFactor, TwoStateModel2) {
TEST(HybridGaussianFactor, TwoStateModel2) {
using namespace test_two_state_estimation;
double mu0 = 1.0, mu1 = 3.0;
double sigma0 = 6.0, sigma1 = 4.0;
auto model0 = noiseModel::Isotropic::Sigma(1, sigma0);
auto model1 = noiseModel::Isotropic::Sigma(1, sigma1);
DiscreteKey m1(M(1), 2);
Key z0 = Z(0), z1 = Z(1);
double sigma0 = 0.5, sigma1 = 2.0;
auto hybridMotionModel = CreateHybridMotionModel(mu0, mu1, sigma0, sigma1);
// Start with no measurement on x1, only on x0
HybridBayesNet hbn = CreateBayesNet(mu0, mu1, sigma0, sigma1, false);
const Vector1 z0(0.5);
VectorValues given;
given.insert(z0, Vector1(0.5));
given.insert(Z(0), z0);
{
// Start with no measurement on x1, only on x0
HybridBayesNet hbn = CreateBayesNet(hybridMotionModel);
HybridGaussianFactorGraph gfg = hbn.toFactorGraph(given);
{
VectorValues vv{
{X(0), Vector1(0.0)}, {X(1), Vector1(1.0)}, {Z(0), Vector1(0.5)}};
HybridValues hv0(vv, DiscreteValues{{M(1), 0}}),
hv1(vv, DiscreteValues{{M(1), 1}});
EXPECT_DOUBLES_EQUAL(gfg.error(hv0) / hbn.error(hv0),
gfg.error(hv1) / hbn.error(hv1), 1e-9);
}
{
VectorValues vv{
{X(0), Vector1(0.5)}, {X(1), Vector1(3.0)}, {Z(0), Vector1(0.5)}};
HybridValues hv0(vv, DiscreteValues{{M(1), 0}}),
hv1(vv, DiscreteValues{{M(1), 1}});
// Check that ratio of Bayes net and factor graph for different modes is
// equal for several values of {x0,x1}.
for (VectorValues vv :
{VectorValues{{X(0), Vector1(0.0)}, {X(1), Vector1(1.0)}},
VectorValues{{X(0), Vector1(0.5)}, {X(1), Vector1(3.0)}}}) {
vv.insert(given); // add measurements for HBN
HybridValues hv0(vv, {{M(1), 0}}), hv1(vv, {{M(1), 1}});
EXPECT_DOUBLES_EQUAL(gfg.error(hv0) / hbn.error(hv0),
gfg.error(hv1) / hbn.error(hv1), 1e-9);
}
HybridBayesNet::shared_ptr bn = gfg.eliminateSequential();
// Importance sampling run with 100k samples gives 50.095/49.905
// approximateDiscreteMarginal(hbn, hybridMotionModel, given);
// Since there is no measurement on x1, we get a 50/50 probability
auto p_m = bn->at(2)->asDiscrete();
EXPECT_DOUBLES_EQUAL(0.5, p_m->operator()(DiscreteValues{{m1.first, 0}}),
1e-9);
EXPECT_DOUBLES_EQUAL(0.5, p_m->operator()(DiscreteValues{{m1.first, 1}}),
1e-9);
EXPECT_DOUBLES_EQUAL(0.5, p_m->operator()({{M(1), 0}}), 1e-9);
EXPECT_DOUBLES_EQUAL(0.5, p_m->operator()({{M(1), 1}}), 1e-9);
}
{
// Now we add a measurement z1 on x1
hbn = CreateBayesNet(mu0, mu1, sigma0, sigma1, true);
const Vector1 z1(4.0); // favors m==1
given.insert(Z(1), z1);
given.insert(z1, Vector1(2.2));
HybridBayesNet hbn = CreateBayesNet(hybridMotionModel, true);
HybridGaussianFactorGraph gfg = hbn.toFactorGraph(given);
{
VectorValues vv{{X(0), Vector1(0.0)},
{X(1), Vector1(1.0)},
{Z(0), Vector1(0.5)},
{Z(1), Vector1(2.2)}};
HybridValues hv0(vv, DiscreteValues{{M(1), 0}}),
hv1(vv, DiscreteValues{{M(1), 1}});
EXPECT_DOUBLES_EQUAL(gfg.error(hv0) / hbn.error(hv0),
gfg.error(hv1) / hbn.error(hv1), 1e-9);
}
{
VectorValues vv{{X(0), Vector1(0.5)},
{X(1), Vector1(3.0)},
{Z(0), Vector1(0.5)},
{Z(1), Vector1(2.2)}};
HybridValues hv0(vv, DiscreteValues{{M(1), 0}}),
hv1(vv, DiscreteValues{{M(1), 1}});
// Check that ratio of Bayes net and factor graph for different modes is
// equal for several values of {x0,x1}.
for (VectorValues vv :
{VectorValues{{X(0), Vector1(0.0)}, {X(1), Vector1(1.0)}},
VectorValues{{X(0), Vector1(0.5)}, {X(1), Vector1(3.0)}}}) {
vv.insert(given); // add measurements for HBN
HybridValues hv0(vv, {{M(1), 0}}), hv1(vv, {{M(1), 1}});
EXPECT_DOUBLES_EQUAL(gfg.error(hv0) / hbn.error(hv0),
gfg.error(hv1) / hbn.error(hv1), 1e-9);
}
HybridBayesNet::shared_ptr bn = gfg.eliminateSequential();
// Since we have a measurement on z2, we get a definite result
DiscreteConditional expected(m1, "0.44744586/0.55255414");
// regression
EXPECT(assert_equal(expected, *(bn->at(2)->asDiscrete()), 1e-6));
// Values taken from an importance sampling run with 100k samples:
// approximateDiscreteMarginal(hbn, hybridMotionModel, given);
DiscreteConditional expected(m1, "48.3158/51.6842");
EXPECT(assert_equal(expected, *(bn->at(2)->asDiscrete()), 0.002));
}
{
// Add a different measurement z1 on x1 that favors m==0
const Vector1 z1(1.1);
given.insert_or_assign(Z(1), z1);
HybridBayesNet hbn = CreateBayesNet(hybridMotionModel, true);
HybridGaussianFactorGraph gfg = hbn.toFactorGraph(given);
HybridBayesNet::shared_ptr bn = gfg.eliminateSequential();
// Values taken from an importance sampling run with 100k samples:
// approximateDiscreteMarginal(hbn, hybridMotionModel, given);
DiscreteConditional expected(m1, "55.396/44.604");
EXPECT(assert_equal(expected, *(bn->at(2)->asDiscrete()), 0.002));
}
}
/* ************************************************************************* */
/**
* Test a model p(z0|x0)p(x1|x0,m1)p(z1|x1)p(m1).
*
* p(x1|x0,m1) has the same means but different covariances.
*
* Converting to a factor graph gives us
* ϕ(x0)ϕ(x1,x0,m1)ϕ(x1)p(m1)
*
* If we only have a measurement on z0, then
* the p(m1) should be 0.5/0.5.
* Getting a measurement on z1 gives us more information.
*/
TEST(HybridGaussianFactor, TwoStateModel3) {
using namespace test_two_state_estimation;
double mu = 1.0;
double sigma0 = 0.5, sigma1 = 2.0;
auto hybridMotionModel = CreateHybridMotionModel(mu, mu, sigma0, sigma1);
// Start with no measurement on x1, only on x0
const Vector1 z0(0.5);
VectorValues given;
given.insert(Z(0), z0);
{
HybridBayesNet hbn = CreateBayesNet(hybridMotionModel);
HybridGaussianFactorGraph gfg = hbn.toFactorGraph(given);
// Check that ratio of Bayes net and factor graph for different modes is
// equal for several values of {x0,x1}.
for (VectorValues vv :
{VectorValues{{X(0), Vector1(0.0)}, {X(1), Vector1(1.0)}},
VectorValues{{X(0), Vector1(0.5)}, {X(1), Vector1(3.0)}}}) {
vv.insert(given); // add measurements for HBN
HybridValues hv0(vv, {{M(1), 0}}), hv1(vv, {{M(1), 1}});
EXPECT_DOUBLES_EQUAL(gfg.error(hv0) / hbn.error(hv0),
gfg.error(hv1) / hbn.error(hv1), 1e-9);
}
HybridBayesNet::shared_ptr bn = gfg.eliminateSequential();
// Importance sampling run with 100k samples gives 50.095/49.905
// approximateDiscreteMarginal(hbn, hybridMotionModel, given);
// Since there is no measurement on x1, we get a 50/50 probability
auto p_m = bn->at(2)->asDiscrete();
EXPECT_DOUBLES_EQUAL(0.5, p_m->operator()({{M(1), 0}}), 1e-9);
EXPECT_DOUBLES_EQUAL(0.5, p_m->operator()({{M(1), 1}}), 1e-9);
}
{
// Now we add a measurement z1 on x1
const Vector1 z1(4.0); // favors m==1
given.insert(Z(1), z1);
HybridBayesNet hbn = CreateBayesNet(hybridMotionModel, true);
HybridGaussianFactorGraph gfg = hbn.toFactorGraph(given);
// Check that ratio of Bayes net and factor graph for different modes is
// equal for several values of {x0,x1}.
for (VectorValues vv :
{VectorValues{{X(0), Vector1(0.0)}, {X(1), Vector1(1.0)}},
VectorValues{{X(0), Vector1(0.5)}, {X(1), Vector1(3.0)}}}) {
vv.insert(given); // add measurements for HBN
HybridValues hv0(vv, {{M(1), 0}}), hv1(vv, {{M(1), 1}});
EXPECT_DOUBLES_EQUAL(gfg.error(hv0) / hbn.error(hv0),
gfg.error(hv1) / hbn.error(hv1), 1e-9);
}
HybridBayesNet::shared_ptr bn = gfg.eliminateSequential();
// Values taken from an importance sampling run with 100k samples:
// approximateDiscreteMarginal(hbn, hybridMotionModel, given);
DiscreteConditional expected(m1, "51.7762/48.2238");
EXPECT(assert_equal(expected, *(bn->at(2)->asDiscrete()), 0.002));
}
{
// Add a different measurement z1 on x1 that favors m==1
const Vector1 z1(7.0);
given.insert_or_assign(Z(1), z1);
HybridBayesNet hbn = CreateBayesNet(hybridMotionModel, true);
HybridGaussianFactorGraph gfg = hbn.toFactorGraph(given);
HybridBayesNet::shared_ptr bn = gfg.eliminateSequential();
// Values taken from an importance sampling run with 100k samples:
// approximateDiscreteMarginal(hbn, hybridMotionModel, given);
DiscreteConditional expected(m1, "49.0762/50.9238");
EXPECT(assert_equal(expected, *(bn->at(2)->asDiscrete()), 0.005));
}
}
/* ************************************************************************* */
/**
* Same model, P(z0|x0)P(x1|x0,m1)P(z1|x1)P(m1), but now with very informative
* measurements and vastly different motion model: either stand still or move
* far. This yields a very informative posterior.
*/
TEST(HybridGaussianFactor, TwoStateModel4) {
using namespace test_two_state_estimation;
double mu0 = 0.0, mu1 = 10.0;
double sigma0 = 0.2, sigma1 = 5.0;
auto hybridMotionModel = CreateHybridMotionModel(mu0, mu1, sigma0, sigma1);
// We only check the 2-measurement case
const Vector1 z0(0.0), z1(10.0);
VectorValues given{{Z(0), z0}, {Z(1), z1}};
HybridBayesNet hbn = CreateBayesNet(hybridMotionModel, true);
HybridGaussianFactorGraph gfg = hbn.toFactorGraph(given);
HybridBayesNet::shared_ptr bn = gfg.eliminateSequential();
// Values taken from an importance sampling run with 100k samples:
// approximateDiscreteMarginal(hbn, hybridMotionModel, given);
DiscreteConditional expected(m1, "8.91527/91.0847");
EXPECT(assert_equal(expected, *(bn->at(2)->asDiscrete()), 0.002));
}
/**

View File

@ -21,12 +21,12 @@
#include <gtsam/discrete/DecisionTreeFactor.h>
#include <gtsam/discrete/DiscreteKey.h>
#include <gtsam/discrete/DiscreteValues.h>
#include <gtsam/hybrid/GaussianMixture.h>
#include <gtsam/hybrid/GaussianMixtureFactor.h>
#include <gtsam/hybrid/HybridBayesNet.h>
#include <gtsam/hybrid/HybridBayesTree.h>
#include <gtsam/hybrid/HybridConditional.h>
#include <gtsam/hybrid/HybridFactor.h>
#include <gtsam/hybrid/HybridGaussianConditional.h>
#include <gtsam/hybrid/HybridGaussianFactor.h>
#include <gtsam/hybrid/HybridGaussianFactorGraph.h>
#include <gtsam/hybrid/HybridGaussianISAM.h>
#include <gtsam/hybrid/HybridValues.h>
@ -71,13 +71,14 @@ TEST(HybridGaussianFactorGraph, Creation) {
// Define a gaussian mixture conditional P(x0|x1, c0) and add it to the factor
// graph
GaussianMixture gm({X(0)}, {X(1)}, DiscreteKeys(DiscreteKey{M(0), 2}),
GaussianMixture::Conditionals(
M(0),
std::make_shared<GaussianConditional>(
X(0), Z_3x1, I_3x3, X(1), I_3x3),
std::make_shared<GaussianConditional>(
X(0), Vector3::Ones(), I_3x3, X(1), I_3x3)));
HybridGaussianConditional gm(
{X(0)}, {X(1)}, DiscreteKeys(DiscreteKey{M(0), 2}),
HybridGaussianConditional::Conditionals(
M(0),
std::make_shared<GaussianConditional>(X(0), Z_3x1, I_3x3, X(1),
I_3x3),
std::make_shared<GaussianConditional>(X(0), Vector3::Ones(), I_3x3,
X(1), I_3x3)));
hfg.add(gm);
EXPECT_LONGS_EQUAL(2, hfg.size());
@ -126,10 +127,10 @@ TEST(HybridGaussianFactorGraph, eliminateFullSequentialEqualChance) {
// Add a gaussian mixture factor ϕ(x1, c1)
DiscreteKey m1(M(1), 2);
DecisionTree<Key, GaussianFactor::shared_ptr> dt(
M(1), std::make_shared<JacobianFactor>(X(1), I_3x3, Z_3x1),
std::make_shared<JacobianFactor>(X(1), I_3x3, Vector3::Ones()));
hfg.add(GaussianMixtureFactor({X(1)}, {m1}, dt));
DecisionTree<Key, GaussianFactorValuePair> dt(
M(1), {std::make_shared<JacobianFactor>(X(1), I_3x3, Z_3x1), 0.0},
{std::make_shared<JacobianFactor>(X(1), I_3x3, Vector3::Ones()), 0.0});
hfg.add(HybridGaussianFactor({X(1)}, {m1}, dt));
auto result = hfg.eliminateSequential();
@ -152,10 +153,10 @@ TEST(HybridGaussianFactorGraph, eliminateFullSequentialSimple) {
// Add factor between x0 and x1
hfg.add(JacobianFactor(X(0), I_3x3, X(1), -I_3x3, Z_3x1));
std::vector<GaussianFactor::shared_ptr> factors = {
std::make_shared<JacobianFactor>(X(1), I_3x3, Z_3x1),
std::make_shared<JacobianFactor>(X(1), I_3x3, Vector3::Ones())};
hfg.add(GaussianMixtureFactor({X(1)}, {m1}, factors));
std::vector<GaussianFactorValuePair> factors = {
{std::make_shared<JacobianFactor>(X(1), I_3x3, Z_3x1), 0.0},
{std::make_shared<JacobianFactor>(X(1), I_3x3, Vector3::Ones()), 0.0}};
hfg.add(HybridGaussianFactor({X(1)}, {m1}, factors));
// Discrete probability table for c1
hfg.add(DecisionTreeFactor(m1, {2, 8}));
@ -177,10 +178,10 @@ TEST(HybridGaussianFactorGraph, eliminateFullMultifrontalSimple) {
hfg.add(JacobianFactor(X(0), I_3x3, Z_3x1));
hfg.add(JacobianFactor(X(0), I_3x3, X(1), -I_3x3, Z_3x1));
hfg.add(GaussianMixtureFactor(
{X(1)}, {{M(1), 2}},
{std::make_shared<JacobianFactor>(X(1), I_3x3, Z_3x1),
std::make_shared<JacobianFactor>(X(1), I_3x3, Vector3::Ones())}));
std::vector<GaussianFactorValuePair> factors = {
{std::make_shared<JacobianFactor>(X(1), I_3x3, Z_3x1), 0.0},
{std::make_shared<JacobianFactor>(X(1), I_3x3, Vector3::Ones()), 0.0}};
hfg.add(HybridGaussianFactor({X(1)}, {{M(1), 2}}, factors));
hfg.add(DecisionTreeFactor(m1, {2, 8}));
// TODO(Varun) Adding extra discrete variable not connected to continuous
@ -207,12 +208,12 @@ TEST(HybridGaussianFactorGraph, eliminateFullMultifrontalCLG) {
hfg.add(JacobianFactor(X(0), I_3x3, X(1), -I_3x3, Z_3x1));
// Decision tree with different modes on x1
DecisionTree<Key, GaussianFactor::shared_ptr> dt(
M(1), std::make_shared<JacobianFactor>(X(1), I_3x3, Z_3x1),
std::make_shared<JacobianFactor>(X(1), I_3x3, Vector3::Ones()));
DecisionTree<Key, GaussianFactorValuePair> dt(
M(1), {std::make_shared<JacobianFactor>(X(1), I_3x3, Z_3x1), 0.0},
{std::make_shared<JacobianFactor>(X(1), I_3x3, Vector3::Ones()), 0.0});
// Hybrid factor P(x1|c1)
hfg.add(GaussianMixtureFactor({X(1)}, {m}, dt));
hfg.add(HybridGaussianFactor({X(1)}, {m}, dt));
// Prior factor on c1
hfg.add(DecisionTreeFactor(m, {2, 8}));
@ -237,16 +238,16 @@ TEST(HybridGaussianFactorGraph, eliminateFullMultifrontalTwoClique) {
hfg.add(JacobianFactor(X(1), I_3x3, X(2), -I_3x3, Z_3x1));
{
hfg.add(GaussianMixtureFactor(
{X(0)}, {{M(0), 2}},
{std::make_shared<JacobianFactor>(X(0), I_3x3, Z_3x1),
std::make_shared<JacobianFactor>(X(0), I_3x3, Vector3::Ones())}));
std::vector<GaussianFactorValuePair> factors = {
{std::make_shared<JacobianFactor>(X(0), I_3x3, Z_3x1), 0.0},
{std::make_shared<JacobianFactor>(X(0), I_3x3, Vector3::Ones()), 0.0}};
hfg.add(HybridGaussianFactor({X(0)}, {{M(0), 2}}, factors));
DecisionTree<Key, GaussianFactor::shared_ptr> dt1(
M(1), std::make_shared<JacobianFactor>(X(2), I_3x3, Z_3x1),
std::make_shared<JacobianFactor>(X(2), I_3x3, Vector3::Ones()));
DecisionTree<Key, GaussianFactorValuePair> dt1(
M(1), {std::make_shared<JacobianFactor>(X(2), I_3x3, Z_3x1), 0.0},
{std::make_shared<JacobianFactor>(X(2), I_3x3, Vector3::Ones()), 0.0});
hfg.add(GaussianMixtureFactor({X(2)}, {{M(1), 2}}, dt1));
hfg.add(HybridGaussianFactor({X(2)}, {{M(1), 2}}, dt1));
}
hfg.add(DecisionTreeFactor({{M(1), 2}, {M(2), 2}}, "1 2 3 4"));
@ -255,17 +256,17 @@ TEST(HybridGaussianFactorGraph, eliminateFullMultifrontalTwoClique) {
hfg.add(JacobianFactor(X(4), I_3x3, X(5), -I_3x3, Z_3x1));
{
DecisionTree<Key, GaussianFactor::shared_ptr> dt(
M(3), std::make_shared<JacobianFactor>(X(3), I_3x3, Z_3x1),
std::make_shared<JacobianFactor>(X(3), I_3x3, Vector3::Ones()));
DecisionTree<Key, GaussianFactorValuePair> dt(
M(3), {std::make_shared<JacobianFactor>(X(3), I_3x3, Z_3x1), 0.0},
{std::make_shared<JacobianFactor>(X(3), I_3x3, Vector3::Ones()), 0.0});
hfg.add(GaussianMixtureFactor({X(3)}, {{M(3), 2}}, dt));
hfg.add(HybridGaussianFactor({X(3)}, {{M(3), 2}}, dt));
DecisionTree<Key, GaussianFactor::shared_ptr> dt1(
M(2), std::make_shared<JacobianFactor>(X(5), I_3x3, Z_3x1),
std::make_shared<JacobianFactor>(X(5), I_3x3, Vector3::Ones()));
DecisionTree<Key, GaussianFactorValuePair> dt1(
M(2), {std::make_shared<JacobianFactor>(X(5), I_3x3, Z_3x1), 0.0},
{std::make_shared<JacobianFactor>(X(5), I_3x3, Vector3::Ones()), 0.0});
hfg.add(GaussianMixtureFactor({X(5)}, {{M(2), 2}}, dt1));
hfg.add(HybridGaussianFactor({X(5)}, {{M(2), 2}}, dt1));
}
auto ordering_full =
@ -551,11 +552,11 @@ TEST(HybridGaussianFactorGraph, optimize) {
hfg.add(JacobianFactor(X(0), I_3x3, Z_3x1));
hfg.add(JacobianFactor(X(0), I_3x3, X(1), -I_3x3, Z_3x1));
DecisionTree<Key, GaussianFactor::shared_ptr> dt(
C(1), std::make_shared<JacobianFactor>(X(1), I_3x3, Z_3x1),
std::make_shared<JacobianFactor>(X(1), I_3x3, Vector3::Ones()));
DecisionTree<Key, GaussianFactorValuePair> dt(
C(1), {std::make_shared<JacobianFactor>(X(1), I_3x3, Z_3x1), 0.0},
{std::make_shared<JacobianFactor>(X(1), I_3x3, Vector3::Ones()), 0.0});
hfg.add(GaussianMixtureFactor({X(1)}, {c1}, dt));
hfg.add(HybridGaussianFactor({X(1)}, {c1}, dt));
auto result = hfg.eliminateSequential();
@ -681,8 +682,8 @@ TEST(HybridGaussianFactorGraph, ErrorTreeWithConditional) {
x0, -I_1x1, model0),
c1 = make_shared<GaussianConditional>(f01, Vector1(mu), I_1x1, x1, I_1x1,
x0, -I_1x1, model1);
hbn.emplace_shared<GaussianMixture>(KeyVector{f01}, KeyVector{x0, x1},
DiscreteKeys{m1}, std::vector{c0, c1});
hbn.emplace_shared<HybridGaussianConditional>(
KeyVector{f01}, KeyVector{x0, x1}, DiscreteKeys{m1}, std::vector{c0, c1});
// Discrete uniform prior.
hbn.emplace_shared<DiscreteConditional>(m1, "0.5/0.5");
@ -717,7 +718,7 @@ TEST(HybridGaussianFactorGraph, assembleGraphTree) {
// Create expected decision tree with two factor graphs:
// Get mixture factor:
auto mixture = fg.at<GaussianMixtureFactor>(0);
auto mixture = fg.at<HybridGaussianFactor>(0);
CHECK(mixture);
// Get prior factor:
@ -805,7 +806,7 @@ TEST(HybridGaussianFactorGraph, EliminateTiny1) {
X(0), Vector1(14.1421), I_1x1 * 2.82843),
conditional1 = std::make_shared<GaussianConditional>(
X(0), Vector1(10.1379), I_1x1 * 2.02759);
expectedBayesNet.emplace_shared<GaussianMixture>(
expectedBayesNet.emplace_shared<HybridGaussianConditional>(
KeyVector{X(0)}, KeyVector{}, DiscreteKeys{mode},
std::vector{conditional0, conditional1});
@ -830,7 +831,7 @@ TEST(HybridGaussianFactorGraph, EliminateTiny1Swapped) {
HybridBayesNet bn;
// Create Gaussian mixture z_0 = x0 + noise for each measurement.
auto gm = std::make_shared<GaussianMixture>(
auto gm = std::make_shared<HybridGaussianConditional>(
KeyVector{Z(0)}, KeyVector{X(0)}, DiscreteKeys{mode},
std::vector{
GaussianConditional::sharedMeanAndStddev(Z(0), I_1x1, X(0), Z_1x1, 3),
@ -862,7 +863,7 @@ TEST(HybridGaussianFactorGraph, EliminateTiny1Swapped) {
X(0), Vector1(10.1379), I_1x1 * 2.02759),
conditional1 = std::make_shared<GaussianConditional>(
X(0), Vector1(14.1421), I_1x1 * 2.82843);
expectedBayesNet.emplace_shared<GaussianMixture>(
expectedBayesNet.emplace_shared<HybridGaussianConditional>(
KeyVector{X(0)}, KeyVector{}, DiscreteKeys{mode},
std::vector{conditional0, conditional1});
@ -899,7 +900,7 @@ TEST(HybridGaussianFactorGraph, EliminateTiny2) {
X(0), Vector1(17.3205), I_1x1 * 3.4641),
conditional1 = std::make_shared<GaussianConditional>(
X(0), Vector1(10.274), I_1x1 * 2.0548);
expectedBayesNet.emplace_shared<GaussianMixture>(
expectedBayesNet.emplace_shared<HybridGaussianConditional>(
KeyVector{X(0)}, KeyVector{}, DiscreteKeys{mode},
std::vector{conditional0, conditional1});
@ -946,7 +947,7 @@ TEST(HybridGaussianFactorGraph, EliminateSwitchingNetwork) {
for (size_t t : {0, 1, 2}) {
// Create Gaussian mixture on Z(t) conditioned on X(t) and mode N(t):
const auto noise_mode_t = DiscreteKey{N(t), 2};
bn.emplace_shared<GaussianMixture>(
bn.emplace_shared<HybridGaussianConditional>(
KeyVector{Z(t)}, KeyVector{X(t)}, DiscreteKeys{noise_mode_t},
std::vector{GaussianConditional::sharedMeanAndStddev(Z(t), I_1x1, X(t),
Z_1x1, 0.5),
@ -961,7 +962,7 @@ TEST(HybridGaussianFactorGraph, EliminateSwitchingNetwork) {
for (size_t t : {2, 1}) {
// Create Gaussian mixture on X(t) conditioned on X(t-1) and mode M(t-1):
const auto motion_model_t = DiscreteKey{M(t), 2};
auto gm = std::make_shared<GaussianMixture>(
auto gm = std::make_shared<HybridGaussianConditional>(
KeyVector{X(t)}, KeyVector{X(t - 1)}, DiscreteKeys{motion_model_t},
std::vector{GaussianConditional::sharedMeanAndStddev(
X(t), I_1x1, X(t - 1), Z_1x1, 0.2),

View File

@ -126,31 +126,34 @@ TEST(HybridGaussianElimination, IncrementalInference) {
/********************************************************/
// Run batch elimination so we can compare results.
const Ordering ordering {X(0), X(1), X(2)};
const Ordering ordering{X(0), X(1), X(2)};
// Now we calculate the expected factors using full elimination
const auto [expectedHybridBayesTree, expectedRemainingGraph] =
switching.linearizedFactorGraph.eliminatePartialMultifrontal(ordering);
// The densities on X(0) should be the same
auto x0_conditional =
dynamic_pointer_cast<GaussianMixture>(isam[X(0)]->conditional()->inner());
auto expected_x0_conditional = dynamic_pointer_cast<GaussianMixture>(
(*expectedHybridBayesTree)[X(0)]->conditional()->inner());
auto x0_conditional = dynamic_pointer_cast<HybridGaussianConditional>(
isam[X(0)]->conditional()->inner());
auto expected_x0_conditional =
dynamic_pointer_cast<HybridGaussianConditional>(
(*expectedHybridBayesTree)[X(0)]->conditional()->inner());
EXPECT(assert_equal(*x0_conditional, *expected_x0_conditional));
// The densities on X(1) should be the same
auto x1_conditional =
dynamic_pointer_cast<GaussianMixture>(isam[X(1)]->conditional()->inner());
auto expected_x1_conditional = dynamic_pointer_cast<GaussianMixture>(
(*expectedHybridBayesTree)[X(1)]->conditional()->inner());
auto x1_conditional = dynamic_pointer_cast<HybridGaussianConditional>(
isam[X(1)]->conditional()->inner());
auto expected_x1_conditional =
dynamic_pointer_cast<HybridGaussianConditional>(
(*expectedHybridBayesTree)[X(1)]->conditional()->inner());
EXPECT(assert_equal(*x1_conditional, *expected_x1_conditional));
// The densities on X(2) should be the same
auto x2_conditional =
dynamic_pointer_cast<GaussianMixture>(isam[X(2)]->conditional()->inner());
auto expected_x2_conditional = dynamic_pointer_cast<GaussianMixture>(
(*expectedHybridBayesTree)[X(2)]->conditional()->inner());
auto x2_conditional = dynamic_pointer_cast<HybridGaussianConditional>(
isam[X(2)]->conditional()->inner());
auto expected_x2_conditional =
dynamic_pointer_cast<HybridGaussianConditional>(
(*expectedHybridBayesTree)[X(2)]->conditional()->inner());
EXPECT(assert_equal(*x2_conditional, *expected_x2_conditional));
// We only perform manual continuous elimination for 0,0.
@ -279,9 +282,9 @@ TEST(HybridGaussianElimination, Approx_inference) {
// Check that the hybrid nodes of the bayes net match those of the pre-pruning
// bayes net, at the same positions.
auto &unprunedLastDensity = *dynamic_pointer_cast<GaussianMixture>(
auto &unprunedLastDensity = *dynamic_pointer_cast<HybridGaussianConditional>(
unprunedHybridBayesTree->clique(X(3))->conditional()->inner());
auto &lastDensity = *dynamic_pointer_cast<GaussianMixture>(
auto &lastDensity = *dynamic_pointer_cast<HybridGaussianConditional>(
incrementalHybrid[X(3)]->conditional()->inner());
std::vector<std::pair<DiscreteValues, double>> assignments =
@ -381,11 +384,11 @@ TEST(HybridGaussianISAM, NonTrivial) {
// Add connecting poses similar to PoseFactors in GTD
fg.emplace_shared<BetweenFactor<Pose2>>(X(0), Y(0), Pose2(0, 1.0, 0),
poseNoise);
poseNoise);
fg.emplace_shared<BetweenFactor<Pose2>>(Y(0), Z(0), Pose2(0, 1.0, 0),
poseNoise);
poseNoise);
fg.emplace_shared<BetweenFactor<Pose2>>(Z(0), W(0), Pose2(0, 1.0, 0),
poseNoise);
poseNoise);
// Create initial estimate
Values initial;
@ -414,24 +417,25 @@ TEST(HybridGaussianISAM, NonTrivial) {
KeyVector contKeys = {W(0), W(1)};
auto noise_model = noiseModel::Isotropic::Sigma(3, 1.0);
auto still = std::make_shared<PlanarMotionModel>(W(0), W(1), Pose2(0, 0, 0),
noise_model),
noise_model),
moving = std::make_shared<PlanarMotionModel>(W(0), W(1), odometry,
noise_model);
std::vector<PlanarMotionModel::shared_ptr> components = {moving, still};
auto mixtureFactor = std::make_shared<MixtureFactor>(
noise_model);
std::vector<std::pair<PlanarMotionModel::shared_ptr, double>> components = {
{moving, 0.0}, {still, 0.0}};
auto mixtureFactor = std::make_shared<HybridNonlinearFactor>(
contKeys, DiscreteKeys{gtsam::DiscreteKey(M(1), 2)}, components);
fg.push_back(mixtureFactor);
// Add equivalent of ImuFactor
fg.emplace_shared<BetweenFactor<Pose2>>(X(0), X(1), Pose2(1.0, 0.0, 0),
poseNoise);
poseNoise);
// PoseFactors-like at k=1
fg.emplace_shared<BetweenFactor<Pose2>>(X(1), Y(1), Pose2(0, 1, 0),
poseNoise);
poseNoise);
fg.emplace_shared<BetweenFactor<Pose2>>(Y(1), Z(1), Pose2(0, 1, 0),
poseNoise);
poseNoise);
fg.emplace_shared<BetweenFactor<Pose2>>(Z(1), W(1), Pose2(-1, 1, 0),
poseNoise);
poseNoise);
initial.insert(X(1), Pose2(1.0, 0.0, 0.0));
initial.insert(Y(1), Pose2(1.0, 1.0, 0.0));
@ -454,24 +458,24 @@ TEST(HybridGaussianISAM, NonTrivial) {
// Add odometry factor with discrete modes.
contKeys = {W(1), W(2)};
still = std::make_shared<PlanarMotionModel>(W(1), W(2), Pose2(0, 0, 0),
noise_model);
noise_model);
moving =
std::make_shared<PlanarMotionModel>(W(1), W(2), odometry, noise_model);
components = {moving, still};
mixtureFactor = std::make_shared<MixtureFactor>(
components = {{moving, 0.0}, {still, 0.0}};
mixtureFactor = std::make_shared<HybridNonlinearFactor>(
contKeys, DiscreteKeys{gtsam::DiscreteKey(M(2), 2)}, components);
fg.push_back(mixtureFactor);
// Add equivalent of ImuFactor
fg.emplace_shared<BetweenFactor<Pose2>>(X(1), X(2), Pose2(1.0, 0.0, 0),
poseNoise);
poseNoise);
// PoseFactors-like at k=2
fg.emplace_shared<BetweenFactor<Pose2>>(X(2), Y(2), Pose2(0, 1, 0),
poseNoise);
poseNoise);
fg.emplace_shared<BetweenFactor<Pose2>>(Y(2), Z(2), Pose2(0, 1, 0),
poseNoise);
poseNoise);
fg.emplace_shared<BetweenFactor<Pose2>>(Z(2), W(2), Pose2(-2, 1, 0),
poseNoise);
poseNoise);
initial.insert(X(2), Pose2(2.0, 0.0, 0.0));
initial.insert(Y(2), Pose2(2.0, 1.0, 0.0));
@ -497,24 +501,24 @@ TEST(HybridGaussianISAM, NonTrivial) {
// Add odometry factor with discrete modes.
contKeys = {W(2), W(3)};
still = std::make_shared<PlanarMotionModel>(W(2), W(3), Pose2(0, 0, 0),
noise_model);
noise_model);
moving =
std::make_shared<PlanarMotionModel>(W(2), W(3), odometry, noise_model);
components = {moving, still};
mixtureFactor = std::make_shared<MixtureFactor>(
components = {{moving, 0.0}, {still, 0.0}};
mixtureFactor = std::make_shared<HybridNonlinearFactor>(
contKeys, DiscreteKeys{gtsam::DiscreteKey(M(3), 2)}, components);
fg.push_back(mixtureFactor);
// Add equivalent of ImuFactor
fg.emplace_shared<BetweenFactor<Pose2>>(X(2), X(3), Pose2(1.0, 0.0, 0),
poseNoise);
poseNoise);
// PoseFactors-like at k=3
fg.emplace_shared<BetweenFactor<Pose2>>(X(3), Y(3), Pose2(0, 1, 0),
poseNoise);
poseNoise);
fg.emplace_shared<BetweenFactor<Pose2>>(Y(3), Z(3), Pose2(0, 1, 0),
poseNoise);
poseNoise);
fg.emplace_shared<BetweenFactor<Pose2>>(Z(3), W(3), Pose2(-3, 1, 0),
poseNoise);
poseNoise);
initial.insert(X(3), Pose2(3.0, 0.0, 0.0));
initial.insert(Y(3), Pose2(3.0, 1.0, 0.0));

View File

@ -10,8 +10,8 @@
* -------------------------------------------------------------------------- */
/**
* @file testMixtureFactor.cpp
* @brief Unit tests for MixtureFactor
* @file testHybridNonlinearFactor.cpp
* @brief Unit tests for HybridNonlinearFactor
* @author Varun Agrawal
* @date October 2022
*/
@ -20,8 +20,8 @@
#include <gtsam/discrete/DiscreteValues.h>
#include <gtsam/hybrid/HybridBayesNet.h>
#include <gtsam/hybrid/HybridGaussianFactorGraph.h>
#include <gtsam/hybrid/HybridNonlinearFactor.h>
#include <gtsam/hybrid/HybridNonlinearFactorGraph.h>
#include <gtsam/hybrid/MixtureFactor.h>
#include <gtsam/inference/Symbol.h>
#include <gtsam/slam/BetweenFactor.h>
@ -36,17 +36,17 @@ using symbol_shorthand::X;
/* ************************************************************************* */
// Check iterators of empty mixture.
TEST(MixtureFactor, Constructor) {
MixtureFactor factor;
MixtureFactor::const_iterator const_it = factor.begin();
TEST(HybridNonlinearFactor, Constructor) {
HybridNonlinearFactor factor;
HybridNonlinearFactor::const_iterator const_it = factor.begin();
CHECK(const_it == factor.end());
MixtureFactor::iterator it = factor.begin();
HybridNonlinearFactor::iterator it = factor.begin();
CHECK(it == factor.end());
}
/* ************************************************************************* */
// Test .print() output.
TEST(MixtureFactor, Printing) {
TEST(HybridNonlinearFactor, Printing) {
DiscreteKey m1(1, 2);
double between0 = 0.0;
double between1 = 1.0;
@ -58,13 +58,13 @@ TEST(MixtureFactor, Printing) {
std::make_shared<BetweenFactor<double>>(X(1), X(2), between0, model);
auto f1 =
std::make_shared<BetweenFactor<double>>(X(1), X(2), between1, model);
std::vector<NonlinearFactor::shared_ptr> factors{f0, f1};
std::vector<NonlinearFactorValuePair> factors{{f0, 0.0}, {f1, 0.0}};
MixtureFactor mixtureFactor({X(1), X(2)}, {m1}, factors);
HybridNonlinearFactor mixtureFactor({X(1), X(2)}, {m1}, factors);
std::string expected =
R"(Hybrid [x1 x2; 1]
MixtureFactor
HybridNonlinearFactor
Choice(1)
0 Leaf Nonlinear factor on 2 keys
1 Leaf Nonlinear factor on 2 keys
@ -73,7 +73,7 @@ MixtureFactor
}
/* ************************************************************************* */
static MixtureFactor getMixtureFactor() {
static HybridNonlinearFactor getHybridNonlinearFactor() {
DiscreteKey m1(1, 2);
double between0 = 0.0;
@ -86,15 +86,15 @@ static MixtureFactor getMixtureFactor() {
std::make_shared<BetweenFactor<double>>(X(1), X(2), between0, model);
auto f1 =
std::make_shared<BetweenFactor<double>>(X(1), X(2), between1, model);
std::vector<NonlinearFactor::shared_ptr> factors{f0, f1};
std::vector<NonlinearFactorValuePair> factors{{f0, 0.0}, {f1, 0.0}};
return MixtureFactor({X(1), X(2)}, {m1}, factors);
return HybridNonlinearFactor({X(1), X(2)}, {m1}, factors);
}
/* ************************************************************************* */
// Test the error of the MixtureFactor
TEST(MixtureFactor, Error) {
auto mixtureFactor = getMixtureFactor();
// Test the error of the HybridNonlinearFactor
TEST(HybridNonlinearFactor, Error) {
auto mixtureFactor = getHybridNonlinearFactor();
Values continuousValues;
continuousValues.insert<double>(X(1), 0);
@ -112,9 +112,9 @@ TEST(MixtureFactor, Error) {
}
/* ************************************************************************* */
// Test dim of the MixtureFactor
TEST(MixtureFactor, Dim) {
auto mixtureFactor = getMixtureFactor();
// Test dim of the HybridNonlinearFactor
TEST(HybridNonlinearFactor, Dim) {
auto mixtureFactor = getHybridNonlinearFactor();
EXPECT_LONGS_EQUAL(1, mixtureFactor.dim());
}

View File

@ -23,10 +23,11 @@
#include <gtsam/geometry/Pose2.h>
#include <gtsam/hybrid/HybridEliminationTree.h>
#include <gtsam/hybrid/HybridFactor.h>
#include <gtsam/hybrid/HybridNonlinearFactor.h>
#include <gtsam/hybrid/HybridNonlinearFactorGraph.h>
#include <gtsam/hybrid/MixtureFactor.h>
#include <gtsam/linear/GaussianBayesNet.h>
#include <gtsam/linear/GaussianFactorGraph.h>
#include <gtsam/linear/NoiseModel.h>
#include <gtsam/nonlinear/NonlinearFactorGraph.h>
#include <gtsam/nonlinear/PriorFactor.h>
#include <gtsam/sam/BearingRangeFactor.h>
@ -105,7 +106,7 @@ TEST(HybridNonlinearFactorGraph, Resize) {
auto discreteFactor = std::make_shared<DecisionTreeFactor>();
fg.push_back(discreteFactor);
auto dcFactor = std::make_shared<MixtureFactor>();
auto dcFactor = std::make_shared<HybridNonlinearFactor>();
fg.push_back(dcFactor);
EXPECT_LONGS_EQUAL(fg.size(), 3);
@ -131,8 +132,9 @@ TEST(HybridGaussianFactorGraph, Resize) {
auto still = std::make_shared<MotionModel>(X(0), X(1), 0.0, noise_model),
moving = std::make_shared<MotionModel>(X(0), X(1), 1.0, noise_model);
std::vector<MotionModel::shared_ptr> components = {still, moving};
auto dcFactor = std::make_shared<MixtureFactor>(
std::vector<std::pair<MotionModel::shared_ptr, double>> components = {
{still, 0.0}, {moving, 0.0}};
auto dcFactor = std::make_shared<HybridNonlinearFactor>(
contKeys, DiscreteKeys{gtsam::DiscreteKey(M(1), 2)}, components);
nhfg.push_back(dcFactor);
@ -150,10 +152,10 @@ TEST(HybridGaussianFactorGraph, Resize) {
}
/***************************************************************************
* Test that the MixtureFactor reports correctly if the number of continuous
* keys provided do not match the keys in the factors.
* Test that the HybridNonlinearFactor reports correctly if the number of
* continuous keys provided does not match the keys in the factors.
*/
TEST(HybridGaussianFactorGraph, MixtureFactor) {
TEST(HybridGaussianFactorGraph, HybridNonlinearFactor) {
auto nonlinearFactor = std::make_shared<BetweenFactor<double>>(
X(0), X(1), 0.0, Isotropic::Sigma(1, 0.1));
auto discreteFactor = std::make_shared<DecisionTreeFactor>();
@ -162,16 +164,17 @@ TEST(HybridGaussianFactorGraph, MixtureFactor) {
auto still = std::make_shared<MotionModel>(X(0), X(1), 0.0, noise_model),
moving = std::make_shared<MotionModel>(X(0), X(1), 1.0, noise_model);
std::vector<MotionModel::shared_ptr> components = {still, moving};
std::vector<std::pair<MotionModel::shared_ptr, double>> components = {
{still, 0.0}, {moving, 0.0}};
// Check for exception when the number of continuous keys is under-specified.
KeyVector contKeys = {X(0)};
THROWS_EXCEPTION(std::make_shared<MixtureFactor>(
THROWS_EXCEPTION(std::make_shared<HybridNonlinearFactor>(
contKeys, DiscreteKeys{gtsam::DiscreteKey(M(1), 2)}, components));
// Check for exception when there are too many continuous keys.
contKeys = {X(0), X(1), X(2)};
THROWS_EXCEPTION(std::make_shared<MixtureFactor>(
THROWS_EXCEPTION(std::make_shared<HybridNonlinearFactor>(
contKeys, DiscreteKeys{gtsam::DiscreteKey(M(1), 2)}, components));
}
@ -195,7 +198,7 @@ TEST(HybridFactorGraph, PushBack) {
fg = HybridNonlinearFactorGraph();
auto dcFactor = std::make_shared<MixtureFactor>();
auto dcFactor = std::make_shared<HybridNonlinearFactor>();
fg.push_back(dcFactor);
EXPECT_LONGS_EQUAL(fg.size(), 1);
@ -350,7 +353,8 @@ TEST(HybridGaussianElimination, EliminateHybrid_2_Variable) {
EliminateHybrid(factors, ordering);
auto gaussianConditionalMixture =
dynamic_pointer_cast<GaussianMixture>(hybridConditionalMixture->inner());
dynamic_pointer_cast<HybridGaussianConditional>(
hybridConditionalMixture->inner());
CHECK(gaussianConditionalMixture);
// Frontals = [x0, x1]
@ -413,7 +417,8 @@ TEST(HybridFactorGraph, PrintErrors) {
// fg.print();
// std::cout << "\n\n\n" << std::endl;
// fg.printErrors(
// HybridValues(hv.continuous(), DiscreteValues(), self.linearizationPoint));
// HybridValues(hv.continuous(), DiscreteValues(),
// self.linearizationPoint));
}
/****************************************************************************
@ -438,7 +443,7 @@ TEST(HybridFactorGraph, Full_Elimination) {
DiscreteFactorGraph discrete_fg;
// TODO(Varun) Make this a function of HybridGaussianFactorGraph?
for (auto& factor : (*remainingFactorGraph_partial)) {
for (auto &factor : (*remainingFactorGraph_partial)) {
auto df = dynamic_pointer_cast<DiscreteFactor>(factor);
assert(df);
discrete_fg.push_back(df);
@ -510,7 +515,7 @@ factor 0:
b = [ -10 ]
No noise model
factor 1:
GaussianMixtureFactor
HybridGaussianFactor
Hybrid [x0 x1; m0]{
Choice(m0)
0 Leaf :
@ -535,7 +540,7 @@ Hybrid [x0 x1; m0]{
}
factor 2:
GaussianMixtureFactor
HybridGaussianFactor
Hybrid [x1 x2; m1]{
Choice(m1)
0 Leaf :
@ -799,8 +804,9 @@ TEST(HybridFactorGraph, DefaultDecisionTree) {
noise_model),
moving = std::make_shared<PlanarMotionModel>(X(0), X(1), odometry,
noise_model);
std::vector<PlanarMotionModel::shared_ptr> motion_models = {still, moving};
fg.emplace_shared<MixtureFactor>(
std::vector<std::pair<PlanarMotionModel::shared_ptr, double>> motion_models =
{{still, 0.0}, {moving, 0.0}};
fg.emplace_shared<HybridNonlinearFactor>(
contKeys, DiscreteKeys{gtsam::DiscreteKey(M(1), 2)}, motion_models);
// Add Range-Bearing measurements from X0 to L0 and X1 to L1.
@ -836,9 +842,174 @@ TEST(HybridFactorGraph, DefaultDecisionTree) {
EXPECT_LONGS_EQUAL(1, remainingFactorGraph->size());
}
namespace test_relinearization {
/**
* @brief Create a Factor Graph by directly specifying all
* the factors instead of creating conditionals first.
* This way we can directly provide the likelihoods and
* then perform (re-)linearization.
*
* @param means The means of the HybridNonlinearFactor components.
* @param sigmas The standard deviations of the HybridNonlinearFactor components.
* @param m1 The discrete key.
* @param x0_measurement A measurement on X0.
* @return HybridNonlinearFactorGraph
*/
static HybridNonlinearFactorGraph CreateFactorGraph(
const std::vector<double> &means, const std::vector<double> &sigmas,
DiscreteKey &m1, double x0_measurement) {
auto model0 = noiseModel::Isotropic::Sigma(1, sigmas[0]);
auto model1 = noiseModel::Isotropic::Sigma(1, sigmas[1]);
auto prior_noise = noiseModel::Isotropic::Sigma(1, 1e-3);
auto f0 =
std::make_shared<BetweenFactor<double>>(X(0), X(1), means[0], model0);
auto f1 =
std::make_shared<BetweenFactor<double>>(X(0), X(1), means[1], model1);
// Create HybridNonlinearFactor
std::vector<NonlinearFactorValuePair> factors{
{f0, ComputeLogNormalizer(model0)}, {f1, ComputeLogNormalizer(model1)}};
HybridNonlinearFactor mixtureFactor({X(0), X(1)}, {m1}, factors);
HybridNonlinearFactorGraph hfg;
hfg.push_back(mixtureFactor);
hfg.push_back(PriorFactor<double>(X(0), x0_measurement, prior_noise));
return hfg;
}
} // namespace test_relinearization
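// Editorial note (a sketch, not part of the change): the scalars attached to
// {f0, f1} above only matter when the component noise models differ. For the
// sigmas used in the DifferentCovariances test below (1e2 vs 1e-2):
//   ComputeLogNormalizer(noiseModel::Isotropic::Sigma(1, 1e2))
//       - ComputeLogNormalizer(noiseModel::Isotropic::Sigma(1, 1e-2))
//     = 2 * (log(1e2) - log(1e-2)) = 4 * log(100) ≈ 18.42,
// whereas for the equal sigmas in DifferentMeans the scalars are equal and cancel.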
/* ************************************************************************* */
/**
* @brief Test components with differing means but the same covariances.
* The factor graph is
* *-X0-*-X1
* |
* M1
*/
TEST(HybridNonlinearFactorGraph, DifferentMeans) {
using namespace test_relinearization;
DiscreteKey m1(M(1), 2);
Values values;
double x0 = 0.0, x1 = 1.75;
values.insert(X(0), x0);
values.insert(X(1), x1);
std::vector<double> means = {0.0, 2.0}, sigmas = {1e-0, 1e-0};
HybridNonlinearFactorGraph hfg = CreateFactorGraph(means, sigmas, m1, x0);
{
auto bn = hfg.linearize(values)->eliminateSequential();
HybridValues actual = bn->optimize();
HybridValues expected(
VectorValues{{X(0), Vector1(0.0)}, {X(1), Vector1(-1.75)}},
DiscreteValues{{M(1), 0}});
EXPECT(assert_equal(expected, actual));
DiscreteValues dv0{{M(1), 0}};
VectorValues cont0 = bn->optimize(dv0);
double error0 = bn->error(HybridValues(cont0, dv0));
// TODO(Varun) Perform importance sampling to estimate error?
// regression
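// Editorial note: 0.69314718056 = -log(0.5); with equal sigmas the continuous
// error vanishes at the mode-wise optimum, leaving only the cost of the
// 50/50 mode choice.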
EXPECT_DOUBLES_EQUAL(0.69314718056, error0, 1e-9);
DiscreteValues dv1{{M(1), 1}};
VectorValues cont1 = bn->optimize(dv1);
double error1 = bn->error(HybridValues(cont1, dv1));
EXPECT_DOUBLES_EQUAL(error0, error1, 1e-9);
}
{
// Add measurement on x1
auto prior_noise = noiseModel::Isotropic::Sigma(1, 1e-3);
hfg.push_back(PriorFactor<double>(X(1), means[1], prior_noise));
auto bn = hfg.linearize(values)->eliminateSequential();
HybridValues actual = bn->optimize();
HybridValues expected(
VectorValues{{X(0), Vector1(0.0)}, {X(1), Vector1(0.25)}},
DiscreteValues{{M(1), 1}});
EXPECT(assert_equal(expected, actual));
{
DiscreteValues dv{{M(1), 0}};
VectorValues cont = bn->optimize(dv);
double error = bn->error(HybridValues(cont, dv));
// regression
EXPECT_DOUBLES_EQUAL(2.12692448787, error, 1e-9);
}
{
DiscreteValues dv{{M(1), 1}};
VectorValues cont = bn->optimize(dv);
double error = bn->error(HybridValues(cont, dv));
// regression
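// Editorial note: lower than mode 0 by ≈ 2.0 = Δμ²/(2σ²) with Δμ = 2, σ = 1,
// the cost of explaining the X(1) prior with the wrong between-mean.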
EXPECT_DOUBLES_EQUAL(0.126928487854, error, 1e-9);
}
}
}
/* ************************************************************************* */
/**
* @brief Test components with differing covariances but the same means.
* The factor graph is
* *-X0-*-X1
* |
* M1
*/
TEST_DISABLED(HybridNonlinearFactorGraph, DifferentCovariances) {
using namespace test_relinearization;
DiscreteKey m1(M(1), 2);
Values values;
double x0 = 1.0, x1 = 1.0;
values.insert(X(0), x0);
values.insert(X(1), x1);
std::vector<double> means = {0.0, 0.0}, sigmas = {1e2, 1e-2};
// Create FG with HybridNonlinearFactor and prior on X1
HybridNonlinearFactorGraph hfg = CreateFactorGraph(means, sigmas, m1, x0);
// Linearize and eliminate
auto hbn = hfg.linearize(values)->eliminateSequential();
VectorValues cv;
cv.insert(X(0), Vector1(0.0));
cv.insert(X(1), Vector1(0.0));
// Check the error values at the MLE point μ.
AlgebraicDecisionTree<Key> errorTree = hbn->errorTree(cv);
DiscreteValues dv0{{M(1), 0}};
DiscreteValues dv1{{M(1), 1}};
// regression
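// Editorial note: the gap 9.9035 - 0.6931 = 9.2103 ≈ log(σ₀/σ₁) = log(10⁴),
// presumably the contribution of the differing normalization constants.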
EXPECT_DOUBLES_EQUAL(9.90348755254, errorTree(dv0), 1e-9);
EXPECT_DOUBLES_EQUAL(0.69314718056, errorTree(dv1), 1e-9);
DiscreteConditional expected_m1(m1, "0.5/0.5");
DiscreteConditional actual_m1 = *(hbn->at(2)->asDiscrete());
EXPECT(assert_equal(expected_m1, actual_m1));
}
/* *************************************************************************
*/
int main() {
TestResult tr;
return TestRegistry::runAllTests(tr);
}
/* ************************************************************************* */
/* *************************************************************************
*/

View File

@ -143,7 +143,7 @@ TEST(HybridNonlinearISAM, IncrementalInference) {
/********************************************************/
// Run batch elimination so we can compare results.
const Ordering ordering {X(0), X(1), X(2)};
const Ordering ordering{X(0), X(1), X(2)};
// Now we calculate the actual factors using full elimination
const auto [expectedHybridBayesTree, expectedRemainingGraph] =
@ -151,24 +151,27 @@ TEST(HybridNonlinearISAM, IncrementalInference) {
.BaseEliminateable::eliminatePartialMultifrontal(ordering);
// The densities on X(0) should be the same
auto x0_conditional = dynamic_pointer_cast<GaussianMixture>(
auto x0_conditional = dynamic_pointer_cast<HybridGaussianConditional>(
bayesTree[X(0)]->conditional()->inner());
auto expected_x0_conditional = dynamic_pointer_cast<GaussianMixture>(
(*expectedHybridBayesTree)[X(0)]->conditional()->inner());
auto expected_x0_conditional =
dynamic_pointer_cast<HybridGaussianConditional>(
(*expectedHybridBayesTree)[X(0)]->conditional()->inner());
EXPECT(assert_equal(*x0_conditional, *expected_x0_conditional));
// The densities on X(1) should be the same
auto x1_conditional = dynamic_pointer_cast<GaussianMixture>(
auto x1_conditional = dynamic_pointer_cast<HybridGaussianConditional>(
bayesTree[X(1)]->conditional()->inner());
auto expected_x1_conditional = dynamic_pointer_cast<GaussianMixture>(
(*expectedHybridBayesTree)[X(1)]->conditional()->inner());
auto expected_x1_conditional =
dynamic_pointer_cast<HybridGaussianConditional>(
(*expectedHybridBayesTree)[X(1)]->conditional()->inner());
EXPECT(assert_equal(*x1_conditional, *expected_x1_conditional));
// The densities on X(2) should be the same
auto x2_conditional = dynamic_pointer_cast<GaussianMixture>(
auto x2_conditional = dynamic_pointer_cast<HybridGaussianConditional>(
bayesTree[X(2)]->conditional()->inner());
auto expected_x2_conditional = dynamic_pointer_cast<GaussianMixture>(
(*expectedHybridBayesTree)[X(2)]->conditional()->inner());
auto expected_x2_conditional =
dynamic_pointer_cast<HybridGaussianConditional>(
(*expectedHybridBayesTree)[X(2)]->conditional()->inner());
EXPECT(assert_equal(*x2_conditional, *expected_x2_conditional));
// We only perform manual continuous elimination for 0,0.
@ -300,9 +303,9 @@ TEST(HybridNonlinearISAM, Approx_inference) {
// Check that the hybrid nodes of the bayes net match those of the pre-pruning
// bayes net, at the same positions.
auto &unprunedLastDensity = *dynamic_pointer_cast<GaussianMixture>(
auto &unprunedLastDensity = *dynamic_pointer_cast<HybridGaussianConditional>(
unprunedHybridBayesTree->clique(X(3))->conditional()->inner());
auto &lastDensity = *dynamic_pointer_cast<GaussianMixture>(
auto &lastDensity = *dynamic_pointer_cast<HybridGaussianConditional>(
bayesTree[X(3)]->conditional()->inner());
std::vector<std::pair<DiscreteValues, double>> assignments =
@ -410,11 +413,11 @@ TEST(HybridNonlinearISAM, NonTrivial) {
// Add connecting poses similar to PoseFactors in GTD
fg.emplace_shared<BetweenFactor<Pose2>>(X(0), Y(0), Pose2(0, 1.0, 0),
poseNoise);
poseNoise);
fg.emplace_shared<BetweenFactor<Pose2>>(Y(0), Z(0), Pose2(0, 1.0, 0),
poseNoise);
poseNoise);
fg.emplace_shared<BetweenFactor<Pose2>>(Z(0), W(0), Pose2(0, 1.0, 0),
poseNoise);
poseNoise);
// Create initial estimate
Values initial;
@ -433,24 +436,25 @@ TEST(HybridNonlinearISAM, NonTrivial) {
KeyVector contKeys = {W(0), W(1)};
auto noise_model = noiseModel::Isotropic::Sigma(3, 1.0);
auto still = std::make_shared<PlanarMotionModel>(W(0), W(1), Pose2(0, 0, 0),
noise_model),
noise_model),
moving = std::make_shared<PlanarMotionModel>(W(0), W(1), odometry,
noise_model);
std::vector<PlanarMotionModel::shared_ptr> components = {moving, still};
auto mixtureFactor = std::make_shared<MixtureFactor>(
noise_model);
std::vector<std::pair<PlanarMotionModel::shared_ptr, double>> components = {
{moving, 0.0}, {still, 0.0}};
auto mixtureFactor = std::make_shared<HybridNonlinearFactor>(
contKeys, DiscreteKeys{gtsam::DiscreteKey(M(1), 2)}, components);
fg.push_back(mixtureFactor);
// Add equivalent of ImuFactor
fg.emplace_shared<BetweenFactor<Pose2>>(X(0), X(1), Pose2(1.0, 0.0, 0),
poseNoise);
poseNoise);
// PoseFactors-like at k=1
fg.emplace_shared<BetweenFactor<Pose2>>(X(1), Y(1), Pose2(0, 1, 0),
poseNoise);
poseNoise);
fg.emplace_shared<BetweenFactor<Pose2>>(Y(1), Z(1), Pose2(0, 1, 0),
poseNoise);
poseNoise);
fg.emplace_shared<BetweenFactor<Pose2>>(Z(1), W(1), Pose2(-1, 1, 0),
poseNoise);
poseNoise);
initial.insert(X(1), Pose2(1.0, 0.0, 0.0));
initial.insert(Y(1), Pose2(1.0, 1.0, 0.0));
@ -473,24 +477,24 @@ TEST(HybridNonlinearISAM, NonTrivial) {
// Add odometry factor with discrete modes.
contKeys = {W(1), W(2)};
still = std::make_shared<PlanarMotionModel>(W(1), W(2), Pose2(0, 0, 0),
noise_model);
noise_model);
moving =
std::make_shared<PlanarMotionModel>(W(1), W(2), odometry, noise_model);
components = {moving, still};
mixtureFactor = std::make_shared<MixtureFactor>(
components = {{moving, 0.0}, {still, 0.0}};
mixtureFactor = std::make_shared<HybridNonlinearFactor>(
contKeys, DiscreteKeys{gtsam::DiscreteKey(M(2), 2)}, components);
fg.push_back(mixtureFactor);
// Add equivalent of ImuFactor
fg.emplace_shared<BetweenFactor<Pose2>>(X(1), X(2), Pose2(1.0, 0.0, 0),
poseNoise);
poseNoise);
// PoseFactors-like at k=2
fg.emplace_shared<BetweenFactor<Pose2>>(X(2), Y(2), Pose2(0, 1, 0),
poseNoise);
poseNoise);
fg.emplace_shared<BetweenFactor<Pose2>>(Y(2), Z(2), Pose2(0, 1, 0),
poseNoise);
poseNoise);
fg.emplace_shared<BetweenFactor<Pose2>>(Z(2), W(2), Pose2(-2, 1, 0),
poseNoise);
poseNoise);
initial.insert(X(2), Pose2(2.0, 0.0, 0.0));
initial.insert(Y(2), Pose2(2.0, 1.0, 0.0));
@ -516,24 +520,24 @@ TEST(HybridNonlinearISAM, NonTrivial) {
// Add odometry factor with discrete modes.
contKeys = {W(2), W(3)};
still = std::make_shared<PlanarMotionModel>(W(2), W(3), Pose2(0, 0, 0),
noise_model);
noise_model);
moving =
std::make_shared<PlanarMotionModel>(W(2), W(3), odometry, noise_model);
components = {moving, still};
mixtureFactor = std::make_shared<MixtureFactor>(
components = {{moving, 0.0}, {still, 0.0}};
mixtureFactor = std::make_shared<HybridNonlinearFactor>(
contKeys, DiscreteKeys{gtsam::DiscreteKey(M(3), 2)}, components);
fg.push_back(mixtureFactor);
// Add equivalent of ImuFactor
fg.emplace_shared<BetweenFactor<Pose2>>(X(2), X(3), Pose2(1.0, 0.0, 0),
poseNoise);
poseNoise);
// PoseFactors-like at k=3
fg.emplace_shared<BetweenFactor<Pose2>>(X(3), Y(3), Pose2(0, 1, 0),
poseNoise);
poseNoise);
fg.emplace_shared<BetweenFactor<Pose2>>(Y(3), Z(3), Pose2(0, 1, 0),
poseNoise);
poseNoise);
fg.emplace_shared<BetweenFactor<Pose2>>(Z(3), W(3), Pose2(-3, 1, 0),
poseNoise);
poseNoise);
initial.insert(X(3), Pose2(3.0, 0.0, 0.0));
initial.insert(Y(3), Pose2(3.0, 1.0, 0.0));

View File

@ -18,11 +18,11 @@
#include <gtsam/base/serializationTestHelpers.h>
#include <gtsam/discrete/DiscreteConditional.h>
#include <gtsam/hybrid/GaussianMixture.h>
#include <gtsam/hybrid/GaussianMixtureFactor.h>
#include <gtsam/hybrid/HybridBayesNet.h>
#include <gtsam/hybrid/HybridBayesTree.h>
#include <gtsam/hybrid/HybridConditional.h>
#include <gtsam/hybrid/HybridGaussianConditional.h>
#include <gtsam/hybrid/HybridGaussianFactor.h>
#include <gtsam/inference/Symbol.h>
#include <gtsam/linear/GaussianConditional.h>
@ -51,29 +51,30 @@ BOOST_CLASS_EXPORT_GUID(ADT, "gtsam_AlgebraicDecisionTree");
BOOST_CLASS_EXPORT_GUID(ADT::Leaf, "gtsam_AlgebraicDecisionTree_Leaf");
BOOST_CLASS_EXPORT_GUID(ADT::Choice, "gtsam_AlgebraicDecisionTree_Choice")
BOOST_CLASS_EXPORT_GUID(GaussianMixtureFactor, "gtsam_GaussianMixtureFactor");
BOOST_CLASS_EXPORT_GUID(GaussianMixtureFactor::Factors,
"gtsam_GaussianMixtureFactor_Factors");
BOOST_CLASS_EXPORT_GUID(GaussianMixtureFactor::Factors::Leaf,
"gtsam_GaussianMixtureFactor_Factors_Leaf");
BOOST_CLASS_EXPORT_GUID(GaussianMixtureFactor::Factors::Choice,
"gtsam_GaussianMixtureFactor_Factors_Choice");
BOOST_CLASS_EXPORT_GUID(HybridGaussianFactor, "gtsam_HybridGaussianFactor");
BOOST_CLASS_EXPORT_GUID(HybridGaussianFactor::Factors,
"gtsam_HybridGaussianFactor_Factors");
BOOST_CLASS_EXPORT_GUID(HybridGaussianFactor::Factors::Leaf,
"gtsam_HybridGaussianFactor_Factors_Leaf");
BOOST_CLASS_EXPORT_GUID(HybridGaussianFactor::Factors::Choice,
"gtsam_HybridGaussianFactor_Factors_Choice");
BOOST_CLASS_EXPORT_GUID(GaussianMixture, "gtsam_GaussianMixture");
BOOST_CLASS_EXPORT_GUID(GaussianMixture::Conditionals,
"gtsam_GaussianMixture_Conditionals");
BOOST_CLASS_EXPORT_GUID(GaussianMixture::Conditionals::Leaf,
"gtsam_GaussianMixture_Conditionals_Leaf");
BOOST_CLASS_EXPORT_GUID(GaussianMixture::Conditionals::Choice,
"gtsam_GaussianMixture_Conditionals_Choice");
BOOST_CLASS_EXPORT_GUID(HybridGaussianConditional,
"gtsam_HybridGaussianConditional");
BOOST_CLASS_EXPORT_GUID(HybridGaussianConditional::Conditionals,
"gtsam_HybridGaussianConditional_Conditionals");
BOOST_CLASS_EXPORT_GUID(HybridGaussianConditional::Conditionals::Leaf,
"gtsam_HybridGaussianConditional_Conditionals_Leaf");
BOOST_CLASS_EXPORT_GUID(HybridGaussianConditional::Conditionals::Choice,
"gtsam_HybridGaussianConditional_Conditionals_Choice");
// Needed since GaussianConditional::FromMeanAndStddev uses it
BOOST_CLASS_EXPORT_GUID(noiseModel::Isotropic, "gtsam_noiseModel_Isotropic");
BOOST_CLASS_EXPORT_GUID(HybridBayesNet, "gtsam_HybridBayesNet");
/* ****************************************************************************/
// Test GaussianMixtureFactor serialization.
TEST(HybridSerialization, GaussianMixtureFactor) {
// Test HybridGaussianFactor serialization.
TEST(HybridSerialization, HybridGaussianFactor) {
KeyVector continuousKeys{X(0)};
DiscreteKeys discreteKeys{{M(0), 2}};
@ -82,13 +83,13 @@ TEST(HybridSerialization, GaussianMixtureFactor) {
auto b1 = Matrix::Ones(2, 1);
auto f0 = std::make_shared<JacobianFactor>(X(0), A, b0);
auto f1 = std::make_shared<JacobianFactor>(X(0), A, b1);
std::vector<GaussianFactor::shared_ptr> factors{f0, f1};
std::vector<GaussianFactorValuePair> factors{{f0, 0.0}, {f1, 0.0}};
const GaussianMixtureFactor factor(continuousKeys, discreteKeys, factors);
const HybridGaussianFactor factor(continuousKeys, discreteKeys, factors);
EXPECT(equalsObj<GaussianMixtureFactor>(factor));
EXPECT(equalsXML<GaussianMixtureFactor>(factor));
EXPECT(equalsBinary<GaussianMixtureFactor>(factor));
EXPECT(equalsObj<HybridGaussianFactor>(factor));
EXPECT(equalsXML<HybridGaussianFactor>(factor));
EXPECT(equalsBinary<HybridGaussianFactor>(factor));
}
/* ****************************************************************************/
@ -106,20 +107,20 @@ TEST(HybridSerialization, HybridConditional) {
}
/* ****************************************************************************/
// Test GaussianMixture serialization.
TEST(HybridSerialization, GaussianMixture) {
// Test HybridGaussianConditional serialization.
TEST(HybridSerialization, HybridGaussianConditional) {
const DiscreteKey mode(M(0), 2);
Matrix1 I = Matrix1::Identity();
const auto conditional0 = std::make_shared<GaussianConditional>(
GaussianConditional::FromMeanAndStddev(Z(0), I, X(0), Vector1(0), 0.5));
const auto conditional1 = std::make_shared<GaussianConditional>(
GaussianConditional::FromMeanAndStddev(Z(0), I, X(0), Vector1(0), 3));
const GaussianMixture gm({Z(0)}, {X(0)}, {mode},
{conditional0, conditional1});
const HybridGaussianConditional gm({Z(0)}, {X(0)}, {mode},
{conditional0, conditional1});
EXPECT(equalsObj<GaussianMixture>(gm));
EXPECT(equalsXML<GaussianMixture>(gm));
EXPECT(equalsBinary<GaussianMixture>(gm));
EXPECT(equalsObj<HybridGaussianConditional>(gm));
EXPECT(equalsXML<HybridGaussianConditional>(gm));
EXPECT(equalsBinary<HybridGaussianConditional>(gm));
}
/* ****************************************************************************/

View File

@ -46,7 +46,7 @@ namespace gtsam {
* Gaussian density over a set of continuous variables.
* - \b Discrete conditionals, implemented in \class DiscreteConditional, which
* represent a discrete conditional distribution over discrete variables.
* - \b Hybrid conditional densities, such as \class GaussianMixture, which is
* - \b Hybrid conditional densities, such as \class HybridGaussianConditional, which is
* a density over continuous variables given discrete/continuous parents.
* - \b Symbolic factors, used to represent a graph structure, implemented in
* \class SymbolicConditional. Only used for symbolic elimination etc.

View File

@ -25,6 +25,7 @@
#include <boost/serialization/nvp.hpp>
#endif
#include <memory>
#include <algorithm>
#include <gtsam/base/types.h>
#include <gtsam/base/FastVector.h>

View File

@ -707,6 +707,22 @@ const RobustModel::shared_ptr &robust, const NoiseModel::shared_ptr noise){
}
/* ************************************************************************* */
} // namespace noiseModel
/* *******************************************************************************/
double ComputeLogNormalizer(
const noiseModel::Gaussian::shared_ptr& noise_model) {
// Since noise models are Gaussian, we can get the logDeterminant using
// the same trick as in GaussianConditional
double logDetR = noise_model->R()
.diagonal()
.unaryExpr([](double x) { return log(x); })
.sum();
double logDeterminantSigma = -2.0 * logDetR;
size_t n = noise_model->dim();
constexpr double log2pi = 1.8378770664093454835606594728112;
return n * log2pi + logDeterminantSigma;
}
} // gtsam
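// Editorial sanity sketch (assumes the gtsam headers above): for an isotropic
// model in n dimensions with standard deviation sigma, |Sigma| = sigma^(2n),
// so ComputeLogNormalizer should reduce to n * (log(2*pi) + 2*log(sigma)).
#include <cassert>
#include <cmath>
#include <gtsam/linear/NoiseModel.h>
void checkLogNormalizer() {
  const size_t n = 3;
  const double sigma = 0.1;
  const auto model = gtsam::noiseModel::Isotropic::Sigma(n, sigma);
  const double expected = n * (std::log(2.0 * M_PI) + 2.0 * std::log(sigma));
  assert(std::abs(gtsam::ComputeLogNormalizer(model) - expected) < 1e-9);
}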

View File

@ -751,6 +751,18 @@ namespace gtsam {
template<> struct traits<noiseModel::Isotropic> : public Testable<noiseModel::Isotropic> {};
template<> struct traits<noiseModel::Unit> : public Testable<noiseModel::Unit> {};
/**
* @brief Helper function to compute the log of the normalizer |2πΣ| of a
* Gaussian noise model, i.e. n*log(2π) + log(|Σ|).
* We compute this in log-space for numerical accuracy.
*
* @param noise_model The Gaussian noise model
* whose normalizer we wish to compute.
* @return double The log-normalizer value.
*/
GTSAM_EXPORT double ComputeLogNormalizer(
const noiseModel::Gaussian::shared_ptr& noise_model);
} //\ namespace gtsam

View File

@ -18,6 +18,7 @@
* @brief Binary factor for a relative translation direction measurement.
*/
#include <gtsam/base/Vector.h>
#include <gtsam/geometry/Point3.h>
#include <gtsam/geometry/Unit3.h>
#include <gtsam/linear/NoiseModel.h>
@ -36,8 +37,6 @@ namespace gtsam {
* normalized(Tb - Ta) - w_aZb.point3()
*
* @ingroup sfm
*
*
*/
class TranslationFactor : public NoiseModelFactorN<Point3, Point3> {
private:
@ -45,7 +44,6 @@ class TranslationFactor : public NoiseModelFactorN<Point3, Point3> {
Point3 measured_w_aZb_;
public:
// Provide access to the Matrix& version of evaluateError:
using NoiseModelFactor2<Point3, Point3>::evaluateError;
@ -60,20 +58,20 @@ class TranslationFactor : public NoiseModelFactorN<Point3, Point3> {
* @brief Calculate error: (normalized(Tb - Ta) - measurement)
* where Tb and Ta are Point3 translations and measurement is
* the Unit3 translation direction from a to b.
*
*
* @param Ta translation for key a
* @param Tb translation for key b
* @param H1 optional jacobian in Ta
* @param H2 optional jacobian in Tb
* @return Vector
*/
Vector evaluateError(
const Point3& Ta, const Point3& Tb,
OptionalMatrixType H1,
OptionalMatrixType H2) const override {
Vector evaluateError(const Point3& Ta, const Point3& Tb,
OptionalMatrixType H1,
OptionalMatrixType H2) const override {
const Point3 dir = Tb - Ta;
Matrix33 H_predicted_dir;
const Point3 predicted = normalize(dir, H1 || H2 ? &H_predicted_dir : nullptr);
const Point3 predicted =
normalize(dir, H1 || H2 ? &H_predicted_dir : nullptr);
if (H1) *H1 = -H_predicted_dir;
if (H2) *H2 = H_predicted_dir;
return predicted - measured_w_aZb_;
@ -89,4 +87,73 @@ class TranslationFactor : public NoiseModelFactorN<Point3, Point3> {
}
#endif
}; // \ TranslationFactor
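// Editorial example (a sketch placed here for illustration; the constructor
// signature is assumed from the tests): a concrete evaluation of the error
// above. With Ta = (0,0,0), Tb = (1,1,0) and a +x direction measurement, the
// prediction is (1,1,0)/sqrt(2), so error = (1/sqrt(2) - 1, 1/sqrt(2), 0).
inline Vector3 translationFactorExample() {
  const auto model = noiseModel::Isotropic::Sigma(3, 0.05);
  const TranslationFactor factor(1, 2, Unit3(1, 0, 0), model);
  return factor.evaluateError(Point3(0, 0, 0), Point3(1, 1, 0));
}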
/**
* Binary factor for a relative translation direction measurement
* w_aZb. The measurement equation is
* w_aZb = scale * (Tb - Ta)
* i.e., w_aZb is the translation direction from frame A to B, in world
* coordinates.
*
* Instead of normalizing the Tb - Ta vector, we multiply it by a scalar
* which is also jointly optimized. This is inspired by the work on
* BATA (Zhuang et al, CVPR 2018).
*
* @ingroup sfm
*/
class BilinearAngleTranslationFactor
: public NoiseModelFactorN<Point3, Point3, Vector1> {
private:
typedef NoiseModelFactorN<Point3, Point3, Vector1> Base;
Point3 measured_w_aZb_;
public:
/// default constructor
BilinearAngleTranslationFactor() {}
BilinearAngleTranslationFactor(Key a, Key b, Key scale_key,
const Unit3& w_aZb,
const SharedNoiseModel& noiseModel)
: Base(noiseModel, a, b, scale_key), measured_w_aZb_(w_aZb.point3()) {}
// Provide access to the Matrix& version of evaluateError:
using Base::evaluateError;
/**
* @brief Calculate error: (scale * (Tb - Ta) - measurement)
* where Tb and Ta are Point3 translations and measurement is
* the Unit3 translation direction from a to b.
*
* @param Ta translation for key a
* @param Tb translation for key b
* @param scale scale for the translation from a to b
* @param H1 optional jacobian in Ta
* @param H2 optional jacobian in Tb
* @param H3 optional jacobian in scale
* @return Vector
*/
Vector evaluateError(const Point3& Ta, const Point3& Tb, const Vector1& scale,
OptionalMatrixType H1, OptionalMatrixType H2,
OptionalMatrixType H3) const override {
// Ideally we should use a positive real-valued scalar datatype for scale.
const double abs_scale = std::abs(scale[0]);
const Point3 predicted = (Tb - Ta) * abs_scale;
if (H1) {
*H1 = -Matrix3::Identity() * abs_scale;
}
if (H2) {
*H2 = Matrix3::Identity() * abs_scale;
}
if (H3) {
*H3 = scale[0] >= 0 ? (Tb - Ta) : (Ta - Tb);
}
return predicted - measured_w_aZb_;
}
private:
friend class boost::serialization::access;
template <class ARCHIVE>
void serialize(ARCHIVE& ar, const unsigned int /*version*/) {
ar& boost::serialization::make_nvp(
"Base", boost::serialization::base_object<Base>(*this));
}
}; // \ BilinearAngleTranslationFactor
} // namespace gtsam
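// Editorial example mirroring the model above (assumes using namespace gtsam):
// with Ta = (0,1,1), Tb = (0,2,2) and scale s = 1/sqrt(2), the prediction
// |s| * (Tb - Ta) is the unit vector (0, 1/sqrt(2), 1/sqrt(2)), so measuring
// Unit3(0, 1, 1) gives zero error.
inline Vector bilinearAngleExample() {
  const auto model = noiseModel::Isotropic::Sigma(3, 0.05);
  const BilinearAngleTranslationFactor factor(1, 2, 3, Unit3(0, 1, 1), model);
  return factor.evaluateError(Point3(0, 1, 1), Point3(0, 2, 2),
                              Vector1(1.0 / std::sqrt(2.0)));
}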

View File

@ -101,9 +101,17 @@ NonlinearFactorGraph TranslationRecovery::buildGraph(
NonlinearFactorGraph graph;
// Add translation factors for input translation directions.
uint64_t i = 0;
for (auto edge : relativeTranslations) {
graph.emplace_shared<TranslationFactor>(edge.key1(), edge.key2(),
edge.measured(), edge.noiseModel());
if (use_bilinear_translation_factor_) {
graph.emplace_shared<BilinearAngleTranslationFactor>(
edge.key1(), edge.key2(), Symbol('S', i), edge.measured(),
edge.noiseModel());
} else {
graph.emplace_shared<TranslationFactor>(
edge.key1(), edge.key2(), edge.measured(), edge.noiseModel());
}
i++;
}
return graph;
}
@ -163,6 +171,12 @@ Values TranslationRecovery::initializeRandomly(
insert(edge.key1());
insert(edge.key2());
}
if (use_bilinear_translation_factor_) {
for (uint64_t i = 0; i < relativeTranslations.size(); i++) {
initial.insert<Vector1>(Symbol('S', i), Vector1(1.0));
}
}
return initial;
}
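// Editorial usage sketch: enabling the flag swaps the normalized-direction
// error for scale * (Tb - Ta), adding one Symbol('S', i) scale variable per
// measurement, initialized to 1.0 above. `measurements` is a
// BinaryMeasurementsUnit3 assumed to be built elsewhere.
LevenbergMarquardtParams params;
TranslationRecovery algorithm(params, /*use_bilinear_translation_factor=*/true);
const Values result = algorithm.run(measurements, /*scale=*/2.0);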

View File

@ -60,14 +60,18 @@ class GTSAM_EXPORT TranslationRecovery {
// Parameters.
LevenbergMarquardtParams lmParams_;
const bool use_bilinear_translation_factor_ = false;
public:
/**
* @brief Construct a new Translation Recovery object
*
* @param lmParams parameters for optimization.
*/
TranslationRecovery(const LevenbergMarquardtParams &lmParams)
: lmParams_(lmParams) {}
TranslationRecovery(const LevenbergMarquardtParams &lmParams,
bool use_bilinear_translation_factor = false)
: lmParams_(lmParams),
use_bilinear_translation_factor_(use_bilinear_translation_factor) {}
/**
* @brief Default constructor.

View File

@ -23,7 +23,7 @@ virtual class SfmTrack : gtsam::SfmTrack2d {
SfmTrack();
SfmTrack(const gtsam::Point3& pt);
const Point3& point3() const;
Point3 p;
double r;
@ -37,8 +37,8 @@ virtual class SfmTrack : gtsam::SfmTrack2d {
bool equals(const gtsam::SfmTrack& expected, double tol) const;
};
#include <gtsam/nonlinear/Values.h>
#include <gtsam/nonlinear/NonlinearFactorGraph.h>
#include <gtsam/nonlinear/Values.h>
#include <gtsam/sfm/SfmData.h>
class SfmData {
SfmData();
@ -268,6 +268,8 @@ class MFAS {
#include <gtsam/sfm/TranslationRecovery.h>
class TranslationRecovery {
TranslationRecovery(const gtsam::LevenbergMarquardtParams& lmParams,
const bool use_bilinear_translation_factor);
TranslationRecovery(const gtsam::LevenbergMarquardtParams& lmParams);
TranslationRecovery(); // default params.
void addPrior(const gtsam::BinaryMeasurementsUnit3& relativeTranslations,

View File

@ -30,7 +30,7 @@ using namespace gtsam;
static SharedNoiseModel model(noiseModel::Isotropic::Sigma(3, 0.05));
// Keys are deliberately *not* in sorted order to test that case.
static const Key kKey1(2), kKey2(1);
static const Key kKey1(2), kKey2(1), kEdgeKey(3);
static const Unit3 kMeasured(1, 0, 0);
/* ************************************************************************* */
@ -99,6 +99,79 @@ TEST(TranslationFactor, Jacobian) {
EXPECT(assert_equal(H2Expected, H2Actual, 1e-9));
}
/* ************************************************************************* */
TEST(BilinearAngleTranslationFactor, Constructor) {
BilinearAngleTranslationFactor factor(kKey1, kKey2, kEdgeKey, kMeasured, model);
}
/* ************************************************************************* */
TEST(BilinearAngleTranslationFactor, ZeroError) {
// Create a factor
BilinearAngleTranslationFactor factor(kKey1, kKey2, kEdgeKey, kMeasured, model);
// Set the linearization
Point3 T1(1, 0, 0), T2(2, 0, 0);
Vector1 scale(1.0);
// Use the factor to calculate the error
Vector actualError(factor.evaluateError(T1, T2, scale));
// Verify we get the expected error
Vector expected = (Vector3() << 0, 0, 0).finished();
EXPECT(assert_equal(expected, actualError, 1e-9));
}
/* ************************************************************************* */
TEST(BilinearAngleTranslationFactor, NonZeroError) {
// create a factor
BilinearAngleTranslationFactor factor(kKey1, kKey2, kEdgeKey, kMeasured, model);
// set the linearization
Point3 T1(0, 1, 1), T2(0, 2, 2);
Vector1 scale(1.0 / sqrt(2));
// use the factor to calculate the error
Vector actualError(factor.evaluateError(T1, T2, scale));
// verify we get the expected error
Vector expected = (Vector3() << -1, 1 / sqrt(2), 1 / sqrt(2)).finished();
EXPECT(assert_equal(expected, actualError, 1e-9));
}
/* ************************************************************************* */
Vector bilinearAngleFactorError(const Point3 &T1, const Point3 &T2, const Vector1 &scale,
const BilinearAngleTranslationFactor &factor) {
return factor.evaluateError(T1, T2, scale);
}
TEST(BilinearAngleTranslationFactor, Jacobian) {
// Create a factor
BilinearAngleTranslationFactor factor(kKey1, kKey2, kEdgeKey, kMeasured, model);
// Set the linearization
Point3 T1(1, 0, 0), T2(2, 0, 0);
Vector1 scale(1.0);
// Use the factor to calculate the Jacobians
Matrix H1Actual, H2Actual, H3Actual;
factor.evaluateError(T1, T2, scale, H1Actual, H2Actual, H3Actual);
// Use numerical derivatives to calculate the Jacobians
Matrix H1Expected, H2Expected, H3Expected;
H1Expected = numericalDerivative11<Vector, Point3>(
std::bind(&bilinearAngleFactorError, std::placeholders::_1, T2, scale, factor), T1);
H2Expected = numericalDerivative11<Vector, Point3>(
std::bind(&bilinearAngleFactorError, T1, std::placeholders::_1, scale, factor), T2);
H3Expected = numericalDerivative11<Vector, Vector1>(
std::bind(&bilinearAngleFactorError, T1, T2, std::placeholders::_1, factor), scale);
// Verify the Jacobians are correct
EXPECT(assert_equal(H1Expected, H1Actual, 1e-9));
EXPECT(assert_equal(H2Expected, H2Actual, 1e-9));
EXPECT(assert_equal(H3Expected, H3Actual, 1e-9));
}
/* ************************************************************************* */
int main() {
TestResult tr;

View File

@ -171,6 +171,7 @@ file(COPY "${GTSAM_SOURCE_DIR}/examples/Data" DESTINATION "${GTSAM_MODULE_PATH}"
# Add gtsam as a dependency to the install target
set(GTSAM_PYTHON_DEPENDENCIES ${GTSAM_PYTHON_TARGET})
set(GTSAM_PYTHON_INSTALL_EXTRA "")
if(GTSAM_UNSTABLE_BUILD_PYTHON)
set(ignore
@ -250,6 +251,22 @@ if(GTSAM_UNSTABLE_BUILD_PYTHON)
VERBATIM
)
endif()
add_custom_target(
python-unstable-stubs
COMMAND
${CMAKE_COMMAND} -E env
"PYTHONPATH=${GTSAM_PYTHON_BUILD_DIRECTORY}/$ENV{PYTHONPATH}"
pybind11-stubgen -o . --enum-class-locations \"KernelFunctionType|NoiseFormat:gtsam.gtsam\" --enum-class-locations \"OrderingType:gtsam.gtsam.Ordering\" --numpy-array-use-type-var gtsam_unstable
DEPENDS ${GTSAM_PYTHON_DEPENDENCIES} ${GTSAM_PYTHON_TEST_FILES} ${GTSAM_PYTHON_UNSTABLE_TARGET}
WORKING_DIRECTORY "${GTSAM_PYTHON_BUILD_DIRECTORY}/"
)
if(NOT WIN32)
# Add the stubgen target as a dependency to the install target
list(APPEND GTSAM_PYTHON_INSTALL_EXTRA python-unstable-stubs)
endif()
# Custom make command to run all GTSAM_UNSTABLE Python tests
add_custom_target(
python-test-unstable
@ -262,26 +279,30 @@ if(GTSAM_UNSTABLE_BUILD_PYTHON)
)
endif()
add_custom_target(
python-stubs
COMMAND
${CMAKE_COMMAND} -E env
"PYTHONPATH=${GTSAM_PYTHON_BUILD_DIRECTORY}/$ENV{PYTHONPATH}"
pybind11-stubgen -o . --enum-class-locations \"KernelFunctionType|NoiseFormat:gtsam.gtsam\" --enum-class-locations \"OrderingType:gtsam.gtsam.Ordering\" --numpy-array-use-type-var gtsam
DEPENDS ${GTSAM_PYTHON_DEPENDENCIES} ${GTSAM_PYTHON_TEST_FILES} ${GTSAM_PYTHON_TARGET}
WORKING_DIRECTORY "${GTSAM_PYTHON_BUILD_DIRECTORY}/"
)
if(NOT WIN32)
# Add the stubgen target as a dependency to the install target
list(APPEND GTSAM_PYTHON_INSTALL_EXTRA python-stubs)
endif()
# Add custom target so we can install with `make python-install`
# Note below we make sure to install with --user iff not in a virtualenv
set(GTSAM_PYTHON_INSTALL_TARGET python-install)
#TODO(Varun) Maybe move the long command to script?
# https://stackoverflow.com/questions/49053544/how-do-i-run-a-python-script-every-time-in-a-cmake-build
if (NOT WIN32) # WIN32=1 is target platform is Windows
add_custom_target(${GTSAM_PYTHON_INSTALL_TARGET}
COMMAND stubgen -q -p gtsam && cp -a out/gtsam/ gtsam && ${PYTHON_EXECUTABLE} -c "import sys, subprocess; cmd = [sys.executable, '-m', 'pip', 'install']; has_venv = hasattr(sys, 'real_prefix') or (hasattr(sys, 'base_prefix') and sys.base_prefix != sys.prefix); cmd.append('--user' if not has_venv else ''); cmd.append('.'); subprocess.check_call([c for c in cmd if c])"
DEPENDS ${GTSAM_PYTHON_DEPENDENCIES}
WORKING_DIRECTORY ${GTSAM_PYTHON_BUILD_DIRECTORY}
VERBATIM)
else()
#TODO(Varun) Find equivalent cp on Windows
add_custom_target(${GTSAM_PYTHON_INSTALL_TARGET}
COMMAND ${PYTHON_EXECUTABLE} -c "import sys, subprocess; cmd = [sys.executable, '-m', 'pip', 'install']; has_venv = hasattr(sys, 'real_prefix') or (hasattr(sys, 'base_prefix') and sys.base_prefix != sys.prefix); cmd.append('--user' if not has_venv else ''); cmd.append('.'); subprocess.check_call([c for c in cmd if c])"
DEPENDS ${GTSAM_PYTHON_DEPENDENCIES}
WORKING_DIRECTORY ${GTSAM_PYTHON_BUILD_DIRECTORY}
VERBATIM)
endif()
add_custom_target(${GTSAM_PYTHON_INSTALL_TARGET}
COMMAND ${PYTHON_EXECUTABLE} -c "import sys, subprocess; cmd = [sys.executable, '-m', 'pip', 'install']; has_venv = hasattr(sys, 'real_prefix') or (hasattr(sys, 'base_prefix') and sys.base_prefix != sys.prefix); cmd.append('--user' if not has_venv else ''); cmd.append('.'); subprocess.check_call([c for c in cmd if c])"
DEPENDS ${GTSAM_PYTHON_DEPENDENCIES} ${GTSAM_PYTHON_INSTALL_EXTRA}
WORKING_DIRECTORY ${GTSAM_PYTHON_BUILD_DIRECTORY}
VERBATIM)
# Custom make command to run all GTSAM Python tests
add_custom_target(

View File

@ -1,3 +1,3 @@
-r requirements.txt
pyparsing>=2.4.2
mypy==1.4.1 #TODO(Varun) A bug in mypy>=1.5.0 causes an unresolved placeholder error when importing numpy>=2.0.0 (https://github.com/python/mypy/issues/17396)
pybind11-stubgen>=2.5.1

View File

@ -39,12 +39,12 @@ namespace py = pybind11;
{module_def} {{
m_.doc() = "pybind11 wrapper of {module_name}";
// Specializations for STL classes
#include "python/gtsam/specializations/{module_name}.h"
{submodules_init}
{wrapped_namespace}
// Specializations for STL classes
#include "python/gtsam/specializations/{module_name}.h"
}}

View File

@ -18,8 +18,9 @@ from gtsam.symbol_shorthand import A, X
from gtsam.utils.test_case import GtsamTestCase
from gtsam import (DiscreteConditional, DiscreteKeys, DiscreteValues,
GaussianConditional, GaussianMixture, HybridBayesNet,
HybridValues, VectorValues, noiseModel)
GaussianConditional, HybridBayesNet,
HybridGaussianConditional, HybridValues, VectorValues,
noiseModel)
class TestHybridBayesNet(GtsamTestCase):
@ -49,8 +50,8 @@ class TestHybridBayesNet(GtsamTestCase):
bayesNet = HybridBayesNet()
bayesNet.push_back(conditional)
bayesNet.push_back(
GaussianMixture([X(1)], [], discrete_keys,
[conditional0, conditional1]))
HybridGaussianConditional([X(1)], [], discrete_keys,
[conditional0, conditional1]))
bayesNet.push_back(DiscreteConditional(Asia, "99/1"))
# Create values at which to evaluate.

View File

@ -18,15 +18,16 @@ from gtsam.utils.test_case import GtsamTestCase
import gtsam
from gtsam import (DiscreteConditional, DiscreteKeys, GaussianConditional,
GaussianMixture, GaussianMixtureFactor, HybridBayesNet,
HybridGaussianFactorGraph, HybridValues, JacobianFactor,
Ordering, noiseModel)
HybridBayesNet, HybridGaussianConditional,
HybridGaussianFactor, HybridGaussianFactorGraph,
HybridValues, JacobianFactor, Ordering, noiseModel)
DEBUG_MARGINALS = False
class TestHybridGaussianFactorGraph(GtsamTestCase):
"""Unit tests for HybridGaussianFactorGraph."""
def test_create(self):
"""Test construction of hybrid factor graph."""
model = noiseModel.Unit.Create(3)
@ -36,7 +37,7 @@ class TestHybridGaussianFactorGraph(GtsamTestCase):
jf1 = JacobianFactor(X(0), np.eye(3), np.zeros((3, 1)), model)
jf2 = JacobianFactor(X(0), np.eye(3), np.ones((3, 1)), model)
gmf = GaussianMixtureFactor([X(0)], dk, [jf1, jf2])
gmf = HybridGaussianFactor([X(0)], dk, [(jf1, 0), (jf2, 0)])
hfg = HybridGaussianFactorGraph()
hfg.push_back(jf1)
@ -48,7 +49,7 @@ class TestHybridGaussianFactorGraph(GtsamTestCase):
self.assertEqual(hbn.size(), 2)
mixture = hbn.at(0).inner()
self.assertIsInstance(mixture, GaussianMixture)
self.assertIsInstance(mixture, HybridGaussianConditional)
self.assertEqual(len(mixture.keys()), 2)
discrete_conditional = hbn.at(hbn.size() - 1).inner()
@ -63,7 +64,7 @@ class TestHybridGaussianFactorGraph(GtsamTestCase):
jf1 = JacobianFactor(X(0), np.eye(3), np.zeros((3, 1)), model)
jf2 = JacobianFactor(X(0), np.eye(3), np.ones((3, 1)), model)
gmf = GaussianMixtureFactor([X(0)], dk, [jf1, jf2])
gmf = HybridGaussianFactor([X(0)], dk, [(jf1, 0), (jf2, 0)])
hfg = HybridGaussianFactorGraph()
hfg.push_back(jf1)
@ -106,8 +107,9 @@ class TestHybridGaussianFactorGraph(GtsamTestCase):
I_1x1,
X(0), [0],
sigma=3)
bayesNet.push_back(GaussianMixture([Z(i)], [X(0)], keys,
[conditional0, conditional1]))
bayesNet.push_back(
HybridGaussianConditional([Z(i)], [X(0)], keys,
[conditional0, conditional1]))
# Create prior on X(0).
prior_on_x0 = GaussianConditional.FromMeanAndStddev(
@ -219,9 +221,9 @@ class TestHybridGaussianFactorGraph(GtsamTestCase):
# Check ratio between unnormalized posterior and factor graph is the same for all modes:
for mode in [1, 0]:
values.insert_or_assign(M(0), mode)
self.assertAlmostEqual(bayesNet.evaluate(values) /
np.exp(-fg.error(values)),
0.6366197723675815)
self.assertAlmostEqual(
bayesNet.evaluate(values) / np.exp(-fg.error(values)),
0.6366197723675815)
self.assertAlmostEqual(bayesNet.error(values), fg.error(values))
# Test elimination.

View File

@ -17,29 +17,30 @@ import unittest
import numpy as np
from gtsam.symbol_shorthand import C, X
from gtsam.utils.test_case import GtsamTestCase
from gtsam import BetweenFactorPoint3, noiseModel, PriorFactorPoint3, Point3
import gtsam
from gtsam import BetweenFactorPoint3, Point3, PriorFactorPoint3, noiseModel
class TestHybridGaussianFactorGraph(GtsamTestCase):
"""Unit tests for HybridGaussianFactorGraph."""
def test_nonlinear_hybrid(self):
nlfg = gtsam.HybridNonlinearFactorGraph()
dk = gtsam.DiscreteKeys()
dk.push_back((10, 2))
nlfg.push_back(BetweenFactorPoint3(1, 2, Point3(
1, 2, 3), noiseModel.Diagonal.Variances([1, 1, 1])))
nlfg.push_back(
BetweenFactorPoint3(1, 2, Point3(1, 2, 3),
noiseModel.Diagonal.Variances([1, 1, 1])))
nlfg.push_back(
PriorFactorPoint3(2, Point3(1, 2, 3),
noiseModel.Diagonal.Variances([0.5, 0.5, 0.5])))
nlfg.push_back(
gtsam.MixtureFactor([1], dk, [
PriorFactorPoint3(1, Point3(0, 0, 0),
noiseModel.Unit.Create(3)),
PriorFactorPoint3(1, Point3(1, 2, 1),
noiseModel.Unit.Create(3))
]))
factors = [(PriorFactorPoint3(1, Point3(0, 0, 0),
noiseModel.Unit.Create(3)), 0.0),
(PriorFactorPoint3(1, Point3(1, 2, 1),
noiseModel.Unit.Create(3)), 0.0)]
nlfg.push_back(gtsam.HybridNonlinearFactor([1], dk, factors))
nlfg.push_back(gtsam.DecisionTreeFactor((10, 2), "1 3"))
values = gtsam.Values()
values.insert_point3(1, Point3(0, 0, 0))

View File

@ -14,11 +14,12 @@ from __future__ import print_function
import unittest
import gtsam
import numpy as np
from gtsam.symbol_shorthand import C, X
from gtsam.utils.test_case import GtsamTestCase
import gtsam
class TestHybridValues(GtsamTestCase):
"""Unit tests for HybridValues."""

View File

@ -353,6 +353,340 @@ TEST(TranslationRecovery, NodeWithBetweenFactorAndNoMeasurements) {
EXPECT(assert_equal(Point3(1, 2, 1), result.at<Point3>(4), 1e-4));
}
/* *************************************************************************
* Repeat all tests, but with the Bilinear Angle Translation Factor.
* ************************************************************************* */
/* ************************************************************************* */
// We read the BAL file, which has 3 cameras in it, with poses. We then assume
// the rotations are correct, but translations have to be estimated from
// translation directions only. Since we have 3 cameras, A, B, and C, we can at
// most create three relative measurements, let's call them w_aZb, w_aZc, and
// bZc. These will be of type Unit3. We then call `recoverTranslations` which
// sets up an optimization problem for the three unknown translations.
TEST(TranslationRecovery, BALBATA) {
const string filename = findExampleDataFile("dubrovnik-3-7-pre");
SfmData db = SfmData::FromBalFile(filename);
// Get camera poses, as Values
size_t j = 0;
Values poses;
for (auto camera : db.cameras) {
poses.insert(j++, camera.pose());
}
// Simulate measurements
const auto relativeTranslations = TranslationRecovery::SimulateMeasurements(
poses, {{0, 1}, {0, 2}, {1, 2}});
// Check simulated measurements.
for (auto& unitTranslation : relativeTranslations) {
EXPECT(assert_equal(GetDirectionFromPoses(poses, unitTranslation),
unitTranslation.measured()));
}
LevenbergMarquardtParams params;
TranslationRecovery algorithm(params, true);
const auto graph = algorithm.buildGraph(relativeTranslations);
EXPECT_LONGS_EQUAL(3, graph.size());
// Run translation recovery
const double scale = 2.0;
const auto result = algorithm.run(relativeTranslations, scale);
// Check result for first two translations, determined by prior
EXPECT(assert_equal(Point3(0, 0, 0), result.at<Point3>(0)));
EXPECT(assert_equal(
Point3(2 * GetDirectionFromPoses(poses, relativeTranslations[0])),
result.at<Point3>(1)));
// Check that the third translation is correct
Point3 Ta = poses.at<Pose3>(0).translation();
Point3 Tb = poses.at<Pose3>(1).translation();
Point3 Tc = poses.at<Pose3>(2).translation();
Point3 expected = (Tc - Ta) * (scale / (Tb - Ta).norm());
EXPECT(assert_equal(expected, result.at<Point3>(2), 1e-4));
// TODO(frank): how to get stats back?
// EXPECT_DOUBLES_EQUAL(0.0199833, actualError, 1e-5);
}
TEST(TranslationRecovery, TwoPoseTestBATA) {
// Create a dataset with 2 poses.
// __ __
// \/ \/
// 0 _____ 1
//
// 0 and 1 face in the same direction but have a translation offset.
Values poses;
poses.insert<Pose3>(0, Pose3(Rot3(), Point3(0, 0, 0)));
poses.insert<Pose3>(1, Pose3(Rot3(), Point3(2, 0, 0)));
auto relativeTranslations =
TranslationRecovery::SimulateMeasurements(poses, {{0, 1}});
// Check simulated measurements.
for (auto& unitTranslation : relativeTranslations) {
EXPECT(assert_equal(GetDirectionFromPoses(poses, unitTranslation),
unitTranslation.measured()));
}
LevenbergMarquardtParams params;
TranslationRecovery algorithm(params, true);
const auto graph = algorithm.buildGraph(relativeTranslations);
EXPECT_LONGS_EQUAL(1, graph.size());
// Run translation recovery
const auto result = algorithm.run(relativeTranslations, /*scale=*/3.0);
// Check result for first two translations, determined by prior
EXPECT(assert_equal(Point3(0, 0, 0), result.at<Point3>(0), 1e-8));
EXPECT(assert_equal(Point3(3, 0, 0), result.at<Point3>(1), 1e-8));
}
TEST(TranslationRecovery, ThreePoseTestBATA) {
// Create a dataset with 3 poses.
// __ __
// \/ \/
// 0 _____ 1
// \ __ /
// \\//
// 3
//
// 0 and 1 face in the same direction but have a translation offset. 3 is in
// the same direction as 0 and 1, in between 0 and 1, with some Y axis offset.
Values poses;
poses.insert<Pose3>(0, Pose3(Rot3(), Point3(0, 0, 0)));
poses.insert<Pose3>(1, Pose3(Rot3(), Point3(2, 0, 0)));
poses.insert<Pose3>(3, Pose3(Rot3(), Point3(1, -1, 0)));
auto relativeTranslations = TranslationRecovery::SimulateMeasurements(
poses, {{0, 1}, {1, 3}, {3, 0}});
// Check simulated measurements.
for (auto& unitTranslation : relativeTranslations) {
EXPECT(assert_equal(GetDirectionFromPoses(poses, unitTranslation),
unitTranslation.measured()));
}
LevenbergMarquardtParams params;
TranslationRecovery algorithm(params, true);
const auto graph = algorithm.buildGraph(relativeTranslations);
EXPECT_LONGS_EQUAL(3, graph.size());
const auto result = algorithm.run(relativeTranslations, /*scale=*/3.0);
// Check result
EXPECT(assert_equal(Point3(0, 0, 0), result.at<Point3>(0), 1e-8));
EXPECT(assert_equal(Point3(3, 0, 0), result.at<Point3>(1), 1e-8));
EXPECT(assert_equal(Point3(1.5, -1.5, 0), result.at<Point3>(3), 1e-8));
}
/* ************************************************************************* */
TEST(TranslationRecovery, ThreePosesIncludingZeroTranslationBATA) {
// Create a dataset with 3 poses.
// __ __
// \/ \/
// 0 _____ 1
// 2 <|
//
// 0 and 1 face in the same direction but have a translation offset. 2 is at
// the same point as 1 but is rotated, with little FOV overlap.
Values poses;
poses.insert<Pose3>(0, Pose3(Rot3(), Point3(0, 0, 0)));
poses.insert<Pose3>(1, Pose3(Rot3(), Point3(2, 0, 0)));
poses.insert<Pose3>(2, Pose3(Rot3::RzRyRx(-M_PI / 2, 0, 0), Point3(2, 0, 0)));
auto relativeTranslations =
TranslationRecovery::SimulateMeasurements(poses, {{0, 1}, {1, 2}});
// Check simulated measurements.
for (auto& unitTranslation : relativeTranslations) {
EXPECT(assert_equal(GetDirectionFromPoses(poses, unitTranslation),
unitTranslation.measured()));
}
LevenbergMarquardtParams params;
TranslationRecovery algorithm(params, true);
// Run translation recovery
const auto result = algorithm.run(relativeTranslations, /*scale=*/3.0);
// Check result
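// Poses 1 and 2 are co-located, so their recovered translations coincide.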
EXPECT(assert_equal(Point3(0, 0, 0), result.at<Point3>(0), 1e-8));
EXPECT(assert_equal(Point3(3, 0, 0), result.at<Point3>(1), 1e-8));
EXPECT(assert_equal(Point3(3, 0, 0), result.at<Point3>(2), 1e-8));
}
/* ************************************************************************* */
TEST(TranslationRecovery, FourPosesIncludingZeroTranslationBATA) {
// Create a dataset with 4 poses.
// __ __
// \/ \/
// 0 _____ 1
// \ __ 2 <|
// \\//
// 3
//
// 0 and 1 face in the same direction but have a translation offset. 2 is at
// the same point as 1 but is rotated, with very little FOV overlap. 3 faces
// the same direction as 0 and 1, lies between them, and is offset along the Y axis.
Values poses;
poses.insert<Pose3>(0, Pose3(Rot3(), Point3(0, 0, 0)));
poses.insert<Pose3>(1, Pose3(Rot3(), Point3(2, 0, 0)));
poses.insert<Pose3>(2, Pose3(Rot3::RzRyRx(-M_PI / 2, 0, 0), Point3(2, 0, 0)));
poses.insert<Pose3>(3, Pose3(Rot3(), Point3(1, -1, 0)));
auto relativeTranslations = TranslationRecovery::SimulateMeasurements(
poses, {{0, 1}, {1, 2}, {1, 3}, {3, 0}});
// Check simulated measurements.
for (auto& unitTranslation : relativeTranslations) {
EXPECT(assert_equal(GetDirectionFromPoses(poses, unitTranslation),
unitTranslation.measured()));
}
LevenbergMarquardtParams params;
TranslationRecovery algorithm(params, true);
// Run translation recovery
const auto result = algorithm.run(relativeTranslations, /*scale=*/4.0);
// Check result
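// Ground-truth 0 -> 1 distance is 2, so scale = 4 doubles all translations:
// pose 2 coincides with pose 1 at (4, 0, 0) and pose 3 maps to (2, -2, 0).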
EXPECT(assert_equal(Point3(0, 0, 0), result.at<Point3>(0), 1e-8));
EXPECT(assert_equal(Point3(4, 0, 0), result.at<Point3>(1), 1e-8));
EXPECT(assert_equal(Point3(4, 0, 0), result.at<Point3>(2), 1e-8));
EXPECT(assert_equal(Point3(2, -2, 0), result.at<Point3>(3), 1e-8));
}
/* ************************************************************************* */
TEST(TranslationRecovery, ThreePosesWithZeroTranslationBATA) {
Values poses;
poses.insert<Pose3>(0, Pose3(Rot3::RzRyRx(-M_PI / 6, 0, 0), Point3(0, 0, 0)));
poses.insert<Pose3>(1, Pose3(Rot3(), Point3(0, 0, 0)));
poses.insert<Pose3>(2, Pose3(Rot3::RzRyRx(M_PI / 6, 0, 0), Point3(0, 0, 0)));
auto relativeTranslations = TranslationRecovery::SimulateMeasurements(
poses, {{0, 1}, {1, 2}, {2, 0}});
// Check simulated measurements.
for (auto& unitTranslation : relativeTranslations) {
EXPECT(assert_equal(GetDirectionFromPoses(poses, unitTranslation),
unitTranslation.measured()));
}
LevenbergMarquardtParams params;
TranslationRecovery algorithm(params, true);
// Run translation recovery
const auto result = algorithm.run(relativeTranslations, /*scale=*/4.0);
// Check result
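// All three poses share the same translation, so the recovered translations
// collapse to the origin regardless of the requested scale.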
EXPECT(assert_equal(Point3(0, 0, 0), result.at<Point3>(0), 1e-8));
EXPECT(assert_equal(Point3(0, 0, 0), result.at<Point3>(1), 1e-8));
EXPECT(assert_equal(Point3(0, 0, 0), result.at<Point3>(2), 1e-8));
}
/* ************************************************************************* */
TEST(TranslationRecovery, ThreePosesWithOneSoftConstraintBATA) {
// Create a dataset with 3 poses.
// __ __
// \/ \/
// 0 _____ 1
// \ __ /
// \\//
// 3
//
// 0 and 1 face in the same direction but have a translation offset. 3 faces
// the same direction, lies between 0 and 1, and is offset along the Y axis.
Values poses;
poses.insert<Pose3>(0, Pose3(Rot3(), Point3(0, 0, 0)));
poses.insert<Pose3>(1, Pose3(Rot3(), Point3(2, 0, 0)));
poses.insert<Pose3>(3, Pose3(Rot3(), Point3(1, -1, 0)));
auto relativeTranslations = TranslationRecovery::SimulateMeasurements(
poses, {{0, 1}, {0, 3}, {1, 3}});
std::vector<BinaryMeasurement<Point3>> betweenTranslations;
betweenTranslations.emplace_back(0, 3, Point3(1, -1, 0),
noiseModel::Isotropic::Sigma(3, 1e-2));
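// An isotropic sigma of 1e-2 makes this a soft prior on the 0 -> 3
// translation. With scale = 0, the between measurements (rather than a fixed
// prior scale) determine the overall scale of the solution.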
LevenbergMarquardtParams params;
TranslationRecovery algorithm(params, true);
auto result =
algorithm.run(relativeTranslations, /*scale=*/0.0, betweenTranslations);
// Check result
EXPECT(assert_equal(Point3(0, 0, 0), result.at<Point3>(0), 1e-4));
EXPECT(assert_equal(Point3(2, 0, 0), result.at<Point3>(1), 1e-4));
EXPECT(assert_equal(Point3(1, -1, 0), result.at<Point3>(3), 1e-4));
}
/* ************************************************************************* */
TEST(TranslationRecovery, ThreePosesWithOneHardConstraintBATA) {
// Create a dataset with 3 poses.
// __ __
// \/ \/
// 0 _____ 1
// \ __ /
// \\//
// 3
//
// 0 and 1 face in the same direction but have a translation offset. 3 faces
// the same direction, lies between 0 and 1, and is offset along the Y axis.
Values poses;
poses.insert<Pose3>(0, Pose3(Rot3(), Point3(0, 0, 0)));
poses.insert<Pose3>(1, Pose3(Rot3(), Point3(2, 0, 0)));
poses.insert<Pose3>(3, Pose3(Rot3(), Point3(1, -1, 0)));
auto relativeTranslations = TranslationRecovery::SimulateMeasurements(
poses, {{0, 1}, {0, 3}, {1, 3}});
std::vector<BinaryMeasurement<Point3>> betweenTranslations;
betweenTranslations.emplace_back(0, 1, Point3(2, 0, 0),
noiseModel::Constrained::All(3, 1e2));
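// noiseModel::Constrained::All(3, 1e2) enforces this between measurement as
// a (nearly) hard equality constraint, with mu = 1e2 as the penalty weight.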
LevenbergMarquardtParams params;
TranslationRecovery algorithm(params, true);
auto result =
algorithm.run(relativeTranslations, /*scale=*/0.0, betweenTranslations);
// Check result
EXPECT(assert_equal(Point3(0, 0, 0), result.at<Point3>(0), 1e-4));
EXPECT(assert_equal(Point3(2, 0, 0), result.at<Point3>(1), 1e-4));
EXPECT(assert_equal(Point3(1, -1, 0), result.at<Point3>(3), 1e-4));
}
/* ************************************************************************* */
TEST(TranslationRecovery, NodeWithBetweenFactorAndNoMeasurementsBATA) {
// Checks that valid results are obtained when a between-translation edge is
// provided for a node that has no other relative translation measurements.
Values poses;
poses.insert<Pose3>(0, Pose3(Rot3(), Point3(0, 0, 0)));
poses.insert<Pose3>(1, Pose3(Rot3(), Point3(2, 0, 0)));
poses.insert<Pose3>(3, Pose3(Rot3(), Point3(1, -1, 0)));
poses.insert<Pose3>(4, Pose3(Rot3(), Point3(1, 2, 1)));
auto relativeTranslations = TranslationRecovery::SimulateMeasurements(
poses, {{0, 1}, {0, 3}, {1, 3}});
std::vector<BinaryMeasurement<Point3>> betweenTranslations;
betweenTranslations.emplace_back(0, 1, Point3(2, 0, 0),
noiseModel::Constrained::All(3, 1e2));
// Node 4 only has this between translation prior, no relative translations.
betweenTranslations.emplace_back(0, 4, Point3(1, 2, 1));
LevenbergMarquardtParams params;
TranslationRecovery algorithm(params, true);
auto result =
algorithm.run(relativeTranslations, /*scale=*/0.0, betweenTranslations);
// Check result
EXPECT(assert_equal(Point3(0, 0, 0), result.at<Point3>(0), 1e-4));
EXPECT(assert_equal(Point3(2, 0, 0), result.at<Point3>(1), 1e-4));
EXPECT(assert_equal(Point3(1, -1, 0), result.at<Point3>(3), 1e-4));
EXPECT(assert_equal(Point3(1, 2, 1), result.at<Point3>(4), 1e-4));
}
/* ************************************************************************* */
int main() {
TestResult tr;