Merge pull request #1830 from borglab/hybrid-renaming
commit 525ff7cc11

@@ -191,17 +191,17 @@ E_{gc}(x,y)=\frac{1}{2}\|Rx+Sy-d\|_{\Sigma}^{2}.\label{eq:gc_error}
 \end_layout
 
 \begin_layout Subsubsection*
-GaussianMixture
+HybridGaussianConditional
 \end_layout
 
 \begin_layout Standard
 A
 \emph on
-GaussianMixture
+HybridGaussianConditional
 \emph default
 (maybe to be renamed to
 \emph on
-GaussianMixtureComponent
+HybridGaussianConditionalComponent
 \emph default
 ) just indexes into a number of
 \emph on
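To make the rename concrete: a `HybridGaussianConditional` simply indexes into a set of `GaussianConditional` components using its discrete parents. A minimal sketch against the constructors shown later in this diff — the keys `X(0)`/`M(0)` and all numeric values are illustrative only, not part of the PR:

```cpp
#include <gtsam/hybrid/HybridGaussianConditional.h>
#include <gtsam/inference/Symbol.h>

#include <vector>

using namespace gtsam;
using symbol_shorthand::M;
using symbol_shorthand::X;

int main() {
  // One binary discrete parent m selects between two Gaussian components.
  const DiscreteKey mode(M(0), 2);

  // Two components p(x0 | m=0) and p(x0 | m=1), with different means/sigmas.
  const std::vector<GaussianConditional::shared_ptr> components{
      GaussianConditional::sharedMeanAndStddev(X(0), Vector1(-4.0), 1.0),
      GaussianConditional::sharedMeanAndStddev(X(0), Vector1(4.0), 2.0)};

  // The hybrid conditional indexes into the components by the discrete mode.
  const HybridGaussianConditional hgc({X(0)}, {}, {mode}, components);
  hgc.print("p(x0 | m):\n");
  return 0;
}
```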
@@ -233,7 +233,7 @@ GaussianConditional
 to a set of discrete variables.
 As
 \emph on
-GaussianMixture
+HybridGaussianConditional
 \emph default
 is a
 \emph on

@@ -324,7 +324,7 @@ The key point here is that
 \color inherit
 is the log-normalization constant for the complete
 \emph on
-GaussianMixture
+HybridGaussianConditional
 \emph default
 across all values of
 \begin_inset Formula $m$

@@ -548,15 +548,15 @@ with
 \end_layout
 
 \begin_layout Subsubsection*
-GaussianMixtureFactor
+HybridGaussianFactor
 \end_layout
 
 \begin_layout Standard
 Analogously, a
 \emph on
-GaussianMixtureFactor
+HybridGaussianFactor
 \emph default
-typically results from a GaussianMixture by having known values
+typically results from a HybridGaussianConditional by having known values
 \begin_inset Formula $\bar{x}$
 \end_inset
 

@@ -817,7 +817,7 @@ E_{mf}(y,m)=\frac{1}{2}\|A_{m}y-b_{m}\|_{\Sigma_{mfm}}^{2}=E_{gcm}(\bar{x},y)+K_
 
 \end_inset
 
-which is identical to the GaussianMixture error
+which is identical to the HybridGaussianConditional error
 \begin_inset CommandInset ref
 LatexCommand eqref
 reference "eq:gm_error"
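The relationship described above — a `HybridGaussianFactor` obtained from a `HybridGaussianConditional` by fixing the frontal to a known value x̄ — is what `likelihood()` (changed further down in this diff) computes. A hedged sketch; it assumes the five-argument `GaussianConditional::sharedMeanAndStddev` overload, and the measurement key `Z(0)` and numbers are made up for illustration:

```cpp
#include <gtsam/hybrid/HybridGaussianConditional.h>
#include <gtsam/hybrid/HybridGaussianFactor.h>
#include <gtsam/inference/Symbol.h>

#include <vector>

using namespace gtsam;
using symbol_shorthand::M;
using symbol_shorthand::X;
using symbol_shorthand::Z;

int main() {
  const DiscreteKey mode(M(0), 2);

  // Measurement model p(z0 | x0, m): z0 = x0 + noise, with mode-dependent sigma.
  const std::vector<GaussianConditional::shared_ptr> components{
      GaussianConditional::sharedMeanAndStddev(Z(0), I_1x1, X(0), Vector1::Zero(), 0.5),
      GaussianConditional::sharedMeanAndStddev(Z(0), I_1x1, X(0), Vector1::Zero(), 3.0)};
  const HybridGaussianConditional hgc({Z(0)}, {X(0)}, {mode}, components);

  // Fixing the frontal z0 to a measured value (the xbar above) yields a
  // factor on the parents (x0 and the mode), i.e. a HybridGaussianFactor.
  VectorValues given;
  given.insert(Z(0), Vector1(1.3));
  const std::shared_ptr<HybridGaussianFactor> factor = hgc.likelihood(given);
  factor->print("phi(x0, m):\n");
  return 0;
}
```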
@@ -168,11 +168,11 @@ HybridBayesNet HybridBayesNet::prune(size_t maxNrLeaves) {
   DecisionTreeFactor prunedDiscreteProbs =
       this->pruneDiscreteConditionals(maxNrLeaves);
 
-  /* To prune, we visitWith every leaf in the GaussianMixture.
+  /* To prune, we visitWith every leaf in the HybridGaussianConditional.
    * For each leaf, using the assignment we can check the discrete decision tree
    * for 0.0 probability, then just set the leaf to a nullptr.
    *
-   * We can later check the GaussianMixture for just nullptrs.
+   * We can later check the HybridGaussianConditional for just nullptrs.
    */
 
   HybridBayesNet prunedBayesNetFragment;

@@ -182,14 +182,16 @@ HybridBayesNet HybridBayesNet::prune(size_t maxNrLeaves) {
   for (auto &&conditional : *this) {
     if (auto gm = conditional->asMixture()) {
       // Make a copy of the Gaussian mixture and prune it!
-      auto prunedGaussianMixture = std::make_shared<GaussianMixture>(*gm);
-      prunedGaussianMixture->prune(prunedDiscreteProbs);  // imperative :-(
+      auto prunedHybridGaussianConditional =
+          std::make_shared<HybridGaussianConditional>(*gm);
+      prunedHybridGaussianConditional->prune(
+          prunedDiscreteProbs);  // imperative :-(
 
       // Type-erase and add to the pruned Bayes Net fragment.
-      prunedBayesNetFragment.push_back(prunedGaussianMixture);
+      prunedBayesNetFragment.push_back(prunedHybridGaussianConditional);
 
     } else {
-      // Add the non-GaussianMixture conditional
+      // Add the non-HybridGaussianConditional conditional
       prunedBayesNetFragment.push_back(conditional);
     }
   }
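For context on how this pruning path is typically driven, here is a minimal sketch of calling `HybridBayesNet::prune` on a toy net; the model, the `7/3` prior, and the single-leaf threshold are made-up illustrations:

```cpp
#include <gtsam/discrete/DiscreteConditional.h>
#include <gtsam/hybrid/HybridBayesNet.h>
#include <gtsam/hybrid/HybridGaussianConditional.h>
#include <gtsam/inference/Symbol.h>

#include <vector>

using namespace gtsam;
using symbol_shorthand::M;
using symbol_shorthand::X;

int main() {
  const DiscreteKey mode(M(0), 2);

  HybridBayesNet hbn;

  // p(x0 | m): two Gaussian components selected by the mode.
  const std::vector<GaussianConditional::shared_ptr> components{
      GaussianConditional::sharedMeanAndStddev(X(0), Vector1(-4.0), 1.0),
      GaussianConditional::sharedMeanAndStddev(X(0), Vector1(4.0), 2.0)};
  hbn.emplace_shared<HybridGaussianConditional>(KeyVector{X(0)}, KeyVector{},
                                                DiscreteKeys{mode}, components);

  // p(m): a 70/30 discrete prior on the mode.
  hbn.emplace_shared<DiscreteConditional>(mode, "7/3");

  // Keep only the most probable leaf; the other component inside the
  // HybridGaussianConditional is set to nullptr, as the comment above says.
  const HybridBayesNet pruned = hbn.prune(1);
  pruned.print("pruned:\n");
  return 0;
}
```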
@@ -79,7 +79,7 @@ class GTSAM_EXPORT HybridBayesNet : public BayesNet<HybridConditional> {
    *
    * Example:
    *   auto shared_ptr_to_a_conditional =
-   *       std::make_shared<GaussianMixture>(...);
+   *       std::make_shared<HybridGaussianConditional>(...);
    *   hbn.push_back(shared_ptr_to_a_conditional);
    */
   void push_back(HybridConditional &&conditional) {

@@ -106,7 +106,7 @@ class GTSAM_EXPORT HybridBayesNet : public BayesNet<HybridConditional> {
    * Preferred: Emplace a conditional directly using arguments.
    *
    * Examples:
-   *   hbn.emplace_shared<GaussianMixture>(...)));
+   *   hbn.emplace_shared<HybridGaussianConditional>(...)));
    *   hbn.emplace_shared<GaussianConditional>(...)));
    *   hbn.emplace_shared<DiscreteConditional>(...)));
    */
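Both documented styles, sketched on a toy model (the keys, vectors, and `4/6` prior are illustrative, not from the PR):

```cpp
#include <gtsam/discrete/DiscreteConditional.h>
#include <gtsam/hybrid/HybridBayesNet.h>
#include <gtsam/hybrid/HybridGaussianConditional.h>
#include <gtsam/inference/Symbol.h>

#include <vector>

using namespace gtsam;
using symbol_shorthand::M;
using symbol_shorthand::X;

int main() {
  const DiscreteKey mode(M(0), 2);
  const std::vector<GaussianConditional::shared_ptr> components{
      GaussianConditional::sharedMeanAndStddev(X(0), Vector1(0.0), 1.0),
      GaussianConditional::sharedMeanAndStddev(X(0), Vector1(1.0), 1.0)};

  HybridBayesNet hbn;

  // Style 1: push_back an already-constructed shared_ptr; it gets wrapped in
  // a type-erased HybridConditional.
  auto shared_ptr_to_a_conditional = std::make_shared<HybridGaussianConditional>(
      KeyVector{X(0)}, KeyVector{}, DiscreteKeys{mode}, components);
  hbn.push_back(shared_ptr_to_a_conditional);

  // Style 2 (preferred): emplace the conditional directly from its arguments.
  hbn.emplace_shared<DiscreteConditional>(mode, "4/6");

  hbn.print("hbn:\n");
  return 0;
}
```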
@@ -55,7 +55,7 @@ HybridConditional::HybridConditional(
 
 /* ************************************************************************ */
 HybridConditional::HybridConditional(
-    const std::shared_ptr<GaussianMixture> &gaussianMixture)
+    const std::shared_ptr<HybridGaussianConditional> &gaussianMixture)
     : BaseFactor(KeyVector(gaussianMixture->keys().begin(),
                            gaussianMixture->keys().begin() +
                                gaussianMixture->nrContinuous()),

@@ -157,10 +157,10 @@ double HybridConditional::logNormalizationConstant() const {
     return gc->logNormalizationConstant();
   }
   if (auto gm = asMixture()) {
     return gm->logNormalizationConstant();  // 0.0!
   }
   if (auto dc = asDiscrete()) {
     return dc->logNormalizationConstant();  // 0.0!
   }
   throw std::runtime_error(
       "HybridConditional::logProbability: conditional type not handled");

@@ -18,8 +18,8 @@
 #pragma once
 
 #include <gtsam/discrete/DiscreteConditional.h>
-#include <gtsam/hybrid/GaussianMixture.h>
 #include <gtsam/hybrid/HybridFactor.h>
+#include <gtsam/hybrid/HybridGaussianConditional.h>
 #include <gtsam/hybrid/HybridGaussianFactorGraph.h>
 #include <gtsam/inference/Conditional.h>
 #include <gtsam/inference/Key.h>

@@ -39,7 +39,7 @@ namespace gtsam {
 * As a type-erased variant of:
 * - DiscreteConditional
 * - GaussianConditional
-* - GaussianMixture
+* - HybridGaussianConditional
 *
 * The reason why this is important is that `Conditional<T>` is a CRTP class.
 * CRTP is static polymorphism such that all CRTP classes, while bearing the

@@ -127,7 +127,8 @@ class GTSAM_EXPORT HybridConditional
    * @param gaussianMixture Gaussian Mixture Conditional used to create the
    * HybridConditional.
    */
-  HybridConditional(const std::shared_ptr<GaussianMixture>& gaussianMixture);
+  HybridConditional(
+      const std::shared_ptr<HybridGaussianConditional>& gaussianMixture);
 
   /// @}
   /// @name Testable

@@ -146,12 +147,12 @@ class GTSAM_EXPORT HybridConditional
   /// @{
 
   /**
-   * @brief Return HybridConditional as a GaussianMixture
+   * @brief Return HybridConditional as a HybridGaussianConditional
    * @return nullptr if not a mixture
-   * @return GaussianMixture::shared_ptr otherwise
+   * @return HybridGaussianConditional::shared_ptr otherwise
    */
-  GaussianMixture::shared_ptr asMixture() const {
-    return std::dynamic_pointer_cast<GaussianMixture>(inner_);
+  HybridGaussianConditional::shared_ptr asMixture() const {
+    return std::dynamic_pointer_cast<HybridGaussianConditional>(inner_);
   }
 
   /**
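Usage of the renamed accessor, directly following the signatures above (the `asDiscrete` branch also appears elsewhere in this diff):

```cpp
#include <gtsam/hybrid/HybridConditional.h>

#include <iostream>

using namespace gtsam;

// Given a type-erased HybridConditional, recover the concrete conditional.
void inspect(const HybridConditional& hc) {
  if (auto hgc = hc.asMixture()) {
    // hgc is a HybridGaussianConditional::shared_ptr.
    std::cout << "hybrid Gaussian conditional with " << hgc->nrComponents()
              << " components\n";
  } else if (auto dc = hc.asDiscrete()) {
    std::cout << "purely discrete conditional\n";
  } else {
    std::cout << "some other conditional type\n";
  }
}
```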
@@ -222,8 +223,10 @@ class GTSAM_EXPORT HybridConditional
       boost::serialization::void_cast_register<GaussianConditional, Factor>(
           static_cast<GaussianConditional*>(NULL), static_cast<Factor*>(NULL));
     } else {
-      boost::serialization::void_cast_register<GaussianMixture, Factor>(
-          static_cast<GaussianMixture*>(NULL), static_cast<Factor*>(NULL));
+      boost::serialization::void_cast_register<HybridGaussianConditional,
+                                               Factor>(
+          static_cast<HybridGaussianConditional*>(NULL),
+          static_cast<Factor*>(NULL));
     }
   }
 #endif

@@ -35,8 +35,8 @@ class GTSAM_EXPORT HybridEliminationTree
 
  public:
   typedef EliminationTree<HybridBayesNet, HybridGaussianFactorGraph>
       Base;  ///< Base class
   typedef HybridEliminationTree This;  ///< This class
   typedef std::shared_ptr<This> shared_ptr;  ///< Shared pointer to this class
 
   /// @name Constructors

@@ -45,9 +45,9 @@ DiscreteKeys CollectDiscreteKeys(const DiscreteKeys &key1,
 * Base class for *truly* hybrid probabilistic factors
 *
 * Examples:
-* - MixtureFactor
-* - GaussianMixtureFactor
-* - GaussianMixture
+* - HybridNonlinearFactor
+* - HybridGaussianFactor
+* - HybridGaussianConditional
 *
 * @ingroup hybrid
 */

@@ -10,7 +10,7 @@
 * -------------------------------------------------------------------------- */
 
 /**
- * @file GaussianMixture.cpp
+ * @file HybridGaussianConditional.cpp
  * @brief A hybrid conditional in the Conditional Linear Gaussian scheme
 * @author Fan Jiang
 * @author Varun Agrawal

@@ -20,8 +20,8 @@
 
 #include <gtsam/base/utilities.h>
 #include <gtsam/discrete/DiscreteValues.h>
-#include <gtsam/hybrid/GaussianMixture.h>
-#include <gtsam/hybrid/GaussianMixtureFactor.h>
+#include <gtsam/hybrid/HybridGaussianConditional.h>
+#include <gtsam/hybrid/HybridGaussianFactor.h>
 #include <gtsam/hybrid/HybridValues.h>
 #include <gtsam/inference/Conditional-inst.h>
 #include <gtsam/linear/GaussianBayesNet.h>

@@ -29,10 +29,10 @@
 
 namespace gtsam {
 
-GaussianMixture::GaussianMixture(
+HybridGaussianConditional::HybridGaussianConditional(
     const KeyVector &continuousFrontals, const KeyVector &continuousParents,
     const DiscreteKeys &discreteParents,
-    const GaussianMixture::Conditionals &conditionals)
+    const HybridGaussianConditional::Conditionals &conditionals)
     : BaseFactor(CollectKeys(continuousFrontals, continuousParents),
                  discreteParents),
       BaseConditional(continuousFrontals.size()),

@@ -50,30 +50,33 @@ GaussianMixture::GaussianMixture(
 }
 
 /* *******************************************************************************/
-const GaussianMixture::Conditionals &GaussianMixture::conditionals() const {
+const HybridGaussianConditional::Conditionals &
+HybridGaussianConditional::conditionals() const {
   return conditionals_;
 }
 
 /* *******************************************************************************/
-GaussianMixture::GaussianMixture(
+HybridGaussianConditional::HybridGaussianConditional(
     KeyVector &&continuousFrontals, KeyVector &&continuousParents,
     DiscreteKeys &&discreteParents,
     std::vector<GaussianConditional::shared_ptr> &&conditionals)
-    : GaussianMixture(continuousFrontals, continuousParents, discreteParents,
-                      Conditionals(discreteParents, conditionals)) {}
+    : HybridGaussianConditional(continuousFrontals, continuousParents,
+                                discreteParents,
+                                Conditionals(discreteParents, conditionals)) {}
 
 /* *******************************************************************************/
-GaussianMixture::GaussianMixture(
+HybridGaussianConditional::HybridGaussianConditional(
     const KeyVector &continuousFrontals, const KeyVector &continuousParents,
     const DiscreteKeys &discreteParents,
     const std::vector<GaussianConditional::shared_ptr> &conditionals)
-    : GaussianMixture(continuousFrontals, continuousParents, discreteParents,
-                      Conditionals(discreteParents, conditionals)) {}
+    : HybridGaussianConditional(continuousFrontals, continuousParents,
+                                discreteParents,
+                                Conditionals(discreteParents, conditionals)) {}
 
 /* *******************************************************************************/
-// TODO(dellaert): This is copy/paste: GaussianMixture should be derived from
-// GaussianMixtureFactor, no?
-GaussianFactorGraphTree GaussianMixture::add(
+// TODO(dellaert): This is copy/paste: HybridGaussianConditional should be
+// derived from HybridGaussianFactor, no?
+GaussianFactorGraphTree HybridGaussianConditional::add(
     const GaussianFactorGraphTree &sum) const {
   using Y = GaussianFactorGraph;
   auto add = [](const Y &graph1, const Y &graph2) {

@@ -86,7 +89,8 @@ GaussianFactorGraphTree GaussianMixture::add(
 }
 
 /* *******************************************************************************/
-GaussianFactorGraphTree GaussianMixture::asGaussianFactorGraphTree() const {
+GaussianFactorGraphTree HybridGaussianConditional::asGaussianFactorGraphTree()
+    const {
   auto wrap = [this](const GaussianConditional::shared_ptr &gc) {
     // First check if conditional has not been pruned
     if (gc) {

@@ -109,7 +113,7 @@ GaussianFactorGraphTree GaussianMixture::asGaussianFactorGraphTree() const {
 }
 
 /* *******************************************************************************/
-size_t GaussianMixture::nrComponents() const {
+size_t HybridGaussianConditional::nrComponents() const {
   size_t total = 0;
   conditionals_.visit([&total](const GaussianFactor::shared_ptr &node) {
     if (node) total += 1;

@@ -118,7 +122,7 @@ size_t GaussianMixture::nrComponents() const {
 }
 
 /* *******************************************************************************/
-GaussianConditional::shared_ptr GaussianMixture::operator()(
+GaussianConditional::shared_ptr HybridGaussianConditional::operator()(
     const DiscreteValues &discreteValues) const {
   auto &ptr = conditionals_(discreteValues);
   if (!ptr) return nullptr;
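A small sketch of the component lookup implemented above; keys and numbers are illustrative only:

```cpp
#include <gtsam/hybrid/HybridGaussianConditional.h>
#include <gtsam/inference/Symbol.h>

#include <vector>

using namespace gtsam;
using symbol_shorthand::M;
using symbol_shorthand::X;

int main() {
  const DiscreteKey mode(M(0), 2);
  const std::vector<GaussianConditional::shared_ptr> components{
      GaussianConditional::sharedMeanAndStddev(X(0), Vector1(-4.0), 1.0),
      GaussianConditional::sharedMeanAndStddev(X(0), Vector1(4.0), 2.0)};
  const HybridGaussianConditional hgc({X(0)}, {}, {mode}, components);

  // Selecting m=1 returns the corresponding GaussianConditional component.
  DiscreteValues assignment;
  assignment[M(0)] = 1;
  const GaussianConditional::shared_ptr chosen = hgc(assignment);
  chosen->print("p(x0 | m=1):\n");
  return 0;
}
```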
@@ -127,11 +131,12 @@ GaussianConditional::shared_ptr GaussianMixture::operator()(
     return conditional;
   else
     throw std::logic_error(
-        "A GaussianMixture unexpectedly contained a non-conditional");
+        "A HybridGaussianConditional unexpectedly contained a non-conditional");
 }
 
 /* *******************************************************************************/
-bool GaussianMixture::equals(const HybridFactor &lf, double tol) const {
+bool HybridGaussianConditional::equals(const HybridFactor &lf,
+                                       double tol) const {
   const This *e = dynamic_cast<const This *>(&lf);
   if (e == nullptr) return false;
 

@@ -149,8 +154,8 @@ bool GaussianMixture::equals(const HybridFactor &lf, double tol) const {
 }
 
 /* *******************************************************************************/
-void GaussianMixture::print(const std::string &s,
+void HybridGaussianConditional::print(const std::string &s,
                             const KeyFormatter &formatter) const {
   std::cout << (s.empty() ? "" : s + "\n");
   if (isContinuous()) std::cout << "Continuous ";
   if (isDiscrete()) std::cout << "Discrete ";

@@ -177,7 +182,7 @@ void GaussianMixture::print(const std::string &s,
 }
 
 /* ************************************************************************* */
-KeyVector GaussianMixture::continuousParents() const {
+KeyVector HybridGaussianConditional::continuousParents() const {
   // Get all parent keys:
   const auto range = parents();
   KeyVector continuousParentKeys(range.begin(), range.end());

@@ -193,7 +198,8 @@ KeyVector GaussianMixture::continuousParents() const {
 }
 
 /* ************************************************************************* */
-bool GaussianMixture::allFrontalsGiven(const VectorValues &given) const {
+bool HybridGaussianConditional::allFrontalsGiven(
+    const VectorValues &given) const {
   for (auto &&kv : given) {
     if (given.find(kv.first) == given.end()) {
       return false;

@@ -203,16 +209,17 @@ bool GaussianMixture::allFrontalsGiven(const VectorValues &given) const {
 }
 
 /* ************************************************************************* */
-std::shared_ptr<GaussianMixtureFactor> GaussianMixture::likelihood(
+std::shared_ptr<HybridGaussianFactor> HybridGaussianConditional::likelihood(
     const VectorValues &given) const {
   if (!allFrontalsGiven(given)) {
     throw std::runtime_error(
-        "GaussianMixture::likelihood: given values are missing some frontals.");
+        "HybridGaussianConditional::likelihood: given values are missing some "
+        "frontals.");
   }
 
   const DiscreteKeys discreteParentKeys = discreteKeys();
   const KeyVector continuousParentKeys = continuousParents();
-  const GaussianMixtureFactor::Factors likelihoods(
+  const HybridGaussianFactor::Factors likelihoods(
       conditionals_, [&](const GaussianConditional::shared_ptr &conditional) {
         const auto likelihood_m = conditional->likelihood(given);
         const double Cgm_Kgcm =

@@ -231,7 +238,7 @@ std::shared_ptr<GaussianMixtureFactor> GaussianMixture::likelihood(
           return std::make_shared<JacobianFactor>(gfg);
         }
       });
-  return std::make_shared<GaussianMixtureFactor>(
+  return std::make_shared<HybridGaussianFactor>(
       continuousParentKeys, discreteParentKeys, likelihoods);
 }
 

@@ -252,7 +259,7 @@ std::set<DiscreteKey> DiscreteKeysAsSet(const DiscreteKeys &discreteKeys) {
 */
 std::function<GaussianConditional::shared_ptr(
     const Assignment<Key> &, const GaussianConditional::shared_ptr &)>
-GaussianMixture::prunerFunc(const DecisionTreeFactor &discreteProbs) {
+HybridGaussianConditional::prunerFunc(const DecisionTreeFactor &discreteProbs) {
   // Get the discrete keys as sets for the decision tree
   // and the gaussian mixture.
   auto discreteProbsKeySet = DiscreteKeysAsSet(discreteProbs.discreteKeys());

@@ -303,7 +310,7 @@ GaussianMixture::prunerFunc(const DecisionTreeFactor &discreteProbs) {
 }
 
 /* *******************************************************************************/
-void GaussianMixture::prune(const DecisionTreeFactor &discreteProbs) {
+void HybridGaussianConditional::prune(const DecisionTreeFactor &discreteProbs) {
   // Functional which loops over all assignments and create a set of
   // GaussianConditionals
   auto pruner = prunerFunc(discreteProbs);

@@ -313,7 +320,7 @@ void GaussianMixture::prune(const DecisionTreeFactor &discreteProbs) {
 }
 
 /* *******************************************************************************/
-AlgebraicDecisionTree<Key> GaussianMixture::logProbability(
+AlgebraicDecisionTree<Key> HybridGaussianConditional::logProbability(
     const VectorValues &continuousValues) const {
   // functor to calculate (double) logProbability value from
   // GaussianConditional.

@@ -331,7 +338,7 @@ AlgebraicDecisionTree<Key> GaussianMixture::logProbability(
 }
 
 /* ************************************************************************* */
-double GaussianMixture::conditionalError(
+double HybridGaussianConditional::conditionalError(
     const GaussianConditional::shared_ptr &conditional,
     const VectorValues &continuousValues) const {
   // Check if valid pointer

@@ -348,7 +355,7 @@ double GaussianMixture::conditionalError(
 }
 
 /* *******************************************************************************/
-AlgebraicDecisionTree<Key> GaussianMixture::errorTree(
+AlgebraicDecisionTree<Key> HybridGaussianConditional::errorTree(
     const VectorValues &continuousValues) const {
   auto errorFunc = [&](const GaussianConditional::shared_ptr &conditional) {
     return conditionalError(conditional, continuousValues);

@@ -358,20 +365,21 @@ AlgebraicDecisionTree<Key> GaussianMixture::errorTree(
 }
 
 /* *******************************************************************************/
-double GaussianMixture::error(const HybridValues &values) const {
+double HybridGaussianConditional::error(const HybridValues &values) const {
   // Directly index to get the conditional, no need to build the whole tree.
   auto conditional = conditionals_(values.discrete());
   return conditionalError(conditional, values.continuous());
 }
 
 /* *******************************************************************************/
-double GaussianMixture::logProbability(const HybridValues &values) const {
+double HybridGaussianConditional::logProbability(
+    const HybridValues &values) const {
   auto conditional = conditionals_(values.discrete());
   return conditional->logProbability(values.continuous());
 }
 
 /* *******************************************************************************/
-double GaussianMixture::evaluate(const HybridValues &values) const {
+double HybridGaussianConditional::evaluate(const HybridValues &values) const {
   auto conditional = conditionals_(values.discrete());
   return conditional->evaluate(values.continuous());
 }
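How the three accessors above are typically called, on a made-up model (keys and values illustrative):

```cpp
#include <gtsam/hybrid/HybridGaussianConditional.h>
#include <gtsam/hybrid/HybridValues.h>
#include <gtsam/inference/Symbol.h>

#include <iostream>
#include <vector>

using namespace gtsam;
using symbol_shorthand::M;
using symbol_shorthand::X;

int main() {
  const DiscreteKey mode(M(0), 2);
  const std::vector<GaussianConditional::shared_ptr> components{
      GaussianConditional::sharedMeanAndStddev(X(0), Vector1(-4.0), 1.0),
      GaussianConditional::sharedMeanAndStddev(X(0), Vector1(4.0), 2.0)};
  const HybridGaussianConditional hgc({X(0)}, {}, {mode}, components);

  // A HybridValues bundles continuous values and a discrete assignment.
  VectorValues continuous;
  continuous.insert(X(0), Vector1(3.0));
  DiscreteValues discrete;
  discrete[M(0)] = 1;
  const HybridValues hv(continuous, discrete);

  // error(), evaluate() and logProbability() dispatch to the component
  // selected by the discrete assignment, as implemented above.
  std::cout << "error   = " << hgc.error(hv) << "\n";
  std::cout << "density = " << hgc.evaluate(hv) << "\n";
  std::cout << "logProb = " << hgc.logProbability(hv) << "\n";
  return 0;
}
```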
@@ -10,7 +10,7 @@
 * -------------------------------------------------------------------------- */
 
 /**
- * @file GaussianMixture.h
+ * @file HybridGaussianConditional.h
  * @brief A hybrid conditional in the Conditional Linear Gaussian scheme
 * @author Fan Jiang
 * @author Varun Agrawal

@@ -23,8 +23,8 @@
 #include <gtsam/discrete/DecisionTree.h>
 #include <gtsam/discrete/DecisionTreeFactor.h>
 #include <gtsam/discrete/DiscreteKey.h>
-#include <gtsam/hybrid/GaussianMixtureFactor.h>
 #include <gtsam/hybrid/HybridFactor.h>
+#include <gtsam/hybrid/HybridGaussianFactor.h>
 #include <gtsam/inference/Conditional.h>
 #include <gtsam/linear/GaussianConditional.h>
 

@@ -50,14 +50,14 @@ class HybridValues;
 *
 * @ingroup hybrid
 */
-class GTSAM_EXPORT GaussianMixture
+class GTSAM_EXPORT HybridGaussianConditional
     : public HybridFactor,
-      public Conditional<HybridFactor, GaussianMixture> {
+      public Conditional<HybridFactor, HybridGaussianConditional> {
  public:
-  using This = GaussianMixture;
-  using shared_ptr = std::shared_ptr<GaussianMixture>;
+  using This = HybridGaussianConditional;
+  using shared_ptr = std::shared_ptr<HybridGaussianConditional>;
   using BaseFactor = HybridFactor;
-  using BaseConditional = Conditional<HybridFactor, GaussianMixture>;
+  using BaseConditional = Conditional<HybridFactor, HybridGaussianConditional>;
 
   /// typedef for Decision Tree of Gaussian Conditionals
   using Conditionals = DecisionTree<Key, GaussianConditional::shared_ptr>;

@@ -67,7 +67,7 @@ class GTSAM_EXPORT GaussianMixture
   double logConstant_;  ///< log of the normalization constant.
 
   /**
-   * @brief Convert a GaussianMixture of conditionals into
+   * @brief Convert a HybridGaussianConditional of conditionals into
    * a DecisionTree of Gaussian factor graphs.
    */
   GaussianFactorGraphTree asGaussianFactorGraphTree() const;

@@ -88,10 +88,10 @@ class GTSAM_EXPORT GaussianMixture
   /// @{
 
   /// Default constructor, mainly for serialization.
-  GaussianMixture() = default;
+  HybridGaussianConditional() = default;
 
   /**
-   * @brief Construct a new GaussianMixture object.
+   * @brief Construct a new HybridGaussianConditional object.
    *
    * @param continuousFrontals the continuous frontals.
    * @param continuousParents the continuous parents.

@@ -101,10 +101,10 @@ class GTSAM_EXPORT GaussianMixture
    * cardinality of the DiscreteKeys in discreteParents, since the
    * discreteParents will be used as the labels in the decision tree.
    */
-  GaussianMixture(const KeyVector &continuousFrontals,
+  HybridGaussianConditional(const KeyVector &continuousFrontals,
                   const KeyVector &continuousParents,
                   const DiscreteKeys &discreteParents,
                   const Conditionals &conditionals);
 
   /**
    * @brief Make a Gaussian Mixture from a list of Gaussian conditionals

@@ -114,9 +114,10 @@ class GTSAM_EXPORT GaussianMixture
    * @param discreteParents Discrete parents variables
    * @param conditionals List of conditionals
    */
-  GaussianMixture(KeyVector &&continuousFrontals, KeyVector &&continuousParents,
-                  DiscreteKeys &&discreteParents,
-                  std::vector<GaussianConditional::shared_ptr> &&conditionals);
+  HybridGaussianConditional(
+      KeyVector &&continuousFrontals, KeyVector &&continuousParents,
+      DiscreteKeys &&discreteParents,
+      std::vector<GaussianConditional::shared_ptr> &&conditionals);
 
   /**
    * @brief Make a Gaussian Mixture from a list of Gaussian conditionals

@@ -126,7 +127,7 @@ class GTSAM_EXPORT GaussianMixture
    * @param discreteParents Discrete parents variables
    * @param conditionals List of conditionals
    */
-  GaussianMixture(
+  HybridGaussianConditional(
      const KeyVector &continuousFrontals, const KeyVector &continuousParents,
      const DiscreteKeys &discreteParents,
      const std::vector<GaussianConditional::shared_ptr> &conditionals);

@@ -140,7 +141,7 @@ class GTSAM_EXPORT GaussianMixture
 
   /// Print utility
   void print(
-      const std::string &s = "GaussianMixture\n",
+      const std::string &s = "HybridGaussianConditional\n",
      const KeyFormatter &formatter = DefaultKeyFormatter) const override;
 
   /// @}

@@ -165,14 +166,14 @@ class GTSAM_EXPORT GaussianMixture
    * Create a likelihood factor for a Gaussian mixture, return a Mixture factor
    * on the parents.
    */
-  std::shared_ptr<GaussianMixtureFactor> likelihood(
+  std::shared_ptr<HybridGaussianFactor> likelihood(
      const VectorValues &given) const;
 
   /// Getter for the underlying Conditionals DecisionTree
   const Conditionals &conditionals() const;
 
   /**
-   * @brief Compute logProbability of the GaussianMixture as a tree.
+   * @brief Compute logProbability of the HybridGaussianConditional as a tree.
    *
    * @param continuousValues The continuous VectorValues.
    * @return AlgebraicDecisionTree<Key> A decision tree with the same keys

@@ -209,7 +210,7 @@ class GTSAM_EXPORT GaussianMixture
   double error(const HybridValues &values) const override;
 
   /**
-   * @brief Compute error of the GaussianMixture as a tree.
+   * @brief Compute error of the HybridGaussianConditional as a tree.
    *
    * @param continuousValues The continuous VectorValues.
    * @return AlgebraicDecisionTree<Key> A decision tree on the discrete keys
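A sketch of the tree-valued API mentioned in the doc comments above: `errorTree` and the `VectorValues` overload of `logProbability` return one value per discrete assignment, organized as an `AlgebraicDecisionTree` over the discrete keys (keys and values are illustrative):

```cpp
#include <gtsam/discrete/AlgebraicDecisionTree.h>
#include <gtsam/hybrid/HybridGaussianConditional.h>
#include <gtsam/inference/Symbol.h>

#include <iostream>
#include <vector>

using namespace gtsam;
using symbol_shorthand::M;
using symbol_shorthand::X;

int main() {
  const DiscreteKey mode(M(0), 2);
  const std::vector<GaussianConditional::shared_ptr> components{
      GaussianConditional::sharedMeanAndStddev(X(0), Vector1(-4.0), 1.0),
      GaussianConditional::sharedMeanAndStddev(X(0), Vector1(4.0), 2.0)};
  const HybridGaussianConditional hgc({X(0)}, {}, {mode}, components);

  VectorValues continuous;
  continuous.insert(X(0), Vector1(0.0));

  // One value per discrete assignment, as a decision tree on M(0).
  const AlgebraicDecisionTree<Key> errors = hgc.errorTree(continuous);
  const AlgebraicDecisionTree<Key> logProbs = hgc.logProbability(continuous);

  DiscreteValues m0, m1;
  m0[M(0)] = 0;
  m1[M(0)] = 1;
  std::cout << "error(m=0)=" << errors(m0) << " error(m=1)=" << errors(m1) << "\n";
  std::cout << "logP(m=0)=" << logProbs(m0) << " logP(m=1)=" << logProbs(m1) << "\n";
  return 0;
}
```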
@@ -277,6 +278,7 @@ std::set<DiscreteKey> DiscreteKeysAsSet(const DiscreteKeys &discreteKeys);
 
 // traits
 template <>
-struct traits<GaussianMixture> : public Testable<GaussianMixture> {};
+struct traits<HybridGaussianConditional>
+    : public Testable<HybridGaussianConditional> {};
 
 }  // namespace gtsam

@@ -10,7 +10,7 @@
 * -------------------------------------------------------------------------- */
 
 /**
- * @file GaussianMixtureFactor.cpp
+ * @file HybridGaussianFactor.cpp
  * @brief A set of Gaussian factors indexed by a set of discrete keys.
 * @author Fan Jiang
 * @author Varun Agrawal

@@ -21,7 +21,7 @@
 #include <gtsam/base/utilities.h>
 #include <gtsam/discrete/DecisionTree-inl.h>
 #include <gtsam/discrete/DecisionTree.h>
-#include <gtsam/hybrid/GaussianMixtureFactor.h>
+#include <gtsam/hybrid/HybridGaussianFactor.h>
 #include <gtsam/hybrid/HybridValues.h>
 #include <gtsam/linear/GaussianFactor.h>
 #include <gtsam/linear/GaussianFactorGraph.h>

@@ -29,13 +29,13 @@
 namespace gtsam {
 
 /* *******************************************************************************/
-GaussianMixtureFactor::GaussianMixtureFactor(const KeyVector &continuousKeys,
+HybridGaussianFactor::HybridGaussianFactor(const KeyVector &continuousKeys,
                                            const DiscreteKeys &discreteKeys,
                                            const Factors &factors)
     : Base(continuousKeys, discreteKeys), factors_(factors) {}
 
 /* *******************************************************************************/
-bool GaussianMixtureFactor::equals(const HybridFactor &lf, double tol) const {
+bool HybridGaussianFactor::equals(const HybridFactor &lf, double tol) const {
   const This *e = dynamic_cast<const This *>(&lf);
   if (e == nullptr) return false;
 

@@ -52,10 +52,10 @@ bool GaussianMixtureFactor::equals(const HybridFactor &lf, double tol) const {
 }
 
 /* *******************************************************************************/
-void GaussianMixtureFactor::print(const std::string &s,
+void HybridGaussianFactor::print(const std::string &s,
                                   const KeyFormatter &formatter) const {
   std::cout << (s.empty() ? "" : s + "\n");
-  std::cout << "GaussianMixtureFactor" << std::endl;
+  std::cout << "HybridGaussianFactor" << std::endl;
   HybridFactor::print("", formatter);
   std::cout << "{\n";
   if (factors_.empty()) {

@@ -78,13 +78,13 @@ void GaussianMixtureFactor::print(const std::string &s,
 }
 
 /* *******************************************************************************/
-GaussianMixtureFactor::sharedFactor GaussianMixtureFactor::operator()(
+HybridGaussianFactor::sharedFactor HybridGaussianFactor::operator()(
     const DiscreteValues &assignment) const {
   return factors_(assignment);
 }
 
 /* *******************************************************************************/
-GaussianFactorGraphTree GaussianMixtureFactor::add(
+GaussianFactorGraphTree HybridGaussianFactor::add(
     const GaussianFactorGraphTree &sum) const {
   using Y = GaussianFactorGraph;
   auto add = [](const Y &graph1, const Y &graph2) {

@@ -97,14 +97,14 @@ GaussianFactorGraphTree GaussianMixtureFactor::add(
 }
 
 /* *******************************************************************************/
-GaussianFactorGraphTree GaussianMixtureFactor::asGaussianFactorGraphTree()
+GaussianFactorGraphTree HybridGaussianFactor::asGaussianFactorGraphTree()
     const {
   auto wrap = [](const sharedFactor &gf) { return GaussianFactorGraph{gf}; };
   return {factors_, wrap};
 }
 
 /* *******************************************************************************/
-AlgebraicDecisionTree<Key> GaussianMixtureFactor::errorTree(
+AlgebraicDecisionTree<Key> HybridGaussianFactor::errorTree(
     const VectorValues &continuousValues) const {
   // functor to convert from sharedFactor to double error value.
   auto errorFunc = [&continuousValues](const sharedFactor &gf) {

@@ -115,7 +115,7 @@ AlgebraicDecisionTree<Key> GaussianMixtureFactor::errorTree(
 }
 
 /* *******************************************************************************/
-double GaussianMixtureFactor::error(const HybridValues &values) const {
+double HybridGaussianFactor::error(const HybridValues &values) const {
   const sharedFactor gf = factors_(values.discrete());
   return gf->error(values.continuous());
 }

@@ -10,7 +10,7 @@
 * -------------------------------------------------------------------------- */
 
 /**
- * @file GaussianMixtureFactor.h
+ * @file HybridGaussianFactor.h
  * @brief A set of GaussianFactors, indexed by a set of discrete keys.
 * @author Fan Jiang
 * @author Varun Agrawal

@@ -44,10 +44,10 @@ class VectorValues;
 *
 * @ingroup hybrid
 */
-class GTSAM_EXPORT GaussianMixtureFactor : public HybridFactor {
+class GTSAM_EXPORT HybridGaussianFactor : public HybridFactor {
  public:
   using Base = HybridFactor;
-  using This = GaussianMixtureFactor;
+  using This = HybridGaussianFactor;
   using shared_ptr = std::shared_ptr<This>;
 
   using sharedFactor = std::shared_ptr<GaussianFactor>;

@@ -72,7 +72,7 @@ class GTSAM_EXPORT GaussianMixtureFactor : public HybridFactor {
   /// @{
 
   /// Default constructor, mainly for serialization.
-  GaussianMixtureFactor() = default;
+  HybridGaussianFactor() = default;
 
   /**
    * @brief Construct a new Gaussian mixture factor.

@@ -83,23 +83,23 @@ class GTSAM_EXPORT GaussianMixtureFactor : public HybridFactor {
    * @param factors The decision tree of Gaussian factors stored
    * as the mixture density.
    */
-  GaussianMixtureFactor(const KeyVector &continuousKeys,
+  HybridGaussianFactor(const KeyVector &continuousKeys,
                         const DiscreteKeys &discreteKeys,
                         const Factors &factors);
 
   /**
-   * @brief Construct a new GaussianMixtureFactor object using a vector of
+   * @brief Construct a new HybridGaussianFactor object using a vector of
    * GaussianFactor shared pointers.
    *
    * @param continuousKeys Vector of keys for continuous factors.
    * @param discreteKeys Vector of discrete keys.
    * @param factors Vector of gaussian factor shared pointers.
    */
-  GaussianMixtureFactor(const KeyVector &continuousKeys,
+  HybridGaussianFactor(const KeyVector &continuousKeys,
                         const DiscreteKeys &discreteKeys,
                         const std::vector<sharedFactor> &factors)
-      : GaussianMixtureFactor(continuousKeys, discreteKeys,
+      : HybridGaussianFactor(continuousKeys, discreteKeys,
                               Factors(discreteKeys, factors)) {}
 
   /// @}
   /// @name Testable
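A sketch of the vector-based constructor above, together with `operator()` and `error()` from the .cpp changes earlier in this diff; the Jacobian factors, keys, and noise sigmas are made up for illustration:

```cpp
#include <gtsam/hybrid/HybridGaussianFactor.h>
#include <gtsam/hybrid/HybridValues.h>
#include <gtsam/inference/Symbol.h>
#include <gtsam/linear/JacobianFactor.h>

#include <iostream>
#include <vector>

using namespace gtsam;
using symbol_shorthand::M;
using symbol_shorthand::X;

int main() {
  const DiscreteKey mode(M(0), 2);

  // Two unary Gaussian factors on x0, one per mode, with different noise.
  const auto f0 = std::make_shared<JacobianFactor>(
      X(0), I_1x1, Vector1(0.0), noiseModel::Isotropic::Sigma(1, 1.0));
  const auto f1 = std::make_shared<JacobianFactor>(
      X(0), I_1x1, Vector1(0.0), noiseModel::Isotropic::Sigma(1, 3.0));
  const std::vector<GaussianFactor::shared_ptr> factors{f0, f1};

  const HybridGaussianFactor hgf({X(0)}, {mode}, factors);

  // operator() picks the Gaussian factor for a discrete assignment ...
  DiscreteValues assignment;
  assignment[M(0)] = 1;
  const GaussianFactor::shared_ptr chosen = hgf(assignment);

  // ... and error() evaluates that factor at the continuous values.
  VectorValues continuous;
  continuous.insert(X(0), Vector1(2.0));
  std::cout << "error = " << hgf.error(HybridValues(continuous, assignment))
            << "\n";
  return 0;
}
```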
@ -128,7 +128,7 @@ class GTSAM_EXPORT GaussianMixtureFactor : public HybridFactor {
|
||||||
GaussianFactorGraphTree add(const GaussianFactorGraphTree &sum) const;
|
GaussianFactorGraphTree add(const GaussianFactorGraphTree &sum) const;
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* @brief Compute error of the GaussianMixtureFactor as a tree.
|
* @brief Compute error of the HybridGaussianFactor as a tree.
|
||||||
*
|
*
|
||||||
* @param continuousValues The continuous VectorValues.
|
* @param continuousValues The continuous VectorValues.
|
||||||
* @return AlgebraicDecisionTree<Key> A decision tree with the same keys
|
* @return AlgebraicDecisionTree<Key> A decision tree with the same keys
|
||||||
|
|
@ -146,9 +146,9 @@ class GTSAM_EXPORT GaussianMixtureFactor : public HybridFactor {
|
||||||
/// Getter for GaussianFactor decision tree
|
/// Getter for GaussianFactor decision tree
|
||||||
const Factors &factors() const { return factors_; }
|
const Factors &factors() const { return factors_; }
|
||||||
|
|
||||||
/// Add MixtureFactor to a Sum, syntactic sugar.
|
/// Add HybridNonlinearFactor to a Sum, syntactic sugar.
|
||||||
friend GaussianFactorGraphTree &operator+=(
|
friend GaussianFactorGraphTree &operator+=(
|
||||||
GaussianFactorGraphTree &sum, const GaussianMixtureFactor &factor) {
|
GaussianFactorGraphTree &sum, const HybridGaussianFactor &factor) {
|
||||||
sum = factor.add(sum);
|
sum = factor.add(sum);
|
||||||
return sum;
|
return sum;
|
||||||
}
|
}
|
||||||
|
|
@ -168,7 +168,6 @@ class GTSAM_EXPORT GaussianMixtureFactor : public HybridFactor {
|
||||||
|
|
||||||
// traits
|
// traits
|
||||||
template <>
|
template <>
|
||||||
struct traits<GaussianMixtureFactor> : public Testable<GaussianMixtureFactor> {
|
struct traits<HybridGaussianFactor> : public Testable<HybridGaussianFactor> {};
|
||||||
};
|
|
||||||
|
|
||||||
} // namespace gtsam
|
} // namespace gtsam
|
||||||
|
|
@ -23,11 +23,11 @@
|
||||||
#include <gtsam/discrete/DiscreteEliminationTree.h>
|
#include <gtsam/discrete/DiscreteEliminationTree.h>
|
||||||
#include <gtsam/discrete/DiscreteFactorGraph.h>
|
#include <gtsam/discrete/DiscreteFactorGraph.h>
|
||||||
#include <gtsam/discrete/DiscreteJunctionTree.h>
|
#include <gtsam/discrete/DiscreteJunctionTree.h>
|
||||||
#include <gtsam/hybrid/GaussianMixture.h>
|
|
||||||
#include <gtsam/hybrid/GaussianMixtureFactor.h>
|
|
||||||
#include <gtsam/hybrid/HybridConditional.h>
|
#include <gtsam/hybrid/HybridConditional.h>
|
||||||
#include <gtsam/hybrid/HybridEliminationTree.h>
|
#include <gtsam/hybrid/HybridEliminationTree.h>
|
||||||
#include <gtsam/hybrid/HybridFactor.h>
|
#include <gtsam/hybrid/HybridFactor.h>
|
||||||
|
#include <gtsam/hybrid/HybridGaussianConditional.h>
|
||||||
|
+#include <gtsam/hybrid/HybridGaussianFactor.h>
 #include <gtsam/hybrid/HybridGaussianFactorGraph.h>
 #include <gtsam/hybrid/HybridJunctionTree.h>
 #include <gtsam/inference/EliminateableFactorGraph-inst.h>
@@ -92,7 +92,7 @@ void HybridGaussianFactorGraph::printErrors(
     // Clear the stringstream
     ss.str(std::string());
 
-    if (auto gmf = std::dynamic_pointer_cast<GaussianMixtureFactor>(factor)) {
+    if (auto gmf = std::dynamic_pointer_cast<HybridGaussianFactor>(factor)) {
       if (factor == nullptr) {
         std::cout << "nullptr"
                   << "\n";
@@ -178,9 +178,9 @@ GaussianFactorGraphTree HybridGaussianFactorGraph::assembleGraphTree() const {
     // TODO(dellaert): just use a virtual method defined in HybridFactor.
     if (auto gf = dynamic_pointer_cast<GaussianFactor>(f)) {
       result = addGaussian(result, gf);
-    } else if (auto gmf = dynamic_pointer_cast<GaussianMixtureFactor>(f)) {
+    } else if (auto gmf = dynamic_pointer_cast<HybridGaussianFactor>(f)) {
       result = gmf->add(result);
-    } else if (auto gm = dynamic_pointer_cast<GaussianMixture>(f)) {
+    } else if (auto gm = dynamic_pointer_cast<HybridGaussianConditional>(f)) {
       result = gm->add(result);
     } else if (auto hc = dynamic_pointer_cast<HybridConditional>(f)) {
       if (auto gm = hc->asMixture()) {
@@ -258,8 +258,8 @@ discreteElimination(const HybridGaussianFactorGraph &factors,
   for (auto &f : factors) {
     if (auto df = dynamic_pointer_cast<DiscreteFactor>(f)) {
       dfg.push_back(df);
-    } else if (auto gmf = dynamic_pointer_cast<GaussianMixtureFactor>(f)) {
-      // Case where we have a GaussianMixtureFactor with no continuous keys.
+    } else if (auto gmf = dynamic_pointer_cast<HybridGaussianFactor>(f)) {
+      // Case where we have a HybridGaussianFactor with no continuous keys.
       // In this case, compute discrete probabilities.
       auto logProbability =
           [&](const GaussianFactor::shared_ptr &factor) -> double {
@@ -309,7 +309,7 @@ GaussianFactorGraphTree removeEmpty(const GaussianFactorGraphTree &sum) {
 
 /* ************************************************************************ */
 using Result = std::pair<std::shared_ptr<GaussianConditional>,
-                         GaussianMixtureFactor::sharedFactor>;
+                         HybridGaussianFactor::sharedFactor>;
 
 /**
  * Compute the probability p(μ;m) = exp(-error(μ;m)) * sqrt(det(2π Σ_m)
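For reference, the probability named in the comment above, restated as a formula; this only writes out that comment in LaTeX, adding the closing parenthesis it implies, and is not code from the PR:

    p(\mu; m) = \exp\bigl(-\mathrm{error}(\mu; m)\bigr) \, \sqrt{\det(2\pi\,\Sigma_m)}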
@@ -341,9 +341,9 @@ static std::shared_ptr<Factor> createDiscreteFactor(
   return std::make_shared<DecisionTreeFactor>(discreteSeparator, probabilities);
 }
 
-// Create GaussianMixtureFactor on the separator, taking care to correct
+// Create HybridGaussianFactor on the separator, taking care to correct
 // for conditional constants.
-static std::shared_ptr<Factor> createGaussianMixtureFactor(
+static std::shared_ptr<Factor> createHybridGaussianFactor(
     const DecisionTree<Key, Result> &eliminationResults,
     const KeyVector &continuousSeparator,
     const DiscreteKeys &discreteSeparator) {
@@ -362,8 +362,8 @@ static std::shared_ptr<Factor> createGaussianMixtureFactor(
   DecisionTree<Key, GaussianFactor::shared_ptr> newFactors(eliminationResults,
                                                            correct);
 
-  return std::make_shared<GaussianMixtureFactor>(continuousSeparator,
+  return std::make_shared<HybridGaussianFactor>(continuousSeparator,
                                                  discreteSeparator, newFactors);
 }
 
 static std::pair<HybridConditional::shared_ptr, std::shared_ptr<Factor>>
@@ -400,18 +400,18 @@ hybridElimination(const HybridGaussianFactorGraph &factors,
   DecisionTree<Key, Result> eliminationResults(factorGraphTree, eliminate);
 
   // If there are no more continuous parents we create a DiscreteFactor with the
-  // error for each discrete choice. Otherwise, create a GaussianMixtureFactor
+  // error for each discrete choice. Otherwise, create a HybridGaussianFactor
   // on the separator, taking care to correct for conditional constants.
   auto newFactor =
       continuousSeparator.empty()
           ? createDiscreteFactor(eliminationResults, discreteSeparator)
-          : createGaussianMixtureFactor(eliminationResults, continuousSeparator,
+          : createHybridGaussianFactor(eliminationResults, continuousSeparator,
                                         discreteSeparator);
 
-  // Create the GaussianMixture from the conditionals
-  GaussianMixture::Conditionals conditionals(
+  // Create the HybridGaussianConditional from the conditionals
+  HybridGaussianConditional::Conditionals conditionals(
       eliminationResults, [](const Result &pair) { return pair.first; });
-  auto gaussianMixture = std::make_shared<GaussianMixture>(
+  auto gaussianMixture = std::make_shared<HybridGaussianConditional>(
       frontalKeys, continuousSeparator, discreteSeparator, conditionals);
 
   return {std::make_shared<HybridConditional>(gaussianMixture), newFactor};
@@ -458,7 +458,7 @@ EliminateHybrid(const HybridGaussianFactorGraph &factors,
   // Because of all these reasons, we carefully consider how to
   // implement the hybrid factors so that we do not get poor performance.
 
-  // The first thing is how to represent the GaussianMixture.
+  // The first thing is how to represent the HybridGaussianConditional.
   // A very possible scenario is that the incoming factors will have different
   // levels of discrete keys. For example, imagine we are going to eliminate the
   // fragment: $\phi(x1,c1,c2)$, $\phi(x1,c2,c3)$, which is perfectly valid.
@@ -549,7 +549,7 @@ AlgebraicDecisionTree<Key> HybridGaussianFactorGraph::errorTree(
       f = hc->inner();
     }
 
-    if (auto gaussianMixture = dynamic_pointer_cast<GaussianMixtureFactor>(f)) {
+    if (auto gaussianMixture = dynamic_pointer_cast<HybridGaussianFactor>(f)) {
       // Compute factor error and add it.
       error_tree = error_tree + gaussianMixture->errorTree(continuousValues);
     } else if (auto gaussian = dynamic_pointer_cast<GaussianFactor>(f)) {
@@ -597,9 +597,9 @@ GaussianFactorGraph HybridGaussianFactorGraph::operator()(
       gfg.push_back(gf);
     } else if (auto gc = std::dynamic_pointer_cast<GaussianConditional>(f)) {
       gfg.push_back(gf);
-    } else if (auto gmf = std::dynamic_pointer_cast<GaussianMixtureFactor>(f)) {
+    } else if (auto gmf = std::dynamic_pointer_cast<HybridGaussianFactor>(f)) {
       gfg.push_back((*gmf)(assignment));
-    } else if (auto gm = dynamic_pointer_cast<GaussianMixture>(f)) {
+    } else if (auto gm = dynamic_pointer_cast<HybridGaussianConditional>(f)) {
       gfg.push_back((*gm)(assignment));
     } else {
       continue;
@@ -18,9 +18,9 @@
 
 #pragma once
 
-#include <gtsam/hybrid/GaussianMixtureFactor.h>
 #include <gtsam/hybrid/HybridFactor.h>
 #include <gtsam/hybrid/HybridFactorGraph.h>
+#include <gtsam/hybrid/HybridGaussianFactor.h>
 #include <gtsam/inference/EliminateableFactorGraph.h>
 #include <gtsam/inference/FactorGraph.h>
 #include <gtsam/inference/Ordering.h>
@@ -221,7 +221,6 @@ class GTSAM_EXPORT HybridGaussianFactorGraph
 
   /// Get the GaussianFactorGraph at a given discrete assignment.
   GaussianFactorGraph operator()(const DiscreteValues& assignment) const;
 
 };
 
 }  // namespace gtsam
@@ -51,11 +51,10 @@ class HybridEliminationTree;
  */
 class GTSAM_EXPORT HybridJunctionTree
     : public JunctionTree<HybridBayesTree, HybridGaussianFactorGraph> {
 
  public:
   typedef JunctionTree<HybridBayesTree, HybridGaussianFactorGraph>
       Base;                                  ///< Base class
   typedef HybridJunctionTree This;           ///< This class
   typedef std::shared_ptr<This> shared_ptr;  ///< Shared pointer to this class
 
   /**
@@ -10,7 +10,7 @@
  * -------------------------------------------------------------------------- */
 
 /**
- * @file MixtureFactor.h
+ * @file HybridNonlinearFactor.h
  * @brief Nonlinear Mixture factor of continuous and discrete.
  * @author Kevin Doherty, kdoherty@mit.edu
  * @author Varun Agrawal
@@ -20,7 +20,7 @@
 #pragma once
 
 #include <gtsam/discrete/DiscreteValues.h>
-#include <gtsam/hybrid/GaussianMixtureFactor.h>
+#include <gtsam/hybrid/HybridGaussianFactor.h>
 #include <gtsam/hybrid/HybridValues.h>
 #include <gtsam/nonlinear/NonlinearFactor.h>
 #include <gtsam/nonlinear/NonlinearFactorGraph.h>
@@ -44,11 +44,11 @@ namespace gtsam {
  * one of (NonlinearFactor, GaussianFactor) which can then be checked to perform
  * the correct operation.
  */
-class MixtureFactor : public HybridFactor {
+class HybridNonlinearFactor : public HybridFactor {
  public:
   using Base = HybridFactor;
-  using This = MixtureFactor;
-  using shared_ptr = std::shared_ptr<MixtureFactor>;
+  using This = HybridNonlinearFactor;
+  using shared_ptr = std::shared_ptr<HybridNonlinearFactor>;
   using sharedFactor = std::shared_ptr<NonlinearFactor>;
 
   /**
@@ -63,7 +63,7 @@ class MixtureFactor : public HybridFactor {
   bool normalized_;
 
  public:
-  MixtureFactor() = default;
+  HybridNonlinearFactor() = default;
 
   /**
    * @brief Construct from Decision tree.
@@ -74,8 +74,8 @@ class MixtureFactor : public HybridFactor {
    * @param normalized Flag indicating if the factor error is already
    * normalized.
    */
-  MixtureFactor(const KeyVector& keys, const DiscreteKeys& discreteKeys,
+  HybridNonlinearFactor(const KeyVector& keys, const DiscreteKeys& discreteKeys,
                 const Factors& factors, bool normalized = false)
       : Base(keys, discreteKeys), factors_(factors), normalized_(normalized) {}
 
   /**
@@ -95,9 +95,9 @@ class MixtureFactor : public HybridFactor {
    * normalized.
    */
   template <typename FACTOR>
-  MixtureFactor(const KeyVector& keys, const DiscreteKeys& discreteKeys,
+  HybridNonlinearFactor(const KeyVector& keys, const DiscreteKeys& discreteKeys,
                 const std::vector<std::shared_ptr<FACTOR>>& factors,
                 bool normalized = false)
       : Base(keys, discreteKeys), normalized_(normalized) {
     std::vector<NonlinearFactor::shared_ptr> nonlinear_factors;
     KeySet continuous_keys_set(keys.begin(), keys.end());
@@ -111,7 +111,7 @@ class MixtureFactor : public HybridFactor {
         nonlinear_factors.push_back(nf);
       } else {
         throw std::runtime_error(
-            "Factors passed into MixtureFactor need to be nonlinear!");
+            "Factors passed into HybridNonlinearFactor need to be nonlinear!");
       }
     }
     factors_ = Factors(discreteKeys, nonlinear_factors);
@@ -124,7 +124,7 @@ class MixtureFactor : public HybridFactor {
   }
 
   /**
-   * @brief Compute error of the MixtureFactor as a tree.
+   * @brief Compute error of the HybridNonlinearFactor as a tree.
    *
    * @param continuousValues The continuous values for which to compute the
    * error.
@@ -188,7 +188,7 @@ class MixtureFactor : public HybridFactor {
       const KeyFormatter& keyFormatter = DefaultKeyFormatter) const override {
     std::cout << (s.empty() ? "" : s + " ");
     Base::print("", keyFormatter);
-    std::cout << "\nMixtureFactor\n";
+    std::cout << "\nHybridNonlinearFactor\n";
     auto valueFormatter = [](const sharedFactor& v) {
       if (v) {
         return "Nonlinear factor on " + std::to_string(v->size()) + " keys";
@@ -201,15 +201,16 @@ class MixtureFactor : public HybridFactor {
 
   /// Check equality
   bool equals(const HybridFactor& other, double tol = 1e-9) const override {
-    // We attempt a dynamic cast from HybridFactor to MixtureFactor. If it
-    // fails, return false.
-    if (!dynamic_cast<const MixtureFactor*>(&other)) return false;
+    // We attempt a dynamic cast from HybridFactor to HybridNonlinearFactor. If
+    // it fails, return false.
+    if (!dynamic_cast<const HybridNonlinearFactor*>(&other)) return false;
 
-    // If the cast is successful, we'll properly construct a MixtureFactor
-    // object from `other`
-    const MixtureFactor& f(static_cast<const MixtureFactor&>(other));
+    // If the cast is successful, we'll properly construct a
+    // HybridNonlinearFactor object from `other`
+    const HybridNonlinearFactor& f(
+        static_cast<const HybridNonlinearFactor&>(other));
 
-    // Ensure that this MixtureFactor and `f` have the same `factors_`.
+    // Ensure that this HybridNonlinearFactor and `f` have the same `factors_`.
     auto compare = [tol](const sharedFactor& a, const sharedFactor& b) {
       return traits<NonlinearFactor>::Equals(*a, *b, tol);
     };
@@ -233,8 +234,8 @@ class MixtureFactor : public HybridFactor {
     return factor->linearize(continuousValues);
   }
 
-  /// Linearize all the continuous factors to get a GaussianMixtureFactor.
-  std::shared_ptr<GaussianMixtureFactor> linearize(
+  /// Linearize all the continuous factors to get a HybridGaussianFactor.
+  std::shared_ptr<HybridGaussianFactor> linearize(
       const Values& continuousValues) const {
     // functional to linearize each factor in the decision tree
     auto linearizeDT = [continuousValues](const sharedFactor& factor) {
@@ -244,7 +245,7 @@ class MixtureFactor : public HybridFactor {
     DecisionTree<Key, GaussianFactor::shared_ptr> linearized_factors(
         factors_, linearizeDT);
 
-    return std::make_shared<GaussianMixtureFactor>(
+    return std::make_shared<HybridGaussianFactor>(
         continuousKeys_, discreteKeys_, linearized_factors);
   }
 
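To make the renamed nonlinear API concrete, here is a minimal, self-contained sketch of building a HybridNonlinearFactor (formerly MixtureFactor) and linearizing it into a HybridGaussianFactor (formerly GaussianMixtureFactor). It mirrors the test fixtures further down in this diff; the keys, noise model, and linearization point are illustrative only and are not part of the change.

    // Sketch only: one nonlinear component per discrete mode, then linearize.
    #include <gtsam/hybrid/HybridNonlinearFactor.h>
    #include <gtsam/inference/Symbol.h>
    #include <gtsam/nonlinear/Values.h>
    #include <gtsam/slam/BetweenFactor.h>

    using namespace gtsam;
    using symbol_shorthand::M;
    using symbol_shorthand::X;

    int main() {
      auto noise_model = noiseModel::Isotropic::Sigma(1, 1.0);
      // Mode 0: no motion between X(0) and X(1); mode 1: move by 1.
      auto zero_motion =
          std::make_shared<BetweenFactor<double>>(X(0), X(1), 0.0, noise_model);
      auto one_motion =
          std::make_shared<BetweenFactor<double>>(X(0), X(1), 1.0, noise_model);
      std::vector<NonlinearFactor::shared_ptr> components{zero_motion, one_motion};

      // Constructor arguments are unchanged by the rename.
      HybridNonlinearFactor mf(KeyVector{X(0), X(1)},
                               DiscreteKeys{DiscreteKey(M(0), 2)}, components);

      // Linearizing yields one GaussianFactor per assignment of M(0).
      Values initial;
      initial.insert(X(0), 0.0);
      initial.insert(X(1), 0.0);
      HybridGaussianFactor::shared_ptr gmf = mf.linearize(initial);
      gmf->print("linearized: ");
      return 0;
    }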
@@ -18,10 +18,10 @@
 
 #include <gtsam/discrete/DecisionTreeFactor.h>
 #include <gtsam/discrete/TableFactor.h>
-#include <gtsam/hybrid/GaussianMixture.h>
+#include <gtsam/hybrid/HybridGaussianConditional.h>
 #include <gtsam/hybrid/HybridGaussianFactorGraph.h>
+#include <gtsam/hybrid/HybridNonlinearFactor.h>
 #include <gtsam/hybrid/HybridNonlinearFactorGraph.h>
-#include <gtsam/hybrid/MixtureFactor.h>
 #include <gtsam/nonlinear/NonlinearFactor.h>
 
 namespace gtsam {
@@ -59,7 +59,7 @@ void HybridNonlinearFactorGraph::printErrors(
     // Clear the stringstream
     ss.str(std::string());
 
-    if (auto mf = std::dynamic_pointer_cast<MixtureFactor>(factor)) {
+    if (auto mf = std::dynamic_pointer_cast<HybridNonlinearFactor>(factor)) {
       if (factor == nullptr) {
         std::cout << "nullptr"
                   << "\n";
@@ -70,7 +70,7 @@ void HybridNonlinearFactorGraph::printErrors(
         std::cout << std::endl;
       }
     } else if (auto gmf =
-                   std::dynamic_pointer_cast<GaussianMixtureFactor>(factor)) {
+                   std::dynamic_pointer_cast<HybridGaussianFactor>(factor)) {
       if (factor == nullptr) {
         std::cout << "nullptr"
                   << "\n";
@@ -80,7 +80,8 @@ void HybridNonlinearFactorGraph::printErrors(
         gmf->errorTree(values.continuous()).print("", keyFormatter);
         std::cout << std::endl;
       }
-    } else if (auto gm = std::dynamic_pointer_cast<GaussianMixture>(factor)) {
+    } else if (auto gm = std::dynamic_pointer_cast<HybridGaussianConditional>(
+                   factor)) {
       if (factor == nullptr) {
         std::cout << "nullptr"
                   << "\n";
@@ -151,8 +152,8 @@ HybridGaussianFactorGraph::shared_ptr HybridNonlinearFactorGraph::linearize(
       continue;
     }
     // Check if it is a nonlinear mixture factor
-    if (auto mf = dynamic_pointer_cast<MixtureFactor>(f)) {
-      const GaussianMixtureFactor::shared_ptr& gmf =
+    if (auto mf = dynamic_pointer_cast<HybridNonlinearFactor>(f)) {
+      const HybridGaussianFactor::shared_ptr& gmf =
           mf->linearize(continuousValues);
       linearFG->push_back(gmf);
     } else if (auto nlf = dynamic_pointer_cast<NonlinearFactor>(f)) {
@@ -161,9 +162,9 @@ HybridGaussianFactorGraph::shared_ptr HybridNonlinearFactorGraph::linearize(
     } else if (dynamic_pointer_cast<DiscreteFactor>(f)) {
       // If discrete-only: doesn't need linearization.
       linearFG->push_back(f);
-    } else if (auto gmf = dynamic_pointer_cast<GaussianMixtureFactor>(f)) {
+    } else if (auto gmf = dynamic_pointer_cast<HybridGaussianFactor>(f)) {
       linearFG->push_back(gmf);
-    } else if (auto gm = dynamic_pointer_cast<GaussianMixture>(f)) {
+    } else if (auto gm = dynamic_pointer_cast<HybridGaussianConditional>(f)) {
       linearFG->push_back(gm);
     } else if (dynamic_pointer_cast<GaussianFactor>(f)) {
       linearFG->push_back(f);
@@ -19,6 +19,7 @@
 
 #include <gtsam/hybrid/HybridGaussianISAM.h>
 #include <gtsam/hybrid/HybridNonlinearFactorGraph.h>
 
 #include <optional>
 
 namespace gtsam {
@@ -138,7 +138,7 @@ HybridSmoother::addConditionals(const HybridGaussianFactorGraph &originalGraph,
 }
 
 /* ************************************************************************* */
-GaussianMixture::shared_ptr HybridSmoother::gaussianMixture(
+HybridGaussianConditional::shared_ptr HybridSmoother::gaussianMixture(
     size_t index) const {
   return hybridBayesNet_.at(index)->asMixture();
 }
@@ -69,7 +69,7 @@ class GTSAM_EXPORT HybridSmoother {
       const HybridBayesNet& hybridBayesNet, const Ordering& ordering) const;
 
   /// Get the Gaussian Mixture from the Bayes Net posterior at `index`.
-  GaussianMixture::shared_ptr gaussianMixture(size_t index) const;
+  HybridGaussianConditional::shared_ptr gaussianMixture(size_t index) const;
 
   /// Return the Bayes Net posterior.
   const HybridBayesNet& hybridBayesNet() const;
@@ -10,26 +10,26 @@ class HybridValues {
   gtsam::DiscreteValues discrete() const;
 
   HybridValues();
-  HybridValues(const gtsam::VectorValues &cv, const gtsam::DiscreteValues &dv);
+  HybridValues(const gtsam::VectorValues& cv, const gtsam::DiscreteValues& dv);
   void print(string s = "HybridValues",
              const gtsam::KeyFormatter& keyFormatter =
                  gtsam::DefaultKeyFormatter) const;
   bool equals(const gtsam::HybridValues& other, double tol) const;
 
   void insert(gtsam::Key j, int value);
   void insert(gtsam::Key j, const gtsam::Vector& value);
 
   void insert_or_assign(gtsam::Key j, const gtsam::Vector& value);
   void insert_or_assign(gtsam::Key j, size_t value);
 
   void insert(const gtsam::VectorValues& values);
   void insert(const gtsam::DiscreteValues& values);
   void insert(const gtsam::HybridValues& values);
 
   void update(const gtsam::VectorValues& values);
   void update(const gtsam::DiscreteValues& values);
   void update(const gtsam::HybridValues& values);
 
   size_t& atDiscrete(gtsam::Key j);
   gtsam::Vector& at(gtsam::Key j);
 };
@@ -42,7 +42,7 @@ virtual class HybridFactor : gtsam::Factor {
   bool equals(const gtsam::HybridFactor& other, double tol = 1e-9) const;
 
   // Standard interface:
-  double error(const gtsam::HybridValues &values) const;
+  double error(const gtsam::HybridValues& values) const;
   bool isDiscrete() const;
   bool isContinuous() const;
   bool isHybrid() const;
@@ -65,39 +65,40 @@ virtual class HybridConditional {
   double logProbability(const gtsam::HybridValues& values) const;
   double evaluate(const gtsam::HybridValues& values) const;
   double operator()(const gtsam::HybridValues& values) const;
-  gtsam::GaussianMixture* asMixture() const;
+  gtsam::HybridGaussianConditional* asMixture() const;
   gtsam::GaussianConditional* asGaussian() const;
   gtsam::DiscreteConditional* asDiscrete() const;
   gtsam::Factor* inner();
   double error(const gtsam::HybridValues& values) const;
 };
 
-#include <gtsam/hybrid/GaussianMixtureFactor.h>
-class GaussianMixtureFactor : gtsam::HybridFactor {
-  GaussianMixtureFactor(
+#include <gtsam/hybrid/HybridGaussianFactor.h>
+class HybridGaussianFactor : gtsam::HybridFactor {
+  HybridGaussianFactor(
       const gtsam::KeyVector& continuousKeys,
       const gtsam::DiscreteKeys& discreteKeys,
       const std::vector<gtsam::GaussianFactor::shared_ptr>& factorsList);
 
-  void print(string s = "GaussianMixtureFactor\n",
+  void print(string s = "HybridGaussianFactor\n",
              const gtsam::KeyFormatter& keyFormatter =
                  gtsam::DefaultKeyFormatter) const;
 };
 
-#include <gtsam/hybrid/GaussianMixture.h>
-class GaussianMixture : gtsam::HybridFactor {
-  GaussianMixture(const gtsam::KeyVector& continuousFrontals,
-                  const gtsam::KeyVector& continuousParents,
-                  const gtsam::DiscreteKeys& discreteParents,
-                  const std::vector<gtsam::GaussianConditional::shared_ptr>&
-                      conditionalsList);
+#include <gtsam/hybrid/HybridGaussianConditional.h>
+class HybridGaussianConditional : gtsam::HybridFactor {
+  HybridGaussianConditional(
+      const gtsam::KeyVector& continuousFrontals,
+      const gtsam::KeyVector& continuousParents,
+      const gtsam::DiscreteKeys& discreteParents,
+      const std::vector<gtsam::GaussianConditional::shared_ptr>&
+          conditionalsList);
 
-  gtsam::GaussianMixtureFactor* likelihood(
+  gtsam::HybridGaussianFactor* likelihood(
       const gtsam::VectorValues& frontals) const;
   double logProbability(const gtsam::HybridValues& values) const;
   double evaluate(const gtsam::HybridValues& values) const;
 
-  void print(string s = "GaussianMixture\n",
+  void print(string s = "HybridGaussianConditional\n",
              const gtsam::KeyFormatter& keyFormatter =
                  gtsam::DefaultKeyFormatter) const;
 };
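A minimal sketch of the renamed conditional as declared in the wrapper above, mirroring the unit tests later in this diff; the keys and standard deviations are illustrative only and are not part of the change.

    // Sketch only: one GaussianConditional per discrete mode, p(z0 | x0, mode).
    #include <gtsam/hybrid/HybridGaussianConditional.h>
    #include <gtsam/inference/Symbol.h>
    #include <gtsam/linear/GaussianConditional.h>

    using namespace gtsam;
    using symbol_shorthand::M;
    using symbol_shorthand::X;
    using symbol_shorthand::Z;

    int main() {
      const DiscreteKey mode(M(0), 2);
      // Low noise for mode 0, high noise for mode 1.
      const std::vector<GaussianConditional::shared_ptr> conditionals{
          GaussianConditional::sharedMeanAndStddev(Z(0), I_1x1, X(0),
                                                   Vector1(0.0), 0.5),
          GaussianConditional::sharedMeanAndStddev(Z(0), I_1x1, X(0),
                                                   Vector1(0.0), 3.0)};

      // Formerly GaussianMixture: frontals, continuous parents, discrete parents.
      const HybridGaussianConditional conditional({Z(0)}, {X(0)}, {mode},
                                                  conditionals);
      conditional.print("renamed conditional: ");
      return 0;
    }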
@@ -131,7 +132,7 @@ class HybridBayesTree {
 #include <gtsam/hybrid/HybridBayesNet.h>
 class HybridBayesNet {
   HybridBayesNet();
-  void push_back(const gtsam::GaussianMixture* s);
+  void push_back(const gtsam::HybridGaussianConditional* s);
   void push_back(const gtsam::GaussianConditional* s);
   void push_back(const gtsam::DiscreteConditional* s);
 
@@ -139,7 +140,7 @@ class HybridBayesNet {
   size_t size() const;
   gtsam::KeySet keys() const;
   const gtsam::HybridConditional* at(size_t i) const;
 
   // Standard interface:
   double logProbability(const gtsam::HybridValues& values) const;
   double evaluate(const gtsam::HybridValues& values) const;
@@ -149,7 +150,7 @@ class HybridBayesNet {
       const gtsam::VectorValues& measurements) const;
 
   gtsam::HybridValues optimize() const;
-  gtsam::HybridValues sample(const gtsam::HybridValues &given) const;
+  gtsam::HybridValues sample(const gtsam::HybridValues& given) const;
   gtsam::HybridValues sample() const;
 
   void print(string s = "HybridBayesNet\n",
@@ -177,7 +178,7 @@ class HybridGaussianFactorGraph {
   void push_back(const gtsam::HybridGaussianFactorGraph& graph);
   void push_back(const gtsam::HybridBayesNet& bayesNet);
   void push_back(const gtsam::HybridBayesTree& bayesTree);
-  void push_back(const gtsam::GaussianMixtureFactor* gmm);
+  void push_back(const gtsam::HybridGaussianFactor* gmm);
   void push_back(gtsam::DecisionTreeFactor* factor);
   void push_back(gtsam::TableFactor* factor);
   void push_back(gtsam::JacobianFactor* factor);
@@ -189,7 +190,8 @@ class HybridGaussianFactorGraph {
   const gtsam::HybridFactor* at(size_t i) const;
 
   void print(string s = "") const;
-  bool equals(const gtsam::HybridGaussianFactorGraph& fg, double tol = 1e-9) const;
+  bool equals(const gtsam::HybridGaussianFactorGraph& fg,
+              double tol = 1e-9) const;
 
   // evaluation
   double error(const gtsam::HybridValues& values) const;
@@ -222,7 +224,8 @@ class HybridNonlinearFactorGraph {
   void push_back(gtsam::HybridFactor* factor);
   void push_back(gtsam::NonlinearFactor* factor);
   void push_back(gtsam::DiscreteFactor* factor);
-  gtsam::HybridGaussianFactorGraph linearize(const gtsam::Values& continuousValues) const;
+  gtsam::HybridGaussianFactorGraph linearize(
+      const gtsam::Values& continuousValues) const;
 
   bool empty() const;
   void remove(size_t i);
@@ -231,32 +234,32 @@ class HybridNonlinearFactorGraph {
   const gtsam::HybridFactor* at(size_t i) const;
 
   void print(string s = "HybridNonlinearFactorGraph\n",
              const gtsam::KeyFormatter& keyFormatter =
                  gtsam::DefaultKeyFormatter) const;
 };
 
-#include <gtsam/hybrid/MixtureFactor.h>
-class MixtureFactor : gtsam::HybridFactor {
-  MixtureFactor(
+#include <gtsam/hybrid/HybridNonlinearFactor.h>
+class HybridNonlinearFactor : gtsam::HybridFactor {
+  HybridNonlinearFactor(
       const gtsam::KeyVector& keys, const gtsam::DiscreteKeys& discreteKeys,
       const gtsam::DecisionTree<gtsam::Key, gtsam::NonlinearFactor*>& factors,
       bool normalized = false);
 
   template <FACTOR = {gtsam::NonlinearFactor}>
-  MixtureFactor(const gtsam::KeyVector& keys, const gtsam::DiscreteKeys& discreteKeys,
-                const std::vector<FACTOR*>& factors,
-                bool normalized = false);
+  HybridNonlinearFactor(const gtsam::KeyVector& keys,
+                        const gtsam::DiscreteKeys& discreteKeys,
+                        const std::vector<FACTOR*>& factors,
+                        bool normalized = false);
 
   double error(const gtsam::Values& continuousValues,
               const gtsam::DiscreteValues& discreteValues) const;
 
-  double nonlinearFactorLogNormalizingConstant(const gtsam::NonlinearFactor* factor,
-                                               const gtsam::Values& values) const;
+  double nonlinearFactorLogNormalizingConstant(
+      const gtsam::NonlinearFactor* factor, const gtsam::Values& values) const;
 
-  GaussianMixtureFactor* linearize(
-      const gtsam::Values& continuousValues) const;
+  HybridGaussianFactor* linearize(const gtsam::Values& continuousValues) const;
 
-  void print(string s = "MixtureFactor\n",
+  void print(string s = "HybridNonlinearFactor\n",
              const gtsam::KeyFormatter& keyFormatter =
                  gtsam::DefaultKeyFormatter) const;
 };
@@ -19,10 +19,10 @@
 #include <gtsam/base/Matrix.h>
 #include <gtsam/discrete/DecisionTreeFactor.h>
 #include <gtsam/discrete/DiscreteDistribution.h>
-#include <gtsam/hybrid/GaussianMixtureFactor.h>
+#include <gtsam/hybrid/HybridGaussianFactor.h>
 #include <gtsam/hybrid/HybridGaussianFactorGraph.h>
+#include <gtsam/hybrid/HybridNonlinearFactor.h>
 #include <gtsam/hybrid/HybridNonlinearFactorGraph.h>
-#include <gtsam/hybrid/MixtureFactor.h>
 #include <gtsam/inference/Symbol.h>
 #include <gtsam/linear/JacobianFactor.h>
 #include <gtsam/linear/NoiseModel.h>
@@ -57,12 +57,12 @@ inline HybridGaussianFactorGraph::shared_ptr makeSwitchingChain(
 
   // keyFunc(1) to keyFunc(n+1)
   for (size_t t = 1; t < n; t++) {
-    hfg.add(GaussianMixtureFactor(
+    hfg.add(HybridGaussianFactor(
         {keyFunc(t), keyFunc(t + 1)}, {{dKeyFunc(t), 2}},
         {std::make_shared<JacobianFactor>(keyFunc(t), I_3x3, keyFunc(t + 1),
                                           I_3x3, Z_3x1),
         std::make_shared<JacobianFactor>(keyFunc(t), I_3x3, keyFunc(t + 1),
                                          I_3x3, Vector3::Ones())}));
 
     if (t > 1) {
       hfg.add(DecisionTreeFactor({{dKeyFunc(t - 1), 2}, {dKeyFunc(t), 2}},
@@ -163,7 +163,7 @@ struct Switching {
       for (auto &&f : motion_models) {
         components.push_back(std::dynamic_pointer_cast<NonlinearFactor>(f));
       }
-      nonlinearFactorGraph.emplace_shared<MixtureFactor>(
+      nonlinearFactorGraph.emplace_shared<HybridNonlinearFactor>(
           keys, DiscreteKeys{modes[k]}, components);
     }
 
@@ -43,7 +43,7 @@ inline HybridBayesNet createHybridBayesNet(size_t num_measurements = 1,
   // Create Gaussian mixture z_i = x0 + noise for each measurement.
   for (size_t i = 0; i < num_measurements; i++) {
     const auto mode_i = manyModes ? DiscreteKey{M(i), 2} : mode;
-    bayesNet.emplace_shared<GaussianMixture>(
+    bayesNet.emplace_shared<HybridGaussianConditional>(
         KeyVector{Z(i)}, KeyVector{X(0)}, DiscreteKeys{mode_i},
         std::vector{GaussianConditional::sharedMeanAndStddev(Z(i), I_1x1, X(0),
                                                              Z_1x1, 0.5),
@@ -107,7 +107,7 @@ TEST(HybridBayesNet, evaluateHybrid) {
   // Create hybrid Bayes net.
   HybridBayesNet bayesNet;
   bayesNet.push_back(continuousConditional);
-  bayesNet.emplace_shared<GaussianMixture>(
+  bayesNet.emplace_shared<HybridGaussianConditional>(
       KeyVector{X(1)}, KeyVector{}, DiscreteKeys{Asia},
       std::vector{conditional0, conditional1});
   bayesNet.emplace_shared<DiscreteConditional>(Asia, "99/1");
@@ -168,7 +168,7 @@ TEST(HybridBayesNet, Error) {
   conditional1 = std::make_shared<GaussianConditional>(
       X(1), Vector1::Constant(2), I_1x1, model1);
 
-  auto gm = std::make_shared<GaussianMixture>(
+  auto gm = std::make_shared<HybridGaussianConditional>(
       KeyVector{X(1)}, KeyVector{}, DiscreteKeys{Asia},
       std::vector{conditional0, conditional1});
   // Create hybrid Bayes net.
@@ -389,7 +389,7 @@ TEST(HybridBayesNet, Sampling) {
       std::make_shared<BetweenFactor<double>>(X(0), X(1), 1, noise_model);
   std::vector<NonlinearFactor::shared_ptr> factors = {zero_motion, one_motion};
   nfg.emplace_shared<PriorFactor<double>>(X(0), 0.0, noise_model);
-  nfg.emplace_shared<MixtureFactor>(
+  nfg.emplace_shared<HybridNonlinearFactor>(
       KeyVector{X(0), X(1)}, DiscreteKeys{DiscreteKey(M(0), 2)}, factors);
 
   DiscreteKey mode(M(0), 2);
@@ -43,9 +43,9 @@ TEST(HybridConditional, Invariants) {
   auto hc0 = bn.at(0);
   CHECK(hc0->isHybrid());
 
-  // Check invariants as a GaussianMixture.
+  // Check invariants as a HybridGaussianConditional.
   const auto mixture = hc0->asMixture();
-  EXPECT(GaussianMixture::CheckInvariants(*mixture, values));
+  EXPECT(HybridGaussianConditional::CheckInvariants(*mixture, values));
 
   // Check invariants as a HybridConditional.
   EXPECT(HybridConditional::CheckInvariants(*hc0, values));
@@ -19,10 +19,10 @@
 #include <gtsam/geometry/Pose2.h>
 #include <gtsam/geometry/Pose3.h>
 #include <gtsam/hybrid/HybridBayesNet.h>
+#include <gtsam/hybrid/HybridNonlinearFactor.h>
 #include <gtsam/hybrid/HybridNonlinearFactorGraph.h>
 #include <gtsam/hybrid/HybridNonlinearISAM.h>
 #include <gtsam/hybrid/HybridSmoother.h>
-#include <gtsam/hybrid/MixtureFactor.h>
 #include <gtsam/inference/Symbol.h>
 #include <gtsam/linear/GaussianBayesNet.h>
 #include <gtsam/linear/GaussianBayesTree.h>
@@ -435,7 +435,7 @@ static HybridNonlinearFactorGraph createHybridNonlinearFactorGraph() {
       std::make_shared<BetweenFactor<double>>(X(0), X(1), 0, noise_model);
   const auto one_motion =
       std::make_shared<BetweenFactor<double>>(X(0), X(1), 1, noise_model);
-  nfg.emplace_shared<MixtureFactor>(
+  nfg.emplace_shared<HybridNonlinearFactor>(
       KeyVector{X(0), X(1)}, DiscreteKeys{m},
       std::vector<NonlinearFactor::shared_ptr>{zero_motion, one_motion});
 
@@ -531,10 +531,10 @@ TEST(HybridEstimation, CorrectnessViaSampling) {
 * Helper function to add the constant term corresponding to
 * the difference in noise models.
 */
-std::shared_ptr<GaussianMixtureFactor> mixedVarianceFactor(
-    const MixtureFactor& mf, const Values& initial, const Key& mode,
+std::shared_ptr<HybridGaussianFactor> mixedVarianceFactor(
+    const HybridNonlinearFactor& mf, const Values& initial, const Key& mode,
     double noise_tight, double noise_loose, size_t d, size_t tight_index) {
-  GaussianMixtureFactor::shared_ptr gmf = mf.linearize(initial);
+  HybridGaussianFactor::shared_ptr gmf = mf.linearize(initial);
 
   constexpr double log2pi = 1.8378770664093454835606594728112;
   // logConstant will be of the tighter model
@@ -560,7 +560,7 @@ std::shared_ptr<GaussianMixtureFactor> mixedVarianceFactor(
     }
   };
   auto updated_components = gmf->factors().apply(func);
-  auto updated_gmf = std::make_shared<GaussianMixtureFactor>(
+  auto updated_gmf = std::make_shared<HybridGaussianFactor>(
       gmf->continuousKeys(), gmf->discreteKeys(), updated_components);
 
   return updated_gmf;
@@ -592,7 +592,7 @@ TEST(HybridEstimation, ModeSelection) {
   std::vector<NonlinearFactor::shared_ptr> components = {model0, model1};
 
   KeyVector keys = {X(0), X(1)};
-  MixtureFactor mf(keys, modes, components);
+  HybridNonlinearFactor mf(keys, modes, components);
 
   initial.insert(X(0), 0.0);
   initial.insert(X(1), 0.0);
@@ -616,7 +616,7 @@ TEST(HybridEstimation, ModeSelection) {
       GaussianConditional::sharedMeanAndStddev(Z(0), -I_1x1, X(0), Z_1x1, 0.1));
   bn.push_back(
       GaussianConditional::sharedMeanAndStddev(Z(0), -I_1x1, X(1), Z_1x1, 0.1));
-  bn.emplace_shared<GaussianMixture>(
+  bn.emplace_shared<HybridGaussianConditional>(
       KeyVector{Z(0)}, KeyVector{X(0), X(1)}, DiscreteKeys{mode},
       std::vector{GaussianConditional::sharedMeanAndStddev(
                       Z(0), I_1x1, X(0), -I_1x1, X(1), Z_1x1, noise_loose),
@@ -647,7 +647,7 @@ TEST(HybridEstimation, ModeSelection2) {
       GaussianConditional::sharedMeanAndStddev(Z(0), -I_3x3, X(0), Z_3x1, 0.1));
   bn.push_back(
       GaussianConditional::sharedMeanAndStddev(Z(0), -I_3x3, X(1), Z_3x1, 0.1));
-  bn.emplace_shared<GaussianMixture>(
+  bn.emplace_shared<HybridGaussianConditional>(
      KeyVector{Z(0)}, KeyVector{X(0), X(1)}, DiscreteKeys{mode},
      std::vector{GaussianConditional::sharedMeanAndStddev(
                      Z(0), I_3x3, X(0), -I_3x3, X(1), Z_3x1, noise_loose),
@@ -683,7 +683,7 @@ TEST(HybridEstimation, ModeSelection2) {
   std::vector<NonlinearFactor::shared_ptr> components = {model0, model1};
 
   KeyVector keys = {X(0), X(1)};
-  MixtureFactor mf(keys, modes, components);
+  HybridNonlinearFactor mf(keys, modes, components);
 
   initial.insert<Vector3>(X(0), Z_3x1);
   initial.insert<Vector3>(X(1), Z_3x1);
@@ -55,7 +55,7 @@ TEST(HybridFactorGraph, Keys) {
   DecisionTree<Key, GaussianFactor::shared_ptr> dt(
       M(1), std::make_shared<JacobianFactor>(X(1), I_3x3, Z_3x1),
       std::make_shared<JacobianFactor>(X(1), I_3x3, Vector3::Ones()));
-  hfg.add(GaussianMixtureFactor({X(1)}, {m1}, dt));
+  hfg.add(HybridGaussianFactor({X(1)}, {m1}, dt));
 
   KeySet expected_continuous{X(0), X(1)};
   EXPECT(
@ -10,8 +10,8 @@
|
||||||
* -------------------------------------------------------------------------- */
|
* -------------------------------------------------------------------------- */
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* @file testGaussianMixture.cpp
|
* @file testHybridGaussianConditional.cpp
|
||||||
* @brief Unit tests for GaussianMixture class
|
* @brief Unit tests for HybridGaussianConditional class
|
||||||
* @author Varun Agrawal
|
* @author Varun Agrawal
|
||||||
* @author Fan Jiang
|
* @author Fan Jiang
|
||||||
* @author Frank Dellaert
|
* @author Frank Dellaert
|
||||||
|
|
@ -19,8 +19,8 @@
|
||||||
*/
|
*/
|
||||||
|
|
||||||
#include <gtsam/discrete/DiscreteValues.h>
|
#include <gtsam/discrete/DiscreteValues.h>
|
||||||
#include <gtsam/hybrid/GaussianMixture.h>
|
#include <gtsam/hybrid/HybridGaussianConditional.h>
|
||||||
#include <gtsam/hybrid/GaussianMixtureFactor.h>
|
#include <gtsam/hybrid/HybridGaussianFactor.h>
|
||||||
#include <gtsam/hybrid/HybridValues.h>
|
#include <gtsam/hybrid/HybridValues.h>
|
||||||
#include <gtsam/inference/Symbol.h>
|
#include <gtsam/inference/Symbol.h>
|
||||||
#include <gtsam/linear/GaussianConditional.h>
|
#include <gtsam/linear/GaussianConditional.h>
|
||||||
|
|
@ -45,19 +45,19 @@ static const HybridValues hv1{vv, assignment1};
|
||||||
|
|
||||||
/* ************************************************************************* */
|
/* ************************************************************************* */
|
||||||
namespace equal_constants {
|
namespace equal_constants {
|
||||||
// Create a simple GaussianMixture
|
// Create a simple HybridGaussianConditional
|
||||||
const double commonSigma = 2.0;
|
const double commonSigma = 2.0;
|
||||||
const std::vector<GaussianConditional::shared_ptr> conditionals{
|
const std::vector<GaussianConditional::shared_ptr> conditionals{
|
||||||
GaussianConditional::sharedMeanAndStddev(Z(0), I_1x1, X(0), Vector1(0.0),
|
GaussianConditional::sharedMeanAndStddev(Z(0), I_1x1, X(0), Vector1(0.0),
|
||||||
commonSigma),
|
commonSigma),
|
||||||
GaussianConditional::sharedMeanAndStddev(Z(0), I_1x1, X(0), Vector1(0.0),
|
GaussianConditional::sharedMeanAndStddev(Z(0), I_1x1, X(0), Vector1(0.0),
|
||||||
commonSigma)};
|
commonSigma)};
|
||||||
const GaussianMixture mixture({Z(0)}, {X(0)}, {mode}, conditionals);
|
const HybridGaussianConditional mixture({Z(0)}, {X(0)}, {mode}, conditionals);
|
||||||
} // namespace equal_constants
|
} // namespace equal_constants
|
||||||
|
|
||||||
/* ************************************************************************* */
|
/* ************************************************************************* */
|
||||||
/// Check that invariants hold
|
/// Check that invariants hold
|
||||||
TEST(GaussianMixture, Invariants) {
|
TEST(HybridGaussianConditional, Invariants) {
|
||||||
using namespace equal_constants;
|
using namespace equal_constants;
|
||||||
|
|
||||||
// Check that the mixture normalization constant is the max of all constants
|
// Check that the mixture normalization constant is the max of all constants
|
||||||
|
|
@ -66,13 +66,13 @@ TEST(GaussianMixture, Invariants) {
|
||||||
EXPECT_DOUBLES_EQUAL(K, conditionals[0]->logNormalizationConstant(), 1e-8);
|
EXPECT_DOUBLES_EQUAL(K, conditionals[0]->logNormalizationConstant(), 1e-8);
|
||||||
EXPECT_DOUBLES_EQUAL(K, conditionals[1]->logNormalizationConstant(), 1e-8);
|
EXPECT_DOUBLES_EQUAL(K, conditionals[1]->logNormalizationConstant(), 1e-8);
|
||||||
|
|
||||||
EXPECT(GaussianMixture::CheckInvariants(mixture, hv0));
|
EXPECT(HybridGaussianConditional::CheckInvariants(mixture, hv0));
|
||||||
EXPECT(GaussianMixture::CheckInvariants(mixture, hv1));
|
+  EXPECT(HybridGaussianConditional::CheckInvariants(mixture, hv1));
 }
 
 /* ************************************************************************* */
 /// Check LogProbability.
-TEST(GaussianMixture, LogProbability) {
+TEST(HybridGaussianConditional, LogProbability) {
   using namespace equal_constants;
   auto actual = mixture.logProbability(vv);
 
@@ -94,7 +94,7 @@ TEST(GaussianMixture, LogProbability) {
 
 /* ************************************************************************* */
 /// Check error.
-TEST(GaussianMixture, Error) {
+TEST(HybridGaussianConditional, Error) {
   using namespace equal_constants;
   auto actual = mixture.errorTree(vv);
 
@@ -117,7 +117,7 @@ TEST(GaussianMixture, Error) {
 /* ************************************************************************* */
 /// Check that the likelihood is proportional to the conditional density given
 /// the measurements.
-TEST(GaussianMixture, Likelihood) {
+TEST(HybridGaussianConditional, Likelihood) {
   using namespace equal_constants;
 
   // Compute likelihood
@@ -146,19 +146,19 @@ TEST(GaussianMixture, Likelihood) {
 
 /* ************************************************************************* */
 namespace mode_dependent_constants {
-// Create a GaussianMixture with mode-dependent noise models.
+// Create a HybridGaussianConditional with mode-dependent noise models.
 // 0 is low-noise, 1 is high-noise.
 const std::vector<GaussianConditional::shared_ptr> conditionals{
     GaussianConditional::sharedMeanAndStddev(Z(0), I_1x1, X(0), Vector1(0.0),
                                              0.5),
     GaussianConditional::sharedMeanAndStddev(Z(0), I_1x1, X(0), Vector1(0.0),
                                              3.0)};
-const GaussianMixture mixture({Z(0)}, {X(0)}, {mode}, conditionals);
+const HybridGaussianConditional mixture({Z(0)}, {X(0)}, {mode}, conditionals);
 }  // namespace mode_dependent_constants
 
 /* ************************************************************************* */
 // Create a test for continuousParents.
-TEST(GaussianMixture, ContinuousParents) {
+TEST(HybridGaussianConditional, ContinuousParents) {
   using namespace mode_dependent_constants;
   const KeyVector continuousParentKeys = mixture.continuousParents();
   // Check that the continuous parent keys are correct:
@@ -169,7 +169,7 @@ TEST(GaussianMixture, ContinuousParents) {
 /* ************************************************************************* */
 /// Check that the likelihood is proportional to the conditional density given
 /// the measurements.
-TEST(GaussianMixture, Likelihood2) {
+TEST(HybridGaussianConditional, Likelihood2) {
   using namespace mode_dependent_constants;
 
   // Compute likelihood
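Aside (not part of the diff): a minimal sketch of the renamed conditional API, mirroring the mode_dependent_constants fixture in the hunks above. The discrete key `mode` is assumed here to be DiscreteKey{M(0), 2}; its actual definition lies outside the hunks shown, so treat this as an illustrative guess rather than the test's real setup.

// Sketch only: one GaussianConditional p(z0 | x0) per assignment of a binary mode.
#include <gtsam/discrete/DiscreteKey.h>
#include <gtsam/hybrid/HybridGaussianConditional.h>
#include <gtsam/inference/Symbol.h>
#include <gtsam/linear/GaussianConditional.h>

using namespace gtsam;
using symbol_shorthand::M;
using symbol_shorthand::X;
using symbol_shorthand::Z;

HybridGaussianConditional makeHybridGaussianConditionalSketch() {
  const DiscreteKey mode(M(0), 2);  // assumed definition of `mode`
  // Low-noise component for mode 0, high-noise component for mode 1.
  const std::vector<GaussianConditional::shared_ptr> conditionals{
      GaussianConditional::sharedMeanAndStddev(Z(0), I_1x1, X(0), Vector1(0.0),
                                               0.5),
      GaussianConditional::sharedMeanAndStddev(Z(0), I_1x1, X(0), Vector1(0.0),
                                               3.0)};
  // Frontal key Z(0), continuous parent X(0), discrete parent `mode`.
  return HybridGaussianConditional({Z(0)}, {X(0)}, {mode}, conditionals);
}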
@@ -10,8 +10,8 @@
  * -------------------------------------------------------------------------- */
 
 /**
- * @file testGaussianMixtureFactor.cpp
- * @brief Unit tests for GaussianMixtureFactor
+ * @file testHybridGaussianFactor.cpp
+ * @brief Unit tests for HybridGaussianFactor
  * @author Varun Agrawal
  * @author Fan Jiang
  * @author Frank Dellaert
@@ -22,9 +22,9 @@
 #include <gtsam/base/TestableAssertions.h>
 #include <gtsam/discrete/DiscreteConditional.h>
 #include <gtsam/discrete/DiscreteValues.h>
-#include <gtsam/hybrid/GaussianMixture.h>
-#include <gtsam/hybrid/GaussianMixtureFactor.h>
 #include <gtsam/hybrid/HybridBayesNet.h>
+#include <gtsam/hybrid/HybridGaussianConditional.h>
+#include <gtsam/hybrid/HybridGaussianFactor.h>
 #include <gtsam/hybrid/HybridGaussianFactorGraph.h>
 #include <gtsam/hybrid/HybridValues.h>
 #include <gtsam/inference/Symbol.h>
@@ -46,17 +46,17 @@ using symbol_shorthand::Z;
 
 /* ************************************************************************* */
 // Check iterators of empty mixture.
-TEST(GaussianMixtureFactor, Constructor) {
-  GaussianMixtureFactor factor;
-  GaussianMixtureFactor::const_iterator const_it = factor.begin();
+TEST(HybridGaussianFactor, Constructor) {
+  HybridGaussianFactor factor;
+  HybridGaussianFactor::const_iterator const_it = factor.begin();
   CHECK(const_it == factor.end());
-  GaussianMixtureFactor::iterator it = factor.begin();
+  HybridGaussianFactor::iterator it = factor.begin();
   CHECK(it == factor.end());
 }
 
 /* ************************************************************************* */
 // "Add" two mixture factors together.
-TEST(GaussianMixtureFactor, Sum) {
+TEST(HybridGaussianFactor, Sum) {
   DiscreteKey m1(1, 2), m2(2, 3);
 
   auto A1 = Matrix::Zero(2, 1);
@@ -77,8 +77,8 @@ TEST(GaussianMixtureFactor, Sum) {
   // TODO(Frank): why specify keys at all? And: keys in factor should be *all*
   // keys, deviating from Kevin's scheme. Should we index DT on DiscreteKey?
   // Design review!
-  GaussianMixtureFactor mixtureFactorA({X(1), X(2)}, {m1}, factorsA);
-  GaussianMixtureFactor mixtureFactorB({X(1), X(3)}, {m2}, factorsB);
+  HybridGaussianFactor mixtureFactorA({X(1), X(2)}, {m1}, factorsA);
+  HybridGaussianFactor mixtureFactorB({X(1), X(3)}, {m2}, factorsB);
 
   // Check that number of keys is 3
   EXPECT_LONGS_EQUAL(3, mixtureFactorA.keys().size());
@@ -102,7 +102,7 @@ TEST(GaussianMixtureFactor, Sum) {
 }
 
 /* ************************************************************************* */
-TEST(GaussianMixtureFactor, Printing) {
+TEST(HybridGaussianFactor, Printing) {
   DiscreteKey m1(1, 2);
   auto A1 = Matrix::Zero(2, 1);
   auto A2 = Matrix::Zero(2, 2);
@@ -111,10 +111,10 @@ TEST(GaussianMixtureFactor, Printing) {
   auto f11 = std::make_shared<JacobianFactor>(X(1), A1, X(2), A2, b);
   std::vector<GaussianFactor::shared_ptr> factors{f10, f11};
 
-  GaussianMixtureFactor mixtureFactor({X(1), X(2)}, {m1}, factors);
+  HybridGaussianFactor mixtureFactor({X(1), X(2)}, {m1}, factors);
 
   std::string expected =
-      R"(GaussianMixtureFactor
+      R"(HybridGaussianFactor
 Hybrid [x1 x2; 1]{
  Choice(1)
  0 Leaf :
@@ -147,7 +147,7 @@ Hybrid [x1 x2; 1]{
 }
 
 /* ************************************************************************* */
-TEST(GaussianMixtureFactor, GaussianMixture) {
+TEST(HybridGaussianFactor, HybridGaussianConditional) {
   KeyVector keys;
   keys.push_back(X(0));
   keys.push_back(X(1));
@@ -157,15 +157,15 @@ TEST(GaussianMixtureFactor, GaussianMixture) {
   dKeys.emplace_back(M(1), 2);
 
   auto gaussians = std::make_shared<GaussianConditional>();
-  GaussianMixture::Conditionals conditionals(gaussians);
-  GaussianMixture gm({}, keys, dKeys, conditionals);
+  HybridGaussianConditional::Conditionals conditionals(gaussians);
+  HybridGaussianConditional gm({}, keys, dKeys, conditionals);
 
   EXPECT_LONGS_EQUAL(2, gm.discreteKeys().size());
 }
 
 /* ************************************************************************* */
-// Test the error of the GaussianMixtureFactor
-TEST(GaussianMixtureFactor, Error) {
+// Test the error of the HybridGaussianFactor
+TEST(HybridGaussianFactor, Error) {
   DiscreteKey m1(1, 2);
 
   auto A01 = Matrix2::Identity();
@@ -180,7 +180,7 @@ TEST(GaussianMixtureFactor, Error) {
   auto f1 = std::make_shared<JacobianFactor>(X(1), A11, X(2), A12, b);
   std::vector<GaussianFactor::shared_ptr> factors{f0, f1};
 
-  GaussianMixtureFactor mixtureFactor({X(1), X(2)}, {m1}, factors);
+  HybridGaussianFactor mixtureFactor({X(1), X(2)}, {m1}, factors);
 
   VectorValues continuousValues;
   continuousValues.insert(X(1), Vector2(0, 0));
@@ -232,8 +232,8 @@ static HybridBayesNet GetGaussianMixtureModel(double mu0, double mu1,
   c1 = make_shared<GaussianConditional>(z, Vector1(mu1), I_1x1, model1);
 
   HybridBayesNet hbn;
-  hbn.emplace_shared<GaussianMixture>(KeyVector{z}, KeyVector{},
-                                      DiscreteKeys{m}, std::vector{c0, c1});
+  hbn.emplace_shared<HybridGaussianConditional>(
+      KeyVector{z}, KeyVector{}, DiscreteKeys{m}, std::vector{c0, c1});
 
   auto mixing = make_shared<DiscreteConditional>(m, "50/50");
   hbn.push_back(mixing);
@@ -253,7 +253,7 @@ static HybridBayesNet GetGaussianMixtureModel(double mu0, double mu1,
  * The resulting factor graph should eliminate to a Bayes net
  * which represents a sigmoid function.
  */
-TEST(GaussianMixtureFactor, GaussianMixtureModel) {
+TEST(HybridGaussianFactor, GaussianMixtureModel) {
   using namespace test_gmm;
 
   double mu0 = 1.0, mu1 = 3.0;
@@ -325,7 +325,7 @@ TEST(GaussianMixtureFactor, GaussianMixtureModel) {
  * which represents a Gaussian-like function
  * where m1>m0 close to 3.1333.
  */
-TEST(GaussianMixtureFactor, GaussianMixtureModel2) {
+TEST(HybridGaussianFactor, GaussianMixtureModel2) {
   using namespace test_gmm;
 
   double mu0 = 1.0, mu1 = 3.0;
@@ -399,23 +399,21 @@ void addMeasurement(HybridBayesNet& hbn, Key z_key, Key x_key, double sigma) {
 }
 
 /// Create hybrid motion model p(x1 | x0, m1)
-static GaussianMixture::shared_ptr CreateHybridMotionModel(double mu0,
-                                                            double mu1,
-                                                            double sigma0,
-                                                            double sigma1) {
+static HybridGaussianConditional::shared_ptr CreateHybridMotionModel(
+    double mu0, double mu1, double sigma0, double sigma1) {
   auto model0 = noiseModel::Isotropic::Sigma(1, sigma0);
   auto model1 = noiseModel::Isotropic::Sigma(1, sigma1);
   auto c0 = make_shared<GaussianConditional>(X(1), Vector1(mu0), I_1x1, X(0),
                                              -I_1x1, model0),
       c1 = make_shared<GaussianConditional>(X(1), Vector1(mu1), I_1x1, X(0),
                                             -I_1x1, model1);
-  return std::make_shared<GaussianMixture>(
+  return std::make_shared<HybridGaussianConditional>(
      KeyVector{X(1)}, KeyVector{X(0)}, DiscreteKeys{m1}, std::vector{c0, c1});
 }
 
 /// Create two state Bayes network with 1 or two measurement models
 HybridBayesNet CreateBayesNet(
-    const GaussianMixture::shared_ptr& hybridMotionModel,
+    const HybridGaussianConditional::shared_ptr& hybridMotionModel,
     bool add_second_measurement = false) {
   HybridBayesNet hbn;
 
@@ -439,7 +437,7 @@ HybridBayesNet CreateBayesNet(
 /// Approximate the discrete marginal P(m1) using importance sampling
 std::pair<double, double> approximateDiscreteMarginal(
     const HybridBayesNet& hbn,
-    const GaussianMixture::shared_ptr& hybridMotionModel,
+    const HybridGaussianConditional::shared_ptr& hybridMotionModel,
     const VectorValues& given, size_t N = 100000) {
   /// Create importance sampling network q(x0,x1,m) = p(x1|x0,m1) q(x0) P(m1),
   /// using q(x0) = N(z0, sigmaQ) to sample x0.
@@ -478,7 +476,7 @@ std::pair<double, double> approximateDiscreteMarginal(
  * the posterior probability of m1 should be 0.5/0.5.
  * Getting a measurement on z1 gives use more information.
  */
-TEST(GaussianMixtureFactor, TwoStateModel) {
+TEST(HybridGaussianFactor, TwoStateModel) {
   using namespace test_two_state_estimation;
 
   double mu0 = 1.0, mu1 = 3.0;
@@ -534,7 +532,7 @@ TEST(GaussianMixtureFactor, TwoStateModel) {
  * the P(m1) should be 0.5/0.5.
 * Getting a measurement on z1 gives use more information.
 */
-TEST(GaussianMixtureFactor, TwoStateModel2) {
+TEST(HybridGaussianFactor, TwoStateModel2) {
   using namespace test_two_state_estimation;
 
   double mu0 = 1.0, mu1 = 3.0;
@@ -621,7 +619,7 @@ TEST(GaussianMixtureFactor, TwoStateModel2) {
 * measurements and vastly different motion model: either stand still or move
 * far. This yields a very informative posterior.
 */
-TEST(GaussianMixtureFactor, TwoStateModel3) {
+TEST(HybridGaussianFactor, TwoStateModel3) {
   using namespace test_two_state_estimation;
 
   double mu0 = 0.0, mu1 = 10.0;
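Aside (not part of the diff): a minimal sketch of the renamed factor API, mirroring the Printing and Error tests above. Matrix sizes and keys are copied from those tests; only the construction pattern (one GaussianFactor per discrete assignment) is the point, and the helper name is made up for illustration.

// Sketch only: a HybridGaussianFactor built from one JacobianFactor per mode.
#include <gtsam/discrete/DiscreteKey.h>
#include <gtsam/hybrid/HybridGaussianFactor.h>
#include <gtsam/inference/Symbol.h>
#include <gtsam/linear/JacobianFactor.h>

using namespace gtsam;
using symbol_shorthand::X;

HybridGaussianFactor makeHybridGaussianFactorSketch() {
  DiscreteKey m1(1, 2);  // binary mode, same key as in the tests above
  Matrix A1 = Matrix::Zero(2, 1);
  Matrix A2 = Matrix::Zero(2, 2);
  Vector2 b = Vector2::Zero();
  // One Gaussian factor per assignment of m1.
  auto f10 = std::make_shared<JacobianFactor>(X(1), A1, X(2), A2, b);
  auto f11 = std::make_shared<JacobianFactor>(X(1), A1, X(2), A2, b);
  std::vector<GaussianFactor::shared_ptr> factors{f10, f11};
  // Continuous keys, discrete keys, then the per-mode components.
  return HybridGaussianFactor({X(1), X(2)}, {m1}, factors);
}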
@@ -21,12 +21,12 @@
 #include <gtsam/discrete/DecisionTreeFactor.h>
 #include <gtsam/discrete/DiscreteKey.h>
 #include <gtsam/discrete/DiscreteValues.h>
-#include <gtsam/hybrid/GaussianMixture.h>
-#include <gtsam/hybrid/GaussianMixtureFactor.h>
 #include <gtsam/hybrid/HybridBayesNet.h>
 #include <gtsam/hybrid/HybridBayesTree.h>
 #include <gtsam/hybrid/HybridConditional.h>
 #include <gtsam/hybrid/HybridFactor.h>
+#include <gtsam/hybrid/HybridGaussianConditional.h>
+#include <gtsam/hybrid/HybridGaussianFactor.h>
 #include <gtsam/hybrid/HybridGaussianFactorGraph.h>
 #include <gtsam/hybrid/HybridGaussianISAM.h>
 #include <gtsam/hybrid/HybridValues.h>
@@ -71,13 +71,14 @@ TEST(HybridGaussianFactorGraph, Creation) {
 
   // Define a gaussian mixture conditional P(x0|x1, c0) and add it to the factor
   // graph
-  GaussianMixture gm({X(0)}, {X(1)}, DiscreteKeys(DiscreteKey{M(0), 2}),
-                     GaussianMixture::Conditionals(
-                         M(0),
-                         std::make_shared<GaussianConditional>(
-                             X(0), Z_3x1, I_3x3, X(1), I_3x3),
-                         std::make_shared<GaussianConditional>(
-                             X(0), Vector3::Ones(), I_3x3, X(1), I_3x3)));
+  HybridGaussianConditional gm(
+      {X(0)}, {X(1)}, DiscreteKeys(DiscreteKey{M(0), 2}),
+      HybridGaussianConditional::Conditionals(
+          M(0),
+          std::make_shared<GaussianConditional>(X(0), Z_3x1, I_3x3, X(1),
+                                                I_3x3),
+          std::make_shared<GaussianConditional>(X(0), Vector3::Ones(), I_3x3,
+                                                X(1), I_3x3)));
   hfg.add(gm);
 
   EXPECT_LONGS_EQUAL(2, hfg.size());
@@ -129,7 +130,7 @@ TEST(HybridGaussianFactorGraph, eliminateFullSequentialEqualChance) {
   DecisionTree<Key, GaussianFactor::shared_ptr> dt(
       M(1), std::make_shared<JacobianFactor>(X(1), I_3x3, Z_3x1),
       std::make_shared<JacobianFactor>(X(1), I_3x3, Vector3::Ones()));
-  hfg.add(GaussianMixtureFactor({X(1)}, {m1}, dt));
+  hfg.add(HybridGaussianFactor({X(1)}, {m1}, dt));
 
   auto result = hfg.eliminateSequential();
 
@@ -155,7 +156,7 @@ TEST(HybridGaussianFactorGraph, eliminateFullSequentialSimple) {
   std::vector<GaussianFactor::shared_ptr> factors = {
       std::make_shared<JacobianFactor>(X(1), I_3x3, Z_3x1),
       std::make_shared<JacobianFactor>(X(1), I_3x3, Vector3::Ones())};
-  hfg.add(GaussianMixtureFactor({X(1)}, {m1}, factors));
+  hfg.add(HybridGaussianFactor({X(1)}, {m1}, factors));
 
   // Discrete probability table for c1
   hfg.add(DecisionTreeFactor(m1, {2, 8}));
@@ -177,7 +178,7 @@ TEST(HybridGaussianFactorGraph, eliminateFullMultifrontalSimple) {
   hfg.add(JacobianFactor(X(0), I_3x3, Z_3x1));
   hfg.add(JacobianFactor(X(0), I_3x3, X(1), -I_3x3, Z_3x1));
 
-  hfg.add(GaussianMixtureFactor(
+  hfg.add(HybridGaussianFactor(
       {X(1)}, {{M(1), 2}},
       {std::make_shared<JacobianFactor>(X(1), I_3x3, Z_3x1),
        std::make_shared<JacobianFactor>(X(1), I_3x3, Vector3::Ones())}));
@@ -212,7 +213,7 @@ TEST(HybridGaussianFactorGraph, eliminateFullMultifrontalCLG) {
       std::make_shared<JacobianFactor>(X(1), I_3x3, Vector3::Ones()));
 
   // Hybrid factor P(x1|c1)
-  hfg.add(GaussianMixtureFactor({X(1)}, {m}, dt));
+  hfg.add(HybridGaussianFactor({X(1)}, {m}, dt));
   // Prior factor on c1
   hfg.add(DecisionTreeFactor(m, {2, 8}));
 
@@ -237,7 +238,7 @@ TEST(HybridGaussianFactorGraph, eliminateFullMultifrontalTwoClique) {
   hfg.add(JacobianFactor(X(1), I_3x3, X(2), -I_3x3, Z_3x1));
 
   {
-    hfg.add(GaussianMixtureFactor(
+    hfg.add(HybridGaussianFactor(
         {X(0)}, {{M(0), 2}},
         {std::make_shared<JacobianFactor>(X(0), I_3x3, Z_3x1),
          std::make_shared<JacobianFactor>(X(0), I_3x3, Vector3::Ones())}));
@@ -246,7 +247,7 @@ TEST(HybridGaussianFactorGraph, eliminateFullMultifrontalTwoClique) {
         M(1), std::make_shared<JacobianFactor>(X(2), I_3x3, Z_3x1),
         std::make_shared<JacobianFactor>(X(2), I_3x3, Vector3::Ones()));
 
-    hfg.add(GaussianMixtureFactor({X(2)}, {{M(1), 2}}, dt1));
+    hfg.add(HybridGaussianFactor({X(2)}, {{M(1), 2}}, dt1));
   }
 
   hfg.add(DecisionTreeFactor({{M(1), 2}, {M(2), 2}}, "1 2 3 4"));
@@ -259,13 +260,13 @@ TEST(HybridGaussianFactorGraph, eliminateFullMultifrontalTwoClique) {
         M(3), std::make_shared<JacobianFactor>(X(3), I_3x3, Z_3x1),
         std::make_shared<JacobianFactor>(X(3), I_3x3, Vector3::Ones()));
 
-    hfg.add(GaussianMixtureFactor({X(3)}, {{M(3), 2}}, dt));
+    hfg.add(HybridGaussianFactor({X(3)}, {{M(3), 2}}, dt));
 
     DecisionTree<Key, GaussianFactor::shared_ptr> dt1(
         M(2), std::make_shared<JacobianFactor>(X(5), I_3x3, Z_3x1),
         std::make_shared<JacobianFactor>(X(5), I_3x3, Vector3::Ones()));
 
-    hfg.add(GaussianMixtureFactor({X(5)}, {{M(2), 2}}, dt1));
+    hfg.add(HybridGaussianFactor({X(5)}, {{M(2), 2}}, dt1));
   }
 
   auto ordering_full =
@@ -555,7 +556,7 @@ TEST(HybridGaussianFactorGraph, optimize) {
       C(1), std::make_shared<JacobianFactor>(X(1), I_3x3, Z_3x1),
       std::make_shared<JacobianFactor>(X(1), I_3x3, Vector3::Ones()));
 
-  hfg.add(GaussianMixtureFactor({X(1)}, {c1}, dt));
+  hfg.add(HybridGaussianFactor({X(1)}, {c1}, dt));
 
   auto result = hfg.eliminateSequential();
 
@@ -681,8 +682,8 @@ TEST(HybridGaussianFactorGraph, ErrorTreeWithConditional) {
           x0, -I_1x1, model0),
       c1 = make_shared<GaussianConditional>(f01, Vector1(mu), I_1x1, x1, I_1x1,
                                             x0, -I_1x1, model1);
-  hbn.emplace_shared<GaussianMixture>(KeyVector{f01}, KeyVector{x0, x1},
-                                      DiscreteKeys{m1}, std::vector{c0, c1});
+  hbn.emplace_shared<HybridGaussianConditional>(
+      KeyVector{f01}, KeyVector{x0, x1}, DiscreteKeys{m1}, std::vector{c0, c1});
 
   // Discrete uniform prior.
   hbn.emplace_shared<DiscreteConditional>(m1, "0.5/0.5");
@@ -717,7 +718,7 @@ TEST(HybridGaussianFactorGraph, assembleGraphTree) {
   // Create expected decision tree with two factor graphs:
 
   // Get mixture factor:
-  auto mixture = fg.at<GaussianMixtureFactor>(0);
+  auto mixture = fg.at<HybridGaussianFactor>(0);
   CHECK(mixture);
 
   // Get prior factor:
@@ -805,7 +806,7 @@ TEST(HybridGaussianFactorGraph, EliminateTiny1) {
           X(0), Vector1(14.1421), I_1x1 * 2.82843),
       conditional1 = std::make_shared<GaussianConditional>(
           X(0), Vector1(10.1379), I_1x1 * 2.02759);
-  expectedBayesNet.emplace_shared<GaussianMixture>(
+  expectedBayesNet.emplace_shared<HybridGaussianConditional>(
       KeyVector{X(0)}, KeyVector{}, DiscreteKeys{mode},
       std::vector{conditional0, conditional1});
 
@@ -830,7 +831,7 @@ TEST(HybridGaussianFactorGraph, EliminateTiny1Swapped) {
   HybridBayesNet bn;
 
   // Create Gaussian mixture z_0 = x0 + noise for each measurement.
-  auto gm = std::make_shared<GaussianMixture>(
+  auto gm = std::make_shared<HybridGaussianConditional>(
       KeyVector{Z(0)}, KeyVector{X(0)}, DiscreteKeys{mode},
       std::vector{
          GaussianConditional::sharedMeanAndStddev(Z(0), I_1x1, X(0), Z_1x1, 3),
@@ -862,7 +863,7 @@ TEST(HybridGaussianFactorGraph, EliminateTiny1Swapped) {
           X(0), Vector1(10.1379), I_1x1 * 2.02759),
       conditional1 = std::make_shared<GaussianConditional>(
           X(0), Vector1(14.1421), I_1x1 * 2.82843);
-  expectedBayesNet.emplace_shared<GaussianMixture>(
+  expectedBayesNet.emplace_shared<HybridGaussianConditional>(
      KeyVector{X(0)}, KeyVector{}, DiscreteKeys{mode},
      std::vector{conditional0, conditional1});
 
@@ -899,7 +900,7 @@ TEST(HybridGaussianFactorGraph, EliminateTiny2) {
           X(0), Vector1(17.3205), I_1x1 * 3.4641),
       conditional1 = std::make_shared<GaussianConditional>(
           X(0), Vector1(10.274), I_1x1 * 2.0548);
-  expectedBayesNet.emplace_shared<GaussianMixture>(
+  expectedBayesNet.emplace_shared<HybridGaussianConditional>(
      KeyVector{X(0)}, KeyVector{}, DiscreteKeys{mode},
      std::vector{conditional0, conditional1});
 
@@ -946,7 +947,7 @@ TEST(HybridGaussianFactorGraph, EliminateSwitchingNetwork) {
   for (size_t t : {0, 1, 2}) {
     // Create Gaussian mixture on Z(t) conditioned on X(t) and mode N(t):
     const auto noise_mode_t = DiscreteKey{N(t), 2};
-    bn.emplace_shared<GaussianMixture>(
+    bn.emplace_shared<HybridGaussianConditional>(
        KeyVector{Z(t)}, KeyVector{X(t)}, DiscreteKeys{noise_mode_t},
        std::vector{GaussianConditional::sharedMeanAndStddev(Z(t), I_1x1, X(t),
                                                             Z_1x1, 0.5),
@@ -961,7 +962,7 @@ TEST(HybridGaussianFactorGraph, EliminateSwitchingNetwork) {
   for (size_t t : {2, 1}) {
     // Create Gaussian mixture on X(t) conditioned on X(t-1) and mode M(t-1):
     const auto motion_model_t = DiscreteKey{M(t), 2};
-    auto gm = std::make_shared<GaussianMixture>(
+    auto gm = std::make_shared<HybridGaussianConditional>(
        KeyVector{X(t)}, KeyVector{X(t - 1)}, DiscreteKeys{motion_model_t},
        std::vector{GaussianConditional::sharedMeanAndStddev(
                        X(t), I_1x1, X(t - 1), Z_1x1, 0.2),
@@ -126,31 +126,34 @@ TEST(HybridGaussianElimination, IncrementalInference) {
 
   /********************************************************/
   // Run batch elimination so we can compare results.
-  const Ordering ordering {X(0), X(1), X(2)};
+  const Ordering ordering{X(0), X(1), X(2)};
 
   // Now we calculate the expected factors using full elimination
   const auto [expectedHybridBayesTree, expectedRemainingGraph] =
       switching.linearizedFactorGraph.eliminatePartialMultifrontal(ordering);
 
   // The densities on X(0) should be the same
-  auto x0_conditional =
-      dynamic_pointer_cast<GaussianMixture>(isam[X(0)]->conditional()->inner());
-  auto expected_x0_conditional = dynamic_pointer_cast<GaussianMixture>(
-      (*expectedHybridBayesTree)[X(0)]->conditional()->inner());
+  auto x0_conditional = dynamic_pointer_cast<HybridGaussianConditional>(
+      isam[X(0)]->conditional()->inner());
+  auto expected_x0_conditional =
+      dynamic_pointer_cast<HybridGaussianConditional>(
+          (*expectedHybridBayesTree)[X(0)]->conditional()->inner());
   EXPECT(assert_equal(*x0_conditional, *expected_x0_conditional));
 
   // The densities on X(1) should be the same
-  auto x1_conditional =
-      dynamic_pointer_cast<GaussianMixture>(isam[X(1)]->conditional()->inner());
-  auto expected_x1_conditional = dynamic_pointer_cast<GaussianMixture>(
-      (*expectedHybridBayesTree)[X(1)]->conditional()->inner());
+  auto x1_conditional = dynamic_pointer_cast<HybridGaussianConditional>(
+      isam[X(1)]->conditional()->inner());
+  auto expected_x1_conditional =
+      dynamic_pointer_cast<HybridGaussianConditional>(
+          (*expectedHybridBayesTree)[X(1)]->conditional()->inner());
   EXPECT(assert_equal(*x1_conditional, *expected_x1_conditional));
 
   // The densities on X(2) should be the same
-  auto x2_conditional =
-      dynamic_pointer_cast<GaussianMixture>(isam[X(2)]->conditional()->inner());
-  auto expected_x2_conditional = dynamic_pointer_cast<GaussianMixture>(
-      (*expectedHybridBayesTree)[X(2)]->conditional()->inner());
+  auto x2_conditional = dynamic_pointer_cast<HybridGaussianConditional>(
+      isam[X(2)]->conditional()->inner());
+  auto expected_x2_conditional =
+      dynamic_pointer_cast<HybridGaussianConditional>(
+          (*expectedHybridBayesTree)[X(2)]->conditional()->inner());
   EXPECT(assert_equal(*x2_conditional, *expected_x2_conditional));
 
   // We only perform manual continuous elimination for 0,0.
@@ -279,9 +282,9 @@ TEST(HybridGaussianElimination, Approx_inference) {
 
   // Check that the hybrid nodes of the bayes net match those of the pre-pruning
   // bayes net, at the same positions.
-  auto &unprunedLastDensity = *dynamic_pointer_cast<GaussianMixture>(
+  auto &unprunedLastDensity = *dynamic_pointer_cast<HybridGaussianConditional>(
      unprunedHybridBayesTree->clique(X(3))->conditional()->inner());
-  auto &lastDensity = *dynamic_pointer_cast<GaussianMixture>(
+  auto &lastDensity = *dynamic_pointer_cast<HybridGaussianConditional>(
      incrementalHybrid[X(3)]->conditional()->inner());
 
   std::vector<std::pair<DiscreteValues, double>> assignments =
@@ -381,11 +384,11 @@ TEST(HybridGaussianISAM, NonTrivial) {
 
   // Add connecting poses similar to PoseFactors in GTD
   fg.emplace_shared<BetweenFactor<Pose2>>(X(0), Y(0), Pose2(0, 1.0, 0),
                                           poseNoise);
   fg.emplace_shared<BetweenFactor<Pose2>>(Y(0), Z(0), Pose2(0, 1.0, 0),
                                           poseNoise);
   fg.emplace_shared<BetweenFactor<Pose2>>(Z(0), W(0), Pose2(0, 1.0, 0),
                                           poseNoise);
 
   // Create initial estimate
   Values initial;
@@ -414,24 +417,24 @@ TEST(HybridGaussianISAM, NonTrivial) {
   KeyVector contKeys = {W(0), W(1)};
   auto noise_model = noiseModel::Isotropic::Sigma(3, 1.0);
   auto still = std::make_shared<PlanarMotionModel>(W(0), W(1), Pose2(0, 0, 0),
                                                    noise_model),
       moving = std::make_shared<PlanarMotionModel>(W(0), W(1), odometry,
                                                    noise_model);
   std::vector<PlanarMotionModel::shared_ptr> components = {moving, still};
-  auto mixtureFactor = std::make_shared<MixtureFactor>(
+  auto mixtureFactor = std::make_shared<HybridNonlinearFactor>(
      contKeys, DiscreteKeys{gtsam::DiscreteKey(M(1), 2)}, components);
   fg.push_back(mixtureFactor);
 
   // Add equivalent of ImuFactor
   fg.emplace_shared<BetweenFactor<Pose2>>(X(0), X(1), Pose2(1.0, 0.0, 0),
                                           poseNoise);
   // PoseFactors-like at k=1
   fg.emplace_shared<BetweenFactor<Pose2>>(X(1), Y(1), Pose2(0, 1, 0),
                                           poseNoise);
   fg.emplace_shared<BetweenFactor<Pose2>>(Y(1), Z(1), Pose2(0, 1, 0),
                                           poseNoise);
   fg.emplace_shared<BetweenFactor<Pose2>>(Z(1), W(1), Pose2(-1, 1, 0),
                                           poseNoise);
 
   initial.insert(X(1), Pose2(1.0, 0.0, 0.0));
   initial.insert(Y(1), Pose2(1.0, 1.0, 0.0));
@@ -454,24 +457,24 @@ TEST(HybridGaussianISAM, NonTrivial) {
   // Add odometry factor with discrete modes.
   contKeys = {W(1), W(2)};
   still = std::make_shared<PlanarMotionModel>(W(1), W(2), Pose2(0, 0, 0),
                                               noise_model);
   moving =
      std::make_shared<PlanarMotionModel>(W(1), W(2), odometry, noise_model);
   components = {moving, still};
-  mixtureFactor = std::make_shared<MixtureFactor>(
+  mixtureFactor = std::make_shared<HybridNonlinearFactor>(
      contKeys, DiscreteKeys{gtsam::DiscreteKey(M(2), 2)}, components);
   fg.push_back(mixtureFactor);
 
   // Add equivalent of ImuFactor
   fg.emplace_shared<BetweenFactor<Pose2>>(X(1), X(2), Pose2(1.0, 0.0, 0),
                                           poseNoise);
   // PoseFactors-like at k=1
   fg.emplace_shared<BetweenFactor<Pose2>>(X(2), Y(2), Pose2(0, 1, 0),
                                           poseNoise);
   fg.emplace_shared<BetweenFactor<Pose2>>(Y(2), Z(2), Pose2(0, 1, 0),
                                           poseNoise);
   fg.emplace_shared<BetweenFactor<Pose2>>(Z(2), W(2), Pose2(-2, 1, 0),
                                           poseNoise);
 
   initial.insert(X(2), Pose2(2.0, 0.0, 0.0));
   initial.insert(Y(2), Pose2(2.0, 1.0, 0.0));
@@ -497,24 +500,24 @@ TEST(HybridGaussianISAM, NonTrivial) {
   // Add odometry factor with discrete modes.
   contKeys = {W(2), W(3)};
   still = std::make_shared<PlanarMotionModel>(W(2), W(3), Pose2(0, 0, 0),
                                               noise_model);
   moving =
      std::make_shared<PlanarMotionModel>(W(2), W(3), odometry, noise_model);
   components = {moving, still};
-  mixtureFactor = std::make_shared<MixtureFactor>(
+  mixtureFactor = std::make_shared<HybridNonlinearFactor>(
      contKeys, DiscreteKeys{gtsam::DiscreteKey(M(3), 2)}, components);
   fg.push_back(mixtureFactor);
 
   // Add equivalent of ImuFactor
   fg.emplace_shared<BetweenFactor<Pose2>>(X(2), X(3), Pose2(1.0, 0.0, 0),
                                           poseNoise);
   // PoseFactors-like at k=3
   fg.emplace_shared<BetweenFactor<Pose2>>(X(3), Y(3), Pose2(0, 1, 0),
                                           poseNoise);
   fg.emplace_shared<BetweenFactor<Pose2>>(Y(3), Z(3), Pose2(0, 1, 0),
                                           poseNoise);
   fg.emplace_shared<BetweenFactor<Pose2>>(Z(3), W(3), Pose2(-3, 1, 0),
                                           poseNoise);
 
   initial.insert(X(3), Pose2(3.0, 0.0, 0.0));
   initial.insert(Y(3), Pose2(3.0, 1.0, 0.0));
@@ -10,8 +10,8 @@
  * -------------------------------------------------------------------------- */
 
 /**
- * @file testMixtureFactor.cpp
- * @brief Unit tests for MixtureFactor
+ * @file testHybridNonlinearFactor.cpp
+ * @brief Unit tests for HybridNonlinearFactor
  * @author Varun Agrawal
  * @date October 2022
  */
@@ -20,8 +20,8 @@
 #include <gtsam/discrete/DiscreteValues.h>
 #include <gtsam/hybrid/HybridBayesNet.h>
 #include <gtsam/hybrid/HybridGaussianFactorGraph.h>
+#include <gtsam/hybrid/HybridNonlinearFactor.h>
 #include <gtsam/hybrid/HybridNonlinearFactorGraph.h>
-#include <gtsam/hybrid/MixtureFactor.h>
 #include <gtsam/inference/Symbol.h>
 #include <gtsam/slam/BetweenFactor.h>
 
@@ -36,17 +36,17 @@ using symbol_shorthand::X;
 
 /* ************************************************************************* */
 // Check iterators of empty mixture.
-TEST(MixtureFactor, Constructor) {
-  MixtureFactor factor;
-  MixtureFactor::const_iterator const_it = factor.begin();
+TEST(HybridNonlinearFactor, Constructor) {
+  HybridNonlinearFactor factor;
+  HybridNonlinearFactor::const_iterator const_it = factor.begin();
   CHECK(const_it == factor.end());
-  MixtureFactor::iterator it = factor.begin();
+  HybridNonlinearFactor::iterator it = factor.begin();
   CHECK(it == factor.end());
 }
 
 /* ************************************************************************* */
 // Test .print() output.
-TEST(MixtureFactor, Printing) {
+TEST(HybridNonlinearFactor, Printing) {
   DiscreteKey m1(1, 2);
   double between0 = 0.0;
   double between1 = 1.0;
@@ -60,11 +60,11 @@ TEST(MixtureFactor, Printing) {
       std::make_shared<BetweenFactor<double>>(X(1), X(2), between1, model);
   std::vector<NonlinearFactor::shared_ptr> factors{f0, f1};
 
-  MixtureFactor mixtureFactor({X(1), X(2)}, {m1}, factors);
+  HybridNonlinearFactor mixtureFactor({X(1), X(2)}, {m1}, factors);
 
   std::string expected =
      R"(Hybrid [x1 x2; 1]
-MixtureFactor
+HybridNonlinearFactor
  Choice(1)
  0 Leaf Nonlinear factor on 2 keys
  1 Leaf Nonlinear factor on 2 keys
@@ -73,7 +73,7 @@ MixtureFactor
 }
 
 /* ************************************************************************* */
-static MixtureFactor getMixtureFactor() {
+static HybridNonlinearFactor getHybridNonlinearFactor() {
   DiscreteKey m1(1, 2);
 
   double between0 = 0.0;
@@ -88,13 +88,13 @@ static MixtureFactor getMixtureFactor() {
       std::make_shared<BetweenFactor<double>>(X(1), X(2), between1, model);
   std::vector<NonlinearFactor::shared_ptr> factors{f0, f1};
 
-  return MixtureFactor({X(1), X(2)}, {m1}, factors);
+  return HybridNonlinearFactor({X(1), X(2)}, {m1}, factors);
 }
 
 /* ************************************************************************* */
-// Test the error of the MixtureFactor
-TEST(MixtureFactor, Error) {
-  auto mixtureFactor = getMixtureFactor();
+// Test the error of the HybridNonlinearFactor
+TEST(HybridNonlinearFactor, Error) {
+  auto mixtureFactor = getHybridNonlinearFactor();
 
   Values continuousValues;
   continuousValues.insert<double>(X(1), 0);
@@ -112,9 +112,9 @@ TEST(MixtureFactor, Error) {
 }
 
 /* ************************************************************************* */
-// Test dim of the MixtureFactor
-TEST(MixtureFactor, Dim) {
-  auto mixtureFactor = getMixtureFactor();
+// Test dim of the HybridNonlinearFactor
+TEST(HybridNonlinearFactor, Dim) {
+  auto mixtureFactor = getHybridNonlinearFactor();
   EXPECT_LONGS_EQUAL(1, mixtureFactor.dim());
 }
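Aside (not part of the diff): a minimal sketch of the renamed nonlinear API, mirroring getHybridNonlinearFactor() above. The noise model is an assumption, since its definition falls outside the hunks shown; everything else follows the constructor call visible in the diff.

// Sketch only: a HybridNonlinearFactor with one BetweenFactor per mode.
#include <gtsam/discrete/DiscreteKey.h>
#include <gtsam/hybrid/HybridNonlinearFactor.h>
#include <gtsam/inference/Symbol.h>
#include <gtsam/linear/NoiseModel.h>
#include <gtsam/slam/BetweenFactor.h>

using namespace gtsam;
using symbol_shorthand::X;

HybridNonlinearFactor makeHybridNonlinearFactorSketch() {
  DiscreteKey m1(1, 2);
  double between0 = 0.0, between1 = 1.0;
  // Assumed noise model; the test defines `model` outside the visible hunks.
  auto model = noiseModel::Isotropic::Sigma(1, 1.0);
  auto f0 =
      std::make_shared<BetweenFactor<double>>(X(1), X(2), between0, model);
  auto f1 =
      std::make_shared<BetweenFactor<double>>(X(1), X(2), between1, model);
  std::vector<NonlinearFactor::shared_ptr> factors{f0, f1};
  // One nonlinear component per assignment of the binary mode m1.
  return HybridNonlinearFactor({X(1), X(2)}, {m1}, factors);
}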
@@ -23,8 +23,8 @@
 #include <gtsam/geometry/Pose2.h>
 #include <gtsam/hybrid/HybridEliminationTree.h>
 #include <gtsam/hybrid/HybridFactor.h>
+#include <gtsam/hybrid/HybridNonlinearFactor.h>
 #include <gtsam/hybrid/HybridNonlinearFactorGraph.h>
-#include <gtsam/hybrid/MixtureFactor.h>
 #include <gtsam/linear/GaussianBayesNet.h>
 #include <gtsam/linear/GaussianFactorGraph.h>
 #include <gtsam/nonlinear/NonlinearFactorGraph.h>
@@ -105,7 +105,7 @@ TEST(HybridNonlinearFactorGraph, Resize) {
   auto discreteFactor = std::make_shared<DecisionTreeFactor>();
   fg.push_back(discreteFactor);
 
-  auto dcFactor = std::make_shared<MixtureFactor>();
+  auto dcFactor = std::make_shared<HybridNonlinearFactor>();
   fg.push_back(dcFactor);
 
   EXPECT_LONGS_EQUAL(fg.size(), 3);
@@ -132,7 +132,7 @@ TEST(HybridGaussianFactorGraph, Resize) {
   moving = std::make_shared<MotionModel>(X(0), X(1), 1.0, noise_model);
 
   std::vector<MotionModel::shared_ptr> components = {still, moving};
-  auto dcFactor = std::make_shared<MixtureFactor>(
+  auto dcFactor = std::make_shared<HybridNonlinearFactor>(
      contKeys, DiscreteKeys{gtsam::DiscreteKey(M(1), 2)}, components);
   nhfg.push_back(dcFactor);
 
@@ -150,10 +150,10 @@ TEST(HybridGaussianFactorGraph, Resize) {
 }
 
 /***************************************************************************
- * Test that the MixtureFactor reports correctly if the number of continuous
- * keys provided do not match the keys in the factors.
+ * Test that the HybridNonlinearFactor reports correctly if the number of
+ * continuous keys provided do not match the keys in the factors.
 */
-TEST(HybridGaussianFactorGraph, MixtureFactor) {
+TEST(HybridGaussianFactorGraph, HybridNonlinearFactor) {
   auto nonlinearFactor = std::make_shared<BetweenFactor<double>>(
      X(0), X(1), 0.0, Isotropic::Sigma(1, 0.1));
   auto discreteFactor = std::make_shared<DecisionTreeFactor>();
@@ -166,12 +166,12 @@ TEST(HybridGaussianFactorGraph, MixtureFactor) {
 
   // Check for exception when number of continuous keys are under-specified.
   KeyVector contKeys = {X(0)};
-  THROWS_EXCEPTION(std::make_shared<MixtureFactor>(
+  THROWS_EXCEPTION(std::make_shared<HybridNonlinearFactor>(
      contKeys, DiscreteKeys{gtsam::DiscreteKey(M(1), 2)}, components));
 
   // Check for exception when number of continuous keys are too many.
   contKeys = {X(0), X(1), X(2)};
-  THROWS_EXCEPTION(std::make_shared<MixtureFactor>(
+  THROWS_EXCEPTION(std::make_shared<HybridNonlinearFactor>(
      contKeys, DiscreteKeys{gtsam::DiscreteKey(M(1), 2)}, components));
 }
 
@@ -195,7 +195,7 @@ TEST(HybridFactorGraph, PushBack) {
 
   fg = HybridNonlinearFactorGraph();
 
-  auto dcFactor = std::make_shared<MixtureFactor>();
+  auto dcFactor = std::make_shared<HybridNonlinearFactor>();
   fg.push_back(dcFactor);
 
   EXPECT_LONGS_EQUAL(fg.size(), 1);
@@ -350,7 +350,8 @@ TEST(HybridGaussianElimination, EliminateHybrid_2_Variable) {
       EliminateHybrid(factors, ordering);
 
   auto gaussianConditionalMixture =
-      dynamic_pointer_cast<GaussianMixture>(hybridConditionalMixture->inner());
+      dynamic_pointer_cast<HybridGaussianConditional>(
+          hybridConditionalMixture->inner());
 
   CHECK(gaussianConditionalMixture);
   // Frontals = [x0, x1]
@@ -413,7 +414,8 @@ TEST(HybridFactorGraph, PrintErrors) {
   // fg.print();
   // std::cout << "\n\n\n" << std::endl;
   // fg.printErrors(
-  //     HybridValues(hv.continuous(), DiscreteValues(), self.linearizationPoint));
+  //     HybridValues(hv.continuous(), DiscreteValues(),
+  //     self.linearizationPoint));
 }
 
 /****************************************************************************
@@ -510,7 +512,7 @@ factor 0:
   b = [ -10 ]
   No noise model
 factor 1:
-GaussianMixtureFactor
+HybridGaussianFactor
 Hybrid [x0 x1; m0]{
  Choice(m0)
  0 Leaf :
@@ -535,7 +537,7 @@ Hybrid [x0 x1; m0]{
 
 }
 factor 2:
-GaussianMixtureFactor
+HybridGaussianFactor
 Hybrid [x1 x2; m1]{
  Choice(m1)
  0 Leaf :
@@ -800,7 +802,7 @@ TEST(HybridFactorGraph, DefaultDecisionTree) {
   moving = std::make_shared<PlanarMotionModel>(X(0), X(1), odometry,
                                                noise_model);
   std::vector<PlanarMotionModel::shared_ptr> motion_models = {still, moving};
-  fg.emplace_shared<MixtureFactor>(
+  fg.emplace_shared<HybridNonlinearFactor>(
      contKeys, DiscreteKeys{gtsam::DiscreteKey(M(1), 2)}, motion_models);
 
   // Add Range-Bearing measurements to from X0 to L0 and X1 to L1.
@ -143,7 +143,7 @@ TEST(HybridNonlinearISAM, IncrementalInference) {
|
||||||
|
|
||||||
/********************************************************/
|
/********************************************************/
|
||||||
// Run batch elimination so we can compare results.
|
// Run batch elimination so we can compare results.
|
||||||
const Ordering ordering {X(0), X(1), X(2)};
|
const Ordering ordering{X(0), X(1), X(2)};
|
||||||
|
|
||||||
// Now we calculate the actual factors using full elimination
|
// Now we calculate the actual factors using full elimination
|
||||||
const auto [expectedHybridBayesTree, expectedRemainingGraph] =
|
const auto [expectedHybridBayesTree, expectedRemainingGraph] =
|
||||||
|
|
@ -151,24 +151,27 @@ TEST(HybridNonlinearISAM, IncrementalInference) {
           .BaseEliminateable::eliminatePartialMultifrontal(ordering);

   // The densities on X(1) should be the same
-  auto x0_conditional = dynamic_pointer_cast<GaussianMixture>(
+  auto x0_conditional = dynamic_pointer_cast<HybridGaussianConditional>(
       bayesTree[X(0)]->conditional()->inner());
-  auto expected_x0_conditional = dynamic_pointer_cast<GaussianMixture>(
-      (*expectedHybridBayesTree)[X(0)]->conditional()->inner());
+  auto expected_x0_conditional =
+      dynamic_pointer_cast<HybridGaussianConditional>(
+          (*expectedHybridBayesTree)[X(0)]->conditional()->inner());
   EXPECT(assert_equal(*x0_conditional, *expected_x0_conditional));

   // The densities on X(1) should be the same
-  auto x1_conditional = dynamic_pointer_cast<GaussianMixture>(
+  auto x1_conditional = dynamic_pointer_cast<HybridGaussianConditional>(
       bayesTree[X(1)]->conditional()->inner());
-  auto expected_x1_conditional = dynamic_pointer_cast<GaussianMixture>(
-      (*expectedHybridBayesTree)[X(1)]->conditional()->inner());
+  auto expected_x1_conditional =
+      dynamic_pointer_cast<HybridGaussianConditional>(
+          (*expectedHybridBayesTree)[X(1)]->conditional()->inner());
   EXPECT(assert_equal(*x1_conditional, *expected_x1_conditional));

   // The densities on X(2) should be the same
-  auto x2_conditional = dynamic_pointer_cast<GaussianMixture>(
+  auto x2_conditional = dynamic_pointer_cast<HybridGaussianConditional>(
       bayesTree[X(2)]->conditional()->inner());
-  auto expected_x2_conditional = dynamic_pointer_cast<GaussianMixture>(
-      (*expectedHybridBayesTree)[X(2)]->conditional()->inner());
+  auto expected_x2_conditional =
+      dynamic_pointer_cast<HybridGaussianConditional>(
+          (*expectedHybridBayesTree)[X(2)]->conditional()->inner());
   EXPECT(assert_equal(*x2_conditional, *expected_x2_conditional));

   // We only perform manual continuous elimination for 0,0.
@ -300,9 +303,9 @@ TEST(HybridNonlinearISAM, Approx_inference) {

   // Check that the hybrid nodes of the bayes net match those of the pre-pruning
   // bayes net, at the same positions.
-  auto &unprunedLastDensity = *dynamic_pointer_cast<GaussianMixture>(
+  auto &unprunedLastDensity = *dynamic_pointer_cast<HybridGaussianConditional>(
       unprunedHybridBayesTree->clique(X(3))->conditional()->inner());
-  auto &lastDensity = *dynamic_pointer_cast<GaussianMixture>(
+  auto &lastDensity = *dynamic_pointer_cast<HybridGaussianConditional>(
       bayesTree[X(3)]->conditional()->inner());

   std::vector<std::pair<DiscreteValues, double>> assignments =
@ -410,11 +413,11 @@ TEST(HybridNonlinearISAM, NonTrivial) {

   // Add connecting poses similar to PoseFactors in GTD
   fg.emplace_shared<BetweenFactor<Pose2>>(X(0), Y(0), Pose2(0, 1.0, 0),
                                           poseNoise);
   fg.emplace_shared<BetweenFactor<Pose2>>(Y(0), Z(0), Pose2(0, 1.0, 0),
                                           poseNoise);
   fg.emplace_shared<BetweenFactor<Pose2>>(Z(0), W(0), Pose2(0, 1.0, 0),
                                           poseNoise);

   // Create initial estimate
   Values initial;
@ -433,24 +436,24 @@ TEST(HybridNonlinearISAM, NonTrivial) {
   KeyVector contKeys = {W(0), W(1)};
   auto noise_model = noiseModel::Isotropic::Sigma(3, 1.0);
   auto still = std::make_shared<PlanarMotionModel>(W(0), W(1), Pose2(0, 0, 0),
                                                    noise_model),
       moving = std::make_shared<PlanarMotionModel>(W(0), W(1), odometry,
                                                    noise_model);
   std::vector<PlanarMotionModel::shared_ptr> components = {moving, still};
-  auto mixtureFactor = std::make_shared<MixtureFactor>(
+  auto mixtureFactor = std::make_shared<HybridNonlinearFactor>(
       contKeys, DiscreteKeys{gtsam::DiscreteKey(M(1), 2)}, components);
   fg.push_back(mixtureFactor);

   // Add equivalent of ImuFactor
   fg.emplace_shared<BetweenFactor<Pose2>>(X(0), X(1), Pose2(1.0, 0.0, 0),
                                           poseNoise);
   // PoseFactors-like at k=1
   fg.emplace_shared<BetweenFactor<Pose2>>(X(1), Y(1), Pose2(0, 1, 0),
                                           poseNoise);
   fg.emplace_shared<BetweenFactor<Pose2>>(Y(1), Z(1), Pose2(0, 1, 0),
                                           poseNoise);
   fg.emplace_shared<BetweenFactor<Pose2>>(Z(1), W(1), Pose2(-1, 1, 0),
                                           poseNoise);

   initial.insert(X(1), Pose2(1.0, 0.0, 0.0));
   initial.insert(Y(1), Pose2(1.0, 1.0, 0.0));
@ -473,24 +476,24 @@ TEST(HybridNonlinearISAM, NonTrivial) {
   // Add odometry factor with discrete modes.
   contKeys = {W(1), W(2)};
   still = std::make_shared<PlanarMotionModel>(W(1), W(2), Pose2(0, 0, 0),
                                               noise_model);
   moving =
       std::make_shared<PlanarMotionModel>(W(1), W(2), odometry, noise_model);
   components = {moving, still};
-  mixtureFactor = std::make_shared<MixtureFactor>(
+  mixtureFactor = std::make_shared<HybridNonlinearFactor>(
       contKeys, DiscreteKeys{gtsam::DiscreteKey(M(2), 2)}, components);
   fg.push_back(mixtureFactor);

   // Add equivalent of ImuFactor
   fg.emplace_shared<BetweenFactor<Pose2>>(X(1), X(2), Pose2(1.0, 0.0, 0),
                                           poseNoise);
   // PoseFactors-like at k=1
   fg.emplace_shared<BetweenFactor<Pose2>>(X(2), Y(2), Pose2(0, 1, 0),
                                           poseNoise);
   fg.emplace_shared<BetweenFactor<Pose2>>(Y(2), Z(2), Pose2(0, 1, 0),
                                           poseNoise);
   fg.emplace_shared<BetweenFactor<Pose2>>(Z(2), W(2), Pose2(-2, 1, 0),
                                           poseNoise);

   initial.insert(X(2), Pose2(2.0, 0.0, 0.0));
   initial.insert(Y(2), Pose2(2.0, 1.0, 0.0));
@ -516,24 +519,24 @@ TEST(HybridNonlinearISAM, NonTrivial) {
   // Add odometry factor with discrete modes.
   contKeys = {W(2), W(3)};
   still = std::make_shared<PlanarMotionModel>(W(2), W(3), Pose2(0, 0, 0),
                                               noise_model);
   moving =
       std::make_shared<PlanarMotionModel>(W(2), W(3), odometry, noise_model);
   components = {moving, still};
-  mixtureFactor = std::make_shared<MixtureFactor>(
+  mixtureFactor = std::make_shared<HybridNonlinearFactor>(
       contKeys, DiscreteKeys{gtsam::DiscreteKey(M(3), 2)}, components);
   fg.push_back(mixtureFactor);

   // Add equivalent of ImuFactor
   fg.emplace_shared<BetweenFactor<Pose2>>(X(2), X(3), Pose2(1.0, 0.0, 0),
                                           poseNoise);
   // PoseFactors-like at k=3
   fg.emplace_shared<BetweenFactor<Pose2>>(X(3), Y(3), Pose2(0, 1, 0),
                                           poseNoise);
   fg.emplace_shared<BetweenFactor<Pose2>>(Y(3), Z(3), Pose2(0, 1, 0),
                                           poseNoise);
   fg.emplace_shared<BetweenFactor<Pose2>>(Z(3), W(3), Pose2(-3, 1, 0),
                                           poseNoise);

   initial.insert(X(3), Pose2(3.0, 0.0, 0.0));
   initial.insert(Y(3), Pose2(3.0, 1.0, 0.0));
@ -18,11 +18,11 @@

 #include <gtsam/base/serializationTestHelpers.h>
 #include <gtsam/discrete/DiscreteConditional.h>
-#include <gtsam/hybrid/GaussianMixture.h>
-#include <gtsam/hybrid/GaussianMixtureFactor.h>
 #include <gtsam/hybrid/HybridBayesNet.h>
 #include <gtsam/hybrid/HybridBayesTree.h>
 #include <gtsam/hybrid/HybridConditional.h>
+#include <gtsam/hybrid/HybridGaussianConditional.h>
+#include <gtsam/hybrid/HybridGaussianFactor.h>
 #include <gtsam/inference/Symbol.h>
 #include <gtsam/linear/GaussianConditional.h>

@ -51,29 +51,30 @@ BOOST_CLASS_EXPORT_GUID(ADT, "gtsam_AlgebraicDecisionTree");
 BOOST_CLASS_EXPORT_GUID(ADT::Leaf, "gtsam_AlgebraicDecisionTree_Leaf");
 BOOST_CLASS_EXPORT_GUID(ADT::Choice, "gtsam_AlgebraicDecisionTree_Choice")

-BOOST_CLASS_EXPORT_GUID(GaussianMixtureFactor, "gtsam_GaussianMixtureFactor");
-BOOST_CLASS_EXPORT_GUID(GaussianMixtureFactor::Factors,
-                        "gtsam_GaussianMixtureFactor_Factors");
-BOOST_CLASS_EXPORT_GUID(GaussianMixtureFactor::Factors::Leaf,
-                        "gtsam_GaussianMixtureFactor_Factors_Leaf");
-BOOST_CLASS_EXPORT_GUID(GaussianMixtureFactor::Factors::Choice,
-                        "gtsam_GaussianMixtureFactor_Factors_Choice");
+BOOST_CLASS_EXPORT_GUID(HybridGaussianFactor, "gtsam_HybridGaussianFactor");
+BOOST_CLASS_EXPORT_GUID(HybridGaussianFactor::Factors,
+                        "gtsam_HybridGaussianFactor_Factors");
+BOOST_CLASS_EXPORT_GUID(HybridGaussianFactor::Factors::Leaf,
+                        "gtsam_HybridGaussianFactor_Factors_Leaf");
+BOOST_CLASS_EXPORT_GUID(HybridGaussianFactor::Factors::Choice,
+                        "gtsam_HybridGaussianFactor_Factors_Choice");

-BOOST_CLASS_EXPORT_GUID(GaussianMixture, "gtsam_GaussianMixture");
-BOOST_CLASS_EXPORT_GUID(GaussianMixture::Conditionals,
-                        "gtsam_GaussianMixture_Conditionals");
-BOOST_CLASS_EXPORT_GUID(GaussianMixture::Conditionals::Leaf,
-                        "gtsam_GaussianMixture_Conditionals_Leaf");
-BOOST_CLASS_EXPORT_GUID(GaussianMixture::Conditionals::Choice,
-                        "gtsam_GaussianMixture_Conditionals_Choice");
+BOOST_CLASS_EXPORT_GUID(HybridGaussianConditional,
+                        "gtsam_HybridGaussianConditional");
+BOOST_CLASS_EXPORT_GUID(HybridGaussianConditional::Conditionals,
+                        "gtsam_HybridGaussianConditional_Conditionals");
+BOOST_CLASS_EXPORT_GUID(HybridGaussianConditional::Conditionals::Leaf,
+                        "gtsam_HybridGaussianConditional_Conditionals_Leaf");
+BOOST_CLASS_EXPORT_GUID(HybridGaussianConditional::Conditionals::Choice,
+                        "gtsam_HybridGaussianConditional_Conditionals_Choice");

 // Needed since GaussianConditional::FromMeanAndStddev uses it
 BOOST_CLASS_EXPORT_GUID(noiseModel::Isotropic, "gtsam_noiseModel_Isotropic");

 BOOST_CLASS_EXPORT_GUID(HybridBayesNet, "gtsam_HybridBayesNet");

 /* ****************************************************************************/
-// Test GaussianMixtureFactor serialization.
-TEST(HybridSerialization, GaussianMixtureFactor) {
+// Test HybridGaussianFactor serialization.
+TEST(HybridSerialization, HybridGaussianFactor) {
   KeyVector continuousKeys{X(0)};
   DiscreteKeys discreteKeys{{M(0), 2}};

@ -84,11 +85,11 @@ TEST(HybridSerialization, GaussianMixtureFactor) {
   auto f1 = std::make_shared<JacobianFactor>(X(0), A, b1);
   std::vector<GaussianFactor::shared_ptr> factors{f0, f1};

-  const GaussianMixtureFactor factor(continuousKeys, discreteKeys, factors);
+  const HybridGaussianFactor factor(continuousKeys, discreteKeys, factors);

-  EXPECT(equalsObj<GaussianMixtureFactor>(factor));
-  EXPECT(equalsXML<GaussianMixtureFactor>(factor));
-  EXPECT(equalsBinary<GaussianMixtureFactor>(factor));
+  EXPECT(equalsObj<HybridGaussianFactor>(factor));
+  EXPECT(equalsXML<HybridGaussianFactor>(factor));
+  EXPECT(equalsBinary<HybridGaussianFactor>(factor));
 }

 /* ****************************************************************************/
@ -106,20 +107,20 @@ TEST(HybridSerialization, HybridConditional) {
 }

 /* ****************************************************************************/
-// Test GaussianMixture serialization.
-TEST(HybridSerialization, GaussianMixture) {
+// Test HybridGaussianConditional serialization.
+TEST(HybridSerialization, HybridGaussianConditional) {
   const DiscreteKey mode(M(0), 2);
   Matrix1 I = Matrix1::Identity();
   const auto conditional0 = std::make_shared<GaussianConditional>(
       GaussianConditional::FromMeanAndStddev(Z(0), I, X(0), Vector1(0), 0.5));
   const auto conditional1 = std::make_shared<GaussianConditional>(
       GaussianConditional::FromMeanAndStddev(Z(0), I, X(0), Vector1(0), 3));
-  const GaussianMixture gm({Z(0)}, {X(0)}, {mode},
+  const HybridGaussianConditional gm({Z(0)}, {X(0)}, {mode},
                            {conditional0, conditional1});

-  EXPECT(equalsObj<GaussianMixture>(gm));
-  EXPECT(equalsXML<GaussianMixture>(gm));
-  EXPECT(equalsBinary<GaussianMixture>(gm));
+  EXPECT(equalsObj<HybridGaussianConditional>(gm));
+  EXPECT(equalsXML<HybridGaussianConditional>(gm));
+  EXPECT(equalsBinary<HybridGaussianConditional>(gm));
 }

 /* ****************************************************************************/
@ -46,7 +46,7 @@ namespace gtsam {
  * Gaussian density over a set of continuous variables.
  * - \b Discrete conditionals, implemented in \class DiscreteConditional, which
  *   represent a discrete conditional distribution over discrete variables.
- * - \b Hybrid conditional densities, such as \class GaussianMixture, which is
+ * - \b Hybrid conditional densities, such as \class HybridGaussianConditional, which is
  *   a density over continuous variables given discrete/continuous parents.
  * - \b Symbolic factors, used to represent a graph structure, implemented in
  *   \class SymbolicConditional. Only used for symbolic elimination etc.
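The doc comment above gives the conceptual picture; the serialization test earlier in this diff constructs such a hybrid conditional density directly. A minimal standalone sketch along the same lines, assuming the constructor and GaussianConditional::FromMeanAndStddev helper shown in that test; the keys, sigmas, and print call are illustrative only:

    #include <gtsam/discrete/DiscreteKey.h>
    #include <gtsam/hybrid/HybridGaussianConditional.h>
    #include <gtsam/inference/Symbol.h>
    #include <gtsam/linear/GaussianConditional.h>

    #include <memory>

    using namespace gtsam;
    using symbol_shorthand::M;
    using symbol_shorthand::X;
    using symbol_shorthand::Z;

    int main() {
      // One GaussianConditional P(z0 | x0) per value of the binary mode m0.
      const DiscreteKey mode(M(0), 2);
      const Matrix1 I = Matrix1::Identity();
      const auto tight = std::make_shared<GaussianConditional>(
          GaussianConditional::FromMeanAndStddev(Z(0), I, X(0), Vector1(0), 0.5));
      const auto loose = std::make_shared<GaussianConditional>(
          GaussianConditional::FromMeanAndStddev(Z(0), I, X(0), Vector1(0), 3.0));

      // Formerly GaussianMixture: a density over z0 given x0 and the mode m0.
      const HybridGaussianConditional hgc({Z(0)}, {X(0)}, {mode}, {tight, loose});
      hgc.print("hybrid conditional density");
      return 0;
    }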
@ -18,8 +18,9 @@ from gtsam.symbol_shorthand import A, X
 from gtsam.utils.test_case import GtsamTestCase

 from gtsam import (DiscreteConditional, DiscreteKeys, DiscreteValues,
-                   GaussianConditional, GaussianMixture, HybridBayesNet,
-                   HybridValues, VectorValues, noiseModel)
+                   GaussianConditional, HybridBayesNet,
+                   HybridGaussianConditional, HybridValues, VectorValues,
+                   noiseModel)


 class TestHybridBayesNet(GtsamTestCase):
@ -49,8 +50,8 @@ class TestHybridBayesNet(GtsamTestCase):
         bayesNet = HybridBayesNet()
         bayesNet.push_back(conditional)
         bayesNet.push_back(
-            GaussianMixture([X(1)], [], discrete_keys,
+            HybridGaussianConditional([X(1)], [], discrete_keys,
                             [conditional0, conditional1]))
         bayesNet.push_back(DiscreteConditional(Asia, "99/1"))

         # Create values at which to evaluate.
@ -18,15 +18,16 @@ from gtsam.utils.test_case import GtsamTestCase

 import gtsam
 from gtsam import (DiscreteConditional, DiscreteKeys, GaussianConditional,
-                   GaussianMixture, GaussianMixtureFactor, HybridBayesNet,
-                   HybridGaussianFactorGraph, HybridValues, JacobianFactor,
-                   Ordering, noiseModel)
+                   HybridBayesNet, HybridGaussianConditional,
+                   HybridGaussianFactor, HybridGaussianFactorGraph,
+                   HybridValues, JacobianFactor, Ordering, noiseModel)

 DEBUG_MARGINALS = False


 class TestHybridGaussianFactorGraph(GtsamTestCase):
     """Unit tests for HybridGaussianFactorGraph."""

     def test_create(self):
         """Test construction of hybrid factor graph."""
         model = noiseModel.Unit.Create(3)
@ -36,7 +37,7 @@ class TestHybridGaussianFactorGraph(GtsamTestCase):
         jf1 = JacobianFactor(X(0), np.eye(3), np.zeros((3, 1)), model)
         jf2 = JacobianFactor(X(0), np.eye(3), np.ones((3, 1)), model)

-        gmf = GaussianMixtureFactor([X(0)], dk, [jf1, jf2])
+        gmf = HybridGaussianFactor([X(0)], dk, [jf1, jf2])

         hfg = HybridGaussianFactorGraph()
         hfg.push_back(jf1)
@ -48,7 +49,7 @@ class TestHybridGaussianFactorGraph(GtsamTestCase):
         self.assertEqual(hbn.size(), 2)

         mixture = hbn.at(0).inner()
-        self.assertIsInstance(mixture, GaussianMixture)
+        self.assertIsInstance(mixture, HybridGaussianConditional)
         self.assertEqual(len(mixture.keys()), 2)

         discrete_conditional = hbn.at(hbn.size() - 1).inner()
@ -63,7 +64,7 @@ class TestHybridGaussianFactorGraph(GtsamTestCase):
         jf1 = JacobianFactor(X(0), np.eye(3), np.zeros((3, 1)), model)
         jf2 = JacobianFactor(X(0), np.eye(3), np.ones((3, 1)), model)

-        gmf = GaussianMixtureFactor([X(0)], dk, [jf1, jf2])
+        gmf = HybridGaussianFactor([X(0)], dk, [jf1, jf2])

         hfg = HybridGaussianFactorGraph()
         hfg.push_back(jf1)
@ -106,8 +107,9 @@ class TestHybridGaussianFactorGraph(GtsamTestCase):
                 I_1x1,
                 X(0), [0],
                 sigma=3)
-            bayesNet.push_back(GaussianMixture([Z(i)], [X(0)], keys,
-                                               [conditional0, conditional1]))
+            bayesNet.push_back(
+                HybridGaussianConditional([Z(i)], [X(0)], keys,
+                                          [conditional0, conditional1]))

         # Create prior on X(0).
         prior_on_x0 = GaussianConditional.FromMeanAndStddev(
@ -219,9 +221,9 @@ class TestHybridGaussianFactorGraph(GtsamTestCase):
         # Check ratio between unnormalized posterior and factor graph is the same for all modes:
         for mode in [1, 0]:
             values.insert_or_assign(M(0), mode)
-            self.assertAlmostEqual(bayesNet.evaluate(values) /
-                                   np.exp(-fg.error(values)),
+            self.assertAlmostEqual(
+                bayesNet.evaluate(values) / np.exp(-fg.error(values)),
                 0.6366197723675815)
             self.assertAlmostEqual(bayesNet.error(values), fg.error(values))

         # Test elimination.
@ -17,24 +17,26 @@ import unittest
 import numpy as np
 from gtsam.symbol_shorthand import C, X
 from gtsam.utils.test_case import GtsamTestCase
-from gtsam import BetweenFactorPoint3, noiseModel, PriorFactorPoint3, Point3

 import gtsam
+from gtsam import BetweenFactorPoint3, Point3, PriorFactorPoint3, noiseModel


 class TestHybridGaussianFactorGraph(GtsamTestCase):
     """Unit tests for HybridGaussianFactorGraph."""

     def test_nonlinear_hybrid(self):
         nlfg = gtsam.HybridNonlinearFactorGraph()
         dk = gtsam.DiscreteKeys()
         dk.push_back((10, 2))
-        nlfg.push_back(BetweenFactorPoint3(1, 2, Point3(
-            1, 2, 3), noiseModel.Diagonal.Variances([1, 1, 1])))
+        nlfg.push_back(
+            BetweenFactorPoint3(1, 2, Point3(1, 2, 3),
+                                noiseModel.Diagonal.Variances([1, 1, 1])))
         nlfg.push_back(
             PriorFactorPoint3(2, Point3(1, 2, 3),
                               noiseModel.Diagonal.Variances([0.5, 0.5, 0.5])))
         nlfg.push_back(
-            gtsam.MixtureFactor([1], dk, [
+            gtsam.HybridNonlinearFactor([1], dk, [
                 PriorFactorPoint3(1, Point3(0, 0, 0),
                                   noiseModel.Unit.Create(3)),
                 PriorFactorPoint3(1, Point3(1, 2, 1),
@ -14,11 +14,12 @@ from __future__ import print_function

 import unittest

-import gtsam
 import numpy as np
 from gtsam.symbol_shorthand import C, X
 from gtsam.utils.test_case import GtsamTestCase

+import gtsam
+

 class TestHybridValues(GtsamTestCase):
     """Unit tests for HybridValues."""