rename GaussianMixtureFactor to HybridGaussianFactor

release/4.3a0
Varun Agrawal 2024-09-13 00:24:18 -04:00
parent 5e419e1233
commit 6a92db62c3
20 changed files with 112 additions and 112 deletions
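For readers skimming the diff below, here is a minimal before/after sketch of what the rename means for calling code. It is assembled from the constructor and test usages shown in this commit (continuous keys, discrete keys, and one GaussianFactor per discrete mode); the main() wrapper and the concrete keys are illustrative only, not part of the change.

// Sketch only: before this commit the type was GaussianMixtureFactor in
// <gtsam/hybrid/GaussianMixtureFactor.h>; after it, the same class is
// HybridGaussianFactor in <gtsam/hybrid/HybridGaussianFactor.h>.
#include <gtsam/hybrid/HybridGaussianFactor.h>
#include <gtsam/inference/Symbol.h>
#include <gtsam/linear/JacobianFactor.h>
#include <vector>

using namespace gtsam;
using symbol_shorthand::M;
using symbol_shorthand::X;

int main() {
  DiscreteKey m1(M(1), 2);
  // One Gaussian factor per assignment of the discrete mode m1.
  auto f0 = std::make_shared<JacobianFactor>(X(1), I_3x3, Z_3x1);
  auto f1 = std::make_shared<JacobianFactor>(X(1), I_3x3, Vector3::Ones());
  std::vector<GaussianFactor::shared_ptr> factors{f0, f1};

  // Before: GaussianMixtureFactor factor({X(1)}, {m1}, factors);
  // After:
  HybridGaussianFactor factor({X(1)}, {m1}, factors);
  factor.print("renamed factor:\n");
  return 0;
}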

View File

@ -548,13 +548,13 @@ with
\end_layout
\begin_layout Subsubsection*
GaussianMixtureFactor
HybridGaussianFactor
\end_layout
\begin_layout Standard
Analogously, a
\emph on
GaussianMixtureFactor
HybridGaussianFactor
\emph default
typically results from a GaussianMixture by having known values
\begin_inset Formula $\bar{x}$

View File

@ -21,7 +21,7 @@
#include <gtsam/base/utilities.h>
#include <gtsam/discrete/DiscreteValues.h>
#include <gtsam/hybrid/GaussianMixture.h>
#include <gtsam/hybrid/GaussianMixtureFactor.h>
#include <gtsam/hybrid/HybridGaussianFactor.h>
#include <gtsam/hybrid/HybridValues.h>
#include <gtsam/inference/Conditional-inst.h>
#include <gtsam/linear/GaussianBayesNet.h>
@ -72,7 +72,7 @@ GaussianMixture::GaussianMixture(
/* *******************************************************************************/
// TODO(dellaert): This is copy/paste: GaussianMixture should be derived from
// GaussianMixtureFactor, no?
// HybridGaussianFactor, no?
GaussianFactorGraphTree GaussianMixture::add(
const GaussianFactorGraphTree &sum) const {
using Y = GaussianFactorGraph;
@ -203,7 +203,7 @@ bool GaussianMixture::allFrontalsGiven(const VectorValues &given) const {
}
/* ************************************************************************* */
std::shared_ptr<GaussianMixtureFactor> GaussianMixture::likelihood(
std::shared_ptr<HybridGaussianFactor> GaussianMixture::likelihood(
const VectorValues &given) const {
if (!allFrontalsGiven(given)) {
throw std::runtime_error(
@ -212,7 +212,7 @@ std::shared_ptr<GaussianMixtureFactor> GaussianMixture::likelihood(
const DiscreteKeys discreteParentKeys = discreteKeys();
const KeyVector continuousParentKeys = continuousParents();
const GaussianMixtureFactor::Factors likelihoods(
const HybridGaussianFactor::Factors likelihoods(
conditionals_, [&](const GaussianConditional::shared_ptr &conditional) {
const auto likelihood_m = conditional->likelihood(given);
const double Cgm_Kgcm =
@ -231,7 +231,7 @@ std::shared_ptr<GaussianMixtureFactor> GaussianMixture::likelihood(
return std::make_shared<JacobianFactor>(gfg);
}
});
return std::make_shared<GaussianMixtureFactor>(
return std::make_shared<HybridGaussianFactor>(
continuousParentKeys, discreteParentKeys, likelihoods);
}

View File

@ -23,7 +23,7 @@
#include <gtsam/discrete/DecisionTree.h>
#include <gtsam/discrete/DecisionTreeFactor.h>
#include <gtsam/discrete/DiscreteKey.h>
#include <gtsam/hybrid/GaussianMixtureFactor.h>
#include <gtsam/hybrid/HybridGaussianFactor.h>
#include <gtsam/hybrid/HybridFactor.h>
#include <gtsam/inference/Conditional.h>
#include <gtsam/linear/GaussianConditional.h>
@ -165,7 +165,7 @@ class GTSAM_EXPORT GaussianMixture
* Create a likelihood factor for a Gaussian mixture, return a Mixture factor
* on the parents.
*/
std::shared_ptr<GaussianMixtureFactor> likelihood(
std::shared_ptr<HybridGaussianFactor> likelihood(
const VectorValues &given) const;
/// Getter for the underlying Conditionals DecisionTree
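The likelihood method above is the main producer of the renamed factor type: conditioning a GaussianMixture on its continuous frontals yields a HybridGaussianFactor on the parents. The sketch below illustrates that flow; it assumes the vector-of-conditionals GaussianMixture constructor exposed in the wrapper file later in this commit and the GaussianConditional::sharedMeanAndStddev helper, with keys and numbers chosen purely for illustration.

// Sketch only: P(z0 | x0, m0) as a GaussianMixture, then conditioned on a
// measured z0 to obtain a HybridGaussianFactor on x0. All values illustrative.
#include <gtsam/hybrid/GaussianMixture.h>
#include <gtsam/hybrid/HybridGaussianFactor.h>
#include <gtsam/inference/Symbol.h>

using namespace gtsam;
using symbol_shorthand::M;
using symbol_shorthand::X;
using symbol_shorthand::Z;

int main() {
  DiscreteKey mode(M(0), 2);
  // One conditional per mode: same mean model z0 = x0, different noise.
  auto c0 = GaussianConditional::sharedMeanAndStddev(Z(0), I_1x1, X(0), Vector1(0.0), 0.5);
  auto c1 = GaussianConditional::sharedMeanAndStddev(Z(0), I_1x1, X(0), Vector1(0.0), 3.0);
  GaussianMixture mixture({Z(0)}, {X(0)}, {mode}, {c0, c1});

  // All frontals (here just z0) must be given, per the check in likelihood().
  VectorValues given;
  given.insert(Z(0), Vector1(2.0));
  std::shared_ptr<HybridGaussianFactor> factor = mixture.likelihood(given);
  factor->print("likelihood factor:\n");
  return 0;
}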

View File

@ -10,7 +10,7 @@
* -------------------------------------------------------------------------- */
/**
* @file GaussianMixtureFactor.cpp
* @file HybridGaussianFactor.cpp
* @brief A set of Gaussian factors indexed by a set of discrete keys.
* @author Fan Jiang
* @author Varun Agrawal
@ -21,7 +21,7 @@
#include <gtsam/base/utilities.h>
#include <gtsam/discrete/DecisionTree-inl.h>
#include <gtsam/discrete/DecisionTree.h>
#include <gtsam/hybrid/GaussianMixtureFactor.h>
#include <gtsam/hybrid/HybridGaussianFactor.h>
#include <gtsam/hybrid/HybridValues.h>
#include <gtsam/linear/GaussianFactor.h>
#include <gtsam/linear/GaussianFactorGraph.h>
@ -29,13 +29,13 @@
namespace gtsam {
/* *******************************************************************************/
GaussianMixtureFactor::GaussianMixtureFactor(const KeyVector &continuousKeys,
HybridGaussianFactor::HybridGaussianFactor(const KeyVector &continuousKeys,
const DiscreteKeys &discreteKeys,
const Factors &factors)
: Base(continuousKeys, discreteKeys), factors_(factors) {}
/* *******************************************************************************/
bool GaussianMixtureFactor::equals(const HybridFactor &lf, double tol) const {
bool HybridGaussianFactor::equals(const HybridFactor &lf, double tol) const {
const This *e = dynamic_cast<const This *>(&lf);
if (e == nullptr) return false;
@ -52,10 +52,10 @@ bool GaussianMixtureFactor::equals(const HybridFactor &lf, double tol) const {
}
/* *******************************************************************************/
void GaussianMixtureFactor::print(const std::string &s,
void HybridGaussianFactor::print(const std::string &s,
const KeyFormatter &formatter) const {
std::cout << (s.empty() ? "" : s + "\n");
std::cout << "GaussianMixtureFactor" << std::endl;
std::cout << "HybridGaussianFactor" << std::endl;
HybridFactor::print("", formatter);
std::cout << "{\n";
if (factors_.empty()) {
@ -78,13 +78,13 @@ void GaussianMixtureFactor::print(const std::string &s,
}
/* *******************************************************************************/
GaussianMixtureFactor::sharedFactor GaussianMixtureFactor::operator()(
HybridGaussianFactor::sharedFactor HybridGaussianFactor::operator()(
const DiscreteValues &assignment) const {
return factors_(assignment);
}
/* *******************************************************************************/
GaussianFactorGraphTree GaussianMixtureFactor::add(
GaussianFactorGraphTree HybridGaussianFactor::add(
const GaussianFactorGraphTree &sum) const {
using Y = GaussianFactorGraph;
auto add = [](const Y &graph1, const Y &graph2) {
@ -97,14 +97,14 @@ GaussianFactorGraphTree GaussianMixtureFactor::add(
}
/* *******************************************************************************/
GaussianFactorGraphTree GaussianMixtureFactor::asGaussianFactorGraphTree()
GaussianFactorGraphTree HybridGaussianFactor::asGaussianFactorGraphTree()
const {
auto wrap = [](const sharedFactor &gf) { return GaussianFactorGraph{gf}; };
return {factors_, wrap};
}
/* *******************************************************************************/
AlgebraicDecisionTree<Key> GaussianMixtureFactor::errorTree(
AlgebraicDecisionTree<Key> HybridGaussianFactor::errorTree(
const VectorValues &continuousValues) const {
// functor to convert from sharedFactor to double error value.
auto errorFunc = [&continuousValues](const sharedFactor &gf) {
@ -115,7 +115,7 @@ AlgebraicDecisionTree<Key> GaussianMixtureFactor::errorTree(
}
/* *******************************************************************************/
double GaussianMixtureFactor::error(const HybridValues &values) const {
double HybridGaussianFactor::error(const HybridValues &values) const {
const sharedFactor gf = factors_(values.discrete());
return gf->error(values.continuous());
}

View File

@ -10,7 +10,7 @@
* -------------------------------------------------------------------------- */
/**
* @file GaussianMixtureFactor.h
* @file HybridGaussianFactor.h
* @brief A set of GaussianFactors, indexed by a set of discrete keys.
* @author Fan Jiang
* @author Varun Agrawal
@ -44,10 +44,10 @@ class VectorValues;
*
* @ingroup hybrid
*/
class GTSAM_EXPORT GaussianMixtureFactor : public HybridFactor {
class GTSAM_EXPORT HybridGaussianFactor : public HybridFactor {
public:
using Base = HybridFactor;
using This = GaussianMixtureFactor;
using This = HybridGaussianFactor;
using shared_ptr = std::shared_ptr<This>;
using sharedFactor = std::shared_ptr<GaussianFactor>;
@ -72,7 +72,7 @@ class GTSAM_EXPORT GaussianMixtureFactor : public HybridFactor {
/// @{
/// Default constructor, mainly for serialization.
GaussianMixtureFactor() = default;
HybridGaussianFactor() = default;
/**
* @brief Construct a new Gaussian mixture factor.
@ -83,22 +83,22 @@ class GTSAM_EXPORT GaussianMixtureFactor : public HybridFactor {
* @param factors The decision tree of Gaussian factors stored
* as the mixture density.
*/
GaussianMixtureFactor(const KeyVector &continuousKeys,
HybridGaussianFactor(const KeyVector &continuousKeys,
const DiscreteKeys &discreteKeys,
const Factors &factors);
/**
* @brief Construct a new GaussianMixtureFactor object using a vector of
* @brief Construct a new HybridGaussianFactor object using a vector of
* GaussianFactor shared pointers.
*
* @param continuousKeys Vector of keys for continuous factors.
* @param discreteKeys Vector of discrete keys.
* @param factors Vector of gaussian factor shared pointers.
*/
GaussianMixtureFactor(const KeyVector &continuousKeys,
HybridGaussianFactor(const KeyVector &continuousKeys,
const DiscreteKeys &discreteKeys,
const std::vector<sharedFactor> &factors)
: GaussianMixtureFactor(continuousKeys, discreteKeys,
: HybridGaussianFactor(continuousKeys, discreteKeys,
Factors(discreteKeys, factors)) {}
/// @}
@ -128,7 +128,7 @@ class GTSAM_EXPORT GaussianMixtureFactor : public HybridFactor {
GaussianFactorGraphTree add(const GaussianFactorGraphTree &sum) const;
/**
* @brief Compute error of the GaussianMixtureFactor as a tree.
* @brief Compute error of the HybridGaussianFactor as a tree.
*
* @param continuousValues The continuous VectorValues.
* @return AlgebraicDecisionTree<Key> A decision tree with the same keys
@ -148,7 +148,7 @@ class GTSAM_EXPORT GaussianMixtureFactor : public HybridFactor {
/// Add MixtureFactor to a Sum, syntactic sugar.
friend GaussianFactorGraphTree &operator+=(
GaussianFactorGraphTree &sum, const GaussianMixtureFactor &factor) {
GaussianFactorGraphTree &sum, const HybridGaussianFactor &factor) {
sum = factor.add(sum);
return sum;
}
@ -168,7 +168,7 @@ class GTSAM_EXPORT GaussianMixtureFactor : public HybridFactor {
// traits
template <>
struct traits<GaussianMixtureFactor> : public Testable<GaussianMixtureFactor> {
struct traits<HybridGaussianFactor> : public Testable<HybridGaussianFactor> {
};
} // namespace gtsam

View File

@ -46,7 +46,7 @@ DiscreteKeys CollectDiscreteKeys(const DiscreteKeys &key1,
*
* Examples:
* - MixtureFactor
* - GaussianMixtureFactor
* - HybridGaussianFactor
* - GaussianMixture
*
* @ingroup hybrid

View File

@ -24,7 +24,7 @@
#include <gtsam/discrete/DiscreteFactorGraph.h>
#include <gtsam/discrete/DiscreteJunctionTree.h>
#include <gtsam/hybrid/GaussianMixture.h>
#include <gtsam/hybrid/GaussianMixtureFactor.h>
#include <gtsam/hybrid/HybridGaussianFactor.h>
#include <gtsam/hybrid/HybridConditional.h>
#include <gtsam/hybrid/HybridEliminationTree.h>
#include <gtsam/hybrid/HybridFactor.h>
@ -92,7 +92,7 @@ void HybridGaussianFactorGraph::printErrors(
// Clear the stringstream
ss.str(std::string());
if (auto gmf = std::dynamic_pointer_cast<GaussianMixtureFactor>(factor)) {
if (auto gmf = std::dynamic_pointer_cast<HybridGaussianFactor>(factor)) {
if (factor == nullptr) {
std::cout << "nullptr"
<< "\n";
@ -178,7 +178,7 @@ GaussianFactorGraphTree HybridGaussianFactorGraph::assembleGraphTree() const {
// TODO(dellaert): just use a virtual method defined in HybridFactor.
if (auto gf = dynamic_pointer_cast<GaussianFactor>(f)) {
result = addGaussian(result, gf);
} else if (auto gmf = dynamic_pointer_cast<GaussianMixtureFactor>(f)) {
} else if (auto gmf = dynamic_pointer_cast<HybridGaussianFactor>(f)) {
result = gmf->add(result);
} else if (auto gm = dynamic_pointer_cast<GaussianMixture>(f)) {
result = gm->add(result);
@ -258,8 +258,8 @@ discreteElimination(const HybridGaussianFactorGraph &factors,
for (auto &f : factors) {
if (auto df = dynamic_pointer_cast<DiscreteFactor>(f)) {
dfg.push_back(df);
} else if (auto gmf = dynamic_pointer_cast<GaussianMixtureFactor>(f)) {
// Case where we have a GaussianMixtureFactor with no continuous keys.
} else if (auto gmf = dynamic_pointer_cast<HybridGaussianFactor>(f)) {
// Case where we have a HybridGaussianFactor with no continuous keys.
// In this case, compute discrete probabilities.
auto logProbability =
[&](const GaussianFactor::shared_ptr &factor) -> double {
@ -309,7 +309,7 @@ GaussianFactorGraphTree removeEmpty(const GaussianFactorGraphTree &sum) {
/* ************************************************************************ */
using Result = std::pair<std::shared_ptr<GaussianConditional>,
GaussianMixtureFactor::sharedFactor>;
HybridGaussianFactor::sharedFactor>;
/**
* Compute the probability p(μ;m) = exp(-error(μ;m)) * sqrt(det(2π Σ_m)
@ -341,7 +341,7 @@ static std::shared_ptr<Factor> createDiscreteFactor(
return std::make_shared<DecisionTreeFactor>(discreteSeparator, probabilities);
}
// Create GaussianMixtureFactor on the separator, taking care to correct
// Create HybridGaussianFactor on the separator, taking care to correct
// for conditional constants.
static std::shared_ptr<Factor> createGaussianMixtureFactor(
const DecisionTree<Key, Result> &eliminationResults,
@ -362,7 +362,7 @@ static std::shared_ptr<Factor> createGaussianMixtureFactor(
DecisionTree<Key, GaussianFactor::shared_ptr> newFactors(eliminationResults,
correct);
return std::make_shared<GaussianMixtureFactor>(continuousSeparator,
return std::make_shared<HybridGaussianFactor>(continuousSeparator,
discreteSeparator, newFactors);
}
@ -400,7 +400,7 @@ hybridElimination(const HybridGaussianFactorGraph &factors,
DecisionTree<Key, Result> eliminationResults(factorGraphTree, eliminate);
// If there are no more continuous parents we create a DiscreteFactor with the
// error for each discrete choice. Otherwise, create a GaussianMixtureFactor
// error for each discrete choice. Otherwise, create a HybridGaussianFactor
// on the separator, taking care to correct for conditional constants.
auto newFactor =
continuousSeparator.empty()
@ -549,7 +549,7 @@ AlgebraicDecisionTree<Key> HybridGaussianFactorGraph::errorTree(
f = hc->inner();
}
if (auto gaussianMixture = dynamic_pointer_cast<GaussianMixtureFactor>(f)) {
if (auto gaussianMixture = dynamic_pointer_cast<HybridGaussianFactor>(f)) {
// Compute factor error and add it.
error_tree = error_tree + gaussianMixture->errorTree(continuousValues);
} else if (auto gaussian = dynamic_pointer_cast<GaussianFactor>(f)) {
@ -597,7 +597,7 @@ GaussianFactorGraph HybridGaussianFactorGraph::operator()(
gfg.push_back(gf);
} else if (auto gc = std::dynamic_pointer_cast<GaussianConditional>(f)) {
gfg.push_back(gf);
} else if (auto gmf = std::dynamic_pointer_cast<GaussianMixtureFactor>(f)) {
} else if (auto gmf = std::dynamic_pointer_cast<HybridGaussianFactor>(f)) {
gfg.push_back((*gmf)(assignment));
} else if (auto gm = dynamic_pointer_cast<GaussianMixture>(f)) {
gfg.push_back((*gm)(assignment));

View File

@ -18,7 +18,7 @@
#pragma once
#include <gtsam/hybrid/GaussianMixtureFactor.h>
#include <gtsam/hybrid/HybridGaussianFactor.h>
#include <gtsam/hybrid/HybridFactor.h>
#include <gtsam/hybrid/HybridFactorGraph.h>
#include <gtsam/inference/EliminateableFactorGraph.h>

View File

@ -70,7 +70,7 @@ void HybridNonlinearFactorGraph::printErrors(
std::cout << std::endl;
}
} else if (auto gmf =
std::dynamic_pointer_cast<GaussianMixtureFactor>(factor)) {
std::dynamic_pointer_cast<HybridGaussianFactor>(factor)) {
if (factor == nullptr) {
std::cout << "nullptr"
<< "\n";
@ -152,7 +152,7 @@ HybridGaussianFactorGraph::shared_ptr HybridNonlinearFactorGraph::linearize(
}
// Check if it is a nonlinear mixture factor
if (auto mf = dynamic_pointer_cast<MixtureFactor>(f)) {
const GaussianMixtureFactor::shared_ptr& gmf =
const HybridGaussianFactor::shared_ptr& gmf =
mf->linearize(continuousValues);
linearFG->push_back(gmf);
} else if (auto nlf = dynamic_pointer_cast<NonlinearFactor>(f)) {
@ -161,7 +161,7 @@ HybridGaussianFactorGraph::shared_ptr HybridNonlinearFactorGraph::linearize(
} else if (dynamic_pointer_cast<DiscreteFactor>(f)) {
// If discrete-only: doesn't need linearization.
linearFG->push_back(f);
} else if (auto gmf = dynamic_pointer_cast<GaussianMixtureFactor>(f)) {
} else if (auto gmf = dynamic_pointer_cast<HybridGaussianFactor>(f)) {
linearFG->push_back(gmf);
} else if (auto gm = dynamic_pointer_cast<GaussianMixture>(f)) {
linearFG->push_back(gm);

View File

@ -20,7 +20,7 @@
#pragma once
#include <gtsam/discrete/DiscreteValues.h>
#include <gtsam/hybrid/GaussianMixtureFactor.h>
#include <gtsam/hybrid/HybridGaussianFactor.h>
#include <gtsam/hybrid/HybridValues.h>
#include <gtsam/nonlinear/NonlinearFactor.h>
#include <gtsam/nonlinear/NonlinearFactorGraph.h>
@ -233,8 +233,8 @@ class MixtureFactor : public HybridFactor {
return factor->linearize(continuousValues);
}
/// Linearize all the continuous factors to get a GaussianMixtureFactor.
std::shared_ptr<GaussianMixtureFactor> linearize(
/// Linearize all the continuous factors to get a HybridGaussianFactor.
std::shared_ptr<HybridGaussianFactor> linearize(
const Values& continuousValues) const {
// functional to linearize each factor in the decision tree
auto linearizeDT = [continuousValues](const sharedFactor& factor) {
@ -244,7 +244,7 @@ class MixtureFactor : public HybridFactor {
DecisionTree<Key, GaussianFactor::shared_ptr> linearized_factors(
factors_, linearizeDT);
return std::make_shared<GaussianMixtureFactor>(
return std::make_shared<HybridGaussianFactor>(
continuousKeys_, discreteKeys_, linearized_factors);
}

View File

@ -72,14 +72,14 @@ virtual class HybridConditional {
double error(const gtsam::HybridValues& values) const;
};
#include <gtsam/hybrid/GaussianMixtureFactor.h>
class GaussianMixtureFactor : gtsam::HybridFactor {
GaussianMixtureFactor(
#include <gtsam/hybrid/HybridGaussianFactor.h>
class HybridGaussianFactor : gtsam::HybridFactor {
HybridGaussianFactor(
const gtsam::KeyVector& continuousKeys,
const gtsam::DiscreteKeys& discreteKeys,
const std::vector<gtsam::GaussianFactor::shared_ptr>& factorsList);
void print(string s = "GaussianMixtureFactor\n",
void print(string s = "HybridGaussianFactor\n",
const gtsam::KeyFormatter& keyFormatter =
gtsam::DefaultKeyFormatter) const;
};
@ -92,7 +92,7 @@ class GaussianMixture : gtsam::HybridFactor {
const std::vector<gtsam::GaussianConditional::shared_ptr>&
conditionalsList);
gtsam::GaussianMixtureFactor* likelihood(
gtsam::HybridGaussianFactor* likelihood(
const gtsam::VectorValues& frontals) const;
double logProbability(const gtsam::HybridValues& values) const;
double evaluate(const gtsam::HybridValues& values) const;
@ -177,7 +177,7 @@ class HybridGaussianFactorGraph {
void push_back(const gtsam::HybridGaussianFactorGraph& graph);
void push_back(const gtsam::HybridBayesNet& bayesNet);
void push_back(const gtsam::HybridBayesTree& bayesTree);
void push_back(const gtsam::GaussianMixtureFactor* gmm);
void push_back(const gtsam::HybridGaussianFactor* gmm);
void push_back(gtsam::DecisionTreeFactor* factor);
void push_back(gtsam::TableFactor* factor);
void push_back(gtsam::JacobianFactor* factor);
@ -253,7 +253,7 @@ class MixtureFactor : gtsam::HybridFactor {
double nonlinearFactorLogNormalizingConstant(const gtsam::NonlinearFactor* factor,
const gtsam::Values& values) const;
GaussianMixtureFactor* linearize(
HybridGaussianFactor* linearize(
const gtsam::Values& continuousValues) const;
void print(string s = "MixtureFactor\n",

View File

@ -19,7 +19,7 @@
#include <gtsam/base/Matrix.h>
#include <gtsam/discrete/DecisionTreeFactor.h>
#include <gtsam/discrete/DiscreteDistribution.h>
#include <gtsam/hybrid/GaussianMixtureFactor.h>
#include <gtsam/hybrid/HybridGaussianFactor.h>
#include <gtsam/hybrid/HybridGaussianFactorGraph.h>
#include <gtsam/hybrid/HybridNonlinearFactorGraph.h>
#include <gtsam/hybrid/MixtureFactor.h>
@ -57,7 +57,7 @@ inline HybridGaussianFactorGraph::shared_ptr makeSwitchingChain(
// keyFunc(1) to keyFunc(n+1)
for (size_t t = 1; t < n; t++) {
hfg.add(GaussianMixtureFactor(
hfg.add(HybridGaussianFactor(
{keyFunc(t), keyFunc(t + 1)}, {{dKeyFunc(t), 2}},
{std::make_shared<JacobianFactor>(keyFunc(t), I_3x3, keyFunc(t + 1),
I_3x3, Z_3x1),

View File

@ -20,7 +20,7 @@
#include <gtsam/discrete/DiscreteValues.h>
#include <gtsam/hybrid/GaussianMixture.h>
#include <gtsam/hybrid/GaussianMixtureFactor.h>
#include <gtsam/hybrid/HybridGaussianFactor.h>
#include <gtsam/hybrid/HybridValues.h>
#include <gtsam/inference/Symbol.h>
#include <gtsam/linear/GaussianConditional.h>

View File

@ -11,7 +11,7 @@
/**
* @file testGaussianMixtureFactor.cpp
* @brief Unit tests for GaussianMixtureFactor
* @brief Unit tests for HybridGaussianFactor
* @author Varun Agrawal
* @author Fan Jiang
* @author Frank Dellaert
@ -21,7 +21,7 @@
#include <gtsam/base/TestableAssertions.h>
#include <gtsam/discrete/DiscreteValues.h>
#include <gtsam/hybrid/GaussianMixture.h>
#include <gtsam/hybrid/GaussianMixtureFactor.h>
#include <gtsam/hybrid/HybridGaussianFactor.h>
#include <gtsam/hybrid/HybridBayesNet.h>
#include <gtsam/hybrid/HybridGaussianFactorGraph.h>
#include <gtsam/hybrid/HybridValues.h>
@ -43,17 +43,17 @@ using symbol_shorthand::Z;
/* ************************************************************************* */
// Check iterators of empty mixture.
TEST(GaussianMixtureFactor, Constructor) {
GaussianMixtureFactor factor;
GaussianMixtureFactor::const_iterator const_it = factor.begin();
TEST(HybridGaussianFactor, Constructor) {
HybridGaussianFactor factor;
HybridGaussianFactor::const_iterator const_it = factor.begin();
CHECK(const_it == factor.end());
GaussianMixtureFactor::iterator it = factor.begin();
HybridGaussianFactor::iterator it = factor.begin();
CHECK(it == factor.end());
}
/* ************************************************************************* */
// "Add" two mixture factors together.
TEST(GaussianMixtureFactor, Sum) {
TEST(HybridGaussianFactor, Sum) {
DiscreteKey m1(1, 2), m2(2, 3);
auto A1 = Matrix::Zero(2, 1);
@ -74,8 +74,8 @@ TEST(GaussianMixtureFactor, Sum) {
// TODO(Frank): why specify keys at all? And: keys in factor should be *all*
// keys, deviating from Kevin's scheme. Should we index DT on DiscreteKey?
// Design review!
GaussianMixtureFactor mixtureFactorA({X(1), X(2)}, {m1}, factorsA);
GaussianMixtureFactor mixtureFactorB({X(1), X(3)}, {m2}, factorsB);
HybridGaussianFactor mixtureFactorA({X(1), X(2)}, {m1}, factorsA);
HybridGaussianFactor mixtureFactorB({X(1), X(3)}, {m2}, factorsB);
// Check that number of keys is 3
EXPECT_LONGS_EQUAL(3, mixtureFactorA.keys().size());
@ -99,7 +99,7 @@ TEST(GaussianMixtureFactor, Sum) {
}
/* ************************************************************************* */
TEST(GaussianMixtureFactor, Printing) {
TEST(HybridGaussianFactor, Printing) {
DiscreteKey m1(1, 2);
auto A1 = Matrix::Zero(2, 1);
auto A2 = Matrix::Zero(2, 2);
@ -108,10 +108,10 @@ TEST(GaussianMixtureFactor, Printing) {
auto f11 = std::make_shared<JacobianFactor>(X(1), A1, X(2), A2, b);
std::vector<GaussianFactor::shared_ptr> factors{f10, f11};
GaussianMixtureFactor mixtureFactor({X(1), X(2)}, {m1}, factors);
HybridGaussianFactor mixtureFactor({X(1), X(2)}, {m1}, factors);
std::string expected =
R"(GaussianMixtureFactor
R"(HybridGaussianFactor
Hybrid [x1 x2; 1]{
Choice(1)
0 Leaf :
@ -144,7 +144,7 @@ Hybrid [x1 x2; 1]{
}
/* ************************************************************************* */
TEST(GaussianMixtureFactor, GaussianMixture) {
TEST(HybridGaussianFactor, GaussianMixture) {
KeyVector keys;
keys.push_back(X(0));
keys.push_back(X(1));
@ -161,8 +161,8 @@ TEST(GaussianMixtureFactor, GaussianMixture) {
}
/* ************************************************************************* */
// Test the error of the GaussianMixtureFactor
TEST(GaussianMixtureFactor, Error) {
// Test the error of the HybridGaussianFactor
TEST(HybridGaussianFactor, Error) {
DiscreteKey m1(1, 2);
auto A01 = Matrix2::Identity();
@ -177,7 +177,7 @@ TEST(GaussianMixtureFactor, Error) {
auto f1 = std::make_shared<JacobianFactor>(X(1), A11, X(2), A12, b);
std::vector<GaussianFactor::shared_ptr> factors{f0, f1};
GaussianMixtureFactor mixtureFactor({X(1), X(2)}, {m1}, factors);
HybridGaussianFactor mixtureFactor({X(1), X(2)}, {m1}, factors);
VectorValues continuousValues;
continuousValues.insert(X(1), Vector2(0, 0));
@ -250,7 +250,7 @@ static HybridBayesNet GetGaussianMixtureModel(double mu0, double mu1,
* The resulting factor graph should eliminate to a Bayes net
* which represents a sigmoid function.
*/
TEST(GaussianMixtureFactor, GaussianMixtureModel) {
TEST(HybridGaussianFactor, GaussianMixtureModel) {
using namespace test_gmm;
double mu0 = 1.0, mu1 = 3.0;
@ -322,7 +322,7 @@ TEST(GaussianMixtureFactor, GaussianMixtureModel) {
* which represents a Gaussian-like function
* where m1>m0 close to 3.1333.
*/
TEST(GaussianMixtureFactor, GaussianMixtureModel2) {
TEST(HybridGaussianFactor, GaussianMixtureModel2) {
using namespace test_gmm;
double mu0 = 1.0, mu1 = 3.0;
@ -446,7 +446,7 @@ static HybridBayesNet CreateBayesNet(double mu0, double mu1, double sigma0,
* the probability of m1 should be 0.5/0.5.
* Getting a measurement on z1 gives us more information.
*/
TEST(GaussianMixtureFactor, TwoStateModel) {
TEST(HybridGaussianFactor, TwoStateModel) {
using namespace test_two_state_estimation;
double mu0 = 1.0, mu1 = 3.0;
@ -500,7 +500,7 @@ TEST(GaussianMixtureFactor, TwoStateModel) {
* the P(m1) should be 0.5/0.5.
* Getting a measurement on z1 gives us more information.
*/
TEST(GaussianMixtureFactor, TwoStateModel2) {
TEST(HybridGaussianFactor, TwoStateModel2) {
using namespace test_two_state_estimation;
double mu0 = 1.0, mu1 = 3.0;

View File

@ -531,10 +531,10 @@ TEST(HybridEstimation, CorrectnessViaSampling) {
* Helper function to add the constant term corresponding to
* the difference in noise models.
*/
std::shared_ptr<GaussianMixtureFactor> mixedVarianceFactor(
std::shared_ptr<HybridGaussianFactor> mixedVarianceFactor(
const MixtureFactor& mf, const Values& initial, const Key& mode,
double noise_tight, double noise_loose, size_t d, size_t tight_index) {
GaussianMixtureFactor::shared_ptr gmf = mf.linearize(initial);
HybridGaussianFactor::shared_ptr gmf = mf.linearize(initial);
constexpr double log2pi = 1.8378770664093454835606594728112;
// logConstant will be of the tighter model
@ -560,7 +560,7 @@ std::shared_ptr<GaussianMixtureFactor> mixedVarianceFactor(
}
};
auto updated_components = gmf->factors().apply(func);
auto updated_gmf = std::make_shared<GaussianMixtureFactor>(
auto updated_gmf = std::make_shared<HybridGaussianFactor>(
gmf->continuousKeys(), gmf->discreteKeys(), updated_components);
return updated_gmf;

View File

@ -55,7 +55,7 @@ TEST(HybridFactorGraph, Keys) {
DecisionTree<Key, GaussianFactor::shared_ptr> dt(
M(1), std::make_shared<JacobianFactor>(X(1), I_3x3, Z_3x1),
std::make_shared<JacobianFactor>(X(1), I_3x3, Vector3::Ones()));
hfg.add(GaussianMixtureFactor({X(1)}, {m1}, dt));
hfg.add(HybridGaussianFactor({X(1)}, {m1}, dt));
KeySet expected_continuous{X(0), X(1)};
EXPECT(

View File

@ -22,7 +22,7 @@
#include <gtsam/discrete/DiscreteKey.h>
#include <gtsam/discrete/DiscreteValues.h>
#include <gtsam/hybrid/GaussianMixture.h>
#include <gtsam/hybrid/GaussianMixtureFactor.h>
#include <gtsam/hybrid/HybridGaussianFactor.h>
#include <gtsam/hybrid/HybridBayesNet.h>
#include <gtsam/hybrid/HybridBayesTree.h>
#include <gtsam/hybrid/HybridConditional.h>
@ -129,7 +129,7 @@ TEST(HybridGaussianFactorGraph, eliminateFullSequentialEqualChance) {
DecisionTree<Key, GaussianFactor::shared_ptr> dt(
M(1), std::make_shared<JacobianFactor>(X(1), I_3x3, Z_3x1),
std::make_shared<JacobianFactor>(X(1), I_3x3, Vector3::Ones()));
hfg.add(GaussianMixtureFactor({X(1)}, {m1}, dt));
hfg.add(HybridGaussianFactor({X(1)}, {m1}, dt));
auto result = hfg.eliminateSequential();
@ -155,7 +155,7 @@ TEST(HybridGaussianFactorGraph, eliminateFullSequentialSimple) {
std::vector<GaussianFactor::shared_ptr> factors = {
std::make_shared<JacobianFactor>(X(1), I_3x3, Z_3x1),
std::make_shared<JacobianFactor>(X(1), I_3x3, Vector3::Ones())};
hfg.add(GaussianMixtureFactor({X(1)}, {m1}, factors));
hfg.add(HybridGaussianFactor({X(1)}, {m1}, factors));
// Discrete probability table for c1
hfg.add(DecisionTreeFactor(m1, {2, 8}));
@ -177,7 +177,7 @@ TEST(HybridGaussianFactorGraph, eliminateFullMultifrontalSimple) {
hfg.add(JacobianFactor(X(0), I_3x3, Z_3x1));
hfg.add(JacobianFactor(X(0), I_3x3, X(1), -I_3x3, Z_3x1));
hfg.add(GaussianMixtureFactor(
hfg.add(HybridGaussianFactor(
{X(1)}, {{M(1), 2}},
{std::make_shared<JacobianFactor>(X(1), I_3x3, Z_3x1),
std::make_shared<JacobianFactor>(X(1), I_3x3, Vector3::Ones())}));
@ -212,7 +212,7 @@ TEST(HybridGaussianFactorGraph, eliminateFullMultifrontalCLG) {
std::make_shared<JacobianFactor>(X(1), I_3x3, Vector3::Ones()));
// Hybrid factor P(x1|c1)
hfg.add(GaussianMixtureFactor({X(1)}, {m}, dt));
hfg.add(HybridGaussianFactor({X(1)}, {m}, dt));
// Prior factor on c1
hfg.add(DecisionTreeFactor(m, {2, 8}));
@ -237,7 +237,7 @@ TEST(HybridGaussianFactorGraph, eliminateFullMultifrontalTwoClique) {
hfg.add(JacobianFactor(X(1), I_3x3, X(2), -I_3x3, Z_3x1));
{
hfg.add(GaussianMixtureFactor(
hfg.add(HybridGaussianFactor(
{X(0)}, {{M(0), 2}},
{std::make_shared<JacobianFactor>(X(0), I_3x3, Z_3x1),
std::make_shared<JacobianFactor>(X(0), I_3x3, Vector3::Ones())}));
@ -246,7 +246,7 @@ TEST(HybridGaussianFactorGraph, eliminateFullMultifrontalTwoClique) {
M(1), std::make_shared<JacobianFactor>(X(2), I_3x3, Z_3x1),
std::make_shared<JacobianFactor>(X(2), I_3x3, Vector3::Ones()));
hfg.add(GaussianMixtureFactor({X(2)}, {{M(1), 2}}, dt1));
hfg.add(HybridGaussianFactor({X(2)}, {{M(1), 2}}, dt1));
}
hfg.add(DecisionTreeFactor({{M(1), 2}, {M(2), 2}}, "1 2 3 4"));
@ -259,13 +259,13 @@ TEST(HybridGaussianFactorGraph, eliminateFullMultifrontalTwoClique) {
M(3), std::make_shared<JacobianFactor>(X(3), I_3x3, Z_3x1),
std::make_shared<JacobianFactor>(X(3), I_3x3, Vector3::Ones()));
hfg.add(GaussianMixtureFactor({X(3)}, {{M(3), 2}}, dt));
hfg.add(HybridGaussianFactor({X(3)}, {{M(3), 2}}, dt));
DecisionTree<Key, GaussianFactor::shared_ptr> dt1(
M(2), std::make_shared<JacobianFactor>(X(5), I_3x3, Z_3x1),
std::make_shared<JacobianFactor>(X(5), I_3x3, Vector3::Ones()));
hfg.add(GaussianMixtureFactor({X(5)}, {{M(2), 2}}, dt1));
hfg.add(HybridGaussianFactor({X(5)}, {{M(2), 2}}, dt1));
}
auto ordering_full =
@ -555,7 +555,7 @@ TEST(HybridGaussianFactorGraph, optimize) {
C(1), std::make_shared<JacobianFactor>(X(1), I_3x3, Z_3x1),
std::make_shared<JacobianFactor>(X(1), I_3x3, Vector3::Ones()));
hfg.add(GaussianMixtureFactor({X(1)}, {c1}, dt));
hfg.add(HybridGaussianFactor({X(1)}, {c1}, dt));
auto result = hfg.eliminateSequential();
@ -717,7 +717,7 @@ TEST(HybridGaussianFactorGraph, assembleGraphTree) {
// Create expected decision tree with two factor graphs:
// Get mixture factor:
auto mixture = fg.at<GaussianMixtureFactor>(0);
auto mixture = fg.at<HybridGaussianFactor>(0);
CHECK(mixture);
// Get prior factor:

View File

@ -510,7 +510,7 @@ factor 0:
b = [ -10 ]
No noise model
factor 1:
GaussianMixtureFactor
HybridGaussianFactor
Hybrid [x0 x1; m0]{
Choice(m0)
0 Leaf :
@ -535,7 +535,7 @@ Hybrid [x0 x1; m0]{
}
factor 2:
GaussianMixtureFactor
HybridGaussianFactor
Hybrid [x1 x2; m1]{
Choice(m1)
0 Leaf :

View File

@ -19,7 +19,7 @@
#include <gtsam/base/serializationTestHelpers.h>
#include <gtsam/discrete/DiscreteConditional.h>
#include <gtsam/hybrid/GaussianMixture.h>
#include <gtsam/hybrid/GaussianMixtureFactor.h>
#include <gtsam/hybrid/HybridGaussianFactor.h>
#include <gtsam/hybrid/HybridBayesNet.h>
#include <gtsam/hybrid/HybridBayesTree.h>
#include <gtsam/hybrid/HybridConditional.h>
@ -51,12 +51,12 @@ BOOST_CLASS_EXPORT_GUID(ADT, "gtsam_AlgebraicDecisionTree");
BOOST_CLASS_EXPORT_GUID(ADT::Leaf, "gtsam_AlgebraicDecisionTree_Leaf");
BOOST_CLASS_EXPORT_GUID(ADT::Choice, "gtsam_AlgebraicDecisionTree_Choice")
BOOST_CLASS_EXPORT_GUID(GaussianMixtureFactor, "gtsam_GaussianMixtureFactor");
BOOST_CLASS_EXPORT_GUID(GaussianMixtureFactor::Factors,
BOOST_CLASS_EXPORT_GUID(HybridGaussianFactor, "gtsam_GaussianMixtureFactor");
BOOST_CLASS_EXPORT_GUID(HybridGaussianFactor::Factors,
"gtsam_GaussianMixtureFactor_Factors");
BOOST_CLASS_EXPORT_GUID(GaussianMixtureFactor::Factors::Leaf,
BOOST_CLASS_EXPORT_GUID(HybridGaussianFactor::Factors::Leaf,
"gtsam_GaussianMixtureFactor_Factors_Leaf");
BOOST_CLASS_EXPORT_GUID(GaussianMixtureFactor::Factors::Choice,
BOOST_CLASS_EXPORT_GUID(HybridGaussianFactor::Factors::Choice,
"gtsam_GaussianMixtureFactor_Factors_Choice");
BOOST_CLASS_EXPORT_GUID(GaussianMixture, "gtsam_GaussianMixture");
@ -72,8 +72,8 @@ BOOST_CLASS_EXPORT_GUID(noiseModel::Isotropic, "gtsam_noiseModel_Isotropic");
BOOST_CLASS_EXPORT_GUID(HybridBayesNet, "gtsam_HybridBayesNet");
/* ****************************************************************************/
// Test GaussianMixtureFactor serialization.
TEST(HybridSerialization, GaussianMixtureFactor) {
// Test HybridGaussianFactor serialization.
TEST(HybridSerialization, HybridGaussianFactor) {
KeyVector continuousKeys{X(0)};
DiscreteKeys discreteKeys{{M(0), 2}};
@ -84,11 +84,11 @@ TEST(HybridSerialization, GaussianMixtureFactor) {
auto f1 = std::make_shared<JacobianFactor>(X(0), A, b1);
std::vector<GaussianFactor::shared_ptr> factors{f0, f1};
const GaussianMixtureFactor factor(continuousKeys, discreteKeys, factors);
const HybridGaussianFactor factor(continuousKeys, discreteKeys, factors);
EXPECT(equalsObj<GaussianMixtureFactor>(factor));
EXPECT(equalsXML<GaussianMixtureFactor>(factor));
EXPECT(equalsBinary<GaussianMixtureFactor>(factor));
EXPECT(equalsObj<HybridGaussianFactor>(factor));
EXPECT(equalsXML<HybridGaussianFactor>(factor));
EXPECT(equalsBinary<HybridGaussianFactor>(factor));
}
/* ****************************************************************************/

View File

@ -18,7 +18,7 @@ from gtsam.utils.test_case import GtsamTestCase
import gtsam
from gtsam import (DiscreteConditional, DiscreteKeys, GaussianConditional,
GaussianMixture, GaussianMixtureFactor, HybridBayesNet,
GaussianMixture, HybridBayesNet, HybridGaussianFactor,
HybridGaussianFactorGraph, HybridValues, JacobianFactor,
Ordering, noiseModel)
@ -36,7 +36,7 @@ class TestHybridGaussianFactorGraph(GtsamTestCase):
jf1 = JacobianFactor(X(0), np.eye(3), np.zeros((3, 1)), model)
jf2 = JacobianFactor(X(0), np.eye(3), np.ones((3, 1)), model)
gmf = GaussianMixtureFactor([X(0)], dk, [jf1, jf2])
gmf = HybridGaussianFactor([X(0)], dk, [jf1, jf2])
hfg = HybridGaussianFactorGraph()
hfg.push_back(jf1)
@ -63,7 +63,7 @@ class TestHybridGaussianFactorGraph(GtsamTestCase):
jf1 = JacobianFactor(X(0), np.eye(3), np.zeros((3, 1)), model)
jf2 = JacobianFactor(X(0), np.eye(3), np.ones((3, 1)), model)
gmf = GaussianMixtureFactor([X(0)], dk, [jf1, jf2])
gmf = HybridGaussianFactor([X(0)], dk, [jf1, jf2])
hfg = HybridGaussianFactorGraph()
hfg.push_back(jf1)