Merge branch 'model-selection-integration' into model-selection-bayestree

release/4.3a0
Varun Agrawal 2024-08-20 14:11:10 -04:00
commit 37c6484cbd
9 changed files with 414 additions and 190 deletions


@@ -71,29 +71,27 @@ GaussianMixture::GaussianMixture(
Conditionals(discreteParents, conditionals)) {}
/* *******************************************************************************/
// TODO(dellaert): This is copy/paste: GaussianMixture should be derived from
// GaussianMixtureFactor, no?
GaussianFactorGraphTree GaussianMixture::add(
const GaussianFactorGraphTree &sum) const {
using Y = GaussianFactorGraph;
auto add = [](const Y &graph1, const Y &graph2) {
auto result = graph1;
result.push_back(graph2);
return result;
};
const auto tree = asGaussianFactorGraphTree();
return sum.empty() ? tree : sum.apply(tree, add);
}
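A quick aside on the DecisionTree::apply overload used above (not part of this change; doubles stand in for factor graphs, and the usual symbol_shorthand M is assumed):
#include <gtsam/discrete/DecisionTree.h>
#include <gtsam/inference/Symbol.h>
using gtsam::DecisionTree;
using gtsam::Key;
using gtsam::symbol_shorthand::M;
// Inside a function body: combine two trees leaf-by-leaf over the same
// discrete choices, which is exactly what `sum.apply(tree, add)` does with
// factor graphs.
DecisionTree<Key, double> left({M(1), 2}, 1.0, 2.0);
DecisionTree<Key, double> right({M(1), 2}, 10.0, 20.0);
auto merged = left.apply(right, [](double a, double b) { return a + b; });
// merged has leaves 11.0 and 22.0, one per assignment of M(1).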
/* *******************************************************************************/
GaussianFactorGraphTree GaussianMixture::asGaussianFactorGraphTree() const {
GaussianBayesNetTree GaussianMixture::asGaussianBayesNetTree() const {
auto wrap = [](const GaussianConditional::shared_ptr &gc) {
return GaussianFactorGraph{gc};
if (gc) {
return GaussianBayesNet{gc};
} else {
return GaussianBayesNet();
}
};
return {conditionals_, wrap};
}
/* *******************************************************************************/
GaussianFactorGraphTree GaussianMixture::asGaussianFactorGraphTree() const {
auto wrap = [](const GaussianBayesNet &gbn) {
return GaussianFactorGraph(gbn);
};
return {this->asGaussianBayesNetTree(), wrap};
}
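The {..., wrap} initializers above use DecisionTree's converting constructor, which maps every leaf of an existing tree through a functor; a minimal sketch with doubles in place of Bayes nets (hypothetical values, reusing the includes from the sketch above):
// Inside a function body:
DecisionTree<Key, double> sigmas({M(1), 2}, 0.5, 2.0);
DecisionTree<Key, bool> isTight(sigmas, [](double s) { return s < 1.0; });
// isTight has leaves true (for M(1)=0) and false (for M(1)=1).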
/* *******************************************************************************/
GaussianBayesNetTree GaussianMixture::add(
const GaussianBayesNetTree &sum) const {
using Y = GaussianBayesNet;
@@ -110,15 +108,18 @@ GaussianBayesNetTree GaussianMixture::add(
}
/* *******************************************************************************/
GaussianBayesNetTree GaussianMixture::asGaussianBayesNetTree() const {
auto wrap = [](const GaussianConditional::shared_ptr &gc) {
if (gc) {
return GaussianBayesNet{gc};
} else {
return GaussianBayesNet();
}
// TODO(dellaert): This is copy/paste: GaussianMixture should be derived from
// GaussianMixtureFactor, no?
GaussianFactorGraphTree GaussianMixture::add(
const GaussianFactorGraphTree &sum) const {
using Y = GaussianFactorGraph;
auto add = [](const Y &graph1, const Y &graph2) {
auto result = graph1;
result.push_back(graph2);
return result;
};
return {conditionals_, wrap};
const auto tree = asGaussianFactorGraphTree();
return sum.empty() ? tree : sum.apply(tree, add);
}
/* *******************************************************************************/


@@ -28,11 +28,86 @@
namespace gtsam {
/**
* @brief Helper function to correct the [A|b] matrices in the factor components
* with the normalizer values.
* This is done by appending an extra row to the `b` vector which stores the
* square root of the component's (shifted) log-normalizer.
*
* @param factors DecisionTree of GaussianFactor shared pointers.
* @param varyingNormalizers Flag indicating the normalizers are different for
* each component.
* @return GaussianMixtureFactor::Factors
*/
GaussianMixtureFactor::Factors correct(
const GaussianMixtureFactor::Factors &factors, bool varyingNormalizers) {
if (!varyingNormalizers) {
return factors;
}
// First compute all the log(|2 pi Sigma|) terms
auto computeNormalizers = [](const GaussianMixtureFactor::sharedFactor &gf) {
auto jf = std::dynamic_pointer_cast<JacobianFactor>(gf);
// If we have, say, a Hessian factor, then no need to do anything
if (!jf) return 0.0;
auto model = jf->get_model();
// If there is no noise model, there is nothing to do.
if (!model) {
return 0.0;
}
// Since noise models are Gaussian, we can get the logDeterminant using the
// same trick as in GaussianConditional
double logDetR =
model->R().diagonal().unaryExpr([](double x) { return log(x); }).sum();
double logDeterminantSigma = -2.0 * logDetR;
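// (Here R() is the square-root information matrix, i.e. Sigma^{-1} = R^T R,
// so log|Sigma| = -2 * sum_i log(R_ii); for a Diagonal model, R_ii = 1/sigma_i.)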
size_t n = model->dim();
constexpr double log2pi = 1.8378770664093454835606594728112;
return n * log2pi + logDeterminantSigma;
};
AlgebraicDecisionTree<Key> log_normalizers =
DecisionTree<Key, double>(factors, computeNormalizers);
// Find the minimum value so we can shift all log-normalizers to be
// non-negative, since we cannot take the square root of a negative number.
double min_log_normalizer = log_normalizers.min();
log_normalizers = log_normalizers.apply(
[&min_log_normalizer](double n) { return n - min_log_normalizer; });
// Finally, update the [A|b] matrices.
auto update = [&log_normalizers](
const Assignment<Key> &assignment,
const GaussianMixtureFactor::sharedFactor &gf) {
auto jf = std::dynamic_pointer_cast<JacobianFactor>(gf);
if (!jf) return gf;
// If there is no noise model, there is nothing to do.
if (!jf->get_model()) return gf;
// If the log_normalizer is 0, do nothing
if (log_normalizers(assignment) == 0.0) return gf;
GaussianFactorGraph gfg;
gfg.push_back(jf);
Vector c(1);
c << std::sqrt(log_normalizers(assignment));
auto constantFactor = std::make_shared<JacobianFactor>(c);
gfg.push_back(constantFactor);
return std::dynamic_pointer_cast<GaussianFactor>(
std::make_shared<JacobianFactor>(gfg));
};
return factors.apply(update);
}
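Why the appended row works (values below are hypothetical): a JacobianFactor with no keys and a one-entry b vector has error 0.5 * c^2 at every point, so choosing c = sqrt(shifted log-normalizer) makes each component's error gain 0.5 * (log|2πΣ_i| - min), i.e. its negative log normalization constant -log(1/sqrt(|2πΣ_i|)) up to a constant shared by all components. A minimal sketch:
#include <gtsam/linear/JacobianFactor.h>
#include <gtsam/linear/VectorValues.h>
#include <cmath>
// Inside a function body:
double shifted = 1.2;  // hypothetical: log|2πΣ_i| minus the minimum over components
gtsam::Vector c(1);
c << std::sqrt(shifted);
auto constant = std::make_shared<gtsam::JacobianFactor>(c);  // no keys, b = [c]
double e = constant->error(gtsam::VectorValues());  // == 0.5 * shifted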
/* *******************************************************************************/
GaussianMixtureFactor::GaussianMixtureFactor(const KeyVector &continuousKeys,
const DiscreteKeys &discreteKeys,
const Factors &factors)
: Base(continuousKeys, discreteKeys), factors_(factors) {}
const Factors &factors,
bool varyingNormalizers)
: Base(continuousKeys, discreteKeys),
factors_(correct(factors, varyingNormalizers)) {}
/* *******************************************************************************/
bool GaussianMixtureFactor::equals(const HybridFactor &lf, double tol) const {


@@ -82,10 +82,13 @@ class GTSAM_EXPORT GaussianMixtureFactor : public HybridFactor {
* their cardinalities.
* @param factors The decision tree of Gaussian factors stored as the mixture
* density.
* @param varyingNormalizers Flag indicating factor components have varying
* normalizer values.
*/
GaussianMixtureFactor(const KeyVector &continuousKeys,
const DiscreteKeys &discreteKeys,
const Factors &factors);
const Factors &factors,
bool varyingNormalizers = false);
/**
* @brief Construct a new GaussianMixtureFactor object using a vector of
@@ -94,12 +97,16 @@ class GTSAM_EXPORT GaussianMixtureFactor : public HybridFactor {
* @param continuousKeys Vector of keys for continuous factors.
* @param discreteKeys Vector of discrete keys.
* @param factors Vector of gaussian factor shared pointers.
* @param varyingNormalizers Flag indicating factor components have varying
* normalizer values.
*/
GaussianMixtureFactor(const KeyVector &continuousKeys,
const DiscreteKeys &discreteKeys,
const std::vector<sharedFactor> &factors)
const std::vector<sharedFactor> &factors,
bool varyingNormalizers = false)
: GaussianMixtureFactor(continuousKeys, discreteKeys,
Factors(discreteKeys, factors)) {}
Factors(discreteKeys, factors),
varyingNormalizers) {}
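A minimal usage sketch for the new flag (hypothetical keys and sigmas, not taken from this change): two unary Jacobian components whose noise models differ, so their normalization constants differ and the correction above kicks in:
#include <gtsam/hybrid/GaussianMixtureFactor.h>
#include <gtsam/inference/Symbol.h>
#include <gtsam/linear/JacobianFactor.h>
using namespace gtsam;
using symbol_shorthand::M;
using symbol_shorthand::X;
// Inside a function body:
DiscreteKey mode(M(1), 2);
auto tight = noiseModel::Isotropic::Sigma(1, 0.1);
auto loose = noiseModel::Isotropic::Sigma(1, 10.0);
auto j0 = std::make_shared<JacobianFactor>(X(1), I_1x1, Vector1(0.0), tight);
auto j1 = std::make_shared<JacobianFactor>(X(1), I_1x1, Vector1(0.0), loose);
std::vector<GaussianFactor::shared_ptr> components{j0, j1};
GaussianMixtureFactor gmf({X(1)}, {mode}, components,
                          /*varyingNormalizers=*/true);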
/// @}
/// @name Testable
@@ -134,7 +141,8 @@ class GTSAM_EXPORT GaussianMixtureFactor : public HybridFactor {
* @return AlgebraicDecisionTree<Key> A decision tree with the same keys
* as the factors involved, and leaf values as the error.
*/
AlgebraicDecisionTree<Key> errorTree(const VectorValues &continuousValues) const;
AlgebraicDecisionTree<Key> errorTree(
const VectorValues &continuousValues) const;
/**
* @brief Compute the log-likelihood, including the log-normalizing constant.


@@ -26,16 +26,6 @@ static std::mt19937_64 kRandomNumberGenerator(42);
namespace gtsam {
/* ************************************************************************ */
// Throw a runtime exception for method specified in string s,
// and conditional f:
static void throwRuntimeError(const std::string &s,
const std::shared_ptr<HybridConditional> &f) {
auto &fr = *f;
throw std::runtime_error(s + " not implemented for conditional type " +
demangle(typeid(fr).name()) + ".");
}
/* ************************************************************************* */
void HybridBayesNet::print(const std::string &s,
const KeyFormatter &formatter) const {
@@ -227,124 +217,17 @@ GaussianBayesNet HybridBayesNet::choose(
return gbn;
}
/* ************************************************************************ */
GaussianBayesNetValTree HybridBayesNet::assembleTree() const {
GaussianBayesNetTree result;
for (auto &f : factors_) {
// TODO(dellaert): just use a virtual method defined in HybridFactor.
if (auto gm = std::dynamic_pointer_cast<GaussianMixture>(f)) {
result = gm->add(result);
} else if (auto hc = std::dynamic_pointer_cast<HybridConditional>(f)) {
if (auto gm = hc->asMixture()) {
result = gm->add(result);
} else if (auto g = hc->asGaussian()) {
result = addGaussian(result, g);
} else {
// Has to be discrete.
// TODO(dellaert): in C++20, we can use std::visit.
continue;
}
} else if (std::dynamic_pointer_cast<DiscreteFactor>(f)) {
// Don't do anything for discrete-only factors
// since we want to evaluate continuous values only.
continue;
} else {
// We need to handle the case where the object is actually a
// BayesTreeOrphanWrapper!
throwRuntimeError("HybridBayesNet::assembleTree", f);
}
}
GaussianBayesNetValTree resultTree(result, [](const GaussianBayesNet &gbn) {
return std::make_pair(gbn, 0.0);
});
return resultTree;
}
/* ************************************************************************* */
AlgebraicDecisionTree<Key> HybridBayesNet::modelSelection() const {
/*
To perform model selection, we need: q(mu; M, Z) = exp(-error)
where error is computed at the corresponding MAP point, gbn.error(mu).
So we compute (-error) and exponentiate later
*/
GaussianBayesNetValTree bnTree = assembleTree();
GaussianBayesNetValTree bn_error = bnTree.apply(
[this](const Assignment<Key> &assignment,
const std::pair<GaussianBayesNet, double> &gbnAndValue) {
// Compute the X* of each assignment
VectorValues mu = gbnAndValue.first.optimize();
// mu is empty if gbn had nullptrs
if (mu.size() == 0) {
return std::make_pair(gbnAndValue.first,
std::numeric_limits<double>::max());
}
// Compute the error at the MLE point X* for the current assignment
double error =
this->error(HybridValues(mu, DiscreteValues(assignment)));
return std::make_pair(gbnAndValue.first, error);
});
auto trees = unzip(bn_error);
AlgebraicDecisionTree<Key> errorTree = trees.second;
// Compute model selection term (with help from ADT methods)
AlgebraicDecisionTree<Key> modelSelectionTerm = errorTree * -1;
// Exponentiate using our scheme
double max_log = modelSelectionTerm.max();
modelSelectionTerm = DecisionTree<Key, double>(
modelSelectionTerm,
[&max_log](const double &x) { return std::exp(x - max_log); });
modelSelectionTerm = modelSelectionTerm.normalize(modelSelectionTerm.sum());
return modelSelectionTerm;
}
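The shift by max_log above is the usual underflow guard; the shared shift cancels once the tree is normalized by its sum. A tiny numeric sketch with hypothetical errors:
#include <algorithm>
#include <cmath>
// Inside a function body, two hypothetical assignment errors:
double E0 = 800.0, E1 = 801.0;         // note exp(-800) underflows to 0 in double
double max_log = std::max(-E0, -E1);   // == -800
double q0 = std::exp(-E0 - max_log);   // == 1
double q1 = std::exp(-E1 - max_log);   // == exp(-1) ≈ 0.368
double sum = q0 + q1;
// q0/sum ≈ 0.731 and q1/sum ≈ 0.269: the same posterior that
// exp(-E0)/(exp(-E0)+exp(-E1)) defines, computed without hitting 0/0.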
/* ************************************************************************* */
HybridValues HybridBayesNet::optimize() const {
// Collect all the discrete factors to compute MPE
DiscreteFactorGraph discrete_fg;
// Compute model selection term
AlgebraicDecisionTree<Key> modelSelectionTerm = modelSelection();
// Get the set of all discrete keys involved in model selection
std::set<DiscreteKey> discreteKeySet;
for (auto &&conditional : *this) {
if (conditional->isDiscrete()) {
discrete_fg.push_back(conditional->asDiscrete());
} else {
if (conditional->isContinuous()) {
/*
If we are here, it means there are no discrete variables in
the Bayes net (due to strong elimination ordering).
This is a continuous-only problem, hence model selection doesn't matter.
*/
} else if (conditional->isHybrid()) {
auto gm = conditional->asMixture();
// Include the discrete keys
std::copy(gm->discreteKeys().begin(), gm->discreteKeys().end(),
std::inserter(discreteKeySet, discreteKeySet.end()));
}
}
}
// Only add model_selection if we have discrete keys
if (discreteKeySet.size() > 0) {
discrete_fg.push_back(DecisionTreeFactor(
DiscreteKeys(discreteKeySet.begin(), discreteKeySet.end()),
modelSelectionTerm));
}
// Solve for the MPE
DiscreteValues mpe = discrete_fg.optimize();


@@ -118,29 +118,6 @@ class GTSAM_EXPORT HybridBayesNet : public BayesNet<HybridConditional> {
return evaluate(values);
}
/**
* @brief Assemble a DecisionTree of (GaussianBayesNet, double) leaves for
* each discrete assignment.
* The included double value is used to make
* constructing the model selection term cleaner and more efficient.
*
* @return GaussianBayesNetValTree
*/
GaussianBayesNetValTree assembleTree() const;
/**
* @brief Compute the model selection term q(μ_X; M, Z)
* given the error for each discrete assignment.
*
* The q(μ) terms are obtained as a result of elimination
* as part of the separator factor.
*
* Perform normalization to handle underflow issues.
*
* @return AlgebraicDecisionTree<Key>
*/
AlgebraicDecisionTree<Key> modelSelection() const;
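In symbols (notation introduced here, not taken from the source), with E_M the hybrid error evaluated at the per-assignment optimum mu*_M, the term computed is
q(M) = exp(-E_M(mu*_M)) / sum over M' of exp(-E_M'(mu*_M'))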
/**
* @brief Solve the HybridBayesNet by first computing the MPE of all the
* discrete variables and then optimizing the continuous variables based on


@@ -13,6 +13,7 @@
* @file HybridFactor.h
* @date Mar 11, 2022
* @author Fan Jiang
* @author Varun Agrawal
*/
#pragma once
@@ -35,12 +36,6 @@ class HybridValues;
using GaussianFactorGraphTree = DecisionTree<Key, GaussianFactorGraph>;
/// Alias for DecisionTree of GaussianBayesNets
using GaussianBayesNetTree = DecisionTree<Key, GaussianBayesNet>;
/**
* Alias for DecisionTree of (GaussianBayesNet, double) pairs.
* Used for model selection in BayesNet::optimize
*/
using GaussianBayesNetValTree =
DecisionTree<Key, std::pair<GaussianBayesNet, double>>;
KeyVector CollectKeys(const KeyVector &continuousKeys,
const DiscreteKeys &discreteKeys);


@@ -295,7 +295,7 @@ static std::shared_ptr<Factor> createDiscreteFactor(
if (!factor) return 1.0; // TODO(dellaert): not loving this.
// Logspace version of:
// exp(-factor->error(kEmpty)) * conditional->normalizationConstant();
// exp(-factor->error(kEmpty)) / conditional->normalizationConstant();
// We take the negative of the logNormalizationConstant `log(1/k)`
// to get `log(k)`.
return -factor->error(kEmpty) + (-conditional->logNormalizationConstant());
@@ -372,6 +372,12 @@ hybridElimination(const HybridGaussianFactorGraph &factors,
// Perform elimination!
DecisionTree<Key, Result> eliminationResults(factorGraphTree, eliminate);
// Create the GaussianMixture from the conditionals
GaussianMixture::Conditionals conditionals(
eliminationResults, [](const Result &pair) { return pair.first; });
auto gaussianMixture = std::make_shared<GaussianMixture>(
frontalKeys, continuousSeparator, discreteSeparator, conditionals);
// If there are no more continuous parents we create a DiscreteFactor with the
// error for each discrete choice. Otherwise, create a GaussianMixtureFactor
// on the separator, taking care to correct for conditional constants.
@@ -381,12 +387,6 @@ hybridElimination(const HybridGaussianFactorGraph &factors,
: createGaussianMixtureFactor(eliminationResults, continuousSeparator,
discreteSeparator);
// Create the GaussianMixture from the conditionals
GaussianMixture::Conditionals conditionals(
eliminationResults, [](const Result &pair) { return pair.first; });
auto gaussianMixture = std::make_shared<GaussianMixture>(
frontalKeys, continuousSeparator, discreteSeparator, conditionals);
return {std::make_shared<HybridConditional>(gaussianMixture), newFactor};
}


@@ -22,9 +22,13 @@
#include <gtsam/discrete/DiscreteValues.h>
#include <gtsam/hybrid/GaussianMixture.h>
#include <gtsam/hybrid/GaussianMixtureFactor.h>
#include <gtsam/hybrid/HybridBayesNet.h>
#include <gtsam/hybrid/HybridGaussianFactorGraph.h>
#include <gtsam/hybrid/HybridValues.h>
#include <gtsam/inference/Symbol.h>
#include <gtsam/linear/GaussianFactorGraph.h>
#include <gtsam/nonlinear/PriorFactor.h>
#include <gtsam/slam/BetweenFactor.h>
// Include for test suite
#include <CppUnitLite/TestHarness.h>
@@ -56,7 +60,6 @@ TEST(GaussianMixtureFactor, Sum) {
auto b = Matrix::Zero(2, 1);
Vector2 sigmas;
sigmas << 1, 2;
auto model = noiseModel::Diagonal::Sigmas(sigmas, true);
auto f10 = std::make_shared<JacobianFactor>(X(1), A1, X(2), A2, b);
auto f11 = std::make_shared<JacobianFactor>(X(1), A1, X(2), A2, b);
@@ -179,7 +182,8 @@ TEST(GaussianMixtureFactor, Error) {
continuousValues.insert(X(2), Vector2(1, 1));
// errorTree should return a tree of errors, with a leaf for each discrete assignment.
AlgebraicDecisionTree<Key> error_tree = mixtureFactor.errorTree(continuousValues);
AlgebraicDecisionTree<Key> error_tree =
mixtureFactor.errorTree(continuousValues);
std::vector<DiscreteKey> discrete_keys = {m1};
// Error values for regression test
@@ -192,8 +196,240 @@ TEST(GaussianMixtureFactor, Error) {
DiscreteValues discreteValues;
discreteValues[m1.first] = 1;
EXPECT_DOUBLES_EQUAL(
4.0, mixtureFactor.error({continuousValues, discreteValues}),
1e-9);
4.0, mixtureFactor.error({continuousValues, discreteValues}), 1e-9);
}
/* ************************************************************************* */
// Test components with differing means
TEST(GaussianMixtureFactor, DifferentMeans) {
DiscreteKey m1(M(1), 2), m2(M(2), 2);
Values values;
double x1 = 0.0, x2 = 1.75, x3 = 2.60;
values.insert(X(1), x1);
values.insert(X(2), x2);
values.insert(X(3), x3);
auto model0 = noiseModel::Isotropic::Sigma(1, 1e-0);
auto model1 = noiseModel::Isotropic::Sigma(1, 1e-0);
auto prior_noise = noiseModel::Isotropic::Sigma(1, 1e-0);
auto f0 = std::make_shared<BetweenFactor<double>>(X(1), X(2), 0.0, model0)
->linearize(values);
auto f1 = std::make_shared<BetweenFactor<double>>(X(1), X(2), 2.0, model1)
->linearize(values);
std::vector<GaussianFactor::shared_ptr> factors{f0, f1};
GaussianMixtureFactor mixtureFactor({X(1), X(2)}, {m1}, factors, true);
HybridGaussianFactorGraph hfg;
hfg.push_back(mixtureFactor);
f0 = std::make_shared<BetweenFactor<double>>(X(2), X(3), 0.0, model0)
->linearize(values);
f1 = std::make_shared<BetweenFactor<double>>(X(2), X(3), 2.0, model1)
->linearize(values);
std::vector<GaussianFactor::shared_ptr> factors23{f0, f1};
hfg.push_back(GaussianMixtureFactor({X(2), X(3)}, {m2}, factors23, true));
auto prior = PriorFactor<double>(X(1), x1, prior_noise).linearize(values);
hfg.push_back(prior);
hfg.push_back(PriorFactor<double>(X(2), 2.0, prior_noise).linearize(values));
auto bn = hfg.eliminateSequential();
HybridValues actual = bn->optimize();
HybridValues expected(
VectorValues{
{X(1), Vector1(0.0)}, {X(2), Vector1(0.25)}, {X(3), Vector1(-0.6)}},
DiscreteValues{{M(1), 1}, {M(2), 0}});
EXPECT(assert_equal(expected, actual));
{
DiscreteValues dv{{M(1), 0}, {M(2), 0}};
VectorValues cont = bn->optimize(dv);
double error = bn->error(HybridValues(cont, dv));
// regression
EXPECT_DOUBLES_EQUAL(1.77418393408, error, 1e-9);
}
{
DiscreteValues dv{{M(1), 0}, {M(2), 1}};
VectorValues cont = bn->optimize(dv);
double error = bn->error(HybridValues(cont, dv));
// regression
EXPECT_DOUBLES_EQUAL(1.77418393408, error, 1e-9);
}
{
DiscreteValues dv{{M(1), 1}, {M(2), 0}};
VectorValues cont = bn->optimize(dv);
double error = bn->error(HybridValues(cont, dv));
// regression
EXPECT_DOUBLES_EQUAL(1.10751726741, error, 1e-9);
}
{
DiscreteValues dv{{M(1), 1}, {M(2), 1}};
VectorValues cont = bn->optimize(dv);
double error = bn->error(HybridValues(cont, dv));
// regression
EXPECT_DOUBLES_EQUAL(1.10751726741, error, 1e-9);
}
}
/* ************************************************************************* */
/**
* @brief Test components with differing covariances.
* The factor graph is
*   *-X1-*-X2
*        |
*        M1
*/
TEST(GaussianMixtureFactor, DifferentCovariances) {
DiscreteKey m1(M(1), 2);
Values values;
double x1 = 1.0, x2 = 1.0;
values.insert(X(1), x1);
values.insert(X(2), x2);
double between = 0.0;
auto model0 = noiseModel::Isotropic::Sigma(1, 1e2);
auto model1 = noiseModel::Isotropic::Sigma(1, 1e-2);
auto prior_noise = noiseModel::Isotropic::Sigma(1, 1e-3);
auto f0 =
std::make_shared<BetweenFactor<double>>(X(1), X(2), between, model0);
auto f1 =
std::make_shared<BetweenFactor<double>>(X(1), X(2), between, model1);
std::vector<NonlinearFactor::shared_ptr> factors{f0, f1};
// Create via toFactorGraph
using symbol_shorthand::Z;
Matrix H0_1, H0_2, H1_1, H1_2;
Vector d0 = f0->evaluateError(x1, x2, &H0_1, &H0_2);
std::vector<std::pair<Key, Matrix>> terms0 = {{Z(1), gtsam::I_1x1 /*Rx*/},
//
{X(1), H0_1 /*Sp1*/},
{X(2), H0_2 /*Tp2*/}};
Vector d1 = f1->evaluateError(x1, x2, &H1_1, &H1_2);
std::vector<std::pair<Key, Matrix>> terms1 = {{Z(1), gtsam::I_1x1 /*Rx*/},
//
{X(1), H1_1 /*Sp1*/},
{X(2), H1_2 /*Tp2*/}};
gtsam::GaussianMixtureFactor gmf(
{X(1), X(2)}, {m1},
{std::make_shared<JacobianFactor>(X(1), H0_1, X(2), H0_2, -d0, model0),
std::make_shared<JacobianFactor>(X(1), H1_1, X(2), H1_2, -d1, model1)},
true);
// Create FG with single GaussianMixtureFactor
HybridGaussianFactorGraph mixture_fg;
mixture_fg.add(gmf);
// Linearized prior factor on X1
auto prior = PriorFactor<double>(X(1), x1, prior_noise).linearize(values);
mixture_fg.push_back(prior);
auto hbn = mixture_fg.eliminateSequential();
// hbn->print();
VectorValues cv;
cv.insert(X(1), Vector1(0.0));
cv.insert(X(2), Vector1(0.0));
// Check the error values at the MLE point μ.
AlgebraicDecisionTree<Key> errorTree = hbn->errorTree(cv);
DiscreteValues dv0{{M(1), 0}};
DiscreteValues dv1{{M(1), 1}};
// regression
EXPECT_DOUBLES_EQUAL(9.90348755254, errorTree(dv0), 1e-9);
EXPECT_DOUBLES_EQUAL(0.69314718056, errorTree(dv1), 1e-9);
DiscreteConditional expected_m1(m1, "0.5/0.5");
DiscreteConditional actual_m1 = *(hbn->at(2)->asDiscrete());
EXPECT(assert_equal(expected_m1, actual_m1));
}
/* ************************************************************************* */
/**
* @brief Test components with differing covariances
* but with a Bayes net P(Z|X, M) converted to a FG.
*/
TEST(GaussianMixtureFactor, DifferentCovariances2) {
DiscreteKey m1(M(1), 2);
Values values;
double x1 = 1.0, x2 = 1.0;
values.insert(X(1), x1);
values.insert(X(2), x2);
double between = 0.0;
auto model0 = noiseModel::Isotropic::Sigma(1, 1e2);
auto model1 = noiseModel::Isotropic::Sigma(1, 1e-2);
auto prior_noise = noiseModel::Isotropic::Sigma(1, 1e-3);
auto f0 =
std::make_shared<BetweenFactor<double>>(X(1), X(2), between, model0);
auto f1 =
std::make_shared<BetweenFactor<double>>(X(1), X(2), between, model1);
std::vector<NonlinearFactor::shared_ptr> factors{f0, f1};
// Create via toFactorGraph
using symbol_shorthand::Z;
Matrix H0_1, H0_2, H1_1, H1_2;
Vector d0 = f0->evaluateError(x1, x2, &H0_1, &H0_2);
std::vector<std::pair<Key, Matrix>> terms0 = {{Z(1), gtsam::I_1x1 /*Rx*/},
//
{X(1), H0_1 /*Sp1*/},
{X(2), H0_2 /*Tp2*/}};
Vector d1 = f1->evaluateError(x1, x2, &H1_1, &H1_2);
std::vector<std::pair<Key, Matrix>> terms1 = {{Z(1), gtsam::I_1x1 /*Rx*/},
//
{X(1), H1_1 /*Sp1*/},
{X(2), H1_2 /*Tp2*/}};
auto gm = new gtsam::GaussianMixture(
{Z(1)}, {X(1), X(2)}, {m1},
{std::make_shared<GaussianConditional>(terms0, 1, -d0, model0),
std::make_shared<GaussianConditional>(terms1, 1, -d1, model1)});
gtsam::HybridBayesNet bn;
bn.emplace_back(gm);
gtsam::VectorValues measurements;
measurements.insert(Z(1), gtsam::Z_1x1);
// Create FG with single GaussianMixtureFactor
HybridGaussianFactorGraph mixture_fg = bn.toFactorGraph(measurements);
// Linearized prior factor on X1
auto prior = PriorFactor<double>(X(1), x1, prior_noise).linearize(values);
mixture_fg.push_back(prior);
auto hbn = mixture_fg.eliminateSequential();
VectorValues cv;
cv.insert(X(1), Vector1(0.0));
cv.insert(X(2), Vector1(0.0));
// Check the error values at the MLE point μ.
AlgebraicDecisionTree<Key> errorTree = hbn->errorTree(cv);
DiscreteValues dv0{{M(1), 0}};
DiscreteValues dv1{{M(1), 1}};
// regression
EXPECT_DOUBLES_EQUAL(9.90348755254, errorTree(dv0), 1e-9);
EXPECT_DOUBLES_EQUAL(0.69314718056, errorTree(dv1), 1e-9);
DiscreteConditional expected_m1(m1, "0.5/0.5");
DiscreteConditional actual_m1 = *(hbn->at(2)->asDiscrete());
EXPECT(assert_equal(expected_m1, actual_m1));
}
/* ************************************************************************* */


@@ -161,6 +161,35 @@ TEST(MixtureFactor, DifferentMeans) {
DiscreteValues{{M(1), 1}, {M(2), 0}});
EXPECT(assert_equal(expected, actual));
{
DiscreteValues dv{{M(1), 0}, {M(2), 0}};
VectorValues cont = bn->optimize(dv);
double error = bn->error(HybridValues(cont, dv));
// regression
EXPECT_DOUBLES_EQUAL(1.77418393408, error, 1e-9);
}
{
DiscreteValues dv{{M(1), 0}, {M(2), 1}};
VectorValues cont = bn->optimize(dv);
double error = bn->error(HybridValues(cont, dv));
// regression
EXPECT_DOUBLES_EQUAL(1.77418393408, error, 1e-9);
}
{
DiscreteValues dv{{M(1), 1}, {M(2), 0}};
VectorValues cont = bn->optimize(dv);
double error = bn->error(HybridValues(cont, dv));
// regression
EXPECT_DOUBLES_EQUAL(1.10751726741, error, 1e-9);
}
{
DiscreteValues dv{{M(1), 1}, {M(2), 1}};
VectorValues cont = bn->optimize(dv);
double error = bn->error(HybridValues(cont, dv));
// regression
EXPECT_DOUBLES_EQUAL(1.10751726741, error, 1e-9);
}
}
/* ************************************************************************* */
@@ -217,15 +246,35 @@ TEST(MixtureFactor, DifferentCovariances) {
auto hbn = mixture_fg.eliminateSequential();
HybridValues actual_values = hbn->optimize();
VectorValues cv;
cv.insert(X(1), Vector1(0.0));
cv.insert(X(2), Vector1(0.0));
// Check that we get different error values at the MLE point μ.
AlgebraicDecisionTree<Key> errorTree = hbn->errorTree(cv);
auto cond0 = hbn->at(0)->asMixture();
auto cond1 = hbn->at(1)->asMixture();
auto discrete_cond = hbn->at(2)->asDiscrete();
HybridValues hv0(cv, DiscreteValues{{M(1), 0}});
HybridValues hv1(cv, DiscreteValues{{M(1), 1}});
AlgebraicDecisionTree<Key> expectedErrorTree(
m1,
cond0->error(hv0) // cond0(0)->logNormalizationConstant()
// - cond0(1)->logNormalizationConstant
+ cond1->error(hv0) + discrete_cond->error(DiscreteValues{{M(1), 0}}),
cond0->error(hv1) // cond1(0)->logNormalizationConstant()
// - cond1(1)->logNormalizationConstant
+ cond1->error(hv1) +
discrete_cond->error(DiscreteValues{{M(1), 0}}));
EXPECT(assert_equal(expectedErrorTree, errorTree));
DiscreteValues dv;
dv.insert({M(1), 1});
HybridValues expected_values(cv, dv);
HybridValues actual_values = hbn->optimize();
EXPECT(assert_equal(expected_values, actual_values));
}