Merge pull request #1352 from borglab/feature/HBN-evaluate

release/4.3a0
Varun Agrawal 2022-12-28 21:21:08 -05:00 committed by GitHub
commit a849eab99d
15 changed files with 353 additions and 104 deletions

View File

@@ -36,7 +36,7 @@ DecisionTreeFactor::shared_ptr HybridBayesNet::discreteConditionals() const {
   for (auto &&conditional : *this) {
     if (conditional->isDiscrete()) {
       // Convert to a DecisionTreeFactor and add it to the main factor.
-      DecisionTreeFactor f(*conditional->asDiscreteConditional());
+      DecisionTreeFactor f(*conditional->asDiscrete());
       dtFactor = dtFactor * f;
     }
   }
@@ -108,7 +108,7 @@ void HybridBayesNet::updateDiscreteConditionals(
     HybridConditional::shared_ptr conditional = this->at(i);
     if (conditional->isDiscrete()) {
       // std::cout << demangle(typeid(conditional).name()) << std::endl;
-      auto discrete = conditional->asDiscreteConditional();
+      auto discrete = conditional->asDiscrete();
       KeyVector frontals(discrete->frontals().begin(),
                          discrete->frontals().end());
@@ -150,16 +150,11 @@ HybridBayesNet HybridBayesNet::prune(size_t maxNrLeaves) {
   // Go through all the conditionals in the
   // Bayes Net and prune them as per decisionTree.
-  for (size_t i = 0; i < this->size(); i++) {
-    HybridConditional::shared_ptr conditional = this->at(i);
-    if (conditional->isHybrid()) {
-      GaussianMixture::shared_ptr gaussianMixture = conditional->asMixture();
+  for (auto &&conditional : *this) {
+    if (auto gm = conditional->asMixture()) {
       // Make a copy of the Gaussian mixture and prune it!
-      auto prunedGaussianMixture =
-          boost::make_shared<GaussianMixture>(*gaussianMixture);
-      prunedGaussianMixture->prune(*decisionTree);
+      auto prunedGaussianMixture = boost::make_shared<GaussianMixture>(*gm);
+      prunedGaussianMixture->prune(*decisionTree);  // imperative :-(

       // Type-erase and add to the pruned Bayes Net fragment.
       prunedBayesNetFragment.push_back(
@@ -186,7 +181,7 @@ GaussianConditional::shared_ptr HybridBayesNet::atGaussian(size_t i) const {
 /* ************************************************************************* */
 DiscreteConditional::shared_ptr HybridBayesNet::atDiscrete(size_t i) const {
-  return at(i)->asDiscreteConditional();
+  return at(i)->asDiscrete();
 }

 /* ************************************************************************* */
@@ -194,16 +189,13 @@ GaussianBayesNet HybridBayesNet::choose(
     const DiscreteValues &assignment) const {
   GaussianBayesNet gbn;
   for (auto &&conditional : *this) {
-    if (conditional->isHybrid()) {
+    if (auto gm = conditional->asMixture()) {
       // If conditional is hybrid, select based on assignment.
-      GaussianMixture gm = *conditional->asMixture();
-      gbn.push_back(gm(assignment));
-    } else if (conditional->isContinuous()) {
+      gbn.push_back((*gm)(assignment));
+    } else if (auto gc = conditional->asGaussian()) {
       // If continuous only, add Gaussian conditional.
-      gbn.push_back((conditional->asGaussian()));
-    } else if (conditional->isDiscrete()) {
+      gbn.push_back(gc);
+    } else if (auto dc = conditional->asDiscrete()) {
       // If conditional is discrete-only, we simply continue.
       continue;
     }
@@ -218,23 +210,47 @@ HybridValues HybridBayesNet::optimize() const {
   DiscreteBayesNet discrete_bn;
   for (auto &&conditional : *this) {
     if (conditional->isDiscrete()) {
-      discrete_bn.push_back(conditional->asDiscreteConditional());
+      discrete_bn.push_back(conditional->asDiscrete());
     }
   }
   DiscreteValues mpe = DiscreteFactorGraph(discrete_bn).optimize();

   // Given the MPE, compute the optimal continuous values.
-  GaussianBayesNet gbn = this->choose(mpe);
+  GaussianBayesNet gbn = choose(mpe);
   return HybridValues(mpe, gbn.optimize());
 }

 /* ************************************************************************* */
 VectorValues HybridBayesNet::optimize(const DiscreteValues &assignment) const {
-  GaussianBayesNet gbn = this->choose(assignment);
+  GaussianBayesNet gbn = choose(assignment);
   return gbn.optimize();
 }

+/* ************************************************************************* */
+double HybridBayesNet::evaluate(const HybridValues &values) const {
+  const DiscreteValues &discreteValues = values.discrete();
+  const VectorValues &continuousValues = values.continuous();
+
+  double logDensity = 0.0, probability = 1.0;
+
+  // Iterate over each conditional.
+  for (auto &&conditional : *this) {
+    if (auto gm = conditional->asMixture()) {
+      const auto component = (*gm)(discreteValues);
+      logDensity += component->logDensity(continuousValues);
+    } else if (auto gc = conditional->asGaussian()) {
+      // If continuous only, add its log-density.
+      logDensity += gc->logDensity(continuousValues);
+    } else if (auto dc = conditional->asDiscrete()) {
+      // If discrete-only, multiply in its probability.
+      probability *= dc->operator()(discreteValues);
+    }
+  }
+
+  return probability * exp(logDensity);
+}
+
 /* ************************************************************************* */
 HybridValues HybridBayesNet::sample(const HybridValues &given,
                                     std::mt19937_64 *rng) const {
@@ -242,7 +258,7 @@ HybridValues HybridBayesNet::sample(const HybridValues &given,
   for (auto &&conditional : *this) {
     if (conditional->isDiscrete()) {
       // If conditional is discrete-only, we add to the discrete Bayes net.
-      dbn.push_back(conditional->asDiscreteConditional());
+      dbn.push_back(conditional->asDiscrete());
     }
   }
   // Sample a discrete assignment.
@@ -273,7 +289,7 @@ HybridValues HybridBayesNet::sample() const {
 /* ************************************************************************* */
 double HybridBayesNet::error(const VectorValues &continuousValues,
                              const DiscreteValues &discreteValues) const {
-  GaussianBayesNet gbn = this->choose(discreteValues);
+  GaussianBayesNet gbn = choose(discreteValues);
   return gbn.error(continuousValues);
 }
@@ -284,23 +300,20 @@ AlgebraicDecisionTree<Key> HybridBayesNet::error(
   // Iterate over each conditional.
   for (auto &&conditional : *this) {
-    if (conditional->isHybrid()) {
+    if (auto gm = conditional->asMixture()) {
       // If conditional is hybrid, select based on assignment and compute error.
-      GaussianMixture::shared_ptr gm = conditional->asMixture();
       AlgebraicDecisionTree<Key> conditional_error =
           gm->error(continuousValues);
       error_tree = error_tree + conditional_error;
-    } else if (conditional->isContinuous()) {
+    } else if (auto gc = conditional->asGaussian()) {
       // If continuous only, get the (double) error
       // and add it to the error_tree
-      double error = conditional->asGaussian()->error(continuousValues);
+      double error = gc->error(continuousValues);
       // Add the computed error to every leaf of the error tree.
       error_tree = error_tree.apply(
           [error](double leaf_value) { return leaf_value + error; });
-    } else if (conditional->isDiscrete()) {
+    } else if (auto dc = conditional->asDiscrete()) {
       // Conditional is discrete-only, we skip.
       continue;
     }
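Editor's note, not part of the diff: with evaluate() and its operator() sugar, a HybridBayesNet can now be queried directly as a density. A minimal usage sketch, assuming a hypothetical net `hbn` over `asiaKey` and `X(0)` built as in this PR's tests:

    // Sketch only; `hbn` is a hypothetical HybridBayesNet.
    HybridValues values;
    values.insert(asiaKey, 0);           // discrete assignment
    values.insert(X(0), Vector1(-6.0));  // continuous values
    const double p1 = hbn.evaluate(values);  // discrete probabilities times Gaussian densities
    const double p2 = hbn(values);           // operator() sugar; identical to evaluate()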

View File

@@ -95,6 +95,14 @@ class GTSAM_EXPORT HybridBayesNet : public BayesNet<HybridConditional> {
    */
   GaussianBayesNet choose(const DiscreteValues &assignment) const;

+  /// Evaluate hybrid probability density for given HybridValues.
+  double evaluate(const HybridValues &values) const;
+
+  /// Evaluate hybrid probability density for given HybridValues, sugar.
+  double operator()(const HybridValues &values) const {
+    return evaluate(values);
+  }
+
   /**
    * @brief Solve the HybridBayesNet by first computing the MPE of all the
    * discrete variables and then optimizing the continuous variables based on

View File

@@ -49,7 +49,7 @@ HybridValues HybridBayesTree::optimize() const {
   // The root should be discrete only, we compute the MPE
   if (root_conditional->isDiscrete()) {
-    dbn.push_back(root_conditional->asDiscreteConditional());
+    dbn.push_back(root_conditional->asDiscrete());
     mpe = DiscreteFactorGraph(dbn).optimize();
   } else {
     throw std::runtime_error(
@@ -147,7 +147,7 @@ VectorValues HybridBayesTree::optimize(const DiscreteValues& assignment) const {
 /* ************************************************************************* */
 void HybridBayesTree::prune(const size_t maxNrLeaves) {
   auto decisionTree =
-      this->roots_.at(0)->conditional()->asDiscreteConditional();
+      this->roots_.at(0)->conditional()->asDiscrete();
   DecisionTreeFactor prunedDecisionTree = decisionTree->prune(maxNrLeaves);
   decisionTree->root_ = prunedDecisionTree.root_;

View File

@@ -131,34 +131,29 @@ class GTSAM_EXPORT HybridConditional
   /**
    * @brief Return HybridConditional as a GaussianMixture
    *
-   * @return GaussianMixture::shared_ptr
+   * @return nullptr if not a mixture
+   * @return GaussianMixture::shared_ptr otherwise
    */
   GaussianMixture::shared_ptr asMixture() {
-    if (!isHybrid()) throw std::invalid_argument("Not a mixture");
-    return boost::static_pointer_cast<GaussianMixture>(inner_);
+    return boost::dynamic_pointer_cast<GaussianMixture>(inner_);
   }

   /**
    * @brief Return HybridConditional as a GaussianConditional
    *
-   * @return GaussianConditional::shared_ptr
+   * @return nullptr if not a GaussianConditional
+   * @return GaussianConditional::shared_ptr otherwise
    */
   GaussianConditional::shared_ptr asGaussian() {
-    if (!isContinuous())
-      throw std::invalid_argument("Not a continuous conditional");
-    return boost::static_pointer_cast<GaussianConditional>(inner_);
+    return boost::dynamic_pointer_cast<GaussianConditional>(inner_);
   }

   /**
    * @brief Return conditional as a DiscreteConditional
    *
+   * @return nullptr if not a DiscreteConditional
    * @return DiscreteConditional::shared_ptr
    */
-  DiscreteConditional::shared_ptr asDiscreteConditional() {
-    if (!isDiscrete())
-      throw std::invalid_argument("Not a discrete conditional");
-    return boost::static_pointer_cast<DiscreteConditional>(inner_);
+  DiscreteConditional::shared_ptr asDiscrete() {
+    return boost::dynamic_pointer_cast<DiscreteConditional>(inner_);
   }

   /// @}
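Editor's note: this is a behavioral change, not just a rename. The accessors used to throw std::invalid_argument on a type mismatch and now return nullptr via dynamic_pointer_cast, which is what enables the pointer-test dispatch used throughout this PR:

    // Call-site pattern from this PR; `hbn` is a hypothetical HybridBayesNet.
    for (auto &&conditional : hbn) {
      if (auto gm = conditional->asMixture()) {
        // handle GaussianMixture
      } else if (auto gc = conditional->asGaussian()) {
        // handle GaussianConditional
      } else if (auto dc = conditional->asDiscrete()) {
        // handle DiscreteConditional
      }
    }

Callers that previously relied on the throw must now check for nullptr before dereferencing.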

View File

@@ -78,10 +78,10 @@ class GTSAM_EXPORT HybridValues {
   /// @{

   /// Return the discrete MPE assignment
-  DiscreteValues discrete() const { return discrete_; }
+  const DiscreteValues& discrete() const { return discrete_; }

   /// Return the delta update for the continuous vectors
-  VectorValues continuous() const { return continuous_; }
+  const VectorValues& continuous() const { return continuous_; }

   /// Check whether a variable with key \c j exists in DiscreteValue.
   bool existsDiscrete(Key j) { return (discrete_.find(j) != discrete_.end()); };

View File

@@ -36,36 +36,80 @@ using noiseModel::Isotropic;
 using symbol_shorthand::M;
 using symbol_shorthand::X;

-static const DiscreteKey Asia(0, 2);
+static const Key asiaKey = 0;
+static const DiscreteKey Asia(asiaKey, 2);

 /* ****************************************************************************/
-// Test creation
+// Test creation of a pure discrete Bayes net.
 TEST(HybridBayesNet, Creation) {
   HybridBayesNet bayesNet;
   bayesNet.add(Asia, "99/1");

   DiscreteConditional expected(Asia, "99/1");
   CHECK(bayesNet.atDiscrete(0));
-  auto& df = *bayesNet.atDiscrete(0);
-  EXPECT(df.equals(expected));
+  EXPECT(assert_equal(expected, *bayesNet.atDiscrete(0)));
 }

 /* ****************************************************************************/
-// Test adding a bayes net to another one.
+// Test adding a Bayes net to another one.
 TEST(HybridBayesNet, Add) {
   HybridBayesNet bayesNet;
   bayesNet.add(Asia, "99/1");

   DiscreteConditional expected(Asia, "99/1");

   HybridBayesNet other;
   other.push_back(bayesNet);
   EXPECT(bayesNet.equals(other));
 }

+/* ****************************************************************************/
+// Test evaluate for a pure discrete Bayes net P(Asia).
+TEST(HybridBayesNet, evaluatePureDiscrete) {
+  HybridBayesNet bayesNet;
+  bayesNet.add(Asia, "99/1");
+  HybridValues values;
+  values.insert(asiaKey, 0);
+  EXPECT_DOUBLES_EQUAL(0.99, bayesNet.evaluate(values), 1e-9);
+}
+
+/* ****************************************************************************/
+// Test evaluate for a hybrid Bayes net P(X0|X1) P(X1|Asia) P(Asia).
+TEST(HybridBayesNet, evaluateHybrid) {
+  const auto continuousConditional = GaussianConditional::FromMeanAndStddev(
+      X(0), 2 * I_1x1, X(1), Vector1(-4.0), 5.0);
+
+  const SharedDiagonal model0 = noiseModel::Diagonal::Sigmas(Vector1(2.0)),
+                       model1 = noiseModel::Diagonal::Sigmas(Vector1(3.0));
+
+  const auto conditional0 = boost::make_shared<GaussianConditional>(
+                 X(1), Vector1::Constant(5), I_1x1, model0),
+             conditional1 = boost::make_shared<GaussianConditional>(
+                 X(1), Vector1::Constant(2), I_1x1, model1);
+
+  // TODO(dellaert): creating and adding mixture is clumsy.
+  const auto mixture = GaussianMixture::FromConditionals(
+      {X(1)}, {}, {Asia}, {conditional0, conditional1});
+
+  // Create hybrid Bayes net.
+  HybridBayesNet bayesNet;
+  bayesNet.push_back(HybridConditional(
+      boost::make_shared<GaussianConditional>(continuousConditional)));
+  bayesNet.push_back(
+      HybridConditional(boost::make_shared<GaussianMixture>(mixture)));
+  bayesNet.add(Asia, "99/1");
+
+  // Create values at which to evaluate.
+  HybridValues values;
+  values.insert(asiaKey, 0);
+  values.insert(X(0), Vector1(-6));
+  values.insert(X(1), Vector1(1));
+
+  const double conditionalProbability =
+      continuousConditional.evaluate(values.continuous());
+  const double mixtureProbability = conditional0->evaluate(values.continuous());
+  EXPECT_DOUBLES_EQUAL(conditionalProbability * mixtureProbability * 0.99,
+                       bayesNet.evaluate(values), 1e-9);
+}
+
 /* ****************************************************************************/
 // Test choosing an assignment of conditionals
 TEST(HybridBayesNet, Choose) {
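Editor's note: the expected value in evaluateHybrid is just the chain rule for the hybrid density at the assignment m = 0, which selects conditional0 inside the mixture:

    p(x_0, x_1, m{=}0) = p(x_0 \mid x_1)\, p(x_1 \mid m{=}0)\, P(m{=}0)
                       = \text{conditionalProbability} \cdot \text{mixtureProbability} \cdot 0.99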
@@ -105,7 +149,7 @@ TEST(HybridBayesNet, Choose) {
 }

 /* ****************************************************************************/
-// Test bayes net optimize
+// Test Bayes net optimize
 TEST(HybridBayesNet, OptimizeAssignment) {
   Switching s(4);
@@ -139,7 +183,7 @@ TEST(HybridBayesNet, OptimizeAssignment) {
 }

 /* ****************************************************************************/
-// Test bayes net optimize
+// Test Bayes net optimize
 TEST(HybridBayesNet, Optimize) {
   Switching s(4);
@@ -203,7 +247,7 @@ TEST(HybridBayesNet, Error) {
   // regression
   EXPECT(assert_equal(expected_error, error_tree, 1e-9));

-  // Error on pruned bayes net
+  // Error on pruned Bayes net
   auto prunedBayesNet = hybridBayesNet->prune(2);
   auto pruned_error_tree = prunedBayesNet.error(delta.continuous());
@@ -238,7 +282,7 @@ TEST(HybridBayesNet, Error) {
 }

 /* ****************************************************************************/
-// Test bayes net pruning
+// Test Bayes net pruning
 TEST(HybridBayesNet, Prune) {
   Switching s(4);
@@ -256,7 +300,7 @@ TEST(HybridBayesNet, Prune) {
 }

 /* ****************************************************************************/
-// Test bayes net updateDiscreteConditionals
+// Test Bayes net updateDiscreteConditionals
 TEST(HybridBayesNet, UpdateDiscreteConditionals) {
   Switching s(4);
@@ -273,8 +317,7 @@ TEST(HybridBayesNet, UpdateDiscreteConditionals) {
   EXPECT_LONGS_EQUAL(maxNrLeaves + 2 /*2 zero leaves*/,
                      prunedDecisionTree->nrLeaves());

-  auto original_discrete_conditionals =
-      *(hybridBayesNet->at(4)->asDiscreteConditional());
+  auto original_discrete_conditionals = *(hybridBayesNet->at(4)->asDiscrete());

   // Prune!
   hybridBayesNet->prune(maxNrLeaves);
@@ -294,8 +337,7 @@ TEST(HybridBayesNet, UpdateDiscreteConditionals) {
   };

   // Get the pruned discrete conditionals as an AlgebraicDecisionTree
-  auto pruned_discrete_conditionals =
-      hybridBayesNet->at(4)->asDiscreteConditional();
+  auto pruned_discrete_conditionals = hybridBayesNet->at(4)->asDiscrete();
   auto discrete_conditional_tree =
       boost::dynamic_pointer_cast<DecisionTreeFactor::ADT>(
           pruned_discrete_conditionals);
@@ -358,7 +400,7 @@ TEST(HybridBayesNet, Sampling) {
     // Sample
     HybridValues sample = bn->sample(&gen);

-    discrete_samples.push_back(sample.discrete()[M(0)]);
+    discrete_samples.push_back(sample.discrete().at(M(0)));

     if (i == 0) {
       average_continuous.insert(sample.continuous());

View File

@@ -133,7 +133,7 @@ TEST(HybridGaussianFactorGraph, eliminateFullSequentialEqualChance) {
   auto result =
       hfg.eliminateSequential(Ordering::ColamdConstrainedLast(hfg, {M(1)}));

-  auto dc = result->at(2)->asDiscreteConditional();
+  auto dc = result->at(2)->asDiscrete();
   DiscreteValues dv;
   dv[M(1)] = 0;
   EXPECT_DOUBLES_EQUAL(1, dc->operator()(dv), 1e-3);

View File

@@ -111,8 +111,7 @@ TEST(HybridGaussianElimination, IncrementalInference) {
   // Run update step
   isam.update(graph1);

-  auto discreteConditional_m0 =
-      isam[M(0)]->conditional()->asDiscreteConditional();
+  auto discreteConditional_m0 = isam[M(0)]->conditional()->asDiscrete();
   EXPECT(discreteConditional_m0->keys() == KeyVector({M(0)}));

   /********************************************************/
@@ -170,10 +169,10 @@ TEST(HybridGaussianElimination, IncrementalInference) {
   DiscreteValues m00;
   m00[M(0)] = 0, m00[M(1)] = 0;
   DiscreteConditional decisionTree =
-      *(*discreteBayesTree)[M(1)]->conditional()->asDiscreteConditional();
+      *(*discreteBayesTree)[M(1)]->conditional()->asDiscrete();
   double m00_prob = decisionTree(m00);

-  auto discreteConditional = isam[M(1)]->conditional()->asDiscreteConditional();
+  auto discreteConditional = isam[M(1)]->conditional()->asDiscrete();

   // Test if the probability values are as expected with regression tests.
   DiscreteValues assignment;
@@ -535,7 +534,7 @@ TEST(HybridGaussianISAM, NonTrivial) {
   // The final discrete graph should not be empty since we have eliminated
   // all continuous variables.
-  auto discreteTree = inc[M(3)]->conditional()->asDiscreteConditional();
+  auto discreteTree = inc[M(3)]->conditional()->asDiscrete();
   EXPECT_LONGS_EQUAL(3, discreteTree->size());

   // Test if the optimal discrete mode assignment is (1, 1, 1).

View File

@@ -124,8 +124,7 @@ TEST(HybridNonlinearISAM, IncrementalInference) {
   isam.update(graph1, initial);
   HybridGaussianISAM bayesTree = isam.bayesTree();

-  auto discreteConditional_m0 =
-      bayesTree[M(0)]->conditional()->asDiscreteConditional();
+  auto discreteConditional_m0 = bayesTree[M(0)]->conditional()->asDiscrete();
   EXPECT(discreteConditional_m0->keys() == KeyVector({M(0)}));

   /********************************************************/
@@ -187,11 +186,11 @@ TEST(HybridNonlinearISAM, IncrementalInference) {
   DiscreteValues m00;
   m00[M(0)] = 0, m00[M(1)] = 0;
   DiscreteConditional decisionTree =
-      *(*discreteBayesTree)[M(1)]->conditional()->asDiscreteConditional();
+      *(*discreteBayesTree)[M(1)]->conditional()->asDiscrete();
   double m00_prob = decisionTree(m00);

   auto discreteConditional =
-      bayesTree[M(1)]->conditional()->asDiscreteConditional();
+      bayesTree[M(1)]->conditional()->asDiscrete();

   // Test if the probability values are as expected with regression tests.
   DiscreteValues assignment;
@@ -558,7 +557,7 @@ TEST(HybridNonlinearISAM, NonTrivial) {
   // The final discrete graph should not be empty since we have eliminated
   // all continuous variables.
-  auto discreteTree = bayesTree[M(3)]->conditional()->asDiscreteConditional();
+  auto discreteTree = bayesTree[M(3)]->conditional()->asDiscrete();
   EXPECT_LONGS_EQUAL(3, discreteTree->size());

   // Test if the optimal discrete mode assignment is (1, 1, 1).

View File

@@ -224,5 +224,19 @@ namespace gtsam {
   }

+/* ************************************************************************* */
+double GaussianBayesNet::logDensity(const VectorValues& x) const {
+  double sum = 0.0;
+  for (const auto& conditional : *this) {
+    if (conditional) sum += conditional->logDensity(x);
+  }
+  return sum;
+}
+
+/* ************************************************************************* */
+double GaussianBayesNet::evaluate(const VectorValues& x) const {
+  return exp(logDensity(x));
+}
+
 /* ************************************************************************* */
 }  // namespace gtsam
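Editor's note: the new logDensity is the chain rule in log form. A Bayes net factorizes into a product of conditionals, so the log-density is exactly the sum computed above:

    \log p(x) = \log \prod_i p(x_i \mid \pi_i) = \sum_i \log p(x_i \mid \pi_i)

where \pi_i denotes the parents of x_i; evaluate() then exponentiates the sum.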

View File

@@ -88,6 +88,26 @@ namespace gtsam {
     /// @name Standard Interface
     /// @{

+    /**
+     * Calculate probability density for given values `x`:
+     *   exp(-error(x)) / sqrt((2*pi)^n*det(Sigma))
+     * where x is the vector of values, and Sigma is the covariance matrix.
+     * Note that error(x)=0.5*e'*e includes the 0.5 factor already.
+     */
+    double evaluate(const VectorValues& x) const;
+
+    /// Evaluate probability density, sugar.
+    double operator()(const VectorValues& x) const {
+      return evaluate(x);
+    }
+
+    /**
+     * Calculate log-density for given values `x`:
+     *   -error(x) - 0.5 * n*log(2*pi) - 0.5 * log det(Sigma)
+     * where x is the vector of values, and Sigma is the covariance matrix.
+     */
+    double logDensity(const VectorValues& x) const;
+
     /// Solve the GaussianBayesNet, i.e. return \f$ x = R^{-1}*d \f$, by
     /// back-substitution
     VectorValues optimize() const;
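Editor's note: a minimal usage sketch for the new API, built with the FromMeanAndStddev constructors that already appear in this PR's tests; the numbers are illustrative only:

    // Sketch: a tiny net p(x0 | x1) p(x1), then query its density.
    GaussianBayesNet gbn;
    const Matrix A1 = (Matrix(2, 2) << 1., 2., 3., 4.).finished();
    gbn.add(GaussianConditional::FromMeanAndStddev(X(0), A1, X(1),
                                                   Vector2(10, 10), 0.01));
    gbn.add(GaussianDensity::FromMeanAndStddev(X(1), Vector2(20, 40), 0.01));
    const VectorValues mean = gbn.optimize();
    const double p = gbn(mean);  // sugar for gbn.evaluate(mean); maximal at the mean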

View File

@@ -169,6 +169,21 @@ double GaussianConditional::logDeterminant() const {
   return logDet;
 }

+/* ************************************************************************* */
+//  density = exp(-error(x)) / sqrt((2*pi)^n*det(Sigma))
+//  log = -error(x) - 0.5 * n*log(2*pi) - 0.5 * log det(Sigma)
+double GaussianConditional::logDensity(const VectorValues& x) const {
+  constexpr double log2pi = 1.8378770664093454835606594728112;
+  size_t n = d().size();
+  // log det(Sigma) = -2.0 * logDeterminant(), hence the +logDeterminant() term.
+  return -error(x) - 0.5 * n * log2pi + logDeterminant();
+}
+
+/* ************************************************************************* */
+double GaussianConditional::evaluate(const VectorValues& x) const {
+  return exp(logDensity(x));
+}
+
 /* ************************************************************************* */
 VectorValues GaussianConditional::solve(const VectorValues& x) const {
   // Concatenate all vector values that correspond to parent variables
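Editor's note: the sign of the logDeterminant() term follows from the R parameterization. With \Sigma = (R^\top R)^{-1}:

    \log p(x) = -E(x) - \tfrac{n}{2}\log(2\pi) - \tfrac{1}{2}\log\det\Sigma
    -\tfrac{1}{2}\log\det\Sigma = -\tfrac{1}{2}\log\det(R^\top R)^{-1} = \log\det R = \text{logDeterminant()}

where E(x) = error(x) already contains the 0.5 factor, matching the code above.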

View File

@@ -121,6 +121,26 @@ namespace gtsam {
     /// @name Standard Interface
     /// @{

+    /**
+     * Calculate probability density for given values `x`:
+     *   exp(-error(x)) / sqrt((2*pi)^n*det(Sigma))
+     * where x is the vector of values, and Sigma is the covariance matrix.
+     * Note that error(x)=0.5*e'*e includes the 0.5 factor already.
+     */
+    double evaluate(const VectorValues& x) const;
+
+    /// Evaluate probability density, sugar.
+    double operator()(const VectorValues& x) const {
+      return evaluate(x);
+    }
+
+    /**
+     * Calculate log-density for given values `x`:
+     *   -error(x) - 0.5 * n*log(2*pi) - 0.5 * log det(Sigma)
+     * where x is the vector of values, and Sigma is the covariance matrix.
+     */
+    double logDensity(const VectorValues& x) const;
+
     /** Return a view of the upper-triangular R block of the conditional */
     constABlock R() const { return Ab_.range(0, nrFrontals()); }
@@ -134,27 +154,31 @@ namespace gtsam {
     const constBVector d() const { return BaseFactor::getb(); }

     /**
-     * @brief Compute the log determinant of the Gaussian conditional.
-     * The determinant is computed using the R matrix, which is upper
-     * triangular.
-     * For numerical stability, the determinant is computed in log
-     * form, so it is a summation rather than a multiplication.
-     *
-     * @return double
-     */
-    double logDeterminant() const;
-
-    /**
-     * @brief Compute the determinant of the conditional from the
-     * upper-triangular R matrix.
+     * @brief Compute the determinant of the R matrix.
      *
-     * The determinant is computed in log form (hence summation) for numerical
-     * stability and then exponentiated.
+     * The determinant is computed in log form using logDeterminant for
+     * numerical stability and then exponentiated.
+     *
+     * Note, the covariance matrix \f$ \Sigma = (R^T R)^{-1} \f$, and hence
+     * \f$ \det(\Sigma) = 1 / \det(R^T R) = 1 / determinant()^2 \f$.
      *
      * @return double
      */
     double determinant() const { return exp(this->logDeterminant()); }

+    /**
+     * @brief Compute the log determinant of the R matrix.
+     *
+     * For numerical stability, the determinant is computed in log
+     * form, so it is a summation rather than a multiplication.
+     *
+     * Note, the covariance matrix \f$ \Sigma = (R^T R)^{-1} \f$, and hence
+     * \f$ \log \det(\Sigma) = - \log \det(R^T R) = - 2 logDeterminant() \f$.
+     *
+     * @return double
+     */
+    double logDeterminant() const;
+
     /**
      * Solves a conditional Gaussian and writes the solution into the entries of
      * \c x for each frontal variable of the conditional. The parents are
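Editor's note: a concrete univariate check of the identities in these comments, assuming (as in GTSAM) that the stored R is whitened by the noise model. For a conditional with sigma = 2 we get R = 1/2, so:

    \det R = \tfrac{1}{2}, \qquad \Sigma = (R^\top R)^{-1} = 4 = 1/\det(R)^2, \qquad \log\det\Sigma = -2\log\det R = 2\log 2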

View File

@@ -1,6 +1,6 @@
 /* ----------------------------------------------------------------------------
- * GTSAM Copyright 2010, Georgia Tech Research Corporation,
+ * GTSAM Copyright 2010-2022, Georgia Tech Research Corporation,
  * Atlanta, Georgia 30332-0415
  * All Rights Reserved
  * Authors: Frank Dellaert, et al. (see THANKS for the full author list)
@@ -67,6 +67,36 @@ TEST( GaussianBayesNet, Matrix )
   EXPECT(assert_equal(d,d1));
 }

+/* ************************************************************************* */
+// Check that the evaluate function matches direct calculation with R.
+TEST(GaussianBayesNet, Evaluate1) {
+  // Let's evaluate at the mean
+  const VectorValues mean = smallBayesNet.optimize();
+
+  // We get the matrix, which has the noise model applied!
+  const Matrix R = smallBayesNet.matrix().first;
+  const Matrix invSigma = R.transpose() * R;
+
+  // The Bayes net is a Gaussian density ~ exp (-0.5*(Rx-d)'*(Rx-d)),
+  // which at the mean is 1.0! So, the only thing we need to calculate is
+  // the normalization constant 1.0/sqrt((2*pi*Sigma).det()).
+  // The information matrix is inv(Sigma) = R'*R, so this determinant is
+  const double expected = sqrt((invSigma / (2 * M_PI)).determinant());
+  const double actual = smallBayesNet.evaluate(mean);
+  EXPECT_DOUBLES_EQUAL(expected, actual, 1e-9);
+}
+
+// Check the evaluate with non-unit noise.
+TEST(GaussianBayesNet, Evaluate2) {
+  // See comments in test above.
+  const VectorValues mean = noisyBayesNet.optimize();
+  const Matrix R = noisyBayesNet.matrix().first;
+  const Matrix invSigma = R.transpose() * R;
+  const double expected = sqrt((invSigma / (2 * M_PI)).determinant());
+  const double actual = noisyBayesNet.evaluate(mean);
+  EXPECT_DOUBLES_EQUAL(expected, actual, 1e-9);
+}
+
 /* ************************************************************************* */
 TEST( GaussianBayesNet, NoisyMatrix )
 {
@@ -142,14 +172,18 @@ TEST( GaussianBayesNet, optimize3 )
 }

 /* ************************************************************************* */
-TEST(GaussianBayesNet, sample) {
-  GaussianBayesNet gbn;
-  Matrix A1 = (Matrix(2, 2) << 1., 2., 3., 4.).finished();
-  const Vector2 mean(20, 40), b(10, 10);
-  const double sigma = 0.01;
-
-  gbn.add(GaussianConditional::FromMeanAndStddev(X(0), A1, X(1), b, sigma));
-  gbn.add(GaussianDensity::FromMeanAndStddev(X(1), mean, sigma));
+namespace sampling {
+static Matrix A1 = (Matrix(2, 2) << 1., 2., 3., 4.).finished();
+static const Vector2 mean(20, 40), b(10, 10);
+static const double sigma = 0.01;
+static const GaussianBayesNet gbn =
+    list_of(GaussianConditional::FromMeanAndStddev(X(0), A1, X(1), b, sigma))(
+        GaussianDensity::FromMeanAndStddev(X(1), mean, sigma));
+}  // namespace sampling
+
+/* ************************************************************************* */
+TEST(GaussianBayesNet, sample) {
+  using namespace sampling;

   auto actual = gbn.sample();
   EXPECT_LONGS_EQUAL(2, actual.size());
@@ -165,6 +199,23 @@ TEST(GaussianBayesNet, sample) {
   // EXPECT(assert_equal(Vector2(110.032083, 230.039811), actual[X(0)], 1e-5));
 }

+/* ************************************************************************* */
+// Do Monte Carlo integration of the square deviation; should be equal to 9.0.
+TEST(GaussianBayesNet, MonteCarloIntegration) {
+  GaussianBayesNet gbn;
+  gbn.push_back(noisyBayesNet.at(1));
+
+  double sum = 0.0;
+  constexpr size_t N = 1000;
+  // Loop for N samples:
+  for (size_t i = 0; i < N; i++) {
+    const auto X_i = gbn.sample();
+    sum += pow(X_i[_y_].x() - 5.0, 2.0);
+  }
+  // Expected is variance = 3*3
+  EXPECT_DOUBLES_EQUAL(9.0, sum / N, 0.5);  // Pretty high tolerance.
+}
+
 /* ************************************************************************* */
 TEST(GaussianBayesNet, ordering)
 {
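Editor's note: the loose 0.5 tolerance in MonteCarloIntegration is about one standard error. For y \sim N(5, 3^2) the summand (y-5)^2 has mean \sigma^2 = 9 and variance 2\sigma^4 = 162, so the Monte Carlo mean over N = 1000 samples has standard deviation

    \sqrt{2\sigma^4 / N} = \sqrt{162 / 1000} \approx 0.40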

View File

@@ -130,6 +130,75 @@ TEST( GaussianConditional, equals )
   EXPECT( expected.equals(actual) );
 }

+/* ************************************************************************* */
+namespace density {
+static const Key key = 77;
+static constexpr double sigma = 3.0;
+static const auto unitPrior =
+                      GaussianConditional(key, Vector1::Constant(5), I_1x1),
+                  widerPrior = GaussianConditional(
+                      key, Vector1::Constant(5), I_1x1,
+                      noiseModel::Isotropic::Sigma(1, sigma));
+}  // namespace density
+
+/* ************************************************************************* */
+// Check that the evaluate function matches direct calculation with R.
+TEST(GaussianConditional, Evaluate1) {
+  // Let's evaluate at the mean
+  const VectorValues mean = density::unitPrior.solve(VectorValues());
+
+  // We get the Hessian matrix, which has the noise model applied!
+  const Matrix invSigma = density::unitPrior.information();
+
+  // A Gaussian density ~ exp (-0.5*(Rx-d)'*(Rx-d)),
+  // which at the mean is 1.0! So, the only thing we need to calculate is
+  // the normalization constant 1.0/sqrt((2*pi*Sigma).det()).
+  // The information matrix is inv(Sigma) = R'*R, so this determinant is
+  const double expected = sqrt((invSigma / (2 * M_PI)).determinant());
+  const double actual = density::unitPrior.evaluate(mean);
+  EXPECT_DOUBLES_EQUAL(expected, actual, 1e-9);
+
+  using density::key;
+  using density::sigma;
+
+  // Let's numerically integrate and check that we integrate to 1.0.
+  double integral = 0.0;
+  // Loop from -5*sigma to 5*sigma in 0.1*sigma steps:
+  for (double x = -5.0 * sigma; x <= 5.0 * sigma; x += 0.1 * sigma) {
+    VectorValues xValues;
+    xValues.insert(key, mean.at(key) + Vector1(x));
+    const double density = density::unitPrior.evaluate(xValues);
+    integral += 0.1 * sigma * density;
+  }
+  EXPECT_DOUBLES_EQUAL(1.0, integral, 1e-9);
+}
+
+/* ************************************************************************* */
+// Check the evaluate with non-unit noise.
+TEST(GaussianConditional, Evaluate2) {
+  // See comments in test above.
+  const VectorValues mean = density::widerPrior.solve(VectorValues());
+  const Matrix R = density::widerPrior.R();
+  const Matrix invSigma = density::widerPrior.information();
+  const double expected = sqrt((invSigma / (2 * M_PI)).determinant());
+  const double actual = density::widerPrior.evaluate(mean);
+  EXPECT_DOUBLES_EQUAL(expected, actual, 1e-9);
+
+  using density::key;
+  using density::sigma;
+
+  // Let's numerically integrate and check that we integrate to 1.0.
+  double integral = 0.0;
+  // Loop from -5*sigma to 5*sigma in 0.1*sigma steps:
+  for (double x = -5.0 * sigma; x <= 5.0 * sigma; x += 0.1 * sigma) {
+    VectorValues xValues;
+    xValues.insert(key, mean.at(key) + Vector1(x));
+    const double density = density::widerPrior.evaluate(xValues);
+    integral += 0.1 * sigma * density;
+  }
+  EXPECT_DOUBLES_EQUAL(1.0, integral, 1e-5);
+}
+
 /* ************************************************************************* */
 TEST( GaussianConditional, solve )
 {