Added logDensity and evaluate to Gaussian conditional and Bayes net
parent 8d4dc3d880
commit 1d3a7d4753
GaussianBayesNet.cpp
@@ -224,5 +224,19 @@ namespace gtsam {
   }
 
   /* ************************************************************************* */
+  double GaussianBayesNet::logDensity(const VectorValues& x) const {
+    double sum = 0.0;
+    for (const auto& conditional : *this) {
+      if (conditional) sum += conditional->logDensity(x);
+    }
+    return sum;
+  }
+
+  /* ************************************************************************* */
+  double GaussianBayesNet::evaluate(const VectorValues& x) const {
+    return exp(logDensity(x));
+  }
+
+  /* ************************************************************************* */
 
 } // namespace gtsam
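A Gaussian Bayes net factorizes the joint density into its conditionals, so the joint log-density is the sum of the conditional log-densities, which is exactly what the loop above accumulates. A minimal usage sketch of the new Bayes-net methods (illustrative only, not part of the commit; the key 0 and the values noted in the comments are assumptions):

// Sketch: build a one-variable Bayes net p(x) = N(5, 1) and query the new
// density methods. Not part of the commit.
#include <gtsam/linear/GaussianBayesNet.h>
#include <gtsam/linear/GaussianConditional.h>

#include <iostream>

int main() {
  using namespace gtsam;

  GaussianBayesNet bn;
  // Unit-noise prior on key 0: R*x = d with R = I_1x1, d = 5.
  bn.emplace_shared<GaussianConditional>(0, Vector1::Constant(5.0), I_1x1);

  const VectorValues mean = bn.optimize();  // back-substitution gives x = 5
  std::cout << "logDensity(mean) = " << bn.logDensity(mean) << "\n"  // -0.5*log(2*pi)
            << "evaluate(mean)   = " << bn.evaluate(mean) << "\n";   // 1/sqrt(2*pi) ~ 0.3989
}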

GaussianBayesNet.h
@@ -88,6 +88,20 @@ namespace gtsam {
     /// @name Standard Interface
     /// @{
 
+    /**
+     * Calculate log-density for given values `x`:
+     *   -0.5*(error + n*log(2*pi) + log det(Sigma))
+     * where x is the vector of values, and Sigma is the covariance matrix.
+     */
+    double logDensity(const VectorValues& x) const;
+
+    /**
+     * Calculate probability density for given values `x`:
+     *   exp(-0.5*error(x)) / sqrt((2*pi)^n*det(Sigma))
+     * where x is the vector of values, and Sigma is the covariance matrix.
+     */
+    double evaluate(const VectorValues& x) const;
+
     /// Solve the GaussianBayesNet, i.e. return \f$ x = R^{-1}*d \f$, by
     /// back-substitution
     VectorValues optimize() const;
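The two doc comments above are the log and linear forms of the same density; writing E(x) for error(x) and n for the dimension of x, the relationship is

\[
  \log p(x) = -\tfrac{1}{2}\bigl(E(x) + n\log(2\pi) + \log\det\Sigma\bigr)
  \quad\Longleftrightarrow\quad
  p(x) = \frac{\exp\!\bigl(-\tfrac{1}{2}E(x)\bigr)}{\sqrt{(2\pi)^{n}\det\Sigma}}
\]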

GaussianConditional.cpp
@@ -169,6 +169,20 @@ double GaussianConditional::logDeterminant() const {
   return logDet;
 }
 
+/* ************************************************************************* */
+double GaussianConditional::logDensity(const VectorValues& x) const {
+  constexpr double log2pi = 1.8378770664093454835606594728112;
+  size_t n = d().size();
+  // log det(Sigma) = -2 * logDeterminant()
+  double sum = error(x) + n * log2pi - 2 * logDeterminant();
+  return -0.5 * sum;
+}
+
+/* ************************************************************************* */
+double GaussianConditional::evaluate(const VectorValues& x) const {
+  return exp(logDensity(x));
+}
+
 /* ************************************************************************* */
 VectorValues GaussianConditional::solve(const VectorValues& x) const {
   // Concatenate all vector values that correspond to parent variables
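The hard-coded literal is log(2*pi), and the "-2 * logDeterminant()" term uses the fact that inv(Sigma) = R'R for the conditional, hence log det(Sigma) = -2 log det(R). A small stand-alone check of both facts (plain C++, not part of the commit):

// Sanity check: the literal used in logDensity() is log(2*pi), and
// log det(Sigma) = -2 * log det(R) when inv(Sigma) = R'R (scalar example).
#include <cassert>
#include <cmath>

int main() {
  constexpr double log2pi = 1.8378770664093454835606594728112;
  const double pi = std::acos(-1.0);
  assert(std::abs(log2pi - std::log(2.0 * pi)) < 1e-12);

  const double R = 2.0;                // triangular factor, here a scalar
  const double Sigma = 1.0 / (R * R);  // inv(Sigma) = R*R
  assert(std::abs(std::log(Sigma) - (-2.0 * std::log(R))) < 1e-12);
  return 0;
}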

GaussianConditional.h
@@ -121,6 +121,20 @@ namespace gtsam {
     /// @name Standard Interface
     /// @{
 
+    /**
+     * Calculate log-density for given values `x`:
+     *   -0.5*(error + n*log(2*pi) + log det(Sigma))
+     * where x is the vector of values, and Sigma is the covariance matrix.
+     */
+    double logDensity(const VectorValues& x) const;
+
+    /**
+     * Calculate probability density for given values `x`:
+     *   exp(-0.5*error(x)) / sqrt((2*pi)^n*det(Sigma))
+     * where x is the vector of values, and Sigma is the covariance matrix.
+     */
+    double evaluate(const VectorValues& x) const;
+
     /** Return a view of the upper-triangular R block of the conditional */
     constABlock R() const { return Ab_.range(0, nrFrontals()); }
 
@@ -134,9 +148,7 @@ namespace gtsam {
     const constBVector d() const { return BaseFactor::getb(); }
 
     /**
-     * @brief Compute the log determinant of the Gaussian conditional.
-     * The determinant is computed using the R matrix, which is upper
-     * triangular.
+     * @brief Compute the log determinant of the R matrix.
      * For numerical stability, the determinant is computed in log
      * form, so it is a summation rather than a multiplication.
      *

@@ -145,8 +157,7 @@ namespace gtsam {
     double logDeterminant() const;
 
     /**
-     * @brief Compute the determinant of the conditional from the
-     * upper-triangular R matrix.
+     * @brief Compute the determinant of the R matrix.
      *
      * The determinant is computed in log form (hence summation) for numerical
      * stability and then exponentiated.
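The "summation rather than multiplication" wording refers to the triangular structure of R: assuming a positive diagonal, the identity being used is

\[
  \det R = \prod_i R_{ii}
  \qquad\Longrightarrow\qquad
  \log\det R = \sum_i \log R_{ii}
\]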

testGaussianBayesNet.cpp
@@ -68,39 +68,6 @@ TEST( GaussianBayesNet, Matrix )
 }
 
 /* ************************************************************************* */
-/**
- * Calculate log-density for given values `x`:
- *   -0.5*(error + n*log(2*pi) + log det(Sigma))
- * where x is the vector of values, and Sigma is the covariance matrix.
- */
-double logDensity(const GaussianConditional::shared_ptr& gc,
-                  const VectorValues& x) {
-  constexpr double log2pi = 1.8378770664093454835606594728112;
-  size_t n = gc->d().size();
-  // log det(Sigma)) = - 2 * gc->logDeterminant()
-  double sum = gc->error(x) + n * log2pi - 2 * gc->logDeterminant();
-  return -0.5 * sum;
-}
-
-/**
- * Calculate probability density for given values `x`:
- *   exp(-0.5*error(x)) / sqrt((2*pi)^n*det(Sigma))
- * where x is the vector of values, and Sigma is the covariance matrix.
- */
-double evaluate(const GaussianConditional::shared_ptr& gc,
-                const VectorValues& x) {
-  return exp(logDensity(gc, x));
-}
-
-/** Calculate probability for given values `x` */
-double evaluate(const GaussianBayesNet& gbn, const VectorValues& x) {
-  double density = 1.0;
-  for (const auto& conditional : gbn) {
-    if (conditional) density *= evaluate(conditional, x);
-  }
-  return density;
-}
-
 // Check that the evaluate function matches direct calculation with R.
 TEST(GaussianBayesNet, Evaluate1) {
   // Let's evaluate at the mean

@@ -115,7 +82,7 @@ TEST(GaussianBayesNet, Evaluate1) {
   // the normalization constant 1.0/sqrt((2*pi*Sigma).det()).
   // The covariance matrix inv(Sigma) = R'*R, so the determinant is
   const double expected = sqrt((invSigma / (2 * M_PI)).determinant());
-  const double actual = evaluate(smallBayesNet, mean);
+  const double actual = smallBayesNet.evaluate(mean);
   EXPECT_DOUBLES_EQUAL(expected, actual, 1e-9);
 }
 
@@ -126,7 +93,7 @@ TEST(GaussianBayesNet, Evaluate2) {
   const Matrix R = noisyBayesNet.matrix().first;
   const Matrix invSigma = R.transpose() * R;
   const double expected = sqrt((invSigma / (2 * M_PI)).determinant());
-  const double actual = evaluate(noisyBayesNet, mean);
+  const double actual = noisyBayesNet.evaluate(mean);
   EXPECT_DOUBLES_EQUAL(expected, actual, 1e-9);
 }
 
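In both tests, `expected` is the density at the mean, where error(x) = 0, so only the normalization constant remains:

\[
  p(\mu) = \frac{1}{\sqrt{(2\pi)^{n}\det\Sigma}}
         = \sqrt{\det\!\bigl(\Sigma^{-1}/(2\pi)\bigr)},
  \qquad \Sigma^{-1} = R^{\top}R
\]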

testGaussianConditional.cpp
@@ -130,6 +130,44 @@ TEST( GaussianConditional, equals )
   EXPECT( expected.equals(actual) );
 }
 
+namespace density {
+static const Key key = 77;
+static const auto unitPrior =
+                      GaussianConditional(key, Vector1::Constant(5), I_1x1),
+                  widerPrior =
+                      GaussianConditional(key, Vector1::Constant(5), I_1x1,
+                                          noiseModel::Isotropic::Sigma(1, 3.0));
+}  // namespace density
+
+/* ************************************************************************* */
+// Check that the evaluate function matches direct calculation with R.
+TEST(GaussianConditional, Evaluate1) {
+  // Let's evaluate at the mean
+  const VectorValues mean = density::unitPrior.solve(VectorValues());
+
+  // We get the Hessian matrix, which has noise model applied!
+  const Matrix invSigma = density::unitPrior.information();
+
+  // A Gaussian density ~ exp (-0.5*(Rx-d)'*(Rx-d))
+  // which at the mean is 1.0! So, the only thing we need to calculate is
+  // the normalization constant 1.0/sqrt((2*pi*Sigma).det()).
+  // The covariance matrix inv(Sigma) = R'*R, so the determinant is
+  const double expected = sqrt((invSigma / (2 * M_PI)).determinant());
+  const double actual = density::unitPrior.evaluate(mean);
+  EXPECT_DOUBLES_EQUAL(expected, actual, 1e-9);
+}
+
+// Check the evaluate with non-unit noise.
+TEST(GaussianConditional, Evaluate2) {
+  // See comments in test above.
+  const VectorValues mean = density::widerPrior.solve(VectorValues());
+  const Matrix R = density::widerPrior.R();
+  const Matrix invSigma = density::widerPrior.information();
+  const double expected = sqrt((invSigma / (2 * M_PI)).determinant());
+  const double actual = density::widerPrior.evaluate(mean);
+  EXPECT_DOUBLES_EQUAL(expected, actual, 1e-9);
+}
+
 /* ************************************************************************* */
 TEST( GaussianConditional, solve )
 {
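As a worked instance for Evaluate2 (assuming widerPrior as defined above, i.e. sigma = 3 and n = 1), the value both sides of the test should compute is

\[
  p(\mu) = \frac{1}{\sqrt{2\pi\sigma^{2}}} = \frac{1}{3\sqrt{2\pi}} \approx 0.1330
\]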