kill logNormalizationConstant in favor of negLogConstant

parent e09344c6ba
commit e95b8be014
@@ -113,7 +113,6 @@ virtual class DiscreteConditional : gtsam::DecisionTreeFactor {
 
   // Standard interface
   double negLogConstant() const;
-  double logNormalizationConstant() const;
   double logProbability(const gtsam::DiscreteValues& values) const;
   double evaluate(const gtsam::DiscreteValues& values) const;
   double error(const gtsam::DiscreteValues& values) const;
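Note: dropping the wrapper accessor is source-breaking for downstream users. Since K = -log(k), the removed quantity is just the negation of the new one; a hypothetical migration helper (illustration only, not part of this commit):

```cpp
#include <gtsam/discrete/DiscreteConditional.h>

// Hypothetical shim: logNormalizationConstant() was log(k), while
// negLogConstant() returns K = -log(k), so the old value is the negation.
double oldLogNormalizationConstant(const gtsam::DiscreteConditional& c) {
  return -c.negLogConstant();
}
```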
@@ -155,7 +155,7 @@ void HybridGaussianConditional::print(const std::string &s,
           std::cout << "(" << formatter(dk.first) << ", " << dk.second << "), ";
         }
         std::cout << std::endl
-                  << " logNormalizationConstant: " << logNormalizationConstant()
+                  << " logNormalizationConstant: " << -negLogConstant()
                   << std::endl
                   << std::endl;
   conditionals_.print(
@@ -62,7 +62,6 @@ virtual class HybridConditional {
 
   // Standard interface:
   double negLogConstant() const;
-  double logNormalizationConstant() const;
   double logProbability(const gtsam::HybridValues& values) const;
   double evaluate(const gtsam::HybridValues& values) const;
   double operator()(const gtsam::HybridValues& values) const;
@@ -61,11 +61,11 @@ const HybridGaussianConditional hybrid_conditional({Z(0)}, {X(0)}, mode,
 TEST(HybridGaussianConditional, Invariants) {
   using namespace equal_constants;
 
-  // Check that the conditional normalization constant is the max of all
-  // constants which are all equal, in this case, hence:
-  const double K = hybrid_conditional.logNormalizationConstant();
-  EXPECT_DOUBLES_EQUAL(K, conditionals[0]->logNormalizationConstant(), 1e-8);
-  EXPECT_DOUBLES_EQUAL(K, conditionals[1]->logNormalizationConstant(), 1e-8);
+  // Check that the conditional (negative log) normalization constant is the min
+  // of all constants which are all equal, in this case, hence:
+  const double K = hybrid_conditional.negLogConstant();
+  EXPECT_DOUBLES_EQUAL(K, conditionals[0]->negLogConstant(), 1e-8);
+  EXPECT_DOUBLES_EQUAL(K, conditionals[1]->negLogConstant(), 1e-8);
 
   EXPECT(HybridGaussianConditional::CheckInvariants(hybrid_conditional, hv0));
   EXPECT(HybridGaussianConditional::CheckInvariants(hybrid_conditional, hv1));
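Note on why "max" becomes "min": each component m has its own constant k_m, and the hybrid conditional keeps the largest k_m so that per-component correction terms stay nonnegative. Negating the log turns that maximum into a minimum; a sketch of the identity, with K_m denoting each component's negLogConstant:

```latex
K_m = -\log k_m, \qquad
K_{\text{hybrid}} = -\log\Bigl(\max_m k_m\Bigr) = \min_m K_m .
```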
@@ -231,8 +231,8 @@ TEST(HybridGaussianConditional, Likelihood2) {
     CHECK(jf1->rows() == 2);
 
     // Check that the constant C1 is properly encoded in the JacobianFactor.
-    const double C1 = hybrid_conditional.logNormalizationConstant() -
-                      conditionals[1]->logNormalizationConstant();
+    const double C1 =
+        conditionals[1]->negLogConstant() - hybrid_conditional.negLogConstant();
     const double c1 = std::sqrt(2.0 * C1);
     Vector expected_unwhitened(2);
     expected_unwhitened << 4.9 - 5.0, -c1;
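How the extra JacobianFactor row encodes C1: a row with right-hand side -c1 and c1 = sqrt(2*C1) contributes half its square to the factor's error, so exponentiating reproduces the ratio of normalization constants. In the test's notation:

```latex
C_1 = K_1 - K_{\text{hybrid}} \ge 0, \qquad
c_1 = \sqrt{2\,C_1} \;\Rightarrow\; \tfrac12 c_1^2 = C_1,
\qquad e^{-\tfrac12 c_1^2} = e^{-C_1} .
```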
@@ -59,23 +59,14 @@ double Conditional<FACTOR, DERIVEDCONDITIONAL>::evaluate(
 
 /* ************************************************************************* */
 template <class FACTOR, class DERIVEDCONDITIONAL>
-double Conditional<FACTOR, DERIVEDCONDITIONAL>::negLogConstant()
-    const {
-  throw std::runtime_error(
-      "Conditional::negLogConstant is not implemented");
-}
-
-/* ************************************************************************* */
-template <class FACTOR, class DERIVEDCONDITIONAL>
-double Conditional<FACTOR, DERIVEDCONDITIONAL>::logNormalizationConstant()
-    const {
-  return -negLogConstant();
+double Conditional<FACTOR, DERIVEDCONDITIONAL>::negLogConstant() const {
+  throw std::runtime_error("Conditional::negLogConstant is not implemented");
 }
 
 /* ************************************************************************* */
 template <class FACTOR, class DERIVEDCONDITIONAL>
 double Conditional<FACTOR, DERIVEDCONDITIONAL>::normalizationConstant() const {
-  return std::exp(logNormalizationConstant());
+  return std::exp(-negLogConstant());
 }
 
 /* ************************************************************************* */
@@ -90,8 +81,8 @@ bool Conditional<FACTOR, DERIVEDCONDITIONAL>::CheckInvariants(
   const double logProb = conditional.logProbability(values);
   if (std::abs(prob_or_density - std::exp(logProb)) > 1e-9)
     return false;  // logProb is not consistent with prob_or_density
-  if (std::abs(conditional.logNormalizationConstant() -
-               std::log(conditional.normalizationConstant())) > 1e-9)
+  if (std::abs(conditional.negLogConstant() -
+               (-std::log(conditional.normalizationConstant()))) > 1e-9)
     return false;  // log normalization constant is not consistent with
                    // normalization constant
   const double error = conditional.error(values);
@@ -39,7 +39,8 @@ namespace gtsam {
  *   logProbability(x) = -(K + error(x))
  * i.e., K = -log(k). The normalization constant k is assumed to *not* depend
  * on any argument, only (possibly) on the conditional parameters.
- * This class provides a default logNormalizationConstant() == 0.0.
+ * This class provides a default negative log normalization constant
+ * negLogConstant() == 0.0.
  *
  * There are four broad classes of conditionals that derive from Conditional:
  *
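Spelled out, the conventions this doc block relies on, with k the normalization constant and K the value returned by negLogConstant():

```latex
p(x \mid \theta) = k(\theta)\, e^{-\mathrm{error}(x)}, \qquad
\log p(x \mid \theta) = \log k - \mathrm{error}(x)
                      = -\bigl(K + \mathrm{error}(x)\bigr), \qquad
K = -\log k .
```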
@@ -165,19 +166,13 @@ namespace gtsam {
 
     /**
      * @brief All conditional types need to implement this as the negative log
-     * of the normalization constant.
+     * of the normalization constant to make it such that error>=0.
      *
      * @return double
      */
     virtual double negLogConstant() const;
 
-    /**
-     * All conditional types need to implement a log normalization constant to
-     * make it such that error>=0.
-     */
-    virtual double logNormalizationConstant() const;
-
-    /** Non-virtual, exponentiate logNormalizationConstant. */
+    /** Non-virtual, negate and exponentiate negLogConstant. */
     double normalizationConstant() const;
 
     /// @}
@@ -217,9 +212,9 @@ namespace gtsam {
      * - evaluate >= 0.0
      * - evaluate(x) == conditional(x)
      * - exp(logProbability(x)) == evaluate(x)
-     * - logNormalizationConstant() = log(normalizationConstant())
+     * - negLogConstant() = -log(normalizationConstant())
      * - error >= 0.0
-     * - logProbability(x) == logNormalizationConstant() - error(x)
+     * - logProbability(x) == -(negLogConstant() + error(x))
      *
      * @param conditional The conditional to test, as a reference to the derived type.
      * @tparam VALUES HybridValues, or a more narrow type like DiscreteValues.
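A minimal sketch of what CheckInvariants verifies, exercised on a concrete GaussianConditional. The accessors all appear elsewhere in this diff; the key and the test point are made up for illustration:

```cpp
#include <gtsam/linear/GaussianConditional.h>
#include <gtsam/linear/VectorValues.h>
#include <cassert>
#include <cmath>

int main() {
  using namespace gtsam;
  // Zero-mean Gaussian prior on one 3D variable with sigma = 2.0.
  auto c = GaussianConditional::FromMeanAndStddev(0, Vector3::Zero(), 2.0);
  VectorValues x;
  x.insert(0, Vector3(0.1, -0.2, 0.3));
  // negLogConstant() == -log(normalizationConstant())
  assert(std::abs(c.negLogConstant() +
                  std::log(c.normalizationConstant())) < 1e-9);
  // logProbability(x) == -(negLogConstant() + error(x))
  assert(std::abs(c.logProbability(x) +
                  c.negLogConstant() + c.error(x)) < 1e-9);
  return 0;
}
```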
@@ -243,25 +243,24 @@ namespace gtsam {
 }
 
 /* ************************************************************************* */
-double GaussianBayesNet::logNormalizationConstant() const {
+double GaussianBayesNet::negLogConstant() const {
   /*
     normalization constant = 1.0 / sqrt((2*pi)^n*det(Sigma))
-    logConstant = -log(normalizationConstant)
-                = -0.5 * n*log(2*pi) - 0.5 * log(det(Sigma))
+    negLogConstant = -log(normalizationConstant)
+                   = 0.5 * n*log(2*pi) + 0.5 * log(det(Sigma))
 
     log(det(Sigma)) = -2.0 * logDeterminant()
-    thus, logConstant = -0.5*n*log(2*pi) + logDeterminant()
+    thus, negLogConstant = 0.5*n*log(2*pi) - logDeterminant()
 
-    BayesNet logConstant = sum(-0.5*n_i*log(2*pi) + logDeterminant_i())
-                         = sum(-0.5*n_i*log(2*pi)) + sum(logDeterminant_i())
-                         = sum(-0.5*n_i*log(2*pi)) + bn->logDeterminant()
-                         = sum(logNormalizationConstant_i)
+    BayesNet negLogConstant = sum(0.5*n_i*log(2*pi) - logDeterminant_i())
+                            = sum(0.5*n_i*log(2*pi)) - sum(logDeterminant_i())
+                            = sum(0.5*n_i*log(2*pi)) - bn->logDeterminant()
   */
-  double logNormConst = 0.0;
+  double negLogNormConst = 0.0;
   for (const sharedConditional& cg : *this) {
-    logNormConst += cg->logNormalizationConstant();
+    negLogNormConst += cg->negLogConstant();
   }
-  return logNormConst;
+  return negLogNormConst;
 }
 
 /* ************************************************************************* */
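The derivation in the comment, in equation form, with Sigma the joint covariance and R_i the square-root information matrix of conditional i (so log det Sigma = -2 log|det R|):

```latex
K = -\log \frac{1}{\sqrt{(2\pi)^n \det\Sigma}}
  = \frac{n}{2}\log(2\pi) + \frac12 \log\det\Sigma
  = \frac{n}{2}\log(2\pi) - \log\lvert\det R\rvert,
\qquad
K_{\text{net}} = \sum_i \Bigl(\frac{n_i}{2}\log(2\pi)
               - \log\lvert\det R_i\rvert\Bigr) = \sum_i K_i .
```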
@@ -235,12 +235,12 @@ namespace gtsam {
     double logDeterminant() const;
 
     /**
-     * @brief Get the log of the normalization constant corresponding to the
-     * joint Gaussian density represented by this Bayes net.
+     * @brief Get the negative log of the normalization constant corresponding
+     * to the joint Gaussian density represented by this Bayes net.
      *
      * @return double
      */
-    double logNormalizationConstant() const;
+    double negLogConstant() const;
 
     /**
      * Backsubstitute with a different RHS vector than the one stored in this BayesNet.
@@ -121,7 +121,7 @@ namespace gtsam {
       const auto mean = solve({});  // solve for mean.
       mean.print(" mean", formatter);
     }
-    cout << " logNormalizationConstant: " << logNormalizationConstant() << endl;
+    cout << " logNormalizationConstant: " << -negLogConstant() << endl;
     if (model_)
       model_->print(" Noise model: ");
     else
@@ -198,7 +198,7 @@ namespace gtsam {
   // density = k exp(-error(x))
   // log = log(k) - error(x)
   double GaussianConditional::logProbability(const VectorValues& x) const {
-    return logNormalizationConstant() - error(x);
+    return -(negLogConstant() + error(x));
   }
 
   double GaussianConditional::logProbability(const HybridValues& x) const {
@@ -266,11 +266,6 @@ double Gaussian::negLogConstant() const {
   return 0.5 * n * log2pi - logDetR();
 }
 
-/* *******************************************************************************/
-double Gaussian::logNormalizationConstant() const {
-  return -negLogConstant();
-}
-
 /* ************************************************************************* */
 // Diagonal
 /* ************************************************************************* */
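Where the retained formula comes from: for a Gaussian noise model with covariance Sigma = (R'R)^-1,

```latex
k = \frac{1}{\sqrt{\lvert 2\pi\Sigma\rvert}}, \qquad
-\log k = \frac{n}{2}\log(2\pi) + \frac12\log\det\Sigma
        = \frac{n}{2}\log(2\pi) - \log\lvert\det R\rvert,
```

which is exactly `0.5 * n * log2pi - logDetR()`.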
@@ -273,20 +273,12 @@ namespace gtsam {
 
     /**
      * @brief Compute the negative log of the normalization constant
-     * for a Gaussian noise model k = \sqrt(1/|2πΣ|).
+     * for a Gaussian noise model k = 1/\sqrt(|2πΣ|).
      *
      * @return double
      */
     double negLogConstant() const;
 
-    /**
-     * @brief Method to compute the normalization constant
-     * for a Gaussian noise model k = \sqrt(1/|2πΣ|).
-     *
-     * @return double
-     */
-    double logNormalizationConstant() const;
-
    private:
 #ifdef GTSAM_ENABLE_BOOST_SERIALIZATION
     /** Serialization function */
@@ -549,7 +549,6 @@ virtual class GaussianConditional : gtsam::JacobianFactor {
 
   // Standard Interface
   double negLogConstant() const;
-  double logNormalizationConstant() const;
   double logProbability(const gtsam::VectorValues& x) const;
   double evaluate(const gtsam::VectorValues& x) const;
   double error(const gtsam::VectorValues& x) const;
@@ -76,12 +76,11 @@ TEST(GaussianBayesNet, Evaluate1) {
   // the normalization constant 1.0/sqrt((2*pi*Sigma).det()).
   // The covariance matrix inv(Sigma) = R'*R, so the determinant is
   const double constant = sqrt((invSigma / (2 * M_PI)).determinant());
-  EXPECT_DOUBLES_EQUAL(log(constant),
-                       smallBayesNet.at(0)->logNormalizationConstant() +
-                           smallBayesNet.at(1)->logNormalizationConstant(),
-                       1e-9);
-  EXPECT_DOUBLES_EQUAL(log(constant), smallBayesNet.logNormalizationConstant(),
+  EXPECT_DOUBLES_EQUAL(-log(constant),
+                       smallBayesNet.at(0)->negLogConstant() +
+                           smallBayesNet.at(1)->negLogConstant(),
                        1e-9);
+  EXPECT_DOUBLES_EQUAL(-log(constant), smallBayesNet.negLogConstant(), 1e-9);
   const double actual = smallBayesNet.evaluate(mean);
   EXPECT_DOUBLES_EQUAL(constant, actual, 1e-9);
 }
@@ -486,17 +486,17 @@ TEST(GaussianConditional, Error) {
 
 /* ************************************************************************* */
 // Similar test for multivariate gaussian but with sigma 2.0
-TEST(GaussianConditional, LogNormalizationConstant) {
+TEST(GaussianConditional, NegLogConstant) {
   double sigma = 2.0;
   auto conditional = GaussianConditional::FromMeanAndStddev(X(0), Vector3::Zero(), sigma);
   VectorValues x;
   x.insert(X(0), Vector3::Zero());
   Matrix3 Sigma = I_3x3 * sigma * sigma;
-  double expectedLogNormalizationConstant =
-      log(1 / sqrt((2 * M_PI * Sigma).determinant()));
+  double expectedNegLogConstant =
+      -log(1 / sqrt((2 * M_PI * Sigma).determinant()));
 
-  EXPECT_DOUBLES_EQUAL(expectedLogNormalizationConstant,
-                       conditional.logNormalizationConstant(), 1e-9);
+  EXPECT_DOUBLES_EQUAL(expectedNegLogConstant, conditional.negLogConstant(),
+                       1e-9);
 }
 
 /* ************************************************************************* */
@@ -807,50 +807,50 @@ TEST(NoiseModel, NonDiagonalGaussian)
   }
 }
 
-TEST(NoiseModel, LogNormalizationConstant1D) {
+TEST(NoiseModel, NegLogNormalizationConstant1D) {
   // Very simple 1D noise model, which we can compute by hand.
   double sigma = 0.1;
-  // For expected values, we compute log(1/sqrt(|2πΣ|)) by hand.
-  // = -0.5*(log(2π) + log(Σ)) (since it is 1D)
-  double expected_value = -0.5 * log(2 * M_PI * sigma * sigma);
+  // For expected values, we compute -log(1/sqrt(|2πΣ|)) by hand.
+  // = 0.5*(log(2π) + log(Σ)) (since it is 1D)
+  double expected_value = 0.5 * log(2 * M_PI * sigma * sigma);
 
   // Gaussian
   {
     Matrix11 R;
     R << 1 / sigma;
     auto noise_model = Gaussian::SqrtInformation(R);
-    double actual_value = noise_model->logNormalizationConstant();
+    double actual_value = noise_model->negLogConstant();
     EXPECT_DOUBLES_EQUAL(expected_value, actual_value, 1e-9);
   }
   // Diagonal
   {
     auto noise_model = Diagonal::Sigmas(Vector1(sigma));
-    double actual_value = noise_model->logNormalizationConstant();
+    double actual_value = noise_model->negLogConstant();
     EXPECT_DOUBLES_EQUAL(expected_value, actual_value, 1e-9);
   }
   // Isotropic
   {
     auto noise_model = Isotropic::Sigma(1, sigma);
-    double actual_value = noise_model->logNormalizationConstant();
+    double actual_value = noise_model->negLogConstant();
     EXPECT_DOUBLES_EQUAL(expected_value, actual_value, 1e-9);
   }
   // Unit
   {
     auto noise_model = Unit::Create(1);
-    double actual_value = noise_model->logNormalizationConstant();
+    double actual_value = noise_model->negLogConstant();
     double sigma = 1.0;
-    expected_value = -0.5 * log(2 * M_PI * sigma * sigma);
+    expected_value = 0.5 * log(2 * M_PI * sigma * sigma);
     EXPECT_DOUBLES_EQUAL(expected_value, actual_value, 1e-9);
   }
 }
 
-TEST(NoiseModel, LogNormalizationConstant3D) {
+TEST(NoiseModel, NegLogNormalizationConstant3D) {
   // Simple 3D noise model, which we can compute by hand.
   double sigma = 0.1;
   size_t n = 3;
-  // We compute the expected values just like in the LogNormalizationConstant1D
+  // We compute the expected values just like in the NegLogNormalizationConstant1D
   // test, but we multiply by 3 due to the determinant.
-  double expected_value = -0.5 * n * log(2 * M_PI * sigma * sigma);
+  double expected_value = 0.5 * n * log(2 * M_PI * sigma * sigma);
 
   // Gaussian
   {
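A quick sanity check of the 1D hand computation above, for sigma = 0.1:

```latex
K = \tfrac12 \log(2\pi\sigma^2) = \tfrac12 \log(0.0628\ldots) \approx -1.384,
\qquad k = e^{-K} \approx 3.99 .
```

The negative value is expected: with a small sigma the peak density k exceeds one, so its negative log is below zero.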
@@ -859,27 +859,27 @@ TEST(NoiseModel, LogNormalizationConstant3D) {
         0, 1 / sigma, 4,  //
         0, 0, 1 / sigma;
     auto noise_model = Gaussian::SqrtInformation(R);
-    double actual_value = noise_model->logNormalizationConstant();
+    double actual_value = noise_model->negLogConstant();
     EXPECT_DOUBLES_EQUAL(expected_value, actual_value, 1e-9);
   }
   // Diagonal
   {
     auto noise_model = Diagonal::Sigmas(Vector3(sigma, sigma, sigma));
-    double actual_value = noise_model->logNormalizationConstant();
+    double actual_value = noise_model->negLogConstant();
     EXPECT_DOUBLES_EQUAL(expected_value, actual_value, 1e-9);
   }
   // Isotropic
   {
     auto noise_model = Isotropic::Sigma(n, sigma);
-    double actual_value = noise_model->logNormalizationConstant();
+    double actual_value = noise_model->negLogConstant();
     EXPECT_DOUBLES_EQUAL(expected_value, actual_value, 1e-9);
   }
   // Unit
   {
     auto noise_model = Unit::Create(3);
-    double actual_value = noise_model->logNormalizationConstant();
+    double actual_value = noise_model->negLogConstant();
     double sigma = 1.0;
-    expected_value = -0.5 * n * log(2 * M_PI * sigma * sigma);
+    expected_value = 0.5 * n * log(2 * M_PI * sigma * sigma);
     EXPECT_DOUBLES_EQUAL(expected_value, actual_value, 1e-9);
   }
 }