Merge pull request #1839 from borglab/improved-api-3

release/4.3a0
Varun Agrawal 2024-09-23 17:19:17 -04:00 committed by GitHub
commit fd7df61d45
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
27 changed files with 175 additions and 173 deletions


@@ -465,6 +465,12 @@ string DiscreteConditional::html(const KeyFormatter& keyFormatter,
 double DiscreteConditional::evaluate(const HybridValues& x) const {
   return this->evaluate(x.discrete());
 }
+
+/* ************************************************************************* */
+double DiscreteConditional::negLogConstant() const {
+  return 0.0;
+}
+
 /* ************************************************************************* */
 }  // namespace gtsam
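
The zero constant encodes that a discrete conditional is already normalized: with k = 1, error(x) = -log(P(x)) is non-negative because P(x) <= 1. A minimal standalone sketch of that identity (plain C++, not part of this commit; the probability value is hypothetical):

```cpp
#include <cassert>
#include <cmath>

int main() {
  // Hypothetical entry of a discrete conditional probability table.
  double p = 0.3;  // P(x) <= 1 by definition

  // With negLogConstant() == 0, error(x) = -log(P(x)) - 0 >= 0.
  double negLogConstant = 0.0;
  double error = -std::log(p) - negLogConstant;
  assert(error >= 0.0);
  return 0;
}
```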


@@ -264,11 +264,12 @@ class GTSAM_EXPORT DiscreteConditional
   }

   /**
-   * logNormalizationConstant K is just zero, such that
-   * logProbability(x) = log(evaluate(x)) = - error(x)
-   * and hence error(x) = - log(evaluate(x)) > 0 for all x.
+   * negLogConstant is just zero, such that
+   * -logProbability(x) = -log(evaluate(x)) = error(x)
+   * and hence error(x) > 0 for all x.
+   * Thus -log(k) for the normalization constant k is 0.
   */
-  double logNormalizationConstant() const override { return 0.0; }
+  double negLogConstant() const override;

   /// @}


@@ -112,7 +112,7 @@ virtual class DiscreteConditional : gtsam::DecisionTreeFactor {
       const std::vector<double>& table);

   // Standard interface
-  double logNormalizationConstant() const;
+  double negLogConstant() const;
   double logProbability(const gtsam::DiscreteValues& values) const;
   double evaluate(const gtsam::DiscreteValues& values) const;
   double error(const gtsam::DiscreteValues& values) const;


@@ -161,18 +161,18 @@ double HybridConditional::logProbability(const HybridValues &values) const {
 }

 /* ************************************************************************ */
-double HybridConditional::logNormalizationConstant() const {
+double HybridConditional::negLogConstant() const {
   if (auto gc = asGaussian()) {
-    return gc->logNormalizationConstant();
+    return gc->negLogConstant();
   }
   if (auto gm = asHybrid()) {
-    return gm->logNormalizationConstant();  // 0.0!
+    return gm->negLogConstant();  // 0.0!
   }
   if (auto dc = asDiscrete()) {
-    return dc->logNormalizationConstant();  // 0.0!
+    return dc->negLogConstant();  // 0.0!
   }
   throw std::runtime_error(
-      "HybridConditional::logProbability: conditional type not handled");
+      "HybridConditional::negLogConstant: conditional type not handled");
 }

 /* ************************************************************************ */


@@ -193,11 +193,12 @@ class GTSAM_EXPORT HybridConditional
   double logProbability(const HybridValues& values) const override;

   /**
-   * Return the log normalization constant.
+   * Return the negative log of the normalization constant.
+   * This shows up in logProbability as -(error(x) + negLogConstant).
    * Note this is 0.0 for discrete and hybrid conditionals, but depends
    * on the continuous parameters for Gaussian conditionals.
   */
-  double logNormalizationConstant() const override;
+  double negLogConstant() const override;

   /// Return the probability (or density) of the underlying conditional.
   double evaluate(const HybridValues& values) const override;


@@ -36,7 +36,7 @@ HybridGaussianFactor::FactorValuePairs GetFactorValuePairs(
     // Check if conditional is pruned
     if (conditional) {
       // Assign log(\sqrt(|2πΣ|)) = -log(1 / sqrt(|2πΣ|))
-      value = -conditional->logNormalizationConstant();
+      value = conditional->negLogConstant();
     }
     return {std::dynamic_pointer_cast<GaussianFactor>(conditional), value};
   };
@@ -51,14 +51,14 @@ HybridGaussianConditional::HybridGaussianConditional(
           discreteParents, GetFactorValuePairs(conditionals)),
       BaseConditional(continuousFrontals.size()),
       conditionals_(conditionals) {
-  // Calculate logConstant_ as the minimum of the log normalizers of the
-  // conditionals, by visiting the decision tree:
-  logConstant_ = std::numeric_limits<double>::infinity();
+  // Calculate negLogConstant_ as the minimum of the negative-log normalizers of
+  // the conditionals, by visiting the decision tree:
+  negLogConstant_ = std::numeric_limits<double>::infinity();
   conditionals_.visit(
       [this](const GaussianConditional::shared_ptr &conditional) {
         if (conditional) {
-          this->logConstant_ = std::min(
-              this->logConstant_, -conditional->logNormalizationConstant());
+          this->negLogConstant_ =
+              std::min(this->negLogConstant_, conditional->negLogConstant());
         }
       });
 }
@@ -84,8 +84,7 @@ GaussianFactorGraphTree HybridGaussianConditional::asGaussianFactorGraphTree()
   auto wrap = [this](const GaussianConditional::shared_ptr &gc) {
     // First check if conditional has not been pruned
     if (gc) {
-      const double Cgm_Kgcm =
-          -this->logConstant_ - gc->logNormalizationConstant();
+      const double Cgm_Kgcm = gc->negLogConstant() - this->negLogConstant_;
       // If there is a difference in the covariances, we need to account for
       // that since the error is dependent on the mode.
       if (Cgm_Kgcm > 0.0) {
@@ -156,8 +155,7 @@ void HybridGaussianConditional::print(const std::string &s,
       std::cout << "(" << formatter(dk.first) << ", " << dk.second << "), ";
     }
     std::cout << std::endl
-              << " logNormalizationConstant: " << logNormalizationConstant()
-              << std::endl
+              << " logNormalizationConstant: " << -negLogConstant() << std::endl
               << std::endl;
     conditionals_.print(
         "", [&](Key k) { return formatter(k); },
@@ -215,8 +213,7 @@ std::shared_ptr<HybridGaussianFactor> HybridGaussianConditional::likelihood(
       [&](const GaussianConditional::shared_ptr &conditional)
           -> GaussianFactorValuePair {
         const auto likelihood_m = conditional->likelihood(given);
-        const double Cgm_Kgcm =
-            -logConstant_ - conditional->logNormalizationConstant();
+        const double Cgm_Kgcm = conditional->negLogConstant() - negLogConstant_;
         if (Cgm_Kgcm == 0.0) {
           return {likelihood_m, 0.0};
         } else {
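
The constructor stores the minimum of the per-mode negative-log constants, so the per-mode offset Cgm_Kgcm computed in `asGaussianFactorGraphTree` and `likelihood` is always non-negative. A standalone sketch of that arithmetic (plain C++, not from this commit; the 1D sigmas are hypothetical):

```cpp
#include <algorithm>
#include <cassert>
#include <cmath>
#include <vector>

int main() {
  // Hypothetical per-mode standard deviations of 1D Gaussian conditionals.
  std::vector<double> sigmas = {0.5, 1.0, 2.0};

  // Per-mode negLogConstant = -log(1/sqrt(2*pi*sigma^2)) = log(sqrt(|2*pi*Sigma|)).
  std::vector<double> negLogConstants;
  for (double s : sigmas)
    negLogConstants.push_back(0.5 * std::log(2.0 * M_PI * s * s));

  // negLogConstant_ is the minimum over all modes, as in the visit above.
  double negLogConstant_ =
      *std::min_element(negLogConstants.begin(), negLogConstants.end());

  // Hence Cgm_Kgcm = negLogConstant_m - negLogConstant_ >= 0 for every mode.
  for (double c : negLogConstants) assert(c - negLogConstant_ >= 0.0);
  return 0;
}
```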


@@ -66,7 +66,7 @@ class GTSAM_EXPORT HybridGaussianConditional
   Conditionals conditionals_;  ///< a decision tree of Gaussian conditionals.
   ///< Negative-log of the normalization constant (log(\sqrt(|2πΣ|))).
   ///< Take advantage of the neg-log space so everything is a minimization
-  double logConstant_;
+  double negLogConstant_;

   /**
    * @brief Convert a HybridGaussianConditional of conditionals into
@@ -150,9 +150,15 @@ class GTSAM_EXPORT HybridGaussianConditional
   /// Returns the continuous keys among the parents.
   KeyVector continuousParents() const;

-  /// The log normalization constant is max of the the individual
-  /// log-normalization constants.
-  double logNormalizationConstant() const override { return -logConstant_; }
+  /**
+   * @brief Return the normalization constant in negative log space.
+   *
+   * The returned value is the minimum of the individual
+   * negative-log normalization constants.
+   *
+   * @return double
+   */
+  inline double negLogConstant() const override { return negLogConstant_; }

   /**
    * Create a likelihood factor for a hybrid Gaussian conditional,


@@ -233,18 +233,18 @@ continuousElimination(const HybridGaussianFactorGraph &factors,

 /* ************************************************************************ */
 /**
- * @brief Exponentiate log-values, not necessarily normalized, normalize, and
- * return as AlgebraicDecisionTree<Key>.
+ * @brief Exponentiate (not necessarily normalized) negative log-values,
+ * normalize, and then return as AlgebraicDecisionTree<Key>.
  *
  * @param logValues DecisionTree of (unnormalized) log values.
  * @return AlgebraicDecisionTree<Key>
  */
-static AlgebraicDecisionTree<Key> probabilitiesFromLogValues(
+static AlgebraicDecisionTree<Key> probabilitiesFromNegativeLogValues(
     const AlgebraicDecisionTree<Key> &logValues) {
   // Perform normalization
-  double max_log = logValues.max();
+  double min_log = logValues.min();
   AlgebraicDecisionTree<Key> probabilities = DecisionTree<Key, double>(
-      logValues, [&max_log](const double x) { return exp(x - max_log); });
+      logValues, [&min_log](const double x) { return exp(-(x - min_log)); });
   probabilities = probabilities.normalize(probabilities.sum());
   return probabilities;
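
The flipped sign in the exponent is the usual max-shift trick, restated in negative log space: subtracting the minimum neg-log value (the most probable leaf) before exponentiating avoids underflow. A sketch on a plain vector instead of an `AlgebraicDecisionTree` (plain C++, hypothetical values):

```cpp
#include <algorithm>
#include <cmath>
#include <vector>

int main() {
  // Hypothetical unnormalized negative log values.
  std::vector<double> negLogValues = {100.2, 100.9, 103.5};

  // Shift by the minimum so the most probable entry maps to exp(0) = 1.
  double min_log = *std::min_element(negLogValues.begin(), negLogValues.end());

  std::vector<double> probabilities;
  double sum = 0.0;
  for (double x : negLogValues) {
    double p = std::exp(-(x - min_log));  // exp(-(x - min)), never overflows
    probabilities.push_back(p);
    sum += p;
  }

  // Normalize so the probabilities sum to one.
  for (double& p : probabilities) p /= sum;
  return 0;
}
```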
@@ -265,13 +265,13 @@ discreteElimination(const HybridGaussianFactorGraph &factors,
       auto logProbability =
           [&](const GaussianFactor::shared_ptr &factor) -> double {
         if (!factor) return 0.0;
-        return -factor->error(VectorValues());
+        return factor->error(VectorValues());
       };
       AlgebraicDecisionTree<Key> logProbabilities =
           DecisionTree<Key, double>(gmf->factors(), logProbability);

       AlgebraicDecisionTree<Key> probabilities =
-          probabilitiesFromLogValues(logProbabilities);
+          probabilitiesFromNegativeLogValues(logProbabilities);
       dfg.emplace_shared<DecisionTreeFactor>(gmf->discreteKeys(),
                                              probabilities);

@@ -321,23 +321,23 @@ using Result = std::pair<std::shared_ptr<GaussianConditional>,
 static std::shared_ptr<Factor> createDiscreteFactor(
     const DecisionTree<Key, Result> &eliminationResults,
     const DiscreteKeys &discreteSeparator) {
-  auto logProbability = [&](const Result &pair) -> double {
+  auto negLogProbability = [&](const Result &pair) -> double {
     const auto &[conditional, factor] = pair;
     static const VectorValues kEmpty;
     // If the factor is not null, it has no keys, just contains the residual.
     if (!factor) return 1.0;  // TODO(dellaert): not loving this.
-    // Logspace version of:
+    // Negative logspace version of:
     // exp(-factor->error(kEmpty)) / conditional->normalizationConstant();
-    // We take negative of the logNormalizationConstant `log(k)`
-    // to get `log(1/k) = log(\sqrt{|2πΣ|})`.
-    return -factor->error(kEmpty) - conditional->logNormalizationConstant();
+    // negLogConstant gives `-log(k)`
+    // which is `-log(k) = log(1/k) = log(\sqrt{|2πΣ|})`.
+    return factor->error(kEmpty) - conditional->negLogConstant();
   };

-  AlgebraicDecisionTree<Key> logProbabilities(
-      DecisionTree<Key, double>(eliminationResults, logProbability));
+  AlgebraicDecisionTree<Key> negLogProbabilities(
+      DecisionTree<Key, double>(eliminationResults, negLogProbability));
   AlgebraicDecisionTree<Key> probabilities =
-      probabilitiesFromLogValues(logProbabilities);
+      probabilitiesFromNegativeLogValues(negLogProbabilities);
   return std::make_shared<DecisionTreeFactor>(discreteSeparator, probabilities);
 }

@@ -355,8 +355,9 @@ static std::shared_ptr<Factor> createHybridGaussianFactor(
       auto hf = std::dynamic_pointer_cast<HessianFactor>(factor);
       if (!hf) throw std::runtime_error("Expected HessianFactor!");
       // Add 2.0 term since the constant term will be premultiplied by 0.5
-      // as per the Hessian definition
-      hf->constantTerm() += 2.0 * conditional->logNormalizationConstant();
+      // as per the Hessian definition,
+      // and negative since we want log(k)
+      hf->constantTerm() += -2.0 * conditional->negLogConstant();
     }
     return {factor, 0.0};
   };
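
The `-2.0` factor in the last hunk follows from the HessianFactor error form: assuming error(x) = 0.5 (x'Gx - 2x'g + f), a change delta in the constant term f changes the error by 0.5 * delta, so adding -2 * negLogConstant shifts the error by exactly -negLogConstant = log(k). A sketch of that bookkeeping (plain C++, hypothetical value):

```cpp
#include <cassert>

int main() {
  double negLogConstant = 1.7;  // hypothetical -log(k)

  // The constant term is premultiplied by 0.5 in the Hessian error form,
  // so adding -2.0 * negLogConstant changes the error by -negLogConstant.
  double delta = -2.0 * negLogConstant;
  double errorChange = 0.5 * delta;
  assert(errorChange == -negLogConstant);
  return 0;
}
```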


@@ -61,7 +61,7 @@ virtual class HybridConditional {
   size_t nrParents() const;

   // Standard interface:
-  double logNormalizationConstant() const;
+  double negLogConstant() const;
   double logProbability(const gtsam::HybridValues& values) const;
   double evaluate(const gtsam::HybridValues& values) const;
   double operator()(const gtsam::HybridValues& values) const;


@@ -61,11 +61,11 @@ const HybridGaussianConditional hybrid_conditional({Z(0)}, {X(0)}, mode,
 TEST(HybridGaussianConditional, Invariants) {
   using namespace equal_constants;

-  // Check that the conditional normalization constant is the max of all
-  // constants which are all equal, in this case, hence:
-  const double K = hybrid_conditional.logNormalizationConstant();
-  EXPECT_DOUBLES_EQUAL(K, conditionals[0]->logNormalizationConstant(), 1e-8);
-  EXPECT_DOUBLES_EQUAL(K, conditionals[1]->logNormalizationConstant(), 1e-8);
+  // Check that the conditional (negative log) normalization constant is the min
+  // of all constants which are all equal, in this case, hence:
+  const double K = hybrid_conditional.negLogConstant();
+  EXPECT_DOUBLES_EQUAL(K, conditionals[0]->negLogConstant(), 1e-8);
+  EXPECT_DOUBLES_EQUAL(K, conditionals[1]->negLogConstant(), 1e-8);

   EXPECT(HybridGaussianConditional::CheckInvariants(hybrid_conditional, hv0));
   EXPECT(HybridGaussianConditional::CheckInvariants(hybrid_conditional, hv1));
@@ -180,15 +180,16 @@ TEST(HybridGaussianConditional, Error2) {
   // Check result.
   DiscreteKeys discrete_keys{mode};
-  double logNormalizer0 = -conditionals[0]->logNormalizationConstant();
-  double logNormalizer1 = -conditionals[1]->logNormalizationConstant();
-  double minLogNormalizer = std::min(logNormalizer0, logNormalizer1);
+  double negLogConstant0 = conditionals[0]->negLogConstant();
+  double negLogConstant1 = conditionals[1]->negLogConstant();
+  double minErrorConstant = std::min(negLogConstant0, negLogConstant1);

-  // Expected error is e(X) + log(|2πΣ|).
-  // We normalize log(|2πΣ|) with min(logNormalizers) so it is non-negative.
+  // Expected error is e(X) + log(sqrt(|2πΣ|)).
+  // We normalize log(sqrt(|2πΣ|)) with min(negLogConstant)
+  // so it is non-negative.
   std::vector<double> leaves = {
-      conditionals[0]->error(vv) + logNormalizer0 - minLogNormalizer,
-      conditionals[1]->error(vv) + logNormalizer1 - minLogNormalizer};
+      conditionals[0]->error(vv) + negLogConstant0 - minErrorConstant,
+      conditionals[1]->error(vv) + negLogConstant1 - minErrorConstant};
   AlgebraicDecisionTree<Key> expected(discrete_keys, leaves);

   EXPECT(assert_equal(expected, actual, 1e-6));
@@ -196,9 +197,9 @@ TEST(HybridGaussianConditional, Error2) {
   // Check for non-tree version.
   for (size_t mode : {0, 1}) {
     const HybridValues hv{vv, {{M(0), mode}}};
-    EXPECT_DOUBLES_EQUAL(conditionals[mode]->error(vv) -
-                             conditionals[mode]->logNormalizationConstant() -
-                             minLogNormalizer,
+    EXPECT_DOUBLES_EQUAL(conditionals[mode]->error(vv) +
+                             conditionals[mode]->negLogConstant() -
+                             minErrorConstant,
                          hybrid_conditional.error(hv), 1e-8);
   }
 }
@@ -230,8 +231,8 @@ TEST(HybridGaussianConditional, Likelihood2) {
   CHECK(jf1->rows() == 2);

   // Check that the constant C1 is properly encoded in the JacobianFactor.
-  const double C1 = hybrid_conditional.logNormalizationConstant() -
-                    conditionals[1]->logNormalizationConstant();
+  const double C1 =
+      conditionals[1]->negLogConstant() - hybrid_conditional.negLogConstant();
   const double c1 = std::sqrt(2.0 * C1);
   Vector expected_unwhitened(2);
   expected_unwhitened << 4.9 - 5.0, -c1;


@@ -780,9 +780,8 @@ static HybridGaussianFactorGraph CreateFactorGraph(
   // Create HybridGaussianFactor
   // We take negative since we want
   // the underlying scalar to be log(\sqrt(|2πΣ|))
-  std::vector<GaussianFactorValuePair> factors{
-      {f0, -model0->logNormalizationConstant()},
-      {f1, -model1->logNormalizationConstant()}};
+  std::vector<GaussianFactorValuePair> factors{{f0, model0->negLogConstant()},
+                                               {f1, model1->negLogConstant()}};
   HybridGaussianFactor motionFactor({X(0), X(1)}, m1, factors);

   HybridGaussianFactorGraph hfg;


@@ -902,9 +902,8 @@ static HybridNonlinearFactorGraph CreateFactorGraph(
   // Create HybridNonlinearFactor
   // We take negative since we want
   // the underlying scalar to be log(\sqrt(|2πΣ|))
-  std::vector<NonlinearFactorValuePair> factors{
-      {f0, -model0->logNormalizationConstant()},
-      {f1, -model1->logNormalizationConstant()}};
+  std::vector<NonlinearFactorValuePair> factors{{f0, model0->negLogConstant()},
+                                                {f1, model1->negLogConstant()}};
   HybridNonlinearFactor mixtureFactor({X(0), X(1)}, m1, factors);


@@ -59,16 +59,8 @@ double Conditional<FACTOR, DERIVEDCONDITIONAL>::evaluate(

 /* ************************************************************************* */
 template <class FACTOR, class DERIVEDCONDITIONAL>
-double Conditional<FACTOR, DERIVEDCONDITIONAL>::logNormalizationConstant()
-    const {
-  throw std::runtime_error(
-      "Conditional::logNormalizationConstant is not implemented");
-}
-
-/* ************************************************************************* */
-template <class FACTOR, class DERIVEDCONDITIONAL>
-double Conditional<FACTOR, DERIVEDCONDITIONAL>::normalizationConstant() const {
-  return std::exp(logNormalizationConstant());
+double Conditional<FACTOR, DERIVEDCONDITIONAL>::negLogConstant() const {
+  throw std::runtime_error("Conditional::negLogConstant is not implemented");
 }

 /* ************************************************************************* */
@@ -83,13 +75,9 @@ bool Conditional<FACTOR, DERIVEDCONDITIONAL>::CheckInvariants(
   const double logProb = conditional.logProbability(values);
   if (std::abs(prob_or_density - std::exp(logProb)) > 1e-9)
     return false;  // logProb is not consistent with prob_or_density
-  if (std::abs(conditional.logNormalizationConstant() -
-               std::log(conditional.normalizationConstant())) > 1e-9)
-    return false;  // log normalization constant is not consistent with
-                   // normalization constant
   const double error = conditional.error(values);
   if (error < 0.0) return false;  // prob_or_density is negative.
-  const double expected = conditional.logNormalizationConstant() - error;
+  const double expected = -(conditional.negLogConstant() + error);
   if (std::abs(logProb - expected) > 1e-9)
     return false;  // logProb is not consistent with error
   return true;
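
The rewritten invariant, logProbability(x) == -(negLogConstant() + error(x)), can be checked numerically for a 1D Gaussian (standalone sketch, plain C++, hypothetical values):

```cpp
#include <cassert>
#include <cmath>

int main() {
  // 1D Gaussian N(mu, sigma^2) evaluated at a hypothetical point x.
  double mu = 1.0, sigma = 2.0, x = 0.3;

  double negLogConstant = 0.5 * std::log(2.0 * M_PI * sigma * sigma);  // -log(k)
  double error = 0.5 * ((x - mu) / sigma) * ((x - mu) / sigma);

  // Direct density evaluation: p(x) = k * exp(-error(x)).
  double density = std::exp(-error) / std::sqrt(2.0 * M_PI * sigma * sigma);
  double logProbability = std::log(density);

  // Invariant: logProbability(x) == -(negLogConstant + error(x)).
  assert(std::abs(logProbability + negLogConstant + error) < 1e-12);
  return 0;
}
```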


@@ -34,11 +34,13 @@ namespace gtsam {
  * `logProbability` is the main methods that need to be implemented in derived
  * classes. These two methods relate to the `error` method in the factor by:
  *   probability(x) = k exp(-error(x))
- * where k is a normalization constant making \int probability(x) == 1.0, and
- *   logProbability(x) = K - error(x)
- * i.e., K = log(K). The normalization constant K is assumed to *not* depend
+ * where k is a normalization constant making
+ *   \int probability(x) = \int k exp(-error(x)) == 1.0, and
+ *   logProbability(x) = -(K + error(x))
+ * i.e., K = -log(k). The normalization constant k is assumed to *not* depend
  * on any argument, only (possibly) on the conditional parameters.
- * This class provides a default logNormalizationConstant() == 0.0.
+ * This class provides a default negative log normalization constant
+ * negLogConstant() == 0.0.
  *
  * There are four broad classes of conditionals that derive from Conditional:
  *
@@ -163,13 +165,12 @@ namespace gtsam {
     }

     /**
-     * All conditional types need to implement a log normalization constant to
-     * make it such that error>=0.
+     * @brief All conditional types need to implement this as the negative log
+     * of the normalization constant to make it such that error >= 0.
+     *
+     * @return double
      */
-    virtual double logNormalizationConstant() const;
-
-    /** Non-virtual, exponentiate logNormalizationConstant. */
-    double normalizationConstant() const;
+    virtual double negLogConstant() const;

     /// @}

     /// @name Advanced Interface
@@ -208,9 +209,9 @@ namespace gtsam {
    * - evaluate >= 0.0
    * - evaluate(x) == conditional(x)
    * - exp(logProbability(x)) == evaluate(x)
-   * - logNormalizationConstant() = log(normalizationConstant())
+   * - negLogConstant() = -log(normalizationConstant())
    * - error >= 0.0
-   * - logProbability(x) == logNormalizationConstant() - error(x)
+   * - logProbability(x) == -(negLogConstant() + error(x))
    *
    * @param conditional The conditional to test, as a reference to the derived type.
   * @tparam VALUES HybridValues, or a more narrow type like DiscreteValues.


@@ -243,23 +243,24 @@ namespace gtsam {
   }

   /* ************************************************************************* */
-  double GaussianBayesNet::logNormalizationConstant() const {
+  double GaussianBayesNet::negLogConstant() const {
     /*
       normalization constant = 1.0 / sqrt((2*pi)^n*det(Sigma))
-      logConstant = -0.5 * n*log(2*pi) - 0.5 * log det(Sigma)
+      negLogConstant = -log(normalizationConstant)
+        = 0.5 * n*log(2*pi) + 0.5 * log(det(Sigma))

-      log det(Sigma)) = -2.0 * logDeterminant()
-      thus, logConstant = -0.5*n*log(2*pi) + logDeterminant()
+      log(det(Sigma)) = -2.0 * logDeterminant()
+      thus, negLogConstant = 0.5*n*log(2*pi) - logDeterminant()

-      BayesNet logConstant = sum(-0.5*n_i*log(2*pi) + logDeterminant_i())
-        = sum(-0.5*n_i*log(2*pi)) + sum(logDeterminant_i())
-        = sum(-0.5*n_i*log(2*pi)) + bn->logDeterminant()
+      BayesNet negLogConstant = sum(0.5*n_i*log(2*pi) - logDeterminant_i())
+        = sum(0.5*n_i*log(2*pi)) - sum(logDeterminant_i())
+        = sum(0.5*n_i*log(2*pi)) - bn->logDeterminant()
     */
-    double logNormConst = 0.0;
+    double negLogNormConst = 0.0;
     for (const sharedConditional& cg : *this) {
-      logNormConst += cg->logNormalizationConstant();
+      negLogNormConst += cg->negLogConstant();
     }
-    return logNormConst;
+    return negLogNormConst;
   }

   /* ************************************************************************* */
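
The loop works because the Bayes net density is a product of conditional densities: normalization constants multiply, so their negative logs add. A numerical sanity check (plain C++, not from this commit; the two 1D conditionals are hypothetical):

```cpp
#include <cassert>
#include <cmath>
#include <vector>

int main() {
  // Hypothetical sigmas of two 1D Gaussian conditionals in a Bayes net.
  std::vector<double> sigmas = {0.5, 1.0};

  double product_k = 1.0;   // product of normalization constants
  double sum_negLog = 0.0;  // sum of per-conditional negLogConstants
  for (double s : sigmas) {
    double k = 1.0 / std::sqrt(2.0 * M_PI * s * s);
    product_k *= k;
    sum_negLog += -std::log(k);
  }

  // -log(prod k_i) == sum(-log k_i)
  assert(std::abs(sum_negLog + std::log(product_k)) < 1e-12);
  return 0;
}
```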


@@ -235,12 +235,12 @@ namespace gtsam {
     double logDeterminant() const;

     /**
-     * @brief Get the log of the normalization constant corresponding to the
-     * joint Gaussian density represented by this Bayes net.
+     * @brief Get the negative log of the normalization constant corresponding
+     * to the joint Gaussian density represented by this Bayes net.
      *
      * @return double
      */
-    double logNormalizationConstant() const;
+    double negLogConstant() const;

     /**
      * Backsubstitute with a different RHS vector than the one stored in this BayesNet.


@@ -121,7 +121,7 @@ namespace gtsam {
       const auto mean = solve({});  // solve for mean.
       mean.print("  mean", formatter);
     }
-    cout << "  logNormalizationConstant: " << logNormalizationConstant() << endl;
+    cout << "  logNormalizationConstant: " << -negLogConstant() << endl;
     if (model_)
       model_->print("  Noise model: ");
     else
@@ -181,24 +181,24 @@ namespace gtsam {
   /* ************************************************************************* */
   // normalization constant = 1.0 / sqrt((2*pi)^n*det(Sigma))
-  // log = - 0.5 * n*log(2*pi) - 0.5 * log det(Sigma)
-  double GaussianConditional::logNormalizationConstant() const {
+  // neg-log = 0.5 * n*log(2*pi) + 0.5 * log det(Sigma)
+  double GaussianConditional::negLogConstant() const {
     constexpr double log2pi = 1.8378770664093454835606594728112;
     size_t n = d().size();
     // Sigma = (R'R)^{-1}, det(Sigma) = det((R'R)^{-1}) = det(R'R)^{-1}
     // log det(Sigma) = -log(det(R'R)) = -2*log(det(R))
     // Hence, log det(Sigma)) = -2.0 * logDeterminant()
-    // which gives log = -0.5*n*log(2*pi) - 0.5*(-2.0 * logDeterminant())
-    //                 = -0.5*n*log(2*pi) + (0.5*2.0 * logDeterminant())
-    //                 = -0.5*n*log(2*pi) + logDeterminant()
-    return -0.5 * n * log2pi + logDeterminant();
+    // which gives neg-log = 0.5*n*log(2*pi) + 0.5*(-2.0 * logDeterminant())
+    //                     = 0.5*n*log(2*pi) - (0.5*2.0 * logDeterminant())
+    //                     = 0.5*n*log(2*pi) - logDeterminant()
+    return 0.5 * n * log2pi - logDeterminant();
   }

   /* ************************************************************************* */
   // density = k exp(-error(x))
   // log = log(k) - error(x)
   double GaussianConditional::logProbability(const VectorValues& x) const {
-    return logNormalizationConstant() - error(x);
+    return -(negLogConstant() + error(x));
   }

   double GaussianConditional::logProbability(const HybridValues& x) const {
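
For an isotropic conditional the new formula can be verified by hand: with R = I/sigma (n x n), logDeterminant() = log det(R) = -n*log(sigma), so 0.5*n*log(2*pi) - logDeterminant() equals 0.5*n*log(2*pi*sigma^2). A standalone check (plain C++, hypothetical n and sigma):

```cpp
#include <cassert>
#include <cmath>

int main() {
  int n = 3;           // hypothetical dimension
  double sigma = 2.0;  // hypothetical standard deviation

  // R = I/sigma, so logDeterminant() = log det(R) = -n*log(sigma).
  double logDeterminant = -n * std::log(sigma);

  // negLogConstant as computed above.
  double negLogConstant = 0.5 * n * std::log(2.0 * M_PI) - logDeterminant;

  // Direct form: -log(1/sqrt((2*pi*sigma^2)^n)) = 0.5*n*log(2*pi*sigma^2).
  double direct = 0.5 * n * std::log(2.0 * M_PI * sigma * sigma);
  assert(std::abs(negLogConstant - direct) < 1e-12);
  return 0;
}
```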


@@ -133,10 +133,14 @@ namespace gtsam {
     /// @{

     /**
-     * normalization constant = 1.0 / sqrt((2*pi)^n*det(Sigma))
-     * log = - 0.5 * n*log(2*pi) - 0.5 * log det(Sigma)
+     * @brief Return the negative log of the normalization constant.
+     *
+     * normalization constant k = 1.0 / sqrt((2*pi)^n*det(Sigma))
+     * -log(k) = 0.5 * n*log(2*pi) + 0.5 * log det(Sigma)
+     *
+     * @return double
      */
-    double logNormalizationConstant() const override;
+    double negLogConstant() const override;

     /**
      * Calculate log-probability log(evaluate(x)) for given values `x`:


@@ -255,18 +255,17 @@ double Gaussian::logDeterminant() const {
 }

 /* *******************************************************************************/
-double Gaussian::logNormalizationConstant() const {
+double Gaussian::negLogConstant() const {
   // log(det(Sigma)) = -2.0 * logDetR
-  // which gives log = -0.5*n*log(2*pi) - 0.5*(-2.0 * logDetR())
-  //                 = -0.5*n*log(2*pi) + (0.5*2.0 * logDetR())
-  //                 = -0.5*n*log(2*pi) + logDetR()
+  // which gives neg-log = 0.5*n*log(2*pi) + 0.5*(-2.0 * logDetR())
+  //                     = 0.5*n*log(2*pi) - (0.5*2.0 * logDetR())
+  //                     = 0.5*n*log(2*pi) - logDetR()
   size_t n = dim();
   constexpr double log2pi = 1.8378770664093454835606594728112;
-  // Get 1/log(\sqrt(|2pi Sigma|)) = -0.5*log(|2pi Sigma|)
-  return -0.5 * n * log2pi + logDetR();
+  // Get -log(1/\sqrt(|2pi Sigma|)) = 0.5*log(|2pi Sigma|)
+  return 0.5 * n * log2pi - logDetR();
 }

 /* ************************************************************************* */
 // Diagonal
 /* ************************************************************************* */


@@ -272,14 +272,12 @@ namespace gtsam {
       double logDeterminant() const;

       /**
-       * @brief Method to compute the normalization constant
-       * for a Gaussian noise model k = \sqrt(1/|2πΣ|).
-       * We compute this in the log-space for numerical accuracy,
-       * thus returning log(k).
+       * @brief Compute the negative log of the normalization constant
+       * for a Gaussian noise model k = 1/\sqrt(|2πΣ|).
        *
        * @return double
        */
-      double logNormalizationConstant() const;
+      double negLogConstant() const;

     private:
 #ifdef GTSAM_ENABLE_BOOST_SERIALIZATION


@@ -548,7 +548,7 @@ virtual class GaussianConditional : gtsam::JacobianFactor {
   bool equals(const gtsam::GaussianConditional& cg, double tol) const;

   // Standard Interface
-  double logNormalizationConstant() const;
+  double negLogConstant() const;
   double logProbability(const gtsam::VectorValues& x) const;
   double evaluate(const gtsam::VectorValues& x) const;
   double error(const gtsam::VectorValues& x) const;


@@ -76,12 +76,11 @@ TEST(GaussianBayesNet, Evaluate1) {
   // the normalization constant 1.0/sqrt((2*pi*Sigma).det()).
   // The covariance matrix inv(Sigma) = R'*R, so the determinant is
   const double constant = sqrt((invSigma / (2 * M_PI)).determinant());
-  EXPECT_DOUBLES_EQUAL(log(constant),
-                       smallBayesNet.at(0)->logNormalizationConstant() +
-                           smallBayesNet.at(1)->logNormalizationConstant(),
-                       1e-9);
-  EXPECT_DOUBLES_EQUAL(log(constant), smallBayesNet.logNormalizationConstant(),
+  EXPECT_DOUBLES_EQUAL(-log(constant),
+                       smallBayesNet.at(0)->negLogConstant() +
+                           smallBayesNet.at(1)->negLogConstant(),
                        1e-9);
+  EXPECT_DOUBLES_EQUAL(-log(constant), smallBayesNet.negLogConstant(), 1e-9);
   const double actual = smallBayesNet.evaluate(mean);
   EXPECT_DOUBLES_EQUAL(constant, actual, 1e-9);
 }


@@ -486,16 +486,17 @@ TEST(GaussianConditional, Error) {

 /* ************************************************************************* */
 // Similar test for multivariate gaussian but with sigma 2.0
-TEST(GaussianConditional, LogNormalizationConstant) {
+TEST(GaussianConditional, NegLogConstant) {
   double sigma = 2.0;
   auto conditional = GaussianConditional::FromMeanAndStddev(X(0), Vector3::Zero(), sigma);
   VectorValues x;
   x.insert(X(0), Vector3::Zero());
   Matrix3 Sigma = I_3x3 * sigma * sigma;
-  double expectedLogNormalizingConstant = log(1 / sqrt((2 * M_PI * Sigma).determinant()));
+  double expectedNegLogConstant =
+      -log(1 / sqrt((2 * M_PI * Sigma).determinant()));

-  EXPECT_DOUBLES_EQUAL(expectedLogNormalizingConstant,
-                       conditional.logNormalizationConstant(), 1e-9);
+  EXPECT_DOUBLES_EQUAL(expectedNegLogConstant, conditional.negLogConstant(),
+                       1e-9);
 }

 /* ************************************************************************* */


@@ -55,7 +55,7 @@ TEST(GaussianDensity, FromMeanAndStddev) {
   double expected1 = 0.5 * e.dot(e);
   EXPECT_DOUBLES_EQUAL(expected1, density.error(values), 1e-9);

-  double expected2 = density.logNormalizationConstant() - 0.5 * e.dot(e);
+  double expected2 = -(density.negLogConstant() + 0.5 * e.dot(e));
   EXPECT_DOUBLES_EQUAL(expected2, density.logProbability(values), 1e-9);
 }


@@ -807,50 +807,50 @@ TEST(NoiseModel, NonDiagonalGaussian)
   }
 }

-TEST(NoiseModel, LogNormalizationConstant1D) {
+TEST(NoiseModel, NegLogNormalizationConstant1D) {
   // Very simple 1D noise model, which we can compute by hand.
   double sigma = 0.1;

-  // For expected values, we compute 1/log(sqrt(|2πΣ|)) by hand.
-  // = -0.5*(log(2π) + log(Σ)) (since it is 1D)
-  double expected_value = -0.5 * log(2 * M_PI * sigma * sigma);
+  // For expected values, we compute -log(1/sqrt(|2πΣ|)) by hand.
+  // = 0.5*(log(2π) + log(Σ)) (since it is 1D)
+  double expected_value = 0.5 * log(2 * M_PI * sigma * sigma);

   // Gaussian
   {
     Matrix11 R;
     R << 1 / sigma;
     auto noise_model = Gaussian::SqrtInformation(R);
-    double actual_value = noise_model->logNormalizationConstant();
+    double actual_value = noise_model->negLogConstant();
     EXPECT_DOUBLES_EQUAL(expected_value, actual_value, 1e-9);
   }

   // Diagonal
   {
     auto noise_model = Diagonal::Sigmas(Vector1(sigma));
-    double actual_value = noise_model->logNormalizationConstant();
+    double actual_value = noise_model->negLogConstant();
     EXPECT_DOUBLES_EQUAL(expected_value, actual_value, 1e-9);
   }

   // Isotropic
   {
     auto noise_model = Isotropic::Sigma(1, sigma);
-    double actual_value = noise_model->logNormalizationConstant();
+    double actual_value = noise_model->negLogConstant();
     EXPECT_DOUBLES_EQUAL(expected_value, actual_value, 1e-9);
   }

   // Unit
   {
     auto noise_model = Unit::Create(1);
-    double actual_value = noise_model->logNormalizationConstant();
+    double actual_value = noise_model->negLogConstant();
     double sigma = 1.0;
-    expected_value = -0.5 * log(2 * M_PI * sigma * sigma);
+    expected_value = 0.5 * log(2 * M_PI * sigma * sigma);
     EXPECT_DOUBLES_EQUAL(expected_value, actual_value, 1e-9);
   }
 }

-TEST(NoiseModel, LogNormalizationConstant3D) {
+TEST(NoiseModel, NegLogNormalizationConstant3D) {
   // Simple 3D noise model, which we can compute by hand.
   double sigma = 0.1;
   size_t n = 3;

-  // We compute the expected values just like in the LogNormalizationConstant1D
+  // We compute the expected values just like in the NegLogNormalizationConstant1D
   // test, but we multiply by 3 due to the determinant.
-  double expected_value = -0.5 * n * log(2 * M_PI * sigma * sigma);
+  double expected_value = 0.5 * n * log(2 * M_PI * sigma * sigma);

   // Gaussian
   {
@@ -859,27 +859,27 @@ TEST(NoiseModel, LogNormalizationConstant3D) {
         0, 1 / sigma, 4,  //
         0, 0, 1 / sigma;
     auto noise_model = Gaussian::SqrtInformation(R);
-    double actual_value = noise_model->logNormalizationConstant();
+    double actual_value = noise_model->negLogConstant();
     EXPECT_DOUBLES_EQUAL(expected_value, actual_value, 1e-9);
   }

   // Diagonal
   {
     auto noise_model = Diagonal::Sigmas(Vector3(sigma, sigma, sigma));
-    double actual_value = noise_model->logNormalizationConstant();
+    double actual_value = noise_model->negLogConstant();
     EXPECT_DOUBLES_EQUAL(expected_value, actual_value, 1e-9);
   }

   // Isotropic
   {
     auto noise_model = Isotropic::Sigma(n, sigma);
-    double actual_value = noise_model->logNormalizationConstant();
+    double actual_value = noise_model->negLogConstant();
     EXPECT_DOUBLES_EQUAL(expected_value, actual_value, 1e-9);
   }

   // Unit
   {
     auto noise_model = Unit::Create(3);
-    double actual_value = noise_model->logNormalizationConstant();
+    double actual_value = noise_model->negLogConstant();
     double sigma = 1.0;
-    expected_value = -0.5 * n * log(2 * M_PI * sigma * sigma);
+    expected_value = 0.5 * n * log(2 * M_PI * sigma * sigma);
     EXPECT_DOUBLES_EQUAL(expected_value, actual_value, 1e-9);
   }
 }


@@ -90,8 +90,7 @@ class TestHybridBayesNet(GtsamTestCase):
             self.assertTrue(probability >= 0.0)
             logProb = conditional.logProbability(values)
             self.assertAlmostEqual(probability, np.exp(logProb))
-            expected = conditional.logNormalizationConstant() - \
-                conditional.error(values)
+            expected = -(conditional.negLogConstant() + conditional.error(values))
             self.assertAlmostEqual(logProb, expected)


@@ -14,15 +14,16 @@ from __future__ import print_function

 import unittest

-import gtsam
-from gtsam import (
-    DoglegOptimizer, DoglegParams, DummyPreconditionerParameters, GaussNewtonOptimizer,
-    GaussNewtonParams, GncLMParams, GncLossType, GncLMOptimizer, LevenbergMarquardtOptimizer,
-    LevenbergMarquardtParams, NonlinearFactorGraph, Ordering, PCGSolverParameters, Point2,
-    PriorFactorPoint2, Values
-)
 from gtsam.utils.test_case import GtsamTestCase

+import gtsam
+from gtsam import (DoglegOptimizer, DoglegParams,
+                   DummyPreconditionerParameters, GaussNewtonOptimizer,
+                   GaussNewtonParams, GncLMOptimizer, GncLMParams, GncLossType,
+                   LevenbergMarquardtOptimizer, LevenbergMarquardtParams,
+                   NonlinearFactorGraph, Ordering, PCGSolverParameters, Point2,
+                   PriorFactorPoint2, Values)
+
 KEY1 = 1
 KEY2 = 2
@@ -136,7 +137,7 @@ class TestScenario(GtsamTestCase):
         # Test optimizer params
         optimizer = GncLMOptimizer(self.fg, self.initial_values, params)
         for ict_factor in (0.9, 1.1):
-            new_ict = ict_factor * optimizer.getInlierCostThresholds()
+            new_ict = ict_factor * optimizer.getInlierCostThresholds().item()
             optimizer.setInlierCostThresholds(new_ict)
             self.assertAlmostEqual(optimizer.getInlierCostThresholds(), new_ict)
         for w_factor in (0.8, 0.9):