diff --git a/gtsam/hybrid/HybridGaussianConditional.cpp b/gtsam/hybrid/HybridGaussianConditional.cpp
index fb943366c..4ee5241a3 100644
--- a/gtsam/hybrid/HybridGaussianConditional.cpp
+++ b/gtsam/hybrid/HybridGaussianConditional.cpp
@@ -36,7 +36,7 @@ HybridGaussianFactor::FactorValuePairs GetFactorValuePairs(
     // Check if conditional is pruned
     if (conditional) {
      // Assign log(\sqrt(|2πΣ|)) = -log(1 / sqrt(|2πΣ|))
-      value = -conditional->logNormalizationConstant();
+      value = conditional->logNormalizationConstant();
    }
    return {std::dynamic_pointer_cast<GaussianFactor>(conditional), value};
  };
@@ -51,14 +51,14 @@ HybridGaussianConditional::HybridGaussianConditional(
           discreteParents, GetFactorValuePairs(conditionals)),
       BaseConditional(continuousFrontals.size()),
       conditionals_(conditionals) {
-  // Calculate logConstant_ as the minimum of the log normalizers of the
-  // conditionals, by visiting the decision tree:
+  // Calculate logConstant_ as the minimum of the negative-log normalizers of
+  // the conditionals, by visiting the decision tree:
   logConstant_ = std::numeric_limits<double>::infinity();
   conditionals_.visit(
       [this](const GaussianConditional::shared_ptr &conditional) {
         if (conditional) {
           this->logConstant_ = std::min(
-              this->logConstant_, -conditional->logNormalizationConstant());
+              this->logConstant_, conditional->logNormalizationConstant());
         }
       });
 }
@@ -85,7 +85,7 @@ GaussianFactorGraphTree HybridGaussianConditional::asGaussianFactorGraphTree()
     // First check if conditional has not been pruned
     if (gc) {
       const double Cgm_Kgcm =
-          -this->logConstant_ - gc->logNormalizationConstant();
+          gc->logNormalizationConstant() - this->logConstant_;
       // If there is a difference in the covariances, we need to account for
       // that since the error is dependent on the mode.
       if (Cgm_Kgcm > 0.0) {
@@ -216,7 +216,7 @@ std::shared_ptr<HybridGaussianFactor> HybridGaussianConditional::likelihood(
       -> GaussianFactorValuePair {
     const auto likelihood_m = conditional->likelihood(given);
     const double Cgm_Kgcm =
-        -logConstant_ - conditional->logNormalizationConstant();
+        conditional->logNormalizationConstant() - logConstant_;
     if (Cgm_Kgcm == 0.0) {
       return {likelihood_m, 0.0};
     } else {
diff --git a/gtsam/hybrid/HybridGaussianConditional.h b/gtsam/hybrid/HybridGaussianConditional.h
index 4a5fdcc89..676c45f61 100644
--- a/gtsam/hybrid/HybridGaussianConditional.h
+++ b/gtsam/hybrid/HybridGaussianConditional.h
@@ -152,7 +152,7 @@ class GTSAM_EXPORT HybridGaussianConditional
 
-  /// The log normalization constant is max of the the individual
-  /// log-normalization constants.
-  double logNormalizationConstant() const override { return -logConstant_; }
+  /// The log normalization constant is the minimum of the individual
+  /// log-normalization constants.
+  double logNormalizationConstant() const override { return logConstant_; }
 
   /**
    * Create a likelihood factor for a hybrid Gaussian conditional,
diff --git a/gtsam/hybrid/HybridGaussianFactorGraph.cpp b/gtsam/hybrid/HybridGaussianFactorGraph.cpp
index a6fe955eb..ff9ca7a55 100644
--- a/gtsam/hybrid/HybridGaussianFactorGraph.cpp
+++ b/gtsam/hybrid/HybridGaussianFactorGraph.cpp
@@ -329,9 +329,9 @@ static std::shared_ptr<Factor> createDiscreteFactor(
 
     // Logspace version of:
     // exp(-factor->error(kEmpty)) / conditional->normalizationConstant();
-    // We take negative of the logNormalizationConstant `log(k)`
-    // to get `log(1/k) = log(\sqrt{|2πΣ|})`.
-    return -factor->error(kEmpty) - conditional->logNormalizationConstant();
+    // logNormalizationConstant gives `-log(k)`,
+    // which is `log(1/k) = log(\sqrt{|2πΣ|})`.
+    return -factor->error(kEmpty) + conditional->logNormalizationConstant();
   };
 
 AlgebraicDecisionTree<Key> logProbabilities(
@@ -355,8 +355,9 @@ static std::shared_ptr<Factor> createHybridGaussianFactor(
       auto hf = std::dynamic_pointer_cast<HessianFactor>(factor);
       if (!hf) throw std::runtime_error("Expected HessianFactor!");
       // Add 2.0 term since the constant term will be premultiplied by 0.5
-      // as per the Hessian definition
-      hf->constantTerm() += 2.0 * conditional->logNormalizationConstant();
+      // as per the Hessian definition,
+      // and negative since we want log(k)
+      hf->constantTerm() += -2.0 * conditional->logNormalizationConstant();
     }
     return {factor, 0.0};
   };
diff --git a/gtsam/hybrid/tests/testHybridGaussianConditional.cpp b/gtsam/hybrid/tests/testHybridGaussianConditional.cpp
index dde493685..040cd2ff0 100644
--- a/gtsam/hybrid/tests/testHybridGaussianConditional.cpp
+++ b/gtsam/hybrid/tests/testHybridGaussianConditional.cpp
@@ -180,12 +180,13 @@ TEST(HybridGaussianConditional, Error2) {
 
   // Check result.
   DiscreteKeys discrete_keys{mode};
-  double logNormalizer0 = -conditionals[0]->logNormalizationConstant();
-  double logNormalizer1 = -conditionals[1]->logNormalizationConstant();
+  double logNormalizer0 = conditionals[0]->logNormalizationConstant();
+  double logNormalizer1 = conditionals[1]->logNormalizationConstant();
   double minLogNormalizer = std::min(logNormalizer0, logNormalizer1);
-  // Expected error is e(X) + log(|2πΣ|).
-  // We normalize log(|2πΣ|) with min(logNormalizers) so it is non-negative.
+  // Expected error is e(X) + log(sqrt(|2πΣ|)).
+  // We normalize log(sqrt(|2πΣ|)) with min(logNormalizers)
+  // so it is non-negative.
   std::vector<double> leaves = {
       conditionals[0]->error(vv) + logNormalizer0 - minLogNormalizer,
       conditionals[1]->error(vv) + logNormalizer1 - minLogNormalizer};
@@ -196,7 +197,7 @@ TEST(HybridGaussianConditional, Error2) {
   // Check for non-tree version.
   for (size_t mode : {0, 1}) {
     const HybridValues hv{vv, {{M(0), mode}}};
-    EXPECT_DOUBLES_EQUAL(conditionals[mode]->error(vv) -
+    EXPECT_DOUBLES_EQUAL(conditionals[mode]->error(vv) +
                              conditionals[mode]->logNormalizationConstant() -
                              minLogNormalizer,
                          hybrid_conditional.error(hv), 1e-8);
@@ -230,8 +231,8 @@ TEST(HybridGaussianConditional, Likelihood2) {
   CHECK(jf1->rows() == 2);
 
   // Check that the constant C1 is properly encoded in the JacobianFactor.
-  const double C1 = hybrid_conditional.logNormalizationConstant() -
-                    conditionals[1]->logNormalizationConstant();
+  const double C1 = conditionals[1]->logNormalizationConstant() -
+                    hybrid_conditional.logNormalizationConstant();
   const double c1 = std::sqrt(2.0 * C1);
   Vector expected_unwhitened(2);
   expected_unwhitened << 4.9 - 5.0, -c1;
diff --git a/gtsam/hybrid/tests/testHybridGaussianFactor.cpp b/gtsam/hybrid/tests/testHybridGaussianFactor.cpp
index bfa283983..cd5f9bf07 100644
--- a/gtsam/hybrid/tests/testHybridGaussianFactor.cpp
+++ b/gtsam/hybrid/tests/testHybridGaussianFactor.cpp
@@ -773,8 +773,8 @@ static HybridGaussianFactorGraph CreateFactorGraph(
-  // We take negative since we want
-  // the underlying scalar to be log(\sqrt(|2πΣ|))
+  // logNormalizationConstant already gives -log(k),
+  // so the underlying scalar is log(\sqrt(|2πΣ|))
   std::vector<GaussianFactorValuePair> factors{
-      {f0, -model0->logNormalizationConstant()},
-      {f1, -model1->logNormalizationConstant()}};
+      {f0, model0->logNormalizationConstant()},
+      {f1, model1->logNormalizationConstant()}};
   HybridGaussianFactor motionFactor({X(0), X(1)}, m1, factors);
 
   HybridGaussianFactorGraph hfg;
diff --git a/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp b/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp
index 347cc5f1f..cd9b24b37 100644
--- a/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp
+++ b/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp
@@ -714,26 +714,26 @@ factor 6: P( m1 | m0 ):
 size: 3
 conditional 0: Hybrid P( x0 | x1 m0)
  Discrete Keys = (m0, 2),
- logNormalizationConstant: 1.38862
+ logNormalizationConstant: -1.38862
  Choice(m0)
  0 Leaf p(x0 | x1)
   R = [ 10.0499 ]
   S[x1] = [ -0.0995037 ]
   d = [ -9.85087 ]
-  logNormalizationConstant: 1.38862
+  logNormalizationConstant: -1.38862
   No noise model
 
  1 Leaf p(x0 | x1)
   R = [ 10.0499 ]
   S[x1] = [ -0.0995037 ]
   d = [ -9.95037 ]
-  logNormalizationConstant: 1.38862
+  logNormalizationConstant: -1.38862
   No noise model
 
 conditional 1: Hybrid P( x1 | x2 m0 m1)
  Discrete Keys = (m0, 2), (m1, 2),
- logNormalizationConstant: 1.3935
+ logNormalizationConstant: -1.3935
  Choice(m1)
  0 Choice(m0)
@@ -741,14 +741,14 @@ conditional 1: Hybrid P( x1 | x2 m0 m1)
   R = [ 10.099 ]
   S[x2] = [ -0.0990196 ]
   d = [ -9.99901 ]
-  logNormalizationConstant: 1.3935
+  logNormalizationConstant: -1.3935
   No noise model
 
  0 1 Leaf p(x1 | x2)
   R = [ 10.099 ]
   S[x2] = [ -0.0990196 ]
   d = [ -9.90098 ]
-  logNormalizationConstant: 1.3935
+  logNormalizationConstant: -1.3935
   No noise model
 
  1 Choice(m0)
@@ -756,19 +756,19 @@ conditional 1: Hybrid P( x1 | x2 m0 m1)
   R = [ 10.099 ]
   S[x2] = [ -0.0990196 ]
   d = [ -10.098 ]
-  logNormalizationConstant: 1.3935
+  logNormalizationConstant: -1.3935
   No noise model
 
  1 1 Leaf p(x1 | x2)
   R = [ 10.099 ]
   S[x2] = [ -0.0990196 ]
   d = [ -10 ]
-  logNormalizationConstant: 1.3935
+  logNormalizationConstant: -1.3935
   No noise model
 
 conditional 2: Hybrid P( x2 | m0 m1)
  Discrete Keys = (m0, 2), (m1, 2),
- logNormalizationConstant: 1.38857
+ logNormalizationConstant: -1.38857
  Choice(m1)
  0 Choice(m0)
@@ -777,7 +777,7 @@ conditional 2: Hybrid P( x2 | m0 m1)
   d = [ -10.1489 ]
  mean: 1 elements
   x2: -1.0099
-  logNormalizationConstant: 1.38857
+  logNormalizationConstant: -1.38857
   No noise model
 
  0 1 Leaf p(x2)
@@ -785,7 +785,7 @@ conditional 2: Hybrid P( x2 | m0 m1)
   d = [ -10.1479 ]
  mean: 1 elements
   x2: -1.0098
-  logNormalizationConstant: 1.38857
+  logNormalizationConstant: -1.38857
   No noise model
 
  1 Choice(m0)
@@ -794,7 +794,7 @@ conditional 2: Hybrid P( x2 | m0 m1)
   d = [ -10.0504 ]
  mean: 1 elements
   x2: -1.0001
-  logNormalizationConstant: 1.38857
+  logNormalizationConstant: -1.38857
   No noise model
 
  1 1 Leaf p(x2)
@@ -802,7 +802,7 @@ conditional 2: Hybrid P( x2 | m0 m1)
   d = [ -10.0494 ]
  mean: 1 elements
   x2: -1
-  logNormalizationConstant: 1.38857
+  logNormalizationConstant: -1.38857
   No noise model
 
 )";
@@ -903,8 +903,8 @@ static HybridNonlinearFactorGraph CreateFactorGraph(
-  // We take negative since we want
-  // the underlying scalar to be log(\sqrt(|2πΣ|))
+  // logNormalizationConstant already gives -log(k),
+  // so the underlying scalar is log(\sqrt(|2πΣ|))
   std::vector<NonlinearFactorValuePair> factors{
-      {f0, -model0->logNormalizationConstant()},
-      {f1, -model1->logNormalizationConstant()}};
+      {f0, model0->logNormalizationConstant()},
+      {f1, model1->logNormalizationConstant()}};
   HybridNonlinearFactor mixtureFactor({X(0), X(1)}, m1, factors);
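Reviewer sanity check on the sign convention this patch settles on: logNormalizationConstant() now returns -log(k) = log(\sqrt{|2πΣ|}), the hybrid logConstant_ is the minimum of the per-mode values, and every per-mode correction Cgm_Kgcm is therefore non-negative. The sketch below is plain standalone C++ with no GTSAM dependency; the helper name only mirrors the GTSAM method and the two sigmas in the second part are hypothetical. It reproduces the -1.38862 printed for the R = [ 10.0499 ] leaves in the expected test output above, and demonstrates Cgm_Kgcm >= 0.

#include <algorithm>
#include <cmath>
#include <cstdio>
#include <limits>
#include <vector>

// New convention: return -log(k) = log(sqrt(2*pi*sigma^2)) for a 1-D
// Gaussian with standard deviation sigma, where k = 1/sqrt(2*pi*sigma^2).
static double logNormalizationConstant(double sigma) {
  const double kPi = 3.141592653589793;
  return 0.5 * std::log(2.0 * kPi * sigma * sigma);
}

int main() {
  // R = 10.0499 in the expected output corresponds to sigma = 1/R and
  // should reproduce the printed constant -1.38862.
  std::printf("-log(k) = %.5f (expected -1.38862)\n",
              logNormalizationConstant(1.0 / 10.0499));

  // logConstant_ is the minimum of the per-mode constants, so the per-mode
  // correction Cgm_Kgcm = (-log k_m) - logConstant_ is non-negative, as
  // asGaussianFactorGraphTree() and likelihood() assume.
  const std::vector<double> sigmas = {0.5, 2.0};  // hypothetical mode sigmas
  double logConstant = std::numeric_limits<double>::infinity();
  for (double sigma : sigmas)
    logConstant = std::min(logConstant, logNormalizationConstant(sigma));
  for (double sigma : sigmas) {
    const double Cgm_Kgcm = logNormalizationConstant(sigma) - logConstant;
    std::printf("Cgm_Kgcm = %.5f (>= 0)\n", Cgm_Kgcm);
  }
  return 0;
}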