From 0f23eedbd063fe982f913392267b02b3e3617d30 Mon Sep 17 00:00:00 2001
From: Varun Agrawal
Date: Mon, 19 Aug 2024 11:39:02 -0400
Subject: [PATCH 01/10] fix issue in geometry.i file

---
 gtsam/geometry/geometry.i | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/gtsam/geometry/geometry.i b/gtsam/geometry/geometry.i
index 3d816fc25..095a350dd 100644
--- a/gtsam/geometry/geometry.i
+++ b/gtsam/geometry/geometry.i
@@ -856,7 +856,7 @@ class Cal3_S2Stereo {
   gtsam::Matrix K() const;
   gtsam::Point2 principalPoint() const;
   double baseline() const;
-  Vector6 vector() const;
+  gtsam::Vector6 vector() const;
   gtsam::Matrix inverse() const;
 };
 

From 8d54c4abe0fea13334fb2f6927639e53b250fae9 Mon Sep 17 00:00:00 2001
From: Varun Agrawal
Date: Tue, 20 Aug 2024 16:40:13 -0400
Subject: [PATCH 02/10] update VectorValues::== docstring

---
 gtsam/linear/VectorValues.h | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/gtsam/linear/VectorValues.h b/gtsam/linear/VectorValues.h
index 99ee4eb83..2fa50b7f6 100644
--- a/gtsam/linear/VectorValues.h
+++ b/gtsam/linear/VectorValues.h
@@ -263,7 +263,7 @@ namespace gtsam {
     /** equals required by Testable for unit testing */
     bool equals(const VectorValues& x, double tol = 1e-9) const;
 
-    /// Check exact equality.
+    /// Check equality.
     friend bool operator==(const VectorValues& lhs, const VectorValues& rhs) {
       return lhs.equals(rhs);
     }

From a78ffe19e81d6ae41b385535046d1bd5cbfcf938 Mon Sep 17 00:00:00 2001
From: Varun Agrawal
Date: Tue, 20 Aug 2024 16:45:19 -0400
Subject: [PATCH 03/10] update unit test to also check for GaussianBayesNet::logNormalizationConstant

---
 gtsam/linear/tests/testGaussianBayesNet.cpp | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/gtsam/linear/tests/testGaussianBayesNet.cpp b/gtsam/linear/tests/testGaussianBayesNet.cpp
index 966b70915..99453ee4e 100644
--- a/gtsam/linear/tests/testGaussianBayesNet.cpp
+++ b/gtsam/linear/tests/testGaussianBayesNet.cpp
@@ -80,6 +80,8 @@ TEST(GaussianBayesNet, Evaluate1) {
                           smallBayesNet.at(0)->logNormalizationConstant() +
                           smallBayesNet.at(1)->logNormalizationConstant(),
                       1e-9);
+  EXPECT_DOUBLES_EQUAL(log(constant), smallBayesNet.logNormalizationConstant(),
+                       1e-9);
   const double actual = smallBayesNet.evaluate(mean);
   EXPECT_DOUBLES_EQUAL(constant, actual, 1e-9);
 }
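The identity exercised by the new assertion in PATCH 03 is worth one line of algebra. A Gaussian Bayes net is a product of Gaussian conditionals, and each conditional carries a normalization constant, so the constant of the whole net factors. In symbols (notation ours, for exposition, not taken from the test):

\[
p(x) = \prod_i k_i \, e^{-E_i(x)}, \qquad
k_i = \frac{1}{\sqrt{\lvert 2\pi\Sigma_i \rvert}}
\quad\Longrightarrow\quad
\log K_{\mathrm{BN}} = \sum_i \log k_i ,
\]

which is exactly what the test now checks twice: log(constant) against the sum of the two per-conditional logNormalizationConstant() calls, and against the Bayes net's own logNormalizationConstant().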
From 910300b810cb32dcabf02e6286918f2442de6fec Mon Sep 17 00:00:00 2001
From: Varun Agrawal
Date: Tue, 20 Aug 2024 17:43:48 -0400
Subject: [PATCH 04/10] fix test

---
 gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp | 10 ++++++++++
 1 file changed, 10 insertions(+)

diff --git a/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp b/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp
index 93081d309..751e84d91 100644
--- a/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp
+++ b/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp
@@ -680,12 +680,14 @@ conditional 0: Hybrid P( x0 | x1 m0)
   R = [ 10.0499 ]
   S[x1] = [ -0.0995037 ]
   d = [ -9.85087 ]
+  logNormalizationConstant: 1.38862
   No noise model
 
 1 Leaf p(x0 | x1)
   R = [ 10.0499 ]
   S[x1] = [ -0.0995037 ]
   d = [ -9.95037 ]
+  logNormalizationConstant: 1.38862
   No noise model
 
 conditional 1: Hybrid P( x1 | x2 m0 m1)
@@ -696,12 +698,14 @@ conditional 1: Hybrid P( x1 | x2 m0 m1)
    R = [ 10.099 ]
    S[x2] = [ -0.0990196 ]
    d = [ -9.99901 ]
+   logNormalizationConstant: 1.3935
    No noise model
 
  0 1 Leaf p(x1 | x2)
    R = [ 10.099 ]
    S[x2] = [ -0.0990196 ]
    d = [ -9.90098 ]
+   logNormalizationConstant: 1.3935
    No noise model
 
  1 Choice(m0)
@@ -709,12 +713,14 @@ conditional 1: Hybrid P( x1 | x2 m0 m1)
    R = [ 10.099 ]
    S[x2] = [ -0.0990196 ]
    d = [ -10.098 ]
+   logNormalizationConstant: 1.3935
    No noise model
 
  1 1 Leaf p(x1 | x2)
    R = [ 10.099 ]
    S[x2] = [ -0.0990196 ]
    d = [ -10 ]
+   logNormalizationConstant: 1.3935
    No noise model
 
 conditional 2: Hybrid P( x2 | m0 m1)
@@ -726,6 +732,7 @@ conditional 2: Hybrid P( x2 | m0 m1)
    d = [ -10.1489 ]
   mean: 1 elements
   x2: -1.0099
+   logNormalizationConstant: 1.38857
    No noise model
 
  0 1 Leaf p(x2)
@@ -733,6 +740,7 @@ conditional 2: Hybrid P( x2 | m0 m1)
    d = [ -10.1479 ]
   mean: 1 elements
   x2: -1.0098
+   logNormalizationConstant: 1.38857
    No noise model
 
  1 Choice(m0)
@@ -741,6 +749,7 @@ conditional 2: Hybrid P( x2 | m0 m1)
    d = [ -10.0504 ]
   mean: 1 elements
   x2: -1.0001
+   logNormalizationConstant: 1.38857
    No noise model
 
  1 1 Leaf p(x2)
@@ -748,6 +757,7 @@ conditional 2: Hybrid P( x2 | m0 m1)
    d = [ -10.0494 ]
   mean: 1 elements
   x2: -1
+   logNormalizationConstant: 1.38857
    No noise model
 )";

From 73d971a3c6aef8985e947d824ba96ba2ebaae0a6 Mon Sep 17 00:00:00 2001
From: Varun Agrawal
Date: Wed, 21 Aug 2024 04:12:21 -0400
Subject: [PATCH 05/10] unit tests for AlgebraicDecisionTree helper methods

---
 .../tests/testAlgebraicDecisionTree.cpp | 49 +++++++++++++++++++
 1 file changed, 49 insertions(+)

diff --git a/gtsam/discrete/tests/testAlgebraicDecisionTree.cpp b/gtsam/discrete/tests/testAlgebraicDecisionTree.cpp
index 19f4686c2..d65a9c82b 100644
--- a/gtsam/discrete/tests/testAlgebraicDecisionTree.cpp
+++ b/gtsam/discrete/tests/testAlgebraicDecisionTree.cpp
@@ -593,6 +593,55 @@ TEST(ADT, zero) {
   EXPECT_DOUBLES_EQUAL(0, anotb(x11), 1e-9);
 }
 
+/// Example ADT from 0 to 11.
+ADT exampleADT() {
+  DiscreteKey A(0, 2), B(1, 3), C(2, 2);
+  ADT f(A & B & C, "0 6 2 8 4 10 1 7 3 9 5 11");
+  return f;
+}
+/* ************************************************************************** */
+// Test sum
+TEST(ADT, Sum) {
+  ADT a = exampleADT();
+  double expected_sum = 0;
+  for (double i = 0; i < 12; i++) {
+    expected_sum += i;
+  }
+  EXPECT_DOUBLES_EQUAL(expected_sum, a.sum(), 1e-9);
+}
+
+/* ************************************************************************** */
+// Test normalize
+TEST(ADT, Normalize) {
+  ADT a = exampleADT();
+  double sum = a.sum();
+  auto actual = a.normalize(sum);
+
+  DiscreteKey A(0, 2), B(1, 3), C(2, 2);
+  DiscreteKeys keys = DiscreteKeys{A, B, C};
+  std::vector<double> cpt{0 / sum, 6 / sum, 2 / sum, 8 / sum,
+                          4 / sum, 10 / sum, 1 / sum, 7 / sum,
+                          3 / sum, 9 / sum, 5 / sum, 11 / sum};
+  ADT expected(keys, cpt);
+  EXPECT(assert_equal(expected, actual));
+}
+
+/* ************************************************************************** */
+// Test min
+TEST(ADT, Min) {
+  ADT a = exampleADT();
+  double min = a.min();
+  EXPECT_DOUBLES_EQUAL(0.0, min, 1e-9);
+}
+
+/* ************************************************************************** */
+// Test max
+TEST(ADT, Max) {
+  ADT a = exampleADT();
+  double max = a.max();
+  EXPECT_DOUBLES_EQUAL(11.0, max, 1e-9);
+}
+
 /* ************************************************************************* */
 int main() {
   TestResult tr;
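The four helpers exercised by PATCH 05 compose into a one-step normalization of a discrete table: sum() gives the partition constant, normalize() divides it out, and min()/max() bound the entries. A minimal standalone sketch in the same spirit as the fixture above (the ADT alias mirrors the one the test file presumably defines; the program itself is illustrative, not part of the patch):

#include <gtsam/discrete/AlgebraicDecisionTree.h>
#include <gtsam/discrete/DiscreteKey.h>

using namespace gtsam;
using ADT = AlgebraicDecisionTree<Key>;

int main() {
  // Same 2x3x2 table as exampleADT() in the test.
  DiscreteKey A(0, 2), B(1, 3), C(2, 2);
  ADT f(A & B & C, "0 6 2 8 4 10 1 7 3 9 5 11");
  double total = f.sum();             // 0 + 1 + ... + 11 = 66
  ADT p = f.normalize(total);         // entries now sum to 1
  double lo = p.min(), hi = p.max();  // 0 and 11/66, respectively
  return (lo >= 0.0 && hi <= 1.0) ? 0 : 1;
}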
From 79c7c6a8b6f4bb2945ea6ac621fc3c3a1f66aaa6 Mon Sep 17 00:00:00 2001
From: Varun Agrawal
Date: Wed, 21 Aug 2024 20:10:21 -0400
Subject: [PATCH 06/10] provide logNormalizers directly to the augment method

---
 gtsam/hybrid/GaussianMixtureFactor.cpp | 66 ++++++++++----------------
 gtsam/hybrid/GaussianMixtureFactor.h   | 20 ++++----
 2 files changed, 37 insertions(+), 49 deletions(-)

diff --git a/gtsam/hybrid/GaussianMixtureFactor.cpp b/gtsam/hybrid/GaussianMixtureFactor.cpp
index 7fb16f0d1..0427eef7b 100644
--- a/gtsam/hybrid/GaussianMixtureFactor.cpp
+++ b/gtsam/hybrid/GaussianMixtureFactor.cpp
@@ -35,45 +35,17 @@ namespace gtsam {
  * the `b` vector as an additional row.
  *
  * @param factors DecisionTree of GaussianFactor shared pointers.
- * @param varyingNormalizers Flag indicating the normalizers are different for
- * each component.
+ * @param logNormalizers Tree of log-normalizers corresponding to each
+ * Gaussian factor in factors.
 * @return GaussianMixtureFactor::Factors
 */
 GaussianMixtureFactor::Factors augment(
-    const GaussianMixtureFactor::Factors &factors, bool varyingNormalizers) {
-  if (!varyingNormalizers) {
-    return factors;
-  }
-
-  // First compute all the sqrt(|2 pi Sigma|) terms
-  auto computeNormalizers = [](const GaussianMixtureFactor::sharedFactor &gf) {
-    auto jf = std::dynamic_pointer_cast<JacobianFactor>(gf);
-    // If we have, say, a Hessian factor, then no need to do anything
-    if (!jf) return 0.0;
-
-    auto model = jf->get_model();
-    // If there is no noise model, there is nothing to do.
-    if (!model) {
-      return 0.0;
-    }
-    // Since noise models are Gaussian, we can get the logDeterminant using the
-    // same trick as in GaussianConditional
-    double logDetR =
-        model->R().diagonal().unaryExpr([](double x) { return log(x); }).sum();
-    double logDeterminantSigma = -2.0 * logDetR;
-
-    size_t n = model->dim();
-    constexpr double log2pi = 1.8378770664093454835606594728112;
-    return n * log2pi + logDeterminantSigma;
-  };
-
-  AlgebraicDecisionTree<double> log_normalizers =
-      DecisionTree<double>(factors, computeNormalizers);
-
+    const GaussianMixtureFactor::Factors &factors,
+    const AlgebraicDecisionTree<double> &logNormalizers) {
   // Find the minimum value so we can shift the tree to non-negative values.
   // Done because we can't have sqrt of negative numbers.
-  double min_log_normalizer = log_normalizers.min();
-  log_normalizers = log_normalizers.apply(
+  double min_log_normalizer = logNormalizers.min();
+  AlgebraicDecisionTree<double> log_normalizers = logNormalizers.apply(
       [&min_log_normalizer](double n) { return n - min_log_normalizer; });
 
   // Finally, update the [A|b] matrices.
@@ -82,8 +54,6 @@ GaussianMixtureFactor::Factors augment(
                     const GaussianMixtureFactor::sharedFactor &gf) {
     auto jf = std::dynamic_pointer_cast<JacobianFactor>(gf);
     if (!jf) return gf;
-    // If there is no noise model, there is nothing to do.
-    if (!jf->get_model()) return gf;
     // If the log_normalizer is 0, do nothing
     if (log_normalizers(assignment) == 0.0) return gf;
 
@@ -102,12 +72,11 @@ GaussianMixtureFactor::Factors augment(
 }
 
 /* *******************************************************************************/
-GaussianMixtureFactor::GaussianMixtureFactor(const KeyVector &continuousKeys,
-                                             const DiscreteKeys &discreteKeys,
-                                             const Factors &factors,
-                                             bool varyingNormalizers)
+GaussianMixtureFactor::GaussianMixtureFactor(
+    const KeyVector &continuousKeys, const DiscreteKeys &discreteKeys,
+    const Factors &factors, const AlgebraicDecisionTree<double> &logNormalizers)
     : Base(continuousKeys, discreteKeys),
-      factors_(augment(factors, varyingNormalizers)) {}
+      factors_(augment(factors, logNormalizers)) {}
 
 /* *******************************************************************************/
 bool GaussianMixtureFactor::equals(const HybridFactor &lf, double tol) const {
@@ -194,6 +163,21 @@ double GaussianMixtureFactor::error(const HybridValues &values) const {
   const sharedFactor gf = factors_(values.discrete());
   return gf->error(values.continuous());
 }
+
+/* *******************************************************************************/
+double ComputeLogNormalizer(
+    const noiseModel::Gaussian::shared_ptr &noise_model) {
+  // Since noise models are Gaussian, we can get the logDeterminant using
+  // the same trick as in GaussianConditional
+  double logDetR = noise_model->R()
+                       .diagonal()
+                       .unaryExpr([](double x) { return log(x); })
+                       .sum();
+  double logDeterminantSigma = -2.0 * logDetR;
+
+  size_t n = noise_model->dim();
+  constexpr double log2pi = 1.8378770664093454835606594728112;
+  return n * log2pi + logDeterminantSigma;
+}
 
 }  // namespace gtsam
diff --git a/gtsam/hybrid/GaussianMixtureFactor.h b/gtsam/hybrid/GaussianMixtureFactor.h
index 588501bbe..6e9f6034e 100644
--- a/gtsam/hybrid/GaussianMixtureFactor.h
+++ b/gtsam/hybrid/GaussianMixtureFactor.h
@@ -82,13 +82,14 @@ class GTSAM_EXPORT GaussianMixtureFactor : public HybridFactor {
   * their cardinalities.
   * @param factors The decision tree of Gaussian factors stored as the mixture
   * density.
-  * @param varyingNormalizers Flag indicating factor components have varying
-  * normalizer values.
+  * @param logNormalizers Tree of log-normalizers corresponding to each
+  * Gaussian factor in factors.
   */
  GaussianMixtureFactor(const KeyVector &continuousKeys,
                        const DiscreteKeys &discreteKeys,
                        const Factors &factors,
-                       bool varyingNormalizers = false);
+                       const AlgebraicDecisionTree<double> &logNormalizers =
+                           AlgebraicDecisionTree<double>(0.0));
 
  /**
   * @brief Construct a new GaussianMixtureFactor object using a vector of
@@ -97,16 +98,16 @@ class GTSAM_EXPORT GaussianMixtureFactor : public HybridFactor {
   * @param continuousKeys Vector of keys for continuous factors.
   * @param discreteKeys Vector of discrete keys.
   * @param factors Vector of gaussian factor shared pointers.
-  * @param varyingNormalizers Flag indicating factor components have varying
-  * normalizer values.
+  * @param logNormalizers Tree of log-normalizers corresponding to each
+  * Gaussian factor in factors.
   */
  GaussianMixtureFactor(const KeyVector &continuousKeys,
                        const DiscreteKeys &discreteKeys,
                        const std::vector<GaussianFactor::shared_ptr> &factors,
-                       bool varyingNormalizers = false)
+                       const AlgebraicDecisionTree<double> &logNormalizers =
+                           AlgebraicDecisionTree<double>(0.0))
      : GaussianMixtureFactor(continuousKeys, discreteKeys,
-                             Factors(discreteKeys, factors),
-                             varyingNormalizers) {}
+                             Factors(discreteKeys, factors), logNormalizers) {}
 
  /// @}
  /// @name Testable
@@ -178,4 +179,7 @@ template <>
 struct traits<GaussianMixtureFactor> : public Testable<GaussianMixtureFactor> {
 };
 
+double ComputeLogNormalizer(
+    const noiseModel::Gaussian::shared_ptr &noise_model);
+
 }  // namespace gtsam
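Two numerical facts make the augment() trick in PATCH 06 work; spelling them out with our own symbols (not taken from the code). First, ComputeLogNormalizer: a Gaussian noise model stores the whitening matrix R with Sigma = (R^T R)^{-1}, so

\[
\log\det\Sigma = -2\sum_i \log R_{ii}, \qquad
\ell \;=\; \log\lvert 2\pi\Sigma\rvert \;=\; n\log 2\pi + \log\det\Sigma,
\]

which is the n * log2pi + logDeterminantSigma the function returns. Second, the appended row: a variable-free JacobianFactor with right-hand side c contributes c^2/2 to the factor error, so with c = sqrt(ell_m - ell_min) every component's negative log-density is raised by (ell_m - ell_min)/2, i.e. by its gap in log-normalization constant relative to the best component (since log k_m = -ell_m/2).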
From 9a6d2cf32384d437960def61cd3d547c5f73f41e Mon Sep 17 00:00:00 2001
From: Varun Agrawal
Date: Wed, 21 Aug 2024 20:10:40 -0400
Subject: [PATCH 07/10] update GaussianMixture::likelihood to compute the logNormalizers

---
 gtsam/hybrid/GaussianMixture.cpp | 19 ++++++++++++++++++-
 1 file changed, 18 insertions(+), 1 deletion(-)

diff --git a/gtsam/hybrid/GaussianMixture.cpp b/gtsam/hybrid/GaussianMixture.cpp
index 1440f7e9e..6c92f0252 100644
--- a/gtsam/hybrid/GaussianMixture.cpp
+++ b/gtsam/hybrid/GaussianMixture.cpp
@@ -202,8 +202,25 @@ std::shared_ptr<GaussianMixtureFactor> GaussianMixture::likelihood(
         const auto likelihood_m = conditional->likelihood(given);
         return likelihood_m;
       });
+
+  // First compute all the sqrt(|2 pi Sigma|) terms
+  auto computeLogNormalizers = [](const GaussianFactor::shared_ptr &gf) {
+    auto jf = std::dynamic_pointer_cast<JacobianFactor>(gf);
+    // If we have, say, a Hessian factor, then no need to do anything
+    if (!jf) return 0.0;
+
+    auto model = jf->get_model();
+    // If there is no noise model, there is nothing to do.
+    if (!model) {
+      return 0.0;
+    }
+    return ComputeLogNormalizer(model);
+  };
+
+  AlgebraicDecisionTree<double> log_normalizers =
+      DecisionTree<double>(likelihoods, computeLogNormalizers);
   return std::make_shared<GaussianMixtureFactor>(
-      continuousParentKeys, discreteParentKeys, likelihoods, true);
+      continuousParentKeys, discreteParentKeys, likelihoods, log_normalizers);
 }
 
 /* ************************************************************************* */
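Why likelihood() must track these constants at all: each component likelihood in PATCH 07 is an unnormalized Gaussian k_m exp(-E_m(x)). When all components share one noise model the k_m cancel in any ratio, but when they differ, dropping them skews the discrete posterior. In symbols (ours, for exposition):

\[
\frac{L_0(x)}{L_1(x)}
= \frac{k_0}{k_1}\,\exp\bigl(E_1(x)-E_0(x)\bigr),
\qquad k_m = \frac{1}{\sqrt{\lvert 2\pi\Sigma_m\rvert}},
\]

so the k_m must ride along whenever Sigma_0 differs from Sigma_1; that is why the method now builds the log_normalizers tree and hands it to the GaussianMixtureFactor constructor.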
From 98a2668fb1e78684ec9fa290e032235162cf2fe9 Mon Sep 17 00:00:00 2001
From: Varun Agrawal
Date: Thu, 22 Aug 2024 11:57:57 -0400
Subject: [PATCH 08/10] add docstring and GTSAM_EXPORT for ComputeLogNormalizer

---
 gtsam/hybrid/GaussianMixtureFactor.h | 11 ++++++++++-
 1 file changed, 10 insertions(+), 1 deletion(-)

diff --git a/gtsam/hybrid/GaussianMixtureFactor.h b/gtsam/hybrid/GaussianMixtureFactor.h
index 6e9f6034e..6680abb54 100644
--- a/gtsam/hybrid/GaussianMixtureFactor.h
+++ b/gtsam/hybrid/GaussianMixtureFactor.h
@@ -179,7 +179,16 @@ template <>
 struct traits<GaussianMixtureFactor> : public Testable<GaussianMixtureFactor> {
 };
 
-double ComputeLogNormalizer(
+/**
+ * @brief Helper function to compute the sqrt(|2πΣ|) normalizer values
+ * for a Gaussian noise model.
+ * We compute this in the log-space for numerical accuracy.
+ *
+ * @param noise_model The Gaussian noise model
+ * whose normalizer we wish to compute.
+ * @return double
+ */
+GTSAM_EXPORT double ComputeLogNormalizer(
     const noiseModel::Gaussian::shared_ptr &noise_model);
 
 }  // namespace gtsam
From cd9ee0845765767e98c8f334e3a2cf4a4bed6a7e Mon Sep 17 00:00:00 2001
From: Varun Agrawal
Date: Thu, 22 Aug 2024 14:43:23 -0400
Subject: [PATCH 09/10] remove augment method in favor of conversion

---
 gtsam/hybrid/GaussianMixture.cpp       | 35 ++++++++---------
 gtsam/hybrid/GaussianMixtureFactor.cpp | 52 ++------------------------
 gtsam/hybrid/GaussianMixtureFactor.h   | 14 ++-----
 3 files changed, 23 insertions(+), 78 deletions(-)

diff --git a/gtsam/hybrid/GaussianMixture.cpp b/gtsam/hybrid/GaussianMixture.cpp
index 6c92f0252..0a0332af8 100644
--- a/gtsam/hybrid/GaussianMixture.cpp
+++ b/gtsam/hybrid/GaussianMixture.cpp
@@ -200,27 +200,24 @@ std::shared_ptr<GaussianMixtureFactor> GaussianMixture::likelihood(
   const GaussianMixtureFactor::Factors likelihoods(
       conditionals_, [&](const GaussianConditional::shared_ptr &conditional) {
         const auto likelihood_m = conditional->likelihood(given);
-        return likelihood_m;
+        const double Cgm_Kgcm =
+            logConstant_ - conditional->logNormalizationConstant();
+        if (Cgm_Kgcm == 0.0) {
+          return likelihood_m;
+        } else {
+          // Add a constant factor to the likelihood in case the noise models
+          // are not all equal.
+          GaussianFactorGraph gfg;
+          gfg.push_back(likelihood_m);
+          Vector c(1);
+          c << std::sqrt(2.0 * Cgm_Kgcm);
+          auto constantFactor = std::make_shared<JacobianFactor>(c);
+          gfg.push_back(constantFactor);
+          return std::make_shared<JacobianFactor>(gfg);
+        }
       });
-
-  // First compute all the sqrt(|2 pi Sigma|) terms
-  auto computeLogNormalizers = [](const GaussianFactor::shared_ptr &gf) {
-    auto jf = std::dynamic_pointer_cast<JacobianFactor>(gf);
-    // If we have, say, a Hessian factor, then no need to do anything
-    if (!jf) return 0.0;
-
-    auto model = jf->get_model();
-    // If there is no noise model, there is nothing to do.
-    if (!model) {
-      return 0.0;
-    }
-    return ComputeLogNormalizer(model);
-  };
-
-  AlgebraicDecisionTree<double> log_normalizers =
-      DecisionTree<double>(likelihoods, computeLogNormalizers);
   return std::make_shared<GaussianMixtureFactor>(
-      continuousParentKeys, discreteParentKeys, likelihoods, log_normalizers);
+      continuousParentKeys, discreteParentKeys, likelihoods);
 }
 
 /* ************************************************************************* */
diff --git a/gtsam/hybrid/GaussianMixtureFactor.cpp b/gtsam/hybrid/GaussianMixtureFactor.cpp
index 0427eef7b..b4ee89cb0 100644
--- a/gtsam/hybrid/GaussianMixtureFactor.cpp
+++ b/gtsam/hybrid/GaussianMixtureFactor.cpp
@@ -28,55 +28,11 @@
 
 namespace gtsam {
 
-/**
- * @brief Helper function to augment the [A|b] matrices in the factor components
- * with the normalizer values.
- * This is done by storing the normalizer value in
- * the `b` vector as an additional row.
- *
- * @param factors DecisionTree of GaussianFactor shared pointers.
- * @param logNormalizers Tree of log-normalizers corresponding to each
- * Gaussian factor in factors.
- * @return GaussianMixtureFactor::Factors
- */
-GaussianMixtureFactor::Factors augment(
-    const GaussianMixtureFactor::Factors &factors,
-    const AlgebraicDecisionTree<double> &logNormalizers) {
-  // Find the minimum value so we can shift the tree to non-negative values.
-  // Done because we can't have sqrt of negative numbers.
-  double min_log_normalizer = logNormalizers.min();
-  AlgebraicDecisionTree<double> log_normalizers = logNormalizers.apply(
-      [&min_log_normalizer](double n) { return n - min_log_normalizer; });
-
-  // Finally, update the [A|b] matrices.
-  auto update = [&log_normalizers](
-                    const Assignment<Key> &assignment,
-                    const GaussianMixtureFactor::sharedFactor &gf) {
-    auto jf = std::dynamic_pointer_cast<JacobianFactor>(gf);
-    if (!jf) return gf;
-    // If the log_normalizer is 0, do nothing
-    if (log_normalizers(assignment) == 0.0) return gf;
-
-    GaussianFactorGraph gfg;
-    gfg.push_back(jf);
-
-    Vector c(1);
-    c << std::sqrt(log_normalizers(assignment));
-    auto constantFactor = std::make_shared<JacobianFactor>(c);
-
-    gfg.push_back(constantFactor);
-    return std::dynamic_pointer_cast<GaussianFactor>(
-        std::make_shared<JacobianFactor>(gfg));
-  };
-  return factors.apply(update);
-}
-
 /* *******************************************************************************/
-GaussianMixtureFactor::GaussianMixtureFactor(
-    const KeyVector &continuousKeys, const DiscreteKeys &discreteKeys,
-    const Factors &factors, const AlgebraicDecisionTree<double> &logNormalizers)
-    : Base(continuousKeys, discreteKeys),
-      factors_(augment(factors, logNormalizers)) {}
+GaussianMixtureFactor::GaussianMixtureFactor(const KeyVector &continuousKeys,
+                                             const DiscreteKeys &discreteKeys,
+                                             const Factors &factors)
+    : Base(continuousKeys, discreteKeys), factors_(factors) {}
 
 /* *******************************************************************************/
 bool GaussianMixtureFactor::equals(const HybridFactor &lf, double tol) const {
diff --git a/gtsam/hybrid/GaussianMixtureFactor.h b/gtsam/hybrid/GaussianMixtureFactor.h
index 6680abb54..4570268f1 100644
--- a/gtsam/hybrid/GaussianMixtureFactor.h
+++ b/gtsam/hybrid/GaussianMixtureFactor.h
@@ -82,14 +82,10 @@ class GTSAM_EXPORT GaussianMixtureFactor : public HybridFactor {
   * their cardinalities.
   * @param factors The decision tree of Gaussian factors stored as the mixture
   * density.
-  * @param logNormalizers Tree of log-normalizers corresponding to each
-  * Gaussian factor in factors.
   */
  GaussianMixtureFactor(const KeyVector &continuousKeys,
                        const DiscreteKeys &discreteKeys,
-                       const Factors &factors,
-                       const AlgebraicDecisionTree<double> &logNormalizers =
-                           AlgebraicDecisionTree<double>(0.0));
+                       const Factors &factors);
 
  /**
   * @brief Construct a new GaussianMixtureFactor object using a vector of
@@ -97,16 +94,12 @@ class GTSAM_EXPORT GaussianMixtureFactor : public HybridFactor {
   * @param continuousKeys Vector of keys for continuous factors.
   * @param discreteKeys Vector of discrete keys.
   * @param factors Vector of gaussian factor shared pointers.
-  * @param logNormalizers Tree of log-normalizers corresponding to each
-  * Gaussian factor in factors.
   */
  GaussianMixtureFactor(const KeyVector &continuousKeys,
                        const DiscreteKeys &discreteKeys,
-                       const std::vector<GaussianFactor::shared_ptr> &factors,
-                       const AlgebraicDecisionTree<double> &logNormalizers =
-                           AlgebraicDecisionTree<double>(0.0))
+                       const std::vector<GaussianFactor::shared_ptr> &factors)
      : GaussianMixtureFactor(continuousKeys, discreteKeys,
-                             Factors(discreteKeys, factors), logNormalizers) {}
+                             Factors(discreteKeys, factors)) {}
 
  /// @}
  /// @name Testable
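The arithmetic behind the 2.0 under the square root in PATCH 09 (symbols ours): a factor error enters the density as exp(-E(x)), and a variable-free JacobianFactor row with entry c adds c^2/2 to E. With

\[
C = \log K_{gm} - \log k_{gcm},
\qquad c = \sqrt{2\,C}
\;\Longrightarrow\;
\tfrac12 c^2 = C,
\]

each component's error is raised by exactly its normalization-constant gap to the largest constant (logConstant_ appears to be the maximum over the conditionals, so the quantity under the root is non-negative). Because log k_m = -1/2 log|2 pi Sigma_m|, this is the same (ell_m - ell_min)/2 offset the removed augment() applied, which is what makes the conversion behavior-preserving.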
From 91ccbebf224c52063dd43ffb69ecb7cef889cfbd Mon Sep 17 00:00:00 2001
From: Varun Agrawal
Date: Thu, 22 Aug 2024 14:44:54 -0400
Subject: [PATCH 10/10] remove ComputeLogNormalizer function

---
 gtsam/hybrid/GaussianMixtureFactor.cpp | 16 ----------------
 gtsam/hybrid/GaussianMixtureFactor.h   | 12 ------------
 2 files changed, 28 deletions(-)

diff --git a/gtsam/hybrid/GaussianMixtureFactor.cpp b/gtsam/hybrid/GaussianMixtureFactor.cpp
index b4ee89cb0..8471cef33 100644
--- a/gtsam/hybrid/GaussianMixtureFactor.cpp
+++ b/gtsam/hybrid/GaussianMixtureFactor.cpp
@@ -120,20 +120,4 @@ double GaussianMixtureFactor::error(const HybridValues &values) const {
   return gf->error(values.continuous());
 }
 
-/* *******************************************************************************/
-double ComputeLogNormalizer(
-    const noiseModel::Gaussian::shared_ptr &noise_model) {
-  // Since noise models are Gaussian, we can get the logDeterminant using
-  // the same trick as in GaussianConditional
-  double logDetR = noise_model->R()
-                       .diagonal()
-                       .unaryExpr([](double x) { return log(x); })
-                       .sum();
-  double logDeterminantSigma = -2.0 * logDetR;
-
-  size_t n = noise_model->dim();
-  constexpr double log2pi = 1.8378770664093454835606594728112;
-  return n * log2pi + logDeterminantSigma;
-}
-
 }  // namespace gtsam
diff --git a/gtsam/hybrid/GaussianMixtureFactor.h b/gtsam/hybrid/GaussianMixtureFactor.h
index 4570268f1..b1be6ef1d 100644
--- a/gtsam/hybrid/GaussianMixtureFactor.h
+++ b/gtsam/hybrid/GaussianMixtureFactor.h
@@ -171,16 +171,4 @@ template <>
 struct traits<GaussianMixtureFactor> : public Testable<GaussianMixtureFactor> {
 };
 
-/**
- * @brief Helper function to compute the sqrt(|2πΣ|) normalizer values
- * for a Gaussian noise model.
- * We compute this in the log-space for numerical accuracy.
- *
- * @param noise_model The Gaussian noise model
- * whose normalizer we wish to compute.
- * @return double
- */
-GTSAM_EXPORT double ComputeLogNormalizer(
-    const noiseModel::Gaussian::shared_ptr &noise_model);
-
 }  // namespace gtsam
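After the full series, a mixture factor takes its components as given and all constant bookkeeping lives in GaussianMixture::likelihood(). A minimal construction sketch of the post-series API (keys, sigmas, and the unary factors here are illustrative assumptions, not taken from the patches):

#include <gtsam/discrete/DiscreteKey.h>
#include <gtsam/hybrid/GaussianMixtureFactor.h>
#include <gtsam/inference/Symbol.h>
#include <gtsam/linear/JacobianFactor.h>
#include <gtsam/linear/NoiseModel.h>

using namespace gtsam;

int main() {
  Key x1 = Symbol('x', 1);
  DiscreteKey m1(Symbol('m', 1), 2);  // one binary mode variable (hypothetical)

  // One unary Gaussian factor per mode, deliberately with different sigmas,
  // so the component normalization constants differ.
  auto f0 = std::make_shared<JacobianFactor>(
      x1, Matrix::Identity(1, 1), Vector::Zero(1),
      noiseModel::Isotropic::Sigma(1, 0.5));
  auto f1 = std::make_shared<JacobianFactor>(
      x1, Matrix::Identity(1, 1), Vector::Zero(1),
      noiseModel::Isotropic::Sigma(1, 2.0));

  // Components are stored as-is; there is no normalizer argument anymore.
  GaussianMixtureFactor mixture(
      KeyVector{x1}, DiscreteKeys{m1},
      std::vector<GaussianFactor::shared_ptr>{f0, f1});
  mixture.print("mixture");
  return 0;
}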