From 1ab82f382cf390704e7321336c174f4cf12dd945 Mon Sep 17 00:00:00 2001
From: Varun Agrawal
Date: Fri, 20 Sep 2024 03:26:07 -0400
Subject: [PATCH] hide sqrt(2*value) so the user doesn't have to premultiply by 2

---
 gtsam/hybrid/HybridGaussianConditional.cpp    |  3 +--
 gtsam/hybrid/HybridGaussianFactor.cpp         | 19 +++++++++++--------
 .../hybrid/tests/testHybridGaussianFactor.cpp |  7 ++++---
 .../tests/testHybridNonlinearFactorGraph.cpp  |  7 ++++---
 4 files changed, 20 insertions(+), 16 deletions(-)

diff --git a/gtsam/hybrid/HybridGaussianConditional.cpp b/gtsam/hybrid/HybridGaussianConditional.cpp
index 79673d9f2..df59637aa 100644
--- a/gtsam/hybrid/HybridGaussianConditional.cpp
+++ b/gtsam/hybrid/HybridGaussianConditional.cpp
@@ -222,8 +222,7 @@ std::shared_ptr<HybridGaussianFactor> HybridGaussianConditional::likelihood(
         } else {
           // Add a constant to the likelihood in case the noise models
           // are not all equal.
-          double c = 2.0 * Cgm_Kgcm;
-          return {likelihood_m, c};
+          return {likelihood_m, Cgm_Kgcm};
         }
       });
   return std::make_shared<HybridGaussianFactor>(
diff --git a/gtsam/hybrid/HybridGaussianFactor.cpp b/gtsam/hybrid/HybridGaussianFactor.cpp
index 13b26db6c..2fbd4bd88 100644
--- a/gtsam/hybrid/HybridGaussianFactor.cpp
+++ b/gtsam/hybrid/HybridGaussianFactor.cpp
@@ -46,31 +46,34 @@ HybridGaussianFactor::Factors augment(
   AlgebraicDecisionTree<Key> valueTree;
   std::tie(gaussianFactors, valueTree) = unzip(factors);
 
-  // Normalize
+  // Compute minimum value for normalization.
   double min_value = valueTree.min();
-  AlgebraicDecisionTree<Key> values =
-      valueTree.apply([&min_value](double n) { return n - min_value; });
 
   // Finally, update the [A|b] matrices.
-  auto update = [&values](const Assignment<Key> &assignment,
-                          const HybridGaussianFactor::sharedFactor &gf) {
+  auto update = [&min_value](const GaussianFactorValuePair &gfv) {
+    auto [gf, value] = gfv;
+
     auto jf = std::dynamic_pointer_cast<JacobianFactor>(gf);
     if (!jf) return gf;
+
+    double normalized_value = value - min_value;
+
     // If the value is 0, do nothing
-    if (values(assignment) == 0.0) return gf;
+    if (normalized_value == 0.0) return gf;
 
     GaussianFactorGraph gfg;
     gfg.push_back(jf);
 
     Vector c(1);
-    c << std::sqrt(values(assignment));
+    // When hiding c inside the `b` vector, value == 0.5*c^2
+    c << std::sqrt(2.0 * normalized_value);
     auto constantFactor = std::make_shared<JacobianFactor>(c);
 
     gfg.push_back(constantFactor);
     return std::dynamic_pointer_cast<GaussianFactor>(
         std::make_shared<JacobianFactor>(gfg));
   };
-  return gaussianFactors.apply(update);
+  return HybridGaussianFactor::Factors(factors, update);
 }
 
 /* *******************************************************************************/
diff --git a/gtsam/hybrid/tests/testHybridGaussianFactor.cpp b/gtsam/hybrid/tests/testHybridGaussianFactor.cpp
index 1ecd77132..bfa283983 100644
--- a/gtsam/hybrid/tests/testHybridGaussianFactor.cpp
+++ b/gtsam/hybrid/tests/testHybridGaussianFactor.cpp
@@ -770,10 +770,11 @@ static HybridGaussianFactorGraph CreateFactorGraph(
           ->linearize(values);
 
   // Create HybridGaussianFactor
-  // We multiply by -2 since the we want the underlying scalar to be log(|2πΣ|)
+  // We take negative since we want
+  // the underlying scalar to be log(\sqrt(|2πΣ|))
   std::vector<GaussianFactorValuePair> factors{
-      {f0, -2 * model0->logNormalizationConstant()},
-      {f1, -2 * model1->logNormalizationConstant()}};
+      {f0, -model0->logNormalizationConstant()},
+      {f1, -model1->logNormalizationConstant()}};
   HybridGaussianFactor motionFactor({X(0), X(1)}, m1, factors);
 
   HybridGaussianFactorGraph hfg;
diff --git a/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp b/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp
index 8d2a5a74b..621c8708e 100644
--- a/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp
+++ b/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp
@@ -868,10 +868,11 @@ static HybridNonlinearFactorGraph CreateFactorGraph(
       std::make_shared<BetweenFactor<double>>(X(0), X(1), means[1], model1);
 
   // Create HybridNonlinearFactor
-  // We multiply by -2 since the we want the underlying scalar to be log(|2πΣ|)
+  // We take negative since we want
+  // the underlying scalar to be log(\sqrt(|2πΣ|))
   std::vector<NonlinearFactorValuePair> factors{
-      {f0, -2 * model0->logNormalizationConstant()},
-      {f1, -2 * model1->logNormalizationConstant()}};
+      {f0, -model0->logNormalizationConstant()},
+      {f1, -model1->logNormalizationConstant()}};
   HybridNonlinearFactor mixtureFactor({X(0), X(1)}, m1, factors);
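
Note on the scalar convention (not part of the patch): a JacobianFactor's error is 0.5 * ||Ax - b||^2, so a keyless factor whose `b` vector is [sqrt(2 * value)] contributes exactly `value` to the total error. That is why augment() can now take the raw scalar and hide the factor of 2 itself, and why the callers above drop their `2 *` and `-2 *` pre-multiplications. The snippet below is a minimal illustrative check of that identity; it only assumes the `JacobianFactor(const Vector&)` constructor and `GaussianFactor::error(const VectorValues&)` that the patched augment() already uses, and the value 1.7 is an arbitrary example.

// Illustrative sketch only, not part of the patch.
#include <gtsam/base/Vector.h>
#include <gtsam/linear/JacobianFactor.h>
#include <gtsam/linear/VectorValues.h>
#include <cassert>
#include <cmath>

int main() {
  const double value = 1.7;                 // scalar the factor should contribute
  gtsam::Vector c(1);
  c << std::sqrt(2.0 * value);              // what augment() now hides inside b
  gtsam::JacobianFactor constantFactor(c);  // keyless factor with b = c
  // error(x) = 0.5 * ||A x - b||^2 = 0.5 * (2 * value) = value
  const double err = constantFactor.error(gtsam::VectorValues());
  assert(std::fabs(err - value) < 1e-9);
  return 0;
}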