From 7fab3f8cc3859f5a693b5f017e9cc9aaa7eda735 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Tue, 20 Aug 2024 16:21:46 -0400 Subject: [PATCH 01/21] improved MixtureFactor tests --- gtsam/hybrid/tests/testMixtureFactor.cpp | 153 +++++++++++++++++++++++ 1 file changed, 153 insertions(+) diff --git a/gtsam/hybrid/tests/testMixtureFactor.cpp b/gtsam/hybrid/tests/testMixtureFactor.cpp index 0b2564403..48b193eeb 100644 --- a/gtsam/hybrid/tests/testMixtureFactor.cpp +++ b/gtsam/hybrid/tests/testMixtureFactor.cpp @@ -18,6 +18,9 @@ #include #include +#include +#include +#include #include #include #include @@ -115,6 +118,156 @@ TEST(MixtureFactor, Dim) { EXPECT_LONGS_EQUAL(1, mixtureFactor.dim()); } +/* ************************************************************************* */ +// Test components with differing means +TEST(MixtureFactor, DifferentMeans) { + DiscreteKey m1(M(1), 2), m2(M(2), 2); + + Values values; + double x1 = 0.0, x2 = 1.75, x3 = 2.60; + values.insert(X(1), x1); + values.insert(X(2), x2); + values.insert(X(3), x3); + + auto model0 = noiseModel::Isotropic::Sigma(1, 1e-0); + auto model1 = noiseModel::Isotropic::Sigma(1, 1e-0); + auto prior_noise = noiseModel::Isotropic::Sigma(1, 1e-0); + + auto f0 = std::make_shared>(X(1), X(2), 0.0, model0); + auto f1 = std::make_shared>(X(1), X(2), 2.0, model1); + std::vector factors{f0, f1}; + + MixtureFactor mixtureFactor({X(1), X(2)}, {m1}, factors); + HybridNonlinearFactorGraph hnfg; + hnfg.push_back(mixtureFactor); + + f0 = std::make_shared>(X(2), X(3), 0.0, model0); + f1 = std::make_shared>(X(2), X(3), 2.0, model1); + std::vector factors23{f0, f1}; + hnfg.push_back(MixtureFactor({X(2), X(3)}, {m2}, factors23)); + + auto prior = PriorFactor(X(1), x1, prior_noise); + hnfg.push_back(prior); + + hnfg.emplace_shared>(X(2), 2.0, prior_noise); + + auto hgfg = hnfg.linearize(values); + auto bn = hgfg->eliminateSequential(); + HybridValues actual = bn->optimize(); + + HybridValues expected( + VectorValues{ + {X(1), Vector1(0.0)}, {X(2), Vector1(0.25)}, {X(3), Vector1(-0.6)}}, + DiscreteValues{{M(1), 1}, {M(2), 0}}); + + EXPECT(assert_equal(expected, actual)); + + { + DiscreteValues dv{{M(1), 0}, {M(2), 0}}; + VectorValues cont = bn->optimize(dv); + double error = bn->error(HybridValues(cont, dv)); + // regression + EXPECT_DOUBLES_EQUAL(1.77418393408, error, 1e-9); + } + { + DiscreteValues dv{{M(1), 0}, {M(2), 1}}; + VectorValues cont = bn->optimize(dv); + double error = bn->error(HybridValues(cont, dv)); + // regression + EXPECT_DOUBLES_EQUAL(1.77418393408, error, 1e-9); + } + { + DiscreteValues dv{{M(1), 1}, {M(2), 0}}; + VectorValues cont = bn->optimize(dv); + double error = bn->error(HybridValues(cont, dv)); + // regression + EXPECT_DOUBLES_EQUAL(1.10751726741, error, 1e-9); + } + { + DiscreteValues dv{{M(1), 1}, {M(2), 1}}; + VectorValues cont = bn->optimize(dv); + double error = bn->error(HybridValues(cont, dv)); + // regression + EXPECT_DOUBLES_EQUAL(1.10751726741, error, 1e-9); + } +} + +/* ************************************************************************* */ +// Test components with differing covariances +TEST(MixtureFactor, DifferentCovariances) { + DiscreteKey m1(M(1), 2); + + Values values; + double x1 = 1.0, x2 = 1.0; + values.insert(X(1), x1); + values.insert(X(2), x2); + + double between = 0.0; + + auto model0 = noiseModel::Isotropic::Sigma(1, 1e2); + auto model1 = noiseModel::Isotropic::Sigma(1, 1e-2); + auto prior_noise = noiseModel::Isotropic::Sigma(1, 1e-3); + + auto f0 = + std::make_shared>(X(1), X(2), between, model0); 
+ auto f1 = + std::make_shared>(X(1), X(2), between, model1); + std::vector factors{f0, f1}; + + // Create via toFactorGraph + using symbol_shorthand::Z; + Matrix H0_1, H0_2, H1_1, H1_2; + Vector d0 = f0->evaluateError(x1, x2, &H0_1, &H0_2); + std::vector> terms0 = {{Z(1), gtsam::I_1x1 /*Rx*/}, + // + {X(1), H0_1 /*Sp1*/}, + {X(2), H0_2 /*Tp2*/}}; + + Vector d1 = f1->evaluateError(x1, x2, &H1_1, &H1_2); + std::vector> terms1 = {{Z(1), gtsam::I_1x1 /*Rx*/}, + // + {X(1), H1_1 /*Sp1*/}, + {X(2), H1_2 /*Tp2*/}}; + auto gm = new gtsam::GaussianMixture( + {Z(1)}, {X(1), X(2)}, {m1}, + {std::make_shared(terms0, 1, -d0, model0), + std::make_shared(terms1, 1, -d1, model1)}); + gtsam::HybridBayesNet bn; + bn.emplace_back(gm); + + gtsam::VectorValues measurements; + measurements.insert(Z(1), gtsam::Z_1x1); + // Create FG with single GaussianMixtureFactor + HybridGaussianFactorGraph mixture_fg = bn.toFactorGraph(measurements); + + // Linearized prior factor on X1 + auto prior = PriorFactor(X(1), x1, prior_noise).linearize(values); + mixture_fg.push_back(prior); + + auto hbn = mixture_fg.eliminateSequential(); + + VectorValues cv; + cv.insert(X(1), Vector1(0.0)); + cv.insert(X(2), Vector1(0.0)); + // P(m1) = [0.5, 0.5], so we should pick 0 + DiscreteValues dv; + dv.insert({M(1), 0}); + HybridValues expected_values(cv, dv); + + HybridValues actual_values = hbn->optimize(); + EXPECT(assert_equal(expected_values, actual_values)); + + // Check that we get different error values at the MLE point μ. + AlgebraicDecisionTree errorTree = hbn->errorTree(cv); + + HybridValues hv0(cv, DiscreteValues{{M(1), 0}}); + HybridValues hv1(cv, DiscreteValues{{M(1), 1}}); + + AlgebraicDecisionTree expectedErrorTree(m1, 9.90348755254, + 0.69314718056); + EXPECT(assert_equal(expectedErrorTree, errorTree)); +} + /* ************************************************************************* */ int main() { TestResult tr; From ccad6f6c48febbabfd989a619313725fc37f2a49 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Tue, 20 Aug 2024 16:22:07 -0400 Subject: [PATCH 02/21] print main logNormalizationConstant in GaussianMixture --- gtsam/hybrid/GaussianMixture.cpp | 2 ++ 1 file changed, 2 insertions(+) diff --git a/gtsam/hybrid/GaussianMixture.cpp b/gtsam/hybrid/GaussianMixture.cpp index c105a329e..531a9f2e5 100644 --- a/gtsam/hybrid/GaussianMixture.cpp +++ b/gtsam/hybrid/GaussianMixture.cpp @@ -145,6 +145,8 @@ void GaussianMixture::print(const std::string &s, std::cout << "(" << formatter(dk.first) << ", " << dk.second << "), "; } std::cout << "\n"; + std::cout << " logNormalizationConstant: " << logConstant_ << "\n" + << std::endl; conditionals_.print( "", [&](Key k) { return formatter(k); }, [&](const GaussianConditional::shared_ptr &gf) -> std::string { From 2007ef53de47f75a317509acf7fbd7828371898e Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Tue, 20 Aug 2024 16:22:34 -0400 Subject: [PATCH 03/21] use DiscreteFactorGraph directly --- gtsam/hybrid/HybridBayesNet.cpp | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/gtsam/hybrid/HybridBayesNet.cpp b/gtsam/hybrid/HybridBayesNet.cpp index b02967555..1d01baed2 100644 --- a/gtsam/hybrid/HybridBayesNet.cpp +++ b/gtsam/hybrid/HybridBayesNet.cpp @@ -220,15 +220,16 @@ GaussianBayesNet HybridBayesNet::choose( /* ************************************************************************* */ HybridValues HybridBayesNet::optimize() const { // Collect all the discrete factors to compute MPE - DiscreteBayesNet discrete_bn; + DiscreteFactorGraph discrete_fg; + for (auto 
&&conditional : *this) {
     if (conditional->isDiscrete()) {
-      discrete_bn.push_back(conditional->asDiscrete());
+      discrete_fg.push_back(conditional->asDiscrete());
     }
   }
 
   // Solve for the MPE
-  DiscreteValues mpe = DiscreteFactorGraph(discrete_bn).optimize();
+  DiscreteValues mpe = discrete_fg.optimize();
 
   // Given the MPE, compute the optimal continuous values.
   return HybridValues(optimize(mpe), mpe);

From 598edfacce2d80229b178a2b0f1dd271a79512c3 Mon Sep 17 00:00:00 2001
From: Varun Agrawal
Date: Tue, 20 Aug 2024 16:28:28 -0400
Subject: [PATCH 04/21] improve GaussianMixture by checking for invalid
 conditionals and adding 2 new methods

---
 gtsam/hybrid/GaussianMixture.cpp | 67 ++++++++++++++++++++++++++++++--
 gtsam/hybrid/GaussianMixture.h | 15 +++++++
 gtsam/hybrid/HybridFactor.h | 3 ++
 3 files changed, 81 insertions(+), 4 deletions(-)

diff --git a/gtsam/hybrid/GaussianMixture.cpp b/gtsam/hybrid/GaussianMixture.cpp
index 531a9f2e5..36a34226b 100644
--- a/gtsam/hybrid/GaussianMixture.cpp
+++ b/gtsam/hybrid/GaussianMixture.cpp
@@ -24,6 +24,7 @@
 #include
 #include
 #include
+#include
 #include
 
 namespace gtsam {
@@ -92,6 +93,35 @@ GaussianFactorGraphTree GaussianMixture::asGaussianFactorGraphTree() const {
   return {conditionals_, wrap};
 }
 
+/*
+*******************************************************************************/
+GaussianBayesNetTree GaussianMixture::add(
+    const GaussianBayesNetTree &sum) const {
+  using Y = GaussianBayesNet;
+  auto add = [](const Y &graph1, const Y &graph2) {
+    auto result = graph1;
+    if (graph2.size() == 0) {
+      return GaussianBayesNet();
+    }
+    result.push_back(graph2);
+    return result;
+  };
+  const auto tree = asGaussianBayesNetTree();
+  return sum.empty() ? tree : sum.apply(tree, add);
+}
+
+/* *******************************************************************************/
+GaussianBayesNetTree GaussianMixture::asGaussianBayesNetTree() const {
+  auto wrap = [](const GaussianConditional::shared_ptr &gc) {
+    if (gc) {
+      return GaussianBayesNet{gc};
+    } else {
+      return GaussianBayesNet();
+    }
+  };
+  return {conditionals_, wrap};
+}
+
 /* *******************************************************************************/
 size_t GaussianMixture::nrComponents() const {
   size_t total = 0;
@@ -318,8 +348,15 @@ AlgebraicDecisionTree<Key> GaussianMixture::logProbability(
 AlgebraicDecisionTree<Key> GaussianMixture::errorTree(
     const VectorValues &continuousValues) const {
   auto errorFunc = [&](const GaussianConditional::shared_ptr &conditional) {
-    return conditional->error(continuousValues) +  //
-           logConstant_ - conditional->logNormalizationConstant();
+    // Check if the pointer is valid
+    if (conditional) {
+      return conditional->error(continuousValues) +  //
+             logConstant_ - conditional->logNormalizationConstant();
+    } else {
+      // If the pointer is not valid, it means this conditional was pruned,
+      // so we return the maximum error.
+      return std::numeric_limits<double>::max();
+    }
   };
   DecisionTree<Key, double> error_tree(conditionals_, errorFunc);
   return error_tree;
@@ -327,10 +364,32 @@ AlgebraicDecisionTree<Key> GaussianMixture::errorTree(
 
 /* *******************************************************************************/
 double GaussianMixture::error(const HybridValues &values) const {
+  // Check if the discrete keys in the discrete assignment are
+  // present in the GaussianMixture
+  KeyVector dKeys = this->discreteKeys_.indices();
+  bool valid_assignment = false;
+  for (auto &&kv : values.discrete()) {
+    if (std::find(dKeys.begin(), dKeys.end(), kv.first) != dKeys.end()) {
+      valid_assignment = true;
+      break;
+    }
+  }
+
+  // The discrete assignment is not valid so we return 0.0 error.
+  if (!valid_assignment) {
+    return 0.0;
+  }
+
   // Directly index to get the conditional, no need to build the whole tree.
   auto conditional = conditionals_(values.discrete());
-  return conditional->error(values.continuous()) +  //
-         logConstant_ - conditional->logNormalizationConstant();
+  if (conditional) {
+    return conditional->error(values.continuous()) +  //
+           logConstant_ - conditional->logNormalizationConstant();
+  } else {
+    // If the pointer is not valid, it means this conditional was pruned,
+    // so we return the maximum error.
+    return std::numeric_limits<double>::max();
+  }
 }
 
 /* *******************************************************************************/
diff --git a/gtsam/hybrid/GaussianMixture.h b/gtsam/hybrid/GaussianMixture.h
index c1ef504f8..bfa342dcf 100644
--- a/gtsam/hybrid/GaussianMixture.h
+++ b/gtsam/hybrid/GaussianMixture.h
@@ -72,6 +72,12 @@ class GTSAM_EXPORT GaussianMixture
    */
   GaussianFactorGraphTree asGaussianFactorGraphTree() const;
 
+  /**
+   * @brief Convert a DecisionTree of conditionals into
+   * a DecisionTree of Gaussian Bayes nets.
+   */
+  GaussianBayesNetTree asGaussianBayesNetTree() const;
+
   /**
    * @brief Helper function to get the pruner functor.
    *
@@ -250,6 +256,15 @@ class GTSAM_EXPORT GaussianMixture
    * @return GaussianFactorGraphTree
    */
   GaussianFactorGraphTree add(const GaussianFactorGraphTree &sum) const;
+
+  /**
+   * @brief Merge the Gaussian Bayes Nets in `this` and `sum` while
+   * maintaining the decision tree structure.
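+   * Pruned branches are represented by empty Bayes nets in either tree and
+   * are propagated as empty results (see the add lambda in the .cpp file).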
+   *
+   * @param sum Decision Tree of Gaussian Bayes Nets
+   * @return GaussianBayesNetTree
+   */
+  GaussianBayesNetTree add(const GaussianBayesNetTree &sum) const;
   /// @}

  private:
diff --git a/gtsam/hybrid/HybridFactor.h b/gtsam/hybrid/HybridFactor.h
index afd1c8032..418489d66 100644
--- a/gtsam/hybrid/HybridFactor.h
+++ b/gtsam/hybrid/HybridFactor.h
@@ -13,6 +13,7 @@
  * @file HybridFactor.h
  * @date Mar 11, 2022
  * @author Fan Jiang
+ * @author Varun Agrawal
  */

 #pragma once
@@ -33,6 +34,8 @@ class HybridValues;

 /// Alias for DecisionTree of GaussianFactorGraphs
 using GaussianFactorGraphTree = DecisionTree<Key, GaussianFactorGraph>;
+/// Alias for DecisionTree of GaussianBayesNets
+using GaussianBayesNetTree = DecisionTree<Key, GaussianBayesNet>;

 KeyVector CollectKeys(const KeyVector &continuousKeys,
                       const DiscreteKeys &discreteKeys);

From eef9765e4a16ca90f1bf3c953500e99c2ec4b2e2 Mon Sep 17 00:00:00 2001
From: Varun Agrawal
Date: Tue, 20 Aug 2024 16:29:02 -0400
Subject: [PATCH 05/21] normalize the discrete factor at the
 continuous-discrete boundary

---
 gtsam/hybrid/HybridGaussianFactorGraph.cpp | 28 +++++++++++++++++-----
 1 file changed, 22 insertions(+), 6 deletions(-)

diff --git a/gtsam/hybrid/HybridGaussianFactorGraph.cpp b/gtsam/hybrid/HybridGaussianFactorGraph.cpp
index b764dc9e0..bf11a50fc 100644
--- a/gtsam/hybrid/HybridGaussianFactorGraph.cpp
+++ b/gtsam/hybrid/HybridGaussianFactorGraph.cpp
@@ -279,21 +279,37 @@ GaussianFactorGraphTree removeEmpty(const GaussianFactorGraphTree &sum) {

 using Result = std::pair<GaussianConditional::shared_ptr,
                          GaussianMixtureFactor::sharedFactor>;

-// Integrate the probability mass in the last continuous conditional using
-// the unnormalized probability q(μ;m) = exp(-error(μ;m)) at the mean.
-// discrete_probability = exp(-error(μ;m)) * sqrt(det(2π Σ_m))
+/**
+ * Compute the probability q(μ;m) = exp(-error(μ;m)) * sqrt(det(2π Σ_m))
+ * from the residual error at the mean μ.
+ * The residual error contains no keys, and only
+ * depends on the discrete separator if present.
+ */
 static std::shared_ptr<Factor> createDiscreteFactor(
     const DecisionTree<Key, Result> &eliminationResults,
     const DiscreteKeys &discreteSeparator) {
-  auto probability = [&](const Result &pair) -> double {
+  auto logProbability = [&](const Result &pair) -> double {
     const auto &[conditional, factor] = pair;
     static const VectorValues kEmpty;
     // If the factor is not null, it has no keys, just contains the residual.
     if (!factor) return 1.0;  // TODO(dellaert): not loving this.
+
+    // Log-space version of:
+    //   exp(-factor->error(kEmpty)) / conditional->normalizationConstant();
+    // We take the negative of the logNormalizationConstant `log(1/k)`
+    // to get `log(k)`.
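+    // That is, log q(μ;m) = -error(μ;m) + log k(m),
+    // with k(m) = sqrt(det(2π Σ_m)), the constant in the docstring above.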
+ return -factor->error(kEmpty) + (-conditional->logNormalizationConstant()); }; - DecisionTree probabilities(eliminationResults, probability); + AlgebraicDecisionTree logProbabilities( + DecisionTree(eliminationResults, logProbability)); + + // Perform normalization + double max_log = logProbabilities.max(); + AlgebraicDecisionTree probabilities = DecisionTree( + logProbabilities, + [&max_log](const double x) { return exp(x - max_log); }); + probabilities = probabilities.normalize(probabilities.sum()); return std::make_shared(discreteSeparator, probabilities); } From ea104c4b8399f27aa48fc95f0888d304c8ff39d2 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Tue, 20 Aug 2024 16:29:40 -0400 Subject: [PATCH 06/21] support for varying normalizers in GaussianMixtureFactor --- gtsam/hybrid/GaussianMixtureFactor.cpp | 83 +++++++++++++++++++++++++- gtsam/hybrid/GaussianMixtureFactor.h | 18 ++++-- 2 files changed, 92 insertions(+), 9 deletions(-) diff --git a/gtsam/hybrid/GaussianMixtureFactor.cpp b/gtsam/hybrid/GaussianMixtureFactor.cpp index a3db16d04..e519cefe6 100644 --- a/gtsam/hybrid/GaussianMixtureFactor.cpp +++ b/gtsam/hybrid/GaussianMixtureFactor.cpp @@ -28,11 +28,86 @@ namespace gtsam { +/** + * @brief Helper function to correct the [A|b] matrices in the factor components + * with the normalizer values. + * This is done by storing the normalizer value in + * the `b` vector as an additional row. + * + * @param factors DecisionTree of GaussianFactor shared pointers. + * @param varyingNormalizers Flag indicating the normalizers are different for + * each component. + * @return GaussianMixtureFactor::Factors + */ +GaussianMixtureFactor::Factors correct( + const GaussianMixtureFactor::Factors &factors, bool varyingNormalizers) { + if (!varyingNormalizers) { + return factors; + } + + // First compute all the sqrt(|2 pi Sigma|) terms + auto computeNormalizers = [](const GaussianMixtureFactor::sharedFactor &gf) { + auto jf = std::dynamic_pointer_cast(gf); + // If we have, say, a Hessian factor, then no need to do anything + if (!jf) return 0.0; + + auto model = jf->get_model(); + // If there is no noise model, there is nothing to do. + if (!model) { + return 0.0; + } + // Since noise models are Gaussian, we can get the logDeterminant using the + // same trick as in GaussianConditional + double logDetR = + model->R().diagonal().unaryExpr([](double x) { return log(x); }).sum(); + double logDeterminantSigma = -2.0 * logDetR; + + size_t n = model->dim(); + constexpr double log2pi = 1.8378770664093454835606594728112; + return n * log2pi + logDeterminantSigma; + }; + + AlgebraicDecisionTree log_normalizers = + DecisionTree(factors, computeNormalizers); + + // Find the minimum value so we can "proselytize" to positive values. + // Done because we can't have sqrt of negative numbers. + double min_log_normalizer = log_normalizers.min(); + log_normalizers = log_normalizers.apply( + [&min_log_normalizer](double n) { return n - min_log_normalizer; }); + + // Finally, update the [A|b] matrices. + auto update = [&log_normalizers]( + const Assignment &assignment, + const GaussianMixtureFactor::sharedFactor &gf) { + auto jf = std::dynamic_pointer_cast(gf); + if (!jf) return gf; + // If there is no noise model, there is nothing to do. 
+ if (!jf->get_model()) return gf; + // If the log_normalizer is 0, do nothing + if (log_normalizers(assignment) == 0.0) return gf; + + GaussianFactorGraph gfg; + gfg.push_back(jf); + + Vector c(1); + c << std::sqrt(log_normalizers(assignment)); + auto constantFactor = std::make_shared(c); + + gfg.push_back(constantFactor); + return std::dynamic_pointer_cast( + std::make_shared(gfg)); + }; + return factors.apply(update); +} + /* *******************************************************************************/ GaussianMixtureFactor::GaussianMixtureFactor(const KeyVector &continuousKeys, const DiscreteKeys &discreteKeys, - const Factors &factors) - : Base(continuousKeys, discreteKeys), factors_(factors) {} + const Factors &factors, + bool varyingNormalizers) + : Base(continuousKeys, discreteKeys), + factors_(correct(factors, varyingNormalizers)) {} /* *******************************************************************************/ bool GaussianMixtureFactor::equals(const HybridFactor &lf, double tol) const { @@ -54,7 +129,9 @@ bool GaussianMixtureFactor::equals(const HybridFactor &lf, double tol) const { /* *******************************************************************************/ void GaussianMixtureFactor::print(const std::string &s, const KeyFormatter &formatter) const { - HybridFactor::print(s, formatter); + std::cout << (s.empty() ? "" : s + "\n"); + std::cout << "GaussianMixtureFactor" << std::endl; + HybridFactor::print("", formatter); std::cout << "{\n"; if (factors_.empty()) { std::cout << " empty" << std::endl; diff --git a/gtsam/hybrid/GaussianMixtureFactor.h b/gtsam/hybrid/GaussianMixtureFactor.h index 67d12ddb0..588501bbe 100644 --- a/gtsam/hybrid/GaussianMixtureFactor.h +++ b/gtsam/hybrid/GaussianMixtureFactor.h @@ -82,10 +82,13 @@ class GTSAM_EXPORT GaussianMixtureFactor : public HybridFactor { * their cardinalities. * @param factors The decision tree of Gaussian factors stored as the mixture * density. + * @param varyingNormalizers Flag indicating factor components have varying + * normalizer values. */ GaussianMixtureFactor(const KeyVector &continuousKeys, const DiscreteKeys &discreteKeys, - const Factors &factors); + const Factors &factors, + bool varyingNormalizers = false); /** * @brief Construct a new GaussianMixtureFactor object using a vector of @@ -94,12 +97,16 @@ class GTSAM_EXPORT GaussianMixtureFactor : public HybridFactor { * @param continuousKeys Vector of keys for continuous factors. * @param discreteKeys Vector of discrete keys. * @param factors Vector of gaussian factor shared pointers. + * @param varyingNormalizers Flag indicating factor components have varying + * normalizer values. 
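+   * When true, each component's [A|b] system is augmented with an extra row
+   * holding the square root of its (min-shifted) log-normalizer.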
*/ GaussianMixtureFactor(const KeyVector &continuousKeys, const DiscreteKeys &discreteKeys, - const std::vector &factors) + const std::vector &factors, + bool varyingNormalizers = false) : GaussianMixtureFactor(continuousKeys, discreteKeys, - Factors(discreteKeys, factors)) {} + Factors(discreteKeys, factors), + varyingNormalizers) {} /// @} /// @name Testable @@ -107,9 +114,8 @@ class GTSAM_EXPORT GaussianMixtureFactor : public HybridFactor { bool equals(const HybridFactor &lf, double tol = 1e-9) const override; - void print( - const std::string &s = "GaussianMixtureFactor\n", - const KeyFormatter &formatter = DefaultKeyFormatter) const override; + void print(const std::string &s = "", const KeyFormatter &formatter = + DefaultKeyFormatter) const override; /// @} /// @name Standard API From 5e3093e3e2a693eda24f6a02d5a4ee6d54ef1a1c Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Tue, 20 Aug 2024 16:30:32 -0400 Subject: [PATCH 07/21] tests for verification --- .../tests/testGaussianMixtureFactor.cpp | 247 +++++++++++++++++- .../tests/testHybridNonlinearFactorGraph.cpp | 18 ++ 2 files changed, 260 insertions(+), 5 deletions(-) diff --git a/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp b/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp index 9cc7e6bfd..47b9ddc99 100644 --- a/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp +++ b/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp @@ -22,9 +22,13 @@ #include #include #include +#include +#include #include #include #include +#include +#include // Include for test suite #include @@ -56,7 +60,6 @@ TEST(GaussianMixtureFactor, Sum) { auto b = Matrix::Zero(2, 1); Vector2 sigmas; sigmas << 1, 2; - auto model = noiseModel::Diagonal::Sigmas(sigmas, true); auto f10 = std::make_shared(X(1), A1, X(2), A2, b); auto f11 = std::make_shared(X(1), A1, X(2), A2, b); @@ -106,7 +109,8 @@ TEST(GaussianMixtureFactor, Printing) { GaussianMixtureFactor mixtureFactor({X(1), X(2)}, {m1}, factors); std::string expected = - R"(Hybrid [x1 x2; 1]{ + R"(GaussianMixtureFactor +Hybrid [x1 x2; 1]{ Choice(1) 0 Leaf : A[x1] = [ @@ -178,7 +182,8 @@ TEST(GaussianMixtureFactor, Error) { continuousValues.insert(X(2), Vector2(1, 1)); // error should return a tree of errors, with nodes for each discrete value. 
- AlgebraicDecisionTree error_tree = mixtureFactor.errorTree(continuousValues); + AlgebraicDecisionTree error_tree = + mixtureFactor.errorTree(continuousValues); std::vector discrete_keys = {m1}; // Error values for regression test @@ -191,8 +196,240 @@ TEST(GaussianMixtureFactor, Error) { DiscreteValues discreteValues; discreteValues[m1.first] = 1; EXPECT_DOUBLES_EQUAL( - 4.0, mixtureFactor.error({continuousValues, discreteValues}), - 1e-9); + 4.0, mixtureFactor.error({continuousValues, discreteValues}), 1e-9); +} + +/* ************************************************************************* */ +// Test components with differing means +TEST(GaussianMixtureFactor, DifferentMeans) { + DiscreteKey m1(M(1), 2), m2(M(2), 2); + + Values values; + double x1 = 0.0, x2 = 1.75, x3 = 2.60; + values.insert(X(1), x1); + values.insert(X(2), x2); + values.insert(X(3), x3); + + auto model0 = noiseModel::Isotropic::Sigma(1, 1e-0); + auto model1 = noiseModel::Isotropic::Sigma(1, 1e-0); + auto prior_noise = noiseModel::Isotropic::Sigma(1, 1e-0); + + auto f0 = std::make_shared>(X(1), X(2), 0.0, model0) + ->linearize(values); + auto f1 = std::make_shared>(X(1), X(2), 2.0, model1) + ->linearize(values); + std::vector factors{f0, f1}; + + GaussianMixtureFactor mixtureFactor({X(1), X(2)}, {m1}, factors, true); + HybridGaussianFactorGraph hfg; + hfg.push_back(mixtureFactor); + + f0 = std::make_shared>(X(2), X(3), 0.0, model0) + ->linearize(values); + f1 = std::make_shared>(X(2), X(3), 2.0, model1) + ->linearize(values); + std::vector factors23{f0, f1}; + hfg.push_back(GaussianMixtureFactor({X(2), X(3)}, {m2}, factors23, true)); + + auto prior = PriorFactor(X(1), x1, prior_noise).linearize(values); + hfg.push_back(prior); + + hfg.push_back(PriorFactor(X(2), 2.0, prior_noise).linearize(values)); + + auto bn = hfg.eliminateSequential(); + HybridValues actual = bn->optimize(); + + HybridValues expected( + VectorValues{ + {X(1), Vector1(0.0)}, {X(2), Vector1(0.25)}, {X(3), Vector1(-0.6)}}, + DiscreteValues{{M(1), 1}, {M(2), 0}}); + + EXPECT(assert_equal(expected, actual)); + + { + DiscreteValues dv{{M(1), 0}, {M(2), 0}}; + VectorValues cont = bn->optimize(dv); + double error = bn->error(HybridValues(cont, dv)); + // regression + EXPECT_DOUBLES_EQUAL(1.77418393408, error, 1e-9); + } + { + DiscreteValues dv{{M(1), 0}, {M(2), 1}}; + VectorValues cont = bn->optimize(dv); + double error = bn->error(HybridValues(cont, dv)); + // regression + EXPECT_DOUBLES_EQUAL(1.77418393408, error, 1e-9); + } + { + DiscreteValues dv{{M(1), 1}, {M(2), 0}}; + VectorValues cont = bn->optimize(dv); + double error = bn->error(HybridValues(cont, dv)); + // regression + EXPECT_DOUBLES_EQUAL(1.10751726741, error, 1e-9); + } + { + DiscreteValues dv{{M(1), 1}, {M(2), 1}}; + VectorValues cont = bn->optimize(dv); + double error = bn->error(HybridValues(cont, dv)); + // regression + EXPECT_DOUBLES_EQUAL(1.10751726741, error, 1e-9); + } +} + +/* ************************************************************************* */ +/** + * @brief Test components with differing covariances. 
+ * The factor graph is + * *-X1-*-X2 + * | + * M1 + */ +TEST(GaussianMixtureFactor, DifferentCovariances) { + DiscreteKey m1(M(1), 2); + + Values values; + double x1 = 1.0, x2 = 1.0; + values.insert(X(1), x1); + values.insert(X(2), x2); + + double between = 0.0; + + auto model0 = noiseModel::Isotropic::Sigma(1, 1e2); + auto model1 = noiseModel::Isotropic::Sigma(1, 1e-2); + auto prior_noise = noiseModel::Isotropic::Sigma(1, 1e-3); + + auto f0 = + std::make_shared>(X(1), X(2), between, model0); + auto f1 = + std::make_shared>(X(1), X(2), between, model1); + std::vector factors{f0, f1}; + + // Create via toFactorGraph + using symbol_shorthand::Z; + Matrix H0_1, H0_2, H1_1, H1_2; + Vector d0 = f0->evaluateError(x1, x2, &H0_1, &H0_2); + std::vector> terms0 = {{Z(1), gtsam::I_1x1 /*Rx*/}, + // + {X(1), H0_1 /*Sp1*/}, + {X(2), H0_2 /*Tp2*/}}; + + Vector d1 = f1->evaluateError(x1, x2, &H1_1, &H1_2); + std::vector> terms1 = {{Z(1), gtsam::I_1x1 /*Rx*/}, + // + {X(1), H1_1 /*Sp1*/}, + {X(2), H1_2 /*Tp2*/}}; + gtsam::GaussianMixtureFactor gmf( + {X(1), X(2)}, {m1}, + {std::make_shared(X(1), H0_1, X(2), H0_2, -d0, model0), + std::make_shared(X(1), H1_1, X(2), H1_2, -d1, model1)}, + true); + + // Create FG with single GaussianMixtureFactor + HybridGaussianFactorGraph mixture_fg; + mixture_fg.add(gmf); + + // Linearized prior factor on X1 + auto prior = PriorFactor(X(1), x1, prior_noise).linearize(values); + mixture_fg.push_back(prior); + + auto hbn = mixture_fg.eliminateSequential(); + // hbn->print(); + + VectorValues cv; + cv.insert(X(1), Vector1(0.0)); + cv.insert(X(2), Vector1(0.0)); + + // Check that the error values at the MLE point μ. + AlgebraicDecisionTree errorTree = hbn->errorTree(cv); + + DiscreteValues dv0{{M(1), 0}}; + DiscreteValues dv1{{M(1), 1}}; + + // regression + EXPECT_DOUBLES_EQUAL(9.90348755254, errorTree(dv0), 1e-9); + EXPECT_DOUBLES_EQUAL(0.69314718056, errorTree(dv1), 1e-9); + + DiscreteConditional expected_m1(m1, "0.5/0.5"); + DiscreteConditional actual_m1 = *(hbn->at(2)->asDiscrete()); + + EXPECT(assert_equal(expected_m1, actual_m1)); +} + +/* ************************************************************************* */ +/** + * @brief Test components with differing covariances + * but with a Bayes net P(Z|X, M) converted to a FG. 
+ */ +TEST(GaussianMixtureFactor, DifferentCovariances2) { + DiscreteKey m1(M(1), 2); + + Values values; + double x1 = 1.0, x2 = 1.0; + values.insert(X(1), x1); + values.insert(X(2), x2); + + double between = 0.0; + + auto model0 = noiseModel::Isotropic::Sigma(1, 1e2); + auto model1 = noiseModel::Isotropic::Sigma(1, 1e-2); + auto prior_noise = noiseModel::Isotropic::Sigma(1, 1e-3); + + auto f0 = + std::make_shared>(X(1), X(2), between, model0); + auto f1 = + std::make_shared>(X(1), X(2), between, model1); + std::vector factors{f0, f1}; + + // Create via toFactorGraph + using symbol_shorthand::Z; + Matrix H0_1, H0_2, H1_1, H1_2; + Vector d0 = f0->evaluateError(x1, x2, &H0_1, &H0_2); + std::vector> terms0 = {{Z(1), gtsam::I_1x1 /*Rx*/}, + // + {X(1), H0_1 /*Sp1*/}, + {X(2), H0_2 /*Tp2*/}}; + + Vector d1 = f1->evaluateError(x1, x2, &H1_1, &H1_2); + std::vector> terms1 = {{Z(1), gtsam::I_1x1 /*Rx*/}, + // + {X(1), H1_1 /*Sp1*/}, + {X(2), H1_2 /*Tp2*/}}; + auto gm = new gtsam::GaussianMixture( + {Z(1)}, {X(1), X(2)}, {m1}, + {std::make_shared(terms0, 1, -d0, model0), + std::make_shared(terms1, 1, -d1, model1)}); + gtsam::HybridBayesNet bn; + bn.emplace_back(gm); + + gtsam::VectorValues measurements; + measurements.insert(Z(1), gtsam::Z_1x1); + // Create FG with single GaussianMixtureFactor + HybridGaussianFactorGraph mixture_fg = bn.toFactorGraph(measurements); + + // Linearized prior factor on X1 + auto prior = PriorFactor(X(1), x1, prior_noise).linearize(values); + mixture_fg.push_back(prior); + + auto hbn = mixture_fg.eliminateSequential(); + + VectorValues cv; + cv.insert(X(1), Vector1(0.0)); + cv.insert(X(2), Vector1(0.0)); + + // Check that the error values at the MLE point μ. + AlgebraicDecisionTree errorTree = hbn->errorTree(cv); + + DiscreteValues dv0{{M(1), 0}}; + DiscreteValues dv1{{M(1), 1}}; + + // regression + EXPECT_DOUBLES_EQUAL(9.90348755254, errorTree(dv0), 1e-9); + EXPECT_DOUBLES_EQUAL(0.69314718056, errorTree(dv1), 1e-9); + + DiscreteConditional expected_m1(m1, "0.5/0.5"); + DiscreteConditional actual_m1 = *(hbn->at(2)->asDiscrete()); + + EXPECT(assert_equal(expected_m1, actual_m1)); } /* ************************************************************************* */ diff --git a/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp b/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp index 93081d309..2d851b0ff 100644 --- a/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp +++ b/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp @@ -510,6 +510,7 @@ factor 0: b = [ -10 ] No noise model factor 1: +GaussianMixtureFactor Hybrid [x0 x1; m0]{ Choice(m0) 0 Leaf : @@ -534,6 +535,7 @@ Hybrid [x0 x1; m0]{ } factor 2: +GaussianMixtureFactor Hybrid [x1 x2; m1]{ Choice(m1) 0 Leaf : @@ -675,33 +677,41 @@ factor 6: P( m1 | m0 ): size: 3 conditional 0: Hybrid P( x0 | x1 m0) Discrete Keys = (m0, 2), + logNormalizationConstant: 1.38862 + Choice(m0) 0 Leaf p(x0 | x1) R = [ 10.0499 ] S[x1] = [ -0.0995037 ] d = [ -9.85087 ] + logNormalizationConstant: 1.38862 No noise model 1 Leaf p(x0 | x1) R = [ 10.0499 ] S[x1] = [ -0.0995037 ] d = [ -9.95037 ] + logNormalizationConstant: 1.38862 No noise model conditional 1: Hybrid P( x1 | x2 m0 m1) Discrete Keys = (m0, 2), (m1, 2), + logNormalizationConstant: 1.3935 + Choice(m1) 0 Choice(m0) 0 0 Leaf p(x1 | x2) R = [ 10.099 ] S[x2] = [ -0.0990196 ] d = [ -9.99901 ] + logNormalizationConstant: 1.3935 No noise model 0 1 Leaf p(x1 | x2) R = [ 10.099 ] S[x2] = [ -0.0990196 ] d = [ -9.90098 ] + logNormalizationConstant: 1.3935 No noise model 1 Choice(m0) 
@@ -709,16 +719,20 @@ conditional 1: Hybrid P( x1 | x2 m0 m1) R = [ 10.099 ] S[x2] = [ -0.0990196 ] d = [ -10.098 ] + logNormalizationConstant: 1.3935 No noise model 1 1 Leaf p(x1 | x2) R = [ 10.099 ] S[x2] = [ -0.0990196 ] d = [ -10 ] + logNormalizationConstant: 1.3935 No noise model conditional 2: Hybrid P( x2 | m0 m1) Discrete Keys = (m0, 2), (m1, 2), + logNormalizationConstant: 1.38857 + Choice(m1) 0 Choice(m0) 0 0 Leaf p(x2) @@ -726,6 +740,7 @@ conditional 2: Hybrid P( x2 | m0 m1) d = [ -10.1489 ] mean: 1 elements x2: -1.0099 + logNormalizationConstant: 1.38857 No noise model 0 1 Leaf p(x2) @@ -733,6 +748,7 @@ conditional 2: Hybrid P( x2 | m0 m1) d = [ -10.1479 ] mean: 1 elements x2: -1.0098 + logNormalizationConstant: 1.38857 No noise model 1 Choice(m0) @@ -741,6 +757,7 @@ conditional 2: Hybrid P( x2 | m0 m1) d = [ -10.0504 ] mean: 1 elements x2: -1.0001 + logNormalizationConstant: 1.38857 No noise model 1 1 Leaf p(x2) @@ -748,6 +765,7 @@ conditional 2: Hybrid P( x2 | m0 m1) d = [ -10.0494 ] mean: 1 elements x2: -1 + logNormalizationConstant: 1.38857 No noise model )"; From 9818f89ceca8000e24b7eb0e81bb5d05685718ca Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Tue, 20 Aug 2024 17:48:04 -0400 Subject: [PATCH 08/21] remove GaussianBayesNetTree methods --- gtsam/hybrid/GaussianMixture.cpp | 29 ----------------------------- gtsam/hybrid/GaussianMixture.h | 17 +---------------- gtsam/hybrid/HybridFactor.h | 2 -- 3 files changed, 1 insertion(+), 47 deletions(-) diff --git a/gtsam/hybrid/GaussianMixture.cpp b/gtsam/hybrid/GaussianMixture.cpp index 36a34226b..1411ede8d 100644 --- a/gtsam/hybrid/GaussianMixture.cpp +++ b/gtsam/hybrid/GaussianMixture.cpp @@ -93,35 +93,6 @@ GaussianFactorGraphTree GaussianMixture::asGaussianFactorGraphTree() const { return {conditionals_, wrap}; } -/* -*******************************************************************************/ -GaussianBayesNetTree GaussianMixture::add( - const GaussianBayesNetTree &sum) const { - using Y = GaussianBayesNet; - auto add = [](const Y &graph1, const Y &graph2) { - auto result = graph1; - if (graph2.size() == 0) { - return GaussianBayesNet(); - } - result.push_back(graph2); - return result; - }; - const auto tree = asGaussianBayesNetTree(); - return sum.empty() ? tree : sum.apply(tree, add); -} - -/* *******************************************************************************/ -GaussianBayesNetTree GaussianMixture::asGaussianBayesNetTree() const { - auto wrap = [](const GaussianConditional::shared_ptr &gc) { - if (gc) { - return GaussianBayesNet{gc}; - } else { - return GaussianBayesNet(); - } - }; - return {conditionals_, wrap}; -} - /* *******************************************************************************/ size_t GaussianMixture::nrComponents() const { size_t total = 0; diff --git a/gtsam/hybrid/GaussianMixture.h b/gtsam/hybrid/GaussianMixture.h index bfa342dcf..41692bb1c 100644 --- a/gtsam/hybrid/GaussianMixture.h +++ b/gtsam/hybrid/GaussianMixture.h @@ -67,17 +67,11 @@ class GTSAM_EXPORT GaussianMixture double logConstant_; ///< log of the normalization constant. /** - * @brief Convert a DecisionTree of factors into + * @brief Convert a GaussianMixture of conditionals into * a DecisionTree of Gaussian factor graphs. */ GaussianFactorGraphTree asGaussianFactorGraphTree() const; - /** - * @brief Convert a DecisionTree of conditionals into - * a DecisionTree of Gaussian Bayes nets. - */ - GaussianBayesNetTree asGaussianBayesNetTree() const; - /** * @brief Helper function to get the pruner functor. 
* @@ -256,15 +250,6 @@ class GTSAM_EXPORT GaussianMixture * @return GaussianFactorGraphTree */ GaussianFactorGraphTree add(const GaussianFactorGraphTree &sum) const; - - /** - * @brief Merge the Gaussian Bayes Nets in `this` and `sum` while - * maintaining the decision tree structure. - * - * @param sum Decision Tree of Gaussian Bayes Nets - * @return GaussianBayesNetTree - */ - GaussianBayesNetTree add(const GaussianBayesNetTree &sum) const; /// @} private: diff --git a/gtsam/hybrid/HybridFactor.h b/gtsam/hybrid/HybridFactor.h index 418489d66..a9c0e53d2 100644 --- a/gtsam/hybrid/HybridFactor.h +++ b/gtsam/hybrid/HybridFactor.h @@ -34,8 +34,6 @@ class HybridValues; /// Alias for DecisionTree of GaussianFactorGraphs using GaussianFactorGraphTree = DecisionTree; -/// Alias for DecisionTree of GaussianBayesNets -using GaussianBayesNetTree = DecisionTree; KeyVector CollectKeys(const KeyVector &continuousKeys, const DiscreteKeys &discreteKeys); From 8abd2756ea895dbd0da4ab5246730533f27075ff Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Wed, 21 Aug 2024 04:19:44 -0400 Subject: [PATCH 09/21] throw error if invalid assignment --- gtsam/hybrid/GaussianMixture.cpp | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/gtsam/hybrid/GaussianMixture.cpp b/gtsam/hybrid/GaussianMixture.cpp index 1411ede8d..0a0332af8 100644 --- a/gtsam/hybrid/GaussianMixture.cpp +++ b/gtsam/hybrid/GaussianMixture.cpp @@ -346,9 +346,10 @@ double GaussianMixture::error(const HybridValues &values) const { } } - // The discrete assignment is not valid so we return 0.0 erorr. + // The discrete assignment is not valid so we throw an error. if (!valid_assignment) { - return 0.0; + throw std::runtime_error( + "Invalid discrete values in values. Not all discrete keys specified."); } // Directly index to get the conditional, no need to build the whole tree. From 450fb0a016942b5ac3bc9f0639a2c951cddd61d9 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Wed, 21 Aug 2024 06:12:51 -0400 Subject: [PATCH 10/21] improve tests --- .../tests/testGaussianMixtureFactor.cpp | 328 ++++++++---------- 1 file changed, 143 insertions(+), 185 deletions(-) diff --git a/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp b/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp index 47b9ddc99..dfafb923b 100644 --- a/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp +++ b/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp @@ -38,6 +38,7 @@ using namespace gtsam; using noiseModel::Isotropic; using symbol_shorthand::M; using symbol_shorthand::X; +using symbol_shorthand::Z; /* ************************************************************************* */ // Check iterators of empty mixture. @@ -199,90 +200,161 @@ TEST(GaussianMixtureFactor, Error) { 4.0, mixtureFactor.error({continuousValues, discreteValues}), 1e-9); } +/** + * @brief Helper function to specify a Hybrid Bayes Net + * {P(X1) P(Z1 | X1, X2, M1)} and convert it to a Hybrid Factor Graph + * {P(X1)L(X1, X2, M1; Z1)} by converting to likelihoods given Z1. + * + * We can specify either different means or different sigmas, + * or both for each hybrid factor component. + * + * @param values Initial values for linearization. + * @param means The mean values for the conditional components. + * @param sigmas Noise model sigma values (standard deviation). + * @param m1 The discrete mode key. + * @param z1 The measurement value. 
+ * @return HybridGaussianFactorGraph
+ */
+HybridGaussianFactorGraph GetFactorGraphFromBayesNet(
+    const gtsam::Values &values, const std::vector<double> &means,
+    const std::vector<double> &sigmas, DiscreteKey &m1, double z1 = 0.0) {
+  // Noise models
+  auto model0 = noiseModel::Isotropic::Sigma(1, sigmas[0]);
+  auto model1 = noiseModel::Isotropic::Sigma(1, sigmas[1]);
+  auto prior_noise = noiseModel::Isotropic::Sigma(1, 1e-3);
+
+  // GaussianMixtureFactor component factors
+  auto f0 =
+      std::make_shared<BetweenFactor<double>>(X(1), X(2), means[0], model0);
+  auto f1 =
+      std::make_shared<BetweenFactor<double>>(X(1), X(2), means[1], model1);
+  std::vector<NoiseModelFactor::shared_ptr> factors{f0, f1};
+
+  /// Get terms for each p^m(z1 | x1, x2)
+  Matrix H0_1, H0_2, H1_1, H1_2;
+  double x1 = values.at<double>(X(1)), x2 = values.at<double>(X(2));
+  Vector d0 = f0->evaluateError(x1, x2, &H0_1, &H0_2);
+  std::vector<std::pair<Key, Matrix>> terms0 = {{Z(1), gtsam::I_1x1 /*Rx*/},
+                                                //
+                                                {X(1), H0_1 /*Sp1*/},
+                                                {X(2), H0_2 /*Tp2*/}};
+
+  Vector d1 = f1->evaluateError(x1, x2, &H1_1, &H1_2);
+  std::vector<std::pair<Key, Matrix>> terms1 = {{Z(1), gtsam::I_1x1 /*Rx*/},
+                                                //
+                                                {X(1), H1_1 /*Sp1*/},
+                                                {X(2), H1_2 /*Tp2*/}};
+  // Create conditional P(Z1 | X1, X2, M1)
+  auto gm = new gtsam::GaussianMixture(
+      {Z(1)}, {X(1), X(2)}, {m1},
+      {std::make_shared<GaussianConditional>(terms0, 1, -d0, model0),
+       std::make_shared<GaussianConditional>(terms1, 1, -d1, model1)});
+  gtsam::HybridBayesNet bn;
+  bn.emplace_back(gm);
+  // bn.print();
+
+  // Create FG via toFactorGraph
+  gtsam::VectorValues measurements;
+  measurements.insert(Z(1), gtsam::I_1x1 * z1);  // Set Z1 = z1
+  HybridGaussianFactorGraph mixture_fg = bn.toFactorGraph(measurements);
+
+  // Linearized prior factor on X1
+  auto prior = PriorFactor<double>(X(1), x1, prior_noise).linearize(values);
+  mixture_fg.push_back(prior);
+
+  return mixture_fg;
+}
+
+/* ************************************************************************* */
+/**
+ * @brief Test components with differing means.
+ *
+ * We specify a hybrid Bayes network P(Z | X, M) = p(X1) p(Z1 | X1, X2, M1),
+ * which is then converted to a factor graph by specifying Z1.
+ *
+ * p(Z1 | X1, X2, M1) has 2 factors each for the binary mode m1, with only the
+ * means being different.
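+ * Since the sigmas agree, the two components share the same normalization
+ * constant, so with no evidence on X2 the modes should be equally likely.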
+ */ +TEST(GaussianMixtureFactor, DifferentMeansHBN) { + DiscreteKey m1(M(1), 2); Values values; - double x1 = 0.0, x2 = 1.75, x3 = 2.60; + double x1 = 0.0, x2 = 1.75; values.insert(X(1), x1); values.insert(X(2), x2); - values.insert(X(3), x3); - auto model0 = noiseModel::Isotropic::Sigma(1, 1e-0); - auto model1 = noiseModel::Isotropic::Sigma(1, 1e-0); - auto prior_noise = noiseModel::Isotropic::Sigma(1, 1e-0); + // Different means, same sigma + std::vector means{0.0, 2.0}, sigmas{1e-0, 1e-0}; - auto f0 = std::make_shared>(X(1), X(2), 0.0, model0) - ->linearize(values); - auto f1 = std::make_shared>(X(1), X(2), 2.0, model1) - ->linearize(values); - std::vector factors{f0, f1}; - - GaussianMixtureFactor mixtureFactor({X(1), X(2)}, {m1}, factors, true); - HybridGaussianFactorGraph hfg; - hfg.push_back(mixtureFactor); - - f0 = std::make_shared>(X(2), X(3), 0.0, model0) - ->linearize(values); - f1 = std::make_shared>(X(2), X(3), 2.0, model1) - ->linearize(values); - std::vector factors23{f0, f1}; - hfg.push_back(GaussianMixtureFactor({X(2), X(3)}, {m2}, factors23, true)); - - auto prior = PriorFactor(X(1), x1, prior_noise).linearize(values); - hfg.push_back(prior); - - hfg.push_back(PriorFactor(X(2), 2.0, prior_noise).linearize(values)); - - auto bn = hfg.eliminateSequential(); - HybridValues actual = bn->optimize(); - - HybridValues expected( - VectorValues{ - {X(1), Vector1(0.0)}, {X(2), Vector1(0.25)}, {X(3), Vector1(-0.6)}}, - DiscreteValues{{M(1), 1}, {M(2), 0}}); - - EXPECT(assert_equal(expected, actual)); + HybridGaussianFactorGraph hfg = + GetFactorGraphFromBayesNet(values, means, sigmas, m1, 0.0); { - DiscreteValues dv{{M(1), 0}, {M(2), 0}}; - VectorValues cont = bn->optimize(dv); - double error = bn->error(HybridValues(cont, dv)); - // regression - EXPECT_DOUBLES_EQUAL(1.77418393408, error, 1e-9); + // With no measurement on X2, each mode should be equally likely + auto bn = hfg.eliminateSequential(); + HybridValues actual = bn->optimize(); + + HybridValues expected( + VectorValues{{X(1), Vector1(0.0)}, {X(2), Vector1(-1.75)}}, + DiscreteValues{{M(1), 0}}); + + EXPECT(assert_equal(expected, actual)); + + { + DiscreteValues dv{{M(1), 0}}; + VectorValues cont = bn->optimize(dv); + double error = bn->error(HybridValues(cont, dv)); + // regression + EXPECT_DOUBLES_EQUAL(0.69314718056, error, 1e-9); + } + { + DiscreteValues dv{{M(1), 1}}; + VectorValues cont = bn->optimize(dv); + double error = bn->error(HybridValues(cont, dv)); + // regression + EXPECT_DOUBLES_EQUAL(0.69314718056, error, 1e-9); + } } { - DiscreteValues dv{{M(1), 0}, {M(2), 1}}; - VectorValues cont = bn->optimize(dv); - double error = bn->error(HybridValues(cont, dv)); - // regression - EXPECT_DOUBLES_EQUAL(1.77418393408, error, 1e-9); - } - { - DiscreteValues dv{{M(1), 1}, {M(2), 0}}; - VectorValues cont = bn->optimize(dv); - double error = bn->error(HybridValues(cont, dv)); - // regression - EXPECT_DOUBLES_EQUAL(1.10751726741, error, 1e-9); - } - { - DiscreteValues dv{{M(1), 1}, {M(2), 1}}; - VectorValues cont = bn->optimize(dv); - double error = bn->error(HybridValues(cont, dv)); - // regression - EXPECT_DOUBLES_EQUAL(1.10751726741, error, 1e-9); + // If we add a measurement on X2, we have more information to work with. 
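+    // The measurement z2 = 2.0 agrees with the mean-2.0 component,
+    // so the optimum should now switch to mode m1 = 1.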
+ // Add a measurement on X2 + auto prior_noise = noiseModel::Isotropic::Sigma(1, 1e-3); + GaussianConditional meas_z2(Z(2), Vector1(2.0), I_1x1, X(2), I_1x1, + prior_noise); + auto prior_x2 = meas_z2.likelihood(Vector1(x2)); + + hfg.push_back(prior_x2); + + auto bn = hfg.eliminateSequential(); + HybridValues actual = bn->optimize(); + + HybridValues expected( + VectorValues{{X(1), Vector1(0.0)}, {X(2), Vector1(0.25)}}, + DiscreteValues{{M(1), 1}}); + + EXPECT(assert_equal(expected, actual)); + + { + DiscreteValues dv{{M(1), 0}}; + VectorValues cont = bn->optimize(dv); + double error = bn->error(HybridValues(cont, dv)); + // regression + EXPECT_DOUBLES_EQUAL(2.12692448787, error, 1e-9); + } + { + DiscreteValues dv{{M(1), 1}}; + VectorValues cont = bn->optimize(dv); + double error = bn->error(HybridValues(cont, dv)); + // regression + EXPECT_DOUBLES_EQUAL(0.126928487854, error, 1e-9); + } } } /* ************************************************************************* */ /** - * @brief Test components with differing covariances. - * The factor graph is - * *-X1-*-X2 - * | - * M1 + * @brief Test components with differing covariances + * but with a Bayes net P(Z|X, M) converted to a FG. */ TEST(GaussianMixtureFactor, DifferentCovariances) { DiscreteKey m1(M(1), 2); @@ -292,123 +364,9 @@ TEST(GaussianMixtureFactor, DifferentCovariances) { values.insert(X(1), x1); values.insert(X(2), x2); - double between = 0.0; - - auto model0 = noiseModel::Isotropic::Sigma(1, 1e2); - auto model1 = noiseModel::Isotropic::Sigma(1, 1e-2); - auto prior_noise = noiseModel::Isotropic::Sigma(1, 1e-3); - - auto f0 = - std::make_shared>(X(1), X(2), between, model0); - auto f1 = - std::make_shared>(X(1), X(2), between, model1); - std::vector factors{f0, f1}; - - // Create via toFactorGraph - using symbol_shorthand::Z; - Matrix H0_1, H0_2, H1_1, H1_2; - Vector d0 = f0->evaluateError(x1, x2, &H0_1, &H0_2); - std::vector> terms0 = {{Z(1), gtsam::I_1x1 /*Rx*/}, - // - {X(1), H0_1 /*Sp1*/}, - {X(2), H0_2 /*Tp2*/}}; - - Vector d1 = f1->evaluateError(x1, x2, &H1_1, &H1_2); - std::vector> terms1 = {{Z(1), gtsam::I_1x1 /*Rx*/}, - // - {X(1), H1_1 /*Sp1*/}, - {X(2), H1_2 /*Tp2*/}}; - gtsam::GaussianMixtureFactor gmf( - {X(1), X(2)}, {m1}, - {std::make_shared(X(1), H0_1, X(2), H0_2, -d0, model0), - std::make_shared(X(1), H1_1, X(2), H1_2, -d1, model1)}, - true); - - // Create FG with single GaussianMixtureFactor - HybridGaussianFactorGraph mixture_fg; - mixture_fg.add(gmf); - - // Linearized prior factor on X1 - auto prior = PriorFactor(X(1), x1, prior_noise).linearize(values); - mixture_fg.push_back(prior); - - auto hbn = mixture_fg.eliminateSequential(); - // hbn->print(); - - VectorValues cv; - cv.insert(X(1), Vector1(0.0)); - cv.insert(X(2), Vector1(0.0)); - - // Check that the error values at the MLE point μ. - AlgebraicDecisionTree errorTree = hbn->errorTree(cv); - - DiscreteValues dv0{{M(1), 0}}; - DiscreteValues dv1{{M(1), 1}}; - - // regression - EXPECT_DOUBLES_EQUAL(9.90348755254, errorTree(dv0), 1e-9); - EXPECT_DOUBLES_EQUAL(0.69314718056, errorTree(dv1), 1e-9); - - DiscreteConditional expected_m1(m1, "0.5/0.5"); - DiscreteConditional actual_m1 = *(hbn->at(2)->asDiscrete()); - - EXPECT(assert_equal(expected_m1, actual_m1)); -} - -/* ************************************************************************* */ -/** - * @brief Test components with differing covariances - * but with a Bayes net P(Z|X, M) converted to a FG. 
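 * Both components share the same mean, so the discrete posterior should stay
 * at 0.5/0.5 while the per-mode errors at the MLE point μ differ.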
- */ -TEST(GaussianMixtureFactor, DifferentCovariances2) { - DiscreteKey m1(M(1), 2); - - Values values; - double x1 = 1.0, x2 = 1.0; - values.insert(X(1), x1); - values.insert(X(2), x2); - - double between = 0.0; - - auto model0 = noiseModel::Isotropic::Sigma(1, 1e2); - auto model1 = noiseModel::Isotropic::Sigma(1, 1e-2); - auto prior_noise = noiseModel::Isotropic::Sigma(1, 1e-3); - - auto f0 = - std::make_shared>(X(1), X(2), between, model0); - auto f1 = - std::make_shared>(X(1), X(2), between, model1); - std::vector factors{f0, f1}; - - // Create via toFactorGraph - using symbol_shorthand::Z; - Matrix H0_1, H0_2, H1_1, H1_2; - Vector d0 = f0->evaluateError(x1, x2, &H0_1, &H0_2); - std::vector> terms0 = {{Z(1), gtsam::I_1x1 /*Rx*/}, - // - {X(1), H0_1 /*Sp1*/}, - {X(2), H0_2 /*Tp2*/}}; - - Vector d1 = f1->evaluateError(x1, x2, &H1_1, &H1_2); - std::vector> terms1 = {{Z(1), gtsam::I_1x1 /*Rx*/}, - // - {X(1), H1_1 /*Sp1*/}, - {X(2), H1_2 /*Tp2*/}}; - auto gm = new gtsam::GaussianMixture( - {Z(1)}, {X(1), X(2)}, {m1}, - {std::make_shared(terms0, 1, -d0, model0), - std::make_shared(terms1, 1, -d1, model1)}); - gtsam::HybridBayesNet bn; - bn.emplace_back(gm); - - gtsam::VectorValues measurements; - measurements.insert(Z(1), gtsam::Z_1x1); - // Create FG with single GaussianMixtureFactor - HybridGaussianFactorGraph mixture_fg = bn.toFactorGraph(measurements); - - // Linearized prior factor on X1 - auto prior = PriorFactor(X(1), x1, prior_noise).linearize(values); - mixture_fg.push_back(prior); + std::vector means{0.0, 0.0}, sigmas{1e2, 1e-2}; + HybridGaussianFactorGraph mixture_fg = + GetFactorGraphFromBayesNet(values, means, sigmas, m1); auto hbn = mixture_fg.eliminateSequential(); From 191fc3da118aedc9461c6cbb22a213538b9b2a27 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Wed, 21 Aug 2024 06:13:43 -0400 Subject: [PATCH 11/21] remove augmentation in GaussianMixture in favor of augmentation in GaussianMixtureFactor --- gtsam/hybrid/GaussianMixture.cpp | 18 ++---------------- gtsam/hybrid/GaussianMixtureFactor.cpp | 6 +++--- 2 files changed, 5 insertions(+), 19 deletions(-) diff --git a/gtsam/hybrid/GaussianMixture.cpp b/gtsam/hybrid/GaussianMixture.cpp index 0a0332af8..1440f7e9e 100644 --- a/gtsam/hybrid/GaussianMixture.cpp +++ b/gtsam/hybrid/GaussianMixture.cpp @@ -200,24 +200,10 @@ std::shared_ptr GaussianMixture::likelihood( const GaussianMixtureFactor::Factors likelihoods( conditionals_, [&](const GaussianConditional::shared_ptr &conditional) { const auto likelihood_m = conditional->likelihood(given); - const double Cgm_Kgcm = - logConstant_ - conditional->logNormalizationConstant(); - if (Cgm_Kgcm == 0.0) { - return likelihood_m; - } else { - // Add a constant factor to the likelihood in case the noise models - // are not all equal. 
- GaussianFactorGraph gfg; - gfg.push_back(likelihood_m); - Vector c(1); - c << std::sqrt(2.0 * Cgm_Kgcm); - auto constantFactor = std::make_shared(c); - gfg.push_back(constantFactor); - return std::make_shared(gfg); - } + return likelihood_m; }); return std::make_shared( - continuousParentKeys, discreteParentKeys, likelihoods); + continuousParentKeys, discreteParentKeys, likelihoods, true); } /* ************************************************************************* */ diff --git a/gtsam/hybrid/GaussianMixtureFactor.cpp b/gtsam/hybrid/GaussianMixtureFactor.cpp index e519cefe6..7fb16f0d1 100644 --- a/gtsam/hybrid/GaussianMixtureFactor.cpp +++ b/gtsam/hybrid/GaussianMixtureFactor.cpp @@ -29,7 +29,7 @@ namespace gtsam { /** - * @brief Helper function to correct the [A|b] matrices in the factor components + * @brief Helper function to augment the [A|b] matrices in the factor components * with the normalizer values. * This is done by storing the normalizer value in * the `b` vector as an additional row. @@ -39,7 +39,7 @@ namespace gtsam { * each component. * @return GaussianMixtureFactor::Factors */ -GaussianMixtureFactor::Factors correct( +GaussianMixtureFactor::Factors augment( const GaussianMixtureFactor::Factors &factors, bool varyingNormalizers) { if (!varyingNormalizers) { return factors; @@ -107,7 +107,7 @@ GaussianMixtureFactor::GaussianMixtureFactor(const KeyVector &continuousKeys, const Factors &factors, bool varyingNormalizers) : Base(continuousKeys, discreteKeys), - factors_(correct(factors, varyingNormalizers)) {} + factors_(augment(factors, varyingNormalizers)) {} /* *******************************************************************************/ bool GaussianMixtureFactor::equals(const HybridFactor &lf, double tol) const { From 41d6113aea27b8fd0bf16531d477c4f2356f0c79 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Wed, 21 Aug 2024 20:05:31 -0400 Subject: [PATCH 12/21] Kill == operator in VectorValues --- gtsam/linear/VectorValues.h | 5 ----- 1 file changed, 5 deletions(-) diff --git a/gtsam/linear/VectorValues.h b/gtsam/linear/VectorValues.h index 99ee4eb83..7fbd43ffc 100644 --- a/gtsam/linear/VectorValues.h +++ b/gtsam/linear/VectorValues.h @@ -263,11 +263,6 @@ namespace gtsam { /** equals required by Testable for unit testing */ bool equals(const VectorValues& x, double tol = 1e-9) const; - /// Check exact equality. - friend bool operator==(const VectorValues& lhs, const VectorValues& rhs) { - return lhs.equals(rhs); - } - /// @{ /// @name Advanced Interface /// @{ From 79c7c6a8b6f4bb2945ea6ac621fc3c3a1f66aaa6 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Wed, 21 Aug 2024 20:10:21 -0400 Subject: [PATCH 13/21] provide logNormalizers directly to the augment method --- gtsam/hybrid/GaussianMixtureFactor.cpp | 66 ++++++++++---------------- gtsam/hybrid/GaussianMixtureFactor.h | 20 ++++---- 2 files changed, 37 insertions(+), 49 deletions(-) diff --git a/gtsam/hybrid/GaussianMixtureFactor.cpp b/gtsam/hybrid/GaussianMixtureFactor.cpp index 7fb16f0d1..0427eef7b 100644 --- a/gtsam/hybrid/GaussianMixtureFactor.cpp +++ b/gtsam/hybrid/GaussianMixtureFactor.cpp @@ -35,45 +35,17 @@ namespace gtsam { * the `b` vector as an additional row. * * @param factors DecisionTree of GaussianFactor shared pointers. - * @param varyingNormalizers Flag indicating the normalizers are different for - * each component. + * @param logNormalizers Tree of log-normalizers corresponding to each + * Gaussian factor in factors. 
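+ * The log-normalizers are first shifted so that the smallest becomes zero
+ * (the square root of a negative number must be avoided), and each is then
+ * folded into its component's [A|b] matrix as an extra row.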
* @return GaussianMixtureFactor::Factors */ GaussianMixtureFactor::Factors augment( - const GaussianMixtureFactor::Factors &factors, bool varyingNormalizers) { - if (!varyingNormalizers) { - return factors; - } - - // First compute all the sqrt(|2 pi Sigma|) terms - auto computeNormalizers = [](const GaussianMixtureFactor::sharedFactor &gf) { - auto jf = std::dynamic_pointer_cast(gf); - // If we have, say, a Hessian factor, then no need to do anything - if (!jf) return 0.0; - - auto model = jf->get_model(); - // If there is no noise model, there is nothing to do. - if (!model) { - return 0.0; - } - // Since noise models are Gaussian, we can get the logDeterminant using the - // same trick as in GaussianConditional - double logDetR = - model->R().diagonal().unaryExpr([](double x) { return log(x); }).sum(); - double logDeterminantSigma = -2.0 * logDetR; - - size_t n = model->dim(); - constexpr double log2pi = 1.8378770664093454835606594728112; - return n * log2pi + logDeterminantSigma; - }; - - AlgebraicDecisionTree log_normalizers = - DecisionTree(factors, computeNormalizers); - + const GaussianMixtureFactor::Factors &factors, + const AlgebraicDecisionTree &logNormalizers) { // Find the minimum value so we can "proselytize" to positive values. // Done because we can't have sqrt of negative numbers. - double min_log_normalizer = log_normalizers.min(); - log_normalizers = log_normalizers.apply( + double min_log_normalizer = logNormalizers.min(); + AlgebraicDecisionTree log_normalizers = logNormalizers.apply( [&min_log_normalizer](double n) { return n - min_log_normalizer; }); // Finally, update the [A|b] matrices. @@ -82,8 +54,6 @@ GaussianMixtureFactor::Factors augment( const GaussianMixtureFactor::sharedFactor &gf) { auto jf = std::dynamic_pointer_cast(gf); if (!jf) return gf; - // If there is no noise model, there is nothing to do. 
- if (!jf->get_model()) return gf; // If the log_normalizer is 0, do nothing if (log_normalizers(assignment) == 0.0) return gf; @@ -102,12 +72,11 @@ GaussianMixtureFactor::Factors augment( } /* *******************************************************************************/ -GaussianMixtureFactor::GaussianMixtureFactor(const KeyVector &continuousKeys, - const DiscreteKeys &discreteKeys, - const Factors &factors, - bool varyingNormalizers) +GaussianMixtureFactor::GaussianMixtureFactor( + const KeyVector &continuousKeys, const DiscreteKeys &discreteKeys, + const Factors &factors, const AlgebraicDecisionTree &logNormalizers) : Base(continuousKeys, discreteKeys), - factors_(augment(factors, varyingNormalizers)) {} + factors_(augment(factors, logNormalizers)) {} /* *******************************************************************************/ bool GaussianMixtureFactor::equals(const HybridFactor &lf, double tol) const { @@ -194,6 +163,21 @@ double GaussianMixtureFactor::error(const HybridValues &values) const { const sharedFactor gf = factors_(values.discrete()); return gf->error(values.continuous()); } + /* *******************************************************************************/ +double ComputeLogNormalizer( + const noiseModel::Gaussian::shared_ptr &noise_model) { + // Since noise models are Gaussian, we can get the logDeterminant using + // the same trick as in GaussianConditional + double logDetR = noise_model->R() + .diagonal() + .unaryExpr([](double x) { return log(x); }) + .sum(); + double logDeterminantSigma = -2.0 * logDetR; + + size_t n = noise_model->dim(); + constexpr double log2pi = 1.8378770664093454835606594728112; + return n * log2pi + logDeterminantSigma; +} } // namespace gtsam diff --git a/gtsam/hybrid/GaussianMixtureFactor.h b/gtsam/hybrid/GaussianMixtureFactor.h index 588501bbe..6e9f6034e 100644 --- a/gtsam/hybrid/GaussianMixtureFactor.h +++ b/gtsam/hybrid/GaussianMixtureFactor.h @@ -82,13 +82,14 @@ class GTSAM_EXPORT GaussianMixtureFactor : public HybridFactor { * their cardinalities. * @param factors The decision tree of Gaussian factors stored as the mixture * density. - * @param varyingNormalizers Flag indicating factor components have varying - * normalizer values. + * @param logNormalizers Tree of log-normalizers corresponding to each + * Gaussian factor in factors. */ GaussianMixtureFactor(const KeyVector &continuousKeys, const DiscreteKeys &discreteKeys, const Factors &factors, - bool varyingNormalizers = false); + const AlgebraicDecisionTree &logNormalizers = + AlgebraicDecisionTree(0.0)); /** * @brief Construct a new GaussianMixtureFactor object using a vector of @@ -97,16 +98,16 @@ class GTSAM_EXPORT GaussianMixtureFactor : public HybridFactor { * @param continuousKeys Vector of keys for continuous factors. * @param discreteKeys Vector of discrete keys. * @param factors Vector of gaussian factor shared pointers. - * @param varyingNormalizers Flag indicating factor components have varying - * normalizer values. + * @param logNormalizers Tree of log-normalizers corresponding to each + * Gaussian factor in factors. 
   */
  GaussianMixtureFactor(const KeyVector &continuousKeys,
                        const DiscreteKeys &discreteKeys,
                        const std::vector<sharedFactor> &factors,
-                       bool varyingNormalizers = false)
+                       const AlgebraicDecisionTree<Key> &logNormalizers =
+                           AlgebraicDecisionTree<Key>(0.0))
      : GaussianMixtureFactor(continuousKeys, discreteKeys,
-                             Factors(discreteKeys, factors),
-                             varyingNormalizers) {}
+                             Factors(discreteKeys, factors), logNormalizers) {}
 
  /// @}
  /// @name Testable
@@ -178,4 +179,7 @@ template <>
 struct traits<GaussianMixtureFactor> : public Testable<GaussianMixtureFactor> {
 };
 
+double ComputeLogNormalizer(
+    const noiseModel::Gaussian::shared_ptr &noise_model);
+
 }  // namespace gtsam

From 9a6d2cf32384d437960def61cd3d547c5f73f41e Mon Sep 17 00:00:00 2001
From: Varun Agrawal
Date: Wed, 21 Aug 2024 20:10:40 -0400
Subject: [PATCH 14/21] update GaussianMixture::likelihood to compute the
 logNormalizers

---
 gtsam/hybrid/GaussianMixture.cpp | 19 ++++++++++++++++++-
 1 file changed, 18 insertions(+), 1 deletion(-)

diff --git a/gtsam/hybrid/GaussianMixture.cpp b/gtsam/hybrid/GaussianMixture.cpp
index 1440f7e9e..6c92f0252 100644
--- a/gtsam/hybrid/GaussianMixture.cpp
+++ b/gtsam/hybrid/GaussianMixture.cpp
@@ -202,8 +202,25 @@ std::shared_ptr<GaussianMixtureFactor> GaussianMixture::likelihood(
         const auto likelihood_m = conditional->likelihood(given);
         return likelihood_m;
       });
+
+  // First compute all the sqrt(|2 pi Sigma|) terms
+  auto computeLogNormalizers = [](const GaussianFactor::shared_ptr &gf) {
+    auto jf = std::dynamic_pointer_cast<JacobianFactor>(gf);
+    // If we have, say, a Hessian factor, then no need to do anything
+    if (!jf) return 0.0;
+
+    auto model = jf->get_model();
+    // If there is no noise model, there is nothing to do.
+    if (!model) {
+      return 0.0;
+    }
+    return ComputeLogNormalizer(model);
+  };
+
+  AlgebraicDecisionTree<Key> log_normalizers =
+      DecisionTree<Key, double>(likelihoods, computeLogNormalizers);
   return std::make_shared<GaussianMixtureFactor>(
-      continuousParentKeys, discreteParentKeys, likelihoods, true);
+      continuousParentKeys, discreteParentKeys, likelihoods, log_normalizers);
 }
 
 /* ************************************************************************* */

From 98a2668fb1e78684ec9fa290e032235162cf2fe9 Mon Sep 17 00:00:00 2001
From: Varun Agrawal
Date: Thu, 22 Aug 2024 11:57:57 -0400
Subject: [PATCH 15/21] add docstring and GTSAM_EXPORT for ComputeLogNormalizer

---
 gtsam/hybrid/GaussianMixtureFactor.h | 11 ++++++++++-
 1 file changed, 10 insertions(+), 1 deletion(-)

diff --git a/gtsam/hybrid/GaussianMixtureFactor.h b/gtsam/hybrid/GaussianMixtureFactor.h
index 6e9f6034e..6680abb54 100644
--- a/gtsam/hybrid/GaussianMixtureFactor.h
+++ b/gtsam/hybrid/GaussianMixtureFactor.h
@@ -179,7 +179,16 @@ template <>
 struct traits<GaussianMixtureFactor> : public Testable<GaussianMixtureFactor> {
 };
 
-double ComputeLogNormalizer(
+/**
+ * @brief Helper function to compute the sqrt(|2πΣ|) normalizer values
+ * for a Gaussian noise model.
+ * We compute this in the log-space for numerical accuracy.
+ *
+ * @param noise_model The Gaussian noise model
+ * whose normalizer we wish to compute.
+ * @return double
+ */
+GTSAM_EXPORT double ComputeLogNormalizer(
     const noiseModel::Gaussian::shared_ptr &noise_model);
 
 }  // namespace gtsam

From cd9ee0845765767e98c8f334e3a2cf4a4bed6a7e Mon Sep 17 00:00:00 2001
From: Varun Agrawal
Date: Thu, 22 Aug 2024 14:43:23 -0400
Subject: [PATCH 16/21] remove augment method in favor of conversion

---
 gtsam/hybrid/GaussianMixture.cpp       | 35 ++++++++---------
 gtsam/hybrid/GaussianMixtureFactor.cpp | 52 ++------------------------
 gtsam/hybrid/GaussianMixtureFactor.h   | 14 ++-----
 3 files changed, 23 insertions(+), 78 deletions(-)

diff --git a/gtsam/hybrid/GaussianMixture.cpp b/gtsam/hybrid/GaussianMixture.cpp
index 6c92f0252..0a0332af8 100644
--- a/gtsam/hybrid/GaussianMixture.cpp
+++ b/gtsam/hybrid/GaussianMixture.cpp
@@ -200,27 +200,24 @@ std::shared_ptr<GaussianMixtureFactor> GaussianMixture::likelihood(
   const GaussianMixtureFactor::Factors likelihoods(
       conditionals_, [&](const GaussianConditional::shared_ptr &conditional) {
         const auto likelihood_m = conditional->likelihood(given);
-        return likelihood_m;
+        const double Cgm_Kgcm =
+            logConstant_ - conditional->logNormalizationConstant();
+        if (Cgm_Kgcm == 0.0) {
+          return likelihood_m;
+        } else {
+          // Add a constant factor to the likelihood in case the noise models
+          // are not all equal.
+          GaussianFactorGraph gfg;
+          gfg.push_back(likelihood_m);
+          Vector c(1);
+          c << std::sqrt(2.0 * Cgm_Kgcm);
+          auto constantFactor = std::make_shared<JacobianFactor>(c);
+          gfg.push_back(constantFactor);
+          return std::make_shared<JacobianFactor>(gfg);
+        }
       });
-
-  // First compute all the sqrt(|2 pi Sigma|) terms
-  auto computeLogNormalizers = [](const GaussianFactor::shared_ptr &gf) {
-    auto jf = std::dynamic_pointer_cast<JacobianFactor>(gf);
-    // If we have, say, a Hessian factor, then no need to do anything
-    if (!jf) return 0.0;
-
-    auto model = jf->get_model();
-    // If there is no noise model, there is nothing to do.
-    if (!model) {
-      return 0.0;
-    }
-    return ComputeLogNormalizer(model);
-  };
-
-  AlgebraicDecisionTree<Key> log_normalizers =
-      DecisionTree<Key, double>(likelihoods, computeLogNormalizers);
   return std::make_shared<GaussianMixtureFactor>(
-      continuousParentKeys, discreteParentKeys, likelihoods, log_normalizers);
+      continuousParentKeys, discreteParentKeys, likelihoods);
 }
 
 /* ************************************************************************* */
diff --git a/gtsam/hybrid/GaussianMixtureFactor.cpp b/gtsam/hybrid/GaussianMixtureFactor.cpp
index 0427eef7b..b4ee89cb0 100644
--- a/gtsam/hybrid/GaussianMixtureFactor.cpp
+++ b/gtsam/hybrid/GaussianMixtureFactor.cpp
@@ -28,55 +28,11 @@ namespace gtsam {
 
-/**
- * @brief Helper function to augment the [A|b] matrices in the factor components
- * with the normalizer values.
- * This is done by storing the normalizer value in
- * the `b` vector as an additional row.
- *
- * @param factors DecisionTree of GaussianFactor shared pointers.
- * @param logNormalizers Tree of log-normalizers corresponding to each
- * Gaussian factor in factors.
- * @return GaussianMixtureFactor::Factors
- */
-GaussianMixtureFactor::Factors augment(
-    const GaussianMixtureFactor::Factors &factors,
-    const AlgebraicDecisionTree<Key> &logNormalizers) {
-  // Find the minimum value so we can "proselytize" to positive values.
-  // Done because we can't have sqrt of negative numbers.
-  double min_log_normalizer = logNormalizers.min();
-  AlgebraicDecisionTree<Key> log_normalizers = logNormalizers.apply(
-      [&min_log_normalizer](double n) { return n - min_log_normalizer; });
-
-  // Finally, update the [A|b] matrices.
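
Note: the 2.0 under the square root can be checked by hand (a worked check, not part of the patch). The appended JacobianFactor holds only b = [c], so its error is E = 0.5 * c^2; with c = sqrt(2K) this gives E = K exactly, where K = Cgm_Kgcm = logConstant_ - logNormalizationConstant(m) >= 0. Each component's unnormalized density is therefore scaled by exp(-K), and the component with the largest normalization constant (K = 0) is returned untouched:

    E = (1/2) c^2 = (1/2) (sqrt(2K))^2 = K.
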
-  auto update = [&log_normalizers](
-                    const Assignment<Key> &assignment,
-                    const GaussianMixtureFactor::sharedFactor &gf) {
-    auto jf = std::dynamic_pointer_cast<JacobianFactor>(gf);
-    if (!jf) return gf;
-    // If the log_normalizer is 0, do nothing
-    if (log_normalizers(assignment) == 0.0) return gf;
-
-    GaussianFactorGraph gfg;
-    gfg.push_back(jf);
-
-    Vector c(1);
-    c << std::sqrt(log_normalizers(assignment));
-    auto constantFactor = std::make_shared<JacobianFactor>(c);
-
-    gfg.push_back(constantFactor);
-    return std::dynamic_pointer_cast<GaussianFactor>(
-        std::make_shared<JacobianFactor>(gfg));
-  };
-  return factors.apply(update);
-}
-
 /* *******************************************************************************/
-GaussianMixtureFactor::GaussianMixtureFactor(
-    const KeyVector &continuousKeys, const DiscreteKeys &discreteKeys,
-    const Factors &factors, const AlgebraicDecisionTree<Key> &logNormalizers)
-    : Base(continuousKeys, discreteKeys),
-      factors_(augment(factors, logNormalizers)) {}
+GaussianMixtureFactor::GaussianMixtureFactor(const KeyVector &continuousKeys,
+                                             const DiscreteKeys &discreteKeys,
+                                             const Factors &factors)
+    : Base(continuousKeys, discreteKeys), factors_(factors) {}
 
 /* *******************************************************************************/
 bool GaussianMixtureFactor::equals(const HybridFactor &lf, double tol) const {
diff --git a/gtsam/hybrid/GaussianMixtureFactor.h b/gtsam/hybrid/GaussianMixtureFactor.h
index 6680abb54..4570268f1 100644
--- a/gtsam/hybrid/GaussianMixtureFactor.h
+++ b/gtsam/hybrid/GaussianMixtureFactor.h
@@ -82,14 +82,10 @@ class GTSAM_EXPORT GaussianMixtureFactor : public HybridFactor {
    * their cardinalities.
    * @param factors The decision tree of Gaussian factors stored as the mixture
    * density.
-   * @param logNormalizers Tree of log-normalizers corresponding to each
-   * Gaussian factor in factors.
    */
   GaussianMixtureFactor(const KeyVector &continuousKeys,
                         const DiscreteKeys &discreteKeys,
-                        const Factors &factors,
-                        const AlgebraicDecisionTree<Key> &logNormalizers =
-                            AlgebraicDecisionTree<Key>(0.0));
+                        const Factors &factors);
 
   /**
    * @brief Construct a new GaussianMixtureFactor object using a vector of
    * Gaussian factors.
    *
    * @param continuousKeys Vector of keys for continuous factors.
    * @param discreteKeys Vector of discrete keys.
    * @param factors Vector of gaussian factor shared pointers.
-   * @param logNormalizers Tree of log-normalizers corresponding to each
-   * Gaussian factor in factors.
   */
  GaussianMixtureFactor(const KeyVector &continuousKeys,
                        const DiscreteKeys &discreteKeys,
-                       const std::vector<sharedFactor> &factors,
-                       const AlgebraicDecisionTree<Key> &logNormalizers =
-                           AlgebraicDecisionTree<Key>(0.0))
+                       const std::vector<sharedFactor> &factors)
      : GaussianMixtureFactor(continuousKeys, discreteKeys,
-                             Factors(discreteKeys, factors), logNormalizers) {}
+                             Factors(discreteKeys, factors)) {}
 
  /// @}
  /// @name Testable

From 91ccbebf224c52063dd43ffb69ecb7cef889cfbd Mon Sep 17 00:00:00 2001
From: Varun Agrawal
Date: Thu, 22 Aug 2024 14:44:54 -0400
Subject: [PATCH 17/21] remove ComputeLogNormalizer function

---
 gtsam/hybrid/GaussianMixtureFactor.cpp | 16 ----------------
 gtsam/hybrid/GaussianMixtureFactor.h   | 12 ------------
 2 files changed, 28 deletions(-)

diff --git a/gtsam/hybrid/GaussianMixtureFactor.cpp b/gtsam/hybrid/GaussianMixtureFactor.cpp
index b4ee89cb0..8471cef33 100644
--- a/gtsam/hybrid/GaussianMixtureFactor.cpp
+++ b/gtsam/hybrid/GaussianMixtureFactor.cpp
@@ -120,20 +120,4 @@ double GaussianMixtureFactor::error(const HybridValues &values) const {
   return gf->error(values.continuous());
 }
 
-/* *******************************************************************************/
-double ComputeLogNormalizer(
-    const noiseModel::Gaussian::shared_ptr &noise_model) {
-  // Since noise models are Gaussian, we can get the logDeterminant using
-  // the same trick as in GaussianConditional
-  double logDetR = noise_model->R()
-                       .diagonal()
-                       .unaryExpr([](double x) { return log(x); })
-                       .sum();
-  double logDeterminantSigma = -2.0 * logDetR;
-
-  size_t n = noise_model->dim();
-  constexpr double log2pi = 1.8378770664093454835606594728112;
-  return n * log2pi + logDeterminantSigma;
-}
-
 }  // namespace gtsam
diff --git a/gtsam/hybrid/GaussianMixtureFactor.h b/gtsam/hybrid/GaussianMixtureFactor.h
index 4570268f1..b1be6ef1d 100644
--- a/gtsam/hybrid/GaussianMixtureFactor.h
+++ b/gtsam/hybrid/GaussianMixtureFactor.h
@@ -171,16 +171,4 @@ template <>
 struct traits<GaussianMixtureFactor> : public Testable<GaussianMixtureFactor> {
 };
 
-/**
- * @brief Helper function to compute the sqrt(|2πΣ|) normalizer values
- * for a Gaussian noise model.
- * We compute this in the log-space for numerical accuracy.
- *
- * @param noise_model The Gaussian noise model
- * whose normalizer we wish to compute.
- * @return double
- */
-GTSAM_EXPORT double ComputeLogNormalizer(
-    const noiseModel::Gaussian::shared_ptr &noise_model);
-
 }  // namespace gtsam

From 3d8383089bb19e0f60ce1afa545bf6e27de1e70e Mon Sep 17 00:00:00 2001
From: Varun Agrawal
Date: Sat, 24 Aug 2024 22:34:57 -0400
Subject: [PATCH 18/21] don't check if GaussianFactor has no contKeys since it may not (e.g.
 GaussianMixtureModel)

---
 gtsam/hybrid/GaussianMixtureFactor.cpp | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/gtsam/hybrid/GaussianMixtureFactor.cpp b/gtsam/hybrid/GaussianMixtureFactor.cpp
index 8471cef33..94bc09407 100644
--- a/gtsam/hybrid/GaussianMixtureFactor.cpp
+++ b/gtsam/hybrid/GaussianMixtureFactor.cpp
@@ -66,7 +66,7 @@ void GaussianMixtureFactor::print(const std::string &s,
       [&](const sharedFactor &gf) -> std::string {
         RedirectCout rd;
         std::cout << ":\n";
-        if (gf && !gf->empty()) {
+        if (gf) {
           gf->print("", formatter);
           return rd.str();
         } else {

From 1f373fd136e1ac46a3e06b1f568e3bff1737989f Mon Sep 17 00:00:00 2001
From: Varun Agrawal
Date: Sun, 25 Aug 2024 00:48:32 -0400
Subject: [PATCH 19/21] add examples of Hybrid FGs from HBNs as unit tests

---
 .../tests/testGaussianMixtureFactor.cpp | 233 +++++++++++++++++++++++-
 1 file changed, 229 insertions(+), 4 deletions(-)

diff --git a/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp b/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp
index dfafb923b..0910d2f40 100644
--- a/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp
+++ b/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp
@@ -200,6 +200,228 @@ TEST(GaussianMixtureFactor, Error) {
       4.0, mixtureFactor.error({continuousValues, discreteValues}), 1e-9);
 }
 
+/* ************************************************************************* */
+/**
+ * Test a simple Gaussian Mixture Model represented as P(m)P(z|m)
+ * where m is a discrete variable and z is a continuous variable.
+ * m is binary, and depending on m we have 2 different means
+ * μ0 and μ1 for the Gaussian distribution from which we sample z.
+ *
+ * The resulting factor graph should eliminate to a Bayes net
+ * which represents a sigmoid function.
+ */
+TEST(GaussianMixtureFactor, GaussianMixtureModel) {
+  double mu0 = 1.0, mu1 = 3.0;
+  double sigma = 2.0;
+  auto model = noiseModel::Isotropic::Sigma(1, sigma);
+
+  DiscreteKey m(M(0), 2);
+  Key z = Z(0);
+
+  auto c0 = make_shared<GaussianConditional>(z, Vector1(mu0), I_1x1, model),
+       c1 = make_shared<GaussianConditional>(z, Vector1(mu1), I_1x1, model);
+
+  auto gm = new GaussianMixture({z}, {}, {m}, {c0, c1});
+  auto mixing = new DiscreteConditional(m, "0.5/0.5");
+
+  HybridBayesNet hbn;
+  hbn.emplace_back(gm);
+  hbn.emplace_back(mixing);
+
+  // The result should be a sigmoid.
+  // So P(m) should be 0.5/0.5 at z = 3.0 - 1.0 = 2.0.
+  VectorValues given;
+  given.insert(z, Vector1(mu1 - mu0));
+
+  HybridGaussianFactorGraph gfg = hbn.toFactorGraph(given);
+  HybridBayesNet::shared_ptr bn = gfg.eliminateSequential();
+
+  HybridBayesNet expected;
+  expected.emplace_back(new DiscreteConditional(m, "0.5/0.5"));
+
+  EXPECT(assert_equal(expected, *bn));
+}
+
+/* ************************************************************************* */
+/**
+ * Test a simple Gaussian Mixture Model represented as P(m)P(z|m)
+ * where m is a discrete variable and z is a continuous variable.
+ * m is binary, and depending on m we have 2 different means
+ * and covariances for the Gaussian distribution
+ * from which we sample z.
+ *
+ * The resulting factor graph should eliminate to a Bayes net
+ * which represents a sigmoid function leaning towards
+ * the tighter covariance Gaussian.
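
Note: the sigmoid claimed by the two GaussianMixtureModel tests can be verified in closed form; a standalone sketch (names invented, not GTSAM API):

    #include <cmath>

    // Posterior P(m=1 | z) for a two-component 1D mixture with equal priors.
    // The log-odds include the normalizer term log(sigma0/sigma1), which is
    // what pulls the curve towards the tighter component.
    double posteriorM1(double z, double mu0, double sigma0, double mu1,
                       double sigma1) {
      const double logOdds = std::log(sigma0 / sigma1) +
                             (z - mu0) * (z - mu0) / (2.0 * sigma0 * sigma0) -
                             (z - mu1) * (z - mu1) / (2.0 * sigma1 * sigma1);
      return 1.0 / (1.0 + std::exp(-logOdds));
    }

    // posteriorM1(2.0, 1.0, 2.0, 3.0, 2.0) == 0.5, matching the equal-sigma
    // test above, and posteriorM1(2.0, 1.0, 8.0, 3.0, 4.0) ≈ 0.661438,
    // matching the 0.338.../0.661... split expected in GaussianMixtureModel2.
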
+ */
+TEST(GaussianMixtureFactor, GaussianMixtureModel2) {
+  double mu0 = 1.0, mu1 = 3.0;
+  auto model0 = noiseModel::Isotropic::Sigma(1, 8.0);
+  auto model1 = noiseModel::Isotropic::Sigma(1, 4.0);
+
+  DiscreteKey m(M(0), 2);
+  Key z = Z(0);
+
+  auto c0 = make_shared<GaussianConditional>(z, Vector1(mu0), I_1x1, model0),
+       c1 = make_shared<GaussianConditional>(z, Vector1(mu1), I_1x1, model1);
+
+  auto gm = new GaussianMixture({z}, {}, {m}, {c0, c1});
+  auto mixing = new DiscreteConditional(m, "0.5/0.5");
+
+  HybridBayesNet hbn;
+  hbn.emplace_back(gm);
+  hbn.emplace_back(mixing);
+
+  // The result should be a sigmoid leaning towards model1
+  // since it has the tighter covariance.
+  // So P(m) should be about 0.34/0.66 at z = 3.0 - 1.0 = 2.0.
+  VectorValues given;
+  given.insert(z, Vector1(mu1 - mu0));
+  HybridGaussianFactorGraph gfg = hbn.toFactorGraph(given);
+  HybridBayesNet::shared_ptr bn = gfg.eliminateSequential();
+
+  HybridBayesNet expected;
+  expected.emplace_back(
+      new DiscreteConditional(m, "0.338561851224/0.661438148776"));
+
+  EXPECT(assert_equal(expected, *bn));
+}
+
+/* ************************************************************************* */
+/**
+ * Test a model P(x0)P(z0|x0)p(x1|m1)p(z1|x1)p(m1).
+ *
+ * p(x1|m1) has different means and the same covariance.
+ *
+ * Converting to a factor graph gives us
+ * P(x0)ϕ(x0)P(x1|m1)ϕ(x1)P(m1)
+ *
+ * If we only have a measurement on z0, then
+ * the posterior on m1 should be 0.5/0.5.
+ * Getting a measurement on z1 gives us more information.
+ */
+TEST(GaussianMixtureFactor, TwoStateModel) {
+  double mu0 = 1.0, mu1 = 3.0;
+  auto model = noiseModel::Isotropic::Sigma(1, 2.0);
+
+  DiscreteKey m1(M(1), 2);
+  Key z0 = Z(0), z1 = Z(1), x0 = X(0), x1 = X(1);
+
+  auto c0 = make_shared<GaussianConditional>(x1, Vector1(mu0), I_1x1, model),
+       c1 = make_shared<GaussianConditional>(x1, Vector1(mu1), I_1x1, model);
+
+  auto p_x0 = new GaussianConditional(x0, Vector1(0.0), I_1x1,
+                                      noiseModel::Isotropic::Sigma(1, 1.0));
+  auto p_z0x0 = new GaussianConditional(z0, Vector1(0.0), I_1x1, x0, -I_1x1,
+                                        noiseModel::Isotropic::Sigma(1, 1.0));
+  auto p_x1m1 = new GaussianMixture({x1}, {}, {m1}, {c0, c1});
+  auto p_z1x1 = new GaussianConditional(z1, Vector1(0.0), I_1x1, x1, -I_1x1,
+                                        noiseModel::Isotropic::Sigma(1, 3.0));
+  auto p_m1 = new DiscreteConditional(m1, "0.5/0.5");
+
+  HybridBayesNet hbn;
+  hbn.emplace_back(p_x0);
+  hbn.emplace_back(p_z0x0);
+  hbn.emplace_back(p_x1m1);
+  hbn.emplace_back(p_m1);
+
+  VectorValues given;
+  given.insert(z0, Vector1(0.5));
+
+  {
+    // Start with no measurement on x1, only on x0
+    HybridGaussianFactorGraph gfg = hbn.toFactorGraph(given);
+    HybridBayesNet::shared_ptr bn = gfg.eliminateSequential();
+
+    // Since there is no measurement on x1, we hedge our bets
+    DiscreteConditional expected(m1, "0.5/0.5");
+
+    EXPECT(assert_equal(expected, *(bn->at(2)->asDiscrete())));
+  }
+
+  {
+    // Now we add a measurement z1 on x1
+    hbn.emplace_back(p_z1x1);
+
+    given.insert(z1, Vector1(2.2));
+    HybridGaussianFactorGraph gfg = hbn.toFactorGraph(given);
+    HybridBayesNet::shared_ptr bn = gfg.eliminateSequential();
+
+    // Since we have a measurement on z1, we get a definite result
+    DiscreteConditional expected(m1, "0.4923083/0.5076917");
+
+    EXPECT(assert_equal(expected, *(bn->at(2)->asDiscrete()), 1e-6));
+  }
+}
+
+/* ************************************************************************* */
+/**
+ * Test a model P(x0)P(z0|x0)p(x1|m1)p(z1|x1)p(m1).
+ *
+ * p(x1|m1) has different means and different covariances.
+ *
+ * Converting to a factor graph gives us
+ * P(x0)ϕ(x0)P(x1|m1)ϕ(x1)P(m1)
+ *
+ * If we only have a measurement on z0, then
+ * the posterior on m1 reflects the ratio of the component sigmas.
+ * Getting a measurement on z1 gives us more information.
+ */
+TEST(GaussianMixtureFactor, TwoStateModel2) {
+  double mu0 = 1.0, mu1 = 3.0;
+  auto model0 = noiseModel::Isotropic::Sigma(1, 6.0);
+  auto model1 = noiseModel::Isotropic::Sigma(1, 4.0);
+
+  DiscreteKey m1(M(1), 2);
+  Key z0 = Z(0), z1 = Z(1), x0 = X(0), x1 = X(1);
+
+  auto c0 = make_shared<GaussianConditional>(x1, Vector1(mu0), I_1x1, model0),
+       c1 = make_shared<GaussianConditional>(x1, Vector1(mu1), I_1x1, model1);
+
+  auto p_x0 = new GaussianConditional(x0, Vector1(0.0), I_1x1,
+                                      noiseModel::Isotropic::Sigma(1, 1.0));
+  auto p_z0x0 = new GaussianConditional(z0, Vector1(0.0), I_1x1, x0, -I_1x1,
+                                        noiseModel::Isotropic::Sigma(1, 1.0));
+  auto p_x1m1 = new GaussianMixture({x1}, {}, {m1}, {c0, c1});
+  auto p_z1x1 = new GaussianConditional(z1, Vector1(0.0), I_1x1, x1, -I_1x1,
+                                        noiseModel::Isotropic::Sigma(1, 3.0));
+  auto p_m1 = new DiscreteConditional(m1, "0.5/0.5");
+
+  HybridBayesNet hbn;
+  hbn.emplace_back(p_x0);
+  hbn.emplace_back(p_z0x0);
+  hbn.emplace_back(p_x1m1);
+  hbn.emplace_back(p_m1);
+
+  VectorValues given;
+  given.insert(z0, Vector1(0.5));
+
+  {
+    // Start with no measurement on x1, only on x0
+    HybridGaussianFactorGraph gfg = hbn.toFactorGraph(given);
+    HybridBayesNet::shared_ptr bn = gfg.eliminateSequential();
+
+    // Since there is no measurement on x1, we get the ratio of the
+    // component sigmas (6:4).
+    DiscreteConditional expected(m1, "0.6/0.4");
+
+    EXPECT(assert_equal(expected, *(bn->at(2)->asDiscrete())));
+  }
+
+  {
+    // Now we add a measurement z1 on x1
+    hbn.emplace_back(p_z1x1);
+
+    given.insert(z1, Vector1(2.2));
+    HybridGaussianFactorGraph gfg = hbn.toFactorGraph(given);
+    HybridBayesNet::shared_ptr bn = gfg.eliminateSequential();
+
+    // Since we have a measurement on z1, we get a definite result
+    DiscreteConditional expected(m1, "0.52706646/0.47293354");
+
+    EXPECT(assert_equal(expected, *(bn->at(2)->asDiscrete()), 1e-6));
+  }
+}
+
 /**
  * @brief Helper function to specify a Hybrid Bayes Net
  * {P(X1) P(Z1 | X1, X2, M1)} and convert it to a Hybrid Factor Graph
@@ -271,11 +493,12 @@ HybridGaussianFactorGraph GetFactorGraphFromBayesNet(
  *
  * We specify a hybrid Bayes network P(Z | X, M) = p(X1)p(Z1 | X1, X2, M1),
  * which is then converted to a factor graph by specifying Z1.
- *
- * p(Z1 | X1, X2, M1) has 2 factors each for the binary mode m1, with only the
- * means being different.
+ * This is a different case since now we have a hybrid factor
+ * with 2 continuous variables ϕ(x1, x2, m1).
+ * p(Z1 | X1, X2, M1) has 2 factors each for the binary
+ * mode m1, with only the means being different.
  */
-TEST(GaussianMixtureFactor, DifferentMeansHBN) {
+TEST(GaussianMixtureFactor, DifferentMeans) {
   DiscreteKey m1(M(1), 2);
 
   Values values;
@@ -355,6 +578,8 @@
 /**
  * @brief Test components with differing covariances
  * but with a Bayes net P(Z|X, M) converted to a FG.
+ * Same as the DifferentMeans example but in this case,
+ * we keep the means the same and vary the covariances.
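
Note: the 0.6/0.4 expectation in TwoStateModel2 can be checked by hand; it is the ratio of the component sigmas rather than of the covariances. A hedged sketch of where it comes from, assuming the conversion keeps each mode's Gaussian factor unnormalized: with no measurement on x1, eliminating x1 integrates each mode's factor,

    \int exp(-(x1 - mu_m)^2 / (2 sigma_m^2)) dx1 = sigma_m * sqrt(2 pi),

so the discrete marginal is proportional to sigma_m, giving 6/(6+4) = 0.6 for m1 = 0 and 4/(6+4) = 0.4 for m1 = 1.
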
  */
 TEST(GaussianMixtureFactor, DifferentCovariances) {
   DiscreteKey m1(M(1), 2);

From 7358effe038d0cb82c9b378a21c8afabfa0ea269 Mon Sep 17 00:00:00 2001
From: Varun Agrawal
Date: Sun, 25 Aug 2024 00:49:28 -0400
Subject: [PATCH 20/21] allow for GaussianMixtureFactors with 0 continuous keys

---
 gtsam/hybrid/HybridGaussianFactorGraph.cpp | 12 ++++++++++++
 1 file changed, 12 insertions(+)

diff --git a/gtsam/hybrid/HybridGaussianFactorGraph.cpp b/gtsam/hybrid/HybridGaussianFactorGraph.cpp
index bf11a50fc..cb8ceed20 100644
--- a/gtsam/hybrid/HybridGaussianFactorGraph.cpp
+++ b/gtsam/hybrid/HybridGaussianFactorGraph.cpp
@@ -242,6 +242,18 @@ discreteElimination(const HybridGaussianFactorGraph &factors,
   for (auto &f : factors) {
     if (auto df = dynamic_pointer_cast<DiscreteFactor>(f)) {
       dfg.push_back(df);
+    } else if (auto gmf = dynamic_pointer_cast<GaussianMixtureFactor>(f)) {
+      // Case where we have a GaussianMixtureFactor with no continuous keys.
+      // In this case, compute discrete probabilities.
+      auto probability =
+          [&](const GaussianFactor::shared_ptr &factor) -> double {
+        if (!factor) return 0.0;
+        return exp(-factor->error(VectorValues()));
+      };
+      dfg.emplace_shared<DecisionTreeFactor>(
+          gmf->discreteKeys(),
+          DecisionTree<Key, double>(gmf->factors(), probability));
+
     } else if (auto orphan = dynamic_pointer_cast<OrphanWrapper>(f)) {
       // Ignore orphaned clique.
       // TODO(dellaert): is this correct? If so explain here.

From 88c2ad55bea7d23148029f895cec4f453ca3b8ad Mon Sep 17 00:00:00 2001
From: Varun Agrawal
Date: Sun, 25 Aug 2024 00:49:36 -0400
Subject: [PATCH 21/21] better docstring

---
 gtsam/hybrid/GaussianMixtureFactor.h | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/gtsam/hybrid/GaussianMixtureFactor.h b/gtsam/hybrid/GaussianMixtureFactor.h
index b1be6ef1d..2459af259 100644
--- a/gtsam/hybrid/GaussianMixtureFactor.h
+++ b/gtsam/hybrid/GaussianMixtureFactor.h
@@ -80,8 +80,8 @@ class GTSAM_EXPORT GaussianMixtureFactor : public HybridFactor {
    * @param continuousKeys A vector of keys representing continuous variables.
    * @param discreteKeys A vector of keys representing discrete variables and
    * their cardinalities.
-   * @param factors The decision tree of Gaussian factors stored as the mixture
-   * density.
+   * @param factors The decision tree of Gaussian factors stored
+   * as the mixture density.
    */
   GaussianMixtureFactor(const KeyVector &continuousKeys,
                         const DiscreteKeys &discreteKeys,
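
Note: a self-contained sketch of what the probability lambda in PATCH 20 evaluates when the mixture components are pure constants (illustrative values, not from the patch):

    #include <cmath>

    #include <gtsam/linear/JacobianFactor.h>
    #include <gtsam/linear/VectorValues.h>

    int main() {
      // Two keyless "constant" components, like those produced when every
      // continuous variable of a likelihood has been given a value.
      gtsam::Vector c0(1), c1(1);
      c0 << 1.0;  // error = 0.5 * 1.0^2 = 0.5
      c1 << 2.0;  // error = 0.5 * 2.0^2 = 2.0
      gtsam::JacobianFactor f0(c0), f1(c1);
      gtsam::VectorValues empty;  // no continuous keys to evaluate
      // Unnormalized discrete probabilities, as in the new branch:
      const double p0 = std::exp(-f0.error(empty));  // exp(-0.5) ≈ 0.607
      const double p1 = std::exp(-f1.error(empty));  // exp(-2.0) ≈ 0.135
      return (p0 > p1) ? 0 : 1;  // lower error means higher mode weight
    }
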