From 7fab3f8cc3859f5a693b5f017e9cc9aaa7eda735 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Tue, 20 Aug 2024 16:21:46 -0400 Subject: [PATCH 01/43] improved MixtureFactor tests --- gtsam/hybrid/tests/testMixtureFactor.cpp | 153 +++++++++++++++++++++++ 1 file changed, 153 insertions(+) diff --git a/gtsam/hybrid/tests/testMixtureFactor.cpp b/gtsam/hybrid/tests/testMixtureFactor.cpp index 0b2564403..48b193eeb 100644 --- a/gtsam/hybrid/tests/testMixtureFactor.cpp +++ b/gtsam/hybrid/tests/testMixtureFactor.cpp @@ -18,6 +18,9 @@ #include #include +#include +#include +#include #include #include #include @@ -115,6 +118,156 @@ TEST(MixtureFactor, Dim) { EXPECT_LONGS_EQUAL(1, mixtureFactor.dim()); } +/* ************************************************************************* */ +// Test components with differing means +TEST(MixtureFactor, DifferentMeans) { + DiscreteKey m1(M(1), 2), m2(M(2), 2); + + Values values; + double x1 = 0.0, x2 = 1.75, x3 = 2.60; + values.insert(X(1), x1); + values.insert(X(2), x2); + values.insert(X(3), x3); + + auto model0 = noiseModel::Isotropic::Sigma(1, 1e-0); + auto model1 = noiseModel::Isotropic::Sigma(1, 1e-0); + auto prior_noise = noiseModel::Isotropic::Sigma(1, 1e-0); + + auto f0 = std::make_shared>(X(1), X(2), 0.0, model0); + auto f1 = std::make_shared>(X(1), X(2), 2.0, model1); + std::vector factors{f0, f1}; + + MixtureFactor mixtureFactor({X(1), X(2)}, {m1}, factors); + HybridNonlinearFactorGraph hnfg; + hnfg.push_back(mixtureFactor); + + f0 = std::make_shared>(X(2), X(3), 0.0, model0); + f1 = std::make_shared>(X(2), X(3), 2.0, model1); + std::vector factors23{f0, f1}; + hnfg.push_back(MixtureFactor({X(2), X(3)}, {m2}, factors23)); + + auto prior = PriorFactor(X(1), x1, prior_noise); + hnfg.push_back(prior); + + hnfg.emplace_shared>(X(2), 2.0, prior_noise); + + auto hgfg = hnfg.linearize(values); + auto bn = hgfg->eliminateSequential(); + HybridValues actual = bn->optimize(); + + HybridValues expected( + VectorValues{ + {X(1), Vector1(0.0)}, {X(2), Vector1(0.25)}, {X(3), Vector1(-0.6)}}, + DiscreteValues{{M(1), 1}, {M(2), 0}}); + + EXPECT(assert_equal(expected, actual)); + + { + DiscreteValues dv{{M(1), 0}, {M(2), 0}}; + VectorValues cont = bn->optimize(dv); + double error = bn->error(HybridValues(cont, dv)); + // regression + EXPECT_DOUBLES_EQUAL(1.77418393408, error, 1e-9); + } + { + DiscreteValues dv{{M(1), 0}, {M(2), 1}}; + VectorValues cont = bn->optimize(dv); + double error = bn->error(HybridValues(cont, dv)); + // regression + EXPECT_DOUBLES_EQUAL(1.77418393408, error, 1e-9); + } + { + DiscreteValues dv{{M(1), 1}, {M(2), 0}}; + VectorValues cont = bn->optimize(dv); + double error = bn->error(HybridValues(cont, dv)); + // regression + EXPECT_DOUBLES_EQUAL(1.10751726741, error, 1e-9); + } + { + DiscreteValues dv{{M(1), 1}, {M(2), 1}}; + VectorValues cont = bn->optimize(dv); + double error = bn->error(HybridValues(cont, dv)); + // regression + EXPECT_DOUBLES_EQUAL(1.10751726741, error, 1e-9); + } +} + +/* ************************************************************************* */ +// Test components with differing covariances +TEST(MixtureFactor, DifferentCovariances) { + DiscreteKey m1(M(1), 2); + + Values values; + double x1 = 1.0, x2 = 1.0; + values.insert(X(1), x1); + values.insert(X(2), x2); + + double between = 0.0; + + auto model0 = noiseModel::Isotropic::Sigma(1, 1e2); + auto model1 = noiseModel::Isotropic::Sigma(1, 1e-2); + auto prior_noise = noiseModel::Isotropic::Sigma(1, 1e-3); + + auto f0 = + std::make_shared>(X(1), X(2), between, model0); 
+ auto f1 = + std::make_shared>(X(1), X(2), between, model1); + std::vector factors{f0, f1}; + + // Create via toFactorGraph + using symbol_shorthand::Z; + Matrix H0_1, H0_2, H1_1, H1_2; + Vector d0 = f0->evaluateError(x1, x2, &H0_1, &H0_2); + std::vector> terms0 = {{Z(1), gtsam::I_1x1 /*Rx*/}, + // + {X(1), H0_1 /*Sp1*/}, + {X(2), H0_2 /*Tp2*/}}; + + Vector d1 = f1->evaluateError(x1, x2, &H1_1, &H1_2); + std::vector> terms1 = {{Z(1), gtsam::I_1x1 /*Rx*/}, + // + {X(1), H1_1 /*Sp1*/}, + {X(2), H1_2 /*Tp2*/}}; + auto gm = new gtsam::GaussianMixture( + {Z(1)}, {X(1), X(2)}, {m1}, + {std::make_shared(terms0, 1, -d0, model0), + std::make_shared(terms1, 1, -d1, model1)}); + gtsam::HybridBayesNet bn; + bn.emplace_back(gm); + + gtsam::VectorValues measurements; + measurements.insert(Z(1), gtsam::Z_1x1); + // Create FG with single GaussianMixtureFactor + HybridGaussianFactorGraph mixture_fg = bn.toFactorGraph(measurements); + + // Linearized prior factor on X1 + auto prior = PriorFactor(X(1), x1, prior_noise).linearize(values); + mixture_fg.push_back(prior); + + auto hbn = mixture_fg.eliminateSequential(); + + VectorValues cv; + cv.insert(X(1), Vector1(0.0)); + cv.insert(X(2), Vector1(0.0)); + // P(m1) = [0.5, 0.5], so we should pick 0 + DiscreteValues dv; + dv.insert({M(1), 0}); + HybridValues expected_values(cv, dv); + + HybridValues actual_values = hbn->optimize(); + EXPECT(assert_equal(expected_values, actual_values)); + + // Check that we get different error values at the MLE point μ. + AlgebraicDecisionTree errorTree = hbn->errorTree(cv); + + HybridValues hv0(cv, DiscreteValues{{M(1), 0}}); + HybridValues hv1(cv, DiscreteValues{{M(1), 1}}); + + AlgebraicDecisionTree expectedErrorTree(m1, 9.90348755254, + 0.69314718056); + EXPECT(assert_equal(expectedErrorTree, errorTree)); +} + /* ************************************************************************* */ int main() { TestResult tr; From ccad6f6c48febbabfd989a619313725fc37f2a49 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Tue, 20 Aug 2024 16:22:07 -0400 Subject: [PATCH 02/43] print main logNormalizationConstant in GaussianMixture --- gtsam/hybrid/GaussianMixture.cpp | 2 ++ 1 file changed, 2 insertions(+) diff --git a/gtsam/hybrid/GaussianMixture.cpp b/gtsam/hybrid/GaussianMixture.cpp index c105a329e..531a9f2e5 100644 --- a/gtsam/hybrid/GaussianMixture.cpp +++ b/gtsam/hybrid/GaussianMixture.cpp @@ -145,6 +145,8 @@ void GaussianMixture::print(const std::string &s, std::cout << "(" << formatter(dk.first) << ", " << dk.second << "), "; } std::cout << "\n"; + std::cout << " logNormalizationConstant: " << logConstant_ << "\n" + << std::endl; conditionals_.print( "", [&](Key k) { return formatter(k); }, [&](const GaussianConditional::shared_ptr &gf) -> std::string { From 2007ef53de47f75a317509acf7fbd7828371898e Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Tue, 20 Aug 2024 16:22:34 -0400 Subject: [PATCH 03/43] use DiscreteFactorGraph directly --- gtsam/hybrid/HybridBayesNet.cpp | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/gtsam/hybrid/HybridBayesNet.cpp b/gtsam/hybrid/HybridBayesNet.cpp index b02967555..1d01baed2 100644 --- a/gtsam/hybrid/HybridBayesNet.cpp +++ b/gtsam/hybrid/HybridBayesNet.cpp @@ -220,15 +220,16 @@ GaussianBayesNet HybridBayesNet::choose( /* ************************************************************************* */ HybridValues HybridBayesNet::optimize() const { // Collect all the discrete factors to compute MPE - DiscreteBayesNet discrete_bn; + DiscreteFactorGraph discrete_fg; + for (auto 
&&conditional : *this) {
    if (conditional->isDiscrete()) {
-      discrete_bn.push_back(conditional->asDiscrete());
+      discrete_fg.push_back(conditional->asDiscrete());
    }
  }

  // Solve for the MPE
-  DiscreteValues mpe = DiscreteFactorGraph(discrete_bn).optimize();
+  DiscreteValues mpe = discrete_fg.optimize();

  // Given the MPE, compute the optimal continuous values.
  return HybridValues(optimize(mpe), mpe);

From 598edfacce2d80229b178a2b0f1dd271a79512c3 Mon Sep 17 00:00:00 2001
From: Varun Agrawal
Date: Tue, 20 Aug 2024 16:28:28 -0400
Subject: [PATCH 04/43] improve GaussianMixture by checking for invalid
 conditionals and adding 2 new methods

---
 gtsam/hybrid/GaussianMixture.cpp | 67 ++++++++++++++++++++++++++++++--
 gtsam/hybrid/GaussianMixture.h   | 15 +++++++
 gtsam/hybrid/HybridFactor.h      |  3 ++
 3 files changed, 81 insertions(+), 4 deletions(-)

diff --git a/gtsam/hybrid/GaussianMixture.cpp b/gtsam/hybrid/GaussianMixture.cpp
index 531a9f2e5..36a34226b 100644
--- a/gtsam/hybrid/GaussianMixture.cpp
+++ b/gtsam/hybrid/GaussianMixture.cpp
@@ -24,6 +24,7 @@
 #include
 #include
 #include
+#include
 #include

 namespace gtsam {
@@ -92,6 +93,35 @@ GaussianFactorGraphTree GaussianMixture::asGaussianFactorGraphTree() const {
   return {conditionals_, wrap};
 }

+/* *******************************************************************************/
+GaussianBayesNetTree GaussianMixture::add(
+    const GaussianBayesNetTree &sum) const {
+  using Y = GaussianBayesNet;
+  auto add = [](const Y &graph1, const Y &graph2) {
+    auto result = graph1;
+    if (graph2.size() == 0) {
+      return GaussianBayesNet();
+    }
+    result.push_back(graph2);
+    return result;
+  };
+  const auto tree = asGaussianBayesNetTree();
+  return sum.empty() ? tree : sum.apply(tree, add);
+}
+
+/* *******************************************************************************/
+GaussianBayesNetTree GaussianMixture::asGaussianBayesNetTree() const {
+  auto wrap = [](const GaussianConditional::shared_ptr &gc) {
+    if (gc) {
+      return GaussianBayesNet{gc};
+    } else {
+      return GaussianBayesNet();
+    }
+  };
+  return {conditionals_, wrap};
+}
+
 /* *******************************************************************************/
 size_t GaussianMixture::nrComponents() const {
   size_t total = 0;
@@ -318,8 +348,15 @@ AlgebraicDecisionTree<Key> GaussianMixture::logProbability(
 AlgebraicDecisionTree<Key> GaussianMixture::errorTree(
     const VectorValues &continuousValues) const {
   auto errorFunc = [&](const GaussianConditional::shared_ptr &conditional) {
-    return conditional->error(continuousValues) +  //
-           logConstant_ - conditional->logNormalizationConstant();
+    // Check that the pointer is valid.
+    if (conditional) {
+      return conditional->error(continuousValues) +  //
+             logConstant_ - conditional->logNormalizationConstant();
+    } else {
+      // If the pointer is not valid, this conditional was pruned,
+      // so we return the maximum error.
+      return std::numeric_limits<double>::max();
+    }
   };
   DecisionTree<Key, double> error_tree(conditionals_, errorFunc);
   return error_tree;
@@ -327,10 +364,32 @@ AlgebraicDecisionTree<Key> GaussianMixture::errorTree(
 /* *******************************************************************************/
 double GaussianMixture::error(const HybridValues &values) const {
+  // Check that the discrete keys in the discrete assignment
+  // are present in the GaussianMixture.
+  KeyVector dKeys = this->discreteKeys_.indices();
+  bool valid_assignment = false;
+  for (auto &&kv : values.discrete()) {
+    if (std::find(dKeys.begin(), dKeys.end(), kv.first) != dKeys.end()) {
+      valid_assignment = true;
+      break;
+    }
+  }
+
+  // The discrete assignment is not valid so we return 0.0 error.
+  if (!valid_assignment) {
+    return 0.0;
+  }
+
   // Directly index to get the conditional, no need to build the whole tree.
   auto conditional = conditionals_(values.discrete());
-  return conditional->error(values.continuous()) +  //
-         logConstant_ - conditional->logNormalizationConstant();
+  if (conditional) {
+    return conditional->error(values.continuous()) +  //
+           logConstant_ - conditional->logNormalizationConstant();
+  } else {
+    // If the pointer is not valid, this conditional was pruned,
+    // so we return the maximum error.
+    return std::numeric_limits<double>::max();
+  }
 }

 /* *******************************************************************************/
diff --git a/gtsam/hybrid/GaussianMixture.h b/gtsam/hybrid/GaussianMixture.h
index c1ef504f8..bfa342dcf 100644
--- a/gtsam/hybrid/GaussianMixture.h
+++ b/gtsam/hybrid/GaussianMixture.h
@@ -72,6 +72,12 @@ class GTSAM_EXPORT GaussianMixture
   */
  GaussianFactorGraphTree asGaussianFactorGraphTree() const;

+  /**
+   * @brief Convert a DecisionTree of conditionals into
+   * a DecisionTree of Gaussian Bayes nets.
+   */
+  GaussianBayesNetTree asGaussianBayesNetTree() const;
+
  /**
   * @brief Helper function to get the pruner functor.
   *
@@ -250,6 +256,15 @@ class GTSAM_EXPORT GaussianMixture
   * @return GaussianFactorGraphTree
   */
  GaussianFactorGraphTree add(const GaussianFactorGraphTree &sum) const;
+
+  /**
+   * @brief Merge the Gaussian Bayes Nets in `this` and `sum` while
+   * maintaining the decision tree structure.
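+   * This mirrors add(const GaussianFactorGraphTree &) declared above, but
+   * collects the per-assignment conditionals into Bayes nets instead of
+   * factor graphs.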
+   *
+   * @param sum Decision Tree of Gaussian Bayes Nets
+   * @return GaussianBayesNetTree
+   */
+  GaussianBayesNetTree add(const GaussianBayesNetTree &sum) const;
   /// @}

  private:

diff --git a/gtsam/hybrid/HybridFactor.h b/gtsam/hybrid/HybridFactor.h
index afd1c8032..418489d66 100644
--- a/gtsam/hybrid/HybridFactor.h
+++ b/gtsam/hybrid/HybridFactor.h
@@ -13,6 +13,7 @@
  * @file HybridFactor.h
  * @date Mar 11, 2022
  * @author Fan Jiang
+ * @author Varun Agrawal
  */

 #pragma once
@@ -33,6 +34,8 @@ class HybridValues;

 /// Alias for DecisionTree of GaussianFactorGraphs
 using GaussianFactorGraphTree = DecisionTree<Key, GaussianFactorGraph>;
+/// Alias for DecisionTree of GaussianBayesNets
+using GaussianBayesNetTree = DecisionTree<Key, GaussianBayesNet>;

 KeyVector CollectKeys(const KeyVector &continuousKeys,
                       const DiscreteKeys &discreteKeys);

From eef9765e4a16ca90f1bf3c953500e99c2ec4b2e2 Mon Sep 17 00:00:00 2001
From: Varun Agrawal
Date: Tue, 20 Aug 2024 16:29:02 -0400
Subject: [PATCH 05/43] normalize the discrete factor at the
 continuous-discrete boundary

---
 gtsam/hybrid/HybridGaussianFactorGraph.cpp | 28 +++++++++++++++++-----
 1 file changed, 22 insertions(+), 6 deletions(-)

diff --git a/gtsam/hybrid/HybridGaussianFactorGraph.cpp b/gtsam/hybrid/HybridGaussianFactorGraph.cpp
index b764dc9e0..bf11a50fc 100644
--- a/gtsam/hybrid/HybridGaussianFactorGraph.cpp
+++ b/gtsam/hybrid/HybridGaussianFactorGraph.cpp
@@ -279,21 +279,37 @@ GaussianFactorGraphTree removeEmpty(const GaussianFactorGraphTree &sum) {

 using Result = std::pair<std::shared_ptr<GaussianConditional>,
                          GaussianMixtureFactor::sharedFactor>;

-// Integrate the probability mass in the last continuous conditional using
-// the unnormalized probability q(μ;m) = exp(-error(μ;m)) at the mean.
-// discrete_probability = exp(-error(μ;m)) * sqrt(det(2π Σ_m))
+/**
+ * Compute the probability q(μ;m) = exp(-error(μ;m)) * sqrt(det(2π Σ_m))
+ * from the residual error at the mean μ.
+ * The residual error contains no keys, and only
+ * depends on the discrete separator if present.
+ */
 static std::shared_ptr<Factor> createDiscreteFactor(
     const DecisionTree<Key, Result> &eliminationResults,
     const DiscreteKeys &discreteSeparator) {
-  auto probability = [&](const Result &pair) -> double {
+  auto logProbability = [&](const Result &pair) -> double {
     const auto &[conditional, factor] = pair;
     static const VectorValues kEmpty;
     // If the factor is not null, it has no keys, just contains the residual.
-    if (!factor) return 1.0;  // TODO(dellaert): not loving this.
+    // In log space a unit contribution is 0.0, not 1.0.
+    if (!factor) return 0.0;  // TODO(dellaert): not loving this.
-    return exp(-factor->error(kEmpty)) / conditional->normalizationConstant();
+
+    // Logspace version of:
+    // exp(-factor->error(kEmpty)) / conditional->normalizationConstant();
+    // We take the negative of the logNormalizationConstant `log(1/k)`
+    // to get `log(k)`,
+    // i.e. log q(μ;m) = -error(μ;m) + log(sqrt(det(2π Σ_m))).
+    return -factor->error(kEmpty) + (-conditional->logNormalizationConstant());
   };

-  DecisionTree<Key, double> probabilities(eliminationResults, probability);
+  AlgebraicDecisionTree<Key> logProbabilities(
+      DecisionTree<Key, double>(eliminationResults, logProbability));
+
+  // Perform normalization
+  double max_log = logProbabilities.max();
+  AlgebraicDecisionTree<Key> probabilities = DecisionTree<Key, double>(
+      logProbabilities,
+      [&max_log](const double x) { return exp(x - max_log); });
+  probabilities = probabilities.normalize(probabilities.sum());

   return std::make_shared<DecisionTreeFactor>(discreteSeparator, probabilities);
 }

From ea104c4b8399f27aa48fc95f0888d304c8ff39d2 Mon Sep 17 00:00:00 2001
From: Varun Agrawal
Date: Tue, 20 Aug 2024 16:29:40 -0400
Subject: [PATCH 06/43] support for varying normalizers in GaussianMixtureFactor

---
 gtsam/hybrid/GaussianMixtureFactor.cpp | 83 +++++++++++++++++++++++++-
 gtsam/hybrid/GaussianMixtureFactor.h   | 18 ++++--
 2 files changed, 92 insertions(+), 9 deletions(-)

diff --git a/gtsam/hybrid/GaussianMixtureFactor.cpp b/gtsam/hybrid/GaussianMixtureFactor.cpp
index a3db16d04..e519cefe6 100644
--- a/gtsam/hybrid/GaussianMixtureFactor.cpp
+++ b/gtsam/hybrid/GaussianMixtureFactor.cpp
@@ -28,11 +28,86 @@

 namespace gtsam {

+/**
+ * @brief Helper function to correct the [A|b] matrices in the factor
+ * components with the normalizer values.
+ * This is done by storing the normalizer value in
+ * the `b` vector as an additional row.
+ *
+ * @param factors DecisionTree of GaussianFactor shared pointers.
+ * @param varyingNormalizers Flag indicating the normalizers are different for
+ * each component.
+ * @return GaussianMixtureFactor::Factors
+ */
+GaussianMixtureFactor::Factors correct(
+    const GaussianMixtureFactor::Factors &factors, bool varyingNormalizers) {
+  if (!varyingNormalizers) {
+    return factors;
+  }
+
+  // First compute all the log |2π Σ| terms
+  auto computeNormalizers = [](const GaussianMixtureFactor::sharedFactor &gf) {
+    auto jf = std::dynamic_pointer_cast<JacobianFactor>(gf);
+    // If we have, say, a Hessian factor, then there is no need to do anything
+    if (!jf) return 0.0;
+
+    auto model = jf->get_model();
+    // If there is no noise model, there is nothing to do.
+    if (!model) {
+      return 0.0;
+    }
+    // Since noise models are Gaussian, we can get the logDeterminant using
+    // the same trick as in GaussianConditional
+    double logDetR =
+        model->R().diagonal().unaryExpr([](double x) { return log(x); }).sum();
+    double logDeterminantSigma = -2.0 * logDetR;
+
+    size_t n = model->dim();
+    constexpr double log2pi = 1.8378770664093454835606594728112;
+    return n * log2pi + logDeterminantSigma;
+  };
+
+  AlgebraicDecisionTree<Key> log_normalizers =
+      DecisionTree<Key, double>(factors, computeNormalizers);
+
+  // Find the minimum value so we can "proselytize" to positive values.
+  // Done because we can't take the square root of negative numbers.
+  double min_log_normalizer = log_normalizers.min();
+  log_normalizers = log_normalizers.apply(
+      [&min_log_normalizer](double n) { return n - min_log_normalizer; });
+
+  // Finally, update the [A|b] matrices.
+  auto update = [&log_normalizers](
+                    const Assignment<Key> &assignment,
+                    const GaussianMixtureFactor::sharedFactor &gf) {
+    auto jf = std::dynamic_pointer_cast<JacobianFactor>(gf);
+    if (!jf) return gf;
+    // If there is no noise model, there is nothing to do.
+    if (!jf->get_model()) return gf;
+    // If the log_normalizer is 0, do nothing
+    if (log_normalizers(assignment) == 0.0) return gf;
+
+    GaussianFactorGraph gfg;
+    gfg.push_back(jf);
+
+    Vector c(1);
+    c << std::sqrt(log_normalizers(assignment));
+    auto constantFactor = std::make_shared<JacobianFactor>(c);
+
+    gfg.push_back(constantFactor);
+    return std::dynamic_pointer_cast<GaussianFactor>(
+        std::make_shared<JacobianFactor>(gfg));
+  };
+  return factors.apply(update);
+}
+
 /* *******************************************************************************/
 GaussianMixtureFactor::GaussianMixtureFactor(const KeyVector &continuousKeys,
                                              const DiscreteKeys &discreteKeys,
-                                             const Factors &factors)
-    : Base(continuousKeys, discreteKeys), factors_(factors) {}
+                                             const Factors &factors,
+                                             bool varyingNormalizers)
+    : Base(continuousKeys, discreteKeys),
+      factors_(correct(factors, varyingNormalizers)) {}

 /* *******************************************************************************/
 bool GaussianMixtureFactor::equals(const HybridFactor &lf, double tol) const {
@@ -54,7 +129,9 @@ bool GaussianMixtureFactor::equals(const HybridFactor &lf, double tol) const {
 /* *******************************************************************************/
 void GaussianMixtureFactor::print(const std::string &s,
                                   const KeyFormatter &formatter) const {
-  HybridFactor::print(s, formatter);
+  std::cout << (s.empty() ? "" : s + "\n");
+  std::cout << "GaussianMixtureFactor" << std::endl;
+  HybridFactor::print("", formatter);
   std::cout << "{\n";
   if (factors_.empty()) {
     std::cout << " empty" << std::endl;

diff --git a/gtsam/hybrid/GaussianMixtureFactor.h b/gtsam/hybrid/GaussianMixtureFactor.h
index 67d12ddb0..588501bbe 100644
--- a/gtsam/hybrid/GaussianMixtureFactor.h
+++ b/gtsam/hybrid/GaussianMixtureFactor.h
@@ -82,10 +82,13 @@ class GTSAM_EXPORT GaussianMixtureFactor : public HybridFactor {
    * their cardinalities.
    * @param factors The decision tree of Gaussian factors stored as the mixture
    * density.
+   * @param varyingNormalizers Flag indicating factor components have varying
+   * normalizer values.
    */
   GaussianMixtureFactor(const KeyVector &continuousKeys,
                         const DiscreteKeys &discreteKeys,
-                        const Factors &factors);
+                        const Factors &factors,
+                        bool varyingNormalizers = false);

   /**
    * @brief Construct a new GaussianMixtureFactor object using a vector of
    * components.
    *
    * @param continuousKeys Vector of keys for continuous factors.
    * @param discreteKeys Vector of discrete keys.
    * @param factors Vector of Gaussian factor shared pointers.
+   * @param varyingNormalizers Flag indicating factor components have varying
+   * normalizer values.
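+   *
+   * A minimal usage sketch (the keys, mode, and component factors f0 and f1
+   * here are illustrative, not part of this API):
+   * @code
+   *   DiscreteKeys modes{{M(1), 2}};
+   *   std::vector<GaussianFactor::shared_ptr> components{f0, f1};
+   *   GaussianMixtureFactor gmf({X(1), X(2)}, modes, components,
+   *                             true);  // components have varying normalizers
+   * @endcode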
*/ GaussianMixtureFactor(const KeyVector &continuousKeys, const DiscreteKeys &discreteKeys, - const std::vector &factors) + const std::vector &factors, + bool varyingNormalizers = false) : GaussianMixtureFactor(continuousKeys, discreteKeys, - Factors(discreteKeys, factors)) {} + Factors(discreteKeys, factors), + varyingNormalizers) {} /// @} /// @name Testable @@ -107,9 +114,8 @@ class GTSAM_EXPORT GaussianMixtureFactor : public HybridFactor { bool equals(const HybridFactor &lf, double tol = 1e-9) const override; - void print( - const std::string &s = "GaussianMixtureFactor\n", - const KeyFormatter &formatter = DefaultKeyFormatter) const override; + void print(const std::string &s = "", const KeyFormatter &formatter = + DefaultKeyFormatter) const override; /// @} /// @name Standard API From 5e3093e3e2a693eda24f6a02d5a4ee6d54ef1a1c Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Tue, 20 Aug 2024 16:30:32 -0400 Subject: [PATCH 07/43] tests for verification --- .../tests/testGaussianMixtureFactor.cpp | 247 +++++++++++++++++- .../tests/testHybridNonlinearFactorGraph.cpp | 18 ++ 2 files changed, 260 insertions(+), 5 deletions(-) diff --git a/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp b/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp index 9cc7e6bfd..47b9ddc99 100644 --- a/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp +++ b/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp @@ -22,9 +22,13 @@ #include #include #include +#include +#include #include #include #include +#include +#include // Include for test suite #include @@ -56,7 +60,6 @@ TEST(GaussianMixtureFactor, Sum) { auto b = Matrix::Zero(2, 1); Vector2 sigmas; sigmas << 1, 2; - auto model = noiseModel::Diagonal::Sigmas(sigmas, true); auto f10 = std::make_shared(X(1), A1, X(2), A2, b); auto f11 = std::make_shared(X(1), A1, X(2), A2, b); @@ -106,7 +109,8 @@ TEST(GaussianMixtureFactor, Printing) { GaussianMixtureFactor mixtureFactor({X(1), X(2)}, {m1}, factors); std::string expected = - R"(Hybrid [x1 x2; 1]{ + R"(GaussianMixtureFactor +Hybrid [x1 x2; 1]{ Choice(1) 0 Leaf : A[x1] = [ @@ -178,7 +182,8 @@ TEST(GaussianMixtureFactor, Error) { continuousValues.insert(X(2), Vector2(1, 1)); // error should return a tree of errors, with nodes for each discrete value. 
- AlgebraicDecisionTree error_tree = mixtureFactor.errorTree(continuousValues); + AlgebraicDecisionTree error_tree = + mixtureFactor.errorTree(continuousValues); std::vector discrete_keys = {m1}; // Error values for regression test @@ -191,8 +196,240 @@ TEST(GaussianMixtureFactor, Error) { DiscreteValues discreteValues; discreteValues[m1.first] = 1; EXPECT_DOUBLES_EQUAL( - 4.0, mixtureFactor.error({continuousValues, discreteValues}), - 1e-9); + 4.0, mixtureFactor.error({continuousValues, discreteValues}), 1e-9); +} + +/* ************************************************************************* */ +// Test components with differing means +TEST(GaussianMixtureFactor, DifferentMeans) { + DiscreteKey m1(M(1), 2), m2(M(2), 2); + + Values values; + double x1 = 0.0, x2 = 1.75, x3 = 2.60; + values.insert(X(1), x1); + values.insert(X(2), x2); + values.insert(X(3), x3); + + auto model0 = noiseModel::Isotropic::Sigma(1, 1e-0); + auto model1 = noiseModel::Isotropic::Sigma(1, 1e-0); + auto prior_noise = noiseModel::Isotropic::Sigma(1, 1e-0); + + auto f0 = std::make_shared>(X(1), X(2), 0.0, model0) + ->linearize(values); + auto f1 = std::make_shared>(X(1), X(2), 2.0, model1) + ->linearize(values); + std::vector factors{f0, f1}; + + GaussianMixtureFactor mixtureFactor({X(1), X(2)}, {m1}, factors, true); + HybridGaussianFactorGraph hfg; + hfg.push_back(mixtureFactor); + + f0 = std::make_shared>(X(2), X(3), 0.0, model0) + ->linearize(values); + f1 = std::make_shared>(X(2), X(3), 2.0, model1) + ->linearize(values); + std::vector factors23{f0, f1}; + hfg.push_back(GaussianMixtureFactor({X(2), X(3)}, {m2}, factors23, true)); + + auto prior = PriorFactor(X(1), x1, prior_noise).linearize(values); + hfg.push_back(prior); + + hfg.push_back(PriorFactor(X(2), 2.0, prior_noise).linearize(values)); + + auto bn = hfg.eliminateSequential(); + HybridValues actual = bn->optimize(); + + HybridValues expected( + VectorValues{ + {X(1), Vector1(0.0)}, {X(2), Vector1(0.25)}, {X(3), Vector1(-0.6)}}, + DiscreteValues{{M(1), 1}, {M(2), 0}}); + + EXPECT(assert_equal(expected, actual)); + + { + DiscreteValues dv{{M(1), 0}, {M(2), 0}}; + VectorValues cont = bn->optimize(dv); + double error = bn->error(HybridValues(cont, dv)); + // regression + EXPECT_DOUBLES_EQUAL(1.77418393408, error, 1e-9); + } + { + DiscreteValues dv{{M(1), 0}, {M(2), 1}}; + VectorValues cont = bn->optimize(dv); + double error = bn->error(HybridValues(cont, dv)); + // regression + EXPECT_DOUBLES_EQUAL(1.77418393408, error, 1e-9); + } + { + DiscreteValues dv{{M(1), 1}, {M(2), 0}}; + VectorValues cont = bn->optimize(dv); + double error = bn->error(HybridValues(cont, dv)); + // regression + EXPECT_DOUBLES_EQUAL(1.10751726741, error, 1e-9); + } + { + DiscreteValues dv{{M(1), 1}, {M(2), 1}}; + VectorValues cont = bn->optimize(dv); + double error = bn->error(HybridValues(cont, dv)); + // regression + EXPECT_DOUBLES_EQUAL(1.10751726741, error, 1e-9); + } +} + +/* ************************************************************************* */ +/** + * @brief Test components with differing covariances. 
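+ * Both components share the same mean (between = 0.0), so only the noise
+ * models differ and the discrete posterior is driven by the normalization
+ * constants.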
+ * The factor graph is + * *-X1-*-X2 + * | + * M1 + */ +TEST(GaussianMixtureFactor, DifferentCovariances) { + DiscreteKey m1(M(1), 2); + + Values values; + double x1 = 1.0, x2 = 1.0; + values.insert(X(1), x1); + values.insert(X(2), x2); + + double between = 0.0; + + auto model0 = noiseModel::Isotropic::Sigma(1, 1e2); + auto model1 = noiseModel::Isotropic::Sigma(1, 1e-2); + auto prior_noise = noiseModel::Isotropic::Sigma(1, 1e-3); + + auto f0 = + std::make_shared>(X(1), X(2), between, model0); + auto f1 = + std::make_shared>(X(1), X(2), between, model1); + std::vector factors{f0, f1}; + + // Create via toFactorGraph + using symbol_shorthand::Z; + Matrix H0_1, H0_2, H1_1, H1_2; + Vector d0 = f0->evaluateError(x1, x2, &H0_1, &H0_2); + std::vector> terms0 = {{Z(1), gtsam::I_1x1 /*Rx*/}, + // + {X(1), H0_1 /*Sp1*/}, + {X(2), H0_2 /*Tp2*/}}; + + Vector d1 = f1->evaluateError(x1, x2, &H1_1, &H1_2); + std::vector> terms1 = {{Z(1), gtsam::I_1x1 /*Rx*/}, + // + {X(1), H1_1 /*Sp1*/}, + {X(2), H1_2 /*Tp2*/}}; + gtsam::GaussianMixtureFactor gmf( + {X(1), X(2)}, {m1}, + {std::make_shared(X(1), H0_1, X(2), H0_2, -d0, model0), + std::make_shared(X(1), H1_1, X(2), H1_2, -d1, model1)}, + true); + + // Create FG with single GaussianMixtureFactor + HybridGaussianFactorGraph mixture_fg; + mixture_fg.add(gmf); + + // Linearized prior factor on X1 + auto prior = PriorFactor(X(1), x1, prior_noise).linearize(values); + mixture_fg.push_back(prior); + + auto hbn = mixture_fg.eliminateSequential(); + // hbn->print(); + + VectorValues cv; + cv.insert(X(1), Vector1(0.0)); + cv.insert(X(2), Vector1(0.0)); + + // Check that the error values at the MLE point μ. + AlgebraicDecisionTree errorTree = hbn->errorTree(cv); + + DiscreteValues dv0{{M(1), 0}}; + DiscreteValues dv1{{M(1), 1}}; + + // regression + EXPECT_DOUBLES_EQUAL(9.90348755254, errorTree(dv0), 1e-9); + EXPECT_DOUBLES_EQUAL(0.69314718056, errorTree(dv1), 1e-9); + + DiscreteConditional expected_m1(m1, "0.5/0.5"); + DiscreteConditional actual_m1 = *(hbn->at(2)->asDiscrete()); + + EXPECT(assert_equal(expected_m1, actual_m1)); +} + +/* ************************************************************************* */ +/** + * @brief Test components with differing covariances + * but with a Bayes net P(Z|X, M) converted to a FG. 
+ */ +TEST(GaussianMixtureFactor, DifferentCovariances2) { + DiscreteKey m1(M(1), 2); + + Values values; + double x1 = 1.0, x2 = 1.0; + values.insert(X(1), x1); + values.insert(X(2), x2); + + double between = 0.0; + + auto model0 = noiseModel::Isotropic::Sigma(1, 1e2); + auto model1 = noiseModel::Isotropic::Sigma(1, 1e-2); + auto prior_noise = noiseModel::Isotropic::Sigma(1, 1e-3); + + auto f0 = + std::make_shared>(X(1), X(2), between, model0); + auto f1 = + std::make_shared>(X(1), X(2), between, model1); + std::vector factors{f0, f1}; + + // Create via toFactorGraph + using symbol_shorthand::Z; + Matrix H0_1, H0_2, H1_1, H1_2; + Vector d0 = f0->evaluateError(x1, x2, &H0_1, &H0_2); + std::vector> terms0 = {{Z(1), gtsam::I_1x1 /*Rx*/}, + // + {X(1), H0_1 /*Sp1*/}, + {X(2), H0_2 /*Tp2*/}}; + + Vector d1 = f1->evaluateError(x1, x2, &H1_1, &H1_2); + std::vector> terms1 = {{Z(1), gtsam::I_1x1 /*Rx*/}, + // + {X(1), H1_1 /*Sp1*/}, + {X(2), H1_2 /*Tp2*/}}; + auto gm = new gtsam::GaussianMixture( + {Z(1)}, {X(1), X(2)}, {m1}, + {std::make_shared(terms0, 1, -d0, model0), + std::make_shared(terms1, 1, -d1, model1)}); + gtsam::HybridBayesNet bn; + bn.emplace_back(gm); + + gtsam::VectorValues measurements; + measurements.insert(Z(1), gtsam::Z_1x1); + // Create FG with single GaussianMixtureFactor + HybridGaussianFactorGraph mixture_fg = bn.toFactorGraph(measurements); + + // Linearized prior factor on X1 + auto prior = PriorFactor(X(1), x1, prior_noise).linearize(values); + mixture_fg.push_back(prior); + + auto hbn = mixture_fg.eliminateSequential(); + + VectorValues cv; + cv.insert(X(1), Vector1(0.0)); + cv.insert(X(2), Vector1(0.0)); + + // Check that the error values at the MLE point μ. + AlgebraicDecisionTree errorTree = hbn->errorTree(cv); + + DiscreteValues dv0{{M(1), 0}}; + DiscreteValues dv1{{M(1), 1}}; + + // regression + EXPECT_DOUBLES_EQUAL(9.90348755254, errorTree(dv0), 1e-9); + EXPECT_DOUBLES_EQUAL(0.69314718056, errorTree(dv1), 1e-9); + + DiscreteConditional expected_m1(m1, "0.5/0.5"); + DiscreteConditional actual_m1 = *(hbn->at(2)->asDiscrete()); + + EXPECT(assert_equal(expected_m1, actual_m1)); } /* ************************************************************************* */ diff --git a/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp b/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp index 93081d309..2d851b0ff 100644 --- a/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp +++ b/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp @@ -510,6 +510,7 @@ factor 0: b = [ -10 ] No noise model factor 1: +GaussianMixtureFactor Hybrid [x0 x1; m0]{ Choice(m0) 0 Leaf : @@ -534,6 +535,7 @@ Hybrid [x0 x1; m0]{ } factor 2: +GaussianMixtureFactor Hybrid [x1 x2; m1]{ Choice(m1) 0 Leaf : @@ -675,33 +677,41 @@ factor 6: P( m1 | m0 ): size: 3 conditional 0: Hybrid P( x0 | x1 m0) Discrete Keys = (m0, 2), + logNormalizationConstant: 1.38862 + Choice(m0) 0 Leaf p(x0 | x1) R = [ 10.0499 ] S[x1] = [ -0.0995037 ] d = [ -9.85087 ] + logNormalizationConstant: 1.38862 No noise model 1 Leaf p(x0 | x1) R = [ 10.0499 ] S[x1] = [ -0.0995037 ] d = [ -9.95037 ] + logNormalizationConstant: 1.38862 No noise model conditional 1: Hybrid P( x1 | x2 m0 m1) Discrete Keys = (m0, 2), (m1, 2), + logNormalizationConstant: 1.3935 + Choice(m1) 0 Choice(m0) 0 0 Leaf p(x1 | x2) R = [ 10.099 ] S[x2] = [ -0.0990196 ] d = [ -9.99901 ] + logNormalizationConstant: 1.3935 No noise model 0 1 Leaf p(x1 | x2) R = [ 10.099 ] S[x2] = [ -0.0990196 ] d = [ -9.90098 ] + logNormalizationConstant: 1.3935 No noise model 1 Choice(m0) 
@@ -709,16 +719,20 @@ conditional 1: Hybrid P( x1 | x2 m0 m1) R = [ 10.099 ] S[x2] = [ -0.0990196 ] d = [ -10.098 ] + logNormalizationConstant: 1.3935 No noise model 1 1 Leaf p(x1 | x2) R = [ 10.099 ] S[x2] = [ -0.0990196 ] d = [ -10 ] + logNormalizationConstant: 1.3935 No noise model conditional 2: Hybrid P( x2 | m0 m1) Discrete Keys = (m0, 2), (m1, 2), + logNormalizationConstant: 1.38857 + Choice(m1) 0 Choice(m0) 0 0 Leaf p(x2) @@ -726,6 +740,7 @@ conditional 2: Hybrid P( x2 | m0 m1) d = [ -10.1489 ] mean: 1 elements x2: -1.0099 + logNormalizationConstant: 1.38857 No noise model 0 1 Leaf p(x2) @@ -733,6 +748,7 @@ conditional 2: Hybrid P( x2 | m0 m1) d = [ -10.1479 ] mean: 1 elements x2: -1.0098 + logNormalizationConstant: 1.38857 No noise model 1 Choice(m0) @@ -741,6 +757,7 @@ conditional 2: Hybrid P( x2 | m0 m1) d = [ -10.0504 ] mean: 1 elements x2: -1.0001 + logNormalizationConstant: 1.38857 No noise model 1 1 Leaf p(x2) @@ -748,6 +765,7 @@ conditional 2: Hybrid P( x2 | m0 m1) d = [ -10.0494 ] mean: 1 elements x2: -1 + logNormalizationConstant: 1.38857 No noise model )"; From 9818f89ceca8000e24b7eb0e81bb5d05685718ca Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Tue, 20 Aug 2024 17:48:04 -0400 Subject: [PATCH 08/43] remove GaussianBayesNetTree methods --- gtsam/hybrid/GaussianMixture.cpp | 29 ----------------------------- gtsam/hybrid/GaussianMixture.h | 17 +---------------- gtsam/hybrid/HybridFactor.h | 2 -- 3 files changed, 1 insertion(+), 47 deletions(-) diff --git a/gtsam/hybrid/GaussianMixture.cpp b/gtsam/hybrid/GaussianMixture.cpp index 36a34226b..1411ede8d 100644 --- a/gtsam/hybrid/GaussianMixture.cpp +++ b/gtsam/hybrid/GaussianMixture.cpp @@ -93,35 +93,6 @@ GaussianFactorGraphTree GaussianMixture::asGaussianFactorGraphTree() const { return {conditionals_, wrap}; } -/* -*******************************************************************************/ -GaussianBayesNetTree GaussianMixture::add( - const GaussianBayesNetTree &sum) const { - using Y = GaussianBayesNet; - auto add = [](const Y &graph1, const Y &graph2) { - auto result = graph1; - if (graph2.size() == 0) { - return GaussianBayesNet(); - } - result.push_back(graph2); - return result; - }; - const auto tree = asGaussianBayesNetTree(); - return sum.empty() ? tree : sum.apply(tree, add); -} - -/* *******************************************************************************/ -GaussianBayesNetTree GaussianMixture::asGaussianBayesNetTree() const { - auto wrap = [](const GaussianConditional::shared_ptr &gc) { - if (gc) { - return GaussianBayesNet{gc}; - } else { - return GaussianBayesNet(); - } - }; - return {conditionals_, wrap}; -} - /* *******************************************************************************/ size_t GaussianMixture::nrComponents() const { size_t total = 0; diff --git a/gtsam/hybrid/GaussianMixture.h b/gtsam/hybrid/GaussianMixture.h index bfa342dcf..41692bb1c 100644 --- a/gtsam/hybrid/GaussianMixture.h +++ b/gtsam/hybrid/GaussianMixture.h @@ -67,17 +67,11 @@ class GTSAM_EXPORT GaussianMixture double logConstant_; ///< log of the normalization constant. /** - * @brief Convert a DecisionTree of factors into + * @brief Convert a GaussianMixture of conditionals into * a DecisionTree of Gaussian factor graphs. */ GaussianFactorGraphTree asGaussianFactorGraphTree() const; - /** - * @brief Convert a DecisionTree of conditionals into - * a DecisionTree of Gaussian Bayes nets. - */ - GaussianBayesNetTree asGaussianBayesNetTree() const; - /** * @brief Helper function to get the pruner functor. 
* @@ -256,15 +250,6 @@ class GTSAM_EXPORT GaussianMixture * @return GaussianFactorGraphTree */ GaussianFactorGraphTree add(const GaussianFactorGraphTree &sum) const; - - /** - * @brief Merge the Gaussian Bayes Nets in `this` and `sum` while - * maintaining the decision tree structure. - * - * @param sum Decision Tree of Gaussian Bayes Nets - * @return GaussianBayesNetTree - */ - GaussianBayesNetTree add(const GaussianBayesNetTree &sum) const; /// @} private: diff --git a/gtsam/hybrid/HybridFactor.h b/gtsam/hybrid/HybridFactor.h index 418489d66..a9c0e53d2 100644 --- a/gtsam/hybrid/HybridFactor.h +++ b/gtsam/hybrid/HybridFactor.h @@ -34,8 +34,6 @@ class HybridValues; /// Alias for DecisionTree of GaussianFactorGraphs using GaussianFactorGraphTree = DecisionTree; -/// Alias for DecisionTree of GaussianBayesNets -using GaussianBayesNetTree = DecisionTree; KeyVector CollectKeys(const KeyVector &continuousKeys, const DiscreteKeys &discreteKeys); From 8abd2756ea895dbd0da4ab5246730533f27075ff Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Wed, 21 Aug 2024 04:19:44 -0400 Subject: [PATCH 09/43] throw error if invalid assignment --- gtsam/hybrid/GaussianMixture.cpp | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/gtsam/hybrid/GaussianMixture.cpp b/gtsam/hybrid/GaussianMixture.cpp index 1411ede8d..0a0332af8 100644 --- a/gtsam/hybrid/GaussianMixture.cpp +++ b/gtsam/hybrid/GaussianMixture.cpp @@ -346,9 +346,10 @@ double GaussianMixture::error(const HybridValues &values) const { } } - // The discrete assignment is not valid so we return 0.0 erorr. + // The discrete assignment is not valid so we throw an error. if (!valid_assignment) { - return 0.0; + throw std::runtime_error( + "Invalid discrete values in values. Not all discrete keys specified."); } // Directly index to get the conditional, no need to build the whole tree. From 450fb0a016942b5ac3bc9f0639a2c951cddd61d9 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Wed, 21 Aug 2024 06:12:51 -0400 Subject: [PATCH 10/43] improve tests --- .../tests/testGaussianMixtureFactor.cpp | 328 ++++++++---------- 1 file changed, 143 insertions(+), 185 deletions(-) diff --git a/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp b/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp index 47b9ddc99..dfafb923b 100644 --- a/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp +++ b/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp @@ -38,6 +38,7 @@ using namespace gtsam; using noiseModel::Isotropic; using symbol_shorthand::M; using symbol_shorthand::X; +using symbol_shorthand::Z; /* ************************************************************************* */ // Check iterators of empty mixture. @@ -199,90 +200,161 @@ TEST(GaussianMixtureFactor, Error) { 4.0, mixtureFactor.error({continuousValues, discreteValues}), 1e-9); } +/** + * @brief Helper function to specify a Hybrid Bayes Net + * {P(X1) P(Z1 | X1, X2, M1)} and convert it to a Hybrid Factor Graph + * {P(X1)L(X1, X2, M1; Z1)} by converting to likelihoods given Z1. + * + * We can specify either different means or different sigmas, + * or both for each hybrid factor component. + * + * @param values Initial values for linearization. + * @param means The mean values for the conditional components. + * @param sigmas Noise model sigma values (standard deviation). + * @param m1 The discrete mode key. + * @param z1 The measurement value. 
+ * @return HybridGaussianFactorGraph
+ */
+HybridGaussianFactorGraph GetFactorGraphFromBayesNet(
+    const gtsam::Values &values, const std::vector<double> &means,
+    const std::vector<double> &sigmas, DiscreteKey &m1, double z1 = 0.0) {
+  // Noise models
+  auto model0 = noiseModel::Isotropic::Sigma(1, sigmas[0]);
+  auto model1 = noiseModel::Isotropic::Sigma(1, sigmas[1]);
+  auto prior_noise = noiseModel::Isotropic::Sigma(1, 1e-3);
+
+  // GaussianMixtureFactor component factors
+  auto f0 =
+      std::make_shared<BetweenFactor<double>>(X(1), X(2), means[0], model0);
+  auto f1 =
+      std::make_shared<BetweenFactor<double>>(X(1), X(2), means[1], model1);
+  std::vector<NonlinearFactor::shared_ptr> factors{f0, f1};
+
+  /// Get terms for each p^m(z1 | x1, x2)
+  Matrix H0_1, H0_2, H1_1, H1_2;
+  double x1 = values.at<double>(X(1)), x2 = values.at<double>(X(2));
+  Vector d0 = f0->evaluateError(x1, x2, &H0_1, &H0_2);
+  std::vector<std::pair<Key, Matrix>> terms0 = {{Z(1), gtsam::I_1x1 /*Rx*/},
+                                                //
+                                                {X(1), H0_1 /*Sp1*/},
+                                                {X(2), H0_2 /*Tp2*/}};
+
+  Vector d1 = f1->evaluateError(x1, x2, &H1_1, &H1_2);
+  std::vector<std::pair<Key, Matrix>> terms1 = {{Z(1), gtsam::I_1x1 /*Rx*/},
+                                                //
+                                                {X(1), H1_1 /*Sp1*/},
+                                                {X(2), H1_2 /*Tp2*/}};
+  // Create conditional P(Z1 | X1, X2, M1)
+  auto gm = new gtsam::GaussianMixture(
+      {Z(1)}, {X(1), X(2)}, {m1},
+      {std::make_shared<GaussianConditional>(terms0, 1, -d0, model0),
+       std::make_shared<GaussianConditional>(terms1, 1, -d1, model1)});
+  gtsam::HybridBayesNet bn;
+  bn.emplace_back(gm);
+
+  // Create FG via toFactorGraph
+  gtsam::VectorValues measurements;
+  measurements.insert(Z(1), gtsam::I_1x1 * z1);  // Set Z1 = z1
+  HybridGaussianFactorGraph mixture_fg = bn.toFactorGraph(measurements);
+
+  // Linearized prior factor on X1
+  auto prior = PriorFactor<double>(X(1), x1, prior_noise).linearize(values);
+  mixture_fg.push_back(prior);
+
+  return mixture_fg;
+}
+
+/* ************************************************************************* */
+/**
+ * @brief Test components with differing means.
+ *
+ * We specify a hybrid Bayes network P(Z | X, M) = p(X1) p(Z1 | X1, X2, M1),
+ * which is then converted to a factor graph by specifying Z1.
+ *
+ * p(Z1 | X1, X2, M1) has two components for the binary mode m1, with only the
+ * means being different.
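+ *
+ * With a single measurement z1 = 0 and equal sigmas, both modes explain the
+ * data equally well, so both assignments should score the same error of
+ * log(2) ≈ 0.693 coming from the uniform mode prior; the second half of the
+ * test adds a measurement on X2 to break the tie.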
+ */ +TEST(GaussianMixtureFactor, DifferentMeansHBN) { + DiscreteKey m1(M(1), 2); Values values; - double x1 = 0.0, x2 = 1.75, x3 = 2.60; + double x1 = 0.0, x2 = 1.75; values.insert(X(1), x1); values.insert(X(2), x2); - values.insert(X(3), x3); - auto model0 = noiseModel::Isotropic::Sigma(1, 1e-0); - auto model1 = noiseModel::Isotropic::Sigma(1, 1e-0); - auto prior_noise = noiseModel::Isotropic::Sigma(1, 1e-0); + // Different means, same sigma + std::vector means{0.0, 2.0}, sigmas{1e-0, 1e-0}; - auto f0 = std::make_shared>(X(1), X(2), 0.0, model0) - ->linearize(values); - auto f1 = std::make_shared>(X(1), X(2), 2.0, model1) - ->linearize(values); - std::vector factors{f0, f1}; - - GaussianMixtureFactor mixtureFactor({X(1), X(2)}, {m1}, factors, true); - HybridGaussianFactorGraph hfg; - hfg.push_back(mixtureFactor); - - f0 = std::make_shared>(X(2), X(3), 0.0, model0) - ->linearize(values); - f1 = std::make_shared>(X(2), X(3), 2.0, model1) - ->linearize(values); - std::vector factors23{f0, f1}; - hfg.push_back(GaussianMixtureFactor({X(2), X(3)}, {m2}, factors23, true)); - - auto prior = PriorFactor(X(1), x1, prior_noise).linearize(values); - hfg.push_back(prior); - - hfg.push_back(PriorFactor(X(2), 2.0, prior_noise).linearize(values)); - - auto bn = hfg.eliminateSequential(); - HybridValues actual = bn->optimize(); - - HybridValues expected( - VectorValues{ - {X(1), Vector1(0.0)}, {X(2), Vector1(0.25)}, {X(3), Vector1(-0.6)}}, - DiscreteValues{{M(1), 1}, {M(2), 0}}); - - EXPECT(assert_equal(expected, actual)); + HybridGaussianFactorGraph hfg = + GetFactorGraphFromBayesNet(values, means, sigmas, m1, 0.0); { - DiscreteValues dv{{M(1), 0}, {M(2), 0}}; - VectorValues cont = bn->optimize(dv); - double error = bn->error(HybridValues(cont, dv)); - // regression - EXPECT_DOUBLES_EQUAL(1.77418393408, error, 1e-9); + // With no measurement on X2, each mode should be equally likely + auto bn = hfg.eliminateSequential(); + HybridValues actual = bn->optimize(); + + HybridValues expected( + VectorValues{{X(1), Vector1(0.0)}, {X(2), Vector1(-1.75)}}, + DiscreteValues{{M(1), 0}}); + + EXPECT(assert_equal(expected, actual)); + + { + DiscreteValues dv{{M(1), 0}}; + VectorValues cont = bn->optimize(dv); + double error = bn->error(HybridValues(cont, dv)); + // regression + EXPECT_DOUBLES_EQUAL(0.69314718056, error, 1e-9); + } + { + DiscreteValues dv{{M(1), 1}}; + VectorValues cont = bn->optimize(dv); + double error = bn->error(HybridValues(cont, dv)); + // regression + EXPECT_DOUBLES_EQUAL(0.69314718056, error, 1e-9); + } } { - DiscreteValues dv{{M(1), 0}, {M(2), 1}}; - VectorValues cont = bn->optimize(dv); - double error = bn->error(HybridValues(cont, dv)); - // regression - EXPECT_DOUBLES_EQUAL(1.77418393408, error, 1e-9); - } - { - DiscreteValues dv{{M(1), 1}, {M(2), 0}}; - VectorValues cont = bn->optimize(dv); - double error = bn->error(HybridValues(cont, dv)); - // regression - EXPECT_DOUBLES_EQUAL(1.10751726741, error, 1e-9); - } - { - DiscreteValues dv{{M(1), 1}, {M(2), 1}}; - VectorValues cont = bn->optimize(dv); - double error = bn->error(HybridValues(cont, dv)); - // regression - EXPECT_DOUBLES_EQUAL(1.10751726741, error, 1e-9); + // If we add a measurement on X2, we have more information to work with. 
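+    // With the component means at 0 and 2, a tight measurement near 2
+    // should make mode m1 = 1 the clear winner.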
+ // Add a measurement on X2 + auto prior_noise = noiseModel::Isotropic::Sigma(1, 1e-3); + GaussianConditional meas_z2(Z(2), Vector1(2.0), I_1x1, X(2), I_1x1, + prior_noise); + auto prior_x2 = meas_z2.likelihood(Vector1(x2)); + + hfg.push_back(prior_x2); + + auto bn = hfg.eliminateSequential(); + HybridValues actual = bn->optimize(); + + HybridValues expected( + VectorValues{{X(1), Vector1(0.0)}, {X(2), Vector1(0.25)}}, + DiscreteValues{{M(1), 1}}); + + EXPECT(assert_equal(expected, actual)); + + { + DiscreteValues dv{{M(1), 0}}; + VectorValues cont = bn->optimize(dv); + double error = bn->error(HybridValues(cont, dv)); + // regression + EXPECT_DOUBLES_EQUAL(2.12692448787, error, 1e-9); + } + { + DiscreteValues dv{{M(1), 1}}; + VectorValues cont = bn->optimize(dv); + double error = bn->error(HybridValues(cont, dv)); + // regression + EXPECT_DOUBLES_EQUAL(0.126928487854, error, 1e-9); + } } } /* ************************************************************************* */ /** - * @brief Test components with differing covariances. - * The factor graph is - * *-X1-*-X2 - * | - * M1 + * @brief Test components with differing covariances + * but with a Bayes net P(Z|X, M) converted to a FG. */ TEST(GaussianMixtureFactor, DifferentCovariances) { DiscreteKey m1(M(1), 2); @@ -292,123 +364,9 @@ TEST(GaussianMixtureFactor, DifferentCovariances) { values.insert(X(1), x1); values.insert(X(2), x2); - double between = 0.0; - - auto model0 = noiseModel::Isotropic::Sigma(1, 1e2); - auto model1 = noiseModel::Isotropic::Sigma(1, 1e-2); - auto prior_noise = noiseModel::Isotropic::Sigma(1, 1e-3); - - auto f0 = - std::make_shared>(X(1), X(2), between, model0); - auto f1 = - std::make_shared>(X(1), X(2), between, model1); - std::vector factors{f0, f1}; - - // Create via toFactorGraph - using symbol_shorthand::Z; - Matrix H0_1, H0_2, H1_1, H1_2; - Vector d0 = f0->evaluateError(x1, x2, &H0_1, &H0_2); - std::vector> terms0 = {{Z(1), gtsam::I_1x1 /*Rx*/}, - // - {X(1), H0_1 /*Sp1*/}, - {X(2), H0_2 /*Tp2*/}}; - - Vector d1 = f1->evaluateError(x1, x2, &H1_1, &H1_2); - std::vector> terms1 = {{Z(1), gtsam::I_1x1 /*Rx*/}, - // - {X(1), H1_1 /*Sp1*/}, - {X(2), H1_2 /*Tp2*/}}; - gtsam::GaussianMixtureFactor gmf( - {X(1), X(2)}, {m1}, - {std::make_shared(X(1), H0_1, X(2), H0_2, -d0, model0), - std::make_shared(X(1), H1_1, X(2), H1_2, -d1, model1)}, - true); - - // Create FG with single GaussianMixtureFactor - HybridGaussianFactorGraph mixture_fg; - mixture_fg.add(gmf); - - // Linearized prior factor on X1 - auto prior = PriorFactor(X(1), x1, prior_noise).linearize(values); - mixture_fg.push_back(prior); - - auto hbn = mixture_fg.eliminateSequential(); - // hbn->print(); - - VectorValues cv; - cv.insert(X(1), Vector1(0.0)); - cv.insert(X(2), Vector1(0.0)); - - // Check that the error values at the MLE point μ. - AlgebraicDecisionTree errorTree = hbn->errorTree(cv); - - DiscreteValues dv0{{M(1), 0}}; - DiscreteValues dv1{{M(1), 1}}; - - // regression - EXPECT_DOUBLES_EQUAL(9.90348755254, errorTree(dv0), 1e-9); - EXPECT_DOUBLES_EQUAL(0.69314718056, errorTree(dv1), 1e-9); - - DiscreteConditional expected_m1(m1, "0.5/0.5"); - DiscreteConditional actual_m1 = *(hbn->at(2)->asDiscrete()); - - EXPECT(assert_equal(expected_m1, actual_m1)); -} - -/* ************************************************************************* */ -/** - * @brief Test components with differing covariances - * but with a Bayes net P(Z|X, M) converted to a FG. 
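+ * With equal means (0.0) and sigmas of 1e2 vs 1e-2, the test checks that the
+ * mode posterior P(m1) still comes out to 0.5/0.5 at the shared MLE point.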
- */ -TEST(GaussianMixtureFactor, DifferentCovariances2) { - DiscreteKey m1(M(1), 2); - - Values values; - double x1 = 1.0, x2 = 1.0; - values.insert(X(1), x1); - values.insert(X(2), x2); - - double between = 0.0; - - auto model0 = noiseModel::Isotropic::Sigma(1, 1e2); - auto model1 = noiseModel::Isotropic::Sigma(1, 1e-2); - auto prior_noise = noiseModel::Isotropic::Sigma(1, 1e-3); - - auto f0 = - std::make_shared>(X(1), X(2), between, model0); - auto f1 = - std::make_shared>(X(1), X(2), between, model1); - std::vector factors{f0, f1}; - - // Create via toFactorGraph - using symbol_shorthand::Z; - Matrix H0_1, H0_2, H1_1, H1_2; - Vector d0 = f0->evaluateError(x1, x2, &H0_1, &H0_2); - std::vector> terms0 = {{Z(1), gtsam::I_1x1 /*Rx*/}, - // - {X(1), H0_1 /*Sp1*/}, - {X(2), H0_2 /*Tp2*/}}; - - Vector d1 = f1->evaluateError(x1, x2, &H1_1, &H1_2); - std::vector> terms1 = {{Z(1), gtsam::I_1x1 /*Rx*/}, - // - {X(1), H1_1 /*Sp1*/}, - {X(2), H1_2 /*Tp2*/}}; - auto gm = new gtsam::GaussianMixture( - {Z(1)}, {X(1), X(2)}, {m1}, - {std::make_shared(terms0, 1, -d0, model0), - std::make_shared(terms1, 1, -d1, model1)}); - gtsam::HybridBayesNet bn; - bn.emplace_back(gm); - - gtsam::VectorValues measurements; - measurements.insert(Z(1), gtsam::Z_1x1); - // Create FG with single GaussianMixtureFactor - HybridGaussianFactorGraph mixture_fg = bn.toFactorGraph(measurements); - - // Linearized prior factor on X1 - auto prior = PriorFactor(X(1), x1, prior_noise).linearize(values); - mixture_fg.push_back(prior); + std::vector means{0.0, 0.0}, sigmas{1e2, 1e-2}; + HybridGaussianFactorGraph mixture_fg = + GetFactorGraphFromBayesNet(values, means, sigmas, m1); auto hbn = mixture_fg.eliminateSequential(); From 191fc3da118aedc9461c6cbb22a213538b9b2a27 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Wed, 21 Aug 2024 06:13:43 -0400 Subject: [PATCH 11/43] remove augmentation in GaussianMixture in favor of augmentation in GaussianMixtureFactor --- gtsam/hybrid/GaussianMixture.cpp | 18 ++---------------- gtsam/hybrid/GaussianMixtureFactor.cpp | 6 +++--- 2 files changed, 5 insertions(+), 19 deletions(-) diff --git a/gtsam/hybrid/GaussianMixture.cpp b/gtsam/hybrid/GaussianMixture.cpp index 0a0332af8..1440f7e9e 100644 --- a/gtsam/hybrid/GaussianMixture.cpp +++ b/gtsam/hybrid/GaussianMixture.cpp @@ -200,24 +200,10 @@ std::shared_ptr GaussianMixture::likelihood( const GaussianMixtureFactor::Factors likelihoods( conditionals_, [&](const GaussianConditional::shared_ptr &conditional) { const auto likelihood_m = conditional->likelihood(given); - const double Cgm_Kgcm = - logConstant_ - conditional->logNormalizationConstant(); - if (Cgm_Kgcm == 0.0) { - return likelihood_m; - } else { - // Add a constant factor to the likelihood in case the noise models - // are not all equal. 
- GaussianFactorGraph gfg; - gfg.push_back(likelihood_m); - Vector c(1); - c << std::sqrt(2.0 * Cgm_Kgcm); - auto constantFactor = std::make_shared(c); - gfg.push_back(constantFactor); - return std::make_shared(gfg); - } + return likelihood_m; }); return std::make_shared( - continuousParentKeys, discreteParentKeys, likelihoods); + continuousParentKeys, discreteParentKeys, likelihoods, true); } /* ************************************************************************* */ diff --git a/gtsam/hybrid/GaussianMixtureFactor.cpp b/gtsam/hybrid/GaussianMixtureFactor.cpp index e519cefe6..7fb16f0d1 100644 --- a/gtsam/hybrid/GaussianMixtureFactor.cpp +++ b/gtsam/hybrid/GaussianMixtureFactor.cpp @@ -29,7 +29,7 @@ namespace gtsam { /** - * @brief Helper function to correct the [A|b] matrices in the factor components + * @brief Helper function to augment the [A|b] matrices in the factor components * with the normalizer values. * This is done by storing the normalizer value in * the `b` vector as an additional row. @@ -39,7 +39,7 @@ namespace gtsam { * each component. * @return GaussianMixtureFactor::Factors */ -GaussianMixtureFactor::Factors correct( +GaussianMixtureFactor::Factors augment( const GaussianMixtureFactor::Factors &factors, bool varyingNormalizers) { if (!varyingNormalizers) { return factors; @@ -107,7 +107,7 @@ GaussianMixtureFactor::GaussianMixtureFactor(const KeyVector &continuousKeys, const Factors &factors, bool varyingNormalizers) : Base(continuousKeys, discreteKeys), - factors_(correct(factors, varyingNormalizers)) {} + factors_(augment(factors, varyingNormalizers)) {} /* *******************************************************************************/ bool GaussianMixtureFactor::equals(const HybridFactor &lf, double tol) const { From 41d6113aea27b8fd0bf16531d477c4f2356f0c79 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Wed, 21 Aug 2024 20:05:31 -0400 Subject: [PATCH 12/43] Kill == operator in VectorValues --- gtsam/linear/VectorValues.h | 5 ----- 1 file changed, 5 deletions(-) diff --git a/gtsam/linear/VectorValues.h b/gtsam/linear/VectorValues.h index 99ee4eb83..7fbd43ffc 100644 --- a/gtsam/linear/VectorValues.h +++ b/gtsam/linear/VectorValues.h @@ -263,11 +263,6 @@ namespace gtsam { /** equals required by Testable for unit testing */ bool equals(const VectorValues& x, double tol = 1e-9) const; - /// Check exact equality. - friend bool operator==(const VectorValues& lhs, const VectorValues& rhs) { - return lhs.equals(rhs); - } - /// @{ /// @name Advanced Interface /// @{ From 79c7c6a8b6f4bb2945ea6ac621fc3c3a1f66aaa6 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Wed, 21 Aug 2024 20:10:21 -0400 Subject: [PATCH 13/43] provide logNormalizers directly to the augment method --- gtsam/hybrid/GaussianMixtureFactor.cpp | 66 ++++++++++---------------- gtsam/hybrid/GaussianMixtureFactor.h | 20 ++++---- 2 files changed, 37 insertions(+), 49 deletions(-) diff --git a/gtsam/hybrid/GaussianMixtureFactor.cpp b/gtsam/hybrid/GaussianMixtureFactor.cpp index 7fb16f0d1..0427eef7b 100644 --- a/gtsam/hybrid/GaussianMixtureFactor.cpp +++ b/gtsam/hybrid/GaussianMixtureFactor.cpp @@ -35,45 +35,17 @@ namespace gtsam { * the `b` vector as an additional row. * * @param factors DecisionTree of GaussianFactor shared pointers. - * @param varyingNormalizers Flag indicating the normalizers are different for - * each component. + * @param logNormalizers Tree of log-normalizers corresponding to each + * Gaussian factor in factors. 
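+ * The minimum log-normalizer is subtracted from every entry so all values
+ * are non-negative, and each remaining value is folded into the factor's
+ * [A|b] matrix as an extra sqrt(log-normalizer) row in b.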
* @return GaussianMixtureFactor::Factors */ GaussianMixtureFactor::Factors augment( - const GaussianMixtureFactor::Factors &factors, bool varyingNormalizers) { - if (!varyingNormalizers) { - return factors; - } - - // First compute all the sqrt(|2 pi Sigma|) terms - auto computeNormalizers = [](const GaussianMixtureFactor::sharedFactor &gf) { - auto jf = std::dynamic_pointer_cast(gf); - // If we have, say, a Hessian factor, then no need to do anything - if (!jf) return 0.0; - - auto model = jf->get_model(); - // If there is no noise model, there is nothing to do. - if (!model) { - return 0.0; - } - // Since noise models are Gaussian, we can get the logDeterminant using the - // same trick as in GaussianConditional - double logDetR = - model->R().diagonal().unaryExpr([](double x) { return log(x); }).sum(); - double logDeterminantSigma = -2.0 * logDetR; - - size_t n = model->dim(); - constexpr double log2pi = 1.8378770664093454835606594728112; - return n * log2pi + logDeterminantSigma; - }; - - AlgebraicDecisionTree log_normalizers = - DecisionTree(factors, computeNormalizers); - + const GaussianMixtureFactor::Factors &factors, + const AlgebraicDecisionTree &logNormalizers) { // Find the minimum value so we can "proselytize" to positive values. // Done because we can't have sqrt of negative numbers. - double min_log_normalizer = log_normalizers.min(); - log_normalizers = log_normalizers.apply( + double min_log_normalizer = logNormalizers.min(); + AlgebraicDecisionTree log_normalizers = logNormalizers.apply( [&min_log_normalizer](double n) { return n - min_log_normalizer; }); // Finally, update the [A|b] matrices. @@ -82,8 +54,6 @@ GaussianMixtureFactor::Factors augment( const GaussianMixtureFactor::sharedFactor &gf) { auto jf = std::dynamic_pointer_cast(gf); if (!jf) return gf; - // If there is no noise model, there is nothing to do. 
- if (!jf->get_model()) return gf; // If the log_normalizer is 0, do nothing if (log_normalizers(assignment) == 0.0) return gf; @@ -102,12 +72,11 @@ GaussianMixtureFactor::Factors augment( } /* *******************************************************************************/ -GaussianMixtureFactor::GaussianMixtureFactor(const KeyVector &continuousKeys, - const DiscreteKeys &discreteKeys, - const Factors &factors, - bool varyingNormalizers) +GaussianMixtureFactor::GaussianMixtureFactor( + const KeyVector &continuousKeys, const DiscreteKeys &discreteKeys, + const Factors &factors, const AlgebraicDecisionTree &logNormalizers) : Base(continuousKeys, discreteKeys), - factors_(augment(factors, varyingNormalizers)) {} + factors_(augment(factors, logNormalizers)) {} /* *******************************************************************************/ bool GaussianMixtureFactor::equals(const HybridFactor &lf, double tol) const { @@ -194,6 +163,21 @@ double GaussianMixtureFactor::error(const HybridValues &values) const { const sharedFactor gf = factors_(values.discrete()); return gf->error(values.continuous()); } + /* *******************************************************************************/ +double ComputeLogNormalizer( + const noiseModel::Gaussian::shared_ptr &noise_model) { + // Since noise models are Gaussian, we can get the logDeterminant using + // the same trick as in GaussianConditional + double logDetR = noise_model->R() + .diagonal() + .unaryExpr([](double x) { return log(x); }) + .sum(); + double logDeterminantSigma = -2.0 * logDetR; + + size_t n = noise_model->dim(); + constexpr double log2pi = 1.8378770664093454835606594728112; + return n * log2pi + logDeterminantSigma; +} } // namespace gtsam diff --git a/gtsam/hybrid/GaussianMixtureFactor.h b/gtsam/hybrid/GaussianMixtureFactor.h index 588501bbe..6e9f6034e 100644 --- a/gtsam/hybrid/GaussianMixtureFactor.h +++ b/gtsam/hybrid/GaussianMixtureFactor.h @@ -82,13 +82,14 @@ class GTSAM_EXPORT GaussianMixtureFactor : public HybridFactor { * their cardinalities. * @param factors The decision tree of Gaussian factors stored as the mixture * density. - * @param varyingNormalizers Flag indicating factor components have varying - * normalizer values. + * @param logNormalizers Tree of log-normalizers corresponding to each + * Gaussian factor in factors. */ GaussianMixtureFactor(const KeyVector &continuousKeys, const DiscreteKeys &discreteKeys, const Factors &factors, - bool varyingNormalizers = false); + const AlgebraicDecisionTree &logNormalizers = + AlgebraicDecisionTree(0.0)); /** * @brief Construct a new GaussianMixtureFactor object using a vector of @@ -97,16 +98,16 @@ class GTSAM_EXPORT GaussianMixtureFactor : public HybridFactor { * @param continuousKeys Vector of keys for continuous factors. * @param discreteKeys Vector of discrete keys. * @param factors Vector of gaussian factor shared pointers. - * @param varyingNormalizers Flag indicating factor components have varying - * normalizer values. + * @param logNormalizers Tree of log-normalizers corresponding to each + * Gaussian factor in factors. 
*/ GaussianMixtureFactor(const KeyVector &continuousKeys, const DiscreteKeys &discreteKeys, const std::vector &factors, - bool varyingNormalizers = false) + const AlgebraicDecisionTree &logNormalizers = + AlgebraicDecisionTree(0.0)) : GaussianMixtureFactor(continuousKeys, discreteKeys, - Factors(discreteKeys, factors), - varyingNormalizers) {} + Factors(discreteKeys, factors), logNormalizers) {} /// @} /// @name Testable @@ -178,4 +179,7 @@ template <> struct traits : public Testable { }; +double ComputeLogNormalizer( + const noiseModel::Gaussian::shared_ptr &noise_model); + } // namespace gtsam From 9a6d2cf32384d437960def61cd3d547c5f73f41e Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Wed, 21 Aug 2024 20:10:40 -0400 Subject: [PATCH 14/43] update GaussianMixture::likelihood to compute the logNormalizers --- gtsam/hybrid/GaussianMixture.cpp | 19 ++++++++++++++++++- 1 file changed, 18 insertions(+), 1 deletion(-) diff --git a/gtsam/hybrid/GaussianMixture.cpp b/gtsam/hybrid/GaussianMixture.cpp index 1440f7e9e..6c92f0252 100644 --- a/gtsam/hybrid/GaussianMixture.cpp +++ b/gtsam/hybrid/GaussianMixture.cpp @@ -202,8 +202,25 @@ std::shared_ptr GaussianMixture::likelihood( const auto likelihood_m = conditional->likelihood(given); return likelihood_m; }); + + // First compute all the sqrt(|2 pi Sigma|) terms + auto computeLogNormalizers = [](const GaussianFactor::shared_ptr &gf) { + auto jf = std::dynamic_pointer_cast(gf); + // If we have, say, a Hessian factor, then no need to do anything + if (!jf) return 0.0; + + auto model = jf->get_model(); + // If there is no noise model, there is nothing to do. + if (!model) { + return 0.0; + } + return ComputeLogNormalizer(model); + }; + + AlgebraicDecisionTree log_normalizers = + DecisionTree(likelihoods, computeLogNormalizers); return std::make_shared( - continuousParentKeys, discreteParentKeys, likelihoods, true); + continuousParentKeys, discreteParentKeys, likelihoods, log_normalizers); } /* ************************************************************************* */ From 98a2668fb1e78684ec9fa290e032235162cf2fe9 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Thu, 22 Aug 2024 11:57:57 -0400 Subject: [PATCH 15/43] add docstring and GTSAM_EXPORT for ComputeLogNormalizer --- gtsam/hybrid/GaussianMixtureFactor.h | 11 ++++++++++- 1 file changed, 10 insertions(+), 1 deletion(-) diff --git a/gtsam/hybrid/GaussianMixtureFactor.h b/gtsam/hybrid/GaussianMixtureFactor.h index 6e9f6034e..6680abb54 100644 --- a/gtsam/hybrid/GaussianMixtureFactor.h +++ b/gtsam/hybrid/GaussianMixtureFactor.h @@ -179,7 +179,16 @@ template <> struct traits : public Testable { }; -double ComputeLogNormalizer( +/** + * @brief Helper function to compute the sqrt(|2πΣ|) normalizer values + * for a Gaussian noise model. + * We compute this in the log-space for numerical accuracy. + * + * @param noise_model The Gaussian noise model + * whose normalizer we wish to compute. 
+ * @return double + */ +GTSAM_EXPORT double ComputeLogNormalizer( const noiseModel::Gaussian::shared_ptr &noise_model); } // namespace gtsam From cd9ee0845765767e98c8f334e3a2cf4a4bed6a7e Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Thu, 22 Aug 2024 14:43:23 -0400 Subject: [PATCH 16/43] remove augment method in favor of conversion --- gtsam/hybrid/GaussianMixture.cpp | 35 ++++++++--------- gtsam/hybrid/GaussianMixtureFactor.cpp | 52 ++------------------------ gtsam/hybrid/GaussianMixtureFactor.h | 14 ++----- 3 files changed, 23 insertions(+), 78 deletions(-) diff --git a/gtsam/hybrid/GaussianMixture.cpp b/gtsam/hybrid/GaussianMixture.cpp index 6c92f0252..0a0332af8 100644 --- a/gtsam/hybrid/GaussianMixture.cpp +++ b/gtsam/hybrid/GaussianMixture.cpp @@ -200,27 +200,24 @@ std::shared_ptr GaussianMixture::likelihood( const GaussianMixtureFactor::Factors likelihoods( conditionals_, [&](const GaussianConditional::shared_ptr &conditional) { const auto likelihood_m = conditional->likelihood(given); - return likelihood_m; + const double Cgm_Kgcm = + logConstant_ - conditional->logNormalizationConstant(); + if (Cgm_Kgcm == 0.0) { + return likelihood_m; + } else { + // Add a constant factor to the likelihood in case the noise models + // are not all equal. + GaussianFactorGraph gfg; + gfg.push_back(likelihood_m); + Vector c(1); + c << std::sqrt(2.0 * Cgm_Kgcm); + auto constantFactor = std::make_shared(c); + gfg.push_back(constantFactor); + return std::make_shared(gfg); + } }); - - // First compute all the sqrt(|2 pi Sigma|) terms - auto computeLogNormalizers = [](const GaussianFactor::shared_ptr &gf) { - auto jf = std::dynamic_pointer_cast(gf); - // If we have, say, a Hessian factor, then no need to do anything - if (!jf) return 0.0; - - auto model = jf->get_model(); - // If there is no noise model, there is nothing to do. - if (!model) { - return 0.0; - } - return ComputeLogNormalizer(model); - }; - - AlgebraicDecisionTree log_normalizers = - DecisionTree(likelihoods, computeLogNormalizers); return std::make_shared( - continuousParentKeys, discreteParentKeys, likelihoods, log_normalizers); + continuousParentKeys, discreteParentKeys, likelihoods); } /* ************************************************************************* */ diff --git a/gtsam/hybrid/GaussianMixtureFactor.cpp b/gtsam/hybrid/GaussianMixtureFactor.cpp index 0427eef7b..b4ee89cb0 100644 --- a/gtsam/hybrid/GaussianMixtureFactor.cpp +++ b/gtsam/hybrid/GaussianMixtureFactor.cpp @@ -28,55 +28,11 @@ namespace gtsam { -/** - * @brief Helper function to augment the [A|b] matrices in the factor components - * with the normalizer values. - * This is done by storing the normalizer value in - * the `b` vector as an additional row. - * - * @param factors DecisionTree of GaussianFactor shared pointers. - * @param logNormalizers Tree of log-normalizers corresponding to each - * Gaussian factor in factors. - * @return GaussianMixtureFactor::Factors - */ -GaussianMixtureFactor::Factors augment( - const GaussianMixtureFactor::Factors &factors, - const AlgebraicDecisionTree &logNormalizers) { - // Find the minimum value so we can "proselytize" to positive values. - // Done because we can't have sqrt of negative numbers. - double min_log_normalizer = logNormalizers.min(); - AlgebraicDecisionTree log_normalizers = logNormalizers.apply( - [&min_log_normalizer](double n) { return n - min_log_normalizer; }); - - // Finally, update the [A|b] matrices. 
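A note on the sqrt(2.0 * Cgm_Kgcm) constant in the likelihood() conversion above, and on the sqrt(...) row in the augment() code being removed here: GTSAM defines a factor's error as 0.5*||A*x - b||^2, so a JacobianFactor built from a bare one-entry vector equal to sqrt(2*c) contributes exactly c to the negative log-density, independent of the continuous values. A minimal sketch of just that mechanism (the value of c is made up):

    #include <cmath>
    #include <gtsam/linear/JacobianFactor.h>
    #include <gtsam/linear/VectorValues.h>

    int main() {
      using namespace gtsam;
      const double c = 1.3;  // hypothetical log-normalizer difference
      Vector b(1);
      b << std::sqrt(2.0 * c);
      JacobianFactor constantFactor(b);  // no keys: error(x) = 0.5*||b||^2
      return std::abs(constantFactor.error(VectorValues()) - c) < 1e-12 ? 0 : 1;
    }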
- auto update = [&log_normalizers]( - const Assignment &assignment, - const GaussianMixtureFactor::sharedFactor &gf) { - auto jf = std::dynamic_pointer_cast(gf); - if (!jf) return gf; - // If the log_normalizer is 0, do nothing - if (log_normalizers(assignment) == 0.0) return gf; - - GaussianFactorGraph gfg; - gfg.push_back(jf); - - Vector c(1); - c << std::sqrt(log_normalizers(assignment)); - auto constantFactor = std::make_shared(c); - - gfg.push_back(constantFactor); - return std::dynamic_pointer_cast( - std::make_shared(gfg)); - }; - return factors.apply(update); -} - /* *******************************************************************************/ -GaussianMixtureFactor::GaussianMixtureFactor( - const KeyVector &continuousKeys, const DiscreteKeys &discreteKeys, - const Factors &factors, const AlgebraicDecisionTree &logNormalizers) - : Base(continuousKeys, discreteKeys), - factors_(augment(factors, logNormalizers)) {} +GaussianMixtureFactor::GaussianMixtureFactor(const KeyVector &continuousKeys, + const DiscreteKeys &discreteKeys, + const Factors &factors) + : Base(continuousKeys, discreteKeys), factors_(factors) {} /* *******************************************************************************/ bool GaussianMixtureFactor::equals(const HybridFactor &lf, double tol) const { diff --git a/gtsam/hybrid/GaussianMixtureFactor.h b/gtsam/hybrid/GaussianMixtureFactor.h index 6680abb54..4570268f1 100644 --- a/gtsam/hybrid/GaussianMixtureFactor.h +++ b/gtsam/hybrid/GaussianMixtureFactor.h @@ -82,14 +82,10 @@ class GTSAM_EXPORT GaussianMixtureFactor : public HybridFactor { * their cardinalities. * @param factors The decision tree of Gaussian factors stored as the mixture * density. - * @param logNormalizers Tree of log-normalizers corresponding to each - * Gaussian factor in factors. */ GaussianMixtureFactor(const KeyVector &continuousKeys, const DiscreteKeys &discreteKeys, - const Factors &factors, - const AlgebraicDecisionTree &logNormalizers = - AlgebraicDecisionTree(0.0)); + const Factors &factors); /** * @brief Construct a new GaussianMixtureFactor object using a vector of @@ -98,16 +94,12 @@ class GTSAM_EXPORT GaussianMixtureFactor : public HybridFactor { * @param continuousKeys Vector of keys for continuous factors. * @param discreteKeys Vector of discrete keys. * @param factors Vector of gaussian factor shared pointers. - * @param logNormalizers Tree of log-normalizers corresponding to each - * Gaussian factor in factors. 
*/ GaussianMixtureFactor(const KeyVector &continuousKeys, const DiscreteKeys &discreteKeys, - const std::vector &factors, - const AlgebraicDecisionTree &logNormalizers = - AlgebraicDecisionTree(0.0)) + const std::vector &factors) : GaussianMixtureFactor(continuousKeys, discreteKeys, - Factors(discreteKeys, factors), logNormalizers) {} + Factors(discreteKeys, factors)) {} /// @} /// @name Testable From 91ccbebf224c52063dd43ffb69ecb7cef889cfbd Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Thu, 22 Aug 2024 14:44:54 -0400 Subject: [PATCH 17/43] remove ComputeLogNormalizer function --- gtsam/hybrid/GaussianMixtureFactor.cpp | 16 ---------------- gtsam/hybrid/GaussianMixtureFactor.h | 12 ------------ 2 files changed, 28 deletions(-) diff --git a/gtsam/hybrid/GaussianMixtureFactor.cpp b/gtsam/hybrid/GaussianMixtureFactor.cpp index b4ee89cb0..8471cef33 100644 --- a/gtsam/hybrid/GaussianMixtureFactor.cpp +++ b/gtsam/hybrid/GaussianMixtureFactor.cpp @@ -120,20 +120,4 @@ double GaussianMixtureFactor::error(const HybridValues &values) const { return gf->error(values.continuous()); } -/* *******************************************************************************/ -double ComputeLogNormalizer( - const noiseModel::Gaussian::shared_ptr &noise_model) { - // Since noise models are Gaussian, we can get the logDeterminant using - // the same trick as in GaussianConditional - double logDetR = noise_model->R() - .diagonal() - .unaryExpr([](double x) { return log(x); }) - .sum(); - double logDeterminantSigma = -2.0 * logDetR; - - size_t n = noise_model->dim(); - constexpr double log2pi = 1.8378770664093454835606594728112; - return n * log2pi + logDeterminantSigma; -} - } // namespace gtsam diff --git a/gtsam/hybrid/GaussianMixtureFactor.h b/gtsam/hybrid/GaussianMixtureFactor.h index 4570268f1..b1be6ef1d 100644 --- a/gtsam/hybrid/GaussianMixtureFactor.h +++ b/gtsam/hybrid/GaussianMixtureFactor.h @@ -171,16 +171,4 @@ template <> struct traits : public Testable { }; -/** - * @brief Helper function to compute the sqrt(|2πΣ|) normalizer values - * for a Gaussian noise model. - * We compute this in the log-space for numerical accuracy. - * - * @param noise_model The Gaussian noise model - * whose normalizer we wish to compute. - * @return double - */ -GTSAM_EXPORT double ComputeLogNormalizer( - const noiseModel::Gaussian::shared_ptr &noise_model); - } // namespace gtsam From 3d8383089bb19e0f60ce1afa545bf6e27de1e70e Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Sat, 24 Aug 2024 22:34:57 -0400 Subject: [PATCH 18/43] don't check if GaussianFactor has no contKeys since it may not (e.g. 
GaussianMixtureModel) --- gtsam/hybrid/GaussianMixtureFactor.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/gtsam/hybrid/GaussianMixtureFactor.cpp b/gtsam/hybrid/GaussianMixtureFactor.cpp index 8471cef33..94bc09407 100644 --- a/gtsam/hybrid/GaussianMixtureFactor.cpp +++ b/gtsam/hybrid/GaussianMixtureFactor.cpp @@ -66,7 +66,7 @@ void GaussianMixtureFactor::print(const std::string &s, [&](const sharedFactor &gf) -> std::string { RedirectCout rd; std::cout << ":\n"; - if (gf && !gf->empty()) { + if (gf) { gf->print("", formatter); return rd.str(); } else { From 1f373fd136e1ac46a3e06b1f568e3bff1737989f Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Sun, 25 Aug 2024 00:48:32 -0400 Subject: [PATCH 19/43] add examples of Hybrid FGs from HBNs as unit tests --- .../tests/testGaussianMixtureFactor.cpp | 233 +++++++++++++++++- 1 file changed, 229 insertions(+), 4 deletions(-) diff --git a/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp b/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp index dfafb923b..0910d2f40 100644 --- a/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp +++ b/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp @@ -200,6 +200,228 @@ TEST(GaussianMixtureFactor, Error) { 4.0, mixtureFactor.error({continuousValues, discreteValues}), 1e-9); } +/* ************************************************************************* */ +/** + * Test a simple Gaussian Mixture Model represented as P(m)P(z|m) + * where m is a discrete variable and z is a continuous variable. + * m is binary and depending on m, we have 2 different means + * μ1 and μ2 for the Gaussian distribution around which we sample z. + * + * The resulting factor graph should eliminate to a Bayes net + * which represents a sigmoid function. + */ +TEST(GaussianMixtureFactor, GaussianMixtureModel) { + double mu0 = 1.0, mu1 = 3.0; + double sigma = 2.0; + auto model = noiseModel::Isotropic::Sigma(1, sigma); + + DiscreteKey m(M(0), 2); + Key z = Z(0); + + auto c0 = make_shared(z, Vector1(mu0), I_1x1, model), + c1 = make_shared(z, Vector1(mu1), I_1x1, model); + + auto gm = new GaussianMixture({z}, {}, {m}, {c0, c1}); + auto mixing = new DiscreteConditional(m, "0.5/0.5"); + + HybridBayesNet hbn; + hbn.emplace_back(gm); + hbn.emplace_back(mixing); + + // The result should be a sigmoid. + // So should be m = 0.5 at z=3.0 - 1.0=2.0 + VectorValues given; + given.insert(z, Vector1(mu1 - mu0)); + + HybridGaussianFactorGraph gfg = hbn.toFactorGraph(given); + HybridBayesNet::shared_ptr bn = gfg.eliminateSequential(); + + HybridBayesNet expected; + expected.emplace_back(new DiscreteConditional(m, "0.5/0.5")); + + EXPECT(assert_equal(expected, *bn)); +} + +/* ************************************************************************* */ +/** + * Test a simple Gaussian Mixture Model represented as P(m)P(z|m) + * where m is a discrete variable and z is a continuous variable. + * m is binary and depending on m, we have 2 different means + * and covariances each for the + * Gaussian distribution around which we sample z. + * + * The resulting factor graph should eliminate to a Bayes net + * which represents a sigmoid function leaning towards + * the tighter covariance Gaussian. 
+ */ +TEST(GaussianMixtureFactor, GaussianMixtureModel2) { + double mu0 = 1.0, mu1 = 3.0; + auto model0 = noiseModel::Isotropic::Sigma(1, 8.0); + auto model1 = noiseModel::Isotropic::Sigma(1, 4.0); + + DiscreteKey m(M(0), 2); + Key z = Z(0); + + auto c0 = make_shared(z, Vector1(mu0), I_1x1, model0), + c1 = make_shared(z, Vector1(mu1), I_1x1, model1); + + auto gm = new GaussianMixture({z}, {}, {m}, {c0, c1}); + auto mixing = new DiscreteConditional(m, "0.5/0.5"); + + HybridBayesNet hbn; + hbn.emplace_back(gm); + hbn.emplace_back(mixing); + + // The result should be a sigmoid leaning towards model1 + // since it has the tighter covariance. + // So should be m = 0.34/0.66 at z=3.0 - 1.0=2.0 + VectorValues given; + given.insert(z, Vector1(mu1 - mu0)); + HybridGaussianFactorGraph gfg = hbn.toFactorGraph(given); + HybridBayesNet::shared_ptr bn = gfg.eliminateSequential(); + + HybridBayesNet expected; + expected.emplace_back( + new DiscreteConditional(m, "0.338561851224/0.661438148776")); + + EXPECT(assert_equal(expected, *bn)); +} + +/* ************************************************************************* */ +/** + * Test a model P(x0)P(z0|x0)p(x1|m1)p(z1|x1)p(m1). + * + * p(x1|m1) has different means and same covariance. + * + * Converting to a factor graph gives us + * P(x0)ϕ(x0)P(x1|m1)ϕ(x1)P(m1) + * + * If we only have a measurement on z0, then + * the probability of x1 should be 0.5/0.5. + * Getting a measurement on z1 gives use more information. + */ +TEST(GaussianMixtureFactor, TwoStateModel) { + double mu0 = 1.0, mu1 = 3.0; + auto model = noiseModel::Isotropic::Sigma(1, 2.0); + + DiscreteKey m1(M(1), 2); + Key z0 = Z(0), z1 = Z(1), x0 = X(0), x1 = X(1); + + auto c0 = make_shared(x1, Vector1(mu0), I_1x1, model), + c1 = make_shared(x1, Vector1(mu1), I_1x1, model); + + auto p_x0 = new GaussianConditional(x0, Vector1(0.0), I_1x1, + noiseModel::Isotropic::Sigma(1, 1.0)); + auto p_z0x0 = new GaussianConditional(z0, Vector1(0.0), I_1x1, x0, -I_1x1, + noiseModel::Isotropic::Sigma(1, 1.0)); + auto p_x1m1 = new GaussianMixture({x1}, {}, {m1}, {c0, c1}); + auto p_z1x1 = new GaussianConditional(z1, Vector1(0.0), I_1x1, x1, -I_1x1, + noiseModel::Isotropic::Sigma(1, 3.0)); + auto p_m1 = new DiscreteConditional(m1, "0.5/0.5"); + + HybridBayesNet hbn; + hbn.emplace_back(p_x0); + hbn.emplace_back(p_z0x0); + hbn.emplace_back(p_x1m1); + hbn.emplace_back(p_m1); + + VectorValues given; + given.insert(z0, Vector1(0.5)); + + { + // Start with no measurement on x1, only on x0 + HybridGaussianFactorGraph gfg = hbn.toFactorGraph(given); + HybridBayesNet::shared_ptr bn = gfg.eliminateSequential(); + + // Since no measurement on x1, we hedge our bets + DiscreteConditional expected(m1, "0.5/0.5"); + + EXPECT(assert_equal(expected, *(bn->at(2)->asDiscrete()))); + } + + { + // Now we add a measurement z1 on x1 + hbn.emplace_back(p_z1x1); + + given.insert(z1, Vector1(2.2)); + HybridGaussianFactorGraph gfg = hbn.toFactorGraph(given); + HybridBayesNet::shared_ptr bn = gfg.eliminateSequential(); + + // Since we have a measurement on z2, we get a definite result + DiscreteConditional expected(m1, "0.4923083/0.5076917"); + + EXPECT(assert_equal(expected, *(bn->at(2)->asDiscrete()), 1e-6)); + } +} + +/* ************************************************************************* */ +/** + * Test a model P(x0)P(z0|x0)p(x1|m1)p(z1|x1)p(m1). + * + * p(x1|m1) has different means and different covariances. 
+ * + * Converting to a factor graph gives us + * P(x0)ϕ(x0)P(x1|m1)ϕ(x1)P(m1) + * + * If we only have a measurement on z0, then + * the probability of x1 should be the ratio of covariances. + * Getting a measurement on z1 gives use more information. + */ +TEST(GaussianMixtureFactor, TwoStateModel2) { + double mu0 = 1.0, mu1 = 3.0; + auto model0 = noiseModel::Isotropic::Sigma(1, 6.0); + auto model1 = noiseModel::Isotropic::Sigma(1, 4.0); + + DiscreteKey m1(M(1), 2); + Key z0 = Z(0), z1 = Z(1), x0 = X(0), x1 = X(1); + + auto c0 = make_shared(x1, Vector1(mu0), I_1x1, model0), + c1 = make_shared(x1, Vector1(mu1), I_1x1, model1); + + auto p_x0 = new GaussianConditional(x0, Vector1(0.0), I_1x1, + noiseModel::Isotropic::Sigma(1, 1.0)); + auto p_z0x0 = new GaussianConditional(z0, Vector1(0.0), I_1x1, x0, -I_1x1, + noiseModel::Isotropic::Sigma(1, 1.0)); + auto p_x1m1 = new GaussianMixture({x1}, {}, {m1}, {c0, c1}); + auto p_z1x1 = new GaussianConditional(z1, Vector1(0.0), I_1x1, x1, -I_1x1, + noiseModel::Isotropic::Sigma(1, 3.0)); + auto p_m1 = new DiscreteConditional(m1, "0.5/0.5"); + + HybridBayesNet hbn; + hbn.emplace_back(p_x0); + hbn.emplace_back(p_z0x0); + hbn.emplace_back(p_x1m1); + hbn.emplace_back(p_m1); + + VectorValues given; + given.insert(z0, Vector1(0.5)); + + { + // Start with no measurement on x1, only on x0 + HybridGaussianFactorGraph gfg = hbn.toFactorGraph(given); + HybridBayesNet::shared_ptr bn = gfg.eliminateSequential(); + + // Since no measurement on x1, we get the ratio of covariances. + DiscreteConditional expected(m1, "0.6/0.4"); + + EXPECT(assert_equal(expected, *(bn->at(2)->asDiscrete()))); + } + + { + // Now we add a measurement z1 on x1 + hbn.emplace_back(p_z1x1); + + given.insert(z1, Vector1(2.2)); + HybridGaussianFactorGraph gfg = hbn.toFactorGraph(given); + HybridBayesNet::shared_ptr bn = gfg.eliminateSequential(); + + // Since we have a measurement on z2, we get a definite result + DiscreteConditional expected(m1, "0.52706646/0.47293354"); + + EXPECT(assert_equal(expected, *(bn->at(2)->asDiscrete()), 1e-6)); + } +} + /** * @brief Helper function to specify a Hybrid Bayes Net * {P(X1) P(Z1 | X1, X2, M1)} and convert it to a Hybrid Factor Graph @@ -271,11 +493,12 @@ HybridGaussianFactorGraph GetFactorGraphFromBayesNet( * * We specify a hybrid Bayes network P(Z | X, M) =p(X1)p(Z1 | X1, X2, M1), * which is then converted to a factor graph by specifying Z1. - * - * p(Z1 | X1, X2, M1) has 2 factors each for the binary mode m1, with only the - * means being different. + * This is a different case since now we have a hybrid factor + * with 2 continuous variables ϕ(x1, x2, m1). + * p(Z1 | X1, X2, M1) has 2 factors each for the binary + * mode m1, with only the means being different. */ -TEST(GaussianMixtureFactor, DifferentMeansHBN) { +TEST(GaussianMixtureFactor, DifferentMeans) { DiscreteKey m1(M(1), 2); Values values; @@ -355,6 +578,8 @@ TEST(GaussianMixtureFactor, DifferentMeansHBN) { /** * @brief Test components with differing covariances * but with a Bayes net P(Z|X, M) converted to a FG. + * Same as the DifferentMeans example but in this case, + * we keep the means the same and vary the covariances. 
*/ TEST(GaussianMixtureFactor, DifferentCovariances) { DiscreteKey m1(M(1), 2); From 7358effe038d0cb82c9b378a21c8afabfa0ea269 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Sun, 25 Aug 2024 00:49:28 -0400 Subject: [PATCH 20/43] allow for GaussianMixtureFactors with 0 continuous keys --- gtsam/hybrid/HybridGaussianFactorGraph.cpp | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/gtsam/hybrid/HybridGaussianFactorGraph.cpp b/gtsam/hybrid/HybridGaussianFactorGraph.cpp index bf11a50fc..cb8ceed20 100644 --- a/gtsam/hybrid/HybridGaussianFactorGraph.cpp +++ b/gtsam/hybrid/HybridGaussianFactorGraph.cpp @@ -242,6 +242,18 @@ discreteElimination(const HybridGaussianFactorGraph &factors, for (auto &f : factors) { if (auto df = dynamic_pointer_cast(f)) { dfg.push_back(df); + } else if (auto gmf = dynamic_pointer_cast(f)) { + // Case where we have a GaussianMixtureFactor with no continuous keys. + // In this case, compute discrete probabilities. + auto probability = + [&](const GaussianFactor::shared_ptr &factor) -> double { + if (!factor) return 0.0; + return exp(-factor->error(VectorValues())); + }; + dfg.emplace_shared( + gmf->discreteKeys(), + DecisionTree(gmf->factors(), probability)); + } else if (auto orphan = dynamic_pointer_cast(f)) { // Ignore orphaned clique. // TODO(dellaert): is this correct? If so explain here. From 88c2ad55bea7d23148029f895cec4f453ca3b8ad Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Sun, 25 Aug 2024 00:49:36 -0400 Subject: [PATCH 21/43] better docstring --- gtsam/hybrid/GaussianMixtureFactor.h | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/gtsam/hybrid/GaussianMixtureFactor.h b/gtsam/hybrid/GaussianMixtureFactor.h index b1be6ef1d..2459af259 100644 --- a/gtsam/hybrid/GaussianMixtureFactor.h +++ b/gtsam/hybrid/GaussianMixtureFactor.h @@ -80,8 +80,8 @@ class GTSAM_EXPORT GaussianMixtureFactor : public HybridFactor { * @param continuousKeys A vector of keys representing continuous variables. * @param discreteKeys A vector of keys representing discrete variables and * their cardinalities. - * @param factors The decision tree of Gaussian factors stored as the mixture - * density. + * @param factors The decision tree of Gaussian factors stored + * as the mixture density. */ GaussianMixtureFactor(const KeyVector &continuousKeys, const DiscreteKeys &discreteKeys, From f4830baa5e9e319259f17d31ae002e15904744b0 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Mon, 26 Aug 2024 17:27:44 -0400 Subject: [PATCH 22/43] common logProbability normalization function --- gtsam/hybrid/HybridGaussianFactorGraph.cpp | 43 ++++++++++++++++------ 1 file changed, 31 insertions(+), 12 deletions(-) diff --git a/gtsam/hybrid/HybridGaussianFactorGraph.cpp b/gtsam/hybrid/HybridGaussianFactorGraph.cpp index cb8ceed20..bbf739c65 100644 --- a/gtsam/hybrid/HybridGaussianFactorGraph.cpp +++ b/gtsam/hybrid/HybridGaussianFactorGraph.cpp @@ -233,6 +233,26 @@ continuousElimination(const HybridGaussianFactorGraph &factors, return {std::make_shared(result.first), result.second}; } +/* ************************************************************************ */ +/** + * @brief Exponential log-probabilities after performing + * the necessary normalizations. + * + * @param logProbabilities DecisionTree of log-probabilities. 
+ * @return AlgebraicDecisionTree + */ +static AlgebraicDecisionTree exponentiateLogProbabilities( + const AlgebraicDecisionTree &logProbabilities) { + // Perform normalization + double max_log = logProbabilities.max(); + AlgebraicDecisionTree probabilities = DecisionTree( + logProbabilities, + [&max_log](const double x) { return exp(x - max_log); }); + probabilities = probabilities.normalize(probabilities.sum()); + + return probabilities; +} + /* ************************************************************************ */ static std::pair> discreteElimination(const HybridGaussianFactorGraph &factors, @@ -245,14 +265,18 @@ discreteElimination(const HybridGaussianFactorGraph &factors, } else if (auto gmf = dynamic_pointer_cast(f)) { // Case where we have a GaussianMixtureFactor with no continuous keys. // In this case, compute discrete probabilities. - auto probability = + auto logProbability = [&](const GaussianFactor::shared_ptr &factor) -> double { if (!factor) return 0.0; - return exp(-factor->error(VectorValues())); + return -factor->error(VectorValues()); }; - dfg.emplace_shared( - gmf->discreteKeys(), - DecisionTree(gmf->factors(), probability)); + AlgebraicDecisionTree logProbabilities = + DecisionTree(gmf->factors(), logProbability); + + AlgebraicDecisionTree probabilities = + exponentiateLogProbabilities(logProbabilities); + dfg.emplace_shared(gmf->discreteKeys(), + probabilities); } else if (auto orphan = dynamic_pointer_cast(f)) { // Ignore orphaned clique. @@ -315,13 +339,8 @@ static std::shared_ptr createDiscreteFactor( AlgebraicDecisionTree logProbabilities( DecisionTree(eliminationResults, logProbability)); - - // Perform normalization - double max_log = logProbabilities.max(); - AlgebraicDecisionTree probabilities = DecisionTree( - logProbabilities, - [&max_log](const double x) { return exp(x - max_log); }); - probabilities = probabilities.normalize(probabilities.sum()); + AlgebraicDecisionTree probabilities = + exponentiateLogProbabilities(logProbabilities); return std::make_shared(discreteSeparator, probabilities); } From e81272b078ea36f0e6cca1f77d28fc3102afba2f Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Tue, 27 Aug 2024 09:55:05 -0400 Subject: [PATCH 23/43] fix comments --- gtsam/hybrid/tests/testGaussianMixtureFactor.cpp | 13 ++++++------- 1 file changed, 6 insertions(+), 7 deletions(-) diff --git a/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp b/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp index 0910d2f40..4c293c2b9 100644 --- a/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp +++ b/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp @@ -251,8 +251,8 @@ TEST(GaussianMixtureFactor, GaussianMixtureModel) { * Gaussian distribution around which we sample z. * * The resulting factor graph should eliminate to a Bayes net - * which represents a sigmoid function leaning towards - * the tighter covariance Gaussian. + * which represents a Gaussian-like function + * where m1>m0 close to 3.1333. */ TEST(GaussianMixtureFactor, GaussianMixtureModel2) { double mu0 = 1.0, mu1 = 3.0; @@ -272,17 +272,16 @@ TEST(GaussianMixtureFactor, GaussianMixtureModel2) { hbn.emplace_back(gm); hbn.emplace_back(mixing); - // The result should be a sigmoid leaning towards model1 - // since it has the tighter covariance. - // So should be m = 0.34/0.66 at z=3.0 - 1.0=2.0 + // The result should be a bell curve like function + // with m1 > m0 close to 3.1333. 
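An aside on the max-shift inside the exponentiateLogProbabilities helper above: exponentiating large negative log-values underflows double precision, but subtracting the maximum first preserves the ratios exactly, which is all normalization needs. A tiny self-contained illustration (the log-values are made up):

    #include <algorithm>
    #include <cmath>
    #include <cstdio>

    int main() {
      double log0 = -1000.0, log1 = -1001.0;  // hypothetical unnormalized logs
      double naive = std::exp(log0);          // underflows to 0.0
      double max_log = std::max(log0, log1);  // shift so the max maps to 1.0
      double p0 = std::exp(log0 - max_log);   // 1.0
      double p1 = std::exp(log1 - max_log);   // exp(-1)
      double sum = p0 + p1;
      // prints: naive=0 p0=0.731059 p1=0.268941
      std::printf("naive=%g p0=%g p1=%g\n", naive, p0 / sum, p1 / sum);
      return 0;
    }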
VectorValues given; - given.insert(z, Vector1(mu1 - mu0)); + given.insert(z, Vector1(3.133)); HybridGaussianFactorGraph gfg = hbn.toFactorGraph(given); HybridBayesNet::shared_ptr bn = gfg.eliminateSequential(); HybridBayesNet expected; expected.emplace_back( - new DiscreteConditional(m, "0.338561851224/0.661438148776")); + new DiscreteConditional(m, "0.325603277954/0.674396722046")); EXPECT(assert_equal(expected, *bn)); } From 3dab868ef0cf2e8c8093366d1afaf06ea19d8635 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Wed, 28 Aug 2024 12:47:24 -0400 Subject: [PATCH 24/43] rename from exponentiateLogProbabilities to probabilitiesFromLogValues --- gtsam/hybrid/HybridGaussianFactorGraph.cpp | 19 +++++++++---------- 1 file changed, 9 insertions(+), 10 deletions(-) diff --git a/gtsam/hybrid/HybridGaussianFactorGraph.cpp b/gtsam/hybrid/HybridGaussianFactorGraph.cpp index bbf739c65..2c5d239f5 100644 --- a/gtsam/hybrid/HybridGaussianFactorGraph.cpp +++ b/gtsam/hybrid/HybridGaussianFactorGraph.cpp @@ -235,19 +235,18 @@ continuousElimination(const HybridGaussianFactorGraph &factors, /* ************************************************************************ */ /** - * @brief Exponential log-probabilities after performing - * the necessary normalizations. + * @brief Exponentiate log-values, not necessarily normalized, normalize, and + * return as AlgebraicDecisionTree. * - * @param logProbabilities DecisionTree of log-probabilities. + * @param logValues DecisionTree of (unnormalized) log values. * @return AlgebraicDecisionTree */ -static AlgebraicDecisionTree exponentiateLogProbabilities( - const AlgebraicDecisionTree &logProbabilities) { +static AlgebraicDecisionTree probabilitiesFromLogValues( + const AlgebraicDecisionTree &logValues) { // Perform normalization - double max_log = logProbabilities.max(); + double max_log = logValues.max(); AlgebraicDecisionTree probabilities = DecisionTree( - logProbabilities, - [&max_log](const double x) { return exp(x - max_log); }); + logValues, [&max_log](const double x) { return exp(x - max_log); }); probabilities = probabilities.normalize(probabilities.sum()); return probabilities; @@ -274,7 +273,7 @@ discreteElimination(const HybridGaussianFactorGraph &factors, DecisionTree(gmf->factors(), logProbability); AlgebraicDecisionTree probabilities = - exponentiateLogProbabilities(logProbabilities); + probabilitiesFromLogValues(logProbabilities); dfg.emplace_shared(gmf->discreteKeys(), probabilities); @@ -340,7 +339,7 @@ static std::shared_ptr createDiscreteFactor( AlgebraicDecisionTree logProbabilities( DecisionTree(eliminationResults, logProbability)); AlgebraicDecisionTree probabilities = - exponentiateLogProbabilities(logProbabilities); + probabilitiesFromLogValues(logProbabilities); return std::make_shared(discreteSeparator, probabilities); } From 088f1f04bb6a0e27c1dfa9feacefd3bda05cc6ba Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Wed, 28 Aug 2024 13:18:55 -0400 Subject: [PATCH 25/43] improve the gaussian mixture model tests --- .../tests/testGaussianMixtureFactor.cpp | 113 +++++++++++++----- 1 file changed, 85 insertions(+), 28 deletions(-) diff --git a/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp b/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp index 4c293c2b9..4d333f2c3 100644 --- a/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp +++ b/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp @@ -200,6 +200,42 @@ TEST(GaussianMixtureFactor, Error) { 4.0, mixtureFactor.error({continuousValues, discreteValues}), 1e-9); } +namespace test_gmm { + +/** + * 
Function to compute P(m=1|z). For P(m=0|z), swap `mus and sigmas. + * Follows equation 7.108 since it is more generic. + */ +double sigmoid(double mu0, double mu1, double sigma0, double sigma1, double z) { + double x1 = ((z - mu0) / sigma0), x2 = ((z - mu1) / sigma1); + double d = std::sqrt(sigma0 * sigma0) / std::sqrt(sigma1 * sigma1); + double e = d * std::exp(-0.5 * (x1 * x1 - x2 * x2)); + return 1 / (1 + e); +}; + +HybridBayesNet GetGaussianMixtureModel(double mu0, double mu1, double sigma0, + double sigma1) { + DiscreteKey m(M(0), 2); + Key z = Z(0); + + auto model0 = noiseModel::Isotropic::Sigma(1, sigma0); + auto model1 = noiseModel::Isotropic::Sigma(1, sigma1); + + auto c0 = make_shared(z, Vector1(mu0), I_1x1, model0), + c1 = make_shared(z, Vector1(mu1), I_1x1, model1); + + auto gm = new GaussianMixture({z}, {}, {m}, {c0, c1}); + auto mixing = new DiscreteConditional(m, "0.5/0.5"); + + HybridBayesNet hbn; + hbn.emplace_back(gm); + hbn.emplace_back(mixing); + + return hbn; +} + +} // namespace test_gmm + /* ************************************************************************* */ /** * Test a simple Gaussian Mixture Model represented as P(m)P(z|m) @@ -211,6 +247,8 @@ TEST(GaussianMixtureFactor, Error) { * which represents a sigmoid function. */ TEST(GaussianMixtureFactor, GaussianMixtureModel) { + using namespace test_gmm; + double mu0 = 1.0, mu1 = 3.0; double sigma = 2.0; auto model = noiseModel::Isotropic::Sigma(1, sigma); @@ -218,28 +256,52 @@ TEST(GaussianMixtureFactor, GaussianMixtureModel) { DiscreteKey m(M(0), 2); Key z = Z(0); - auto c0 = make_shared(z, Vector1(mu0), I_1x1, model), - c1 = make_shared(z, Vector1(mu1), I_1x1, model); - - auto gm = new GaussianMixture({z}, {}, {m}, {c0, c1}); - auto mixing = new DiscreteConditional(m, "0.5/0.5"); - - HybridBayesNet hbn; - hbn.emplace_back(gm); - hbn.emplace_back(mixing); + auto hbn = GetGaussianMixtureModel(mu0, mu1, sigma, sigma); // The result should be a sigmoid. 
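For equal sigmas the helper above indeed reduces to a logistic (sigmoid) curve in z, with its midpoint at the average of the means:

    P(m=1|z) = 1 / (1 + exp(-(mu1 - mu0) * (z - (mu0 + mu1)/2) / sigma^2))

A standalone sketch checking that identity against the helper (restated here under a hypothetical name, with d simplified to sigma0/sigma1, which is the same value):

    #include <cassert>
    #include <cmath>

    double prob_m1_z(double mu0, double mu1, double sigma0, double sigma1,
                     double z) {
      double x1 = (z - mu0) / sigma0, x2 = (z - mu1) / sigma1;
      double e = (sigma0 / sigma1) * std::exp(-0.5 * (x1 * x1 - x2 * x2));
      return 1 / (1 + e);
    }

    int main() {
      double mu0 = 1.0, mu1 = 3.0, sigma = 2.0;
      double mid = 0.5 * (mu0 + mu1);
      for (double z = -2.0; z <= 6.0; z += 0.5) {
        double logistic =
            1 / (1 + std::exp(-(mu1 - mu0) * (z - mid) / (sigma * sigma)));
        assert(std::abs(prob_m1_z(mu0, mu1, sigma, sigma, z) - logistic) < 1e-12);
      }
      return 0;  // in particular, P(m=1 | z = mid) == 0.5
    }

Note that for mu0 = 1 and mu1 = 3 the midpoint (mu0 + mu1)/2 = 2.0 happens to equal mu1 - mu0, which is why the test below can probe the 0.5 crossing at z = mu1 - mu0.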
- // So should be m = 0.5 at z=3.0 - 1.0=2.0 - VectorValues given; - given.insert(z, Vector1(mu1 - mu0)); + // So should be P(m=1|z) = 0.5 at z=3.0 - 1.0=2.0 + double midway = mu1 - mu0, lambda = 4; + { + VectorValues given; + given.insert(z, Vector1(midway)); - HybridGaussianFactorGraph gfg = hbn.toFactorGraph(given); - HybridBayesNet::shared_ptr bn = gfg.eliminateSequential(); + HybridGaussianFactorGraph gfg = hbn.toFactorGraph(given); + HybridBayesNet::shared_ptr bn = gfg.eliminateSequential(); - HybridBayesNet expected; - expected.emplace_back(new DiscreteConditional(m, "0.5/0.5")); + EXPECT_DOUBLES_EQUAL( + sigmoid(mu0, mu1, sigma, sigma, midway), + bn->at(0)->asDiscrete()->operator()(DiscreteValues{{M(0), 1}}), 1e-8); - EXPECT(assert_equal(expected, *bn)); + // At the halfway point between the means, we should get P(m|z)=0.5 + HybridBayesNet expected; + expected.emplace_back(new DiscreteConditional(m, "0.5/0.5")); + + EXPECT(assert_equal(expected, *bn)); + } + { + // Shift by -lambda + VectorValues given; + given.insert(z, Vector1(midway - lambda)); + + HybridGaussianFactorGraph gfg = hbn.toFactorGraph(given); + HybridBayesNet::shared_ptr bn = gfg.eliminateSequential(); + + EXPECT_DOUBLES_EQUAL( + sigmoid(mu0, mu1, sigma, sigma, midway - lambda), + bn->at(0)->asDiscrete()->operator()(DiscreteValues{{M(0), 1}}), 1e-8); + } + { + // Shift by lambda + VectorValues given; + given.insert(z, Vector1(midway + lambda)); + + HybridGaussianFactorGraph gfg = hbn.toFactorGraph(given); + HybridBayesNet::shared_ptr bn = gfg.eliminateSequential(); + + EXPECT_DOUBLES_EQUAL( + sigmoid(mu0, mu1, sigma, sigma, midway + lambda), + bn->at(0)->asDiscrete()->operator()(DiscreteValues{{M(0), 1}}), 1e-8); + } } /* ************************************************************************* */ @@ -255,30 +317,25 @@ TEST(GaussianMixtureFactor, GaussianMixtureModel) { * where m1>m0 close to 3.1333. */ TEST(GaussianMixtureFactor, GaussianMixtureModel2) { + using namespace test_gmm; + double mu0 = 1.0, mu1 = 3.0; - auto model0 = noiseModel::Isotropic::Sigma(1, 8.0); - auto model1 = noiseModel::Isotropic::Sigma(1, 4.0); + double sigma0 = 8.0, sigma1 = 4.0; DiscreteKey m(M(0), 2); Key z = Z(0); - auto c0 = make_shared(z, Vector1(mu0), I_1x1, model0), - c1 = make_shared(z, Vector1(mu1), I_1x1, model1); - - auto gm = new GaussianMixture({z}, {}, {m}, {c0, c1}); - auto mixing = new DiscreteConditional(m, "0.5/0.5"); - - HybridBayesNet hbn; - hbn.emplace_back(gm); - hbn.emplace_back(mixing); + auto hbn = GetGaussianMixtureModel(mu0, mu1, sigma0, sigma1); // The result should be a bell curve like function // with m1 > m0 close to 3.1333. + // We get 3.1333 by finding the maximum value of the function. 
VectorValues given; given.insert(z, Vector1(3.133)); HybridGaussianFactorGraph gfg = hbn.toFactorGraph(given); HybridBayesNet::shared_ptr bn = gfg.eliminateSequential(); + // regression HybridBayesNet expected; expected.emplace_back( new DiscreteConditional(m, "0.325603277954/0.674396722046")); From bb77b0cabbbd9ceb433d80c62950d4cf509bc979 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Wed, 28 Aug 2024 17:55:49 -0400 Subject: [PATCH 26/43] improved two state model with different means --- .../tests/testGaussianMixtureFactor.cpp | 118 ++++++++++++------ 1 file changed, 83 insertions(+), 35 deletions(-) diff --git a/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp b/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp index 4d333f2c3..8b7b5c146 100644 --- a/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp +++ b/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp @@ -36,6 +36,7 @@ using namespace std; using namespace gtsam; using noiseModel::Isotropic; +using symbol_shorthand::F; using symbol_shorthand::M; using symbol_shorthand::X; using symbol_shorthand::Z; @@ -270,7 +271,8 @@ TEST(GaussianMixtureFactor, GaussianMixtureModel) { EXPECT_DOUBLES_EQUAL( sigmoid(mu0, mu1, sigma, sigma, midway), - bn->at(0)->asDiscrete()->operator()(DiscreteValues{{M(0), 1}}), 1e-8); + bn->at(0)->asDiscrete()->operator()(DiscreteValues{{m.first, 1}}), + 1e-8); // At the halfway point between the means, we should get P(m|z)=0.5 HybridBayesNet expected; @@ -288,7 +290,8 @@ TEST(GaussianMixtureFactor, GaussianMixtureModel) { EXPECT_DOUBLES_EQUAL( sigmoid(mu0, mu1, sigma, sigma, midway - lambda), - bn->at(0)->asDiscrete()->operator()(DiscreteValues{{M(0), 1}}), 1e-8); + bn->at(0)->asDiscrete()->operator()(DiscreteValues{{m.first, 1}}), + 1e-8); } { // Shift by lambda @@ -300,7 +303,8 @@ TEST(GaussianMixtureFactor, GaussianMixtureModel) { EXPECT_DOUBLES_EQUAL( sigmoid(mu0, mu1, sigma, sigma, midway + lambda), - bn->at(0)->asDiscrete()->operator()(DiscreteValues{{M(0), 1}}), 1e-8); + bn->at(0)->asDiscrete()->operator()(DiscreteValues{{m.first, 1}}), + 1e-8); } } @@ -343,81 +347,124 @@ TEST(GaussianMixtureFactor, GaussianMixtureModel2) { EXPECT(assert_equal(expected, *bn)); } +namespace test_two_state_estimation { + +/// Create Two State Bayes Network with measurements +HybridBayesNet CreateBayesNet(double mu0, double mu1, double sigma0, + double sigma1, + bool add_second_measurement = false, + double prior_sigma = 1e-3, + double measurement_sigma = 3.0) { + DiscreteKey m1(M(1), 2); + Key z0 = Z(0), z1 = Z(1), f01 = F(0); + Key x0 = X(0), x1 = X(1); + + HybridBayesNet hbn; + + auto prior_model = noiseModel::Isotropic::Sigma(1, prior_sigma); + auto measurement_model = noiseModel::Isotropic::Sigma(1, measurement_sigma); + + // Set a prior P(x0) at x0=0 + hbn.emplace_back( + new GaussianConditional(x0, Vector1(0.0), I_1x1, prior_model)); + + // Add measurement P(z0 | x0) + auto p_z0 = new GaussianConditional(z0, Vector1(0.0), -I_1x1, x0, I_1x1, + measurement_model); + hbn.emplace_back(p_z0); + + // Add hybrid motion model + auto model0 = noiseModel::Isotropic::Sigma(1, sigma0); + auto model1 = noiseModel::Isotropic::Sigma(1, sigma1); + auto c0 = make_shared(f01, Vector1(mu0), I_1x1, x1, + I_1x1, x0, -I_1x1, model0), + c1 = make_shared(f01, Vector1(mu1), I_1x1, x1, + I_1x1, x0, -I_1x1, model1); + + auto motion = new GaussianMixture({f01}, {x0, x1}, {m1}, {c0, c1}); + + hbn.emplace_back(motion); + + if (add_second_measurement) { + // Add second measurement + auto p_z1 = new GaussianConditional(z1, Vector1(0.0), -I_1x1, x1, 
I_1x1, + measurement_model); + hbn.emplace_back(p_z1); + } + + // Discrete uniform prior. + auto p_m1 = new DiscreteConditional(m1, "0.5/0.5"); + hbn.emplace_back(p_m1); + + return hbn; +} + +} // namespace test_two_state_estimation + /* ************************************************************************* */ /** - * Test a model P(x0)P(z0|x0)p(x1|m1)p(z1|x1)p(m1). + * Test a model P(x0)P(z0|x0)P(f01|x1,x0,m1)P(z1|x1)P(m1). * - * p(x1|m1) has different means and same covariance. + * P(f01|x1,x0,m1) has different means and same covariance. * * Converting to a factor graph gives us - * P(x0)ϕ(x0)P(x1|m1)ϕ(x1)P(m1) + * P(x0)ϕ(x0)ϕ(x1,x0,m1)ϕ(x1)P(m1) * * If we only have a measurement on z0, then - * the probability of x1 should be 0.5/0.5. + * the probability of m1 should be 0.5/0.5. * Getting a measurement on z1 gives use more information. */ TEST(GaussianMixtureFactor, TwoStateModel) { + using namespace test_two_state_estimation; + double mu0 = 1.0, mu1 = 3.0; - auto model = noiseModel::Isotropic::Sigma(1, 2.0); + double sigma = 2.0; DiscreteKey m1(M(1), 2); - Key z0 = Z(0), z1 = Z(1), x0 = X(0), x1 = X(1); + Key z0 = Z(0), z1 = Z(1), f01 = F(0); - auto c0 = make_shared(x1, Vector1(mu0), I_1x1, model), - c1 = make_shared(x1, Vector1(mu1), I_1x1, model); - - auto p_x0 = new GaussianConditional(x0, Vector1(0.0), I_1x1, - noiseModel::Isotropic::Sigma(1, 1.0)); - auto p_z0x0 = new GaussianConditional(z0, Vector1(0.0), I_1x1, x0, -I_1x1, - noiseModel::Isotropic::Sigma(1, 1.0)); - auto p_x1m1 = new GaussianMixture({x1}, {}, {m1}, {c0, c1}); - auto p_z1x1 = new GaussianConditional(z1, Vector1(0.0), I_1x1, x1, -I_1x1, - noiseModel::Isotropic::Sigma(1, 3.0)); - auto p_m1 = new DiscreteConditional(m1, "0.5/0.5"); - - HybridBayesNet hbn; - hbn.emplace_back(p_x0); - hbn.emplace_back(p_z0x0); - hbn.emplace_back(p_x1m1); - hbn.emplace_back(p_m1); + // Start with no measurement on x1, only on x0 + HybridBayesNet hbn = CreateBayesNet(mu0, mu1, sigma, sigma, false); VectorValues given; given.insert(z0, Vector1(0.5)); + // The motion model says we didn't move + given.insert(f01, Vector1(0.0)); { - // Start with no measurement on x1, only on x0 HybridGaussianFactorGraph gfg = hbn.toFactorGraph(given); HybridBayesNet::shared_ptr bn = gfg.eliminateSequential(); // Since no measurement on x1, we hedge our bets DiscreteConditional expected(m1, "0.5/0.5"); - + // regression EXPECT(assert_equal(expected, *(bn->at(2)->asDiscrete()))); } { // Now we add a measurement z1 on x1 - hbn.emplace_back(p_z1x1); + hbn = CreateBayesNet(mu0, mu1, sigma, sigma, true); + // If we see z1=2.2, discrete mode should say m1=1 given.insert(z1, Vector1(2.2)); HybridGaussianFactorGraph gfg = hbn.toFactorGraph(given); HybridBayesNet::shared_ptr bn = gfg.eliminateSequential(); // Since we have a measurement on z2, we get a definite result DiscreteConditional expected(m1, "0.4923083/0.5076917"); - + // regression EXPECT(assert_equal(expected, *(bn->at(2)->asDiscrete()), 1e-6)); } } /* ************************************************************************* */ /** - * Test a model P(x0)P(z0|x0)p(x1|m1)p(z1|x1)p(m1). + * Test a model P(x0)P(z0|x0)P(f01|x1,x0,m1)P(z1|x1)P(m1). * - * p(x1|m1) has different means and different covariances. + * P(f01|x1,x0,m1) has different means and different covariances. * * Converting to a factor graph gives us - * P(x0)ϕ(x0)P(x1|m1)ϕ(x1)P(m1) + * P(x0)ϕ(x0)ϕ(x1,x0,m1)ϕ(x1)P(m1) * * If we only have a measurement on z0, then * the probability of x1 should be the ratio of covariances. 
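Why the uniform posterior in the "no z1" branch of these tests: with a uniform prior on m1 and the hybrid motion model written as a conditional on f01, marginalizing the otherwise-unconstrained x1 integrates each mixture component to the same constant. Sketched as a derivation (uniform prior assumed):

    P(m_1 | z_0, f_{01}) \propto P(m_1) \int p(x_0)\, p(z_0 | x_0)
        \left[ \int p(f_{01} | x_0, x_1, m_1)\, dx_1 \right] dx_0

The bracketed inner integral equals 1 for either value of m_1, because each component is a normalized Gaussian density with unit coefficient on the free variable x_1. The continuous evidence therefore cancels and P(m_1 | z_0) stays at the 0.5/0.5 prior. This holds with different covariances as well, but only once the mode-dependent normalization constants are carried along, which is what the constant-factor conversion in GaussianMixture::likelihood() provides.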
@@ -425,8 +472,9 @@ TEST(GaussianMixtureFactor, TwoStateModel) { */ TEST(GaussianMixtureFactor, TwoStateModel2) { double mu0 = 1.0, mu1 = 3.0; - auto model0 = noiseModel::Isotropic::Sigma(1, 6.0); - auto model1 = noiseModel::Isotropic::Sigma(1, 4.0); + double sigma0 = 6.0, sigma1 = 4.0; + auto model0 = noiseModel::Isotropic::Sigma(1, sigma0); + auto model1 = noiseModel::Isotropic::Sigma(1, sigma1); DiscreteKey m1(M(1), 2); Key z0 = Z(0), z1 = Z(1), x0 = X(0), x1 = X(1); From 28f30a232d8b0fac02c4185bae2d1ece2600ddd2 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Wed, 28 Aug 2024 18:13:54 -0400 Subject: [PATCH 27/43] update up TwoStateModel test and remove DifferentMeans and DifferentCovariances for later --- .../tests/testGaussianMixtureFactor.cpp | 236 ++---------------- 1 file changed, 17 insertions(+), 219 deletions(-) diff --git a/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp b/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp index 8b7b5c146..aa2fe0a1d 100644 --- a/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp +++ b/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp @@ -428,7 +428,7 @@ TEST(GaussianMixtureFactor, TwoStateModel) { VectorValues given; given.insert(z0, Vector1(0.5)); - // The motion model says we didn't move + // The motion model measurement says we didn't move given.insert(f01, Vector1(0.0)); { @@ -467,258 +467,56 @@ TEST(GaussianMixtureFactor, TwoStateModel) { * P(x0)ϕ(x0)ϕ(x1,x0,m1)ϕ(x1)P(m1) * * If we only have a measurement on z0, then - * the probability of x1 should be the ratio of covariances. + * the P(m1) should be 0.5/0.5. * Getting a measurement on z1 gives use more information. */ TEST(GaussianMixtureFactor, TwoStateModel2) { + using namespace test_two_state_estimation; + double mu0 = 1.0, mu1 = 3.0; double sigma0 = 6.0, sigma1 = 4.0; auto model0 = noiseModel::Isotropic::Sigma(1, sigma0); auto model1 = noiseModel::Isotropic::Sigma(1, sigma1); DiscreteKey m1(M(1), 2); - Key z0 = Z(0), z1 = Z(1), x0 = X(0), x1 = X(1); + Key z0 = Z(0), z1 = Z(1), f01 = F(0); - auto c0 = make_shared(x1, Vector1(mu0), I_1x1, model0), - c1 = make_shared(x1, Vector1(mu1), I_1x1, model1); - - auto p_x0 = new GaussianConditional(x0, Vector1(0.0), I_1x1, - noiseModel::Isotropic::Sigma(1, 1.0)); - auto p_z0x0 = new GaussianConditional(z0, Vector1(0.0), I_1x1, x0, -I_1x1, - noiseModel::Isotropic::Sigma(1, 1.0)); - auto p_x1m1 = new GaussianMixture({x1}, {}, {m1}, {c0, c1}); - auto p_z1x1 = new GaussianConditional(z1, Vector1(0.0), I_1x1, x1, -I_1x1, - noiseModel::Isotropic::Sigma(1, 3.0)); - auto p_m1 = new DiscreteConditional(m1, "0.5/0.5"); - - HybridBayesNet hbn; - hbn.emplace_back(p_x0); - hbn.emplace_back(p_z0x0); - hbn.emplace_back(p_x1m1); - hbn.emplace_back(p_m1); + // Start with no measurement on x1, only on x0 + HybridBayesNet hbn = CreateBayesNet(mu0, mu1, sigma0, sigma1, false); VectorValues given; given.insert(z0, Vector1(0.5)); + // The motion model measurement says we didn't move + given.insert(f01, Vector1(0.0)); { // Start with no measurement on x1, only on x0 HybridGaussianFactorGraph gfg = hbn.toFactorGraph(given); HybridBayesNet::shared_ptr bn = gfg.eliminateSequential(); - // Since no measurement on x1, we get the ratio of covariances. 
- DiscreteConditional expected(m1, "0.6/0.4"); - - EXPECT(assert_equal(expected, *(bn->at(2)->asDiscrete()))); + // Since no measurement on x1, we a 50/50 probability + auto p_m = bn->at(2)->asDiscrete(); + EXPECT_DOUBLES_EQUAL(0.5, p_m->operator()(DiscreteValues{{m1.first, 0}}), + 1e-9); + EXPECT_DOUBLES_EQUAL(0.5, p_m->operator()(DiscreteValues{{m1.first, 1}}), + 1e-9); } { // Now we add a measurement z1 on x1 - hbn.emplace_back(p_z1x1); + hbn = CreateBayesNet(mu0, mu1, sigma0, sigma1, true); given.insert(z1, Vector1(2.2)); HybridGaussianFactorGraph gfg = hbn.toFactorGraph(given); HybridBayesNet::shared_ptr bn = gfg.eliminateSequential(); // Since we have a measurement on z2, we get a definite result - DiscreteConditional expected(m1, "0.52706646/0.47293354"); + DiscreteConditional expected(m1, "0.4262682/0.5737318"); EXPECT(assert_equal(expected, *(bn->at(2)->asDiscrete()), 1e-6)); } } -/** - * @brief Helper function to specify a Hybrid Bayes Net - * {P(X1) P(Z1 | X1, X2, M1)} and convert it to a Hybrid Factor Graph - * {P(X1)L(X1, X2, M1; Z1)} by converting to likelihoods given Z1. - * - * We can specify either different means or different sigmas, - * or both for each hybrid factor component. - * - * @param values Initial values for linearization. - * @param means The mean values for the conditional components. - * @param sigmas Noise model sigma values (standard deviation). - * @param m1 The discrete mode key. - * @param z1 The measurement value. - * @return HybridGaussianFactorGraph - */ -HybridGaussianFactorGraph GetFactorGraphFromBayesNet( - const gtsam::Values &values, const std::vector &means, - const std::vector &sigmas, DiscreteKey &m1, double z1 = 0.0) { - // Noise models - auto model0 = noiseModel::Isotropic::Sigma(1, sigmas[0]); - auto model1 = noiseModel::Isotropic::Sigma(1, sigmas[1]); - auto prior_noise = noiseModel::Isotropic::Sigma(1, 1e-3); - - // GaussianMixtureFactor component factors - auto f0 = - std::make_shared>(X(1), X(2), means[0], model0); - auto f1 = - std::make_shared>(X(1), X(2), means[1], model1); - std::vector factors{f0, f1}; - - /// Get terms for each p^m(z1 | x1, x2) - Matrix H0_1, H0_2, H1_1, H1_2; - double x1 = values.at(X(1)), x2 = values.at(X(2)); - Vector d0 = f0->evaluateError(x1, x2, &H0_1, &H0_2); - std::vector> terms0 = {{Z(1), gtsam::I_1x1 /*Rx*/}, - // - {X(1), H0_1 /*Sp1*/}, - {X(2), H0_2 /*Tp2*/}}; - - Vector d1 = f1->evaluateError(x1, x2, &H1_1, &H1_2); - std::vector> terms1 = {{Z(1), gtsam::I_1x1 /*Rx*/}, - // - {X(1), H1_1 /*Sp1*/}, - {X(2), H1_2 /*Tp2*/}}; - // Create conditional P(Z1 | X1, X2, M1) - auto gm = new gtsam::GaussianMixture( - {Z(1)}, {X(1), X(2)}, {m1}, - {std::make_shared(terms0, 1, -d0, model0), - std::make_shared(terms1, 1, -d1, model1)}); - gtsam::HybridBayesNet bn; - bn.emplace_back(gm); - // bn.print(); - - // Create FG via toFactorGraph - gtsam::VectorValues measurements; - measurements.insert(Z(1), gtsam::I_1x1 * z1); // Set Z1 = 0 - HybridGaussianFactorGraph mixture_fg = bn.toFactorGraph(measurements); - - // Linearized prior factor on X1 - auto prior = PriorFactor(X(1), x1, prior_noise).linearize(values); - mixture_fg.push_back(prior); - - return mixture_fg; -} - -/* ************************************************************************* */ -/** - * @brief Test components with differing means. - * - * We specify a hybrid Bayes network P(Z | X, M) =p(X1)p(Z1 | X1, X2, M1), - * which is then converted to a factor graph by specifying Z1. 
- * This is a different case since now we have a hybrid factor - * with 2 continuous variables ϕ(x1, x2, m1). - * p(Z1 | X1, X2, M1) has 2 factors each for the binary - * mode m1, with only the means being different. - */ -TEST(GaussianMixtureFactor, DifferentMeans) { - DiscreteKey m1(M(1), 2); - - Values values; - double x1 = 0.0, x2 = 1.75; - values.insert(X(1), x1); - values.insert(X(2), x2); - - // Different means, same sigma - std::vector means{0.0, 2.0}, sigmas{1e-0, 1e-0}; - - HybridGaussianFactorGraph hfg = - GetFactorGraphFromBayesNet(values, means, sigmas, m1, 0.0); - - { - // With no measurement on X2, each mode should be equally likely - auto bn = hfg.eliminateSequential(); - HybridValues actual = bn->optimize(); - - HybridValues expected( - VectorValues{{X(1), Vector1(0.0)}, {X(2), Vector1(-1.75)}}, - DiscreteValues{{M(1), 0}}); - - EXPECT(assert_equal(expected, actual)); - - { - DiscreteValues dv{{M(1), 0}}; - VectorValues cont = bn->optimize(dv); - double error = bn->error(HybridValues(cont, dv)); - // regression - EXPECT_DOUBLES_EQUAL(0.69314718056, error, 1e-9); - } - { - DiscreteValues dv{{M(1), 1}}; - VectorValues cont = bn->optimize(dv); - double error = bn->error(HybridValues(cont, dv)); - // regression - EXPECT_DOUBLES_EQUAL(0.69314718056, error, 1e-9); - } - } - { - // If we add a measurement on X2, we have more information to work with. - // Add a measurement on X2 - auto prior_noise = noiseModel::Isotropic::Sigma(1, 1e-3); - GaussianConditional meas_z2(Z(2), Vector1(2.0), I_1x1, X(2), I_1x1, - prior_noise); - auto prior_x2 = meas_z2.likelihood(Vector1(x2)); - - hfg.push_back(prior_x2); - - auto bn = hfg.eliminateSequential(); - HybridValues actual = bn->optimize(); - - HybridValues expected( - VectorValues{{X(1), Vector1(0.0)}, {X(2), Vector1(0.25)}}, - DiscreteValues{{M(1), 1}}); - - EXPECT(assert_equal(expected, actual)); - - { - DiscreteValues dv{{M(1), 0}}; - VectorValues cont = bn->optimize(dv); - double error = bn->error(HybridValues(cont, dv)); - // regression - EXPECT_DOUBLES_EQUAL(2.12692448787, error, 1e-9); - } - { - DiscreteValues dv{{M(1), 1}}; - VectorValues cont = bn->optimize(dv); - double error = bn->error(HybridValues(cont, dv)); - // regression - EXPECT_DOUBLES_EQUAL(0.126928487854, error, 1e-9); - } - } -} - -/* ************************************************************************* */ -/** - * @brief Test components with differing covariances - * but with a Bayes net P(Z|X, M) converted to a FG. - * Same as the DifferentMeans example but in this case, - * we keep the means the same and vary the covariances. - */ -TEST(GaussianMixtureFactor, DifferentCovariances) { - DiscreteKey m1(M(1), 2); - - Values values; - double x1 = 1.0, x2 = 1.0; - values.insert(X(1), x1); - values.insert(X(2), x2); - - std::vector means{0.0, 0.0}, sigmas{1e2, 1e-2}; - HybridGaussianFactorGraph mixture_fg = - GetFactorGraphFromBayesNet(values, means, sigmas, m1); - - auto hbn = mixture_fg.eliminateSequential(); - - VectorValues cv; - cv.insert(X(1), Vector1(0.0)); - cv.insert(X(2), Vector1(0.0)); - - // Check that the error values at the MLE point μ. 
-  AlgebraicDecisionTree<Key> errorTree = hbn->errorTree(cv);
-
-  DiscreteValues dv0{{M(1), 0}};
-  DiscreteValues dv1{{M(1), 1}};
-
-  // regression
-  EXPECT_DOUBLES_EQUAL(9.90348755254, errorTree(dv0), 1e-9);
-  EXPECT_DOUBLES_EQUAL(0.69314718056, errorTree(dv1), 1e-9);
-
-  DiscreteConditional expected_m1(m1, "0.5/0.5");
-  DiscreteConditional actual_m1 = *(hbn->at(2)->asDiscrete());
-
-  EXPECT(assert_equal(expected_m1, actual_m1));
-}
-
 /* ************************************************************************* */
 int main() {
   TestResult tr;

From f8a7b804d3167196b7eed0f257a4e75545076ae8 Mon Sep 17 00:00:00 2001
From: Varun Agrawal
Date: Thu, 29 Aug 2024 10:45:50 -0400
Subject: [PATCH 28/43] address some comments

---
 gtsam/hybrid/tests/testGaussianMixtureFactor.cpp | 10 ++++------
 1 file changed, 4 insertions(+), 6 deletions(-)

diff --git a/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp b/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp
index aa2fe0a1d..3d26ae573 100644
--- a/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp
+++ b/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp
@@ -204,12 +204,12 @@ TEST(GaussianMixtureFactor, Error) {
 namespace test_gmm {
 
 /**
- * Function to compute P(m=1|z). For P(m=0|z), swap `mus and sigmas.
+ * Function to compute P(m=1|z). For P(m=0|z), swap mus and sigmas.
  * Follows equation 7.108 since it is more generic.
  */
 double sigmoid(double mu0, double mu1, double sigma0, double sigma1, double z) {
   double x1 = ((z - mu0) / sigma0), x2 = ((z - mu1) / sigma1);
-  double d = std::sqrt(sigma0 * sigma0) / std::sqrt(sigma1 * sigma1);
+  double d = sigma0 / sigma1;
   double e = d * std::exp(-0.5 * (x1 * x1 - x2 * x2));
   return 1 / (1 + e);
 };
@@ -252,7 +252,6 @@ TEST(GaussianMixtureFactor, GaussianMixtureModel) {
 
   double mu0 = 1.0, mu1 = 3.0;
   double sigma = 2.0;
-  auto model = noiseModel::Isotropic::Sigma(1, sigma);
 
   DiscreteKey m(M(0), 2);
   Key z = Z(0);
@@ -382,7 +381,6 @@ HybridBayesNet CreateBayesNet(double mu0, double mu1, double sigma0,
                                             I_1x1, x0, -I_1x1, model1);
 
   auto motion = new GaussianMixture({f01}, {x0, x1}, {m1}, {c0, c1});
-
   hbn.emplace_back(motion);
 
   if (add_second_measurement) {
@@ -487,7 +485,7 @@ TEST(GaussianMixtureFactor, TwoStateModel2) {
   VectorValues given;
   given.insert(z0, Vector1(0.5));
   // The motion model measurement says we didn't move
-  given.insert(f01, Vector1(0.0));
+  // given.insert(x1, Vector1(0.0));
 
   {
     // Start with no measurement on x1, only on x0
@@ -512,7 +510,7 @@ TEST(GaussianMixtureFactor, TwoStateModel2) {
 
     // Since we have a measurement on z2, we get a definite result
     DiscreteConditional expected(m1, "0.4262682/0.5737318");
-
+    // regression
     EXPECT(assert_equal(expected, *(bn->at(2)->asDiscrete()), 1e-6));
   }
 }

From 617a99f6bf1a61cc84e35c8d26770d7dacc65518 Mon Sep 17 00:00:00 2001
From: Varun Agrawal
Date: Thu, 29 Aug 2024 13:26:16 -0400
Subject: [PATCH 29/43] format HybridConditional.h

---
 gtsam/hybrid/HybridConditional.h | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/gtsam/hybrid/HybridConditional.h b/gtsam/hybrid/HybridConditional.h
index 64bdcb2c1..fb6542822 100644
--- a/gtsam/hybrid/HybridConditional.h
+++ b/gtsam/hybrid/HybridConditional.h
@@ -61,7 +61,7 @@ class GTSAM_EXPORT HybridConditional
       public Conditional<HybridFactor, HybridConditional> {
  public:
   // typedefs needed to play nice with gtsam
-  typedef HybridConditional This;            ///< Typedef to this class
+  typedef HybridConditional This;              ///< Typedef to this class
   typedef std::shared_ptr<This> shared_ptr;    ///< shared_ptr to this class
   typedef HybridFactor BaseFactor;             ///< Typedef to our factor base class
   typedef Conditional<BaseFactor, This> BaseConditional;  ///< Typedef to our conditional base class
@@ -185,7 +185,7 @@ class GTSAM_EXPORT HybridConditional
    * Return the log normalization constant.
    * Note this is 0.0 for discrete and hybrid conditionals, but depends
    * on the continuous parameters for Gaussian conditionals.
-   */
+   */
   double logNormalizationConstant() const override;
 
   /// Return the probability (or density) of the underlying conditional.

From b4637049267cab61110251e70ebfbc0fa4363db1 Mon Sep 17 00:00:00 2001
From: Varun Agrawal
Date: Thu, 29 Aug 2024 13:37:28 -0400
Subject: [PATCH 30/43] improved HybridGaussianFactorGraph::printErrors

---
 gtsam/hybrid/HybridGaussianFactorGraph.cpp | 27 ++++++++++------------
 gtsam/hybrid/HybridGaussianFactorGraph.h   |  8 +++++++
 2 files changed, 20 insertions(+), 15 deletions(-)

diff --git a/gtsam/hybrid/HybridGaussianFactorGraph.cpp b/gtsam/hybrid/HybridGaussianFactorGraph.cpp
index 2c5d239f5..7ac6cef98 100644
--- a/gtsam/hybrid/HybridGaussianFactorGraph.cpp
+++ b/gtsam/hybrid/HybridGaussianFactorGraph.cpp
@@ -97,29 +97,27 @@ void HybridGaussianFactorGraph::printErrors(
       std::cout << "nullptr"
                 << "\n";
     } else {
-      factor->print(ss.str(), keyFormatter);
-      std::cout << "error = ";
-      gmf->errorTree(values.continuous()).print("", keyFormatter);
-      std::cout << std::endl;
+      gmf->operator()(values.discrete())->print(ss.str(), keyFormatter);
+      std::cout << "error = " << gmf->error(values) << std::endl;
     }
   } else if (auto hc = std::dynamic_pointer_cast<HybridConditional>(factor)) {
     if (factor == nullptr) {
       std::cout << "nullptr"
                 << "\n";
     } else {
-      factor->print(ss.str(), keyFormatter);
-
       if (hc->isContinuous()) {
+        factor->print(ss.str(), keyFormatter);
         std::cout << "error = " << hc->asGaussian()->error(values) << "\n";
       } else if (hc->isDiscrete()) {
-        std::cout << "error = ";
-        hc->asDiscrete()->errorTree().print("", keyFormatter);
-        std::cout << "\n";
+        factor->print(ss.str(), keyFormatter);
+        std::cout << "error = " << hc->asDiscrete()->error(values.discrete())
+                  << "\n";
       } else {
         // Is hybrid
-        std::cout << "error = ";
-        hc->asMixture()->errorTree(values.continuous()).print();
-        std::cout << "\n";
+        auto mixtureComponent =
+            hc->asMixture()->operator()(values.discrete());
+        mixtureComponent->print(ss.str(), keyFormatter);
+        std::cout << "error = " << mixtureComponent->error(values) << "\n";
       }
     }
   } else if (auto gf = std::dynamic_pointer_cast<GaussianFactor>(factor)) {
@@ -140,8 +138,7 @@ void HybridGaussianFactorGraph::printErrors(
                 << "\n";
     } else {
       factor->print(ss.str(), keyFormatter);
-      std::cout << "error = ";
-      df->errorTree().print("", keyFormatter);
+      std::cout << "error = " << df->error(values.discrete()) << std::endl;
     }
 
   } else {
@@ -550,7 +547,7 @@ AlgebraicDecisionTree<Key> HybridGaussianFactorGraph::errorTree(
   AlgebraicDecisionTree<Key> error_tree(0.0);
 
   // Iterate over each factor.
-  for (auto &f : factors_) {
+  for (auto &factor : factors_) {
     // TODO(dellaert): just use a virtual method defined in HybridFactor.
     AlgebraicDecisionTree<Key> factor_error;
 
diff --git a/gtsam/hybrid/HybridGaussianFactorGraph.h b/gtsam/hybrid/HybridGaussianFactorGraph.h
index 1708ff64b..2ca6e4c95 100644
--- a/gtsam/hybrid/HybridGaussianFactorGraph.h
+++ b/gtsam/hybrid/HybridGaussianFactorGraph.h
@@ -144,6 +144,14 @@ class GTSAM_EXPORT HybridGaussianFactorGraph
   //     const std::string& s = "HybridGaussianFactorGraph",
   //     const KeyFormatter& keyFormatter = DefaultKeyFormatter) const override;
 
+  /**
+   * @brief Print the errors of each factor in the hybrid factor graph.
+   *
+   * @param values The HybridValues for the variables used to compute the error.
+   * @param str String that is output before the factor graph and errors.
+   * @param keyFormatter Formatter function for the keys in the factors.
+   * @param printCondition A condition to check if a factor should be printed.
+   */
   void printErrors(
       const HybridValues& values,
       const std::string& str = "HybridGaussianFactorGraph: ",

From 1673c47ea0b232e2c00adf9d60cc9b67c50ead3b Mon Sep 17 00:00:00 2001
From: Varun Agrawal
Date: Thu, 29 Aug 2024 14:23:18 -0400
Subject: [PATCH 31/43] unpack HybridConditional in errorTree

---
 gtsam/hybrid/HybridGaussianFactorGraph.cpp    |  5 ++
 .../tests/testHybridGaussianFactorGraph.cpp   | 51 +++++++++++++++++++
 2 files changed, 56 insertions(+)

diff --git a/gtsam/hybrid/HybridGaussianFactorGraph.cpp b/gtsam/hybrid/HybridGaussianFactorGraph.cpp
index 7ac6cef98..6c3442f97 100644
--- a/gtsam/hybrid/HybridGaussianFactorGraph.cpp
+++ b/gtsam/hybrid/HybridGaussianFactorGraph.cpp
@@ -551,6 +551,11 @@ AlgebraicDecisionTree<Key> HybridGaussianFactorGraph::errorTree(
     // TODO(dellaert): just use a virtual method defined in HybridFactor.
     AlgebraicDecisionTree<Key> factor_error;
 
+    auto f = factor;
+    if (auto hc = dynamic_pointer_cast<HybridConditional>(factor)) {
+      f = hc->inner();
+    }
+
     if (auto gaussianMixture = dynamic_pointer_cast<GaussianMixtureFactor>(f)) {
       // Compute factor error and add it.
       error_tree = error_tree + gaussianMixture->errorTree(continuousValues);
diff --git a/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp b/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp
index 5be2f2742..68b3b8215 100644
--- a/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp
+++ b/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp
@@ -598,6 +598,57 @@ TEST(HybridGaussianFactorGraph, ErrorAndProbPrimeTree) {
   EXPECT(assert_equal(expected_probs, probs, 1e-7));
 }
 
+/* ****************************************************************************/
+// Test hybrid gaussian factor graph errorTree when
+// there is a HybridConditional in the graph
+TEST(HybridGaussianFactorGraph, ErrorTreeWithConditional) {
+  using symbol_shorthand::F;
+
+  DiscreteKey m1(M(1), 2);
+  Key z0 = Z(0), f01 = F(0);
+  Key x0 = X(0), x1 = X(1);
+
+  HybridBayesNet hbn;
+
+  auto prior_model = noiseModel::Isotropic::Sigma(1, 1e-1);
+  auto measurement_model = noiseModel::Isotropic::Sigma(1, 2.0);
+
+  // Set a prior P(x0) at x0=0
+  hbn.emplace_back(
+      new GaussianConditional(x0, Vector1(0.0), I_1x1, prior_model));
+
+  // Add measurement P(z0 | x0)
+  hbn.emplace_back(new GaussianConditional(z0, Vector1(0.0), -I_1x1, x0, I_1x1,
+                                           measurement_model));
+
+  // Add hybrid motion model
+  double mu = 0.0;
+  double sigma0 = 1e2, sigma1 = 1e-2;
+  auto model0 = noiseModel::Isotropic::Sigma(1, sigma0);
+  auto model1 = noiseModel::Isotropic::Sigma(1, sigma1);
+  auto c0 = make_shared<GaussianConditional>(f01, Vector1(mu), I_1x1, x1, I_1x1,
+                                             x0, -I_1x1, model0),
+       c1 = make_shared<GaussianConditional>(f01, Vector1(mu), I_1x1, x1, I_1x1,
+                                             x0, -I_1x1, model1);
+  hbn.emplace_back(new GaussianMixture({f01}, {x0, x1}, {m1}, {c0, c1}));
+
+  // Discrete uniform prior.
+  hbn.emplace_back(new DiscreteConditional(m1, "0.5/0.5"));
+
+  VectorValues given;
+  given.insert(z0, Vector1(0.0));
+  given.insert(f01, Vector1(0.0));
+  auto gfg = hbn.toFactorGraph(given);
+
+  VectorValues vv;
+  vv.insert(x0, Vector1(1.0));
+  vv.insert(x1, Vector1(2.0));
+  AlgebraicDecisionTree<Key> errorTree = gfg.errorTree(vv);
+
+  // regression
+  AlgebraicDecisionTree<Key> expected(m1, 59.335390372, 5050.125);
+  EXPECT(assert_equal(expected, errorTree, 1e-9));
+}
+
 /* ****************************************************************************/
 // Check that assembleGraphTree assembles Gaussian factor graphs for each
 // assignment.

From 1236e058a11c168006287f614a57b171cf432165 Mon Sep 17 00:00:00 2001
From: Varun Agrawal
Date: Thu, 29 Aug 2024 19:05:38 -0400
Subject: [PATCH 32/43] rename sigmoid to prob_m_z

---
 .../tests/testGaussianMixtureFactor.cpp       | 32 ++++++++++---------
 1 file changed, 17 insertions(+), 15 deletions(-)

diff --git a/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp b/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp
index 3d26ae573..187d9cf23 100644
--- a/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp
+++ b/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp
@@ -205,17 +205,20 @@ namespace test_gmm {
 
 /**
  * Function to compute P(m=1|z). For P(m=0|z), swap mus and sigmas.
+ * If sigma0 == sigma1, it simplifies to a sigmoid function.
+ *
  * Follows equation 7.108 since it is more generic.
  */
-double sigmoid(double mu0, double mu1, double sigma0, double sigma1, double z) {
+double prob_m_z(double mu0, double mu1, double sigma0, double sigma1,
+                double z) {
   double x1 = ((z - mu0) / sigma0), x2 = ((z - mu1) / sigma1);
   double d = sigma0 / sigma1;
   double e = d * std::exp(-0.5 * (x1 * x1 - x2 * x2));
   return 1 / (1 + e);
 };
 
-HybridBayesNet GetGaussianMixtureModel(double mu0, double mu1, double sigma0,
-                                       double sigma1) {
+static HybridBayesNet GetGaussianMixtureModel(double mu0, double mu1,
+                                              double sigma0, double sigma1) {
   DiscreteKey m(M(0), 2);
   Key z = Z(0);
 
@@ -269,7 +272,7 @@ TEST(GaussianMixtureFactor, GaussianMixtureModel) {
     HybridBayesNet::shared_ptr bn = gfg.eliminateSequential();
 
     EXPECT_DOUBLES_EQUAL(
-        sigmoid(mu0, mu1, sigma, sigma, midway),
+        prob_m_z(mu0, mu1, sigma, sigma, midway),
         bn->at(0)->asDiscrete()->operator()(DiscreteValues{{m.first, 1}}),
         1e-8);
 
@@ -288,7 +291,7 @@ TEST(GaussianMixtureFactor, GaussianMixtureModel) {
     HybridBayesNet::shared_ptr bn = gfg.eliminateSequential();
 
     EXPECT_DOUBLES_EQUAL(
-        sigmoid(mu0, mu1, sigma, sigma, midway - lambda),
+        prob_m_z(mu0, mu1, sigma, sigma, midway - lambda),
         bn->at(0)->asDiscrete()->operator()(DiscreteValues{{m.first, 1}}),
         1e-8);
   }
@@ -301,7 +304,7 @@ TEST(GaussianMixtureFactor, GaussianMixtureModel) {
     HybridBayesNet::shared_ptr bn = gfg.eliminateSequential();
 
     EXPECT_DOUBLES_EQUAL(
-        sigmoid(mu0, mu1, sigma, sigma, midway + lambda),
+        prob_m_z(mu0, mu1, sigma, sigma, midway + lambda),
         bn->at(0)->asDiscrete()->operator()(DiscreteValues{{m.first, 1}}),
         1e-8);
   }
@@ -349,11 +352,11 @@ TEST(GaussianMixtureFactor, GaussianMixtureModel2) {
 namespace test_two_state_estimation {
 
 /// Create Two State Bayes Network with measurements
-HybridBayesNet CreateBayesNet(double mu0, double mu1, double sigma0,
-                              double sigma1,
-                              bool add_second_measurement = false,
-                              double prior_sigma = 1e-3,
-                              double measurement_sigma = 3.0) {
+static HybridBayesNet CreateBayesNet(double mu0, double mu1, double sigma0,
+                                     double sigma1,
+                                     bool add_second_measurement = false,
+                                     double prior_sigma = 1e-3,
+                                     double measurement_sigma = 3.0) {
   DiscreteKey m1(M(1), 2);
   Key z0 = Z(0), z1 = Z(1), f01 = F(0);
   Key x0 = X(0), x1 = X(1);
@@ -401,7 +404,7 @@ HybridBayesNet CreateBayesNet(double mu0, double mu1, double sigma0,
 
 /* ************************************************************************* */
 /**
- * Test a model P(x0)P(z0|x0)P(f01|x1,x0,m1)P(z1|x1)P(m1).
+ * Test a model P(x0)P(z0|x0)P(x1|x0,m1)P(z1|x1)P(m1).
  *
  * P(f01|x1,x0,m1) has different means and same covariance.
 *
@@ -435,7 +438,6 @@ TEST(GaussianMixtureFactor, TwoStateModel) {
 
     // Since no measurement on x1, we hedge our bets
     DiscreteConditional expected(m1, "0.5/0.5");
-    // regression
     EXPECT(assert_equal(expected, *(bn->at(2)->asDiscrete())));
   }
 
@@ -457,7 +459,7 @@ TEST(GaussianMixtureFactor, TwoStateModel) {
 
 /* ************************************************************************* */
 /**
- * Test a model P(x0)P(z0|x0)P(f01|x1,x0,m1)P(z1|x1)P(m1).
+ * Test a model P(x0)P(z0|x0)P(x1|x0,m1)P(z1|x1)P(m1).
 *
 * P(f01|x1,x0,m1) has different means and different covariances.
 *
@@ -520,4 +522,4 @@ int main() {
   TestResult tr;
   return TestRegistry::runAllTests(tr);
 }
-/* ************************************************************************* */
\ No newline at end of file
+/* ************************************************************************* */

From 88605c851baeae92f507f0fbb228bc0fa063f784 Mon Sep 17 00:00:00 2001
From: Varun Agrawal
Date: Thu, 29 Aug 2024 19:32:00 -0400
Subject: [PATCH 33/43] make GaussianMixture2 test as good as GaussianMixture
 test

---
 .../tests/testGaussianMixtureFactor.cpp       | 65 +++++++++++++++----
 1 file changed, 51 insertions(+), 14 deletions(-)

diff --git a/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp b/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp
index 187d9cf23..8d2fba821 100644
--- a/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp
+++ b/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp
@@ -333,20 +333,56 @@ TEST(GaussianMixtureFactor, GaussianMixtureModel2) {
 
   auto hbn = GetGaussianMixtureModel(mu0, mu1, sigma0, sigma1);
 
-  // The result should be a bell curve like function
-  // with m1 > m0 close to 3.1333.
-  // We get 3.1333 by finding the maximum value of the function.
-  VectorValues given;
-  given.insert(z, Vector1(3.133));
+  double m1_high = 3.133, lambda = 4;
+  {
+    // The result should be a bell curve like function
+    // with m1 > m0 close to 3.1333.
+    // We get 3.1333 by finding the maximum value of the function.
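+    // (Aside: since sigma0 != sigma1, the exponent of prob_m_z is quadratic
+    // in z, so P(m=1|z) is bell-shaped rather than a sigmoid; the peak
+    // z* ~= 3.1333 is where dP(m=1|z)/dz = 0.)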
+    VectorValues given;
+    given.insert(z, Vector1(3.133));
 
-  HybridGaussianFactorGraph gfg = hbn.toFactorGraph(given);
-  HybridBayesNet::shared_ptr bn = gfg.eliminateSequential();
+    HybridGaussianFactorGraph gfg = hbn.toFactorGraph(given);
+    HybridBayesNet::shared_ptr bn = gfg.eliminateSequential();
 
-  // regression
-  HybridBayesNet expected;
-  expected.emplace_back(
-      new DiscreteConditional(m, "0.325603277954/0.674396722046"));
+    EXPECT_DOUBLES_EQUAL(
+        prob_m_z(mu0, mu1, sigma0, sigma1, m1_high),
+        bn->at(0)->asDiscrete()->operator()(DiscreteValues{{M(0), 1}}), 1e-8);
 
-  EXPECT(assert_equal(expected, *bn));
+    // At the peak of the P(m=1|z) curve
+    HybridBayesNet expected;
+    expected.emplace_back(new DiscreteConditional(
+        m, {},
+        vector<double>{prob_m_z(mu1, mu0, sigma1, sigma0, m1_high),
+                       prob_m_z(mu0, mu1, sigma0, sigma1, m1_high)}));
+
+    EXPECT(assert_equal(expected, *bn));
+  }
+  {
+    // Shift by -lambda
+    VectorValues given;
+    given.insert(z, Vector1(m1_high - lambda));
+
+    HybridGaussianFactorGraph gfg = hbn.toFactorGraph(given);
+    HybridBayesNet::shared_ptr bn = gfg.eliminateSequential();
+
+    EXPECT_DOUBLES_EQUAL(
+        prob_m_z(mu0, mu1, sigma0, sigma1, m1_high - lambda),
+        bn->at(0)->asDiscrete()->operator()(DiscreteValues{{m.first, 1}}),
+        1e-8);
+  }
+  {
+    // Shift by lambda
+    VectorValues given;
+    given.insert(z, Vector1(m1_high + lambda));
+
+    HybridGaussianFactorGraph gfg = hbn.toFactorGraph(given);
+    HybridBayesNet::shared_ptr bn = gfg.eliminateSequential();
+
+    EXPECT_DOUBLES_EQUAL(
+        prob_m_z(mu0, mu1, sigma0, sigma1, m1_high + lambda),
+        bn->at(0)->asDiscrete()->operator()(DiscreteValues{{m.first, 1}}),
+        1e-8);
+  }
 }
 
 namespace test_two_state_estimation {
@@ -470,7 +506,8 @@ TEST(GaussianMixtureFactor, TwoStateModel) {
 * the P(m1) should be 0.5/0.5.
 * Getting a measurement on z1 gives us more information.
 */
-TEST(GaussianMixtureFactor, TwoStateModel2) {
+// TODO(Varun) Figure out the correctness of this
+TEST_DISABLED(GaussianMixtureFactor, TwoStateModel2) {
   using namespace test_two_state_estimation;
 
   double mu0 = 1.0, mu1 = 3.0;
@@ -479,7 +516,7 @@ TEST(GaussianMixtureFactor, TwoStateModel2) {
   auto model1 = noiseModel::Isotropic::Sigma(1, sigma1);
 
   DiscreteKey m1(M(1), 2);
-  Key z0 = Z(0), z1 = Z(1), f01 = F(0);
+  Key z0 = Z(0), z1 = Z(1);
 
   // Start with no measurement on x1, only on x0
   HybridBayesNet hbn = CreateBayesNet(mu0, mu1, sigma0, sigma1, false);

From ca073fd9ef32718161a53c903f8d258438009ed8 Mon Sep 17 00:00:00 2001
From: Varun Agrawal
Date: Thu, 29 Aug 2024 19:32:19 -0400
Subject: [PATCH 34/43] fix bug in prob_m_z

---
 gtsam/hybrid/tests/testGaussianMixtureFactor.cpp | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp b/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp
index 8d2fba821..c473e567c 100644
--- a/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp
+++ b/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp
@@ -212,7 +212,7 @@ namespace test_gmm {
 double prob_m_z(double mu0, double mu1, double sigma0, double sigma1,
                 double z) {
   double x1 = ((z - mu0) / sigma0), x2 = ((z - mu1) / sigma1);
-  double d = sigma0 / sigma1;
+  double d = sigma1 / sigma0;
   double e = d * std::exp(-0.5 * (x1 * x1 - x2 * x2));
   return 1 / (1 + e);
 };

From b18dc3c812f167dc97ecd118397678344b9d53f7 Mon Sep 17 00:00:00 2001
From: Varun Agrawal
Date: Thu, 29 Aug 2024 19:38:00 -0400
Subject: [PATCH 35/43] fix motion model for TwoStateModel test

---
 .../hybrid/tests/testGaussianMixtureFactor.cpp | 18 +++++++-----------
 1 file changed, 7 insertions(+), 11 deletions(-)

diff --git a/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp b/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp
index c473e567c..c8ffdc63b 100644
--- a/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp
+++ b/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp
@@ -394,7 +394,7 @@ static HybridBayesNet CreateBayesNet(double mu0, double mu1, double sigma0,
                                      double prior_sigma = 1e-3,
                                      double measurement_sigma = 3.0) {
   DiscreteKey m1(M(1), 2);
-  Key z0 = Z(0), z1 = Z(1), f01 = F(0);
+  Key z0 = Z(0), z1 = Z(1);
   Key x0 = X(0), x1 = X(1);
 
   HybridBayesNet hbn;
@@ -414,12 +414,12 @@ static HybridBayesNet CreateBayesNet(double mu0, double mu1, double sigma0,
   // Add hybrid motion model
   auto model0 = noiseModel::Isotropic::Sigma(1, sigma0);
   auto model1 = noiseModel::Isotropic::Sigma(1, sigma1);
-  auto c0 = make_shared<GaussianConditional>(f01, Vector1(mu0), I_1x1, x1,
-                                             I_1x1, x0, -I_1x1, model0),
-       c1 = make_shared<GaussianConditional>(f01, Vector1(mu1), I_1x1, x1,
-                                             I_1x1, x0, -I_1x1, model1);
+  auto c0 = make_shared<GaussianConditional>(x1, Vector1(mu0), I_1x1, x0,
+                                             -I_1x1, model0),
+       c1 = make_shared<GaussianConditional>(x1, Vector1(mu1), I_1x1, x0,
+                                             -I_1x1, model1);
 
-  auto motion = new GaussianMixture({f01}, {x0, x1}, {m1}, {c0, c1});
+  auto motion = new GaussianMixture({x1}, {x0}, {m1}, {c0, c1});
   hbn.emplace_back(motion);
 
   if (add_second_measurement) {
@@ -458,15 +458,13 @@ TEST(GaussianMixtureFactor, TwoStateModel) {
   double sigma = 2.0;
 
   DiscreteKey m1(M(1), 2);
-  Key z0 = Z(0), z1 = Z(1), f01 = F(0);
+  Key z0 = Z(0), z1 = Z(1);
 
   // Start with no measurement on x1, only on x0
   HybridBayesNet hbn = CreateBayesNet(mu0, mu1, sigma, sigma, false);
 
   VectorValues given;
   given.insert(z0, Vector1(0.5));
-  // The motion model measurement says we didn't move
-  given.insert(f01, Vector1(0.0));
 
   {
     HybridGaussianFactorGraph gfg = hbn.toFactorGraph(given);
@@ -523,8 +521,6 @@ TEST_DISABLED(GaussianMixtureFactor, TwoStateModel2) {
 
   VectorValues given;
   given.insert(z0, Vector1(0.5));
-  // The motion model measurement says we didn't move
-  // given.insert(x1, Vector1(0.0));
 
   {
     // Start with no measurement on x1, only on x0

From d34dd659bc65165fe862b3012f126305134164b9 Mon Sep 17 00:00:00 2001
From: Varun Agrawal
Date: Tue, 3 Sep 2024 15:10:52 -0400
Subject: [PATCH 36/43] remove overwhelming prior and adjust measurements
 accordingly

---
 .../hybrid/tests/testGaussianMixtureFactor.cpp | 18 ++++++------------
 1 file changed, 6 insertions(+), 12 deletions(-)

diff --git a/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp b/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp
index c8ffdc63b..cfcf33e4d 100644
--- a/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp
+++ b/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp
@@ -399,13 +399,7 @@ static HybridBayesNet CreateBayesNet(double mu0, double mu1, double sigma0,
 
   HybridBayesNet hbn;
 
-  auto prior_model = noiseModel::Isotropic::Sigma(1, prior_sigma);
   auto measurement_model = noiseModel::Isotropic::Sigma(1, measurement_sigma);
-
-  // Set a prior P(x0) at x0=0
-  hbn.emplace_back(
-      new GaussianConditional(x0, Vector1(0.0), I_1x1, prior_model));
-
   // Add measurement P(z0 | x0)
   auto p_z0 = new GaussianConditional(z0, Vector1(0.0), -I_1x1, x0, I_1x1,
                                       measurement_model);
@@ -479,13 +473,14 @@ TEST(GaussianMixtureFactor, TwoStateModel) {
   // Now we add a measurement z1 on x1
   hbn = CreateBayesNet(mu0, mu1, sigma, sigma, true);
 
-  // If we see z1=2.2, discrete mode should say m1=1
-  given.insert(z1, Vector1(2.2));
+  // If we see z1=2.6 (> 2.5 which is the halfway point),
+  // discrete mode should say m1=1
+  given.insert(z1, Vector1(2.6));
   HybridGaussianFactorGraph gfg = hbn.toFactorGraph(given);
   HybridBayesNet::shared_ptr bn = gfg.eliminateSequential();
 
   // Since we have a measurement on z2, we get a definite result
-  DiscreteConditional expected(m1, "0.4923083/0.5076917");
+  DiscreteConditional expected(m1, "0.49772729/0.50227271");
 
   // regression
   EXPECT(assert_equal(expected, *(bn->at(2)->asDiscrete()), 1e-6));
 }
@@ -504,8 +499,7 @@ TEST(GaussianMixtureFactor, TwoStateModel) {
 * the P(m1) should be 0.5/0.5.
 * Getting a measurement on z1 gives us more information.
 */
-// TODO(Varun) Figure out the correctness of this
-TEST_DISABLED(GaussianMixtureFactor, TwoStateModel2) {
+TEST(GaussianMixtureFactor, TwoStateModel2) {
   using namespace test_two_state_estimation;
 
   double mu0 = 1.0, mu1 = 3.0;
@@ -544,7 +538,7 @@ TEST_DISABLED(GaussianMixtureFactor, TwoStateModel2) {
   HybridBayesNet::shared_ptr bn = gfg.eliminateSequential();
 
   // Since we have a measurement on z2, we get a definite result
-  DiscreteConditional expected(m1, "0.4262682/0.5737318");
+  DiscreteConditional expected(m1, "0.44744586/0.55255414");
 
   // regression
   EXPECT(assert_equal(expected, *(bn->at(2)->asDiscrete()), 1e-6));
 }

From 82fc9b9eeb94427a093a2a86a76923ff5e798de1 Mon Sep 17 00:00:00 2001
From: Varun Agrawal
Date: Tue, 3 Sep 2024 15:17:18 -0400
Subject: [PATCH 37/43] account for extra error when sigmas are different

---
 gtsam/hybrid/GaussianMixture.cpp | 13 ++++++++++++-
 1 file changed, 12 insertions(+), 1 deletion(-)

diff --git a/gtsam/hybrid/GaussianMixture.cpp b/gtsam/hybrid/GaussianMixture.cpp
index 0a0332af8..228e851bb 100644
--- a/gtsam/hybrid/GaussianMixture.cpp
+++ b/gtsam/hybrid/GaussianMixture.cpp
@@ -87,7 +87,18 @@ GaussianFactorGraphTree GaussianMixture::add(
 
 /* *******************************************************************************/
 GaussianFactorGraphTree GaussianMixture::asGaussianFactorGraphTree() const {
-  auto wrap = [](const GaussianConditional::shared_ptr &gc) {
+  auto wrap = [this](const GaussianConditional::shared_ptr &gc) {
+    const double Cgm_Kgcm = this->logConstant_ - gc->logNormalizationConstant();
+    // If there is a difference in the covariances, we need to account for that
+    // since the error is dependent on the mode.
+    if (Cgm_Kgcm > 0.0) {
+      // We add a constant factor which will be used when computing
+      // the probability of the discrete variables.
+      Vector c(1);
+      c << std::sqrt(2.0 * Cgm_Kgcm);
+      auto constantFactor = std::make_shared<JacobianFactor>(c);
+      return GaussianFactorGraph{gc, constantFactor};
+    }
     return GaussianFactorGraph{gc};
   };
   return {conditionals_, wrap};

From 06ecf00dba3fa77124bce8eb0b47dc710d4c2328 Mon Sep 17 00:00:00 2001
From: Varun Agrawal
Date: Tue, 3 Sep 2024 15:27:33 -0400
Subject: [PATCH 38/43] improve docs when computing discreteSeparator

---
 gtsam/hybrid/HybridFactorGraph.h           | 2 +-
 gtsam/hybrid/HybridGaussianFactorGraph.cpp | 7 ++++---
 2 files changed, 5 insertions(+), 4 deletions(-)

diff --git a/gtsam/hybrid/HybridFactorGraph.h b/gtsam/hybrid/HybridFactorGraph.h
index 8b59fd4f9..1830d44ab 100644
--- a/gtsam/hybrid/HybridFactorGraph.h
+++ b/gtsam/hybrid/HybridFactorGraph.h
@@ -66,7 +66,7 @@ class GTSAM_EXPORT HybridFactorGraph : public FactorGraph<Factor> {
   /// Get all the discrete keys in the factor graph.
   std::set<DiscreteKey> discreteKeys() const;
 
-  /// Get all the discrete keys in the factor graph, as a set.
+  /// Get all the discrete keys in the factor graph, as a set of Keys.
   KeySet discreteKeySet() const;
 
   /// Get a map from Key to corresponding DiscreteKey.
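As a hedged aside on PATCH 37 above: the constant sqrt(2 * Cgm_Kgcm) works
because a GTSAM JacobianFactor that holds only a right-hand-side vector b has
error 0.5 * ||b||^2. A minimal, self-contained sketch of that check (the value
of C below is hypothetical, not taken from the patches):

#include <cassert>
#include <cmath>
#include <gtsam/linear/JacobianFactor.h>
#include <gtsam/linear/VectorValues.h>

int main() {
  const double C = 1.7;  // hypothetical stand-in for Cgm_Kgcm
  gtsam::Vector1 b(std::sqrt(2.0 * C));
  // A keyless factor: error(x) = 0.5 * ||b||^2 = C for any VectorValues x.
  gtsam::JacobianFactor constantFactor(b);
  assert(std::fabs(constantFactor.error(gtsam::VectorValues()) - C) < 1e-12);
  return 0;
}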
diff --git a/gtsam/hybrid/HybridGaussianFactorGraph.cpp b/gtsam/hybrid/HybridGaussianFactorGraph.cpp
index 6c3442f97..c44fc4f1d 100644
--- a/gtsam/hybrid/HybridGaussianFactorGraph.cpp
+++ b/gtsam/hybrid/HybridGaussianFactorGraph.cpp
@@ -330,7 +330,7 @@ static std::shared_ptr<Factor> createDiscreteFactor(
     //     exp(-factor->error(kEmpty)) / conditional->normalizationConstant();
     // We take negative of the logNormalizationConstant `log(1/k)`
     // to get `log(k)`.
-    return -factor->error(kEmpty) + (-conditional->logNormalizationConstant());
+    return -factor->error(kEmpty) - conditional->logNormalizationConstant();
  };
 
   AlgebraicDecisionTree<Key> logProbabilities(
@@ -523,14 +523,15 @@ EliminateHybrid(const HybridGaussianFactorGraph &factors,
       std::inserter(continuousSeparator, continuousSeparator.begin()));
 
   // Similarly for the discrete separator.
-  KeySet discreteSeparatorSet;
-  std::set<DiscreteKey> discreteSeparator;
   auto discreteKeySet = factors.discreteKeySet();
+  // Use set-difference to get the difference in keys
+  KeySet discreteSeparatorSet;
   std::set_difference(
       discreteKeySet.begin(), discreteKeySet.end(), frontalKeysSet.begin(),
       frontalKeysSet.end(),
       std::inserter(discreteSeparatorSet, discreteSeparatorSet.begin()));
   // Convert from set of keys to set of DiscreteKeys
+  std::set<DiscreteKey> discreteSeparator;
   auto discreteKeyMap = factors.discreteKeyMap();
   for (auto key : discreteSeparatorSet) {
     discreteSeparator.insert(discreteKeyMap.at(key));
   }

From 3f782a4ae7cf99bdc3e40205d665721651fa5ee4 Mon Sep 17 00:00:00 2001
From: Varun Agrawal
Date: Tue, 3 Sep 2024 17:36:45 -0400
Subject: [PATCH 39/43] check for valid GaussianConditional

---
 gtsam/hybrid/GaussianMixture.cpp | 24 ++++++++++++++----------
 1 file changed, 14 insertions(+), 10 deletions(-)

diff --git a/gtsam/hybrid/GaussianMixture.cpp b/gtsam/hybrid/GaussianMixture.cpp
index 228e851bb..fa4248921 100644
--- a/gtsam/hybrid/GaussianMixture.cpp
+++ b/gtsam/hybrid/GaussianMixture.cpp
@@ -88,16 +88,20 @@ GaussianFactorGraphTree GaussianMixture::add(
 /* *******************************************************************************/
 GaussianFactorGraphTree GaussianMixture::asGaussianFactorGraphTree() const {
   auto wrap = [this](const GaussianConditional::shared_ptr &gc) {
-    const double Cgm_Kgcm = this->logConstant_ - gc->logNormalizationConstant();
-    // If there is a difference in the covariances, we need to account for that
-    // since the error is dependent on the mode.
-    if (Cgm_Kgcm > 0.0) {
-      // We add a constant factor which will be used when computing
-      // the probability of the discrete variables.
-      Vector c(1);
-      c << std::sqrt(2.0 * Cgm_Kgcm);
-      auto constantFactor = std::make_shared<JacobianFactor>(c);
-      return GaussianFactorGraph{gc, constantFactor};
+    // First check if conditional has not been pruned
+    if (gc) {
+      const double Cgm_Kgcm =
+          this->logConstant_ - gc->logNormalizationConstant();
+      // If there is a difference in the covariances, we need to account for
+      // that since the error is dependent on the mode.
+      if (Cgm_Kgcm > 0.0) {
+        // We add a constant factor which will be used when computing
+        // the probability of the discrete variables.
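+        // (A JacobianFactor holding only the RHS vector b has error
+        // 0.5 * ||b||^2, so b = [sqrt(2 * Cgm_Kgcm)] contributes exactly
+        // Cgm_Kgcm to the mode's total error.)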
+        Vector c(1);
+        c << std::sqrt(2.0 * Cgm_Kgcm);
+        auto constantFactor = std::make_shared<JacobianFactor>(c);
+        return GaussianFactorGraph{gc, constantFactor};
+      }
     }
     return GaussianFactorGraph{gc};
   };

From 997d0b411b2f7a03124cd28a75e12c6729b9a7ac Mon Sep 17 00:00:00 2001
From: Varun Agrawal
Date: Tue, 3 Sep 2024 17:39:52 -0400
Subject: [PATCH 40/43] ratio tests for GaussianMixtureFactor

---
 .../tests/testGaussianMixtureFactor.cpp       | 40 +++++++++++++++++++
 1 file changed, 40 insertions(+)

diff --git a/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp b/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp
index cfcf33e4d..139b50e2a 100644
--- a/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp
+++ b/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp
@@ -519,6 +519,24 @@ TEST(GaussianMixtureFactor, TwoStateModel2) {
   {
     // Start with no measurement on x1, only on x0
     HybridGaussianFactorGraph gfg = hbn.toFactorGraph(given);
+
+    {
+      VectorValues vv{
+          {X(0), Vector1(0.0)}, {X(1), Vector1(1.0)}, {Z(0), Vector1(0.5)}};
+      HybridValues hv0(vv, DiscreteValues{{M(1), 0}}),
+          hv1(vv, DiscreteValues{{M(1), 1}});
+      EXPECT_DOUBLES_EQUAL(gfg.error(hv0) / hbn.error(hv0),
+                           gfg.error(hv1) / hbn.error(hv1), 1e-9);
+    }
+    {
+      VectorValues vv{
+          {X(0), Vector1(0.5)}, {X(1), Vector1(3.0)}, {Z(0), Vector1(0.5)}};
+      HybridValues hv0(vv, DiscreteValues{{M(1), 0}}),
+          hv1(vv, DiscreteValues{{M(1), 1}});
+      EXPECT_DOUBLES_EQUAL(gfg.error(hv0) / hbn.error(hv0),
+                           gfg.error(hv1) / hbn.error(hv1), 1e-9);
+    }
+
     HybridBayesNet::shared_ptr bn = gfg.eliminateSequential();
 
     // Since no measurement on x1, we get a 50/50 probability
@@ -553,6 +571,28 @@ TEST(GaussianMixtureFactor, TwoStateModel2) {
     given.insert(z1, Vector1(2.2));
     HybridGaussianFactorGraph gfg = hbn.toFactorGraph(given);
+
+    {
+      VectorValues vv{{X(0), Vector1(0.0)},
+                      {X(1), Vector1(1.0)},
+                      {Z(0), Vector1(0.5)},
+                      {Z(1), Vector1(2.2)}};
+      HybridValues hv0(vv, DiscreteValues{{M(1), 0}}),
+          hv1(vv, DiscreteValues{{M(1), 1}});
+      EXPECT_DOUBLES_EQUAL(gfg.error(hv0) / hbn.error(hv0),
+                           gfg.error(hv1) / hbn.error(hv1), 1e-9);
+    }
+    {
+      VectorValues vv{{X(0), Vector1(0.5)},
+                      {X(1), Vector1(3.0)},
+                      {Z(0), Vector1(0.5)},
+                      {Z(1), Vector1(2.2)}};
+      HybridValues hv0(vv, DiscreteValues{{M(1), 0}}),
+          hv1(vv, DiscreteValues{{M(1), 1}});
+      EXPECT_DOUBLES_EQUAL(gfg.error(hv0) / hbn.error(hv0),
+                           gfg.error(hv1) / hbn.error(hv1), 1e-9);
+    }
+
     HybridBayesNet::shared_ptr bn = gfg.eliminateSequential();
 
     // Since we have a measurement on z2, we get a definite result

From cc0400371652f00dc066c4cc854e03d35b1959d3 Mon Sep 17 00:00:00 2001
From: Varun Agrawal
Date: Wed, 4 Sep 2024 15:18:27 -0400
Subject: [PATCH 41/43] conditionalError method

---
 gtsam/hybrid/GaussianMixture.cpp | 53 ++++++++++++--------------------
 gtsam/hybrid/GaussianMixture.h   |  4 +++
 2 files changed, 23 insertions(+), 34 deletions(-)

diff --git a/gtsam/hybrid/GaussianMixture.cpp b/gtsam/hybrid/GaussianMixture.cpp
index fa4248921..325c32f95 100644
--- a/gtsam/hybrid/GaussianMixture.cpp
+++ b/gtsam/hybrid/GaussianMixture.cpp
@@ -330,19 +330,28 @@ AlgebraicDecisionTree<Key> GaussianMixture::logProbability(
   return DecisionTree<Key, double>(conditionals_, probFunc);
 }
 
+/* ************************************************************************* */
+double GaussianMixture::conditionalError(
+    const GaussianConditional::shared_ptr &conditional,
+    const VectorValues &continuousValues) const {
+  // Check if valid pointer
+  if (conditional) {
+    return conditional->error(continuousValues) +  //
+           logConstant_ - conditional->logNormalizationConstant();
+  } else {
+    // If it is not a valid pointer, it means this conditional was pruned,
+    // so we return the maximum error.
+    // This way the negative exponential will give
+    // a probability value close to 0.0.
+    return std::numeric_limits<double>::max();
+  }
+}
+
 /* *******************************************************************************/
 AlgebraicDecisionTree<Key> GaussianMixture::errorTree(
     const VectorValues &continuousValues) const {
   auto errorFunc = [&](const GaussianConditional::shared_ptr &conditional) {
-    // Check if valid pointer
-    if (conditional) {
-      return conditional->error(continuousValues) +  //
-             logConstant_ - conditional->logNormalizationConstant();
-    } else {
-      // If it is not a valid pointer, it means this conditional was pruned,
-      // so we return the maximum error.
-      return std::numeric_limits<double>::max();
-    }
+    return conditionalError(conditional, continuousValues);
   };
   DecisionTree<Key, double> error_tree(conditionals_, errorFunc);
   return error_tree;
 }
 
 /* *******************************************************************************/
 double GaussianMixture::error(const HybridValues &values) const {
-  // Check if discrete keys in discrete assignment are
-  // present in the GaussianMixture
-  KeyVector dKeys = this->discreteKeys_.indices();
-  bool valid_assignment = false;
-  for (auto &&kv : values.discrete()) {
-    if (std::find(dKeys.begin(), dKeys.end(), kv.first) != dKeys.end()) {
-      valid_assignment = true;
-      break;
-    }
-  }
-
-  // The discrete assignment is not valid so we throw an error.
-  if (!valid_assignment) {
-    throw std::runtime_error(
-        "Invalid discrete values in values. Not all discrete keys specified.");
-  }
-
   // Directly index to get the conditional, no need to build the whole tree.
   auto conditional = conditionals_(values.discrete());
-  if (conditional) {
-    return conditional->error(values.continuous()) +  //
-           logConstant_ - conditional->logNormalizationConstant();
-  } else {
-    // If it is not a valid pointer, it means this conditional was pruned,
-    // so we return the maximum error.
-    return std::numeric_limits<double>::max();
-  }
+  return conditionalError(conditional, values.continuous());
 }
 
 /* *******************************************************************************/
diff --git a/gtsam/hybrid/GaussianMixture.h b/gtsam/hybrid/GaussianMixture.h
index 41692bb1c..714c00272 100644
--- a/gtsam/hybrid/GaussianMixture.h
+++ b/gtsam/hybrid/GaussianMixture.h
@@ -256,6 +256,10 @@ class GTSAM_EXPORT GaussianMixture
   /// Check whether `given` has values for all frontal keys.
   bool allFrontalsGiven(const VectorValues &given) const;
 
+  /// Helper method to compute the error of a conditional.
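+  /// Applies the correction logConstant_ - logNormalizationConstant() so that
+  /// errors are comparable across components with different covariances;
+  /// returns the maximum double value if the conditional was pruned.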
+  double conditionalError(const GaussianConditional::shared_ptr &conditional,
+                          const VectorValues &continuousValues) const;
+
 #ifdef GTSAM_ENABLE_BOOST_SERIALIZATION
   /** Serialization function */
   friend class boost::serialization::access;

From 8f65f058e04a5b33fe1650cd2ca3b82106d57357 Mon Sep 17 00:00:00 2001
From: Varun Agrawal
Date: Wed, 4 Sep 2024 15:18:50 -0400
Subject: [PATCH 42/43] clean up code

---
 gtsam/hybrid/HybridFactorGraph.cpp            |  9 ---------
 gtsam/hybrid/HybridFactorGraph.h              |  5 +----
 gtsam/hybrid/HybridGaussianFactorGraph.cpp    | 20 +++++--------------
 .../tests/testGaussianMixtureFactor.cpp       |  8 ++++----
 4 files changed, 10 insertions(+), 32 deletions(-)

diff --git a/gtsam/hybrid/HybridFactorGraph.cpp b/gtsam/hybrid/HybridFactorGraph.cpp
index f7b96f694..f5a7bcdfe 100644
--- a/gtsam/hybrid/HybridFactorGraph.cpp
+++ b/gtsam/hybrid/HybridFactorGraph.cpp
@@ -49,15 +49,6 @@ KeySet HybridFactorGraph::discreteKeySet() const {
   return keys;
 }
 
-/* ************************************************************************* */
-std::unordered_map<Key, DiscreteKey> HybridFactorGraph::discreteKeyMap() const {
-  std::unordered_map<Key, DiscreteKey> result;
-  for (const DiscreteKey& k : discreteKeys()) {
-    result[k.first] = k;
-  }
-  return result;
-}
-
 /* ************************************************************************* */
 const KeySet HybridFactorGraph::continuousKeySet() const {
   KeySet keys;
diff --git a/gtsam/hybrid/HybridFactorGraph.h b/gtsam/hybrid/HybridFactorGraph.h
index 1830d44ab..79f2a7af1 100644
--- a/gtsam/hybrid/HybridFactorGraph.h
+++ b/gtsam/hybrid/HybridFactorGraph.h
@@ -38,7 +38,7 @@ using SharedFactor = std::shared_ptr<Factor>;
 class GTSAM_EXPORT HybridFactorGraph : public FactorGraph<Factor> {
  public:
   using Base = FactorGraph<Factor>;
-  using This = HybridFactorGraph;            ///< this class
+  using This = HybridFactorGraph;              ///< this class
   using shared_ptr = std::shared_ptr<This>;    ///< shared_ptr to This
 
   using Values = gtsam::Values;  ///< backwards compatibility
@@ -69,9 +69,6 @@ class GTSAM_EXPORT HybridFactorGraph : public FactorGraph<Factor> {
   /// Get all the discrete keys in the factor graph, as a set of Keys.
   KeySet discreteKeySet() const;
 
-  /// Get a map from Key to corresponding DiscreteKey.
-  std::unordered_map<Key, DiscreteKey> discreteKeyMap() const;
-
   /// Get all the continuous keys in the factor graph.
   const KeySet continuousKeySet() const;
 
diff --git a/gtsam/hybrid/HybridGaussianFactorGraph.cpp b/gtsam/hybrid/HybridGaussianFactorGraph.cpp
index c44fc4f1d..a7a6eee5a 100644
--- a/gtsam/hybrid/HybridGaussianFactorGraph.cpp
+++ b/gtsam/hybrid/HybridGaussianFactorGraph.cpp
@@ -312,8 +312,8 @@ using Result = std::pair<std::shared_ptr<GaussianConditional>,
                          GaussianMixtureFactor::sharedFactor>;
 
 /**
- * Compute the probability q(μ;m) = exp(-error(μ;m)) * sqrt(det(2π Σ_m))
- * from the residual error at the mean μ.
+ * Compute the probability p(μ;m) = exp(-error(μ;m)) * sqrt(det(2π Σ_m))
+ * from the residual error ||b||^2 at the mean μ.
  * The residual error contains no keys, and only
 * depends on the discrete separator if present.
 */
@@ -523,19 +523,9 @@ EliminateHybrid(const HybridGaussianFactorGraph &factors,
       std::inserter(continuousSeparator, continuousSeparator.begin()));
 
   // Similarly for the discrete separator.
-  auto discreteKeySet = factors.discreteKeySet();
-  // Use set-difference to get the difference in keys
-  KeySet discreteSeparatorSet;
-  std::set_difference(
-      discreteKeySet.begin(), discreteKeySet.end(), frontalKeysSet.begin(),
-      frontalKeysSet.end(),
-      std::inserter(discreteSeparatorSet, discreteSeparatorSet.begin()));
-  // Convert from set of keys to set of DiscreteKeys
-  std::set<DiscreteKey> discreteSeparator;
-  auto discreteKeyMap = factors.discreteKeyMap();
-  for (auto key : discreteSeparatorSet) {
-    discreteSeparator.insert(discreteKeyMap.at(key));
-  }
+  // Since we eliminate all continuous variables first,
+  // the discrete separator will be *all* the discrete keys.
+  std::set<DiscreteKey> discreteSeparator = factors.discreteKeys();
 
   return hybridElimination(factors, frontalKeys, continuousSeparator,
                            discreteSeparator);
diff --git a/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp b/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp
index 139b50e2a..b2a4981f3 100644
--- a/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp
+++ b/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp
@@ -434,12 +434,12 @@ static HybridBayesNet CreateBayesNet(double mu0, double mu1, double sigma0,
 
 /* ************************************************************************* */
 /**
- * Test a model P(x0)P(z0|x0)P(x1|x0,m1)P(z1|x1)P(m1).
+ * Test a model P(z0|x0)P(x1|x0,m1)P(z1|x1)P(m1).
 *
 * P(f01|x1,x0,m1) has different means and same covariance.
 *
 * Converting to a factor graph gives us
- * P(x0)ϕ(x0)ϕ(x1,x0,m1)ϕ(x1)P(m1)
+ * ϕ(x0)ϕ(x1,x0,m1)ϕ(x1)P(m1)
 *
 * If we only have a measurement on z0, then
 * the probability of m1 should be 0.5/0.5.
@@ -488,12 +488,12 @@ TEST(GaussianMixtureFactor, TwoStateModel) {
 
 /* ************************************************************************* */
 /**
- * Test a model P(x0)P(z0|x0)P(x1|x0,m1)P(z1|x1)P(m1).
+ * Test a model P(z0|x0)P(x1|x0,m1)P(z1|x1)P(m1).
 *
 * P(f01|x1,x0,m1) has different means and different covariances.
 *
 * Converting to a factor graph gives us
- * P(x0)ϕ(x0)ϕ(x1,x0,m1)ϕ(x1)P(m1)
+ * ϕ(x0)ϕ(x1,x0,m1)ϕ(x1)P(m1)
 *
 * If we only have a measurement on z0, then
 * the P(m1) should be 0.5/0.5.
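Aside (editor's sketch, not part of the patch series): the closed form in
prob_m_z, with the d = sigma1 / sigma0 fix from PATCH 34, follows from Bayes'
rule with a uniform prior on m. A minimal, self-contained check; the sigma
values below are illustrative only (the means match the tests):

#include <cassert>
#include <cmath>

// Gaussian density in z with the common 1/sqrt(2*pi) factor dropped,
// since it cancels in the posterior; the 1/sigma factor does not cancel.
static double gaussian(double z, double mu, double sigma) {
  double x = (z - mu) / sigma;
  return std::exp(-0.5 * x * x) / sigma;
}

int main() {
  double mu0 = 1.0, mu1 = 3.0, sigma0 = 8.0, sigma1 = 4.0, z = 3.133;

  // Direct Bayes computation with a uniform prior P(m=0) = P(m=1) = 0.5.
  double direct = gaussian(z, mu1, sigma1) /
                  (gaussian(z, mu0, sigma0) + gaussian(z, mu1, sigma1));

  // Closed form as in prob_m_z after the PATCH 34 fix.
  double x1 = (z - mu0) / sigma0, x2 = (z - mu1) / sigma1;
  double e = (sigma1 / sigma0) * std::exp(-0.5 * (x1 * x1 - x2 * x2));
  double closed = 1.0 / (1.0 + e);

  assert(std::fabs(direct - closed) < 1e-12);
  return 0;
}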
From 8b04d9b2f510bc310b24c212149fd08c468c6fd8 Mon Sep 17 00:00:00 2001
From: Varun Agrawal
Date: Wed, 4 Sep 2024 19:27:57 -0400
Subject: [PATCH 43/43] remove tests, will add later

---
 gtsam/hybrid/tests/testMixtureFactor.cpp | 150 -----------------------
 1 file changed, 150 deletions(-)

diff --git a/gtsam/hybrid/tests/testMixtureFactor.cpp b/gtsam/hybrid/tests/testMixtureFactor.cpp
index 48b193eeb..a58a4767f 100644
--- a/gtsam/hybrid/tests/testMixtureFactor.cpp
+++ b/gtsam/hybrid/tests/testMixtureFactor.cpp
@@ -118,156 +118,6 @@ TEST(MixtureFactor, Dim) {
   EXPECT_LONGS_EQUAL(1, mixtureFactor.dim());
 }
 
-/* ************************************************************************* */
-// Test components with differing means
-TEST(MixtureFactor, DifferentMeans) {
-  DiscreteKey m1(M(1), 2), m2(M(2), 2);
-
-  Values values;
-  double x1 = 0.0, x2 = 1.75, x3 = 2.60;
-  values.insert(X(1), x1);
-  values.insert(X(2), x2);
-  values.insert(X(3), x3);
-
-  auto model0 = noiseModel::Isotropic::Sigma(1, 1e-0);
-  auto model1 = noiseModel::Isotropic::Sigma(1, 1e-0);
-  auto prior_noise = noiseModel::Isotropic::Sigma(1, 1e-0);
-
-  auto f0 = std::make_shared<BetweenFactor<double>>(X(1), X(2), 0.0, model0);
-  auto f1 = std::make_shared<BetweenFactor<double>>(X(1), X(2), 2.0, model1);
-  std::vector<NonlinearFactor::shared_ptr> factors{f0, f1};
-
-  MixtureFactor mixtureFactor({X(1), X(2)}, {m1}, factors);
-  HybridNonlinearFactorGraph hnfg;
-  hnfg.push_back(mixtureFactor);
-
-  f0 = std::make_shared<BetweenFactor<double>>(X(2), X(3), 0.0, model0);
-  f1 = std::make_shared<BetweenFactor<double>>(X(2), X(3), 2.0, model1);
-  std::vector<NonlinearFactor::shared_ptr> factors23{f0, f1};
-  hnfg.push_back(MixtureFactor({X(2), X(3)}, {m2}, factors23));
-
-  auto prior = PriorFactor<double>(X(1), x1, prior_noise);
-  hnfg.push_back(prior);
-
-  hnfg.emplace_shared<PriorFactor<double>>(X(2), 2.0, prior_noise);
-
-  auto hgfg = hnfg.linearize(values);
-  auto bn = hgfg->eliminateSequential();
-  HybridValues actual = bn->optimize();
-
-  HybridValues expected(
-      VectorValues{
-          {X(1), Vector1(0.0)}, {X(2), Vector1(0.25)}, {X(3), Vector1(-0.6)}},
-      DiscreteValues{{M(1), 1}, {M(2), 0}});
-
-  EXPECT(assert_equal(expected, actual));
-
-  {
-    DiscreteValues dv{{M(1), 0}, {M(2), 0}};
-    VectorValues cont = bn->optimize(dv);
-    double error = bn->error(HybridValues(cont, dv));
-    // regression
-    EXPECT_DOUBLES_EQUAL(1.77418393408, error, 1e-9);
-  }
-  {
-    DiscreteValues dv{{M(1), 0}, {M(2), 1}};
-    VectorValues cont = bn->optimize(dv);
-    double error = bn->error(HybridValues(cont, dv));
-    // regression
-    EXPECT_DOUBLES_EQUAL(1.77418393408, error, 1e-9);
-  }
-  {
-    DiscreteValues dv{{M(1), 1}, {M(2), 0}};
-    VectorValues cont = bn->optimize(dv);
-    double error = bn->error(HybridValues(cont, dv));
-    // regression
-    EXPECT_DOUBLES_EQUAL(1.10751726741, error, 1e-9);
-  }
-  {
-    DiscreteValues dv{{M(1), 1}, {M(2), 1}};
-    VectorValues cont = bn->optimize(dv);
-    double error = bn->error(HybridValues(cont, dv));
-    // regression
-    EXPECT_DOUBLES_EQUAL(1.10751726741, error, 1e-9);
-  }
-}
-
-/* ************************************************************************* */
-// Test components with differing covariances
-TEST(MixtureFactor, DifferentCovariances) {
-  DiscreteKey m1(M(1), 2);
-
-  Values values;
-  double x1 = 1.0, x2 = 1.0;
-  values.insert(X(1), x1);
-  values.insert(X(2), x2);
-
-  double between = 0.0;
-
-  auto model0 = noiseModel::Isotropic::Sigma(1, 1e2);
-  auto model1 = noiseModel::Isotropic::Sigma(1, 1e-2);
-  auto prior_noise = noiseModel::Isotropic::Sigma(1, 1e-3);
-
-  auto f0 =
-      std::make_shared<BetweenFactor<double>>(X(1), X(2), between, model0);
-  auto f1 =
-      std::make_shared<BetweenFactor<double>>(X(1), X(2), between, model1);
-  std::vector<NonlinearFactor::shared_ptr> factors{f0, f1};
-
-  // Create via toFactorGraph
-  using symbol_shorthand::Z;
-  Matrix H0_1, H0_2, H1_1, H1_2;
-  Vector d0 = f0->evaluateError(x1, x2, &H0_1, &H0_2);
-  std::vector<std::pair<Key, Matrix>> terms0 = {{Z(1), gtsam::I_1x1 /*Rx*/},
-                                                //
-                                                {X(1), H0_1 /*Sp1*/},
-                                                {X(2), H0_2 /*Tp2*/}};
-
-  Vector d1 = f1->evaluateError(x1, x2, &H1_1, &H1_2);
-  std::vector<std::pair<Key, Matrix>> terms1 = {{Z(1), gtsam::I_1x1 /*Rx*/},
-                                                //
-                                                {X(1), H1_1 /*Sp1*/},
-                                                {X(2), H1_2 /*Tp2*/}};
-  auto gm = new gtsam::GaussianMixture(
-      {Z(1)}, {X(1), X(2)}, {m1},
-      {std::make_shared<GaussianConditional>(terms0, 1, -d0, model0),
-       std::make_shared<GaussianConditional>(terms1, 1, -d1, model1)});
-  gtsam::HybridBayesNet bn;
-  bn.emplace_back(gm);
-
-  gtsam::VectorValues measurements;
-  measurements.insert(Z(1), gtsam::Z_1x1);
-  // Create FG with single GaussianMixtureFactor
-  HybridGaussianFactorGraph mixture_fg = bn.toFactorGraph(measurements);
-
-  // Linearized prior factor on X1
-  auto prior = PriorFactor<double>(X(1), x1, prior_noise).linearize(values);
-  mixture_fg.push_back(prior);
-
-  auto hbn = mixture_fg.eliminateSequential();
-
-  VectorValues cv;
-  cv.insert(X(1), Vector1(0.0));
-  cv.insert(X(2), Vector1(0.0));
-  // P(m1) = [0.5, 0.5], so we should pick 0
-  DiscreteValues dv;
-  dv.insert({M(1), 0});
-  HybridValues expected_values(cv, dv);
-
-  HybridValues actual_values = hbn->optimize();
-  EXPECT(assert_equal(expected_values, actual_values));
-
-  // Check that we get different error values at the MLE point μ.
-  AlgebraicDecisionTree<Key> errorTree = hbn->errorTree(cv);
-
-  HybridValues hv0(cv, DiscreteValues{{M(1), 0}});
-  HybridValues hv1(cv, DiscreteValues{{M(1), 1}});
-
-  AlgebraicDecisionTree<Key> expectedErrorTree(m1, 9.90348755254,
-                                               0.69314718056);
-  EXPECT(assert_equal(expectedErrorTree, errorTree));
-}
-
 /* ************************************************************************* */
 int main() {
   TestResult tr;