diff --git a/gtsam/hybrid/GaussianMixtureFactor.cpp b/gtsam/hybrid/GaussianMixtureFactor.cpp
index 4e138acfa..e519cefe6 100644
--- a/gtsam/hybrid/GaussianMixtureFactor.cpp
+++ b/gtsam/hybrid/GaussianMixtureFactor.cpp
@@ -28,11 +28,86 @@ namespace gtsam {
 
+/**
+ * @brief Helper function to correct the [A|b] matrices in the factor
+ * components with the normalizer values. This is done by storing the
+ * normalizer value in the `b` vector as an additional row.
+ *
+ * @param factors DecisionTree of GaussianFactor shared pointers.
+ * @param varyingNormalizers Flag indicating the normalizers are different for
+ * each component.
+ * @return GaussianMixtureFactor::Factors
+ */
+GaussianMixtureFactor::Factors correct(
+    const GaussianMixtureFactor::Factors &factors, bool varyingNormalizers) {
+  if (!varyingNormalizers) {
+    return factors;
+  }
+
+  // First compute all the log(|2 pi Sigma|) terms.
+  auto computeNormalizers = [](const GaussianMixtureFactor::sharedFactor &gf) {
+    auto jf = std::dynamic_pointer_cast<JacobianFactor>(gf);
+    // If we have, say, a Hessian factor, there is no need to do anything.
+    if (!jf) return 0.0;
+
+    auto model = jf->get_model();
+    // If there is no noise model, there is nothing to do.
+    if (!model) {
+      return 0.0;
+    }
+    // Since noise models are Gaussian, we can get the logDeterminant using
+    // the same trick as in GaussianConditional.
+    double logDetR =
+        model->R().diagonal().unaryExpr([](double x) { return log(x); }).sum();
+    double logDeterminantSigma = -2.0 * logDetR;
+
+    size_t n = model->dim();
+    constexpr double log2pi = 1.8378770664093454835606594728112;
+    return n * log2pi + logDeterminantSigma;
+  };
+
+  AlgebraicDecisionTree<Key> log_normalizers =
+      DecisionTree<Key, double>(factors, computeNormalizers);
+
+  // Subtract the minimum value so all log-normalizers are non-negative,
+  // since we cannot take the square root of a negative number below.
+  double min_log_normalizer = log_normalizers.min();
+  log_normalizers = log_normalizers.apply(
+      [&min_log_normalizer](double n) { return n - min_log_normalizer; });
+
+  // Finally, update the [A|b] matrices.
+  auto update = [&log_normalizers](
+                    const Assignment<Key> &assignment,
+                    const GaussianMixtureFactor::sharedFactor &gf) {
+    auto jf = std::dynamic_pointer_cast<JacobianFactor>(gf);
+    if (!jf) return gf;
+    // If there is no noise model, there is nothing to do.
+    if (!jf->get_model()) return gf;
+    // If the log-normalizer is 0, do nothing.
+    if (log_normalizers(assignment) == 0.0) return gf;
+
+    GaussianFactorGraph gfg;
+    gfg.push_back(jf);
+
+    Vector c(1);
+    c << std::sqrt(log_normalizers(assignment));
+    auto constantFactor = std::make_shared<JacobianFactor>(c);
+
+    gfg.push_back(constantFactor);
+    return std::dynamic_pointer_cast<GaussianFactor>(
+        std::make_shared<JacobianFactor>(gfg));
+  };
+  return factors.apply(update);
+}
+
 /* *******************************************************************************/
 GaussianMixtureFactor::GaussianMixtureFactor(const KeyVector &continuousKeys,
                                              const DiscreteKeys &discreteKeys,
-                                             const Factors &factors)
-    : Base(continuousKeys, discreteKeys), factors_(factors) {}
+                                             const Factors &factors,
+                                             bool varyingNormalizers)
+    : Base(continuousKeys, discreteKeys),
+      factors_(correct(factors, varyingNormalizers)) {}
 
 /* *******************************************************************************/
 bool GaussianMixtureFactor::equals(const HybridFactor &lf, double tol) const {
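Note on why the extra `b` row in `correct()` works: a `JacobianFactor` constructed from only a right-hand-side vector `c` (no keys) has error ½‖c‖², so pushing it into the component's graph and merging folds a constant into that component's negative log-likelihood. In the notation below (mine, not from the source), Σ_k is the k-th component's covariance and n its dimension; the noise model stores the square-root information matrix R with Σ⁻¹ = RᵀR, which is the log-determinant trick used in `computeNormalizers`:

```latex
\log\lvert 2\pi\Sigma_k\rvert = n\log 2\pi + \log\det\Sigma_k,
\qquad
\log\det\Sigma_k = -2\sum_i \log R_{ii}.
```

The appended row then contributes

```latex
\tfrac12 c_k^2 = \tfrac12\Bigl(\log\lvert 2\pi\Sigma_k\rvert - \min_j \log\lvert 2\pi\Sigma_j\rvert\Bigr),
```

i.e. each component is penalized by its normalizer relative to the smallest one; the common shift by the minimum cancels across components and keeps the argument of the square root non-negative.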
diff --git a/gtsam/hybrid/GaussianMixtureFactor.h b/gtsam/hybrid/GaussianMixtureFactor.h
index b1be6ef1d..588501bbe 100644
--- a/gtsam/hybrid/GaussianMixtureFactor.h
+++ b/gtsam/hybrid/GaussianMixtureFactor.h
@@ -82,10 +82,13 @@ class GTSAM_EXPORT GaussianMixtureFactor : public HybridFactor {
    * their cardinalities.
    * @param factors The decision tree of Gaussian factors stored as the mixture
    * density.
+   * @param varyingNormalizers Flag indicating factor components have varying
+   * normalizer values.
    */
   GaussianMixtureFactor(const KeyVector &continuousKeys,
                         const DiscreteKeys &discreteKeys,
-                        const Factors &factors);
+                        const Factors &factors,
+                        bool varyingNormalizers = false);
 
   /**
    * @brief Construct a new GaussianMixtureFactor object using a vector of
@@ -94,12 +97,16 @@
    * @param continuousKeys Vector of keys for continuous factors.
    * @param discreteKeys Vector of discrete keys.
    * @param factors Vector of gaussian factor shared pointers.
+   * @param varyingNormalizers Flag indicating factor components have varying
+   * normalizer values.
    */
   GaussianMixtureFactor(const KeyVector &continuousKeys,
                         const DiscreteKeys &discreteKeys,
-                        const std::vector<sharedFactor> &factors)
+                        const std::vector<sharedFactor> &factors,
+                        bool varyingNormalizers = false)
       : GaussianMixtureFactor(continuousKeys, discreteKeys,
-                              Factors(discreteKeys, factors)) {}
+                              Factors(discreteKeys, factors),
+                              varyingNormalizers) {}
 
   /// @}
   /// @name Testable
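To make the intended call pattern concrete, here is a minimal sketch of constructing a mixture with the new flag. This is illustrative only (the keys, dimensions, and the `makeMixture` helper are mine, not part of the diff); it mirrors the `DifferentCovariances` test below:

```cpp
#include <gtsam/hybrid/GaussianMixtureFactor.h>
#include <gtsam/inference/Symbol.h>
#include <gtsam/linear/JacobianFactor.h>

using namespace gtsam;
using symbol_shorthand::M;
using symbol_shorthand::X;

// Hypothetical helper: two 1-D components with the same structure but
// different noise models, hence different Gaussian normalization constants.
GaussianMixtureFactor makeMixture() {
  DiscreteKey m1(M(1), 2);
  auto model0 = noiseModel::Isotropic::Sigma(1, 1e2);   // wide component
  auto model1 = noiseModel::Isotropic::Sigma(1, 1e-2);  // narrow component
  auto f0 = std::make_shared<JacobianFactor>(X(1), I_1x1, Vector1(0.0), model0);
  auto f1 = std::make_shared<JacobianFactor>(X(1), I_1x1, Vector1(0.0), model1);
  // varyingNormalizers = true routes the components through correct(),
  // which augments each [A|b] with its shifted log-normalizer.
  return GaussianMixtureFactor({X(1)}, {m1}, {f0, f1}, true);
}
```

The default `varyingNormalizers = false` keeps existing call sites unchanged, so the new behavior is opt-in.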
diff --git a/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp b/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp
index 11b01f074..55ecce939 100644
--- a/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp
+++ b/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp
@@ -22,9 +22,13 @@
 #include <gtsam/discrete/DiscreteValues.h>
 #include <gtsam/hybrid/GaussianMixture.h>
 #include <gtsam/hybrid/GaussianMixtureFactor.h>
+#include <gtsam/hybrid/HybridBayesNet.h>
+#include <gtsam/hybrid/HybridGaussianFactorGraph.h>
 #include <gtsam/hybrid/HybridValues.h>
 #include <gtsam/inference/Symbol.h>
 #include <gtsam/linear/JacobianFactor.h>
+#include <gtsam/nonlinear/PriorFactor.h>
+#include <gtsam/slam/BetweenFactor.h>
 
 // Include for test suite
 #include <CppUnitLite/TestHarness.h>
@@ -56,7 +60,6 @@ TEST(GaussianMixtureFactor, Sum) {
   auto b = Matrix::Zero(2, 1);
   Vector2 sigmas;
   sigmas << 1, 2;
-  auto model = noiseModel::Diagonal::Sigmas(sigmas, true);
 
   auto f10 = std::make_shared<JacobianFactor>(X(1), A1, X(2), A2, b);
   auto f11 = std::make_shared<JacobianFactor>(X(1), A1, X(2), A2, b);
@@ -179,7 +182,8 @@ TEST(GaussianMixtureFactor, Error) {
   continuousValues.insert(X(2), Vector2(1, 1));
 
   // error should return a tree of errors, with nodes for each discrete value.
-  AlgebraicDecisionTree<Key> error_tree = mixtureFactor.errorTree(continuousValues);
+  AlgebraicDecisionTree<Key> error_tree =
+      mixtureFactor.errorTree(continuousValues);
 
   std::vector<DiscreteKey> discrete_keys = {m1};
   // Error values for regression test
@@ -192,8 +196,163 @@ TEST(GaussianMixtureFactor, Error) {
   DiscreteValues discreteValues;
   discreteValues[m1.first] = 1;
   EXPECT_DOUBLES_EQUAL(
-      4.0, mixtureFactor.error({continuousValues, discreteValues}),
-      1e-9);
+      4.0, mixtureFactor.error({continuousValues, discreteValues}), 1e-9);
+}
+
+/* ************************************************************************* */
+// Test components with differing means
+TEST(GaussianMixtureFactor, DifferentMeans) {
+  DiscreteKey m1(M(1), 2), m2(M(2), 2);
+
+  Values values;
+  double x1 = 0.0, x2 = 1.75, x3 = 2.60;
+  values.insert(X(1), x1);
+  values.insert(X(2), x2);
+  values.insert(X(3), x3);
+
+  auto model0 = noiseModel::Isotropic::Sigma(1, 1e-0);
+  auto model1 = noiseModel::Isotropic::Sigma(1, 1e-0);
+  auto prior_noise = noiseModel::Isotropic::Sigma(1, 1e-0);
+
+  auto f0 = std::make_shared<BetweenFactor<double>>(X(1), X(2), 0.0, model0)
+                ->linearize(values);
+  auto f1 = std::make_shared<BetweenFactor<double>>(X(1), X(2), 2.0, model1)
+                ->linearize(values);
+  std::vector<GaussianFactor::shared_ptr> factors{f0, f1};
+
+  GaussianMixtureFactor mixtureFactor({X(1), X(2)}, {m1}, factors, true);
+  HybridGaussianFactorGraph hfg;
+  hfg.push_back(mixtureFactor);
+
+  f0 = std::make_shared<BetweenFactor<double>>(X(2), X(3), 0.0, model0)
+           ->linearize(values);
+  f1 = std::make_shared<BetweenFactor<double>>(X(2), X(3), 2.0, model1)
+           ->linearize(values);
+  std::vector<GaussianFactor::shared_ptr> factors23{f0, f1};
+  hfg.push_back(GaussianMixtureFactor({X(2), X(3)}, {m2}, factors23, true));
+
+  auto prior = PriorFactor<double>(X(1), x1, prior_noise).linearize(values);
+  hfg.push_back(prior);
+
+  hfg.push_back(PriorFactor<double>(X(2), 2.0, prior_noise).linearize(values));
+
+  auto bn = hfg.eliminateSequential();
+  HybridValues actual = bn->optimize();
+
+  HybridValues expected(
+      VectorValues{
+          {X(1), Vector1(0.0)}, {X(2), Vector1(0.25)}, {X(3), Vector1(-0.6)}},
+      DiscreteValues{{M(1), 1}, {M(2), 0}});
+
+  EXPECT(assert_equal(expected, actual));
+
+  {
+    DiscreteValues dv{{M(1), 0}, {M(2), 0}};
+    VectorValues cont = bn->optimize(dv);
+    double error = bn->error(HybridValues(cont, dv));
+    // regression
+    EXPECT_DOUBLES_EQUAL(1.77418393408, error, 1e-9);
+  }
+  {
+    DiscreteValues dv{{M(1), 0}, {M(2), 1}};
+    VectorValues cont = bn->optimize(dv);
+    double error = bn->error(HybridValues(cont, dv));
+    // regression
+    EXPECT_DOUBLES_EQUAL(1.77418393408, error, 1e-9);
+  }
+  {
+    DiscreteValues dv{{M(1), 1}, {M(2), 0}};
+    VectorValues cont = bn->optimize(dv);
+    double error = bn->error(HybridValues(cont, dv));
+    // regression
+    EXPECT_DOUBLES_EQUAL(1.10751726741, error, 1e-9);
+  }
+  {
+    DiscreteValues dv{{M(1), 1}, {M(2), 1}};
+    VectorValues cont = bn->optimize(dv);
+    double error = bn->error(HybridValues(cont, dv));
+    // regression
+    EXPECT_DOUBLES_EQUAL(1.10751726741, error, 1e-9);
+  }
+}
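One observation on `DifferentMeans` (my reading, not stated in the test): both components use the same unit-sigma noise model, and since `NoiseModelFactor::linearize` whitens the system, the resulting `JacobianFactor`s carry no noise model at all. Either way the correction in `correct()` vanishes here: equal log-normalizers are shifted to zero by the minimum,

```latex
c_k = \sqrt{\log\lvert 2\pi\Sigma_k\rvert - \min_j \log\lvert 2\pi\Sigma_j\rvert}
    = \sqrt{L - L} = 0 ,
```

so this test pins down the behavior with differing means only; the test below is the one that actually exercises the normalizer augmentation.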
+
+/* ************************************************************************* */
+/**
+ * @brief Test components with differing covariances.
+ * The factor graph is
+ *      *-X1-*-X2
+ *           |
+ *           M1
+ */
+TEST(GaussianMixtureFactor, DifferentCovariances) {
+  DiscreteKey m1(M(1), 2);
+
+  Values values;
+  double x1 = 1.0, x2 = 1.0;
+  values.insert(X(1), x1);
+  values.insert(X(2), x2);
+
+  double between = 0.0;
+
+  auto model0 = noiseModel::Isotropic::Sigma(1, 1e2);
+  auto model1 = noiseModel::Isotropic::Sigma(1, 1e-2);
+  auto prior_noise = noiseModel::Isotropic::Sigma(1, 1e-3);
+
+  auto f0 =
+      std::make_shared<BetweenFactor<double>>(X(1), X(2), between, model0);
+  auto f1 =
+      std::make_shared<BetweenFactor<double>>(X(1), X(2), between, model1);
+  std::vector<NonlinearFactor::shared_ptr> factors{f0, f1};
+
+  // Create via toFactorGraph
+  using symbol_shorthand::Z;
+  Matrix H0_1, H0_2, H1_1, H1_2;
+  Vector d0 = f0->evaluateError(x1, x2, &H0_1, &H0_2);
+  std::vector<std::pair<Key, Matrix>> terms0 = {{Z(1), gtsam::I_1x1 /*Rx*/},
+                                                //
+                                                {X(1), H0_1 /*Sp1*/},
+                                                {X(2), H0_2 /*Tp2*/}};
+
+  Vector d1 = f1->evaluateError(x1, x2, &H1_1, &H1_2);
+  std::vector<std::pair<Key, Matrix>> terms1 = {{Z(1), gtsam::I_1x1 /*Rx*/},
+                                                //
+                                                {X(1), H1_1 /*Sp1*/},
+                                                {X(2), H1_2 /*Tp2*/}};
+  gtsam::GaussianMixtureFactor gmf(
+      {X(1), X(2)}, {m1},
+      {std::make_shared<JacobianFactor>(X(1), H0_1, X(2), H0_2, -d0, model0),
+       std::make_shared<JacobianFactor>(X(1), H1_1, X(2), H1_2, -d1, model1)},
+      true);
+
+  // Create a factor graph with a single GaussianMixtureFactor.
+  HybridGaussianFactorGraph mixture_fg;
+  mixture_fg.add(gmf);
+
+  // Linearized prior factor on X1.
+  auto prior = PriorFactor<double>(X(1), x1, prior_noise).linearize(values);
+  mixture_fg.push_back(prior);
+
+  auto hbn = mixture_fg.eliminateSequential();
+
+  VectorValues cv;
+  cv.insert(X(1), Vector1(0.0));
+  cv.insert(X(2), Vector1(0.0));
+
+  // Check the error values at the MLE point μ.
+  AlgebraicDecisionTree<Key> errorTree = hbn->errorTree(cv);
+
+  DiscreteValues dv0{{M(1), 0}};
+  DiscreteValues dv1{{M(1), 1}};
+
+  // regression
+  EXPECT_DOUBLES_EQUAL(0.69314718056, errorTree(dv0), 1e-9);
+  EXPECT_DOUBLES_EQUAL(0.69314718056, errorTree(dv1), 1e-9);
+
+  DiscreteConditional expected_m1(m1, "0.5/0.5");
+  DiscreteConditional actual_m1 = *(hbn->at(2)->asDiscrete());
+
+  EXPECT(assert_equal(expected_m1, actual_m1));
 }
 
 /* ************************************************************************* */
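A quick sanity check on the regression constants (my arithmetic, not part of the diff): 0.69314718056 is ln 2, consistent with the recovered 0.5/0.5 posterior on m1. If I read the elimination right, once the normalizer correction is applied both covariance hypotheses explain the linearization point equally well, leaving only the discrete contribution in the error tree:

```latex
-\log P(m_1 = 0) = -\log P(m_1 = 1) = -\log\tfrac12 = \ln 2 \approx 0.69314718056 .
```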