From c204524a3b2aa86d6e00316add1f0d5c4c3030bd Mon Sep 17 00:00:00 2001
From: Varun Agrawal
Date: Wed, 1 Mar 2023 13:02:55 -0500
Subject: [PATCH 1/7] function to add constant term for correcting
 HybridGaussianFactorGraph

---
 gtsam/hybrid/GaussianMixtureFactor.h        |   3 +
 gtsam/hybrid/tests/testHybridEstimation.cpp | 195 ++++++++++++++++++--
 2 files changed, 187 insertions(+), 11 deletions(-)

diff --git a/gtsam/hybrid/GaussianMixtureFactor.h b/gtsam/hybrid/GaussianMixtureFactor.h
index 2088a7837..8e1a95e9d 100644
--- a/gtsam/hybrid/GaussianMixtureFactor.h
+++ b/gtsam/hybrid/GaussianMixtureFactor.h
@@ -143,6 +143,9 @@ class GTSAM_EXPORT GaussianMixtureFactor : public HybridFactor {
    */
   double error(const HybridValues &values) const override;
 
+  /// Getter for GaussianFactor decision tree
+  Factors factors() const { return factors_; }
+
   /// Add MixtureFactor to a Sum, syntactic sugar.
   friend GaussianFactorGraphTree &operator+=(
       GaussianFactorGraphTree &sum, const GaussianMixtureFactor &factor) {
diff --git a/gtsam/hybrid/tests/testHybridEstimation.cpp b/gtsam/hybrid/tests/testHybridEstimation.cpp
index 28f47232b..8ec41d51f 100644
--- a/gtsam/hybrid/tests/testHybridEstimation.cpp
+++ b/gtsam/hybrid/tests/testHybridEstimation.cpp
@@ -17,6 +17,7 @@
 
 #include
 #include
+#include
 #include
 #include
 #include
@@ -37,12 +38,11 @@
 
 #include "Switching.h"
 
-#include <bitset>
-
 using namespace std;
 using namespace gtsam;
 
 using symbol_shorthand::X;
+using symbol_shorthand::Z;
 
 Ordering getOrdering(HybridGaussianFactorGraph& factors,
                      const HybridGaussianFactorGraph& newFactors) {
@@ -73,7 +73,7 @@ Ordering getOrdering(HybridGaussianFactorGraph& factors,
   return ordering;
 }
 
-TEST(HybridEstimation, Full) {
+TEST_DISABLED(HybridEstimation, Full) {
   size_t K = 6;
   std::vector<double> measurements = {0, 1, 2, 2, 2, 3};
   // Ground truth discrete seq
@@ -91,8 +91,7 @@ TEST(HybridEstimation, Full) {
     hybridOrdering.push_back(M(k));
   }
 
-  HybridBayesNet::shared_ptr bayesNet =
-      graph.eliminateSequential();
+  HybridBayesNet::shared_ptr bayesNet = graph.eliminateSequential();
 
   EXPECT_LONGS_EQUAL(2 * K - 1, bayesNet->size());
 
@@ -116,7 +115,7 @@ TEST(HybridEstimation, Full) {
 
 /****************************************************************************/
 // Test approximate inference with an additional pruning step.
-TEST(HybridEstimation, Incremental) {
+TEST_DISABLED(HybridEstimation, Incremental) {
   size_t K = 15;
   std::vector<double> measurements = {0, 1, 2, 2, 2, 2,  3,  4,  5,  6,  6,
                                       7, 8, 9, 9, 9, 10, 11, 11, 11, 11};
@@ -284,7 +283,7 @@ AlgebraicDecisionTree<Key> getProbPrimeTree(
  * Test for correctness of different branches of the P'(Continuous | Discrete).
  * The values should match those of P'(Continuous) for each discrete mode.
 ********************************************************************************/
-TEST(HybridEstimation, Probability) {
+TEST_DISABLED(HybridEstimation, Probability) {
   constexpr size_t K = 4;
   std::vector<double> measurements = {0, 1, 2, 2};
   double between_sigma = 1.0, measurement_sigma = 0.1;
@@ -327,7 +326,7 @@ AlgebraicDecisionTree<Key> getProbPrimeTree(
  * in the multi-frontal setting. The values should match those of P'(Continuous)
 * for each discrete mode.
 */
-TEST(HybridEstimation, ProbabilityMultifrontal) {
+TEST_DISABLED(HybridEstimation, ProbabilityMultifrontal) {
   constexpr size_t K = 4;
 
   std::vector<double> measurements = {0, 1, 2, 2};
@@ -338,7 +337,6 @@ TEST(HybridEstimation, ProbabilityMultifrontal) {
   Switching switching(K, between_sigma, measurement_sigma, measurements,
                       "1/1 1/1");
   auto graph = switching.linearizedFactorGraph;
-  Ordering ordering = getOrdering(graph, HybridGaussianFactorGraph());
 
   // Get the tree of unnormalized probabilities for each mode sequence.
   AlgebraicDecisionTree<Key> expected_probPrimeTree = getProbPrimeTree(graph);
@@ -435,7 +433,7 @@ static HybridGaussianFactorGraph::shared_ptr createHybridGaussianFactorGraph() {
 /*********************************************************************************
  * Do hybrid elimination and do regression test on discrete conditional.
  ********************************************************************************/
-TEST(HybridEstimation, eliminateSequentialRegression) {
+TEST_DISABLED(HybridEstimation, eliminateSequentialRegression) {
   // Create the factor graph from the nonlinear factor graph.
   HybridGaussianFactorGraph::shared_ptr fg = createHybridGaussianFactorGraph();
@@ -470,7 +468,7 @@ TEST(HybridEstimation, eliminateSequentialRegression) {
  * 3. Sample from the Bayes Net.
  * 4. Check that the ratio `BN(x)/FG(x) = constant` for all samples `x`.
 ********************************************************************************/
-TEST(HybridEstimation, CorrectnessViaSampling) {
+TEST_DISABLED(HybridEstimation, CorrectnessViaSampling) {
   // 1. Create the factor graph from the nonlinear factor graph.
   const auto fg = createHybridGaussianFactorGraph();
@@ -503,6 +501,181 @@ TEST(HybridEstimation, CorrectnessViaSampling) {
   }
 }
 
+/****************************************************************************/
+std::shared_ptr<HybridGaussianFactorGraph> addConstantTerm(
+    const HybridGaussianFactorGraph& gfg, const Key& mode, double noise_tight,
+    double noise_loose, size_t d, size_t tight_index) {
+  HybridGaussianFactorGraph updated_gfg;
+
+  // logConstant will be of the tighter model
+  double logConstant =
+      -0.5 * d * 1.8378770664093454835606594728112 + log(1.0 / noise_tight);
+
+  for (auto&& f : gfg) {
+    if (auto gmf = dynamic_pointer_cast<GaussianMixtureFactor>(f)) {
+      auto func = [&](const Assignment<Key>& assignment,
+                      const GaussianFactor::shared_ptr& gf) {
+        if (assignment.at(mode) != tight_index) {
+          double factor_log_constant =
+              -0.5 * d * 1.8378770664093454835606594728112 +
+              log(1.0 / noise_loose);
+
+          GaussianFactorGraph gfg_;
+          gfg_.push_back(gf);
+          Vector c(d);
+          c << std::sqrt(2.0 * (logConstant - factor_log_constant));
+          auto constantFactor = std::make_shared<JacobianFactor>(c);
+          gfg_.push_back(constantFactor);
+          return std::make_shared<JacobianFactor>(gfg_);
+        } else {
+          return dynamic_pointer_cast<JacobianFactor>(gf);
+        }
+      };
+      auto updated_factors = gmf->factors().apply(func);
+      auto updated_gmf = std::make_shared<GaussianMixtureFactor>(
+          gmf->continuousKeys(), gmf->discreteKeys(), updated_factors);
+      updated_gfg.add(updated_gmf);
+    } else {
+      updated_gfg.add(f);
+    }
+  }
+  return std::make_shared<HybridGaussianFactorGraph>(updated_gfg);
+}
+
+TEST(HybridEstimation, ModeSelection) {
+  HybridNonlinearFactorGraph graph;
+  Values initial;
+
+  auto measurement_model = noiseModel::Isotropic::Sigma(1, 0.1);
+  auto motion_model = noiseModel::Isotropic::Sigma(1, 1.0);
+
+  graph.emplace_shared<PriorFactor<double>>(X(0), 0.0, measurement_model);
+  graph.emplace_shared<PriorFactor<double>>(X(1), 0.0, measurement_model);
+
+  DiscreteKeys modes;
+  modes.emplace_back(M(0), 2);
+
+  // The size of the noise model
+  size_t d = 1;
+  double noise_tight = 0.5, noise_loose = 5.0;
+
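+  // The two motion models below differ only in their noise sigmas. A looser
+  // (larger) sigma means a smaller Gaussian normalization constant, so the
+  // linearized mixture components are not directly comparable; the
+  // addConstantTerm helper above appends a unary factor to the loose
+  // component so that both modes carry the tight model's constant.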
+  auto model0 = std::make_shared<MotionModel>(
+           X(0), X(1), 0.0, noiseModel::Isotropic::Sigma(d, noise_loose)),
+       model1 = std::make_shared<MotionModel>(
+           X(0), X(1), 0.0, noiseModel::Isotropic::Sigma(d, noise_tight));
+
+  std::vector<NonlinearFactor::shared_ptr> components = {model0, model1};
+
+  KeyVector keys = {X(0), X(1)};
+  graph.emplace_shared<MixtureFactor>(keys, modes, components);
+
+  initial.insert(X(0), 0.0);
+  initial.insert(X(1), 0.0);
+
+  auto gfg = graph.linearize(initial);
+
+  gfg = addConstantTerm(*gfg, M(0), noise_tight, noise_loose, d, 1);
+
+  HybridBayesNet::shared_ptr bayesNet = gfg->eliminateSequential();
+
+  HybridValues delta = bayesNet->optimize();
+  EXPECT_LONGS_EQUAL(1, delta.discrete().at(M(0)));
+
+  /**************************************************************/
+  HybridBayesNet bn;
+  const DiscreteKey mode{M(0), 2};
+
+  bn.push_back(
+      GaussianConditional::sharedMeanAndStddev(Z(0), -I_1x1, X(0), Z_1x1, 0.1));
+  bn.push_back(
+      GaussianConditional::sharedMeanAndStddev(Z(0), -I_1x1, X(1), Z_1x1, 0.1));
+  bn.emplace_back(new GaussianMixture(
+      {Z(0)}, {X(0), X(1)}, {mode},
+      {GaussianConditional::sharedMeanAndStddev(Z(0), I_1x1, X(0), -I_1x1, X(1),
+                                                Z_1x1, noise_loose),
+       GaussianConditional::sharedMeanAndStddev(Z(0), I_1x1, X(0), -I_1x1, X(1),
+                                                Z_1x1, noise_tight)}));
+
+  VectorValues vv;
+  vv.insert(Z(0), Z_1x1);
+
+  auto fg = bn.toFactorGraph(vv);
+  auto expected_posterior = fg.eliminateSequential();
+
+  // graph.print();
+  // gfg->print("Original Gaussian Factor Graph:");
+  // fg.print("\n\nFrom Bayes Net");
+
+  // bayesNet->print("\n\nBayes Net from FG");
+  // expected_posterior->print("\n\n");
+  EXPECT(assert_equal(*expected_posterior, *bayesNet, 1e-6));
+}
+
+/****************************************************************************/
+TEST(HybridEstimation, ModeSelection2) {
+  using symbol_shorthand::Z;
+
+  // The size of the noise model
+  size_t d = 1;
+  double noise_tight = 0.5, noise_loose = 5.0;
+
+  HybridBayesNet bn;
+  const DiscreteKey mode{M(0), 2};
+
+  bn.push_back(
+      GaussianConditional::sharedMeanAndStddev(Z(0), -I_1x1, X(0), Z_1x1, 0.1));
+  bn.push_back(
+      GaussianConditional::sharedMeanAndStddev(Z(0), -I_1x1, X(1), Z_1x1, 0.1));
+  bn.emplace_back(new GaussianMixture(
+      {Z(0)}, {X(0), X(1)}, {mode},
+      {GaussianConditional::sharedMeanAndStddev(Z(0), I_1x1, X(0), -I_1x1, X(1),
+                                                Z_1x1, noise_loose),
+       GaussianConditional::sharedMeanAndStddev(Z(0), I_1x1, X(0), -I_1x1, X(1),
+                                                Z_1x1, noise_tight)}));
+
+  VectorValues vv;
+  vv.insert(Z(0), Z_1x1);
+
+  auto fg = bn.toFactorGraph(vv);
+
+  auto expected_posterior = fg.eliminateSequential();
+  // expected_posterior->print("\n\n\nLikelihood BN:");
+
+  std::cout << "\n\n==================\n\n" << std::endl;
+  HybridNonlinearFactorGraph graph;
+  Values initial;
+
+  auto measurement_model = noiseModel::Isotropic::Sigma(1, 0.1);
+  auto motion_model = noiseModel::Isotropic::Sigma(1, 1.0);
+
+  graph.emplace_shared<PriorFactor<double>>(X(0), 0.0, measurement_model);
+  graph.emplace_shared<PriorFactor<double>>(X(1), 0.0, measurement_model);
+
+  DiscreteKeys modes;
+  modes.emplace_back(M(0), 2);
+
+  auto model0 = std::make_shared<MotionModel>(
+           X(0), X(1), 0.0, noiseModel::Isotropic::Sigma(d, noise_loose)),
+       model1 = std::make_shared<MotionModel>(
+           X(0), X(1), 0.0, noiseModel::Isotropic::Sigma(d, noise_tight));
+
+  std::vector<NonlinearFactor::shared_ptr> components = {model0, model1};
+
+  KeyVector keys = {X(0), X(1)};
+  graph.emplace_shared<MixtureFactor>(keys, modes, components);
+
+  initial.insert(X(0), 0.0);
+  initial.insert(X(1), 0.0);
+
+  auto gfg = graph.linearize(initial);
+  gfg = addConstantTerm(*gfg, M(0), noise_tight, noise_loose, d, 1);
+
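+  // Eliminating the corrected graph should reproduce the posterior of the
+  // ground-truth Bayes net constructed above.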
+  HybridBayesNet::shared_ptr bayesNet = gfg->eliminateSequential();
+
+  // bayesNet->print();
+  EXPECT(assert_equal(*expected_posterior, *bayesNet, 1e-6));
+}
+
 /* ************************************************************************* */
 int main() {
   TestResult tr;

From 8ba5da44a64fdddac1e1f3c3499c31d9f15916f1 Mon Sep 17 00:00:00 2001
From: Varun Agrawal
Date: Wed, 1 Mar 2023 14:44:40 -0500
Subject: [PATCH 2/7] some cleanup

---
 gtsam/hybrid/HybridGaussianFactorGraph.cpp | 9 ++++-----
 gtsam/linear/HessianFactor.h               | 2 +-
 2 files changed, 5 insertions(+), 6 deletions(-)

diff --git a/gtsam/hybrid/HybridGaussianFactorGraph.cpp b/gtsam/hybrid/HybridGaussianFactorGraph.cpp
index aa29c924e..897d56272 100644
--- a/gtsam/hybrid/HybridGaussianFactorGraph.cpp
+++ b/gtsam/hybrid/HybridGaussianFactorGraph.cpp
@@ -190,8 +190,7 @@ discreteElimination(const HybridGaussianFactorGraph &factors,
 /* ************************************************************************ */
 // If any GaussianFactorGraph in the decision tree contains a nullptr, convert
 // that leaf to an empty GaussianFactorGraph. Needed since the DecisionTree will
-// otherwise create a GFG with a single (null) factor.
-// TODO(dellaert): still a mystery to me why this is needed.
+// otherwise create a GFG with a single (null) factor, which doesn't register as null.
 GaussianFactorGraphTree removeEmpty(const GaussianFactorGraphTree &sum) {
   auto emptyGaussian = [](const GaussianFactorGraph &graph) {
     bool hasNull =
@@ -275,9 +274,9 @@ hybridElimination(const HybridGaussianFactorGraph &factors,
   };
   DecisionTree<Key, double> probabilities(eliminationResults, probability);
 
-  return {std::make_shared<HybridConditional>(gaussianMixture),
-          std::make_shared<DecisionTreeFactor>(discreteSeparator,
-                                               probabilities)};
+  return {
+      std::make_shared<HybridConditional>(gaussianMixture),
+      std::make_shared<DecisionTreeFactor>(discreteSeparator, probabilities)};
   } else {
     // Otherwise, we create a resulting GaussianMixtureFactor on the separator,
     // taking care to correct for conditional constant.
diff --git a/gtsam/linear/HessianFactor.h b/gtsam/linear/HessianFactor.h
index eb1fe2de2..8cbabcd5d 100644
--- a/gtsam/linear/HessianFactor.h
+++ b/gtsam/linear/HessianFactor.h
@@ -131,7 +131,7 @@ namespace gtsam {
    * term, and f the constant term.
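+   * The two parameterizations are related by expanding the weighted
+   * least-squares error: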
   * JacobianFactor error is \f[ 0.5* (Ax-b)' M (Ax-b) = 0.5*x'A'MAx - x'A'Mb + 0.5*b'Mb \f]
   * HessianFactor error is \f[ 0.5*(x'Gx - 2x'g + f) = 0.5*x'Gx - x'*g + 0.5*f \f]
-   * So, with \f$ A = [A1 A2] \f$ and \f$ G=A*'M*A = [A1';A2']*M*[A1 A2] \f$ we have
+   * So, with \f$ A = [A1 A2] \f$ and \f$ G=A'*M*A = [A1';A2']*M*[A1 A2] \f$ we have
   \code
    n1*n1 G11 = A1'*M*A1
    n1*n2 G12 = A1'*M*A2

From 01a959b9e56bc94fd647aa5afafe22b7d967c629 Mon Sep 17 00:00:00 2001
From: Varun Agrawal
Date: Wed, 1 Mar 2023 14:46:47 -0500
Subject: [PATCH 3/7] differing noise models for multi-dimensional problem

---
 gtsam/hybrid/tests/testHybridEstimation.cpp | 69 ++++++++++++---------
 1 file changed, 39 insertions(+), 30 deletions(-)

diff --git a/gtsam/hybrid/tests/testHybridEstimation.cpp b/gtsam/hybrid/tests/testHybridEstimation.cpp
index 8ec41d51f..c917ffff9 100644
--- a/gtsam/hybrid/tests/testHybridEstimation.cpp
+++ b/gtsam/hybrid/tests/testHybridEstimation.cpp
@@ -73,7 +73,7 @@ Ordering getOrdering(HybridGaussianFactorGraph& factors,
   return ordering;
 }
 
-TEST_DISABLED(HybridEstimation, Full) {
+TEST(HybridEstimation, Full) {
   size_t K = 6;
   std::vector<double> measurements = {0, 1, 2, 2, 2, 3};
   // Ground truth discrete seq
@@ -115,7 +115,7 @@ TEST(HybridEstimation, Full) {
 
 /****************************************************************************/
 // Test approximate inference with an additional pruning step.
-TEST_DISABLED(HybridEstimation, Incremental) {
+TEST(HybridEstimation, Incremental) {
   size_t K = 15;
   std::vector<double> measurements = {0, 1, 2, 2, 2, 2,  3,  4,  5,  6,  6,
                                       7, 8, 9, 9, 9, 10, 11, 11, 11, 11};
@@ -283,7 +283,7 @@ AlgebraicDecisionTree<Key> getProbPrimeTree(
  * Test for correctness of different branches of the P'(Continuous | Discrete).
  * The values should match those of P'(Continuous) for each discrete mode.
 ********************************************************************************/
-TEST_DISABLED(HybridEstimation, Probability) {
+TEST(HybridEstimation, Probability) {
   constexpr size_t K = 4;
   std::vector<double> measurements = {0, 1, 2, 2};
   double between_sigma = 1.0, measurement_sigma = 0.1;
@@ -326,7 +326,7 @@ AlgebraicDecisionTree<Key> getProbPrimeTree(
  * in the multi-frontal setting. The values should match those of P'(Continuous)
 * for each discrete mode.
 */
-TEST_DISABLED(HybridEstimation, ProbabilityMultifrontal) {
+TEST(HybridEstimation, ProbabilityMultifrontal) {
   constexpr size_t K = 4;
 
   std::vector<double> measurements = {0, 1, 2, 2};
@@ -433,7 +433,7 @@ static HybridGaussianFactorGraph::shared_ptr createHybridGaussianFactorGraph() {
 /*********************************************************************************
  * Do hybrid elimination and do regression test on discrete conditional.
  ********************************************************************************/
-TEST_DISABLED(HybridEstimation, eliminateSequentialRegression) {
+TEST(HybridEstimation, eliminateSequentialRegression) {
   // Create the factor graph from the nonlinear factor graph.
   HybridGaussianFactorGraph::shared_ptr fg = createHybridGaussianFactorGraph();
@@ -468,7 +468,7 @@ TEST(HybridEstimation, eliminateSequentialRegression) {
  * 3. Sample from the Bayes Net.
  * 4. Check that the ratio `BN(x)/FG(x) = constant` for all samples `x`.
 ********************************************************************************/
-TEST_DISABLED(HybridEstimation, CorrectnessViaSampling) {
+TEST(HybridEstimation, CorrectnessViaSampling) {
   // 1. Create the factor graph from the nonlinear factor graph.
   const auto fg = createHybridGaussianFactorGraph();
 
@@ -502,14 +502,19 @@ TEST(HybridEstimation, CorrectnessViaSampling) {
 }
 
 /****************************************************************************/
+/**
+ * Helper function to add the constant term corresponding to
+ * the difference in noise models.
+ */
 std::shared_ptr<HybridGaussianFactorGraph> addConstantTerm(
     const HybridGaussianFactorGraph& gfg, const Key& mode, double noise_tight,
     double noise_loose, size_t d, size_t tight_index) {
   HybridGaussianFactorGraph updated_gfg;
+  constexpr double log2pi = 1.8378770664093454835606594728112;
 
   // logConstant will be of the tighter model
-  double logConstant =
-      -0.5 * d * 1.8378770664093454835606594728112 + log(1.0 / noise_tight);
+  double logNormalizationConstant = log(1.0 / noise_tight);
+  double logConstant = -0.5 * d * log2pi + logNormalizationConstant;
 
   for (auto&& f : gfg) {
     if (auto gmf = dynamic_pointer_cast<GaussianMixtureFactor>(f)) {
@@ -517,13 +522,15 @@ std::shared_ptr<HybridGaussianFactorGraph> addConstantTerm(
                       const GaussianFactor::shared_ptr& gf) {
         if (assignment.at(mode) != tight_index) {
           double factor_log_constant =
-              -0.5 * d * 1.8378770664093454835606594728112 +
-              log(1.0 / noise_loose);
+              -0.5 * d * log2pi + log(1.0 / noise_loose);
 
           GaussianFactorGraph gfg_;
           gfg_.push_back(gf);
           Vector c(d);
-          c << std::sqrt(2.0 * (logConstant - factor_log_constant));
+          for (size_t i = 0; i < d; i++) {
+            c(i) = std::sqrt(2.0 * (logConstant - factor_log_constant));
+          }
+
           auto constantFactor = std::make_shared<JacobianFactor>(c);
           gfg_.push_back(constantFactor);
           return std::make_shared<JacobianFactor>(gfg_);
@@ -542,6 +549,7 @@ std::shared_ptr<HybridGaussianFactorGraph> addConstantTerm(
   return std::make_shared<HybridGaussianFactorGraph>(updated_gfg);
 }
 
+/****************************************************************************/
 TEST(HybridEstimation, ModeSelection) {
   HybridNonlinearFactorGraph graph;
   Values initial;
@@ -616,56 +624,57 @@ TEST(HybridEstimation, ModeSelection2) {
   using symbol_shorthand::Z;
 
   // The size of the noise model
-  size_t d = 1;
+  size_t d = 3;
   double noise_tight = 0.5, noise_loose = 5.0;
 
   HybridBayesNet bn;
   const DiscreteKey mode{M(0), 2};
 
   bn.push_back(
-      GaussianConditional::sharedMeanAndStddev(Z(0), -I_1x1, X(0), Z_1x1, 0.1));
+      GaussianConditional::sharedMeanAndStddev(Z(0), -I_3x3, X(0), Z_3x1, 0.1));
   bn.push_back(
-      GaussianConditional::sharedMeanAndStddev(Z(0), -I_1x1, X(1), Z_1x1, 0.1));
+      GaussianConditional::sharedMeanAndStddev(Z(0), -I_3x3, X(1), Z_3x1, 0.1));
   bn.emplace_back(new GaussianMixture(
       {Z(0)}, {X(0), X(1)}, {mode},
-      {GaussianConditional::sharedMeanAndStddev(Z(0), I_1x1, X(0), -I_1x1, X(1),
-                                                Z_1x1, noise_loose),
-       GaussianConditional::sharedMeanAndStddev(Z(0), I_1x1, X(0), -I_1x1, X(1),
-                                                Z_1x1, noise_tight)}));
+      {GaussianConditional::sharedMeanAndStddev(Z(0), I_3x3, X(0), -I_3x3, X(1),
+                                                Z_3x1, noise_loose),
+       GaussianConditional::sharedMeanAndStddev(Z(0), I_3x3, X(0), -I_3x3, X(1),
+                                                Z_3x1, noise_tight)}));
 
   VectorValues vv;
-  vv.insert(Z(0), Z_1x1);
+  vv.insert(Z(0), Z_3x1);
 
   auto fg = bn.toFactorGraph(vv);
 
   auto expected_posterior = fg.eliminateSequential();
   // expected_posterior->print("\n\n\nLikelihood BN:");
 
-  std::cout << "\n\n==================\n\n" << std::endl;
+  // std::cout << "\n\n==================\n\n" << std::endl;
+
   HybridNonlinearFactorGraph graph;
   Values initial;
 
-  auto measurement_model = noiseModel::Isotropic::Sigma(1, 0.1);
-  auto motion_model = noiseModel::Isotropic::Sigma(1, 1.0);
+  auto measurement_model = noiseModel::Isotropic::Sigma(d, 0.1);
+  auto motion_model = noiseModel::Isotropic::Sigma(d, 1.0);
 
-  graph.emplace_shared<PriorFactor<double>>(X(0), 0.0, measurement_model);
-  graph.emplace_shared<PriorFactor<double>>(X(1), 0.0, measurement_model);
+  graph.emplace_shared<PriorFactor<Vector3>>(X(0), Z_3x1, measurement_model);
+  graph.emplace_shared<PriorFactor<Vector3>>(X(1), Z_3x1, measurement_model);
 
   DiscreteKeys modes;
   modes.emplace_back(M(0), 2);
 
-  auto model0 = std::make_shared<MotionModel>(
-           X(0), X(1), 0.0, noiseModel::Isotropic::Sigma(d, noise_loose)),
-       model1 = std::make_shared<MotionModel>(
-           X(0), X(1), 0.0, noiseModel::Isotropic::Sigma(d, noise_tight));
+  auto model0 = std::make_shared<BetweenFactor<Vector3>>(
+           X(0), X(1), Z_3x1, noiseModel::Isotropic::Sigma(d, noise_loose)),
+       model1 = std::make_shared<BetweenFactor<Vector3>>(
+           X(0), X(1), Z_3x1, noiseModel::Isotropic::Sigma(d, noise_tight));
 
   std::vector<NonlinearFactor::shared_ptr> components = {model0, model1};
 
   KeyVector keys = {X(0), X(1)};
   graph.emplace_shared<MixtureFactor>(keys, modes, components);
 
-  initial.insert(X(0), 0.0);
-  initial.insert(X(1), 0.0);
+  initial.insert(X(0), Z_3x1);
+  initial.insert(X(1), Z_3x1);
 
   auto gfg = graph.linearize(initial);
   gfg = addConstantTerm(*gfg, M(0), noise_tight, noise_loose, d, 1);

From 05f741e08361b55a212adf197cb211275e58ab32 Mon Sep 17 00:00:00 2001
From: Varun Agrawal
Date: Wed, 1 Mar 2023 16:25:44 -0500
Subject: [PATCH 4/7] include bitset header

---
 gtsam/hybrid/tests/testHybridEstimation.cpp | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/gtsam/hybrid/tests/testHybridEstimation.cpp b/gtsam/hybrid/tests/testHybridEstimation.cpp
index c917ffff9..f41bee103 100644
--- a/gtsam/hybrid/tests/testHybridEstimation.cpp
+++ b/gtsam/hybrid/tests/testHybridEstimation.cpp
@@ -36,6 +36,8 @@
 // Include for test suite
 #include <CppUnitLite/TestHarness.h>
 
+#include <bitset>
+
 #include "Switching.h"
 
 using namespace std;

From 9211dddf6cdc45d9d52de9c06344ef3cedcba3ad Mon Sep 17 00:00:00 2001
From: Varun Agrawal
Date: Sat, 4 Mar 2023 11:50:41 -0500
Subject: [PATCH 5/7] improve the mixture factor handling so it uses the
 factor directly

---
 gtsam/hybrid/tests/testHybridEstimation.cpp | 70 ++++++++++-----------
 1 file changed, 34 insertions(+), 36 deletions(-)

diff --git a/gtsam/hybrid/tests/testHybridEstimation.cpp b/gtsam/hybrid/tests/testHybridEstimation.cpp
index f41bee103..836b5cb23 100644
--- a/gtsam/hybrid/tests/testHybridEstimation.cpp
+++ b/gtsam/hybrid/tests/testHybridEstimation.cpp
@@ -508,47 +508,40 @@ TEST(HybridEstimation, CorrectnessViaSampling) {
  * Helper function to add the constant term corresponding to
 * the difference in noise models.
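+ * Each of the d entries of the appended vector c satisfies
+ * 0.5*c(i)^2 = log(noise_loose/noise_tight), so the extra unary factor adds
+ * d*log(noise_loose/noise_tight) to the loose component's error, which is the
+ * gap between the log-normalization constants of the tight and loose models.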
 */
-std::shared_ptr<HybridGaussianFactorGraph> addConstantTerm(
-    const HybridGaussianFactorGraph& gfg, const Key& mode, double noise_tight,
-    double noise_loose, size_t d, size_t tight_index) {
-  HybridGaussianFactorGraph updated_gfg;
+std::shared_ptr<GaussianMixtureFactor> mixedVarianceFactor(
+    const MixtureFactor& mf, const Values& initial, const Key& mode,
+    double noise_tight, double noise_loose, size_t d, size_t tight_index) {
+  GaussianMixtureFactor::shared_ptr gmf = mf.linearize(initial);
 
   constexpr double log2pi = 1.8378770664093454835606594728112;
   // logConstant will be of the tighter model
   double logNormalizationConstant = log(1.0 / noise_tight);
   double logConstant = -0.5 * d * log2pi + logNormalizationConstant;
 
-  for (auto&& f : gfg) {
-    if (auto gmf = dynamic_pointer_cast<GaussianMixtureFactor>(f)) {
-      auto func = [&](const Assignment<Key>& assignment,
-                      const GaussianFactor::shared_ptr& gf) {
-        if (assignment.at(mode) != tight_index) {
-          double factor_log_constant =
-              -0.5 * d * log2pi + log(1.0 / noise_loose);
+  auto func = [&](const Assignment<Key>& assignment,
+                  const GaussianFactor::shared_ptr& gf) {
+    if (assignment.at(mode) != tight_index) {
+      double factor_log_constant = -0.5 * d * log2pi + log(1.0 / noise_loose);
 
-          GaussianFactorGraph gfg_;
-          gfg_.push_back(gf);
-          Vector c(d);
-          for (size_t i = 0; i < d; i++) {
-            c(i) = std::sqrt(2.0 * (logConstant - factor_log_constant));
-          }
+      GaussianFactorGraph _gfg;
+      _gfg.push_back(gf);
+      Vector c(d);
+      for (size_t i = 0; i < d; i++) {
+        c(i) = std::sqrt(2.0 * (logConstant - factor_log_constant));
+      }
 
-          auto constantFactor = std::make_shared<JacobianFactor>(c);
-          gfg_.push_back(constantFactor);
-          return std::make_shared<JacobianFactor>(gfg_);
-        } else {
-          return dynamic_pointer_cast<JacobianFactor>(gf);
-        }
-      };
-      auto updated_factors = gmf->factors().apply(func);
-      auto updated_gmf = std::make_shared<GaussianMixtureFactor>(
-          gmf->continuousKeys(), gmf->discreteKeys(), updated_factors);
-      updated_gfg.add(updated_gmf);
+      auto constantFactor = std::make_shared<JacobianFactor>(c);
+      _gfg.push_back(constantFactor);
+      return std::make_shared<JacobianFactor>(_gfg);
     } else {
-      updated_gfg.add(f);
+      return dynamic_pointer_cast<JacobianFactor>(gf);
     }
-  }
-  return std::make_shared<HybridGaussianFactorGraph>(updated_gfg);
+  };
+  auto updated_components = gmf->factors().apply(func);
+  auto updated_gmf = std::make_shared<GaussianMixtureFactor>(
+      gmf->continuousKeys(), gmf->discreteKeys(), updated_components);
+
+  return updated_gmf;
 }
 
 /****************************************************************************/
@@ -577,14 +570,16 @@ TEST(HybridEstimation, ModeSelection) {
   std::vector<NonlinearFactor::shared_ptr> components = {model0, model1};
 
   KeyVector keys = {X(0), X(1)};
-  graph.emplace_shared<MixtureFactor>(keys, modes, components);
+  MixtureFactor mf(keys, modes, components);
 
   initial.insert(X(0), 0.0);
   initial.insert(X(1), 0.0);
 
-  auto gfg = graph.linearize(initial);
+  auto gmf =
+      mixedVarianceFactor(mf, initial, M(0), noise_tight, noise_loose, d, 1);
+  graph.add(gmf);
 
-  gfg = addConstantTerm(*gfg, M(0), noise_tight, noise_loose, d, 1);
+  auto gfg = graph.linearize(initial);
 
   HybridBayesNet::shared_ptr bayesNet = gfg->eliminateSequential();
 
@@ -673,13 +668,16 @@ TEST(HybridEstimation, ModeSelection2) {
   std::vector<NonlinearFactor::shared_ptr> components = {model0, model1};
 
   KeyVector keys = {X(0), X(1)};
-  graph.emplace_shared<MixtureFactor>(keys, modes, components);
+  MixtureFactor mf(keys, modes, components);
 
   initial.insert(X(0), Z_3x1);
   initial.insert(X(1), Z_3x1);
 
+  auto gmf =
+      mixedVarianceFactor(mf, initial, M(0), noise_tight, noise_loose, d, 1);
+  graph.add(gmf);
+
   auto gfg = graph.linearize(initial);
-  gfg = addConstantTerm(*gfg, M(0), noise_tight, noise_loose, d, 1);
 
   HybridBayesNet::shared_ptr bayesNet = gfg->eliminateSequential();

From 3f89c7af4988cbf4c26c3b1cb05f9856b77e9d5e Mon Sep 17 00:00:00 2001
From: Varun Agrawal
Date: Sat, 4 Mar 2023 11:50:51 -0500
Subject: [PATCH 6/7] fix typo

---
 gtsam/hybrid/HybridSmoother.cpp | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/gtsam/hybrid/HybridSmoother.cpp b/gtsam/hybrid/HybridSmoother.cpp
index 549c71714..43e75770d 100644
--- a/gtsam/hybrid/HybridSmoother.cpp
+++ b/gtsam/hybrid/HybridSmoother.cpp
@@ -45,7 +45,7 @@ Ordering HybridSmoother::getOrdering(
   std::copy(allDiscrete.begin(), allDiscrete.end(),
             std::back_inserter(newKeysDiscreteLast));
 
-  const VariableIndex index(newFactors);
+  const VariableIndex index(factors);
 
   // Get an ordering where the new keys are eliminated last
   Ordering ordering = Ordering::ColamdConstrainedLast(

From 28217ea7679ce75f4910c7dd89f113a83a11c41e Mon Sep 17 00:00:00 2001
From: Varun Agrawal
Date: Sat, 4 Mar 2023 20:28:28 -0500
Subject: [PATCH 7/7] some fixes

---
 gtsam/hybrid/GaussianMixtureFactor.h        |  2 +-
 gtsam/hybrid/tests/testHybridEstimation.cpp | 13 ++-----------
 2 files changed, 3 insertions(+), 12 deletions(-)

diff --git a/gtsam/hybrid/GaussianMixtureFactor.h b/gtsam/hybrid/GaussianMixtureFactor.h
index 8e1a95e9d..1325cfe93 100644
--- a/gtsam/hybrid/GaussianMixtureFactor.h
+++ b/gtsam/hybrid/GaussianMixtureFactor.h
@@ -144,7 +144,7 @@ class GTSAM_EXPORT GaussianMixtureFactor : public HybridFactor {
   double error(const HybridValues &values) const override;
 
   /// Getter for GaussianFactor decision tree
-  Factors factors() const { return factors_; }
+  const Factors &factors() const { return factors_; }
 
   /// Add MixtureFactor to a Sum, syntactic sugar.
   friend GaussianFactorGraphTree &operator+=(
diff --git a/gtsam/hybrid/tests/testHybridEstimation.cpp b/gtsam/hybrid/tests/testHybridEstimation.cpp
index 836b5cb23..e74990fe6 100644
--- a/gtsam/hybrid/tests/testHybridEstimation.cpp
+++ b/gtsam/hybrid/tests/testHybridEstimation.cpp
@@ -530,8 +530,7 @@ std::shared_ptr<GaussianMixtureFactor> mixedVarianceFactor(
         c(i) = std::sqrt(2.0 * (logConstant - factor_log_constant));
       }
 
-      auto constantFactor = std::make_shared<JacobianFactor>(c);
-      _gfg.push_back(constantFactor);
+      _gfg.emplace_shared<JacobianFactor>(c);
       return std::make_shared<JacobianFactor>(_gfg);
     } else {
       return dynamic_pointer_cast<JacobianFactor>(gf);
@@ -607,12 +606,6 @@ TEST(HybridEstimation, ModeSelection) {
   auto fg = bn.toFactorGraph(vv);
   auto expected_posterior = fg.eliminateSequential();
 
-  // graph.print();
-  // gfg->print("Original Gaussian Factor Graph:");
-  // fg.print("\n\nFrom Bayes Net");
-
-  // bayesNet->print("\n\nBayes Net from FG");
-  // expected_posterior->print("\n\n");
   EXPECT(assert_equal(*expected_posterior, *bayesNet, 1e-6));
 }
 
@@ -644,9 +637,8 @@ TEST(HybridEstimation, ModeSelection2) {
   auto fg = bn.toFactorGraph(vv);
 
   auto expected_posterior = fg.eliminateSequential();
-  // expected_posterior->print("\n\n\nLikelihood BN:");
 
-  // std::cout << "\n\n==================\n\n" << std::endl;
+  // =====================================
   HybridNonlinearFactorGraph graph;
   Values initial;
 
@@ -681,7 +673,6 @@ TEST(HybridEstimation, ModeSelection2) {
 
   HybridBayesNet::shared_ptr bayesNet = gfg->eliminateSequential();
 
-  // bayesNet->print();
   EXPECT(assert_equal(*expected_posterior, *bayesNet, 1e-6));
 }