From 9e9172b17a87a1aa4af3a0435f6c6b393617c931 Mon Sep 17 00:00:00 2001
From: Frank Dellaert
Date: Thu, 5 Jan 2023 11:52:56 -0800
Subject: [PATCH] Fix all tests

---
 gtsam/hybrid/HybridSmoother.cpp               |  4 +-
 gtsam/hybrid/tests/TinyHybridExample.h        | 16 +++----
 gtsam/hybrid/tests/testHybridBayesNet.cpp     | 38 +++++++++--------
 gtsam/hybrid/tests/testHybridEstimation.cpp   |  2 +-
 .../tests/testHybridGaussianFactorGraph.cpp   | 42 +++++++++----------
 5 files changed, 52 insertions(+), 50 deletions(-)

diff --git a/gtsam/hybrid/HybridSmoother.cpp b/gtsam/hybrid/HybridSmoother.cpp
index ef77a2413..35dd5f88b 100644
--- a/gtsam/hybrid/HybridSmoother.cpp
+++ b/gtsam/hybrid/HybridSmoother.cpp
@@ -46,7 +46,7 @@ void HybridSmoother::update(HybridGaussianFactorGraph graph,
   }
 
   // Add the partial bayes net to the posterior bayes net.
-  hybridBayesNet_.push_back(*bayesNetFragment);
+  hybridBayesNet_.add(*bayesNetFragment);
 }
 
 /* ************************************************************************* */
@@ -100,7 +100,7 @@ HybridSmoother::addConditionals(const HybridGaussianFactorGraph &originalGraph,
 /* ************************************************************************* */
 GaussianMixture::shared_ptr HybridSmoother::gaussianMixture(
     size_t index) const {
-  return hybridBayesNet_.atMixture(index);
+  return hybridBayesNet_.at(index)->asMixture();
 }
 
 /* ************************************************************************* */
diff --git a/gtsam/hybrid/tests/TinyHybridExample.h b/gtsam/hybrid/tests/TinyHybridExample.h
index a427d2042..c34d55bf0 100644
--- a/gtsam/hybrid/tests/TinyHybridExample.h
+++ b/gtsam/hybrid/tests/TinyHybridExample.h
@@ -43,22 +43,22 @@ inline HybridBayesNet createHybridBayesNet(int num_measurements = 1,
   // Create Gaussian mixture z_i = x0 + noise for each measurement.
   for (int i = 0; i < num_measurements; i++) {
     const auto mode_i = manyModes ? DiscreteKey{M(i), 2} : mode;
-    GaussianMixture gm({Z(i)}, {X(0)}, {mode_i},
-                       {GaussianConditional::sharedMeanAndStddev(
-                            Z(i), I_1x1, X(0), Z_1x1, 0.5),
-                        GaussianConditional::sharedMeanAndStddev(
-                            Z(i), I_1x1, X(0), Z_1x1, 3)});
-    bayesNet.emplaceMixture(gm);  // copy :-(
+    bayesNet.emplace_back(
+        new GaussianMixture({Z(i)}, {X(0)}, {mode_i},
+                            {GaussianConditional::sharedMeanAndStddev(
+                                 Z(i), I_1x1, X(0), Z_1x1, 0.5),
+                             GaussianConditional::sharedMeanAndStddev(
+                                 Z(i), I_1x1, X(0), Z_1x1, 3)}));
   }
 
   // Create prior on X(0).
-  bayesNet.addGaussian(
+  bayesNet.push_back(
       GaussianConditional::sharedMeanAndStddev(X(0), Vector1(5.0), 0.5));
 
   // Add prior on mode.
   const size_t nrModes = manyModes ? num_measurements : 1;
   for (int i = 0; i < nrModes; i++) {
-    bayesNet.emplaceDiscrete(DiscreteKey{M(i), 2}, "4/6");
+    bayesNet.emplace_back(new DiscreteConditional({M(i), 2}, "4/6"));
   }
   return bayesNet;
 }
diff --git a/gtsam/hybrid/tests/testHybridBayesNet.cpp b/gtsam/hybrid/tests/testHybridBayesNet.cpp
index ef552bd92..7d1b9130d 100644
--- a/gtsam/hybrid/tests/testHybridBayesNet.cpp
+++ b/gtsam/hybrid/tests/testHybridBayesNet.cpp
@@ -42,21 +42,21 @@ static const DiscreteKey Asia(asiaKey, 2);
 // Test creation of a pure discrete Bayes net.
 TEST(HybridBayesNet, Creation) {
   HybridBayesNet bayesNet;
-  bayesNet.emplaceDiscrete(Asia, "99/1");
+  bayesNet.emplace_back(new DiscreteConditional(Asia, "99/1"));
 
   DiscreteConditional expected(Asia, "99/1");
-  CHECK(bayesNet.atDiscrete(0));
-  EXPECT(assert_equal(expected, *bayesNet.atDiscrete(0)));
+  CHECK(bayesNet.at(0)->asDiscrete());
+  EXPECT(assert_equal(expected, *bayesNet.at(0)->asDiscrete()));
 }
 
 /* ****************************************************************************/
 // Test adding a Bayes net to another one.
 TEST(HybridBayesNet, Add) {
   HybridBayesNet bayesNet;
-  bayesNet.emplaceDiscrete(Asia, "99/1");
+  bayesNet.emplace_back(new DiscreteConditional(Asia, "99/1"));
 
   HybridBayesNet other;
-  other.push_back(bayesNet);
+  other.add(bayesNet);
   EXPECT(bayesNet.equals(other));
 }
 
@@ -64,7 +64,7 @@ TEST(HybridBayesNet, Add) {
 // Test evaluate for a pure discrete Bayes net P(Asia).
 TEST(HybridBayesNet, EvaluatePureDiscrete) {
   HybridBayesNet bayesNet;
-  bayesNet.emplaceDiscrete(Asia, "99/1");
+  bayesNet.emplace_back(new DiscreteConditional(Asia, "99/1"));
   HybridValues values;
   values.insert(asiaKey, 0);
   EXPECT_DOUBLES_EQUAL(0.99, bayesNet.evaluate(values), 1e-9);
@@ -80,7 +80,7 @@ TEST(HybridBayesNet, Tiny) {
 /* ****************************************************************************/
 // Test evaluate for a hybrid Bayes net P(X0|X1) P(X1|Asia) P(Asia).
 TEST(HybridBayesNet, evaluateHybrid) {
-  const auto continuousConditional = GaussianConditional::FromMeanAndStddev(
+  const auto continuousConditional = GaussianConditional::sharedMeanAndStddev(
       X(0), 2 * I_1x1, X(1), Vector1(-4.0), 5.0);
 
   const SharedDiagonal model0 = noiseModel::Diagonal::Sigmas(Vector1(2.0)),
@@ -93,10 +93,11 @@ TEST(HybridBayesNet, evaluateHybrid) {
 
   // Create hybrid Bayes net.
   HybridBayesNet bayesNet;
-  bayesNet.emplaceGaussian(continuousConditional);
-  GaussianMixture gm({X(1)}, {}, {Asia}, {conditional0, conditional1});
-  bayesNet.emplaceMixture(gm);  // copy :-(
-  bayesNet.emplaceDiscrete(Asia, "99/1");
+  bayesNet.push_back(GaussianConditional::sharedMeanAndStddev(
+      X(0), 2 * I_1x1, X(1), Vector1(-4.0), 5.0));
+  bayesNet.emplace_back(
+      new GaussianMixture({X(1)}, {}, {Asia}, {conditional0, conditional1}));
+  bayesNet.emplace_back(new DiscreteConditional(Asia, "99/1"));
 
   // Create values at which to evaluate.
   HybridValues values;
@@ -105,7 +106,7 @@ TEST(HybridBayesNet, evaluateHybrid) {
   values.insert(X(1), Vector1(1));
 
   const double conditionalProbability =
-      continuousConditional.evaluate(values.continuous());
+      continuousConditional->evaluate(values.continuous());
   const double mixtureProbability = conditional0->evaluate(values.continuous());
   EXPECT_DOUBLES_EQUAL(conditionalProbability * mixtureProbability * 0.99,
                        bayesNet.evaluate(values), 1e-9);
@@ -136,16 +137,16 @@ TEST(HybridBayesNet, Choose) {
   EXPECT_LONGS_EQUAL(4, gbn.size());
 
   EXPECT(assert_equal(*(*boost::dynamic_pointer_cast<GaussianMixture>(
-                            hybridBayesNet->atMixture(0)))(assignment),
+                            hybridBayesNet->at(0)->asMixture()))(assignment),
                       *gbn.at(0)));
   EXPECT(assert_equal(*(*boost::dynamic_pointer_cast<GaussianMixture>(
-                            hybridBayesNet->atMixture(1)))(assignment),
+                            hybridBayesNet->at(1)->asMixture()))(assignment),
                       *gbn.at(1)));
   EXPECT(assert_equal(*(*boost::dynamic_pointer_cast<GaussianMixture>(
-                            hybridBayesNet->atMixture(2)))(assignment),
+                            hybridBayesNet->at(2)->asMixture()))(assignment),
                       *gbn.at(2)));
   EXPECT(assert_equal(*(*boost::dynamic_pointer_cast<GaussianMixture>(
-                            hybridBayesNet->atMixture(3)))(assignment),
+                            hybridBayesNet->at(3)->asMixture()))(assignment),
                       *gbn.at(3)));
 }
 
@@ -247,11 +248,11 @@ TEST(HybridBayesNet, Error) {
   double total_error = 0;
   for (size_t idx = 0; idx < hybridBayesNet->size(); idx++) {
     if (hybridBayesNet->at(idx)->isHybrid()) {
-      double error = hybridBayesNet->atMixture(idx)->error(
+      double error = hybridBayesNet->at(idx)->asMixture()->error(
           {delta.continuous(), discrete_values});
       total_error += error;
     } else if (hybridBayesNet->at(idx)->isContinuous()) {
-      double error = hybridBayesNet->atGaussian(idx)->error(delta.continuous());
+      double error =
+          hybridBayesNet->at(idx)->asGaussian()->error(delta.continuous());
       total_error += error;
     }
   }
diff --git a/gtsam/hybrid/tests/testHybridEstimation.cpp b/gtsam/hybrid/tests/testHybridEstimation.cpp
index 4a53a3141..84f686c59 100644
--- a/gtsam/hybrid/tests/testHybridEstimation.cpp
+++ b/gtsam/hybrid/tests/testHybridEstimation.cpp
@@ -310,7 +310,7 @@ TEST(HybridEstimation, Probability) {
   for (auto discrete_conditional : *discreteBayesNet) {
     bayesNet->add(discrete_conditional);
   }
-  auto discreteConditional = discreteBayesNet->atDiscrete(0);
+  auto discreteConditional = discreteBayesNet->at(0)->asDiscrete();
 
   HybridValues hybrid_values = bayesNet->optimize();
 
diff --git a/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp b/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp
index cab867715..422cdf64e 100644
--- a/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp
+++ b/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp
@@ -677,11 +677,11 @@ TEST(HybridGaussianFactorGraph, EliminateTiny1) {
                  X(0), Vector1(14.1421), I_1x1 * 2.82843),
              conditional1 = boost::make_shared<GaussianConditional>(
                  X(0), Vector1(10.1379), I_1x1 * 2.02759);
-  GaussianMixture gm({X(0)}, {}, {mode}, {conditional0, conditional1});
-  expectedBayesNet.emplaceMixture(gm);  // copy :-(
+  expectedBayesNet.emplace_back(
+      new GaussianMixture({X(0)}, {}, {mode}, {conditional0, conditional1}));
 
   // Add prior on mode.
-  expectedBayesNet.emplaceDiscrete(mode, "74/26");
+  expectedBayesNet.emplace_back(new DiscreteConditional(mode, "74/26"));
 
   // Test elimination
   Ordering ordering;
@@ -712,11 +712,11 @@ TEST(HybridGaussianFactorGraph, EliminateTiny2) {
                  X(0), Vector1(17.3205), I_1x1 * 3.4641),
              conditional1 = boost::make_shared<GaussianConditional>(
                  X(0), Vector1(10.274), I_1x1 * 2.0548);
-  GaussianMixture gm({X(0)}, {}, {mode}, {conditional0, conditional1});
-  expectedBayesNet.emplaceMixture(gm);  // copy :-(
+  expectedBayesNet.emplace_back(
+      new GaussianMixture({X(0)}, {}, {mode}, {conditional0, conditional1}));
 
   // Add prior on mode.
-  expectedBayesNet.emplaceDiscrete(mode, "23/77");
+  expectedBayesNet.emplace_back(new DiscreteConditional(mode, "23/77"));
 
   // Test elimination
   Ordering ordering;
@@ -784,34 +784,34 @@ TEST(HybridGaussianFactorGraph, EliminateSwitchingNetwork) {
   for (size_t t : {0, 1, 2}) {
     // Create Gaussian mixture on Z(t) conditioned on X(t) and mode N(t):
     const auto noise_mode_t = DiscreteKey{N(t), 2};
-    GaussianMixture gm({Z(t)}, {X(t)}, {noise_mode_t},
-                       {GaussianConditional::sharedMeanAndStddev(
-                            Z(t), I_1x1, X(t), Z_1x1, 0.5),
-                        GaussianConditional::sharedMeanAndStddev(
-                            Z(t), I_1x1, X(t), Z_1x1, 3.0)});
-    bn.emplaceMixture(gm);  // copy :-(
+    bn.emplace_back(
+        new GaussianMixture({Z(t)}, {X(t)}, {noise_mode_t},
+                            {GaussianConditional::sharedMeanAndStddev(
+                                 Z(t), I_1x1, X(t), Z_1x1, 0.5),
+                             GaussianConditional::sharedMeanAndStddev(
+                                 Z(t), I_1x1, X(t), Z_1x1, 3.0)}));
 
     // Create prior on discrete mode M(t):
-    bn.emplaceDiscrete(noise_mode_t, "20/80");
+    bn.emplace_back(new DiscreteConditional(noise_mode_t, "20/80"));
   }
 
   // Add motion models:
   for (size_t t : {2, 1}) {
     // Create Gaussian mixture on X(t) conditioned on X(t-1) and mode M(t-1):
     const auto motion_model_t = DiscreteKey{M(t), 2};
-    GaussianMixture gm({X(t)}, {X(t - 1)}, {motion_model_t},
-                       {GaussianConditional::sharedMeanAndStddev(
-                            X(t), I_1x1, X(t - 1), Z_1x1, 0.2),
-                        GaussianConditional::sharedMeanAndStddev(
-                            X(t), I_1x1, X(t - 1), I_1x1, 0.2)});
-    bn.emplaceMixture(gm);  // copy :-(
+    bn.emplace_back(
+        new GaussianMixture({X(t)}, {X(t - 1)}, {motion_model_t},
+                            {GaussianConditional::sharedMeanAndStddev(
+                                 X(t), I_1x1, X(t - 1), Z_1x1, 0.2),
+                             GaussianConditional::sharedMeanAndStddev(
+                                 X(t), I_1x1, X(t - 1), I_1x1, 0.2)}));
 
     // Create prior on motion model M(t):
-    bn.emplaceDiscrete(motion_model_t, "40/60");
+    bn.emplace_back(new DiscreteConditional(motion_model_t, "40/60"));
   }
 
   // Create Gaussian prior on continuous X(0) using sharedMeanAndStddev:
-  bn.addGaussian(GaussianConditional::sharedMeanAndStddev(X(0), Z_1x1, 0.1));
+  bn.push_back(GaussianConditional::sharedMeanAndStddev(X(0), Z_1x1, 0.1));
 
   // Make sure we an sample from the Bayes net:
   EXPECT_LONGS_EQUAL(6, bn.sample().continuous().size());