diff --git a/gtsam/hybrid/tests/TinyHybridExample.h b/gtsam/hybrid/tests/TinyHybridExample.h
index 39a1a1a9e..26b83db29 100644
--- a/gtsam/hybrid/tests/TinyHybridExample.h
+++ b/gtsam/hybrid/tests/TinyHybridExample.h
@@ -43,12 +43,12 @@ inline HybridBayesNet createHybridBayesNet(size_t num_measurements = 1,
   // Create Gaussian mixture z_i = x0 + noise for each measurement.
   for (size_t i = 0; i < num_measurements; i++) {
     const auto mode_i = manyModes ? DiscreteKey{M(i), 2} : mode;
-    bayesNet.emplace_back(
-        new GaussianMixture({Z(i)}, {X(0)}, {mode_i},
-                            {GaussianConditional::sharedMeanAndStddev(
-                                 Z(i), I_1x1, X(0), Z_1x1, 0.5),
-                             GaussianConditional::sharedMeanAndStddev(
-                                 Z(i), I_1x1, X(0), Z_1x1, 3)}));
+    bayesNet.emplace_shared<GaussianMixture>(
+        KeyVector{Z(i)}, KeyVector{X(0)}, DiscreteKeys{mode_i},
+        std::vector{GaussianConditional::sharedMeanAndStddev(Z(i), I_1x1, X(0),
+                                                             Z_1x1, 0.5),
+                    GaussianConditional::sharedMeanAndStddev(Z(i), I_1x1, X(0),
+                                                             Z_1x1, 3)});
   }
 
   // Create prior on X(0).
@@ -58,7 +58,7 @@ inline HybridBayesNet createHybridBayesNet(size_t num_measurements = 1,
   // Add prior on mode.
   const size_t nrModes = manyModes ? num_measurements : 1;
   for (size_t i = 0; i < nrModes; i++) {
-    bayesNet.emplace_back(new DiscreteConditional({M(i), 2}, "4/6"));
+    bayesNet.emplace_shared<DiscreteConditional>(DiscreteKey{M(i), 2}, "4/6");
   }
   return bayesNet;
 }
@@ -70,8 +70,7 @@ inline HybridBayesNet createHybridBayesNet(size_t num_measurements = 1,
  * the generative Bayes net model HybridBayesNet::Example(num_measurements)
  */
 inline HybridGaussianFactorGraph createHybridGaussianFactorGraph(
-    size_t num_measurements = 1,
-    std::optional<VectorValues> measurements = {},
+    size_t num_measurements = 1, std::optional<VectorValues> measurements = {},
     bool manyModes = false) {
   auto bayesNet = createHybridBayesNet(num_measurements, manyModes);
   if (measurements) {
diff --git a/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp b/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp
index 5ec600aa8..fcd9dd08f 100644
--- a/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp
+++ b/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp
@@ -227,12 +227,12 @@ static HybridBayesNet GetGaussianMixtureModel(double mu0, double mu1,
   auto c0 = make_shared<GaussianConditional>(z, Vector1(mu0), I_1x1, model0),
        c1 = make_shared<GaussianConditional>(z, Vector1(mu1), I_1x1, model1);
 
-  auto gm = new GaussianMixture({z}, {}, {m}, {c0, c1});
-
-  auto mixing = make_shared<DiscreteConditional>(m, "0.5/0.5");
   HybridBayesNet hbn;
-  hbn.emplace_back(gm);
+  hbn.emplace_shared<GaussianMixture>(KeyVector{z}, KeyVector{},
+                                      DiscreteKeys{m}, std::vector{c0, c1});
+
+  auto mixing = make_shared<DiscreteConditional>(m, "0.5/0.5");
   hbn.push_back(mixing);
 
   return hbn;
@@ -278,7 +278,7 @@ TEST(GaussianMixtureFactor, GaussianMixtureModel) {
 
   // At the halfway point between the means, we should get P(m|z)=0.5
   HybridBayesNet expected;
-  expected.emplace_back(new DiscreteConditional(m, "0.5/0.5"));
+  expected.emplace_shared<DiscreteConditional>(m, "0.5/0.5");
 
   EXPECT(assert_equal(expected, *bn));
 }
@@ -350,10 +350,10 @@ TEST(GaussianMixtureFactor, GaussianMixtureModel2) {
 
   // At the halfway point between the means
   HybridBayesNet expected;
-  expected.emplace_back(new DiscreteConditional(
-      m, {},
+  expected.emplace_shared<DiscreteConditional>(
+      m, DiscreteKeys{},
       vector{prob_m_z(mu1, mu0, sigma1, sigma0, m1_high),
-             prob_m_z(mu0, mu1, sigma0, sigma1, m1_high)}));
+             prob_m_z(mu0, mu1, sigma0, sigma1, m1_high)});
 
   EXPECT(assert_equal(expected, *bn));
 }
@@ -401,9 +401,9 @@ static HybridBayesNet CreateBayesNet(double mu0, double mu1, double sigma0,
   auto measurement_model = noiseModel::Isotropic::Sigma(1, measurement_sigma);
 
   // Add measurement P(z0 | x0)
-  auto p_z0 = new GaussianConditional(z0, Vector1(0.0), -I_1x1, x0, I_1x1,
-                                      measurement_model);
-  hbn.emplace_back(p_z0);
+  auto p_z0 = std::make_shared<GaussianConditional>(
+      z0, Vector1(0.0), -I_1x1, x0, I_1x1, measurement_model);
+  hbn.push_back(p_z0);
 
   // Add hybrid motion model
   auto model0 = noiseModel::Isotropic::Sigma(1, sigma0);
@@ -413,19 +413,20 @@ static HybridBayesNet CreateBayesNet(double mu0, double mu1, double sigma0,
        c1 = make_shared<GaussianConditional>(x1, Vector1(mu1), I_1x1, x0,
                                              -I_1x1, model1);
 
-  auto motion = new GaussianMixture({x1}, {x0}, {m1}, {c0, c1});
-  hbn.emplace_back(motion);
+  auto motion = std::make_shared<GaussianMixture>(
+      KeyVector{x1}, KeyVector{x0}, DiscreteKeys{m1}, std::vector{c0, c1});
+  hbn.push_back(motion);
 
   if (add_second_measurement) {
     // Add second measurement
-    auto p_z1 = new GaussianConditional(z1, Vector1(0.0), -I_1x1, x1, I_1x1,
-                                        measurement_model);
-    hbn.emplace_back(p_z1);
+    auto p_z1 = std::make_shared<GaussianConditional>(
+        z1, Vector1(0.0), -I_1x1, x1, I_1x1, measurement_model);
+    hbn.push_back(p_z1);
   }
 
   // Discrete uniform prior.
-  auto p_m1 = new DiscreteConditional(m1, "0.5/0.5");
-  hbn.emplace_back(p_m1);
+  auto p_m1 = std::make_shared<DiscreteConditional>(m1, "0.5/0.5");
+  hbn.push_back(p_m1);
 
   return hbn;
 }
diff --git a/gtsam/hybrid/tests/testHybridBayesNet.cpp b/gtsam/hybrid/tests/testHybridBayesNet.cpp
index 00dc36cd0..3a7e008d8 100644
--- a/gtsam/hybrid/tests/testHybridBayesNet.cpp
+++ b/gtsam/hybrid/tests/testHybridBayesNet.cpp
@@ -43,7 +43,7 @@ static const DiscreteKey Asia(asiaKey, 2);
 // Test creation of a pure discrete Bayes net.
 TEST(HybridBayesNet, Creation) {
   HybridBayesNet bayesNet;
-  bayesNet.emplace_back(new DiscreteConditional(Asia, "99/1"));
+  bayesNet.emplace_shared<DiscreteConditional>(Asia, "99/1");
 
   DiscreteConditional expected(Asia, "99/1");
   CHECK(bayesNet.at(0)->asDiscrete());
@@ -54,7 +54,7 @@ TEST(HybridBayesNet, Creation) {
 // Test adding a Bayes net to another one.
 TEST(HybridBayesNet, Add) {
   HybridBayesNet bayesNet;
-  bayesNet.emplace_back(new DiscreteConditional(Asia, "99/1"));
+  bayesNet.emplace_shared<DiscreteConditional>(Asia, "99/1");
 
   HybridBayesNet other;
   other.add(bayesNet);
@@ -65,7 +65,7 @@ TEST(HybridBayesNet, Add) {
 // Test evaluate for a pure discrete Bayes net P(Asia).
 TEST(HybridBayesNet, EvaluatePureDiscrete) {
   HybridBayesNet bayesNet;
-  bayesNet.emplace_back(new DiscreteConditional(Asia, "4/6"));
+  bayesNet.emplace_shared<DiscreteConditional>(Asia, "4/6");
   HybridValues values;
   values.insert(asiaKey, 0);
   EXPECT_DOUBLES_EQUAL(0.4, bayesNet.evaluate(values), 1e-9);
@@ -107,9 +107,10 @@ TEST(HybridBayesNet, evaluateHybrid) {
   // Create hybrid Bayes net.
   HybridBayesNet bayesNet;
   bayesNet.push_back(continuousConditional);
-  bayesNet.emplace_back(
-      new GaussianMixture({X(1)}, {}, {Asia}, {conditional0, conditional1}));
-  bayesNet.emplace_back(new DiscreteConditional(Asia, "99/1"));
+  bayesNet.emplace_shared<GaussianMixture>(
+      KeyVector{X(1)}, KeyVector{}, DiscreteKeys{Asia},
+      std::vector{conditional0, conditional1});
+  bayesNet.emplace_shared<DiscreteConditional>(Asia, "99/1");
 
   // Create values at which to evaluate.
   HybridValues values;
@@ -167,13 +168,14 @@ TEST(HybridBayesNet, Error) {
       conditional1 = std::make_shared<GaussianConditional>(
           X(1), Vector1::Constant(2), I_1x1, model1);
 
-  auto gm =
-      new GaussianMixture({X(1)}, {}, {Asia}, {conditional0, conditional1});
+  auto gm = std::make_shared<GaussianMixture>(
+      KeyVector{X(1)}, KeyVector{}, DiscreteKeys{Asia},
+      std::vector{conditional0, conditional1});
 
   // Create hybrid Bayes net.
   HybridBayesNet bayesNet;
   bayesNet.push_back(continuousConditional);
-  bayesNet.emplace_back(gm);
-  bayesNet.emplace_back(new DiscreteConditional(Asia, "99/1"));
+  bayesNet.push_back(gm);
+  bayesNet.emplace_shared<DiscreteConditional>(Asia, "99/1");
 
   // Create values at which to evaluate.
   HybridValues values;
diff --git a/gtsam/hybrid/tests/testHybridEstimation.cpp b/gtsam/hybrid/tests/testHybridEstimation.cpp
index 1cc28b386..bdc298762 100644
--- a/gtsam/hybrid/tests/testHybridEstimation.cpp
+++ b/gtsam/hybrid/tests/testHybridEstimation.cpp
@@ -616,12 +616,12 @@ TEST(HybridEstimation, ModeSelection) {
       GaussianConditional::sharedMeanAndStddev(Z(0), -I_1x1, X(0), Z_1x1, 0.1));
   bn.push_back(
       GaussianConditional::sharedMeanAndStddev(Z(0), -I_1x1, X(1), Z_1x1, 0.1));
-  bn.emplace_back(new GaussianMixture(
-      {Z(0)}, {X(0), X(1)}, {mode},
-      {GaussianConditional::sharedMeanAndStddev(Z(0), I_1x1, X(0), -I_1x1, X(1),
-                                                Z_1x1, noise_loose),
-       GaussianConditional::sharedMeanAndStddev(Z(0), I_1x1, X(0), -I_1x1, X(1),
-                                                Z_1x1, noise_tight)}));
+  bn.emplace_shared<GaussianMixture>(
+      KeyVector{Z(0)}, KeyVector{X(0), X(1)}, DiscreteKeys{mode},
+      std::vector{GaussianConditional::sharedMeanAndStddev(
+                      Z(0), I_1x1, X(0), -I_1x1, X(1), Z_1x1, noise_loose),
+                  GaussianConditional::sharedMeanAndStddev(
+                      Z(0), I_1x1, X(0), -I_1x1, X(1), Z_1x1, noise_tight)});
 
   VectorValues vv;
   vv.insert(Z(0), Z_1x1);
@@ -647,12 +647,12 @@ TEST(HybridEstimation, ModeSelection2) {
       GaussianConditional::sharedMeanAndStddev(Z(0), -I_3x3, X(0), Z_3x1, 0.1));
   bn.push_back(
       GaussianConditional::sharedMeanAndStddev(Z(0), -I_3x3, X(1), Z_3x1, 0.1));
-  bn.emplace_back(new GaussianMixture(
-      {Z(0)}, {X(0), X(1)}, {mode},
-      {GaussianConditional::sharedMeanAndStddev(Z(0), I_3x3, X(0), -I_3x3, X(1),
-                                                Z_3x1, noise_loose),
-       GaussianConditional::sharedMeanAndStddev(Z(0), I_3x3, X(0), -I_3x3, X(1),
-                                                Z_3x1, noise_tight)}));
+  bn.emplace_shared<GaussianMixture>(
+      KeyVector{Z(0)}, KeyVector{X(0), X(1)}, DiscreteKeys{mode},
+      std::vector{GaussianConditional::sharedMeanAndStddev(
+                      Z(0), I_3x3, X(0), -I_3x3, X(1), Z_3x1, noise_loose),
+                  GaussianConditional::sharedMeanAndStddev(
+                      Z(0), I_3x3, X(0), -I_3x3, X(1), Z_3x1, noise_tight)});
 
   VectorValues vv;
   vv.insert(Z(0), Z_3x1);
diff --git a/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp b/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp
index 559f59c8b..a7a315c87 100644
--- a/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp
+++ b/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp
@@ -651,7 +651,8 @@ TEST(HybridGaussianFactorGraph, ErrorAndProbPrimeTree) {
 }
 
 /* ****************************************************************************/
-// Test hybrid gaussian factor graph errorTree when there is a HybridConditional in the graph
+// Test hybrid gaussian factor graph errorTree when
+// there is a HybridConditional in the graph
 TEST(HybridGaussianFactorGraph, ErrorTreeWithConditional) {
   using symbol_shorthand::F;
 
@@ -665,12 +666,11 @@ TEST(HybridGaussianFactorGraph, ErrorTreeWithConditional) {
   auto measurement_model = noiseModel::Isotropic::Sigma(1, 2.0);
 
   // Set a prior P(x0) at x0=0
-  hbn.emplace_back(
-      new GaussianConditional(x0, Vector1(0.0), I_1x1, prior_model));
+  hbn.emplace_shared<GaussianConditional>(x0, Vector1(0.0), I_1x1, prior_model);
 
   // Add measurement P(z0 | x0)
-  hbn.emplace_back(new GaussianConditional(z0, Vector1(0.0), -I_1x1, x0, I_1x1,
-                                           measurement_model));
+  hbn.emplace_shared<GaussianConditional>(z0, Vector1(0.0), -I_1x1, x0, I_1x1,
+                                          measurement_model);
 
   // Add hybrid motion model
   double mu = 0.0;
@@ -681,10 +681,11 @@ TEST(HybridGaussianFactorGraph, ErrorTreeWithConditional) {
                                                 x0, -I_1x1, model0),
        c1 = make_shared<GaussianConditional>(f01, Vector1(mu), I_1x1, x1, I_1x1,
                                              x0, -I_1x1, model1);
-  hbn.emplace_back(new GaussianMixture({f01}, {x0, x1}, {m1}, {c0, c1}));
+  hbn.emplace_shared<GaussianMixture>(KeyVector{f01}, KeyVector{x0, x1},
+                                      DiscreteKeys{m1}, std::vector{c0, c1});
 
   // Discrete uniform prior.
-  hbn.emplace_back(new DiscreteConditional(m1, "0.5/0.5"));
+  hbn.emplace_shared<DiscreteConditional>(m1, "0.5/0.5");
 
   VectorValues given;
   given.insert(z0, Vector1(0.0));
@@ -804,11 +805,12 @@ TEST(HybridGaussianFactorGraph, EliminateTiny1) {
               X(0), Vector1(14.1421), I_1x1 * 2.82843),
        conditional1 = std::make_shared<GaussianConditional>(
            X(0), Vector1(10.1379), I_1x1 * 2.02759);
-  expectedBayesNet.emplace_back(
-      new GaussianMixture({X(0)}, {}, {mode}, {conditional0, conditional1}));
+  expectedBayesNet.emplace_shared<GaussianMixture>(
+      KeyVector{X(0)}, KeyVector{}, DiscreteKeys{mode},
+      std::vector{conditional0, conditional1});
 
   // Add prior on mode.
-  expectedBayesNet.emplace_back(new DiscreteConditional(mode, "74/26"));
+  expectedBayesNet.emplace_shared<DiscreteConditional>(mode, "74/26");
 
   // Test elimination
   const auto posterior = fg.eliminateSequential();
@@ -828,18 +830,20 @@ TEST(HybridGaussianFactorGraph, EliminateTiny1Swapped) {
   HybridBayesNet bn;
 
   // Create Gaussian mixture z_0 = x0 + noise for each measurement.
-  bn.emplace_back(new GaussianMixture(
-      {Z(0)}, {X(0)}, {mode},
-      {GaussianConditional::sharedMeanAndStddev(Z(0), I_1x1, X(0), Z_1x1, 3),
-       GaussianConditional::sharedMeanAndStddev(Z(0), I_1x1, X(0), Z_1x1,
-                                                0.5)}));
+  auto gm = std::make_shared<GaussianMixture>(
+      KeyVector{Z(0)}, KeyVector{X(0)}, DiscreteKeys{mode},
+      std::vector{
+          GaussianConditional::sharedMeanAndStddev(Z(0), I_1x1, X(0), Z_1x1, 3),
+          GaussianConditional::sharedMeanAndStddev(Z(0), I_1x1, X(0), Z_1x1,
+                                                   0.5)});
+  bn.push_back(gm);
 
   // Create prior on X(0).
   bn.push_back(
       GaussianConditional::sharedMeanAndStddev(X(0), Vector1(5.0), 0.5));
 
   // Add prior on mode.
-  bn.emplace_back(new DiscreteConditional(mode, "1/1"));
+  bn.emplace_shared<DiscreteConditional>(mode, "1/1");
   // bn.print();
 
   auto fg = bn.toFactorGraph(measurements);
@@ -858,11 +862,12 @@ TEST(HybridGaussianFactorGraph, EliminateTiny1Swapped) {
               X(0), Vector1(10.1379), I_1x1 * 2.02759),
        conditional1 = std::make_shared<GaussianConditional>(
            X(0), Vector1(14.1421), I_1x1 * 2.82843);
-  expectedBayesNet.emplace_back(
-      new GaussianMixture({X(0)}, {}, {mode}, {conditional0, conditional1}));
+  expectedBayesNet.emplace_shared<GaussianMixture>(
+      KeyVector{X(0)}, KeyVector{}, DiscreteKeys{mode},
+      std::vector{conditional0, conditional1});
 
   // Add prior on mode.
-  expectedBayesNet.emplace_back(new DiscreteConditional(mode, "1/1"));
+  expectedBayesNet.emplace_shared<DiscreteConditional>(mode, "1/1");
 
   // Test elimination
   const auto posterior = fg.eliminateSequential();
@@ -894,11 +899,12 @@ TEST(HybridGaussianFactorGraph, EliminateTiny2) {
               X(0), Vector1(17.3205), I_1x1 * 3.4641),
        conditional1 = std::make_shared<GaussianConditional>(
            X(0), Vector1(10.274), I_1x1 * 2.0548);
-  expectedBayesNet.emplace_back(
-      new GaussianMixture({X(0)}, {}, {mode}, {conditional0, conditional1}));
+  expectedBayesNet.emplace_shared<GaussianMixture>(
+      KeyVector{X(0)}, KeyVector{}, DiscreteKeys{mode},
+      std::vector{conditional0, conditional1});
 
   // Add prior on mode.
-  expectedBayesNet.emplace_back(new DiscreteConditional(mode, "23/77"));
+  expectedBayesNet.emplace_shared<DiscreteConditional>(mode, "23/77");
 
   // Test elimination
   const auto posterior = fg.eliminateSequential();
@@ -940,30 +946,31 @@ TEST(HybridGaussianFactorGraph, EliminateSwitchingNetwork) {
   for (size_t t : {0, 1, 2}) {
     // Create Gaussian mixture on Z(t) conditioned on X(t) and mode N(t):
     const auto noise_mode_t = DiscreteKey{N(t), 2};
-    bn.emplace_back(
-        new GaussianMixture({Z(t)}, {X(t)}, {noise_mode_t},
-                            {GaussianConditional::sharedMeanAndStddev(
-                                 Z(t), I_1x1, X(t), Z_1x1, 0.5),
-                             GaussianConditional::sharedMeanAndStddev(
-                                 Z(t), I_1x1, X(t), Z_1x1, 3.0)}));
+    bn.emplace_shared<GaussianMixture>(
+        KeyVector{Z(t)}, KeyVector{X(t)}, DiscreteKeys{noise_mode_t},
+        std::vector{GaussianConditional::sharedMeanAndStddev(Z(t), I_1x1, X(t),
+                                                             Z_1x1, 0.5),
+                    GaussianConditional::sharedMeanAndStddev(Z(t), I_1x1, X(t),
+                                                             Z_1x1, 3.0)});
 
     // Create prior on discrete mode N(t):
-    bn.emplace_back(new DiscreteConditional(noise_mode_t, "20/80"));
+    bn.emplace_shared<DiscreteConditional>(noise_mode_t, "20/80");
   }
 
   // Add motion models:
   for (size_t t : {2, 1}) {
     // Create Gaussian mixture on X(t) conditioned on X(t-1) and mode M(t-1):
     const auto motion_model_t = DiscreteKey{M(t), 2};
-    bn.emplace_back(
-        new GaussianMixture({X(t)}, {X(t - 1)}, {motion_model_t},
-                            {GaussianConditional::sharedMeanAndStddev(
-                                 X(t), I_1x1, X(t - 1), Z_1x1, 0.2),
-                             GaussianConditional::sharedMeanAndStddev(
-                                 X(t), I_1x1, X(t - 1), I_1x1, 0.2)}));
+    auto gm = std::make_shared<GaussianMixture>(
+        KeyVector{X(t)}, KeyVector{X(t - 1)}, DiscreteKeys{motion_model_t},
+        std::vector{GaussianConditional::sharedMeanAndStddev(
+                        X(t), I_1x1, X(t - 1), Z_1x1, 0.2),
+                    GaussianConditional::sharedMeanAndStddev(
+                        X(t), I_1x1, X(t - 1), I_1x1, 0.2)});
+    bn.push_back(gm);
 
     // Create prior on motion model M(t):
-    bn.emplace_back(new DiscreteConditional(motion_model_t, "40/60"));
+    bn.emplace_shared<DiscreteConditional>(motion_model_t, "40/60");
   }
 
   // Create Gaussian prior on continuous X(0) using sharedMeanAndStddev: