update all tests to use emplace_shared

parent 36c0b931a4
commit 605542bd0c
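The pattern applied across every hunk below: conditionals that were previously allocated with a raw new and handed to emplace_back are now constructed in place with emplace_shared<T>(...), and already-constructed shared pointers are added with push_back. A minimal sketch of the before/after, assuming GTSAM's HybridBayesNet and DiscreteConditional headers; the include paths and the buildExample function are illustrative and not part of the commit:

#include <gtsam/discrete/DiscreteConditional.h>
#include <gtsam/hybrid/HybridBayesNet.h>

#include <memory>

using namespace gtsam;

// Sketch only: mirrors the migration pattern shown in the hunks below.
void buildExample() {
  const DiscreteKey Asia(0, 2);  // discrete key with 2 states
  HybridBayesNet bayesNet;

  // Before: raw pointer passed to emplace_back, which wraps it.
  //   bayesNet.emplace_back(new DiscreteConditional(Asia, "99/1"));

  // After: construct the shared conditional in place.
  bayesNet.emplace_shared<DiscreteConditional>(Asia, "99/1");

  // When a shared_ptr already exists, add it with push_back instead.
  auto prior = std::make_shared<DiscreteConditional>(Asia, "4/6");
  bayesNet.push_back(prior);
}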
@@ -43,12 +43,12 @@ inline HybridBayesNet createHybridBayesNet(size_t num_measurements = 1,
   // Create Gaussian mixture z_i = x0 + noise for each measurement.
   for (size_t i = 0; i < num_measurements; i++) {
     const auto mode_i = manyModes ? DiscreteKey{M(i), 2} : mode;
-    bayesNet.emplace_back(
-        new GaussianMixture({Z(i)}, {X(0)}, {mode_i},
-                            {GaussianConditional::sharedMeanAndStddev(
-                                 Z(i), I_1x1, X(0), Z_1x1, 0.5),
-                             GaussianConditional::sharedMeanAndStddev(
-                                 Z(i), I_1x1, X(0), Z_1x1, 3)}));
+    bayesNet.emplace_shared<GaussianMixture>(
+        KeyVector{Z(i)}, KeyVector{X(0)}, DiscreteKeys{mode_i},
+        std::vector{GaussianConditional::sharedMeanAndStddev(Z(i), I_1x1, X(0),
+                                                             Z_1x1, 0.5),
+                    GaussianConditional::sharedMeanAndStddev(Z(i), I_1x1, X(0),
+                                                             Z_1x1, 3)});
   }
 
   // Create prior on X(0).
@@ -58,7 +58,7 @@ inline HybridBayesNet createHybridBayesNet(size_t num_measurements = 1,
   // Add prior on mode.
   const size_t nrModes = manyModes ? num_measurements : 1;
   for (size_t i = 0; i < nrModes; i++) {
-    bayesNet.emplace_back(new DiscreteConditional({M(i), 2}, "4/6"));
+    bayesNet.emplace_shared<DiscreteConditional>(DiscreteKey{M(i), 2}, "4/6");
   }
   return bayesNet;
 }
@@ -70,8 +70,7 @@ inline HybridBayesNet createHybridBayesNet(size_t num_measurements = 1,
  * the generative Bayes net model HybridBayesNet::Example(num_measurements)
  */
 inline HybridGaussianFactorGraph createHybridGaussianFactorGraph(
-    size_t num_measurements = 1,
-    std::optional<VectorValues> measurements = {},
+    size_t num_measurements = 1, std::optional<VectorValues> measurements = {},
     bool manyModes = false) {
   auto bayesNet = createHybridBayesNet(num_measurements, manyModes);
   if (measurements) {
@@ -227,12 +227,12 @@ static HybridBayesNet GetGaussianMixtureModel(double mu0, double mu1,
 
   auto c0 = make_shared<GaussianConditional>(z, Vector1(mu0), I_1x1, model0),
        c1 = make_shared<GaussianConditional>(z, Vector1(mu1), I_1x1, model1);
-  auto gm = new GaussianMixture({z}, {}, {m}, {c0, c1});
-
-  auto mixing = make_shared<DiscreteConditional>(m, "0.5/0.5");
 
   HybridBayesNet hbn;
-  hbn.emplace_back(gm);
+  hbn.emplace_shared<GaussianMixture>(KeyVector{z}, KeyVector{},
+                                      DiscreteKeys{m}, std::vector{c0, c1});
+
+  auto mixing = make_shared<DiscreteConditional>(m, "0.5/0.5");
   hbn.push_back(mixing);
 
   return hbn;
@@ -278,7 +278,7 @@ TEST(GaussianMixtureFactor, GaussianMixtureModel) {
 
   // At the halfway point between the means, we should get P(m|z)=0.5
   HybridBayesNet expected;
-  expected.emplace_back(new DiscreteConditional(m, "0.5/0.5"));
+  expected.emplace_shared<DiscreteConditional>(m, "0.5/0.5");
 
   EXPECT(assert_equal(expected, *bn));
 }
@@ -350,10 +350,10 @@ TEST(GaussianMixtureFactor, GaussianMixtureModel2) {
 
   // At the halfway point between the means
   HybridBayesNet expected;
-  expected.emplace_back(new DiscreteConditional(
-      m, {},
+  expected.emplace_shared<DiscreteConditional>(
+      m, DiscreteKeys{},
       vector<double>{prob_m_z(mu1, mu0, sigma1, sigma0, m1_high),
-                     prob_m_z(mu0, mu1, sigma0, sigma1, m1_high)}));
+                     prob_m_z(mu0, mu1, sigma0, sigma1, m1_high)});
 
   EXPECT(assert_equal(expected, *bn));
 }
@@ -401,9 +401,9 @@ static HybridBayesNet CreateBayesNet(double mu0, double mu1, double sigma0,
 
   auto measurement_model = noiseModel::Isotropic::Sigma(1, measurement_sigma);
   // Add measurement P(z0 | x0)
-  auto p_z0 = new GaussianConditional(z0, Vector1(0.0), -I_1x1, x0, I_1x1,
-                                      measurement_model);
-  hbn.emplace_back(p_z0);
+  auto p_z0 = std::make_shared<GaussianConditional>(
+      z0, Vector1(0.0), -I_1x1, x0, I_1x1, measurement_model);
+  hbn.push_back(p_z0);
 
   // Add hybrid motion model
   auto model0 = noiseModel::Isotropic::Sigma(1, sigma0);
@@ -413,19 +413,20 @@ static HybridBayesNet CreateBayesNet(double mu0, double mu1, double sigma0,
        c1 = make_shared<GaussianConditional>(x1, Vector1(mu1), I_1x1, x0,
                                              -I_1x1, model1);
 
-  auto motion = new GaussianMixture({x1}, {x0}, {m1}, {c0, c1});
-  hbn.emplace_back(motion);
+  auto motion = std::make_shared<GaussianMixture>(
+      KeyVector{x1}, KeyVector{x0}, DiscreteKeys{m1}, std::vector{c0, c1});
+  hbn.push_back(motion);
 
   if (add_second_measurement) {
     // Add second measurement
-    auto p_z1 = new GaussianConditional(z1, Vector1(0.0), -I_1x1, x1, I_1x1,
-                                        measurement_model);
-    hbn.emplace_back(p_z1);
+    auto p_z1 = std::make_shared<GaussianConditional>(
+        z1, Vector1(0.0), -I_1x1, x1, I_1x1, measurement_model);
+    hbn.push_back(p_z1);
   }
 
   // Discrete uniform prior.
-  auto p_m1 = new DiscreteConditional(m1, "0.5/0.5");
-  hbn.emplace_back(p_m1);
+  auto p_m1 = std::make_shared<DiscreteConditional>(m1, "0.5/0.5");
+  hbn.push_back(p_m1);
 
   return hbn;
 }
@@ -43,7 +43,7 @@ static const DiscreteKey Asia(asiaKey, 2);
 // Test creation of a pure discrete Bayes net.
 TEST(HybridBayesNet, Creation) {
   HybridBayesNet bayesNet;
-  bayesNet.emplace_back(new DiscreteConditional(Asia, "99/1"));
+  bayesNet.emplace_shared<DiscreteConditional>(Asia, "99/1");
 
   DiscreteConditional expected(Asia, "99/1");
   CHECK(bayesNet.at(0)->asDiscrete());
@@ -54,7 +54,7 @@ TEST(HybridBayesNet, Creation) {
 // Test adding a Bayes net to another one.
 TEST(HybridBayesNet, Add) {
   HybridBayesNet bayesNet;
-  bayesNet.emplace_back(new DiscreteConditional(Asia, "99/1"));
+  bayesNet.emplace_shared<DiscreteConditional>(Asia, "99/1");
 
   HybridBayesNet other;
   other.add(bayesNet);
@@ -65,7 +65,7 @@ TEST(HybridBayesNet, Add) {
 // Test evaluate for a pure discrete Bayes net P(Asia).
 TEST(HybridBayesNet, EvaluatePureDiscrete) {
   HybridBayesNet bayesNet;
-  bayesNet.emplace_back(new DiscreteConditional(Asia, "4/6"));
+  bayesNet.emplace_shared<DiscreteConditional>(Asia, "4/6");
   HybridValues values;
   values.insert(asiaKey, 0);
   EXPECT_DOUBLES_EQUAL(0.4, bayesNet.evaluate(values), 1e-9);
@@ -107,9 +107,10 @@ TEST(HybridBayesNet, evaluateHybrid) {
   // Create hybrid Bayes net.
   HybridBayesNet bayesNet;
   bayesNet.push_back(continuousConditional);
-  bayesNet.emplace_back(
-      new GaussianMixture({X(1)}, {}, {Asia}, {conditional0, conditional1}));
-  bayesNet.emplace_back(new DiscreteConditional(Asia, "99/1"));
+  bayesNet.emplace_shared<GaussianMixture>(
+      KeyVector{X(1)}, KeyVector{}, DiscreteKeys{Asia},
+      std::vector{conditional0, conditional1});
+  bayesNet.emplace_shared<DiscreteConditional>(Asia, "99/1");
 
   // Create values at which to evaluate.
   HybridValues values;
@@ -167,13 +168,14 @@ TEST(HybridBayesNet, Error) {
       conditional1 = std::make_shared<GaussianConditional>(
           X(1), Vector1::Constant(2), I_1x1, model1);
 
-  auto gm =
-      new GaussianMixture({X(1)}, {}, {Asia}, {conditional0, conditional1});
+  auto gm = std::make_shared<GaussianMixture>(
+      KeyVector{X(1)}, KeyVector{}, DiscreteKeys{Asia},
+      std::vector{conditional0, conditional1});
   // Create hybrid Bayes net.
   HybridBayesNet bayesNet;
   bayesNet.push_back(continuousConditional);
-  bayesNet.emplace_back(gm);
-  bayesNet.emplace_back(new DiscreteConditional(Asia, "99/1"));
+  bayesNet.push_back(gm);
+  bayesNet.emplace_shared<DiscreteConditional>(Asia, "99/1");
 
   // Create values at which to evaluate.
   HybridValues values;
@@ -616,12 +616,12 @@ TEST(HybridEstimation, ModeSelection) {
       GaussianConditional::sharedMeanAndStddev(Z(0), -I_1x1, X(0), Z_1x1, 0.1));
   bn.push_back(
       GaussianConditional::sharedMeanAndStddev(Z(0), -I_1x1, X(1), Z_1x1, 0.1));
-  bn.emplace_back(new GaussianMixture(
-      {Z(0)}, {X(0), X(1)}, {mode},
-      {GaussianConditional::sharedMeanAndStddev(Z(0), I_1x1, X(0), -I_1x1, X(1),
-                                                Z_1x1, noise_loose),
-       GaussianConditional::sharedMeanAndStddev(Z(0), I_1x1, X(0), -I_1x1, X(1),
-                                                Z_1x1, noise_tight)}));
+  bn.emplace_shared<GaussianMixture>(
+      KeyVector{Z(0)}, KeyVector{X(0), X(1)}, DiscreteKeys{mode},
+      std::vector{GaussianConditional::sharedMeanAndStddev(
+                      Z(0), I_1x1, X(0), -I_1x1, X(1), Z_1x1, noise_loose),
+                  GaussianConditional::sharedMeanAndStddev(
+                      Z(0), I_1x1, X(0), -I_1x1, X(1), Z_1x1, noise_tight)});
 
   VectorValues vv;
   vv.insert(Z(0), Z_1x1);
@@ -647,12 +647,12 @@ TEST(HybridEstimation, ModeSelection2) {
       GaussianConditional::sharedMeanAndStddev(Z(0), -I_3x3, X(0), Z_3x1, 0.1));
   bn.push_back(
       GaussianConditional::sharedMeanAndStddev(Z(0), -I_3x3, X(1), Z_3x1, 0.1));
-  bn.emplace_back(new GaussianMixture(
-      {Z(0)}, {X(0), X(1)}, {mode},
-      {GaussianConditional::sharedMeanAndStddev(Z(0), I_3x3, X(0), -I_3x3, X(1),
-                                                Z_3x1, noise_loose),
-       GaussianConditional::sharedMeanAndStddev(Z(0), I_3x3, X(0), -I_3x3, X(1),
-                                                Z_3x1, noise_tight)}));
+  bn.emplace_shared<GaussianMixture>(
+      KeyVector{Z(0)}, KeyVector{X(0), X(1)}, DiscreteKeys{mode},
+      std::vector{GaussianConditional::sharedMeanAndStddev(
+                      Z(0), I_3x3, X(0), -I_3x3, X(1), Z_3x1, noise_loose),
+                  GaussianConditional::sharedMeanAndStddev(
+                      Z(0), I_3x3, X(0), -I_3x3, X(1), Z_3x1, noise_tight)});
 
   VectorValues vv;
   vv.insert(Z(0), Z_3x1);
@@ -651,7 +651,8 @@ TEST(HybridGaussianFactorGraph, ErrorAndProbPrimeTree) {
 }
 
 /* ****************************************************************************/
-// Test hybrid gaussian factor graph errorTree when there is a HybridConditional in the graph
+// Test hybrid gaussian factor graph errorTree when
+// there is a HybridConditional in the graph
 TEST(HybridGaussianFactorGraph, ErrorTreeWithConditional) {
   using symbol_shorthand::F;
 
@@ -665,12 +666,11 @@ TEST(HybridGaussianFactorGraph, ErrorTreeWithConditional) {
   auto measurement_model = noiseModel::Isotropic::Sigma(1, 2.0);
 
   // Set a prior P(x0) at x0=0
-  hbn.emplace_back(
-      new GaussianConditional(x0, Vector1(0.0), I_1x1, prior_model));
+  hbn.emplace_shared<GaussianConditional>(x0, Vector1(0.0), I_1x1, prior_model);
 
   // Add measurement P(z0 | x0)
-  hbn.emplace_back(new GaussianConditional(z0, Vector1(0.0), -I_1x1, x0, I_1x1,
-                                           measurement_model));
+  hbn.emplace_shared<GaussianConditional>(z0, Vector1(0.0), -I_1x1, x0, I_1x1,
+                                          measurement_model);
 
   // Add hybrid motion model
   double mu = 0.0;
@@ -681,10 +681,11 @@ TEST(HybridGaussianFactorGraph, ErrorTreeWithConditional) {
                                             x0, -I_1x1, model0),
        c1 = make_shared<GaussianConditional>(f01, Vector1(mu), I_1x1, x1, I_1x1,
                                              x0, -I_1x1, model1);
-  hbn.emplace_back(new GaussianMixture({f01}, {x0, x1}, {m1}, {c0, c1}));
+  hbn.emplace_shared<GaussianMixture>(KeyVector{f01}, KeyVector{x0, x1},
+                                      DiscreteKeys{m1}, std::vector{c0, c1});
 
   // Discrete uniform prior.
-  hbn.emplace_back(new DiscreteConditional(m1, "0.5/0.5"));
+  hbn.emplace_shared<DiscreteConditional>(m1, "0.5/0.5");
 
   VectorValues given;
   given.insert(z0, Vector1(0.0));
@@ -804,11 +805,12 @@ TEST(HybridGaussianFactorGraph, EliminateTiny1) {
                       X(0), Vector1(14.1421), I_1x1 * 2.82843),
        conditional1 = std::make_shared<GaussianConditional>(
                       X(0), Vector1(10.1379), I_1x1 * 2.02759);
-  expectedBayesNet.emplace_back(
-      new GaussianMixture({X(0)}, {}, {mode}, {conditional0, conditional1}));
+  expectedBayesNet.emplace_shared<GaussianMixture>(
+      KeyVector{X(0)}, KeyVector{}, DiscreteKeys{mode},
+      std::vector{conditional0, conditional1});
 
   // Add prior on mode.
-  expectedBayesNet.emplace_back(new DiscreteConditional(mode, "74/26"));
+  expectedBayesNet.emplace_shared<DiscreteConditional>(mode, "74/26");
 
   // Test elimination
   const auto posterior = fg.eliminateSequential();
@@ -828,18 +830,20 @@ TEST(HybridGaussianFactorGraph, EliminateTiny1Swapped) {
   HybridBayesNet bn;
 
   // Create Gaussian mixture z_0 = x0 + noise for each measurement.
-  bn.emplace_back(new GaussianMixture(
-      {Z(0)}, {X(0)}, {mode},
-      {GaussianConditional::sharedMeanAndStddev(Z(0), I_1x1, X(0), Z_1x1, 3),
-       GaussianConditional::sharedMeanAndStddev(Z(0), I_1x1, X(0), Z_1x1,
-                                                0.5)}));
+  auto gm = std::make_shared<GaussianMixture>(
+      KeyVector{Z(0)}, KeyVector{X(0)}, DiscreteKeys{mode},
+      std::vector{
+          GaussianConditional::sharedMeanAndStddev(Z(0), I_1x1, X(0), Z_1x1, 3),
+          GaussianConditional::sharedMeanAndStddev(Z(0), I_1x1, X(0), Z_1x1,
+                                                   0.5)});
+  bn.push_back(gm);
 
   // Create prior on X(0).
   bn.push_back(
       GaussianConditional::sharedMeanAndStddev(X(0), Vector1(5.0), 0.5));
 
   // Add prior on mode.
-  bn.emplace_back(new DiscreteConditional(mode, "1/1"));
+  bn.emplace_shared<DiscreteConditional>(mode, "1/1");
 
   // bn.print();
   auto fg = bn.toFactorGraph(measurements);
@@ -858,11 +862,12 @@ TEST(HybridGaussianFactorGraph, EliminateTiny1Swapped) {
                       X(0), Vector1(10.1379), I_1x1 * 2.02759),
        conditional1 = std::make_shared<GaussianConditional>(
                       X(0), Vector1(14.1421), I_1x1 * 2.82843);
-  expectedBayesNet.emplace_back(
-      new GaussianMixture({X(0)}, {}, {mode}, {conditional0, conditional1}));
+  expectedBayesNet.emplace_shared<GaussianMixture>(
+      KeyVector{X(0)}, KeyVector{}, DiscreteKeys{mode},
+      std::vector{conditional0, conditional1});
 
   // Add prior on mode.
-  expectedBayesNet.emplace_back(new DiscreteConditional(mode, "1/1"));
+  expectedBayesNet.emplace_shared<DiscreteConditional>(mode, "1/1");
 
   // Test elimination
   const auto posterior = fg.eliminateSequential();
@@ -894,11 +899,12 @@ TEST(HybridGaussianFactorGraph, EliminateTiny2) {
                       X(0), Vector1(17.3205), I_1x1 * 3.4641),
        conditional1 = std::make_shared<GaussianConditional>(
                       X(0), Vector1(10.274), I_1x1 * 2.0548);
-  expectedBayesNet.emplace_back(
-      new GaussianMixture({X(0)}, {}, {mode}, {conditional0, conditional1}));
+  expectedBayesNet.emplace_shared<GaussianMixture>(
+      KeyVector{X(0)}, KeyVector{}, DiscreteKeys{mode},
+      std::vector{conditional0, conditional1});
 
   // Add prior on mode.
-  expectedBayesNet.emplace_back(new DiscreteConditional(mode, "23/77"));
+  expectedBayesNet.emplace_shared<DiscreteConditional>(mode, "23/77");
 
   // Test elimination
   const auto posterior = fg.eliminateSequential();
@@ -940,30 +946,31 @@ TEST(HybridGaussianFactorGraph, EliminateSwitchingNetwork) {
   for (size_t t : {0, 1, 2}) {
     // Create Gaussian mixture on Z(t) conditioned on X(t) and mode N(t):
    const auto noise_mode_t = DiscreteKey{N(t), 2};
-    bn.emplace_back(
-        new GaussianMixture({Z(t)}, {X(t)}, {noise_mode_t},
-                            {GaussianConditional::sharedMeanAndStddev(
-                                 Z(t), I_1x1, X(t), Z_1x1, 0.5),
-                             GaussianConditional::sharedMeanAndStddev(
-                                 Z(t), I_1x1, X(t), Z_1x1, 3.0)}));
+    bn.emplace_shared<GaussianMixture>(
+        KeyVector{Z(t)}, KeyVector{X(t)}, DiscreteKeys{noise_mode_t},
+        std::vector{GaussianConditional::sharedMeanAndStddev(Z(t), I_1x1, X(t),
+                                                             Z_1x1, 0.5),
+                    GaussianConditional::sharedMeanAndStddev(Z(t), I_1x1, X(t),
+                                                             Z_1x1, 3.0)});
 
     // Create prior on discrete mode N(t):
-    bn.emplace_back(new DiscreteConditional(noise_mode_t, "20/80"));
+    bn.emplace_shared<DiscreteConditional>(noise_mode_t, "20/80");
   }
 
   // Add motion models:
   for (size_t t : {2, 1}) {
     // Create Gaussian mixture on X(t) conditioned on X(t-1) and mode M(t-1):
     const auto motion_model_t = DiscreteKey{M(t), 2};
-    bn.emplace_back(
-        new GaussianMixture({X(t)}, {X(t - 1)}, {motion_model_t},
-                            {GaussianConditional::sharedMeanAndStddev(
+    auto gm = std::make_shared<GaussianMixture>(
+        KeyVector{X(t)}, KeyVector{X(t - 1)}, DiscreteKeys{motion_model_t},
+        std::vector{GaussianConditional::sharedMeanAndStddev(
                                  X(t), I_1x1, X(t - 1), Z_1x1, 0.2),
                              GaussianConditional::sharedMeanAndStddev(
-                                 X(t), I_1x1, X(t - 1), I_1x1, 0.2)}));
+                                 X(t), I_1x1, X(t - 1), I_1x1, 0.2)});
+    bn.push_back(gm);
 
     // Create prior on motion model M(t):
-    bn.emplace_back(new DiscreteConditional(motion_model_t, "40/60"));
+    bn.emplace_shared<DiscreteConditional>(motion_model_t, "40/60");
   }
 
   // Create Gaussian prior on continuous X(0) using sharedMeanAndStddev: