All tests still work with zero constant!
parent 34a9aef6f3
commit 1dcc6ddde9
@@ -204,8 +204,7 @@ boost::shared_ptr<GaussianMixtureFactor> GaussianMixture::likelihood(
   const GaussianMixtureFactor::Factors likelihoods(
       conditionals_, [&](const GaussianConditional::shared_ptr &conditional) {
         return GaussianMixtureFactor::FactorAndConstant{
-            conditional->likelihood(given),
-            conditional->logNormalizationConstant()};
+            conditional->likelihood(given), 0.0};
       });
   return boost::make_shared<GaussianMixtureFactor>(
       continuousParentKeys, discreteParentKeys, likelihoods);
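In words: GaussianMixture::likelihood() now stores each component as FactorAndConstant{conditional->likelihood(given), 0.0}, so the per-component logNormalizationConstant() no longer rides along with the likelihood factor. Below is a minimal sketch of calling the changed method, not part of the diff; the header paths and the helper name makeLikelihood are assumptions, and the variable names are illustrative only.

// Hedged sketch: exercising GaussianMixture::likelihood() after this change.
// Header paths are assumed from the GTSAM hybrid module of this era.
#include <gtsam/hybrid/GaussianMixture.h>
#include <gtsam/hybrid/GaussianMixtureFactor.h>
#include <gtsam/linear/VectorValues.h>

// Illustrative helper: convert a mixture conditional into its likelihood
// factor, given fixed values for the measured variables.
boost::shared_ptr<gtsam::GaussianMixtureFactor> makeLikelihood(
    gtsam::GaussianMixture& gm, const gtsam::VectorValues& given) {
  // Each component of the returned factor is now
  // FactorAndConstant{component->likelihood(given), 0.0}, i.e. the constant
  // slot is zero rather than the component's logNormalizationConstant().
  return gm.likelihood(given);
}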
@@ -341,11 +341,13 @@ HybridGaussianFactorGraph HybridBayesNet::toFactorGraph(
   // replace it by a likelihood factor:
   for (auto &&conditional : *this) {
     if (conditional->frontalsIn(measurements)) {
-      if (auto gc = conditional->asGaussian())
+      if (auto gc = conditional->asGaussian()) {
         fg.push_back(gc->likelihood(measurements));
-      else if (auto gm = conditional->asMixture())
+      } else if (auto gm = conditional->asMixture()) {
         fg.push_back(gm->likelihood(measurements));
-      else {
+        const auto constantsFactor = gm->normalizationConstants();
+        if (constantsFactor) fg.push_back(constantsFactor);
+      } else {
         throw std::runtime_error("Unknown conditional type");
       }
     } else {
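The companion change in HybridBayesNet::toFactorGraph(): when a mixture conditional over measured variables is replaced by its zero-constant likelihood factor, the constants it used to carry are pushed separately via gm->normalizationConstants() (only when non-null). Here is a rough usage sketch, not part of the diff; the header paths and the helper name buildGraph are assumptions.

// Hedged sketch: converting a hybrid Bayes net into a factor graph for
// given measurements, going through the changed conversion path above.
#include <gtsam/hybrid/HybridBayesNet.h>
#include <gtsam/hybrid/HybridGaussianFactorGraph.h>
#include <gtsam/linear/VectorValues.h>

// Illustrative wrapper around the conversion.
gtsam::HybridGaussianFactorGraph buildGraph(
    gtsam::HybridBayesNet& bn, const gtsam::VectorValues& measurements) {
  // Each Gaussian conditional on measured variables becomes a likelihood
  // factor; each mixture apparently also contributes a separate
  // normalization-constants factor, which matches the larger expected
  // graph sizes (and the shifted fg.at(...) index) in the test hunks below.
  return bn.toFactorGraph(measurements);
}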
@@ -198,8 +198,7 @@ TEST(GaussianMixture, Likelihood) {
       gm.conditionals(),
       [measurements](const GaussianConditional::shared_ptr& conditional) {
         return GaussianMixtureFactor::FactorAndConstant{
-            conditional->likelihood(measurements),
-            conditional->logNormalizationConstant()};
+            conditional->likelihood(measurements), 0.0};
       });
   const GaussianMixtureFactor expected({X(0)}, {mode}, factors);
   EXPECT(assert_equal(expected, *factor));
@@ -613,7 +613,7 @@ TEST(HybridGaussianFactorGraph, assembleGraphTree) {
   const int num_measurements = 1;
   auto fg = tiny::createHybridGaussianFactorGraph(
       num_measurements, VectorValues{{Z(0), Vector1(5.0)}});
-  EXPECT_LONGS_EQUAL(3, fg.size());
+  EXPECT_LONGS_EQUAL(4, fg.size());

   // Assemble graph tree:
   auto actual = fg.assembleGraphTree();
@@ -625,7 +625,7 @@ TEST(HybridGaussianFactorGraph, assembleGraphTree) {
   CHECK(mixture);

   // Get prior factor:
-  const auto gf = boost::dynamic_pointer_cast<HybridConditional>(fg.at(1));
+  const auto gf = boost::dynamic_pointer_cast<HybridConditional>(fg.at(2));
   CHECK(gf);
   using GF = GaussianFactor::shared_ptr;
   const GF prior = gf->asGaussian();
@@ -654,7 +654,7 @@ TEST(HybridGaussianFactorGraph, EliminateTiny1) {
   const int num_measurements = 1;
   auto fg = tiny::createHybridGaussianFactorGraph(
       num_measurements, VectorValues{{Z(0), Vector1(5.0)}});
-  EXPECT_LONGS_EQUAL(3, fg.size());
+  EXPECT_LONGS_EQUAL(4, fg.size());

   // Create expected Bayes Net:
   HybridBayesNet expectedBayesNet;
@@ -686,7 +686,7 @@ TEST(HybridGaussianFactorGraph, EliminateTiny2) {
   auto fg = tiny::createHybridGaussianFactorGraph(
       num_measurements,
       VectorValues{{Z(0), Vector1(4.0)}, {Z(1), Vector1(6.0)}});
-  EXPECT_LONGS_EQUAL(4, fg.size());
+  EXPECT_LONGS_EQUAL(6, fg.size());

   // Create expected Bayes Net:
   HybridBayesNet expectedBayesNet;
@@ -721,7 +721,7 @@ TEST(HybridGaussianFactorGraph, EliminateTiny22) {
   auto bn = tiny::createHybridBayesNet(num_measurements, manyModes);
   const VectorValues measurements{{Z(0), Vector1(4.0)}, {Z(1), Vector1(6.0)}};
   auto fg = bn.toFactorGraph(measurements);
-  EXPECT_LONGS_EQUAL(5, fg.size());
+  EXPECT_LONGS_EQUAL(7, fg.size());

   // Test elimination
   const auto posterior = fg.eliminateSequential();