Test SumFrontals
parent fa76d53f16
commit 039c9b91c9
@@ -28,6 +28,9 @@ using symbol_shorthand::M;
 using symbol_shorthand::X;
 using symbol_shorthand::Z;
 
+// Create mode key: 0 is low-noise, 1 is high-noise.
+const DiscreteKey mode{M(0), 2};
+
 /**
  * Create a tiny two variable hybrid model which represents
  * the generative probability P(z, x, n) = P(z | x, n)P(x)P(n).
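Note: this hunk hoists the binary mode key to namespace scope so that the Bayes-net and factor-graph helpers can share it; the duplicate definition inside createHybridBayesNet is dropped in the next hunk. For readers new to the discrete types, a minimal illustration of what the key means (the lowNoise/highNoise names are illustrative and not part of this change):

    #include <gtsam/discrete/DiscreteKey.h>
    #include <gtsam/discrete/DiscreteValues.h>
    #include <gtsam/inference/Symbol.h>
    using gtsam::symbol_shorthand::M;

    // A DiscreteKey pairs a variable key with its cardinality, so `mode`
    // declares a single binary discrete variable M(0).
    const gtsam::DiscreteKey mode{M(0), 2};  // 0 = low-noise, 1 = high-noise

    // A concrete choice of the mode is written as a DiscreteValues assignment:
    const gtsam::DiscreteValues lowNoise{{M(0), 0}}, highNoise{{M(0), 1}};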
@@ -36,9 +39,6 @@ static HybridBayesNet createHybridBayesNet(int num_measurements = 1) {
   // Create hybrid Bayes net.
   HybridBayesNet bayesNet;
 
-  // Create mode key: 0 is low-noise, 1 is high-noise.
-  const DiscreteKey mode{M(0), 2};
-
   // Create Gaussian mixture Z(0) = X(0) + noise for each measurement.
   for (int i = 0; i < num_measurements; i++) {
     const auto conditional0 = boost::make_shared<GaussianConditional>(
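Note: the hunk ends mid-statement; for context, the loop builds, per measurement, two GaussianConditionals P(z_i | x_0) that differ only in their noise, one for each value of mode. A rough sketch of how such a pair could look (the sigmas 0.5 and 3.0, the 1-dimensional variables, and the direct-constructor form are assumptions for illustration, not taken from this diff):

    // Sketch only, inside the loop body: z_i = x_0 + w with w ~ N(0, sigma^2),
    // written in the conditional's R*z + S*x = d form with R = I, S = -I, d = 0.
    const auto conditional0 = boost::make_shared<GaussianConditional>(
        Z(i), Vector1::Zero(), I_1x1, X(0), -I_1x1,
        noiseModel::Isotropic::Sigma(1, 0.5));  // mode = 0: low-noise branch
    const auto conditional1 = boost::make_shared<GaussianConditional>(
        Z(i), Vector1::Zero(), I_1x1, X(0), -I_1x1,
        noiseModel::Isotropic::Sigma(1, 3.0));  // mode = 1: high-noise branch
    // The two branches are then combined into a Gaussian mixture indexed by `mode`.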
@@ -615,18 +615,32 @@ TEST(HybridGaussianFactorGraph, ErrorAndProbPrimeTree) {
 }
 
 /* ****************************************************************************/
-// Test creation of a tiny hybrid Bayes net.
-TEST(HybridBayesNet, Tiny) {
+// SumFrontals just assembles Gaussian factor graphs for each assignment.
+TEST(HybridGaussianFactorGraph, SumFrontals) {
   auto fg = tiny::createHybridGaussianFactorGraph();
   EXPECT_LONGS_EQUAL(3, fg.size());
-}
 
-/* ****************************************************************************/
-// // Test summing frontals
-// TEST(HybridGaussianFactorGraph, SumFrontals) {
-//   HybridGaussianFactorGraph fg;
-//   fg.
-// }
+  auto sum = fg.SumFrontals();
+
+  // Get mixture factor:
+  auto mixture = boost::dynamic_pointer_cast<GaussianMixtureFactor>(fg.at(0));
+  using GF = GaussianFactor::shared_ptr;
+
+  // Get prior factor:
+  const GF prior =
+      boost::dynamic_pointer_cast<HybridGaussianFactor>(fg.at(1))->inner();
+
+  // Create DiscreteValues for both 0 and 1:
+  DiscreteValues d0{{M(0), 0}}, d1{{M(0), 1}};
+
+  // Expected decision tree with two factor graphs:
+  // f(x0;mode=0)P(x0) and f(x0;mode=1)P(x0)
+  GaussianMixture::Sum expected{
+      M(0), GaussianFactorGraph(std::vector<GF>{mixture->factor(d0), prior}),
+      GaussianFactorGraph(std::vector<GF>{mixture->factor(d1), prior})};
+
+  EXPECT(assert_equal(expected(d0), sum(d0)));
+}
 
 /* ************************************************************************* */
 int main() {
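Note: `expected` is a decision tree keyed on M(0) whose two leaves are the assembled graphs f(x0;mode=0)P(x0) and f(x0;mode=1)P(x0), yet the assertion only compares the mode=0 branch. A natural follow-up, not part of this commit, would compare the other branch as well, reusing the d1 assignment already defined in the test:

    // Hypothetical extra assertion: the mode = 1 branch should match too.
    EXPECT(assert_equal(expected(d1), sum(d1)));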