Example with 2 measurements agrees with importance sampling
parent 797ac344fa
commit 625977ee06
@@ -1,6 +1,6 @@
 /* ----------------------------------------------------------------------------
 
- * GTSAM Copyright 2010-2022, Georgia Tech Research Corporation,
+ * GTSAM Copyright 2010-2023, Georgia Tech Research Corporation,
  * Atlanta, Georgia 30332-0415
  * All Rights Reserved
  * Authors: Frank Dellaert, et al. (see THANKS for the full author list)
@@ -10,10 +10,9 @@
  * -------------------------------------------------------------------------- */
 
 /*
- * @file TinyHybrdiExample.h
- * @date Mar 11, 2022
- * @author Varun Agrawal
- * @author Fan Jiang
+ * @file TinyHybridExample.h
+ * @date December, 2022
+ * @author Frank Dellaert
  */
 
 #include <gtsam/hybrid/HybridBayesNet.h>
@@ -33,10 +32,9 @@ const DiscreteKey mode{M(0), 2};
 
 /**
  * Create a tiny two variable hybrid model which represents
- * the generative probability P(z, x, n) = P(z | x, n)P(x)P(n).
+ * the generative probability P(z,x,mode) = P(z|x,mode)P(x)P(mode).
  */
 HybridBayesNet createHybridBayesNet(int num_measurements = 1) {
-  // Create hybrid Bayes net.
   HybridBayesNet bayesNet;
 
   // Create Gaussian mixture z_i = x0 + noise for each measurement.
@@ -60,6 +58,9 @@ HybridBayesNet createHybridBayesNet(int num_measurements = 1) {
   return bayesNet;
 }
 
+/**
+ * Convert a hybrid Bayes net to a hybrid Gaussian factor graph.
+ */
 HybridGaussianFactorGraph convertBayesNet(const HybridBayesNet& bayesNet,
                                           const VectorValues& measurements) {
   HybridGaussianFactorGraph fg;
@@ -74,18 +75,19 @@ HybridGaussianFactorGraph convertBayesNet(const HybridBayesNet& bayesNet,
   return fg;
 }
 
+/**
+ * Create a tiny two variable hybrid factor graph which represents a discrete
+ * mode and a continuous variable x0, given a number of measurements of the
+ * continuous variable x0. If no measurements are given, they are sampled from
+ * the generative Bayes net model HybridBayesNet::Example(num_measurements)
+ */
 HybridGaussianFactorGraph createHybridGaussianFactorGraph(
-    int num_measurements = 1, bool deterministic = false) {
+    int num_measurements = 1,
+    boost::optional<VectorValues> measurements = boost::none) {
   auto bayesNet = createHybridBayesNet(num_measurements);
-  if (deterministic) {
-    // Create a deterministic set of measurements:
-    VectorValues measurements;
-    for (int i = 0; i < num_measurements; i++) {
-      measurements.insert(Z(i), Vector1(5.0 + 0.1 * i));
-    }
-    return convertBayesNet(bayesNet, measurements);
+  if (measurements) {
+    return convertBayesNet(bayesNet, *measurements);
   } else {
-    // Create a random set of measurements:
     return convertBayesNet(bayesNet, bayesNet.sample().continuous());
   }
 }
@@ -618,10 +618,10 @@ TEST(HybridGaussianFactorGraph, ErrorAndProbPrimeTree) {
 // Check that assembleGraphTree assembles Gaussian factor graphs for each
 // assignment.
 TEST(HybridGaussianFactorGraph, assembleGraphTree) {
+  using symbol_shorthand::Z;
   const int num_measurements = 1;
-  const bool deterministic = true;
-  auto fg =
-      tiny::createHybridGaussianFactorGraph(num_measurements, deterministic);
+  auto fg = tiny::createHybridGaussianFactorGraph(
+      num_measurements, VectorValues{{Z(0), Vector1(5.0)}});
   EXPECT_LONGS_EQUAL(3, fg.size());
 
   auto sum = fg.assembleGraphTree();
@@ -653,10 +653,10 @@
 /* ****************************************************************************/
 // Check that eliminating tiny net with 1 measurement yields correct result.
 TEST(HybridGaussianFactorGraph, EliminateTiny1) {
+  using symbol_shorthand::Z;
   const int num_measurements = 1;
-  const bool deterministic = true;
-  auto fg =
-      tiny::createHybridGaussianFactorGraph(num_measurements, deterministic);
+  auto fg = tiny::createHybridGaussianFactorGraph(
+      num_measurements, VectorValues{{Z(0), Vector1(5.0)}});
 
   // Create expected Bayes Net:
   HybridBayesNet expectedBayesNet;
@@ -679,39 +679,41 @@
   ordering.push_back(X(0));
   ordering.push_back(M(0));
   const auto posterior = fg.eliminateSequential(ordering);
-  EXPECT(assert_equal(expectedBayesNet, *posterior, 1e-4));
+  EXPECT(assert_equal(expectedBayesNet, *posterior, 0.01));
 }
 
 /* ****************************************************************************/
 // Check that eliminating tiny net with 2 measurements yields correct result.
 TEST(HybridGaussianFactorGraph, EliminateTiny2) {
+  // Create factor graph with 2 measurements such that posterior mean = 5.0.
+  using symbol_shorthand::Z;
   const int num_measurements = 2;
-  const bool deterministic = true;
-  auto fg =
-      tiny::createHybridGaussianFactorGraph(num_measurements, deterministic);
+  auto fg = tiny::createHybridGaussianFactorGraph(
+      num_measurements,
+      VectorValues{{Z(0), Vector1(4.0)}, {Z(1), Vector1(6.0)}});
 
   // Create expected Bayes Net:
   HybridBayesNet expectedBayesNet;
 
   // Create Gaussian mixture on X(0).
   using tiny::mode;
-  // regression, but mean checked to be > 5.0 in both cases:
+  // regression, but mean checked to be 5.0 in both cases:
   const auto conditional0 = boost::make_shared<GaussianConditional>(
-                 X(0), Vector1(18.4752), I_1x1 * 3.4641),
+                 X(0), Vector1(17.3205), I_1x1 * 3.4641),
              conditional1 = boost::make_shared<GaussianConditional>(
-                 X(0), Vector1(10.3281), I_1x1 * 2.0548);
+                 X(0), Vector1(10.274), I_1x1 * 2.0548);
   GaussianMixture gm({X(0)}, {}, {mode}, {conditional0, conditional1});
   expectedBayesNet.emplaceMixture(gm);  // copy :-(
 
   // Add prior on mode.
-  expectedBayesNet.emplaceDiscrete(mode, "4/6");
+  expectedBayesNet.emplaceDiscrete(mode, "23/77");
 
   // Test elimination
   Ordering ordering;
   ordering.push_back(X(0));
   ordering.push_back(M(0));
   const auto posterior = fg.eliminateSequential(ordering);
-  EXPECT(assert_equal(expectedBayesNet, *posterior, 1e-4));
+  EXPECT(assert_equal(expectedBayesNet, *posterior, 0.01));
 }
 
 /* ************************************************************************* */
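As a sanity check on these regression numbers, a small Python sketch reproduces them in closed form. It assumes the tiny model of TinyHybridExample.h: x0 ~ N(5, 0.5^2), z_i = x0 + noise with sigma 0.5 for mode 0 and 3.0 for mode 1, and a 4/6 prior on the mode (the prior value is an assumption here, not shown in this diff); normal_pdf, mode_prior, and meas_sigma are local names for this check only, not GTSAM API.

    import numpy as np

    def normal_pdf(x, mean, var):
        return np.exp(-0.5 * (x - mean) ** 2 / var) / np.sqrt(2 * np.pi * var)

    prior_mean, prior_sigma = 5.0, 0.5
    z = [4.0, 6.0]                 # the two measurements used in EliminateTiny2
    mode_prior = [0.4, 0.6]        # assumed 4/6 prior on the mode
    meas_sigma = [0.5, 3.0]        # per-mode measurement noise

    weights = []
    for m in (0, 1):
        # Conditional P(x0 | mode=m, z) in information form.
        info = 1 / prior_sigma**2 + len(z) / meas_sigma[m]**2
        mean = (prior_mean / prior_sigma**2 + sum(z) / meas_sigma[m]**2) / info
        # Prints 5.0 with R = 3.4641, d = 17.3205 (m=0) and R = 2.0548, d = 10.274 (m=1).
        print(mean, np.sqrt(info), mean * np.sqrt(info))
        # Marginal likelihood of the measurements under mode m, via sequential updates.
        mu, var, lik = prior_mean, prior_sigma**2, 1.0
        for zi in z:
            lik *= normal_pdf(zi, mu, var + meas_sigma[m]**2)
            k = var / (var + meas_sigma[m]**2)
            mu, var = mu + k * (zi - mu), (1 - k) * var
        weights.append(mode_prior[m] * lik)

    print(np.array(weights) / sum(weights))  # approx [0.23, 0.77]

Under those assumptions the posterior mean is exactly 5.0 for both modes and the discrete posterior normalizes to roughly 23/77, matching the expected Bayes net above.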
@@ -18,9 +18,9 @@ from gtsam.utils.test_case import GtsamTestCase
 
 import gtsam
 from gtsam import (DiscreteConditional, DiscreteKeys, GaussianConditional,
-                   GaussianMixture, GaussianMixtureFactor, HybridBayesNet, HybridValues,
-                   HybridGaussianFactorGraph, JacobianFactor, Ordering,
-                   noiseModel)
+                   GaussianMixture, GaussianMixtureFactor, HybridBayesNet,
+                   HybridGaussianFactorGraph, HybridValues, JacobianFactor,
+                   Ordering, noiseModel)
 
 
 class TestHybridGaussianFactorGraph(GtsamTestCase):
@@ -96,16 +96,16 @@ class TestHybridGaussianFactorGraph(GtsamTestCase):
         mode = (M(0), 2)
 
         # Create Gaussian mixture Z(0) = X(0) + noise for each measurement.
-        I = np.eye(1)
+        I_1x1 = np.eye(1)
         keys = DiscreteKeys()
         keys.push_back(mode)
         for i in range(num_measurements):
             conditional0 = GaussianConditional.FromMeanAndStddev(Z(i),
-                                                                 I,
+                                                                 I_1x1,
                                                                  X(0), [0],
                                                                  sigma=0.5)
             conditional1 = GaussianConditional.FromMeanAndStddev(Z(i),
-                                                                 I,
+                                                                 I_1x1,
                                                                  X(0), [0],
                                                                  sigma=3)
             bayesNet.emplaceMixture([Z(i)], [X(0)], keys,
@@ -135,24 +135,27 @@ class TestHybridGaussianFactorGraph(GtsamTestCase):
         mean0.insert(Z(0), [5.0])
         mean0.insert(M(0), 0)
         self.assertAlmostEqual(bayesNet1.evaluate(mean0) /
-                               bayesNet2.evaluate(mean0), expected_ratio, delta=1e-9)
+                               bayesNet2.evaluate(mean0), expected_ratio,
+                               delta=1e-9)
         mean1 = HybridValues()
         mean1.insert(X(0), [5.0])
         mean1.insert(Z(0), [5.0])
         mean1.insert(M(0), 1)
         self.assertAlmostEqual(bayesNet1.evaluate(mean1) /
-                               bayesNet2.evaluate(mean1), expected_ratio, delta=1e-9)
+                               bayesNet2.evaluate(mean1), expected_ratio,
+                               delta=1e-9)
 
     @staticmethod
     def measurements(sample: HybridValues, indices) -> gtsam.VectorValues:
-        """Create measurements from a sample, grabbing Z(i) where i in indices."""
+        """Create measurements from a sample, grabbing Z(i) for indices."""
         measurements = gtsam.VectorValues()
         for i in indices:
             measurements.insert(Z(i), sample.at(Z(i)))
         return measurements
 
     @classmethod
-    def factor_graph_from_bayes_net(cls, bayesNet: HybridBayesNet, sample: HybridValues):
+    def factor_graph_from_bayes_net(cls, bayesNet: HybridBayesNet,
+                                    sample: HybridValues):
         """Create a factor graph from the Bayes net with sampled measurements.
         The factor graph is `P(x)P(n) ϕ(x, n; z0) ϕ(x, n; z1) ...`
         and thus represents the same joint probability as the Bayes net.
@@ -170,7 +173,7 @@ class TestHybridGaussianFactorGraph(GtsamTestCase):
     @classmethod
     def estimate_marginals(cls, target, proposal_density: HybridBayesNet,
                            N=10000):
-        """Do importance sampling to get an estimate of the discrete marginal P(mode)."""
+        """Do importance sampling to estimate discrete marginal P(mode)."""
         # Allocate space for marginals on mode.
         marginals = np.zeros((2,))
 
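The body of the sampling loop falls outside this hunk. A self-normalized importance sampler consistent with this signature could look roughly like the sketch below; it is a hypothetical reconstruction that only uses calls appearing elsewhere in the test (sample, evaluate, atDiscrete), and it would sit inside estimate_marginals after the allocation above.

    # Hypothetical sketch of the elided loop body: draw (x0, mode) from the
    # proposal, weight by target/proposal, and accumulate per sampled mode.
    for _ in range(N):
        proposed = proposal_density.sample()   # HybridValues with X(0) and M(0)
        weight = target(proposed) / proposal_density.evaluate(proposed)
        marginals[proposed.atDiscrete(M(0))] += weight
    return marginals / marginals.sum()         # self-normalize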
@@ -190,11 +193,11 @@ class TestHybridGaussianFactorGraph(GtsamTestCase):
 
     def test_tiny(self):
         """Test a tiny two variable hybrid model."""
-        prior_sigma = 0.5
         # P(x0)P(mode)P(z0|x0,mode)
+        prior_sigma = 0.5
         bayesNet = self.tiny(prior_sigma=prior_sigma)
 
-        # Deterministic values exactly at the mean, for both x and z:
+        # Deterministic values exactly at the mean, for both x and Z:
         values = HybridValues()
         values.insert(X(0), [5.0])
         values.insert(M(0), 0)  # low-noise, standard deviation 0.5
@@ -204,22 +207,20 @@
         def unnormalized_posterior(x):
             """Posterior is proportional to joint, centered at 5.0 as well."""
             x.insert(Z(0), [z0])
-            # print(x)
             return bayesNet.evaluate(x)
 
         # Create proposal density on (x0, mode), making sure it has same mean:
         posterior_information = 1/(prior_sigma**2) + 1/(0.5**2)
         posterior_sigma = posterior_information**(-0.5)
-        print(f"Posterior sigma: {posterior_sigma}")
         proposal_density = self.tiny(
             num_measurements=0, prior_mean=5.0, prior_sigma=posterior_sigma)
 
         # Estimate marginals using importance sampling.
         marginals = self.estimate_marginals(
-            target=unnormalized_posterior, proposal_density=proposal_density, N=10_000)
-        print(f"True mode: {values.atDiscrete(M(0))}")
-        print(f"P(mode=0; z0) = {marginals[0]}")
-        print(f"P(mode=1; z0) = {marginals[1]}")
+            target=unnormalized_posterior, proposal_density=proposal_density)
+        # print(f"True mode: {values.atDiscrete(M(0))}")
+        # print(f"P(mode=0; Z) = {marginals[0]}")
+        # print(f"P(mode=1; Z) = {marginals[1]}")
 
         # Check that the estimate is close to the true value.
         self.assertAlmostEqual(marginals[0], 0.74, delta=0.01)
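For reference, the proposal is chosen to match the per-mode posterior width: posterior_information = 1/0.5^2 + 1/0.5^2 = 8, so posterior_sigma = 8^(-0.5) ~= 0.354. Assuming the tiny model uses a 4/6 prior on the mode, the exact marginal at z0 = 5.0 is proportional to 0.4 * N(5; 5, 0.5^2 + 0.5^2) : 0.6 * N(5; 5, 0.5^2 + 3^2) ~= 0.226 : 0.079, which normalizes to about 0.74 : 0.26, the value the importance-sampling estimate is checked against here.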
@@ -233,20 +234,18 @@ class TestHybridGaussianFactorGraph(GtsamTestCase):
         ordering.push_back(X(0))
         ordering.push_back(M(0))
         posterior = fg.eliminateSequential(ordering)
-        print(posterior)
 
         def true_posterior(x):
             """Posterior from elimination."""
             x.insert(Z(0), [z0])
-            # print(x)
             return posterior.evaluate(x)
 
         # Estimate marginals using importance sampling.
         marginals = self.estimate_marginals(
             target=true_posterior, proposal_density=proposal_density)
-        print(f"True mode: {values.atDiscrete(M(0))}")
-        print(f"P(mode=0; z0) = {marginals[0]}")
-        print(f"P(mode=1; z0) = {marginals[1]}")
+        # print(f"True mode: {values.atDiscrete(M(0))}")
+        # print(f"P(mode=0; z0) = {marginals[0]}")
+        # print(f"P(mode=1; z0) = {marginals[1]}")
 
         # Check that the estimate is close to the true value.
         self.assertAlmostEqual(marginals[0], 0.74, delta=0.01)
@@ -256,65 +255,65 @@ class TestHybridGaussianFactorGraph(GtsamTestCase):
     def calculate_ratio(bayesNet: HybridBayesNet,
                         fg: HybridGaussianFactorGraph,
                         sample: HybridValues):
-        """Calculate ratio between Bayes net probability and the factor graph."""
-        return bayesNet.evaluate(sample) / fg.probPrime(sample) if fg.probPrime(sample) > 0 else 0
+        """Calculate ratio between Bayes net and factor graph."""
+        return bayesNet.evaluate(sample) / fg.probPrime(sample) if \
+            fg.probPrime(sample) > 0 else 0
 
-    @unittest.skip("This test is too slow.")
     def test_ratio(self):
         """
-        Given a tiny two variable hybrid model, with 2 measurements,
-        test the ratio of the bayes net model representing P(z, x, n)=P(z|x, n)P(x)P(n)
+        Given a tiny two variable hybrid model, with 2 measurements, test the
+        ratio of the bayes net model representing P(z,x,n)=P(z|x, n)P(x)P(n)
         and the factor graph P(x, n | z)=P(x | n, z)P(n|z),
         both of which represent the same posterior.
         """
-        # Create the Bayes net representing the generative model P(z, x, n)=P(z|x, n)P(x)P(n)
-        bayesNet = self.tiny(num_measurements=2)
-        # Sample from the Bayes net.
-        sample: HybridValues = bayesNet.sample()
-        # print(sample)
+        # Create generative model P(z, x, n)=P(z|x, n)P(x)P(n)
+        prior_sigma = 0.5
+        bayesNet = self.tiny(prior_sigma=prior_sigma, num_measurements=2)
 
-        # Deterministic measurements:
-        deterministic = gtsam.VectorValues()
-        deterministic.insert(Z(0), [5.0])
-        deterministic.insert(Z(1), [5.1])
-        sample.update(deterministic)
+        # Deterministic values exactly at the mean, for both x and Z:
+        values = HybridValues()
+        values.insert(X(0), [5.0])
+        values.insert(M(0), 0)  # high-noise, standard deviation 3
+        measurements = gtsam.VectorValues()
+        measurements.insert(Z(0), [4.0])
+        measurements.insert(Z(1), [6.0])
+        values.insert(measurements)
+
+        def unnormalized_posterior(x):
+            """Posterior is proportional to joint, centered at 5.0 as well."""
+            x.insert(measurements)
+            return bayesNet.evaluate(x)
+
+        # Create proposal density on (x0, mode), making sure it has same mean:
+        posterior_information = 1/(prior_sigma**2) + 2.0/(3.0**2)
+        posterior_sigma = posterior_information**(-0.5)
+        proposal_density = self.tiny(
+            num_measurements=0, prior_mean=5.0, prior_sigma=posterior_sigma)
 
         # Estimate marginals using importance sampling.
-        marginals = self.estimate_marginals(bayesNet, sample, N=10000)
-        print(f"True mode: {sample.atDiscrete(M(0))}")
-        print(f"P(mode=0; z0, z1) = {marginals[0]}")
-        print(f"P(mode=1; z0, z1) = {marginals[1]}")
+        marginals = self.estimate_marginals(
+            target=unnormalized_posterior, proposal_density=proposal_density)
+        # print(f"True mode: {values.atDiscrete(M(0))}")
+        # print(f"P(mode=0; Z) = {marginals[0]}")
+        # print(f"P(mode=1; Z) = {marginals[1]}")
 
-        # Check marginals based on sampled mode.
-        if sample.atDiscrete(M(0)) == 0:
-            self.assertGreater(marginals[0], marginals[1])
-        else:
-            self.assertGreater(marginals[1], marginals[0])
+        # Check that the estimate is close to the true value.
+        self.assertAlmostEqual(marginals[0], 0.23, delta=0.01)
+        self.assertAlmostEqual(marginals[1], 0.77, delta=0.01)
 
-        fg = self.factor_graph_from_bayes_net(bayesNet, sample)
+        # Convert to factor graph using measurements.
+        fg = self.factor_graph_from_bayes_net(bayesNet, values)
         self.assertEqual(fg.size(), 4)
 
-        # Create measurements from the sample.
-        measurements = self.measurements(sample, [0, 1])
-        print(measurements)
-
         # Calculate ratio between Bayes net probability and the factor graph:
-        expected_ratio = self.calculate_ratio(bayesNet, fg, sample)
+        expected_ratio = self.calculate_ratio(bayesNet, fg, values)
         # print(f"expected_ratio: {expected_ratio}\n")
 
-        # Print conditional and factor values side by side:
-        print("\nConditional and factor values:")
-        for i in range(4):
-            print(f"Conditional {i}:")
-            print(bayesNet.at(i).error(sample))
-            print(f"Factor {i}:")
-            print(fg.at(i).error(sample))
-
         # Check with a number of other samples.
         for i in range(10):
-            other = bayesNet.sample()
-            other.update(measurements)
-            ratio = self.calculate_ratio(bayesNet, fg, other)
+            samples = bayesNet.sample()
+            samples.update(measurements)
+            ratio = self.calculate_ratio(bayesNet, fg, samples)
             # print(f"Ratio: {ratio}\n")
             if (ratio > 0):
                 self.assertAlmostEqual(ratio, expected_ratio)
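The constancy check in the loop above works because both objects are evaluated with the measurements pinned to the same values: bayesNet.evaluate gives the joint P(z, x, n) at the fixed z, while fg.probPrime gives the conditional P(x, n | z) only up to an overall normalization constant C, so if the factor graph is correct then P(z, x, n) / probPrime(x, n) = P(z) / C for every sampled (x, n). That is why every sample with nonzero probability must reproduce the same expected_ratio.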
@@ -324,18 +323,17 @@ class TestHybridGaussianFactorGraph(GtsamTestCase):
         ordering.push_back(X(0))
         ordering.push_back(M(0))
         posterior = fg.eliminateSequential(ordering)
-        print(posterior)
 
         # Calculate ratio between Bayes net probability and the factor graph:
-        expected_ratio = self.calculate_ratio(posterior, fg, sample)
-        print(f"expected_ratio: {expected_ratio}\n")
+        expected_ratio = self.calculate_ratio(posterior, fg, values)
+        # print(f"expected_ratio: {expected_ratio}\n")
 
         # Check with a number of other samples.
         for i in range(10):
-            other = posterior.sample()
-            other.insert(measurements)
-            ratio = self.calculate_ratio(posterior, fg, other)
-            print(f"Ratio: {ratio}\n")
+            samples = posterior.sample()
+            samples.insert(measurements)
+            ratio = self.calculate_ratio(posterior, fg, samples)
+            # print(f"Ratio: {ratio}\n")
             if (ratio > 0):
                 self.assertAlmostEqual(ratio, expected_ratio)
 