Fixed discreteElimination
parent 0f48efb0a9
commit 524161403a

@@ -229,12 +229,12 @@ static std::pair<HybridConditional::shared_ptr, std::shared_ptr<Factor>> discret
     // In this case, compute discrete probabilities.
     // TODO(frank): What about the scalar!?
     auto potential = [&](const auto& pair) -> double {
-      auto [factor, _] = pair;
+      auto [factor, scalar] = pair;
       // If the factor is null, it has been pruned, hence return potential of zero
       if (!factor)
-        return 0;
+        return 0.0;
       else
-        return exp(-factor->error(kEmpty));
+        return exp(-scalar - factor->error(kEmpty));
     };
     DecisionTree<Key, double> potentials(gmf->factors(), potential);
     dfg.emplace_shared<DecisionTreeFactor>(gmf->discreteKeys(), potentials);
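
This is the substantive fix. Each entry of gmf->factors() is a (factor, scalar) pair, and the discrete potential has to fold the per-mode scalar into the exponent along with the factor error. In symbols (my reading of the change, not wording from the commit):

    \phi(m) = \exp\big(-(c_m + E_m)\big), \qquad E_m = \text{factor->error(kEmpty)}, \quad c_m = \text{scalar}

With the old \exp(-E_m), modes whose components carry different constants c_m (e.g. Gaussians with different sigmas) got the wrong relative weights; the tests below exercise exactly that case, and the fix also answers the TODO(frank) left in the context lines.
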
@@ -40,8 +40,7 @@ const DiscreteKey m(M(0), 2);
 const DiscreteValues m1Assignment{{M(0), 1}};

 // Define a 60/40 prior on the mode
-DiscreteConditional::shared_ptr mixing =
-    std::make_shared<DiscreteConditional>(m, "60/40");
+DiscreteConditional::shared_ptr mixing = std::make_shared<DiscreteConditional>(m, "60/40");

 /// Gaussian density function
 double Gaussian(double mu, double sigma, double z) {
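
The hunk cuts off the body of Gaussian, but the name and its use in prob_m_z imply the standard normal density; for reference:

    \mathcal{N}(z; \mu, \sigma) = \frac{1}{\sigma\sqrt{2\pi}} \exp\!\left(-\frac{(z-\mu)^2}{2\sigma^2}\right)
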
@@ -53,24 +52,12 @@ double Gaussian(double mu, double sigma, double z) {
  * If sigma0 == sigma1, it simplifies to a sigmoid function.
  * Hardcodes 60/40 prior on mode.
  */
-double prob_m_z(double mu0, double mu1, double sigma0, double sigma1,
-                double z) {
+double prob_m_z(double mu0, double mu1, double sigma0, double sigma1, double z) {
   const double p0 = 0.6 * Gaussian(mu0, sigma0, z);
   const double p1 = 0.4 * Gaussian(mu1, sigma1, z);
   return p1 / (p0 + p1);
 };

-/// Given \phi(m;z)\phi(m) use eliminate to obtain P(m|z).
-DiscreteConditional SolveHFG(const HybridGaussianFactorGraph &hfg) {
-  return *hfg.eliminateSequential()->at(0)->asDiscrete();
-}
-
-/// Given p(z,m) and z, convert to HFG and solve.
-DiscreteConditional SolveHBN(const HybridBayesNet &hbn, double z) {
-  VectorValues given{{Z(0), Vector1(z)}};
-  return SolveHFG(hbn.toFactorGraph(given));
-}
-
 /*
  * Test a Gaussian Mixture Model P(m)p(z|m) with same sigma.
  * The posterior, as a function of z, should be a sigmoid function.
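
For reference, the closed form behind prob_m_z and the sigmoid claim; this is standard Bayes-rule algebra, not part of the commit:

    P(m{=}1 \mid z) = \frac{0.4\,\mathcal{N}(z;\mu_1,\sigma_1)}{0.6\,\mathcal{N}(z;\mu_0,\sigma_0) + 0.4\,\mathcal{N}(z;\mu_1,\sigma_1)}

and when \sigma_0 = \sigma_1 = \sigma it collapses to a logistic curve in z:

    P(m{=}1 \mid z) = \mathrm{logistic}\!\left(\frac{\mu_1-\mu_0}{\sigma^2}\left(z - \frac{\mu_0+\mu_1}{2}\right) - \ln\frac{0.6}{0.4}\right)

At the midpoint between the means the likelihoods cancel, so the posterior equals the 60/40 prior; that is what the midway check in the first test asserts.
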
@@ -81,14 +68,14 @@ TEST(GaussianMixture, GaussianMixtureModel) {

   // Create a Gaussian mixture model p(z|m) with same sigma.
   HybridBayesNet gmm;
-  std::vector<std::pair<Vector, double>> parameters{{Vector1(mu0), sigma},
-                                                    {Vector1(mu1), sigma}};
+  std::vector<std::pair<Vector, double>> parameters{{Vector1(mu0), sigma}, {Vector1(mu1), sigma}};
   gmm.emplace_shared<HybridGaussianConditional>(m, Z(0), parameters);
   gmm.push_back(mixing);

   // At the halfway point between the means, we should recover the 60/40 prior
   double midway = mu1 - mu0;
-  auto pMid = SolveHBN(gmm, midway);
+  auto eliminationResult = gmm.toFactorGraph({{Z(0), Vector1(midway)}}).eliminateSequential();
+  auto pMid = *eliminationResult->at(0)->asDiscrete();
   EXPECT(assert_equal(DiscreteConditional(m, "60/40"), pMid));

   // Everywhere else, the result should be a sigmoid.
@@ -97,7 +84,8 @@ TEST(GaussianMixture, GaussianMixtureModel) {
     const double expected = prob_m_z(mu0, mu1, sigma, sigma, z);

     // Workflow 1: convert HBN to HFG and solve
-    auto posterior1 = SolveHBN(gmm, z);
+    auto eliminationResult1 = gmm.toFactorGraph({{Z(0), Vector1(z)}}).eliminateSequential();
+    auto posterior1 = *eliminationResult1->at(0)->asDiscrete();
     EXPECT_DOUBLES_EQUAL(expected, posterior1(m1Assignment), 1e-8);

     // Workflow 2: directly specify HFG and solve
@@ -105,7 +93,8 @@ TEST(GaussianMixture, GaussianMixtureModel) {
     hfg1.emplace_shared<DecisionTreeFactor>(
         m, std::vector{Gaussian(mu0, sigma, z), Gaussian(mu1, sigma, z)});
     hfg1.push_back(mixing);
-    auto posterior2 = SolveHFG(hfg1);
+    auto eliminationResult2 = hfg1.eliminateSequential();
+    auto posterior2 = *eliminationResult2->at(0)->asDiscrete();
     EXPECT_DOUBLES_EQUAL(expected, posterior2(m1Assignment), 1e-8);
   }
 }
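
The commit deletes the SolveHFG/SolveHBN helpers and inlines them at each call site, which keeps the intermediate elimination result in scope. The recurring pattern, spelled out once as a sketch assembled from the calls visible in this diff (not code from the commit):

    // Condition the Bayes net on a measurement z, eliminate, read off P(m|z).
    VectorValues given{{Z(0), Vector1(z)}};
    HybridGaussianFactorGraph hfg = hbn.toFactorGraph(given);  // phi(m; z) * phi(m)
    HybridBayesNet::shared_ptr result = hfg.eliminateSequential();
    DiscreteConditional posterior = *result->at(0)->asDiscrete();  // P(m|z)

Keeping result in scope is what lets the second test below also query discretePosterior on it.
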
@@ -120,15 +109,27 @@ TEST(GaussianMixture, GaussianMixtureModel2) {

   // Create a Gaussian mixture model p(z|m) with different sigmas.
   HybridBayesNet gmm;
-  std::vector<std::pair<Vector, double>> parameters{{Vector1(mu0), sigma0},
-                                                    {Vector1(mu1), sigma1}};
+  std::vector<std::pair<Vector, double>> parameters{{Vector1(mu0), sigma0}, {Vector1(mu1), sigma1}};
   gmm.emplace_shared<HybridGaussianConditional>(m, Z(0), parameters);
   gmm.push_back(mixing);

   // We get zMax=3.1333 by finding the maximum value of the function, at which
   // point the mode m==1 is about twice as probable as m==0.
   double zMax = 3.133;
-  auto pMax = SolveHBN(gmm, zMax);
+  const VectorValues vv{{Z(0), Vector1(zMax)}};
+  auto gfg = gmm.toFactorGraph(vv);
+
+  // Equality of posteriors asserts that the conversion is correct (same ratios for all modes)
+  const auto& expectedDiscretePosterior = gmm.discretePosterior(vv);
+  EXPECT(assert_equal(expectedDiscretePosterior, gfg.discretePosterior(vv)));
+
+  // Eliminate the graph!
+  auto eliminationResultMax = gfg.eliminateSequential();
+
+  // Equality of posteriors asserts that the elimination is correct (same ratios for all modes)
+  EXPECT(assert_equal(expectedDiscretePosterior, eliminationResultMax->discretePosterior(vv)));
+
+  auto pMax = *eliminationResultMax->at(0)->asDiscrete();
   EXPECT(assert_equal(DiscreteConditional(m, "42/58"), pMax, 1e-4));

   // Everywhere else, the result should be a bell curve like function.
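
The zMax = 3.1333 in the comment can be reproduced numerically by maximizing prob_m_z over z. A minimal standalone sketch: Gaussian and prob_m_z mirror this test file, but the mu/sigma constants below are illustrative placeholders, since the test's actual values lie outside the hunks shown; with the real constants the same search should land near 3.1333:

    #include <cmath>
    #include <cstdio>

    // Mirrors of the test's density and posterior (see the hunks above).
    double Gaussian(double mu, double sigma, double z) {
      static const double kSqrt2Pi = std::sqrt(2.0 * 3.14159265358979323846);
      return std::exp(-0.5 * std::pow((z - mu) / sigma, 2)) / (sigma * kSqrt2Pi);
    }

    double prob_m_z(double mu0, double mu1, double sigma0, double sigma1, double z) {
      const double p0 = 0.6 * Gaussian(mu0, sigma0, z);
      const double p1 = 0.4 * Gaussian(mu1, sigma1, z);
      return p1 / (p0 + p1);
    }

    int main() {
      // Placeholder constants; the real test defines these elsewhere.
      const double mu0 = 0.0, mu1 = 2.0, sigma0 = 4.0, sigma1 = 2.0;
      double zMax = 0.0, pBest = 0.0;
      for (double z = -10.0; z <= 10.0; z += 1e-4) {  // brute-force grid search
        const double p = prob_m_z(mu0, mu1, sigma0, sigma1, z);
        if (p > pBest) { pBest = p; zMax = z; }
      }
      std::printf("zMax = %.4f, P(m=1|zMax) = %.4f\n", zMax, pBest);
      return 0;
    }

Because the narrower component (sigma1 < sigma0) loses to the wider one in both tails, P(m=1|z) is bell-shaped and the grid search finds an interior maximum.
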
@@ -137,7 +138,8 @@ TEST(GaussianMixture, GaussianMixtureModel2) {
     const double expected = prob_m_z(mu0, mu1, sigma0, sigma1, z);

     // Workflow 1: convert HBN to HFG and solve
-    auto posterior1 = SolveHBN(gmm, z);
+    auto eliminationResult1 = gmm.toFactorGraph({{Z(0), Vector1(z)}}).eliminateSequential();
+    auto posterior1 = *eliminationResult1->at(0)->asDiscrete();
     EXPECT_DOUBLES_EQUAL(expected, posterior1(m1Assignment), 1e-8);

     // Workflow 2: directly specify HFG and solve
@@ -145,11 +147,11 @@ TEST(GaussianMixture, GaussianMixtureModel2) {
     hfg.emplace_shared<DecisionTreeFactor>(
         m, std::vector{Gaussian(mu0, sigma0, z), Gaussian(mu1, sigma1, z)});
     hfg.push_back(mixing);
-    auto posterior2 = SolveHFG(hfg);
+    auto eliminationResult2 = hfg.eliminateSequential();
+    auto posterior2 = *eliminationResult2->at(0)->asDiscrete();
     EXPECT_DOUBLES_EQUAL(expected, posterior2(m1Assignment), 1e-8);
   }
 }

 /* ************************************************************************* */
 int main() {
   TestResult tr;
@@ -518,8 +518,6 @@ TEST(HybridEstimation, CorrectnessViaSampling) {
   // the normalizing term computed via the Bayes net determinant.
   const HybridValues sample = bn->sample(&rng);
   double expected_ratio = compute_ratio(sample);
-  // regression
-  EXPECT_DOUBLES_EQUAL(0.728588, expected_ratio, 1e-6);

   // 3. Do sampling
   constexpr int num_samples = 10;
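
Dropping the hard-coded 0.728588 regression is consistent with the potential fix above: including the scalars changes the overall normalization constant, so the old regression value no longer holds. What the test actually needs (my reading; the commit gives no rationale) is only that the ratio is identical for every sample:

    \frac{f(x, m)}{p(x, m \mid z)} = Z \quad \text{for every sample } (x, m)

where f is the unnormalized factor-graph density, p the Bayes-net density, and Z the "normalizing term computed via the Bayes net determinant" mentioned in the context comment. The num_samples loop that follows checks exactly this constancy against expected_ratio.
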