updated tests
parent 2d2213e880
commit 6488a0ceec
@@ -180,16 +180,16 @@ TEST(HybridGaussianConditional, Error2) {
   // Check result.
   DiscreteKeys discrete_keys{mode};
-  double logNormalizer0 = conditionals[0]->logNormalizationConstant();
-  double logNormalizer1 = conditionals[1]->logNormalizationConstant();
-  double minLogNormalizer = std::min(logNormalizer0, logNormalizer1);
+  double errorConstant0 = conditionals[0]->errorConstant();
+  double errorConstant1 = conditionals[1]->errorConstant();
+  double minErrorConstant = std::min(errorConstant0, errorConstant1);

   // Expected error is e(X) + log(sqrt(|2πΣ|)).
-  // We normalize log(sqrt(|2πΣ|)) with min(logNormalizers)
+  // We normalize log(sqrt(|2πΣ|)) with min(errorConstant)
   // so it is non-negative.
   std::vector<double> leaves = {
-      conditionals[0]->error(vv) + logNormalizer0 - minLogNormalizer,
-      conditionals[1]->error(vv) + logNormalizer1 - minLogNormalizer};
+      conditionals[0]->error(vv) + errorConstant0 - minErrorConstant,
+      conditionals[1]->error(vv) + errorConstant1 - minErrorConstant};
   AlgebraicDecisionTree<Key> expected(discrete_keys, leaves);

   EXPECT(assert_equal(expected, actual, 1e-6));
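For reference, a minimal standalone sketch (not GTSAM code, illustrative values only) of the sign convention the updated tests assume: logNormalizationConstant() returns log(1/sqrt(|2πΣ|)), and errorConstant() returns its negation, log(sqrt(|2πΣ|)), which is the scalar added to the quadratic error in the leaves above.

    // Sketch of the assumed sign convention; the sigma is chosen to match R = [ 10.0499 ]
    // in the expected output later in this diff, so it reproduces the 1.38862 value.
    #include <cmath>
    #include <iostream>

    int main() {
      const double sigma = 1.0 / 10.0499;  // illustrative 1D standard deviation
      // log of the Gaussian normalization constant: log(1/sqrt(2*pi*sigma^2))
      const double logNormalizationConstant =
          -0.5 * std::log(2.0 * M_PI * sigma * sigma);
      // errorConstant is its negation, log(sqrt(2*pi*sigma^2))
      const double errorConstant = -logNormalizationConstant;
      std::cout << logNormalizationConstant << "\n";  // ~1.38862
      std::cout << errorConstant << "\n";             // ~-1.38862
      return 0;
    }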
@@ -198,8 +198,8 @@ TEST(HybridGaussianConditional, Error2) {
   for (size_t mode : {0, 1}) {
     const HybridValues hv{vv, {{M(0), mode}}};
     EXPECT_DOUBLES_EQUAL(conditionals[mode]->error(vv) +
-                             conditionals[mode]->logNormalizationConstant() -
-                             minLogNormalizer,
+                             conditionals[mode]->errorConstant() -
+                             minErrorConstant,
                          hybrid_conditional.error(hv), 1e-8);
   }
 }
@@ -231,8 +231,8 @@ TEST(HybridGaussianConditional, Likelihood2) {
   CHECK(jf1->rows() == 2);

   // Check that the constant C1 is properly encoded in the JacobianFactor.
-  const double C1 = conditionals[1]->logNormalizationConstant() -
-                    hybrid_conditional.logNormalizationConstant();
+  const double C1 = hybrid_conditional.logNormalizationConstant() -
+                    conditionals[1]->logNormalizationConstant();
   const double c1 = std::sqrt(2.0 * C1);
   Vector expected_unwhitened(2);
   expected_unwhitened << 4.9 - 5.0, -c1;
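For reference, a minimal standalone check (illustrative value for C1) of the algebra the expected_unwhitened vector relies on: a residual entry of -c1 with c1 = sqrt(2*C1) contributes 0.5*c1^2 = C1 to the factor error, which is how the constant difference is folded into the JacobianFactor; only the order of the two logNormalizationConstant() terms changes here, matching the flipped sign convention.

    // Sketch: 0.5 * c1^2 == C1.
    #include <cassert>
    #include <cmath>

    int main() {
      const double C1 = 0.1;  // illustrative; in the test it is a difference of log constants
      const double c1 = std::sqrt(2.0 * C1);
      assert(std::abs(0.5 * c1 * c1 - C1) < 1e-12);
      return 0;
    }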
@@ -780,9 +780,8 @@ static HybridGaussianFactorGraph CreateFactorGraph(
   // Create HybridGaussianFactor
-  // We take negative since we want
-  // the underlying scalar to be log(\sqrt(|2πΣ|))
-  std::vector<GaussianFactorValuePair> factors{
-      {f0, model0->logNormalizationConstant()},
-      {f1, model1->logNormalizationConstant()}};
+  std::vector<GaussianFactorValuePair> factors{{f0, model0->errorConstant()},
+                                               {f1, model1->errorConstant()}};
   HybridGaussianFactor motionFactor({X(0), X(1)}, m1, factors);

   HybridGaussianFactorGraph hfg;
@@ -714,26 +714,26 @@ factor 6: P( m1 | m0 ):
 size: 3
 conditional 0: Hybrid P( x0 | x1 m0)
  Discrete Keys = (m0, 2),
- logNormalizationConstant: -1.38862
+ logNormalizationConstant: 1.38862

 Choice(m0)
 0 Leaf p(x0 | x1)
   R = [ 10.0499 ]
   S[x1] = [ -0.0995037 ]
   d = [ -9.85087 ]
-  logNormalizationConstant: -1.38862
+  logNormalizationConstant: 1.38862
   No noise model

 1 Leaf p(x0 | x1)
   R = [ 10.0499 ]
   S[x1] = [ -0.0995037 ]
   d = [ -9.95037 ]
-  logNormalizationConstant: -1.38862
+  logNormalizationConstant: 1.38862
   No noise model

 conditional 1: Hybrid P( x1 | x2 m0 m1)
  Discrete Keys = (m0, 2), (m1, 2),
- logNormalizationConstant: -1.3935
+ logNormalizationConstant: 1.3935

 Choice(m1)
 0 Choice(m0)
@@ -741,14 +741,14 @@ conditional 1: Hybrid P( x1 | x2 m0 m1)
   R = [ 10.099 ]
   S[x2] = [ -0.0990196 ]
   d = [ -9.99901 ]
-  logNormalizationConstant: -1.3935
+  logNormalizationConstant: 1.3935
   No noise model

 0 1 Leaf p(x1 | x2)
   R = [ 10.099 ]
   S[x2] = [ -0.0990196 ]
   d = [ -9.90098 ]
-  logNormalizationConstant: -1.3935
+  logNormalizationConstant: 1.3935
   No noise model

 1 Choice(m0)
@@ -756,19 +756,19 @@ conditional 1: Hybrid P( x1 | x2 m0 m1)
   R = [ 10.099 ]
   S[x2] = [ -0.0990196 ]
   d = [ -10.098 ]
-  logNormalizationConstant: -1.3935
+  logNormalizationConstant: 1.3935
   No noise model

 1 1 Leaf p(x1 | x2)
   R = [ 10.099 ]
   S[x2] = [ -0.0990196 ]
   d = [ -10 ]
-  logNormalizationConstant: -1.3935
+  logNormalizationConstant: 1.3935
   No noise model

 conditional 2: Hybrid P( x2 | m0 m1)
  Discrete Keys = (m0, 2), (m1, 2),
- logNormalizationConstant: -1.38857
+ logNormalizationConstant: 1.38857

 Choice(m1)
 0 Choice(m0)
@@ -777,7 +777,7 @@ conditional 2: Hybrid P( x2 | m0 m1)
   d = [ -10.1489 ]
   mean: 1 elements
   x2: -1.0099
-  logNormalizationConstant: -1.38857
+  logNormalizationConstant: 1.38857
   No noise model

 0 1 Leaf p(x2)
@@ -785,7 +785,7 @@ conditional 2: Hybrid P( x2 | m0 m1)
   d = [ -10.1479 ]
   mean: 1 elements
   x2: -1.0098
-  logNormalizationConstant: -1.38857
+  logNormalizationConstant: 1.38857
   No noise model

 1 Choice(m0)
@@ -794,7 +794,7 @@ conditional 2: Hybrid P( x2 | m0 m1)
   d = [ -10.0504 ]
   mean: 1 elements
   x2: -1.0001
-  logNormalizationConstant: -1.38857
+  logNormalizationConstant: 1.38857
   No noise model

 1 1 Leaf p(x2)
@@ -802,7 +802,7 @@ conditional 2: Hybrid P( x2 | m0 m1)
   d = [ -10.0494 ]
   mean: 1 elements
   x2: -1
-  logNormalizationConstant: -1.38857
+  logNormalizationConstant: 1.38857
   No noise model

 )";
@@ -902,9 +902,8 @@ static HybridNonlinearFactorGraph CreateFactorGraph(
   // Create HybridNonlinearFactor
-  // We take negative since we want
-  // the underlying scalar to be log(\sqrt(|2πΣ|))
-  std::vector<NonlinearFactorValuePair> factors{
-      {f0, model0->logNormalizationConstant()},
-      {f1, model1->logNormalizationConstant()}};
+  std::vector<NonlinearFactorValuePair> factors{{f0, model0->errorConstant()},
+                                                {f1, model1->errorConstant()}};

   HybridNonlinearFactor mixtureFactor({X(0), X(1)}, m1, factors);
@@ -76,11 +76,11 @@ TEST(GaussianBayesNet, Evaluate1) {
   // the normalization constant 1.0/sqrt((2*pi*Sigma).det()).
   // The covariance matrix inv(Sigma) = R'*R, so the determinant is
   const double constant = sqrt((invSigma / (2 * M_PI)).determinant());
-  EXPECT_DOUBLES_EQUAL(-log(constant),
+  EXPECT_DOUBLES_EQUAL(log(constant),
                        smallBayesNet.at(0)->logNormalizationConstant() +
                            smallBayesNet.at(1)->logNormalizationConstant(),
                        1e-9);
-  EXPECT_DOUBLES_EQUAL(-log(constant), smallBayesNet.logNormalizationConstant(),
+  EXPECT_DOUBLES_EQUAL(log(constant), smallBayesNet.logNormalizationConstant(),
                        1e-9);
   const double actual = smallBayesNet.evaluate(mean);
   EXPECT_DOUBLES_EQUAL(constant, actual, 1e-9);
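For reference, a standalone Eigen sketch (illustrative covariance, not the test's smallBayesNet) of the identity this assertion now uses directly: log(sqrt(det(invSigma/(2π)))) equals -0.5*log(det(2πΣ)), i.e. log of the Gaussian normalization constant, so the conditionals' log normalization constants should sum to log(constant) rather than -log(constant).

    // Sketch: log(sqrt(det(invSigma/(2*pi)))) == -0.5 * log(det(2*pi*Sigma)).
    #include <Eigen/Dense>
    #include <cmath>
    #include <iostream>

    int main() {
      // Illustrative diagonal covariance; any symmetric positive-definite matrix works.
      Eigen::Matrix2d Sigma = Eigen::Vector2d(0.25, 0.04).asDiagonal();
      const Eigen::Matrix2d invSigma = Sigma.inverse();
      const Eigen::Matrix2d scaled = invSigma / (2.0 * M_PI);
      const double constant = std::sqrt(scaled.determinant());        // 1/sqrt(|2*pi*Sigma|)
      const Eigen::Matrix2d twoPiSigma = 2.0 * M_PI * Sigma;
      const double logK = -0.5 * std::log(twoPiSigma.determinant());  // log(1/sqrt(|2*pi*Sigma|))
      std::cout << std::log(constant) - logK << "\n";                 // ~0
      return 0;
    }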
@@ -493,7 +493,7 @@ TEST(GaussianConditional, LogNormalizationConstant) {
   x.insert(X(0), Vector3::Zero());
   Matrix3 Sigma = I_3x3 * sigma * sigma;
   double expectedLogNormalizationConstant =
-      -log(1 / sqrt((2 * M_PI * Sigma).determinant()));
+      log(1 / sqrt((2 * M_PI * Sigma).determinant()));

   EXPECT_DOUBLES_EQUAL(expectedLogNormalizationConstant,
                        conditional.logNormalizationConstant(), 1e-9);
@@ -517,7 +517,7 @@ TEST(GaussianConditional, Print) {
       " d = [ 20 40 ]\n"
       " mean: 1 elements\n"
       " x0: 20 40\n"
-      " logNormalizationConstant: 4.0351\n"
+      " logNormalizationConstant: -4.0351\n"
       "isotropic dim=2 sigma=3\n";
   EXPECT(assert_print_equal(expected, conditional, "GaussianConditional"));
@@ -532,7 +532,7 @@ TEST(GaussianConditional, Print) {
       " S[x1] = [ -1 -2 ]\n"
       "         [ -3 -4 ]\n"
       " d = [ 20 40 ]\n"
-      " logNormalizationConstant: 4.0351\n"
+      " logNormalizationConstant: -4.0351\n"
       "isotropic dim=2 sigma=3\n";
   EXPECT(assert_print_equal(expected1, conditional1, "GaussianConditional"));
@@ -548,7 +548,7 @@ TEST(GaussianConditional, Print) {
       " S[y1] = [ -5 -6 ]\n"
       "         [ -7 -8 ]\n"
       " d = [ 20 40 ]\n"
-      " logNormalizationConstant: 4.0351\n"
+      " logNormalizationConstant: -4.0351\n"
       "isotropic dim=2 sigma=3\n";
   EXPECT(assert_print_equal(expected2, conditional2, "GaussianConditional"));
 }
@@ -55,7 +55,7 @@ TEST(GaussianDensity, FromMeanAndStddev) {
   double expected1 = 0.5 * e.dot(e);
   EXPECT_DOUBLES_EQUAL(expected1, density.error(values), 1e-9);

-  double expected2 = -(density.logNormalizationConstant() + 0.5 * e.dot(e));
+  double expected2 = -(density.errorConstant() + 0.5 * e.dot(e));
   EXPECT_DOUBLES_EQUAL(expected2, density.logProbability(values), 1e-9);
 }
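For reference, a minimal 1D sketch (illustrative numbers, no GTSAM calls) of the decomposition this test asserts: logProbability(x) = -(errorConstant + error(x)), i.e. the Gaussian log density split into its quadratic error and the log-determinant constant.

    // Sketch: log N(x; mu, sigma^2) == -(errorConstant + 0.5*((x-mu)/sigma)^2).
    #include <cmath>
    #include <iostream>

    int main() {
      const double mu = 1.0, sigma = 0.5, x = 1.3;  // illustrative values
      const double error = 0.5 * std::pow((x - mu) / sigma, 2.0);
      const double errorConstant = 0.5 * std::log(2.0 * M_PI * sigma * sigma);
      const double logProbability = -(errorConstant + error);
      // Direct evaluation of the Gaussian log density for comparison:
      const double direct = std::log(1.0 / (sigma * std::sqrt(2.0 * M_PI))) -
                            0.5 * std::pow((x - mu) / sigma, 2.0);
      std::cout << logProbability - direct << "\n";  // ~0
      return 0;
    }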
@@ -810,9 +810,9 @@ TEST(NoiseModel, NonDiagonalGaussian)
 TEST(NoiseModel, LogNormalizationConstant1D) {
   // Very simple 1D noise model, which we can compute by hand.
   double sigma = 0.1;
-  // For expected values, we compute -log(1/sqrt(|2πΣ|)) by hand.
-  // = 0.5*(log(2π) + log(Σ)) (since it is 1D)
-  double expected_value = 0.5 * log(2 * M_PI * sigma * sigma);
+  // For expected values, we compute log(1/sqrt(|2πΣ|)) by hand.
+  // = -0.5*(log(2π) + log(Σ)) (since it is 1D)
+  double expected_value = -0.5 * log(2 * M_PI * sigma * sigma);

   // Gaussian
   {
@@ -839,7 +839,7 @@ TEST(NoiseModel, LogNormalizationConstant1D) {
     auto noise_model = Unit::Create(1);
     double actual_value = noise_model->logNormalizationConstant();
     double sigma = 1.0;
-    expected_value = 0.5 * log(2 * M_PI * sigma * sigma);
+    expected_value = -0.5 * log(2 * M_PI * sigma * sigma);
     EXPECT_DOUBLES_EQUAL(expected_value, actual_value, 1e-9);
   }
 }
@@ -850,7 +850,7 @@ TEST(NoiseModel, LogNormalizationConstant3D) {
   size_t n = 3;
   // We compute the expected values just like in the LogNormalizationConstant1D
   // test, but we multiply by 3 due to the determinant.
-  double expected_value = 0.5 * n * log(2 * M_PI * sigma * sigma);
+  double expected_value = -0.5 * n * log(2 * M_PI * sigma * sigma);

   // Gaussian
   {
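For reference, the "multiply by 3" in that comment follows from the determinant of an isotropic 3D covariance: det(2πσ²I₃) = (2πσ²)³, so log(1/sqrt(|2πΣ|)) = -0.5·n·log(2πσ²). A short standalone check with an illustrative sigma:

    // Sketch: for Sigma = sigma^2 * I_n, -0.5*log(det(2*pi*Sigma)) == -0.5*n*log(2*pi*sigma^2).
    #include <cmath>
    #include <iostream>

    int main() {
      const double sigma = 0.1;  // illustrative value
      const int n = 3;
      const double viaDeterminant =
          -0.5 * std::log(std::pow(2.0 * M_PI * sigma * sigma, double(n)));
      const double viaDimension = -0.5 * n * std::log(2.0 * M_PI * sigma * sigma);
      std::cout << viaDeterminant - viaDimension << "\n";  // ~0
      return 0;
    }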
@@ -879,7 +879,7 @@ TEST(NoiseModel, LogNormalizationConstant3D) {
     auto noise_model = Unit::Create(3);
     double actual_value = noise_model->logNormalizationConstant();
     double sigma = 1.0;
-    expected_value = 0.5 * n * log(2 * M_PI * sigma * sigma);
+    expected_value = -0.5 * n * log(2 * M_PI * sigma * sigma);
     EXPECT_DOUBLES_EQUAL(expected_value, actual_value, 1e-9);
   }
 }
@@ -90,8 +90,7 @@ class TestHybridBayesNet(GtsamTestCase):
             self.assertTrue(probability >= 0.0)
             logProb = conditional.logProbability(values)
             self.assertAlmostEqual(probability, np.exp(logProb))
-            expected = -(conditional.logNormalizationConstant() + \
-                         conditional.error(values))
+            expected = -(conditional.errorConstant() + conditional.error(values))
             self.assertAlmostEqual(logProb, expected)