Fixed tests to work with new definition of error.
parent 877e564744
commit 4858e39ecf
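All of the hunks below follow from the same change: a conditional's error now subtracts its log-normalization constant, so exp(-error) is the properly normalized density rather than just the unnormalized 0.5 * ||e||^2 term, and the hardcoded regression values are replaced by calls to the conditionals themselves. The following is a minimal sketch of that convention, not code from this commit, assuming the usual GTSAM headers and the symbol_shorthand::X keys these tests already use:

// Sketch of the error convention the updated tests encode (assumed, not from this commit):
//   error(x) = 0.5 * ||(x - mean) / sigma||^2 - logNormalizationConstant(),
// so that exp(-error(x)) is the normalized Gaussian density.
#include <gtsam/inference/Symbol.h>
#include <gtsam/linear/GaussianConditional.h>
#include <gtsam/linear/VectorValues.h>

#include <cmath>

using namespace gtsam;
using symbol_shorthand::X;

int main() {
  const double sigma = 2.0;
  auto conditional =
      GaussianConditional::FromMeanAndStddev(X(0), Vector1::Zero(), sigma);

  VectorValues values;
  values.insert(X(0), Vector1::Constant(1.0));  // half a standard deviation from the mean

  // Mahalanobis term 0.5 * (x / sigma)^2, which is what the old tests expected ...
  const double mahalanobis = 0.5 * std::pow(1.0 / sigma, 2);
  // ... minus the log-normalization constant log(1 / sqrt(2 * pi * sigma^2)),
  // which is what the updated tests expect.
  const double expected = mahalanobis - conditional.logNormalizationConstant();

  return std::abs(conditional.error(values) - expected) < 1e-9 ? 0 : 1;
}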
@@ -118,9 +118,10 @@ TEST(GaussianMixture, Error) {
   values.insert(X(2), Vector2::Zero());
   auto error_tree = mixture.error(values);
 
-  // regression
+  // Check result.
   std::vector<DiscreteKey> discrete_keys = {m1};
-  std::vector<double> leaves = {0.5, 4.3252595};
+  std::vector<double> leaves = {conditional0->error(values),
+                                conditional1->error(values)};
   AlgebraicDecisionTree<Key> expected_error(discrete_keys, leaves);
 
   EXPECT(assert_equal(expected_error, error_tree, 1e-6));
@@ -128,10 +129,11 @@ TEST(GaussianMixture, Error) {
   // Regression for non-tree version.
   DiscreteValues assignment;
   assignment[M(1)] = 0;
-  EXPECT_DOUBLES_EQUAL(0.5, mixture.error({values, assignment}), 1e-8);
+  EXPECT_DOUBLES_EQUAL(conditional0->error(values),
+                       mixture.error({values, assignment}), 1e-8);
   assignment[M(1)] = 1;
-  EXPECT_DOUBLES_EQUAL(4.3252595155709335, mixture.error({values, assignment}),
-                       1e-8);
+  EXPECT_DOUBLES_EQUAL(conditional1->error(values),
+                       mixture.error({values, assignment}), 1e-8);
 }
 
 /* ************************************************************************* */
@@ -217,23 +217,22 @@ TEST(HybridBayesNet, Error) {
   auto error_tree = hybridBayesNet->error(delta.continuous());
 
   std::vector<DiscreteKey> discrete_keys = {{M(0), 2}, {M(1), 2}};
-  std::vector<double> leaves = {0.0097568009, 3.3973404e-31, 0.029126214,
-                                0.0097568009};
+  std::vector<double> leaves = {-4.1609374, -4.1706942, -4.141568, -4.1609374};
   AlgebraicDecisionTree<Key> expected_error(discrete_keys, leaves);
 
   // regression
-  EXPECT(assert_equal(expected_error, error_tree, 1e-9));
+  EXPECT(assert_equal(expected_error, error_tree, 1e-6));
 
   // Error on pruned Bayes net
   auto prunedBayesNet = hybridBayesNet->prune(2);
   auto pruned_error_tree = prunedBayesNet.error(delta.continuous());
 
-  std::vector<double> pruned_leaves = {2e50, 3.3973404e-31, 2e50, 0.0097568009};
+  std::vector<double> pruned_leaves = {2e50, -4.1706942, 2e50, -4.1609374};
   AlgebraicDecisionTree<Key> expected_pruned_error(discrete_keys,
                                                    pruned_leaves);
 
   // regression
-  EXPECT(assert_equal(expected_pruned_error, pruned_error_tree, 1e-9));
+  EXPECT(assert_equal(expected_pruned_error, pruned_error_tree, 1e-6));
 
   // Verify error computation and check for specific error value
   DiscreteValues discrete_values{{M(0), 1}, {M(1), 1}};
@@ -381,13 +381,13 @@ TEST(GaussianConditional, FromMeanAndStddev) {
   auto conditional1 =
       GaussianConditional::FromMeanAndStddev(X(0), A1, X(1), b, sigma);
   Vector2 e1 = (x0 - (A1 * x1 + b)) / sigma;
-  double expected1 = 0.5 * e1.dot(e1);
+  double expected1 = 0.5 * e1.dot(e1) - conditional1.logNormalizationConstant();
   EXPECT_DOUBLES_EQUAL(expected1, conditional1.error(values), 1e-9);
 
   auto conditional2 = GaussianConditional::FromMeanAndStddev(X(0), A1, X(1), A2,
                                                              X(2), b, sigma);
   Vector2 e2 = (x0 - (A1 * x1 + A2 * x2 + b)) / sigma;
-  double expected2 = 0.5 * e2.dot(e2);
+  double expected2 = 0.5 * e2.dot(e2) - conditional2.logNormalizationConstant();
   EXPECT_DOUBLES_EQUAL(expected2, conditional2.error(values), 1e-9);
 }
@@ -448,20 +448,23 @@ TEST(GaussianConditional, sample) {
 }
 
 /* ************************************************************************* */
-TEST(GaussianConditional, LogNormalizationConstant) {
+TEST(GaussianConditional, Error) {
   // Create univariate standard gaussian conditional
-  auto std_gaussian =
+  auto stdGaussian =
       GaussianConditional::FromMeanAndStddev(X(0), Vector1::Zero(), 1.0);
   VectorValues values;
   values.insert(X(0), Vector1::Zero());
-  double logDensity = std_gaussian.logDensity(values);
+  double error = stdGaussian.error(values);
 
-  // Regression.
-  EXPECT_DOUBLES_EQUAL(-0.9189385332046727, logDensity, 1e-9);
-  EXPECT_DOUBLES_EQUAL(0.3989422804014327, exp(logDensity), 1e-9);
+  // These values were computed by hand for a univariate standard gaussian.
+  EXPECT_DOUBLES_EQUAL(0.9189385332046727, error, 1e-9);
+  EXPECT_DOUBLES_EQUAL(0.3989422804014327, exp(-error), 1e-9);
+}
 
-  // Similar test for multivariate gaussian but with sigma 2.0
+/* ************************************************************************* */
+// Similar test for multivariate gaussian but with sigma 2.0
+TEST(GaussianConditional, LogNormalizationConstant) {
   double sigma = 2.0;
   auto conditional = GaussianConditional::FromMeanAndStddev(X(0), Vector3::Zero(), sigma);
   VectorValues x;
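For reference, the hand-computed values in the univariate test above are the standard normal evaluated at its mean: the negative log-density is 0.5 * ln(2 * pi) ≈ 0.9189385332046727, and the density itself is 1 / sqrt(2 * pi) ≈ 0.3989422804014327.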
@@ -469,7 +472,8 @@ TEST(GaussianConditional, LogNormalizationConstant) {
   Matrix3 Sigma = I_3x3 * sigma * sigma;
   double expectedLogNormalizingConstant = log(1 / sqrt((2 * M_PI * Sigma).determinant()));
 
-  EXPECT_DOUBLES_EQUAL(expectedLogNormalizingConstant, conditional.logNormalizationConstant(), 1e-9);
+  EXPECT_DOUBLES_EQUAL(expectedLogNormalizingConstant,
+                       conditional.logNormalizationConstant(), 1e-9);
 }
 
 /* ************************************************************************* */
@@ -52,7 +52,7 @@ TEST(GaussianDensity, FromMeanAndStddev) {
 
   auto density = GaussianDensity::FromMeanAndStddev(X(0), b, sigma);
   Vector2 e = (x0 - b) / sigma;
-  double expected = 0.5 * e.dot(e);
+  double expected = 0.5 * e.dot(e) - density.logNormalizationConstant();
   EXPECT_DOUBLES_EQUAL(expected, density.error(values), 1e-9);
 }