Refactored tests and removed incorrect (R not upper-triangular) test.

release/4.3a0
Frank Dellaert 2023-01-16 15:34:00 -08:00
parent 57e59d1237
commit 7a41180e82
1 changed file with 145 additions and 128 deletions

View File

@ -30,137 +30,142 @@
// Include for test suite // Include for test suite
#include <CppUnitLite/TestHarness.h> #include <CppUnitLite/TestHarness.h>
using namespace std;
using namespace gtsam; using namespace gtsam;
using noiseModel::Isotropic; using noiseModel::Isotropic;
using symbol_shorthand::M; using symbol_shorthand::M;
using symbol_shorthand::X; using symbol_shorthand::X;
using symbol_shorthand::Z; using symbol_shorthand::Z;
/* ************************************************************************* */ // Common constants
/* Check construction of GaussianMixture P(x1 | x2, m1) as well as accessing a static const Key modeKey = M(0);
* specific mode i.e. P(x1 | x2, m1=1). static const DiscreteKey mode(modeKey, 2);
*/ static const VectorValues vv{{Z(0), Vector1(4.9)}, {X(0), Vector1(5.0)}};
TEST(GaussianMixture, Equals) { static const DiscreteValues assignment0{{M(0), 0}}, assignment1{{M(0), 1}};
// create a conditional gaussian node static const HybridValues hv0{vv, assignment0};
Matrix S1(2, 2); static const HybridValues hv1{vv, assignment1};
S1(0, 0) = 1;
S1(1, 0) = 2;
S1(0, 1) = 3;
S1(1, 1) = 4;
Matrix S2(2, 2);
S2(0, 0) = 6;
S2(1, 0) = 0.2;
S2(0, 1) = 8;
S2(1, 1) = 0.4;
Matrix R1(2, 2);
R1(0, 0) = 0.1;
R1(1, 0) = 0.3;
R1(0, 1) = 0.0;
R1(1, 1) = 0.34;
Matrix R2(2, 2);
R2(0, 0) = 0.1;
R2(1, 0) = 0.3;
R2(0, 1) = 0.0;
R2(1, 1) = 0.34;
SharedDiagonal model = noiseModel::Diagonal::Sigmas(Vector2(1.0, 0.34));
Vector2 d1(0.2, 0.5), d2(0.5, 0.2);
auto conditional0 = boost::make_shared<GaussianConditional>(X(1), d1, R1,
X(2), S1, model),
conditional1 = boost::make_shared<GaussianConditional>(X(1), d2, R2,
X(2), S2, model);
// Create decision tree
DiscreteKey m1(1, 2);
GaussianMixture::Conditionals conditionals(
{m1},
vector<GaussianConditional::shared_ptr>{conditional0, conditional1});
GaussianMixture mixture({X(1)}, {X(2)}, {m1}, conditionals);
// Let's check that this worked:
DiscreteValues mode;
mode[m1.first] = 1;
auto actual = mixture(mode);
EXPECT(actual == conditional1);
}
/* ************************************************************************* */ /* ************************************************************************* */
/// Test error method of GaussianMixture. namespace equal_constants {
TEST(GaussianMixture, Error) { // Create a simple GaussianMixture
Matrix22 S1 = Matrix22::Identity(); const double commonSigma = 2.0;
Matrix22 S2 = Matrix22::Identity() * 2; const std::vector<GaussianConditional::shared_ptr> conditionals{
Matrix22 R1 = Matrix22::Ones(); GaussianConditional::sharedMeanAndStddev(Z(0), I_1x1, X(0), Vector1(0.0),
Matrix22 R2 = Matrix22::Ones(); commonSigma),
Vector2 d1(1, 2), d2(2, 1); GaussianConditional::sharedMeanAndStddev(Z(0), I_1x1, X(0), Vector1(0.0),
commonSigma)};
const GaussianMixture mixture({Z(0)}, {X(0)}, {mode}, conditionals);
} // namespace equal_constants
SharedDiagonal model = noiseModel::Diagonal::Sigmas(Vector2(1.0, 0.34)); /* ************************************************************************* */
/// Check that invariants hold
auto conditional0 = boost::make_shared<GaussianConditional>(X(1), d1, R1, TEST(GaussianMixture, Invariants) {
X(2), S1, model), using namespace equal_constants;
conditional1 = boost::make_shared<GaussianConditional>(X(1), d2, R2,
X(2), S2, model);
// Create Gaussian Mixture.
DiscreteKey m1(M(1), 2);
GaussianMixture::Conditionals conditionals(
{m1},
vector<GaussianConditional::shared_ptr>{conditional0, conditional1});
GaussianMixture mixture({X(1)}, {X(2)}, {m1}, conditionals);
// Check that normalizationConstants returns nullptr, as all constants equal. // Check that normalizationConstants returns nullptr, as all constants equal.
CHECK(!mixture.normalizationConstants()); CHECK(!mixture.normalizationConstants());
VectorValues values; // Check that the mixture normalization constant is the max of all constants
values.insert(X(1), Vector2::Ones()); // which are all equal, in this case, hence:
values.insert(X(2), Vector2::Zero()); const double K = mixture.logNormalizationConstant();
auto error_tree = mixture.logProbability(values); EXPECT_DOUBLES_EQUAL(K, conditionals[0]->logNormalizationConstant(), 1e-8);
EXPECT_DOUBLES_EQUAL(K, conditionals[1]->logNormalizationConstant(), 1e-8);
// Check result. EXPECT(GaussianMixture::CheckInvariants(mixture, hv0));
std::vector<DiscreteKey> discrete_keys = {m1}; EXPECT(GaussianMixture::CheckInvariants(mixture, hv1));
std::vector<double> leaves = {conditional0->logProbability(values),
conditional1->logProbability(values)};
AlgebraicDecisionTree<Key> expected_error(discrete_keys, leaves);
EXPECT(assert_equal(expected_error, error_tree, 1e-6));
// Regression for non-tree version.
DiscreteValues assignment;
assignment[M(1)] = 0;
EXPECT_DOUBLES_EQUAL(conditional0->logProbability(values),
mixture.logProbability({values, assignment}), 1e-8);
assignment[M(1)] = 1;
EXPECT_DOUBLES_EQUAL(conditional1->logProbability(values),
mixture.logProbability({values, assignment}), 1e-8);
} }
/* ************************************************************************* */ /* ************************************************************************* */
// Create mode key: 0 is low-noise, 1 is high-noise. /// Check LogProbability.
static const Key modeKey = M(0); TEST(GaussianMixture, LogProbability) {
static const DiscreteKey mode(modeKey, 2); using namespace equal_constants;
auto actual = mixture.logProbability(vv);
// Create a simple GaussianMixture // Check result.
static GaussianMixture createSimpleGaussianMixture() { std::vector<DiscreteKey> discrete_keys = {mode};
// Create Gaussian mixture Z(0) = X(0) + noise. std::vector<double> leaves = {conditionals[0]->logProbability(vv),
// TODO(dellaert): making copies below is not ideal ! conditionals[1]->logProbability(vv)};
Matrix1 I = Matrix1::Identity(); AlgebraicDecisionTree<Key> expected(discrete_keys, leaves);
const auto conditional0 = boost::make_shared<GaussianConditional>(
GaussianConditional::FromMeanAndStddev(Z(0), I, X(0), Vector1(0), 0.5)); EXPECT(assert_equal(expected, actual, 1e-6));
const auto conditional1 = boost::make_shared<GaussianConditional>(
GaussianConditional::FromMeanAndStddev(Z(0), I, X(0), Vector1(0), 3)); // Check for non-tree version.
return GaussianMixture({Z(0)}, {X(0)}, {mode}, {conditional0, conditional1}); for (size_t mode : {0, 1}) {
const HybridValues hv{vv, {{M(0), mode}}};
EXPECT_DOUBLES_EQUAL(conditionals[mode]->logProbability(vv),
mixture.logProbability(hv), 1e-8);
}
} }
/* ************************************************************************* */
/// Check error.
/// Verifies GaussianMixture::error in both forms: as a decision tree over the
/// discrete modes, and for a concrete HybridValues assignment per mode.
TEST(GaussianMixture, Error) {
using namespace equal_constants;
// error(VectorValues) yields an AlgebraicDecisionTree indexed by the mode.
auto actual = mixture.error(vv);
// Check result.
// Expected tree has one leaf per mode: the error of that mode's conditional.
std::vector<DiscreteKey> discrete_keys = {mode};
std::vector<double> leaves = {conditionals[0]->error(vv),
conditionals[1]->error(vv)};
AlgebraicDecisionTree<Key> expected(discrete_keys, leaves);
EXPECT(assert_equal(expected, actual, 1e-6));
// Check for non-tree version.
// error(HybridValues) should agree with the selected conditional's error.
// NOTE: the loop variable shadows the file-level DiscreteKey `mode` here.
for (size_t mode : {0, 1}) {
const HybridValues hv{vv, {{M(0), mode}}};
EXPECT_DOUBLES_EQUAL(conditionals[mode]->error(vv), mixture.error(hv),
1e-8);
}
}
/* ************************************************************************* */
/// The likelihood of the measurements must be proportional to the conditional
/// density evaluated at those measurements.
TEST(GaussianMixture, Likelihood) {
  using namespace equal_constants;

  // Build the likelihood factor from the fixed measurement values.
  const auto likelihood = mixture.likelihood(vv);

  // For either mode assignment, mixture error and likelihood error agree.
  EXPECT_DOUBLES_EQUAL(mixture.error(hv0), likelihood->error(hv0), 1e-8);
  EXPECT_DOUBLES_EQUAL(mixture.error(hv1), likelihood->error(hv1), 1e-8);

  // In the `equal_constants` case the likelihood error tree is simply the
  // per-mode errors of the individual conditional likelihoods.
  const std::vector<DiscreteKey> discrete_keys{mode};
  const std::vector<double> perModeErrors{
      conditionals[0]->likelihood(vv)->error(vv),
      conditionals[1]->likelihood(vv)->error(vv)};
  const AlgebraicDecisionTree<Key> expected(discrete_keys, perModeErrors);
  EXPECT(assert_equal(expected, likelihood->error(vv), 1e-6));

  // The ratio of unnormalized density (probPrime) to normalized density
  // (evaluate) should be independent of the discrete mode.
  std::vector<double> densityRatio(2);
  for (size_t m : {0, 1}) {
    const HybridValues hv{vv, {{M(0), m}}};
    densityRatio[m] = std::exp(-likelihood->error(hv)) / mixture.evaluate(hv);
  }
  EXPECT_DOUBLES_EQUAL(densityRatio[0], densityRatio[1], 1e-8);
}
/* ************************************************************************* */
namespace mode_dependent_constants {
// Create a GaussianMixture with mode-dependent noise models.
// 0 is low-noise, 1 is high-noise.
// Because the sigmas differ (0.5 vs 3.0), the per-mode normalization
// constants differ too — unlike the `equal_constants` fixture above.
const std::vector<GaussianConditional::shared_ptr> conditionals{
GaussianConditional::sharedMeanAndStddev(Z(0), I_1x1, X(0), Vector1(0.0),
0.5),
GaussianConditional::sharedMeanAndStddev(Z(0), I_1x1, X(0), Vector1(0.0),
3.0)};
// Mixture measurement model: Z(0) = X(0) + noise, noise level chosen by mode.
const GaussianMixture mixture({Z(0)}, {X(0)}, {mode}, conditionals);
} // namespace mode_dependent_constants
/* ************************************************************************* */ /* ************************************************************************* */
// Create a test for continuousParents. // Create a test for continuousParents.
TEST(GaussianMixture, ContinuousParents) { TEST(GaussianMixture, ContinuousParents) {
const GaussianMixture gm = createSimpleGaussianMixture(); using namespace mode_dependent_constants;
const KeyVector continuousParentKeys = gm.continuousParents(); const KeyVector continuousParentKeys = mixture.continuousParents();
// Check that the continuous parent keys are correct: // Check that the continuous parent keys are correct:
EXPECT(continuousParentKeys.size() == 1); EXPECT(continuousParentKeys.size() == 1);
EXPECT(continuousParentKeys[0] == X(0)); EXPECT(continuousParentKeys[0] == X(0));
@ -169,9 +174,9 @@ TEST(GaussianMixture, ContinuousParents) {
/* ************************************************************************* */ /* ************************************************************************* */
/// Check we can create a DecisionTreeFactor with all normalization constants. /// Check we can create a DecisionTreeFactor with all normalization constants.
TEST(GaussianMixture, NormalizationConstants) { TEST(GaussianMixture, NormalizationConstants) {
const GaussianMixture gm = createSimpleGaussianMixture(); using namespace mode_dependent_constants;
const auto factor = gm.normalizationConstants(); const auto factor = mixture.normalizationConstants();
// Test with 1D Gaussian normalization constants for sigma 0.5 and 3: // Test with 1D Gaussian normalization constants for sigma 0.5 and 3:
auto c = [](double sigma) { return 1.0 / (sqrt(2 * M_PI) * sigma); }; auto c = [](double sigma) { return 1.0 / (sqrt(2 * M_PI) * sigma); };
@ -180,27 +185,39 @@ TEST(GaussianMixture, NormalizationConstants) {
} }
/* ************************************************************************* */ /* ************************************************************************* */
/// Check that likelihood returns a mixture factor on the parents. /// Check that the likelihood is proportional to the conditional density given
TEST(GaussianMixture, Likelihood) { /// the measurements.
const GaussianMixture gm = createSimpleGaussianMixture(); TEST(GaussianMixture, Likelihood2) {
using namespace mode_dependent_constants;
// Call the likelihood function: // Compute likelihood
VectorValues measurements; auto likelihood = mixture.likelihood(vv);
measurements.insert(Z(0), Vector1(0));
const auto factor = gm.likelihood(measurements);
// Check that the factor is a mixture factor on the parents. GTSAM_PRINT(mixture);
// Loop over all discrete assignments over the discrete parents: GTSAM_PRINT(*likelihood);
const DiscreteKeys discreteParentKeys = gm.discreteKeys();
// Apply the likelihood function to all conditionals: // Check that the mixture error and the likelihood error are the same.
const GaussianMixtureFactor::Factors factors( EXPECT_DOUBLES_EQUAL(mixture.error(hv0), likelihood->error(hv0), 1e-8);
gm.conditionals(), EXPECT_DOUBLES_EQUAL(mixture.error(hv1), likelihood->error(hv1), 1e-8);
[measurements](const GaussianConditional::shared_ptr& conditional) {
return conditional->likelihood(measurements); // Check that likelihood error is as expected, i.e., just the errors of the
}); // individual likelihoods, in the `equal_constants` case.
const GaussianMixtureFactor expected({X(0)}, {mode}, factors); std::vector<DiscreteKey> discrete_keys = {mode};
EXPECT(assert_equal(expected, *factor)); std::vector<double> leaves = {conditionals[0]->likelihood(vv)->error(vv),
conditionals[1]->likelihood(vv)->error(vv)};
AlgebraicDecisionTree<Key> expected(discrete_keys, leaves);
EXPECT(assert_equal(expected, likelihood->error(vv), 1e-6));
// Check that the ratio of probPrime to evaluate is the same for all modes.
std::vector<double> ratio(2);
for (size_t mode : {0, 1}) {
const HybridValues hv{vv, {{M(0), mode}}};
// Print error of mixture and likelihood:
std::cout << "mode " << mode << " mixture: " << mixture.error(hv)
<< " likelihood: " << likelihood->error(hv) << std::endl;
ratio[mode] = std::exp(-likelihood->error(hv)) / mixture.evaluate(hv);
}
EXPECT_DOUBLES_EQUAL(ratio[0], ratio[1], 1e-8);
} }
/* ************************************************************************* */ /* ************************************************************************* */