From 6488a0ceecfc18dad6d494e8589a507c92a72419 Mon Sep 17 00:00:00 2001
From: Varun Agrawal
Date: Sun, 22 Sep 2024 22:16:59 -0400
Subject: [PATCH] updated tests

---
 .../tests/testHybridGaussianConditional.cpp    | 20 ++++++------
 .../hybrid/tests/testHybridGaussianFactor.cpp  |  5 ++-
 .../tests/testHybridNonlinearFactorGraph.cpp   | 31 +++++++++----------
 gtsam/linear/tests/testGaussianBayesNet.cpp    |  4 +--
 .../linear/tests/testGaussianConditional.cpp   |  8 ++---
 gtsam/linear/tests/testGaussianDensity.cpp     |  2 +-
 gtsam/linear/tests/testNoiseModel.cpp          | 12 +++---
 python/gtsam/tests/test_HybridBayesNet.py      |  3 +-
 8 files changed, 41 insertions(+), 44 deletions(-)

diff --git a/gtsam/hybrid/tests/testHybridGaussianConditional.cpp b/gtsam/hybrid/tests/testHybridGaussianConditional.cpp
index 040cd2ff0..6c1037e1d 100644
--- a/gtsam/hybrid/tests/testHybridGaussianConditional.cpp
+++ b/gtsam/hybrid/tests/testHybridGaussianConditional.cpp
@@ -180,16 +180,16 @@ TEST(HybridGaussianConditional, Error2) {
 
   // Check result.
   DiscreteKeys discrete_keys{mode};
-  double logNormalizer0 = conditionals[0]->logNormalizationConstant();
-  double logNormalizer1 = conditionals[1]->logNormalizationConstant();
-  double minLogNormalizer = std::min(logNormalizer0, logNormalizer1);
+  double errorConstant0 = conditionals[0]->errorConstant();
+  double errorConstant1 = conditionals[1]->errorConstant();
+  double minErrorConstant = std::min(errorConstant0, errorConstant1);
 
   // Expected error is e(X) + log(sqrt(|2πΣ|)).
-  // We normalize log(sqrt(|2πΣ|)) with min(logNormalizers)
+  // We normalize log(sqrt(|2πΣ|)) with min(errorConstant)
   // so it is non-negative.
   std::vector<double> leaves = {
-      conditionals[0]->error(vv) + logNormalizer0 - minLogNormalizer,
-      conditionals[1]->error(vv) + logNormalizer1 - minLogNormalizer};
+      conditionals[0]->error(vv) + errorConstant0 - minErrorConstant,
+      conditionals[1]->error(vv) + errorConstant1 - minErrorConstant};
   AlgebraicDecisionTree<Key> expected(discrete_keys, leaves);
 
   EXPECT(assert_equal(expected, actual, 1e-6));
@@ -198,8 +198,8 @@ TEST(HybridGaussianConditional, Error2) {
   for (size_t mode : {0, 1}) {
     const HybridValues hv{vv, {{M(0), mode}}};
     EXPECT_DOUBLES_EQUAL(conditionals[mode]->error(vv) +
-                             conditionals[mode]->logNormalizationConstant() -
-                             minLogNormalizer,
+                             conditionals[mode]->errorConstant() -
+                             minErrorConstant,
                          hybrid_conditional.error(hv), 1e-8);
   }
 }
@@ -231,8 +231,8 @@ TEST(HybridGaussianConditional, Likelihood2) {
   CHECK(jf1->rows() == 2);
 
   // Check that the constant C1 is properly encoded in the JacobianFactor.
-  const double C1 = conditionals[1]->logNormalizationConstant() -
-                    hybrid_conditional.logNormalizationConstant();
+  const double C1 = hybrid_conditional.logNormalizationConstant() -
+                    conditionals[1]->logNormalizationConstant();
   const double c1 = std::sqrt(2.0 * C1);
   Vector expected_unwhitened(2);
   expected_unwhitened << 4.9 - 5.0, -c1;
diff --git a/gtsam/hybrid/tests/testHybridGaussianFactor.cpp b/gtsam/hybrid/tests/testHybridGaussianFactor.cpp
index 03130bc10..d84948c75 100644
--- a/gtsam/hybrid/tests/testHybridGaussianFactor.cpp
+++ b/gtsam/hybrid/tests/testHybridGaussianFactor.cpp
@@ -780,9 +780,8 @@ static HybridGaussianFactorGraph CreateFactorGraph(
   // Create HybridGaussianFactor
   // We take negative since we want
   // the underlying scalar to be log(\sqrt(|2πΣ|))
-  std::vector<GaussianFactorValuePair> factors{
-      {f0, model0->logNormalizationConstant()},
-      {f1, model1->logNormalizationConstant()}};
+  std::vector<GaussianFactorValuePair> factors{{f0, model0->errorConstant()},
+                                               {f1, model1->errorConstant()}};
   HybridGaussianFactor motionFactor({X(0), X(1)}, m1, factors);
 
   HybridGaussianFactorGraph hfg;
diff --git a/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp b/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp
index cd9b24b37..5f1108ace 100644
--- a/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp
+++ b/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp
@@ -714,26 +714,26 @@ factor 6: P( m1 | m0 ):
 size: 3
 conditional 0: Hybrid P( x0 | x1 m0)
  Discrete Keys = (m0, 2),
- logNormalizationConstant: -1.38862
+ logNormalizationConstant: 1.38862
 
  Choice(m0)
  0 Leaf p(x0 | x1)
   R = [ 10.0499 ]
   S[x1] = [ -0.0995037 ]
   d = [ -9.85087 ]
-  logNormalizationConstant: -1.38862
+  logNormalizationConstant: 1.38862
   No noise model
 
  1 Leaf p(x0 | x1)
   R = [ 10.0499 ]
   S[x1] = [ -0.0995037 ]
   d = [ -9.95037 ]
-  logNormalizationConstant: -1.38862
+  logNormalizationConstant: 1.38862
   No noise model
 
 conditional 1: Hybrid P( x1 | x2 m0 m1)
  Discrete Keys = (m0, 2), (m1, 2),
- logNormalizationConstant: -1.3935
+ logNormalizationConstant: 1.3935
 
  Choice(m1)
  0 Choice(m0)
@@ -741,14 +741,14 @@ conditional 1: Hybrid P( x1 | x2 m0 m1)
   R = [ 10.099 ]
   S[x2] = [ -0.0990196 ]
   d = [ -9.99901 ]
-  logNormalizationConstant: -1.3935
+  logNormalizationConstant: 1.3935
   No noise model
 
  0 1 Leaf p(x1 | x2)
   R = [ 10.099 ]
   S[x2] = [ -0.0990196 ]
   d = [ -9.90098 ]
-  logNormalizationConstant: -1.3935
+  logNormalizationConstant: 1.3935
   No noise model
 
  1 Choice(m0)
@@ -756,19 +756,19 @@ conditional 1: Hybrid P( x1 | x2 m0 m1)
   R = [ 10.099 ]
   S[x2] = [ -0.0990196 ]
   d = [ -10.098 ]
-  logNormalizationConstant: -1.3935
+  logNormalizationConstant: 1.3935
   No noise model
 
  1 1 Leaf p(x1 | x2)
   R = [ 10.099 ]
   S[x2] = [ -0.0990196 ]
   d = [ -10 ]
-  logNormalizationConstant: -1.3935
+  logNormalizationConstant: 1.3935
   No noise model
 
 conditional 2: Hybrid P( x2 | m0 m1)
  Discrete Keys = (m0, 2), (m1, 2),
- logNormalizationConstant: -1.38857
+ logNormalizationConstant: 1.38857
 
  Choice(m1)
  0 Choice(m0)
@@ -777,7 +777,7 @@ conditional 2: Hybrid P( x2 | m0 m1)
   d = [ -10.1489 ]
   mean: 1 elements
   x2: -1.0099
-  logNormalizationConstant: -1.38857
+  logNormalizationConstant: 1.38857
   No noise model
 
  0 1 Leaf p(x2)
@@ -785,7 +785,7 @@ conditional 2: Hybrid P( x2 | m0 m1)
   d = [ -10.1479 ]
   mean: 1 elements
   x2: -1.0098
-  logNormalizationConstant: -1.38857
+  logNormalizationConstant: 1.38857
   No noise model
 
  1 Choice(m0)
@@ -794,7 +794,7 @@ conditional 2: Hybrid P( x2 | m0 m1)
   d = [ -10.0504 ]
   mean: 1 elements
   x2: -1.0001
-  logNormalizationConstant: -1.38857
+  logNormalizationConstant: 1.38857
   No noise model
 
  1 1 Leaf p(x2)
@@ -802,7 +802,7 @@ conditional 2: Hybrid P( x2 | m0 m1)
   d = [ -10.0494 ]
   mean: 1 elements
   x2: -1
-  logNormalizationConstant: -1.38857
+  logNormalizationConstant: 1.38857
   No noise model
 
 )";
@@ -902,9 +902,8 @@ static HybridNonlinearFactorGraph CreateFactorGraph(
   // Create HybridNonlinearFactor
   // We take negative since we want
   // the underlying scalar to be log(\sqrt(|2πΣ|))
-  std::vector<NonlinearFactorValuePair> factors{
-      {f0, model0->logNormalizationConstant()},
-      {f1, model1->logNormalizationConstant()}};
+  std::vector<NonlinearFactorValuePair> factors{{f0, model0->errorConstant()},
+                                                {f1, model1->errorConstant()}};
 
   HybridNonlinearFactor mixtureFactor({X(0), X(1)}, m1, factors);
 
diff --git a/gtsam/linear/tests/testGaussianBayesNet.cpp b/gtsam/linear/tests/testGaussianBayesNet.cpp
index a186eb2b2..99453ee4e 100644
--- a/gtsam/linear/tests/testGaussianBayesNet.cpp
+++ b/gtsam/linear/tests/testGaussianBayesNet.cpp
@@ -76,11 +76,11 @@ TEST(GaussianBayesNet, Evaluate1) {
   // the normalization constant 1.0/sqrt((2*pi*Sigma).det()).
   // The covariance matrix inv(Sigma) = R'*R, so the determinant is
   const double constant = sqrt((invSigma / (2 * M_PI)).determinant());
-  EXPECT_DOUBLES_EQUAL(-log(constant),
+  EXPECT_DOUBLES_EQUAL(log(constant),
                        smallBayesNet.at(0)->logNormalizationConstant() +
                            smallBayesNet.at(1)->logNormalizationConstant(),
                        1e-9);
-  EXPECT_DOUBLES_EQUAL(-log(constant), smallBayesNet.logNormalizationConstant(),
+  EXPECT_DOUBLES_EQUAL(log(constant), smallBayesNet.logNormalizationConstant(),
                        1e-9);
   const double actual = smallBayesNet.evaluate(mean);
   EXPECT_DOUBLES_EQUAL(constant, actual, 1e-9);
diff --git a/gtsam/linear/tests/testGaussianConditional.cpp b/gtsam/linear/tests/testGaussianConditional.cpp
index b03e0a060..26086104c 100644
--- a/gtsam/linear/tests/testGaussianConditional.cpp
+++ b/gtsam/linear/tests/testGaussianConditional.cpp
@@ -493,7 +493,7 @@ TEST(GaussianConditional, LogNormalizationConstant) {
   x.insert(X(0), Vector3::Zero());
   Matrix3 Sigma = I_3x3 * sigma * sigma;
   double expectedLogNormalizationConstant =
-      -log(1 / sqrt((2 * M_PI * Sigma).determinant()));
+      log(1 / sqrt((2 * M_PI * Sigma).determinant()));
   EXPECT_DOUBLES_EQUAL(expectedLogNormalizationConstant,
                        conditional.logNormalizationConstant(), 1e-9);
 }
@@ -517,7 +517,7 @@ TEST(GaussianConditional, Print) {
       "  d = [ 20 40 ]\n"
       "  mean: 1 elements\n"
       "  x0: 20 40\n"
-      "  logNormalizationConstant: 4.0351\n"
+      "  logNormalizationConstant: -4.0351\n"
       "isotropic dim=2 sigma=3\n";
   EXPECT(assert_print_equal(expected, conditional, "GaussianConditional"));
 
@@ -532,7 +532,7 @@ TEST(GaussianConditional, Print) {
       "  S[x1] = [ -1 -2 ]\n"
       "          [ -3 -4 ]\n"
       "  d = [ 20 40 ]\n"
-      "  logNormalizationConstant: 4.0351\n"
+      "  logNormalizationConstant: -4.0351\n"
       "isotropic dim=2 sigma=3\n";
   EXPECT(assert_print_equal(expected1, conditional1, "GaussianConditional"));
 
@@ -548,7 +548,7 @@ TEST(GaussianConditional, Print) {
       "  S[y1] = [ -5 -6 ]\n"
       "          [ -7 -8 ]\n"
       "  d = [ 20 40 ]\n"
-      "  logNormalizationConstant: 4.0351\n"
+      "  logNormalizationConstant: -4.0351\n"
       "isotropic dim=2 sigma=3\n";
   EXPECT(assert_print_equal(expected2, conditional2, "GaussianConditional"));
 }
diff --git a/gtsam/linear/tests/testGaussianDensity.cpp b/gtsam/linear/tests/testGaussianDensity.cpp
index 3226f40ab..e88fd8cc4 100644
--- a/gtsam/linear/tests/testGaussianDensity.cpp
+++ b/gtsam/linear/tests/testGaussianDensity.cpp
@@ -55,7 +55,7 @@ TEST(GaussianDensity, FromMeanAndStddev) {
   double expected1 = 0.5 * e.dot(e);
   EXPECT_DOUBLES_EQUAL(expected1, density.error(values), 1e-9);
 
-  double expected2 = -(density.logNormalizationConstant() + 0.5 * e.dot(e));
+  double expected2 = -(density.errorConstant() + 0.5 * e.dot(e));
   EXPECT_DOUBLES_EQUAL(expected2, density.logProbability(values), 1e-9);
 }
diff --git a/gtsam/linear/tests/testNoiseModel.cpp b/gtsam/linear/tests/testNoiseModel.cpp
index 59ee05d07..5e756a483 100644
--- a/gtsam/linear/tests/testNoiseModel.cpp
+++ b/gtsam/linear/tests/testNoiseModel.cpp
@@ -810,9 +810,9 @@ TEST(NoiseModel, NonDiagonalGaussian)
 TEST(NoiseModel, LogNormalizationConstant1D) {
   // Very simple 1D noise model, which we can compute by hand.
   double sigma = 0.1;
-  // For expected values, we compute -log(1/sqrt(|2πΣ|)) by hand.
-  // = 0.5*(log(2π) + log(Σ)) (since it is 1D)
-  double expected_value = 0.5 * log(2 * M_PI * sigma * sigma);
+  // For expected values, we compute log(1/sqrt(|2πΣ|)) by hand.
+  // = -0.5*(log(2π) + log(Σ)) (since it is 1D)
+  double expected_value = -0.5 * log(2 * M_PI * sigma * sigma);
 
   // Gaussian
   {
@@ -839,7 +839,7 @@ TEST(NoiseModel, LogNormalizationConstant1D) {
     auto noise_model = Unit::Create(1);
     double actual_value = noise_model->logNormalizationConstant();
     double sigma = 1.0;
-    expected_value = 0.5 * log(2 * M_PI * sigma * sigma);
+    expected_value = -0.5 * log(2 * M_PI * sigma * sigma);
     EXPECT_DOUBLES_EQUAL(expected_value, actual_value, 1e-9);
   }
 }
@@ -850,7 +850,7 @@ TEST(NoiseModel, LogNormalizationConstant3D) {
   size_t n = 3;
   // We compute the expected values just like in the LogNormalizationConstant1D
   // test, but we multiply by 3 due to the determinant.
-  double expected_value = 0.5 * n * log(2 * M_PI * sigma * sigma);
+  double expected_value = -0.5 * n * log(2 * M_PI * sigma * sigma);
 
   // Gaussian
   {
@@ -879,7 +879,7 @@ TEST(NoiseModel, LogNormalizationConstant3D) {
     auto noise_model = Unit::Create(3);
     double actual_value = noise_model->logNormalizationConstant();
     double sigma = 1.0;
-    expected_value = 0.5 * n * log(2 * M_PI * sigma * sigma);
+    expected_value = -0.5 * n * log(2 * M_PI * sigma * sigma);
     EXPECT_DOUBLES_EQUAL(expected_value, actual_value, 1e-9);
   }
 }
diff --git a/python/gtsam/tests/test_HybridBayesNet.py b/python/gtsam/tests/test_HybridBayesNet.py
index bb12ff02b..a72e34062 100644
--- a/python/gtsam/tests/test_HybridBayesNet.py
+++ b/python/gtsam/tests/test_HybridBayesNet.py
@@ -90,8 +90,7 @@ class TestHybridBayesNet(GtsamTestCase):
         self.assertTrue(probability >= 0.0)
         logProb = conditional.logProbability(values)
         self.assertAlmostEqual(probability, np.exp(logProb))
-        expected = -(conditional.logNormalizationConstant() + \
-                     conditional.error(values))
+        expected = -(conditional.errorConstant() + conditional.error(values))
         self.assertAlmostEqual(logProb, expected)
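Note on the convention this patch establishes: logNormalizationConstant() now returns log k = log(1/sqrt(|2πΣ|)), which can be negative (hence the flipped signs in the expected print strings above), while the new errorConstant() returns its negation log(sqrt(|2πΣ|)), so that logProbability(x) = -(errorConstant() + error(x)). The small standalone program below checks that invariant numerically. It is a sketch only, not part of the patch: it assumes a GTSAM build that already includes this change (so errorConstant() exists), and it reuses the isotropic dim=2, sigma=3 setup from TEST(GaussianConditional, Print), where log(sqrt(|2πΣ|)) ≈ 4.0351.

// check_convention.cpp (hypothetical file name).
// Build against a GTSAM version that includes this patch.
#include <gtsam/inference/Symbol.h>
#include <gtsam/linear/GaussianDensity.h>
#include <gtsam/linear/VectorValues.h>

#include <cassert>
#include <cmath>

int main() {
  using namespace gtsam;
  using symbol_shorthand::X;

  // Isotropic 2D density with sigma = 3, mean (20, 40), as in the Print test.
  const double sigma = 3.0;
  const Vector2 mean(20.0, 40.0);
  const GaussianDensity density =
      GaussianDensity::FromMeanAndStddev(X(0), mean, sigma);

  // errorConstant() is the negation of logNormalizationConstant():
  // log(sqrt(|2πΣ|)) == -log(1/sqrt(|2πΣ|)), about +/- 4.0351 here.
  assert(std::abs(density.errorConstant() +
                  density.logNormalizationConstant()) < 1e-9);

  // logProbability(x) = -(errorConstant() + error(x)), the identity the
  // updated testGaussianDensity.cpp and test_HybridBayesNet.py assert.
  VectorValues values;
  values.insert(X(0), Vector2(22.0, 41.0));
  assert(std::abs(density.logProbability(values) +
                  density.errorConstant() + density.error(values)) < 1e-9);
  return 0;
}

If the build predates this patch, the first assert fails (both calls then return log(sqrt(|2πΣ|))), which makes the sketch a quick way to tell which convention a given GTSAM checkout uses.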