update hybrid code to use -log(k) consistently

release/4.3a0
Varun Agrawal 2024-09-21 05:16:14 -04:00
parent ecbf3d872e
commit ceb9496e7c
6 changed files with 38 additions and 36 deletions

View File

@@ -36,7 +36,7 @@ HybridGaussianFactor::FactorValuePairs GetFactorValuePairs(
     // Check if conditional is pruned
     if (conditional) {
       // Assign log(\sqrt(|2πΣ|)) = -log(1 / sqrt(|2πΣ|))
-      value = -conditional->logNormalizationConstant();
+      value = conditional->logNormalizationConstant();
     }
     return {std::dynamic_pointer_cast<GaussianFactor>(conditional), value};
   };
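Throughout this commit, logNormalizationConstant() switches convention: for a Gaussian with normalizer k = 1/sqrt(|2πΣ|) it now returns -log(k) = log(sqrt(|2πΣ|)) rather than log(k). A minimal sketch of the quantity being assigned above, written against plain Eigen rather than the GTSAM API (the helper name is hypothetical):

    #include <Eigen/Dense>
    #include <cmath>

    // Sketch of the value assigned above: -log(k) = log(sqrt(|2*pi*Sigma|))
    // = 0.5 * (n*log(2*pi) + log|Sigma|) for an n-dimensional Gaussian.
    double negLogNormalizer(const Eigen::MatrixXd& Sigma) {
      constexpr double kLog2Pi = 1.8378770664093455;  // log(2*pi)
      const double n = static_cast<double>(Sigma.rows());
      return 0.5 * (n * kLog2Pi + std::log(Sigma.determinant()));
    }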
@@ -51,14 +51,14 @@ HybridGaussianConditional::HybridGaussianConditional(
           discreteParents, GetFactorValuePairs(conditionals)),
       BaseConditional(continuousFrontals.size()),
       conditionals_(conditionals) {
-  // Calculate logConstant_ as the minimum of the log normalizers of the
-  // conditionals, by visiting the decision tree:
+  // Calculate logConstant_ as the minimum of the negative-log normalizers of
+  // the conditionals, by visiting the decision tree:
   logConstant_ = std::numeric_limits<double>::infinity();
   conditionals_.visit(
       [this](const GaussianConditional::shared_ptr &conditional) {
         if (conditional) {
           this->logConstant_ = std::min(
-              this->logConstant_, -conditional->logNormalizationConstant());
+              this->logConstant_, conditional->logNormalizationConstant());
         }
       });
 }
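Since each leaf now reports -log(k_m) directly, the visit computes

    logConstant_ = min_m(-log k_m) = -max_m(log k_m),

the same value the old code produced by negating each leaf's +log(k_m), so per-mode offsets measured against it remain non-negative.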
@@ -85,7 +85,7 @@ GaussianFactorGraphTree HybridGaussianConditional::asGaussianFactorGraphTree()
     // First check if conditional has not been pruned
     if (gc) {
       const double Cgm_Kgcm =
-          -this->logConstant_ - gc->logNormalizationConstant();
+          gc->logNormalizationConstant() - this->logConstant_;
       // If there is a difference in the covariances, we need to account for
       // that since the error is dependent on the mode.
       if (Cgm_Kgcm > 0.0) {
@@ -216,7 +216,7 @@ std::shared_ptr<HybridGaussianFactor> HybridGaussianConditional::likelihood(
           -> GaussianFactorValuePair {
         const auto likelihood_m = conditional->likelihood(given);
         const double Cgm_Kgcm =
-            -logConstant_ - conditional->logNormalizationConstant();
+            conditional->logNormalizationConstant() - logConstant_;
         if (Cgm_Kgcm == 0.0) {
           return {likelihood_m, 0.0};
         } else {
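The rewritten expressions here and in asGaussianFactorGraphTree() leave Cgm_Kgcm unchanged in value; with logNormalizationConstant() = -log(k_m) and logConstant_ = min_j(-log k_j):

    Cgm_Kgcm = (-log k_m) - min_j(-log k_j)
             = max_j(log k_j) - log(k_m) >= 0,

which matches the old -logConstant_ - log(k_m) under the previous +log(k) convention.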

View File

@@ -152,7 +152,7 @@ class GTSAM_EXPORT HybridGaussianConditional
   /// The log normalization constant is max of the individual
   /// log-normalization constants.
-  double logNormalizationConstant() const override { return -logConstant_; }
+  double logNormalizationConstant() const override { return logConstant_; }

   /**
    * Create a likelihood factor for a hybrid Gaussian conditional,
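Under the new convention the override returns min_m(-log k_m) = -max_m(log k_m); the doc comment's "max of the individual log-normalization constants" is that same quantity expressed in the old +log(k) convention.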

View File

@@ -329,9 +329,9 @@ static std::shared_ptr<Factor> createDiscreteFactor(
     // Logspace version of:
     // exp(-factor->error(kEmpty)) / conditional->normalizationConstant();
-    // We take negative of the logNormalizationConstant `log(k)`
-    // to get `log(1/k) = log(\sqrt{|2πΣ|})`.
-    return -factor->error(kEmpty) - conditional->logNormalizationConstant();
+    // logNormalizationConstant gives `-log(k)`
+    // which is `-log(k) = log(1/k) = log(\sqrt{|2πΣ|})`.
+    return -factor->error(kEmpty) + conditional->logNormalizationConstant();
   };

   AlgebraicDecisionTree<Key> logProbabilities(
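Both the old and new return statements compute the log of the expression in the comment; only the sign bookkeeping moves:

    log( exp(-E) / k ) = -E - log(k) = -E + logNormalizationConstant(),

where E = factor->error(kEmpty) and the last step uses the new -log(k) convention.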
@@ -355,8 +355,9 @@ static std::shared_ptr<Factor> createHybridGaussianFactor(
       auto hf = std::dynamic_pointer_cast<HessianFactor>(factor);
       if (!hf) throw std::runtime_error("Expected HessianFactor!");
       // Add 2.0 term since the constant term will be premultiplied by 0.5
-      // as per the Hessian definition
-      hf->constantTerm() += 2.0 * conditional->logNormalizationConstant();
+      // as per the Hessian definition,
+      // and negative since we want log(k)
+      hf->constantTerm() += -2.0 * conditional->logNormalizationConstant();
     }
     return {factor, 0.0};
   };
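The -2.0 factor follows from the Hessian error form; with f denoting constantTerm(),

    error(x) = 0.5 * (x'Gx - 2x'g + f),

so adding -2.0 * logNormalizationConstant() = -2*(-log k) = 2*log(k) to f contributes exactly log(k) to the error once the 0.5 is applied.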

View File

@@ -180,12 +180,13 @@ TEST(HybridGaussianConditional, Error2) {
   // Check result.
   DiscreteKeys discrete_keys{mode};
-  double logNormalizer0 = -conditionals[0]->logNormalizationConstant();
-  double logNormalizer1 = -conditionals[1]->logNormalizationConstant();
+  double logNormalizer0 = conditionals[0]->logNormalizationConstant();
+  double logNormalizer1 = conditionals[1]->logNormalizationConstant();
   double minLogNormalizer = std::min(logNormalizer0, logNormalizer1);
-  // Expected error is e(X) + log(|2πΣ|).
-  // We normalize log(|2πΣ|) with min(logNormalizers) so it is non-negative.
+  // Expected error is e(X) + log(sqrt(|2πΣ|)).
+  // We normalize log(sqrt(|2πΣ|)) with min(logNormalizers)
+  // so it is non-negative.
   std::vector<double> leaves = {
       conditionals[0]->error(vv) + logNormalizer0 - minLogNormalizer,
       conditionals[1]->error(vv) + logNormalizer1 - minLogNormalizer};
@@ -196,7 +197,7 @@ TEST(HybridGaussianConditional, Error2) {
   // Check for non-tree version.
   for (size_t mode : {0, 1}) {
     const HybridValues hv{vv, {{M(0), mode}}};
-    EXPECT_DOUBLES_EQUAL(conditionals[mode]->error(vv) -
+    EXPECT_DOUBLES_EQUAL(conditionals[mode]->error(vv) +
                              conditionals[mode]->logNormalizationConstant() -
                              minLogNormalizer,
                          hybrid_conditional.error(hv), 1e-8);
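Both checks in this test encode the same identity under the new convention:

    error(x, m) = e_m(x) + (-log k_m) - min_j(-log k_j);

the old test subtracted +log(k_m), so flipping the sign of logNormalizationConstant() flips the operator from - to +.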
@@ -230,8 +231,8 @@ TEST(HybridGaussianConditional, Likelihood2) {
   CHECK(jf1->rows() == 2);

   // Check that the constant C1 is properly encoded in the JacobianFactor.
-  const double C1 = hybrid_conditional.logNormalizationConstant() -
-                    conditionals[1]->logNormalizationConstant();
+  const double C1 = conditionals[1]->logNormalizationConstant() -
+                    hybrid_conditional.logNormalizationConstant();
   const double c1 = std::sqrt(2.0 * C1);
   Vector expected_unwhitened(2);
   expected_unwhitened << 4.9 - 5.0, -c1;
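The constant C1 is folded into the JacobianFactor as an extra unwhitened row so that its squared-error contribution reproduces C1:

    0.5 * c1^2 = 0.5 * (sqrt(2*C1))^2 = C1,

hence the expected second entry -c1 with c1 = sqrt(2*C1).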

View File

@@ -773,8 +773,8 @@ static HybridGaussianFactorGraph CreateFactorGraph(
   // We take negative since we want
   // the underlying scalar to be log(\sqrt(|2πΣ|))
   std::vector<GaussianFactorValuePair> factors{
-      {f0, -model0->logNormalizationConstant()},
-      {f1, -model1->logNormalizationConstant()}};
+      {f0, model0->logNormalizationConstant()},
+      {f1, model1->logNormalizationConstant()}};
   HybridGaussianFactor motionFactor({X(0), X(1)}, m1, factors);

   HybridGaussianFactorGraph hfg;
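As elsewhere in this commit, the scalar paired with each factor is now the mode's -log(k) = log(sqrt(|2πΣ|)) taken straight from the noise model; the hybrid factor adds that scalar to the corresponding mode's continuous error, so each mode contributes, in effect, e_m(x) + log(sqrt(|2πΣ_m|)).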

View File

@@ -714,26 +714,26 @@ factor 6: P( m1 | m0 ):
 size: 3
 conditional 0: Hybrid P( x0 | x1 m0)
 Discrete Keys = (m0, 2),
-logNormalizationConstant: 1.38862
+logNormalizationConstant: -1.38862
 Choice(m0)
 0 Leaf p(x0 | x1)
 R = [ 10.0499 ]
 S[x1] = [ -0.0995037 ]
 d = [ -9.85087 ]
-logNormalizationConstant: 1.38862
+logNormalizationConstant: -1.38862
 No noise model
 1 Leaf p(x0 | x1)
 R = [ 10.0499 ]
 S[x1] = [ -0.0995037 ]
 d = [ -9.95037 ]
-logNormalizationConstant: 1.38862
+logNormalizationConstant: -1.38862
 No noise model
 conditional 1: Hybrid P( x1 | x2 m0 m1)
 Discrete Keys = (m0, 2), (m1, 2),
-logNormalizationConstant: 1.3935
+logNormalizationConstant: -1.3935
 Choice(m1)
 0 Choice(m0)
@@ -741,14 +741,14 @@ conditional 1: Hybrid P( x1 | x2 m0 m1)
 R = [ 10.099 ]
 S[x2] = [ -0.0990196 ]
 d = [ -9.99901 ]
-logNormalizationConstant: 1.3935
+logNormalizationConstant: -1.3935
 No noise model
 0 1 Leaf p(x1 | x2)
 R = [ 10.099 ]
 S[x2] = [ -0.0990196 ]
 d = [ -9.90098 ]
-logNormalizationConstant: 1.3935
+logNormalizationConstant: -1.3935
 No noise model
 1 Choice(m0)
@@ -756,19 +756,19 @@ conditional 1: Hybrid P( x1 | x2 m0 m1)
 R = [ 10.099 ]
 S[x2] = [ -0.0990196 ]
 d = [ -10.098 ]
-logNormalizationConstant: 1.3935
+logNormalizationConstant: -1.3935
 No noise model
 1 1 Leaf p(x1 | x2)
 R = [ 10.099 ]
 S[x2] = [ -0.0990196 ]
 d = [ -10 ]
-logNormalizationConstant: 1.3935
+logNormalizationConstant: -1.3935
 No noise model
 conditional 2: Hybrid P( x2 | m0 m1)
 Discrete Keys = (m0, 2), (m1, 2),
-logNormalizationConstant: 1.38857
+logNormalizationConstant: -1.38857
 Choice(m1)
 0 Choice(m0)
@@ -777,7 +777,7 @@ conditional 2: Hybrid P( x2 | m0 m1)
 d = [ -10.1489 ]
 mean: 1 elements
 x2: -1.0099
-logNormalizationConstant: 1.38857
+logNormalizationConstant: -1.38857
 No noise model
 0 1 Leaf p(x2)
@@ -785,7 +785,7 @@ conditional 2: Hybrid P( x2 | m0 m1)
 d = [ -10.1479 ]
 mean: 1 elements
 x2: -1.0098
-logNormalizationConstant: 1.38857
+logNormalizationConstant: -1.38857
 No noise model
 1 Choice(m0)
@@ -794,7 +794,7 @@ conditional 2: Hybrid P( x2 | m0 m1)
 d = [ -10.0504 ]
 mean: 1 elements
 x2: -1.0001
-logNormalizationConstant: 1.38857
+logNormalizationConstant: -1.38857
 No noise model
 1 1 Leaf p(x2)
@@ -802,7 +802,7 @@ conditional 2: Hybrid P( x2 | m0 m1)
 d = [ -10.0494 ]
 mean: 1 elements
 x2: -1
-logNormalizationConstant: 1.38857
+logNormalizationConstant: -1.38857
 No noise model
 )";
@@ -903,8 +903,8 @@ static HybridNonlinearFactorGraph CreateFactorGraph(
   // We take negative since we want
   // the underlying scalar to be log(\sqrt(|2πΣ|))
   std::vector<NonlinearFactorValuePair> factors{
-      {f0, -model0->logNormalizationConstant()},
-      {f1, -model1->logNormalizationConstant()}};
+      {f0, model0->logNormalizationConstant()},
+      {f1, model1->logNormalizationConstant()}};
   HybridNonlinearFactor mixtureFactor({X(0), X(1)}, m1, factors);