Merge pull request #1801 from borglab/gaussian-bayes-net-improvements
commit 54711929fa

@@ -680,12 +680,14 @@ conditional 0: Hybrid P( x0 | x1 m0)
 R = [ 10.0499 ]
 S[x1] = [ -0.0995037 ]
 d = [ -9.85087 ]
+logNormalizationConstant: 1.38862
 No noise model

 1 Leaf p(x0 | x1)
 R = [ 10.0499 ]
 S[x1] = [ -0.0995037 ]
 d = [ -9.95037 ]
+logNormalizationConstant: 1.38862
 No noise model

 conditional 1: Hybrid P( x1 | x2 m0 m1)

@@ -696,12 +698,14 @@ conditional 1: Hybrid P( x1 | x2 m0 m1)
 R = [ 10.099 ]
 S[x2] = [ -0.0990196 ]
 d = [ -9.99901 ]
+logNormalizationConstant: 1.3935
 No noise model

 0 1 Leaf p(x1 | x2)
 R = [ 10.099 ]
 S[x2] = [ -0.0990196 ]
 d = [ -9.90098 ]
+logNormalizationConstant: 1.3935
 No noise model

 1 Choice(m0)

@@ -709,12 +713,14 @@ conditional 1: Hybrid P( x1 | x2 m0 m1)
 R = [ 10.099 ]
 S[x2] = [ -0.0990196 ]
 d = [ -10.098 ]
+logNormalizationConstant: 1.3935
 No noise model

 1 1 Leaf p(x1 | x2)
 R = [ 10.099 ]
 S[x2] = [ -0.0990196 ]
 d = [ -10 ]
+logNormalizationConstant: 1.3935
 No noise model

 conditional 2: Hybrid P( x2 | m0 m1)

@@ -726,6 +732,7 @@ conditional 2: Hybrid P( x2 | m0 m1)
 d = [ -10.1489 ]
 mean: 1 elements
 x2: -1.0099
+logNormalizationConstant: 1.38857
 No noise model

 0 1 Leaf p(x2)

@@ -733,6 +740,7 @@ conditional 2: Hybrid P( x2 | m0 m1)
 d = [ -10.1479 ]
 mean: 1 elements
 x2: -1.0098
+logNormalizationConstant: 1.38857
 No noise model

 1 Choice(m0)

@@ -741,6 +749,7 @@ conditional 2: Hybrid P( x2 | m0 m1)
 d = [ -10.0504 ]
 mean: 1 elements
 x2: -1.0001
+logNormalizationConstant: 1.38857
 No noise model

 1 1 Leaf p(x2)

@@ -748,6 +757,7 @@ conditional 2: Hybrid P( x2 | m0 m1)
 d = [ -10.0494 ]
 mean: 1 elements
 x2: -1
+logNormalizationConstant: 1.38857
 No noise model

 )";
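(Aside, not part of the diff: within each discrete choice the leaves share the same logNormalizationConstant, e.g. 1.38862 for both modes of p(x0 | x1), because the constant depends only on R and the noise model, never on d; only d differs across modes. The value also checks out against the formula introduced below: -0.5·log(2π) + log(10.0499) ≈ -0.91894 + 2.30756 ≈ 1.38862.)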

@@ -243,5 +243,25 @@ namespace gtsam {
 }

+/* ************************************************************************* */
+double GaussianBayesNet::logNormalizationConstant() const {
+  /*
+    normalization constant = 1.0 / sqrt((2*pi)^n*det(Sigma))
+    logConstant = -0.5 * n*log(2*pi) - 0.5 * log det(Sigma)
+
+    log det(Sigma) = -2.0 * logDeterminant()
+    thus, logConstant = -0.5*n*log(2*pi) + logDeterminant()
+
+    BayesNet logConstant = sum(-0.5*n_i*log(2*pi) + logDeterminant_i())
+                         = sum(-0.5*n_i*log(2*pi)) + sum(logDeterminant_i())
+                         = sum(-0.5*n_i*log(2*pi)) + bn->logDeterminant()
+  */
+  double logNormConst = 0.0;
+  for (const sharedConditional& cg : *this) {
+    logNormConst += cg->logNormalizationConstant();
+  }
+  return logNormConst;
+}
+
 /* ************************************************************************* */

 } // namespace gtsam
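To make the summation above concrete, here is a minimal sketch (assumed usage, not part of the diff; keys, matrices, and sigmas are arbitrary) showing that the Bayes-net constant is the sum of the per-conditional constants:

```cpp
#include <gtsam/linear/GaussianBayesNet.h>
#include <gtsam/linear/GaussianConditional.h>
#include <cassert>
#include <cmath>

using namespace gtsam;

int main() {
  // Two scalar conditionals: p(x0 | x1) and p(x1).
  GaussianBayesNet bn;
  bn.emplace_shared<GaussianConditional>(0, Vector1(9.0), I_1x1, 1, I_1x1,
                                         noiseModel::Isotropic::Sigma(1, 0.5));
  bn.emplace_shared<GaussianConditional>(1, Vector1(5.0), I_1x1,
                                         noiseModel::Isotropic::Sigma(1, 1.0));

  // The joint constant factorizes, so the log-constants add up.
  const double expected = bn.at(0)->logNormalizationConstant() +
                          bn.at(1)->logNormalizationConstant();
  assert(std::fabs(bn.logNormalizationConstant() - expected) < 1e-9);
  return 0;
}
```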

@@ -82,6 +82,12 @@ namespace gtsam {
     /** Check equality */
     bool equals(const This& bn, double tol = 1e-9) const;

+    /// Check exact equality.
+    friend bool operator==(const GaussianBayesNet& lhs,
+                           const GaussianBayesNet& rhs) {
+      return lhs.isEqual(rhs);
+    }
+
     /// print graph
     void print(
         const std::string& s = "",

@@ -228,6 +234,14 @@ namespace gtsam {
      * @return The determinant */
     double logDeterminant() const;

+    /**
+     * @brief Get the log of the normalization constant corresponding to the
+     * joint Gaussian density represented by this Bayes net.
+     *
+     * @return double
+     */
+    double logNormalizationConstant() const;
+
     /**
      * Backsubstitute with a different RHS vector than the one stored in this BayesNet.
      * gy=inv(R*inv(Sigma))*gx

@@ -121,6 +121,7 @@ namespace gtsam {
     const auto mean = solve({});  // solve for mean.
     mean.print("  mean", formatter);
   }
+  cout << "  logNormalizationConstant: " << logNormalizationConstant() << std::endl;
   if (model_)
     model_->print("  Noise model: ");
   else

@@ -184,8 +185,13 @@ namespace gtsam {
 double GaussianConditional::logNormalizationConstant() const {
   constexpr double log2pi = 1.8378770664093454835606594728112;
   size_t n = d().size();
-  // log det(Sigma)) = - 2.0 * logDeterminant()
-  return - 0.5 * n * log2pi + logDeterminant();
+  // Sigma = (R'R)^{-1}, det(Sigma) = det((R'R)^{-1}) = det(R'R)^{-1}
+  // log det(Sigma) = -log(det(R'R)) = -2*log(det(R))
+  // Hence, log det(Sigma) = -2.0 * logDeterminant()
+  // which gives log = -0.5*n*log(2*pi) - 0.5*(-2.0 * logDeterminant())
+  //                 = -0.5*n*log(2*pi) + (0.5*2.0 * logDeterminant())
+  //                 = -0.5*n*log(2*pi) + logDeterminant()
+  return -0.5 * n * log2pi + logDeterminant();
 }

 /* ************************************************************************* */
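As a sanity check on this derivation (a standalone sketch, not part of the diff), plugging in the isotropic model used by the print tests below, sigma = 3 and n = 2, reproduces the -4.0351 those tests expect:

```cpp
#include <cmath>
#include <cstdio>

int main() {
  const double n = 2.0, sigma = 3.0;
  // For an isotropic model, R = (1/sigma) * I, so log det(R) = n * log(1/sigma).
  const double logDetR = n * std::log(1.0 / sigma);
  const double log2pi = std::log(2.0 * M_PI);
  // logNormalizationConstant = -0.5*n*log(2*pi) + log det(R)
  std::printf("%.4f\n", -0.5 * n * log2pi + logDetR);  // prints -4.0351
  return 0;
}
```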

@@ -263,6 +263,11 @@ namespace gtsam {
     /** equals required by Testable for unit testing */
     bool equals(const VectorValues& x, double tol = 1e-9) const;

+    /// Check equality.
+    friend bool operator==(const VectorValues& lhs, const VectorValues& rhs) {
+      return lhs.equals(rhs);
+    }
+
     /// @}
     /// @name Advanced Interface
     /// @{
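A usage sketch for the new operator (assumed, not from the diff): it simply forwards to equals() with its default tolerance, so values can now be compared with == directly in tests and client code:

```cpp
#include <gtsam/linear/VectorValues.h>

using namespace gtsam;

int main() {
  VectorValues a, b;
  a.insert(0, Vector1(1.0));
  b.insert(0, Vector1(1.0));
  return (a == b) ? 0 : 1;  // true: same keys, same values within 1e-9
}
```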

@@ -510,12 +510,17 @@ virtual class GaussianConditional : gtsam::JacobianFactor {
   GaussianConditional(size_t key, gtsam::Vector d, gtsam::Matrix R, size_t name1, gtsam::Matrix S,
                       size_t name2, gtsam::Matrix T,
                       const gtsam::noiseModel::Diagonal* sigmas);
   GaussianConditional(const vector<std::pair<gtsam::Key, gtsam::Matrix>> terms,
                       size_t nrFrontals, gtsam::Vector d,
                       const gtsam::noiseModel::Diagonal* sigmas);

   // Constructors with no noise model
   GaussianConditional(size_t key, gtsam::Vector d, gtsam::Matrix R);
+  GaussianConditional(size_t key, gtsam::Vector d, gtsam::Matrix R, size_t name1, gtsam::Matrix S);
+  GaussianConditional(size_t key, gtsam::Vector d, gtsam::Matrix R, size_t name1, gtsam::Matrix S,
+                      size_t name2, gtsam::Matrix T);
+  GaussianConditional(const gtsam::KeyVector& keys, size_t nrFrontals,
+                      const gtsam::VerticalBlockMatrix& augmentedMatrix);

   // Named constructors
   static gtsam::GaussianConditional FromMeanAndStddev(gtsam::Key key,

@@ -80,6 +80,8 @@ TEST(GaussianBayesNet, Evaluate1) {
                        smallBayesNet.at(0)->logNormalizationConstant() +
                            smallBayesNet.at(1)->logNormalizationConstant(),
                        1e-9);
+  EXPECT_DOUBLES_EQUAL(log(constant), smallBayesNet.logNormalizationConstant(),
+                       1e-9);
   const double actual = smallBayesNet.evaluate(mean);
   EXPECT_DOUBLES_EQUAL(constant, actual, 1e-9);
 }
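(Note, not part of the diff: at the mean the residual Rx - d vanishes, so evaluate(mean) = k·exp(0) = k. That is why the test can compare evaluate(mean) directly against the normalization constant, and log(constant) against the new logNormalizationConstant().)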

@@ -516,6 +516,7 @@ TEST(GaussianConditional, Print) {
       "  d = [ 20 40 ]\n"
       "  mean: 1 elements\n"
       "  x0: 20 40\n"
+      "  logNormalizationConstant: -4.0351\n"
       "isotropic dim=2 sigma=3\n";
   EXPECT(assert_print_equal(expected, conditional, "GaussianConditional"));

@@ -530,6 +531,7 @@ TEST(GaussianConditional, Print) {
       "  S[x1] = [ -1 -2 ]\n"
       "          [ -3 -4 ]\n"
       "  d = [ 20 40 ]\n"
+      "  logNormalizationConstant: -4.0351\n"
       "isotropic dim=2 sigma=3\n";
   EXPECT(assert_print_equal(expected1, conditional1, "GaussianConditional"));

@@ -545,6 +547,7 @@ TEST(GaussianConditional, Print) {
       "  S[y1] = [ -5 -6 ]\n"
       "          [ -7 -8 ]\n"
       "  d = [ 20 40 ]\n"
+      "  logNormalizationConstant: -4.0351\n"
       "isotropic dim=2 sigma=3\n";
   EXPECT(assert_print_equal(expected2, conditional2, "GaussianConditional"));
 }