logNormalizationConstant() for GaussianBayesNet
parent 93c824c482
commit b20d33d79e
gtsam/linear/GaussianBayesNet.cpp
@@ -243,5 +243,25 @@ namespace gtsam {
 }
 
 /* ************************************************************************* */
+double GaussianBayesNet::logNormalizationConstant() const {
+  /*
+    normalization constant = 1.0 / sqrt((2*pi)^n*det(Sigma))
+    logConstant = -0.5 * n*log(2*pi) - 0.5 * log det(Sigma)
+
+    log det(Sigma) = -2.0 * logDeterminant()
+    thus, logConstant = -0.5*n*log(2*pi) + logDeterminant()
+
+    BayesNet logConstant = sum(-0.5*n_i*log(2*pi) + logDeterminant_i())
+                         = sum(-0.5*n_i*log(2*pi)) + sum(logDeterminant_i())
+                         = sum(-0.5*n_i*log(2*pi)) + bn->logDeterminant()
+  */
+  double logNormConst = 0.0;
+  for (const sharedConditional& cg : *this) {
+    logNormConst += cg->logNormalizationConstant();
+  }
+  return logNormConst;
+}
+
+/* ************************************************************************* */
 
 } // namespace gtsam
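The step log det(Sigma) = -2.0 * logDeterminant() in the comment holds because the Bayes net represents the density through the upper-triangular factor R with Sigma^{-1} = R^T R, so det(Sigma) = det(R)^{-2}, while logDeterminant() returns log det(R). A minimal sketch of how the resulting identity could be checked numerically (a hypothetical test, not part of this commit; the GaussianConditional constructor calls and unit noise models below are assumptions):

#include <gtsam/linear/GaussianBayesNet.h>
#include <gtsam/linear/NoiseModel.h>
#include <cmath>
#include <iostream>

using namespace gtsam;

int main() {
  // Two scalar conditionals p(x0 | x1) and p(x1); unit noise models, so the
  // scaling lives entirely in the R blocks.
  GaussianBayesNet bn;
  auto unit = noiseModel::Isotropic::Sigma(1, 1.0);
  bn.emplace_shared<GaussianConditional>(
      0, Vector::Zero(1), (Matrix(1, 1) << 2.0).finished(),  // R for x0
      1, (Matrix(1, 1) << -1.0).finished(), unit);           // S for parent x1
  bn.emplace_shared<GaussianConditional>(
      1, Vector::Zero(1), (Matrix(1, 1) << 0.5).finished(), unit);

  // Identity derived in the comment: -0.5*n*log(2*pi) + logDeterminant(),
  // with n = 2 the total dimension of (x0, x1).
  const double expected = -0.5 * 2 * std::log(2.0 * M_PI) + bn.logDeterminant();
  std::cout << bn.logNormalizationConstant() << " vs " << expected << "\n";
  return 0;
}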
gtsam/linear/GaussianBayesNet.h
@@ -234,6 +234,14 @@ namespace gtsam {
   * @return The determinant */
   double logDeterminant() const;
 
+  /**
+   * @brief Get the log of the normalization constant corresponding to the
+   * joint Gaussian density represented by this Bayes net.
+   *
+   * @return double
+   */
+  double logNormalizationConstant() const;
+
   /**
    * Backsubstitute with a different RHS vector than the one stored in this BayesNet.
    * gy=inv(R*inv(Sigma))*gx
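With the declaration in place, a typical consumer would combine it with the existing error function, since for a Gaussian Bayes net log p(x) = logNormalizationConstant() - error(x), where error(x) = 0.5 |R x - d|^2. A minimal sketch (an assumed usage pattern, not part of this commit; logDensity is a hypothetical helper name):

#include <gtsam/linear/GaussianBayesNet.h>
#include <gtsam/linear/VectorValues.h>

using namespace gtsam;

// Hypothetical helper: full log-density of an assignment x, assembled from
// the normalization constant added in this commit and the quadratic error.
double logDensity(const GaussianBayesNet& bn, const VectorValues& x) {
  return bn.logNormalizationConstant() - bn.error(x);
}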