leaner and meaner

release/4.3a0
Frank Dellaert 2012-01-20 05:24:02 +00:00
parent 4aa4b8b938
commit 7687b91a32
2 changed files with 34 additions and 51 deletions

gtsam/linear/KalmanFilter.cpp

@@ -45,7 +45,21 @@ namespace gtsam {
   /* ************************************************************************* */
   KalmanFilter::KalmanFilter(size_t n, GaussianConditional* density) :
-    n_(n), I_(eye(n_, n_)),density_(density) {
+    n_(n), I_(eye(n_, n_)), density_(density) {
+  }
+  /* ************************************************************************* */
+  KalmanFilter KalmanFilter::add(GaussianFactor* newFactor) {
+    // Create a factor graph
+    GaussianFactorGraph factorGraph;
+    // push back previous solution and new factor
+    factorGraph.push_back(density_->toFactor());
+    factorGraph.push_back(GaussianFactor::shared_ptr(newFactor));
+    // Eliminate graph in order x0, x1, to get Bayes net P(x0|x1)P(x1)
+    return KalmanFilter(n_, solve(factorGraph));
   }
   /* ************************************************************************* */
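Everything else in this commit hangs off the new add() helper above: each predict/update variant now builds a single GaussianFactor over key 0 (the current state) and, for the motion models, key 1 (the next state), and lets add() do the push-back-and-eliminate step that used to be repeated in every method. A minimal usage sketch of the resulting API; the include path, the helper name step(), and all inputs (F, B, u, H, z, noise models) are assumptions for illustration, not taken from this commit:

    #include <gtsam/linear/KalmanFilter.h>
    using namespace gtsam;

    // One predict/update cycle. predict() and update() each construct exactly one
    // factor, hand it to add(), and return a new filter by value; kf is not mutated.
    KalmanFilter step(KalmanFilter kf,
                      const Matrix& F, const Matrix& B, const Vector& u,
                      const SharedDiagonal& motionModel,
                      const Matrix& H, const Vector& z,
                      const SharedDiagonal& measurementModel) {
      KalmanFilter predicted = kf.predict(F, B, u, motionModel);
      return predicted.update(H, z, measurementModel);
    }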
@@ -65,7 +79,7 @@ namespace gtsam {
     // Create a factor graph f(x0), eliminate it into P(x0)
     GaussianFactorGraph factorGraph;
     // 0.5*(x-x0)'*inv(Sigma)*(x-x0)
     HessianFactor::shared_ptr factor(new HessianFactor(0, x, P0));
     factorGraph.push_back(factor);
     density_.reset(solve(factorGraph));
   }
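For reference, the unary HessianFactor built from (x, P0) is the Gaussian prior written in information form. Assuming the usual HessianFactor convention E(x0) = 0.5 x0'G x0 - g'x0 + 0.5 f (an assumption; the convention is not spelled out in this diff), the comment's 0.5*(x-x0)'*inv(Sigma)*(x-x0) corresponds to

    G = P_0^{-1}, \qquad g = P_0^{-1} x, \qquad f = x^\top P_0^{-1} x ,

so eliminating this single-variable graph just reproduces the prior density P(x0).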
@@ -91,27 +105,17 @@ namespace gtsam {
   }
   /* ************************************************************************* */
-  KalmanFilter KalmanFilter::predict(const Matrix& F, const Matrix& B, const Vector& u,
-      const SharedDiagonal& model) {
-    // We will create a small factor graph f1-(x0)-f2-(x1)
-    // where factor f1 is just the prior from time t0, P(x0)
-    // and factor f2 is from the motion model
-    GaussianFactorGraph factorGraph;
-    // push back f1
-    factorGraph.push_back(density_->toFactor());
+  KalmanFilter KalmanFilter::predict(const Matrix& F, const Matrix& B,
+      const Vector& u, const SharedDiagonal& model) {
     // The factor related to the motion model is defined as
     // f2(x_{t},x_{t+1}) = (F*x_{t} + B*u - x_{t+1}) * Q^-1 * (F*x_{t} + B*u - x_{t+1})^T
-    factorGraph.add(0, -F, 1, I_, B * u, model);
-    // Eliminate graph in order x0, x1, to get Bayes net P(x0|x1)P(x1)
-    return KalmanFilter(n_,solve(factorGraph));
+    return add(new JacobianFactor(0, -F, 1, I_, B * u, model));
   }
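Spelled out, the JacobianFactor handed to add() encodes the linear motion model x_{t+1} = F x_t + B u + w, w ~ N(0, Q), as a two-key least-squares factor (my reading of the code, with the SharedDiagonal model carrying Q):

    f_2(x_t, x_{t+1}) = \| -F\,x_t + I\,x_{t+1} - B u \|_Q^2
                      = (F x_t + B u - x_{t+1})^\top Q^{-1} (F x_t + B u - x_{t+1}),

i.e. A0 = -F on key 0, A1 = I_ on key 1 and right-hand side b = B*u, which is exactly JacobianFactor(0, -F, 1, I_, B * u, model).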
   /* ************************************************************************* */
-  KalmanFilter KalmanFilter::predictQ(const Matrix& F, const Matrix& B, const Vector& u,
-      const Matrix& Q) {
+  KalmanFilter KalmanFilter::predictQ(const Matrix& F, const Matrix& B,
+      const Vector& u, const Matrix& Q) {
 #ifndef NDEBUG
     int n = F.cols();
@@ -122,57 +126,32 @@ namespace gtsam {
     assert(Q.cols() == n);
 #endif
-    // Same scheme as in predict:
-    GaussianFactorGraph factorGraph;
-    factorGraph.push_back(density_->toFactor());
     // The factor related to the motion model is defined as
     // f2(x_{t},x_{t+1}) = (F*x_{t} + B*u - x_{t+1}) * Q^-1 * (F*x_{t} + B*u - x_{t+1})^T
     // See documentation in HessianFactor, we have A1 = -F, A2 = I_, b = B*u:
     // TODO: starts to seem more elaborate than straight-up KF equations?
     Matrix M = inverse(Q), Ft = trans(F);
-    Matrix G12 = -Ft*M, G11 = -G12*F, G22 = M;
-    Vector b = B*u, g2 = M*b, g1 = -Ft*g2;
-    double f = dot(b,g2);
-    HessianFactor::shared_ptr factor(new HessianFactor(0, 1, G11, G12, g1, G22, g2, f));
-    factorGraph.push_back(factor);
-    // Eliminate graph in order x0, x1, to get Bayes net P(x0|x1)P(x1)
-    return KalmanFilter(n_,solve(factorGraph));
+    Matrix G12 = -Ft * M, G11 = -G12 * F, G22 = M;
+    Vector b = B * u, g2 = M * b, g1 = -Ft * g2;
+    double f = dot(b, g2);
+    return add(new HessianFactor(0, 1, G11, G12, g1, G22, g2, f));
   }
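The block quantities computed in predictQ are the same motion-model quadratic expanded into information (Hessian) form. Writing M = Q^{-1} and b = Bu, and again assuming the 0.5 x'Gx - g'x + 0.5 f convention:

    \tfrac12 \|F x_t + b - x_{t+1}\|_Q^2
      = \tfrac12
        \begin{pmatrix} x_t \\ x_{t+1} \end{pmatrix}^{\top}
        \begin{pmatrix} F^\top M F & -F^\top M \\ -M F & M \end{pmatrix}
        \begin{pmatrix} x_t \\ x_{t+1} \end{pmatrix}
      - \begin{pmatrix} -F^\top M b \\ M b \end{pmatrix}^{\top}
        \begin{pmatrix} x_t \\ x_{t+1} \end{pmatrix}
      + \tfrac12\, b^\top M b ,

so G11 = F'MF, G12 = -F'M, G22 = M, g1 = -F'Mb, g2 = Mb and f = b'Mb, matching M = inverse(Q), G12 = -Ft*M, G11 = -G12*F, g2 = M*b, g1 = -Ft*g2 and f = dot(b, g2) in the code.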
   /* ************************************************************************* */
-  KalmanFilter KalmanFilter::predict2(const Matrix& A0, const Matrix& A1, const Vector& b,
-      const SharedDiagonal& model) {
-    // Same scheme as in predict:
-    GaussianFactorGraph factorGraph;
-    factorGraph.push_back(density_->toFactor());
-    // However, now the factor related to the motion model is defined as
+  KalmanFilter KalmanFilter::predict2(const Matrix& A0, const Matrix& A1,
+      const Vector& b, const SharedDiagonal& model) {
+    // The factor related to the motion model is defined as
     // f2(x_{t},x_{t+1}) = |A0*x_{t} + A1*x_{t+1} - b|^2
-    factorGraph.add(0, A0, 1, A1, b, model);
-    return KalmanFilter(n_,solve(factorGraph));
+    return add(new JacobianFactor(0, A0, 1, A1, b, model));
   }
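predict2() is the fully general linear form; the plain predict() above is just the special case

    \|A_0 x_t + A_1 x_{t+1} - b\|_\Sigma^2 \quad\text{with}\quad A_0 = -F,\; A_1 = I,\; b = B u,\; \Sigma = Q,

which is why both end up constructing the same kind of JacobianFactor and delegating to add().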
   /* ************************************************************************* */
   KalmanFilter KalmanFilter::update(const Matrix& H, const Vector& z,
       const SharedDiagonal& model) {
-    // We will create a small factor graph f1-(x0)-f2
-    // where factor f1 is the predictive density
-    // and factor f2 is from the measurement model
-    GaussianFactorGraph factorGraph;
-    // push back f1
-    factorGraph.push_back(density_->toFactor());
     // The factor related to the measurements would be defined as
     // f2 = (h(x_{t}) - z_{t}) * R^-1 * (h(x_{t}) - z_{t})^T
     //    = (x_{t} - z_{t}) * R^-1 * (x_{t} - z_{t})^T
-    factorGraph.add(0, H, z, model);
-    // Eliminate graph in order x0, x1, to get Bayes net P(x0|x1)P(x1)
-    return KalmanFilter(n_,solve(factorGraph));
+    return add(new JacobianFactor(0, H, z, model));
   }
   /* ************************************************************************* */
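Finally, the update factor is the (already linear) measurement model z_t = H x_t + v, v ~ N(0, R); the second comment line is the special case h(x) = x, i.e. H = I. As a quadratic on key 0 only,

    f_2(x_t) = \|H x_t - z_t\|_R^2 = (H x_t - z_t)^\top R^{-1} (H x_t - z_t),

and because the factor touches only x_t, add() simply multiplies it into the current density and re-eliminates, without introducing a new variable.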

gtsam/linear/KalmanFilter.h

@@ -20,6 +20,7 @@
  * @author Frank Dellaert
  */
+#include <gtsam/linear/GaussianFactor.h>
 #include <gtsam/linear/GaussianConditional.h>
 namespace gtsam {
@@ -42,6 +43,9 @@ namespace gtsam {
     /// private constructor
     KalmanFilter(size_t n, GaussianConditional* density);
+    /// add a new factor and marginalize to new Kalman filter
+    KalmanFilter add(GaussianFactor* newFactor);
   public:
     /**