(in branch) Separate gradient functions for FactorGraph, BayesNet, BayesTree, and BayesTree with ISAM2 cliques (the last specialized to use the gradient contributions stored in ISAM2 cliques)

release/4.3a0
Richard Roberts 2011-12-13 22:51:28 +00:00
parent 2ef911b7e9
commit 6e1136ba20
9 changed files with 330 additions and 138 deletions

View File

@@ -19,6 +19,7 @@
#include <boost/foreach.hpp>
#include <boost/tuple/tuple.hpp>
#include <gtsam/linear/JacobianFactor.h>
#include <gtsam/linear/GaussianBayesNet.h>
#include <gtsam/linear/VectorValues.h>

@@ -213,6 +214,16 @@ double determinant(const GaussianBayesNet& bayesNet) {
  return exp(logDet);
}

/* ************************************************************************* */
VectorValues gradient(const GaussianBayesNet& bayesNet, const VectorValues& x0) {
  return gradient(FactorGraph<JacobianFactor>(bayesNet), x0);
}

/* ************************************************************************* */
void gradientAtZero(const GaussianBayesNet& bayesNet, VectorValues& g) {
  gradientAtZero(FactorGraph<JacobianFactor>(bayesNet), g);
}

/* ************************************************************************* */
} // namespace gtsam

View File

@@ -64,19 +64,19 @@ namespace gtsam {
   */
  boost::shared_ptr<VectorValues> optimize_(const GaussianBayesNet& bn);

  /**
   * Backsubstitute
   * (R*x)./sigmas = y by solving x=inv(R)*(y.*sigmas)
   * @param y is the RHS of the system
   */
  VectorValues backSubstitute(const GaussianBayesNet& bn, const VectorValues& y);

  /**
   * Backsubstitute in place, y starts as RHS and is replaced with solution
   */
  void backSubstituteInPlace(const GaussianBayesNet& bn, VectorValues& y);

  /**
   * Transpose Backsubstitute
   * gy=inv(L)*gx by solving L*gy=gx.
   * gy=inv(R'*inv(Sigma))*gx

@@ -108,4 +108,26 @@ namespace gtsam {
   */
  double determinant(const GaussianBayesNet& bayesNet);

  /**
   * Compute the gradient of the energy function,
   * \f$ \nabla_{x=x_0} \left\Vert \Sigma^{-1} R x - d \right\Vert^2 \f$,
   * centered around \f$ x = x_0 \f$.
   * The gradient is \f$ R^T(Rx-d) \f$.
   * @param bayesNet The Gaussian Bayes net \f$ (R,d) \f$
   * @param x0 The center about which to compute the gradient
   * @return The gradient as a VectorValues
   */
  VectorValues gradient(const GaussianBayesNet& bayesNet, const VectorValues& x0);

  /**
   * Compute the gradient of the energy function,
   * \f$ \nabla_{x=0} \left\Vert \Sigma^{-1} R x - d \right\Vert^2 \f$,
   * centered around zero.
   * The gradient about zero is \f$ -R^T d \f$. See also gradient(const GaussianBayesNet&, const VectorValues&).
   * @param bayesNet The Gaussian Bayes net \f$ (R,d) \f$
   * @param[out] g A VectorValues to store the gradient, which must be preallocated, see allocateVectorValues
   */
  void gradientAtZero(const GaussianBayesNet& bayesNet, VectorValues& g);

} // namespace gtsam
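
Both entry points above reduce to the factor-graph implementation by converting the Bayes net back into Jacobian factors, as the .cpp hunk shows. A minimal usage sketch (illustrative only, not part of the commit; it uses only the calls that appear in this diff — gradient, gradientAtZero, allocateVectorValues, and VectorValues::Zero):

#include <gtsam/linear/GaussianBayesNet.h>
#include <gtsam/linear/VectorValues.h>

void gradientDemo(const gtsam::GaussianBayesNet& bn) {
  using namespace gtsam;
  // Gradient about an arbitrary point; here x0 = 0 so that the two
  // calls should agree: R^T(R*0 - d) == -R^T d.
  VectorValues x0 = VectorValues::Zero(*allocateVectorValues(bn));
  VectorValues g1 = gradient(bn, x0);

  // Gradient about zero, written into a preallocated output
  // (see allocateVectorValues, as the doc comment notes).
  VectorValues g0 = *allocateVectorValues(bn);
  gradientAtZero(bn, g0);
}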

View File

@@ -582,17 +582,28 @@ namespace gtsam {
  }

  /* ************************************************************************* */
  VectorValues gradient(const FactorGraph<JacobianFactor>& fg, const VectorValues& x0) {
    // It is crucial for performance to make a zero-valued clone of x0
    VectorValues g = VectorValues::Zero(x0);
    Errors e;
    BOOST_FOREACH(const JacobianFactor::shared_ptr& factor, fg) {
      e.push_back(factor->error_vector(x0));
    }
    transposeMultiplyAdd(fg, 1.0, e, g);
    return g;
  }

  /* ************************************************************************* */
  void gradientAtZero(const FactorGraph<JacobianFactor>& fg, VectorValues& g) {
    // Zero out the gradient
    g.setZero();
    Errors e;
    BOOST_FOREACH(const JacobianFactor::shared_ptr& factor, fg) {
      e.push_back(-factor->getb());
    }
    transposeMultiplyAdd(fg, 1.0, e, g);
  }

  /* ************************************************************************* */
  void residual(const FactorGraph<JacobianFactor>& fg, const VectorValues &x, VectorValues &r) {
    Index i = 0 ;

View File

@@ -318,11 +318,26 @@ namespace gtsam {
  void transposeMultiplyAdd(const FactorGraph<JacobianFactor>& fg, double alpha, const Errors& e, VectorValues& x);

  /**
   * Compute the gradient of the energy function,
   * \f$ \nabla_{x=x_0} \left\Vert \Sigma^{-1} A x - b \right\Vert^2 \f$,
   * centered around \f$ x = x_0 \f$.
   * The gradient is \f$ A^T(Ax-b) \f$.
   * @param fg The Jacobian factor graph \f$ (A,b) \f$
   * @param x0 The center about which to compute the gradient
   * @return The gradient as a VectorValues
   */
  VectorValues gradient(const FactorGraph<JacobianFactor>& fg, const VectorValues& x0);

  /**
   * Compute the gradient of the energy function,
   * \f$ \nabla_{x=0} \left\Vert \Sigma^{-1} A x - b \right\Vert^2 \f$,
   * centered around zero.
   * The gradient about zero is \f$ -A^T b \f$. See also gradient(const FactorGraph<JacobianFactor>&, const VectorValues&).
   * @param fg The Jacobian factor graph \f$ (A,b) \f$
   * @param[out] g A VectorValues to store the gradient, which must be preallocated, see allocateVectorValues
   */
  void gradientAtZero(const FactorGraph<JacobianFactor>& fg, VectorValues& g);

  // matrix-vector operations
  void residual(const FactorGraph<JacobianFactor>& fg, const VectorValues &x, VectorValues &r);
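
As a consistency check between the two overloads (a sketch, not part of the commit; the include location is an assumption):

#include <gtsam/linear/GaussianFactorGraph.h>  // assumed location of these declarations
#include <gtsam/linear/VectorValues.h>

void checkGradients(const gtsam::FactorGraph<gtsam::JacobianFactor>& fg,
                    const gtsam::VectorValues& x0) {
  using namespace gtsam;
  // g(x0) = A^T (A x0 - b): per-factor errors, then transposeMultiplyAdd
  VectorValues g = gradient(fg, x0);

  // g(0) = -A^T b, accumulated into a preallocated, same-structure output
  VectorValues g0 = VectorValues::Zero(x0);
  gradientAtZero(fg, g0);
  // When x0 is the zero vector, g and g0 should be identical.
}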

View File

@@ -233,14 +233,14 @@ typename DoglegOptimizerImpl::IterationResult DoglegOptimizerImpl::Iterate(
template<class M>
VectorValues DoglegOptimizerImpl::ComputeSteepestDescentPoint(const M& Rd) {

  // Compute gradient (call gradientAtZero function, which is defined for various linear systems)
  VectorValues grad = *allocateVectorValues(Rd);
  gradientAtZero(Rd, grad);
  double gradientSqNorm = grad.dot(grad);

  // Compute R * g
  FactorGraph<JacobianFactor> Rd_jfg(Rd);
  Errors Rg = Rd_jfg * grad;

  // Compute minimizing step size
  double step = -gradientSqNorm / dot(Rg, Rg);
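
The step-size line follows from minimizing the energy along the steepest descent ray (a short derivation in the \f$ (R,d) \f$ notation of the gradient docs; g is the gradient at zero, so \f$ R^T d = -g \f$):

  \f$ E(t) = \left\Vert R(tg) - d \right\Vert^2 \f$
  \f$ dE/dt = 2 (Rg)^T (t Rg - d) = 0 \f$
  \f$ t^* = (Rg)^T d / \Vert Rg \Vert^2 = -g^T g / \Vert Rg \Vert^2 \f$

which is exactly step = -gradientSqNorm / dot(Rg, Rg) in the code above.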

View File

@ -1,10 +1,22 @@
/* ----------------------------------------------------------------------------
* GTSAM Copyright 2010, Georgia Tech Research Corporation,
* Atlanta, Georgia 30332-0415
* All Rights Reserved
* Authors: Frank Dellaert, et al. (see THANKS for the full author list)
* See LICENSE for the license information
* -------------------------------------------------------------------------- */
/** /**
* @file GaussianISAM2 * @file GaussianISAM2
* @brief Full non-linear ISAM * @brief Full non-linear ISAM
* @author Michael Kaess * @author Michael Kaess
*/ */
#include <gtsam/nonlinear/GaussianISAM2.h> #include <gtsam/inference/FactorGraph.h>
#include <gtsam/linear/JacobianFactor.h>
using namespace std; using namespace std;
using namespace gtsam; using namespace gtsam;
@@ -13,137 +25,137 @@ using namespace gtsam;
namespace gtsam {

/* ************************************************************************* */
template<class CLIQUE>
void optimize2(const boost::shared_ptr<CLIQUE>& clique, double threshold,
    vector<bool>& changed, const vector<bool>& replaced, Permuted<VectorValues>& delta, int& count) {
  // if none of the variables in this clique (frontal and separator!) changed
  // significantly, then by the running intersection property, none of the
  // cliques in the children need to be processed

  // Are any clique variables part of the tree that has been redone?
  bool cliqueReplaced = replaced[(*clique)->frontals().front()];
#ifndef NDEBUG
  BOOST_FOREACH(Index frontal, (*clique)->frontals()) {
    assert(cliqueReplaced == replaced[frontal]);
  }
#endif

  // If not redone, has one of the separator variables changed significantly?
  bool recalculate = cliqueReplaced;
  if(!recalculate) {
    BOOST_FOREACH(Index parent, (*clique)->parents()) {
      if(changed[parent]) {
        recalculate = true;
        break;
      }
    }
  }

  // Solve the clique if it was replaced, or if any parents were changed above
  // the threshold or themselves replaced.
  if(recalculate) {

    // Temporary copy of the original values, to check how much they change
    vector<Vector> originalValues((*clique)->nrFrontals());
    GaussianConditional::const_iterator it;
    for(it = (*clique)->beginFrontals(); it != (*clique)->endFrontals(); it++) {
      originalValues[it - (*clique)->beginFrontals()] = delta[*it];
    }

    // Back-substitute
    (*clique)->rhs(delta);
    (*clique)->solveInPlace(delta);
    count += (*clique)->nrFrontals();

    // Whether the values changed above a threshold, or always true if the
    // clique was replaced.
    bool valuesChanged = cliqueReplaced;
    for(it = (*clique)->beginFrontals(); it != (*clique)->endFrontals(); it++) {
      if(!valuesChanged) {
        const Vector& oldValue(originalValues[it - (*clique)->beginFrontals()]);
        const SubVector& newValue(delta[*it]);
        if((oldValue - newValue).lpNorm<Eigen::Infinity>() >= threshold) {
          valuesChanged = true;
          break;
        }
      } else
        break;
    }

    // If the values were above the threshold or this clique was replaced
    if(valuesChanged) {
      // Set changed flag for each frontal variable and leave the new values
      BOOST_FOREACH(Index frontal, (*clique)->frontals()) {
        changed[frontal] = true;
      }
    } else {
      // Replace with the old values
      for(it = (*clique)->beginFrontals(); it != (*clique)->endFrontals(); it++) {
        delta[*it] = originalValues[it - (*clique)->beginFrontals()];
      }
    }

    // Recurse to children
    BOOST_FOREACH(const typename CLIQUE::shared_ptr& child, clique->children_) {
      optimize2(child, threshold, changed, replaced, delta, count);
    }
  }
}

/* ************************************************************************* */
// fast full version without threshold
template<class CLIQUE>
void optimize2(const boost::shared_ptr<CLIQUE>& clique, VectorValues& delta) {
  // parents are assumed to already be solved and available in the result
  (*clique)->rhs(delta);
  (*clique)->solveInPlace(delta);

  // Solve children recursively
  BOOST_FOREACH(const typename CLIQUE::shared_ptr& child, clique->children_) {
    optimize2(child, delta);
  }
}

///* ************************************************************************* */
//boost::shared_ptr<VectorValues> optimize2(const GaussianISAM2::sharedClique& root) {
//  boost::shared_ptr<VectorValues> delta(new VectorValues());
//  set<Symbol> changed;
//  // starting from the root, call optimize on each conditional
//  optimize2(root, delta);
//  return delta;
//}

/* ************************************************************************* */
template<class CLIQUE>
int optimize2(const boost::shared_ptr<CLIQUE>& root, double threshold, const vector<bool>& keys, Permuted<VectorValues>& delta) {
  vector<bool> changed(keys.size(), false);
  int count = 0;
  // starting from the root, call optimize on each conditional
  optimize2(root, threshold, changed, keys, delta, count);
  return count;
}

/* ************************************************************************* */
template<class CLIQUE>
void nnz_internal(const boost::shared_ptr<CLIQUE>& clique, int& result) {
  int dimR = (*clique)->dim();
  int dimSep = (*clique)->get_S().cols() - dimR;
  result += ((dimR+1)*dimR)/2 + dimSep*dimR;
  // traverse the children
  BOOST_FOREACH(const typename CLIQUE::shared_ptr& child, clique->children_) {
    nnz_internal(child, result);
  }
}

/* ************************************************************************* */
template<class CLIQUE>
int calculate_nnz(const boost::shared_ptr<CLIQUE>& clique) {
  int result = 0;
  // starting from the root, add up entries of frontal and conditional matrices of each conditional
  nnz_internal(clique, result);
  return result;
}

} // namespace gtsam
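
For intuition, a worked instance of the nnz formula above (numbers illustrative, not from the commit): a clique whose conditional has frontal dimension dimR = 6 (say, two frontal variables of dimension 3) and separator dimension dimSep = 3 contributes ((6+1)*6)/2 + 3*6 = 21 + 18 = 39 entries, i.e. the upper-triangular R block plus the dense S block.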

View File

@@ -0,0 +1,61 @@
/* ----------------------------------------------------------------------------
 * GTSAM Copyright 2010, Georgia Tech Research Corporation,
 * Atlanta, Georgia 30332-0415
 * All Rights Reserved
 * Authors: Frank Dellaert, et al. (see THANKS for the full author list)
 * See LICENSE for the license information
 * -------------------------------------------------------------------------- */

/**
 * @file    GaussianISAM2
 * @brief   Full non-linear ISAM
 * @author  Michael Kaess
 */

#include <gtsam/inference/FactorGraph.h>
#include <gtsam/linear/JacobianFactor.h>
#include <boost/bind.hpp>

using namespace std;
using namespace gtsam;

namespace gtsam {

/* ************************************************************************* */
VectorValues gradient(const BayesTree<GaussianConditional>& bayesTree, const VectorValues& x0) {
  return gradient(FactorGraph<JacobianFactor>(bayesTree), x0);
}

/* ************************************************************************* */
void gradientAtZero(const BayesTree<GaussianConditional>& bayesTree, VectorValues& g) {
  gradientAtZero(FactorGraph<JacobianFactor>(bayesTree), g);
}

/* ************************************************************************* */
VectorValues gradient(const BayesTree<GaussianConditional, ISAM2Clique<GaussianConditional> >& bayesTree, const VectorValues& x0) {
  return gradient(FactorGraph<JacobianFactor>(bayesTree), x0);
}

/* ************************************************************************* */
void gradientAtZero(const BayesTree<GaussianConditional, ISAM2Clique<GaussianConditional> >& bayesTree, VectorValues& g) {
  // Zero out the gradient
  g.setZero();
  // Sum up the stored contribution from each clique
  typedef boost::shared_ptr<ISAM2Clique<GaussianConditional> > sharedClique;
  BOOST_FOREACH(const sharedClique& clique, bayesTree.nodes()) {
    // Loop through the frontal variables of each clique, adding its contribution
    int variablePosition = 0;
    for(GaussianConditional::const_iterator jit = clique->conditional()->beginFrontals(); jit != clique->conditional()->endFrontals(); ++jit) {
      const int dim = clique->conditional()->dim(jit);
      g[*jit] += clique->gradientContribution().segment(variablePosition, dim);
      variablePosition += dim;
    }
  }
}

} // namespace gtsam
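
Why the ISAM2 overload is cheap: since the gradient about zero is \f$ -R^T d \f$ and R is block-structured by clique, each clique can compute the rows of that product belonging to its own frontal variables once, when it is (re)eliminated, and gradientAtZero only has to scatter the cached segments. A standalone sketch of the per-clique cache in plain Eigen (an inference from the math above, not code from this commit):

#include <Eigen/Dense>

// Sketch: given a clique's frontal block (R, d), its gradient-at-zero
// contribution is the corresponding rows of -R^T d. The tree-wide gradient
// about zero is just the concatenation of these per-clique vectors, which is
// what gradientContribution().segment(...) reads back in the loop above.
Eigen::VectorXd gradientContribution(const Eigen::MatrixXd& R,
                                     const Eigen::VectorXd& d) {
  return -R.transpose() * d;
}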

View File

@@ -1,3 +1,14 @@
/* ----------------------------------------------------------------------------
 * GTSAM Copyright 2010, Georgia Tech Research Corporation,
 * Atlanta, Georgia 30332-0415
 * All Rights Reserved
 * Authors: Frank Dellaert, et al. (see THANKS for the full author list)
 * See LICENSE for the license information
 * -------------------------------------------------------------------------- */

/**
 * @file    GaussianISAM
 * @brief   Full non-linear ISAM.

@@ -41,26 +52,74 @@ public:
  }
};

/** optimize the BayesTree, starting from the root */
template<class CLIQUE>
void optimize2(const boost::shared_ptr<CLIQUE>& root, VectorValues& delta);

/// optimize the BayesTree, starting from the root; "replaced" needs to contain
/// all variables that are contained in the top of the Bayes tree that has been
/// redone; "delta" is the current solution, an offset from the linearization
/// point; "threshold" is the maximum change against the PREVIOUS delta for
/// non-replaced variables that can be ignored, i.e. the old delta entry is kept
/// and recursive backsubstitution might eventually stop if none of the changed
/// variables are contained in the subtree.
/// returns the number of variables that were solved for
template<class CLIQUE>
int optimize2(const boost::shared_ptr<CLIQUE>& root,
    double threshold, const std::vector<bool>& replaced, Permuted<VectorValues>& delta);

/// calculate the number of non-zero entries for the tree starting at clique (use root for complete matrix)
template<class CLIQUE>
int calculate_nnz(const boost::shared_ptr<CLIQUE>& clique);

/**
 * Compute the gradient of the energy function,
 * \f$ \nabla_{x=x_0} \left\Vert \Sigma^{-1} R x - d \right\Vert^2 \f$,
 * centered around \f$ x = x_0 \f$.
 * The gradient is \f$ R^T(Rx-d) \f$.
 * @param bayesTree The Gaussian Bayes tree \f$ (R,d) \f$
 * @param x0 The center about which to compute the gradient
 * @return The gradient as a VectorValues
 */
VectorValues gradient(const BayesTree<GaussianConditional>& bayesTree, const VectorValues& x0);

/**
 * Compute the gradient of the energy function,
 * \f$ \nabla_{x=0} \left\Vert \Sigma^{-1} R x - d \right\Vert^2 \f$,
 * centered around zero.
 * The gradient about zero is \f$ -R^T d \f$. See also gradient(const GaussianBayesNet&, const VectorValues&).
 * @param bayesTree The Gaussian Bayes tree \f$ (R,d) \f$
 * @param[out] g A VectorValues to store the gradient, which must be preallocated, see allocateVectorValues
 */
void gradientAtZero(const BayesTree<GaussianConditional>& bayesTree, VectorValues& g);

/**
 * Compute the gradient of the energy function,
 * \f$ \nabla_{x=x_0} \left\Vert \Sigma^{-1} R x - d \right\Vert^2 \f$,
 * centered around \f$ x = x_0 \f$.
 * The gradient is \f$ R^T(Rx-d) \f$.
 * This specialized version is used with ISAM2, where each clique stores its
 * gradient contribution.
 * @param bayesTree The Gaussian Bayes tree \f$ (R,d) \f$
 * @param x0 The center about which to compute the gradient
 * @return The gradient as a VectorValues
 */
VectorValues gradient(const BayesTree<GaussianConditional, ISAM2Clique<GaussianConditional> >& bayesTree, const VectorValues& x0);

/**
 * Compute the gradient of the energy function,
 * \f$ \nabla_{x=0} \left\Vert \Sigma^{-1} R x - d \right\Vert^2 \f$,
 * centered around zero.
 * The gradient about zero is \f$ -R^T d \f$. See also gradient(const GaussianBayesNet&, const VectorValues&).
 * This specialized version is used with ISAM2, where each clique stores its
 * gradient contribution.
 * @param bayesTree The Gaussian Bayes tree \f$ (R,d) \f$
 * @param[out] g A VectorValues to store the gradient, which must be preallocated, see allocateVectorValues
 */
void gradientAtZero(const BayesTree<GaussianConditional, ISAM2Clique<GaussianConditional> >& bayesTree, VectorValues& g);

} // namespace gtsam

#include <gtsam/nonlinear/GaussianISAM2-inl.h>
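
Putting the header together, a caller can drive either specialization through the same two-call pattern; a minimal sketch (illustrative, not part of the commit):

#include <gtsam/nonlinear/GaussianISAM2.h>

void treeGradientDemo(
    const gtsam::BayesTree<gtsam::GaussianConditional,
                           gtsam::ISAM2Clique<gtsam::GaussianConditional> >& tree,
    const gtsam::VectorValues& x0) {
  using namespace gtsam;
  VectorValues g1 = gradient(tree, x0);      // R^T(R x0 - d), via conversion
  VectorValues g0 = VectorValues::Zero(x0);  // preallocated, same structure as x0
  gradientAtZero(tree, g0);                  // -R^T d, from cached contributions
}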

View File

@@ -30,7 +30,8 @@ headers += DoglegOptimizer.h DoglegOptimizer-inl.h

# Nonlinear iSAM(2)
headers += NonlinearISAM.h NonlinearISAM-inl.h
headers += ISAM2.h ISAM2-inl.h ISAM2-impl-inl.h
sources += GaussianISAM2.cpp
headers += GaussianISAM2-inl.h

# Nonlinear constraints
headers += NonlinearEquality.h