diff --git a/gtsam/inference/tests/testEliminationTree.cpp b/gtsam/inference/tests/testEliminationTree.cpp
index b1efc26c8..0ec8b2266 100644
--- a/gtsam/inference/tests/testEliminationTree.cpp
+++ b/gtsam/inference/tests/testEliminationTree.cpp
@@ -25,7 +25,8 @@
 using namespace gtsam;
 using namespace std;
 
-struct EliminationTreeTester {
+class EliminationTreeTester {
+public:
   // build hardcoded tree
   static SymbolicEliminationTree::shared_ptr buildHardcodedTree(const SymbolicFactorGraph& fg) {
diff --git a/gtsam/linear/GaussianBayesTree-inl.h b/gtsam/linear/GaussianBayesTree-inl.h
index 9cb327bec..4972b13bf 100644
--- a/gtsam/linear/GaussianBayesTree-inl.h
+++ b/gtsam/linear/GaussianBayesTree-inl.h
@@ -25,4 +25,17 @@
 namespace gtsam {
 
+/* ************************************************************************* */
+namespace internal {
+template<class BAYESTREE>
+void optimizeInPlace(const typename BAYESTREE::sharedClique& clique, VectorValues& result) {
+  // parents are assumed to already be solved and available in result
+  clique->conditional()->solveInPlace(result);
+
+  // starting from the root, call optimize on each conditional
+  BOOST_FOREACH(const typename BAYESTREE::sharedClique& child, clique->children_)
+    optimizeInPlace<BAYESTREE>(child, result);
+}
+}
+
 }
diff --git a/gtsam/linear/GaussianBayesTree.cpp b/gtsam/linear/GaussianBayesTree.cpp
index 4243657e5..ae76d958e 100644
--- a/gtsam/linear/GaussianBayesTree.cpp
+++ b/gtsam/linear/GaussianBayesTree.cpp
@@ -23,22 +23,15 @@
 namespace gtsam {
 
 /* ************************************************************************* */
-namespace internal {
-void optimizeInPlace(const boost::shared_ptr<BayesTreeClique<GaussianConditional> >& clique, VectorValues& result) {
-  // parents are assumed to already be solved and available in result
-  clique->conditional()->solveInPlace(result);
-
-  // starting from the root, call optimize on each conditional
-  BOOST_FOREACH(const boost::shared_ptr<BayesTreeClique<GaussianConditional> >& child, clique->children_)
-    optimizeInPlace(child, result);
-}
+VectorValues optimize(const GaussianBayesTree& bayesTree) {
+  VectorValues result = *allocateVectorValues(bayesTree);
+  optimizeInPlace(bayesTree, result);
+  return result;
 }
 
 /* ************************************************************************* */
-VectorValues optimize(const GaussianBayesTree& bayesTree) {
-  VectorValues result = *allocateVectorValues(bayesTree);
-  internal::optimizeInPlace(bayesTree.root(), result);
-  return result;
+void optimizeInPlace(const GaussianBayesTree& bayesTree, VectorValues& result) {
+  internal::optimizeInPlace<GaussianBayesTree>(bayesTree.root(), result);
 }
 
 /* ************************************************************************* */
@@ -77,11 +70,6 @@ void optimizeGradientSearchInPlace(const GaussianBayesTree& Rd, VectorValues& gr
   toc(4, "Compute point");
 }
 
-/* ************************************************************************* */
-void optimizeInPlace(const GaussianBayesTree& bayesTree, VectorValues& result) {
-  internal::optimizeInPlace(bayesTree.root(), result);
-}
-
 /* ************************************************************************* */
 VectorValues gradient(const GaussianBayesTree& bayesTree, const VectorValues& x0) {
   return gradient(FactorGraph<JacobianFactor>(bayesTree), x0);
diff --git a/gtsam/linear/GaussianBayesTree.h b/gtsam/linear/GaussianBayesTree.h
index bb15eb063..8b581351c 100644
--- a/gtsam/linear/GaussianBayesTree.h
+++ b/gtsam/linear/GaussianBayesTree.h
@@ -34,7 +34,8 @@ VectorValues optimize(const GaussianBayesTree& bayesTree);
 
 void optimizeInPlace(const GaussianBayesTree& clique, VectorValues& result);
 
 namespace internal {
-void optimizeInPlace(const boost::shared_ptr<BayesTreeClique<GaussianConditional> >& clique, VectorValues& result);
+template<class BAYESTREE>
+void optimizeInPlace(const typename BAYESTREE::sharedClique& clique, VectorValues& result);
 }
 
 /**
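[Review note] The recursive solver is now a template in `gtsam::internal`, shared by `GaussianBayesTree` and the junction-tree/ISAM2 code below, while `optimize` becomes a thin allocating wrapper over `optimizeInPlace`. A minimal sketch of the two calling conventions, using only signatures visible in this diff; the function name `solveExample` is illustrative, not part of the change:

```cpp
#include <gtsam/linear/GaussianBayesTree.h>

using namespace gtsam;

// Illustrative caller showing both entry points after this change.
void solveExample(const GaussianBayesTree& bayesTree) {
  // Allocating form: optimize() pre-allocates and forwards to optimizeInPlace().
  VectorValues delta = optimize(bayesTree);

  // In-place form: reuse pre-allocated storage across repeated solves,
  // which is what the DoglegOptimizerImpl changes below rely on.
  VectorValues result = *allocateVectorValues(bayesTree);
  optimizeInPlace(bayesTree, result);
}
```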
diff --git a/gtsam/linear/GaussianConditional.cpp b/gtsam/linear/GaussianConditional.cpp
index 233dc4b34..2fca36b6a 100644
--- a/gtsam/linear/GaussianConditional.cpp
+++ b/gtsam/linear/GaussianConditional.cpp
@@ -27,42 +27,6 @@ using namespace std;
 
 namespace gtsam {
 
-/* ************************************************************************* */
-// Helper function used only in this file - extracts vectors with variable indices
-// in the first and last iterators, and concatenates them in that order into the
-// output.
-template<class VALUES, class ITERATOR>
-static Vector extractVectorValuesSlices(const VALUES& values, ITERATOR first, ITERATOR last) {
-  // Find total dimensionality
-  int dim = 0;
-  for(ITERATOR j = first; j != last; ++j)
-    dim += values[*j].rows();
-
-  // Copy vectors
-  Vector ret(dim);
-  int varStart = 0;
-  for(ITERATOR j = first; j != last; ++j) {
-    ret.segment(varStart, values[*j].rows()) = values[*j];
-    varStart += values[*j].rows();
-  }
-  return ret;
-}
-
-/* ************************************************************************* */
-// Helper function used only in this file - writes to the variables in values
-// with indices iterated over by first and last, interpreting vector as the
-// concatenated vectors to write.
-template<class VECTOR, class VALUES, class ITERATOR>
-static void writeVectorValuesSlices(const VECTOR& vector, VALUES& values, ITERATOR first, ITERATOR last) {
-  // Copy vectors
-  int varStart = 0;
-  for(ITERATOR j = first; j != last; ++j) {
-    values[*j] = vector.segment(varStart, values[*j].rows());
-    varStart += values[*j].rows();
-  }
-  assert(varStart == vector.rows());
-}
-
 /* ************************************************************************* */
 GaussianConditional::GaussianConditional() : rsd_(matrix_) {}
 
@@ -230,7 +194,7 @@ inline static void doSolveInPlace(const GaussianConditional& conditional, VALUES
   static const bool debug = false;
   if(debug) conditional.print("Solving conditional in place");
-  Vector xS = extractVectorValuesSlices(x, conditional.beginParents(), conditional.endParents());
+  Vector xS = internal::extractVectorValuesSlices(x, conditional.beginParents(), conditional.endParents());
   xS = conditional.get_d() - conditional.get_S() * xS;
   Vector soln = conditional.permutation().transpose() *
       conditional.get_R().triangularView<Eigen::Upper>().solve(xS);
@@ -238,7 +202,7 @@ inline static void doSolveInPlace(const GaussianConditional& conditional, VALUES
     gtsam::print(Matrix(conditional.get_R()), "Calling backSubstituteUpper on ");
     gtsam::print(soln, "full back-substitution solution: ");
   }
-  writeVectorValuesSlices(soln, x, conditional.beginFrontals(), conditional.endFrontals());
+  internal::writeVectorValuesSlices(soln, x, conditional.beginFrontals(), conditional.endFrontals());
 }
 
 /* ************************************************************************* */
@@ -253,7 +217,7 @@ void GaussianConditional::solveInPlace(Permuted<VectorValues>& x) const {
 
 /* ************************************************************************* */
 void GaussianConditional::solveTransposeInPlace(VectorValues& gy) const {
-  Vector frontalVec = extractVectorValuesSlices(gy, beginFrontals(), endFrontals());
+  Vector frontalVec = internal::extractVectorValuesSlices(gy, beginFrontals(), endFrontals());
   // TODO: verify permutation
   frontalVec = permutation_ * gtsam::backSubstituteUpper(frontalVec, Matrix(get_R()));
   GaussianConditional::const_iterator it;
@@ -261,14 +225,14 @@ void GaussianConditional::solveTransposeInPlace(VectorValues& gy) const {
     const Index i = *it;
     transposeMultiplyAdd(-1.0, get_S(it), frontalVec, gy[i]);
   }
-  writeVectorValuesSlices(frontalVec, gy, beginFrontals(), endFrontals());
+  internal::writeVectorValuesSlices(frontalVec, gy, beginFrontals(), endFrontals());
 }
 
 /* ************************************************************************* */
 void GaussianConditional::scaleFrontalsBySigma(VectorValues& gy) const {
-  Vector frontalVec = extractVectorValuesSlices(gy, beginFrontals(), endFrontals());
+  Vector frontalVec = internal::extractVectorValuesSlices(gy, beginFrontals(), endFrontals());
   frontalVec = emul(frontalVec, get_sigmas());
-  writeVectorValuesSlices(frontalVec, gy, beginFrontals(), endFrontals());
+  internal::writeVectorValuesSlices(frontalVec, gy, beginFrontals(), endFrontals());
 }
 
 }
diff --git a/gtsam/linear/GaussianFactorGraph.h b/gtsam/linear/GaussianFactorGraph.h
index f48f7f13f..822c8f62a 100644
--- a/gtsam/linear/GaussianFactorGraph.h
+++ b/gtsam/linear/GaussianFactorGraph.h
@@ -31,7 +31,7 @@ namespace gtsam {
 
-  struct SharedDiagonal;
+  class SharedDiagonal;
 
   /** return A*x-b
   * \todo Make this a member function - affects SubgraphPreconditioner */
diff --git a/gtsam/linear/GaussianJunctionTree.cpp b/gtsam/linear/GaussianJunctionTree.cpp
index 2c2a44dbb..87063bb48 100644
--- a/gtsam/linear/GaussianJunctionTree.cpp
+++ b/gtsam/linear/GaussianJunctionTree.cpp
@@ -50,7 +50,7 @@ namespace gtsam {
       // back-substitution
       tic(3, "back-substitute");
-      internal::optimizeInPlace(rootClique, result);
+      internal::optimizeInPlace<GaussianBayesTree>(rootClique, result);
       toc(3, "back-substitute");
       return result;
     }
diff --git a/gtsam/linear/HessianFactor.h b/gtsam/linear/HessianFactor.h
index c1e5f016a..7cba0c744 100644
--- a/gtsam/linear/HessianFactor.h
+++ b/gtsam/linear/HessianFactor.h
@@ -33,7 +33,7 @@ namespace gtsam {
 
   // Forward declarations
   class JacobianFactor;
-  struct SharedDiagonal;
+  class SharedDiagonal;
   class GaussianConditional;
   template<class CONDITIONAL> class BayesNet;
diff --git a/gtsam/linear/JacobianFactor.cpp b/gtsam/linear/JacobianFactor.cpp
index dbc2a5cff..1f9dca77b 100644
--- a/gtsam/linear/JacobianFactor.cpp
+++ b/gtsam/linear/JacobianFactor.cpp
@@ -42,20 +42,20 @@ using namespace boost::lambda;
 
 namespace gtsam {
 
   /* ************************************************************************* */
-  inline void JacobianFactor::assertInvariants() const {
-  #ifndef NDEBUG
+  void JacobianFactor::assertInvariants() const {
+#ifndef NDEBUG
     GaussianFactor::assertInvariants(); // The base class checks for unique keys
     assert((size() == 0 && Ab_.rows() == 0 && Ab_.nBlocks() == 0) || size()+1 == Ab_.nBlocks());
     assert(firstNonzeroBlocks_.size() == Ab_.rows());
     for(size_t i=0; i<Ab_.rows(); ++i)
diff --git a/gtsam/linear/SharedDiagonal.h b/gtsam/linear/SharedDiagonal.h
--- a/gtsam/linear/SharedDiagonal.h
+++ b/gtsam/linear/SharedDiagonal.h
@@ ... @@
   // that call Sigmas and Sigma, respectively.
-  struct SharedDiagonal: public noiseModel::Diagonal::shared_ptr {
+  class SharedDiagonal: public noiseModel::Diagonal::shared_ptr {
+  public:
 
     SharedDiagonal() { }
     SharedDiagonal(const noiseModel::Diagonal::shared_ptr& p) :
diff --git a/gtsam/linear/SharedGaussian.h b/gtsam/linear/SharedGaussian.h
index 4748efd04..a3154bac1 100644
--- a/gtsam/linear/SharedGaussian.h
+++ b/gtsam/linear/SharedGaussian.h
@@ -27,7 +27,8 @@ namespace gtsam { // note, deliberately not in noiseModel namespace
   * A useful convenience class to refer to a shared Gaussian model
   * Also needed to make noise models in matlab
   */
-  struct SharedGaussian: public SharedNoiseModel {
+  class SharedGaussian: public SharedNoiseModel {
+  public:
 
     typedef SharedNoiseModel Base;
diff --git a/gtsam/linear/VectorValues.cpp b/gtsam/linear/VectorValues.cpp
index 6c3ec797e..96bafcd16 100644
--- a/gtsam/linear/VectorValues.cpp
+++ b/gtsam/linear/VectorValues.cpp
@@ -156,3 +156,22 @@ void VectorValues::operator+=(const VectorValues& c) {
   assert(this->hasSameStructure(c));
   this->values_ += c.values_;
 }
+
+/* ************************************************************************* */
+VectorValues& VectorValues::operator=(const Permuted<VectorValues>& rhs) {
+  if(this->size() != rhs.size())
+    throw std::invalid_argument("VectorValues assignment from Permuted<VectorValues> requires pre-allocation, see documentation.");
+  for(size_t j=0; j<this->size(); ++j) {
+    if(exists(j)) {
+      SubVector& l(this->at(j));
+      const SubVector& r(rhs[j]);
+      if(l.rows() != r.rows())
+        throw std::invalid_argument("VectorValues assignment from Permuted<VectorValues> requires pre-allocation, see documentation.");
+      l = r;
+    } else {
+      if(rhs.container().exists(rhs.permutation()[j]))
+        throw std::invalid_argument("VectorValues assignment from Permuted<VectorValues> requires pre-allocation, see documentation.");
+    }
+  }
+  return *this;
+}
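[Review note] Hedged usage sketch of the new assignment operator: per the guard clauses above, the left-hand side must already be allocated with the same size and per-variable dimensions as the permuted right-hand side, otherwise `std::invalid_argument` is thrown. The name `copyDelta` is illustrative only:

```cpp
#include <gtsam/linear/VectorValues.h>

using namespace gtsam;

// Copy a Permuted<VectorValues> view back into plain, pre-allocated storage.
void copyDelta(const Permuted<VectorValues>& delta, VectorValues& target) {
  target = delta;  // element-wise copy through the permutation
}
```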
diff --git a/gtsam/linear/VectorValues.h b/gtsam/linear/VectorValues.h
index 0be2cad19..7939b486d 100644
--- a/gtsam/linear/VectorValues.h
+++ b/gtsam/linear/VectorValues.h
@@ -19,6 +19,7 @@
 #include
 #include
+#include
 #include
 #include
@@ -177,7 +178,7 @@ namespace gtsam {
     /** Construct from a container of variable dimensions (in variable order), without initializing any values. */
     template<class CONTAINER>
-    VectorValues(const CONTAINER& dimensions) { append(dimensions); }
+    explicit VectorValues(const CONTAINER& dimensions) { append(dimensions); }
 
     /** Construct to hold nVars vectors of varDim dimension each. */
     VectorValues(Index nVars, size_t varDim) { resize(nVars, varDim); }
@@ -273,6 +274,11 @@ namespace gtsam {
      */
     void operator+=(const VectorValues& c);
 
+    /** Assignment operator from Permuted<VectorValues>, requires the dimensions
+     * of the assignee to already be properly pre-allocated.
+     */
+    VectorValues& operator=(const Permuted<VectorValues>& rhs);
+
     /// @}
 
   private:
@@ -402,4 +408,42 @@ namespace gtsam {
 #endif
 }
 
+  namespace internal {
+  /* ************************************************************************* */
+  // Helper function, extracts vectors with variable indices
+  // in the first and last iterators, and concatenates them in that order into the
+  // output.
+  template<class VALUES, class ITERATOR>
+  Vector extractVectorValuesSlices(const VALUES& values, ITERATOR first, ITERATOR last) {
+    // Find total dimensionality
+    int dim = 0;
+    for(ITERATOR j = first; j != last; ++j)
+      dim += values[*j].rows();
+
+    // Copy vectors
+    Vector ret(dim);
+    int varStart = 0;
+    for(ITERATOR j = first; j != last; ++j) {
+      ret.segment(varStart, values[*j].rows()) = values[*j];
+      varStart += values[*j].rows();
+    }
+    return ret;
+  }
+
+  /* ************************************************************************* */
+  // Helper function, writes to the variables in values
+  // with indices iterated over by first and last, interpreting vector as the
+  // concatenated vectors to write.
+  template<class VECTOR, class VALUES, class ITERATOR>
+  void writeVectorValuesSlices(const VECTOR& vector, VALUES& values, ITERATOR first, ITERATOR last) {
+    // Copy vectors
+    int varStart = 0;
+    for(ITERATOR j = first; j != last; ++j) {
+      values[*j] = vector.segment(varStart, values[*j].rows());
+      varStart += values[*j].rows();
+    }
+    assert(varStart == vector.rows());
+  }
+  }
+
 } // \namespace gtsam
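[Review note] The two slice helpers deleted from GaussianConditional.cpp reappear here under `gtsam::internal` so that the ISAM2 implementation below can reuse them. A small hypothetical round trip showing what they compute, gathering variables into one stacked vector and scattering it back; `sliceExample` and the chosen indices are illustrative:

```cpp
#include <vector>
#include <gtsam/linear/VectorValues.h>

using namespace gtsam;

// Gather variables 2 and 0 (in that order) into one stacked vector,
// operate on it as a block, then scatter the segments back.
void sliceExample(VectorValues& values) {
  std::vector<Index> indices;
  indices.push_back(2);
  indices.push_back(0);

  // stacked = [ values[2] ; values[0] ]
  Vector stacked = internal::extractVectorValuesSlices(values, indices.begin(), indices.end());
  stacked *= 2.0;
  internal::writeVectorValuesSlices(stacked, values, indices.begin(), indices.end());
}
```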
diff --git a/gtsam/nonlinear/DoglegOptimizerImpl.h b/gtsam/nonlinear/DoglegOptimizerImpl.h
index 5feaa401a..fbfa06028 100644
--- a/gtsam/nonlinear/DoglegOptimizerImpl.h
+++ b/gtsam/nonlinear/DoglegOptimizerImpl.h
@@ -135,12 +135,18 @@ typename DoglegOptimizerImpl::IterationResult DoglegOptimizerImpl::Iterate(
     const F& f, const VALUES& x0, const Ordering& ordering, const double f_error, const bool verbose) {
 
   // Compute steepest descent and Newton's method points
-  tic(0, "Steepest Descent");
-  VectorValues dx_u = optimizeGradientSearch(Rd);
-  toc(0, "Steepest Descent");
-  tic(1, "optimize");
-  VectorValues dx_n = optimize(Rd);
-  toc(1, "optimize");
+  tic(0, "optimizeGradientSearch");
+  tic(0, "allocateVectorValues");
+  VectorValues dx_u = *allocateVectorValues(Rd);
+  toc(0, "allocateVectorValues");
+  tic(1, "optimizeGradientSearchInPlace");
+  optimizeGradientSearchInPlace(Rd, dx_u);
+  toc(1, "optimizeGradientSearchInPlace");
+  toc(0, "optimizeGradientSearch");
+  tic(1, "optimizeInPlace");
+  VectorValues dx_n(VectorValues::SameStructure(dx_u));
+  optimizeInPlace(Rd, dx_n);
+  toc(1, "optimizeInPlace");
   tic(2, "jfg error");
   const GaussianFactorGraph jfg(Rd);
   const double M_error = jfg.error(VectorValues::Zero(dx_u));
@@ -177,6 +183,7 @@ typename DoglegOptimizerImpl::IterationResult DoglegOptimizerImpl::Iterate(
     if(verbose) cout << "f error: " << f_error << " -> " << result.f_error << endl;
     if(verbose) cout << "M error: " << M_error << " -> " << new_M_error << endl;
 
+    tic(7, "adjust Delta");
     // Compute gain ratio.  Here we take advantage of the invariant that the
     // Bayes' net error at zero is equal to the nonlinear error
     const double rho = fabs(M_error - new_M_error) < 1e-15 ?
@@ -186,7 +193,6 @@ typename DoglegOptimizerImpl::IterationResult DoglegOptimizerImpl::Iterate(
     if(verbose) cout << "rho = " << rho << endl;
 
     if(rho >= 0.75) {
-      tic(7, "Rho >= 0.75");
       // M agrees very well with f, so try to increase lambda
       const double dx_d_norm = result.dx_d.vector().norm();
       const double newDelta = std::max(Delta, 3.0 * dx_d_norm); // Compute new Delta
@@ -204,14 +210,12 @@ typename DoglegOptimizerImpl::IterationResult DoglegOptimizerImpl::Iterate(
         assert(false);
       }
       Delta = newDelta; // Update Delta from new Delta
-      toc(7, "Rho >= 0.75");
 
     } else if(0.75 > rho && rho >= 0.25) {
       // M agrees so-so with f, keep the same Delta
       stay = false;
 
     } else if(0.25 > rho && rho >= 0.0) {
-      tic(8, "0.25 > Rho >= 0.75");
       // M does not agree well with f, decrease Delta until it does
       double newDelta;
       if(Delta > 1e-5)
@@ -227,11 +231,8 @@ typename DoglegOptimizerImpl::IterationResult DoglegOptimizerImpl::Iterate(
         assert(false);
       }
       Delta = newDelta; // Update Delta from new Delta
-      toc(8, "0.25 > Rho >= 0.75");
 
-    }
-    else {
-      tic(9, "Rho < 0");
+    } else {
       // f actually increased, so keep decreasing Delta until f does not decrease
       assert(0.0 > rho);
       if(Delta > 1e-5) {
@@ -242,8 +243,8 @@ typename DoglegOptimizerImpl::IterationResult DoglegOptimizerImpl::Iterate(
         if(verbose) cout << "Warning: Dog leg stopping because cannot decrease error with minimum Delta" << endl;
         stay = false;
       }
-      toc(9, "Rho < 0");
     }
+    toc(7, "adjust Delta");
   }
 
   // dx_d and f_error have already been filled in during the loop
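[Review note] The three per-branch timers collapse into a single "adjust Delta" block; the trust-region rule itself is unchanged. Restated as a standalone sketch for reference, with the `stay` bookkeeping omitted; the shrink factor in the lower branches is elided by the hunk, so the 0.5 here is an assumption, and `adjustDelta` is an illustrative name:

```cpp
#include <algorithm>

// Trust-region update mirrored from the hunk above: expand when the
// model M agrees well with f (rho >= 0.75), keep for so-so agreement,
// shrink (floored at 1e-5) when agreement is poor or f increased.
double adjustDelta(double Delta, double rho, double dx_d_norm) {
  if(rho >= 0.75)
    return std::max(Delta, 3.0 * dx_d_norm);      // expand
  else if(rho >= 0.25)
    return Delta;                                 // keep
  else if(Delta > 1e-5)
    return 0.5 * Delta;                           // shrink (factor assumed)
  else
    return Delta;                                 // already at minimum Delta
}
```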
diff --git a/gtsam/nonlinear/GaussianISAM2-inl.h b/gtsam/nonlinear/GaussianISAM2-inl.h
deleted file mode 100644
index 57801fbe4..000000000
--- a/gtsam/nonlinear/GaussianISAM2-inl.h
+++ /dev/null
@@ -1,176 +0,0 @@
-/* ----------------------------------------------------------------------------
-
- * GTSAM Copyright 2010, Georgia Tech Research Corporation,
- * Atlanta, Georgia 30332-0415
- * All Rights Reserved
- * Authors: Frank Dellaert, et al. (see THANKS for the full author list)
-
- * See LICENSE for the license information
-
- * -------------------------------------------------------------------------- */
-
-/**
- * @file    GaussianISAM2
- * @brief   Full non-linear ISAM
- * @author  Michael Kaess
- */
-
-#include
-#include
-
-#include
-
-namespace gtsam {
-
-  using namespace std;
-
-  /* ************************************************************************* */
-  namespace internal {
-  template<class CLIQUE>
-  void optimizeWildfire(const boost::shared_ptr<CLIQUE>& clique, double threshold,
-      vector<bool>& changed, const vector<bool>& replaced, Permuted<VectorValues>& delta, int& count) {
-    // if none of the variables in this clique (frontal and separator!) changed
-    // significantly, then by the running intersection property, none of the
-    // cliques in the children need to be processed
-
-    // Are any clique variables part of the tree that has been redone?
-    bool cliqueReplaced = replaced[(*clique)->frontals().front()];
-#ifndef NDEBUG
-    BOOST_FOREACH(Index frontal, (*clique)->frontals()) {
-      assert(cliqueReplaced == replaced[frontal]);
-    }
-#endif
-
-    // If not redone, then has one of the separator variables changed significantly?
-    bool recalculate = cliqueReplaced;
-    if(!recalculate) {
-      BOOST_FOREACH(Index parent, (*clique)->parents()) {
-        if(changed[parent]) {
-          recalculate = true;
-          break;
-        }
-      }
-    }
-
-    // Solve clique if it was replaced, or if any parents were changed above the
-    // threshold or themselves replaced.
-    if(recalculate) {
-
-      // Temporary copy of the original values, to check how much they change
-      vector<Vector> originalValues((*clique)->nrFrontals());
-      GaussianConditional::const_iterator it;
-      for(it = (*clique)->beginFrontals(); it!=(*clique)->endFrontals(); it++) {
-        originalValues[it - (*clique)->beginFrontals()] = delta[*it];
-      }
-
-      // Back-substitute
-      (*clique)->solveInPlace(delta);
-      count += (*clique)->nrFrontals();
-
-      // Whether the values changed above a threshold, or always true if the
-      // clique was replaced.
-      bool valuesChanged = cliqueReplaced;
-      for(it = (*clique)->beginFrontals(); it!=(*clique)->endFrontals(); it++) {
-        if(!valuesChanged) {
-          const Vector& oldValue(originalValues[it - (*clique)->beginFrontals()]);
-          const SubVector& newValue(delta[*it]);
-          if((oldValue - newValue).lpNorm<Eigen::Infinity>() >= threshold) {
-            valuesChanged = true;
-            break;
-          }
-        } else
-          break;
-      }
-
-      // If the values were above the threshold or this clique was replaced
-      if(valuesChanged) {
-        // Set changed flag for each frontal variable and leave the new values
-        BOOST_FOREACH(Index frontal, (*clique)->frontals()) {
-          changed[frontal] = true;
-        }
-      } else {
-        // Replace with the old values
-        for(it = (*clique)->beginFrontals(); it!=(*clique)->endFrontals(); it++) {
-          delta[*it] = originalValues[it - (*clique)->beginFrontals()];
-        }
-      }
-
-      // Recurse to children
-      BOOST_FOREACH(const typename CLIQUE::shared_ptr& child, clique->children_) {
-        optimizeWildfire(child, threshold, changed, replaced, delta, count);
-      }
-    }
-  }
-  }
-
-  /* ************************************************************************* */
-  template<class CLIQUE>
-  int optimizeWildfire(const boost::shared_ptr<CLIQUE>& root, double threshold, const vector<bool>& keys, Permuted<VectorValues>& delta) {
-    vector<bool> changed(keys.size(), false);
-    int count = 0;
-    // starting from the root, call optimize on each conditional
-    if(root)
-      internal::optimizeWildfire(root, threshold, changed, keys, delta, count);
-    return count;
-  }
-
-  /* ************************************************************************* */
-  template<class CONDITIONAL, class GRAPH>
-  VectorValues optimizeGradientSearch(const ISAM2<CONDITIONAL, GRAPH>& isam) {
-    tic(0, "Allocate VectorValues");
-    VectorValues grad = *allocateVectorValues(isam);
-    toc(0, "Allocate VectorValues");
-
-    optimizeGradientSearchInPlace(isam, grad);
-
-    return grad;
-  }
-
-  /* ************************************************************************* */
-  template<class CONDITIONAL, class GRAPH>
-  void optimizeGradientSearchInPlace(const ISAM2<CONDITIONAL, GRAPH>& Rd, VectorValues& grad) {
-    tic(1, "Compute Gradient");
-    // Compute gradient (call gradientAtZero function, which is defined for various linear systems)
-    gradientAtZero(Rd, grad);
-    double gradientSqNorm = grad.dot(grad);
-    toc(1, "Compute Gradient");
-
-    tic(2, "Compute R*g");
-    // Compute R * g
-    FactorGraph<JacobianFactor> Rd_jfg(Rd);
-    Errors Rg = Rd_jfg * grad;
-    toc(2, "Compute R*g");
-
-    tic(3, "Compute minimizing step size");
-    // Compute minimizing step size
-    double step = -gradientSqNorm / dot(Rg, Rg);
-    toc(3, "Compute minimizing step size");
-
-    tic(4, "Compute point");
-    // Compute steepest descent point
-    scal(step, grad);
-    toc(4, "Compute point");
-  }
-
-  /* ************************************************************************* */
-  template<class CLIQUE>
-  void nnz_internal(const boost::shared_ptr<CLIQUE>& clique, int& result) {
-    int dimR = (*clique)->dim();
-    int dimSep = (*clique)->get_S().cols() - dimR;
-    result += ((dimR+1)*dimR)/2 + dimSep*dimR;
-    // traverse the children
-    BOOST_FOREACH(const typename CLIQUE::shared_ptr& child, clique->children_) {
-      nnz_internal(child, result);
-    }
-  }
-
-  /* ************************************************************************* */
-  template<class CLIQUE>
-  int calculate_nnz(const boost::shared_ptr<CLIQUE>& clique) {
-    int result = 0;
-    // starting from the root, add up entries of frontal and conditional matrices of each conditional
-    nnz_internal(clique, result);
-    return result;
-  }
-
-} /// namespace gtsam
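[Review note] The per-clique count in `nnz_internal` above is just the storage of the clique's conditional: an upper-triangular \f$ d_R \times d_R \f$ block \f$ R \f$ plus a dense \f$ d_R \times d_S \f$ separator block \f$ S \f$, where \f$ d_R \f$ is the frontal dimension and \f$ d_S \f$ the separator dimension:

\f[ \mathrm{nnz}(\mathrm{clique}) = \frac{d_R(d_R+1)}{2} + d_R\, d_S \f]

For example, \f$ d_R = 3 \f$ and \f$ d_S = 2 \f$ give \f$ 6 + 6 = 12 \f$ entries, matching `((dimR+1)*dimR)/2 + dimSep*dimR`; `calculate_nnz` accumulates this over the whole tree.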
diff --git a/gtsam/nonlinear/GaussianISAM2.cpp b/gtsam/nonlinear/GaussianISAM2.cpp
deleted file mode 100644
index 624c5d4f0..000000000
--- a/gtsam/nonlinear/GaussianISAM2.cpp
+++ /dev/null
@@ -1,60 +0,0 @@
-/* ----------------------------------------------------------------------------
-
- * GTSAM Copyright 2010, Georgia Tech Research Corporation,
- * Atlanta, Georgia 30332-0415
- * All Rights Reserved
- * Authors: Frank Dellaert, et al. (see THANKS for the full author list)
-
- * See LICENSE for the license information
-
- * -------------------------------------------------------------------------- */
-
-/**
- * @file    GaussianISAM2
- * @brief   Full non-linear ISAM
- * @author  Michael Kaess
- */
-
-#include
-#include
-#include
-
-using namespace std;
-using namespace gtsam;
-
-#include
-
-namespace gtsam {
-
-  /* ************************************************************************* */
-  VectorValues gradient(const BayesTree<GaussianConditional, ISAM2Clique<GaussianConditional> >& bayesTree, const VectorValues& x0) {
-    return gradient(FactorGraph<JacobianFactor>(bayesTree), x0);
-  }
-
-  /* ************************************************************************* */
-  static void gradientAtZeroTreeAdder(const boost::shared_ptr<ISAM2Clique<GaussianConditional> >& root, VectorValues& g) {
-    // Loop through variables in each clique, adding contributions
-    int variablePosition = 0;
-    for(GaussianConditional::const_iterator jit = root->conditional()->begin(); jit != root->conditional()->end(); ++jit) {
-      const int dim = root->conditional()->dim(jit);
-      g[*jit] += root->gradientContribution().segment(variablePosition, dim);
-      variablePosition += dim;
-    }
-
-    // Recursively add contributions from children
-    typedef boost::shared_ptr<ISAM2Clique<GaussianConditional> > sharedClique;
-    BOOST_FOREACH(const sharedClique& child, root->children()) {
-      gradientAtZeroTreeAdder(child, g);
-    }
-  }
-
-  /* ************************************************************************* */
-  void gradientAtZero(const BayesTree<GaussianConditional, ISAM2Clique<GaussianConditional> >& bayesTree, VectorValues& g) {
-    // Zero-out gradient
-    g.setZero();
-
-    // Sum up contributions for each clique
-    gradientAtZeroTreeAdder(bayesTree.root(), g);
-  }
-
-}
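[Review note] The deleted functions above compute, clique by clique, the gradient of the Bayes-tree energy \f$ f(x) = \frac{1}{2}\Vert Rx - d\Vert^2 \f$, whose general form is \f$ \nabla f(x_0) = R^\top(Rx_0 - d) \f$; about zero this reduces to

\f[ \nabla f(0) = -R^\top d, \f]

which is why `gradientAtZero` can simply zero out \c g and sum each clique's cached `gradientContribution()` segments instead of ever assembling \f$ R \f$.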
diff --git a/gtsam/nonlinear/GaussianISAM2.h b/gtsam/nonlinear/GaussianISAM2.h
deleted file mode 100644
index a602d81a6..000000000
--- a/gtsam/nonlinear/GaussianISAM2.h
+++ /dev/null
@@ -1,153 +0,0 @@
-/* ----------------------------------------------------------------------------
-
- * GTSAM Copyright 2010, Georgia Tech Research Corporation,
- * Atlanta, Georgia 30332-0415
- * All Rights Reserved
- * Authors: Frank Dellaert, et al. (see THANKS for the full author list)
-
- * See LICENSE for the license information
-
- * -------------------------------------------------------------------------- */
-
-/**
- * @file    GaussianISAM
- * @brief   Full non-linear ISAM.
- * @author  Michael Kaess
- */
-
-// \callgraph
-
-#pragma once
-
-#include
-#include
-
-namespace gtsam {
-
-/**
- * @ingroup ISAM2
- * @brief The main ISAM2 class that is exposed to gtsam users, see ISAM2 for usage.
- *
- * This is a thin wrapper around an ISAM2 class templated on
- * GaussianConditional, and the values on which that GaussianISAM2 is
- * templated.
- *
- * @tparam VALUES The Values or TupleValues\Emph{N} that contains the
- * variables.
- * @tparam GRAPH The NonlinearFactorGraph structure to store factors.  Defaults to standard NonlinearFactorGraph
- */
-template<class GRAPH = NonlinearFactorGraph>
-class GaussianISAM2 : public ISAM2<GaussianConditional, GRAPH> {
-  typedef ISAM2<GaussianConditional, GRAPH> Base;
-public:
-
-  /// @name Standard Constructors
-  /// @{
-
-  /** Create an empty ISAM2 instance */
-  GaussianISAM2(const ISAM2Params& params) : ISAM2<GaussianConditional, GRAPH>(params) {}
-
-  /** Create an empty ISAM2 instance using the default set of parameters (see ISAM2Params) */
-  GaussianISAM2() : ISAM2<GaussianConditional, GRAPH>() {}
-
-  /// @}
-  /// @name Advanced Interface
-  /// @{
-
-  void cloneTo(boost::shared_ptr<GaussianISAM2>& newGaussianISAM2) const {
-    boost::shared_ptr<Base> isam2 = boost::static_pointer_cast<Base>(newGaussianISAM2);
-    Base::cloneTo(isam2);
-  }
-
-  /// @}
-
-};
-
-/** Get the linear delta for the ISAM2 object, i.e. the delta returned by ISAM2::getDelta(), unpermuted */
-template<class CONDITIONAL, class GRAPH>
-VectorValues optimize(const ISAM2<CONDITIONAL, GRAPH>& isam) {
-  VectorValues delta = *allocateVectorValues(isam);
-  internal::optimizeInPlace(isam.root(), delta);
-  return delta;
-}
-
-/// Optimize the BayesTree, starting from the root.
-/// @param replaced Needs to contain
-/// all variables that are contained in the top of the Bayes tree that has been
-/// redone.
-/// @param delta The current solution, an offset from the linearization
-/// point.
-/// @param threshold The maximum change against the PREVIOUS delta for
-/// non-replaced variables that can be ignored, ie. the old delta entry is kept
-/// and recursive backsubstitution might eventually stop if none of the changed
-/// variables are contained in the subtree.
-/// @return The number of variables that were solved for
-template<class CLIQUE>
-int optimizeWildfire(const boost::shared_ptr<CLIQUE>& root,
-    double threshold, const std::vector<bool>& replaced, Permuted<VectorValues>& delta);
-
-/**
- * Optimize along the gradient direction, with a closed-form computation to
- * perform the line search.  The gradient is computed about \f$ \delta x=0 \f$.
- *
- * This function returns \f$ \delta x \f$ that minimizes a reparametrized
- * problem.  The error function of a GaussianBayesNet is
- *
- * \f[ f(\delta x) = \frac{1}{2} |R \delta x - d|^2 = \frac{1}{2}d^T d - d^T R \delta x + \frac{1}{2} \delta x^T R^T R \delta x \f]
- *
- * with gradient and Hessian
- *
- * \f[ g(\delta x) = R^T(R\delta x - d), \qquad G(\delta x) = R^T R. \f]
- *
- * This function performs the line search in the direction of the
- * gradient evaluated at \f$ g = g(\delta x = 0) \f$ with step size
- * \f$ \alpha \f$ that minimizes \f$ f(\delta x = \alpha g) \f$:
- *
- * \f[ f(\alpha) = \frac{1}{2} d^T d + \alpha g^T g + \frac{1}{2} \alpha^2 g^T G g \f]
- *
- * Optimizing by setting the derivative to zero yields
- * \f$ \hat \alpha = (-g^T g) / (g^T G g) \f$.  For efficiency, this function
- * evaluates the denominator without computing the Hessian \f$ G \f$, returning
- *
- * \f[ \delta x = \hat\alpha g = \frac{-g^T g}{(R g)^T(R g)} g \f]
- */
-template<class CONDITIONAL, class GRAPH>
-VectorValues optimizeGradientSearch(const ISAM2<CONDITIONAL, GRAPH>& isam);
-
-/** In-place version of optimizeGradientSearch requiring pre-allocated VectorValues \c x */
-template<class CONDITIONAL, class GRAPH>
-void optimizeGradientSearchInPlace(const ISAM2<CONDITIONAL, GRAPH>& isam, VectorValues& grad);
-
-/// calculate the number of non-zero entries for the tree starting at clique (use root for complete matrix)
-template<class CLIQUE>
-int calculate_nnz(const boost::shared_ptr<CLIQUE>& clique);
-
-/**
- * Compute the gradient of the energy function,
- * \f$ \nabla_{x=x_0} \left\Vert \Sigma^{-1} R x - d \right\Vert^2 \f$,
- * centered around \f$ x = x_0 \f$.
- * The gradient is \f$ R^T(Rx-d) \f$.
- * This specialized version is used with ISAM2, where each clique stores its
- * gradient contribution.
- * @param bayesTree The Gaussian Bayes Tree $(R,d)$
- * @param x0 The center about which to compute the gradient
- * @return The gradient as a VectorValues
- */
-VectorValues gradient(const BayesTree<GaussianConditional, ISAM2Clique<GaussianConditional> >& bayesTree, const VectorValues& x0);
-
-/**
- * Compute the gradient of the energy function,
- * \f$ \nabla_{x=0} \left\Vert \Sigma^{-1} R x - d \right\Vert^2 \f$,
- * centered around zero.
- * The gradient about zero is \f$ -R^T d \f$.  See also gradient(const GaussianBayesNet&, const VectorValues&).
- * This specialized version is used with ISAM2, where each clique stores its
- * gradient contribution.
- * @param bayesTree The Gaussian Bayes Tree $(R,d)$
- * @param [output] g A VectorValues to store the gradient, which must be preallocated, see allocateVectorValues
- * @return The gradient as a VectorValues
- */
-void gradientAtZero(const BayesTree<GaussianConditional, ISAM2Clique<GaussianConditional> >& bayesTree, VectorValues& g);
-
-}/// namespace gtsam
-
-#include
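[Review note] For reference, the closed form quoted in the deleted documentation follows by differentiating \f$ f(\alpha) = \frac{1}{2}d^\top d + \alpha\, g^\top g + \frac{1}{2}\alpha^2 g^\top G g \f$ and setting the derivative to zero:

\f[ \frac{df}{d\alpha} = g^\top g + \alpha\, g^\top G g = 0 \quad\Longrightarrow\quad \hat\alpha = -\frac{g^\top g}{g^\top G g} = -\frac{g^\top g}{(Rg)^\top(Rg)}, \f]

since \f$ G = R^\top R \f$ makes the denominator computable from the single product \f$ Rg \f$, exactly what `optimizeGradientSearchInPlace` does with `Errors Rg = Rd_jfg * grad`.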
diff --git a/gtsam/nonlinear/ISAM2-impl-inl.h b/gtsam/nonlinear/ISAM2-impl.cpp
similarity index 59%
rename from gtsam/nonlinear/ISAM2-impl-inl.h
rename to gtsam/nonlinear/ISAM2-impl.cpp
index 40e87a09b..6df7e8450 100644
--- a/gtsam/nonlinear/ISAM2-impl-inl.h
+++ b/gtsam/nonlinear/ISAM2-impl.cpp
@@ -10,128 +10,21 @@
  * -------------------------------------------------------------------------- */
 
 /**
- * @file    ISAM2-impl-inl.h
+ * @file    ISAM2-impl.cpp
  * @brief   Incremental update functionality (ISAM2) for BayesTree, with fluid relinearization.
  * @author  Michael Kaess, Richard Roberts
  */
 
-#include
+#include
+#include
 
 namespace gtsam {
 
-using namespace std;
-
-template<class CONDITIONAL, class GRAPH>
-struct ISAM2<CONDITIONAL, GRAPH>::Impl {
-
-  typedef ISAM2<CONDITIONAL, GRAPH> ISAM2Type;
-
-  struct PartialSolveResult {
-    typename ISAM2Type::sharedClique bayesTree;
-    Permutation fullReordering;
-    Permutation fullReorderingInverse;
-  };
-
-  struct ReorderingMode {
-    size_t nFullSystemVars;
-    enum { /*AS_ADDED,*/ COLAMD } algorithm;
-    enum { NO_CONSTRAINT, CONSTRAIN_LAST } constrain;
-    boost::optional<const FastSet<Index>&> constrainedKeys;
-  };
-
-  /**
-   * Add new variables to the ISAM2 system.
-   * @param newTheta Initial values for new variables
-   * @param theta Current solution to be augmented with new initialization
-   * @param delta Current linear delta to be augmented with zeros
-   * @param ordering Current ordering to be augmented with new variables
-   * @param nodes Current BayesTree::Nodes index to be augmented with slots for new variables
-   * @param keyFormatter Formatter for printing nonlinear keys during debugging
-   */
-  static void AddVariables(const Values& newTheta, Values& theta, Permuted<VectorValues>& delta, vector<bool>& replacedKeys,
-      Ordering& ordering, typename Base::Nodes& nodes, const KeyFormatter& keyFormatter = DefaultKeyFormatter);
-
-  /**
-   * Extract the set of variable indices from a NonlinearFactorGraph.  For each Symbol
-   * in each NonlinearFactor, obtains the index by calling ordering[symbol].
-   * @param ordering The current ordering from which to obtain the variable indices
-   * @param factors The factors from which to extract the variables
-   * @return The set of variables indices from the factors
-   */
-  static FastSet<Index> IndicesFromFactors(const Ordering& ordering, const GRAPH& factors);
-
-  /**
-   * Find the set of variables to be relinearized according to relinearizeThreshold.
-   * Any variables in the VectorValues delta whose vector magnitude is greater than
-   * or equal to relinearizeThreshold are returned.
-   * @param delta The linear delta to check against the threshold
-   * @param keyFormatter Formatter for printing nonlinear keys during debugging
-   * @return The set of variable indices in delta whose magnitude is greater than or
-   * equal to relinearizeThreshold
-   */
-  static FastSet<Index> CheckRelinearization(const Permuted<VectorValues>& delta, const Ordering& ordering,
-      const ISAM2Params::RelinearizationThreshold& relinearizeThreshold, const KeyFormatter& keyFormatter = DefaultKeyFormatter);
-
-  /**
-   * Recursively search this clique and its children for marked keys appearing
-   * in the separator, and add the *frontal* keys of any cliques whose
-   * separator contains any marked keys to the set \c keys.  The purpose of
-   * this is to discover the cliques that need to be redone due to information
-   * propagating to them from cliques that directly contain factors being
-   * relinearized.
-   *
-   * The original comment says this finds all variables directly connected to
-   * the marked ones by measurements.  Is this true?  It seems like this
-   * would also pull in variables indirectly connected through other frontal or
-   * separator variables.
-   *
-   * Alternatively could we trace up towards the root for each variable here?
-   */
-  static void FindAll(typename ISAM2Clique<CONDITIONAL>::shared_ptr clique, FastSet<Index>& keys, const vector<bool>& markedMask);
-
-  /**
-   * Apply expmap to the given values, but only for indices appearing in
-   * \c markedRelinMask.  Values are expmapped in-place.
-   * \param [in,out] values The values to expmap in-place
-   * \param delta The linear delta with which to expmap
-   * \param ordering The ordering
-   * \param mask Mask on linear indices, only \c true entries are expmapped
-   * \param invalidateIfDebug If this is true, *and* NDEBUG is not defined,
-   * expmapped deltas will be set to an invalid value (infinity) to catch bugs
-   * where we might expmap something twice, or expmap it but then not
-   * recalculate its delta.
-   * @param keyFormatter Formatter for printing nonlinear keys during debugging
-   */
-  static void ExpmapMasked(Values& values, const Permuted<VectorValues>& delta,
-      const Ordering& ordering, const std::vector<bool>& mask,
-      boost::optional<Permuted<VectorValues>&> invalidateIfDebug = boost::optional<Permuted<VectorValues>&>(),
-      const KeyFormatter& keyFormatter = DefaultKeyFormatter);
-
-  /**
-   * Reorder and eliminate factors.  These factors form a subset of the full
-   * problem, so along with the BayesTree we get a partial reordering of the
-   * problem that needs to be applied to the other data in ISAM2, which is the
-   * VariableIndex, the delta, the ordering, and the orphans (including cached
-   * factors).
-   * \param factors The factors to eliminate, representing part of the full
-   * problem.  This is permuted during use and so is cleared when this function
-   * returns in order to invalidate it.
-   * \param keys The set of indices used in \c factors.
-   * \return The eliminated BayesTree and the permutation to be applied to the
-   * rest of the ISAM2 data.
-   */
-  static PartialSolveResult PartialSolve(GaussianFactorGraph& factors, const FastSet<Index>& keys,
-      const ReorderingMode& reorderingMode);
-
-  static size_t UpdateDelta(const boost::shared_ptr<ISAM2Clique<CONDITIONAL> >& root, std::vector<bool>& replacedKeys, Permuted<VectorValues>& delta, double wildfireThreshold);
-
-};
-
 /* ************************************************************************* */
-template<class CONDITIONAL, class GRAPH>
-void ISAM2<CONDITIONAL, GRAPH>::Impl::AddVariables(
-    const Values& newTheta, Values& theta, Permuted<VectorValues>& delta, vector<bool>& replacedKeys,
-    Ordering& ordering, typename Base::Nodes& nodes, const KeyFormatter& keyFormatter) {
+void ISAM2::Impl::AddVariables(
+    const Values& newTheta, Values& theta, Permuted<VectorValues>& delta,
+    Permuted<VectorValues>& deltaNewton, Permuted<VectorValues>& deltaGradSearch, vector<bool>& replacedKeys,
+    Ordering& ordering, Base::Nodes& nodes, const KeyFormatter& keyFormatter) {
   const bool debug = ISDEBUG("ISAM2 AddVariables");
 
   theta.insert(newTheta);
@@ -145,10 +38,18 @@ void ISAM2::Impl::AddVariables(
   delta.container().append(dims);
   delta.container().vector().segment(originalDim, newDim).operator=(Vector::Zero(newDim));
   delta.permutation().resize(originalnVars + newTheta.size());
+  deltaNewton.container().append(dims);
+  deltaNewton.container().vector().segment(originalDim, newDim).operator=(Vector::Zero(newDim));
+  deltaNewton.permutation().resize(originalnVars + newTheta.size());
+  deltaGradSearch.container().append(dims);
+  deltaGradSearch.container().vector().segment(originalDim, newDim).operator=(Vector::Zero(newDim));
+  deltaGradSearch.permutation().resize(originalnVars + newTheta.size());
   {
     Index nextVar = originalnVars;
     BOOST_FOREACH(const Values::ConstKeyValuePair& key_value, newTheta) {
       delta.permutation()[nextVar] = nextVar;
+      deltaNewton.permutation()[nextVar] = nextVar;
+      deltaGradSearch.permutation()[nextVar] = nextVar;
       ordering.insert(key_value.key, nextVar);
       if(debug) cout << "Adding variable " << keyFormatter(key_value.key) << " with order " << nextVar << endl;
       ++ nextVar;
@@ -163,10 +64,9 @@ void ISAM2::Impl::AddVariables(
 }
 
 /* ************************************************************************* */
-template<class CONDITIONAL, class GRAPH>
-FastSet<Index> ISAM2<CONDITIONAL, GRAPH>::Impl::IndicesFromFactors(const Ordering& ordering, const GRAPH& factors) {
+FastSet<Index> ISAM2::Impl::IndicesFromFactors(const Ordering& ordering, const NonlinearFactorGraph& factors) {
   FastSet<Index> indices;
-  BOOST_FOREACH(const typename NonlinearFactor::shared_ptr& factor, factors) {
+  BOOST_FOREACH(const NonlinearFactor::shared_ptr& factor, factors) {
     BOOST_FOREACH(Key key, factor->keys()) {
       indices.insert(ordering[key]);
     }
@@ -175,8 +75,7 @@ FastSet<Index> ISAM2::Impl::IndicesFromFactors(const Ordering
 }
 
 /* ************************************************************************* */
-template<class CONDITIONAL, class GRAPH>
-FastSet<Index> ISAM2<CONDITIONAL, GRAPH>::Impl::CheckRelinearization(const Permuted<VectorValues>& delta, const Ordering& ordering,
+FastSet<Index> ISAM2::Impl::CheckRelinearization(const Permuted<VectorValues>& delta, const Ordering& ordering,
     const ISAM2Params::RelinearizationThreshold& relinearizeThreshold, const KeyFormatter& keyFormatter) {
   FastSet<Index> relinKeys;
@@ -204,8 +103,7 @@ FastSet<Index> ISAM2::Impl::CheckRelinearization(const Permut
 }
 
 /* ************************************************************************* */
-template<class CONDITIONAL, class GRAPH>
-void ISAM2<CONDITIONAL, GRAPH>::Impl::FindAll(typename ISAM2Clique<CONDITIONAL>::shared_ptr clique, FastSet<Index>& keys, const vector<bool>& markedMask) {
+void ISAM2::Impl::FindAll(ISAM2Clique::shared_ptr clique, FastSet<Index>& keys, const vector<bool>& markedMask) {
   static const bool debug = false;
 
   // does the separator contain any of the variables?
   bool found = false;
@@ -219,14 +117,13 @@ void ISAM2::Impl::FindAll(typename ISAM2Clique::
     if(debug) clique->print("Key(s) marked in clique ");
     if(debug) cout << "so marking key " << (*clique)->keys().front() << endl;
   }
-  BOOST_FOREACH(const typename ISAM2Clique<CONDITIONAL>::shared_ptr& child, clique->children_) {
+  BOOST_FOREACH(const ISAM2Clique::shared_ptr& child, clique->children_) {
     FindAll(child, keys, markedMask);
   }
 }
 
 /* ************************************************************************* */
-template<class CONDITIONAL, class GRAPH>
-void ISAM2<CONDITIONAL, GRAPH>::Impl::ExpmapMasked(Values& values, const Permuted<VectorValues>& delta, const Ordering& ordering,
+void ISAM2::Impl::ExpmapMasked(Values& values, const Permuted<VectorValues>& delta, const Ordering& ordering,
     const vector<bool>& mask, boost::optional<Permuted<VectorValues>&> invalidateIfDebug, const KeyFormatter& keyFormatter) {
   // If debugging, invalidate if requested, otherwise do not invalidate.
   // Invalidating means setting expmapped entries to Inf, to trigger assertions
@@ -262,9 +159,8 @@ void ISAM2::Impl::ExpmapMasked(Values& values, const Permute
 }
 
 /* ************************************************************************* */
-template<class CONDITIONAL, class GRAPH>
-typename ISAM2<CONDITIONAL, GRAPH>::Impl::PartialSolveResult
-ISAM2<CONDITIONAL, GRAPH>::Impl::PartialSolve(GaussianFactorGraph& factors,
+ISAM2::Impl::PartialSolveResult
+ISAM2::Impl::PartialSolve(GaussianFactorGraph& factors,
     const FastSet<Index>& keys, const ReorderingMode& reorderingMode) {
 
   static const bool debug = ISDEBUG("ISAM2 recalculate");
@@ -340,14 +236,8 @@ ISAM2::Impl::PartialSolve(GaussianFactorGraph& factors,
   // eliminate into a Bayes net
   tic(7,"eliminate");
-  JunctionTree<GaussianFactorGraph, typename ISAM2Type::Clique> jt(factors, affectedFactorsIndex);
+  JunctionTree<GaussianFactorGraph, ISAM2::Clique> jt(factors, affectedFactorsIndex);
   result.bayesTree = jt.eliminate(EliminatePreferLDL);
-  if(debug && result.bayesTree) {
-    if(boost::dynamic_pointer_cast<ISAM2Clique<GaussianConditional> >(result.bayesTree))
-      cout << "Is an ISAM2 clique" << endl;
-    cout << "Re-eliminated BT:\n";
-    result.bayesTree->printTree("");
-  }
   toc(7,"eliminate");
 
   tic(8,"permute eliminated");
@@ -363,19 +253,18 @@ ISAM2::Impl::PartialSolve(GaussianFactorGraph& factors,
 
 /* ************************************************************************* */
 namespace internal {
-inline static void optimizeInPlace(const boost::shared_ptr<ISAM2Clique<GaussianConditional> >& clique, VectorValues& result) {
+inline static void optimizeInPlace(const boost::shared_ptr<ISAM2Clique>& clique, VectorValues& result) {
   // parents are assumed to already be solved and available in result
   clique->conditional()->solveInPlace(result);
 
   // starting from the root, call optimize on each conditional
-  BOOST_FOREACH(const boost::shared_ptr<ISAM2Clique<GaussianConditional> >& child, clique->children_)
+  BOOST_FOREACH(const boost::shared_ptr<ISAM2Clique>& child, clique->children_)
     optimizeInPlace(child, result);
 }
 }
 
 /* ************************************************************************* */
-template<class CONDITIONAL, class GRAPH>
-size_t ISAM2<CONDITIONAL, GRAPH>::Impl::UpdateDelta(const boost::shared_ptr<ISAM2Clique<CONDITIONAL> >& root, std::vector<bool>& replacedKeys, Permuted<VectorValues>& delta, double wildfireThreshold) {
+size_t ISAM2::Impl::UpdateDelta(const boost::shared_ptr<ISAM2Clique>& root, std::vector<bool>& replacedKeys, Permuted<VectorValues>& delta, double wildfireThreshold) {
 
   size_t lastBacksubVariableCount;
@@ -412,4 +301,76 @@ size_t ISAM2::Impl::UpdateDelta(const boost::shared_ptr<ISAM2Clique>& root, std
 
+/* ************************************************************************* */
+namespace internal {
+void updateDoglegDeltas(const boost::shared_ptr<ISAM2Clique>& clique, std::vector<bool>& replacedKeys,
+    const VectorValues& grad, Permuted<VectorValues>& deltaNewton, Permuted<VectorValues>& RgProd, size_t& varsUpdated) {
+
+  // Check if any frontal or separator keys were recalculated; if so, we need to
+  // update deltas and recurse to children, but if not, we do not need to
+  // recurse further because of the running intersection property.
+  bool anyReplaced = false;
+  BOOST_FOREACH(Index j, *clique->conditional()) {
+    if(replacedKeys[j]) {
+      anyReplaced = true;
+      break;
+    }
+  }
+
+  if(anyReplaced) {
+    // Update the current variable
+    // Get VectorValues slice corresponding to current variables
+    Vector gR = internal::extractVectorValuesSlices(grad, (*clique)->beginFrontals(), (*clique)->endFrontals());
+    Vector gS = internal::extractVectorValuesSlices(grad, (*clique)->beginParents(), (*clique)->endParents());
+
+    // Compute R*g and S*g for this clique
+    Vector RSgProd = ((*clique)->get_R() * (*clique)->permutation().transpose()) * gR + (*clique)->get_S() * gS;
+
+    // Write into RgProd vector
+    internal::writeVectorValuesSlices(RSgProd, RgProd, (*clique)->beginFrontals(), (*clique)->endFrontals());
+
+    // Now solve the part of the Newton's method point for this clique (back-substitution)
+    //(*clique)->solveInPlace(deltaNewton);
+
+    varsUpdated += (*clique)->nrFrontals();
+
+    // Recurse to children
+    BOOST_FOREACH(const ISAM2Clique::shared_ptr& child, clique->children()) {
+      updateDoglegDeltas(child, replacedKeys, grad, deltaNewton, RgProd, varsUpdated);
+    }
+  }
+}
+}
+
+/* ************************************************************************* */
+size_t ISAM2::Impl::UpdateDoglegDeltas(const ISAM2& isam, double wildfireThreshold, std::vector<bool>& replacedKeys,
+    Permuted<VectorValues>& deltaNewton, Permuted<VectorValues>& RgProd) {
+
+  // Get gradient
+  VectorValues grad = *allocateVectorValues(isam);
+  gradientAtZero(isam, grad);
+
+  // Update variables
+  size_t varsUpdated = 0;
+  internal::updateDoglegDeltas(isam.root(), replacedKeys, grad, deltaNewton, RgProd, varsUpdated);
+  optimizeWildfire(isam.root(), wildfireThreshold, replacedKeys, deltaNewton);
+
+#if 0
+  VectorValues expected = *allocateVectorValues(isam);
+  internal::optimizeInPlace(isam.root(), expected);
+  for(size_t j = 0; j < expected.size(); ++j)
+    { /* … */ }
+  FactorGraph<JacobianFactor> Rd_jfg(isam);
+  Errors Rg = Rd_jfg * grad;
+  double RgMagExpected = dot(Rg, Rg);
+  double RgMagActual = RgProd.container().vector().squaredNorm();
+  cout << fabs(RgMagExpected - RgMagActual) << endl;
+  assert(fabs(RgMagExpected - RgMagActual) < (1e-8 * RgMagActual + 1e-4));
+#endif
+
+  replacedKeys.assign(replacedKeys.size(), false);
+
+  return varsUpdated;
+}
+
 }
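[Review note] This diff maintains the per-clique products `RgProd` but does not show how a caller turns them into the steepest-descent point; the disabled `#if 0` block implies the invariant that `RgProd.container().vector().squaredNorm()` equals \f$ \Vert Rg \Vert^2 \f$. A sketch under that assumption, mirroring the deleted `optimizeGradientSearchInPlace`; `steepestDescentFromCache` is a hypothetical name, not part of this change:

```cpp
#include <gtsam/linear/VectorValues.h>

using namespace gtsam;

// Hypothetical caller: compute the steepest-descent point alpha * g from
// the cached R*g products, with alpha = -|g|^2 / |R g|^2 as in the
// deleted optimizeGradientSearchInPlace.
void steepestDescentFromCache(VectorValues grad,  // g, scaled in place
                              const Permuted<VectorValues>& RgProd,
                              VectorValues& deltaGradSearch) {
  double step = -grad.dot(grad) / RgProd.container().vector().squaredNorm();
  scal(step, grad);          // grad <- step * grad
  deltaGradSearch = grad;    // target assumed pre-allocated, same structure
}
```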
diff --git a/gtsam/nonlinear/ISAM2-impl.h b/gtsam/nonlinear/ISAM2-impl.h
new file mode 100644
index 000000000..351a371df
--- /dev/null
+++ b/gtsam/nonlinear/ISAM2-impl.h
@@ -0,0 +1,134 @@
+/* ----------------------------------------------------------------------------
+
+ * GTSAM Copyright 2010, Georgia Tech Research Corporation,
+ * Atlanta, Georgia 30332-0415
+ * All Rights Reserved
+ * Authors: Frank Dellaert, et al. (see THANKS for the full author list)
+
+ * See LICENSE for the license information
+
+ * -------------------------------------------------------------------------- */
+
+/**
+ * @file    ISAM2-impl.h
+ * @brief   Incremental update functionality (ISAM2) for BayesTree, with fluid relinearization.
+ * @author  Michael Kaess, Richard Roberts
+ */
+
+#pragma once
+
+#include
+#include
+
+namespace gtsam {
+
+using namespace std;
+
+struct ISAM2::Impl {
+
+  struct PartialSolveResult {
+    ISAM2::sharedClique bayesTree;
+    Permutation fullReordering;
+    Permutation fullReorderingInverse;
+  };
+
+  struct ReorderingMode {
+    size_t nFullSystemVars;
+    enum { /*AS_ADDED,*/ COLAMD } algorithm;
+    enum { NO_CONSTRAINT, CONSTRAIN_LAST } constrain;
+    boost::optional<const FastSet<Index>&> constrainedKeys;
+  };
+
+  /**
+   * Add new variables to the ISAM2 system.
+   * @param newTheta Initial values for new variables
+   * @param theta Current solution to be augmented with new initialization
+   * @param delta Current linear delta to be augmented with zeros
+   * @param ordering Current ordering to be augmented with new variables
+   * @param nodes Current BayesTree::Nodes index to be augmented with slots for new variables
+   * @param keyFormatter Formatter for printing nonlinear keys during debugging
+   */
+  static void AddVariables(const Values& newTheta, Values& theta, Permuted<VectorValues>& delta,
+      Permuted<VectorValues>& deltaNewton, Permuted<VectorValues>& deltaGradSearch, vector<bool>& replacedKeys,
+      Ordering& ordering, Base::Nodes& nodes, const KeyFormatter& keyFormatter = DefaultKeyFormatter);
+
+  /**
+   * Extract the set of variable indices from a NonlinearFactorGraph.  For each Symbol
+   * in each NonlinearFactor, obtains the index by calling ordering[symbol].
+   * @param ordering The current ordering from which to obtain the variable indices
+   * @param factors The factors from which to extract the variables
+   * @return The set of variables indices from the factors
+   */
+  static FastSet<Index> IndicesFromFactors(const Ordering& ordering, const NonlinearFactorGraph& factors);
+
+  /**
+   * Find the set of variables to be relinearized according to relinearizeThreshold.
+   * Any variables in the VectorValues delta whose vector magnitude is greater than
+   * or equal to relinearizeThreshold are returned.
+   * @param delta The linear delta to check against the threshold
+   * @param keyFormatter Formatter for printing nonlinear keys during debugging
+   * @return The set of variable indices in delta whose magnitude is greater than or
+   * equal to relinearizeThreshold
+   */
+  static FastSet<Index> CheckRelinearization(const Permuted<VectorValues>& delta, const Ordering& ordering,
+      const ISAM2Params::RelinearizationThreshold& relinearizeThreshold, const KeyFormatter& keyFormatter = DefaultKeyFormatter);
+
+  /**
+   * Recursively search this clique and its children for marked keys appearing
+   * in the separator, and add the *frontal* keys of any cliques whose
+   * separator contains any marked keys to the set \c keys.  The purpose of
+   * this is to discover the cliques that need to be redone due to information
+   * propagating to them from cliques that directly contain factors being
+   * relinearized.
+   *
+   * The original comment says this finds all variables directly connected to
+   * the marked ones by measurements.  Is this true?  It seems like this
+   * would also pull in variables indirectly connected through other frontal or
+   * separator variables.
+   *
+   * Alternatively could we trace up towards the root for each variable here?
+   */
+  static void FindAll(ISAM2Clique::shared_ptr clique, FastSet<Index>& keys, const vector<bool>& markedMask);
+
+  /**
+   * Apply expmap to the given values, but only for indices appearing in
+   * \c markedRelinMask.  Values are expmapped in-place.
+   * \param [in,out] values The values to expmap in-place
+   * \param delta The linear delta with which to expmap
+   * \param ordering The ordering
+   * \param mask Mask on linear indices, only \c true entries are expmapped
+   * \param invalidateIfDebug If this is true, *and* NDEBUG is not defined,
+   * expmapped deltas will be set to an invalid value (infinity) to catch bugs
+   * where we might expmap something twice, or expmap it but then not
+   * recalculate its delta.
+   * @param keyFormatter Formatter for printing nonlinear keys during debugging
+   */
+  static void ExpmapMasked(Values& values, const Permuted<VectorValues>& delta,
+      const Ordering& ordering, const std::vector<bool>& mask,
+      boost::optional<Permuted<VectorValues>&> invalidateIfDebug = boost::optional<Permuted<VectorValues>&>(),
+      const KeyFormatter& keyFormatter = DefaultKeyFormatter);
+
+  /**
+   * Reorder and eliminate factors.  These factors form a subset of the full
+   * problem, so along with the BayesTree we get a partial reordering of the
+   * problem that needs to be applied to the other data in ISAM2, which is the
+   * VariableIndex, the delta, the ordering, and the orphans (including cached
+   * factors).
+   * \param factors The factors to eliminate, representing part of the full
+   * problem.  This is permuted during use and so is cleared when this function
+   * returns in order to invalidate it.
+   * \param keys The set of indices used in \c factors.
+   * \return The eliminated BayesTree and the permutation to be applied to the
+   * rest of the ISAM2 data.
+   */
+  static PartialSolveResult PartialSolve(GaussianFactorGraph& factors, const FastSet<Index>& keys,
+      const ReorderingMode& reorderingMode);
+
+  static size_t UpdateDelta(const boost::shared_ptr<ISAM2Clique>& root, std::vector<bool>& replacedKeys, Permuted<VectorValues>& delta, double wildfireThreshold);
+
+  static size_t UpdateDoglegDeltas(const ISAM2& isam, double wildfireThreshold, std::vector<bool>& replacedKeys,
+      Permuted<VectorValues>& deltaNewton, Permuted<VectorValues>& RgProd);
+
+};
+
+}
diff --git a/gtsam/nonlinear/ISAM2-inl.h b/gtsam/nonlinear/ISAM2-inl.h
index 0d5f190a3..04395069c 100644
--- a/gtsam/nonlinear/ISAM2-inl.h
+++ b/gtsam/nonlinear/ISAM2-inl.h
@@ -1,6 +1,6 @@
 /* ----------------------------------------------------------------------------
 
- * GTSAM Copyright 2010, Georgia Tech Research Corporation, 
+ * GTSAM Copyright 2010, Georgia Tech Research Corporation,
  * Atlanta, Georgia 30332-0415
  * All Rights Reserved
  * Authors: Frank Dellaert, et al. (see THANKS for the full author list)
@@ -10,624 +10,144 @@
  * -------------------------------------------------------------------------- */
 
 /**
- * @file    ISAM2-inl.h
- * @brief   Incremental update functionality (ISAM2) for BayesTree, with fluid relinearization.
- * @author  Michael Kaess, Richard Roberts
+ * @file    ISAM2-inl.h
+ * @brief   
+ * @author  Richard Roberts
+ * @date    Mar 16, 2012
  */
+
 #pragma once
 
-#include
-#include // for operator +=
-using namespace boost::assign;
-#include
-#include
-#include
-#include
-#include
-
-#include
-#include
+#include
+#include
+#include
 
 namespace gtsam {
 
 using namespace std;
 
-static const bool disableReordering = false;
-static const double batchThreshold = 0.65;
-
 /* ************************************************************************* */
-template<class CONDITIONAL, class GRAPH>
-ISAM2<CONDITIONAL, GRAPH>::ISAM2(const ISAM2Params& params):
-    delta_(Permutation(), deltaUnpermuted_), deltaUptodate_(true), params_(params) {
-  // See note in gtsam/base/boost_variant_with_workaround.h
-  if(params_.optimizationParams.type() == typeid(ISAM2DoglegParams))
-    doglegDelta_ = boost::get<ISAM2DoglegParams>(params_.optimizationParams).initialDelta;
-}
-
-/* ************************************************************************* */
-template<class CONDITIONAL, class GRAPH>
-ISAM2<CONDITIONAL, GRAPH>::ISAM2():
-    delta_(Permutation(), deltaUnpermuted_), deltaUptodate_(true) {
-  // See note in gtsam/base/boost_variant_with_workaround.h
-  if(params_.optimizationParams.type() == typeid(ISAM2DoglegParams))
-    doglegDelta_ = boost::get<ISAM2DoglegParams>(params_.optimizationParams).initialDelta;
-}
-
-/* ************************************************************************* */
-template<class CONDITIONAL, class GRAPH>
-FastList<size_t> ISAM2<CONDITIONAL, GRAPH>::getAffectedFactors(const FastList<Index>& keys) const {
-  static const bool debug = false;
-  if(debug) cout << "Getting affected factors for ";
-  if(debug) { BOOST_FOREACH(const Index key, keys) { cout << key << " "; } }
-  if(debug) cout << endl;
-
-  FactorGraph<NonlinearFactor> allAffected;
-  FastList<size_t> indices;
-  BOOST_FOREACH(const Index key, keys) {
-//    const list<size_t> l = nonlinearFactors_.factors(key);
-//    indices.insert(indices.begin(), l.begin(), l.end());
-    const VariableIndex::Factors& factors(variableIndex_[key]);
-    BOOST_FOREACH(size_t factor, factors) {
-      if(debug) cout << "Variable " << key << " affects factor " << factor << endl;
-      indices.push_back(factor);
-    }
-  }
-  indices.sort();
-  indices.unique();
-  if(debug) cout << "Affected factors are: ";
-  if(debug) { BOOST_FOREACH(const size_t index, indices) { cout << index << " "; } }
-  if(debug) cout << endl;
-  return indices;
-}
-
-/* ************************************************************************* */
-// retrieve all factors that ONLY contain the affected variables
-// (note that the remaining stuff is summarized in the cached factors)
-template<class CONDITIONAL, class GRAPH>
-FactorGraph<GaussianFactor>::shared_ptr
-ISAM2<CONDITIONAL, GRAPH>::relinearizeAffectedFactors(const FastList<Index>& affectedKeys) const {
-
-  tic(1,"getAffectedFactors");
-  FastList<size_t> candidates = getAffectedFactors(affectedKeys);
-  toc(1,"getAffectedFactors");
-
-  GRAPH nonlinearAffectedFactors;
-
-  tic(2,"affectedKeysSet");
-  // for fast lookup below
-  FastSet<Index> affectedKeysSet;
-  affectedKeysSet.insert(affectedKeys.begin(), affectedKeys.end());
-  toc(2,"affectedKeysSet");
-
-  tic(3,"check candidates");
-  BOOST_FOREACH(size_t idx, candidates) {
-    bool inside = true;
-    BOOST_FOREACH(Key key, nonlinearFactors_[idx]->keys()) {
-      Index var = ordering_[key];
-      if (affectedKeysSet.find(var) == affectedKeysSet.end()) {
-        inside = false;
-        break;
-      }
-    }
-    if (inside)
-      nonlinearAffectedFactors.push_back(nonlinearFactors_[idx]);
-  }
-  toc(3,"check candidates");
-
-  tic(4,"linearize");
-  FactorGraph<GaussianFactor>::shared_ptr linearized(nonlinearAffectedFactors.linearize(theta_, ordering_));
-  toc(4,"linearize");
-  return linearized;
-}
-
-/* ************************************************************************* */
-// find intermediate (linearized) factors from cache that are passed into the affected area
-template<class CONDITIONAL, class GRAPH>
-FactorGraph<typename ISAM2<CONDITIONAL, GRAPH>::CacheFactor>
-ISAM2<CONDITIONAL, GRAPH>::getCachedBoundaryFactors(Cliques& orphans) {
-
-  static const bool debug = false;
-
-  FactorGraph<CacheFactor> cachedBoundary;
-
-  BOOST_FOREACH(sharedClique orphan, orphans) {
-    // find the last variable that was eliminated
-    Index key = (*orphan)->frontals().back();
-#ifndef NDEBUG
-//    typename BayesNet<CONDITIONAL>::const_iterator it = orphan->end();
-//    const CONDITIONAL& lastCONDITIONAL = **(--it);
-//    typename CONDITIONAL::const_iterator keyit = lastCONDITIONAL.endParents();
-//    const Index lastKey = *(--keyit);
-//    assert(key == lastKey);
-#endif
-    // retrieve the cached factor and add to boundary
-    cachedBoundary.push_back(boost::dynamic_pointer_cast<CacheFactor>(orphan->cachedFactor()));
-    if(debug) { cout << "Cached factor for variable " << key; orphan->cachedFactor()->print(""); }
-  }
-
-  return cachedBoundary;
-}
-
-template<class CONDITIONAL, class GRAPH>
-boost::shared_ptr<FastSet<Index> > ISAM2<CONDITIONAL, GRAPH>::recalculate(
-    const FastSet<Index>& markedKeys, const FastVector<Index>& newKeys, const FactorGraph<GaussianFactor>::shared_ptr newFactors,
-    const boost::optional<FastSet<Index> >& constrainKeys, ISAM2Result& result) {
-
-  // TODO: new factors are linearized twice, the newFactors passed in are not used.
-
-  static const bool debug = ISDEBUG("ISAM2 recalculate");
-
-  // Input: BayesTree(this), newFactors
-
-//#define PRINT_STATS // figures for paper, disable for timing
-#ifdef PRINT_STATS
-  static int counter = 0;
-  int maxClique = 0;
-  double avgClique = 0;
-  int numCliques = 0;
-  int nnzR = 0;
-  if (counter>0) { // cannot call on empty tree
-    GaussianISAM2_P::CliqueData cdata = this->getCliqueData();
-    GaussianISAM2_P::CliqueStats cstats = cdata.getStats();
-    maxClique = cstats.maxCONDITIONALSize;
-    avgClique = cstats.avgCONDITIONALSize;
-    numCliques = cdata.conditionalSizes.size();
-    nnzR = calculate_nnz(this->root());
-  }
-  counter++;
-#endif
-
-  if(debug) {
-    cout << "markedKeys: ";
-    BOOST_FOREACH(const Index key, markedKeys) { cout << key << " "; }
-    cout << endl;
-    cout << "newKeys: ";
-    BOOST_FOREACH(const Index key, newKeys) { cout << key << " "; }
-    cout << endl;
-  }
-
-  // 1. Remove top of Bayes tree and convert to a factor graph:
-  // (a) For each affected variable, remove the corresponding clique and all parents up to the root.
-  // (b) Store orphaned sub-trees \BayesTree_{O} of removed cliques.
-  tic(1, "removetop");
-  Cliques orphans;
-  BayesNet<CONDITIONAL> affectedBayesNet;
-  this->removeTop(markedKeys, affectedBayesNet, orphans);
-  toc(1, "removetop");
-
-  if(debug) affectedBayesNet.print("Removed top: ");
-  if(debug) orphans.print("Orphans: ");
-
-  //  FactorGraph<GaussianFactor> factors(affectedBayesNet);
-  // bug was here: we cannot reuse the original factors, because then the cached factors get messed up
-  // [all the necessary data is actually contained in the affectedBayesNet, including what was passed in from the boundaries,
-  //  so this would be correct; however, in the process we also generate new cached_ entries that will be wrong (ie. they don't
they don't - // contain what would be passed up at a certain point if batch elimination was done, but that's what we need); we could choose - // not to update cached_ from here, but then the new information (and potentially different variable ordering) is not reflected - // in the cached_ values which again will be wrong] - // so instead we have to retrieve the original linearized factors AND add the cached factors from the boundary - - // BEGIN OF COPIED CODE - - // ordering provides all keys in conditionals, there cannot be others because path to root included - tic(2,"affectedKeys"); - FastList affectedKeys = affectedBayesNet.ordering(); - toc(2,"affectedKeys"); - - if(affectedKeys.size() >= theta_.size() * batchThreshold) { - - tic(3,"batch"); - - tic(0,"add keys"); - boost::shared_ptr > affectedKeysSet(new FastSet()); - BOOST_FOREACH(const Ordering::value_type& key_index, ordering_) { affectedKeysSet->insert(key_index.second); } - toc(0,"add keys"); - - tic(1,"reorder"); - tic(1,"CCOLAMD"); - // Do a batch step - reorder and relinearize all variables - vector cmember(theta_.size(), 0); - FastSet constrainedKeysSet; - if(constrainKeys) - constrainedKeysSet = *constrainKeys; - else - constrainedKeysSet.insert(newKeys.begin(), newKeys.end()); - if(theta_.size() > constrainedKeysSet.size()) { - BOOST_FOREACH(Index var, constrainedKeysSet) { cmember[var] = 1; } - } - Permutation::shared_ptr colamd(inference::PermutationCOLAMD_(variableIndex_, cmember)); - Permutation::shared_ptr colamdInverse(colamd->inverse()); - toc(1,"CCOLAMD"); - - // Reorder - tic(2,"permute global variable index"); - variableIndex_.permute(*colamd); - toc(2,"permute global variable index"); - tic(3,"permute delta"); - delta_.permute(*colamd); - toc(3,"permute delta"); - tic(4,"permute ordering"); - ordering_.permuteWithInverse(*colamdInverse); - toc(4,"permute ordering"); - toc(1,"reorder"); - - tic(2,"linearize"); - GaussianFactorGraph factors(*nonlinearFactors_.linearize(theta_, ordering_)); - toc(2,"linearize"); - - tic(5,"eliminate"); - JunctionTree jt(factors, variableIndex_); - sharedClique newRoot = jt.eliminate(EliminatePreferLDL); - if(debug) newRoot->print("Eliminated: "); - toc(5,"eliminate"); - - tic(6,"insert"); - this->clear(); - this->insert(newRoot); - toc(6,"insert"); - - toc(3,"batch"); - - result.variablesReeliminated = affectedKeysSet->size(); - - lastAffectedMarkedCount = markedKeys.size(); - lastAffectedVariableCount = affectedKeysSet->size(); - lastAffectedFactorCount = factors.size(); - - return affectedKeysSet; - - } else { - - tic(4,"incremental"); - - // 2. 
Add the new factors \Factors' into the resulting factor graph - FastList affectedAndNewKeys; - affectedAndNewKeys.insert(affectedAndNewKeys.end(), affectedKeys.begin(), affectedKeys.end()); - affectedAndNewKeys.insert(affectedAndNewKeys.end(), newKeys.begin(), newKeys.end()); - tic(1,"relinearizeAffected"); - GaussianFactorGraph factors(*relinearizeAffectedFactors(affectedAndNewKeys)); - if(debug) factors.print("Relinearized factors: "); - toc(1,"relinearizeAffected"); - - if(debug) { cout << "Affected keys: "; BOOST_FOREACH(const Index key, affectedKeys) { cout << key << " "; } cout << endl; } - - result.variablesReeliminated = affectedAndNewKeys.size(); - lastAffectedMarkedCount = markedKeys.size(); - lastAffectedVariableCount = affectedKeys.size(); - lastAffectedFactorCount = factors.size(); - -#ifdef PRINT_STATS - // output for generating figures - cout << "linear: #markedKeys: " << markedKeys.size() << " #affectedVariables: " << affectedKeys.size() - << " #affectedFactors: " << factors.size() << " maxCliqueSize: " << maxClique - << " avgCliqueSize: " << avgClique << " #Cliques: " << numCliques << " nnzR: " << nnzR << endl; -#endif - - tic(2,"cached"); - // add the cached intermediate results from the boundary of the orphans ... - FactorGraph cachedBoundary = getCachedBoundaryFactors(orphans); - if(debug) cachedBoundary.print("Boundary factors: "); - factors.reserve(factors.size() + cachedBoundary.size()); - // Copy so that we can later permute factors - BOOST_FOREACH(const CacheFactor::shared_ptr& cached, cachedBoundary) { - factors.push_back(GaussianFactor::shared_ptr(new CacheFactor(*cached))); - } - toc(2,"cached"); - - // END OF COPIED CODE - - // 3. Re-order and eliminate the factor graph into a Bayes net (Algorithm [alg:eliminate]), and re-assemble into a new Bayes tree (Algorithm [alg:BayesTree]) - - tic(4,"reorder and eliminate"); - - tic(1,"list to set"); - // create a partial reordering for the new and contaminated factors - // markedKeys are passed in: those variables will be forced to the end in the ordering - boost::shared_ptr > affectedKeysSet(new FastSet(markedKeys)); - affectedKeysSet->insert(affectedKeys.begin(), affectedKeys.end()); - toc(1,"list to set"); - - tic(2,"PartialSolve"); - typename Impl::ReorderingMode reorderingMode; - reorderingMode.nFullSystemVars = ordering_.nVars(); - reorderingMode.algorithm = Impl::ReorderingMode::COLAMD; - reorderingMode.constrain = Impl::ReorderingMode::CONSTRAIN_LAST; - if(constrainKeys) - reorderingMode.constrainedKeys = *constrainKeys; - else - reorderingMode.constrainedKeys = FastSet(newKeys.begin(), newKeys.end()); - typename Impl::PartialSolveResult partialSolveResult = - Impl::PartialSolve(factors, *affectedKeysSet, reorderingMode); - toc(2,"PartialSolve"); - - // We now need to permute everything according this partial reordering: the - // delta vector, the global ordering, and the factors we're about to - // re-eliminate. The reordered variables are also mentioned in the - // orphans and the leftover cached factors. 
- tic(3,"permute global variable index"); - variableIndex_.permute(partialSolveResult.fullReordering); - toc(3,"permute global variable index"); - tic(4,"permute delta"); - delta_.permute(partialSolveResult.fullReordering); - toc(4,"permute delta"); - tic(5,"permute ordering"); - ordering_.permuteWithInverse(partialSolveResult.fullReorderingInverse); - toc(5,"permute ordering"); - - toc(4,"reorder and eliminate"); - - tic(6,"re-assemble"); - if(partialSolveResult.bayesTree) { - assert(!this->root_); - this->insert(partialSolveResult.bayesTree); - } - toc(6,"re-assemble"); - - // 4. Insert the orphans back into the new Bayes tree. - tic(7,"orphans"); - tic(1,"permute"); - BOOST_FOREACH(sharedClique orphan, orphans) { - (void)orphan->permuteSeparatorWithInverse(partialSolveResult.fullReorderingInverse); - } - toc(1,"permute"); - tic(2,"insert"); - // add orphans to the bottom of the new tree - BOOST_FOREACH(sharedClique orphan, orphans) { - // Because the affectedKeysSelector is sorted, the orphan separator keys - // will be sorted correctly according to the new elimination order after - // applying the permutation, so findParentClique, which looks for the - // lowest-ordered parent, will still work. - Index parentRepresentative = Base::findParentClique((*orphan)->parents()); - sharedClique parent = (*this)[parentRepresentative]; - parent->children_ += orphan; - orphan->parent_ = parent; // set new parent! - } - toc(2,"insert"); - toc(7,"orphans"); - - toc(4,"incremental"); - - return affectedKeysSet; - } -} - -/* ************************************************************************* */ -template -ISAM2Result ISAM2::update( - const GRAPH& newFactors, const Values& newTheta, const FastVector& removeFactorIndices, - const boost::optional >& constrainedKeys, bool force_relinearize) { - - static const bool debug = ISDEBUG("ISAM2 update"); - static const bool verbose = ISDEBUG("ISAM2 update verbose"); - - static int count = 0; - count++; - - lastAffectedVariableCount = 0; - lastAffectedFactorCount = 0; - lastAffectedCliqueCount = 0; - lastAffectedMarkedCount = 0; - lastBacksubVariableCount = 0; - lastNnzTop = 0; - ISAM2Result result; - const bool relinearizeThisStep = force_relinearize || (params_.enableRelinearization && count % params_.relinearizeSkip == 0); - - if(verbose) { - cout << "ISAM2::update\n"; - this->print("ISAM2: "); - } - - // Update delta if we need it to check relinearization later - if(relinearizeThisStep) { - tic(0, "updateDelta"); - updateDelta(disableReordering); - toc(0, "updateDelta"); - } - - tic(1,"push_back factors"); - // Add the new factor indices to the result struct - result.newFactorsIndices.resize(newFactors.size()); - for(size_t i=0; i markedKeys = Impl::IndicesFromFactors(ordering_, newFactors); // Get keys from new factors - // Also mark keys involved in removed factors - { - FastSet markedRemoveKeys = Impl::IndicesFromFactors(ordering_, removeFactors); // Get keys involved in removed factors - markedKeys.insert(markedRemoveKeys.begin(), markedRemoveKeys.end()); // Add to the overall set of marked keys - } - // NOTE: we use assign instead of the iterator constructor here because this - // is a vector of size_t, so the constructor unintentionally resolves to - // vector(size_t count, Index value) instead of the iterator constructor. 
- FastVector newKeys; newKeys.assign(markedKeys.begin(), markedKeys.end()); // Make a copy of these, as we'll soon add to them - toc(4,"gather involved keys"); - - // Check relinearization if we're at the nth step, or we are using a looser loop relin threshold - if (relinearizeThisStep) { - tic(5,"gather relinearize keys"); - vector markedRelinMask(ordering_.nVars(), false); - // 4. Mark keys in \Delta above threshold \beta: J=\{\Delta_{j}\in\Delta|\Delta_{j}\geq\beta\}. - FastSet relinKeys = Impl::CheckRelinearization(delta_, ordering_, params_.relinearizeThreshold); - if(disableReordering) relinKeys = Impl::CheckRelinearization(delta_, ordering_, 0.0); // This is used for debugging - - // Add the variables being relinearized to the marked keys - BOOST_FOREACH(const Index j, relinKeys) { markedRelinMask[j] = true; } - markedKeys.insert(relinKeys.begin(), relinKeys.end()); - toc(5,"gather relinearize keys"); - - tic(6,"fluid find_all"); - // 5. Mark all cliques that involve marked variables \Theta_{J} and all their ancestors. - if (!relinKeys.empty() && this->root()) - Impl::FindAll(this->root(), markedKeys, markedRelinMask); // add other cliques that have the marked ones in the separator - toc(6,"fluid find_all"); - - tic(7,"expmap"); - // 6. Update linearization point for marked variables: \Theta_{J}:=\Theta_{J}+\Delta_{J}. - if (!relinKeys.empty()) - Impl::ExpmapMasked(theta_, delta_, ordering_, markedRelinMask, delta_); - toc(7,"expmap"); - - result.variablesRelinearized = markedKeys.size(); - -#ifndef NDEBUG - lastRelinVariables_ = markedRelinMask; -#endif - } else { - result.variablesRelinearized = 0; -#ifndef NDEBUG - lastRelinVariables_ = vector(ordering_.nVars(), false); -#endif - } - - tic(8,"linearize new"); - tic(1,"linearize"); - // 7. Linearize new factors - FactorGraph::shared_ptr linearFactors = newFactors.linearize(theta_, ordering_); - toc(1,"linearize"); - - tic(2,"augment VI"); - // Augment the variable index with the new factors - variableIndex_.augment(*linearFactors); - toc(2,"augment VI"); - toc(8,"linearize new"); - - tic(9,"recalculate"); - // 8. Redo top of Bayes tree - // Convert constrained symbols to indices - boost::optional > constrainedIndices; - if(constrainedKeys) { - constrainedIndices.reset(FastSet()); - BOOST_FOREACH(Key key, *constrainedKeys) { - constrainedIndices->insert(ordering_[key]); - } - } - boost::shared_ptr > replacedKeys; - if(!markedKeys.empty() || !newKeys.empty()) - replacedKeys = recalculate(markedKeys, newKeys, linearFactors, constrainedIndices, result); - - // Update replaced keys mask (accumulates until back-substitution takes place) - if(replacedKeys) { - BOOST_FOREACH(const Index var, *replacedKeys) { - deltaReplacedMask_[var] = true; } } - toc(9,"recalculate"); - - //tic(9,"solve"); - // 9. 
Solve
-  if(debug) delta_.print("delta_: ");
-  //toc(9,"solve");
-
-  tic(10,"evaluate error after");
-  if(params_.evaluateNonlinearError)
-    result.errorAfter.reset(nonlinearFactors_.error(calculateEstimate()));
-  toc(10,"evaluate error after");
-
-  result.cliques = this->nodes().size();
-  deltaUptodate_ = false;
-
-  return result;
-}
-
-/* ************************************************************************* */
-template<class CONDITIONAL, class GRAPH>
-void ISAM2<CONDITIONAL, GRAPH>::updateDelta(bool forceFullSolve) const {
-
-  if(params_.optimizationParams.type() == typeid(ISAM2GaussNewtonParams)) {
-    // If using Gauss-Newton, update with wildfireThreshold
-    const ISAM2GaussNewtonParams& gaussNewtonParams =
-        boost::get<ISAM2GaussNewtonParams>(params_.optimizationParams);
-    const double effectiveWildfireThreshold = forceFullSolve ? 0.0 : gaussNewtonParams.wildfireThreshold;
-    tic(0, "Wildfire update");
-    lastBacksubVariableCount = Impl::UpdateDelta(this->root(), deltaReplacedMask_, delta_, effectiveWildfireThreshold);
-    toc(0, "Wildfire update");
-
-  } else if(params_.optimizationParams.type() == typeid(ISAM2DoglegParams)) {
-    // If using Dogleg, do a Dogleg step
-    const ISAM2DoglegParams& doglegParams =
-        boost::get<ISAM2DoglegParams>(params_.optimizationParams);
-
-    // Do one Dogleg iteration
-    tic(1, "Dogleg Iterate");
-    DoglegOptimizerImpl::IterationResult doglegResult = DoglegOptimizerImpl::Iterate(
-        *doglegDelta_, doglegParams.adaptationMode, *this, nonlinearFactors_, theta_, ordering_, nonlinearFactors_.error(theta_), doglegParams.verbose);
-    toc(1, "Dogleg Iterate");
-
-    // Update Delta and linear step
-    doglegDelta_ = doglegResult.Delta;
-    delta_.permutation() = Permutation::Identity(delta_.size()); // Dogleg solves for the full delta so there is no permutation
-    delta_.container() = doglegResult.dx_d; // Copy the VectorValues containing with the linear solution
-
-    // Clear replaced mask
-    deltaReplacedMask_.assign(deltaReplacedMask_.size(), false);
-  }
-
-  deltaUptodate_ = true;
-}
-
-/* ************************************************************************* */
-template<class CONDITIONAL, class GRAPH>
-Values ISAM2<CONDITIONAL, GRAPH>::calculateEstimate() const {
-  // We use ExpmapMasked here instead of regular expmap because the former
-  // handles Permuted<VectorValues>
-  Values ret(theta_);
-  vector<bool> mask(ordering_.nVars(), true);
-  Impl::ExpmapMasked(ret, getDelta(), ordering_, mask);
-  return ret;
-}
-
-/* ************************************************************************* */
-template<class CONDITIONAL, class GRAPH>
 template<class VALUE>
-VALUE ISAM2<CONDITIONAL, GRAPH>::calculateEstimate(Key key) const {
+VALUE ISAM2::calculateEstimate(Key key) const {
   const Index index = getOrdering()[key];
   const SubVector delta = getDelta()[index];
   return theta_.at(key).retract(delta);
 }

 /* ************************************************************************* */
-template<class CONDITIONAL, class GRAPH>
-Values ISAM2<CONDITIONAL, GRAPH>::calculateBestEstimate() const {
-  VectorValues delta(theta_.dims(ordering_));
-  optimize2(this->root(), delta);
-  return theta_.retract(delta, ordering_);
+namespace internal {
+template<class CLIQUE>
+void optimizeWildfire(const boost::shared_ptr<CLIQUE>& clique, double threshold,
+    vector<bool>& changed, const vector<bool>& replaced, Permuted<VectorValues>& delta, int& count) {
+  // if none of the variables in this clique (frontal and separator!) changed
+  // significantly, then by the running intersection property, none of the
+  // cliques in the children need to be processed
+
+  // Are any clique variables part of the tree that has been redone?
+  bool cliqueReplaced = replaced[(*clique)->frontals().front()];
+#ifndef NDEBUG
+  BOOST_FOREACH(Index frontal, (*clique)->frontals()) {
+    assert(cliqueReplaced == replaced[frontal]);
+  }
+#endif
+
+  // If not redone, then has one of the separator variables changed significantly?
+  bool recalculate = cliqueReplaced;
+  if(!recalculate) {
+    BOOST_FOREACH(Index parent, (*clique)->parents()) {
+      if(changed[parent]) {
+        recalculate = true;
+        break;
+      }
+    }
+  }
+
+  // Solve clique if it was replaced, or if any parents were changed above the
+  // threshold or themselves replaced.
+  if(recalculate) {
+
+    // Temporary copy of the original values, to check how much they change
+    vector<Vector> originalValues((*clique)->nrFrontals());
+    GaussianConditional::const_iterator it;
+    for(it = (*clique)->beginFrontals(); it!=(*clique)->endFrontals(); it++) {
+      originalValues[it - (*clique)->beginFrontals()] = delta[*it];
+    }
+
+    // Back-substitute
+    (*clique)->solveInPlace(delta);
+    count += (*clique)->nrFrontals();
+
+    // Whether the values changed above a threshold, or always true if the
+    // clique was replaced.
+    bool valuesChanged = cliqueReplaced;
+    for(it = (*clique)->beginFrontals(); it!=(*clique)->endFrontals(); it++) {
+      if(!valuesChanged) {
+        const Vector& oldValue(originalValues[it - (*clique)->beginFrontals()]);
+        const SubVector& newValue(delta[*it]);
+        if((oldValue - newValue).lpNorm<Eigen::Infinity>() >= threshold) {
+          valuesChanged = true;
+          break;
+        }
+      } else
+        break;
+    }
+
+    // If the values were above the threshold or this clique was replaced
+    if(valuesChanged) {
+      // Set changed flag for each frontal variable and leave the new values
+      BOOST_FOREACH(Index frontal, (*clique)->frontals()) {
+        changed[frontal] = true;
+      }
+    } else {
+      // Replace with the old values
+      for(it = (*clique)->beginFrontals(); it!=(*clique)->endFrontals(); it++) {
+        delta[*it] = originalValues[it - (*clique)->beginFrontals()];
+      }
+    }
+
+    // Recurse to children
+    BOOST_FOREACH(const typename CLIQUE::shared_ptr& child, clique->children_) {
+      optimizeWildfire(child, threshold, changed, replaced, delta, count);
+    }
+  }
+}
 }

 /* ************************************************************************* */
-template<class CONDITIONAL, class GRAPH>
-const Permuted<VectorValues>& ISAM2<CONDITIONAL, GRAPH>::getDelta() const {
-  if(!deltaUptodate_)
-    updateDelta();
-  return delta_;
+template<class CLIQUE>
+int optimizeWildfire(const boost::shared_ptr<CLIQUE>& root, double threshold, const vector<bool>& keys, Permuted<VectorValues>& delta) {
+  vector<bool> changed(keys.size(), false);
+  int count = 0;
+  // starting from the root, call optimize on each conditional
+  if(root)
+    internal::optimizeWildfire(root, threshold, changed, keys, delta, count);
+  return count;
+}
+
+/* ************************************************************************* */
+template<class CLIQUE>
+void nnz_internal(const boost::shared_ptr<CLIQUE>& clique, int& result) {
+  int dimR = (*clique)->dim();
+  int dimSep = (*clique)->get_S().cols() - dimR;
+  result += ((dimR+1)*dimR)/2 + dimSep*dimR;
+  // traverse the children
+  BOOST_FOREACH(const typename CLIQUE::shared_ptr& child, clique->children_) {
+    nnz_internal(child, result);
+  }
+}
+
+/* ************************************************************************* */
+template<class CLIQUE>
+int calculate_nnz(const boost::shared_ptr<CLIQUE>& clique) {
+  int result = 0;
+  // starting from the root, add up entries of frontal and conditional matrices of each conditional
+  nnz_internal(clique, result);
+  return result;
 }

 }
-/// namespace gtsam
+
+
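Review note on the wildfire pass added above: a clique is re-solved only when it was re-eliminated or when one of its separator (parent) variables changed by at least `threshold`, and by the running-intersection property an unchanged clique cuts off its entire subtree. The standalone toy program below (plain C++, no GTSAM types; the scalar "clique" recurrence and every name in it are invented for illustration) reproduces just that stopping rule:

    #include <cmath>
    #include <cstdio>
    #include <vector>

    // Toy clique tree in heap layout: node i has children 2i+1 and 2i+2, and its
    // "back-substitution" is x[i] = d[i] + 0.5 * x[parent(i)].
    struct ToyTree {
      std::vector<double> d, x;    // per-node right-hand side and solution
      std::vector<bool> replaced;  // nodes whose clique was re-eliminated
      int solved;                  // how many nodes were actually re-solved

      ToyTree() : solved(0) {}

      void wildfire(int i, int parent, double threshold, std::vector<bool>& changed) {
        if (i >= (int)d.size()) return;
        // Recalculate when replaced, or when the parent's solution moved.
        bool recalculate = replaced[i] || (parent >= 0 && changed[parent]);
        if (!recalculate) return;  // running intersection: skip the whole subtree
        double oldX = x[i];
        x[i] = d[i] + (parent >= 0 ? 0.5 * x[parent] : 0.0);
        ++solved;
        // Replaced cliques always count as changed, like valuesChanged above.
        changed[i] = replaced[i] || std::fabs(x[i] - oldX) >= threshold;
        if (!changed[i]) x[i] = oldX;  // keep the old value, as in the code above
        wildfire(2 * i + 1, i, threshold, changed);
        wildfire(2 * i + 2, i, threshold, changed);
      }
    };

    int main() {
      const double rhs[7] = { 1.0, 0.5, 0.5, 0.25, 0.25, 0.25, 0.25 };
      ToyTree t;
      t.d.assign(rhs, rhs + 7);
      t.x.assign(7, 0.0);

      // Pass 1: everything re-eliminated -> full back-substitution.
      t.replaced.assign(7, true);
      std::vector<bool> changed(7, false);
      t.wildfire(0, -1, 1e-3, changed);
      std::printf("pass 1: re-solved %d of 7 nodes\n", t.solved);  // 7

      // Pass 2: only the root is re-eliminated, with a tiny update. Its children
      // are re-solved (a replaced root always counts as changed) but their values
      // move less than the threshold, so all four leaves are skipped.
      t.solved = 0;
      t.replaced.assign(7, false);
      t.replaced[0] = true;
      t.d[0] += 1e-6;
      changed.assign(7, false);
      t.wildfire(0, -1, 1e-3, changed);
      std::printf("pass 2: re-solved %d of 7 nodes\n", t.solved);  // 3
      return 0;
    }

Run as-is this re-solves all seven nodes on the first pass and only three on the second, which is exactly the saving the threshold is after: most of the tree is untouched when an update barely moves the solution.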
diff --git a/gtsam/nonlinear/ISAM2.cpp b/gtsam/nonlinear/ISAM2.cpp
new file mode 100644
index 000000000..58af039dd
--- /dev/null
+++ b/gtsam/nonlinear/ISAM2.cpp
@@ -0,0 +1,732 @@
+/* ----------------------------------------------------------------------------
+
+ * GTSAM Copyright 2010, Georgia Tech Research Corporation,
+ * Atlanta, Georgia 30332-0415
+ * All Rights Reserved
+ * Authors: Frank Dellaert, et al. (see THANKS for the full author list)
+
+ * See LICENSE for the license information
+
+ * -------------------------------------------------------------------------- */
+
+/**
+ * @file    ISAM2.cpp
+ * @brief   Incremental update functionality (ISAM2) for BayesTree, with fluid relinearization.
+ * @author  Michael Kaess, Richard Roberts
+ */
+
+#include
+#include // for operator +=
+using namespace boost::assign;
+
+#include
+#include
+#include
+#include
+#include
+
+#include
+#include
+
+
+namespace gtsam {
+
+using namespace std;
+
+static const bool disableReordering = false;
+static const double batchThreshold = 0.65;
+
+/* ************************************************************************* */
+ISAM2::ISAM2(const ISAM2Params& params):
+  delta_(deltaUnpermuted_), deltaNewton_(deltaNewtonUnpermuted_), RgProd_(RgProdUnpermuted_),
+  deltaDoglegUptodate_(true), deltaUptodate_(true), params_(params) {
+  // See note in gtsam/base/boost_variant_with_workaround.h
+  if(params_.optimizationParams.type() == typeid(ISAM2DoglegParams))
+    doglegDelta_ = boost::get<ISAM2DoglegParams>(params_.optimizationParams).initialDelta;
+}
+
+/* ************************************************************************* */
+ISAM2::ISAM2():
+  delta_(deltaUnpermuted_), deltaNewton_(deltaNewtonUnpermuted_), RgProd_(RgProdUnpermuted_),
+  deltaDoglegUptodate_(true), deltaUptodate_(true) {
+  // See note in gtsam/base/boost_variant_with_workaround.h
+  if(params_.optimizationParams.type() == typeid(ISAM2DoglegParams))
+    doglegDelta_ = boost::get<ISAM2DoglegParams>(params_.optimizationParams).initialDelta;
+}
+
+/* ************************************************************************* */
+FastList<size_t> ISAM2::getAffectedFactors(const FastList<Index>& keys) const {
+  static const bool debug = false;
+  if(debug) cout << "Getting affected factors for ";
+  if(debug) { BOOST_FOREACH(const Index key, keys) { cout << key << " "; } }
+  if(debug) cout << endl;
+
+  FactorGraph<NonlinearFactor> allAffected;
+  FastList<size_t> indices;
+  BOOST_FOREACH(const Index key, keys) {
+//    const list<size_t> l = nonlinearFactors_.factors(key);
+//    indices.insert(indices.begin(), l.begin(), l.end());
+    const VariableIndex::Factors& factors(variableIndex_[key]);
+    BOOST_FOREACH(size_t factor, factors) {
+      if(debug) cout << "Variable " << key << " affects factor " << factor << endl;
+      indices.push_back(factor);
+    }
+  }
+  indices.sort();
+  indices.unique();
+  if(debug) cout << "Affected factors are: ";
+  if(debug) { BOOST_FOREACH(const size_t index, indices) { cout << index << " "; } }
+  if(debug) cout << endl;
+  return indices;
+}
+
+/* ************************************************************************* */
+// retrieve all factors that ONLY contain the affected variables
+// (note that the remaining stuff is summarized in the cached factors)
+FactorGraph<GaussianFactor>::shared_ptr
+ISAM2::relinearizeAffectedFactors(const FastList<Index>& affectedKeys) const {
+
+  tic(1,"getAffectedFactors");
+  FastList<size_t> candidates = getAffectedFactors(affectedKeys);
+  toc(1,"getAffectedFactors");
+
+  NonlinearFactorGraph nonlinearAffectedFactors;
+
+  tic(2,"affectedKeysSet");
+  // for fast lookup below
+  FastSet<Index> affectedKeysSet;
+  affectedKeysSet.insert(affectedKeys.begin(), affectedKeys.end());
+  toc(2,"affectedKeysSet");
+
+  tic(3,"check candidates");
+  BOOST_FOREACH(size_t idx, candidates) {
+    bool inside = true;
+    BOOST_FOREACH(Key key, nonlinearFactors_[idx]->keys()) {
+      Index var = ordering_[key];
+      if (affectedKeysSet.find(var) == affectedKeysSet.end()) {
+        inside = false;
+        break;
+      }
+    }
+    if (inside)
+      nonlinearAffectedFactors.push_back(nonlinearFactors_[idx]);
+  }
+  toc(3,"check candidates");
+
+  tic(4,"linearize");
+  FactorGraph<GaussianFactor>::shared_ptr linearized(nonlinearAffectedFactors.linearize(theta_, ordering_));
+  toc(4,"linearize");
+  return linearized;
+}
+
+/* ************************************************************************* */
+// find intermediate (linearized) factors from cache that are passed into the affected area
+FactorGraph<ISAM2::CacheFactor>
+ISAM2::getCachedBoundaryFactors(Cliques& orphans) {
+
+  static const bool debug = false;
+
+  FactorGraph<CacheFactor> cachedBoundary;
+
+  BOOST_FOREACH(sharedClique orphan, orphans) {
+    // find the last variable that was eliminated
+    Index key = (*orphan)->frontals().back();
+#ifndef NDEBUG
+//    typename BayesNet<CONDITIONAL>::const_iterator it = orphan->end();
+//    const CONDITIONAL& lastCONDITIONAL = **(--it);
+//    typename CONDITIONAL::const_iterator keyit = lastCONDITIONAL.endParents();
+//    const Index lastKey = *(--keyit);
+//    assert(key == lastKey);
+#endif
+    // retrieve the cached factor and add to boundary
+    cachedBoundary.push_back(boost::dynamic_pointer_cast<CacheFactor>(orphan->cachedFactor()));
+    if(debug) { cout << "Cached factor for variable " << key; orphan->cachedFactor()->print(""); }
+  }
+
+  return cachedBoundary;
+}
+
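Editorial note on the branch taken in ISAM2::recalculate() just below: once the affected variables approach the whole problem, the incremental bookkeeping costs more than simply re-eliminating everything, so the code compares the affected-key count against theta_.size() * batchThreshold (the 0.65 constant above) and falls back to a batch step. A minimal standalone sketch of just that decision rule (useBatchStep is an invented name, not a GTSAM function):

    #include <cstddef>
    #include <cstdio>

    // Mirrors: if(affectedKeys.size() >= theta_.size() * batchThreshold) batch; else incremental;
    static bool useBatchStep(std::size_t affectedCount, std::size_t totalVars) {
      static const double batchThreshold = 0.65;  // same constant as in ISAM2.cpp
      return affectedCount >= totalVars * batchThreshold;
    }

    int main() {
      std::printf("%s\n", useBatchStep(70, 100) ? "batch" : "incremental");  // batch
      std::printf("%s\n", useBatchStep(10, 100) ? "batch" : "incremental");  // incremental
      return 0;
    }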
+boost::shared_ptr<FastSet<Index> > ISAM2::recalculate(
+    const FastSet<Index>& markedKeys, const FastVector<Index>& newKeys, const FactorGraph<GaussianFactor>::shared_ptr newFactors,
+    const boost::optional<FastSet<Index> >& constrainKeys, ISAM2Result& result) {
+
+  // TODO: new factors are linearized twice, the newFactors passed in are not used.
+
+  static const bool debug = ISDEBUG("ISAM2 recalculate");
+
+  // Input: BayesTree(this), newFactors
+
+//#define PRINT_STATS // figures for paper, disable for timing
+#ifdef PRINT_STATS
+  static int counter = 0;
+  int maxClique = 0;
+  double avgClique = 0;
+  int numCliques = 0;
+  int nnzR = 0;
+  if (counter>0) { // cannot call on empty tree
+    GaussianISAM2_P::CliqueData cdata = this->getCliqueData();
+    GaussianISAM2_P::CliqueStats cstats = cdata.getStats();
+    maxClique = cstats.maxCONDITIONALSize;
+    avgClique = cstats.avgCONDITIONALSize;
+    numCliques = cdata.conditionalSizes.size();
+    nnzR = calculate_nnz(this->root());
+  }
+  counter++;
+#endif
+
+  if(debug) {
+    cout << "markedKeys: ";
+    BOOST_FOREACH(const Index key, markedKeys) { cout << key << " "; }
+    cout << endl;
+    cout << "newKeys: ";
+    BOOST_FOREACH(const Index key, newKeys) { cout << key << " "; }
+    cout << endl;
+  }
+
+  // 1. Remove top of Bayes tree and convert to a factor graph:
+  // (a) For each affected variable, remove the corresponding clique and all parents up to the root.
+  // (b) Store orphaned sub-trees \BayesTree_{O} of removed cliques.
+  tic(1, "removetop");
+  Cliques orphans;
+  BayesNet<GaussianConditional> affectedBayesNet;
+  this->removeTop(markedKeys, affectedBayesNet, orphans);
+  toc(1, "removetop");
+
+  if(debug) affectedBayesNet.print("Removed top: ");
+  if(debug) orphans.print("Orphans: ");
+
+  //  FactorGraph<GaussianFactor> factors(affectedBayesNet);
+  // bug was here: we cannot reuse the original factors, because then the cached factors get messed up
+  // [all the necessary data is actually contained in the affectedBayesNet, including what was passed in from the boundaries,
+  // so this would be correct; however, in the process we also generate new cached_ entries that will be wrong (i.e. they don't
+  // contain what would be passed up at a certain point if batch elimination was done, but that's what we need); we could choose
+  // not to update cached_ from here, but then the new information (and potentially different variable ordering) is not reflected
+  // in the cached_ values which again will be wrong]
+  // so instead we have to retrieve the original linearized factors AND add the cached factors from the boundary
+
+  // BEGIN OF COPIED CODE
+
+  // ordering provides all keys in conditionals, there cannot be others because path to root included
+  tic(2,"affectedKeys");
+  FastList<Index> affectedKeys = affectedBayesNet.ordering();
+  toc(2,"affectedKeys");
+
+  if(affectedKeys.size() >= theta_.size() * batchThreshold) {
+
+    tic(3,"batch");
+
+    tic(0,"add keys");
+    boost::shared_ptr<FastSet<Index> > affectedKeysSet(new FastSet<Index>());
+    BOOST_FOREACH(const Ordering::value_type& key_index, ordering_) { affectedKeysSet->insert(key_index.second); }
+    toc(0,"add keys");
+
+    tic(1,"reorder");
+    tic(1,"CCOLAMD");
+    // Do a batch step - reorder and relinearize all variables
+    vector<int> cmember(theta_.size(), 0);
+    FastSet<Index> constrainedKeysSet;
+    if(constrainKeys)
+      constrainedKeysSet = *constrainKeys;
+    else
+      constrainedKeysSet.insert(newKeys.begin(), newKeys.end());
+    if(theta_.size() > constrainedKeysSet.size()) {
+      BOOST_FOREACH(Index var, constrainedKeysSet) { cmember[var] = 1; }
+    }
+    Permutation::shared_ptr colamd(inference::PermutationCOLAMD_(variableIndex_, cmember));
+    Permutation::shared_ptr colamdInverse(colamd->inverse());
+    toc(1,"CCOLAMD");
+
+    // Reorder
+    tic(2,"permute global variable index");
+    variableIndex_.permute(*colamd);
+    toc(2,"permute global variable index");
+    tic(3,"permute delta");
+    delta_.permute(*colamd);
+    deltaNewton_.permute(*colamd);
+    RgProd_.permute(*colamd);
+    toc(3,"permute delta");
+    tic(4,"permute ordering");
+    ordering_.permuteWithInverse(*colamdInverse);
+    toc(4,"permute ordering");
+    toc(1,"reorder");
+
+    tic(2,"linearize");
+    GaussianFactorGraph factors(*nonlinearFactors_.linearize(theta_, ordering_));
+    toc(2,"linearize");
+
+    tic(5,"eliminate");
+    JunctionTree<GaussianFactorGraph, ISAM2::Clique> jt(factors, variableIndex_);
+    sharedClique newRoot = jt.eliminate(EliminatePreferLDL);
+    if(debug) newRoot->print("Eliminated: ");
+    toc(5,"eliminate");
+
+    tic(6,"insert");
+    this->clear();
+    this->insert(newRoot);
+    toc(6,"insert");
+
+    toc(3,"batch");
+
+    result.variablesReeliminated = affectedKeysSet->size();
+
+    lastAffectedMarkedCount = markedKeys.size();
+    lastAffectedVariableCount = affectedKeysSet->size();
+    lastAffectedFactorCount = factors.size();
+
+    return affectedKeysSet;
+
+  } else {
+
+    tic(4,"incremental");
+
+    // 2. Add the new factors \Factors' into the resulting factor graph
+    FastList<Index> affectedAndNewKeys;
+    affectedAndNewKeys.insert(affectedAndNewKeys.end(), affectedKeys.begin(), affectedKeys.end());
+    affectedAndNewKeys.insert(affectedAndNewKeys.end(), newKeys.begin(), newKeys.end());
+    tic(1,"relinearizeAffected");
+    GaussianFactorGraph factors(*relinearizeAffectedFactors(affectedAndNewKeys));
+    if(debug) factors.print("Relinearized factors: ");
+    toc(1,"relinearizeAffected");
+
+    if(debug) { cout << "Affected keys: "; BOOST_FOREACH(const Index key, affectedKeys) { cout << key << " "; } cout << endl; }
+
+    result.variablesReeliminated = affectedAndNewKeys.size();
+    lastAffectedMarkedCount = markedKeys.size();
+    lastAffectedVariableCount = affectedKeys.size();
+    lastAffectedFactorCount = factors.size();
+
+#ifdef PRINT_STATS
+    // output for generating figures
+    cout << "linear: #markedKeys: " << markedKeys.size() << " #affectedVariables: " << affectedKeys.size()
+        << " #affectedFactors: " << factors.size() << " maxCliqueSize: " << maxClique
+        << " avgCliqueSize: " << avgClique << " #Cliques: " << numCliques << " nnzR: " << nnzR << endl;
+#endif
+
+    tic(2,"cached");
+    // add the cached intermediate results from the boundary of the orphans ...
+    FactorGraph<CacheFactor> cachedBoundary = getCachedBoundaryFactors(orphans);
+    if(debug) cachedBoundary.print("Boundary factors: ");
+    factors.reserve(factors.size() + cachedBoundary.size());
+    // Copy so that we can later permute factors
+    BOOST_FOREACH(const CacheFactor::shared_ptr& cached, cachedBoundary) {
+      factors.push_back(GaussianFactor::shared_ptr(new CacheFactor(*cached)));
+    }
+    toc(2,"cached");
+
+    // END OF COPIED CODE
+
+    // 3. Re-order and eliminate the factor graph into a Bayes net (Algorithm [alg:eliminate]), and re-assemble into a new Bayes tree (Algorithm [alg:BayesTree])
+
+    tic(4,"reorder and eliminate");
+
+    tic(1,"list to set");
+    // create a partial reordering for the new and contaminated factors
+    // markedKeys are passed in: those variables will be forced to the end in the ordering
+    boost::shared_ptr<FastSet<Index> > affectedKeysSet(new FastSet<Index>(markedKeys));
+    affectedKeysSet->insert(affectedKeys.begin(), affectedKeys.end());
+    toc(1,"list to set");
+
+    tic(2,"PartialSolve");
+    Impl::ReorderingMode reorderingMode;
+    reorderingMode.nFullSystemVars = ordering_.nVars();
+    reorderingMode.algorithm = Impl::ReorderingMode::COLAMD;
+    reorderingMode.constrain = Impl::ReorderingMode::CONSTRAIN_LAST;
+    if(constrainKeys)
+      reorderingMode.constrainedKeys = *constrainKeys;
+    else
+      reorderingMode.constrainedKeys = FastSet<Index>(newKeys.begin(), newKeys.end());
+    Impl::PartialSolveResult partialSolveResult =
+        Impl::PartialSolve(factors, *affectedKeysSet, reorderingMode);
+    toc(2,"PartialSolve");
+
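The PartialSolve call above requests CONSTRAIN_LAST: the marked/new keys must come last in the new elimination order, which is the same contract the cmember group vector handed to CCOLAMD encodes in the batch branch (group 0 free, group 1 forced last). The toy below reproduces only that grouping contract with a stable partition; real CCOLAMD additionally minimizes fill within each group (constrainLast and IsUnconstrained are invented names):

    #include <algorithm>
    #include <cstddef>
    #include <cstdio>
    #include <set>
    #include <vector>

    // Predicate: true for variables that may be ordered freely (group 0).
    struct IsUnconstrained {
      const std::set<int>* constrained;
      bool operator()(int v) const { return constrained->count(v) == 0; }
    };

    // Push the constrained (e.g. newly observed) variables to the end of the
    // elimination order while keeping the relative order of everything else.
    std::vector<int> constrainLast(std::vector<int> order, const std::set<int>& constrained) {
      IsUnconstrained pred = { &constrained };
      std::stable_partition(order.begin(), order.end(), pred);
      return order;
    }

    int main() {
      int init[] = { 0, 1, 2, 3, 4, 5 };
      std::vector<int> order(init, init + 6);
      std::set<int> newKeys;
      newKeys.insert(1); newKeys.insert(4);  // e.g. the newly added variables
      order = constrainLast(order, newKeys);
      for (std::size_t i = 0; i < order.size(); ++i) std::printf("%d ", order[i]);
      std::printf("\n");  // prints: 0 2 3 5 1 4
      return 0;
    }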
+    // We now need to permute everything according to this partial reordering: the
+    // delta vector, the global ordering, and the factors we're about to
+    // re-eliminate.  The reordered variables are also mentioned in the
+    // orphans and the leftover cached factors.
+    tic(3,"permute global variable index");
+    variableIndex_.permute(partialSolveResult.fullReordering);
+    toc(3,"permute global variable index");
+    tic(4,"permute delta");
+    delta_.permute(partialSolveResult.fullReordering);
+    deltaNewton_.permute(partialSolveResult.fullReordering);
+    RgProd_.permute(partialSolveResult.fullReordering);
+    toc(4,"permute delta");
+    tic(5,"permute ordering");
+    ordering_.permuteWithInverse(partialSolveResult.fullReorderingInverse);
+    toc(5,"permute ordering");
+
+    toc(4,"reorder and eliminate");
+
+    tic(6,"re-assemble");
+    if(partialSolveResult.bayesTree) {
+      assert(!this->root_);
+      this->insert(partialSolveResult.bayesTree);
+    }
+    toc(6,"re-assemble");
+
+    // 4. Insert the orphans back into the new Bayes tree.
+    tic(7,"orphans");
+    tic(1,"permute");
+    BOOST_FOREACH(sharedClique orphan, orphans) {
+      (void)orphan->permuteSeparatorWithInverse(partialSolveResult.fullReorderingInverse);
+    }
+    toc(1,"permute");
+    tic(2,"insert");
+    // add orphans to the bottom of the new tree
+    BOOST_FOREACH(sharedClique orphan, orphans) {
+      // Because the affectedKeysSelector is sorted, the orphan separator keys
+      // will be sorted correctly according to the new elimination order after
+      // applying the permutation, so findParentClique, which looks for the
+      // lowest-ordered parent, will still work.
+      Index parentRepresentative = Base::findParentClique((*orphan)->parents());
+      sharedClique parent = (*this)[parentRepresentative];
+      parent->children_ += orphan;
+      orphan->parent_ = parent; // set new parent!
+    }
+    toc(2,"insert");
+    toc(7,"orphans");
+
+    toc(4,"incremental");
+
+    return affectedKeysSet;
+  }
+}
+
+/* ************************************************************************* */
+ISAM2Result ISAM2::update(
+    const NonlinearFactorGraph& newFactors, const Values& newTheta, const FastVector<size_t>& removeFactorIndices,
+    const boost::optional<FastSet<Key> >& constrainedKeys, bool force_relinearize) {
+
+  static const bool debug = ISDEBUG("ISAM2 update");
+  static const bool verbose = ISDEBUG("ISAM2 update verbose");
+
+  static int count = 0;
+  count++;
+
+  lastAffectedVariableCount = 0;
+  lastAffectedFactorCount = 0;
+  lastAffectedCliqueCount = 0;
+  lastAffectedMarkedCount = 0;
+  lastBacksubVariableCount = 0;
+  lastNnzTop = 0;
+  ISAM2Result result;
+  const bool relinearizeThisStep = force_relinearize || (params_.enableRelinearization && count % params_.relinearizeSkip == 0);
+
+  if(verbose) {
+    cout << "ISAM2::update\n";
+    this->print("ISAM2: ");
+  }
+
+  // Update delta if we need it to check relinearization later
+  if(relinearizeThisStep) {
+    tic(0, "updateDelta");
+    updateDelta(disableReordering);
+    toc(0, "updateDelta");
+  }
+
+  tic(1,"push_back factors");
+  // Add the new factor indices to the result struct
+  result.newFactorsIndices.resize(newFactors.size());
+  for(size_t i=0; i markedKeys = Impl::IndicesFromFactors(ordering_, newFactors); // Get keys from new factors
+  // Also mark keys involved in removed factors
+  {
+    FastSet<Index> markedRemoveKeys = Impl::IndicesFromFactors(ordering_, removeFactors); // Get keys involved in removed factors
+    markedKeys.insert(markedRemoveKeys.begin(), markedRemoveKeys.end()); // Add to the overall set of marked keys
+  }
+  // NOTE: we use assign instead of the iterator constructor here because this
+  // is a vector of size_t, so the constructor unintentionally resolves to
+  // vector(size_t count, Index value) instead of the iterator constructor.
+  FastVector<Index> newKeys; newKeys.assign(markedKeys.begin(), markedKeys.end()); // Make a copy of these, as we'll soon add to them
+  toc(4,"gather involved keys");
+
+  // Check relinearization if we're at the nth step, or we are using a looser loop relin threshold
+  if (relinearizeThisStep) {
+    tic(5,"gather relinearize keys");
+    vector<bool> markedRelinMask(ordering_.nVars(), false);
+    // 4. Mark keys in \Delta above threshold \beta: J=\{\Delta_{j}\in\Delta|\Delta_{j}\geq\beta\}.
+    FastSet<Index> relinKeys = Impl::CheckRelinearization(delta_, ordering_, params_.relinearizeThreshold);
+    if(disableReordering) relinKeys = Impl::CheckRelinearization(delta_, ordering_, 0.0); // This is used for debugging
+
+    // Add the variables being relinearized to the marked keys
+    BOOST_FOREACH(const Index j, relinKeys) { markedRelinMask[j] = true; }
+    markedKeys.insert(relinKeys.begin(), relinKeys.end());
+    toc(5,"gather relinearize keys");
+
+    tic(6,"fluid find_all");
+    // 5. Mark all cliques that involve marked variables \Theta_{J} and all their ancestors.
+    if (!relinKeys.empty() && this->root())
+      Impl::FindAll(this->root(), markedKeys, markedRelinMask); // add other cliques that have the marked ones in the separator
+    toc(6,"fluid find_all");
+
+    tic(7,"expmap");
+    // 6. Update linearization point for marked variables: \Theta_{J}:=\Theta_{J}+\Delta_{J}.
+    if (!relinKeys.empty())
+      Impl::ExpmapMasked(theta_, delta_, ordering_, markedRelinMask, delta_);
+    toc(7,"expmap");
+
+    result.variablesRelinearized = markedKeys.size();
+
+#ifndef NDEBUG
+    lastRelinVariables_ = markedRelinMask;
+#endif
+  } else {
+    result.variablesRelinearized = 0;
+#ifndef NDEBUG
+    lastRelinVariables_ = vector<bool>(ordering_.nVars(), false);
+#endif
+  }
+
+  tic(8,"linearize new");
+  tic(1,"linearize");
+  // 7. Linearize new factors
+  FactorGraph<GaussianFactor>::shared_ptr linearFactors = newFactors.linearize(theta_, ordering_);
+  toc(1,"linearize");
+
+  tic(2,"augment VI");
+  // Augment the variable index with the new factors
+  variableIndex_.augment(*linearFactors);
+  toc(2,"augment VI");
+  toc(8,"linearize new");
+
+  tic(9,"recalculate");
+  // 8. Redo top of Bayes tree
+  // Convert constrained symbols to indices
+  boost::optional<FastSet<Index> > constrainedIndices;
+  if(constrainedKeys) {
+    constrainedIndices.reset(FastSet<Index>());
+    BOOST_FOREACH(Key key, *constrainedKeys) {
+      constrainedIndices->insert(ordering_[key]);
+    }
+  }
+  boost::shared_ptr<FastSet<Index> > replacedKeys;
+  if(!markedKeys.empty() || !newKeys.empty())
+    replacedKeys = recalculate(markedKeys, newKeys, linearFactors, constrainedIndices, result);
+
+  // Update replaced keys mask (accumulates until back-substitution takes place)
+  if(replacedKeys) {
+    BOOST_FOREACH(const Index var, *replacedKeys) {
+      deltaReplacedMask_[var] = true; } }
+  toc(9,"recalculate");
+
+  //tic(9,"solve");
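For reference, the relinearization marking in step 4 above (Impl::CheckRelinearization) keeps a variable's linearization point unless some component of its delta reaches the threshold β (ISAM2Params::relinearizeThreshold). A standalone sketch under the simplifying assumption that delta is a plain map from variable index to update vector (checkRelinearization below is illustrative, not the Impl:: signature):

    #include <cmath>
    #include <cstddef>
    #include <cstdio>
    #include <map>
    #include <vector>

    typedef std::map<int, std::vector<double> > Delta;

    // Mark variable j when max_i |delta_j[i]| >= beta, i.e. J = { j : |delta_j| >= beta }.
    static std::vector<int> checkRelinearization(const Delta& delta, double beta) {
      std::vector<int> relinKeys;
      for (Delta::const_iterator it = delta.begin(); it != delta.end(); ++it) {
        for (std::size_t i = 0; i < it->second.size(); ++i) {
          if (std::fabs(it->second[i]) >= beta) {
            relinKeys.push_back(it->first);
            break;
          }
        }
      }
      return relinKeys;
    }

    int main() {
      Delta delta;
      delta[0].push_back(0.001); delta[0].push_back(0.002);  // small: keep linearization point
      delta[1].push_back(0.3);   delta[1].push_back(0.0);    // large: relinearize
      std::vector<int> keys = checkRelinearization(delta, 0.1);
      for (std::size_t i = 0; i < keys.size(); ++i) std::printf("relinearize variable %d\n", keys[i]);
      return 0;
    }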
+  // 9. Solve
+  if(debug) delta_.print("delta_: ");
+  //toc(9,"solve");
+
+  tic(10,"evaluate error after");
+  if(params_.evaluateNonlinearError)
+    result.errorAfter.reset(nonlinearFactors_.error(calculateEstimate()));
+  toc(10,"evaluate error after");
+
+  result.cliques = this->nodes().size();
+  deltaDoglegUptodate_ = false;
+  deltaUptodate_ = false;
+
+  return result;
+}
+
+/* ************************************************************************* */
+void ISAM2::updateDelta(bool forceFullSolve) const {
+
+  if(params_.optimizationParams.type() == typeid(ISAM2GaussNewtonParams)) {
+    // If using Gauss-Newton, update with wildfireThreshold
+    const ISAM2GaussNewtonParams& gaussNewtonParams =
+        boost::get<ISAM2GaussNewtonParams>(params_.optimizationParams);
+    const double effectiveWildfireThreshold = forceFullSolve ? 0.0 : gaussNewtonParams.wildfireThreshold;
+    tic(0, "Wildfire update");
+    lastBacksubVariableCount = Impl::UpdateDelta(this->root(), deltaReplacedMask_, delta_, effectiveWildfireThreshold);
+    toc(0, "Wildfire update");
+
+  } else if(params_.optimizationParams.type() == typeid(ISAM2DoglegParams)) {
+    // If using Dogleg, do a Dogleg step
+    const ISAM2DoglegParams& doglegParams =
+        boost::get<ISAM2DoglegParams>(params_.optimizationParams);
+
+    // Do one Dogleg iteration
+    tic(1, "Dogleg Iterate");
+    DoglegOptimizerImpl::IterationResult doglegResult(DoglegOptimizerImpl::Iterate(
+        *doglegDelta_, doglegParams.adaptationMode, *this, nonlinearFactors_, theta_, ordering_, nonlinearFactors_.error(theta_), doglegParams.verbose));
+    toc(1, "Dogleg Iterate");
+
+    tic(2, "Copy dx_d");
+    // Update Delta and linear step
+    doglegDelta_ = doglegResult.Delta;
+    delta_.permutation() = Permutation::Identity(delta_.size()); // Dogleg solves for the full delta so there is no permutation
+    delta_.container() = doglegResult.dx_d; // Copy the VectorValues containing the linear solution
+    toc(2, "Copy dx_d");
+  }
+
+  deltaUptodate_ = true;
+}
+
+/* ************************************************************************* */
+Values ISAM2::calculateEstimate() const {
+  // We use ExpmapMasked here instead of regular expmap because the former
+  // handles Permuted<VectorValues>
+  tic(1, "Copy Values");
+  Values ret(theta_);
+  toc(1, "Copy Values");
+  tic(2, "getDelta");
+  const Permuted<VectorValues>& delta(getDelta());
+  toc(2, "getDelta");
+  tic(3, "Expmap");
+  vector<bool> mask(ordering_.nVars(), true);
+  Impl::ExpmapMasked(ret, delta, ordering_, mask);
+  toc(3, "Expmap");
+  return ret;
+}
+
+/* ************************************************************************* */
+Values ISAM2::calculateBestEstimate() const {
+  VectorValues delta(theta_.dims(ordering_));
+  internal::optimizeInPlace<ISAM2>(this->root(), delta);
+  return theta_.retract(delta, ordering_);
+}
+
+/* ************************************************************************* */
+const Permuted<VectorValues>& ISAM2::getDelta() const {
+  if(!deltaUptodate_)
+    updateDelta();
+  return delta_;
+}
+
+/* ************************************************************************* */
+VectorValues optimize(const ISAM2& isam) {
+  tic(0, "allocateVectorValues");
+  VectorValues delta = *allocateVectorValues(isam);
+  toc(0, "allocateVectorValues");
+  optimizeInPlace(isam, delta);
+  return delta;
+}
+
+/* ************************************************************************* */
+void optimizeInPlace(const ISAM2& isam, VectorValues& delta) {
+  // We may need to update the solution calculations
+  if(!isam.deltaDoglegUptodate_) {
+    tic(1, "UpdateDoglegDeltas");
+    double wildfireThreshold = 0.0;
+    if(isam.params().optimizationParams.type() == typeid(ISAM2GaussNewtonParams))
+      wildfireThreshold = boost::get<ISAM2GaussNewtonParams>(isam.params().optimizationParams).wildfireThreshold;
+    else if(isam.params().optimizationParams.type() == typeid(ISAM2DoglegParams))
+      wildfireThreshold = boost::get<ISAM2DoglegParams>(isam.params().optimizationParams).wildfireThreshold;
+    else
+      assert(false);
+    ISAM2::Impl::UpdateDoglegDeltas(isam, wildfireThreshold, isam.deltaReplacedMask_, isam.deltaNewton_, isam.RgProd_);
+    isam.deltaDoglegUptodate_ = true;
+    toc(1, "UpdateDoglegDeltas");
+  }
+
+  tic(2, "copy delta");
+  delta = isam.deltaNewton_;
+  toc(2, "copy delta");
+}
+
+/* ************************************************************************* */
+VectorValues optimizeGradientSearch(const ISAM2& isam) {
+  tic(0, "Allocate VectorValues");
+  VectorValues grad = *allocateVectorValues(isam);
+  toc(0, "Allocate VectorValues");
+
+  optimizeGradientSearchInPlace(isam, grad);
+
+  return grad;
+}
+
+/* ************************************************************************* */
+void optimizeGradientSearchInPlace(const ISAM2& isam, VectorValues& grad) {
+  // We may need to update the solution calculations
+  if(!isam.deltaDoglegUptodate_) {
+    tic(1, "UpdateDoglegDeltas");
+    double wildfireThreshold = 0.0;
+    if(isam.params().optimizationParams.type() == typeid(ISAM2GaussNewtonParams))
+      wildfireThreshold = boost::get<ISAM2GaussNewtonParams>(isam.params().optimizationParams).wildfireThreshold;
+    else if(isam.params().optimizationParams.type() == typeid(ISAM2DoglegParams))
+      wildfireThreshold = boost::get<ISAM2DoglegParams>(isam.params().optimizationParams).wildfireThreshold;
+    else
+      assert(false);
+    ISAM2::Impl::UpdateDoglegDeltas(isam, wildfireThreshold, isam.deltaReplacedMask_, isam.deltaNewton_, isam.RgProd_);
+    isam.deltaDoglegUptodate_ = true;
+    toc(1, "UpdateDoglegDeltas");
+  }
+
+  tic(2, "Compute Gradient");
+  // Compute gradient (call gradientAtZero function, which is defined for various linear systems)
+  gradientAtZero(isam, grad);
+  double gradientSqNorm = grad.dot(grad);
+  toc(2, "Compute Gradient");
+
+  tic(3, "Compute minimizing step size");
+  // Compute minimizing step size
+  double RgNormSq = isam.RgProd_.container().vector().squaredNorm();
+  double step = -gradientSqNorm / RgNormSq;
+  toc(3, "Compute minimizing step size");
+
+  tic(4, "Compute point");
+  // Compute steepest descent point
+  grad.vector() *= step;
+  toc(4, "Compute point");
+}
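gradientAtZeroTreeAdder() just below sums a cached per-clique vector into the global gradient. Per the gradientAtZero documentation in ISAM2.h, the gradient of ½|Rx-d|² about x = 0 is -Rᵀd; for a clique conditional with blocks [R S | d] the cached contribution is accordingly the stack -[R S]ᵀd. A sketch with invented sizes and values (the real vector is gradientContribution_, built in the ISAM2Clique constructor):

    #include <Eigen/Dense>
    #include <iostream>

    int main() {
      Eigen::Matrix2d R;
      R << 1, 2,
           0, 3;                   // frontal block (upper triangular)
      Eigen::Vector2d S(4, 5);     // separator block with a single column
      Eigen::Vector2d d(1, 1);     // right-hand side

      // Per-clique gradient contribution about zero: -[R S]^T d.
      Eigen::Vector3d contribution;
      contribution << -R.transpose() * d, -S.transpose() * d;
      std::cout << contribution.transpose() << std::endl;  // -1 -5 -9
      return 0;
    }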
+
+/* ************************************************************************* */
+VectorValues gradient(const ISAM2& bayesTree, const VectorValues& x0) {
+  return gradient(FactorGraph<JacobianFactor>(bayesTree), x0);
+}
+
+/* ************************************************************************* */
+static void gradientAtZeroTreeAdder(const boost::shared_ptr<ISAM2Clique>& root, VectorValues& g) {
+  // Loop through variables in each clique, adding contributions
+  int variablePosition = 0;
+  for(GaussianConditional::const_iterator jit = root->conditional()->begin(); jit != root->conditional()->end(); ++jit) {
+    const int dim = root->conditional()->dim(jit);
+    g[*jit] += root->gradientContribution().segment(variablePosition, dim);
+    variablePosition += dim;
+  }
+
+  // Recursively add contributions from children
+  typedef boost::shared_ptr<ISAM2Clique> sharedClique;
+  BOOST_FOREACH(const sharedClique& child, root->children()) {
+    gradientAtZeroTreeAdder(child, g);
+  }
+}
+
+/* ************************************************************************* */
+void gradientAtZero(const ISAM2& bayesTree, VectorValues& g) {
+  // Zero-out gradient
+  g.setZero();
+
+  // Sum up contributions for each clique
+  gradientAtZeroTreeAdder(bayesTree.root(), g);
+}
+
+}
+/// namespace gtsam
diff --git a/gtsam/nonlinear/ISAM2.h b/gtsam/nonlinear/ISAM2.h
index 208f0748f..b967eadc4 100644
--- a/gtsam/nonlinear/ISAM2.h
+++ b/gtsam/nonlinear/ISAM2.h
@@ -12,7 +12,7 @@
 /**
  * @file    ISAM2.h
  * @brief   Incremental update functionality (ISAM2) for BayesTree, with fluid relinearization.
- * @author  Michael Kaess
+ * @author  Michael Kaess, Richard Roberts
  */
 
 // \callgraph
 
@@ -21,10 +21,8 @@
 
 #include
 #include
-#include
+#include
 
-// Workaround for boost < 1.47, see note in file
-//#include
 #include
 
 namespace gtsam {
@@ -56,15 +54,18 @@ struct ISAM2GaussNewtonParams {
  */
 struct ISAM2DoglegParams {
   double initialDelta; ///< The initial trust region radius for Dogleg
+  double wildfireThreshold; ///< Continue updating the linear delta only when changes are above this threshold (default: 1e-5)
   DoglegOptimizerImpl::TrustRegionAdaptationMode adaptationMode; ///< See description in DoglegOptimizerImpl::TrustRegionAdaptationMode
   bool verbose; ///< Whether Dogleg prints iteration and convergence information
 
   /** Specify parameters as constructor arguments */
   ISAM2DoglegParams(
-      double _initialDelta = 1.0, ///< see ISAM2DoglegParams public variables, ISAM2DoglegParams::initialDelta
-      DoglegOptimizerImpl::TrustRegionAdaptationMode _adaptationMode = DoglegOptimizerImpl::SEARCH_EACH_ITERATION, ///< see ISAM2DoglegParams public variables, ISAM2DoglegParams::adaptationMode
-      bool _verbose = false ///< see ISAM2DoglegParams public variables, ISAM2DoglegParams::verbose
-  ) : initialDelta(_initialDelta), adaptationMode(_adaptationMode), verbose(_verbose) {}
+      double _initialDelta = 1.0, ///< see ISAM2DoglegParams::initialDelta
+      double _wildfireThreshold = 1e-5, ///< see ISAM2DoglegParams::wildfireThreshold
+      DoglegOptimizerImpl::TrustRegionAdaptationMode _adaptationMode = DoglegOptimizerImpl::SEARCH_EACH_ITERATION, ///< see ISAM2DoglegParams::adaptationMode
+      bool _verbose = false ///< see ISAM2DoglegParams::verbose
+  ) : initialDelta(_initialDelta), wildfireThreshold(_wildfireThreshold),
+      adaptationMode(_adaptationMode), verbose(_verbose) {}
 };
 
@@ -181,17 +182,16 @@ struct ISAM2Result {
   FastVector<size_t> newFactorsIndices;
 };
 
-template<class CONDITIONAL>
-struct ISAM2Clique : public BayesTreeCliqueBase<ISAM2Clique<CONDITIONAL>, CONDITIONAL> {
+struct ISAM2Clique : public BayesTreeCliqueBase<ISAM2Clique, GaussianConditional> {
 
-  typedef ISAM2Clique<CONDITIONAL> This;
-  typedef BayesTreeCliqueBase<This, CONDITIONAL> Base;
+  typedef ISAM2Clique This;
+  typedef BayesTreeCliqueBase<This, GaussianConditional> Base;
   typedef boost::shared_ptr<This> shared_ptr;
   typedef boost::weak_ptr<This> weak_ptr;
-  typedef CONDITIONAL ConditionalType;
-  typedef typename ConditionalType::shared_ptr sharedConditional;
+  typedef GaussianConditional ConditionalType;
+  typedef ConditionalType::shared_ptr sharedConditional;
 
-  typename Base::FactorType::shared_ptr cachedFactor_;
+  Base::FactorType::shared_ptr cachedFactor_;
   Vector gradientContribution_;
 
   /** Construct from a conditional */
@@ -199,7 +199,7 @@ struct ISAM2Clique : public BayesTreeCliqueBase<ISAM2Clique<CONDIT
     throw runtime_error("ISAM2Clique should always be constructed with the elimination result constructor"); }
 
   /** Construct from an elimination result */
-  ISAM2Clique(const std::pair<sharedConditional, boost::shared_ptr<typename ConditionalType::FactorType> >& result) :
+  ISAM2Clique(const std::pair<sharedConditional, boost::shared_ptr<ConditionalType::FactorType> >& result) :
     Base(result.first), cachedFactor_(result.second),
     gradientContribution_(result.first->get_R().cols() + result.first->get_S().cols()) {
     // Compute gradient contribution
     const ConditionalType& conditional(*result.first);
@@ -211,13 +211,13 @@ struct ISAM2Clique : public BayesTreeCliqueBase<ISAM2Clique<CONDIT
   shared_ptr clone() const {
     shared_ptr copy(new ISAM2Clique(make_pair(
         sharedConditional(new ConditionalType(*Base::conditional_)),
-        cachedFactor_ ? cachedFactor_->clone() : typename Base::FactorType::shared_ptr())));
+        cachedFactor_ ? cachedFactor_->clone() : Base::FactorType::shared_ptr())));
     copy->gradientContribution_ = gradientContribution_;
     return copy;
   }
 
   /** Access the cached factor */
-  typename Base::FactorType::shared_ptr& cachedFactor() { return cachedFactor_; }
+  Base::FactorType::shared_ptr& cachedFactor() { return cachedFactor_; }
 
   /** Access the gradient contribution */
   const Vector& gradientContribution() const { return gradientContribution_; }
 
@@ -269,8 +269,7 @@ private:
  * estimate of all variables.
  *
 */
-template<class CONDITIONAL, class GRAPH>
-class ISAM2: public BayesTree<CONDITIONAL, ISAM2Clique<CONDITIONAL> > {
+class ISAM2: public BayesTree<GaussianConditional, ISAM2Clique> {
 
 protected:
 
@@ -296,6 +295,12 @@
    */
   mutable Permuted<VectorValues> delta_;
 
+  VectorValues deltaNewtonUnpermuted_;
+  mutable Permuted<VectorValues> deltaNewton_;
+  VectorValues RgProdUnpermuted_;
+  mutable Permuted<VectorValues> RgProd_;
+  mutable bool deltaDoglegUptodate_;
+
   /** Indicates whether the current delta is up-to-date, only used
    * internally - delta will always be updated if necessary when it is
    * requested with getDelta() or calculateEstimate().
@@ -316,7 +321,7 @@
   mutable std::vector<bool> deltaReplacedMask_;
 
   /** All original nonlinear factors are stored here to use during relinearization */
-  GRAPH nonlinearFactors_;
+  NonlinearFactorGraph nonlinearFactors_;
 
   /** The current elimination ordering Symbols to Index (integer) keys.
    *
@@ -339,9 +344,7 @@ private:
 
 public:
 
-  typedef BayesTree<CONDITIONAL, ISAM2Clique<CONDITIONAL> > Base; ///< The BayesTree base class
-  typedef ISAM2<CONDITIONAL, GRAPH> This; ///< This class
-  typedef GRAPH Graph;
+  typedef BayesTree<GaussianConditional, ISAM2Clique> Base; ///< The BayesTree base class
 
   /** Create an empty ISAM2 instance */
   ISAM2(const ISAM2Params& params);
 
@@ -349,17 +352,22 @@
   /** Create an empty ISAM2 instance using the default set of parameters (see ISAM2Params) */
   ISAM2();
 
-  typedef typename Base::Clique Clique; ///< A clique
-  typedef typename Base::sharedClique sharedClique; ///< Shared pointer to a clique
-  typedef typename Base::Cliques Cliques; ///< List of Clique typedef from base class
+  typedef Base::Clique Clique; ///< A clique
+  typedef Base::sharedClique sharedClique; ///< Shared pointer to a clique
+  typedef Base::Cliques Cliques; ///< List of Clique typedef from base class
 
-  void cloneTo(boost::shared_ptr<This>& newISAM2) const {
+  void cloneTo(boost::shared_ptr<ISAM2>& newISAM2) const {
     boost::shared_ptr<Base> bayesTree = boost::static_pointer_cast<Base>(newISAM2);
     Base::cloneTo(bayesTree);
     newISAM2->theta_ = theta_;
     newISAM2->variableIndex_ = variableIndex_;
     newISAM2->deltaUnpermuted_ = deltaUnpermuted_;
     newISAM2->delta_ = delta_;
+    newISAM2->deltaNewtonUnpermuted_ = deltaNewtonUnpermuted_;
+    newISAM2->deltaNewton_ = deltaNewton_;
+    newISAM2->RgProdUnpermuted_ = RgProdUnpermuted_;
+    newISAM2->RgProd_ = RgProd_;
+    newISAM2->deltaDoglegUptodate_ = deltaDoglegUptodate_;
     newISAM2->deltaUptodate_ = deltaUptodate_;
     newISAM2->deltaReplacedMask_ = deltaReplacedMask_;
     newISAM2->nonlinearFactors_ = nonlinearFactors_;
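The closed-form line search documented below for optimizeGradientSearch() can be sanity-checked numerically: with g = -Rᵀd (the gradient at δx = 0), minimizing f(α) = ½|R(αg) - d|² over α gives α = -(gᵀg)/((Rg)ᵀ(Rg)), which is exactly the `step` computed in ISAM2.cpp above. A standalone Eigen check (all values arbitrary):

    #include <Eigen/Dense>
    #include <iostream>

    int main() {
      Eigen::Matrix2d R;
      R << 2, 0,
           0, 1;
      Eigen::Vector2d d(2, 1);

      Eigen::Vector2d g = -R.transpose() * d;  // gradient about zero
      double step = -g.squaredNorm() / (R * g).squaredNorm();

      // Brute-force the same one-dimensional minimization to confirm the formula.
      double best = 0, bestErr = 1e30;
      for (double a = -1.0; a <= 1.0; a += 1e-4) {
        double err = 0.5 * (R * (a * g) - d).squaredNorm();
        if (err < bestErr) { bestErr = err; best = a; }
      }
      std::cout << "closed form: " << step << "  brute force: " << best
                << "\ndelta x = " << (step * g).transpose() << std::endl;
      return 0;
    }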
@@ -395,7 +403,7 @@ public:
    * (Params::relinearizeSkip).
    * @return An ISAM2Result struct containing information about the update
    */
-  ISAM2Result update(const GRAPH& newFactors = GRAPH(), const Values& newTheta = Values(),
+  ISAM2Result update(const NonlinearFactorGraph& newFactors = NonlinearFactorGraph(), const Values& newTheta = Values(),
       const FastVector<size_t>& removeFactorIndices = FastVector<size_t>(),
       const boost::optional<FastSet<Key> >& constrainedKeys = boost::none,
       bool force_relinearize = false);
 
@@ -432,7 +440,7 @@
   const Permuted<VectorValues>& getDelta() const;
 
   /** Access the set of nonlinear factors */
-  const GRAPH& getFactorsUnsafe() const { return nonlinearFactors_; }
+  const NonlinearFactorGraph& getFactorsUnsafe() const { return nonlinearFactors_; }
 
   /** Access the current ordering */
   const Ordering& getOrdering() const { return ordering_; }
 
@@ -460,8 +468,93 @@ private:
   //    void linear_update(const GaussianFactorGraph& newFactors);
   void updateDelta(bool forceFullSolve = false) const;
 
+  friend void optimizeInPlace(const ISAM2&, VectorValues&);
+  friend void optimizeGradientSearchInPlace(const ISAM2&, VectorValues&);
+
 }; // ISAM2
 
+/** Get the linear delta for the ISAM2 object, i.e., an unpermuted version of the delta returned by ISAM2::getDelta(). */
+VectorValues optimize(const ISAM2& isam);
+
+/** In-place version of optimize(const ISAM2&), writing the unpermuted delta into the pre-allocated VectorValues \c delta. */
+void optimizeInPlace(const ISAM2& isam, VectorValues& delta);
+
+/// Optimize the BayesTree, starting from the root.
+/// @param replaced Needs to contain
+/// all variables that are contained in the top of the Bayes tree that has been
+/// redone.
+/// @param delta The current solution, an offset from the linearization
+/// point.
+/// @param threshold The maximum change against the PREVIOUS delta for
+/// non-replaced variables that can be ignored, i.e. the old delta entry is kept
+/// and recursive backsubstitution might eventually stop if none of the changed
+/// variables are contained in the subtree.
+/// @return The number of variables that were solved for
+template<class CLIQUE>
+int optimizeWildfire(const boost::shared_ptr<CLIQUE>& root,
+    double threshold, const std::vector<bool>& replaced, Permuted<VectorValues>& delta);
+
+/**
+ * Optimize along the gradient direction, with a closed-form computation to
+ * perform the line search.  The gradient is computed about \f$ \delta x=0 \f$.
+ *
+ * This function returns \f$ \delta x \f$ that minimizes a reparametrized
+ * problem.  The error function of a GaussianBayesNet is
+ *
+ * \f[ f(\delta x) = \frac{1}{2} |R \delta x - d|^2 = \frac{1}{2}d^T d - d^T R \delta x + \frac{1}{2} \delta x^T R^T R \delta x \f]
+ *
+ * with gradient and Hessian
+ *
+ * \f[ g(\delta x) = R^T(R\delta x - d), \qquad G(\delta x) = R^T R. \f]
+ *
+ * This function performs the line search in the direction of the
+ * gradient evaluated at \f$ g = g(\delta x = 0) \f$ with step size
+ * \f$ \alpha \f$ that minimizes \f$ f(\delta x = \alpha g) \f$:
+ *
+ * \f[ f(\alpha) = \frac{1}{2} d^T d + g^T \delta x + \frac{1}{2} \alpha^2 g^T G g \f]
+ *
+ * Optimizing by setting the derivative to zero yields
+ * \f$ \hat \alpha = (-g^T g) / (g^T G g) \f$.  For efficiency, this function
+ * evaluates the denominator without computing the Hessian \f$ G \f$, returning
+ *
+ * \f[ \delta x = \hat\alpha g = \frac{-g^T g}{(R g)^T(R g)} g \f]
+ */
+VectorValues optimizeGradientSearch(const ISAM2& isam);
+
+/** In-place version of optimizeGradientSearch requiring pre-allocated VectorValues \c x */
+void optimizeGradientSearchInPlace(const ISAM2& isam, VectorValues& grad);
+
+/// calculate the number of non-zero entries for the tree starting at clique (use root for complete matrix)
+template<class CLIQUE>
+int calculate_nnz(const boost::shared_ptr<CLIQUE>& clique);
+
+/**
+ * Compute the gradient of the energy function,
+ * \f$ \nabla_{x=x_0} \left\Vert \Sigma^{-1} R x - d \right\Vert^2 \f$,
+ * centered around \f$ x = x_0 \f$.
+ * The gradient is \f$ R^T(Rx-d) \f$.
+ * This specialized version is used with ISAM2, where each clique stores its
+ * gradient contribution.
+ * @param bayesTree The Gaussian Bayes Tree $(R,d)$
+ * @param x0 The center about which to compute the gradient
+ * @return The gradient as a VectorValues
+ */
+VectorValues gradient(const ISAM2& bayesTree, const VectorValues& x0);
+
+/**
+ * Compute the gradient of the energy function,
+ * \f$ \nabla_{x=0} \left\Vert \Sigma^{-1} R x - d \right\Vert^2 \f$,
+ * centered around zero.
+ * The gradient about zero is \f$ -R^T d \f$.  See also gradient(const GaussianBayesNet&, const VectorValues&).
+ * This specialized version is used with ISAM2, where each clique stores its
+ * gradient contribution.
+ * @param bayesTree The Gaussian Bayes Tree $(R,d)$
+ * @param [output] g A VectorValues to store the gradient, which must be preallocated, see allocateVectorValues
+ * @return The gradient as a VectorValues
+ */
+void gradientAtZero(const ISAM2& bayesTree, VectorValues& g);
+
 } /// namespace gtsam
 
 #include
+#include
diff --git a/gtsam/nonlinear/Values.h b/gtsam/nonlinear/Values.h
index bdd1739da..5f6dad022 100644
--- a/gtsam/nonlinear/Values.h
+++ b/gtsam/nonlinear/Values.h
@@ -117,6 +117,8 @@ namespace gtsam {
     typedef boost::transform_iterator<
         boost::function1, KeyValueMap::const_reverse_iterator> const_reverse_iterator;
 
+    typedef KeyValuePair value_type;
+
   private:
 
     template
     struct _KeyValuePair {
@@ -143,6 +145,7 @@
     /** A key-value pair, with the value a specific derived Value type. */
     typedef _KeyValuePair KeyValuePair;
     typedef _ConstKeyValuePair ConstKeyValuePair;
+    typedef KeyValuePair value_type;
 
     typedef boost::transform_iterator<
 
@@ -208,6 +211,7 @@
   public:
     /** A const key-value pair, with the value a specific derived Value type.
+
+/** In-place version of optimizeGradientSearch requiring pre-allocated VectorValues \c grad */
+void optimizeGradientSearchInPlace(const ISAM2& isam, VectorValues& grad);
+
+/// Calculate the number of non-zero entries for the tree starting at clique (use root for complete matrix)
+template<class CLIQUE>
+int calculate_nnz(const boost::shared_ptr<CLIQUE>& clique);
+
+/**
+ * Compute the gradient of the energy function,
+ * \f$ \nabla_{x=x_0} \left\Vert \Sigma^{-1} R x - d \right\Vert^2 \f$,
+ * centered around \f$ x = x_0 \f$.
+ * The gradient is \f$ R^T(Rx-d) \f$.
+ * This specialized version is used with ISAM2, where each clique stores its
+ * gradient contribution.
+ * @param bayesTree The Gaussian Bayes Tree $(R,d)$
+ * @param x0 The center about which to compute the gradient
+ * @return The gradient as a VectorValues
+ */
+VectorValues gradient(const ISAM2& bayesTree, const VectorValues& x0);
+
+/**
+ * Compute the gradient of the energy function,
+ * \f$ \nabla_{x=0} \left\Vert \Sigma^{-1} R x - d \right\Vert^2 \f$,
+ * centered around zero.
+ * The gradient about zero is \f$ -R^T d \f$. See also gradient(const GaussianBayesNet&, const VectorValues&).
+ * This specialized version is used with ISAM2, where each clique stores its
+ * gradient contribution.
+ * @param bayesTree The Gaussian Bayes Tree $(R,d)$
+ * @param[out] g A VectorValues to store the gradient, which must be preallocated, see allocateVectorValues
+ */
+void gradientAtZero(const ISAM2& bayesTree, VectorValues& g);
+
 } /// namespace gtsam

 #include
+#include
diff --git a/gtsam/nonlinear/Values.h b/gtsam/nonlinear/Values.h
index bdd1739da..5f6dad022 100644
--- a/gtsam/nonlinear/Values.h
+++ b/gtsam/nonlinear/Values.h
@@ -117,6 +117,8 @@ namespace gtsam {
     typedef boost::transform_iterator<
         boost::function1, KeyValueMap::const_reverse_iterator> const_reverse_iterator;

+    typedef KeyValuePair value_type;
+
   private:
     template struct _KeyValuePair {
@@ -143,6 +145,7 @@ namespace gtsam {
     /** A key-value pair, with the value a specific derived Value type. */
     typedef _KeyValuePair KeyValuePair;
     typedef _ConstKeyValuePair ConstKeyValuePair;
+    typedef KeyValuePair value_type;

     typedef boost::transform_iterator<
@@ -208,6 +211,7 @@ namespace gtsam {
   public:
     /** A const key-value pair, with the value a specific derived Value type. */
     typedef _ConstKeyValuePair KeyValuePair;
+    typedef KeyValuePair value_type;

     typedef typename Filtered::const_const_iterator iterator;
     typedef typename Filtered::const_const_iterator const_iterator;
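The value_type typedefs added to Values and its filtered views are what generic container code (BOOST_FOREACH, STL algorithms) uses to deduce the element type. A hedged sketch of the iteration this enables (assumes a populated Values and the ConstKeyValuePair typedef visible in this header):

    #include <iostream>
    #include <boost/foreach.hpp>
    #include <gtsam/nonlinear/Values.h>

    // Sketch: treat gtsam::Values as a standard range of key-value pairs.
    void printKeys(const gtsam::Values& values) {
      BOOST_FOREACH(const gtsam::Values::ConstKeyValuePair& keyValue, values) {
        std::cout << keyValue.key << std::endl;  // each element pairs a key with its Value
      }
    }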
diff --git a/tests/testGaussianISAM2.cpp b/tests/testGaussianISAM2.cpp
index 21f2e62e9..a990d2ded 100644
--- a/tests/testGaussianISAM2.cpp
+++ b/tests/testGaussianISAM2.cpp
@@ -17,7 +17,7 @@ using namespace boost::assign;
 #include
 #include
 #include
-#include
+#include

 #include
 #include
@@ -29,7 +29,7 @@ using boost::shared_ptr;
 const double tol = 1e-4;

 /* ************************************************************************* */
-TEST(ISAM2, AddVariables) {
+TEST_UNSAFE(ISAM2, AddVariables) {

   // Create initial state
   Values theta;
@@ -48,11 +48,31 @@ TEST(ISAM2, AddVariables) {

   Permuted<VectorValues> delta(permutation, deltaUnpermuted);

+  VectorValues deltaNewtonUnpermuted;
+  deltaNewtonUnpermuted.insert(0, Vector_(3, .1, .2, .3));
+  deltaNewtonUnpermuted.insert(1, Vector_(2, .4, .5));
+
+  Permutation permutationNewton(2);
+  permutationNewton[0] = 1;
+  permutationNewton[1] = 0;
+
+  Permuted<VectorValues> deltaNewton(permutationNewton, deltaNewtonUnpermuted);
+
+  VectorValues deltaRgUnpermuted;
+  deltaRgUnpermuted.insert(0, Vector_(3, .1, .2, .3));
+  deltaRgUnpermuted.insert(1, Vector_(2, .4, .5));
+
+  Permutation permutationRg(2);
+  permutationRg[0] = 1;
+  permutationRg[1] = 0;
+
+  Permuted<VectorValues> deltaRg(permutationRg, deltaRgUnpermuted);
+
   vector<bool> replacedKeys(2, false);

   Ordering ordering; ordering += planarSLAM::PointKey(0), planarSLAM::PoseKey(0);

-  GaussianISAM2<>::Nodes nodes(2);
+  ISAM2::Nodes nodes(2);

   // Verify initial state
   LONGS_EQUAL(0, ordering[planarSLAM::PointKey(0)]);
@@ -78,19 +98,47 @@ TEST(ISAM2, AddVariables) {

   Permuted<VectorValues> deltaExpected(permutationExpected, deltaUnpermutedExpected);

+  VectorValues deltaNewtonUnpermutedExpected;
+  deltaNewtonUnpermutedExpected.insert(0, Vector_(3, .1, .2, .3));
+  deltaNewtonUnpermutedExpected.insert(1, Vector_(2, .4, .5));
+  deltaNewtonUnpermutedExpected.insert(2, Vector_(3, 0.0, 0.0, 0.0));
+
+  Permutation permutationNewtonExpected(3);
+  permutationNewtonExpected[0] = 1;
+  permutationNewtonExpected[1] = 0;
+  permutationNewtonExpected[2] = 2;
+
+  Permuted<VectorValues> deltaNewtonExpected(permutationNewtonExpected, deltaNewtonUnpermutedExpected);
+
+  VectorValues deltaRgUnpermutedExpected;
+  deltaRgUnpermutedExpected.insert(0, Vector_(3, .1, .2, .3));
+  deltaRgUnpermutedExpected.insert(1, Vector_(2, .4, .5));
+  deltaRgUnpermutedExpected.insert(2, Vector_(3, 0.0, 0.0, 0.0));
+
+  Permutation permutationRgExpected(3);
+  permutationRgExpected[0] = 1;
+  permutationRgExpected[1] = 0;
+  permutationRgExpected[2] = 2;
+
+  Permuted<VectorValues> deltaRgExpected(permutationRgExpected, deltaRgUnpermutedExpected);
+
   vector<bool> replacedKeysExpected(3, false);

   Ordering orderingExpected; orderingExpected += planarSLAM::PointKey(0), planarSLAM::PoseKey(0), planarSLAM::PoseKey(1);

-  GaussianISAM2<>::Nodes nodesExpected(
-      3, GaussianISAM2<>::sharedClique());
+  ISAM2::Nodes nodesExpected(
+      3, ISAM2::sharedClique());

   // Expand initial state
-  GaussianISAM2<>::Impl::AddVariables(newTheta, theta, delta, replacedKeys, ordering, nodes);
+  ISAM2::Impl::AddVariables(newTheta, theta, delta, deltaNewton, deltaRg, replacedKeys, ordering, nodes);

   EXPECT(assert_equal(thetaExpected, theta));
   EXPECT(assert_equal(deltaUnpermutedExpected, deltaUnpermuted));
   EXPECT(assert_equal(deltaExpected.permutation(), delta.permutation()));
+  EXPECT(assert_equal(deltaNewtonUnpermutedExpected, deltaNewtonUnpermuted));
+  EXPECT(assert_equal(deltaNewtonExpected.permutation(), deltaNewton.permutation()));
+  EXPECT(assert_equal(deltaRgUnpermutedExpected, deltaRgUnpermuted));
+  EXPECT(assert_equal(deltaRgExpected.permutation(), deltaRg.permutation()));
   EXPECT(assert_container_equality(replacedKeysExpected, replacedKeys));
   EXPECT(assert_equal(orderingExpected, ordering));
 }
@@ -171,10 +219,10 @@ TEST(ISAM2, optimize2) {
   conditional->solveInPlace(expected);

   // Clique
-  GaussianISAM2<>::sharedClique clique(
-      GaussianISAM2<>::Clique::Create(make_pair(conditional,GaussianFactor::shared_ptr())));
+  ISAM2::sharedClique clique(
+      ISAM2::Clique::Create(make_pair(conditional,GaussianFactor::shared_ptr())));
   VectorValues actual(theta.dims(ordering));
-  internal::optimizeInPlace(clique, actual);
+  internal::optimizeInPlace<ISAM2>(clique, actual);

 //  expected.print("expected: ");
 //  actual.print("actual: ");
@@ -182,7 +230,7 @@
 }

 /* ************************************************************************* */
-bool isam_check(const planarSLAM::Graph& fullgraph, const Values& fullinit, const GaussianISAM2<>& isam) {
+bool isam_check(const planarSLAM::Graph& fullgraph, const Values& fullinit, const ISAM2& isam) {
   Values actual = isam.calculateEstimate();
   Ordering ordering = isam.getOrdering(); // *fullgraph.orderingCOLAMD(fullinit).first;
   GaussianFactorGraph linearized = *fullgraph.linearize(fullinit, ordering);
@@ -212,7 +260,7 @@ TEST(ISAM2, slamlike_solution_gaussnewton)
   SharedDiagonal brNoise = sharedSigmas(Vector_(2, M_PI/100.0, 0.1));

   // These variables will be reused and accumulate factors and values
-  GaussianISAM2<> isam(ISAM2Params(ISAM2GaussNewtonParams(0.001), 0.0, 0, false));
+  ISAM2 isam(ISAM2Params(ISAM2GaussNewtonParams(0.001), 0.0, 0, false));
   Values fullinit;
   planarSLAM::Graph fullgraph;
@@ -300,7 +348,7 @@ TEST(ISAM2, slamlike_solution_gaussnewton)
   CHECK(isam_check(fullgraph, fullinit, isam));

   // Check gradient at each node
-  typedef GaussianISAM2<>::sharedClique sharedClique;
+  typedef ISAM2::sharedClique sharedClique;
   BOOST_FOREACH(const sharedClique& clique, isam.nodes()) {
     // Compute expected gradient
     FactorGraph<JacobianFactor> jfg;
@@ -345,7 +393,7 @@ TEST(ISAM2, slamlike_solution_dogleg)
   SharedDiagonal brNoise = sharedSigmas(Vector_(2, M_PI/100.0, 0.1));

   // These variables will be reused and accumulate factors and values
-  GaussianISAM2<> isam(ISAM2Params(ISAM2DoglegParams(1.0), 0.0, 0, false));
+  ISAM2 isam(ISAM2Params(ISAM2DoglegParams(1.0), 0.0, 0, false));
   Values fullinit;
   planarSLAM::Graph fullgraph;
@@ -433,7 +481,7 @@ TEST(ISAM2, slamlike_solution_dogleg)
   CHECK(isam_check(fullgraph, fullinit, isam));

   // Check gradient at each node
-  typedef GaussianISAM2<>::sharedClique sharedClique;
+  typedef ISAM2::sharedClique sharedClique;
   BOOST_FOREACH(const sharedClique& clique, isam.nodes()) {
     // Compute expected gradient
     FactorGraph<JacobianFactor> jfg;
@@ -473,7 +521,7 @@ TEST(ISAM2, clone) {
   SharedDiagonal brNoise = sharedSigmas(Vector_(2, M_PI/100.0, 0.1));

   // These variables will be reused and accumulate factors and values
-  GaussianISAM2<> isam(ISAM2Params(ISAM2GaussNewtonParams(0.001), 0.0, 0, false, true));
+  ISAM2 isam(ISAM2Params(ISAM2GaussNewtonParams(0.001), 0.0, 0, false, true));
   Values fullinit;
   planarSLAM::Graph fullgraph;
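Every slamlike test funnels through isam_check, which cross-checks the incremental estimate against a batch solve of the accumulated graph. A hedged sketch of that pattern (the test's exact body is truncated in this excerpt; GaussianSequentialSolver is assumed available as the batch solver):

    // Sketch: compare ISAM2's incremental estimate with a from-scratch batch solve.
    bool batchConsistent(const planarSLAM::Graph& fullgraph, const Values& fullinit,
                         const ISAM2& isam) {
      Values actual = isam.calculateEstimate();
      Ordering ordering = isam.getOrdering();
      GaussianFactorGraph linearized = *fullgraph.linearize(fullinit, ordering);
      VectorValues delta = *GaussianSequentialSolver(linearized).optimize();
      Values expected = fullinit.retract(delta, ordering);  // back onto the manifold
      return assert_equal(expected, actual);
    }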
@@ -558,8 +606,8 @@ TEST(ISAM2, clone) {
   }

   // CLONING...
-  boost::shared_ptr<GaussianISAM2<> > isam2
-      = boost::shared_ptr<GaussianISAM2<> >(new GaussianISAM2<>());
+  boost::shared_ptr<ISAM2> isam2
+      = boost::shared_ptr<ISAM2>(new ISAM2());
   isam.cloneTo(isam2);

   CHECK(assert_equal(isam, *isam2));
@@ -567,24 +615,23 @@ TEST(ISAM2, clone) {

 /* ************************************************************************* */
 TEST(ISAM2, permute_cached) {
-  typedef ISAM2Clique<GaussianConditional> Clique;
-  typedef boost::shared_ptr<ISAM2Clique<GaussianConditional> > sharedClique;
+  typedef boost::shared_ptr<ISAM2Clique> sharedISAM2Clique;

   // Construct expected permuted BayesTree (variable 2 has been changed to 1)
-  BayesTree<GaussianConditional, Clique> expected;
-  expected.insert(sharedClique(new Clique(make_pair(
+  BayesTree<GaussianConditional, ISAM2Clique> expected;
+  expected.insert(sharedISAM2Clique(new ISAM2Clique(make_pair(
     boost::make_shared<GaussianConditional>(pair_list_of
         (3, Matrix_(1,1,1.0))
         (4, Matrix_(1,1,2.0)),
         2, Vector_(1,1.0), Vector_(1,1.0)), // p(3,4)
     HessianFactor::shared_ptr())))); // Cached: empty
-  expected.insert(sharedClique(new Clique(make_pair(
+  expected.insert(sharedISAM2Clique(new ISAM2Clique(make_pair(
     boost::make_shared<GaussianConditional>(pair_list_of
         (2, Matrix_(1,1,1.0))
         (3, Matrix_(1,1,2.0)),
         1, Vector_(1,1.0), Vector_(1,1.0)), // p(2|3)
     boost::make_shared<HessianFactor>(3, Matrix_(1,1,1.0), Vector_(1,1.0), 0.0))))); // Cached: p(3)
-  expected.insert(sharedClique(new Clique(make_pair(
+  expected.insert(sharedISAM2Clique(new ISAM2Clique(make_pair(
     boost::make_shared<GaussianConditional>(pair_list_of
         (0, Matrix_(1,1,1.0))
         (2, Matrix_(1,1,2.0)),
@@ -595,20 +642,20 @@
 expected.root()->children().front()->children().front()->conditional()->keys()[1] = 1;

   // Construct unpermuted BayesTree
-  BayesTree<GaussianConditional, Clique> actual;
-  actual.insert(sharedClique(new Clique(make_pair(
+  BayesTree<GaussianConditional, ISAM2Clique> actual;
+  actual.insert(sharedISAM2Clique(new ISAM2Clique(make_pair(
     boost::make_shared<GaussianConditional>(pair_list_of
         (3, Matrix_(1,1,1.0))
         (4, Matrix_(1,1,2.0)),
         2, Vector_(1,1.0), Vector_(1,1.0)), // p(3,4)
     HessianFactor::shared_ptr())))); // Cached: empty
-  actual.insert(sharedClique(new Clique(make_pair(
+  actual.insert(sharedISAM2Clique(new ISAM2Clique(make_pair(
     boost::make_shared<GaussianConditional>(pair_list_of
         (2, Matrix_(1,1,1.0))
         (3, Matrix_(1,1,2.0)),
         1, Vector_(1,1.0), Vector_(1,1.0)), // p(2|3)
     boost::make_shared<HessianFactor>(3, Matrix_(1,1,1.0), Vector_(1,1.0), 0.0))))); // Cached: p(3)
-  actual.insert(sharedClique(new Clique(make_pair(
+  actual.insert(sharedISAM2Clique(new ISAM2Clique(make_pair(
     boost::make_shared<GaussianConditional>(pair_list_of
         (0, Matrix_(1,1,1.0))
         (2, Matrix_(1,1,2.0)),
@@ -646,7 +693,7 @@ TEST(ISAM2, removeFactors)
   SharedDiagonal brNoise = sharedSigmas(Vector_(2, M_PI/100.0, 0.1));

   // These variables will be reused and accumulate factors and values
-  GaussianISAM2<> isam(ISAM2Params(ISAM2GaussNewtonParams(0.001), 0.0, 0, false));
+  ISAM2 isam(ISAM2Params(ISAM2GaussNewtonParams(0.001), 0.0, 0, false));
   Values fullinit;
   planarSLAM::Graph fullgraph;
@@ -740,7 +787,7 @@ TEST(ISAM2, removeFactors)
   CHECK(isam_check(fullgraph, fullinit, isam));

   // Check gradient at each node
-  typedef GaussianISAM2<>::sharedClique sharedClique;
+  typedef ISAM2::sharedClique sharedClique;
   BOOST_FOREACH(const sharedClique& clique, isam.nodes()) {
     // Compute expected gradient
     FactorGraph<JacobianFactor> jfg;
@@ -785,7 +832,7 @@ TEST(ISAM2, constrained_ordering)
   SharedDiagonal brNoise = sharedSigmas(Vector_(2, M_PI/100.0, 0.1));

   // These variables will be reused and accumulate factors and values
-  GaussianISAM2<> isam(ISAM2Params(ISAM2GaussNewtonParams(0.001), 0.0, 0, false));
+  ISAM2 isam(ISAM2Params(ISAM2GaussNewtonParams(0.001), 0.0, 0, false));
   Values fullinit;
   planarSLAM::Graph fullgraph;
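The removeFactors test drives the removeFactorIndices argument of ISAM2::update from the header change at the top of this patch. A hedged usage sketch (the factor index 0 is hypothetical; real code would record the indices reported when the factor was added):

    // Sketch: delete a factor by index, with no new factors or values.
    gtsam::FastVector<size_t> toRemove;
    toRemove.push_back(0);  // hypothetical index of the factor to drop
    isam.update(gtsam::NonlinearFactorGraph(), gtsam::Values(), toRemove);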
@@ -883,7 +930,7 @@ TEST(ISAM2, constrained_ordering)
       (isam.getOrdering()[planarSLAM::PoseKey(3)] == 13 && isam.getOrdering()[planarSLAM::PoseKey(4)] == 12));

   // Check gradient at each node
-  typedef GaussianISAM2<>::sharedClique sharedClique;
+  typedef ISAM2::sharedClique sharedClique;
   BOOST_FOREACH(const sharedClique& clique, isam.nodes()) {
     // Compute expected gradient
     FactorGraph<JacobianFactor> jfg;
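The per-clique gradient checks that close each of these tests lean on the gradient and gradientAtZero overloads declared for ISAM2 earlier in this patch. A hedged sketch of the whole-tree variants (allocateVectorValues is the preallocation helper named in the gradientAtZero doc comment):

    // Sketch: evaluate the gradient of the ISAM2 system two ways.
    void checkGradients(const ISAM2& isam, const VectorValues& x0) {
      VectorValues g = *allocateVectorValues(isam);
      gradientAtZero(isam, g);               // accumulates -R^T d clique by clique
      VectorValues gx = gradient(isam, x0);  // R^T (R x0 - d) about an arbitrary x0
      // With x0 all zeros the two results coincide.
    }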