Split ISAM2 Clique, Params, and Result out into separate files, cleaned up headers, and removed the ISAM2-inl.h header

release/4.3a0
Frank Dellaert 2018-09-30 12:58:48 -04:00
parent 7fd8bc1bf5
commit d401edc917
8 changed files with 1103 additions and 999 deletions
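For downstream code the split is meant to be include-compatible: the ISAM2.h diff below shows it now pulling in the three new headers, while ISAM2-inl.h is removed entirely. A minimal sketch of what a client translation unit can rely on after this commit (illustrative, not part of the diff):

#include <gtsam/nonlinear/ISAM2.h>        // still sufficient: aggregates the three headers below
#include <gtsam/nonlinear/ISAM2Params.h>  // ISAM2Params, ISAM2GaussNewtonParams, ISAM2DoglegParams
#include <gtsam/nonlinear/ISAM2Result.h>  // ISAM2Result
#include <gtsam/nonlinear/ISAM2Clique.h>  // ISAM2Clique and the optimizeWildfire helpers
// <gtsam/nonlinear/ISAM2-inl.h> no longer exists and must not be included.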

gtsam/nonlinear/ISAM2-inl.h (deleted)

@@ -1,34 +0,0 @@
/* ----------------------------------------------------------------------------
* GTSAM Copyright 2010, Georgia Tech Research Corporation,
* Atlanta, Georgia 30332-0415
* All Rights Reserved
* Authors: Frank Dellaert, et al. (see THANKS for the full author list)
* See LICENSE for the license information
* -------------------------------------------------------------------------- */
/**
* @file ISAM2-inl.h
* @brief
* @author Richard Roberts
* @date Mar 16, 2012
*/
#pragma once
#include <gtsam/nonlinear/ISAM2.h>
namespace gtsam {
/* ************************************************************************* */
template <class VALUE>
VALUE ISAM2::calculateEstimate(Key key) const {
const Vector& delta = getDelta()[key];
return traits<VALUE>::Retract(theta_.at<VALUE>(key), delta);
}
/* ************************************************************************* */
} // namespace gtsam

gtsam/nonlinear/ISAM2.cpp

@@ -10,22 +10,21 @@
* -------------------------------------------------------------------------- */
/**
* @file ISAM2-inl.h
* @file ISAM2.cpp
* @brief Incremental update functionality (ISAM2) for BayesTree, with fluid
* relinearization.
* @author Michael Kaess, Richard Roberts
* @author Michael Kaess, Richard Roberts, Frank Dellaert
*/
#include <gtsam/nonlinear/ISAM2.h>
#include <algorithm>
#include <map>
#include <stack>
#include <utility>
#include <gtsam/base/debug.h>
#include <gtsam/base/timing.h>
#include <gtsam/inference/BayesTree-inst.h>
#include <gtsam/inference/JunctionTree-inst.h> // We need the inst file because we'll make a special JT templated on ISAM2
#include <gtsam/linear/GaussianEliminationTree.h>
#include <gtsam/nonlinear/LinearContainerFactor.h>
#include <boost/assign/std/list.hpp> // for operator +=
using namespace boost::assign;
#include <boost/algorithm/string.hpp>
#include <boost/range/adaptors.hpp>
#include <boost/range/algorithm/copy.hpp>
namespace br {
@@ -33,30 +32,19 @@ using namespace boost::range;
using namespace boost::adaptors;
} // namespace br
#include <gtsam/base/debug.h>
#include <gtsam/base/timing.h>
#include <gtsam/inference/BayesTree-inst.h>
#include <gtsam/inference/BayesTreeCliqueBase-inst.h>
#include <gtsam/inference/JunctionTree-inst.h> // We need the inst file because we'll make a special JT templated on ISAM2
#include <gtsam/linear/GaussianEliminationTree.h>
#include <gtsam/linear/GaussianFactorGraph.h>
#include <gtsam/linear/HessianFactor.h>
#include <gtsam/linear/linearAlgorithms-inst.h>
#include <gtsam/nonlinear/DoglegOptimizerImpl.h>
#include <gtsam/nonlinear/LinearContainerFactor.h>
#include <gtsam/nonlinear/nonlinearExceptions.h>
#include <algorithm>
#include <map>
#include <utility>
using namespace std;
namespace gtsam {
// Instantiate base classes
template class BayesTreeCliqueBase<ISAM2Clique, GaussianFactorGraph>;
// Instantiate base class
template class BayesTree<ISAM2Clique>;
static const bool disableReordering = false;
static const double batchThreshold = 0.65;
static const bool kDisableReordering = false;
static const double kBatchThreshold = 0.65;
/* ************************************************************************* */
// Special BayesTree class that uses ISAM2 cliques - this is the result of
@@ -80,345 +68,10 @@ class ISAM2JunctionTree
typedef ISAM2JunctionTree This;
typedef boost::shared_ptr<This> shared_ptr;
ISAM2JunctionTree(const GaussianEliminationTree& eliminationTree)
explicit ISAM2JunctionTree(const GaussianEliminationTree& eliminationTree)
: Base(eliminationTree) {}
};
/* ************************************************************************* */
string ISAM2DoglegParams::adaptationModeTranslator(
const DoglegOptimizerImpl::TrustRegionAdaptationMode& adaptationMode)
const {
string s;
switch (adaptationMode) {
case DoglegOptimizerImpl::SEARCH_EACH_ITERATION:
s = "SEARCH_EACH_ITERATION";
break;
case DoglegOptimizerImpl::ONE_STEP_PER_ITERATION:
s = "ONE_STEP_PER_ITERATION";
break;
default:
s = "UNDEFINED";
break;
}
return s;
}
/* ************************************************************************* */
DoglegOptimizerImpl::TrustRegionAdaptationMode
ISAM2DoglegParams::adaptationModeTranslator(
const string& adaptationMode) const {
string s = adaptationMode;
boost::algorithm::to_upper(s);
if (s == "SEARCH_EACH_ITERATION")
return DoglegOptimizerImpl::SEARCH_EACH_ITERATION;
if (s == "ONE_STEP_PER_ITERATION")
return DoglegOptimizerImpl::ONE_STEP_PER_ITERATION;
/* default is SEARCH_EACH_ITERATION */
return DoglegOptimizerImpl::SEARCH_EACH_ITERATION;
}
/* ************************************************************************* */
ISAM2Params::Factorization ISAM2Params::factorizationTranslator(
const string& str) {
string s = str;
boost::algorithm::to_upper(s);
if (s == "QR") return ISAM2Params::QR;
if (s == "CHOLESKY") return ISAM2Params::CHOLESKY;
/* default is CHOLESKY */
return ISAM2Params::CHOLESKY;
}
/* ************************************************************************* */
string ISAM2Params::factorizationTranslator(
const ISAM2Params::Factorization& value) {
string s;
switch (value) {
case ISAM2Params::QR:
s = "QR";
break;
case ISAM2Params::CHOLESKY:
s = "CHOLESKY";
break;
default:
s = "UNDEFINED";
break;
}
return s;
}
/* ************************************************************************* */
void ISAM2Clique::setEliminationResult(
const FactorGraphType::EliminationResult& eliminationResult) {
conditional_ = eliminationResult.first;
cachedFactor_ = eliminationResult.second;
// Compute gradient contribution
gradientContribution_.resize(conditional_->cols() - 1);
// Rewrite -(R * P')'*d as -(d' * R * P')' for computational speed
// reasons
gradientContribution_ << -conditional_->get_R().transpose() *
conditional_->get_d(),
-conditional_->get_S().transpose() * conditional_->get_d();
}
/* ************************************************************************* */
bool ISAM2Clique::equals(const This& other, double tol) const {
return Base::equals(other) &&
((!cachedFactor_ && !other.cachedFactor_) ||
(cachedFactor_ && other.cachedFactor_ &&
cachedFactor_->equals(*other.cachedFactor_, tol)));
}
/* ************************************************************************* */
void ISAM2Clique::print(const string& s, const KeyFormatter& formatter) const {
Base::print(s, formatter);
if (cachedFactor_)
cachedFactor_->print(s + "Cached: ", formatter);
else
cout << s << "Cached empty" << endl;
if (gradientContribution_.rows() != 0)
gtsam::print(gradientContribution_, "Gradient contribution: ");
}
/* ************************************************************************* */
bool ISAM2Clique::isDirty(const KeySet& replaced, const KeySet& changed) const {
// if none of the variables in this clique (frontal and separator!) changed
// significantly, then by the running intersection property, none of the
// cliques in the children need to be processed
// Are any clique variables part of the tree that has been redone?
bool dirty = replaced.exists(conditional_->frontals().front());
#if !defined(NDEBUG) && defined(GTSAM_EXTRA_CONSISTENCY_CHECKS)
for (Key frontal : conditional_->frontals()) {
assert(dirty == replaced.exists(frontal));
}
#endif
// If not, then has one of the separator variables changed significantly?
if (!dirty) {
for (Key parent : conditional_->parents()) {
if (changed.exists(parent)) {
dirty = true;
break;
}
}
}
return dirty;
}
/* ************************************************************************* */
/**
* Back-substitute - special version stores solution pointers in cliques for
* fast access.
*/
void ISAM2Clique::fastBackSubstitute(VectorValues* delta) const {
#ifdef USE_BROKEN_FAST_BACKSUBSTITUTE
// TODO(gareth): This code shares a lot of logic w/ linearAlgorithms-inst,
// potentially refactor
// Create solution part pointers if necessary and possible - necessary if
// solnPointers_ is empty, and possible if either we're a root, or we have
// a parent with valid solnPointers_.
ISAM2::sharedClique parent = parent_.lock();
if (solnPointers_.empty() && (isRoot() || !parent->solnPointers_.empty())) {
for (Key frontal : conditional_->frontals())
solnPointers_.emplace(frontal, delta->find(frontal));
for (Key parentKey : conditional_->parents()) {
assert(parent->solnPointers_.exists(parentKey));
solnPointers_.emplace(parentKey, parent->solnPointers_.at(parentKey));
}
}
// See if we can use solution part pointers - we can if they either
// already existed or were created above.
if (!solnPointers_.empty()) {
GaussianConditional& c = *conditional_;
// Solve matrix
Vector xS;
{
// Count dimensions of vector
DenseIndex dim = 0;
FastVector<VectorValues::const_iterator> parentPointers;
parentPointers.reserve(conditional_->nrParents());
for (Key parent : conditional_->parents()) {
parentPointers.push_back(solnPointers_.at(parent));
dim += parentPointers.back()->second.size();
}
// Fill parent vector
xS.resize(dim);
DenseIndex vectorPos = 0;
for (const VectorValues::const_iterator& parentPointer : parentPointers) {
const Vector& parentVector = parentPointer->second;
xS.block(vectorPos, 0, parentVector.size(), 1) =
parentVector.block(0, 0, parentVector.size(), 1);
vectorPos += parentVector.size();
}
}
// NOTE(gareth): We can no longer write: xS = b - S * xS
// This is because Eigen (as of 3.3) no longer evaluates S * xS into
// a temporary, and the operation trashes values in xS.
// See: http://eigen.tuxfamily.org/index.php?title=3.3
const Vector rhs = c.getb() - c.get_S() * xS;
const Vector solution = c.get_R().triangularView<Eigen::Upper>().solve(rhs);
// Check for indeterminant solution
if (solution.hasNaN())
throw IndeterminantLinearSystemException(c.keys().front());
// Insert solution into a VectorValues
DenseIndex vectorPosition = 0;
for (GaussianConditional::const_iterator frontal = c.beginFrontals();
frontal != c.endFrontals(); ++frontal) {
solnPointers_.at(*frontal)->second =
solution.segment(vectorPosition, c.getDim(frontal));
vectorPosition += c.getDim(frontal);
}
} else {
// Just call plain solve because we couldn't use solution pointers.
delta->update(conditional_->solve(*delta));
}
#else
delta->update(conditional_->solve(*delta));
#endif
}
/* ************************************************************************* */
bool ISAM2Clique::valuesChanged(const KeySet& replaced,
const Vector& originalValues,
const VectorValues& delta,
double threshold) const {
auto frontals = conditional_->frontals();
if (replaced.exists(frontals.front())) return true;
auto diff = originalValues - delta.vector(frontals);
return diff.lpNorm<Eigen::Infinity>() >= threshold;
}
/* ************************************************************************* */
/// Set changed flag for each frontal variable
void ISAM2Clique::markFrontalsAsChanged(KeySet* changed) const {
for (Key frontal : conditional_->frontals()) {
changed->insert(frontal);
}
}
/* ************************************************************************* */
void ISAM2Clique::restoreFromOriginals(const Vector& originalValues,
VectorValues* delta) const {
size_t pos = 0;
for (Key frontal : conditional_->frontals()) {
auto v = delta->at(frontal);
v = originalValues.segment(pos, v.size());
pos += v.size();
}
}
/* ************************************************************************* */
// Note: not being used right now in favor of non-recursive version below.
void ISAM2Clique::optimizeWildfire(const KeySet& replaced, double threshold,
KeySet* changed, VectorValues* delta,
size_t* count) const {
if (isDirty(replaced, *changed)) {
// Temporary copy of the original values, to check how much they change
auto originalValues = delta->vector(conditional_->frontals());
// Back-substitute
fastBackSubstitute(delta);
*count += conditional_->nrFrontals();
if (valuesChanged(replaced, originalValues, *delta, threshold)) {
markFrontalsAsChanged(changed);
} else {
restoreFromOriginals(originalValues, delta);
}
// Recurse to children
for (const auto& child : children) {
child->optimizeWildfire(replaced, threshold, changed, delta, count);
}
}
}
size_t optimizeWildfire(const ISAM2::sharedClique& root, double threshold,
const KeySet& keys, VectorValues* delta) {
KeySet changed;
size_t count = 0;
// starting from the root, call optimize on each conditional
if (root) root->optimizeWildfire(keys, threshold, &changed, delta, &count);
return count;
}
/* ************************************************************************* */
bool ISAM2Clique::optimizeWildfireNode(const KeySet& replaced, double threshold,
KeySet* changed, VectorValues* delta,
size_t* count) const {
// TODO(gareth): This code shares a lot of logic w/ linearAlgorithms-inst,
// potentially refactor
bool dirty = isDirty(replaced, *changed);
if (dirty) {
// Temporary copy of the original values, to check how much they change
auto originalValues = delta->vector(conditional_->frontals());
// Back-substitute
fastBackSubstitute(delta);
*count += conditional_->nrFrontals();
if (valuesChanged(replaced, originalValues, *delta, threshold)) {
markFrontalsAsChanged(changed);
} else {
restoreFromOriginals(originalValues, delta);
}
}
return dirty;
}
size_t optimizeWildfireNonRecursive(const ISAM2::sharedClique& root,
double threshold, const KeySet& keys,
VectorValues* delta) {
KeySet changed;
size_t count = 0;
if (root) {
std::stack<ISAM2::sharedClique> travStack;
travStack.push(root);
ISAM2::sharedClique currentNode = root;
while (!travStack.empty()) {
currentNode = travStack.top();
travStack.pop();
bool dirty = currentNode->optimizeWildfireNode(keys, threshold, &changed,
delta, &count);
if (dirty) {
for (const auto& child : currentNode->children) {
travStack.push(child);
}
}
}
}
return count;
}
/* ************************************************************************* */
void ISAM2Clique::nnz_internal(size_t* result) const {
size_t dimR = conditional_->rows();
size_t dimSep = conditional_->get_S().cols();
*result += ((dimR + 1) * dimR) / 2 + dimSep * dimR;
// traverse the children
for (const auto& child : children) {
child->nnz_internal(result);
}
}
/* ************************************************************************* */
size_t ISAM2Clique::calculate_nnz() const {
size_t result = 0;
nnz_internal(&result);
return result;
}
/* ************************************************************************* */
ISAM2::ISAM2(const ISAM2Params& params) : params_(params), update_count_(0) {
if (params_.optimizationParams.type() == typeid(ISAM2DoglegParams))
@@ -622,7 +275,7 @@ boost::shared_ptr<KeySet> ISAM2::recalculate(
boost::shared_ptr<KeySet> affectedKeysSet(
new KeySet()); // Will return this result
if (affectedKeys.size() >= theta_.size() * batchThreshold) {
if (affectedKeys.size() >= theta_.size() * kBatchThreshold) {
// Do a batch step - reorder and relinearize all variables
gttic(batch);
@@ -867,7 +520,7 @@ ISAM2Result ISAM2::update(
// Update delta if we need it to check relinearization later
if (relinearizeThisStep) {
gttic(updateDelta);
updateDelta(disableReordering);
updateDelta(kDisableReordering);
gttoc(updateDelta);
}
@@ -982,7 +635,7 @@ ISAM2Result ISAM2::update(
else
relinKeys =
Impl::CheckRelinearizationFull(delta_, params_.relinearizeThreshold);
if (disableReordering)
if (kDisableReordering)
relinKeys = Impl::CheckRelinearizationFull(
delta_, 0.0); // This is used for debugging
@@ -1485,4 +1138,3 @@ VectorValues ISAM2::gradientAtZero() const {
}
} // namespace gtsam
/// namespace gtsam

gtsam/nonlinear/ISAM2.h

@@ -13,593 +13,23 @@
* @file ISAM2.h
* @brief Incremental update functionality (ISAM2) for BayesTree, with fluid
* relinearization.
* @author Michael Kaess, Richard Roberts
* @author Michael Kaess, Richard Roberts, Frank Dellaert
*/
// \callgraph
#pragma once
#include <string>
#include <gtsam/nonlinear/ISAM2Params.h>
#include <gtsam/nonlinear/ISAM2Result.h>
#include <gtsam/nonlinear/ISAM2Clique.h>
#include <gtsam/nonlinear/NonlinearFactorGraph.h>
#include <gtsam/linear/GaussianBayesTree.h>
#include <vector>
#include <gtsam/linear/GaussianBayesTree.h>
#include <gtsam/nonlinear/DoglegOptimizerImpl.h>
#include <gtsam/nonlinear/NonlinearFactorGraph.h>
#include <boost/variant.hpp>
namespace gtsam {
/**
* @addtogroup ISAM2
* Parameters for ISAM2 using Gauss-Newton optimization. Either this class or
* ISAM2DoglegParams should be specified as the optimizationParams in
* ISAM2Params, which should in turn be passed to ISAM2(const ISAM2Params&).
*/
struct GTSAM_EXPORT ISAM2GaussNewtonParams {
double
wildfireThreshold; ///< Continue updating the linear delta only when
///< changes are above this threshold (default: 0.001)
/** Specify parameters as constructor arguments */
ISAM2GaussNewtonParams(
double _wildfireThreshold =
0.001 ///< see ISAM2GaussNewtonParams public variables,
///< ISAM2GaussNewtonParams::wildfireThreshold
)
: wildfireThreshold(_wildfireThreshold) {}
void print(const std::string str = "") const {
using std::cout;
cout << str << "type: ISAM2GaussNewtonParams\n";
cout << str << "wildfireThreshold: " << wildfireThreshold << "\n";
cout.flush();
}
double getWildfireThreshold() const { return wildfireThreshold; }
void setWildfireThreshold(double wildfireThreshold) {
this->wildfireThreshold = wildfireThreshold;
}
};
/**
* @addtogroup ISAM2
* Parameters for ISAM2 using Dogleg optimization. Either this class or
* ISAM2GaussNewtonParams should be specified as the optimizationParams in
* ISAM2Params, which should in turn be passed to ISAM2(const ISAM2Params&).
*/
struct GTSAM_EXPORT ISAM2DoglegParams {
double initialDelta; ///< The initial trust region radius for Dogleg
double
wildfireThreshold; ///< Continue updating the linear delta only when
///< changes are above this threshold (default: 1e-5)
DoglegOptimizerImpl::TrustRegionAdaptationMode
adaptationMode; ///< See description in
///< DoglegOptimizerImpl::TrustRegionAdaptationMode
bool
verbose; ///< Whether Dogleg prints iteration and convergence information
/** Specify parameters as constructor arguments */
ISAM2DoglegParams(
double _initialDelta = 1.0, ///< see ISAM2DoglegParams::initialDelta
double _wildfireThreshold =
1e-5, ///< see ISAM2DoglegParams::wildfireThreshold
DoglegOptimizerImpl::TrustRegionAdaptationMode _adaptationMode =
DoglegOptimizerImpl::
SEARCH_EACH_ITERATION, ///< see ISAM2DoglegParams::adaptationMode
bool _verbose = false ///< see ISAM2DoglegParams::verbose
)
: initialDelta(_initialDelta),
wildfireThreshold(_wildfireThreshold),
adaptationMode(_adaptationMode),
verbose(_verbose) {}
void print(const std::string str = "") const {
using std::cout;
cout << str << "type: ISAM2DoglegParams\n";
cout << str << "initialDelta: " << initialDelta << "\n";
cout << str << "wildfireThreshold: " << wildfireThreshold << "\n";
cout << str
<< "adaptationMode: " << adaptationModeTranslator(adaptationMode)
<< "\n";
cout.flush();
}
double getInitialDelta() const { return initialDelta; }
double getWildfireThreshold() const { return wildfireThreshold; }
std::string getAdaptationMode() const {
return adaptationModeTranslator(adaptationMode);
}
bool isVerbose() const { return verbose; }
void setInitialDelta(double initialDelta) {
this->initialDelta = initialDelta;
}
void setWildfireThreshold(double wildfireThreshold) {
this->wildfireThreshold = wildfireThreshold;
}
void setAdaptationMode(const std::string& adaptationMode) {
this->adaptationMode = adaptationModeTranslator(adaptationMode);
}
void setVerbose(bool verbose) { this->verbose = verbose; }
std::string adaptationModeTranslator(
const DoglegOptimizerImpl::TrustRegionAdaptationMode& adaptationMode)
const;
DoglegOptimizerImpl::TrustRegionAdaptationMode adaptationModeTranslator(
const std::string& adaptationMode) const;
};
/**
* @addtogroup ISAM2
* Parameters for the ISAM2 algorithm. Default parameter values are listed
* below.
*/
typedef FastMap<char, Vector> ISAM2ThresholdMap;
typedef ISAM2ThresholdMap::value_type ISAM2ThresholdMapValue;
struct GTSAM_EXPORT ISAM2Params {
typedef boost::variant<ISAM2GaussNewtonParams, ISAM2DoglegParams>
OptimizationParams; ///< Either ISAM2GaussNewtonParams or
///< ISAM2DoglegParams
typedef boost::variant<double, FastMap<char, Vector> >
RelinearizationThreshold; ///< Either a constant relinearization
///< threshold or a per-variable-type set of
///< thresholds
/** Optimization parameters, this both selects the nonlinear optimization
* method and specifies its parameters, either ISAM2GaussNewtonParams or
* ISAM2DoglegParams. In the former, Gauss-Newton optimization will be used
* with the specified parameters, and in the latter Powell's dog-leg
* algorithm will be used with the specified parameters.
*/
OptimizationParams optimizationParams;
/** Only relinearize variables whose linear delta magnitude is greater than
* this threshold (default: 0.1). If this is a FastMap<char,Vector> instead
* of a double, then the threshold is specified for each dimension of each
* variable type. This parameter then maps from a character indicating the
* variable type to a Vector of thresholds for each dimension of that
* variable. For example, if Pose keys are of type TypedSymbol<'x',Pose3>,
* and landmark keys are of type TypedSymbol<'l',Point3>, then appropriate
* entries would be added with:
* \code
FastMap<char,Vector> thresholds;
// 0.1 rad rotation threshold, 0.5 m translation threshold
thresholds['x'] = (Vector(6) << 0.1, 0.1, 0.1, 0.5, 0.5, 0.5).finished();
thresholds['l'] = Vector3(1.0, 1.0, 1.0);  // 1.0 m landmark position threshold
params.relinearizeThreshold = thresholds;
\endcode
*/
RelinearizationThreshold relinearizeThreshold;
int relinearizeSkip; ///< Only relinearize any variables every
///< relinearizeSkip calls to ISAM2::update (default:
///< 10)
bool enableRelinearization; ///< Controls whether ISAM2 will ever relinearize
///< any variables (default: true)
bool evaluateNonlinearError; ///< Whether to evaluate the nonlinear error
///< before and after the update, to return in
///< ISAM2Result from update()
enum Factorization { CHOLESKY, QR };
/** Specifies whether to use QR or CHOLESKY numerical factorization (default:
* CHOLESKY). Cholesky is faster but potentially numerically unstable for
* poorly-conditioned problems, which can occur when uncertainty is very low
* in some variables (or dimensions of variables) and very high in others. QR
* is slower but more numerically stable in poorly-conditioned problems. We
* suggest using the default of Cholesky unless gtsam sometimes throws
* IndeterminantLinearSystemException when your problem's Hessian is actually
* positive definite. For positive definite problems, numerical error
* accumulation can cause the problem to become numerically negative or
* indefinite as solving proceeds, especially when using Cholesky.
* (A short configuration sketch follows this struct.)
*/
Factorization factorization;
/** Whether to cache linear factors (default: true).
* This can improve performance if linearization is expensive, but can hurt
* performance if linearization is very cheap due to the overhead of looking
* up additional keys.
*/
bool cacheLinearizedFactors;
KeyFormatter
keyFormatter; ///< A KeyFormatter for when keys are printed during
///< debugging (default: DefaultKeyFormatter)
bool enableDetailedResults; ///< Whether to compute and return
///< ISAM2Result::detailedResults, this can
///< increase running time (default: false)
/** Check variables for relinearization in tree-order, stopping the check once
* a variable does not need to be relinearized (default: false). This can
* improve speed by only checking a small part of the top of the tree.
* However, variables below the check cut-off can accumulate significant
* deltas without triggering relinearization. This is particularly useful in
* exploration scenarios where real-time performance is desired over
* correctness. Use with caution.
*/
bool enablePartialRelinearizationCheck;
/// When you will be removing many factors, e.g. when using ISAM2 as a
/// fixed-lag smoother, enable this option to add factors in the first
/// available factor slots, to avoid accumulating NULL factor slots, at the
/// cost of having to search for slots every time a factor is added.
bool findUnusedFactorSlots;
/**
* Specify parameters as constructor arguments
* See the documentation of member variables above.
*/
ISAM2Params(OptimizationParams _optimizationParams = ISAM2GaussNewtonParams(),
RelinearizationThreshold _relinearizeThreshold = 0.1,
int _relinearizeSkip = 10, bool _enableRelinearization = true,
bool _evaluateNonlinearError = false,
Factorization _factorization = ISAM2Params::CHOLESKY,
bool _cacheLinearizedFactors = true,
const KeyFormatter& _keyFormatter =
DefaultKeyFormatter ///< see ISAM2::Params::keyFormatter
)
: optimizationParams(_optimizationParams),
relinearizeThreshold(_relinearizeThreshold),
relinearizeSkip(_relinearizeSkip),
enableRelinearization(_enableRelinearization),
evaluateNonlinearError(_evaluateNonlinearError),
factorization(_factorization),
cacheLinearizedFactors(_cacheLinearizedFactors),
keyFormatter(_keyFormatter),
enableDetailedResults(false),
enablePartialRelinearizationCheck(false),
findUnusedFactorSlots(false) {}
/// print iSAM2 parameters
void print(const std::string& str = "") const {
using std::cout;
cout << str << "\n";
static const std::string kStr("optimizationParams: ");
if (optimizationParams.type() == typeid(ISAM2GaussNewtonParams))
boost::get<ISAM2GaussNewtonParams>(optimizationParams).print();
else if (optimizationParams.type() == typeid(ISAM2DoglegParams))
boost::get<ISAM2DoglegParams>(optimizationParams).print(kStr);
else
cout << kStr << "{unknown type}\n";
cout << "relinearizeThreshold: ";
if (relinearizeThreshold.type() == typeid(double)) {
cout << boost::get<double>(relinearizeThreshold) << "\n";
} else {
cout << "{mapped}\n";
for (const ISAM2ThresholdMapValue& value :
boost::get<ISAM2ThresholdMap>(relinearizeThreshold)) {
cout << " '" << value.first
<< "' -> [" << value.second.transpose() << " ]\n";
}
}
cout << "relinearizeSkip: " << relinearizeSkip << "\n";
cout << "enableRelinearization: " << enableRelinearization
<< "\n";
cout << "evaluateNonlinearError: " << evaluateNonlinearError
<< "\n";
cout << "factorization: "
<< factorizationTranslator(factorization) << "\n";
cout << "cacheLinearizedFactors: " << cacheLinearizedFactors
<< "\n";
cout << "enableDetailedResults: " << enableDetailedResults
<< "\n";
cout << "enablePartialRelinearizationCheck: "
<< enablePartialRelinearizationCheck << "\n";
cout << "findUnusedFactorSlots: " << findUnusedFactorSlots
<< "\n";
cout.flush();
}
/// @name Getters and Setters for all properties
/// @{
OptimizationParams getOptimizationParams() const {
return this->optimizationParams;
}
RelinearizationThreshold getRelinearizeThreshold() const {
return relinearizeThreshold;
}
int getRelinearizeSkip() const { return relinearizeSkip; }
bool isEnableRelinearization() const { return enableRelinearization; }
bool isEvaluateNonlinearError() const { return evaluateNonlinearError; }
std::string getFactorization() const {
return factorizationTranslator(factorization);
}
bool isCacheLinearizedFactors() const { return cacheLinearizedFactors; }
KeyFormatter getKeyFormatter() const { return keyFormatter; }
bool isEnableDetailedResults() const { return enableDetailedResults; }
bool isEnablePartialRelinearizationCheck() const {
return enablePartialRelinearizationCheck;
}
void setOptimizationParams(OptimizationParams optimizationParams) {
this->optimizationParams = optimizationParams;
}
void setRelinearizeThreshold(RelinearizationThreshold relinearizeThreshold) {
this->relinearizeThreshold = relinearizeThreshold;
}
void setRelinearizeSkip(int relinearizeSkip) {
this->relinearizeSkip = relinearizeSkip;
}
void setEnableRelinearization(bool enableRelinearization) {
this->enableRelinearization = enableRelinearization;
}
void setEvaluateNonlinearError(bool evaluateNonlinearError) {
this->evaluateNonlinearError = evaluateNonlinearError;
}
void setFactorization(const std::string& factorization) {
this->factorization = factorizationTranslator(factorization);
}
void setCacheLinearizedFactors(bool cacheLinearizedFactors) {
this->cacheLinearizedFactors = cacheLinearizedFactors;
}
void setKeyFormatter(KeyFormatter keyFormatter) {
this->keyFormatter = keyFormatter;
}
void setEnableDetailedResults(bool enableDetailedResults) {
this->enableDetailedResults = enableDetailedResults;
}
void setEnablePartialRelinearizationCheck(
bool enablePartialRelinearizationCheck) {
this->enablePartialRelinearizationCheck = enablePartialRelinearizationCheck;
}
GaussianFactorGraph::Eliminate getEliminationFunction() const {
return factorization == CHOLESKY
? (GaussianFactorGraph::Eliminate)EliminatePreferCholesky
: (GaussianFactorGraph::Eliminate)EliminateQR;
}
/// @}
/// @name Some utilities
/// @{
static Factorization factorizationTranslator(const std::string& str);
static std::string factorizationTranslator(const Factorization& value);
/// @}
};
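Taken together, these options are typically set on a single ISAM2Params instance that is then handed to the ISAM2 constructor. A minimal configuration sketch (values are illustrative only; object names are assumed, not taken from this diff):

ISAM2Params params;
params.relinearizeThreshold = 0.1;       // or a FastMap<char, Vector>, as in the example above
params.relinearizeSkip = 10;
params.factorization = ISAM2Params::QR;  // equivalent: params.setFactorization("QR");
params.cacheLinearizedFactors = true;
params.enableDetailedResults = true;     // also populates ISAM2Result::detail
ISAM2 isam(params);                      // ISAM2(const ISAM2Params&)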
typedef FastVector<size_t> FactorIndices;
/**
* @addtogroup ISAM2
* This struct is returned from ISAM2::update() and contains information about
* the update that is useful for determining whether the solution is
* converging, and about how much work was required for the update. See member
* variables for details and information about each entry.
*/
struct GTSAM_EXPORT ISAM2Result {
/** The nonlinear error of all of the factors, \a including new factors and
* variables added during the current call to ISAM2::update(). This error is
* calculated using the following variable values:
* \li Pre-existing variables will be evaluated by combining their
* linearization point before this call to update, with their partial linear
* delta, as computed by ISAM2::calculateEstimate().
* \li New variables will be evaluated at their initialization points passed
* into the current call to update.
* \par Note: This will only be computed if
* ISAM2Params::evaluateNonlinearError is set to \c true, because there is
* some cost to this computation.
*/
boost::optional<double> errorBefore;
/** The nonlinear error of all of the factors computed after the current
* update, meaning that variables above the relinearization threshold
* (ISAM2Params::relinearizeThreshold) have been relinearized and new
* variables have undergone one linear update. Variable values are
* again computed by combining their linearization points with their
* partial linear deltas, by ISAM2::calculateEstimate().
* \par Note: This will only be computed if
* ISAM2Params::evaluateNonlinearError is set to \c true, because there is
* some cost to this computation.
*/
boost::optional<double> errorAfter;
/** The number of variables that were relinearized because their linear
* deltas exceeded the relinearization threshold
* (ISAM2Params::relinearizeThreshold), combined with any additional
* variables that had to be relinearized because they were involved in
* the same factor as a variable above the relinearization threshold.
* On steps where no relinearization is considered
* (see ISAM2Params::relinearizeSkip), this count will be zero.
*/
size_t variablesRelinearized;
/** The number of variables that were reeliminated as parts of the Bayes'
* Tree were recalculated, due to new factors. When loop closures occur,
* this count will be large as the new loop-closing factors will tend to
* involve variables far away from the root, and everything up to the root
* will be reeliminated.
*/
size_t variablesReeliminated;
/** The number of factors that were included in reelimination of the Bayes'
* tree. */
size_t factorsRecalculated;
/** The number of cliques in the Bayes' Tree */
size_t cliques;
/** The indices of the newly-added factors, in 1-to-1 correspondence with the
* factors passed as \c newFactors to ISAM2::update(). These indices may be
* used later to refer to the factors in order to remove them.
*/
FactorIndices newFactorsIndices;
/** A struct holding detailed results, which must be enabled with
* ISAM2Params::enableDetailedResults.
*/
struct DetailedResults {
/** The status of a single variable, this struct is stored in
* DetailedResults::variableStatus */
struct VariableStatus {
/** Whether the variable was just reeliminated, due to being relinearized,
* observed, new, or on the path up to the root clique from another
* reeliminated variable. */
bool isReeliminated;
bool isAboveRelinThreshold; ///< Whether the variable was just
///< relinearized due to being above the
///< relinearization threshold
bool isRelinearizeInvolved; ///< Whether the variable was below the
///< relinearization threshold but was
///< relinearized by being involved in a
///< factor with a variable above the
///< relinearization threshold
bool isRelinearized; ///< Whether the variable was relinearized, either by
///< being above the relinearization threshold or by
///< involvement.
bool isObserved; ///< Whether the variable was just involved in new
///< factors
bool isNew; ///< Whether the variable itself was just added
bool inRootClique; ///< Whether the variable is in the root clique
VariableStatus()
: isReeliminated(false),
isAboveRelinThreshold(false),
isRelinearizeInvolved(false),
isRelinearized(false),
isObserved(false),
isNew(false),
inRootClique(false) {}
};
/** The status of each variable during this update, see VariableStatus.
*/
FastMap<Key, VariableStatus> variableStatus;
};
/** Detailed results, if enabled by ISAM2Params::enableDetailedResults. See
* Detail for information about the results data stored here. */
boost::optional<DetailedResults> detail;
void print(const std::string str = "") const {
using std::cout;
cout << str << " Reelimintated: " << variablesReeliminated
<< " Relinearized: " << variablesRelinearized
<< " Cliques: " << cliques << std::endl;
}
/** Getters and Setters */
size_t getVariablesRelinearized() const { return variablesRelinearized; }
size_t getVariablesReeliminated() const { return variablesReeliminated; }
size_t getCliques() const { return cliques; }
};
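A hedged usage sketch for reading the result of an update (isam, newFactors, and newValues are assumed to exist; errorBefore/errorAfter are only filled in when ISAM2Params::evaluateNonlinearError is true):

ISAM2Result result = isam.update(newFactors, newValues);
if (result.errorBefore && result.errorAfter)
  std::cout << "error: " << *result.errorBefore << " -> " << *result.errorAfter << "\n";
std::cout << "relinearized: " << result.variablesRelinearized
          << ", reeliminated: " << result.variablesReeliminated
          << ", cliques: " << result.cliques << "\n";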
/**
* Specialized Clique structure for ISAM2, incorporating caching and gradient
* contribution
* TODO: more documentation
*/
class GTSAM_EXPORT ISAM2Clique
: public BayesTreeCliqueBase<ISAM2Clique, GaussianFactorGraph> {
public:
typedef ISAM2Clique This;
typedef BayesTreeCliqueBase<This, GaussianFactorGraph> Base;
typedef boost::shared_ptr<This> shared_ptr;
typedef boost::weak_ptr<This> weak_ptr;
typedef GaussianConditional ConditionalType;
typedef ConditionalType::shared_ptr sharedConditional;
Base::FactorType::shared_ptr cachedFactor_;
Vector gradientContribution_;
#ifdef USE_BROKEN_FAST_BACKSUBSTITUTE
mutable FastMap<Key, VectorValues::iterator> solnPointers_;
#endif
/// Default constructor
ISAM2Clique() : Base() {}
/// Copy constructor, does *not* copy solution pointers as these are invalid
/// in different trees.
ISAM2Clique(const ISAM2Clique& other)
: Base(other),
cachedFactor_(other.cachedFactor_),
gradientContribution_(other.gradientContribution_) {}
/// Assignment operator, does *not* copy solution pointers as these are
/// invalid in different trees.
ISAM2Clique& operator=(const ISAM2Clique& other) {
Base::operator=(other);
cachedFactor_ = other.cachedFactor_;
gradientContribution_ = other.gradientContribution_;
return *this;
}
/// Overridden to also store the remaining factor and gradient contribution
void setEliminationResult(
const FactorGraphType::EliminationResult& eliminationResult);
/** Access the cached factor */
Base::FactorType::shared_ptr& cachedFactor() { return cachedFactor_; }
/** Access the gradient contribution */
const Vector& gradientContribution() const { return gradientContribution_; }
bool equals(const This& other, double tol = 1e-9) const;
/** print this node */
void print(const std::string& s = "",
const KeyFormatter& formatter = DefaultKeyFormatter) const;
void optimizeWildfire(const KeySet& replaced, double threshold,
KeySet* changed, VectorValues* delta,
size_t* count) const;
bool optimizeWildfireNode(const KeySet& replaced, double threshold,
KeySet* changed, VectorValues* delta,
size_t* count) const;
/**
* Starting from the root, add up entries of frontal and conditional matrices
* of each conditional
*/
void nnz_internal(size_t* result) const;
size_t calculate_nnz() const;
private:
/**
* Check if clique was replaced, or if any parents were changed above the
* threshold or themselves replaced.
*/
bool isDirty(const KeySet& replaced, const KeySet& changed) const;
/**
* Back-substitute - special version stores solution pointers in cliques for
* fast access.
*/
void fastBackSubstitute(VectorValues* delta) const;
/*
* Check whether the values changed above a threshold, or always true if the
* clique was replaced.
*/
bool valuesChanged(const KeySet& replaced, const Vector& originalValues,
const VectorValues& delta, double threshold) const;
/// Set changed flag for each frontal variable
void markFrontalsAsChanged(KeySet* changed) const;
/// Restore delta to original values, guided by frontal keys.
void restoreFromOriginals(const Vector& originalValues,
VectorValues* delta) const;
/** Serialization function */
friend class boost::serialization::access;
template <class ARCHIVE>
void serialize(ARCHIVE& ar, const unsigned int /*version*/) {
ar& BOOST_SERIALIZATION_BASE_OBJECT_NVP(Base);
ar& BOOST_SERIALIZATION_NVP(cachedFactor_);
ar& BOOST_SERIALIZATION_NVP(gradientContribution_);
}
}; // \struct ISAM2Clique
/**
* @addtogroup ISAM2
* Implementation of the full ISAM2 algorithm for incremental nonlinear
@@ -643,7 +73,7 @@ class GTSAM_EXPORT ISAM2 : public BayesTree<ISAM2Clique> {
* until it is needed.
*/
mutable KeySet
deltaReplacedMask_; // TODO: Make sure accessed in the right way
deltaReplacedMask_; // TODO(dellaert): Make sure accessed in the right way
/** All original nonlinear factors are stored here to use during
* relinearization */
@@ -768,7 +198,11 @@ class GTSAM_EXPORT ISAM2 : public BayesTree<ISAM2Clique> {
* @return
*/
template <class VALUE>
VALUE calculateEstimate(Key key) const;
VALUE calculateEstimate(Key key) const {
const Vector& delta = getDelta()[key];
return traits<VALUE>::Retract(theta_.at<VALUE>(key), delta);
}
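With ISAM2-inl.h gone, the templated calculateEstimate is now defined inline above; usage is unchanged, and the non-templated overloads are untouched by this commit. A small sketch (it assumes the value stored under the key is a Pose2):

Pose2 pose = isam.calculateEstimate<Pose2>(Symbol('x', 1));
Values all = isam.calculateEstimate();  // full back-substituted estimate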
/** Compute an estimate for a single variable using its incomplete linear
* delta computed during the last update. This is faster than calling the
@@ -846,32 +280,12 @@ class GTSAM_EXPORT ISAM2 : public BayesTree<ISAM2Clique> {
const boost::optional<FastMap<Key, int> >& constrainKeys,
ISAM2Result& result);
void updateDelta(bool forceFullSolve = false) const;
}; // ISAM2
/// traits
template <>
struct traits<ISAM2> : public Testable<ISAM2> {};
/**
* Optimize the BayesTree, starting from the root.
* @param threshold The maximum change against the PREVIOUS delta for
* non-replaced variables that can be ignored, ie. the old delta entry is kept
* and recursive backsubstitution might eventually stop if none of the changed
* variables are contained in the subtree.
* @param replaced Needs to contain all variables that are contained in the top
* of the Bayes tree that has been redone.
* @return The number of variables that were solved for.
* @param delta The current solution, an offset from the linearization point.
*/
size_t optimizeWildfire(const ISAM2::sharedClique& root, double threshold,
const KeySet& replaced, VectorValues* delta);
size_t optimizeWildfireNonRecursive(const ISAM2::sharedClique& root,
double threshold, const KeySet& replaced,
VectorValues* delta);
} // namespace gtsam
#include <gtsam/nonlinear/ISAM2-impl.h>
#include <gtsam/nonlinear/ISAM2-inl.h>

gtsam/nonlinear/ISAM2Clique.cpp (new file)

@@ -0,0 +1,302 @@
/* ----------------------------------------------------------------------------
* GTSAM Copyright 2010, Georgia Tech Research Corporation,
* Atlanta, Georgia 30332-0415
* All Rights Reserved
* Authors: Frank Dellaert, et al. (see THANKS for the full author list)
* See LICENSE for the license information
* -------------------------------------------------------------------------- */
/**
* @file ISAM2Clique.cpp
* @brief Specialized iSAM2 Clique
* @author Michael Kaess, Richard Roberts, Frank Dellaert
*/
#include <gtsam/nonlinear/ISAM2Clique.h>
#include <gtsam/linear/VectorValues.h>
#include <gtsam/linear/linearAlgorithms-inst.h>
#include <gtsam/inference/BayesTreeCliqueBase-inst.h>
#include <stack>
using namespace std;
namespace gtsam {
// Instantiate base class
template class BayesTreeCliqueBase<ISAM2Clique, GaussianFactorGraph>;
/* ************************************************************************* */
void ISAM2Clique::setEliminationResult(
const FactorGraphType::EliminationResult& eliminationResult) {
conditional_ = eliminationResult.first;
cachedFactor_ = eliminationResult.second;
// Compute gradient contribution
gradientContribution_.resize(conditional_->cols() - 1);
// Rewrite -(R * P')'*d as -(d' * R * P')' for computational speed
// reasons
gradientContribution_ << -conditional_->get_R().transpose() *
conditional_->get_d(),
-conditional_->get_S().transpose() * conditional_->get_d();
}
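A note on the quantity cached above (my reading of the code, stated in the notation of the conditional [R S | d]): the stored vector is the gradient of the clique's quadratic error evaluated at zero,

\nabla \left.\tfrac{1}{2}\lVert R\,x_F + S\,x_S - d\rVert^2\right|_{x=0} = \begin{bmatrix} -R^\top d \\ -S^\top d \end{bmatrix},

which matches the two stacked products the code computes; ISAM2::gradientAtZero() sums these contributions over all cliques.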
/* ************************************************************************* */
bool ISAM2Clique::equals(const This& other, double tol) const {
return Base::equals(other) &&
((!cachedFactor_ && !other.cachedFactor_) ||
(cachedFactor_ && other.cachedFactor_ &&
cachedFactor_->equals(*other.cachedFactor_, tol)));
}
/* ************************************************************************* */
void ISAM2Clique::print(const string& s, const KeyFormatter& formatter) const {
Base::print(s, formatter);
if (cachedFactor_)
cachedFactor_->print(s + "Cached: ", formatter);
else
cout << s << "Cached empty" << endl;
if (gradientContribution_.rows() != 0)
gtsam::print(gradientContribution_, "Gradient contribution: ");
}
/* ************************************************************************* */
bool ISAM2Clique::isDirty(const KeySet& replaced, const KeySet& changed) const {
// if none of the variables in this clique (frontal and separator!) changed
// significantly, then by the running intersection property, none of the
// cliques in the children need to be processed
// Are any clique variables part of the tree that has been redone?
bool dirty = replaced.exists(conditional_->frontals().front());
#if !defined(NDEBUG) && defined(GTSAM_EXTRA_CONSISTENCY_CHECKS)
for (Key frontal : conditional_->frontals()) {
assert(dirty == replaced.exists(frontal));
}
#endif
// If not, then has one of the separator variables changed significantly?
if (!dirty) {
for (Key parent : conditional_->parents()) {
if (changed.exists(parent)) {
dirty = true;
break;
}
}
}
return dirty;
}
/* ************************************************************************* */
/**
* Back-substitute - special version stores solution pointers in cliques for
* fast access.
*/
void ISAM2Clique::fastBackSubstitute(VectorValues* delta) const {
#ifdef USE_BROKEN_FAST_BACKSUBSTITUTE
// TODO(gareth): This code shares a lot of logic w/ linearAlgorithms-inst,
// potentially refactor
// Create solution part pointers if necessary and possible - necessary if
// solnPointers_ is empty, and possible if either we're a root, or we have
// a parent with valid solnPointers_.
ISAM2Clique::shared_ptr parent = parent_.lock();
if (solnPointers_.empty() && (isRoot() || !parent->solnPointers_.empty())) {
for (Key frontal : conditional_->frontals())
solnPointers_.emplace(frontal, delta->find(frontal));
for (Key parentKey : conditional_->parents()) {
assert(parent->solnPointers_.exists(parentKey));
solnPointers_.emplace(parentKey, parent->solnPointers_.at(parentKey));
}
}
// See if we can use solution part pointers - we can if they either
// already existed or were created above.
if (!solnPointers_.empty()) {
GaussianConditional& c = *conditional_;
// Solve matrix
Vector xS;
{
// Count dimensions of vector
DenseIndex dim = 0;
FastVector<VectorValues::const_iterator> parentPointers;
parentPointers.reserve(conditional_->nrParents());
for (Key parent : conditional_->parents()) {
parentPointers.push_back(solnPointers_.at(parent));
dim += parentPointers.back()->second.size();
}
// Fill parent vector
xS.resize(dim);
DenseIndex vectorPos = 0;
for (const VectorValues::const_iterator& parentPointer : parentPointers) {
const Vector& parentVector = parentPointer->second;
xS.block(vectorPos, 0, parentVector.size(), 1) =
parentVector.block(0, 0, parentVector.size(), 1);
vectorPos += parentVector.size();
}
}
// NOTE(gareth): We can no longer write: xS = b - S * xS
// This is because Eigen (as of 3.3) no longer evaluates S * xS into
// a temporary, and the operation trashes values in xS.
// See: http://eigen.tuxfamily.org/index.php?title=3.3
const Vector rhs = c.getb() - c.get_S() * xS;
const Vector solution = c.get_R().triangularView<Eigen::Upper>().solve(rhs);
// Check for indeterminant solution
if (solution.hasNaN())
throw IndeterminantLinearSystemException(c.keys().front());
// Insert solution into a VectorValues
DenseIndex vectorPosition = 0;
for (GaussianConditional::const_iterator frontal = c.beginFrontals();
frontal != c.endFrontals(); ++frontal) {
solnPointers_.at(*frontal)->second =
solution.segment(vectorPosition, c.getDim(frontal));
vectorPosition += c.getDim(frontal);
}
} else {
// Just call plain solve because we couldn't use solution pointers.
delta->update(conditional_->solve(*delta));
}
#else
delta->update(conditional_->solve(*delta));
#endif
}
/* ************************************************************************* */
bool ISAM2Clique::valuesChanged(const KeySet& replaced,
const Vector& originalValues,
const VectorValues& delta,
double threshold) const {
auto frontals = conditional_->frontals();
if (replaced.exists(frontals.front())) return true;
auto diff = originalValues - delta.vector(frontals);
return diff.lpNorm<Eigen::Infinity>() >= threshold;
}
/* ************************************************************************* */
/// Set changed flag for each frontal variable
void ISAM2Clique::markFrontalsAsChanged(KeySet* changed) const {
for (Key frontal : conditional_->frontals()) {
changed->insert(frontal);
}
}
/* ************************************************************************* */
void ISAM2Clique::restoreFromOriginals(const Vector& originalValues,
VectorValues* delta) const {
size_t pos = 0;
for (Key frontal : conditional_->frontals()) {
auto v = delta->at(frontal);
v = originalValues.segment(pos, v.size());
pos += v.size();
}
}
/* ************************************************************************* */
// Note: not being used right now in favor of non-recursive version below.
void ISAM2Clique::optimizeWildfire(const KeySet& replaced, double threshold,
KeySet* changed, VectorValues* delta,
size_t* count) const {
if (isDirty(replaced, *changed)) {
// Temporary copy of the original values, to check how much they change
auto originalValues = delta->vector(conditional_->frontals());
// Back-substitute
fastBackSubstitute(delta);
*count += conditional_->nrFrontals();
if (valuesChanged(replaced, originalValues, *delta, threshold)) {
markFrontalsAsChanged(changed);
} else {
restoreFromOriginals(originalValues, delta);
}
// Recurse to children
for (const auto& child : children) {
child->optimizeWildfire(replaced, threshold, changed, delta, count);
}
}
}
size_t optimizeWildfire(const ISAM2Clique::shared_ptr& root, double threshold,
const KeySet& keys, VectorValues* delta) {
KeySet changed;
size_t count = 0;
// starting from the root, call optimize on each conditional
if (root) root->optimizeWildfire(keys, threshold, &changed, delta, &count);
return count;
}
/* ************************************************************************* */
bool ISAM2Clique::optimizeWildfireNode(const KeySet& replaced, double threshold,
KeySet* changed, VectorValues* delta,
size_t* count) const {
// TODO(gareth): This code shares a lot of logic w/ linearAlgorithms-inst,
// potentially refactor
bool dirty = isDirty(replaced, *changed);
if (dirty) {
// Temporary copy of the original values, to check how much they change
auto originalValues = delta->vector(conditional_->frontals());
// Back-substitute
fastBackSubstitute(delta);
*count += conditional_->nrFrontals();
if (valuesChanged(replaced, originalValues, *delta, threshold)) {
markFrontalsAsChanged(changed);
} else {
restoreFromOriginals(originalValues, delta);
}
}
return dirty;
}
size_t optimizeWildfireNonRecursive(const ISAM2Clique::shared_ptr& root,
double threshold, const KeySet& keys,
VectorValues* delta) {
KeySet changed;
size_t count = 0;
if (root) {
std::stack<ISAM2Clique::shared_ptr> travStack;
travStack.push(root);
ISAM2Clique::shared_ptr currentNode = root;
while (!travStack.empty()) {
currentNode = travStack.top();
travStack.pop();
bool dirty = currentNode->optimizeWildfireNode(keys, threshold, &changed,
delta, &count);
if (dirty) {
for (const auto& child : currentNode->children) {
travStack.push(child);
}
}
}
}
return count;
}
/* ************************************************************************* */
void ISAM2Clique::nnz_internal(size_t* result) const {
size_t dimR = conditional_->rows();
size_t dimSep = conditional_->get_S().cols();
*result += ((dimR + 1) * dimR) / 2 + dimSep * dimR;
// traverse the children
for (const auto& child : children) {
child->nnz_internal(result);
}
}
/* ************************************************************************* */
size_t ISAM2Clique::calculate_nnz() const {
size_t result = 0;
nnz_internal(&result);
return result;
}
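In formula form (a reading of nnz_internal above, not diff text): a clique with d_R frontal rows and a separator of width d_S contributes

\frac{d_R (d_R + 1)}{2} + d_R\, d_S

nonzeros, i.e. the upper-triangular R block plus the dense S block; calculate_nnz sums this over the clique's entire subtree.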
} // namespace gtsam

gtsam/nonlinear/ISAM2Clique.h (new file)

@@ -0,0 +1,157 @@
/* ----------------------------------------------------------------------------
* GTSAM Copyright 2010, Georgia Tech Research Corporation,
* Atlanta, Georgia 30332-0415
* All Rights Reserved
* Authors: Frank Dellaert, et al. (see THANKS for the full author list)
* See LICENSE for the license information
* -------------------------------------------------------------------------- */
/**
* @file ISAM2Clique.h
* @brief Specialized iSAM2 Clique
* @author Michael Kaess, Richard Roberts
*/
// \callgraph
#pragma once
#include <gtsam/linear/GaussianBayesNet.h>
#include <gtsam/linear/GaussianConditional.h>
#include <gtsam/linear/GaussianFactorGraph.h>
#include <gtsam/inference/BayesTreeCliqueBase.h>
#include <gtsam/inference/Key.h>
#include <string>
namespace gtsam {
/**
* Specialized Clique structure for ISAM2, incorporating caching and gradient
* contribution
* TODO: more documentation
*/
class GTSAM_EXPORT ISAM2Clique
: public BayesTreeCliqueBase<ISAM2Clique, GaussianFactorGraph> {
public:
typedef ISAM2Clique This;
typedef BayesTreeCliqueBase<This, GaussianFactorGraph> Base;
typedef boost::shared_ptr<This> shared_ptr;
typedef boost::weak_ptr<This> weak_ptr;
typedef GaussianConditional ConditionalType;
typedef ConditionalType::shared_ptr sharedConditional;
Base::FactorType::shared_ptr cachedFactor_;
Vector gradientContribution_;
#ifdef USE_BROKEN_FAST_BACKSUBSTITUTE
mutable FastMap<Key, VectorValues::iterator> solnPointers_;
#endif
/// Default constructor
ISAM2Clique() : Base() {}
/// Copy constructor, does *not* copy solution pointers as these are invalid
/// in different trees.
ISAM2Clique(const ISAM2Clique& other)
: Base(other),
cachedFactor_(other.cachedFactor_),
gradientContribution_(other.gradientContribution_) {}
/// Assignment operator, does *not* copy solution pointers as these are
/// invalid in different trees.
ISAM2Clique& operator=(const ISAM2Clique& other) {
Base::operator=(other);
cachedFactor_ = other.cachedFactor_;
gradientContribution_ = other.gradientContribution_;
return *this;
}
/// Overridden to also store the remaining factor and gradient contribution
void setEliminationResult(
const FactorGraphType::EliminationResult& eliminationResult);
/** Access the cached factor */
Base::FactorType::shared_ptr& cachedFactor() { return cachedFactor_; }
/** Access the gradient contribution */
const Vector& gradientContribution() const { return gradientContribution_; }
bool equals(const This& other, double tol = 1e-9) const;
/** print this node */
void print(const std::string& s = "",
const KeyFormatter& formatter = DefaultKeyFormatter) const;
void optimizeWildfire(const KeySet& replaced, double threshold,
KeySet* changed, VectorValues* delta,
size_t* count) const;
bool optimizeWildfireNode(const KeySet& replaced, double threshold,
KeySet* changed, VectorValues* delta,
size_t* count) const;
/**
* Starting from the root, add up entries of frontal and conditional matrices
* of each conditional
*/
void nnz_internal(size_t* result) const;
size_t calculate_nnz() const;
private:
/**
* Check if clique was replaced, or if any parents were changed above the
* threshold or themselves replaced.
*/
bool isDirty(const KeySet& replaced, const KeySet& changed) const;
/**
* Back-substitute - special version stores solution pointers in cliques for
* fast access.
*/
void fastBackSubstitute(VectorValues* delta) const;
/*
* Check whether the values changed above a threshold, or always true if the
* clique was replaced.
*/
bool valuesChanged(const KeySet& replaced, const Vector& originalValues,
const VectorValues& delta, double threshold) const;
/// Set changed flag for each frontal variable
void markFrontalsAsChanged(KeySet* changed) const;
/// Restore delta to original values, guided by frontal keys.
void restoreFromOriginals(const Vector& originalValues,
VectorValues* delta) const;
/** Serialization function */
friend class boost::serialization::access;
template <class ARCHIVE>
void serialize(ARCHIVE& ar, const unsigned int /*version*/) {
ar& BOOST_SERIALIZATION_BASE_OBJECT_NVP(Base);
ar& BOOST_SERIALIZATION_NVP(cachedFactor_);
ar& BOOST_SERIALIZATION_NVP(gradientContribution_);
}
}; // \struct ISAM2Clique
/**
* Optimize the BayesTree, starting from the root.
* @param threshold The maximum change against the PREVIOUS delta for
* non-replaced variables that can be ignored, ie. the old delta entry is kept
* and recursive backsubstitution might eventually stop if none of the changed
* variables are contained in the subtree.
* @param replaced Needs to contain all variables that are contained in the top
* of the Bayes tree that has been redone.
* @return The number of variables that were solved for.
* @param delta The current solution, an offset from the linearization point.
*/
size_t optimizeWildfire(const ISAM2Clique::shared_ptr& root, double threshold,
const KeySet& replaced, VectorValues* delta);
size_t optimizeWildfireNonRecursive(const ISAM2Clique::shared_ptr& root,
double threshold, const KeySet& replaced,
VectorValues* delta);
} // namespace gtsam

gtsam/nonlinear/ISAM2Params.cpp (new file)

@@ -0,0 +1,89 @@
/* ----------------------------------------------------------------------------
* GTSAM Copyright 2010, Georgia Tech Research Corporation,
* Atlanta, Georgia 30332-0415
* All Rights Reserved
* Authors: Frank Dellaert, et al. (see THANKS for the full author list)
* See LICENSE for the license information
* -------------------------------------------------------------------------- */
/**
* @file ISAM2Params.cpp
* @brief Parameters for iSAM 2.
* @author Michael Kaess, Richard Roberts, Frank Dellaert
*/
#include <gtsam/nonlinear/ISAM2Params.h>
#include <boost/algorithm/string.hpp>
using namespace std;
namespace gtsam {
/* ************************************************************************* */
string ISAM2DoglegParams::adaptationModeTranslator(
const DoglegOptimizerImpl::TrustRegionAdaptationMode& adaptationMode)
const {
string s;
switch (adaptationMode) {
case DoglegOptimizerImpl::SEARCH_EACH_ITERATION:
s = "SEARCH_EACH_ITERATION";
break;
case DoglegOptimizerImpl::ONE_STEP_PER_ITERATION:
s = "ONE_STEP_PER_ITERATION";
break;
default:
s = "UNDEFINED";
break;
}
return s;
}
/* ************************************************************************* */
DoglegOptimizerImpl::TrustRegionAdaptationMode
ISAM2DoglegParams::adaptationModeTranslator(
const string& adaptationMode) const {
string s = adaptationMode;
boost::algorithm::to_upper(s);
if (s == "SEARCH_EACH_ITERATION")
return DoglegOptimizerImpl::SEARCH_EACH_ITERATION;
if (s == "ONE_STEP_PER_ITERATION")
return DoglegOptimizerImpl::ONE_STEP_PER_ITERATION;
/* default is SEARCH_EACH_ITERATION */
return DoglegOptimizerImpl::SEARCH_EACH_ITERATION;
}
/* ************************************************************************* */
ISAM2Params::Factorization ISAM2Params::factorizationTranslator(
const string& str) {
string s = str;
boost::algorithm::to_upper(s);
if (s == "QR") return ISAM2Params::QR;
if (s == "CHOLESKY") return ISAM2Params::CHOLESKY;
/* default is CHOLESKY */
return ISAM2Params::CHOLESKY;
}
/* ************************************************************************* */
string ISAM2Params::factorizationTranslator(
const ISAM2Params::Factorization& value) {
string s;
switch (value) {
case ISAM2Params::QR:
s = "QR";
break;
case ISAM2Params::CHOLESKY:
s = "CHOLESKY";
break;
default:
s = "UNDEFINED";
break;
}
return s;
}
} // namespace gtsam

gtsam/nonlinear/ISAM2Params.h (new file)

@@ -0,0 +1,365 @@
/* ----------------------------------------------------------------------------
* GTSAM Copyright 2010, Georgia Tech Research Corporation,
* Atlanta, Georgia 30332-0415
* All Rights Reserved
* Authors: Frank Dellaert, et al. (see THANKS for the full author list)
* See LICENSE for the license information
* -------------------------------------------------------------------------- */
/**
* @file ISAM2Params.h
* @brief Parameters for iSAM 2.
* @author Michael Kaess, Richard Roberts, Frank Dellaert
*/
// \callgraph
#pragma once
#include <gtsam/linear/GaussianFactorGraph.h>
#include <gtsam/nonlinear/DoglegOptimizerImpl.h>
#include <boost/variant.hpp>
#include <string>
namespace gtsam {
/**
* @addtogroup ISAM2
* Parameters for ISAM2 using Gauss-Newton optimization. Either this class or
* ISAM2DoglegParams should be specified as the optimizationParams in
* ISAM2Params, which should in turn be passed to ISAM2(const ISAM2Params&).
*/
struct GTSAM_EXPORT ISAM2GaussNewtonParams {
double
wildfireThreshold; ///< Continue updating the linear delta only when
///< changes are above this threshold (default: 0.001)
/** Specify parameters as constructor arguments */
ISAM2GaussNewtonParams(
double _wildfireThreshold =
0.001 ///< see ISAM2GaussNewtonParams public variables,
///< ISAM2GaussNewtonParams::wildfireThreshold
)
: wildfireThreshold(_wildfireThreshold) {}
void print(const std::string str = "") const {
using std::cout;
cout << str << "type: ISAM2GaussNewtonParams\n";
cout << str << "wildfireThreshold: " << wildfireThreshold << "\n";
cout.flush();
}
double getWildfireThreshold() const { return wildfireThreshold; }
void setWildfireThreshold(double wildfireThreshold) {
this->wildfireThreshold = wildfireThreshold;
}
};
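/* Example (a minimal, hedged usage sketch; ISAM2 and ISAM2Params are assumed
 * to be available via <gtsam/nonlinear/ISAM2.h>):
 * \code
 * ISAM2GaussNewtonParams gnParams;
 * gnParams.setWildfireThreshold(0.001);  // the documented default, made explicit
 * ISAM2Params params(gnParams);          // selects Gauss-Newton updates
 * ISAM2 isam(params);
 * \endcode
 */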
/**
* @addtogroup ISAM2
* Parameters for ISAM2 using Dogleg optimization. Either this class or
* ISAM2GaussNewtonParams should be specified as the optimizationParams in
* ISAM2Params, which should in turn be passed to ISAM2(const ISAM2Params&).
*/
struct GTSAM_EXPORT ISAM2DoglegParams {
double initialDelta; ///< The initial trust region radius for Dogleg
double
wildfireThreshold; ///< Continue updating the linear delta only when
///< changes are above this threshold (default: 1e-5)
DoglegOptimizerImpl::TrustRegionAdaptationMode
adaptationMode; ///< See description in
///< DoglegOptimizerImpl::TrustRegionAdaptationMode
bool
verbose; ///< Whether Dogleg prints iteration and convergence information
/** Specify parameters as constructor arguments */
ISAM2DoglegParams(
double _initialDelta = 1.0, ///< see ISAM2DoglegParams::initialDelta
double _wildfireThreshold =
1e-5, ///< see ISAM2DoglegParams::wildfireThreshold
DoglegOptimizerImpl::TrustRegionAdaptationMode _adaptationMode =
DoglegOptimizerImpl::
SEARCH_EACH_ITERATION, ///< see ISAM2DoglegParams::adaptationMode
bool _verbose = false ///< see ISAM2DoglegParams::verbose
)
: initialDelta(_initialDelta),
wildfireThreshold(_wildfireThreshold),
adaptationMode(_adaptationMode),
verbose(_verbose) {}
void print(const std::string str = "") const {
using std::cout;
cout << str << "type: ISAM2DoglegParams\n";
cout << str << "initialDelta: " << initialDelta << "\n";
cout << str << "wildfireThreshold: " << wildfireThreshold << "\n";
cout << str
<< "adaptationMode: " << adaptationModeTranslator(adaptationMode)
<< "\n";
cout.flush();
}
double getInitialDelta() const { return initialDelta; }
double getWildfireThreshold() const { return wildfireThreshold; }
std::string getAdaptationMode() const {
return adaptationModeTranslator(adaptationMode);
}
bool isVerbose() const { return verbose; }
void setInitialDelta(double initialDelta) {
this->initialDelta = initialDelta;
}
void setWildfireThreshold(double wildfireThreshold) {
this->wildfireThreshold = wildfireThreshold;
}
void setAdaptationMode(const std::string& adaptationMode) {
this->adaptationMode = adaptationModeTranslator(adaptationMode);
}
void setVerbose(bool verbose) { this->verbose = verbose; }
std::string adaptationModeTranslator(
const DoglegOptimizerImpl::TrustRegionAdaptationMode& adaptationMode)
const;
DoglegOptimizerImpl::TrustRegionAdaptationMode adaptationModeTranslator(
const std::string& adaptationMode) const;
};
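/* Example (a minimal, hedged usage sketch):
 * \code
 * ISAM2DoglegParams dlParams(1.0, 1e-5);  // initialDelta, wildfireThreshold
 * dlParams.setAdaptationMode("ONE_STEP_PER_ITERATION");
 * ISAM2Params params(dlParams);           // selects Powell's dog-leg updates
 * ISAM2 isam(params);
 * \endcode
 */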
/**
* @addtogroup ISAM2
* Parameters for the ISAM2 algorithm. Default parameter values are listed
* below.
*/
typedef FastMap<char, Vector> ISAM2ThresholdMap;
typedef ISAM2ThresholdMap::value_type ISAM2ThresholdMapValue;
struct GTSAM_EXPORT ISAM2Params {
typedef boost::variant<ISAM2GaussNewtonParams, ISAM2DoglegParams>
OptimizationParams; ///< Either ISAM2GaussNewtonParams or
///< ISAM2DoglegParams
typedef boost::variant<double, FastMap<char, Vector> >
RelinearizationThreshold; ///< Either a constant relinearization
///< threshold or a per-variable-type set of
///< thresholds
/** Optimization parameters, this both selects the nonlinear optimization
* method and specifies its parameters, either ISAM2GaussNewtonParams or
* ISAM2DoglegParams. In the former, Gauss-Newton optimization will be used
* with the specified parameters, and in the latter Powell's dog-leg
* algorithm will be used with the specified parameters.
*/
OptimizationParams optimizationParams;
/** Only relinearize variables whose linear delta magnitude is greater than
* this threshold (default: 0.1). If this is a FastMap<char,Vector> instead
* of a double, then the threshold is specified for each dimension of each
* variable type. This parameter then maps from a character indicating the
* variable type to a Vector of thresholds for each dimension of that
* variable. For example, if Pose keys are of type TypedSymbol<'x',Pose3>,
* and landmark keys are of type TypedSymbol<'l',Point3>, then appropriate
* entries would be added with:
* \code
FastMap<char,Vector> thresholds;
thresholds['x'] = (Vector(6) << 0.1, 0.1, 0.1, 0.5, 0.5, 0.5).finished();
// 0.1 rad rotation threshold, 0.5 m translation threshold
thresholds['l'] = Vector3(1.0, 1.0, 1.0); // 1.0 m landmark position threshold
params.relinearizeThreshold = thresholds;
\endcode
*/
RelinearizationThreshold relinearizeThreshold;
int relinearizeSkip; ///< Only relinearize variables every relinearizeSkip
///< calls to ISAM2::update (default: 10)
bool enableRelinearization; ///< Controls whether ISAM2 will ever relinearize
///< any variables (default: true)
bool evaluateNonlinearError; ///< Whether to evaluate the nonlinear error
///< before and after the update, to return in
///< ISAM2Result from update()
enum Factorization { CHOLESKY, QR };
/** Specifies whether to use QR or CHOLESKY numerical factorization (default:
* CHOLESKY). Cholesky is faster but potentially numerically unstable for
* poorly-conditioned problems, which can occur when uncertainty is very low
* in some variables (or dimensions of variables) and very high in others. QR
* is slower but more numerically stable in poorly-conditioned problems. We
* suggest using the default of Cholesky unless gtsam sometimes throws
* IndefiniteLinearSystemException when your problem's Hessian is actually
* positive definite. For positive definite problems, numerical error
* accumulation can cause the problem to become numerically negative or
* indefinite as solving proceeds, especially when using Cholesky.
*/
Factorization factorization;
/** Whether to cache linear factors (default: true).
* This can improve performance if linearization is expensive, but can hurt
* performance if linearization is very cheap, due to the extra computation
* needed to look up additional keys.
*/
bool cacheLinearizedFactors;
KeyFormatter
keyFormatter; ///< A KeyFormatter for when keys are printed during
///< debugging (default: DefaultKeyFormatter)
bool enableDetailedResults; ///< Whether to compute and return
///< ISAM2Result::detailedResults, this can
///< increase running time (default: false)
/** Check variables for relinearization in tree-order, stopping the check once
* a variable does not need to be relinearized (default: false). This can
* improve speed by only checking a small part of the top of the tree.
* However, variables below the check cut-off can accumulate significant
* deltas without triggering relinearization. This is particularly useful in
* exploration scenarios where real-time performance is desired over
* correctness. Use with caution.
*/
bool enablePartialRelinearizationCheck;
/// When you will be removing many factors, e.g. when using ISAM2 as a
/// fixed-lag smoother, enable this option to add factors in the first
/// available factor slots, to avoid accumulating NULL factor slots, at the
/// cost of having to search for slots every time a factor is added.
bool findUnusedFactorSlots;
/**
* Specify parameters as constructor arguments
* See the documentation of member variables above.
*/
ISAM2Params(OptimizationParams _optimizationParams = ISAM2GaussNewtonParams(),
RelinearizationThreshold _relinearizeThreshold = 0.1,
int _relinearizeSkip = 10, bool _enableRelinearization = true,
bool _evaluateNonlinearError = false,
Factorization _factorization = ISAM2Params::CHOLESKY,
bool _cacheLinearizedFactors = true,
const KeyFormatter& _keyFormatter =
DefaultKeyFormatter ///< see ISAM2Params::keyFormatter
)
: optimizationParams(_optimizationParams),
relinearizeThreshold(_relinearizeThreshold),
relinearizeSkip(_relinearizeSkip),
enableRelinearization(_enableRelinearization),
evaluateNonlinearError(_evaluateNonlinearError),
factorization(_factorization),
cacheLinearizedFactors(_cacheLinearizedFactors),
keyFormatter(_keyFormatter),
enableDetailedResults(false),
enablePartialRelinearizationCheck(false),
findUnusedFactorSlots(false) {}
/// print iSAM2 parameters
void print(const std::string& str = "") const {
using std::cout;
cout << str << "\n";
static const std::string kStr("optimizationParams: ");
if (optimizationParams.type() == typeid(ISAM2GaussNewtonParams))
boost::get<ISAM2GaussNewtonParams>(optimizationParams).print(kStr);
else if (optimizationParams.type() == typeid(ISAM2DoglegParams))
boost::get<ISAM2DoglegParams>(optimizationParams).print(kStr);
else
cout << kStr << "{unknown type}\n";
cout << "relinearizeThreshold: ";
if (relinearizeThreshold.type() == typeid(double)) {
cout << boost::get<double>(relinearizeThreshold) << "\n";
} else {
cout << "{mapped}\n";
for (const ISAM2ThresholdMapValue& value :
boost::get<ISAM2ThresholdMap>(relinearizeThreshold)) {
cout << " '" << value.first
<< "' -> [" << value.second.transpose() << " ]\n";
}
}
cout << "relinearizeSkip: " << relinearizeSkip << "\n";
cout << "enableRelinearization: " << enableRelinearization
<< "\n";
cout << "evaluateNonlinearError: " << evaluateNonlinearError
<< "\n";
cout << "factorization: "
<< factorizationTranslator(factorization) << "\n";
cout << "cacheLinearizedFactors: " << cacheLinearizedFactors
<< "\n";
cout << "enableDetailedResults: " << enableDetailedResults
<< "\n";
cout << "enablePartialRelinearizationCheck: "
<< enablePartialRelinearizationCheck << "\n";
cout << "findUnusedFactorSlots: " << findUnusedFactorSlots
<< "\n";
cout.flush();
}
/// @name Getters and Setters for all properties
/// @{
OptimizationParams getOptimizationParams() const {
return this->optimizationParams;
}
RelinearizationThreshold getRelinearizeThreshold() const {
return relinearizeThreshold;
}
int getRelinearizeSkip() const { return relinearizeSkip; }
bool isEnableRelinearization() const { return enableRelinearization; }
bool isEvaluateNonlinearError() const { return evaluateNonlinearError; }
std::string getFactorization() const {
return factorizationTranslator(factorization);
}
bool isCacheLinearizedFactors() const { return cacheLinearizedFactors; }
KeyFormatter getKeyFormatter() const { return keyFormatter; }
bool isEnableDetailedResults() const { return enableDetailedResults; }
bool isEnablePartialRelinearizationCheck() const {
return enablePartialRelinearizationCheck;
}
void setOptimizationParams(OptimizationParams optimizationParams) {
this->optimizationParams = optimizationParams;
}
void setRelinearizeThreshold(RelinearizationThreshold relinearizeThreshold) {
this->relinearizeThreshold = relinearizeThreshold;
}
void setRelinearizeSkip(int relinearizeSkip) {
this->relinearizeSkip = relinearizeSkip;
}
void setEnableRelinearization(bool enableRelinearization) {
this->enableRelinearization = enableRelinearization;
}
void setEvaluateNonlinearError(bool evaluateNonlinearError) {
this->evaluateNonlinearError = evaluateNonlinearError;
}
void setFactorization(const std::string& factorization) {
this->factorization = factorizationTranslator(factorization);
}
void setCacheLinearizedFactors(bool cacheLinearizedFactors) {
this->cacheLinearizedFactors = cacheLinearizedFactors;
}
void setKeyFormatter(KeyFormatter keyFormatter) {
this->keyFormatter = keyFormatter;
}
void setEnableDetailedResults(bool enableDetailedResults) {
this->enableDetailedResults = enableDetailedResults;
}
void setEnablePartialRelinearizationCheck(
bool enablePartialRelinearizationCheck) {
this->enablePartialRelinearizationCheck = enablePartialRelinearizationCheck;
}
GaussianFactorGraph::Eliminate getEliminationFunction() const {
return factorization == CHOLESKY
? (GaussianFactorGraph::Eliminate)EliminatePreferCholesky
: (GaussianFactorGraph::Eliminate)EliminateQR;
}
/// @}
/// @name Some utilities
/// @{
static Factorization factorizationTranslator(const std::string& str);
static std::string factorizationTranslator(const Factorization& value);
/// @}
};
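/* Example (a hedged configuration sketch; the 'x'/'l' key conventions are
 * assumptions for illustration, not requirements):
 * \code
 * ISAM2Params params(ISAM2DoglegParams());
 * FastMap<char, Vector> thresholds;
 * thresholds['x'] = (Vector(6) << 0.1, 0.1, 0.1, 0.5, 0.5, 0.5).finished();
 * thresholds['l'] = Vector3(1.0, 1.0, 1.0);
 * params.relinearizeThreshold = thresholds;
 * params.relinearizeSkip = 5;
 * params.setFactorization("QR");  // only if Cholesky is numerically troublesome
 * params.enableDetailedResults = true;
 * ISAM2 isam(params);
 * \endcode
 */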
} // namespace gtsam

View File

@ -0,0 +1,159 @@
/* ----------------------------------------------------------------------------
* GTSAM Copyright 2010, Georgia Tech Research Corporation,
* Atlanta, Georgia 30332-0415
* All Rights Reserved
* Authors: Frank Dellaert, et al. (see THANKS for the full author list)
* See LICENSE for the license information
* -------------------------------------------------------------------------- */
/**
* @file ISAM2Result.h
* @brief Class that stores detailed iSAM2 result.
* @author Michael Kaess, Richard Roberts, Frank Dellaert
*/
// \callgraph
#pragma once
#include <string>
#include <vector>
#include <gtsam/linear/GaussianBayesTree.h>
#include <gtsam/nonlinear/DoglegOptimizerImpl.h>
#include <gtsam/nonlinear/NonlinearFactorGraph.h>
#include <gtsam/nonlinear/ISAM2Params.h>
#include <boost/variant.hpp>
namespace gtsam {
typedef FastVector<size_t> FactorIndices;
/**
* @addtogroup ISAM2
* This struct is returned from ISAM2::update() and contains information about
* the update that is useful for determining whether the solution is
* converging, and about how much work was required for the update. See member
* variables for details and information about each entry.
*/
struct GTSAM_EXPORT ISAM2Result {
/** The nonlinear error of all of the factors, \a including new factors and
* variables added during the current call to ISAM2::update(). This error is
* calculated using the following variable values:
* \li Pre-existing variables will be evaluated by combining their
* linearization point before this call to update, with their partial linear
* delta, as computed by ISAM2::calculateEstimate().
* \li New variables will be evaluated at their initialization points passed
* into the current call to update.
* \par Note: This will only be computed if
* ISAM2Params::evaluateNonlinearError is set to \c true, because there is
* some cost to this computation.
*/
boost::optional<double> errorBefore;
/** The nonlinear error of all of the factors computed after the current
* update, meaning that variables above the relinearization threshold
* (ISAM2Params::relinearizeThreshold) have been relinearized and new
* variables have undergone one linear update. Variable values are
* again computed by combining their linearization points with their
* partial linear deltas, by ISAM2::calculateEstimate().
* \par Note: This will only be computed if
* ISAM2Params::evaluateNonlinearError is set to \c true, because there is
* some cost to this computation.
*/
boost::optional<double> errorAfter;
/** The number of variables that were relinearized because their linear
* deltas exceeded the relinearization threshold
* (ISAM2Params::relinearizeThreshold), combined with any additional
* variables that had to be relinearized because they were involved in
* the same factor as a variable above the relinearization threshold.
* On steps where no relinearization is considered
* (see ISAM2Params::relinearizeSkip), this count will be zero.
*/
size_t variablesRelinearized;
/** The number of variables that were reeliminated as parts of the Bayes'
* Tree were recalculated, due to new factors. When loop closures occur,
* this count will be large as the new loop-closing factors will tend to
* involve variables far away from the root, and everything up to the root
* will be reeliminated.
*/
size_t variablesReeliminated;
/** The number of factors that were included in reelimination of the Bayes'
* tree. */
size_t factorsRecalculated;
/** The number of cliques in the Bayes' Tree */
size_t cliques;
/** The indices of the newly-added factors, in 1-to-1 correspondence with the
* factors passed as \c newFactors to ISAM2::update(). These indices may be
* used later to refer to the factors in order to remove them.
*/
FactorIndices newFactorsIndices;
/** A struct holding detailed results, which must be enabled with
* ISAM2Params::enableDetailedResults.
*/
struct DetailedResults {
/** The status of a single variable, this struct is stored in
* DetailedResults::variableStatus */
struct VariableStatus {
/** Whether the variable was just reeliminated, due to being relinearized,
* observed, new, or on the path up to the root clique from another
* reeliminated variable. */
bool isReeliminated;
bool isAboveRelinThreshold; ///< Whether the variable was just
///< relinearized due to being above the
///< relinearization threshold
bool isRelinearizeInvolved; ///< Whether the variable was below the
///< relinearization threshold but was
///< relinearized by being involved in a
///< factor with a variable above the
///< relinearization threshold
bool isRelinearized; ///< Whether the variable was relinearized, either by
///< being above the relinearization threshold or by
///< involvement.
bool isObserved; ///< Whether the variable was just involved in new
///< factors
bool isNew; ///< Whether the variable itself was just added
bool inRootClique; ///< Whether the variable is in the root clique
VariableStatus()
: isReeliminated(false),
isAboveRelinThreshold(false),
isRelinearizeInvolved(false),
isRelinearized(false),
isObserved(false),
isNew(false),
inRootClique(false) {}
};
/** The status of each variable during this update, see VariableStatus.
*/
FastMap<Key, VariableStatus> variableStatus;
};
/** Detailed results, if enabled by ISAM2Params::enableDetailedResults. See
 * DetailedResults for information about the results data stored here. */
boost::optional<DetailedResults> detail;
void print(const std::string str = "") const {
using std::cout;
cout << str << " Reelimintated: " << variablesReeliminated
<< " Relinearized: " << variablesRelinearized
<< " Cliques: " << cliques << std::endl;
}
/** Getters and Setters */
size_t getVariablesRelinearized() const { return variablesRelinearized; }
size_t getVariablesReeliminated() const { return variablesReeliminated; }
size_t getCliques() const { return cliques; }
};
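/* Example (a hedged usage sketch; the two-argument ISAM2::update() overload and
 * the newFactors/newValues containers are assumed to exist in the caller):
 * \code
 * ISAM2Result result = isam.update(newFactors, newValues);
 * if (result.errorBefore && result.errorAfter)   // set only when
 *   std::cout << *result.errorBefore << " -> "   // evaluateNonlinearError=true
 *             << *result.errorAfter << "\n";
 * std::cout << "relinearized: " << result.variablesRelinearized << "\n";
 * // Keep the indices if these factors may need to be removed later:
 * FactorIndices addedIndices = result.newFactorsIndices;
 * \endcode
 */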
} // namespace gtsam