Merge develop into fix/Unit3

# Conflicts:
#	gtsam_unstable/slam/SmartRangeFactor.h
release/4.3a0
Frank Dellaert 2018-10-12 23:40:20 -04:00
commit 658ec8c17b
22 changed files with 2206 additions and 1759 deletions

.gitignore

@@ -16,3 +16,4 @@ cython/gtsam.pyx
cython/gtsam.so
cython/gtsam_wrapper.pxd
.vscode
.env

ISAM2Example_SmartFactor.cpp

@@ -0,0 +1,120 @@
/**
* @file ISAM2Example_SmartFactor.cpp
* @brief test of iSAM with smart factors, led to bitbucket issue #367
* @author Alexander (pumaking on BitBucket)
*/
#include <gtsam/geometry/SimpleCamera.h>
#include <gtsam/nonlinear/ISAM2.h>
#include <gtsam/slam/BetweenFactor.h>
#include <gtsam/slam/SmartProjectionPoseFactor.h>
#include <iostream>
#include <vector>
using namespace std;
using namespace gtsam;
using symbol_shorthand::P;
using symbol_shorthand::X;
// Make the typename short so it looks much cleaner
typedef SmartProjectionPoseFactor<Cal3_S2> SmartFactor;
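// (A SmartProjectionPoseFactor collects all 2D observations of one landmark
// and eliminates the 3D point analytically, so only the camera poses appear
// as variables in the graph; the triangulated point can be recovered later
// via SmartFactor::point(), as done below.)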
int main(int argc, char* argv[]) {
Cal3_S2::shared_ptr K(new Cal3_S2(50.0, 50.0, 0.0, 50.0, 50.0));
auto measurementNoise =
noiseModel::Isotropic::Sigma(2, 1.0); // one pixel in u and v
Vector6 sigmas;
sigmas << Vector3::Constant(0.3), Vector3::Constant(0.1);
auto noise = noiseModel::Diagonal::Sigmas(sigmas);
ISAM2Params parameters;
parameters.relinearizeThreshold = 0.01;
parameters.relinearizeSkip = 1;
parameters.cacheLinearizedFactors = false;
parameters.enableDetailedResults = true;
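// (What the settings above do: relinearizeThreshold is the minimum linear
// delta before a variable is relinearized, relinearizeSkip = 1 checks that
// threshold on every update() call, cacheLinearizedFactors = false disables
// caching of linearized factors, and enableDetailedResults makes update()
// fill in result.detail, which is printed below.)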
parameters.print();
ISAM2 isam(parameters);
// Create a factor graph
NonlinearFactorGraph graph;
Values initialEstimate;
Point3 point(0.0, 0.0, 1.0);
// Intentionally initialize the variables off from the ground truth
Pose3 delta(Rot3::Rodrigues(0.0, 0.0, 0.0), Point3(0.05, -0.10, 0.20));
Pose3 pose1(Rot3(), Point3(0.0, 0.0, 0.0));
Pose3 pose2(Rot3(), Point3(0.0, 0.2, 0.0));
Pose3 pose3(Rot3(), Point3(0.0, 0.4, 0.0));
Pose3 pose4(Rot3(), Point3(0.0, 0.5, 0.0));
Pose3 pose5(Rot3(), Point3(0.0, 0.6, 0.0));
vector<Pose3> poses = {pose1, pose2, pose3, pose4, pose5};
// Add first pose
graph.emplace_shared<PriorFactor<Pose3>>(X(0), poses[0], noise);
initialEstimate.insert(X(0), poses[0].compose(delta));
// Create smart factor with measurement from first pose only
SmartFactor::shared_ptr smartFactor(new SmartFactor(measurementNoise, K));
smartFactor->add(PinholePose<Cal3_S2>(poses[0], K).project(point), X(0));
graph.push_back(smartFactor);
// loop over remaining poses
for (size_t i = 1; i < 5; i++) {
cout << "****************************************************" << endl;
cout << "i = " << i << endl;
// Add prior on new pose
graph.emplace_shared<PriorFactor<Pose3>>(X(i), poses[i], noise);
initialEstimate.insert(X(i), poses[i].compose(delta));
// "Simulate" measurement from this pose
PinholePose<Cal3_S2> camera(poses[i], K);
Point2 measurement = camera.project(point);
cout << "Measurement " << i << ": " << measurement << endl;
// Add measurement to smart factor
smartFactor->add(measurement, X(i));
// Update iSAM2
ISAM2Result result = isam.update(graph, initialEstimate);
result.print();
cout << "Detailed results:" << endl;
for (auto keyedStatus : result.detail->variableStatus) {
const auto& status = keyedStatus.second;
PrintKey(keyedStatus.first);
cout << " {" << endl;
cout << "reeliminated: " << status.isReeliminated << endl;
cout << "relinearized above thresh: " << status.isAboveRelinThreshold
<< endl;
cout << "relinearized involved: " << status.isRelinearizeInvolved << endl;
cout << "relinearized: " << status.isRelinearized << endl;
cout << "observed: " << status.isObserved << endl;
cout << "new: " << status.isNew << endl;
cout << "in the root clique: " << status.inRootClique << endl;
cout << "}" << endl;
}
Values currentEstimate = isam.calculateEstimate();
currentEstimate.print("Current estimate: ");
boost::optional<Point3> pointEstimate = smartFactor->point(currentEstimate);
if (pointEstimate) {
cout << *pointEstimate << endl;
} else {
cout << "Point degenerate." << endl;
}
// Reset graph and initial estimate for next iteration
graph.resize(0);
initialEstimate.clear();
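// Note that ISAM2 keeps its own copy of all factors and values passed to
// update(), so graph and initialEstimate only stage the *new* quantities for
// the next call and can safely be emptied here.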
}
return 0;
}

VisualISAM2Example.cpp

@@ -11,8 +11,8 @@
/**
* @file VisualISAM2Example.cpp
* @brief A visualSLAM example for the structure-from-motion problem on a simulated dataset
* This version uses iSAM2 to solve the problem incrementally
* @brief A visualSLAM example for the structure-from-motion problem on a
* simulated dataset This version uses iSAM2 to solve the problem incrementally
* @author Duy-Nguyen Ta
*/
@@ -25,27 +25,28 @@
// For loading the data
#include "SFMdata.h"
// Camera observations of landmarks (i.e. pixel coordinates) will be stored as Point2 (x, y).
// Camera observations of landmarks will be stored as Point2 (x, y).
#include <gtsam/geometry/Point2.h>
// Each variable in the system (poses and landmarks) must be identified with a unique key.
// We can either use simple integer keys (1, 2, 3, ...) or symbols (X1, X2, L1).
// Here we will use Symbols
// Each variable in the system (poses and landmarks) must be identified with a
// unique key. We can either use simple integer keys (1, 2, 3, ...) or symbols
// (X1, X2, L1). Here we will use Symbols
#include <gtsam/inference/Symbol.h>
// We want to use iSAM2 to solve the structure-from-motion problem incrementally, so
// include iSAM2 here
// We want to use iSAM2 to solve the structure-from-motion problem
// incrementally, so include iSAM2 here
#include <gtsam/nonlinear/ISAM2.h>
// iSAM2 requires as input a set set of new factors to be added stored in a factor graph,
// and initial guesses for any new variables used in the added factors
// iSAM2 requires as input a set of new factors to be added stored in a factor
// graph, and initial guesses for any new variables used in the added factors
#include <gtsam/nonlinear/NonlinearFactorGraph.h>
#include <gtsam/nonlinear/Values.h>
// In GTSAM, measurement functions are represented as 'factors'. Several common factors
// have been provided with the library for solving robotics/SLAM/Bundle Adjustment problems.
// Here we will use Projection factors to model the camera's landmark observations.
// Also, we will initialize the robot at some location using a Prior factor.
// In GTSAM, measurement functions are represented as 'factors'. Several common
// factors have been provided with the library for solving robotics/SLAM/Bundle
// Adjustment problems. Here we will use Projection factors to model the
// camera's landmark observations. Also, we will initialize the robot at some
// location using a Prior factor.
#include <gtsam/slam/PriorFactor.h>
#include <gtsam/slam/ProjectionFactor.h>
@@ -56,12 +57,11 @@ using namespace gtsam;
/* ************************************************************************* */
int main(int argc, char* argv[]) {
// Define the camera calibration parameters
Cal3_S2::shared_ptr K(new Cal3_S2(50.0, 50.0, 0.0, 50.0, 50.0));
// Define the camera observation noise model
noiseModel::Isotropic::shared_ptr measurementNoise = noiseModel::Isotropic::Sigma(2, 1.0); // one pixel in u and v
// Define the camera observation noise model, 1 pixel stddev
auto measurementNoise = noiseModel::Isotropic::Sigma(2, 1.0);
// Create the set of ground-truth landmarks
vector<Point3> points = createPoints();
@@ -69,10 +69,12 @@ int main(int argc, char* argv[]) {
// Create the set of ground-truth poses
vector<Pose3> poses = createPoses();
// Create an iSAM2 object. Unlike iSAM1, which performs periodic batch steps to maintain proper linearization
// and efficient variable ordering, iSAM2 performs partial relinearization/reordering at each step. A parameter
// structure is available that allows the user to set various properties, such as the relinearization threshold
// and type of linear solver. For this example, we we set the relinearization threshold small so the iSAM2 result
// Create an iSAM2 object. Unlike iSAM1, which performs periodic batch steps
// to maintain proper linearization and efficient variable ordering, iSAM2
// performs partial relinearization/reordering at each step. A parameter
// structure is available that allows the user to set various properties, such
// as the relinearization threshold and type of linear solver. For this
// example, we set the relinearization threshold small so the iSAM2 result
// will approach the batch result.
ISAM2Params parameters;
parameters.relinearizeThreshold = 0.01;
@@ -83,44 +85,52 @@ int main(int argc, char* argv[]) {
NonlinearFactorGraph graph;
Values initialEstimate;
// Loop over the different poses, adding the observations to iSAM incrementally
// Loop over the poses, adding the observations to iSAM incrementally
for (size_t i = 0; i < poses.size(); ++i) {
// Add factors for each landmark observation
for (size_t j = 0; j < points.size(); ++j) {
SimpleCamera camera(poses[i], *K);
Point2 measurement = camera.project(points[j]);
graph.emplace_shared<GenericProjectionFactor<Pose3, Point3, Cal3_S2> >(measurement, measurementNoise, Symbol('x', i), Symbol('l', j), K);
graph.emplace_shared<GenericProjectionFactor<Pose3, Point3, Cal3_S2> >(
measurement, measurementNoise, Symbol('x', i), Symbol('l', j), K);
}
// Add an initial guess for the current pose
// Intentionally initialize the variables off from the ground truth
initialEstimate.insert(Symbol('x', i), poses[i].compose(Pose3(Rot3::Rodrigues(-0.1, 0.2, 0.25), Point3(0.05, -0.10, 0.20))));
static Pose3 kDeltaPose(Rot3::Rodrigues(-0.1, 0.2, 0.25),
Point3(0.05, -0.10, 0.20));
initialEstimate.insert(Symbol('x', i), poses[i] * kDeltaPose);
// If this is the first iteration, add a prior on the first pose to set the coordinate frame
// and a prior on the first landmark to set the scale
// Also, as iSAM solves incrementally, we must wait until each is observed at least twice before
// adding it to iSAM.
if( i == 0) {
// Add a prior on pose x0
noiseModel::Diagonal::shared_ptr poseNoise = noiseModel::Diagonal::Sigmas((Vector(6) << Vector3::Constant(0.3),Vector3::Constant(0.1)).finished()); // 30cm std on x,y,z 0.1 rad on roll,pitch,yaw
graph.emplace_shared<PriorFactor<Pose3> >(Symbol('x', 0), poses[0], poseNoise);
// If this is the first iteration, add a prior on the first pose to set the
// coordinate frame and a prior on the first landmark to set the scale Also,
// as iSAM solves incrementally, we must wait until each is observed at
// least twice before adding it to iSAM.
if (i == 0) {
// Add a prior on pose x0, 30cm std on x,y,z and 0.1 rad on roll,pitch,yaw
static auto kPosePrior = noiseModel::Diagonal::Sigmas(
(Vector(6) << Vector3::Constant(0.3), Vector3::Constant(0.1))
.finished());
graph.emplace_shared<PriorFactor<Pose3> >(Symbol('x', 0), poses[0],
kPosePrior);
// Add a prior on landmark l0
noiseModel::Isotropic::shared_ptr pointNoise = noiseModel::Isotropic::Sigma(3, 0.1);
graph.emplace_shared<PriorFactor<Point3> >(Symbol('l', 0), points[0], pointNoise); // add directly to graph
static auto kPointPrior = noiseModel::Isotropic::Sigma(3, 0.1);
graph.emplace_shared<PriorFactor<Point3> >(Symbol('l', 0), points[0],
kPointPrior);
// Add initial guesses to all observed landmarks
// Intentionally initialize the variables off from the ground truth
static Point3 kDeltaPoint(-0.25, 0.20, 0.15);
for (size_t j = 0; j < points.size(); ++j)
initialEstimate.insert<Point3>(Symbol('l', j), points[j] +Point3(-0.25, 0.20, 0.15));
initialEstimate.insert<Point3>(Symbol('l', j), points[j] + kDeltaPoint);
} else {
// Update iSAM with the new factors
isam.update(graph, initialEstimate);
// Each call to iSAM2 update(*) performs one iteration of the iterative nonlinear solver.
// If accuracy is desired at the expense of time, update(*) can be called additional times
// to perform multiple optimizer iterations every step.
// Each call to iSAM2 update(*) performs one iteration of the iterative
// nonlinear solver. If accuracy is desired at the expense of time,
// update(*) can be called additional times to perform multiple optimizer
// iterations every step.
isam.update();
Values currentEstimate = isam.calculateEstimate();
cout << "****************************************************" << endl;

Unit3.cpp

@@ -262,7 +262,7 @@ Unit3 Unit3::retract(const Vector2& v) const {
std::cos(theta) * p_ + xi_hat * (sin(theta) / theta);
return Unit3(exp_p_xi_hat);
}
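// (Sketch of the math behind the expression above: with xi_hat the tangent
// vector v expressed in the ambient R^3 via the tangent basis at p_, and
// theta = |v|, this is the exponential map on the unit sphere,
// exp_p(xi) = cos(theta) * p + (sin(theta)/theta) * xi_hat, which stays on
// the sphere for any tangent vector v.)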
/* ************************************************************************* */
Vector2 Unit3::localCoordinates(const Unit3& other) const {
const double x = p_.dot(other.p_);


@@ -17,9 +17,13 @@
#pragma once
#include <boost/optional.hpp>
#include <gtsam/inference/Key.h>
#include <gtsam/inference/Ordering.h>
#include <gtsam/base/types.h>
#include <gtsam/base/FastVector.h>
#include <boost/optional.hpp>
#include <string>
namespace gtsam {

GaussianConditional.cpp

@@ -12,11 +12,13 @@
/**
* @file GaussianConditional.cpp
* @brief Conditional Gaussian Base class
* @author Christian Potthast
* @author Christian Potthast, Frank Dellaert
*/
#include <string.h>
#include <functional>
#include <gtsam/linear/linearExceptions.h>
#include <gtsam/linear/GaussianConditional.h>
#include <gtsam/linear/VectorValues.h>
#include <boost/format.hpp>
#ifdef __GNUC__
#pragma GCC diagnostic push
@@ -28,9 +30,9 @@
#pragma GCC diagnostic pop
#endif
#include <gtsam/linear/linearExceptions.h>
#include <gtsam/linear/GaussianConditional.h>
#include <gtsam/linear/VectorValues.h>
#include <functional>
#include <list>
#include <string>
using namespace std;
@@ -54,38 +56,36 @@ namespace gtsam {
BaseFactor(key, R, name1, S, name2, T, d, sigmas), BaseConditional(1) {}
/* ************************************************************************* */
void GaussianConditional::print(const string &s, const KeyFormatter& formatter) const
{
void GaussianConditional::print(const string &s, const KeyFormatter& formatter) const {
cout << s << " Conditional density ";
for(const_iterator it = beginFrontals(); it != endFrontals(); ++it) {
for (const_iterator it = beginFrontals(); it != endFrontals(); ++it) {
cout << (boost::format("[%1%]")%(formatter(*it))).str() << " ";
}
cout << endl;
cout << formatMatrixIndented(" R = ", get_R()) << endl;
for(const_iterator it = beginParents() ; it != endParents() ; ++it ) {
for (const_iterator it = beginParents() ; it != endParents() ; ++it) {
cout << formatMatrixIndented((boost::format(" S[%1%] = ")%(formatter(*it))).str(), getA(it))
<< endl;
}
cout << formatMatrixIndented(" d = ", getb(), true) << "\n";
if(model_)
if (model_)
model_->print(" Noise model: ");
else
cout << " No noise model" << endl;
}
/* ************************************************************************* */
bool GaussianConditional::equals(const GaussianFactor& f, double tol) const
{
if (const GaussianConditional* c = dynamic_cast<const GaussianConditional*>(&f))
{
bool GaussianConditional::equals(const GaussianFactor& f, double tol) const {
if (const GaussianConditional* c = dynamic_cast<const GaussianConditional*>(&f)) {
// check if the size of the parents_ map is the same
if (parents().size() != c->parents().size())
return false;
// check if R_ and d_ are linear independent
for (DenseIndex i = 0; i < Ab_.rows(); i++) {
list<Vector> rows1; rows1.push_back(Vector(get_R().row(i)));
list<Vector> rows2; rows2.push_back(Vector(c->get_R().row(i)));
list<Vector> rows1, rows2;
rows1.push_back(Vector(get_R().row(i)));
rows2.push_back(Vector(c->get_R().row(i)));
// check if the matrices are the same
// iterate over the parents_ map
@@ -109,16 +109,13 @@ namespace gtsam {
return false;
return true;
}
else
{
} else {
return false;
}
}
/* ************************************************************************* */
VectorValues GaussianConditional::solve(const VectorValues& x) const
{
VectorValues GaussianConditional::solve(const VectorValues& x) const {
// Concatenate all vector values that correspond to parent variables
const Vector xS = x.vector(FastVector<Key>(beginParents(), endParents()));
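// (The rest of solve(), not shown in this hunk, roughly back-substitutes the
// frontal variables as xF = R^{-1} * (d - S * xS) using the upper-triangular
// R block of this conditional, then packs xF into a VectorValues.)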
@@ -146,8 +143,7 @@ namespace gtsam {
/* ************************************************************************* */
VectorValues GaussianConditional::solveOtherRHS(
const VectorValues& parents, const VectorValues& rhs) const
{
const VectorValues& parents, const VectorValues& rhs) const {
// Concatenate all vector values that correspond to parent variables
Vector xS = parents.vector(FastVector<Key>(beginParents(), endParents()));
@@ -159,13 +155,13 @@ namespace gtsam {
Vector soln = get_R().triangularView<Eigen::Upper>().solve(xS);
// Scale by sigmas
if(model_)
if (model_)
soln.array() *= model_->sigmas().array();
// Insert solution into a VectorValues
VectorValues result;
DenseIndex vectorPosition = 0;
for(const_iterator frontal = beginFrontals(); frontal != endFrontals(); ++frontal) {
for (const_iterator frontal = beginFrontals(); frontal != endFrontals(); ++frontal) {
result.insert(*frontal, soln.segment(vectorPosition, getDim(frontal)));
vectorPosition += getDim(frontal);
}
@@ -174,8 +170,7 @@ namespace gtsam {
}
/* ************************************************************************* */
void GaussianConditional::solveTransposeInPlace(VectorValues& gy) const
{
void GaussianConditional::solveTransposeInPlace(VectorValues& gy) const {
Vector frontalVec = gy.vector(FastVector<Key>(beginFrontals(), endFrontals()));
frontalVec = gtsam::backSubstituteUpper(frontalVec, Matrix(get_R()));
@@ -186,25 +181,24 @@ namespace gtsam {
gy[*it] += -1.0 * Matrix(getA(it)).transpose() * frontalVec;
// Scale by sigmas
if(model_)
if (model_)
frontalVec.array() *= model_->sigmas().array();
// Write frontal solution into a VectorValues
DenseIndex vectorPosition = 0;
for(const_iterator frontal = beginFrontals(); frontal != endFrontals(); ++frontal) {
for (const_iterator frontal = beginFrontals(); frontal != endFrontals(); ++frontal) {
gy[*frontal] = frontalVec.segment(vectorPosition, getDim(frontal));
vectorPosition += getDim(frontal);
}
}
/* ************************************************************************* */
void GaussianConditional::scaleFrontalsBySigma(VectorValues& gy) const
{
void GaussianConditional::scaleFrontalsBySigma(VectorValues& gy) const {
DenseIndex vectorPosition = 0;
for(const_iterator frontal = beginFrontals(); frontal != endFrontals(); ++frontal) {
for (const_iterator frontal = beginFrontals(); frontal != endFrontals(); ++frontal) {
gy[*frontal].array() *= model_->sigmas().segment(vectorPosition, getDim(frontal)).array();
vectorPosition += getDim(frontal);
}
}
}
} // namespace gtsam

Scatter.cpp

@@ -70,9 +70,6 @@ Scatter::Scatter(const GaussianFactorGraph& gfg,
iterator first = begin();
if (ordering) first += ordering->size();
if (first != end()) std::sort(first, end());
// Filter out keys with zero dimensions (if ordering had more keys)
erase(std::remove_if(begin(), end(), SlotEntry::Zero), end());
}
/* ************************************************************************* */

VectorValues.cpp

@@ -160,28 +160,6 @@ namespace gtsam {
return result;
}
/* ************************************************************************* */
Vector VectorValues::vector(const FastVector<Key>& keys) const
{
// Count dimensions and collect pointers to avoid double lookups
DenseIndex totalDim = 0;
FastVector<const Vector*> items(keys.size());
for(size_t i = 0; i < keys.size(); ++i) {
items[i] = &at(keys[i]);
totalDim += items[i]->size();
}
// Copy vectors
Vector result(totalDim);
DenseIndex pos = 0;
for(const Vector *v: items) {
result.segment(pos, v->size()) = *v;
pos += v->size();
}
return result;
}
/* ************************************************************************* */
Vector VectorValues::vector(const Dims& keys) const
{

VectorValues.h

@@ -26,6 +26,9 @@
#include <boost/shared_ptr.hpp>
#include <map>
#include <string>
namespace gtsam {
/**
@@ -43,10 +46,6 @@ namespace gtsam {
* - \ref exists (Key) to check if a variable is present
* - Other facilities like iterators, size(), dim(), etc.
*
* Indices can be non-consecutive and inserted out-of-order, but you should not
* use indices that are larger than a reasonable array size because the indices
* correspond to positions in an internal array.
*
* Example:
* \code
VectorValues values;
@@ -64,12 +63,6 @@ namespace gtsam {
*
* <h2>Advanced Interface and Performance Information</h2>
*
* Internally, all vector values are stored as part of one large vector. In
* gtsam this vector is always pre-allocated for efficiency, using the
* advanced interface described below. Accessing and modifying already-allocated
* values is \f$ O(1) \f$. Using the insert() function of the standard interface
* is slow because it requires re-allocating the internal vector.
*
* For advanced usage, or where speed is important:
* - Allocate space ahead of time using a pre-allocating constructor
* (\ref AdvancedConstructors "Advanced Constructors"), Zero(),
@@ -88,20 +81,18 @@ namespace gtsam {
* \nosubgrouping
*/
class GTSAM_EXPORT VectorValues {
protected:
protected:
typedef VectorValues This;
typedef ConcurrentMap<Key, Vector> Values; ///< Typedef for the collection of Vectors making up a VectorValues
Values values_; ///< Collection of Vectors making up this VectorValues
typedef ConcurrentMap<Key, Vector> Values; ///< Collection of Vectors making up a VectorValues
Values values_; ///< Vectors making up this VectorValues
public:
typedef Values::iterator iterator; ///< Iterator over vector values
typedef Values::const_iterator const_iterator; ///< Const iterator over vector values
//typedef Values::reverse_iterator reverse_iterator; ///< Reverse iterator over vector values
//typedef Values::const_reverse_iterator const_reverse_iterator; ///< Const reverse iterator over vector values
typedef boost::shared_ptr<This> shared_ptr; ///< shared_ptr to this class
typedef Values::value_type value_type; ///< Typedef to pair<Key, Vector>, a key-value pair
typedef value_type KeyValuePair; ///< Typedef to pair<Key, Vector>, a key-value pair
typedef std::map<Key,size_t> Dims;
public:
typedef Values::iterator iterator; ///< Iterator over vector values
typedef Values::const_iterator const_iterator; ///< Const iterator over vector values
typedef boost::shared_ptr<This> shared_ptr; ///< shared_ptr to this class
typedef Values::value_type value_type; ///< Typedef to pair<Key, Vector>
typedef value_type KeyValuePair; ///< Typedef to pair<Key, Vector>
typedef std::map<Key, size_t> Dims; ///< Keyed vector dimensions
/// @name Standard Constructors
/// @{
@@ -111,7 +102,8 @@ namespace gtsam {
*/
VectorValues() {}
/** Merge two VectorValues into one, this is more efficient than inserting elements one by one. */
/** Merge two VectorValues into one, this is more efficient than inserting
* elements one by one. */
VectorValues(const VectorValues& first, const VectorValues& second);
/** Create from another container holding pair<Key,Vector>. */
@@ -147,20 +139,26 @@ namespace gtsam {
/** Check whether a variable with key \c j exists. */
bool exists(Key j) const { return find(j) != end(); }
/** Read/write access to the vector value with key \c j, throws std::out_of_range if \c j does not exist, identical to operator[](Key). */
/**
* Read/write access to the vector value with key \c j, throws
* std::out_of_range if \c j does not exist, identical to operator[](Key).
*/
Vector& at(Key j) {
iterator item = find(j);
if(item == end())
if (item == end())
throw std::out_of_range(
"Requested variable '" + DefaultKeyFormatter(j) + "' is not in this VectorValues.");
else
return item->second;
}
/** Access the vector value with key \c j (const version), throws std::out_of_range if \c j does not exist, identical to operator[](Key). */
/**
* Access the vector value with key \c j (const version), throws
* std::out_of_range if \c j does not exist, identical to operator[](Key).
*/
const Vector& at(Key j) const {
const_iterator item = find(j);
if(item == end())
if (item == end())
throw std::out_of_range(
"Requested variable '" + DefaultKeyFormatter(j) + "' is not in this VectorValues.");
else
@@ -207,26 +205,30 @@ namespace gtsam {
/** Erase the vector with the given key, or throw std::out_of_range if it does not exist */
void erase(Key var) {
if(values_.unsafe_erase(var) == 0)
throw std::invalid_argument("Requested variable '" + DefaultKeyFormatter(var) + "', is not in this VectorValues.");
if (values_.unsafe_erase(var) == 0)
throw std::invalid_argument("Requested variable '" +
DefaultKeyFormatter(var) +
"', is not in this VectorValues.");
}
/** Set all values to zero vectors. */
void setZero();
iterator begin() { return values_.begin(); } ///< Iterator over variables
const_iterator begin() const { return values_.begin(); } ///< Iterator over variables
iterator end() { return values_.end(); } ///< Iterator over variables
const_iterator end() const { return values_.end(); } ///< Iterator over variables
//reverse_iterator rbegin() { return values_.rbegin(); } ///< Reverse iterator over variables
//const_reverse_iterator rbegin() const { return values_.rbegin(); } ///< Reverse iterator over variables
//reverse_iterator rend() { return values_.rend(); } ///< Reverse iterator over variables
//const_reverse_iterator rend() const { return values_.rend(); } ///< Reverse iterator over variables
iterator begin() { return values_.begin(); } ///< Iterator over variables
const_iterator begin() const { return values_.begin(); } ///< Iterator over variables
iterator end() { return values_.end(); } ///< Iterator over variables
const_iterator end() const { return values_.end(); } ///< Iterator over variables
/** Return the iterator corresponding to the requested key, or end() if no variable is present with this key. */
/**
* Return the iterator corresponding to the requested key, or end() if no
* variable is present with this key.
*/
iterator find(Key j) { return values_.find(j); }
/** Return the iterator corresponding to the requested key, or end() if no variable is present with this key. */
/**
* Return the iterator corresponding to the requested key, or end() if no
* variable is present with this key.
*/
const_iterator find(Key j) const { return values_.find(j); }
/** print required by Testable for unit testing */
@@ -244,7 +246,26 @@ namespace gtsam {
Vector vector() const;
/** Access a vector that is a subset of relevant keys. */
Vector vector(const FastVector<Key>& keys) const;
template <typename CONTAINER>
Vector vector(const CONTAINER& keys) const {
DenseIndex totalDim = 0;
FastVector<const Vector*> items;
items.reserve(keys.end() - keys.begin());
for (Key key : keys) {
const Vector* v = &at(key);
totalDim += v->size();
items.push_back(v);
}
Vector result(totalDim);
DenseIndex pos = 0;
for (const Vector* v : items) {
result.segment(pos, v->size()) = *v;
pos += v->size();
}
return result;
}
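// Example usage of the templated overload above (a sketch; assumes the
// gtsam::KeyVector typedef, but any container with random-access iterators works):
//   VectorValues values;
//   values.insert(0, Vector2(1.0, 2.0));
//   values.insert(1, Vector3(3.0, 4.0, 5.0));
//   Vector stacked = values.vector(KeyVector{0, 1});  // 5-dimensional result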
/** Access a vector that is a subset of relevant keys, dims version. */
Vector vector(const Dims& dims) const;
@@ -319,54 +340,6 @@ namespace gtsam {
/// @}
/**
* scale a vector by a scalar
*/
//friend VectorValues operator*(const double a, const VectorValues &v) {
// VectorValues result = VectorValues::SameStructure(v);
// for(Key j = 0; j < v.size(); ++j)
// result.values_[j] = a * v.values_[j];
// return result;
//}
//// TODO: linear algebra interface seems to have been added for SPCG.
//friend void axpy(double alpha, const VectorValues& x, VectorValues& y) {
// if(x.size() != y.size())
// throw std::invalid_argument("axpy(VectorValues) called with different vector sizes");
// for(Key j = 0; j < x.size(); ++j)
// if(x.values_[j].size() == y.values_[j].size())
// y.values_[j] += alpha * x.values_[j];
// else
// throw std::invalid_argument("axpy(VectorValues) called with different vector sizes");
//}
//// TODO: linear algebra interface seems to have been added for SPCG.
//friend void sqrt(VectorValues &x) {
// for(Key j = 0; j < x.size(); ++j)
// x.values_[j] = x.values_[j].cwiseSqrt();
//}
//// TODO: linear algebra interface seems to have been added for SPCG.
//friend void ediv(const VectorValues& numerator, const VectorValues& denominator, VectorValues &result) {
// if(numerator.size() != denominator.size() || numerator.size() != result.size())
// throw std::invalid_argument("ediv(VectorValues) called with different vector sizes");
// for(Key j = 0; j < numerator.size(); ++j)
// if(numerator.values_[j].size() == denominator.values_[j].size() && numerator.values_[j].size() == result.values_[j].size())
// result.values_[j] = numerator.values_[j].cwiseQuotient(denominator.values_[j]);
// else
// throw std::invalid_argument("ediv(VectorValues) called with different vector sizes");
//}
//// TODO: linear algebra interface seems to have been added for SPCG.
//friend void edivInPlace(VectorValues& x, const VectorValues& y) {
// if(x.size() != y.size())
// throw std::invalid_argument("edivInPlace(VectorValues) called with different vector sizes");
// for(Key j = 0; j < x.size(); ++j)
// if(x.values_[j].size() == y.values_[j].size())
// x.values_[j].array() /= y.values_[j].array();
// else
// throw std::invalid_argument("edivInPlace(VectorValues) called with different vector sizes");
//}
private:
/** Serialization function */
friend class boost::serialization::access;

ISAM2-impl.cpp

@@ -11,120 +11,89 @@
/**
* @file ISAM2-impl.cpp
* @brief Incremental update functionality (ISAM2) for BayesTree, with fluid relinearization.
* @brief Incremental update functionality (ISAM2) for BayesTree, with fluid
* relinearization.
* @author Michael Kaess
* @author Richard Roberts
*/
#include <gtsam/nonlinear/ISAM2-impl.h>
#include <gtsam/inference/Symbol.h> // for selective linearization thresholds
#include <gtsam/base/debug.h>
#include <gtsam/config.h> // for GTSAM_USE_TBB
#include <gtsam/config.h> // for GTSAM_USE_TBB
#include <gtsam/inference/Symbol.h> // for selective linearization thresholds
#include <gtsam/nonlinear/ISAM2-impl.h>
#include <functional>
#include <boost/range/adaptors.hpp>
#include <functional>
#include <limits>
#include <string>
using namespace std;
namespace gtsam {
/* ************************************************************************* */
void ISAM2::Impl::AddVariables(
const Values& newTheta, Values& theta, VectorValues& delta,
VectorValues& deltaNewton, VectorValues& RgProd,
const KeyFormatter& keyFormatter)
{
const bool debug = ISDEBUG("ISAM2 AddVariables");
void ISAM2::Impl::AddFactorsStep1(const NonlinearFactorGraph& newFactors,
bool useUnusedSlots,
NonlinearFactorGraph* nonlinearFactors,
FactorIndices* newFactorIndices) {
newFactorIndices->resize(newFactors.size());
theta.insert(newTheta);
if(debug) newTheta.print("The new variables are: ");
// Add zeros into the VectorValues
delta.insert(newTheta.zeroVectors());
deltaNewton.insert(newTheta.zeroVectors());
RgProd.insert(newTheta.zeroVectors());
}
/* ************************************************************************* */
void ISAM2::Impl::AddFactorsStep1(const NonlinearFactorGraph& newFactors, bool useUnusedSlots,
NonlinearFactorGraph& nonlinearFactors, FactorIndices& newFactorIndices)
{
newFactorIndices.resize(newFactors.size());
if(useUnusedSlots)
{
if (useUnusedSlots) {
size_t globalFactorIndex = 0;
for(size_t newFactorIndex = 0; newFactorIndex < newFactors.size(); ++newFactorIndex)
{
for (size_t newFactorIndex = 0; newFactorIndex < newFactors.size();
++newFactorIndex) {
// Loop to find the next available factor slot
do
{
// If we need to add more factors than we have room for, resize nonlinearFactors,
// filling the new slots with NULL factors. Otherwise, check if the current
// factor in nonlinearFactors is already used, and if so, increase
// globalFactorIndex. If the current factor in nonlinearFactors is unused, break
// out of the loop and use the current slot.
if(globalFactorIndex >= nonlinearFactors.size())
nonlinearFactors.resize(nonlinearFactors.size() + newFactors.size() - newFactorIndex);
else if(nonlinearFactors[globalFactorIndex])
++ globalFactorIndex;
do {
// If we need to add more factors than we have room for, resize
// nonlinearFactors, filling the new slots with NULL factors. Otherwise,
// check if the current factor in nonlinearFactors is already used, and
// if so, increase globalFactorIndex. If the current factor in
// nonlinearFactors is unused, break out of the loop and use the current
// slot.
if (globalFactorIndex >= nonlinearFactors->size())
nonlinearFactors->resize(nonlinearFactors->size() +
newFactors.size() - newFactorIndex);
else if ((*nonlinearFactors)[globalFactorIndex])
++globalFactorIndex;
else
break;
} while(true);
} while (true);
// Use the current slot, updating nonlinearFactors and newFactorSlots.
nonlinearFactors[globalFactorIndex] = newFactors[newFactorIndex];
newFactorIndices[newFactorIndex] = globalFactorIndex;
(*nonlinearFactors)[globalFactorIndex] = newFactors[newFactorIndex];
(*newFactorIndices)[newFactorIndex] = globalFactorIndex;
}
}
else
{
} else {
// We're not looking for unused slots, so just add the factors at the end.
for(size_t i = 0; i < newFactors.size(); ++i)
newFactorIndices[i] = i + nonlinearFactors.size();
nonlinearFactors.push_back(newFactors);
for (size_t i = 0; i < newFactors.size(); ++i)
(*newFactorIndices)[i] = i + nonlinearFactors->size();
nonlinearFactors->push_back(newFactors);
}
}
/* ************************************************************************* */
void ISAM2::Impl::RemoveVariables(const KeySet& unusedKeys, const FastVector<ISAM2::sharedClique>& roots,
Values& theta, VariableIndex& variableIndex,
VectorValues& delta, VectorValues& deltaNewton, VectorValues& RgProd,
KeySet& replacedKeys, Base::Nodes& nodes,
KeySet& fixedVariables)
{
variableIndex.removeUnusedVariables(unusedKeys.begin(), unusedKeys.end());
for(Key key: unusedKeys) {
delta.erase(key);
deltaNewton.erase(key);
RgProd.erase(key);
replacedKeys.erase(key);
nodes.unsafe_erase(key);
theta.erase(key);
fixedVariables.erase(key);
}
}
/* ************************************************************************* */
KeySet ISAM2::Impl::CheckRelinearizationFull(const VectorValues& delta,
const ISAM2Params::RelinearizationThreshold& relinearizeThreshold)
{
KeySet ISAM2::Impl::CheckRelinearizationFull(
const VectorValues& delta,
const ISAM2Params::RelinearizationThreshold& relinearizeThreshold) {
KeySet relinKeys;
if(const double* threshold = boost::get<double>(&relinearizeThreshold))
{
for(const VectorValues::KeyValuePair& key_delta: delta) {
if (const double* threshold = boost::get<double>(&relinearizeThreshold)) {
for (const VectorValues::KeyValuePair& key_delta : delta) {
double maxDelta = key_delta.second.lpNorm<Eigen::Infinity>();
if(maxDelta >= *threshold)
relinKeys.insert(key_delta.first);
if (maxDelta >= *threshold) relinKeys.insert(key_delta.first);
}
}
else if(const FastMap<char,Vector>* thresholds = boost::get<FastMap<char,Vector> >(&relinearizeThreshold))
{
for(const VectorValues::KeyValuePair& key_delta: delta) {
const Vector& threshold = thresholds->find(Symbol(key_delta.first).chr())->second;
if(threshold.rows() != key_delta.second.rows())
throw std::invalid_argument("Relinearization threshold vector dimensionality for '" + std::string(1, Symbol(key_delta.first).chr()) + "' passed into iSAM2 parameters does not match actual variable dimensionality.");
if((key_delta.second.array().abs() > threshold.array()).any())
} else if (const FastMap<char, Vector>* thresholds =
boost::get<FastMap<char, Vector> >(&relinearizeThreshold)) {
for (const VectorValues::KeyValuePair& key_delta : delta) {
const Vector& threshold =
thresholds->find(Symbol(key_delta.first).chr())->second;
if (threshold.rows() != key_delta.second.rows())
throw std::invalid_argument(
"Relinearization threshold vector dimensionality for '" +
std::string(1, Symbol(key_delta.first).chr()) +
"' passed into iSAM2 parameters does not match actual variable "
"dimensionality.");
if ((key_delta.second.array().abs() > threshold.array()).any())
relinKeys.insert(key_delta.first);
}
}
@@ -133,167 +102,116 @@ KeySet ISAM2::Impl::CheckRelinearizationFull(const VectorValues& delta,
}
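// A sketch of how the FastMap form handled above is typically configured
// (illustrative only; the symbol characters 'x'/'l' are assumptions):
//   ISAM2Params params;
//   FastMap<char, Vector> thresholds;
//   thresholds['x'] = (Vector(6) << Vector3::Constant(0.1),
//                      Vector3::Constant(0.05)).finished();  // pose variables
//   thresholds['l'] = Vector3::Constant(0.05);                // point variables
//   params.relinearizeThreshold = thresholds;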
/* ************************************************************************* */
void CheckRelinearizationRecursiveDouble(KeySet& relinKeys, double threshold,
const VectorValues& delta, const ISAM2Clique::shared_ptr& clique)
{
static void CheckRelinearizationRecursiveDouble(
double threshold, const VectorValues& delta,
const ISAM2::sharedClique& clique, KeySet* relinKeys) {
// Check the current clique for relinearization
bool relinearize = false;
for(Key var: *clique->conditional()) {
for (Key var : *clique->conditional()) {
double maxDelta = delta[var].lpNorm<Eigen::Infinity>();
if(maxDelta >= threshold) {
relinKeys.insert(var);
if (maxDelta >= threshold) {
relinKeys->insert(var);
relinearize = true;
}
}
// If this node was relinearized, also check its children
if(relinearize) {
for(const ISAM2Clique::shared_ptr& child: clique->children) {
CheckRelinearizationRecursiveDouble(relinKeys, threshold, delta, child);
if (relinearize) {
for (const ISAM2::sharedClique& child : clique->children) {
CheckRelinearizationRecursiveDouble(threshold, delta, child, relinKeys);
}
}
}
/* ************************************************************************* */
void CheckRelinearizationRecursiveMap(KeySet& relinKeys, const FastMap<char,Vector>& thresholds,
const VectorValues& delta,
const ISAM2Clique::shared_ptr& clique)
{
static void CheckRelinearizationRecursiveMap(
const FastMap<char, Vector>& thresholds, const VectorValues& delta,
const ISAM2::sharedClique& clique, KeySet* relinKeys) {
// Check the current clique for relinearization
bool relinearize = false;
for(Key var: *clique->conditional()) {
for (Key var : *clique->conditional()) {
// Find the threshold for this variable type
const Vector& threshold = thresholds.find(Symbol(var).chr())->second;
const Vector& deltaVar = delta[var];
// Verify the threshold vector matches the actual variable size
if(threshold.rows() != deltaVar.rows())
throw std::invalid_argument("Relinearization threshold vector dimensionality for '" + std::string(1, Symbol(var).chr()) + "' passed into iSAM2 parameters does not match actual variable dimensionality.");
if (threshold.rows() != deltaVar.rows())
throw std::invalid_argument(
"Relinearization threshold vector dimensionality for '" +
std::string(1, Symbol(var).chr()) +
"' passed into iSAM2 parameters does not match actual variable "
"dimensionality.");
// Check for relinearization
if((deltaVar.array().abs() > threshold.array()).any()) {
relinKeys.insert(var);
if ((deltaVar.array().abs() > threshold.array()).any()) {
relinKeys->insert(var);
relinearize = true;
}
}
// If this node was relinearized, also check its children
if(relinearize) {
for(const ISAM2Clique::shared_ptr& child: clique->children) {
CheckRelinearizationRecursiveMap(relinKeys, thresholds, delta, child);
if (relinearize) {
for (const ISAM2::sharedClique& child : clique->children) {
CheckRelinearizationRecursiveMap(thresholds, delta, child, relinKeys);
}
}
}
/* ************************************************************************* */
KeySet ISAM2::Impl::CheckRelinearizationPartial(const FastVector<ISAM2::sharedClique>& roots,
const VectorValues& delta,
const ISAM2Params::RelinearizationThreshold& relinearizeThreshold)
{
KeySet ISAM2::Impl::CheckRelinearizationPartial(
const ISAM2::Roots& roots, const VectorValues& delta,
const ISAM2Params::RelinearizationThreshold& relinearizeThreshold) {
KeySet relinKeys;
for(const ISAM2::sharedClique& root: roots) {
if(relinearizeThreshold.type() == typeid(double))
CheckRelinearizationRecursiveDouble(relinKeys, boost::get<double>(relinearizeThreshold), delta, root);
else if(relinearizeThreshold.type() == typeid(FastMap<char,Vector>))
CheckRelinearizationRecursiveMap(relinKeys, boost::get<FastMap<char,Vector> >(relinearizeThreshold), delta, root);
for (const ISAM2::sharedClique& root : roots) {
if (relinearizeThreshold.type() == typeid(double))
CheckRelinearizationRecursiveDouble(
boost::get<double>(relinearizeThreshold), delta, root, &relinKeys);
else if (relinearizeThreshold.type() == typeid(FastMap<char, Vector>))
CheckRelinearizationRecursiveMap(
boost::get<FastMap<char, Vector> >(relinearizeThreshold), delta, root,
&relinKeys);
}
return relinKeys;
}
/* ************************************************************************* */
void ISAM2::Impl::FindAll(ISAM2Clique::shared_ptr clique, KeySet& keys, const KeySet& markedMask)
{
static const bool debug = false;
// does the separator contain any of the variables?
bool found = false;
for(Key key: clique->conditional()->parents()) {
if (markedMask.exists(key)) {
found = true;
break;
}
}
if (found) {
// then add this clique
keys.insert(clique->conditional()->beginFrontals(), clique->conditional()->endFrontals());
if(debug) clique->print("Key(s) marked in clique ");
if(debug) cout << "so marking key " << clique->conditional()->front() << endl;
}
for(const ISAM2Clique::shared_ptr& child: clique->children) {
FindAll(child, keys, markedMask);
}
}
/* ************************************************************************* */
void ISAM2::Impl::ExpmapMasked(Values& values, const VectorValues& delta,
const KeySet& mask, boost::optional<VectorValues&> invalidateIfDebug, const KeyFormatter& keyFormatter)
{
// If debugging, invalidate if requested, otherwise do not invalidate.
// Invalidating means setting expmapped entries to Inf, to trigger assertions
// if we try to re-use them.
#ifdef NDEBUG
invalidateIfDebug = boost::none;
#endif
assert(values.size() == delta.size());
Values::iterator key_value;
VectorValues::const_iterator key_delta;
#ifdef GTSAM_USE_TBB
for(key_value = values.begin(); key_value != values.end(); ++key_value)
{
key_delta = delta.find(key_value->key);
#else
for(key_value = values.begin(), key_delta = delta.begin(); key_value != values.end(); ++key_value, ++key_delta)
{
assert(key_value->key == key_delta->first);
#endif
Key var = key_value->key;
assert(delta[var].size() == (int)key_value->value.dim());
assert(delta[var].allFinite());
if(mask.exists(var)) {
Value* retracted = key_value->value.retract_(delta[var]);
key_value->value = *retracted;
retracted->deallocate_();
if(invalidateIfDebug)
(*invalidateIfDebug)[var].operator=(Vector::Constant(delta[var].rows(), numeric_limits<double>::infinity())); // Strange syntax to work with clang++ (bug in clang?)
}
}
}
/* ************************************************************************* */
namespace internal {
inline static void optimizeInPlace(const boost::shared_ptr<ISAM2Clique>& clique, VectorValues& result) {
inline static void optimizeInPlace(const ISAM2::sharedClique& clique,
VectorValues* result) {
// parents are assumed to already be solved and available in result
result.update(clique->conditional()->solve(result));
result->update(clique->conditional()->solve(*result));
// starting from the root, call optimize on each conditional
for(const boost::shared_ptr<ISAM2Clique>& child: clique->children)
for (const ISAM2::sharedClique& child : clique->children)
optimizeInPlace(child, result);
}
}
} // namespace internal
/* ************************************************************************* */
size_t ISAM2::Impl::UpdateGaussNewtonDelta(const FastVector<ISAM2::sharedClique>& roots,
const KeySet& replacedKeys, VectorValues& delta, double wildfireThreshold) {
size_t ISAM2::Impl::UpdateGaussNewtonDelta(const ISAM2::Roots& roots,
const KeySet& replacedKeys,
double wildfireThreshold,
VectorValues* delta) {
size_t lastBacksubVariableCount;
if (wildfireThreshold <= 0.0) {
// Threshold is zero or less, so do a full recalculation
for(const ISAM2::sharedClique& root: roots)
for (const ISAM2::sharedClique& root : roots)
internal::optimizeInPlace(root, delta);
lastBacksubVariableCount = delta.size();
lastBacksubVariableCount = delta->size();
} else {
// Optimize with wildfire
lastBacksubVariableCount = 0;
for(const ISAM2::sharedClique& root: roots)
for (const ISAM2::sharedClique& root : roots)
lastBacksubVariableCount += optimizeWildfireNonRecursive(
root, wildfireThreshold, replacedKeys, delta); // modifies delta
root, wildfireThreshold, replacedKeys, delta); // modifies delta
#if !defined(NDEBUG) && defined(GTSAM_EXTRA_CONSISTENCY_CHECKS)
for (VectorValues::const_iterator key_delta = delta.begin(); key_delta != delta.end(); ++key_delta) {
assert(delta[key_delta->first].allFinite());
for (VectorValues::const_iterator key_delta = delta->begin();
key_delta != delta->end(); ++key_delta) {
assert((*delta)[key_delta->first].allFinite());
}
#endif
}
@@ -303,69 +221,77 @@ size_t ISAM2::Impl::UpdateGaussNewtonDelta(const FastVector<ISAM2::sharedClique>
/* ************************************************************************* */
namespace internal {
void updateRgProd(const boost::shared_ptr<ISAM2Clique>& clique, const KeySet& replacedKeys,
const VectorValues& grad, VectorValues& RgProd, size_t& varsUpdated) {
void updateRgProd(const ISAM2::sharedClique& clique, const KeySet& replacedKeys,
const VectorValues& grad, VectorValues* RgProd,
size_t* varsUpdated) {
// Check if any frontal or separator keys were recalculated, if so, we need to
// update deltas and recurse to children, but if not, we do not need to
// recurse further because of the running separator property.
bool anyReplaced = false;
for(Key j: *clique->conditional()) {
if(replacedKeys.exists(j)) {
for (Key j : *clique->conditional()) {
if (replacedKeys.exists(j)) {
anyReplaced = true;
break;
}
}
if(anyReplaced) {
if (anyReplaced) {
// Update the current variable
// Get VectorValues slice corresponding to current variables
Vector gR = grad.vector(FastVector<Key>(clique->conditional()->beginFrontals(), clique->conditional()->endFrontals()));
Vector gS = grad.vector(FastVector<Key>(clique->conditional()->beginParents(), clique->conditional()->endParents()));
Vector gR =
grad.vector(FastVector<Key>(clique->conditional()->beginFrontals(),
clique->conditional()->endFrontals()));
Vector gS =
grad.vector(FastVector<Key>(clique->conditional()->beginParents(),
clique->conditional()->endParents()));
// Compute R*g and S*g for this clique
Vector RSgProd = clique->conditional()->get_R() * gR + clique->conditional()->get_S() * gS;
Vector RSgProd = clique->conditional()->get_R() * gR +
clique->conditional()->get_S() * gS;
// Write into RgProd vector
DenseIndex vectorPosition = 0;
for(Key frontal: clique->conditional()->frontals()) {
Vector& RgProdValue = RgProd[frontal];
for (Key frontal : clique->conditional()->frontals()) {
Vector& RgProdValue = (*RgProd)[frontal];
RgProdValue = RSgProd.segment(vectorPosition, RgProdValue.size());
vectorPosition += RgProdValue.size();
}
// Now solve the part of the Newton's method point for this clique (back-substitution)
//(*clique)->solveInPlace(deltaNewton);
// Now solve the part of the Newton's method point for this clique
// (back-substitution)
// (*clique)->solveInPlace(deltaNewton);
varsUpdated += clique->conditional()->nrFrontals();
*varsUpdated += clique->conditional()->nrFrontals();
// Recurse to children
for(const ISAM2Clique::shared_ptr& child: clique->children) {
updateRgProd(child, replacedKeys, grad, RgProd, varsUpdated); }
for (const ISAM2::sharedClique& child : clique->children) {
updateRgProd(child, replacedKeys, grad, RgProd, varsUpdated);
}
}
}
}
} // namespace internal
/* ************************************************************************* */
size_t ISAM2::Impl::UpdateRgProd(const ISAM2::Roots& roots, const KeySet& replacedKeys,
const VectorValues& gradAtZero, VectorValues& RgProd) {
size_t ISAM2::Impl::UpdateRgProd(const ISAM2::Roots& roots,
const KeySet& replacedKeys,
const VectorValues& gradAtZero,
VectorValues* RgProd) {
// Update variables
size_t varsUpdated = 0;
for(const ISAM2::sharedClique& root: roots) {
internal::updateRgProd(root, replacedKeys, gradAtZero, RgProd, varsUpdated);
for (const ISAM2::sharedClique& root : roots) {
internal::updateRgProd(root, replacedKeys, gradAtZero, RgProd,
&varsUpdated);
}
return varsUpdated;
}
/* ************************************************************************* */
VectorValues ISAM2::Impl::ComputeGradientSearch(const VectorValues& gradAtZero,
const VectorValues& RgProd)
{
const VectorValues& RgProd) {
// Compute gradient squared-magnitude
const double gradientSqNorm = gradAtZero.dot(gradAtZero);
// Compute minimizing step size
double RgNormSq = RgProd.vector().squaredNorm();
double step = -gradientSqNorm / RgNormSq;
@@ -374,4 +300,4 @@ VectorValues ISAM2::Impl::ComputeGradientSearch(const VectorValues& gradAtZero,
return step * gradAtZero;
}
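// (Sketch of the math above: with g = gradAtZero and R*g = RgProd, minimizing
// the quadratic model 0.5 * ||R x||^2 + g.dot(x) along the ray x = alpha * g
// gives alpha = -||g||^2 / ||R g||^2, i.e. step = -gradientSqNorm / RgNormSq;
// the returned step * gradAtZero is the steepest-descent (Cauchy) point used
// by the Dogleg optimizer.)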
}
} // namespace gtsam

ISAM2-impl.h

@@ -23,7 +23,6 @@
namespace gtsam {
struct GTSAM_EXPORT ISAM2::Impl {
struct GTSAM_EXPORT PartialSolveResult {
ISAM2::sharedClique bayesTree;
};
@@ -32,35 +31,14 @@ struct GTSAM_EXPORT ISAM2::Impl {
size_t nFullSystemVars;
enum { /*AS_ADDED,*/ COLAMD } algorithm;
enum { NO_CONSTRAINT, CONSTRAIN_LAST } constrain;
boost::optional<FastMap<Key,int> > constrainedKeys;
boost::optional<FastMap<Key, int> > constrainedKeys;
};
/**
* Add new variables to the ISAM2 system.
* @param newTheta Initial values for new variables
* @param theta Current solution to be augmented with new initialization
* @param delta Current linear delta to be augmented with zeros
* @param ordering Current ordering to be augmented with new variables
* @param nodes Current BayesTree::Nodes index to be augmented with slots for new variables
* @param keyFormatter Formatter for printing nonlinear keys during debugging
*/
static void AddVariables(const Values& newTheta, Values& theta, VectorValues& delta,
VectorValues& deltaNewton, VectorValues& RgProd,
const KeyFormatter& keyFormatter = DefaultKeyFormatter);
/// Perform the first part of the bookkeeping updates for adding new factors. Adds them to the
/// complete list of nonlinear factors, and populates the list of new factor indices, both
/// optionally finding and reusing empty factor slots.
static void AddFactorsStep1(const NonlinearFactorGraph& newFactors, bool useUnusedSlots,
NonlinearFactorGraph& nonlinearFactors, FactorIndices& newFactorIndices);
/**
* Remove variables from the ISAM2 system.
*/
static void RemoveVariables(const KeySet& unusedKeys, const FastVector<ISAM2::sharedClique>& roots,
Values& theta, VariableIndex& variableIndex, VectorValues& delta, VectorValues& deltaNewton,
VectorValues& RgProd, KeySet& replacedKeys, Base::Nodes& nodes,
KeySet& fixedVariables);
NonlinearFactorGraph* nonlinearFactors, FactorIndices* newFactorIndices);
/**
* Find the set of variables to be relinearized according to relinearizeThreshold.
@@ -85,57 +63,22 @@ struct GTSAM_EXPORT ISAM2::Impl {
* @return The set of variable indices in delta whose magnitude is greater than or
* equal to relinearizeThreshold
*/
static KeySet CheckRelinearizationPartial(const FastVector<ISAM2::sharedClique>& roots,
static KeySet CheckRelinearizationPartial(const ISAM2::Roots& roots,
const VectorValues& delta, const ISAM2Params::RelinearizationThreshold& relinearizeThreshold);
/**
* Recursively search this clique and its children for marked keys appearing
* in the separator, and add the *frontal* keys of any cliques whose
* separator contains any marked keys to the set \c keys. The purpose of
* this is to discover the cliques that need to be redone due to information
* propagating to them from cliques that directly contain factors being
* relinearized.
*
* The original comment says this finds all variables directly connected to
* the marked ones by measurements. Is this true, because it seems like this
* would also pull in variables indirectly connected through other frontal or
* separator variables?
*
* Alternatively could we trace up towards the root for each variable here?
*/
static void FindAll(ISAM2Clique::shared_ptr clique, KeySet& keys, const KeySet& markedMask);
/**
* Apply expmap to the given values, but only for indices appearing in
* \c markedRelinMask. Values are expmapped in-place.
* \param [in, out] values The value to expmap in-place
* \param delta The linear delta with which to expmap
* \param ordering The ordering
* \param mask Mask on linear indices, only \c true entries are expmapped
* \param invalidateIfDebug If this is true, *and* NDEBUG is not defined,
* expmapped deltas will be set to an invalid value (infinity) to catch bugs
* where we might expmap something twice, or expmap it but then not
* recalculate its delta.
* @param keyFormatter Formatter for printing nonlinear keys during debugging
*/
static void ExpmapMasked(Values& values, const VectorValues& delta,
const KeySet& mask,
boost::optional<VectorValues&> invalidateIfDebug = boost::none,
const KeyFormatter& keyFormatter = DefaultKeyFormatter);
/**
* Update the Newton's method step point, using wildfire
*/
static size_t UpdateGaussNewtonDelta(const FastVector<ISAM2::sharedClique>& roots,
const KeySet& replacedKeys, VectorValues& delta, double wildfireThreshold);
static size_t UpdateGaussNewtonDelta(const ISAM2::Roots& roots,
const KeySet& replacedKeys, double wildfireThreshold, VectorValues* delta);
/**
* Update the RgProd (R*g) incrementally taking into account which variables
* have been recalculated in \c replacedKeys. Only used in Dogleg.
*/
static size_t UpdateRgProd(const ISAM2::Roots& roots, const KeySet& replacedKeys,
const VectorValues& gradAtZero, VectorValues& RgProd);
const VectorValues& gradAtZero, VectorValues* RgProd);
/**
* Compute the gradient-search point. Only used in Dogleg.
*/

ISAM2-inl.h

@@ -1,311 +0,0 @@
/* ----------------------------------------------------------------------------
* GTSAM Copyright 2010, Georgia Tech Research Corporation,
* Atlanta, Georgia 30332-0415
* All Rights Reserved
* Authors: Frank Dellaert, et al. (see THANKS for the full author list)
* See LICENSE for the license information
* -------------------------------------------------------------------------- */
/**
* @file ISAM2-inl.h
* @brief
* @author Richard Roberts
* @date Mar 16, 2012
*/
#pragma once
#include <stack>
#include <gtsam/inference/FactorGraph.h>
#include <gtsam/linear/JacobianFactor.h>
namespace gtsam {
/* ************************************************************************* */
template<class VALUE>
VALUE ISAM2::calculateEstimate(Key key) const {
const Vector& delta = getDelta()[key];
return traits<VALUE>::Retract(theta_.at<VALUE>(key), delta);
}
/* ************************************************************************* */
namespace internal {
template<class CLIQUE>
void optimizeWildfire(const boost::shared_ptr<CLIQUE>& clique, double threshold,
KeySet& changed, const KeySet& replaced, VectorValues& delta, size_t& count)
{
// if none of the variables in this clique (frontal and separator!) changed
// significantly, then by the running intersection property, none of the
// cliques in the children need to be processed
// Are any clique variables part of the tree that has been redone?
bool cliqueReplaced = replaced.exists((*clique)->frontals().front());
#if !defined(NDEBUG) && defined(GTSAM_EXTRA_CONSISTENCY_CHECKS)
for(Key frontal: clique->conditional()->frontals()) {
assert(cliqueReplaced == replaced.exists(frontal));
}
#endif
// If not redone, then has one of the separator variables changed significantly?
bool recalculate = cliqueReplaced;
if(!recalculate) {
for(Key parent: clique->conditional()->parents()) {
if(changed.exists(parent)) {
recalculate = true;
break;
}
}
}
// Solve clique if it was replaced, or if any parents were changed above the
// threshold or themselves replaced.
if(recalculate) {
// Temporary copy of the original values, to check how much they change
FastVector<Vector> originalValues(clique->conditional()->nrFrontals());
GaussianConditional::const_iterator it;
for(it = clique->conditional()->beginFrontals(); it!=clique->conditional()->endFrontals(); it++) {
originalValues[it - clique->conditional()->beginFrontals()] = delta[*it];
}
// Back-substitute
delta.update(clique->conditional()->solve(delta));
count += clique->conditional()->nrFrontals();
// Whether the values changed above a threshold, or always true if the
// clique was replaced.
bool valuesChanged = cliqueReplaced;
for(it = clique->conditional()->beginFrontals(); it != clique->conditional()->endFrontals(); it++) {
if(!valuesChanged) {
const Vector& oldValue(originalValues[it - clique->conditional()->beginFrontals()]);
const Vector& newValue(delta[*it]);
if((oldValue - newValue).lpNorm<Eigen::Infinity>() >= threshold) {
valuesChanged = true;
break;
}
} else
break;
}
// If the values were above the threshold or this clique was replaced
if(valuesChanged) {
// Set changed flag for each frontal variable and leave the new values
for(Key frontal: clique->conditional()->frontals()) {
changed.insert(frontal);
}
} else {
// Replace with the old values
for(it = clique->conditional()->beginFrontals(); it != clique->conditional()->endFrontals(); it++) {
delta[*it] = originalValues[it - clique->conditional()->beginFrontals()];
}
}
// Recurse to children
for(const typename CLIQUE::shared_ptr& child: clique->children) {
optimizeWildfire(child, threshold, changed, replaced, delta, count);
}
}
}
template<class CLIQUE>
bool optimizeWildfireNode(const boost::shared_ptr<CLIQUE>& clique, double threshold,
KeySet& changed, const KeySet& replaced, VectorValues& delta, size_t& count)
{
// if none of the variables in this clique (frontal and separator!) changed
// significantly, then by the running intersection property, none of the
// cliques in the children need to be processed
// Are any clique variables part of the tree that has been redone?
bool cliqueReplaced = replaced.exists(clique->conditional()->frontals().front());
#if !defined(NDEBUG) && defined(GTSAM_EXTRA_CONSISTENCY_CHECKS)
for(Key frontal: clique->conditional()->frontals()) {
assert(cliqueReplaced == replaced.exists(frontal));
}
#endif
// If not redone, then has one of the separator variables changed significantly?
bool recalculate = cliqueReplaced;
if(!recalculate) {
for(Key parent: clique->conditional()->parents()) {
if(changed.exists(parent)) {
recalculate = true;
break;
}
}
}
// Solve clique if it was replaced, or if any parents were changed above the
// threshold or themselves replaced.
// TODO(gareth): This code shares a lot of logic w/ linearAlgorithms-inst, potentially refactor
if(recalculate)
{
// Temporary copy of the original values, to check how much they change
FastVector<Vector> originalValues(clique->conditional()->nrFrontals());
GaussianConditional::const_iterator it;
for(it = clique->conditional()->beginFrontals(); it != clique->conditional()->endFrontals(); it++) {
originalValues[it - clique->conditional()->beginFrontals()] = delta[*it];
}
// Back-substitute - special version stores solution pointers in cliques for fast access.
{
// Create solution part pointers if necessary and possible - necessary if solnPointers_ is
// empty, and possible if either we're a root, or we have a parent with valid solnPointers_.
boost::shared_ptr<CLIQUE> parent = clique->parent_.lock();
if(clique->solnPointers_.empty() && (clique->isRoot() || !parent->solnPointers_.empty()))
{
for(Key key: clique->conditional()->frontals())
clique->solnPointers_.insert(std::make_pair(key, delta.find(key)));
for(Key key: clique->conditional()->parents())
clique->solnPointers_.insert(std::make_pair(key, parent->solnPointers_.at(key)));
}
// See if we can use solution part pointers - we can if they either already existed or were
// created above.
if(!clique->solnPointers_.empty())
{
GaussianConditional& c = *clique->conditional();
// Solve matrix
Vector xS;
{
// Count dimensions of vector
DenseIndex dim = 0;
FastVector<VectorValues::const_iterator> parentPointers;
parentPointers.reserve(clique->conditional()->nrParents());
for(Key parent: clique->conditional()->parents()) {
parentPointers.push_back(clique->solnPointers_.at(parent));
dim += parentPointers.back()->second.size();
}
// Fill parent vector
xS.resize(dim);
DenseIndex vectorPos = 0;
for(const VectorValues::const_iterator& parentPointer: parentPointers) {
const Vector& parentVector = parentPointer->second;
xS.block(vectorPos,0,parentVector.size(),1) = parentVector.block(0,0,parentVector.size(),1);
vectorPos += parentVector.size();
}
}
// NOTE(gareth): We can no longer write: xS = b - S * xS
// This is because Eigen (as of 3.3) no longer evaluates S * xS into
// a temporary, and the operation trashes values in xS.
// See: http://eigen.tuxfamily.org/index.php?title=3.3
const Vector rhs = c.getb() - c.get_S() * xS;
const Vector solution = c.get_R().triangularView<Eigen::Upper>().solve(rhs);
// Check for indeterminant solution
if(solution.hasNaN()) throw IndeterminantLinearSystemException(c.keys().front());
// Insert solution into a VectorValues
DenseIndex vectorPosition = 0;
for(GaussianConditional::const_iterator frontal = c.beginFrontals(); frontal != c.endFrontals(); ++frontal) {
clique->solnPointers_.at(*frontal)->second = solution.segment(vectorPosition, c.getDim(frontal));
vectorPosition += c.getDim(frontal);
}
}
else
{
// Just call plain solve because we couldn't use solution pointers.
delta.update(clique->conditional()->solve(delta));
}
}
count += clique->conditional()->nrFrontals();
// Whether the values changed above a threshold, or always true if the
// clique was replaced.
bool valuesChanged = cliqueReplaced;
for(it = clique->conditional()->beginFrontals(); it != clique->conditional()->endFrontals(); it++) {
if(!valuesChanged) {
const Vector& oldValue(originalValues[it - clique->conditional()->beginFrontals()]);
const Vector& newValue(delta[*it]);
if((oldValue - newValue).lpNorm<Eigen::Infinity>() >= threshold) {
valuesChanged = true;
break;
}
} else
break;
}
// If the values were above the threshold or this clique was replaced
if(valuesChanged) {
// Set changed flag for each frontal variable and leave the new values
for(Key frontal: clique->conditional()->frontals()) {
changed.insert(frontal);
}
} else {
// Replace with the old values
for(it = clique->conditional()->beginFrontals(); it != clique->conditional()->endFrontals(); it++) {
delta[*it] = originalValues[it - clique->conditional()->beginFrontals()];
}
}
}
return recalculate;
}
} // namespace internal
/* ************************************************************************* */
template<class CLIQUE>
size_t optimizeWildfire(const boost::shared_ptr<CLIQUE>& root, double threshold, const KeySet& keys, VectorValues& delta) {
KeySet changed;
size_t count = 0;
// starting from the root, call optimize on each conditional
if(root)
internal::optimizeWildfire(root, threshold, changed, keys, delta, count);
return count;
}
/* ************************************************************************* */
template<class CLIQUE>
size_t optimizeWildfireNonRecursive(const boost::shared_ptr<CLIQUE>& root, double threshold, const KeySet& keys, VectorValues& delta)
{
KeySet changed;
size_t count = 0;
if (root) {
std::stack<boost::shared_ptr<CLIQUE> > travStack;
travStack.push(root);
boost::shared_ptr<CLIQUE> currentNode = root;
while (!travStack.empty()) {
currentNode = travStack.top();
travStack.pop();
bool recalculate = internal::optimizeWildfireNode(currentNode, threshold, changed, keys, delta, count);
if (recalculate) {
for(const typename CLIQUE::shared_ptr& child: currentNode->children) {
travStack.push(child);
}
}
}
}
return count;
}
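// Usage sketch (illustrative only, not library code): this is roughly how the
// wildfire pass is driven after re-elimination. `replacedKeys` would hold the
// keys re-eliminated in the current update and `delta` the current linear
// solution; only subtrees whose frontal deltas move by at least `threshold`
// are re-solved.
//
//   KeySet replacedKeys;               // filled during re-elimination
//   VectorValues delta = isam.getDelta();
//   const double threshold = 0.001;    // e.g. ISAM2GaussNewtonParams default
//   size_t solved = 0;
//   for (const ISAM2::sharedClique& root : isam.roots())
//     solved += optimizeWildfireNonRecursive(root, threshold, replacedKeys, delta);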
/* ************************************************************************* */
template<class CLIQUE>
void nnz_internal(const boost::shared_ptr<CLIQUE>& clique, int& result) {
int dimR = (int)clique->conditional()->rows();
int dimSep = (int)clique->conditional()->get_S().cols();
result += ((dimR+1)*dimR)/2 + dimSep*dimR;
// traverse the children
for(const typename CLIQUE::shared_ptr& child: clique->children) {
nnz_internal(child, result);
}
}
/* ************************************************************************* */
template<class CLIQUE>
int calculate_nnz(const boost::shared_ptr<CLIQUE>& clique) {
int result = 0;
// starting from the root, add up entries of frontal and conditional matrices of each conditional
nnz_internal(clique, result);
return result;
}
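// Example (sketch): the non-zero count of the square-root information factor
// can be obtained by starting from a root clique, e.g. for a single-root tree:
//
//   int nnz = calculate_nnz(isam.roots().front());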
}

File diff suppressed because it is too large

View File

@ -11,438 +11,47 @@
/**
* @file ISAM2.h
 * @brief Incremental update functionality (ISAM2) for BayesTree, with fluid
 * relinearization.
 * @author Michael Kaess, Richard Roberts, Frank Dellaert
*/
// \callgraph
#pragma once
#include <gtsam/nonlinear/ISAM2Params.h>
#include <gtsam/nonlinear/ISAM2Result.h>
#include <gtsam/nonlinear/ISAM2Clique.h>
#include <gtsam/nonlinear/NonlinearFactorGraph.h>
#include <gtsam/nonlinear/DoglegOptimizerImpl.h>
#include <gtsam/linear/GaussianBayesTree.h>
#include <boost/variant.hpp>
#include <vector>
namespace gtsam {
/**
* @addtogroup ISAM2
* Parameters for ISAM2 using Gauss-Newton optimization. Either this class or
* ISAM2DoglegParams should be specified as the optimizationParams in
* ISAM2Params, which should in turn be passed to ISAM2(const ISAM2Params&).
*/
struct GTSAM_EXPORT ISAM2GaussNewtonParams {
double wildfireThreshold; ///< Continue updating the linear delta only when changes are above this threshold (default: 0.001)
/** Specify parameters as constructor arguments */
ISAM2GaussNewtonParams(
double _wildfireThreshold = 0.001 ///< see ISAM2GaussNewtonParams public variables, ISAM2GaussNewtonParams::wildfireThreshold
) : wildfireThreshold(_wildfireThreshold) {}
void print(const std::string str = "") const {
std::cout << str << "type: ISAM2GaussNewtonParams\n";
std::cout << str << "wildfireThreshold: " << wildfireThreshold << "\n";
std::cout.flush();
}
double getWildfireThreshold() const { return wildfireThreshold; }
void setWildfireThreshold(double wildfireThreshold) { this->wildfireThreshold = wildfireThreshold; }
};
/**
* @addtogroup ISAM2
* Parameters for ISAM2 using Dogleg optimization. Either this class or
* ISAM2GaussNewtonParams should be specified as the optimizationParams in
* ISAM2Params, which should in turn be passed to ISAM2(const ISAM2Params&).
*/
struct GTSAM_EXPORT ISAM2DoglegParams {
double initialDelta; ///< The initial trust region radius for Dogleg
double wildfireThreshold; ///< Continue updating the linear delta only when changes are above this threshold (default: 1e-5)
DoglegOptimizerImpl::TrustRegionAdaptationMode adaptationMode; ///< See description in DoglegOptimizerImpl::TrustRegionAdaptationMode
bool verbose; ///< Whether Dogleg prints iteration and convergence information
/** Specify parameters as constructor arguments */
ISAM2DoglegParams(
double _initialDelta = 1.0, ///< see ISAM2DoglegParams::initialDelta
double _wildfireThreshold = 1e-5, ///< see ISAM2DoglegParams::wildfireThreshold
DoglegOptimizerImpl::TrustRegionAdaptationMode _adaptationMode = DoglegOptimizerImpl::SEARCH_EACH_ITERATION, ///< see ISAM2DoglegParams::adaptationMode
bool _verbose = false ///< see ISAM2DoglegParams::verbose
) : initialDelta(_initialDelta), wildfireThreshold(_wildfireThreshold),
adaptationMode(_adaptationMode), verbose(_verbose) {}
void print(const std::string str = "") const {
std::cout << str << "type: ISAM2DoglegParams\n";
std::cout << str << "initialDelta: " << initialDelta << "\n";
std::cout << str << "wildfireThreshold: " << wildfireThreshold << "\n";
std::cout << str << "adaptationMode: " << adaptationModeTranslator(adaptationMode) << "\n";
std::cout.flush();
}
double getInitialDelta() const { return initialDelta; }
double getWildfireThreshold() const { return wildfireThreshold; }
std::string getAdaptationMode() const { return adaptationModeTranslator(adaptationMode); };
bool isVerbose() const { return verbose; };
void setInitialDelta(double initialDelta) { this->initialDelta = initialDelta; }
void setWildfireThreshold(double wildfireThreshold) { this->wildfireThreshold = wildfireThreshold; }
void setAdaptationMode(const std::string& adaptationMode) { this->adaptationMode = adaptationModeTranslator(adaptationMode); }
void setVerbose(bool verbose) { this->verbose = verbose; };
std::string adaptationModeTranslator(const DoglegOptimizerImpl::TrustRegionAdaptationMode& adaptationMode) const;
DoglegOptimizerImpl::TrustRegionAdaptationMode adaptationModeTranslator(const std::string& adaptationMode) const;
};
/**
* @addtogroup ISAM2
* Parameters for the ISAM2 algorithm. Default parameter values are listed below.
*/
typedef FastMap<char,Vector> ISAM2ThresholdMap;
typedef ISAM2ThresholdMap::value_type ISAM2ThresholdMapValue;
struct GTSAM_EXPORT ISAM2Params {
typedef boost::variant<ISAM2GaussNewtonParams, ISAM2DoglegParams> OptimizationParams; ///< Either ISAM2GaussNewtonParams or ISAM2DoglegParams
typedef boost::variant<double, FastMap<char,Vector> > RelinearizationThreshold; ///< Either a constant relinearization threshold or a per-variable-type set of thresholds
/** Optimization parameters, this both selects the nonlinear optimization
* method and specifies its parameters, either ISAM2GaussNewtonParams or
* ISAM2DoglegParams. In the former, Gauss-Newton optimization will be used
* with the specified parameters, and in the latter Powell's dog-leg
* algorithm will be used with the specified parameters.
*/
OptimizationParams optimizationParams;
/** Only relinearize variables whose linear delta magnitude is greater than
* this threshold (default: 0.1). If this is a FastMap<char,Vector> instead
* of a double, then the threshold is specified for each dimension of each
* variable type. This parameter then maps from a character indicating the
* variable type to a Vector of thresholds for each dimension of that
* variable. For example, if Pose keys are of type TypedSymbol<'x',Pose3>,
* and landmark keys are of type TypedSymbol<'l',Point3>, then appropriate
* entries would be added with:
* \code
FastMap<char,Vector> thresholds;
thresholds['x'] = (Vector(6) << 0.1, 0.1, 0.1, 0.5, 0.5, 0.5).finished(); // 0.1 rad rotation threshold, 0.5 m translation threshold
thresholds['l'] = Vector3(1.0, 1.0, 1.0); // 1.0 m landmark position threshold
params.relinearizeThreshold = thresholds;
\endcode
*/
RelinearizationThreshold relinearizeThreshold;
int relinearizeSkip; ///< Only relinearize any variables every relinearizeSkip calls to ISAM2::update (default: 10)
bool enableRelinearization; ///< Controls whether ISAM2 will ever relinearize any variables (default: true)
bool evaluateNonlinearError; ///< Whether to evaluate the nonlinear error before and after the update, to return in ISAM2Result from update()
enum Factorization { CHOLESKY, QR };
/** Specifies whether to use QR or CHOLESKY numerical factorization (default: CHOLESKY).
* Cholesky is faster but potentially numerically unstable for poorly-conditioned problems, which can occur when
* uncertainty is very low in some variables (or dimensions of variables) and very high in others. QR is
* slower but more numerically stable in poorly-conditioned problems. We suggest using the default of Cholesky
 * unless gtsam sometimes throws IndeterminantLinearSystemException when your problem's Hessian is actually positive
* definite. For positive definite problems, numerical error accumulation can cause the problem to become
* numerically negative or indefinite as solving proceeds, especially when using Cholesky.
*/
Factorization factorization;
/** Whether to cache linear factors (default: true).
* This can improve performance if linearization is expensive, but can hurt
 * performance if linearization is very cheap, due to the computation required to look up
* additional keys.
*/
bool cacheLinearizedFactors;
KeyFormatter keyFormatter; ///< A KeyFormatter for when keys are printed during debugging (default: DefaultKeyFormatter)
bool enableDetailedResults; ///< Whether to compute and return ISAM2Result::detailedResults, this can increase running time (default: false)
/** Check variables for relinearization in tree-order, stopping the check once a variable does not need to be relinearized (default: false).
* This can improve speed by only checking a small part of the top of the tree. However, variables below the check cut-off can accumulate
* significant deltas without triggering relinearization. This is particularly useful in exploration scenarios where real-time performance
* is desired over correctness. Use with caution.
*/
bool enablePartialRelinearizationCheck;
/// When you will be removing many factors, e.g. when using ISAM2 as a fixed-lag smoother, enable this option to
/// add factors in the first available factor slots, to avoid accumulating NULL factor slots, at the cost of
/// having to search for slots every time a factor is added.
bool findUnusedFactorSlots;
/** Specify parameters as constructor arguments */
ISAM2Params(
OptimizationParams _optimizationParams = ISAM2GaussNewtonParams(), ///< see ISAM2Params::optimizationParams
RelinearizationThreshold _relinearizeThreshold = 0.1, ///< see ISAM2Params::relinearizeThreshold
int _relinearizeSkip = 10, ///< see ISAM2Params::relinearizeSkip
bool _enableRelinearization = true, ///< see ISAM2Params::enableRelinearization
bool _evaluateNonlinearError = false, ///< see ISAM2Params::evaluateNonlinearError
Factorization _factorization = ISAM2Params::CHOLESKY, ///< see ISAM2Params::factorization
bool _cacheLinearizedFactors = true, ///< see ISAM2Params::cacheLinearizedFactors
const KeyFormatter& _keyFormatter = DefaultKeyFormatter ///< see ISAM2Params::keyFormatter
) : optimizationParams(_optimizationParams), relinearizeThreshold(_relinearizeThreshold),
relinearizeSkip(_relinearizeSkip), enableRelinearization(_enableRelinearization),
evaluateNonlinearError(_evaluateNonlinearError), factorization(_factorization),
cacheLinearizedFactors(_cacheLinearizedFactors), keyFormatter(_keyFormatter),
enableDetailedResults(false), enablePartialRelinearizationCheck(false),
findUnusedFactorSlots(false) {}
/// print iSAM2 parameters
void print(const std::string& str = "") const {
std::cout << str << "\n";
if(optimizationParams.type() == typeid(ISAM2GaussNewtonParams))
boost::get<ISAM2GaussNewtonParams>(optimizationParams).print("optimizationParams: ");
else if(optimizationParams.type() == typeid(ISAM2DoglegParams))
boost::get<ISAM2DoglegParams>(optimizationParams).print("optimizationParams: ");
else
std::cout << "optimizationParams: " << "{unknown type}" << "\n";
if(relinearizeThreshold.type() == typeid(double))
std::cout << "relinearizeThreshold: " << boost::get<double>(relinearizeThreshold) << "\n";
else
{
std::cout << "relinearizeThreshold: " << "{mapped}" << "\n";
for(const ISAM2ThresholdMapValue& value: boost::get<ISAM2ThresholdMap>(relinearizeThreshold)) {
std::cout << " '" << value.first << "' -> [" << value.second.transpose() << " ]\n";
}
}
std::cout << "relinearizeSkip: " << relinearizeSkip << "\n";
std::cout << "enableRelinearization: " << enableRelinearization << "\n";
std::cout << "evaluateNonlinearError: " << evaluateNonlinearError << "\n";
std::cout << "factorization: " << factorizationTranslator(factorization) << "\n";
std::cout << "cacheLinearizedFactors: " << cacheLinearizedFactors << "\n";
std::cout << "enableDetailedResults: " << enableDetailedResults << "\n";
std::cout << "enablePartialRelinearizationCheck: " << enablePartialRelinearizationCheck << "\n";
std::cout << "findUnusedFactorSlots: " << findUnusedFactorSlots << "\n";
std::cout.flush();
}
/// @name Getters and Setters for all properties
/// @{
OptimizationParams getOptimizationParams() const { return this->optimizationParams; }
RelinearizationThreshold getRelinearizeThreshold() const { return relinearizeThreshold; }
int getRelinearizeSkip() const { return relinearizeSkip; }
bool isEnableRelinearization() const { return enableRelinearization; }
bool isEvaluateNonlinearError() const { return evaluateNonlinearError; }
std::string getFactorization() const { return factorizationTranslator(factorization); }
bool isCacheLinearizedFactors() const { return cacheLinearizedFactors; }
KeyFormatter getKeyFormatter() const { return keyFormatter; }
bool isEnableDetailedResults() const { return enableDetailedResults; }
bool isEnablePartialRelinearizationCheck() const { return enablePartialRelinearizationCheck; }
void setOptimizationParams(OptimizationParams optimizationParams) { this->optimizationParams = optimizationParams; }
void setRelinearizeThreshold(RelinearizationThreshold relinearizeThreshold) { this->relinearizeThreshold = relinearizeThreshold; }
void setRelinearizeSkip(int relinearizeSkip) { this->relinearizeSkip = relinearizeSkip; }
void setEnableRelinearization(bool enableRelinearization) { this->enableRelinearization = enableRelinearization; }
void setEvaluateNonlinearError(bool evaluateNonlinearError) { this->evaluateNonlinearError = evaluateNonlinearError; }
void setFactorization(const std::string& factorization) { this->factorization = factorizationTranslator(factorization); }
void setCacheLinearizedFactors(bool cacheLinearizedFactors) { this->cacheLinearizedFactors = cacheLinearizedFactors; }
void setKeyFormatter(KeyFormatter keyFormatter) { this->keyFormatter = keyFormatter; }
void setEnableDetailedResults(bool enableDetailedResults) { this->enableDetailedResults = enableDetailedResults; }
void setEnablePartialRelinearizationCheck(bool enablePartialRelinearizationCheck) { this->enablePartialRelinearizationCheck = enablePartialRelinearizationCheck; }
GaussianFactorGraph::Eliminate getEliminationFunction() const {
return factorization == CHOLESKY
? (GaussianFactorGraph::Eliminate)EliminatePreferCholesky
: (GaussianFactorGraph::Eliminate)EliminateQR;
}
/// @}
/// @name Some utilities
/// @{
static Factorization factorizationTranslator(const std::string& str);
static std::string factorizationTranslator(const Factorization& value);
/// @}
};
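// Example (illustrative sketch only): configuring ISAM2 with Dogleg
// optimization and a per-variable-type relinearization threshold. The key
// characters 'x' and 'l' are assumptions about how an application names its
// pose and landmark variables.
//
//   ISAM2Params params;
//   params.optimizationParams = ISAM2DoglegParams(0.5);  // initial trust-region radius
//   FastMap<char, Vector> thresholds;
//   thresholds['x'] = (Vector(6) << 0.1, 0.1, 0.1, 0.5, 0.5, 0.5).finished();
//   thresholds['l'] = Vector3(1.0, 1.0, 1.0);
//   params.relinearizeThreshold = thresholds;
//   params.relinearizeSkip = 1;
//   ISAM2 isam(params);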
typedef FastVector<size_t> FactorIndices;
/**
* @addtogroup ISAM2
* This struct is returned from ISAM2::update() and contains information about
* the update that is useful for determining whether the solution is
* converging, and about how much work was required for the update. See member
* variables for details and information about each entry.
*/
struct GTSAM_EXPORT ISAM2Result {
/** The nonlinear error of all of the factors, \a including new factors and
* variables added during the current call to ISAM2::update(). This error is
* calculated using the following variable values:
* \li Pre-existing variables will be evaluated by combining their
* linearization point before this call to update, with their partial linear
* delta, as computed by ISAM2::calculateEstimate().
* \li New variables will be evaluated at their initialization points passed
* into the current call to update.
* \par Note: This will only be computed if ISAM2Params::evaluateNonlinearError
* is set to \c true, because there is some cost to this computation.
*/
boost::optional<double> errorBefore;
/** The nonlinear error of all of the factors computed after the current
* update, meaning that variables above the relinearization threshold
* (ISAM2Params::relinearizeThreshold) have been relinearized and new
* variables have undergone one linear update. Variable values are
* again computed by combining their linearization points with their
* partial linear deltas, by ISAM2::calculateEstimate().
* \par Note: This will only be computed if ISAM2Params::evaluateNonlinearError
* is set to \c true, because there is some cost to this computation.
*/
boost::optional<double> errorAfter;
/** The number of variables that were relinearized because their linear
 * deltas exceeded the relinearization threshold
* (ISAM2Params::relinearizeThreshold), combined with any additional
* variables that had to be relinearized because they were involved in
* the same factor as a variable above the relinearization threshold.
* On steps where no relinearization is considered
* (see ISAM2Params::relinearizeSkip), this count will be zero.
*/
size_t variablesRelinearized;
/** The number of variables that were reeliminated as parts of the Bayes'
* Tree were recalculated, due to new factors. When loop closures occur,
* this count will be large as the new loop-closing factors will tend to
* involve variables far away from the root, and everything up to the root
* will be reeliminated.
*/
size_t variablesReeliminated;
/** The number of factors that were included in reelimination of the Bayes' tree. */
size_t factorsRecalculated;
/** The number of cliques in the Bayes' Tree */
size_t cliques;
/** The indices of the newly-added factors, in 1-to-1 correspondence with the
* factors passed as \c newFactors to ISAM2::update(). These indices may be
* used later to refer to the factors in order to remove them.
*/
FactorIndices newFactorsIndices;
/** A struct holding detailed results, which must be enabled with
* ISAM2Params::enableDetailedResults.
*/
struct DetailedResults {
/** The status of a single variable, this struct is stored in
* DetailedResults::variableStatus */
struct VariableStatus {
/** Whether the variable was just reeliminated, due to being relinearized,
* observed, new, or on the path up to the root clique from another
* reeliminated variable. */
bool isReeliminated;
bool isAboveRelinThreshold; ///< Whether the variable was just relinearized due to being above the relinearization threshold
bool isRelinearizeInvolved; ///< Whether the variable was below the relinearization threshold but was relinearized by being involved in a factor with a variable above the relinearization threshold
bool isRelinearized; ///< Whether the variable was relinearized, either by being above the relinearization threshold or by involvement.
bool isObserved; ///< Whether the variable was just involved in new factors
bool isNew; ///< Whether the variable itself was just added
bool inRootClique; ///< Whether the variable is in the root clique
VariableStatus(): isReeliminated(false), isAboveRelinThreshold(false), isRelinearizeInvolved(false),
isRelinearized(false), isObserved(false), isNew(false), inRootClique(false) {}
};
/** The status of each variable during this update, see VariableStatus.
*/
FastMap<Key, VariableStatus> variableStatus;
};
/** Detailed results, if enabled by ISAM2Params::enableDetailedResults. See
* Detail for information about the results data stored here. */
boost::optional<DetailedResults> detail;
void print(const std::string str = "") const {
std::cout << str << " Reeliminated: " << variablesReeliminated << " Relinearized: " << variablesRelinearized << " Cliques: " << cliques << std::endl;
}
/** Getters and Setters */
size_t getVariablesRelinearized() const { return variablesRelinearized; };
size_t getVariablesReeliminated() const { return variablesReeliminated; };
size_t getCliques() const { return cliques; };
};
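// Example (hedged sketch): inspecting an ISAM2Result after an update, assuming
// newFactors/newValues were built by the application. The error fields are only
// populated when ISAM2Params::evaluateNonlinearError is enabled, hence the
// boost::optional checks.
//
//   ISAM2Result result = isam.update(newFactors, newValues);
//   if (result.errorBefore && result.errorAfter)
//     std::cout << "error: " << *result.errorBefore << " -> "
//               << *result.errorAfter << std::endl;
//   std::cout << "relinearized: " << result.getVariablesRelinearized()
//             << ", reeliminated: " << result.getVariablesReeliminated() << std::endl;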
/**
* Specialized Clique structure for ISAM2, incorporating caching and gradient contribution
* TODO: more documentation
*/
class GTSAM_EXPORT ISAM2Clique : public BayesTreeCliqueBase<ISAM2Clique, GaussianFactorGraph>
{
public:
typedef ISAM2Clique This;
typedef BayesTreeCliqueBase<This, GaussianFactorGraph> Base;
typedef boost::shared_ptr<This> shared_ptr;
typedef boost::weak_ptr<This> weak_ptr;
typedef GaussianConditional ConditionalType;
typedef ConditionalType::shared_ptr sharedConditional;
Base::FactorType::shared_ptr cachedFactor_;
Vector gradientContribution_;
FastMap<Key, VectorValues::iterator> solnPointers_;
/// Default constructor
ISAM2Clique() : Base() {}
/// Copy constructor, does *not* copy solution pointers as these are invalid in different trees.
ISAM2Clique(const ISAM2Clique& other) :
Base(other), cachedFactor_(other.cachedFactor_), gradientContribution_(other.gradientContribution_) {}
/// Assignment operator, does *not* copy solution pointers as these are invalid in different trees.
ISAM2Clique& operator=(const ISAM2Clique& other)
{
Base::operator=(other);
cachedFactor_ = other.cachedFactor_;
gradientContribution_ = other.gradientContribution_;
return *this;
}
/// Overridden to also store the remaining factor and gradient contribution
void setEliminationResult(const FactorGraphType::EliminationResult& eliminationResult);
/** Access the cached factor */
Base::FactorType::shared_ptr& cachedFactor() { return cachedFactor_; }
/** Access the gradient contribution */
const Vector& gradientContribution() const { return gradientContribution_; }
bool equals(const This& other, double tol=1e-9) const;
/** print this node */
void print(const std::string& s = "", const KeyFormatter& formatter = DefaultKeyFormatter) const;
private:
/** Serialization function */
friend class boost::serialization::access;
template<class ARCHIVE>
void serialize(ARCHIVE & ar, const unsigned int /*version*/) {
ar & BOOST_SERIALIZATION_BASE_OBJECT_NVP(Base);
ar & BOOST_SERIALIZATION_NVP(cachedFactor_);
ar & BOOST_SERIALIZATION_NVP(gradientContribution_);
}
}; // \struct ISAM2Clique
/**
* @addtogroup ISAM2
* Implementation of the full ISAM2 algorithm for incremental nonlinear optimization.
* Implementation of the full ISAM2 algorithm for incremental nonlinear
* optimization.
*
 * The typical cycle of using this class is to create an instance by providing
 * ISAM2Params to the constructor, then add measurements and variables as they
 * arrive using the update() method. At any time, calculateEstimate() may be
 * called to obtain the current estimate of all variables.
*
*/
class GTSAM_EXPORT ISAM2: public BayesTree<ISAM2Clique> {
protected:
class GTSAM_EXPORT ISAM2 : public BayesTree<ISAM2Clique> {
protected:
/** The current linearization point */
Values theta_;
/** VariableIndex lets us look up factors by involved variable and keeps track of dimensions */
/** VariableIndex lets us look up factors by involved variable and keeps track
* of dimensions */
VariableIndex variableIndex_;
/** The linear delta from the last linear solution, an update to the estimate in theta
/** The linear delta from the last linear solution, an update to the estimate
* in theta
*
* This is \c mutable because it is a "cached" variable - it is not updated
* until either requested with getDelta() or calculateEstimate(), or needed
@ -450,8 +59,10 @@ protected:
*/
mutable VectorValues delta_;
mutable VectorValues deltaNewton_; // Only used when using Dogleg - stores the Gauss-Newton update
mutable VectorValues RgProd_; // Only used when using Dogleg - stores R*g and is updated incrementally
mutable VectorValues deltaNewton_; // Only used when using Dogleg - stores
// the Gauss-Newton update
mutable VectorValues RgProd_; // Only used when using Dogleg - stores R*g and
// is updated incrementally
/** A cumulative mask for the variables that were replaced and have not yet
* been updated in the linear solution delta_, this is only used internally,
@ -461,9 +72,11 @@ protected:
* This is \c mutable because it is used internally to not update delta_
* until it is needed.
*/
mutable KeySet deltaReplacedMask_; // TODO: Make sure accessed in the right way
mutable KeySet
deltaReplacedMask_; // TODO(dellaert): Make sure accessed in the right way
/** All original nonlinear factors are stored here to use during relinearization */
/** All original nonlinear factors are stored here to use during
* relinearization */
NonlinearFactorGraph nonlinearFactors_;
/** The current linear factors, which are only updated as needed */
@ -479,20 +92,21 @@ protected:
* variables and thus cannot have their linearization points changed. */
KeySet fixedVariables_;
int update_count_; ///< Counter incremented every update(), used to determine periodic relinearization
int update_count_; ///< Counter incremented every update(), used to determine
///< periodic relinearization
public:
typedef ISAM2 This; ///< This class
typedef BayesTree<ISAM2Clique> Base; ///< The BayesTree base class
typedef Base::Clique Clique; ///< A clique
typedef Base::sharedClique sharedClique; ///< Shared pointer to a clique
typedef Base::Cliques Cliques; ///< List of Clique typedef from base class
/** Create an empty ISAM2 instance */
ISAM2(const ISAM2Params& params);
explicit ISAM2(const ISAM2Params& params);
/** Create an empty ISAM2 instance using the default set of parameters (see ISAM2Params) */
/** Create an empty ISAM2 instance using the default set of parameters (see
* ISAM2Params) */
ISAM2();
/** default virtual destructor */
@ -513,26 +127,31 @@ public:
* thresholds.
*
* @param newFactors The new factors to be added to the system
 * @param newTheta Initialization points for new variables to be added to the
 * system. You must include here all new variables occurring in newFactors
 * (which were not already in the system). There must not be any variables
 * here that do not occur in newFactors, and additionally, variables that were
 * already in the system must not be included here.
* @param removeFactorIndices Indices of factors to remove from system
* @param force_relinearize Relinearize any variables whose delta magnitude is sufficiently
* large (Params::relinearizeThreshold), regardless of the relinearization interval
* (Params::relinearizeSkip).
* @param constrainedKeys is an optional map of keys to group labels, such that a variable can
* be constrained to a particular grouping in the BayesTree
* @param noRelinKeys is an optional set of nonlinear keys that iSAM2 will hold at a constant linearization
* point, regardless of the size of the linear delta
* @param extraReelimKeys is an optional set of nonlinear keys that iSAM2 will re-eliminate, regardless
* of the size of the linear delta. This allows the provided keys to be reordered.
* @param force_relinearize Relinearize any variables whose delta magnitude is
* sufficiently large (Params::relinearizeThreshold), regardless of the
* relinearization interval (Params::relinearizeSkip).
* @param constrainedKeys is an optional map of keys to group labels, such
* that a variable can be constrained to a particular grouping in the
* BayesTree
* @param noRelinKeys is an optional set of nonlinear keys that iSAM2 will
* hold at a constant linearization point, regardless of the size of the
* linear delta
* @param extraReelimKeys is an optional set of nonlinear keys that iSAM2 will
* re-eliminate, regardless of the size of the linear delta. This allows the
* provided keys to be reordered.
* @return An ISAM2Result struct containing information about the update
*/
virtual ISAM2Result update(const NonlinearFactorGraph& newFactors = NonlinearFactorGraph(),
virtual ISAM2Result update(
const NonlinearFactorGraph& newFactors = NonlinearFactorGraph(),
const Values& newTheta = Values(),
const FactorIndices& removeFactorIndices = FactorIndices(),
const boost::optional<FastMap<Key,int> >& constrainedKeys = boost::none,
const boost::optional<FastMap<Key, int> >& constrainedKeys = boost::none,
const boost::optional<FastList<Key> >& noRelinKeys = boost::none,
const boost::optional<FastList<Key> >& extraReelimKeys = boost::none,
bool force_relinearize = false);
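// Usage sketch (illustrative only): removing a previously added factor by the
// index reported in ISAM2Result, while forcing a relinearization check on this
// call. Which factor to remove is an application-level choice.
//
//   ISAM2Result r = isam.update(newFactors, newValues);
//   FactorIndices toRemove;
//   toRemove.push_back(r.newFactorsIndices.front());
//   isam.update(NonlinearFactorGraph(), Values(), toRemove,
//               boost::none, boost::none, boost::none,
//               /*force_relinearize=*/true);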
@ -542,48 +161,54 @@ public:
* requested to be marginalized. Marginalization leaves a linear
* approximation of the marginal in the system, and the linearization points
 * of any variables involved in this linear marginal become fixed. The set of
 * fixed variables will include any key involved with the marginalized
 * variables in the original factors, and possibly additional ones due to
 * fill-in.
*
* If provided, 'marginalFactorsIndices' will be augmented with the factor graph
* indices of the marginal factors added during the 'marginalizeLeaves' call
* If provided, 'marginalFactorsIndices' will be augmented with the factor
* graph indices of the marginal factors added during the 'marginalizeLeaves'
* call
*
* If provided, 'deletedFactorsIndices' will be augmented with the factor graph
* indices of any factor that was removed during the 'marginalizeLeaves' call
* If provided, 'deletedFactorsIndices' will be augmented with the factor
* graph indices of any factor that was removed during the 'marginalizeLeaves'
* call
*/
void marginalizeLeaves(const FastList<Key>& leafKeys,
boost::optional<FactorIndices&> marginalFactorsIndices = boost::none,
boost::optional<FactorIndices&> deletedFactorsIndices = boost::none);
void marginalizeLeaves(
const FastList<Key>& leafKeys,
boost::optional<FactorIndices&> marginalFactorsIndices = boost::none,
boost::optional<FactorIndices&> deletedFactorsIndices = boost::none);
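// Example (hypothetical fixed-lag style usage): marginalize out the oldest
// pose once it becomes a leaf of the Bayes tree. The requested keys must
// already be leaves; the optional output arguments collect the indices of the
// marginal factors that were added and of the factors that were removed.
//
//   FastList<Key> leafKeys;
//   leafKeys.push_back(Symbol('x', oldestIndex));  // oldestIndex: app-defined
//   FactorIndices marginalFactors, removedFactors;
//   isam.marginalizeLeaves(leafKeys, marginalFactors, removedFactors);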
/// Access the current linearization point
const Values& getLinearizationPoint() const {
return theta_;
}
const Values& getLinearizationPoint() const { return theta_; }
/// Check whether variable with given key exists in linearization point
bool valueExists(Key key) const {
return theta_.exists(key);
}
bool valueExists(Key key) const { return theta_.exists(key); }
/** Compute an estimate from the incomplete linear delta computed during the
 * last update. This delta is incomplete because it was not updated below
 * wildfire_threshold. If only a single variable is needed, it is faster to
 * call calculateEstimate(const KEY&).
*/
Values calculateEstimate() const;
/** Compute an estimate for a single variable using its incomplete linear delta computed
* during the last update. This is faster than calling the no-argument version of
* calculateEstimate, which operates on all variables.
/** Compute an estimate for a single variable using its incomplete linear
* delta computed during the last update. This is faster than calling the
* no-argument version of calculateEstimate, which operates on all variables.
* @param key
* @return
*/
template<class VALUE>
VALUE calculateEstimate(Key key) const;
template <class VALUE>
VALUE calculateEstimate(Key key) const {
const Vector& delta = getDelta()[key];
return traits<VALUE>::Retract(theta_.at<VALUE>(key), delta);
}
/** Compute an estimate for a single variable using its incomplete linear delta computed
* during the last update. This is faster than calling the no-argument version of
* calculateEstimate, which operates on all variables. This is a non-templated version
* that returns a Value base class for use with the MATLAB wrapper.
/** Compute an estimate for a single variable using its incomplete linear
* delta computed during the last update. This is faster than calling the
* no-argument version of calculateEstimate, which operates on all variables.
* This is a non-templated version that returns a Value base class for use
* with the MATLAB wrapper.
* @param key
* @return
*/
@ -598,7 +223,8 @@ public:
/** Internal implementation functions */
struct Impl;
/** Compute an estimate using a complete delta computed by a full back-substitution.
/** Compute an estimate using a complete delta computed by a full
* back-substitution.
*/
Values calculateBestEstimate() const;
@ -609,7 +235,9 @@ public:
double error(const VectorValues& x) const;
/** Access the set of nonlinear factors */
const NonlinearFactorGraph& getFactorsUnsafe() const { return nonlinearFactors_; }
const NonlinearFactorGraph& getFactorsUnsafe() const {
return nonlinearFactors_;
}
/** Access the nonlinear variable index */
const VariableIndex& getVariableIndex() const { return variableIndex_; }
@ -628,56 +256,60 @@ public:
/** prints out clique statistics */
void printStats() const { getCliqueData().getStats().print(); }
/** Compute the gradient of the energy function, \f$ \nabla_{x=0} \left\Vert \Sigma^{-1} R x - d
* \right\Vert^2 \f$, centered around zero. The gradient about zero is \f$ -R^T d \f$. See also
* gradient(const GaussianBayesNet&, const VectorValues&).
/** Compute the gradient of the energy function, \f$ \nabla_{x=0} \left\Vert
* \Sigma^{-1} R x - d \right\Vert^2 \f$, centered around zero. The gradient
* about zero is \f$ -R^T d \f$. See also gradient(const GaussianBayesNet&,
* const VectorValues&).
*
* @return A VectorValues storing the gradient.
*/
VectorValues gradientAtZero() const;
/// @}
protected:
protected:
/**
* Add new variables to the ISAM2 system.
* @param newTheta Initial values for new variables
* @param theta Current solution to be augmented with new initialization
* @param delta Current linear delta to be augmented with zeros
* @param deltaNewton
* @param RgProd
* @param keyFormatter Formatter for printing nonlinear keys during debugging
*/
void addVariables(const Values& newTheta);
/**
* Remove variables from the ISAM2 system.
*/
void removeVariables(const KeySet& unusedKeys);
/**
* Apply expmap to the given values, but only for indices appearing in
* \c mask. Values are expmapped in-place.
* \param mask Mask on linear indices, only \c true entries are expmapped
*/
void expmapMasked(const KeySet& mask);
FastSet<Key> getAffectedFactors(const FastList<Key>& keys) const;
GaussianFactorGraph::shared_ptr relinearizeAffectedFactors(const FastList<Key>& affectedKeys, const KeySet& relinKeys) const;
GaussianFactorGraph getCachedBoundaryFactors(Cliques& orphans);
GaussianFactorGraph::shared_ptr relinearizeAffectedFactors(
const FastList<Key>& affectedKeys, const KeySet& relinKeys) const;
GaussianFactorGraph getCachedBoundaryFactors(const Cliques& orphans);
virtual boost::shared_ptr<KeySet> recalculate(
const KeySet& markedKeys, const KeySet& relinKeys,
const std::vector<Key>& observedKeys, const KeySet& unusedIndices,
const boost::optional<FastMap<Key, int> >& constrainKeys,
ISAM2Result* result);
virtual boost::shared_ptr<KeySet > recalculate(const KeySet& markedKeys, const KeySet& relinKeys,
const std::vector<Key>& observedKeys, const KeySet& unusedIndices, const boost::optional<FastMap<Key,int> >& constrainKeys, ISAM2Result& result);
void updateDelta(bool forceFullSolve = false) const;
}; // ISAM2
}; // ISAM2
/// traits
template<> struct traits<ISAM2> : public Testable<ISAM2> {};
template <>
struct traits<ISAM2> : public Testable<ISAM2> {};
/// Optimize the BayesTree, starting from the root.
/// @param replaced Needs to contain
/// all variables that are contained in the top of the Bayes tree that has been
/// redone.
/// @param delta The current solution, an offset from the linearization
/// point.
/// @param threshold The maximum change against the PREVIOUS delta for
/// non-replaced variables that can be ignored, ie. the old delta entry is kept
/// and recursive backsubstitution might eventually stop if none of the changed
/// variables are contained in the subtree.
/// @return The number of variables that were solved for
template<class CLIQUE>
size_t optimizeWildfire(const boost::shared_ptr<CLIQUE>& root,
double threshold, const KeySet& replaced, VectorValues& delta);
} // namespace gtsam
template<class CLIQUE>
size_t optimizeWildfireNonRecursive(const boost::shared_ptr<CLIQUE>& root,
double threshold, const KeySet& replaced, VectorValues& delta);
/// calculate the number of non-zero entries for the tree starting at clique (use root for complete matrix)
template<class CLIQUE>
int calculate_nnz(const boost::shared_ptr<CLIQUE>& clique);
} /// namespace gtsam
#include <gtsam/nonlinear/ISAM2-inl.h>
#include <gtsam/nonlinear/ISAM2-impl.h>

View File

@ -0,0 +1,325 @@
/* ----------------------------------------------------------------------------
* GTSAM Copyright 2010, Georgia Tech Research Corporation,
* Atlanta, Georgia 30332-0415
* All Rights Reserved
* Authors: Frank Dellaert, et al. (see THANKS for the full author list)
* See LICENSE for the license information
* -------------------------------------------------------------------------- */
/**
* @file ISAM2Clique.cpp
* @brief Specialized iSAM2 Clique
* @author Michael Kaess, Richard Roberts, Frank Dellaert
*/
#include <gtsam/inference/BayesTreeCliqueBase-inst.h>
#include <gtsam/linear/VectorValues.h>
#include <gtsam/linear/linearAlgorithms-inst.h>
#include <gtsam/nonlinear/ISAM2Clique.h>
#include <stack>
using namespace std;
namespace gtsam {
// Instantiate base class
template class BayesTreeCliqueBase<ISAM2Clique, GaussianFactorGraph>;
/* ************************************************************************* */
void ISAM2Clique::setEliminationResult(
const FactorGraphType::EliminationResult& eliminationResult) {
conditional_ = eliminationResult.first;
cachedFactor_ = eliminationResult.second;
// Compute gradient contribution
gradientContribution_.resize(conditional_->cols() - 1);
// Rewrite -(R * P')'*d as -(d' * R * P')' for computational speed
// reasons
gradientContribution_ << -conditional_->get_R().transpose() *
conditional_->get_d(),
-conditional_->get_S().transpose() * conditional_->get_d();
}
/* ************************************************************************* */
bool ISAM2Clique::equals(const This& other, double tol) const {
return Base::equals(other) &&
((!cachedFactor_ && !other.cachedFactor_) ||
(cachedFactor_ && other.cachedFactor_ &&
cachedFactor_->equals(*other.cachedFactor_, tol)));
}
/* ************************************************************************* */
void ISAM2Clique::print(const string& s, const KeyFormatter& formatter) const {
Base::print(s, formatter);
if (cachedFactor_)
cachedFactor_->print(s + "Cached: ", formatter);
else
cout << s << "Cached empty" << endl;
if (gradientContribution_.rows() != 0)
gtsam::print(gradientContribution_, "Gradient contribution: ");
}
/* ************************************************************************* */
bool ISAM2Clique::isDirty(const KeySet& replaced, const KeySet& changed) const {
// if none of the variables in this clique (frontal and separator!) changed
// significantly, then by the running intersection property, none of the
// cliques in the children need to be processed
// Are any clique variables part of the tree that has been redone?
bool dirty = replaced.exists(conditional_->frontals().front());
#if !defined(NDEBUG) && defined(GTSAM_EXTRA_CONSISTENCY_CHECKS)
for (Key frontal : conditional_->frontals()) {
assert(dirty == replaced.exists(frontal));
}
#endif
// If not, then has one of the separator variables changed significantly?
if (!dirty) {
for (Key parent : conditional_->parents()) {
if (changed.exists(parent)) {
dirty = true;
break;
}
}
}
return dirty;
}
/* ************************************************************************* */
/**
* Back-substitute - special version stores solution pointers in cliques for
* fast access.
*/
void ISAM2Clique::fastBackSubstitute(VectorValues* delta) const {
#ifdef USE_BROKEN_FAST_BACKSUBSTITUTE
// TODO(gareth): This code shares a lot of logic w/ linearAlgorithms-inst,
// potentially refactor
// Create solution part pointers if necessary and possible - necessary if
// solnPointers_ is empty, and possible if either we're a root, or we have
// a parent with valid solnPointers_.
ISAM2Clique::shared_ptr parent = parent_.lock();
if (solnPointers_.empty() && (isRoot() || !parent->solnPointers_.empty())) {
for (Key frontal : conditional_->frontals())
solnPointers_.emplace(frontal, delta->find(frontal));
for (Key parentKey : conditional_->parents()) {
assert(parent->solnPointers_.exists(parentKey));
solnPointers_.emplace(parentKey, parent->solnPointers_.at(parentKey));
}
}
// See if we can use solution part pointers - we can if they either
// already existed or were created above.
if (!solnPointers_.empty()) {
GaussianConditional& c = *conditional_;
// Solve matrix
Vector xS;
{
// Count dimensions of vector
DenseIndex dim = 0;
FastVector<VectorValues::const_iterator> parentPointers;
parentPointers.reserve(conditional_->nrParents());
for (Key parent : conditional_->parents()) {
parentPointers.push_back(solnPointers_.at(parent));
dim += parentPointers.back()->second.size();
}
// Fill parent vector
xS.resize(dim);
DenseIndex vectorPos = 0;
for (const VectorValues::const_iterator& parentPointer : parentPointers) {
const Vector& parentVector = parentPointer->second;
xS.block(vectorPos, 0, parentVector.size(), 1) =
parentVector.block(0, 0, parentVector.size(), 1);
vectorPos += parentVector.size();
}
}
// NOTE(gareth): We can no longer write: xS = b - S * xS
// This is because Eigen (as of 3.3) no longer evaluates S * xS into
// a temporary, and the operation trashes values in xS.
// See: http://eigen.tuxfamily.org/index.php?title=3.3
const Vector rhs = c.getb() - c.get_S() * xS;
const Vector solution = c.get_R().triangularView<Eigen::Upper>().solve(rhs);
// Check for indeterminant solution
if (solution.hasNaN())
throw IndeterminantLinearSystemException(c.keys().front());
// Insert solution into a VectorValues
DenseIndex vectorPosition = 0;
for (GaussianConditional::const_iterator frontal = c.beginFrontals();
frontal != c.endFrontals(); ++frontal) {
solnPointers_.at(*frontal)->second =
solution.segment(vectorPosition, c.getDim(frontal));
vectorPosition += c.getDim(frontal);
}
} else {
// Just call plain solve because we couldn't use solution pointers.
delta->update(conditional_->solve(*delta));
}
#else
delta->update(conditional_->solve(*delta));
#endif
}
/* ************************************************************************* */
bool ISAM2Clique::valuesChanged(const KeySet& replaced,
const Vector& originalValues,
const VectorValues& delta,
double threshold) const {
auto frontals = conditional_->frontals();
if (replaced.exists(frontals.front())) return true;
auto diff = originalValues - delta.vector(frontals);
return diff.lpNorm<Eigen::Infinity>() >= threshold;
}
/* ************************************************************************* */
/// Set changed flag for each frontal variable
void ISAM2Clique::markFrontalsAsChanged(KeySet* changed) const {
for (Key frontal : conditional_->frontals()) {
changed->insert(frontal);
}
}
/* ************************************************************************* */
void ISAM2Clique::restoreFromOriginals(const Vector& originalValues,
VectorValues* delta) const {
size_t pos = 0;
for (Key frontal : conditional_->frontals()) {
auto& v = delta->at(frontal);
v = originalValues.segment(pos, v.size());
pos += v.size();
}
}
/* ************************************************************************* */
// Note: not being used right now in favor of non-recursive version below.
void ISAM2Clique::optimizeWildfire(const KeySet& replaced, double threshold,
KeySet* changed, VectorValues* delta,
size_t* count) const {
if (isDirty(replaced, *changed)) {
// Temporary copy of the original values, to check how much they change
auto originalValues = delta->vector(conditional_->frontals());
// Back-substitute
fastBackSubstitute(delta);
*count += conditional_->nrFrontals();
if (valuesChanged(replaced, originalValues, *delta, threshold)) {
markFrontalsAsChanged(changed);
} else {
restoreFromOriginals(originalValues, delta);
}
// Recurse to children
for (const auto& child : children) {
child->optimizeWildfire(replaced, threshold, changed, delta, count);
}
}
}
size_t optimizeWildfire(const ISAM2Clique::shared_ptr& root, double threshold,
const KeySet& keys, VectorValues* delta) {
KeySet changed;
size_t count = 0;
// starting from the root, call optimize on each conditional
if (root) root->optimizeWildfire(keys, threshold, &changed, delta, &count);
return count;
}
/* ************************************************************************* */
bool ISAM2Clique::optimizeWildfireNode(const KeySet& replaced, double threshold,
KeySet* changed, VectorValues* delta,
size_t* count) const {
// TODO(gareth): This code shares a lot of logic w/ linearAlgorithms-inst,
// potentially refactor
bool dirty = isDirty(replaced, *changed);
if (dirty) {
// Temporary copy of the original values, to check how much they change
auto originalValues = delta->vector(conditional_->frontals());
// Back-substitute
fastBackSubstitute(delta);
*count += conditional_->nrFrontals();
if (valuesChanged(replaced, originalValues, *delta, threshold)) {
markFrontalsAsChanged(changed);
} else {
restoreFromOriginals(originalValues, delta);
}
}
return dirty;
}
size_t optimizeWildfireNonRecursive(const ISAM2Clique::shared_ptr& root,
double threshold, const KeySet& keys,
VectorValues* delta) {
KeySet changed;
size_t count = 0;
if (root) {
std::stack<ISAM2Clique::shared_ptr> travStack;
travStack.push(root);
ISAM2Clique::shared_ptr currentNode = root;
while (!travStack.empty()) {
currentNode = travStack.top();
travStack.pop();
bool dirty = currentNode->optimizeWildfireNode(keys, threshold, &changed,
delta, &count);
if (dirty) {
for (const auto& child : currentNode->children) {
travStack.push(child);
}
}
}
}
return count;
}
/* ************************************************************************* */
void ISAM2Clique::nnz_internal(size_t* result) const {
size_t dimR = conditional_->rows();
size_t dimSep = conditional_->get_S().cols();
*result += ((dimR + 1) * dimR) / 2 + dimSep * dimR;
// traverse the children
for (const auto& child : children) {
child->nnz_internal(result);
}
}
/* ************************************************************************* */
size_t ISAM2Clique::calculate_nnz() const {
size_t result = 0;
nnz_internal(&result);
return result;
}
/* ************************************************************************* */
void ISAM2Clique::findAll(const KeySet& markedMask, KeySet* keys) const {
static const bool debug = false;
// does the separator contain any of the variables?
bool found = false;
for (Key key : conditional()->parents()) {
if (markedMask.exists(key)) {
found = true;
break;
}
}
if (found) {
// then add this clique
keys->insert(conditional()->beginFrontals(), conditional()->endFrontals());
if (debug) print("Key(s) marked in clique ");
if (debug) cout << "so marking key " << conditional()->front() << endl;
}
for (const auto& child : children) {
child->findAll(markedMask, keys);
}
}
/* ************************************************************************* */
} // namespace gtsam

View File

@ -0,0 +1,174 @@
/* ----------------------------------------------------------------------------
* GTSAM Copyright 2010, Georgia Tech Research Corporation,
* Atlanta, Georgia 30332-0415
* All Rights Reserved
* Authors: Frank Dellaert, et al. (see THANKS for the full author list)
* See LICENSE for the license information
* -------------------------------------------------------------------------- */
/**
* @file ISAM2Clique.h
* @brief Specialized iSAM2 Clique
* @author Michael Kaess, Richard Roberts
*/
// \callgraph
#pragma once
#include <gtsam/inference/BayesTreeCliqueBase.h>
#include <gtsam/inference/Key.h>
#include <gtsam/linear/GaussianBayesNet.h>
#include <gtsam/linear/GaussianConditional.h>
#include <gtsam/linear/GaussianFactorGraph.h>
#include <string>
namespace gtsam {
/**
* Specialized Clique structure for ISAM2, incorporating caching and gradient
* contribution
* TODO: more documentation
*/
class GTSAM_EXPORT ISAM2Clique
: public BayesTreeCliqueBase<ISAM2Clique, GaussianFactorGraph> {
public:
typedef ISAM2Clique This;
typedef BayesTreeCliqueBase<This, GaussianFactorGraph> Base;
typedef boost::shared_ptr<This> shared_ptr;
typedef boost::weak_ptr<This> weak_ptr;
typedef GaussianConditional ConditionalType;
typedef ConditionalType::shared_ptr sharedConditional;
Base::FactorType::shared_ptr cachedFactor_;
Vector gradientContribution_;
#ifdef USE_BROKEN_FAST_BACKSUBSTITUTE
mutable FastMap<Key, VectorValues::iterator> solnPointers_;
#endif
/// Default constructor
ISAM2Clique() : Base() {}
/// Copy constructor, does *not* copy solution pointers as these are invalid
/// in different trees.
ISAM2Clique(const ISAM2Clique& other)
: Base(other),
cachedFactor_(other.cachedFactor_),
gradientContribution_(other.gradientContribution_) {}
/// Assignment operator, does *not* copy solution pointers as these are
/// invalid in different trees.
ISAM2Clique& operator=(const ISAM2Clique& other) {
Base::operator=(other);
cachedFactor_ = other.cachedFactor_;
gradientContribution_ = other.gradientContribution_;
return *this;
}
/// Overridden to also store the remaining factor and gradient contribution
void setEliminationResult(
const FactorGraphType::EliminationResult& eliminationResult);
/** Access the cached factor */
Base::FactorType::shared_ptr& cachedFactor() { return cachedFactor_; }
/** Access the gradient contribution */
const Vector& gradientContribution() const { return gradientContribution_; }
bool equals(const This& other, double tol = 1e-9) const;
/** print this node */
void print(const std::string& s = "",
const KeyFormatter& formatter = DefaultKeyFormatter) const;
void optimizeWildfire(const KeySet& replaced, double threshold,
KeySet* changed, VectorValues* delta,
size_t* count) const;
bool optimizeWildfireNode(const KeySet& replaced, double threshold,
KeySet* changed, VectorValues* delta,
size_t* count) const;
/**
* Starting from the root, add up entries of frontal and conditional matrices
* of each conditional
*/
void nnz_internal(size_t* result) const;
size_t calculate_nnz() const;
/**
* Recursively search this clique and its children for marked keys appearing
* in the separator, and add the *frontal* keys of any cliques whose
* separator contains any marked keys to the set \c keys. The purpose of
* this is to discover the cliques that need to be redone due to information
* propagating to them from cliques that directly contain factors being
* relinearized.
*
 * The original comment says this finds all variables directly connected to
 * the marked ones by measurements. Is this true? It seems like this would
 * also pull in variables indirectly connected through other frontal or
 * separator variables.
*
* Alternatively could we trace up towards the root for each variable here?
*/
void findAll(const KeySet& markedMask, KeySet* keys) const;
private:
/**
* Check if clique was replaced, or if any parents were changed above the
* threshold or themselves replaced.
*/
bool isDirty(const KeySet& replaced, const KeySet& changed) const;
/**
* Back-substitute - special version stores solution pointers in cliques for
* fast access.
*/
void fastBackSubstitute(VectorValues* delta) const;
  /**
* Check whether the values changed above a threshold, or always true if the
* clique was replaced.
*/
bool valuesChanged(const KeySet& replaced, const Vector& originalValues,
const VectorValues& delta, double threshold) const;
/// Set changed flag for each frontal variable
void markFrontalsAsChanged(KeySet* changed) const;
/// Restore delta to original values, guided by frontal keys.
void restoreFromOriginals(const Vector& originalValues,
VectorValues* delta) const;
/** Serialization function */
friend class boost::serialization::access;
template <class ARCHIVE>
void serialize(ARCHIVE& ar, const unsigned int /*version*/) {
ar& BOOST_SERIALIZATION_BASE_OBJECT_NVP(Base);
ar& BOOST_SERIALIZATION_NVP(cachedFactor_);
ar& BOOST_SERIALIZATION_NVP(gradientContribution_);
}
}; // \struct ISAM2Clique
/**
* Optimize the BayesTree, starting from the root.
* @param threshold The maximum change against the PREVIOUS delta for
 * non-replaced variables that can be ignored, i.e. the old delta entry is kept
* and recursive backsubstitution might eventually stop if none of the changed
* variables are contained in the subtree.
* @param replaced Needs to contain all variables that are contained in the top
* of the Bayes tree that has been redone.
 * @param delta The current solution, an offset from the linearization point.
 * @return The number of variables that were solved for.
*/
size_t optimizeWildfire(const ISAM2Clique::shared_ptr& root, double threshold,
const KeySet& replaced, VectorValues* delta);
size_t optimizeWildfireNonRecursive(const ISAM2Clique::shared_ptr& root,
double threshold, const KeySet& replaced,
VectorValues* delta);
} // namespace gtsam
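
The free functions declared at the end of this header are the entry point for the partial back-substitution used by ISAM2. The fragment below is a minimal usage sketch, not part of the commit: it assumes an existing ISAM2 instance `isam`, a set `replacedKeys` of keys whose cliques were redone, and a delta vector to update in place.

#include <iostream>
#include <gtsam/nonlinear/ISAM2.h>

// Hypothetical helper: run wildfire back-substitution over all roots and
// report how many variables were actually solved for.
size_t wildfireUpdate(const gtsam::ISAM2& isam,
                      const gtsam::KeySet& replacedKeys,
                      gtsam::VectorValues* delta) {
  size_t solved = 0;
  for (const auto& root : isam.roots())
    // Changes below 1e-3 are not propagated further down the tree.
    solved += gtsam::optimizeWildfire(root, 1e-3, replacedKeys, delta);
  std::cout << "wildfire solved " << solved << " variables" << std::endl;
  return solved;
}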

View File

@ -0,0 +1,89 @@
/* ----------------------------------------------------------------------------
* GTSAM Copyright 2010, Georgia Tech Research Corporation,
* Atlanta, Georgia 30332-0415
* All Rights Reserved
* Authors: Frank Dellaert, et al. (see THANKS for the full author list)
* See LICENSE for the license information
* -------------------------------------------------------------------------- */
/**
* @file ISAM2Params.cpp
* @brief Parameters for iSAM 2.
* @author Michael Kaess, Richard Roberts, Frank Dellaert
*/
#include <gtsam/nonlinear/ISAM2Params.h>
#include <boost/algorithm/string.hpp>
using namespace std;
namespace gtsam {
/* ************************************************************************* */
string ISAM2DoglegParams::adaptationModeTranslator(
const DoglegOptimizerImpl::TrustRegionAdaptationMode& adaptationMode)
const {
string s;
switch (adaptationMode) {
case DoglegOptimizerImpl::SEARCH_EACH_ITERATION:
s = "SEARCH_EACH_ITERATION";
break;
case DoglegOptimizerImpl::ONE_STEP_PER_ITERATION:
s = "ONE_STEP_PER_ITERATION";
break;
default:
s = "UNDEFINED";
break;
}
return s;
}
/* ************************************************************************* */
DoglegOptimizerImpl::TrustRegionAdaptationMode
ISAM2DoglegParams::adaptationModeTranslator(
const string& adaptationMode) const {
string s = adaptationMode;
boost::algorithm::to_upper(s);
if (s == "SEARCH_EACH_ITERATION")
return DoglegOptimizerImpl::SEARCH_EACH_ITERATION;
if (s == "ONE_STEP_PER_ITERATION")
return DoglegOptimizerImpl::ONE_STEP_PER_ITERATION;
/* default is SEARCH_EACH_ITERATION */
return DoglegOptimizerImpl::SEARCH_EACH_ITERATION;
}
/* ************************************************************************* */
ISAM2Params::Factorization ISAM2Params::factorizationTranslator(
const string& str) {
string s = str;
boost::algorithm::to_upper(s);
if (s == "QR") return ISAM2Params::QR;
if (s == "CHOLESKY") return ISAM2Params::CHOLESKY;
/* default is CHOLESKY */
return ISAM2Params::CHOLESKY;
}
/* ************************************************************************* */
string ISAM2Params::factorizationTranslator(
const ISAM2Params::Factorization& value) {
string s;
switch (value) {
case ISAM2Params::QR:
s = "QR";
break;
case ISAM2Params::CHOLESKY:
s = "CHOLESKY";
break;
default:
s = "UNDEFINED";
break;
}
return s;
}
} // namespace gtsam
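
A small sketch (illustration only, not part of the commit) of how the translators above behave; note that unrecognized strings silently fall back to the defaults rather than throwing.

#include <cassert>
#include <gtsam/nonlinear/ISAM2Params.h>

int main() {
  using gtsam::ISAM2Params;
  // Matching is case-insensitive.
  assert(ISAM2Params::factorizationTranslator("qr") == ISAM2Params::QR);
  // Anything unrecognized maps to the CHOLESKY default.
  assert(ISAM2Params::factorizationTranslator("lu") == ISAM2Params::CHOLESKY);
  // The reverse translator produces the canonical string.
  assert(ISAM2Params::factorizationTranslator(ISAM2Params::QR) == "QR");
  return 0;
}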

View File

@ -0,0 +1,365 @@
/* ----------------------------------------------------------------------------
* GTSAM Copyright 2010, Georgia Tech Research Corporation,
* Atlanta, Georgia 30332-0415
* All Rights Reserved
* Authors: Frank Dellaert, et al. (see THANKS for the full author list)
* See LICENSE for the license information
* -------------------------------------------------------------------------- */
/**
* @file ISAM2Params.h
* @brief Parameters for iSAM 2.
* @author Michael Kaess, Richard Roberts, Frank Dellaert
*/
// \callgraph
#pragma once
#include <gtsam/linear/GaussianFactorGraph.h>
#include <gtsam/nonlinear/DoglegOptimizerImpl.h>
#include <boost/variant.hpp>
#include <string>
namespace gtsam {
/**
* @addtogroup ISAM2
* Parameters for ISAM2 using Gauss-Newton optimization. Either this class or
* ISAM2DoglegParams should be specified as the optimizationParams in
* ISAM2Params, which should in turn be passed to ISAM2(const ISAM2Params&).
*/
struct GTSAM_EXPORT ISAM2GaussNewtonParams {
double
wildfireThreshold; ///< Continue updating the linear delta only when
///< changes are above this threshold (default: 0.001)
/** Specify parameters as constructor arguments */
ISAM2GaussNewtonParams(
double _wildfireThreshold =
0.001 ///< see ISAM2GaussNewtonParams public variables,
///< ISAM2GaussNewtonParams::wildfireThreshold
)
: wildfireThreshold(_wildfireThreshold) {}
void print(const std::string str = "") const {
using std::cout;
cout << str << "type: ISAM2GaussNewtonParams\n";
cout << str << "wildfireThreshold: " << wildfireThreshold << "\n";
cout.flush();
}
double getWildfireThreshold() const { return wildfireThreshold; }
void setWildfireThreshold(double wildfireThreshold) {
this->wildfireThreshold = wildfireThreshold;
}
};
/**
* @addtogroup ISAM2
* Parameters for ISAM2 using Dogleg optimization. Either this class or
* ISAM2GaussNewtonParams should be specified as the optimizationParams in
* ISAM2Params, which should in turn be passed to ISAM2(const ISAM2Params&).
*/
struct GTSAM_EXPORT ISAM2DoglegParams {
double initialDelta; ///< The initial trust region radius for Dogleg
double
wildfireThreshold; ///< Continue updating the linear delta only when
///< changes are above this threshold (default: 1e-5)
DoglegOptimizerImpl::TrustRegionAdaptationMode
adaptationMode; ///< See description in
///< DoglegOptimizerImpl::TrustRegionAdaptationMode
bool
verbose; ///< Whether Dogleg prints iteration and convergence information
/** Specify parameters as constructor arguments */
ISAM2DoglegParams(
double _initialDelta = 1.0, ///< see ISAM2DoglegParams::initialDelta
double _wildfireThreshold =
1e-5, ///< see ISAM2DoglegParams::wildfireThreshold
DoglegOptimizerImpl::TrustRegionAdaptationMode _adaptationMode =
DoglegOptimizerImpl::
SEARCH_EACH_ITERATION, ///< see ISAM2DoglegParams::adaptationMode
bool _verbose = false ///< see ISAM2DoglegParams::verbose
)
: initialDelta(_initialDelta),
wildfireThreshold(_wildfireThreshold),
adaptationMode(_adaptationMode),
verbose(_verbose) {}
void print(const std::string str = "") const {
using std::cout;
cout << str << "type: ISAM2DoglegParams\n";
cout << str << "initialDelta: " << initialDelta << "\n";
cout << str << "wildfireThreshold: " << wildfireThreshold << "\n";
cout << str
<< "adaptationMode: " << adaptationModeTranslator(adaptationMode)
<< "\n";
cout.flush();
}
double getInitialDelta() const { return initialDelta; }
double getWildfireThreshold() const { return wildfireThreshold; }
std::string getAdaptationMode() const {
return adaptationModeTranslator(adaptationMode);
}
bool isVerbose() const { return verbose; }
void setInitialDelta(double initialDelta) {
this->initialDelta = initialDelta;
}
void setWildfireThreshold(double wildfireThreshold) {
this->wildfireThreshold = wildfireThreshold;
}
void setAdaptationMode(const std::string& adaptationMode) {
this->adaptationMode = adaptationModeTranslator(adaptationMode);
}
void setVerbose(bool verbose) { this->verbose = verbose; }
std::string adaptationModeTranslator(
const DoglegOptimizerImpl::TrustRegionAdaptationMode& adaptationMode)
const;
DoglegOptimizerImpl::TrustRegionAdaptationMode adaptationModeTranslator(
const std::string& adaptationMode) const;
};
/**
* @addtogroup ISAM2
* Parameters for the ISAM2 algorithm. Default parameter values are listed
* below.
*/
typedef FastMap<char, Vector> ISAM2ThresholdMap;
typedef ISAM2ThresholdMap::value_type ISAM2ThresholdMapValue;
struct GTSAM_EXPORT ISAM2Params {
typedef boost::variant<ISAM2GaussNewtonParams, ISAM2DoglegParams>
OptimizationParams; ///< Either ISAM2GaussNewtonParams or
///< ISAM2DoglegParams
typedef boost::variant<double, FastMap<char, Vector> >
RelinearizationThreshold; ///< Either a constant relinearization
///< threshold or a per-variable-type set of
///< thresholds
/** Optimization parameters, this both selects the nonlinear optimization
* method and specifies its parameters, either ISAM2GaussNewtonParams or
* ISAM2DoglegParams. In the former, Gauss-Newton optimization will be used
* with the specified parameters, and in the latter Powell's dog-leg
* algorithm will be used with the specified parameters.
*/
OptimizationParams optimizationParams;
/** Only relinearize variables whose linear delta magnitude is greater than
* this threshold (default: 0.1). If this is a FastMap<char,Vector> instead
* of a double, then the threshold is specified for each dimension of each
* variable type. This parameter then maps from a character indicating the
* variable type to a Vector of thresholds for each dimension of that
* variable. For example, if Pose keys are of type TypedSymbol<'x',Pose3>,
* and landmark keys are of type TypedSymbol<'l',Point3>, then appropriate
* entries would be added with:
* \code
FastMap<char,Vector> thresholds;
thresholds['x'] = (Vector(6) << 0.1, 0.1, 0.1, 0.5, 0.5, 0.5).finished();
 // 0.1 rad rotation threshold, 0.5 m translation threshold
 thresholds['l'] = Vector3(1.0, 1.0, 1.0); // 1.0 m landmark position threshold
params.relinearizeThreshold = thresholds;
\endcode
*/
RelinearizationThreshold relinearizeThreshold;
  int relinearizeSkip;  ///< Only consider relinearizing variables every
                        ///< relinearizeSkip calls to ISAM2::update (default:
                        ///< 10)
bool enableRelinearization; ///< Controls whether ISAM2 will ever relinearize
///< any variables (default: true)
bool evaluateNonlinearError; ///< Whether to evaluate the nonlinear error
///< before and after the update, to return in
///< ISAM2Result from update()
enum Factorization { CHOLESKY, QR };
  /** Specifies whether to use QR or CHOLESKY numerical factorization (default:
* CHOLESKY). Cholesky is faster but potentially numerically unstable for
* poorly-conditioned problems, which can occur when uncertainty is very low
* in some variables (or dimensions of variables) and very high in others. QR
* is slower but more numerically stable in poorly-conditioned problems. We
* suggest using the default of Cholesky unless gtsam sometimes throws
* IndefiniteLinearSystemException when your problem's Hessian is actually
* positive definite. For positive definite problems, numerical error
* accumulation can cause the problem to become numerically negative or
* indefinite as solving proceeds, especially when using Cholesky.
*/
Factorization factorization;
/** Whether to cache linear factors (default: true).
* This can improve performance if linearization is expensive, but can hurt
   * performance if linearization is very cheap, due to the computation to look
   * up
* additional keys.
*/
bool cacheLinearizedFactors;
KeyFormatter
keyFormatter; ///< A KeyFormatter for when keys are printed during
///< debugging (default: DefaultKeyFormatter)
bool enableDetailedResults; ///< Whether to compute and return
///< ISAM2Result::detailedResults, this can
///< increase running time (default: false)
/** Check variables for relinearization in tree-order, stopping the check once
* a variable does not need to be relinearized (default: false). This can
* improve speed by only checking a small part of the top of the tree.
* However, variables below the check cut-off can accumulate significant
* deltas without triggering relinearization. This is particularly useful in
* exploration scenarios where real-time performance is desired over
* correctness. Use with caution.
*/
bool enablePartialRelinearizationCheck;
/// When you will be removing many factors, e.g. when using ISAM2 as a
/// fixed-lag smoother, enable this option to add factors in the first
/// available factor slots, to avoid accumulating NULL factor slots, at the
/// cost of having to search for slots every time a factor is added.
bool findUnusedFactorSlots;
/**
* Specify parameters as constructor arguments
* See the documentation of member variables above.
*/
ISAM2Params(OptimizationParams _optimizationParams = ISAM2GaussNewtonParams(),
RelinearizationThreshold _relinearizeThreshold = 0.1,
int _relinearizeSkip = 10, bool _enableRelinearization = true,
bool _evaluateNonlinearError = false,
Factorization _factorization = ISAM2Params::CHOLESKY,
bool _cacheLinearizedFactors = true,
const KeyFormatter& _keyFormatter =
DefaultKeyFormatter ///< see ISAM2::Params::keyFormatter
)
: optimizationParams(_optimizationParams),
relinearizeThreshold(_relinearizeThreshold),
relinearizeSkip(_relinearizeSkip),
enableRelinearization(_enableRelinearization),
evaluateNonlinearError(_evaluateNonlinearError),
factorization(_factorization),
cacheLinearizedFactors(_cacheLinearizedFactors),
keyFormatter(_keyFormatter),
enableDetailedResults(false),
enablePartialRelinearizationCheck(false),
findUnusedFactorSlots(false) {}
/// print iSAM2 parameters
void print(const std::string& str = "") const {
using std::cout;
cout << str << "\n";
static const std::string kStr("optimizationParams: ");
if (optimizationParams.type() == typeid(ISAM2GaussNewtonParams))
      boost::get<ISAM2GaussNewtonParams>(optimizationParams).print(kStr);
else if (optimizationParams.type() == typeid(ISAM2DoglegParams))
boost::get<ISAM2DoglegParams>(optimizationParams).print(kStr);
else
cout << kStr << "{unknown type}\n";
cout << "relinearizeThreshold: ";
if (relinearizeThreshold.type() == typeid(double)) {
cout << boost::get<double>(relinearizeThreshold) << "\n";
} else {
cout << "{mapped}\n";
for (const ISAM2ThresholdMapValue& value :
boost::get<ISAM2ThresholdMap>(relinearizeThreshold)) {
cout << " '" << value.first
<< "' -> [" << value.second.transpose() << " ]\n";
}
}
cout << "relinearizeSkip: " << relinearizeSkip << "\n";
cout << "enableRelinearization: " << enableRelinearization
<< "\n";
cout << "evaluateNonlinearError: " << evaluateNonlinearError
<< "\n";
cout << "factorization: "
<< factorizationTranslator(factorization) << "\n";
cout << "cacheLinearizedFactors: " << cacheLinearizedFactors
<< "\n";
cout << "enableDetailedResults: " << enableDetailedResults
<< "\n";
cout << "enablePartialRelinearizationCheck: "
<< enablePartialRelinearizationCheck << "\n";
cout << "findUnusedFactorSlots: " << findUnusedFactorSlots
<< "\n";
cout.flush();
}
/// @name Getters and Setters for all properties
/// @{
OptimizationParams getOptimizationParams() const {
return this->optimizationParams;
}
RelinearizationThreshold getRelinearizeThreshold() const {
return relinearizeThreshold;
}
int getRelinearizeSkip() const { return relinearizeSkip; }
bool isEnableRelinearization() const { return enableRelinearization; }
bool isEvaluateNonlinearError() const { return evaluateNonlinearError; }
std::string getFactorization() const {
return factorizationTranslator(factorization);
}
bool isCacheLinearizedFactors() const { return cacheLinearizedFactors; }
KeyFormatter getKeyFormatter() const { return keyFormatter; }
bool isEnableDetailedResults() const { return enableDetailedResults; }
bool isEnablePartialRelinearizationCheck() const {
return enablePartialRelinearizationCheck;
}
void setOptimizationParams(OptimizationParams optimizationParams) {
this->optimizationParams = optimizationParams;
}
void setRelinearizeThreshold(RelinearizationThreshold relinearizeThreshold) {
this->relinearizeThreshold = relinearizeThreshold;
}
void setRelinearizeSkip(int relinearizeSkip) {
this->relinearizeSkip = relinearizeSkip;
}
void setEnableRelinearization(bool enableRelinearization) {
this->enableRelinearization = enableRelinearization;
}
void setEvaluateNonlinearError(bool evaluateNonlinearError) {
this->evaluateNonlinearError = evaluateNonlinearError;
}
void setFactorization(const std::string& factorization) {
this->factorization = factorizationTranslator(factorization);
}
void setCacheLinearizedFactors(bool cacheLinearizedFactors) {
this->cacheLinearizedFactors = cacheLinearizedFactors;
}
void setKeyFormatter(KeyFormatter keyFormatter) {
this->keyFormatter = keyFormatter;
}
void setEnableDetailedResults(bool enableDetailedResults) {
this->enableDetailedResults = enableDetailedResults;
}
void setEnablePartialRelinearizationCheck(
bool enablePartialRelinearizationCheck) {
this->enablePartialRelinearizationCheck = enablePartialRelinearizationCheck;
}
GaussianFactorGraph::Eliminate getEliminationFunction() const {
return factorization == CHOLESKY
? (GaussianFactorGraph::Eliminate)EliminatePreferCholesky
: (GaussianFactorGraph::Eliminate)EliminateQR;
}
/// @}
/// @name Some utilities
/// @{
static Factorization factorizationTranslator(const std::string& str);
static std::string factorizationTranslator(const Factorization& value);
/// @}
};
} // namespace gtsam
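
To make the parameter block above concrete, here is a minimal configuration sketch (the values are arbitrary examples, not recommended settings) that combines Dogleg optimization with the per-variable-type relinearization thresholds documented for relinearizeThreshold.

#include <gtsam/nonlinear/ISAM2.h>

gtsam::ISAM2 makeIncrementalSolver() {
  gtsam::ISAM2Params params;
  // Choose Powell's dog-leg instead of the default Gauss-Newton update.
  params.optimizationParams = gtsam::ISAM2DoglegParams(1.0, 1e-5);
  // Per-type thresholds: 'x' keys are Pose3 (rotation | translation),
  // 'l' keys are Point3 landmarks.
  gtsam::FastMap<char, gtsam::Vector> thresholds;
  thresholds['x'] =
      (gtsam::Vector(6) << 0.1, 0.1, 0.1, 0.5, 0.5, 0.5).finished();
  thresholds['l'] = gtsam::Vector3(1.0, 1.0, 1.0);
  params.relinearizeThreshold = thresholds;
  params.relinearizeSkip = 1;  // consider relinearization on every update
  params.factorization = gtsam::ISAM2Params::QR;  // slower but more stable
  return gtsam::ISAM2(params);
}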

View File

@ -0,0 +1,159 @@
/* ----------------------------------------------------------------------------
* GTSAM Copyright 2010, Georgia Tech Research Corporation,
* Atlanta, Georgia 30332-0415
* All Rights Reserved
* Authors: Frank Dellaert, et al. (see THANKS for the full author list)
* See LICENSE for the license information
* -------------------------------------------------------------------------- */
/**
* @file ISAM2Result.h
* @brief Class that stores detailed iSAM2 result.
* @author Michael Kaess, Richard Roberts, Frank Dellaert
*/
// \callgraph
#pragma once
#include <string>
#include <vector>
#include <gtsam/linear/GaussianBayesTree.h>
#include <gtsam/nonlinear/DoglegOptimizerImpl.h>
#include <gtsam/nonlinear/NonlinearFactorGraph.h>
#include <gtsam/nonlinear/ISAM2Params.h>
#include <boost/variant.hpp>
namespace gtsam {
typedef FastVector<size_t> FactorIndices;
/**
* @addtogroup ISAM2
* This struct is returned from ISAM2::update() and contains information about
* the update that is useful for determining whether the solution is
* converging, and about how much work was required for the update. See member
* variables for details and information about each entry.
*/
struct GTSAM_EXPORT ISAM2Result {
/** The nonlinear error of all of the factors, \a including new factors and
* variables added during the current call to ISAM2::update(). This error is
* calculated using the following variable values:
* \li Pre-existing variables will be evaluated by combining their
* linearization point before this call to update, with their partial linear
* delta, as computed by ISAM2::calculateEstimate().
* \li New variables will be evaluated at their initialization points passed
* into the current call to update.
* \par Note: This will only be computed if
* ISAM2Params::evaluateNonlinearError is set to \c true, because there is
* some cost to this computation.
*/
boost::optional<double> errorBefore;
/** The nonlinear error of all of the factors computed after the current
* update, meaning that variables above the relinearization threshold
* (ISAM2Params::relinearizeThreshold) have been relinearized and new
* variables have undergone one linear update. Variable values are
* again computed by combining their linearization points with their
* partial linear deltas, by ISAM2::calculateEstimate().
* \par Note: This will only be computed if
* ISAM2Params::evaluateNonlinearError is set to \c true, because there is
* some cost to this computation.
*/
boost::optional<double> errorAfter;
/** The number of variables that were relinearized because their linear
   * deltas exceeded the relinearization threshold
* (ISAM2Params::relinearizeThreshold), combined with any additional
* variables that had to be relinearized because they were involved in
* the same factor as a variable above the relinearization threshold.
* On steps where no relinearization is considered
* (see ISAM2Params::relinearizeSkip), this count will be zero.
*/
size_t variablesRelinearized;
/** The number of variables that were reeliminated as parts of the Bayes'
* Tree were recalculated, due to new factors. When loop closures occur,
* this count will be large as the new loop-closing factors will tend to
* involve variables far away from the root, and everything up to the root
* will be reeliminated.
*/
size_t variablesReeliminated;
/** The number of factors that were included in reelimination of the Bayes'
* tree. */
size_t factorsRecalculated;
/** The number of cliques in the Bayes' Tree */
size_t cliques;
/** The indices of the newly-added factors, in 1-to-1 correspondence with the
* factors passed as \c newFactors to ISAM2::update(). These indices may be
* used later to refer to the factors in order to remove them.
*/
FactorIndices newFactorsIndices;
/** A struct holding detailed results, which must be enabled with
* ISAM2Params::enableDetailedResults.
*/
struct DetailedResults {
/** The status of a single variable, this struct is stored in
* DetailedResults::variableStatus */
struct VariableStatus {
/** Whether the variable was just reeliminated, due to being relinearized,
* observed, new, or on the path up to the root clique from another
* reeliminated variable. */
bool isReeliminated;
bool isAboveRelinThreshold; ///< Whether the variable was just
///< relinearized due to being above the
///< relinearization threshold
bool isRelinearizeInvolved; ///< Whether the variable was below the
///< relinearization threshold but was
///< relinearized by being involved in a
///< factor with a variable above the
///< relinearization threshold
    bool isRelinearized;  ///< Whether the variable was relinearized, either by
                          ///< being above the relinearization threshold or by
                          ///< involvement.
bool isObserved; ///< Whether the variable was just involved in new
///< factors
bool isNew; ///< Whether the variable itself was just added
bool inRootClique; ///< Whether the variable is in the root clique
VariableStatus()
: isReeliminated(false),
isAboveRelinThreshold(false),
isRelinearizeInvolved(false),
isRelinearized(false),
isObserved(false),
isNew(false),
inRootClique(false) {}
};
/** The status of each variable during this update, see VariableStatus.
*/
FastMap<Key, VariableStatus> variableStatus;
};
/** Detailed results, if enabled by ISAM2Params::enableDetailedResults. See
   * DetailedResults for information about the results data stored here. */
boost::optional<DetailedResults> detail;
void print(const std::string str = "") const {
using std::cout;
cout << str << " Reelimintated: " << variablesReeliminated
<< " Relinearized: " << variablesRelinearized
<< " Cliques: " << cliques << std::endl;
}
/** Getters and Setters */
size_t getVariablesRelinearized() const { return variablesRelinearized; }
size_t getVariablesReeliminated() const { return variablesReeliminated; }
size_t getCliques() const { return cliques; }
};
} // namespace gtsam
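
As a brief caller-side sketch (assumed usage, not part of the commit): the optional error fields are only populated when ISAM2Params::evaluateNonlinearError is enabled, so check them before dereferencing.

#include <iostream>
#include <gtsam/nonlinear/ISAM2.h>

void reportUpdate(const gtsam::ISAM2Result& result) {
  if (result.errorBefore && result.errorAfter)
    std::cout << "nonlinear error: " << *result.errorBefore << " -> "
              << *result.errorAfter << "\n";
  std::cout << "relinearized " << result.getVariablesRelinearized()
            << ", reeliminated " << result.getVariablesReeliminated()
            << ", cliques " << result.getCliques() << std::endl;
}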

View File

@ -113,9 +113,9 @@ struct LevenbergMarquardtState : public NonlinearOptimizerState {
// Small cache of A|b|model indexed by dimension. Can save many mallocs.
mutable std::vector<CachedModel> noiseModelCache;
CachedModel* getCachedModel(size_t dim) const {
if (dim > noiseModelCache.size())
noiseModelCache.resize(dim);
CachedModel* item = &noiseModelCache[dim - 1];
if (dim >= noiseModelCache.size())
noiseModelCache.resize(dim+1);
CachedModel* item = &noiseModelCache[dim];
if (!item->model)
*item = CachedModel(dim, 1.0 / std::sqrt(lambda));
return item;
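
The hunk above changes the noise-model cache from (dim-1)-based to dim-based indexing and grows it whenever dim reaches the current size. A standalone sketch of the same pattern, with hypothetical names rather than GTSAM code:

#include <vector>

// Minimal dimension-indexed lazy cache, mirroring the corrected indexing above.
template <typename T>
T& getOrCreate(std::vector<T>& cache, size_t dim) {
  if (dim >= cache.size())
    cache.resize(dim + 1);  // ensure index `dim` itself is valid
  return cache[dim];
}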

View File

@ -122,8 +122,8 @@ class SmartRangeFactor: public NoiseModelFactor {
// use best fh to find actual intersection points
if (bestCircle2 && best_fh) {
auto bestCircleCenter = bestCircle2->center;
std::list<Point2> intersections = circleCircleIntersection(
circle1.center, bestCircleCenter, best_fh);
std::list<Point2> intersections =
circleCircleIntersection(circle1.center, bestCircleCenter, best_fh);
// pick winner based on other measurements
double error1 = 0, error2 = 0;

View File

@ -304,7 +304,7 @@ TEST(ISAM2, AddFactorsStep1)
FactorIndices actualNewFactorIndices;
ISAM2::Impl::AddFactorsStep1(newFactors, true, nonlinearFactors, actualNewFactorIndices);
ISAM2::Impl::AddFactorsStep1(newFactors, true, &nonlinearFactors, &actualNewFactorIndices);
EXPECT(assert_equal(expectedNonlinearFactors, nonlinearFactors));
EXPECT(assert_container_equality(expectedNewFactorIndices, actualNewFactorIndices));
@ -843,7 +843,7 @@ TEST(ISAM2, calculate_nnz)
{
ISAM2 isam = createSlamlikeISAM2();
int expected = 241;
int actual = calculate_nnz(isam.roots().front());
int actual = isam.roots().front()->calculate_nnz();
EXPECT_LONGS_EQUAL(expected, actual);
}