Moved ISAM2 from inference to nonlinear

release/4.3a0
Richard Roberts 2011-08-30 17:15:50 +00:00
parent d4dbf12f8b
commit 059de23b6e
8 changed files with 49 additions and 43 deletions

View File

@@ -34,7 +34,6 @@ headers += EliminationTree.h EliminationTree-inl.h
headers += BayesNet.h BayesNet-inl.h
headers += BayesTree.h BayesTree-inl.h
headers += ISAM.h ISAM-inl.h
headers += ISAM2.h ISAM2-inl.h ISAM2-impl-inl.h
check_PROGRAMS += tests/testInference
check_PROGRAMS += tests/testFactorGraph
check_PROGRAMS += tests/testFactorGraph

View File

@@ -5,7 +5,6 @@
*/
#include <gtsam/nonlinear/GaussianISAM2.h>
#include <gtsam/inference/ISAM2-inl.h>
#include <gtsam/nonlinear/TupleValues-inl.h>
using namespace std;
@@ -15,11 +14,8 @@ using namespace gtsam;
namespace gtsam {
// Explicitly instantiate so we don't have to include everywhere
template class ISAM2<GaussianConditional, planarSLAM::Values>;
/* ************************************************************************* */
void optimize2(const GaussianISAM2::sharedClique& clique, double threshold,
void optimize2(const typename BayesTree<GaussianConditional>::sharedClique& clique, double threshold,
vector<bool>& changed, const vector<bool>& replaced, Permuted<VectorValues>& delta, int& count) {
// if none of the variables in this clique (frontal and separator!) changed
// significantly, then by the running intersection property, none of the
@@ -50,7 +46,7 @@ void optimize2(const GaussianISAM2::sharedClique& clique, double threshold,
// Temporary copy of the original values, to check how much they change
vector<Vector> originalValues((*clique)->nrFrontals());
GaussianISAM2::ConditionalType::const_iterator it;
GaussianConditional::const_iterator it;
for(it = (*clique)->beginFrontals(); it!=(*clique)->endFrontals(); it++) {
originalValues[it - (*clique)->beginFrontals()] = delta[*it];
}
@@ -89,7 +85,7 @@ void optimize2(const GaussianISAM2::sharedClique& clique, double threshold,
}
// Recurse to children
BOOST_FOREACH(const GaussianISAM2::sharedClique& child, clique->children_) {
BOOST_FOREACH(const typename BayesTree<GaussianConditional>::sharedClique& child, clique->children_) {
optimize2(child, threshold, changed, replaced, delta, count);
}
}
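The hunk above elides the actual per-variable comparison, but the rule it implements is spelled out in the GaussianISAM2.h comments further down in this commit: a non-replaced variable keeps its old delta entry unless it changed by more than the threshold, and a subtree is only revisited if something in the clique changed significantly. A minimal standalone sketch of that rule, in plain C++ rather than the GTSAM types or API:

#include <cmath>
#include <cstddef>
#include <vector>

// Hypothetical helper: returns true if any entry of this clique's new delta
// differs from the previous delta by more than `threshold`, meaning the clique
// (and, by the running intersection property, possibly its subtree) should be
// re-solved; otherwise the old delta entries can simply be kept.
bool cliqueChangedEnough(const std::vector<double>& previousDelta,
                         const std::vector<double>& newDelta,
                         double threshold) {
  for (std::size_t i = 0; i < newDelta.size(); ++i)
    if (std::fabs(newDelta[i] - previousDelta[i]) > threshold)
      return true;
  return false;
}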
@@ -97,14 +93,14 @@ void optimize2(const GaussianISAM2::sharedClique& clique, double threshold,
/* ************************************************************************* */
// fast full version without threshold
void optimize2(const GaussianISAM2::sharedClique& clique, VectorValues& delta) {
void optimize2(const typename BayesTree<GaussianConditional>::sharedClique& clique, VectorValues& delta) {
// parents are assumed to already be solved and available in result
(*clique)->rhs(delta);
(*clique)->solveInPlace(delta);
// Solve children recursively
BOOST_FOREACH(const GaussianISAM2::sharedClique& child, clique->children_) {
BOOST_FOREACH(const typename BayesTree<GaussianConditional>::sharedClique& child, clique->children_) {
optimize2(child, delta);
}
}
@@ -119,7 +115,7 @@ void optimize2(const GaussianISAM2::sharedClique& clique, VectorValues& delta) {
//}
/* ************************************************************************* */
int optimize2(const GaussianISAM2::sharedClique& root, double threshold, const vector<bool>& keys, Permuted<VectorValues>& delta) {
int optimize2(const typename BayesTree<GaussianConditional>::sharedClique& root, double threshold, const vector<bool>& keys, Permuted<VectorValues>& delta) {
vector<bool> changed(keys.size(), false);
int count = 0;
// starting from the root, call optimize on each conditional
@@ -128,18 +124,18 @@ int optimize2(const GaussianISAM2::sharedClique& root, double threshold, const v
}
/* ************************************************************************* */
void nnz_internal(const GaussianISAM2::sharedClique& clique, int& result) {
void nnz_internal(const typename BayesTree<GaussianConditional>::sharedClique& clique, int& result) {
int dimR = (*clique)->dim();
int dimSep = (*clique)->get_S().cols() - dimR;
result += ((dimR+1)*dimR)/2 + dimSep*dimR;
// traverse the children
BOOST_FOREACH(const GaussianISAM2::sharedClique& child, clique->children_) {
BOOST_FOREACH(const typename BayesTree<GaussianConditional>::sharedClique& child, clique->children_) {
nnz_internal(child, result);
}
}
/* ************************************************************************* */
int calculate_nnz(const GaussianISAM2::sharedClique& clique) {
int calculate_nnz(const typename BayesTree<GaussianConditional>::sharedClique& clique) {
int result = 0;
// starting from the root, add up entries of frontal and conditional matrices of each conditional
nnz_internal(clique, result);
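For reference, the per-clique term accumulated by nnz_internal is the upper-triangular dimR x dimR frontal block (including the diagonal) plus the dense dimR x dimSep separator block. A small standalone check of that count, not part of the commit:

#include <cassert>

// Same arithmetic as the `result +=` line in nnz_internal above.
int cliqueNnz(int dimR, int dimSep) {
  return ((dimR + 1) * dimR) / 2 + dimSep * dimR;
}

int main() {
  // e.g. 3 frontal dims and 2 separator dims: 6 triangular + 6 dense = 12
  assert(cliqueNnz(3, 2) == 12);
  return 0;
}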

View File

@@ -8,32 +8,42 @@
#pragma once
#include <gtsam/inference/ISAM2.h>
#include <gtsam/linear/GaussianConditional.h>
#include <gtsam/linear/GaussianFactor.h>
#include <gtsam/slam/simulated2D.h>
#include <gtsam/slam/planarSLAM.h>
#include <gtsam/nonlinear/ISAM2-inl.h>
namespace gtsam {
typedef ISAM2<GaussianConditional, simulated2D::Values> GaussianISAM2;
typedef ISAM2<GaussianConditional, planarSLAM::Values> GaussianISAM2_P;
/**
* @brief The main ISAM2 class that is exposed to gtsam users.
*
* This is a thin wrapper around an ISAM2 class templated on
* GaussianConditional, and the values on which that GaussianISAM2 is
* templated.
*
* @tparam VALUES The LieValues or TupleValues\Emph{N} to contain the
* variables.
*/
template <class VALUES>
class GaussianISAM2 : public ISAM2<GaussianConditional, VALUES> {};
// optimize the BayesTree, starting from the root
void optimize2(const GaussianISAM2::sharedClique& root, VectorValues& delta);
// optimize the BayesTree, starting from the root
void optimize2(const typename BayesTree<GaussianConditional>::sharedClique& root, VectorValues& delta);
// optimize the BayesTree, starting from the root; "replaced" needs to contain
// all variables that are contained in the top of the Bayes tree that has been
// redone; "delta" is the current solution, an offset from the linearization
// point; "threshold" is the maximum change against the PREVIOUS delta for
// non-replaced variables that can be ignored, ie. the old delta entry is kept
// and recursive backsubstitution might eventually stop if none of the changed
// variables are contained in the subtree.
// returns the number of variables that were solved for
int optimize2(const GaussianISAM2::sharedClique& root,
// optimize the BayesTree, starting from the root; "replaced" needs to contain
// all variables that are contained in the top of the Bayes tree that has been
// redone; "delta" is the current solution, an offset from the linearization
// point; "threshold" is the maximum change against the PREVIOUS delta for
// non-replaced variables that can be ignored, ie. the old delta entry is kept
// and recursive backsubstitution might eventually stop if none of the changed
// variables are contained in the subtree.
// returns the number of variables that were solved for
int optimize2(const typename BayesTree<GaussianConditional>::sharedClique& root,
double threshold, const std::vector<bool>& replaced, Permuted<VectorValues>& delta);
// calculate the number of non-zero entries for the tree starting at clique (use root for complete matrix)
int calculate_nnz(const GaussianISAM2::sharedClique& clique);
// calculate the number of non-zero entries for the tree starting at clique (use root for complete matrix)
int calculate_nnz(const typename BayesTree<GaussianConditional>::sharedClique& clique);
}/// namespace gtsam
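The user-visible effect of this header change is that GaussianISAM2 is no longer a pair of typedefs pinned to simulated2D/planarSLAM values (GaussianISAM2 / GaussianISAM2_P) but a thin class template, so callers choose the values type themselves. A minimal usage sketch mirroring the updated test further below (it assumes the test's headers and using-directives):

#include <gtsam/nonlinear/GaussianISAM2.h>
#include <gtsam/slam/planarSLAM.h>

using namespace gtsam;

void example() {
  // Before: GaussianISAM2_P isam;  (typedef fixed to planarSLAM::Values)
  // After:  the values type is an explicit template argument.
  GaussianISAM2<planarSLAM::Values> isam;
  // ... factors and values would be added to isam here ...
  planarSLAM::Values estimate = isam.calculateEstimate();
  Ordering ordering = isam.getOrdering();
  (void)estimate; (void)ordering;
}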

View File

@@ -7,6 +7,7 @@
namespace gtsam {
/* ************************************************************************* */
template <class UNIQUE>
struct _VariableAdder {
Ordering& ordering;
Permuted<VectorValues>& vconfig;
@@ -35,7 +36,7 @@ void ISAM2<CONDITIONAL,VALUES>::Impl::AddVariables(
delta.container().reserve(delta->size() + newTheta.size(), delta->dim() + accumulate(dims.begin(), dims.end(), 0));
delta.permutation().resize(delta->size() + newTheta.size());
{
_VariableAdder vadder(ordering, delta);
_VariableAdder<ISAM2<CONDITIONAL,VALUES> > vadder(ordering, delta);
newTheta.apply(vadder);
assert(delta.permutation().size() == delta.container().size());
assert(delta.container().dim() == delta.container().dimCapacity());

View File

@@ -20,11 +20,11 @@ using namespace boost::assign;
#include <gtsam/linear/GaussianJunctionTree.h>
#include <gtsam/inference/BayesTree-inl.h>
#include <gtsam/inference/ISAM2.h>
#include <gtsam/inference/GenericSequentialSolver-inl.h>
#include <gtsam/inference/ISAM2-impl-inl.h>
#include <gtsam/nonlinear/ISAM2.h>
#include <gtsam/nonlinear/ISAM2-impl-inl.h>
// for WAFR paper, separate update and relinearization steps if defined
//#define SEPARATE_STEPS
@@ -740,7 +740,7 @@ void ISAM2<Conditional, Values>::update(
vector<bool> markedRelinMask(ordering_.nVars(), false);
bool relinAny = false;
// Check relinearization if we're at a 10th step, or we are using a looser loop relin threshold
if (force_relinearize || (relinearize && count%10 == 0)) { // todo: every n steps
if (force_relinearize || (relinearize && count%1 == 0)) { // todo: every n steps
tic(4,"gather relinearize keys");
// 4. Mark keys in \Delta above threshold \beta: J=\{\Delta_{j}\in\Delta|\Delta_{j}\geq\beta\}.
for(Index var=0; var<delta_.size(); ++var) {
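Step 4 in the comment above marks the relinearization set J = { Delta_j in Delta : |Delta_j| >= beta }. A simplified, standalone illustration of that marking pass, using plain scalars per variable rather than the GTSAM containers:

#include <cmath>
#include <cstddef>
#include <vector>

// Builds a mask in the spirit of markedRelinMask: true for every variable
// whose current delta magnitude reaches the relinearization threshold beta.
std::vector<bool> markAboveThreshold(const std::vector<double>& delta, double beta) {
  std::vector<bool> marked(delta.size(), false);
  for (std::size_t j = 0; j < delta.size(); ++j)
    if (std::fabs(delta[j]) >= beta) marked[j] = true;
  return marked;
}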

View File

@@ -29,6 +29,7 @@ check_PROGRAMS += tests/testKey tests/testOrdering
# Nonlinear iSAM(2)
headers += NonlinearISAM.h NonlinearISAM-inl.h
headers += ISAM2.h ISAM2-inl.h ISAM2-impl-inl.h
sources += GaussianISAM2.cpp
# Nonlinear constraints

View File

@@ -17,7 +17,6 @@ using namespace boost::assign;
#include <gtsam/nonlinear/Ordering.h>
#include <gtsam/linear/GaussianBayesNet.h>
#include <gtsam/linear/GaussianSequentialSolver.h>
#include <gtsam/inference/ISAM2-inl.h>
#include <gtsam/nonlinear/GaussianISAM2.h>
#include <gtsam/slam/smallExample.h>
#include <gtsam/slam/planarSLAM.h>
@@ -52,7 +51,7 @@ TEST(ISAM2, AddVariables) {
Ordering ordering; ordering += planarSLAM::PointKey(0), planarSLAM::PoseKey(0);
ISAM2<GaussianConditional, planarSLAM::Values>::Nodes nodes(2);
GaussianISAM2<planarSLAM::Values>::Nodes nodes(2);
// Verify initial state
LONGS_EQUAL(0, ordering[planarSLAM::PointKey(0)]);
@@ -81,11 +80,11 @@ TEST(ISAM2, AddVariables) {
Ordering orderingExpected; orderingExpected += planarSLAM::PointKey(0), planarSLAM::PoseKey(0), planarSLAM::PoseKey(1);
ISAM2<GaussianConditional, planarSLAM::Values>::Nodes nodesExpected(
3, ISAM2<GaussianConditional, planarSLAM::Values>::sharedClique());
GaussianISAM2<planarSLAM::Values>::Nodes nodesExpected(
3, GaussianISAM2<planarSLAM::Values>::sharedClique());
// Expand initial state
ISAM2<GaussianConditional, planarSLAM::Values>::Impl::AddVariables(newTheta, theta, delta, ordering, nodes);
GaussianISAM2<planarSLAM::Values>::Impl::AddVariables(newTheta, theta, delta, ordering, nodes);
EXPECT(assert_equal(thetaExpected, theta));
EXPECT(assert_equal(deltaUnpermutedExpected, deltaUnpermuted));
@@ -117,7 +116,7 @@ TEST(ISAM2, optimize2) {
conditional->solveInPlace(expected);
// Clique
GaussianISAM2::sharedClique clique(new GaussianISAM2::Clique(conditional));
typename GaussianISAM2<planarSLAM::Values>::sharedClique clique(new GaussianISAM2<planarSLAM::Values>::Clique(conditional));
VectorValues actual(theta.dims(ordering));
conditional->rhs(actual);
optimize2(clique, actual);
@@ -128,7 +127,7 @@ TEST(ISAM2, optimize2) {
}
/* ************************************************************************* */
bool isam_check(const planarSLAM::Graph& fullgraph, const planarSLAM::Values& fullinit, const GaussianISAM2_P& isam) {
bool isam_check(const planarSLAM::Graph& fullgraph, const planarSLAM::Values& fullinit, const GaussianISAM2<planarSLAM::Values>& isam) {
planarSLAM::Values actual = isam.calculateEstimate();
Ordering ordering = isam.getOrdering(); // *fullgraph.orderingCOLAMD(fullinit).first;
GaussianFactorGraph linearized = *fullgraph.linearize(fullinit, ordering);
@@ -155,7 +154,7 @@ TEST(ISAM2, slamlike_solution)
SharedDiagonal brNoise = sharedSigmas(Vector_(2, M_PI/100.0, 0.1));
// These variables will be reused and accumulate factors and values
GaussianISAM2_P isam;
GaussianISAM2<planarSLAM::Values> isam;
planarSLAM::Values fullinit;
planarSLAM::Graph fullgraph;