Merged from branch 'branches/unordered-isam2'
commit f1fb9374c9
@@ -58,6 +58,9 @@ public:
|
|||
return std::map<KEY,VALUE>(this->begin(), this->end());
|
||||
}
|
||||
|
||||
/** Handy 'exists' function */
|
||||
bool exists(const KEY& e) const { return this->find(e) != this->end(); }
|
||||
|
||||
private:
|
||||
/** Serialization function */
|
||||
friend class boost::serialization::access;
@@ -91,6 +91,9 @@ public:
|
|||
return std::set<VALUE>(this->begin(), this->end());
|
||||
}
|
||||
|
||||
/** Handy 'exists' function */
|
||||
bool exists(const VALUE& e) const { return this->find(e) != this->end(); }
|
||||
|
||||
/** Print to implement Testable */
|
||||
void print(const std::string& str = "") const { FastSetTestableHelper<VALUE>::print(*this, str); }
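A minimal usage sketch of the new exists() helper added in these two hunks (the container type and the keys are arbitrary):

#include <gtsam/base/FastSet.h>

// exists() is just find() != end(), so membership tests read as a predicate.
void existsExample() {
  gtsam::FastSet<size_t> indices;
  indices.insert(3);
  if (!indices.exists(5))
    indices.insert(5);
}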
@@ -131,20 +131,6 @@ namespace gtsam {
|
|||
}
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
template<class CLIQUE>
|
||||
typename BayesTree<CLIQUE>::sharedClique BayesTree<CLIQUE>::addClique(
|
||||
const sharedConditional& conditional, std::list<sharedClique>& child_cliques)
|
||||
{
|
||||
sharedClique new_clique(new Clique(conditional));
|
||||
BOOST_FOREACH(Index j, conditional->frontals())
|
||||
nodes_[j] = new_clique;
|
||||
new_clique->children.assign(child_cliques.begin(), child_cliques.end());
|
||||
BOOST_FOREACH(sharedClique& child, child_cliques)
|
||||
child->parent_ = new_clique;
|
||||
return new_clique;
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
// TODO: Clean up
|
||||
namespace {
@@ -248,7 +234,10 @@ namespace gtsam {
|
|||
template<class CLIQUE>
|
||||
void BayesTree<CLIQUE>::fillNodesIndex(const sharedClique& subtree) {
|
||||
// Add each frontal variable of this root node
|
||||
BOOST_FOREACH(const Key& j, subtree->conditional()->frontals()) { nodes_[j] = subtree; }
|
||||
BOOST_FOREACH(const Key& j, subtree->conditional()->frontals()) {
|
||||
bool inserted = nodes_.insert(std::make_pair(j, subtree)).second;
|
||||
assert(inserted); (void)inserted;
|
||||
}
|
||||
// Fill index for each child
|
||||
typedef typename BayesTree<CLIQUE>::sharedClique sharedClique;
|
||||
BOOST_FOREACH(const sharedClique& child, subtree->children) {
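The replacement body above relies on std::map::insert reporting whether the key was new; a standalone sketch of that idiom (toy key and value types):

#include <cassert>
#include <map>

int main() {
  std::map<int, const char*> nodes;
  bool inserted = nodes.insert(std::make_pair(7, "clique-A")).second;
  assert(inserted); (void)inserted;   // first insertion of key 7 succeeds
  inserted = nodes.insert(std::make_pair(7, "clique-B")).second;
  // inserted is now false: key 7 already existed and the map was left unchanged
  return inserted ? 1 : 0;
}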
@@ -350,7 +339,7 @@ namespace gtsam {
|
|||
// Factor the shortcuts to be conditioned on the full root
|
||||
// Get the set of variables to eliminate, which is C1\B.
|
||||
gttic(Full_root_factoring);
|
||||
shared_ptr p_C1_B; {
|
||||
boost::shared_ptr<typename EliminationTraitsType::BayesTreeType> p_C1_B; {
|
||||
std::vector<Index> C1_minus_B; {
|
||||
FastSet<Index> C1_minus_B_set(C1->conditional()->beginParents(), C1->conditional()->endParents());
|
||||
BOOST_FOREACH(const Index j, *B->conditional()) {
@@ -362,7 +351,7 @@ namespace gtsam {
|
|||
boost::tie(p_C1_B, temp_remaining) =
|
||||
FactorGraphType(p_C1_Bred).eliminatePartialMultifrontal(Ordering(C1_minus_B), function);
|
||||
}
|
||||
shared_ptr p_C2_B; {
|
||||
boost::shared_ptr<typename EliminationTraitsType::BayesTreeType> p_C2_B; {
|
||||
std::vector<Index> C2_minus_B; {
|
||||
FastSet<Index> C2_minus_B_set(C2->conditional()->beginParents(), C2->conditional()->endParents());
|
||||
BOOST_FOREACH(const Index j, *B->conditional()) {
@@ -436,7 +425,7 @@ namespace gtsam {
|
|||
void BayesTree<CLIQUE>::removePath(sharedClique clique, BayesNetType& bn, Cliques& orphans)
|
||||
{
|
||||
// base case is NULL, if so we do nothing and return empties above
|
||||
if (clique!=NULL) {
|
||||
if (clique) {
|
||||
|
||||
// remove the clique from orphans in case it has been added earlier
|
||||
orphans.remove(clique);
@@ -239,9 +239,6 @@ namespace gtsam {
|
|||
/** remove a clique: warning, can result in a forest */
|
||||
void removeClique(sharedClique clique);
|
||||
|
||||
/** add a clique (bottom up) */
|
||||
sharedClique addClique(const sharedConditional& conditional, std::list<sharedClique>& child_cliques);
|
||||
|
||||
/** Fill the nodes index for a subtree */
|
||||
void fillNodesIndex(const sharedClique& subtree);
@@ -22,6 +22,14 @@
|
|||
|
||||
namespace gtsam {
|
||||
|
||||
/* ************************************************************************* */
|
||||
template<class DERIVED, class FACTORGRAPH>
|
||||
void BayesTreeCliqueBase<DERIVED, FACTORGRAPH>::setEliminationResult(
|
||||
const typename FactorGraphType::EliminationResult& eliminationResult)
|
||||
{
|
||||
conditional_ = eliminationResult.first;
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
template<class DERIVED, class FACTORGRAPH>
|
||||
bool BayesTreeCliqueBase<DERIVED, FACTORGRAPH>::equals(
@@ -80,6 +80,11 @@ namespace gtsam {
|
|||
derived_weak_ptr parent_;
|
||||
std::vector<derived_ptr> children;
|
||||
|
||||
/// Fill the elimination result produced during elimination. Here this just stores the
|
||||
/// conditional and ignores the remaining factor, but this is overridden in ISAM2Clique
|
||||
/// to also cache the remaining factor.
|
||||
void setEliminationResult(const typename FactorGraphType::EliminationResult& eliminationResult);
|
||||
|
||||
/// @name Testable
|
||||
/// @{
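The comment above describes the customization point; a rough stand-alone sketch of the pattern it implies (the types and the cachedFactor_ member are stand-ins, not the GTSAM classes):

#include <utility>

struct Conditional {};
struct Factor {};
typedef std::pair<Conditional*, Factor*> EliminationResult;

struct BaseCliqueSketch {
  Conditional* conditional_;
  BaseCliqueSketch() : conditional_(0) {}
  // Base behaviour: keep only the conditional, ignore the remaining factor.
  void setEliminationResult(const EliminationResult& r) { conditional_ = r.first; }
};

// What the comment describes for ISAM2Clique: also cache the remaining factor.
struct CachingCliqueSketch : BaseCliqueSketch {
  Factor* cachedFactor_;
  CachingCliqueSketch() : cachedFactor_(0) {}
  void setEliminationResult(const EliminationResult& r) {
    conditional_ = r.first;
    cachedFactor_ = r.second;
  }
};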
@@ -196,8 +196,8 @@ namespace gtsam {
|
|||
std::pair<boost::shared_ptr<ConditionalType>, boost::shared_ptr<FactorType> > eliminationResult =
|
||||
eliminationFunction(gatheredFactors, Ordering(node->keys));
|
||||
|
||||
// Store conditional in BayesTree clique
|
||||
myData.bayesTreeNode->conditional_ = eliminationResult.first;
|
||||
// Store conditional in BayesTree clique, and in the case of ISAM2Clique also store the remaining factor
|
||||
myData.bayesTreeNode->setEliminationResult(eliminationResult);
|
||||
|
||||
// Store remaining factor in parent's gathered factors
|
||||
if(!eliminationResult.second->empty())
@@ -139,10 +139,7 @@ public:
|
|||
template<typename ITERATOR, class FG>
|
||||
void remove(ITERATOR firstFactor, ITERATOR lastFactor, const FG& factors);
|
||||
|
||||
/** Remove unused empty variables at the end of the ordering (in debug mode
|
||||
* verifies they are empty).
|
||||
* @param nToRemove The number of unused variables at the end to remove
|
||||
*/
|
||||
/** Remove unused empty variables (in debug mode verifies they are empty). */
|
||||
template<typename ITERATOR>
|
||||
void removeUnusedVariables(ITERATOR firstKey, ITERATOR lastKey);
@@ -20,6 +20,7 @@
|
|||
#include <gtsam/base/treeTraversal-inst.h>
|
||||
#include <gtsam/inference/BayesTree-inst.h>
|
||||
#include <gtsam/inference/BayesTreeCliqueBase-inst.h>
|
||||
#include <gtsam/linear/linearAlgorithms-inst.h>
|
||||
#include <gtsam/linear/GaussianBayesTree.h>
|
||||
#include <gtsam/linear/GaussianBayesNet.h>
|
||||
#include <gtsam/linear/VectorValues.h>
@@ -33,63 +34,6 @@ namespace gtsam {
|
|||
/* ************************************************************************* */
|
||||
namespace internal
|
||||
{
|
||||
/* ************************************************************************* */
|
||||
struct OptimizeData {
|
||||
boost::optional<OptimizeData&> parentData;
|
||||
//VectorValues ancestorResults;
|
||||
//VectorValues results;
|
||||
};
|
||||
|
||||
/* ************************************************************************* */
|
||||
/** Pre-order visitor for back-substitution in a Bayes tree. The visitor function operator()()
|
||||
* optimizes the clique given the solution for the parents, and returns the solution for the
|
||||
* clique's frontal variables. In addition, it adds the solution to a global collected
|
||||
* solution that will finally be returned to the user. The reason we pass the individual
|
||||
* clique solutions between nodes is to avoid log(n) lookups over all variables; the lookups
* instead range only over a node's parent variables. */
|
||||
struct OptimizeClique
|
||||
{
|
||||
VectorValues collectedResult;
|
||||
|
||||
OptimizeData operator()(
|
||||
const GaussianBayesTreeClique::shared_ptr& clique,
|
||||
OptimizeData& parentData)
|
||||
{
|
||||
OptimizeData myData;
|
||||
myData.parentData = parentData;
|
||||
// Take any ancestor results we'll need
|
||||
//BOOST_FOREACH(Key parent, clique->conditional_->parents())
|
||||
// myData.ancestorResults.insert(parent, myData.parentData->ancestorResults[parent]);
|
||||
// Solve and store in our results
|
||||
VectorValues result = clique->conditional()->solve(collectedResult/*myData.ancestorResults*/);
|
||||
collectedResult.insert(result);
|
||||
//myData.ancestorResults.insert(result);
|
||||
return myData;
|
||||
}
|
||||
};
|
||||
|
||||
/* ************************************************************************* */
|
||||
//OptimizeData OptimizePreVisitor(const GaussianBayesTreeClique::shared_ptr& clique, OptimizeData& parentData)
|
||||
//{
|
||||
// // Create data - holds a pointer to our parent, a copy of parent solution, and our results
|
||||
// OptimizeData myData;
|
||||
// myData.parentData = parentData;
|
||||
// // Take any ancestor results we'll need
|
||||
// BOOST_FOREACH(Key parent, clique->conditional_->parents())
|
||||
// myData.ancestorResults.insert(parent, myData.parentData->ancestorResults[parent]);
|
||||
// // Solve and store in our results
|
||||
// myData.results.insert(clique->conditional()->solve(myData.ancestorResults));
|
||||
// myData.ancestorResults.insert(myData.results);
|
||||
// return myData;
|
||||
//}
|
||||
|
||||
/* ************************************************************************* */
|
||||
//void OptimizePostVisitor(const GaussianBayesTreeClique::shared_ptr& clique, OptimizeData& myData)
|
||||
//{
|
||||
// // Conglomerate our results to the parent
|
||||
// myData.parentData->results.insert(myData.results);
|
||||
//}
|
||||
|
||||
/* ************************************************************************* */
|
||||
double logDeterminant(const GaussianBayesTreeClique::shared_ptr& clique, double& parentSum)
|
||||
{
@@ -99,17 +43,6 @@ namespace gtsam {
|
|||
}
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
GaussianBayesTree::GaussianBayesTree(const GaussianBayesTree& other) :
|
||||
Base(other) {}
|
||||
|
||||
/* ************************************************************************* */
|
||||
GaussianBayesTree& GaussianBayesTree::operator=(const GaussianBayesTree& other)
|
||||
{
|
||||
(void) Base::operator=(other);
|
||||
return *this;
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
bool GaussianBayesTree::equals(const This& other, double tol) const
|
||||
{
@@ -119,14 +52,7 @@ namespace gtsam {
|
|||
/* ************************************************************************* */
|
||||
VectorValues GaussianBayesTree::optimize() const
|
||||
{
|
||||
gttic(GaussianBayesTree_optimize);
|
||||
//internal::OptimizeData rootData; // Will hold final solution
|
||||
//treeTraversal::DepthFirstForest(*this, rootData, internal::OptimizePreVisitor, internal::OptimizePostVisitor);
|
||||
//return rootData.results;
|
||||
internal::OptimizeData rootData;
|
||||
internal::OptimizeClique preVisitor;
|
||||
treeTraversal::DepthFirstForest(*this, rootData, preVisitor);
|
||||
return preVisitor.collectedResult;
|
||||
return internal::linearAlgorithms::optimizeBayesTree(*this);
|
||||
}
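optimize() now forwards to the templated back-substitution in linearAlgorithms-inst.h; a sketch of the usual call path (the factor graph contents are omitted, any well-constrained GaussianFactorGraph would do):

#include <boost/shared_ptr.hpp>
#include <gtsam/linear/GaussianBayesTree.h>
#include <gtsam/linear/GaussianFactorGraph.h>
#include <gtsam/linear/VectorValues.h>

gtsam::VectorValues solveSketch(const gtsam::GaussianFactorGraph& graph) {
  // Multifrontal elimination produces the Bayes tree; optimize() back-substitutes.
  boost::shared_ptr<gtsam::GaussianBayesTree> bayesTree = graph.eliminateMultifrontal();
  return bayesTree->optimize();
}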
|
||||
|
||||
/* ************************************************************************* */
@@ -59,14 +59,6 @@ namespace gtsam {
|
|||
/** Default constructor, creates an empty Bayes tree */
|
||||
GaussianBayesTree() {}
|
||||
|
||||
/** Makes a deep copy of the tree structure, but only pointers to conditionals are
|
||||
* copied, the conditionals and their matrices are not cloned. */
|
||||
GaussianBayesTree(const GaussianBayesTree& other);
|
||||
|
||||
/** Makes a deep copy of the tree structure, but only pointers to conditionals are
|
||||
* copied, the conditionals and their matrices are not cloned. */
|
||||
GaussianBayesTree& operator=(const GaussianBayesTree& other);
|
||||
|
||||
/** Check equality */
|
||||
bool equals(const This& other, double tol = 1e-9) const;
@@ -35,18 +35,6 @@ namespace gtsam {
|
|||
const GaussianFactorGraph& factorGraph, const Ordering& order) :
|
||||
Base(factorGraph, order) {}
|
||||
|
||||
/* ************************************************************************* */
|
||||
GaussianEliminationTree::GaussianEliminationTree(
|
||||
const This& other) :
|
||||
Base(other) {}
|
||||
|
||||
/* ************************************************************************* */
|
||||
GaussianEliminationTree& GaussianEliminationTree::operator=(const This& other)
|
||||
{
|
||||
(void) Base::operator=(other);
|
||||
return *this;
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
bool GaussianEliminationTree::equals(const This& other, double tol) const
|
||||
{
@@ -51,14 +51,6 @@ namespace gtsam {
|
|||
GaussianEliminationTree(const GaussianFactorGraph& factorGraph,
|
||||
const Ordering& order);
|
||||
|
||||
/** Copy constructor - makes a deep copy of the tree structure, but only pointers to factors are
|
||||
* copied, factors are not cloned. */
|
||||
GaussianEliminationTree(const This& other);
|
||||
|
||||
/** Assignment operator - makes a deep copy of the tree structure, but only pointers to factors are
|
||||
* copied, factors are not cloned. */
|
||||
This& operator=(const This& other);
|
||||
|
||||
/** Test whether the tree is equal to another */
|
||||
bool equals(const This& other, double tol = 1e-9) const;
@@ -58,7 +58,7 @@ namespace gtsam {
|
|||
virtual double error(const VectorValues& c) const = 0; /** 0.5*(A*x-b)'*D*(A*x-b) */
|
||||
|
||||
/** Return the dimension of the variable pointed to by the given key iterator */
|
||||
virtual size_t getDim(const_iterator variable) const = 0;
|
||||
virtual DenseIndex getDim(const_iterator variable) const = 0;
|
||||
|
||||
/**
|
||||
* Return a dense \f$ [ \;A\;b\; ] \in \mathbb{R}^{m \times n+1} \f$
@@ -30,15 +30,4 @@ namespace gtsam {
|
|||
const GaussianEliminationTree& eliminationTree) :
|
||||
Base(Base::FromEliminationTree(eliminationTree)) {}
|
||||
|
||||
/* ************************************************************************* */
|
||||
GaussianJunctionTree::GaussianJunctionTree(const This& other) :
|
||||
Base(other) {}
|
||||
|
||||
/* ************************************************************************* */
|
||||
GaussianJunctionTree& GaussianJunctionTree::operator=(const This& other)
|
||||
{
|
||||
(void) Base::operator=(other);
|
||||
return *this;
|
||||
}
|
||||
|
||||
}
@@ -61,14 +61,6 @@ namespace gtsam {
|
|||
* @return The elimination tree
|
||||
*/
|
||||
GaussianJunctionTree(const GaussianEliminationTree& eliminationTree);
|
||||
|
||||
/** Copy constructor - makes a deep copy of the tree structure, but only pointers to factors are
|
||||
* copied, factors are not cloned. */
|
||||
GaussianJunctionTree(const This& other);
|
||||
|
||||
/** Assignment operator - makes a deep copy of the tree structure, but only pointers to factors are
|
||||
* copied, factors are not cloned. */
|
||||
This& operator=(const This& other);
|
||||
};
|
||||
|
||||
}
@@ -70,7 +70,10 @@ Scatter::Scatter(const GaussianFactorGraph& gfg, boost::optional<const Ordering&
|
|||
BOOST_FOREACH(const GaussianFactor::shared_ptr& factor, gfg) {
|
||||
if(factor) {
|
||||
for(GaussianFactor::const_iterator variable = factor->begin(); variable != factor->end(); ++variable) {
|
||||
this->insert(make_pair(*variable, SlotEntry(none, factor->getDim(variable))));
|
||||
// TODO: Fix this hack to cope with zero-row Jacobians that come from BayesTreeOrphanWrappers
|
||||
const JacobianFactor* asJacobian = dynamic_cast<const JacobianFactor*>(factor.get());
|
||||
if(!asJacobian || asJacobian->rows() > 0)
|
||||
this->insert(make_pair(*variable, SlotEntry(none, factor->getDim(variable))));
|
||||
}
|
||||
}
|
||||
}
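The zero-row check above guards against the empty Jacobians produced by BayesTreeOrphanWrappers; a stand-alone sketch of the guard's shape (FactorBase and RowFactor are stand-ins for GaussianFactor and JacobianFactor):

#include <boost/shared_ptr.hpp>

struct FactorBase { virtual ~FactorBase() {} };
struct RowFactor : FactorBase {
  int rows_;
  explicit RowFactor(int rows) : rows_(rows) {}
  int rows() const { return rows_; }
};

// True when the factor should get a slot: anything that is not a RowFactor,
// or a RowFactor with at least one row.
bool contributesSlot(const boost::shared_ptr<FactorBase>& factor) {
  const RowFactor* asRow = dynamic_cast<const RowFactor*>(factor.get());
  return !asRow || asRow->rows() > 0;
}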
@@ -221,7 +221,7 @@ namespace gtsam {
|
|||
* @param variable An iterator pointing to the slot in this factor. You can
|
||||
* use, for example, begin() + 2 to get the 3rd variable in this factor.
|
||||
*/
|
||||
virtual size_t getDim(const_iterator variable) const { return info_(variable-this->begin(), 0).rows(); }
|
||||
virtual DenseIndex getDim(const_iterator variable) const { return info_(variable-this->begin(), 0).rows(); }
|
||||
|
||||
/** Return the number of columns and rows of the Hessian matrix, including the information vector. */
|
||||
size_t rows() const { return info_.rows(); }
@@ -227,7 +227,7 @@ namespace gtsam {
|
|||
/** Return the dimension of the variable pointed to by the given key iterator
|
||||
* todo: Remove this in favor of keeping track of dimensions with variables?
|
||||
*/
|
||||
virtual size_t getDim(const_iterator variable) const { return Ab_(variable - begin()).cols(); }
|
||||
virtual DenseIndex getDim(const_iterator variable) const { return Ab_(variable - begin()).cols(); }
|
||||
|
||||
/**
|
||||
* return the number of rows in the corresponding linear system
@@ -53,6 +53,24 @@ namespace gtsam {
|
|||
return result;
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
void VectorValues::update(const VectorValues& values)
|
||||
{
|
||||
iterator hint = begin();
|
||||
BOOST_FOREACH(const KeyValuePair& key_value, values)
|
||||
{
|
||||
// Use this trick to find the value using a hint, since we are inserting from another sorted map
|
||||
size_t oldSize = values_.size();
|
||||
hint = values_.insert(hint, key_value);
|
||||
if(values_.size() > oldSize) {
|
||||
values_.erase(hint);
|
||||
throw std::out_of_range("Requested to update a VectorValues with another VectorValues that contains keys not present in the first.");
|
||||
} else {
|
||||
hint->second = key_value.second;
|
||||
}
|
||||
}
|
||||
}
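A usage sketch of the update() semantics implemented above (keys and dimensions are made up): update() overwrites entries that already exist and throws if a key is missing, whereas insert() requires the key to be new.

#include <gtsam/linear/VectorValues.h>

void updateExample() {
  gtsam::Vector v2(2); v2 << 1.0, 2.0;
  gtsam::Vector v1(1); v1 << 3.0;

  gtsam::VectorValues x;
  x.insert(0, v2);
  x.insert(1, v1);

  gtsam::VectorValues delta;
  gtsam::Vector v1b(1); v1b << 4.0;
  delta.insert(1, v1b);
  x.update(delta);   // key 1 exists, so its value is replaced in place

  // delta.insert(2, v1b); x.update(delta);  // would throw std::out_of_range
}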
|
||||
|
||||
/* ************************************************************************* */
|
||||
void VectorValues::insert(const VectorValues& values)
|
||||
{
@@ -157,24 +157,31 @@ namespace gtsam {
|
|||
return item->second;
|
||||
}
|
||||
|
||||
/** Read/write access to the vector value with key \c j, throws std::out_of_range if \c j does not exist, identical to at(Key). */
|
||||
/** Read/write access to the vector value with key \c j, throws std::out_of_range if \c j does
|
||||
* not exist, identical to at(Key). */
|
||||
Vector& operator[](Key j) { return at(j); }
|
||||
|
||||
/** Access the vector value with key \c j (const version), throws std::out_of_range if \c j does not exist, identical to at(Key). */
|
||||
/** Access the vector value with key \c j (const version), throws std::out_of_range if \c j does
|
||||
* not exist, identical to at(Key). */
|
||||
const Vector& operator[](Key j) const { return at(j); }
|
||||
|
||||
/** Insert a vector \c value with key \c j. Throws an invalid_argument exception if the key \c j is already used.
|
||||
/** For all key/value pairs in \c values, replace values with corresponding keys in this class
|
||||
* with those in \c values. Throws std::out_of_range if any keys in \c values are not present
|
||||
* in this class. */
|
||||
void update(const VectorValues& values);
|
||||
|
||||
/** Insert a vector \c value with key \c j. Throws an invalid_argument exception if the key \c
|
||||
* j is already used.
|
||||
* @param value The vector to be inserted.
|
||||
* @param j The index with which the value will be associated.
|
||||
*/
|
||||
* @param j The index with which the value will be associated. */
|
||||
void insert(Key j, const Vector& value) {
|
||||
insert(std::pair<Key, const Vector&>(j, value)); // Note only passing a reference to the Vector
|
||||
}
|
||||
|
||||
/** Insert a vector \c value with key \c j. Throws an invalid_argument exception if the key \c j is already used.
|
||||
/** Insert a vector \c value with key \c j. Throws an invalid_argument exception if the key \c
|
||||
* j is already used.
|
||||
* @param value The vector to be inserted.
|
||||
* @param j The index with which the value will be associated.
|
||||
*/
|
||||
* @param j The index with which the value will be associated. */
|
||||
void insert(std::pair<Key, const Vector&> key_value) {
|
||||
// Note that here we accept a pair with a reference to the Vector, but the Vector is copied as
|
||||
// it is inserted into the values_ map.
@@ -195,6 +202,12 @@ namespace gtsam {
|
|||
std::pair<iterator, bool> tryInsert(Key j, const Vector& value) {
|
||||
return values_.insert(std::make_pair(j, value)); }
|
||||
|
||||
/** Erase the vector with the given key, or throw std::invalid_argument if it does not exist */
|
||||
void erase(Key var) {
|
||||
if(values_.erase(var) == 0)
|
||||
throw std::invalid_argument("Requested variable '" + DefaultKeyFormatter(var) + "', is not in this VectorValues.");
|
||||
}
|
||||
|
||||
/** Set all values to zero vectors. */
|
||||
void setZero();
@@ -0,0 +1,103 @@
|
|||
/* ----------------------------------------------------------------------------
|
||||
|
||||
* GTSAM Copyright 2010, Georgia Tech Research Corporation,
|
||||
* Atlanta, Georgia 30332-0415
|
||||
* All Rights Reserved
|
||||
* Authors: Frank Dellaert, et al. (see THANKS for the full author list)
|
||||
|
||||
* See LICENSE for the license information
|
||||
|
||||
* -------------------------------------------------------------------------- */
|
||||
|
||||
/**
|
||||
* @file linearAlgorithms-inst.h
|
||||
* @brief Templated algorithms that are used in multiple places in the linear module
|
||||
* @author Richard Roberts
|
||||
*/
|
||||
|
||||
#include <gtsam/linear/VectorValues.h>
|
||||
#include <gtsam/base/treeTraversal-inst.h>
|
||||
|
||||
#include <boost/optional.hpp>
|
||||
#include <boost/shared_ptr.hpp>
|
||||
|
||||
namespace gtsam
|
||||
{
|
||||
namespace internal
|
||||
{
|
||||
namespace linearAlgorithms
|
||||
{
|
||||
/* ************************************************************************* */
|
||||
struct OptimizeData {
|
||||
boost::optional<OptimizeData&> parentData;
|
||||
//VectorValues ancestorResults;
|
||||
//VectorValues results;
|
||||
};
|
||||
|
||||
/* ************************************************************************* */
|
||||
/** Pre-order visitor for back-substitution in a Bayes tree. The visitor function operator()()
|
||||
* optimizes the clique given the solution for the parents, and returns the solution for the
|
||||
* clique's frontal variables. In addition, it adds the solution to a global collected
|
||||
* solution that will finally be returned to the user. The reason we pass the individual
|
||||
* clique solutions between nodes is to avoid log(n) lookups over all variables; the lookups
* instead range only over a node's parent variables. */
|
||||
template<class CLIQUE>
|
||||
struct OptimizeClique
|
||||
{
|
||||
VectorValues collectedResult;
|
||||
|
||||
OptimizeData operator()(
|
||||
const boost::shared_ptr<CLIQUE>& clique,
|
||||
OptimizeData& parentData)
|
||||
{
|
||||
OptimizeData myData;
|
||||
myData.parentData = parentData;
|
||||
// Take any ancestor results we'll need
|
||||
//BOOST_FOREACH(Key parent, clique->conditional_->parents())
|
||||
// myData.ancestorResults.insert(parent, myData.parentData->ancestorResults[parent]);
|
||||
// Solve and store in our results
|
||||
VectorValues result = clique->conditional()->solve(collectedResult/*myData.ancestorResults*/);
|
||||
collectedResult.insert(result);
|
||||
//myData.ancestorResults.insert(result);
|
||||
return myData;
|
||||
}
|
||||
};
|
||||
|
||||
/* ************************************************************************* */
|
||||
//OptimizeData OptimizePreVisitor(const GaussianBayesTreeClique::shared_ptr& clique, OptimizeData& parentData)
|
||||
//{
|
||||
// // Create data - holds a pointer to our parent, a copy of parent solution, and our results
|
||||
// OptimizeData myData;
|
||||
// myData.parentData = parentData;
|
||||
// // Take any ancestor results we'll need
|
||||
// BOOST_FOREACH(Key parent, clique->conditional_->parents())
|
||||
// myData.ancestorResults.insert(parent, myData.parentData->ancestorResults[parent]);
|
||||
// // Solve and store in our results
|
||||
// myData.results.insert(clique->conditional()->solve(myData.ancestorResults));
|
||||
// myData.ancestorResults.insert(myData.results);
|
||||
// return myData;
|
||||
//}
|
||||
|
||||
/* ************************************************************************* */
|
||||
//void OptimizePostVisitor(const GaussianBayesTreeClique::shared_ptr& clique, OptimizeData& myData)
|
||||
//{
|
||||
// // Conglomerate our results to the parent
|
||||
// myData.parentData->results.insert(myData.results);
|
||||
//}
|
||||
|
||||
/* ************************************************************************* */
|
||||
template<class BAYESTREE>
|
||||
VectorValues optimizeBayesTree(const BAYESTREE& bayesTree)
|
||||
{
|
||||
gttic(linear_optimizeBayesTree);
|
||||
//internal::OptimizeData rootData; // Will hold final solution
|
||||
//treeTraversal::DepthFirstForest(*this, rootData, internal::OptimizePreVisitor, internal::OptimizePostVisitor);
|
||||
//return rootData.results;
|
||||
OptimizeData rootData;
|
||||
OptimizeClique<typename BAYESTREE::Clique> preVisitor;
|
||||
treeTraversal::DepthFirstForest(bayesTree, rootData, preVisitor);
|
||||
return preVisitor.collectedResult;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
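optimizeBayesTree drives treeTraversal::DepthFirstForest with the stateful pre-visitor above; a stripped-down illustration of that visitor pattern on a toy tree (ToyNode and SumVisitor are stand-ins, not the GTSAM traversal API):

#include <cstddef>
#include <vector>

struct ToyNode {
  int value;
  std::vector<ToyNode*> children;
};

struct SumVisitor {
  int total;
  SumVisitor() : total(0) {}
  // Pre-order visit: parent data in, this node's data out (like OptimizeData).
  int operator()(const ToyNode* node, int parentDepth) {
    total += node->value;     // accumulate into shared state, like collectedResult
    return parentDepth + 1;
  }
};

void visitPreOrder(const ToyNode* node, int parentData, SumVisitor& visitor) {
  const int myData = visitor(node, parentData);
  for (std::size_t i = 0; i < node->children.size(); ++i)
    visitPreOrder(node->children[i], myData, visitor);
}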
@@ -64,14 +64,20 @@ void DoglegOptimizer::iterate(void) {
|
|||
|
||||
if ( params_.isMultifrontal() ) {
|
||||
GaussianBayesTree bt = *linear->eliminateMultifrontal(*params_.ordering, params_.getEliminationFunction());
|
||||
result = DoglegOptimizerImpl::Iterate(state_.Delta, DoglegOptimizerImpl::ONE_STEP_PER_ITERATION, bt, graph_, state_.values, state_.error, dlVerbose);
|
||||
VectorValues dx_u = bt.optimizeGradientSearch();
|
||||
VectorValues dx_n = bt.optimize();
|
||||
result = DoglegOptimizerImpl::Iterate(state_.Delta, DoglegOptimizerImpl::ONE_STEP_PER_ITERATION,
|
||||
dx_u, dx_n, bt, graph_, state_.values, state_.error, dlVerbose);
|
||||
}
|
||||
else if ( params_.isSequential() ) {
|
||||
GaussianBayesNet bn = *linear->eliminateSequential(*params_.ordering, params_.getEliminationFunction());
|
||||
result = DoglegOptimizerImpl::Iterate(state_.Delta, DoglegOptimizerImpl::ONE_STEP_PER_ITERATION, bn, graph_, state_.values, state_.error, dlVerbose);
|
||||
VectorValues dx_u = bn.optimizeGradientSearch();
|
||||
VectorValues dx_n = bn.optimize();
|
||||
result = DoglegOptimizerImpl::Iterate(state_.Delta, DoglegOptimizerImpl::ONE_STEP_PER_ITERATION,
|
||||
dx_u, dx_n, bn, graph_, state_.values, state_.error, dlVerbose);
|
||||
}
|
||||
else if ( params_.isCG() ) {
|
||||
throw runtime_error("todo: ");
|
||||
throw runtime_error("Dogleg is not currently compatible with the linear conjugate gradient solver");
|
||||
}
|
||||
else {
|
||||
throw runtime_error("Optimization parameter is invalid: DoglegParams::elimination");
@@ -100,8 +100,8 @@ struct GTSAM_EXPORT DoglegOptimizerImpl {
|
|||
*/
|
||||
template<class M, class F, class VALUES>
|
||||
static IterationResult Iterate(
|
||||
double Delta, TrustRegionAdaptationMode mode, const M& Rd,
|
||||
const F& f, const VALUES& x0, const double f_error, const bool verbose=false);
|
||||
double Delta, TrustRegionAdaptationMode mode, const VectorValues& dx_u, const VectorValues& dx_n,
|
||||
const M& Rd, const F& f, const VALUES& x0, const double f_error, const bool verbose=false);
|
||||
|
||||
/**
|
||||
* Compute the dogleg point given a trust region radius \f$ \Delta \f$. The
@@ -143,16 +143,9 @@ struct GTSAM_EXPORT DoglegOptimizerImpl {
|
|||
/* ************************************************************************* */
|
||||
template<class M, class F, class VALUES>
|
||||
typename DoglegOptimizerImpl::IterationResult DoglegOptimizerImpl::Iterate(
|
||||
double Delta, TrustRegionAdaptationMode mode, const M& Rd,
|
||||
const F& f, const VALUES& x0, const double f_error, const bool verbose)
|
||||
double Delta, TrustRegionAdaptationMode mode, const VectorValues& dx_u, const VectorValues& dx_n,
|
||||
const M& Rd, const F& f, const VALUES& x0, const double f_error, const bool verbose)
|
||||
{
|
||||
// Compute steepest descent and Newton's method points
|
||||
gttic(optimizeGradientSearch);
|
||||
VectorValues dx_u = GaussianFactorGraph(Rd).optimizeGradientSearch();
|
||||
gttoc(optimizeGradientSearch);
|
||||
gttic(optimize);
|
||||
VectorValues dx_n = Rd.optimize();
|
||||
gttoc(optimize);
|
||||
gttic(M_error);
|
||||
const double M_error = Rd.error(VectorValues::Zero(dx_u));
|
||||
gttoc(M_error);
@@ -16,8 +16,6 @@
|
|||
* @author Richard Roberts
|
||||
*/
|
||||
|
||||
#if 0
|
||||
|
||||
#include <gtsam/nonlinear/ISAM2-impl.h>
|
||||
#include <gtsam/nonlinear/Symbol.h> // for selective linearization thresholds
|
||||
#include <gtsam/base/debug.h>
@@ -32,137 +30,60 @@ namespace gtsam {
|
|||
/* ************************************************************************* */
|
||||
void ISAM2::Impl::AddVariables(
|
||||
const Values& newTheta, Values& theta, VectorValues& delta,
|
||||
VectorValues& deltaNewton, VectorValues& RgProd, vector<bool>& replacedKeys,
|
||||
Ordering& ordering, const KeyFormatter& keyFormatter) {
|
||||
VectorValues& deltaNewton, VectorValues& RgProd,
|
||||
const KeyFormatter& keyFormatter)
|
||||
{
|
||||
const bool debug = ISDEBUG("ISAM2 AddVariables");
|
||||
|
||||
theta.insert(newTheta);
|
||||
if(debug) newTheta.print("The new variables are: ");
|
||||
// Add the new keys onto the ordering, add zeros to the delta for the new variables
|
||||
std::vector<Index> dims(newTheta.dims(*newTheta.orderingArbitrary()));
|
||||
if(debug) cout << "New variables have total dimensionality " << accumulate(dims.begin(), dims.end(), 0) << endl;
|
||||
const size_t originalnVars = delta.size();
|
||||
delta.append(dims);
|
||||
deltaNewton.append(dims);
|
||||
RgProd.append(dims);
|
||||
for(Index j = originalnVars; j < delta.size(); ++j) {
|
||||
delta[j].setZero();
|
||||
deltaNewton[j].setZero();
|
||||
RgProd[j].setZero();
|
||||
}
|
||||
{
|
||||
Index nextVar = originalnVars;
|
||||
BOOST_FOREACH(const Values::ConstKeyValuePair& key_value, newTheta) {
|
||||
ordering.insert(key_value.key, nextVar);
|
||||
if(debug) cout << "Adding variable " << keyFormatter(key_value.key) << " with order " << nextVar << endl;
|
||||
++ nextVar;
|
||||
}
|
||||
assert(ordering.size() == delta.size());
|
||||
}
|
||||
replacedKeys.resize(ordering.size(), false);
|
||||
// Add zeros into the VectorValues
|
||||
delta.insert(newTheta.zeroVectors());
|
||||
deltaNewton.insert(newTheta.zeroVectors());
|
||||
RgProd.insert(newTheta.zeroVectors());
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
void ISAM2::Impl::RemoveVariables(const FastSet<Key>& unusedKeys, const ISAM2Clique::shared_ptr& root,
|
||||
void ISAM2::Impl::RemoveVariables(const FastSet<Key>& unusedKeys, const std::vector<ISAM2::sharedClique>& roots,
|
||||
Values& theta, VariableIndex& variableIndex,
|
||||
VectorValues& delta, VectorValues& deltaNewton, VectorValues& RgProd,
|
||||
std::vector<bool>& replacedKeys, Ordering& ordering, Base::Nodes& nodes,
|
||||
GaussianFactorGraph& linearFactors, FastSet<Key>& fixedVariables) {
|
||||
|
||||
// Get indices of unused keys
|
||||
vector<Index> unusedIndices; unusedIndices.reserve(unusedKeys.size());
|
||||
BOOST_FOREACH(Key key, unusedKeys) { unusedIndices.push_back(ordering[key]); }
|
||||
|
||||
// Create a permutation that shifts the unused variables to the end
|
||||
Permutation unusedToEnd = Permutation::PushToBack(unusedIndices, delta.size());
|
||||
Permutation unusedToEndInverse = *unusedToEnd.inverse();
|
||||
|
||||
// Use the permutation to remove unused variables while shifting all the others to take up the space
|
||||
variableIndex.permuteInPlace(unusedToEnd);
|
||||
variableIndex.removeUnusedAtEnd(unusedIndices.size());
|
||||
{
|
||||
// Create a vector of variable dimensions with the unused ones removed
|
||||
// by applying the unusedToEnd permutation to the original vector of
|
||||
// variable dimensions. We only allocate space in the shifted dims
|
||||
// vector for the used variables, so that the unused ones are dropped
|
||||
// when the permutation is applied.
|
||||
vector<size_t> originalDims = delta.dims();
|
||||
vector<size_t> dims(delta.size() - unusedIndices.size());
|
||||
unusedToEnd.applyToCollection(dims, originalDims);
|
||||
|
||||
// Copy from the old data structures to new ones, only iterating up to
|
||||
// the number of used variables, and applying the unusedToEnd permutation
|
||||
// in order to remove the unused variables.
|
||||
VectorValues newDelta(dims);
|
||||
VectorValues newDeltaNewton(dims);
|
||||
VectorValues newDeltaGradSearch(dims);
|
||||
std::vector<bool> newReplacedKeys(replacedKeys.size() - unusedIndices.size());
|
||||
Base::Nodes newNodes(replacedKeys.size() - unusedIndices.size());
|
||||
|
||||
for(size_t j = 0; j < dims.size(); ++j) {
|
||||
newDelta[j] = delta[unusedToEnd[j]];
|
||||
newDeltaNewton[j] = deltaNewton[unusedToEnd[j]];
|
||||
newDeltaGradSearch[j] = RgProd[unusedToEnd[j]];
|
||||
newReplacedKeys[j] = replacedKeys[unusedToEnd[j]];
|
||||
newNodes[j] = nodes[unusedToEnd[j]];
|
||||
}
|
||||
|
||||
// Swap the new data structures with the outputs of this function
|
||||
delta.swap(newDelta);
|
||||
deltaNewton.swap(newDeltaNewton);
|
||||
RgProd.swap(newDeltaGradSearch);
|
||||
replacedKeys.swap(newReplacedKeys);
|
||||
nodes.swap(newNodes);
|
||||
}
|
||||
|
||||
// Reorder and remove from ordering, solution, and fixed keys
|
||||
ordering.permuteInPlace(unusedToEnd);
|
||||
BOOST_REVERSE_FOREACH(Key key, unusedKeys) {
|
||||
Ordering::value_type removed = ordering.pop_back();
|
||||
assert(removed.first == key);
|
||||
theta.erase(key);
|
||||
fixedVariables.erase(key);
|
||||
}
|
||||
|
||||
// Finally, permute references to variables
|
||||
if(root)
|
||||
root->permuteWithInverse(unusedToEndInverse);
|
||||
linearFactors.permuteWithInverse(unusedToEndInverse);
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
FastSet<Index> ISAM2::Impl::IndicesFromFactors(const Ordering& ordering, const NonlinearFactorGraph& factors) {
|
||||
FastSet<Index> indices;
|
||||
BOOST_FOREACH(const NonlinearFactor::shared_ptr& factor, factors) {
|
||||
BOOST_FOREACH(Key key, factor->keys()) {
|
||||
indices.insert(ordering[key]);
|
||||
}
|
||||
FastSet<Key>& replacedKeys, Base::Nodes& nodes,
|
||||
FastSet<Key>& fixedVariables)
|
||||
{
|
||||
variableIndex.removeUnusedVariables(unusedKeys.begin(), unusedKeys.end());
|
||||
BOOST_FOREACH(Key key, unusedKeys) {
|
||||
delta.erase(key);
|
||||
deltaNewton.erase(key);
|
||||
RgProd.erase(key);
|
||||
replacedKeys.erase(key);
|
||||
nodes.erase(key);
|
||||
theta.erase(key);
|
||||
fixedVariables.erase(key);
|
||||
}
|
||||
return indices;
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
FastSet<Index> ISAM2::Impl::CheckRelinearizationFull(const VectorValues& delta, const Ordering& ordering,
|
||||
const ISAM2Params::RelinearizationThreshold& relinearizeThreshold, const KeyFormatter& keyFormatter) {
|
||||
FastSet<Index> ISAM2::Impl::CheckRelinearizationFull(const VectorValues& delta,
|
||||
const ISAM2Params::RelinearizationThreshold& relinearizeThreshold)
|
||||
{
|
||||
FastSet<Index> relinKeys;
|
||||
|
||||
if(relinearizeThreshold.type() == typeid(double)) {
|
||||
double threshold = boost::get<double>(relinearizeThreshold);
|
||||
for(Index var=0; var<delta.size(); ++var) {
|
||||
double maxDelta = delta[var].lpNorm<Eigen::Infinity>();
|
||||
if(maxDelta >= threshold) {
|
||||
relinKeys.insert(var);
|
||||
}
|
||||
if(const double* threshold = boost::get<const double*>(relinearizeThreshold))
|
||||
{
|
||||
BOOST_FOREACH(const VectorValues::KeyValuePair& key_delta, delta) {
|
||||
double maxDelta = key_delta.second.lpNorm<Eigen::Infinity>();
|
||||
if(maxDelta >= *threshold)
|
||||
relinKeys.insert(key_delta.first);
|
||||
}
|
||||
} else if(relinearizeThreshold.type() == typeid(FastMap<char,Vector>)) {
|
||||
const FastMap<char,Vector>& thresholds = boost::get<FastMap<char,Vector> >(relinearizeThreshold);
|
||||
BOOST_FOREACH(const Ordering::value_type& key_index, ordering) {
|
||||
const Vector& threshold = thresholds.find(Symbol(key_index.first).chr())->second;
|
||||
Index j = key_index.second;
|
||||
if(threshold.rows() != delta[j].rows())
|
||||
throw std::invalid_argument("Relinearization threshold vector dimensionality passed into iSAM2 parameters does not match actual variable dimensionality");
|
||||
if((delta[j].array().abs() > threshold.array()).any())
|
||||
relinKeys.insert(j);
|
||||
}
|
||||
else if(const FastMap<char,Vector>* thresholds = boost::get<const FastMap<char,Vector>*>(relinearizeThreshold))
|
||||
{
|
||||
BOOST_FOREACH(const VectorValues::KeyValuePair& key_delta, delta) {
|
||||
const Vector& threshold = thresholds->find(Symbol(key_delta.first).chr())->second;
|
||||
if(threshold.rows() != key_delta.second.rows())
|
||||
throw std::invalid_argument("Relinearization threshold vector dimensionality for '" + std::string(1, Symbol(key_delta.first).chr()) + "' passed into iSAM2 parameters does not match actual variable dimensionality.");
|
||||
if((key_delta.second.array().abs() > threshold.array()).any())
|
||||
relinKeys.insert(key_delta.first);
|
||||
}
|
||||
}
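CheckRelinearizationFull now reads the two threshold forms directly off the delta's key/value pairs; a sketch of how those thresholds are typically configured (the 'x'/'l' symbol prefixes and tolerance numbers are illustrative):

#include <gtsam/nonlinear/ISAM2.h>

gtsam::ISAM2Params relinearizationParamsSketch() {
  gtsam::ISAM2Params params;

  // Form 1: a single scalar threshold on the max-norm of each variable's delta.
  params.relinearizeThreshold = 0.1;

  // Form 2: per-symbol-type vector thresholds.
  gtsam::FastMap<char, gtsam::Vector> thresholds;
  gtsam::Vector poseTol(3);  poseTol  << 0.05, 0.05, 0.01;
  gtsam::Vector pointTol(2); pointTol << 0.1, 0.1;
  thresholds['x'] = poseTol;
  thresholds['l'] = pointTol;
  params.relinearizeThreshold = thresholds;

  return params;
}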
@@ -170,11 +91,12 @@ FastSet<Index> ISAM2::Impl::CheckRelinearizationFull(const VectorValues& delta,
|
|||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
void CheckRelinearizationRecursiveDouble(FastSet<Index>& relinKeys, double threshold, const VectorValues& delta, const ISAM2Clique::shared_ptr& clique) {
|
||||
|
||||
void CheckRelinearizationRecursiveDouble(FastSet<Index>& relinKeys, double threshold,
|
||||
const VectorValues& delta, const ISAM2Clique::shared_ptr& clique)
|
||||
{
|
||||
// Check the current clique for relinearization
|
||||
bool relinearize = false;
|
||||
BOOST_FOREACH(Index var, clique->conditional()->keys()) {
|
||||
BOOST_FOREACH(Key var, *clique->conditional()) {
|
||||
double maxDelta = delta[var].lpNorm<Eigen::Infinity>();
|
||||
if(maxDelta >= threshold) {
|
||||
relinKeys.insert(var);
@@ -184,31 +106,31 @@ void CheckRelinearizationRecursiveDouble(FastSet<Index>& relinKeys, double thres
|
|||
|
||||
// If this node was relinearized, also check its children
|
||||
if(relinearize) {
|
||||
BOOST_FOREACH(const ISAM2Clique::shared_ptr& child, clique->children()) {
|
||||
BOOST_FOREACH(const ISAM2Clique::shared_ptr& child, clique->children) {
|
||||
CheckRelinearizationRecursiveDouble(relinKeys, threshold, delta, child);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
void CheckRelinearizationRecursiveMap(FastSet<Index>& relinKeys, const FastMap<char,Vector>& thresholds, const VectorValues& delta, const Ordering& ordering, const ISAM2Clique::shared_ptr& clique) {
|
||||
|
||||
void CheckRelinearizationRecursiveMap(FastSet<Index>& relinKeys, const FastMap<char,Vector>& thresholds,
|
||||
const VectorValues& delta,
|
||||
const ISAM2Clique::shared_ptr& clique)
|
||||
{
|
||||
// Check the current clique for relinearization
|
||||
bool relinearize = false;
|
||||
BOOST_FOREACH(Index var, clique->conditional()->keys()) {
|
||||
|
||||
// Lookup the key associated with this index
|
||||
Key key = ordering.key(var);
|
||||
|
||||
BOOST_FOREACH(Key var, *clique->conditional()) {
|
||||
// Find the threshold for this variable type
|
||||
const Vector& threshold = thresholds.find(Symbol(key).chr())->second;
|
||||
const Vector& threshold = thresholds.find(Symbol(var).chr())->second;
|
||||
|
||||
const Vector& deltaVar = delta[var];
|
||||
|
||||
// Verify the threshold vector matches the actual variable size
|
||||
if(threshold.rows() != delta[var].rows())
|
||||
throw std::invalid_argument("Relinearization threshold vector dimensionality passed into iSAM2 parameters does not match actual variable dimensionality");
|
||||
if(threshold.rows() != deltaVar.rows())
|
||||
throw std::invalid_argument("Relinearization threshold vector dimensionality for '" + std::string(1, Symbol(var).chr()) + "' passed into iSAM2 parameters does not match actual variable dimensionality.");
|
||||
|
||||
// Check for relinearization
|
||||
if((delta[var].array().abs() > threshold.array()).any()) {
|
||||
if((deltaVar.array().abs() > threshold.array()).any()) {
|
||||
relinKeys.insert(var);
|
||||
relinearize = true;
|
||||
}
@@ -216,52 +138,54 @@ void CheckRelinearizationRecursiveMap(FastSet<Index>& relinKeys, const FastMap<c
|
|||
|
||||
// If this node was relinearized, also check its children
|
||||
if(relinearize) {
|
||||
BOOST_FOREACH(const ISAM2Clique::shared_ptr& child, clique->children()) {
|
||||
CheckRelinearizationRecursiveMap(relinKeys, thresholds, delta, ordering, child);
|
||||
BOOST_FOREACH(const ISAM2Clique::shared_ptr& child, clique->children) {
|
||||
CheckRelinearizationRecursiveMap(relinKeys, thresholds, delta, child);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
FastSet<Index> ISAM2::Impl::CheckRelinearizationPartial(const ISAM2Clique::shared_ptr& root, const VectorValues& delta, const Ordering& ordering,
|
||||
const ISAM2Params::RelinearizationThreshold& relinearizeThreshold, const KeyFormatter& keyFormatter) {
|
||||
|
||||
FastSet<Index> ISAM2::Impl::CheckRelinearizationPartial(const std::vector<ISAM2::sharedClique>& roots,
|
||||
const VectorValues& delta,
|
||||
const ISAM2Params::RelinearizationThreshold& relinearizeThreshold)
|
||||
{
|
||||
FastSet<Index> relinKeys;
|
||||
|
||||
if(root) {
|
||||
if(relinearizeThreshold.type() == typeid(double)) {
|
||||
BOOST_FOREACH(const ISAM2::sharedClique& root, roots) {
|
||||
if(relinearizeThreshold.type() == typeid(double))
|
||||
CheckRelinearizationRecursiveDouble(relinKeys, boost::get<double>(relinearizeThreshold), delta, root);
|
||||
} else if(relinearizeThreshold.type() == typeid(FastMap<char,Vector>)) {
|
||||
CheckRelinearizationRecursiveMap(relinKeys, boost::get<FastMap<char,Vector> >(relinearizeThreshold), delta, ordering, root);
|
||||
}
|
||||
else if(relinearizeThreshold.type() == typeid(FastMap<char,Vector>))
|
||||
CheckRelinearizationRecursiveMap(relinKeys, boost::get<FastMap<char,Vector> >(relinearizeThreshold), delta, root);
|
||||
}
|
||||
|
||||
return relinKeys;
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
void ISAM2::Impl::FindAll(ISAM2Clique::shared_ptr clique, FastSet<Index>& keys, const vector<bool>& markedMask) {
|
||||
void ISAM2::Impl::FindAll(ISAM2Clique::shared_ptr clique, FastSet<Index>& keys, const FastSet<Key>& markedMask)
|
||||
{
|
||||
static const bool debug = false;
|
||||
// does the separator contain any of the variables?
|
||||
bool found = false;
|
||||
BOOST_FOREACH(const Index& key, (*clique)->parents()) {
|
||||
if (markedMask[key])
|
||||
BOOST_FOREACH(Key key, clique->conditional()->parents()) {
|
||||
if (markedMask.exists(key)) {
|
||||
found = true;
|
||||
break;
|
||||
}
|
||||
}
|
||||
if (found) {
|
||||
// then add this clique
|
||||
keys.insert((*clique)->beginFrontals(), (*clique)->endFrontals());
|
||||
keys.insert(clique->conditional()->beginFrontals(), clique->conditional()->endFrontals());
|
||||
if(debug) clique->print("Key(s) marked in clique ");
|
||||
if(debug) cout << "so marking key " << (*clique)->keys().front() << endl;
|
||||
if(debug) cout << "so marking key " << clique->conditional()->front() << endl;
|
||||
}
|
||||
BOOST_FOREACH(const ISAM2Clique::shared_ptr& child, clique->children_) {
|
||||
BOOST_FOREACH(const ISAM2Clique::shared_ptr& child, clique->children) {
|
||||
FindAll(child, keys, markedMask);
|
||||
}
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
void ISAM2::Impl::ExpmapMasked(Values& values, const VectorValues& delta, const Ordering& ordering,
|
||||
const vector<bool>& mask, boost::optional<VectorValues&> invalidateIfDebug, const KeyFormatter& keyFormatter) {
|
||||
void ISAM2::Impl::ExpmapMasked(Values& values, const VectorValues& delta,
|
||||
const FastSet<Key>& mask, boost::optional<VectorValues&> invalidateIfDebug, const KeyFormatter& keyFormatter)
|
||||
{
|
||||
// If debugging, invalidate if requested, otherwise do not invalidate.
|
||||
// Invalidating means setting expmapped entries to Inf, to trigger assertions
|
||||
// if we try to re-use them.
@@ -269,23 +193,16 @@ void ISAM2::Impl::ExpmapMasked(Values& values, const VectorValues& delta, const
|
|||
invalidateIfDebug = boost::none;
|
||||
#endif
|
||||
|
||||
assert(values.size() == ordering.size());
|
||||
assert(delta.size() == ordering.size());
|
||||
assert(values.size() == delta.size());
|
||||
Values::iterator key_value;
|
||||
Ordering::const_iterator key_index;
|
||||
for(key_value = values.begin(), key_index = ordering.begin();
|
||||
key_value != values.end() && key_index != ordering.end(); ++key_value, ++key_index) {
|
||||
assert(key_value->key == key_index->first);
|
||||
const Index var = key_index->second;
|
||||
if(ISDEBUG("ISAM2 update verbose")) {
|
||||
if(mask[var])
|
||||
cout << "expmap " << keyFormatter(key_value->key) << " (j = " << var << "), delta = " << delta[var].transpose() << endl;
|
||||
else
|
||||
cout << " " << keyFormatter(key_value->key) << " (j = " << var << "), delta = " << delta[var].transpose() << endl;
|
||||
}
|
||||
VectorValues::const_iterator key_delta;
|
||||
for(key_value = values.begin(), key_delta = delta.begin(); key_value != values.end(); ++key_value, ++key_delta)
|
||||
{
|
||||
assert(key_value->key == key_delta->first);
|
||||
Key var = key_value->key;
|
||||
assert(delta[var].size() == (int)key_value->value.dim());
|
||||
assert(delta[var].unaryExpr(ptr_fun(isfinite<double>)).all());
|
||||
if(mask[var]) {
|
||||
if(mask.exists(var)) {
|
||||
Value* retracted = key_value->value.retract_(delta[var]);
|
||||
key_value->value = *retracted;
|
||||
retracted->deallocate_();
@@ -295,134 +212,35 @@ namespace gtsam {
|
|||
}
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
ISAM2::Impl::PartialSolveResult
|
||||
ISAM2::Impl::PartialSolve(GaussianFactorGraph& factors,
|
||||
const FastSet<Index>& keys, const ReorderingMode& reorderingMode, bool useQR) {
|
||||
|
||||
const bool debug = ISDEBUG("ISAM2 recalculate");
|
||||
|
||||
PartialSolveResult result;
|
||||
|
||||
gttic(select_affected_variables);
|
||||
#ifdef GTSAM_EXTRA_CONSISTENCY_CHECKS
|
||||
// Debug check that all variables involved in the factors to be re-eliminated
|
||||
// are in affectedKeys, since we will use it to select a subset of variables.
|
||||
BOOST_FOREACH(const GaussianFactor::shared_ptr& factor, factors) {
|
||||
BOOST_FOREACH(Index key, factor->keys()) {
|
||||
assert(find(keys.begin(), keys.end(), key) != keys.end());
|
||||
}
|
||||
}
|
||||
#endif
|
||||
Permutation affectedKeysSelector(keys.size()); // Create a permutation that pulls the affected keys to the front
|
||||
Permutation affectedKeysSelectorInverse(keys.size() > 0 ? *keys.rbegin()+1 : 0 /*ordering_.nVars()*/); // And its inverse
|
||||
#ifndef NDEBUG
|
||||
// If debugging, fill with invalid values that will trip asserts if dereferenced
|
||||
std::fill(affectedKeysSelectorInverse.begin(), affectedKeysSelectorInverse.end(), numeric_limits<Index>::max());
|
||||
#endif
|
||||
{ Index position=0; BOOST_FOREACH(Index var, keys) {
|
||||
affectedKeysSelector[position] = var;
|
||||
affectedKeysSelectorInverse[var] = position;
|
||||
++ position; } }
|
||||
if(debug) affectedKeysSelector.print("affectedKeysSelector: ");
|
||||
if(debug) affectedKeysSelectorInverse.print("affectedKeysSelectorInverse: ");
|
||||
factors.permuteWithInverse(affectedKeysSelectorInverse);
|
||||
if(debug) factors.print("Factors to reorder/re-eliminate: ");
|
||||
gttoc(select_affected_variables);
|
||||
gttic(variable_index);
|
||||
VariableIndex affectedFactorsIndex(factors); // Create a variable index for the factors to be re-eliminated
|
||||
if(debug) affectedFactorsIndex.print("affectedFactorsIndex: ");
|
||||
gttoc(variable_index);
|
||||
gttic(ccolamd);
|
||||
vector<int> cmember(affectedKeysSelector.size(), 0);
|
||||
if(reorderingMode.constrain == ReorderingMode::CONSTRAIN_LAST) {
|
||||
assert(reorderingMode.constrainedKeys);
|
||||
if(!reorderingMode.constrainedKeys->empty()) {
|
||||
typedef std::pair<const Index,int> Index_Group;
|
||||
if(keys.size() > reorderingMode.constrainedKeys->size()) { // Only if some variables are unconstrained
|
||||
BOOST_FOREACH(const Index_Group& index_group, *reorderingMode.constrainedKeys) {
|
||||
cmember[affectedKeysSelectorInverse[index_group.first]] = index_group.second; }
|
||||
} else {
|
||||
int minGroup = *boost::range::min_element(boost::adaptors::values(*reorderingMode.constrainedKeys));
|
||||
BOOST_FOREACH(const Index_Group& index_group, *reorderingMode.constrainedKeys) {
|
||||
cmember[affectedKeysSelectorInverse[index_group.first]] = index_group.second - minGroup; }
|
||||
}
|
||||
}
|
||||
}
|
||||
Permutation::shared_ptr affectedColamd(inference::PermutationCOLAMD_(affectedFactorsIndex, cmember));
|
||||
gttoc(ccolamd);
|
||||
gttic(ccolamd_permutations);
|
||||
Permutation::shared_ptr affectedColamdInverse(affectedColamd->inverse());
|
||||
if(debug) affectedColamd->print("affectedColamd: ");
|
||||
if(debug) affectedColamdInverse->print("affectedColamdInverse: ");
|
||||
result.reorderingSelector = affectedKeysSelector;
|
||||
result.reorderingPermutation = *affectedColamd;
|
||||
result.reorderingInverse = internal::Reduction::CreateFromPartialPermutation(affectedKeysSelector, *affectedColamdInverse);
|
||||
gttoc(ccolamd_permutations);
|
||||
|
||||
gttic(permute_affected_variable_index);
|
||||
affectedFactorsIndex.permuteInPlace(*affectedColamd);
|
||||
gttoc(permute_affected_variable_index);
|
||||
|
||||
gttic(permute_affected_factors);
|
||||
factors.permuteWithInverse(*affectedColamdInverse);
|
||||
gttoc(permute_affected_factors);
|
||||
|
||||
if(debug) factors.print("Colamd-ordered affected factors: ");
|
||||
|
||||
#ifdef GTSAM_EXTRA_CONSISTENCY_CHECKS
|
||||
VariableIndex fromScratchIndex(factors);
|
||||
assert(assert_equal(fromScratchIndex, affectedFactorsIndex));
|
||||
#endif
|
||||
|
||||
// eliminate into a Bayes net
|
||||
gttic(eliminate);
|
||||
JunctionTree<GaussianFactorGraph, ISAM2::Clique> jt(factors, affectedFactorsIndex);
|
||||
if(!useQR)
|
||||
result.bayesTree = jt.eliminate(EliminatePreferCholesky);
|
||||
else
|
||||
result.bayesTree = jt.eliminate(EliminateQR);
|
||||
gttoc(eliminate);
|
||||
|
||||
gttic(permute_eliminated);
|
||||
if(result.bayesTree) result.bayesTree->permuteWithInverse(affectedKeysSelector);
|
||||
if(debug && result.bayesTree) {
|
||||
cout << "Full var-ordered eliminated BT:\n";
|
||||
result.bayesTree->printTree("");
|
||||
}
|
||||
// Undo permutation on our subset of cached factors, we must later permute *all* of the cached factors
|
||||
factors.permuteWithInverse(*affectedColamd);
|
||||
factors.permuteWithInverse(affectedKeysSelector);
|
||||
gttoc(permute_eliminated);
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
namespace internal {
|
||||
inline static void optimizeInPlace(const boost::shared_ptr<ISAM2Clique>& clique, VectorValues& result) {
|
||||
// parents are assumed to already be solved and available in result
|
||||
clique->conditional()->solveInPlace(result);
|
||||
result.update(clique->conditional()->solve(result));
|
||||
|
||||
// starting from the root, call optimize on each conditional
|
||||
BOOST_FOREACH(const boost::shared_ptr<ISAM2Clique>& child, clique->children_)
|
||||
BOOST_FOREACH(const boost::shared_ptr<ISAM2Clique>& child, clique->children)
|
||||
optimizeInPlace(child, result);
|
||||
}
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
size_t ISAM2::Impl::UpdateDelta(const boost::shared_ptr<ISAM2Clique>& root, std::vector<bool>& replacedKeys, VectorValues& delta, double wildfireThreshold) {
|
||||
size_t ISAM2::Impl::UpdateDelta(const std::vector<ISAM2::sharedClique>& roots, FastSet<Key>& replacedKeys, VectorValues& delta, double wildfireThreshold) {
|
||||
|
||||
size_t lastBacksubVariableCount;
|
||||
|
||||
if (wildfireThreshold <= 0.0) {
|
||||
// Threshold is zero or less, so do a full recalculation
|
||||
internal::optimizeInPlace(root, delta);
|
||||
BOOST_FOREACH(const ISAM2::sharedClique& root, roots)
|
||||
internal::optimizeInPlace(root, delta);
|
||||
lastBacksubVariableCount = delta.size();
|
||||
|
||||
} else {
|
||||
// Optimize with wildfire
|
||||
lastBacksubVariableCount = optimizeWildfireNonRecursive(root, wildfireThreshold, replacedKeys, delta); // modifies delta_
|
||||
lastBacksubVariableCount = 0;
|
||||
BOOST_FOREACH(const ISAM2::sharedClique& root, roots)
|
||||
lastBacksubVariableCount += optimizeWildfireNonRecursive(
|
||||
root, wildfireThreshold, replacedKeys, delta); // modifies delta
|
||||
|
||||
#ifdef GTSAM_EXTRA_CONSISTENCY_CHECKS
|
||||
for(size_t j=0; j<delta.size(); ++j)
@@ -431,22 +249,22 @@ size_t ISAM2::Impl::UpdateDelta(const boost::shared_ptr<ISAM2Clique>& root, std:
|
|||
}
|
||||
|
||||
// Clear replacedKeys
|
||||
replacedKeys.assign(replacedKeys.size(), false);
|
||||
replacedKeys.clear();
|
||||
|
||||
return lastBacksubVariableCount;
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
namespace internal {
|
||||
void updateDoglegDeltas(const boost::shared_ptr<ISAM2Clique>& clique, const std::vector<bool>& replacedKeys,
|
||||
const VectorValues& grad, VectorValues& deltaNewton, VectorValues& RgProd, size_t& varsUpdated) {
|
||||
void updateDoglegDeltas(const boost::shared_ptr<ISAM2Clique>& clique, const FastSet<Key>& replacedKeys,
|
||||
const VectorValues& grad, VectorValues& RgProd, size_t& varsUpdated) {
|
||||
|
||||
// Check if any frontal or separator keys were recalculated, if so, we need
|
||||
// update deltas and recurse to children, but if not, we do not need to
|
||||
// recurse further because of the running separator property.
|
||||
bool anyReplaced = false;
|
||||
BOOST_FOREACH(Index j, *clique->conditional()) {
|
||||
if(replacedKeys[j]) {
|
||||
BOOST_FOREACH(Key j, *clique->conditional()) {
|
||||
if(replacedKeys.exists(j)) {
|
||||
anyReplaced = true;
|
||||
break;
|
||||
}
@@ -455,45 +273,51 @@ void updateDoglegDeltas(const boost::shared_ptr<ISAM2Clique>& clique, const std:
|
|||
if(anyReplaced) {
|
||||
// Update the current variable
|
||||
// Get VectorValues slice corresponding to current variables
|
||||
Vector gR = internal::extractVectorValuesSlices(grad, (*clique)->beginFrontals(), (*clique)->endFrontals());
|
||||
Vector gS = internal::extractVectorValuesSlices(grad, (*clique)->beginParents(), (*clique)->endParents());
|
||||
Vector gR = grad.vector(std::vector<Key>(clique->conditional()->beginFrontals(), clique->conditional()->endFrontals()));
|
||||
Vector gS = grad.vector(std::vector<Key>(clique->conditional()->beginParents(), clique->conditional()->endParents()));
|
||||
|
||||
// Compute R*g and S*g for this clique
|
||||
Vector RSgProd = (*clique)->get_R() * gR + (*clique)->get_S() * gS;
|
||||
Vector RSgProd = clique->conditional()->get_R() * gR + clique->conditional()->get_S() * gS;
|
||||
|
||||
// Write into RgProd vector
|
||||
internal::writeVectorValuesSlices(RSgProd, RgProd, (*clique)->beginFrontals(), (*clique)->endFrontals());
|
||||
DenseIndex vectorPosition = 0;
|
||||
BOOST_FOREACH(Key frontal, clique->conditional()->frontals()) {
|
||||
Vector& RgProdValue = RgProd[frontal];
|
||||
RgProdValue = RSgProd.segment(vectorPosition, RgProdValue.size());
|
||||
vectorPosition += RgProdValue.size();
|
||||
}
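// (The frontal blocks of RSgProd are laid out in the same order as the conditional's
//  frontal keys, so walking them with a running offset keeps each RgProd entry aligned
//  with its rows of [R S].)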
|
||||
|
||||
// Now solve the part of the Newton's method point for this clique (back-substitution)
|
||||
//(*clique)->solveInPlace(deltaNewton);
|
||||
|
||||
varsUpdated += (*clique)->nrFrontals();
|
||||
varsUpdated += clique->conditional()->nrFrontals();
|
||||
|
||||
// Recurse to children
|
||||
BOOST_FOREACH(const ISAM2Clique::shared_ptr& child, clique->children()) {
|
||||
updateDoglegDeltas(child, replacedKeys, grad, deltaNewton, RgProd, varsUpdated); }
|
||||
BOOST_FOREACH(const ISAM2Clique::shared_ptr& child, clique->children) {
|
||||
updateDoglegDeltas(child, replacedKeys, grad, RgProd, varsUpdated); }
|
||||
}
|
||||
}
|
||||
}
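// Why RgProd is cached (hedged explanation, not the library's dogleg code): the dogleg
// step needs the steepest-descent (Cauchy) point, whose step length along the gradient g
// is alpha = (g'g) / (g'R'Rg) = |g|^2 / |Rg|^2, with R the square-root information stored
// in the Bayes tree.  Keeping each clique's slice of R*g in RgProd means |Rg|^2 can be
// refreshed incrementally, touching only cliques with replaced keys, e.g.
//   double alpha = grad.dot(grad) / RgProd.dot(RgProd);  // |g|^2 / |Rg|^2
// instead of re-multiplying R against the full gradient after every update.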
|
||||
|
||||
/* ************************************************************************* */
|
||||
size_t ISAM2::Impl::UpdateDoglegDeltas(const ISAM2& isam, double wildfireThreshold, std::vector<bool>& replacedKeys,
|
||||
size_t ISAM2::Impl::UpdateDoglegDeltas(const ISAM2& isam, double wildfireThreshold, FastSet<Key>& replacedKeys,
|
||||
VectorValues& deltaNewton, VectorValues& RgProd) {
|
||||
|
||||
// Get gradient
|
||||
VectorValues grad = *allocateVectorValues(isam);
|
||||
VectorValues grad;
|
||||
gradientAtZero(isam, grad);
|
||||
|
||||
// Update variables
|
||||
size_t varsUpdated = 0;
|
||||
internal::updateDoglegDeltas(isam.root(), replacedKeys, grad, deltaNewton, RgProd, varsUpdated);
|
||||
optimizeWildfireNonRecursive(isam.root(), wildfireThreshold, replacedKeys, deltaNewton);
|
||||
BOOST_FOREACH(const ISAM2::sharedClique& root, isam.roots())
|
||||
{
|
||||
internal::updateDoglegDeltas(root, replacedKeys, grad, RgProd, varsUpdated);
|
||||
optimizeWildfireNonRecursive(root, wildfireThreshold, replacedKeys, deltaNewton);
|
||||
}
|
||||
|
||||
replacedKeys.assign(replacedKeys.size(), false);
|
||||
replacedKeys.clear();
|
||||
|
||||
return varsUpdated;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
#endif
|
||||
|
|
|
|||
|
|
@ -17,8 +17,6 @@
|
|||
|
||||
#pragma once
|
||||
|
||||
#if 0
|
||||
|
||||
#include <gtsam/linear/GaussianBayesTree.h>
|
||||
#include <gtsam/nonlinear/ISAM2.h>
|
||||
|
||||
|
|
@ -28,9 +26,6 @@ struct GTSAM_EXPORT ISAM2::Impl {
|
|||
|
||||
struct GTSAM_EXPORT PartialSolveResult {
|
||||
ISAM2::sharedClique bayesTree;
|
||||
Permutation reorderingSelector;
|
||||
Permutation reorderingPermutation;
|
||||
internal::Reduction reorderingInverse;
|
||||
};
|
||||
|
||||
struct GTSAM_EXPORT ReorderingMode {
|
||||
|
|
@ -50,25 +45,16 @@ struct GTSAM_EXPORT ISAM2::Impl {
|
|||
* @param keyFormatter Formatter for printing nonlinear keys during debugging
|
||||
*/
|
||||
static void AddVariables(const Values& newTheta, Values& theta, VectorValues& delta,
|
||||
VectorValues& deltaNewton, VectorValues& RgProd, std::vector<bool>& replacedKeys,
|
||||
Ordering& ordering, const KeyFormatter& keyFormatter = DefaultKeyFormatter);
|
||||
VectorValues& deltaNewton, VectorValues& RgProd,
|
||||
const KeyFormatter& keyFormatter = DefaultKeyFormatter);
|
||||
|
||||
/**
|
||||
* Remove variables from the ISAM2 system.
|
||||
*/
|
||||
static void RemoveVariables(const FastSet<Key>& unusedKeys, const ISAM2Clique::shared_ptr& root,
|
||||
static void RemoveVariables(const FastSet<Key>& unusedKeys, const std::vector<ISAM2::sharedClique>& roots,
|
||||
Values& theta, VariableIndex& variableIndex, VectorValues& delta, VectorValues& deltaNewton,
|
||||
VectorValues& RgProd, std::vector<bool>& replacedKeys, Ordering& ordering, Base::Nodes& nodes,
|
||||
GaussianFactorGraph& linearFactors, FastSet<Key>& fixedVariables);
|
||||
|
||||
/**
|
||||
* Extract the set of variable indices from a NonlinearFactorGraph. For each Symbol
|
||||
* in each NonlinearFactor, obtains the index by calling ordering[symbol].
|
||||
* @param ordering The current ordering from which to obtain the variable indices
|
||||
* @param factors The factors from which to extract the variables
|
||||
* @return The set of variable indices from the factors
|
||||
*/
|
||||
static FastSet<Index> IndicesFromFactors(const Ordering& ordering, const NonlinearFactorGraph& factors);
|
||||
VectorValues& RgProd, FastSet<Key>& replacedKeys, Base::Nodes& nodes,
|
||||
FastSet<Key>& fixedVariables);
|
||||
|
||||
/**
|
||||
* Find the set of variables to be relinearized according to relinearizeThreshold.
|
||||
|
|
@ -79,8 +65,8 @@ struct GTSAM_EXPORT ISAM2::Impl {
|
|||
* @return The set of variable indices in delta whose magnitude is greater than or
|
||||
* equal to relinearizeThreshold
|
||||
*/
|
||||
static FastSet<Index> CheckRelinearizationFull(const VectorValues& delta, const Ordering& ordering,
|
||||
const ISAM2Params::RelinearizationThreshold& relinearizeThreshold, const KeyFormatter& keyFormatter = DefaultKeyFormatter);
|
||||
static FastSet<Index> CheckRelinearizationFull(const VectorValues& delta,
|
||||
const ISAM2Params::RelinearizationThreshold& relinearizeThreshold);
|
||||
|
||||
/**
|
||||
* Find the set of variables to be relinearized according to relinearizeThreshold.
|
||||
|
|
@ -93,8 +79,8 @@ struct GTSAM_EXPORT ISAM2::Impl {
|
|||
* @return The set of variable indices in delta whose magnitude is greater than or
|
||||
* equal to relinearizeThreshold
|
||||
*/
|
||||
static FastSet<Index> CheckRelinearizationPartial(const ISAM2Clique::shared_ptr& root, const VectorValues& delta, const Ordering& ordering,
|
||||
const ISAM2Params::RelinearizationThreshold& relinearizeThreshold, const KeyFormatter& keyFormatter = DefaultKeyFormatter);
|
||||
static FastSet<Index> CheckRelinearizationPartial(const std::vector<ISAM2::sharedClique>& roots,
|
||||
const VectorValues& delta, const ISAM2Params::RelinearizationThreshold& relinearizeThreshold);
|
||||
|
||||
/**
|
||||
* Recursively search this clique and its children for marked keys appearing
|
||||
|
|
@ -111,7 +97,7 @@ struct GTSAM_EXPORT ISAM2::Impl {
|
|||
*
|
||||
* Alternatively could we trace up towards the root for each variable here?
|
||||
*/
|
||||
static void FindAll(ISAM2Clique::shared_ptr clique, FastSet<Index>& keys, const std::vector<bool>& markedMask);
|
||||
static void FindAll(ISAM2Clique::shared_ptr clique, FastSet<Index>& keys, const FastSet<Key>& markedMask);
|
||||
|
||||
/**
|
||||
* Apply expmap to the given values, but only for indices appearing in
|
||||
|
|
@ -127,34 +113,16 @@ struct GTSAM_EXPORT ISAM2::Impl {
|
|||
* @param keyFormatter Formatter for printing nonlinear keys during debugging
|
||||
*/
|
||||
static void ExpmapMasked(Values& values, const VectorValues& delta,
|
||||
const Ordering& ordering, const std::vector<bool>& mask,
|
||||
const FastSet<Key>& mask,
|
||||
boost::optional<VectorValues&> invalidateIfDebug = boost::none,
|
||||
const KeyFormatter& keyFormatter = DefaultKeyFormatter);
|
||||
|
||||
/**
|
||||
* Reorder and eliminate factors. These factors form a subset of the full
|
||||
* problem, so along with the BayesTree we get a partial reordering of the
|
||||
* problem that needs to be applied to the other data in ISAM2, which is the
|
||||
* VariableIndex, the delta, the ordering, and the orphans (including cached
|
||||
* factors).
|
||||
* \param factors The factors to eliminate, representing part of the full
|
||||
* problem. This is permuted during use and so is cleared when this function
|
||||
* returns in order to invalidate it.
|
||||
* \param keys The set of indices used in \c factors.
|
||||
* \param useQR Whether to use QR (if true), or Cholesky (if false).
|
||||
* \return The eliminated BayesTree and the permutation to be applied to the
|
||||
* rest of the ISAM2 data.
|
||||
*/
|
||||
static PartialSolveResult PartialSolve(GaussianFactorGraph& factors, const FastSet<Index>& keys,
|
||||
const ReorderingMode& reorderingMode, bool useQR);
|
||||
static size_t UpdateDelta(const std::vector<ISAM2::sharedClique>& roots,
|
||||
FastSet<Key>& replacedKeys, VectorValues& delta, double wildfireThreshold);
|
||||
|
||||
static size_t UpdateDelta(const boost::shared_ptr<ISAM2Clique>& root, std::vector<bool>& replacedKeys, VectorValues& delta, double wildfireThreshold);
|
||||
|
||||
static size_t UpdateDoglegDeltas(const ISAM2& isam, double wildfireThreshold, std::vector<bool>& replacedKeys,
|
||||
static size_t UpdateDoglegDeltas(const ISAM2& isam, double wildfireThreshold, FastSet<Key>& replacedKeys,
|
||||
VectorValues& deltaNewton, VectorValues& RgProd);
|
||||
|
||||
};
|
||||
|
||||
}
|
||||
|
||||
#endif
|
||||
|
|
|
|||
|
|
@ -28,8 +28,7 @@ namespace gtsam {
|
|||
/* ************************************************************************* */
|
||||
template<class VALUE>
|
||||
VALUE ISAM2::calculateEstimate(Key key) const {
|
||||
const Index index = getOrdering()[key];
|
||||
const Vector& delta = getDelta()[index];
|
||||
const Vector& delta = getDelta()[key];
|
||||
return theta_.at<VALUE>(key).retract(delta);
|
||||
}
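// Typical use of this single-variable overload (sketch), as exercised by the tests:
//   Pose2 p0 = isam.calculateEstimate<Pose2>(0);  // theta_.at<Pose2>(0).retract(delta[0])
// It retracts only the requested variable's delta instead of assembling a full Values.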
|
||||
|
||||
|
|
@ -37,24 +36,25 @@ VALUE ISAM2::calculateEstimate(Key key) const {
|
|||
namespace internal {
|
||||
template<class CLIQUE>
|
||||
void optimizeWildfire(const boost::shared_ptr<CLIQUE>& clique, double threshold,
|
||||
std::vector<bool>& changed, const std::vector<bool>& replaced, VectorValues& delta, int& count) {
|
||||
FastSet<Key>& changed, const FastSet<Key>& replaced, VectorValues& delta, size_t& count)
|
||||
{
|
||||
// if none of the variables in this clique (frontal and separator!) changed
|
||||
// significantly, then by the running intersection property, none of the
|
||||
// cliques in the children need to be processed
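// (Each child's separator is a subset of this clique's variables, so if none of these
//  variables moved, the children's solutions are unchanged as well.)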
|
||||
|
||||
// Are any clique variables part of the tree that has been redone?
|
||||
bool cliqueReplaced = replaced[(*clique)->frontals().front()];
|
||||
bool cliqueReplaced = replaced.exists((*clique)->frontals().front());
|
||||
#ifdef GTSAM_EXTRA_CONSISTENCY_CHECKS
|
||||
BOOST_FOREACH(Index frontal, (*clique)->frontals()) {
|
||||
assert(cliqueReplaced == replaced[frontal]);
|
||||
BOOST_FOREACH(Key frontal, clique->conditional()->frontals()) {
|
||||
assert(cliqueReplaced == replaced.exists(frontal));
|
||||
}
|
||||
#endif
|
||||
|
||||
// If not redone, then has one of the separator variables changed significantly?
|
||||
bool recalculate = cliqueReplaced;
|
||||
if(!recalculate) {
|
||||
BOOST_FOREACH(Index parent, (*clique)->parents()) {
|
||||
if(changed[parent]) {
|
||||
BOOST_FOREACH(Key parent, clique->conditional()->parents()) {
|
||||
if(changed.exists(parent)) {
|
||||
recalculate = true;
|
||||
break;
|
||||
}
|
||||
|
|
@ -66,22 +66,22 @@ void optimizeWildfire(const boost::shared_ptr<CLIQUE>& clique, double threshold,
|
|||
if(recalculate) {
|
||||
|
||||
// Temporary copy of the original values, to check how much they change
|
||||
std::vector<Vector> originalValues((*clique)->nrFrontals());
|
||||
std::vector<Vector> originalValues(clique->conditional()->nrFrontals());
|
||||
GaussianConditional::const_iterator it;
|
||||
for(it = (*clique)->beginFrontals(); it!=(*clique)->endFrontals(); it++) {
|
||||
originalValues[it - (*clique)->beginFrontals()] = delta[*it];
|
||||
for(it = clique->conditional()->beginFrontals(); it!=clique->conditional()->endFrontals(); it++) {
|
||||
originalValues[it - clique->conditional()->beginFrontals()] = delta[*it];
|
||||
}
|
||||
|
||||
// Back-substitute
|
||||
(*clique)->solveInPlace(delta);
|
||||
count += (*clique)->nrFrontals();
|
||||
delta.update(clique->conditional()->solve(delta));
|
||||
count += clique->conditional()->nrFrontals();
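// (solve() reads the separator entries of delta, which the parent clique has already
//  refreshed -- this is why the wildfire traversal must visit parents before children.)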
|
||||
|
||||
// Whether the values changed above a threshold, or always true if the
|
||||
// clique was replaced.
|
||||
bool valuesChanged = cliqueReplaced;
|
||||
for(it = (*clique)->beginFrontals(); it!=(*clique)->endFrontals(); it++) {
|
||||
for(it = clique->conditional()->beginFrontals(); it != clique->conditional()->endFrontals(); it++) {
|
||||
if(!valuesChanged) {
|
||||
const Vector& oldValue(originalValues[it - (*clique)->beginFrontals()]);
|
||||
const Vector& oldValue(originalValues[it - clique->conditional()->beginFrontals()]);
|
||||
const Vector& newValue(delta[*it]);
|
||||
if((oldValue - newValue).lpNorm<Eigen::Infinity>() >= threshold) {
|
||||
valuesChanged = true;
|
||||
|
|
@ -94,18 +94,18 @@ void optimizeWildfire(const boost::shared_ptr<CLIQUE>& clique, double threshold,
|
|||
// If the values were above the threshold or this clique was replaced
|
||||
if(valuesChanged) {
|
||||
// Set changed flag for each frontal variable and leave the new values
|
||||
BOOST_FOREACH(Index frontal, (*clique)->frontals()) {
|
||||
changed[frontal] = true;
|
||||
BOOST_FOREACH(Key frontal, clique->conditional()->frontals()) {
|
||||
changed.insert(frontal);
|
||||
}
|
||||
} else {
|
||||
// Replace with the old values
|
||||
for(it = (*clique)->beginFrontals(); it!=(*clique)->endFrontals(); it++) {
|
||||
delta[*it] = originalValues[it - (*clique)->beginFrontals()];
|
||||
for(it = clique->conditional()->beginFrontals(); it != clique->conditional()->endFrontals(); it++) {
|
||||
delta[*it] = originalValues[it - clique->conditional()->beginFrontals()];
|
||||
}
|
||||
}
|
||||
|
||||
// Recurse to children
|
||||
BOOST_FOREACH(const typename CLIQUE::shared_ptr& child, clique->children_) {
|
||||
BOOST_FOREACH(const typename CLIQUE::shared_ptr& child, clique->children) {
|
||||
optimizeWildfire(child, threshold, changed, replaced, delta, count);
|
||||
}
|
||||
}
|
||||
|
|
@ -113,24 +113,25 @@ void optimizeWildfire(const boost::shared_ptr<CLIQUE>& clique, double threshold,
|
|||
|
||||
template<class CLIQUE>
|
||||
bool optimizeWildfireNode(const boost::shared_ptr<CLIQUE>& clique, double threshold,
|
||||
std::vector<bool>& changed, const std::vector<bool>& replaced, VectorValues& delta, int& count) {
|
||||
FastSet<Key>& changed, const FastSet<Key>& replaced, VectorValues& delta, size_t& count)
|
||||
{
|
||||
// if none of the variables in this clique (frontal and separator!) changed
|
||||
// significantly, then by the running intersection property, none of the
|
||||
// cliques in the children need to be processed
|
||||
|
||||
// Are any clique variables part of the tree that has been redone?
|
||||
bool cliqueReplaced = replaced[(*clique)->frontals().front()];
|
||||
bool cliqueReplaced = replaced.exists(clique->conditional()->frontals().front());
|
||||
#ifdef GTSAM_EXTRA_CONSISTENCY_CHECKS
|
||||
BOOST_FOREACH(Index frontal, (*clique)->frontals()) {
|
||||
assert(cliqueReplaced == replaced[frontal]);
|
||||
BOOST_FOREACH(Key frontal, clique->conditional()->frontals()) {
|
||||
assert(cliqueReplaced == replaced.exists(frontal));
|
||||
}
|
||||
#endif
|
||||
|
||||
// If not redone, then has one of the separator variables changed significantly?
|
||||
bool recalculate = cliqueReplaced;
|
||||
if(!recalculate) {
|
||||
BOOST_FOREACH(Index parent, (*clique)->parents()) {
|
||||
if(changed[parent]) {
|
||||
BOOST_FOREACH(Key parent, clique->conditional()->parents()) {
|
||||
if(changed.exists(parent)) {
|
||||
recalculate = true;
|
||||
break;
|
||||
}
|
||||
|
|
@ -139,25 +140,25 @@ bool optimizeWildfireNode(const boost::shared_ptr<CLIQUE>& clique, double thresh
|
|||
|
||||
// Solve clique if it was replaced, or if any parents were changed above the
|
||||
// threshold or themselves replaced.
|
||||
if(recalculate) {
|
||||
|
||||
if(recalculate)
|
||||
{
|
||||
// Temporary copy of the original values, to check how much they change
|
||||
std::vector<Vector> originalValues((*clique)->nrFrontals());
|
||||
std::vector<Vector> originalValues(clique->conditional()->nrFrontals());
|
||||
GaussianConditional::const_iterator it;
|
||||
for(it = (*clique)->beginFrontals(); it!=(*clique)->endFrontals(); it++) {
|
||||
originalValues[it - (*clique)->beginFrontals()] = delta[*it];
|
||||
for(it = clique->conditional()->beginFrontals(); it != clique->conditional()->endFrontals(); it++) {
|
||||
originalValues[it - clique->conditional()->beginFrontals()] = delta[*it];
|
||||
}
|
||||
|
||||
// Back-substitute
|
||||
(*clique)->solveInPlace(delta);
|
||||
count += (*clique)->nrFrontals();
|
||||
delta.update(clique->conditional()->solve(delta));
|
||||
count += clique->conditional()->nrFrontals();
|
||||
|
||||
// Whether the values changed above a threshold, or always true if the
|
||||
// clique was replaced.
|
||||
bool valuesChanged = cliqueReplaced;
|
||||
for(it = (*clique)->beginFrontals(); it!=(*clique)->endFrontals(); it++) {
|
||||
for(it = clique->conditional()->beginFrontals(); it != clique->conditional()->endFrontals(); it++) {
|
||||
if(!valuesChanged) {
|
||||
const Vector& oldValue(originalValues[it - (*clique)->beginFrontals()]);
|
||||
const Vector& oldValue(originalValues[it - clique->conditional()->beginFrontals()]);
|
||||
const Vector& newValue(delta[*it]);
|
||||
if((oldValue - newValue).lpNorm<Eigen::Infinity>() >= threshold) {
|
||||
valuesChanged = true;
|
||||
|
|
@ -170,16 +171,15 @@ bool optimizeWildfireNode(const boost::shared_ptr<CLIQUE>& clique, double thresh
|
|||
// If the values were above the threshold or this clique was replaced
|
||||
if(valuesChanged) {
|
||||
// Set changed flag for each frontal variable and leave the new values
|
||||
BOOST_FOREACH(Index frontal, (*clique)->frontals()) {
|
||||
changed[frontal] = true;
|
||||
BOOST_FOREACH(Key frontal, clique->conditional()->frontals()) {
|
||||
changed.insert(frontal);
|
||||
}
|
||||
} else {
|
||||
// Replace with the old values
|
||||
for(it = (*clique)->beginFrontals(); it!=(*clique)->endFrontals(); it++) {
|
||||
delta[*it] = originalValues[it - (*clique)->beginFrontals()];
|
||||
for(it = clique->conditional()->beginFrontals(); it != clique->conditional()->endFrontals(); it++) {
|
||||
delta[*it] = originalValues[it - clique->conditional()->beginFrontals()];
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
return recalculate;
|
||||
|
|
@ -189,8 +189,8 @@ bool optimizeWildfireNode(const boost::shared_ptr<CLIQUE>& clique, double thresh
|
|||
|
||||
/* ************************************************************************* */
|
||||
template<class CLIQUE>
|
||||
int optimizeWildfire(const boost::shared_ptr<CLIQUE>& root, double threshold, const std::vector<bool>& keys, VectorValues& delta) {
|
||||
std::vector<bool> changed(keys.size(), false);
|
||||
size_t optimizeWildfire(const boost::shared_ptr<CLIQUE>& root, double threshold, const FastSet<Key>& keys, VectorValues& delta) {
|
||||
FastSet<Key> changed;
|
||||
int count = 0;
|
||||
// starting from the root, call optimize on each conditional
|
||||
if(root)
|
||||
|
|
@ -200,9 +200,10 @@ int optimizeWildfire(const boost::shared_ptr<CLIQUE>& root, double threshold, co
|
|||
|
||||
/* ************************************************************************* */
|
||||
template<class CLIQUE>
|
||||
int optimizeWildfireNonRecursive(const boost::shared_ptr<CLIQUE>& root, double threshold, const std::vector<bool>& keys, VectorValues& delta) {
|
||||
std::vector<bool> changed(keys.size(), false);
|
||||
int count = 0;
|
||||
size_t optimizeWildfireNonRecursive(const boost::shared_ptr<CLIQUE>& root, double threshold, const FastSet<Key>& keys, VectorValues& delta)
|
||||
{
|
||||
FastSet<Key> changed;
|
||||
size_t count = 0;
|
||||
|
||||
if (root) {
|
||||
std::stack<boost::shared_ptr<CLIQUE> > travStack;
|
||||
|
|
@ -213,7 +214,7 @@ int optimizeWildfireNonRecursive(const boost::shared_ptr<CLIQUE>& root, double t
|
|||
travStack.pop();
|
||||
bool recalculate = internal::optimizeWildfireNode(currentNode, threshold, changed, keys, delta, count);
|
||||
if (recalculate) {
|
||||
BOOST_FOREACH(const typename CLIQUE::shared_ptr& child, currentNode->children_) {
|
||||
BOOST_FOREACH(const typename CLIQUE::shared_ptr& child, currentNode->children) {
|
||||
travStack.push(child);
|
||||
}
|
||||
}
|
||||
|
|
|
|||
File diff suppressed because it is too large
|
|
@ -19,8 +19,6 @@
|
|||
|
||||
#pragma once
|
||||
|
||||
#if 0
|
||||
|
||||
#include <gtsam/nonlinear/NonlinearFactorGraph.h>
|
||||
#include <gtsam/nonlinear/DoglegOptimizerImpl.h>
|
||||
#include <gtsam/linear/GaussianBayesTree.h>
|
||||
|
|
@ -234,6 +232,12 @@ struct GTSAM_EXPORT ISAM2Params {
|
|||
|
||||
Factorization factorizationTranslator(const std::string& str) const;
|
||||
std::string factorizationTranslator(const Factorization& value) const;
|
||||
|
||||
GaussianFactorGraph::Eliminate getEliminationFunction() const {
|
||||
return factorization == CHOLESKY
|
||||
? (GaussianFactorGraph::Eliminate)EliminatePreferCholesky
|
||||
: (GaussianFactorGraph::Eliminate)EliminateQR;
|
||||
}
|
||||
};
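// Hedged usage sketch: the factorization field selects the dense elimination routine that
// getEliminationFunction() returns.  QR is slower but more robust on poorly conditioned
// problems; CHOLESKY is the usual default.
//   ISAM2Params params;
//   params.factorization = ISAM2Params::QR;  // or ISAM2Params::CHOLESKY
//   ISAM2 isam(params);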
|
||||
|
||||
|
||||
|
|
@ -344,10 +348,11 @@ struct GTSAM_EXPORT ISAM2Result {
|
|||
* Specialized Clique structure for ISAM2, incorporating caching and gradient contribution
|
||||
* TODO: more documentation
|
||||
*/
|
||||
class GTSAM_EXPORT ISAM2Clique : public BayesTreeCliqueBase<ISAM2Clique, GaussianConditional> {
|
||||
class GTSAM_EXPORT ISAM2Clique : public BayesTreeCliqueBase<ISAM2Clique, GaussianFactorGraph>
|
||||
{
|
||||
public:
|
||||
typedef ISAM2Clique This;
|
||||
typedef BayesTreeCliqueBase<This,GaussianConditional> Base;
|
||||
typedef BayesTreeCliqueBase<This, GaussianFactorGraph> Base;
|
||||
typedef boost::shared_ptr<This> shared_ptr;
|
||||
typedef boost::weak_ptr<This> weak_ptr;
|
||||
typedef GaussianConditional ConditionalType;
|
||||
|
|
@ -356,29 +361,8 @@ public:
|
|||
Base::FactorType::shared_ptr cachedFactor_;
|
||||
Vector gradientContribution_;
|
||||
|
||||
/** Construct from a conditional */
|
||||
ISAM2Clique(const sharedConditional& conditional) : Base(conditional) {
|
||||
throw std::runtime_error("ISAM2Clique should always be constructed with the elimination result constructor"); }
|
||||
|
||||
/** Construct from an elimination result */
|
||||
ISAM2Clique(const std::pair<sharedConditional, boost::shared_ptr<ConditionalType::FactorType> >& result) :
|
||||
Base(result.first), cachedFactor_(result.second),
|
||||
gradientContribution_(result.first->get_R().cols() + result.first->get_S().cols()) {
|
||||
// Compute gradient contribution
|
||||
const ConditionalType& conditional(*result.first);
|
||||
// Rewrite -(R * P')'*d as -(d' * R * P')' for computational speed reasons
|
||||
gradientContribution_ << -conditional.get_R().transpose() * conditional.get_d(),
|
||||
-conditional.get_S().transpose() * conditional.get_d();
|
||||
}
|
||||
|
||||
/** Produce a deep copy, copying the cached factor and gradient contribution */
|
||||
shared_ptr clone() const {
|
||||
shared_ptr copy(new ISAM2Clique(std::make_pair(
|
||||
sharedConditional(new ConditionalType(*Base::conditional_)),
|
||||
cachedFactor_ ? cachedFactor_->clone() : Base::FactorType::shared_ptr())));
|
||||
copy->gradientContribution_ = gradientContribution_;
|
||||
return copy;
|
||||
}
|
||||
/// Overridden to also store the remaining factor and gradient contribution
|
||||
void setEliminationResult(const FactorGraphType::EliminationResult& eliminationResult);
|
||||
|
||||
/** Access the cached factor */
|
||||
Base::FactorType::shared_ptr& cachedFactor() { return cachedFactor_; }
|
||||
|
|
@ -386,35 +370,10 @@ public:
|
|||
/** Access the gradient contribution */
|
||||
const Vector& gradientContribution() const { return gradientContribution_; }
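// gradientContribution_ caches this clique's slice of the gradient of the total error at
// delta = 0: for the conditional error 0.5*|R*xF + S*xS - d|^2 the gradient with respect
// to (xF, xS) evaluated at zero is (-R'*d, -S'*d), exactly the two blocks stacked when the
// clique is built from an elimination result.  gradientAtZero() over the whole tree then
// just accumulates these per-clique slices.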
|
||||
|
||||
bool equals(const This& other, double tol=1e-9) const {
|
||||
return Base::equals(other) &&
|
||||
((!cachedFactor_ && !other.cachedFactor_)
|
||||
|| (cachedFactor_ && other.cachedFactor_
|
||||
&& cachedFactor_->equals(*other.cachedFactor_, tol)));
|
||||
}
|
||||
bool equals(const This& other, double tol=1e-9) const;
|
||||
|
||||
/** print this node */
|
||||
void print(const std::string& s = "",
|
||||
const IndexFormatter& formatter = DefaultIndexFormatter) const {
|
||||
Base::print(s,formatter);
|
||||
if(cachedFactor_)
|
||||
cachedFactor_->print(s + "Cached: ", formatter);
|
||||
else
|
||||
std::cout << s << "Cached empty" << std::endl;
|
||||
if(gradientContribution_.rows() != 0)
|
||||
gtsam::print(gradientContribution_, "Gradient contribution: ");
|
||||
}
|
||||
|
||||
void permuteWithInverse(const Permutation& inversePermutation) {
|
||||
if(cachedFactor_) cachedFactor_->permuteWithInverse(inversePermutation);
|
||||
Base::permuteWithInverse(inversePermutation);
|
||||
}
|
||||
|
||||
bool reduceSeparatorWithInverse(const internal::Reduction& inverseReduction) {
|
||||
bool changed = Base::reduceSeparatorWithInverse(inverseReduction);
|
||||
if(changed) if(cachedFactor_) cachedFactor_->reduceWithInverse(inverseReduction);
|
||||
return changed;
|
||||
}
|
||||
void print(const std::string& s = "", const KeyFormatter& formatter = DefaultKeyFormatter) const;
|
||||
|
||||
private:
|
||||
|
||||
|
|
@ -438,7 +397,7 @@ private:
|
|||
* estimate of all variables.
|
||||
*
|
||||
*/
|
||||
class ISAM2: public BayesTree<GaussianConditional, ISAM2Clique> {
|
||||
class GTSAM_EXPORT ISAM2: public BayesTree<ISAM2Clique> {
|
||||
|
||||
protected:
|
||||
|
||||
|
|
@ -481,7 +440,7 @@ protected:
|
|||
* This is \c mutable because it is used internally to not update delta_
|
||||
* until it is needed.
|
||||
*/
|
||||
mutable std::vector<bool> deltaReplacedMask_;
|
||||
mutable FastSet<Key> deltaReplacedMask_; // TODO: Make sure accessed in the right way
|
||||
|
||||
/** All original nonlinear factors are stored here to use during relinearization */
|
||||
NonlinearFactorGraph nonlinearFactors_;
|
||||
|
|
@ -489,10 +448,6 @@ protected:
|
|||
/** The current linear factors, which are only updated as needed */
|
||||
mutable GaussianFactorGraph linearFactors_;
|
||||
|
||||
/** The current elimination ordering Symbols to Index (integer) keys.
|
||||
* We keep it up to date as we add and reorder variables. */
|
||||
Ordering ordering_;
|
||||
|
||||
/** The current parameters */
|
||||
ISAM2Params params_;
|
||||
|
||||
|
|
@ -506,24 +461,20 @@ protected:
|
|||
public:
|
||||
|
||||
typedef ISAM2 This; ///< This class
|
||||
typedef BayesTree<GaussianConditional,ISAM2Clique> Base; ///< The BayesTree base class
|
||||
|
||||
/** Create an empty ISAM2 instance */
|
||||
GTSAM_EXPORT ISAM2(const ISAM2Params& params);
|
||||
|
||||
/** Create an empty ISAM2 instance using the default set of parameters (see ISAM2Params) */
|
||||
GTSAM_EXPORT ISAM2();
|
||||
|
||||
/** Copy constructor */
|
||||
GTSAM_EXPORT ISAM2(const ISAM2& other);
|
||||
|
||||
/** Assignment operator */
|
||||
GTSAM_EXPORT ISAM2& operator=(const ISAM2& rhs);
|
||||
|
||||
typedef BayesTree<ISAM2Clique> Base; ///< The BayesTree base class
|
||||
typedef Base::Clique Clique; ///< A clique
|
||||
typedef Base::sharedClique sharedClique; ///< Shared pointer to a clique
|
||||
typedef Base::Cliques Cliques; ///< List of Clique typedef from base class
|
||||
|
||||
/** Create an empty ISAM2 instance */
|
||||
ISAM2(const ISAM2Params& params);
|
||||
|
||||
/** Create an empty ISAM2 instance using the default set of parameters (see ISAM2Params) */
|
||||
ISAM2();
|
||||
|
||||
/** Compare equality */
|
||||
bool equals(const ISAM2& other, double tol = 1e-9) const;
|
||||
|
||||
/**
|
||||
* Add new factors, updating the solution and relinearizing as needed.
|
||||
*
|
||||
|
|
@ -552,7 +503,7 @@ public:
|
|||
* of the size of the linear delta. This allows the provided keys to be reordered.
|
||||
* @return An ISAM2Result struct containing information about the update
|
||||
*/
|
||||
GTSAM_EXPORT ISAM2Result update(const NonlinearFactorGraph& newFactors = NonlinearFactorGraph(), const Values& newTheta = Values(),
|
||||
ISAM2Result update(const NonlinearFactorGraph& newFactors = NonlinearFactorGraph(), const Values& newTheta = Values(),
|
||||
const FastVector<size_t>& removeFactorIndices = FastVector<size_t>(),
|
||||
const boost::optional<FastMap<Key,int> >& constrainedKeys = boost::none,
|
||||
const boost::optional<FastList<Key> >& noRelinKeys = boost::none,
|
||||
|
|
@ -567,16 +518,24 @@ public:
|
|||
* fixed variables will include any involved with the marginalized variables
|
||||
* in the original factors, and possibly additional ones due to fill-in.
|
||||
*/
|
||||
GTSAM_EXPORT void marginalizeLeaves(const FastList<Key>& leafKeys);
|
||||
void marginalizeLeaves(const FastList<Key>& leafKeys);
|
||||
|
||||
/** Access the current linearization point */
|
||||
const Values& getLinearizationPoint() const { return theta_; }
|
||||
|
||||
/// Compute the current solution. This is the "standard" function for computing the solution that
|
||||
/// uses:
|
||||
/// - Partial relinearization and backsubstitution using the thresholds provided in ISAM2Params.
|
||||
/// - Dogleg trust-region step, if enabled in ISAM2Params.
|
||||
/// - Equivalent to getLinearizationPoint().retract(getDelta())
|
||||
/// The solution returned is in general not the same as that returned by getLinearizationPoint().
|
||||
Values optimize() const;
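// Sketch of the equivalence documented above (hedged):
//   Values solution = isam.getLinearizationPoint().retract(isam.getDelta());
// should agree with optimize(), up to how recently the wildfire-thresholded delta was refreshed.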
|
||||
|
||||
/** Compute an estimate from the incomplete linear delta computed during the last update.
|
||||
* This delta is incomplete because variables whose change fell below wildfire_threshold were not updated. If only
|
||||
* a single variable is needed, it is faster to call calculateEstimate(const KEY&).
|
||||
*/
|
||||
GTSAM_EXPORT Values calculateEstimate() const;
|
||||
Values calculateEstimate() const;
|
||||
|
||||
/** Compute an estimate for a single variable using its incomplete linear delta computed
|
||||
* during the last update. This is faster than calling the no-argument version of
|
||||
|
|
@ -594,10 +553,10 @@ public:
|
|||
* @param key
|
||||
* @return
|
||||
*/
|
||||
GTSAM_EXPORT const Value& calculateEstimate(Key key) const;
|
||||
const Value& calculateEstimate(Key key) const;
|
||||
|
||||
/** Return marginal on any variable as a covariance matrix */
|
||||
GTSAM_EXPORT Matrix marginalCovariance(Index key) const;
|
||||
Matrix marginalCovariance(Index key) const;
|
||||
|
||||
/// @name Public members for non-typical usage
|
||||
/// @{
|
||||
|
|
@ -607,19 +566,19 @@ public:
|
|||
|
||||
/** Compute an estimate using a complete delta computed by a full back-substitution.
|
||||
*/
|
||||
GTSAM_EXPORT Values calculateBestEstimate() const;
|
||||
Values calculateBestEstimate() const;
|
||||
|
||||
/** Access the current delta, computed during the last call to update */
|
||||
GTSAM_EXPORT const VectorValues& getDelta() const;
|
||||
const VectorValues& getDelta() const;
|
||||
|
||||
/** Compute the linear error */
|
||||
double error(const VectorValues& x) const;
|
||||
|
||||
/** Access the set of nonlinear factors */
|
||||
GTSAM_EXPORT const NonlinearFactorGraph& getFactorsUnsafe() const { return nonlinearFactors_; }
|
||||
|
||||
/** Access the current ordering */
|
||||
GTSAM_EXPORT const Ordering& getOrdering() const { return ordering_; }
|
||||
const NonlinearFactorGraph& getFactorsUnsafe() const { return nonlinearFactors_; }
|
||||
|
||||
/** Access the nonlinear variable index */
|
||||
GTSAM_EXPORT const VariableIndex& getVariableIndex() const { return variableIndex_; }
|
||||
const VariableIndex& getVariableIndex() const { return variableIndex_; }
|
||||
|
||||
size_t lastAffectedVariableCount;
|
||||
size_t lastAffectedFactorCount;
|
||||
|
|
@ -628,26 +587,26 @@ public:
|
|||
mutable size_t lastBacksubVariableCount;
|
||||
size_t lastNnzTop;
|
||||
|
||||
GTSAM_EXPORT const ISAM2Params& params() const { return params_; }
|
||||
const ISAM2Params& params() const { return params_; }
|
||||
|
||||
/** prints out clique statistics */
|
||||
GTSAM_EXPORT void printStats() const { getCliqueData().getStats().print(); }
|
||||
void printStats() const { getCliqueData().getStats().print(); }
|
||||
|
||||
/// @}
|
||||
|
||||
private:
|
||||
|
||||
GTSAM_EXPORT FastList<size_t> getAffectedFactors(const FastList<Index>& keys) const;
|
||||
GTSAM_EXPORT FactorGraph<GaussianFactor>::shared_ptr relinearizeAffectedFactors(const FastList<Index>& affectedKeys, const FastSet<Index>& relinKeys) const;
|
||||
GTSAM_EXPORT GaussianFactorGraph getCachedBoundaryFactors(Cliques& orphans);
|
||||
FastList<size_t> getAffectedFactors(const FastList<Key>& keys) const;
|
||||
GaussianFactorGraph::shared_ptr relinearizeAffectedFactors(const FastList<Key>& affectedKeys, const FastSet<Key>& relinKeys) const;
|
||||
GaussianFactorGraph getCachedBoundaryFactors(Cliques& orphans);
|
||||
|
||||
GTSAM_EXPORT boost::shared_ptr<FastSet<Index> > recalculate(const FastSet<Index>& markedKeys, const FastSet<Index>& relinKeys,
|
||||
const FastVector<Index>& observedKeys, const FastSet<Index>& unusedIndices, const boost::optional<FastMap<Index,int> >& constrainKeys, ISAM2Result& result);
|
||||
boost::shared_ptr<FastSet<Key> > recalculate(const FastSet<Key>& markedKeys, const FastSet<Key>& relinKeys,
|
||||
const FastVector<Key>& observedKeys, const FastSet<Key>& unusedIndices, const boost::optional<FastMap<Key,int> >& constrainKeys, ISAM2Result& result);
|
||||
// void linear_update(const GaussianFactorGraph& newFactors);
|
||||
GTSAM_EXPORT void updateDelta(bool forceFullSolve = false) const;
|
||||
void updateDelta(bool forceFullSolve = false) const;
|
||||
|
||||
GTSAM_EXPORT friend void optimizeInPlace(const ISAM2&, VectorValues&);
|
||||
GTSAM_EXPORT friend void optimizeGradientSearchInPlace(const ISAM2&, VectorValues&);
|
||||
friend GTSAM_EXPORT void optimizeInPlace(const ISAM2&, VectorValues&);
|
||||
friend GTSAM_EXPORT void optimizeGradientSearchInPlace(const ISAM2&, VectorValues&);
|
||||
|
||||
}; // ISAM2
|
||||
|
||||
|
|
@ -669,12 +628,12 @@ GTSAM_EXPORT void optimizeInPlace(const ISAM2& isam, VectorValues& delta);
|
|||
/// variables are contained in the subtree.
|
||||
/// @return The number of variables that were solved for
|
||||
template<class CLIQUE>
|
||||
int optimizeWildfire(const boost::shared_ptr<CLIQUE>& root,
|
||||
double threshold, const std::vector<bool>& replaced, VectorValues& delta);
|
||||
size_t optimizeWildfire(const boost::shared_ptr<CLIQUE>& root,
|
||||
double threshold, const FastSet<Key>& replaced, VectorValues& delta);
|
||||
|
||||
template<class CLIQUE>
|
||||
int optimizeWildfireNonRecursive(const boost::shared_ptr<CLIQUE>& root,
|
||||
double threshold, const std::vector<bool>& replaced, VectorValues& delta);
|
||||
size_t optimizeWildfireNonRecursive(const boost::shared_ptr<CLIQUE>& root,
|
||||
double threshold, const FastSet<Key>& replaced, VectorValues& delta);
|
||||
|
||||
/**
|
||||
* Optimize along the gradient direction, with a closed-form computation to
|
||||
|
|
@ -740,5 +699,3 @@ GTSAM_EXPORT void gradientAtZero(const ISAM2& bayesTree, VectorValues& g);
|
|||
|
||||
#include <gtsam/nonlinear/ISAM2-inl.h>
|
||||
#include <gtsam/nonlinear/ISAM2-impl.h>
|
||||
|
||||
#endif
|
||||
|
|
|
|||
|
|
@ -31,17 +31,6 @@ namespace gtsam {
|
|||
template class BayesTreeCliqueBase<SymbolicBayesTreeClique, SymbolicFactorGraph>;
|
||||
template class BayesTree<SymbolicBayesTreeClique>;
|
||||
|
||||
/* ************************************************************************* */
|
||||
SymbolicBayesTree::SymbolicBayesTree(const SymbolicBayesTree& other) :
|
||||
Base(other) {}
|
||||
|
||||
/* ************************************************************************* */
|
||||
SymbolicBayesTree& SymbolicBayesTree::operator=(const SymbolicBayesTree& other)
|
||||
{
|
||||
(void) Base::operator=(other);
|
||||
return *this;
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
bool SymbolicBayesTree::equals(const This& other, double tol /* = 1e-9 */) const
|
||||
{
|
||||
|
|
|
|||
|
|
@ -58,14 +58,6 @@ namespace gtsam {
|
|||
/** Default constructor, creates an empty Bayes tree */
|
||||
SymbolicBayesTree() {}
|
||||
|
||||
/** Makes a deep copy of the tree structure, but only pointers to conditionals are
|
||||
* copied, the conditionals and their matrices are not cloned. */
|
||||
SymbolicBayesTree(const SymbolicBayesTree& other);
|
||||
|
||||
/** Makes a deep copy of the tree structure, but only pointers to conditionals are
|
||||
* copied, the conditionals and their matrices are not cloned. */
|
||||
SymbolicBayesTree& operator=(const SymbolicBayesTree& other);
|
||||
|
||||
/** check equality */
|
||||
bool equals(const This& other, double tol = 1e-9) const;
|
||||
};
|
||||
|
|
|
|||
|
|
@ -35,18 +35,6 @@ namespace gtsam {
|
|||
const SymbolicFactorGraph& factorGraph, const Ordering& order) :
|
||||
Base(factorGraph, order) {}
|
||||
|
||||
/* ************************************************************************* */
|
||||
SymbolicEliminationTree::SymbolicEliminationTree(
|
||||
const This& other) :
|
||||
Base(other) {}
|
||||
|
||||
/* ************************************************************************* */
|
||||
SymbolicEliminationTree& SymbolicEliminationTree::operator=(const This& other)
|
||||
{
|
||||
(void) Base::operator=(other);
|
||||
return *this;
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
bool SymbolicEliminationTree::equals(const This& other, double tol) const
|
||||
{
|
||||
|
|
|
|||
|
|
@ -47,14 +47,6 @@ namespace gtsam {
|
|||
SymbolicEliminationTree(const SymbolicFactorGraph& factorGraph,
|
||||
const Ordering& order);
|
||||
|
||||
/** Copy constructor - makes a deep copy of the tree structure, but only pointers to factors are
|
||||
* copied, factors are not cloned. */
|
||||
SymbolicEliminationTree(const This& other);
|
||||
|
||||
/** Assignment operator - makes a deep copy of the tree structure, but only pointers to factors are
|
||||
* copied, factors are not cloned. */
|
||||
This& operator=(const This& other);
|
||||
|
||||
/** Test whether the tree is equal to another */
|
||||
bool equals(const This& other, double tol = 1e-9) const;
|
||||
|
||||
|
|
|
|||
|
|
@ -30,15 +30,4 @@ namespace gtsam {
|
|||
const SymbolicEliminationTree& eliminationTree) :
|
||||
Base(Base::FromEliminationTree(eliminationTree)) {}
|
||||
|
||||
/* ************************************************************************* */
|
||||
SymbolicJunctionTree::SymbolicJunctionTree(const This& other) :
|
||||
Base(other) {}
|
||||
|
||||
/* ************************************************************************* */
|
||||
SymbolicJunctionTree& SymbolicJunctionTree::operator=(const This& other)
|
||||
{
|
||||
(void) Base::operator=(other);
|
||||
return *this;
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
|||
|
|
@ -61,14 +61,6 @@ namespace gtsam {
|
|||
* @return The elimination tree
|
||||
*/
|
||||
SymbolicJunctionTree(const SymbolicEliminationTree& eliminationTree);
|
||||
|
||||
/** Copy constructor - makes a deep copy of the tree structure, but only pointers to factors are
|
||||
* copied, factors are not cloned. */
|
||||
SymbolicJunctionTree(const This& other);
|
||||
|
||||
/** Assignment operator - makes a deep copy of the tree structure, but only pointers to factors are
|
||||
* copied, factors are not cloned. */
|
||||
This& operator=(const This& other);
|
||||
};
|
||||
|
||||
}
|
||||
|
|
|
|||
|
|
@ -53,18 +53,16 @@ TEST( ISAM, iSAM_smoother )
|
|||
}
|
||||
|
||||
// Create expected Bayes Tree by solving smoother with "natural" ordering
|
||||
GaussianBayesTree bayesTree = *smoother.eliminateMultifrontal(ordering);
|
||||
GaussianISAM expected;
|
||||
expected.insertRoot(bayesTree.roots().front());
|
||||
GaussianBayesTree expected = *smoother.eliminateMultifrontal(ordering);
|
||||
|
||||
// Verify sigmas in the bayes tree
|
||||
BOOST_FOREACH(const GaussianBayesTree::sharedClique& clique, bayesTree.nodes() | br::map_values) {
|
||||
BOOST_FOREACH(const GaussianBayesTree::sharedClique& clique, expected.nodes() | br::map_values) {
|
||||
GaussianConditional::shared_ptr conditional = clique->conditional();
|
||||
EXPECT(!conditional->get_model());
|
||||
}
|
||||
|
||||
// Check whether BayesTree is correct
|
||||
EXPECT(assert_equal(expected, actual));
|
||||
EXPECT(assert_equal(GaussianFactorGraph(expected).augmentedHessian(), GaussianFactorGraph(actual).augmentedHessian()));
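// (Comparing augmented Hessians checks that both trees encode the same Gaussian density,
//  even if their clique structure or internal variable ordering differs.)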
|
||||
|
||||
// obtain solution
|
||||
VectorValues e; // expected solution
|
||||
|
|
|
|||
|
|
@ -6,11 +6,10 @@
|
|||
|
||||
#include <CppUnitLite/TestHarness.h>
|
||||
|
||||
#if 0
|
||||
|
||||
#include <gtsam/base/debug.h>
|
||||
#include <gtsam/base/TestableAssertions.h>
|
||||
#include <gtsam/base/LieVector.h>
|
||||
#include <gtsam/base/treeTraversal-inst.h>
|
||||
#include <gtsam/geometry/Point2.h>
|
||||
#include <gtsam/geometry/Pose2.h>
|
||||
#include <gtsam/inference/Ordering.h>
|
||||
|
|
@ -27,9 +26,10 @@
|
|||
#include <tests/smallExample.h>
|
||||
|
||||
#include <boost/foreach.hpp>
|
||||
#include <boost/assign/std/list.hpp> // for operator +=
|
||||
#include <boost/assign.hpp>
|
||||
#include <boost/assign/list_of.hpp>
|
||||
using namespace boost::assign;
|
||||
#include <boost/range/adaptor/map.hpp>
|
||||
namespace br { using namespace boost::adaptors; using namespace boost::range; }
|
||||
|
||||
using namespace std;
|
||||
using namespace gtsam;
|
||||
|
|
@ -37,6 +37,8 @@ using boost::shared_ptr;
|
|||
|
||||
const double tol = 1e-4;
|
||||
|
||||
#define TEST TEST_UNSAFE
|
||||
|
||||
// SETDEBUG("ISAM2 update", true);
|
||||
// SETDEBUG("ISAM2 update verbose", true);
|
||||
// SETDEBUG("ISAM2 recalculate", true);
|
||||
|
|
@ -48,7 +50,8 @@ SharedDiagonal brNoise = noiseModel::Diagonal::Sigmas(Vector_(2, M_PI/100.0, 0.1
|
|||
ISAM2 createSlamlikeISAM2(
|
||||
boost::optional<Values&> init_values = boost::none,
|
||||
boost::optional<NonlinearFactorGraph&> full_graph = boost::none,
|
||||
const ISAM2Params& params = ISAM2Params(ISAM2GaussNewtonParams(0.001), 0.0, 0, false, true)) {
|
||||
const ISAM2Params& params = ISAM2Params(ISAM2GaussNewtonParams(0.001), 0.0, 0, false, true),
|
||||
size_t maxPoses = 10) {
|
||||
|
||||
// These variables will be reused and accumulate factors and values
|
||||
ISAM2 isam(params);
|
||||
|
|
@ -61,7 +64,7 @@ ISAM2 createSlamlikeISAM2(
|
|||
// Add a prior at time 0 and update isam
|
||||
{
|
||||
NonlinearFactorGraph newfactors;
|
||||
newfactors.add(PriorFactor<Pose2>(0, Pose2(0.0, 0.0, 0.0), odoNoise));
|
||||
newfactors += PriorFactor<Pose2>(0, Pose2(0.0, 0.0, 0.0), odoNoise);
|
||||
fullgraph.push_back(newfactors);
|
||||
|
||||
Values init;
|
||||
|
|
@ -71,10 +74,13 @@ ISAM2 createSlamlikeISAM2(
|
|||
isam.update(newfactors, init);
|
||||
}
|
||||
|
||||
if(i > maxPoses)
|
||||
goto done;
|
||||
|
||||
// Add odometry from time 0 to time 5
|
||||
for( ; i<5; ++i) {
|
||||
NonlinearFactorGraph newfactors;
|
||||
newfactors.add(BetweenFactor<Pose2>(i, i+1, Pose2(1.0, 0.0, 0.0), odoNoise));
|
||||
newfactors += BetweenFactor<Pose2>(i, i+1, Pose2(1.0, 0.0, 0.0), odoNoise);
|
||||
fullgraph.push_back(newfactors);
|
||||
|
||||
Values init;
|
||||
|
|
@ -82,14 +88,20 @@ ISAM2 createSlamlikeISAM2(
|
|||
fullinit.insert((i+1), Pose2(double(i+1)+0.1, -0.1, 0.01));
|
||||
|
||||
isam.update(newfactors, init);
|
||||
|
||||
if(i > maxPoses)
|
||||
goto done;
|
||||
}
|
||||
|
||||
if(i > maxPoses)
|
||||
goto done;
|
||||
|
||||
// Add odometry from time 5 to 6 and landmark measurement at time 5
|
||||
{
|
||||
NonlinearFactorGraph newfactors;
|
||||
newfactors.add(BetweenFactor<Pose2>(i, i+1, Pose2(1.0, 0.0, 0.0), odoNoise));
|
||||
newfactors.add(BearingRangeFactor<Pose2,Point2>(i, 100, Rot2::fromAngle(M_PI/4.0), 5.0, brNoise));
|
||||
newfactors.add(BearingRangeFactor<Pose2,Point2>(i, 101, Rot2::fromAngle(-M_PI/4.0), 5.0, brNoise));
|
||||
newfactors += BetweenFactor<Pose2>(i, i+1, Pose2(1.0, 0.0, 0.0), odoNoise);
|
||||
newfactors += BearingRangeFactor<Pose2,Point2>(i, 100, Rot2::fromAngle(M_PI/4.0), 5.0, brNoise);
|
||||
newfactors += BearingRangeFactor<Pose2,Point2>(i, 101, Rot2::fromAngle(-M_PI/4.0), 5.0, brNoise);
|
||||
fullgraph.push_back(newfactors);
|
||||
|
||||
Values init;
|
||||
|
|
@ -104,10 +116,13 @@ ISAM2 createSlamlikeISAM2(
|
|||
++ i;
|
||||
}
|
||||
|
||||
if(i > maxPoses)
|
||||
goto done;
|
||||
|
||||
// Add odometry from time 6 to time 10
|
||||
for( ; i<10; ++i) {
|
||||
NonlinearFactorGraph newfactors;
|
||||
newfactors.add(BetweenFactor<Pose2>(i, i+1, Pose2(1.0, 0.0, 0.0), odoNoise));
|
||||
newfactors += BetweenFactor<Pose2>(i, i+1, Pose2(1.0, 0.0, 0.0), odoNoise);
|
||||
fullgraph.push_back(newfactors);
|
||||
|
||||
Values init;
|
||||
|
|
@ -115,14 +130,20 @@ ISAM2 createSlamlikeISAM2(
|
|||
fullinit.insert((i+1), Pose2(double(i+1)+0.1, -0.1, 0.01));
|
||||
|
||||
isam.update(newfactors, init);
|
||||
|
||||
if(i > maxPoses)
|
||||
goto done;
|
||||
}
|
||||
|
||||
if(i > maxPoses)
|
||||
goto done;
|
||||
|
||||
// Add odometry from time 10 to 11 and landmark measurement at time 10
|
||||
{
|
||||
NonlinearFactorGraph newfactors;
|
||||
newfactors.add(BetweenFactor<Pose2>(i, i+1, Pose2(1.0, 0.0, 0.0), odoNoise));
|
||||
newfactors.add(BearingRangeFactor<Pose2,Point2>(i, 100, Rot2::fromAngle(M_PI/4.0 + M_PI/16.0), 4.5, brNoise));
|
||||
newfactors.add(BearingRangeFactor<Pose2,Point2>(i, 101, Rot2::fromAngle(-M_PI/4.0 + M_PI/16.0), 4.5, brNoise));
|
||||
newfactors += BetweenFactor<Pose2>(i, i+1, Pose2(1.0, 0.0, 0.0), odoNoise);
|
||||
newfactors += BearingRangeFactor<Pose2,Point2>(i, 100, Rot2::fromAngle(M_PI/4.0 + M_PI/16.0), 4.5, brNoise);
|
||||
newfactors += BearingRangeFactor<Pose2,Point2>(i, 101, Rot2::fromAngle(-M_PI/4.0 + M_PI/16.0), 4.5, brNoise);
|
||||
fullgraph.push_back(newfactors);
|
||||
|
||||
Values init;
|
||||
|
|
@ -133,6 +154,8 @@ ISAM2 createSlamlikeISAM2(
|
|||
++ i;
|
||||
}
|
||||
|
||||
done:
|
||||
|
||||
if (full_graph)
|
||||
*full_graph = fullgraph;
|
||||
|
||||
|
|
@ -142,185 +165,6 @@ ISAM2 createSlamlikeISAM2(
|
|||
return isam;
|
||||
}
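// createSlamlikeISAM2 is reused by the incremental tests below: passing maxPoses < 10 stops
// the trajectory early through the "goto done" exits above, so TEST(ISAM2, simple) can
// compare a partially built tree against a batch solution at every prefix of the same graph.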
|
||||
|
||||
/* ************************************************************************* */
|
||||
TEST(ISAM2, ImplAddVariables) {
|
||||
|
||||
// Create initial state
|
||||
Values theta;
|
||||
theta.insert(0, Pose2(.1, .2, .3));
|
||||
theta.insert(100, Point2(.4, .5));
|
||||
Values newTheta;
|
||||
newTheta.insert(1, Pose2(.6, .7, .8));
|
||||
|
||||
VectorValues delta;
|
||||
delta.insert(0, Vector_(3, .1, .2, .3));
|
||||
delta.insert(1, Vector_(2, .4, .5));
|
||||
|
||||
VectorValues deltaNewton;
|
||||
deltaNewton.insert(0, Vector_(3, .1, .2, .3));
|
||||
deltaNewton.insert(1, Vector_(2, .4, .5));
|
||||
|
||||
VectorValues deltaRg;
|
||||
deltaRg.insert(0, Vector_(3, .1, .2, .3));
|
||||
deltaRg.insert(1, Vector_(2, .4, .5));
|
||||
|
||||
vector<bool> replacedKeys(2, false);
|
||||
|
||||
Ordering ordering; ordering += 100, 0;
|
||||
|
||||
// Verify initial state
|
||||
LONGS_EQUAL(0, ordering[100]);
|
||||
LONGS_EQUAL(1, ordering[0]);
|
||||
EXPECT(assert_equal(delta[0], delta[ordering[100]]));
|
||||
EXPECT(assert_equal(delta[1], delta[ordering[0]]));
|
||||
|
||||
// Create expected state
|
||||
Values thetaExpected;
|
||||
thetaExpected.insert(0, Pose2(.1, .2, .3));
|
||||
thetaExpected.insert(100, Point2(.4, .5));
|
||||
thetaExpected.insert(1, Pose2(.6, .7, .8));
|
||||
|
||||
VectorValues deltaExpected;
|
||||
deltaExpected.insert(0, Vector_(3, .1, .2, .3));
|
||||
deltaExpected.insert(1, Vector_(2, .4, .5));
|
||||
deltaExpected.insert(2, Vector_(3, 0.0, 0.0, 0.0));
|
||||
|
||||
VectorValues deltaNewtonExpected;
|
||||
deltaNewtonExpected.insert(0, Vector_(3, .1, .2, .3));
|
||||
deltaNewtonExpected.insert(1, Vector_(2, .4, .5));
|
||||
deltaNewtonExpected.insert(2, Vector_(3, 0.0, 0.0, 0.0));
|
||||
|
||||
VectorValues deltaRgExpected;
|
||||
deltaRgExpected.insert(0, Vector_(3, .1, .2, .3));
|
||||
deltaRgExpected.insert(1, Vector_(2, .4, .5));
|
||||
deltaRgExpected.insert(2, Vector_(3, 0.0, 0.0, 0.0));
|
||||
|
||||
vector<bool> replacedKeysExpected(3, false);
|
||||
|
||||
Ordering orderingExpected; orderingExpected += 100, 0, 1;
|
||||
|
||||
// Expand initial state
|
||||
ISAM2::Impl::AddVariables(newTheta, theta, delta, deltaNewton, deltaRg, replacedKeys, ordering);
|
||||
|
||||
EXPECT(assert_equal(thetaExpected, theta));
|
||||
EXPECT(assert_equal(deltaExpected, delta));
|
||||
EXPECT(assert_equal(deltaNewtonExpected, deltaNewton));
|
||||
EXPECT(assert_equal(deltaRgExpected, deltaRg));
|
||||
EXPECT(assert_container_equality(replacedKeysExpected, replacedKeys));
|
||||
EXPECT(assert_equal(orderingExpected, ordering));
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
TEST(ISAM2, ImplRemoveVariables) {
|
||||
|
||||
// Create initial state
|
||||
Values theta;
|
||||
theta.insert(0, Pose2(.1, .2, .3));
|
||||
theta.insert(1, Pose2(.6, .7, .8));
|
||||
theta.insert(100, Point2(.4, .5));
|
||||
|
||||
SymbolicFactorGraph sfg;
|
||||
sfg.push_back(boost::make_shared<IndexFactor>(Index(0), Index(2)));
|
||||
sfg.push_back(boost::make_shared<IndexFactor>(Index(0), Index(1)));
|
||||
VariableIndex variableIndex(sfg);
|
||||
|
||||
VectorValues delta;
|
||||
delta.insert(0, Vector_(3, .1, .2, .3));
|
||||
delta.insert(1, Vector_(3, .4, .5, .6));
|
||||
delta.insert(2, Vector_(2, .7, .8));
|
||||
|
||||
VectorValues deltaNewton;
|
||||
deltaNewton.insert(0, Vector_(3, .1, .2, .3));
|
||||
deltaNewton.insert(1, Vector_(3, .4, .5, .6));
|
||||
deltaNewton.insert(2, Vector_(2, .7, .8));
|
||||
|
||||
VectorValues deltaRg;
|
||||
deltaRg.insert(0, Vector_(3, .1, .2, .3));
|
||||
deltaRg.insert(1, Vector_(3, .4, .5, .6));
|
||||
deltaRg.insert(2, Vector_(2, .7, .8));
|
||||
|
||||
vector<bool> replacedKeys(3, false);
|
||||
replacedKeys[0] = true;
|
||||
replacedKeys[1] = false;
|
||||
replacedKeys[2] = true;
|
||||
|
||||
Ordering ordering; ordering += 100, 1, 0;
|
||||
|
||||
ISAM2::Nodes nodes(3);
|
||||
|
||||
// Verify initial state
|
||||
LONGS_EQUAL(0, ordering[100]);
|
||||
LONGS_EQUAL(1, ordering[1]);
|
||||
LONGS_EQUAL(2, ordering[0]);
|
||||
|
||||
// Create expected state
|
||||
Values thetaExpected;
|
||||
thetaExpected.insert(0, Pose2(.1, .2, .3));
|
||||
thetaExpected.insert(100, Point2(.4, .5));
|
||||
|
||||
SymbolicFactorGraph sfgRemoved;
|
||||
sfgRemoved.push_back(boost::make_shared<IndexFactor>(Index(0), Index(1)));
|
||||
sfgRemoved.push_back(SymbolicFactorGraph::sharedFactor()); // Add empty factor to keep factor indices consistent
|
||||
VariableIndex variableIndexExpected(sfgRemoved);
|
||||
|
||||
VectorValues deltaExpected;
|
||||
deltaExpected.insert(0, Vector_(3, .1, .2, .3));
|
||||
deltaExpected.insert(1, Vector_(2, .7, .8));
|
||||
|
||||
VectorValues deltaNewtonExpected;
|
||||
deltaNewtonExpected.insert(0, Vector_(3, .1, .2, .3));
|
||||
deltaNewtonExpected.insert(1, Vector_(2, .7, .8));
|
||||
|
||||
VectorValues deltaRgExpected;
|
||||
deltaRgExpected.insert(0, Vector_(3, .1, .2, .3));
|
||||
deltaRgExpected.insert(1, Vector_(2, .7, .8));
|
||||
|
||||
vector<bool> replacedKeysExpected(2, true);
|
||||
|
||||
Ordering orderingExpected; orderingExpected += 100, 0;
|
||||
|
||||
ISAM2::Nodes nodesExpected(2);
|
||||
|
||||
// Reduce initial state
|
||||
FastSet<Key> unusedKeys;
|
||||
unusedKeys.insert(1);
|
||||
vector<size_t> removedFactorsI; removedFactorsI.push_back(1);
|
||||
SymbolicFactorGraph removedFactors; removedFactors.push_back(sfg[1]);
|
||||
variableIndex.remove(removedFactorsI, removedFactors);
|
||||
GaussianFactorGraph linearFactors;
|
||||
FastSet<Key> fixedVariables;
|
||||
ISAM2::Impl::RemoveVariables(unusedKeys, ISAM2::sharedClique(), theta, variableIndex, delta, deltaNewton, deltaRg,
|
||||
replacedKeys, ordering, nodes, linearFactors, fixedVariables);
|
||||
|
||||
EXPECT(assert_equal(thetaExpected, theta));
|
||||
EXPECT(assert_equal(variableIndexExpected, variableIndex));
|
||||
EXPECT(assert_equal(deltaExpected, delta));
|
||||
EXPECT(assert_equal(deltaNewtonExpected, deltaNewton));
|
||||
EXPECT(assert_equal(deltaRgExpected, deltaRg));
|
||||
EXPECT(assert_container_equality(replacedKeysExpected, replacedKeys));
|
||||
EXPECT(assert_equal(orderingExpected, ordering));
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
//TEST(ISAM2, IndicesFromFactors) {
|
||||
//
|
||||
// using namespace gtsam::planarSLAM;
|
||||
// typedef GaussianISAM2<Values>::Impl Impl;
|
||||
//
|
||||
// Ordering ordering; ordering += (0), (0), (1);
|
||||
// NonlinearFactorGraph graph;
|
||||
// graph.add(PriorFactor<Pose2>((0), Pose2(), noiseModel::Unit::Create(Pose2::dimension));
|
||||
// graph.addRange((0), (0), 1.0, noiseModel::Unit::Create(1));
|
||||
//
|
||||
// FastSet<Index> expected;
|
||||
// expected.insert(0);
|
||||
// expected.insert(1);
|
||||
//
|
||||
// FastSet<Index> actual = Impl::IndicesFromFactors(ordering, graph);
|
||||
//
|
||||
// EXPECT(assert_equal(expected, actual));
|
||||
//}
|
||||
|
||||
/* ************************************************************************* */
|
||||
//TEST(ISAM2, CheckRelinearization) {
|
||||
//
|
||||
|
|
@ -355,37 +199,29 @@ TEST(ISAM2, ImplRemoveVariables) {
|
|||
//}
|
||||
|
||||
/* ************************************************************************* */
|
||||
TEST(ISAM2, optimize2) {
|
||||
|
||||
// Create initialization
|
||||
Values theta;
|
||||
theta.insert((0), Pose2(0.01, 0.01, 0.01));
|
||||
|
||||
// Create conditional
|
||||
Vector d(3); d << -0.1, -0.1, -0.31831;
|
||||
Matrix R(3,3); R <<
|
||||
10, 0.0, 0.0,
|
||||
0.0, 10, 0.0,
|
||||
0.0, 0.0, 31.8309886;
|
||||
GaussianConditional::shared_ptr conditional(new GaussianConditional(0, d, R, Vector::Ones(3)));
|
||||
|
||||
// Create ordering
|
||||
Ordering ordering; ordering += (0);
|
||||
|
||||
// Expected vector
|
||||
VectorValues expected(1, 3);
|
||||
conditional->solveInPlace(expected);
|
||||
|
||||
// Clique
|
||||
ISAM2::sharedClique clique(
|
||||
ISAM2::Clique::Create(make_pair(conditional,GaussianFactor::shared_ptr())));
|
||||
VectorValues actual(theta.dims(ordering));
|
||||
internal::optimizeInPlace<ISAM2::Base>(clique, actual);
|
||||
|
||||
// expected.print("expected: ");
|
||||
// actual.print("actual: ");
|
||||
EXPECT(assert_equal(expected, actual));
|
||||
}
|
||||
struct ConsistencyVisitor
|
||||
{
|
||||
bool consistent;
|
||||
const ISAM2& isam;
|
||||
ConsistencyVisitor(const ISAM2& isam) :
|
||||
consistent(true), isam(isam) {}
|
||||
int operator()(const ISAM2::sharedClique& node, int& parentData)
|
||||
{
|
||||
if(std::find(isam.roots().begin(), isam.roots().end(), node) == isam.roots().end())
|
||||
{
|
||||
if(node->parent_.expired())
|
||||
consistent = false;
|
||||
if(std::find(node->parent()->children.begin(), node->parent()->children.end(), node) == node->parent()->children.end())
|
||||
consistent = false;
|
||||
}
|
||||
BOOST_FOREACH(Key j, node->conditional()->frontals())
|
||||
{
|
||||
if(isam.nodes().at(j).get() != node.get())
|
||||
consistent = false;
|
||||
}
|
||||
return 0;
|
||||
}
|
||||
};
|
||||
|
||||
/* ************************************************************************* */
|
||||
bool isam_check(const NonlinearFactorGraph& fullgraph, const Values& fullinit, const ISAM2& isam, Test& test, TestResult& result) {
|
||||
|
|
@ -394,31 +230,38 @@ bool isam_check(const NonlinearFactorGraph& fullgraph, const Values& fullinit, c
|
|||
const std::string name_ = test.getName();
|
||||
|
||||
Values actual = isam.calculateEstimate();
|
||||
Ordering ordering = isam.getOrdering(); // *fullgraph.orderingCOLAMD(fullinit).first;
|
||||
GaussianFactorGraph linearized = *fullgraph.linearize(fullinit, ordering);
|
||||
// linearized.print("Expected linearized: ");
|
||||
GaussianBayesNet gbn = *GaussianSequentialSolver(linearized).eliminate();
|
||||
// gbn.print("Expected bayesnet: ");
|
||||
VectorValues delta = optimize(gbn);
|
||||
Values expected = fullinit.retract(delta, ordering);
|
||||
Values expected = fullinit.retract(fullgraph.linearize(fullinit)->optimize());
|
||||
|
||||
bool isamEqual = assert_equal(expected, actual);
|
||||
|
||||
// Check information
|
||||
GaussianFactorGraph isamGraph(isam);
|
||||
isamGraph += isam.roots().front()->cachedFactor_;
|
||||
Matrix expectedHessian = fullgraph.linearize(isam.getLinearizationPoint())->augmentedHessian();
|
||||
Matrix actualHessian = isamGraph.augmentedHessian();
|
||||
expectedHessian.bottomRightCorner(1,1) = actualHessian.bottomRightCorner(1,1);
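// (Only the information matrix and vector are compared; the scalar constant term in the
//  bottom-right corner depends on how the error constant was accumulated, so it is copied
//  across rather than checked.)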
|
||||
bool isamTreeEqual = assert_equal(expectedHessian, actualHessian);
|
||||
|
||||
// Check consistency
|
||||
ConsistencyVisitor visitor(isam);
|
||||
int data; // Unused
|
||||
treeTraversal::DepthFirstForest(isam, data, visitor);
|
||||
bool consistent = visitor.consistent;
|
||||
|
||||
// The following two checks make sure that the cached gradients are maintained and used correctly
|
||||
|
||||
// Check gradient at each node
|
||||
bool nodeGradientsOk = true;
|
||||
typedef ISAM2::sharedClique sharedClique;
|
||||
BOOST_FOREACH(const sharedClique& clique, isam.nodes()) {
|
||||
BOOST_FOREACH(const sharedClique& clique, isam.nodes() | br::map_values) {
|
||||
// Compute expected gradient
|
||||
FactorGraph<JacobianFactor> jfg;
|
||||
jfg.push_back(JacobianFactor::shared_ptr(new JacobianFactor(*clique->conditional())));
|
||||
VectorValues expectedGradient(*allocateVectorValues(isam));
|
||||
gradientAtZero(jfg, expectedGradient);
|
||||
GaussianFactorGraph jfg;
|
||||
jfg += clique->conditional();
|
||||
VectorValues expectedGradient = jfg.gradientAtZero();
|
||||
// Compare with actual gradients
|
||||
int variablePosition = 0;
|
||||
DenseIndex variablePosition = 0;
|
||||
for(GaussianConditional::const_iterator jit = clique->conditional()->begin(); jit != clique->conditional()->end(); ++jit) {
|
||||
const int dim = clique->conditional()->dim(jit);
|
||||
const DenseIndex dim = clique->conditional()->getDim(jit);
|
||||
Vector actual = clique->gradientContribution().segment(variablePosition, dim);
|
||||
bool gradOk = assert_equal(expectedGradient[*jit], actual);
|
||||
EXPECT(gradOk);
|
||||
|
|
@@ -431,17 +274,30 @@ bool isam_check(const NonlinearFactorGraph& fullgraph, const Values& fullinit, c
}

// Check gradient
VectorValues expectedGradient(*allocateVectorValues(isam));
gradientAtZero(FactorGraph<JacobianFactor>(isam), expectedGradient);
VectorValues expectedGradient2(gradient(FactorGraph<JacobianFactor>(isam), VectorValues::Zero(expectedGradient)));
VectorValues actualGradient(*allocateVectorValues(isam));
VectorValues expectedGradient = GaussianFactorGraph(isam).gradientAtZero();
VectorValues expectedGradient2 = GaussianFactorGraph(isam).gradient(VectorValues::Zero(expectedGradient));
VectorValues actualGradient;
gradientAtZero(isam, actualGradient);
bool expectedGradOk = assert_equal(expectedGradient2, expectedGradient);
EXPECT(expectedGradOk);
bool totalGradOk = assert_equal(expectedGradient, actualGradient);
EXPECT(totalGradOk);

return nodeGradientsOk && expectedGradOk && totalGradOk && isamEqual;
return nodeGradientsOk && expectedGradOk && totalGradOk && isamEqual && isamTreeEqual && consistent;
}

/* ************************************************************************* */
TEST(ISAM2, simple)
{
for(size_t i = 0; i < 10; ++i) {
// These variables will be reused and accumulate factors and values
Values fullinit;
NonlinearFactorGraph fullgraph;
ISAM2 isam = createSlamlikeISAM2(fullinit, fullgraph, ISAM2Params(ISAM2GaussNewtonParams(0.001), 0.0, 0, false), i);

// Compare solutions
EXPECT(isam_check(fullgraph, fullinit, isam, *this, result_));
}
}

/* ************************************************************************* */
@@ -505,8 +361,8 @@ TEST(ISAM2, clone) {

// Modify original isam
NonlinearFactorGraph factors;
factors.add(BetweenFactor<Pose2>(0, 10,
isam.calculateEstimate<Pose2>(0).between(isam.calculateEstimate<Pose2>(10)), noiseModel::Unit::Create(3)));
factors += BetweenFactor<Pose2>(0, 10,
isam.calculateEstimate<Pose2>(0).between(isam.calculateEstimate<Pose2>(10)), noiseModel::Unit::Create(3));
isam.update(factors);

CHECK(assert_equal(createSlamlikeISAM2(), clone2));
@@ -526,66 +382,6 @@ TEST(ISAM2, clone) {
CHECK(assert_equal(ISAM2(), clone1));
}
/* ************************************************************************* */
TEST(ISAM2, permute_cached) {
typedef boost::shared_ptr<ISAM2Clique> sharedISAM2Clique;

// Construct expected permuted BayesTree (variable 2 has been changed to 1)
BayesTree<GaussianConditional, ISAM2Clique> expected;
expected.insert(sharedISAM2Clique(new ISAM2Clique(make_pair(
boost::make_shared<GaussianConditional>(pair_list_of
(3, Matrix_(1,1,1.0))
(4, Matrix_(1,1,2.0)),
2, Vector_(1,1.0), Vector_(1,1.0)), // p(3,4)
HessianFactor::shared_ptr())))); // Cached: empty
expected.insert(sharedISAM2Clique(new ISAM2Clique(make_pair(
boost::make_shared<GaussianConditional>(pair_list_of
(2, Matrix_(1,1,1.0))
(3, Matrix_(1,1,2.0)),
1, Vector_(1,1.0), Vector_(1,1.0)), // p(2|3)
boost::make_shared<HessianFactor>(3, Matrix_(1,1,1.0), Vector_(1,1.0), 0.0))))); // Cached: p(3)
expected.insert(sharedISAM2Clique(new ISAM2Clique(make_pair(
boost::make_shared<GaussianConditional>(pair_list_of
(0, Matrix_(1,1,1.0))
(2, Matrix_(1,1,2.0)),
1, Vector_(1,1.0), Vector_(1,1.0)), // p(0|2)
boost::make_shared<HessianFactor>(1, Matrix_(1,1,1.0), Vector_(1,1.0), 0.0))))); // Cached: p(1)
// Change variable 2 to 1
expected.root()->children().front()->conditional()->keys()[0] = 1;
expected.root()->children().front()->children().front()->conditional()->keys()[1] = 1;

// Construct unpermuted BayesTree
BayesTree<GaussianConditional, ISAM2Clique> actual;
actual.insert(sharedISAM2Clique(new ISAM2Clique(make_pair(
boost::make_shared<GaussianConditional>(pair_list_of
(3, Matrix_(1,1,1.0))
(4, Matrix_(1,1,2.0)),
2, Vector_(1,1.0), Vector_(1,1.0)), // p(3,4)
HessianFactor::shared_ptr())))); // Cached: empty
actual.insert(sharedISAM2Clique(new ISAM2Clique(make_pair(
boost::make_shared<GaussianConditional>(pair_list_of
(2, Matrix_(1,1,1.0))
(3, Matrix_(1,1,2.0)),
1, Vector_(1,1.0), Vector_(1,1.0)), // p(2|3)
boost::make_shared<HessianFactor>(3, Matrix_(1,1,1.0), Vector_(1,1.0), 0.0))))); // Cached: p(3)
actual.insert(sharedISAM2Clique(new ISAM2Clique(make_pair(
boost::make_shared<GaussianConditional>(pair_list_of
(0, Matrix_(1,1,1.0))
(2, Matrix_(1,1,2.0)),
1, Vector_(1,1.0), Vector_(1,1.0)), // p(0|2)
boost::make_shared<HessianFactor>(2, Matrix_(1,1,1.0), Vector_(1,1.0), 0.0))))); // Cached: p(2)

// Create permutation that changes variable 2 -> 1
Permutation permutation = Permutation::Identity(5);
permutation[2] = 1;

// Permute BayesTree
actual.root()->permuteWithInverse(permutation);

// Check
EXPECT(assert_equal(expected, actual));
}
/* ************************************************************************* */
TEST(ISAM2, removeFactors)
{
@@ -650,8 +446,8 @@ TEST(ISAM2, swapFactors)
fullgraph.remove(swap_idx);

NonlinearFactorGraph swapfactors;
// swapfactors.add(BearingRange<Pose2,Point2>(10, 100, Rot2::fromAngle(M_PI/4.0 + M_PI/16.0), 4.5, brNoise); // original factor
swapfactors.add(BearingRangeFactor<Pose2,Point2>(10, 100, Rot2::fromAngle(M_PI/4.0 + M_PI/16.0), 5.0, brNoise));
// swapfactors += BearingRange<Pose2,Point2>(10, 100, Rot2::fromAngle(M_PI/4.0 + M_PI/16.0), 4.5, brNoise; // original factor
swapfactors += BearingRangeFactor<Pose2,Point2>(10, 100, Rot2::fromAngle(M_PI/4.0 + M_PI/16.0), 5.0, brNoise);
fullgraph.push_back(swapfactors);
isam.update(swapfactors, Values(), toRemove);
}
@@ -662,28 +458,26 @@ TEST(ISAM2, swapFactors)

// Check gradient at each node
typedef ISAM2::sharedClique sharedClique;
BOOST_FOREACH(const sharedClique& clique, isam.nodes()) {
BOOST_FOREACH(const sharedClique& clique, isam.nodes() | br::map_values) {
// Compute expected gradient
FactorGraph<JacobianFactor> jfg;
jfg.push_back(JacobianFactor::shared_ptr(new JacobianFactor(*clique->conditional())));
VectorValues expectedGradient(*allocateVectorValues(isam));
gradientAtZero(jfg, expectedGradient);
GaussianFactorGraph jfg;
jfg += clique->conditional();
VectorValues expectedGradient = jfg.gradientAtZero();
// Compare with actual gradients
int variablePosition = 0;
DenseIndex variablePosition = 0;
for(GaussianConditional::const_iterator jit = clique->conditional()->begin(); jit != clique->conditional()->end(); ++jit) {
const int dim = clique->conditional()->dim(jit);
const DenseIndex dim = clique->conditional()->getDim(jit);
Vector actual = clique->gradientContribution().segment(variablePosition, dim);
EXPECT(assert_equal(expectedGradient[*jit], actual));
variablePosition += dim;
}
EXPECT_LONGS_EQUAL(clique->gradientContribution().rows(), variablePosition);
EXPECT_LONGS_EQUAL((long)clique->gradientContribution().rows(), (long)variablePosition);
}

// Check gradient
VectorValues expectedGradient(*allocateVectorValues(isam));
gradientAtZero(FactorGraph<JacobianFactor>(isam), expectedGradient);
VectorValues expectedGradient2(gradient(FactorGraph<JacobianFactor>(isam), VectorValues::Zero(expectedGradient)));
VectorValues actualGradient(*allocateVectorValues(isam));
VectorValues expectedGradient = GaussianFactorGraph(isam).gradientAtZero();
VectorValues expectedGradient2 = GaussianFactorGraph(isam).gradient(VectorValues::Zero(expectedGradient));
VectorValues actualGradient;
gradientAtZero(isam, actualGradient);
EXPECT(assert_equal(expectedGradient2, expectedGradient));
EXPECT(assert_equal(expectedGradient, actualGradient));
@@ -708,7 +502,7 @@ TEST(ISAM2, constrained_ordering)
// Add a prior at time 0 and update isam
{
NonlinearFactorGraph newfactors;
newfactors.add(PriorFactor<Pose2>(0, Pose2(0.0, 0.0, 0.0), odoNoise));
newfactors += PriorFactor<Pose2>(0, Pose2(0.0, 0.0, 0.0), odoNoise);
fullgraph.push_back(newfactors);

Values init;
@@ -723,7 +517,7 @@ TEST(ISAM2, constrained_ordering)
// Add odometry from time 0 to time 5
for( ; i<5; ++i) {
NonlinearFactorGraph newfactors;
newfactors.add(BetweenFactor<Pose2>(i, i+1, Pose2(1.0, 0.0, 0.0), odoNoise));
newfactors += BetweenFactor<Pose2>(i, i+1, Pose2(1.0, 0.0, 0.0), odoNoise);
fullgraph.push_back(newfactors);

Values init;
@@ -739,9 +533,9 @@ TEST(ISAM2, constrained_ordering)
// Add odometry from time 5 to 6 and landmark measurement at time 5
{
NonlinearFactorGraph newfactors;
newfactors.add(BetweenFactor<Pose2>(i, i+1, Pose2(1.0, 0.0, 0.0), odoNoise));
newfactors.add(BearingRangeFactor<Pose2,Point2>(i, 100, Rot2::fromAngle(M_PI/4.0), 5.0, brNoise));
newfactors.add(BearingRangeFactor<Pose2,Point2>(i, 101, Rot2::fromAngle(-M_PI/4.0), 5.0, brNoise));
newfactors += BetweenFactor<Pose2>(i, i+1, Pose2(1.0, 0.0, 0.0), odoNoise);
newfactors += BearingRangeFactor<Pose2,Point2>(i, 100, Rot2::fromAngle(M_PI/4.0), 5.0, brNoise);
newfactors += BearingRangeFactor<Pose2,Point2>(i, 101, Rot2::fromAngle(-M_PI/4.0), 5.0, brNoise);
fullgraph.push_back(newfactors);

Values init;
@@ -759,7 +553,7 @@ TEST(ISAM2, constrained_ordering)
// Add odometry from time 6 to time 10
for( ; i<10; ++i) {
NonlinearFactorGraph newfactors;
newfactors.add(BetweenFactor<Pose2>(i, i+1, Pose2(1.0, 0.0, 0.0), odoNoise));
newfactors += BetweenFactor<Pose2>(i, i+1, Pose2(1.0, 0.0, 0.0), odoNoise);
fullgraph.push_back(newfactors);

Values init;
@@ -772,9 +566,9 @@ TEST(ISAM2, constrained_ordering)
// Add odometry from time 10 to 11 and landmark measurement at time 10
{
NonlinearFactorGraph newfactors;
newfactors.add(BetweenFactor<Pose2>(i, i+1, Pose2(1.0, 0.0, 0.0), odoNoise));
newfactors.add(BearingRangeFactor<Pose2,Point2>(i, 100, Rot2::fromAngle(M_PI/4.0 + M_PI/16.0), 4.5, brNoise));
newfactors.add(BearingRangeFactor<Pose2,Point2>(i, 101, Rot2::fromAngle(-M_PI/4.0 + M_PI/16.0), 4.5, brNoise));
newfactors += BetweenFactor<Pose2>(i, i+1, Pose2(1.0, 0.0, 0.0), odoNoise);
newfactors += BearingRangeFactor<Pose2,Point2>(i, 100, Rot2::fromAngle(M_PI/4.0 + M_PI/16.0), 4.5, brNoise);
newfactors += BearingRangeFactor<Pose2,Point2>(i, 101, Rot2::fromAngle(-M_PI/4.0 + M_PI/16.0), 4.5, brNoise);
fullgraph.push_back(newfactors);

Values init;
@@ -788,33 +582,28 @@ TEST(ISAM2, constrained_ordering)
// Compare solutions
EXPECT(isam_check(fullgraph, fullinit, isam, *this, result_));

// Check that x3 and x4 are last, but either can come before the other
EXPECT(isam.getOrdering()[(3)] == 12 && isam.getOrdering()[(4)] == 13);

// Check gradient at each node
typedef ISAM2::sharedClique sharedClique;
BOOST_FOREACH(const sharedClique& clique, isam.nodes()) {
BOOST_FOREACH(const sharedClique& clique, isam.nodes() | br::map_values) {
// Compute expected gradient
FactorGraph<JacobianFactor> jfg;
jfg.push_back(JacobianFactor::shared_ptr(new JacobianFactor(*clique->conditional())));
VectorValues expectedGradient(*allocateVectorValues(isam));
gradientAtZero(jfg, expectedGradient);
GaussianFactorGraph jfg;
jfg += clique->conditional();
VectorValues expectedGradient = jfg.gradientAtZero();
// Compare with actual gradients
int variablePosition = 0;
DenseIndex variablePosition = 0;
for(GaussianConditional::const_iterator jit = clique->conditional()->begin(); jit != clique->conditional()->end(); ++jit) {
const int dim = clique->conditional()->dim(jit);
const DenseIndex dim = clique->conditional()->getDim(jit);
Vector actual = clique->gradientContribution().segment(variablePosition, dim);
EXPECT(assert_equal(expectedGradient[*jit], actual));
variablePosition += dim;
}
LONGS_EQUAL(clique->gradientContribution().rows(), variablePosition);
LONGS_EQUAL((long)clique->gradientContribution().rows(), (long)variablePosition);
}

// Check gradient
VectorValues expectedGradient(*allocateVectorValues(isam));
gradientAtZero(FactorGraph<JacobianFactor>(isam), expectedGradient);
VectorValues expectedGradient2(gradient(FactorGraph<JacobianFactor>(isam), VectorValues::Zero(expectedGradient)));
VectorValues actualGradient(*allocateVectorValues(isam));
VectorValues expectedGradient = GaussianFactorGraph(isam).gradientAtZero();
VectorValues expectedGradient2 = GaussianFactorGraph(isam).gradient(VectorValues::Zero(expectedGradient));
VectorValues actualGradient;
gradientAtZero(isam, actualGradient);
EXPECT(assert_equal(expectedGradient2, expectedGradient));
EXPECT(assert_equal(expectedGradient, actualGradient));
@@ -838,27 +627,20 @@ namespace {
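// Helper: marginalizes the given leaf keys out of the iSAM2 tree and compares the
// resulting augmented Hessian against marginals computed from the full
// (relinearized) factor graph over the remaining keys.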
bool checkMarginalizeLeaves(ISAM2& isam, const FastList<Key>& leafKeys) {
Matrix expectedAugmentedHessian, expected3AugmentedHessian;
vector<Index> toKeep;
const Index lastVar = isam.getOrdering().size() - 1;
for(Index i=0; i<=lastVar; ++i)
if(find(leafKeys.begin(), leafKeys.end(), isam.getOrdering().key(i)) == leafKeys.end())
toKeep.push_back(i);
BOOST_FOREACH(Key j, isam.getDelta() | br::map_keys)
if(find(leafKeys.begin(), leafKeys.end(), j) == leafKeys.end())
toKeep.push_back(j);

// Calculate expected marginal from iSAM2 tree
GaussianFactorGraph isamAsGraph(isam);
GaussianSequentialSolver solver(isamAsGraph);
GaussianFactorGraph marginalgfg = *solver.jointFactorGraph(toKeep);
expectedAugmentedHessian = marginalgfg.augmentedHessian();
expectedAugmentedHessian = GaussianFactorGraph(isam).marginal(toKeep, EliminateQR)->augmentedHessian();

//// Calculate expected marginal from cached linear factors
// Calculate expected marginal from cached linear factors
//assert(isam.params().cacheLinearizedFactors);
//GaussianSequentialSolver solver2(isam.linearFactors_, isam.params().factorization == ISAM2Params::QR);
//expected2AugmentedHessian = solver2.jointFactorGraph(toKeep)->augmentedHessian();
//Matrix expected2AugmentedHessian = isam.linearFactors_.marginal(toKeep, EliminateQR)->augmentedHessian();

// Calculate expected marginal from original nonlinear factors
GaussianSequentialSolver solver3(
*isam.getFactorsUnsafe().linearize(isam.getLinearizationPoint(), isam.getOrdering()),
isam.params().factorization == ISAM2Params::QR);
expected3AugmentedHessian = solver3.jointFactorGraph(toKeep)->augmentedHessian();
expected3AugmentedHessian = isam.getFactorsUnsafe().linearize(isam.getLinearizationPoint())
->marginal(toKeep, EliminateQR)->augmentedHessian();

// Do marginalization
isam.marginalizeLeaves(leafKeys);
@@ -868,19 +650,19 @@ namespace {
Matrix actualAugmentedHessian = actualMarginalGraph.augmentedHessian();
//Matrix actual2AugmentedHessian = linearFactors_.augmentedHessian();
Matrix actual3AugmentedHessian = isam.getFactorsUnsafe().linearize(
isam.getLinearizationPoint(), isam.getOrdering())->augmentedHessian();
isam.getLinearizationPoint())->augmentedHessian();
assert(actualAugmentedHessian.unaryExpr(std::ptr_fun(&std::isfinite<double>)).all());

// Check full marginalization
//cout << "treeEqual" << endl;
bool treeEqual = assert_equal(expectedAugmentedHessian, actualAugmentedHessian, 1e-6);
//bool linEqual = assert_equal(expected2AugmentedHessian, actualAugmentedHessian, 1e-6);
//actualAugmentedHessian.bottomRightCorner(1,1) = expected2AugmentedHessian.bottomRightCorner(1,1); bool linEqual = assert_equal(expected2AugmentedHessian, actualAugmentedHessian, 1e-6);
//cout << "nonlinEqual" << endl;
bool nonlinEqual = assert_equal(expected3AugmentedHessian, actualAugmentedHessian, 1e-6);
actualAugmentedHessian.bottomRightCorner(1,1) = expected3AugmentedHessian.bottomRightCorner(1,1); bool nonlinEqual = assert_equal(expected3AugmentedHessian, actualAugmentedHessian, 1e-6);
//bool linCorrect = assert_equal(expected3AugmentedHessian, expected2AugmentedHessian, 1e-6);
//actual2AugmentedHessian.bottomRightCorner(1,1) = expected3AugmentedHessian.bottomRightCorner(1,1); bool afterLinCorrect = assert_equal(expected3AugmentedHessian, actual2AugmentedHessian, 1e-6);
//cout << "nonlinCorrect" << endl;
actual3AugmentedHessian.bottomRightCorner(1,1) = expected3AugmentedHessian.bottomRightCorner(1,1); bool afterNonlinCorrect = assert_equal(expected3AugmentedHessian, actual3AugmentedHessian, 1e-6);
bool afterNonlinCorrect = assert_equal(expected3AugmentedHessian, actual3AugmentedHessian, 1e-6);

bool ok = treeEqual && /*linEqual &&*/ nonlinEqual && /*linCorrect &&*/ /*afterLinCorrect &&*/ afterNonlinCorrect;
return ok;
@@ -893,11 +675,11 @@ TEST(ISAM2, marginalizeLeaves1)
ISAM2 isam;

NonlinearFactorGraph factors;
factors.add(PriorFactor<LieVector>(0, LieVector(0.0), noiseModel::Unit::Create(1)));
factors += PriorFactor<LieVector>(0, LieVector(0.0), noiseModel::Unit::Create(1));

factors.add(BetweenFactor<LieVector>(0, 1, LieVector(0.0), noiseModel::Unit::Create(1)));
factors.add(BetweenFactor<LieVector>(1, 2, LieVector(0.0), noiseModel::Unit::Create(1)));
factors.add(BetweenFactor<LieVector>(0, 2, LieVector(0.0), noiseModel::Unit::Create(1)));
factors += BetweenFactor<LieVector>(0, 1, LieVector(0.0), noiseModel::Unit::Create(1));
factors += BetweenFactor<LieVector>(1, 2, LieVector(0.0), noiseModel::Unit::Create(1));
factors += BetweenFactor<LieVector>(0, 2, LieVector(0.0), noiseModel::Unit::Create(1));

Values values;
values.insert(0, LieVector(0.0));
@@ -911,8 +693,7 @@ TEST(ISAM2, marginalizeLeaves1)

isam.update(factors, values, FastVector<size_t>(), constrainedKeys);

FastList<Key> leafKeys;
leafKeys.push_back(isam.getOrdering().key(0));
FastList<Key> leafKeys = list_of(0);
EXPECT(checkMarginalizeLeaves(isam, leafKeys));
}
@@ -922,12 +703,12 @@ TEST(ISAM2, marginalizeLeaves2)
ISAM2 isam;

NonlinearFactorGraph factors;
factors.add(PriorFactor<LieVector>(0, LieVector(0.0), noiseModel::Unit::Create(1)));
factors += PriorFactor<LieVector>(0, LieVector(0.0), noiseModel::Unit::Create(1));

factors.add(BetweenFactor<LieVector>(0, 1, LieVector(0.0), noiseModel::Unit::Create(1)));
factors.add(BetweenFactor<LieVector>(1, 2, LieVector(0.0), noiseModel::Unit::Create(1)));
factors.add(BetweenFactor<LieVector>(0, 2, LieVector(0.0), noiseModel::Unit::Create(1)));
factors.add(BetweenFactor<LieVector>(2, 3, LieVector(0.0), noiseModel::Unit::Create(1)));
factors += BetweenFactor<LieVector>(0, 1, LieVector(0.0), noiseModel::Unit::Create(1));
factors += BetweenFactor<LieVector>(1, 2, LieVector(0.0), noiseModel::Unit::Create(1));
factors += BetweenFactor<LieVector>(0, 2, LieVector(0.0), noiseModel::Unit::Create(1));
factors += BetweenFactor<LieVector>(2, 3, LieVector(0.0), noiseModel::Unit::Create(1));

Values values;
values.insert(0, LieVector(0.0));
@@ -943,8 +724,7 @@ TEST(ISAM2, marginalizeLeaves2)

isam.update(factors, values, FastVector<size_t>(), constrainedKeys);

FastList<Key> leafKeys;
leafKeys.push_back(isam.getOrdering().key(0));
FastList<Key> leafKeys = list_of(0);
EXPECT(checkMarginalizeLeaves(isam, leafKeys));
}
@@ -954,17 +734,17 @@ TEST(ISAM2, marginalizeLeaves3)
ISAM2 isam;

NonlinearFactorGraph factors;
factors.add(PriorFactor<LieVector>(0, LieVector(0.0), noiseModel::Unit::Create(1)));
factors += PriorFactor<LieVector>(0, LieVector(0.0), noiseModel::Unit::Create(1));

factors.add(BetweenFactor<LieVector>(0, 1, LieVector(0.0), noiseModel::Unit::Create(1)));
factors.add(BetweenFactor<LieVector>(1, 2, LieVector(0.0), noiseModel::Unit::Create(1)));
factors.add(BetweenFactor<LieVector>(0, 2, LieVector(0.0), noiseModel::Unit::Create(1)));
factors += BetweenFactor<LieVector>(0, 1, LieVector(0.0), noiseModel::Unit::Create(1));
factors += BetweenFactor<LieVector>(1, 2, LieVector(0.0), noiseModel::Unit::Create(1));
factors += BetweenFactor<LieVector>(0, 2, LieVector(0.0), noiseModel::Unit::Create(1));

factors.add(BetweenFactor<LieVector>(2, 3, LieVector(0.0), noiseModel::Unit::Create(1)));
factors += BetweenFactor<LieVector>(2, 3, LieVector(0.0), noiseModel::Unit::Create(1));

factors.add(BetweenFactor<LieVector>(3, 4, LieVector(0.0), noiseModel::Unit::Create(1)));
factors.add(BetweenFactor<LieVector>(4, 5, LieVector(0.0), noiseModel::Unit::Create(1)));
factors.add(BetweenFactor<LieVector>(3, 5, LieVector(0.0), noiseModel::Unit::Create(1)));
factors += BetweenFactor<LieVector>(3, 4, LieVector(0.0), noiseModel::Unit::Create(1));
factors += BetweenFactor<LieVector>(4, 5, LieVector(0.0), noiseModel::Unit::Create(1));
factors += BetweenFactor<LieVector>(3, 5, LieVector(0.0), noiseModel::Unit::Create(1));

Values values;
values.insert(0, LieVector(0.0));
@@ -984,8 +764,7 @@ TEST(ISAM2, marginalizeLeaves3)

isam.update(factors, values, FastVector<size_t>(), constrainedKeys);

FastList<Key> leafKeys;
leafKeys.push_back(isam.getOrdering().key(0));
FastList<Key> leafKeys = list_of(0);
EXPECT(checkMarginalizeLeaves(isam, leafKeys));
}
@@ -995,9 +774,9 @@ TEST(ISAM2, marginalizeLeaves4)
ISAM2 isam;

NonlinearFactorGraph factors;
factors.add(PriorFactor<LieVector>(0, LieVector(0.0), noiseModel::Unit::Create(1)));
factors.add(BetweenFactor<LieVector>(0, 2, LieVector(0.0), noiseModel::Unit::Create(1)));
factors.add(BetweenFactor<LieVector>(1, 2, LieVector(0.0), noiseModel::Unit::Create(1)));
factors += PriorFactor<LieVector>(0, LieVector(0.0), noiseModel::Unit::Create(1));
factors += BetweenFactor<LieVector>(0, 2, LieVector(0.0), noiseModel::Unit::Create(1));
factors += BetweenFactor<LieVector>(1, 2, LieVector(0.0), noiseModel::Unit::Create(1));

Values values;
values.insert(0, LieVector(0.0));
@@ -1011,8 +790,7 @@ TEST(ISAM2, marginalizeLeaves4)

isam.update(factors, values, FastVector<size_t>(), constrainedKeys);

FastList<Key> leafKeys;
leafKeys.push_back(isam.getOrdering().key(1));
FastList<Key> leafKeys = list_of(1);
EXPECT(checkMarginalizeLeaves(isam, leafKeys));
}
@@ -1023,8 +801,7 @@ TEST(ISAM2, marginalizeLeaves5)
ISAM2 isam = createSlamlikeISAM2();

// Marginalize
FastList<Key> marginalizeKeys;
marginalizeKeys.push_back(isam.getOrdering().key(0));
FastList<Key> marginalizeKeys = list_of(0);
EXPECT(checkMarginalizeLeaves(isam, marginalizeKeys));
}
@@ -1040,8 +817,6 @@ TEST(ISAM2, marginalCovariance)
EXPECT(assert_equal(expected, actual));
}

#endif

/* ************************************************************************* */
int main() { TestResult tr; return TestRegistry::runAllTests(tr);}
/* ************************************************************************* */
@@ -70,7 +70,7 @@ TEST( Graph, predecessorMap2Graph )
p_map.insert(3, 2);
boost::tie(graph, root, key2vertex) = predecessorMap2Graph<SGraph<Key>, SVertex, Key>(p_map);

LONGS_EQUAL(3, boost::num_vertices(graph));
LONGS_EQUAL(3, (long)boost::num_vertices(graph));
CHECK(root == key2vertex[2]);
}
@@ -101,7 +101,7 @@ TEST( Graph, composePoses )
expected.insert(3, p3);
expected.insert(4, p4);

LONGS_EQUAL(4, actual->size());
LONGS_EQUAL(4, (long)actual->size());
CHECK(assert_equal(expected, *actual));
}