ISAM2 compiles and several issues are fixed, but some unit tests are still failing
parent 6843ee8227
commit 789f2bee97
@@ -234,7 +234,10 @@ namespace gtsam {
  template<class CLIQUE>
  void BayesTree<CLIQUE>::fillNodesIndex(const sharedClique& subtree) {
  // Add each frontal variable of this root node
- BOOST_FOREACH(const Key& j, subtree->conditional()->frontals()) { nodes_[j] = subtree; }
+ BOOST_FOREACH(const Key& j, subtree->conditional()->frontals()) {
+ bool inserted = nodes_.insert(std::make_pair(j, subtree)).second;
+ assert(inserted); (void)inserted;
+ }
  // Fill index for each child
  typedef typename BayesTree<CLIQUE>::sharedClique sharedClique;
  BOOST_FOREACH(const sharedClique& child, subtree->children) {
@@ -336,7 +339,7 @@ namespace gtsam {
  // Factor the shortcuts to be conditioned on the full root
  // Get the set of variables to eliminate, which is C1\B.
  gttic(Full_root_factoring);
- shared_ptr p_C1_B; {
+ boost::shared_ptr<EliminationTraitsType::BayesTreeType> p_C1_B; {
  std::vector<Index> C1_minus_B; {
  FastSet<Index> C1_minus_B_set(C1->conditional()->beginParents(), C1->conditional()->endParents());
  BOOST_FOREACH(const Index j, *B->conditional()) {
@@ -348,7 +351,7 @@ namespace gtsam {
  boost::tie(p_C1_B, temp_remaining) =
  FactorGraphType(p_C1_Bred).eliminatePartialMultifrontal(Ordering(C1_minus_B), function);
  }
- shared_ptr p_C2_B; {
+ boost::shared_ptr<EliminationTraitsType::BayesTreeType> p_C2_B; {
  std::vector<Index> C2_minus_B; {
  FastSet<Index> C2_minus_B_set(C2->conditional()->beginParents(), C2->conditional()->endParents());
  BOOST_FOREACH(const Index j, *B->conditional()) {
@@ -422,7 +425,7 @@ namespace gtsam {
  void BayesTree<CLIQUE>::removePath(sharedClique clique, BayesNetType& bn, Cliques& orphans)
  {
  // base case is NULL, if so we do nothing and return empties above
- if (clique!=NULL) {
+ if (clique) {

  // remove the clique from orphans in case it has been added earlier
  orphans.remove(clique);
@@ -30,126 +30,60 @@ namespace gtsam {
  /* ************************************************************************* */
  void ISAM2::Impl::AddVariables(
  const Values& newTheta, Values& theta, VectorValues& delta,
- VectorValues& deltaNewton, VectorValues& RgProd, vector<bool>& replacedKeys,
+ VectorValues& deltaNewton, VectorValues& RgProd,
- Ordering& ordering, const KeyFormatter& keyFormatter) {
+ const KeyFormatter& keyFormatter)
+ {
  const bool debug = ISDEBUG("ISAM2 AddVariables");

  theta.insert(newTheta);
  if(debug) newTheta.print("The new variables are: ");
- // Add the new keys onto the ordering, add zeros to the delta for the new variables
+ // Add zeros into the VectorValues
- std::vector<Index> dims(newTheta.dims(*newTheta.orderingArbitrary()));
+ delta.insert(newTheta.zeroVectors());
- if(debug) cout << "New variables have total dimensionality " << accumulate(dims.begin(), dims.end(), 0) << endl;
+ deltaNewton.insert(newTheta.zeroVectors());
- const size_t originalnVars = delta.size();
+ RgProd.insert(newTheta.zeroVectors());
- delta.append(dims);
- deltaNewton.append(dims);
- RgProd.append(dims);
- for(Index j = originalnVars; j < delta.size(); ++j) {
- delta[j].setZero();
- deltaNewton[j].setZero();
- RgProd[j].setZero();
- }
- {
- Index nextVar = originalnVars;
- BOOST_FOREACH(const Values::ConstKeyValuePair& key_value, newTheta) {
- ordering.insert(key_value.key, nextVar);
- if(debug) cout << "Adding variable " << keyFormatter(key_value.key) << " with order " << nextVar << endl;
- ++ nextVar;
- }
- assert(ordering.size() == delta.size());
- }
- replacedKeys.resize(ordering.size(), false);
  }

  /* ************************************************************************* */
- void ISAM2::Impl::RemoveVariables(const FastSet<Key>& unusedKeys, const ISAM2Clique::shared_ptr& root,
+ void ISAM2::Impl::RemoveVariables(const FastSet<Key>& unusedKeys, const std::vector<ISAM2::sharedClique>& roots,
  Values& theta, VariableIndex& variableIndex,
  VectorValues& delta, VectorValues& deltaNewton, VectorValues& RgProd,
- std::vector<bool>& replacedKeys, Ordering& ordering, Base::Nodes& nodes,
+ FastSet<Key>& replacedKeys, Base::Nodes& nodes,
- GaussianFactorGraph& linearFactors, FastSet<Key>& fixedVariables) {
+ FastSet<Key>& fixedVariables)

- // Get indices of unused keys
- vector<Index> unusedIndices; unusedIndices.reserve(unusedKeys.size());
- BOOST_FOREACH(Key key, unusedKeys) { unusedIndices.push_back(ordering[key]); }
- // Create a permutation that shifts the unused variables to the end
- Permutation unusedToEnd = Permutation::PushToBack(unusedIndices, delta.size());
- Permutation unusedToEndInverse = *unusedToEnd.inverse();
- // Use the permutation to remove unused variables while shifting all the others to take up the space
- variableIndex.permuteInPlace(unusedToEnd);
- variableIndex.removeUnusedAtEnd(unusedIndices.size());
  {
- // Create a vector of variable dimensions with the unused ones removed
+ variableIndex.removeUnusedVariables(unusedKeys.begin(), unusedKeys.end());
- // by applying the unusedToEnd permutation to the original vector of
+ BOOST_FOREACH(Key key, unusedKeys) {
- // variable dimensions. We only allocate space in the shifted dims
+ delta.erase(key);
- // vector for the used variables, so that the unused ones are dropped
+ deltaNewton.erase(key);
- // when the permutation is applied.
+ RgProd.erase(key);
- vector<size_t> originalDims = delta.dims();
+ replacedKeys.erase(key);
- vector<size_t> dims(delta.size() - unusedIndices.size());
+ nodes.erase(key);
- unusedToEnd.applyToCollection(dims, originalDims);
- // Copy from the old data structures to new ones, only iterating up to
- // the number of used variables, and applying the unusedToEnd permutation
- // in order to remove the unused variables.
- VectorValues newDelta(dims);
- VectorValues newDeltaNewton(dims);
- VectorValues newDeltaGradSearch(dims);
- std::vector<bool> newReplacedKeys(replacedKeys.size() - unusedIndices.size());
- Base::Nodes newNodes(replacedKeys.size() - unusedIndices.size());
- for(size_t j = 0; j < dims.size(); ++j) {
- newDelta[j] = delta[unusedToEnd[j]];
- newDeltaNewton[j] = deltaNewton[unusedToEnd[j]];
- newDeltaGradSearch[j] = RgProd[unusedToEnd[j]];
- newReplacedKeys[j] = replacedKeys[unusedToEnd[j]];
- newNodes[j] = nodes[unusedToEnd[j]];
- }
- // Swap the new data structures with the outputs of this function
- delta.swap(newDelta);
- deltaNewton.swap(newDeltaNewton);
- RgProd.swap(newDeltaGradSearch);
- replacedKeys.swap(newReplacedKeys);
- nodes.swap(newNodes);
- }
- // Reorder and remove from ordering, solution, and fixed keys
- ordering.permuteInPlace(unusedToEnd);
- BOOST_REVERSE_FOREACH(Key key, unusedKeys) {
- Ordering::value_type removed = ordering.pop_back();
- assert(removed.first == key);
  theta.erase(key);
  fixedVariables.erase(key);
  }

- // Finally, permute references to variables
- if(root)
- root->permuteWithInverse(unusedToEndInverse);
- linearFactors.permuteWithInverse(unusedToEndInverse);
  }

  /* ************************************************************************* */
- FastSet<Index> ISAM2::Impl::CheckRelinearizationFull(const VectorValues& delta, const Ordering& ordering,
+ FastSet<Index> ISAM2::Impl::CheckRelinearizationFull(const VectorValues& delta,
- const ISAM2Params::RelinearizationThreshold& relinearizeThreshold, const KeyFormatter& keyFormatter) {
+ const ISAM2Params::RelinearizationThreshold& relinearizeThreshold)
+ {
  FastSet<Index> relinKeys;

- if(relinearizeThreshold.type() == typeid(double)) {
+ if(const double* threshold = boost::get<const double*>(relinearizeThreshold))
- double threshold = boost::get<double>(relinearizeThreshold);
+ {
- for(Index var=0; var<delta.size(); ++var) {
+ BOOST_FOREACH(const VectorValues::KeyValuePair& key_delta, delta) {
- double maxDelta = delta[var].lpNorm<Eigen::Infinity>();
+ double maxDelta = key_delta.second.lpNorm<Eigen::Infinity>();
- if(maxDelta >= threshold) {
+ if(maxDelta >= *threshold)
- relinKeys.insert(var);
+ relinKeys.insert(key_delta.first);
  }
  }
- } else if(relinearizeThreshold.type() == typeid(FastMap<char,Vector>)) {
+ else if(const FastMap<char,Vector>* thresholds = boost::get<const FastMap<char,Vector>*>(relinearizeThreshold))
- const FastMap<char,Vector>& thresholds = boost::get<FastMap<char,Vector> >(relinearizeThreshold);
+ {
- BOOST_FOREACH(const Ordering::value_type& key_index, ordering) {
+ BOOST_FOREACH(const VectorValues::KeyValuePair& key_delta, delta) {
- const Vector& threshold = thresholds.find(Symbol(key_index.first).chr())->second;
+ const Vector& threshold = thresholds->find(Symbol(key_delta.first).chr())->second;
- Index j = key_index.second;
+ if(threshold.rows() != key_delta.second.rows())
- if(threshold.rows() != delta[j].rows())
+ throw std::invalid_argument("Relinearization threshold vector dimensionality for '" + std::string(1, Symbol(key_delta.first).chr()) + "' passed into iSAM2 parameters does not match actual variable dimensionality.");
- throw std::invalid_argument("Relinearization threshold vector dimensionality passed into iSAM2 parameters does not match actual variable dimensionality");
+ if((key_delta.second.array().abs() > threshold.array()).any())
- if((delta[j].array().abs() > threshold.array()).any())
+ relinKeys.insert(key_delta.first);
- relinKeys.insert(j);
  }
  }
@@ -157,11 +91,12 @@ FastSet<Index> ISAM2::Impl::CheckRelinearizationFull(const VectorValues& delta,
  }

  /* ************************************************************************* */
- void CheckRelinearizationRecursiveDouble(FastSet<Index>& relinKeys, double threshold, const VectorValues& delta, const ISAM2Clique::shared_ptr& clique) {
+ void CheckRelinearizationRecursiveDouble(FastSet<Index>& relinKeys, double threshold,
+ const VectorValues& delta, const ISAM2Clique::shared_ptr& clique)
+ {
  // Check the current clique for relinearization
  bool relinearize = false;
- BOOST_FOREACH(Index var, clique->conditional()->keys()) {
+ BOOST_FOREACH(Key var, *clique->conditional()) {
  double maxDelta = delta[var].lpNorm<Eigen::Infinity>();
  if(maxDelta >= threshold) {
  relinKeys.insert(var);
@@ -171,31 +106,31 @@ void CheckRelinearizationRecursiveDouble(FastSet<Index>& relinKeys, double thres

  // If this node was relinearized, also check its children
  if(relinearize) {
- BOOST_FOREACH(const ISAM2Clique::shared_ptr& child, clique->children()) {
+ BOOST_FOREACH(const ISAM2Clique::shared_ptr& child, clique->children) {
  CheckRelinearizationRecursiveDouble(relinKeys, threshold, delta, child);
  }
  }
  }

  /* ************************************************************************* */
- void CheckRelinearizationRecursiveMap(FastSet<Index>& relinKeys, const FastMap<char,Vector>& thresholds, const VectorValues& delta, const Ordering& ordering, const ISAM2Clique::shared_ptr& clique) {
+ void CheckRelinearizationRecursiveMap(FastSet<Index>& relinKeys, const FastMap<char,Vector>& thresholds,
+ const VectorValues& delta,
+ const ISAM2Clique::shared_ptr& clique)
+ {
  // Check the current clique for relinearization
  bool relinearize = false;
- BOOST_FOREACH(Index var, clique->conditional()->keys()) {
+ BOOST_FOREACH(Key var, *clique->conditional()) {

- // Lookup the key associated with this index
- Key key = ordering.key(var);

  // Find the threshold for this variable type
- const Vector& threshold = thresholds.find(Symbol(key).chr())->second;
+ const Vector& threshold = thresholds.find(Symbol(var).chr())->second;

+ const Vector& deltaVar = delta[var];

  // Verify the threshold vector matches the actual variable size
- if(threshold.rows() != delta[var].rows())
+ if(threshold.rows() != deltaVar.rows())
- throw std::invalid_argument("Relinearization threshold vector dimensionality passed into iSAM2 parameters does not match actual variable dimensionality");
+ throw std::invalid_argument("Relinearization threshold vector dimensionality for '" + std::string(1, Symbol(var).chr()) + "' passed into iSAM2 parameters does not match actual variable dimensionality.");

  // Check for relinearization
- if((delta[var].array().abs() > threshold.array()).any()) {
+ if((deltaVar.array().abs() > threshold.array()).any()) {
  relinKeys.insert(var);
  relinearize = true;
  }
@@ -203,52 +138,54 @@ void CheckRelinearizationRecursiveMap(FastSet<Index>& relinKeys, const FastMap<c

  // If this node was relinearized, also check its children
  if(relinearize) {
- BOOST_FOREACH(const ISAM2Clique::shared_ptr& child, clique->children()) {
+ BOOST_FOREACH(const ISAM2Clique::shared_ptr& child, clique->children) {
- CheckRelinearizationRecursiveMap(relinKeys, thresholds, delta, ordering, child);
+ CheckRelinearizationRecursiveMap(relinKeys, thresholds, delta, child);
  }
  }
  }

  /* ************************************************************************* */
- FastSet<Index> ISAM2::Impl::CheckRelinearizationPartial(const ISAM2Clique::shared_ptr& root, const VectorValues& delta, const Ordering& ordering,
+ FastSet<Index> ISAM2::Impl::CheckRelinearizationPartial(const std::vector<ISAM2::sharedClique>& roots,
- const ISAM2Params::RelinearizationThreshold& relinearizeThreshold, const KeyFormatter& keyFormatter) {
+ const VectorValues& delta,
+ const ISAM2Params::RelinearizationThreshold& relinearizeThreshold)
+ {
  FastSet<Index> relinKeys;
+ BOOST_FOREACH(const ISAM2::sharedClique& root, roots) {
- if(root) {
+ if(relinearizeThreshold.type() == typeid(double))
- if(relinearizeThreshold.type() == typeid(double)) {
  CheckRelinearizationRecursiveDouble(relinKeys, boost::get<double>(relinearizeThreshold), delta, root);
- } else if(relinearizeThreshold.type() == typeid(FastMap<char,Vector>)) {
+ else if(relinearizeThreshold.type() == typeid(FastMap<char,Vector>))
- CheckRelinearizationRecursiveMap(relinKeys, boost::get<FastMap<char,Vector> >(relinearizeThreshold), delta, ordering, root);
+ CheckRelinearizationRecursiveMap(relinKeys, boost::get<FastMap<char,Vector> >(relinearizeThreshold), delta, root);
  }
- }

  return relinKeys;
  }

  /* ************************************************************************* */
- void ISAM2::Impl::FindAll(ISAM2Clique::shared_ptr clique, FastSet<Index>& keys, const vector<bool>& markedMask) {
+ void ISAM2::Impl::FindAll(ISAM2Clique::shared_ptr clique, FastSet<Index>& keys, const FastSet<Key>& markedMask)
+ {
  static const bool debug = false;
  // does the separator contain any of the variables?
  bool found = false;
- BOOST_FOREACH(const Index& key, (*clique)->parents()) {
+ BOOST_FOREACH(Key key, clique->conditional()->parents()) {
- if (markedMask[key])
+ if (markedMask.exists(key)) {
  found = true;
+ break;
+ }
  }
  if (found) {
  // then add this clique
- keys.insert((*clique)->beginFrontals(), (*clique)->endFrontals());
+ keys.insert(clique->conditional()->beginFrontals(), clique->conditional()->endFrontals());
  if(debug) clique->print("Key(s) marked in clique ");
- if(debug) cout << "so marking key " << (*clique)->keys().front() << endl;
+ if(debug) cout << "so marking key " << clique->conditional()->front() << endl;
  }
- BOOST_FOREACH(const ISAM2Clique::shared_ptr& child, clique->children_) {
+ BOOST_FOREACH(const ISAM2Clique::shared_ptr& child, clique->children) {
  FindAll(child, keys, markedMask);
  }
  }

  /* ************************************************************************* */
- void ISAM2::Impl::ExpmapMasked(Values& values, const VectorValues& delta, const Ordering& ordering,
+ void ISAM2::Impl::ExpmapMasked(Values& values, const VectorValues& delta,
- const vector<bool>& mask, boost::optional<VectorValues&> invalidateIfDebug, const KeyFormatter& keyFormatter) {
+ const FastSet<Key>& mask, boost::optional<VectorValues&> invalidateIfDebug, const KeyFormatter& keyFormatter)
+ {
  // If debugging, invalidate if requested, otherwise do not invalidate.
  // Invalidating means setting expmapped entries to Inf, to trigger assertions
  // if we try to re-use them.
@@ -256,23 +193,16 @@ void ISAM2::Impl::ExpmapMasked(Values& values, const VectorValues& delta, const
  invalidateIfDebug = boost::none;
  #endif

- assert(values.size() == ordering.size());
+ assert(values.size() == delta.size());
- assert(delta.size() == ordering.size());
  Values::iterator key_value;
- Ordering::const_iterator key_index;
+ VectorValues::const_iterator key_delta;
- for(key_value = values.begin(), key_index = ordering.begin();
+ for(key_value = values.begin(), key_delta = delta.begin(); key_value != values.end(); ++key_value, ++key_delta)
- key_value != values.end() && key_index != ordering.end(); ++key_value, ++key_index) {
+ {
- assert(key_value->key == key_index->first);
+ assert(key_value->key == key_delta->first);
- const Index var = key_index->second;
+ Key var = key_value->key;
- if(ISDEBUG("ISAM2 update verbose")) {
- if(mask[var])
- cout << "expmap " << keyFormatter(key_value->key) << " (j = " << var << "), delta = " << delta[var].transpose() << endl;
- else
- cout << "  " << keyFormatter(key_value->key) << " (j = " << var << "), delta = " << delta[var].transpose() << endl;
- }
  assert(delta[var].size() == (int)key_value->value.dim());
  assert(delta[var].unaryExpr(ptr_fun(isfinite<double>)).all());
- if(mask[var]) {
+ if(mask.exists(var)) {
  Value* retracted = key_value->value.retract_(delta[var]);
  key_value->value = *retracted;
  retracted->deallocate_();
@@ -282,134 +212,35 @@ void ISAM2::Impl::ExpmapMasked(Values& values, const VectorValues& delta, const
  }
  }

- /* ************************************************************************* */
- ISAM2::Impl::PartialSolveResult
- ISAM2::Impl::PartialSolve(GaussianFactorGraph& factors,
- const FastSet<Index>& keys, const ReorderingMode& reorderingMode, bool useQR) {
- const bool debug = ISDEBUG("ISAM2 recalculate");
- PartialSolveResult result;
- gttic(select_affected_variables);
- #ifdef GTSAM_EXTRA_CONSISTENCY_CHECKS
- // Debug check that all variables involved in the factors to be re-eliminated
- // are in affectedKeys, since we will use it to select a subset of variables.
- BOOST_FOREACH(const GaussianFactor::shared_ptr& factor, factors) {
- BOOST_FOREACH(Index key, factor->keys()) {
- assert(find(keys.begin(), keys.end(), key) != keys.end());
- }
- }
- #endif
- Permutation affectedKeysSelector(keys.size()); // Create a permutation that pulls the affected keys to the front
- Permutation affectedKeysSelectorInverse(keys.size() > 0 ? *keys.rbegin()+1 : 0 /*ordering_.nVars()*/); // And its inverse
- #ifndef NDEBUG
- // If debugging, fill with invalid values that will trip asserts if dereferenced
- std::fill(affectedKeysSelectorInverse.begin(), affectedKeysSelectorInverse.end(), numeric_limits<Index>::max());
- #endif
- { Index position=0; BOOST_FOREACH(Index var, keys) {
- affectedKeysSelector[position] = var;
- affectedKeysSelectorInverse[var] = position;
- ++ position; } }
- if(debug) affectedKeysSelector.print("affectedKeysSelector: ");
- if(debug) affectedKeysSelectorInverse.print("affectedKeysSelectorInverse: ");
- factors.permuteWithInverse(affectedKeysSelectorInverse);
- if(debug) factors.print("Factors to reorder/re-eliminate: ");
- gttoc(select_affected_variables);
- gttic(variable_index);
- VariableIndex affectedFactorsIndex(factors); // Create a variable index for the factors to be re-eliminated
- if(debug) affectedFactorsIndex.print("affectedFactorsIndex: ");
- gttoc(variable_index);
- gttic(ccolamd);
- vector<int> cmember(affectedKeysSelector.size(), 0);
- if(reorderingMode.constrain == ReorderingMode::CONSTRAIN_LAST) {
- assert(reorderingMode.constrainedKeys);
- if(!reorderingMode.constrainedKeys->empty()) {
- typedef std::pair<const Index,int> Index_Group;
- if(keys.size() > reorderingMode.constrainedKeys->size()) { // Only if some variables are unconstrained
- BOOST_FOREACH(const Index_Group& index_group, *reorderingMode.constrainedKeys) {
- cmember[affectedKeysSelectorInverse[index_group.first]] = index_group.second; }
- } else {
- int minGroup = *boost::range::min_element(boost::adaptors::values(*reorderingMode.constrainedKeys));
- BOOST_FOREACH(const Index_Group& index_group, *reorderingMode.constrainedKeys) {
- cmember[affectedKeysSelectorInverse[index_group.first]] = index_group.second - minGroup; }
- }
- }
- }
- Permutation::shared_ptr affectedColamd(inference::PermutationCOLAMD_(affectedFactorsIndex, cmember));
- gttoc(ccolamd);
- gttic(ccolamd_permutations);
- Permutation::shared_ptr affectedColamdInverse(affectedColamd->inverse());
- if(debug) affectedColamd->print("affectedColamd: ");
- if(debug) affectedColamdInverse->print("affectedColamdInverse: ");
- result.reorderingSelector = affectedKeysSelector;
- result.reorderingPermutation = *affectedColamd;
- result.reorderingInverse = internal::Reduction::CreateFromPartialPermutation(affectedKeysSelector, *affectedColamdInverse);
- gttoc(ccolamd_permutations);
- gttic(permute_affected_variable_index);
- affectedFactorsIndex.permuteInPlace(*affectedColamd);
- gttoc(permute_affected_variable_index);
- gttic(permute_affected_factors);
- factors.permuteWithInverse(*affectedColamdInverse);
- gttoc(permute_affected_factors);
- if(debug) factors.print("Colamd-ordered affected factors: ");
- #ifdef GTSAM_EXTRA_CONSISTENCY_CHECKS
- VariableIndex fromScratchIndex(factors);
- assert(assert_equal(fromScratchIndex, affectedFactorsIndex));
- #endif
- // eliminate into a Bayes net
- gttic(eliminate);
- JunctionTree<GaussianFactorGraph, ISAM2::Clique> jt(factors, affectedFactorsIndex);
- if(!useQR)
- result.bayesTree = jt.eliminate(EliminatePreferCholesky);
- else
- result.bayesTree = jt.eliminate(EliminateQR);
- gttoc(eliminate);
- gttic(permute_eliminated);
- if(result.bayesTree) result.bayesTree->permuteWithInverse(affectedKeysSelector);
- if(debug && result.bayesTree) {
- cout << "Full var-ordered eliminated BT:\n";
- result.bayesTree->printTree("");
- }
- // Undo permutation on our subset of cached factors, we must later permute *all* of the cached factors
- factors.permuteWithInverse(*affectedColamd);
- factors.permuteWithInverse(affectedKeysSelector);
- gttoc(permute_eliminated);
- return result;
- }

  /* ************************************************************************* */
  namespace internal {
  inline static void optimizeInPlace(const boost::shared_ptr<ISAM2Clique>& clique, VectorValues& result) {
  // parents are assumed to already be solved and available in result
- clique->conditional()->solveInPlace(result);
+ result.update(clique->conditional()->solve(result));

  // starting from the root, call optimize on each conditional
- BOOST_FOREACH(const boost::shared_ptr<ISAM2Clique>& child, clique->children_)
+ BOOST_FOREACH(const boost::shared_ptr<ISAM2Clique>& child, clique->children)
  optimizeInPlace(child, result);
  }
  }

  /* ************************************************************************* */
- size_t ISAM2::Impl::UpdateDelta(const boost::shared_ptr<ISAM2Clique>& root, std::vector<bool>& replacedKeys, VectorValues& delta, double wildfireThreshold) {
+ size_t ISAM2::Impl::UpdateDelta(const std::vector<ISAM2::sharedClique>& roots, FastSet<Key>& replacedKeys, VectorValues& delta, double wildfireThreshold) {

  size_t lastBacksubVariableCount;

  if (wildfireThreshold <= 0.0) {
  // Threshold is zero or less, so do a full recalculation
+ BOOST_FOREACH(const ISAM2::sharedClique& root, roots)
  internal::optimizeInPlace(root, delta);
  lastBacksubVariableCount = delta.size();

  } else {
  // Optimize with wildfire
- lastBacksubVariableCount = optimizeWildfireNonRecursive(root, wildfireThreshold, replacedKeys, delta); // modifies delta_
+ lastBacksubVariableCount = 0;
+ BOOST_FOREACH(const ISAM2::sharedClique& root, roots)
+ lastBacksubVariableCount += optimizeWildfireNonRecursive(
+ root, wildfireThreshold, replacedKeys, delta); // modifies delta

  #ifdef GTSAM_EXTRA_CONSISTENCY_CHECKS
  for(size_t j=0; j<delta.size(); ++j)
@@ -418,22 +249,22 @@ size_t ISAM2::Impl::UpdateDelta(const boost::shared_ptr<ISAM2Clique>& root, std:
  }

  // Clear replacedKeys
- replacedKeys.assign(replacedKeys.size(), false);
+ replacedKeys.clear();

  return lastBacksubVariableCount;
  }

  /* ************************************************************************* */
  namespace internal {
- void updateDoglegDeltas(const boost::shared_ptr<ISAM2Clique>& clique, const std::vector<bool>& replacedKeys,
+ void updateDoglegDeltas(const boost::shared_ptr<ISAM2Clique>& clique, const FastSet<Key>& replacedKeys,
- const VectorValues& grad, VectorValues& deltaNewton, VectorValues& RgProd, size_t& varsUpdated) {
+ const VectorValues& grad, VectorValues& RgProd, size_t& varsUpdated) {

  // Check if any frontal or separator keys were recalculated, if so, we need
  // update deltas and recurse to children, but if not, we do not need to
  // recurse further because of the running separator property.
  bool anyReplaced = false;
- BOOST_FOREACH(Index j, *clique->conditional()) {
+ BOOST_FOREACH(Key j, *clique->conditional()) {
- if(replacedKeys[j]) {
+ if(replacedKeys.exists(j)) {
  anyReplaced = true;
  break;
  }
@@ -442,41 +273,49 @@ void updateDoglegDeltas(const boost::shared_ptr<ISAM2Clique>& clique, const std:
  if(anyReplaced) {
  // Update the current variable
  // Get VectorValues slice corresponding to current variables
- Vector gR = internal::extractVectorValuesSlices(grad, (*clique)->beginFrontals(), (*clique)->endFrontals());
+ Vector gR = grad.vector(std::vector<Key>(clique->conditional()->beginFrontals(), clique->conditional()->endFrontals()));
- Vector gS = internal::extractVectorValuesSlices(grad, (*clique)->beginParents(), (*clique)->endParents());
+ Vector gS = grad.vector(std::vector<Key>(clique->conditional()->beginParents(), clique->conditional()->endParents()));

  // Compute R*g and S*g for this clique
- Vector RSgProd = (*clique)->get_R() * gR + (*clique)->get_S() * gS;
+ Vector RSgProd = clique->conditional()->get_R() * gR + clique->conditional()->get_S() * gS;

  // Write into RgProd vector
- internal::writeVectorValuesSlices(RSgProd, RgProd, (*clique)->beginFrontals(), (*clique)->endFrontals());
+ DenseIndex vectorPosition = 0;
+ BOOST_FOREACH(Key frontal, clique->conditional()->frontals()) {
+ Vector& RgProdValue = RgProd[frontal];
+ RgProdValue = RSgProd.segment(vectorPosition, RgProdValue.size());
+ vectorPosition += RgProdValue.size();
+ }

  // Now solve the part of the Newton's method point for this clique (back-substitution)
  //(*clique)->solveInPlace(deltaNewton);

- varsUpdated += (*clique)->nrFrontals();
+ varsUpdated += clique->conditional()->nrFrontals();

  // Recurse to children
- BOOST_FOREACH(const ISAM2Clique::shared_ptr& child, clique->children()) {
+ BOOST_FOREACH(const ISAM2Clique::shared_ptr& child, clique->children) {
- updateDoglegDeltas(child, replacedKeys, grad, deltaNewton, RgProd, varsUpdated); }
+ updateDoglegDeltas(child, replacedKeys, grad, RgProd, varsUpdated); }
  }
  }
  }

  /* ************************************************************************* */
- size_t ISAM2::Impl::UpdateDoglegDeltas(const ISAM2& isam, double wildfireThreshold, std::vector<bool>& replacedKeys,
+ size_t ISAM2::Impl::UpdateDoglegDeltas(const ISAM2& isam, double wildfireThreshold, FastSet<Key>& replacedKeys,
  VectorValues& deltaNewton, VectorValues& RgProd) {

  // Get gradient
- VectorValues grad = *allocateVectorValues(isam);
+ VectorValues grad;
  gradientAtZero(isam, grad);

  // Update variables
  size_t varsUpdated = 0;
- internal::updateDoglegDeltas(isam.root(), replacedKeys, grad, deltaNewton, RgProd, varsUpdated);
+ BOOST_FOREACH(const ISAM2::sharedClique& root, isam.roots())
- optimizeWildfireNonRecursive(isam.root(), wildfireThreshold, replacedKeys, deltaNewton);
+ {
+ internal::updateDoglegDeltas(root, replacedKeys, grad, RgProd, varsUpdated);
+ optimizeWildfireNonRecursive(root, wildfireThreshold, replacedKeys, deltaNewton);
+ }

- replacedKeys.assign(replacedKeys.size(), false);
+ replacedKeys.clear();

  return varsUpdated;
  }
@@ -26,9 +26,6 @@ struct GTSAM_EXPORT ISAM2::Impl {

  struct GTSAM_EXPORT PartialSolveResult {
  ISAM2::sharedClique bayesTree;
- Permutation reorderingSelector;
- Permutation reorderingPermutation;
- internal::Reduction reorderingInverse;
  };

  struct GTSAM_EXPORT ReorderingMode {
@@ -48,16 +45,16 @@ struct GTSAM_EXPORT ISAM2::Impl {
  * @param keyFormatter Formatter for printing nonlinear keys during debugging
  */
  static void AddVariables(const Values& newTheta, Values& theta, VectorValues& delta,
- VectorValues& deltaNewton, VectorValues& RgProd, std::vector<bool>& replacedKeys,
+ VectorValues& deltaNewton, VectorValues& RgProd,
- Ordering& ordering, const KeyFormatter& keyFormatter = DefaultKeyFormatter);
+ const KeyFormatter& keyFormatter = DefaultKeyFormatter);

  /**
  * Remove variables from the ISAM2 system.
  */
- static void RemoveVariables(const FastSet<Key>& unusedKeys, const ISAM2Clique::shared_ptr& root,
+ static void RemoveVariables(const FastSet<Key>& unusedKeys, const std::vector<ISAM2::sharedClique>& roots,
  Values& theta, VariableIndex& variableIndex, VectorValues& delta, VectorValues& deltaNewton,
- VectorValues& RgProd, std::vector<bool>& replacedKeys, Ordering& ordering, Base::Nodes& nodes,
+ VectorValues& RgProd, FastSet<Key>& replacedKeys, Base::Nodes& nodes,
- GaussianFactorGraph& linearFactors, FastSet<Key>& fixedVariables);
+ FastSet<Key>& fixedVariables);

  /**
  * Find the set of variables to be relinearized according to relinearizeThreshold.
@@ -68,8 +65,8 @@ struct GTSAM_EXPORT ISAM2::Impl {
  * @return The set of variable indices in delta whose magnitude is greater than or
  * equal to relinearizeThreshold
  */
- static FastSet<Index> CheckRelinearizationFull(const VectorValues& delta, const Ordering& ordering,
+ static FastSet<Index> CheckRelinearizationFull(const VectorValues& delta,
- const ISAM2Params::RelinearizationThreshold& relinearizeThreshold, const KeyFormatter& keyFormatter = DefaultKeyFormatter);
+ const ISAM2Params::RelinearizationThreshold& relinearizeThreshold);

  /**
  * Find the set of variables to be relinearized according to relinearizeThreshold.
@@ -82,8 +79,8 @@ struct GTSAM_EXPORT ISAM2::Impl {
  * @return The set of variable indices in delta whose magnitude is greater than or
  * equal to relinearizeThreshold
  */
- static FastSet<Index> CheckRelinearizationPartial(const ISAM2Clique::shared_ptr& root, const VectorValues& delta, const Ordering& ordering,
+ static FastSet<Index> CheckRelinearizationPartial(const std::vector<ISAM2::sharedClique>& roots,
- const ISAM2Params::RelinearizationThreshold& relinearizeThreshold, const KeyFormatter& keyFormatter = DefaultKeyFormatter);
+ const VectorValues& delta, const ISAM2Params::RelinearizationThreshold& relinearizeThreshold);

  /**
  * Recursively search this clique and its children for marked keys appearing
@@ -116,30 +113,14 @@ struct GTSAM_EXPORT ISAM2::Impl {
  * @param keyFormatter Formatter for printing nonlinear keys during debugging
  */
  static void ExpmapMasked(Values& values, const VectorValues& delta,
- const Ordering& ordering, const std::vector<bool>& mask,
+ const FastSet<Key>& mask,
  boost::optional<VectorValues&> invalidateIfDebug = boost::none,
  const KeyFormatter& keyFormatter = DefaultKeyFormatter);

- /**
+ static size_t UpdateDelta(const std::vector<ISAM2::sharedClique>& roots,
- * Reorder and eliminate factors. These factors form a subset of the full
+ FastSet<Key>& replacedKeys, VectorValues& delta, double wildfireThreshold);
- * problem, so along with the BayesTree we get a partial reordering of the
- * problem that needs to be applied to the other data in ISAM2, which is the
- * VariableIndex, the delta, the ordering, and the orphans (including cached
- * factors).
- * \param factors The factors to eliminate, representing part of the full
- * problem. This is permuted during use and so is cleared when this function
- * returns in order to invalidate it.
- * \param keys The set of indices used in \c factors.
- * \param useQR Whether to use QR (if true), or Cholesky (if false).
- * \return The eliminated BayesTree and the permutation to be applied to the
- * rest of the ISAM2 data.
- */
- static PartialSolveResult PartialSolve(GaussianFactorGraph& factors, const FastSet<Index>& keys,
- const ReorderingMode& reorderingMode, bool useQR);

- static size_t UpdateDelta(const boost::shared_ptr<ISAM2Clique>& root, std::vector<bool>& replacedKeys, VectorValues& delta, double wildfireThreshold);
+ static size_t UpdateDoglegDeltas(const ISAM2& isam, double wildfireThreshold, FastSet<Key>& replacedKeys,

- static size_t UpdateDoglegDeltas(const ISAM2& isam, double wildfireThreshold, std::vector<bool>& replacedKeys,
  VectorValues& deltaNewton, VectorValues& RgProd);

  };
@@ -28,8 +28,7 @@ namespace gtsam {
  /* ************************************************************************* */
  template<class VALUE>
  VALUE ISAM2::calculateEstimate(Key key) const {
- const Index index = getOrdering()[key];
+ const Vector& delta = getDelta()[key];
- const Vector& delta = getDelta()[index];
  return theta_.at<VALUE>(key).retract(delta);
  }
@@ -37,24 +36,25 @@ VALUE ISAM2::calculateEstimate(Key key) const {
  namespace internal {
  template<class CLIQUE>
  void optimizeWildfire(const boost::shared_ptr<CLIQUE>& clique, double threshold,
- std::vector<bool>& changed, const std::vector<bool>& replaced, VectorValues& delta, int& count) {
+ FastSet<Key>& changed, const FastSet<Key>& replaced, VectorValues& delta, size_t& count)
+ {
  // if none of the variables in this clique (frontal and separator!) changed
  // significantly, then by the running intersection property, none of the
  // cliques in the children need to be processed

  // Are any clique variables part of the tree that has been redone?
- bool cliqueReplaced = replaced[(*clique)->frontals().front()];
+ bool cliqueReplaced = replaced.exists((*clique)->frontals().front());
  #ifdef GTSAM_EXTRA_CONSISTENCY_CHECKS
- BOOST_FOREACH(Index frontal, (*clique)->frontals()) {
+ BOOST_FOREACH(Key frontal, clique->conditional()->frontals()) {
- assert(cliqueReplaced == replaced[frontal]);
+ assert(cliqueReplaced == replaced.exists(frontal));
  }
  #endif

  // If not redone, then has one of the separator variables changed significantly?
  bool recalculate = cliqueReplaced;
  if(!recalculate) {
- BOOST_FOREACH(Index parent, (*clique)->parents()) {
+ BOOST_FOREACH(Key parent, clique->conditional()->parents()) {
- if(changed[parent]) {
+ if(changed.exists(parent)) {
  recalculate = true;
  break;
  }
@@ -66,22 +66,22 @@ void optimizeWildfire(const boost::shared_ptr<CLIQUE>& clique, double threshold,
  if(recalculate) {

  // Temporary copy of the original values, to check how much they change
- std::vector<Vector> originalValues((*clique)->nrFrontals());
+ std::vector<Vector> originalValues(clique->conditional()->nrFrontals());
  GaussianConditional::const_iterator it;
- for(it = (*clique)->beginFrontals(); it!=(*clique)->endFrontals(); it++) {
+ for(it = clique->conditional()->beginFrontals(); it!=clique->conditional()->endFrontals(); it++) {
- originalValues[it - (*clique)->beginFrontals()] = delta[*it];
+ originalValues[it - clique->conditional()->beginFrontals()] = delta[*it];
  }

  // Back-substitute
- (*clique)->solveInPlace(delta);
+ delta.update(clique->conditional()->solve(delta));
- count += (*clique)->nrFrontals();
+ count += clique->conditional()->nrFrontals();

  // Whether the values changed above a threshold, or always true if the
  // clique was replaced.
  bool valuesChanged = cliqueReplaced;
- for(it = (*clique)->beginFrontals(); it!=(*clique)->endFrontals(); it++) {
+ for(it = clique->conditional()->beginFrontals(); it != clique->conditional()->endFrontals(); it++) {
  if(!valuesChanged) {
- const Vector& oldValue(originalValues[it - (*clique)->beginFrontals()]);
+ const Vector& oldValue(originalValues[it - clique->conditional()->beginFrontals()]);
  const Vector& newValue(delta[*it]);
  if((oldValue - newValue).lpNorm<Eigen::Infinity>() >= threshold) {
  valuesChanged = true;
@@ -94,18 +94,18 @@ void optimizeWildfire(const boost::shared_ptr<CLIQUE>& clique, double threshold,
  // If the values were above the threshold or this clique was replaced
  if(valuesChanged) {
  // Set changed flag for each frontal variable and leave the new values
- BOOST_FOREACH(Index frontal, (*clique)->frontals()) {
+ BOOST_FOREACH(Key frontal, clique->conditional()->frontals()) {
- changed[frontal] = true;
+ changed.insert(frontal);
  }
  } else {
  // Replace with the old values
- for(it = (*clique)->beginFrontals(); it!=(*clique)->endFrontals(); it++) {
+ for(it = clique->conditional()->beginFrontals(); it != clique->conditional()->endFrontals(); it++) {
- delta[*it] = originalValues[it - (*clique)->beginFrontals()];
+ delta[*it] = originalValues[it - clique->conditional()->beginFrontals()];
  }
  }

  // Recurse to children
- BOOST_FOREACH(const typename CLIQUE::shared_ptr& child, clique->children_) {
+ BOOST_FOREACH(const typename CLIQUE::shared_ptr& child, clique->children) {
  optimizeWildfire(child, threshold, changed, replaced, delta, count);
  }
  }
@@ -113,24 +113,25 @@ void optimizeWildfire(const boost::shared_ptr<CLIQUE>& clique, double threshold,

  template<class CLIQUE>
  bool optimizeWildfireNode(const boost::shared_ptr<CLIQUE>& clique, double threshold,
- std::vector<bool>& changed, const std::vector<bool>& replaced, VectorValues& delta, int& count) {
+ FastSet<Key>& changed, const FastSet<Key>& replaced, VectorValues& delta, size_t& count)
+ {
  // if none of the variables in this clique (frontal and separator!) changed
  // significantly, then by the running intersection property, none of the
  // cliques in the children need to be processed

  // Are any clique variables part of the tree that has been redone?
- bool cliqueReplaced = replaced[(*clique)->frontals().front()];
+ bool cliqueReplaced = replaced.exists(clique->conditional()->frontals().front());
  #ifdef GTSAM_EXTRA_CONSISTENCY_CHECKS
- BOOST_FOREACH(Index frontal, (*clique)->frontals()) {
+ BOOST_FOREACH(Key frontal, clique->conditional()->frontals()) {
- assert(cliqueReplaced == replaced[frontal]);
+ assert(cliqueReplaced == replaced.exists(frontal));
  }
  #endif

  // If not redone, then has one of the separator variables changed significantly?
  bool recalculate = cliqueReplaced;
  if(!recalculate) {
- BOOST_FOREACH(Index parent, (*clique)->parents()) {
+ BOOST_FOREACH(Key parent, clique->conditional()->parents()) {
- if(changed[parent]) {
+ if(changed.exists(parent)) {
  recalculate = true;
  break;
  }
@@ -139,25 +140,25 @@ bool optimizeWildfireNode(const boost::shared_ptr<CLIQUE>& clique, double thresh

  // Solve clique if it was replaced, or if any parents were changed above the
  // threshold or themselves replaced.
- if(recalculate) {
+ if(recalculate)
+ {
  // Temporary copy of the original values, to check how much they change
- std::vector<Vector> originalValues((*clique)->nrFrontals());
+ std::vector<Vector> originalValues(clique->conditional()->nrFrontals());
  GaussianConditional::const_iterator it;
- for(it = (*clique)->beginFrontals(); it!=(*clique)->endFrontals(); it++) {
+ for(it = clique->conditional()->beginFrontals(); it != clique->conditional()->endFrontals(); it++) {
- originalValues[it - (*clique)->beginFrontals()] = delta[*it];
+ originalValues[it - clique->conditional()->beginFrontals()] = delta[*it];
  }

  // Back-substitute
- (*clique)->solveInPlace(delta);
+ delta.update(clique->conditional()->solve(delta));
- count += (*clique)->nrFrontals();
+ count += clique->conditional()->nrFrontals();

  // Whether the values changed above a threshold, or always true if the
  // clique was replaced.
  bool valuesChanged = cliqueReplaced;
- for(it = (*clique)->beginFrontals(); it!=(*clique)->endFrontals(); it++) {
+ for(it = clique->conditional()->beginFrontals(); it != clique->conditional()->endFrontals(); it++) {
  if(!valuesChanged) {
- const Vector& oldValue(originalValues[it - (*clique)->beginFrontals()]);
+ const Vector& oldValue(originalValues[it - clique->conditional()->beginFrontals()]);
  const Vector& newValue(delta[*it]);
  if((oldValue - newValue).lpNorm<Eigen::Infinity>() >= threshold) {
  valuesChanged = true;
@@ -170,16 +171,15 @@ bool optimizeWildfireNode(const boost::shared_ptr<CLIQUE>& clique, double thresh
  // If the values were above the threshold or this clique was replaced
  if(valuesChanged) {
  // Set changed flag for each frontal variable and leave the new values
- BOOST_FOREACH(Index frontal, (*clique)->frontals()) {
+ BOOST_FOREACH(Key frontal, clique->conditional()->frontals()) {
- changed[frontal] = true;
+ changed.insert(frontal);
  }
  } else {
  // Replace with the old values
- for(it = (*clique)->beginFrontals(); it!=(*clique)->endFrontals(); it++) {
+ for(it = clique->conditional()->beginFrontals(); it != clique->conditional()->endFrontals(); it++) {
- delta[*it] = originalValues[it - (*clique)->beginFrontals()];
+ delta[*it] = originalValues[it - clique->conditional()->beginFrontals()];
  }
  }

  }

  return recalculate;
@@ -189,8 +189,8 @@ bool optimizeWildfireNode(const boost::shared_ptr<CLIQUE>& clique, double thresh
|
||||||
|
|
||||||
/* ************************************************************************* */
|
/* ************************************************************************* */
|
||||||
template<class CLIQUE>
|
template<class CLIQUE>
|
||||||
int optimizeWildfire(const boost::shared_ptr<CLIQUE>& root, double threshold, const std::vector<bool>& keys, VectorValues& delta) {
|
size_t optimizeWildfire(const boost::shared_ptr<CLIQUE>& root, double threshold, const FastSet<Key>& keys, VectorValues& delta) {
|
||||||
std::vector<bool> changed(keys.size(), false);
|
FastSet<Key> changed;
|
||||||
int count = 0;
|
size_t count = 0;
|
||||||
// starting from the root, call optimize on each conditional
|
// starting from the root, call optimize on each conditional
|
||||||
if(root)
|
if(root)
|
||||||
|
|
@@ -200,9 +200,10 @@ int optimizeWildfire(const boost::shared_ptr<CLIQUE>& root, double threshold, co
|
||||||
|
|
||||||
/* ************************************************************************* */
|
/* ************************************************************************* */
|
||||||
template<class CLIQUE>
|
template<class CLIQUE>
|
||||||
int optimizeWildfireNonRecursive(const boost::shared_ptr<CLIQUE>& root, double threshold, const std::vector<bool>& keys, VectorValues& delta) {
|
size_t optimizeWildfireNonRecursive(const boost::shared_ptr<CLIQUE>& root, double threshold, const FastSet<Key>& keys, VectorValues& delta)
|
||||||
std::vector<bool> changed(keys.size(), false);
|
{
|
||||||
int count = 0;
|
FastSet<Key> changed;
|
||||||
|
size_t count = 0;
|
||||||
|
|
||||||
if (root) {
|
if (root) {
|
||||||
std::stack<boost::shared_ptr<CLIQUE> > travStack;
|
std::stack<boost::shared_ptr<CLIQUE> > travStack;
|
||||||
|
|
@@ -213,7 +214,7 @@ int optimizeWildfireNonRecursive(const boost::shared_ptr<CLIQUE>& root, double t
|
||||||
travStack.pop();
|
travStack.pop();
|
||||||
bool recalculate = internal::optimizeWildfireNode(currentNode, threshold, changed, keys, delta, count);
|
bool recalculate = internal::optimizeWildfireNode(currentNode, threshold, changed, keys, delta, count);
|
||||||
if (recalculate) {
|
if (recalculate) {
|
||||||
BOOST_FOREACH(const typename CLIQUE::shared_ptr& child, currentNode->children_) {
|
BOOST_FOREACH(const typename CLIQUE::shared_ptr& child, currentNode->children) {
|
||||||
travStack.push(child);
|
travStack.push(child);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
||||||
|
|
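The non-recursive variant above replaces recursion with an explicit stack and only pushes a clique's children when that clique was actually recalculated. A generic stand-alone version of the same traversal pattern, with a hypothetical Node type standing in for the GTSAM clique:

#include <stack>
#include <vector>
#include <boost/shared_ptr.hpp>

struct Node { std::vector<boost::shared_ptr<Node> > children; };  // hypothetical clique-like type

// Pre-order traversal with an explicit stack; visit() returns true if the
// node was recalculated, in which case its children are visited as well.
template <class Visitor>
void traverseWildfire(const boost::shared_ptr<Node>& root, Visitor visit) {
  std::stack<boost::shared_ptr<Node> > travStack;
  if (root) travStack.push(root);
  while (!travStack.empty()) {
    boost::shared_ptr<Node> current = travStack.top();
    travStack.pop();
    if (visit(current))
      for (size_t i = 0; i < current->children.size(); ++i)
        travStack.push(current->children[i]);
  }
}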
@@ -26,7 +26,9 @@ namespace br { using namespace boost::range; using namespace boost::adaptors; }
|
||||||
#include <gtsam/base/timing.h>
|
#include <gtsam/base/timing.h>
|
||||||
#include <gtsam/base/debug.h>
|
#include <gtsam/base/debug.h>
|
||||||
#include <gtsam/inference/BayesTree-inst.h>
|
#include <gtsam/inference/BayesTree-inst.h>
|
||||||
|
#include <gtsam/inference/BayesTreeCliqueBase-inst.h>
|
||||||
#include <gtsam/inference/JunctionTree-inst.h> // We need the inst file because we'll make a special JT templated on ISAM2
|
#include <gtsam/inference/JunctionTree-inst.h> // We need the inst file because we'll make a special JT templated on ISAM2
|
||||||
|
#include <gtsam/linear/linearAlgorithms-inst.h>
|
||||||
#include <gtsam/linear/HessianFactor.h>
|
#include <gtsam/linear/HessianFactor.h>
|
||||||
#include <gtsam/linear/GaussianFactorGraph.h>
|
#include <gtsam/linear/GaussianFactorGraph.h>
|
||||||
#include <gtsam/linear/GaussianEliminationTree.h>
|
#include <gtsam/linear/GaussianEliminationTree.h>
|
||||||
|
|
@@ -36,9 +38,13 @@ namespace br { using namespace boost::range; using namespace boost::adaptors; }
|
||||||
#include <gtsam/nonlinear/nonlinearExceptions.h>
|
#include <gtsam/nonlinear/nonlinearExceptions.h>
|
||||||
#include <gtsam/nonlinear/LinearContainerFactor.h>
|
#include <gtsam/nonlinear/LinearContainerFactor.h>
|
||||||
|
|
||||||
|
using namespace std;
|
||||||
|
|
||||||
namespace gtsam {
|
namespace gtsam {
|
||||||
|
|
||||||
using namespace std;
|
// Instantiate base classes
|
||||||
|
template class BayesTreeCliqueBase<ISAM2Clique, GaussianFactorGraph>;
|
||||||
|
template class BayesTree<ISAM2Clique>;
|
||||||
|
|
||||||
static const bool disableReordering = false;
|
static const bool disableReordering = false;
|
||||||
static const double batchThreshold = 0.65;
|
static const double batchThreshold = 0.65;
|
||||||
|
|
@@ -113,14 +119,14 @@ std::string ISAM2Params::factorizationTranslator(const ISAM2Params::Factorizatio
|
||||||
}
|
}
|
||||||
|
|
||||||
/* ************************************************************************* */
|
/* ************************************************************************* */
|
||||||
void ISAM2Clique::setEliminationResult(const typename FactorGraphType::EliminationResult& eliminationResult)
|
void ISAM2Clique::setEliminationResult(const FactorGraphType::EliminationResult& eliminationResult)
|
||||||
{
|
{
|
||||||
conditional_ = eliminationResult.first;
|
conditional_ = eliminationResult.first;
|
||||||
cachedFactor_ = eliminationResult.second;
|
cachedFactor_ = eliminationResult.second;
|
||||||
// Compute gradient contribution
|
// Compute gradient contribution
|
||||||
gradientContribution_.resize(conditional_->cols() - 1);
|
gradientContribution_.resize(conditional_->cols() - 1);
|
||||||
// Rewrite -(R * P')'*d as -(d' * R * P')' for computational speed reasons
|
// Rewrite -(R * P')'*d as -(d' * R * P')' for computational speed reasons
|
||||||
gradientContribution_ = -conditional_->get_R().transpose() * conditional_->get_d(),
|
gradientContribution_ << -conditional_->get_R().transpose() * conditional_->get_d(),
|
||||||
-conditional_->get_S().transpose() * conditional_->get_d();
|
-conditional_->get_S().transpose() * conditional_->get_d();
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
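The switch from '=' to '<<' in setEliminationResult is not cosmetic: with '=', the trailing ', -S^T d' is parsed as the C++ comma operator and is discarded, so gradientContribution_ would end up holding only the R-block part. A small Eigen demonstration of the difference (illustrative sizes, not the GTSAM code):

#include <Eigen/Dense>
#include <iostream>

int main() {
  Eigen::Vector2d a(1.0, 2.0), b(3.0, 4.0);
  Eigen::VectorXd g(4);

  g << a, b;                            // comma initializer: stacks a over b, g = [1 2 3 4]'
  std::cout << g.size() << std::endl;   // 4

  g = a, b;                             // comma OPERATOR: parses as (g = a), b;
  std::cout << g.size() << std::endl;   // 2 -- b was silently discarded
  return 0;
}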
@@ -159,37 +165,11 @@ ISAM2::ISAM2():
|
||||||
}
|
}
|
||||||
|
|
||||||
/* ************************************************************************* */
|
/* ************************************************************************* */
|
||||||
ISAM2::ISAM2(const ISAM2& other) {
|
bool ISAM2::equals(const ISAM2& other, double tol) const {
|
||||||
*this = other;
|
return Base::equals(other, tol)
|
||||||
}
|
&& theta_.equals(other.theta_, tol) && variableIndex_.equals(other.variableIndex_, tol)
|
||||||
|
&& nonlinearFactors_.equals(other.nonlinearFactors_, tol)
|
||||||
/* ************************************************************************* */
|
&& fixedVariables_ == other.fixedVariables_;
|
||||||
ISAM2& ISAM2::operator=(const ISAM2& rhs)
|
|
||||||
{
|
|
||||||
// Copy BayesTree
|
|
||||||
this->Base::operator=(rhs);
|
|
||||||
|
|
||||||
// Copy our variables
|
|
||||||
theta_ = rhs.theta_;
|
|
||||||
variableIndex_ = rhs.variableIndex_;
|
|
||||||
delta_ = rhs.delta_;
|
|
||||||
deltaNewton_ = rhs.deltaNewton_;
|
|
||||||
RgProd_ = rhs.RgProd_;
|
|
||||||
deltaDoglegUptodate_ = rhs.deltaDoglegUptodate_;
|
|
||||||
deltaUptodate_ = rhs.deltaUptodate_;
|
|
||||||
deltaReplacedMask_ = rhs.deltaReplacedMask_;
|
|
||||||
nonlinearFactors_ = rhs.nonlinearFactors_;
|
|
||||||
linearFactors_ = rhs.linearFactors_;
|
|
||||||
params_ = rhs.params_;
|
|
||||||
doglegDelta_ = rhs.doglegDelta_;
|
|
||||||
lastAffectedVariableCount = rhs.lastAffectedVariableCount;
|
|
||||||
lastAffectedFactorCount = rhs.lastAffectedFactorCount;
|
|
||||||
lastAffectedCliqueCount = rhs.lastAffectedCliqueCount;
|
|
||||||
lastAffectedMarkedCount = rhs.lastAffectedMarkedCount;
|
|
||||||
lastBacksubVariableCount = rhs.lastBacksubVariableCount;
|
|
||||||
lastNnzTop = rhs.lastNnzTop;
|
|
||||||
|
|
||||||
return *this;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
/* ************************************************************************* */
|
/* ************************************************************************* */
|
||||||
|
|
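The explicit copy constructor and assignment operator are gone; the compiler-generated ones are sufficient, and a tolerance-based equals() takes over the role of comparing two estimators. It follows the usual pattern of forwarding to each member's own equals(); a hypothetical, simplified illustration of that pattern (not GTSAM types):

#include <cmath>

struct Pose {
  double x, y;
  bool equals(const Pose& o, double tol) const {
    return std::fabs(x - o.x) < tol && std::fabs(y - o.y) < tol;
  }
};
struct Estimator {
  Pose theta;
  int numFactors;
  bool equals(const Estimator& o, double tol = 1e-9) const {
    return theta.equals(o.theta, tol)     // tolerance-aware members
        && numFactors == o.numFactors;    // exact comparison for discrete members
  }
};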
@@ -337,12 +317,9 @@ boost::shared_ptr<FastSet<Key> > ISAM2::recalculate(const FastSet<Key>& markedKe
|
||||||
gttic(removetop);
|
gttic(removetop);
|
||||||
Cliques orphans;
|
Cliques orphans;
|
||||||
GaussianBayesNet affectedBayesNet;
|
GaussianBayesNet affectedBayesNet;
|
||||||
this->removeTop(markedKeys, affectedBayesNet, orphans);
|
this->removeTop(vector<Key>(markedKeys.begin(), markedKeys.end()), affectedBayesNet, orphans);
|
||||||
gttoc(removetop);
|
gttoc(removetop);
|
||||||
|
|
||||||
if(debug) affectedBayesNet.print("Removed top: ");
|
|
||||||
if(debug) orphans.print("Orphans: ");
|
|
||||||
|
|
||||||
// FactorGraph<GaussianFactor> factors(affectedBayesNet);
|
// FactorGraph<GaussianFactor> factors(affectedBayesNet);
|
||||||
// bug was here: we cannot reuse the original factors, because then the cached factors get messed up
|
// bug was here: we cannot reuse the original factors, because then the cached factors get messed up
|
||||||
// [all the necessary data is actually contained in the affectedBayesNet, including what was passed in from the boundaries,
|
// [all the necessary data is actually contained in the affectedBayesNet, including what was passed in from the boundaries,
|
||||||
|
|
@@ -401,14 +378,14 @@ boost::shared_ptr<FastSet<Key> > ISAM2::recalculate(const FastSet<Key>& markedKe
|
||||||
gttoc(linearize);
|
gttoc(linearize);
|
||||||
|
|
||||||
gttic(eliminate);
|
gttic(eliminate);
|
||||||
ISAM2BayesTree bayesTree = *ISAM2JunctionTree(GaussianEliminationTree(linearized, variableIndex_, order))
|
ISAM2BayesTree::shared_ptr bayesTree = ISAM2JunctionTree(GaussianEliminationTree(linearized, variableIndex_, order))
|
||||||
.eliminate(params_.getEliminationFunction()).first;
|
.eliminate(params_.getEliminationFunction()).first;
|
||||||
gttoc(eliminate);
|
gttoc(eliminate);
|
||||||
|
|
||||||
gttic(insert);
|
gttic(insert);
|
||||||
this->clear();
|
this->clear();
|
||||||
BOOST_FOREACH(const sharedNode& root, bayesTree.roots())
|
this->roots_.insert(this->roots_.end(), bayesTree->roots().begin(), bayesTree->roots().end());
|
||||||
this->insertRoot(root);
|
this->nodes_.insert(bayesTree->nodes().begin(), bayesTree->nodes().end());
|
||||||
gttoc(insert);
|
gttoc(insert);
|
||||||
|
|
||||||
result.variablesReeliminated = affectedKeysSet->size();
|
result.variablesReeliminated = affectedKeysSet->size();
|
||||||
|
|
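After the batch elimination the new code no longer re-inserts root cliques one at a time; it splices the freshly built tree's root list and node index straight into the current object. A stand-alone sketch of that splice with plain STL containers (Tree, Clique and the key type are made-up stand-ins):

#include <list>
#include <map>
#include <boost/shared_ptr.hpp>

struct Clique {};                                   // stand-in for the ISAM2 clique
typedef boost::shared_ptr<Clique> sharedClique;

struct Tree {
  std::list<sharedClique> roots_;
  std::map<long, sharedClique> nodes_;              // key -> clique owning it as a frontal

  // Absorb another tree wholesale, as done after eliminate() above.
  void absorb(const Tree& other) {
    roots_.insert(roots_.end(), other.roots_.begin(), other.roots_.end());
    nodes_.insert(other.nodes_.begin(), other.nodes_.end());
  }
};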
@@ -469,6 +446,13 @@ boost::shared_ptr<FastSet<Key> > ISAM2::recalculate(const FastSet<Key>& markedKe
|
||||||
factors.push_back(cachedBoundary);
|
factors.push_back(cachedBoundary);
|
||||||
gttoc(cached);
|
gttoc(cached);
|
||||||
|
|
||||||
|
gttic(orphans);
|
||||||
|
// Add the orphaned subtrees
|
||||||
|
BOOST_FOREACH(const sharedClique& orphan, orphans)
|
||||||
|
factors += boost::make_shared<BayesTreeOrphanWrapper<Clique> >(orphan);
|
||||||
|
gttoc(orphans);
|
||||||
|
|
||||||
|
|
||||||
// END OF COPIED CODE
|
// END OF COPIED CODE
|
||||||
|
|
||||||
// 3. Re-order and eliminate the factor graph into a Bayes net (Algorithm [alg:eliminate]), and re-assemble into a new Bayes tree (Algorithm [alg:BayesTree])
|
// 3. Re-order and eliminate the factor graph into a Bayes net (Algorithm [alg:eliminate]), and re-assemble into a new Bayes tree (Algorithm [alg:BayesTree])
|
||||||
|
|
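The added block wraps every orphaned subtree in a BayesTreeOrphanWrapper and pushes it into the factor graph that is about to be re-eliminated, so each orphan is re-attached under whichever clique ends up owning its separator. A rough sketch of the idea only, with made-up simplified types (the real wrapper in gtsam differs in detail):

#include <vector>
#include <boost/shared_ptr.hpp>

struct OrphanClique { std::vector<long> separatorKeys; };   // hypothetical

// A "factor" that exposes the orphan's separator as its keys; it takes part in
// ordering and elimination through those keys only, carrying the subtree along.
struct OrphanWrapperFactor {
  boost::shared_ptr<OrphanClique> clique;
  explicit OrphanWrapperFactor(const boost::shared_ptr<OrphanClique>& c) : clique(c) {}
  const std::vector<long>& keys() const { return clique->separatorKeys; }
};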
@@ -482,38 +466,41 @@ boost::shared_ptr<FastSet<Key> > ISAM2::recalculate(const FastSet<Key>& markedKe
|
||||||
affectedKeysSet->insert(affectedKeys.begin(), affectedKeys.end());
|
affectedKeysSet->insert(affectedKeys.begin(), affectedKeys.end());
|
||||||
gttoc(list_to_set);
|
gttoc(list_to_set);
|
||||||
|
|
||||||
gttic(PartialSolve);
|
VariableIndex affectedFactorsVarIndex(factors);
|
||||||
Impl::ReorderingMode reorderingMode;
|
|
||||||
reorderingMode.nFullSystemVars = variableIndex_.size();
|
gttic(ordering_constraints);
|
||||||
reorderingMode.algorithm = Impl::ReorderingMode::COLAMD;
|
// Create ordering constraints
|
||||||
reorderingMode.constrain = Impl::ReorderingMode::CONSTRAIN_LAST;
|
FastMap<Key,int> constraintGroups;
|
||||||
if(constrainKeys) {
|
if(constrainKeys) {
|
||||||
reorderingMode.constrainedKeys = *constrainKeys;
|
constraintGroups = *constrainKeys;
|
||||||
} else {
|
} else {
|
||||||
reorderingMode.constrainedKeys = FastMap<Key,int>();
|
constraintGroups = FastMap<Key,int>();
|
||||||
|
const int group = observedKeys.size() < affectedFactorsVarIndex.size()
|
||||||
|
? 1 : 0;
|
||||||
BOOST_FOREACH(Key var, observedKeys)
|
BOOST_FOREACH(Key var, observedKeys)
|
||||||
reorderingMode.constrainedKeys->insert(make_pair(var, 1));
|
constraintGroups.insert(make_pair(var, group));
|
||||||
}
|
}
|
||||||
FastSet<Key> affectedUsedKeys = *affectedKeysSet; // Remove unused keys from the set we pass to PartialSolve
|
|
||||||
BOOST_FOREACH(Key unused, unusedIndices)
|
|
||||||
affectedUsedKeys.erase(unused);
|
|
||||||
// Remove unaffected keys from the constraints
|
// Remove unaffected keys from the constraints
|
||||||
FastMap<Key,int>::iterator iter = reorderingMode.constrainedKeys->begin();
|
for(FastMap<Key,int>::iterator iter = constraintGroups.begin(); iter != constraintGroups.end(); /* iterator advanced in the loop body */) {
|
||||||
while(iter != reorderingMode.constrainedKeys->end())
|
if(unusedIndices.exists(iter->first) || !affectedKeysSet->exists(iter->first))
|
||||||
if(affectedUsedKeys.find(iter->first) == affectedUsedKeys.end())
|
constraintGroups.erase(iter++);
|
||||||
reorderingMode.constrainedKeys->erase(iter++);
|
else
++iter;
}
|
||||||
else
|
gttoc(ordering_constraints);
|
||||||
++iter;
|
|
||||||
Impl::PartialSolveResult partialSolveResult =
|
// Generate ordering
|
||||||
Impl::PartialSolve(factors, affectedUsedKeys, reorderingMode, (params_.factorization == ISAM2Params::QR));
|
gttic(Ordering);
|
||||||
|
Ordering ordering = Ordering::COLAMDConstrained(affectedFactorsVarIndex, constraintGroups);
|
||||||
|
|
||||||
|
ISAM2BayesTree::shared_ptr bayesTree = ISAM2JunctionTree(GaussianEliminationTree(
|
||||||
|
factors, affectedFactorsVarIndex, ordering)).eliminate(params_.getEliminationFunction()).first;
|
||||||
gttoc(PartialSolve);
|
gttoc(Ordering); // the PartialSolve timer no longer exists on this path; close the Ordering timer opened above
|
||||||
|
|
||||||
gttoc(reorder_and_eliminate);
|
gttoc(reorder_and_eliminate);
|
||||||
|
|
||||||
gttic(reassemble);
|
gttic(reassemble);
|
||||||
if(partialSolveResult.bayesTree)
|
this->roots_.insert(this->roots_.end(), bayesTree->roots().begin(), bayesTree->roots().end());
|
||||||
BOOST_FOREACH(const sharedNode& root, *partialSolveResult.bayesTree.roots())
|
this->nodes_.insert(bayesTree->nodes().begin(), bayesTree->nodes().end());
|
||||||
this->insertRoot(root);
|
|
||||||
gttoc(reassemble);
|
gttoc(reassemble);
|
||||||
|
|
||||||
// 4. The orphans have already been inserted during elimination
|
// 4. The orphans have already been inserted during elimination
|
||||||
|
|
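The incremental path now builds a FastMap<Key,int> of constraint groups (the newly observed variables go in the higher group so COLAMD eliminates them last, keeping them near the root) and hands it to Ordering::COLAMDConstrained. A minimal sketch mirroring the calls that appear in the hunk above; the include paths and the key names are assumptions, not taken from this commit:

#include <utility>
#include <gtsam/base/FastMap.h>
#include <gtsam/inference/Key.h>
#include <gtsam/inference/Ordering.h>
#include <gtsam/inference/VariableIndex.h>

// Constrain the two most recently observed variables to be eliminated last.
gtsam::Ordering makeConstrainedOrdering(const gtsam::VariableIndex& affectedFactorsVarIndex,
                                        gtsam::Key newestPose, gtsam::Key newestLandmark) {
  gtsam::FastMap<gtsam::Key, int> constraintGroups;
  constraintGroups.insert(std::make_pair(newestPose, 1));      // group 1: eliminate last
  constraintGroups.insert(std::make_pair(newestLandmark, 1));  // unlisted keys default to group 0
  return gtsam::Ordering::COLAMDConstrained(affectedFactorsVarIndex, constraintGroups);
}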
@@ -524,7 +511,7 @@ boost::shared_ptr<FastSet<Key> > ISAM2::recalculate(const FastSet<Key>& markedKe
|
||||||
// Root clique variables for detailed results
|
// Root clique variables for detailed results
|
||||||
if(params_.enableDetailedResults)
|
if(params_.enableDetailedResults)
|
||||||
BOOST_FOREACH(const sharedNode& root, this->roots())
|
BOOST_FOREACH(const sharedNode& root, this->roots())
|
||||||
BOOST_FOREACH(Key var, *root)
|
BOOST_FOREACH(Key var, *root->conditional())
|
||||||
result.detail->variableStatus[var].inRootClique = true;
|
result.detail->variableStatus[var].inRootClique = true;
|
||||||
|
|
||||||
return affectedKeysSet;
|
return affectedKeysSet;
|
||||||
|
|
@@ -585,7 +572,7 @@ ISAM2Result ISAM2::update(
|
||||||
}
|
}
|
||||||
|
|
||||||
// Remove removed factors from the variable index so we do not attempt to relinearize them
|
// Remove removed factors from the variable index so we do not attempt to relinearize them
|
||||||
variableIndex_.remove(removeFactorIndices, removeFactors);
|
variableIndex_.remove(removeFactorIndices.begin(), removeFactorIndices.end(), removeFactors);
|
||||||
|
|
||||||
// Compute unused keys and indices
|
// Compute unused keys and indices
|
||||||
FastSet<Key> unusedKeys;
|
FastSet<Key> unusedKeys;
|
||||||
|
|
@@ -611,7 +598,7 @@ ISAM2Result ISAM2::update(
|
||||||
|
|
||||||
gttic(add_new_variables);
|
gttic(add_new_variables);
|
||||||
// 2. Initialize any new variables \Theta_{new} and add \Theta:=\Theta\cup\Theta_{new}.
|
// 2. Initialize any new variables \Theta_{new} and add \Theta:=\Theta\cup\Theta_{new}.
|
||||||
Impl::AddVariables(newTheta, theta_, delta_, deltaNewton_, RgProd_, deltaReplacedMask_);
|
Impl::AddVariables(newTheta, theta_, delta_, deltaNewton_, RgProd_);
|
||||||
// New keys for detailed results
|
// New keys for detailed results
|
||||||
if(params_.enableDetailedResults) {
|
if(params_.enableDetailedResults) {
|
||||||
BOOST_FOREACH(Key key, newTheta.keys()) { result.detail->variableStatus[key].isNew = true; } }
|
BOOST_FOREACH(Key key, newTheta.keys()) { result.detail->variableStatus[key].isNew = true; } }
|
||||||
|
|
@@ -743,9 +730,8 @@ ISAM2Result ISAM2::update(
|
||||||
replacedKeys = recalculate(markedKeys, relinKeys, observedKeys, unusedIndices, constrainedKeys, result);
|
replacedKeys = recalculate(markedKeys, relinKeys, observedKeys, unusedIndices, constrainedKeys, result);
|
||||||
|
|
||||||
// Update replaced keys mask (accumulates until back-substitution takes place)
|
// Update replaced keys mask (accumulates until back-substitution takes place)
|
||||||
if(replacedKeys) {
|
if(replacedKeys)
|
||||||
BOOST_FOREACH(const Key var, *replacedKeys) {
|
deltaReplacedMask_.insert(replacedKeys->begin(), replacedKeys->end());
|
||||||
deltaReplacedMask_[var] = true; } }
|
|
||||||
gttoc(recalculate);
|
gttoc(recalculate);
|
||||||
|
|
||||||
// After the top of the tree has been redone and may have index gaps from
|
// After the top of the tree has been redone and may have index gaps from
|
||||||
|
|
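With deltaReplacedMask_ now a FastSet<Key> instead of a per-index bool vector, accumulating the keys replaced by recalculate() until the next back-substitution is a single range insert. The same pattern with std::set standing in for FastSet<Key>:

#include <set>

// Accumulate "replaced" keys across updates; the set is cleared once back-substitution runs.
void accumulateReplaced(std::set<long>& deltaReplacedMask, const std::set<long>& replacedKeys) {
  deltaReplacedMask.insert(replacedKeys.begin(), replacedKeys.end());
}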
@@ -753,8 +739,8 @@ ISAM2Result ISAM2::update(
|
||||||
// in all data structures.
|
// in all data structures.
|
||||||
if(!unusedKeys.empty()) {
|
if(!unusedKeys.empty()) {
|
||||||
gttic(remove_variables);
|
gttic(remove_variables);
|
||||||
Impl::RemoveVariables(unusedKeys, root_, theta_, variableIndex_, delta_, deltaNewton_, RgProd_,
|
Impl::RemoveVariables(unusedKeys, roots_, theta_, variableIndex_, delta_, deltaNewton_, RgProd_,
|
||||||
deltaReplacedMask_, Base::nodes_, linearFactors_, fixedVariables_);
|
deltaReplacedMask_, Base::nodes_, fixedVariables_);
|
||||||
gttoc(remove_variables);
|
gttoc(remove_variables);
|
||||||
}
|
}
|
||||||
result.cliques = this->nodes().size();
|
result.cliques = this->nodes().size();
|
||||||
|
|
@@ -890,7 +876,8 @@ void ISAM2::marginalizeLeaves(const FastList<Key>& leafKeysList)
|
||||||
graph3.push_back(eliminationResult2.second);
|
graph3.push_back(eliminationResult2.second);
|
||||||
GaussianFactorGraph::EliminationResult eliminationResult3 =
|
GaussianFactorGraph::EliminationResult eliminationResult3 =
|
||||||
params_.getEliminationFunction()(graph3, Ordering(jPosition, clique->conditional()->endFrontals()));
|
params_.getEliminationFunction()(graph3, Ordering(jPosition, clique->conditional()->endFrontals()));
|
||||||
sharedClique newClique = boost::make_shared<Clique>(make_pair(eliminationResult3.first, clique->cachedFactor()));
|
sharedClique newClique = boost::make_shared<Clique>();
|
||||||
|
newClique->setEliminationResult(make_pair(eliminationResult3.first, clique->cachedFactor()));
|
||||||
|
|
||||||
// Add the marginalized clique to the BayesTree
|
// Add the marginalized clique to the BayesTree
|
||||||
this->addClique(newClique, parent);
|
this->addClique(newClique, parent);
|
||||||
|
|
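Rather than constructing the marginalized clique directly from a (conditional, cached factor) pair, the new code default-constructs it and routes the pair through setEliminationResult(), so derived quantities such as the gradient contribution are set up in one place. A sketch of that two-step pattern with hypothetical simplified types:

#include <utility>
#include <boost/make_shared.hpp>
#include <boost/shared_ptr.hpp>

struct Conditional {};  struct Factor {};            // stand-ins
typedef std::pair<boost::shared_ptr<Conditional>, boost::shared_ptr<Factor> > EliminationResult;

struct MarginalClique {
  boost::shared_ptr<Conditional> conditional_;
  boost::shared_ptr<Factor> cachedFactor_;
  void setEliminationResult(const EliminationResult& r) {
    conditional_ = r.first;
    cachedFactor_ = r.second;
    // ...any derived members (e.g. a gradient contribution) would be refreshed here,
    // which is why construction is funnelled through this single entry point.
  }
};

boost::shared_ptr<MarginalClique> makeMarginalClique(const EliminationResult& r) {
  boost::shared_ptr<MarginalClique> clique = boost::make_shared<MarginalClique>();
  clique->setEliminationResult(r);                   // construct, then install the result
  return clique;
}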
@@ -911,32 +898,32 @@ void ISAM2::marginalizeLeaves(const FastList<Key>& leafKeysList)
|
||||||
if(clique_factor.second)
|
if(clique_factor.second)
|
||||||
factorsToAdd.push_back(clique_factor.second);
|
factorsToAdd.push_back(clique_factor.second);
|
||||||
nonlinearFactors_.push_back(boost::make_shared<LinearContainerFactor>(
|
nonlinearFactors_.push_back(boost::make_shared<LinearContainerFactor>(
|
||||||
clique_factor.second, ordering_));
|
clique_factor.second));
|
||||||
if(params_.cacheLinearizedFactors)
|
if(params_.cacheLinearizedFactors)
|
||||||
linearFactors_.push_back(clique_factor.second);
|
linearFactors_.push_back(clique_factor.second);
|
||||||
BOOST_FOREACH(Index factorIndex, *clique_factor.second) {
|
BOOST_FOREACH(Key factorKey, *clique_factor.second) {
|
||||||
fixedVariables_.insert(ordering_.key(factorIndex)); }
|
fixedVariables_.insert(factorKey); }
|
||||||
}
|
}
|
||||||
variableIndex_.augment(factorsToAdd); // Augment the variable index
|
variableIndex_.augment(factorsToAdd); // Augment the variable index
|
||||||
|
|
||||||
// Remove the factors to remove that have been summarized in the newly-added marginal factors
|
// Remove the factors to remove that have been summarized in the newly-added marginal factors
|
||||||
FastSet<size_t> factorIndicesToRemove;
|
FastSet<size_t> factorIndicesToRemove;
|
||||||
BOOST_FOREACH(Index j, indices) {
|
BOOST_FOREACH(Key j, leafKeys) {
|
||||||
factorIndicesToRemove.insert(variableIndex_[j].begin(), variableIndex_[j].end()); }
|
factorIndicesToRemove.insert(variableIndex_[j].begin(), variableIndex_[j].end()); }
|
||||||
vector<size_t> removedFactorIndices;
|
vector<size_t> removedFactorIndices;
|
||||||
SymbolicFactorGraph removedFactors;
|
NonlinearFactorGraph removedFactors;
|
||||||
BOOST_FOREACH(size_t i, factorIndicesToRemove) {
|
BOOST_FOREACH(size_t i, factorIndicesToRemove) {
|
||||||
removedFactorIndices.push_back(i);
|
removedFactorIndices.push_back(i);
|
||||||
removedFactors.push_back(nonlinearFactors_[i]->symbolic(ordering_));
|
removedFactors.push_back(nonlinearFactors_[i]);
|
||||||
nonlinearFactors_.remove(i);
|
nonlinearFactors_.remove(i);
|
||||||
if(params_.cacheLinearizedFactors)
|
if(params_.cacheLinearizedFactors)
|
||||||
linearFactors_.remove(i);
|
linearFactors_.remove(i);
|
||||||
}
|
}
|
||||||
variableIndex_.remove(removedFactorIndices, removedFactors);
|
variableIndex_.remove(removedFactorIndices.begin(), removedFactorIndices.end(), removedFactors);
|
||||||
|
|
||||||
// Remove the marginalized variables
|
// Remove the marginalized variables
|
||||||
Impl::RemoveVariables(FastSet<Key>(leafKeys.begin(), leafKeys.end()), root_, theta_, variableIndex_, delta_, deltaNewton_, RgProd_,
|
Impl::RemoveVariables(FastSet<Key>(leafKeys.begin(), leafKeys.end()), roots_, theta_, variableIndex_, delta_, deltaNewton_, RgProd_,
|
||||||
deltaReplacedMask_, ordering_, nodes_, linearFactors_, fixedVariables_);
|
deltaReplacedMask_, nodes_, fixedVariables_);
|
||||||
}
|
}
|
||||||
|
|
||||||
/* ************************************************************************* */
|
/* ************************************************************************* */
|
||||||
|
|
@@ -948,7 +935,7 @@ void ISAM2::updateDelta(bool forceFullSolve) const {
|
||||||
boost::get<ISAM2GaussNewtonParams>(params_.optimizationParams);
|
boost::get<ISAM2GaussNewtonParams>(params_.optimizationParams);
|
||||||
const double effectiveWildfireThreshold = forceFullSolve ? 0.0 : gaussNewtonParams.wildfireThreshold;
|
const double effectiveWildfireThreshold = forceFullSolve ? 0.0 : gaussNewtonParams.wildfireThreshold;
|
||||||
gttic(Wildfire_update);
|
gttic(Wildfire_update);
|
||||||
lastBacksubVariableCount = Impl::UpdateDelta(this->root(), deltaReplacedMask_, delta_, effectiveWildfireThreshold);
|
lastBacksubVariableCount = Impl::UpdateDelta(roots_, deltaReplacedMask_, delta_, effectiveWildfireThreshold);
|
||||||
gttoc(Wildfire_update);
|
gttoc(Wildfire_update);
|
||||||
|
|
||||||
} else if(params_.optimizationParams.type() == typeid(ISAM2DoglegParams)) {
|
} else if(params_.optimizationParams.type() == typeid(ISAM2DoglegParams)) {
|
||||||
|
|
@@ -958,8 +945,11 @@ void ISAM2::updateDelta(bool forceFullSolve) const {
|
||||||
|
|
||||||
// Do one Dogleg iteration
|
// Do one Dogleg iteration
|
||||||
gttic(Dogleg_Iterate);
|
gttic(Dogleg_Iterate);
|
||||||
|
VectorValues dx_u = gtsam::optimizeGradientSearch(*this);
|
||||||
|
VectorValues dx_n = gtsam::optimize(*this);
|
||||||
DoglegOptimizerImpl::IterationResult doglegResult(DoglegOptimizerImpl::Iterate(
|
DoglegOptimizerImpl::IterationResult doglegResult(DoglegOptimizerImpl::Iterate(
|
||||||
*doglegDelta_, doglegParams.adaptationMode, *this, nonlinearFactors_, theta_, ordering_, nonlinearFactors_.error(theta_), doglegParams.verbose));
|
*doglegDelta_, doglegParams.adaptationMode, dx_u, dx_n, *this, nonlinearFactors_,
|
||||||
|
theta_, nonlinearFactors_.error(theta_), doglegParams.verbose));
|
||||||
gttoc(Dogleg_Iterate);
|
gttoc(Dogleg_Iterate);
|
||||||
|
|
||||||
gttic(Copy_dx_d);
|
gttic(Copy_dx_d);
|
||||||
|
|
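The dogleg branch now precomputes the steepest-descent point dx_u and the Gauss-Newton point dx_n and passes both to DoglegOptimizerImpl::Iterate instead of letting it derive them. The blend those two points feed is the classic dogleg step; a self-contained sketch of that blend (not the GTSAM implementation, which additionally adapts the trust-region radius from the gain ratio):

#include <cmath>
#include <Eigen/Dense>

// Classic dogleg step of radius delta, built from the steepest-descent point
// dx_u and the Gauss-Newton point dx_n.
Eigen::VectorXd doglegStep(const Eigen::VectorXd& dx_u, const Eigen::VectorXd& dx_n, double delta) {
  if (dx_n.norm() <= delta)
    return dx_n;                                 // full Gauss-Newton step fits in the trust region
  if (dx_u.norm() >= delta)
    return delta * dx_u.normalized();            // truncated steepest-descent step
  // Otherwise intersect the segment dx_u + tau*(dx_n - dx_u) with the sphere ||x|| = delta.
  const Eigen::VectorXd d = dx_n - dx_u;
  const double a = d.squaredNorm();
  const double b = 2.0 * dx_u.dot(d);
  const double c = dx_u.squaredNorm() - delta * delta;
  const double tau = (-b + std::sqrt(b * b - 4.0 * a * c)) / (2.0 * a);
  return dx_u + tau * d;
}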
@@ -974,8 +964,6 @@ void ISAM2::updateDelta(bool forceFullSolve) const {
|
||||||
|
|
||||||
/* ************************************************************************* */
|
/* ************************************************************************* */
|
||||||
Values ISAM2::calculateEstimate() const {
|
Values ISAM2::calculateEstimate() const {
|
||||||
// We use ExpmapMasked here instead of regular expmap because the former
|
|
||||||
// handles Permuted<VectorValues>
|
|
||||||
gttic(Copy_Values);
|
gttic(Copy_Values);
|
||||||
Values ret(theta_);
|
Values ret(theta_);
|
||||||
gttoc(Copy_Values);
|
gttoc(Copy_Values);
|
||||||
|
|
@@ -983,31 +971,25 @@ Values ISAM2::calculateEstimate() const {
|
||||||
const VectorValues& delta(getDelta());
|
const VectorValues& delta(getDelta());
|
||||||
gttoc(getDelta);
|
gttoc(getDelta);
|
||||||
gttic(Expmap);
|
gttic(Expmap);
|
||||||
vector<bool> mask(ordering_.size(), true);
|
ret = ret.retract(delta);
|
||||||
Impl::ExpmapMasked(ret, delta, ordering_, mask);
|
|
||||||
gttoc(Expmap);
|
gttoc(Expmap);
|
||||||
return ret;
|
return ret;
|
||||||
}
|
}
|
||||||
|
|
||||||
/* ************************************************************************* */
|
/* ************************************************************************* */
|
||||||
const Value& ISAM2::calculateEstimate(Key key) const {
|
const Value& ISAM2::calculateEstimate(Key key) const {
|
||||||
const Index index = getOrdering()[key];
|
const Vector& delta = getDelta()[key];
|
||||||
const Vector& delta = getDelta()[index];
|
|
||||||
return *theta_.at(key).retract_(delta);
|
return *theta_.at(key).retract_(delta);
|
||||||
}
|
}
|
||||||
|
|
||||||
/* ************************************************************************* */
|
/* ************************************************************************* */
|
||||||
Values ISAM2::calculateBestEstimate() const {
|
Values ISAM2::calculateBestEstimate() const {
|
||||||
VectorValues delta(theta_.dims(ordering_));
|
return theta_.retract(internal::linearAlgorithms::optimizeBayesTree(*this));
|
||||||
internal::optimizeInPlace<Base>(this->root(), delta);
|
|
||||||
return theta_.retract(delta, ordering_);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
/* ************************************************************************* */
|
/* ************************************************************************* */
|
||||||
Matrix ISAM2::marginalCovariance(Index key) const {
|
Matrix ISAM2::marginalCovariance(Key key) const {
|
||||||
return marginalFactor(ordering_[key],
|
return marginalFactor(key, params_.getEliminationFunction())->information().inverse();
|
||||||
params_.factorization == ISAM2Params::QR ? EliminateQR : EliminatePreferCholesky)
|
|
||||||
->information().inverse();
|
|
||||||
}
|
}
|
||||||
|
|
||||||
/* ************************************************************************* */
|
/* ************************************************************************* */
|
||||||
|
|
@@ -1018,11 +1000,14 @@ const VectorValues& ISAM2::getDelta() const {
|
||||||
}
|
}
|
||||||
|
|
||||||
/* ************************************************************************* */
|
/* ************************************************************************* */
|
||||||
|
double ISAM2::error(const VectorValues& x) const
|
||||||
|
{
|
||||||
|
return GaussianFactorGraph(*this).error(x);
|
||||||
|
}
|
||||||
|
|
||||||
|
/* ************************************************************************* */
|
||||||
VectorValues optimize(const ISAM2& isam) {
|
VectorValues optimize(const ISAM2& isam) {
|
||||||
gttic(allocateVectorValues);
|
VectorValues delta;
|
||||||
VectorValues delta = *allocateVectorValues(isam);
|
|
||||||
gttoc(allocateVectorValues);
|
|
||||||
optimizeInPlace(isam, delta);
|
optimizeInPlace(isam, delta);
|
||||||
return delta;
|
return delta;
|
||||||
}
|
}
|
||||||
|
|
@@ -1052,12 +1037,8 @@ void optimizeInPlace(const ISAM2& isam, VectorValues& delta) {
|
||||||
/* ************************************************************************* */
|
/* ************************************************************************* */
|
||||||
|
|
||||||
VectorValues optimizeGradientSearch(const ISAM2& isam) {
|
VectorValues optimizeGradientSearch(const ISAM2& isam) {
|
||||||
gttic(Allocate_VectorValues);
|
VectorValues grad;
|
||||||
VectorValues grad = *allocateVectorValues(isam);
|
|
||||||
gttoc(Allocate_VectorValues);
|
|
||||||
|
|
||||||
optimizeGradientSearchInPlace(isam, grad);
|
optimizeGradientSearchInPlace(isam, grad);
|
||||||
|
|
||||||
return grad;
|
return grad;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@@ -1086,35 +1067,38 @@ void optimizeGradientSearchInPlace(const ISAM2& isam, VectorValues& grad) {
|
||||||
|
|
||||||
gttic(Compute_minimizing_step_size);
|
gttic(Compute_minimizing_step_size);
|
||||||
// Compute minimizing step size
|
// Compute minimizing step size
|
||||||
double RgNormSq = isam.RgProd_.asVector().squaredNorm();
|
double RgNormSq = isam.RgProd_.vector().squaredNorm();
|
||||||
double step = -gradientSqNorm / RgNormSq;
|
double step = -gradientSqNorm / RgNormSq;
|
||||||
gttoc(Compute_minimizing_step_size);
|
gttoc(Compute_minimizing_step_size);
|
||||||
|
|
||||||
gttic(Compute_point);
|
gttic(Compute_point);
|
||||||
// Compute steepest descent point
|
// Compute steepest descent point
|
||||||
scal(step, grad);
|
grad *= step;
|
||||||
gttoc(Compute_point);
|
gttoc(Compute_point);
|
||||||
}
|
}
|
||||||
|
|
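The closed-form step above comes from a one-dimensional minimization along the gradient direction: for the quadratic 0.5*||R x - d||^2 the optimal scale of the gradient g is g'g / (Rg)'(Rg), and the code applies it (with the sign convention used here) by scaling grad in place. A compact sketch of the same arithmetic with dense Eigen types, where R is assumed to be the square-root information matrix:

#include <Eigen/Dense>

Eigen::VectorXd steepestDescentPoint(const Eigen::VectorXd& g, const Eigen::MatrixXd& R) {
  const double gradientSqNorm = g.squaredNorm();
  const double RgNormSq = (R * g).squaredNorm();
  const double step = -gradientSqNorm / RgNormSq;   // mirrors 'step' in the code above
  return step * g;                                  // the in-place version does grad *= step
}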
||||||
/* ************************************************************************* */
|
/* ************************************************************************* */
|
||||||
|
|
||||||
VectorValues gradient(const ISAM2& bayesTree, const VectorValues& x0) {
|
VectorValues gradient(const ISAM2& bayesTree, const VectorValues& x0) {
|
||||||
return gradient(FactorGraph<JacobianFactor>(bayesTree), x0);
|
return GaussianFactorGraph(bayesTree).gradient(x0);
|
||||||
}
|
}
|
||||||
|
|
||||||
/* ************************************************************************* */
|
/* ************************************************************************* */
|
||||||
static void gradientAtZeroTreeAdder(const boost::shared_ptr<ISAM2Clique>& root, VectorValues& g) {
|
static void gradientAtZeroTreeAdder(const boost::shared_ptr<ISAM2Clique>& root, VectorValues& g) {
|
||||||
// Loop through variables in each clique, adding contributions
|
// Loop through variables in each clique, adding contributions
|
||||||
int variablePosition = 0;
|
DenseIndex variablePosition = 0;
|
||||||
for(GaussianConditional::const_iterator jit = root->conditional()->begin(); jit != root->conditional()->end(); ++jit) {
|
for(GaussianConditional::const_iterator jit = root->conditional()->begin(); jit != root->conditional()->end(); ++jit) {
|
||||||
const int dim = root->conditional()->dim(jit);
|
const DenseIndex dim = root->conditional()->getDim(jit);
|
||||||
g[*jit] += root->gradientContribution().segment(variablePosition, dim);
|
pair<VectorValues::iterator, bool> pos_ins =
|
||||||
|
g.tryInsert(*jit, root->gradientContribution().segment(variablePosition, dim));
|
||||||
|
if(!pos_ins.second)
|
||||||
|
pos_ins.first->second += root->gradientContribution().segment(variablePosition, dim);
|
||||||
variablePosition += dim;
|
variablePosition += dim;
|
||||||
}
|
}
|
||||||
|
|
||||||
// Recursively add contributions from children
|
// Recursively add contributions from children
|
||||||
typedef boost::shared_ptr<ISAM2Clique> sharedClique;
|
typedef boost::shared_ptr<ISAM2Clique> sharedClique;
|
||||||
BOOST_FOREACH(const sharedClique& child, root->children()) {
|
BOOST_FOREACH(const sharedClique& child, root->children) {
|
||||||
gradientAtZeroTreeAdder(child, g);
|
gradientAtZeroTreeAdder(child, g);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
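gradientAtZeroTreeAdder now uses VectorValues::tryInsert and adds into the existing entry when the key is already present, since a variable can receive gradient contributions from more than one clique. The insert-or-accumulate pattern, shown with a std::map standing in for VectorValues:

#include <map>
#include <utility>
#include <Eigen/Dense>

void addContribution(std::map<long, Eigen::VectorXd>& g, long key, const Eigen::VectorXd& contrib) {
  std::pair<std::map<long, Eigen::VectorXd>::iterator, bool> pos_ins =
      g.insert(std::make_pair(key, contrib));       // try to insert the segment
  if (!pos_ins.second)
    pos_ins.first->second += contrib;               // key already present: accumulate
}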
@@ -1125,8 +1109,8 @@ void gradientAtZero(const ISAM2& bayesTree, VectorValues& g) {
|
||||||
g.setZero();
|
g.setZero();
|
||||||
|
|
||||||
// Sum up contributions for each clique
|
// Sum up contributions for each clique
|
||||||
if(bayesTree.root())
|
BOOST_FOREACH(const ISAM2::sharedClique& root, bayesTree.roots())
|
||||||
gradientAtZeroTreeAdder(bayesTree.root(), g);
|
gradientAtZeroTreeAdder(root, g);
|
||||||
}
|
}
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
|
||||||
|
|
@@ -362,7 +362,7 @@ public:
|
||||||
Vector gradientContribution_;
|
Vector gradientContribution_;
|
||||||
|
|
||||||
/// Overridden to also store the remaining factor and gradient contribution
|
/// Overridden to also store the remaining factor and gradient contribution
|
||||||
void setEliminationResult(const typename FactorGraphType::EliminationResult& eliminationResult);
|
void setEliminationResult(const FactorGraphType::EliminationResult& eliminationResult);
|
||||||
|
|
||||||
/** Access the cached factor */
|
/** Access the cached factor */
|
||||||
Base::FactorType::shared_ptr& cachedFactor() { return cachedFactor_; }
|
Base::FactorType::shared_ptr& cachedFactor() { return cachedFactor_; }
|
||||||
|
|
@@ -397,7 +397,7 @@ private:
|
||||||
* estimate of all variables.
|
* estimate of all variables.
|
||||||
*
|
*
|
||||||
*/
|
*/
|
||||||
class ISAM2: public BayesTree<ISAM2Clique> {
|
class GTSAM_EXPORT ISAM2: public BayesTree<ISAM2Clique> {
|
||||||
|
|
||||||
protected:
|
protected:
|
||||||
|
|
||||||
|
|
@@ -440,7 +440,7 @@ protected:
|
||||||
* This is \c mutable because it is used internally to not update delta_
|
* This is \c mutable because it is used internally to not update delta_
|
||||||
* until it is needed.
|
* until it is needed.
|
||||||
*/
|
*/
|
||||||
mutable FastMap<Key,bool> deltaReplacedMask_; // TODO: Make sure accessed in the right way
|
mutable FastSet<Key> deltaReplacedMask_; // TODO: Make sure accessed in the right way
|
||||||
|
|
||||||
/** All original nonlinear factors are stored here to use during relinearization */
|
/** All original nonlinear factors are stored here to use during relinearization */
|
||||||
NonlinearFactorGraph nonlinearFactors_;
|
NonlinearFactorGraph nonlinearFactors_;
|
||||||
|
|
@@ -462,23 +462,19 @@ public:
|
||||||
|
|
||||||
typedef ISAM2 This; ///< This class
|
typedef ISAM2 This; ///< This class
|
||||||
typedef BayesTree<ISAM2Clique> Base; ///< The BayesTree base class
|
typedef BayesTree<ISAM2Clique> Base; ///< The BayesTree base class
|
||||||
|
|
||||||
/** Create an empty ISAM2 instance */
|
|
||||||
GTSAM_EXPORT ISAM2(const ISAM2Params& params);
|
|
||||||
|
|
||||||
/** Create an empty ISAM2 instance using the default set of parameters (see ISAM2Params) */
|
|
||||||
GTSAM_EXPORT ISAM2();
|
|
||||||
|
|
||||||
/** Copy constructor */
|
|
||||||
GTSAM_EXPORT ISAM2(const ISAM2& other);
|
|
||||||
|
|
||||||
/** Assignment operator */
|
|
||||||
GTSAM_EXPORT ISAM2& operator=(const ISAM2& rhs);
|
|
||||||
|
|
||||||
typedef Base::Clique Clique; ///< A clique
|
typedef Base::Clique Clique; ///< A clique
|
||||||
typedef Base::sharedClique sharedClique; ///< Shared pointer to a clique
|
typedef Base::sharedClique sharedClique; ///< Shared pointer to a clique
|
||||||
typedef Base::Cliques Cliques; ///< List of Clique typedef from base class
|
typedef Base::Cliques Cliques; ///< List of Clique typedef from base class
|
||||||
|
|
||||||
|
/** Create an empty ISAM2 instance */
|
||||||
|
ISAM2(const ISAM2Params& params);
|
||||||
|
|
||||||
|
/** Create an empty ISAM2 instance using the default set of parameters (see ISAM2Params) */
|
||||||
|
ISAM2();
|
||||||
|
|
||||||
|
/** Compare equality */
|
||||||
|
bool equals(const ISAM2& other, double tol = 1e-9) const;
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Add new factors, updating the solution and relinearizing as needed.
|
* Add new factors, updating the solution and relinearizing as needed.
|
||||||
*
|
*
|
||||||
|
|
@@ -507,7 +503,7 @@ public:
|
||||||
* of the size of the linear delta. This allows the provided keys to be reordered.
|
* of the size of the linear delta. This allows the provided keys to be reordered.
|
||||||
* @return An ISAM2Result struct containing information about the update
|
* @return An ISAM2Result struct containing information about the update
|
||||||
*/
|
*/
|
||||||
GTSAM_EXPORT ISAM2Result update(const NonlinearFactorGraph& newFactors = NonlinearFactorGraph(), const Values& newTheta = Values(),
|
ISAM2Result update(const NonlinearFactorGraph& newFactors = NonlinearFactorGraph(), const Values& newTheta = Values(),
|
||||||
const FastVector<size_t>& removeFactorIndices = FastVector<size_t>(),
|
const FastVector<size_t>& removeFactorIndices = FastVector<size_t>(),
|
||||||
const boost::optional<FastMap<Key,int> >& constrainedKeys = boost::none,
|
const boost::optional<FastMap<Key,int> >& constrainedKeys = boost::none,
|
||||||
const boost::optional<FastList<Key> >& noRelinKeys = boost::none,
|
const boost::optional<FastList<Key> >& noRelinKeys = boost::none,
|
||||||
|
|
@@ -522,16 +518,24 @@ public:
|
||||||
* fixed variables will include any involved with the marginalized variables
|
* fixed variables will include any involved with the marginalized variables
|
||||||
* in the original factors, and possibly additional ones due to fill-in.
|
* in the original factors, and possibly additional ones due to fill-in.
|
||||||
*/
|
*/
|
||||||
GTSAM_EXPORT void marginalizeLeaves(const FastList<Key>& leafKeys);
|
void marginalizeLeaves(const FastList<Key>& leafKeys);
|
||||||
|
|
||||||
/** Access the current linearization point */
|
/** Access the current linearization point */
|
||||||
const Values& getLinearizationPoint() const { return theta_; }
|
const Values& getLinearizationPoint() const { return theta_; }
|
||||||
|
|
||||||
|
/// Compute the current solution. This is the "standard" way to obtain the estimate; it
|
||||||
|
/// uses:
|
||||||
|
/// - Partial relinearization and backsubstitution using the thresholds provided in ISAM2Params.
|
||||||
|
/// - Dogleg trust-region step, if enabled in ISAM2Params.
|
||||||
|
/// - Equivalent to getLinearizationPoint().retract(getDelta())
|
||||||
|
/// The solution returned is in general not the same as that returned by getLinearizationPoint().
|
||||||
|
Values optimize() const;
|
||||||
|
|
||||||
/** Compute an estimate from the incomplete linear delta computed during the last update.
|
/** Compute an estimate from the incomplete linear delta computed during the last update.
|
||||||
* This delta is incomplete because it was not updated below wildfire_threshold. If only
|
* This delta is incomplete because it was not updated below wildfire_threshold. If only
|
||||||
* a single variable is needed, it is faster to call calculateEstimate(const KEY&).
|
* a single variable is needed, it is faster to call calculateEstimate(const KEY&).
|
||||||
*/
|
*/
|
||||||
GTSAM_EXPORT Values calculateEstimate() const;
|
Values calculateEstimate() const;
|
||||||
|
|
||||||
/** Compute an estimate for a single variable using its incomplete linear delta computed
|
/** Compute an estimate for a single variable using its incomplete linear delta computed
|
||||||
* during the last update. This is faster than calling the no-argument version of
|
* during the last update. This is faster than calling the no-argument version of
|
||||||
|
|
@@ -549,10 +553,10 @@ public:
|
||||||
* @param key
|
* @param key
|
||||||
* @return
|
* @return
|
||||||
*/
|
*/
|
||||||
GTSAM_EXPORT const Value& calculateEstimate(Key key) const;
|
const Value& calculateEstimate(Key key) const;
|
||||||
|
|
||||||
/** Return marginal on any variable as a covariance matrix */
|
/** Return marginal on any variable as a covariance matrix */
|
||||||
GTSAM_EXPORT Matrix marginalCovariance(Index key) const;
|
Matrix marginalCovariance(Key key) const;
|
||||||
|
|
||||||
/// @name Public members for non-typical usage
|
/// @name Public members for non-typical usage
|
||||||
/// @{
|
/// @{
|
||||||
|
|
@@ -562,16 +566,19 @@ public:
|
||||||
|
|
||||||
/** Compute an estimate using a complete delta computed by a full back-substitution.
|
/** Compute an estimate using a complete delta computed by a full back-substitution.
|
||||||
*/
|
*/
|
||||||
GTSAM_EXPORT Values calculateBestEstimate() const;
|
Values calculateBestEstimate() const;
|
||||||
|
|
||||||
/** Access the current delta, computed during the last call to update */
|
/** Access the current delta, computed during the last call to update */
|
||||||
GTSAM_EXPORT const VectorValues& getDelta() const;
|
const VectorValues& getDelta() const;
|
||||||
|
|
||||||
|
/** Compute the linear error */
|
||||||
|
double error(const VectorValues& x) const;
|
||||||
|
|
||||||
/** Access the set of nonlinear factors */
|
/** Access the set of nonlinear factors */
|
||||||
GTSAM_EXPORT const NonlinearFactorGraph& getFactorsUnsafe() const { return nonlinearFactors_; }
|
const NonlinearFactorGraph& getFactorsUnsafe() const { return nonlinearFactors_; }
|
||||||
|
|
||||||
/** Access the nonlinear variable index */
|
/** Access the nonlinear variable index */
|
||||||
GTSAM_EXPORT const VariableIndex& getVariableIndex() const { return variableIndex_; }
|
const VariableIndex& getVariableIndex() const { return variableIndex_; }
|
||||||
|
|
||||||
size_t lastAffectedVariableCount;
|
size_t lastAffectedVariableCount;
|
||||||
size_t lastAffectedFactorCount;
|
size_t lastAffectedFactorCount;
|
||||||
|
|
@@ -580,26 +587,26 @@ public:
|
||||||
mutable size_t lastBacksubVariableCount;
|
mutable size_t lastBacksubVariableCount;
|
||||||
size_t lastNnzTop;
|
size_t lastNnzTop;
|
||||||
|
|
||||||
GTSAM_EXPORT const ISAM2Params& params() const { return params_; }
|
const ISAM2Params& params() const { return params_; }
|
||||||
|
|
||||||
/** prints out clique statistics */
|
/** prints out clique statistics */
|
||||||
GTSAM_EXPORT void printStats() const { getCliqueData().getStats().print(); }
|
void printStats() const { getCliqueData().getStats().print(); }
|
||||||
|
|
||||||
/// @}
|
/// @}
|
||||||
|
|
||||||
private:
|
private:
|
||||||
|
|
||||||
GTSAM_EXPORT FastList<size_t> getAffectedFactors(const FastList<Key>& keys) const;
|
FastList<size_t> getAffectedFactors(const FastList<Key>& keys) const;
|
||||||
GTSAM_EXPORT GaussianFactorGraph::shared_ptr relinearizeAffectedFactors(const FastList<Key>& affectedKeys, const FastSet<Key>& relinKeys) const;
|
GaussianFactorGraph::shared_ptr relinearizeAffectedFactors(const FastList<Key>& affectedKeys, const FastSet<Key>& relinKeys) const;
|
||||||
GTSAM_EXPORT GaussianFactorGraph getCachedBoundaryFactors(Cliques& orphans);
|
GaussianFactorGraph getCachedBoundaryFactors(Cliques& orphans);
|
||||||
|
|
||||||
GTSAM_EXPORT boost::shared_ptr<FastSet<Key> > recalculate(const FastSet<Key>& markedKeys, const FastSet<Key>& relinKeys,
|
boost::shared_ptr<FastSet<Key> > recalculate(const FastSet<Key>& markedKeys, const FastSet<Key>& relinKeys,
|
||||||
const FastVector<Key>& observedKeys, const FastSet<Key>& unusedIndices, const boost::optional<FastMap<Key,int> >& constrainKeys, ISAM2Result& result);
|
const FastVector<Key>& observedKeys, const FastSet<Key>& unusedIndices, const boost::optional<FastMap<Key,int> >& constrainKeys, ISAM2Result& result);
|
||||||
// void linear_update(const GaussianFactorGraph& newFactors);
|
// void linear_update(const GaussianFactorGraph& newFactors);
|
||||||
GTSAM_EXPORT void updateDelta(bool forceFullSolve = false) const;
|
void updateDelta(bool forceFullSolve = false) const;
|
||||||
|
|
||||||
GTSAM_EXPORT friend void optimizeInPlace(const ISAM2&, VectorValues&);
|
friend GTSAM_EXPORT void optimizeInPlace(const ISAM2&, VectorValues&);
|
||||||
GTSAM_EXPORT friend void optimizeGradientSearchInPlace(const ISAM2&, VectorValues&);
|
friend GTSAM_EXPORT void optimizeGradientSearchInPlace(const ISAM2&, VectorValues&);
|
||||||
|
|
||||||
}; // ISAM2
|
}; // ISAM2
|
||||||
|
|
||||||
|
|
@@ -621,12 +628,12 @@ GTSAM_EXPORT void optimizeInPlace(const ISAM2& isam, VectorValues& delta);
|
||||||
/// variables are contained in the subtree.
|
/// variables are contained in the subtree.
|
||||||
/// @return The number of variables that were solved for
|
/// @return The number of variables that were solved for
|
||||||
template<class CLIQUE>
|
template<class CLIQUE>
|
||||||
int optimizeWildfire(const boost::shared_ptr<CLIQUE>& root,
|
size_t optimizeWildfire(const boost::shared_ptr<CLIQUE>& root,
|
||||||
double threshold, const std::vector<bool>& replaced, VectorValues& delta);
|
double threshold, const FastSet<Key>& replaced, VectorValues& delta);
|
||||||
|
|
||||||
template<class CLIQUE>
|
template<class CLIQUE>
|
||||||
int optimizeWildfireNonRecursive(const boost::shared_ptr<CLIQUE>& root,
|
size_t optimizeWildfireNonRecursive(const boost::shared_ptr<CLIQUE>& root,
|
||||||
double threshold, const std::vector<bool>& replaced, VectorValues& delta);
|
double threshold, const FastSet<Key>& replaced, VectorValues& delta);
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Optimize along the gradient direction, with a closed-form computation to
|
* Optimize along the gradient direction, with a closed-form computation to
|
||||||
|
|
|
||||||
|
|
@@ -6,11 +6,10 @@
|
||||||
|
|
||||||
#include <CppUnitLite/TestHarness.h>
|
#include <CppUnitLite/TestHarness.h>
|
||||||
|
|
||||||
#if 0
|
|
||||||
|
|
||||||
#include <gtsam/base/debug.h>
|
#include <gtsam/base/debug.h>
|
||||||
#include <gtsam/base/TestableAssertions.h>
|
#include <gtsam/base/TestableAssertions.h>
|
||||||
#include <gtsam/base/LieVector.h>
|
#include <gtsam/base/LieVector.h>
|
||||||
|
#include <gtsam/base/treeTraversal-inst.h>
|
||||||
#include <gtsam/geometry/Point2.h>
|
#include <gtsam/geometry/Point2.h>
|
||||||
#include <gtsam/geometry/Pose2.h>
|
#include <gtsam/geometry/Pose2.h>
|
||||||
#include <gtsam/inference/Ordering.h>
|
#include <gtsam/inference/Ordering.h>
|
||||||
|
|
@@ -27,9 +26,10 @@
|
||||||
#include <tests/smallExample.h>
|
#include <tests/smallExample.h>
|
||||||
|
|
||||||
#include <boost/foreach.hpp>
|
#include <boost/foreach.hpp>
|
||||||
#include <boost/assign/std/list.hpp> // for operator +=
|
#include <boost/assign/list_of.hpp>
|
||||||
#include <boost/assign.hpp>
|
|
||||||
using namespace boost::assign;
|
using namespace boost::assign;
|
||||||
|
#include <boost/range/adaptor/map.hpp>
|
||||||
|
namespace br { using namespace boost::adaptors; using namespace boost::range; }
|
||||||
|
|
||||||
using namespace std;
|
using namespace std;
|
||||||
using namespace gtsam;
|
using namespace gtsam;
|
||||||
|
|
@@ -37,6 +37,8 @@ using boost::shared_ptr;
|
||||||
|
|
||||||
const double tol = 1e-4;
|
const double tol = 1e-4;
|
||||||
|
|
||||||
|
#define TEST TEST_UNSAFE
|
||||||
|
|
||||||
// SETDEBUG("ISAM2 update", true);
|
// SETDEBUG("ISAM2 update", true);
|
||||||
// SETDEBUG("ISAM2 update verbose", true);
|
// SETDEBUG("ISAM2 update verbose", true);
|
||||||
// SETDEBUG("ISAM2 recalculate", true);
|
// SETDEBUG("ISAM2 recalculate", true);
|
||||||
|
|
@@ -48,7 +50,8 @@ SharedDiagonal brNoise = noiseModel::Diagonal::Sigmas(Vector_(2, M_PI/100.0, 0.1
|
||||||
ISAM2 createSlamlikeISAM2(
|
ISAM2 createSlamlikeISAM2(
|
||||||
boost::optional<Values&> init_values = boost::none,
|
boost::optional<Values&> init_values = boost::none,
|
||||||
boost::optional<NonlinearFactorGraph&> full_graph = boost::none,
|
boost::optional<NonlinearFactorGraph&> full_graph = boost::none,
|
||||||
const ISAM2Params& params = ISAM2Params(ISAM2GaussNewtonParams(0.001), 0.0, 0, false, true)) {
|
const ISAM2Params& params = ISAM2Params(ISAM2GaussNewtonParams(0.001), 0.0, 0, false, true),
|
||||||
|
size_t maxPoses = 10) {
|
||||||
|
|
||||||
// These variables will be reused and accumulate factors and values
|
// These variables will be reused and accumulate factors and values
|
||||||
ISAM2 isam(params);
|
ISAM2 isam(params);
|
||||||
|
|
@@ -61,7 +64,7 @@ ISAM2 createSlamlikeISAM2(
|
||||||
// Add a prior at time 0 and update isam
|
// Add a prior at time 0 and update isam
|
||||||
{
|
{
|
||||||
NonlinearFactorGraph newfactors;
|
NonlinearFactorGraph newfactors;
|
||||||
newfactors.add(PriorFactor<Pose2>(0, Pose2(0.0, 0.0, 0.0), odoNoise));
|
newfactors += PriorFactor<Pose2>(0, Pose2(0.0, 0.0, 0.0), odoNoise);
|
||||||
fullgraph.push_back(newfactors);
|
fullgraph.push_back(newfactors);
|
||||||
|
|
||||||
Values init;
|
Values init;
|
||||||
|
|
@@ -71,10 +74,13 @@ ISAM2 createSlamlikeISAM2(
|
||||||
isam.update(newfactors, init);
|
isam.update(newfactors, init);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
if(i > maxPoses)
|
||||||
|
goto done;
|
||||||
|
|
||||||
// Add odometry from time 0 to time 5
|
// Add odometry from time 0 to time 5
|
||||||
for( ; i<5; ++i) {
|
for( ; i<5; ++i) {
|
||||||
NonlinearFactorGraph newfactors;
|
NonlinearFactorGraph newfactors;
|
||||||
newfactors.add(BetweenFactor<Pose2>(i, i+1, Pose2(1.0, 0.0, 0.0), odoNoise));
|
newfactors += BetweenFactor<Pose2>(i, i+1, Pose2(1.0, 0.0, 0.0), odoNoise);
|
||||||
fullgraph.push_back(newfactors);
|
fullgraph.push_back(newfactors);
|
||||||
|
|
||||||
Values init;
|
Values init;
|
||||||
|
|
@@ -82,14 +88,17 @@ ISAM2 createSlamlikeISAM2(
|
||||||
fullinit.insert((i+1), Pose2(double(i+1)+0.1, -0.1, 0.01));
|
fullinit.insert((i+1), Pose2(double(i+1)+0.1, -0.1, 0.01));
|
||||||
|
|
||||||
isam.update(newfactors, init);
|
isam.update(newfactors, init);
|
||||||
|
|
||||||
|
if(i > maxPoses)
|
||||||
|
goto done;
|
||||||
}
|
}
|
||||||
|
|
||||||
// Add odometry from time 5 to 6 and landmark measurement at time 5
|
// Add odometry from time 5 to 6 and landmark measurement at time 5
|
||||||
{
|
{
|
||||||
NonlinearFactorGraph newfactors;
|
NonlinearFactorGraph newfactors;
|
||||||
newfactors.add(BetweenFactor<Pose2>(i, i+1, Pose2(1.0, 0.0, 0.0), odoNoise));
|
newfactors += BetweenFactor<Pose2>(i, i+1, Pose2(1.0, 0.0, 0.0), odoNoise);
|
||||||
newfactors.add(BearingRangeFactor<Pose2,Point2>(i, 100, Rot2::fromAngle(M_PI/4.0), 5.0, brNoise));
|
newfactors += BearingRangeFactor<Pose2,Point2>(i, 100, Rot2::fromAngle(M_PI/4.0), 5.0, brNoise);
|
||||||
newfactors.add(BearingRangeFactor<Pose2,Point2>(i, 101, Rot2::fromAngle(-M_PI/4.0), 5.0, brNoise));
|
newfactors += BearingRangeFactor<Pose2,Point2>(i, 101, Rot2::fromAngle(-M_PI/4.0), 5.0, brNoise);
|
||||||
fullgraph.push_back(newfactors);
|
fullgraph.push_back(newfactors);
|
||||||
|
|
||||||
Values init;
|
Values init;
|
||||||
|
|
@@ -104,10 +113,13 @@ ISAM2 createSlamlikeISAM2(
|
||||||
++ i;
|
++ i;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
if(i > maxPoses)
|
||||||
|
goto done;
|
||||||
|
|
||||||
// Add odometry from time 6 to time 10
|
// Add odometry from time 6 to time 10
|
||||||
for( ; i<10; ++i) {
|
for( ; i<10; ++i) {
|
||||||
NonlinearFactorGraph newfactors;
|
NonlinearFactorGraph newfactors;
|
||||||
newfactors.add(BetweenFactor<Pose2>(i, i+1, Pose2(1.0, 0.0, 0.0), odoNoise));
|
newfactors += BetweenFactor<Pose2>(i, i+1, Pose2(1.0, 0.0, 0.0), odoNoise);
|
||||||
fullgraph.push_back(newfactors);
|
fullgraph.push_back(newfactors);
|
||||||
|
|
||||||
Values init;
|
Values init;
|
||||||
|
|
@@ -115,14 +127,17 @@ ISAM2 createSlamlikeISAM2(
|
||||||
fullinit.insert((i+1), Pose2(double(i+1)+0.1, -0.1, 0.01));
|
fullinit.insert((i+1), Pose2(double(i+1)+0.1, -0.1, 0.01));
|
||||||
|
|
||||||
isam.update(newfactors, init);
|
isam.update(newfactors, init);
|
||||||
|
|
||||||
|
if(i > maxPoses)
|
||||||
|
goto done;
|
||||||
}
|
}
|
||||||
|
|
||||||
// Add odometry from time 10 to 11 and landmark measurement at time 10
|
// Add odometry from time 10 to 11 and landmark measurement at time 10
|
||||||
{
|
{
|
||||||
NonlinearFactorGraph newfactors;
|
NonlinearFactorGraph newfactors;
|
||||||
newfactors.add(BetweenFactor<Pose2>(i, i+1, Pose2(1.0, 0.0, 0.0), odoNoise));
|
newfactors += BetweenFactor<Pose2>(i, i+1, Pose2(1.0, 0.0, 0.0), odoNoise);
|
||||||
newfactors.add(BearingRangeFactor<Pose2,Point2>(i, 100, Rot2::fromAngle(M_PI/4.0 + M_PI/16.0), 4.5, brNoise));
|
newfactors += BearingRangeFactor<Pose2,Point2>(i, 100, Rot2::fromAngle(M_PI/4.0 + M_PI/16.0), 4.5, brNoise);
|
||||||
newfactors.add(BearingRangeFactor<Pose2,Point2>(i, 101, Rot2::fromAngle(-M_PI/4.0 + M_PI/16.0), 4.5, brNoise));
|
newfactors += BearingRangeFactor<Pose2,Point2>(i, 101, Rot2::fromAngle(-M_PI/4.0 + M_PI/16.0), 4.5, brNoise);
|
||||||
fullgraph.push_back(newfactors);
|
fullgraph.push_back(newfactors);
|
||||||
|
|
||||||
Values init;
|
Values init;
|
||||||
|
|
@@ -133,6 +148,8 @@ ISAM2 createSlamlikeISAM2(
|
||||||
++ i;
|
++ i;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
done:
|
||||||
|
|
||||||
if (full_graph)
|
if (full_graph)
|
||||||
*full_graph = fullgraph;
|
*full_graph = fullgraph;
|
||||||
|
|
||||||
|
|
@@ -142,185 +159,6 @@ ISAM2 createSlamlikeISAM2(
|
||||||
return isam;
|
return isam;
|
||||||
}
|
}
|
||||||
|
|
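The fixture gains a maxPoses argument and bails out to done: once that many poses have been added, which lets individual tests build a truncated version of the same trajectory. A hypothetical usage within this test file (the test name and the pose count are made up; the parameters mirror the fixture's defaults above):

TEST(ISAM2, slamlikeFixtureTruncated) {
  Values fullinit;
  NonlinearFactorGraph fullgraph;
  // Stop the fixture after three poses.
  ISAM2 isam = createSlamlikeISAM2(fullinit, fullgraph,
      ISAM2Params(ISAM2GaussNewtonParams(0.001), 0.0, 0, false, true), 3);
  EXPECT(isam.getLinearizationPoint().size() > 0);   // the truncated fixture still produced variables
}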
||||||
-/* ************************************************************************* */
-TEST(ISAM2, ImplAddVariables) {
-
-  // Create initial state
-  Values theta;
-  theta.insert(0, Pose2(.1, .2, .3));
-  theta.insert(100, Point2(.4, .5));
-  Values newTheta;
-  newTheta.insert(1, Pose2(.6, .7, .8));
-
-  VectorValues delta;
-  delta.insert(0, Vector_(3, .1, .2, .3));
-  delta.insert(1, Vector_(2, .4, .5));
-
-  VectorValues deltaNewton;
-  deltaNewton.insert(0, Vector_(3, .1, .2, .3));
-  deltaNewton.insert(1, Vector_(2, .4, .5));
-
-  VectorValues deltaRg;
-  deltaRg.insert(0, Vector_(3, .1, .2, .3));
-  deltaRg.insert(1, Vector_(2, .4, .5));
-
-  vector<bool> replacedKeys(2, false);
-
-  Ordering ordering; ordering += 100, 0;
-
-  // Verify initial state
-  LONGS_EQUAL(0, ordering[100]);
-  LONGS_EQUAL(1, ordering[0]);
-  EXPECT(assert_equal(delta[0], delta[ordering[100]]));
-  EXPECT(assert_equal(delta[1], delta[ordering[0]]));
-
-  // Create expected state
-  Values thetaExpected;
-  thetaExpected.insert(0, Pose2(.1, .2, .3));
-  thetaExpected.insert(100, Point2(.4, .5));
-  thetaExpected.insert(1, Pose2(.6, .7, .8));
-
-  VectorValues deltaExpected;
-  deltaExpected.insert(0, Vector_(3, .1, .2, .3));
-  deltaExpected.insert(1, Vector_(2, .4, .5));
-  deltaExpected.insert(2, Vector_(3, 0.0, 0.0, 0.0));
-
-  VectorValues deltaNewtonExpected;
-  deltaNewtonExpected.insert(0, Vector_(3, .1, .2, .3));
-  deltaNewtonExpected.insert(1, Vector_(2, .4, .5));
-  deltaNewtonExpected.insert(2, Vector_(3, 0.0, 0.0, 0.0));
-
-  VectorValues deltaRgExpected;
-  deltaRgExpected.insert(0, Vector_(3, .1, .2, .3));
-  deltaRgExpected.insert(1, Vector_(2, .4, .5));
-  deltaRgExpected.insert(2, Vector_(3, 0.0, 0.0, 0.0));
-
-  vector<bool> replacedKeysExpected(3, false);
-
-  Ordering orderingExpected; orderingExpected += 100, 0, 1;
-
-  // Expand initial state
-  ISAM2::Impl::AddVariables(newTheta, theta, delta, deltaNewton, deltaRg, replacedKeys, ordering);
-
-  EXPECT(assert_equal(thetaExpected, theta));
-  EXPECT(assert_equal(deltaExpected, delta));
-  EXPECT(assert_equal(deltaNewtonExpected, deltaNewton));
-  EXPECT(assert_equal(deltaRgExpected, deltaRg));
-  EXPECT(assert_container_equality(replacedKeysExpected, replacedKeys));
-  EXPECT(assert_equal(orderingExpected, ordering));
-}
-
-/* ************************************************************************* */
-TEST(ISAM2, ImplRemoveVariables) {
-
-  // Create initial state
-  Values theta;
-  theta.insert(0, Pose2(.1, .2, .3));
-  theta.insert(1, Pose2(.6, .7, .8));
-  theta.insert(100, Point2(.4, .5));
-
-  SymbolicFactorGraph sfg;
-  sfg.push_back(boost::make_shared<IndexFactor>(Index(0), Index(2)));
-  sfg.push_back(boost::make_shared<IndexFactor>(Index(0), Index(1)));
-  VariableIndex variableIndex(sfg);
-
-  VectorValues delta;
-  delta.insert(0, Vector_(3, .1, .2, .3));
-  delta.insert(1, Vector_(3, .4, .5, .6));
-  delta.insert(2, Vector_(2, .7, .8));
-
-  VectorValues deltaNewton;
-  deltaNewton.insert(0, Vector_(3, .1, .2, .3));
-  deltaNewton.insert(1, Vector_(3, .4, .5, .6));
-  deltaNewton.insert(2, Vector_(2, .7, .8));
-
-  VectorValues deltaRg;
-  deltaRg.insert(0, Vector_(3, .1, .2, .3));
-  deltaRg.insert(1, Vector_(3, .4, .5, .6));
-  deltaRg.insert(2, Vector_(2, .7, .8));
-
-  vector<bool> replacedKeys(3, false);
-  replacedKeys[0] = true;
-  replacedKeys[1] = false;
-  replacedKeys[2] = true;
-
-  Ordering ordering; ordering += 100, 1, 0;
-
-  ISAM2::Nodes nodes(3);
-
-  // Verify initial state
-  LONGS_EQUAL(0, ordering[100]);
-  LONGS_EQUAL(1, ordering[1]);
-  LONGS_EQUAL(2, ordering[0]);
-
-  // Create expected state
-  Values thetaExpected;
-  thetaExpected.insert(0, Pose2(.1, .2, .3));
-  thetaExpected.insert(100, Point2(.4, .5));
-
-  SymbolicFactorGraph sfgRemoved;
-  sfgRemoved.push_back(boost::make_shared<IndexFactor>(Index(0), Index(1)));
-  sfgRemoved.push_back(SymbolicFactorGraph::sharedFactor()); // Add empty factor to keep factor indices consistent
-  VariableIndex variableIndexExpected(sfgRemoved);
-
-  VectorValues deltaExpected;
-  deltaExpected.insert(0, Vector_(3, .1, .2, .3));
-  deltaExpected.insert(1, Vector_(2, .7, .8));
-
-  VectorValues deltaNewtonExpected;
-  deltaNewtonExpected.insert(0, Vector_(3, .1, .2, .3));
-  deltaNewtonExpected.insert(1, Vector_(2, .7, .8));
-
-  VectorValues deltaRgExpected;
-  deltaRgExpected.insert(0, Vector_(3, .1, .2, .3));
-  deltaRgExpected.insert(1, Vector_(2, .7, .8));
-
-  vector<bool> replacedKeysExpected(2, true);
-
-  Ordering orderingExpected; orderingExpected += 100, 0;
-
-  ISAM2::Nodes nodesExpected(2);
-
-  // Reduce initial state
-  FastSet<Key> unusedKeys;
-  unusedKeys.insert(1);
-  vector<size_t> removedFactorsI; removedFactorsI.push_back(1);
-  SymbolicFactorGraph removedFactors; removedFactors.push_back(sfg[1]);
-  variableIndex.remove(removedFactorsI, removedFactors);
-  GaussianFactorGraph linearFactors;
-  FastSet<Key> fixedVariables;
-  ISAM2::Impl::RemoveVariables(unusedKeys, ISAM2::sharedClique(), theta, variableIndex, delta, deltaNewton, deltaRg,
-      replacedKeys, ordering, nodes, linearFactors, fixedVariables);
-
-  EXPECT(assert_equal(thetaExpected, theta));
-  EXPECT(assert_equal(variableIndexExpected, variableIndex));
-  EXPECT(assert_equal(deltaExpected, delta));
-  EXPECT(assert_equal(deltaNewtonExpected, deltaNewton));
-  EXPECT(assert_equal(deltaRgExpected, deltaRg));
-  EXPECT(assert_container_equality(replacedKeysExpected, replacedKeys));
-  EXPECT(assert_equal(orderingExpected, ordering));
-}
-
-/* ************************************************************************* */
-//TEST(ISAM2, IndicesFromFactors) {
-//
-// using namespace gtsam::planarSLAM;
-// typedef GaussianISAM2<Values>::Impl Impl;
-//
-// Ordering ordering; ordering += (0), (0), (1);
-// NonlinearFactorGraph graph;
-// graph.add(PriorFactor<Pose2>((0), Pose2(), noiseModel::Unit::Create(Pose2::dimension));
-// graph.addRange((0), (0), 1.0, noiseModel::Unit::Create(1));
-//
-// FastSet<Index> expected;
-// expected.insert(0);
-// expected.insert(1);
-//
-// FastSet<Index> actual = Impl::IndicesFromFactors(ordering, graph);
-//
-// EXPECT(assert_equal(expected, actual));
-//}
-
 /* ************************************************************************* */
 //TEST(ISAM2, CheckRelinearization) {
 //
@@ -355,37 +193,29 @@ TEST(ISAM2, ImplRemoveVariables) {
 //}
 
 /* ************************************************************************* */
-TEST(ISAM2, optimize2) {
-
-  // Create initialization
-  Values theta;
-  theta.insert((0), Pose2(0.01, 0.01, 0.01));
-
-  // Create conditional
-  Vector d(3); d << -0.1, -0.1, -0.31831;
-  Matrix R(3,3); R <<
-      10, 0.0, 0.0,
-      0.0, 10, 0.0,
-      0.0, 0.0, 31.8309886;
-  GaussianConditional::shared_ptr conditional(new GaussianConditional(0, d, R, Vector::Ones(3)));
-
-  // Create ordering
-  Ordering ordering; ordering += (0);
-
-  // Expected vector
-  VectorValues expected(1, 3);
-  conditional->solveInPlace(expected);
-
-  // Clique
-  ISAM2::sharedClique clique(
-      ISAM2::Clique::Create(make_pair(conditional,GaussianFactor::shared_ptr())));
-  VectorValues actual(theta.dims(ordering));
-  internal::optimizeInPlace<ISAM2::Base>(clique, actual);
-
-  // expected.print("expected: ");
-  // actual.print("actual: ");
-  EXPECT(assert_equal(expected, actual));
-}
+struct ConsistencyVisitor
+{
+  bool consistent;
+  const ISAM2& isam;
+  ConsistencyVisitor(const ISAM2& isam) :
+    consistent(true), isam(isam) {}
+  int operator()(const ISAM2::sharedClique& node, int& parentData)
+  {
+    if(std::find(isam.roots().begin(), isam.roots().end(), node) == isam.roots().end())
+    {
+      if(node->parent_.expired())
+        consistent = false;
+      if(std::find(node->parent()->children.begin(), node->parent()->children.end(), node) == node->parent()->children.end())
+        consistent = false;
+    }
+    BOOST_FOREACH(Key j, node->conditional()->frontals())
+    {
+      if(isam.nodes().at(j).get() != node.get())
+        consistent = false;
+    }
+    return 0;
+  }
+};
 
 /* ************************************************************************* */
 bool isam_check(const NonlinearFactorGraph& fullgraph, const Values& fullinit, const ISAM2& isam, Test& test, TestResult& result) {
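A minimal usage sketch (not part of the patch itself) of how the ConsistencyVisitor added above is driven; it assumes the same ISAM2/treeTraversal API that isam_check uses further down:

    ConsistencyVisitor visitor(isam);                  // visitor type defined in the hunk above
    int rootData = 0;                                  // per-clique data threaded through the traversal (unused here)
    treeTraversal::DepthFirstForest(isam, rootData, visitor);
    bool treeIsConsistent = visitor.consistent;        // false if any parent/child/nodes-index link disagrees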
@@ -394,31 +224,35 @@ bool isam_check(const NonlinearFactorGraph& fullgraph, const Values& fullinit, c
   const std::string name_ = test.getName();
 
   Values actual = isam.calculateEstimate();
-  Ordering ordering = isam.getOrdering(); // *fullgraph.orderingCOLAMD(fullinit).first;
-  GaussianFactorGraph linearized = *fullgraph.linearize(fullinit, ordering);
-  // linearized.print("Expected linearized: ");
-  GaussianBayesNet gbn = *GaussianSequentialSolver(linearized).eliminate();
-  // gbn.print("Expected bayesnet: ");
-  VectorValues delta = optimize(gbn);
-  Values expected = fullinit.retract(delta, ordering);
+  Values expected = fullinit.retract(fullgraph.linearize(fullinit)->optimize());
 
   bool isamEqual = assert_equal(expected, actual);
 
+  // Check information
+  ISAM2 expectedisam(isam.params());
+  expectedisam.update(fullgraph, fullinit);
+  bool isamTreeEqual = assert_equal(GaussianFactorGraph(expectedisam).augmentedHessian(), GaussianFactorGraph(isam).augmentedHessian());
+
+  // Check consistency
+  ConsistencyVisitor visitor(isam);
+  int data; // Unused
+  treeTraversal::DepthFirstForest(isam, data, visitor);
+  bool consistent = visitor.consistent;
+
   // The following two checks make sure that the cached gradients are maintained and used correctly
 
   // Check gradient at each node
   bool nodeGradientsOk = true;
   typedef ISAM2::sharedClique sharedClique;
-  BOOST_FOREACH(const sharedClique& clique, isam.nodes()) {
+  BOOST_FOREACH(const sharedClique& clique, isam.nodes() | br::map_values) {
     // Compute expected gradient
-    FactorGraph<JacobianFactor> jfg;
-    jfg.push_back(JacobianFactor::shared_ptr(new JacobianFactor(*clique->conditional())));
-    VectorValues expectedGradient(*allocateVectorValues(isam));
-    gradientAtZero(jfg, expectedGradient);
+    GaussianFactorGraph jfg;
+    jfg += clique->conditional();
+    VectorValues expectedGradient = jfg.gradientAtZero();
     // Compare with actual gradients
-    int variablePosition = 0;
+    DenseIndex variablePosition = 0;
     for(GaussianConditional::const_iterator jit = clique->conditional()->begin(); jit != clique->conditional()->end(); ++jit) {
-      const int dim = clique->conditional()->dim(jit);
+      const DenseIndex dim = clique->conditional()->getDim(jit);
       Vector actual = clique->gradientContribution().segment(variablePosition, dim);
       bool gradOk = assert_equal(expectedGradient[*jit], actual);
       EXPECT(gradOk);
@@ -431,17 +265,30 @@ bool isam_check(const NonlinearFactorGraph& fullgraph, const Values& fullinit, c
   }
 
   // Check gradient
-  VectorValues expectedGradient(*allocateVectorValues(isam));
-  gradientAtZero(FactorGraph<JacobianFactor>(isam), expectedGradient);
-  VectorValues expectedGradient2(gradient(FactorGraph<JacobianFactor>(isam), VectorValues::Zero(expectedGradient)));
-  VectorValues actualGradient(*allocateVectorValues(isam));
+  VectorValues expectedGradient = GaussianFactorGraph(isam).gradientAtZero();
+  VectorValues expectedGradient2 = GaussianFactorGraph(isam).gradient(VectorValues::Zero(expectedGradient));
+  VectorValues actualGradient;
   gradientAtZero(isam, actualGradient);
   bool expectedGradOk = assert_equal(expectedGradient2, expectedGradient);
   EXPECT(expectedGradOk);
   bool totalGradOk = assert_equal(expectedGradient, actualGradient);
   EXPECT(totalGradOk);
 
-  return nodeGradientsOk && expectedGradOk && totalGradOk && isamEqual;
+  return nodeGradientsOk && expectedGradOk && totalGradOk && isamEqual && isamTreeEqual && consistent;
+}
+
+/* ************************************************************************* */
+TEST(ISAM2, simple)
+{
+  for(size_t i = 0; i < 10; ++i) {
+    // These variables will be reused and accumulate factors and values
+    Values fullinit;
+    NonlinearFactorGraph fullgraph;
+    ISAM2 isam = createSlamlikeISAM2(fullinit, fullgraph, ISAM2Params(ISAM2GaussNewtonParams(0.0), 0.0, 0, false), i);
+
+    // Compare solutions
+    CHECK(isam_check(fullgraph, fullinit, isam, *this, result_));
+  }
 }
 
 /* ************************************************************************* */
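A compact sketch of the gradient check this hunk moves to, assuming the GaussianFactorGraph::gradientAtZero() and gradient() members used above:

    GaussianFactorGraph gfg(isam);                              // view the ISAM2 Bayes tree as a factor graph
    VectorValues g0 = gfg.gradientAtZero();                     // gradient of 0.5*|Ax-b|^2 evaluated at x = 0
    VectorValues g1 = gfg.gradient(VectorValues::Zero(g0));     // same quantity via the general gradient()
    EXPECT(assert_equal(g1, g0));                               // isam_check requires the two to agree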
@@ -505,8 +352,8 @@ TEST(ISAM2, clone) {
 
   // Modify original isam
   NonlinearFactorGraph factors;
-  factors.add(BetweenFactor<Pose2>(0, 10,
-      isam.calculateEstimate<Pose2>(0).between(isam.calculateEstimate<Pose2>(10)), noiseModel::Unit::Create(3)));
+  factors += BetweenFactor<Pose2>(0, 10,
+      isam.calculateEstimate<Pose2>(0).between(isam.calculateEstimate<Pose2>(10)), noiseModel::Unit::Create(3));
   isam.update(factors);
 
   CHECK(assert_equal(createSlamlikeISAM2(), clone2));
@@ -526,66 +373,6 @@ TEST(ISAM2, clone) {
   CHECK(assert_equal(ISAM2(), clone1));
 }
 
-/* ************************************************************************* */
-TEST(ISAM2, permute_cached) {
-  typedef boost::shared_ptr<ISAM2Clique> sharedISAM2Clique;
-
-  // Construct expected permuted BayesTree (variable 2 has been changed to 1)
-  BayesTree<GaussianConditional, ISAM2Clique> expected;
-  expected.insert(sharedISAM2Clique(new ISAM2Clique(make_pair(
-      boost::make_shared<GaussianConditional>(pair_list_of
-          (3, Matrix_(1,1,1.0))
-          (4, Matrix_(1,1,2.0)),
-          2, Vector_(1,1.0), Vector_(1,1.0)), // p(3,4)
-      HessianFactor::shared_ptr())))); // Cached: empty
-  expected.insert(sharedISAM2Clique(new ISAM2Clique(make_pair(
-      boost::make_shared<GaussianConditional>(pair_list_of
-          (2, Matrix_(1,1,1.0))
-          (3, Matrix_(1,1,2.0)),
-          1, Vector_(1,1.0), Vector_(1,1.0)), // p(2|3)
-      boost::make_shared<HessianFactor>(3, Matrix_(1,1,1.0), Vector_(1,1.0), 0.0))))); // Cached: p(3)
-  expected.insert(sharedISAM2Clique(new ISAM2Clique(make_pair(
-      boost::make_shared<GaussianConditional>(pair_list_of
-          (0, Matrix_(1,1,1.0))
-          (2, Matrix_(1,1,2.0)),
-          1, Vector_(1,1.0), Vector_(1,1.0)), // p(0|2)
-      boost::make_shared<HessianFactor>(1, Matrix_(1,1,1.0), Vector_(1,1.0), 0.0))))); // Cached: p(1)
-  // Change variable 2 to 1
-  expected.root()->children().front()->conditional()->keys()[0] = 1;
-  expected.root()->children().front()->children().front()->conditional()->keys()[1] = 1;
-
-  // Construct unpermuted BayesTree
-  BayesTree<GaussianConditional, ISAM2Clique> actual;
-  actual.insert(sharedISAM2Clique(new ISAM2Clique(make_pair(
-      boost::make_shared<GaussianConditional>(pair_list_of
-          (3, Matrix_(1,1,1.0))
-          (4, Matrix_(1,1,2.0)),
-          2, Vector_(1,1.0), Vector_(1,1.0)), // p(3,4)
-      HessianFactor::shared_ptr())))); // Cached: empty
-  actual.insert(sharedISAM2Clique(new ISAM2Clique(make_pair(
-      boost::make_shared<GaussianConditional>(pair_list_of
-          (2, Matrix_(1,1,1.0))
-          (3, Matrix_(1,1,2.0)),
-          1, Vector_(1,1.0), Vector_(1,1.0)), // p(2|3)
-      boost::make_shared<HessianFactor>(3, Matrix_(1,1,1.0), Vector_(1,1.0), 0.0))))); // Cached: p(3)
-  actual.insert(sharedISAM2Clique(new ISAM2Clique(make_pair(
-      boost::make_shared<GaussianConditional>(pair_list_of
-          (0, Matrix_(1,1,1.0))
-          (2, Matrix_(1,1,2.0)),
-          1, Vector_(1,1.0), Vector_(1,1.0)), // p(0|2)
-      boost::make_shared<HessianFactor>(2, Matrix_(1,1,1.0), Vector_(1,1.0), 0.0))))); // Cached: p(2)
-
-  // Create permutation that changes variable 2 -> 0
-  Permutation permutation = Permutation::Identity(5);
-  permutation[2] = 1;
-
-  // Permute BayesTree
-  actual.root()->permuteWithInverse(permutation);
-
-  // Check
-  EXPECT(assert_equal(expected, actual));
-}
-
 /* ************************************************************************* */
 TEST(ISAM2, removeFactors)
 {
@@ -650,8 +437,8 @@ TEST(ISAM2, swapFactors)
     fullgraph.remove(swap_idx);
 
     NonlinearFactorGraph swapfactors;
-    // swapfactors.add(BearingRange<Pose2,Point2>(10, 100, Rot2::fromAngle(M_PI/4.0 + M_PI/16.0), 4.5, brNoise); // original factor
-    swapfactors.add(BearingRangeFactor<Pose2,Point2>(10, 100, Rot2::fromAngle(M_PI/4.0 + M_PI/16.0), 5.0, brNoise));
+    // swapfactors += BearingRange<Pose2,Point2>(10, 100, Rot2::fromAngle(M_PI/4.0 + M_PI/16.0), 4.5, brNoise; // original factor
+    swapfactors += BearingRangeFactor<Pose2,Point2>(10, 100, Rot2::fromAngle(M_PI/4.0 + M_PI/16.0), 5.0, brNoise);
     fullgraph.push_back(swapfactors);
     isam.update(swapfactors, Values(), toRemove);
   }
@@ -662,28 +449,26 @@ TEST(ISAM2, swapFactors)
 
   // Check gradient at each node
   typedef ISAM2::sharedClique sharedClique;
-  BOOST_FOREACH(const sharedClique& clique, isam.nodes()) {
+  BOOST_FOREACH(const sharedClique& clique, isam.nodes() | br::map_values) {
     // Compute expected gradient
-    FactorGraph<JacobianFactor> jfg;
-    jfg.push_back(JacobianFactor::shared_ptr(new JacobianFactor(*clique->conditional())));
-    VectorValues expectedGradient(*allocateVectorValues(isam));
-    gradientAtZero(jfg, expectedGradient);
+    GaussianFactorGraph jfg;
+    jfg += clique->conditional();
+    VectorValues expectedGradient = jfg.gradientAtZero();
     // Compare with actual gradients
-    int variablePosition = 0;
+    DenseIndex variablePosition = 0;
     for(GaussianConditional::const_iterator jit = clique->conditional()->begin(); jit != clique->conditional()->end(); ++jit) {
-      const int dim = clique->conditional()->dim(jit);
+      const DenseIndex dim = clique->conditional()->getDim(jit);
       Vector actual = clique->gradientContribution().segment(variablePosition, dim);
       EXPECT(assert_equal(expectedGradient[*jit], actual));
       variablePosition += dim;
     }
-    EXPECT_LONGS_EQUAL(clique->gradientContribution().rows(), variablePosition);
+    EXPECT_LONGS_EQUAL((long)clique->gradientContribution().rows(), (long)variablePosition);
   }
 
   // Check gradient
-  VectorValues expectedGradient(*allocateVectorValues(isam));
-  gradientAtZero(FactorGraph<JacobianFactor>(isam), expectedGradient);
-  VectorValues expectedGradient2(gradient(FactorGraph<JacobianFactor>(isam), VectorValues::Zero(expectedGradient)));
-  VectorValues actualGradient(*allocateVectorValues(isam));
+  VectorValues expectedGradient = GaussianFactorGraph(isam).gradientAtZero();
+  VectorValues expectedGradient2 = GaussianFactorGraph(isam).gradient(VectorValues::Zero(expectedGradient));
+  VectorValues actualGradient;
   gradientAtZero(isam, actualGradient);
   EXPECT(assert_equal(expectedGradient2, expectedGradient));
   EXPECT(assert_equal(expectedGradient, actualGradient));
@@ -708,7 +493,7 @@ TEST(ISAM2, constrained_ordering)
   // Add a prior at time 0 and update isam
   {
     NonlinearFactorGraph newfactors;
-    newfactors.add(PriorFactor<Pose2>(0, Pose2(0.0, 0.0, 0.0), odoNoise));
+    newfactors += PriorFactor<Pose2>(0, Pose2(0.0, 0.0, 0.0), odoNoise);
     fullgraph.push_back(newfactors);
 
     Values init;
@@ -723,7 +508,7 @@ TEST(ISAM2, constrained_ordering)
   // Add odometry from time 0 to time 5
   for( ; i<5; ++i) {
     NonlinearFactorGraph newfactors;
-    newfactors.add(BetweenFactor<Pose2>(i, i+1, Pose2(1.0, 0.0, 0.0), odoNoise));
+    newfactors += BetweenFactor<Pose2>(i, i+1, Pose2(1.0, 0.0, 0.0), odoNoise);
     fullgraph.push_back(newfactors);
 
     Values init;
@@ -739,9 +524,9 @@ TEST(ISAM2, constrained_ordering)
   // Add odometry from time 5 to 6 and landmark measurement at time 5
   {
     NonlinearFactorGraph newfactors;
-    newfactors.add(BetweenFactor<Pose2>(i, i+1, Pose2(1.0, 0.0, 0.0), odoNoise));
-    newfactors.add(BearingRangeFactor<Pose2,Point2>(i, 100, Rot2::fromAngle(M_PI/4.0), 5.0, brNoise));
-    newfactors.add(BearingRangeFactor<Pose2,Point2>(i, 101, Rot2::fromAngle(-M_PI/4.0), 5.0, brNoise));
+    newfactors += BetweenFactor<Pose2>(i, i+1, Pose2(1.0, 0.0, 0.0), odoNoise);
+    newfactors += BearingRangeFactor<Pose2,Point2>(i, 100, Rot2::fromAngle(M_PI/4.0), 5.0, brNoise);
+    newfactors += BearingRangeFactor<Pose2,Point2>(i, 101, Rot2::fromAngle(-M_PI/4.0), 5.0, brNoise);
     fullgraph.push_back(newfactors);
 
     Values init;
@@ -759,7 +544,7 @@ TEST(ISAM2, constrained_ordering)
   // Add odometry from time 6 to time 10
   for( ; i<10; ++i) {
     NonlinearFactorGraph newfactors;
-    newfactors.add(BetweenFactor<Pose2>(i, i+1, Pose2(1.0, 0.0, 0.0), odoNoise));
+    newfactors += BetweenFactor<Pose2>(i, i+1, Pose2(1.0, 0.0, 0.0), odoNoise);
     fullgraph.push_back(newfactors);
 
     Values init;
@@ -772,9 +557,9 @@ TEST(ISAM2, constrained_ordering)
   // Add odometry from time 10 to 11 and landmark measurement at time 10
   {
     NonlinearFactorGraph newfactors;
-    newfactors.add(BetweenFactor<Pose2>(i, i+1, Pose2(1.0, 0.0, 0.0), odoNoise));
-    newfactors.add(BearingRangeFactor<Pose2,Point2>(i, 100, Rot2::fromAngle(M_PI/4.0 + M_PI/16.0), 4.5, brNoise));
-    newfactors.add(BearingRangeFactor<Pose2,Point2>(i, 101, Rot2::fromAngle(-M_PI/4.0 + M_PI/16.0), 4.5, brNoise));
+    newfactors += BetweenFactor<Pose2>(i, i+1, Pose2(1.0, 0.0, 0.0), odoNoise);
+    newfactors += BearingRangeFactor<Pose2,Point2>(i, 100, Rot2::fromAngle(M_PI/4.0 + M_PI/16.0), 4.5, brNoise);
+    newfactors += BearingRangeFactor<Pose2,Point2>(i, 101, Rot2::fromAngle(-M_PI/4.0 + M_PI/16.0), 4.5, brNoise);
    fullgraph.push_back(newfactors);
 
     Values init;
@@ -788,33 +573,28 @@ TEST(ISAM2, constrained_ordering)
   // Compare solutions
   EXPECT(isam_check(fullgraph, fullinit, isam, *this, result_));
 
-  // Check that x3 and x4 are last, but either can come before the other
-  EXPECT(isam.getOrdering()[(3)] == 12 && isam.getOrdering()[(4)] == 13);
-
   // Check gradient at each node
   typedef ISAM2::sharedClique sharedClique;
-  BOOST_FOREACH(const sharedClique& clique, isam.nodes()) {
+  BOOST_FOREACH(const sharedClique& clique, isam.nodes() | br::map_values) {
     // Compute expected gradient
-    FactorGraph<JacobianFactor> jfg;
-    jfg.push_back(JacobianFactor::shared_ptr(new JacobianFactor(*clique->conditional())));
-    VectorValues expectedGradient(*allocateVectorValues(isam));
-    gradientAtZero(jfg, expectedGradient);
+    GaussianFactorGraph jfg;
+    jfg += clique->conditional();
+    VectorValues expectedGradient = jfg.gradientAtZero();
     // Compare with actual gradients
-    int variablePosition = 0;
+    DenseIndex variablePosition = 0;
     for(GaussianConditional::const_iterator jit = clique->conditional()->begin(); jit != clique->conditional()->end(); ++jit) {
-      const int dim = clique->conditional()->dim(jit);
+      const DenseIndex dim = clique->conditional()->getDim(jit);
       Vector actual = clique->gradientContribution().segment(variablePosition, dim);
       EXPECT(assert_equal(expectedGradient[*jit], actual));
       variablePosition += dim;
     }
-    LONGS_EQUAL(clique->gradientContribution().rows(), variablePosition);
+    LONGS_EQUAL((long)clique->gradientContribution().rows(), (long)variablePosition);
   }
 
   // Check gradient
-  VectorValues expectedGradient(*allocateVectorValues(isam));
-  gradientAtZero(FactorGraph<JacobianFactor>(isam), expectedGradient);
-  VectorValues expectedGradient2(gradient(FactorGraph<JacobianFactor>(isam), VectorValues::Zero(expectedGradient)));
-  VectorValues actualGradient(*allocateVectorValues(isam));
+  VectorValues expectedGradient = GaussianFactorGraph(isam).gradientAtZero();
+  VectorValues expectedGradient2 = GaussianFactorGraph(isam).gradient(VectorValues::Zero(expectedGradient));
+  VectorValues actualGradient;
   gradientAtZero(isam, actualGradient);
   EXPECT(assert_equal(expectedGradient2, expectedGradient));
   EXPECT(assert_equal(expectedGradient, actualGradient));
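A short sketch of the factor-insertion style these hunks migrate to, with boost::assign-style operator+= replacing NonlinearFactorGraph::add (odoNoise and init are the objects already set up in the surrounding test):

    NonlinearFactorGraph newfactors;
    newfactors += PriorFactor<Pose2>(0, Pose2(0.0, 0.0, 0.0), odoNoise);        // was newfactors.add(PriorFactor<Pose2>(...))
    newfactors += BetweenFactor<Pose2>(0, 1, Pose2(1.0, 0.0, 0.0), odoNoise);   // was newfactors.add(BetweenFactor<Pose2>(...))
    isam.update(newfactors, init);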
@@ -838,16 +618,12 @@ namespace {
 bool checkMarginalizeLeaves(ISAM2& isam, const FastList<Key>& leafKeys) {
   Matrix expectedAugmentedHessian, expected3AugmentedHessian;
   vector<Index> toKeep;
-  const Index lastVar = isam.getOrdering().size() - 1;
-  for(Index i=0; i<=lastVar; ++i)
-    if(find(leafKeys.begin(), leafKeys.end(), isam.getOrdering().key(i)) == leafKeys.end())
-      toKeep.push_back(i);
+  BOOST_FOREACH(Key j, isam.getDelta() | br::map_keys)
+    if(find(leafKeys.begin(), leafKeys.end(), j) == leafKeys.end())
+      toKeep.push_back(j);
 
   // Calculate expected marginal from iSAM2 tree
-  GaussianFactorGraph isamAsGraph(isam);
-  GaussianSequentialSolver solver(isamAsGraph);
-  GaussianFactorGraph marginalgfg = *solver.jointFactorGraph(toKeep);
-  expectedAugmentedHessian = marginalgfg.augmentedHessian();
+  expectedAugmentedHessian = GaussianFactorGraph(isam).marginal(toKeep)->augmentedHessian();
 
   //// Calculate expected marginal from cached linear factors
   //assert(isam.params().cacheLinearizedFactors);
@@ -855,10 +631,8 @@ namespace {
   //expected2AugmentedHessian = solver2.jointFactorGraph(toKeep)->augmentedHessian();
 
   // Calculate expected marginal from original nonlinear factors
-  GaussianSequentialSolver solver3(
-      *isam.getFactorsUnsafe().linearize(isam.getLinearizationPoint(), isam.getOrdering()),
-      isam.params().factorization == ISAM2Params::QR);
-  expected3AugmentedHessian = solver3.jointFactorGraph(toKeep)->augmentedHessian();
+  expected3AugmentedHessian = isam.getFactorsUnsafe().linearize(isam.getLinearizationPoint())
+      ->marginal(toKeep)->augmentedHessian();
 
   // Do marginalization
   isam.marginalizeLeaves(leafKeys);
@@ -868,7 +642,7 @@ namespace {
   Matrix actualAugmentedHessian = actualMarginalGraph.augmentedHessian();
   //Matrix actual2AugmentedHessian = linearFactors_.augmentedHessian();
   Matrix actual3AugmentedHessian = isam.getFactorsUnsafe().linearize(
-      isam.getLinearizationPoint(), isam.getOrdering())->augmentedHessian();
+      isam.getLinearizationPoint())->augmentedHessian();
   assert(actualAugmentedHessian.unaryExpr(std::ptr_fun(&std::isfinite<double>)).all());
 
   // Check full marginalization
@@ -893,11 +667,11 @@ TEST(ISAM2, marginalizeLeaves1)
   ISAM2 isam;
 
   NonlinearFactorGraph factors;
-  factors.add(PriorFactor<LieVector>(0, LieVector(0.0), noiseModel::Unit::Create(1)));
+  factors += PriorFactor<LieVector>(0, LieVector(0.0), noiseModel::Unit::Create(1));
 
-  factors.add(BetweenFactor<LieVector>(0, 1, LieVector(0.0), noiseModel::Unit::Create(1)));
-  factors.add(BetweenFactor<LieVector>(1, 2, LieVector(0.0), noiseModel::Unit::Create(1)));
-  factors.add(BetweenFactor<LieVector>(0, 2, LieVector(0.0), noiseModel::Unit::Create(1)));
+  factors += BetweenFactor<LieVector>(0, 1, LieVector(0.0), noiseModel::Unit::Create(1));
+  factors += BetweenFactor<LieVector>(1, 2, LieVector(0.0), noiseModel::Unit::Create(1));
+  factors += BetweenFactor<LieVector>(0, 2, LieVector(0.0), noiseModel::Unit::Create(1));
 
   Values values;
   values.insert(0, LieVector(0.0));
@@ -911,8 +685,7 @@ TEST(ISAM2, marginalizeLeaves1)
 
   isam.update(factors, values, FastVector<size_t>(), constrainedKeys);
 
-  FastList<Key> leafKeys;
-  leafKeys.push_back(isam.getOrdering().key(0));
+  FastList<Key> leafKeys = list_of(0);
   EXPECT(checkMarginalizeLeaves(isam, leafKeys));
 }
@@ -922,12 +695,12 @@ TEST(ISAM2, marginalizeLeaves2)
   ISAM2 isam;
 
   NonlinearFactorGraph factors;
-  factors.add(PriorFactor<LieVector>(0, LieVector(0.0), noiseModel::Unit::Create(1)));
+  factors += PriorFactor<LieVector>(0, LieVector(0.0), noiseModel::Unit::Create(1));
 
-  factors.add(BetweenFactor<LieVector>(0, 1, LieVector(0.0), noiseModel::Unit::Create(1)));
-  factors.add(BetweenFactor<LieVector>(1, 2, LieVector(0.0), noiseModel::Unit::Create(1)));
-  factors.add(BetweenFactor<LieVector>(0, 2, LieVector(0.0), noiseModel::Unit::Create(1)));
-  factors.add(BetweenFactor<LieVector>(2, 3, LieVector(0.0), noiseModel::Unit::Create(1)));
+  factors += BetweenFactor<LieVector>(0, 1, LieVector(0.0), noiseModel::Unit::Create(1));
+  factors += BetweenFactor<LieVector>(1, 2, LieVector(0.0), noiseModel::Unit::Create(1));
+  factors += BetweenFactor<LieVector>(0, 2, LieVector(0.0), noiseModel::Unit::Create(1));
+  factors += BetweenFactor<LieVector>(2, 3, LieVector(0.0), noiseModel::Unit::Create(1));
 
   Values values;
   values.insert(0, LieVector(0.0));
@@ -943,8 +716,7 @@ TEST(ISAM2, marginalizeLeaves2)
 
   isam.update(factors, values, FastVector<size_t>(), constrainedKeys);
 
-  FastList<Key> leafKeys;
-  leafKeys.push_back(isam.getOrdering().key(0));
+  FastList<Key> leafKeys = list_of(0);
   EXPECT(checkMarginalizeLeaves(isam, leafKeys));
 }
@@ -954,17 +726,17 @@ TEST(ISAM2, marginalizeLeaves3)
   ISAM2 isam;
 
   NonlinearFactorGraph factors;
-  factors.add(PriorFactor<LieVector>(0, LieVector(0.0), noiseModel::Unit::Create(1)));
+  factors += PriorFactor<LieVector>(0, LieVector(0.0), noiseModel::Unit::Create(1));
 
-  factors.add(BetweenFactor<LieVector>(0, 1, LieVector(0.0), noiseModel::Unit::Create(1)));
-  factors.add(BetweenFactor<LieVector>(1, 2, LieVector(0.0), noiseModel::Unit::Create(1)));
-  factors.add(BetweenFactor<LieVector>(0, 2, LieVector(0.0), noiseModel::Unit::Create(1)));
+  factors += BetweenFactor<LieVector>(0, 1, LieVector(0.0), noiseModel::Unit::Create(1));
+  factors += BetweenFactor<LieVector>(1, 2, LieVector(0.0), noiseModel::Unit::Create(1));
+  factors += BetweenFactor<LieVector>(0, 2, LieVector(0.0), noiseModel::Unit::Create(1));
 
-  factors.add(BetweenFactor<LieVector>(2, 3, LieVector(0.0), noiseModel::Unit::Create(1)));
+  factors += BetweenFactor<LieVector>(2, 3, LieVector(0.0), noiseModel::Unit::Create(1));
 
-  factors.add(BetweenFactor<LieVector>(3, 4, LieVector(0.0), noiseModel::Unit::Create(1)));
-  factors.add(BetweenFactor<LieVector>(4, 5, LieVector(0.0), noiseModel::Unit::Create(1)));
-  factors.add(BetweenFactor<LieVector>(3, 5, LieVector(0.0), noiseModel::Unit::Create(1)));
+  factors += BetweenFactor<LieVector>(3, 4, LieVector(0.0), noiseModel::Unit::Create(1));
+  factors += BetweenFactor<LieVector>(4, 5, LieVector(0.0), noiseModel::Unit::Create(1));
+  factors += BetweenFactor<LieVector>(3, 5, LieVector(0.0), noiseModel::Unit::Create(1));
 
   Values values;
   values.insert(0, LieVector(0.0));
@@ -984,8 +756,7 @@ TEST(ISAM2, marginalizeLeaves3)
 
   isam.update(factors, values, FastVector<size_t>(), constrainedKeys);
 
-  FastList<Key> leafKeys;
-  leafKeys.push_back(isam.getOrdering().key(0));
+  FastList<Key> leafKeys = list_of(0);
   EXPECT(checkMarginalizeLeaves(isam, leafKeys));
 }
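A short sketch of the key-list construction the marginalizeLeaves tests switch to; keys are now written out directly with boost::assign::list_of instead of being translated through isam.getOrdering():

    FastList<Key> leafKeys = list_of(0);              // was: leafKeys.push_back(isam.getOrdering().key(0));
    EXPECT(checkMarginalizeLeaves(isam, leafKeys));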
@@ -995,9 +766,9 @@ TEST(ISAM2, marginalizeLeaves4)
   ISAM2 isam;
 
   NonlinearFactorGraph factors;
-  factors.add(PriorFactor<LieVector>(0, LieVector(0.0), noiseModel::Unit::Create(1)));
-  factors.add(BetweenFactor<LieVector>(0, 2, LieVector(0.0), noiseModel::Unit::Create(1)));
-  factors.add(BetweenFactor<LieVector>(1, 2, LieVector(0.0), noiseModel::Unit::Create(1)));
+  factors += PriorFactor<LieVector>(0, LieVector(0.0), noiseModel::Unit::Create(1));
+  factors += BetweenFactor<LieVector>(0, 2, LieVector(0.0), noiseModel::Unit::Create(1));
+  factors += BetweenFactor<LieVector>(1, 2, LieVector(0.0), noiseModel::Unit::Create(1));
 
   Values values;
   values.insert(0, LieVector(0.0));
@@ -1011,8 +782,7 @@ TEST(ISAM2, marginalizeLeaves4)
 
   isam.update(factors, values, FastVector<size_t>(), constrainedKeys);
 
-  FastList<Key> leafKeys;
-  leafKeys.push_back(isam.getOrdering().key(1));
+  FastList<Key> leafKeys = list_of(1);
   EXPECT(checkMarginalizeLeaves(isam, leafKeys));
 }
@@ -1023,8 +793,7 @@ TEST(ISAM2, marginalizeLeaves5)
   ISAM2 isam = createSlamlikeISAM2();
 
   // Marginalize
-  FastList<Key> marginalizeKeys;
-  marginalizeKeys.push_back(isam.getOrdering().key(0));
+  FastList<Key> marginalizeKeys = list_of(0);
   EXPECT(checkMarginalizeLeaves(isam, marginalizeKeys));
 }
@@ -1040,8 +809,6 @@ TEST(ISAM2, marginalCovariance)
   EXPECT(assert_equal(expected, actual));
 }
 
-#endif
-
 /* ************************************************************************* */
 int main() { TestResult tr; return TestRegistry::runAllTests(tr);}
 /* ************************************************************************* */