diff --git a/gtsam/base/BTree.h b/gtsam/base/BTree.h index e0bb4c4a3..2a40465a3 100644 --- a/gtsam/base/BTree.h +++ b/gtsam/base/BTree.h @@ -147,7 +147,7 @@ namespace gtsam { /** add a key-value pair */ BTree add(const KEY& x, const VALUE& d) const { - return add(make_pair(x, d)); + return add(std::make_pair(x, d)); } /** member predicate */ diff --git a/gtsam/base/DSF.h b/gtsam/base/DSF.h index ddc5b4836..9952f2e10 100644 --- a/gtsam/base/DSF.h +++ b/gtsam/base/DSF.h @@ -59,7 +59,7 @@ namespace gtsam { DSF(const std::set& keys) : Tree() { BOOST_FOREACH(const KEY& key, keys) *this = this->add(key, key); } // create a new singleton, does nothing if already exists - Self makeSet(const KEY& key) const { if (mem(key)) return *this; else return this->add(key, key); } + Self makeSet(const KEY& key) const { if (this->mem(key)) return *this; else return this->add(key, key); } // find the label of the set in which {key} lives Label findSet(const KEY& key) const { diff --git a/gtsam/inference/BayesNet-inl.h b/gtsam/inference/BayesNet-inl.h index df9e74717..7ff8fc076 100644 --- a/gtsam/inference/BayesNet-inl.h +++ b/gtsam/inference/BayesNet-inl.h @@ -18,7 +18,6 @@ #pragma once #include -#include #include #include diff --git a/gtsam/inference/BayesNet.h b/gtsam/inference/BayesNet.h index 7b9a85122..672d9a4e8 100644 --- a/gtsam/inference/BayesNet.h +++ b/gtsam/inference/BayesNet.h @@ -206,4 +206,6 @@ private: } }; // BayesNet -} /// namespace gtsam +} // namespace gtsam + +#include diff --git a/gtsam/inference/BayesTree-inl.h b/gtsam/inference/BayesTree-inl.h index fce5c3e19..1c696d3ef 100644 --- a/gtsam/inference/BayesTree-inl.h +++ b/gtsam/inference/BayesTree-inl.h @@ -24,7 +24,7 @@ #include #include #include -#include +#include #include #include @@ -42,93 +42,18 @@ namespace gtsam { using namespace std; - /* ************************************************************************* */ - template - void BayesTree::Clique::assertInvariants() const { -#ifndef NDEBUG - // We rely on the keys being sorted -// FastVector sortedUniqueKeys(conditional_->begin(), conditional_->end()); -// std::sort(sortedUniqueKeys.begin(), sortedUniqueKeys.end()); -// std::unique(sortedUniqueKeys.begin(), sortedUniqueKeys.end()); -// assert(sortedUniqueKeys.size() == conditional_->size() && -// std::equal(sortedUniqueKeys.begin(), sortedUniqueKeys.end(), conditional_->begin())); -#endif - } - /* ************************************************************************* */ - template - BayesTree::Clique::Clique(const sharedConditional& conditional) : conditional_(conditional) { - assertInvariants(); - } - - /* ************************************************************************* */ - template - void BayesTree::Clique::print(const string& s) const { - conditional_->print(s); - } - - /* ************************************************************************* */ - template - size_t BayesTree::Clique::treeSize() const { - size_t size = 1; - BOOST_FOREACH(const shared_ptr& child, children_) - size += child->treeSize(); - return size; - } - - /* ************************************************************************* */ - template - void BayesTree::Clique::printTree(const string& indent) const { - print(indent); - BOOST_FOREACH(const shared_ptr& child, children_) - child->printTree(indent+" "); - } - - /* ************************************************************************* */ - template - void BayesTree::Clique::permuteWithInverse(const Permutation& inversePermutation) { - 
conditional_->permuteWithInverse(inversePermutation); - if(cachedFactor_) cachedFactor_->permuteWithInverse(inversePermutation); - BOOST_FOREACH(const shared_ptr& child, children_) { - child->permuteWithInverse(inversePermutation); - } - assertInvariants(); - } - - /* ************************************************************************* */ - template - bool BayesTree::Clique::permuteSeparatorWithInverse(const Permutation& inversePermutation) { - bool changed = conditional_->permuteSeparatorWithInverse(inversePermutation); -#ifndef NDEBUG - if(!changed) { - BOOST_FOREACH(Index& separatorKey, conditional_->parents()) { assert(separatorKey == inversePermutation[separatorKey]); } - BOOST_FOREACH(const shared_ptr& child, children_) { - assert(child->permuteSeparatorWithInverse(inversePermutation) == false); - } - } -#endif - if(changed) { - if(cachedFactor_) cachedFactor_->permuteWithInverse(inversePermutation); - BOOST_FOREACH(const shared_ptr& child, children_) { - (void)child->permuteSeparatorWithInverse(inversePermutation); - } - } - assertInvariants(); - return changed; - } - - /* ************************************************************************* */ - template - typename BayesTree::CliqueData - BayesTree::getCliqueData() const { + template + typename BayesTree::CliqueData + BayesTree::getCliqueData() const { CliqueData data; getCliqueData(data, root_); return data; } - template - void BayesTree::getCliqueData(CliqueData& data, - BayesTree::sharedClique clique) const { + template + void BayesTree::getCliqueData(CliqueData& data, + BayesTree::sharedClique clique) const { data.conditionalSizes.push_back((*clique)->nrFrontals()); data.separatorSizes.push_back((*clique)->nrParents()); BOOST_FOREACH(sharedClique c, clique->children_) { @@ -137,8 +62,8 @@ namespace gtsam { } /* ************************************************************************* */ - template - void BayesTree::saveGraph(const std::string &s) const { + template + void BayesTree::saveGraph(const std::string &s) const { if (!root_.get()) throw invalid_argument("the root of bayes tree has not been initialized!"); ofstream of(s.c_str()); of<< "digraph G{\n"; @@ -147,9 +72,9 @@ namespace gtsam { of.close(); } - template - void BayesTree::saveGraph(ostream &s, - BayesTree::sharedClique clique, + template + void BayesTree::saveGraph(ostream &s, + BayesTree::sharedClique clique, int parentnum) const { static int num = 0; bool first = true; @@ -184,9 +109,9 @@ namespace gtsam { } - template - typename BayesTree::CliqueStats - BayesTree::CliqueData::getStats() const { + template + typename BayesTree::CliqueStats + BayesTree::CliqueData::getStats() const { CliqueStats stats; double sum = 0.0; @@ -211,179 +136,23 @@ namespace gtsam { } /* ************************************************************************* */ - // The shortcut density is a conditional P(S|R) of the separator of this - // clique on the root. We can compute it recursively from the parent shortcut - // P(Sp|R) as \int P(Fp|Sp) P(Sp|R), where Fp are the frontal nodes in p - /* ************************************************************************* */ - template - BayesNet BayesTree::Clique::shortcut(shared_ptr R, - Eliminate function) { - - static const bool debug = false; - - // A first base case is when this clique or its parent is the root, - // in which case we return an empty Bayes net. 
- - sharedClique parent(parent_.lock()); - - if (R.get()==this || parent==R) { - BayesNet empty; - return empty; - } - - // The root conditional - FactorGraph p_R(BayesNet(R->conditional())); - - // The parent clique has a CONDITIONAL for each frontal node in Fp - // so we can obtain P(Fp|Sp) in factor graph form - FactorGraph p_Fp_Sp(BayesNet(parent->conditional())); - - // If not the base case, obtain the parent shortcut P(Sp|R) as factors - FactorGraph p_Sp_R(parent->shortcut(R, function)); - - // now combine P(Cp|R) = P(Fp|Sp) * P(Sp|R) - FactorGraph p_Cp_R; - p_Cp_R.push_back(p_R); - p_Cp_R.push_back(p_Fp_Sp); - p_Cp_R.push_back(p_Sp_R); - - // Eliminate into a Bayes net with ordering designed to integrate out - // any variables not in *our* separator. Variables to integrate out must be - // eliminated first hence the desired ordering is [Cp\S S]. - // However, an added wrinkle is that Cp might overlap with the root. - // Keys corresponding to the root should not be added to the ordering at all. - - if(debug) { - p_R.print("p_R: "); - p_Fp_Sp.print("p_Fp_Sp: "); - p_Sp_R.print("p_Sp_R: "); - } - - // We want to factor into a conditional of the clique variables given the - // root and the marginal on the root, integrating out all other variables. - // The integrands include any parents of this clique and the variables of - // the parent clique. - FastSet variablesAtBack; - FastSet separator; - size_t uniqueRootVariables = 0; - BOOST_FOREACH(const Index separatorIndex, this->conditional()->parents()) { - variablesAtBack.insert(separatorIndex); - separator.insert(separatorIndex); - if(debug) cout << "At back (this): " << separatorIndex << endl; - } - BOOST_FOREACH(const Index key, R->conditional()->keys()) { - if(variablesAtBack.insert(key).second) - ++ uniqueRootVariables; - if(debug) cout << "At back (root): " << key << endl; - } - - Permutation toBack = Permutation::PushToBack( - vector(variablesAtBack.begin(), variablesAtBack.end()), - R->conditional()->lastFrontalKey() + 1); - Permutation::shared_ptr toBackInverse(toBack.inverse()); - BOOST_FOREACH(const typename FactorType::shared_ptr& factor, p_Cp_R) { - factor->permuteWithInverse(*toBackInverse); } - typename BayesNet::shared_ptr eliminated(EliminationTree< - FactorType>::Create(p_Cp_R)->eliminate(function)); - - // Take only the conditionals for p(S|R). We check for each variable being - // in the separator set because if some separator variables overlap with - // root variables, we cannot rely on the number of root variables, and also - // want to include those variables in the conditional. - BayesNet p_S_R; - BOOST_REVERSE_FOREACH(typename CONDITIONAL::shared_ptr conditional, *eliminated) { - assert(conditional->nrFrontals() == 1); - if(separator.find(toBack[conditional->firstFrontalKey()]) != separator.end()) { - if(debug) - conditional->print("Taking C|R conditional: "); - p_S_R.push_front(conditional); - } - if(p_S_R.size() == separator.size()) - break; - } - - // Undo the permutation - if(debug) toBack.print("toBack: "); - p_S_R.permuteWithInverse(toBack); - - // return the parent shortcut P(Sp|R) - assertInvariants(); - return p_S_R; - } - - /* ************************************************************************* */ - // P(C) = \int_R P(F|S) P(S|R) P(R) - // TODO: Maybe we should integrate given parent marginal P(Cp), - // \int(Cp\S) P(F|S)P(S|Cp)P(Cp) - // Because the root clique could be very big. 
- /* ************************************************************************* */ - template - FactorGraph BayesTree::Clique::marginal( - shared_ptr R, Eliminate function) { - // If we are the root, just return this root - // NOTE: immediately cast to a factor graph - BayesNet bn(R->conditional()); - if (R.get()==this) return bn; - - // Combine P(F|S), P(S|R), and P(R) - BayesNet p_FSR = this->shortcut(R, function); - p_FSR.push_front(this->conditional()); - p_FSR.push_back(R->conditional()); - - assertInvariants(); - GenericSequentialSolver solver(p_FSR); - return *solver.jointFactorGraph(conditional_->keys(), function); - } - - /* ************************************************************************* */ - // P(C1,C2) = \int_R P(F1|S1) P(S1|R) P(F2|S1) P(S2|R) P(R) - /* ************************************************************************* */ - template - FactorGraph BayesTree::Clique::joint( - shared_ptr C2, shared_ptr R, Eliminate function) { - // For now, assume neither is the root - - // Combine P(F1|S1), P(S1|R), P(F2|S2), P(S2|R), and P(R) - FactorGraph joint; - if (!isRoot()) joint.push_back(this->conditional()->toFactor()); // P(F1|S1) - if (!isRoot()) joint.push_back(shortcut(R, function)); // P(S1|R) - if (!C2->isRoot()) joint.push_back(C2->conditional()->toFactor()); // P(F2|S2) - if (!C2->isRoot()) joint.push_back(C2->shortcut(R, function)); // P(S2|R) - joint.push_back(R->conditional()->toFactor()); // P(R) - - // Find the keys of both C1 and C2 - vector keys1(conditional_->keys()); - vector keys2(C2->conditional_->keys()); - FastSet keys12; - keys12.insert(keys1.begin(), keys1.end()); - keys12.insert(keys2.begin(), keys2.end()); - - // Calculate the marginal - vector keys12vector; keys12vector.reserve(keys12.size()); - keys12vector.insert(keys12vector.begin(), keys12.begin(), keys12.end()); - assertInvariants(); - GenericSequentialSolver solver(joint); - return *solver.jointFactorGraph(keys12vector, function); - } - - /* ************************************************************************* */ - template - void BayesTree::Cliques::print(const std::string& s) const { + template + void BayesTree::Cliques::print(const std::string& s) const { cout << s << ":\n"; BOOST_FOREACH(sharedClique clique, *this) clique->printTree(); } /* ************************************************************************* */ - template - bool BayesTree::Cliques::equals(const Cliques& other, double tol) const { + template + bool BayesTree::Cliques::equals(const Cliques& other, double tol) const { return other == *this; } /* ************************************************************************* */ - template - typename BayesTree::sharedClique BayesTree::addClique( - const sharedConditional& conditional, sharedClique parent_clique) { + template + typename BayesTree::sharedClique + BayesTree::addClique(const sharedConditional& conditional, sharedClique parent_clique) { sharedClique new_clique(new Clique(conditional)); nodes_.resize(std::max(conditional->lastFrontalKey()+1, nodes_.size())); BOOST_FOREACH(Index key, conditional->frontals()) @@ -397,8 +166,8 @@ namespace gtsam { } /* ************************************************************************* */ - template - typename BayesTree::sharedClique BayesTree::addClique( + template + typename BayesTree::sharedClique BayesTree::addClique( const sharedConditional& conditional, list& child_cliques) { sharedClique new_clique(new Clique(conditional)); nodes_.resize(std::max(conditional->lastFrontalKey()+1, nodes_.size())); @@ -412,8 +181,8 @@ 
namespace gtsam { } /* ************************************************************************* */ - template - inline void BayesTree::addToCliqueFront(BayesTree& bayesTree, const sharedConditional& conditional, const sharedClique& clique) { + template + inline void BayesTree::addToCliqueFront(BayesTree& bayesTree, const sharedConditional& conditional, const sharedClique& clique) { static const bool debug = false; #ifndef NDEBUG // Debug check to make sure the conditional variable is ordered lower than @@ -439,8 +208,8 @@ namespace gtsam { } /* ************************************************************************* */ - template - void BayesTree::removeClique(sharedClique clique) { + template + void BayesTree::removeClique(sharedClique clique) { if (clique->isRoot()) root_.reset(); @@ -449,7 +218,7 @@ namespace gtsam { // orphan my children BOOST_FOREACH(sharedClique child, clique->children_) - child->parent_ = typename BayesTree::Clique::weak_ptr(); + child->parent_ = typename Clique::weak_ptr(); BOOST_FOREACH(Index key, (*clique->conditional())) { nodes_[key].reset(); @@ -457,27 +226,28 @@ namespace gtsam { } /* ************************************************************************* */ - template - BayesTree::BayesTree() { + template + BayesTree::BayesTree() { } /* ************************************************************************* */ - template - BayesTree::BayesTree(const BayesNet& bayesNet) { + template + BayesTree::BayesTree(const BayesNet& bayesNet) { typename BayesNet::const_reverse_iterator rit; for ( rit=bayesNet.rbegin(); rit != bayesNet.rend(); ++rit ) insert(*this, *rit); } /* ************************************************************************* */ - template - BayesTree::BayesTree(const BayesNet& bayesNet, std::list > subtrees) { + template + BayesTree::BayesTree(const BayesNet& bayesNet, std::list > subtrees) { if (bayesNet.size() == 0) throw invalid_argument("BayesTree::insert: empty bayes net!"); // get the roots of child subtrees and merge their nodes_ list childRoots; - BOOST_FOREACH(const BayesTree& subtree, subtrees) { + typedef BayesTree Tree; + BOOST_FOREACH(const Tree& subtree, subtrees) { nodes_.insert(subtree.nodes_.begin(), subtree.nodes_.end()); childRoots.push_back(subtree.root()); } @@ -496,8 +266,8 @@ namespace gtsam { } /* ************************************************************************* */ - template - void BayesTree::print(const string& s) const { + template + void BayesTree::print(const string& s) const { if (root_.use_count() == 0) { printf("WARNING: BayesTree.print encountered a forest...\n"); return; @@ -509,26 +279,26 @@ namespace gtsam { /* ************************************************************************* */ // binary predicate to test equality of a pair for use in equals - template + template bool check_sharedCliques( - const typename BayesTree::sharedClique& v1, - const typename BayesTree::sharedClique& v2 + const typename BayesTree::sharedClique& v1, + const typename BayesTree::sharedClique& v2 ) { - return v1->equals(*v2); + return (!v1 && !v2) || (v1 && v2 && v1->equals(*v2)); } /* ************************************************************************* */ - template - bool BayesTree::equals(const BayesTree& other, + template + bool BayesTree::equals(const BayesTree& other, double tol) const { return size()==other.size() && - std::equal(nodes_.begin(), nodes_.end(), other.nodes_.begin(), &check_sharedCliques); + std::equal(nodes_.begin(), nodes_.end(), other.nodes_.begin(), &check_sharedCliques); } /* 
************************************************************************* */ - template + template template - inline Index BayesTree::findParentClique(const CONTAINER& parents) const { + inline Index BayesTree::findParentClique(const CONTAINER& parents) const { typename CONTAINER::const_iterator lowestOrderedParent = min_element(parents.begin(), parents.end()); assert(lowestOrderedParent != parents.end()); return *lowestOrderedParent; @@ -548,8 +318,8 @@ namespace gtsam { } /* ************************************************************************* */ - template - void BayesTree::insert(BayesTree& bayesTree, const sharedConditional& conditional) + template + void BayesTree::insert(BayesTree& bayesTree, const sharedConditional& conditional) { static const bool debug = false; @@ -597,8 +367,8 @@ namespace gtsam { /* ************************************************************************* */ //TODO: remove this function after removing TSAM.cpp - template - typename BayesTree::sharedClique BayesTree::insert( + template + typename BayesTree::sharedClique BayesTree::insert( const sharedConditional& clique, list& children, bool isRootClique) { if (clique->nrFrontals() == 0) @@ -612,18 +382,19 @@ namespace gtsam { } /* ************************************************************************* */ - template - void BayesTree::fillNodesIndex(const sharedClique& subtree) { + template + void BayesTree::fillNodesIndex(const sharedClique& subtree) { // Add each frontal variable of this root node BOOST_FOREACH(const Index& key, subtree->conditional()->frontals()) { nodes_[key] = subtree; } // Fill index for each child - BOOST_FOREACH(const typename BayesTree::sharedClique& child, subtree->children_) { + typedef typename BayesTree::sharedClique sharedClique; + BOOST_FOREACH(const sharedClique& child, subtree->children_) { fillNodesIndex(child); } } /* ************************************************************************* */ - template - void BayesTree::insert(const sharedClique& subtree) { + template + void BayesTree::insert(const sharedClique& subtree) { if(subtree) { // Find the parent clique of the new subtree. 
By the running intersection // property, those separator variables in the subtree that are ordered @@ -651,8 +422,8 @@ namespace gtsam { /* ************************************************************************* */ // First finds clique marginal then marginalizes that /* ************************************************************************* */ - template - typename CONDITIONAL::FactorType::shared_ptr BayesTree::marginalFactor( + template + typename CONDITIONAL::FactorType::shared_ptr BayesTree::marginalFactor( Index key, Eliminate function) const { // get clique containing key @@ -666,8 +437,8 @@ namespace gtsam { } /* ************************************************************************* */ - template - typename BayesNet::shared_ptr BayesTree::marginalBayesNet( + template + typename BayesNet::shared_ptr BayesTree::marginalBayesNet( Index key, Eliminate function) const { // calculate marginal as a factor graph @@ -681,9 +452,9 @@ namespace gtsam { /* ************************************************************************* */ // Find two cliques, their joint, then marginalizes /* ************************************************************************* */ - template - typename FactorGraph::shared_ptr BayesTree< - CONDITIONAL>::joint(Index key1, Index key2, Eliminate function) const { + template + typename FactorGraph::shared_ptr + BayesTree::joint(Index key1, Index key2, Eliminate function) const { // get clique C1 and C2 sharedClique C1 = (*this)[key1], C2 = (*this)[key2]; @@ -698,8 +469,8 @@ namespace gtsam { } /* ************************************************************************* */ - template - typename BayesNet::shared_ptr BayesTree::jointBayesNet( + template + typename BayesNet::shared_ptr BayesTree::jointBayesNet( Index key1, Index key2, Eliminate function) const { // eliminate factor graph marginal to a Bayes net @@ -708,17 +479,17 @@ namespace gtsam { } /* ************************************************************************* */ - template - void BayesTree::clear() { + template + void BayesTree::clear() { // Remove all nodes and clear the root pointer nodes_.clear(); root_.reset(); } /* ************************************************************************* */ - template - void BayesTree::removePath(sharedClique clique, - BayesNet& bn, typename BayesTree::Cliques& orphans) { + template + void BayesTree::removePath(sharedClique clique, + BayesNet& bn, typename BayesTree::Cliques& orphans) { // base case is NULL, if so we do nothing and return empties above if (clique!=NULL) { @@ -730,7 +501,7 @@ namespace gtsam { this->removeClique(clique); // remove path above me - this->removePath(clique->parent_.lock(), bn, orphans); + this->removePath(typename Clique::shared_ptr(clique->parent_.lock()), bn, orphans); // add children to list of orphans (splice also removed them from clique->children_) orphans.splice (orphans.begin(), clique->children_); @@ -741,10 +512,10 @@ namespace gtsam { } /* ************************************************************************* */ - template + template template - void BayesTree::removeTop(const CONTAINER& keys, - BayesNet& bn, typename BayesTree::Cliques& orphans) { + void BayesTree::removeTop(const CONTAINER& keys, + BayesNet& bn, typename BayesTree::Cliques& orphans) { // process each key of the new factor BOOST_FOREACH(const Index& key, keys) { diff --git a/gtsam/inference/BayesTree.h b/gtsam/inference/BayesTree.h index 1b762e210..650341a5c 100644 --- a/gtsam/inference/BayesTree.h +++ b/gtsam/inference/BayesTree.h @@ -24,142 +24,42 
@@ #include #include #include +#include #include #include #include #include +#include #include namespace gtsam { + // Forward declaration of BayesTreeClique which is defined below BayesTree in this file + template struct BayesTreeClique; + /** * Bayes tree - * Templated on the CONDITIONAL class, the type of node in the underlying Bayes chain. - * This could be a ConditionalProbabilityTable, a GaussianConditional, or a SymbolicConditional + * @tparam CONDITIONAL The type of the conditional densities, i.e. the type of node in the underlying Bayes chain, + * which could be a ConditionalProbabilityTable, a GaussianConditional, or a SymbolicConditional. + * @tparam CLIQUE The type of the clique data structure, defaults to BayesTreeClique, normally do not change this + * as it is only used when developing special versions of BayesTree, e.g. for ISAM2. * * \ingroup Multifrontal */ - template + template > class BayesTree { public: - typedef boost::shared_ptr > shared_ptr; + typedef boost::shared_ptr > shared_ptr; typedef boost::shared_ptr sharedConditional; typedef boost::shared_ptr > sharedBayesNet; typedef CONDITIONAL ConditionalType; typedef typename CONDITIONAL::FactorType FactorType; typedef typename FactorGraph::Eliminate Eliminate; - /** - * A Clique in the tree is an incomplete Bayes net: the variables - * in the Bayes net are the frontal nodes, and the variables conditioned - * on are the separator. We also have pointers up and down the tree. - * - * Since our Conditional class already handles multiple frontal variables, - * this Clique contains exactly 1 conditional. - */ - struct Clique { - - protected: - void assertInvariants() const; - - public: - typedef CONDITIONAL ConditionalType; - typedef typename boost::shared_ptr shared_ptr; - typedef typename boost::weak_ptr weak_ptr; - sharedConditional conditional_; - weak_ptr parent_; - std::list children_; - typename FactorType::shared_ptr cachedFactor_; - - friend class BayesTree; - - //* Constructor */ - Clique() {} - - Clique(const sharedConditional& conditional); - - void cloneToBayesTree(BayesTree& newTree, shared_ptr parent_clique = shared_ptr()) const { - sharedConditional newConditional = sharedConditional(new CONDITIONAL(*conditional_)); - sharedClique newClique = newTree.addClique(newConditional, parent_clique); - if (cachedFactor_) - newClique->cachedFactor_ = cachedFactor_->clone(); - else newClique->cachedFactor_ = typename FactorType::shared_ptr(); - if (!parent_clique) { - newTree.root_ = newClique; - } - BOOST_FOREACH(const shared_ptr& childClique, children_) { - childClique->cloneToBayesTree(newTree, newClique); - } - } - - /** print this node */ - void print(const std::string& s = "") const; - - /** The arrow operator accesses the conditional */ - const CONDITIONAL* operator->() const { return conditional_.get(); } - - /** The arrow operator accesses the conditional */ - CONDITIONAL* operator->() { return conditional_.get(); } - - /** Access the conditional */ - const sharedConditional& conditional() const { return conditional_; } - - /** is this the root of a Bayes tree ? 
*/ - inline bool isRoot() const { return parent_.expired(); } - - /** return the const reference of children */ - std::list& children() { return children_; } - const std::list& children() const { return children_; } - - /** The size of subtree rooted at this clique, i.e., nr of Cliques */ - size_t treeSize() const; - - /** Access the cached factor (this is a hack) */ - typename FactorType::shared_ptr& cachedFactor() { return cachedFactor_; } - - /** print this node and entire subtree below it */ - void printTree(const std::string& indent="") const; - - /** Permute the variables in the whole subtree rooted at this clique */ - void permuteWithInverse(const Permutation& inversePermutation); - - /** Permute variables when they only appear in the separators. In this - * case the running intersection property will be used to prevent always - * traversing the whole tree. Returns whether any separator variables in - * this subtree were reordered. - */ - bool permuteSeparatorWithInverse(const Permutation& inversePermutation); - - /** return the conditional P(S|Root) on the separator given the root */ - // TODO: create a cached version - BayesNet shortcut(shared_ptr root, Eliminate function); - - /** return the marginal P(C) of the clique */ - FactorGraph marginal(shared_ptr root, Eliminate function); - - /** return the joint P(C1,C2), where C1==this. TODO: not a method? */ - FactorGraph joint(shared_ptr C2, shared_ptr root, Eliminate function); - - bool equals(const Clique& other, double tol=1e-9) const { - return (!conditional_ && !other.conditional()) || - conditional_->equals(*(other.conditional()), tol); - } - - private: - /** Serialization function */ - friend class boost::serialization::access; - template - void serialize(ARCHIVE & ar, const unsigned int version) { - ar & BOOST_SERIALIZATION_NVP(conditional_); - ar & BOOST_SERIALIZATION_NVP(parent_); - ar & BOOST_SERIALIZATION_NVP(children_); - ar & BOOST_SERIALIZATION_NVP(cachedFactor_); - } - - }; // \struct Clique + typedef CLIQUE Clique; ///< The clique type, normally BayesTreeClique // typedef for shared pointers to cliques typedef boost::shared_ptr sharedClique; @@ -221,7 +121,7 @@ namespace gtsam { * parents are already in the clique or its separators. This function does * not check for this condition, it just updates the data structures. */ - static void addToCliqueFront(BayesTree& bayesTree, + static void addToCliqueFront(BayesTree& bayesTree, const sharedConditional& conditional, const sharedClique& clique); /** Fill the nodes index for a subtree */ @@ -239,7 +139,7 @@ namespace gtsam { * Create a Bayes Tree from a Bayes Net and some subtrees. The Bayes net corresponds to the * new root clique and the subtrees are connected to the root clique. */ - BayesTree(const BayesNet& bayesNet, std::list > subtrees); + BayesTree(const BayesNet& bayesNet, std::list > subtrees); /** Destructor */ virtual ~BayesTree() {} @@ -252,7 +152,7 @@ namespace gtsam { * This function only applies for Symbolic case with IndexCondtional, * We make it static so that it won't be compiled in GaussianConditional case. * */ - static void insert(BayesTree& bayesTree, const sharedConditional& conditional); + static void insert(BayesTree& bayesTree, const sharedConditional& conditional); /** * Insert a new clique corresponding to the given Bayes net. 
@@ -276,13 +176,25 @@ namespace gtsam { */ /** check equality */ - bool equals(const BayesTree& other, double tol = 1e-9) const; + bool equals(const BayesTree& other, double tol = 1e-9) const; - /** deep copy from another tree */ void cloneTo(shared_ptr& newTree) const { - root_->cloneToBayesTree(*newTree); + cloneTo(newTree, root()); } + private: + /** deep copy from another tree */ + void cloneTo(shared_ptr& newTree, const sharedClique& root) const { + if(root) { + sharedClique newClique = root->clone(); + newTree->insert(newClique); + BOOST_FOREACH(const sharedClique& childClique, root->children()) { + cloneTo(newTree, childClique); + } + } + } + public: + /** * Find parent clique of a conditional. It will look at all parents and * return the one with the lowest index in the ordering. @@ -303,6 +215,8 @@ namespace gtsam { /** return root clique */ const sharedClique& root() const { return root_; } + + /** Access the root clique (non-const version) */ sharedClique& root() { return root_; } /** find the clique to which key belongs */ @@ -314,24 +228,20 @@ namespace gtsam { CliqueData getCliqueData() const; /** return marginal on any variable */ - typename FactorType::shared_ptr marginalFactor(Index key, - Eliminate function) const; + typename FactorType::shared_ptr marginalFactor(Index key, Eliminate function) const; /** * return marginal on any variable, as a Bayes Net * NOTE: this function calls marginal, and then eliminates it into a Bayes Net * This is more expensive than the above function */ - typename BayesNet::shared_ptr marginalBayesNet(Index key, - Eliminate function) const; + typename BayesNet::shared_ptr marginalBayesNet(Index key, Eliminate function) const; /** return joint on two variables */ - typename FactorGraph::shared_ptr joint(Index key1, Index key2, - Eliminate function) const; + typename FactorGraph::shared_ptr joint(Index key1, Index key2, Eliminate function) const; /** return joint on two variables as a BayesNet */ - typename BayesNet::shared_ptr jointBayesNet(Index key1, - Index key2, Eliminate function) const; + typename BayesNet::shared_ptr jointBayesNet(Index key1, Index key2, Eliminate function) const; /** * Read only with side effects @@ -376,10 +286,10 @@ namespace gtsam { /* ************************************************************************* */ - template + template void _BayesTree_dim_adder( std::vector& dims, - const typename BayesTree::sharedClique& clique) { + const typename BayesTree::sharedClique& clique) { if(clique) { // Add dims from this clique @@ -387,18 +297,57 @@ namespace gtsam { dims[*it] = (*clique)->dim(it); // Traverse children - BOOST_FOREACH(const typename BayesTree::sharedClique& child, clique->children()) { - _BayesTree_dim_adder(dims, child); + typedef typename BayesTree::sharedClique sharedClique; + BOOST_FOREACH(const sharedClique& child, clique->children()) { + _BayesTree_dim_adder(dims, child); } } } /* ************************************************************************* */ - template - boost::shared_ptr allocateVectorValues(const BayesTree& bt) { + template + boost::shared_ptr allocateVectorValues(const BayesTree& bt) { std::vector dimensions(bt.nodes().size(), 0); - _BayesTree_dim_adder(dimensions, bt.root()); + _BayesTree_dim_adder(dimensions, bt.root()); return boost::shared_ptr(new VectorValues(dimensions)); } + + /* ************************************************************************* */ + /** + * A Clique in the tree is an incomplete Bayes net: the variables + * in the Bayes net are the frontal nodes, and the 
variables conditioned + * on are the separator. We also have pointers up and down the tree. + * + * Since our Conditional class already handles multiple frontal variables, + * this Clique contains exactly 1 conditional. + * + * This is the default clique type in a BayesTree, but some algorithms, like + * iSAM2 (see ISAM2Clique), use a different clique type in order to store + * extra data along with the clique. + */ + template + struct BayesTreeClique : public BayesTreeCliqueBase, CONDITIONAL> { + public: + typedef CONDITIONAL ConditionalType; + typedef BayesTreeClique This; + typedef BayesTreeCliqueBase Base; + typedef boost::shared_ptr shared_ptr; + typedef boost::weak_ptr weak_ptr; + BayesTreeClique() {} + BayesTreeClique(const typename ConditionalType::shared_ptr& conditional) : Base(conditional) {} + BayesTreeClique(const std::pair& result) : Base(result) {} + + private: + /** Serialization function */ + friend class boost::serialization::access; + template + void serialize(ARCHIVE & ar, const unsigned int version) { + ar & BOOST_SERIALIZATION_BASE_OBJECT_NVP(Base); + } + }; + } /// namespace gtsam + +#include +#include diff --git a/gtsam/inference/BayesTreeCliqueBase-inl.h b/gtsam/inference/BayesTreeCliqueBase-inl.h new file mode 100644 index 000000000..dfc8864f7 --- /dev/null +++ b/gtsam/inference/BayesTreeCliqueBase-inl.h @@ -0,0 +1,260 @@ +/* ---------------------------------------------------------------------------- + + * GTSAM Copyright 2010, Georgia Tech Research Corporation, + * Atlanta, Georgia 30332-0415 + * All Rights Reserved + * Authors: Frank Dellaert, et al. (see THANKS for the full author list) + + * See LICENSE for the license information + + * -------------------------------------------------------------------------- */ + +/** + * @file BayesTreeCliqueBase + * @brief Base class for cliques of a BayesTree + * @author Richard Roberts and Frank Dellaert + */ + +#pragma once + +#include + +namespace gtsam { + + /* ************************************************************************* */ + template + void BayesTreeCliqueBase::assertInvariants() const { +#ifndef NDEBUG + // We rely on the keys being sorted +// FastVector sortedUniqueKeys(conditional_->begin(), conditional_->end()); +// std::sort(sortedUniqueKeys.begin(), sortedUniqueKeys.end()); +// std::unique(sortedUniqueKeys.begin(), sortedUniqueKeys.end()); +// assert(sortedUniqueKeys.size() == conditional_->size() && +// std::equal(sortedUniqueKeys.begin(), sortedUniqueKeys.end(), conditional_->begin())); +#endif + } + + /* ************************************************************************* */ + template + BayesTreeCliqueBase::BayesTreeCliqueBase(const sharedConditional& conditional) : + conditional_(conditional) { + assertInvariants(); + } + + /* ************************************************************************* */ + template + BayesTreeCliqueBase::BayesTreeCliqueBase(const std::pair >& result) : + conditional_(result.first) { + assertInvariants(); + } + + /* ************************************************************************* */ + template + void BayesTreeCliqueBase::print(const std::string& s) const { + conditional_->print(s); + } + + /* ************************************************************************* */ + template + size_t BayesTreeCliqueBase::treeSize() const { + size_t size = 1; + BOOST_FOREACH(const derived_ptr& child, children_) + size += child->treeSize(); + return size; + } + + /* ************************************************************************* */ + template + void 
BayesTreeCliqueBase::printTree(const std::string& indent) const { + asDerived(this)->print(indent); + BOOST_FOREACH(const derived_ptr& child, children_) + child->printTree(indent+" "); + } + + /* ************************************************************************* */ + template + void BayesTreeCliqueBase::permuteWithInverse(const Permutation& inversePermutation) { + conditional_->permuteWithInverse(inversePermutation); + BOOST_FOREACH(const derived_ptr& child, children_) { + child->permuteWithInverse(inversePermutation); + } + assertInvariants(); + } + + /* ************************************************************************* */ + template + bool BayesTreeCliqueBase::permuteSeparatorWithInverse(const Permutation& inversePermutation) { + bool changed = conditional_->permuteSeparatorWithInverse(inversePermutation); +#ifndef NDEBUG + if(!changed) { + BOOST_FOREACH(Index& separatorKey, conditional_->parents()) { assert(separatorKey == inversePermutation[separatorKey]); } + BOOST_FOREACH(const derived_ptr& child, children_) { + assert(child->permuteSeparatorWithInverse(inversePermutation) == false); + } + } +#endif + if(changed) { + BOOST_FOREACH(const derived_ptr& child, children_) { + (void)child->permuteSeparatorWithInverse(inversePermutation); + } + } + assertInvariants(); + return changed; + } + + /* ************************************************************************* */ + // The shortcut density is a conditional P(S|R) of the separator of this + // clique on the root. We can compute it recursively from the parent shortcut + // P(Sp|R) as \int P(Fp|Sp) P(Sp|R), where Fp are the frontal nodes in p + /* ************************************************************************* */ + template + BayesNet BayesTreeCliqueBase::shortcut(derived_ptr R, Eliminate function) { + + static const bool debug = false; + + // A first base case is when this clique or its parent is the root, + // in which case we return an empty Bayes net. + + derived_ptr parent(parent_.lock()); + + if (R.get()==this || parent==R) { + BayesNet empty; + return empty; + } + + // The root conditional + FactorGraph p_R(BayesNet(R->conditional())); + + // The parent clique has a ConditionalType for each frontal node in Fp + // so we can obtain P(Fp|Sp) in factor graph form + FactorGraph p_Fp_Sp(BayesNet(parent->conditional())); + + // If not the base case, obtain the parent shortcut P(Sp|R) as factors + FactorGraph p_Sp_R(parent->shortcut(R, function)); + + // now combine P(Cp|R) = P(Fp|Sp) * P(Sp|R) + FactorGraph p_Cp_R; + p_Cp_R.push_back(p_R); + p_Cp_R.push_back(p_Fp_Sp); + p_Cp_R.push_back(p_Sp_R); + + // Eliminate into a Bayes net with ordering designed to integrate out + // any variables not in *our* separator. Variables to integrate out must be + // eliminated first hence the desired ordering is [Cp\S S]. + // However, an added wrinkle is that Cp might overlap with the root. + // Keys corresponding to the root should not be added to the ordering at all. + + if(debug) { + p_R.print("p_R: "); + p_Fp_Sp.print("p_Fp_Sp: "); + p_Sp_R.print("p_Sp_R: "); + } + + // We want to factor into a conditional of the clique variables given the + // root and the marginal on the root, integrating out all other variables. + // The integrands include any parents of this clique and the variables of + // the parent clique. 
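      // [Illustrative aside, not part of the patch] A worked example of the
      // reordering below, with hypothetical variable indices: suppose this clique's
      // separator is S = {3, 5}, the root involves variables {7, 8}, and the
      // combined factors p_Cp_R involve one further variable, 4, that is in neither
      // S nor the root. PushToBack then builds a permutation that moves
      // {3, 5, 7, 8} to the highest indices, so eliminating the permuted graph
      // integrates out variable 4 first, and the conditionals over S and the root
      // end up at the back of the resulting Bayes net. Walking that net from the
      // back, the loop keeps only the conditionals whose frontal variable maps to
      // a separator variable, which is exactly P(S|R), and the final
      // permuteWithInverse(toBack) undoes the permutation so the returned
      // conditionals use the original variable indices again.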
+ FastSet variablesAtBack; + FastSet separator; + size_t uniqueRootVariables = 0; + BOOST_FOREACH(const Index separatorIndex, this->conditional()->parents()) { + variablesAtBack.insert(separatorIndex); + separator.insert(separatorIndex); + if(debug) std::cout << "At back (this): " << separatorIndex << std::endl; + } + BOOST_FOREACH(const Index key, R->conditional()->keys()) { + if(variablesAtBack.insert(key).second) + ++ uniqueRootVariables; + if(debug) std::cout << "At back (root): " << key << std::endl; + } + + Permutation toBack = Permutation::PushToBack( + std::vector(variablesAtBack.begin(), variablesAtBack.end()), + R->conditional()->lastFrontalKey() + 1); + Permutation::shared_ptr toBackInverse(toBack.inverse()); + BOOST_FOREACH(const typename FactorType::shared_ptr& factor, p_Cp_R) { + factor->permuteWithInverse(*toBackInverse); } + typename BayesNet::shared_ptr eliminated(EliminationTree< + FactorType>::Create(p_Cp_R)->eliminate(function)); + + // Take only the conditionals for p(S|R). We check for each variable being + // in the separator set because if some separator variables overlap with + // root variables, we cannot rely on the number of root variables, and also + // want to include those variables in the conditional. + BayesNet p_S_R; + BOOST_REVERSE_FOREACH(typename ConditionalType::shared_ptr conditional, *eliminated) { + assert(conditional->nrFrontals() == 1); + if(separator.find(toBack[conditional->firstFrontalKey()]) != separator.end()) { + if(debug) + conditional->print("Taking C|R conditional: "); + p_S_R.push_front(conditional); + } + if(p_S_R.size() == separator.size()) + break; + } + + // Undo the permutation + if(debug) toBack.print("toBack: "); + p_S_R.permuteWithInverse(toBack); + + // return the parent shortcut P(Sp|R) + assertInvariants(); + return p_S_R; + } + + /* ************************************************************************* */ + // P(C) = \int_R P(F|S) P(S|R) P(R) + // TODO: Maybe we should integrate given parent marginal P(Cp), + // \int(Cp\S) P(F|S)P(S|Cp)P(Cp) + // Because the root clique could be very big. 
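      // [Illustrative aside, not part of the patch] Why these three pieces suffice:
      // by the running intersection property, the frontal variables F of this
      // clique are independent of the rest of the root given the separator S, so
      //
      //   P(F|S) P(S|R) P(R) = P(F|S,R) P(S|R) P(R) = P(F,S,R),
      //
      // and integrating out the root variables that are not already in S leaves
      // P(F,S) = P(C). The re-elimination via jointFactorGraph on this clique's
      // keys in the function below performs exactly that integration.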
+ /* ************************************************************************* */ + template + FactorGraph::FactorType> BayesTreeCliqueBase::marginal( + derived_ptr R, Eliminate function) { + // If we are the root, just return this root + // NOTE: immediately cast to a factor graph + BayesNet bn(R->conditional()); + if (R.get()==this) return bn; + + // Combine P(F|S), P(S|R), and P(R) + BayesNet p_FSR = this->shortcut(R, function); + p_FSR.push_front(this->conditional()); + p_FSR.push_back(R->conditional()); + + assertInvariants(); + GenericSequentialSolver solver(p_FSR); + return *solver.jointFactorGraph(conditional_->keys(), function); + } + + /* ************************************************************************* */ + // P(C1,C2) = \int_R P(F1|S1) P(S1|R) P(F2|S1) P(S2|R) P(R) + /* ************************************************************************* */ + template + FactorGraph::FactorType> BayesTreeCliqueBase::joint( + derived_ptr C2, derived_ptr R, Eliminate function) { + // For now, assume neither is the root + + // Combine P(F1|S1), P(S1|R), P(F2|S2), P(S2|R), and P(R) + FactorGraph joint; + if (!isRoot()) joint.push_back(this->conditional()->toFactor()); // P(F1|S1) + if (!isRoot()) joint.push_back(shortcut(R, function)); // P(S1|R) + if (!C2->isRoot()) joint.push_back(C2->conditional()->toFactor()); // P(F2|S2) + if (!C2->isRoot()) joint.push_back(C2->shortcut(R, function)); // P(S2|R) + joint.push_back(R->conditional()->toFactor()); // P(R) + + // Find the keys of both C1 and C2 + std::vector keys1(conditional_->keys()); + std::vector keys2(C2->conditional_->keys()); + FastSet keys12; + keys12.insert(keys1.begin(), keys1.end()); + keys12.insert(keys2.begin(), keys2.end()); + + // Calculate the marginal + std::vector keys12vector; keys12vector.reserve(keys12.size()); + keys12vector.insert(keys12vector.begin(), keys12.begin(), keys12.end()); + assertInvariants(); + GenericSequentialSolver solver(joint); + return *solver.jointFactorGraph(keys12vector, function); + } + +} diff --git a/gtsam/inference/BayesTreeCliqueBase.h b/gtsam/inference/BayesTreeCliqueBase.h new file mode 100644 index 000000000..75bf86449 --- /dev/null +++ b/gtsam/inference/BayesTreeCliqueBase.h @@ -0,0 +1,163 @@ +/* ---------------------------------------------------------------------------- + + * GTSAM Copyright 2010, Georgia Tech Research Corporation, + * Atlanta, Georgia 30332-0415 + * All Rights Reserved + * Authors: Frank Dellaert, et al. (see THANKS for the full author list) + + * See LICENSE for the license information + + * -------------------------------------------------------------------------- */ + +/** + * @file BayesTreeCliqueBase + * @brief Base class for cliques of a BayesTree + * @author Richard Roberts and Frank Dellaert + */ + +#pragma once + +#include +#include + +#include +#include +#include + +namespace gtsam { template class BayesTree; } + +namespace gtsam { + + /** + * This is the base class for BayesTree cliques. The default and standard + * derived type is BayesTreeClique, but some algorithms, like iSAM2, use a + * different clique type in order to store extra data along with the clique. + * + * This class is templated on the derived class (i.e. the curiously recursive + * template pattern). The advantage of this over using virtual classes is + * that it avoids the need for casting to get the derived type. This is + * possible because all cliques in a BayesTree are the same type - if they + * were not then we'd need a virtual class. 
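 *
 * [Illustrative aside, not part of the patch] A minimal sketch of the CRTP idiom
 * used here, with hypothetical names and assuming boost/shared_ptr.hpp,
 * boost/make_shared.hpp and <list> are included; the point is that the base can
 * hold child pointers of the derived type and return the derived type from its
 * factory functions, with no virtual dispatch and no downcasting:
 *
 *   template<class DERIVED> struct CliqueBase {
 *     typedef boost::shared_ptr<DERIVED> derived_ptr;
 *     std::list<derived_ptr> children_;   // children already have the derived type
 *     static derived_ptr Create() { return boost::make_shared<DERIVED>(); }
 *   };
 *   struct MyClique : CliqueBase<MyClique> { };   // a derived clique adds its own data members here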
+ * + * @tparam DERIVED The derived clique type. + * @tparam CONDITIONAL The conditional type. + */ + template + struct BayesTreeCliqueBase { + + public: + typedef BayesTreeCliqueBase This; + typedef DERIVED DerivedType; + typedef CONDITIONAL ConditionalType; + typedef boost::shared_ptr sharedConditional; + typedef boost::shared_ptr shared_ptr; + typedef boost::weak_ptr weak_ptr; + typedef boost::shared_ptr derived_ptr; + typedef boost::weak_ptr derived_weak_ptr; + typedef typename ConditionalType::FactorType FactorType; + typedef typename FactorGraph::Eliminate Eliminate; + + protected: + void assertInvariants() const; + + /** Default constructor */ + BayesTreeCliqueBase() {} + + /** Construct from a conditional, leaving parent and child pointers uninitialized */ + BayesTreeCliqueBase(const sharedConditional& conditional); + + /** Construct from an elimination result, which is a pair */ + BayesTreeCliqueBase(const std::pair >& result); + + public: + sharedConditional conditional_; + derived_weak_ptr parent_; + std::list children_; + + /** Construct shared_ptr from a conditional, leaving parent and child pointers uninitialized */ + static derived_ptr Create(const sharedConditional& conditional) { return boost::make_shared(conditional); } + + /** Construct shared_ptr from a FactorGraph::EliminationResult. In this class + * the conditional part is kept and the factor part is ignored, but in derived clique + * types, such as ISAM2Clique, the factor part is kept as a cached factor. + * @param An elimination result, which is a pair + */ + static derived_ptr Create(const std::pair >& result) { return boost::make_shared(result); } + + derived_ptr clone() const { return Create(sharedConditional(new ConditionalType(*conditional_))); } + + /** print this node */ + void print(const std::string& s = "") const; + + /** The arrow operator accesses the conditional */ + const ConditionalType* operator->() const { return conditional_.get(); } + + /** The arrow operator accesses the conditional */ + ConditionalType* operator->() { return conditional_.get(); } + + /** Access the conditional */ + const sharedConditional& conditional() const { return conditional_; } + + /** is this the root of a Bayes tree ? */ + inline bool isRoot() const { return parent_.expired(); } + + /** return the const reference of children */ + std::list& children() { return children_; } + const std::list& children() const { return children_; } + + /** The size of subtree rooted at this clique, i.e., nr of Cliques */ + size_t treeSize() const; + + /** print this node and entire subtree below it */ + void printTree(const std::string& indent="") const; + + /** Permute the variables in the whole subtree rooted at this clique */ + void permuteWithInverse(const Permutation& inversePermutation); + + /** Permute variables when they only appear in the separators. In this + * case the running intersection property will be used to prevent always + * traversing the whole tree. Returns whether any separator variables in + * this subtree were reordered. + */ + bool permuteSeparatorWithInverse(const Permutation& inversePermutation); + + /** return the conditional P(S|Root) on the separator given the root */ + // TODO: create a cached version + BayesNet shortcut(derived_ptr root, Eliminate function); + + /** return the marginal P(C) of the clique */ + FactorGraph marginal(derived_ptr root, Eliminate function); + + /** return the joint P(C1,C2), where C1==this. TODO: not a method? 
*/ + FactorGraph joint(derived_ptr C2, derived_ptr root, Eliminate function); + + bool equals(const This& other, double tol=1e-9) const { + return (!conditional_ && !other.conditional()) || + conditional_->equals(*other.conditional(), tol); + } + + friend class BayesTree; + + private: + /** Serialization function */ + friend class boost::serialization::access; + template + void serialize(ARCHIVE & ar, const unsigned int version) { + ar & BOOST_SERIALIZATION_NVP(conditional_); + ar & BOOST_SERIALIZATION_NVP(parent_); + ar & BOOST_SERIALIZATION_NVP(children_); + } + + }; // \struct Clique + + template + const DERIVED* asDerived(const BayesTreeCliqueBase* base) { + return static_cast(base); + } + + template + DERIVED* asDerived(BayesTreeCliqueBase* base) { + return static_cast(base); + } + +} diff --git a/gtsam/inference/ClusterTree.h b/gtsam/inference/ClusterTree.h index f264bc010..d72c0fcdf 100644 --- a/gtsam/inference/ClusterTree.h +++ b/gtsam/inference/ClusterTree.h @@ -110,3 +110,5 @@ namespace gtsam { }; // ClusterTree } // namespace gtsam + +#include diff --git a/gtsam/inference/EliminationTree-inl.h b/gtsam/inference/EliminationTree-inl.h index 1d62e46fc..462a2fd4a 100644 --- a/gtsam/inference/EliminationTree-inl.h +++ b/gtsam/inference/EliminationTree-inl.h @@ -9,7 +9,6 @@ #include #include #include -#include #include #include diff --git a/gtsam/inference/EliminationTree.h b/gtsam/inference/EliminationTree.h index 6e47983a0..76db131ed 100644 --- a/gtsam/inference/EliminationTree.h +++ b/gtsam/inference/EliminationTree.h @@ -10,10 +10,10 @@ #include #include #include -#include #include class EliminationTreeTester; // for unit tests, see testEliminationTree +namespace gtsam { template class BayesNet; } namespace gtsam { @@ -140,3 +140,5 @@ struct DisconnectedGraphException : public std::exception { }; } + +#include diff --git a/gtsam/inference/FactorGraph-inl.h b/gtsam/inference/FactorGraph-inl.h index 533f36c05..3b3d568bd 100644 --- a/gtsam/inference/FactorGraph-inl.h +++ b/gtsam/inference/FactorGraph-inl.h @@ -24,7 +24,7 @@ #include #include -#include +#include #include #include @@ -126,28 +126,29 @@ namespace gtsam { } /* ************************************************************************* */ - template + template void _FactorGraph_BayesTree_adder( vector::sharedFactor>& factors, - const typename BayesTree::sharedClique& clique) { + const typename BayesTree::sharedClique& clique) { if(clique) { // Add factor from this clique factors.push_back((*clique)->toFactor()); // Traverse children - BOOST_FOREACH(const typename BayesTree::sharedClique& child, clique->children()) { - _FactorGraph_BayesTree_adder(factors, child); + typedef typename BayesTree::sharedClique sharedClique; + BOOST_FOREACH(const sharedClique& child, clique->children()) { + _FactorGraph_BayesTree_adder(factors, child); } } } /* ************************************************************************* */ template - template - FactorGraph::FactorGraph(const BayesTree& bayesTree) { + template + FactorGraph::FactorGraph(const BayesTree& bayesTree) { factors_.reserve(bayesTree.size()); - _FactorGraph_BayesTree_adder(factors_, bayesTree.root()); + _FactorGraph_BayesTree_adder(factors_, bayesTree.root()); } /* ************************************************************************* */ diff --git a/gtsam/inference/FactorGraph.h b/gtsam/inference/FactorGraph.h index 91a879392..df684d7d2 100644 --- a/gtsam/inference/FactorGraph.h +++ b/gtsam/inference/FactorGraph.h @@ -33,7 +33,7 @@ namespace gtsam { // Forward 
declarations -template class BayesTree; +template class BayesTree; /** * A factor graph is a bipartite graph with factor nodes connected to variable nodes. @@ -77,9 +77,9 @@ template class BayesTree; template FactorGraph(const BayesNet& bayesNet); - /** convert from Bayes net */ - template - FactorGraph(const BayesTree& bayesTree); + /** convert from Bayes tree */ + template + FactorGraph(const BayesTree& bayesTree); /** convert from a derived type */ template @@ -239,3 +239,4 @@ template class BayesTree; } // namespace gtsam +#include diff --git a/gtsam/inference/GenericMultifrontalSolver-inl.h b/gtsam/inference/GenericMultifrontalSolver-inl.h index 30685a751..6fb135009 100644 --- a/gtsam/inference/GenericMultifrontalSolver-inl.h +++ b/gtsam/inference/GenericMultifrontalSolver-inl.h @@ -19,7 +19,7 @@ #include #include -#include +#include #include using namespace std; @@ -51,14 +51,14 @@ namespace gtsam { /* ************************************************************************* */ template - typename JT::BayesTree::shared_ptr GenericMultifrontalSolver::eliminate( + typename BayesTree::shared_ptr GenericMultifrontalSolver::eliminate( typename FactorGraph::Eliminate function) const { // eliminate junction tree, returns pointer to root - typename JT::BayesTree::sharedClique root = junctionTree_->eliminate(function); + typename BayesTree::sharedClique root = junctionTree_->eliminate(function); // create an empty Bayes tree and insert root clique - typename JT::BayesTree::shared_ptr bayesTree(new typename JT::BayesTree); + typename BayesTree::shared_ptr bayesTree(new BayesTree); bayesTree->insert(root); // return the Bayes tree diff --git a/gtsam/inference/GenericMultifrontalSolver.h b/gtsam/inference/GenericMultifrontalSolver.h index 4228a058b..2375ff08c 100644 --- a/gtsam/inference/GenericMultifrontalSolver.h +++ b/gtsam/inference/GenericMultifrontalSolver.h @@ -79,7 +79,7 @@ namespace gtsam { * Eliminate the factor graph sequentially. Uses a column elimination tree * to recursively eliminate. */ - typename JUNCTIONTREE::BayesTree::shared_ptr + typename BayesTree::shared_ptr eliminate(Eliminate function) const; /** diff --git a/gtsam/inference/GenericSequentialSolver-inl.h b/gtsam/inference/GenericSequentialSolver-inl.h index b4bd22e67..04e8cfbeb 100644 --- a/gtsam/inference/GenericSequentialSolver-inl.h +++ b/gtsam/inference/GenericSequentialSolver-inl.h @@ -18,10 +18,10 @@ #pragma once -#include -#include -#include -#include +#include +#include +#include +#include #include #include @@ -77,9 +77,8 @@ namespace gtsam { /* ************************************************************************* */ template - typename BayesNet::shared_ptr // - GenericSequentialSolver::eliminate( - typename EliminationTree::Eliminate function) const { + typename boost::shared_ptr > // + GenericSequentialSolver::eliminate(Eliminate function) const { return eliminationTree_->eliminate(function); } @@ -87,8 +86,7 @@ namespace gtsam { template typename FactorGraph::shared_ptr // GenericSequentialSolver::jointFactorGraph( - const std::vector& js, - typename EliminationTree::Eliminate function) const { + const std::vector& js, Eliminate function) const { // Compute a COLAMD permutation with the marginal variable constrained to the end. 
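    // [Illustrative aside, not part of the patch] Constraining the requested
    // variables js to the end of the fill-reducing ordering means every other
    // variable is eliminated before them; the density left over on js after those
    // eliminations is, up to normalization, their joint marginal, which is what
    // this function returns as a factor graph.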
Permutation::shared_ptr permutation(Inference::PermutationCOLAMD(*structure_, js)); @@ -127,10 +125,9 @@ namespace gtsam { /* ************************************************************************* */ template typename FACTOR::shared_ptr // - GenericSequentialSolver::marginalFactor(Index j, - typename EliminationTree::Eliminate function) const { + GenericSequentialSolver::marginalFactor(Index j, Eliminate function) const { // Create a container for the one variable index - vector js(1); + std::vector js(1); js[0] = j; // Call joint and return the only factor in the factor graph it returns diff --git a/gtsam/inference/GenericSequentialSolver.h b/gtsam/inference/GenericSequentialSolver.h index 16d47f8df..d9c593b25 100644 --- a/gtsam/inference/GenericSequentialSolver.h +++ b/gtsam/inference/GenericSequentialSolver.h @@ -19,9 +19,15 @@ #pragma once #include - +#include +#include +#include #include -#include + +namespace gtsam { class VariableIndex; } +namespace gtsam { template class EliminationTree; } +namespace gtsam { template class FactorGraph; } +namespace gtsam { template class BayesNet; } namespace gtsam { @@ -43,7 +49,12 @@ namespace gtsam { protected: - typedef typename FactorGraph::shared_ptr sharedFactorGraph; + typedef boost::shared_ptr > sharedFactorGraph; + + typedef std::pair< + boost::shared_ptr, + boost::shared_ptr > EliminationResult; + typedef boost::function&, size_t)> Eliminate; /** Store the original factors for computing marginals * TODO Frank says: really? Marginals should be computed from result. @@ -51,15 +62,14 @@ namespace gtsam { sharedFactorGraph factors_; /** Store column structure of the factor graph. Why? */ - VariableIndex::shared_ptr structure_; + boost::shared_ptr structure_; /** Elimination tree that performs elimination */ - typedef EliminationTree EliminationTree_; - typename EliminationTree::shared_ptr eliminationTree_; + boost::shared_ptr > eliminationTree_; /** concept checks */ GTSAM_CONCEPT_TESTABLE_TYPE(FACTOR) - GTSAM_CONCEPT_TESTABLE_TYPE(EliminationTree_) +// GTSAM_CONCEPT_TESTABLE_TYPE(EliminationTree) public: @@ -76,7 +86,7 @@ namespace gtsam { */ GenericSequentialSolver( const sharedFactorGraph& factorGraph, - const VariableIndex::shared_ptr& variableIndex); + const boost::shared_ptr& variableIndex); /** Print to cout */ void print(const std::string& name = "GenericSequentialSolver: ") const; @@ -95,25 +105,23 @@ namespace gtsam { * Eliminate the factor graph sequentially. Uses a column elimination tree * to recursively eliminate. */ - typename BayesNet::shared_ptr - eliminate(typename EliminationTree::Eliminate function) const; + typename boost::shared_ptr > eliminate(Eliminate function) const; /** * Compute the marginal joint over a set of variables, by integrating out * all of the other variables. Returns the result as a factor graph. */ typename FactorGraph::shared_ptr jointFactorGraph( - const std::vector& js, - typename EliminationTree::Eliminate function) const; + const std::vector& js, Eliminate function) const; /** * Compute the marginal Gaussian density over a variable, by integrating out * all of the other variables. This function returns the result as a factor. 
*/ - typename FACTOR::shared_ptr marginalFactor(Index j, - typename EliminationTree::Eliminate function) const; + typename FACTOR::shared_ptr marginalFactor(Index j, Eliminate function) const; }; // GenericSequentialSolver } // namespace gtsam +#include diff --git a/gtsam/inference/ISAM-inl.h b/gtsam/inference/ISAM-inl.h index 8fdad31b2..1daab686f 100644 --- a/gtsam/inference/ISAM-inl.h +++ b/gtsam/inference/ISAM-inl.h @@ -41,7 +41,7 @@ namespace gtsam { // Remove the contaminated part of the Bayes tree BayesNet bn; - removeTop(newFactors.keys(), bn, orphans); + this->removeTop(newFactors.keys(), bn, orphans); FG factors(bn); // add the factors themselves diff --git a/gtsam/inference/JunctionTree-inl.h b/gtsam/inference/JunctionTree-inl.h index 6ca112c18..ad8b389ab 100644 --- a/gtsam/inference/JunctionTree-inl.h +++ b/gtsam/inference/JunctionTree-inl.h @@ -21,11 +21,8 @@ #include #include -#include -#include #include -#include -#include +#include #include #include @@ -36,8 +33,8 @@ namespace gtsam { using namespace std; /* ************************************************************************* */ - template - void JunctionTree::construct(const FG& fg, const VariableIndex& variableIndex) { + template + void JunctionTree::construct(const FG& fg, const VariableIndex& variableIndex) { tic(1, "JT Constructor"); tic(1, "JT symbolic ET"); const typename EliminationTree::shared_ptr symETree = @@ -58,8 +55,8 @@ namespace gtsam { } /* ************************************************************************* */ - template - JunctionTree::JunctionTree(const FG& fg) { + template + JunctionTree::JunctionTree(const FG& fg) { tic(0, "VariableIndex"); VariableIndex varIndex(fg); toc(0, "VariableIndex"); @@ -67,14 +64,14 @@ namespace gtsam { } /* ************************************************************************* */ - template - JunctionTree::JunctionTree(const FG& fg, const VariableIndex& variableIndex) { + template + JunctionTree::JunctionTree(const FG& fg, const VariableIndex& variableIndex) { construct(fg, variableIndex); } /* ************************************************************************* */ - template - typename JunctionTree::sharedClique JunctionTree::distributeFactors( + template + typename JunctionTree::sharedClique JunctionTree::distributeFactors( const FG& fg, const typename SymbolicBayesTree::sharedClique& bayesClique) { // Build "target" index. 
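The two JunctionTree constructors above differ only in whether the VariableIndex is built internally or supplied by the caller; reusing an existing index avoids recomputing the column structure, which is exactly what the ISAM2 changes later in this patch rely on. A minimal sketch with the symbolic types (illustration only):

    #include <gtsam/inference/JunctionTree.h>
    #include <gtsam/inference/SymbolicFactorGraph.h>

    using namespace gtsam;

    void buildJunctionTrees(const SymbolicFactorGraph& fg) {
      VariableIndex varIndex(fg);                                // column structure of fg
      JunctionTree<SymbolicFactorGraph> jtOwn(fg);               // builds its own VariableIndex
      JunctionTree<SymbolicFactorGraph> jtShared(fg, varIndex);  // reuses the precomputed index
    }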
This is an index for each variable of the factors @@ -109,8 +106,8 @@ namespace gtsam { } /* ************************************************************************* */ - template - typename JunctionTree::sharedClique JunctionTree::distributeFactors(const FG& fg, + template + typename JunctionTree::sharedClique JunctionTree::distributeFactors(const FG& fg, const std::vector >& targets, const SymbolicBayesTree::sharedClique& bayesClique) { @@ -147,21 +144,21 @@ namespace gtsam { } /* ************************************************************************* */ - template - pair::BayesTree::sharedClique, - typename FG::sharedFactor> JunctionTree::eliminateOneClique( + template + pair::BTClique::shared_ptr, + typename FG::sharedFactor> JunctionTree::eliminateOneClique( typename FG::Eliminate function, - const boost::shared_ptr& current, bool cache) const { + const boost::shared_ptr& current) const { FG fg; // factor graph will be assembled from local factors and marginalized children fg.reserve(current->size() + current->children().size()); fg.push_back(*current); // add the local factors // receive the factors from the child and its clique point - list children; + list children; BOOST_FOREACH(const boost::shared_ptr& child, current->children()) { - pair tree_factor( - eliminateOneClique(function, child, cache)); + pair tree_factor( + eliminateOneClique(function, child)); children.push_back(tree_factor.first); fg.push_back(tree_factor.second); } @@ -172,9 +169,7 @@ namespace gtsam { // Now that we know which factors and variables, and where variables // come from and go to, create and eliminate the new joint factor. tic(2, "CombineAndEliminate"); - pair< - typename FG::FactorType::ConditionalType::shared_ptr, - typename FG::sharedFactor> eliminated(function(fg, + typename FG::EliminationResult eliminated(function(fg, current->frontal.size())); toc(2, "CombineAndEliminate"); @@ -182,33 +177,31 @@ namespace gtsam { tic(3, "Update tree"); // create a new clique corresponding the combined factors - typename BayesTree::sharedClique new_clique(new typename BayesTree::Clique(eliminated.first)); + typename BTClique::shared_ptr new_clique(BTClique::Create(eliminated)); new_clique->children_ = children; - BOOST_FOREACH(typename BayesTree::sharedClique& childRoot, children) { + BOOST_FOREACH(typename BTClique::shared_ptr& childRoot, children) { childRoot->parent_ = new_clique; } - if(cache) - new_clique->cachedFactor() = eliminated.second; toc(3, "Update tree"); return make_pair(new_clique, eliminated.second); } /* ************************************************************************* */ - template - typename JunctionTree::BayesTree::sharedClique JunctionTree::eliminate( - typename FG::Eliminate function, bool cache) const { + template + typename BTCLIQUE::shared_ptr JunctionTree::eliminate( + typename FG::Eliminate function) const { if (this->root()) { tic(2, "JT eliminate"); - pair ret = - this->eliminateOneClique(function, this->root(), cache); + pair ret = + this->eliminateOneClique(function, this->root()); if (ret.second->size() != 0) throw runtime_error( "JuntionTree::eliminate: elimination failed because of factors left over!"); toc(2, "JT eliminate"); return ret.first; } else - return typename BayesTree::sharedClique(); + return typename BTClique::shared_ptr(); } } //namespace gtsam diff --git a/gtsam/inference/JunctionTree.h b/gtsam/inference/JunctionTree.h index 909cebc62..2f74357fe 100644 --- a/gtsam/inference/JunctionTree.h +++ b/gtsam/inference/JunctionTree.h @@ -45,7 +45,7 @@ namespace 
gtsam { * * \ingroup Multifrontal */ - template + template::Clique> class JunctionTree: public ClusterTree { public: @@ -55,7 +55,7 @@ namespace gtsam { typedef typename Clique::shared_ptr sharedClique; ///< Shared pointer to a clique /// The BayesTree type produced by elimination - typedef class BayesTree BayesTree; + typedef BTCLIQUE BTClique; /// Shared pointer to this class typedef boost::shared_ptr > shared_ptr; @@ -73,9 +73,9 @@ namespace gtsam { const SymbolicBayesTree::sharedClique& clique); // recursive elimination function - std::pair + std::pair eliminateOneClique(typename FG::Eliminate function, - const boost::shared_ptr& clique, bool cache = false) const; + const boost::shared_ptr& clique) const; // internal constructor void construct(const FG& fg, const VariableIndex& variableIndex); @@ -103,13 +103,12 @@ namespace gtsam { /** Eliminate the factors in the subgraphs to produce a BayesTree. * @param function The function used to eliminate, see the namespace functions * in GaussianFactorGraph.h - * @param cache Whether to cache the intermediate elimination factors for use in ISAM2 - this - * should always be false when called outside of ISAM2 (this will be fixed in the future). * @return The BayesTree resulting from elimination */ - typename BayesTree::sharedClique eliminate(typename FG::Eliminate function, - bool cache = false) const; + typename BTClique::shared_ptr eliminate(typename FG::Eliminate function) const; }; // JunctionTree } // namespace gtsam + +#include diff --git a/gtsam/inference/SymbolicFactorGraph.cpp b/gtsam/inference/SymbolicFactorGraph.cpp index 5a008a192..2d5190f45 100644 --- a/gtsam/inference/SymbolicFactorGraph.cpp +++ b/gtsam/inference/SymbolicFactorGraph.cpp @@ -18,19 +18,14 @@ #include #include -#include -#include -#include +#include +#include +#include namespace gtsam { using namespace std; - // Explicitly instantiate so we don't have to include everywhere - template class FactorGraph; - template class BayesNet; - template class EliminationTree; - /* ************************************************************************* */ SymbolicFactorGraph::SymbolicFactorGraph(const BayesNet& bayesNet) : FactorGraph(bayesNet) {} diff --git a/gtsam/inference/SymbolicFactorGraph.h b/gtsam/inference/SymbolicFactorGraph.h index dd9932d74..90413ab06 100644 --- a/gtsam/inference/SymbolicFactorGraph.h +++ b/gtsam/inference/SymbolicFactorGraph.h @@ -17,8 +17,13 @@ #pragma once -#include -#include +#include +#include +#include + +namespace gtsam { template class EliminationTree; } +namespace gtsam { template class BayesNet; } +namespace gtsam { class IndexConditional; } namespace gtsam { @@ -72,7 +77,7 @@ namespace gtsam { * Combine and eliminate can also be called separately, but for this and * derived classes calling them separately generally does extra work. 
*/ - std::pair + std::pair, boost::shared_ptr > EliminateSymbolic(const FactorGraph&, size_t nrFrontals = 1); /* Template function implementation */ diff --git a/gtsam/inference/SymbolicMultifrontalSolver.cpp b/gtsam/inference/SymbolicMultifrontalSolver.cpp index 77df4e04d..7348cbc5d 100644 --- a/gtsam/inference/SymbolicMultifrontalSolver.cpp +++ b/gtsam/inference/SymbolicMultifrontalSolver.cpp @@ -16,7 +16,7 @@ */ #include -#include +#include namespace gtsam { diff --git a/gtsam/inference/inference.h b/gtsam/inference/inference.h index 2be90a074..d853ff91b 100644 --- a/gtsam/inference/inference.h +++ b/gtsam/inference/inference.h @@ -18,7 +18,6 @@ #pragma once -#include #include #include @@ -77,4 +76,4 @@ namespace gtsam { return PermutationCOLAMD_(variableIndex, cmember); } -} /// namespace gtsam +} // namespace gtsam diff --git a/gtsam/inference/tests/testClusterTree.cpp b/gtsam/inference/tests/testClusterTree.cpp index ab7716c50..2d3a2744c 100644 --- a/gtsam/inference/tests/testClusterTree.cpp +++ b/gtsam/inference/tests/testClusterTree.cpp @@ -22,12 +22,12 @@ using namespace boost::assign; #include #include -#include +#include using namespace gtsam; // explicit instantiation and typedef -template class ClusterTree; +namespace gtsam { template class ClusterTree; } typedef ClusterTree SymbolicClusterTree; /* ************************************************************************* */ diff --git a/gtsam/inference/tests/testJunctionTree.cpp b/gtsam/inference/tests/testJunctionTree.cpp index 7dd129472..13f047e88 100644 --- a/gtsam/inference/tests/testJunctionTree.cpp +++ b/gtsam/inference/tests/testJunctionTree.cpp @@ -17,6 +17,7 @@ */ #include // for operator += +#include // for operator += #include // for operator += using namespace boost::assign; @@ -29,12 +30,13 @@ using namespace boost::assign; #include #include #include -#include -#include +#include +#include #include #include using namespace gtsam; +using namespace std; typedef JunctionTree SymbolicJunctionTree; typedef BayesTree SymbolicBayesTree; diff --git a/gtsam/linear/GaussianBayesNet.cpp b/gtsam/linear/GaussianBayesNet.cpp index 9806b435c..e05fbe45c 100644 --- a/gtsam/linear/GaussianBayesNet.cpp +++ b/gtsam/linear/GaussianBayesNet.cpp @@ -19,6 +19,7 @@ #include #include +#include #include #include @@ -213,6 +214,16 @@ double determinant(const GaussianBayesNet& bayesNet) { return exp(logDet); } +/* ************************************************************************* */ +VectorValues gradient(const GaussianBayesNet& bayesNet, const VectorValues& x0) { + return gradient(FactorGraph(bayesNet), x0); +} + +/* ************************************************************************* */ +void gradientAtZero(const GaussianBayesNet& bayesNet, VectorValues& g) { + gradientAtZero(FactorGraph(bayesNet), g); +} + /* ************************************************************************* */ } // namespace gtsam diff --git a/gtsam/linear/GaussianBayesNet.h b/gtsam/linear/GaussianBayesNet.h index 171d061ee..455536a4f 100644 --- a/gtsam/linear/GaussianBayesNet.h +++ b/gtsam/linear/GaussianBayesNet.h @@ -64,19 +64,19 @@ namespace gtsam { */ boost::shared_ptr optimize_(const GaussianBayesNet& bn); - /* + /** * Backsubstitute * (R*x)./sigmas = y by solving x=inv(R)*(y.*sigmas) * @param y is the RHS of the system */ VectorValues backSubstitute(const GaussianBayesNet& bn, const VectorValues& y); - /* + /** * Backsubstitute in place, y starts as RHS and is replaced with solution */ void backSubstituteInPlace(const GaussianBayesNet& bn, 
VectorValues& y); - /* + /** * Transpose Backsubstitute * gy=inv(L)*gx by solving L*gy=gx. * gy=inv(R'*inv(Sigma))*gx @@ -108,4 +108,26 @@ namespace gtsam { */ double determinant(const GaussianBayesNet& bayesNet); + /** + * Compute the gradient of the energy function, + * \f$ \nabla_{x=x_0} \left\Vert \Sigma^{-1} R x - d \right\Vert^2 \f$, + * centered around \f$ x = x_0 \f$. + * The gradient is \f$ R^T(Rx-d) \f$. + * @param bayesNet The Gaussian Bayes net $(R,d)$ + * @param x0 The center about which to compute the gradient + * @return The gradient as a VectorValues + */ + VectorValues gradient(const GaussianBayesNet& bayesNet, const VectorValues& x0); + + /** + * Compute the gradient of the energy function, + * \f$ \nabla_{x=0} \left\Vert \Sigma^{-1} R x - d \right\Vert^2 \f$, + * centered around zero. + * The gradient about zero is \f$ -R^T d \f$. See also gradient(const GaussianBayesNet&, const VectorValues&). + * @param bayesNet The Gaussian Bayes net $(R,d)$ + * @param [output] g A VectorValues to store the gradient, which must be preallocated, see allocateVectorValues + * @return The gradient as a VectorValues + */ + void gradientAtZero(const GaussianBayesNet& bayesNet, VectorValues& g); + } /// namespace gtsam diff --git a/gtsam/linear/GaussianConditional.cpp b/gtsam/linear/GaussianConditional.cpp index 37d9c1add..168bfec97 100644 --- a/gtsam/linear/GaussianConditional.cpp +++ b/gtsam/linear/GaussianConditional.cpp @@ -162,7 +162,7 @@ void GaussianConditional::print(const string &s) const } gtsam::print(Vector(get_d()),"d"); gtsam::print(sigmas_,"sigmas"); - cout << "Permutation: " << permutation_.indices() << endl; + cout << "Permutation: " << permutation_.indices().transpose() << endl; } /* ************************************************************************* */ diff --git a/gtsam/linear/GaussianConditional.h b/gtsam/linear/GaussianConditional.h index 54f96efe2..8a58bbc34 100644 --- a/gtsam/linear/GaussianConditional.h +++ b/gtsam/linear/GaussianConditional.h @@ -24,7 +24,6 @@ #include #include #include -#include #include // Forward declaration to friend unit tests @@ -39,6 +38,7 @@ namespace gtsam { // Forward declarations class GaussianFactor; +class JacobianFactor; /** * A conditional Gaussian functions as the node in a Bayes network @@ -52,8 +52,8 @@ public: typedef boost::shared_ptr shared_ptr; /** Store the conditional matrix as upper-triangular column-major */ - typedef Matrix AbMatrix; - typedef VerticalBlockView rsd_type; + typedef Matrix RdMatrix; + typedef VerticalBlockView rsd_type; typedef rsd_type::Block r_type; typedef rsd_type::constBlock const_r_type; @@ -72,7 +72,7 @@ protected: * Use R*permutation_ to get back the correct non-permuted order, * for example when converting to the Jacobian * */ - AbMatrix matrix_; + RdMatrix matrix_; rsd_type rsd_; /** vector of standard deviations */ @@ -169,7 +169,7 @@ public: protected: - const AbMatrix& matrix() const { return matrix_; } + const RdMatrix& matrix() const { return matrix_; } const rsd_type& rsd() const { return rsd_; } public: diff --git a/gtsam/linear/GaussianFactorGraph.cpp b/gtsam/linear/GaussianFactorGraph.cpp index 46462f60f..98e74abd7 100644 --- a/gtsam/linear/GaussianFactorGraph.cpp +++ b/gtsam/linear/GaussianFactorGraph.cpp @@ -42,9 +42,6 @@ namespace gtsam { /* ************************************************************************* */ GaussianFactorGraph::GaussianFactorGraph(const GaussianBayesNet& CBN) : Base(CBN) {} - /* ************************************************************************* */ 
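The new gradient and gradientAtZero free functions documented above can be called directly on an eliminated Bayes net. A hedged usage sketch (allocateVectorValues is the preallocation helper the documentation refers to; it is assumed to be visible through the included header):

    #include <gtsam/linear/GaussianBayesNet.h>

    using namespace gtsam;

    void bayesNetGradients(const GaussianBayesNet& bn, const VectorValues& x0) {
      VectorValues g = gradient(bn, x0);           // R^T (R x0 - d), evaluated at x0
      VectorValues gz = *allocateVectorValues(bn); // preallocate to the Bayes net's structure
      gradientAtZero(bn, gz);                      // -R^T d, written into the preallocated gz
    }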
- GaussianFactorGraph::GaussianFactorGraph(const BayesTree& GBT) : Base(GBT) {} - /* ************************************************************************* */ GaussianFactorGraph::Keys GaussianFactorGraph::keys() const { FastSet keys; @@ -370,10 +367,8 @@ namespace gtsam { // Pull out keys and dimensions tic(2, "keys"); - vector keys(scatter.size()); vector dimensions(scatter.size() + 1); BOOST_FOREACH(const Scatter::value_type& var_slot, scatter) { - keys[var_slot.second.slot] = var_slot.first; dimensions[var_slot.second.slot] = var_slot.second.dimension; } // This is for the r.h.s. vector @@ -404,7 +399,7 @@ namespace gtsam { // Extract conditionals and fill in details of the remaining factor tic(5, "split"); GaussianConditional::shared_ptr conditionals = - combinedFactor->splitEliminatedFactor(nrFrontals, keys); + combinedFactor->splitEliminatedFactor(nrFrontals); if (debug) { conditionals->print("Extracted conditionals: "); combinedFactor->print("Eliminated factor (L piece): "); @@ -575,10 +570,8 @@ namespace gtsam { // Pull out keys and dimensions tic(2, "keys"); - vector keys(scatter.size()); vector dimensions(scatter.size() + 1); BOOST_FOREACH(const Scatter::value_type& var_slot, scatter) { - keys[var_slot.second.slot] = var_slot.first; dimensions[var_slot.second.slot] = var_slot.second.dimension; } // This is for the r.h.s. vector @@ -612,7 +605,7 @@ namespace gtsam { // Extract conditionals and fill in details of the remaining factor tic(5, "split"); GaussianConditional::shared_ptr conditionals = - combinedFactor->splitEliminatedFactor(nrFrontals, keys, permutation); + combinedFactor->splitEliminatedFactor(nrFrontals, permutation); if (debug) { conditionals->print("Extracted conditionals: "); combinedFactor->print("Eliminated factor (L piece): "); diff --git a/gtsam/linear/GaussianFactorGraph.h b/gtsam/linear/GaussianFactorGraph.h index 6b937c214..3ec8a78f0 100644 --- a/gtsam/linear/GaussianFactorGraph.h +++ b/gtsam/linear/GaussianFactorGraph.h @@ -75,7 +75,8 @@ namespace gtsam { /** * Constructor that receives a BayesTree and returns a GaussianFactorGraph */ - GaussianFactorGraph(const BayesTree& GBT); + template + GaussianFactorGraph(const BayesTree& gbt) : Base(gbt) {} /** Constructor from a factor graph of GaussianFactor or a derived type */ template diff --git a/gtsam/linear/GaussianJunctionTree.cpp b/gtsam/linear/GaussianJunctionTree.cpp index f539e1759..e2e1c2951 100644 --- a/gtsam/linear/GaussianJunctionTree.cpp +++ b/gtsam/linear/GaussianJunctionTree.cpp @@ -17,8 +17,8 @@ * @brief: the Gaussian junction tree */ -#include -#include +#include +#include #include #include @@ -34,7 +34,7 @@ namespace gtsam { using namespace std; /* ************************************************************************* */ - void GaussianJunctionTree::btreeBackSubstitute(const boost::shared_ptr& current, VectorValues& config) const { + void GaussianJunctionTree::btreeBackSubstitute(const BTClique::shared_ptr& current, VectorValues& config) const { // solve the bayes net in the current node current->conditional()->solveInPlace(config); @@ -47,15 +47,15 @@ namespace gtsam { // } // solve the bayes nets in the child nodes - BOOST_FOREACH(const BayesTree::sharedClique& child, current->children()) { + BOOST_FOREACH(const BTClique::shared_ptr& child, current->children()) { btreeBackSubstitute(child, config); } } /* ************************************************************************* */ - void GaussianJunctionTree::btreeRHS(const boost::shared_ptr& current, VectorValues& config) const { + 
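Replacing the out-of-line BayesTree constructor with the header template above means a Bayes tree of any clique type can be converted back into a GaussianFactorGraph, which the new gradient code makes use of. A small sketch (BayesTree<GaussianConditional> relies on the default clique parameter, and print comes from the usual Testable interface; both are assumptions here):

    #include <gtsam/linear/GaussianFactorGraph.h>

    using namespace gtsam;

    void backToFactors(const BayesTree<GaussianConditional>& bayesTree) {
      GaussianFactorGraph asFactors(bayesTree);   // templated conversion constructor above
      asFactors.print("Converted factors: ");
    }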
void GaussianJunctionTree::btreeRHS(const BTClique::shared_ptr& current, VectorValues& config) const { current->conditional()->rhs(config); - BOOST_FOREACH(const BayesTree::sharedClique& child, current->children()) + BOOST_FOREACH(const BTClique::shared_ptr& child, current->children()) btreeRHS(child, config); } @@ -63,7 +63,7 @@ namespace gtsam { VectorValues GaussianJunctionTree::optimize(Eliminate function) const { tic(1, "GJT eliminate"); // eliminate from leaves to the root - boost::shared_ptr rootClique(this->eliminate(function)); + BTClique::shared_ptr rootClique(this->eliminate(function)); toc(1, "GJT eliminate"); // Allocate solution vector and copy RHS diff --git a/gtsam/linear/GaussianJunctionTree.h b/gtsam/linear/GaussianJunctionTree.h index 0397a646b..678c7c616 100644 --- a/gtsam/linear/GaussianJunctionTree.h +++ b/gtsam/linear/GaussianJunctionTree.h @@ -43,10 +43,10 @@ namespace gtsam { protected: // back-substitute in topological sort order (parents first) - void btreeBackSubstitute(const boost::shared_ptr& current, VectorValues& config) const; + void btreeBackSubstitute(const BTClique::shared_ptr& current, VectorValues& config) const; // find the RHS for the system in order to perform backsubstitution - void btreeRHS(const boost::shared_ptr& current, VectorValues& config) const; + void btreeRHS(const BTClique::shared_ptr& current, VectorValues& config) const; public : @@ -64,22 +64,22 @@ namespace gtsam { VectorValues optimize(Eliminate function) const; // convenient function to return dimensions of all variables in the BayesTree - template - static void countDims(const BayesTree& bayesTree, DIM_CONTAINER& dims) { + template + static void countDims(const BayesTree& bayesTree, DIM_CONTAINER& dims) { dims = DIM_CONTAINER(bayesTree.root()->conditional()->back()+1, 0); countDims(bayesTree.root(), dims); } private: - template - static void countDims(const boost::shared_ptr& clique, DIM_CONTAINER& dims) { + template + static void countDims(const boost::shared_ptr& clique, DIM_CONTAINER& dims) { GaussianConditional::const_iterator it = clique->conditional()->beginFrontals(); for (; it != clique->conditional()->endFrontals(); ++it) { assert(dims.at(*it) == 0); dims.at(*it) = clique->conditional()->dim(it); } - BOOST_FOREACH(const boost::shared_ptr& child, clique->children()) { + BOOST_FOREACH(const typename CLIQUE::shared_ptr& child, clique->children()) { countDims(child, dims); } } diff --git a/gtsam/linear/HessianFactor.cpp b/gtsam/linear/HessianFactor.cpp index da57e302b..8eb34d10c 100644 --- a/gtsam/linear/HessianFactor.cpp +++ b/gtsam/linear/HessianFactor.cpp @@ -20,9 +20,8 @@ #include #include #include -#include -#include #include +#include #include #include @@ -37,8 +36,6 @@ using namespace std; -using namespace boost::lambda; - namespace gtsam { /* ************************************************************************* */ @@ -193,6 +190,9 @@ HessianFactor::HessianFactor(const FactorGraph& factors, // Form Ab' * Ab tic(1, "allocate"); info_.resize(dimensions.begin(), dimensions.end(), false); + // Fill in keys + keys_.resize(scatter.size()); + std::transform(scatter.begin(), scatter.end(), keys_.begin(), boost::bind(&Scatter::value_type::first, ::_1)); toc(1, "allocate"); tic(2, "zero"); matrix_.noalias() = Matrix::Zero(matrix_.rows(),matrix_.cols()); @@ -406,7 +406,7 @@ Eigen::LDLT::TranspositionType HessianFactor::partialLDL(size_t nrFronta /* ************************************************************************* */ GaussianConditional::shared_ptr 
-HessianFactor::splitEliminatedFactor(size_t nrFrontals, const vector& keys, const Eigen::LDLT::TranspositionType& permutation) { +HessianFactor::splitEliminatedFactor(size_t nrFrontals, const Eigen::LDLT::TranspositionType& permutation) { static const bool debug = false; @@ -423,7 +423,7 @@ HessianFactor::splitEliminatedFactor(size_t nrFrontals, const vector& key // Because of the pivoting permutation when using LDL, treating each variable separately doesn't make sense. tic(2, "construct cond"); Vector sigmas = Vector::Ones(varDim); - conditionals = boost::make_shared(keys.begin(), keys.end(), nrFrontals, Ab, sigmas, permutation); + conditionals = boost::make_shared(keys_.begin(), keys_.end(), nrFrontals, Ab, sigmas, permutation); toc(2, "construct cond"); if(debug) conditionals->print("Extracted conditional: "); @@ -433,7 +433,9 @@ HessianFactor::splitEliminatedFactor(size_t nrFrontals, const vector& key tic(2, "remaining factor"); info_.blockStart() = nrFrontals; // Assign the keys - keys_.assign(keys.begin() + nrFrontals, keys.end()); + vector remainingKeys(keys_.size() - nrFrontals); + remainingKeys.assign(keys_.begin() + nrFrontals, keys_.end()); + keys_.swap(remainingKeys); toc(2, "remaining factor"); return conditionals; diff --git a/gtsam/linear/HessianFactor.h b/gtsam/linear/HessianFactor.h index 88e4364ae..2af208a59 100644 --- a/gtsam/linear/HessianFactor.h +++ b/gtsam/linear/HessianFactor.h @@ -276,7 +276,7 @@ namespace gtsam { /** split partially eliminated factor */ boost::shared_ptr splitEliminatedFactor( - size_t nrFrontals, const std::vector& keys, const Eigen::LDLT::TranspositionType& permutation = Eigen::LDLT::TranspositionType()); + size_t nrFrontals, const Eigen::LDLT::TranspositionType& permutation = Eigen::LDLT::TranspositionType()); /** assert invariants */ void assertInvariants() const; diff --git a/gtsam/linear/JacobianFactor.cpp b/gtsam/linear/JacobianFactor.cpp index c20dab3c1..3347fa4f9 100644 --- a/gtsam/linear/JacobianFactor.cpp +++ b/gtsam/linear/JacobianFactor.cpp @@ -582,17 +582,28 @@ namespace gtsam { } /* ************************************************************************* */ - VectorValues gradient(const FactorGraph& fg, const VectorValues& x) { + VectorValues gradient(const FactorGraph& fg, const VectorValues& x0) { // It is crucial for performance to make a zero-valued clone of x - VectorValues g = VectorValues::Zero(x); + VectorValues g = VectorValues::Zero(x0); Errors e; BOOST_FOREACH(const JacobianFactor::shared_ptr& factor, fg) { - e.push_back(factor->error_vector(x)); + e.push_back(factor->error_vector(x0)); } transposeMultiplyAdd(fg, 1.0, e, g); return g; } + /* ************************************************************************* */ + void gradientAtZero(const FactorGraph& fg, VectorValues& g) { + // Zero-out the gradient + g.setZero(); + Errors e; + BOOST_FOREACH(const JacobianFactor::shared_ptr& factor, fg) { + e.push_back(-factor->getb()); + } + transposeMultiplyAdd(fg, 1.0, e, g); + } + /* ************************************************************************* */ void residual(const FactorGraph& fg, const VectorValues &x, VectorValues &r) { Index i = 0 ; diff --git a/gtsam/linear/JacobianFactor.h b/gtsam/linear/JacobianFactor.h index 09089df20..35a1bfa9d 100644 --- a/gtsam/linear/JacobianFactor.h +++ b/gtsam/linear/JacobianFactor.h @@ -318,11 +318,26 @@ namespace gtsam { void transposeMultiplyAdd(const FactorGraph& fg, double alpha, const Errors& e, VectorValues& x); /** - * Calculate Gradient of A^(A*x-b) for a given config - * 
@param x: VectorValues specifying where to calculate gradient - * @return gradient, as a VectorValues as well + * Compute the gradient of the energy function, + * \f$ \nabla_{x=x_0} \left\Vert \Sigma^{-1} A x - b \right\Vert^2 \f$, + * centered around \f$ x = x_0 \f$. + * The gradient is \f$ A^T(Ax-b) \f$. + * @param fg The Jacobian factor graph $(A,b)$ + * @param x0 The center about which to compute the gradient + * @return The gradient as a VectorValues */ - VectorValues gradient(const FactorGraph& fg, const VectorValues& x); + VectorValues gradient(const FactorGraph& fg, const VectorValues& x0); + + /** + * Compute the gradient of the energy function, + * \f$ \nabla_{x=0} \left\Vert \Sigma^{-1} A x - b \right\Vert^2 \f$, + * centered around zero. + * The gradient is \f$ A^T(Ax-b) \f$. + * @param fg The Jacobian factor graph $(A,b)$ + * @param [output] g A VectorValues to store the gradient, which must be preallocated, see allocateVectorValues + * @return The gradient as a VectorValues + */ + void gradientAtZero(const FactorGraph& fg, VectorValues& g); // matrix-vector operations void residual(const FactorGraph& fg, const VectorValues &x, VectorValues &r); diff --git a/gtsam/linear/KalmanFilter.h b/gtsam/linear/KalmanFilter.h index eeee0dccc..ba21eed25 100644 --- a/gtsam/linear/KalmanFilter.h +++ b/gtsam/linear/KalmanFilter.h @@ -22,6 +22,8 @@ #include +namespace gtsam { class SharedDiagonal; } + namespace gtsam { class KalmanFilter { diff --git a/gtsam/linear/VectorValues.cpp b/gtsam/linear/VectorValues.cpp index d592ccdfe..6c3ec797e 100644 --- a/gtsam/linear/VectorValues.cpp +++ b/gtsam/linear/VectorValues.cpp @@ -39,7 +39,7 @@ VectorValues& VectorValues::operator=(const VectorValues& rhs) { /* ************************************************************************* */ VectorValues VectorValues::Zero(const VectorValues& x) { VectorValues cloned(SameStructure(x)); - cloned.vector() = Vector::Zero(x.dim()); + cloned.setZero(); return cloned; } @@ -124,10 +124,15 @@ VectorValues VectorValues::SameStructure(const VectorValues& other) { /* ************************************************************************* */ VectorValues VectorValues::Zero(Index nVars, size_t varDim) { VectorValues ret(nVars, varDim); - ret.vector() = Vector::Zero(ret.dim()); + ret.setZero(); return ret; } +/* ************************************************************************* */ +void VectorValues::setZero() { + values_.setZero(); +} + /* ************************************************************************* */ bool VectorValues::hasSameStructure(const VectorValues& other) const { if(this->size() != other.size()) diff --git a/gtsam/linear/VectorValues.h b/gtsam/linear/VectorValues.h index 0bcafd23c..33351ceb7 100644 --- a/gtsam/linear/VectorValues.h +++ b/gtsam/linear/VectorValues.h @@ -238,6 +238,9 @@ namespace gtsam { template void append(const CONTAINER& dimensions); + /** Set all entries to zero, does not modify the size. */ + void setZero(); + /** Reference the entire solution vector (const version). 
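The new VectorValues::setZero() above zeroes the flat storage in place without touching the block structure; the Zero(...) factories and gradientAtZero now delegate to it. A minimal sketch using the (nVars, varDim) constructor seen in VectorValues::Zero:

    #include <gtsam/linear/VectorValues.h>

    using namespace gtsam;

    void setZeroExample() {
      VectorValues v(3, 2);   // three variables, each of dimension 2
      v.setZero();            // all entries become zero; size and structure unchanged
    }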
*/ const Vector& vector() const { chk(); return values_; } @@ -383,7 +386,7 @@ namespace gtsam { template VectorValues VectorValues::Zero(const CONTAINER& dimensions) { VectorValues ret(dimensions); - ret.vector() = Vector::Zero(ret.dim()); + ret.setZero(); return ret; } diff --git a/gtsam/linear/tests/testGaussianConditional.cpp b/gtsam/linear/tests/testGaussianConditional.cpp index d211b2e06..e11765b6c 100644 --- a/gtsam/linear/tests/testGaussianConditional.cpp +++ b/gtsam/linear/tests/testGaussianConditional.cpp @@ -19,6 +19,7 @@ #include #include +#include #include #include diff --git a/gtsam/linear/tests/testKalmanFilter.cpp b/gtsam/linear/tests/testKalmanFilter.cpp index d873da83c..4ceda0cfb 100644 --- a/gtsam/linear/tests/testKalmanFilter.cpp +++ b/gtsam/linear/tests/testKalmanFilter.cpp @@ -18,6 +18,8 @@ */ #include +#include +#include #include using namespace std; diff --git a/gtsam/nonlinear/DoglegOptimizerImpl.cpp b/gtsam/nonlinear/DoglegOptimizerImpl.cpp index 32ef1b0ac..60e8d2a59 100644 --- a/gtsam/nonlinear/DoglegOptimizerImpl.cpp +++ b/gtsam/nonlinear/DoglegOptimizerImpl.cpp @@ -9,28 +9,28 @@ namespace gtsam { /* ************************************************************************* */ VectorValues DoglegOptimizerImpl::ComputeDoglegPoint( - double Delta, const VectorValues& x_u, const VectorValues& x_n, const bool verbose) { + double Delta, const VectorValues& dx_u, const VectorValues& dx_n, const bool verbose) { // Get magnitude of each update and find out which segment Delta falls in assert(Delta >= 0.0); double DeltaSq = Delta*Delta; - double x_u_norm_sq = x_u.vector().squaredNorm(); - double x_n_norm_sq = x_n.vector().squaredNorm(); + double x_u_norm_sq = dx_u.vector().squaredNorm(); + double x_n_norm_sq = dx_n.vector().squaredNorm(); if(verbose) cout << "Steepest descent magnitude " << sqrt(x_u_norm_sq) << ", Newton's method magnitude " << sqrt(x_n_norm_sq) << endl; if(DeltaSq < x_u_norm_sq) { // Trust region is smaller than steepest descent update - VectorValues x_d = VectorValues::SameStructure(x_u); - x_d.vector() = x_u.vector() * sqrt(DeltaSq / x_u_norm_sq); + VectorValues x_d = VectorValues::SameStructure(dx_u); + x_d.vector() = dx_u.vector() * sqrt(DeltaSq / x_u_norm_sq); if(verbose) cout << "In steepest descent region with fraction " << sqrt(DeltaSq / x_u_norm_sq) << " of steepest descent magnitude" << endl; return x_d; } else if(DeltaSq < x_n_norm_sq) { // Trust region boundary is between steepest descent point and Newton's method point - return ComputeBlend(Delta, x_u, x_n); + return ComputeBlend(Delta, dx_u, dx_n, verbose); } else { assert(DeltaSq >= x_n_norm_sq); if(verbose) cout << "In pure Newton's method region" << endl; // Trust region is larger than Newton's method point - return x_n; + return dx_n; } } diff --git a/gtsam/nonlinear/DoglegOptimizerImpl.h b/gtsam/nonlinear/DoglegOptimizerImpl.h index bfb1c1878..f843a9185 100644 --- a/gtsam/nonlinear/DoglegOptimizerImpl.h +++ b/gtsam/nonlinear/DoglegOptimizerImpl.h @@ -33,6 +33,19 @@ struct DoglegOptimizerImpl { double f_error; }; + /** Specifies how the trust region is adapted at each Dogleg iteration. If + * this is SEARCH_EACH_ITERATION, then the trust region radius will be + * increased potentially multiple times during one iteration until increasing + * it further no longer decreases the error. 
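ComputeDoglegPoint above picks between three regimes depending on where the trust-region radius Delta falls relative to the steepest-descent and Gauss-Newton step lengths. Restating the branches of the code in the notation of the renamed arguments dx_u and dx_n (a summary of the code, not new math):

    \delta x_d =
      \begin{cases}
        \dfrac{\Delta}{\lVert \delta x_u \rVert}\,\delta x_u,
          & \Delta < \lVert \delta x_u \rVert \\
        \text{ComputeBlend}(\Delta,\ \delta x_u,\ \delta x_n),
          & \lVert \delta x_u \rVert \le \Delta < \lVert \delta x_n \rVert \\
        \delta x_n,
          & \Delta \ge \lVert \delta x_n \rVert
      \end{cases}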
If this is + * ONE_STEP_PER_ITERATION, then the step in one iteration will not exceed the + * current trust region radius, but the radius will be increased for the next + * iteration if the error decrease is good. The former will generally result + * in slower iterations, but sometimes larger steps in early iterations. The + * latter generally results in faster iterations but it may take several + * iterations before the trust region radius is increased to the optimal + * value. Generally ONE_STEP_PER_ITERATION should be used, corresponding to + * most published descriptions of the algorithm. + */ enum TrustRegionAdaptationMode { SEARCH_EACH_ITERATION, ONE_STEP_PER_ITERATION @@ -62,7 +75,8 @@ struct DoglegOptimizerImpl { * @tparam F For normal usage this will be NonlinearFactorGraph. * @tparam VALUES The Values or TupleValues to pass to F::error() to evaluate * the error function. - * @param initialDelta The initial trust region radius. + * @param Delta The initial trust region radius. + * @param mode See DoglegOptimizerImpl::TrustRegionAdaptationMode * @param Rd The Bayes' net or tree as described above. * @param f The original nonlinear factor graph with which to evaluate the * accuracy of \f$ M(\delta x) \f$ to adjust \f$ \Delta \f$. @@ -95,10 +109,11 @@ struct DoglegOptimizerImpl { * GaussianBayesNet, containing GaussianConditional s. * * @param Delta The trust region radius - * @param bayesNet The Bayes' net \f$ (R,d) \f$ as described above. + * @param dx_u The steepest descent point, i.e. the Cauchy point + * @param dx_n The Gauss-Newton point * @return The dogleg point \f$ \delta x_d \f$ */ - static VectorValues ComputeDoglegPoint(double Delta, const VectorValues& x_u, const VectorValues& x_n, const bool verbose=false); + static VectorValues ComputeDoglegPoint(double Delta, const VectorValues& dx_u, const VectorValues& dx_n, const bool verbose=false); /** Compute the minimizer \f$ \delta x_u \f$ of the line search along the gradient direction \f$ g \f$ of * the function @@ -152,9 +167,10 @@ typename DoglegOptimizerImpl::IterationResult DoglegOptimizerImpl::Iterate( IterationResult result; bool stay = true; + enum { NONE, INCREASED_DELTA, DECREASED_DELTA } lastAction; // Used to prevent alternating between increasing and decreasing in one iteration while(stay) { // Compute dog leg point - result.dx_d = ComputeDoglegPoint(Delta, dx_u, dx_n); + result.dx_d = ComputeDoglegPoint(Delta, dx_u, dx_n, verbose); if(verbose) cout << "Delta = " << Delta << ", dx_d_norm = " << result.dx_d.vector().norm() << endl; @@ -186,10 +202,12 @@ typename DoglegOptimizerImpl::IterationResult DoglegOptimizerImpl::Iterate( if(mode == ONE_STEP_PER_ITERATION) stay = false; // If not searching, just return with the new Delta else if(mode == SEARCH_EACH_ITERATION) { - if(newDelta == Delta) + if(newDelta == Delta || lastAction == DECREASED_DELTA) stay = false; // Searching, but Newton's solution is within trust region so keep the same trust region - else + else { stay = true; // Searching and increased Delta, so try again to increase Delta + lastAction = INCREASED_DELTA; + } } else { assert(false); } @@ -202,11 +220,12 @@ typename DoglegOptimizerImpl::IterationResult DoglegOptimizerImpl::Iterate( } else if(0.25 > rho && rho >= 0.0) { // M does not agree well with f, decrease Delta until it does Delta *= 0.5; - if(mode == ONE_STEP_PER_ITERATION) + if(mode == ONE_STEP_PER_ITERATION || lastAction == INCREASED_DELTA) stay = false; // If not searching, just return with the new smaller delta - else if(mode == 
SEARCH_EACH_ITERATION) + else if(mode == SEARCH_EACH_ITERATION) { stay = true; - else { + lastAction = DECREASED_DELTA; + } else { assert(false); } } @@ -214,9 +233,10 @@ typename DoglegOptimizerImpl::IterationResult DoglegOptimizerImpl::Iterate( // f actually increased, so keep decreasing Delta until f does not decrease assert(0.0 > rho); Delta *= 0.5; - if(Delta > 1e-5) + if(Delta > 1e-5) { stay = true; - else { + lastAction = DECREASED_DELTA; + } else { if(verbose) cout << "Warning: Dog leg stopping because cannot decrease error with minimum Delta" << endl; stay = false; } @@ -232,14 +252,14 @@ typename DoglegOptimizerImpl::IterationResult DoglegOptimizerImpl::Iterate( template VectorValues DoglegOptimizerImpl::ComputeSteepestDescentPoint(const M& Rd) { - // Compute gradient - // Convert to JacobianFactor's to use existing gradient function - FactorGraph jfg(Rd); - VectorValues grad = gradient(jfg, VectorValues::Zero(*allocateVectorValues(Rd))); + // Compute gradient (call gradientAtZero function, which is defined for various linear systems) + VectorValues grad = *allocateVectorValues(Rd); + gradientAtZero(Rd, grad); double gradientSqNorm = grad.dot(grad); // Compute R * g - Errors Rg = jfg * grad; + FactorGraph Rd_jfg(Rd); + Errors Rg = Rd_jfg * grad; // Compute minimizing step size double step = -gradientSqNorm / dot(Rg, Rg); diff --git a/gtsam/nonlinear/GaussianISAM2-inl.h b/gtsam/nonlinear/GaussianISAM2-inl.h new file mode 100644 index 000000000..d0d37d886 --- /dev/null +++ b/gtsam/nonlinear/GaussianISAM2-inl.h @@ -0,0 +1,161 @@ +/* ---------------------------------------------------------------------------- + + * GTSAM Copyright 2010, Georgia Tech Research Corporation, + * Atlanta, Georgia 30332-0415 + * All Rights Reserved + * Authors: Frank Dellaert, et al. (see THANKS for the full author list) + + * See LICENSE for the license information + + * -------------------------------------------------------------------------- */ + +/** + * @file GaussianISAM2 + * @brief Full non-linear ISAM + * @author Michael Kaess + */ + +#include +#include + +using namespace std; +using namespace gtsam; + +#include + +namespace gtsam { + + /* ************************************************************************* */ + template + void optimize2(const boost::shared_ptr& clique, double threshold, + vector& changed, const vector& replaced, Permuted& delta, int& count) { + // if none of the variables in this clique (frontal and separator!) changed + // significantly, then by the running intersection property, none of the + // cliques in the children need to be processed + + // Are any clique variables part of the tree that has been redone? + bool cliqueReplaced = replaced[(*clique)->frontals().front()]; +#ifndef NDEBUG + BOOST_FOREACH(Index frontal, (*clique)->frontals()) { + assert(cliqueReplaced == replaced[frontal]); + } +#endif + + // If not redone, then has one of the separator variables changed significantly? + bool recalculate = cliqueReplaced; + if(!recalculate) { + BOOST_FOREACH(Index parent, (*clique)->parents()) { + if(changed[parent]) { + recalculate = true; + break; + } + } + } + + // Solve clique if it was replaced, or if any parents were changed above the + // threshold or themselves replaced. 
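The rewritten ComputeSteepestDescentPoint above now obtains the gradient through gradientAtZero, so it works for Bayes nets and Bayes trees alike, and computes the closed-form minimizing step along that direction. In symbols, with g the gradient of the model at zero (the final scaling of g by the step is what the function returns; that scaling happens just after the excerpt shown, so it is stated here as an assumption):

    g = -R^\top d, \qquad
    \text{step} = -\frac{g^\top g}{\lVert R\,g \rVert^{2}}, \qquad
    \delta x_u = \text{step}\cdot g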
+ if(recalculate) { + + // Temporary copy of the original values, to check how much they change + vector originalValues((*clique)->nrFrontals()); + GaussianConditional::const_iterator it; + for(it = (*clique)->beginFrontals(); it!=(*clique)->endFrontals(); it++) { + originalValues[it - (*clique)->beginFrontals()] = delta[*it]; + } + + // Back-substitute + (*clique)->rhs(delta); + (*clique)->solveInPlace(delta); + count += (*clique)->nrFrontals(); + + // Whether the values changed above a threshold, or always true if the + // clique was replaced. + bool valuesChanged = cliqueReplaced; + for(it = (*clique)->beginFrontals(); it!=(*clique)->endFrontals(); it++) { + if(!valuesChanged) { + const Vector& oldValue(originalValues[it - (*clique)->beginFrontals()]); + const SubVector& newValue(delta[*it]); + if((oldValue - newValue).lpNorm() >= threshold) { + valuesChanged = true; + break; + } + } else + break; + } + + // If the values were above the threshold or this clique was replaced + if(valuesChanged) { + // Set changed flag for each frontal variable and leave the new values + BOOST_FOREACH(Index frontal, (*clique)->frontals()) { + changed[frontal] = true; + } + } else { + // Replace with the old values + for(it = (*clique)->beginFrontals(); it!=(*clique)->endFrontals(); it++) { + delta[*it] = originalValues[it - (*clique)->beginFrontals()]; + } + } + + // Recurse to children + BOOST_FOREACH(const typename CLIQUE::shared_ptr& child, clique->children_) { + optimize2(child, threshold, changed, replaced, delta, count); + } + } + } + + /* ************************************************************************* */ + // fast full version without threshold + template + void optimize2(const boost::shared_ptr& clique, VectorValues& delta) { + + // parents are assumed to already be solved and available in result + (*clique)->rhs(delta); + (*clique)->solveInPlace(delta); + + // Solve chilren recursively + BOOST_FOREACH(const typename CLIQUE::shared_ptr& child, clique->children_) { + optimize2(child, delta); + } + } + + ///* ************************************************************************* */ + //boost::shared_ptr optimize2(const GaussianISAM2::sharedClique& root) { + // boost::shared_ptr delta(new VectorValues()); + // set changed; + // // starting from the root, call optimize on each conditional + // optimize2(root, delta); + // return delta; + //} + + /* ************************************************************************* */ + template + int optimize2(const boost::shared_ptr& root, double threshold, const vector& keys, Permuted& delta) { + vector changed(keys.size(), false); + int count = 0; + // starting from the root, call optimize on each conditional + optimize2(root, threshold, changed, keys, delta, count); + return count; + } + + /* ************************************************************************* */ + template + void nnz_internal(const boost::shared_ptr& clique, int& result) { + int dimR = (*clique)->dim(); + int dimSep = (*clique)->get_S().cols() - dimR; + result += ((dimR+1)*dimR)/2 + dimSep*dimR; + // traverse the children + BOOST_FOREACH(const typename CLIQUE::shared_ptr& child, clique->children_) { + nnz_internal(child, result); + } + } + + /* ************************************************************************* */ + template + int calculate_nnz(const boost::shared_ptr& clique) { + int result = 0; + // starting from the root, add up entries of frontal and conditional matrices of each conditional + nnz_internal(clique, result); + return result; + } + +} /// namespace 
gtsam diff --git a/gtsam/nonlinear/GaussianISAM2.cpp b/gtsam/nonlinear/GaussianISAM2.cpp index 7df21bf2c..34456ad36 100644 --- a/gtsam/nonlinear/GaussianISAM2.cpp +++ b/gtsam/nonlinear/GaussianISAM2.cpp @@ -1,9 +1,22 @@ +/* ---------------------------------------------------------------------------- + + * GTSAM Copyright 2010, Georgia Tech Research Corporation, + * Atlanta, Georgia 30332-0415 + * All Rights Reserved + * Authors: Frank Dellaert, et al. (see THANKS for the full author list) + + * See LICENSE for the license information + + * -------------------------------------------------------------------------- */ + /** * @file GaussianISAM2 * @brief Full non-linear ISAM * @author Michael Kaess */ +#include +#include #include using namespace std; @@ -13,132 +26,45 @@ using namespace gtsam; namespace gtsam { -/* ************************************************************************* */ -void optimize2(const BayesTree::sharedClique& clique, double threshold, - vector& changed, const vector& replaced, Permuted& delta, int& count) { - // if none of the variables in this clique (frontal and separator!) changed - // significantly, then by the running intersection property, none of the - // cliques in the children need to be processed + /* ************************************************************************* */ + VectorValues gradient(const BayesTree& bayesTree, const VectorValues& x0) { + return gradient(FactorGraph(bayesTree), x0); + } - // Are any clique variables part of the tree that has been redone? - bool cliqueReplaced = replaced[(*clique)->frontals().front()]; -#ifndef NDEBUG - BOOST_FOREACH(Index frontal, (*clique)->frontals()) { - assert(cliqueReplaced == replaced[frontal]); - } -#endif + /* ************************************************************************* */ + void gradientAtZero(const BayesTree& bayesTree, VectorValues& g) { + gradientAtZero(FactorGraph(bayesTree), g); + } - // If not redone, then has one of the separator variables changed significantly? - bool recalculate = cliqueReplaced; - if(!recalculate) { - BOOST_FOREACH(Index parent, (*clique)->parents()) { - if(changed[parent]) { - recalculate = true; - break; - } - } - } + /* ************************************************************************* */ + VectorValues gradient(const BayesTree >& bayesTree, const VectorValues& x0) { + return gradient(FactorGraph(bayesTree), x0); + } - // Solve clique if it was replaced, or if any parents were changed above the - // threshold or themselves replaced. - if(recalculate) { - - // Temporary copy of the original values, to check how much they change - vector originalValues((*clique)->nrFrontals()); - GaussianConditional::const_iterator it; - for(it = (*clique)->beginFrontals(); it!=(*clique)->endFrontals(); it++) { - originalValues[it - (*clique)->beginFrontals()] = delta[*it]; - } - - // Back-substitute - (*clique)->rhs(delta); - (*clique)->solveInPlace(delta); - count += (*clique)->nrFrontals(); - - // Whether the values changed above a threshold, or always true if the - // clique was replaced. 
- bool valuesChanged = cliqueReplaced; - for(it = (*clique)->beginFrontals(); it!=(*clique)->endFrontals(); it++) { - if(!valuesChanged) { - const Vector& oldValue(originalValues[it - (*clique)->beginFrontals()]); - const SubVector& newValue(delta[*it]); - if((oldValue - newValue).lpNorm() >= threshold) { - valuesChanged = true; - break; - } - } else - break; + /* ************************************************************************* */ + static void gradientAtZeroTreeAdder(const boost::shared_ptr >& root, VectorValues& g) { + // Loop through variables in each clique, adding contributions + int variablePosition = 0; + for(GaussianConditional::const_iterator jit = root->conditional()->begin(); jit != root->conditional()->end(); ++jit) { + const int dim = root->conditional()->dim(jit); + g[*jit] += root->gradientContribution().segment(variablePosition, dim); + variablePosition += dim; } - // If the values were above the threshold or this clique was replaced - if(valuesChanged) { - // Set changed flag for each frontal variable and leave the new values - BOOST_FOREACH(Index frontal, (*clique)->frontals()) { - changed[frontal] = true; - } - } else { - // Replace with the old values - for(it = (*clique)->beginFrontals(); it!=(*clique)->endFrontals(); it++) { - delta[*it] = originalValues[it - (*clique)->beginFrontals()]; - } + // Recursively add contributions from children + typedef boost::shared_ptr > sharedClique; + BOOST_FOREACH(const sharedClique& child, root->children()) { + gradientAtZeroTreeAdder(child, g); } + } + + /* ************************************************************************* */ + void gradientAtZero(const BayesTree >& bayesTree, VectorValues& g) { + // Zero-out gradient + g.setZero(); + + // Sum up contributions for each clique + gradientAtZeroTreeAdder(bayesTree.root(), g); + } - // Recurse to children - BOOST_FOREACH(const BayesTree::sharedClique& child, clique->children_) { - optimize2(child, threshold, changed, replaced, delta, count); - } - } } - -/* ************************************************************************* */ -// fast full version without threshold -void optimize2(const BayesTree::sharedClique& clique, VectorValues& delta) { - - // parents are assumed to already be solved and available in result - (*clique)->rhs(delta); - (*clique)->solveInPlace(delta); - - // Solve chilren recursively - BOOST_FOREACH(const BayesTree::sharedClique& child, clique->children_) { - optimize2(child, delta); - } -} - -///* ************************************************************************* */ -//boost::shared_ptr optimize2(const GaussianISAM2::sharedClique& root) { -// boost::shared_ptr delta(new VectorValues()); -// set changed; -// // starting from the root, call optimize on each conditional -// optimize2(root, delta); -// return delta; -//} - -/* ************************************************************************* */ -int optimize2(const BayesTree::sharedClique& root, double threshold, const vector& keys, Permuted& delta) { - vector changed(keys.size(), false); - int count = 0; - // starting from the root, call optimize on each conditional - optimize2(root, threshold, changed, keys, delta, count); - return count; -} - -/* ************************************************************************* */ -void nnz_internal(const BayesTree::sharedClique& clique, int& result) { - int dimR = (*clique)->dim(); - int dimSep = (*clique)->get_S().cols() - dimR; - result += ((dimR+1)*dimR)/2 + dimSep*dimR; - // traverse the children - BOOST_FOREACH(const 
BayesTree::sharedClique& child, clique->children_) { - nnz_internal(child, result); - } -} - -/* ************************************************************************* */ -int calculate_nnz(const BayesTree::sharedClique& clique) { - int result = 0; - // starting from the root, add up entries of frontal and conditional matrices of each conditional - nnz_internal(clique, result); - return result; -} - -} /// namespace gtsam diff --git a/gtsam/nonlinear/GaussianISAM2.h b/gtsam/nonlinear/GaussianISAM2.h index fe29936cb..4da1d3053 100644 --- a/gtsam/nonlinear/GaussianISAM2.h +++ b/gtsam/nonlinear/GaussianISAM2.h @@ -1,3 +1,14 @@ +/* ---------------------------------------------------------------------------- + + * GTSAM Copyright 2010, Georgia Tech Research Corporation, + * Atlanta, Georgia 30332-0415 + * All Rights Reserved + * Authors: Frank Dellaert, et al. (see THANKS for the full author list) + + * See LICENSE for the license information + + * -------------------------------------------------------------------------- */ + /** * @file GaussianISAM * @brief Full non-linear ISAM. @@ -41,21 +52,74 @@ public: } }; -// optimize the BayesTree, starting from the root -void optimize2(const BayesTree::sharedClique& root, VectorValues& delta); +/** optimize the BayesTree, starting from the root */ +template +void optimize2(const boost::shared_ptr& root, VectorValues& delta); -// optimize the BayesTree, starting from the root; "replaced" needs to contain -// all variables that are contained in the top of the Bayes tree that has been -// redone; "delta" is the current solution, an offset from the linearization -// point; "threshold" is the maximum change against the PREVIOUS delta for -// non-replaced variables that can be ignored, ie. the old delta entry is kept -// and recursive backsubstitution might eventually stop if none of the changed -// variables are contained in the subtree. -// returns the number of variables that were solved for -int optimize2(const BayesTree::sharedClique& root, +/// optimize the BayesTree, starting from the root; "replaced" needs to contain +/// all variables that are contained in the top of the Bayes tree that has been +/// redone; "delta" is the current solution, an offset from the linearization +/// point; "threshold" is the maximum change against the PREVIOUS delta for +/// non-replaced variables that can be ignored, ie. the old delta entry is kept +/// and recursive backsubstitution might eventually stop if none of the changed +/// variables are contained in the subtree. +/// returns the number of variables that were solved for +template +int optimize2(const boost::shared_ptr& root, double threshold, const std::vector& replaced, Permuted& delta); -// calculate the number of non-zero entries for the tree starting at clique (use root for complete matrix) -int calculate_nnz(const BayesTree::sharedClique& clique); +/// calculate the number of non-zero entries for the tree starting at clique (use root for complete matrix) +template +int calculate_nnz(const boost::shared_ptr& clique); + +/** + * Compute the gradient of the energy function, + * \f$ \nabla_{x=x_0} \left\Vert \Sigma^{-1} R x - d \right\Vert^2 \f$, + * centered around \f$ x = x_0 \f$. + * The gradient is \f$ R^T(Rx-d) \f$. 
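The Bayes-tree gradient overloads documented here, and defined in GaussianISAM2.cpp above, mirror the Bayes-net versions; for the ISAM2Clique specialization the zero-point gradient is accumulated from each clique's cached gradientContribution() instead of rebuilding a Jacobian factor graph. A hedged usage sketch (allocateVectorValues is assumed to be reachable through the included header):

    #include <gtsam/nonlinear/GaussianISAM2.h>

    using namespace gtsam;

    void treeGradientAtZero(const BayesTree<GaussianConditional>& bayesTree) {
      VectorValues g = *allocateVectorValues(bayesTree);  // preallocate to the tree's structure
      gradientAtZero(bayesTree, g);                        // -R^T d for the stacked (R, d) system
    }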
+ * @param bayesTree The Gaussian Bayes Tree $(R,d)$ + * @param x0 The center about which to compute the gradient + * @return The gradient as a VectorValues + */ +VectorValues gradient(const BayesTree& bayesTree, const VectorValues& x0); + +/** + * Compute the gradient of the energy function, + * \f$ \nabla_{x=0} \left\Vert \Sigma^{-1} R x - d \right\Vert^2 \f$, + * centered around zero. + * The gradient about zero is \f$ -R^T d \f$. See also gradient(const GaussianBayesNet&, const VectorValues&). + * @param bayesTree The Gaussian Bayes Tree $(R,d)$ + * @param [output] g A VectorValues to store the gradient, which must be preallocated, see allocateVectorValues + * @return The gradient as a VectorValues + */ +void gradientAtZero(const BayesTree& bayesTree, VectorValues& g); + +/** + * Compute the gradient of the energy function, + * \f$ \nabla_{x=x_0} \left\Vert \Sigma^{-1} R x - d \right\Vert^2 \f$, + * centered around \f$ x = x_0 \f$. + * The gradient is \f$ R^T(Rx-d) \f$. + * This specialized version is used with ISAM2, where each clique stores its + * gradient contribution. + * @param bayesTree The Gaussian Bayes Tree $(R,d)$ + * @param x0 The center about which to compute the gradient + * @return The gradient as a VectorValues + */ +VectorValues gradient(const BayesTree >& bayesTree, const VectorValues& x0); + +/** + * Compute the gradient of the energy function, + * \f$ \nabla_{x=0} \left\Vert \Sigma^{-1} R x - d \right\Vert^2 \f$, + * centered around zero. + * The gradient about zero is \f$ -R^T d \f$. See also gradient(const GaussianBayesNet&, const VectorValues&). + * This specialized version is used with ISAM2, where each clique stores its + * gradient contribution. + * @param bayesTree The Gaussian Bayes Tree $(R,d)$ + * @param [output] g A VectorValues to store the gradient, which must be preallocated, see allocateVectorValues + * @return The gradient as a VectorValues + */ +void gradientAtZero(const BayesTree >& bayesTree, VectorValues& g); }/// namespace gtsam + +#include diff --git a/gtsam/nonlinear/ISAM2-impl-inl.h b/gtsam/nonlinear/ISAM2-impl-inl.h index 5dc201c18..17c24e23a 100644 --- a/gtsam/nonlinear/ISAM2-impl-inl.h +++ b/gtsam/nonlinear/ISAM2-impl-inl.h @@ -81,7 +81,7 @@ struct ISAM2::Impl { * * Alternatively could we trace up towards the root for each variable here? */ - static void FindAll(ISAM2Type::sharedClique clique, FastSet& keys, const std::vector& markedMask); + static void FindAll(typename ISAM2Clique::shared_ptr clique, FastSet& keys, const vector& markedMask); /** * Apply expmap to the given values, but only for indices appearing in @@ -148,7 +148,7 @@ void ISAM2::Impl::AddVariables( const size_t originalDim = delta->dim(); const size_t originalnVars = delta->size(); delta.container().append(dims); - delta.container().vector().segment(originalDim, newDim) = Vector::Zero(newDim); + delta.container().vector().segment(originalDim, newDim).operator=(Vector::Zero(newDim)); delta.permutation().resize(originalnVars + newTheta.size()); { _VariableAdder vadder(ordering, delta, originalnVars); @@ -188,7 +188,7 @@ FastSet ISAM2::Impl::CheckRelinearization(Permu /* ************************************************************************* */ template -void ISAM2::Impl::FindAll(ISAM2Type::sharedClique clique, FastSet& keys, const std::vector& markedMask) { +void ISAM2::Impl::FindAll(typename ISAM2Clique::shared_ptr clique, FastSet& keys, const vector& markedMask) { static const bool debug = false; // does the separator contain any of the variables? 
bool found = false; @@ -202,7 +202,7 @@ void ISAM2::Impl::FindAll(ISAM2Type::sharedClique cl if(debug) clique->print("Key(s) marked in clique "); if(debug) cout << "so marking key " << (*clique)->keys().front() << endl; } - BOOST_FOREACH(const sharedClique& child, clique->children_) { + BOOST_FOREACH(const typename ISAM2Clique::shared_ptr& child, clique->children_) { FindAll(child, keys, markedMask); } } @@ -334,9 +334,11 @@ ISAM2::Impl::PartialSolve(GaussianFactorGraph& facto // eliminate into a Bayes net tic(7,"eliminate"); - GaussianJunctionTree jt(factors, affectedFactorsIndex); - result.bayesTree = jt.eliminate(EliminatePreferLDL, true); + JunctionTree jt(factors, affectedFactorsIndex); + result.bayesTree = jt.eliminate(EliminatePreferLDL); if(debug && result.bayesTree) { + if(boost::dynamic_pointer_cast >(result.bayesTree)) + cout << "Is an ISAM2 clique" << endl; cout << "Re-eliminated BT:\n"; result.bayesTree->printTree(""); } diff --git a/gtsam/nonlinear/ISAM2-inl.h b/gtsam/nonlinear/ISAM2-inl.h index 0519af50c..a8c76392c 100644 --- a/gtsam/nonlinear/ISAM2-inl.h +++ b/gtsam/nonlinear/ISAM2-inl.h @@ -28,7 +28,7 @@ using namespace boost::assign; #include #include - +#include namespace gtsam { @@ -42,12 +42,18 @@ static const bool structuralLast = false; /* ************************************************************************* */ template ISAM2::ISAM2(const ISAM2Params& params): - delta_(Permutation(), deltaUnpermuted_), params_(params) {} + delta_(Permutation(), deltaUnpermuted_), params_(params) { + if(params_.optimizationParams.type() == typeid(ISAM2DoglegParams)) + doglegDelta_ = boost::get(params_.optimizationParams).initialDelta; +} /* ************************************************************************* */ template ISAM2::ISAM2(): - delta_(Permutation(), deltaUnpermuted_) {} + delta_(Permutation(), deltaUnpermuted_) { + if(params_.optimizationParams.type() == typeid(ISAM2DoglegParams)) + doglegDelta_ = boost::get(params_.optimizationParams).initialDelta; +} /* ************************************************************************* */ template @@ -182,7 +188,11 @@ boost::shared_ptr > ISAM2::recalculat sharedClique clique = this->nodes_[key]; while(clique) { affectedStructuralKeys.insert((*clique)->beginFrontals(), (*clique)->endFrontals()); - clique = clique->parent_.lock(); +#ifndef NDEBUG // This is because BayesTreeClique stores pointers to BayesTreeClique but we actually have the derived type ISAM2Clique + clique = boost::dynamic_pointer_cast(clique->parent_.lock()); +#else + clique = boost::static_pointer_cast(clique->parent_.lock()); +#endif } } toc(0, "affectedStructuralKeys"); @@ -276,13 +286,13 @@ boost::shared_ptr > ISAM2::recalculat toc(2,"linearize"); tic(5,"eliminate"); - GaussianJunctionTree jt(factors, variableIndex_); - sharedClique newRoot = jt.eliminate(EliminatePreferLDL, true); + JunctionTree jt(factors, variableIndex_); + sharedClique newRoot = jt.eliminate(EliminatePreferLDL); if(debug) newRoot->print("Eliminated: "); toc(5,"eliminate"); tic(6,"insert"); - BayesTree::clear(); + this->clear(); this->insert(newRoot); toc(6,"insert"); @@ -306,6 +316,7 @@ boost::shared_ptr > ISAM2::recalculat affectedAndNewKeys.insert(affectedAndNewKeys.end(), newKeys.begin(), newKeys.end()); tic(1,"relinearizeAffected"); GaussianFactorGraph factors(*relinearizeAffectedFactors(affectedAndNewKeys)); + if(debug) factors.print("Relinearized factors: "); toc(1,"relinearizeAffected"); if(debug) { cout << "Affected keys: "; BOOST_FOREACH(const Index key, affectedKeys) { cout 
<< key << " "; } cout << endl; } @@ -450,7 +461,10 @@ ISAM2Result ISAM2::update( tic(3,"gather involved keys"); // 3. Mark linear update FastSet markedKeys = Impl::IndicesFromFactors(ordering_, newFactors); // Get keys from new factors - FastVector newKeys(markedKeys.begin(), markedKeys.end()); // Make a copy of these, as we'll soon add to them + // NOTE: we use assign instead of the iterator constructor here because this + // is a vector of size_t, so the constructor unintentionally resolves to + // vector(size_t count, Index value) instead of the iterator constructor. + FastVector newKeys; newKeys.assign(markedKeys.begin(), markedKeys.end()); // Make a copy of these, as we'll soon add to them FastSet structuralKeys; if(structuralLast) structuralKeys = markedKeys; // If we're using structural-last ordering, make another copy toc(3,"gather involved keys"); @@ -513,25 +527,37 @@ ISAM2Result ISAM2::update( tic(9,"solve"); // 9. Solve - if (params_.wildfireThreshold <= 0.0 || disableReordering) { - VectorValues newDelta(theta_.dims(ordering_)); - optimize2(this->root(), newDelta); - if(debug) newDelta.print("newDelta: "); - assert(newDelta.size() == delta_.size()); - delta_.permutation() = Permutation::Identity(delta_.size()); - delta_.container() = newDelta; - lastBacksubVariableCount = theta_.size(); - } else { - vector replacedKeysMask(variableIndex_.size(), false); - if(replacedKeys) { - BOOST_FOREACH(const Index var, *replacedKeys) { - replacedKeysMask[var] = true; } } - lastBacksubVariableCount = optimize2(this->root(), params_.wildfireThreshold, replacedKeysMask, delta_); // modifies delta_ + if(params_.optimizationParams.type() == typeid(ISAM2GaussNewtonParams)) { + const ISAM2GaussNewtonParams& gaussNewtonParams = boost::get(params_.optimizationParams); + if (gaussNewtonParams.wildfireThreshold <= 0.0 || disableReordering) { + VectorValues newDelta(theta_.dims(ordering_)); + optimize2(this->root(), newDelta); + if(debug) newDelta.print("newDelta: "); + assert(newDelta.size() == delta_.size()); + delta_.permutation() = Permutation::Identity(delta_.size()); + delta_.container() = newDelta; + lastBacksubVariableCount = theta_.size(); + } else { + vector replacedKeysMask(variableIndex_.size(), false); + if(replacedKeys) { + BOOST_FOREACH(const Index var, *replacedKeys) { + replacedKeysMask[var] = true; } } + lastBacksubVariableCount = optimize2(this->root(), gaussNewtonParams.wildfireThreshold, replacedKeysMask, delta_); // modifies delta_ #ifndef NDEBUG - for(size_t j=0; j).all()); + for(size_t j=0; j).all()); #endif + } + } else if(params_.optimizationParams.type() == typeid(ISAM2DoglegParams)) { + const ISAM2DoglegParams& doglegParams = boost::get(params_.optimizationParams); + // Do one Dogleg iteration + DoglegOptimizerImpl::IterationResult doglegResult = DoglegOptimizerImpl::Iterate( + *doglegDelta_, doglegParams.adaptationMode, *this, nonlinearFactors_, theta_, ordering_, nonlinearFactors_.error(theta_), doglegParams.verbose); + // Update Delta and linear step + doglegDelta_ = doglegResult.Delta; + delta_.permutation() = Permutation::Identity(delta_.size()); // Dogleg solves for the full delta so there is no permutation + delta_.container() = doglegResult.dx_d; // Copy the VectorValues containing with the linear solution } toc(9,"solve"); @@ -540,6 +566,8 @@ ISAM2Result ISAM2::update( result.errorAfter.reset(nonlinearFactors_.error(calculateEstimate())); toc(10,"evaluate error after"); + result.cliques = this->nodes().size(); + return result; } @@ -571,5 +599,13 @@ VALUES 
ISAM2::calculateBestEstimate() const { return theta_.retract(delta, ordering_); } +/* ************************************************************************* */ +template +VectorValues optimize(const ISAM2& isam) { + VectorValues delta = *allocateVectorValues(isam); + optimize2(isam.root(), delta); + return delta; +} + } /// namespace gtsam diff --git a/gtsam/nonlinear/ISAM2.h b/gtsam/nonlinear/ISAM2.h index 50b22f1c5..9f552b7fc 100644 --- a/gtsam/nonlinear/ISAM2.h +++ b/gtsam/nonlinear/ISAM2.h @@ -20,20 +20,64 @@ #pragma once #include +#include #include +#include + namespace gtsam { /** * @defgroup ISAM2 */ +/** + * @ingroup ISAM2 + * Parameters for ISAM2 using Gauss-Newton optimization. Either this class or + * ISAM2DoglegParams should be specified as the optimizationParams in + * ISAM2Params, which should in turn be passed to ISAM2(const ISAM2Params&). + */ +struct ISAM2GaussNewtonParams { + double wildfireThreshold; ///< Continue updating the linear delta only when changes are above this threshold (default: 0.001) + + /** Specify parameters as constructor arguments */ + ISAM2GaussNewtonParams( + double _wildfireThreshold = 0.001 ///< see ISAM2GaussNewtonParams public variables, ISAM2GaussNewtonParams::wildfireThreshold + ) : wildfireThreshold(_wildfireThreshold) {} +}; + +/** + * @ingroup ISAM2 + * Parameters for ISAM2 using Dogleg optimization. Either this class or + * ISAM2GaussNewtonParams should be specified as the optimizationParams in + * ISAM2Params, which should in turn be passed to ISAM2(const ISAM2Params&). + */ +struct ISAM2DoglegParams { + double initialDelta; ///< The initial trust region radius for Dogleg + DoglegOptimizerImpl::TrustRegionAdaptationMode adaptationMode; ///< See description in DoglegOptimizerImpl::TrustRegionAdaptationMode + bool verbose; ///< Whether Dogleg prints iteration and convergence information + + /** Specify parameters as constructor arguments */ + ISAM2DoglegParams( + double _initialDelta = 1.0, ///< see ISAM2DoglegParams public variables, ISAM2DoglegParams::initialDelta + DoglegOptimizerImpl::TrustRegionAdaptationMode _adaptationMode = DoglegOptimizerImpl::ONE_STEP_PER_ITERATION, ///< see ISAM2DoglegParams public variables, ISAM2DoglegParams::adaptationMode + bool _verbose = false ///< see ISAM2DoglegParams public variables, ISAM2DoglegParams::verbose + ) : initialDelta(_initialDelta), adaptationMode(_adaptationMode), verbose(_verbose) {} +}; + /** * @ingroup ISAM2 * Parameters for the ISAM2 algorithm. Default parameter values are listed below. */ struct ISAM2Params { - double wildfireThreshold; ///< Continue updating the linear delta only when changes are above this threshold (default: 0.001) + typedef boost::variant OptimizationParams; ///< Either ISAM2GaussNewtonParams or ISAM2DoglegParams + /** Optimization parameters, this both selects the nonlinear optimization + * method and specifies its parameters, either ISAM2GaussNewtonParams or + * ISAM2DoglegParams. In the former, Gauss-Newton optimization will be used + * with the specified parameters, and in the latter Powell's dog-leg + * algorithm will be used with the specified parameters. 
+ */ + OptimizationParams optimizationParams; double relinearizeThreshold; ///< Only relinearize variables whose linear delta magnitude is greater than this threshold (default: 0.1) int relinearizeSkip; ///< Only relinearize any variables every relinearizeSkip calls to ISAM2::update (default: 10) bool enableRelinearization; ///< Controls whether ISAM2 will ever relinearize any variables (default: true) @@ -41,12 +85,12 @@ struct ISAM2Params { /** Specify parameters as constructor arguments */ ISAM2Params( - double _wildfireThreshold = 0.001, ///< ISAM2Params::wildfireThreshold - double _relinearizeThreshold = 0.1, ///< ISAM2Params::relinearizeThreshold - int _relinearizeSkip = 10, ///< ISAM2Params::relinearizeSkip - bool _enableRelinearization = true, ///< ISAM2Params::enableRelinearization - bool _evaluateNonlinearError = false ///< ISAM2Params::evaluateNonlinearError - ) : wildfireThreshold(_wildfireThreshold), relinearizeThreshold(_relinearizeThreshold), + OptimizationParams _optimizationParams = ISAM2GaussNewtonParams(), ///< see ISAM2Params public variables, ISAM2Params::optimizationParams + double _relinearizeThreshold = 0.1, ///< see ISAM2Params public variables, ISAM2Params::relinearizeThreshold + int _relinearizeSkip = 10, ///< see ISAM2Params public variables, ISAM2Params::relinearizeSkip + bool _enableRelinearization = true, ///< see ISAM2Params public variables, ISAM2Params::enableRelinearization + bool _evaluateNonlinearError = false ///< see ISAM2Params public variables, ISAM2Params::evaluateNonlinearError + ) : optimizationParams(_optimizationParams), relinearizeThreshold(_relinearizeThreshold), relinearizeSkip(_relinearizeSkip), enableRelinearization(_enableRelinearization), evaluateNonlinearError(_evaluateNonlinearError) {} }; @@ -100,6 +144,87 @@ struct ISAM2Result { * will be reeliminated. */ size_t variablesReeliminated; + + /** The number of cliques in the Bayes' Tree */ + size_t cliques; +}; + +template +struct ISAM2Clique : public BayesTreeCliqueBase, CONDITIONAL> { + + typedef ISAM2Clique This; + typedef BayesTreeCliqueBase Base; + typedef boost::shared_ptr shared_ptr; + typedef boost::weak_ptr weak_ptr; + typedef CONDITIONAL ConditionalType; + typedef typename ConditionalType::shared_ptr sharedConditional; + + typename Base::FactorType::shared_ptr cachedFactor_; + Vector gradientContribution_; + + /** Construct from a conditional */ + ISAM2Clique(const sharedConditional& conditional) : Base(conditional) { + throw runtime_error("ISAM2Clique should always be constructed with the elimination result constructor"); } + + /** Construct from an elimination result */ + ISAM2Clique(const std::pair >& result) : + Base(result.first), cachedFactor_(result.second), gradientContribution_(result.first->get_R().cols() + result.first->get_S().cols()) { + // Compute gradient contribution + const ConditionalType& conditional(*result.first); + gradientContribution_ << -(conditional.get_R() * conditional.permutation().transpose()).transpose() * conditional.get_d(), + -conditional.get_S().transpose() * conditional.get_d(); + } + + /** Produce a deep copy, copying the cached factor and gradient contribution */ + shared_ptr clone() const { + shared_ptr copy(new ISAM2Clique(make_pair( + sharedConditional(new ConditionalType(*Base::conditional_)), + cachedFactor_ ? 
cachedFactor_->clone() : typename Base::FactorType::shared_ptr()))); + copy->gradientContribution_ = gradientContribution_; + return copy; + } + + /** Access the cached factor */ + typename Base::FactorType::shared_ptr& cachedFactor() { return cachedFactor_; } + + /** Access the gradient contribution */ + const Vector& gradientContribution() const { return gradientContribution_; } + + bool equals(const This& other, double tol=1e-9) const { + return Base::equals(other) && ((!cachedFactor_ && !other.cachedFactor_) || (cachedFactor_ && other.cachedFactor_ && cachedFactor_->equals(*other.cachedFactor_, tol))); + } + + /** print this node */ + void print(const std::string& s = "") const { + Base::print(s); + if(cachedFactor_) + cachedFactor_->print(s + "Cached: "); + else + cout << s << "Cached empty" << endl; + if(gradientContribution_.rows() != 0) + gtsam::print(gradientContribution_, "Gradient contribution: "); + } + + void permuteWithInverse(const Permutation& inversePermutation) { + if(cachedFactor_) cachedFactor_->permuteWithInverse(inversePermutation); + Base::permuteWithInverse(inversePermutation); + } + + bool permuteSeparatorWithInverse(const Permutation& inversePermutation) { + bool changed = Base::permuteSeparatorWithInverse(inversePermutation); + if(changed) if(cachedFactor_) cachedFactor_->permuteWithInverse(inversePermutation); + return changed; + } + +private: + /** Serialization function */ + friend class boost::serialization::access; + template + void serialize(ARCHIVE & ar, const unsigned int version) { + ar & BOOST_SERIALIZATION_BASE_OBJECT_NVP(Base); + ar & BOOST_SERIALIZATION_NVP(cachedFactor_); + ar & BOOST_SERIALIZATION_NVP(gradientContribution_); + } }; /** @@ -113,7 +238,7 @@ struct ISAM2Result { * */ template > -class ISAM2: public BayesTree { +class ISAM2: public BayesTree > { protected: @@ -147,6 +272,9 @@ protected: /** The current parameters */ ISAM2Params params_; + /** The current Dogleg Delta (trust region radius) */ + boost::optional doglegDelta_; + private: #ifndef NDEBUG std::vector lastRelinVariables_; @@ -156,7 +284,7 @@ private: public: - typedef BayesTree Base; ///< The BayesTree base class + typedef BayesTree > Base; ///< The BayesTree base class typedef ISAM2 This; ///< This class typedef VALUES Values; typedef GRAPH Graph; @@ -167,7 +295,11 @@ public: /** Create an empty ISAM2 instance using the default set of parameters (see ISAM2Params) */ ISAM2(); - void cloneTo(boost::shared_ptr& newISAM2) const { + typedef typename Base::Clique Clique; ///< A clique + typedef typename Base::sharedClique sharedClique; ///< Shared pointer to a clique + typedef typename Base::Cliques Cliques; ///< List of Clique typedef from base class + + void cloneTo(boost::shared_ptr& newISAM2) const { boost::shared_ptr bayesTree = boost::static_pointer_cast(newISAM2); Base::cloneTo(bayesTree); newISAM2->theta_ = theta_; @@ -177,6 +309,7 @@ public: newISAM2->nonlinearFactors_ = nonlinearFactors_; newISAM2->ordering_ = ordering_; newISAM2->params_ = params_; + newISAM2->doglegDelta_ = doglegDelta_; #ifndef NDEBUG newISAM2->lastRelinVariables_ = lastRelinVariables_; #endif @@ -188,9 +321,6 @@ public: newISAM2->lastNnzTop = lastNnzTop; } - typedef typename BayesTree::sharedClique sharedClique; ///< Shared pointer to a clique - typedef typename BayesTree::Cliques Cliques; ///< List of Clique typedef from base class - /** * Add new factors, updating the solution and relinearizing as needed. * @@ -209,7 +339,7 @@ public: * (Params::relinearizeSkip). 
* @return An ISAM2Result struct containing information about the update */ - ISAM2Result update(const GRAPH& newFactors, const VALUES& newTheta, + ISAM2Result update(const GRAPH& newFactors = GRAPH(), const VALUES& newTheta = VALUES(), bool force_relinearize = false); /** Access the current linearization point */ @@ -272,6 +402,10 @@ private: }; // ISAM2 +/** Get the linear delta for the ISAM2 object, unpermuted the delta returned by ISAM2::getDelta() */ +template +VectorValues optimize(const ISAM2& isam); + } /// namespace gtsam #include diff --git a/gtsam/nonlinear/Makefile.am b/gtsam/nonlinear/Makefile.am index 610bb7f5c..58503466c 100644 --- a/gtsam/nonlinear/Makefile.am +++ b/gtsam/nonlinear/Makefile.am @@ -31,6 +31,7 @@ headers += DoglegOptimizer.h DoglegOptimizer-inl.h headers += NonlinearISAM.h NonlinearISAM-inl.h headers += ISAM2.h ISAM2-inl.h ISAM2-impl-inl.h sources += GaussianISAM2.cpp +headers += GaussianISAM2-inl.h # Nonlinear constraints headers += NonlinearEquality.h diff --git a/gtsam/nonlinear/NonlinearFactor.h b/gtsam/nonlinear/NonlinearFactor.h index ad22d3e41..c65b4ac08 100644 --- a/gtsam/nonlinear/NonlinearFactor.h +++ b/gtsam/nonlinear/NonlinearFactor.h @@ -249,7 +249,7 @@ public: * to transform it to \f$ (h(x)-z)^2/\sigma^2 \f$, and then multiply by 0.5. */ virtual double error(const VALUES& c) const { - if (active(c)) + if (this->active(c)) return 0.5 * noiseModel_->distance(unwhitenedError(c)); else return 0.0; @@ -262,7 +262,7 @@ public: */ boost::shared_ptr linearize(const VALUES& x, const Ordering& ordering) const { // Only linearize if the factor is active - if (!active(x)) + if (!this->active(x)) return boost::shared_ptr(); // Create the set of terms - Jacobians for each index diff --git a/gtsam/nonlinear/Ordering.h b/gtsam/nonlinear/Ordering.h index 79a9aa436..faa06bbc7 100644 --- a/gtsam/nonlinear/Ordering.h +++ b/gtsam/nonlinear/Ordering.h @@ -18,6 +18,7 @@ #pragma once #include +#include #include #include diff --git a/gtsam/nonlinear/tests/testOrdering.cpp b/gtsam/nonlinear/tests/testOrdering.cpp index 64f5e7f4c..8c1804016 100644 --- a/gtsam/nonlinear/tests/testOrdering.cpp +++ b/gtsam/nonlinear/tests/testOrdering.cpp @@ -15,6 +15,7 @@ */ #include +#include #include using namespace gtsam; diff --git a/tests/testGaussianISAM2.cpp b/tests/testGaussianISAM2.cpp index 979f7c7c8..179deed62 100644 --- a/tests/testGaussianISAM2.cpp +++ b/tests/testGaussianISAM2.cpp @@ -6,6 +6,7 @@ #include #include // for operator += +#include using namespace boost::assign; #include @@ -167,7 +168,8 @@ TEST(ISAM2, optimize2) { conditional->solveInPlace(expected); // Clique - GaussianISAM2::sharedClique clique(new GaussianISAM2::Clique(conditional)); + GaussianISAM2::sharedClique clique( + GaussianISAM2::Clique::Create(make_pair(conditional,GaussianFactor::shared_ptr()))); VectorValues actual(theta.dims(ordering)); conditional->rhs(actual); optimize2(clique, actual); @@ -192,9 +194,13 @@ bool isam_check(const planarSLAM::Graph& fullgraph, const planarSLAM::Values& fu } /* ************************************************************************* */ -TEST(ISAM2, slamlike_solution) +TEST(ISAM2, slamlike_solution_gaussnewton) { +// SETDEBUG("ISAM2 update", true); +// SETDEBUG("ISAM2 update verbose", true); +// SETDEBUG("ISAM2 recalculate", true); + // Pose and landmark key types from planarSLAM typedef planarSLAM::PoseKey PoseKey; typedef planarSLAM::PointKey PointKey; @@ -204,7 +210,7 @@ TEST(ISAM2, slamlike_solution) SharedDiagonal brNoise = sharedSigmas(Vector_(2, M_PI/100.0, 
0.1)); // These variables will be reused and accumulate factors and values - GaussianISAM2 isam(ISAM2Params(0.001, 0.0, 0, false)); + GaussianISAM2 isam(ISAM2Params(ISAM2GaussNewtonParams(0.001), 0.0, 0, false)); planarSLAM::Values fullinit; planarSLAM::Graph fullgraph; @@ -224,7 +230,7 @@ TEST(ISAM2, slamlike_solution) isam.update(newfactors, init); } - EXPECT(isam_check(fullgraph, fullinit, isam)); + CHECK(isam_check(fullgraph, fullinit, isam)); // Add odometry from time 0 to time 5 for( ; i<5; ++i) { @@ -289,11 +295,44 @@ TEST(ISAM2, slamlike_solution) } // Compare solutions - EXPECT(isam_check(fullgraph, fullinit, isam)); + CHECK(isam_check(fullgraph, fullinit, isam)); + + // Check gradient at each node + typedef GaussianISAM2::sharedClique sharedClique; + BOOST_FOREACH(const sharedClique& clique, isam.nodes()) { + // Compute expected gradient + FactorGraph jfg; + jfg.push_back(JacobianFactor::shared_ptr(new JacobianFactor(*clique->conditional()))); + VectorValues expectedGradient(*allocateVectorValues(isam)); + gradientAtZero(jfg, expectedGradient); + // Compare with actual gradients + int variablePosition = 0; + for(GaussianConditional::const_iterator jit = clique->conditional()->begin(); jit != clique->conditional()->end(); ++jit) { + const int dim = clique->conditional()->dim(jit); + Vector actual = clique->gradientContribution().segment(variablePosition, dim); + EXPECT(assert_equal(expectedGradient[*jit], actual)); + variablePosition += dim; + } + LONGS_EQUAL(clique->gradientContribution().rows(), variablePosition); + } + + // Check gradient + VectorValues expectedGradient(*allocateVectorValues(isam)); + gradientAtZero(FactorGraph(isam), expectedGradient); + VectorValues expectedGradient2(gradient(FactorGraph(isam), VectorValues::Zero(expectedGradient))); + VectorValues actualGradient(*allocateVectorValues(isam)); + gradientAtZero(isam, actualGradient); + EXPECT(assert_equal(expectedGradient2, expectedGradient)); + EXPECT(assert_equal(expectedGradient, actualGradient)); } /* ************************************************************************* */ -TEST_UNSAFE(ISAM2, clone) { +TEST(ISAM2, slamlike_solution_dogleg) +{ + +// SETDEBUG("ISAM2 update", true); +// SETDEBUG("ISAM2 update verbose", true); +// SETDEBUG("ISAM2 recalculate", true); // Pose and landmark key types from planarSLAM typedef planarSLAM::PoseKey PoseKey; @@ -304,7 +343,135 @@ TEST_UNSAFE(ISAM2, clone) { SharedDiagonal brNoise = sharedSigmas(Vector_(2, M_PI/100.0, 0.1)); // These variables will be reused and accumulate factors and values - GaussianISAM2 isam(ISAM2Params(0.001, 0.0, 0, false, true)); + GaussianISAM2 isam(ISAM2Params(ISAM2DoglegParams(1.0), 0.0, 0, false)); + planarSLAM::Values fullinit; + planarSLAM::Graph fullgraph; + + // i keeps track of the time step + size_t i = 0; + + // Add a prior at time 0 and update isam + { + planarSLAM::Graph newfactors; + newfactors.addPrior(0, Pose2(0.0, 0.0, 0.0), odoNoise); + fullgraph.push_back(newfactors); + + planarSLAM::Values init; + init.insert(PoseKey(0), Pose2(0.01, 0.01, 0.01)); + fullinit.insert(PoseKey(0), Pose2(0.01, 0.01, 0.01)); + + isam.update(newfactors, init); + } + + CHECK(isam_check(fullgraph, fullinit, isam)); + + // Add odometry from time 0 to time 5 + for( ; i<5; ++i) { + planarSLAM::Graph newfactors; + newfactors.addOdometry(i, i+1, Pose2(1.0, 0.0, 0.0), odoNoise); + fullgraph.push_back(newfactors); + + planarSLAM::Values init; + init.insert(PoseKey(i+1), Pose2(double(i+1)+0.1, -0.1, 0.01)); + fullinit.insert(PoseKey(i+1), 
Pose2(double(i+1)+0.1, -0.1, 0.01)); + + isam.update(newfactors, init); + } + + // Add odometry from time 5 to 6 and landmark measurement at time 5 + { + planarSLAM::Graph newfactors; + newfactors.addOdometry(i, i+1, Pose2(1.0, 0.0, 0.0), odoNoise); + newfactors.addBearingRange(i, 0, Rot2::fromAngle(M_PI/4.0), 5.0, brNoise); + newfactors.addBearingRange(i, 1, Rot2::fromAngle(-M_PI/4.0), 5.0, brNoise); + fullgraph.push_back(newfactors); + + planarSLAM::Values init; + init.insert(PoseKey(i+1), Pose2(1.01, 0.01, 0.01)); + init.insert(PointKey(0), Point2(5.0/sqrt(2.0), 5.0/sqrt(2.0))); + init.insert(PointKey(1), Point2(5.0/sqrt(2.0), -5.0/sqrt(2.0))); + fullinit.insert(PoseKey(i+1), Pose2(1.01, 0.01, 0.01)); + fullinit.insert(PointKey(0), Point2(5.0/sqrt(2.0), 5.0/sqrt(2.0))); + fullinit.insert(PointKey(1), Point2(5.0/sqrt(2.0), -5.0/sqrt(2.0))); + + isam.update(newfactors, init); + ++ i; + } + + // Add odometry from time 6 to time 10 + for( ; i<10; ++i) { + planarSLAM::Graph newfactors; + newfactors.addOdometry(i, i+1, Pose2(1.0, 0.0, 0.0), odoNoise); + fullgraph.push_back(newfactors); + + planarSLAM::Values init; + init.insert(PoseKey(i+1), Pose2(double(i+1)+0.1, -0.1, 0.01)); + fullinit.insert(PoseKey(i+1), Pose2(double(i+1)+0.1, -0.1, 0.01)); + + isam.update(newfactors, init); + } + + // Add odometry from time 10 to 11 and landmark measurement at time 10 + { + planarSLAM::Graph newfactors; + newfactors.addOdometry(i, i+1, Pose2(1.0, 0.0, 0.0), odoNoise); + newfactors.addBearingRange(i, 0, Rot2::fromAngle(M_PI/4.0 + M_PI/16.0), 4.5, brNoise); + newfactors.addBearingRange(i, 1, Rot2::fromAngle(-M_PI/4.0 + M_PI/16.0), 4.5, brNoise); + fullgraph.push_back(newfactors); + + planarSLAM::Values init; + init.insert(PoseKey(i+1), Pose2(6.9, 0.1, 0.01)); + fullinit.insert(PoseKey(i+1), Pose2(6.9, 0.1, 0.01)); + + isam.update(newfactors, init); + ++ i; + } + + // Compare solutions + CHECK(isam_check(fullgraph, fullinit, isam)); + + // Check gradient at each node + typedef GaussianISAM2::sharedClique sharedClique; + BOOST_FOREACH(const sharedClique& clique, isam.nodes()) { + // Compute expected gradient + FactorGraph jfg; + jfg.push_back(JacobianFactor::shared_ptr(new JacobianFactor(*clique->conditional()))); + VectorValues expectedGradient(*allocateVectorValues(isam)); + gradientAtZero(jfg, expectedGradient); + // Compare with actual gradients + int variablePosition = 0; + for(GaussianConditional::const_iterator jit = clique->conditional()->begin(); jit != clique->conditional()->end(); ++jit) { + const int dim = clique->conditional()->dim(jit); + Vector actual = clique->gradientContribution().segment(variablePosition, dim); + EXPECT(assert_equal(expectedGradient[*jit], actual)); + variablePosition += dim; + } + LONGS_EQUAL(clique->gradientContribution().rows(), variablePosition); + } + + // Check gradient + VectorValues expectedGradient(*allocateVectorValues(isam)); + gradientAtZero(FactorGraph(isam), expectedGradient); + VectorValues expectedGradient2(gradient(FactorGraph(isam), VectorValues::Zero(expectedGradient))); + VectorValues actualGradient(*allocateVectorValues(isam)); + gradientAtZero(isam, actualGradient); + EXPECT(assert_equal(expectedGradient2, expectedGradient)); + EXPECT(assert_equal(expectedGradient, actualGradient)); +} + +/* ************************************************************************* */ +TEST(ISAM2, clone) { + + // Pose and landmark key types from planarSLAM + typedef planarSLAM::PoseKey PoseKey; + typedef planarSLAM::PointKey PointKey; + + // Set up parameters + 
SharedDiagonal odoNoise = sharedSigmas(Vector_(3, 0.1, 0.1, M_PI/100.0)); + SharedDiagonal brNoise = sharedSigmas(Vector_(2, M_PI/100.0, 0.1)); + + // These variables will be reused and accumulate factors and values + GaussianISAM2 isam(ISAM2Params(ISAM2GaussNewtonParams(0.001), 0.0, 0, false, true)); planarSLAM::Values fullinit; planarSLAM::Graph fullgraph; @@ -396,6 +563,67 @@ TEST_UNSAFE(ISAM2, clone) { CHECK(assert_equal(isam, *isam2)); } +/* ************************************************************************* */ +TEST(ISAM2, permute_cached) { + typedef ISAM2Clique Clique; + typedef boost::shared_ptr > sharedClique; + + // Construct expected permuted BayesTree (variable 2 has been changed to 1) + BayesTree expected; + expected.insert(sharedClique(new Clique(make_pair( + boost::make_shared(pair_list_of + (3, Matrix_(1,1,1.0)) + (4, Matrix_(1,1,2.0)), + 2, Vector_(1,1.0), Vector_(1,1.0)), // p(3,4) + HessianFactor::shared_ptr())))); // Cached: empty + expected.insert(sharedClique(new Clique(make_pair( + boost::make_shared(pair_list_of + (2, Matrix_(1,1,1.0)) + (3, Matrix_(1,1,2.0)), + 1, Vector_(1,1.0), Vector_(1,1.0)), // p(2|3) + boost::make_shared(3, Matrix_(1,1,1.0), Vector_(1,1.0), 0.0))))); // Cached: p(3) + expected.insert(sharedClique(new Clique(make_pair( + boost::make_shared(pair_list_of + (0, Matrix_(1,1,1.0)) + (2, Matrix_(1,1,2.0)), + 1, Vector_(1,1.0), Vector_(1,1.0)), // p(0|2) + boost::make_shared(1, Matrix_(1,1,1.0), Vector_(1,1.0), 0.0))))); // Cached: p(1) + // Change variable 2 to 1 + expected.root()->children().front()->conditional()->keys()[0] = 1; + expected.root()->children().front()->children().front()->conditional()->keys()[1] = 1; + + // Construct unpermuted BayesTree + BayesTree actual; + actual.insert(sharedClique(new Clique(make_pair( + boost::make_shared(pair_list_of + (3, Matrix_(1,1,1.0)) + (4, Matrix_(1,1,2.0)), + 2, Vector_(1,1.0), Vector_(1,1.0)), // p(3,4) + HessianFactor::shared_ptr())))); // Cached: empty + actual.insert(sharedClique(new Clique(make_pair( + boost::make_shared(pair_list_of + (2, Matrix_(1,1,1.0)) + (3, Matrix_(1,1,2.0)), + 1, Vector_(1,1.0), Vector_(1,1.0)), // p(2|3) + boost::make_shared(3, Matrix_(1,1,1.0), Vector_(1,1.0), 0.0))))); // Cached: p(3) + actual.insert(sharedClique(new Clique(make_pair( + boost::make_shared(pair_list_of + (0, Matrix_(1,1,1.0)) + (2, Matrix_(1,1,2.0)), + 1, Vector_(1,1.0), Vector_(1,1.0)), // p(0|2) + boost::make_shared(2, Matrix_(1,1,1.0), Vector_(1,1.0), 0.0))))); // Cached: p(2) + + // Create permutation that changes variable 2 -> 0 + Permutation permutation = Permutation::Identity(5); + permutation[2] = 1; + + // Permute BayesTree + actual.root()->permuteWithInverse(permutation); + + // Check + EXPECT(assert_equal(expected, actual)); +} + /* ************************************************************************* */ int main() { TestResult tr; return TestRegistry::runAllTests(tr);} /* ************************************************************************* */
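Note, illustrative only and not part of the patch: the FindAll() hunk near the top of this section replaces `const sharedClique&` with `const typename ISAM2Clique<...>::shared_ptr&`. Besides switching to the derived clique type, the `typename` keyword is required because the nested `shared_ptr` name depends on a template parameter. A minimal self-contained sketch of that rule, using hypothetical Node/countChildren names:

#include <boost/foreach.hpp>
#include <boost/shared_ptr.hpp>
#include <cstddef>
#include <vector>

template <class T>
struct Node {
  typedef boost::shared_ptr<Node<T> > shared_ptr;
  std::vector<shared_ptr> children;
};

template <class T>
std::size_t countChildren(const Node<T>& node) {
  std::size_t count = 0;
  // `typename` is required here: Node<T>::shared_ptr is a dependent name,
  // just like ISAM2Clique<CONDITIONAL>::shared_ptr in FindAll().
  BOOST_FOREACH(const typename Node<T>::shared_ptr& child, node.children) {
    (void)child;
    ++count;
  }
  return count;
}

int main() {
  Node<int> root;
  root.children.push_back(Node<int>::shared_ptr(new Node<int>()));
  return countChildren(root) == 1 ? 0 : 1;
}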
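Illustrative only: recalculate() now walks clique parent pointers with a boost::dynamic_pointer_cast in debug builds and a boost::static_pointer_cast otherwise, because the base tree stores base-class clique pointers while the tree actually holds ISAM2Clique nodes. The same checked-in-debug, unchecked-in-release pattern in isolation, with hypothetical BaseClique/DerivedClique types:

#include <boost/shared_ptr.hpp>
#include <cassert>

struct BaseClique { virtual ~BaseClique() {} };
struct DerivedClique : BaseClique {};

boost::shared_ptr<DerivedClique> asDerived(const boost::shared_ptr<BaseClique>& p) {
#ifndef NDEBUG
  // Checked cast in debug builds: a wrong dynamic type is caught immediately
  boost::shared_ptr<DerivedClique> d = boost::dynamic_pointer_cast<DerivedClique>(p);
  assert(!p || d);
  return d;
#else
  // Unchecked cast in release builds: no RTTI cost on the hot path
  return boost::static_pointer_cast<DerivedClique>(p);
#endif
}

int main() {
  boost::shared_ptr<BaseClique> c(new DerivedClique());
  boost::shared_ptr<DerivedClique> d = asDerived(c);
  return d ? 0 : 1;
}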
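Illustrative only, expanding on the NOTE added in the "gather involved keys" hunk: when a std::vector of an integral type is constructed from two arguments of that same integral type, overload resolution picks the (count, value) constructor rather than the iterator-range constructor, which is why the patch builds newKeys with assign(). A standalone demonstration of the rule:

#include <cassert>
#include <cstddef>
#include <set>
#include <vector>

int main() {
  std::size_t first = 3, last = 7;

  // Resolves to vector(size_type count, value_type value): three copies of 7,
  // even though the caller may have intended a range.
  std::vector<std::size_t> surprising(first, last);
  assert(surprising.size() == 3 && surprising[0] == 7);

  // assign() from real iterators is unambiguous, mirroring how newKeys is
  // filled from markedKeys in the patch.
  std::set<std::size_t> markedKeys;
  markedKeys.insert(1); markedKeys.insert(2); markedKeys.insert(5);
  std::vector<std::size_t> newKeys;
  newKeys.assign(markedKeys.begin(), markedKeys.end());
  assert(newKeys.size() == 3 && newKeys[2] == 5);
  return 0;
}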
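Illustrative only: the new ISAM2Params::optimizationParams member is a boost::variant that both selects the solver and carries its settings, and ISAM2::update() dispatches on it with type()/boost::get as shown above. A minimal usage sketch, assuming the updated gtsam/nonlinear/ISAM2.h from this diff is on the include path:

#include <gtsam/nonlinear/ISAM2.h>  // the header modified in this patch
#include <iostream>
#include <typeinfo>

using namespace gtsam;

int main() {
  // Gauss-Newton back-substitution with a 0.001 wildfire threshold
  ISAM2Params gnParams(ISAM2GaussNewtonParams(0.001), 0.1, 10);

  // Powell's dog-leg with an initial trust-region radius of 1.0
  ISAM2Params dlParams(ISAM2DoglegParams(1.0), 0.1, 10);

  // Dispatch on the variant, the same way ISAM2::update() does above
  if (dlParams.optimizationParams.type() == typeid(ISAM2DoglegParams))
    std::cout << "Dogleg, initialDelta = "
              << boost::get<ISAM2DoglegParams>(dlParams.optimizationParams).initialDelta
              << std::endl;

  if (gnParams.optimizationParams.type() == typeid(ISAM2GaussNewtonParams))
    std::cout << "Gauss-Newton, wildfireThreshold = "
              << boost::get<ISAM2GaussNewtonParams>(gnParams.optimizationParams).wildfireThreshold
              << std::endl;
  return 0;
}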
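Illustrative only: the gradientContribution_ vector cached by the new ISAM2Clique (and verified against gradientAtZero() in the slamlike tests above) is the gradient of the clique's quadratic error at zero. For a conditional written as R*xF + S*xS = d it is the stacked vector [-R'*d ; -S'*d]; the actual constructor additionally undoes the conditional's internal column permutation, which this Eigen sketch omits:

#include <Eigen/Dense>
#include <iostream>

int main() {
  Eigen::MatrixXd R(2, 2), S(2, 1);
  R << 1, 2,
       0, 3;        // frontal block (upper triangular)
  S << 4,
       5;           // separator block
  Eigen::VectorXd d(2);
  d << 1, 1;        // right-hand side

  // Gradient of 0.5*||R*xF + S*xS - d||^2 at x = 0, stacked frontal-then-
  // separator exactly as in the ISAM2Clique constructor
  Eigen::VectorXd g(R.cols() + S.cols());
  g << -R.transpose() * d,
       -S.transpose() * d;

  std::cout << g.transpose() << std::endl;  // prints: -1 -5 -9
  return 0;
}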