Merge remote-tracking branch 'origin/develop' into feature/SmartFactors3

release/4.3a0
dellaert 2015-06-23 22:41:33 -07:00
commit d415cffd4b
31 changed files with 1254 additions and 954 deletions

16
gtsam.h
View File

@@ -1654,12 +1654,12 @@ char symbolChr(size_t key);
size_t symbolIndex(size_t key);
// Default keyformatter
void printKeyList (const gtsam::KeyList& keys);
void printKeyList (const gtsam::KeyList& keys, string s);
void printKeyVector(const gtsam::KeyVector& keys);
void printKeyVector(const gtsam::KeyVector& keys, string s);
void printKeySet (const gtsam::KeySet& keys);
void printKeySet (const gtsam::KeySet& keys, string s);
void PrintKeyList (const gtsam::KeyList& keys);
void PrintKeyList (const gtsam::KeyList& keys, string s);
void PrintKeyVector(const gtsam::KeyVector& keys);
void PrintKeyVector(const gtsam::KeyVector& keys, string s);
void PrintKeySet (const gtsam::KeySet& keys);
void PrintKeySet (const gtsam::KeySet& keys, string s);
#include <gtsam/inference/LabeledSymbol.h>
class LabeledSymbol {
@@ -1776,7 +1776,7 @@ class Values {
void swap(gtsam::Values& values);
bool exists(size_t j) const;
gtsam::KeyList keys() const;
gtsam::KeyVector keys() const;
gtsam::VectorValues zeroVectors() const;
@@ -1893,8 +1893,6 @@ class KeySet {
class KeyVector {
KeyVector();
KeyVector(const gtsam::KeyVector& other);
KeyVector(const gtsam::KeySet& other);
KeyVector(const gtsam::KeyList& other);
// Note: no print function
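
A hypothetical C++ usage of the renamed helpers, reflecting only what this wrapper change declares (the KeyVector return of Values::keys() and the PrintKeyVector(keys, string) overload); the function and variable names below are illustrative:

#include <gtsam/inference/Key.h>
#include <gtsam/nonlinear/Values.h>

void printAllKeys(const gtsam::Values& values) {
  gtsam::KeyVector keys = values.keys();       // previously returned a KeyList
  gtsam::PrintKeyVector(keys, "variables: ");  // previously printKeyVector
}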

View File

@@ -1,19 +1,19 @@
/* ----------------------------------------------------------------------------
* GTSAM Copyright 2010, Georgia Tech Research Corporation,
* Atlanta, Georgia 30332-0415
* All Rights Reserved
* Authors: Frank Dellaert, et al. (see THANKS for the full author list)
* GTSAM Copyright 2010, Georgia Tech Research Corporation,
* Atlanta, Georgia 30332-0415
* All Rights Reserved
* Authors: Frank Dellaert, et al. (see THANKS for the full author list)
* See LICENSE for the license information
* See LICENSE for the license information
* -------------------------------------------------------------------------- */
* -------------------------------------------------------------------------- */
/**
* @file treeTraversal-inst.h
* @author Richard Roberts
* @date April 9, 2013
*/
* @file treeTraversal-inst.h
* @author Richard Roberts
* @date April 9, 2013
*/
#pragma once
#include <gtsam/base/treeTraversal/parallelTraversalTasks.h>
@@ -34,192 +34,197 @@
namespace gtsam {
/** Internal functions used for traversing trees */
namespace treeTraversal {
/** Internal functions used for traversing trees */
namespace treeTraversal {
/* ************************************************************************* */
namespace {
// Internal node used in DFS preorder stack
template<typename NODE, typename DATA>
struct TraversalNode {
bool expanded;
const boost::shared_ptr<NODE>& treeNode;
DATA& parentData;
typename FastList<DATA>::iterator dataPointer;
TraversalNode(const boost::shared_ptr<NODE>& _treeNode, DATA& _parentData) :
expanded(false), treeNode(_treeNode), parentData(_parentData) {}
};
// Do nothing - default argument for post-visitor for tree traversal
struct no_op {
template<typename NODE, typename DATA>
void operator()(const boost::shared_ptr<NODE>& node, const DATA& data) {}
};
}
/** Traverse a forest depth-first with pre-order and post-order visits.
* @param forest The forest of trees to traverse. The method \c forest.roots() should exist
* and return a collection of (shared) pointers to \c FOREST::Node.
* @param visitorPre \c visitorPre(node, parentData) will be called at every node, before
* visiting its children, and will be passed, by reference, the \c DATA object returned
* by the visit to its parent. Likewise, \c visitorPre should return the \c DATA object
* to pass to the children. The returned \c DATA object will be copy-constructed only
* upon returning to store internally, thus may be modified by visiting the children.
* Regarding efficiency, this copy-on-return is usually optimized out by the compiler.
* @param visitorPost \c visitorPost(node, data) will be called at every node, after visiting
* its children, and will be passed, by reference, the \c DATA object returned by the
* call to \c visitorPre (the \c DATA object may be modified by visiting the children).
* @param rootData The data to pass by reference to \c visitorPre when it is called on each
* root node. */
template<class FOREST, typename DATA, typename VISITOR_PRE, typename VISITOR_POST>
void DepthFirstForest(FOREST& forest, DATA& rootData, VISITOR_PRE& visitorPre, VISITOR_POST& visitorPost)
{
// Typedefs
typedef typename FOREST::Node Node;
typedef boost::shared_ptr<Node> sharedNode;
// Depth first traversal stack
typedef TraversalNode<typename FOREST::Node, DATA> TraversalNode;
typedef FastList<TraversalNode> Stack;
Stack stack;
FastList<DATA> dataList; // List to store node data as it is returned from the pre-order visitor
// Add roots to stack (insert such that they are visited and processed in order)
{
typename Stack::iterator insertLocation = stack.begin();
BOOST_FOREACH(const sharedNode& root, forest.roots())
stack.insert(insertLocation, TraversalNode(root, rootData));
}
// Traverse
while(!stack.empty())
{
// Get next node
TraversalNode& node = stack.front();
if(node.expanded) {
// If already expanded, then the data stored in the node is no longer needed, so visit
// then delete it.
(void) visitorPost(node.treeNode, *node.dataPointer);
dataList.erase(node.dataPointer);
stack.pop_front();
} else {
// If not already visited, visit the node and add its children (use reverse iterators so
// children are processed in the order they appear)
node.dataPointer = dataList.insert(dataList.end(), visitorPre(node.treeNode, node.parentData));
typename Stack::iterator insertLocation = stack.begin();
BOOST_FOREACH(const sharedNode& child, node.treeNode->children)
stack.insert(insertLocation, TraversalNode(child, *node.dataPointer));
node.expanded = true;
}
}
assert(dataList.empty());
}
/** Traverse a forest depth-first, with a pre-order visit but no post-order visit.
* @param forest The forest of trees to traverse. The method \c forest.roots() should exist
* and return a collection of (shared) pointers to \c FOREST::Node.
* @param visitorPre \c visitorPre(node, parentData) will be called at every node, before
* visiting its children, and will be passed, by reference, the \c DATA object returned
* by the visit to its parent. Likewise, \c visitorPre should return the \c DATA object
* to pass to the children. The returned \c DATA object will be copy-constructed only
* upon returning to store internally, thus may be modified by visiting the children.
* Regarding efficiency, this copy-on-return is usually optimized out by the compiler.
* @param rootData The data to pass by reference to \c visitorPre when it is called on each
* root node. */
template<class FOREST, typename DATA, typename VISITOR_PRE>
void DepthFirstForest(FOREST& forest, DATA& rootData, VISITOR_PRE& visitorPre)
{
no_op visitorPost;
DepthFirstForest(forest, rootData, visitorPre, visitorPost);
}
/** Traverse a forest depth-first with pre-order and post-order visits.
* @param forest The forest of trees to traverse. The method \c forest.roots() should exist
* and return a collection of (shared) pointers to \c FOREST::Node.
* @param visitorPre \c visitorPre(node, parentData) will be called at every node, before
* visiting its children, and will be passed, by reference, the \c DATA object returned
* by the visit to its parent. Likewise, \c visitorPre should return the \c DATA object
* to pass to the children. The returned \c DATA object will be copy-constructed only
* upon returning to store internally, thus may be modified by visiting the children.
* Regarding efficiency, this copy-on-return is usually optimized out by the compiler.
* @param visitorPost \c visitorPost(node, data) will be called at every node, after visiting
* its children, and will be passed, by reference, the \c DATA object returned by the
* call to \c visitorPre (the \c DATA object may be modified by visiting the children).
* @param rootData The data to pass by reference to \c visitorPre when it is called on each
* root node. */
template<class FOREST, typename DATA, typename VISITOR_PRE, typename VISITOR_POST>
void DepthFirstForestParallel(FOREST& forest, DATA& rootData, VISITOR_PRE& visitorPre, VISITOR_POST& visitorPost,
int problemSizeThreshold = 10)
{
#ifdef GTSAM_USE_TBB
// Typedefs
typedef typename FOREST::Node Node;
typedef boost::shared_ptr<Node> sharedNode;
tbb::task::spawn_root_and_wait(internal::CreateRootTask<Node>(
forest.roots(), rootData, visitorPre, visitorPost, problemSizeThreshold));
#else
DepthFirstForest(forest, rootData, visitorPre, visitorPost);
#endif
}
/* ************************************************************************* */
/** Traversal function for CloneForest */
namespace {
template<typename NODE>
boost::shared_ptr<NODE>
CloneForestVisitorPre(const boost::shared_ptr<NODE>& node, const boost::shared_ptr<NODE>& parentPointer)
{
// Clone the current node and add it to its cloned parent
boost::shared_ptr<NODE> clone = boost::make_shared<NODE>(*node);
clone->children.clear();
parentPointer->children.push_back(clone);
return clone;
}
}
/** Clone a tree, copy-constructing new nodes (calling boost::make_shared) and setting up child
* pointers for a clone of the original tree.
* @param forest The forest of trees to clone. The method \c forest.roots() should exist and
* return a collection of shared pointers to \c FOREST::Node.
* @return The new collection of roots. */
template<class FOREST>
FastVector<boost::shared_ptr<typename FOREST::Node> > CloneForest(const FOREST& forest)
{
typedef typename FOREST::Node Node;
boost::shared_ptr<Node> rootContainer = boost::make_shared<Node>();
DepthFirstForest(forest, rootContainer, CloneForestVisitorPre<Node>);
return FastVector<boost::shared_ptr<Node> >(rootContainer->children.begin(), rootContainer->children.end());
}
/* ************************************************************************* */
/** Traversal function for PrintForest */
namespace {
struct PrintForestVisitorPre
{
const KeyFormatter& formatter;
PrintForestVisitorPre(const KeyFormatter& formatter) : formatter(formatter) {}
template<typename NODE> std::string operator()(const boost::shared_ptr<NODE>& node, const std::string& parentString)
{
// Print the current node
node->print(parentString + "-", formatter);
// Increment the indentation
return parentString + "| ";
}
};
}
/** Print a tree, prefixing each line with \c str, and formatting keys using \c keyFormatter.
* To print each node, this function calls the \c print function of the tree nodes. */
template<class FOREST>
void PrintForest(const FOREST& forest, std::string str, const KeyFormatter& keyFormatter) {
PrintForestVisitorPre visitor(keyFormatter);
DepthFirstForest(forest, str, visitor);
}
/* ************************************************************************* */
namespace {
// Internal node used in DFS preorder stack
template<typename NODE, typename DATA>
struct TraversalNode {
bool expanded;
const boost::shared_ptr<NODE>& treeNode;
DATA& parentData;
typename FastList<DATA>::iterator dataPointer;
TraversalNode(const boost::shared_ptr<NODE>& _treeNode, DATA& _parentData) :
expanded(false), treeNode(_treeNode), parentData(_parentData) {
}
};
// Do nothing - default argument for post-visitor for tree traversal
struct no_op {
template<typename NODE, typename DATA>
void operator()(const boost::shared_ptr<NODE>& node, const DATA& data) {
}
};
}
/** Traverse a forest depth-first with pre-order and post-order visits.
* @param forest The forest of trees to traverse. The method \c forest.roots() should exist
* and return a collection of (shared) pointers to \c FOREST::Node.
* @param visitorPre \c visitorPre(node, parentData) will be called at every node, before
* visiting its children, and will be passed, by reference, the \c DATA object returned
* by the visit to its parent. Likewise, \c visitorPre should return the \c DATA object
* to pass to the children. The returned \c DATA object will be copy-constructed only
* upon returning to store internally, thus may be modified by visiting the children.
* Regarding efficiency, this copy-on-return is usually optimized out by the compiler.
* @param visitorPost \c visitorPost(node, data) will be called at every node, after visiting
* its children, and will be passed, by reference, the \c DATA object returned by the
* call to \c visitorPre (the \c DATA object may be modified by visiting the children).
* @param rootData The data to pass by reference to \c visitorPre when it is called on each
* root node. */
template<class FOREST, typename DATA, typename VISITOR_PRE,
typename VISITOR_POST>
void DepthFirstForest(FOREST& forest, DATA& rootData, VISITOR_PRE& visitorPre,
VISITOR_POST& visitorPost) {
// Typedefs
typedef typename FOREST::Node Node;
typedef boost::shared_ptr<Node> sharedNode;
// Depth first traversal stack
typedef TraversalNode<typename FOREST::Node, DATA> TraversalNode;
typedef FastList<TraversalNode> Stack;
Stack stack;
FastList<DATA> dataList; // List to store node data as it is returned from the pre-order visitor
// Add roots to stack (insert such that they are visited and processed in order)
{
typename Stack::iterator insertLocation = stack.begin();
BOOST_FOREACH(const sharedNode& root, forest.roots())
stack.insert(insertLocation, TraversalNode(root, rootData));
}
// Traverse
while (!stack.empty()) {
// Get next node
TraversalNode& node = stack.front();
if (node.expanded) {
// If already expanded, then the data stored in the node is no longer needed, so visit
// then delete it.
(void) visitorPost(node.treeNode, *node.dataPointer);
dataList.erase(node.dataPointer);
stack.pop_front();
} else {
// If not already visited, visit the node and add its children (use reverse iterators so
// children are processed in the order they appear)
node.dataPointer = dataList.insert(dataList.end(),
visitorPre(node.treeNode, node.parentData));
typename Stack::iterator insertLocation = stack.begin();
BOOST_FOREACH(const sharedNode& child, node.treeNode->children)
stack.insert(insertLocation, TraversalNode(child, *node.dataPointer));
node.expanded = true;
}
}
assert(dataList.empty());
}
/** Traverse a forest depth-first, with a pre-order visit but no post-order visit.
* @param forest The forest of trees to traverse. The method \c forest.roots() should exist
* and return a collection of (shared) pointers to \c FOREST::Node.
* @param visitorPre \c visitorPre(node, parentData) will be called at every node, before
* visiting its children, and will be passed, by reference, the \c DATA object returned
* by the visit to its parent. Likewise, \c visitorPre should return the \c DATA object
* to pass to the children. The returned \c DATA object will be copy-constructed only
* upon returning to store internally, thus may be modified by visiting the children.
* Regarding efficiency, this copy-on-return is usually optimized out by the compiler.
* @param rootData The data to pass by reference to \c visitorPre when it is called on each
* root node. */
template<class FOREST, typename DATA, typename VISITOR_PRE>
void DepthFirstForest(FOREST& forest, DATA& rootData, VISITOR_PRE& visitorPre) {
no_op visitorPost;
DepthFirstForest(forest, rootData, visitorPre, visitorPost);
}
/** Traverse a forest depth-first with pre-order and post-order visits.
* @param forest The forest of trees to traverse. The method \c forest.roots() should exist
* and return a collection of (shared) pointers to \c FOREST::Node.
* @param visitorPre \c visitorPre(node, parentData) will be called at every node, before
* visiting its children, and will be passed, by reference, the \c DATA object returned
* by the visit to its parent. Likewise, \c visitorPre should return the \c DATA object
* to pass to the children. The returned \c DATA object will be copy-constructed only
* upon returning to store internally, thus may be modified by visiting the children.
* Regarding efficiency, this copy-on-return is usually optimized out by the compiler.
* @param visitorPost \c visitorPost(node, data) will be called at every node, after visiting
* its children, and will be passed, by reference, the \c DATA object returned by the
* call to \c visitorPre (the \c DATA object may be modified by visiting the children).
* @param rootData The data to pass by reference to \c visitorPre when it is called on each
* root node. */
template<class FOREST, typename DATA, typename VISITOR_PRE,
typename VISITOR_POST>
void DepthFirstForestParallel(FOREST& forest, DATA& rootData,
VISITOR_PRE& visitorPre, VISITOR_POST& visitorPost,
int problemSizeThreshold = 10) {
#ifdef GTSAM_USE_TBB
// Typedefs
typedef typename FOREST::Node Node;
typedef boost::shared_ptr<Node> sharedNode;
tbb::task::spawn_root_and_wait(
internal::CreateRootTask<Node>(forest.roots(), rootData, visitorPre,
visitorPost, problemSizeThreshold));
#else
DepthFirstForest(forest, rootData, visitorPre, visitorPost);
#endif
}
/* ************************************************************************* */
/** Traversal function for CloneForest */
namespace {
template<typename NODE>
boost::shared_ptr<NODE> CloneForestVisitorPre(
const boost::shared_ptr<NODE>& node,
const boost::shared_ptr<NODE>& parentPointer) {
// Clone the current node and add it to its cloned parent
boost::shared_ptr<NODE> clone = boost::make_shared<NODE>(*node);
clone->children.clear();
parentPointer->children.push_back(clone);
return clone;
}
}
/** Clone a tree, copy-constructing new nodes (calling boost::make_shared) and setting up child
* pointers for a clone of the original tree.
* @param forest The forest of trees to clone. The method \c forest.roots() should exist and
* return a collection of shared pointers to \c FOREST::Node.
* @return The new collection of roots. */
template<class FOREST>
FastVector<boost::shared_ptr<typename FOREST::Node> > CloneForest(
const FOREST& forest) {
typedef typename FOREST::Node Node;
boost::shared_ptr<Node> rootContainer = boost::make_shared<Node>();
DepthFirstForest(forest, rootContainer, CloneForestVisitorPre<Node>);
return FastVector<boost::shared_ptr<Node> >(rootContainer->children.begin(),
rootContainer->children.end());
}
/* ************************************************************************* */
/** Traversal function for PrintForest */
namespace {
struct PrintForestVisitorPre {
const KeyFormatter& formatter;
PrintForestVisitorPre(const KeyFormatter& formatter) :
formatter(formatter) {
}
template<typename NODE> std::string operator()(
const boost::shared_ptr<NODE>& node, const std::string& parentString) {
// Print the current node
node->print(parentString + "-", formatter);
// Increment the indentation
return parentString + "| ";
}
};
}
/** Print a tree, prefixing each line with \c str, and formatting keys using \c keyFormatter.
* To print each node, this function calls the \c print function of the tree nodes. */
template<class FOREST>
void PrintForest(const FOREST& forest, std::string str,
const KeyFormatter& keyFormatter) {
PrintForestVisitorPre visitor(keyFormatter);
DepthFirstForest(forest, str, visitor);
}
}
}
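
A minimal sketch (DemoNode, DemoForest, and the ids printed here are made up) of the FOREST concept that DepthFirstForest expects: a roots() method plus a 'children' member, with an int DATA carrying the depth from parent to child:

#include <gtsam/base/treeTraversal-inst.h>
#include <boost/make_shared.hpp>
#include <boost/shared_ptr.hpp>
#include <iostream>
#include <string>
#include <vector>

struct DemoNode {
  int id;
  std::vector<boost::shared_ptr<DemoNode> > children;
};

struct DemoForest {
  typedef DemoNode Node;  // required by the traversal templates
  std::vector<boost::shared_ptr<DemoNode> > roots_;
  const std::vector<boost::shared_ptr<DemoNode> >& roots() const { return roots_; }
};

// Pre-order visitor: receives the parent's DATA (depth) and returns the DATA
// handed down to the children.
int printDepthVisitor(const boost::shared_ptr<DemoNode>& node, int parentDepth) {
  std::cout << std::string(2 * parentDepth, ' ') << node->id << std::endl;
  return parentDepth + 1;
}

int main() {
  DemoForest forest;
  boost::shared_ptr<DemoNode> root = boost::make_shared<DemoNode>();
  boost::shared_ptr<DemoNode> child = boost::make_shared<DemoNode>();
  root->id = 0;
  child->id = 1;
  root->children.push_back(child);
  forest.roots_.push_back(root);
  int rootDepth = 0;  // rootData, passed by reference to the pre-visitor
  gtsam::treeTraversal::DepthFirstForest(forest, rootDepth, printDepthVisitor);
  return 0;
}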

View File

@@ -83,10 +83,21 @@ Point2 Cal3_S2::uncalibrate(const Point2& p, OptionalJacobian<2, 5> Dcal,
}
/* ************************************************************************* */
Point2 Cal3_S2::calibrate(const Point2& p) const {
const double u = p.x(), v = p.y();
return Point2((1 / fx_) * (u - u0_ - (s_ / fy_) * (v - v0_)),
(1 / fy_) * (v - v0_));
Point2 Cal3_S2::calibrate(const Point2& p, OptionalJacobian<2,5> Dcal,
OptionalJacobian<2,2> Dp) const {
const double u = p.x(), v = p.y();
double delta_u = u - u0_, delta_v = v - v0_;
double inv_fx = 1/ fx_, inv_fy = 1/fy_;
double inv_fy_delta_v = inv_fy * delta_v, inv_fx_s_inv_fy = inv_fx * s_ * inv_fy;
Point2 point(inv_fx * (delta_u - s_ * inv_fy_delta_v),
inv_fy_delta_v);
if(Dcal)
*Dcal << - inv_fx * point.x(), inv_fx * s_ * inv_fy * inv_fy_delta_v, -inv_fx * point.y(),
-inv_fx, inv_fx_s_inv_fy,
0, -inv_fy * point.y(), 0, 0, -inv_fy;
if(Dp)
*Dp << inv_fx, -inv_fx_s_inv_fy, 0, inv_fy;
return point;
}
/* ************************************************************************* */
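
A minimal sketch (calibration values are illustrative) exercising the new analytic Jacobians: since calibrate and uncalibrate are inverse maps, the 2x2 point Jacobian Dp returned by calibrate should be the inverse of the point Jacobian of uncalibrate at the corresponding point:

#include <gtsam/geometry/Cal3_S2.h>
#include <iostream>

using namespace gtsam;

int main() {
  Cal3_S2 K(500, 500, 0.1, 320, 240);
  Point2 uv(1320.3, 1740.0);
  Matrix Dcal, Dp;                          // dynamic matrices, as in the tests
  Point2 xy = K.calibrate(uv, Dcal, Dp);    // intrinsic point + Jacobians
  Matrix Dpoint;
  K.uncalibrate(xy, boost::none, Dpoint);   // forward map's point Jacobian
  std::cout << Dp * Dpoint << std::endl;    // should be (close to) identity
  return 0;
}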

View File

@@ -156,9 +156,12 @@ public:
/**
* convert image coordinates uv to intrinsic coordinates xy
* @param p point in image coordinates
* @param Dcal optional 2*5 Jacobian wrpt Cal3_S2 parameters
* @param Dp optional 2*2 Jacobian wrpt intrinsic coordinates
* @return point in intrinsic coordinates
*/
Point2 calibrate(const Point2& p) const;
Point2 calibrate(const Point2& p, OptionalJacobian<2,5> Dcal = boost::none,
OptionalJacobian<2,2> Dp = boost::none) const;
/**
* convert homogeneous image coordinates to intrinsic coordinates

View File

@@ -26,6 +26,8 @@ GTSAM_CONCEPT_MANIFOLD_INST(Cal3_S2)
static Cal3_S2 K(500, 500, 0.1, 640 / 2, 480 / 2);
static Point2 p(1, -2);
static Point2 p_uv(1320.3, 1740);
static Point2 p_xy(2, 3);
/* ************************************************************************* */
TEST( Cal3_S2, easy_constructor)
@@ -73,6 +75,27 @@ TEST( Cal3_S2, Duncalibrate2)
CHECK(assert_equal(numerical,computed,1e-9));
}
Point2 calibrate_(const Cal3_S2& k, const Point2& pt) {return k.calibrate(pt); }
/* ************************************************************************* */
TEST(Cal3_S2, Dcalibrate1)
{
Matrix computed;
Point2 expected = K.calibrate(p_uv, computed, boost::none);
Matrix numerical = numericalDerivative21(calibrate_, K, p_uv);
CHECK(assert_equal(expected, p_xy, 1e-8));
CHECK(assert_equal(numerical, computed, 1e-8));
}
/* ************************************************************************* */
TEST(Cal3_S2, Dcalibrate2)
{
Matrix computed;
Point2 expected = K.calibrate(p_uv, boost::none, computed);
Matrix numerical = numericalDerivative22(calibrate_, K, p_uv);
CHECK(assert_equal(expected, p_xy, 1e-8));
CHECK(assert_equal(numerical, computed, 1e-8));
}
/* ************************************************************************* */
TEST( Cal3_S2, assert_equal)
{

View File

@@ -7,180 +7,233 @@
* @brief Collects factorgraph fragments defined on variable clusters, arranged in a tree
*/
#include <gtsam/base/timing.h>
#include <gtsam/base/treeTraversal-inst.h>
#include <gtsam/inference/ClusterTree.h>
#include <gtsam/inference/BayesTree.h>
#include <gtsam/inference/Ordering.h>
#include <gtsam/base/timing.h>
#include <gtsam/base/treeTraversal-inst.h>
#include <boost/foreach.hpp>
#include <boost/bind.hpp>
namespace gtsam
{
namespace
{
/* ************************************************************************* */
// Elimination traversal data - stores a pointer to the parent data and collects the factors
// resulting from elimination of the children. Also sets up BayesTree cliques with parent and
// child pointers.
template<class CLUSTERTREE>
struct EliminationData {
EliminationData* const parentData;
size_t myIndexInParent;
FastVector<typename CLUSTERTREE::sharedFactor> childFactors;
boost::shared_ptr<typename CLUSTERTREE::BayesTreeType::Node> bayesTreeNode;
EliminationData(EliminationData* _parentData, size_t nChildren) :
parentData(_parentData),
bayesTreeNode(boost::make_shared<typename CLUSTERTREE::BayesTreeType::Node>())
{
if(parentData) {
myIndexInParent = parentData->childFactors.size();
parentData->childFactors.push_back(typename CLUSTERTREE::sharedFactor());
} else {
myIndexInParent = 0;
}
// Set up BayesTree parent and child pointers
if(parentData) {
if(parentData->parentData) // If our parent is not the dummy node
bayesTreeNode->parent_ = parentData->bayesTreeNode;
parentData->bayesTreeNode->children.push_back(bayesTreeNode);
}
}
};
namespace gtsam {
/* ************************************************************************* */
// Elimination pre-order visitor - just creates the EliminationData structure for the visited
// node.
template<class CLUSTERTREE>
EliminationData<CLUSTERTREE> eliminationPreOrderVisitor(
const typename CLUSTERTREE::sharedNode& node, EliminationData<CLUSTERTREE>& parentData)
{
EliminationData<CLUSTERTREE> myData(&parentData, node->children.size());
myData.bayesTreeNode->problemSize_ = node->problemSize();
return myData;
/* ************************************************************************* */
// Elimination traversal data - stores a pointer to the parent data and collects
// the factors resulting from elimination of the children. Also sets up BayesTree
// cliques with parent and child pointers.
template<class CLUSTERTREE>
struct EliminationData {
// Typedefs
typedef typename CLUSTERTREE::sharedFactor sharedFactor;
typedef typename CLUSTERTREE::FactorType FactorType;
typedef typename CLUSTERTREE::FactorGraphType FactorGraphType;
typedef typename CLUSTERTREE::ConditionalType ConditionalType;
typedef typename CLUSTERTREE::BayesTreeType::Node BTNode;
EliminationData* const parentData;
size_t myIndexInParent;
FastVector<sharedFactor> childFactors;
boost::shared_ptr<BTNode> bayesTreeNode;
EliminationData(EliminationData* _parentData, size_t nChildren) :
parentData(_parentData), bayesTreeNode(boost::make_shared<BTNode>()) {
if (parentData) {
myIndexInParent = parentData->childFactors.size();
parentData->childFactors.push_back(sharedFactor());
} else {
myIndexInParent = 0;
}
// Set up BayesTree parent and child pointers
if (parentData) {
if (parentData->parentData) // If our parent is not the dummy node
bayesTreeNode->parent_ = parentData->bayesTreeNode;
parentData->bayesTreeNode->children.push_back(bayesTreeNode);
}
}
/* ************************************************************************* */
// Elimination post-order visitor - combine the child factors with our own factors, add the
// resulting conditional to the BayesTree, and add the remaining factor to the parent.
template<class CLUSTERTREE>
struct EliminationPostOrderVisitor
{
const typename CLUSTERTREE::Eliminate& eliminationFunction;
typename CLUSTERTREE::BayesTreeType::Nodes& nodesIndex;
EliminationPostOrderVisitor(const typename CLUSTERTREE::Eliminate& eliminationFunction,
// Elimination pre-order visitor - creates the EliminationData structure for the visited node.
static EliminationData EliminationPreOrderVisitor(
const typename CLUSTERTREE::sharedNode& node,
EliminationData& parentData) {
assert(node);
EliminationData myData(&parentData, node->children.size());
myData.bayesTreeNode->problemSize_ = node->problemSize();
return myData;
}
// Elimination post-order visitor - combine the child factors with our own factors, add the
// resulting conditional to the BayesTree, and add the remaining factor to the parent.
class EliminationPostOrderVisitor {
const typename CLUSTERTREE::Eliminate& eliminationFunction_;
typename CLUSTERTREE::BayesTreeType::Nodes& nodesIndex_;
public:
// Construct functor
EliminationPostOrderVisitor(
const typename CLUSTERTREE::Eliminate& eliminationFunction,
typename CLUSTERTREE::BayesTreeType::Nodes& nodesIndex) :
eliminationFunction(eliminationFunction), nodesIndex(nodesIndex) {}
void operator()(const typename CLUSTERTREE::sharedNode& node, EliminationData<CLUSTERTREE>& myData)
{
// Typedefs
typedef typename CLUSTERTREE::sharedFactor sharedFactor;
typedef typename CLUSTERTREE::FactorType FactorType;
typedef typename CLUSTERTREE::FactorGraphType FactorGraphType;
typedef typename CLUSTERTREE::ConditionalType ConditionalType;
typedef typename CLUSTERTREE::BayesTreeType::Node BTNode;
// Gather factors
FactorGraphType gatheredFactors;
gatheredFactors.reserve(node->factors.size() + node->children.size());
gatheredFactors += node->factors;
gatheredFactors += myData.childFactors;
// Check for Bayes tree orphan subtrees, and add them to our children
BOOST_FOREACH(const sharedFactor& f, node->factors)
{
if(const BayesTreeOrphanWrapper<BTNode>* asSubtree = dynamic_cast<const BayesTreeOrphanWrapper<BTNode>*>(f.get()))
{
myData.bayesTreeNode->children.push_back(asSubtree->clique);
asSubtree->clique->parent_ = myData.bayesTreeNode;
}
}
// Do dense elimination step
std::pair<boost::shared_ptr<ConditionalType>, boost::shared_ptr<FactorType> > eliminationResult =
eliminationFunction(gatheredFactors, node->orderedFrontalKeys);
// Store conditional in BayesTree clique, and in the case of ISAM2Clique also store the remaining factor
myData.bayesTreeNode->setEliminationResult(eliminationResult);
// Fill nodes index - we do this here instead of calling insertRoot at the end to avoid
// putting orphan subtrees in the index - they'll already be in the index of the ISAM2
// object they're added to.
BOOST_FOREACH(const Key& j, myData.bayesTreeNode->conditional()->frontals())
nodesIndex.insert(std::make_pair(j, myData.bayesTreeNode));
// Store remaining factor in parent's gathered factors
if(!eliminationResult.second->empty())
myData.parentData->childFactors[myData.myIndexInParent] = eliminationResult.second;
}
};
}
/* ************************************************************************* */
template<class BAYESTREE, class GRAPH>
void ClusterTree<BAYESTREE,GRAPH>::Cluster::print(
const std::string& s, const KeyFormatter& keyFormatter) const
{
std::cout << s;
BOOST_FOREACH(Key j, orderedFrontalKeys)
std::cout << j << " ";
std::cout << "problemSize = " << problemSize_ << std::endl;
}
/* ************************************************************************* */
template<class BAYESTREE, class GRAPH>
void ClusterTree<BAYESTREE,GRAPH>::print(
const std::string& s, const KeyFormatter& keyFormatter) const
{
treeTraversal::PrintForest(*this, s, keyFormatter);
}
/* ************************************************************************* */
template<class BAYESTREE, class GRAPH>
ClusterTree<BAYESTREE,GRAPH>& ClusterTree<BAYESTREE,GRAPH>::operator=(const This& other)
{
// Start by duplicating the tree.
roots_ = treeTraversal::CloneForest(other);
// Assign the remaining factors - these are pointers to factors in the original factor graph and
// we do not clone them.
remainingFactors_ = other.remainingFactors_;
return *this;
}
/* ************************************************************************* */
template<class BAYESTREE, class GRAPH>
std::pair<boost::shared_ptr<BAYESTREE>, boost::shared_ptr<GRAPH> >
ClusterTree<BAYESTREE,GRAPH>::eliminate(const Eliminate& function) const
{
gttic(ClusterTree_eliminate);
// Do elimination (depth-first traversal). The rootsContainer stores a 'dummy' BayesTree node
// that contains all of the roots as its children. rootsContainer also stores the remaining
// uneliminated factors passed up from the roots.
boost::shared_ptr<BayesTreeType> result = boost::make_shared<BayesTreeType>();
EliminationData<This> rootsContainer(0, roots_.size());
EliminationPostOrderVisitor<This> visitorPost(function, result->nodes_);
{
TbbOpenMPMixedScope threadLimiter; // Limits OpenMP threads since we're mixing TBB and OpenMP
treeTraversal::DepthFirstForestParallel(*this, rootsContainer,
eliminationPreOrderVisitor<This>, visitorPost, 10);
eliminationFunction_(eliminationFunction), nodesIndex_(nodesIndex) {
}
// Create BayesTree from roots stored in the dummy BayesTree node.
result->roots_.insert(result->roots_.end(), rootsContainer.bayesTreeNode->children.begin(), rootsContainer.bayesTreeNode->children.end());
// Function that does the HEAVY lifting
void operator()(const typename CLUSTERTREE::sharedNode& node,
EliminationData& myData) {
assert(node);
// Add remaining factors that were not involved with eliminated variables
boost::shared_ptr<FactorGraphType> allRemainingFactors = boost::make_shared<FactorGraphType>();
allRemainingFactors->reserve(remainingFactors_.size() + rootsContainer.childFactors.size());
allRemainingFactors->push_back(remainingFactors_.begin(), remainingFactors_.end());
BOOST_FOREACH(const sharedFactor& factor, rootsContainer.childFactors)
if(factor)
allRemainingFactors->push_back(factor);
// Gather factors
FactorGraphType gatheredFactors;
gatheredFactors.reserve(node->factors.size() + node->children.size());
gatheredFactors += node->factors;
gatheredFactors += myData.childFactors;
// Return result
return std::make_pair(result, allRemainingFactors);
// Check for Bayes tree orphan subtrees, and add them to our children
BOOST_FOREACH(const sharedFactor& f, node->factors) {
if (const BayesTreeOrphanWrapper<BTNode>* asSubtree =
dynamic_cast<const BayesTreeOrphanWrapper<BTNode>*>(f.get())) {
myData.bayesTreeNode->children.push_back(asSubtree->clique);
asSubtree->clique->parent_ = myData.bayesTreeNode;
}
}
// >>>>>>>>>>>>>> Do dense elimination step >>>>>>>>>>>>>>>>>>>>>>>>>>>>>
std::pair<boost::shared_ptr<ConditionalType>,
boost::shared_ptr<FactorType> > eliminationResult =
eliminationFunction_(gatheredFactors, node->orderedFrontalKeys);
// <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<
// Store conditional in BayesTree clique, and in the case of ISAM2Clique also store the remaining factor
myData.bayesTreeNode->setEliminationResult(eliminationResult);
// Fill nodes index - we do this here instead of calling insertRoot at the end to avoid
// putting orphan subtrees in the index - they'll already be in the index of the ISAM2
// object they're added to.
BOOST_FOREACH(const Key& j, myData.bayesTreeNode->conditional()->frontals())
nodesIndex_.insert(std::make_pair(j, myData.bayesTreeNode));
// Store remaining factor in parent's gathered factors
if (!eliminationResult.second->empty())
myData.parentData->childFactors[myData.myIndexInParent] =
eliminationResult.second;
}
};
};
/* ************************************************************************* */
template<class BAYESTREE, class GRAPH>
void ClusterTree<BAYESTREE, GRAPH>::Cluster::print(const std::string& s,
const KeyFormatter& keyFormatter) const {
std::cout << s << " (" << problemSize_ << ")";
PrintKeyVector(orderedFrontalKeys);
}
/* ************************************************************************* */
template<class BAYESTREE, class GRAPH>
void ClusterTree<BAYESTREE, GRAPH>::Cluster::mergeChildren(
const std::vector<bool>& merge) {
gttic(Cluster_mergeChildren);
// Count how many keys, factors and children we'll end up with
size_t nrKeys = orderedFrontalKeys.size();
size_t nrFactors = factors.size();
size_t nrNewChildren = 0;
// Loop over children
size_t i = 0;
BOOST_FOREACH(const sharedNode& child, children) {
if (merge[i]) {
nrKeys += child->orderedFrontalKeys.size();
nrFactors += child->factors.size();
nrNewChildren += child->children.size();
} else {
nrNewChildren += 1; // we keep the child
}
++i;
}
// now reserve space, and really merge
orderedFrontalKeys.reserve(nrKeys);
factors.reserve(nrFactors);
typename Node::Children newChildren;
newChildren.reserve(nrNewChildren);
i = 0;
BOOST_FOREACH(const sharedNode& child, children) {
if (merge[i]) {
// Merge keys. For efficiency, we add keys in reverse order at end, calling reverse after.
orderedFrontalKeys.insert(orderedFrontalKeys.end(),
child->orderedFrontalKeys.rbegin(), child->orderedFrontalKeys.rend());
// Merge keys, factors, and children.
factors.insert(factors.end(), child->factors.begin(),
child->factors.end());
newChildren.insert(newChildren.end(), child->children.begin(),
child->children.end());
// Increment problem size
problemSize_ = std::max(problemSize_, child->problemSize_);
// Increment number of frontal variables
} else {
newChildren.push_back(child); // we keep the child
}
++i;
}
children = newChildren;
std::reverse(orderedFrontalKeys.begin(), orderedFrontalKeys.end());
}
/* ************************************************************************* */
template<class BAYESTREE, class GRAPH>
void ClusterTree<BAYESTREE, GRAPH>::print(const std::string& s,
const KeyFormatter& keyFormatter) const {
treeTraversal::PrintForest(*this, s, keyFormatter);
}
/* ************************************************************************* */
template<class BAYESTREE, class GRAPH>
ClusterTree<BAYESTREE, GRAPH>& ClusterTree<BAYESTREE, GRAPH>::operator=(
const This& other) {
// Start by duplicating the tree.
roots_ = treeTraversal::CloneForest(other);
// Assign the remaining factors - these are pointers to factors in the original factor graph and
// we do not clone them.
remainingFactors_ = other.remainingFactors_;
return *this;
}
/* ************************************************************************* */
template<class BAYESTREE, class GRAPH>
std::pair<boost::shared_ptr<BAYESTREE>, boost::shared_ptr<GRAPH> > ClusterTree<
BAYESTREE, GRAPH>::eliminate(const Eliminate& function) const {
gttic(ClusterTree_eliminate);
// Do elimination (depth-first traversal). The rootsContainer stores a 'dummy' BayesTree node
// that contains all of the roots as its children. rootsContainer also stores the remaining
// uneliminated factors passed up from the roots.
boost::shared_ptr<BayesTreeType> result = boost::make_shared<BayesTreeType>();
typedef EliminationData<This> Data;
Data rootsContainer(0, roots_.size());
typename Data::EliminationPostOrderVisitor visitorPost(function,
result->nodes_);
{
TbbOpenMPMixedScope threadLimiter; // Limits OpenMP threads since we're mixing TBB and OpenMP
treeTraversal::DepthFirstForestParallel(*this, rootsContainer,
Data::EliminationPreOrderVisitor, visitorPost, 10);
}
// Create BayesTree from roots stored in the dummy BayesTree node.
result->roots_.insert(result->roots_.end(),
rootsContainer.bayesTreeNode->children.begin(),
rootsContainer.bayesTreeNode->children.end());
// Add remaining factors that were not involved with eliminated variables
boost::shared_ptr<FactorGraphType> remaining = boost::make_shared<
FactorGraphType>();
remaining->reserve(
remainingFactors_.size() + rootsContainer.childFactors.size());
remaining->push_back(remainingFactors_.begin(), remainingFactors_.end());
BOOST_FOREACH(const sharedFactor& factor, rootsContainer.childFactors) {
if (factor)
remaining->push_back(factor);
}
// Return result
return std::make_pair(result, remaining);
}
}
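
A hedged sketch (factor values and keys 0, 1 are made up) of how ClusterTree::eliminate is reached in practice: a Gaussian junction tree is built from an elimination tree and then eliminated with a supplied dense routine such as EliminateQR:

#include <gtsam/linear/GaussianFactorGraph.h>
#include <gtsam/linear/GaussianEliminationTree.h>
#include <gtsam/linear/GaussianJunctionTree.h>
#include <gtsam/linear/GaussianBayesTree.h>
#include <gtsam/linear/NoiseModel.h>
#include <gtsam/inference/Ordering.h>

using namespace gtsam;

int main() {
  GaussianFactorGraph gfg;
  Matrix I = Matrix::Identity(2, 2);
  SharedDiagonal unit2 = noiseModel::Unit::Create(2);
  gfg.add(0, I, Vector2(1.0, 2.0), unit2);         // prior on x0
  gfg.add(0, -I, 1, I, Vector2(0.5, 0.5), unit2);  // between x0 and x1
  Ordering ordering;
  ordering.push_back(0);
  ordering.push_back(1);
  GaussianEliminationTree etree(gfg, ordering);
  GaussianJunctionTree jt(etree);          // a ClusterTree of variable clusters
  std::pair<GaussianBayesTree::shared_ptr, GaussianFactorGraph::shared_ptr>
      result = jt.eliminate(EliminateQR);  // invokes ClusterTree::eliminate
  result.first->print("Bayes tree:\n");
  return 0;
}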

View File

@@ -13,113 +13,122 @@
#include <gtsam/base/FastVector.h>
#include <gtsam/inference/Ordering.h>
namespace gtsam
{
namespace gtsam {
/**
* A cluster-tree is associated with a factor graph and is defined as in Koller-Friedman:
* each node k represents a subset \f$ C_k \sub X \f$, and the tree is family preserving, in that
* each factor \f$ f_i \f$ is associated with a single cluster and \f$ scope(f_i) \sub C_k \f$.
* \nosubgrouping
*/
template<class BAYESTREE, class GRAPH>
class ClusterTree
{
public:
typedef GRAPH FactorGraphType; ///< The factor graph type
typedef typename GRAPH::FactorType FactorType; ///< The type of factors
typedef ClusterTree<BAYESTREE, GRAPH> This; ///< This class
typedef boost::shared_ptr<This> shared_ptr; ///< Shared pointer to this class
typedef boost::shared_ptr<FactorType> sharedFactor; ///< Shared pointer to a factor
typedef BAYESTREE BayesTreeType; ///< The BayesTree type produced by elimination
typedef typename BayesTreeType::ConditionalType ConditionalType; ///< The type of conditionals
typedef boost::shared_ptr<ConditionalType> sharedConditional; ///< Shared pointer to a conditional
typedef typename FactorGraphType::Eliminate Eliminate; ///< Typedef for an eliminate subroutine
/**
* A cluster-tree is associated with a factor graph and is defined as in Koller-Friedman:
* each node k represents a subset \f$ C_k \sub X \f$, and the tree is family preserving, in that
* each factor \f$ f_i \f$ is associated with a single cluster and \f$ scope(f_i) \sub C_k \f$.
* \nosubgrouping
*/
template<class BAYESTREE, class GRAPH>
class ClusterTree {
public:
typedef GRAPH FactorGraphType; ///< The factor graph type
typedef typename GRAPH::FactorType FactorType; ///< The type of factors
typedef ClusterTree<BAYESTREE, GRAPH> This; ///< This class
typedef boost::shared_ptr<This> shared_ptr; ///< Shared pointer to this class
typedef boost::shared_ptr<FactorType> sharedFactor; ///< Shared pointer to a factor
typedef BAYESTREE BayesTreeType; ///< The BayesTree type produced by elimination
typedef typename BayesTreeType::ConditionalType ConditionalType; ///< The type of conditionals
typedef boost::shared_ptr<ConditionalType> sharedConditional; ///< Shared pointer to a conditional
typedef typename FactorGraphType::Eliminate Eliminate; ///< Typedef for an eliminate subroutine
struct Cluster {
typedef Ordering Keys;
typedef FastVector<sharedFactor> Factors;
typedef FastVector<boost::shared_ptr<Cluster> > Children;
struct Cluster {
typedef Ordering Keys;
typedef FastVector<sharedFactor> Factors;
typedef FastVector<boost::shared_ptr<Cluster> > Children;
Keys orderedFrontalKeys; ///< Frontal keys of this node
Factors factors; ///< Factors associated with this node
Children children; ///< sub-trees
int problemSize_;
Cluster() {
}
Cluster(Key key, const Factors& factors) :
factors(factors) {
orderedFrontalKeys.push_back(key);
}
int problemSize() const { return problemSize_; }
Keys orderedFrontalKeys; ///< Frontal keys of this node
Factors factors; ///< Factors associated with this node
Children children; ///< sub-trees
int problemSize_;
/** print this node */
void print(const std::string& s = "", const KeyFormatter& keyFormatter = DefaultKeyFormatter) const;
};
int problemSize() const {
return problemSize_;
}
typedef boost::shared_ptr<Cluster> sharedCluster; ///< Shared pointer to Cluster
typedef Cluster Node; ///< Define Node=Cluster for compatibility with tree traversal functions
typedef sharedCluster sharedNode; ///< Define Node=Cluster for compatibility with tree traversal functions
/** concept check */
GTSAM_CONCEPT_TESTABLE_TYPE(FactorType);
protected:
FastVector<sharedNode> roots_;
FastVector<sharedFactor> remainingFactors_;
/// @name Standard Constructors
/// @{
/** Copy constructor - makes a deep copy of the tree structure, but only pointers to factors are
* copied, factors are not cloned. */
ClusterTree(const This& other) { *this = other; }
/// @}
public:
/// @name Testable
/// @{
/** Print the cluster tree */
void print(const std::string& s = "", const KeyFormatter& keyFormatter = DefaultKeyFormatter) const;
/// @}
/// @name Standard Interface
/// @{
/** Eliminate the factors to a Bayes tree and remaining factor graph
* @param function The function to use to eliminate, see the namespace functions
* in GaussianFactorGraph.h
* @return The Bayes tree and factor graph resulting from elimination
*/
std::pair<boost::shared_ptr<BayesTreeType>, boost::shared_ptr<FactorGraphType> >
eliminate(const Eliminate& function) const;
/// @}
/// @name Advanced Interface
/// @{
/** Return the set of roots (one for a tree, multiple for a forest) */
const FastVector<sharedNode>& roots() const { return roots_; }
/** Return the remaining factors that are not pulled into elimination */
const FastVector<sharedFactor>& remainingFactors() const { return remainingFactors_; }
/// @}
protected:
/// @name Details
/// Assignment operator - makes a deep copy of the tree structure, but only pointers to factors
/// are copied, factors are not cloned.
This& operator=(const This& other);
/// Default constructor to be used in derived classes
ClusterTree() {}
/// @}
/// print this node
void print(const std::string& s = "", const KeyFormatter& keyFormatter =
DefaultKeyFormatter) const;
/// Merge all children for which bit is set into this node
void mergeChildren(const std::vector<bool>& merge);
};
typedef boost::shared_ptr<Cluster> sharedCluster; ///< Shared pointer to Cluster
typedef Cluster Node; ///< Define Node=Cluster for compatibility with tree traversal functions
typedef sharedCluster sharedNode; ///< Define Node=Cluster for compatibility with tree traversal functions
/** concept check */
GTSAM_CONCEPT_TESTABLE_TYPE(FactorType);
protected:
FastVector<sharedNode> roots_;
FastVector<sharedFactor> remainingFactors_;
/// @name Standard Constructors
/// @{
/** Copy constructor - makes a deep copy of the tree structure, but only pointers to factors are
* copied, factors are not cloned. */
ClusterTree(const This& other) {*this = other;}
/// @}
public:
/// @name Testable
/// @{
/** Print the cluster tree */
void print(const std::string& s = "", const KeyFormatter& keyFormatter = DefaultKeyFormatter) const;
/// @}
/// @name Standard Interface
/// @{
/** Eliminate the factors to a Bayes tree and remaining factor graph
* @param function The function to use to eliminate, see the namespace functions
* in GaussianFactorGraph.h
* @return The Bayes tree and factor graph resulting from elimination
*/
std::pair<boost::shared_ptr<BayesTreeType>, boost::shared_ptr<FactorGraphType> >
eliminate(const Eliminate& function) const;
/// @}
/// @name Advanced Interface
/// @{
/** Return the set of roots (one for a tree, multiple for a forest) */
const FastVector<sharedNode>& roots() const {return roots_;}
/** Return the remaining factors that are not pulled into elimination */
const FastVector<sharedFactor>& remainingFactors() const {return remainingFactors_;}
/// @}
protected:
/// @name Details
/// Assignment operator - makes a deep copy of the tree structure, but only pointers to factors
/// are copied, factors are not cloned.
This& operator=(const This& other);
/// Default constructor to be used in derived classes
ClusterTree() {}
/// @}
};
}

View File

@@ -101,10 +101,12 @@ namespace gtsam {
{
// Retrieve the factors involving this variable and create the current node
const VariableIndex::Factors& factors = structure[order[j]];
nodes[j] = boost::make_shared<Node>();
nodes[j]->key = order[j];
const sharedNode node = boost::make_shared<Node>();
node->key = order[j];
// for row i \in Struct[A*j] do
node->children.reserve(factors.size());
node->factors.reserve(factors.size());
BOOST_FOREACH(const size_t i, factors) {
// If we already hit a variable in this factor, make the subtree containing the previous
// variable in this factor a child of the current node. This means that the variables
@@ -123,16 +125,16 @@ namespace gtsam {
if (r != j) {
// Now that we found the root, hook up parent and child pointers in the nodes.
parents[r] = j;
nodes[j]->children.push_back(nodes[r]);
node->children.push_back(nodes[r]);
}
} else {
// Add the current factor to the current node since we are at the first variable in this
// factor.
nodes[j]->factors.push_back(graph[i]);
// Add the factor to the current node since we are at the first variable in this factor.
node->factors.push_back(graph[i]);
factorUsed[i] = true;
}
prevCol[i] = j;
}
nodes[j] = node;
}
} catch(std::invalid_argument& e) {
// If this is thrown from structure[order[j]] above, it means that it was requested to

View File

@@ -1,6 +1,6 @@
/* ----------------------------------------------------------------------------
* GTSAM Copyright 2010, Georgia Tech Research Corporation,
* GTSAM Copyright 2010, Georgia Tech Research Corporation,
* Atlanta, Georgia 30332-0415
* All Rights Reserved
* Authors: Frank Dellaert, et al. (see THANKS for the full author list)
@@ -26,130 +26,132 @@
#include <gtsam/symbolic/SymbolicFactor-inst.h>
namespace gtsam {
namespace {
/* ************************************************************************* */
template<class BAYESTREE, class GRAPH>
struct ConstructorTraversalData {
ConstructorTraversalData* const parentData;
typename JunctionTree<BAYESTREE,GRAPH>::sharedNode myJTNode;
FastVector<SymbolicConditional::shared_ptr> childSymbolicConditionals;
FastVector<SymbolicFactor::shared_ptr> childSymbolicFactors;
ConstructorTraversalData(ConstructorTraversalData* _parentData) : parentData(_parentData) {}
};
/* ************************************************************************* */
// Pre-order visitor function
template<class BAYESTREE, class GRAPH, class ETREE_NODE>
ConstructorTraversalData<BAYESTREE,GRAPH> ConstructorTraversalVisitorPre(
template<class BAYESTREE, class GRAPH, class ETREE_NODE>
struct ConstructorTraversalData {
typedef typename JunctionTree<BAYESTREE, GRAPH>::Node Node;
typedef typename JunctionTree<BAYESTREE, GRAPH>::sharedNode sharedNode;
ConstructorTraversalData* const parentData;
sharedNode myJTNode;
FastVector<SymbolicConditional::shared_ptr> childSymbolicConditionals;
FastVector<SymbolicFactor::shared_ptr> childSymbolicFactors;
// Small inner class to store symbolic factors
class SymbolicFactors: public FactorGraph<Factor> {
};
ConstructorTraversalData(ConstructorTraversalData* _parentData) :
parentData(_parentData) {
}
// Pre-order visitor function
static ConstructorTraversalData ConstructorTraversalVisitorPre(
const boost::shared_ptr<ETREE_NODE>& node,
ConstructorTraversalData<BAYESTREE,GRAPH>& parentData)
{
// On the pre-order pass, before children have been visited, we just set up a traversal data
// structure with its own JT node, and create a child pointer in its parent.
ConstructorTraversalData<BAYESTREE,GRAPH> myData = ConstructorTraversalData<BAYESTREE,GRAPH>(&parentData);
myData.myJTNode = boost::make_shared<typename JunctionTree<BAYESTREE,GRAPH>::Node>();
myData.myJTNode->orderedFrontalKeys.push_back(node->key);
myData.myJTNode->factors.insert(myData.myJTNode->factors.begin(), node->factors.begin(), node->factors.end());
parentData.myJTNode->children.push_back(myData.myJTNode);
return myData;
}
ConstructorTraversalData& parentData) {
// On the pre-order pass, before children have been visited, we just set up
// a traversal data structure with its own JT node, and create a child
// pointer in its parent.
ConstructorTraversalData myData = ConstructorTraversalData(&parentData);
myData.myJTNode = boost::make_shared<Node>(node->key, node->factors);
parentData.myJTNode->children.push_back(myData.myJTNode);
return myData;
}
/* ************************************************************************* */
// Post-order visitor function
template<class BAYESTREE, class GRAPH, class ETREE_NODE>
void ConstructorTraversalVisitorPostAlg2(
// Post-order visitor function
static void ConstructorTraversalVisitorPostAlg2(
const boost::shared_ptr<ETREE_NODE>& ETreeNode,
const ConstructorTraversalData<BAYESTREE, GRAPH>& myData)
{
// In this post-order visitor, we combine the symbolic elimination results from the
// elimination tree children and symbolically eliminate the current elimination tree node. We
// then check whether each of our elimination tree child nodes should be merged with us. The
// check for this is that our number of symbolic elimination parents is exactly 1 less than
// our child's symbolic elimination parents - this condition indicates that eliminating the
// current node did not introduce any parents beyond those already in the child.
const ConstructorTraversalData& myData) {
// In this post-order visitor, we combine the symbolic elimination results
// from the elimination tree children and symbolically eliminate the current
// elimination tree node. We then check whether each of our elimination
// tree child nodes should be merged with us. The check for this is that
// our number of symbolic elimination parents is exactly 1 less than
// our child's symbolic elimination parents - this condition indicates that
// eliminating the current node did not introduce any parents beyond those
// already in the child.
// Do symbolic elimination for this node
class : public FactorGraph<Factor> {} symbolicFactors;
symbolicFactors.reserve(ETreeNode->factors.size() + myData.childSymbolicFactors.size());
// Add ETree node factors
symbolicFactors += ETreeNode->factors;
// Add symbolic factors passed up from children
symbolicFactors += myData.childSymbolicFactors;
// Do symbolic elimination for this node
SymbolicFactors symbolicFactors;
symbolicFactors.reserve(
ETreeNode->factors.size() + myData.childSymbolicFactors.size());
// Add ETree node factors
symbolicFactors += ETreeNode->factors;
// Add symbolic factors passed up from children
symbolicFactors += myData.childSymbolicFactors;
Ordering keyAsOrdering; keyAsOrdering.push_back(ETreeNode->key);
std::pair<SymbolicConditional::shared_ptr, SymbolicFactor::shared_ptr> symbolicElimResult =
internal::EliminateSymbolic(symbolicFactors, keyAsOrdering);
Ordering keyAsOrdering;
keyAsOrdering.push_back(ETreeNode->key);
SymbolicConditional::shared_ptr myConditional;
SymbolicFactor::shared_ptr mySeparatorFactor;
boost::tie(myConditional, mySeparatorFactor) = internal::EliminateSymbolic(
symbolicFactors, keyAsOrdering);
// Store symbolic elimination results in the parent
myData.parentData->childSymbolicConditionals.push_back(symbolicElimResult.first);
myData.parentData->childSymbolicFactors.push_back(symbolicElimResult.second);
// Store symbolic elimination results in the parent
myData.parentData->childSymbolicConditionals.push_back(myConditional);
myData.parentData->childSymbolicFactors.push_back(mySeparatorFactor);
// Merge our children if they are in our clique - if our conditional has exactly one fewer
// parent than our child's conditional.
size_t myNrFrontals = 1;
const size_t myNrParents = symbolicElimResult.first->nrParents();
size_t nrMergedChildren = 0;
assert(myData.myJTNode->children.size() == myData.childSymbolicConditionals.size());
// Loop over children
int combinedProblemSize = (int) (symbolicElimResult.first->size() * symbolicFactors.size());
for(size_t child = 0; child < myData.childSymbolicConditionals.size(); ++child) {
// Check if we should merge the child
if(myNrParents + myNrFrontals == myData.childSymbolicConditionals[child]->nrParents()) {
// Get a reference to the child, adjusting the index to account for children previously
// merged and removed from the child list.
const typename JunctionTree<BAYESTREE, GRAPH>::Node& childToMerge =
*myData.myJTNode->children[child - nrMergedChildren];
// Merge keys, factors, and children.
myData.myJTNode->orderedFrontalKeys.insert(
myData.myJTNode->orderedFrontalKeys.begin(),
childToMerge.orderedFrontalKeys.begin(),
childToMerge.orderedFrontalKeys.end());
myData.myJTNode->factors.insert(myData.myJTNode->factors.end(),
childToMerge.factors.begin(),
childToMerge.factors.end());
myData.myJTNode->children.insert(myData.myJTNode->children.end(),
childToMerge.children.begin(),
childToMerge.children.end());
// Increment problem size
combinedProblemSize = std::max(combinedProblemSize, childToMerge.problemSize_);
// Increment number of frontal variables
myNrFrontals += childToMerge.orderedFrontalKeys.size();
// Remove child from list.
myData.myJTNode->children.erase(myData.myJTNode->children.begin() + (child - nrMergedChildren));
// Increment number of merged children
++ nrMergedChildren;
}
sharedNode node = myData.myJTNode;
const FastVector<SymbolicConditional::shared_ptr>& childConditionals =
myData.childSymbolicConditionals;
node->problemSize_ = (int) (myConditional->size() * symbolicFactors.size());
// Merge our children if they are in our clique - if our conditional has
// exactly one fewer parent than our child's conditional.
const size_t myNrParents = myConditional->nrParents();
const size_t nrChildren = node->children.size();
assert(childConditionals.size() == nrChildren);
// decide which children to merge, as index into children
std::vector<bool> merge(nrChildren, false);
size_t myNrFrontals = 1, i = 0;
BOOST_FOREACH(const sharedNode& child, node->children) {
// Check if we should merge the i^th child
if (myNrParents + myNrFrontals == childConditionals[i]->nrParents()) {
// Increment number of frontal variables
myNrFrontals += child->orderedFrontalKeys.size();
merge[i] = true;
}
myData.myJTNode->problemSize_ = combinedProblemSize;
++i;
}
// now really merge
node->mergeChildren(merge);
}
};
/* ************************************************************************* */
template<class BAYESTREE, class GRAPH>
template<class ETREE_BAYESNET, class ETREE_GRAPH>
JunctionTree<BAYESTREE,GRAPH>::JunctionTree(const EliminationTree<ETREE_BAYESNET, ETREE_GRAPH>& eliminationTree)
{
gttic(JunctionTree_FromEliminationTree);
// Here we rely on the BayesNet having been produced by this elimination tree, such that the
// conditionals are arranged in DFS post-order. We traverse the elimination tree, and inspect
// the symbolic conditional corresponding to each node. The elimination tree node is added to
// the same clique with its parent if it has exactly one more Bayes net conditional parent than
// does its elimination tree parent.
/* ************************************************************************* */
template<class BAYESTREE, class GRAPH>
template<class ETREE_BAYESNET, class ETREE_GRAPH>
JunctionTree<BAYESTREE, GRAPH>::JunctionTree(
const EliminationTree<ETREE_BAYESNET, ETREE_GRAPH>& eliminationTree) {
gttic(JunctionTree_FromEliminationTree);
// Here we rely on the BayesNet having been produced by this elimination tree,
// such that the conditionals are arranged in DFS post-order. We traverse the
// elimination tree, and inspect the symbolic conditional corresponding to
// each node. The elimination tree node is added to the same clique with its
// parent if it has exactly one more Bayes net conditional parent than
// does its elimination tree parent.
// Traverse the elimination tree, doing symbolic elimination and merging nodes as we go. Gather
// the created junction tree roots in a dummy Node.
typedef typename EliminationTree<ETREE_BAYESNET, ETREE_GRAPH>::Node ETreeNode;
ConstructorTraversalData<BAYESTREE, GRAPH> rootData(0);
rootData.myJTNode = boost::make_shared<typename Base::Node>(); // Make a dummy node to gather the junction tree roots
treeTraversal::DepthFirstForest(eliminationTree, rootData,
ConstructorTraversalVisitorPre<BAYESTREE,GRAPH,ETreeNode>, ConstructorTraversalVisitorPostAlg2<BAYESTREE,GRAPH,ETreeNode>);
// Traverse the elimination tree, doing symbolic elimination and merging nodes
// as we go. Gather the created junction tree roots in a dummy Node.
typedef typename EliminationTree<ETREE_BAYESNET, ETREE_GRAPH>::Node ETreeNode;
typedef ConstructorTraversalData<BAYESTREE, GRAPH, ETreeNode> Data;
Data rootData(0);
rootData.myJTNode = boost::make_shared<typename Base::Node>(); // Make a dummy node to gather
// the junction tree roots
treeTraversal::DepthFirstForest(eliminationTree, rootData,
Data::ConstructorTraversalVisitorPre,
Data::ConstructorTraversalVisitorPostAlg2);
// Assign roots from the dummy node
Base::roots_ = rootData.myJTNode->children;
// Assign roots from the dummy node
typedef typename JunctionTree<BAYESTREE, GRAPH>::Node Node;
const typename Node::Children& children = rootData.myJTNode->children;
Base::roots_.reserve(children.size());
Base::roots_.insert(Base::roots_.begin(), children.begin(), children.end());
// Transfer remaining factors from elimination tree
Base::remainingFactors_ = eliminationTree.remainingFactors();
}
// Transfer remaining factors from elimination tree
Base::remainingFactors_ = eliminationTree.remainingFactors();
}
} //namespace gtsam
} // namespace gtsam
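The constructor comments above hinge on a single merge criterion: an elimination-tree node joins its parent's clique when its symbolic conditional has exactly as many parents as the parent clique has frontals plus conditional parents. A minimal standalone sketch of that predicate, with hypothetical argument names:

#include <cstddef>

// Mirrors `myNrParents + myNrFrontals == childConditionals[i]->nrParents()`
// from the post-order visitor above; the function and argument names are illustrative only.
bool shouldMergeChildClique(std::size_t myNrParents,    // parents of this node's conditional
                            std::size_t myNrFrontals,   // frontals accumulated in this clique so far
                            std::size_t childNrParents) // parents of the child's conditional
{
  return myNrParents + myNrFrontals == childNrParents;
}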

View File

@ -43,7 +43,7 @@ class GTSAM_EXPORT VariableIndex {
public:
typedef boost::shared_ptr<VariableIndex> shared_ptr;
typedef FastList<size_t> Factors;
typedef FastVector<size_t> Factors;
typedef Factors::iterator Factor_iterator;
typedef Factors::const_iterator Factor_const_iterator;
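Since Factors is now a FastVector<size_t>, code that looks up the factor slots adjacent to a variable gets random access. A small usage sketch under that assumption (the free function name is hypothetical):

#include <gtsam/inference/VariableIndex.h>

// Returns the first factor slot adjacent to `key`, or size_t(-1) if there is none.
size_t firstFactorSlot(const gtsam::VariableIndex& variableIndex, gtsam::Key key) {
  const gtsam::FastVector<size_t>& slots = variableIndex[key];  // was FastList<size_t> before
  return slots.empty() ? size_t(-1) : slots.front();
}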

View File

@ -237,11 +237,14 @@ HessianFactor::HessianFactor(const GaussianFactorGraph& factors,
// Allocate and copy keys
gttic(allocate);
// Allocate with dimensions for each variable plus 1 at the end for the information vector
keys_.resize(scatter->size());
FastVector<DenseIndex> dims(scatter->size() + 1);
BOOST_FOREACH(const Scatter::value_type& key_slotentry, *scatter) {
keys_[key_slotentry.second.slot] = key_slotentry.first;
dims[key_slotentry.second.slot] = key_slotentry.second.dimension;
const size_t n = scatter->size();
keys_.resize(n);
FastVector<DenseIndex> dims(n + 1);
DenseIndex slot = 0;
BOOST_FOREACH(const SlotEntry& slotentry, *scatter) {
keys_[slot] = slotentry.key;
dims[slot] = slotentry.dimension;
++slot;
}
dims.back() = 1;
info_ = SymmetricBlockMatrix(dims);
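The allocation above follows a fixed pattern: one block dimension per variable in Scatter order, plus a trailing 1-wide block for the information vector. A hedged standalone sketch of that pattern, using a hypothetical helper and plain std types:

#include <cstddef>
#include <vector>

// Builds the block-dimension vector used to size the symmetric block matrix:
// the dimensions of the variables in scatter order, followed by a final 1.
std::vector<std::ptrdiff_t> hessianBlockDims(const std::vector<std::size_t>& variableDims) {
  std::vector<std::ptrdiff_t> dims(variableDims.begin(), variableDims.end());
  dims.push_back(1);  // extra block for the right-hand side / constant term
  return dims;
}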

View File

@ -21,6 +21,7 @@
#include <gtsam/inference/Ordering.h>
#include <boost/foreach.hpp>
#include <algorithm>
using namespace std;
@ -29,53 +30,62 @@ namespace gtsam {
/* ************************************************************************* */
string SlotEntry::toString() const {
ostringstream oss;
oss << "SlotEntry: slot=" << slot << ", dim=" << dimension;
oss << "SlotEntry: key=" << key << ", dim=" << dimension;
return oss.str();
}
/* ************************************************************************* */
Scatter::Scatter(const GaussianFactorGraph& gfg,
boost::optional<const Ordering&> ordering) {
boost::optional<const Ordering&> ordering) {
gttic(Scatter_Constructor);
static const DenseIndex none = std::numeric_limits<size_t>::max();
// First do the set union.
BOOST_FOREACH (const GaussianFactor::shared_ptr& factor, gfg) {
if (factor) {
for (GaussianFactor::const_iterator variable = factor->begin();
variable != factor->end(); ++variable) {
// TODO: Fix this hack to cope with zero-row Jacobians that come from
// BayesTreeOrphanWrappers
const JacobianFactor* asJacobian =
dynamic_cast<const JacobianFactor*>(factor.get());
if (!asJacobian || asJacobian->cols() > 1)
insert(
make_pair(*variable, SlotEntry(none, factor->getDim(variable))));
}
}
}
// If we have an ordering, pre-fill the ordered variables first
size_t slot = 0;
if (ordering) {
BOOST_FOREACH (Key key, *ordering) {
const_iterator entry = find(key);
if (entry == end())
throw std::invalid_argument(
"The ordering provided to the HessianFactor Scatter constructor\n"
"contained extra variables that did not appear in the factors to "
"combine.");
at(key).slot = (slot++);
push_back(SlotEntry(key, 0));
}
}
// Next fill in the slot indices (we can only get these after doing the set
  // union).
BOOST_FOREACH (value_type& var_slot, *this) {
if (var_slot.second.slot == none) var_slot.second.slot = (slot++);
// Now, find dimensions of variables and/or extend
BOOST_FOREACH (const GaussianFactor::shared_ptr& factor, gfg) {
if (!factor) continue;
// TODO: Fix this hack to cope with zero-row Jacobians that come from BayesTreeOrphanWrappers
const JacobianFactor* asJacobian = dynamic_cast<const JacobianFactor*>(factor.get());
if (asJacobian && asJacobian->cols() <= 1) continue;
// loop over variables
for (GaussianFactor::const_iterator variable = factor->begin();
variable != factor->end(); ++variable) {
const Key key = *variable;
iterator it = find(key); // theoretically expensive, yet cache friendly
if (it!=end())
it->dimension = factor->getDim(variable);
else
push_back(SlotEntry(key, factor->getDim(variable)));
}
}
// To keep the same behavior as before, sort the keys after the ordering
iterator first = begin();
if (ordering) first += ordering->size();
if (first != end()) std::sort(first, end());
// Filter out keys with zero dimensions (if ordering had more keys)
erase(std::remove_if(begin(), end(), SlotEntry::Zero), end());
}
/* ************************************************************************* */
FastVector<SlotEntry>::iterator Scatter::find(Key key) {
iterator it = begin();
while(it != end()) {
if (it->key == key)
return it;
++it;
}
return it; // end()
}
/* ************************************************************************* */
} // gtsam
} // gtsam

View File

@ -32,30 +32,32 @@ class Ordering;
/// One SlotEntry stores the key for a variable, as well as its dimension.
struct GTSAM_EXPORT SlotEntry {
DenseIndex slot;
Key key;
size_t dimension;
SlotEntry(DenseIndex _slot, size_t _dimension)
: slot(_slot), dimension(_dimension) {}
SlotEntry(Key _key, size_t _dimension) : key(_key), dimension(_dimension) {}
std::string toString() const;
friend bool operator<(const SlotEntry& p, const SlotEntry& q) {
return p.key < q.key;
}
static bool Zero(const SlotEntry& p) { return p.dimension==0;}
};
/**
* Scatter is an intermediate data structure used when building a HessianFactor
* incrementally, to get the keys in the right order. The "scatter" is a map
* incrementally, to get the keys in the right order. In spirit, it is a map
* from global variable indices to slot indices in the union of involved
* variables. We also include the dimensionality of the variable.
*/
class Scatter : public FastMap<Key, SlotEntry> {
class Scatter : public FastVector<SlotEntry> {
public:
/// Constructor
Scatter(const GaussianFactorGraph& gfg,
boost::optional<const Ordering&> ordering = boost::none);
/// Get the slot corresponding to the given key
DenseIndex slot(Key key) const { return at(key).slot; }
private:
/// Get the dimension corresponding to the given key
DenseIndex dim(Key key) const { return at(key).dimension; }
/// Find the SlotEntry with the right key (linear time worst case)
iterator find(Key key);
};
} // namespace gtsam
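In vector form, the Scatter construction above boils down to: pre-fill any ordered keys with zero dimension, extend with keys found in the factors while recording their dimensions, sort everything after the ordered prefix by key, and drop zero-dimension leftovers. A simplified sketch with hypothetical plain-std types standing in for SlotEntry and the factor graph:

#include <algorithm>
#include <cstddef>
#include <vector>

struct Slot { std::size_t key; std::size_t dim; };  // stand-in for SlotEntry

std::vector<Slot> buildScatter(const std::vector<std::vector<Slot>>& factors,
                               const std::vector<std::size_t>& ordering) {
  std::vector<Slot> scatter;
  for (std::size_t key : ordering) scatter.push_back({key, 0});  // ordered prefix first
  for (const auto& factor : factors)
    for (const Slot& s : factor) {
      auto it = std::find_if(scatter.begin(), scatter.end(),
                             [&](const Slot& e) { return e.key == s.key; });
      if (it != scatter.end()) it->dim = s.dim;  // known key: record its dimension
      else scatter.push_back(s);                 // new key: append at the end
    }
  std::sort(scatter.begin() + ordering.size(), scatter.end(),  // keep the ordered prefix fixed
            [](const Slot& a, const Slot& b) { return a.key < b.key; });
  scatter.erase(std::remove_if(scatter.begin(), scatter.end(),  // prune keys never seen in a factor
                               [](const Slot& e) { return e.dim == 0; }),
                scatter.end());
  return scatter;
}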

View File

@ -83,13 +83,30 @@ TEST( GaussianBayesTree, eliminate )
{
GaussianBayesTree bt = *chain.eliminateMultifrontal(chainOrdering);
Scatter scatter(chain);
EXPECT_LONGS_EQUAL(4, scatter.size());
EXPECT_LONGS_EQUAL(1, scatter.at(0).key);
EXPECT_LONGS_EQUAL(2, scatter.at(1).key);
EXPECT_LONGS_EQUAL(3, scatter.at(2).key);
EXPECT_LONGS_EQUAL(4, scatter.at(3).key);
Matrix two = (Matrix(1, 1) << 2.).finished();
Matrix one = (Matrix(1, 1) << 1.).finished();
GaussianBayesTree bayesTree_expected;
bayesTree_expected.insertRoot(
MakeClique(GaussianConditional(pair_list_of<Key, Matrix>(x3, (Matrix(2, 1) << 2., 0.).finished()) (x4, (Matrix(2, 1) << 2., 2.).finished()), 2, Vector2(2., 2.)), list_of
(MakeClique(GaussianConditional(pair_list_of<Key, Matrix>(x2, (Matrix(2, 1) << -2.*sqrt(2.), 0.).finished()) (x1, (Matrix(2, 1) << -sqrt(2.), -sqrt(2.)).finished()) (x3, (Matrix(2, 1) << -sqrt(2.), sqrt(2.)).finished()), 2, (Vector(2) << -2.*sqrt(2.), 0.).finished())))));
MakeClique(
GaussianConditional(
pair_list_of<Key, Matrix>(x3, (Matrix21() << 2., 0.).finished())(
x4, (Matrix21() << 2., 2.).finished()), 2, Vector2(2., 2.)),
list_of(
MakeClique(
GaussianConditional(
pair_list_of<Key, Matrix>(x2,
(Matrix21() << -2. * sqrt(2.), 0.).finished())(x1,
(Matrix21() << -sqrt(2.), -sqrt(2.)).finished())(x3,
(Matrix21() << -sqrt(2.), sqrt(2.)).finished()), 2,
(Vector(2) << -2. * sqrt(2.), 0.).finished())))));
EXPECT(assert_equal(bayesTree_expected, bt));
}

View File

@ -17,10 +17,12 @@
#include <gtsam/linear/Scatter.h>
#include <gtsam/linear/GaussianFactorGraph.h>
#include <gtsam/inference/Symbol.h>
#include <CppUnitLite/TestHarness.h>
using namespace std;
using namespace gtsam;
using symbol_shorthand::X;
/* ************************************************************************* */
TEST(HessianFactor, CombineAndEliminate) {
@ -43,14 +45,14 @@ TEST(HessianFactor, CombineAndEliminate) {
Vector3 s2(3.6, 3.6, 3.6);
GaussianFactorGraph gfg;
gfg.add(1, A01, b0, noiseModel::Diagonal::Sigmas(s0, true));
gfg.add(0, A10, 1, A11, b1, noiseModel::Diagonal::Sigmas(s1, true));
gfg.add(1, A21, b2, noiseModel::Diagonal::Sigmas(s2, true));
gfg.add(X(1), A01, b0, noiseModel::Diagonal::Sigmas(s0, true));
gfg.add(X(0), A10, X(1), A11, b1, noiseModel::Diagonal::Sigmas(s1, true));
gfg.add(X(1), A21, b2, noiseModel::Diagonal::Sigmas(s2, true));
Scatter scatter(gfg);
EXPECT_LONGS_EQUAL(2, scatter.size());
EXPECT_LONGS_EQUAL(0, scatter.at(0).slot);
EXPECT_LONGS_EQUAL(1, scatter.at(1).slot);
EXPECT(assert_equal(X(0), scatter.at(0).key));
EXPECT(assert_equal(X(1), scatter.at(1).key));
EXPECT_LONGS_EQUAL(n, scatter.at(0).dimension);
EXPECT_LONGS_EQUAL(n, scatter.at(1).dimension);
}

View File

@ -33,7 +33,7 @@ namespace detail {
template<typename T, typename structure_category>
struct Origin { T operator()() { return traits<T>::Identity();} };
// but dimple manifolds don't have one, so we just use the default constructor
// but simple manifolds don't have one, so we just use the default constructor
template<typename T>
struct Origin<T, manifold_tag> { T operator()() { return T();} };
@ -65,7 +65,12 @@ struct Canonical {
}
};
/// Adapt ceres-style autodiff
/**
* The AdaptAutoDiff class uses ceres-style autodiff to adapt a ceres-style
* Function evaluation, i.e., a function F that defines an operator
* template<typename T> bool operator()(const T* const, const T* const, T* predicted) const;
* For now only binary operators are supported.
*/
template<typename F, typename T, typename A1, typename A2>
class AdaptAutoDiff {
@ -104,7 +109,7 @@ public:
// Get derivatives with AutoDiff
double *parameters[] = { v1.data(), v2.data() };
double rowMajor1[N * M1], rowMajor2[N * M2]; // om the stack
double rowMajor1[N * M1], rowMajor2[N * M2]; // on the stack
double *jacobians[] = { rowMajor1, rowMajor2 };
success = AutoDiff<F, double, 9, 3>::Differentiate(f, parameters, 2,
result.data(), jacobians);
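For reference, the kind of functor the AdaptAutoDiff documentation above refers to looks roughly like this toy stand-in (not the Snavely projection used later; names and dimensions are illustrative only):

// Toy ceres-style binary functor: two parameter blocks in, one prediction out.
struct ToyProjection {
  template <typename T>
  bool operator()(const T* const camera,  // e.g. 9 parameters
                  const T* const point,   // e.g. 3 parameters
                  T* predicted) const {   // e.g. 2 values
    // Illustrative only: scale the first two point coordinates by camera[0].
    predicted[0] = camera[0] * point[0];
    predicted[1] = camera[0] * point[1];
    return true;
  }
};
// It would then be adapted along the lines of:
//   AdaptAutoDiff<ToyProjection, Point2, Camera, Point3> adapted;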

View File

@ -105,8 +105,8 @@ void LevenbergMarquardtParams::print(const std::string& str) const {
std::cout << " lambdaLowerBound: " << lambdaLowerBound << "\n";
std::cout << " minModelFidelity: " << minModelFidelity << "\n";
std::cout << " diagonalDamping: " << diagonalDamping << "\n";
std::cout << " min_diagonal: " << min_diagonal_ << "\n";
std::cout << " max_diagonal: " << max_diagonal_ << "\n";
std::cout << " minDiagonal: " << minDiagonal << "\n";
std::cout << " maxDiagonal: " << maxDiagonal << "\n";
std::cout << " verbosityLM: "
<< verbosityLMTranslator(verbosityLM) << "\n";
std::cout.flush();
@ -119,19 +119,19 @@ GaussianFactorGraph::shared_ptr LevenbergMarquardtOptimizer::linearize() const {
/* ************************************************************************* */
void LevenbergMarquardtOptimizer::increaseLambda() {
if (params_.useFixedLambdaFactor_) {
if (params_.useFixedLambdaFactor) {
state_.lambda *= params_.lambdaFactor;
} else {
state_.lambda *= params_.lambdaFactor;
params_.lambdaFactor *= 2.0;
}
params_.reuse_diagonal_ = true;
state_.reuseDiagonal = true;
}
/* ************************************************************************* */
void LevenbergMarquardtOptimizer::decreaseLambda(double stepQuality) {
if (params_.useFixedLambdaFactor_) {
if (params_.useFixedLambdaFactor) {
state_.lambda /= params_.lambdaFactor;
} else {
// CHECK_GT(step_quality, 0.0);
@ -139,7 +139,7 @@ void LevenbergMarquardtOptimizer::decreaseLambda(double stepQuality) {
params_.lambdaFactor = 2.0;
}
state_.lambda = std::max(params_.lambdaLowerBound, state_.lambda);
params_.reuse_diagonal_ = false;
state_.reuseDiagonal = false;
}
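The renamed flag drives a simple policy in the two functions above: lambda is always scaled by lambdaFactor, and when useFixedLambdaFactor is false the factor itself grows as well. A standalone sketch of the increase path (a hypothetical struct, with defaults taken from the legacy parameter values):

struct LambdaPolicy {
  double lambda = 1e-5;        // lambdaInitial (legacy default)
  double lambdaFactor = 10.0;  // legacy default
  bool useFixedLambdaFactor = true;

  void increase() {
    lambda *= lambdaFactor;
    if (!useFixedLambdaFactor) lambdaFactor *= 2.0;  // adaptive mode also grows the factor
  }
};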
@ -152,12 +152,12 @@ GaussianFactorGraph::shared_ptr LevenbergMarquardtOptimizer::buildDampedSystem(
cout << "building damped system with lambda " << state_.lambda << endl;
// Only retrieve diagonal vector when reuse_diagonal = false
if (params_.diagonalDamping && params_.reuse_diagonal_ == false) {
if (params_.diagonalDamping && state_.reuseDiagonal == false) {
state_.hessianDiagonal = linear.hessianDiagonal();
BOOST_FOREACH(Vector& v, state_.hessianDiagonal | map_values) {
for (int aa = 0; aa < v.size(); aa++) {
v(aa) = std::min(std::max(v(aa), params_.min_diagonal_),
params_.max_diagonal_);
v(aa) = std::min(std::max(v(aa), params_.minDiagonal),
params_.maxDiagonal);
v(aa) = sqrt(v(aa));
}
}
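The damping loop above clamps each Hessian-diagonal entry into [minDiagonal, maxDiagonal] and then takes its square root. A minimal sketch of that per-vector transform (the free function is hypothetical):

#include <algorithm>
#include <cmath>
#include <Eigen/Core>

Eigen::VectorXd clampedSqrtDiagonal(Eigen::VectorXd v, double minDiagonal,
                                    double maxDiagonal) {
  for (int i = 0; i < v.size(); ++i)
    v(i) = std::sqrt(std::min(std::max(v(i), minDiagonal), maxDiagonal));
  return v;
}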
@ -263,7 +263,7 @@ void LevenbergMarquardtOptimizer::iterate() {
double linearizedCostChange = 0,
newlinearizedError = 0;
if (systemSolvedSuccessfully) {
params_.reuse_diagonal_ = true;
state_.reuseDiagonal = true;
if (lmVerbosity >= LevenbergMarquardtParams::TRYLAMBDA)
cout << "linear delta norm = " << delta.norm() << endl;

View File

@ -52,50 +52,84 @@ public:
double lambdaLowerBound; ///< The minimum lambda used in LM (default: 0)
VerbosityLM verbosityLM; ///< The verbosity level for Levenberg-Marquardt (default: SILENT), see also NonlinearOptimizerParams::verbosity
double minModelFidelity; ///< Lower bound for the modelFidelity to accept the result of an LM iteration
std::string logFile; ///< an optional CSV log file, with [iteration, time, error, labda]
std::string logFile; ///< an optional CSV log file, with [iteration, time, error, lambda]
bool diagonalDamping; ///< if true, use diagonal of Hessian
bool reuse_diagonal_; ///< an additional option in Ceres for diagonalDamping (TODO: should be in state?)
bool useFixedLambdaFactor_; ///< if true applies constant increase (or decrease) to lambda according to lambdaFactor
double min_diagonal_; ///< when using diagonal damping saturates the minimum diagonal entries (default: 1e-6)
double max_diagonal_; ///< when using diagonal damping saturates the maximum diagonal entries (default: 1e32)
bool useFixedLambdaFactor; ///< if true applies constant increase (or decrease) to lambda according to lambdaFactor
double minDiagonal; ///< when using diagonal damping saturates the minimum diagonal entries (default: 1e-6)
double maxDiagonal; ///< when using diagonal damping saturates the maximum diagonal entries (default: 1e32)
LevenbergMarquardtParams()
: lambdaInitial(1e-5),
lambdaFactor(10.0),
lambdaUpperBound(1e5),
lambdaLowerBound(0.0),
verbosityLM(SILENT),
minModelFidelity(1e-3),
: verbosityLM(SILENT),
diagonalDamping(false),
reuse_diagonal_(false),
useFixedLambdaFactor_(true),
min_diagonal_(1e-6),
max_diagonal_(1e32) {}
minDiagonal(1e-6),
maxDiagonal(1e32) {
SetLegacyDefaults(this);
}
static void SetLegacyDefaults(LevenbergMarquardtParams* p) {
// Relevant NonlinearOptimizerParams:
p->maxIterations = 100;
p->relativeErrorTol = 1e-5;
p->absoluteErrorTol = 1e-5;
// LM-specific:
p->lambdaInitial = 1e-5;
p->lambdaFactor = 10.0;
p->lambdaUpperBound = 1e5;
p->lambdaLowerBound = 0.0;
p->minModelFidelity = 1e-3;
p->diagonalDamping = false;
p->useFixedLambdaFactor = true;
}
// These defaults seem to work better for SfM problems
static void SetCeresDefaults(LevenbergMarquardtParams* p) {
// Relevant NonlinearOptimizerParams:
p->maxIterations = 50;
p->absoluteErrorTol = 0; // No corresponding option in CERES
p->relativeErrorTol = 1e-6; // This is function_tolerance
// LM-specific:
p->lambdaUpperBound = 1e32;
p->lambdaLowerBound = 1e-16;
p->lambdaInitial = 1e-04;
p->lambdaFactor = 2.0;
p->minModelFidelity = 1e-3; // options.min_relative_decrease in CERES
p->diagonalDamping = true;
p->useFixedLambdaFactor = false; // This is important
}
static LevenbergMarquardtParams LegacyDefaults() {
LevenbergMarquardtParams p;
SetLegacyDefaults(&p);
return p;
}
static LevenbergMarquardtParams CeresDefaults() {
LevenbergMarquardtParams p;
SetCeresDefaults(&p);
return p;
}
virtual ~LevenbergMarquardtParams() {}
virtual void print(const std::string& str = "") const;
inline double getlambdaInitial() const { return lambdaInitial; }
inline double getlambdaFactor() const { return lambdaFactor; }
inline double getlambdaUpperBound() const { return lambdaUpperBound; }
inline double getlambdaLowerBound() const { return lambdaLowerBound; }
inline std::string getVerbosityLM() const {
return verbosityLMTranslator(verbosityLM);
}
inline std::string getLogFile() const { return logFile; }
inline bool getDiagonalDamping() const { return diagonalDamping; }
inline void setlambdaInitial(double value) { lambdaInitial = value; }
inline void setlambdaFactor(double value) { lambdaFactor = value; }
inline void setlambdaUpperBound(double value) { lambdaUpperBound = value; }
inline void setlambdaLowerBound(double value) { lambdaLowerBound = value; }
inline void setVerbosityLM(const std::string& s) {
verbosityLM = verbosityLMTranslator(s);
}
inline void setLogFile(const std::string& s) { logFile = s; }
inline void setDiagonalDamping(bool flag) { diagonalDamping = flag; }
inline void setUseFixedLambdaFactor(bool flag) {
useFixedLambdaFactor_ = flag;
}
/// @name Getters/Setters, mainly for MATLAB. Use fields above in C++.
/// @{
bool getDiagonalDamping() const { return diagonalDamping; }
double getlambdaFactor() const { return lambdaFactor; }
double getlambdaInitial() const { return lambdaInitial; }
double getlambdaLowerBound() const { return lambdaLowerBound; }
double getlambdaUpperBound() const { return lambdaUpperBound; }
std::string getLogFile() const { return logFile; }
std::string getVerbosityLM() const { return verbosityLMTranslator(verbosityLM);}
void setDiagonalDamping(bool flag) { diagonalDamping = flag; }
void setlambdaFactor(double value) { lambdaFactor = value; }
void setlambdaInitial(double value) { lambdaInitial = value; }
void setlambdaLowerBound(double value) { lambdaLowerBound = value; }
void setlambdaUpperBound(double value) { lambdaUpperBound = value; }
void setLogFile(const std::string& s) { logFile = s; }
void setUseFixedLambdaFactor(bool flag) { useFixedLambdaFactor = flag;}
void setVerbosityLM(const std::string& s) { verbosityLM = verbosityLMTranslator(s);}
// @}
};
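A short usage sketch of the new parameter profiles and public fields (the function name is hypothetical; the optimizer calls match those used in the tests later in this change):

#include <gtsam/nonlinear/LevenbergMarquardtOptimizer.h>

gtsam::Values optimizeCeresStyle(const gtsam::NonlinearFactorGraph& graph,
                                 const gtsam::Values& initial) {
  gtsam::LevenbergMarquardtParams params =
      gtsam::LevenbergMarquardtParams::CeresDefaults();
  params.lambdaUpperBound = 1e9;  // fields are plain public members now
  params.diagonalDamping = true;  // already true in CeresDefaults; shown for clarity
  gtsam::LevenbergMarquardtOptimizer optimizer(graph, initial, params);
  return optimizer.optimize();
}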
/**
@ -105,13 +139,12 @@ class GTSAM_EXPORT LevenbergMarquardtState: public NonlinearOptimizerState {
public:
double lambda;
int totalNumberInnerIterations; // The total number of inner iterations in the optimization (for each iteration, LM may try multiple iterations with different lambdas)
boost::posix_time::ptime startTime;
VectorValues hessianDiagonal; //only update hessianDiagonal when reuse_diagonal_ = false
int totalNumberInnerIterations; ///< The total number of inner iterations in the optimization (for each outer iteration, LM may try several lambdas)
VectorValues hessianDiagonal; ///< we only update hessianDiagonal when reuseDiagonal = false
bool reuseDiagonal; ///< an additional option in Ceres for diagonalDamping
LevenbergMarquardtState() {
initTime();
}
LevenbergMarquardtState() {} // used in LM constructor but immediately overwritten
void initTime() {
startTime = boost::posix_time::microsec_clock::universal_time();
@ -125,7 +158,7 @@ protected:
const Values& initialValues, const LevenbergMarquardtParams& params,
unsigned int iterations = 0) :
NonlinearOptimizerState(graph, initialValues, iterations), lambda(
params.lambdaInitial), totalNumberInnerIterations(0) {
params.lambdaInitial), totalNumberInnerIterations(0),reuseDiagonal(false) {
initTime();
}
@ -155,12 +188,12 @@ public:
* @param initialValues The initial variable assignments
* @param params The optimization parameters
*/
LevenbergMarquardtOptimizer(const NonlinearFactorGraph& graph,
const Values& initialValues, const LevenbergMarquardtParams& params =
LevenbergMarquardtParams()) :
NonlinearOptimizer(graph), params_(ensureHasOrdering(params, graph)), state_(
graph, initialValues, params_) {
}
LevenbergMarquardtOptimizer(
const NonlinearFactorGraph& graph, const Values& initialValues,
const LevenbergMarquardtParams& params = LevenbergMarquardtParams())
: NonlinearOptimizer(graph),
params_(ensureHasOrdering(params, graph)),
state_(graph, initialValues, params_) {}
/** Standard constructor, requires a nonlinear factor graph, initial
* variable assignments, and optimization parameters. For convenience this
@ -169,9 +202,11 @@ public:
* @param graph The nonlinear factor graph to optimize
* @param initialValues The initial variable assignments
*/
LevenbergMarquardtOptimizer(const NonlinearFactorGraph& graph,
const Values& initialValues, const Ordering& ordering) :
NonlinearOptimizer(graph) {
LevenbergMarquardtOptimizer(
const NonlinearFactorGraph& graph, const Values& initialValues,
const Ordering& ordering,
const LevenbergMarquardtParams& params = LevenbergMarquardtParams())
: NonlinearOptimizer(graph), params_(params) {
params_.ordering = ordering;
state_ = LevenbergMarquardtState(graph, initialValues, params_);
}

View File

@ -278,23 +278,26 @@ SymbolicFactorGraph::shared_ptr NonlinearFactorGraph::symbolic() const
namespace {
#ifdef GTSAM_USE_TBB
struct _LinearizeOneFactor {
const NonlinearFactorGraph& graph;
const Values& linearizationPoint;
GaussianFactorGraph& result;
_LinearizeOneFactor(const NonlinearFactorGraph& graph, const Values& linearizationPoint, GaussianFactorGraph& result) :
graph(graph), linearizationPoint(linearizationPoint), result(result) {}
void operator()(const tbb::blocked_range<size_t>& r) const
{
for(size_t i = r.begin(); i != r.end(); ++i)
{
if(graph[i])
result[i] = graph[i]->linearize(linearizationPoint);
else
result[i] = GaussianFactor::shared_ptr();
}
class _LinearizeOneFactor {
const NonlinearFactorGraph& nonlinearGraph_;
const Values& linearizationPoint_;
GaussianFactorGraph& result_;
public:
// Create functor with constant parameters
_LinearizeOneFactor(const NonlinearFactorGraph& graph,
const Values& linearizationPoint, GaussianFactorGraph& result) :
nonlinearGraph_(graph), linearizationPoint_(linearizationPoint), result_(result) {
}
// Operator that linearizes a given range of the factors
void operator()(const tbb::blocked_range<size_t>& blocked_range) const {
for (size_t i = blocked_range.begin(); i != blocked_range.end(); ++i) {
if (nonlinearGraph_[i])
result_[i] = nonlinearGraph_[i]->linearize(linearizationPoint_);
else
result_[i] = GaussianFactor::shared_ptr();
}
};
}
};
#endif
}
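For context, a range functor like this is typically driven by tbb::parallel_for over a blocked range. A hedged sketch of that call pattern (the helper name is hypothetical; the real invocation lives in NonlinearFactorGraph::linearize):

#ifdef GTSAM_USE_TBB
#include <tbb/blocked_range.h>
#include <tbb/parallel_for.h>

static void linearizeAllParallel(const NonlinearFactorGraph& graph,
                                 const Values& linearizationPoint,
                                 GaussianFactorGraph& result) {
  result.resize(graph.size());  // one output slot per nonlinear factor
  tbb::parallel_for(tbb::blocked_range<size_t>(0, graph.size()),
                    _LinearizeOneFactor(graph, linearizationPoint, result));
}
#endif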
@ -323,7 +326,7 @@ GaussianFactorGraph::shared_ptr NonlinearFactorGraph::linearize(const Values& li
if(factor) {
(*linearFG) += factor->linearize(linearizationPoint);
} else
(*linearFG) += GaussianFactor::shared_ptr();
(*linearFG) += GaussianFactor::shared_ptr();
}
#endif

View File

@ -210,8 +210,9 @@ namespace gtsam {
}
/* ************************************************************************* */
FastList<Key> Values::keys() const {
FastList<Key> result;
KeyVector Values::keys() const {
KeyVector result;
result.reserve(size());
for(const_iterator key_value = begin(); key_value != end(); ++key_value)
result.push_back(key_value->key);
return result;

View File

@ -291,7 +291,7 @@ namespace gtsam {
* Returns a set of keys in the config
* Note: by construction, the keys are ordered
*/
KeyList keys() const;
KeyVector keys() const;
/** Replace all keys and variables */
Values& operator=(const Values& rhs);
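With keys() now returning a KeyVector, callers get random access and can index directly. A small usage sketch (the function name is hypothetical):

#include <gtsam/nonlinear/Values.h>

// Returns the i-th key of the Values container, exploiting random access.
gtsam::Key keyAt(const gtsam::Values& values, size_t i) {
  const gtsam::KeyVector keys = values.keys();  // was FastList<Key> before
  return keys.at(i);                            // indexing was not possible on a list
}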

View File

@ -26,6 +26,7 @@
#include <CppUnitLite/TestHarness.h>
#include <boost/assign/std/list.hpp> // for operator +=
#include <boost/assign/std/vector.hpp>
#include <boost/assign/list_of.hpp>
using namespace boost::assign;
#include <stdexcept>
@ -308,12 +309,12 @@ TEST(Values, extract_keys)
config.insert(key3, Pose2());
config.insert(key4, Pose2());
FastList<Key> expected, actual;
KeyVector expected, actual;
expected += key1, key2, key3, key4;
actual = config.keys();
CHECK(actual.size() == expected.size());
FastList<Key>::const_iterator itAct = actual.begin(), itExp = expected.begin();
KeyVector::const_iterator itAct = actual.begin(), itExp = expected.begin();
for (; itAct != actual.end() && itExp != expected.end(); ++itAct, ++itExp) {
EXPECT(*itExp == *itAct);
}

View File

@ -17,15 +17,17 @@
#pragma once
#include <utility>
#include <gtsam/symbolic/SymbolicFactor.h>
#include <gtsam/symbolic/SymbolicConditional.h>
#include <gtsam/inference/Factor.h>
#include <gtsam/inference/Key.h>
#include <gtsam/base/timing.h>
#include <boost/shared_ptr.hpp>
#include <boost/foreach.hpp>
#include <boost/make_shared.hpp>
#include <gtsam/inference/Factor.h>
#include <gtsam/inference/Key.h>
#include <gtsam/symbolic/SymbolicFactor.h>
#include <gtsam/symbolic/SymbolicConditional.h>
#include <utility>
namespace gtsam
{
@ -37,6 +39,8 @@ namespace gtsam
std::pair<boost::shared_ptr<SymbolicConditional>, boost::shared_ptr<SymbolicFactor> >
EliminateSymbolic(const FactorGraph<FACTOR>& factors, const Ordering& keys)
{
gttic(EliminateSymbolic);
// Gather all keys
KeySet allKeys;
BOOST_FOREACH(const boost::shared_ptr<FACTOR>& factor, factors) {

View File

@ -407,7 +407,7 @@ void BatchFixedLagSmoother::marginalize(const std::set<Key>& marginalizeKeys) {
std::set<size_t> removedFactorSlots;
VariableIndex variableIndex(factors_);
BOOST_FOREACH(Key key, marginalizeKeys) {
const FastList<size_t>& slots = variableIndex[key];
const FastVector<size_t>& slots = variableIndex[key];
removedFactorSlots.insert(slots.begin(), slots.end());
}

View File

@ -536,7 +536,7 @@ void ConcurrentBatchFilter::moveSeparator(const FastList<Key>& keysToMove) {
std::vector<size_t> removedFactorSlots;
VariableIndex variableIndex(factors_);
BOOST_FOREACH(Key key, keysToMove) {
const FastList<size_t>& slots = variableIndex[key];
const FastVector<size_t>& slots = variableIndex[key];
removedFactorSlots.insert(removedFactorSlots.end(), slots.begin(), slots.end());
}

View File

@ -230,7 +230,7 @@ void ConcurrentBatchSmoother::reorder() {
// Recalculate the variable index
variableIndex_ = VariableIndex(factors_);
FastList<Key> separatorKeys = separatorValues_.keys();
KeyVector separatorKeys = separatorValues_.keys();
ordering_ = Ordering::colamdConstrainedLast(variableIndex_, std::vector<Key>(separatorKeys.begin(), separatorKeys.end()));
}

View File

@ -291,7 +291,7 @@ std::vector<size_t> ConcurrentIncrementalFilter::FindAdjacentFactors(const ISAM2
std::vector<size_t> removedFactorSlots;
const VariableIndex& variableIndex = isam2.getVariableIndex();
BOOST_FOREACH(Key key, keys) {
const FastList<size_t>& slots = variableIndex[key];
const FastVector<size_t>& slots = variableIndex[key];
removedFactorSlots.insert(removedFactorSlots.end(), slots.begin(), slots.end());
}

View File

@ -229,7 +229,7 @@ Values localToWorld(const Values& local, const Pose2& base,
// if no keys given, get all keys from local values
FastVector<Key> keys(user_keys);
if (keys.size()==0)
keys = FastVector<Key>(local.keys());
keys = local.keys();
// Loop over all keys
BOOST_FOREACH(Key key, keys) {

View File

@ -15,29 +15,42 @@
* @author nikai
*/
#include <CppUnitLite/TestHarness.h>
#if 0
#include <tests/smallExample.h>
#include <gtsam/slam/PriorFactor.h>
#include <gtsam/slam/BetweenFactor.h>
#include <gtsam/slam/BearingRangeFactor.h>
#include <gtsam/slam/BetweenFactor.h>
#include <gtsam/slam/PriorFactor.h>
#include <gtsam/geometry/Pose2.h>
#include <gtsam/nonlinear/NonlinearFactorGraph.h>
#include <gtsam/nonlinear/Values.h>
#include <gtsam/nonlinear/Ordering.h>
#include <gtsam/inference/Symbol.h>
#include <gtsam/linear/GaussianBayesNet.h>
#include <gtsam/linear/GaussianConditional.h>
#include <gtsam/linear/GaussianFactor.h>
#include <gtsam/linear/GaussianFactorGraph.h>
#include <gtsam/linear/GaussianEliminationTree.h>
#include <gtsam/linear/GaussianJunctionTree.h>
#include <gtsam/linear/HessianFactor.h>
#include <gtsam/linear/JacobianFactor.h>
#include <gtsam/linear/NoiseModel.h>
#include <gtsam/linear/VectorValues.h>
#include <gtsam/symbolic/SymbolicEliminationTree.h>
#include <gtsam/inference/BayesTree.h>
#include <gtsam/geometry/Pose2.h>
#include <gtsam/base/TestableAssertions.h>
#include <gtsam/base/debug.h>
#include <gtsam/base/cholesky.h>
#include <gtsam/inference/ClusterTree.h>
#include <gtsam/inference/Ordering.h>
#include <gtsam/inference/Symbol.h>
#include <gtsam/base/Matrix.h>
#include <gtsam/base/Testable.h>
#include <boost/assign/list_of.hpp>
#include <boost/assign/std/list.hpp> // for operator +=
#include <boost/assign/std/set.hpp> // for operator +=
#include <CppUnitLite/TestHarness.h>
#include <boost/shared_ptr.hpp>
#include <boost/tuple/tuple.hpp>
#include <boost/assign/std/vector.hpp>
#include <cmath>
#include <list>
#include <utility>
#include <vector>
using namespace boost::assign;
#include <iostream>
@ -51,190 +64,218 @@ using symbol_shorthand::L;
/* ************************************************************************* *
Bayes tree for smoother with "nested dissection" ordering:
C1 x5 x6 x4
C2 x3 x2 : x4
C3 x1 : x2
C4 x7 : x6
*/
TEST( GaussianJunctionTreeB, constructor2 )
{
C1 x5 x6 x4
C2 x3 x2 : x4
C3 x1 : x2
C4 x7 : x6
*/
TEST( GaussianJunctionTreeB, constructor2 ) {
// create a graph
Ordering ordering; ordering += X(1),X(3),X(5),X(7),X(2),X(6),X(4);
GaussianFactorGraph fg = createSmoother(7, ordering).first;
NonlinearFactorGraph nlfg;
Values values;
boost::tie(nlfg, values) = createNonlinearSmoother(7);
SymbolicFactorGraph::shared_ptr symbolic = nlfg.symbolic();
// linearize
GaussianFactorGraph::shared_ptr fg = nlfg.linearize(values);
Ordering ordering;
ordering += X(1), X(3), X(5), X(7), X(2), X(6), X(4);
// create an ordering
GaussianJunctionTree actual(fg);
GaussianEliminationTree etree(*fg, ordering);
SymbolicEliminationTree stree(*symbolic, ordering);
GaussianJunctionTree actual(etree);
vector<Index> frontal1; frontal1 += ordering[X(5)], ordering[X(6)], ordering[X(4)];
vector<Index> frontal2; frontal2 += ordering[X(3)], ordering[X(2)];
vector<Index> frontal3; frontal3 += ordering[X(1)];
vector<Index> frontal4; frontal4 += ordering[X(7)];
vector<Index> sep1;
vector<Index> sep2; sep2 += ordering[X(4)];
vector<Index> sep3; sep3 += ordering[X(2)];
vector<Index> sep4; sep4 += ordering[X(6)];
EXPECT(assert_equal(frontal1, actual.root()->frontal));
EXPECT(assert_equal(sep1, actual.root()->separator));
LONGS_EQUAL(5, actual.root()->size());
list<GaussianJunctionTree::sharedClique>::const_iterator child0it = actual.root()->children().begin();
list<GaussianJunctionTree::sharedClique>::const_iterator child1it = child0it; ++child1it;
GaussianJunctionTree::sharedClique child0 = *child0it;
GaussianJunctionTree::sharedClique child1 = *child1it;
EXPECT(assert_equal(frontal2, child0->frontal));
EXPECT(assert_equal(sep2, child0->separator));
LONGS_EQUAL(4, child0->size());
EXPECT(assert_equal(frontal3, child0->children().front()->frontal));
EXPECT(assert_equal(sep3, child0->children().front()->separator));
LONGS_EQUAL(2, child0->children().front()->size());
EXPECT(assert_equal(frontal4, child1->frontal));
EXPECT(assert_equal(sep4, child1->separator));
LONGS_EQUAL(2, child1->size());
Ordering o324;
o324 += X(3), X(2), X(4);
Ordering o56;
o56 += X(5), X(6);
Ordering o7;
o7 += X(7);
Ordering o1;
o1 += X(1);
// Can no longer test these:
// Ordering sep1;
// Ordering sep2; sep2 += X(4);
// Ordering sep3; sep3 += X(6);
// Ordering sep4; sep4 += X(2);
GaussianJunctionTree::sharedNode x324 = actual.roots().front();
LONGS_EQUAL(2, x324->children.size());
GaussianJunctionTree::sharedNode x1 = x324->children.front();
GaussianJunctionTree::sharedNode x56 = x324->children.back();
if (x1->children.size() > 0)
x1.swap(x56); // makes it work with different tie-breakers
LONGS_EQUAL(0, x1->children.size());
LONGS_EQUAL(1, x56->children.size());
GaussianJunctionTree::sharedNode x7 = x56->children[0];
LONGS_EQUAL(0, x7->children.size());
EXPECT(assert_equal(o324, x324->orderedFrontalKeys));
EXPECT_LONGS_EQUAL(5, x324->factors.size());
EXPECT_LONGS_EQUAL(9, x324->problemSize_);
EXPECT(assert_equal(o56, x56->orderedFrontalKeys));
EXPECT_LONGS_EQUAL(4, x56->factors.size());
EXPECT_LONGS_EQUAL(9, x56->problemSize_);
EXPECT(assert_equal(o7, x7->orderedFrontalKeys));
EXPECT_LONGS_EQUAL(2, x7->factors.size());
EXPECT_LONGS_EQUAL(4, x7->problemSize_);
EXPECT(assert_equal(o1, x1->orderedFrontalKeys));
EXPECT_LONGS_EQUAL(2, x1->factors.size());
EXPECT_LONGS_EQUAL(4, x1->problemSize_);
}
///* ************************************************************************* */
//TEST( GaussianJunctionTreeB, optimizeMultiFrontal )
//{
// // create a graph
// GaussianFactorGraph fg;
// Ordering ordering;
// boost::tie(fg,ordering) = createSmoother(7);
//
// // optimize the graph
// GaussianJunctionTree tree(fg);
// VectorValues actual = tree.optimize(&EliminateQR);
//
// // verify
// VectorValues expected(vector<size_t>(7,2)); // expected solution
// Vector v = Vector2(0., 0.);
// for (int i=1; i<=7; i++)
// expected[ordering[X(i)]] = v;
// EXPECT(assert_equal(expected,actual));
//}
//
///* ************************************************************************* */
//TEST( GaussianJunctionTreeB, optimizeMultiFrontal2)
//{
// // create a graph
// example::Graph nlfg = createNonlinearFactorGraph();
// Values noisy = createNoisyValues();
// Ordering ordering; ordering += X(1),X(2),L(1);
// GaussianFactorGraph fg = *nlfg.linearize(noisy, ordering);
//
// // optimize the graph
// GaussianJunctionTree tree(fg);
// VectorValues actual = tree.optimize(&EliminateQR);
//
// // verify
// VectorValues expected = createCorrectDelta(ordering); // expected solution
// EXPECT(assert_equal(expected,actual));
//}
//
///* ************************************************************************* */
//TEST(GaussianJunctionTreeB, slamlike) {
// Values init;
// NonlinearFactorGraph newfactors;
// NonlinearFactorGraph fullgraph;
// SharedDiagonal odoNoise = noiseModel::Diagonal::Sigmas((Vector(3) << 0.1, 0.1, M_PI/100.0));
// SharedDiagonal brNoise = noiseModel::Diagonal::Sigmas((Vector(2) << M_PI/100.0, 0.1));
//
// size_t i = 0;
//
// newfactors = NonlinearFactorGraph();
// newfactors.add(PriorFactor<Pose2>(X(0), Pose2(0.0, 0.0, 0.0), odoNoise));
// init.insert(X(0), Pose2(0.01, 0.01, 0.01));
// fullgraph.push_back(newfactors);
//
// for( ; i<5; ++i) {
// newfactors = NonlinearFactorGraph();
// newfactors.add(BetweenFactor<Pose2>(X(i), X(i+1), Pose2(1.0, 0.0, 0.0), odoNoise));
// init.insert(X(i+1), Pose2(double(i+1)+0.1, -0.1, 0.01));
// fullgraph.push_back(newfactors);
// }
//
// newfactors = NonlinearFactorGraph();
// newfactors.add(BetweenFactor<Pose2>(X(i), X(i+1), Pose2(1.0, 0.0, 0.0), odoNoise));
// newfactors.add(BearingRangeFactor<Pose2,Point2>(X(i), L(0), Rot2::fromAngle(M_PI/4.0), 5.0, brNoise));
// newfactors.add(BearingRangeFactor<Pose2,Point2>(X(i), L(1), Rot2::fromAngle(-M_PI/4.0), 5.0, brNoise));
// init.insert(X(i+1), Pose2(1.01, 0.01, 0.01));
// init.insert(L(0), Point2(5.0/sqrt(2.0), 5.0/sqrt(2.0)));
// init.insert(L(1), Point2(5.0/sqrt(2.0), -5.0/sqrt(2.0)));
// fullgraph.push_back(newfactors);
// ++ i;
//
// for( ; i<5; ++i) {
// newfactors = NonlinearFactorGraph();
// newfactors.add(BetweenFactor<Pose2>(X(i), X(i+1), Pose2(1.0, 0.0, 0.0), odoNoise));
// init.insert(X(i+1), Pose2(double(i+1)+0.1, -0.1, 0.01));
// fullgraph.push_back(newfactors);
// }
//
// newfactors = NonlinearFactorGraph();
// newfactors.add(BetweenFactor<Pose2>(X(i), X(i+1), Pose2(1.0, 0.0, 0.0), odoNoise));
// newfactors.add(BearingRangeFactor<Pose2,Point2>(X(i), L(0), Rot2::fromAngle(M_PI/4.0 + M_PI/16.0), 4.5, brNoise));
// newfactors.add(BearingRangeFactor<Pose2,Point2>(X(i), L(1), Rot2::fromAngle(-M_PI/4.0 + M_PI/16.0), 4.5, brNoise));
// init.insert(X(i+1), Pose2(6.9, 0.1, 0.01));
// fullgraph.push_back(newfactors);
// ++ i;
//
// // Compare solutions
// Ordering ordering = *fullgraph.orderingCOLAMD(init);
// GaussianFactorGraph linearized = *fullgraph.linearize(init, ordering);
//
// GaussianJunctionTree gjt(linearized);
// VectorValues deltaactual = gjt.optimize(&EliminateQR);
// Values actual = init.retract(deltaactual, ordering);
//
// GaussianBayesNet gbn = *GaussianSequentialSolver(linearized).eliminate();
// VectorValues delta = optimize(gbn);
// Values expected = init.retract(delta, ordering);
//
// EXPECT(assert_equal(expected, actual));
//}
//
///* ************************************************************************* */
//TEST(GaussianJunctionTreeB, simpleMarginal) {
//
// typedef BayesTree<GaussianConditional> GaussianBayesTree;
//
// // Create a simple graph
// NonlinearFactorGraph fg;
// fg.add(PriorFactor<Pose2>(X(0), Pose2(), noiseModel::Isotropic::Sigma(3, 10.0)));
// fg.add(BetweenFactor<Pose2>(X(0), X(1), Pose2(1.0, 0.0, 0.0), noiseModel::Diagonal::Sigmas(Vector3(10.0, 1.0, 1.0))));
//
// Values init;
// init.insert(X(0), Pose2());
// init.insert(X(1), Pose2(1.0, 0.0, 0.0));
//
// Ordering ordering;
// ordering += X(1), X(0);
//
// GaussianFactorGraph gfg = *fg.linearize(init, ordering);
//
// // Compute marginals with both sequential and multifrontal
// Matrix expected = GaussianSequentialSolver(gfg).marginalCovariance(1);
//
// Matrix actual1 = GaussianMultifrontalSolver(gfg).marginalCovariance(1);
//
// // Compute marginal directly from marginal factor
// GaussianFactor::shared_ptr marginalFactor = GaussianMultifrontalSolver(gfg).marginalFactor(1);
// JacobianFactor::shared_ptr marginalJacobian = boost::dynamic_pointer_cast<JacobianFactor>(marginalFactor);
// Matrix actual2 = inverse(marginalJacobian->getA(marginalJacobian->begin()).transpose() * marginalJacobian->getA(marginalJacobian->begin()));
//
// // Compute marginal directly from BayesTree
// GaussianBayesTree gbt;
// gbt.insert(GaussianJunctionTree(gfg).eliminate(EliminateCholesky));
// marginalFactor = gbt.marginalFactor(1, EliminateCholesky);
// marginalJacobian = boost::dynamic_pointer_cast<JacobianFactor>(marginalFactor);
// Matrix actual3 = inverse(marginalJacobian->getA(marginalJacobian->begin()).transpose() * marginalJacobian->getA(marginalJacobian->begin()));
//
// EXPECT(assert_equal(expected, actual1));
// EXPECT(assert_equal(expected, actual2));
// EXPECT(assert_equal(expected, actual3));
//}
/* ************************************************************************* */
TEST( GaussianJunctionTreeB, optimizeMultiFrontal )
{
// create a graph
GaussianFactorGraph fg;
Ordering ordering;
boost::tie(fg,ordering) = createSmoother(7);
// optimize the graph
GaussianJunctionTree tree(fg);
VectorValues actual = tree.optimize(&EliminateQR);
// verify
VectorValues expected(vector<size_t>(7,2)); // expected solution
Vector v = Vector2(0., 0.);
for (int i=1; i<=7; i++)
expected[ordering[X(i)]] = v;
EXPECT(assert_equal(expected,actual));
int main() {
TestResult tr;
return TestRegistry::runAllTests(tr);
}
/* ************************************************************************* */
TEST( GaussianJunctionTreeB, optimizeMultiFrontal2)
{
// create a graph
example::Graph nlfg = createNonlinearFactorGraph();
Values noisy = createNoisyValues();
Ordering ordering; ordering += X(1),X(2),L(1);
GaussianFactorGraph fg = *nlfg.linearize(noisy, ordering);
// optimize the graph
GaussianJunctionTree tree(fg);
VectorValues actual = tree.optimize(&EliminateQR);
// verify
VectorValues expected = createCorrectDelta(ordering); // expected solution
EXPECT(assert_equal(expected,actual));
}
/* ************************************************************************* */
TEST(GaussianJunctionTreeB, slamlike) {
Values init;
NonlinearFactorGraph newfactors;
NonlinearFactorGraph fullgraph;
SharedDiagonal odoNoise = noiseModel::Diagonal::Sigmas((Vector(3) << 0.1, 0.1, M_PI/100.0));
SharedDiagonal brNoise = noiseModel::Diagonal::Sigmas((Vector(2) << M_PI/100.0, 0.1));
size_t i = 0;
newfactors = NonlinearFactorGraph();
newfactors.add(PriorFactor<Pose2>(X(0), Pose2(0.0, 0.0, 0.0), odoNoise));
init.insert(X(0), Pose2(0.01, 0.01, 0.01));
fullgraph.push_back(newfactors);
for( ; i<5; ++i) {
newfactors = NonlinearFactorGraph();
newfactors.add(BetweenFactor<Pose2>(X(i), X(i+1), Pose2(1.0, 0.0, 0.0), odoNoise));
init.insert(X(i+1), Pose2(double(i+1)+0.1, -0.1, 0.01));
fullgraph.push_back(newfactors);
}
newfactors = NonlinearFactorGraph();
newfactors.add(BetweenFactor<Pose2>(X(i), X(i+1), Pose2(1.0, 0.0, 0.0), odoNoise));
newfactors.add(BearingRangeFactor<Pose2,Point2>(X(i), L(0), Rot2::fromAngle(M_PI/4.0), 5.0, brNoise));
newfactors.add(BearingRangeFactor<Pose2,Point2>(X(i), L(1), Rot2::fromAngle(-M_PI/4.0), 5.0, brNoise));
init.insert(X(i+1), Pose2(1.01, 0.01, 0.01));
init.insert(L(0), Point2(5.0/sqrt(2.0), 5.0/sqrt(2.0)));
init.insert(L(1), Point2(5.0/sqrt(2.0), -5.0/sqrt(2.0)));
fullgraph.push_back(newfactors);
++ i;
for( ; i<5; ++i) {
newfactors = NonlinearFactorGraph();
newfactors.add(BetweenFactor<Pose2>(X(i), X(i+1), Pose2(1.0, 0.0, 0.0), odoNoise));
init.insert(X(i+1), Pose2(double(i+1)+0.1, -0.1, 0.01));
fullgraph.push_back(newfactors);
}
newfactors = NonlinearFactorGraph();
newfactors.add(BetweenFactor<Pose2>(X(i), X(i+1), Pose2(1.0, 0.0, 0.0), odoNoise));
newfactors.add(BearingRangeFactor<Pose2,Point2>(X(i), L(0), Rot2::fromAngle(M_PI/4.0 + M_PI/16.0), 4.5, brNoise));
newfactors.add(BearingRangeFactor<Pose2,Point2>(X(i), L(1), Rot2::fromAngle(-M_PI/4.0 + M_PI/16.0), 4.5, brNoise));
init.insert(X(i+1), Pose2(6.9, 0.1, 0.01));
fullgraph.push_back(newfactors);
++ i;
// Compare solutions
Ordering ordering = *fullgraph.orderingCOLAMD(init);
GaussianFactorGraph linearized = *fullgraph.linearize(init, ordering);
GaussianJunctionTree gjt(linearized);
VectorValues deltaactual = gjt.optimize(&EliminateQR);
Values actual = init.retract(deltaactual, ordering);
GaussianBayesNet gbn = *GaussianSequentialSolver(linearized).eliminate();
VectorValues delta = optimize(gbn);
Values expected = init.retract(delta, ordering);
EXPECT(assert_equal(expected, actual));
}
/* ************************************************************************* */
TEST(GaussianJunctionTreeB, simpleMarginal) {
typedef BayesTree<GaussianConditional> GaussianBayesTree;
// Create a simple graph
NonlinearFactorGraph fg;
fg.add(PriorFactor<Pose2>(X(0), Pose2(), noiseModel::Isotropic::Sigma(3, 10.0)));
fg.add(BetweenFactor<Pose2>(X(0), X(1), Pose2(1.0, 0.0, 0.0), noiseModel::Diagonal::Sigmas(Vector3(10.0, 1.0, 1.0))));
Values init;
init.insert(X(0), Pose2());
init.insert(X(1), Pose2(1.0, 0.0, 0.0));
Ordering ordering;
ordering += X(1), X(0);
GaussianFactorGraph gfg = *fg.linearize(init, ordering);
// Compute marginals with both sequential and multifrontal
Matrix expected = GaussianSequentialSolver(gfg).marginalCovariance(1);
Matrix actual1 = GaussianMultifrontalSolver(gfg).marginalCovariance(1);
// Compute marginal directly from marginal factor
GaussianFactor::shared_ptr marginalFactor = GaussianMultifrontalSolver(gfg).marginalFactor(1);
JacobianFactor::shared_ptr marginalJacobian = boost::dynamic_pointer_cast<JacobianFactor>(marginalFactor);
Matrix actual2 = inverse(marginalJacobian->getA(marginalJacobian->begin()).transpose() * marginalJacobian->getA(marginalJacobian->begin()));
// Compute marginal directly from BayesTree
GaussianBayesTree gbt;
gbt.insert(GaussianJunctionTree(gfg).eliminate(EliminateCholesky));
marginalFactor = gbt.marginalFactor(1, EliminateCholesky);
marginalJacobian = boost::dynamic_pointer_cast<JacobianFactor>(marginalFactor);
Matrix actual3 = inverse(marginalJacobian->getA(marginalJacobian->begin()).transpose() * marginalJacobian->getA(marginalJacobian->begin()));
EXPECT(assert_equal(expected, actual1));
EXPECT(assert_equal(expected, actual2));
EXPECT(assert_equal(expected, actual3));
}
#endif
/* ************************************************************************* */
int main() { TestResult tr; return TestRegistry::runAllTests(tr);}
/* ************************************************************************* */

View File

@ -187,7 +187,9 @@ TEST( NonlinearOptimizer, Factorization )
ordering.push_back(X(1));
ordering.push_back(X(2));
LevenbergMarquardtOptimizer optimizer(graph, config, ordering);
LevenbergMarquardtParams params;
LevenbergMarquardtParams::SetLegacyDefaults(&params);
LevenbergMarquardtOptimizer optimizer(graph, config, ordering, params);
optimizer.iterate();
Values expected;
@ -260,13 +262,13 @@ TEST_UNSAFE(NonlinearOptimizer, MoreOptimization) {
expectedGradient.insert(2,zero(3));
// Try LM and Dogleg
LevenbergMarquardtParams params;
// params.setVerbosityLM("TRYDELTA");
// params.setVerbosity("TERMINATION");
params.setlambdaUpperBound(1e9);
// params.setRelativeErrorTol(0);
// params.setAbsoluteErrorTol(0);
//params.setlambdaInitial(10);
LevenbergMarquardtParams params = LevenbergMarquardtParams::LegacyDefaults();
// params.setVerbosityLM("TRYDELTA");
// params.setVerbosity("TERMINATION");
params.lambdaUpperBound = 1e9;
// params.relativeErrorTol = 0;
// params.absoluteErrorTol = 0;
//params.lambdaInitial = 10;
{
LevenbergMarquardtOptimizer optimizer(fg, init, params);
@ -290,7 +292,7 @@ TEST_UNSAFE(NonlinearOptimizer, MoreOptimization) {
initBetter.insert(2, Pose2(11,7,M_PI/2));
{
params.setDiagonalDamping(true);
params.diagonalDamping = true;
LevenbergMarquardtOptimizer optimizer(fg, initBetter, params);
// test the diagonal
@ -399,7 +401,7 @@ public:
/// Constructor
IterativeLM(const NonlinearFactorGraph& graph, const Values& initialValues,
const ConjugateGradientParameters &p,
const LevenbergMarquardtParams& params = LevenbergMarquardtParams()) :
const LevenbergMarquardtParams& params = LevenbergMarquardtParams::LegacyDefaults()) :
LevenbergMarquardtOptimizer(graph, initialValues, params), cgParams_(p) {
}
@ -446,8 +448,7 @@ TEST( NonlinearOptimizer, logfile )
// Levenberg-Marquardt
LevenbergMarquardtParams lmParams;
static const string filename("testNonlinearOptimizer.log");
lmParams.setLogFile(filename);
CHECK(lmParams.getLogFile()==filename);
lmParams.logFile = filename;
LevenbergMarquardtOptimizer(fg, c0, lmParams).optimize();
// stringstream expected,actual;

View File

@ -17,9 +17,6 @@
*/
#include <gtsam/slam/dataset.h>
#include <gtsam/slam/GeneralSFMFactor.h>
#include <gtsam/geometry/Cal3Bundler.h>
#include <gtsam/geometry/PinholeCamera.h>
#include <gtsam/geometry/Point3.h>
#include <gtsam/nonlinear/LevenbergMarquardtOptimizer.h>
#include <gtsam/nonlinear/NonlinearFactorGraph.h>
@ -37,50 +34,122 @@
using namespace std;
using namespace gtsam;
//#define TERNARY
#define USE_GTSAM_FACTOR
#ifdef USE_GTSAM_FACTOR
#include <gtsam/slam/GeneralSFMFactor.h>
#include <gtsam/geometry/Cal3Bundler.h>
#include <gtsam/geometry/PinholeCamera.h>
typedef PinholeCamera<Cal3Bundler> Camera;
typedef GeneralSFMFactor<Camera, Point3> SfmFactor;
#else
#include <gtsam/3rdparty/ceres/example.h>
#include <gtsam/nonlinear/ExpressionFactor.h>
#include <gtsam/nonlinear/AdaptAutoDiff.h>
// Special version of Cal3Bundler so that default constructor = 0,0,0
struct CeresCalibration: public Cal3Bundler {
CeresCalibration(double f = 0, double k1 = 0, double k2 = 0, double u0 = 0,
double v0 = 0) :
Cal3Bundler(f, k1, k2, u0, v0) {
}
CeresCalibration(const Cal3Bundler& cal) :
Cal3Bundler(cal) {
}
CeresCalibration retract(const Vector& d) const {
return CeresCalibration(fx() + d(0), k1() + d(1), k2() + d(2), u0(), v0());
}
Vector3 localCoordinates(const CeresCalibration& T2) const {
return T2.vector() - vector();
}
};
namespace gtsam {
template<>
struct traits<CeresCalibration> : public internal::Manifold<CeresCalibration> {
};
}
// With that, camera below behaves like Snavely's 9-dim vector
typedef PinholeCamera<CeresCalibration> Camera;
#endif
int main(int argc, char* argv[]) {
typedef GeneralSFMFactor<PinholeCamera<Cal3Bundler>, Point3> sfmFactor;
using symbol_shorthand::P;
// Load BAL file (default is tiny)
string defaultFilename = findExampleDataFile("dubrovnik-3-7-pre");
SfM_data db;
bool success = readBAL(argc > 1 ? argv[1] : defaultFilename, db);
if (!success) throw runtime_error("Could not access file!");
if (!success)
throw runtime_error("Could not access file!");
#ifndef USE_GTSAM_FACTOR
AdaptAutoDiff<SnavelyProjection, Point2, Camera, Point3> snavely;
#endif
// Build graph
SharedNoiseModel unit2 = noiseModel::Unit::Create(2);
NonlinearFactorGraph graph;
for (size_t j = 0; j < db.number_tracks(); j++) {
BOOST_FOREACH (const SfM_Measurement& m, db.tracks[j].measurements)
graph.push_back(sfmFactor(m.second, unit2, m.first, P(j)));
BOOST_FOREACH (const SfM_Measurement& m, db.tracks[j].measurements) {
size_t i = m.first;
Point2 measurement = m.second;
#ifdef USE_GTSAM_FACTOR
graph.push_back(SfmFactor(measurement, unit2, i, P(j)));
#else
Expression<Camera> camera_(i);
Expression<Point3> point_(P(j));
graph.addExpressionFactor(unit2, measurement,
Expression<Point2>(snavely, camera_, point_));
#endif
}
}
Values initial = initialCamerasAndPointsEstimate(db);
Values initial;
size_t i = 0, j = 0;
BOOST_FOREACH(const SfM_Camera& camera, db.cameras) {
#ifdef USE_GTSAM_FACTOR
initial.insert((i++), camera);
#else
Camera ceresCamera(camera.pose(), camera.calibration());
initial.insert((i++), ceresCamera);
#endif
}
BOOST_FOREACH(const SfM_Track& track, db.tracks)
initial.insert(P(j++), track.p);
// Create Schur-complement ordering
// Check projection
Point2 expected = db.tracks.front().measurements.front().second;
Camera camera = initial.at<Camera>(0);
Point3 point = initial.at<Point3>(P(0));
#ifdef USE_GTSAM_FACTOR
Point2 actual = camera.project(point);
#else
Point2 actual = snavely(camera, point);
#endif
assert_equal(expected,actual,10);
// Create Schur-complement ordering
#ifdef CCOLAMD
vector<Key> pointKeys;
for (size_t j = 0; j < db.number_tracks(); j++) pointKeys.push_back(P(j));
Ordering ordering = Ordering::colamdConstrainedFirst(graph, pointKeys, true);
#else
Ordering ordering;
for (size_t j = 0; j < db.number_tracks(); j++) ordering.push_back(P(j));
for (size_t i = 0; i < db.number_cameras(); i++) ordering.push_back(i);
for (size_t j = 0; j < db.number_tracks(); j++)
ordering.push_back(P(j));
for (size_t i = 0; i < db.number_cameras(); i++)
ordering.push_back(i);
#endif
// Optimize
// Set parameters to be similar to Ceres
LevenbergMarquardtParams params;
LevenbergMarquardtParams::SetCeresDefaults(&params);
params.setOrdering(ordering);
params.setVerbosity("ERROR");
params.setVerbosityLM("TRYLAMBDA");
params.setDiagonalDamping(true);
params.setlambdaInitial(1e-4);
params.setlambdaFactor(2.0);
LevenbergMarquardtOptimizer lm(graph, initial, params);
Values actual = lm.optimize();
Values result = lm.optimize();
tictoc_finishedIteration_();
tictoc_print_();