parent acb37a0277
commit 574936bb5a

configure.ac (+11)
@@ -78,6 +78,17 @@ AC_ARG_ENABLE([spqr],
AM_CONDITIONAL([USE_SPQR], [test x$spqr = xtrue])

# enable profiling
AC_ARG_ENABLE([profiling],
  [  --enable-profiling    Enable profiling],
  [case "${enableval}" in
    yes) profiling=true ;;
    no)  profiling=false ;;
    *) AC_MSG_ERROR([bad value ${enableval} for --enable-profiling]) ;;
  esac],[profiling=false])

AM_CONDITIONAL([USE_PROFILING], [test x$profiling = xtrue])

# Checks for programs.
AC_PROG_CXX
AC_PROG_CC
@@ -313,74 +313,6 @@ Matrix GaussianFactor::matrix_augmented(const Ordering& ordering, bool weight) c
  return Ab;
}

/* ************************************************************************* */
std::pair<Matrix, SharedDiagonal> GaussianFactor::combineFactorsAndCreateMatrix(
    const vector<GaussianFactor::shared_ptr>& factors,
    const Ordering& order, const Dimensions& dimensions) {
  // find the size of Ab
  size_t m = 0, n = 1;

  // number of rows
  BOOST_FOREACH(GaussianFactor::shared_ptr factor, factors) {
    m += factor->numberOfRows();
  }

  // find the number of columns
  BOOST_FOREACH(const Symbol& key, order) {
    n += dimensions.at(key);
  }

  // Allocate the new matrix
  Matrix Ab = zeros(m,n);

  // Allocate a sigmas vector to make into a full noisemodel
  Vector sigmas = ones(m);

  // copy data over
  size_t cur_m = 0;
  bool constrained = false;
  bool unit = true;
  BOOST_FOREACH(GaussianFactor::shared_ptr factor, factors) {
    // loop through ordering
    size_t cur_n = 0;
    BOOST_FOREACH(const Symbol& key, order) {
      // copy over matrix if it exists
      if (factor->involves(key)) {
        insertSub(Ab, factor->get_A(key), cur_m, cur_n);
      }
      // move onto next element
      cur_n += dimensions.at(key);
    }
    // copy over the RHS
    insertColumn(Ab, factor->get_b(), cur_m, n-1);

    // check if the model is unit already
    if (!boost::shared_dynamic_cast<noiseModel::Unit>(factor->get_model())) {
      unit = false;
      const Vector& subsigmas = factor->get_model()->sigmas();
      subInsert(sigmas, subsigmas, cur_m);

      // check for constraint
      if (boost::shared_dynamic_cast<noiseModel::Constrained>(factor->get_model()))
        constrained = true;
    }

    // move to next row
    cur_m += factor->numberOfRows();
  }

  // combine the noisemodels
  SharedDiagonal model;
  if (unit) {
    model = noiseModel::Unit::Create(m);
  } else if (constrained) {
    model = noiseModel::Constrained::MixedSigmas(sigmas);
  } else {
    model = noiseModel::Diagonal::Sigmas(sigmas);
  }
  return make_pair(Ab, model);
}

/* ************************************************************************* */
boost::tuple<list<int>, list<int>, list<double> >
GaussianFactor::sparse(const Dimensions& columnIndices) const {
@@ -259,17 +259,6 @@ public:
   */
  void append_factor(GaussianFactor::shared_ptr f, size_t m, size_t pos);

  /**
   * Returns the augmented matrix version of a set of factors
   * with the corresponding noiseModel
   * @param factors is the set of factors to combine
   * @param ordering of variables needed for matrix column order
   * @return the augmented matrix and a noise model
   */
  static std::pair<Matrix, SharedDiagonal> combineFactorsAndCreateMatrix(
      const std::vector<GaussianFactor::shared_ptr>& factors,
      const Ordering& order, const Dimensions& dimensions);

}; // GaussianFactor

/* ************************************************************************* */
@@ -29,6 +29,8 @@ using namespace boost::assign;
// trick from some reading group
#define FOREACH_PAIR( KEY, VAL, COL) BOOST_FOREACH (boost::tie(KEY,VAL),COL)

namespace gtsam {

// Explicitly instantiate so we don't have to include everywhere
template class FactorGraph<GaussianFactor>;
@@ -134,6 +136,75 @@ GaussianFactorGraph::eliminateOne(const Symbol& key, bool old) {
#endif
}

/* ************************************************************************* */
template <class Factors>
std::pair<Matrix, SharedDiagonal> combineFactorsAndCreateMatrix(
    const Factors& factors,
    const Ordering& order, const Dimensions& dimensions) {
  // find the size of Ab
  size_t m = 0, n = 1;

  // number of rows
  BOOST_FOREACH(GaussianFactor::shared_ptr factor, factors) {
    m += factor->numberOfRows();
  }

  // find the number of columns
  BOOST_FOREACH(const Symbol& key, order) {
    n += dimensions.at(key);
  }

  // Allocate the new matrix
  Matrix Ab = zeros(m,n);

  // Allocate a sigmas vector to make into a full noisemodel
  Vector sigmas = ones(m);

  // copy data over
  size_t cur_m = 0;
  bool constrained = false;
  bool unit = true;
  BOOST_FOREACH(GaussianFactor::shared_ptr factor, factors) {
    // loop through ordering
    size_t cur_n = 0;
    BOOST_FOREACH(const Symbol& key, order) {
      // copy over matrix if it exists
      if (factor->involves(key)) {
        insertSub(Ab, factor->get_A(key), cur_m, cur_n);
      }
      // move onto next element
      cur_n += dimensions.at(key);
    }
    // copy over the RHS
    insertColumn(Ab, factor->get_b(), cur_m, n-1);

    // check if the model is unit already
    if (!boost::shared_dynamic_cast<noiseModel::Unit>(factor->get_model())) {
      unit = false;
      const Vector& subsigmas = factor->get_model()->sigmas();
      subInsert(sigmas, subsigmas, cur_m);

      // check for constraint
      if (boost::shared_dynamic_cast<noiseModel::Constrained>(factor->get_model()))
        constrained = true;
    }

    // move to next row
    cur_m += factor->numberOfRows();
  }

  // combine the noisemodels
  SharedDiagonal model;
  if (unit) {
    model = noiseModel::Unit::Create(m);
  } else if (constrained) {
    model = noiseModel::Constrained::MixedSigmas(sigmas);
  } else {
    model = noiseModel::Diagonal::Sigmas(sigmas);
  }
  return make_pair(Ab, model);
}

/* ************************************************************************* */
GaussianConditional::shared_ptr
GaussianFactorGraph::eliminateOneMatrixJoin(const Symbol& key) {
@@ -159,8 +230,7 @@ GaussianFactorGraph::eliminateOneMatrixJoin(const Symbol& key) {
  // combine the factors to get a noisemodel and a combined matrix
  Matrix Ab; SharedDiagonal model;

  boost::tie(Ab, model) =
      GaussianFactor::combineFactorsAndCreateMatrix(factors,render,dimensions);
  boost::tie(Ab, model) = combineFactorsAndCreateMatrix(factors,render,dimensions);

  // eliminate that joint factor
  GaussianFactor::shared_ptr factor;
@@ -187,6 +257,25 @@ GaussianFactorGraph::eliminate(const Ordering& ordering, bool old)
  return chordalBayesNet;
}

/* ************************************************************************* */
GaussianBayesNet
GaussianFactorGraph::eliminateFrontals(const Ordering& frontals)
{
  Matrix Ab; SharedDiagonal model;
  Dimensions dimensions = this->dimensions();
  boost::tie(Ab, model) = combineFactorsAndCreateMatrix(*this, keys(), dimensions);

  // eliminate that joint factor
  GaussianFactor::shared_ptr factor;
  // GaussianConditional::shared_ptr conditional;
  GaussianBayesNet bn;
  // boost::tie(bn, factor) =
  //     GaussianFactor::eliminateMatrix(Ab, model, frontals, dimensions);

  return bn;
}


/* ************************************************************************* */
VectorConfig GaussianFactorGraph::optimize(const Ordering& ordering, bool old)
{
@@ -436,3 +525,11 @@ boost::shared_ptr<VectorConfig> GaussianFactorGraph::conjugateGradientDescent_(
}

/* ************************************************************************* */

template std::pair<Matrix, SharedDiagonal> combineFactorsAndCreateMatrix<vector<GaussianFactor::shared_ptr> >(
    const vector<GaussianFactor::shared_ptr>& factors, const Ordering& order, const Dimensions& dimensions);

template std::pair<Matrix, SharedDiagonal> combineFactorsAndCreateMatrix<GaussianFactorGraph>(
    const GaussianFactorGraph& factors, const Ordering& order, const Dimensions& dimensions);

} // namespace gtsam
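The two explicit instantiations above follow the same idiom as the FactorGraph<GaussianFactor> instantiation near the top of the file: the template body stays in the .cpp, and only the instantiations actually needed elsewhere are emitted at link time. A minimal self-contained sketch of the idiom, with toy names that are not GTSAM code:

// combine.h -- declaration only; the definition lives in the .cpp
template <class Container>
int countItems(const Container& c);

// combine.cpp -- definition plus the instantiations we promise to provide
#include <vector>
#include <list>

template <class Container>
int countItems(const Container& c) { return static_cast<int>(c.size()); }

// emit object code for exactly these two containers; other translation
// units only see the declaration and link against these symbols
template int countItems<std::vector<int> >(const std::vector<int>&);
template int countItems<std::list<int> >(const std::list<int>&);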
@@ -141,6 +141,11 @@ namespace gtsam {
   */
  GaussianBayesNet eliminate(const Ordering& ordering, bool enableJoinFactor = true);

  /**
   * Eliminate multiple variables at once, mostly used to eliminate frontal variables
   */
  GaussianBayesNet eliminateFrontals(const Ordering& frontals);

  /**
   * optimize a linear factor graph
   * @param ordering fg in order
@@ -261,4 +266,17 @@ namespace gtsam {
      const VectorConfig& x0, bool verbose = false, double epsilon = 1e-3,
      size_t maxIterations = 0) const;
  };

  /**
   * Returns the augmented matrix version of a set of factors
   * with the corresponding noiseModel
   * @param factors is the set of factors to combine
   * @param ordering of variables needed for matrix column order
   * @return the augmented matrix and a noise model
   */
  template <class Factors>
  std::pair<Matrix, SharedDiagonal> combineFactorsAndCreateMatrix(
      const Factors& factors,
      const Ordering& order, const Dimensions& dimensions);

} // namespace gtsam
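To make the intended call pattern concrete, here is a minimal sketch mirroring the combine_matrix unit test added below in testGaussianFactorGraph.cpp. combineMatrixSketch is a hypothetical helper, the fixture names (smallExample.h, createGaussianFactorGraph) come from that test, and the boost::assign-style operator+= on Ordering is assumed to behave as it does there:

#include <boost/tuple/tuple.hpp>
#include <boost/assign/std/list.hpp>
#include "smallExample.h"
#include "GaussianFactorGraph.h"

using namespace gtsam;
using namespace example;
using namespace boost::assign;

// hypothetical helper showing the intended call pattern
void combineMatrixSketch() {
  // build a small example graph and query its variable dimensions
  GaussianFactorGraph fg = createGaussianFactorGraph();
  Dimensions dimensions = fg.dimensions();

  // column ordering for the augmented matrix [A|b]
  Ordering order; order += "x2", "l1", "x1";

  // stack every factor into one augmented matrix plus a combined noise model;
  // the template accepts any container of GaussianFactor::shared_ptr,
  // including the graph itself (see the explicit instantiations in the .cpp)
  Matrix Ab; SharedDiagonal model;
  boost::tie(Ab, model) = combineFactorsAndCreateMatrix(fg, order, dimensions);
}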
@@ -0,0 +1,115 @@
/*
 * JunctionTree-inl.h
 *
 *  Created on: Feb 4, 2010
 *      Author: nikai
 *  Description: the junction tree
 */

#include <boost/tuple/tuple.hpp>
#include <boost/foreach.hpp>
#include <boost/optional.hpp>
#include "Pose2.h"
#include "BayesTree-inl.h"
#include "SymbolicFactorGraph.h"

#include "JunctionTree.h"

#define DEBUG(i) \
  if (verboseLevel>i) cout

namespace gtsam {

using namespace std;

/* ************************************************************************* */
/**
 * Linear JunctionTree
 */
template <class Conditional, class FG>
void JunctionTree<Conditional, FG>::SubFG::printTree(const string& indent) const {
  print(indent);
  BOOST_FOREACH(const shared_ptr& child, children_)
    child->printTree(indent+" ");
}

/* ************************************************************************* */
template <class Conditional, class FG>
pair<FG, typename BayesTree<Conditional>::sharedClique>
JunctionTree<Conditional, FG>::eliminateOneClique(sharedSubFG current, BayesTree<Conditional>& bayesTree) {
  FG fg; // factor graph will be assembled from local factors and marginalized children
  list<sharedClique> children;
  fg.push_back(*current); // add the local factor graph
  BOOST_FOREACH(sharedSubFG& child, current->children_) {
    // receive the factors from the child and its clique point
    FG fgChild; sharedClique cliqueChild;
    boost::tie(fgChild, cliqueChild) = eliminateOneClique(child, bayesTree);
    if (!cliqueChild.get()) throw runtime_error("eliminateOneClique: child clique is invalid!");

    fg.push_back(fgChild);
    children.push_back(cliqueChild);
  }

  // eliminate the combined factors
  // warning: fg is being eliminated in-place and will contain marginal afterwards
  // BayesNet<Conditional> bn = fg.eliminate(current->frontal_);
  BayesNet<Conditional> bn = fg.eliminateFrontals(current->frontal_);

  // create a new clique corresponding to the combined factors
  sharedClique new_clique = bayesTree.insert(bn, children);

  return make_pair(fg, new_clique);
}

/* ************************************************************************* */
template <class Conditional, class FG>
BayesTree<Conditional> JunctionTree<Conditional, FG>::eliminate() {
  BayesTree<Conditional> bayesTree;
  eliminateOneClique(rootFG_, bayesTree);
  return bayesTree;
}

/* ************************************************************************* */
template <class Conditional, class FG>
void JunctionTree<Conditional, FG>::iterSubGraphsDFS(VisitorSubFG visitor, sharedSubFG current) {
  if (!current.get()) current = rootFG_;
  // iterateBFS<SubFG>(visitor, current);
}

/* ************************************************************************* */
template <class Conditional, class FG>
void JunctionTree<Conditional, FG>::iterSubGraphsBFS(VisitorSubFG visitor) {
  // iterateDFS<SubFG>(visitor, rootFG_);
}

/* ************************************************************************* */
/**
 * Linear JunctionTree
 */
template <class Conditional, class FG>
void LinearJunctionTree<Conditional, FG>::btreeBackSubstitue(typename BayesTree<Conditional>::sharedClique current, VectorConfig& config) {
  // solve the bayes net in the current node
  typename BayesNet<Conditional>::const_reverse_iterator it = current->rbegin();
  for (; it!=current->rend(); it++) {
    Vector x = (*it)->solve(config);  // Solve for that variable
    config.insert((*it)->key(),x);    // store result in partial solution
  }

  // solve the bayes nets in the child nodes
  BOOST_FOREACH(sharedClique child, current->children_) {
    btreeBackSubstitue(child, config);
  }
}

/* ************************************************************************* */
template <class Conditional, class FG>
VectorConfig LinearJunctionTree<Conditional, FG>::optimize() {
  // eliminate from leaves to the root
  BayesTree<Conditional> bayesTree = JunctionTree<Conditional, FG>::eliminate();

  VectorConfig result;
  btreeBackSubstitue(bayesTree.root(), result);
  return result;
}

} // namespace gtsam
@@ -0,0 +1,145 @@
/*
 * JunctionTree.h
 *
 *  Created on: Feb 4, 2010
 *      Author: nikai
 *  Description: The junction tree
 */

#pragma once

#include <set>
#include <boost/shared_ptr.hpp>
#include <boost/function.hpp>
#include "GaussianConditional.h"
#include "GaussianFactorGraph.h"
#include "BayesTree.h"

namespace gtsam {

/* ************************************************************************* */
template <class Conditional, class FG>
class JunctionTree /*: public BayesTree<Conditional>*/ {
public:
  typedef typename BayesTree<Conditional>::sharedClique sharedClique;

  // the threshold for the sizes of submaps. Smaller ones will be absorbed into the separator
  static const int const_minNodesPerMap_default = 10;
  static const int const_minNodesPerMap_ultra = 1;

  // when to stop partitioning
  static const int const_numNodeStopPartition_default = 50;
  static const int const_numNodeStopPartition_ultra = 3; // so that A,B,C all have one variable


  // the class for subgraphs that also include the pointers to the parents and two children
  class SubFG : public FG {
  public:
    typedef typename boost::shared_ptr<SubFG> shared_ptr;
    shared_ptr parent_;    // the parent subgraph node
    Ordering frontal_;     // the frontal variables
    Unordered separator_;  // the separator variables

    friend class JunctionTree<Conditional, FG>;

  public:
    std::vector<shared_ptr> children_; // the child cliques

    // empty constructor
    SubFG() {}

    // constructor with all the information
    SubFG(const FG& fgLocal, const Ordering& frontal, const Unordered& separator,
        const shared_ptr& parent)
      : frontal_(frontal), separator_(separator), FG(fgLocal), parent_(parent) {}

    // constructor for an empty graph
    SubFG(const Ordering& frontal, const Unordered& separator, const shared_ptr& parent)
      : frontal_(frontal), separator_(separator), parent_(parent) {}

    const Ordering& frontal() const { return frontal_; }
    const Unordered& separator() const { return separator_; }
    std::vector<shared_ptr>& children() { return children_; } // TODO: add const

    // add a child node
    void addChild(const shared_ptr& child) { children_.push_back(child); }

    void printTree(const std::string& indent) const;
  };

  // typedef for shared pointers to cliques
  typedef typename SubFG::shared_ptr sharedSubFG;
  typedef boost::function<void (sharedSubFG)> VisitorSubFG;

protected:
  // Root clique
  sharedSubFG rootFG_;

private:

  // utility function called by eliminateBottomUp
  std::pair<FG, sharedClique> eliminateOneClique(sharedSubFG fg_, BayesTree<Conditional>& bayesTree);

public:

  JunctionTree() : verboseLevel(0) {}

  // return the root graph
  sharedSubFG rootFG() const { return rootFG_; }

  // eliminate the factors in the subgraphs
  BayesTree<Conditional> eliminate();

  // print the object
  void print(const std::string& str) const {
    if (rootFG_.get()) rootFG_->printTree("");
  }

  // iterate over all the subgraphs from root to leaves in the DFS order, recursive
  void iterSubGraphsDFS(VisitorSubFG visitor, sharedSubFG current = sharedSubFG());

  // iterate over all the subgraphs from root to leaves in the BFS order, non-recursive
  void iterSubGraphsBFS(VisitorSubFG visitor);

  // the output level
  int verboseLevel;
}; // JunctionTree

/* ************************************************************************* */
/**
 * Linear JunctionTree which can do optimization
 */
template <class Conditional, class FG>
class LinearJunctionTree : public JunctionTree<Conditional, FG> {
public:
  typedef JunctionTree<Conditional, FG> Base;
  typedef typename BayesTree<Conditional>::sharedClique sharedClique;
  typedef typename JunctionTree<Conditional, FG>::sharedSubFG sharedSubFG;

protected:
  // back-substitute in topological sort order (parents first)
  void btreeBackSubstitue(typename BayesTree<Conditional>::sharedClique current, VectorConfig& config);

public:

  LinearJunctionTree() : Base() {}

  // constructor
  LinearJunctionTree(const FG& fg, const Ordering& ordering, int numNodeStopPartition = Base::const_numNodeStopPartition_default,
      int minNodesPerMap = Base::const_minNodesPerMap_default) :
      Base(fg, ordering, numNodeStopPartition, minNodesPerMap) {}

  // optimize the linear graph
  VectorConfig optimize();
}; // Linear JunctionTree

class SymbolicConditional;
class SymbolicFactorGraph;

/**
 * recursive partitioning
 */
typedef JunctionTree<SymbolicConditional, SymbolicFactorGraph> SymbolicTSAM;
typedef JunctionTree<GaussianConditional, GaussianFactorGraph> GaussianTSAM;

} // namespace gtsam
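A brief usage sketch for the linear junction tree, assuming the partitioning constructor declared above has already split the graph into subgraphs; solveWithJunctionTree and its arguments are placeholders, and the partitioning thresholds fall back to const_numNodeStopPartition_default and const_minNodesPerMap_default:

#include "JunctionTree-inl.h"

using namespace gtsam;

// hypothetical driver: eliminate a linear graph multifrontally and back-substitute
VectorConfig solveWithJunctionTree(const GaussianFactorGraph& fg, const Ordering& ordering) {
  // build the tree of subgraphs (frontal variables plus separators) over fg
  LinearJunctionTree<GaussianConditional, GaussianFactorGraph> tree(fg, ordering);

  // eliminate() runs from the leaves to the root and assembles a BayesTree;
  // optimize() then back-substitutes from the root down to recover the solution
  return tree.optimize();
}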
@@ -69,6 +69,7 @@ testSymbolicBayesNet_LDADD = libgtsam.la
headers += inference.h inference-inl.h
headers += graph.h graph-inl.h
headers += FactorGraph.h FactorGraph-inl.h
headers += JunctionTree.h JunctionTree-inl.h
headers += BayesNet.h BayesNet-inl.h
headers += BayesTree.h BayesTree-inl.h
headers += ISAM.h ISAM-inl.h GaussianISAM.h
@@ -290,6 +291,7 @@ timeVectorConfig_LDADD = libgtsam.la

# create both dynamic and static libraries
AM_CXXFLAGS = -I$(boost) -fPIC
AM_LDFLAGS =
lib_LTLIBRARIES = libgtsam.la
libgtsam_la_SOURCES = $(sources)
libgtsam_la_CPPFLAGS = $(AM_CXXFLAGS)
@@ -300,11 +302,18 @@ if DEBUG
AM_CXXFLAGS += -g
endif

if USE_PROFILING
AM_CXXFLAGS += -pg
libgtsam_la_CPPFLAGS += -pg
AM_LDFLAGS += -pg
libgtsam_la_LDFLAGS += -pg
endif

# install the header files
include_HEADERS = $(headers)

AM_CXXFLAGS += -I..
AM_LDFLAGS = -L../CppUnitLite -lCppUnitLite $(BOOST_LDFLAGS) $(boost_serialization)
AM_LDFLAGS += -L../CppUnitLite -lCppUnitLite $(BOOST_LDFLAGS) $(boost_serialization)

# adding cblas implementation - split into a default linux version using the
# autotools script, and a mac version that is hardcoded
@@ -45,6 +45,13 @@ namespace gtsam {
  return bayesNet;
}

/* ************************************************************************* */
SymbolicBayesNet
SymbolicFactorGraph::eliminateFrontals(const Ordering& ordering)
{
  return eliminate(ordering);
}

/* ************************************************************************* */
void saveGraph(const SymbolicFactorGraph& fg, const SymbolicConfig& config, const std::string& s) {
@@ -81,6 +81,10 @@ namespace gtsam {
   */
  SymbolicBayesNet eliminate(const Ordering& ordering);

  /**
   * Same as eliminate in the SymbolicFactorGraph case
   */
  SymbolicBayesNet eliminateFrontals(const Ordering& ordering);
};

// save graph to the graphviz format
@@ -761,35 +761,6 @@ TEST ( GaussianFactor, constraint_eliminate2 )
  GaussianConditional expectedCG("x", d, R, "y", S, zero(2));
  CHECK(assert_equal(expectedCG, *actualCG, 1e-4));
}
/* ************************************************************************* */
TEST ( GaussianFactor, combine_matrix ) {
  // create a small linear factor graph
  GaussianFactorGraph fg = createGaussianFactorGraph();
  Dimensions dimensions = fg.dimensions();

  // get two factors from it and insert the factors into a vector
  vector<GaussianFactor::shared_ptr> lfg;
  lfg.push_back(fg[4 - 1]);
  lfg.push_back(fg[2 - 1]);

  // combine in a factor
  Matrix Ab; SharedDiagonal noise;
  Ordering order; order += "x2", "l1", "x1";
  boost::tie(Ab, noise) = GaussianFactor::combineFactorsAndCreateMatrix(lfg, order, dimensions);

  // the expected augmented matrix
  Matrix expAb = Matrix_(4, 7,
      -5.,  0.,  5.,  0.,   0.,   0., -1.0,
      +0., -5.,  0.,  5.,   0.,   0.,  1.5,
      10.,  0.,  0.,  0., -10.,   0.,  2.0,
      +0., 10.,  0.,  0.,   0., -10., -1.0);

  // expected noise model
  SharedDiagonal expModel = noiseModel::Unit::Create(4);

  CHECK(assert_equal(expAb, Ab));
  CHECK(assert_equal(*expModel, *noise));
}

/* ************************************************************************* */
TEST ( GaussianFactor, exploding_MAST_factor ) {
@@ -24,8 +24,11 @@ using namespace boost::assign;
#include "smallExample.h"
#include "GaussianBayesNet.h"
#include "numericalDerivative.h"
#include "SymbolicFactorGraph.h"
#include "BayesTree.h"
#include "inference-inl.h" // needed for eliminate and marginals


using namespace gtsam;
using namespace example;
@@ -472,6 +475,38 @@ TEST( GaussianFactorGraph, optimize )
  CHECK(assert_equal(expected,actual));
}

/* ************************************************************************* *
 Bayes tree for smoother with "nested dissection" ordering:
   C1   x5 x6 x4
   C2   x3 x2 : x4
   C3   x1 : x2
   C4   x7 : x6

/* ************************************************************************* */
TEST( GaussianFactorGraph, optimizeMultiFrontal )
{
  // create a graph
  GaussianFactorGraph fg = createSmoother(7);

  // create an ordering
  Ordering ordering;
  ordering += "x1","x3","x5","x7","x2","x6","x4";

  // Symbolic factorization
  // GaussianFactorGraph -> SymbolicFactorGraph -> SymbolicBayesNet -> SymbolicBayesTree
  SymbolicFactorGraph sfg(fg);
  SymbolicBayesNet sbn = sfg.eliminate(ordering);
  BayesTree<SymbolicConditional> sbt(sbn);

//  // optimize the graph
//  VectorConfig actual = fg.optimizeMultiFrontal(sbt);
//
//  // verify
//  VectorConfig expected = createCorrectDelta();
//
//  CHECK(assert_equal(expected,actual));
}

/* ************************************************************************* */
TEST( GaussianFactorGraph, combine)
{
@@ -899,6 +934,36 @@ TEST(GaussianFactorGraph, replace)
  CHECK(assert_equal(expected, actual));
}

/* ************************************************************************* */
TEST ( GaussianFactorGraph, combine_matrix ) {
  // create a small linear factor graph
  GaussianFactorGraph fg = createGaussianFactorGraph();
  Dimensions dimensions = fg.dimensions();

  // get two factors from it and insert the factors into a vector
  vector<GaussianFactor::shared_ptr> lfg;
  lfg.push_back(fg[4 - 1]);
  lfg.push_back(fg[2 - 1]);

  // combine in a factor
  Matrix Ab; SharedDiagonal noise;
  Ordering order; order += "x2", "l1", "x1";
  boost::tie(Ab, noise) = combineFactorsAndCreateMatrix(lfg, order, dimensions);

  // the expected augmented matrix
  Matrix expAb = Matrix_(4, 7,
      -5.,  0.,  5.,  0.,   0.,   0., -1.0,
      +0., -5.,  0.,  5.,   0.,   0.,  1.5,
      10.,  0.,  0.,  0., -10.,   0.,  2.0,
      +0., 10.,  0.,  0.,   0., -10., -1.0);

  // expected noise model
  SharedDiagonal expModel = noiseModel::Unit::Create(4);

  CHECK(assert_equal(expAb, Ab));
  CHECK(assert_equal(*expModel, *noise));
}

/* ************************************************************************* */
int main() { TestResult tr; return TestRegistry::runAllTests(tr);}
/* ************************************************************************* */
@@ -36,7 +36,7 @@ const double tol = 1e-5;
typedef NonlinearOptimizer<example::Graph,example::Config> Optimizer;

/* ************************************************************************* */
TEST( NonlinearOptimizer, delta )
TEST( NonlinearOptimizer, linearizeAndOptimizeForDelta )
{
  shared_ptr<example::Graph> fg(new example::Graph(
      example::createNonlinearFactorGraph()));
@@ -236,6 +236,31 @@ TEST( NonlinearOptimizer, SubgraphSolver )
  CHECK(assert_equal(expected, *optimized.config(), 1e-5));
}

/* ************************************************************************* */
//TEST( NonlinearOptimizer, MultiFrontalSolver )
//{
//  shared_ptr<example::Graph> fg(new example::Graph(
//      example::createNonlinearFactorGraph()));
//  Optimizer::shared_config initial = example::sharedNoisyConfig();
//
//  Config expected;
//  expected.insert(simulated2D::PoseKey(1), Point2(0.0, 0.0));
//  expected.insert(simulated2D::PoseKey(2), Point2(1.5, 0.0));
//  expected.insert(simulated2D::PointKey(1), Point2(0.0, -1.0));
//
//  Optimizer::shared_solver solver;
//
//  // Check one ordering
//  shared_ptr<Ordering> ord1(new Ordering());
//  *ord1 += "x2","l1","x1";
//  solver = Optimizer::shared_solver(new Optimizer::solver(ord1));
//  Optimizer optimizer1(fg, initial, solver);
//
//  Config actual = optimizer1.levenbergMarquardt();
//  CHECK(assert_equal(actual,expected));
//}


/* ************************************************************************* */
int main() {
  TestResult tr;