Linking to SparseQR as a shared library results in a performance hit; the proper way is to link the static libraries.
Add prepareLinear to NonlinearOptimizer so that some computation can be cached and does not have to happen every time.

release/4.3a0
parent c5f716d03d
commit 9ddeaa91c6
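Background note (not part of the diff): the idea behind prepareLinear is that work which depends only on the current linearization, not on the lambda being tried, can be done once per iteration and reused. Below is a minimal, self-contained sketch of that pattern; CachingSolver, Cache, and buildCache are hypothetical names for illustration, not types from this commit.

#include <boost/shared_ptr.hpp>

template <class LinearGraph>
class CachingSolver {
public:
  // Called once per nonlinear iteration, right after linearize(): any
  // expensive, lambda-independent work (e.g. computing an elimination
  // ordering or a symbolic factorization) is stored in the returned
  // "prepared" solver, so every lambda trial reuses it.
  boost::shared_ptr<CachingSolver> prepareLinear(const LinearGraph& fg) const {
    boost::shared_ptr<CachingSolver> prepared(new CachingSolver(*this));
    prepared->cache_ = buildCache(fg);
    return prepared;
  }

private:
  struct Cache {}; // placeholder for whatever is expensive to recompute

  boost::shared_ptr<Cache> buildCache(const LinearGraph& /*fg*/) const {
    return boost::shared_ptr<Cache>(new Cache()); // hypothetical helper
  }

  boost::shared_ptr<Cache> cache_;
};

In this commit the base Factorization solver caches nothing, so its prepareLinear simply returns a copy of itself (see the first hunk below).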
@@ -45,37 +45,13 @@ namespace gtsam {
 		boost::shared_ptr<GaussianFactorGraph> linearize(const NonlinearGraph& g, const Config& config) const {
 			return g.linearize(config);
 		}
-	};
-
-#ifdef USE_SPQR
-	/**
-	 * A linear system solver using factorization
-	 */
-	template <class NonlinearGraph, class Config>
-	class FactorizationSPQR {
-	private:
-		boost::shared_ptr<const Ordering> ordering_;
-
-	public:
-		FactorizationSPQR(boost::shared_ptr<const Ordering> ordering, bool old=true)
-		: ordering_(ordering) {
-			if (!ordering) throw std::invalid_argument("FactorizationSPQR constructor: ordering = NULL");
-		}
-
 		/**
-		 * solve for the optimal displacement in the tangent space, and then solve
-		 * the resulted linear system
+		 * Does not do anything here in Factorization.
 		 */
-		VectorConfig optimize(GaussianFactorGraph& fg) const {
-			return fg.optimizeSPQR(*ordering_);
+		boost::shared_ptr<Factorization> prepareLinear(const GaussianFactorGraph& fg) const {
+			return boost::shared_ptr<Factorization>(new Factorization(*this));
 		}
-
-		/**
-		 * linearize the non-linear graph around the current config
-		 */
-		boost::shared_ptr<GaussianFactorGraph> linearize(const NonlinearGraph& g, const Config& config) const {
-			return g.linearize(config);
-		}
 	};
-#endif
 }
@@ -19,6 +19,10 @@
 #include "inference-inl.h"
 #include "iterative.h"
 
+#ifdef USE_SPQR
+#include <spqr.hpp>;
+#endif
+
 using namespace std;
 using namespace gtsam;
 using namespace boost::assign;
@@ -439,134 +443,4 @@ boost::shared_ptr<VectorConfig> GaussianFactorGraph::conjugateGradientDescent_(
 			maxIterations)));
 }
 
-#ifdef USE_SPQR
-/* ************************************************************************* */
-cholmod_sparse* GaussianFactorGraph::cholmodSparse(const Ordering& ordering, vector<size_t>& orderedDimensions,
-		cholmod_common *cc) const {
-	Dimensions colIndices;
-	size_t numCols;
-	boost::tie(colIndices, numCols) = columnIndices_(ordering);
-
-	// sort the data from row ordering to the column ordering
-	typedef pair<int, double> RowValue; // the pair of row index and non-zero value
-	int row_start = 0, column_start = 0;
-	size_t nnz = 0;
-	vector<vector<RowValue> > ivs_vector;
-	ivs_vector.resize(numCols);
-	Vector sigmas;
-	SymbolMap<Matrix>::const_iterator jA;
-	BOOST_FOREACH(const sharedFactor& factor,factors_) { // iterate over all the factors
-		for(jA = factor->begin(); jA!=factor->end(); jA++) { // iterate over all matrices in the factor
-			column_start = colIndices.at(jA->first) - 1; // find the first column index for this key
-			sigmas = factor->get_sigmas();
-			for (size_t i = 0; i < jA->second.size1(); i++) // iterate over all the non-zero entries in the submatrix
-				for (size_t j = 0; j < jA->second.size2(); j++)
-					if (jA->second(i, j) != 0.0) {
-						ivs_vector[j + column_start].push_back(make_pair(i + row_start, jA->second(i, j) / sigmas[i]));
-						nnz++;
-					}
-		}
-		row_start += factor->numberOfRows();
-	}
-
-	// assemble the CHOLMOD data structure
-	int numRows = row_start;
-	cholmod_sparse *A = cholmod_l_allocate_sparse(numRows, numCols, nnz, 1, 1, 0, CHOLMOD_REAL, cc);
-	long* p = (long*)(A->p);   // starting index in A->i for each column
-	long* p2 = p+1;            // ending index in A->i for each column
-	long* i = (long*)(A->i);   // row indices of nnz entries
-	double* x = (double*)A->x; // the values of nnz entries
-	p[0] = 0;
-	BOOST_FOREACH(const vector<RowValue>& ivs, ivs_vector) {
-		*(p2++) = *(p++) + ivs.size();
-		BOOST_FOREACH(const RowValue& iv, ivs) {
-			*(i++) = iv.first;
-			*(x++) = iv.second;
-		}
-	}
-
-	// order the column indices w.r.t. the given ordering
-	vector<size_t> orderedIndices;
-	BOOST_FOREACH(const Symbol& key, ordering)
-		orderedIndices.push_back(colIndices[key] - 1);
-	orderedIndices.push_back(numCols);
-
-	// record the dimensions for each key in the same order as the {ordering}
-	vector<size_t>::const_iterator it1 = orderedIndices.begin();
-	vector<size_t>::const_iterator it2 = orderedIndices.begin(); it2++;
-	while(it2 != orderedIndices.end())
-		orderedDimensions.push_back(*(it2++) - *(it1++));
-
-	return A;
-}
-
-/* ************************************************************************* */
-// learn from spqr_mx_get_dense
-cholmod_dense* GaussianFactorGraph::cholmodRhs(cholmod_common *cc) const {
-
-	int nrow = 0;
-	BOOST_FOREACH(const sharedFactor& factor,factors_)
-		nrow += factor->numberOfRows();
-	cholmod_dense* b = cholmod_l_allocate_dense(nrow, 1, nrow, CHOLMOD_REAL, cc);
-
-	// fill the data
-	double* x_current = (double*)b->x;
-	Vector::const_iterator it_b, it_sigmas, it_b_end;
-	BOOST_FOREACH(const sharedFactor& factor,factors_) {
-		it_b = factor->get_b().begin();
-		it_b_end = factor->get_b().end();
-		it_sigmas = factor->get_sigmas().begin();
-		for(; it_b != it_b_end; )
-			*(x_current++) = *(it_b++) / *(it_sigmas++);
-	}
-
-	return b;
-}
-
-/* ************************************************************************* */
-VectorConfig GaussianFactorGraph::optimizeSPQR(const Ordering& ordering)
-{
-	// set up the default parameters
-	cholmod_common Common, *cc ;
-	cc = &Common ;
-	cholmod_l_start(cc) ;
-	cc->metis_memory = 0.0 ;
-	cc->SPQR_grain = 4 ;
-	cc->SPQR_small = 1e6 ;
-	cc->SPQR_nthreads = 2 ; // number of TBB threads (0 = default)
-
-	// get the A matrix and rhs in the compressed-column format
-	vector<size_t> orderedDimensions;
-	cholmod_sparse* A = cholmodSparse(ordering, orderedDimensions, cc);
-	cholmod_dense* b = cholmodRhs(cc);
-
-	// QR
-	int ord_method = SPQR_ORDERING_BEST; // matlab uses 7
-	double tol = SPQR_NO_TOL;            // matlab uses SPQR_DEFAULT_TOL
-	cholmod_dense* x = SuiteSparseQR<double> (ord_method, tol, A, b, cc) ;
-
-	// create the update vector
-	VectorConfig config;
-	double *x_start = (double*)x->x, *x_end;
-	vector<size_t>::const_iterator itDim = orderedDimensions.begin();
-	BOOST_FOREACH(const Symbol& key, ordering) {
-		Vector v(*itDim);
-		x_end = x_start + *itDim;
-		copy(x_start, x_end, v.data().begin());
-		config.insert(key, v);
-		itDim++;
-		x_start = x_end;
-	}
-
-	// free memory
-	cholmod_l_free_sparse(&A, cc);
-	cholmod_l_free_dense (&b, cc) ;
-	cholmod_l_free_dense (&x, cc);
-	cholmod_l_finish(cc);
-
-	return config;
-}
-#endif
-
 /* ************************************************************************* */
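A note on the removed cholmodSparse above, for readers unfamiliar with CHOLMOD's storage: a packed compressed-column (CSC) matrix keeps three arrays, column pointers p (length ncol+1), row indices i, and values x, where column j owns entries p[j] .. p[j+1]-1 of i and x; that is exactly what the p/p2/i/x pointer arithmetic fills in. A small self-contained sketch follows (the 2x3 matrix is made up for illustration; no CHOLMOD dependency):

#include <cstdio>
#include <vector>

int main() {
  // Dense 2x3 example, for illustration only:
  //   [ 1  0  2 ]
  //   [ 0  3  4 ]
  // Packed compressed-column (CSC) storage of the same matrix:
  std::vector<long>   p = {0, 1, 2, 4};         // column pointers, length ncol+1
  std::vector<long>   i = {0, 1, 0, 1};         // row index of each nonzero
  std::vector<double> x = {1.0, 3.0, 2.0, 4.0}; // value of each nonzero

  // Column j owns entries p[j] .. p[j+1]-1 of i and x.
  for (std::size_t j = 0; j + 1 < p.size(); ++j)
    for (long k = p[j]; k < p[j + 1]; ++k)
      std::printf("A(%ld,%zu) = %g\n", i[k], j, x[k]);
  return 0;
}

The expected p/i/x arrays in the (also removed) cholmodSparse unit test further down follow the same layout.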
@@ -14,10 +14,6 @@
 
 #include <boost/shared_ptr.hpp>
 
-#ifdef USE_SPQR
-#include <SuiteSparseQR.hpp>
-#endif
-
 #include "FactorGraph.h"
 #include "Errors.h"
 #include "GaussianFactor.h"

@@ -264,25 +260,5 @@ namespace gtsam {
 	boost::shared_ptr<VectorConfig> conjugateGradientDescent_(
 			const VectorConfig& x0, bool verbose = false, double epsilon = 1e-3,
 			size_t maxIterations = 0) const;
-
-#ifdef USE_SPQR
-	/**
-	 * Convert to CHOLMOD's compressed-column form, refer to CHOLMOD's user guide for details.
-	 * The returned pointer needs to be freed by calling cholmod_l_free_sparse
-	 */
-	cholmod_sparse* cholmodSparse(const Ordering& ordering, std::vector<std::size_t>& orderedDimensions,
-			cholmod_common *cc) const;
-
-	/**
-	 * Convert the RHS to CHOLMOD's column-major matrix format
-	 * The returned pointer needs to be freed by calling cholmod_l_free_sparse
-	 */
-	cholmod_dense* cholmodRhs(cholmod_common *cc) const;
-
-	/**
-	 * optimizing using the SparseQR package
-	 */
-	VectorConfig optimizeSPQR(const Ordering& ordering);
-#endif
 };
 } // namespace gtsam
@@ -303,7 +303,8 @@ AM_LDFLAGS = -L../CppUnitLite -lCppUnitLite $(BOOST_LDFLAGS) $(boost_serializati
 # adding SparseQR support
 if USE_SPQR
 AM_CXXFLAGS += -I$(HOME)/include/SuiteSparse -DUSE_SPQR
-AM_LDFLAGS += -L$(HOME)/lib/SuiteSparse -lcholmod -lsqpr
+AM_LDFLAGS += -L$(HOME)/lib/SuiteSparse -lufconfig -lamd -lcamd -lcolamd -lbtf -lklu -lldl -lccolamd -lumfpack -lcholmod -lcxsparse -lspqr
+#AM_LDFLAGS += -L$(HOME)/lib/SuiteSparse -lsqpr
 endif
 
 # adding cblas implementation - split into a default linux version using the
@@ -220,12 +220,13 @@ namespace gtsam {
 
 		// linearize all factors once
 		boost::shared_ptr<L> linear = solver_->linearize(*graph_, *config_);
+		NonlinearOptimizer prepared(graph_, config_, solver_->prepareLinear(*linear), error_, lambda_);
 		if (verbosity >= LINEAR)
 			linear->print("linear");
 
 		// try lambda steps with successively larger lambda until we achieve descent
 		if (verbosity >= LAMBDA) cout << "Trying Lambda for the first time" << endl;
-		return try_lambda(*linear, verbosity, lambdaFactor, lambdaMode);
+		return prepared.try_lambda(*linear, verbosity, lambdaFactor, lambdaMode);
 	}
 
 	/* ************************************************************************* */
@@ -92,10 +92,17 @@ namespace gtsam {
 	public:
 
 		/**
-		 * Constructor
+		 * Constructor that evaluates new error
 		 */
 		NonlinearOptimizer(shared_graph graph, shared_config config, shared_solver solver,
-				double lambda = 1e-5);
+				const double lambda = 1e-5);
+
+		/**
+		 * Constructor that does not do any computation
+		 */
+		NonlinearOptimizer(shared_graph graph, shared_config config, shared_solver solver,
+				const double error, const double lambda): graph_(graph), config_(config),
+					error_(error), lambda_(lambda), solver_(solver) {}
 
 		/**
 		 * Copy constructor
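The point of the second constructor above is that iterateLM already knows the current error, so re-evaluating the whole graph can be skipped when building the `prepared` optimizer. A self-contained toy sketch of that design choice (the class below is illustrative only, not GTSAM code):

#include <iostream>

class Optimizer {
public:
  // "Evaluating" constructor: computes the error itself.
  explicit Optimizer(double lambda = 1e-5)
      : error_(evaluateError()), lambda_(lambda) {}

  // Non-evaluating constructor: the caller already knows the error,
  // so nothing is recomputed (this mirrors the new constructor above).
  Optimizer(double error, double lambda) : error_(error), lambda_(lambda) {}

  double error() const { return error_; }

private:
  static double evaluateError() { return 0.0; } // stands in for graph->error(*config)
  double error_;
  double lambda_;
};

int main() {
  Optimizer prepared(0.123, 1e-5); // reuse a previously computed error
  std::cout << prepared.error() << std::endl;
  return 0;
}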
@@ -413,91 +413,6 @@ TEST( GaussianFactorGraph, sparse )
 		10.,10., -10.,-10., 10., 10., 5.,5.,-5.,-5., 5., 5.,-5.,-5.), ijs);
 }
 
-#ifdef USE_SPQR
-/* ************************************************************************* */
-TEST( GaussianFactorGraph, cholmodSparse )
-{
-	// create a small linear factor graph
-	GaussianFactorGraph fg = createGaussianFactorGraph();
-
-	Ordering ord; ord += "x2","l1","x1";
-	vector<size_t> dimensions;
-	cholmod_common Common, *cc ;
-	cc = &Common ;
-	cholmod_l_start (cc) ;
-	cholmod_sparse* A = fg.cholmodSparse(ord, dimensions, cc);
-
-	CHECK(A->ncol == 6);
-	CHECK(A->nrow == 8);
-	CHECK(A->nzmax == 14);
-	CHECK(A->stype == 0);
-	CHECK(A->xtype == CHOLMOD_REAL);
-	CHECK(A->dtype == CHOLMOD_DOUBLE);
-	CHECK(A->itype == CHOLMOD_LONG);
-	CHECK(A->packed == 1);
-	CHECK(A->sorted == 1);
-
-	long p[7] = {0, 2, 4, 6, 8, 11, 14};
-	for(long index=0; index<7; index++)
-		CHECK(p[index] == *((long*)A->p + index));
-
-	long i[14] = {2, 6, 3, 7, 4, 6, 5, 7, 0, 2, 4, 1, 3, 5};
-	for(long index=0; index<14; index++)
-		CHECK(i[index] == *((long*)A->i + index));
-
-	double x[14] = {10, -5, 10, -5, 5, 5, 5, 5, 10, -10, -5, 10, -10, -5};
-	for(int index=0; index<14; index++)
-		CHECK(x[index] == *((double*)A->x + index));
-
-	cholmod_l_free_sparse(&A, cc);
-	cholmod_l_finish(cc) ;
-}
-
-/* ************************************************************************* */
-TEST( GaussianFactorGraph, cholmodRhs )
-{
-	// create a small linear factor graph
-	GaussianFactorGraph fg = createGaussianFactorGraph();
-
-	cholmod_common Common, *cc;
-	cc = &Common;
-	cholmod_l_start (cc);
-	cholmod_dense* b = fg.cholmodRhs(cc);
-
-	CHECK(b->ncol == 1);
-	CHECK(b->nrow == 8);
-	CHECK(b->nzmax == 8);
-	CHECK(b->d == 8);
-	CHECK(b->xtype == CHOLMOD_REAL);
-	CHECK(b->dtype == CHOLMOD_DOUBLE);
-
-	double x[8] = {-1., -1., 2., -1., 0., 1., -1., 1.5};
-	for(int index=0; index<8; index++)
-		CHECK(x[index] == *((double*)b->x + index));
-
-	cholmod_l_free_dense(&b, cc);
-	cholmod_l_finish(cc);
-}
-
-/* ************************************************************************* */
-TEST( GaussianFactorGraph, optimizeSPQR )
-{
-	// create a graph
-	GaussianFactorGraph fg = createGaussianFactorGraph();
-
-	// create an ordering
-	Ordering ord; ord += "x2","l1","x1";
-
-	// optimize the graph
-	VectorConfig actual = fg.optimizeSPQR(ord);
-
-	// verify
-	VectorConfig expected = createCorrectDelta();
-
-	CHECK(assert_equal(expected,actual));
-}
-#endif
-
 /* ************************************************************************* */
 TEST( GaussianFactorGraph, CONSTRUCTOR_GaussianBayesNet )
 {
@@ -201,35 +201,6 @@ TEST( NonlinearOptimizer, Factorization )
 	CHECK(assert_equal(expected, *optimized.config(), 1e-5));
 }
 
-/* ************************************************************************* */
-#ifdef USE_SPQR
-TEST( NonlinearOptimizer, FactorizationSPQR )
-{
-	typedef NonlinearOptimizer<Pose2Graph, Pose2Config, GaussianFactorGraph, FactorizationSPQR<Pose2Graph, Pose2Config> > Optimizer;
-
-	boost::shared_ptr<Pose2Config> config(new Pose2Config);
-	config->insert(1, Pose2(0.,0.,0.));
-	config->insert(2, Pose2(1.5,0.,0.));
-
-	boost::shared_ptr<Pose2Graph> graph(new Pose2Graph);
-	graph->addPrior(1, Pose2(0.,0.,0.), Isotropic::Sigma(3, 1e-10));
-	graph->addPrior(2, Pose2(1.5,0.,0.), Isotropic::Sigma(3, 1e-10));
-
-	boost::shared_ptr<Ordering> ordering(new Ordering);
-	ordering->push_back(Pose2Config::Key(1));
-	ordering->push_back(Pose2Config::Key(2));
-	Optimizer::shared_solver solver(new FactorizationSPQR<Pose2Graph, Pose2Config>(ordering));
-
-	Optimizer optimizer(graph, config, solver);
-	Optimizer optimized = optimizer.iterateLM();
-
-	Pose2Config expected;
-	expected.insert(1, Pose2(0.,0.,0.));
-	expected.insert(2, Pose2(1.5,0.,0.));
-	CHECK(assert_equal(expected, *optimized.config(), 1e-5));
-}
-#endif
-
 /* ************************************************************************* */
 TEST( NonlinearOptimizer, SubgraphSolver )
 {