in the middle of resurrecting spcg

release/4.3a0
Yong-Dian Jian 2012-06-08 16:45:16 +00:00
parent 2f2f1875d9
commit 913160462a
16 changed files with 665 additions and 669 deletions

View File

@@ -16,7 +16,6 @@
* @date June 2, 2012
*/
#include <gtsam/linear/IterativeOptimizationParameters.h>
#include <gtsam/nonlinear/LevenbergMarquardtOptimizer.h>
#include <gtsam/slam/pose2SLAM.h>

View File

@@ -0,0 +1,89 @@
/* ----------------------------------------------------------------------------
* GTSAM Copyright 2010, Georgia Tech Research Corporation,
* Atlanta, Georgia 30332-0415
* All Rights Reserved
* Authors: Frank Dellaert, et al. (see THANKS for the full author list)
* See LICENSE for the license information
* -------------------------------------------------------------------------- */
#pragma once
#include <gtsam/linear/IterativeSolver.h>
namespace gtsam {
/**
* parameters for the conjugate gradient method
*/
struct ConjugateGradientParameters : public IterativeOptimizationParameters {
typedef IterativeOptimizationParameters Base;
typedef boost::shared_ptr<ConjugateGradientParameters> shared_ptr;
size_t minIterations_; ///< minimum number of cg iterations
size_t maxIterations_; ///< maximum number of cg iterations
size_t reset_; ///< number of iterations before reset
double epsilon_rel_; ///< threshold for relative error decrease
double epsilon_abs_; ///< threshold for absolute error decrease
/* Matrix Operation Kernel */
enum BLASKernel {
GTSAM = 0, ///< Jacobian Factor Graph of GTSAM
SBM, ///< Sparse Block Matrix
SBM_MT ///< Sparse Block Matrix Multithreaded
} blas_kernel_;
enum Verbosity { SILENT = 0, COMPLEXITY = 1, ERROR = 2} verbosity_ ; /* Verbosity */
ConjugateGradientParameters()
: minIterations_(1), maxIterations_(500), reset_(501), epsilon_rel_(1e-3), epsilon_abs_(1e-3),
blas_kernel_(GTSAM), verbosity_(SILENT) {}
ConjugateGradientParameters(size_t minIterations, size_t maxIterations, size_t reset,
double epsilon_rel, double epsilon_abs, BLASKernel blas = GTSAM, Verbosity verbosity = SILENT)
: minIterations_(minIterations), maxIterations_(maxIterations), reset_(reset),
epsilon_rel_(epsilon_rel), epsilon_abs_(epsilon_abs), blas_kernel_(blas), verbosity_(verbosity) {}
/* general interface */
inline size_t minIterations() const { return minIterations_; }
inline size_t maxIterations() const { return maxIterations_; }
inline size_t reset() const { return reset_; }
inline double epsilon() const { return epsilon_rel_; }
inline double epsilon_rel() const { return epsilon_rel_; }
inline double epsilon_abs() const { return epsilon_abs_; }
inline BLASKernel blas_kernel() const { return blas_kernel_; }
inline Verbosity verbosity() const { return verbosity_; }
void print() const {
const std::string blasStr[3] = {"gtsam", "sbm", "sbm-mt"};
Base::print();
std::cout << "ConjugateGradientParameters: "
<< "blas = " << blasStr[blas_kernel_]
<< ", minIter = " << minIterations_
<< ", maxIter = " << maxIterations_
<< ", resetIter = " << reset_
<< ", eps_rel = " << epsilon_rel_
<< ", eps_abs = " << epsilon_abs_
<< ", verbosity = " << verbosity_
<< std::endl;
}
};
//class ConjugateGradientSolver : public IterativeSolver {
//
//public:
//
// typedef ConjugateGradientParameters Parameters;
//
// Parameters parameters_;
//
// ConjugateGradientSolver(const ConjugateGradientParameters &parameters) : parameters_(parameters) {}
// virtual VectorValues::shared_ptr optimize () = 0;
// virtual const IterativeOptimizationParameters& _params() const = 0;
//};
}
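As a quick sanity check, the new parameter struct can be exercised on its own. A minimal sketch (hypothetical driver, assuming this new header is gtsam/linear/ConjugateGradientSolver.h, as the includes elsewhere in this commit suggest):

#include <gtsam/linear/ConjugateGradientSolver.h>

int main() {
  // Tighter tolerances than the defaults (maxIterations 500, epsilons 1e-3).
  gtsam::ConjugateGradientParameters params(
      1,      // minIterations
      100,    // maxIterations
      101,    // reset
      1e-4,   // epsilon_rel
      1e-5);  // epsilon_abs
  params.print();  // prints the BLAS kernel, iteration bounds, and both tolerances
  return 0;
}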

View File

@@ -1,134 +0,0 @@
/* ----------------------------------------------------------------------------
* GTSAM Copyright 2010, Georgia Tech Research Corporation,
* Atlanta, Georgia 30332-0415
* All Rights Reserved
* Authors: Frank Dellaert, et al. (see THANKS for the full author list)
* See LICENSE for the license information
* -------------------------------------------------------------------------- */
/**
* @file IterativeOptimizationParameters.h
* @date Oct 22, 2010
* @author Yong-Dian Jian
*/
#pragma once
#include <boost/shared_ptr.hpp>
#include <iostream>
#include <string>
namespace gtsam {
/**
* parameters for the conjugate gradient method
*/
struct ConjugateGradientParameters {
size_t minIterations_; ///< minimum number of cg iterations
size_t maxIterations_; ///< maximum number of cg iterations
size_t reset_; ///< number of iterations before reset
double epsilon_rel_; ///< threshold for relative error decrease
double epsilon_abs_; ///< threshold for absolute error decrease
/* Matrix Operation Kernel */
enum BLASKernel {
GTSAM = 0, ///< Jacobian Factor Graph of GTSAM
SBM, ///< Sparse Block Matrix
SBM_MT ///< Sparse Block Matrix Multithreaded
} blas_kernel_;
size_t degree_; ///< the maximum degree of the vertices to be eliminated before doing cg
enum Verbosity { SILENT = 0, COMPLEXITY = 1, ERROR = 2} verbosity_ ; /* Verbosity */
ConjugateGradientParameters()
: minIterations_(1), maxIterations_(500), reset_(501), epsilon_rel_(1e-3), epsilon_abs_(1e-3),
blas_kernel_(GTSAM), degree_(0), verbosity_(SILENT) {}
ConjugateGradientParameters(size_t minIterations, size_t maxIterations, size_t reset,
double epsilon_rel, double epsilon_abs, BLASKernel blas = GTSAM, size_t degree = 0, Verbosity verbosity = SILENT)
: minIterations_(minIterations), maxIterations_(maxIterations), reset_(reset),
epsilon_rel_(epsilon_rel), epsilon_abs_(epsilon_abs), blas_kernel_(blas), degree_(degree), verbosity_(verbosity) {}
/* general interface */
inline size_t minIterations() const { return minIterations_; }
inline size_t maxIterations() const { return maxIterations_; }
inline size_t reset() const { return reset_; }
inline double epsilon() const { return epsilon_rel_; }
inline double epsilon_rel() const { return epsilon_rel_; }
inline double epsilon_abs() const { return epsilon_abs_; }
inline BLASKernel blas_kernel() const { return blas_kernel_; }
inline size_t degree() const { return degree_; }
inline Verbosity verbosity() const { return verbosity_; }
void print() const {
const std::string blasStr[3] = {"gtsam", "sbm", "sbm-mt"};
std::cout << "ConjugateGradientParameters: "
<< "blas = " << blasStr[blas_kernel_]
<< ", minIter = " << minIterations_
<< ", maxIter = " << maxIterations_
<< ", resetIter = " << reset_
<< ", eps_rel = " << epsilon_rel_
<< ", eps_abs = " << epsilon_abs_
<< ", degree = " << degree_
<< ", verbosity = " << verbosity_
<< std::endl;
}
};
/**
* parameters for iterative linear solvers
*/
class IterativeOptimizationParameters {
public:
typedef boost::shared_ptr<IterativeOptimizationParameters> shared_ptr;
ConjugateGradientParameters cg_; ///< Parameters for the Conjugate Gradient Method
enum Kernel { PCG = 0, LSPCG = 1 } kernel_ ; ///< Iterative Method Kernel
enum Verbosity { SILENT = 0, COMPLEXITY = 1, ERROR = 2} verbosity_ ; ///< Verbosity
public:
IterativeOptimizationParameters() : cg_(), kernel_(LSPCG), verbosity_(SILENT) {}
IterativeOptimizationParameters(const IterativeOptimizationParameters &p)
: cg_(p.cg_), kernel_(p.kernel_), verbosity_(p.verbosity_) {}
IterativeOptimizationParameters(const ConjugateGradientParameters &c, Kernel kernel = LSPCG, Verbosity verbosity = SILENT)
: cg_(c), kernel_(kernel), verbosity_(verbosity) {}
virtual ~IterativeOptimizationParameters() {}
/* general interface */
inline Kernel kernel() const { return kernel_; }
inline Verbosity verbosity() const { return verbosity_; }
/* interface to cg parameters */
inline const ConjugateGradientParameters& cg() const { return cg_; }
inline size_t minIterations() const { return cg_.minIterations(); }
inline size_t maxIterations() const { return cg_.maxIterations(); }
inline size_t reset() const { return cg_.reset(); }
inline double epsilon() const { return cg_.epsilon_rel(); }
inline double epsilon_rel() const { return cg_.epsilon_rel(); }
inline double epsilon_abs() const { return cg_.epsilon_abs(); }
inline size_t degree() const { return cg_.degree(); }
inline ConjugateGradientParameters::BLASKernel blas_kernel() const { return cg_.blas_kernel(); }
virtual void print(const std::string &s="") const {
const std::string kernelStr[2] = {"pcg", "lspcg"};
std::cout << s << std::endl
<< "IterativeOptimizationParameters: "
<< "kernel = " << kernelStr[kernel_]
<< ", verbosity = " << verbosity_ << std::endl;
cg_.print();
}
};
}

View File

@@ -11,20 +11,49 @@
#pragma once
#include <gtsam/linear/IterativeOptimizationParameters.h>
#include <gtsam/linear/VectorValues.h>
namespace gtsam {
class IterativeSolver {
/**
* parameters for iterative linear solvers
*/
class IterativeOptimizationParameters {
public:
public:
IterativeSolver(){}
virtual ~IterativeSolver() {}
typedef boost::shared_ptr<IterativeOptimizationParameters> shared_ptr;
virtual VectorValues::shared_ptr optimize () = 0;
virtual const IterativeOptimizationParameters& _params() const = 0;
};
enum Kernel { PCG = 0, LSPCG = 1 } kernel_ ; ///< Iterative Method Kernel
enum Verbosity { SILENT = 0, COMPLEXITY = 1, ERROR = 2} verbosity_ ; ///< Verbosity
public:
IterativeOptimizationParameters(const IterativeOptimizationParameters &p)
: kernel_(p.kernel_), verbosity_(p.verbosity_) {}
IterativeOptimizationParameters(Kernel kernel = LSPCG, Verbosity verbosity = SILENT)
: kernel_(kernel), verbosity_(verbosity) {}
virtual ~IterativeOptimizationParameters() {}
/* general interface */
inline Kernel kernel() const { return kernel_; }
inline Verbosity verbosity() const { return verbosity_; }
void print() const {
const std::string kernelStr[2] = {"pcg", "lspcg"};
std::cout << "IterativeOptimizationParameters: "
<< "kernel = " << kernelStr[kernel_]
<< ", verbosity = " << verbosity_ << std::endl;
}
};
class IterativeSolver {
public:
IterativeSolver(){}
virtual ~IterativeSolver() {}
virtual VectorValues::shared_ptr optimize () = 0;
};
}
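With _params() gone, IterativeSolver is now a bare interface around optimize(), so a concrete solver only needs the one override. A hypothetical minimal subclass, purely for illustration:

#include <gtsam/linear/IterativeSolver.h>
#include <gtsam/linear/VectorValues.h>
#include <boost/make_shared.hpp>

class IdentitySolver : public gtsam::IterativeSolver {
public:
  // A real solver would run its iterations here; this stub just returns
  // an empty solution to show the single required override.
  virtual gtsam::VectorValues::shared_ptr optimize() {
    return boost::make_shared<gtsam::VectorValues>();
  }
};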

View File

@@ -9,11 +9,11 @@
* -------------------------------------------------------------------------- */
#include <gtsam/linear/SimpleSPCGSolver.h>
#include <gtsam/linear/GaussianBayesNet.h>
#include <gtsam/linear/GaussianConditional.h>
#include <gtsam/linear/GaussianFactorGraph.h>
#include <gtsam/linear/GaussianBayesNet.h>
#include <gtsam/linear/JacobianFactorGraph.h>
#include <gtsam/linear/SimpleSPCGSolver.h>
#include <gtsam/linear/VectorValues.h>
#include <gtsam/inference/EliminationTree.h>
#include <boost/foreach.hpp>
@@ -100,7 +100,7 @@ VectorValues::shared_ptr SimpleSPCGSolver::optimize (const VectorValues &initial
std::max(parameters_.epsilon_abs(),
parameters_.epsilon() * parameters_.epsilon() * gamma);
const size_t iMaxIterations = parameters_.maxIterations();
const ConjugateGradientParameters::Verbosity verbosity = parameters_.cg_.verbosity();
const Parameters::Verbosity verbosity = parameters_.verbosity();
if ( verbosity >= ConjugateGradientParameters::ERROR )
std::cout << "[SimpleSPCGSolver] epsilon = " << parameters_.epsilon()

View File

@@ -11,9 +11,10 @@
#pragma once
#include <gtsam/linear/IterativeSolver.h>
#include <gtsam/linear/ConjugateGradientSolver.h>
#include <gtsam/linear/GaussianFactorGraph.h>
#include <gtsam/linear/GaussianBayesNet.h>
#include <gtsam/linear/IterativeSolver.h>
#include <gtsam/linear/JacobianFactor.h>
namespace gtsam {
@@ -39,7 +40,7 @@ class SimpleSPCGSolver : public IterativeSolver {
public:
typedef IterativeSolver Base;
typedef IterativeOptimizationParameters Parameters;
typedef ConjugateGradientParameters Parameters;
typedef boost::shared_ptr<IterativeSolver> shared_ptr;
protected:
@@ -60,7 +61,6 @@ public:
SimpleSPCGSolver(const GaussianFactorGraph &gfg, const Parameters &parameters);
virtual ~SimpleSPCGSolver() {}
virtual VectorValues::shared_ptr optimize () {return optimize(*y0_);}
virtual const IterativeOptimizationParameters& _params() const { return parameters_; }
protected:

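Net effect of the two hunks above: SimpleSPCGSolver now consumes ConjugateGradientParameters directly and no longer exposes _params(). A construction sketch (hypothetical helper, assuming a pre-built GaussianFactorGraph):

#include <gtsam/linear/SimpleSPCGSolver.h>

gtsam::VectorValues::shared_ptr runSimpleSPCG(const gtsam::GaussianFactorGraph &gfg) {
  gtsam::SimpleSPCGSolver::Parameters params;  // now a ConjugateGradientParameters
  gtsam::SimpleSPCGSolver solver(gfg, params);
  return solver.optimize();  // forwards to optimize(*y0_) per the header above
}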
View File

@@ -1,150 +1,146 @@
///* ----------------------------------------------------------------------------
//
// * GTSAM Copyright 2010, Georgia Tech Research Corporation,
// * Atlanta, Georgia 30332-0415
// * All Rights Reserved
// * Authors: Frank Dellaert, et al. (see THANKS for the full author list)
//
// * See LICENSE for the license information
//
// * -------------------------------------------------------------------------- */
//
///**
// * @file SubgraphPreconditioner.cpp
// * @date Dec 31, 2009
// * @author: Frank Dellaert
// */
//
//#include <boost/foreach.hpp>
//#include <gtsam/linear/SubgraphPreconditioner.h>
//#include <gtsam/linear/JacobianFactor.h>
//#include <gtsam/linear/GaussianFactorGraph.h>
//
//using namespace std;
//
//namespace gtsam {
//
// /* ************************************************************************* */
// SubgraphPreconditioner::SubgraphPreconditioner(const sharedFG& Ab1, const sharedFG& Ab2,
// const sharedBayesNet& Rc1, const sharedValues& xbar) :
// Ab1_(Ab1), Ab2_(Ab2), Rc1_(Rc1), xbar_(xbar), b2bar_(gaussianErrors_(*Ab2_,*xbar)) {
// }
//
// /* ************************************************************************* */
// // x = xbar + inv(R1)*y
// VectorValues SubgraphPreconditioner::x(const VectorValues& y) const {
//#ifdef VECTORBTREE
// if (!y.cloned(*xbar_)) throw
// invalid_argument("SubgraphPreconditioner::x: y needs to be cloned from xbar");
//#endif
// VectorValues x = y;
// backSubstituteInPlace(*Rc1_,x);
// x += *xbar_;
// return x;
// }
//
//// SubgraphPreconditioner SubgraphPreconditioner::add_priors(double sigma) const {
//// SubgraphPreconditioner result = *this ;
//// result.Ab2_ = sharedFG(new GaussianFactorGraph(Ab2_->add_priors(sigma))) ;
//// return result ;
//// }
//
// /* ************************************************************************* */
// double error(const SubgraphPreconditioner& sp, const VectorValues& y) {
//
// Errors e(y);
// VectorValues x = sp.x(y);
// Errors e2 = gaussianErrors(*sp.Ab2(),x);
// return 0.5 * (dot(e, e) + dot(e2,e2));
// }
//
// /* ************************************************************************* */
// // gradient is y + inv(R1')*A2'*(A2*inv(R1)*y-b2bar),
// VectorValues gradient(const SubgraphPreconditioner& sp, const VectorValues& y) {
// VectorValues x = sp.x(y); // x = inv(R1)*y
// Errors e2 = gaussianErrors(*sp.Ab2(),x);
// VectorValues gx2 = VectorValues::Zero(y);
// gtsam::transposeMultiplyAdd(*sp.Ab2(),1.0,e2,gx2); // A2'*e2;
// VectorValues gy2 = gtsam::backSubstituteTranspose(*sp.Rc1(), gx2); // inv(R1')*gx2
// return y + gy2;
// }
//
// /* ************************************************************************* */
// // Apply operator A, A*y = [I;A2*inv(R1)]*y = [y; A2*inv(R1)*y]
// Errors operator*(const SubgraphPreconditioner& sp, const VectorValues& y) {
//
// Errors e(y);
//
// // Add A2 contribution
// VectorValues x = y; // TODO avoid ?
// gtsam::backSubstituteInPlace(*sp.Rc1(), x); // x=inv(R1)*y
// Errors e2 = *sp.Ab2() * x; // A2*x
// e.splice(e.end(), e2);
//
// return e;
// }
//
// /* ************************************************************************* */
// // In-place version that overwrites e
// void multiplyInPlace(const SubgraphPreconditioner& sp, const VectorValues& y, Errors& e) {
//
//
// Errors::iterator ei = e.begin();
// for ( Index i = 0 ; i < y.size() ; ++i, ++ei ) {
// *ei = y[i];
// }
//
// // Add A2 contribution
// VectorValues x = y; // TODO avoid ?
// gtsam::backSubstituteInPlace(*sp.Rc1(), x); // x=inv(R1)*y
// gtsam::multiplyInPlace(*sp.Ab2(),x,ei); // use iterator version
// }
//
// /* ************************************************************************* */
// // Apply operator A', A'*e = [I inv(R1')*A2']*e = e1 + inv(R1')*A2'*e2
// VectorValues operator^(const SubgraphPreconditioner& sp, const Errors& e) {
//
// Errors::const_iterator it = e.begin();
// VectorValues y = sp.zero();
// for ( Index i = 0 ; i < y.size() ; ++i, ++it )
// y[i] = *it ;
// sp.transposeMultiplyAdd2(1.0,it,e.end(),y);
// return y;
// }
//
// /* ************************************************************************* */
// // y += alpha*A'*e
// void transposeMultiplyAdd
// (const SubgraphPreconditioner& sp, double alpha, const Errors& e, VectorValues& y) {
//
//
// Errors::const_iterator it = e.begin();
// for ( Index i = 0 ; i < y.size() ; ++i, ++it ) {
// const Vector& ei = *it;
// axpy(alpha,ei,y[i]);
// }
// sp.transposeMultiplyAdd2(alpha,it,e.end(),y);
// }
//
// /* ************************************************************************* */
// // y += alpha*inv(R1')*A2'*e2
// void SubgraphPreconditioner::transposeMultiplyAdd2 (double alpha,
// Errors::const_iterator it, Errors::const_iterator end, VectorValues& y) const {
//
// // create e2 with what's left of e
// // TODO can we avoid creating e2 by passing iterator to transposeMultiplyAdd ?
// Errors e2;
// while (it != end)
// e2.push_back(*(it++));
//
// VectorValues x = VectorValues::Zero(y); // x = 0
// gtsam::transposeMultiplyAdd(*Ab2_,1.0,e2,x); // x += A2'*e2
// axpy(alpha, gtsam::backSubstituteTranspose(*Rc1_, x), y); // y += alpha*inv(R1')*x
// }
//
// /* ************************************************************************* */
// void SubgraphPreconditioner::print(const std::string& s) const {
// cout << s << endl;
// Ab2_->print();
// }
//} // namespace gtsam
/* ----------------------------------------------------------------------------
* GTSAM Copyright 2010, Georgia Tech Research Corporation,
* Atlanta, Georgia 30332-0415
* All Rights Reserved
* Authors: Frank Dellaert, et al. (see THANKS for the full author list)
* See LICENSE for the license information
* -------------------------------------------------------------------------- */
/**
* @file SubgraphPreconditioner.cpp
* @date Dec 31, 2009
* @author: Frank Dellaert
*/
#include <gtsam/linear/SubgraphPreconditioner.h>
#include <gtsam/linear/GaussianFactorGraph.h>
#include <gtsam/linear/JacobianFactorGraph.h>
#include <boost/foreach.hpp>
using namespace std;
namespace gtsam {
/* ************************************************************************* */
SubgraphPreconditioner::SubgraphPreconditioner(const sharedFG& Ab1, const sharedFG& Ab2,
const sharedBayesNet& Rc1, const sharedValues& xbar) :
Ab1_(Ab1), Ab2_(Ab2), Rc1_(Rc1), xbar_(xbar), b2bar_(gaussianErrors_(*Ab2_,*xbar)) {
}
/* ************************************************************************* */
// x = xbar + inv(R1)*y
VectorValues SubgraphPreconditioner::x(const VectorValues& y) const {
VectorValues x = y;
optimizeInPlace(*Rc1_,x);
x += *xbar_;
return x;
}
// SubgraphPreconditioner SubgraphPreconditioner::add_priors(double sigma) const {
// SubgraphPreconditioner result = *this ;
// result.Ab2_ = sharedFG(new GaussianFactorGraph(Ab2_->add_priors(sigma))) ;
// return result ;
// }
/* ************************************************************************* */
double error(const SubgraphPreconditioner& sp, const VectorValues& y) {
Errors e(y);
VectorValues x = sp.x(y);
Errors e2 = gaussianErrors(*sp.Ab2(),x);
return 0.5 * (dot(e, e) + dot(e2,e2));
}
/* ************************************************************************* */
// gradient is y + inv(R1')*A2'*(A2*inv(R1)*y-b2bar),
VectorValues gradient(const SubgraphPreconditioner& sp, const VectorValues& y) {
VectorValues x = sp.x(y); // x = inv(R1)*y
Errors e2 = gaussianErrors(*sp.Ab2(),x);
VectorValues gx2 = VectorValues::Zero(y);
gtsam::transposeMultiplyAdd(*sp.Ab2(),1.0,e2,gx2); // A2'*e2;
VectorValues gy2 = gtsam::backSubstituteTranspose(*sp.Rc1(), gx2); // inv(R1')*gx2
return y + gy2;
}
/* ************************************************************************* */
// Apply operator A, A*y = [I;A2*inv(R1)]*y = [y; A2*inv(R1)*y]
Errors operator*(const SubgraphPreconditioner& sp, const VectorValues& y) {
Errors e(y);
// Add A2 contribution
VectorValues x = y; // TODO avoid ?
gtsam::optimizeInPlace(*sp.Rc1(), x); // x=inv(R1)*y
Errors e2 = *sp.Ab2() * x; // A2*x
e.splice(e.end(), e2);
return e;
}
/* ************************************************************************* */
// In-place version that overwrites e
void multiplyInPlace(const SubgraphPreconditioner& sp, const VectorValues& y, Errors& e) {
Errors::iterator ei = e.begin();
for ( Index i = 0 ; i < y.size() ; ++i, ++ei ) {
*ei = y[i];
}
// Add A2 contribution
VectorValues x = y; // TODO avoid ?
gtsam::optimizeInPlace(*sp.Rc1(), x); // x=inv(R1)*y
gtsam::multiplyInPlace(*sp.Ab2(),x,ei); // use iterator version
}
/* ************************************************************************* */
// Apply operator A', A'*e = [I inv(R1')*A2']*e = e1 + inv(R1')*A2'*e2
VectorValues operator^(const SubgraphPreconditioner& sp, const Errors& e) {
Errors::const_iterator it = e.begin();
VectorValues y = sp.zero();
for ( Index i = 0 ; i < y.size() ; ++i, ++it )
y[i] = *it ;
sp.transposeMultiplyAdd2(1.0,it,e.end(),y);
return y;
}
/* ************************************************************************* */
// y += alpha*A'*e
void transposeMultiplyAdd
(const SubgraphPreconditioner& sp, double alpha, const Errors& e, VectorValues& y) {
Errors::const_iterator it = e.begin();
for ( Index i = 0 ; i < y.size() ; ++i, ++it ) {
const Vector& ei = *it;
axpy(alpha,ei,y[i]);
}
sp.transposeMultiplyAdd2(alpha,it,e.end(),y);
}
/* ************************************************************************* */
// y += alpha*inv(R1')*A2'*e2
void SubgraphPreconditioner::transposeMultiplyAdd2 (double alpha,
Errors::const_iterator it, Errors::const_iterator end, VectorValues& y) const {
// create e2 with what's left of e
// TODO can we avoid creating e2 by passing iterator to transposeMultiplyAdd ?
Errors e2;
while (it != end) e2.push_back(*(it++));
VectorValues x = VectorValues::Zero(y); // x = 0
gtsam::transposeMultiplyAdd(*Ab2_,1.0,e2,x); // x += A2'*e2
axpy(alpha, gtsam::backSubstituteTranspose(*Rc1_, x), y); // y += alpha*inv(R1')*x
}
/* ************************************************************************* */
void SubgraphPreconditioner::print(const std::string& s) const {
cout << s << endl;
Ab2_->print();
}
} // namespace gtsam

View File

@@ -1,116 +1,117 @@
///* ----------------------------------------------------------------------------
//
// * GTSAM Copyright 2010, Georgia Tech Research Corporation,
// * Atlanta, Georgia 30332-0415
// * All Rights Reserved
// * Authors: Frank Dellaert, et al. (see THANKS for the full author list)
//
// * See LICENSE for the license information
//
// * -------------------------------------------------------------------------- */
//
///**
// * @file SubgraphPreconditioner.h
// * @date Dec 31, 2009
// * @author Frank Dellaert
// */
//
//#pragma once
//
//#include <gtsam/linear/JacobianFactor.h>
//#include <gtsam/linear/GaussianBayesNet.h>
//#include <gtsam/nonlinear/Ordering.h> // FIXME shouldn't have nonlinear things in linear
//
//namespace gtsam {
//
// /**
// * Subgraph conditioner class, as explained in the RSS 2010 submission.
// * Starting with a graph A*x=b, we split it in two systems A1*x=b1 and A2*x=b2
// * We solve R1*x=c1, and make the substitution y=R1*x-c1.
// * To use the class, give the Bayes Net R1*x=c1 and Graph A2*x=b2.
// * Then solve for yhat using CG, and solve for xhat = system.x(yhat).
// */
// class SubgraphPreconditioner {
//
// public:
// typedef boost::shared_ptr<const GaussianBayesNet> sharedBayesNet;
// typedef boost::shared_ptr<const FactorGraph<JacobianFactor> > sharedFG;
// typedef boost::shared_ptr<const VectorValues> sharedValues;
// typedef boost::shared_ptr<const Errors> sharedErrors;
//
// private:
// sharedFG Ab1_, Ab2_;
// sharedBayesNet Rc1_;
// sharedValues xbar_;
// sharedErrors b2bar_; /** b2 - A2*xbar */
//
// public:
//
// SubgraphPreconditioner();
// /**
// * Constructor
// * @param Ab1: the Graph A1*x=b1
// * @param Ab2: the Graph A2*x=b2
// * @param Rc1: the Bayes Net R1*x=c1
// * @param xbar: the solution to R1*x=c1
// */
// SubgraphPreconditioner(const sharedFG& Ab1, const sharedFG& Ab2, const sharedBayesNet& Rc1, const sharedValues& xbar);
//
// /** Access Ab1 */
// const sharedFG& Ab1() const { return Ab1_; }
//
// /** Access Ab2 */
// const sharedFG& Ab2() const { return Ab2_; }
//
// /** Access Rc1 */
// const sharedBayesNet& Rc1() const { return Rc1_; }
//
// /**
// * Add zero-mean i.i.d. Gaussian prior terms to each variable
// * @param sigma Standard deviation of Gaussian
// */
//// SubgraphPreconditioner add_priors(double sigma) const;
//
// /* x = xbar + inv(R1)*y */
// VectorValues x(const VectorValues& y) const;
//
// /* A zero VectorValues with the structure of xbar */
// VectorValues zero() const {
// VectorValues V(VectorValues::Zero(*xbar_)) ;
// return V ;
// }
//
// /**
// * Add constraint part of the error only, used in both calls above
// * y += alpha*inv(R1')*A2'*e2
// * Takes a range indicating e2 !!!!
// */
// void transposeMultiplyAdd2(double alpha, Errors::const_iterator begin,
// Errors::const_iterator end, VectorValues& y) const;
//
// /** print the object */
// void print(const std::string& s = "SubgraphPreconditioner") const;
// };
//
// /* error, given y */
// double error(const SubgraphPreconditioner& sp, const VectorValues& y);
//
// /** gradient = y + inv(R1')*A2'*(A2*inv(R1)*y-b2bar) */
// VectorValues gradient(const SubgraphPreconditioner& sp, const VectorValues& y);
//
// /** Apply operator A */
// Errors operator*(const SubgraphPreconditioner& sp, const VectorValues& y);
//
// /** Apply operator A in place: needs e allocated already */
// void multiplyInPlace(const SubgraphPreconditioner& sp, const VectorValues& y, Errors& e);
//
// /** Apply operator A' */
// VectorValues operator^(const SubgraphPreconditioner& sp, const Errors& e);
//
// /**
// * Add A'*e to y
// * y += alpha*A'*[e1;e2] = [alpha*e1; alpha*inv(R1')*A2'*e2]
// */
// void transposeMultiplyAdd(const SubgraphPreconditioner& sp, double alpha, const Errors& e, VectorValues& y);
//
//} // namespace gtsam
/* ----------------------------------------------------------------------------
* GTSAM Copyright 2010, Georgia Tech Research Corporation,
* Atlanta, Georgia 30332-0415
* All Rights Reserved
* Authors: Frank Dellaert, et al. (see THANKS for the full author list)
* See LICENSE for the license information
* -------------------------------------------------------------------------- */
/**
* @file SubgraphPreconditioner.h
* @date Dec 31, 2009
* @author Frank Dellaert
*/
#pragma once
#include <gtsam/linear/JacobianFactor.h>
#include <gtsam/linear/GaussianBayesNet.h>
namespace gtsam {
/**
* Subgraph conditioner class, as explained in the RSS 2010 submission.
* Starting with a graph A*x=b, we split it in two systems A1*x=b1 and A2*x=b2
* We solve R1*x=c1, and make the substitution y=R1*x-c1.
* To use the class, give the Bayes Net R1*x=c1 and Graph A2*x=b2.
* Then solve for yhat using CG, and solve for xhat = system.x(yhat).
*/
class SubgraphPreconditioner {
public:
typedef boost::shared_ptr<SubgraphPreconditioner> shared_ptr;
typedef boost::shared_ptr<const GaussianBayesNet> sharedBayesNet;
typedef boost::shared_ptr<const FactorGraph<JacobianFactor> > sharedFG;
typedef boost::shared_ptr<const VectorValues> sharedValues;
typedef boost::shared_ptr<const Errors> sharedErrors;
private:
sharedFG Ab1_, Ab2_;
sharedBayesNet Rc1_;
sharedValues xbar_;
sharedErrors b2bar_; /** b2 - A2*xbar */
public:
SubgraphPreconditioner();
/**
* Constructor
* @param Ab1: the Graph A1*x=b1
* @param Ab2: the Graph A2*x=b2
* @param Rc1: the Bayes Net R1*x=c1
* @param xbar: the solution to R1*x=c1
*/
SubgraphPreconditioner(const sharedFG& Ab1, const sharedFG& Ab2, const sharedBayesNet& Rc1, const sharedValues& xbar);
/** Access Ab1 */
const sharedFG& Ab1() const { return Ab1_; }
/** Access Ab2 */
const sharedFG& Ab2() const { return Ab2_; }
/** Access Rc1 */
const sharedBayesNet& Rc1() const { return Rc1_; }
/**
* Add zero-mean i.i.d. Gaussian prior terms to each variable
* @param sigma Standard deviation of Gaussian
*/
// SubgraphPreconditioner add_priors(double sigma) const;
/* x = xbar + inv(R1)*y */
VectorValues x(const VectorValues& y) const;
/* A zero VectorValues with the structure of xbar */
VectorValues zero() const {
VectorValues V(VectorValues::Zero(*xbar_)) ;
return V ;
}
/**
* Add constraint part of the error only, used in both calls above
* y += alpha*inv(R1')*A2'*e2
* Takes a range indicating e2 !!!!
*/
void transposeMultiplyAdd2(double alpha, Errors::const_iterator begin,
Errors::const_iterator end, VectorValues& y) const;
/** print the object */
void print(const std::string& s = "SubgraphPreconditioner") const;
};
/* error, given y */
double error(const SubgraphPreconditioner& sp, const VectorValues& y);
/** gradient = y + inv(R1')*A2'*(A2*inv(R1)*y-b2bar) */
VectorValues gradient(const SubgraphPreconditioner& sp, const VectorValues& y);
/** Apply operator A */
Errors operator*(const SubgraphPreconditioner& sp, const VectorValues& y);
/** Apply operator A in place: needs e allocated already */
void multiplyInPlace(const SubgraphPreconditioner& sp, const VectorValues& y, Errors& e);
/** Apply operator A' */
VectorValues operator^(const SubgraphPreconditioner& sp, const Errors& e);
/**
* Add A'*e to y
* y += alpha*A'*[e1;e2] = [alpha*e1; alpha*inv(R1')*A2'*e2]
*/
void transposeMultiplyAdd(const SubgraphPreconditioner& sp, double alpha, const Errors& e, VectorValues& y);
} // namespace gtsam
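Spelling out the algebra compressed into the class comment above (same notation; \(\bar{x}\) solves \(R_1 x = c_1\), and \(\bar{b}_2 = b_2 - A_2\bar{x}\) is the cached b2bar_):

\[
Ax = b \;\Longrightarrow\;
\begin{cases} A_1 x = b_1 \\ A_2 x = b_2 \end{cases},
\qquad
y = R_1 x - c_1 \;\Longleftrightarrow\; x = \bar{x} + R_1^{-1} y,
\]
\[
\min_x \;\|A_1 x - b_1\|^2 + \|A_2 x - b_2\|^2
\;=\;
\min_y \;\|y\|^2 + \|A_2 R_1^{-1} y - \bar{b}_2\|^2 + \text{const},
\]

so the preconditioned operator is \(y \mapsto [\,I \,;\; A_2 R_1^{-1}\,]\,y\) (operator* above) and its gradient is \(y + R_1^{-\top} A_2^{\top}(A_2 R_1^{-1} y - \bar{b}_2)\), exactly the expression implemented in gradient().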

View File

@@ -1,82 +1,80 @@
/* ----------------------------------------------------------------------------
* GTSAM Copyright 2010, Georgia Tech Research Corporation,
* Atlanta, Georgia 30332-0415
* All Rights Reserved
* Authors: Frank Dellaert, et al. (see THANKS for the full author list)
* See LICENSE for the license information
* -------------------------------------------------------------------------- */
#pragma once
#include <boost/foreach.hpp>
#include <gtsam/linear/iterative-inl.h>
#include <gtsam/inference/graph-inl.h>
#include <gtsam/inference/EliminationTree.h>
using namespace std;
namespace gtsam {
template<class GRAPH, class LINEAR, class KEY>
void SubgraphSolver<GRAPH,LINEAR,KEY>::replaceFactors(const typename LINEAR::shared_ptr &graph) {
GaussianFactorGraph::shared_ptr Ab1 = boost::make_shared<GaussianFactorGraph>();
GaussianFactorGraph::shared_ptr Ab2 = boost::make_shared<GaussianFactorGraph>();
if (parameters_->verbosity()) cout << "split the graph ...";
split(pairs_, *graph, *Ab1, *Ab2) ;
if (parameters_->verbosity()) cout << ",with " << Ab1->size() << " and " << Ab2->size() << " factors" << endl;
// // Add a HardConstraint to the root, otherwise the root will be singular
// Key root = keys.back();
// T_.addHardConstraint(root, theta0[root]);
//
// // compose the approximate solution
// theta_bar_ = composePoses<GRAPH, Constraint, Pose, Values> (T_, tree, theta0[root]);
LINEAR sacrificialAb1 = *Ab1; // duplicate !!!!!
SubgraphPreconditioner::sharedBayesNet Rc1 =
EliminationTree<GaussianFactor>::Create(sacrificialAb1)->eliminate(&EliminateQR);
SubgraphPreconditioner::sharedValues xbar = gtsam::optimize_(*Rc1);
pc_ = boost::make_shared<SubgraphPreconditioner>(
Ab1->dynamicCastFactors<FactorGraph<JacobianFactor> >(), Ab2->dynamicCastFactors<FactorGraph<JacobianFactor> >(),Rc1,xbar);
}
template<class GRAPH, class LINEAR, class KEY>
VectorValues::shared_ptr SubgraphSolver<GRAPH,LINEAR,KEY>::optimize() const {
// preconditioned conjugate gradient
VectorValues zeros = pc_->zero();
VectorValues ybar = conjugateGradients<SubgraphPreconditioner, VectorValues, Errors>
(*pc_, zeros, *parameters_);
boost::shared_ptr<VectorValues> xbar = boost::make_shared<VectorValues>() ;
*xbar = pc_->x(ybar);
return xbar;
}
template<class GRAPH, class LINEAR, class KEY>
void SubgraphSolver<GRAPH,LINEAR,KEY>::initialize(const GRAPH& G, const Values& theta0) {
// generate spanning tree
PredecessorMap<KEY> tree_ = gtsam::findMinimumSpanningTree<GRAPH, KEY, Constraint>(G);
// make the ordering
list<KEY> keys = predecessorMap2Keys(tree_);
ordering_ = boost::make_shared<Ordering>(list<Key>(keys.begin(), keys.end()));
// build factor pairs
pairs_.clear();
typedef pair<KEY,KEY> EG ;
BOOST_FOREACH( const EG &eg, tree_ ) {
Key key1 = Key(eg.first),
key2 = Key(eg.second) ;
pairs_.insert(pair<Index, Index>((*ordering_)[key1], (*ordering_)[key2])) ;
}
}
} // \namespace gtsam
///* ----------------------------------------------------------------------------
//
// * GTSAM Copyright 2010, Georgia Tech Research Corporation,
// * Atlanta, Georgia 30332-0415
// * All Rights Reserved
// * Authors: Frank Dellaert, et al. (see THANKS for the full author list)
//
// * See LICENSE for the license information
//
// * -------------------------------------------------------------------------- */
//
//#pragma once
//
//#include <boost/foreach.hpp>
//
//#include <gtsam_unstable/linear/iterative-inl.h>
//#include <gtsam/inference/graph-inl.h>
//#include <gtsam/inference/EliminationTree.h>
//
//namespace gtsam {
//
//template<class GRAPH, class LINEAR, class KEY>
//void SubgraphSolver<GRAPH,LINEAR,KEY>::replaceFactors(const typename LINEAR::shared_ptr &graph) {
//
// GaussianFactorGraph::shared_ptr Ab1 = boost::make_shared<GaussianFactorGraph>();
// GaussianFactorGraph::shared_ptr Ab2 = boost::make_shared<GaussianFactorGraph>();
//
// if (parameters_->verbosity()) cout << "split the graph ...";
// split(pairs_, *graph, *Ab1, *Ab2) ;
// if (parameters_->verbosity()) cout << ",with " << Ab1->size() << " and " << Ab2->size() << " factors" << endl;
//
// // // Add a HardConstraint to the root, otherwise the root will be singular
// // Key root = keys.back();
// // T_.addHardConstraint(root, theta0[root]);
// //
// // // compose the approximate solution
// // theta_bar_ = composePoses<GRAPH, Constraint, Pose, Values> (T_, tree, theta0[root]);
//
// LINEAR sacrificialAb1 = *Ab1; // duplicate !!!!!
// SubgraphPreconditioner::sharedBayesNet Rc1 =
// EliminationTree<GaussianFactor>::Create(sacrificialAb1)->eliminate(&EliminateQR);
// SubgraphPreconditioner::sharedValues xbar = gtsam::optimize_(*Rc1);
//
// pc_ = boost::make_shared<SubgraphPreconditioner>(
// Ab1->dynamicCastFactors<FactorGraph<JacobianFactor> >(), Ab2->dynamicCastFactors<FactorGraph<JacobianFactor> >(),Rc1,xbar);
//}
//
//template<class GRAPH, class LINEAR, class KEY>
//VectorValues::shared_ptr SubgraphSolver<GRAPH,LINEAR,KEY>::optimize() const {
//
// // preconditioned conjugate gradient
// VectorValues zeros = pc_->zero();
// VectorValues ybar = conjugateGradients<SubgraphPreconditioner, VectorValues, Errors>
// (*pc_, zeros, *parameters_);
//
// boost::shared_ptr<VectorValues> xbar = boost::make_shared<VectorValues>() ;
// *xbar = pc_->x(ybar);
// return xbar;
//}
//
//template<class GRAPH, class LINEAR, class KEY>
//void SubgraphSolver<GRAPH,LINEAR,KEY>::initialize(const GRAPH& G, const Values& theta0) {
// // generate spanning tree
// PredecessorMap<KEY> tree_ = gtsam::findMinimumSpanningTree<GRAPH, KEY, Constraint>(G);
//
// // make the ordering
// list<KEY> keys = predecessorMap2Keys(tree_);
// ordering_ = boost::make_shared<Ordering>(list<Key>(keys.begin(), keys.end()));
//
// // build factor pairs
// pairs_.clear();
// typedef pair<KEY,KEY> EG ;
// BOOST_FOREACH( const EG &eg, tree_ ) {
// Key key1 = Key(eg.first),
// key2 = Key(eg.second) ;
// pairs_.insert(pair<Index, Index>((*ordering_)[key1], (*ordering_)[key2])) ;
// }
//}
//
//} // \namespace gtsam

View File

@@ -1,50 +1,108 @@
///* ----------------------------------------------------------------------------
//
// * GTSAM Copyright 2010, Georgia Tech Research Corporation,
// * Atlanta, Georgia 30332-0415
// * All Rights Reserved
// * Authors: Frank Dellaert, et al. (see THANKS for the full author list)
//
// * See LICENSE for the license information
//
// * -------------------------------------------------------------------------- */
//
//#include <gtsam/linear/SubgraphSolver.h>
//
//using namespace std;
//
//namespace gtsam {
//
///* split the gaussian factor graph Ab into Ab1 and Ab2 according to the map */
//bool split(const std::map<Index, Index> &M,
// const GaussianFactorGraph &Ab,
// GaussianFactorGraph &Ab1,
// GaussianFactorGraph &Ab2) {
//
// Ab1 = GaussianFactorGraph();
// Ab2 = GaussianFactorGraph();
//
// for ( size_t i = 0 ; i < Ab.size() ; ++i ) {
//
// boost::shared_ptr<GaussianFactor> factor = Ab[i] ;
//
// if (factor->keys().size() > 2)
// throw(invalid_argument("split: only support factors with at most two keys"));
// if (factor->keys().size() == 1) {
// Ab1.push_back(factor);
// Ab2.push_back(factor);
// continue;
// }
// Index key1 = factor->keys()[0];
// Index key2 = factor->keys()[1];
//
// if ((M.find(key1) != M.end() && M.find(key1)->second == key2) ||
// (M.find(key2) != M.end() && M.find(key2)->second == key1))
// Ab1.push_back(factor);
// else
// Ab2.push_back(factor);
// }
// return true ;
//}
//
//} // \namespace gtsam
/* ----------------------------------------------------------------------------
* GTSAM Copyright 2010, Georgia Tech Research Corporation,
* Atlanta, Georgia 30332-0415
* All Rights Reserved
* Authors: Frank Dellaert, et al. (see THANKS for the full author list)
* See LICENSE for the license information
* -------------------------------------------------------------------------- */
#include <gtsam/linear/Errors.h>
#include <gtsam/linear/GaussianConditional.h>
#include <gtsam/linear/GaussianBayesNet.h>
#include <gtsam/linear/GaussianFactorGraph.h>
#include <gtsam/linear/JacobianFactorGraph.h>
#include <gtsam/linear/SubgraphSolver.h>
#include <gtsam/linear/VectorValues.h>
#include <gtsam/inference/graph-inl.h>
#include <gtsam/inference/EliminationTree.h>
#include <gtsam_unstable/linear/iterative-inl.h>
#include <boost/foreach.hpp>
#include <boost/shared_ptr.hpp>
#include <list>
using namespace std;
namespace gtsam {
SubgraphSolver::SubgraphSolver(const GaussianFactorGraph &gfg, const Parameters &parameters)
: parameters_(parameters)
{
GaussianFactorGraph::shared_ptr Ab1 = boost::make_shared<GaussianFactorGraph>();
GaussianFactorGraph::shared_ptr Ab2 = boost::make_shared<GaussianFactorGraph>();
boost::tie(Ab1, Ab2) = splitGraph(gfg) ;
if (parameters_.verbosity())
cout << ",with " << Ab1->size() << " and " << Ab2->size() << " factors" << endl;
// // Add a HardConstraint to the root, otherwise the root will be singular
// Key root = keys.back();
// T_.addHardConstraint(root, theta0[root]);
//
// // compose the approximate solution
// theta_bar_ = composePoses<GRAPH, Constraint, Pose, Values> (T_, tree, theta0[root]);
GaussianBayesNet::shared_ptr Rc1 = EliminationTree<GaussianFactor>::Create(*Ab1)->eliminate(&EliminateQR);
VectorValues::shared_ptr xbar(new VectorValues(gtsam::optimize(*Rc1)));
pc_ = boost::make_shared<SubgraphPreconditioner>(
Ab1->dynamicCastFactors<FactorGraph<JacobianFactor> >(),
Ab2->dynamicCastFactors<FactorGraph<JacobianFactor> >(),
Rc1, xbar);
}
VectorValues::shared_ptr SubgraphSolver::optimize() {
// preconditioned conjugate gradient
VectorValues zeros = pc_->zero();
VectorValues ybar = conjugateGradients<SubgraphPreconditioner, VectorValues, Errors> (*pc_, zeros, parameters_);
boost::shared_ptr<VectorValues> xbar = boost::make_shared<VectorValues>() ;
*xbar = pc_->x(ybar);
return xbar;
}
boost::tuple<GaussianFactorGraph::shared_ptr, GaussianFactorGraph::shared_ptr>
SubgraphSolver::splitGraph(const GaussianFactorGraph &gfg) {
VariableIndex index(gfg);
size_t n = index.size();
std::vector<bool> connected(n, false);
GaussianFactorGraph::shared_ptr At(new GaussianFactorGraph());
GaussianFactorGraph::shared_ptr Ac( new GaussianFactorGraph());
BOOST_FOREACH ( const GaussianFactor::shared_ptr &gf, gfg ) {
bool augment = false ;
/* check whether this factor should be augmented to the "tree" graph */
if ( gf->keys().size() == 1 ) augment = true;
else {
BOOST_FOREACH ( const Index key, *gf ) {
if ( connected[key] == false ) {
augment = true ;
connected[key] = true;
}
}
}
if ( augment ) At->push_back(gf);
else Ac->push_back(gf);
}
return boost::tie(At, Ac);
}
} // \namespace gtsam
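Note that splitGraph() above is a single greedy pass, not the minimum spanning tree of the old templated solver: a factor lands in At as soon as it touches a variable that is not yet connected, and everything else goes to Ac. A minimal driver sketch (hypothetical helper, assuming a pre-built graph):

#include <gtsam/linear/SubgraphSolver.h>

gtsam::VectorValues::shared_ptr runSPCG(const gtsam::GaussianFactorGraph &gfg) {
  gtsam::SubgraphSolver::Parameters params;   // ConjugateGradientParameters defaults
  gtsam::SubgraphSolver solver(gfg, params);  // splits gfg and eliminates the "tree" half
  return solver.optimize();                   // PCG over the remaining factors
}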

View File

@@ -1,100 +1,52 @@
///* ----------------------------------------------------------------------------
//
// * GTSAM Copyright 2010, Georgia Tech Research Corporation,
// * Atlanta, Georgia 30332-0415
// * All Rights Reserved
// * Authors: Frank Dellaert, et al. (see THANKS for the full author list)
//
// * See LICENSE for the license information
//
// * -------------------------------------------------------------------------- */
//
//#pragma once
//
//#include <boost/make_shared.hpp>
//
//#include <gtsam/linear/GaussianFactorGraph.h>
//#include <gtsam/linear/IterativeSolver.h>
//#include <gtsam/linear/SubgraphPreconditioner.h>
//
//namespace gtsam {
//
///* split the gaussian factor graph Ab into Ab1 and Ab2 according to the map */
//bool split(const std::map<Index, Index> &M,
// const GaussianFactorGraph &Ab,
// GaussianFactorGraph &Ab1,
// GaussianFactorGraph &Ab2);
//
///**
// * A nonlinear system solver using subgraph preconditioning conjugate gradient
// * Concept NonLinearSolver<G,T,L> implements
// * linearize: G * T -> L
// * solve : L -> VectorValues
// */
//template<class GRAPH, class LINEAR, class VALUES>
//class SubgraphSolver : public IterativeSolver {
//
//private:
// typedef typename VALUES::Key Key;
// typedef typename GRAPH::Pose Pose;
// typedef typename GRAPH::Constraint Constraint;
//
// typedef boost::shared_ptr<const SubgraphSolver> shared_ptr ;
// typedef boost::shared_ptr<Ordering> shared_ordering ;
// typedef boost::shared_ptr<GRAPH> shared_graph ;
// typedef boost::shared_ptr<LINEAR> shared_linear ;
// typedef boost::shared_ptr<VALUES> shared_values ;
// typedef boost::shared_ptr<SubgraphPreconditioner> shared_preconditioner ;
// typedef std::map<Index,Index> mapPairIndex ;
//
// /* the ordering derived from the spanning tree */
// shared_ordering ordering_;
//
// /* the indice of two vertices in the gaussian factor graph */
// mapPairIndex pairs_;
//
// /* preconditioner */
// shared_preconditioner pc_;
//
// /* flag for direct solver - either QR or LDL */
// bool useQR_;
//
//public:
//
// SubgraphSolver(const GRAPH& G, const VALUES& theta0, const Parameters &parameters = Parameters(), bool useQR = false):
// IterativeSolver(parameters), useQR_(useQR) { initialize(G,theta0); }
//
// SubgraphSolver(const LINEAR& GFG) {
// std::cout << "[SubgraphSolver] Unexpected usage.." << std::endl;
// throw std::runtime_error("SubgraphSolver: gaussian factor graph initialization not supported");
// }
//
// SubgraphSolver(const shared_linear& GFG, const boost::shared_ptr<VariableIndex>& structure, bool useQR = false) {
// std::cout << "[SubgraphSolver] Unexpected usage.." << std::endl;
// throw std::runtime_error("SubgraphSolver: gaussian factor graph and variable index initialization not supported");
// }
//
// SubgraphSolver(const SubgraphSolver& solver) :
// IterativeSolver(solver), ordering_(solver.ordering_), pairs_(solver.pairs_), pc_(solver.pc_), useQR_(solver.useQR_) {}
//
// SubgraphSolver(shared_ordering ordering,
// mapPairIndex pairs,
// shared_preconditioner pc,
// sharedParameters parameters = boost::make_shared<Parameters>(),
// bool useQR = true) :
// IterativeSolver(parameters), ordering_(ordering), pairs_(pairs), pc_(pc), useQR_(useQR) {}
//
// void replaceFactors(const typename LINEAR::shared_ptr &graph);
// VectorValues::shared_ptr optimize() ;
// shared_ordering ordering() const { return ordering_; }
//
//protected:
// void initialize(const GRAPH& G, const VALUES& theta0);
//
//private:
// SubgraphSolver():IterativeSolver(){}
//};
//
//} // namespace gtsam
//
//#include <gtsam/linear/SubgraphSolver-inl.h>
/* ----------------------------------------------------------------------------
* GTSAM Copyright 2010, Georgia Tech Research Corporation,
* Atlanta, Georgia 30332-0415
* All Rights Reserved
* Authors: Frank Dellaert, et al. (see THANKS for the full author list)
* See LICENSE for the license information
* -------------------------------------------------------------------------- */
#pragma once
#include <gtsam/linear/ConjugateGradientSolver.h>
#include <gtsam/linear/GaussianFactorGraph.h>
#include <gtsam/linear/SubgraphPreconditioner.h>
#include <boost/make_shared.hpp>
namespace gtsam {
/**
* A linear system solver using subgraph preconditioning conjugate gradient
*/
class SubgraphSolver : public IterativeSolver {
public:
typedef ConjugateGradientParameters Parameters;
Parameters parameters_;
SubgraphPreconditioner::shared_ptr pc_; ///< preconditioner object
public:
SubgraphSolver(const GaussianFactorGraph &gfg, const Parameters &parameters);
virtual ~SubgraphSolver() {}
virtual VectorValues::shared_ptr optimize () ;
protected:
boost::tuple<GaussianFactorGraph::shared_ptr, GaussianFactorGraph::shared_ptr>
splitGraph(const GaussianFactorGraph &gfg) ;
};
} // namespace gtsam

View File

@@ -24,6 +24,7 @@
#include <gtsam/inference/EliminationTree.h>
#include <gtsam/linear/GaussianJunctionTree.h>
#include <gtsam/linear/SimpleSPCGSolver.h>
#include <gtsam/linear/SubgraphSolver.h>
using namespace std;
@@ -74,8 +75,15 @@ void LevenbergMarquardtOptimizer::iterate() {
delta = gtsam::optimize(*EliminationTree<GaussianFactor>::Create(dampedSystem)->eliminate(params_.getEliminationFunction()));
}
else if ( params_.isCG() ) {
IterativeOptimizationParameters::shared_ptr params(!params_.iterativeParams ? boost::make_shared<IterativeOptimizationParameters>() : params_.iterativeParams);
ConjugateGradientParameters::shared_ptr params (!params_.iterativeParams ?
boost::make_shared<ConjugateGradientParameters>() :
boost::dynamic_pointer_cast<ConjugateGradientParameters>(params_.iterativeParams));
if ( !params ) throw runtime_error("LMSolver: spcg parameter dynamic casting failed");
SimpleSPCGSolver solver(dampedSystem, *params);
//SubgraphSolver solver(dampedSystem, *params);
delta = *solver.optimize();
}
else {

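Since iterativeParams is held through the base-class shared_ptr, a caller now passes CG settings as below (a sketch; assumes LevenbergMarquardtParams is the type behind params_ and that the field is publicly assignable):

void configureForSPCG(gtsam::LevenbergMarquardtParams &lmParams) {
  // Concrete CG settings ride through the base-class pointer and are
  // recovered by the dynamic_pointer_cast guarded above.
  lmParams.iterativeParams =
      boost::make_shared<gtsam::ConjugateGradientParameters>();
}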
View File

@@ -19,7 +19,7 @@
#pragma once
#include <gtsam/nonlinear/NonlinearOptimizer.h>
#include <gtsam/linear/IterativeOptimizationParameters.h>
#include <gtsam/linear/IterativeSolver.h>
namespace gtsam {

View File

@@ -19,7 +19,7 @@
#pragma once
#include <gtsam_unstable/linear/iterative.h>
#include <gtsam/linear/IterativeOptimizationParameters.h>
#include <gtsam/linear/ConjugateGradientSolver.h>
#include <boost/shared_ptr.hpp>
namespace gtsam {
@@ -29,7 +29,7 @@ namespace gtsam {
template<class S, class V, class E>
struct CGState {
typedef IterativeOptimizationParameters Parameters;
typedef ConjugateGradientParameters Parameters;
const Parameters &parameters_;
int k; ///< iteration
@@ -91,7 +91,7 @@ namespace gtsam {
// check for convergence
double new_gamma = dot(g, g);
if (parameters_.verbosity() != IterativeOptimizationParameters::SILENT)
if (parameters_.verbosity() != ConjugateGradientParameters::SILENT)
std::cout << "iteration " << k << ": alpha = " << alpha
<< ", dotg = " << new_gamma << std::endl;
if (new_gamma < threshold) return true;
@@ -121,18 +121,18 @@ namespace gtsam {
V conjugateGradients(
const S& Ab,
V x,
const IterativeOptimizationParameters &parameters,
const ConjugateGradientParameters &parameters,
bool steepest = false) {
CGState<S, V, E> state(Ab, x, parameters, steepest);
if (parameters.verbosity() != IterativeOptimizationParameters::SILENT)
if (parameters.verbosity() != ConjugateGradientParameters::SILENT)
std::cout << "CG: epsilon = " << parameters.epsilon()
<< ", maxIterations = " << parameters.maxIterations()
<< ", ||g0||^2 = " << state.gamma
<< ", threshold = " << state.threshold << std::endl;
if ( state.gamma < state.threshold ) {
if (parameters.verbosity() != IterativeOptimizationParameters::SILENT)
if (parameters.verbosity() != ConjugateGradientParameters::SILENT)
std::cout << "||g0||^2 < threshold, exiting immediately !" << std::endl;
return x;

View File

@@ -20,7 +20,7 @@
#include <gtsam/base/Vector.h>
#include <gtsam/base/Matrix.h>
#include <gtsam/linear/JacobianFactorGraph.h>
#include <gtsam/linear/IterativeOptimizationParameters.h>
#include <gtsam/linear/IterativeSolver.h>
#include <iostream>
@@ -38,31 +38,31 @@ namespace gtsam {
/* ************************************************************************* */
Vector steepestDescent(const System& Ab, const Vector& x, const IterativeOptimizationParameters & parameters) {
Vector steepestDescent(const System& Ab, const Vector& x, const ConjugateGradientParameters & parameters) {
return conjugateGradients<System, Vector, Vector> (Ab, x, parameters, true);
}
Vector conjugateGradientDescent(const System& Ab, const Vector& x, const IterativeOptimizationParameters & parameters) {
Vector conjugateGradientDescent(const System& Ab, const Vector& x, const ConjugateGradientParameters & parameters) {
return conjugateGradients<System, Vector, Vector> (Ab, x, parameters);
}
/* ************************************************************************* */
Vector steepestDescent(const Matrix& A, const Vector& b, const Vector& x, const IterativeOptimizationParameters & parameters) {
Vector steepestDescent(const Matrix& A, const Vector& b, const Vector& x, const ConjugateGradientParameters & parameters) {
System Ab(A, b);
return conjugateGradients<System, Vector, Vector> (Ab, x, parameters, true);
}
Vector conjugateGradientDescent(const Matrix& A, const Vector& b, const Vector& x, const IterativeOptimizationParameters & parameters) {
Vector conjugateGradientDescent(const Matrix& A, const Vector& b, const Vector& x, const ConjugateGradientParameters & parameters) {
System Ab(A, b);
return conjugateGradients<System, Vector, Vector> (Ab, x, parameters);
}
/* ************************************************************************* */
VectorValues steepestDescent(const FactorGraph<JacobianFactor>& fg, const VectorValues& x, const IterativeOptimizationParameters & parameters) {
VectorValues steepestDescent(const FactorGraph<JacobianFactor>& fg, const VectorValues& x, const ConjugateGradientParameters & parameters) {
return conjugateGradients<FactorGraph<JacobianFactor>, VectorValues, Errors> (fg, x, parameters, true);
}
VectorValues conjugateGradientDescent(const FactorGraph<JacobianFactor>& fg, const VectorValues& x, const IterativeOptimizationParameters & parameters) {
VectorValues conjugateGradientDescent(const FactorGraph<JacobianFactor>& fg, const VectorValues& x, const ConjugateGradientParameters & parameters) {
return conjugateGradients<FactorGraph<JacobianFactor>, VectorValues, Errors> (fg, x, parameters);
}

View File

@@ -1,6 +1,6 @@
/* ----------------------------------------------------------------------------
* GTSAM Copyright 2010, Georgia Tech Research Corporation,
* GTSAM Copyright 2010, Georgia Tech Research Corporation,
* Atlanta, Georgia 30332-0415
* All Rights Reserved
* Authors: Frank Dellaert, et al. (see THANKS for the full author list)
@@ -20,7 +20,7 @@
#include <gtsam/base/Matrix.h>
#include <gtsam/linear/VectorValues.h>
#include <gtsam/linear/IterativeOptimizationParameters.h>
#include <gtsam/linear/ConjugateGradientSolver.h>
namespace gtsam {
@@ -105,7 +105,7 @@ namespace gtsam {
Vector conjugateGradientDescent(
const System& Ab,
const Vector& x,
const IterativeOptimizationParameters & parameters);
const ConjugateGradientParameters & parameters);
/** convenience calls using matrices, will create System class internally: */
@@ -116,7 +116,7 @@ namespace gtsam {
const Matrix& A,
const Vector& b,
const Vector& x,
const IterativeOptimizationParameters & parameters);
const ConjugateGradientParameters & parameters);
/**
* Method of conjugate gradients (CG), Matrix version
@@ -125,7 +125,7 @@ namespace gtsam {
const Matrix& A,
const Vector& b,
const Vector& x,
const IterativeOptimizationParameters & parameters);
const ConjugateGradientParameters & parameters);
class GaussianFactorGraph;
@@ -135,7 +135,7 @@ namespace gtsam {
VectorValues steepestDescent(
const GaussianFactorGraph& fg,
const VectorValues& x,
const IterativeOptimizationParameters & parameters);
const ConjugateGradientParameters & parameters);
/**
* Method of conjugate gradients (CG), Gaussian Factor Graph version
@@ -143,7 +143,7 @@ namespace gtsam {
VectorValues conjugateGradientDescent(
const GaussianFactorGraph& fg,
const VectorValues& x,
const IterativeOptimizationParameters & parameters);
const ConjugateGradientParameters & parameters);
} // namespace gtsam
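A tiny dense example of the renamed API (a sketch; assumes gtsam::Matrix and gtsam::Vector are the usual Eigen typedefs and that the declarations above are in scope):

#include <gtsam/base/Matrix.h>
#include <gtsam/base/Vector.h>
#include <gtsam/linear/ConjugateGradientSolver.h>

gtsam::Vector solveSmallDense() {
  gtsam::Matrix A(2, 2);
  A << 4.0, 1.0,
       1.0, 3.0;
  gtsam::Vector b(2);
  b << 1.0, 2.0;
  gtsam::Vector x0 = gtsam::Vector::Zero(2);
  gtsam::ConjugateGradientParameters params;  // defaults: 500 iterations, 1e-3 tolerances
  return gtsam::conjugateGradientDescent(A, b, x0, params);  // CG on min ||Ax - b||^2
}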