Old SPCG solver fixed

release/4.3a0
Yong-Dian Jian 2012-06-09 02:42:45 +00:00
parent 9fdb28f9bf
commit 734a18b02e
16 changed files with 113 additions and 76 deletions

View File

@@ -16,6 +16,8 @@
  * @date June 2, 2012
  */

+#include <gtsam/linear/SimpleSPCGSolver.h>
+#include <gtsam/linear/SubgraphSolver.h>
 #include <gtsam/nonlinear/LevenbergMarquardtOptimizer.h>
 #include <gtsam/slam/pose2SLAM.h>

@@ -61,10 +63,26 @@ int main(void) {
   cout << "initial error = " << graph.error(initialEstimate) << endl ;

   // 4. Single Step Optimization using Levenberg-Marquardt
-  // Note: Although there are many options in IterativeOptimizationParameters,
-  Values result = graph.optimizeSPCG(initialEstimate);
-  result.print("\nFinal result:\n");
-  cout << "final error = " << graph.error(result) << endl;
+  LevenbergMarquardtParams param;
+  param.verbosity = NonlinearOptimizerParams::ERROR;
+  param.verbosityLM = LevenbergMarquardtParams::LAMBDA;
+  param.linearSolverType = SuccessiveLinearizationParams::CG;
+
+  {
+    param.iterativeParams = boost::make_shared<SimpleSPCGSolverParameters>();
+    LevenbergMarquardtOptimizer optimizer(graph, initialEstimate, param);
+    Values result = optimizer.optimize();
+    result.print("\nFinal result:\n");
+    cout << "simple spcg solver final error = " << graph.error(result) << endl;
+  }
+
+  {
+    param.iterativeParams = boost::make_shared<SubgraphSolverParameters>();
+    LevenbergMarquardtOptimizer optimizer(graph, initialEstimate, param);
+    Values result = optimizer.optimize();
+    result.print("\nFinal result:\n");
+    cout << "subgraph solver final error = " << graph.error(result) << endl;
+  }

   return 0 ;
 }

View File

@@ -36,16 +36,14 @@ struct ConjugateGradientParameters : public IterativeOptimizationParameters {
     SBM_MT     ///< Sparse Block Matrix Multithreaded
   } blas_kernel_;

-  enum Verbosity { SILENT = 0, COMPLEXITY = 1, ERROR = 2 } verbosity_ ; /* Verbosity */
-
   ConjugateGradientParameters()
   : minIterations_(1), maxIterations_(500), reset_(501), epsilon_rel_(1e-3), epsilon_abs_(1e-3),
-    blas_kernel_(GTSAM), verbosity_(SILENT) {}
+    blas_kernel_(GTSAM) {}

   ConjugateGradientParameters(size_t minIterations, size_t maxIterations, size_t reset,
-    double epsilon_rel, double epsilon_abs, BLASKernel blas = GTSAM, Verbosity verbosity = SILENT)
+    double epsilon_rel, double epsilon_abs, BLASKernel blas = GTSAM)
   : minIterations_(minIterations), maxIterations_(maxIterations), reset_(reset),
-    epsilon_rel_(epsilon_rel), epsilon_abs_(epsilon_abs), blas_kernel_(blas), verbosity_(verbosity) {}
+    epsilon_rel_(epsilon_rel), epsilon_abs_(epsilon_abs), blas_kernel_(blas) {}

   /* general interface */
   inline size_t minIterations() const { return minIterations_; }

@@ -55,7 +53,6 @@ struct ConjugateGradientParameters : public IterativeOptimizationParameters {
   inline double epsilon_rel() const { return epsilon_rel_; }
   inline double epsilon_abs() const { return epsilon_abs_; }
   inline BLASKernel blas_kernel() const { return blas_kernel_; }
-  inline Verbosity verbosity() const { return verbosity_; }

   void print() const {
     const std::string blasStr[3] = {"gtsam", "sbm", "sbm-mt"};

@@ -67,7 +64,6 @@ struct ConjugateGradientParameters : public IterativeOptimizationParameters {
       << ", resetIter = " << reset_
       << ", eps_rel = " << epsilon_rel_
       << ", eps_abs = " << epsilon_abs_
-      << ", verbosity = " << verbosity_
       << std::endl;
   }
 };
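Note: with the duplicated member gone, verbosity is configured once, through the field inherited from IterativeOptimizationParameters. A minimal sketch of constructing parameters after this change (only the names above come from the commit; the usage itself is illustrative):

    // Slimmed-down constructor: the Verbosity argument no longer exists here.
    ConjugateGradientParameters cg(1, 500, 501, 1e-3, 1e-3, ConjugateGradientParameters::GTSAM);
    // Verbosity now lives only in the base class member.
    cg.verbosity_ = ConjugateGradientParameters::ERROR;
    cg.print();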

View File

@@ -86,6 +86,16 @@ Errors Errors::operator-(const Errors& b) const {
   return result;
 }

+/* ************************************************************************* */
+Errors Errors::operator-() const {
+  Errors result;
+  BOOST_FOREACH(const Vector& ai, *this)
+    result.push_back(-ai);
+  return result;
+}
+
 /* ************************************************************************* */
 double dot(const Errors& a, const Errors& b) {
 #ifndef NDEBUG

View File

@@ -45,6 +45,8 @@ namespace gtsam {
     /** subtraction */
     Errors operator-(const Errors& b) const;

+    /** negation */
+    Errors operator-() const ;

 }; // Errors

View File

@@ -99,6 +99,24 @@ void optimizeInPlace(const GaussianBayesNet& bn, VectorValues& x) {
   }
 }

+/* ************************************************************************* */
+VectorValues backSubstitute(const GaussianBayesNet& bn, const VectorValues& input) {
+  VectorValues output = input;
+  BOOST_REVERSE_FOREACH(const boost::shared_ptr<const GaussianConditional> cg, bn) {
+    const Index key = *(cg->beginFrontals());
+    Vector xS = internal::extractVectorValuesSlices(output, cg->beginParents(), cg->endParents());
+    xS = input[key] - cg->get_S() * xS;
+    output[key] = cg->get_R().triangularView<Eigen::Upper>().solve(xS);
+  }
+  BOOST_FOREACH(const boost::shared_ptr<const GaussianConditional> cg, bn) {
+    cg->scaleFrontalsBySigma(output);
+  }
+  return output;
+}
+
 /* ************************************************************************* */
 // gy=inv(L)*gx by solving L*gy=gx.
 // gy=inv(R'*inv(Sigma))*gx

View File

@@ -106,6 +106,12 @@ namespace gtsam {
    * */
   void optimizeGradientSearchInPlace(const GaussianBayesNet& bn, VectorValues& grad);

+  /**
+   * Backsubstitute
+   * gy=inv(R*inv(Sigma))*gx
+   */
+  VectorValues backSubstitute(const GaussianBayesNet& bn, const VectorValues& gx);
+
   /**
    * Transpose Backsubstitute
    * gy=inv(L)*gx by solving L*gy=gx.
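For reference, a sketch of the recurrence the new backSubstitute implements, read off the .cpp change above (notation assumed here, not from the source): with $R_f$, $S_f$ the frontal and parent blocks of each conditional and $\sigma_f$ its sigmas, a reverse pass computes

    x_f = R_f^{-1} \left( gx_f - S_f \, x_{\mathrm{parents}(f)} \right)

followed by a forward pass that rescales each frontal, $gy_f = \sigma_f \odot x_f$; stacked over the whole Bayes net this gives $gy = \Sigma R^{-1} gx = (R\,\Sigma^{-1})^{-1} gx$, matching the gy=inv(R*inv(Sigma))*gx doc comment.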

View File

@@ -23,8 +23,7 @@ namespace gtsam {
   public:

     typedef boost::shared_ptr<IterativeOptimizationParameters> shared_ptr;

-    enum Kernel { PCG = 0, LSPCG = 1 } kernel_ ;                           ///< Iterative Method Kernel
+    enum Kernel { CG = 0 } kernel_ ;                                       ///< Iterative Method Kernel
     enum Verbosity { SILENT = 0, COMPLEXITY = 1, ERROR = 2} verbosity_ ;   ///< Verbosity

   public:

@@ -32,7 +31,7 @@ namespace gtsam {
     IterativeOptimizationParameters(const IterativeOptimizationParameters &p)
       : kernel_(p.kernel_), verbosity_(p.verbosity_) {}

-    IterativeOptimizationParameters(Kernel kernel = LSPCG, Verbosity verbosity = SILENT)
+    IterativeOptimizationParameters(Kernel kernel = CG, Verbosity verbosity = SILENT)
       : kernel_(kernel), verbosity_(verbosity) {}

     virtual ~IterativeOptimizationParameters() {}

@@ -42,7 +41,7 @@ namespace gtsam {
     inline Verbosity verbosity() const { return verbosity_; }

     void print() const {
-      const std::string kernelStr[2] = {"pcg", "lspcg"};
+      const std::string kernelStr[1] = {"cg"};
       std::cout << "IterativeOptimizationParameters: "
                 << "kernel = " << kernelStr[kernel_]
                 << ", verbosity = " << verbosity_ << std::endl;

View File

@@ -96,9 +96,7 @@ VectorValues::shared_ptr SimpleSPCGSolver::optimize (const VectorValues &initial
   double gamma = s.vector().squaredNorm(), new_gamma = 0.0, alpha = 0.0, beta = 0.0 ;

-  const double threshold =
-      std::max(parameters_.epsilon_abs(),
-               parameters_.epsilon() * parameters_.epsilon() * gamma);
+  const double threshold = std::max(parameters_.epsilon_abs(), parameters_.epsilon() * parameters_.epsilon() * gamma);

   const size_t iMaxIterations = parameters_.maxIterations();
   const Parameters::Verbosity verbosity = parameters_.verbosity();

@@ -217,10 +215,6 @@ SimpleSPCGSolver::splitGraph(const GaussianFactorGraph &gfg) {
     else Ac->push_back(boost::dynamic_pointer_cast<JacobianFactor>(gf));
   }

-  // gfg.print("gfg");
-  // At->print("At");
-  // Ac->print("Ac");
-
   return boost::tie(At, Ac);
 }
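The collapsed threshold line is the usual mixed relative/absolute CG stopping rule: with $\gamma_0 = \lVert s_0 \rVert^2$ the initial squared residual norm, iteration stops once

    \gamma_k < \max\left( \epsilon_{\mathrm{abs}}, \; \epsilon^2 \gamma_0 \right),

i.e. when the residual has shrunk by a relative factor $\epsilon$ or is absolutely small (a reading of the code above, not a claim from the commit message).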

View File

@@ -19,6 +19,11 @@
 namespace gtsam {

+struct SimpleSPCGSolverParameters : public ConjugateGradientParameters {
+  typedef ConjugateGradientParameters Base;
+  SimpleSPCGSolverParameters() : Base() {}
+};
+
 /**
  * This class gives a simple implementation to the SPCG solver presented in Dellaert et al in IROS'10.
  *

@@ -40,7 +45,7 @@ class SimpleSPCGSolver : public IterativeSolver {
 public:

   typedef IterativeSolver Base;
-  typedef ConjugateGradientParameters Parameters;
+  typedef SimpleSPCGSolverParameters Parameters;
   typedef boost::shared_ptr<IterativeSolver> shared_ptr;

 protected:
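A hypothetical direct use of the solver with its new parameter type; gfg and initial are assumed to exist and are not part of this commit:

    SimpleSPCGSolver::Parameters parameters;     // now SimpleSPCGSolverParameters
    SimpleSPCGSolver solver(gfg, parameters);    // splits gfg into tree (At) and constraint (Ac) parts
    VectorValues::shared_ptr delta = solver.optimize(initial);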

View File

@@ -28,27 +28,17 @@ namespace gtsam {
   /* ************************************************************************* */
   SubgraphPreconditioner::SubgraphPreconditioner(const sharedFG& Ab1, const sharedFG& Ab2,
     const sharedBayesNet& Rc1, const sharedValues& xbar) :
-    Ab1_(Ab1), Ab2_(Ab2), Rc1_(Rc1), xbar_(xbar), b2bar_(gaussianErrors_(*Ab2_,*xbar)) {
+    Ab1_(Ab1), Ab2_(Ab2), Rc1_(Rc1), xbar_(xbar), b2bar_(new Errors(-gaussianErrors(*Ab2_,*xbar))) {
   }

   /* ************************************************************************* */
   // x = xbar + inv(R1)*y
   VectorValues SubgraphPreconditioner::x(const VectorValues& y) const {
-    VectorValues x = y;
-    optimizeInPlace(*Rc1_,x);
-    x += *xbar_;
-    return x;
+    return *xbar_ + gtsam::backSubstitute(*Rc1_, y);
   }

-  // SubgraphPreconditioner SubgraphPreconditioner::add_priors(double sigma) const {
-  //   SubgraphPreconditioner result = *this ;
-  //   result.Ab2_ = sharedFG(new GaussianFactorGraph(Ab2_->add_priors(sigma))) ;
-  //   return result ;
-  // }
-
   /* ************************************************************************* */
   double error(const SubgraphPreconditioner& sp, const VectorValues& y) {
     Errors e(y);
     VectorValues x = sp.x(y);
     Errors e2 = gaussianErrors(*sp.Ab2(),x);

@@ -58,12 +48,11 @@ namespace gtsam {
   /* ************************************************************************* */
   // gradient is y + inv(R1')*A2'*(A2*inv(R1)*y-b2bar),
   VectorValues gradient(const SubgraphPreconditioner& sp, const VectorValues& y) {
-    VectorValues x = sp.x(y); // x = inv(R1)*y
-    Errors e2 = gaussianErrors(*sp.Ab2(),x);
-    VectorValues gx2 = VectorValues::Zero(y);
-    gtsam::transposeMultiplyAdd(*sp.Ab2(),1.0,e2,gx2); // A2'*e2;
-    VectorValues gy2 = gtsam::backSubstituteTranspose(*sp.Rc1(), gx2); // inv(R1')*gx2
-    return y + gy2;
+    VectorValues x = gtsam::backSubstitute(*sp.Rc1(), y);  /* inv(R1)*y */
+    Errors e = (*sp.Ab2()*x - *sp.b2bar());                /* (A2*inv(R1)*y-b2bar) */
+    VectorValues v = VectorValues::Zero(x);
+    transposeMultiplyAdd(*sp.Ab2(), 1.0, e, v);            /* A2'*(A2*inv(R1)*y-b2bar) */
+    return y + gtsam::backSubstituteTranspose(*sp.Rc1(), v);
   }

   /* ************************************************************************* */

@@ -71,13 +60,9 @@ namespace gtsam {
   Errors operator*(const SubgraphPreconditioner& sp, const VectorValues& y) {
     Errors e(y);

-    // Add A2 contribution
-    VectorValues x = y; // TODO avoid ?
-    gtsam::optimizeInPlace(*sp.Rc1(), x); // x=inv(R1)*y
-    Errors e2 = *sp.Ab2() * x; // A2*x
+    VectorValues x = gtsam::backSubstitute(*sp.Rc1(), y);  /* x=inv(R1)*y */
+    Errors e2 = *sp.Ab2() * x;                             /* A2*x */
     e.splice(e.end(), e2);
     return e;
   }

@@ -85,16 +70,14 @@ namespace gtsam {
   // In-place version that overwrites e
   void multiplyInPlace(const SubgraphPreconditioner& sp, const VectorValues& y, Errors& e) {
     Errors::iterator ei = e.begin();
     for ( Index i = 0 ; i < y.size() ; ++i, ++ei ) {
       *ei = y[i];
     }

     // Add A2 contribution
-    VectorValues x = y; // TODO avoid ?
-    gtsam::optimizeInPlace(*sp.Rc1(), x); // x=inv(R1)*y
-    gtsam::multiplyInPlace(*sp.Ab2(),x,ei); // use iterator version
+    VectorValues x = gtsam::backSubstitute(*sp.Rc1(), y);  // x=inv(R1)*y
+    gtsam::multiplyInPlace(*sp.Ab2(), x, ei);              // use iterator version
   }

   /* ************************************************************************* */

@@ -114,13 +97,12 @@ namespace gtsam {
   void transposeMultiplyAdd
     (const SubgraphPreconditioner& sp, double alpha, const Errors& e, VectorValues& y) {
     Errors::const_iterator it = e.begin();
     for ( Index i = 0 ; i < y.size() ; ++i, ++it ) {
       const Vector& ei = *it;
-      axpy(alpha,ei,y[i]);
+      axpy(alpha, ei, y[i]);
     }
-    sp.transposeMultiplyAdd2(alpha,it,e.end(),y);
+    sp.transposeMultiplyAdd2(alpha, it, e.end(), y);
   }

   /* ************************************************************************* */
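Taken together with the header change below, a sketch of the algebra these routines implement (notation read off the comments; $\bar b_2$ denotes the cached offset b2bar_, equal to $A_2 \bar x - b_2$ after this commit):

    f(y)        = \tfrac12 \lVert y \rVert^2 + \tfrac12 \lVert A_2 R_1^{-1} y - \bar b_2 \rVert^2
    \nabla f(y) = y + R_1^{-T} A_2^{T} \left( A_2 R_1^{-1} y - \bar b_2 \right)

which matches the gradient comment; operator* stacks the two residual blocks $[\,y \,;\, A_2 R_1^{-1} y\,]$ and transposeMultiplyAdd applies the corresponding adjoint.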

View File

@@ -42,8 +42,8 @@ namespace gtsam {
   private:
     sharedFG Ab1_, Ab2_;
     sharedBayesNet Rc1_;
-    sharedValues xbar_;
-    sharedErrors b2bar_; /** b2 - A2*xbar */
+    sharedValues xbar_;  ///< A1 \ b1
+    sharedErrors b2bar_; ///< A2*xbar - b2

   public:

@@ -66,6 +66,9 @@ namespace gtsam {
     /** Access Rc1 */
     const sharedBayesNet& Rc1() const { return Rc1_; }

+    /** Access b2bar */
+    const sharedErrors b2bar() const { return b2bar_; }
+
     /**
      * Add zero-mean i.i.d. Gaussian prior terms to each variable
      * @param sigma Standard deviation of Gaussian

View File

@@ -61,9 +61,7 @@ SubgraphSolver::SubgraphSolver(const GaussianFactorGraph &gfg, const Parameters
 VectorValues::shared_ptr SubgraphSolver::optimize() {

-  // preconditioned conjugate gradient
-  VectorValues zeros = pc_->zero();
-  VectorValues ybar = conjugateGradients<SubgraphPreconditioner, VectorValues, Errors> (*pc_, zeros, parameters_);
+  VectorValues ybar = conjugateGradients<SubgraphPreconditioner, VectorValues, Errors>(*pc_, pc_->zero(), parameters_);

   boost::shared_ptr<VectorValues> xbar = boost::make_shared<VectorValues>() ;
   *xbar = pc_->x(ybar);

View File

@@ -19,6 +19,11 @@
 namespace gtsam {

+struct SubgraphSolverParameters : public ConjugateGradientParameters {
+  typedef ConjugateGradientParameters Base;
+  SubgraphSolverParameters() : Base() {}
+};
+
 /**
  * A linear system solver using subgraph preconditioning conjugate gradient
  */

@@ -27,18 +32,17 @@ class SubgraphSolver : public IterativeSolver {
 public:

-  typedef ConjugateGradientParameters Parameters;
+  typedef SubgraphSolverParameters Parameters;

+protected:
   Parameters parameters_;
   SubgraphPreconditioner::shared_ptr pc_;   ///< preconditioner object

 public:

   SubgraphSolver(const GaussianFactorGraph &gfg, const Parameters &parameters);
   virtual ~SubgraphSolver() {}
   virtual VectorValues::shared_ptr optimize () ;

 protected:
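A hypothetical standalone use of the new parameter type, using only the declarations above (gfg is an assumed pre-built GaussianFactorGraph, not part of this commit):

    SubgraphSolverParameters parameters;                  // CG defaults via the Base typedef
    SubgraphSolver solver(gfg, parameters);               // sets up the SubgraphPreconditioner pc_
    VectorValues::shared_ptr xbar = solver.optimize();    // PCG on y, then xbar = pc_->x(ybar)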

View File

@@ -76,15 +76,19 @@ void LevenbergMarquardtOptimizer::iterate() {
   }
   else if ( params_.isCG() ) {
-    ConjugateGradientParameters::shared_ptr params (!params_.iterativeParams ?
-        boost::make_shared<ConjugateGradientParameters>() :
-        boost::dynamic_pointer_cast<ConjugateGradientParameters>(params_.iterativeParams));
-    if ( !params ) throw runtime_error("LMSolver: spcg parameter dynamic casting failed");
-
-    SimpleSPCGSolver solver(dampedSystem, *params);
-    //SubgraphSolver solver(dampedSystem, *params);
-    delta = *solver.optimize();
+    if ( !params_.iterativeParams ) throw runtime_error("LMSolver: cg parameter has to be assigned ...");
+
+    if ( boost::dynamic_pointer_cast<SimpleSPCGSolverParameters>(params_.iterativeParams) ) {
+      SimpleSPCGSolver solver (dampedSystem, *boost::dynamic_pointer_cast<SimpleSPCGSolverParameters>(params_.iterativeParams));
+      delta = *solver.optimize();
+    }
+    else if ( boost::dynamic_pointer_cast<SubgraphSolverParameters>(params_.iterativeParams) ) {
+      SubgraphSolver solver (dampedSystem, *boost::dynamic_pointer_cast<SubgraphSolverParameters>(params_.iterativeParams));
+      delta = *solver.optimize();
+    }
+    else {
+      throw runtime_error("LMSolver: special cg parameter type is not handled in LM solver ...");
+    }
   }
   else {
     throw runtime_error("Optimization parameter is invalid: LevenbergMarquardtParams::elimination");

View File

@@ -118,13 +118,10 @@ namespace gtsam {
   // conjugate gradient method.
   // S: linear system, V: step vector, E: errors
   template<class S, class V, class E>
-  V conjugateGradients(
-      const S& Ab,
-      V x,
-      const ConjugateGradientParameters &parameters,
-      bool steepest = false) {
+  V conjugateGradients(const S& Ab, V x, const ConjugateGradientParameters &parameters, bool steepest = false) {

     CGState<S, V, E> state(Ab, x, parameters, steepest);

     if (parameters.verbosity() != ConjugateGradientParameters::SILENT)
       std::cout << "CG: epsilon = " << parameters.epsilon()
                 << ", maxIterations = " << parameters.maxIterations()

View File

@@ -44,6 +44,7 @@ namespace gtsam {
  * Needed to run Conjugate Gradients on matrices
  * */
 class System {
+
 private:
   const Matrix& A_;
   const Vector& b_;