From a0c77dcc1ca8584a4a1b5c3fb4fd31076edbf92a Mon Sep 17 00:00:00 2001 From: Yong-Dian Jian Date: Tue, 3 Jun 2014 23:52:35 -0400 Subject: [PATCH 1/5] remove unused variables in iterative solvers and rename accordingly. --- examples/Pose2SLAMwSPCG.cpp | 2 +- gtsam.h | 4 +--- gtsam/linear/IterativeSolver.cpp | 21 +++++++++----------- gtsam/linear/IterativeSolver.h | 16 ++++----------- gtsam/nonlinear/DoglegOptimizer.cpp | 2 +- gtsam/nonlinear/NonlinearOptimizer.cpp | 2 +- gtsam/nonlinear/NonlinearOptimizerParams.cpp | 12 +++++------ gtsam/nonlinear/NonlinearOptimizerParams.h | 6 +++--- 8 files changed, 26 insertions(+), 39 deletions(-) diff --git a/examples/Pose2SLAMwSPCG.cpp b/examples/Pose2SLAMwSPCG.cpp index 2b2f63064..8e8e28570 100644 --- a/examples/Pose2SLAMwSPCG.cpp +++ b/examples/Pose2SLAMwSPCG.cpp @@ -104,7 +104,7 @@ int main(int argc, char** argv) { LevenbergMarquardtParams parameters; parameters.verbosity = NonlinearOptimizerParams::ERROR; parameters.verbosityLM = LevenbergMarquardtParams::LAMBDA; - parameters.linearSolverType = NonlinearOptimizerParams::CONJUGATE_GRADIENT; + parameters.linearSolverType = NonlinearOptimizerParams::Iterative; { parameters.iterativeParams = boost::make_shared(); diff --git a/gtsam.h b/gtsam.h index b7178d534..2cd6415a6 100644 --- a/gtsam.h +++ b/gtsam.h @@ -1481,9 +1481,7 @@ class GaussianISAM { #include virtual class IterativeOptimizationParameters { - string getKernel() const ; string getVerbosity() const; - void setKernel(string s) ; void setVerbosity(string s) ; void print() const; }; @@ -1860,7 +1858,7 @@ virtual class NonlinearOptimizerParams { bool isMultifrontal() const; bool isSequential() const; bool isCholmod() const; - bool isCG() const; + bool isIterative() const; }; bool checkConvergence(double relativeErrorTreshold, diff --git a/gtsam/linear/IterativeSolver.cpp b/gtsam/linear/IterativeSolver.cpp index 2bba3e12b..b022a26d8 100644 --- a/gtsam/linear/IterativeSolver.cpp +++ b/gtsam/linear/IterativeSolver.cpp @@ -11,18 +11,19 @@ namespace gtsam { -std::string IterativeOptimizationParameters::getKernel() const { return kernelTranslator(kernel_); } +/*****************************************************************************/ std::string IterativeOptimizationParameters::getVerbosity() const { return verbosityTranslator(verbosity_); } -void IterativeOptimizationParameters::setKernel(const std::string &src) { kernel_ = kernelTranslator(src); } + +/*****************************************************************************/ void IterativeOptimizationParameters::setVerbosity(const std::string &src) { verbosity_ = verbosityTranslator(src); } -IterativeOptimizationParameters::Kernel IterativeOptimizationParameters::kernelTranslator(const std::string &src) { - std::string s = src; boost::algorithm::to_upper(s); - if (s == "CG") return IterativeOptimizationParameters::CG; - /* default is cg */ - else return IterativeOptimizationParameters::CG; +/*****************************************************************************/ +void IterativeOptimizationParameters::print() const { + std::cout << "IterativeOptimizationParameters" << std::endl + << "verbosity: " << verbosityTranslator(verbosity_) << std::endl; } +/*****************************************************************************/ IterativeOptimizationParameters::Verbosity IterativeOptimizationParameters::verbosityTranslator(const std::string &src) { std::string s = src; boost::algorithm::to_upper(s); if (s == "SILENT") return IterativeOptimizationParameters::SILENT; @@ -32,11 +33,7 @@ 
IterativeOptimizationParameters::Verbosity IterativeOptimizationParameters::verb else return IterativeOptimizationParameters::SILENT; } -std::string IterativeOptimizationParameters::kernelTranslator(IterativeOptimizationParameters::Kernel k) { - if ( k == CG ) return "CG"; - else return "UNKNOWN"; -} - +/*****************************************************************************/ std::string IterativeOptimizationParameters::verbosityTranslator(IterativeOptimizationParameters::Verbosity verbosity) { if (verbosity == SILENT) return "SILENT"; else if (verbosity == COMPLEXITY) return "COMPLEXITY"; diff --git a/gtsam/linear/IterativeSolver.h b/gtsam/linear/IterativeSolver.h index 988293a4f..b4e2e6d49 100644 --- a/gtsam/linear/IterativeSolver.h +++ b/gtsam/linear/IterativeSolver.h @@ -30,21 +30,19 @@ namespace gtsam { public: typedef boost::shared_ptr shared_ptr; - enum Kernel { CG = 0 } kernel_ ; ///< Iterative Method Kernel enum Verbosity { SILENT = 0, COMPLEXITY, ERROR } verbosity_; public: IterativeOptimizationParameters(const IterativeOptimizationParameters &p) - : kernel_(p.kernel_), verbosity_(p.verbosity_) {} + : verbosity_(p.verbosity_) {} - IterativeOptimizationParameters(const Kernel kernel = CG, const Verbosity verbosity = SILENT) - : kernel_(kernel), verbosity_(verbosity) {} + IterativeOptimizationParameters(const Verbosity verbosity = SILENT) + : verbosity_(verbosity) {} virtual ~IterativeOptimizationParameters() {} /* general interface */ - inline Kernel kernel() const { return kernel_; } inline Verbosity verbosity() const { return verbosity_; } /* matlab interface */ @@ -53,15 +51,9 @@ namespace gtsam { void setKernel(const std::string &s) ; void setVerbosity(const std::string &s) ; - virtual void print() const { - std::cout << "IterativeOptimizationParameters" << std::endl - << "kernel: " << kernelTranslator(kernel_) << std::endl - << "verbosity: " << verbosityTranslator(verbosity_) << std::endl; - } + virtual void print() const ; - static Kernel kernelTranslator(const std::string &s); static Verbosity verbosityTranslator(const std::string &s); - static std::string kernelTranslator(Kernel k); static std::string verbosityTranslator(Verbosity v); }; diff --git a/gtsam/nonlinear/DoglegOptimizer.cpp b/gtsam/nonlinear/DoglegOptimizer.cpp index 03da76eb6..082cc66c8 100644 --- a/gtsam/nonlinear/DoglegOptimizer.cpp +++ b/gtsam/nonlinear/DoglegOptimizer.cpp @@ -76,7 +76,7 @@ void DoglegOptimizer::iterate(void) { result = DoglegOptimizerImpl::Iterate(state_.Delta, DoglegOptimizerImpl::ONE_STEP_PER_ITERATION, dx_u, dx_n, bn, graph_, state_.values, state_.error, dlVerbose); } - else if ( params_.isCG() ) { + else if ( params_.isIterative() ) { throw runtime_error("Dogleg is not currently compatible with the linear conjugate gradient solver"); } else { diff --git a/gtsam/nonlinear/NonlinearOptimizer.cpp b/gtsam/nonlinear/NonlinearOptimizer.cpp index 54e5cb9e3..678cac6b4 100644 --- a/gtsam/nonlinear/NonlinearOptimizer.cpp +++ b/gtsam/nonlinear/NonlinearOptimizer.cpp @@ -102,7 +102,7 @@ VectorValues NonlinearOptimizer::solve(const GaussianFactorGraph &gfg, } else if (params.isSequential()) { delta = gfg.eliminateSequential(*params.ordering, params.getEliminationFunction())->optimize(); - } else if (params.isCG()) { + } else if (params.isIterative()) { if (!params.iterativeParams) throw std::runtime_error( "NonlinearOptimizer::solve: cg parameter has to be assigned ..."); diff --git a/gtsam/nonlinear/NonlinearOptimizerParams.cpp b/gtsam/nonlinear/NonlinearOptimizerParams.cpp index 
7b0e73985..e0b6e0c6c 100644 --- a/gtsam/nonlinear/NonlinearOptimizerParams.cpp +++ b/gtsam/nonlinear/NonlinearOptimizerParams.cpp @@ -99,8 +99,8 @@ void NonlinearOptimizerParams::print(const std::string& str) const { case CHOLMOD: std::cout << " linear solver type: CHOLMOD\n"; break; - case CONJUGATE_GRADIENT: - std::cout << " linear solver type: CONJUGATE GRADIENT\n"; + case Iterative: + std::cout << " linear solver type: ITERATIVE\n"; break; default: std::cout << " linear solver type: (invalid)\n"; @@ -127,8 +127,8 @@ std::string NonlinearOptimizerParams::linearSolverTranslator( return "SEQUENTIAL_CHOLESKY"; case SEQUENTIAL_QR: return "SEQUENTIAL_QR"; - case CONJUGATE_GRADIENT: - return "CONJUGATE_GRADIENT"; + case Iterative: + return "ITERATIVE"; case CHOLMOD: return "CHOLMOD"; default: @@ -148,8 +148,8 @@ NonlinearOptimizerParams::LinearSolverType NonlinearOptimizerParams::linearSolve return SEQUENTIAL_CHOLESKY; if (linearSolverType == "SEQUENTIAL_QR") return SEQUENTIAL_QR; - if (linearSolverType == "CONJUGATE_GRADIENT") - return CONJUGATE_GRADIENT; + if (linearSolverType == "ITERATIVE") + return Iterative; if (linearSolverType == "CHOLMOD") return CHOLMOD; throw std::invalid_argument( diff --git a/gtsam/nonlinear/NonlinearOptimizerParams.h b/gtsam/nonlinear/NonlinearOptimizerParams.h index 10d468384..641c6da24 100644 --- a/gtsam/nonlinear/NonlinearOptimizerParams.h +++ b/gtsam/nonlinear/NonlinearOptimizerParams.h @@ -99,7 +99,7 @@ public: MULTIFRONTAL_QR, SEQUENTIAL_CHOLESKY, SEQUENTIAL_QR, - CONJUGATE_GRADIENT, /* Experimental Flag */ + Iterative, /* Experimental Flag */ CHOLMOD, /* Experimental Flag */ }; @@ -121,8 +121,8 @@ public: return (linearSolverType == CHOLMOD); } - inline bool isCG() const { - return (linearSolverType == CONJUGATE_GRADIENT); + inline bool isIterative() const { + return (linearSolverType == Iterative); } GaussianFactorGraph::Eliminate getEliminationFunction() const { From c844ab19e3e7dd6665fb98dcbd834d3cb70b92e8 Mon Sep 17 00:00:00 2001 From: Yong-Dian Jian Date: Wed, 4 Jun 2014 23:23:17 -0400 Subject: [PATCH 2/5] move blas_kernel_ up --- gtsam/linear/ConjugateGradientSolver.cpp | 47 ++++++++++++++++++++++++ gtsam/linear/ConjugateGradientSolver.h | 33 ++++++++++------- gtsam/linear/IterativeSolver.h | 6 --- 3 files changed, 66 insertions(+), 20 deletions(-) create mode 100644 gtsam/linear/ConjugateGradientSolver.cpp diff --git a/gtsam/linear/ConjugateGradientSolver.cpp b/gtsam/linear/ConjugateGradientSolver.cpp new file mode 100644 index 000000000..4072c5080 --- /dev/null +++ b/gtsam/linear/ConjugateGradientSolver.cpp @@ -0,0 +1,47 @@ +/* + * ConjugateGradientSolver.cpp + * + * Created on: Jun 4, 2014 + * Author: ydjian + */ + +#include +#include + +namespace gtsam { + +/*****************************************************************************/ +std::string ConjugateGradientParameters::blasTranslator(const BLASKernel value) { + std::string s; + switch (value) { + case ConjugateGradientParameters::GTSAM: s = "GTSAM" ; break; + default: s = "UNDEFINED" ; break; + } + return s; +} + +/*****************************************************************************/ +ConjugateGradientParameters::BLASKernel ConjugateGradientParameters::blasTranslator(const std::string &src) { + std::string s = src; boost::algorithm::to_upper(s); + if (s == "GTSAM") return ConjugateGradientParameters::GTSAM; + + /* default is SBM */ + return ConjugateGradientParameters::GTSAM; +} + +/*****************************************************************************/ + 
+/*****************************************************************************/ +void ConjugateGradientParameters::print() const { + Base::print(); + std::cout << "ConjugateGradientParameters" << std::endl + << "minIter: " << minIterations_ << std::endl + << "maxIter: " << maxIterations_ << std::endl + << "resetIter: " << reset_ << std::endl + << "eps_rel: " << epsilon_rel_ << std::endl + << "eps_abs: " << epsilon_abs_ << std::endl; + } + +} + + diff --git a/gtsam/linear/ConjugateGradientSolver.h b/gtsam/linear/ConjugateGradientSolver.h index d1b3b2c7e..ce27b7a96 100644 --- a/gtsam/linear/ConjugateGradientSolver.h +++ b/gtsam/linear/ConjugateGradientSolver.h @@ -20,6 +20,7 @@ namespace gtsam { */ class ConjugateGradientParameters : public IterativeOptimizationParameters { + public: typedef IterativeOptimizationParameters Base; typedef boost::shared_ptr shared_ptr; @@ -30,14 +31,23 @@ public: double epsilon_rel_; ///< threshold for relative error decrease double epsilon_abs_; ///< threshold for absolute error decrease - ConjugateGradientParameters() - : minIterations_(1), maxIterations_(500), reset_(501), epsilon_rel_(1e-3), epsilon_abs_(1e-3){} + /* Matrix Operation Kernel */ + enum BLASKernel { + GTSAM = 0, ///< Jacobian Factor Graph of GTSAM + } blas_kernel_ ; - ConjugateGradientParameters(size_t minIterations, size_t maxIterations, size_t reset, double epsilon_rel, double epsilon_abs) - : minIterations_(minIterations), maxIterations_(maxIterations), reset_(reset), epsilon_rel_(epsilon_rel), epsilon_abs_(epsilon_abs){} + ConjugateGradientParameters() + : minIterations_(1), maxIterations_(500), reset_(501), epsilon_rel_(1e-3), + epsilon_abs_(1e-3), blas_kernel_(GTSAM) {} + + ConjugateGradientParameters(size_t minIterations, size_t maxIterations, size_t reset, + double epsilon_rel, double epsilon_abs, BLASKernel blas) + : minIterations_(minIterations), maxIterations_(maxIterations), reset_(reset), + epsilon_rel_(epsilon_rel), epsilon_abs_(epsilon_abs), blas_kernel_(blas) {} ConjugateGradientParameters(const ConjugateGradientParameters &p) - : Base(p), minIterations_(p.minIterations_), maxIterations_(p.maxIterations_), reset_(p.reset_), epsilon_rel_(p.epsilon_rel_), epsilon_abs_(p.epsilon_abs_) {} + : Base(p), minIterations_(p.minIterations_), maxIterations_(p.maxIterations_), reset_(p.reset_), + epsilon_rel_(p.epsilon_rel_), epsilon_abs_(p.epsilon_abs_), blas_kernel_(GTSAM) {} /* general interface */ inline size_t minIterations() const { return minIterations_; } @@ -61,15 +71,10 @@ public: inline void setEpsilon_rel(double value) { epsilon_rel_ = value; } inline void setEpsilon_abs(double value) { epsilon_abs_ = value; } - virtual void print() const { - Base::print(); - std::cout << "ConjugateGradientParameters" << std::endl - << "minIter: " << minIterations_ << std::endl - << "maxIter: " << maxIterations_ << std::endl - << "resetIter: " << reset_ << std::endl - << "eps_rel: " << epsilon_rel_ << std::endl - << "eps_abs: " << epsilon_abs_ << std::endl; - } + static std::string blasTranslator(const BLASKernel k) ; + static BLASKernel blasTranslator(const std::string &s) ; + + virtual void print() const; }; } diff --git a/gtsam/linear/IterativeSolver.h b/gtsam/linear/IterativeSolver.h index b4e2e6d49..daf5960e7 100644 --- a/gtsam/linear/IterativeSolver.h +++ b/gtsam/linear/IterativeSolver.h @@ -14,7 +14,6 @@ #include #include -#include namespace gtsam { @@ -46,9 +45,7 @@ namespace gtsam { inline Verbosity verbosity() const { return verbosity_; } /* matlab interface */ - std::string 
getKernel() const ; std::string getVerbosity() const; - void setKernel(const std::string &s) ; void setVerbosity(const std::string &s) ; virtual void print() const ; @@ -68,9 +65,6 @@ namespace gtsam { /* interface to the nonlinear optimizer */ virtual VectorValues optimize (const VectorValues &initial) = 0; - - /* update interface to the nonlinear optimizer */ - virtual void replaceFactors(const boost::shared_ptr &factorGraph, const double lambda) {} }; } From e8d38099174b8d7604c95ff7260ac48220381bb1 Mon Sep 17 00:00:00 2001 From: Yong-Dian Jian Date: Sun, 8 Jun 2014 00:34:23 -0400 Subject: [PATCH 3/5] add new generic pcgsolver and preconditioner classes add a unit test for the PCGSolver class --- gtsam/inference/Ordering.h | 11 + gtsam/linear/ConjugateGradientSolver.cpp | 26 +- gtsam/linear/ConjugateGradientSolver.h | 75 +++- gtsam/linear/GaussianFactorGraph.cpp | 14 + gtsam/linear/GaussianFactorGraph.h | 3 + gtsam/linear/IterativeSolver.cpp | 101 +++++- gtsam/linear/IterativeSolver.h | 101 +++++- gtsam/linear/PCGSolver.cpp | 437 +++++++++++++++++++++++ gtsam/linear/PCGSolver.h | 236 ++++++++++++ gtsam/linear/Preconditioner.cpp | 212 +++++++++++ gtsam/linear/Preconditioner.h | 173 +++++++++ gtsam/linear/SubgraphSolver.h | 17 +- gtsam/nonlinear/NonlinearOptimizer.cpp | 22 +- tests/testPCGSolver.cpp | 77 ++++ 14 files changed, 1453 insertions(+), 52 deletions(-) create mode 100644 gtsam/linear/PCGSolver.cpp create mode 100644 gtsam/linear/PCGSolver.h create mode 100644 gtsam/linear/Preconditioner.cpp create mode 100644 gtsam/linear/Preconditioner.h create mode 100644 tests/testPCGSolver.cpp diff --git a/gtsam/inference/Ordering.h b/gtsam/inference/Ordering.h index ea2ad7eda..7b1a2bb2e 100644 --- a/gtsam/inference/Ordering.h +++ b/gtsam/inference/Ordering.h @@ -17,9 +17,11 @@ #pragma once +#include #include #include +#include #include #include #include @@ -135,6 +137,15 @@ namespace gtsam { static GTSAM_EXPORT Ordering COLAMDConstrained(const VariableIndex& variableIndex, const FastMap& groups); + /// Return a natural Ordering. 
Typically used by iterative solvers + template + static Ordering Natural(const FactorGraph &fg) { + FastSet src = fg.keys(); + std::vector keys(src.begin(), src.end()); + std::stable_sort(keys.begin(), keys.end()); + return Ordering(keys); + } + /// @} /// @name Testable @{ diff --git a/gtsam/linear/ConjugateGradientSolver.cpp b/gtsam/linear/ConjugateGradientSolver.cpp index 4072c5080..0505f6c01 100644 --- a/gtsam/linear/ConjugateGradientSolver.cpp +++ b/gtsam/linear/ConjugateGradientSolver.cpp @@ -6,10 +6,24 @@ */ #include +#include #include +using namespace std; + namespace gtsam { +/*****************************************************************************/ +void ConjugateGradientParameters::print(ostream &os) const { + Base::print(os); + cout << "ConjugateGradientParameters" << endl + << "minIter: " << minIterations_ << endl + << "maxIter: " << maxIterations_ << endl + << "resetIter: " << reset_ << endl + << "eps_rel: " << epsilon_rel_ << endl + << "eps_abs: " << epsilon_abs_ << endl; +} + /*****************************************************************************/ std::string ConjugateGradientParameters::blasTranslator(const BLASKernel value) { std::string s; @@ -29,18 +43,6 @@ ConjugateGradientParameters::BLASKernel ConjugateGradientParameters::blasTransla return ConjugateGradientParameters::GTSAM; } -/*****************************************************************************/ - -/*****************************************************************************/ -void ConjugateGradientParameters::print() const { - Base::print(); - std::cout << "ConjugateGradientParameters" << std::endl - << "minIter: " << minIterations_ << std::endl - << "maxIter: " << maxIterations_ << std::endl - << "resetIter: " << reset_ << std::endl - << "eps_rel: " << epsilon_rel_ << std::endl - << "eps_abs: " << epsilon_abs_ << std::endl; - } } diff --git a/gtsam/linear/ConjugateGradientSolver.h b/gtsam/linear/ConjugateGradientSolver.h index ce27b7a96..47ec4b405 100644 --- a/gtsam/linear/ConjugateGradientSolver.h +++ b/gtsam/linear/ConjugateGradientSolver.h @@ -12,13 +12,13 @@ #pragma once #include +#include namespace gtsam { /** * parameters for the conjugate gradient method */ - class ConjugateGradientParameters : public IterativeOptimizationParameters { public: @@ -71,10 +71,79 @@ public: inline void setEpsilon_rel(double value) { epsilon_rel_ = value; } inline void setEpsilon_abs(double value) { epsilon_abs_ = value; } + virtual void print(std::ostream &os) const; + static std::string blasTranslator(const BLASKernel k) ; static BLASKernel blasTranslator(const std::string &s) ; - - virtual void print() const; }; +/*********************************************************************************************/ +/* + * A template of linear preconditioned conjugate gradient method. + * System class should support residual(v, g), multiply(v,Av), leftPrecondition(v, S^{-t}v, + * rightPrecondition(v, S^{-1}v), scal(alpha,v), dot(v,v), axpy(alpha,x,y) + * Note that the residual is in the preconditioned domain. Refer to Section 9.2 of Saad's book. 
+ */ +template +V preconditionedConjugateGradient(const S &system, const V &initial, const ConjugateGradientParameters ¶meters) { + + V estimate, residual, direction, q1, q2; + estimate = residual = direction = q1 = q2 = initial; + + system.residual(estimate, q1); /* q1 = b-Ax */ + system.leftPrecondition(q1, residual); /* r = S^{-T} (b-Ax) */ + system.rightPrecondition(residual, direction);/* d = S^{-1} r */ + + double currentGamma = system.dot(residual, residual), prevGamma, alpha, beta; + + const size_t iMaxIterations = parameters.maxIterations(), + iMinIterations = parameters.minIterations(), + iReset = parameters.reset() ; + const double threshold = std::max(parameters.epsilon_abs(), + parameters.epsilon() * parameters.epsilon() * currentGamma); + + if (parameters.verbosity() >= ConjugateGradientParameters::COMPLEXITY ) + std::cout << "[PCG] epsilon = " << parameters.epsilon() + << ", max = " << parameters.maxIterations() + << ", reset = " << parameters.reset() + << ", ||r0||^2 = " << currentGamma + << ", threshold = " << threshold << std::endl; + + size_t k; + for ( k = 1 ; k <= iMaxIterations && (currentGamma > threshold || k <= iMinIterations) ; k++ ) { + + if ( k % iReset == 0 ) { + system.residual(estimate, q1); /* q1 = b-Ax */ + system.leftPrecondition(q1, residual); /* r = S^{-T} (b-Ax) */ + system.rightPrecondition(residual, direction); /* d = S^{-1} r */ + currentGamma = system.dot(residual, residual); + } + system.multiply(direction, q1); /* q1 = A d */ + alpha = currentGamma / system.dot(direction, q1); /* alpha = gamma / (d' A d) */ + system.axpy(alpha, direction, estimate); /* estimate += alpha * direction */ + system.leftPrecondition(q1, q2); /* q2 = S^{-T} * q1 */ + system.axpy(-alpha, q2, residual); /* residual -= alpha * q2 */ + prevGamma = currentGamma; + currentGamma = system.dot(residual, residual); /* gamma = |residual|^2 */ + beta = currentGamma / prevGamma; + system.rightPrecondition(residual, q1); /* q1 = S^{-1} residual */ + system.scal(beta, direction); + system.axpy(1.0, q1, direction); /* direction = q1 + beta * direction */ + + if (parameters.verbosity() >= ConjugateGradientParameters::ERROR ) + std::cout << "[PCG] k = " << k + << ", alpha = " << alpha + << ", beta = " << beta + << ", ||r||^2 = " << currentGamma + << std::endl; + } + if (parameters.verbosity() >= ConjugateGradientParameters::COMPLEXITY ) + std::cout << "[PCG] iterations = " << k + << ", ||r||^2 = " << currentGamma + << std::endl; + + return estimate; +} + + } diff --git a/gtsam/linear/GaussianFactorGraph.cpp b/gtsam/linear/GaussianFactorGraph.cpp index d6836783b..f10309d76 100644 --- a/gtsam/linear/GaussianFactorGraph.cpp +++ b/gtsam/linear/GaussianFactorGraph.cpp @@ -54,6 +54,20 @@ namespace gtsam { return keys; } + /* ************************************************************************* */ + std::map GaussianFactorGraph::getKeyDimMap() const { + map spec; + BOOST_FOREACH ( const GaussianFactor::shared_ptr &gf, *this ) { + for ( GaussianFactor::const_iterator it = gf->begin() ; it != gf->end() ; it++ ) { + map::iterator it2 = spec.find(*it); + if ( it2 == spec.end() ) { + spec.insert(make_pair(*it, gf->getDim(it))); + } + } + } + return spec; + } + /* ************************************************************************* */ vector GaussianFactorGraph::getkeydim() const { // First find dimensions of each variable diff --git a/gtsam/linear/GaussianFactorGraph.h b/gtsam/linear/GaussianFactorGraph.h index 692310f26..910b25d1e 100644 --- a/gtsam/linear/GaussianFactorGraph.h +++ 
b/gtsam/linear/GaussianFactorGraph.h @@ -138,6 +138,9 @@ namespace gtsam { typedef FastSet Keys; Keys keys() const; + /* return a map of (Key, dimension) */ + std::map getKeyDimMap() const; + std::vector getkeydim() const; /** unnormalized error */ diff --git a/gtsam/linear/IterativeSolver.cpp b/gtsam/linear/IterativeSolver.cpp index b022a26d8..36178b191 100644 --- a/gtsam/linear/IterativeSolver.cpp +++ b/gtsam/linear/IterativeSolver.cpp @@ -6,26 +6,42 @@ */ #include +#include +#include #include +#include #include +using namespace std; + namespace gtsam { /*****************************************************************************/ -std::string IterativeOptimizationParameters::getVerbosity() const { return verbosityTranslator(verbosity_); } +string IterativeOptimizationParameters::getVerbosity() const { return verbosityTranslator(verbosity_); } /*****************************************************************************/ -void IterativeOptimizationParameters::setVerbosity(const std::string &src) { verbosity_ = verbosityTranslator(src); } +void IterativeOptimizationParameters::setVerbosity(const string &src) { verbosity_ = verbosityTranslator(src); } /*****************************************************************************/ void IterativeOptimizationParameters::print() const { - std::cout << "IterativeOptimizationParameters" << std::endl - << "verbosity: " << verbosityTranslator(verbosity_) << std::endl; + print(cout); } /*****************************************************************************/ -IterativeOptimizationParameters::Verbosity IterativeOptimizationParameters::verbosityTranslator(const std::string &src) { - std::string s = src; boost::algorithm::to_upper(s); +void IterativeOptimizationParameters::print(ostream &os) const { + os << "IterativeOptimizationParameters:" << endl + << "verbosity: " << verbosityTranslator(verbosity_) << endl; +} + +/*****************************************************************************/ + ostream& operator<<(ostream &os, const IterativeOptimizationParameters &p) { + p.print(os); + return os; +} + +/*****************************************************************************/ +IterativeOptimizationParameters::Verbosity IterativeOptimizationParameters::verbosityTranslator(const string &src) { + string s = src; boost::algorithm::to_upper(s); if (s == "SILENT") return IterativeOptimizationParameters::SILENT; else if (s == "COMPLEXITY") return IterativeOptimizationParameters::COMPLEXITY; else if (s == "ERROR") return IterativeOptimizationParameters::ERROR; @@ -34,13 +50,84 @@ IterativeOptimizationParameters::Verbosity IterativeOptimizationParameters::verb } /*****************************************************************************/ -std::string IterativeOptimizationParameters::verbosityTranslator(IterativeOptimizationParameters::Verbosity verbosity) { + string IterativeOptimizationParameters::verbosityTranslator(IterativeOptimizationParameters::Verbosity verbosity) { if (verbosity == SILENT) return "SILENT"; else if (verbosity == COMPLEXITY) return "COMPLEXITY"; else if (verbosity == ERROR) return "ERROR"; else return "UNKNOWN"; } +/*****************************************************************************/ +VectorValues IterativeSolver::optimize( + const GaussianFactorGraph &gfg, + boost::optional keyInfo, + boost::optional&> lambda) +{ + return optimize( + gfg, + keyInfo ? *keyInfo : KeyInfo(gfg), + lambda ? 
*lambda : std::map()); +} + +/*****************************************************************************/ +VectorValues IterativeSolver::optimize ( + const GaussianFactorGraph &gfg, + const KeyInfo &keyInfo, + const std::map &lambda) +{ + return optimize(gfg, keyInfo, lambda, keyInfo.x0()); +} + +/****************************************************************************/ +KeyInfo::KeyInfo(const GaussianFactorGraph &fg, const Ordering &ordering) + : ordering_(ordering) { + initialize(fg); +} + +/****************************************************************************/ +KeyInfo::KeyInfo(const GaussianFactorGraph &fg) + : ordering_(Ordering::Natural(fg)) { + initialize(fg); +} + +/****************************************************************************/ +void KeyInfo::initialize(const GaussianFactorGraph &fg){ + const map colspec = fg.getKeyDimMap(); + const size_t n = ordering_.size(); + size_t start = 0; + + for ( size_t i = 0 ; i < n ; ++i ) { + const size_t key = ordering_[i]; + const size_t dim = colspec.find(key)->second; + insert(make_pair(key, KeyInfoEntry(i, dim, start))); + start += dim ; + } + numCols_ = start; +} + +/****************************************************************************/ +vector KeyInfo::colSpec() const { + std::vector result(size(), 0); + BOOST_FOREACH ( const gtsam::KeyInfo::value_type &item, *this ) { + result[item.second.index()] = item.second.dim(); + } + return result; +} + +/****************************************************************************/ +VectorValues KeyInfo::x0() const { + VectorValues result; + BOOST_FOREACH ( const gtsam::KeyInfo::value_type &item, *this ) { + result.insert(item.first, Vector::Zero(item.second.dim())); + } + return result; +} + +/****************************************************************************/ +Vector KeyInfo::x0vector() const { + return Vector::Zero(numCols_); +} + } diff --git a/gtsam/linear/IterativeSolver.h b/gtsam/linear/IterativeSolver.h index daf5960e7..66006875b 100644 --- a/gtsam/linear/IterativeSolver.h +++ b/gtsam/linear/IterativeSolver.h @@ -11,16 +11,28 @@ #pragma once +#include #include - +#include +#include +#include +#include +#include +#include #include +#include namespace gtsam { // Forward declarations - class VectorValues; + class KeyInfo; + class KeyInfoEntry; + class Ordering; class GaussianFactorGraph; + class Values; + class VectorValues; + /************************************************************************************/ /** * parameters for iterative linear solvers */ @@ -33,38 +45,97 @@ namespace gtsam { public: - IterativeOptimizationParameters(const IterativeOptimizationParameters &p) - : verbosity_(p.verbosity_) {} - - IterativeOptimizationParameters(const Verbosity verbosity = SILENT) - : verbosity_(verbosity) {} + IterativeOptimizationParameters(Verbosity v = SILENT) + : verbosity_(v) {} virtual ~IterativeOptimizationParameters() {} - /* general interface */ + /* utility */ inline Verbosity verbosity() const { return verbosity_; } - - /* matlab interface */ std::string getVerbosity() const; void setVerbosity(const std::string &s) ; - virtual void print() const ; + /* matlab interface */ + void print() const ; + + /* virtual print function */ + virtual void print(std::ostream &os) const ; + + /* for serialization */ + friend std::ostream& operator<<(std::ostream &os, const IterativeOptimizationParameters &p); static Verbosity verbosityTranslator(const std::string &s); static std::string verbosityTranslator(Verbosity v); }; + 
/************************************************************************************/ class GTSAM_EXPORT IterativeSolver { public: typedef boost::shared_ptr shared_ptr; IterativeSolver() {} virtual ~IterativeSolver() {} - /* interface to the nonlinear optimizer */ - virtual VectorValues optimize () = 0; + /* interface to the nonlinear optimizer, without metadata, damping and initial estimate */ + VectorValues optimize ( + const GaussianFactorGraph &gfg, + boost::optional = boost::none, + boost::optional&> lambda = boost::none + ); + + /* interface to the nonlinear optimizer, without initial estimate */ + VectorValues optimize ( + const GaussianFactorGraph &gfg, + const KeyInfo &keyInfo, + const std::map &lambda + ); + + /* interface to the nonlinear optimizer that the subclasses have to implement */ + virtual VectorValues optimize ( + const GaussianFactorGraph &gfg, + const KeyInfo &keyInfo, + const std::map &lambda, + const VectorValues &initial + ) = 0; - /* interface to the nonlinear optimizer */ - virtual VectorValues optimize (const VectorValues &initial) = 0; }; + /************************************************************************************/ + /* Handy data structure for iterative solvers + * key to (index, dimension, colstart) */ + class KeyInfoEntry : public boost::tuple { + public: + typedef boost::tuple Base; + KeyInfoEntry(size_t idx, size_t d, Key start) : Base(idx, d, start) {} + const size_t index() const { return this->get<0>(); } + const size_t dim() const { return this->get<1>(); } + const size_t colstart() const { return this->get<2>(); } + }; + + /************************************************************************************/ + class KeyInfo : public std::map { + public: + typedef std::map Base; + KeyInfo(const GaussianFactorGraph &fg); + KeyInfo(const GaussianFactorGraph &fg, const Ordering &ordering); + + std::vector colSpec() const ; + VectorValues x0() const; + Vector x0vector() const; + + inline size_t numCols() const { return numCols_; } + inline const Ordering & ordering() const { return ordering_; } + + protected: + + void initialize(const GaussianFactorGraph &fg); + + Ordering ordering_; + size_t numCols_; + + private: + KeyInfo() {} + + }; + + } diff --git a/gtsam/linear/PCGSolver.cpp b/gtsam/linear/PCGSolver.cpp new file mode 100644 index 000000000..6bfa389fb --- /dev/null +++ b/gtsam/linear/PCGSolver.cpp @@ -0,0 +1,437 @@ +/* + * PCGSolver.cpp + * + * Created on: Feb 14, 2012 + * Author: ydjian + */ + +#include +//#include +//#include +//#include +//#include +#include +#include +//#include +//#include +//#include +//#include +#include +#include +#include + +using namespace std; + +namespace gtsam { + +/*****************************************************************************/ +void PCGSolverParameters::print(ostream &os) const { + Base::print(os); + os << "PCGSolverParameters:" << endl; + preconditioner_->print(os); +} + +/*****************************************************************************/ +PCGSolver::PCGSolver(const PCGSolverParameters &p) { + preconditioner_ = createPreconditioner(p.preconditioner_); +} + +/*****************************************************************************/ +VectorValues PCGSolver::optimize ( + const GaussianFactorGraph &gfg, + const KeyInfo &keyInfo, + const std::map &lambda, + const VectorValues &initial) +{ + /* build preconditioner */ + preconditioner_->build(gfg, keyInfo, lambda); + + /* apply pcg */ + const Vector sol = preconditionedConjugateGradient( + GaussianFactorGraphSystem(gfg, 
*preconditioner_, keyInfo, lambda), + initial.vector(keyInfo.ordering()), parameters_); + + return buildVectorValues(sol, keyInfo); +} + +/*****************************************************************************/ +GaussianFactorGraphSystem::GaussianFactorGraphSystem( + const GaussianFactorGraph &gfg, + const Preconditioner &preconditioner, + const KeyInfo &keyInfo, + const std::map &lambda) + : gfg_(gfg), preconditioner_(preconditioner), keyInfo_(keyInfo), lambda_(lambda) {} + +/*****************************************************************************/ +void GaussianFactorGraphSystem::residual(const Vector &x, Vector &r) const { + /* implement b-Ax, assume x and r are pre-allocated */ + + /* reset r to b */ + getb(r); + + /* substract A*x */ + Vector Ax = Vector::Zero(r.rows(), 1); + multiply(x, Ax); + r -= Ax ; +} + +/*****************************************************************************/ +void GaussianFactorGraphSystem::multiply(const Vector &x, Vector& Ax) const { + /* implement Ax, assume x and Ax are pre-allocated */ + + /* reset y */ + Ax.setZero(); + + BOOST_FOREACH ( const GaussianFactor::shared_ptr &gf, gfg_ ) { + if ( JacobianFactor::shared_ptr jf = boost::dynamic_pointer_cast(gf) ) { + /* accumulate At A x*/ + for ( JacobianFactor::const_iterator it = jf->begin() ; it != jf->end() ; ++it ) { + const Matrix Ai = jf->getA(it); + /* this map lookup should be replaced */ + const KeyInfoEntry &entry = keyInfo_.find(*it)->second; + Ax.segment(entry.colstart(), entry.dim()) + += Ai.transpose() * (Ai * x.segment(entry.colstart(), entry.dim())); + } + } + else if ( HessianFactor::shared_ptr hf = boost::dynamic_pointer_cast(gf) ) { + /* accumulate H x */ + + /* use buffer to avoid excessive table lookups */ + const size_t sz = hf->size(); + vector y; + y.reserve(sz); + for (HessianFactor::const_iterator it = hf->begin(); it != hf->end(); it++) { + /* initialize y to zeros */ + y.push_back(zero(hf->getDim(it))); + } + + for (HessianFactor::const_iterator j = hf->begin(); j != hf->end(); j++ ) { + /* retrieve the key mapping */ + const KeyInfoEntry &entry = keyInfo_.find(*j)->second; + // xj is the input vector + const Vector xj = x.segment(entry.colstart(), entry.dim()); + size_t idx = 0; + for (HessianFactor::const_iterator i = hf->begin(); i != hf->end(); i++, idx++ ) { + if ( i == j ) y[idx] += hf->info(j, j).selfadjointView() * xj; + else y[idx] += hf->info(i, j).knownOffDiagonal() * xj; + } + } + + /* accumulate to r */ + for(DenseIndex i = 0; i < (DenseIndex) sz; ++i) { + /* retrieve the key mapping */ + const KeyInfoEntry &entry = keyInfo_.find(hf->keys()[i])->second; + Ax.segment(entry.colstart(), entry.dim()) += y[i]; + } + } + else { + throw invalid_argument("GaussianFactorGraphSystem::multiply gfg contains a factor that is neither a JacobianFactor nor a HessianFactor."); + } + } +} + +/*****************************************************************************/ +void GaussianFactorGraphSystem::getb(Vector &b) const { + /* compute rhs, assume b pre-allocated */ + + /* reset */ + b.setZero(); + + BOOST_FOREACH ( const GaussianFactor::shared_ptr &gf, gfg_ ) { + if ( JacobianFactor::shared_ptr jf = boost::dynamic_pointer_cast(gf) ) { + const Vector rhs = jf->getb(); + /* accumulate At rhs */ + for ( JacobianFactor::const_iterator it = jf->begin() ; it != jf->end() ; ++it ) { + /* this map lookup should be replaced */ + const KeyInfoEntry &entry = keyInfo_.find(*it)->second; + b.segment(entry.colstart(), entry.dim()) += jf->getA(it).transpose() * rhs ; + } + } + 
else if ( HessianFactor::shared_ptr hf = boost::dynamic_pointer_cast(gf) ) { + /* accumulate g */ + for (HessianFactor::const_iterator it = hf->begin(); it != hf->end(); it++) { + const KeyInfoEntry &entry = keyInfo_.find(*it)->second; + b.segment(entry.colstart(), entry.dim()) += hf->linearTerm(it); + } + } + else { + throw invalid_argument("GaussianFactorGraphSystem::getb gfg contains a factor that is neither a JacobianFactor nor a HessianFactor."); + } + } +} + +/**********************************************************************************/ +void GaussianFactorGraphSystem::leftPrecondition(const Vector &x, Vector &y) const +{ preconditioner_.transposeSolve(x, y); } + +/**********************************************************************************/ +void GaussianFactorGraphSystem::rightPrecondition(const Vector &x, Vector &y) const +{ preconditioner_.solve(x, y); } + +/**********************************************************************************/ +VectorValues buildVectorValues(const Vector &v, + const Ordering &ordering, + const map & dimensions) { + VectorValues result; + + DenseIndex offset = 0; + for ( size_t i = 0 ; i < ordering.size() ; ++i ) { + const Key &key = ordering[i]; + map::const_iterator it = dimensions.find(key); + if ( it == dimensions.end() ) { + throw invalid_argument("buildVectorValues: inconsistent ordering and dimensions"); + } + const size_t dim = it->second; + result.insert(key, v.segment(offset, dim)); + offset += dim; + } + + return result; +} + +/**********************************************************************************/ +VectorValues buildVectorValues(const Vector &v, const KeyInfo &keyInfo) { + VectorValues result; + BOOST_FOREACH ( const KeyInfo::value_type &item, keyInfo ) { + result.insert(item.first, v.segment(item.second.colstart(), item.second.dim())); + } + return result; +} + +///*****************************************************************************/ +//std::string PCGSolverParameters::pcgTranslator(const PCGKernel value) { +// std::string s; +// switch (value) { +// case PCGSolverParameters::PCG: s = "PCG"; break; +// case PCGSolverParameters::LSPCG: s = "LSPCG"; break; +// case PCGSolverParameters::SPCG: s = "SPCG"; break; +// default: s = "UNDEFINED"; break; +// } +// return s; +//} +// +///*****************************************************************************/ +//PCGSolverParameters::PCGKernel PCGSolverParameters::pcgTranslator(const std::string &src) { +// std::string s = src; boost::algorithm::to_upper(s); +// if (s == "PCG") return PCGSolverParameters::PCG; +// if (s == "LSPCG") return PCGSolverParameters::LSPCG; +// if (s == "SPCG") return PCGSolverParameters::SPCG; +// +// /* default is LSPCG */ +// return PCGSolverParameters::LSPCG; +//} +// +///*****************************************************************************/ +//std::string PCGSolverParameters::blasTranslator(const BLASKernel value) { +// std::string s; +// switch (value) { +// case PCGSolverParameters::GTSAM: s = "GTSAM" ; break; +// case PCGSolverParameters::SBM: s = "SBM" ; break; +// case PCGSolverParameters::SBM_MT: s = "SBM-MT" ; break; +// default: s = "UNDEFINED" ; break; +// } +// return s; +//} +// +///*****************************************************************************/ +//PCGSolverParameters::BLASKernel PCGSolverParameters::blasTranslator(const std::string &src) { +// std::string s = src; boost::algorithm::to_upper(s); +// if (s == "GTSAM") return PCGSolverParameters::GTSAM; +// if (s == "SBM") return 
PCGSolverParameters::SBM; +// if (s == "SBM-MT") return PCGSolverParameters::SBM_MT; +// +// /* default is SBM */ +// return PCGSolverParameters::SBM; +//} +// +///*****************************************************************************/ +//std::string PCGSolverParameters::rbTranslator(const RegisterBlockKernel k) { +// std::string s; +// switch (k) { +// case PCGSolverParameters::RB_NONE: s = "RB_NONE" ; break; +// case PCGSolverParameters::SSE_RB22: s = "SSE_RB22" ; break; +// case PCGSolverParameters::SSE_RB44: s = "SSE_RB44" ; break; +// case PCGSolverParameters::AVX_RB44: s = "AVX_RB44" ; break; +// default: s = "UNDEFINED" ; break; +// } +// return s; +//} +// +///*****************************************************************************/ +//PCGSolverParameters::RegisterBlockKernel PCGSolverParameters::rbTranslator(const std::string &src) { +// std::string s = src; boost::algorithm::to_upper(s); +// if (s == "") return PCGSolverParameters::RB_NONE; +// if (s == "SSE_RB22") return PCGSolverParameters::SSE_RB22; +// if (s == "SSE_RB44") return PCGSolverParameters::SSE_RB44; +// if (s == "AVX_RB44") return PCGSolverParameters::AVX_RB44; +// +// /* default is SBM */ +// return PCGSolverParameters::RB_NONE; +//} +// +///*****************************************************************************/ +//PCGSolver::PCGSolver(const PCGSolverParameters &p) : Base(), parameters_(p), built_(false) { +// preconditioner_ = createPreconditioner(p.preconditioner_); +//} +// +///*****************************************************************************/ +//void PCGSolver::replaceFactors( +// const Values &linearization_point, +// const GaussianFactorGraph &gfg, +// const double lambda) +//{ +// const JacobianFactorGraph jfg(gfg); +// +// /* prepare the column structure */ +// if ( keyInfo_.size() == 0 ) { +// keyInfo_ = KeyInfo(jfg, *orderingNatural(jfg)); +// } +// +// /* implemented by subclass */ +// safe_tic_(); +// convertKernel(jfg, lambda); +// safe_toc_("convert-kernel"); +// +// /* update the preconditioner as well */ +// preconditioner_->replaceFactors(jfg, lambda); +// built_ = false; +//} +// +///*****************************************************************************/ +//void PCGSolver::buildPreconditioner() { +// if ( built_ == false ) { +// safe_tic_(); +// preconditioner_->buildPreconditioner(); +// built_ = true; +// safe_toc_("factorize"); +// } +//} +// +///*****************************************************************************/ +//VectorValues PCGSolver::optimize() { +// +// buildPreconditioner(); +// +// VectorValues zero; +// BOOST_FOREACH ( const KeyInfo::value_type &item, keyInfo_ ) { +// zero.insert(item.first, Vector::Zero(item.second.dim())); +// } +// +// return optimize(zero); +//} +// +///*****************************************************************************/ +//VectorValues PCGSolver::optimize(const VectorValues &initial) { +// safe_tic_(); +// VectorValues result = optimize_(initial); +// safe_toc_("pcg"); +// return result ; +//} +// +///*****************************************************************************/ +////void PCGSolver_FG::convertKernel(const JacobianFactorGraph &jfg, const double lambda) { +//// hfg_ = buildOuterProductJacobianFactorGraph(jfg, VariableIndex(jfg), *x0_, lambda); +////} +//// +////VectorValues PCGSolver_FG::optimize_(const VectorValues &initial) { +//// System system(hfg_, preconditioner_); +//// return preconditionedConjugateGradient(system, initial, parameters_); +////} +// 
+///*****************************************************************************/ +//void PCGSolver_SBM::convertKernel(const JacobianFactorGraph &jfg, const double lambda) { +// linear_ = buildSparseLinearSystem(jfg, keyInfo_, true /* AtA */, lambda, false /* column major */, +// parameters_.blas_kernel_, parameters_.rb_kernel_); +//} +// +///*****************************************************************************/ +//VectorValues PCGSolver_SBM::optimize_(const VectorValues &initial) { +// System system(linear_, preconditioner_); +// Vector solution = preconditionedConjugateGradient( +// system, initial.vector(keyInfo_.ordering()), parameters_); +// return buildVectorValues(solution, keyInfo_); +//} +// +///*****************************************************************************/ +//ydjian::SparseLinearSystem::shared_ptr +//buildSparseLinearSystem(const JacobianFactorGraph &jfg, const KeyInfo &keyInfo, +// const bool AtA, const double lambda, const bool colMajor, +// const PCGSolverParameters::BLASKernel blas, const PCGSolverParameters::RegisterBlockKernel rbSrc) { +// +// std::map rbMap; +// rbMap[PCGSolverParameters::RB_NONE] = ydjian::SBM::NAIVE; +// rbMap[PCGSolverParameters::SSE_RB22] = ydjian::SBM::SSE_RB22; +// rbMap[PCGSolverParameters::SSE_RB44] = ydjian::SBM::SSE_RB44; +// rbMap[PCGSolverParameters::AVX_RB44] = ydjian::SBM::AVX_RB44; +// +// ydjian::SBM::Type rb; +// SC_ASSERT( rbMap.find(rbSrc) != rbMap.end(), "buildSparseLinearSystem: rbSrc is not supported.."); +// rb = rbMap[rbSrc]; +// +// ydjian::SparseLinearSystem::shared_ptr linear; +// +// switch (blas) { +// +// case PCGSolverParameters::SBM: +// linear = gsp2::buildSparseLinearSystem(jfg, keyInfo, AtA, lambda, colMajor, false /* multithread */, rb); +// break; +// +// case PCGSolverParameters::SBM_MT: +// linear = gsp2::buildSparseLinearSystem(jfg, keyInfo, AtA, lambda, colMajor, true /* multithread */, rb); +// break; +// +// default: +// throw std::invalid_argument("createSparseLinearSystem: unsupported blas kernel"); +// break; +// } +// +// return linear; +//} +// +///*****************************************************************************/ +//PCGSolver::shared_ptr createPCGSolver(const PCGSolverParameters ¶meters){ +// +// PCGSolver::shared_ptr solver; +// switch ( parameters.pcg_kernel_ ) { +// case PCGSolverParameters::PCG: +// switch ( parameters.blas_kernel_ ) { +// +//// case PCGSolverParameters::GTSAM: +//// solver.reset(new PCGSolver_FG(parameters)); +//// break; +// +// case PCGSolverParameters::SBM: +// case PCGSolverParameters::SBM_MT: +// solver.reset(new PCGSolver_SBM(parameters)); +// break; +// +// default: +// throw std::invalid_argument("createPCGSolver: undefined blas_kernel for pcgsolver"); +// break; +// } +// break; +// +// case PCGSolverParameters::LSPCG: +// switch ( parameters.blas_kernel_ ) { +// case PCGSolverParameters::GTSAM: +// solver.reset(new LSPCGSolver_FG(parameters)); +// break; +// +// case PCGSolverParameters::SBM: +// case PCGSolverParameters::SBM_MT: +// solver.reset(new LSPCGSolver_SBM(parameters)); +// break; +// +// default: +// throw std::invalid_argument("createPCGSolver: undefined blas_kernel for lspcg solver"); +// break; +// } +// break; +// +// default: +// throw std::invalid_argument("createPCGSolver: undefined pcg_kernel"); +// break; +// } +// return solver; +//} +} diff --git a/gtsam/linear/PCGSolver.h b/gtsam/linear/PCGSolver.h new file mode 100644 index 000000000..034352c50 --- /dev/null +++ b/gtsam/linear/PCGSolver.h @@ -0,0 +1,236 @@ +/* + * 
PCGSolver.h + * + * Created on: Jan 31, 2012 + * Author: Yong-Dian Jian + */ + +#pragma once + +#include +#include +#include +#include + +#include +#include +#include +#include + +namespace gtsam { + +class GaussianFactorGraph; +class KeyInfo; +class Preconditioner; +class PreconditionerParameters; + +/*****************************************************************************/ +struct PCGSolverParameters: public ConjugateGradientParameters { +public: + typedef ConjugateGradientParameters Base; + typedef boost::shared_ptr shared_ptr; + + PCGSolverParameters() {} + + virtual void print(std::ostream &os) const; + + /* interface to preconditioner parameters */ + inline const PreconditionerParameters& preconditioner() const { + return *preconditioner_; + } + + boost::shared_ptr preconditioner_; +}; + +/*****************************************************************************/ +/* A virtual base class for the preconditioned conjugate gradient solver */ +class PCGSolver: public IterativeSolver { +public: + typedef IterativeSolver Base; + typedef boost::shared_ptr shared_ptr; + +protected: + + PCGSolverParameters parameters_; + boost::shared_ptr preconditioner_; + +// /* cached local variables */ +// KeyInfo keyInfo_; +// +// /* whether the preconditioner has be built */ +// bool built_ ; + +public: + /* Interface to initialize a solver without a problem */ + PCGSolver(const PCGSolverParameters &p); + virtual ~PCGSolver() {} + +// /* update interface to the nonlinear optimizer */ +// virtual void replaceFactors( +// const GaussianFactorGraph &factorGraph, +// const Values &linearization_point, +// const std::map lambda); + +// /* build the preconditioner */ +// void buildPreconditioner(); + +// /* interface for the NonlinearOptimizer, with initial estimate equals to a zero vector */ +// virtual VectorValues optimize() ; +// +// /* interface for the NonlinearOptimizer, with a custom initial estimate*/ +// virtual VectorValues optimize(const VectorValues &initial); + + using IterativeSolver::optimize; + + virtual VectorValues optimize(const GaussianFactorGraph &gfg, + const KeyInfo &keyInfo, const std::map &lambda, + const VectorValues &initial); + +//protected: + +// /* convert jacobian factor graph to solver-specific kernel */ +// virtual void convertKernel(const JacobianFactorGraph &jfg, const double lambda = 0.0) = 0 ; +// +// /* do the actual optimization */ +// virtual VectorValues optimize_(const VectorValues &initial) = 0; + +}; + +/*****************************************************************************/ +class GaussianFactorGraphSystem { +public: + + GaussianFactorGraphSystem(const GaussianFactorGraph &gfg, + const Preconditioner &preconditioner, const KeyInfo &info, + const std::map &lambda); + + const GaussianFactorGraph &gfg_; + const Preconditioner &preconditioner_; + const KeyInfo &keyInfo_; + const std::map &lambda_; + + void residual(const Vector &x, Vector &r) const; + void multiply(const Vector &x, Vector& y) const; + void leftPrecondition(const Vector &x, Vector &y) const; + void rightPrecondition(const Vector &x, Vector &y) const; + inline void scal(const double alpha, Vector &x) const { + x *= alpha; + } + inline double dot(const Vector &x, const Vector &y) const { + return x.dot(y); + } + inline void axpy(const double alpha, const Vector &x, Vector &y) const { + y += alpha * x; + } + + void getb(Vector &b) const; +}; + +/* utility functions */ +/**********************************************************************************/ +VectorValues buildVectorValues(const 
Vector &v, const Ordering &ordering, + const std::map & dimensions); + +/**********************************************************************************/ +VectorValues buildVectorValues(const Vector &v, const KeyInfo &keyInfo); + +///*****************************************************************************/ +///* an specialization of the PCGSolver using gtsam's factor graph and linear algebra facility */ +//class PCGSolver_FG : public PCGSolver { +// +//public: +// typedef PCGSolver Base; +// typedef boost::shared_ptr shared_ptr ; +// +//protected: +// JacobianFactorGraph::shared_ptr hfg_; // A'*A + lambda*I +// +//public: +// PCGSolver_FG(const Parameters ¶meters) : Base(parameters) {} +// virtual ~PCGSolver_FG() {} +// +//protected: +// virtual void convertKernel(const JacobianFactorGraph &jfg, const double lambda = 0.0); +// virtual VectorValues optimize_(const VectorValues &initial); +// +// /* interface to the preconditionedConjugateGradient function template */ +// class System { +// public: +// typedef Vector State; +// typedef Vector Residual; +// +// protected: +// const JacobianFactorGraph::shared_ptr hfg_; +// const Preconditioner::shared_ptr preconditioner_; +// const KeyInfo &keyInfo_; +// const RowInfo &rowInfo_; +// +// public: +// System(const JacobianFactorGraph::shared_ptr hfg, +// const Preconditioner::shared_ptr preconditioner, +// const KeyInfo &keyInfo, const RowInfo &rowInfo); +// +// inline void residual(const State &x, State &r) const { gtsam::residual(*hfg_, x, r); } +// inline void multiply(const State &x, State& y) const { gtsam::multiply(*hfg_, x, y); } +// inline void leftPrecondition(const State &x, State &y) const { preconditioner_->transposeSolve(x, y); } +// inline void rightPrecondition(const State &x, State &y) const { preconditioner_->solve(x, y); } +// inline void scal(const double alpha, State &x) const { x.vector() *= alpha; } +// inline double dot(const State &x, const State &y) const { return x.vector().dot(y.vector()); } +// inline void axpy(const double alpha, const State &x, State &y) const { y.vector() += alpha*x.vector(); } +// } ; +//}; +// +///*****************************************************************************/ +///** an specialization of the PCGSolver using sbm and customized blas kernel **/ +//class PCGSolver_SBM : public PCGSolver { +// +//public: +// typedef PCGSolver Base; +// typedef boost::shared_ptr shared_ptr ; +// +//protected: +// ydjian::SparseLinearSystem::shared_ptr linear_; +// +//public: +// PCGSolver_SBM(const Parameters ¶meters) : Base(parameters) {} +// virtual ~PCGSolver_SBM() {} +// +//protected: +// /* virtual function of the PCGSolver */ +// virtual void convertKernel(const JacobianFactorGraph &jfg, const double lambda = 0.0); +// virtual VectorValues optimize_(const VectorValues &initial); +// +// /* interface to the preconditionedConjugateGradient function template */ +// class System { +// +// public: +// typedef Vector State; +// +// protected: +// const ydjian::SparseLinearSystem::shared_ptr linear_; +// const Preconditioner::shared_ptr preconditioner_; +// +// public: +// System(const ydjian::SparseLinearSystem::shared_ptr linear, const Preconditioner::shared_ptr preconditioner) +// : linear_(linear), preconditioner_(preconditioner) {} +// inline void residual(const State &x, State &r) const { linear_->residual(x.data(), r.data(), false); } +// inline void multiply(const State &x, State& y) const { linear_->matrix()->multiply(x.data(), y.data(), false); } +// inline void leftPrecondition(const 
State &x, State &y) const { preconditioner_->transposeSolve(x, y); } +// inline void rightPrecondition(const State &x, State &y) const { preconditioner_->solve(x, y); } +// inline void scal(const double alpha, State &x) const { x *= alpha; } +// inline double dot(const State &x, const State &y) const { return x.dot(y); } +// inline void axpy(const double alpha, const State &x, State &y) const { y += alpha*x; } +// } ; +//}; +// +///* a factory method to instantiate PCGSolvers and its derivatives */ +//PCGSolver::shared_ptr createPCGSolver(const PCGParameters ¶meters); +// +///* an utility function to create sparse linear system, for all cg solvers using the sbm kernel */ +//ydjian::SparseLinearSystem::shared_ptr +//buildSparseLinearSystem(const JacobianFactorGraph &jfg, const KeyInfo &keyInfo, const bool AtA, +// const double lambda, const bool colMajor, const PCGParameters::BLASKernel blas, +// const PCGParameters::RegisterBlockKernel rb); + +} + diff --git a/gtsam/linear/Preconditioner.cpp b/gtsam/linear/Preconditioner.cpp new file mode 100644 index 000000000..c233c431b --- /dev/null +++ b/gtsam/linear/Preconditioner.cpp @@ -0,0 +1,212 @@ +/* + * Preconditioner.cpp + * + * Created on: Feb 2, 2012 + * Author: ydjian + */ + + +//#include +#include +//#include +#include +#include +//#include +//#include +//#include +#include +//#include +//#include +//#include +//#include +//#include +//#include +#include +#include +#include +#include + +using namespace std; + +namespace gtsam { + + +/*****************************************************************************/ +void PreconditionerParameters::print() const { + print(cout); +} + +/***************************************************************************************/ +void PreconditionerParameters::print(ostream &os) const { + os << "PreconditionerParameters" << endl + << "kernel: " << kernelTranslator(kernel_) << endl + << "verbosity: " << verbosityTranslator(verbosity_) << endl; +} + +/*****************************************************************************/ + ostream& operator<<(ostream &os, const PreconditionerParameters &p) { + p.print(os); + return os; +} + +/***************************************************************************************/ +PreconditionerParameters::Kernel PreconditionerParameters::kernelTranslator(const std::string &src) { + std::string s = src; boost::algorithm::to_upper(s); + if (s == "GTSAM") return PreconditionerParameters::GTSAM; + else if (s == "CHOLMOD") return PreconditionerParameters::CHOLMOD; + /* default is cholmod */ + else return PreconditionerParameters::CHOLMOD; +} + +/***************************************************************************************/ +PreconditionerParameters::Verbosity PreconditionerParameters::verbosityTranslator(const std::string &src) { + std::string s = src; boost::algorithm::to_upper(s); + if (s == "SILENT") return PreconditionerParameters::SILENT; + else if (s == "COMPLEXITY") return PreconditionerParameters::COMPLEXITY; + else if (s == "ERROR") return PreconditionerParameters::ERROR; + /* default is default */ + else return PreconditionerParameters::SILENT; +} + +/***************************************************************************************/ +std::string PreconditionerParameters::kernelTranslator(PreconditionerParameters::Kernel k) { + if ( k == GTSAM ) return "GTSAM"; + if ( k == CHOLMOD ) return "CHOLMOD"; + else return "UNKNOWN"; +} + +/***************************************************************************************/ +std::string 
PreconditionerParameters::verbosityTranslator(PreconditionerParameters::Verbosity verbosity) { + if (verbosity == SILENT) return "SILENT"; + else if (verbosity == COMPLEXITY) return "COMPLEXITY"; + else if (verbosity == ERROR) return "ERROR"; + else return "UNKNOWN"; +} + +///***************************************************************************************/ +//void Preconditioner::replaceFactors(const JacobianFactorGraph &jfg, const double lambda) +//{ +// const Parameters &p = *parameters_; +// +// if ( keyInfo_.size() == 0 ) { +// keyInfo_ = KeyInfo(jfg, *orderingNatural(jfg)); +// } +// +// if ( p.verbosity() >= Parameters::COMPLEXITY ) +// cout << "Preconditioner::replaceFactors with a jfg of " << jfg.size() << " factors."<< endl; +//} +// +/***************************************************************************************/ +boost::shared_ptr createPreconditioner(const boost::shared_ptr parameters) { + + DummyPreconditionerParameters::shared_ptr dummy = boost::dynamic_pointer_cast(parameters); + if ( dummy ) { + return boost::make_shared(); + } + +// BlockJacobiPreconditioner::Parameters::shared_ptr jacobi = boost::dynamic_pointer_cast(parameters); +// if ( jacobi ) { +// BlockJacobiPreconditioner::shared_ptr p(new BlockJacobiPreconditioner(jacobi)); +// return p; +// } +// +// MIQRPreconditioner::Parameters::shared_ptr miqr = boost::dynamic_pointer_cast(parameters); +// if ( miqr ) { +// MIQRPreconditioner::shared_ptr p(new MIQRPreconditioner(miqr)); +// return p; +// } +// +// CombinatorialPreconditioner::Parameters::shared_ptr combinatorial +// = boost::dynamic_pointer_cast(parameters); +// if ( combinatorial ) { +// return createCombinatorialPreconditioner(combinatorial); +// } + + throw invalid_argument("createPreconditioner: unexpected preconditioner parameter type"); +} +// +///***************************************************************************************/ +//JacobianFactorGraph::shared_ptr +//buildFactorSubgraph(const JacobianFactorGraph &jfg, const KeyInfo &keyInfo, +// const Subgraph &subgraph, const int hessian, const double lambda, const bool clone, const bool includeUnary){ +// +// const Subgraph::Edges &edges = subgraph.edges(); +// const size_t m = jfg.size(), n = keyInfo.size(); +// const bool augment = (lambda!=0.0) ? true : false ; +// +// /* produce an edge weight table */ +// std::vector weights(m, 0.0); +// BOOST_FOREACH ( const Subgraph::Edge &edge, edges ) { +// weights[edge.index()] = edge.weight(); +// } +// +// /* collect the number of dangling unary factors */ +// /* upper bound of the factors */ +// size_t extraUnary = 0; +// if ( includeUnary ) { +// for ( Index i = 0 ; i < m ; ++i ) { +// if ( weights[i] == 0.0 && jfg[i]->size() == 1 ) { +// weights[i] = 1.0; +// ++extraUnary; +// } +// } +// } +// +// const size_t e = edges.size() + extraUnary; +// const size_t sz = e + (augment ? n : 0) + (hessian ? 
2*(m-e) : 0); +// +// JacobianFactorGraph::shared_ptr target(new JacobianFactorGraph()); +// target->reserve(sz); +// +// /* auxiliary variable */ +// size_t r = jfg[0]->rows(); +// SharedDiagonal sigma(noiseModel::Unit::Create(r)); +// +// for ( Index i = 0 ; i < m ; ++i ) { +// +// const double w = weights[i]; +// +// if ( w != 0.0 ) { /* subgraph edge */ +// if ( !clone && w == 1.0 ) { +// target->push_back(jfg[i]); +// } +// else { +// JacobianFactor::shared_ptr jf (new JacobianFactor(*jfg[i])); +// scaleJacobianFactor(*jf, w); +// target->push_back(jf); +// } +// } +// else { /* non-subgraph edge */ +// if ( hessian ) { +// const JacobianFactor &f = *jfg[i]; +// /* simple case: unary factor */ +// if ( f.size() == 1 ) { +// if (!clone) target->push_back(jfg[i]); +// else { +// JacobianFactor::shared_ptr jf(new JacobianFactor(*jfg[i])); +// target->push_back(jf); +// } +// } +// else { /* general factor */ +// const size_t rj = f.rows(); +// if ( rj != r ) { +// r = rj; sigma = noiseModel::Unit::Create(r); +// } +// for ( JacobianFactor::const_iterator j = f.begin() ; j != f.end() ; ++j ) { +// JacobianFactor::shared_ptr jf(new JacobianFactor(*j, f.getA(j), Vector::Zero(r,1), sigma)); +// target->push_back(jf); +// } /* for */ +// } /* factor arity */ +// } /* hessian */ +// } /* if w != 0.0 */ +// } /* for */ +// +// if ( !augment ) return target ; +// +// return appendPriorFactors(*target, keyInfo, lambda, false /* clone */); +//} + + +} + + diff --git a/gtsam/linear/Preconditioner.h b/gtsam/linear/Preconditioner.h new file mode 100644 index 000000000..5ca5ec70b --- /dev/null +++ b/gtsam/linear/Preconditioner.h @@ -0,0 +1,173 @@ +/* + * Preconditioner.h + * + * Created on: Feb 1, 2012 + * Author: ydjian + */ + +#pragma once + +#include +#include +#include +#include +#include + +namespace gtsam { + +class GaussianFactorGraph; +class KeyInfo; +class VectorValues; +//class Subgraph; + +/* parameters for the preconditioner */ +struct PreconditionerParameters { + + typedef boost::shared_ptr shared_ptr; + + enum Kernel { /* Preconditioner Kernel */ + GTSAM = 0, + CHOLMOD /* experimental */ + } kernel_ ; + + enum Verbosity { + SILENT = 0, + COMPLEXITY = 1, + ERROR = 2 + } verbosity_ ; + + PreconditionerParameters(): kernel_(GTSAM), verbosity_(SILENT) {} + PreconditionerParameters(const PreconditionerParameters &p) : kernel_(p.kernel_), verbosity_(p.verbosity_) {} + virtual ~PreconditionerParameters() {} + + /* general interface */ + inline Kernel kernel() const { return kernel_; } + inline Verbosity verbosity() const { return verbosity_; } + + void print() const ; + + virtual void print(std::ostream &os) const ; + + static Kernel kernelTranslator(const std::string &s); + static Verbosity verbosityTranslator(const std::string &s); + static std::string kernelTranslator(Kernel k); + static std::string verbosityTranslator(Verbosity v); + + /* for serialization */ + friend std::ostream& operator<<(std::ostream &os, const PreconditionerParameters &p); + + }; + + +/* PCG aims to solve the problem: A x = b by reparametrizing it as + * S^t A S y = S^t b or M A x = M b, where A \approx S S, or A \approx M + * The goal of this class is to provide a general interface to all preconditioners */ +class Preconditioner { +public: + typedef boost::shared_ptr shared_ptr; + typedef std::vector Dimensions; + + /* Generic Constructor and Destructor */ + Preconditioner() {} + virtual ~Preconditioner() {} + + /* Computation Interfaces */ + + /* implement x = S^{-1} y */ + virtual void solve(const Vector& y, Vector 
&x) const = 0; + virtual void solve(const VectorValues& y, VectorValues &x) const = 0; + + /* implement x = S^{-T} y */ + virtual void transposeSolve(const Vector& y, Vector& x) const = 0; + virtual void transposeSolve(const VectorValues& y, VectorValues &x) const = 0; + + /* implement x = S^{-1} S^{-T} y */ + virtual void fullSolve(const Vector& y, Vector &x) const = 0; + virtual void fullSolve(const VectorValues& y, VectorValues &x) const = 0; + + /* build/factorize the preconditioner */ + virtual void build( + const GaussianFactorGraph &gfg, + const KeyInfo &info, + const std::map &lambda + ) = 0; + +// /* complexity index */ +// virtual size_t complexity() const = 0; +// +// /* is the preconditioner dependent to data */ +// virtual bool isStatic() const = 0; +// +// /* is the preconditioner kind of spanning subgraph preconditioner */ +// virtual bool isSubgraph() const = 0; +// +// /* return A\b */ +// virtual void xstar(Vector &result) const = 0 ; +// +//protected: +// Parameters::shared_ptr parameters_; +// KeyInfo keyInfo_; + +}; + +/*******************************************************************************************/ +struct DummyPreconditionerParameters : public PreconditionerParameters { + typedef PreconditionerParameters Base; + typedef boost::shared_ptr shared_ptr; + DummyPreconditionerParameters() : Base() {} + virtual ~DummyPreconditionerParameters() {} +}; + +/*******************************************************************************************/ +class DummyPreconditioner : public Preconditioner { +public: + typedef Preconditioner Base; + typedef boost::shared_ptr shared_ptr; + +public: + + DummyPreconditioner() : Base() {} + virtual ~DummyPreconditioner() {} + + /* Computation Interfaces for raw vector */ + virtual void solve(const Vector& y, Vector &x) const { x = y; } + virtual void solve(const VectorValues& y, VectorValues& x) const { x = y; } + + virtual void transposeSolve(const Vector& y, Vector& x) const { x = y; } + virtual void transposeSolve(const VectorValues& y, VectorValues& x) const { x = y; } + + virtual void fullSolve(const Vector& y, Vector &x) const { x = y; } + virtual void fullSolve(const VectorValues& y, VectorValues& x) const { x = y; } + + virtual void build( + const GaussianFactorGraph &gfg, + const KeyInfo &info, + const std::map &lambda + ) {} + +// virtual void replaceFactors(const JacobianFactorGraph &jfg, const double lambda = 0.0) { +// Base::replaceFactors(jfg,lambda); +// } +// virtual void buildPreconditioner() {} +// virtual size_t complexity() const { return 0; } +// virtual bool isStatic() const { return true; } +// virtual bool isSubgraph() const { return false; } +// virtual void xstar(Vector &result) const {} +}; + +/* factory method to create preconditioners */ +boost::shared_ptr createPreconditioner(const boost::shared_ptr parameters); + +///* build a factor subgraph, which is defined as a set of weighted edges (factors). 
+// * "hessian"={0,1} indicates whether the non-subgraph edges are split into multiple unary factors +// * "lambda" indicates whether scaled identity matrix is augmented +// * "clone" indicates whether factors are cloned into the subgraph if the edge has weight = 1.0 +// * "includeUnary" indicates whether the unary factors are forced to join the subgraph: +// * useful for static subgraph preconditioner, because unary factors were imposed dynamically by LM */ +//boost::shared_ptr +//buildFactorSubgraph(const JacobianFactorGraph &jfg, const KeyInfo &keyInfo, +// const Subgraph &subgraph, const int hessian, const double lambda, const bool clone, const bool includeUnary); + +} + + diff --git a/gtsam/linear/SubgraphSolver.h b/gtsam/linear/SubgraphSolver.h index f6557a2c2..a66182281 100644 --- a/gtsam/linear/SubgraphSolver.h +++ b/gtsam/linear/SubgraphSolver.h @@ -15,8 +15,8 @@ #include #include #include - #include +#include namespace gtsam { @@ -28,7 +28,7 @@ class GTSAM_EXPORT SubgraphSolverParameters : public ConjugateGradientParameters public: typedef ConjugateGradientParameters Base; SubgraphSolverParameters() : Base() {} - virtual void print() const { Base::print(); } + virtual void print(std::ostream &os) const { Base::print(os); } }; /** @@ -77,8 +77,17 @@ public: SubgraphSolver(const boost::shared_ptr &Rc1, const boost::shared_ptr &Ab2, const Parameters ¶meters, const Ordering& ordering); virtual ~SubgraphSolver() {} - virtual VectorValues optimize () ; - virtual VectorValues optimize (const VectorValues &initial) ; + + VectorValues optimize () ; + VectorValues optimize (const VectorValues &initial) ; + + /* interface to the nonlinear optimizer that the subclasses have to implement */ + virtual VectorValues optimize ( + const GaussianFactorGraph &gfg, + const KeyInfo &keyInfo, + const std::map &lambda, + const VectorValues &initial + ) { return VectorValues(); } protected: diff --git a/gtsam/nonlinear/NonlinearOptimizer.cpp b/gtsam/nonlinear/NonlinearOptimizer.cpp index 678cac6b4..2a6eb5887 100644 --- a/gtsam/nonlinear/NonlinearOptimizer.cpp +++ b/gtsam/nonlinear/NonlinearOptimizer.cpp @@ -21,6 +21,7 @@ #include #include #include +#include #include #include @@ -100,19 +101,18 @@ VectorValues NonlinearOptimizer::solve(const GaussianFactorGraph &gfg, if (params.isMultifrontal()) { delta = gfg.optimize(*params.ordering, params.getEliminationFunction()); } else if (params.isSequential()) { - delta = gfg.eliminateSequential(*params.ordering, - params.getEliminationFunction())->optimize(); + delta = gfg.eliminateSequential(*params.ordering, params.getEliminationFunction())->optimize(); } else if (params.isIterative()) { if (!params.iterativeParams) - throw std::runtime_error( - "NonlinearOptimizer::solve: cg parameter has to be assigned ..."); - if (boost::dynamic_pointer_cast( - params.iterativeParams)) { - SubgraphSolver solver(gfg, - dynamic_cast(*params.iterativeParams), - *params.ordering); - delta = solver.optimize(); - } else { + throw std::runtime_error("NonlinearOptimizer::solve: cg parameter has to be assigned ..."); + + if (boost::shared_ptr pcg = boost::dynamic_pointer_cast(params.iterativeParams) ) { + delta = PCGSolver(*pcg).optimize(gfg); + } + else if (boost::shared_ptr spcg = boost::dynamic_pointer_cast(params.iterativeParams) ) { + delta = SubgraphSolver(gfg, *spcg, *params.ordering).optimize(); + } + else { throw std::runtime_error( "NonlinearOptimizer::solve: special cg parameter type is not handled in LM solver ..."); } diff --git a/tests/testPCGSolver.cpp 
b/tests/testPCGSolver.cpp new file mode 100644 index 000000000..ea9b5ec6c --- /dev/null +++ b/tests/testPCGSolver.cpp @@ -0,0 +1,77 @@ +/* ---------------------------------------------------------------------------- + + * GTSAM Copyright 2010, Georgia Tech Research Corporation, + * Atlanta, Georgia 30332-0415 + * All Rights Reserved + * Authors: Frank Dellaert, et al. (see THANKS for the full author list) + + * See LICENSE for the license information + + * -------------------------------------------------------------------------- */ + +/** + * @file testPCGSolver.cpp + * @brief Unit tests for PCGSolver class + * @author Yong-Dian Jian + */ + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#include + +#include +#include // for operator += +using namespace boost::assign; + +#include +#include + +using namespace std; +using namespace gtsam; + +const double tol = 1e-3; + +using symbol_shorthand::X; +using symbol_shorthand::L; + +/* ************************************************************************* */ +TEST( NonlinearOptimizer, optimization_method ) +{ + LevenbergMarquardtParams paramsPCG; + paramsPCG.linearSolverType = LevenbergMarquardtParams::Iterative; + PCGSolverParameters::shared_ptr pcg = boost::make_shared(); + pcg->preconditioner_ = boost::make_shared(); + paramsPCG.iterativeParams = pcg; + + NonlinearFactorGraph fg = example::createReallyNonlinearFactorGraph(); + + Point2 x0(10,10); + Values c0; + c0.insert(X(1), x0); + + Values actualPCG = LevenbergMarquardtOptimizer(fg, c0, paramsPCG).optimize(); + + DOUBLES_EQUAL(0,fg.error(actualPCG),tol); +} + +/* ************************************************************************* */ +int main() { + TestResult tr; + return TestRegistry::runAllTests(tr); +} + From 67398f0f13eda8d4b4c89b17bb7c070df02dd82c Mon Sep 17 00:00:00 2001 From: Yong-Dian Jian Date: Sun, 8 Jun 2014 16:15:00 -0400 Subject: [PATCH 4/5] add BlockJacobiPreconditioner class and unit test --- gtsam/linear/Preconditioner.cpp | 142 ++++++++++++++++++++++++++++---- gtsam/linear/Preconditioner.h | 81 ++++++++++-------- tests/testPCGSolver.cpp | 62 +++++++++++++- 3 files changed, 233 insertions(+), 52 deletions(-) diff --git a/gtsam/linear/Preconditioner.cpp b/gtsam/linear/Preconditioner.cpp index c233c431b..42ea56e68 100644 --- a/gtsam/linear/Preconditioner.cpp +++ b/gtsam/linear/Preconditioner.cpp @@ -9,6 +9,7 @@ //#include #include //#include +#include #include #include //#include @@ -83,26 +84,137 @@ std::string PreconditionerParameters::verbosityTranslator(PreconditionerParamete else return "UNKNOWN"; } -///***************************************************************************************/ -//void Preconditioner::replaceFactors(const JacobianFactorGraph &jfg, const double lambda) -//{ -// const Parameters &p = *parameters_; -// -// if ( keyInfo_.size() == 0 ) { -// keyInfo_ = KeyInfo(jfg, *orderingNatural(jfg)); -// } -// -// if ( p.verbosity() >= Parameters::COMPLEXITY ) -// cout << "Preconditioner::replaceFactors with a jfg of " << jfg.size() << " factors."<< endl; -//} -// +/***************************************************************************************/ +BlockJacobiPreconditioner::BlockJacobiPreconditioner() + : Base(), buffer_(0), bufferSize_(0), nnz_(0) {} + +/***************************************************************************************/ +BlockJacobiPreconditioner::~BlockJacobiPreconditioner() { clean(); } + 
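For reference, the BlockJacobiPreconditioner defined below keeps, for every variable, the upper-triangular Cholesky factor R_i of its Hessian diagonal block B_i = sum_j A_ij^T A_ij packed into the contiguous buffer_, and solve()/transposeSolve() back-substitute against R_i and R_i^T one block at a time. The following is an editor's minimal, self-contained sketch of that per-block operation using plain Eigen types instead of the raw buffer storage; it is illustrative only and not part of the patch.

#include <Eigen/Dense>
#include <iostream>

int main() {
  // One diagonal block B = A^T A of the (approximate) Hessian.
  Eigen::Matrix3d A;
  A << 1., -1., -1.,
       0.,  2., -1.,
       0.,  0.,  1.;
  const Eigen::Matrix3d B = A.transpose() * A;

  // build(): store the upper-triangular factor R with B = R^T R,
  // obtained exactly as blocks[i].llt().matrixL().transpose() does below.
  const Eigen::Matrix3d R = B.llt().matrixL().transpose();

  // solve(y, x): per-block back substitution x = R^{-1} y.
  const Eigen::Vector3d y(1., 2., 3.);
  Eigen::Vector3d x = y;
  R.triangularView<Eigen::Upper>().solveInPlace(x);

  // transposeSolve(y, x): forward substitution x = R^{-T} y.
  Eigen::Vector3d xt = y;
  R.transpose().triangularView<Eigen::Lower>().solveInPlace(xt);

  std::cout << x.transpose() << "\n" << xt.transpose() << "\n";
  return 0;
}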
+/***************************************************************************************/ +void BlockJacobiPreconditioner::solve(const Vector& y, Vector &x) const { + + const size_t n = dims_.size(); + double *ptr = buffer_, *dst = x.data(); + + std::copy(y.data(), y.data() + y.rows(), x.data()); + + for ( size_t i = 0 ; i < n ; ++i ) { + const size_t d = dims_[i]; + const size_t sz = d*d; + + const Eigen::Map R(ptr, d, d); + Eigen::Map b(dst, d, 1); + R.triangularView().solveInPlace(b); + + dst += d; + ptr += sz; + } +} + +/***************************************************************************************/ +void BlockJacobiPreconditioner::transposeSolve(const Vector& y, Vector& x) const { + const size_t n = dims_.size(); + double *ptr = buffer_, *dst = x.data(); + + std::copy(y.data(), y.data() + y.rows(), x.data()); + + for ( size_t i = 0 ; i < n ; ++i ) { + const size_t d = dims_[i]; + const size_t sz = d*d; + + const Eigen::Map R(ptr, d, d); + Eigen::Map b(dst, d, 1); + R.transpose().triangularView().solveInPlace(b); + + dst += d; + ptr += sz; + } +} + +/***************************************************************************************/ +void BlockJacobiPreconditioner::build( + const GaussianFactorGraph &gfg, const KeyInfo &keyInfo, const std::map &lambda) +{ + const size_t n = keyInfo.size(); + dims_ = keyInfo.colSpec(); + + /* prepare the buffer of block diagonals */ + std::vector blocks; blocks.reserve(n); + + /* allocate memory for the factorization of block diagonals */ + size_t nnz = 0; + for ( size_t i = 0 ; i < n ; ++i ) { + const size_t dim = dims_[i]; + blocks.push_back(Matrix::Zero(dim, dim)); + // nnz += (((dim)*(dim+1)) >> 1); // d*(d+1) / 2 ; + nnz += dim*dim; + } + + /* compute the block diagonal by scanning over the factors */ + BOOST_FOREACH ( const GaussianFactor::shared_ptr &gf, gfg ) { + if ( JacobianFactor::shared_ptr jf = boost::dynamic_pointer_cast(gf) ) { + for ( JacobianFactor::const_iterator it = jf->begin() ; it != jf->end() ; ++it ) { + const KeyInfoEntry &entry = keyInfo.find(*it)->second; + const Matrix &Ai = jf->getA(it); + blocks[entry.index()] += (Ai.transpose() * Ai); + } + } + else if ( HessianFactor::shared_ptr hf = boost::dynamic_pointer_cast(gf) ) { + for ( HessianFactor::const_iterator it = hf->begin() ; it != hf->end() ; ++it ) { + const KeyInfoEntry &entry = keyInfo.find(*it)->second; + const Matrix &Hii = hf->info(it, it).selfadjointView(); + blocks[entry.index()] += Hii; + } + } + else { + throw invalid_argument("BlockJacobiPreconditioner::build gfg contains a factor that is neither a JacobianFactor nor a HessianFactor."); + } + } + + /* if necessary, allocating the memory for cacheing the factorization results */ + if ( nnz > bufferSize_ ) { + clean(); + buffer_ = new double [nnz]; + bufferSize_ = nnz; + } + nnz_ = nnz; + + /* factorizing the blocks respectively */ + double *ptr = buffer_; + for ( size_t i = 0 ; i < n ; ++i ) { + /* use eigen to decompose Di */ + const Matrix R = blocks[i].llt().matrixL().transpose(); + + /* store the data in the buffer */ + size_t sz = dims_[i]*dims_[i] ; + std::copy(R.data(), R.data() + sz, ptr); + + /* advance the pointer */ + ptr += sz; + } +} + +/*****************************************************************************/ +void BlockJacobiPreconditioner::clean() { + if ( buffer_ ) { + delete [] buffer_; + buffer_ = 0; + bufferSize_ = 0; + nnz_ = 0; + } +} + /***************************************************************************************/ boost::shared_ptr createPreconditioner(const 
boost::shared_ptr parameters) { - DummyPreconditionerParameters::shared_ptr dummy = boost::dynamic_pointer_cast(parameters); - if ( dummy ) { + if ( DummyPreconditionerParameters::shared_ptr dummy = boost::dynamic_pointer_cast(parameters) ) { return boost::make_shared(); } + else if ( BlockJacobiPreconditionerParameters::shared_ptr blockJacobi = boost::dynamic_pointer_cast(parameters) ) { + return boost::make_shared(); + } + // BlockJacobiPreconditioner::Parameters::shared_ptr jacobi = boost::dynamic_pointer_cast(parameters); // if ( jacobi ) { diff --git a/gtsam/linear/Preconditioner.h b/gtsam/linear/Preconditioner.h index 5ca5ec70b..17c12446f 100644 --- a/gtsam/linear/Preconditioner.h +++ b/gtsam/linear/Preconditioner.h @@ -18,7 +18,6 @@ namespace gtsam { class GaussianFactorGraph; class KeyInfo; class VectorValues; -//class Subgraph; /* parameters for the preconditioner */ struct PreconditionerParameters { @@ -75,15 +74,15 @@ public: /* implement x = S^{-1} y */ virtual void solve(const Vector& y, Vector &x) const = 0; - virtual void solve(const VectorValues& y, VectorValues &x) const = 0; +// virtual void solve(const VectorValues& y, VectorValues &x) const = 0; /* implement x = S^{-T} y */ virtual void transposeSolve(const Vector& y, Vector& x) const = 0; - virtual void transposeSolve(const VectorValues& y, VectorValues &x) const = 0; +// virtual void transposeSolve(const VectorValues& y, VectorValues &x) const = 0; - /* implement x = S^{-1} S^{-T} y */ - virtual void fullSolve(const Vector& y, Vector &x) const = 0; - virtual void fullSolve(const VectorValues& y, VectorValues &x) const = 0; +// /* implement x = S^{-1} S^{-T} y */ +// virtual void fullSolve(const Vector& y, Vector &x) const = 0; +// virtual void fullSolve(const VectorValues& y, VectorValues &x) const = 0; /* build/factorize the preconditioner */ virtual void build( @@ -91,23 +90,6 @@ public: const KeyInfo &info, const std::map &lambda ) = 0; - -// /* complexity index */ -// virtual size_t complexity() const = 0; -// -// /* is the preconditioner dependent to data */ -// virtual bool isStatic() const = 0; -// -// /* is the preconditioner kind of spanning subgraph preconditioner */ -// virtual bool isSubgraph() const = 0; -// -// /* return A\b */ -// virtual void xstar(Vector &result) const = 0 ; -// -//protected: -// Parameters::shared_ptr parameters_; -// KeyInfo keyInfo_; - }; /*******************************************************************************************/ @@ -131,30 +113,57 @@ public: /* Computation Interfaces for raw vector */ virtual void solve(const Vector& y, Vector &x) const { x = y; } - virtual void solve(const VectorValues& y, VectorValues& x) const { x = y; } +// virtual void solve(const VectorValues& y, VectorValues& x) const { x = y; } virtual void transposeSolve(const Vector& y, Vector& x) const { x = y; } - virtual void transposeSolve(const VectorValues& y, VectorValues& x) const { x = y; } +// virtual void transposeSolve(const VectorValues& y, VectorValues& x) const { x = y; } - virtual void fullSolve(const Vector& y, Vector &x) const { x = y; } - virtual void fullSolve(const VectorValues& y, VectorValues& x) const { x = y; } +// virtual void fullSolve(const Vector& y, Vector &x) const { x = y; } +// virtual void fullSolve(const VectorValues& y, VectorValues& x) const { x = y; } virtual void build( const GaussianFactorGraph &gfg, const KeyInfo &info, const std::map &lambda ) {} - -// virtual void replaceFactors(const JacobianFactorGraph &jfg, const double lambda = 0.0) { -// 
Base::replaceFactors(jfg,lambda); -// } -// virtual void buildPreconditioner() {} -// virtual size_t complexity() const { return 0; } -// virtual bool isStatic() const { return true; } -// virtual bool isSubgraph() const { return false; } -// virtual void xstar(Vector &result) const {} }; +/*******************************************************************************************/ +struct BlockJacobiPreconditionerParameters : public PreconditionerParameters { + typedef PreconditionerParameters Base; + BlockJacobiPreconditionerParameters() : Base() {} + virtual ~BlockJacobiPreconditionerParameters() {} +}; + +/*******************************************************************************************/ +class BlockJacobiPreconditioner : public Preconditioner { +public: + typedef Preconditioner Base; + BlockJacobiPreconditioner() ; + virtual ~BlockJacobiPreconditioner() ; + + /* Computation Interfaces for raw vector */ + virtual void solve(const Vector& y, Vector &x) const; + virtual void transposeSolve(const Vector& y, Vector& x) const ; +// virtual void fullSolve(const Vector& y, Vector &x) const ; + + virtual void build( + const GaussianFactorGraph &gfg, + const KeyInfo &info, + const std::map &lambda + ) ; + +protected: + + void clean() ; + + std::vector dims_; + double *buffer_; + size_t bufferSize_; + size_t nnz_; +}; + +/*********************************************************************************************/ /* factory method to create preconditioners */ boost::shared_ptr createPreconditioner(const boost::shared_ptr parameters); diff --git a/tests/testPCGSolver.cpp b/tests/testPCGSolver.cpp index ea9b5ec6c..ef74ad1ef 100644 --- a/tests/testPCGSolver.cpp +++ b/tests/testPCGSolver.cpp @@ -50,7 +50,47 @@ using symbol_shorthand::X; using symbol_shorthand::L; /* ************************************************************************* */ -TEST( NonlinearOptimizer, optimization_method ) +TEST( PCGSolver, llt ) { + Matrix R = (Matrix(3,3) << + 1., -1., -1., + 0., 2., -1., + 0., 0., 1.); + Matrix AtA = R.transpose() * R; + + Vector Rvector = (Vector(9) << 1., -1., -1., + 0., 2., -1., + 0., 0., 1.); +// Vector Rvector = (Vector(6) << 1., -1., -1., +// 2., -1., +// 1.); + + Vector b = (Vector(3) << 1., 2., 3.); + + Vector x = (Vector(3) << 6.5, 2.5, 3.) 
; + + /* test cholesky */ + Matrix Rhat = AtA.llt().matrixL().transpose(); + EXPECT(assert_equal(R, Rhat, 1e-5)); + + /* test backward substitution */ + Vector xhat = Rhat.triangularView().solve(b); + EXPECT(assert_equal(x, xhat, 1e-5)); + + /* test in-place back substitution */ + xhat = b; + Rhat.triangularView().solveInPlace(xhat); + EXPECT(assert_equal(x, xhat, 1e-5)); + + /* test triangular matrix map */ + Eigen::Map Radapter(Rvector.data(), 3, 3); + xhat = Radapter.transpose().triangularView().solve(b); + EXPECT(assert_equal(x, xhat, 1e-5)); + +} + + +/* ************************************************************************* */ +TEST( PCGSolver, dummy ) { LevenbergMarquardtParams paramsPCG; paramsPCG.linearSolverType = LevenbergMarquardtParams::Iterative; @@ -69,6 +109,26 @@ TEST( NonlinearOptimizer, optimization_method ) DOUBLES_EQUAL(0,fg.error(actualPCG),tol); } +/* ************************************************************************* */ +TEST( PCGSolver, blockjacobi ) +{ + LevenbergMarquardtParams paramsPCG; + paramsPCG.linearSolverType = LevenbergMarquardtParams::Iterative; + PCGSolverParameters::shared_ptr pcg = boost::make_shared(); + pcg->preconditioner_ = boost::make_shared(); + paramsPCG.iterativeParams = pcg; + + NonlinearFactorGraph fg = example::createReallyNonlinearFactorGraph(); + + Point2 x0(10,10); + Values c0; + c0.insert(X(1), x0); + + Values actualPCG = LevenbergMarquardtOptimizer(fg, c0, paramsPCG).optimize(); + + DOUBLES_EQUAL(0,fg.error(actualPCG),tol); +} + /* ************************************************************************* */ int main() { TestResult tr; From 17426d0076dfbb35742c413b1947f8ba90fc2fda Mon Sep 17 00:00:00 2001 From: Yong-Dian Jian Date: Sun, 15 Jun 2014 23:14:06 -0400 Subject: [PATCH 5/5] subgraph preconditioner revised --- gtsam/linear/ConjugateGradientSolver.h | 2 +- gtsam/linear/IterativeSolver.h | 4 +- gtsam/linear/PCGSolver.cpp | 236 ------ gtsam/linear/PCGSolver.h | 133 +--- gtsam/linear/Preconditioner.cpp | 120 +-- gtsam/linear/Preconditioner.h | 26 +- gtsam/linear/SubgraphPreconditioner.cpp | 722 +++++++++++++++--- gtsam/linear/SubgraphPreconditioner.h | 207 ++++- gtsam/linear/SubgraphSolver.cpp | 8 + gtsam/linear/SubgraphSolver.h | 6 +- gtsam/nonlinear/LevenbergMarquardtOptimizer.h | 1 + gtsam/nonlinear/NonlinearOptimizer.cpp | 1 + tests/testPCGSolver.cpp | 22 +- 13 files changed, 873 insertions(+), 615 deletions(-) diff --git a/gtsam/linear/ConjugateGradientSolver.h b/gtsam/linear/ConjugateGradientSolver.h index 47ec4b405..2382a0eb1 100644 --- a/gtsam/linear/ConjugateGradientSolver.h +++ b/gtsam/linear/ConjugateGradientSolver.h @@ -19,7 +19,7 @@ namespace gtsam { /** * parameters for the conjugate gradient method */ -class ConjugateGradientParameters : public IterativeOptimizationParameters { +class GTSAM_EXPORT ConjugateGradientParameters : public IterativeOptimizationParameters { public: typedef IterativeOptimizationParameters Base; diff --git a/gtsam/linear/IterativeSolver.h b/gtsam/linear/IterativeSolver.h index 66006875b..e02d92e3a 100644 --- a/gtsam/linear/IterativeSolver.h +++ b/gtsam/linear/IterativeSolver.h @@ -115,6 +115,7 @@ namespace gtsam { class KeyInfo : public std::map { public: typedef std::map Base; + KeyInfo() {} KeyInfo(const GaussianFactorGraph &fg); KeyInfo(const GaussianFactorGraph &fg, const Ordering &ordering); @@ -132,9 +133,6 @@ namespace gtsam { Ordering ordering_; size_t numCols_; - private: - KeyInfo() {} - }; diff --git a/gtsam/linear/PCGSolver.cpp b/gtsam/linear/PCGSolver.cpp index 
6bfa389fb..27eb57b44 100644 --- a/gtsam/linear/PCGSolver.cpp +++ b/gtsam/linear/PCGSolver.cpp @@ -198,240 +198,4 @@ VectorValues buildVectorValues(const Vector &v, const KeyInfo &keyInfo) { return result; } -///*****************************************************************************/ -//std::string PCGSolverParameters::pcgTranslator(const PCGKernel value) { -// std::string s; -// switch (value) { -// case PCGSolverParameters::PCG: s = "PCG"; break; -// case PCGSolverParameters::LSPCG: s = "LSPCG"; break; -// case PCGSolverParameters::SPCG: s = "SPCG"; break; -// default: s = "UNDEFINED"; break; -// } -// return s; -//} -// -///*****************************************************************************/ -//PCGSolverParameters::PCGKernel PCGSolverParameters::pcgTranslator(const std::string &src) { -// std::string s = src; boost::algorithm::to_upper(s); -// if (s == "PCG") return PCGSolverParameters::PCG; -// if (s == "LSPCG") return PCGSolverParameters::LSPCG; -// if (s == "SPCG") return PCGSolverParameters::SPCG; -// -// /* default is LSPCG */ -// return PCGSolverParameters::LSPCG; -//} -// -///*****************************************************************************/ -//std::string PCGSolverParameters::blasTranslator(const BLASKernel value) { -// std::string s; -// switch (value) { -// case PCGSolverParameters::GTSAM: s = "GTSAM" ; break; -// case PCGSolverParameters::SBM: s = "SBM" ; break; -// case PCGSolverParameters::SBM_MT: s = "SBM-MT" ; break; -// default: s = "UNDEFINED" ; break; -// } -// return s; -//} -// -///*****************************************************************************/ -//PCGSolverParameters::BLASKernel PCGSolverParameters::blasTranslator(const std::string &src) { -// std::string s = src; boost::algorithm::to_upper(s); -// if (s == "GTSAM") return PCGSolverParameters::GTSAM; -// if (s == "SBM") return PCGSolverParameters::SBM; -// if (s == "SBM-MT") return PCGSolverParameters::SBM_MT; -// -// /* default is SBM */ -// return PCGSolverParameters::SBM; -//} -// -///*****************************************************************************/ -//std::string PCGSolverParameters::rbTranslator(const RegisterBlockKernel k) { -// std::string s; -// switch (k) { -// case PCGSolverParameters::RB_NONE: s = "RB_NONE" ; break; -// case PCGSolverParameters::SSE_RB22: s = "SSE_RB22" ; break; -// case PCGSolverParameters::SSE_RB44: s = "SSE_RB44" ; break; -// case PCGSolverParameters::AVX_RB44: s = "AVX_RB44" ; break; -// default: s = "UNDEFINED" ; break; -// } -// return s; -//} -// -///*****************************************************************************/ -//PCGSolverParameters::RegisterBlockKernel PCGSolverParameters::rbTranslator(const std::string &src) { -// std::string s = src; boost::algorithm::to_upper(s); -// if (s == "") return PCGSolverParameters::RB_NONE; -// if (s == "SSE_RB22") return PCGSolverParameters::SSE_RB22; -// if (s == "SSE_RB44") return PCGSolverParameters::SSE_RB44; -// if (s == "AVX_RB44") return PCGSolverParameters::AVX_RB44; -// -// /* default is SBM */ -// return PCGSolverParameters::RB_NONE; -//} -// -///*****************************************************************************/ -//PCGSolver::PCGSolver(const PCGSolverParameters &p) : Base(), parameters_(p), built_(false) { -// preconditioner_ = createPreconditioner(p.preconditioner_); -//} -// -///*****************************************************************************/ -//void PCGSolver::replaceFactors( -// const Values &linearization_point, -// const 
GaussianFactorGraph &gfg, -// const double lambda) -//{ -// const JacobianFactorGraph jfg(gfg); -// -// /* prepare the column structure */ -// if ( keyInfo_.size() == 0 ) { -// keyInfo_ = KeyInfo(jfg, *orderingNatural(jfg)); -// } -// -// /* implemented by subclass */ -// safe_tic_(); -// convertKernel(jfg, lambda); -// safe_toc_("convert-kernel"); -// -// /* update the preconditioner as well */ -// preconditioner_->replaceFactors(jfg, lambda); -// built_ = false; -//} -// -///*****************************************************************************/ -//void PCGSolver::buildPreconditioner() { -// if ( built_ == false ) { -// safe_tic_(); -// preconditioner_->buildPreconditioner(); -// built_ = true; -// safe_toc_("factorize"); -// } -//} -// -///*****************************************************************************/ -//VectorValues PCGSolver::optimize() { -// -// buildPreconditioner(); -// -// VectorValues zero; -// BOOST_FOREACH ( const KeyInfo::value_type &item, keyInfo_ ) { -// zero.insert(item.first, Vector::Zero(item.second.dim())); -// } -// -// return optimize(zero); -//} -// -///*****************************************************************************/ -//VectorValues PCGSolver::optimize(const VectorValues &initial) { -// safe_tic_(); -// VectorValues result = optimize_(initial); -// safe_toc_("pcg"); -// return result ; -//} -// -///*****************************************************************************/ -////void PCGSolver_FG::convertKernel(const JacobianFactorGraph &jfg, const double lambda) { -//// hfg_ = buildOuterProductJacobianFactorGraph(jfg, VariableIndex(jfg), *x0_, lambda); -////} -//// -////VectorValues PCGSolver_FG::optimize_(const VectorValues &initial) { -//// System system(hfg_, preconditioner_); -//// return preconditionedConjugateGradient(system, initial, parameters_); -////} -// -///*****************************************************************************/ -//void PCGSolver_SBM::convertKernel(const JacobianFactorGraph &jfg, const double lambda) { -// linear_ = buildSparseLinearSystem(jfg, keyInfo_, true /* AtA */, lambda, false /* column major */, -// parameters_.blas_kernel_, parameters_.rb_kernel_); -//} -// -///*****************************************************************************/ -//VectorValues PCGSolver_SBM::optimize_(const VectorValues &initial) { -// System system(linear_, preconditioner_); -// Vector solution = preconditionedConjugateGradient( -// system, initial.vector(keyInfo_.ordering()), parameters_); -// return buildVectorValues(solution, keyInfo_); -//} -// -///*****************************************************************************/ -//ydjian::SparseLinearSystem::shared_ptr -//buildSparseLinearSystem(const JacobianFactorGraph &jfg, const KeyInfo &keyInfo, -// const bool AtA, const double lambda, const bool colMajor, -// const PCGSolverParameters::BLASKernel blas, const PCGSolverParameters::RegisterBlockKernel rbSrc) { -// -// std::map rbMap; -// rbMap[PCGSolverParameters::RB_NONE] = ydjian::SBM::NAIVE; -// rbMap[PCGSolverParameters::SSE_RB22] = ydjian::SBM::SSE_RB22; -// rbMap[PCGSolverParameters::SSE_RB44] = ydjian::SBM::SSE_RB44; -// rbMap[PCGSolverParameters::AVX_RB44] = ydjian::SBM::AVX_RB44; -// -// ydjian::SBM::Type rb; -// SC_ASSERT( rbMap.find(rbSrc) != rbMap.end(), "buildSparseLinearSystem: rbSrc is not supported.."); -// rb = rbMap[rbSrc]; -// -// ydjian::SparseLinearSystem::shared_ptr linear; -// -// switch (blas) { -// -// case PCGSolverParameters::SBM: -// linear = 
gsp2::buildSparseLinearSystem(jfg, keyInfo, AtA, lambda, colMajor, false /* multithread */, rb); -// break; -// -// case PCGSolverParameters::SBM_MT: -// linear = gsp2::buildSparseLinearSystem(jfg, keyInfo, AtA, lambda, colMajor, true /* multithread */, rb); -// break; -// -// default: -// throw std::invalid_argument("createSparseLinearSystem: unsupported blas kernel"); -// break; -// } -// -// return linear; -//} -// -///*****************************************************************************/ -//PCGSolver::shared_ptr createPCGSolver(const PCGSolverParameters ¶meters){ -// -// PCGSolver::shared_ptr solver; -// switch ( parameters.pcg_kernel_ ) { -// case PCGSolverParameters::PCG: -// switch ( parameters.blas_kernel_ ) { -// -//// case PCGSolverParameters::GTSAM: -//// solver.reset(new PCGSolver_FG(parameters)); -//// break; -// -// case PCGSolverParameters::SBM: -// case PCGSolverParameters::SBM_MT: -// solver.reset(new PCGSolver_SBM(parameters)); -// break; -// -// default: -// throw std::invalid_argument("createPCGSolver: undefined blas_kernel for pcgsolver"); -// break; -// } -// break; -// -// case PCGSolverParameters::LSPCG: -// switch ( parameters.blas_kernel_ ) { -// case PCGSolverParameters::GTSAM: -// solver.reset(new LSPCGSolver_FG(parameters)); -// break; -// -// case PCGSolverParameters::SBM: -// case PCGSolverParameters::SBM_MT: -// solver.reset(new LSPCGSolver_SBM(parameters)); -// break; -// -// default: -// throw std::invalid_argument("createPCGSolver: undefined blas_kernel for lspcg solver"); -// break; -// } -// break; -// -// default: -// throw std::invalid_argument("createPCGSolver: undefined pcg_kernel"); -// break; -// } -// return solver; -//} } diff --git a/gtsam/linear/PCGSolver.h b/gtsam/linear/PCGSolver.h index 034352c50..8d48da149 100644 --- a/gtsam/linear/PCGSolver.h +++ b/gtsam/linear/PCGSolver.h @@ -25,7 +25,7 @@ class Preconditioner; class PreconditionerParameters; /*****************************************************************************/ -struct PCGSolverParameters: public ConjugateGradientParameters { +struct GTSAM_EXPORT PCGSolverParameters: public ConjugateGradientParameters { public: typedef ConjugateGradientParameters Base; typedef boost::shared_ptr shared_ptr; @@ -44,7 +44,7 @@ public: /*****************************************************************************/ /* A virtual base class for the preconditioned conjugate gradient solver */ -class PCGSolver: public IterativeSolver { +class GTSAM_EXPORT PCGSolver: public IterativeSolver { public: typedef IterativeSolver Base; typedef boost::shared_ptr shared_ptr; @@ -54,50 +54,21 @@ protected: PCGSolverParameters parameters_; boost::shared_ptr preconditioner_; -// /* cached local variables */ -// KeyInfo keyInfo_; -// -// /* whether the preconditioner has be built */ -// bool built_ ; - public: /* Interface to initialize a solver without a problem */ PCGSolver(const PCGSolverParameters &p); virtual ~PCGSolver() {} -// /* update interface to the nonlinear optimizer */ -// virtual void replaceFactors( -// const GaussianFactorGraph &factorGraph, -// const Values &linearization_point, -// const std::map lambda); - -// /* build the preconditioner */ -// void buildPreconditioner(); - -// /* interface for the NonlinearOptimizer, with initial estimate equals to a zero vector */ -// virtual VectorValues optimize() ; -// -// /* interface for the NonlinearOptimizer, with a custom initial estimate*/ -// virtual VectorValues optimize(const VectorValues &initial); - using IterativeSolver::optimize; virtual 
VectorValues optimize(const GaussianFactorGraph &gfg, const KeyInfo &keyInfo, const std::map &lambda, const VectorValues &initial); -//protected: - -// /* convert jacobian factor graph to solver-specific kernel */ -// virtual void convertKernel(const JacobianFactorGraph &jfg, const double lambda = 0.0) = 0 ; -// -// /* do the actual optimization */ -// virtual VectorValues optimize_(const VectorValues &initial) = 0; - }; /*****************************************************************************/ -class GaussianFactorGraphSystem { +class GTSAM_EXPORT GaussianFactorGraphSystem { public: GaussianFactorGraphSystem(const GaussianFactorGraph &gfg, @@ -134,103 +105,5 @@ VectorValues buildVectorValues(const Vector &v, const Ordering &ordering, /**********************************************************************************/ VectorValues buildVectorValues(const Vector &v, const KeyInfo &keyInfo); -///*****************************************************************************/ -///* an specialization of the PCGSolver using gtsam's factor graph and linear algebra facility */ -//class PCGSolver_FG : public PCGSolver { -// -//public: -// typedef PCGSolver Base; -// typedef boost::shared_ptr shared_ptr ; -// -//protected: -// JacobianFactorGraph::shared_ptr hfg_; // A'*A + lambda*I -// -//public: -// PCGSolver_FG(const Parameters ¶meters) : Base(parameters) {} -// virtual ~PCGSolver_FG() {} -// -//protected: -// virtual void convertKernel(const JacobianFactorGraph &jfg, const double lambda = 0.0); -// virtual VectorValues optimize_(const VectorValues &initial); -// -// /* interface to the preconditionedConjugateGradient function template */ -// class System { -// public: -// typedef Vector State; -// typedef Vector Residual; -// -// protected: -// const JacobianFactorGraph::shared_ptr hfg_; -// const Preconditioner::shared_ptr preconditioner_; -// const KeyInfo &keyInfo_; -// const RowInfo &rowInfo_; -// -// public: -// System(const JacobianFactorGraph::shared_ptr hfg, -// const Preconditioner::shared_ptr preconditioner, -// const KeyInfo &keyInfo, const RowInfo &rowInfo); -// -// inline void residual(const State &x, State &r) const { gtsam::residual(*hfg_, x, r); } -// inline void multiply(const State &x, State& y) const { gtsam::multiply(*hfg_, x, y); } -// inline void leftPrecondition(const State &x, State &y) const { preconditioner_->transposeSolve(x, y); } -// inline void rightPrecondition(const State &x, State &y) const { preconditioner_->solve(x, y); } -// inline void scal(const double alpha, State &x) const { x.vector() *= alpha; } -// inline double dot(const State &x, const State &y) const { return x.vector().dot(y.vector()); } -// inline void axpy(const double alpha, const State &x, State &y) const { y.vector() += alpha*x.vector(); } -// } ; -//}; -// -///*****************************************************************************/ -///** an specialization of the PCGSolver using sbm and customized blas kernel **/ -//class PCGSolver_SBM : public PCGSolver { -// -//public: -// typedef PCGSolver Base; -// typedef boost::shared_ptr shared_ptr ; -// -//protected: -// ydjian::SparseLinearSystem::shared_ptr linear_; -// -//public: -// PCGSolver_SBM(const Parameters ¶meters) : Base(parameters) {} -// virtual ~PCGSolver_SBM() {} -// -//protected: -// /* virtual function of the PCGSolver */ -// virtual void convertKernel(const JacobianFactorGraph &jfg, const double lambda = 0.0); -// virtual VectorValues optimize_(const VectorValues &initial); -// -// /* interface to the 
preconditionedConjugateGradient function template */ -// class System { -// -// public: -// typedef Vector State; -// -// protected: -// const ydjian::SparseLinearSystem::shared_ptr linear_; -// const Preconditioner::shared_ptr preconditioner_; -// -// public: -// System(const ydjian::SparseLinearSystem::shared_ptr linear, const Preconditioner::shared_ptr preconditioner) -// : linear_(linear), preconditioner_(preconditioner) {} -// inline void residual(const State &x, State &r) const { linear_->residual(x.data(), r.data(), false); } -// inline void multiply(const State &x, State& y) const { linear_->matrix()->multiply(x.data(), y.data(), false); } -// inline void leftPrecondition(const State &x, State &y) const { preconditioner_->transposeSolve(x, y); } -// inline void rightPrecondition(const State &x, State &y) const { preconditioner_->solve(x, y); } -// inline void scal(const double alpha, State &x) const { x *= alpha; } -// inline double dot(const State &x, const State &y) const { return x.dot(y); } -// inline void axpy(const double alpha, const State &x, State &y) const { y += alpha*x; } -// } ; -//}; -// -///* a factory method to instantiate PCGSolvers and its derivatives */ -//PCGSolver::shared_ptr createPCGSolver(const PCGParameters ¶meters); -// -///* an utility function to create sparse linear system, for all cg solvers using the sbm kernel */ -//ydjian::SparseLinearSystem::shared_ptr -//buildSparseLinearSystem(const JacobianFactorGraph &jfg, const KeyInfo &keyInfo, const bool AtA, -// const double lambda, const bool colMajor, const PCGParameters::BLASKernel blas, -// const PCGParameters::RegisterBlockKernel rb); - } diff --git a/gtsam/linear/Preconditioner.cpp b/gtsam/linear/Preconditioner.cpp index 42ea56e68..c180f1160 100644 --- a/gtsam/linear/Preconditioner.cpp +++ b/gtsam/linear/Preconditioner.cpp @@ -1,27 +1,16 @@ /* * Preconditioner.cpp * - * Created on: Feb 2, 2012 + * Created on: Jun 2, 2014 * Author: ydjian */ - -//#include #include -//#include #include #include #include -//#include -//#include -//#include +#include #include -//#include -//#include -//#include -//#include -//#include -//#include #include #include #include @@ -31,7 +20,6 @@ using namespace std; namespace gtsam { - /*****************************************************************************/ void PreconditionerParameters::print() const { print(cout); @@ -214,110 +202,12 @@ boost::shared_ptr createPreconditioner(const boost::shared_ptr

(parameters) ) { return boost::make_shared(); } - - -// BlockJacobiPreconditioner::Parameters::shared_ptr jacobi = boost::dynamic_pointer_cast(parameters); -// if ( jacobi ) { -// BlockJacobiPreconditioner::shared_ptr p(new BlockJacobiPreconditioner(jacobi)); -// return p; -// } -// -// MIQRPreconditioner::Parameters::shared_ptr miqr = boost::dynamic_pointer_cast(parameters); -// if ( miqr ) { -// MIQRPreconditioner::shared_ptr p(new MIQRPreconditioner(miqr)); -// return p; -// } -// -// CombinatorialPreconditioner::Parameters::shared_ptr combinatorial -// = boost::dynamic_pointer_cast(parameters); -// if ( combinatorial ) { -// return createCombinatorialPreconditioner(combinatorial); -// } + else if ( SubgraphPreconditionerParameters::shared_ptr subgraph = boost::dynamic_pointer_cast(parameters) ) { + return boost::make_shared(*subgraph); + } throw invalid_argument("createPreconditioner: unexpected preconditioner parameter type"); } -// -///***************************************************************************************/ -//JacobianFactorGraph::shared_ptr -//buildFactorSubgraph(const JacobianFactorGraph &jfg, const KeyInfo &keyInfo, -// const Subgraph &subgraph, const int hessian, const double lambda, const bool clone, const bool includeUnary){ -// -// const Subgraph::Edges &edges = subgraph.edges(); -// const size_t m = jfg.size(), n = keyInfo.size(); -// const bool augment = (lambda!=0.0) ? true : false ; -// -// /* produce an edge weight table */ -// std::vector weights(m, 0.0); -// BOOST_FOREACH ( const Subgraph::Edge &edge, edges ) { -// weights[edge.index()] = edge.weight(); -// } -// -// /* collect the number of dangling unary factors */ -// /* upper bound of the factors */ -// size_t extraUnary = 0; -// if ( includeUnary ) { -// for ( Index i = 0 ; i < m ; ++i ) { -// if ( weights[i] == 0.0 && jfg[i]->size() == 1 ) { -// weights[i] = 1.0; -// ++extraUnary; -// } -// } -// } -// -// const size_t e = edges.size() + extraUnary; -// const size_t sz = e + (augment ? n : 0) + (hessian ? 
2*(m-e) : 0); -// -// JacobianFactorGraph::shared_ptr target(new JacobianFactorGraph()); -// target->reserve(sz); -// -// /* auxiliary variable */ -// size_t r = jfg[0]->rows(); -// SharedDiagonal sigma(noiseModel::Unit::Create(r)); -// -// for ( Index i = 0 ; i < m ; ++i ) { -// -// const double w = weights[i]; -// -// if ( w != 0.0 ) { /* subgraph edge */ -// if ( !clone && w == 1.0 ) { -// target->push_back(jfg[i]); -// } -// else { -// JacobianFactor::shared_ptr jf (new JacobianFactor(*jfg[i])); -// scaleJacobianFactor(*jf, w); -// target->push_back(jf); -// } -// } -// else { /* non-subgraph edge */ -// if ( hessian ) { -// const JacobianFactor &f = *jfg[i]; -// /* simple case: unary factor */ -// if ( f.size() == 1 ) { -// if (!clone) target->push_back(jfg[i]); -// else { -// JacobianFactor::shared_ptr jf(new JacobianFactor(*jfg[i])); -// target->push_back(jf); -// } -// } -// else { /* general factor */ -// const size_t rj = f.rows(); -// if ( rj != r ) { -// r = rj; sigma = noiseModel::Unit::Create(r); -// } -// for ( JacobianFactor::const_iterator j = f.begin() ; j != f.end() ; ++j ) { -// JacobianFactor::shared_ptr jf(new JacobianFactor(*j, f.getA(j), Vector::Zero(r,1), sigma)); -// target->push_back(jf); -// } /* for */ -// } /* factor arity */ -// } /* hessian */ -// } /* if w != 0.0 */ -// } /* for */ -// -// if ( !augment ) return target ; -// -// return appendPriorFactors(*target, keyInfo, lambda, false /* clone */); -//} - } diff --git a/gtsam/linear/Preconditioner.h b/gtsam/linear/Preconditioner.h index 17c12446f..10ceb78a9 100644 --- a/gtsam/linear/Preconditioner.h +++ b/gtsam/linear/Preconditioner.h @@ -1,7 +1,7 @@ /* * Preconditioner.h * - * Created on: Feb 1, 2012 + * Created on: Jun 2, 2014 * Author: ydjian */ @@ -20,7 +20,7 @@ class KeyInfo; class VectorValues; /* parameters for the preconditioner */ -struct PreconditionerParameters { +struct GTSAM_EXPORT PreconditionerParameters { typedef boost::shared_ptr shared_ptr; @@ -54,14 +54,12 @@ struct PreconditionerParameters { /* for serialization */ friend std::ostream& operator<<(std::ostream &os, const PreconditionerParameters &p); - }; - /* PCG aims to solve the problem: A x = b by reparametrizing it as * S^t A S y = S^t b or M A x = M b, where A \approx S S, or A \approx M * The goal of this class is to provide a general interface to all preconditioners */ -class Preconditioner { +class GTSAM_EXPORT Preconditioner { public: typedef boost::shared_ptr shared_ptr; typedef std::vector Dimensions; @@ -93,7 +91,7 @@ public: }; /*******************************************************************************************/ -struct DummyPreconditionerParameters : public PreconditionerParameters { +struct GTSAM_EXPORT DummyPreconditionerParameters : public PreconditionerParameters { typedef PreconditionerParameters Base; typedef boost::shared_ptr shared_ptr; DummyPreconditionerParameters() : Base() {} @@ -101,7 +99,7 @@ struct DummyPreconditionerParameters : public PreconditionerParameters { }; /*******************************************************************************************/ -class DummyPreconditioner : public Preconditioner { +class GTSAM_EXPORT DummyPreconditioner : public Preconditioner { public: typedef Preconditioner Base; typedef boost::shared_ptr shared_ptr; @@ -129,14 +127,14 @@ public: }; /*******************************************************************************************/ -struct BlockJacobiPreconditionerParameters : public PreconditionerParameters { +struct GTSAM_EXPORT 
BlockJacobiPreconditionerParameters : public PreconditionerParameters { typedef PreconditionerParameters Base; BlockJacobiPreconditionerParameters() : Base() {} virtual ~BlockJacobiPreconditionerParameters() {} }; /*******************************************************************************************/ -class BlockJacobiPreconditioner : public Preconditioner { +class GTSAM_EXPORT BlockJacobiPreconditioner : public Preconditioner { public: typedef Preconditioner Base; BlockJacobiPreconditioner() ; @@ -167,16 +165,6 @@ protected: /* factory method to create preconditioners */ boost::shared_ptr createPreconditioner(const boost::shared_ptr parameters); -///* build a factor subgraph, which is defined as a set of weighted edges (factors). -// * "hessian"={0,1} indicates whether the non-subgraph edges are split into multiple unary factors -// * "lambda" indicates whether scaled identity matrix is augmented -// * "clone" indicates whether factors are cloned into the subgraph if the edge has weight = 1.0 -// * "includeUnary" indicates whether the unary factors are forced to join the subgraph: -// * useful for static subgraph preconditioner, because unary factors were imposed dynamically by LM */ -//boost::shared_ptr -//buildFactorSubgraph(const JacobianFactorGraph &jfg, const KeyInfo &keyInfo, -// const Subgraph &subgraph, const int hessian, const double lambda, const bool clone, const bool includeUnary); - } diff --git a/gtsam/linear/SubgraphPreconditioner.cpp b/gtsam/linear/SubgraphPreconditioner.cpp index 44cb8c146..20b434b9f 100644 --- a/gtsam/linear/SubgraphPreconditioner.cpp +++ b/gtsam/linear/SubgraphPreconditioner.cpp @@ -15,126 +15,646 @@ * @author: Frank Dellaert */ +#include #include #include #include #include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include using namespace std; namespace gtsam { - /* ************************************************************************* */ - static GaussianFactorGraph::shared_ptr convertToJacobianFactors(const GaussianFactorGraph &gfg) { - GaussianFactorGraph::shared_ptr result(new GaussianFactorGraph()); - BOOST_FOREACH(const GaussianFactor::shared_ptr &gf, gfg) { - JacobianFactor::shared_ptr jf = boost::dynamic_pointer_cast(gf); - if( !jf ) { - jf = boost::make_shared(*gf); // Convert any non-Jacobian factors to Jacobians (e.g. Hessian -> Jacobian with Cholesky) +/* ************************************************************************* */ +static GaussianFactorGraph::shared_ptr convertToJacobianFactors(const GaussianFactorGraph &gfg) { + GaussianFactorGraph::shared_ptr result(new GaussianFactorGraph()); + BOOST_FOREACH(const GaussianFactor::shared_ptr &gf, gfg) { + JacobianFactor::shared_ptr jf = boost::dynamic_pointer_cast(gf); + if( !jf ) { + jf = boost::make_shared(*gf); // Convert any non-Jacobian factors to Jacobians (e.g. 
Hessian -> Jacobian with Cholesky) + } + result->push_back(jf); + } + return result; +} + +/*****************************************************************************/ +static std::vector iidSampler(const vector &weight, const size_t n) { + + /* compute the sum of the weights */ + const double sum = std::accumulate(weight.begin(), weight.end(), 0.0); + + /* make a normalized and accumulated version of the weight vector */ + const size_t m = weight.size(); + vector w; w.reserve(m); + for ( size_t i = 0 ; i < m ; ++i ) { + w.push_back(weight[i]/sum); + } + + vector acc(m); + std::partial_sum(w.begin(),w.end(),acc.begin()); + + /* iid sample n times */ + vector result; result.reserve(n); + const double denominator = (double)RAND_MAX; + for ( size_t i = 0 ; i < n ; ++i ) { + const double value = rand() / denominator; + /* binary search the interval containing "value" */ + vector::iterator it = std::lower_bound(acc.begin(), acc.end(), value); + size_t idx = it - acc.begin(); + result.push_back(idx); + } + return result; +} + +/*****************************************************************************/ +vector uniqueSampler(const vector &weight, const size_t n) { + + const size_t m = weight.size(); + if ( n > m ) throw std::invalid_argument("uniqueSampler: invalid input size"); + + vector result; + + size_t count = 0; + std::vector touched(m, false); + while ( count < n ) { + std::vector localIndices; localIndices.reserve(n-count); + std::vector localWeights; localWeights.reserve(n-count); + + /* collect data */ + for ( size_t i = 0 ; i < m ; ++i ) { + if ( !touched[i] ) { + localIndices.push_back(i); + localWeights.push_back(weight[i]); } - result->push_back(jf); - } - return result; - } - - /* ************************************************************************* */ - SubgraphPreconditioner::SubgraphPreconditioner(const sharedFG& Ab2, - const sharedBayesNet& Rc1, const sharedValues& xbar) : - Ab2_(convertToJacobianFactors(*Ab2)), Rc1_(Rc1), xbar_(xbar), b2bar_(new Errors(-Ab2_->gaussianErrors(*xbar))) { - } - - /* ************************************************************************* */ - // x = xbar + inv(R1)*y - VectorValues SubgraphPreconditioner::x(const VectorValues& y) const { - return *xbar_ + Rc1_->backSubstitute(y); - } - - /* ************************************************************************* */ - double SubgraphPreconditioner::error(const VectorValues& y) const { - Errors e(y); - VectorValues x = this->x(y); - Errors e2 = Ab2()->gaussianErrors(x); - return 0.5 * (dot(e, e) + dot(e2,e2)); - } - - /* ************************************************************************* */ - // gradient is y + inv(R1')*A2'*(A2*inv(R1)*y-b2bar), - VectorValues SubgraphPreconditioner::gradient(const VectorValues& y) const { - VectorValues x = Rc1()->backSubstitute(y); /* inv(R1)*y */ - Errors e = (*Ab2()*x - *b2bar()); /* (A2*inv(R1)*y-b2bar) */ - VectorValues v = VectorValues::Zero(x); - Ab2()->transposeMultiplyAdd(1.0, e, v); /* A2'*(A2*inv(R1)*y-b2bar) */ - return y + Rc1()->backSubstituteTranspose(v); - } - - /* ************************************************************************* */ - // Apply operator A, A*y = [I;A2*inv(R1)]*y = [y; A2*inv(R1)*y] - Errors SubgraphPreconditioner::operator*(const VectorValues& y) const { - - Errors e(y); - VectorValues x = Rc1()->backSubstitute(y); /* x=inv(R1)*y */ - Errors e2 = *Ab2() * x; /* A2*x */ - e.splice(e.end(), e2); - return e; - } - - /* ************************************************************************* */ - // In-place 
version that overwrites e - void SubgraphPreconditioner::multiplyInPlace(const VectorValues& y, Errors& e) const { - - Errors::iterator ei = e.begin(); - for ( Key i = 0 ; i < y.size() ; ++i, ++ei ) { - *ei = y[i]; } - // Add A2 contribution - VectorValues x = Rc1()->backSubstitute(y); // x=inv(R1)*y - Ab2()->multiplyInPlace(x, ei); // use iterator version - } - - /* ************************************************************************* */ - // Apply operator A', A'*e = [I inv(R1')*A2']*e = e1 + inv(R1')*A2'*e2 - VectorValues SubgraphPreconditioner::operator^(const Errors& e) const { - - Errors::const_iterator it = e.begin(); - VectorValues y = zero(); - for ( Key i = 0 ; i < y.size() ; ++i, ++it ) - y[i] = *it ; - transposeMultiplyAdd2(1.0,it,e.end(),y); - return y; - } - - /* ************************************************************************* */ - // y += alpha*A'*e - void SubgraphPreconditioner::transposeMultiplyAdd - (double alpha, const Errors& e, VectorValues& y) const { - - Errors::const_iterator it = e.begin(); - for ( Key i = 0 ; i < y.size() ; ++i, ++it ) { - const Vector& ei = *it; - axpy(alpha, ei, y[i]); + /* sampling and cache results */ + vector samples = iidSampler(localWeights, n-count); + BOOST_FOREACH ( const size_t &id, samples ) { + if ( touched[id] == false ) { + touched[id] = true ; + result.push_back(id); + if ( ++count >= n ) break; + } } - transposeMultiplyAdd2(alpha, it, e.end(), y); + } + return result; +} + +/****************************************************************************/ +Subgraph::Subgraph(const std::vector &indices) { + edges_.reserve(indices.size()); + BOOST_FOREACH ( const size_t &idx, indices ) { + edges_.push_back(SubgraphEdge(idx, 1.0)); + } +} + +/****************************************************************************/ +std::vector Subgraph::edgeIndices() const { + std::vector eid; eid.reserve(size()); + BOOST_FOREACH ( const SubgraphEdge &edge, edges_ ) { + eid.push_back(edge.index_); + } + return eid; +} + +/****************************************************************************/ +void Subgraph::save(const std::string &fn) const { + std::ofstream os(fn.c_str()); + boost::archive::text_oarchive oa(os); + oa << *this; + os.close(); +} + +/****************************************************************************/ +Subgraph::shared_ptr Subgraph::load(const std::string &fn) { + std::ifstream is(fn.c_str()); + boost::archive::text_iarchive ia(is); + Subgraph::shared_ptr subgraph(new Subgraph()); + ia >> *subgraph; + is.close(); + return subgraph; +} + +/****************************************************************************/ +std::ostream &operator<<(std::ostream &os, const SubgraphEdge &edge) { + if ( edge.weight() != 1.0 ) + os << edge.index() << "(" << std::setprecision(2) << edge.weight() << ")"; + else + os << edge.index() ; + return os; +} + +/****************************************************************************/ +std::ostream &operator<<(std::ostream &os, const Subgraph &subgraph) { + os << "Subgraph" << endl; + BOOST_FOREACH ( const SubgraphEdge &e, subgraph.edges() ) { + os << e << ", " ; + } + return os; +} + +/*****************************************************************************/ +void SubgraphBuilderParameters::print() const { + print(cout); +} + +/***************************************************************************************/ +void SubgraphBuilderParameters::print(ostream &os) const { + os << "SubgraphBuilderParameters" << endl + << "skeleton: " << skeletonTranslator(skeleton_) 
<< endl + << "skeleton weight: " << skeletonWeightTranslator(skeletonWeight_) << endl + << "augmentation weight: " << augmentationWeightTranslator(augmentationWeight_) << endl + ; +} + +/*****************************************************************************/ +ostream& operator<<(ostream &os, const SubgraphBuilderParameters &p) { + p.print(os); + return os; +} + +/*****************************************************************************/ +SubgraphBuilderParameters::Skeleton SubgraphBuilderParameters::skeletonTranslator(const std::string &src){ + std::string s = src; boost::algorithm::to_upper(s); + if (s == "NATURALCHAIN") return NATURALCHAIN; + else if (s == "BFS") return BFS; + else if (s == "KRUSKAL") return KRUSKAL; + throw invalid_argument("SubgraphBuilderParameters::skeletonTranslator undefined string " + s); + return KRUSKAL; +} + +/****************************************************************/ +std::string SubgraphBuilderParameters::skeletonTranslator(Skeleton w) { + if ( w == NATURALCHAIN )return "NATURALCHAIN"; + else if ( w == BFS ) return "BFS"; + else if ( w == KRUSKAL )return "KRUSKAL"; + else return "UNKNOWN"; +} + +/****************************************************************/ +SubgraphBuilderParameters::SkeletonWeight SubgraphBuilderParameters::skeletonWeightTranslator(const std::string &src) { + std::string s = src; boost::algorithm::to_upper(s); + if (s == "EQUAL") return EQUAL; + else if (s == "RHS") return RHS_2NORM; + else if (s == "LHS") return LHS_FNORM; + else if (s == "RANDOM") return RANDOM; + throw invalid_argument("SubgraphBuilderParameters::skeletonWeightTranslator undefined string " + s); + return EQUAL; +} + +/****************************************************************/ +std::string SubgraphBuilderParameters::skeletonWeightTranslator(SkeletonWeight w) { + if ( w == EQUAL ) return "EQUAL"; + else if ( w == RHS_2NORM ) return "RHS"; + else if ( w == LHS_FNORM ) return "LHS"; + else if ( w == RANDOM ) return "RANDOM"; + else return "UNKNOWN"; +} + +/****************************************************************/ +SubgraphBuilderParameters::AugmentationWeight SubgraphBuilderParameters::augmentationWeightTranslator(const std::string &src) { + std::string s = src; boost::algorithm::to_upper(s); + if (s == "SKELETON") return SKELETON; +// else if (s == "STRETCH") return STRETCH; +// else if (s == "GENERALIZED_STRETCH") return GENERALIZED_STRETCH; + throw invalid_argument("SubgraphBuilder::Parameters::augmentationWeightTranslator undefined string " + s); + return SKELETON; +} + +/****************************************************************/ +std::string SubgraphBuilderParameters::augmentationWeightTranslator(AugmentationWeight w) { + if ( w == SKELETON ) return "SKELETON"; +// else if ( w == STRETCH ) return "STRETCH"; +// else if ( w == GENERALIZED_STRETCH ) return "GENERALIZED_STRETCH"; + else return "UNKNOWN"; +} + +/****************************************************************/ +std::vector SubgraphBuilder::buildTree(const GaussianFactorGraph &gfg, const FastMap &ordering, const std::vector &w) const { + const SubgraphBuilderParameters &p = parameters_; + switch (p.skeleton_) { + case SubgraphBuilderParameters::NATURALCHAIN: + return natural_chain(gfg); + break; + case SubgraphBuilderParameters::BFS: + return bfs(gfg); + break; + case SubgraphBuilderParameters::KRUSKAL: + return kruskal(gfg, ordering, w); + break; + default: + cerr << "SubgraphBuilder::buildTree undefined skeleton type" << endl; + break; + } + return vector(); +} + 
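/* Editorial example (a minimal sketch, not part of this patch): how the
 * SubgraphBuilder added in this file might be configured and invoked on its own.
 * The two-factor graph below is invented purely for illustration; the parameter
 * fields, the builder, and Subgraph are the ones introduced by this patch in
 * SubgraphPreconditioner.h / .cpp. */
#include <iostream>
#include <gtsam/linear/GaussianFactorGraph.h>
#include <gtsam/linear/NoiseModel.h>
#include <gtsam/linear/SubgraphPreconditioner.h>

void subgraphBuilderSketch() {
  using namespace gtsam;

  // toy linear system: a unary factor on key 0 and a binary factor between keys 0 and 1
  GaussianFactorGraph gfg;
  gfg.add(0, Matrix::Identity(2, 2), Vector::Zero(2), noiseModel::Unit::Create(2));
  gfg.add(0, -Matrix::Identity(2, 2), 1, Matrix::Identity(2, 2),
          (Vector(2) << 1.0, 0.0).finished(), noiseModel::Unit::Create(2));

  // pick a Kruskal (maximum-weight spanning tree) skeleton, weighing edges by the rhs 2-norm
  SubgraphBuilderParameters params;
  params.skeleton_ = SubgraphBuilderParameters::KRUSKAL;
  params.skeletonWeight_ = SubgraphBuilderParameters::RHS_2NORM;
  params.augmentationWeight_ = SubgraphBuilderParameters::SKELETON;
  params.complexity_ = 1.0;   // operator() samples roughly n*complexity_ off-tree edges

  SubgraphBuilder builder(params);
  Subgraph::shared_ptr subgraph = builder(gfg);   // factor indices selected for the subgraph
  std::cout << *subgraph << std::endl;
}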
+/****************************************************************/ +std::vector SubgraphBuilder::unary(const GaussianFactorGraph &gfg) const { + std::vector result ; + size_t idx = 0; + BOOST_FOREACH ( const GaussianFactor::shared_ptr &gf, gfg ) { + if ( gf->size() == 1 ) { + result.push_back(idx); + } + idx++; + } + return result; +} + +/****************************************************************/ +std::vector SubgraphBuilder::natural_chain(const GaussianFactorGraph &gfg) const { + std::vector result ; + size_t idx = 0; + BOOST_FOREACH ( const GaussianFactor::shared_ptr &gf, gfg ) { + if ( gf->size() == 2 ) { + const Key k0 = gf->keys()[0], k1 = gf->keys()[1]; + if ( (k1-k0) == 1 || (k0-k1) == 1 ) + result.push_back(idx); + } + idx++; + } + return result; +} + +/****************************************************************/ +std::vector SubgraphBuilder::bfs(const GaussianFactorGraph &gfg) const { + const VariableIndex variableIndex(gfg); + /* start from the first key of the first factor */ + Key seed = gfg[0]->keys()[0]; + + const size_t n = variableIndex.size(); + + /* each vertex has self as the predecessor */ + std::vector result; + result.reserve(n-1); + + /* Initialize */ + std::queue q; + q.push(seed); + + std::set flags; + flags.insert(seed); + + /* traversal */ + while ( !q.empty() ) { + const size_t head = q.front(); q.pop(); + BOOST_FOREACH ( const size_t id, variableIndex[head] ) { + const GaussianFactor &gf = *gfg[id]; + BOOST_FOREACH ( const size_t key, gf.keys() ) { + if ( flags.count(key) == 0 ) { + q.push(key); + flags.insert(key); + result.push_back(id); + } + } + } + } + return result; +} + +/****************************************************************/ +std::vector SubgraphBuilder::kruskal(const GaussianFactorGraph &gfg, const FastMap &ordering, const std::vector &w) const { + const VariableIndex variableIndex(gfg); + const size_t n = variableIndex.size(); + const vector idx = sort_idx(w) ; + + /* initialize buffer */ + vector result; + result.reserve(n-1); + + // container for acsendingly sorted edges + DSFVector D(n) ; + + size_t count = 0 ; double sum = 0.0 ; + BOOST_FOREACH (const size_t id, idx) { + const GaussianFactor &gf = *gfg[id]; + if ( gf.keys().size() != 2 ) continue; + const size_t u = ordering.find(gf.keys()[0])->second, + u_root = D.find(u), + v = ordering.find(gf.keys()[1])->second, + v_root = D.find(v) ; + if ( u_root != v_root ) { + D.merge(u_root, v_root) ; + result.push_back(id) ; + sum += w[id] ; + if ( ++count == n-1 ) break ; + } + } + return result; +} + +/****************************************************************/ +std::vector SubgraphBuilder::sample(const std::vector &weights, const size_t t) const { + return uniqueSampler(weights, t); +} + +/****************************************************************/ +Subgraph::shared_ptr SubgraphBuilder::operator() (const GaussianFactorGraph &gfg) const { + + const SubgraphBuilderParameters &p = parameters_; + const Ordering inverse_ordering = Ordering::Natural(gfg); + const FastMap forward_ordering = inverse_ordering.invert(); + const size_t n = inverse_ordering.size(), t = n * p.complexity_ ; + + vector w = weights(gfg); + const vector tree = buildTree(gfg, forward_ordering, w); + + /* sanity check */ + if ( tree.size() != n-1 ) { + throw runtime_error("SubgraphBuilder::operator() tree.size() != n-1 failed "); } - /* ************************************************************************* */ - // y += alpha*inv(R1')*A2'*e2 - void SubgraphPreconditioner::transposeMultiplyAdd2 
(double alpha, + /* down weight the tree edges to zero */ + BOOST_FOREACH ( const size_t id, tree ) { + w[id] = 0.0; + } + + /* decide how many edges to augment */ + std::vector offTree = sample(w, t); + + vector subgraph = unary(gfg); + subgraph.insert(subgraph.end(), tree.begin(), tree.end()); + subgraph.insert(subgraph.end(), offTree.begin(), offTree.end()); + + return boost::make_shared(subgraph); +} + +/****************************************************************/ +SubgraphBuilder::Weights SubgraphBuilder::weights(const GaussianFactorGraph &gfg) const +{ + const size_t m = gfg.size() ; + Weights weight; weight.reserve(m); + + BOOST_FOREACH(const GaussianFactor::shared_ptr &gf, gfg ) { + switch ( parameters_.skeletonWeight_ ) { + case SubgraphBuilderParameters::EQUAL: + weight.push_back(1.0); + break; + case SubgraphBuilderParameters::RHS_2NORM: + { + if ( JacobianFactor::shared_ptr jf = boost::dynamic_pointer_cast(gf) ) { + weight.push_back(jf->getb().norm()); + } + else if ( HessianFactor::shared_ptr hf = boost::dynamic_pointer_cast(gf) ) { + weight.push_back(hf->linearTerm().norm()); + } + } + break; + case SubgraphBuilderParameters::LHS_FNORM: + { + if ( JacobianFactor::shared_ptr jf = boost::dynamic_pointer_cast(gf) ) { + weight.push_back(std::sqrt(jf->getA().squaredNorm())); + } + else if ( HessianFactor::shared_ptr hf = boost::dynamic_pointer_cast(gf) ) { + weight.push_back(std::sqrt(hf->information().squaredNorm())); + } + } + break; + + case SubgraphBuilderParameters::RANDOM: + weight.push_back(std::rand()%100 + 1.0); + break; + + default: + throw invalid_argument("SubgraphBuilder::weights: undefined weight scheme "); + break; + } + } + return weight; +} + +/* ************************************************************************* */ +SubgraphPreconditioner::SubgraphPreconditioner(const SubgraphPreconditionerParameters &p) : + parameters_(p) {} + +/* ************************************************************************* */ +SubgraphPreconditioner::SubgraphPreconditioner(const sharedFG& Ab2, + const sharedBayesNet& Rc1, const sharedValues& xbar, const SubgraphPreconditionerParameters &p) : + Ab2_(convertToJacobianFactors(*Ab2)), Rc1_(Rc1), xbar_(xbar), + b2bar_(new Errors(-Ab2_->gaussianErrors(*xbar))), parameters_(p) { +} + +/* ************************************************************************* */ +// x = xbar + inv(R1)*y +VectorValues SubgraphPreconditioner::x(const VectorValues& y) const { + return *xbar_ + Rc1_->backSubstitute(y); +} + +/* ************************************************************************* */ +double SubgraphPreconditioner::error(const VectorValues& y) const { + Errors e(y); + VectorValues x = this->x(y); + Errors e2 = Ab2()->gaussianErrors(x); + return 0.5 * (dot(e, e) + dot(e2,e2)); +} + +/* ************************************************************************* */ +// gradient is y + inv(R1')*A2'*(A2*inv(R1)*y-b2bar), +VectorValues SubgraphPreconditioner::gradient(const VectorValues& y) const { + VectorValues x = Rc1()->backSubstitute(y); /* inv(R1)*y */ + Errors e = (*Ab2()*x - *b2bar()); /* (A2*inv(R1)*y-b2bar) */ + VectorValues v = VectorValues::Zero(x); + Ab2()->transposeMultiplyAdd(1.0, e, v); /* A2'*(A2*inv(R1)*y-b2bar) */ + return y + Rc1()->backSubstituteTranspose(v); +} + +/* ************************************************************************* */ +// Apply operator A, A*y = [I;A2*inv(R1)]*y = [y; A2*inv(R1)*y] +Errors SubgraphPreconditioner::operator*(const VectorValues& y) const { + + Errors e(y); + VectorValues x = 
Rc1()->backSubstitute(y); /* x=inv(R1)*y */ + Errors e2 = *Ab2() * x; /* A2*x */ + e.splice(e.end(), e2); + return e; +} + +/* ************************************************************************* */ +// In-place version that overwrites e +void SubgraphPreconditioner::multiplyInPlace(const VectorValues& y, Errors& e) const { + + Errors::iterator ei = e.begin(); + for ( Key i = 0 ; i < y.size() ; ++i, ++ei ) { + *ei = y[i]; + } + + // Add A2 contribution + VectorValues x = Rc1()->backSubstitute(y); // x=inv(R1)*y + Ab2()->multiplyInPlace(x, ei); // use iterator version +} + +/* ************************************************************************* */ +// Apply operator A', A'*e = [I inv(R1')*A2']*e = e1 + inv(R1')*A2'*e2 +VectorValues SubgraphPreconditioner::operator^(const Errors& e) const { + + Errors::const_iterator it = e.begin(); + VectorValues y = zero(); + for ( Key i = 0 ; i < y.size() ; ++i, ++it ) + y[i] = *it ; + transposeMultiplyAdd2(1.0,it,e.end(),y); + return y; +} + +/* ************************************************************************* */ +// y += alpha*A'*e +void SubgraphPreconditioner::transposeMultiplyAdd +(double alpha, const Errors& e, VectorValues& y) const { + + Errors::const_iterator it = e.begin(); + for ( Key i = 0 ; i < y.size() ; ++i, ++it ) { + const Vector& ei = *it; + axpy(alpha, ei, y[i]); + } + transposeMultiplyAdd2(alpha, it, e.end(), y); +} + +/* ************************************************************************* */ +// y += alpha*inv(R1')*A2'*e2 +void SubgraphPreconditioner::transposeMultiplyAdd2 (double alpha, Errors::const_iterator it, Errors::const_iterator end, VectorValues& y) const { - // create e2 with what's left of e - // TODO can we avoid creating e2 by passing iterator to transposeMultiplyAdd ? - Errors e2; - while (it != end) e2.push_back(*(it++)); + // create e2 with what's left of e + // TODO can we avoid creating e2 by passing iterator to transposeMultiplyAdd ? 
+ Errors e2;
+ while (it != end) e2.push_back(*(it++));
- VectorValues x = VectorValues::Zero(y); // x = 0
- Ab2_->transposeMultiplyAdd(1.0,e2,x); // x += A2'*e2
- axpy(alpha, Rc1_->backSubstituteTranspose(x), y); // y += alpha*inv(R1')*x
+ VectorValues x = VectorValues::Zero(y); // x = 0
+ Ab2_->transposeMultiplyAdd(1.0,e2,x); // x += A2'*e2
+ axpy(alpha, Rc1_->backSubstituteTranspose(x), y); // y += alpha*inv(R1')*x
+}
+
+/* ************************************************************************* */
+void SubgraphPreconditioner::print(const std::string& s) const {
+ cout << s << endl;
+ Ab2_->print();
+}
+
+/*****************************************************************************/
+void SubgraphPreconditioner::solve(const Vector& y, Vector &x) const
+{
+ /* copy first */
+ std::copy(y.data(), y.data() + y.rows(), x.data());
+
+ /* in place back substitute */
+ BOOST_REVERSE_FOREACH(const boost::shared_ptr & cg, *Rc1_) {
+ /* collect a subvector of x that consists of the parents of cg (S) */
+ const Vector xParent = getSubvector(x, keyInfo_, FastVector(cg->beginParents(), cg->endParents()));
+ const Vector rhsFrontal = getSubvector(x, keyInfo_, FastVector(cg->beginFrontals(), cg->endFrontals()));
+
+ /* compute the solution for the current pivot */
+ const Vector solFrontal = cg->get_R().triangularView().solve(rhsFrontal - cg->get_S() * xParent);
+
+ /* assign subvector of sol to the frontal variables */
+ setSubvector(solFrontal, keyInfo_, FastVector(cg->beginFrontals(), cg->endFrontals()), x);
+ }
+}
+
+/*****************************************************************************/
+void SubgraphPreconditioner::transposeSolve(const Vector& y, Vector& x) const
+{
+ /* copy first */
+ std::copy(y.data(), y.data() + y.rows(), x.data());
+
+ /* in place back substitute */
+ BOOST_FOREACH(const boost::shared_ptr & cg, *Rc1_) {
+ const Vector rhsFrontal = getSubvector(x, keyInfo_, FastVector(cg->beginFrontals(), cg->endFrontals()));
+// const Vector solFrontal = cg->get_R().triangularView().transpose().solve(rhsFrontal);
+ const Vector solFrontal = cg->get_R().transpose().triangularView().solve(rhsFrontal);
+
+ // Check for indeterminate solution
+ if ( solFrontal.hasNaN()) throw IndeterminantLinearSystemException(cg->keys().front());
+
+ /* assign subvector of sol to the frontal variables */
+ setSubvector(solFrontal, keyInfo_, FastVector(cg->beginFrontals(), cg->endFrontals()), x);
+
+ /* subtract from parent variables */
+ for (GaussianConditional::const_iterator it = cg->beginParents(); it != cg->endParents(); it++) {
+ KeyInfo::const_iterator it2 = keyInfo_.find(*it);
+ Eigen::Map rhsParent(x.data()+it2->second.colstart(), it2->second.dim(), 1);
+ rhsParent -= Matrix(cg->getA(it)).transpose() * solFrontal;
+ }
+ }
+}
+
+/*****************************************************************************/
+void SubgraphPreconditioner::build(const GaussianFactorGraph &gfg, const KeyInfo &keyInfo, const std::map &lambda)
+{
+ /* identify the subgraph structure */
+ const SubgraphBuilder builder(parameters_.builderParams_);
+ Subgraph::shared_ptr subgraph = builder(gfg);
+
+ keyInfo_ = keyInfo;
+
+ /* build factor subgraph */
+ GaussianFactorGraph::shared_ptr gfg_subgraph = buildFactorSubgraph(gfg, *subgraph, true);
+
+ /* factorize and cache BayesNet */
+ Rc1_ = gfg_subgraph->eliminateSequential();
+}
+
+/*****************************************************************************/
+Vector getSubvector(const Vector &src, const KeyInfo &keyInfo, const FastVector &keys) {
+
+ /* a cache of
starting index and dim */ + typedef vector > Cache; + Cache cache; + + /* figure out dimension by traversing the keys */ + size_t d = 0; + BOOST_FOREACH ( const Key &key, keys ) { + const KeyInfoEntry &entry = keyInfo.find(key)->second; + cache.push_back(make_pair(entry.colstart(), entry.dim())); + d += entry.dim(); } - /* ************************************************************************* */ - void SubgraphPreconditioner::print(const std::string& s) const { - cout << s << endl; - Ab2_->print(); + /* use the cache to fill the result */ + Vector result = Vector::Zero(d, 1); + size_t idx = 0; + BOOST_FOREACH ( const Cache::value_type &p, cache ) { + result.segment(idx, p.second) = src.segment(p.first, p.second) ; + idx += p.second; } + + return result; +} + +/*****************************************************************************/ +void setSubvector(const Vector &src, const KeyInfo &keyInfo, const FastVector &keys, Vector &dst) { + /* use the cache */ + size_t idx = 0; + BOOST_FOREACH ( const Key &key, keys ) { + const KeyInfoEntry &entry = keyInfo.find(key)->second; + dst.segment(entry.colstart(), entry.dim()) = src.segment(idx, entry.dim()) ; + idx += entry.dim(); + } +} + +/*****************************************************************************/ +boost::shared_ptr +buildFactorSubgraph(const GaussianFactorGraph &gfg, const Subgraph &subgraph, const bool clone) { + + GaussianFactorGraph::shared_ptr result(new GaussianFactorGraph()); + result->reserve(subgraph.size()); + BOOST_FOREACH ( const SubgraphEdge &e, subgraph ) { + const size_t idx = e.index(); + if ( clone ) result->push_back(gfg[idx]->clone()); + else result->push_back(gfg[idx]); + } + return result; +} + } // nsamespace gtsam diff --git a/gtsam/linear/SubgraphPreconditioner.h b/gtsam/linear/SubgraphPreconditioner.h index d9d7524b6..e237b5853 100644 --- a/gtsam/linear/SubgraphPreconditioner.h +++ b/gtsam/linear/SubgraphPreconditioner.h @@ -12,15 +12,16 @@ /** * @file SubgraphPreconditioner.h * @date Dec 31, 2009 - * @author Frank Dellaert + * @author Frank Dellaert, Yong-Dian Jian */ #pragma once #include -#include #include - +#include +#include +#include #include namespace gtsam { @@ -30,6 +31,146 @@ namespace gtsam { class GaussianFactorGraph; class VectorValues; + struct SubgraphEdge { + size_t index_; /* edge id */ + double weight_; /* edge weight */ + SubgraphEdge() : index_(0), weight_(1.0) {} + SubgraphEdge(const SubgraphEdge &e) : index_(e.index()), weight_(e.weight()) {} + SubgraphEdge(const size_t index, const double weight = 1.0): index_(index), weight_(weight) {} + inline size_t index() const { return index_; } + inline double weight() const { return weight_; } + inline bool isUnitWeight() const { return (weight_ == 1.0); } + friend std::ostream &operator<<(std::ostream &os, const SubgraphEdge &edge); + private: + friend class boost::serialization::access; + template + void serialize(Archive & ar, const unsigned int version) { + ar & BOOST_SERIALIZATION_NVP(index_); + ar & BOOST_SERIALIZATION_NVP(weight_); + } + }; + + /**************************************************************************/ + class Subgraph { + public: + typedef boost::shared_ptr shared_ptr; + typedef std::vector vector_shared_ptr; + typedef std::vector Edges; + typedef std::vector EdgeIndices; + typedef Edges::iterator iterator; + typedef Edges::const_iterator const_iterator; + + protected: + Edges edges_; /* index to the factors */ + + public: + Subgraph() {} + Subgraph(const Subgraph &subgraph) : edges_(subgraph.edges()) {} + 
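    /* Editorial note (illustrative, not part of this patch): a Subgraph is simply a
     * list of factor indices with per-edge weights. Constructing one from a plain
     * index vector, e.g.
     *   std::vector<size_t> idx; idx.push_back(0); idx.push_back(2);
     *   Subgraph sub(idx);
     * records factors 0 and 2 of the originating GaussianFactorGraph with unit
     * weight, and sub.edgeIndices() returns those same indices. */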
Subgraph(const Edges &edges) : edges_(edges) {} + Subgraph(const std::vector &indices) ; + + inline const Edges& edges() const { return edges_; } + inline const size_t size() const { return edges_.size(); } + EdgeIndices edgeIndices() const; + + iterator begin() { return edges_.begin(); } + const_iterator begin() const { return edges_.begin(); } + iterator end() { return edges_.end(); } + const_iterator end() const { return edges_.end(); } + + void save(const std::string &fn) const; + static shared_ptr load(const std::string &fn); + friend std::ostream &operator<<(std::ostream &os, const Subgraph &subgraph); + + private: + friend class boost::serialization::access; + template + void serialize(Archive & ar, const unsigned int version) { + ar & BOOST_SERIALIZATION_NVP(edges_); + } + }; + + /****************************************************************************/ + struct SubgraphBuilderParameters { + public: + typedef boost::shared_ptr shared_ptr; + + enum Skeleton { + /* augmented tree */ + NATURALCHAIN = 0, /* natural ordering of the graph */ + BFS, /* breadth-first search tree */ + KRUSKAL, /* maximum weighted spanning tree */ + } skeleton_ ; + + enum SkeletonWeight { /* how to weigh the graph edges */ + EQUAL = 0, /* every block edge has equal weight */ + RHS_2NORM, /* use the 2-norm of the rhs */ + LHS_FNORM, /* use the frobenius norm of the lhs */ + RANDOM, /* bounded random edge weight */ + } skeletonWeight_ ; + + enum AugmentationWeight { /* how to weigh the graph edges */ + SKELETON = 0, /* use the same weights in building the skeleton */ +// STRETCH, /* stretch in the laplacian sense */ +// GENERALIZED_STRETCH /* the generalized stretch defined in jian2013iros */ + } augmentationWeight_ ; + + double complexity_; + + SubgraphBuilderParameters() + : skeleton_(KRUSKAL), skeletonWeight_(RANDOM), augmentationWeight_(SKELETON), complexity_(1.0) {} + virtual ~SubgraphBuilderParameters() {} + + /* for serialization */ + void print() const ; + virtual void print(std::ostream &os) const ; + friend std::ostream& operator<<(std::ostream &os, const PreconditionerParameters &p); + + static Skeleton skeletonTranslator(const std::string &s); + static std::string skeletonTranslator(Skeleton w); + static SkeletonWeight skeletonWeightTranslator(const std::string &s); + static std::string skeletonWeightTranslator(SkeletonWeight w); + static AugmentationWeight augmentationWeightTranslator(const std::string &s); + static std::string augmentationWeightTranslator(AugmentationWeight w); + }; + + /*****************************************************************************/ + class SubgraphBuilder { + + public: + typedef SubgraphBuilder Base; + typedef boost::shared_ptr shared_ptr; + typedef std::vector Weights; + + SubgraphBuilder(const SubgraphBuilderParameters &p = SubgraphBuilderParameters()) + : parameters_(p) {} + virtual ~SubgraphBuilder() {} + virtual boost::shared_ptr operator() (const GaussianFactorGraph &jfg) const ; + + private: + std::vector buildTree(const GaussianFactorGraph &gfg, const FastMap &ordering, const std::vector &weights) const ; + std::vector unary(const GaussianFactorGraph &gfg) const ; + std::vector natural_chain(const GaussianFactorGraph &gfg) const ; + std::vector bfs(const GaussianFactorGraph &gfg) const ; + std::vector kruskal(const GaussianFactorGraph &gfg, const FastMap &ordering, const std::vector &w) const ; + std::vector sample(const std::vector &weights, const size_t t) const ; + Weights weights(const GaussianFactorGraph &gfg) const; + SubgraphBuilderParameters 
parameters_; + + private: + SubgraphBuilder() {} + }; + + /*******************************************************************************************/ + struct GTSAM_EXPORT SubgraphPreconditionerParameters : public PreconditionerParameters { + typedef PreconditionerParameters Base; + typedef boost::shared_ptr shared_ptr; + SubgraphPreconditionerParameters(const SubgraphBuilderParameters &p = SubgraphBuilderParameters()) + : Base(), builderParams_(p) {} + virtual ~SubgraphPreconditionerParameters() {} + SubgraphBuilderParameters builderParams_; + }; + /** * Subgraph conditioner class, as explained in the RSS 2010 submission. * Starting with a graph A*x=b, we split it in two systems A1*x=b1 and A2*x=b2 @@ -37,7 +178,7 @@ namespace gtsam { * To use the class, give the Bayes Net R1*x=c1 and Graph A2*x=b2. * Then solve for yhat using CG, and solve for xhat = system.x(yhat). */ - class GTSAM_EXPORT SubgraphPreconditioner { + class GTSAM_EXPORT SubgraphPreconditioner : public Preconditioner { public: typedef boost::shared_ptr shared_ptr; @@ -52,9 +193,12 @@ namespace gtsam { sharedValues xbar_; ///< A1 \ b1 sharedErrors b2bar_; ///< A2*xbar - b2 + KeyInfo keyInfo_; + SubgraphPreconditionerParameters parameters_; + public: - SubgraphPreconditioner(); + SubgraphPreconditioner(const SubgraphPreconditionerParameters &p = SubgraphPreconditionerParameters()); /** * Constructor @@ -62,7 +206,10 @@ namespace gtsam { * @param Rc1: the Bayes Net R1*x=c1 * @param xbar: the solution to R1*x=c1 */ - SubgraphPreconditioner(const sharedFG& Ab2, const sharedBayesNet& Rc1, const sharedValues& xbar); + SubgraphPreconditioner(const sharedFG& Ab2, const sharedBayesNet& Rc1, const sharedValues& xbar, + const SubgraphPreconditionerParameters &p = SubgraphPreconditionerParameters()); + + virtual ~SubgraphPreconditioner() {} /** print the object */ void print(const std::string& s = "SubgraphPreconditioner") const; @@ -119,6 +266,54 @@ namespace gtsam { * y += alpha*A'*[e1;e2] = [alpha*e1; alpha*inv(R1')*A2'*e2] */ void transposeMultiplyAdd(double alpha, const Errors& e, VectorValues& y) const; + + /*****************************************************************************/ + /* implement virtual functions of Preconditioner */ + + /* Computation Interfaces for Vector */ + virtual void solve(const Vector& y, Vector &x) const; + virtual void transposeSolve(const Vector& y, Vector& x) const ; + + virtual void build( + const GaussianFactorGraph &gfg, + const KeyInfo &info, + const std::map &lambda + ) ; + /*****************************************************************************/ }; + /* get subvectors */ + Vector getSubvector(const Vector &src, const KeyInfo &keyInfo, const FastVector &keys); + + /* set subvectors */ + void setSubvector(const Vector &src, const KeyInfo &keyInfo, const FastVector &keys, Vector &dst); + + + /* build a factor subgraph, which is defined as a set of weighted edges (factors) */ + boost::shared_ptr + buildFactorSubgraph(const GaussianFactorGraph &gfg, const Subgraph &subgraph, const bool clone); + + + /* sort the container and return permutation index with default comparator */ + template + std::vector sort_idx(const Container &src) + { + typedef typename Container::value_type T; + const size_t n = src.size() ; + std::vector > tmp; + tmp.reserve(n); + for ( size_t i = 0 ; i < n ; i++ ) + tmp.push_back(std::make_pair(i, src[i])); + + /* sort */ + std::stable_sort(tmp.begin(), tmp.end()) ; + + /* copy back */ + std::vector idx; idx.reserve(n); + for ( size_t i = 0 ; i < n ; i++ ) { + 
idx.push_back(tmp[i].first) ; + } + return idx; + } + } // namespace gtsam diff --git a/gtsam/linear/SubgraphSolver.cpp b/gtsam/linear/SubgraphSolver.cpp index 034fdeeaf..6f10d04ad 100644 --- a/gtsam/linear/SubgraphSolver.cpp +++ b/gtsam/linear/SubgraphSolver.cpp @@ -14,6 +14,7 @@ #include #include #include +#include #include #include #include @@ -143,4 +144,11 @@ SubgraphSolver::splitGraph(const GaussianFactorGraph &jfg) { return boost::tie(At, Ac); } +/****************************************************************************/ +VectorValues SubgraphSolver::optimize ( + const GaussianFactorGraph &gfg, + const KeyInfo &keyInfo, + const std::map &lambda, + const VectorValues &initial +) { return VectorValues(); } } // \namespace gtsam diff --git a/gtsam/linear/SubgraphSolver.h b/gtsam/linear/SubgraphSolver.h index a66182281..e4d3f50f3 100644 --- a/gtsam/linear/SubgraphSolver.h +++ b/gtsam/linear/SubgraphSolver.h @@ -13,7 +13,6 @@ #include -#include #include #include #include @@ -23,6 +22,7 @@ namespace gtsam { // Forward declarations class GaussianFactorGraph; class GaussianBayesNet; + class SubgraphPreconditioner; class GTSAM_EXPORT SubgraphSolverParameters : public ConjugateGradientParameters { public: @@ -61,7 +61,7 @@ public: protected: Parameters parameters_; Ordering ordering_; - SubgraphPreconditioner::shared_ptr pc_; ///< preconditioner object + boost::shared_ptr pc_; ///< preconditioner object public: /* Given a gaussian factor graph, split it into a spanning tree (A1) + others (A2) for SPCG */ @@ -87,7 +87,7 @@ public: const KeyInfo &keyInfo, const std::map &lambda, const VectorValues &initial - ) { return VectorValues(); } + ) ; protected: diff --git a/gtsam/nonlinear/LevenbergMarquardtOptimizer.h b/gtsam/nonlinear/LevenbergMarquardtOptimizer.h index f365fc524..5e8e3923c 100644 --- a/gtsam/nonlinear/LevenbergMarquardtOptimizer.h +++ b/gtsam/nonlinear/LevenbergMarquardtOptimizer.h @@ -19,6 +19,7 @@ #pragma once #include +#include #include class NonlinearOptimizerMoreOptimizationTest; diff --git a/gtsam/nonlinear/NonlinearOptimizer.cpp b/gtsam/nonlinear/NonlinearOptimizer.cpp index 2a6eb5887..88bece4e1 100644 --- a/gtsam/nonlinear/NonlinearOptimizer.cpp +++ b/gtsam/nonlinear/NonlinearOptimizer.cpp @@ -103,6 +103,7 @@ VectorValues NonlinearOptimizer::solve(const GaussianFactorGraph &gfg, } else if (params.isSequential()) { delta = gfg.eliminateSequential(*params.ordering, params.getEliminationFunction())->optimize(); } else if (params.isIterative()) { + if (!params.iterativeParams) throw std::runtime_error("NonlinearOptimizer::solve: cg parameter has to be assigned ..."); diff --git a/tests/testPCGSolver.cpp b/tests/testPCGSolver.cpp index ef74ad1ef..38a40521a 100644 --- a/tests/testPCGSolver.cpp +++ b/tests/testPCGSolver.cpp @@ -27,6 +27,7 @@ #include #include #include +#include #include #include #include @@ -88,7 +89,6 @@ TEST( PCGSolver, llt ) { } - /* ************************************************************************* */ TEST( PCGSolver, dummy ) { @@ -129,6 +129,26 @@ TEST( PCGSolver, blockjacobi ) DOUBLES_EQUAL(0,fg.error(actualPCG),tol); } +/* ************************************************************************* */ +TEST( PCGSolver, subgraph ) +{ + LevenbergMarquardtParams paramsPCG; + paramsPCG.linearSolverType = LevenbergMarquardtParams::Iterative; + PCGSolverParameters::shared_ptr pcg = boost::make_shared(); + pcg->preconditioner_ = boost::make_shared(); + paramsPCG.iterativeParams = pcg; + + NonlinearFactorGraph fg = example::createReallyNonlinearFactorGraph(); 
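  // Editorial note (illustrative, not part of this patch): the preconditioner used by
  // the PCG solver is chosen entirely through PCGSolverParameters::preconditioner_.
  // Replacing the assignment above with, for example,
  //   pcg->preconditioner_ = boost::make_shared<BlockJacobiPreconditionerParameters>();
  // should select the block-Jacobi preconditioner instead, which is presumably what
  // the blockjacobi test above exercises.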
+ + Point2 x0(10,10); + Values c0; + c0.insert(X(1), x0); + + Values actualPCG = LevenbergMarquardtOptimizer(fg, c0, paramsPCG).optimize(); + + DOUBLES_EQUAL(0,fg.error(actualPCG),tol); +} + /* ************************************************************************* */ int main() { TestResult tr;