fix iterative parameters

parent 0f079b70f2
commit 0e4f2b414c

@@ -17,125 +17,29 @@

#pragma once

#include <string>
#include <iostream>
#include <boost/shared_ptr.hpp>
#include <iostream>
#include <string>

namespace gtsam {

/* parameters for combinatorial preconditioner */
struct CombinatorialParameters {

  enum Group {    /* group of variables */
    BLOCK = 0,    /* utilize the inherent block structure */
    SCALAR        /* break the block variables into scalars */
  } group_;

  enum Type {     /* subgraph specification */
    JACOBI = 0,   /* block diagonal */
    SPTREE,       /* spanning tree */
    GSP,          /* gspn, n = 0: all factors, n = 1: all unary factors, n = 2: spanning tree, etc .. */
    KOUTIS,       /* implement Koutis2011Focs */
  } type_;

  enum Weight {   /* how to weigh the graph edges */
    EQUAL = 0,    /* 0: every block edge has equal weight */
    RHS_2NORM,    /* 1: use the 2-norm of the rhs */
    LHS_FNORM,    /* 2: use the frobenius norm of the lhs */
    RAW,          /* 3: use raw scalar weight for the scalar case */
  } weight_ ;

  int complexity_;/* a parameter for the subgraph complexity */
  int hessian_;   /* 0: do nothing, 1: use whole block hessian */

  CombinatorialParameters(): group_(BLOCK), type_(JACOBI), weight_(EQUAL), complexity_(0), hessian_(1) {}
  CombinatorialParameters(Group group, Type type, Weight weight, int complexity, int hessian)
    : group_(group), type_(type), weight_(weight), complexity_(complexity), hessian_(hessian) {}

  inline Group group() const { return group_; }
  inline Type type() const { return type_; }
  inline Weight weight() const { return weight_; }
  inline int complexity() const { return complexity_; }
  inline int hessian() const { return hessian_; }

  inline bool isScalar() const { return group_ == SCALAR; }
  inline bool isBlock() const { return group_ == BLOCK; }
  inline bool isStatic() const { return (type_ == JACOBI || weight_ == EQUAL) ? true : false; }
  inline bool isDynamic() const { return !isStatic(); }

  inline void print() const {

    const std::string groupStr[2] = {"block", "scalar"};
    const std::string typeStr[4] = {"jacobi", "sptree", "gsp", "koutis"};
    const std::string weightStr[4] = {"equal", "rhs-2norm", "lhs-form", "raw"};

    std::cout << "CombinatorialParameters: "
    << "group = " << groupStr[group_]
    << ", type = " << typeStr[type_]
    << ", weight = " << weightStr[weight_]
    << ", complexity = " << complexity_
    << ", hessian = " << hessian_  << std::endl;
  }
};

/* parameters for the preconditioner */
struct PreconditionerParameters {

  typedef boost::shared_ptr<PreconditionerParameters> shared_ptr;

  enum Kernel { /* Preconditioner Kernel */
    GTSAM = 0,  /* Jacobian Factor Graph of GTSAM */
    CHOLMOD     /* Cholmod Sparse */
  } kernel_ ;

  enum Type {         /* preconditioner type */
    Combinatorial = 0 /* combinatorial preconditioner */
  } type_;

  CombinatorialParameters combinatorial_;

  enum Verbosity { SILENT = 0, COMPLEXITY = 1, ERROR = 2} verbosity_ ;  /* Verbosity */

  PreconditionerParameters(): kernel_(GTSAM), type_(Combinatorial), verbosity_(SILENT) {}
  PreconditionerParameters(Kernel kernel, const CombinatorialParameters &combinatorial, Verbosity verbosity)
    : kernel_(kernel), type_(Combinatorial), combinatorial_(combinatorial), verbosity_(verbosity) {}

  /* general interface */
  inline Kernel kernel() const { return kernel_; }
  inline Type type() const { return type_; }
  inline const CombinatorialParameters & combinatorial() const { return combinatorial_; }
  inline Verbosity verbosity() const { return verbosity_; }


  void print() const {
    const std::string kernelStr[2] = {"gtsam", "cholmod"};
    const std::string typeStr[1] = {"combinatorial"};

    std::cout << "PreconditionerParameters: "
              << "kernel = " << kernelStr[kernel_]
              << ", type = " << typeStr[type_]
              << ", verbosity = " << verbosity_
              << std::endl;
    combinatorial_.print();
  }
};

/* parameters for the conjugate gradient method */
struct ConjugateGradientParameters {

  size_t minIterations_;  /* minimum number of cg iterations */
  size_t maxIterations_;  /* maximum number of cg iterations */
  size_t reset_;          /* number of iterations before reset */
  double epsilon_rel_;    /* threshold for relative error decrease */
  double epsilon_abs_;    /* threshold for absolute error decrease */
  size_t minIterations_;  ///< minimum number of cg iterations
  size_t maxIterations_;  ///< maximum number of cg iterations
  size_t reset_;          ///< number of iterations before reset
  double epsilon_rel_;    ///< threshold for relative error decrease
  double epsilon_abs_;    ///< threshold for absolute error decrease

  enum BLASKernel {   /* Matrix Operation Kernel */
    GTSAM = 0,        /* Jacobian Factor Graph of GTSAM */
    SBM,              /* Sparse Block Matrix */
    SBM_MT            /* Sparse Block Matrix Multithreaded */
  /* Matrix Operation Kernel */
  enum BLASKernel {
    GTSAM = 0,        ///< Jacobian Factor Graph of GTSAM
    SBM,              ///< Sparse Block Matrix
    SBM_MT            ///< Sparse Block Matrix Multithreaded
  } blas_kernel_;

  size_t degree_;         /* the maximum degree of the vertices to be eliminated before doing cg */
  size_t degree_;     ///< the maximum degree of the vertices to be eliminated before doing cg

  enum Verbosity { SILENT = 0, COMPLEXITY = 1, ERROR = 2} verbosity_ ;  /* Verbosity */

@@ -160,7 +64,7 @@ struct ConjugateGradientParameters {
  inline Verbosity verbosity() const { return verbosity_; }

  void print() const {
    const std::string blasStr[4] = {"gtsam", "sbm", "sbm-mt"};
    const std::string blasStr[3] = {"gtsam", "sbm", "sbm-mt"};
    std::cout << "ConjugateGradientParameters: "
              << "blas = " << blasStr[blas_kernel_]
              << ", minIter = " << minIterations_
@@ -175,42 +79,34 @@ struct ConjugateGradientParameters {
};

/* parameters for iterative linear solvers */
struct IterativeOptimizationParameters {
class IterativeOptimizationParameters {

public:

  typedef boost::shared_ptr<IterativeOptimizationParameters> shared_ptr;

  PreconditionerParameters preconditioner_;
  ConjugateGradientParameters cg_;
  ConjugateGradientParameters cg_;                                      ///< Parameters for the Conjugate Gradient Method
  enum Kernel { PCG = 0, LSPCG = 1 } kernel_ ;                          ///< Iterative Method Kernel
  enum Verbosity { SILENT = 0, COMPLEXITY = 1, ERROR = 2} verbosity_ ;  ///< Verbosity

public:

  IterativeOptimizationParameters()
  : preconditioner_(), cg_(), kernel_(LSPCG), verbosity_(SILENT) {}
  IterativeOptimizationParameters() : cg_(), kernel_(LSPCG), verbosity_(SILENT) {}

  IterativeOptimizationParameters(const IterativeOptimizationParameters &p) :
    preconditioner_(p.preconditioner_), cg_(p.cg_), kernel_(p.kernel_), verbosity_(p.verbosity_) {}
  IterativeOptimizationParameters(const IterativeOptimizationParameters &p)
    : cg_(p.cg_), kernel_(p.kernel_), verbosity_(p.verbosity_) {}

  IterativeOptimizationParameters(
    const PreconditionerParameters &p, const ConjugateGradientParameters &c, Kernel kernel = PCG, Verbosity verbosity = SILENT)
    : preconditioner_(p), cg_(c), kernel_(kernel), verbosity_(verbosity) {}
  IterativeOptimizationParameters(const ConjugateGradientParameters &c, Kernel kernel = LSPCG, Verbosity verbosity = SILENT)
    : cg_(c), kernel_(kernel), verbosity_(verbosity) {}

  virtual ~IterativeOptimizationParameters() {}

  /* general interface */
  inline Kernel kernel() const { return kernel_; }
  inline Verbosity verbosity() const { return verbosity_; }

  /* utility */
  inline const PreconditionerParameters& preconditioner() const { return preconditioner_; }
  inline const ConjugateGradientParameters& cg() const { return cg_; }

  /* interface to preconditioner parameters */
  inline PreconditionerParameters::Kernel preconditioner_kernel() const { return preconditioner_.kernel(); }
  inline PreconditionerParameters::Type type() const { return preconditioner_.type(); }

  /* interface to cg parameters */
  inline const ConjugateGradientParameters& cg() const { return cg_; }
  inline size_t minIterations() const { return cg_.minIterations(); }
  inline size_t maxIterations() const { return cg_.maxIterations(); }
  inline size_t reset() const { return cg_.reset(); }
@@ -220,15 +116,13 @@ public:
  inline size_t degree() const { return cg_.degree(); }
  inline ConjugateGradientParameters::BLASKernel blas_kernel() const { return cg_.blas_kernel(); }

  void print(const std::string &s="") const {
  virtual void print(const std::string &s="") const {
    const std::string kernelStr[2] = {"pcg", "lspcg"};
    std::cout << s << std::endl
              << "IterativeOptimizationParameters: "
              << "kernel = " << kernelStr[kernel_]
              << ", verbosity = " << verbosity_ << std::endl;
    cg_.print();
    preconditioner_.print();

  }

};
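
For concreteness, here is a minimal, hedged sketch of how the reworked parameter classes could be wired together after this change. It uses only the constructors and public members visible in this diff; the header path and the default-constructibility of ConjugateGradientParameters are assumptions, since its constructors fall outside the hunks shown.

#include <gtsam/linear/IterativeSolver.h>  // assumed header path; the diff does not show the file name

using namespace gtsam;

int main() {
  // Configure the conjugate gradient method via the public members declared above.
  ConjugateGradientParameters cg;          // assumes a default constructor exists (not shown in the hunks)
  cg.minIterations_ = 1;
  cg.maxIterations_ = 500;
  cg.reset_ = 100;
  cg.epsilon_rel_ = 1e-3;
  cg.epsilon_abs_ = 1e-3;
  cg.blas_kernel_ = ConjugateGradientParameters::GTSAM;

  // The new constructor no longer takes PreconditionerParameters;
  // only the CG settings, the kernel, and the verbosity remain.
  IterativeOptimizationParameters params(cg, IterativeOptimizationParameters::LSPCG);
  params.print("example");                 // prints kernel/verbosity, then forwards to cg_.print()
  return 0;
}

Note that CombinatorialParameters and PreconditionerParameters play no role in this construction anymore, which is the substance of the change to this header.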
@@ -20,23 +20,11 @@ class IterativeSolver {

public:

	typedef IterativeOptimizationParameters Parameters;

protected:

	Parameters parameters_ ;

public:

  IterativeSolver(): parameters_() {}
	IterativeSolver(const IterativeSolver &solver) : parameters_(solver.parameters_) {}
	IterativeSolver(const Parameters &parameters) : parameters_(parameters) {}

  IterativeSolver(){}
	virtual ~IterativeSolver() {}

	virtual VectorValues::shared_ptr optimize () = 0;

	inline const Parameters& parameters() const { return parameters_ ; }
	virtual const IterativeOptimizationParameters& _params() const = 0;
};

}
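
The base class no longer stores a Parameters member; each concrete solver now owns its parameters and exposes them through the pure virtual _params(). A hedged sketch of that contract for a hypothetical solver follows (MySolver and its optimize() body are illustrative only, and the header path is an assumption):

#include <gtsam/linear/IterativeSolver.h>   // assumed header path for the classes above

// Hypothetical solver, used only to illustrate the reworked interface.
class MySolver : public gtsam::IterativeSolver {
public:
  typedef gtsam::IterativeOptimizationParameters Parameters;

  explicit MySolver(const Parameters &parameters)
    : gtsam::IterativeSolver(), parameters_(parameters) {}  // base class is now stateless

  virtual gtsam::VectorValues::shared_ptr optimize() {
    // placeholder: a real solver would run PCG/LSPCG here, driven by parameters_.cg()
    return gtsam::VectorValues::shared_ptr(new gtsam::VectorValues());
  }

  // Each concrete solver reports its own parameters through the new pure virtual accessor.
  virtual const gtsam::IterativeOptimizationParameters& _params() const { return parameters_; }

protected:
  Parameters parameters_;  // the derived class, not the base, owns the parameters
};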
@@ -43,7 +43,7 @@ std::vector<size_t> extractColSpec_(const FactorGraph<GaussianFactor>& gfg, cons
}

SimpleSPCGSolver::SimpleSPCGSolver(const GaussianFactorGraph &gfg, const Parameters &parameters)
  : Base(parameters)
  : Base(), parameters_(parameters)
{
  std::vector<size_t> colSpec = extractColSpec_(gfg, VariableIndex(gfg));

@@ -39,6 +39,7 @@ class SimpleSPCGSolver : public IterativeSolver {
public:

  typedef IterativeSolver Base;
  typedef IterativeOptimizationParameters Parameters;
  typedef boost::shared_ptr<IterativeSolver> shared_ptr;

protected:
@@ -52,12 +53,14 @@ protected:
  VectorValues::shared_ptr by_;                 ///< \f$ [\bar{b_y} ; 0 ] \f$
  VectorValues::shared_ptr tmpY_;               ///< buffer for the column vectors
  VectorValues::shared_ptr tmpB_;               ///< buffer for the row vectors
  Parameters parameters_;                       ///< Parameters for iterative method

public:

  SimpleSPCGSolver(const GaussianFactorGraph &gfg, const Parameters &parameters);
  virtual ~SimpleSPCGSolver() {}
  virtual VectorValues::shared_ptr optimize () {return optimize(*y0_);}
  virtual const IterativeOptimizationParameters& _params() const { return parameters_; }

protected:

@@ -85,7 +88,6 @@ protected:
   * Note: This function has to be refined for your graph/application */
  boost::tuple<GaussianFactorGraph::shared_ptr, FactorGraph<JacobianFactor>::shared_ptr>
  splitGraph(const GaussianFactorGraph &gfg);

};

}
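
Putting the pieces together, a hedged usage sketch of SimpleSPCGSolver after this change. The helper function and header path are assumptions, the factor graph is assumed to be assembled elsewhere, and only the constructor, optimize(), and _params() shown in the diff are exercised.

#include <gtsam/linear/SimpleSPCGSolver.h>   // assumed header path

// Hypothetical helper, not part of the commit.
gtsam::VectorValues::shared_ptr solveWithSPCG(const gtsam::GaussianFactorGraph &gfg,
                                              const gtsam::IterativeOptimizationParameters &params) {
  gtsam::SimpleSPCGSolver solver(gfg, params);    // the solver keeps its own copy in parameters_
  solver._params().print("SimpleSPCGSolver");     // inspect the parameters via the new accessor
  return solver.optimize();                       // runs SPCG starting from the internal y0_
}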