make ConjugateGradientParameters a public struct
parent a94169a973
commit df1c008955
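ConjugateGradientParameters is now a plain struct whose settings (minIterations, maxIterations, reset, epsilon_rel, epsilon_abs, blas_kernel) are public data members instead of protected, underscore-suffixed fields hidden behind getters and setters; the call sites below read the fields directly, and the old accessor block is fenced behind GTSAM_ALLOW_DEPRECATED_SINCE_V43. A minimal sketch of how downstream code changes, assuming the usual gtsam/linear/ConjugateGradientSolver.h header (the tuneCG wrapper and the values are illustrative only, not part of this commit):

    #include <gtsam/linear/ConjugateGradientSolver.h>

    // Hypothetical helper: tune CG parameters before handing them to a solver.
    void tuneCG(gtsam::ConjugateGradientParameters &params) {
      // Before this commit (accessor interface):
      //   params.setEpsilon_rel(1e-4);
      //   size_t maxIter = params.maxIterations();
      // After this commit the settings are public fields, read and written directly:
      params.epsilon_rel = 1e-4;
      params.maxIterations = 200;
      const size_t maxIter = params.maxIterations;
      (void)maxIter;  // only a sketch; nothing else uses the value here
    }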
@@ -28,11 +28,11 @@ namespace gtsam {
 void ConjugateGradientParameters::print(ostream &os) const {
   Base::print(os);
   cout << "ConjugateGradientParameters" << endl
-       << "minIter: " << minIterations_ << endl
-       << "maxIter: " << maxIterations_ << endl
-       << "resetIter: " << reset_ << endl
-       << "eps_rel: " << epsilon_rel_ << endl
-       << "eps_abs: " << epsilon_abs_ << endl;
+       << "minIter: " << minIterations << endl
+       << "maxIter: " << maxIterations << endl
+       << "resetIter: " << reset << endl
+       << "eps_rel: " << epsilon_rel << endl
+       << "eps_abs: " << epsilon_abs << endl;
 }

 /*****************************************************************************/
@@ -26,38 +26,50 @@ namespace gtsam {
 /**
  * Parameters for the Conjugate Gradient method
  */
-class GTSAM_EXPORT ConjugateGradientParameters : public IterativeOptimizationParameters {
-
-public:
+struct GTSAM_EXPORT ConjugateGradientParameters
+    : public IterativeOptimizationParameters {
   typedef IterativeOptimizationParameters Base;
   typedef std::shared_ptr<ConjugateGradientParameters> shared_ptr;

-protected:
-  size_t minIterations_;  ///< minimum number of cg iterations
-  size_t maxIterations_;  ///< maximum number of cg iterations
-  size_t reset_;          ///< number of iterations before reset
-  double epsilon_rel_;    ///< threshold for relative error decrease
-  double epsilon_abs_;    ///< threshold for absolute error decrease
+  size_t minIterations;  ///< minimum number of cg iterations
+  size_t maxIterations;  ///< maximum number of cg iterations
+  size_t reset;          ///< number of iterations before reset
+  double epsilon_rel;    ///< threshold for relative error decrease
+  double epsilon_abs;    ///< threshold for absolute error decrease

-public:
   /* Matrix Operation Kernel */
   enum BLASKernel {
     GTSAM = 0,  ///< Jacobian Factor Graph of GTSAM
-  } blas_kernel_ ;
+  } blas_kernel;

   ConjugateGradientParameters()
-  : minIterations_(1), maxIterations_(500), reset_(501), epsilon_rel_(1e-3),
-    epsilon_abs_(1e-3), blas_kernel_(GTSAM) {}
+      : minIterations(1),
+        maxIterations(500),
+        reset(501),
+        epsilon_rel(1e-3),
+        epsilon_abs(1e-3),
+        blas_kernel(GTSAM) {}

-  ConjugateGradientParameters(size_t minIterations, size_t maxIterations, size_t reset,
-      double epsilon_rel, double epsilon_abs, BLASKernel blas)
-  : minIterations_(minIterations), maxIterations_(maxIterations), reset_(reset),
-    epsilon_rel_(epsilon_rel), epsilon_abs_(epsilon_abs), blas_kernel_(blas) {}
+  ConjugateGradientParameters(size_t minIterations, size_t maxIterations,
+                              size_t reset, double epsilon_rel,
+                              double epsilon_abs, BLASKernel blas)
+      : minIterations(minIterations),
+        maxIterations(maxIterations),
+        reset(reset),
+        epsilon_rel(epsilon_rel),
+        epsilon_abs(epsilon_abs),
+        blas_kernel(blas) {}

   ConjugateGradientParameters(const ConjugateGradientParameters &p)
-  : Base(p), minIterations_(p.minIterations_), maxIterations_(p.maxIterations_), reset_(p.reset_),
-    epsilon_rel_(p.epsilon_rel_), epsilon_abs_(p.epsilon_abs_), blas_kernel_(GTSAM) {}
+      : Base(p),
+        minIterations(p.minIterations),
+        maxIterations(p.maxIterations),
+        reset(p.reset),
+        epsilon_rel(p.epsilon_rel),
+        epsilon_abs(p.epsilon_abs),
+        blas_kernel(GTSAM) {}

+#ifdef GTSAM_ALLOW_DEPRECATED_SINCE_V43
   /* general interface */
   inline size_t minIterations() const { return minIterations_; }
   inline size_t maxIterations() const { return maxIterations_; }
@@ -79,6 +91,7 @@ class GTSAM_EXPORT ConjugateGradientParameters : public IterativeOptimizationParameters {
   inline void setEpsilon(double value) { epsilon_rel_ = value; }
   inline void setEpsilon_rel(double value) { epsilon_rel_ = value; }
   inline void setEpsilon_abs(double value) { epsilon_abs_ = value; }
+#endif


   void print() const { Base::print(); }
@@ -111,16 +124,17 @@ V preconditionedConjugateGradient(const S &system, const V &initial,

   double currentGamma = system.dot(residual, residual), prevGamma, alpha, beta;

-  const size_t iMaxIterations = parameters.maxIterations(),
-               iMinIterations = parameters.minIterations(),
-               iReset = parameters.reset() ;
-  const double threshold = std::max(parameters.epsilon_abs(),
-      parameters.epsilon() * parameters.epsilon() * currentGamma);
+  const size_t iMaxIterations = parameters.maxIterations,
+               iMinIterations = parameters.minIterations,
+               iReset = parameters.reset;
+  const double threshold =
+      std::max(parameters.epsilon_abs,
+               parameters.epsilon_rel * parameters.epsilon_rel * currentGamma);

-  if (parameters.verbosity() >= ConjugateGradientParameters::COMPLEXITY )
-    std::cout << "[PCG] epsilon = " << parameters.epsilon()
-              << ", max = " << parameters.maxIterations()
-              << ", reset = " << parameters.reset()
+  if (parameters.verbosity() >= ConjugateGradientParameters::COMPLEXITY)
+    std::cout << "[PCG] epsilon = " << parameters.epsilon_rel
+              << ", max = " << parameters.maxIterations
+              << ", reset = " << parameters.reset
               << ", ||r0||^2 = " << currentGamma
               << ", threshold = " << threshold << std::endl;

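The stopping rule is unchanged by the rename: iteration terminates once the squared residual drops below max(epsilon_abs, epsilon_rel^2 * ||r0||^2), which is exactly the threshold computed above and compared against gamma later on. A self-contained sketch of that check, with hypothetical names (cgConverged is not a GTSAM function):

    #include <algorithm>

    // Converged once ||r_k||^2 < max(epsilon_abs, epsilon_rel^2 * ||r_0||^2),
    // mirroring the threshold computed in the hunk above.
    bool cgConverged(double currentGamma,  // ||r_k||^2
                     double initialGamma,  // ||r_0||^2
                     double epsilon_rel, double epsilon_abs) {
      const double threshold =
          std::max(epsilon_abs, epsilon_rel * epsilon_rel * initialGamma);
      return currentGamma < threshold;
    }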
@@ -49,10 +49,12 @@ namespace gtsam {

     // init gamma and calculate threshold
     gamma = dot(g,g);
-    threshold = std::max(parameters_.epsilon_abs(), parameters_.epsilon() * parameters_.epsilon() * gamma);
+    threshold =
+        std::max(parameters_.epsilon_abs,
+                 parameters_.epsilon_rel * parameters_.epsilon_rel * gamma);

     // Allocate and calculate A*d for first iteration
-    if (gamma > parameters_.epsilon_abs()) Ad = Ab * d;
+    if (gamma > parameters_.epsilon_abs) Ad = Ab * d;
   }

   /* ************************************************************************* */
@@ -79,13 +81,13 @@ namespace gtsam {
     // take a step, return true if converged
     bool step(const S& Ab, V& x) {

-      if ((++k) >= ((int)parameters_.maxIterations())) return true;
+      if ((++k) >= ((int)parameters_.maxIterations)) return true;

       //---------------------------------->
       double alpha = takeOptimalStep(x);

       // update gradient (or re-calculate at reset time)
-      if (k % parameters_.reset() == 0) g = Ab.gradient(x);
+      if (k % parameters_.reset == 0) g = Ab.gradient(x);
       // axpy(alpha, Ab ^ Ad, g); // g += alpha*(Ab^Ad)
       else Ab.transposeMultiplyAdd(alpha, Ad, g);

@@ -126,11 +128,10 @@ namespace gtsam {
     CGState<S, V, E> state(Ab, x, parameters, steepest);

     if (parameters.verbosity() != ConjugateGradientParameters::SILENT)
-      std::cout << "CG: epsilon = " << parameters.epsilon()
-                << ", maxIterations = " << parameters.maxIterations()
+      std::cout << "CG: epsilon = " << parameters.epsilon_rel
+                << ", maxIterations = " << parameters.maxIterations
                 << ", ||g0||^2 = " << state.gamma
-                << ", threshold = " << state.threshold
-                << std::endl;
+                << ", threshold = " << state.threshold << std::endl;

     if ( state.gamma < state.threshold ) {
       if (parameters.verbosity() != ConjugateGradientParameters::SILENT)
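Construction is unaffected apart from the member names: the explicit constructor keeps its argument list, and the now-public fields can be assigned after default construction. A hedged usage sketch (the main() wiring is illustrative, not part of this commit):

    #include <gtsam/linear/ConjugateGradientSolver.h>

    int main() {
      using gtsam::ConjugateGradientParameters;

      // Explicit constructor, same argument order as in the header hunk above.
      ConjugateGradientParameters p1(/*minIterations=*/1, /*maxIterations=*/500,
                                     /*reset=*/501, /*epsilon_rel=*/1e-3,
                                     /*epsilon_abs=*/1e-3,
                                     ConjugateGradientParameters::GTSAM);

      // Default-construct, then assign the public fields directly.
      ConjugateGradientParameters p2;
      p2.maxIterations = 100;
      p2.reset = 101;

      p1.print();  // forwards to Base::print(), as declared in the header hunk
      return 0;
    }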