Merge pull request #1876 from borglab/conjugate-gradient

release/4.3a0
Varun Agrawal 2024-10-18 09:55:02 -04:00 committed by GitHub
commit dc24255418
17 changed files with 246 additions and 257 deletions

View File

@@ -94,11 +94,10 @@ int main(int argc, char* argv[]) {
   parameters.maxIterations = 500;
   PCGSolverParameters::shared_ptr pcg =
       std::make_shared<PCGSolverParameters>();
-  pcg->preconditioner_ =
-      std::make_shared<BlockJacobiPreconditionerParameters>();
+  pcg->preconditioner = std::make_shared<BlockJacobiPreconditionerParameters>();
   // Following is crucial:
-  pcg->setEpsilon_abs(1e-10);
-  pcg->setEpsilon_rel(1e-10);
+  pcg->epsilon_abs = 1e-10;
+  pcg->epsilon_rel = 1e-10;
   parameters.iterativeParams = pcg;
   LevenbergMarquardtOptimizer optimizer(graph, initialEstimate, parameters);

View File

@@ -26,13 +26,13 @@ namespace gtsam {
 /*****************************************************************************/
 void ConjugateGradientParameters::print(ostream &os) const {
   Base::print(os);
   cout << "ConjugateGradientParameters" << endl
-       << "minIter: " << minIterations_ << endl
-       << "maxIter: " << maxIterations_ << endl
-       << "resetIter: " << reset_ << endl
-       << "eps_rel: " << epsilon_rel_ << endl
-       << "eps_abs: " << epsilon_abs_ << endl;
+       << "minIter: " << minIterations << endl
+       << "maxIter: " << maxIterations << endl
+       << "resetIter: " << reset << endl
+       << "eps_rel: " << epsilon_rel << endl
+       << "eps_abs: " << epsilon_abs << endl;
 }
 /*****************************************************************************/

View File

@@ -24,59 +24,66 @@
 namespace gtsam {

 /**
- * parameters for the conjugate gradient method
+ * Parameters for the Conjugate Gradient method
  */
-class GTSAM_EXPORT ConjugateGradientParameters : public IterativeOptimizationParameters {
-
-public:
+struct GTSAM_EXPORT ConjugateGradientParameters
+    : public IterativeOptimizationParameters {
   typedef IterativeOptimizationParameters Base;
   typedef std::shared_ptr<ConjugateGradientParameters> shared_ptr;

-  size_t minIterations_;  ///< minimum number of cg iterations
-  size_t maxIterations_;  ///< maximum number of cg iterations
-  size_t reset_;          ///< number of iterations before reset
-  double epsilon_rel_;    ///< threshold for relative error decrease
-  double epsilon_abs_;    ///< threshold for absolute error decrease
+  size_t minIterations;  ///< minimum number of cg iterations
+  size_t maxIterations;  ///< maximum number of cg iterations
+  size_t reset;          ///< number of iterations before reset
+  double epsilon_rel;    ///< threshold for relative error decrease
+  double epsilon_abs;    ///< threshold for absolute error decrease

   /* Matrix Operation Kernel */
   enum BLASKernel {
     GTSAM = 0,  ///< Jacobian Factor Graph of GTSAM
-  } blas_kernel_;
+  } blas_kernel;

   ConjugateGradientParameters()
-      : minIterations_(1), maxIterations_(500), reset_(501), epsilon_rel_(1e-3),
-        epsilon_abs_(1e-3), blas_kernel_(GTSAM) {}
+      : minIterations(1),
+        maxIterations(500),
+        reset(501),
+        epsilon_rel(1e-3),
+        epsilon_abs(1e-3),
+        blas_kernel(GTSAM) {}

-  ConjugateGradientParameters(size_t minIterations, size_t maxIterations, size_t reset,
-      double epsilon_rel, double epsilon_abs, BLASKernel blas)
-      : minIterations_(minIterations), maxIterations_(maxIterations), reset_(reset),
-        epsilon_rel_(epsilon_rel), epsilon_abs_(epsilon_abs), blas_kernel_(blas) {}
+  ConjugateGradientParameters(size_t minIterations, size_t maxIterations,
+                              size_t reset, double epsilon_rel,
+                              double epsilon_abs, BLASKernel blas)
+      : minIterations(minIterations),
+        maxIterations(maxIterations),
+        reset(reset),
+        epsilon_rel(epsilon_rel),
+        epsilon_abs(epsilon_abs),
+        blas_kernel(blas) {}

   ConjugateGradientParameters(const ConjugateGradientParameters &p)
-      : Base(p), minIterations_(p.minIterations_), maxIterations_(p.maxIterations_), reset_(p.reset_),
-        epsilon_rel_(p.epsilon_rel_), epsilon_abs_(p.epsilon_abs_), blas_kernel_(GTSAM) {}
+      : Base(p),
+        minIterations(p.minIterations),
+        maxIterations(p.maxIterations),
+        reset(p.reset),
+        epsilon_rel(p.epsilon_rel),
+        epsilon_abs(p.epsilon_abs),
+        blas_kernel(GTSAM) {}

-  /* general interface */
-  inline size_t minIterations() const { return minIterations_; }
-  inline size_t maxIterations() const { return maxIterations_; }
-  inline size_t reset() const { return reset_; }
-  inline double epsilon() const { return epsilon_rel_; }
-  inline double epsilon_rel() const { return epsilon_rel_; }
-  inline double epsilon_abs() const { return epsilon_abs_; }
-
-  inline size_t getMinIterations() const { return minIterations_; }
-  inline size_t getMaxIterations() const { return maxIterations_; }
-  inline size_t getReset() const { return reset_; }
-  inline double getEpsilon() const { return epsilon_rel_; }
-  inline double getEpsilon_rel() const { return epsilon_rel_; }
-  inline double getEpsilon_abs() const { return epsilon_abs_; }
-
-  inline void setMinIterations(size_t value) { minIterations_ = value; }
-  inline void setMaxIterations(size_t value) { maxIterations_ = value; }
-  inline void setReset(size_t value) { reset_ = value; }
-  inline void setEpsilon(double value) { epsilon_rel_ = value; }
-  inline void setEpsilon_rel(double value) { epsilon_rel_ = value; }
-  inline void setEpsilon_abs(double value) { epsilon_abs_ = value; }
+#ifdef GTSAM_ALLOW_DEPRECATED_SINCE_V43
+  inline size_t getMinIterations() const { return minIterations; }
+  inline size_t getMaxIterations() const { return maxIterations; }
+  inline size_t getReset() const { return reset; }
+  inline double getEpsilon() const { return epsilon_rel; }
+  inline double getEpsilon_rel() const { return epsilon_rel; }
+  inline double getEpsilon_abs() const { return epsilon_abs; }
+
+  inline void setMinIterations(size_t value) { minIterations = value; }
+  inline void setMaxIterations(size_t value) { maxIterations = value; }
+  inline void setReset(size_t value) { reset = value; }
+  inline void setEpsilon(double value) { epsilon_rel = value; }
+  inline void setEpsilon_rel(double value) { epsilon_rel = value; }
+  inline void setEpsilon_abs(double value) { epsilon_abs = value; }
+#endif

   void print() const { Base::print(); }
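In user code, the change reads as a straight swap from accessors to public data members; a minimal before/after sketch (hypothetical variable names, assuming only the header above):

    ConjugateGradientParameters params;
    // New style: plain data members.
    params.maxIterations = 100;
    params.epsilon_rel = 1e-5;
    params.epsilon_abs = 1e-3;
    // Old style, compiles only when GTSAM_ALLOW_DEPRECATED_SINCE_V43 is defined:
    // params.setMaxIterations(100);
    // size_t n = params.getMaxIterations();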
@@ -109,18 +116,19 @@ V preconditionedConjugateGradient(const S &system, const V &initial,
   double currentGamma = system.dot(residual, residual), prevGamma, alpha, beta;

-  const size_t iMaxIterations = parameters.maxIterations(),
-               iMinIterations = parameters.minIterations(),
-               iReset = parameters.reset();
-  const double threshold = std::max(parameters.epsilon_abs(),
-      parameters.epsilon() * parameters.epsilon() * currentGamma);
+  const size_t iMaxIterations = parameters.maxIterations,
+               iMinIterations = parameters.minIterations,
+               iReset = parameters.reset;
+  const double threshold =
+      std::max(parameters.epsilon_abs,
+               parameters.epsilon_rel * parameters.epsilon_rel * currentGamma);

   if (parameters.verbosity() >= ConjugateGradientParameters::COMPLEXITY)
-    std::cout << "[PCG] epsilon = " << parameters.epsilon()
-              << ", max = " << parameters.maxIterations()
-              << ", reset = " << parameters.reset()
+    std::cout << "[PCG] epsilon = " << parameters.epsilon_rel
+              << ", max = " << parameters.maxIterations
+              << ", reset = " << parameters.reset
               << ", ||r0||^2 = " << currentGamma
               << ", threshold = " << threshold << std::endl;

   size_t k;
   for ( k = 1 ; k <= iMaxIterations && (currentGamma > threshold || k <= iMinIterations) ; k++ ) {
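Spelled out, the termination rule that `threshold` encodes is (restated from the code above, with \(\gamma_0 = \|r_0\|^2\)):

\[
\tau \;=\; \max\bigl(\epsilon_{\mathrm{abs}},\ \epsilon_{\mathrm{rel}}^{2}\,\gamma_0\bigr),
\]

and the loop keeps iterating while \(k \le\) maxIterations and (\(\gamma_k > \tau\) or \(k \le\) minIterations). Note that \(\epsilon_{\mathrm{rel}}\) is squared because it is compared against squared residual norms.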

View File

@@ -34,17 +34,13 @@ namespace gtsam {
 void PCGSolverParameters::print(ostream &os) const {
   Base::print(os);
   os << "PCGSolverParameters:" << endl;
-  preconditioner_->print(os);
+  preconditioner->print(os);
 }

 /*****************************************************************************/
 PCGSolver::PCGSolver(const PCGSolverParameters &p) {
   parameters_ = p;
-  preconditioner_ = createPreconditioner(p.preconditioner_);
-}
-
-void PCGSolverParameters::setPreconditionerParams(const std::shared_ptr<PreconditionerParameters> preconditioner) {
-  preconditioner_ = preconditioner;
+  preconditioner_ = createPreconditioner(p.preconditioner);
 }

 void PCGSolverParameters::print(const std::string &s) const {

View File

@@ -31,29 +31,22 @@ class VectorValues;
 struct PreconditionerParameters;

 /**
- * Parameters for PCG
+ * Parameters for Preconditioned Conjugate Gradient solver.
  */
-struct GTSAM_EXPORT PCGSolverParameters: public ConjugateGradientParameters {
-public:
+struct GTSAM_EXPORT PCGSolverParameters : public ConjugateGradientParameters {
   typedef ConjugateGradientParameters Base;
   typedef std::shared_ptr<PCGSolverParameters> shared_ptr;

-  PCGSolverParameters() {
-  }
+  std::shared_ptr<PreconditionerParameters> preconditioner;
+
+  PCGSolverParameters() {}
+
+  PCGSolverParameters(
+      const std::shared_ptr<PreconditionerParameters> &preconditioner)
+      : preconditioner(preconditioner) {}

   void print(std::ostream &os) const override;

-  /* interface to preconditioner parameters */
-  inline const PreconditionerParameters& preconditioner() const {
-    return *preconditioner_;
-  }
-
   // needed for python wrapper
   void print(const std::string &s) const;
-
-  std::shared_ptr<PreconditionerParameters> preconditioner_;
-
-  void setPreconditionerParams(const std::shared_ptr<PreconditionerParameters> preconditioner);
 };
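With the preconditioner now a public member plus a convenience constructor, configuring the solver collapses to a single expression. A minimal sketch (lmParams is a hypothetical LevenbergMarquardtParams instance, mirroring the example at the top of this diff):

    auto pcg = std::make_shared<PCGSolverParameters>(
        std::make_shared<BlockJacobiPreconditionerParameters>());
    pcg->epsilon_rel = 1e-10;  // tolerances inherited from ConjugateGradientParameters
    lmParams.iterativeParams = pcg;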
@@ -87,16 +80,16 @@ public:
 /**
  * System class needed for calling preconditionedConjugateGradient
  */
 class GTSAM_EXPORT GaussianFactorGraphSystem {
-public:
-
-  GaussianFactorGraphSystem(const GaussianFactorGraph &gfg,
-      const Preconditioner &preconditioner, const KeyInfo &info,
-      const std::map<Key, Vector> &lambda);
-
   const GaussianFactorGraph &gfg_;
   const Preconditioner &preconditioner_;
-  const KeyInfo &keyInfo_;
-  const std::map<Key, Vector> &lambda_;
+  KeyInfo keyInfo_;
+  std::map<Key, Vector> lambda_;
+
+ public:
+  GaussianFactorGraphSystem(const GaussianFactorGraph &gfg,
+                            const Preconditioner &preconditioner,
+                            const KeyInfo &info,
+                            const std::map<Key, Vector> &lambda);

   void residual(const Vector &x, Vector &r) const;
   void multiply(const Vector &x, Vector& y) const;

View File

@@ -49,10 +49,12 @@ namespace gtsam {
     // init gamma and calculate threshold
     gamma = dot(g,g);
-    threshold = std::max(parameters_.epsilon_abs(), parameters_.epsilon() * parameters_.epsilon() * gamma);
+    threshold =
+        std::max(parameters_.epsilon_abs,
+                 parameters_.epsilon_rel * parameters_.epsilon_rel * gamma);

     // Allocate and calculate A*d for first iteration
-    if (gamma > parameters_.epsilon_abs()) Ad = Ab * d;
+    if (gamma > parameters_.epsilon_abs) Ad = Ab * d;
   }

 /* ************************************************************************* */
@@ -79,13 +81,13 @@ namespace gtsam {
     // take a step, return true if converged
     bool step(const S& Ab, V& x) {
-      if ((++k) >= ((int)parameters_.maxIterations())) return true;
+      if ((++k) >= ((int)parameters_.maxIterations)) return true;

       //---------------------------------->
       double alpha = takeOptimalStep(x);

       // update gradient (or re-calculate at reset time)
-      if (k % parameters_.reset() == 0) g = Ab.gradient(x);
+      if (k % parameters_.reset == 0) g = Ab.gradient(x);
       // axpy(alpha, Ab ^ Ad, g); // g += alpha*(Ab^Ad)
       else Ab.transposeMultiplyAdd(alpha, Ad, g);
@@ -126,11 +128,10 @@ namespace gtsam {
     CGState<S, V, E> state(Ab, x, parameters, steepest);

     if (parameters.verbosity() != ConjugateGradientParameters::SILENT)
-      std::cout << "CG: epsilon = " << parameters.epsilon()
-                << ", maxIterations = " << parameters.maxIterations()
+      std::cout << "CG: epsilon = " << parameters.epsilon_rel
+                << ", maxIterations = " << parameters.maxIterations
                 << ", ||g0||^2 = " << state.gamma
-                << ", threshold = " << state.threshold
-                << std::endl;
+                << ", threshold = " << state.threshold << std::endl;

     if ( state.gamma < state.threshold ) {
       if (parameters.verbosity() != ConjugateGradientParameters::SILENT)

View File

@@ -710,17 +710,11 @@ virtual class IterativeOptimizationParameters {
 #include <gtsam/linear/ConjugateGradientSolver.h>
 virtual class ConjugateGradientParameters : gtsam::IterativeOptimizationParameters {
   ConjugateGradientParameters();
-  int getMinIterations() const;
-  int getMaxIterations() const;
-  int getReset() const;
-  double getEpsilon_rel() const;
-  double getEpsilon_abs() const;
-
-  void setMinIterations(int value);
-  void setMaxIterations(int value);
-  void setReset(int value);
-  void setEpsilon_rel(double value);
-  void setEpsilon_abs(double value);
+  int minIterations;
+  int maxIterations;
+  int reset;
+  double epsilon_rel;
+  double epsilon_abs;
 };
 #include <gtsam/linear/Preconditioner.h>
@@ -739,8 +733,10 @@ virtual class BlockJacobiPreconditionerParameters : gtsam::PreconditionerParamet
 #include <gtsam/linear/PCGSolver.h>
 virtual class PCGSolverParameters : gtsam::ConjugateGradientParameters {
   PCGSolverParameters();
+  PCGSolverParameters(gtsam::PreconditionerParameters* preconditioner);
   void print(string s = "");
-  void setPreconditionerParams(gtsam::PreconditionerParameters* preconditioner);
+  gtsam::PreconditionerParameters* preconditioner;
 };
#include <gtsam/linear/SubgraphSolver.h> #include <gtsam/linear/SubgraphSolver.h>

View File

@@ -16,11 +16,11 @@
  * @date Jun 11, 2012
  */

-#include <gtsam/nonlinear/NonlinearConjugateGradientOptimizer.h>
-#include <gtsam/nonlinear/internal/NonlinearOptimizerState.h>
-#include <gtsam/nonlinear/Values.h>
 #include <gtsam/linear/GaussianFactorGraph.h>
 #include <gtsam/linear/VectorValues.h>
+#include <gtsam/nonlinear/NonlinearConjugateGradientOptimizer.h>
+#include <gtsam/nonlinear/Values.h>
+#include <gtsam/nonlinear/internal/NonlinearOptimizerState.h>

 #include <cmath>
@@ -34,29 +34,35 @@ typedef internal::NonlinearOptimizerState State;
  * @param values a linearization point
  * Can be moved to NonlinearFactorGraph.h if desired
  */
-static VectorValues gradientInPlace(const NonlinearFactorGraph &nfg,
-    const Values &values) {
+static VectorValues gradientInPlace(const NonlinearFactorGraph& nfg,
+                                    const Values& values) {
   // Linearize graph
   GaussianFactorGraph::shared_ptr linear = nfg.linearize(values);
   return linear->gradientAtZero();
 }

 NonlinearConjugateGradientOptimizer::NonlinearConjugateGradientOptimizer(
-    const NonlinearFactorGraph& graph, const Values& initialValues, const Parameters& params)
-    : Base(graph, std::unique_ptr<State>(new State(initialValues, graph.error(initialValues)))),
+    const NonlinearFactorGraph& graph, const Values& initialValues,
+    const Parameters& params)
+    : Base(graph, std::unique_ptr<State>(
+                      new State(initialValues, graph.error(initialValues)))),
       params_(params) {}

-double NonlinearConjugateGradientOptimizer::System::error(const State& state) const {
+double NonlinearConjugateGradientOptimizer::System::error(
+    const State& state) const {
   return graph_.error(state);
 }

-NonlinearConjugateGradientOptimizer::System::Gradient NonlinearConjugateGradientOptimizer::System::gradient(
-    const State &state) const {
+NonlinearConjugateGradientOptimizer::System::Gradient
+NonlinearConjugateGradientOptimizer::System::gradient(
+    const State& state) const {
   return gradientInPlace(graph_, state);
 }

-NonlinearConjugateGradientOptimizer::System::State NonlinearConjugateGradientOptimizer::System::advance(
-    const State &current, const double alpha, const Gradient &g) const {
+NonlinearConjugateGradientOptimizer::System::State
+NonlinearConjugateGradientOptimizer::System::advance(const State& current,
+                                                     const double alpha,
+                                                     const Gradient& g) const {
   Gradient step = g;
   step *= alpha;
   return current.retract(step);
@@ -65,7 +71,8 @@ NonlinearConjugateGradientOptimizer::System::State NonlinearConjugateGradientOpt
 GaussianFactorGraph::shared_ptr NonlinearConjugateGradientOptimizer::iterate() {
   const auto [newValues, dummy] = nonlinearConjugateGradient<System, Values>(
       System(graph_), state_->values, params_, true /* single iteration */);
-  state_.reset(new State(newValues, graph_.error(newValues), state_->iterations + 1));
+  state_.reset(
+      new State(newValues, graph_.error(newValues), state_->iterations + 1));

   // NOTE(frank): We don't linearize this system, so we must return null here.
   return nullptr;
@@ -76,9 +83,9 @@ const Values& NonlinearConjugateGradientOptimizer::optimize() {
   System system(graph_);
   const auto [newValues, iterations] =
       nonlinearConjugateGradient(system, state_->values, params_, false);
-  state_.reset(new State(std::move(newValues), graph_.error(newValues), iterations));
+  state_.reset(
+      new State(std::move(newValues), graph_.error(newValues), iterations));
   return state_->values;
 }

 } /* namespace gtsam */

View File

@@ -24,89 +24,80 @@
 namespace gtsam {

 /** An implementation of the nonlinear CG method using the template below */
-class GTSAM_EXPORT NonlinearConjugateGradientOptimizer : public NonlinearOptimizer {
+class GTSAM_EXPORT NonlinearConjugateGradientOptimizer
+    : public NonlinearOptimizer {
   /* a class for the nonlinearConjugateGradient template */
   class System {
    public:
     typedef Values State;
     typedef VectorValues Gradient;
     typedef NonlinearOptimizerParams Parameters;

    protected:
     const NonlinearFactorGraph &graph_;

    public:
-    System(const NonlinearFactorGraph &graph) :
-        graph_(graph) {
-    }
+    System(const NonlinearFactorGraph &graph) : graph_(graph) {}
     double error(const State &state) const;
     Gradient gradient(const State &state) const;
     State advance(const State &current, const double alpha,
                   const Gradient &g) const;
   };

  public:
   typedef NonlinearOptimizer Base;
   typedef NonlinearOptimizerParams Parameters;
   typedef std::shared_ptr<NonlinearConjugateGradientOptimizer> shared_ptr;

  protected:
   Parameters params_;

-  const NonlinearOptimizerParams& _params() const override {
-    return params_;
-  }
+  const NonlinearOptimizerParams &_params() const override { return params_; }

  public:
   /// Constructor
-  NonlinearConjugateGradientOptimizer(const NonlinearFactorGraph& graph,
-      const Values& initialValues, const Parameters& params = Parameters());
+  NonlinearConjugateGradientOptimizer(const NonlinearFactorGraph &graph,
+                                      const Values &initialValues,
+                                      const Parameters &params = Parameters());

   /// Destructor
-  ~NonlinearConjugateGradientOptimizer() override {
-  }
+  ~NonlinearConjugateGradientOptimizer() override {}

   /**
    * Perform a single iteration, returning GaussianFactorGraph corresponding to
    * the linearized factor graph.
    */
   GaussianFactorGraph::shared_ptr iterate() override;

   /**
    * Optimize for the maximum-likelihood estimate, returning the optimized
    * variable assignments.
    */
-  const Values& optimize() override;
+  const Values &optimize() override;
 };
 /** Implement the golden-section line search algorithm */
-template<class S, class V, class W>
+template <class S, class V, class W>
 double lineSearch(const S &system, const V currentValues, const W &gradient) {
   /* normalize it such that it becomes a unit vector */
   const double g = gradient.norm();

-  // perform the golden section search algorithm to decide the optimal step size
-  // detail refer to http://en.wikipedia.org/wiki/Golden_section_search
-  const double phi = 0.5 * (1.0 + std::sqrt(5.0)), resphi = 2.0 - phi, tau =
-      1e-5;
-  double minStep = -1.0 / g, maxStep = 0, newStep = minStep
-      + (maxStep - minStep) / (phi + 1.0);
+  // perform the golden section search algorithm to decide the optimal step
+  // size; detail refer to http://en.wikipedia.org/wiki/Golden_section_search
+  const double phi = 0.5 * (1.0 + std::sqrt(5.0)), resphi = 2.0 - phi,
+               tau = 1e-5;
+  double minStep = -1.0 / g, maxStep = 0,
+         newStep = minStep + (maxStep - minStep) / (phi + 1.0);

   V newValues = system.advance(currentValues, newStep, gradient);
   double newError = system.error(newValues);

   while (true) {
     const bool flag = (maxStep - newStep > newStep - minStep) ? true : false;
-    const double testStep =
-        flag ? newStep + resphi * (maxStep - newStep) :
-            newStep - resphi * (newStep - minStep);
+    const double testStep = flag ? newStep + resphi * (maxStep - newStep)
+                                 : newStep - resphi * (newStep - minStep);

-    if ((maxStep - minStep)
-        < tau * (std::abs(testStep) + std::abs(newStep))) {
+    if ((maxStep - minStep) < tau * (std::abs(testStep) + std::abs(newStep))) {
       return 0.5 * (minStep + maxStep);
     }
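Restating the constants above in closed form (derived from the code; not stated elsewhere in the PR):

\[
\varphi = \tfrac{1}{2}\bigl(1+\sqrt{5}\bigr) \approx 1.618, \qquad \text{resphi} = 2 - \varphi \approx 0.382, \qquad \tau = 10^{-5},
\]

and the bracket \([\text{minStep}, \text{maxStep}]\) is shrunk until \(\text{maxStep} - \text{minStep} < \tau\,(|\text{testStep}| + |\text{newStep}|)\), at which point the bracket midpoint is returned as the step size.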
@@ -135,19 +126,21 @@ double lineSearch(const S &system, const V currentValues, const W &gradient) {
 }

 /**
- * Implement the nonlinear conjugate gradient method using the Polak-Ribiere formula suggested in
+ * Implement the nonlinear conjugate gradient method using the Polak-Ribiere
+ * formula suggested in
  * http://en.wikipedia.org/wiki/Nonlinear_conjugate_gradient_method.
  *
- * The S (system) class requires three member functions: error(state), gradient(state) and
- * advance(state, step-size, direction). The V class denotes the state or the solution.
+ * The S (system) class requires three member functions: error(state),
+ * gradient(state) and advance(state, step-size, direction). The V class denotes
+ * the state or the solution.
  *
- * The last parameter is a switch between gradient-descent and conjugate gradient
+ * The last parameter is a switch between gradient-descent and conjugate
+ * gradient
  */
-template<class S, class V>
-std::tuple<V, int> nonlinearConjugateGradient(const S &system,
-    const V &initial, const NonlinearOptimizerParams &params,
-    const bool singleIteration, const bool gradientDescent = false) {
+template <class S, class V>
+std::tuple<V, int> nonlinearConjugateGradient(
+    const S &system, const V &initial, const NonlinearOptimizerParams &params,
+    const bool singleIteration, const bool gradientDescent = false) {
   // GTSAM_CONCEPT_MANIFOLD_TYPE(V)

   size_t iteration = 0;
@@ -157,14 +150,14 @@ std::tuple<V, int> nonlinearConjugateGradient(const S &system,
   if (currentError <= params.errorTol) {
     if (params.verbosity >= NonlinearOptimizerParams::ERROR) {
       std::cout << "Exiting, as error = " << currentError << " < "
                 << params.errorTol << std::endl;
     }
     return {initial, iteration};
   }

   V currentValues = initial;
   typename S::Gradient currentGradient = system.gradient(currentValues),
                        prevGradient, direction = currentGradient;

   /* do one step of gradient descent */
   V prevValues = currentValues;
@@ -185,9 +178,9 @@ std::tuple<V, int> nonlinearConjugateGradient(const S &system,
     prevGradient = currentGradient;
     currentGradient = system.gradient(currentValues);
     // Polak-Ribiere: beta = g'*(g_n-g_n-1)/g_n-1'*g_n-1
-    const double beta = std::max(0.0,
-        currentGradient.dot(currentGradient - prevGradient)
-            / prevGradient.dot(prevGradient));
+    const double beta =
+        std::max(0.0, currentGradient.dot(currentGradient - prevGradient) /
+                          prevGradient.dot(prevGradient));
     direction = currentGradient + (beta * direction);
   }
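In standard notation, the hunk above computes the non-negative Polak-Ribiere coefficient and search direction:

\[
\beta_k = \max\!\left(0,\ \frac{g_k^{\top}(g_k - g_{k-1})}{g_{k-1}^{\top}\,g_{k-1}}\right), \qquad d_k = g_k + \beta_k\, d_{k-1},
\]

so the direction falls back to a plain gradient step whenever \(\beta_k\) clamps to zero.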
@@ -205,20 +198,21 @@ std::tuple<V, int> nonlinearConjugateGradient(const S &system,
     // Maybe show output
     if (params.verbosity >= NonlinearOptimizerParams::ERROR)
-      std::cout << "iteration: " << iteration << ", currentError: " << currentError << std::endl;
-  } while (++iteration < params.maxIterations && !singleIteration
-      && !checkConvergence(params.relativeErrorTol, params.absoluteErrorTol,
-          params.errorTol, prevError, currentError, params.verbosity));
+      std::cout << "iteration: " << iteration
+                << ", currentError: " << currentError << std::endl;
+  } while (++iteration < params.maxIterations && !singleIteration &&
+           !checkConvergence(params.relativeErrorTol, params.absoluteErrorTol,
+                             params.errorTol, prevError, currentError,
+                             params.verbosity));

   // Printing if verbose
-  if (params.verbosity >= NonlinearOptimizerParams::ERROR
-      && iteration >= params.maxIterations)
-    std::cout
-        << "nonlinearConjugateGradient: Terminating because reached maximum iterations"
-        << std::endl;
+  if (params.verbosity >= NonlinearOptimizerParams::ERROR &&
+      iteration >= params.maxIterations)
+    std::cout << "nonlinearConjugateGradient: Terminating because reached "
+                 "maximum iterations"
+              << std::endl;

   return {currentValues, iteration};
 }

-}  // \ namespace gtsam
+}  // namespace gtsam

View File

@@ -1,56 +1,52 @@
 /**
- * @file NonlinearConjugateGradientOptimizer.cpp
- * @brief Test simple CG optimizer
+ * @file testNonlinearConjugateGradientOptimizer.cpp
+ * @brief Test nonlinear CG optimizer
  * @author Yong-Dian Jian
+ * @author Varun Agrawal
  * @date June 11, 2012
  */

-/**
- * @file testGradientDescentOptimizer.cpp
- * @brief Small test of NonlinearConjugateGradientOptimizer
- * @author Yong-Dian Jian
- * @date Jun 11, 2012
- */
-
-#include <gtsam/slam/BetweenFactor.h>
+#include <CppUnitLite/TestHarness.h>
+#include <gtsam/geometry/Pose2.h>
 #include <gtsam/nonlinear/NonlinearConjugateGradientOptimizer.h>
 #include <gtsam/nonlinear/NonlinearFactorGraph.h>
 #include <gtsam/nonlinear/Values.h>
-#include <gtsam/geometry/Pose2.h>
-#include <CppUnitLite/TestHarness.h>
+#include <gtsam/slam/BetweenFactor.h>
 using namespace std;
 using namespace gtsam;

 // Generate a small PoseSLAM problem
 std::tuple<NonlinearFactorGraph, Values> generateProblem() {
   // 1. Create graph container and add factors to it
   NonlinearFactorGraph graph;

   // 2a. Add Gaussian prior
   Pose2 priorMean(0.0, 0.0, 0.0);  // prior at origin
-  SharedDiagonal priorNoise = noiseModel::Diagonal::Sigmas(
-      Vector3(0.3, 0.3, 0.1));
+  SharedDiagonal priorNoise =
+      noiseModel::Diagonal::Sigmas(Vector3(0.3, 0.3, 0.1));
   graph.addPrior(1, priorMean, priorNoise);

   // 2b. Add odometry factors
-  SharedDiagonal odometryNoise = noiseModel::Diagonal::Sigmas(
-      Vector3(0.2, 0.2, 0.1));
-  graph.emplace_shared<BetweenFactor<Pose2>>(1, 2, Pose2(2.0, 0.0, 0.0), odometryNoise);
-  graph.emplace_shared<BetweenFactor<Pose2>>(2, 3, Pose2(2.0, 0.0, M_PI_2), odometryNoise);
-  graph.emplace_shared<BetweenFactor<Pose2>>(3, 4, Pose2(2.0, 0.0, M_PI_2), odometryNoise);
-  graph.emplace_shared<BetweenFactor<Pose2>>(4, 5, Pose2(2.0, 0.0, M_PI_2), odometryNoise);
+  SharedDiagonal odometryNoise =
+      noiseModel::Diagonal::Sigmas(Vector3(0.2, 0.2, 0.1));
+  graph.emplace_shared<BetweenFactor<Pose2>>(1, 2, Pose2(2.0, 0.0, 0.0),
+                                             odometryNoise);
+  graph.emplace_shared<BetweenFactor<Pose2>>(2, 3, Pose2(2.0, 0.0, M_PI_2),
+                                             odometryNoise);
+  graph.emplace_shared<BetweenFactor<Pose2>>(3, 4, Pose2(2.0, 0.0, M_PI_2),
+                                             odometryNoise);
+  graph.emplace_shared<BetweenFactor<Pose2>>(4, 5, Pose2(2.0, 0.0, M_PI_2),
+                                             odometryNoise);

   // 2c. Add pose constraint
-  SharedDiagonal constraintUncertainty = noiseModel::Diagonal::Sigmas(
-      Vector3(0.2, 0.2, 0.1));
+  SharedDiagonal constraintUncertainty =
+      noiseModel::Diagonal::Sigmas(Vector3(0.2, 0.2, 0.1));
   graph.emplace_shared<BetweenFactor<Pose2>>(5, 2, Pose2(2.0, 0.0, M_PI_2),
                                              constraintUncertainty);

-  // 3. Create the data structure to hold the initialEstimate estimate to the solution
+  // 3. Create the data structure to hold the initialEstimate estimate to the
+  // solution
   Values initialEstimate;
   Pose2 x1(0.5, 0.0, 0.2);
   initialEstimate.insert(1, x1);
@@ -68,16 +64,17 @@ std::tuple<NonlinearFactorGraph, Values> generateProblem() {

 /* ************************************************************************* */
 TEST(NonlinearConjugateGradientOptimizer, Optimize) {
   const auto [graph, initialEstimate] = generateProblem();
   // cout << "initial error = " << graph.error(initialEstimate) << endl;

   NonlinearOptimizerParams param;
-  param.maxIterations = 500; /* requires a larger number of iterations to converge */
+  param.maxIterations =
+      500; /* requires a larger number of iterations to converge */
   param.verbosity = NonlinearOptimizerParams::SILENT;

   NonlinearConjugateGradientOptimizer optimizer(graph, initialEstimate, param);
   Values result = optimizer.optimize();
   // cout << "cg final error = " << graph.error(result) << endl;

   EXPECT_DOUBLES_EQUAL(0.0, graph.error(result), 1e-4);
 }

View File

@@ -67,20 +67,15 @@ ShonanAveragingParameters<d>::ShonanAveragingParameters(
   builderParameters.augmentationWeight = SubgraphBuilderParameters::SKELETON;
   builderParameters.augmentationFactor = 0.0;

-  auto pcg = std::make_shared<PCGSolverParameters>();
-
   // Choose optimization method
   if (method == "SUBGRAPH") {
     lm.iterativeParams =
         std::make_shared<SubgraphSolverParameters>(builderParameters);
   } else if (method == "SGPC") {
-    pcg->preconditioner_ =
-        std::make_shared<SubgraphPreconditionerParameters>(builderParameters);
-    lm.iterativeParams = pcg;
+    lm.iterativeParams = std::make_shared<PCGSolverParameters>(
+        std::make_shared<SubgraphPreconditionerParameters>(builderParameters));
   } else if (method == "JACOBI") {
-    pcg->preconditioner_ =
-        std::make_shared<BlockJacobiPreconditionerParameters>();
-    lm.iterativeParams = pcg;
+    lm.iterativeParams = std::make_shared<PCGSolverParameters>(
+        std::make_shared<BlockJacobiPreconditionerParameters>());
   } else if (method == "QR") {
     lm.setLinearSolverType("MULTIFRONTAL_QR");
   } else if (method == "CHOLESKY") {

View File

@@ -69,7 +69,7 @@ class TestScenario(GtsamTestCase):
         lmParams = LevenbergMarquardtParams.CeresDefaults()
         lmParams.setLinearSolverType("ITERATIVE")
         cgParams = PCGSolverParameters()
-        cgParams.setPreconditionerParams(DummyPreconditionerParameters())
+        cgParams.preconditioner = DummyPreconditionerParameters()
         lmParams.setIterativeParams(cgParams)
         actual = LevenbergMarquardtOptimizer(self.fg, self.initial_values, lmParams).optimize()
         self.assertAlmostEqual(0, self.fg.error(actual))

View File

@@ -95,9 +95,9 @@ TEST( Iterative, conjugateGradientDescent_hard_constraint )
   VectorValues zeros = config.zeroVectors();

   ConjugateGradientParameters parameters;
-  parameters.setEpsilon_abs(1e-3);
-  parameters.setEpsilon_rel(1e-5);
-  parameters.setMaxIterations(100);
+  parameters.epsilon_abs = 1e-3;
+  parameters.epsilon_rel = 1e-5;
+  parameters.maxIterations = 100;
   VectorValues actual = conjugateGradientDescent(*fg, zeros, parameters);

   VectorValues expected;
@@ -122,9 +122,9 @@ TEST( Iterative, conjugateGradientDescent_soft_constraint )
   VectorValues zeros = config.zeroVectors();

   ConjugateGradientParameters parameters;
-  parameters.setEpsilon_abs(1e-3);
-  parameters.setEpsilon_rel(1e-5);
-  parameters.setMaxIterations(100);
+  parameters.epsilon_abs = 1e-3;
+  parameters.epsilon_rel = 1e-5;
+  parameters.maxIterations = 100;
   VectorValues actual = conjugateGradientDescent(*fg, zeros, parameters);

   VectorValues expected;

View File

@@ -125,8 +125,8 @@ TEST( GaussianFactorGraphSystem, multiply_getb)
 TEST(PCGSolver, dummy) {
   LevenbergMarquardtParams params;
   params.linearSolverType = LevenbergMarquardtParams::Iterative;
-  auto pcg = std::make_shared<PCGSolverParameters>();
-  pcg->preconditioner_ = std::make_shared<DummyPreconditionerParameters>();
+  auto pcg = std::make_shared<PCGSolverParameters>(
+      std::make_shared<DummyPreconditionerParameters>());
   params.iterativeParams = pcg;

   NonlinearFactorGraph fg = example::createReallyNonlinearFactorGraph();
@@ -145,9 +145,8 @@ TEST(PCGSolver, dummy) {
 TEST(PCGSolver, blockjacobi) {
   LevenbergMarquardtParams params;
   params.linearSolverType = LevenbergMarquardtParams::Iterative;
-  auto pcg = std::make_shared<PCGSolverParameters>();
-  pcg->preconditioner_ =
-      std::make_shared<BlockJacobiPreconditionerParameters>();
+  auto pcg = std::make_shared<PCGSolverParameters>(
+      std::make_shared<BlockJacobiPreconditionerParameters>());
   params.iterativeParams = pcg;

   NonlinearFactorGraph fg = example::createReallyNonlinearFactorGraph();
@@ -166,8 +165,8 @@ TEST(PCGSolver, blockjacobi) {
 TEST(PCGSolver, subgraph) {
   LevenbergMarquardtParams params;
   params.linearSolverType = LevenbergMarquardtParams::Iterative;
-  auto pcg = std::make_shared<PCGSolverParameters>();
-  pcg->preconditioner_ = std::make_shared<SubgraphPreconditionerParameters>();
+  auto pcg = std::make_shared<PCGSolverParameters>(
+      std::make_shared<SubgraphPreconditionerParameters>());
   params.iterativeParams = pcg;

   NonlinearFactorGraph fg = example::createReallyNonlinearFactorGraph();

View File

@@ -54,21 +54,23 @@ TEST( PCGsolver, verySimpleLinearSystem) {
   // Solve the system using Preconditioned Conjugate Gradient solver
   // Common PCG parameters
   gtsam::PCGSolverParameters::shared_ptr pcg = std::make_shared<gtsam::PCGSolverParameters>();
-  pcg->setMaxIterations(500);
-  pcg->setEpsilon_abs(0.0);
-  pcg->setEpsilon_rel(0.0);
+  pcg->maxIterations = 500;
+  pcg->epsilon_abs = 0.0;
+  pcg->epsilon_rel = 0.0;
   //pcg->setVerbosity("ERROR");

   // With Dummy preconditioner
-  pcg->preconditioner_ = std::make_shared<gtsam::DummyPreconditionerParameters>();
+  pcg->preconditioner =
+      std::make_shared<gtsam::DummyPreconditionerParameters>();
   VectorValues deltaPCGDummy = PCGSolver(*pcg).optimize(simpleGFG);
   EXPECT(assert_equal(exactSolution, deltaPCGDummy, 1e-7));
   //deltaPCGDummy.print("PCG Dummy");

   // With Block-Jacobi preconditioner
-  pcg->preconditioner_ = std::make_shared<gtsam::BlockJacobiPreconditionerParameters>();
+  pcg->preconditioner =
+      std::make_shared<gtsam::BlockJacobiPreconditionerParameters>();
   // It takes more than 1000 iterations for this test
-  pcg->setMaxIterations(1500);
+  pcg->maxIterations = 1500;
   VectorValues deltaPCGJacobi = PCGSolver(*pcg).optimize(simpleGFG);
   EXPECT(assert_equal(exactSolution, deltaPCGJacobi, 1e-5));
@@ -105,19 +107,21 @@ TEST(PCGSolver, simpleLinearSystem) {
   // Solve the system using Preconditioned Conjugate Gradient solver
   // Common PCG parameters
   gtsam::PCGSolverParameters::shared_ptr pcg = std::make_shared<gtsam::PCGSolverParameters>();
-  pcg->setMaxIterations(500);
-  pcg->setEpsilon_abs(0.0);
-  pcg->setEpsilon_rel(0.0);
+  pcg->maxIterations = 500;
+  pcg->epsilon_abs = 0.0;
+  pcg->epsilon_rel = 0.0;
   //pcg->setVerbosity("ERROR");

   // With Dummy preconditioner
-  pcg->preconditioner_ = std::make_shared<gtsam::DummyPreconditionerParameters>();
+  pcg->preconditioner =
+      std::make_shared<gtsam::DummyPreconditionerParameters>();
   VectorValues deltaPCGDummy = PCGSolver(*pcg).optimize(simpleGFG);
   EXPECT(assert_equal(expectedSolution, deltaPCGDummy, 1e-5));
   //deltaPCGDummy.print("PCG Dummy");

   // With Block-Jacobi preconditioner
-  pcg->preconditioner_ = std::make_shared<gtsam::BlockJacobiPreconditionerParameters>();
+  pcg->preconditioner =
+      std::make_shared<gtsam::BlockJacobiPreconditionerParameters>();
   VectorValues deltaPCGJacobi = PCGSolver(*pcg).optimize(simpleGFG);
   EXPECT(assert_equal(expectedSolution, deltaPCGJacobi, 1e-5));
   //deltaPCGJacobi.print("PCG Jacobi");

View File

@@ -48,7 +48,7 @@ static double error(const GaussianFactorGraph& fg, const VectorValues& x) {
 TEST( SubgraphSolver, Parameters )
 {
   LONGS_EQUAL(SubgraphSolverParameters::SILENT, kParameters.verbosity());
-  LONGS_EQUAL(500, kParameters.maxIterations());
+  LONGS_EQUAL(500, kParameters.maxIterations);
 }

 /* ************************************************************************* */

View File

@@ -83,7 +83,7 @@ int main(int argc, char* argv[]) {
   // params.setVerbosityLM("SUMMARY");
   // params.linearSolverType = LevenbergMarquardtParams::Iterative;
   // auto pcg = std::make_shared<PCGSolverParameters>();
-  // pcg->preconditioner_ =
+  // pcg->preconditioner =
   //     std::make_shared<SubgraphPreconditionerParameters>();
   //     std::make_shared<BlockJacobiPreconditionerParameters>();
   // params.iterativeParams = pcg;