Added a virtual solve method to NonlinearOptimizer that you can override, e.g., with Ceres.
See the example in testNonlinearOptimizer. As part of this, I also merged SuccessiveLinearizationParams into NonlinearOptimizerParams, which now lives in its own file, NonlinearOptimizerParams.h.

Branch: release/4.3a0
parent ee38b8f884 · commit def9b84e45
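For reference, the override pattern this enables looks roughly like the sketch below; it is adapted from the IterativeLM example added to testNonlinearOptimizer.cpp in this commit, and a Ceres-backed (or any other) linear solver would plug in the same way, by returning the update step from solve().

    // Minimal sketch (adapted from the new test): subclass an optimizer and
    // override the virtual solve() hook to swap in a different linear solver.
    class IterativeLM : public LevenbergMarquardtOptimizer {
      ConjugateGradientParameters cgParams_;  // solver-specific parameters
    public:
      IterativeLM(const NonlinearFactorGraph& graph, const Values& initialValues,
          const ConjugateGradientParameters& p,
          const LevenbergMarquardtParams& params = LevenbergMarquardtParams()) :
          LevenbergMarquardtOptimizer(graph, initialValues, params), cgParams_(p) {}

      // Called by iterate() in place of the default direct solve
      virtual VectorValues solve(const GaussianFactorGraph& gfg, const Values& initial,
          const NonlinearOptimizerParams& params) const {
        VectorValues zeros = initial.zeroVectors();
        return conjugateGradientDescent(gfg, zeros, cgParams_);
      }
    };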
@@ -104,7 +104,7 @@ int main(int argc, char** argv) {
   LevenbergMarquardtParams parameters;
   parameters.verbosity = NonlinearOptimizerParams::ERROR;
   parameters.verbosityLM = LevenbergMarquardtParams::LAMBDA;
-  parameters.linearSolverType = SuccessiveLinearizationParams::CONJUGATE_GRADIENT;
+  parameters.linearSolverType = NonlinearSolverParams::CONJUGATE_GRADIENT;

   {
     parameters.iterativeParams = boost::make_shared<SubgraphSolverParameters>();
@@ -442,7 +442,7 @@ void runBatch()
   gttic_(Create_optimizer);
   GaussNewtonParams params;
-  params.linearSolverType = SuccessiveLinearizationParams::MULTIFRONTAL_CHOLESKY;
+  params.linearSolverType = NonlinearSolverParams::MULTIFRONTAL_CHOLESKY;
   GaussNewtonOptimizer optimizer(measurements, initial, params);
   gttoc_(Create_optimizer);
   double lastError;
gtsam.h
@@ -1761,9 +1761,9 @@ bool checkConvergence(double relativeErrorTreshold,
     double absoluteErrorTreshold, double errorThreshold,
     double currentError, double newError);

-#include <gtsam/nonlinear/SuccessiveLinearizationOptimizer.h>
-virtual class SuccessiveLinearizationParams : gtsam::NonlinearOptimizerParams {
-  SuccessiveLinearizationParams();
+#include <gtsam/nonlinear/NonlinearOptimizerParams.h>
+virtual class NonlinearSolverParams : gtsam::NonlinearOptimizerParams {
+  NonlinearSolverParams();

   string getLinearSolverType() const;

@@ -1778,12 +1778,12 @@ virtual class SuccessiveLinearizationParams : gtsam::NonlinearOptimizerParams {
 };

 #include <gtsam/nonlinear/GaussNewtonOptimizer.h>
-virtual class GaussNewtonParams : gtsam::SuccessiveLinearizationParams {
+virtual class GaussNewtonParams : gtsam::NonlinearSolverParams {
   GaussNewtonParams();
 };

 #include <gtsam/nonlinear/LevenbergMarquardtOptimizer.h>
-virtual class LevenbergMarquardtParams : gtsam::SuccessiveLinearizationParams {
+virtual class LevenbergMarquardtParams : gtsam::NonlinearSolverParams {
   LevenbergMarquardtParams();

   double getlambdaInitial() const;

@@ -1798,7 +1798,7 @@ virtual class LevenbergMarquardtParams : gtsam::SuccessiveLinearizationParams {
 };

 #include <gtsam/nonlinear/DoglegOptimizer.h>
-virtual class DoglegParams : gtsam::SuccessiveLinearizationParams {
+virtual class DoglegParams : gtsam::NonlinearSolverParams {
   DoglegParams();

   double getDeltaInitial() const;
@@ -45,7 +45,7 @@ public:
  * To use it in nonlinear optimization, please see the following example
  *
  *   LevenbergMarquardtParams parameters;
- *   parameters.linearSolverType = SuccessiveLinearizationParams::CONJUGATE_GRADIENT;
+ *   parameters.linearSolverType = NonlinearSolverParams::CONJUGATE_GRADIENT;
 *   parameters.iterativeParams = boost::make_shared<SubgraphSolverParameters>();
 *   LevenbergMarquardtOptimizer optimizer(graph, initialEstimate, parameters);
 *   Values result = optimizer.optimize();
@@ -18,7 +18,7 @@
 #pragma once

-#include <gtsam/nonlinear/SuccessiveLinearizationOptimizer.h>
+#include <gtsam/nonlinear/NonlinearOptimizer.h>

 namespace gtsam {

@@ -29,7 +29,7 @@ class DoglegOptimizer;
  * common to all nonlinear optimization algorithms. This class also contains
  * all of those parameters.
  */
-class GTSAM_EXPORT DoglegParams : public SuccessiveLinearizationParams {
+class GTSAM_EXPORT DoglegParams : public NonlinearOptimizerParams {
 public:
   /** See DoglegParams::dlVerbosity */
   enum VerbosityDL {

@@ -46,7 +46,7 @@ public:
   virtual ~DoglegParams() {}

   virtual void print(const std::string& str = "") const {
-    SuccessiveLinearizationParams::print(str);
+    NonlinearOptimizerParams::print(str);
     std::cout << " deltaInitial: " << deltaInitial << "\n";
     std::cout.flush();
   }
@@ -33,7 +33,7 @@ void GaussNewtonOptimizer::iterate() {
   GaussianFactorGraph::shared_ptr linear = graph_.linearize(current.values);

   // Solve Factor Graph
-  const VectorValues delta = solveGaussianFactorGraph(*linear, params_);
+  const VectorValues delta = solve(*linear, current.values, params_);

   // Maybe show output
   if(params_.verbosity >= NonlinearOptimizerParams::DELTA) delta.print("delta");
@@ -18,7 +18,7 @@
 #pragma once

-#include <gtsam/nonlinear/SuccessiveLinearizationOptimizer.h>
+#include <gtsam/nonlinear/NonlinearOptimizer.h>

 namespace gtsam {

@@ -27,7 +27,7 @@ class GaussNewtonOptimizer;
 /** Parameters for Gauss-Newton optimization, inherits from
  * NonlinearOptimizationParams.
  */
-class GTSAM_EXPORT GaussNewtonParams : public SuccessiveLinearizationParams {
+class GTSAM_EXPORT GaussNewtonParams : public NonlinearOptimizerParams {
 };

 class GTSAM_EXPORT GaussNewtonState : public NonlinearOptimizerState {
@@ -16,16 +16,14 @@
  * @date Feb 26, 2012
  */

-#include <cmath>
+#include <gtsam/nonlinear/LevenbergMarquardtOptimizer.h>

 #include <gtsam/linear/linearExceptions.h>
 #include <gtsam/linear/GaussianFactorGraph.h>
 #include <gtsam/linear/VectorValues.h>
-#include <gtsam/nonlinear/LevenbergMarquardtOptimizer.h>
-#include <gtsam/nonlinear/SuccessiveLinearizationOptimizer.h>

 #include <boost/algorithm/string.hpp>
 #include <string>
+#include <cmath>

 using namespace std;

@@ -62,7 +60,7 @@ std::string LevenbergMarquardtParams::verbosityLMTranslator(VerbosityLM value) c
 /* ************************************************************************* */
 void LevenbergMarquardtParams::print(const std::string& str) const {
-  SuccessiveLinearizationParams::print(str);
+  NonlinearOptimizerParams::print(str);
   std::cout << " lambdaInitial: " << lambdaInitial << "\n";
   std::cout << " lambdaFactor: " << lambdaFactor << "\n";
   std::cout << " lambdaUpperBound: " << lambdaUpperBound << "\n";

@@ -110,7 +108,7 @@ void LevenbergMarquardtOptimizer::iterate() {
   // Try solving
   try {
     // Solve Damped Gaussian Factor Graph
-    const VectorValues delta = solveGaussianFactorGraph(dampedSystem, params_);
+    const VectorValues delta = solve(dampedSystem, state_.values, params_);

     if (lmVerbosity >= LevenbergMarquardtParams::TRYLAMBDA) cout << "linear delta norm = " << delta.norm() << endl;
     if (lmVerbosity >= LevenbergMarquardtParams::TRYDELTA) delta.print("delta");
@@ -18,7 +18,7 @@
 #pragma once

-#include <gtsam/nonlinear/SuccessiveLinearizationOptimizer.h>
+#include <gtsam/nonlinear/NonlinearOptimizer.h>

 namespace gtsam {

@@ -29,7 +29,7 @@ class LevenbergMarquardtOptimizer;
  * common to all nonlinear optimization algorithms. This class also contains
  * all of those parameters.
  */
-class GTSAM_EXPORT LevenbergMarquardtParams : public SuccessiveLinearizationParams {
+class GTSAM_EXPORT LevenbergMarquardtParams : public NonlinearOptimizerParams {

 public:
   /** See LevenbergMarquardtParams::lmVerbosity */
@@ -16,52 +16,27 @@
  * @date Jul 17, 2010
  */

+#include <gtsam/nonlinear/NonlinearOptimizer.h>
+
+#include <gtsam/linear/GaussianEliminationTree.h>
+#include <gtsam/linear/VectorValues.h>
+#include <gtsam/linear/SubgraphSolver.h>
+#include <gtsam/linear/GaussianFactorGraph.h>
+
+#include <gtsam/inference/Ordering.h>
+
+#include <boost/algorithm/string.hpp>
+#include <boost/shared_ptr.hpp>
+
+#include <stdexcept>
 #include <iostream>
 #include <iomanip>
-#include <gtsam/nonlinear/NonlinearOptimizer.h>
-#include <boost/algorithm/string.hpp>

 using namespace std;

 namespace gtsam {

-/* ************************************************************************* */
-NonlinearOptimizerParams::Verbosity NonlinearOptimizerParams::verbosityTranslator(const std::string &src) const {
-  std::string s = src; boost::algorithm::to_upper(s);
-  if (s == "SILENT") return NonlinearOptimizerParams::SILENT;
-  if (s == "ERROR") return NonlinearOptimizerParams::ERROR;
-  if (s == "VALUES") return NonlinearOptimizerParams::VALUES;
-  if (s == "DELTA") return NonlinearOptimizerParams::DELTA;
-  if (s == "LINEAR") return NonlinearOptimizerParams::LINEAR;
-
-  /* default is silent */
-  return NonlinearOptimizerParams::SILENT;
-}
-
-/* ************************************************************************* */
-std::string NonlinearOptimizerParams::verbosityTranslator(Verbosity value) const {
-  std::string s;
-  switch (value) {
-  case NonlinearOptimizerParams::SILENT: s = "SILENT"; break;
-  case NonlinearOptimizerParams::ERROR: s = "ERROR"; break;
-  case NonlinearOptimizerParams::VALUES: s = "VALUES"; break;
-  case NonlinearOptimizerParams::DELTA: s = "DELTA"; break;
-  case NonlinearOptimizerParams::LINEAR: s = "LINEAR"; break;
-  default: s = "UNDEFINED"; break;
-  }
-  return s;
-}
-
-/* ************************************************************************* */
-void NonlinearOptimizerParams::print(const std::string& str) const {
-  std::cout << str << "\n";
-  std::cout << "relative decrease threshold: " << relativeErrorTol << "\n";
-  std::cout << "absolute decrease threshold: " << absoluteErrorTol << "\n";
-  std::cout << " total error threshold: " << errorTol << "\n";
-  std::cout << " maximum iterations: " << maxIterations << "\n";
-  std::cout << " verbosity: " << verbosityTranslator(verbosity) << "\n";
-  std::cout.flush();
-}
-
 /* ************************************************************************* */
 void NonlinearOptimizer::defaultOptimize() {
@@ -114,6 +89,36 @@ const Values& NonlinearOptimizer::optimizeSafely() {
   }
 }

+/* ************************************************************************* */
+VectorValues NonlinearOptimizer::solve(const GaussianFactorGraph &gfg,
+    const Values& initial, const NonlinearOptimizerParams& params) const {
+  VectorValues delta;
+  if (params.isMultifrontal()) {
+    delta = gfg.optimize(*params.ordering, params.getEliminationFunction());
+  } else if (params.isSequential()) {
+    delta = gfg.eliminateSequential(*params.ordering,
+        params.getEliminationFunction())->optimize();
+  } else if (params.isCG()) {
+    if (!params.iterativeParams)
+      throw std::runtime_error(
+          "NonlinearOptimizer::solve: cg parameter has to be assigned ...");
+    if (boost::dynamic_pointer_cast<SubgraphSolverParameters>(
+        params.iterativeParams)) {
+      SubgraphSolver solver(gfg,
+          dynamic_cast<const SubgraphSolverParameters&>(*params.iterativeParams),
+          *params.ordering);
+      delta = solver.optimize();
+    } else {
+      throw std::runtime_error(
+          "NonlinearOptimizer::solve: special cg parameter type is not handled in LM solver ...");
+    }
+  } else {
+    throw std::runtime_error(
+        "NonlinearOptimizer::solve: Optimization parameter is invalid");
+  }
+  return delta;
+}
+
 /* ************************************************************************* */
 bool checkConvergence(double relativeErrorTreshold, double absoluteErrorTreshold,
     double errorThreshold, double currentError, double newError,

@@ -155,6 +160,7 @@ bool checkConvergence(double relativeErrorTreshold, double absoluteErrorTreshold
   }
   return converged;
 }
+
 /* ************************************************************************* */
@@ -19,55 +19,12 @@
 #pragma once

 #include <gtsam/nonlinear/NonlinearFactorGraph.h>
+#include <gtsam/nonlinear/NonlinearOptimizerParams.h>

 namespace gtsam {

 class NonlinearOptimizer;

-/** The common parameters for Nonlinear optimizers. Most optimizers
- * deriving from NonlinearOptimizer also subclass the parameters.
- */
-class GTSAM_EXPORT NonlinearOptimizerParams {
-public:
-  /** See NonlinearOptimizerParams::verbosity */
-  enum Verbosity {
-    SILENT,
-    ERROR,
-    VALUES,
-    DELTA,
-    LINEAR
-  };
-
-  size_t maxIterations; ///< The maximum iterations to stop iterating (default 100)
-  double relativeErrorTol; ///< The maximum relative error decrease to stop iterating (default 1e-5)
-  double absoluteErrorTol; ///< The maximum absolute error decrease to stop iterating (default 1e-5)
-  double errorTol; ///< The maximum total error to stop iterating (default 0.0)
-  Verbosity verbosity; ///< The printing verbosity during optimization (default SILENT)
-
-  NonlinearOptimizerParams() :
-    maxIterations(100), relativeErrorTol(1e-5), absoluteErrorTol(1e-5),
-    errorTol(0.0), verbosity(SILENT) {}
-
-  virtual ~NonlinearOptimizerParams() {}
-  virtual void print(const std::string& str = "") const ;
-
-  size_t getMaxIterations() const { return maxIterations; }
-  double getRelativeErrorTol() const { return relativeErrorTol; }
-  double getAbsoluteErrorTol() const { return absoluteErrorTol; }
-  double getErrorTol() const { return errorTol; }
-  std::string getVerbosity() const { return verbosityTranslator(verbosity); }
-
-  void setMaxIterations(size_t value) { maxIterations = value; }
-  void setRelativeErrorTol(double value) { relativeErrorTol = value; }
-  void setAbsoluteErrorTol(double value) { absoluteErrorTol = value; }
-  void setErrorTol(double value) { errorTol = value ; }
-  void setVerbosity(const std::string &src) { verbosity = verbosityTranslator(src); }
-
-private:
-  Verbosity verbosityTranslator(const std::string &s) const;
-  std::string verbosityTranslator(Verbosity value) const;
-};
-
 /**
  * Base class for a nonlinear optimization state, including the current estimate
  * of the variable values, error, and number of iterations. Optimizers derived

@@ -222,6 +179,10 @@ public:
   /** Virtual destructor */
   virtual ~NonlinearOptimizer() {}

+  /** Default function to do linear solve, i.e. optimize a GaussianFactorGraph */
+  virtual VectorValues solve(const GaussianFactorGraph &gfg,
+      const Values& initial, const NonlinearOptimizerParams& params) const;
+
   /** Perform a single iteration, returning a new NonlinearOptimizer class
    * containing the updated variable assignments, which may be retrieved with
    * values().
@@ -0,0 +1 @@
+// moved to NonlinearOptimizerParams

@@ -0,0 +1 @@
+// moved to NonlinearOptimizerParams
@@ -0,0 +1,155 @@
+/**
+ * @file NonlinearOptimizerParams.cpp
+ * @brief Parameters for nonlinear optimization
+ * @date Jul 24, 2012
+ * @author Yong-Dian Jian
+ * @author Richard Roberts
+ * @author Frank Dellaert
+ */
+
+#include <gtsam/nonlinear/NonlinearOptimizerParams.h>
+#include <boost/algorithm/string.hpp>
+
+namespace gtsam {
+
+/* ************************************************************************* */
+NonlinearOptimizerParams::Verbosity NonlinearOptimizerParams::verbosityTranslator(
+    const std::string &src) const {
+  std::string s = src;
+  boost::algorithm::to_upper(s);
+  if (s == "SILENT")
+    return NonlinearOptimizerParams::SILENT;
+  if (s == "ERROR")
+    return NonlinearOptimizerParams::ERROR;
+  if (s == "VALUES")
+    return NonlinearOptimizerParams::VALUES;
+  if (s == "DELTA")
+    return NonlinearOptimizerParams::DELTA;
+  if (s == "LINEAR")
+    return NonlinearOptimizerParams::LINEAR;
+
+  /* default is silent */
+  return NonlinearOptimizerParams::SILENT;
+}
+
+/* ************************************************************************* */
+std::string NonlinearOptimizerParams::verbosityTranslator(
+    Verbosity value) const {
+  std::string s;
+  switch (value) {
+  case NonlinearOptimizerParams::SILENT:
+    s = "SILENT";
+    break;
+  case NonlinearOptimizerParams::ERROR:
+    s = "ERROR";
+    break;
+  case NonlinearOptimizerParams::VALUES:
+    s = "VALUES";
+    break;
+  case NonlinearOptimizerParams::DELTA:
+    s = "DELTA";
+    break;
+  case NonlinearOptimizerParams::LINEAR:
+    s = "LINEAR";
+    break;
+  default:
+    s = "UNDEFINED";
+    break;
+  }
+  return s;
+}
+
+/* ************************************************************************* */
+void NonlinearOptimizerParams::setIterativeParams(
+    const SubgraphSolverParameters &params) {
+  iterativeParams = boost::make_shared<SubgraphSolverParameters>(params);
+}
+
+/* ************************************************************************* */
+void NonlinearOptimizerParams::print(const std::string& str) const {
+
+  //NonlinearOptimizerParams::print(str);
+  std::cout << str << "\n";
+  std::cout << "relative decrease threshold: " << relativeErrorTol << "\n";
+  std::cout << "absolute decrease threshold: " << absoluteErrorTol << "\n";
+  std::cout << " total error threshold: " << errorTol << "\n";
+  std::cout << " maximum iterations: " << maxIterations << "\n";
+  std::cout << " verbosity: " << verbosityTranslator(verbosity)
+      << "\n";
+  std::cout.flush();
+
+  switch (linearSolverType) {
+  case MULTIFRONTAL_CHOLESKY:
+    std::cout << " linear solver type: MULTIFRONTAL CHOLESKY\n";
+    break;
+  case MULTIFRONTAL_QR:
+    std::cout << " linear solver type: MULTIFRONTAL QR\n";
+    break;
+  case SEQUENTIAL_CHOLESKY:
+    std::cout << " linear solver type: SEQUENTIAL CHOLESKY\n";
+    break;
+  case SEQUENTIAL_QR:
+    std::cout << " linear solver type: SEQUENTIAL QR\n";
+    break;
+  case CHOLMOD:
+    std::cout << " linear solver type: CHOLMOD\n";
+    break;
+  case CONJUGATE_GRADIENT:
+    std::cout << " linear solver type: CONJUGATE GRADIENT\n";
+    break;
+  default:
+    std::cout << " linear solver type: (invalid)\n";
+    break;
+  }
+
+  if (ordering)
+    std::cout << " ordering: custom\n";
+  else
+    std::cout << " ordering: COLAMD\n";
+
+  std::cout.flush();
+}
+
+/* ************************************************************************* */
+std::string NonlinearOptimizerParams::linearSolverTranslator(
+    LinearSolverType linearSolverType) const {
+  switch (linearSolverType) {
+  case MULTIFRONTAL_CHOLESKY:
+    return "MULTIFRONTAL_CHOLESKY";
+  case MULTIFRONTAL_QR:
+    return "MULTIFRONTAL_QR";
+  case SEQUENTIAL_CHOLESKY:
+    return "SEQUENTIAL_CHOLESKY";
+  case SEQUENTIAL_QR:
+    return "SEQUENTIAL_QR";
+  case CONJUGATE_GRADIENT:
+    return "CONJUGATE_GRADIENT";
+  case CHOLMOD:
+    return "CHOLMOD";
+  default:
+    throw std::invalid_argument(
+        "Unknown linear solver type in SuccessiveLinearizationOptimizer");
+  }
+}
+
+/* ************************************************************************* */
+NonlinearOptimizerParams::LinearSolverType NonlinearOptimizerParams::linearSolverTranslator(
+    const std::string& linearSolverType) const {
+  if (linearSolverType == "MULTIFRONTAL_CHOLESKY")
+    return MULTIFRONTAL_CHOLESKY;
+  if (linearSolverType == "MULTIFRONTAL_QR")
+    return MULTIFRONTAL_QR;
+  if (linearSolverType == "SEQUENTIAL_CHOLESKY")
+    return SEQUENTIAL_CHOLESKY;
+  if (linearSolverType == "SEQUENTIAL_QR")
+    return SEQUENTIAL_QR;
+  if (linearSolverType == "CONJUGATE_GRADIENT")
+    return CONJUGATE_GRADIENT;
+  if (linearSolverType == "CHOLMOD")
+    return CHOLMOD;
+  throw std::invalid_argument(
+      "Unknown linear solver type in SuccessiveLinearizationOptimizer");
+}
+/* ************************************************************************* */
+
+} // namespace
@@ -0,0 +1,165 @@
+/* ----------------------------------------------------------------------------
+
+ * GTSAM Copyright 2010, Georgia Tech Research Corporation,
+ * Atlanta, Georgia 30332-0415
+ * All Rights Reserved
+ * Authors: Frank Dellaert, et al. (see THANKS for the full author list)
+
+ * See LICENSE for the license information
+
+ * -------------------------------------------------------------------------- */
+
+/**
+ * @file NonlinearOptimizerParams.h
+ * @brief Parameters for nonlinear optimization
+ * @author Yong-Dian Jian
+ * @author Richard Roberts
+ * @author Frank Dellaert
+ * @date Apr 1, 2012
+ */
+
+#pragma once
+
+#include <gtsam/linear/GaussianFactorGraph.h>
+#include <gtsam/linear/SubgraphSolver.h>
+#include <boost/optional.hpp>
+#include <string>
+
+namespace gtsam {
+
+/** The common parameters for Nonlinear optimizers. Most optimizers
+ * deriving from NonlinearOptimizer also subclass the parameters.
+ */
+class GTSAM_EXPORT NonlinearOptimizerParams {
+public:
+  /** See NonlinearOptimizerParams::verbosity */
+  enum Verbosity {
+    SILENT, ERROR, VALUES, DELTA, LINEAR
+  };
+
+  size_t maxIterations; ///< The maximum iterations to stop iterating (default 100)
+  double relativeErrorTol; ///< The maximum relative error decrease to stop iterating (default 1e-5)
+  double absoluteErrorTol; ///< The maximum absolute error decrease to stop iterating (default 1e-5)
+  double errorTol; ///< The maximum total error to stop iterating (default 0.0)
+  Verbosity verbosity; ///< The printing verbosity during optimization (default SILENT)
+
+  NonlinearOptimizerParams() :
+      maxIterations(100), relativeErrorTol(1e-5), absoluteErrorTol(1e-5), errorTol(
+          0.0), verbosity(SILENT), linearSolverType(MULTIFRONTAL_CHOLESKY) {
+  }
+
+  virtual ~NonlinearOptimizerParams() {
+  }
+  virtual void print(const std::string& str = "") const;
+
+  size_t getMaxIterations() const {
+    return maxIterations;
+  }
+  double getRelativeErrorTol() const {
+    return relativeErrorTol;
+  }
+  double getAbsoluteErrorTol() const {
+    return absoluteErrorTol;
+  }
+  double getErrorTol() const {
+    return errorTol;
+  }
+  std::string getVerbosity() const {
+    return verbosityTranslator(verbosity);
+  }
+
+  void setMaxIterations(size_t value) {
+    maxIterations = value;
+  }
+  void setRelativeErrorTol(double value) {
+    relativeErrorTol = value;
+  }
+  void setAbsoluteErrorTol(double value) {
+    absoluteErrorTol = value;
+  }
+  void setErrorTol(double value) {
+    errorTol = value;
+  }
+  void setVerbosity(const std::string &src) {
+    verbosity = verbosityTranslator(src);
+  }
+
+private:
+  Verbosity verbosityTranslator(const std::string &s) const;
+  std::string verbosityTranslator(Verbosity value) const;
+
+  // Successive Linearization Parameters
+
+public:
+
+  /** See NonlinearSolverParams::linearSolverType */
+  enum LinearSolverType {
+    MULTIFRONTAL_CHOLESKY,
+    MULTIFRONTAL_QR,
+    SEQUENTIAL_CHOLESKY,
+    SEQUENTIAL_QR,
+    CONJUGATE_GRADIENT, /* Experimental Flag */
+    CHOLMOD, /* Experimental Flag */
+  };
+
+  LinearSolverType linearSolverType; ///< The type of linear solver to use in the nonlinear optimizer
+  boost::optional<Ordering> ordering; ///< The variable elimination ordering, or empty to use COLAMD (default: empty)
+  IterativeOptimizationParameters::shared_ptr iterativeParams; ///< The container for iterativeOptimization parameters. used in CG Solvers.
+
+  inline bool isMultifrontal() const {
+    return (linearSolverType == MULTIFRONTAL_CHOLESKY)
+        || (linearSolverType == MULTIFRONTAL_QR);
+  }
+
+  inline bool isSequential() const {
+    return (linearSolverType == SEQUENTIAL_CHOLESKY)
+        || (linearSolverType == SEQUENTIAL_QR);
+  }
+
+  inline bool isCholmod() const {
+    return (linearSolverType == CHOLMOD);
+  }
+
+  inline bool isCG() const {
+    return (linearSolverType == CONJUGATE_GRADIENT);
+  }
+
+  GaussianFactorGraph::Eliminate getEliminationFunction() const {
+    switch (linearSolverType) {
+    case MULTIFRONTAL_CHOLESKY:
+    case SEQUENTIAL_CHOLESKY:
+      return EliminatePreferCholesky;
+
+    case MULTIFRONTAL_QR:
+    case SEQUENTIAL_QR:
+      return EliminateQR;
+
+    default:
+      throw std::runtime_error(
+          "Nonlinear optimization parameter \"factorization\" is invalid");
+    }
+  }
+
+  std::string getLinearSolverType() const {
+    return linearSolverTranslator(linearSolverType);
+  }
+
+  void setLinearSolverType(const std::string& solver) {
+    linearSolverType = linearSolverTranslator(solver);
+  }
+  void setIterativeParams(const SubgraphSolverParameters& params);
+  void setOrdering(const Ordering& ordering) {
+    this->ordering = ordering;
+  }
+
+private:
+  std::string linearSolverTranslator(LinearSolverType linearSolverType) const;
+  LinearSolverType linearSolverTranslator(
+      const std::string& linearSolverType) const;
+};
+
+// For backward compatibility:
+typedef NonlinearOptimizerParams SuccessiveLinearizationParams;
+
+} /* namespace gtsam */
@@ -1,79 +0,0 @@
-/**
- * @file SuccessiveLinearizationOptimizer.cpp
- * @brief
- * @date Jul 24, 2012
- * @author Yong-Dian Jian
- */
-
-#include <gtsam/nonlinear/SuccessiveLinearizationOptimizer.h>
-#include <gtsam/linear/GaussianEliminationTree.h>
-#include <gtsam/linear/VectorValues.h>
-#include <boost/shared_ptr.hpp>
-#include <stdexcept>
-
-namespace gtsam {
-
-void SuccessiveLinearizationParams::setIterativeParams(const SubgraphSolverParameters &params) {
-  iterativeParams = boost::make_shared<SubgraphSolverParameters>(params);
-}
-
-void SuccessiveLinearizationParams::print(const std::string& str) const {
-  NonlinearOptimizerParams::print(str);
-  switch ( linearSolverType ) {
-  case MULTIFRONTAL_CHOLESKY:
-    std::cout << " linear solver type: MULTIFRONTAL CHOLESKY\n";
-    break;
-  case MULTIFRONTAL_QR:
-    std::cout << " linear solver type: MULTIFRONTAL QR\n";
-    break;
-  case SEQUENTIAL_CHOLESKY:
-    std::cout << " linear solver type: SEQUENTIAL CHOLESKY\n";
-    break;
-  case SEQUENTIAL_QR:
-    std::cout << " linear solver type: SEQUENTIAL QR\n";
-    break;
-  case CHOLMOD:
-    std::cout << " linear solver type: CHOLMOD\n";
-    break;
-  case CONJUGATE_GRADIENT:
-    std::cout << " linear solver type: CONJUGATE GRADIENT\n";
-    break;
-  default:
-    std::cout << " linear solver type: (invalid)\n";
-    break;
-  }
-
-  if(ordering)
-    std::cout << " ordering: custom\n";
-  else
-    std::cout << " ordering: COLAMD\n";
-
-  std::cout.flush();
-}
-
-VectorValues solveGaussianFactorGraph(const GaussianFactorGraph &gfg, const SuccessiveLinearizationParams &params)
-{
-  gttic(solveGaussianFactorGraph);
-  VectorValues delta;
-  if (params.isMultifrontal()) {
-    delta = gfg.optimize(*params.ordering, params.getEliminationFunction());
-  } else if(params.isSequential()) {
-    delta = gfg.eliminateSequential(*params.ordering, params.getEliminationFunction())->optimize();
-  }
-  else if ( params.isCG() ) {
-    if ( !params.iterativeParams ) throw std::runtime_error("solveGaussianFactorGraph: cg parameter has to be assigned ...");
-    if ( boost::dynamic_pointer_cast<SubgraphSolverParameters>(params.iterativeParams) ) {
-      SubgraphSolver solver(gfg, dynamic_cast<const SubgraphSolverParameters&>(*params.iterativeParams), *params.ordering);
-      delta = solver.optimize();
-    }
-    else {
-      throw std::runtime_error("solveGaussianFactorGraph: special cg parameter type is not handled in LM solver ...");
-    }
-  }
-  else {
-    throw std::runtime_error("solveGaussianFactorGraph: Optimization parameter is invalid");
-  }
-  return delta;
-}
-
-}
@@ -1,108 +0,0 @@
-/* ----------------------------------------------------------------------------
-
- * GTSAM Copyright 2010, Georgia Tech Research Corporation,
- * Atlanta, Georgia 30332-0415
- * All Rights Reserved
- * Authors: Frank Dellaert, et al. (see THANKS for the full author list)
-
- * See LICENSE for the license information
-
- * -------------------------------------------------------------------------- */
-
-/**
- * @file SuccessiveLinearizationOptimizer.h
- * @brief
- * @author Richard Roberts
- * @date Apr 1, 2012
- */
-
-#pragma once
-
-#include <gtsam/nonlinear/NonlinearOptimizer.h>
-#include <gtsam/linear/SubgraphSolver.h>
-#include <gtsam/linear/GaussianFactorGraph.h>
-#include <gtsam/inference/Ordering.h>
-
-namespace gtsam {
-
-// Forward declarations
-
-class GTSAM_EXPORT SuccessiveLinearizationParams : public NonlinearOptimizerParams {
-public:
-  /** See SuccessiveLinearizationParams::linearSolverType */
-  enum LinearSolverType {
-    MULTIFRONTAL_CHOLESKY,
-    MULTIFRONTAL_QR,
-    SEQUENTIAL_CHOLESKY,
-    SEQUENTIAL_QR,
-    CONJUGATE_GRADIENT, /* Experimental Flag */
-    CHOLMOD, /* Experimental Flag */
-  };
-
-  LinearSolverType linearSolverType; ///< The type of linear solver to use in the nonlinear optimizer
-  boost::optional<Ordering> ordering; ///< The variable elimination ordering, or empty to use COLAMD (default: empty)
-  IterativeOptimizationParameters::shared_ptr iterativeParams; ///< The container for iterativeOptimization parameters. used in CG Solvers.
-
-  SuccessiveLinearizationParams() : linearSolverType(MULTIFRONTAL_CHOLESKY) {}
-  virtual ~SuccessiveLinearizationParams() {}
-
-  inline bool isMultifrontal() const {
-    return (linearSolverType == MULTIFRONTAL_CHOLESKY) || (linearSolverType == MULTIFRONTAL_QR); }
-
-  inline bool isSequential() const {
-    return (linearSolverType == SEQUENTIAL_CHOLESKY) || (linearSolverType == SEQUENTIAL_QR); }
-
-  inline bool isCholmod() const { return (linearSolverType == CHOLMOD); }
-
-  inline bool isCG() const { return (linearSolverType == CONJUGATE_GRADIENT); }
-
-  virtual void print(const std::string& str) const;
-
-  GaussianFactorGraph::Eliminate getEliminationFunction() const {
-    switch (linearSolverType) {
-    case MULTIFRONTAL_CHOLESKY:
-    case SEQUENTIAL_CHOLESKY:
-      return EliminatePreferCholesky;
-
-    case MULTIFRONTAL_QR:
-    case SEQUENTIAL_QR:
-      return EliminateQR;
-
-    default:
-      throw std::runtime_error("Nonlinear optimization parameter \"factorization\" is invalid");
-    }
-  }
-
-  std::string getLinearSolverType() const { return linearSolverTranslator(linearSolverType); }
-
-  void setLinearSolverType(const std::string& solver) { linearSolverType = linearSolverTranslator(solver); }
-  void setIterativeParams(const SubgraphSolverParameters& params);
-  void setOrdering(const Ordering& ordering) { this->ordering = ordering; }
-
-private:
-  std::string linearSolverTranslator(LinearSolverType linearSolverType) const {
-    switch(linearSolverType) {
-    case MULTIFRONTAL_CHOLESKY: return "MULTIFRONTAL_CHOLESKY";
-    case MULTIFRONTAL_QR: return "MULTIFRONTAL_QR";
-    case SEQUENTIAL_CHOLESKY: return "SEQUENTIAL_CHOLESKY";
-    case SEQUENTIAL_QR: return "SEQUENTIAL_QR";
-    case CONJUGATE_GRADIENT: return "CONJUGATE_GRADIENT";
-    case CHOLMOD: return "CHOLMOD";
-    default: throw std::invalid_argument("Unknown linear solver type in SuccessiveLinearizationOptimizer");
-    }
-  }
-  LinearSolverType linearSolverTranslator(const std::string& linearSolverType) const {
-    if(linearSolverType == "MULTIFRONTAL_CHOLESKY") return MULTIFRONTAL_CHOLESKY;
-    if(linearSolverType == "MULTIFRONTAL_QR") return MULTIFRONTAL_QR;
-    if(linearSolverType == "SEQUENTIAL_CHOLESKY") return SEQUENTIAL_CHOLESKY;
-    if(linearSolverType == "SEQUENTIAL_QR") return SEQUENTIAL_QR;
-    if(linearSolverType == "CONJUGATE_GRADIENT") return CONJUGATE_GRADIENT;
-    if(linearSolverType == "CHOLMOD") return CHOLMOD;
-    throw std::invalid_argument("Unknown linear solver type in SuccessiveLinearizationOptimizer");
-  }
-};
-
-/* a wrapper for solving a GaussianFactorGraph according to the parameters */
-GTSAM_EXPORT VectorValues solveGaussianFactorGraph(const GaussianFactorGraph &gfg, const SuccessiveLinearizationParams &params);
-
-} /* namespace gtsam */
@@ -133,7 +133,7 @@ Point3 triangulateDLT(const std::vector<Pose3>& poses,
   params.absoluteErrorTol = 1.0;
   params.verbosityLM = LevenbergMarquardtParams::SILENT;
   params.verbosity = NonlinearOptimizerParams::SILENT;
-  params.linearSolverType = SuccessiveLinearizationParams::MULTIFRONTAL_CHOLESKY;
+  params.linearSolverType = NonlinearOptimizerParams::MULTIFRONTAL_CHOLESKY;
   LevenbergMarquardtOptimizer optimizer(graph, values, params);
   Values result = optimizer.optimize();
@@ -244,7 +244,6 @@ ConcurrentBatchSmoother::Result ConcurrentBatchSmoother::optimize() {
   result.linearVariables = separatorValues_.size();

   // Pull out parameters we'll use
-  const NonlinearOptimizerParams::Verbosity nloVerbosity = parameters_.verbosity;
   const LevenbergMarquardtParams::VerbosityLM lmVerbosity = parameters_.verbosityLM;
   double lambda = parameters_.lambdaInitial;
@@ -58,7 +58,7 @@ Values BatchOptimize(const NonlinearFactorGraph& graph, const Values& theta, int
   // parameters.lambdaInitial = 1;
   // parameters.verbosity = NonlinearOptimizerParams::ERROR;
   // parameters.verbosityLM = ISAM2Params::DAMPED;
-  // parameters.linearSolverType = SuccessiveLinearizationParams::MULTIFRONTAL_QR;
+  // parameters.linearSolverType = NonlinearSolverParams::MULTIFRONTAL_QR;

   // it is the same as the input graph, but we removed the empty factors that may be present in the input graph
   NonlinearFactorGraph graphForISAM2;

@@ -58,7 +58,7 @@ Values BatchOptimize(const NonlinearFactorGraph& graph, const Values& theta, int
   // parameters.lambdaInitial = 1;
   // parameters.verbosity = NonlinearOptimizerParams::ERROR;
   // parameters.verbosityLM = ISAM2Params::DAMPED;
-  // parameters.linearSolverType = SuccessiveLinearizationParams::MULTIFRONTAL_QR;
+  // parameters.linearSolverType = NonlinearSolverParams::MULTIFRONTAL_QR;

   ISAM2 optimizer(parameters);
   optimizer.update( graph, theta );

@@ -57,7 +57,7 @@ Values BatchOptimize(const NonlinearFactorGraph& graph, const Values& theta, int
   // parameters.lambdaInitial = 1;
   // parameters.verbosity = NonlinearOptimizerParams::ERROR;
   // parameters.verbosityLM = ISAM2Params::DAMPED;
-  // parameters.linearSolverType = SuccessiveLinearizationParams::MULTIFRONTAL_QR;
+  // parameters.linearSolverType = NonlinearSolverParams::MULTIFRONTAL_QR;

   ISAM2 optimizer(parameters);
   optimizer.update( graph, theta );
@@ -587,7 +587,7 @@ TEST( GaussianFactorGraph, conditional_sigma_failure) {
   // Check that all sigmas in an unconstrained bayes tree are set to one
   BOOST_FOREACH(const GaussianBayesTree::sharedClique& clique, actBT.nodes() | br::map_values) {
     GaussianConditional::shared_ptr conditional = clique->conditional();
-    size_t dim = conditional->rows();
+    //size_t dim = conditional->rows();
     //EXPECT(assert_equal(gtsam::ones(dim), conditional->get_model()->sigmas(), tol));
     EXPECT(!conditional->get_model());
   }
@@ -302,6 +302,52 @@ TEST(NonlinearOptimizer, disconnected_graph) {
   EXPECT(assert_equal(expected, LevenbergMarquardtOptimizer(graph, init).optimize()));
 }

+/* ************************************************************************* */
+#include <gtsam/linear/iterative.h>
+
+class IterativeLM: public LevenbergMarquardtOptimizer {
+
+  /// Solver specific parameters
+  ConjugateGradientParameters cgParams_;
+
+public:
+  /// Constructor
+  IterativeLM(const NonlinearFactorGraph& graph, const Values& initialValues,
+      const ConjugateGradientParameters &p,
+      const LevenbergMarquardtParams& params = LevenbergMarquardtParams()) :
+      LevenbergMarquardtOptimizer(graph, initialValues, params), cgParams_(p) {
+  }
+
+  /// Solve that uses conjugate gradient
+  virtual VectorValues solve(const GaussianFactorGraph &gfg,
+      const Values& initial, const NonlinearOptimizerParams& params) const {
+    VectorValues zeros = initial.zeroVectors();
+    return conjugateGradientDescent(gfg, zeros, cgParams_);
+  }
+};
+
+/* ************************************************************************* */
+TEST(NonlinearOptimizer, subclass_solver) {
+  Values expected;
+  expected.insert(X(1), Pose2(0.,0.,0.));
+  expected.insert(X(2), Pose2(1.5,0.,0.));
+  expected.insert(X(3), Pose2(3.0,0.,0.));
+
+  Values init;
+  init.insert(X(1), Pose2(0.,0.,0.));
+  init.insert(X(2), Pose2(0.,0.,0.));
+  init.insert(X(3), Pose2(0.,0.,0.));
+
+  NonlinearFactorGraph graph;
+  graph += PriorFactor<Pose2>(X(1), Pose2(0.,0.,0.), noiseModel::Isotropic::Sigma(3,1));
+  graph += BetweenFactor<Pose2>(X(1),X(2), Pose2(1.5,0.,0.), noiseModel::Isotropic::Sigma(3,1));
+  graph += PriorFactor<Pose2>(X(3), Pose2(3.,0.,0.), noiseModel::Isotropic::Sigma(3,1));
+
+  ConjugateGradientParameters p;
+  Values actual = IterativeLM(graph, init, p).optimize();
+  EXPECT(assert_equal(expected, actual, 1e-4));
+}
+
 /* ************************************************************************* */
 int main() {
   TestResult tr;