remove unused variables in iterative solvers and rename accordingly.

release/4.3a0
Yong-Dian Jian 2014-06-03 23:52:35 -04:00
parent e60f21a22f
commit a0c77dcc1c
8 changed files with 26 additions and 39 deletions

View File

@@ -104,7 +104,7 @@ int main(int argc, char** argv) {
   LevenbergMarquardtParams parameters;
   parameters.verbosity = NonlinearOptimizerParams::ERROR;
   parameters.verbosityLM = LevenbergMarquardtParams::LAMBDA;
-  parameters.linearSolverType = NonlinearOptimizerParams::CONJUGATE_GRADIENT;
+  parameters.linearSolverType = NonlinearOptimizerParams::Iterative;
   {
     parameters.iterativeParams = boost::make_shared<SubgraphSolverParameters>();
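
For orientation, here is a minimal end-to-end sketch of the renamed configuration, modeled on the updated example above. It is not part of this commit; the toy Pose2 prior/odometry graph and key numbering are illustrative assumptions.

    #include <gtsam/geometry/Pose2.h>
    #include <gtsam/linear/SubgraphSolver.h>
    #include <gtsam/nonlinear/LevenbergMarquardtOptimizer.h>
    #include <gtsam/nonlinear/NonlinearFactorGraph.h>
    #include <gtsam/nonlinear/Values.h>
    #include <gtsam/slam/BetweenFactor.h>
    #include <gtsam/slam/PriorFactor.h>
    #include <boost/make_shared.hpp>

    using namespace gtsam;

    int main() {
      // Toy problem: a prior on pose 0 and one odometry constraint to pose 1 (illustrative).
      NonlinearFactorGraph graph;
      Vector sigmas(3); sigmas << 0.1, 0.1, 0.05;
      noiseModel::Diagonal::shared_ptr noise = noiseModel::Diagonal::Sigmas(sigmas);
      graph.add(PriorFactor<Pose2>(0, Pose2(0, 0, 0), noise));
      graph.add(BetweenFactor<Pose2>(0, 1, Pose2(1, 0, 0), noise));

      Values initial;
      initial.insert(0, Pose2(0.1, -0.1, 0.05));
      initial.insert(1, Pose2(1.2, 0.1, -0.05));

      // Select the renamed linear solver type and supply iterative parameters,
      // exactly as the updated example does.
      LevenbergMarquardtParams parameters;
      parameters.linearSolverType = NonlinearOptimizerParams::Iterative;  // was CONJUGATE_GRADIENT
      parameters.iterativeParams = boost::make_shared<SubgraphSolverParameters>();

      Values result = LevenbergMarquardtOptimizer(graph, initial, parameters).optimize();
      result.print("result ");
      return 0;
    }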

View File

@@ -1481,9 +1481,7 @@ class GaussianISAM {
 #include <gtsam/linear/IterativeSolver.h>
 virtual class IterativeOptimizationParameters {
-  string getKernel() const ;
   string getVerbosity() const;
-  void setKernel(string s) ;
   void setVerbosity(string s) ;
   void print() const;
 };
@@ -1860,7 +1858,7 @@ virtual class NonlinearOptimizerParams {
   bool isMultifrontal() const;
   bool isSequential() const;
   bool isCholmod() const;
-  bool isCG() const;
+  bool isIterative() const;
 };
 bool checkConvergence(double relativeErrorTreshold,

View File

@@ -11,18 +11,19 @@
 namespace gtsam {
-std::string IterativeOptimizationParameters::getKernel() const { return kernelTranslator(kernel_); }
+/*****************************************************************************/
 std::string IterativeOptimizationParameters::getVerbosity() const { return verbosityTranslator(verbosity_); }
-void IterativeOptimizationParameters::setKernel(const std::string &src) { kernel_ = kernelTranslator(src); }
+/*****************************************************************************/
 void IterativeOptimizationParameters::setVerbosity(const std::string &src) { verbosity_ = verbosityTranslator(src); }
-IterativeOptimizationParameters::Kernel IterativeOptimizationParameters::kernelTranslator(const std::string &src) {
-  std::string s = src; boost::algorithm::to_upper(s);
-  if (s == "CG") return IterativeOptimizationParameters::CG;
-  /* default is cg */
-  else return IterativeOptimizationParameters::CG;
+/*****************************************************************************/
+void IterativeOptimizationParameters::print() const {
+  std::cout << "IterativeOptimizationParameters" << std::endl
+            << "verbosity: " << verbosityTranslator(verbosity_) << std::endl;
 }
+/*****************************************************************************/
 IterativeOptimizationParameters::Verbosity IterativeOptimizationParameters::verbosityTranslator(const std::string &src) {
   std::string s = src; boost::algorithm::to_upper(s);
   if (s == "SILENT") return IterativeOptimizationParameters::SILENT;
@@ -32,11 +33,7 @@ IterativeOptimizationParameters::Verbosity IterativeOptimizationParameters::verbosityTranslator(const std::string &src) {
   else return IterativeOptimizationParameters::SILENT;
 }
-std::string IterativeOptimizationParameters::kernelTranslator(IterativeOptimizationParameters::Kernel k) {
-  if ( k == CG ) return "CG";
-  else return "UNKNOWN";
-}
+/*****************************************************************************/
 std::string IterativeOptimizationParameters::verbosityTranslator(IterativeOptimizationParameters::Verbosity verbosity) {
   if (verbosity == SILENT) return "SILENT";
   else if (verbosity == COMPLEXITY) return "COMPLEXITY";
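
A short sketch of the surviving string-based interface after getKernel/setKernel were dropped. Instantiating the base class directly and the lower-case input are illustrative; the translator upper-cases its argument and falls back to SILENT for unrecognized strings.

    #include <gtsam/linear/IterativeSolver.h>
    #include <iostream>

    int main() {
      gtsam::IterativeOptimizationParameters params;    // verbosity defaults to SILENT
      params.setVerbosity("error");                     // upper-cased by verbosityTranslator
      std::cout << params.getVerbosity() << std::endl;  // prints "ERROR"
      params.print();                                   // two-line summary; no "kernel:" line anymore
      return 0;
    }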

View File

@@ -30,21 +30,19 @@ namespace gtsam {
   public:
     typedef boost::shared_ptr<IterativeOptimizationParameters> shared_ptr;
-    enum Kernel { CG = 0 } kernel_ ;  ///< Iterative Method Kernel
     enum Verbosity { SILENT = 0, COMPLEXITY, ERROR } verbosity_;
   public:
     IterativeOptimizationParameters(const IterativeOptimizationParameters &p)
-      : kernel_(p.kernel_), verbosity_(p.verbosity_) {}
-    IterativeOptimizationParameters(const Kernel kernel = CG, const Verbosity verbosity = SILENT)
-      : kernel_(kernel), verbosity_(verbosity) {}
+      : verbosity_(p.verbosity_) {}
+    IterativeOptimizationParameters(const Verbosity verbosity = SILENT)
+      : verbosity_(verbosity) {}
     virtual ~IterativeOptimizationParameters() {}
     /* general interface */
-    inline Kernel kernel() const { return kernel_; }
     inline Verbosity verbosity() const { return verbosity_; }
     /* matlab interface */
@@ -53,15 +51,9 @@ namespace gtsam {
-    void setKernel(const std::string &s) ;
     void setVerbosity(const std::string &s) ;
-    virtual void print() const {
-      std::cout << "IterativeOptimizationParameters" << std::endl
-                << "kernel: " << kernelTranslator(kernel_) << std::endl
-                << "verbosity: " << verbosityTranslator(verbosity_) << std::endl;
-    }
+    virtual void print() const ;
-    static Kernel kernelTranslator(const std::string &s);
     static Verbosity verbosityTranslator(const std::string &s);
-    static std::string kernelTranslator(Kernel k);
     static std::string verbosityTranslator(Verbosity v);
   };
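
A brief sketch of the trimmed constructors: with kernel_ gone, verbosity is the only state this base class carries (direct instantiation here is just for illustration).

    #include <gtsam/linear/IterativeSolver.h>
    #include <cassert>

    int main() {
      using gtsam::IterativeOptimizationParameters;
      IterativeOptimizationParameters a(IterativeOptimizationParameters::COMPLEXITY);
      IterativeOptimizationParameters b(a);  // copy constructor now copies verbosity_ only
      assert(b.verbosity() == IterativeOptimizationParameters::COMPLEXITY);
      return 0;
    }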

View File

@@ -76,7 +76,7 @@ void DoglegOptimizer::iterate(void) {
     result = DoglegOptimizerImpl::Iterate(state_.Delta, DoglegOptimizerImpl::ONE_STEP_PER_ITERATION,
         dx_u, dx_n, bn, graph_, state_.values, state_.error, dlVerbose);
   }
-  else if ( params_.isCG() ) {
+  else if ( params_.isIterative() ) {
     throw runtime_error("Dogleg is not currently compatible with the linear conjugate gradient solver");
   }
   else {
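
The guard above keeps rejecting iterative linear solvers under the new name. A hedged sketch of what a caller now sees (the one-factor graph is illustrative, not taken from the commit):

    #include <gtsam/geometry/Pose2.h>
    #include <gtsam/nonlinear/DoglegOptimizer.h>
    #include <gtsam/nonlinear/NonlinearFactorGraph.h>
    #include <gtsam/nonlinear/Values.h>
    #include <gtsam/slam/PriorFactor.h>
    #include <iostream>
    #include <stdexcept>

    using namespace gtsam;

    int main() {
      NonlinearFactorGraph graph;
      Vector sigmas(3); sigmas << 0.1, 0.1, 0.05;
      graph.add(PriorFactor<Pose2>(0, Pose2(0, 0, 0), noiseModel::Diagonal::Sigmas(sigmas)));
      Values initial;
      initial.insert(0, Pose2(0.1, -0.1, 0.05));

      DoglegParams params;
      params.linearSolverType = NonlinearOptimizerParams::Iterative;  // renamed flag
      try {
        DoglegOptimizer(graph, initial, params).optimize();
      } catch (const std::runtime_error& e) {
        std::cout << e.what() << std::endl;  // Dogleg still refuses iterative linear solvers
      }
      return 0;
    }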

View File

@@ -102,7 +102,7 @@ VectorValues NonlinearOptimizer::solve(const GaussianFactorGraph &gfg,
   } else if (params.isSequential()) {
     delta = gfg.eliminateSequential(*params.ordering,
         params.getEliminationFunction())->optimize();
-  } else if (params.isCG()) {
+  } else if (params.isIterative()) {
     if (!params.iterativeParams)
       throw std::runtime_error(
           "NonlinearOptimizer::solve: cg parameter has to be assigned ...");

View File

@@ -99,8 +99,8 @@ void NonlinearOptimizerParams::print(const std::string& str) const {
   case CHOLMOD:
     std::cout << " linear solver type: CHOLMOD\n";
     break;
-  case CONJUGATE_GRADIENT:
-    std::cout << " linear solver type: CONJUGATE GRADIENT\n";
+  case Iterative:
+    std::cout << " linear solver type: ITERATIVE\n";
     break;
   default:
     std::cout << " linear solver type: (invalid)\n";
@@ -127,8 +127,8 @@ std::string NonlinearOptimizerParams::linearSolverTranslator(
     return "SEQUENTIAL_CHOLESKY";
   case SEQUENTIAL_QR:
     return "SEQUENTIAL_QR";
-  case CONJUGATE_GRADIENT:
-    return "CONJUGATE_GRADIENT";
+  case Iterative:
+    return "ITERATIVE";
   case CHOLMOD:
     return "CHOLMOD";
   default:
@@ -148,8 +148,8 @@ NonlinearOptimizerParams::LinearSolverType NonlinearOptimizerParams::linearSolverTranslator(
     return SEQUENTIAL_CHOLESKY;
   if (linearSolverType == "SEQUENTIAL_QR")
     return SEQUENTIAL_QR;
-  if (linearSolverType == "CONJUGATE_GRADIENT")
-    return CONJUGATE_GRADIENT;
+  if (linearSolverType == "ITERATIVE")
+    return Iterative;
   if (linearSolverType == "CHOLMOD")
     return CHOLMOD;
   throw std::invalid_argument(
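
A small sketch of how the renamed value surfaces through print(); the output text follows the hunk above.

    #include <gtsam/nonlinear/NonlinearOptimizerParams.h>

    int main() {
      gtsam::NonlinearOptimizerParams params;
      params.linearSolverType = gtsam::NonlinearOptimizerParams::Iterative;
      params.print("");  // now reports " linear solver type: ITERATIVE"
      return 0;
    }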

View File

@@ -99,7 +99,7 @@ public:
     MULTIFRONTAL_QR,
     SEQUENTIAL_CHOLESKY,
     SEQUENTIAL_QR,
-    CONJUGATE_GRADIENT, /* Experimental Flag */
+    Iterative,          /* Experimental Flag */
     CHOLMOD,            /* Experimental Flag */
   };
@@ -121,8 +121,8 @@ public:
     return (linearSolverType == CHOLMOD);
   }
-  inline bool isCG() const {
-    return (linearSolverType == CONJUGATE_GRADIENT);
+  inline bool isIterative() const {
+    return (linearSolverType == Iterative);
   }
   GaussianFactorGraph::Eliminate getEliminationFunction() const {