remove unused variables in iterative solvers and rename accordingly.

release/4.3a0
Yong-Dian Jian 2014-06-03 23:52:35 -04:00
parent e60f21a22f
commit a0c77dcc1c
8 changed files with 26 additions and 39 deletions

View File

@@ -104,7 +104,7 @@ int main(int argc, char** argv) {
LevenbergMarquardtParams parameters;
parameters.verbosity = NonlinearOptimizerParams::ERROR;
parameters.verbosityLM = LevenbergMarquardtParams::LAMBDA;
-parameters.linearSolverType = NonlinearOptimizerParams::CONJUGATE_GRADIENT;
+parameters.linearSolverType = NonlinearOptimizerParams::Iterative;
{
parameters.iterativeParams = boost::make_shared<SubgraphSolverParameters>();
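
For context, a minimal usage sketch (not part of this commit) of how calling code selects the renamed solver type, mirroring the example above; the helper name and the graph/initial arguments are placeholders, and the includes assume the usual GTSAM header layout.

#include <gtsam/nonlinear/LevenbergMarquardtOptimizer.h>
#include <gtsam/linear/SubgraphSolver.h>
#include <boost/make_shared.hpp>

// Placeholder helper: any NonlinearFactorGraph / Values pair will do.
gtsam::Values optimizeWithIterativeSolver(const gtsam::NonlinearFactorGraph& graph,
                                          const gtsam::Values& initial) {
  gtsam::LevenbergMarquardtParams parameters;
  // Renamed by this commit: Iterative replaces CONJUGATE_GRADIENT.
  parameters.linearSolverType = gtsam::NonlinearOptimizerParams::Iterative;
  parameters.iterativeParams = boost::make_shared<gtsam::SubgraphSolverParameters>();
  gtsam::LevenbergMarquardtOptimizer optimizer(graph, initial, parameters);
  return optimizer.optimize();
}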

View File

@@ -1481,9 +1481,7 @@ class GaussianISAM {
#include <gtsam/linear/IterativeSolver.h>
virtual class IterativeOptimizationParameters {
-string getKernel() const ;
string getVerbosity() const;
-void setKernel(string s) ;
void setVerbosity(string s) ;
void print() const;
};
@@ -1860,7 +1858,7 @@ virtual class NonlinearOptimizerParams {
bool isMultifrontal() const;
bool isSequential() const;
bool isCholmod() const;
-bool isCG() const;
+bool isIterative() const;
};
bool checkConvergence(double relativeErrorTreshold,

View File

@@ -11,18 +11,19 @@
namespace gtsam {
-std::string IterativeOptimizationParameters::getKernel() const { return kernelTranslator(kernel_); }
/*****************************************************************************/
std::string IterativeOptimizationParameters::getVerbosity() const { return verbosityTranslator(verbosity_); }
-void IterativeOptimizationParameters::setKernel(const std::string &src) { kernel_ = kernelTranslator(src); }
/*****************************************************************************/
void IterativeOptimizationParameters::setVerbosity(const std::string &src) { verbosity_ = verbosityTranslator(src); }
-IterativeOptimizationParameters::Kernel IterativeOptimizationParameters::kernelTranslator(const std::string &src) {
-std::string s = src; boost::algorithm::to_upper(s);
-if (s == "CG") return IterativeOptimizationParameters::CG;
-/* default is cg */
-else return IterativeOptimizationParameters::CG;
/*****************************************************************************/
+void IterativeOptimizationParameters::print() const {
+std::cout << "IterativeOptimizationParameters" << std::endl
+<< "verbosity: " << verbosityTranslator(verbosity_) << std::endl;
+}
/*****************************************************************************/
IterativeOptimizationParameters::Verbosity IterativeOptimizationParameters::verbosityTranslator(const std::string &src) {
std::string s = src; boost::algorithm::to_upper(s);
if (s == "SILENT") return IterativeOptimizationParameters::SILENT;
@@ -32,11 +33,7 @@ IterativeOptimizationParameters::Verbosity IterativeOptimizationParameters::verb
else return IterativeOptimizationParameters::SILENT;
}
-std::string IterativeOptimizationParameters::kernelTranslator(IterativeOptimizationParameters::Kernel k) {
-if ( k == CG ) return "CG";
-else return "UNKNOWN";
-}
/*****************************************************************************/
std::string IterativeOptimizationParameters::verbosityTranslator(IterativeOptimizationParameters::Verbosity verbosity) {
if (verbosity == SILENT) return "SILENT";
else if (verbosity == COMPLEXITY) return "COMPLEXITY";

View File

@@ -30,21 +30,19 @@ namespace gtsam {
public:
typedef boost::shared_ptr<IterativeOptimizationParameters> shared_ptr;
-enum Kernel { CG = 0 } kernel_ ; ///< Iterative Method Kernel
enum Verbosity { SILENT = 0, COMPLEXITY, ERROR } verbosity_;
public:
IterativeOptimizationParameters(const IterativeOptimizationParameters &p)
-: kernel_(p.kernel_), verbosity_(p.verbosity_) {}
+: verbosity_(p.verbosity_) {}
-IterativeOptimizationParameters(const Kernel kernel = CG, const Verbosity verbosity = SILENT)
-: kernel_(kernel), verbosity_(verbosity) {}
+IterativeOptimizationParameters(const Verbosity verbosity = SILENT)
+: verbosity_(verbosity) {}
virtual ~IterativeOptimizationParameters() {}
/* general interface */
-inline Kernel kernel() const { return kernel_; }
inline Verbosity verbosity() const { return verbosity_; }
/* matlab interface */
@@ -53,15 +51,9 @@
-void setKernel(const std::string &s) ;
void setVerbosity(const std::string &s) ;
-virtual void print() const {
-std::cout << "IterativeOptimizationParameters" << std::endl
-<< "kernel: " << kernelTranslator(kernel_) << std::endl
-<< "verbosity: " << verbosityTranslator(verbosity_) << std::endl;
-}
+virtual void print() const ;
-static Kernel kernelTranslator(const std::string &s);
static Verbosity verbosityTranslator(const std::string &s);
-static std::string kernelTranslator(Kernel k);
static std::string verbosityTranslator(Verbosity v);
};
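
With kernel_ and its accessors removed, only the verbosity setting remains configurable on IterativeOptimizationParameters. A minimal sketch assuming the post-commit headers above; "COMPLEXITY" is one of the verbosity names visible in this diff.

#include <gtsam/linear/IterativeSolver.h>
#include <iostream>

int main() {
  gtsam::IterativeOptimizationParameters params;    // verbosity defaults to SILENT
  params.setVerbosity("COMPLEXITY");                 // string setter kept by this commit
  std::cout << params.getVerbosity() << std::endl;   // echoes the verbosity back as a string
  params.print();                                    // print() is now implemented in the .cpp
  return 0;
}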

View File

@@ -76,7 +76,7 @@ void DoglegOptimizer::iterate(void) {
result = DoglegOptimizerImpl::Iterate(state_.Delta, DoglegOptimizerImpl::ONE_STEP_PER_ITERATION,
dx_u, dx_n, bn, graph_, state_.values, state_.error, dlVerbose);
}
-else if ( params_.isCG() ) {
+else if ( params_.isIterative() ) {
throw runtime_error("Dogleg is not currently compatible with the linear conjugate gradient solver");
}
else {
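
The user-visible behaviour of the branch above is unchanged by the rename: selecting the iterative back-end together with Dogleg still throws. A short sketch under the post-commit API; the helper name and the graph/initial arguments are placeholders.

#include <gtsam/nonlinear/DoglegOptimizer.h>
#include <iostream>
#include <stdexcept>

// Placeholder helper demonstrating the exception path shown above.
void tryDoglegWithIterative(const gtsam::NonlinearFactorGraph& graph,
                            const gtsam::Values& initial) {
  gtsam::DoglegParams params;    // DoglegParams derives from NonlinearOptimizerParams
  params.linearSolverType = gtsam::NonlinearOptimizerParams::Iterative;
  gtsam::DoglegOptimizer optimizer(graph, initial, params);
  try {
    optimizer.iterate();         // reaches the runtime_error branch above
  } catch (const std::runtime_error& e) {
    std::cerr << e.what() << std::endl;
  }
}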

View File

@@ -102,7 +102,7 @@ VectorValues NonlinearOptimizer::solve(const GaussianFactorGraph &gfg,
} else if (params.isSequential()) {
delta = gfg.eliminateSequential(*params.ordering,
params.getEliminationFunction())->optimize();
-} else if (params.isCG()) {
+} else if (params.isIterative()) {
if (!params.iterativeParams)
throw std::runtime_error(
"NonlinearOptimizer::solve: cg parameter has to be assigned ...");

View File

@@ -99,8 +99,8 @@ void NonlinearOptimizerParams::print(const std::string& str) const {
case CHOLMOD:
std::cout << " linear solver type: CHOLMOD\n";
break;
-case CONJUGATE_GRADIENT:
-std::cout << " linear solver type: CONJUGATE GRADIENT\n";
+case Iterative:
+std::cout << " linear solver type: ITERATIVE\n";
break;
default:
std::cout << " linear solver type: (invalid)\n";
@@ -127,8 +127,8 @@ std::string NonlinearOptimizerParams::linearSolverTranslator(
return "SEQUENTIAL_CHOLESKY";
case SEQUENTIAL_QR:
return "SEQUENTIAL_QR";
-case CONJUGATE_GRADIENT:
-return "CONJUGATE_GRADIENT";
+case Iterative:
+return "ITERATIVE";
case CHOLMOD:
return "CHOLMOD";
default:
@@ -148,8 +148,8 @@ NonlinearOptimizerParams::LinearSolverType NonlinearOptimizerParams::linearSolve
return SEQUENTIAL_CHOLESKY;
if (linearSolverType == "SEQUENTIAL_QR")
return SEQUENTIAL_QR;
-if (linearSolverType == "CONJUGATE_GRADIENT")
-return CONJUGATE_GRADIENT;
+if (linearSolverType == "ITERATIVE")
+return Iterative;
if (linearSolverType == "CHOLMOD")
return CHOLMOD;
throw std::invalid_argument(

View File

@@ -99,7 +99,7 @@ public:
MULTIFRONTAL_QR,
SEQUENTIAL_CHOLESKY,
SEQUENTIAL_QR,
-CONJUGATE_GRADIENT, /* Experimental Flag */
+Iterative, /* Experimental Flag */
CHOLMOD, /* Experimental Flag */
};
@@ -121,8 +121,8 @@ public:
return (linearSolverType == CHOLMOD);
}
-inline bool isCG() const {
-return (linearSolverType == CONJUGATE_GRADIENT);
+inline bool isIterative() const {
+return (linearSolverType == Iterative);
}
GaussianFactorGraph::Eliminate getEliminationFunction() const {