NonlinearOptimizer, GaussNewtonOptimizer, and LevenbergMarquardtOptimizer compile

release/4.3a0
Richard Roberts 2012-02-28 05:30:53 +00:00
parent 67564d043b
commit 8748f483b0
8 changed files with 231 additions and 123 deletions

View File

@@ -21,7 +21,7 @@
<folderInfo id="cdt.managedbuild.toolchain.gnu.macosx.base.1359703544.2031210194" name="/" resourcePath="">
<toolChain id="cdt.managedbuild.toolchain.gnu.macosx.base.677243255" name="cdt.managedbuild.toolchain.gnu.macosx.base" superClass="cdt.managedbuild.toolchain.gnu.macosx.base">
<targetPlatform archList="all" binaryParser="org.eclipse.cdt.core.ELF;org.eclipse.cdt.core.MachO64" id="cdt.managedbuild.target.gnu.platform.macosx.base.752782918" name="Debug Platform" osList="macosx" superClass="cdt.managedbuild.target.gnu.platform.macosx.base"/>
<builder arguments="" buildPath="${ProjDirPath}/build" command="make" id="cdt.managedbuild.target.gnu.builder.macosx.base.319933862" keepEnvironmentInBuildfile="false" managedBuildOn="false" name="Gnu Make Builder" parallelBuildOn="true" parallelizationNumber="5" superClass="cdt.managedbuild.target.gnu.builder.macosx.base"/>
<builder arguments="" buildPath="${ProjDirPath}/build" command="make" id="cdt.managedbuild.target.gnu.builder.macosx.base.319933862" keepEnvironmentInBuildfile="false" managedBuildOn="false" name="Gnu Make Builder" parallelBuildOn="false" parallelizationNumber="5" superClass="cdt.managedbuild.target.gnu.builder.macosx.base"/>
<tool id="cdt.managedbuild.tool.macosx.c.linker.macosx.base.457360678" name="MacOS X C Linker" superClass="cdt.managedbuild.tool.macosx.c.linker.macosx.base"/>
<tool id="cdt.managedbuild.tool.macosx.cpp.linker.macosx.base.1011140787" name="MacOS X C++ Linker" superClass="cdt.managedbuild.tool.macosx.cpp.linker.macosx.base">
<inputType id="cdt.managedbuild.tool.macosx.cpp.linker.input.1032375444" superClass="cdt.managedbuild.tool.macosx.cpp.linker.input">

View File

@@ -23,7 +23,7 @@
</dictionary>
<dictionary>
<key>org.eclipse.cdt.make.core.buildArguments</key>
<value>-j5</value>
<value></value>
</dictionary>
<dictionary>
<key>org.eclipse.cdt.make.core.buildCommand</key>

View File

@@ -27,10 +27,10 @@ namespace gtsam {
NonlinearOptimizer::auto_ptr GaussNewtonOptimizer::iterate() const {
// Linearize graph
GaussianFactorGraph::shared_ptr linear = graph_->linearize(values_, gnParams_->ordering);
GaussianFactorGraph::shared_ptr linear = graph_->linearize(*values_, *ordering_);
// Check whether to use QR
const bool useQR;
bool useQR;
if(gnParams_->factorization == GaussNewtonParams::LDL)
useQR = false;
else if(gnParams_->factorization == GaussNewtonParams::QR)
@@ -40,9 +40,9 @@ NonlinearOptimizer::auto_ptr GaussNewtonOptimizer::iterate() const {
// Optimize
VectorValues::shared_ptr delta;
if(gnParams_->elimination == MULTIFRONTAL)
if(gnParams_->elimination == GaussNewtonParams::MULTIFRONTAL)
delta = GaussianMultifrontalSolver(*linear, useQR).optimize();
else if(gnParams_->elimination == SEQUENTIAL)
else if(gnParams_->elimination == GaussNewtonParams::SEQUENTIAL)
delta = GaussianSequentialSolver(*linear, useQR).optimize();
else
throw runtime_error("Optimization parameter is invalid: GaussNewtonParams::elimination");
@@ -51,16 +51,16 @@ NonlinearOptimizer::auto_ptr GaussNewtonOptimizer::iterate() const {
if(params_->verbosity >= NonlinearOptimizerParams::DELTA) delta->print("delta");
// Update values
SharedValues newValues(new Values(values_->retract(*delta, gnParams_->ordering)));
double newError = graph_->error(newValues);
SharedValues newValues(new Values(values_->retract(*delta, *ordering_)));
double newError = graph_->error(*newValues);
// Maybe show output
if (params_->verbosity >= NonlinearOptimizerParams::VALUES) newValues->print("newValues");
if (params_->verbosity >= NonlinearOptimizerParams::ERROR) cout << "error: " << newError << endl;
// Create new optimizer with new values and new error
auto_ptr<GaussNewtonOptimizer> newOptimizer(new GaussNewtonOptimizer(
graph_, newValues, gnParams_, newError, iterations_+1));
NonlinearOptimizer::auto_ptr newOptimizer(new GaussNewtonOptimizer(
*this, newValues, newError));
return newOptimizer;
}
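The control flow above is worth calling out: iterate() is const and returns a fresh optimizer holding the retracted values and the new error, so optimization proceeds by replacing the optimizer object rather than mutating it. A minimal sketch of that copy-on-iterate pattern on a scalar least-squares problem (self-contained C++; none of these names are GTSAM's):

```cpp
// Sketch only: the copy-on-iterate pattern of iterate() above, shown on a
// 1-D least-squares problem. None of these names are GTSAM's.
#include <iostream>

struct GNState {      // analogous to the optimizer: immutable values + error
  double x;
  double error;
};

// residual r(x) = x^2 - 2, so minimizing 0.5*r^2 drives x toward sqrt(2)
double residual(double x) { return x * x - 2.0; }
double jacobian(double x) { return 2.0 * x; }

// One Gauss-Newton step: linearize, solve the normal equation, retract.
// Returns a new state; the input is untouched, mirroring iterate().
GNState iterateOnce(const GNState& s) {
  const double r = residual(s.x);
  const double J = jacobian(s.x);
  const double delta = -(J * r) / (J * J);  // scalar (J^T J)^{-1} J^T r
  const double newX = s.x + delta;          // "retract" is plain addition here
  const double newR = residual(newX);
  return GNState{newX, 0.5 * newR * newR};
}

int main() {
  GNState s{1.0, 0.5 * residual(1.0) * residual(1.0)};
  for (int i = 0; i < 5; ++i) {
    s = iterateOnce(s);  // replace the state instead of mutating it
    std::cout << "x = " << s.x << ", error = " << s.error << "\n";
  }
}
```

The payoff of this pattern is that any intermediate optimizer state can be kept or compared without defensive copies.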

View File

@@ -41,11 +41,12 @@ public:
Elimination elimination; ///< The elimination algorithm to use (default: MULTIFRONTAL)
Factorization factorization; ///< The numerical factorization (default: LDL)
Ordering ordering; ///< The variable elimination ordering (default: empty -> COLAMD)
GaussNewtonParams() :
elimination(MULTIFRONTAL), factorization(LDL) {}
virtual ~GaussNewtonParams() {}
virtual void print(const std::string& str = "") const {
NonlinearOptimizerParams::print(str);
if(elimination == MULTIFRONTAL)
@@ -59,8 +60,6 @@ public:
std::cout << " factorization method: LDL\n";
else if(factorization == QR)
std::cout << " factorization method: QR\n";
else if(factorization == CHOLESKY)
std::cout << " factorization method: CHOLESKY\n";
else
std::cout << " factorization method: (invalid)\n";
@@ -75,7 +74,8 @@ class GaussNewtonOptimizer : public NonlinearOptimizer {
public:
typedef boost::shared_ptr<GaussNewtonParams> SharedGNParams;
typedef boost::shared_ptr<const GaussNewtonParams> SharedGNParams;
typedef boost::shared_ptr<const Ordering> SharedOrdering;
/// @name Standard interface
/// @{
@@ -89,12 +89,16 @@ public:
* @param params The optimization parameters
*/
GaussNewtonOptimizer(const NonlinearFactorGraph& graph, const Values& values,
const GaussNewtonParams& params) :
const GaussNewtonParams& params = GaussNewtonParams(),
const Ordering& ordering = Ordering()) :
NonlinearOptimizer(
SharedGraph(new NonlinearFactorGraph(graph)),
SharedValues(new Values(values)),
SharedGNParams(new GaussNewtonParams(params))),
gnParams_(boost::static_pointer_cast<GaussNewtonParams>(params_)) {}
gnParams_(boost::static_pointer_cast<const GaussNewtonParams>(params_)),
colamdOrdering_(ordering.size() == 0),
ordering_(colamdOrdering_ ?
graph_->orderingCOLAMD(*values_) : Ordering::shared_ptr(new Ordering(ordering))) {}
/** Standard constructor, requires a nonlinear factor graph, initial
* variable assignments, and optimization parameters.
@@ -103,8 +107,12 @@ public:
* @param params The optimization parameters
*/
GaussNewtonOptimizer(const SharedGraph& graph, const SharedValues& values,
const SharedGNParams& params) :
NonlinearOptimizer(graph, values, params), gnParams_(params) {}
const SharedGNParams& params = SharedGNParams(),
const SharedOrdering& ordering = SharedOrdering()) :
NonlinearOptimizer(graph, values, params ? params : SharedGNParams(new GaussNewtonParams())),
gnParams_(boost::static_pointer_cast<const GaussNewtonParams>(params_)),
colamdOrdering_(!ordering || ordering->size() == 0),
ordering_(colamdOrdering_ ? graph_->orderingCOLAMD(*values_) : ordering) {}
/// @}
@@ -126,16 +134,24 @@ public:
* NonlinearOptimizer object, the original is not modified.
*/
virtual auto_ptr update(
const SharedGraph& newGraph = SharedGraph(),
const SharedGraph& newGraph,
const SharedValues& newValues = SharedValues(),
const SharedParams& newParams = SharedParams()) const {
return new GaussNewtonOptimizer(*this, newGraph, newValues,
boost::dynamic_pointer_cast<GaussNewtonParams>(newParams));
return auto_ptr(new GaussNewtonOptimizer(*this, newGraph, newValues,
boost::dynamic_pointer_cast<const GaussNewtonParams>(newParams)));
}
/** Update the ordering, leaving all other state the same. If newOrdering
* is an empty pointer <emph>or</emph> contains an empty Ordering object
* (with zero size), a COLAMD ordering will be computed. Returns a new
* NonlinearOptimizer object, the original is not modified.
*/
virtual auto_ptr update(const SharedOrdering& newOrdering) const {
return auto_ptr(new GaussNewtonOptimizer(*this, newOrdering)); }
/** Create a copy of the NonlinearOptimizer */
virtual auto_ptr clone() const {
return new GaussNewtonOptimizer(*this);
return auto_ptr(new GaussNewtonOptimizer(*this));
}
/// @}
@@ -143,16 +159,41 @@ public:
protected:
const SharedGNParams gnParams_;
const bool colamdOrdering_;
const SharedOrdering ordering_;
GaussNewtonOptimizer(const SharedGraph& graph, const SharedValues& values,
const SharedGNParams& params, double error, int iterations) :
NonlinearOptimizer(graph, values, params, error, iterations), gnParams_(params) {}
/** Protected constructor called by update() to modify the graph, values, or
* parameters. Computes a COLAMD ordering if the optimizer was originally
* constructed with an empty ordering, and if the graph is changing.
*/
GaussNewtonOptimizer(const GaussNewtonOptimizer& original, const SharedGraph& newGraph,
const SharedValues& newValues, const SharedGNParams& newParams) :
NonlinearOptimizer(original, newGraph, newValues, newParams),
gnParams_(newParams ? newParams : original.gnParams_) {}
gnParams_(newParams ? newParams : original.gnParams_),
colamdOrdering_(original.colamdOrdering_),
ordering_(newGraph && colamdOrdering_ ? graph_->orderingCOLAMD(*values_) : original.ordering_) {}
/** Protected constructor called by update() to modify the ordering, computing
* a COLAMD ordering if the new ordering is empty, and also recomputing the
* dimensions.
*/
GaussNewtonOptimizer(
const GaussNewtonOptimizer& original, const SharedOrdering& newOrdering) :
NonlinearOptimizer(original),
gnParams_(original.gnParams_),
colamdOrdering_(!newOrdering || newOrdering->size() == 0),
ordering_(colamdOrdering_ ? graph_->orderingCOLAMD(*values_) : newOrdering) {}
private:
// Special constructor for completing an iteration, updates the values and
// error, and increments the iteration count.
GaussNewtonOptimizer(const GaussNewtonOptimizer& original,
const SharedValues& newValues, double newError) :
NonlinearOptimizer(original.graph_, newValues, original.params_, newError, original.iterations_+1),
gnParams_(original.gnParams_),
colamdOrdering_(original.colamdOrdering_),
ordering_(original.ordering_) {}
};
}
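With the defaulted params and ordering arguments above, callers can now construct a GaussNewtonOptimizer from just a graph and initial values and get the documented defaults (MULTIFRONTAL elimination, LDL factorization, COLAMD ordering). A hedged usage sketch against the constructors declared in this header; the header path and the public visibility of iterate() are assumed from the surrounding diff:

```cpp
// Sketch against the constructors declared above. The header path and the
// public iterate() are assumed from the surrounding diff; graph and initial
// values are taken as given.
#include <gtsam/nonlinear/GaussNewtonOptimizer.h>

void gaussNewtonExample(const gtsam::NonlinearFactorGraph& graph,
                        const gtsam::Values& initial) {
  // All-defaults construction: an empty Ordering() triggers COLAMD.
  gtsam::GaussNewtonOptimizer optimizer(graph, initial);

  // Or spell the parameters out: switch the factorization from LDL to QR.
  gtsam::GaussNewtonParams params;
  params.factorization = gtsam::GaussNewtonParams::QR;
  gtsam::GaussNewtonOptimizer qrOptimizer(graph, initial, params);

  // iterate() is const and returns a new optimizer; the original is unchanged.
  gtsam::NonlinearOptimizer::auto_ptr next = optimizer.iterate();
}
```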

View File

@@ -17,6 +17,8 @@
*/
#include <gtsam/nonlinear/LevenbergMarquardtOptimizer.h>
#include <gtsam/base/cholesky.h> // For NegativeMatrixException
#include <gtsam/linear/GaussianMultifrontalSolver.h>
#include <gtsam/linear/GaussianSequentialSolver.h>
@@ -27,75 +29,72 @@ namespace gtsam {
NonlinearOptimizer::auto_ptr LevenbergMarquardtOptimizer::iterate() const {
// Linearize graph
GaussianFactorGraph::shared_ptr linear = graph_->linearize(values_, gnParams_->ordering);
GaussianFactorGraph::shared_ptr linear = graph_->linearize(*values_, *ordering_);
// Check whether to use QR
const bool useQR;
if(gnParams_->factorization == LevenbergMarquardtParams::LDL)
bool useQR;
if(lmParams_->factorization == LevenbergMarquardtParams::LDL)
useQR = false;
else if(gnParams_->factorization == LevenbergMarquardtParams::QR)
else if(lmParams_->factorization == LevenbergMarquardtParams::QR)
useQR = true;
else
throw runtime_error("Optimization parameter is invalid: LevenbergMarquardtParams::factorization");
// Optimize
VectorValues::shared_ptr delta;
if(gnParams_->elimination == MULTIFRONTAL)
delta = GaussianMultifrontalSolver(*linear, useQR).optimize();
else if(gnParams_->elimination == SEQUENTIAL)
delta = GaussianSequentialSolver(*linear, useQR).optimize();
else
throw runtime_error("Optimization parameter is invalid: LevenbergMarquardtParams::elimination");
// Pull out parameters we'll use
const NonlinearOptimizerParams::Verbosity nloVerbosity = params_->verbosity;
const LevenbergMarquardtParams::LMVerbosity lmVerbosity = lmParams_->lmVerbosity;
const double lambdaFactor = lmParams_->lambdaFactor;
const NonlinearOptimizerParams::Verbosity verbosity = params_->verbosity;
const double lambdaFactor = parameters_->lambdaFactor_ ;
double lambda = params_->lambda;
double next_error = error_;
SharedValues next_values = values_;
// Variables to update during try_lambda loop
double lambda = lmParams_->lambdaInitial;
double next_error = error();
SharedValues next_values = values();
// Keep increasing lambda until we make progress
while(true) {
if (verbosity >= Parameters::TRYLAMBDA) cout << "trying lambda = " << lambda << endl;
if (lmVerbosity >= LevenbergMarquardtParams::TRYLAMBDA)
cout << "trying lambda = " << lambda << endl;
// add prior-factors
// Add prior-factors
// TODO: replace this damping with a backsubstitution approach
typename L::shared_ptr dampedSystem(new L(linearSystem));
GaussianFactorGraph dampedSystem(*linear);
{
double sigma = 1.0 / sqrt(lambda);
dampedSystem->reserve(dampedSystem->size() + dimensions_->size());
dampedSystem.reserve(dampedSystem.size() + dimensions_->size());
// for each of the variables, add a prior
for(Index j=0; j<dimensions_->size(); ++j) {
size_t dim = (*dimensions_)[j];
Matrix A = eye(dim);
Vector b = zero(dim);
SharedDiagonal model = noiseModel::Isotropic::Sigma(dim,sigma);
typename L::sharedFactor prior(new JacobianFactor(j, A, b, model));
dampedSystem->push_back(prior);
GaussianFactor::shared_ptr prior(new JacobianFactor(j, A, b, model));
dampedSystem.push_back(prior);
}
}
if (verbosity >= Parameters::DAMPED) dampedSystem->print("damped");
// Create a new solver using the damped linear system
// FIXME: remove spcg specific code
if (spcg_solver_) spcg_solver_->replaceFactors(dampedSystem);
shared_solver solver = (spcg_solver_) ? spcg_solver_ : shared_solver(
new S(dampedSystem, structure_, parameters_->useQR_));
if (lmVerbosity >= LevenbergMarquardtParams::DAMPED) dampedSystem.print("damped");
// Try solving
try {
VectorValues delta = *solver->optimize();
if (verbosity >= Parameters::TRYLAMBDA) cout << "linear delta norm = " << delta.vector().norm() << endl;
if (verbosity >= Parameters::TRYDELTA) delta.print("delta");
// Optimize
VectorValues::shared_ptr delta;
if(lmParams_->elimination == LevenbergMarquardtParams::MULTIFRONTAL)
delta = GaussianMultifrontalSolver(dampedSystem, useQR).optimize();
else if(lmParams_->elimination == LevenbergMarquardtParams::SEQUENTIAL)
delta = GaussianSequentialSolver(dampedSystem, useQR).optimize();
else
throw runtime_error("Optimization parameter is invalid: LevenbergMarquardtParams::elimination");
if (lmVerbosity >= LevenbergMarquardtParams::TRYLAMBDA) cout << "linear delta norm = " << delta->vector().norm() << endl;
if (lmVerbosity >= LevenbergMarquardtParams::TRYDELTA) delta->print("delta");
// update values
shared_values newValues(new Values(values_->retract(delta, *ordering_)));
SharedValues newValues(new Values(values_->retract(*delta, *ordering_)));
// create new optimization state with more adventurous lambda
double error = graph_->error(*newValues);
if (verbosity >= Parameters::TRYLAMBDA) cout << "next error = " << error << endl;
if (lmVerbosity >= LevenbergMarquardtParams::TRYLAMBDA) cout << "next error = " << error << endl;
if( error <= error_ ) {
next_values = newValues;
@@ -107,49 +106,39 @@ NonlinearOptimizer::auto_ptr LevenbergMarquardtOptimizer::iterate() const {
// Either we're not cautious, or the same lambda was worse than the current error.
// The more adventurous lambda was worse too, so make lambda more conservative
// and keep the same values.
if(lambdaMode >= Parameters::BOUNDED && lambda >= 1.0e5) {
if(verbosity >= Parameters::ERROR)
if(lambda >= lmParams_->lambdaUpperBound) {
if(nloVerbosity >= NonlinearOptimizerParams::ERROR)
cout << "Warning: Levenberg-Marquardt giving up because cannot decrease error with maximum lambda" << endl;
break;
} else {
lambda *= factor;
lambda *= lambdaFactor;
}
}
} catch(const NegativeMatrixException& e) {
if(verbosity >= Parameters::LAMBDA)
if(lmVerbosity >= LevenbergMarquardtParams::LAMBDA)
cout << "Negative matrix, increasing lambda" << endl;
// Either we're not cautious, or the same lambda was worse than the current error.
// The more adventurous lambda was worse too, so make lambda more conservative
// and keep the same values.
if(lambdaMode >= Parameters::BOUNDED && lambda >= 1.0e5) {
if(verbosity >= Parameters::ERROR)
if(lambda >= lmParams_->lambdaUpperBound) {
if(nloVerbosity >= NonlinearOptimizerParams::ERROR)
cout << "Warning: Levenberg-Marquardt giving up because cannot decrease error with maximum lambda" << endl;
break;
} else {
lambda *= factor;
lambda *= lambdaFactor;
}
} catch(...) {
throw;
}
} // end while
return newValuesErrorLambda_(next_values, next_error, lambda);
// Maybe show output
if(params_->verbosity >= NonlinearOptimizerParams::DELTA) delta->print("delta");
// Update values
SharedValues newValues(new Values(values_->retract(*delta, gnParams_->ordering)));
double newError = graph_->error(newValues);
// Maybe show output
if (params_->verbosity >= NonlinearOptimizerParams::VALUES) newValues->print("newValues");
if (params_->verbosity >= NonlinearOptimizerParams::ERROR) cout << "error: " << newError << endl;
if (params_->verbosity >= NonlinearOptimizerParams::VALUES) next_values->print("newValues");
if (params_->verbosity >= NonlinearOptimizerParams::ERROR) cout << "error: " << next_error << endl;
// Create new optimizer with new values and new error
auto_ptr<LevenbergMarquardtOptimizer> newOptimizer(new LevenbergMarquardtOptimizer(
graph_, newValues, gnParams_, newError, iterations_+1));
NonlinearOptimizer::auto_ptr newOptimizer(new LevenbergMarquardtOptimizer(
*this, next_values, next_error, lambda));
return newOptimizer;
}
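A note on the choice sigma = 1.0 / sqrt(lambda) in the damping block of iterate() above: each added JacobianFactor has A = I and b = 0 with isotropic noise sigma, so it contributes A^T Sigma^{-1} A = sigma^{-2} I = lambda I to the normal equations. The damped solve is therefore exactly the textbook Levenberg-Marquardt step:

$$(J^\top J + \lambda I)\,\delta = -J^\top r$$

which is why multiplying lambda by lambdaFactor on failure makes the step more conservative (closer to scaled gradient descent), and why hitting lambdaUpperBound is treated as giving up.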

View File

@@ -22,8 +22,10 @@
namespace gtsam {
/** Parameters for Levenberg-Marquardt optimization, inherits from
* NonlinearOptimizationParams.
/** Parameters for Levenberg-Marquardt optimization. This class inherits from
* NonlinearOptimizerParams, which specifies the parameters common to all
* nonlinear optimization algorithms, and so also carries all of those
* common parameters.
*/
class LevenbergMarquardtParams : public NonlinearOptimizerParams {
public:
@@ -41,18 +43,26 @@ public:
/** See LevenbergMarquardtParams::lmVerbosity */
enum LMVerbosity {
SILENT,
LAMBDA,
TRYLAMBDA,
TRYCONFIG,
TRYDELTA,
DAMPED
};
Elimination elimination; ///< The elimination algorithm to use (default: MULTIFRONTAL)
Factorization factorization; ///< The numerical factorization (default: LDL)
Ordering::shared_ptr ordering; ///< The variable elimination ordering (default: empty -> COLAMD)
double lambda; ///< The initial (and current after each iteration) Levenberg-Marquardt damping term (default: 1e-5)
double lambdaInitial; ///< The initial Levenberg-Marquardt damping term (default: 1e-5)
double lambdaFactor; ///< The amount by which to multiply or divide lambda when adjusting lambda (default: 10.0)
double lambdaUpperBound; ///< The maximum lambda to try before assuming the optimization has failed (default: 1e5)
LMVerbosity lmVerbosity; ///< The verbosity level for Levenberg-Marquardt (default: SILENT), see also NonlinearOptimizerParams::verbosity
LevenbergMarquardtParams() :
elimination(MULTIFRONTAL), factorization(LDL), lambda(1e-5), lambdaFactor(10.0) {}
elimination(MULTIFRONTAL), factorization(LDL), lambdaInitial(1e-5),
lambdaFactor(10.0), lambdaUpperBound(1e5), lmVerbosity(SILENT) {}
virtual ~LevenbergMarquardtParams() {}
virtual void print(const std::string& str = "") const {
NonlinearOptimizerParams::print(str);
@@ -67,13 +77,12 @@ public:
std::cout << " factorization method: LDL\n";
else if(factorization == QR)
std::cout << " factorization method: QR\n";
else if(factorization == CHOLESKY)
std::cout << " factorization method: CHOLESKY\n";
else
std::cout << " factorization method: (invalid)\n";
std::cout << " lambda: " << lambda << "\n";
std::cout << " lambdaInitial: " << lambdaInitial << "\n";
std::cout << " lambdaFactor: " << lambdaFactor << "\n";
std::cout << " lambdaUpperBound: " << lambdaUpperBound << "\n";
std::cout.flush();
}
@@ -86,7 +95,8 @@ class LevenbergMarquardtOptimizer : public NonlinearOptimizer {
public:
typedef boost::shared_ptr<LevenbergMarquardtParams> SharedLMParams;
typedef boost::shared_ptr<const LevenbergMarquardtParams> SharedLMParams;
typedef boost::shared_ptr<const Ordering> SharedOrdering;
/// @name Standard interface
/// @{
@@ -100,12 +110,18 @@ public:
* @param params The optimization parameters
*/
LevenbergMarquardtOptimizer(const NonlinearFactorGraph& graph, const Values& values,
const LevenbergMarquardtParams& params) :
const LevenbergMarquardtParams& params = LevenbergMarquardtParams(),
const Ordering& ordering = Ordering()) :
NonlinearOptimizer(
SharedGraph(new NonlinearFactorGraph(graph)),
SharedValues(new Values(values)),
SharedLMParams(new LevenbergMarquardtParams(params))),
lmParams_(boost::static_pointer_cast<LevenbergMarquardtParams>(params_)) {}
lmParams_(boost::static_pointer_cast<const LevenbergMarquardtParams>(params_)),
colamdOrdering_(ordering.size() == 0),
ordering_(colamdOrdering_ ?
graph_->orderingCOLAMD(*values_) : Ordering::shared_ptr(new Ordering(ordering))),
dimensions_(new vector<size_t>(values_->dims(*ordering_))),
lambda_(lmParams_->lambdaInitial) {}
/** Standard constructor, requires a nonlinear factor graph, initial
* variable assignments, and optimization parameters.
@@ -114,8 +130,14 @@ public:
* @param params The optimization parameters
*/
LevenbergMarquardtOptimizer(const SharedGraph& graph, const SharedValues& values,
const SharedLMParams& params) :
NonlinearOptimizer(graph, values, params), lmParams_(params) {}
const SharedLMParams& params = SharedLMParams(),
const SharedOrdering& ordering = SharedOrdering()) :
NonlinearOptimizer(graph, values, params ? params : SharedLMParams(new LevenbergMarquardtParams())),
lmParams_(boost::static_pointer_cast<const LevenbergMarquardtParams>(params_)),
colamdOrdering_(!ordering || ordering->size() == 0),
ordering_(colamdOrdering_ ? graph_->orderingCOLAMD(*values_) : ordering),
dimensions_(new vector<size_t>(values_->dims(*ordering_))),
lambda_(lmParams_->lambdaInitial) {}
/// @}
@@ -133,37 +155,98 @@ public:
/** Update the graph, values, and/or parameters, leaving all other state
* the same. Any of these that are empty shared pointers are left unchanged
* in the returned optimizer object. Returns a new updated
* NonlinearOptimizer object, the original is not modified.
* in the returned optimizer object. Returns a new updated NonlinearOptimizer
* object, the original is not modified.
*/
virtual auto_ptr update(
const SharedGraph& newGraph = SharedGraph(),
const SharedGraph& newGraph,
const SharedValues& newValues = SharedValues(),
const SharedParams& newParams = SharedParams()) const {
return new LevenbergMarquardtOptimizer(*this, newGraph, newValues,
boost::dynamic_pointer_cast<LevenbergMarquardtParams>(newParams));
return auto_ptr(new LevenbergMarquardtOptimizer(*this, newGraph, newValues,
boost::dynamic_pointer_cast<const LevenbergMarquardtParams>(newParams)));
}
/** Update the ordering, leaving all other state the same. If newOrdering
* is an empty pointer <emph>or</emph> contains an empty Ordering object
* (with zero size), a COLAMD ordering will be computed. Returns a new
* NonlinearOptimizer object, the original is not modified.
*/
virtual auto_ptr update(const SharedOrdering& newOrdering) const {
return auto_ptr(new LevenbergMarquardtOptimizer(*this, newOrdering)); }
/** Update the damping factor lambda, leaving all other state the same.
* Returns a new NonlinearOptimizer object, the original is not modified.
*/
virtual auto_ptr update(double newLambda) const {
return auto_ptr(new LevenbergMarquardtOptimizer(*this, newLambda)); }
/** Create a copy of the NonlinearOptimizer */
virtual auto_ptr clone() const {
return new LevenbergMarquardtOptimizer(*this);
return auto_ptr(new LevenbergMarquardtOptimizer(*this));
}
/// @}
protected:
typedef boost::shared_ptr<const std::vector<size_t> > SharedDimensions;
const SharedLMParams lmParams_;
const bool colamdOrdering_;
const SharedOrdering ordering_;
const SharedDimensions dimensions_;
const double lambda_;
LevenbergMarquardtOptimizer(const SharedGraph& graph, const SharedValues& values,
const SharedLMParams& params, double error, int iterations) :
NonlinearOptimizer(graph, values, params, error, iterations), lmParams_(params) {}
/** Protected constructor called by update() to modify the graph, values, or
* parameters. Computes a COLAMD ordering if the optimizer was originally
* constructed with an empty ordering, and if the graph is changing.
* Recomputes the dimensions if the values are changing.
*/
LevenbergMarquardtOptimizer(const LevenbergMarquardtOptimizer& original, const SharedGraph& newGraph,
const SharedValues& newValues, const SharedLMParams& newParams) :
NonlinearOptimizer(original, newGraph, newValues, newParams),
lmParams_(newParams ? newParams : original.lmParams_) {}
NonlinearOptimizer(original, newGraph, newValues, newParams),
lmParams_(newParams ? newParams : original.lmParams_),
colamdOrdering_(original.colamdOrdering_),
ordering_(newGraph && colamdOrdering_ ? graph_->orderingCOLAMD(*values_) : original.ordering_),
dimensions_(newValues ?
SharedDimensions(new std::vector<size_t>(values_->dims(*ordering_))) : original.dimensions_),
lambda_(original.lambda_) {}
/** Protected constructor called by update() to modify the ordering, computing
* a COLAMD ordering if the new ordering is empty, and also recomputing the
* dimensions.
*/
LevenbergMarquardtOptimizer(
const LevenbergMarquardtOptimizer& original, const SharedOrdering& newOrdering) :
NonlinearOptimizer(original),
lmParams_(original.lmParams_),
colamdOrdering_(!newOrdering || newOrdering->size() == 0),
ordering_(colamdOrdering_ ? graph_->orderingCOLAMD(*values_) : newOrdering),
dimensions_(new std::vector<size_t>(values_->dims(*ordering_))),
lambda_(original.lambda_) {}
/** Protected constructor called by update() to modify lambda. */
LevenbergMarquardtOptimizer(
const LevenbergMarquardtOptimizer& original, double newLambda) :
NonlinearOptimizer(original),
lmParams_(original.lmParams_),
colamdOrdering_(original.colamdOrdering_),
ordering_(original.ordering_),
dimensions_(original.dimensions_),
lambda_(newLambda) {}
private:
// Special constructor for completing an iteration, updates the values and
// error, and increments the iteration count.
LevenbergMarquardtOptimizer(const LevenbergMarquardtOptimizer& original,
const SharedValues& newValues, double newError, double newLambda) :
NonlinearOptimizer(original.graph_, newValues, original.params_, newError, original.iterations_+1),
lmParams_(original.lmParams_),
colamdOrdering_(original.colamdOrdering_),
ordering_(original.ordering_),
dimensions_(original.dimensions_),
lambda_(newLambda) {}
};
}
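As with the Gauss-Newton header, the defaulted constructor arguments make the common case terse, while the new update(double) overload exposes the damping term. A usage sketch against the interface above (parameter field and method names are taken from this header; everything else is assumed):

```cpp
// Sketch against the interface above; parameter fields and update(double)
// are taken from this header, the rest is assumed.
#include <gtsam/nonlinear/LevenbergMarquardtOptimizer.h>

void lmExample(const gtsam::NonlinearFactorGraph& graph,
               const gtsam::Values& initial) {
  gtsam::LevenbergMarquardtParams params;
  params.lambdaInitial = 1e-3;     // start more damped than the 1e-5 default
  params.lambdaUpperBound = 1e7;   // give up later than the 1e5 default
  gtsam::LevenbergMarquardtOptimizer optimizer(graph, initial, params);

  // update(double) returns a copy differing only in the damping term.
  gtsam::NonlinearOptimizer::auto_ptr damped = optimizer.update(1e-2);
}
```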

View File

@@ -25,14 +25,14 @@ using namespace std;
namespace gtsam {
/* ************************************************************************* */
auto_ptr NonlinearOptimizer::defaultOptimize() const {
NonlinearOptimizer::auto_ptr NonlinearOptimizer::defaultOptimize() const {
double currentError = this->error();
// check if we're already close enough
if(currentError <= params_->errorTol) {
if (params_->verbosity >= NonlinearOptimizerParams::ERROR)
cout << "Exiting, as error = " << currentError << " < " << errorTol << endl;
cout << "Exiting, as error = " << currentError << " < " << params_->errorTol << endl;
return this->clone();
}
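Only the early-exit test of defaultOptimize() appears in this hunk. For orientation, the stopping-test pattern the parameter names imply (maxIterations, relativeErrorTol, absoluteErrorTol, errorTol) looks roughly like the sketch below; this is an illustrative reconstruction, not the elided GTSAM code:

```cpp
// Illustrative only: the shape of an optimize loop driven by the
// NonlinearOptimizerParams fields; Optimizer is any type with error()
// and an iterate() that returns a new instance.
#include <cmath>

struct LoopParams {
  int maxIterations;
  double relativeErrorTol, absoluteErrorTol, errorTol;
};

template <typename Optimizer>
Optimizer optimizeLoop(Optimizer current, const LoopParams& p) {
  for (int i = 0; i < p.maxIterations; ++i) {
    const double before = current.error();
    Optimizer next = current.iterate();       // pure step: returns a new optimizer
    const double decrease = before - next.error();
    current = next;
    if (current.error() <= p.errorTol ||                       // total error small
        decrease <= p.absoluteErrorTol ||                      // absolute progress stalled
        decrease <= p.relativeErrorTol * std::fabs(before))    // relative progress stalled
      break;
  }
  return current;
}
```

The relative test compares the per-iteration decrease against relativeErrorTol scaled by the current error, so it fires when progress stalls even while the absolute error is still large.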

View File

@@ -27,18 +27,13 @@ namespace gtsam {
*/
class NonlinearOptimizerParams {
public:
/** See NonlinearOptimizationParams::verbosity */
/** See NonlinearOptimizerParams::verbosity */
enum Verbosity {
SILENT,
ERROR,
LAMBDA,
TRYLAMBDA,
VALUES,
DELTA,
TRYCONFIG,
TRYDELTA,
LINEAR,
DAMPED
LINEAR
};
int maxIterations; ///< The maximum number of iterations before giving up (default 100)
@@ -52,7 +47,7 @@ public:
errorTol(0.0), verbosity(SILENT) {}
virtual void print(const std::string& str = "") const {
std::cout << s << "\n";
std::cout << str << "\n";
std::cout << "relative decrease threshold: " << relativeErrorTol << "\n";
std::cout << "absolute decrease threshold: " << absoluteErrorTol << "\n";
std::cout << " total error threshold: " << errorTol << "\n";
@@ -60,7 +55,7 @@ public:
std::cout << " verbosity level: " << verbosity << std::endl;
}
virtual ~NonlinearOptimizationParams() {}
virtual ~NonlinearOptimizerParams() {}
};
@@ -112,7 +107,7 @@ class NonlinearOptimizer {
public:
/** An auto pointer to this class */
typedef std::auto_ptr<NonlinearOptimizer> auto_ptr;
typedef std::auto_ptr<const NonlinearOptimizer> auto_ptr;
/** A const shared_ptr to a NonlinearFactorGraph */
typedef boost::shared_ptr<const NonlinearFactorGraph> SharedGraph;