Simplify code and minor restructuring to support SPCG: removed the update functions so the NonlinearOptimizer (NLO) is now completely immutable, moved the elimination ordering into the parameters, and added a SuccessiveLinearizationOptimizer base class that will handle both direct and iterative linear solving, with derived classes for Gauss-Newton (GN), Levenberg-Marquardt (LM), and Dogleg (DL). This structure will let us later add a new NLO subclass for nonlinear gradient descent methods.

release/4.3a0
Richard Roberts 2012-04-04 23:20:42 +00:00
parent 9366136c78
commit d0211bb031
14 changed files with 210 additions and 302 deletions
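For orientation before the per-file diffs, here is a minimal self-contained sketch of the pattern this commit moves to. It is illustrative only, not GTSAM code: Optimizer, State, and HalvingOptimizer are made-up stand-ins for NonlinearOptimizer, NonlinearOptimizerState, and a concrete solver such as GaussNewtonOptimizer. The point is that the optimizer never changes after construction; everything that varies per iteration travels through state objects returned by iterate().

#include <boost/shared_ptr.hpp>
#include <boost/make_shared.hpp>

// All per-run quantities live in the state, mirroring NonlinearOptimizerState.
struct State {
  double error;
  unsigned int iterations;
};
typedef boost::shared_ptr<const State> SharedState;

// Immutable optimizer: iterate() returns a new state instead of mutating *this.
class Optimizer {
public:
  virtual ~Optimizer() {}
  virtual SharedState initialState(double initialError) const = 0;
  virtual SharedState iterate(const SharedState& current) const = 0;
  SharedState optimize(SharedState state) const {  // defaultOptimize analogue
    while(state->error > 1e-9 && state->iterations < 100)
      state = iterate(state);
    return state;
  }
};

// Stand-in for a concrete subclass such as GaussNewtonOptimizer.
class HalvingOptimizer : public Optimizer {
public:
  virtual SharedState initialState(double initialError) const {
    boost::shared_ptr<State> s = boost::make_shared<State>();
    s->error = initialError;
    s->iterations = 0;
    return s;
  }
  virtual SharedState iterate(const SharedState& current) const {
    boost::shared_ptr<State> next = boost::make_shared<State>();
    next->error = current->error / 2.0;  // stands in for linearize-and-solve
    next->iterations = current->iterations + 1;
    return next;  // the optimizer itself is untouched
  }
};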

View File: .cproject

@ -21,7 +21,7 @@
<folderInfo id="cdt.managedbuild.toolchain.gnu.macosx.base.1359703544.2031210194" name="/" resourcePath="">
<toolChain id="cdt.managedbuild.toolchain.gnu.macosx.base.677243255" name="cdt.managedbuild.toolchain.gnu.macosx.base" superClass="cdt.managedbuild.toolchain.gnu.macosx.base">
<targetPlatform archList="all" binaryParser="org.eclipse.cdt.core.ELF;org.eclipse.cdt.core.MachO64" id="cdt.managedbuild.target.gnu.platform.macosx.base.752782918" name="Debug Platform" osList="macosx" superClass="cdt.managedbuild.target.gnu.platform.macosx.base"/>
<builder arguments="" buildPath="${ProjDirPath}/build" command="make" id="cdt.managedbuild.target.gnu.builder.macosx.base.319933862" keepEnvironmentInBuildfile="false" managedBuildOn="false" name="Gnu Make Builder" parallelBuildOn="true" parallelizationNumber="5" superClass="cdt.managedbuild.target.gnu.builder.macosx.base"/>
<builder arguments="" buildPath="${ProjDirPath}/build" command="make" id="cdt.managedbuild.target.gnu.builder.macosx.base.319933862" keepEnvironmentInBuildfile="false" managedBuildOn="false" name="Gnu Make Builder" parallelBuildOn="true" parallelizationNumber="8" superClass="cdt.managedbuild.target.gnu.builder.macosx.base"/>
<tool id="cdt.managedbuild.tool.macosx.c.linker.macosx.base.457360678" name="MacOS X C Linker" superClass="cdt.managedbuild.tool.macosx.c.linker.macosx.base"/>
<tool id="cdt.managedbuild.tool.macosx.cpp.linker.macosx.base.1011140787" name="MacOS X C++ Linker" superClass="cdt.managedbuild.tool.macosx.cpp.linker.macosx.base">
<inputType id="cdt.managedbuild.tool.macosx.cpp.linker.input.1032375444" superClass="cdt.managedbuild.tool.macosx.cpp.linker.input">

View File: .project

@ -23,7 +23,7 @@
</dictionary>
<dictionary>
<key>org.eclipse.cdt.make.core.buildArguments</key>
<value>-j5</value>
<value>-j8</value>
</dictionary>
<dictionary>
<key>org.eclipse.cdt.make.core.buildCommand</key>

View File: gtsam/nonlinear/DirectOptimizer.cpp (deleted)

@ -1,58 +0,0 @@
/* ----------------------------------------------------------------------------
* GTSAM Copyright 2010, Georgia Tech Research Corporation,
* Atlanta, Georgia 30332-0415
* All Rights Reserved
* Authors: Frank Dellaert, et al. (see THANKS for the full author list)
* See LICENSE for the license information
* -------------------------------------------------------------------------- */
/**
* @file DirectOptimizer.cpp
* @brief
* @author Richard Roberts
* @date Apr 1, 2012
*/
#include <gtsam/nonlinear/NonlinearOptimizer.h>
namespace gtsam {
/* ************************************************************************* */
NonlinearOptimizer::shared_ptr DirectOptimizer::update(const SharedGraph& newGraph) const {
// Call update on the base class
shared_ptr result = boost::static_pointer_cast<DirectOptimizer>(NonlinearOptimizer::update(newGraph));
// Need to recompute the ordering if we're using an automatic COLAMD ordering
if(result->colamdOrdering_) {
result->ordering_ = result->graph_->orderingCOLAMD(*result->values_);
}
return result;
}
/* ************************************************************************* */
DirectOptimizer::shared_ptr DirectOptimizer::update(const SharedOrdering& newOrdering) const {
return update(graph_, newOrdering);
}
/* ************************************************************************* */
DirectOptimizer::shared_ptr DirectOptimizer::update(const SharedGraph& newGraph, const SharedOrdering& newOrdering) const {
// Call update on base class
shared_ptr result = boost::static_pointer_cast<DirectOptimizer>(NonlinearOptimizer::update(newGraph));
if(newOrdering && newOrdering->size() > 0) {
result->colamdOrdering_ = false;
result->ordering_ = newOrdering;
} else {
result->colamdOrdering_ = true;
result->ordering_ = result->graph_->orderingCOLAMD(*result->values_);
}
return result;
}
} /* namespace gtsam */

View File: gtsam/nonlinear/DoglegOptimizer.cpp

@ -26,49 +26,59 @@ using namespace std;
namespace gtsam {
NonlinearOptimizer::SharedState DoglegOptimizer::iterate(const SharedState& current) const {
/* ************************************************************************* */
NonlinearOptimizer::SharedState DoglegOptimizer::iterate(const NonlinearOptimizer::SharedState& current) const {
// Linearize graph
GaussianFactorGraph::shared_ptr linear = graph_->linearize(*values_, *ordering_);
const DoglegState& doglegState = dynamic_cast<const DoglegState&>(*current);
const Ordering& ordering = *this->ordering(current->values);  // 'this->' avoids shadowing by the local
GaussianFactorGraph::shared_ptr linear = graph_->linearize(current->values, ordering);
// Check whether to use QR
bool useQR;
if(dlParams_->factorization == DoglegParams::LDL)
if(params_->factorization == DoglegParams::LDL)
useQR = false;
else if(dlParams_->factorization == DoglegParams::QR)
else if(params_->factorization == DoglegParams::QR)
useQR = true;
else
throw runtime_error("Optimization parameter is invalid: DoglegParams::factorization");
// Pull out parameters we'll use
const bool dlVerbose = (dlParams_->dlVerbosity > DoglegParams::SILENT);
const bool dlVerbose = (params_->dlVerbosity > DoglegParams::SILENT);
// Do Dogleg iteration with either Multifrontal or Sequential elimination
DoglegOptimizerImpl::IterationResult result;
if(dlParams_->elimination == DoglegParams::MULTIFRONTAL) {
if(params_->elimination == DoglegParams::MULTIFRONTAL) {
GaussianBayesTree::shared_ptr bt = GaussianMultifrontalSolver(*linear, useQR).eliminate();
result = DoglegOptimizerImpl::Iterate(delta_, DoglegOptimizerImpl::ONE_STEP_PER_ITERATION, *bt, *graph_, *values(), *ordering_, error(), dlVerbose);
result = DoglegOptimizerImpl::Iterate(doglegState.Delta, DoglegOptimizerImpl::ONE_STEP_PER_ITERATION, *bt, *graph_, current->values, ordering, current->error, dlVerbose);
} else if(dlParams_->elimination == DoglegParams::SEQUENTIAL) {
} else if(params_->elimination == DoglegParams::SEQUENTIAL) {
GaussianBayesNet::shared_ptr bn = GaussianSequentialSolver(*linear, useQR).eliminate();
result = DoglegOptimizerImpl::Iterate(delta_, DoglegOptimizerImpl::ONE_STEP_PER_ITERATION, *bn, *graph_, *values(), *ordering_, error(), dlVerbose);
result = DoglegOptimizerImpl::Iterate(doglegState.Delta, DoglegOptimizerImpl::ONE_STEP_PER_ITERATION, *bn, *graph_, current->values, ordering, current->error, dlVerbose);
} else {
throw runtime_error("Optimization parameter is invalid: DoglegParams::elimination");
}
// Update values
SharedValues newValues = boost::make_shared<Values>(values_->retract(result.dx_d, *ordering_));
// Create new state with new values and new error
DoglegOptimizer::SharedState newState = boost::make_shared<DoglegState>();
newState->values = newValues;
newState->error = result.f_error;
// Update values
newState->values = current->values.retract(result.dx_d, ordering);
newState->error = result.f_error;
newState->iterations = current->iterations + 1;
newState->Delta = result.Delta;
return newState;
}
/* ************************************************************************* */
NonlinearOptimizer::SharedState DoglegOptimizer::initialState(const Values& initialValues) const {
SharedState initial = boost::make_shared<DoglegState>();
defaultInitialState(initialValues, *initial);
initial->Delta = params_->deltaInitial;
return initial;
}
} /* namespace gtsam */

View File: gtsam/nonlinear/DoglegOptimizer.h

@ -18,7 +18,7 @@
#pragma once
#include <gtsam/nonlinear/DirectOptimizer.h>
#include <gtsam/nonlinear/SuccessiveLinearizationOptimizer.h>
namespace gtsam {
@ -27,7 +27,7 @@ namespace gtsam {
* common to all nonlinear optimization algorithms. This class also contains
* all of those parameters.
*/
class DoglegParams : public DirectOptimizerParams {
class DoglegParams : public SuccessiveLinearizationParams {
public:
/** See DoglegParams::dlVerbosity */
enum DLVerbosity {
@ -44,7 +44,7 @@ public:
virtual ~DoglegParams() {}
virtual void print(const std::string& str = "") const {
DirectOptimizerParams::print(str);
SuccessiveLinearizationParams::print(str);
std::cout << " deltaInitial: " << deltaInitial << "\n";
std::cout.flush();
}
@ -53,7 +53,7 @@ public:
/**
* State for DoglegOptimizer
*/
class DoglegState : public NonlinearOptimizerState {
class DoglegState : public SuccessiveLinearizationState {
public:
double Delta;
@ -63,11 +63,11 @@ public:
/**
* This class performs Dogleg nonlinear optimization
*/
class DoglegOptimizer : public DirectOptimizer {
class DoglegOptimizer : public SuccessiveLinearizationOptimizer {
public:
typedef boost::shared_ptr<DoglegParams> SharedParams;
typedef boost::shared_ptr<const DoglegParams> SharedParams;
typedef boost::shared_ptr<DoglegState> SharedState;
typedef boost::shared_ptr<DoglegOptimizer> shared_ptr;
@ -85,7 +85,7 @@ public:
DoglegOptimizer(const NonlinearFactorGraph& graph,
const DoglegParams& params = DoglegParams(),
const Ordering& ordering = Ordering()) :
DirectOptimizer(SharedGraph(new NonlinearFactorGraph(graph))),
SuccessiveLinearizationOptimizer(SharedGraph(new NonlinearFactorGraph(graph))),
params_(new DoglegParams(params)) {}
/** Standard constructor, requires a nonlinear factor graph, initial
@ -96,9 +96,8 @@ public:
* @param values The initial variable assignments
* @param params The optimization parameters
*/
DoglegOptimizer(const NonlinearFactorGraph& graph,
const Ordering& ordering) :
DirectOptimizer(SharedGraph(new NonlinearFactorGraph(graph))),
DoglegOptimizer(const NonlinearFactorGraph& graph, const Ordering& ordering) :
SuccessiveLinearizationOptimizer(SharedGraph(new NonlinearFactorGraph(graph))),
params_(new DoglegParams()) {}
/** Standard constructor, requires a nonlinear factor graph, initial
@ -110,14 +109,11 @@ public:
DoglegOptimizer(const SharedGraph& graph,
const DoglegParams& params = DoglegParams(),
const SharedOrdering& ordering = SharedOrdering()) :
DirectOptimizer(graph),
SuccessiveLinearizationOptimizer(graph),
params_(new DoglegParams(params)) {}
/** Access the parameters */
virtual const NonlinearOptimizer::SharedParams& params() const { return params_; }
/** Access the parameters */
const DoglegOptimizer::SharedParams& params() const { return params_; }
virtual NonlinearOptimizer::SharedParams params() const { return params_; }
/// @}
@ -131,19 +127,18 @@ public:
* containing the updated variable assignments, which may be retrieved with
* values().
*/
virtual NonlinearOptimizer::SharedState iterate(const SharedState& current) const;
virtual NonlinearOptimizer::SharedState iterate(const NonlinearOptimizer::SharedState& current) const;
/** Create a copy of the NonlinearOptimizer */
virtual NonlinearOptimizer::shared_ptr clone() const {
return boost::make_shared<DoglegOptimizer>(*this); }
/** Create an initial state with the specified variable assignment values and
* all other default state.
*/
virtual NonlinearOptimizer::SharedState initialState(const Values& initialValues) const;
/// @}
protected:
const SharedParams params_;
virtual void setParams(const NonlinearOptimizer::SharedParams& newParams) { params_ = newParams; }
SharedParams params_;
};
}

View File: gtsam/nonlinear/GaussNewtonOptimizer.cpp

@ -24,6 +24,7 @@ using namespace std;
namespace gtsam {
/* ************************************************************************* */
NonlinearOptimizer::auto_ptr GaussNewtonOptimizer::iterate() const {
// Linearize graph
@ -61,4 +62,11 @@ NonlinearOptimizer::auto_ptr GaussNewtonOptimizer::iterate() const {
return newOptimizer;
}
/* ************************************************************************* */
NonlinearOptimizer::SharedState GaussNewtonOptimizer::initialState(const Values& initialValues) const {
SharedState initial = boost::make_shared<GaussNewtonState>();
defaultInitialState(initialValues, *initial);
return initial;
}
} /* namespace gtsam */

View File: gtsam/nonlinear/GaussNewtonOptimizer.h

@ -18,65 +18,28 @@
#pragma once
#include <gtsam/nonlinear/NonlinearOptimizer.h>
#include <gtsam/nonlinear/SuccessiveLinearizationOptimizer.h>
namespace gtsam {
/** Parameters for Gauss-Newton optimization, inherits from
* NonlinearOptimizationParams.
*/
class GaussNewtonParams : public NonlinearOptimizerParams {
public:
/** See GaussNewtonParams::elimination */
enum Elimination {
MULTIFRONTAL,
SEQUENTIAL
class GaussNewtonParams : public SuccessiveLinearizationParams {
};
/** See GaussNewtonParams::factorization */
enum Factorization {
LDL,
QR,
};
Elimination elimination; ///< The elimination algorithm to use (default: MULTIFRONTAL)
Factorization factorization; ///< The numerical factorization (default: LDL)
GaussNewtonParams() :
elimination(MULTIFRONTAL), factorization(LDL) {}
~GaussNewtonParams() {}
virtual void print(const std::string& str = "") const {
NonlinearOptimizerParams::print(str);
if(elimination == MULTIFRONTAL)
std::cout << " elimination method: MULTIFRONTAL\n";
else if(elimination == SEQUENTIAL)
std::cout << " elimination method: SEQUENTIAL\n";
else
std::cout << " elimination method: (invalid)\n";
if(factorization == LDL)
std::cout << " factorization method: LDL\n";
else if(factorization == QR)
std::cout << " factorization method: QR\n";
else
std::cout << " factorization method: (invalid)\n";
std::cout.flush();
}
class GaussNewtonState : public SuccessiveLinearizationState {
};
/**
* This class performs Gauss-Newton nonlinear optimization
* TODO: use make_shared
*/
class GaussNewtonOptimizer : public NonlinearOptimizer {
class GaussNewtonOptimizer : public SuccessiveLinearizationOptimizer {
public:
typedef boost::shared_ptr<const GaussNewtonParams> SharedGNParams;
typedef boost::shared_ptr<const Ordering> SharedOrdering;
typedef boost::shared_ptr<const GaussNewtonParams> SharedParams;
/// @name Standard interface
/// @{
@ -89,17 +52,11 @@ public:
* @param values The initial variable assignments
* @param params The optimization parameters
*/
GaussNewtonOptimizer(const NonlinearFactorGraph& graph, const Values& values,
GaussNewtonOptimizer(const NonlinearFactorGraph& graph,
const GaussNewtonParams& params = GaussNewtonParams(),
const Ordering& ordering = Ordering()) :
NonlinearOptimizer(
SharedGraph(new NonlinearFactorGraph(graph)),
SharedValues(new Values(values)),
SharedGNParams(new GaussNewtonParams(params))),
gnParams_(boost::static_pointer_cast<const GaussNewtonParams>(params_)),
colamdOrdering_(ordering.size() == 0),
ordering_(colamdOrdering_ ?
graph_->orderingCOLAMD(*values_) : Ordering::shared_ptr(new Ordering(ordering))) {}
SuccessiveLinearizationOptimizer(SharedGraph(new NonlinearFactorGraph(graph))),
params_(new GaussNewtonParams(params)) {}
/** Standard constructor, requires a nonlinear factor graph, initial
* variable assignments, and optimization parameters. For convenience this
@ -109,16 +66,9 @@ public:
* @param values The initial variable assignments
* @param params The optimization parameters
*/
GaussNewtonOptimizer(const NonlinearFactorGraph& graph, const Values& values,
const Ordering& ordering) :
NonlinearOptimizer(
SharedGraph(new NonlinearFactorGraph(graph)),
SharedValues(new Values(values)),
SharedGNParams(new GaussNewtonParams())),
gnParams_(boost::static_pointer_cast<const GaussNewtonParams>(params_)),
colamdOrdering_(ordering.size() == 0),
ordering_(colamdOrdering_ ?
graph_->orderingCOLAMD(*values_) : Ordering::shared_ptr(new Ordering(ordering))) {}
GaussNewtonOptimizer(const NonlinearFactorGraph& graph, const Ordering& ordering) :
SuccessiveLinearizationOptimizer(SharedGraph(new NonlinearFactorGraph(graph))),
params_(new GaussNewtonParams()) {}
/** Standard constructor, requires a nonlinear factor graph, initial
* variable assignments, and optimization parameters.
@ -126,16 +76,14 @@ public:
* @param values The initial variable assignments
* @param params The optimization parameters
*/
GaussNewtonOptimizer(const SharedGraph& graph, const SharedValues& values,
GaussNewtonOptimizer(const SharedGraph& graph,
const GaussNewtonParams& params = GaussNewtonParams(),
const SharedOrdering& ordering = SharedOrdering()) :
NonlinearOptimizer(graph, values, SharedGNParams(new GaussNewtonParams(params))),
gnParams_(boost::static_pointer_cast<const GaussNewtonParams>(params_)),
colamdOrdering_(!ordering || ordering->size() == 0),
ordering_(colamdOrdering_ ? graph_->orderingCOLAMD(*values_) : ordering) {}
SuccessiveLinearizationOptimizer(graph),
params_(new GaussNewtonParams(params)) {}
/** Access the variable ordering used by this optimizer */
const SharedOrdering& ordering() const { return ordering_; }
/** Access the parameters */
virtual NonlinearOptimizer::SharedParams params() const { return params_; }
/// @}
@ -149,74 +97,18 @@ public:
* containing the updated variable assignments, which may be retrieved with
* values().
*/
virtual auto_ptr iterate() const;
virtual NonlinearOptimizer::SharedState iterate(const NonlinearOptimizer::SharedState& current) const;
/** Update the graph, values, and/or parameters, leaving all other state
* the same. Any of these that are empty shared pointers are left unchanged
* in the returned optimizer object. Returns a new updated
* NonlinearOptimizer object, the original is not modified.
/** Create an initial state with the specified variable assignment values and
* all other default state.
*/
virtual auto_ptr update(
const SharedGraph& newGraph,
const SharedValues& newValues = SharedValues(),
const SharedParams& newParams = SharedParams()) const {
return auto_ptr(new GaussNewtonOptimizer(*this, newGraph, newValues,
boost::dynamic_pointer_cast<const GaussNewtonParams>(newParams)));
}
/** Update the ordering, leaving all other state the same. If newOrdering
* is an empty pointer <emph>or</emph> contains an empty Ordering object
* (with zero size), a COLAMD ordering will be computed. Returns a new
* NonlinearOptimizer object, the original is not modified.
*/
virtual auto_ptr update(const SharedOrdering& newOrdering) const {
return auto_ptr(new GaussNewtonOptimizer(*this, newOrdering)); }
/** Create a copy of the NonlinearOptimizer */
virtual auto_ptr clone() const {
return auto_ptr(new GaussNewtonOptimizer(*this));
}
virtual NonlinearOptimizer::SharedState initialState(const Values& initialValues) const;
/// @}
protected:
const SharedGNParams gnParams_;
const bool colamdOrdering_;
const SharedOrdering ordering_;
/** Protected constructor called by update() to modify the graph, values, or
* parameters. Computes a COLAMD ordering if the optimizer was originally
* constructed with an empty ordering, and if the graph is changing.
*/
GaussNewtonOptimizer(const GaussNewtonOptimizer& original, const SharedGraph& newGraph,
const SharedValues& newValues, const SharedGNParams& newParams) :
NonlinearOptimizer(original, newGraph, newValues, newParams),
gnParams_(newParams ? newParams : original.gnParams_),
colamdOrdering_(original.colamdOrdering_),
ordering_(newGraph && colamdOrdering_ ? graph_->orderingCOLAMD(*values_) : original.ordering_) {}
/** Protected constructor called by update() to modify the ordering, computing
* a COLAMD ordering if the new ordering is empty, and also recomputing the
* dimensions.
*/
GaussNewtonOptimizer(
const GaussNewtonOptimizer& original, const SharedOrdering& newOrdering) :
NonlinearOptimizer(original),
gnParams_(original.gnParams_),
colamdOrdering_(!newOrdering || newOrdering->size() == 0),
ordering_(colamdOrdering_ ? graph_->orderingCOLAMD(*values_) : newOrdering) {}
private:
// Special constructor for completing an iteration, updates the values and
// error, and increments the iteration count.
GaussNewtonOptimizer(const GaussNewtonOptimizer& original,
const SharedValues& newValues, double newError) :
NonlinearOptimizer(original.graph_, newValues, original.params_, newError, original.iterations_+1),
gnParams_(original.gnParams_),
colamdOrdering_(original.colamdOrdering_),
ordering_(original.ordering_) {}
const SharedParams params_;
};
}

View File: gtsam/nonlinear/LevenbergMarquardtOptimizer.cpp

@ -26,7 +26,8 @@ using namespace std;
namespace gtsam {
NonlinearOptimizer::auto_ptr LevenbergMarquardtOptimizer::iterate(const SharedState& current) const {
/* ************************************************************************* */
NonlinearOptimizer::SharedState LevenbergMarquardtOptimizer::iterate(const NonlinearOptimizer::SharedState& current) const {
// Linearize graph
GaussianFactorGraph::shared_ptr linear = graph_->linearize(*values_, *ordering_);
@ -141,4 +142,12 @@ NonlinearOptimizer::auto_ptr LevenbergMarquardtOptimizer::iterate(const SharedSt
return newState;
}
/* ************************************************************************* */
NonlinearOptimizer::SharedState LevenbergMarquardtOptimizer::initialState(const Values& initialValues) const {
SharedState initial = boost::make_shared<LevenbergMarquardtState>();
defaultInitialState(initialValues, *initial);
initial->lambda = params_->lambdaInitial;
return initial;
}
} /* namespace gtsam */

View File: gtsam/nonlinear/LevenbergMarquardtOptimizer.h

@ -18,7 +18,7 @@
#pragma once
#include <gtsam/nonlinear/DirectOptimizer.h>
#include <gtsam/nonlinear/SuccessiveLinearizationOptimizer.h>
namespace gtsam {
@ -27,7 +27,7 @@ namespace gtsam {
* common to all nonlinear optimization algorithms. This class also contains
* all of those parameters.
*/
class LevenbergMarquardtParams : public DirectOptimizerParams {
class LevenbergMarquardtParams : public SuccessiveLinearizationParams {
public:
/** See LevenbergMarquardtParams::lmVerbosity */
enum LMVerbosity {
@ -50,7 +50,7 @@ public:
virtual ~LevenbergMarquardtParams() {}
virtual void print(const std::string& str = "") const {
DirectOptimizerParams::print(str);
SuccessiveLinearizationParams::print(str);
std::cout << " lambdaInitial: " << lambdaInitial << "\n";
std::cout << " lambdaFactor: " << lambdaFactor << "\n";
std::cout << " lambdaUpperBound: " << lambdaUpperBound << "\n";
@ -61,7 +61,7 @@ public:
/**
* State for LevenbergMarquardtOptimizer
*/
class LevenbergMarquardtState : public NonlinearOptimizerState {
class LevenbergMarquardtState : public SuccessiveLinearizationState {
public:
double lambda;
@ -72,11 +72,11 @@ public:
* This class performs Levenberg-Marquardt nonlinear optimization
* TODO: use make_shared
*/
class LevenbergMarquardtOptimizer : public DirectOptimizer {
class LevenbergMarquardtOptimizer : public SuccessiveLinearizationOptimizer {
public:
typedef boost::shared_ptr<LevenbergMarquardtParams> SharedParams;
typedef boost::shared_ptr<const LevenbergMarquardtParams> SharedParams;
typedef boost::shared_ptr<LevenbergMarquardtState> SharedState;
typedef boost::shared_ptr<LevenbergMarquardtOptimizer> shared_ptr;
@ -94,7 +94,7 @@ public:
LevenbergMarquardtOptimizer(const NonlinearFactorGraph& graph,
const LevenbergMarquardtParams& params = LevenbergMarquardtParams(),
const Ordering& ordering = Ordering()) :
DirectOptimizer(SharedGraph(new NonlinearFactorGraph(graph))),
SuccessiveLinearizationOptimizer(SharedGraph(new NonlinearFactorGraph(graph))),
params_(new LevenbergMarquardtParams(params)) {}
/** Standard constructor, requires a nonlinear factor graph, initial
@ -107,7 +107,7 @@ public:
*/
LevenbergMarquardtOptimizer(const NonlinearFactorGraph& graph,
const Ordering& ordering) :
DirectOptimizer(SharedGraph(new NonlinearFactorGraph(graph))),
SuccessiveLinearizationOptimizer(SharedGraph(new NonlinearFactorGraph(graph))),
params_(new LevenbergMarquardtParams()) {}
/** Standard constructor, requires a nonlinear factor graph, initial
@ -119,14 +119,11 @@ public:
LevenbergMarquardtOptimizer(const SharedGraph& graph,
const LevenbergMarquardtParams& params = LevenbergMarquardtParams(),
const SharedOrdering& ordering = SharedOrdering()) :
DirectOptimizer(graph),
SuccessiveLinearizationOptimizer(graph),
params_(new LevenbergMarquardtParams(params)) {}
/** Access the parameters */
virtual const NonlinearOptimizer::SharedParams& params() const { return params_; }
/** Access the parameters */
const LevenbergMarquardtOptimizer::SharedParams& params() const { return params_; }
virtual NonlinearOptimizer::SharedParams params() const { return params_; }
/// @}
@ -140,11 +137,12 @@ public:
* containing the updated variable assignments, which may be retrieved with
* values().
*/
virtual NonlinearOptimizer::SharedState iterate(const SharedState& current) const;
virtual NonlinearOptimizer::SharedState iterate(const NonlinearOptimizer::SharedState& current) const;
/** Create a copy of the NonlinearOptimizer */
virtual NonlinearOptimizer::shared_ptr clone() const {
return boost::make_shared<LevenbergMarquardtOptimizer>(*this); }
/** Create an initial state with the specified variable assignment values and
* all other default state.
*/
virtual NonlinearOptimizer::SharedState initialState(const Values& initialValues) const;
/// @}
@ -152,7 +150,7 @@ protected:
typedef boost::shared_ptr<const std::vector<size_t> > SharedDimensions;
const SharedParams params_;
SharedParams params_;
const SharedDimensions dimensions_;
};

View File: gtsam/nonlinear/NonlinearOptimizer.cpp

@ -82,6 +82,13 @@ NonlinearOptimizer::SharedState NonlinearOptimizer::defaultOptimize(const Shared
return next;
}
/* ************************************************************************* */
void NonlinearOptimizer::defaultInitialState(const Values& initialValues, NonlinearOptimizerState& initial) const {
initial.values = initialValues;
initial.error = graph_->error(initialValues);
initial.iterations = 0;
}
/* ************************************************************************* */
bool checkConvergence(double relativeErrorTreshold, double absoluteErrorTreshold,
double errorThreshold, double currentError, double newError,

View File: gtsam/nonlinear/NonlinearOptimizer.h

@ -78,8 +78,11 @@ public:
/** The number of optimization iterations performed. */
unsigned int iterations;
/** Virtual destructor to enable RTTI */
/** Virtual destructor */
virtual ~NonlinearOptimizerState() {}
/** Clone the state (i.e. make a copy of the derived class) */
virtual boost::shared_ptr<NonlinearOptimizerState> clone() = 0;
};
@ -185,15 +188,19 @@ public:
*/
virtual SharedState optimize(const SharedState& initial) const { return defaultOptimize(initial); }
SharedState optimize(const Values& initialization) const { return optimize(initialState(initialization)); }
/** Shortcut to optimize and return the resulting Values of the maximum-
* likelihood estimate. To access statistics and information such as the
* final error and number of iterations, use optimize() instead.
* @return The maximum-likelihood estimate.
*/
virtual SharedValues optimized(const SharedState& initial) const { return this->optimize(initial)->values(); }
virtual Values optimized(const SharedState& initial) const { return this->optimize(initial)->values; }
Values optimized(const Values& initialization) const { return optimized(initialState(initialization)); }
/** Retrieve the parameters. */
virtual const SharedParams& params() const = 0;
virtual SharedParams params() const = 0;
/// @}
@ -209,20 +216,10 @@ public:
*/
virtual SharedState iterate(const SharedState& current) const = 0;
/** Update the graph, leaving all other parts of the optimizer unchanged,
* returns a new updated NonlinearOptimzier object, the original is not
* modified.
/** Create an initial state from a variable assignment Values, with all
* other state values at their default initial values.
*/
virtual shared_ptr update(const SharedGraph& newGraph) const;
/** Update the parameters, leaving all other parts of the optimizer unchanged,
* returns a new updated NonlinearOptimzier object, the original is not
* modified.
*/
const shared_ptr update(const SharedParams& newParams) const;
/** Create a copy of the NonlinearOptimizer */
virtual shared_ptr clone() const = 0;
virtual SharedState initialState(const Values& initialValues) const = 0;
/// @}
@ -235,13 +232,12 @@ protected:
*/
SharedState defaultOptimize(const SharedState& initial) const;
/** Modify the parameters in-place (not for external use) */
virtual void setParams(const NonlinearOptimizer::SharedParams& newParams);
/** Initialize a state from the given values: copies the values, computes the error, and zeroes the iteration count */
void defaultInitialState(const Values& initialValues, NonlinearOptimizerState& initial) const;
/** Constructor for initial construction of base classes.
*/
NonlinearOptimizer(const SharedGraph& graph) :
graph_(graph) {}
NonlinearOptimizer(const SharedGraph& graph) : graph_(graph) {}
};
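Assuming the state-based interface above, a caller now constructs the optimizer once and threads state through optimize(). A hedged sketch follows; the solve() wrapper, include path, and printing are illustrative, not part of the diff:

#include <iostream>
#include <gtsam/nonlinear/GaussNewtonOptimizer.h>

gtsam::Values solve(const gtsam::NonlinearFactorGraph& graph,
                    const gtsam::Values& initial) {
  gtsam::GaussNewtonOptimizer optimizer(graph);  // immutable after construction
  gtsam::NonlinearOptimizer::SharedState state =
      optimizer.optimize(optimizer.initialState(initial));
  // Per-run results come from the state, not the optimizer:
  std::cout << "error " << state->error << " after "
            << state->iterations << " iterations\n";
  return state->values;
}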

View File: gtsam/nonlinear/SuccessiveLinearizationOptimizer.cpp (new)

@ -0,0 +1,41 @@
/* ----------------------------------------------------------------------------
* GTSAM Copyright 2010, Georgia Tech Research Corporation,
* Atlanta, Georgia 30332-0415
* All Rights Reserved
* Authors: Frank Dellaert, et al. (see THANKS for the full author list)
* See LICENSE for the license information
* -------------------------------------------------------------------------- */
/**
* @file SuccessiveLinearizationOptimizer.cpp
* @brief
* @author Richard Roberts
* @date Apr 1, 2012
*/
#include <gtsam/nonlinear/SuccessiveLinearizationOptimizer.h>
#include <gtsam/nonlinear/NonlinearOptimizer.h>
namespace gtsam {
/* ************************************************************************* */
const SuccessiveLinearizationOptimizer::SharedOrdering& SuccessiveLinearizationOptimizer::ordering(const Values& values) const {
if(!ordering_) {
SharedParams slParams =
boost::dynamic_pointer_cast<const SuccessiveLinearizationParams>(this->params());
// If no ordering was supplied in the parameters, compute a COLAMD ordering
if(!slParams->ordering)
ordering_ = graph_->orderingCOLAMD(values);
else
ordering_ = slParams->ordering;
}
return ordering_;
}
} /* namespace gtsam */
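ordering() computes the COLAMD ordering on first use and caches it; because the method is const, the cache member is declared mutable in the header. Below is a standalone sketch of this lazy-cache idiom, with a made-up Solver class standing in for SuccessiveLinearizationOptimizer:

#include <boost/shared_ptr.hpp>
#include <boost/make_shared.hpp>
#include <vector>

// A const accessor fills a mutable member the first time it is called.
class Solver {
public:
  const boost::shared_ptr<const std::vector<int> >& ordering() const {
    if(!ordering_)                        // first call: compute and cache
      ordering_ = computeColamdLikeOrdering();
    return ordering_;                     // later calls: reuse the cache
  }
private:
  boost::shared_ptr<const std::vector<int> > computeColamdLikeOrdering() const {
    return boost::make_shared<std::vector<int> >(3, 0);  // placeholder work
  }
  mutable boost::shared_ptr<const std::vector<int> > ordering_;  // the cache
};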

View File: gtsam/nonlinear/SuccessiveLinearizationOptimizer.h (renamed from DirectOptimizer.h)

@ -10,7 +10,7 @@
* -------------------------------------------------------------------------- */
/**
* @file DirectOptimizer.h
* @file SuccessiveLinearizationOptimizer.h
* @brief
* @author Richard Roberts
* @date Apr 1, 2012
@ -18,31 +18,65 @@
#pragma once
#include <boost/variant.hpp>
#include <gtsam/nonlinear/NonlinearOptimizer.h>
namespace gtsam {
class DirectOptimizerParams : public NonlinearOptimizerParams {
class SuccessiveLinearizationParams;
class SuccessiveLinearizationState;
/**
* This is an intermediate base class for NonlinearOptimizers that use direct
* solving, i.e. factorization. This class is here to reduce code duplication
* for storing the ordering, having factorization and elimination parameters,
* etc.
*/
class SuccessiveLinearizationOptimizer : public NonlinearOptimizer {
public:
typedef boost::shared_ptr<const SuccessiveLinearizationParams> SharedParams;
typedef boost::shared_ptr<const SuccessiveLinearizationOptimizer> shared_ptr;
protected:
typedef boost::shared_ptr<const Ordering> SharedOrdering;
SuccessiveLinearizationOptimizer(const SharedGraph& graph) : NonlinearOptimizer(graph) {}
const SharedOrdering& ordering(const Values& values) const;
private:
mutable SharedOrdering ordering_; ///< Computed lazily in ordering() const, hence mutable
};
class SuccessiveLinearizationParams : public NonlinearOptimizerParams {
public:
/** See SuccessiveLinearizationParams::elimination */
enum Elimination {
MULTIFRONTAL,
SEQUENTIAL
};
/** See DoglegParams::factorization */
/** See SuccessiveLinearizationParams::factorization */
enum Factorization {
LDL,
QR,
};
/** See SuccessiveLinearizationParams::ordering */
typedef boost::shared_ptr<const Ordering> SharedOrdering;
Elimination elimination; ///< The elimination algorithm to use (default: MULTIFRONTAL)
Factorization factorization; ///< The numerical factorization (default: LDL)
SharedOrdering ordering; ///< The variable elimination ordering, or empty to use COLAMD (default: empty)
DirectOptimizerParams() :
SuccessiveLinearizationParams() :
elimination(MULTIFRONTAL), factorization(LDL) {}
virtual ~DirectOptimizerParams() {}
virtual ~SuccessiveLinearizationParams() {}
virtual void print(const std::string& str = "") const {
NonlinearOptimizerParams::print(str);
@ -60,40 +94,16 @@ public:
else
std::cout << " factorization method: (invalid)\n";
if(ordering)
std::cout << " ordering: custom\n";
else
std::cout << " ordering: COLAMD\n";
std::cout.flush();
}
};
/**
* This is an intermediate base class for NonlinearOptimizers that use direct
* solving, i.e. factorization. This class is here to reduce code duplication
* for storing the ordering, having factorization and elimination parameters,
* etc.
*/
class DirectOptimizer : public NonlinearOptimizer {
public:
typedef boost::shared_ptr<const Ordering> SharedOrdering;
typedef boost::shared_ptr<NonlinearOptimizer> shared_ptr;
virtual NonlinearOptimizer::shared_ptr update(const SharedGraph& newGraph) const;
virtual shared_ptr update(const SharedOrdering& newOrdering) const;
virtual shared_ptr update(const SharedGraph& newGraph, const SharedOrdering& newOrdering) const;
/** Access the variable ordering used by this optimizer */
const SharedOrdering& ordering() const { return ordering_; }
protected:
const bool colamdOrdering_;
const SharedOrdering ordering_;
DirectOptimizer(const SharedGraph& graph,
const SharedOrdering ordering = SharedOrdering()) :
NonlinearOptimizer(graph),
colamdOrdering_(!ordering || ordering->size() == 0),
ordering_(colamdOrdering_ ? graph_->orderingCOLAMD(*values_) : ordering) {}
class SuccessiveLinearizationState : public NonlinearOptimizerState {
};
} /* namespace gtsam */
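Since SuccessiveLinearizationParams now carries the ordering, a custom elimination order would be supplied through the params rather than through a constructor argument. A hypothetical sketch, where makeOptimizer and customOrdering are invented names:

#include <boost/make_shared.hpp>
#include <gtsam/nonlinear/LevenbergMarquardtOptimizer.h>

gtsam::LevenbergMarquardtOptimizer makeOptimizer(
    const gtsam::NonlinearFactorGraph& graph,
    const gtsam::Ordering& customOrdering) {
  gtsam::LevenbergMarquardtParams params;
  params.elimination = gtsam::LevenbergMarquardtParams::MULTIFRONTAL;  // default
  params.factorization = gtsam::LevenbergMarquardtParams::LDL;         // default
  // Leave params.ordering empty to get COLAMD; set it to force an ordering:
  params.ordering = boost::make_shared<gtsam::Ordering>(customOrdering);
  return gtsam::LevenbergMarquardtOptimizer(graph, params);
}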

View File: gtsam/slam/pose2SLAM.h

@ -97,7 +97,7 @@ namespace pose2SLAM {
/// Optimize
Values optimize(const Values& initialEstimate) {
return *LevenbergMarquardtOptimizer(*this, initialEstimate).optimized();
return LevenbergMarquardtOptimizer(*this).optimized(initialEstimate);
}
};
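A hypothetical driver for the updated wrapper; the include path and the pose2SLAM::Graph type follow GTSAM convention but are assumptions here:

#include <gtsam/slam/pose2SLAM.h>

gtsam::Values run(pose2SLAM::Graph& graph, const gtsam::Values& initialEstimate) {
  // The graph holds no optimizer state; the initial values are passed at
  // solve time and an immutable LM optimizer is built internally.
  return graph.optimize(initialEstimate);
}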