Simplify code and minor restructuring to support SPCG: removed the update functions so NonlinearOptimizer (NLO) is now completely immutable, moved the ordering into the parameters, and added a SuccessiveLinearizationOptimizer base class that will do direct and iterative linear solving, with derived classes for GN, LM, and DL. This structure will allow us to later add a new NLO subclass for nonlinear gradient descent methods.
parent 9366136c78
commit d0211bb031
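For orientation, here is a minimal sketch of how the new immutable API is meant to be driven, assembled from the diffs below (hypothetical usage, not code from this commit; myGraph and initialEstimate are placeholders):

    #include <gtsam/nonlinear/DoglegOptimizer.h>
    using namespace gtsam;

    // The optimizer no longer carries mutable values/error; that state lives in
    // NonlinearOptimizerState objects produced by initialState()/iterate().
    DoglegOptimizer optimizer(myGraph);
    // optimized(Values) builds the initial state, runs optimize(), and
    // extracts the final Values (see the NonlinearOptimizer.h hunk below).
    Values result = optimizer.optimized(initialEstimate);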
.cproject
@@ -21,7 +21,7 @@
 <folderInfo id="cdt.managedbuild.toolchain.gnu.macosx.base.1359703544.2031210194" name="/" resourcePath="">
 <toolChain id="cdt.managedbuild.toolchain.gnu.macosx.base.677243255" name="cdt.managedbuild.toolchain.gnu.macosx.base" superClass="cdt.managedbuild.toolchain.gnu.macosx.base">
 <targetPlatform archList="all" binaryParser="org.eclipse.cdt.core.ELF;org.eclipse.cdt.core.MachO64" id="cdt.managedbuild.target.gnu.platform.macosx.base.752782918" name="Debug Platform" osList="macosx" superClass="cdt.managedbuild.target.gnu.platform.macosx.base"/>
-<builder arguments="" buildPath="${ProjDirPath}/build" command="make" id="cdt.managedbuild.target.gnu.builder.macosx.base.319933862" keepEnvironmentInBuildfile="false" managedBuildOn="false" name="Gnu Make Builder" parallelBuildOn="true" parallelizationNumber="5" superClass="cdt.managedbuild.target.gnu.builder.macosx.base"/>
+<builder arguments="" buildPath="${ProjDirPath}/build" command="make" id="cdt.managedbuild.target.gnu.builder.macosx.base.319933862" keepEnvironmentInBuildfile="false" managedBuildOn="false" name="Gnu Make Builder" parallelBuildOn="true" parallelizationNumber="8" superClass="cdt.managedbuild.target.gnu.builder.macosx.base"/>
 <tool id="cdt.managedbuild.tool.macosx.c.linker.macosx.base.457360678" name="MacOS X C Linker" superClass="cdt.managedbuild.tool.macosx.c.linker.macosx.base"/>
 <tool id="cdt.managedbuild.tool.macosx.cpp.linker.macosx.base.1011140787" name="MacOS X C++ Linker" superClass="cdt.managedbuild.tool.macosx.cpp.linker.macosx.base">
 <inputType id="cdt.managedbuild.tool.macosx.cpp.linker.input.1032375444" superClass="cdt.managedbuild.tool.macosx.cpp.linker.input">
.project
@@ -23,7 +23,7 @@
 </dictionary>
 <dictionary>
 <key>org.eclipse.cdt.make.core.buildArguments</key>
-<value>-j5</value>
+<value>-j8</value>
 </dictionary>
 <dictionary>
 <key>org.eclipse.cdt.make.core.buildCommand</key>
gtsam/nonlinear/DirectOptimizer.cpp (deleted)
@@ -1,58 +0,0 @@
-/* ----------------------------------------------------------------------------
-
- * GTSAM Copyright 2010, Georgia Tech Research Corporation,
- * Atlanta, Georgia 30332-0415
- * All Rights Reserved
- * Authors: Frank Dellaert, et al. (see THANKS for the full author list)
-
- * See LICENSE for the license information
-
- * -------------------------------------------------------------------------- */
-
-/**
- * @file DirectOptimizer.cpp
- * @brief
- * @author Richard Roberts
- * @date Apr 1, 2012
- */
-
-#include <gtsam/nonlinear/NonlinearOptimizer.h>
-
-namespace gtsam {
-
-/* ************************************************************************* */
-NonlinearOptimizer::shared_ptr DirectOptimizer::update(const SharedGraph& newGraph) const {
-
-  // Call update on the base class
-  shared_ptr result = boost::static_pointer_cast<DirectOptimizer>(NonlinearOptimizer::update(newGraph));
-
-  // Need to recompute the ordering if we're using an automatic COLAMD ordering
-  if(result->colamdOrdering_) {
-    result->ordering_ = result->graph_->orderingCOLAMD(*result->values_);
-  }
-
-  return result;
-}
-
-/* ************************************************************************* */
-DirectOptimizer::shared_ptr DirectOptimizer::update(const SharedOrdering& newOrdering) const {
-  return update(graph_, newOrdering);
-}
-
-/* ************************************************************************* */
-DirectOptimizer::shared_ptr DirectOptimizer::update(const SharedGraph& newGraph, const SharedOrdering& newOrdering) const {
-  // Call update on base class
-  shared_ptr result = boost::static_pointer_cast<DirectOptimizer>(NonlinearOptimizer::update(newGraph));
-
-  if(newOrdering && newOrdering->size() > 0) {
-    result->colamdOrdering_ = false;
-    result->ordering_ = newOrdering;
-  } else {
-    result->colamdOrdering_ = true;
-    result->ordering_ = result->graph_->orderingCOLAMD(*result->values_);
-  }
-
-  return result;
-}
-
-} /* namespace gtsam */
gtsam/nonlinear/DoglegOptimizer.cpp
@@ -26,49 +26,59 @@ using namespace std;

 namespace gtsam {

-NonlinearOptimizer::SharedState DoglegOptimizer::iterate(const SharedState& current) const {
+/* ************************************************************************* */
+NonlinearOptimizer::SharedState DoglegOptimizer::iterate(const NonlinearOptimizer::SharedState& current) const {

   // Linearize graph
-  GaussianFactorGraph::shared_ptr linear = graph_->linearize(*values_, *ordering_);
+  const Ordering& ordering = *this->ordering(current->values);
+  GaussianFactorGraph::shared_ptr linear = graph_->linearize(current->values, ordering);

   // Check whether to use QR
   bool useQR;
-  if(dlParams_->factorization == DoglegParams::LDL)
+  if(params_->factorization == DoglegParams::LDL)
     useQR = false;
-  else if(dlParams_->factorization == DoglegParams::QR)
+  else if(params_->factorization == DoglegParams::QR)
     useQR = true;
   else
     throw runtime_error("Optimization parameter is invalid: DoglegParams::factorization");

   // Pull out parameters we'll use
-  const bool dlVerbose = (dlParams_->dlVerbosity > DoglegParams::SILENT);
+  const bool dlVerbose = (params_->dlVerbosity > DoglegParams::SILENT);

   // Do Dogleg iteration with either Multifrontal or Sequential elimination
   DoglegOptimizerImpl::IterationResult result;

-  if(dlParams_->elimination == DoglegParams::MULTIFRONTAL) {
+  if(params_->elimination == DoglegParams::MULTIFRONTAL) {
     GaussianBayesTree::shared_ptr bt = GaussianMultifrontalSolver(*linear, useQR).eliminate();
-    result = DoglegOptimizerImpl::Iterate(delta_, DoglegOptimizerImpl::ONE_STEP_PER_ITERATION, *bt, *graph_, *values(), *ordering_, error(), dlVerbose);
+    result = DoglegOptimizerImpl::Iterate(current->Delta, DoglegOptimizerImpl::ONE_STEP_PER_ITERATION, *bt, *graph_, current->values, ordering, current->error, dlVerbose);

-  } else if(dlParams_->elimination == DoglegParams::SEQUENTIAL) {
+  } else if(params_->elimination == DoglegParams::SEQUENTIAL) {
     GaussianBayesNet::shared_ptr bn = GaussianSequentialSolver(*linear, useQR).eliminate();
-    result = DoglegOptimizerImpl::Iterate(delta_, DoglegOptimizerImpl::ONE_STEP_PER_ITERATION, *bn, *graph_, *values(), *ordering_, error(), dlVerbose);
+    result = DoglegOptimizerImpl::Iterate(current->Delta, DoglegOptimizerImpl::ONE_STEP_PER_ITERATION, *bn, *graph_, current->values, ordering, current->error, dlVerbose);

   } else {
     throw runtime_error("Optimization parameter is invalid: DoglegParams::elimination");
   }

-  // Update values
-  SharedValues newValues = boost::make_shared<Values>(values_->retract(result.dx_d, *ordering_));
-
   // Create new state with new values and new error
   DoglegOptimizer::SharedState newState = boost::make_shared<DoglegState>();
-  newState->values = newValues;
-  newState->error = result.f_error;
+
+  // Update values
+  newState->values = current->values.retract(result.dx_d, ordering);
+  newState->error = result.f_error;
   newState->iterations = current->iterations + 1;
   newState->Delta = result.Delta;

   return newState;
 }

+/* ************************************************************************* */
+NonlinearOptimizer::SharedState DoglegOptimizer::initialState(const Values& initialValues) const {
+  SharedState initial = boost::make_shared<DoglegState>();
+  defaultInitialState(initialValues, *initial);
+  initial->Delta = params_->deltaInitial;
+  return initial;
+}
+
 } /* namespace gtsam */
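A sketch of stepping the new immutable iterate() by hand (hypothetical usage based on the hunk above; graph and initialEstimate are placeholders):

    DoglegOptimizer optimizer(graph);
    NonlinearOptimizer::SharedState state = optimizer.initialState(initialEstimate);
    // Each call returns a fresh state; the optimizer object itself never changes.
    while(state->iterations < 10 && state->error > 1e-5)
      state = optimizer.iterate(state);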
gtsam/nonlinear/DoglegOptimizer.h
@@ -18,7 +18,7 @@

 #pragma once

-#include <gtsam/nonlinear/DirectOptimizer.h>
+#include <gtsam/nonlinear/SuccessiveLinearizationOptimizer.h>

 namespace gtsam {

@@ -27,7 +27,7 @@ namespace gtsam {
 * common to all nonlinear optimization algorithms. This class also contains
 * all of those parameters.
 */
-class DoglegParams : public DirectOptimizerParams {
+class DoglegParams : public SuccessiveLinearizationParams {
 public:
   /** See DoglegParams::dlVerbosity */
   enum DLVerbosity {
@@ -44,7 +44,7 @@ public:
   virtual ~DoglegParams() {}

   virtual void print(const std::string& str = "") const {
-    DirectOptimizerParams::print(str);
+    SuccessiveLinearizationParams::print(str);
     std::cout << " deltaInitial: " << deltaInitial << "\n";
     std::cout.flush();
   }
@@ -53,7 +53,7 @@ public:
 /**
 * State for DoglegOptimizer
 */
-class DoglegState : public NonlinearOptimizerState {
+class DoglegState : public SuccessiveLinearizationState {
 public:

   double Delta;
@@ -63,11 +63,11 @@ public:
 /**
 * This class performs Dogleg nonlinear optimization
 */
-class DoglegOptimizer : public DirectOptimizer {
+class DoglegOptimizer : public SuccessiveLinearizationOptimizer {

 public:

-  typedef boost::shared_ptr<DoglegParams> SharedParams;
+  typedef boost::shared_ptr<const DoglegParams> SharedParams;
   typedef boost::shared_ptr<DoglegState> SharedState;
   typedef boost::shared_ptr<DoglegOptimizer> shared_ptr;

@@ -85,7 +85,7 @@ public:
   DoglegOptimizer(const NonlinearFactorGraph& graph,
       const DoglegParams& params = DoglegParams(),
       const Ordering& ordering = Ordering()) :
-          DirectOptimizer(SharedGraph(new NonlinearFactorGraph(graph))),
+          SuccessiveLinearizationOptimizer(SharedGraph(new NonlinearFactorGraph(graph))),
           params_(new DoglegParams(params)) {}

   /** Standard constructor, requires a nonlinear factor graph, initial
@@ -96,9 +96,8 @@ public:
   * @param values The initial variable assignments
   * @param params The optimization parameters
   */
-  DoglegOptimizer(const NonlinearFactorGraph& graph,
-      const Ordering& ordering) :
-          DirectOptimizer(SharedGraph(new NonlinearFactorGraph(graph))),
+  DoglegOptimizer(const NonlinearFactorGraph& graph, const Ordering& ordering) :
+          SuccessiveLinearizationOptimizer(SharedGraph(new NonlinearFactorGraph(graph))),
           params_(new DoglegParams()) {}

   /** Standard constructor, requires a nonlinear factor graph, initial
@@ -110,14 +109,11 @@ public:
   DoglegOptimizer(const SharedGraph& graph,
       const DoglegParams& params = DoglegParams(),
       const SharedOrdering& ordering = SharedOrdering()) :
-          DirectOptimizer(graph),
+          SuccessiveLinearizationOptimizer(graph),
           params_(new DoglegParams(params)) {}

   /** Access the parameters */
-  virtual const NonlinearOptimizer::SharedParams& params() const { return params_; }
-
-  /** Access the parameters */
-  const DoglegOptimizer::SharedParams& params() const { return params_; }
+  virtual NonlinearOptimizer::SharedParams params() const { return params_; }

   /// @}

@@ -131,19 +127,18 @@ public:
   * containing the updated variable assignments, which may be retrieved with
   * values().
   */
-  virtual NonlinearOptimizer::SharedState iterate(const SharedState& current) const;
+  virtual NonlinearOptimizer::SharedState iterate(const NonlinearOptimizer::SharedState& current) const;

-  /** Create a copy of the NonlinearOptimizer */
-  virtual NonlinearOptimizer::shared_ptr clone() const {
-    return boost::make_shared<DoglegOptimizer>(*this); }
+  /** Create an initial state with the specified variable assignment values and
+  * all other default state.
+  */
+  virtual NonlinearOptimizer::SharedState initialState(const Values& initialValues) const;

   /// @}

 protected:

-  const SharedParams params_;
-
-  virtual void setParams(const NonlinearOptimizer::SharedParams& newParams) { params_ = newParams; }
+  SharedParams params_;
 };

 }
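Configuring the trust region through the params (a hedged sketch using the deltaInitial field shown in print() above; the value is illustrative only):

    DoglegParams params;
    params.deltaInitial = 10.0;   // initial trust-region radius Delta
    DoglegOptimizer optimizer(graph, params);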
gtsam/nonlinear/GaussNewtonOptimizer.cpp
@@ -24,6 +24,7 @@ using namespace std;

 namespace gtsam {

+/* ************************************************************************* */
 NonlinearOptimizer::auto_ptr GaussNewtonOptimizer::iterate() const {

   // Linearize graph
@@ -61,4 +62,11 @@ NonlinearOptimizer::auto_ptr GaussNewtonOptimizer::iterate() const {
   return newOptimizer;
 }

+/* ************************************************************************* */
+NonlinearOptimizer::SharedState GaussNewtonOptimizer::initialState(const Values& initialValues) const {
+  SharedState initial = boost::make_shared<GaussNewtonState>();
+  defaultInitialState(initialValues, *initial);
+  return initial;
+}
+
 } /* namespace gtsam */
gtsam/nonlinear/GaussNewtonOptimizer.h
@@ -18,65 +18,28 @@

 #pragma once

-#include <gtsam/nonlinear/NonlinearOptimizer.h>
+#include <gtsam/nonlinear/SuccessiveLinearizationOptimizer.h>

 namespace gtsam {

 /** Parameters for Gauss-Newton optimization, inherits from
 * NonlinearOptimizationParams.
 */
-class GaussNewtonParams : public NonlinearOptimizerParams {
-public:
-  /** See GaussNewtonParams::elimination */
-  enum Elimination {
-    MULTIFRONTAL,
-    SEQUENTIAL
-  };
-
-  /** See GaussNewtonParams::factorization */
-  enum Factorization {
-    LDL,
-    QR,
-  };
-
-  Elimination elimination; ///< The elimination algorithm to use (default: MULTIFRONTAL)
-  Factorization factorization; ///< The numerical factorization (default: LDL)
-
-  GaussNewtonParams() :
-    elimination(MULTIFRONTAL), factorization(LDL) {}
-
-  ~GaussNewtonParams() {}
-
-  virtual void print(const std::string& str = "") const {
-    NonlinearOptimizerParams::print(str);
-    if(elimination == MULTIFRONTAL)
-      std::cout << " elimination method: MULTIFRONTAL\n";
-    else if(elimination == SEQUENTIAL)
-      std::cout << " elimination method: SEQUENTIAL\n";
-    else
-      std::cout << " elimination method: (invalid)\n";
-
-    if(factorization == LDL)
-      std::cout << " factorization method: LDL\n";
-    else if(factorization == QR)
-      std::cout << " factorization method: QR\n";
-    else
-      std::cout << " factorization method: (invalid)\n";
-
-    std::cout.flush();
-  }
-};
+class GaussNewtonParams : public SuccessiveLinearizationParams {
+};
+
+class GaussNewtonState : public SuccessiveLinearizationState {
+};

 /**
 * This class performs Gauss-Newton nonlinear optimization
 * TODO: use make_shared
 */
-class GaussNewtonOptimizer : public NonlinearOptimizer {
+class GaussNewtonOptimizer : public SuccessiveLinearizationOptimizer {

 public:

-  typedef boost::shared_ptr<const GaussNewtonParams> SharedGNParams;
-  typedef boost::shared_ptr<const Ordering> SharedOrdering;
+  typedef boost::shared_ptr<const GaussNewtonParams> SharedParams;

   /// @name Standard interface
   /// @{
@@ -89,17 +52,11 @@ public:
   * @param values The initial variable assignments
   * @param params The optimization parameters
   */
-  GaussNewtonOptimizer(const NonlinearFactorGraph& graph, const Values& values,
+  GaussNewtonOptimizer(const NonlinearFactorGraph& graph,
       const GaussNewtonParams& params = GaussNewtonParams(),
       const Ordering& ordering = Ordering()) :
-          NonlinearOptimizer(
-              SharedGraph(new NonlinearFactorGraph(graph)),
-              SharedValues(new Values(values)),
-              SharedGNParams(new GaussNewtonParams(params))),
-          gnParams_(boost::static_pointer_cast<const GaussNewtonParams>(params_)),
-          colamdOrdering_(ordering.size() == 0),
-          ordering_(colamdOrdering_ ?
-              graph_->orderingCOLAMD(*values_) : Ordering::shared_ptr(new Ordering(ordering))) {}
+          SuccessiveLinearizationOptimizer(SharedGraph(new NonlinearFactorGraph(graph))),
+          params_(new GaussNewtonParams(params)) {}

   /** Standard constructor, requires a nonlinear factor graph, initial
   * variable assignments, and optimization parameters. For convenience this
@@ -109,16 +66,9 @@ public:
   * @param values The initial variable assignments
   * @param params The optimization parameters
   */
-  GaussNewtonOptimizer(const NonlinearFactorGraph& graph, const Values& values,
-      const Ordering& ordering) :
-          NonlinearOptimizer(
-              SharedGraph(new NonlinearFactorGraph(graph)),
-              SharedValues(new Values(values)),
-              SharedGNParams(new GaussNewtonParams())),
-          gnParams_(boost::static_pointer_cast<const GaussNewtonParams>(params_)),
-          colamdOrdering_(ordering.size() == 0),
-          ordering_(colamdOrdering_ ?
-              graph_->orderingCOLAMD(*values_) : Ordering::shared_ptr(new Ordering(ordering))) {}
+  GaussNewtonOptimizer(const NonlinearFactorGraph& graph, const Ordering& ordering) :
+          SuccessiveLinearizationOptimizer(SharedGraph(new NonlinearFactorGraph(graph))),
+          params_(new GaussNewtonParams()) {}

   /** Standard constructor, requires a nonlinear factor graph, initial
   * variable assignments, and optimization parameters.
@@ -126,16 +76,14 @@ public:
   * @param values The initial variable assignments
   * @param params The optimization parameters
   */
-  GaussNewtonOptimizer(const SharedGraph& graph, const SharedValues& values,
+  GaussNewtonOptimizer(const SharedGraph& graph,
       const GaussNewtonParams& params = GaussNewtonParams(),
       const SharedOrdering& ordering = SharedOrdering()) :
-          NonlinearOptimizer(graph, values, SharedGNParams(new GaussNewtonParams(params))),
-          gnParams_(boost::static_pointer_cast<const GaussNewtonParams>(params_)),
-          colamdOrdering_(!ordering || ordering->size() == 0),
-          ordering_(colamdOrdering_ ? graph_->orderingCOLAMD(*values_) : ordering) {}
+          SuccessiveLinearizationOptimizer(graph),
+          params_(new GaussNewtonParams(params)) {}

-  /** Access the variable ordering used by this optimizer */
-  const SharedOrdering& ordering() const { return ordering_; }
+  /** Access the parameters */
+  virtual NonlinearOptimizer::SharedParams params() const { return params_; }

   /// @}

@@ -149,74 +97,18 @@ public:
   * containing the updated variable assignments, which may be retrieved with
   * values().
   */
-  virtual auto_ptr iterate() const;
+  virtual NonlinearOptimizer::SharedState iterate(const NonlinearOptimizer::SharedState& current) const;

-  /** Update the graph, values, and/or parameters, leaving all other state
-  * the same. Any of these that are empty shared pointers are left unchanged
-  * in the returned optimizer object. Returns a new updated
-  * NonlinearOptimizer object, the original is not modified.
-  */
-  virtual auto_ptr update(
-      const SharedGraph& newGraph,
-      const SharedValues& newValues = SharedValues(),
-      const SharedParams& newParams = SharedParams()) const {
-    return auto_ptr(new GaussNewtonOptimizer(*this, newGraph, newValues,
-        boost::dynamic_pointer_cast<const GaussNewtonParams>(newParams)));
-  }
-
-  /** Update the ordering, leaving all other state the same. If newOrdering
-  * is an empty pointer <emph>or</emph> contains an empty Ordering object
-  * (with zero size), a COLAMD ordering will be computed. Returns a new
-  * NonlinearOptimizer object, the original is not modified.
-  */
-  virtual auto_ptr update(const SharedOrdering& newOrdering) const {
-    return auto_ptr(new GaussNewtonOptimizer(*this, newOrdering)); }
-
-  /** Create a copy of the NonlinearOptimizer */
-  virtual auto_ptr clone() const {
-    return auto_ptr(new GaussNewtonOptimizer(*this));
-  }
+  /** Create an initial state with the specified variable assignment values and
+  * all other default state.
+  */
+  virtual NonlinearOptimizer::SharedState initialState(const Values& initialValues) const;

   /// @}

 protected:

-  const SharedGNParams gnParams_;
-  const bool colamdOrdering_;
-  const SharedOrdering ordering_;
-
-  /** Protected constructor called by update() to modify the graph, values, or
-  * parameters. Computes a COLAMD ordering if the optimizer was originally
-  * constructed with an empty ordering, and if the graph is changing.
-  */
-  GaussNewtonOptimizer(const GaussNewtonOptimizer& original, const SharedGraph& newGraph,
-      const SharedValues& newValues, const SharedGNParams& newParams) :
-          NonlinearOptimizer(original, newGraph, newValues, newParams),
-          gnParams_(newParams ? newParams : original.gnParams_),
-          colamdOrdering_(original.colamdOrdering_),
-          ordering_(newGraph && colamdOrdering_ ? graph_->orderingCOLAMD(*values_) : original.ordering_) {}
-
-  /** Protected constructor called by update() to modify the ordering, computing
-  * a COLAMD ordering if the new ordering is empty, and also recomputing the
-  * dimensions.
-  */
-  GaussNewtonOptimizer(
-      const GaussNewtonOptimizer& original, const SharedOrdering& newOrdering) :
-          NonlinearOptimizer(original),
-          gnParams_(original.gnParams_),
-          colamdOrdering_(!newOrdering || newOrdering->size() == 0),
-          ordering_(colamdOrdering_ ? graph_->orderingCOLAMD(*values_) : newOrdering) {}
-
-private:
-
-  // Special constructor for completing an iteration, updates the values and
-  // error, and increments the iteration count.
-  GaussNewtonOptimizer(const GaussNewtonOptimizer& original,
-      const SharedValues& newValues, double newError) :
-          NonlinearOptimizer(original.graph_, newValues, original.params_, newError, original.iterations_+1),
-          gnParams_(original.gnParams_),
-          colamdOrdering_(original.colamdOrdering_),
-          ordering_(original.ordering_) {}
+  const SharedParams params_;
 };

 }
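Since elimination and factorization now come from the shared SuccessiveLinearizationParams base, configuring Gauss-Newton might look like this (a hypothetical sketch, not code from the commit):

    GaussNewtonParams params;                            // empty subclass; fields are inherited
    params.elimination = GaussNewtonParams::SEQUENTIAL;
    params.factorization = GaussNewtonParams::QR;
    GaussNewtonOptimizer optimizer(graph, params);
    Values result = optimizer.optimized(initialEstimate);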
gtsam/nonlinear/LevenbergMarquardtOptimizer.cpp
@@ -26,7 +26,8 @@ using namespace std;

 namespace gtsam {

-NonlinearOptimizer::auto_ptr LevenbergMarquardtOptimizer::iterate(const SharedState& current) const {
+/* ************************************************************************* */
+NonlinearOptimizer::SharedState LevenbergMarquardtOptimizer::iterate(const NonlinearOptimizer::SharedState& current) const {

   // Linearize graph
   GaussianFactorGraph::shared_ptr linear = graph_->linearize(*values_, *ordering_);
@@ -141,4 +142,12 @@ NonlinearOptimizer::auto_ptr LevenbergMarquardtOptimizer::iterate(const SharedSt
   return newState;
 }

+/* ************************************************************************* */
+NonlinearOptimizer::SharedState LevenbergMarquardtOptimizer::initialState(const Values& initialValues) const {
+  SharedState initial = boost::make_shared<LevenbergMarquardtState>();
+  defaultInitialState(initialValues, *initial);
+  initial->lambda = params_->lambdaInitial;
+  return initial;
+}
+
 } /* namespace gtsam */
gtsam/nonlinear/LevenbergMarquardtOptimizer.h
@@ -18,7 +18,7 @@

 #pragma once

-#include <gtsam/nonlinear/DirectOptimizer.h>
+#include <gtsam/nonlinear/SuccessiveLinearizationOptimizer.h>

 namespace gtsam {

@@ -27,7 +27,7 @@ namespace gtsam {
 * common to all nonlinear optimization algorithms. This class also contains
 * all of those parameters.
 */
-class LevenbergMarquardtParams : public DirectOptimizerParams {
+class LevenbergMarquardtParams : public SuccessiveLinearizationParams {
 public:
   /** See LevenbergMarquardtParams::lmVerbosity */
   enum LMVerbosity {
@@ -50,7 +50,7 @@ public:
   virtual ~LevenbergMarquardtParams() {}

   virtual void print(const std::string& str = "") const {
-    DirectOptimizerParams::print(str);
+    SuccessiveLinearizationParams::print(str);
     std::cout << " lambdaInitial: " << lambdaInitial << "\n";
     std::cout << " lambdaFactor: " << lambdaFactor << "\n";
     std::cout << " lambdaUpperBound: " << lambdaUpperBound << "\n";
@@ -61,7 +61,7 @@ public:
 /**
 * State for LevenbergMarquardtOptimizer
 */
-class LevenbergMarquardtState : public NonlinearOptimizerState {
+class LevenbergMarquardtState : public SuccessiveLinearizationState {
 public:

   double lambda;
@@ -72,11 +72,11 @@ public:
 * This class performs Levenberg-Marquardt nonlinear optimization
 * TODO: use make_shared
 */
-class LevenbergMarquardtOptimizer : public DirectOptimizer {
+class LevenbergMarquardtOptimizer : public SuccessiveLinearizationOptimizer {

 public:

-  typedef boost::shared_ptr<LevenbergMarquardtParams> SharedParams;
+  typedef boost::shared_ptr<const LevenbergMarquardtParams> SharedParams;
   typedef boost::shared_ptr<LevenbergMarquardtState> SharedState;
   typedef boost::shared_ptr<LevenbergMarquardtOptimizer> shared_ptr;

@@ -94,7 +94,7 @@ public:
   LevenbergMarquardtOptimizer(const NonlinearFactorGraph& graph,
       const LevenbergMarquardtParams& params = LevenbergMarquardtParams(),
       const Ordering& ordering = Ordering()) :
-          DirectOptimizer(SharedGraph(new NonlinearFactorGraph(graph))),
+          SuccessiveLinearizationOptimizer(SharedGraph(new NonlinearFactorGraph(graph))),
          params_(new LevenbergMarquardtParams(params)) {}

   /** Standard constructor, requires a nonlinear factor graph, initial
@@ -107,7 +107,7 @@ public:
   */
   LevenbergMarquardtOptimizer(const NonlinearFactorGraph& graph,
       const Ordering& ordering) :
-          DirectOptimizer(SharedGraph(new NonlinearFactorGraph(graph))),
+          SuccessiveLinearizationOptimizer(SharedGraph(new NonlinearFactorGraph(graph))),
           params_(new LevenbergMarquardtParams()) {}

   /** Standard constructor, requires a nonlinear factor graph, initial
@@ -119,14 +119,11 @@ public:
   LevenbergMarquardtOptimizer(const SharedGraph& graph,
       const LevenbergMarquardtParams& params = LevenbergMarquardtParams(),
       const SharedOrdering& ordering = SharedOrdering()) :
-          DirectOptimizer(graph),
+          SuccessiveLinearizationOptimizer(graph),
           params_(new LevenbergMarquardtParams(params)) {}

   /** Access the parameters */
-  virtual const NonlinearOptimizer::SharedParams& params() const { return params_; }
-
-  /** Access the parameters */
-  const LevenbergMarquardtOptimizer::SharedParams& params() const { return params_; }
+  virtual NonlinearOptimizer::SharedParams params() const { return params_; }

   /// @}

@@ -140,11 +137,12 @@ public:
   * containing the updated variable assignments, which may be retrieved with
   * values().
   */
-  virtual NonlinearOptimizer::SharedState iterate(const SharedState& current) const;
+  virtual NonlinearOptimizer::SharedState iterate(const NonlinearOptimizer::SharedState& current) const;

-  /** Create a copy of the NonlinearOptimizer */
-  virtual NonlinearOptimizer::shared_ptr clone() const {
-    return boost::make_shared<LevenbergMarquardtOptimizer>(*this); }
+  /** Create an initial state with the specified variable assignment values and
+  * all other default state.
+  */
+  virtual NonlinearOptimizer::SharedState initialState(const Values& initialValues) const;

   /// @}

@@ -152,7 +150,7 @@ protected:

   typedef boost::shared_ptr<const std::vector<size_t> > SharedDimensions;

-  const SharedParams params_;
+  SharedParams params_;
   const SharedDimensions dimensions_;
 };
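A corresponding sketch for Levenberg-Marquardt, using the lambda fields that print() above shows (values illustrative only, semantics assumed from the field names):

    LevenbergMarquardtParams params;
    params.lambdaInitial = 1e-3;   // starting damping value
    params.lambdaFactor = 10.0;    // damping adjustment factor (assumed semantics)
    LevenbergMarquardtOptimizer optimizer(graph, params);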
gtsam/nonlinear/NonlinearOptimizer.cpp
@@ -82,6 +82,13 @@ NonlinearOptimizer::SharedState NonlinearOptimizer::defaultOptimize(const Shared
   return next;
 }

+/* ************************************************************************* */
+void NonlinearOptimizer::defaultInitialState(const Values& initialValues, NonlinearOptimizerState& state) const {
+  state.values = initialValues;
+  state.error = graph_->error(initialValues);
+  state.iterations = 0;
+}
+
 /* ************************************************************************* */
 bool checkConvergence(double relativeErrorTreshold, double absoluteErrorTreshold,
     double errorThreshold, double currentError, double newError,
gtsam/nonlinear/NonlinearOptimizer.h
@@ -78,8 +78,11 @@ public:
   /** The number of optimization iterations performed. */
   unsigned int iterations;

-  /** Virtual destructor to enable RTTI */
+  /** Virtual destructor */
   virtual ~NonlinearOptimizerState() {}
+
+  /** Clone the state (i.e. make a copy of the derived class) */
+  virtual boost::shared_ptr<NonlinearOptimizerState> clone() = 0;
 };

@@ -185,15 +188,19 @@ public:
   */
   virtual SharedState optimize(const SharedState& initial) const { return defaultOptimize(initial); }

+  SharedState optimize(const Values& initialization) const { return optimize(initialState(initialization)); }
+
   /** Shortcut to optimize and return the resulting Values of the maximum-
   * likelihood estimate. To access statistics and information such as the
   * final error and number of iterations, use optimize() instead.
   * @return The maximum-likelihood estimate.
   */
-  virtual SharedValues optimized(const SharedState& initial) const { return this->optimize(initial)->values(); }
+  virtual Values optimized(const SharedState& initial) const { return this->optimize(initial)->values; }
+
+  Values optimized(const Values& initialization) const { return optimized(initialState(initialization)); }

   /** Retrieve the parameters. */
-  virtual const SharedParams& params() const = 0;
+  virtual SharedParams params() const = 0;

   /// @}

@@ -209,20 +216,10 @@ public:
   */
   virtual SharedState iterate(const SharedState& current) const = 0;

-  /** Update the graph, leaving all other parts of the optimizer unchanged,
-  * returns a new updated NonlinearOptimizer object, the original is not
-  * modified.
-  */
-  virtual shared_ptr update(const SharedGraph& newGraph) const;
-
-  /** Update the parameters, leaving all other parts of the optimizer unchanged,
-  * returns a new updated NonlinearOptimizer object, the original is not
-  * modified.
-  */
-  const shared_ptr update(const SharedParams& newParams) const;
-
-  /** Create a copy of the NonlinearOptimizer */
-  virtual shared_ptr clone() const = 0;
+  /** Create an initial state from a variable assignment Values, with all
+  * other state values at their default initial values.
+  */
+  virtual SharedState initialState(const Values& initialValues) const = 0;

   /// @}

@@ -235,13 +232,12 @@ protected:
   */
   SharedState defaultOptimize(const SharedState& initial) const;

-  /** Modify the parameters in-place (not for external use) */
-  virtual void setParams(const NonlinearOptimizer::SharedParams& newParams);
+  /** Initialize a state, using the current error and 0 iterations */
+  void defaultInitialState(const Values& initialValues, NonlinearOptimizerState& state) const;

   /** Constructor for initial construction of base classes.
   */
-  NonlinearOptimizer(const SharedGraph& graph) :
-      graph_(graph) {}
+  NonlinearOptimizer(const SharedGraph& graph) : graph_(graph) {}

 };
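The commit message anticipates a gradient-descent NLO subclass; under the interface above, such a subclass would only need to supply the following (a hypothetical skeleton, not part of this commit):

    class GradientDescentOptimizer : public NonlinearOptimizer {  // hypothetical
    public:
      GradientDescentOptimizer(const SharedGraph& graph) : NonlinearOptimizer(graph) {}
      // The whole contract is now iterate(), initialState(), and params():
      virtual SharedState iterate(const SharedState& current) const;        // one descent step
      virtual SharedState initialState(const Values& initialValues) const;  // seed the state
      virtual SharedParams params() const;
    };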
gtsam/nonlinear/SuccessiveLinearizationOptimizer.cpp (new file)
@@ -0,0 +1,41 @@
+/* ----------------------------------------------------------------------------
+
+ * GTSAM Copyright 2010, Georgia Tech Research Corporation,
+ * Atlanta, Georgia 30332-0415
+ * All Rights Reserved
+ * Authors: Frank Dellaert, et al. (see THANKS for the full author list)
+
+ * See LICENSE for the license information
+
+ * -------------------------------------------------------------------------- */
+
+/**
+ * @file SuccessiveLinearizationOptimizer.cpp
+ * @brief
+ * @author Richard Roberts
+ * @date Apr 1, 2012
+ */
+
+#include <gtsam/nonlinear/SuccessiveLinearizationOptimizer.h>
+#include <gtsam/nonlinear/NonlinearOptimizer.h>
+
+namespace gtsam {
+
+/* ************************************************************************* */
+const SuccessiveLinearizationOptimizer::SharedOrdering& SuccessiveLinearizationOptimizer::ordering(const Values& values) const {
+
+  if(!ordering_) {
+    SharedParams slParams =
+        boost::dynamic_pointer_cast<const SuccessiveLinearizationParams>(this->params());
+
+    // If we're using a COLAMD ordering, compute it
+    if(!slParams->ordering)
+      ordering_ = graph_->orderingCOLAMD(values);
+    else
+      ordering_ = slParams->ordering;
+  }
+
+  return ordering_;
+}
+
+} /* namespace gtsam */
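Note that ordering() is declared const yet assigns the cached ordering_; for this lazy caching to compile, the member presumably needs to be mutable (an assumption on my part, since the header hunk below declares it without the keyword):

    private:
      mutable SharedOrdering ordering_;  // filled lazily by ordering(), hence mutable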
gtsam/nonlinear/SuccessiveLinearizationOptimizer.h (renamed from DirectOptimizer.h)
@@ -10,7 +10,7 @@
 * -------------------------------------------------------------------------- */

 /**
- * @file DirectOptimizer.h
+ * @file SuccessiveLinearizationOptimizer.h
 * @brief
 * @author Richard Roberts
 * @date Apr 1, 2012
@@ -18,31 +18,65 @@

 #pragma once

+#include <boost/variant.hpp>
+
 #include <gtsam/nonlinear/NonlinearOptimizer.h>

 namespace gtsam {

-class DirectOptimizerParams : public NonlinearOptimizerParams {
+class SuccessiveLinearizationParams;
+class SuccessiveLinearizationState;
+
+/**
+ * This is an intermediate base class for NonlinearOptimizers that use direct
+ * solving, i.e. factorization. This class is here to reduce code duplication
+ * for storing the ordering, having factorization and elimination parameters,
+ * etc.
+ */
+class SuccessiveLinearizationOptimizer : public NonlinearOptimizer {
 public:
-  /** See DoglegParams::elimination */
+
+  typedef boost::shared_ptr<const SuccessiveLinearizationParams> SharedParams;
+  typedef boost::shared_ptr<const SuccessiveLinearizationOptimizer> shared_ptr;
+
+protected:
+
+  typedef boost::shared_ptr<const Ordering> SharedOrdering;
+
+  SuccessiveLinearizationOptimizer(const SharedGraph& graph) : NonlinearOptimizer(graph) {}
+
+  const SharedOrdering& ordering(const Values& values) const;
+
+private:
+
+  SharedOrdering ordering_;
+};
+
+class SuccessiveLinearizationParams : public NonlinearOptimizerParams {
+public:
+  /** See SuccessiveLinearizationParams::elimination */
   enum Elimination {
     MULTIFRONTAL,
     SEQUENTIAL
   };

-  /** See DoglegParams::factorization */
+  /** See SuccessiveLinearizationParams::factorization */
   enum Factorization {
     LDL,
     QR,
   };

+  /** See SuccessiveLinearizationParams::ordering */
+  typedef boost::shared_ptr<const Ordering> SharedOrdering;
+
   Elimination elimination; ///< The elimination algorithm to use (default: MULTIFRONTAL)
   Factorization factorization; ///< The numerical factorization (default: LDL)
+  SharedOrdering ordering; ///< The variable elimination ordering, or empty to use COLAMD (default: empty)

-  DirectOptimizerParams() :
+  SuccessiveLinearizationParams() :
     elimination(MULTIFRONTAL), factorization(LDL) {}

-  virtual ~DirectOptimizerParams() {}
+  virtual ~SuccessiveLinearizationParams() {}

   virtual void print(const std::string& str = "") const {
     NonlinearOptimizerParams::print(str);
@@ -60,40 +94,16 @@ public:
     else
       std::cout << " factorization method: (invalid)\n";

+    if(ordering)
+      std::cout << " ordering: custom\n";
+    else
+      std::cout << " ordering: COLAMD\n";
+
     std::cout.flush();
   }
 };

-/**
- * This is an intermediate base class for NonlinearOptimizers that use direct
- * solving, i.e. factorization. This class is here to reduce code duplication
- * for storing the ordering, having factorization and elimination parameters,
- * etc.
- */
-class DirectOptimizer : public NonlinearOptimizer {
-public:
-  typedef boost::shared_ptr<const Ordering> SharedOrdering;
-  typedef boost::shared_ptr<NonlinearOptimizer> shared_ptr;
-
-  virtual NonlinearOptimizer::shared_ptr update(const SharedGraph& newGraph) const;
-
-  virtual shared_ptr update(const SharedOrdering& newOrdering) const;
-
-  virtual shared_ptr update(const SharedGraph& newGraph, const SharedOrdering& newOrdering) const;
-
-  /** Access the variable ordering used by this optimizer */
-  const SharedOrdering& ordering() const { return ordering_; }
-
-protected:
-
-  const bool colamdOrdering_;
-  const SharedOrdering ordering_;
-
-  DirectOptimizer(const SharedGraph& graph,
-      const SharedOrdering ordering = SharedOrdering()) :
-          NonlinearOptimizer(graph),
-          colamdOrdering_(!ordering || ordering->size() == 0),
-          ordering_(colamdOrdering_ ? graph_->orderingCOLAMD(*values_) : ordering) {}
-};
+class SuccessiveLinearizationState : public NonlinearOptimizerState {
+};

 } /* namespace gtsam */
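With the ordering moved into the parameters, a caller supplies a fixed ordering like this (hedged sketch; myOrdering is a placeholder, and leaving the field empty falls back to COLAMD per the ordering() implementation above):

    LevenbergMarquardtParams params;
    params.ordering.reset(new Ordering(myOrdering));  // empty => COLAMD
    LevenbergMarquardtOptimizer optimizer(graph, params);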
gtsam/slam/pose2SLAM.h
@@ -97,7 +97,7 @@ namespace pose2SLAM {

   /// Optimize
   Values optimize(const Values& initialEstimate) {
-    return *LevenbergMarquardtOptimizer(*this, initialEstimate).optimized();
+    return LevenbergMarquardtOptimizer(*this).optimized(initialEstimate);
   }
 };