diff --git a/gtsam/nonlinear/GaussNewtonOptimizer.cpp b/gtsam/nonlinear/GaussNewtonOptimizer.cpp
new file mode 100644
index 000000000..a372fae4e
--- /dev/null
+++ b/gtsam/nonlinear/GaussNewtonOptimizer.cpp
@@ -0,0 +1,68 @@
+/* ----------------------------------------------------------------------------
+
+ * GTSAM Copyright 2010, Georgia Tech Research Corporation,
+ * Atlanta, Georgia 30332-0415
+ * All Rights Reserved
+ * Authors: Frank Dellaert, et al. (see THANKS for the full author list)
+
+ * See LICENSE for the license information
+
+ * -------------------------------------------------------------------------- */
+
+/**
+ * @file GaussNewtonOptimizer.cpp
+ * @brief Iteration step of the Gauss-Newton nonlinear optimizer
+ * @author Richard Roberts
+ * @created Feb 26, 2012
+ */
+
+#include <gtsam/nonlinear/GaussNewtonOptimizer.h>
+#include <gtsam/linear/GaussianMultifrontalSolver.h>
+#include <gtsam/linear/GaussianSequentialSolver.h>
+
+using namespace std;
+
+namespace gtsam {
+
+/* ************************************************************************* */
+// Performs one Gauss-Newton step: linearize, solve the linear system with the
+// configured elimination/factorization method, retract, and return a new
+// optimizer object holding the updated values (the original is unmodified).
+NonlinearOptimizer::auto_ptr GaussNewtonOptimizer::iterate() const {
+
+  // Linearize graph (values_ is a shared pointer - dereference it)
+  GaussianFactorGraph::shared_ptr linear =
+      graph_->linearize(*values_, gnParams_->ordering);
+
+  // Check whether to use QR.  NOTE: cannot be 'const bool' - a const object
+  // must be initialized at its declaration.
+  bool useQR;
+  if(gnParams_->factorization == GaussNewtonParams::LDL)
+    useQR = false;
+  else if(gnParams_->factorization == GaussNewtonParams::QR)
+    useQR = true;
+  else
+    throw runtime_error("Optimization parameter is invalid: GaussNewtonParams::factorization");
+
+  // Optimize.  The enumerators are scoped inside GaussNewtonParams and must
+  // be qualified.
+  VectorValues::shared_ptr delta;
+  if(gnParams_->elimination == GaussNewtonParams::MULTIFRONTAL)
+    delta = GaussianMultifrontalSolver(*linear, useQR).optimize();
+  else if(gnParams_->elimination == GaussNewtonParams::SEQUENTIAL)
+    delta = GaussianSequentialSolver(*linear, useQR).optimize();
+  else
+    throw runtime_error("Optimization parameter is invalid: GaussNewtonParams::elimination");
+
+  // Maybe show output
+  if(params_->verbosity >= NonlinearOptimizerParams::DELTA) delta->print("delta");
+
+  // Update values
+  SharedValues newValues(new Values(values_->retract(*delta, gnParams_->ordering)));
+  double newError = graph_->error(*newValues);
+
+  // Maybe show output
+  if (params_->verbosity >= NonlinearOptimizerParams::VALUES) newValues->print("newValues");
+  if (params_->verbosity >= NonlinearOptimizerParams::ERROR) cout << "error: " << newError << endl;
+
+  // Create new optimizer with new values and new error
+  auto_ptr newOptimizer(new GaussNewtonOptimizer(
+      graph_, newValues, gnParams_, newError, iterations_+1));
+
+  return newOptimizer;
+}
+
+} /* namespace gtsam */
diff --git a/gtsam/nonlinear/GaussNewtonOptimizer.h b/gtsam/nonlinear/GaussNewtonOptimizer.h
new file mode 100644
index 000000000..df5a2f88f
--- /dev/null
+++ b/gtsam/nonlinear/GaussNewtonOptimizer.h
@@ -0,0 +1,158 @@
+/* ----------------------------------------------------------------------------
+
+ * GTSAM Copyright 2010, Georgia Tech Research Corporation,
+ * Atlanta, Georgia 30332-0415
+ * All Rights Reserved
+ * Authors: Frank Dellaert, et al. (see THANKS for the full author list)
+
+ * See LICENSE for the license information
+
+ * -------------------------------------------------------------------------- */
+
+/**
+ * @file GaussNewtonOptimizer.h
+ * @brief Gauss-Newton nonlinear optimizer and its parameters
+ * @author Richard Roberts
+ * @created Feb 26, 2012
+ */
+
+#pragma once
+
+#include <gtsam/nonlinear/NonlinearOptimizer.h>
+
+namespace gtsam {
+
+/** Parameters for Gauss-Newton optimization, inherits from
+ * NonlinearOptimizationParams.
+ */
+class GaussNewtonParams : public NonlinearOptimizerParams {
+public:
+  /** See GaussNewtonParams::elimination */
+  enum Elimination {
+    MULTIFRONTAL,
+    SEQUENTIAL
+  };
+
+  /** See GaussNewtonParams::factorization */
+  enum Factorization {
+    LDL,
+    QR,
+  };
+
+  Elimination elimination; ///< The elimination algorithm to use (default: MULTIFRONTAL)
+  Factorization factorization; ///< The numerical factorization (default: LDL)
+  Ordering ordering; ///< The variable elimination ordering (default: empty -> COLAMD)
+
+  GaussNewtonParams() :
+    elimination(MULTIFRONTAL), factorization(LDL) {}
+
+  virtual void print(const std::string& str = "") const {
+    NonlinearOptimizerParams::print(str);
+    if(elimination == MULTIFRONTAL)
+      std::cout << "         elimination method: MULTIFRONTAL\n";
+    else if(elimination == SEQUENTIAL)
+      std::cout << "         elimination method: SEQUENTIAL\n";
+    else
+      std::cout << "         elimination method: (invalid)\n";
+
+    // NOTE: only LDL and QR are enumerators of Factorization; a CHOLESKY
+    // branch here would not compile.
+    if(factorization == LDL)
+      std::cout << "       factorization method: LDL\n";
+    else if(factorization == QR)
+      std::cout << "       factorization method: QR\n";
+    else
+      std::cout << "       factorization method: (invalid)\n";
+
+    std::cout.flush();
+  }
+};
+
+/**
+ * This class performs Gauss-Newton nonlinear optimization
+ */
+class GaussNewtonOptimizer : public NonlinearOptimizer {
+
+public:
+
+  typedef boost::shared_ptr<GaussNewtonParams> SharedGNParams;
+
+  /// @name Standard interface
+  /// @{
+
+  /** Standard constructor, requires a nonlinear factor graph, initial
+   * variable assignments, and optimization parameters.  For convenience this
+   * version takes plain objects instead of shared pointers, but internally
+   * copies the objects.
+   * @param graph The nonlinear factor graph to optimize
+   * @param values The initial variable assignments
+   * @param params The optimization parameters
+   */
+  GaussNewtonOptimizer(const NonlinearFactorGraph& graph, const Values& values,
+      const GaussNewtonParams& params) :
+        NonlinearOptimizer(
+            SharedGraph(new NonlinearFactorGraph(graph)),
+            SharedValues(new Values(values)),
+            SharedGNParams(new GaussNewtonParams(params))),
+        gnParams_(boost::static_pointer_cast<GaussNewtonParams>(params_)) {}
+
+  /** Standard constructor, requires a nonlinear factor graph, initial
+   * variable assignments, and optimization parameters.
+   * @param graph The nonlinear factor graph to optimize
+   * @param values The initial variable assignments
+   * @param params The optimization parameters
+   */
+  GaussNewtonOptimizer(const SharedGraph& graph, const SharedValues& values,
+      const SharedGNParams& params) :
+        NonlinearOptimizer(graph, values, params), gnParams_(params) {}
+
+  /// @}
+
+  /// @name Advanced interface
+  /// @{
+
+  /** Virtual destructor */
+  virtual ~GaussNewtonOptimizer() {}
+
+  /** Perform a single iteration, returning a new NonlinearOptimizer class
+   * containing the updated variable assignments, which may be retrieved with
+   * values().
+   */
+  virtual auto_ptr iterate() const;
+
+  /** Update the graph, values, and/or parameters, leaving all other state
+   * the same.  Any of these that are empty shared pointers are left unchanged
+   * in the returned optimizer object.  Returns a new updated
+   * NonlinearOptimzier object, the original is not modified.
+   */
+  virtual auto_ptr update(
+      const SharedGraph& newGraph = SharedGraph(),
+      const SharedValues& newValues = SharedValues(),
+      const SharedParams& newParams = SharedParams()) const {
+    // auto_ptr's pointer constructor is explicit - wrap the raw pointer.
+    return auto_ptr(new GaussNewtonOptimizer(*this, newGraph, newValues,
+        boost::dynamic_pointer_cast<GaussNewtonParams>(newParams)));
+  }
+
+  /** Create a copy of the NonlinearOptimizer */
+  virtual auto_ptr clone() const {
+    return auto_ptr(new GaussNewtonOptimizer(*this));
+  }
+
+  /// @}
+
+protected:
+
+  const SharedGNParams gnParams_;
+
+  /** Internal constructor used by iterate() - carries forward the error and
+   * iteration count instead of recomputing them. */
+  GaussNewtonOptimizer(const SharedGraph& graph, const SharedValues& values,
+      const SharedGNParams& params, double error, int iterations) :
+        NonlinearOptimizer(graph, values, params, error, iterations), gnParams_(params) {}
+
+  /** Internal constructor used by update() - any empty pointer keeps the
+   * corresponding piece of the original's state. */
+  GaussNewtonOptimizer(const GaussNewtonOptimizer& original, const SharedGraph& newGraph,
+      const SharedValues& newValues, const SharedGNParams& newParams) :
+        NonlinearOptimizer(original, newGraph, newValues, newParams),
+        gnParams_(newParams ? newParams : original.gnParams_) {}
+
+};
+
+}
diff --git a/gtsam/nonlinear/LevenbergMarquardtOptimizer.cpp b/gtsam/nonlinear/LevenbergMarquardtOptimizer.cpp
new file mode 100644
index 000000000..270778afe
--- /dev/null
+++ b/gtsam/nonlinear/LevenbergMarquardtOptimizer.cpp
@@ -0,0 +1,68 @@
+/* ----------------------------------------------------------------------------
+
+ * GTSAM Copyright 2010, Georgia Tech Research Corporation,
+ * Atlanta, Georgia 30332-0415
+ * All Rights Reserved
+ * Authors: Frank Dellaert, et al. (see THANKS for the full author list)
+
+ * See LICENSE for the license information
+
+ * -------------------------------------------------------------------------- */
+
+/**
+ * @file LevenbergMarquardtOptimizer.cpp
+ * @brief Iteration step of the Levenberg-Marquardt nonlinear optimizer
+ * @author Richard Roberts
+ * @created Feb 26, 2012
+ */
+
+#include <gtsam/nonlinear/LevenbergMarquardtOptimizer.h>
+#include <gtsam/linear/GaussianMultifrontalSolver.h>
+#include <gtsam/linear/GaussianSequentialSolver.h>
+
+using namespace std;
+
+namespace gtsam {
+
+/* ************************************************************************* */
+// Performs one iteration step.  NOTE: this class's member is lmParams_, not
+// gnParams_ - the original patch referenced the Gauss-Newton member name.
+NonlinearOptimizer::auto_ptr LevenbergMarquardtOptimizer::iterate() const {
+
+  // Linearize graph (values_ is a shared pointer - dereference it)
+  GaussianFactorGraph::shared_ptr linear =
+      graph_->linearize(*values_, lmParams_->ordering);
+
+  // Check whether to use QR.  NOTE: cannot be 'const bool' - a const object
+  // must be initialized at its declaration.
+  bool useQR;
+  if(lmParams_->factorization == LevenbergMarquardtParams::LDL)
+    useQR = false;
+  else if(lmParams_->factorization == LevenbergMarquardtParams::QR)
+    useQR = true;
+  else
+    throw runtime_error("Optimization parameter is invalid: LevenbergMarquardtParams::factorization");
+
+  // Optimize.  The enumerators are scoped inside LevenbergMarquardtParams and
+  // must be qualified.
+  VectorValues::shared_ptr delta;
+  if(lmParams_->elimination == LevenbergMarquardtParams::MULTIFRONTAL)
+    delta = GaussianMultifrontalSolver(*linear, useQR).optimize();
+  else if(lmParams_->elimination == LevenbergMarquardtParams::SEQUENTIAL)
+    delta = GaussianSequentialSolver(*linear, useQR).optimize();
+  else
+    throw runtime_error("Optimization parameter is invalid: LevenbergMarquardtParams::elimination");
+
+  // Maybe show output
+  if(params_->verbosity >= NonlinearOptimizerParams::DELTA) delta->print("delta");
+
+  // Update values
+  SharedValues newValues(new Values(values_->retract(*delta, lmParams_->ordering)));
+  double newError = graph_->error(*newValues);
+
+  // Maybe show output
+  if (params_->verbosity >= NonlinearOptimizerParams::VALUES) newValues->print("newValues");
+  if (params_->verbosity >= NonlinearOptimizerParams::ERROR) cout << "error: " << newError << endl;
+
+  // Create new optimizer with new values and new error
+  auto_ptr newOptimizer(new LevenbergMarquardtOptimizer(
+      graph_, newValues, lmParams_, newError, iterations_+1));
+
+  return newOptimizer;
+}
+
+} /* namespace gtsam */
diff --git a/gtsam/nonlinear/LevenbergMarquardtOptimizer.h b/gtsam/nonlinear/LevenbergMarquardtOptimizer.h
new file mode 100644
index 000000000..83c4ebe88
--- /dev/null
+++ b/gtsam/nonlinear/LevenbergMarquardtOptimizer.h
@@ -0,0 +1,163 @@
+/* ----------------------------------------------------------------------------
+
+ * GTSAM Copyright 2010, Georgia Tech Research Corporation,
+ * Atlanta, Georgia 30332-0415
+ * All Rights Reserved
+ * Authors: Frank Dellaert, et al. (see THANKS for the full author list)
+
+ * See LICENSE for the license information
+
+ * -------------------------------------------------------------------------- */
+
+/**
+ * @file LevenbergMarquardtOptimizer.h
+ * @brief Levenberg-Marquardt nonlinear optimizer and its parameters
+ * @author Richard Roberts
+ * @created Feb 26, 2012
+ */
+
+#pragma once
+
+#include <gtsam/nonlinear/NonlinearOptimizer.h>
+
+namespace gtsam {
+
+/** Parameters for Levenberg-Marquardt optimization, inherits from
+ * NonlinearOptimizationParams.
+ */
+class LevenbergMarquardtParams : public NonlinearOptimizerParams {
+public:
+  /** See LevenbergMarquardtParams::elimination */
+  enum Elimination {
+    MULTIFRONTAL,
+    SEQUENTIAL
+  };
+
+  /** See LevenbergMarquardtParams::factorization */
+  enum Factorization {
+    LDL,
+    QR,
+  };
+
+  Elimination elimination; ///< The elimination algorithm to use (default: MULTIFRONTAL)
+  Factorization factorization; ///< The numerical factorization (default: LDL)
+  Ordering ordering; ///< The variable elimination ordering (default: empty -> COLAMD); plain Ordering for consistency with GaussNewtonParams
+  double lambda; ///< The initial (and current after each iteration) Levenberg-Marquardt damping term (default: 1e-5)
+  double lambdaFactor; ///< The amount by which to multiply or divide lambda when adjusting lambda (default: 10.0)
+
+  LevenbergMarquardtParams() :
+    elimination(MULTIFRONTAL), factorization(LDL), lambda(1e-5), lambdaFactor(10.0) {}
+
+  virtual void print(const std::string& str = "") const {
+    NonlinearOptimizerParams::print(str);
+    if(elimination == MULTIFRONTAL)
+      std::cout << "         elimination method: MULTIFRONTAL\n";
+    else if(elimination == SEQUENTIAL)
+      std::cout << "         elimination method: SEQUENTIAL\n";
+    else
+      std::cout << "         elimination method: (invalid)\n";
+
+    // NOTE: only LDL and QR are enumerators of Factorization; a CHOLESKY
+    // branch here would not compile.
+    if(factorization == LDL)
+      std::cout << "       factorization method: LDL\n";
+    else if(factorization == QR)
+      std::cout << "       factorization method: QR\n";
+    else
+      std::cout << "       factorization method: (invalid)\n";
+
+    std::cout << "                     lambda: " << lambda << "\n";
+    std::cout << "               lambdaFactor: " << lambdaFactor << "\n";
+
+    std::cout.flush();
+  }
+};
+
+/**
+ * This class performs Levenberg-Marquardt nonlinear optimization
+ */
+class LevenbergMarquardtOptimizer : public NonlinearOptimizer {
+
+public:
+
+  typedef boost::shared_ptr<LevenbergMarquardtParams> SharedLMParams;
+
+  /// @name Standard interface
+  /// @{
+
+  /** Standard constructor, requires a nonlinear factor graph, initial
+   * variable assignments, and optimization parameters.  For convenience this
+   * version takes plain objects instead of shared pointers, but internally
+   * copies the objects.
+   * @param graph The nonlinear factor graph to optimize
+   * @param values The initial variable assignments
+   * @param params The optimization parameters
+   */
+  LevenbergMarquardtOptimizer(const NonlinearFactorGraph& graph, const Values& values,
+      const LevenbergMarquardtParams& params) :
+        NonlinearOptimizer(
+            SharedGraph(new NonlinearFactorGraph(graph)),
+            SharedValues(new Values(values)),
+            SharedLMParams(new LevenbergMarquardtParams(params))),
+        lmParams_(boost::static_pointer_cast<LevenbergMarquardtParams>(params_)) {}
+
+  /** Standard constructor, requires a nonlinear factor graph, initial
+   * variable assignments, and optimization parameters.
+   * @param graph The nonlinear factor graph to optimize
+   * @param values The initial variable assignments
+   * @param params The optimization parameters
+   */
+  LevenbergMarquardtOptimizer(const SharedGraph& graph, const SharedValues& values,
+      const SharedLMParams& params) :
+        NonlinearOptimizer(graph, values, params), lmParams_(params) {}
+
+  /// @}
+
+  /// @name Advanced interface
+  /// @{
+
+  /** Virtual destructor */
+  virtual ~LevenbergMarquardtOptimizer() {}
+
+  /** Perform a single iteration, returning a new NonlinearOptimizer class
+   * containing the updated variable assignments, which may be retrieved with
+   * values().
+   */
+  virtual auto_ptr iterate() const;
+
+  /** Update the graph, values, and/or parameters, leaving all other state
+   * the same.  Any of these that are empty shared pointers are left unchanged
+   * in the returned optimizer object.  Returns a new updated
+   * NonlinearOptimzier object, the original is not modified.
+   */
+  virtual auto_ptr update(
+      const SharedGraph& newGraph = SharedGraph(),
+      const SharedValues& newValues = SharedValues(),
+      const SharedParams& newParams = SharedParams()) const {
+    // auto_ptr's pointer constructor is explicit - wrap the raw pointer.
+    return auto_ptr(new LevenbergMarquardtOptimizer(*this, newGraph, newValues,
+        boost::dynamic_pointer_cast<LevenbergMarquardtParams>(newParams)));
+  }
+
+  /** Create a copy of the NonlinearOptimizer */
+  virtual auto_ptr clone() const {
+    return auto_ptr(new LevenbergMarquardtOptimizer(*this));
+  }
+
+  /// @}
+
+protected:
+
+  const SharedLMParams lmParams_;
+
+  /** Internal constructor used by iterate() - carries forward the error and
+   * iteration count instead of recomputing them. */
+  LevenbergMarquardtOptimizer(const SharedGraph& graph, const SharedValues& values,
+      const SharedLMParams& params, double error, int iterations) :
+        NonlinearOptimizer(graph, values, params, error, iterations), lmParams_(params) {}
+
+  /** Internal constructor used by update() - any empty pointer keeps the
+   * corresponding piece of the original's state. */
+  LevenbergMarquardtOptimizer(const LevenbergMarquardtOptimizer& original, const SharedGraph& newGraph,
+      const SharedValues& newValues, const SharedLMParams& newParams) :
+        NonlinearOptimizer(original, newGraph, newValues, newParams),
+        lmParams_(newParams ? newParams : original.lmParams_) {}
+
+};
+
+}
diff --git a/gtsam/nonlinear/NonlinearOptimizer.h b/gtsam/nonlinear/NonlinearOptimizer.h
index 7981146d7..dba87d6fb 100644
--- a/gtsam/nonlinear/NonlinearOptimizer.h
+++ b/gtsam/nonlinear/NonlinearOptimizer.h
@@ -11,15 +11,14 @@
 
 /**
  * @file NonlinearOptimizer.h
- * @brief Encapsulates nonlinear optimization state
- * @author Frank Dellaert
+ * @brief Base class and parameters for nonlinear optimization algorithms
+ * @author Richard Roberts
  * @date Sep 7, 2009
  */
 
 #pragma once
 
 #include <gtsam/nonlinear/NonlinearFactorGraph.h>
-#include <gtsam/nonlinear/NonlinearOptimizer-inl.h>
 
 namespace gtsam {
 
@@ -28,8 +27,7 @@ namespace gtsam {
  */
 class NonlinearOptimizerParams {
 public:
-
-  /** Control the printing verbosity */
+  /** See NonlinearOptimizationParams::verbosity */
   enum Verbosity {
     SILENT,
     ERROR,
@@ -53,14 +51,16 @@ public:
     maxIterations(100.0), relativeErrorTol(1e-5), absoluteErrorTol(0.0),
     errorTol(0.0), verbosity(SILENT) {}
 
-  void print(const std::string& str = "") const {
-    cout << s << "\n";
-    cout << "relative decrease threshold: " << relativeErrorTol << "\n";
-    cout << "absolute decrease threshold: " << absoluteErrorTol << "\n";
-    cout << "      total error threshold: " << errorTol << "\n";
-    cout << "         maximum iterations: " << maxIterations << "\n";
-    cout << "            verbosity level: " << verbosity << "\n";
+  virtual void print(const std::string& str = "") const {
+    std::cout << str << "\n";
+    std::cout << "relative decrease threshold: " << relativeErrorTol << "\n";
+    std::cout << "absolute decrease threshold: " << absoluteErrorTol << "\n";
+    std::cout << "      total error threshold: " << errorTol << "\n";
+    std::cout << "         maximum iterations: " << maxIterations << "\n";
+    std::cout << "            verbosity level: " << verbosity << std::endl;
   }
+
+  virtual ~NonlinearOptimizerParams() {}
 };
 
 
@@ -80,7 +80,7 @@ public:
  *
  * Example:
  * \code
-NonlinearOptimizer::shared_ptr optimizer = DoglegOptimizer::Create(graph, initialValues);
+NonlinearOptimizer::auto_ptr optimizer = DoglegOptimizer::Create(graph, initialValues);
 optimizer = optimizer->optimizer();
 Values result = optimizer->values();
 useTheResult(result);
@@ -123,26 +123,6 @@ public:
   /** A const shared_ptr to the parameters */
   typedef boost::shared_ptr<const NonlinearOptimizerParams> SharedParams;
 
-protected:
-
-  const SharedGraph graph_;
-  const SharedValues values_;
-  const SharedParams params_;
-  const double error_;
-  const int iterations_;
-
-  NonlinearOptimizer(const SharedGraph& graph, const SharedValues& values,
-      const SharedParams& params) :
-        graph_(graph), values_(values), params_(params),
-        error_(graph_->error(*values_)), iterations_(0) {}
-
-  NonlinearOptimizer(const SharedGraph& graph, const SharedValues& values,
-      const SharedParams& params, double error, int iterations) :
-        graph_(graph), values_(values), params_(params),
-        error_(error), iterations_(iterations) {}
-
-public:
-
   /// @name Standard interface
   /// @{
 
@@ -169,29 +149,24 @@ public:
   /// @name Advanced interface
   /// @{
 
+  /** Virtual destructor */
+  virtual ~NonlinearOptimizer() {}
+
   /** Perform a single iteration, returning a new NonlinearOptimizer class
    * containing the updated variable assignments, which may be retrieved with
    * values().
    */
   virtual auto_ptr iterate() const = 0;
 
-  /** Update the nonlinear factor graph, leaving all other state the same.
-   * Returns a new updated NonlinearOptimzier object, the original is not
-   * modified.
+  /** Update the graph, values, and/or parameters, leaving all other state
+   * the same.  Any of these that are empty shared pointers are left unchanged
+   * in the returned optimizer object.  Returns a new updated
+   * NonlinearOptimzier object, the original is not modified.
    */
-  virtual auto_ptr update(const SharedGraph& newGraph) const = 0;
-
-  /** Update the variable assignments, leaving all other state the same.
-   * Returns a new updated NonlinearOptimzier object, the original is not
-   * modified.
-   */
-  virtual auto_ptr update(const SharedValues& newValues) const = 0;
-
-  /** Update the parameters, leaving all other state the same.
-   * Returns a new updated NonlinearOptimzier object, the original is not
-   * modified.
-   */
-  virtual auto_ptr update(const SharedParams& newParams) const = 0;
+  virtual auto_ptr update(
+      const SharedGraph& newGraph = SharedGraph(),
+      const SharedValues& newValues = SharedValues(),
+      const SharedParams& newParams = SharedParams()) const = 0;
 
   /** Create a copy of the NonlinearOptimizer */
   virtual auto_ptr clone() const = 0;
@@ -205,6 +180,37 @@ protected:
    */
   auto_ptr defaultOptimize() const;
 
+protected:
+
+  const SharedGraph graph_;
+  const SharedValues values_;
+  const SharedParams params_;
+  const double error_;
+  const int iterations_;
+
+  /** Constructor for initial construction of base classes, computes error and
+   * sets iterations to zero.
+   */
+  NonlinearOptimizer(const SharedGraph& graph, const SharedValues& values,
+      const SharedParams& params) :
+        graph_(graph), values_(values), params_(params),
+        error_(graph_->error(*values_)), iterations_(0) {}
+
+  /** Constructor that specifies all parts of the state, used for updates */
+  NonlinearOptimizer(const SharedGraph& graph, const SharedValues& values,
+      const SharedParams& params, double error, int iterations) :
+        graph_(graph), values_(values), params_(params),
+        error_(error), iterations_(iterations) {}
+
+  /** Convenience constructor for modifying only some of the state. */
+  NonlinearOptimizer(const NonlinearOptimizer& original, const SharedGraph& newGraph,
+      const SharedValues& newValues, const SharedParams& newParams) :
+        graph_(newGraph ? newGraph : original.graph_),
+        values_(newValues ? newValues : original.values_),
+        params_(newParams ? newParams : original.params_),
+        error_(newGraph || newValues ? graph_->error(*values_) : original.error_),
+        iterations_(original.iterations_) {}
+
 };
 
 /** Check whether the relative error decrease is less than relativeErrorTreshold,
diff --git a/tests/testNonlinearOptimizer.cpp b/tests/testNonlinearOptimizer.cpp
index e397ea443..91d92b530 100644
--- a/tests/testNonlinearOptimizer.cpp
+++ b/tests/testNonlinearOptimizer.cpp
@@ -35,7 +35,6 @@ using namespace boost;
 // template definitions
 #include <gtsam/nonlinear/NonlinearFactorGraph.h>
 #include <gtsam/nonlinear/GaussNewtonOptimizer.h>
-#include <gtsam/nonlinear/NonlinearOptimizer-inl.h>
 
 using namespace gtsam;
 
@@ -235,6 +234,10 @@ TEST( NonlinearOptimizer, SimpleGNOptimizer_noshared )
 /* ************************************************************************* */
 TEST( NonlinearOptimizer, optimization_method )
 {
+  GaussNewtonParams params;
+  // QR is a Factorization enumerator (not an Elimination one); the stray
+  // always-failing EXPECT(false) placeholder has been removed.
+  params.factorization = GaussNewtonParams::QR;
+
   example::Graph fg = example::createReallyNonlinearFactorGraph();
 
   Point2 x0(3,3);