diff --git a/.cproject b/.cproject
index c6699da3e..e80acb39e 100644
--- a/.cproject
+++ b/.cproject
@@ -21,7 +21,7 @@
-
+
diff --git a/.project b/.project
index 9856df2ea..e52e979df 100644
--- a/.project
+++ b/.project
@@ -23,7 +23,7 @@
 					<key>org.eclipse.cdt.make.core.buildArguments</key>
-					<value>-j5</value>
+					<value>-j8</value>
 				</dictionary>
 				<dictionary>
 					<key>org.eclipse.cdt.make.core.buildCommand</key>
diff --git a/gtsam/nonlinear/DirectOptimizer.cpp b/gtsam/nonlinear/DirectOptimizer.cpp
deleted file mode 100644
index 424bf0396..000000000
--- a/gtsam/nonlinear/DirectOptimizer.cpp
+++ /dev/null
@@ -1,58 +0,0 @@
-/* ----------------------------------------------------------------------------
-
- * GTSAM Copyright 2010, Georgia Tech Research Corporation,
- * Atlanta, Georgia 30332-0415
- * All Rights Reserved
- * Authors: Frank Dellaert, et al. (see THANKS for the full author list)
-
- * See LICENSE for the license information
-
- * -------------------------------------------------------------------------- */
-
-/**
- * @file DirectOptimizer.cpp
- * @brief
- * @author Richard Roberts
- * @date Apr 1, 2012
- */
-
-#include <gtsam/nonlinear/DirectOptimizer.h>
-
-namespace gtsam {
-
-/* ************************************************************************* */
-NonlinearOptimizer::shared_ptr DirectOptimizer::update(const SharedGraph& newGraph) const {
-
-  // Call update on the base class
-  shared_ptr result = boost::static_pointer_cast<DirectOptimizer>(NonlinearOptimizer::update(newGraph));
-
-  // Need to recompute the ordering if we're using an automatic COLAMD ordering
-  if(result->colamdOrdering_) {
-    result->ordering_ = result->graph_->orderingCOLAMD(*result->values_);
-  }
-
-  return result;
-}
-
-/* ************************************************************************* */
-DirectOptimizer::shared_ptr DirectOptimizer::update(const SharedOrdering& newOrdering) const {
-  return update(graph_, newOrdering);
-}
-
-/* ************************************************************************* */
-DirectOptimizer::shared_ptr DirectOptimizer::update(const SharedGraph& newGraph, const SharedOrdering& newOrdering) const {
-  // Call update on base class
-  shared_ptr result = boost::static_pointer_cast<DirectOptimizer>(NonlinearOptimizer::update(newGraph));
-
-  if(newOrdering && newOrdering->size() > 0) {
-    result->colamdOrdering_ = false;
-    result->ordering_ = newOrdering;
-  } else {
-    result->colamdOrdering_ = true;
-    result->ordering_ = result->graph_->orderingCOLAMD(*result->values_);
-  }
-
-  return result;
-}
-
-} /* namespace gtsam */
diff --git a/gtsam/nonlinear/DoglegOptimizer.cpp b/gtsam/nonlinear/DoglegOptimizer.cpp
index 1a490b744..9a7822c6f 100644
--- a/gtsam/nonlinear/DoglegOptimizer.cpp
+++ b/gtsam/nonlinear/DoglegOptimizer.cpp
@@ -26,49 +26,59 @@ using namespace std;

 namespace gtsam {

-NonlinearOptimizer::SharedState DoglegOptimizer::iterate(const SharedState& current) const {
+/* ************************************************************************* */
+NonlinearOptimizer::SharedState DoglegOptimizer::iterate(const NonlinearOptimizer::SharedState& current) const {
+
+  // Downcast the generic state to the Dogleg-specific state
+  const DoglegState& state = dynamic_cast<const DoglegState&>(*current);

   // Linearize graph
-  GaussianFactorGraph::shared_ptr linear = graph_->linearize(*values_, *ordering_);
+  const Ordering& ordering = *this->ordering(state.values);
+  GaussianFactorGraph::shared_ptr linear = graph_->linearize(state.values, ordering);

   // Check whether to use QR
   bool useQR;
-  if(dlParams_->factorization == DoglegParams::LDL)
+  if(params_->factorization == DoglegParams::LDL)
     useQR = false;
-  else if(dlParams_->factorization == DoglegParams::QR)
+  else if(params_->factorization == DoglegParams::QR)
     useQR = true;
   else
     throw runtime_error("Optimization parameter is invalid: DoglegParams::factorization");

   // Pull out parameters we'll use
-  const bool dlVerbose = (dlParams_->dlVerbosity > DoglegParams::SILENT);
+  const bool dlVerbose = (params_->dlVerbosity > DoglegParams::SILENT);

   // Do Dogleg iteration with either Multifrontal or Sequential elimination
   DoglegOptimizerImpl::IterationResult result;
-  if(dlParams_->elimination == DoglegParams::MULTIFRONTAL) {
+  if(params_->elimination == DoglegParams::MULTIFRONTAL) {
     GaussianBayesTree::shared_ptr bt = GaussianMultifrontalSolver(*linear, useQR).eliminate();
-    result = DoglegOptimizerImpl::Iterate(delta_, DoglegOptimizerImpl::ONE_STEP_PER_ITERATION, *bt, *graph_, *values(), *ordering_, error(), dlVerbose);
+    result = DoglegOptimizerImpl::Iterate(state.Delta, DoglegOptimizerImpl::ONE_STEP_PER_ITERATION, *bt, *graph_, state.values, ordering, state.error, dlVerbose);

-  } else if(dlParams_->elimination == DoglegParams::SEQUENTIAL) {
+  } else if(params_->elimination == DoglegParams::SEQUENTIAL) {
     GaussianBayesNet::shared_ptr bn = GaussianSequentialSolver(*linear, useQR).eliminate();
-    result = DoglegOptimizerImpl::Iterate(delta_, DoglegOptimizerImpl::ONE_STEP_PER_ITERATION, *bn, *graph_, *values(), *ordering_, error(), dlVerbose);
+    result = DoglegOptimizerImpl::Iterate(state.Delta, DoglegOptimizerImpl::ONE_STEP_PER_ITERATION, *bn, *graph_, state.values, ordering, state.error, dlVerbose);

   } else {
     throw runtime_error("Optimization parameter is invalid: DoglegParams::elimination");
   }

-  // Update values
-  SharedValues newValues = boost::make_shared<Values>(values_->retract(result.dx_d, *ordering_));
-
   // Create new state with new values and new error
   DoglegOptimizer::SharedState newState = boost::make_shared<DoglegState>();
-  newState->values = newValues;
-  newState->error = rsult.f_error;
-  newState->iterations = current->iterations + 1;
+
+  // Update values
+  newState->values = state.values.retract(result.dx_d, ordering);
+
+  newState->error = result.f_error;
+  newState->iterations = state.iterations + 1;
   newState->Delta = result.Delta;

   return newState;
 }

+/* ************************************************************************* */
+NonlinearOptimizer::SharedState DoglegOptimizer::initialState(const Values& initialValues) const {
+  SharedState initial = boost::make_shared<DoglegState>();
+  defaultInitialState(initialValues, *initial);
+  initial->Delta = params_->deltaInitial;
+  return initial;
+}
+
 } /* namespace gtsam */
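[Review note] iterate() now consumes and returns immutable state objects instead of mutating the optimizer; NonlinearOptimizer::defaultOptimize() drives it in a loop. A minimal sketch of driving an optimizer by hand under this API (graph and initialValues are hypothetical variables, not part of this patch):

    DoglegOptimizer optimizer(graph);  // graph is a NonlinearFactorGraph
    NonlinearOptimizer::SharedState state = optimizer.initialState(initialValues);
    // Each iterate() returns a fresh DoglegState; the optimizer itself is never modified
    while(state->iterations < 10 && state->error > 1e-5)
      state = optimizer.iterate(state);
    std::cout << "error after " << state->iterations << " iterations: " << state->error << std::endl;
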
diff --git a/gtsam/nonlinear/DoglegOptimizer.h b/gtsam/nonlinear/DoglegOptimizer.h
index 89c88edb0..0ef9da536 100644
--- a/gtsam/nonlinear/DoglegOptimizer.h
+++ b/gtsam/nonlinear/DoglegOptimizer.h
@@ -18,7 +18,7 @@

 #pragma once

-#include <gtsam/nonlinear/DirectOptimizer.h>
+#include <gtsam/nonlinear/SuccessiveLinearizationOptimizer.h>

 namespace gtsam {

@@ -27,7 +27,7 @@ namespace gtsam {
  * common to all nonlinear optimization algorithms. This class also contains
  * all of those parameters.
  */
-class DoglegParams : public DirectOptimizerParams {
+class DoglegParams : public SuccessiveLinearizationParams {
 public:
   /** See DoglegParams::dlVerbosity */
   enum DLVerbosity {
@@ -44,7 +44,7 @@ public:
   virtual ~DoglegParams() {}

   virtual void print(const std::string& str = "") const {
-    DirectOptimizerParams::print(str);
+    SuccessiveLinearizationParams::print(str);
     std::cout << " deltaInitial: " << deltaInitial << "\n";
     std::cout.flush();
   }
 };

@@ -53,7 +53,7 @@ public:
 /**
  * State for DoglegOptimizer
  */
-class DoglegState : public NonlinearOptimizerState {
+class DoglegState : public SuccessiveLinearizationState {
 public:

   double Delta;

@@ -63,11 +63,11 @@ public:
 /**
  * This class performs Dogleg nonlinear optimization
  */
-class DoglegOptimizer : public DirectOptimizer {
+class DoglegOptimizer : public SuccessiveLinearizationOptimizer {

 public:

-  typedef boost::shared_ptr<DoglegParams> SharedParams;
+  typedef boost::shared_ptr<const DoglegParams> SharedParams;
   typedef boost::shared_ptr<DoglegState> SharedState;
   typedef boost::shared_ptr<DoglegOptimizer> shared_ptr;

@@ -85,7 +85,7 @@ public:
   DoglegOptimizer(const NonlinearFactorGraph& graph,
       const DoglegParams& params = DoglegParams(),
       const Ordering& ordering = Ordering()) :
-        DirectOptimizer(SharedGraph(new NonlinearFactorGraph(graph))),
+        SuccessiveLinearizationOptimizer(SharedGraph(new NonlinearFactorGraph(graph))),
         params_(new DoglegParams(params)) {}

   /** Standard constructor, requires a nonlinear factor graph, initial
@@ -96,9 +96,8 @@ public:
    * @param values The initial variable assignments
    * @param params The optimization parameters
    */
-  DoglegOptimizer(const NonlinearFactorGraph& graph,
-      const Ordering& ordering) :
-        DirectOptimizer(SharedGraph(new NonlinearFactorGraph(graph))),
+  DoglegOptimizer(const NonlinearFactorGraph& graph, const Ordering& ordering) :
+    SuccessiveLinearizationOptimizer(SharedGraph(new NonlinearFactorGraph(graph))),
     params_(new DoglegParams()) {}

   /** Standard constructor, requires a nonlinear factor graph, initial
@@ -110,14 +109,11 @@ public:
   DoglegOptimizer(const SharedGraph& graph,
       const DoglegParams& params = DoglegParams(),
       const SharedOrdering& ordering = SharedOrdering()) :
-        DirectOptimizer(graph),
+        SuccessiveLinearizationOptimizer(graph),
         params_(new DoglegParams(params)) {}

   /** Access the parameters */
-  virtual const NonlinearOptimizer::SharedParams& params() const { return params_; }
-
-  /** Access the parameters */
-  const DoglegOptimizer::SharedParams& params() const { return params_; }
+  virtual NonlinearOptimizer::SharedParams params() const { return params_; }

   /// @}

@@ -131,19 +127,18 @@ public:
    * containing the updated variable assignments, which may be retrieved with
    * values().
    */
-  virtual NonlinearOptimizer::SharedState iterate(const SharedState& current) const;
+  virtual NonlinearOptimizer::SharedState iterate(const NonlinearOptimizer::SharedState& current) const;

-  /** Create a copy of the NonlinearOptimizer */
-  virtual NonlinearOptimizer::shared_ptr clone() const {
-    return boost::make_shared<DoglegOptimizer>(*this); }
+  /** Create an initial state with the specified variable assignment values and
+   * all other default state.
+   */
+  virtual NonlinearOptimizer::SharedState initialState(const Values& initialValues) const;

   /// @}

 protected:

-  const SharedParams params_;
-
-  virtual void setParams(const NonlinearOptimizer::SharedParams& newParams) { params_ = newParams; }
+  SharedParams params_;
 };

 }
diff --git a/gtsam/nonlinear/GaussNewtonOptimizer.cpp b/gtsam/nonlinear/GaussNewtonOptimizer.cpp
index e3dd49461..3a5a7aee0 100644
--- a/gtsam/nonlinear/GaussNewtonOptimizer.cpp
+++ b/gtsam/nonlinear/GaussNewtonOptimizer.cpp
@@ -24,6 +24,7 @@ using namespace std;

 namespace gtsam {

+/* ************************************************************************* */
 NonlinearOptimizer::auto_ptr GaussNewtonOptimizer::iterate() const {

   // Linearize graph
@@ -61,4 +62,11 @@ NonlinearOptimizer::auto_ptr GaussNewtonOptimizer::iterate() const {
   return newOptimizer;
 }

+/* ************************************************************************* */
+NonlinearOptimizer::SharedState GaussNewtonOptimizer::initialState(const Values& initialValues) const {
+  SharedState initial = boost::make_shared<GaussNewtonState>();
+  defaultInitialState(initialValues, *initial);
+  return initial;
+}
+
 } /* namespace gtsam */
diff --git a/gtsam/nonlinear/GaussNewtonOptimizer.h b/gtsam/nonlinear/GaussNewtonOptimizer.h
index f9deaeded..8b1d4fbe6 100644
--- a/gtsam/nonlinear/GaussNewtonOptimizer.h
+++ b/gtsam/nonlinear/GaussNewtonOptimizer.h
@@ -18,65 +18,28 @@

 #pragma once

-#include <gtsam/nonlinear/NonlinearOptimizer.h>
+#include <gtsam/nonlinear/SuccessiveLinearizationOptimizer.h>

 namespace gtsam {

 /** Parameters for Gauss-Newton optimization, inherits from
  * NonlinearOptimizationParams.
  */
-class GaussNewtonParams : public NonlinearOptimizerParams {
-public:
-  /** See GaussNewtonParams::elimination */
-  enum Elimination {
-    MULTIFRONTAL,
-    SEQUENTIAL
-  };
+class GaussNewtonParams : public SuccessiveLinearizationParams {
+};

-  /** See GaussNewtonParams::factorization */
-  enum Factorization {
-    LDL,
-    QR,
-  };
-
-  Elimination elimination; ///< The elimination algorithm to use (default: MULTIFRONTAL)
-  Factorization factorization; ///< The numerical factorization (default: LDL)
-
-  GaussNewtonParams() :
-    elimination(MULTIFRONTAL), factorization(LDL) {}
-
-  ~GaussNewtonParams() {}
-
-  virtual void print(const std::string& str = "") const {
-    NonlinearOptimizerParams::print(str);
-    if(elimination == MULTIFRONTAL)
-      std::cout << " elimination method: MULTIFRONTAL\n";
-    else if(elimination == SEQUENTIAL)
-      std::cout << " elimination method: SEQUENTIAL\n";
-    else
-      std::cout << " elimination method: (invalid)\n";
-
-    if(factorization == LDL)
-      std::cout << " factorization method: LDL\n";
-    else if(factorization == QR)
-      std::cout << " factorization method: QR\n";
-    else
-      std::cout << " factorization method: (invalid)\n";
-
-    std::cout.flush();
-  }
+class GaussNewtonState : public SuccessiveLinearizationState {
 };

 /**
  * This class performs Gauss-Newton nonlinear optimization
  * TODO: use make_shared
  */
-class GaussNewtonOptimizer : public NonlinearOptimizer {
+class GaussNewtonOptimizer : public SuccessiveLinearizationOptimizer {

 public:

-  typedef boost::shared_ptr<const GaussNewtonParams> SharedGNParams;
-  typedef boost::shared_ptr<Ordering> SharedOrdering;
+  typedef boost::shared_ptr<const GaussNewtonParams> SharedParams;

   /// @name Standard interface
   /// @{
@@ -89,17 +52,11 @@ public:
    * @param values The initial variable assignments
    * @param params The optimization parameters
    */
-  GaussNewtonOptimizer(const NonlinearFactorGraph& graph, const Values& values,
+  GaussNewtonOptimizer(const NonlinearFactorGraph& graph,
       const GaussNewtonParams& params = GaussNewtonParams(),
       const Ordering& ordering = Ordering()) :
-        NonlinearOptimizer(
-            SharedGraph(new NonlinearFactorGraph(graph)),
-            SharedValues(new Values(values)),
-            SharedGNParams(new GaussNewtonParams(params))),
-        gnParams_(boost::static_pointer_cast<const GaussNewtonParams>(params_)),
-        colamdOrdering_(ordering.size() == 0),
-        ordering_(colamdOrdering_ ?
-            graph_->orderingCOLAMD(*values_) : Ordering::shared_ptr(new Ordering(ordering))) {}
+        SuccessiveLinearizationOptimizer(SharedGraph(new NonlinearFactorGraph(graph))),
+        params_(new GaussNewtonParams(params)) {}

   /** Standard constructor, requires a nonlinear factor graph, initial
    * variable assignments, and optimization parameters.  For convenience this
    * version takes plain objects instead of shared pointers, but internally
    * copies the objects.
    * @param graph The nonlinear factor graph to optimize
    * @param values The initial variable assignments
    * @param params The optimization parameters
    */
-  GaussNewtonOptimizer(const NonlinearFactorGraph& graph, const Values& values,
-      const Ordering& ordering) :
-        NonlinearOptimizer(
-            SharedGraph(new NonlinearFactorGraph(graph)),
-            SharedValues(new Values(values)),
-            SharedGNParams(new GaussNewtonParams())),
-        gnParams_(boost::static_pointer_cast<const GaussNewtonParams>(params_)),
-        colamdOrdering_(ordering.size() == 0),
-        ordering_(colamdOrdering_ ?
-            graph_->orderingCOLAMD(*values_) : Ordering::shared_ptr(new Ordering(ordering))) {}
+  GaussNewtonOptimizer(const NonlinearFactorGraph& graph, const Ordering& ordering) :
+    SuccessiveLinearizationOptimizer(SharedGraph(new NonlinearFactorGraph(graph))),
+    params_(new GaussNewtonParams()) {}

   /** Standard constructor, requires a nonlinear factor graph, initial
    * variable assignments, and optimization parameters.
    * @param graph The nonlinear factor graph to optimize
    * @param values The initial variable assignments
    * @param params The optimization parameters
    */
@@ -126,16 +76,14 @@ public:
-  GaussNewtonOptimizer(const SharedGraph& graph, const SharedValues& values,
+  GaussNewtonOptimizer(const SharedGraph& graph,
       const GaussNewtonParams& params = GaussNewtonParams(),
       const SharedOrdering& ordering = SharedOrdering()) :
-        NonlinearOptimizer(graph, values, SharedGNParams(new GaussNewtonParams(params))),
-        gnParams_(boost::static_pointer_cast<const GaussNewtonParams>(params_)),
-        colamdOrdering_(!ordering || ordering->size() == 0),
-        ordering_(colamdOrdering_ ? graph_->orderingCOLAMD(*values_) : ordering) {}
+        SuccessiveLinearizationOptimizer(graph),
+        params_(new GaussNewtonParams(params)) {}

-  /** Access the variable ordering used by this optimizer */
-  const SharedOrdering& ordering() const { return ordering_; }
+  /** Access the parameters */
+  virtual NonlinearOptimizer::SharedParams params() const { return params_; }

   /// @}

@@ -149,74 +97,18 @@ public:
    * containing the updated variable assignments, which may be retrieved with
    * values().
    */
-  virtual auto_ptr iterate() const;
+  virtual NonlinearOptimizer::SharedState iterate(const NonlinearOptimizer::SharedState& current) const;

-  /** Update the graph, values, and/or parameters, leaving all other state
-   * the same.  Any of these that are empty shared pointers are left unchanged
-   * in the returned optimizer object.  Returns a new updated
-   * NonlinearOptimizer object, the original is not modified.
+  /** Create an initial state with the specified variable assignment values and
+   * all other default state.
    */
-  virtual auto_ptr update(
-      const SharedGraph& newGraph,
-      const SharedValues& newValues = SharedValues(),
-      const SharedParams& newParams = SharedParams()) const {
-    return auto_ptr(new GaussNewtonOptimizer(*this, newGraph, newValues,
-        boost::dynamic_pointer_cast<const GaussNewtonParams>(newParams)));
-  }
-
-  /** Update the ordering, leaving all other state the same.  If newOrdering
-   * is an empty pointer or contains an empty Ordering object
-   * (with zero size), a COLAMD ordering will be computed. Returns a new
-   * NonlinearOptimizer object, the original is not modified.
-   */
-  virtual auto_ptr update(const SharedOrdering& newOrdering) const {
-    return auto_ptr(new GaussNewtonOptimizer(*this, newOrdering)); }
-
-  /** Create a copy of the NonlinearOptimizer */
-  virtual auto_ptr clone() const {
-    return auto_ptr(new GaussNewtonOptimizer(*this));
-  }
+  virtual NonlinearOptimizer::SharedState initialState(const Values& initialValues) const;

   /// @}

 protected:

-  const SharedGNParams gnParams_;
-  const bool colamdOrdering_;
-  const SharedOrdering ordering_;
-
-  /** Protected constructor called by update() to modify the graph, values, or
-   * parameters.  Computes a COLAMD ordering if the optimizer was originally
-   * constructed with an empty ordering, and if the graph is changing.
-   */
-  GaussNewtonOptimizer(const GaussNewtonOptimizer& original, const SharedGraph& newGraph,
-      const SharedValues& newValues, const SharedGNParams& newParams) :
-        NonlinearOptimizer(original, newGraph, newValues, newParams),
-        gnParams_(newParams ? newParams : original.gnParams_),
-        colamdOrdering_(original.colamdOrdering_),
-        ordering_(newGraph && colamdOrdering_ ? graph_->orderingCOLAMD(*values_) : original.ordering_) {}
-
-  /** Protected constructor called by update() to modify the ordering, computing
-   * a COLAMD ordering if the new ordering is empty, and also recomputing the
-   * dimensions.
-   */
-  GaussNewtonOptimizer(
-      const GaussNewtonOptimizer& original, const SharedOrdering& newOrdering) :
-        NonlinearOptimizer(original),
-        gnParams_(original.gnParams_),
-        colamdOrdering_(!newOrdering || newOrdering->size() == 0),
-        ordering_(colamdOrdering_ ? graph_->orderingCOLAMD(*values_) : newOrdering) {}
-
-private:
-
-  // Special constructor for completing an iteration, updates the values and
-  // error, and increments the iteration count.
-  GaussNewtonOptimizer(const GaussNewtonOptimizer& original,
-      const SharedValues& newValues, double newError) :
-        NonlinearOptimizer(original.graph_, newValues, original.params_, newError, original.iterations_+1),
-        gnParams_(original.gnParams_),
-        colamdOrdering_(original.colamdOrdering_),
-        ordering_(original.ordering_) {}
+  const SharedParams params_;
 };

 }
diff --git a/gtsam/nonlinear/LevenbergMarquardtOptimizer.cpp b/gtsam/nonlinear/LevenbergMarquardtOptimizer.cpp
index b90af8348..5657da572 100644
--- a/gtsam/nonlinear/LevenbergMarquardtOptimizer.cpp
+++ b/gtsam/nonlinear/LevenbergMarquardtOptimizer.cpp
@@ -26,7 +26,8 @@ using namespace std;

 namespace gtsam {

-NonlinearOptimizer::auto_ptr LevenbergMarquardtOptimizer::iterate(const SharedState& current) const {
+/* ************************************************************************* */
+NonlinearOptimizer::SharedState LevenbergMarquardtOptimizer::iterate(const NonlinearOptimizer::SharedState& current) const {

   // Linearize graph
   GaussianFactorGraph::shared_ptr linear = graph_->linearize(*values_, *ordering_);
@@ -141,4 +142,12 @@ NonlinearOptimizer::auto_ptr LevenbergMarquardtOptimizer::iterate(const SharedSt
   return newState;
 }

+/* ************************************************************************* */
+NonlinearOptimizer::SharedState LevenbergMarquardtOptimizer::initialState(const Values& initialValues) const {
+  SharedState initial = boost::make_shared<LevenbergMarquardtState>();
+  defaultInitialState(initialValues, *initial);
+  initial->lambda = params_->lambdaInitial;
+  return initial;
+}
+
 } /* namespace gtsam */
diff --git a/gtsam/nonlinear/LevenbergMarquardtOptimizer.h b/gtsam/nonlinear/LevenbergMarquardtOptimizer.h
index 4ccb330f2..058bf347b 100644
--- a/gtsam/nonlinear/LevenbergMarquardtOptimizer.h
+++ b/gtsam/nonlinear/LevenbergMarquardtOptimizer.h
@@ -18,7 +18,7 @@

 #pragma once

-#include <gtsam/nonlinear/DirectOptimizer.h>
+#include <gtsam/nonlinear/SuccessiveLinearizationOptimizer.h>

 namespace gtsam {

@@ -27,7 +27,7 @@ namespace gtsam {
  * common to all nonlinear optimization algorithms. This class also contains
  * all of those parameters.
  */
-class LevenbergMarquardtParams : public DirectOptimizerParams {
+class LevenbergMarquardtParams : public SuccessiveLinearizationParams {
 public:
   /** See LevenbergMarquardtParams::lmVerbosity */
   enum LMVerbosity {
@@ -50,7 +50,7 @@ public:
   virtual ~LevenbergMarquardtParams() {}

   virtual void print(const std::string& str = "") const {
-    DirectOptimizerParams::print(str);
+    SuccessiveLinearizationParams::print(str);
     std::cout << " lambdaInitial: " << lambdaInitial << "\n";
     std::cout << " lambdaFactor: " << lambdaFactor << "\n";
     std::cout << " lambdaUpperBound: " << lambdaUpperBound << "\n";
@@ -61,7 +61,7 @@ public:
 /**
  * State for LevenbergMarquardtOptimizer
  */
-class LevenbergMarquardtState : public NonlinearOptimizerState {
+class LevenbergMarquardtState : public SuccessiveLinearizationState {
 public:

   double lambda;

@@ -72,11 +72,11 @@ public:
 /**
  * This class performs Levenberg-Marquardt nonlinear optimization
  * TODO: use make_shared
  */
-class LevenbergMarquardtOptimizer : public DirectOptimizer {
+class LevenbergMarquardtOptimizer : public SuccessiveLinearizationOptimizer {

 public:

-  typedef boost::shared_ptr<LevenbergMarquardtParams> SharedParams;
+  typedef boost::shared_ptr<const LevenbergMarquardtParams> SharedParams;
   typedef boost::shared_ptr<LevenbergMarquardtState> SharedState;
   typedef boost::shared_ptr<LevenbergMarquardtOptimizer> shared_ptr;

@@ -94,7 +94,7 @@ public:
   LevenbergMarquardtOptimizer(const NonlinearFactorGraph& graph,
       const LevenbergMarquardtParams& params = LevenbergMarquardtParams(),
       const Ordering& ordering = Ordering()) :
-        DirectOptimizer(SharedGraph(new NonlinearFactorGraph(graph))),
+        SuccessiveLinearizationOptimizer(SharedGraph(new NonlinearFactorGraph(graph))),
         params_(new LevenbergMarquardtParams(params)) {}

   /** Standard constructor, requires a nonlinear factor graph, initial
@@ -107,7 +107,7 @@ public:
    */
   LevenbergMarquardtOptimizer(const NonlinearFactorGraph& graph,
       const Ordering& ordering) :
-        DirectOptimizer(SharedGraph(new NonlinearFactorGraph(graph))),
+        SuccessiveLinearizationOptimizer(SharedGraph(new NonlinearFactorGraph(graph))),
         params_(new LevenbergMarquardtParams()) {}

   /** Standard constructor, requires a nonlinear factor graph, initial
@@ -119,14 +119,11 @@ public:
   LevenbergMarquardtOptimizer(const SharedGraph& graph,
       const LevenbergMarquardtParams& params = LevenbergMarquardtParams(),
       const SharedOrdering& ordering = SharedOrdering()) :
-        DirectOptimizer(graph),
+        SuccessiveLinearizationOptimizer(graph),
         params_(new LevenbergMarquardtParams(params)) {}

   /** Access the parameters */
-  virtual const NonlinearOptimizer::SharedParams& params() const { return params_; }
-
-  /** Access the parameters */
-  const LevenbergMarquardtOptimizer::SharedParams& params() const { return params_; }
+  virtual NonlinearOptimizer::SharedParams params() const { return params_; }

   /// @}

@@ -140,11 +137,12 @@ public:
    * containing the updated variable assignments, which may be retrieved with
    * values().
    */
-  virtual NonlinearOptimizer::SharedState iterate(const SharedState& current) const;
+  virtual NonlinearOptimizer::SharedState iterate(const NonlinearOptimizer::SharedState& current) const;

-  /** Create a copy of the NonlinearOptimizer */
-  virtual NonlinearOptimizer::shared_ptr clone() const {
-    return boost::make_shared<LevenbergMarquardtOptimizer>(*this); }
+  /** Create an initial state with the specified variable assignment values and
+   * all other default state.
+   */
+  virtual NonlinearOptimizer::SharedState initialState(const Values& initialValues) const;

   /// @}

 protected:

   typedef boost::shared_ptr<std::vector<size_t> > SharedDimensions;

-  const SharedParams params_;
+  SharedParams params_;
   const SharedDimensions dimensions_;
 };
diff --git a/gtsam/nonlinear/NonlinearOptimizer.cpp b/gtsam/nonlinear/NonlinearOptimizer.cpp
index dd91563f3..cfe8a4945 100644
--- a/gtsam/nonlinear/NonlinearOptimizer.cpp
+++ b/gtsam/nonlinear/NonlinearOptimizer.cpp
@@ -82,6 +82,13 @@ NonlinearOptimizer::SharedState NonlinearOptimizer::defaultOptimize(const Shared
   return next;
 }

+/* ************************************************************************* */
+void NonlinearOptimizer::defaultInitialState(const Values& initialValues, NonlinearOptimizerState& initial) const {
+  initial.values = initialValues;
+  initial.error = graph_->error(initialValues);
+  initial.iterations = 0;
+}
+
 /* ************************************************************************* */
 bool checkConvergence(double relativeErrorTreshold, double absoluteErrorTreshold,
     double errorThreshold, double currentError, double newError,
diff --git a/gtsam/nonlinear/NonlinearOptimizer.h b/gtsam/nonlinear/NonlinearOptimizer.h
index 52effe514..c4f8be2fc 100644
--- a/gtsam/nonlinear/NonlinearOptimizer.h
+++ b/gtsam/nonlinear/NonlinearOptimizer.h
@@ -78,8 +78,12 @@ public:
   /** The number of optimization iterations performed. */
   unsigned int iterations;

-  /** Virtual destructor to enable RTTI */
+  /** Virtual destructor */
   virtual ~NonlinearOptimizerState() {}
+
+  /** Clone the state (i.e. make a copy of the derived class) */
+  virtual boost::shared_ptr<NonlinearOptimizerState> clone() {
+    return boost::shared_ptr<NonlinearOptimizerState>(new NonlinearOptimizerState(*this)); }
 };

@@ -185,15 +188,19 @@ public:
    */
   virtual SharedState optimize(const SharedState& initial) const { return defaultOptimize(initial); }

+  SharedState optimize(const Values& initialization) const { return optimize(initialState(initialization)); }
+
   /** Shortcut to optimize and return the resulting Values of the maximum-
    * likelihood estimate.  To access statistics and information such as the
    * final error and number of iterations, use optimize() instead.
    * @return The maximum-likelihood estimate.
    */
-  virtual SharedValues optimized(const SharedState& initial) const { return this->optimize(initial)->values(); }
+  virtual Values optimized(const SharedState& initial) const { return this->optimize(initial)->values; }
+
+  Values optimized(const Values& initialization) const { return optimized(initialState(initialization)); }

   /** Retrieve the parameters. */
-  virtual const SharedParams& params() const = 0;
+  virtual SharedParams params() const = 0;

   /// @}

@@ -209,20 +216,10 @@ public:
    */
   virtual SharedState iterate(const SharedState& current) const = 0;

-  /** Update the graph, leaving all other parts of the optimizer unchanged,
-   * returns a new updated NonlinearOptimzier object, the original is not
-   * modified.
+  /** Create an initial state from a variable assignment Values, with all
+   * other state values at their default initial values.
    */
-  virtual shared_ptr update(const SharedGraph& newGraph) const;
-
-  /** Update the parameters, leaving all other parts of the optimizer unchanged,
-   * returns a new updated NonlinearOptimzier object, the original is not
-   * modified.
- */ - const shared_ptr update(const SharedParams& newParams) const; - - /** Create a copy of the NonlinearOptimizer */ - virtual shared_ptr clone() const = 0; + virtual SharedState initialState(const Values& initialValues) const = 0; /// @} @@ -235,13 +232,12 @@ protected: */ SharedState defaultOptimize(const SharedState& initial) const; - /** Modify the parameters in-place (not for external use) */ - virtual void setParams(const NonlinearOptimizer::SharedParams& newParams); + /** Initialize a state, using the current error and 0 iterations */ + void defaultInitialState(SharedState& initial) const; /** Constructor for initial construction of base classes. */ - NonlinearOptimizer(const SharedGraph& graph) : - graph_(graph) {} + NonlinearOptimizer(const SharedGraph& graph) : graph_(graph) {} }; diff --git a/gtsam/nonlinear/SuccessiveLinearizationOptimizer.cpp b/gtsam/nonlinear/SuccessiveLinearizationOptimizer.cpp new file mode 100644 index 000000000..2303c203a --- /dev/null +++ b/gtsam/nonlinear/SuccessiveLinearizationOptimizer.cpp @@ -0,0 +1,41 @@ +/* ---------------------------------------------------------------------------- + + * GTSAM Copyright 2010, Georgia Tech Research Corporation, + * Atlanta, Georgia 30332-0415 + * All Rights Reserved + * Authors: Frank Dellaert, et al. (see THANKS for the full author list) + + * See LICENSE for the license information + + * -------------------------------------------------------------------------- */ + +/** + * @file DirectOptimizer.cpp + * @brief + * @author Richard Roberts + * @date Apr 1, 2012 + */ + +#include +#include + +namespace gtsam { + +/* ************************************************************************* */ +const SuccessiveLinearizationOptimizer::SharedOrdering& SuccessiveLinearizationOptimizer::ordering(const Values& values) const { + + if(!ordering_) { + SharedParams params = + boost::dynamic_pointer_cast(params()); + + // If we're using a COLAMD ordering, compute it + if(!params.ordering) + ordering_ = graph_->orderingCOLAMD(values); + else + ordering_ = params.ordering; + } + + return ordering_; +} + +} /* namespace gtsam */ diff --git a/gtsam/nonlinear/DirectOptimizer.h b/gtsam/nonlinear/SuccessiveLinearizationOptimizer.h similarity index 59% rename from gtsam/nonlinear/DirectOptimizer.h rename to gtsam/nonlinear/SuccessiveLinearizationOptimizer.h index dac5a0004..e8c74d1a8 100644 --- a/gtsam/nonlinear/DirectOptimizer.h +++ b/gtsam/nonlinear/SuccessiveLinearizationOptimizer.h @@ -10,7 +10,7 @@ * -------------------------------------------------------------------------- */ /** - * @file DirectOptimizer.h + * @file SuccessiveLinearizationOptimizer.h * @brief * @author Richard Roberts * @date Apr 1, 2012 @@ -18,31 +18,65 @@ #pragma once +#include + #include namespace gtsam { -class DirectOptimizerParams : public NonlinearOptimizerParams { +/** + * This is an intermediate base class for NonlinearOptimizers that use direct + * solving, i.e. factorization. This class is here to reduce code duplication + * for storing the ordering, having factorization and elimination parameters, + * etc. 
+ */ +class SuccessiveLinearizationOptimizer : public NonlinearOptimizer { public: - /** See DoglegParams::elimination */ + + class SuccessiveLinearizationParams; + class SuccessiveLinearizationState; + + typedef boost::shared_ptr SharedParams; + typedef boost::shared_ptr shared_ptr; + +protected: + + typedef boost::shared_ptr SharedOrdering; + + SuccessiveLinearizationOptimizer(const SharedGraph& graph) : NonlinearOptimizer(graph) {} + + const SharedOrdering& ordering(const Values& values) const; + +private: + + SharedOrdering ordering_; +}; + +class SuccessiveLinearizationParams : public NonlinearOptimizerParams { +public: + /** See SuccessiveLinearizationParams::elimination */ enum Elimination { MULTIFRONTAL, SEQUENTIAL }; - /** See DoglegParams::factorization */ + /** See SuccessiveLinearizationParams::factorization */ enum Factorization { LDL, QR, }; + /** See SuccessiveLinearizationParams::ordering */ + typedef boost::shared_ptr SharedOrdering; + Elimination elimination; ///< The elimination algorithm to use (default: MULTIFRONTAL) Factorization factorization; ///< The numerical factorization (default: LDL) + SharedOrdering ordering; ///< The variable elimination ordering, or empty to use COLAMD (default: empty) - DirectOptimizerParams() : + SuccessiveLinearizationParams() : elimination(MULTIFRONTAL), factorization(LDL) {} - virtual ~DirectOptimizerParams() {} + virtual ~SuccessiveLinearizationParams() {} virtual void print(const std::string& str = "") const { NonlinearOptimizerParams::print(str); @@ -60,40 +94,16 @@ public: else std::cout << " factorization method: (invalid)\n"; + if(ordering) + std::cout << " ordering: custom\n"; + else + std::cout << " ordering: COLAMD\n"; + std::cout.flush(); } }; -/** - * This is an intermediate base class for NonlinearOptimizers that use direct - * solving, i.e. factorization. This class is here to reduce code duplication - * for storing the ordering, having factorization and elimination parameters, - * etc. - */ -class DirectOptimizer : public NonlinearOptimizer { -public: - typedef boost::shared_ptr SharedOrdering; - typedef boost::shared_ptr shared_ptr; - - virtual NonlinearOptimizer::shared_ptr update(const SharedGraph& newGraph) const; - - virtual shared_ptr update(const SharedOrdering& newOrdering) const; - - virtual shared_ptr update(const SharedGraph& newGraph, const SharedOrdering& newOrdering) const; - - /** Access the variable ordering used by this optimizer */ - const SharedOrdering& ordering() const { return ordering_; } - -protected: - - const bool colamdOrdering_; - const SharedOrdering ordering_; - - DirectOptimizer(const SharedGraph& graph, - const SharedOrdering ordering = SharedOrdering()) : - NonlinearOptimizer(graph), - colamdOrdering_(!ordering || ordering->size() == 0), - ordering_(colamdOrdering_ ? graph_->orderingCOLAMD(*values_) : ordering) {} +class SuccessiveLinearizationState : public NonlinearOptimizerState { }; } /* namespace gtsam */ diff --git a/gtsam/slam/pose2SLAM.h b/gtsam/slam/pose2SLAM.h index 7fb86a46a..319dcb1c0 100644 --- a/gtsam/slam/pose2SLAM.h +++ b/gtsam/slam/pose2SLAM.h @@ -97,7 +97,7 @@ namespace pose2SLAM { /// Optimize Values optimize(const Values& initialEstimate) { - return *LevenbergMarquardtOptimizer(*this, initialEstimate).optimized(); + return *LevenbergMarquardtOptimizer(*this).optimized(initialEstimate); } };