diff --git a/gtsam/nonlinear/NonlinearConjugateGradientOptimizer.h b/gtsam/nonlinear/NonlinearConjugateGradientOptimizer.h
index abf6b257a..a85f27425 100644
--- a/gtsam/nonlinear/NonlinearConjugateGradientOptimizer.h
+++ b/gtsam/nonlinear/NonlinearConjugateGradientOptimizer.h
@@ -200,6 +200,10 @@ boost::tuple<V, int> nonlinearConjugateGradient(const S &system,
     currentValues = system.advance(prevValues, alpha, direction);
     currentError = system.error(currentValues);
 
+    // User hook:
+    if (params.iterationHook)
+      params.iterationHook(iteration, prevError, currentError);
+
     // Maybe show output
     if (params.verbosity >= NonlinearOptimizerParams::ERROR)
       std::cout << "iteration: " << iteration << ", currentError: " << currentError << std::endl;
diff --git a/gtsam/nonlinear/NonlinearOptimizer.cpp b/gtsam/nonlinear/NonlinearOptimizer.cpp
index fd9961742..0d7e9e17f 100644
--- a/gtsam/nonlinear/NonlinearOptimizer.cpp
+++ b/gtsam/nonlinear/NonlinearOptimizer.cpp
@@ -97,7 +97,11 @@ void NonlinearOptimizer::defaultOptimize() {
 
     // Update newError for either printouts or conditional-end checks:
     newError = error();
-
+
+    // User hook:
+    if (params.iterationHook)
+      params.iterationHook(iterations(), currentError, newError);
+
     // Maybe show output
     if (params.verbosity >= NonlinearOptimizerParams::VALUES)
       values().print("newValues");
diff --git a/gtsam/nonlinear/NonlinearOptimizer.h b/gtsam/nonlinear/NonlinearOptimizer.h
index 9935f44ce..6fe369dd3 100644
--- a/gtsam/nonlinear/NonlinearOptimizer.h
+++ b/gtsam/nonlinear/NonlinearOptimizer.h
@@ -81,7 +81,7 @@ protected:
 
 public:
   /** A shared pointer to this class */
-  typedef boost::shared_ptr<NonlinearOptimizer> shared_ptr;
+  using shared_ptr = boost::shared_ptr<NonlinearOptimizer>;
 
   /// @name Standard interface
   /// @{
diff --git a/gtsam/nonlinear/NonlinearOptimizerParams.h b/gtsam/nonlinear/NonlinearOptimizerParams.h
index 65fdd1c92..53805f5f0 100644
--- a/gtsam/nonlinear/NonlinearOptimizerParams.h
+++ b/gtsam/nonlinear/NonlinearOptimizerParams.h
@@ -33,22 +33,19 @@ namespace gtsam {
  */
 class GTSAM_EXPORT NonlinearOptimizerParams {
 public:
+  NonlinearOptimizerParams() = default;
+
   /** See NonlinearOptimizerParams::verbosity */
   enum Verbosity {
     SILENT, TERMINATION, ERROR, VALUES, DELTA, LINEAR
   };
 
-  size_t maxIterations; ///< The maximum iterations to stop iterating (default 100)
-  double relativeErrorTol; ///< The maximum relative error decrease to stop iterating (default 1e-5)
-  double absoluteErrorTol; ///< The maximum absolute error decrease to stop iterating (default 1e-5)
-  double errorTol; ///< The maximum total error to stop iterating (default 0.0)
-  Verbosity verbosity; ///< The printing verbosity during optimization (default SILENT)
-  Ordering::OrderingType orderingType; ///< The method of ordering use during variable elimination (default COLAMD)
-
-  NonlinearOptimizerParams() :
-      maxIterations(100), relativeErrorTol(1e-5), absoluteErrorTol(1e-5), errorTol(
-          0.0), verbosity(SILENT), orderingType(Ordering::COLAMD),
-          linearSolverType(MULTIFRONTAL_CHOLESKY) {}
+  size_t maxIterations = 100; ///< The maximum iterations to stop iterating (default 100)
+  double relativeErrorTol = 1e-5; ///< The maximum relative error decrease to stop iterating (default 1e-5)
+  double absoluteErrorTol = 1e-5; ///< The maximum absolute error decrease to stop iterating (default 1e-5)
+  double errorTol = 0.0; ///< The maximum total error to stop iterating (default 0.0)
+  Verbosity verbosity = SILENT; ///< The printing verbosity during optimization (default SILENT)
+  Ordering::OrderingType orderingType = Ordering::COLAMD; ///< The method of ordering use during variable elimination (default COLAMD)
 
   virtual ~NonlinearOptimizerParams() {
   }
@@ -71,6 +68,15 @@ public:
   static Verbosity verbosityTranslator(const std::string &s) ;
   static std::string verbosityTranslator(Verbosity value) ;
 
+  /** Type for an optional user-provided hook to be called after each
+   * internal optimizer iteration */
+  using IterationHook = std::function<
+      void(size_t /*iteration*/, double/*errorBefore*/, double/*errorAfter*/)>;
+
+  /** Optional user-provided iteration hook to be called after each
+   * optimization iteration (Default: empty) */
+  IterationHook iterationHook;
+
   /** See NonlinearOptimizerParams::linearSolverType */
   enum LinearSolverType {
     MULTIFRONTAL_CHOLESKY,
@@ -81,7 +87,7 @@ public:
     CHOLMOD, /* Experimental Flag */
   };
 
-  LinearSolverType linearSolverType; ///< The type of linear solver to use in the nonlinear optimizer
+  LinearSolverType linearSolverType = MULTIFRONTAL_CHOLESKY; ///< The type of linear solver to use in the nonlinear optimizer
   boost::optional<Ordering> ordering; ///< The optional variable elimination ordering, or empty to use COLAMD (default: empty)
   IterativeOptimizationParameters::shared_ptr iterativeParams; ///< The container for iterativeOptimization parameters. used in CG Solvers.
 
diff --git a/tests/testNonlinearOptimizer.cpp b/tests/testNonlinearOptimizer.cpp
index dc19801a2..6415174d5 100644
--- a/tests/testNonlinearOptimizer.cpp
+++ b/tests/testNonlinearOptimizer.cpp
@@ -566,6 +566,58 @@ TEST( NonlinearOptimizer, logfile )
 //  EXPECT(actual.str()==expected.str());
 }
 
+/* ************************************************************************* */
+TEST( NonlinearOptimizer, iterationHook_LM )
+{
+  NonlinearFactorGraph fg(example::createReallyNonlinearFactorGraph());
+
+  Point2 x0(3,3);
+  Values c0;
+  c0.insert(X(1), x0);
+
+  // Levenberg-Marquardt
+  LevenbergMarquardtParams lmParams;
+  size_t lastIterCalled = 0;
+  lmParams.iterationHook = [&](size_t iteration, double oldError, double newError)
+  {
+    // Tests:
+    lastIterCalled = iteration;
+    EXPECT(newError < oldError);
+    // Example of a per-iteration printout:
+    // std::cout << "iter: " << iteration << " error: " << oldError
+    //           << " -> " << newError << "\n";
+  };
+  LevenbergMarquardtOptimizer(fg, c0, lmParams).optimize();
+
+  EXPECT(lastIterCalled>5);
+}
+/* ************************************************************************* */
+TEST( NonlinearOptimizer, iterationHook_CG )
+{
+  NonlinearFactorGraph fg(example::createReallyNonlinearFactorGraph());
+
+  Point2 x0(3,3);
+  Values c0;
+  c0.insert(X(1), x0);
+
+  // Nonlinear conjugate gradient
+  NonlinearConjugateGradientOptimizer::Parameters cgParams;
+  size_t lastIterCalled = 0;
+  cgParams.iterationHook = [&](size_t iteration, double oldError, double newError)
+  {
+    // Tests:
+    lastIterCalled = iteration;
+    EXPECT(newError < oldError);
+    // Example of a per-iteration printout:
+    // std::cout << "iter: " << iteration << " error: " << oldError
+    //           << " -> " << newError << "\n";
+  };
+  NonlinearConjugateGradientOptimizer(fg, c0, cgParams).optimize();
+
+  EXPECT(lastIterCalled>5);
+}
+
+
 /* ************************************************************************* */
 //// Minimal traits example
 struct MyType : public Vector3 {
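
For reference, below is a minimal usage sketch of the iterationHook parameter added by this change. The helper function optimizeWithHook is a hypothetical name, and the graph/initial values are assumed to come from the caller; the hook assignment and optimizer calls follow the API exercised by the tests above.

#include <gtsam/nonlinear/LevenbergMarquardtOptimizer.h>
#include <gtsam/nonlinear/NonlinearFactorGraph.h>
#include <gtsam/nonlinear/Values.h>
#include <iostream>

using namespace gtsam;

// Hypothetical helper: run Levenberg-Marquardt on any graph/initial estimate
// while logging the error evolution through the new iterationHook parameter.
Values optimizeWithHook(const NonlinearFactorGraph& graph, const Values& initial) {
  LevenbergMarquardtParams params;
  // The hook is invoked once per internal iteration with the optimizer's
  // iteration counter and the total error before and after that iteration.
  params.iterationHook = [](size_t iteration, double errorBefore, double errorAfter) {
    std::cout << "iter " << iteration << ": " << errorBefore
              << " -> " << errorAfter << std::endl;
  };
  return LevenbergMarquardtOptimizer(graph, initial, params).optimize();
}

Because the hook lives on NonlinearOptimizerParams, the same pattern works unchanged for the other optimizers whose parameter structs derive from it (e.g. NonlinearConjugateGradientOptimizer::Parameters, as in the CG test above).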