Merge pull request #586 from borglab/feature/lm-iteration-hook-impl
User-provided hook within the internal loop of non-linear optimizers
commit 653c69fccd (release/4.3a0)
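In short, this PR adds an optional callback that the non-linear optimizers invoke after every internal iteration. A minimal usage sketch (not part of the diff; `graph` and `initial` are hypothetical placeholders for an existing factor graph and initial values):

```cpp
#include <gtsam/nonlinear/LevenbergMarquardtOptimizer.h>
#include <iostream>

// `graph` and `initial` stand in for an existing NonlinearFactorGraph and Values.
void runWithHook(const gtsam::NonlinearFactorGraph& graph,
                 const gtsam::Values& initial) {
  gtsam::LevenbergMarquardtParams params;
  // The new hook: called after each internal LM iteration.
  params.iterationHook = [](size_t iter, double errorBefore, double errorAfter) {
    std::cout << "iter " << iter << ": " << errorBefore
              << " -> " << errorAfter << "\n";
  };
  gtsam::LevenbergMarquardtOptimizer(graph, initial, params).optimize();
}
```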
```diff
@@ -200,6 +200,10 @@ boost::tuple<V, int> nonlinearConjugateGradient(const S &system,
     currentValues = system.advance(prevValues, alpha, direction);
     currentError = system.error(currentValues);
 
+    // User hook:
+    if (params.iterationHook)
+      params.iterationHook(iteration, prevError, currentError);
+
     // Maybe show output
     if (params.verbosity >= NonlinearOptimizerParams::ERROR)
       std::cout << "iteration: " << iteration << ", currentError: " << currentError << std::endl;
```
```diff
@@ -97,7 +97,11 @@ void NonlinearOptimizer::defaultOptimize() {
 
     // Update newError for either printouts or conditional-end checks:
     newError = error();
 
+    // User hook:
+    if (params.iterationHook)
+      params.iterationHook(iterations(), currentError, newError);
+
     // Maybe show output
     if (params.verbosity >= NonlinearOptimizerParams::VALUES)
       values().print("newValues");
```
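Since the hook receives the iteration index together with the error before and after that iteration, a caller can record a full convergence trace without touching the optimizer's verbosity settings. A sketch under the same assumptions as above (hypothetical `graph`/`initial`):

```cpp
#include <gtsam/nonlinear/LevenbergMarquardtOptimizer.h>
#include <vector>

// Record the error after every iteration, e.g. to plot convergence later.
std::vector<double> errorTrace(const gtsam::NonlinearFactorGraph& graph,
                               const gtsam::Values& initial) {
  std::vector<double> errors;
  gtsam::LevenbergMarquardtParams params;
  params.iterationHook = [&errors](size_t /*iter*/, double /*errorBefore*/,
                                   double errorAfter) {
    errors.push_back(errorAfter);
  };
  gtsam::LevenbergMarquardtOptimizer(graph, initial, params).optimize();
  return errors;
}
```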
```diff
@@ -81,7 +81,7 @@ protected:
 
 public:
   /** A shared pointer to this class */
-  typedef boost::shared_ptr<const NonlinearOptimizer> shared_ptr;
+  using shared_ptr = boost::shared_ptr<const NonlinearOptimizer>;
 
   /// @name Standard interface
   /// @{
```
```diff
@@ -38,21 +38,12 @@ public:
     SILENT, TERMINATION, ERROR, VALUES, DELTA, LINEAR
   };
 
-  size_t maxIterations; ///< The maximum iterations to stop iterating (default 100)
-  double relativeErrorTol; ///< The maximum relative error decrease to stop iterating (default 1e-5)
-  double absoluteErrorTol; ///< The maximum absolute error decrease to stop iterating (default 1e-5)
-  double errorTol; ///< The maximum total error to stop iterating (default 0.0)
-  Verbosity verbosity; ///< The printing verbosity during optimization (default SILENT)
-  Ordering::OrderingType orderingType; ///< The method of ordering use during variable elimination (default COLAMD)
+  size_t maxIterations = 100; ///< The maximum iterations to stop iterating (default 100)
+  double relativeErrorTol = 1e-5; ///< The maximum relative error decrease to stop iterating (default 1e-5)
+  double absoluteErrorTol = 1e-5; ///< The maximum absolute error decrease to stop iterating (default 1e-5)
+  double errorTol = 0.0; ///< The maximum total error to stop iterating (default 0.0)
+  Verbosity verbosity = SILENT; ///< The printing verbosity during optimization (default SILENT)
+  Ordering::OrderingType orderingType = Ordering::COLAMD; ///< The method of ordering use during variable elimination (default COLAMD)
 
-  NonlinearOptimizerParams() :
-      maxIterations(100), relativeErrorTol(1e-5), absoluteErrorTol(1e-5), errorTol(
-          0.0), verbosity(SILENT), orderingType(Ordering::COLAMD),
-          linearSolverType(MULTIFRONTAL_CHOLESKY) {}
-
-  virtual ~NonlinearOptimizerParams() {
-  }
-  virtual void print(const std::string& str = "") const;
-
   size_t getMaxIterations() const { return maxIterations; }
   double getRelativeErrorTol() const { return relativeErrorTol; }
```
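The in-class initializers above mean a default-constructed parameter struct already carries the documented defaults, which is what makes the old member-initializer-list constructor redundant. A hypothetical check, not part of the PR:

```cpp
#include <gtsam/nonlinear/NonlinearOptimizerParams.h>
#include <cassert>

int main() {
  gtsam::NonlinearOptimizerParams params;  // defaults now come from the in-class initializers
  assert(params.maxIterations == 100);
  assert(params.relativeErrorTol == 1e-5);
  assert(params.verbosity == gtsam::NonlinearOptimizerParams::SILENT);
  return 0;
}
```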
```diff
@@ -71,6 +62,37 @@ public:
   static Verbosity verbosityTranslator(const std::string &s) ;
   static std::string verbosityTranslator(Verbosity value) ;
 
+  /** Type for an optional user-provided hook to be called after each
+   * internal optimizer iteration. See iterationHook below. */
+  using IterationHook = std::function<
+      void(size_t /*iteration*/, double /*errorBefore*/, double /*errorAfter*/)>;
+
+  /** Optional user-provided iteration hook to be called after each
+   * optimization iteration (Default: none).
+   * Note that `IterationHook` is defined as a std::function<> with this
+   * signature:
+   * \code
+   * void(size_t iteration, double errorBefore, double errorAfter)
+   * \endcode
+   * which allows binding by means of a reference to a regular function:
+   * \code
+   * void foo(size_t iteration, double errorBefore, double errorAfter);
+   * // ...
+   * lmOpts.iterationHook = &foo;
+   * \endcode
+   * or to a C++11 lambda (preferred if you need to capture additional
+   * context variables, such as the optimizer object itself, the factor graph,
+   * etc.):
+   * \code
+   * lmOpts.iterationHook = [&](size_t iter, double oldError, double newError)
+   * {
+   *    // ...
+   * };
+   * \endcode
+   * or to the result of a properly-formed `std::bind` call.
+   */
+  IterationHook iterationHook;
+
   /** See NonlinearOptimizerParams::linearSolverType */
   enum LinearSolverType {
     MULTIFRONTAL_CHOLESKY,
```
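The doc comment mentions `std::bind` but only illustrates the free-function and lambda forms. A hedged sketch of binding a member function instead; `ProgressLogger` and `onIteration` are made-up names for illustration:

```cpp
#include <gtsam/nonlinear/LevenbergMarquardtParams.h>
#include <functional>
#include <iostream>

// Hypothetical observer class; only the member-function signature matters.
struct ProgressLogger {
  void onIteration(size_t iter, double errorBefore, double errorAfter) {
    std::cout << "iter " << iter << ": " << errorBefore << " -> " << errorAfter << "\n";
  }
};

void attachHook(gtsam::LevenbergMarquardtParams& lmOpts, ProgressLogger& logger) {
  using namespace std::placeholders;
  // Equivalent to the lambda form, but bound to a member function:
  lmOpts.iterationHook =
      std::bind(&ProgressLogger::onIteration, &logger, _1, _2, _3);
}
```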
```diff
@@ -81,10 +103,16 @@ public:
     CHOLMOD, /* Experimental Flag */
   };
 
-  LinearSolverType linearSolverType; ///< The type of linear solver to use in the nonlinear optimizer
+  LinearSolverType linearSolverType = MULTIFRONTAL_CHOLESKY; ///< The type of linear solver to use in the nonlinear optimizer
   boost::optional<Ordering> ordering; ///< The optional variable elimination ordering, or empty to use COLAMD (default: empty)
   IterativeOptimizationParameters::shared_ptr iterativeParams; ///< The container for iterativeOptimization parameters. used in CG Solvers.
 
+  NonlinearOptimizerParams() = default;
+  virtual ~NonlinearOptimizerParams() {
+  }
+
+  virtual void print(const std::string& str = "") const;
+
   inline bool isMultifrontal() const {
     return (linearSolverType == MULTIFRONTAL_CHOLESKY)
         || (linearSolverType == MULTIFRONTAL_QR);
```
```diff
@@ -566,6 +566,58 @@ TEST( NonlinearOptimizer, logfile )
   // EXPECT(actual.str()==expected.str());
 }
 
+/* ************************************************************************* */
+TEST( NonlinearOptimizer, iterationHook_LM )
+{
+  NonlinearFactorGraph fg(example::createReallyNonlinearFactorGraph());
+
+  Point2 x0(3,3);
+  Values c0;
+  c0.insert(X(1), x0);
+
+  // Levenberg-Marquardt
+  LevenbergMarquardtParams lmParams;
+  size_t lastIterCalled = 0;
+  lmParams.iterationHook = [&](size_t iteration, double oldError, double newError)
+  {
+    // Tests:
+    lastIterCalled = iteration;
+    EXPECT(newError<oldError);
+
+    // Example of evolution printout:
+    //std::cout << "iter: " << iteration << " error: " << oldError << " => " << newError <<"\n";
+  };
+  LevenbergMarquardtOptimizer(fg, c0, lmParams).optimize();
+
+  EXPECT(lastIterCalled>5);
+}
+/* ************************************************************************* */
+TEST( NonlinearOptimizer, iterationHook_CG )
+{
+  NonlinearFactorGraph fg(example::createReallyNonlinearFactorGraph());
+
+  Point2 x0(3,3);
+  Values c0;
+  c0.insert(X(1), x0);
+
+  // Nonlinear conjugate gradient
+  NonlinearConjugateGradientOptimizer::Parameters cgParams;
+  size_t lastIterCalled = 0;
+  cgParams.iterationHook = [&](size_t iteration, double oldError, double newError)
+  {
+    // Tests:
+    lastIterCalled = iteration;
+    EXPECT(newError<oldError);
+
+    // Example of evolution printout:
+    //std::cout << "iter: " << iteration << " error: " << oldError << " => " << newError <<"\n";
+  };
+  NonlinearConjugateGradientOptimizer(fg, c0, cgParams).optimize();
+
+  EXPECT(lastIterCalled>5);
+}
+
+
 /* ************************************************************************* */
 //// Minimal traits example
 struct MyType : public Vector3 {
```