Stop saying 'converged' when the error increased, and add nonlinear optimizer warnings (if verbose) when maximum iterations are reached

release/4.3a0
Richard Roberts 2011-11-28 20:34:28 +00:00
parent cc858224aa
commit 7164a6d790
3 changed files with 14 additions and 5 deletions


@@ -183,7 +183,6 @@ typename DoglegOptimizerImpl::IterationResult DoglegOptimizerImpl::Iterate(
       const double dx_d_norm = result.dx_d.vector().norm();
       const double newDelta = std::max(Delta, 3.0 * dx_d_norm); // Compute new Delta
       if(mode == ONE_STEP_PER_ITERATION)
         stay = false;   // If not searching, just return with the new Delta
       else if(mode == SEARCH_EACH_ITERATION) {
@@ -217,10 +216,12 @@ typename DoglegOptimizerImpl::IterationResult DoglegOptimizerImpl::Iterate(
         Delta *= 0.5;
         if(Delta > 1e-5)
           stay = true;
-        else
+        else {
+          if(verbose) cout << "Warning: Dog leg stopping because cannot decrease error with minimum Delta" << endl;
           stay = false;
         }
       }
+    }
   // dx_d and f_error have already been filled in during the loop
   result.Delta = Delta;
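
The dog-leg hunks above only warn once the trust region has collapsed: Delta is halved after every rejected step until it hits the 1e-5 floor. A minimal standalone sketch of that back-off rule, assuming a hypothetical shrinkStep helper (not GTSAM's API):

    #include <iostream>

    // Sketch of the dog-leg back-off above (illustrative, not GTSAM's code):
    // halve Delta after a rejected step; once Delta falls below the 1e-5
    // floor, warn (only if verbose) and stop instead of looping forever.
    bool shrinkStep(double& Delta, bool errorDecreased, bool verbose) {
      if (errorDecreased)
        return false;   // step accepted, leave the inner loop
      Delta *= 0.5;     // shrink the trust region and retry
      if (Delta > 1e-5)
        return true;    // stay: try again with the smaller Delta
      if (verbose)
        std::cout << "Warning: Dog leg stopping because cannot decrease error"
                     " with minimum Delta" << std::endl;
      return false;     // give up at the minimum Delta
    }

    int main() {
      double Delta = 1.0;
      while (shrinkStep(Delta, /*errorDecreased=*/false, /*verbose=*/true)) {}
    }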


@@ -200,6 +200,8 @@ namespace gtsam {
       // The more adventurous lambda was worse too, so make lambda more conservative
       // and keep the same values.
       if(lambdaMode >= Parameters::BOUNDED && lambda >= 1.0e5) {
+        if(verbosity >= Parameters::ERROR)
+          cout << "Warning: Levenberg-Marquardt giving up because cannot decrease error with maximum lambda" << endl;
         break;
       } else {
         lambda *= factor;
@@ -212,6 +214,8 @@ namespace gtsam {
       // The more adventurous lambda was worse too, so make lambda more conservative
       // and keep the same values.
       if(lambdaMode >= Parameters::BOUNDED && lambda >= 1.0e5) {
+        if(verbosity >= Parameters::ERROR)
+          cout << "Warning: Levenberg-Marquardt giving up because cannot decrease error with maximum lambda" << endl;
         break;
       } else {
         lambda *= factor;
@@ -306,7 +310,7 @@ namespace gtsam {
     S solver(*graph_->linearize(*values_, *ordering_));
     DoglegOptimizerImpl::IterationResult result = DoglegOptimizerImpl::Iterate(
         parameters_->lambda_, DoglegOptimizerImpl::ONE_STEP_PER_ITERATION, *solver.eliminate(),
-        *graph_, *values_, *ordering_, error_);
+        *graph_, *values_, *ordering_, error_, parameters_->verbosity_ > Parameters::ERROR);
     shared_values newValues(new T(values_->retract(result.dx_d, *ordering_)));
     cout << "newValues: " << newValues.get() << endl;
     return newValuesErrorLambda_(newValues, result.f_error, result.Delta);
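
Both Levenberg-Marquardt hunks guard the same failure mode: in BOUNDED mode, lambda grows by a factor after each rejected step, and the optimizer gives up once lambda reaches 1.0e5 without the error decreasing. A self-contained sketch of that guard; the 1.0e5 cap and the warning text come from the diff, while the loop and the remaining names are assumptions:

    #include <iostream>

    // Sketch of the bounded Levenberg-Marquardt guard above: grow lambda
    // after each rejected step, but give up (with a warning) once it
    // reaches the cap.
    int main() {
      const double maxLambda = 1.0e5;  // bound used in the diff
      const double factor = 10.0;      // assumed damping growth factor
      double lambda = 1e-5;
      const bool verbose = true;

      while (true) {
        const bool errorDecreased = false;  // pretend every damped step fails
        if (errorDecreased)
          break;                            // a real loop would accept the step here
        if (lambda >= maxLambda) {
          if (verbose)
            std::cout << "Warning: Levenberg-Marquardt giving up because cannot"
                         " decrease error with maximum lambda" << std::endl;
          break;                            // bounded mode: stop rather than diverge
        }
        lambda *= factor;                   // be more conservative and retry
      }
    }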


@@ -69,8 +69,12 @@ bool check_convergence(
   }
   bool converged = (relativeErrorTreshold && (relativeDecrease < relativeErrorTreshold))
       || (absoluteDecrease < absoluteErrorTreshold);
-  if (verbosity >= 1 && converged)
+  if (verbosity >= 1 && converged) {
+    if(absoluteDecrease >= 0.0)
       cout << "converged" << endl;
+    else
+      cout << "Warning: stopping nonlinear iterations because error increased" << endl;
+  }
   return converged;
 }
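
The check_convergence change keeps the same stopping test but no longer prints "converged" when the error actually went up. A compilable sketch of the test as it reads after this commit, assuming absoluteDecrease = currentError - newError and keeping the diff's relativeErrorTreshold spelling; the surrounding driver is illustrative:

    #include <iostream>

    // Sketch of check_convergence's stopping test: a small relative or
    // absolute decrease terminates the iterations, but a negative decrease
    // (the error increased) is reported as a warning, not as convergence.
    bool checkConvergence(double currentError, double newError,
                          double relativeErrorTreshold,  // spelling as in the diff
                          double absoluteErrorTreshold,
                          int verbosity) {
      double absoluteDecrease = currentError - newError;
      double relativeDecrease = absoluteDecrease / currentError;
      bool converged =
          (relativeErrorTreshold && (relativeDecrease < relativeErrorTreshold))
          || (absoluteDecrease < absoluteErrorTreshold);
      if (verbosity >= 1 && converged) {
        if (absoluteDecrease >= 0.0)
          std::cout << "converged" << std::endl;
        else
          std::cout << "Warning: stopping nonlinear iterations because error"
                       " increased" << std::endl;
      }
      return converged;
    }

    int main() {
      checkConvergence(1.0, 1.2, 1e-5, 1e-5, 1);  // error rose: prints the warning
    }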