Stopped saying 'converged' and added nonlinear optimizer warnings (if verbose) when iteration limits are reached
parent cc858224aa
commit 7164a6d790
@@ -183,7 +183,6 @@ typename DoglegOptimizerImpl::IterationResult DoglegOptimizerImpl::Iterate(
   const double dx_d_norm = result.dx_d.vector().norm();
   const double newDelta = std::max(Delta, 3.0 * dx_d_norm); // Compute new Delta
-
 
   if(mode == ONE_STEP_PER_ITERATION)
     stay = false;   // If not searching, just return with the new Delta
   else if(mode == SEARCH_EACH_ITERATION) {
@@ -217,8 +216,10 @@ typename DoglegOptimizerImpl::IterationResult DoglegOptimizerImpl::Iterate(
       Delta *= 0.5;
       if(Delta > 1e-5)
         stay = true;
-      else
+      else {
+        if(verbose) cout << "Warning: Dog leg stopping because cannot decrease error with minimum Delta" << endl;
         stay = false;
+      }
     }
   }
 
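For context on the two hunks above: on a failed step the dogleg search halves the trust-region radius Delta and retries, and with this change it now explains itself (when verbose) instead of silently giving up once Delta bottoms out at 1e-5. A minimal self-contained sketch of that control flow, with a toy step that never improves the error standing in for the real dogleg step (not GTSAM's actual DoglegOptimizerImpl API):

#include <iostream>

int main() {
  const bool verbose = true;
  double Delta = 1.0;        // trust-region radius
  const double error = 10.0; // current nonlinear error
  bool stay = true;

  while (stay) {
    const double newError = error + 1.0; // toy step: pretend it never helps
    if (newError < error) {
      stay = false;                      // step accepted: stop searching
    } else {
      Delta *= 0.5;                      // step rejected: shrink the trust region
      if (Delta > 1e-5) {
        stay = true;                     // retry with the smaller Delta
      } else {
        if (verbose)
          std::cout << "Warning: Dog leg stopping because cannot decrease error"
                       " with minimum Delta" << std::endl;
        stay = false;                    // give up, but now say why
      }
    }
  }
  return 0;
}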
@@ -200,6 +200,8 @@ namespace gtsam {
       // The more adventurous lambda was worse too, so make lambda more conservative
       // and keep the same values.
       if(lambdaMode >= Parameters::BOUNDED && lambda >= 1.0e5) {
+        if(verbosity >= Parameters::ERROR)
+          cout << "Warning: Levenberg-Marquardt giving up because cannot decrease error with maximum lambda" << endl;
         break;
       } else {
         lambda *= factor;
@@ -212,6 +214,8 @@ namespace gtsam {
       // The more adventurous lambda was worse too, so make lambda more conservative
       // and keep the same values.
       if(lambdaMode >= Parameters::BOUNDED && lambda >= 1.0e5) {
+        if(verbosity >= Parameters::ERROR)
+          cout << "Warning: Levenberg-Marquardt giving up because cannot decrease error with maximum lambda" << endl;
         break;
       } else {
         lambda *= factor;
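The Levenberg-Marquardt side gets the same treatment in both hunks: in BOUNDED mode, once lambda has been inflated to 1e5 without a successful step, the loop breaks, and it now reports why at ERROR verbosity. A small sketch of that damping schedule with stand-in enums and a step that never succeeds (hypothetical names, not GTSAM's Parameters class):

#include <iostream>

enum Verbosity { SILENT = 0, ERROR = 1 };       // toy stand-in for Parameters
enum LambdaMode { FIXED = 0, BOUNDED = 1 };

int main() {
  double lambda = 1.0;
  const double factor = 10.0;             // lambda growth factor on a failed step
  const LambdaMode lambdaMode = BOUNDED;
  const Verbosity verbosity = ERROR;

  while (true) {
    const bool stepImprovedError = false;  // pretend no step ever helps
    if (stepImprovedError) break;

    if (lambdaMode >= BOUNDED && lambda >= 1.0e5) {
      if (verbosity >= ERROR)
        std::cout << "Warning: Levenberg-Marquardt giving up because cannot"
                     " decrease error with maximum lambda" << std::endl;
      break;                               // bounded mode: stop inflating lambda
    } else {
      lambda *= factor;                    // be more conservative and retry
    }
  }
  return 0;
}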
@@ -306,7 +310,7 @@ namespace gtsam {
     S solver(*graph_->linearize(*values_, *ordering_));
     DoglegOptimizerImpl::IterationResult result = DoglegOptimizerImpl::Iterate(
         parameters_->lambda_, DoglegOptimizerImpl::ONE_STEP_PER_ITERATION, *solver.eliminate(),
-        *graph_, *values_, *ordering_, error_);
+        *graph_, *values_, *ordering_, error_, parameters_->verbosity_ > Parameters::ERROR);
     shared_values newValues(new T(values_->retract(result.dx_d, *ordering_)));
     cout << "newValues: " << newValues.get() << endl;
     return newValuesErrorLambda_(newValues, result.f_error, result.Delta);
@@ -69,8 +69,12 @@ bool check_convergence(
   }
   bool converged = (relativeErrorTreshold && (relativeDecrease < relativeErrorTreshold))
       || (absoluteDecrease < absoluteErrorTreshold);
-  if (verbosity >= 1 && converged)
-    cout << "converged" << endl;
+  if (verbosity >= 1 && converged) {
+    if(absoluteDecrease >= 0.0)
+      cout << "converged" << endl;
+    else
+      cout << "Warning: stopping nonlinear iterations because error increased" << endl;
+  }
   return converged;
 }
 
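The check_convergence hunk is the fix the commit message describes: absoluteDecrease can be negative when the error went up, and the old code printed "converged" in that case too. The sketch below isolates the decision with a simplified signature (hypothetical; GTSAM's real check_convergence takes more parameters, and "Treshold" preserves the spelling of the original identifiers):

#include <iostream>

// Simplified sketch of the convergence test above, not GTSAM's full signature.
bool check_convergence(double relativeErrorTreshold, double absoluteErrorTreshold,
                       double currentError, double newError, int verbosity) {
  const double absoluteDecrease = currentError - newError;
  const double relativeDecrease = absoluteDecrease / currentError;
  const bool converged =
      (relativeErrorTreshold && (relativeDecrease < relativeErrorTreshold))
      || (absoluteDecrease < absoluteErrorTreshold);
  if (verbosity >= 1 && converged) {
    if (absoluteDecrease >= 0.0)
      std::cout << "converged" << std::endl;
    else  // error increased: stop iterating, but do not claim convergence
      std::cout << "Warning: stopping nonlinear iterations because error increased"
                << std::endl;
  }
  return converged;
}

int main() {
  check_convergence(1e-5, 1e-5, 1.0, 0.999999, 1);  // tiny decrease -> "converged"
  check_convergence(1e-5, 1e-5, 1.0, 1.5, 1);       // error went up -> warning
  return 0;
}

Running the sketch prints "converged" for the first call and the new warning for the second, which is exactly the distinction the old single-branch print could not make.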