Merged in feature/lm-verbose-summary (pull request #126)

Add SUMMARY verbose level to LM.

commit 8e5bc26ac9 (release/4.3a0)
@@ -42,6 +42,8 @@ LevenbergMarquardtParams::VerbosityLM LevenbergMarquardtParams::verbosityLMTranslator(
   boost::algorithm::to_upper(s);
   if (s == "SILENT")
     return LevenbergMarquardtParams::SILENT;
+  if (s == "SUMMARY")
+    return LevenbergMarquardtParams::SUMMARY;
   if (s == "LAMBDA")
     return LevenbergMarquardtParams::LAMBDA;
   if (s == "TRYLAMBDA")
@@ -65,6 +67,9 @@ std::string LevenbergMarquardtParams::verbosityLMTranslator(
   case LevenbergMarquardtParams::SILENT:
     s = "SILENT";
     break;
+  case LevenbergMarquardtParams::SUMMARY:
+    s = "SUMMARY";
+    break;
   case LevenbergMarquardtParams::TERMINATION:
     s = "TERMINATION";
     break;
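For context, here is a minimal usage sketch (not part of the diff) showing how the new level would be selected, either directly on the params' verbosityLM field or through the string translator updated above; the factor graph and initial values are assumed to be built elsewhere.

// Minimal sketch, assuming the usual GTSAM headers and a problem built elsewhere.
#include <gtsam/nonlinear/LevenbergMarquardtOptimizer.h>
#include <gtsam/nonlinear/NonlinearFactorGraph.h>
#include <gtsam/nonlinear/Values.h>

gtsam::Values optimizeWithSummary(const gtsam::NonlinearFactorGraph& graph,
                                  const gtsam::Values& initial) {
  gtsam::LevenbergMarquardtParams params;
  // Set the new level directly on the enum field...
  params.verbosityLM = gtsam::LevenbergMarquardtParams::SUMMARY;
  // ...or go through the string translator touched in the hunks above.
  params.verbosityLM = gtsam::LevenbergMarquardtParams::verbosityLMTranslator("SUMMARY");
  gtsam::LevenbergMarquardtOptimizer optimizer(graph, initial, params);
  return optimizer.optimize();
}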
@@ -219,9 +224,15 @@ void LevenbergMarquardtOptimizer::iterate() {
     cout << "linearizing = " << endl;
   GaussianFactorGraph::shared_ptr linear = linearize();

-  if(state_.totalNumberInnerIterations==0) // write initial error
+  if(state_.totalNumberInnerIterations==0) { // write initial error
     writeLogFile(state_.error);

+    if (lmVerbosity == LevenbergMarquardtParams::SUMMARY) {
+      cout << "Initial error: " << state_.error << ", values: " << state_.values.size()
+          << std::endl;
+    }
+  }
+
   // Keep increasing lambda until we make make progress
   while (true) {

@@ -248,6 +259,8 @@ void LevenbergMarquardtOptimizer::iterate() {
       systemSolvedSuccessfully = false;
     }

+    double linearizedCostChange = 0,
+        newlinearizedError = 0;
     if (systemSolvedSuccessfully) {
       params_.reuse_diagonal_ = true;

@@ -257,9 +270,9 @@ void LevenbergMarquardtOptimizer::iterate() {
         delta.print("delta");

       // cost change in the linearized system (old - new)
-      double newlinearizedError = linear->error(delta);
+      newlinearizedError = linear->error(delta);

-      double linearizedCostChange = state_.error - newlinearizedError;
+      linearizedCostChange = state_.error - newlinearizedError;
       if (lmVerbosity >= LevenbergMarquardtParams::TRYLAMBDA)
         cout << "newlinearizedError = " << newlinearizedError <<
             " linearizedCostChange = " << linearizedCostChange << endl;
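The two hunks above hoist newlinearizedError and linearizedCostChange out of the if (systemSolvedSuccessfully) block so that the new per-iteration SUMMARY print further down the loop can still read them (at their initial value 0) when the linear solve fails. A generic sketch of the scoping issue, not GTSAM code:

// Generic illustration: a value declared inside the if-block would not exist
// for the reporting line that follows it, so both accumulators are declared
// (and zero-initialized) before the block instead.
#include <iostream>

int main() {
  bool solved = false;                  // stand-in for systemSolvedSuccessfully
  double costChange = 0, newError = 0;  // hoisted: defined whether or not we solve
  if (solved) {
    newError = 1.0;                     // would come from the linear system
    costChange = 2.0 - newError;
  }
  std::cout << "new error = " << newError << ", delta = " << costChange
            << ", success = " << solved << std::endl;  // prints zeros on failure
  return 0;
}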
@@ -304,6 +317,12 @@ void LevenbergMarquardtOptimizer::iterate() {
         }
       }

+      if (lmVerbosity == LevenbergMarquardtParams::SUMMARY) {
+        cout << "[" << state_.iterations << "]: " << "new error = " << newlinearizedError
+            << ", delta = " << linearizedCostChange << ", lambda = " << state_.lambda
+            << ", success = " << systemSolvedSuccessfully << std::endl;
+      }
+
       ++state_.totalNumberInnerIterations;

       if (step_is_successful) { // we have successfully decreased the cost and we have good modelFidelity
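For reference, a small standalone sketch (not from the commit) that reproduces the format of the SUMMARY line emitted once per inner iteration, with placeholder numbers; note that success streams as 0/1 because the flag is a plain bool and std::boolalpha is not set.

// Illustrative only: mimics the SUMMARY line format with placeholder values.
#include <iostream>

int main() {
  int iterations = 3;          // stand-in for state_.iterations
  double newError = 1.23e-2;   // stand-in for newlinearizedError
  double delta = 4.56e-3;      // stand-in for linearizedCostChange
  double lambda = 1e-5;        // stand-in for state_.lambda
  bool success = true;         // stand-in for systemSolvedSuccessfully
  std::cout << "[" << iterations << "]: " << "new error = " << newError
            << ", delta = " << delta << ", lambda = " << lambda
            << ", success = " << success << std::endl;
  // Prints: [3]: new error = 0.0123, delta = 0.00456, lambda = 1e-05, success = 1
  return 0;
}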
@@ -320,7 +339,8 @@ void LevenbergMarquardtOptimizer::iterate() {

       // check if lambda is too big
       if (state_.lambda >= params_.lambdaUpperBound) {
-        if (nloVerbosity >= NonlinearOptimizerParams::TERMINATION)
+        if (nloVerbosity >= NonlinearOptimizerParams::TERMINATION ||
+            lmVerbosity == LevenbergMarquardtParams::SUMMARY)
           cout << "Warning: Levenberg-Marquardt giving up because "
               "cannot decrease error with maximum lambda" << endl;
         break;
@@ -38,7 +38,7 @@ class GTSAM_EXPORT LevenbergMarquardtParams: public NonlinearOptimizerParams {
 public:
   /** See LevenbergMarquardtParams::lmVerbosity */
   enum VerbosityLM {
-    SILENT = 0, TERMINATION, LAMBDA, TRYLAMBDA, TRYCONFIG, DAMPED, TRYDELTA
+    SILENT = 0, SUMMARY, TERMINATION, LAMBDA, TRYLAMBDA, TRYCONFIG, DAMPED, TRYDELTA
   };

   static VerbosityLM verbosityLMTranslator(const std::string &s);
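Because SUMMARY is inserted right after SILENT, every later enumerator shifts up by one numerically, while ordered checks such as lmVerbosity >= LevenbergMarquardtParams::TRYLAMBDA keep their meaning; the SUMMARY output itself is gated with == in the hunks above, so it appears only at exactly that level. A small sketch (nothing assumed beyond the header touched above) that prints the resulting values:

// Sketch only: show the integer values of the verbosity levels after the change.
#include <iostream>
#include <gtsam/nonlinear/LevenbergMarquardtOptimizer.h>

int main() {
  typedef gtsam::LevenbergMarquardtParams P;
  std::cout << "SILENT=" << P::SILENT << " SUMMARY=" << P::SUMMARY
            << " TERMINATION=" << P::TERMINATION << " LAMBDA=" << P::LAMBDA
            << " TRYLAMBDA=" << P::TRYLAMBDA << std::endl;
  // Expected: SILENT=0 SUMMARY=1 TERMINATION=2 LAMBDA=3 TRYLAMBDA=4
  return 0;
}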