cleaned code to log LM iterations

release/4.3a0
Luca 2014-05-02 16:10:51 -04:00
parent 9f499d2257
commit e8a4767591
2 changed files with 23 additions and 13 deletions


@@ -192,6 +192,19 @@ GaussianFactorGraph::shared_ptr LevenbergMarquardtOptimizer::buildDampedSystem(
  return dampedPtr;
}
/* ************************************************************************* */
// Log current error/lambda to file
inline void LevenbergMarquardtOptimizer::writeLogFile(double currentError) {
  if (!params_.logFile.empty()) {
    ofstream os(params_.logFile.c_str(), ios::app);
    boost::posix_time::ptime currentTime = boost::posix_time::microsec_clock::universal_time();
    os << /*inner iterations*/ state_.totalNumberInnerIterations << ","
       << 1e-6 * (currentTime - state_.startTime).total_microseconds() << ","
       << /*current error*/ currentError << "," << state_.lambda << ","
       << /*outer iterations*/ state_.iterations << endl;
  }
}
/* ************************************************************************* */
void LevenbergMarquardtOptimizer::iterate() {
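The new writeLogFile helper appends one comma-separated row per call: inner-iteration count, elapsed seconds since state_.startTime, current error, lambda, and outer-iteration count. As a rough illustration only (not part of this commit), a standalone reader for that format could look like the sketch below; the log file name and the LMLogRow struct are assumptions.

// Hypothetical standalone reader for the CSV rows produced by writeLogFile.
// Column order mirrors the ostream statement above: inner iterations,
// elapsed seconds, error, lambda, outer iterations. File name is assumed.
#include <fstream>
#include <iostream>
#include <sstream>
#include <string>
#include <vector>

struct LMLogRow {
  int innerIterations;
  double elapsedSeconds;
  double error;
  double lambda;
  int outerIterations;
};

int main() {
  std::ifstream in("lm_iterations.csv");  // assumed log file name
  std::vector<LMLogRow> rows;
  std::string line;
  while (std::getline(in, line)) {
    std::stringstream ss(line);
    LMLogRow row;
    char c;  // consumes the commas
    if (ss >> row.innerIterations >> c >> row.elapsedSeconds >> c >> row.error
           >> c >> row.lambda >> c >> row.outerIterations)
      rows.push_back(row);
  }
  for (const LMLogRow& r : rows)
    std::cout << "outer " << r.outerIterations << "  error " << r.error
              << "  lambda " << r.lambda << std::endl;
}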
@@ -206,6 +219,9 @@ void LevenbergMarquardtOptimizer::iterate() {
cout << "linearizing = " << endl; cout << "linearizing = " << endl;
GaussianFactorGraph::shared_ptr linear = linearize(); GaussianFactorGraph::shared_ptr linear = linearize();
if(state_.totalNumberInnerIterations==0) // write initial error
writeLogFile(state_.error);
  // Keep increasing lambda until we make progress
  while (true) {
@@ -288,36 +304,29 @@ void LevenbergMarquardtOptimizer::iterate() {
      }
    }
    // Log current error/lambda to file
    if (!params_.logFile.empty()) {
      ofstream os(params_.logFile.c_str(), ios::app);
      boost::posix_time::ptime currentTime = boost::posix_time::microsec_clock::universal_time();
      os << /*inner iterations*/ state_.totalNumberInnerIterations << ","
         << 1e-6 * (currentTime - state_.startTime).total_microseconds() << ","
         << /*current error*/ state_.error << "," << state_.lambda << ","
         << /*outer iterations*/ state_.iterations << endl;
    }
    ++state_.totalNumberInnerIterations;
    if (step_is_successful) { // we have successfully decreased the cost and we have good modelFidelity
      state_.values.swap(newValues);
      state_.error = newError;
      decreaseLambda(modelFidelity);
      writeLogFile(state_.error);
      break;
    } else if (!stopSearchingLambda) { // we failed to solve the system or we had no decrease in cost
      if (lmVerbosity >= LevenbergMarquardtParams::TRYLAMBDA)
        cout << "increasing lambda" << endl;
      increaseLambda();
      writeLogFile(state_.error);
      // check if lambda is too big
      if (state_.lambda >= params_.lambdaUpperBound) {
        if (nloVerbosity >= NonlinearOptimizerParams::TERMINATION)
          cout << "Warning: Levenberg-Marquardt giving up because "
                  "cannot decrease error with maximum lambda" << endl;
        break;
      }
    } else { // the change in the cost is very small and it is not worth trying bigger lambdas
      writeLogFile(state_.error);
      if (lmVerbosity >= LevenbergMarquardtParams::TRYLAMBDA)
        cout << "Levenberg-Marquardt: stopping as relative cost reduction is small" << endl;
      break;
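For readers unfamiliar with the control flow in iterate(), the following is a simplified, self-contained sketch (not GTSAM code) of the same lambda search on a toy 1-D problem: a damped step is attempted, lambda is decreased on success and increased on failure, and the search gives up once lambda reaches an upper bound. All names and constants here are illustrative.

// Toy, self-contained illustration of the damping loop above (not GTSAM API).
#include <iostream>

int main() {
  // 1-D cost f(x) = (x - 3)^2 + 1 with gradient 2(x - 3) and Hessian 2.
  auto cost = [](double x) { return (x - 3.0) * (x - 3.0) + 1.0; };
  auto gradient = [](double x) { return 2.0 * (x - 3.0); };
  const double hessian = 2.0;

  double x = 10.0, lambda = 1e-5, error = cost(x);
  const double lambdaFactor = 10.0, lambdaUpperBound = 1e5;

  for (int outer = 0; outer < 20; ++outer) {
    bool stepAccepted = false;
    // Keep increasing lambda until we make progress
    while (true) {
      double dx = -gradient(x) / (hessian + lambda);  // damped step
      double newError = cost(x + dx);
      if (newError < error) {          // success: accept step, relax damping
        x += dx;
        error = newError;
        lambda /= lambdaFactor;
        stepAccepted = true;
        break;
      }
      lambda *= lambdaFactor;          // failure: damp more and retry
      if (lambda >= lambdaUpperBound) {
        std::cout << "giving up: cannot decrease error with maximum lambda\n";
        break;
      }
    }
    if (!stepAccepted) break;
  }
  std::cout << "x = " << x << "  error = " << error << std::endl;
}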


@@ -113,7 +113,6 @@ public:
  inline void setDiagonalDamping(bool flag) {
    diagonalDamping = flag;
  }
  inline void setUseFixedLambdaFactor(bool flag) {
    useFixedLambdaFactor_ = flag;
  }
@@ -258,6 +257,8 @@ public:
  GaussianFactorGraph::shared_ptr buildDampedSystem(const GaussianFactorGraph& linear);
  friend class ::NonlinearOptimizerMoreOptimizationTest;
  void writeLogFile(double currentError);
  /// @}
protected:
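As the header change shows, logging is driven entirely by params_.logFile. A minimal usage sketch follows, assuming the usual GTSAM factor-graph setup; the factor types, keys, and file name are illustrative, and logFile is assumed to be a directly assignable field of LevenbergMarquardtParams (it is the member read by writeLogFile above).

// Minimal usage sketch: enable per-iteration logging via params.logFile.
// Factors, keys, noise values, and the file name are illustrative only.
#include <gtsam/geometry/Pose2.h>
#include <gtsam/nonlinear/LevenbergMarquardtOptimizer.h>
#include <gtsam/nonlinear/NonlinearFactorGraph.h>
#include <gtsam/nonlinear/Values.h>
#include <gtsam/slam/BetweenFactor.h>
#include <gtsam/slam/PriorFactor.h>

using namespace gtsam;

int main() {
  NonlinearFactorGraph graph;
  noiseModel::Diagonal::shared_ptr model =
      noiseModel::Diagonal::Sigmas(Vector3(0.1, 0.1, 0.05));
  graph.add(PriorFactor<Pose2>(1, Pose2(0, 0, 0), model));
  graph.add(BetweenFactor<Pose2>(1, 2, Pose2(1, 0, 0), model));

  Values initial;
  initial.insert(1, Pose2(0.2, -0.1, 0.05));
  initial.insert(2, Pose2(1.3, 0.2, -0.1));

  LevenbergMarquardtParams params;
  params.logFile = "lm_iterations.csv";  // triggers writeLogFile() each iteration
  LevenbergMarquardtOptimizer optimizer(graph, initial, params);
  Values result = optimizer.optimize();
  result.print("result");
  return 0;
}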