diff --git a/gtsam/nonlinear/NonlinearOptimizer-inl.h b/gtsam/nonlinear/NonlinearOptimizer-inl.h
index f2f52b318..be57f0870 100644
--- a/gtsam/nonlinear/NonlinearOptimizer-inl.h
+++ b/gtsam/nonlinear/NonlinearOptimizer-inl.h
@@ -292,60 +292,61 @@ namespace gtsam {
 
   double next_error = error_;
   shared_values next_values = values_;
+  shared_solver solver = solver_;
 
   while(true) {
-    if (verbosity >= Parameters::TRYLAMBDA) cout << "trying lambda = " << lambda << endl;
+    if (verbosity >= Parameters::TRYLAMBDA) cout << "trying lambda = " << lambda << endl;
 
-    // add prior-factors
-    typename L::shared_ptr damped(new L(linear));
-    {
-      double sigma = 1.0 / sqrt(lambda);
-      damped->reserve(damped->size() + dimensions_->size());
-      // for each of the variables, add a prior
-      for(Index j=0; j<dimensions_->size(); ++j) {
-        size_t dim = (*dimensions_)[j];
-        Matrix A = eye(dim);
-        Vector b = zero(dim);
-        SharedDiagonal model = noiseModel::Isotropic::Sigma(dim,sigma);
-        GaussianFactor::shared_ptr prior(new JacobianFactor(j, A, b, model));
-        damped->push_back(prior);
-      }
-    }
-    if (verbosity >= Parameters::DAMPED) damped->print("damped");
+    // add prior-factors
+    typename L::shared_ptr damped(new L(linear));
+    {
+      double sigma = 1.0 / sqrt(lambda);
+      damped->reserve(damped->size() + dimensions_->size());
+      // for each of the variables, add a prior
+      for(Index j=0; j<dimensions_->size(); ++j) {
+        size_t dim = (*dimensions_)[j];
+        Matrix A = eye(dim);
+        Vector b = zero(dim);
+        SharedDiagonal model = noiseModel::Isotropic::Sigma(dim,sigma);
+        GaussianFactor::shared_ptr prior(new JacobianFactor(j, A, b, model));
+        damped->push_back(prior);
+      }
+    }
+    if (verbosity >= Parameters::DAMPED) damped->print("damped");
 
-    // solve
-    S solver(*damped); // not solver_ !!
+    // solve
+    solver.reset(new S(*damped));
 
-    VectorValues delta = *solver.optimize();
-    if (verbosity >= Parameters::TRYDELTA) delta.print("delta");
+    VectorValues delta = *solver->optimize();
+    if (verbosity >= Parameters::TRYDELTA) delta.print("delta");
 
-    // update values
-    shared_values newValues(new C(values_->expmap(delta, *ordering_))); // TODO: updateValues
+    // update values
+    shared_values newValues(new C(values_->expmap(delta, *ordering_))); // TODO: updateValues
 
-    // create new optimization state with more adventurous lambda
-    double error = graph_->error(*newValues);
+    // create new optimization state with more adventurous lambda
+    double error = graph_->error(*newValues);
 
-    if (verbosity >= Parameters::TRYLAMBDA) cout << "next error = " << error << endl;
+    if (verbosity >= Parameters::TRYLAMBDA) cout << "next error = " << error << endl;
 
-    if( error <= error_ ) {
-      next_values = newValues;
-      next_error = error;
-      lambda /= factor;
-      break;
-    }
-    else {
-      // Either we're not cautious, or the same lambda was worse than the current error.
-      // The more adventurous lambda was worse too, so make lambda more conservative
-      // and keep the same values.
-      if(lambdaMode >= Parameters::BOUNDED && lambda >= 1.0e5) {
-        break;
-      } else {
-        lambda *= factor;
-      }
-    }
+    if( error <= error_ ) {
+      next_values = newValues;
+      next_error = error;
+      lambda /= factor;
+      break;
+    } else {
+      // Either we're not cautious, or the same lambda was worse than the current error.
+      // The more adventurous lambda was worse too, so make lambda more conservative
+      // and keep the same values.
+      if(lambdaMode >= Parameters::BOUNDED && lambda >= 1.0e5) {
+        break;
+      } else {
+        lambda *= factor;
+      }
+    }
   } // end while
 
-  return newValuesErrorLambda_(next_values, next_error, lambda);
+  return NonlinearOptimizer(graph_, next_values, next_error, ordering_, solver,
+      parameters_->newLambda_(lambda), dimensions_, iterations_);
 }
 
 /* ************************************************************************* */
diff --git a/tests/testNonlinearOptimizer.cpp b/tests/testNonlinearOptimizer.cpp
index b3ed87049..0e74ee996 100644
--- a/tests/testNonlinearOptimizer.cpp
+++ b/tests/testNonlinearOptimizer.cpp
@@ -187,13 +187,13 @@ TEST( NonlinearOptimizer, optimize_LM_recursive )
   Point2 xstar(0,0);
   example::Values cstar;
   cstar.insert(simulated2D::PoseKey(1), xstar);
-  DOUBLES_EQUAL(0.0,fg->error(cstar),0.0);
+  EXPECT_DOUBLES_EQUAL(0.0,fg->error(cstar),0.0);
 
   // test error at initial = [(1-cos(3))^2 + (sin(3))^2]*50 =
   Point2 x0(3,3);
   boost::shared_ptr<example::Values> c0(new example::Values);
   c0->insert(simulated2D::PoseKey(1), x0);
-  DOUBLES_EQUAL(199.0,fg->error(*c0),1e-3);
+  EXPECT_DOUBLES_EQUAL(199.0,fg->error(*c0),1e-3);
 
   // optimize parameters
   shared_ptr<Ordering> ord(new Ordering());
@@ -207,7 +207,13 @@ TEST( NonlinearOptimizer, optimize_LM_recursive )
 
   // Levenberg-Marquardt
   Optimizer actual2 = optimizer.levenbergMarquardtRecursive();
-  DOUBLES_EQUAL(0,fg->error(*(actual2.values())),tol);
+  EXPECT_DOUBLES_EQUAL(0,fg->error(*(actual2.values())),tol);
+
+  // calculate the marginal
+  Matrix actualCovariance; Vector mean;
+  boost::tie(mean, actualCovariance) = actual2.marginalCovariance("x1");
+  Matrix expectedCovariance = Matrix_(2,2, 8.60817108, 0.0, 0.0, 0.01);
+  EXPECT(assert_equal(expectedCovariance, actualCovariance, tol));
 }
 
 /* ************************************************************************* */
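Note on the change: the damped solver is now heap-allocated and kept in `solver`, which is passed into the returned NonlinearOptimizer; that appears to be what lets the new test query `actual2.marginalCovariance("x1")` after optimization. For readers following the try-lambda loop, here is a minimal, self-contained sketch of the same policy on a scalar problem. This is plain C++, not GTSAM API; `factor` and the 1.0e5 bound mirror the patch, everything else is illustrative. The zero-mean prior with sigma = 1/sqrt(lambda) that the patch adds per variable contributes exactly the lambda term of the classic damped step (J^T J + lambda I) delta = -J^T r:

// Minimal sketch (NOT GTSAM code) of the try-lambda policy in the hunk above,
// applied to a 1-D least-squares problem: minimize 0.5*r(x)^2, r(x) = x^2 - 2.
#include <cmath>
#include <iostream>

int main() {
  const double factor = 10.0;  // lambda up/down factor, as in the patch
  double x = 3.0;              // initial estimate
  double lambda = 1e-5;        // initial damping

  for (int iter = 0; iter < 20; ++iter) {
    const double r = x * x - 2.0;      // residual
    const double J = 2.0 * x;          // Jacobian dr/dx
    const double error = 0.5 * r * r;  // current error, like graph_->error()

    while (true) {
      // Damped normal equations (J^T J + lambda) delta = -J^T r; the prior
      // factors with sigma = 1/sqrt(lambda) add exactly the lambda term.
      const double delta = -J * r / (J * J + lambda);
      const double xNew = x + delta;   // expmap is plain addition in 1-D
      const double rNew = xNew * xNew - 2.0;
      const double errorNew = 0.5 * rNew * rNew;

      if (errorNew <= error) {
        x = xNew;          // accept the step ...
        lambda /= factor;  // ... and try a more adventurous lambda next time
        break;
      } else if (lambda >= 1.0e5) {
        break;             // BOUNDED mode: stop retrying, keep current values
      } else {
        lambda *= factor;  // step was worse: be more conservative
      }
    }
  }
  std::cout << "x = " << x << ", sqrt(2) = " << std::sqrt(2.0) << std::endl;
  return 0;
}

With these update rules, a rejected step never moves the values: only lambda grows until either the damped step finally reduces the error or, in BOUNDED mode, the 1.0e5 cutoff keeps the current estimate, matching the "keep the same values" branch of the patch.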