diff --git a/gtsam/nonlinear/DoglegOptimizer.cpp b/gtsam/nonlinear/DoglegOptimizer.cpp
index 1e76886bd..03da76eb6 100644
--- a/gtsam/nonlinear/DoglegOptimizer.cpp
+++ b/gtsam/nonlinear/DoglegOptimizer.cpp
@@ -64,14 +64,20 @@ void DoglegOptimizer::iterate(void) {
   if ( params_.isMultifrontal() ) {
     GaussianBayesTree bt = *linear->eliminateMultifrontal(*params_.ordering, params_.getEliminationFunction());
-    result = DoglegOptimizerImpl::Iterate(state_.Delta, DoglegOptimizerImpl::ONE_STEP_PER_ITERATION, bt, graph_, state_.values, state_.error, dlVerbose);
+    VectorValues dx_u = bt.optimizeGradientSearch();
+    VectorValues dx_n = bt.optimize();
+    result = DoglegOptimizerImpl::Iterate(state_.Delta, DoglegOptimizerImpl::ONE_STEP_PER_ITERATION,
+        dx_u, dx_n, bt, graph_, state_.values, state_.error, dlVerbose);
   }
   else if ( params_.isSequential() ) {
     GaussianBayesNet bn = *linear->eliminateSequential(*params_.ordering, params_.getEliminationFunction());
-    result = DoglegOptimizerImpl::Iterate(state_.Delta, DoglegOptimizerImpl::ONE_STEP_PER_ITERATION, bn, graph_, state_.values, state_.error, dlVerbose);
+    VectorValues dx_u = bn.optimizeGradientSearch();
+    VectorValues dx_n = bn.optimize();
+    result = DoglegOptimizerImpl::Iterate(state_.Delta, DoglegOptimizerImpl::ONE_STEP_PER_ITERATION,
+        dx_u, dx_n, bn, graph_, state_.values, state_.error, dlVerbose);
   }
   else if ( params_.isCG() ) {
-    throw runtime_error("todo: ");
+    throw runtime_error("Dogleg is not currently compatible with the linear conjugate gradient solver");
   }
   else {
     throw runtime_error("Optimization parameter is invalid: DoglegParams::elimination");
   }
diff --git a/gtsam/nonlinear/DoglegOptimizerImpl.h b/gtsam/nonlinear/DoglegOptimizerImpl.h
index 3344da43a..c0b52f88b 100644
--- a/gtsam/nonlinear/DoglegOptimizerImpl.h
+++ b/gtsam/nonlinear/DoglegOptimizerImpl.h
@@ -100,8 +100,8 @@ struct GTSAM_EXPORT DoglegOptimizerImpl {
    */
  template<class M, class F, class VALUES>
  static IterationResult Iterate(
-      double Delta, TrustRegionAdaptationMode mode, const M& Rd,
-      const F& f, const VALUES& x0, const double f_error, const bool verbose=false);
+      double Delta, TrustRegionAdaptationMode mode, const VectorValues& dx_u, const VectorValues& dx_n,
+      const M& Rd, const F& f, const VALUES& x0, const double f_error, const bool verbose=false);

  /**
   * Compute the dogleg point given a trust region radius \f$ \Delta \f$. The
@@ -143,16 +143,9 @@ struct GTSAM_EXPORT DoglegOptimizerImpl {
 /* ************************************************************************* */
 template<class M, class F, class VALUES>
 typename DoglegOptimizerImpl::IterationResult DoglegOptimizerImpl::Iterate(
-    double Delta, TrustRegionAdaptationMode mode, const M& Rd,
-    const F& f, const VALUES& x0, const double f_error, const bool verbose)
+    double Delta, TrustRegionAdaptationMode mode, const VectorValues& dx_u, const VectorValues& dx_n,
+    const M& Rd, const F& f, const VALUES& x0, const double f_error, const bool verbose)
 {
-  // Compute steepest descent and Newton's method points
-  gttic(optimizeGradientSearch);
-  VectorValues dx_u = GaussianFactorGraph(Rd).optimizeGradientSearch();
-  gttoc(optimizeGradientSearch);
-  gttic(optimize);
-  VectorValues dx_n = Rd.optimize();
-  gttoc(optimize);
   gttic(M_error);
   const double M_error = Rd.error(VectorValues::Zero(dx_u));
   gttoc(M_error);
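
Note: this change moves the computation of the steepest-descent step dx_u (optimizeGradientSearch()) and the Gauss-Newton step dx_n (optimize()) out of DoglegOptimizerImpl::Iterate and into the caller, so both the multifrontal and sequential paths reuse the elimination result they already hold instead of re-solving inside Iterate. For orientation, the sketch below shows how a dogleg step is conventionally chosen from those two vectors given a trust-region radius Delta. It is a simplified illustration only, using plain Eigen vectors rather than gtsam::VectorValues, and the helper name computeDoglegPoint is hypothetical, not the GTSAM API (the real logic sits behind the "Compute the dogleg point" doc comment shown in the header above).

// Simplified sketch of classic dogleg step selection (not GTSAM code).
#include <cmath>
#include <Eigen/Dense>

Eigen::VectorXd computeDoglegPoint(double Delta,
                                   const Eigen::VectorXd& dx_u,   // steepest-descent (Cauchy) step
                                   const Eigen::VectorXd& dx_n) { // Gauss-Newton step
  // Gauss-Newton step already inside the trust region: take it.
  if (dx_n.norm() <= Delta) return dx_n;
  // Even the steepest-descent step leaves the region: scale it back to the boundary.
  if (dx_u.norm() >= Delta) return (Delta / dx_u.norm()) * dx_u;
  // Otherwise walk along the dogleg segment from dx_u toward dx_n and stop at the
  // boundary: solve ||dx_u + tau*(dx_n - dx_u)|| = Delta for tau in [0,1].
  const Eigen::VectorXd d = dx_n - dx_u;
  const double a = d.squaredNorm();
  const double b = 2.0 * dx_u.dot(d);
  const double c = dx_u.squaredNorm() - Delta * Delta;
  const double tau = (-b + std::sqrt(b * b - 4.0 * a * c)) / (2.0 * a);
  return dx_u + tau * d;
}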