diff --git a/examples/vSLAMexample/vSFMexample.cpp b/examples/vSLAMexample/vSFMexample.cpp index a9f27b0a5..d2b8196eb 100644 --- a/examples/vSLAMexample/vSFMexample.cpp +++ b/examples/vSLAMexample/vSFMexample.cpp @@ -146,7 +146,7 @@ int main(int argc, char* argv[]) { // Optimize the graph cout << "*******************************************************" << endl; - NonlinearOptimizationParameters::sharedThis params = NonlinearOptimizationParameters::newVerbosity_(Optimizer::Parameters::DAMPED); + NonlinearOptimizationParameters::sharedThis params = NonlinearOptimizationParameters::newVerbosity(Optimizer::Parameters::DAMPED); Optimizer::shared_values result = Optimizer::optimizeGN(graph, initialEstimates, params); // Print final results diff --git a/gtsam/nonlinear/NonlinearOptimizationParameters.h b/gtsam/nonlinear/NonlinearOptimizationParameters.h index 94d3642d4..6c5930112 100644 --- a/gtsam/nonlinear/NonlinearOptimizationParameters.h +++ b/gtsam/nonlinear/NonlinearOptimizationParameters.h @@ -65,19 +65,24 @@ namespace gtsam { verbosity_(parameters.verbosity_), lambdaMode_(parameters.lambdaMode_){} - static sharedThis newVerbosity_(verbosityLevel verbosity) { + /* a copy of old instance except some parameters */ + sharedThis newLambda_(double lambda) const { + sharedThis ptr (boost::make_shared<NonlinearOptimizationParameters>(*this)) ; + ptr->lambda_ = lambda ; + return ptr ; + } + + + + // static + /* new instance with default parameters except some partially assigned parameters */ + static sharedThis newVerbosity(verbosityLevel verbosity) { sharedThis ptr (boost::make_shared<NonlinearOptimizationParameters>()) ; ptr->verbosity_ = verbosity ; return ptr ; } - static sharedThis newLambda_(double lambda) { - sharedThis ptr (boost::make_shared<NonlinearOptimizationParameters>()) ; - ptr->lambda_ = lambda ; - return ptr ; - } - - static sharedThis newMaxIterations_(int maxIterations) { + static sharedThis newMaxIterations(int maxIterations) { sharedThis ptr (boost::make_shared<NonlinearOptimizationParameters>()) ; ptr->maxIterations_ = maxIterations ; return ptr ; diff --git 
a/gtsam/nonlinear/NonlinearOptimizer-inl.h b/gtsam/nonlinear/NonlinearOptimizer-inl.h index 805c56b32..8dc6033db 100644 --- a/gtsam/nonlinear/NonlinearOptimizer-inl.h +++ b/gtsam/nonlinear/NonlinearOptimizer-inl.h @@ -185,7 +185,6 @@ namespace gtsam { shared_values newValues(new C(values_->expmap(delta, *ordering_))); // TODO: updateValues // create new optimization state with more adventurous lambda - //NonlinearOptimizer next(newValuesSolverLambda_(newValues, newSolver, lambda / factor)); double error = graph_->error(*newValues); if (verbosity >= Parameters::TRYLAMBDA) cout << "next error = " << error << endl; diff --git a/gtsam/nonlinear/NonlinearOptimizer.h b/gtsam/nonlinear/NonlinearOptimizer.h index 3385a1101..d1fc33196 100644 --- a/gtsam/nonlinear/NonlinearOptimizer.h +++ b/gtsam/nonlinear/NonlinearOptimizer.h @@ -121,16 +121,14 @@ namespace gtsam { error_(error), ordering_(ordering), solver_(solver), parameters_(parameters), dimensions_(dimensions) {} /** Create a new NonlinearOptimizer with a different lambda */ -// This newLambda_(double newLambda) const { -// return NonlinearOptimizer(graph_, values_, error_, ordering_, solver_, newLambda, dimensions_); } -// -// This newValuesSolver_(shared_values newValues, shared_solver newSolver) const { -// return NonlinearOptimizer(graph_, newValues, graph_->error(*newValues), ordering_, newSolver, lambda_, dimensions_); } -// -// This newValuesSolverLambda_(shared_values newValues, shared_solver newSolver, double newLambda) const { -// return NonlinearOptimizer(graph_, newValues, graph_->error(*newValues), ordering_, newSolver, newLambda, dimensions_); } + This newValuesSolver_(shared_values newValues, shared_solver newSolver) const { + return NonlinearOptimizer(graph_, newValues, graph_->error(*newValues), ordering_, newSolver, parameters_, dimensions_); } - /** Create a new NonlinearOptimizer with a different lambda */ + This newValuesErrorLambda_(shared_values newValues, double newError, double newLambda) const 
{ + return NonlinearOptimizer(graph_, newValues, newError, ordering_, solver_, parameters_->newLambda_(newLambda), dimensions_); } + + + /* This newLambda_(double newLambda) const { return NonlinearOptimizer(graph_, values_, error_, ordering_, solver_, parameters_->newLambda_(newLambda), dimensions_); } @@ -152,7 +150,7 @@ namespace gtsam { This newMaxIterations_(int maxIterations) const { return NonlinearOptimizer(graph_, values_, error_, ordering_, solver_, parameters_->newMaxIterations_(maxIterations), dimensions_); } - + */ public: /** @@ -279,9 +277,7 @@ namespace gtsam { shared_values values, Parameters::verbosityLevel verbosity) { - Parameters def ; - shared_parameters parameters = def.newVerbosity_(verbosity); - return optimizeLM(graph, values, parameters); + return optimizeLM(graph, values, Parameters::newVerbosity(verbosity)); } /** * Static interface to LM optimization (no shared_ptr arguments) - see above