fix bugs
parent 3ad9c9494e
commit 06c319f42c
@@ -146,7 +146,7 @@ int main(int argc, char* argv[]) {
     // Optimize the graph
     cout << "*******************************************************" << endl;
-    NonlinearOptimizationParameters::sharedThis params = NonlinearOptimizationParameters::newVerbosity_(Optimizer::Parameters::DAMPED);
+    NonlinearOptimizationParameters::sharedThis params = NonlinearOptimizationParameters::newVerbosity(Optimizer::Parameters::DAMPED);
     Optimizer::shared_values result = Optimizer::optimizeGN(graph, initialEstimates, params);

     // Print final results

@@ -65,19 +65,24 @@ namespace gtsam {
       verbosity_(parameters.verbosity_), lambdaMode_(parameters.lambdaMode_){}

-    static sharedThis newVerbosity_(verbosityLevel verbosity) {
+    /* a copy of old instance except some parameters */
+    sharedThis newLambda_(double lambda) const {
+      sharedThis ptr (boost::make_shared<NonlinearOptimizationParameters>(*this)) ;
+      ptr->lambda_ = lambda ;
+      return ptr ;
+    }

     // static
+    /* new instance with default parameters except some partially assigned parameters */
+    static sharedThis newVerbosity(verbosityLevel verbosity) {
       sharedThis ptr (boost::make_shared<NonlinearOptimizationParameters>()) ;
       ptr->verbosity_ = verbosity ;
       return ptr ;
     }

     static sharedThis newLambda_(double lambda) {
       sharedThis ptr (boost::make_shared<NonlinearOptimizationParameters>()) ;
       ptr->lambda_ = lambda ;
       return ptr ;
     }

-    static sharedThis newMaxIterations_(int maxIterations) {
+    static sharedThis newMaxIterations(int maxIterations) {
       sharedThis ptr (boost::make_shared<NonlinearOptimizationParameters>()) ;
       ptr->maxIterations_ = maxIterations ;
       return ptr ;
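Note: the refactor in the hunk above distinguishes two construction styles on the parameters object: static factories that return a fresh instance with defaults and one field overridden (newVerbosity, newLambda_, newMaxIterations), and a new const member factory that clones *this and overrides one field. Below is a minimal standalone sketch of that pattern, using std::shared_ptr in place of Boost and a pared-down Params type; the names mirror the diff but this is not the actual GTSAM header.

#include <iostream>
#include <memory>

// Standalone sketch of the two factory styles; illustrative only.
struct Params {
    typedef std::shared_ptr<Params> sharedThis;

    int verbosity_;
    double lambda_;
    int maxIterations_;

    Params() : verbosity_(0), lambda_(1e-5), maxIterations_(100) {}

    // static factory: fresh defaults, one field overridden
    static sharedThis newVerbosity(int verbosity) {
        sharedThis ptr = std::make_shared<Params>();
        ptr->verbosity_ = verbosity;
        return ptr;
    }

    // const member factory: copy of *this, one field overridden
    sharedThis newLambda_(double lambda) const {
        sharedThis ptr = std::make_shared<Params>(*this);
        ptr->lambda_ = lambda;
        return ptr;
    }
};

int main() {
    Params::sharedThis p = Params::newVerbosity(2);  // defaults except verbosity
    Params::sharedThis q = p->newLambda_(1e-3);      // copy of p except lambda
    std::cout << q->verbosity_ << " " << q->lambda_ << std::endl;  // prints "2 0.001"
    return 0;
}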
@@ -185,7 +185,6 @@ namespace gtsam {
     shared_values newValues(new C(values_->expmap(delta, *ordering_))); // TODO: updateValues

     // create new optimization state with more adventurous lambda
-    //NonlinearOptimizer next(newValuesSolverLambda_(newValues, newSolver, lambda / factor));
     double error = graph_->error(*newValues);

     if (verbosity >= Parameters::TRYLAMBDA) cout << "next error = " << error << endl;

@@ -121,16 +121,14 @@ namespace gtsam {
       error_(error), ordering_(ordering), solver_(solver), parameters_(parameters), dimensions_(dimensions) {}

     /** Create a new NonlinearOptimizer with a different lambda */
-    // This newLambda_(double newLambda) const {
-    //   return NonlinearOptimizer(graph_, values_, error_, ordering_, solver_, newLambda, dimensions_); }
-    //
-    // This newValuesSolver_(shared_values newValues, shared_solver newSolver) const {
-    //   return NonlinearOptimizer(graph_, newValues, graph_->error(*newValues), ordering_, newSolver, lambda_, dimensions_); }
-    //
-    // This newValuesSolverLambda_(shared_values newValues, shared_solver newSolver, double newLambda) const {
-    //   return NonlinearOptimizer(graph_, newValues, graph_->error(*newValues), ordering_, newSolver, newLambda, dimensions_); }
+    This newValuesSolver_(shared_values newValues, shared_solver newSolver) const {
+      return NonlinearOptimizer(graph_, newValues, graph_->error(*newValues), ordering_, newSolver, parameters_, dimensions_); }

     /** Create a new NonlinearOptimizer with a different lambda */
     This newValuesErrorLambda_(shared_values newValues, double newError, double newLambda) const {
       return NonlinearOptimizer(graph_, newValues, newError, ordering_, solver_, parameters_->newLambda_(newLambda), dimensions_); }


     /*
     This newLambda_(double newLambda) const {
       return NonlinearOptimizer(graph_, values_, error_, ordering_, solver_, parameters_->newLambda_(newLambda), dimensions_); }

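Note: the helpers above follow a functional-update style: rather than mutating lambda on the current optimizer, each step constructs a new optimizer whose parameters object is derived from the old one via parameters_->newLambda_(...). The sketch below is a reduced illustration of that idea with hypothetical State/Params stand-ins, not the real NonlinearOptimizer.

#include <iostream>
#include <memory>

// Reduced illustration of newValuesErrorLambda_: build a new state from the
// old one, swapping in a derived parameters object (old state stays intact).
struct Params {
    typedef std::shared_ptr<const Params> shared;
    double lambda_;
    Params() : lambda_(1e-5) {}
    shared newLambda(double lambda) const {
        std::shared_ptr<Params> p = std::make_shared<Params>(*this);
        p->lambda_ = lambda;
        return p;
    }
};

struct State {
    double error;
    Params::shared params;
    // analogue of newValuesErrorLambda_: keep everything else, change error and lambda
    State withErrorLambda(double newError, double newLambda) const {
        State next = {newError, params->newLambda(newLambda)};
        return next;
    }
};

int main() {
    State s0 = {10.0, std::make_shared<Params>()};
    State s1 = s0.withErrorLambda(4.2, 1e-3);  // s0 is untouched
    std::cout << s0.params->lambda_ << " -> " << s1.params->lambda_ << std::endl;
    return 0;
}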
@@ -152,7 +150,7 @@ namespace gtsam {
     This newMaxIterations_(int maxIterations) const {
       return NonlinearOptimizer(graph_, values_, error_, ordering_, solver_, parameters_->newMaxIterations_(maxIterations), dimensions_); }

     */

   public:
     /**

@@ -279,9 +277,7 @@ namespace gtsam {
       shared_values values,
       Parameters::verbosityLevel verbosity)
     {
-      Parameters def ;
-      shared_parameters parameters = def.newVerbosity_(verbosity);
-      return optimizeLM(graph, values, parameters);
+      return optimizeLM(graph, values, Parameters::newVerbosity(verbosity));
     }
     /**
      * Static interface to LM optimization (no shared_ptr arguments) - see above
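Note: the last hunk simplifies the convenience overload: instead of default-constructing a Parameters object and calling the factory through that instance, it forwards directly with Parameters::newVerbosity(verbosity). Below is a standalone sketch of that forwarding pattern with hypothetical optimizeLM/Params stand-ins, not the GTSAM signatures.

#include <iostream>
#include <memory>

// Stand-in types illustrating the forwarding pattern; names echo the diff.
struct Params {
    typedef std::shared_ptr<Params> shared;
    int verbosity_;
    Params() : verbosity_(0) {}
    static shared newVerbosity(int verbosity) {
        shared p = std::make_shared<Params>();
        p->verbosity_ = verbosity;
        return p;
    }
};

// full interface: takes an explicit parameters object
double optimizeLM(double initialError, Params::shared params) {
    if (params->verbosity_ > 0)
        std::cout << "initial error = " << initialError << std::endl;
    return initialError * 0.5;  // placeholder for the actual optimization
}

// convenience overload: builds the parameters inline and forwards, mirroring
// `return optimizeLM(graph, values, Parameters::newVerbosity(verbosity));`
double optimizeLM(double initialError, int verbosity) {
    return optimizeLM(initialError, Params::newVerbosity(verbosity));
}

int main() {
    std::cout << optimizeLM(8.0, 1) << std::endl;  // logs the initial error, prints 4
    return 0;
}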