Remove backward-compatible functions in NonlinearOptimizer

release/4.3a0
Kai Ni 2010-11-21 22:00:22 +00:00
parent 8bfd392b9f
commit e726f7c7af
12 changed files with 58 additions and 205 deletions

View File

@ -69,9 +69,9 @@ int main(int argc, char** argv) {
Ordering::shared_ptr ordering = graph->orderingCOLAMD(*initial); Ordering::shared_ptr ordering = graph->orderingCOLAMD(*initial);
/* 4.2.2 set up solver and optimize */ /* 4.2.2 set up solver and optimize */
Optimizer optimizer(graph, initial, ordering); NonlinearOptimizationParameters::sharedThis params = NonlinearOptimizationParameters::newDrecreaseThresholds(1e-15, 1e-15);
Optimizer::Parameters::verbosityLevel verbosity = pose2SLAM::Optimizer::Parameters::SILENT; Optimizer optimizer(graph, initial, ordering, params);
Optimizer optimizer_result = optimizer.levenbergMarquardt(1e-15, 1e-15, verbosity); Optimizer optimizer_result = optimizer.levenbergMarquardt();
Values result = *optimizer_result.values(); Values result = *optimizer_result.values();
result.print("final result"); result.print("final result");

View File

@ -83,8 +83,7 @@ int main(void) {
SPCGOptimizer optimizer(graph, initial, solver->ordering(), solver) ; SPCGOptimizer optimizer(graph, initial, solver->ordering(), solver) ;
cout << "before optimization, sum of error is " << optimizer.error() << endl; cout << "before optimization, sum of error is " << optimizer.error() << endl;
NonlinearOptimizationParameters parameter; SPCGOptimizer optimizer2 = optimizer.levenbergMarquardt();
SPCGOptimizer optimizer2 = optimizer.levenbergMarquardt(parameter);
cout << "after optimization, sum of error is " << optimizer2.error() << endl; cout << "after optimization, sum of error is " << optimizer2.error() << endl;
result = *optimizer2.values() ; result = *optimizer2.values() ;

View File

@ -146,8 +146,8 @@ int main(int argc, char* argv[]) {
// Optimize the graph // Optimize the graph
cout << "*******************************************************" << endl; cout << "*******************************************************" << endl;
Optimizer::Parameters::verbosityLevel verborsity = Optimizer::Parameters::DAMPED; NonlinearOptimizationParameters::sharedThis params = NonlinearOptimizationParameters::newVerbosity_(Optimizer::Parameters::DAMPED);
Optimizer::shared_values result = Optimizer::optimizeGN(graph, initialEstimates, verborsity); Optimizer::shared_values result = Optimizer::optimizeGN(graph, initialEstimates, params);
// Print final results // Print final results
cout << "*******************************************************" << endl; cout << "*******************************************************" << endl;

View File

@ -43,10 +43,10 @@ namespace gtsam {
// initial optimization state is the same in both cases tested // initial optimization state is the same in both cases tested
typedef NonlinearOptimizer<G, T, GaussianFactorGraph, GaussianSequentialSolver> Optimizer; typedef NonlinearOptimizer<G, T, GaussianFactorGraph, GaussianSequentialSolver> Optimizer;
Optimizer optimizer(boost::make_shared<const G>(graph), Optimizer optimizer(boost::make_shared<const G>(graph),
boost::make_shared<const T>(initialEstimate), ordering); boost::make_shared<const T>(initialEstimate), ordering, boost::make_shared<NonlinearOptimizationParameters>(parameters));
// Levenberg-Marquardt // Levenberg-Marquardt
Optimizer result = optimizer.levenbergMarquardt(parameters); Optimizer result = optimizer.levenbergMarquardt();
return *result.values(); return *result.values();
} }
@ -62,10 +62,10 @@ namespace gtsam {
// initial optimization state is the same in both cases tested // initial optimization state is the same in both cases tested
typedef NonlinearOptimizer<G, T, GaussianFactorGraph, GaussianMultifrontalSolver> Optimizer; typedef NonlinearOptimizer<G, T, GaussianFactorGraph, GaussianMultifrontalSolver> Optimizer;
Optimizer optimizer(boost::make_shared<const G>(graph), Optimizer optimizer(boost::make_shared<const G>(graph),
boost::make_shared<const T>(initialEstimate), ordering); boost::make_shared<const T>(initialEstimate), ordering, boost::make_shared<NonlinearOptimizationParameters>(parameters));
// Levenberg-Marquardt // Levenberg-Marquardt
Optimizer result = optimizer.levenbergMarquardt(parameters); Optimizer result = optimizer.levenbergMarquardt();
return *result.values(); return *result.values();
} }
@ -84,10 +84,11 @@ namespace gtsam {
boost::make_shared<const G>(graph), boost::make_shared<const G>(graph),
boost::make_shared<const T>(initialEstimate), boost::make_shared<const T>(initialEstimate),
solver->ordering(), solver->ordering(),
solver); solver,
boost::make_shared<NonlinearOptimizationParameters>(parameters));
// Levenberg-Marquardt // Levenberg-Marquardt
SPCGOptimizer result = optimizer.levenbergMarquardt(parameters); SPCGOptimizer result = optimizer.levenbergMarquardt();
return *result.values(); return *result.values();
} }

View File

@ -65,20 +65,20 @@ namespace gtsam {
verbosity_(parameters.verbosity_), lambdaMode_(parameters.lambdaMode_){} verbosity_(parameters.verbosity_), lambdaMode_(parameters.lambdaMode_){}
sharedThis newVerbosity_(verbosityLevel verbosity) const { static sharedThis newVerbosity_(verbosityLevel verbosity) {
sharedThis ptr (boost::make_shared<NonlinearOptimizationParameters>(*this)) ; sharedThis ptr (boost::make_shared<NonlinearOptimizationParameters>()) ;
ptr->verbosity_ = verbosity ; ptr->verbosity_ = verbosity ;
return ptr ; return ptr ;
} }
sharedThis newLambda_(double lambda) const { static sharedThis newLambda_(double lambda) {
sharedThis ptr (boost::make_shared<NonlinearOptimizationParameters>(*this)) ; sharedThis ptr (boost::make_shared<NonlinearOptimizationParameters>()) ;
ptr->lambda_ = lambda ; ptr->lambda_ = lambda ;
return ptr ; return ptr ;
} }
sharedThis newMaxIterations_(int maxIterations) const { static sharedThis newMaxIterations_(int maxIterations) {
sharedThis ptr (boost::make_shared<NonlinearOptimizationParameters>(*this)) ; sharedThis ptr (boost::make_shared<NonlinearOptimizationParameters>()) ;
ptr->maxIterations_ = maxIterations ; ptr->maxIterations_ = maxIterations ;
return ptr ; return ptr ;
} }
@ -89,11 +89,11 @@ namespace gtsam {
return ptr ; return ptr ;
} }
static sharedThis newDrecreaseThresholds(double absDecrease, double relDecrease) {
sharedThis ptr (boost::make_shared<NonlinearOptimizationParameters>());
ptr->absDecrease_ = absDecrease;
ptr->relDecrease_ = relDecrease;
return ptr ;
}
}; };
} }

View File

@ -34,34 +34,6 @@ namespace gtsam {
/* ************************************************************************* */ /* ************************************************************************* */
/* ************************************************************************* */ /* ************************************************************************* */
template<class G, class C, class L, class S, class W>
NonlinearOptimizer<G, C, L, S, W>::NonlinearOptimizer(shared_graph graph,
shared_values values, shared_ordering ordering, double lambda) :
graph_(graph), values_(values), error_(graph->error(*values)), ordering_(ordering),
parameters_(Parameters::newLambda(lambda)), dimensions_(new vector<size_t>(values->dims(*ordering))) {
if (!graph) throw std::invalid_argument(
"NonlinearOptimizer constructor: graph = NULL");
if (!values) throw std::invalid_argument(
"NonlinearOptimizer constructor: values = NULL");
if (!ordering) throw std::invalid_argument(
"NonlinearOptimizer constructor: ordering = NULL");
}
template<class G, class C, class L, class S, class W>
NonlinearOptimizer<G, C, L, S, W>::NonlinearOptimizer(
shared_graph graph, shared_values values, shared_ordering ordering, shared_solver solver, const double lambda):
graph_(graph), values_(values), error_(graph->error(*values)), ordering_(ordering), solver_(solver),
parameters_(Parameters::newLambda(lambda)), dimensions_(new vector<size_t>(values->dims(*ordering))) {
if (!graph) throw std::invalid_argument(
"NonlinearOptimizer constructor: graph = NULL");
if (!values) throw std::invalid_argument(
"NonlinearOptimizer constructor: values = NULL");
if (!ordering) throw std::invalid_argument(
"NonlinearOptimizer constructor: ordering = NULL");
if (!solver) throw std::invalid_argument(
"NonlinearOptimizer constructor: solver = NULL");
}
template<class G, class C, class L, class S, class W> template<class G, class C, class L, class S, class W>
NonlinearOptimizer<G, C, L, S, W>::NonlinearOptimizer(shared_graph graph, NonlinearOptimizer<G, C, L, S, W>::NonlinearOptimizer(shared_graph graph,
shared_values values, shared_ordering ordering, shared_parameters parameters) : shared_values values, shared_ordering ordering, shared_parameters parameters) :
@ -135,12 +107,6 @@ namespace gtsam {
return newOptimizer; return newOptimizer;
} }
template<class G, class C, class L, class S, class W>
NonlinearOptimizer<G, C, L, S, W> NonlinearOptimizer<G, C, L, S, W>::iterate(
Parameters::verbosityLevel verbosity) const {
return this->newVerbosity_(verbosity).iterate();
}
/* ************************************************************************* */ /* ************************************************************************* */
template<class G, class C, class L, class S, class W> template<class G, class C, class L, class S, class W>
@ -167,23 +133,6 @@ namespace gtsam {
else return next.gaussNewton(); else return next.gaussNewton();
} }
template<class G, class C, class L, class S, class W>
NonlinearOptimizer<G, C, L, S, W> NonlinearOptimizer<G, C, L, S, W>::gaussNewton(
double relativeThreshold,
double absoluteThreshold,
Parameters::verbosityLevel verbosity,
int maxIterations) const {
Parameters def ;
def.relDecrease_ = relativeThreshold ;
def.absDecrease_ = absoluteThreshold ;
def.verbosity_ = verbosity ;
def.maxIterations_ = maxIterations ;
shared_parameters ptr(boost::make_shared<NonlinearOptimizationParameters>(def)) ;
return newParameters_(ptr).gaussNewton() ;
}
/* ************************************************************************* */ /* ************************************************************************* */
// Iteratively try to do tempered Gauss-Newton steps until we succeed. // Iteratively try to do tempered Gauss-Newton steps until we succeed.
// Form damped system with given lambda, and return a new, more optimistic // Form damped system with given lambda, and return a new, more optimistic
@ -292,21 +241,6 @@ namespace gtsam {
return try_lambda(*linear); return try_lambda(*linear);
} }
template<class G, class C, class L, class S, class W>
NonlinearOptimizer<G, C, L, S, W> NonlinearOptimizer<G, C, L, S, W>::iterateLM(
Parameters::verbosityLevel verbosity,
double lambdaFactor,
Parameters::LambdaMode lambdaMode) const {
NonlinearOptimizationParameters def(*parameters_) ;
def.verbosity_ = verbosity ;
def.lambdaFactor_ = lambdaFactor ;
def.lambdaMode_ = lambdaMode ;
shared_parameters ptr(boost::make_shared<Parameters>(def)) ;
return newParameters_(ptr).iterateLM();
}
/* ************************************************************************* */ /* ************************************************************************* */
template<class G, class C, class L, class S, class W> template<class G, class C, class L, class S, class W>
@ -344,36 +278,7 @@ namespace gtsam {
} }
maxIterations--; maxIterations--;
} }
} }
template<class G, class C, class L, class S, class W>
NonlinearOptimizer<G, C, L, S, W> NonlinearOptimizer<G, C, L, S, W>::levenbergMarquardt(
double relativeThreshold,
double absoluteThreshold,
Parameters::verbosityLevel verbosity,
int maxIterations,
double lambdaFactor,
Parameters::LambdaMode lambdaMode) const {
NonlinearOptimizationParameters def;
def.relDecrease_ = relativeThreshold ;
def.absDecrease_ = absoluteThreshold ;
def.verbosity_ = verbosity ;
def.maxIterations_ = maxIterations ;
def.lambdaFactor_ = lambdaFactor ;
def.lambdaMode_ = lambdaMode ;
shared_parameters ptr = boost::make_shared<Parameters>(def) ;
return newParameters_(ptr).levenbergMarquardt() ;
}
template<class G, class C, class L, class S, class W>
NonlinearOptimizer<G, C, L, S, W> NonlinearOptimizer<G, C, L, S, W>::
levenbergMarquardt(const NonlinearOptimizationParameters &parameters) const {
boost::shared_ptr<NonlinearOptimizationParameters> ptr (new NonlinearOptimizationParameters(parameters)) ;
return newParameters_(ptr).levenbergMarquardt() ;
}
/* ************************************************************************* */ /* ************************************************************************* */
} }

View File

@ -159,18 +159,6 @@ namespace gtsam {
* Constructor that evaluates new error * Constructor that evaluates new error
*/ */
// backward compatibility
NonlinearOptimizer(shared_graph graph,
shared_values values,
shared_ordering ordering,
const double lambda);
// backward compatibility
NonlinearOptimizer(shared_graph graph,
shared_values values,
shared_ordering ordering,
shared_solver solver,
const double lambda);
// suggested constructors // suggested constructors
NonlinearOptimizer(shared_graph graph, NonlinearOptimizer(shared_graph graph,
shared_values values, shared_values values,
@ -230,9 +218,6 @@ namespace gtsam {
// suggested interface // suggested interface
NonlinearOptimizer iterate() const; NonlinearOptimizer iterate() const;
// backward compatible
NonlinearOptimizer iterate(Parameters::verbosityLevel verbosity) const;
/** /**
* Optimize a solution for a non linear factor graph * Optimize a solution for a non linear factor graph
* @param relativeTreshold * @param relativeTreshold
@ -243,11 +228,6 @@ namespace gtsam {
// suggested interface // suggested interface
NonlinearOptimizer gaussNewton() const; NonlinearOptimizer gaussNewton() const;
// backward compatible
NonlinearOptimizer
gaussNewton(double relativeThreshold, double absoluteThreshold,
Parameters::verbosityLevel verbosity = Parameters::SILENT, int maxIterations = 100) const;
/** /**
* One iteration of Levenberg Marquardt * One iteration of Levenberg Marquardt
*/ */
@ -255,10 +235,6 @@ namespace gtsam {
// suggested interface // suggested interface
NonlinearOptimizer iterateLM(); NonlinearOptimizer iterateLM();
// backward compatible
NonlinearOptimizer iterateLM(Parameters::verbosityLevel verbosity,
double lambdaFactor = 10, Parameters::LambdaMode lambdaMode = Parameters::BOUNDED) const;
/** /**
* Optimize using Levenberg-Marquardt. Really Levenberg's * Optimize using Levenberg-Marquardt. Really Levenberg's
* algorithm at this moment, as we just add I*\lambda to Hessian * algorithm at this moment, as we just add I*\lambda to Hessian
@ -277,19 +253,6 @@ namespace gtsam {
// suggested interface // suggested interface
NonlinearOptimizer levenbergMarquardt(); NonlinearOptimizer levenbergMarquardt();
// backward compatible
NonlinearOptimizer
levenbergMarquardt(double relativeThreshold,
double absoluteThreshold,
Parameters::verbosityLevel verbosity = Parameters::SILENT,
int maxIterations = 100,
double lambdaFactor = 10,
Parameters::LambdaMode lambdaMode = Parameters::BOUNDED) const;
// backward compatible
NonlinearOptimizer
levenbergMarquardt(const NonlinearOptimizationParameters &para) const;
/** /**
* Static interface to LM optimization using default ordering and thresholds * Static interface to LM optimization using default ordering and thresholds
* @param graph Nonlinear factor graph to optimize * @param graph Nonlinear factor graph to optimize
@ -362,15 +325,6 @@ namespace gtsam {
return result.values(); return result.values();
} }
// backward compatible
static shared_values optimizeGN(shared_graph graph,
shared_values values,
Parameters::verbosityLevel verbosity) {
Parameters def ;
shared_parameters parameters = def.newVerbosity_(verbosity);
return optimizeGN(graph, values, parameters);
}
/** /**
* Static interface to GN optimization (no shared_ptr arguments) - see above * Static interface to GN optimization (no shared_ptr arguments) - see above
*/ */
@ -381,14 +335,6 @@ namespace gtsam {
boost::make_shared<const T>(values), boost::make_shared<const T>(values),
boost::make_shared<Parameters>(parameters)); boost::make_shared<Parameters>(parameters));
} }
// backward compatible
static shared_values optimizeGN(const G& graph, const T& values, Parameters::verbosityLevel verbosity) {
return optimizeGN(boost::make_shared<const G>(graph),
boost::make_shared<const T>(values),
verbosity);
}
}; };
/** /**

View File

@ -113,10 +113,10 @@ TEST(Pose2Graph, optimize) {
shared_ptr<Ordering> ordering(new Ordering); shared_ptr<Ordering> ordering(new Ordering);
*ordering += "x0","x1"; *ordering += "x0","x1";
typedef NonlinearOptimizer<Pose2Graph, Pose2Values> Optimizer; typedef NonlinearOptimizer<Pose2Graph, Pose2Values> Optimizer;
Optimizer optimizer0(fg, initial, ordering);
Optimizer::Parameters::verbosityLevel verbosity = Optimizer::Parameters::SILENT; NonlinearOptimizationParameters::sharedThis params = NonlinearOptimizationParameters::newDrecreaseThresholds(1e-15, 1e-15);
//Optimizer::verbosityLevel verbosity = Optimizer::ERROR; Optimizer optimizer0(fg, initial, ordering, params);
Optimizer optimizer = optimizer0.levenbergMarquardt(1e-15, 1e-15, verbosity); Optimizer optimizer = optimizer0.levenbergMarquardt();
// Check with expected config // Check with expected config
Pose2Values expected; Pose2Values expected;
@ -152,9 +152,9 @@ TEST(Pose2Graph, optimizeThreePoses) {
*ordering += "x0","x1","x2"; *ordering += "x0","x1","x2";
// optimize // optimize
pose2SLAM::Optimizer optimizer0(fg, initial, ordering); NonlinearOptimizationParameters::sharedThis params = NonlinearOptimizationParameters::newDrecreaseThresholds(1e-15, 1e-15);
pose2SLAM::Optimizer::Parameters::verbosityLevel verbosity = pose2SLAM::Optimizer::Parameters::SILENT; pose2SLAM::Optimizer optimizer0(fg, initial, ordering, params);
pose2SLAM::Optimizer optimizer = optimizer0.levenbergMarquardt(1e-15, 1e-15, verbosity); pose2SLAM::Optimizer optimizer = optimizer0.levenbergMarquardt();
Pose2Values actual = *optimizer.values(); Pose2Values actual = *optimizer.values();
@ -195,9 +195,9 @@ TEST(Pose2Graph, optimizeCircle) {
*ordering += "x0","x1","x2","x3","x4","x5"; *ordering += "x0","x1","x2","x3","x4","x5";
// optimize // optimize
pose2SLAM::Optimizer optimizer0(fg, initial, ordering); NonlinearOptimizationParameters::sharedThis params = NonlinearOptimizationParameters::newDrecreaseThresholds(1e-15, 1e-15);
pose2SLAM::Optimizer::Parameters::verbosityLevel verbosity = pose2SLAM::Optimizer::Parameters::SILENT; pose2SLAM::Optimizer optimizer0(fg, initial, ordering, params);
pose2SLAM::Optimizer optimizer = optimizer0.levenbergMarquardt(1e-15, 1e-15, verbosity); pose2SLAM::Optimizer optimizer = optimizer0.levenbergMarquardt();
Pose2Values actual = *optimizer.values(); Pose2Values actual = *optimizer.values();

View File

@ -71,10 +71,9 @@ TEST(Pose3Graph, optimizeCircle) {
shared_ptr<Ordering> ordering(new Ordering); shared_ptr<Ordering> ordering(new Ordering);
*ordering += "x0","x1","x2","x3","x4","x5"; *ordering += "x0","x1","x2","x3","x4","x5";
typedef NonlinearOptimizer<Pose3Graph, Pose3Values> Optimizer; typedef NonlinearOptimizer<Pose3Graph, Pose3Values> Optimizer;
Optimizer optimizer0(fg, initial, ordering); NonlinearOptimizationParameters::sharedThis params = NonlinearOptimizationParameters::newDrecreaseThresholds(1e-15, 1e-15);
Optimizer::Parameters::verbosityLevel verbosity = Optimizer::Parameters::SILENT; Optimizer optimizer0(fg, initial, ordering, params);
// Optimizer::verbosityLevel verbosity = Optimizer::ERROR; Optimizer optimizer = optimizer0.levenbergMarquardt();
Optimizer optimizer = optimizer0.levenbergMarquardt(1e-15, 1e-15, verbosity);
Pose3Values actual = *optimizer.values(); Pose3Values actual = *optimizer.values();

View File

@ -71,8 +71,12 @@ TEST( StereoFactor, singlePoint)
typedef gtsam::NonlinearOptimizer<Graph,Values,gtsam::GaussianFactorGraph,gtsam::GaussianMultifrontalSolver> Optimizer; // optimization engine for this domain typedef gtsam::NonlinearOptimizer<Graph,Values,gtsam::GaussianFactorGraph,gtsam::GaussianMultifrontalSolver> Optimizer; // optimization engine for this domain
NonlinearOptimizationParameters parameters(1.0, 1e-3, 0, double absoluteThreshold = 1e-9;
100, 1.0, 10, NonlinearOptimizationParameters::SILENT, NonlinearOptimizationParameters::BOUNDED); double relativeThreshold = 1e-5;
int maxIterations = 100;
NonlinearOptimizationParameters::verbosityLevel verbose = NonlinearOptimizationParameters::SILENT;
NonlinearOptimizationParameters parameters(absoluteThreshold, relativeThreshold, 0,
maxIterations, 1.0, 10, verbose, NonlinearOptimizationParameters::BOUNDED);
Optimizer optimizer(graph, values, ordering, make_shared<NonlinearOptimizationParameters>(parameters)); Optimizer optimizer(graph, values, ordering, make_shared<NonlinearOptimizationParameters>(parameters));
@ -84,7 +88,7 @@ TEST( StereoFactor, singlePoint)
DOUBLES_EQUAL(0.0, afterOneIteration.error(), 1e-9); DOUBLES_EQUAL(0.0, afterOneIteration.error(), 1e-9);
// Complete solution // Complete solution
Optimizer final = optimizer.gaussNewton(1E-9, 1E-5); Optimizer final = optimizer.gaussNewton();
DOUBLES_EQUAL(0.0, final.error(), 1e-6); DOUBLES_EQUAL(0.0, final.error(), 1e-6);
} }

View File

@ -197,9 +197,9 @@ TEST ( NonlinearEquality, allow_error_optimize ) {
// optimize // optimize
boost::shared_ptr<Ordering> ord(new Ordering()); boost::shared_ptr<Ordering> ord(new Ordering());
ord->push_back(key1); ord->push_back(key1);
PoseOptimizer optimizer(graph, init, ord); NonlinearOptimizationParameters::sharedThis params = NonlinearOptimizationParameters::newDrecreaseThresholds(1e-5, 1e-5);
double relThresh = 1e-5, absThresh = 1e-5; PoseOptimizer optimizer(graph, init, ord, params);
PoseOptimizer result = optimizer.levenbergMarquardt(relThresh, absThresh, PoseOptimizer::Parameters::SILENT); PoseOptimizer result = optimizer.levenbergMarquardt();
// verify // verify
PoseValues expected; PoseValues expected;
@ -233,9 +233,9 @@ TEST ( NonlinearEquality, allow_error_optimize_with_factors ) {
// optimize // optimize
boost::shared_ptr<Ordering> ord(new Ordering()); boost::shared_ptr<Ordering> ord(new Ordering());
ord->push_back(key1); ord->push_back(key1);
PoseOptimizer optimizer(graph, init, ord); NonlinearOptimizationParameters::sharedThis params = NonlinearOptimizationParameters::newDrecreaseThresholds(1e-5, 1e-5);
double relThresh = 1e-5, absThresh = 1e-5; PoseOptimizer optimizer(graph, init, ord, params);
PoseOptimizer result = optimizer.levenbergMarquardt(relThresh, absThresh, PoseOptimizer::Parameters::SILENT); PoseOptimizer result = optimizer.levenbergMarquardt();
// verify // verify
PoseValues expected; PoseValues expected;

View File

@ -120,7 +120,7 @@ TEST( NonlinearOptimizer, iterateLM )
ord->push_back("x1"); ord->push_back("x1");
// create initial optimization state, with lambda=0 // create initial optimization state, with lambda=0
Optimizer optimizer(fg, config, ord, 0.); Optimizer optimizer(fg, config, ord, NonlinearOptimizationParameters::newLambda(0.));
// normal iterate // normal iterate
Optimizer iterated1 = optimizer.iterate(); Optimizer iterated1 = optimizer.iterate();
@ -161,20 +161,19 @@ TEST( NonlinearOptimizer, optimize )
// optimize parameters // optimize parameters
shared_ptr<Ordering> ord(new Ordering()); shared_ptr<Ordering> ord(new Ordering());
ord->push_back("x1"); ord->push_back("x1");
double relativeThreshold = 1e-5;
double absoluteThreshold = 1e-5;
// initial optimization state is the same in both cases tested // initial optimization state is the same in both cases tested
Optimizer optimizer(fg, c0, ord); boost::shared_ptr<NonlinearOptimizationParameters> params = boost::make_shared<NonlinearOptimizationParameters>();
params->relDecrease_ = 1e-5;
params->absDecrease_ = 1e-5;
Optimizer optimizer(fg, c0, ord, params);
// Gauss-Newton // Gauss-Newton
Optimizer actual1 = optimizer.gaussNewton(relativeThreshold, Optimizer actual1 = optimizer.gaussNewton();
absoluteThreshold);
DOUBLES_EQUAL(0,fg->error(*(actual1.values())),tol); DOUBLES_EQUAL(0,fg->error(*(actual1.values())),tol);
// Levenberg-Marquardt // Levenberg-Marquardt
Optimizer actual2 = optimizer.levenbergMarquardt(relativeThreshold, Optimizer actual2 = optimizer.levenbergMarquardt();
absoluteThreshold, Optimizer::Parameters::SILENT);
DOUBLES_EQUAL(0,fg->error(*(actual2.values())),tol); DOUBLES_EQUAL(0,fg->error(*(actual2.values())),tol);
} }