remove backward compatible functions in NonlinearOptimizer
parent 8bfd392b9f
commit e726f7c7af
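Every hunk below applies the same migration: instead of passing thresholds, verbosity, lambda, or iteration counts to levenbergMarquardt() / gaussNewton() / optimizeGN() directly, callers now build a NonlinearOptimizationParameters object (usually through one of its static factories such as newDrecreaseThresholds or newVerbosity_) and hand it to the optimizer constructor; the optimization calls themselves take no arguments. A minimal before/after sketch, assuming the graph, initial, and ordering shared pointers and the Optimizer typedef from the first example hunk:

	// Before (removed by this commit): settings passed to the optimization call itself.
	// Optimizer optimizer(graph, initial, ordering);
	// Optimizer result = optimizer.levenbergMarquardt(1e-15, 1e-15, Optimizer::Parameters::SILENT);

	// After: settings live in a NonlinearOptimizationParameters object given to the constructor.
	NonlinearOptimizationParameters::sharedThis params =
			NonlinearOptimizationParameters::newDrecreaseThresholds(1e-15, 1e-15);
	Optimizer optimizer(graph, initial, ordering, params);
	Optimizer result = optimizer.levenbergMarquardt();
	Values finalValues = *result.values();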
					
@@ -69,9 +69,9 @@ int main(int argc, char** argv) {
 	Ordering::shared_ptr ordering = graph->orderingCOLAMD(*initial);
 
 	/* 4.2.2 set up solver and optimize */
-	Optimizer optimizer(graph, initial, ordering);
-	Optimizer::Parameters::verbosityLevel verbosity = pose2SLAM::Optimizer::Parameters::SILENT;
-	Optimizer optimizer_result = optimizer.levenbergMarquardt(1e-15, 1e-15, verbosity);
+	NonlinearOptimizationParameters::sharedThis params = NonlinearOptimizationParameters::newDrecreaseThresholds(1e-15, 1e-15);
+	Optimizer optimizer(graph, initial, ordering, params);
+	Optimizer optimizer_result = optimizer.levenbergMarquardt();
 
 	Values result = *optimizer_result.values();
 	result.print("final result");
@@ -83,8 +83,7 @@ int main(void) {
 	SPCGOptimizer optimizer(graph, initial, solver->ordering(), solver) ;
 
 	cout << "before optimization, sum of error is " << optimizer.error() << endl;
-	NonlinearOptimizationParameters parameter;
-	SPCGOptimizer optimizer2 = optimizer.levenbergMarquardt(parameter);
+	SPCGOptimizer optimizer2 = optimizer.levenbergMarquardt();
 	cout << "after optimization, sum of error is " << optimizer2.error() << endl;
 
 	result = *optimizer2.values() ;
@@ -146,8 +146,8 @@ int main(int argc, char* argv[]) {
 
   // Optimize the graph
   cout << "*******************************************************" << endl;
-  Optimizer::Parameters::verbosityLevel verborsity = Optimizer::Parameters::DAMPED;
-  Optimizer::shared_values result = Optimizer::optimizeGN(graph, initialEstimates, verborsity);
+  NonlinearOptimizationParameters::sharedThis params = NonlinearOptimizationParameters::newVerbosity_(Optimizer::Parameters::DAMPED);
+  Optimizer::shared_values result = Optimizer::optimizeGN(graph, initialEstimates, params);
 
   // Print final results
   cout << "*******************************************************" << endl;
@@ -43,10 +43,10 @@ namespace gtsam {
 		// initial optimization state is the same in both cases tested
 	  typedef NonlinearOptimizer<G, T, GaussianFactorGraph, GaussianSequentialSolver> Optimizer;
 	  Optimizer optimizer(boost::make_shared<const G>(graph),
-	  		boost::make_shared<const T>(initialEstimate), ordering);
+	  		boost::make_shared<const T>(initialEstimate), ordering, boost::make_shared<NonlinearOptimizationParameters>(parameters));
 
 		// Levenberg-Marquardt
-	  Optimizer result = optimizer.levenbergMarquardt(parameters);
+	  Optimizer result = optimizer.levenbergMarquardt();
 		return *result.values();
 	}
 
@@ -62,10 +62,10 @@ namespace gtsam {
 		// initial optimization state is the same in both cases tested
 	  typedef NonlinearOptimizer<G, T, GaussianFactorGraph, GaussianMultifrontalSolver> Optimizer;
 	  Optimizer optimizer(boost::make_shared<const G>(graph),
-	  		boost::make_shared<const T>(initialEstimate), ordering);
+	  		boost::make_shared<const T>(initialEstimate), ordering, boost::make_shared<NonlinearOptimizationParameters>(parameters));
 
 		// Levenberg-Marquardt
-	  Optimizer result = optimizer.levenbergMarquardt(parameters);
+	  Optimizer result = optimizer.levenbergMarquardt();
 		return *result.values();
 	}
 
@@ -84,10 +84,11 @@ namespace gtsam {
 				boost::make_shared<const G>(graph),
 				boost::make_shared<const T>(initialEstimate),
 				solver->ordering(),
-				solver);
+				solver,
+				boost::make_shared<NonlinearOptimizationParameters>(parameters));
 
 		// Levenberg-Marquardt
-		SPCGOptimizer result = optimizer.levenbergMarquardt(parameters);
+		SPCGOptimizer result = optimizer.levenbergMarquardt();
 		return *result.values();
 	}
 
@@ -65,20 +65,20 @@ namespace gtsam {
 			verbosity_(parameters.verbosity_), lambdaMode_(parameters.lambdaMode_){}
 
 
-		sharedThis newVerbosity_(verbosityLevel verbosity) const {
-			sharedThis ptr (boost::make_shared<NonlinearOptimizationParameters>(*this)) ;
+		static sharedThis newVerbosity_(verbosityLevel verbosity) {
+			sharedThis ptr (boost::make_shared<NonlinearOptimizationParameters>()) ;
 			ptr->verbosity_ = verbosity ;
 			return ptr ;
 		}
 
-		sharedThis newLambda_(double lambda) const {
-			sharedThis ptr (boost::make_shared<NonlinearOptimizationParameters>(*this)) ;
+		static sharedThis newLambda_(double lambda) {
+			sharedThis ptr (boost::make_shared<NonlinearOptimizationParameters>()) ;
 			ptr->lambda_ = lambda ;
 			return ptr ;
 		}
 
-		sharedThis newMaxIterations_(int maxIterations) const {
-			sharedThis ptr (boost::make_shared<NonlinearOptimizationParameters>(*this)) ;
+		static sharedThis newMaxIterations_(int maxIterations) {
+			sharedThis ptr (boost::make_shared<NonlinearOptimizationParameters>()) ;
 			ptr->maxIterations_ = maxIterations ;
 			return ptr ;
 		}
@@ -89,11 +89,11 @@ namespace gtsam {
 			return ptr ;
 		}
 
-
-
-
-
-
-
+		static sharedThis newDrecreaseThresholds(double absDecrease, double relDecrease) {
+			sharedThis ptr (boost::make_shared<NonlinearOptimizationParameters>());
+			ptr->absDecrease_ = absDecrease;
+			ptr->relDecrease_ = relDecrease;
+			return ptr ;
+		}
 	};
 }
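Note that the factory methods above do more than gain the static keyword: the old const member versions copied the current object (*this) and overrode one field, while the static versions start from a default-constructed NonlinearOptimizationParameters and set only the named field, so settings are no longer accumulated by chaining calls on an existing object. A small usage sketch under that reading:

	// Each static factory returns a fresh default parameter set with one field overridden.
	NonlinearOptimizationParameters::sharedThis quiet =
			NonlinearOptimizationParameters::newVerbosity_(NonlinearOptimizationParameters::SILENT);
	NonlinearOptimizationParameters::sharedThis tight =
			NonlinearOptimizationParameters::newDrecreaseThresholds(1e-15, 1e-15);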
@@ -34,34 +34,6 @@ namespace gtsam {
 	/* ************************************************************************* */
 
 	/* ************************************************************************* */
-	template<class G, class C, class L, class S, class W>
-	NonlinearOptimizer<G, C, L, S, W>::NonlinearOptimizer(shared_graph graph,
-			shared_values values, shared_ordering ordering, double lambda) :
-			graph_(graph), values_(values), error_(graph->error(*values)), ordering_(ordering),
-			parameters_(Parameters::newLambda(lambda)), dimensions_(new vector<size_t>(values->dims(*ordering))) {
-		if (!graph) throw std::invalid_argument(
-				"NonlinearOptimizer constructor: graph = NULL");
-		if (!values) throw std::invalid_argument(
-				"NonlinearOptimizer constructor: values = NULL");
-		if (!ordering) throw std::invalid_argument(
-				"NonlinearOptimizer constructor: ordering = NULL");
-	}
-
-	template<class G, class C, class L, class S, class W>
-	NonlinearOptimizer<G, C, L, S, W>::NonlinearOptimizer(
-			shared_graph graph,	shared_values values, shared_ordering ordering, shared_solver solver, const double lambda):
-			graph_(graph), values_(values), error_(graph->error(*values)), ordering_(ordering), solver_(solver),
-			parameters_(Parameters::newLambda(lambda)), dimensions_(new vector<size_t>(values->dims(*ordering))) {
-		if (!graph) throw std::invalid_argument(
-				"NonlinearOptimizer constructor: graph = NULL");
-		if (!values) throw std::invalid_argument(
-				"NonlinearOptimizer constructor: values = NULL");
-		if (!ordering) throw std::invalid_argument(
-				"NonlinearOptimizer constructor: ordering = NULL");
-		if (!solver) throw std::invalid_argument(
-				"NonlinearOptimizer constructor: solver = NULL");
-	}
-
 	template<class G, class C, class L, class S, class W>
 	NonlinearOptimizer<G, C, L, S, W>::NonlinearOptimizer(shared_graph graph,
 			shared_values values, shared_ordering ordering, shared_parameters parameters) :
@@ -135,12 +107,6 @@ namespace gtsam {
 		return newOptimizer;
 	}
 
-	template<class G, class C, class L, class S, class W>
-	NonlinearOptimizer<G, C, L, S, W> NonlinearOptimizer<G, C, L, S, W>::iterate(
-			Parameters::verbosityLevel verbosity) const {
-		return this->newVerbosity_(verbosity).iterate();
-	}
-
 	/* ************************************************************************* */
 
 	template<class G, class C, class L, class S, class W>
@@ -167,23 +133,6 @@ namespace gtsam {
 		else return next.gaussNewton();
 	}
 
-	template<class G, class C, class L, class S, class W>
-	NonlinearOptimizer<G, C, L, S, W> NonlinearOptimizer<G, C, L, S, W>::gaussNewton(
-			double relativeThreshold,
-			double absoluteThreshold,
-			Parameters::verbosityLevel verbosity,
-			int maxIterations) const {
-
-		Parameters def ;
-		def.relDecrease_ = relativeThreshold ;
-		def.absDecrease_ = absoluteThreshold ;
-		def.verbosity_ = verbosity ;
-		def.maxIterations_ = maxIterations ;
-
-		shared_parameters ptr(boost::make_shared<NonlinearOptimizationParameters>(def)) ;
-		return newParameters_(ptr).gaussNewton() ;
-	}
-
 	/* ************************************************************************* */
 	// Iteratively try to do tempered Gauss-Newton steps until we succeed.
 	// Form damped system with given lambda, and return a new, more optimistic
@@ -292,21 +241,6 @@ namespace gtsam {
 		return try_lambda(*linear);
 	}
 
-
-	template<class G, class C, class L, class S, class W>
-	NonlinearOptimizer<G, C, L, S, W> NonlinearOptimizer<G, C, L, S, W>::iterateLM(
-		Parameters::verbosityLevel verbosity,
-		double lambdaFactor,
-		Parameters::LambdaMode lambdaMode) const {
-
-		NonlinearOptimizationParameters def(*parameters_) ;
-		def.verbosity_ = verbosity ;
-		def.lambdaFactor_ = lambdaFactor ;
-		def.lambdaMode_ = lambdaMode ;
-		shared_parameters ptr(boost::make_shared<Parameters>(def)) ;
-		return newParameters_(ptr).iterateLM();
-	}
-
 	/* ************************************************************************* */
 
 	template<class G, class C, class L, class S, class W>
@@ -344,36 +278,7 @@ namespace gtsam {
 			}
 			maxIterations--;
 		}
 
 	}
-
-	template<class G, class C, class L, class S, class W>
-	NonlinearOptimizer<G, C, L, S, W> NonlinearOptimizer<G, C, L, S, W>::levenbergMarquardt(
-			double relativeThreshold,
-			double absoluteThreshold,
-			Parameters::verbosityLevel verbosity,
-			int maxIterations,
-			double lambdaFactor,
-			Parameters::LambdaMode lambdaMode) const {
-
-		NonlinearOptimizationParameters def;
-		def.relDecrease_ = relativeThreshold ;
-		def.absDecrease_ = absoluteThreshold ;
-		def.verbosity_ = verbosity ;
-		def.maxIterations_ = maxIterations ;
-		def.lambdaFactor_ = lambdaFactor ;
-		def.lambdaMode_ = lambdaMode ;
-		shared_parameters ptr = boost::make_shared<Parameters>(def) ;
-		return newParameters_(ptr).levenbergMarquardt() ;
-	}
-
-	template<class G, class C, class L, class S, class W>
-	NonlinearOptimizer<G, C, L, S, W> NonlinearOptimizer<G, C, L, S, W>::
-	levenbergMarquardt(const NonlinearOptimizationParameters &parameters) const {
-		boost::shared_ptr<NonlinearOptimizationParameters> ptr (new NonlinearOptimizationParameters(parameters)) ;
-		return newParameters_(ptr).levenbergMarquardt() ;
-	}
-
 	/* ************************************************************************* */
-
 }
@@ -159,18 +159,6 @@ namespace gtsam {
 		 * Constructor that evaluates new error
 		 */
 
-    	// backward compatibility
-		NonlinearOptimizer(shared_graph graph,
-						   shared_values values,
-						   shared_ordering ordering,
-						   const double lambda);
-		// backward compatibility
-		NonlinearOptimizer(shared_graph graph,
-						   shared_values values,
-						   shared_ordering ordering,
-						   shared_solver solver,
-						   const double lambda);
-
 		// suggested constructors
 		NonlinearOptimizer(shared_graph graph,
 						   shared_values values,
@@ -230,9 +218,6 @@ namespace gtsam {
 		// suggested interface
 		NonlinearOptimizer iterate() const;
 
-		// backward compatible
-		NonlinearOptimizer iterate(Parameters::verbosityLevel verbosity) const;
-
 		/**
 		 * Optimize a solution for a non linear factor graph
 		 * @param relativeTreshold
@@ -243,11 +228,6 @@ namespace gtsam {
 		// suggested interface
 		NonlinearOptimizer gaussNewton() const;
 
-		// backward compatible
-		NonlinearOptimizer
-		gaussNewton(double relativeThreshold, double absoluteThreshold,
-				Parameters::verbosityLevel verbosity = Parameters::SILENT, int maxIterations = 100) const;
-
 		/**
 		 * One iteration of Levenberg Marquardt
 		 */
@@ -255,10 +235,6 @@ namespace gtsam {
 		// suggested interface
 		NonlinearOptimizer iterateLM();
 
-		// backward compatible
-		NonlinearOptimizer iterateLM(Parameters::verbosityLevel verbosity,
-				double lambdaFactor = 10, Parameters::LambdaMode lambdaMode = Parameters::BOUNDED) const;
-
 		/**
 		 * Optimize using Levenberg-Marquardt. Really Levenberg's
 		 * algorithm at this moment, as we just add I*\lambda to Hessian
@@ -277,19 +253,6 @@ namespace gtsam {
 		// suggested interface
 		NonlinearOptimizer levenbergMarquardt();
 
-		// backward compatible
-		NonlinearOptimizer
-		levenbergMarquardt(double relativeThreshold,
-						   double absoluteThreshold,
-						   Parameters::verbosityLevel verbosity = Parameters::SILENT,
-						   int maxIterations = 100,
-						   double lambdaFactor = 10,
-						   Parameters::LambdaMode lambdaMode = Parameters::BOUNDED) const;
-
-		// backward compatible
-		NonlinearOptimizer
-		levenbergMarquardt(const NonlinearOptimizationParameters &para) const;
-
 		/**
 		 * Static interface to LM optimization using default ordering and thresholds
 		 * @param graph 	   Nonlinear factor graph to optimize
@@ -362,15 +325,6 @@ namespace gtsam {
 			return result.values();
 		}
 
-		// backward compatible
-		static shared_values optimizeGN(shared_graph graph,
-										shared_values values,
-										Parameters::verbosityLevel verbosity) {
-			Parameters def ;
-			shared_parameters parameters = def.newVerbosity_(verbosity);
-			return optimizeGN(graph, values, parameters);
-		}
-
 		/**
 		 * Static interface to GN optimization (no shared_ptr arguments) - see above
 		 */
@@ -381,14 +335,6 @@ namespace gtsam {
 							  boost::make_shared<const T>(values),
 							  boost::make_shared<Parameters>(parameters));
 		}
-
-		// backward compatible
-		static shared_values optimizeGN(const G& graph, const T& values, Parameters::verbosityLevel verbosity) {
-			return optimizeGN(boost::make_shared<const G>(graph),
-							  boost::make_shared<const T>(values),
-							  verbosity);
-		}
-
 	};
 
 	/**
@@ -113,10 +113,10 @@ TEST(Pose2Graph, optimize) {
   shared_ptr<Ordering> ordering(new Ordering);
   *ordering += "x0","x1";
   typedef NonlinearOptimizer<Pose2Graph, Pose2Values> Optimizer;
-  Optimizer optimizer0(fg, initial, ordering);
-  Optimizer::Parameters::verbosityLevel verbosity = Optimizer::Parameters::SILENT;
-  //Optimizer::verbosityLevel verbosity = Optimizer::ERROR;
-  Optimizer optimizer = optimizer0.levenbergMarquardt(1e-15, 1e-15, verbosity);
+
+  NonlinearOptimizationParameters::sharedThis params = NonlinearOptimizationParameters::newDrecreaseThresholds(1e-15, 1e-15);
+  Optimizer optimizer0(fg, initial, ordering, params);
+  Optimizer optimizer = optimizer0.levenbergMarquardt();
 
   // Check with expected config
   Pose2Values expected;
@@ -152,9 +152,9 @@ TEST(Pose2Graph, optimizeThreePoses) {
   *ordering += "x0","x1","x2";
 
   // optimize
-  pose2SLAM::Optimizer optimizer0(fg, initial, ordering);
-  pose2SLAM::Optimizer::Parameters::verbosityLevel verbosity = pose2SLAM::Optimizer::Parameters::SILENT;
-  pose2SLAM::Optimizer optimizer = optimizer0.levenbergMarquardt(1e-15, 1e-15, verbosity);
+  NonlinearOptimizationParameters::sharedThis params = NonlinearOptimizationParameters::newDrecreaseThresholds(1e-15, 1e-15);
+  pose2SLAM::Optimizer optimizer0(fg, initial, ordering, params);
+  pose2SLAM::Optimizer optimizer = optimizer0.levenbergMarquardt();
 
   Pose2Values actual = *optimizer.values();
 
@@ -195,9 +195,9 @@ TEST(Pose2Graph, optimizeCircle) {
   *ordering += "x0","x1","x2","x3","x4","x5";
 
   // optimize
-  pose2SLAM::Optimizer optimizer0(fg, initial, ordering);
-  pose2SLAM::Optimizer::Parameters::verbosityLevel verbosity = pose2SLAM::Optimizer::Parameters::SILENT;
-  pose2SLAM::Optimizer optimizer = optimizer0.levenbergMarquardt(1e-15, 1e-15, verbosity);
+  NonlinearOptimizationParameters::sharedThis params = NonlinearOptimizationParameters::newDrecreaseThresholds(1e-15, 1e-15);
+  pose2SLAM::Optimizer optimizer0(fg, initial, ordering, params);
+  pose2SLAM::Optimizer optimizer = optimizer0.levenbergMarquardt();
 
   Pose2Values actual = *optimizer.values();
 
@@ -71,10 +71,9 @@ TEST(Pose3Graph, optimizeCircle) {
   shared_ptr<Ordering> ordering(new Ordering);
   *ordering += "x0","x1","x2","x3","x4","x5";
   typedef NonlinearOptimizer<Pose3Graph, Pose3Values> Optimizer;
-  Optimizer optimizer0(fg, initial, ordering);
-  Optimizer::Parameters::verbosityLevel verbosity = Optimizer::Parameters::SILENT;
-//  Optimizer::verbosityLevel verbosity = Optimizer::ERROR;
-  Optimizer optimizer = optimizer0.levenbergMarquardt(1e-15, 1e-15, verbosity);
+  NonlinearOptimizationParameters::sharedThis params = NonlinearOptimizationParameters::newDrecreaseThresholds(1e-15, 1e-15);
+  Optimizer optimizer0(fg, initial, ordering, params);
+  Optimizer optimizer = optimizer0.levenbergMarquardt();
 
   Pose3Values actual = *optimizer.values();
 
@@ -71,8 +71,12 @@ TEST( StereoFactor, singlePoint)
 
 	typedef gtsam::NonlinearOptimizer<Graph,Values,gtsam::GaussianFactorGraph,gtsam::GaussianMultifrontalSolver> Optimizer;   // optimization engine for this domain
 
-	NonlinearOptimizationParameters parameters(1.0, 1e-3, 0,
-			100, 1.0, 10, NonlinearOptimizationParameters::SILENT, NonlinearOptimizationParameters::BOUNDED);
+	double absoluteThreshold = 1e-9;
+	double relativeThreshold = 1e-5;
+	int maxIterations = 100;
+	NonlinearOptimizationParameters::verbosityLevel verbose = NonlinearOptimizationParameters::SILENT;
+	NonlinearOptimizationParameters parameters(absoluteThreshold, relativeThreshold, 0,
+			maxIterations, 1.0, 10, verbose, NonlinearOptimizationParameters::BOUNDED);
 
 	Optimizer optimizer(graph, values, ordering, make_shared<NonlinearOptimizationParameters>(parameters));
 
@@ -84,7 +88,7 @@ TEST( StereoFactor, singlePoint)
 	DOUBLES_EQUAL(0.0, afterOneIteration.error(), 1e-9);
 
 	// Complete solution
-	Optimizer final = optimizer.gaussNewton(1E-9, 1E-5);
+	Optimizer final = optimizer.gaussNewton();
 
 	DOUBLES_EQUAL(0.0, final.error(), 1e-6);
 }
@@ -197,9 +197,9 @@ TEST ( NonlinearEquality, allow_error_optimize ) {
 	// optimize
 	boost::shared_ptr<Ordering> ord(new Ordering());
 	ord->push_back(key1);
-	PoseOptimizer optimizer(graph, init, ord);
-	double relThresh = 1e-5, absThresh = 1e-5;
-	PoseOptimizer result = optimizer.levenbergMarquardt(relThresh, absThresh, PoseOptimizer::Parameters::SILENT);
+	NonlinearOptimizationParameters::sharedThis params = NonlinearOptimizationParameters::newDrecreaseThresholds(1e-5, 1e-5);
+	PoseOptimizer optimizer(graph, init, ord, params);
+	PoseOptimizer result = optimizer.levenbergMarquardt();
 
 	// verify
 	PoseValues expected;
@@ -233,9 +233,9 @@ TEST ( NonlinearEquality, allow_error_optimize_with_factors ) {
 	// optimize
 	boost::shared_ptr<Ordering> ord(new Ordering());
 	ord->push_back(key1);
-	PoseOptimizer optimizer(graph, init, ord);
-	double relThresh = 1e-5, absThresh = 1e-5;
-	PoseOptimizer result = optimizer.levenbergMarquardt(relThresh, absThresh, PoseOptimizer::Parameters::SILENT);
+	NonlinearOptimizationParameters::sharedThis params = NonlinearOptimizationParameters::newDrecreaseThresholds(1e-5, 1e-5);
+	PoseOptimizer optimizer(graph, init, ord, params);
+	PoseOptimizer result = optimizer.levenbergMarquardt();
 
 	// verify
 	PoseValues expected;
@@ -120,7 +120,7 @@ TEST( NonlinearOptimizer, iterateLM )
 	ord->push_back("x1");
 
 	// create initial optimization state, with lambda=0
-	Optimizer optimizer(fg, config, ord, 0.);
+	Optimizer optimizer(fg, config, ord, NonlinearOptimizationParameters::newLambda(0.));
 
 	// normal iterate
 	Optimizer iterated1 = optimizer.iterate();
@@ -161,20 +161,19 @@ TEST( NonlinearOptimizer, optimize )
 	// optimize parameters
 	shared_ptr<Ordering> ord(new Ordering());
 	ord->push_back("x1");
-	double relativeThreshold = 1e-5;
-	double absoluteThreshold = 1e-5;
 
 	// initial optimization state is the same in both cases tested
-	Optimizer optimizer(fg, c0, ord);
+	boost::shared_ptr<NonlinearOptimizationParameters> params = boost::make_shared<NonlinearOptimizationParameters>();
+	params->relDecrease_ = 1e-5;
+	params->absDecrease_ = 1e-5;
+	Optimizer optimizer(fg, c0, ord, params);
 
 	// Gauss-Newton
-	Optimizer actual1 = optimizer.gaussNewton(relativeThreshold,
-			absoluteThreshold);
+	Optimizer actual1 = optimizer.gaussNewton();
 	DOUBLES_EQUAL(0,fg->error(*(actual1.values())),tol);
 
 	// Levenberg-Marquardt
-	Optimizer actual2 = optimizer.levenbergMarquardt(relativeThreshold,
-			absoluteThreshold, Optimizer::Parameters::SILENT);
+	Optimizer actual2 = optimizer.levenbergMarquardt();
 	DOUBLES_EQUAL(0,fg->error(*(actual2.values())),tol);
 }
 