now we have very cool tests!
							parent
							
								
									f897fa81a9
								
							
						
					
					
						commit
						a33c50fcef
					
				|  | @ -363,6 +363,53 @@ inline NonlinearFactorGraph createReallyNonlinearFactorGraph() { | |||
|   return *sharedReallyNonlinearFactorGraph(); | ||||
| } | ||||
| 
 | ||||
| /* ************************************************************************* */ | ||||
| inline NonlinearFactorGraph sharedNonRobustFactorGraphWithOutliers() { | ||||
|   using symbol_shorthand::X; | ||||
|   boost::shared_ptr<NonlinearFactorGraph> fg(new NonlinearFactorGraph); | ||||
|   Point2 z(0.0, 0.0); | ||||
|   double sigma = 0.1; | ||||
|   boost::shared_ptr<smallOptimize::UnaryFactor> factor( | ||||
|       new smallOptimize::UnaryFactor(z, noiseModel::Isotropic::Sigma(2,sigma), X(1))); | ||||
|   // 3 noiseless inliers
 | ||||
|   fg->push_back(factor); | ||||
|   fg->push_back(factor); | ||||
|   fg->push_back(factor); | ||||
| 
 | ||||
|   // 1 outlier
 | ||||
|   Point2 z_out(1.0, 0.0); | ||||
|   boost::shared_ptr<smallOptimize::UnaryFactor> factor_out( | ||||
|       new smallOptimize::UnaryFactor(z_out, noiseModel::Isotropic::Sigma(2,sigma), X(1))); | ||||
|   fg->push_back(factor_out); | ||||
| 
 | ||||
|   return *fg; | ||||
| } | ||||
| 
 | ||||
| /* ************************************************************************* */ | ||||
| inline NonlinearFactorGraph sharedRobustFactorGraphWithOutliers() { | ||||
|   using symbol_shorthand::X; | ||||
|   boost::shared_ptr<NonlinearFactorGraph> fg(new NonlinearFactorGraph); | ||||
|   Point2 z(0.0, 0.0); | ||||
|   double sigma = 0.1; | ||||
|   auto gmNoise = noiseModel::Robust::Create( | ||||
|             noiseModel::mEstimator::GemanMcClure::Create(1.0), noiseModel::Isotropic::Sigma(2,sigma)); | ||||
|   boost::shared_ptr<smallOptimize::UnaryFactor> factor( | ||||
|       new smallOptimize::UnaryFactor(z, gmNoise, X(1))); | ||||
|   // 3 noiseless inliers
 | ||||
|   fg->push_back(factor); | ||||
|   fg->push_back(factor); | ||||
|   fg->push_back(factor); | ||||
| 
 | ||||
|   // 1 outlier
 | ||||
|   Point2 z_out(1.0, 0.0); | ||||
|   boost::shared_ptr<smallOptimize::UnaryFactor> factor_out( | ||||
|       new smallOptimize::UnaryFactor(z_out, gmNoise, X(1))); | ||||
|   fg->push_back(factor_out); | ||||
| 
 | ||||
|   return *fg; | ||||
| } | ||||
| 
 | ||||
| 
 | ||||
| /* ************************************************************************* */ | ||||
| inline std::pair<NonlinearFactorGraph, Values> createNonlinearSmoother(int T) { | ||||
|   using namespace impl; | ||||
|  |  | |||
|  | @ -253,9 +253,6 @@ TEST(GncOptimizer, gncConstructor) { | |||
| //TEST(GncOptimizer, calculateWeights) {
 | ||||
| //}
 | ||||
| //
 | ||||
| ///* ************************************************************************* */
 | ||||
| //TEST(GncOptimizer, copyGraph) {
 | ||||
| //}
 | ||||
| 
 | ||||
| /* ************************************************************************* *
 | ||||
| TEST(GncOptimizer, makeGraph) { | ||||
|  | @ -274,7 +271,7 @@ TEST(GncOptimizer, makeGraph) { | |||
| } | ||||
| 
 | ||||
| /* ************************************************************************* */ | ||||
| TEST(GncOptimizer, optimize) { | ||||
| TEST(GncOptimizer, optimizeSimple) { | ||||
|   // has to have Gaussian noise models !
 | ||||
|   auto fg = example::createReallyNonlinearFactorGraph(); | ||||
| 
 | ||||
|  | @ -286,12 +283,40 @@ TEST(GncOptimizer, optimize) { | |||
|   GncParams<LevenbergMarquardtParams> gncParams(lmParams); | ||||
|   auto gnc = GncOptimizer<GncParams<LevenbergMarquardtParams>>(fg, initial, gncParams); | ||||
| 
 | ||||
|   gncParams.print(""); | ||||
| 
 | ||||
|   Values actual = gnc.optimize(); | ||||
|   DOUBLES_EQUAL(0, fg.error(actual), tol); | ||||
| } | ||||
| 
 | ||||
/* ************************************************************************* */
// End-to-end comparison on a graph with 3 inliers and 1 outlier (see
// sharedNonRobustFactorGraphWithOutliers): plain GN and robust-loss GN both
// converge to wrong estimates, while GNC recovers the true solution (0,0).
// The expected values 1.31812 and 1.18712 are regression constants pinning
// the (incorrect) converged points — presumably from a reference run of this
// test; TODO confirm against CI baselines before changing.
TEST(GncOptimizer, optimize) {
  // has to have Gaussian noise models !
  auto fg = example::sharedNonRobustFactorGraphWithOutliers();

  // initial guess away from the ground-truth solution (0,0)
  Point2 p0(1, 0);
  Values initial;
  initial.insert(X(1), p0);

  // try with nonrobust cost function and standard GN
  GaussNewtonParams gnParams;
  GaussNewtonOptimizer gn(fg, initial, gnParams);
  Values gn_results = gn.optimize();
  // converges to incorrect point due to lack of robustness to an outlier, ideal solution is Point2(0,0)
  CHECK(assert_equal(gn_results.at<Point2>(X(1)), Point2(1.31812,0.0), 1e-3));

  // try with robust loss function and standard GN
  auto fg_robust = example::sharedRobustFactorGraphWithOutliers(); // same as fg, but with factors wrapped in Geman McClure losses
  GaussNewtonOptimizer gn2(fg_robust, initial, gnParams);
  Values gn2_results = gn2.optimize();
  // converges to incorrect point, this time due to the nonconvexity of the loss
  CHECK(assert_equal(gn2_results.at<Point2>(X(1)), Point2(1.18712,0.0), 1e-3));

  // .. but graduated nonconvexity ensures both robustness and convergence in the face of nonconvexity
  // note: GNC is given the NON-robust graph `fg` — it applies its own
  // graduated robustification internally
  GncParams<GaussNewtonParams> gncParams(gnParams);
  auto gnc = GncOptimizer<GncParams<GaussNewtonParams>>(fg, initial, gncParams);
  Values gnc_result = gnc.optimize();
  CHECK(assert_equal(gnc_result.at<Point2>(X(1)), Point2(0.0,0.0), 1e-3));
}
| 
 | ||||
| /* ************************************************************************* */ | ||||
| int main() { | ||||
|   TestResult tr; | ||||
|  |  | |||
		Loading…
	
		Reference in New Issue