refactor python NonlinearOptimizerTest
parent
a0d64a9448
commit
bf8137b0c4
|
@ -15,12 +15,10 @@ from __future__ import print_function
|
|||
import unittest
|
||||
|
||||
import gtsam
|
||||
from gtsam import (DoglegOptimizer, DoglegParams,
|
||||
DummyPreconditionerParameters, GaussNewtonOptimizer,
|
||||
GaussNewtonParams, GncLMParams, GncLMOptimizer,
|
||||
LevenbergMarquardtOptimizer, LevenbergMarquardtParams,
|
||||
NonlinearFactorGraph, Ordering,
|
||||
PCGSolverParameters, Point2, PriorFactorPoint2, Values)
|
||||
from gtsam import (DoglegOptimizer, DoglegParams, DummyPreconditionerParameters,
|
||||
GaussNewtonOptimizer, GaussNewtonParams, GncLMParams, GncLMOptimizer,
|
||||
LevenbergMarquardtOptimizer, LevenbergMarquardtParams, NonlinearFactorGraph,
|
||||
Ordering, PCGSolverParameters, Point2, PriorFactorPoint2, Values)
|
||||
from gtsam.utils.test_case import GtsamTestCase
|
||||
|
||||
# Keys identifying the variables in the factor graph.
KEY1 = 1
KEY2 = 2
|
|||
|
||||
|
||||
class TestScenario(GtsamTestCase):
    """Check that each nonlinear optimizer variant solves a trivial
    single-prior problem down to zero error."""

    def setUp(self):
        """Set up the optimization problem and ordering."""
        # Graph with a single prior on KEY1 at the origin, unit (2-dim) noise.
        self.fg = NonlinearFactorGraph()
        model = gtsam.noiseModel.Unit.Create(2)
        self.fg.add(PriorFactorPoint2(KEY1, Point2(0, 0), model))

        # Sanity check: error at the minimum (the prior mean) is zero.
        # NOTE: assertEqual's third positional argument is `msg`, not a
        # tolerance, so the original `assertEqual(0.0, err, 0.0)` compared
        # exactly; use assertAlmostEqual to express the intended tolerance.
        xstar = Point2(0, 0)
        self.optimal_values = Values()
        self.optimal_values.insert(KEY1, xstar)
        self.assertAlmostEqual(0.0, self.fg.error(self.optimal_values))

        # Sanity check: error at initial point (3, 3) under unit noise is
        # 0.5 * ||(3, 3)||^2 = 9.0.
        x0 = Point2(3, 3)
        self.initial_values = Values()
        self.initial_values.insert(KEY1, x0)
        self.assertAlmostEqual(9.0, self.fg.error(self.initial_values),
                               delta=1e-3)

        # Elimination ordering shared by the ordering-aware optimizers.
        self.ordering = Ordering()
        self.ordering.push_back(KEY1)

    def test_gauss_newton(self):
        """Gauss-Newton drives the error to zero."""
        gnParams = GaussNewtonParams()
        gnParams.setOrdering(self.ordering)
        actual = GaussNewtonOptimizer(self.fg, self.initial_values,
                                      gnParams).optimize()
        self.assertAlmostEqual(0, self.fg.error(actual))

    def test_levenberg_marquardt(self):
        """Levenberg-Marquardt (Ceres defaults) drives the error to zero."""
        lmParams = LevenbergMarquardtParams.CeresDefaults()
        lmParams.setOrdering(self.ordering)
        actual = LevenbergMarquardtOptimizer(self.fg, self.initial_values,
                                             lmParams).optimize()
        self.assertAlmostEqual(0, self.fg.error(actual))

    def test_levenberg_marquardt_pcg(self):
        """LM with an iterative (PCG) linear solver drives the error to zero."""
        lmParams = LevenbergMarquardtParams.CeresDefaults()
        lmParams.setLinearSolverType("ITERATIVE")
        cgParams = PCGSolverParameters()
        cgParams.setPreconditionerParams(DummyPreconditionerParameters())
        lmParams.setIterativeParams(cgParams)
        actual = LevenbergMarquardtOptimizer(self.fg, self.initial_values,
                                             lmParams).optimize()
        self.assertAlmostEqual(0, self.fg.error(actual))

    def test_dogleg(self):
        """Dogleg drives the error to zero."""
        dlParams = DoglegParams()
        dlParams.setOrdering(self.ordering)
        actual = DoglegOptimizer(self.fg, self.initial_values,
                                 dlParams).optimize()
        self.assertAlmostEqual(0, self.fg.error(actual))

    def test_graduated_non_convexity(self):
        """Graduated Non-Convexity (GNC) over LM drives the error to zero."""
        gncParams = GncLMParams()
        actual = GncLMOptimizer(self.fg, self.initial_values,
                                gncParams).optimize()
        self.assertAlmostEqual(0, self.fg.error(actual))
if __name__ == "__main__":
    # Run all TestScenario cases when executed as a script.
    unittest.main()
|
Loading…
Reference in New Issue