diff --git a/gtsam/nonlinear/GncOptimizer.h b/gtsam/nonlinear/GncOptimizer.h
index eb353c53f..3ddaf4820 100644
--- a/gtsam/nonlinear/GncOptimizer.h
+++ b/gtsam/nonlinear/GncOptimizer.h
@@ -42,7 +42,7 @@ static double Chi2inv(const double alpha, const size_t dofs) {
 
 /* ************************************************************************* */
 template<class GncParameters>
-class GncOptimizer {
+class GTSAM_EXPORT GncOptimizer {
  public:
   /// For each parameter, specify the corresponding optimizer: e.g., GaussNewtonParams -> GaussNewtonOptimizer.
   typedef typename GncParameters::OptimizerType BaseOptimizer;
diff --git a/gtsam/nonlinear/GncParams.h b/gtsam/nonlinear/GncParams.h
index c1bf7a035..086f08acc 100644
--- a/gtsam/nonlinear/GncParams.h
+++ b/gtsam/nonlinear/GncParams.h
@@ -39,7 +39,7 @@ enum GncLossType {
 };
 
 template<class BaseOptimizerParameters>
-class GncParams {
+class GTSAM_EXPORT GncParams {
  public:
   /// For each parameter, specify the corresponding optimizer: e.g., GaussNewtonParams -> GaussNewtonOptimizer.
   typedef typename BaseOptimizerParameters::OptimizerType OptimizerType;
diff --git a/gtsam/nonlinear/nonlinear.i b/gtsam/nonlinear/nonlinear.i
index 8071e8bc7..73eaef125 100644
--- a/gtsam/nonlinear/nonlinear.i
+++ b/gtsam/nonlinear/nonlinear.i
@@ -522,6 +522,17 @@ virtual class DoglegParams : gtsam::NonlinearOptimizerParams {
   void setVerbosityDL(string verbosityDL) const;
 };
 
+#include <gtsam/nonlinear/GncParams.h>
+template<PARAMS>
+virtual class GncParams {
+  GncParams(const PARAMS& baseOptimizerParams);
+  GncParams();
+  void print(const string& str) const;
+};
+
+typedef gtsam::GncParams<gtsam::GaussNewtonParams> GncGaussNewtonParams;
+typedef gtsam::GncParams<gtsam::LevenbergMarquardtParams> GncLMParams;
+
 #include <gtsam/nonlinear/NonlinearOptimizer.h>
 virtual class NonlinearOptimizer {
   gtsam::Values optimize();
@@ -551,6 +562,18 @@ virtual class DoglegOptimizer : gtsam::NonlinearOptimizer {
                   const gtsam::DoglegParams& params);
   double getDelta() const;
 };
+
+#include <gtsam/nonlinear/GncOptimizer.h>
+template<PARAMS>
+virtual class GncOptimizer {
+  GncOptimizer(const gtsam::NonlinearFactorGraph& graph,
+               const gtsam::Values& initialValues,
+               const PARAMS& params);
+  gtsam::Values optimize();
+};
+
+typedef gtsam::GncOptimizer<gtsam::GncParams<gtsam::GaussNewtonParams> > GncGaussNewtonOptimizer;
+typedef gtsam::GncOptimizer<gtsam::GncParams<gtsam::LevenbergMarquardtParams> > GncLMOptimizer;
 
 #include <gtsam/nonlinear/LevenbergMarquardtOptimizer.h>
 virtual class LevenbergMarquardtOptimizer : gtsam::NonlinearOptimizer {
diff --git a/python/gtsam/tests/test_NonlinearOptimizer.py b/python/gtsam/tests/test_NonlinearOptimizer.py
index e9234a43b..e2561ae52 100644
--- a/python/gtsam/tests/test_NonlinearOptimizer.py
+++ b/python/gtsam/tests/test_NonlinearOptimizer.py
@@ -17,8 +17,9 @@ import unittest
 import gtsam
 from gtsam import (DoglegOptimizer, DoglegParams,
                    DummyPreconditionerParameters, GaussNewtonOptimizer,
-                   GaussNewtonParams, LevenbergMarquardtOptimizer,
-                   LevenbergMarquardtParams, NonlinearFactorGraph, Ordering,
+                   GaussNewtonParams, GncLMParams, GncLMOptimizer,
+                   LevenbergMarquardtOptimizer, LevenbergMarquardtParams,
+                   NonlinearFactorGraph, Ordering,
                    PCGSolverParameters, Point2, PriorFactorPoint2, Values)
 
 from gtsam.utils.test_case import GtsamTestCase
@@ -77,6 +78,12 @@ class TestScenario(GtsamTestCase):
         dlParams.setOrdering(ordering)
         actual3 = DoglegOptimizer(fg, initial_values, dlParams).optimize()
         self.assertAlmostEqual(0, fg.error(actual3))
+
+        # Graduated Non-Convexity (GNC)
+        gncParams = GncLMParams()
+        actual4 = GncLMOptimizer(fg, initial_values, gncParams).optimize()
+        self.assertAlmostEqual(0, fg.error(actual4))
+
 
 if __name__ == "__main__":
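
Usage sketch: the snippet below shows how the newly wrapped GNC classes could be called from Python once this change is built. The class names (`GncLMParams`, `GncLMOptimizer`) come from the typedefs added to nonlinear.i, and the graph/values setup mirrors the fixture used in test_NonlinearOptimizer.py; the `gtsam.noiseModel.Unit.Create` call and the integer key are assumptions for illustration, not part of this diff.

```python
import gtsam
from gtsam import (GncLMOptimizer, GncLMParams, NonlinearFactorGraph, Point2,
                   PriorFactorPoint2, Values)

# Toy problem: a single prior factor on a 2D point (key 1 is arbitrary).
fg = NonlinearFactorGraph()
model = gtsam.noiseModel.Unit.Create(2)  # assumed noise-model call, as in the existing test
fg.add(PriorFactorPoint2(1, Point2(0, 0), model))

# Deliberately poor initial estimate.
initial_values = Values()
initial_values.insert(1, Point2(100.0, 0.0))

# GNC wrapping Levenberg-Marquardt; GncLMParams() uses the default LM settings.
params = GncLMParams()
result = GncLMOptimizer(fg, initial_values, params).optimize()
print(fg.error(result))  # expected to be ~0 for this outlier-free problem
```

The same pattern applies with `GncGaussNewtonParams`/`GncGaussNewtonOptimizer`, which wrap Gauss-Newton instead of Levenberg-Marquardt.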