From 6d6d39287e96aaa10704a00376611a9f0b588ab0 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Thu, 17 Oct 2024 18:57:32 -0400 Subject: [PATCH] fix jacobians --- ...estNonlinearConjugateGradientOptimizer.cpp | 25 +++++++++++-------- 1 file changed, 14 insertions(+), 11 deletions(-) diff --git a/gtsam/nonlinear/tests/testNonlinearConjugateGradientOptimizer.cpp b/gtsam/nonlinear/tests/testNonlinearConjugateGradientOptimizer.cpp index ad6cc76bf..1742dc0cb 100644 --- a/gtsam/nonlinear/tests/testNonlinearConjugateGradientOptimizer.cpp +++ b/gtsam/nonlinear/tests/testNonlinearConjugateGradientOptimizer.cpp @@ -69,7 +69,7 @@ std::tuple<NonlinearFactorGraph, Values> generateProblem() { } /* ************************************************************************* */ -TEST_DISABLED(NonlinearConjugateGradientOptimizer, Optimize) { +TEST(NonlinearConjugateGradientOptimizer, Optimize) { const auto [graph, initialEstimate] = generateProblem(); // cout << "initial error = " << graph.error(initialEstimate) << endl; @@ -104,8 +104,8 @@ class Rosenbrock1Factor : public NoiseModelFactorN<double> { /// evaluate error Vector evaluateError(const double& x, OptionalMatrixType H) const override { double d = x - a_; - // Because linearized gradient is -A'b, it will multiply by d - if (H) (*H) = Vector1(2 / sqrt_2).transpose(); + // Because linearized gradient is -A'b/sigma, it will multiply by d + if (H) (*H) = Vector1(1).transpose(); return Vector1(d); } }; @@ -130,9 +130,9 @@ class Rosenbrock2Factor : public NoiseModelFactorN<double, double> { Vector evaluateError(const double& x, const double& y, OptionalMatrixType H1, OptionalMatrixType H2) const override { double x2 = x * x, d = x2 - y; - // Because linearized gradient is -A'b, it will multiply by d - if (H1) (*H1) = Vector1(4 * x / sqrt_2).transpose(); - if (H2) (*H2) = Vector1(-2 / sqrt_2).transpose(); + // Because linearized gradient is -A'b/sigma, it will multiply by d + if (H1) (*H1) = Vector1(2 * x).transpose(); + if (H2) (*H2) = Vector1(-1).transpose(); return Vector1(d); } }; 
@@ -181,13 +181,16 @@ double rosenbrock_func(double x, double y, double a = 1.0, double b = 100.0) { TEST(NonlinearConjugateGradientOptimizer, Rosenbrock) { using namespace rosenbrock; double a = 1.0, b = 100.0; - Rosenbrock1Factor f1(X(0), a, noiseModel::Isotropic::Precision(1, 2)); - Rosenbrock2Factor f2(X(0), Y(0), noiseModel::Isotropic::Precision(1, 2 * b)); + auto graph = GetRosenbrockGraph(a, b); + Rosenbrock1Factor f1 = + *std::static_pointer_cast<Rosenbrock1Factor>(graph.at(0)); + Rosenbrock2Factor f2 = + *std::static_pointer_cast<Rosenbrock2Factor>(graph.at(1)); Values values; values.insert(X(0), 3.0); values.insert(Y(0), 5.0); - // EXPECT_CORRECT_FACTOR_JACOBIANS(f1, values, 1e-7, 1e-5); - // EXPECT_CORRECT_FACTOR_JACOBIANS(f2, values, 1e-7, 1e-5); + EXPECT_CORRECT_FACTOR_JACOBIANS(f1, values, 1e-7, 1e-5); + EXPECT_CORRECT_FACTOR_JACOBIANS(f2, values, 1e-7, 1e-5); std::mt19937 rng(42); std::uniform_real_distribution<double> dist(0.0, 100.0); @@ -202,7 +205,7 @@ TEST(NonlinearConjugateGradientOptimizer, Rosenbrock) { /* ************************************************************************* */ // Optimize the Rosenbrock function to verify optimizer works -TEST_DISABLED(NonlinearConjugateGradientOptimizer, Optimization) { +TEST(NonlinearConjugateGradientOptimizer, Optimization) { using namespace rosenbrock; double a = 12;