From 73f8c0830b40330873c94a728a4bb73123f633ad Mon Sep 17 00:00:00 2001
From: Richard Roberts
Date: Tue, 4 Sep 2012 15:05:57 +0000
Subject: [PATCH] Created new dense matrix functions in GaussianFactorGraph
 returning pair<Matrix,Vector> for easier access, and renamed functions to
 augmentedJacobian, augmentedHessian, jacobian, hessian
---
 gtsam.h                                      |  6 ++-
 gtsam/linear/GaussianFactorGraph.cpp         | 24 +++++++++---
 gtsam/linear/GaussianFactorGraph.h           | 38 ++++++++++++++++---
 .../linear/tests/testGaussianFactorGraph.cpp | 21 ++++++++--
 gtsam/nonlinear/Marginals.cpp                |  2 +-
 tests/testDoglegOptimizer.cpp                |  6 +--
 6 files changed, 78 insertions(+), 19 deletions(-)

diff --git a/gtsam.h b/gtsam.h
index c5560668f..9c6f76527 100644
--- a/gtsam.h
+++ b/gtsam.h
@@ -1022,8 +1022,10 @@ class GaussianFactorGraph {
 
   // Conversion to matrices
   Matrix sparseJacobian_() const;
-  Matrix denseJacobian() const;
-  Matrix denseHessian() const;
+  Matrix augmentedJacobian() const;
+  pair<Matrix,Vector> jacobian() const;
+  Matrix augmentedHessian() const;
+  pair<Matrix,Vector> hessian() const;
 };
 
 class GaussianISAM {
diff --git a/gtsam/linear/GaussianFactorGraph.cpp b/gtsam/linear/GaussianFactorGraph.cpp
index 02f37915c..144f2c0d9 100644
--- a/gtsam/linear/GaussianFactorGraph.cpp
+++ b/gtsam/linear/GaussianFactorGraph.cpp
@@ -166,7 +166,7 @@ namespace gtsam {
   }
 
   /* ************************************************************************* */
-  Matrix GaussianFactorGraph::denseJacobian() const {
+  Matrix GaussianFactorGraph::augmentedJacobian() const {
     // Convert to Jacobians
     FactorGraph<JacobianFactor> jfg;
     jfg.reserve(this->size());
@@ -182,6 +182,14 @@ namespace gtsam {
     return combined.matrix_augmented();
   }
 
+  /* ************************************************************************* */
+  std::pair<Matrix,Vector> GaussianFactorGraph::jacobian() const {
+    Matrix augmented = augmentedJacobian();
+    return make_pair(
+      augmented.leftCols(augmented.cols()-1),
+      augmented.col(augmented.cols()-1));
+  }
+
   /* ************************************************************************* */
   // Helper functions for Combine
   static boost::tuple<vector<size_t>, size_t, size_t> countDims(const FactorGraph<JacobianFactor>& factors, const VariableSlots& variableSlots) {
@@ -317,9 +325,7 @@ break;
   }
 
   /* ************************************************************************* */
-  static
-  FastMap<Index, SlotEntry> findScatterAndDims
-  (const FactorGraph<GaussianFactor>& factors) {
+  static FastMap<Index, SlotEntry> findScatterAndDims(const FactorGraph<GaussianFactor>& factors) {
 
     const bool debug = ISDEBUG("findScatterAndDims");
 
@@ -349,7 +355,7 @@ break;
   }
 
   /* ************************************************************************* */
-  Matrix GaussianFactorGraph::denseHessian() const {
+  Matrix GaussianFactorGraph::augmentedHessian() const {
 
     Scatter scatter = findScatterAndDims(*this);
 
@@ -367,6 +373,14 @@ break;
     return result;
   }
 
+  /* ************************************************************************* */
+  std::pair<Matrix,Vector> GaussianFactorGraph::hessian() const {
+    Matrix augmented = augmentedHessian();
+    return make_pair(
+      augmented.topLeftCorner(augmented.rows()-1, augmented.rows()-1),
+      augmented.col(augmented.rows()-1).head(augmented.rows()-1));
+  }
+
   /* ************************************************************************* */
   GaussianFactorGraph::EliminationResult EliminateCholesky(const FactorGraph<
       GaussianFactor>& factors, size_t nrFrontals) {
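The new jacobian() and hessian() above are thin wrappers that slice the
augmented matrices with ordinary Eigen block operations. A minimal standalone
sketch of the same slicing, assuming only Eigen (the 2x3 system below is made
up for illustration; Matrix and Vector stand in for GTSAM's Eigen typedefs,
and this is not GTSAM code):

    #include <iostream>
    #include <Eigen/Dense>

    typedef Eigen::MatrixXd Matrix;  // stand-ins for GTSAM's typedefs
    typedef Eigen::VectorXd Vector;

    int main() {
      // Augmented Jacobian [A b]: the last column is the right-hand side.
      Matrix Ab(2, 3);
      Ab << 1, 2, 3,
            4, 5, 6;
      Matrix A = Ab.leftCols(Ab.cols() - 1);  // same slicing as jacobian()
      Vector b = Ab.col(Ab.cols() - 1);

      // Augmented Hessian [A'A, A'b; b'A, b'b] is symmetric (n+1)x(n+1);
      // hessian() keeps the top-left n x n block and the head of the last column.
      Matrix aug = Ab.transpose() * Ab;
      Matrix Lambda = aug.topLeftCorner(aug.rows() - 1, aug.rows() - 1);
      Vector eta    = aug.col(aug.rows() - 1).head(aug.rows() - 1);

      std::cout << "Lambda =\n" << Lambda << "\neta = " << eta.transpose() << "\n";
      return 0;
    }

Returning the pair avoids forcing every caller to repeat this block
arithmetic, which is exactly the duplication the call sites below remove.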
diff --git a/gtsam/linear/GaussianFactorGraph.h b/gtsam/linear/GaussianFactorGraph.h
index 51748b79b..e7563b0c5 100644
--- a/gtsam/linear/GaussianFactorGraph.h
+++ b/gtsam/linear/GaussianFactorGraph.h
@@ -188,15 +188,43 @@ namespace gtsam {
     Matrix sparseJacobian_() const;
 
     /**
-     * Return a dense \f$ m \times n \f$ Jacobian matrix, augmented with b
-     * with standard deviations are baked into A and b
+     * Return a dense \f$ [ \;A\;b\; ] \in \mathbb{R}^{m \times (n+1)} \f$
+     * Jacobian matrix, augmented with b, with the noise models baked
+     * into A and b. The negative log-likelihood is
+     * \f$ \frac{1}{2} \Vert Ax-b \Vert^2 \f$. See also
+     * GaussianFactorGraph::jacobian and GaussianFactorGraph::sparseJacobian.
      */
-    Matrix denseJacobian() const;
+    Matrix augmentedJacobian() const;
+
+    /**
+     * Return the dense Jacobian \f$ A \f$ and right-hand side \f$ b \f$,
+     * with the noise models baked into A and b. The negative log-likelihood
+     * is \f$ \frac{1}{2} \Vert Ax-b \Vert^2 \f$. See also
+     * GaussianFactorGraph::augmentedJacobian and
+     * GaussianFactorGraph::sparseJacobian.
+     */
+    std::pair<Matrix,Vector> jacobian() const;
 
     /**
-     * Return a dense \f$ n \times n \f$ Hessian matrix, augmented with \f$ A^T b \f$
+     * Return a dense \f$ (n+1) \times (n+1) \f$ Hessian matrix, augmented
+     * with the information vector \f$ \eta \f$. The augmented Hessian is
+     \f[ \left[ \begin{array}{cc}
+     \Lambda & \eta \\
+     \eta^T & c
+     \end{array} \right] \f]
+     and the negative log-likelihood is
+     \f$ \frac{1}{2} x^T \Lambda x - \eta^T x + \frac{1}{2} c \f$.
      */
-    Matrix denseHessian() const;
+    Matrix augmentedHessian() const;
+
+    /**
+     * Return the dense Hessian \f$ \Lambda \f$ and information vector
+     * \f$ \eta \f$, with the noise models baked in. The negative
+     * log-likelihood is \f$ \frac{1}{2} x^T \Lambda x - \eta^T x +
+     * \frac{1}{2} c \f$. See also GaussianFactorGraph::augmentedHessian.
+     */
+    std::pair<Matrix,Vector> hessian() const;
 
   private:
     /** Serialization function */
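The sign convention documented above can be checked numerically: with
\Lambda = A'A and \eta = A'b, as returned by hessian(), the quadratic
expansion of 1/2 ||Ax-b||^2 carries a minus sign on the \eta'x term. A small
self-contained Eigen sketch (the 3x2 system and test point x are arbitrary,
not taken from GTSAM):

    #include <cassert>
    #include <cmath>
    #include <Eigen/Dense>

    int main() {
      Eigen::MatrixXd A(3, 2);
      A << 1, 2,
           3, 4,
           5, 6;
      Eigen::VectorXd b(3), x(2);
      b << 1, 0, 1;
      x << 0.5, -0.25;

      Eigen::MatrixXd Lambda = A.transpose() * A;  // Hessian
      Eigen::VectorXd eta = A.transpose() * b;     // information vector
      double c = b.dot(b);                         // corner of the augmented Hessian

      double lhs = 0.5 * (A * x - b).squaredNorm();
      double rhs = 0.5 * x.dot(Lambda * x) - eta.dot(x) + 0.5 * c;
      assert(std::fabs(lhs - rhs) < 1e-12);  // identity from the doc comment
      return 0;
    }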
diff --git a/gtsam/linear/tests/testGaussianFactorGraph.cpp b/gtsam/linear/tests/testGaussianFactorGraph.cpp
index 4c0aa3871..cb835a76a 100644
--- a/gtsam/linear/tests/testGaussianFactorGraph.cpp
+++ b/gtsam/linear/tests/testGaussianFactorGraph.cpp
@@ -410,7 +410,7 @@ TEST(GaussianFactor, eliminateFrontals)
   factors.push_back(factor4);
 
   // extract the dense matrix for the graph
-  Matrix actualDense = factors.denseJacobian();
+  Matrix actualDense = factors.augmentedJacobian();
   EXPECT(assert_equal(2.0 * Ab, actualDense));
 
   // Convert to Jacobians, inefficient copy of all factors instead of selectively converting only Hessians
@@ -619,7 +619,7 @@ TEST(GaussianFactorGraph, sparseJacobian) {
 }
 
 /* ************************************************************************* */
-TEST(GaussianFactorGraph, denseHessian) {
+TEST(GaussianFactorGraph, matrices) {
   // Create factor graph:
   //   x1 x2 x3 x4 x5  b
   //    1  2  3  0  0  4
@@ -639,9 +639,24 @@ TEST(GaussianFactorGraph, denseHessian) {
       9,10, 0,11,12,13,
       0, 0, 0,14,15,16;
 
+  Matrix expectedJacobian = jacobian;
   Matrix expectedHessian = jacobian.transpose() * jacobian;
-  Matrix actualHessian = gfg.denseHessian();
+  Matrix expectedA = jacobian.leftCols(jacobian.cols()-1);
+  Vector expectedb = jacobian.col(jacobian.cols()-1);
+  Matrix expectedL = expectedA.transpose() * expectedA;
+  Vector expectedeta = expectedA.transpose() * expectedb;
+
+  Matrix actualJacobian = gfg.augmentedJacobian();
+  Matrix actualHessian = gfg.augmentedHessian();
+  Matrix actualA; Vector actualb; boost::tie(actualA,actualb) = gfg.jacobian();
+  Matrix actualL; Vector actualeta; boost::tie(actualL,actualeta) = gfg.hessian();
+
+  EXPECT(assert_equal(expectedJacobian, actualJacobian));
   EXPECT(assert_equal(expectedHessian, actualHessian));
+  EXPECT(assert_equal(expectedA, actualA));
+  EXPECT(assert_equal(expectedb, actualb));
+  EXPECT(assert_equal(expectedL, actualL));
+  EXPECT(assert_equal(expectedeta, actualeta));
 }
 
 /* ************************************************************************* */
diff --git a/gtsam/nonlinear/Marginals.cpp b/gtsam/nonlinear/Marginals.cpp
index 52c507374..c3078ea76 100644
--- a/gtsam/nonlinear/Marginals.cpp
+++ b/gtsam/nonlinear/Marginals.cpp
@@ -152,7 +152,7 @@ JointMarginal Marginals::jointMarginalInformation(const std::vector<Key>& variab
   }
 
   // Get information matrix
-  Matrix augmentedInfo = jointFG.denseHessian();
+  Matrix augmentedInfo = jointFG.augmentedHessian();
   Matrix info = augmentedInfo.topLeftCorner(augmentedInfo.rows()-1, augmentedInfo.cols()-1);
 
   return JointMarginal(info, dims, variableConversion);
diff --git a/tests/testDoglegOptimizer.cpp b/tests/testDoglegOptimizer.cpp
index 781685a23..6b03b572b 100644
--- a/tests/testDoglegOptimizer.cpp
+++ b/tests/testDoglegOptimizer.cpp
@@ -96,7 +96,7 @@ TEST(DoglegOptimizer, ComputeSteepestDescentPoint) {
   gradientValues.vector() = gradient;
 
   // Compute the gradient using dense matrices
-  Matrix augmentedHessian = GaussianFactorGraph(gbn).denseHessian();
+  Matrix augmentedHessian = GaussianFactorGraph(gbn).augmentedHessian();
   LONGS_EQUAL(11, augmentedHessian.cols());
   VectorValues denseMatrixGradient = *allocateVectorValues(gbn);
   denseMatrixGradient.vector() = -augmentedHessian.col(10).segment(0,10);
@@ -200,7 +200,7 @@ TEST(DoglegOptimizer, BT_BN_equivalency) {
   GaussianFactorGraph expected(gbn);
   GaussianFactorGraph actual(bt);
 
-  EXPECT(assert_equal(expected.denseHessian(), actual.denseHessian()));
+  EXPECT(assert_equal(expected.augmentedHessian(), actual.augmentedHessian()));
 }
 
 /* ************************************************************************* */
@@ -276,7 +276,7 @@ TEST(DoglegOptimizer, ComputeSteepestDescentPointBT) {
   gradientValues.vector() = gradient;
 
   // Compute the gradient using dense matrices
-  Matrix augmentedHessian = GaussianFactorGraph(bt).denseHessian();
+  Matrix augmentedHessian = GaussianFactorGraph(bt).augmentedHessian();
   LONGS_EQUAL(11, augmentedHessian.cols());
   VectorValues denseMatrixGradient = *allocateVectorValues(bt);
   denseMatrixGradient.vector() = -augmentedHessian.col(10).segment(0,10);
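For reference, the two call-site slicing patterns updated above, as a
standalone Eigen sketch (sizes and data are arbitrary placeholders, not GTSAM
code): Marginals.cpp keeps the top-left block of the augmented Hessian as the
joint information matrix, and the dogleg tests read the gradient of
1/2 ||Ax-b||^2 at x = 0 off the last column, since that gradient is
A'(A*0 - b) = -A'b = -eta.

    #include <iostream>
    #include <Eigen/Dense>

    int main() {
      const int n = 10;  // mirrors the 11-column augmented Hessian in the tests
      Eigen::MatrixXd Ab = Eigen::MatrixXd::Random(20, n + 1);  // fake [A b]
      Eigen::MatrixXd aug = Ab.transpose() * Ab;  // augmented Hessian [L eta; eta' c]

      // Marginals.cpp pattern: drop the eta/c border to get the information matrix.
      Eigen::MatrixXd info = aug.topLeftCorner(aug.rows() - 1, aug.cols() - 1);

      // testDoglegOptimizer.cpp pattern: negated last column, without the
      // corner entry, is the gradient at x = 0.
      Eigen::VectorXd gradient = -aug.col(n).segment(0, n);

      std::cout << "info: " << info.rows() << "x" << info.cols()
                << ", gradient size: " << gradient.size() << "\n";
      return 0;
    }

With the new API, the Marginals.cpp slice is simply hessian().first, though
the patch leaves that call site on augmentedHessian() plus an explicit block.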