diff --git a/.cproject b/.cproject index 061eab2d5..9e76907be 100644 --- a/.cproject +++ b/.cproject @@ -692,14 +692,6 @@ true true - - make - -j2 - tests/testPose2SLAMwSPCG.run - true - true - true - make -j2 @@ -876,6 +868,46 @@ true true + + make + -j2 + testPose2SLAMwSPCG.run + true + true + true + + + make + -j2 + testNonlinearOptimizer.run + true + true + true + + + make + -j2 + testGaussianBayesNet.run + true + true + true + + + make + -j2 + testNonlinearISAM.run + true + true + true + + + make + -j2 + testBoundingConstraint.run + true + true + true + make -j2 @@ -2487,14 +2519,6 @@ true true - - make - -j2 - tests/testPose2SLAMwSPCG.run - true - true - true - make -j2 @@ -2671,6 +2695,46 @@ true true + + make + -j2 + testPose2SLAMwSPCG.run + true + true + true + + + make + -j2 + testNonlinearOptimizer.run + true + true + true + + + make + -j2 + testGaussianBayesNet.run + true + true + true + + + make + -j2 + testNonlinearISAM.run + true + true + true + + + make + -j2 + testBoundingConstraint.run + true + true + true + make -j2 diff --git a/CppUnitLite/TestRegistry.cpp b/CppUnitLite/TestRegistry.cpp index 432faf990..fa5295229 100644 --- a/CppUnitLite/TestRegistry.cpp +++ b/CppUnitLite/TestRegistry.cpp @@ -19,9 +19,6 @@ #include "TestRegistry.h" #include "SimpleString.h" -//#include -//using namespace std; - void TestRegistry::addTest (Test *test) { instance ().add (test); @@ -63,7 +60,6 @@ int TestRegistry::run (TestResult& result) for (Test *test = tests; test != 0; test = test->getNext ()) { if (test->safe()) { try { -// cout << test->getName().asCharString() << ", " << test->getFilename().asCharString() << ":" << test->getLineNumber() << endl; test->run (result); } catch (std::exception& e) { // catch standard exceptions and derivatives diff --git a/examples/PlanarSLAMSelfContained_advanced.cpp b/examples/PlanarSLAMSelfContained_advanced.cpp index b29871f04..85e8be814 100644 --- a/examples/PlanarSLAMSelfContained_advanced.cpp +++ b/examples/PlanarSLAMSelfContained_advanced.cpp @@ -136,11 +136,11 @@ int main(int argc, char** argv) { resultMF.values()->print("final result (solved with a multifrontal solver)"); // Print marginals covariances for all variables - print(resultMF.marginalCovariance(x1).second, "x1 covariance"); - print(resultMF.marginalCovariance(x2).second, "x2 covariance"); - print(resultMF.marginalCovariance(x3).second, "x3 covariance"); - print(resultMF.marginalCovariance(l1).second, "l1 covariance"); - print(resultMF.marginalCovariance(l2).second, "l2 covariance"); + print(resultMF.marginalCovariance(x1), "x1 covariance"); + print(resultMF.marginalCovariance(x2), "x2 covariance"); + print(resultMF.marginalCovariance(x3), "x3 covariance"); + print(resultMF.marginalCovariance(l1), "l1 covariance"); + print(resultMF.marginalCovariance(l2), "l2 covariance"); return 0; } diff --git a/examples/Pose2SLAMExample_advanced.cpp b/examples/Pose2SLAMExample_advanced.cpp index 697a4b99b..f4e1d3a74 100644 --- a/examples/Pose2SLAMExample_advanced.cpp +++ b/examples/Pose2SLAMExample_advanced.cpp @@ -76,8 +76,8 @@ int main(int argc, char** argv) { result.print("final result"); /* Get covariances */ - Matrix covariance1 = optimizer_result.marginalCovariance(x1).second; - Matrix covariance2 = optimizer_result.marginalCovariance(x2).second; + Matrix covariance1 = optimizer_result.marginalCovariance(x1); + Matrix covariance2 = optimizer_result.marginalCovariance(x2); print(covariance1, "Covariance1"); print(covariance2, "Covariance2"); diff --git a/gtsam/base/FastMap.h 
b/gtsam/base/FastMap.h index 936c0664e..ea2468d64 100644 --- a/gtsam/base/FastMap.h +++ b/gtsam/base/FastMap.h @@ -57,7 +57,6 @@ public: friend class boost::serialization::access; template void serialize(ARCHIVE & ar, const unsigned int version) { - ar & boost::serialization::base_object(*this); } }; diff --git a/gtsam/base/FastVector.h b/gtsam/base/FastVector.h index 00a9bb801..2e87d638a 100644 --- a/gtsam/base/FastVector.h +++ b/gtsam/base/FastVector.h @@ -40,6 +40,9 @@ public: /** Default constructor */ FastVector() {} + /** Constructor from size */ + FastVector(size_t size) : Base(size) {} + /** Constructor from a range, passes through to base class */ template FastVector(INPUTITERATOR first, INPUTITERATOR last) : Base(first, last) {} diff --git a/gtsam/base/Lie.h b/gtsam/base/Lie.h index af8092c60..ea3c730bf 100644 --- a/gtsam/base/Lie.h +++ b/gtsam/base/Lie.h @@ -56,7 +56,6 @@ #pragma once #include - #include namespace gtsam { diff --git a/gtsam/base/Matrix.cpp b/gtsam/base/Matrix.cpp index 9781f6316..eac671f9e 100644 --- a/gtsam/base/Matrix.cpp +++ b/gtsam/base/Matrix.cpp @@ -259,32 +259,13 @@ Vector columnNormSquare(const Matrix &A) { } /* ************************************************************************* */ -void solve(Matrix& A, Matrix& B) -{ +void solve(Matrix& A, Matrix& B) { // Eigen version - untested - Eigen::FullPivLU lu; - lu.compute(A); - B = lu.solve(B); - A = lu.matrixLU(); - -// typedef ublas::permutation_matrix pmatrix; - // create a working copy of the input -// Matrix A_(A); - // create a permutation matrix for the LU-factorization -// pmatrix pm(A_.rows()); - - // perform LU-factorization - // FIXME: add back with Eigen functionality - // size_t res = lu_factorize(A_,pm); - // if( res != 0 ) throw runtime_error ("Matrix::solve: lu_factorize failed!"); - - // backsubstitute to get the inverse -// lu_substitute(A_, pm, B); + B = A.fullPivLu().solve(B); } /* ************************************************************************* */ -Matrix inverse(const Matrix& A) -{ +Matrix inverse(const Matrix& A) { return A.inverse(); } @@ -445,31 +426,11 @@ Vector backSubstituteUpper(const Matrix& U, const Vector& b, bool unit) { /* ************************************************************************* */ Vector backSubstituteUpper(const Vector& b, const Matrix& U, bool unit) { // @return the solution x of x'*U=b' - size_t n = U.cols(); -#ifndef NDEBUG - size_t m = U.rows(); - if (m!=n) - throw invalid_argument("backSubstituteUpper: U must be square"); -#endif - - Vector result(n); - for (size_t i = 1; i <= n; i++) { - double zi = b(i-1); - for (size_t j = 1; j < i; j++) - zi -= U(j-1,i-1) * result(j-1); -#ifndef NDEBUG - if(!unit && fabs(U(i-1,i-1)) <= numeric_limits::epsilon()) { - stringstream ss; - ss << "backSubstituteUpper: U is singular,\n"; - print(U, "U: ", ss); - throw invalid_argument(ss.str()); - } -#endif - if (!unit) zi /= U(i-1,i-1); - result(i-1) = zi; - } - - return result; + assert(U.rows() == U.cols()); + if (unit) + return U.triangularView().transpose().solve(b); + else + return U.triangularView().transpose().solve(b); } /* ************************************************************************* */ @@ -578,13 +539,14 @@ Matrix skewSymmetric(double wx, double wy, double wz) * pointers are subtracted by one to provide base 1 access */ /* ************************************************************************* */ -double** createNRC(Matrix& A) { - const size_t m=A.rows(); - double** a = new double* [m]; - for(size_t i = 0; i < m; i++) - a[i] = 
&A(i,0)-1; - return a; -} +// FIXME: assumes row major, rather than column major +//double** createNRC(Matrix& A) { +// const size_t m=A.rows(); +// double** a = new double* [m]; +// for(size_t i = 0; i < m; i++) +// a[i] = &A(i,0)-1; +// return a; +//} /* ****************************************** * @@ -653,7 +615,6 @@ Matrix inverse_square_root(const Matrix& A) { Matrix inverse_square_root(const Matrix& A) { Matrix R = RtR(A); Matrix inv = eye(A.rows()); -// inplace_solve(R, inv, BNU::upper_tag ()); R.triangularView().solveInPlace(inv); return inv; } diff --git a/gtsam/base/Matrix.h b/gtsam/base/Matrix.h index 37990be64..9ff6bc4ce 100644 --- a/gtsam/base/Matrix.h +++ b/gtsam/base/Matrix.h @@ -22,7 +22,6 @@ #pragma once -#include #include #include #include diff --git a/gtsam/base/blockMatrices.h b/gtsam/base/blockMatrices.h index ad3f97059..fc9d8bd4c 100644 --- a/gtsam/base/blockMatrices.h +++ b/gtsam/base/blockMatrices.h @@ -17,9 +17,7 @@ */ #pragma once -#include #include -#include #include namespace gtsam { diff --git a/gtsam/base/cholesky.cpp b/gtsam/base/cholesky.cpp index e129a6d30..663418f70 100644 --- a/gtsam/base/cholesky.cpp +++ b/gtsam/base/cholesky.cpp @@ -148,4 +148,51 @@ void choleskyPartial(Matrix& ABC, size_t nFrontal) { toc(3, "compute L"); } +/* ************************************************************************* */ +Eigen::LDLT::TranspositionType ldlPartial(Matrix& ABC, size_t nFrontal) { + + const bool debug = ISDEBUG("ldlPartial"); + + assert(ABC.rows() == ABC.cols()); + assert(ABC.rows() >= 0 && nFrontal <= size_t(ABC.rows())); + + const size_t n = ABC.rows(); + + // Compute Cholesky factorization of A, overwrites A = sqrt(D)*R + // tic(1, "ldl"); + Eigen::LDLT ldlt; + ldlt.compute(ABC.block(0,0,nFrontal,nFrontal).selfadjointView()); + + Vector sqrtD = ldlt.vectorD().cwiseSqrt(); + if (debug) cout << "Dsqrt: " << sqrtD << endl; + + // U = sqrtD * L^ + Matrix U = ldlt.matrixU(); + + // we store the permuted upper triangular matrix + ABC.block(0,0,nFrontal,nFrontal) = sqrtD.asDiagonal() * U; + if(debug) cout << "R:\n" << ABC.topLeftCorner(nFrontal,nFrontal) << endl; + // toc(1, "ldl"); + + // Compute S = inv(R') * B = inv(P'U')*B = inv(U')*P*B + // tic(2, "compute S"); + if(n - nFrontal > 0) { + ABC.topRightCorner(nFrontal, n-nFrontal) = ldlt.transpositionsP() * ABC.topRightCorner(nFrontal, n-nFrontal); + ABC.block(0,0,nFrontal,nFrontal).triangularView().transpose().solveInPlace(ABC.topRightCorner(nFrontal, n-nFrontal)); + } + if(debug) cout << "S:\n" << ABC.topRightCorner(nFrontal, n-nFrontal) << endl; + // toc(2, "compute S"); + + // Compute L = C - S' * S + // tic(3, "compute L"); + if(debug) cout << "C:\n" << Eigen::MatrixXd(ABC.bottomRightCorner(n-nFrontal,n-nFrontal).selfadjointView()) << endl; + if(n - nFrontal > 0) + ABC.bottomRightCorner(n-nFrontal,n-nFrontal).selfadjointView().rankUpdate( + ABC.topRightCorner(nFrontal, n-nFrontal).transpose(), -1.0); + if(debug) cout << "L:\n" << Eigen::MatrixXd(ABC.bottomRightCorner(n-nFrontal,n-nFrontal).selfadjointView()) << endl; + // toc(3, "compute L"); + + return ldlt.transpositionsP(); +} + } diff --git a/gtsam/base/cholesky.h b/gtsam/base/cholesky.h index 48e1d5314..6bb105356 100644 --- a/gtsam/base/cholesky.h +++ b/gtsam/base/cholesky.h @@ -50,5 +50,25 @@ std::pair choleskyCareful(Matrix& ATA, int order = -1); */ void choleskyPartial(Matrix& ABC, size_t nFrontal); + +/** + * Partial LDL computes a factor [R S such that [P'R' 0 [RP S = [A B + * 0 F] S' I] 0 F] B' C]. 
+ * The input to this function is the matrix ABC = [A B], and the parameter + * [B' C] + * nFrontal determines the split between A, B, and C, with A being of size + * nFrontal x nFrontal. + * P is a permutation matrix obtained from the pivoting process while doing LDL'. + * + * Specifically, if A = P'U'DUP is the LDL' factorization of A, + * then R = sqrt(D)*U is the permuted upper-triangular matrix. + * The permutation is returned so that it can be used in the backsubstitution + * process to return the correct order of variables, and in creating the + * Jacobian factor by permuting R correctly. + * + * Note that S and F are not permuted (in correct original ordering). + */ +Eigen::LDLT::TranspositionType ldlPartial(Matrix& ABC, size_t nFrontal); + } diff --git a/gtsam/base/numericalDerivative.h b/gtsam/base/numericalDerivative.h index 3217fe298..21111881e 100644 --- a/gtsam/base/numericalDerivative.h +++ b/gtsam/base/numericalDerivative.h @@ -22,7 +22,6 @@ #include #include -#include #include #include diff --git a/gtsam/base/tests/testBTree.cpp b/gtsam/base/tests/testBTree.cpp index 9343f00e7..da6b56015 100644 --- a/gtsam/base/tests/testBTree.cpp +++ b/gtsam/base/tests/testBTree.cpp @@ -9,7 +9,7 @@ * -------------------------------------------------------------------------- */ -/* +/** * testBTree.cpp * * Created on: Feb 3, 2010 diff --git a/gtsam/base/tests/testBlockMatrices.cpp b/gtsam/base/tests/testBlockMatrices.cpp index 09b04d4ac..06b3f3002 100644 --- a/gtsam/base/tests/testBlockMatrices.cpp +++ b/gtsam/base/tests/testBlockMatrices.cpp @@ -1,3 +1,14 @@ +/* ---------------------------------------------------------------------------- + + * GTSAM Copyright 2010, Georgia Tech Research Corporation, + * Atlanta, Georgia 30332-0415 + * All Rights Reserved + * Authors: Frank Dellaert, et al. (see THANKS for the full author list) + + * See LICENSE for the license information + + * -------------------------------------------------------------------------- */ + /** * @file testBlockMatrices * @author Alex Cunningham diff --git a/gtsam/base/tests/testCholesky.cpp b/gtsam/base/tests/testCholesky.cpp index c6bdc5cfd..cbc45aff6 100644 --- a/gtsam/base/tests/testCholesky.cpp +++ b/gtsam/base/tests/testCholesky.cpp @@ -21,6 +21,7 @@ #include using namespace gtsam; +using namespace std; /* ************************************************************************* */ TEST(cholesky, choleskyPartial) { @@ -53,6 +54,41 @@ TEST(cholesky, choleskyPartial) { EXPECT(assert_equal(expected, actual, 1e-9)); } +/* ************************************************************************* */ +TEST(cholesky, ldlPartial) { + + // choleskyPartial should only use the upper triangle, so this represents a + // symmetric matrix. + Matrix ABC = Matrix_(7,7, + 4.0375, 3.4584, 3.5735, 2.4815, 2.1471, 2.7400, 2.2063, + 0., 4.7267, 3.8423, 2.3624, 2.8091, 2.9579, 2.5914, + 0., 0., 5.1600, 2.0797, 3.4690, 3.2419, 2.9992, + 0., 0., 0., 1.8786, 1.0535, 1.4250, 1.3347, + 0., 0., 0., 0., 3.0788, 2.6283, 2.3791, + 0., 0., 0., 0., 0., 2.9227, 2.4056, + 0., 0., 0., 0., 0., 0., 2.5776); + + // Do partial Cholesky on 3 frontal scalar variables + Matrix RSL(ABC); + Eigen::LDLT::TranspositionType permutation = ldlPartial(RSL, 3); + + // See the function comment for choleskyPartial, this decomposition should hold. 
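+  // In the notation of the ldlPartial documentation above: R is the permuted
+  // upper-triangular block stored in the top-left of RSL, S is its top-right
+  // block, F is its bottom-right block, and P is the permutation returned above,
+  // so the product assembled below should reconstruct the symmetric input:
+  //   [ P'R'  0 ] [ R P  S ]   [ A  B ]
+  //   [ S'    I ] [ 0    F ] = [ B' C ]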
+ Matrix R1 = RSL.transpose(); + Matrix R2 = RSL; + R1.block(3, 3, 4, 4).setIdentity(); + + R2.block(3, 3, 4, 4) = R2.block(3, 3, 4, 4).selfadjointView(); + + // un-permute the permuted upper triangular part + R1.topLeftCorner(3,3) = permutation.transpose() * R1.topLeftCorner(3,3); + R2.topLeftCorner(3,3) = R2.topLeftCorner(3,3)*permutation; + + Matrix actual = R1 * R2; + + Matrix expected = ABC.selfadjointView(); + EXPECT(assert_equal(expected, actual, 1e-9)); +} + /* ************************************************************************* */ int main() { TestResult tr; diff --git a/gtsam/base/tests/timeMatrixOps.cpp b/gtsam/base/tests/timeMatrixOps.cpp index 114968381..ab9488d32 100644 --- a/gtsam/base/tests/timeMatrixOps.cpp +++ b/gtsam/base/tests/timeMatrixOps.cpp @@ -16,10 +16,6 @@ * @created Sep 18, 2010 */ -//#include -//#include -//#include -//#include #include #include #include diff --git a/gtsam/geometry/Cal3Bundler.h b/gtsam/geometry/Cal3Bundler.h index 466f888f3..1883d28a5 100644 --- a/gtsam/geometry/Cal3Bundler.h +++ b/gtsam/geometry/Cal3Bundler.h @@ -18,8 +18,6 @@ #pragma once -#include -#include #include namespace gtsam { diff --git a/gtsam/geometry/Cal3DS2.h b/gtsam/geometry/Cal3DS2.h index c3578c21e..a834bd3bf 100644 --- a/gtsam/geometry/Cal3DS2.h +++ b/gtsam/geometry/Cal3DS2.h @@ -19,8 +19,6 @@ #pragma once -#include -#include #include namespace gtsam { diff --git a/gtsam/geometry/Cal3_S2.cpp b/gtsam/geometry/Cal3_S2.cpp index a2807587f..709e03493 100644 --- a/gtsam/geometry/Cal3_S2.cpp +++ b/gtsam/geometry/Cal3_S2.cpp @@ -28,7 +28,7 @@ namespace gtsam { Cal3_S2::Cal3_S2(double fov, int w, int h) : s_(0), u0_((double) w / 2.0), v0_((double) h / 2.0) { double a = fov * M_PI / 360.0; // fov/2 in radians - fx_ = (double) w * tan(a); + fx_ = (double)w / (2.0*tan(a)); // old formula: fx_ = (double) w * tan(a); fy_ = fx_; } diff --git a/gtsam/geometry/Cal3_S2.h b/gtsam/geometry/Cal3_S2.h index 66d2fb514..43a88d88b 100644 --- a/gtsam/geometry/Cal3_S2.h +++ b/gtsam/geometry/Cal3_S2.h @@ -17,7 +17,6 @@ #pragma once -#include #include namespace gtsam { diff --git a/gtsam/geometry/Cal3_S2Stereo.h b/gtsam/geometry/Cal3_S2Stereo.h index b22416c37..1b69468e7 100644 --- a/gtsam/geometry/Cal3_S2Stereo.h +++ b/gtsam/geometry/Cal3_S2Stereo.h @@ -17,8 +17,6 @@ #pragma once -#include -#include #include namespace gtsam { diff --git a/gtsam/geometry/CalibratedCameraT.h b/gtsam/geometry/CalibratedCameraT.h index 469a89aa7..3ecdfc01a 100644 --- a/gtsam/geometry/CalibratedCameraT.h +++ b/gtsam/geometry/CalibratedCameraT.h @@ -8,7 +8,6 @@ #pragma once #include -#include #include #include diff --git a/gtsam/geometry/GeneralCameraT.h b/gtsam/geometry/GeneralCameraT.h index 2d6a746e6..ae2235e5b 100644 --- a/gtsam/geometry/GeneralCameraT.h +++ b/gtsam/geometry/GeneralCameraT.h @@ -18,16 +18,8 @@ #pragma once -#include -#include -#include -#include -#include -#include #include #include - - #include #include diff --git a/gtsam/geometry/Point2.h b/gtsam/geometry/Point2.h index 025d0c1f5..4a77e039b 100644 --- a/gtsam/geometry/Point2.h +++ b/gtsam/geometry/Point2.h @@ -18,7 +18,6 @@ #pragma once #include -#include #include #include #include diff --git a/gtsam/geometry/Pose2.h b/gtsam/geometry/Pose2.h index 77b78eb79..93fdaad69 100644 --- a/gtsam/geometry/Pose2.h +++ b/gtsam/geometry/Pose2.h @@ -22,8 +22,6 @@ #include #include -#include -#include #include #include diff --git a/gtsam/geometry/Pose3.h b/gtsam/geometry/Pose3.h index df0bfcbc2..9c4d46847 100644 --- a/gtsam/geometry/Pose3.h +++ 
b/gtsam/geometry/Pose3.h @@ -20,8 +20,6 @@ #include #include -#include -#include namespace gtsam { diff --git a/gtsam/geometry/Rot2.h b/gtsam/geometry/Rot2.h index 40f77d414..23af69529 100644 --- a/gtsam/geometry/Rot2.h +++ b/gtsam/geometry/Rot2.h @@ -16,14 +16,10 @@ * Author: Frank Dellaert */ -#ifndef ROT2_H_ -#define ROT2_H_ +#pragma once #include -#include #include -#include -#include namespace gtsam { @@ -175,5 +171,3 @@ namespace gtsam { }; // Rot2 } // gtsam - -#endif /* ROT2_H_ */ diff --git a/gtsam/geometry/Rot3.cpp b/gtsam/geometry/Rot3.cpp index 4ea4de6c8..cb31a5c0a 100644 --- a/gtsam/geometry/Rot3.cpp +++ b/gtsam/geometry/Rot3.cpp @@ -161,8 +161,15 @@ namespace gtsam { // Log map at identity - return the canonical coordinates of this rotation Vector Rot3::Logmap(const Rot3& R) { double tr = R.r1().x()+R.r2().y()+R.r3().z(); - if (tr > 3.0 - 1e-10) { // when theta = 0, +-2pi, +-4pi, etc. (or tr > 3 + 1E-10) - return zero(3); + if (tr > 3.0 - 1e-17) { // when theta = 0, +-2pi, +-4pi, etc. (or tr > 3 + 1E-10) + return zero(3); + } else if (tr > 3.0 - 1e-10) { // when theta near 0, +-2pi, +-4pi, etc. (or tr > 3 + 1E-3) + double theta = acos((tr-1.0)/2.0); + // Using Taylor expansion: theta/(2*sin(theta)) \approx 1/2+theta^2/12 + O(theta^4) + return (0.5 + theta*theta/12)*Vector_(3, + R.r2().z()-R.r3().y(), + R.r3().x()-R.r1().z(), + R.r1().y()-R.r2().x()); } else if (fabs(tr - -1.0) < 1e-10) { // when theta = +-pi, +-3pi, +-5pi, etc. if(fabs(R.r3().z() - -1.0) > 1e-10) return (boost::math::constants::pi() / sqrt(2.0+2.0*R.r3().z())) * diff --git a/gtsam/geometry/Rot3.h b/gtsam/geometry/Rot3.h index 33cf2b242..9f955829b 100644 --- a/gtsam/geometry/Rot3.h +++ b/gtsam/geometry/Rot3.h @@ -22,8 +22,6 @@ #pragma once #include -#include -#include namespace gtsam { diff --git a/gtsam/geometry/StereoCamera.h b/gtsam/geometry/StereoCamera.h index 5dab53d5a..f06e45cfb 100644 --- a/gtsam/geometry/StereoCamera.h +++ b/gtsam/geometry/StereoCamera.h @@ -17,8 +17,7 @@ #pragma once -#include -#include "boost/tuple/tuple.hpp" +#include #include #include #include diff --git a/gtsam/geometry/StereoPoint2.h b/gtsam/geometry/StereoPoint2.h index cd7d6166b..225e342cf 100644 --- a/gtsam/geometry/StereoPoint2.h +++ b/gtsam/geometry/StereoPoint2.h @@ -9,20 +9,16 @@ * -------------------------------------------------------------------------- */ -/* +/** * StereoPoint2.h * * Created on: Jan 26, 2010 * Author: dellaert */ -#ifndef STEREOPOINT2_H_ -#define STEREOPOINT2_H_ +#pragma once #include -#include -#include -#include #include namespace gtsam { @@ -133,5 +129,3 @@ namespace gtsam { }; } - -#endif /* STEREOPOINT2_H_ */ diff --git a/gtsam/geometry/tests/testCal3_S2.cpp b/gtsam/geometry/tests/testCal3_S2.cpp index d8a3ab33c..99525ec1f 100644 --- a/gtsam/geometry/tests/testCal3_S2.cpp +++ b/gtsam/geometry/tests/testCal3_S2.cpp @@ -26,7 +26,7 @@ Point2 p(1, -2); /* ************************************************************************* */ TEST( Cal3_S2, easy_constructor) { - Cal3_S2 expected(369.504, 369.504, 0, 640 / 2, 480 / 2); + Cal3_S2 expected(554.256, 554.256, 0, 640 / 2, 480 / 2); double fov = 60; // degrees size_t w=640,h=480; diff --git a/gtsam/geometry/tests/testRot3.cpp b/gtsam/geometry/tests/testRot3.cpp index 2a747e0e8..5421351c0 100644 --- a/gtsam/geometry/tests/testRot3.cpp +++ b/gtsam/geometry/tests/testRot3.cpp @@ -402,6 +402,35 @@ TEST( Rot3, RQ) CHECK(assert_equal(expected,actual,1e-6)); } +/* ************************************************************************* */ +TEST( Rot3, 
expmapStability ) { + Vector w = Vector_(3, 78e-9, 5e-8, 97e-7); + double theta = w.norm(); + double theta2 = theta*theta; + Rot3 actualR = Rot3::Expmap(w); + Matrix W = Matrix_(3,3, 0.0, -w(2), w(1), + w(2), 0.0, -w(0), + -w(1), w(0), 0.0 ); + Matrix W2 = W*W; + Matrix Rmat = eye(3) + (1.0-theta2/6.0 + theta2*theta2/120.0 + - theta2*theta2*theta2/5040.0)*W + (0.5 - theta2/24.0 + theta2*theta2/720.0)*W2 ; + Rot3 expectedR( Rmat ); + CHECK(assert_equal(expectedR, actualR, 1e-10)); +} + +/* ************************************************************************* */ +TEST( Rot3, logmapStability ) { + Vector w = Vector_(3, 1e-8, 0.0, 0.0); + Rot3 R = Rot3::Expmap(w); +// double tr = R.r1().x()+R.r2().y()+R.r3().z(); +// std::cout.precision(5000); +// std::cout << "theta: " << w.norm() << std::endl; +// std::cout << "trace: " << tr << std::endl; +// R.print("R = "); + Vector actualw = Rot3::Logmap(R); + CHECK(assert_equal(w, actualw, 1e-15)); +} + /* ************************************************************************* */ int main() { TestResult tr; diff --git a/gtsam/inference/BayesNet-inl.h b/gtsam/inference/BayesNet-inl.h index 8e26affb6..ac1349f25 100644 --- a/gtsam/inference/BayesNet-inl.h +++ b/gtsam/inference/BayesNet-inl.h @@ -39,7 +39,7 @@ namespace gtsam { void BayesNet::print(const string& s) const { cout << s << ":\n"; BOOST_REVERSE_FOREACH(sharedConditional conditional,conditionals_) - conditional->print((boost::format("Node[%1%]") % conditional->key()).str()); + conditional->print(); } /* ************************************************************************* */ @@ -82,39 +82,43 @@ namespace gtsam { push_front(conditional); } - /* ************************************************************************* */ - template - FastList BayesNet::ordering() const { - FastList ord; - BOOST_FOREACH(sharedConditional conditional,conditionals_) - ord.push_back(conditional->key()); - return ord; - } - - /* ************************************************************************* */ - template - void BayesNet::saveGraph(const std::string &s) const { - ofstream of(s.c_str()); - of<< "digraph G{\n"; - BOOST_FOREACH(const_sharedConditional conditional,conditionals_) { - Index child = conditional->key(); - BOOST_FOREACH(Index parent, conditional->parents()) { - of << parent << "->" << child << endl; - } - } - of<<"}"; - of.close(); - } - +// /* ************************************************************************* */ +// template +// FastList BayesNet::ordering() const { +// FastList ord; +// BOOST_FOREACH(sharedConditional conditional,conditionals_) +// ord.push_back(conditional->key()); +// return ord; +// } +// +// /* ************************************************************************* */ +// template +// void BayesNet::saveGraph(const std::string &s) const { +// ofstream of(s.c_str()); +// of<< "digraph G{\n"; +// BOOST_FOREACH(const_sharedConditional conditional,conditionals_) { +// Index child = conditional->key(); +// BOOST_FOREACH(Index parent, conditional->parents()) { +// of << parent << "->" << child << endl; +// } +// } +// of<<"}"; +// of.close(); +// } +// /* ************************************************************************* */ template typename BayesNet::sharedConditional BayesNet::operator[](Index key) const { - const_iterator it = find_if(conditionals_.begin(), conditionals_.end(), boost::lambda::bind(&CONDITIONAL::key, *boost::lambda::_1) == key); - if (it == conditionals_.end()) throw(invalid_argument((boost::format( + BOOST_FOREACH(typename 
CONDITIONAL::shared_ptr conditional, conditionals_) { + typename CONDITIONAL::const_iterator it = std::find(conditional->beginFrontals(), conditional->endFrontals(), key); + if (it!=conditional->endFrontals()) { + return conditional; + } + } + throw(invalid_argument((boost::format( "BayesNet::operator['%1%']: not found") % key).str())); - return *it; } /* ************************************************************************* */ diff --git a/gtsam/inference/BayesNet.h b/gtsam/inference/BayesNet.h index 2d70d8589..d43526a46 100644 --- a/gtsam/inference/BayesNet.h +++ b/gtsam/inference/BayesNet.h @@ -19,15 +19,15 @@ #pragma once +#include +#include +#include + #include #include #include #include -#include -#include -#include - namespace gtsam { /** @@ -67,6 +67,12 @@ namespace gtsam { public: + /** Default constructor as an empty BayesNet */ + BayesNet() {}; + + /** BayesNet with 1 conditional */ + BayesNet(const sharedConditional& conditional) { push_back(conditional); } + /** print */ void print(const std::string& s = "") const; @@ -110,10 +116,10 @@ namespace gtsam { return conditionals_.size(); } - /** return keys in reverse topological sort order, i.e., elimination order */ - FastList ordering() const; - - /** SLOW O(n) random access to Conditional by key */ +// /** return keys in reverse topological sort order, i.e., elimination order */ +// FastList ordering() const; +// +// /** SLOW O(n) random access to Conditional by key */ sharedConditional operator[](Index key) const; /** return last node in ordering */ @@ -135,7 +141,7 @@ namespace gtsam { inline const_reverse_iterator const rend() const {return conditionals_.rend();} /** saves the bayes to a text file in GraphViz format */ - void saveGraph(const std::string& s) const; +// void saveGraph(const std::string& s) const; private: /** Serialization function */ diff --git a/gtsam/inference/BayesTree-inl.h b/gtsam/inference/BayesTree-inl.h index 9f950096e..9efdfad5d 100644 --- a/gtsam/inference/BayesTree-inl.h +++ b/gtsam/inference/BayesTree-inl.h @@ -15,11 +15,13 @@ * @author Frank Dellaert * @author Michael Kaess * @author Viorela Ila + * @author Richard Roberts */ #pragma once #include +#include #include #include #include @@ -45,87 +47,24 @@ namespace gtsam { void BayesTree::Clique::assertInvariants() const { #ifndef NDEBUG // We rely on the keys being sorted - typename BayesNet::const_iterator cond = this->begin(); - if(cond != this->end()) { - Index lastKey = (*cond)->key(); - ++cond; - for(; cond != this->end(); ++cond) - assert(lastKey < (*cond)->key()); - } + FastVector sortedUniqueKeys(conditional_->begin(), conditional_->end()); + std::sort(sortedUniqueKeys.begin(), sortedUniqueKeys.end()); + std::unique(sortedUniqueKeys.begin(), sortedUniqueKeys.end()); + assert(sortedUniqueKeys.size() == conditional_->size() && + std::equal(sortedUniqueKeys.begin(), sortedUniqueKeys.end(), conditional_->begin())); #endif } /* ************************************************************************* */ template - BayesTree::Clique::Clique() { assertInvariants(); } - - /* ************************************************************************* */ - template - BayesTree::Clique::Clique(const sharedConditional& conditional) { - separator_.assign(conditional->parents().begin(), conditional->parents().end()); - this->push_back(conditional); + BayesTree::Clique::Clique(const sharedConditional& conditional) : conditional_(conditional) { assertInvariants(); } - /* ************************************************************************* */ - 
template - BayesTree::Clique::Clique(const BayesNet& bayesNet) - : BayesNet(bayesNet) { - if(bayesNet.size() > 0) { -#ifndef NDEBUG - // Debug check that each parent variable is either a frontal variable in - // a later conditional in the Bayes net fragment, or that the parent is - // also a parent of the last conditional. This checks that the fragment - // is "enough like a clique". todo: this should really check that the - // fragment *is* a clique. - if(bayesNet.size() > 1) { - typename BayesNet::const_reverse_iterator cond = bayesNet.rbegin(); - ++ cond; - typename CONDITIONAL::shared_ptr lastConditional = *cond; - for( ; cond != bayesNet.rend(); ++cond) - for(typename CONDITIONAL::const_iterator parent=(*cond)->beginParents(); parent!=(*cond)->endParents(); ++parent) { - bool infragment = false; - typename BayesNet::const_reverse_iterator parentCond = cond; - do { - if(*parent == (*parentCond)->key()) - infragment = true; - --parentCond; - } while(parentCond != bayesNet.rbegin()); - assert(infragment || find(lastConditional->beginParents(), lastConditional->endParents(), *parent) != lastConditional->endParents()); - } - } -#endif - -// separator_.assign((*bayesNet.rbegin())->parents().begin(), (*bayesNet.rbegin())->parents().end()); - separator_.assign((*bayesNet.rbegin())->beginParents(), (*bayesNet.rbegin())->endParents()); - } - assertInvariants(); - } - - /* ************************************************************************* */ - template - vector BayesTree::Clique::keys() const { - vector keys; - keys.reserve(this->size() + separator_.size()); - BOOST_FOREACH(const sharedConditional conditional, *this) { - keys.push_back(conditional->key()); - } - keys.insert(keys.end(), separator_.begin(), separator_.end()); - return keys; - } - /* ************************************************************************* */ template void BayesTree::Clique::print(const string& s) const { - cout << s << "Clique "; - BOOST_FOREACH(const sharedConditional& conditional, this->conditionals_) - cout << conditional->key() << " "; - cout << "| "; - BOOST_FOREACH(const Index sep, separator_) - cout << sep << " "; - cout << "\n"; - BOOST_FOREACH(const sharedConditional& conditional, this->conditionals_) - conditional->print(" " + s + "conditional"); + conditional_->print(s); } /* ************************************************************************* */ @@ -148,8 +87,7 @@ namespace gtsam { /* ************************************************************************* */ template void BayesTree::Clique::permuteWithInverse(const Permutation& inversePermutation) { - BayesNet::permuteWithInverse(inversePermutation); - BOOST_FOREACH(Index& separatorKey, separator_) { separatorKey = inversePermutation[separatorKey]; } + conditional_->permuteWithInverse(inversePermutation); if(cachedFactor_) cachedFactor_->permuteWithInverse(inversePermutation); BOOST_FOREACH(const shared_ptr& child, children_) { child->permuteWithInverse(inversePermutation); @@ -160,17 +98,16 @@ namespace gtsam { /* ************************************************************************* */ template bool BayesTree::Clique::permuteSeparatorWithInverse(const Permutation& inversePermutation) { - bool changed = BayesNet::permuteSeparatorWithInverse(inversePermutation); + bool changed = conditional_->permuteSeparatorWithInverse(inversePermutation); #ifndef NDEBUG if(!changed) { - BOOST_FOREACH(Index& separatorKey, separator_) { assert(separatorKey == inversePermutation[separatorKey]); } + BOOST_FOREACH(Index& separatorKey, 
conditional_->parents()) { assert(separatorKey == inversePermutation[separatorKey]); } BOOST_FOREACH(const shared_ptr& child, children_) { assert(child->permuteSeparatorWithInverse(inversePermutation) == false); } } #endif if(changed) { - BOOST_FOREACH(Index& separatorKey, separator_) { separatorKey = inversePermutation[separatorKey]; } if(cachedFactor_) cachedFactor_->permuteWithInverse(inversePermutation); BOOST_FOREACH(const shared_ptr& child, children_) { (void)child->permuteSeparatorWithInverse(inversePermutation); @@ -192,8 +129,8 @@ namespace gtsam { template void BayesTree::getCliqueData(CliqueData& data, BayesTree::sharedClique clique) const { - data.conditionalSizes.push_back(clique->conditionals_.size()); - data.separatorSizes.push_back(clique->separator_.size()); + data.conditionalSizes.push_back((*clique)->nrFrontals()); + data.separatorSizes.push_back((*clique)->nrParents()); BOOST_FOREACH(sharedClique c, clique->children_) { getCliqueData(data, c); } @@ -277,7 +214,6 @@ namespace gtsam { // The shortcut density is a conditional P(S|R) of the separator of this // clique on the root. We can compute it recursively from the parent shortcut // P(Sp|R) as \int P(Fp|Sp) P(Sp|R), where Fp are the frontal nodes in p - // TODO, why do we actually return a shared pointer, why does eliminate? /* ************************************************************************* */ template BayesNet BayesTree::Clique::shortcut(shared_ptr R, @@ -296,11 +232,11 @@ namespace gtsam { } // The root conditional - FactorGraph p_R(*R); + FactorGraph p_R(BayesNet(R->conditional())); // The parent clique has a CONDITIONAL for each frontal node in Fp // so we can obtain P(Fp|Sp) in factor graph form - FactorGraph p_Fp_Sp(*parent); + FactorGraph p_Fp_Sp(BayesNet(parent->conditional())); // If not the base case, obtain the parent shortcut P(Sp|R) as factors FactorGraph p_Sp_R(parent->shortcut(R, function)); @@ -330,20 +266,20 @@ namespace gtsam { FastSet variablesAtBack; FastSet separator; size_t uniqueRootVariables = 0; - BOOST_FOREACH(const Index separatorIndex, this->separator_) { + BOOST_FOREACH(const Index separatorIndex, this->conditional()->parents()) { variablesAtBack.insert(separatorIndex); separator.insert(separatorIndex); if(debug) cout << "At back (this): " << separatorIndex << endl; } - BOOST_FOREACH(const sharedConditional& conditional, *R) { - if(variablesAtBack.insert(conditional->key()).second) + BOOST_FOREACH(const Index key, R->conditional()->keys()) { + if(variablesAtBack.insert(key).second) ++ uniqueRootVariables; - if(debug) cout << "At back (root): " << conditional->key() << endl; + if(debug) cout << "At back (root): " << key << endl; } Permutation toBack = Permutation::PushToBack( vector(variablesAtBack.begin(), variablesAtBack.end()), - R->back()->key() + 1); + R->conditional()->lastFrontalKey() + 1); Permutation::shared_ptr toBackInverse(toBack.inverse()); BOOST_FOREACH(const typename FactorType::shared_ptr& factor, p_Cp_R) { factor->permuteWithInverse(*toBackInverse); } @@ -356,7 +292,8 @@ namespace gtsam { // want to include those variables in the conditional. BayesNet p_S_R; BOOST_REVERSE_FOREACH(typename CONDITIONAL::shared_ptr conditional, *eliminated) { - if(separator.find(toBack[conditional->key()]) != separator.end()) { + // TODO: CHECK!!! is it firstFrontalKey() or lastFrontalKey() or something else??? 
+ if(separator.find(toBack[conditional->firstFrontalKey()]) != separator.end()) { if(debug) conditional->print("Taking C|R conditional: "); p_S_R.push_front(conditional); @@ -385,16 +322,17 @@ namespace gtsam { shared_ptr R, Eliminate function) { // If we are the root, just return this root // NOTE: immediately cast to a factor graph - if (R.get()==this) return *R; + BayesNet bn(R->conditional()); + if (R.get()==this) return bn; // Combine P(F|S), P(S|R), and P(R) BayesNet p_FSR = this->shortcut(R, function); - p_FSR.push_front(*this); - p_FSR.push_back(*R); + p_FSR.push_front(this->conditional()); + p_FSR.push_back(R->conditional()); assertInvariants(); GenericSequentialSolver solver(p_FSR); - return *solver.jointFactorGraph(keys(), function); + return *solver.jointFactorGraph(conditional_->keys(), function); } /* ************************************************************************* */ @@ -407,15 +345,15 @@ namespace gtsam { // Combine P(F1|S1), P(S1|R), P(F2|S2), P(S2|R), and P(R) FactorGraph joint; - if (!isRoot()) joint.push_back(*this); // P(F1|S1) + if (!isRoot()) joint.push_back(this->conditional()->toFactor()); // P(F1|S1) if (!isRoot()) joint.push_back(shortcut(R, function)); // P(S1|R) - if (!C2->isRoot()) joint.push_back(*C2); // P(F2|S2) + if (!C2->isRoot()) joint.push_back(C2->conditional()->toFactor()); // P(F2|S2) if (!C2->isRoot()) joint.push_back(C2->shortcut(R, function)); // P(S2|R) - joint.push_back(*R); // P(R) + joint.push_back(R->conditional()->toFactor()); // P(R) // Find the keys of both C1 and C2 - vector keys1(keys()); - vector keys2(C2->keys()); + vector keys1(conditional_->keys()); + vector keys2(C2->conditional_->keys()); FastSet keys12; keys12.insert(keys1.begin(), keys1.end()); keys12.insert(keys2.begin(), keys2.end()); @@ -447,9 +385,9 @@ namespace gtsam { typename BayesTree::sharedClique BayesTree::addClique( const sharedConditional& conditional, sharedClique parent_clique) { sharedClique new_clique(new Clique(conditional)); - Index key = conditional->key(); - nodes_.resize(std::max(key+1, nodes_.size())); - nodes_[key] = new_clique; + nodes_.resize(std::max(conditional->lastFrontalKey()+1, nodes_.size())); + BOOST_FOREACH(Index key, conditional->frontals()) + nodes_[key] = new_clique; if (parent_clique != NULL) { new_clique->parent_ = parent_clique; parent_clique->children_.push_back(new_clique); @@ -463,9 +401,9 @@ namespace gtsam { typename BayesTree::sharedClique BayesTree::addClique( const sharedConditional& conditional, list& child_cliques) { sharedClique new_clique(new Clique(conditional)); - Index key = conditional->key(); - nodes_.resize(max(key+1, nodes_.size())); - nodes_[key] = new_clique; + nodes_.resize(std::max(conditional->lastFrontalKey()+1, nodes_.size())); + BOOST_FOREACH(Index key, conditional->frontals()) + nodes_[key] = new_clique; new_clique->children_ = child_cliques; BOOST_FOREACH(sharedClique& child, child_cliques) child->parent_ = new_clique; @@ -475,34 +413,28 @@ namespace gtsam { /* ************************************************************************* */ template - inline void BayesTree::addToCliqueFront(const sharedConditional& conditional, const sharedClique& clique) { + inline void BayesTree::addToCliqueFront(BayesTree& bayesTree, const sharedConditional& conditional, const sharedClique& clique) { static const bool debug = false; #ifndef NDEBUG // Debug check to make sure the conditional variable is ordered lower than // its parents and that all of its parents are present either in this // clique or its separator. 
BOOST_FOREACH(Index parent, conditional->parents()) { - assert(parent > conditional->key()); - bool hasParent = false; + assert(parent > conditional->lastFrontalKey()); const Clique& cliquer(*clique); - BOOST_FOREACH(const sharedConditional& child, cliquer) { - if(child->key() == parent) { - hasParent = true; - break; - } - } - if(!hasParent) - if(find(clique->separator_.begin(), clique->separator_.end(), parent) != clique->separator_.end()) - hasParent = true; - assert(hasParent); + assert(find(cliquer->begin(), cliquer->end(), parent) != cliquer->end()); } #endif if(debug) conditional->print("Adding conditional "); if(debug) clique->print("To clique "); - Index key = conditional->key(); - nodes_.resize(std::max(key+1, nodes_.size())); - nodes_[key] = clique; - clique->push_front(conditional); + Index key = conditional->lastFrontalKey(); + bayesTree.nodes_.resize(std::max(key+1, bayesTree.nodes_.size())); + bayesTree.nodes_[key] = clique; + FastVector newKeys((*clique)->size() + 1); + newKeys[0] = key; + std::copy((*clique)->begin(), (*clique)->end(), newKeys.begin()+1); + clique->conditional_ = CONDITIONAL::FromKeys(newKeys, (*clique)->nrFrontals() + 1); +// clique->conditional_->addFrontal(conditional); if(debug) clique->print("Expanded clique is "); clique->assertInvariants(); } @@ -520,13 +452,9 @@ namespace gtsam { BOOST_FOREACH(sharedClique child, clique->children_) child->parent_ = typename BayesTree::Clique::weak_ptr(); - BOOST_FOREACH(Index key, clique->separator_) { + BOOST_FOREACH(Index key, (*clique->conditional())) { nodes_[key].reset(); } - const Clique& cliquer(*clique); - BOOST_FOREACH(const sharedConditional& conditional, cliquer) { - nodes_[conditional->key()].reset(); - } } /* ************************************************************************* */ @@ -539,7 +467,7 @@ namespace gtsam { BayesTree::BayesTree(const BayesNet& bayesNet) { typename BayesNet::const_reverse_iterator rit; for ( rit=bayesNet.rbegin(); rit != bayesNet.rend(); ++rit ) - insert(*rit); + insert(*this, *rit); } /* ************************************************************************* */ @@ -562,7 +490,7 @@ namespace gtsam { if (!new_clique.get()) new_clique = addClique(conditional,childRoots); else - addToCliqueFront(conditional, new_clique); + addToCliqueFront(*this, conditional, new_clique); } root_ = new_clique; @@ -622,31 +550,31 @@ namespace gtsam { /* ************************************************************************* */ template - void BayesTree::insert(const sharedConditional& conditional) + void BayesTree::insert(BayesTree& bayesTree, const sharedConditional& conditional) { static const bool debug = false; // get key and parents - typename CONDITIONAL::Parents parents = conditional->parents(); // todo: const reference? + const typename CONDITIONAL::Parents& parents = conditional->parents(); if(debug) conditional->print("Adding conditional "); // if no parents, start a new root clique if (parents.empty()) { if(debug) cout << "No parents so making root" << endl; - root_ = addClique(conditional); + bayesTree.root_ = bayesTree.addClique(conditional); return; } // otherwise, find the parent clique by using the index data structure // to find the lowest-ordered parent - Index parentRepresentative = findParentClique(parents); - if(debug) cout << "First-eliminated parent is " << parentRepresentative << ", have " << nodes_.size() << " nodes." 
<< endl; - sharedClique parent_clique = (*this)[parentRepresentative]; + Index parentRepresentative = bayesTree.findParentClique(parents); + if(debug) cout << "First-eliminated parent is " << parentRepresentative << ", have " << bayesTree.nodes_.size() << " nodes." << endl; + sharedClique parent_clique = bayesTree[parentRepresentative]; if(debug) parent_clique->print("Parent clique is "); // if the parents and parent clique have the same size, add to parent clique - if (parent_clique->size() == size_t(parents.size())) { + if ((*parent_clique)->size() == size_t(parents.size())) { if(debug) cout << "Adding to parent clique" << endl; #ifndef NDEBUG // Debug check that the parent keys of the new conditional match the keys @@ -660,11 +588,11 @@ namespace gtsam { // ++ cond; // } #endif - addToCliqueFront(conditional, parent_clique); + addToCliqueFront(bayesTree, conditional, parent_clique); } else { if(debug) cout << "Starting new clique" << endl; // otherwise, start a new clique and add it to the tree - addClique(conditional,parent_clique); + bayesTree.addClique(conditional,parent_clique); } } @@ -672,24 +600,14 @@ namespace gtsam { //TODO: remove this function after removing TSAM.cpp template typename BayesTree::sharedClique BayesTree::insert( - const BayesNet& bayesNet, list& children, bool isRootClique) - { + const sharedConditional& clique, list& children, bool isRootClique) { static const bool debug = false; - if (bayesNet.size() == 0) - throw invalid_argument("BayesTree::insert: empty bayes net!"); + if (clique->nrFrontals() == 0) + throw invalid_argument("BayesTree::insert: empty clique!"); // create a new clique and add all the conditionals to the clique - sharedClique new_clique; - typename BayesNet::sharedConditional conditional; - BOOST_REVERSE_FOREACH(conditional, bayesNet) { - if(debug) conditional->print("Inserting conditional: "); - if (!new_clique.get()) - new_clique = addClique(conditional,children); - else - addToCliqueFront(conditional, new_clique); - } - + sharedClique new_clique = addClique(clique, children); if (isRootClique) root_ = new_clique; return new_clique; @@ -699,7 +617,7 @@ namespace gtsam { template void BayesTree::fillNodesIndex(const sharedClique& subtree) { // Add each frontal variable of this root node - BOOST_FOREACH(const typename CONDITIONAL::shared_ptr& cond, *subtree) { nodes_[cond->key()] = subtree; } + BOOST_FOREACH(const Index& key, subtree->conditional()->frontals()) { nodes_[key] = subtree; } // Fill index for each child BOOST_FOREACH(const typename BayesTree::sharedClique& child, subtree->children_) { fillNodesIndex(child); } @@ -713,19 +631,20 @@ namespace gtsam { // property, those separator variables in the subtree that are ordered // lower than the highest frontal variable of the subtree root will all // appear in the separator of the subtree root. - if(subtree->separator_.empty()) { + if(subtree->conditional()->parents().empty()) { assert(!root_); root_ = subtree; } else { - Index parentRepresentative = findParentClique(subtree->separator_); + Index parentRepresentative = findParentClique(subtree->conditional()->parents()); sharedClique parent = (*this)[parentRepresentative]; parent->children_ += subtree; subtree->parent_ = parent; // set new parent! 
} // Now fill in the nodes index - if(nodes_.size() == 0 || subtree->back()->key() > (nodes_.size() - 1)) - nodes_.resize(subtree->back()->key() + 1); + if(nodes_.size() == 0 || (subtree->conditional()->lastFrontalKey()) > (nodes_.size() - 1)) { + nodes_.resize(subtree->conditional()->lastFrontalKey() + 1); + } fillNodesIndex(subtree); } } @@ -743,7 +662,7 @@ namespace gtsam { // calculate or retrieve its marginal FactorGraph cliqueMarginal = clique->marginal(root_,function); - return GenericSequentialSolver (cliqueMarginal).marginalFactor( + return GenericSequentialSolver(cliqueMarginal).marginalFactor( key, function); } @@ -817,7 +736,7 @@ namespace gtsam { // add children to list of orphans (splice also removed them from clique->children_) orphans.splice (orphans.begin(), clique->children_); - bn.push_back(*clique); + bn.push_back(clique->conditional()); } } diff --git a/gtsam/inference/BayesTree.h b/gtsam/inference/BayesTree.h index 5de62342f..fafeba94b 100644 --- a/gtsam/inference/BayesTree.h +++ b/gtsam/inference/BayesTree.h @@ -19,13 +19,11 @@ #pragma once -#include #include #include #include #include -#include #include #include @@ -51,43 +49,45 @@ namespace gtsam { * A Clique in the tree is an incomplete Bayes net: the variables * in the Bayes net are the frontal nodes, and the variables conditioned * on are the separator. We also have pointers up and down the tree. + * + * Since our Conditional class already handles multiple frontal variables, + * this Clique contains exactly 1 conditional. */ - struct Clique: public BayesNet { + struct Clique { protected: void assertInvariants() const; public: - typedef BayesNet Base; + typedef CONDITIONAL Base; typedef typename boost::shared_ptr shared_ptr; typedef typename boost::weak_ptr weak_ptr; + sharedConditional conditional_; weak_ptr parent_; std::list children_; - std::list separator_; /** separator keys */ typename FactorType::shared_ptr cachedFactor_; friend class BayesTree; //* Constructor */ - Clique(); + Clique() {} Clique(const sharedConditional& conditional); - Clique(const BayesNet& bayesNet); - - /** return keys in frontal:separator order */ - std::vector keys() const; - /** print this node */ void print(const std::string& s = "") const; - /** The size *includes* the separator */ - size_t size() const { - return this->conditionals_.size() + separator_.size(); - } + /** The arrow operator accesses the conditional */ + const CONDITIONAL* operator->() const { return conditional_.get(); } + + /** The arrow operator accesses the conditional */ + CONDITIONAL* operator->() { return conditional_.get(); } + + /** Access the conditional */ + const sharedConditional& conditional() const { return conditional_; } /** is this the root of a Bayes tree ? */ - inline bool isRoot() const { return parent_.expired();} + inline bool isRoot() const { return parent_.expired(); } /** return the const reference of children */ std::list& children() { return children_; } @@ -122,15 +122,19 @@ namespace gtsam { /** return the joint P(C1,C2), where C1==this. TODO: not a method? 
*/ FactorGraph joint(shared_ptr C2, shared_ptr root, Eliminate function); + bool equals(const Clique& other, double tol=1e-9) const { + return (!conditional_ && !other.conditional()) || + conditional_->equals(*(other.conditional()), tol); + } + private: /** Serialization function */ friend class boost::serialization::access; template void serialize(ARCHIVE & ar, const unsigned int version) { - ar & BOOST_SERIALIZATION_BASE_OBJECT_NVP(Base); + ar & BOOST_SERIALIZATION_NVP(conditional_); ar & BOOST_SERIALIZATION_NVP(parent_); ar & BOOST_SERIALIZATION_NVP(children_); - ar & BOOST_SERIALIZATION_NVP(separator_); ar & BOOST_SERIALIZATION_NVP(cachedFactor_); } @@ -194,7 +198,8 @@ namespace gtsam { * parents are already in the clique or its separators. This function does * not check for this condition, it just updates the data structures. */ - void addToCliqueFront(const sharedConditional& conditional, const sharedClique& clique); + static void addToCliqueFront(BayesTree& bayesTree, + const sharedConditional& conditional, const sharedClique& clique); /** Fill the nodes index for a subtree */ void fillNodesIndex(const sharedClique& subtree); @@ -220,15 +225,18 @@ namespace gtsam { * Constructing Bayes trees */ - /** Insert a new conditional */ - void insert(const sharedConditional& conditional); + /** Insert a new conditional + * This function only applies for Symbolic case with IndexCondtional, + * We make it static so that it won't be compiled in GaussianConditional case. + * */ + static void insert(BayesTree& bayesTree, const sharedConditional& conditional); /** * Insert a new clique corresponding to the given Bayes net. * It is the caller's responsibility to decide whether the given Bayes net is a valid clique, * i.e. all the variables (frontal and separator) are connected */ - sharedClique insert(const BayesNet& bayesNet, + sharedClique insert(const sharedConditional& clique, std::list& children, bool isRootClique = false); /** @@ -262,6 +270,9 @@ namespace gtsam { return 0; } + /** return nodes */ + Nodes nodes() const { return nodes_; } + /** return root clique */ const sharedClique& root() const { return root_; } sharedClique& root() { return root_; } @@ -275,7 +286,7 @@ namespace gtsam { CliqueData getCliqueData() const; /** return marginal on any variable */ - typename CONDITIONAL::FactorType::shared_ptr marginalFactor(Index key, + typename FactorType::shared_ptr marginalFactor(Index key, Eliminate function) const; /** diff --git a/gtsam/inference/ClusterTree.h b/gtsam/inference/ClusterTree.h index 77f46a924..18d1202d7 100644 --- a/gtsam/inference/ClusterTree.h +++ b/gtsam/inference/ClusterTree.h @@ -19,15 +19,15 @@ #pragma once -#include -#include +#include +#include +#include #include #include -#include -#include -#include +#include +#include namespace gtsam { diff --git a/gtsam/inference/Conditional.h b/gtsam/inference/Conditional.h index 6d8ef2bb5..55cdda7a7 100644 --- a/gtsam/inference/Conditional.h +++ b/gtsam/inference/Conditional.h @@ -24,7 +24,6 @@ #include #include #include -#include #include namespace gtsam { @@ -47,9 +46,6 @@ class Conditional: public gtsam::Factor, boost::noncopyable, public Testabl private: - /** The first nFrontal variables are frontal and the rest are parents. */ - size_t nrFrontals_; - /** Create keys by adding key in front */ template static std::vector MakeKeys(KEY key, ITERATOR firstParent, ITERATOR lastParent) { @@ -61,6 +57,9 @@ private: protected: + /** The first nFrontal variables are frontal and the rest are parents. 
*/ + size_t nrFrontals_; + // Calls the base class assertInvariants, which checks for unique keys void assertInvariants() const { Factor::assertInvariants(); } @@ -131,7 +130,8 @@ public: size_t nrParents() const { return FactorType::size() - nrFrontals_; } /** Special accessor when there is only one frontal variable. */ - Key key() const { assert(nrFrontals_==1); return FactorType::front(); } + Key firstFrontalKey() const { return FactorType::front(); } + Key lastFrontalKey() const { return *(endFrontals()-1); } /** Iterators over frontal and parent variables. */ const_iterator beginFrontals() const { return FactorType::begin(); } diff --git a/gtsam/inference/EliminationTree-inl.h b/gtsam/inference/EliminationTree-inl.h index 3a9663f54..dddbe8e7e 100644 --- a/gtsam/inference/EliminationTree-inl.h +++ b/gtsam/inference/EliminationTree-inl.h @@ -47,12 +47,12 @@ typename EliminationTree::sharedFactor EliminationTree::eliminat // TODO: wait for completion of all threads // Combine all factors (from this node and from subtrees) into a joint factor - pair + typename FactorGraph::EliminationResult eliminated(function(factors, 1)); - conditionals[this->key_] = eliminated.first->front(); + conditionals[this->key_] = eliminated.first; if(debug) cout << "Eliminated " << this->key_ << " to get:\n"; - if(debug) eliminated.first->front()->print("Conditional: "); + if(debug) eliminated.first->print("Conditional: "); if(debug) eliminated.second->print("Factor: "); return eliminated.second; diff --git a/gtsam/inference/EliminationTree.h b/gtsam/inference/EliminationTree.h index fc3cb3569..e9cf0ae11 100644 --- a/gtsam/inference/EliminationTree.h +++ b/gtsam/inference/EliminationTree.h @@ -6,11 +6,8 @@ */ #pragma once -#include -#include #include -#include #include #include #include diff --git a/gtsam/inference/Factor-inl.h b/gtsam/inference/Factor-inl.h index 543e738e1..82bb29cc8 100644 --- a/gtsam/inference/Factor-inl.h +++ b/gtsam/inference/Factor-inl.h @@ -36,6 +36,7 @@ namespace gtsam { template Factor::Factor(const ConditionalType& c) : keys_(c.keys()) { +// assert(c.nrFrontals() == 1); assertInvariants(); } diff --git a/gtsam/inference/Factor.h b/gtsam/inference/Factor.h index c07b90c9c..b4eeaa800 100644 --- a/gtsam/inference/Factor.h +++ b/gtsam/inference/Factor.h @@ -21,10 +21,8 @@ #pragma once -#include #include #include -#include // for noncopyable #include #include #include diff --git a/gtsam/inference/FactorGraph.h b/gtsam/inference/FactorGraph.h index 7cc099d6c..889b1e246 100644 --- a/gtsam/inference/FactorGraph.h +++ b/gtsam/inference/FactorGraph.h @@ -21,14 +21,11 @@ #pragma once -#include #include #include #include -#include #include -#include #include #include @@ -51,7 +48,7 @@ namespace gtsam { /** typedef for elimination result */ typedef std::pair< - typename BayesNet::shared_ptr, + boost::shared_ptr, typename FACTOR::shared_ptr> EliminationResult; /** typedef for an eliminate subroutine */ @@ -88,6 +85,10 @@ namespace gtsam { template void push_back(ITERATOR firstFactor, ITERATOR lastFactor); + /** push back many factors stored in a vector*/ + template + void push_back(const std::vector >& factors); + /** ------------------ Querying Factor Graphs ---------------------------- */ /** print out graph */ @@ -236,5 +237,13 @@ namespace gtsam { this->push_back(*(factor++)); } + /* ************************************************************************* */ + template + template + void FactorGraph::push_back(const std::vector >& factors) { + BOOST_FOREACH(const boost::shared_ptr& factor, 
factors) + this->push_back(factor); + } + } // namespace gtsam diff --git a/gtsam/inference/GenericMultifrontalSolver.h b/gtsam/inference/GenericMultifrontalSolver.h index 11f550368..e8d4a3023 100644 --- a/gtsam/inference/GenericMultifrontalSolver.h +++ b/gtsam/inference/GenericMultifrontalSolver.h @@ -18,12 +18,11 @@ #pragma once -#include -#include -#include - #include +#include +#include + namespace gtsam { /** diff --git a/gtsam/inference/GenericSequentialSolver.h b/gtsam/inference/GenericSequentialSolver.h index c8cba1113..52da0a96d 100644 --- a/gtsam/inference/GenericSequentialSolver.h +++ b/gtsam/inference/GenericSequentialSolver.h @@ -18,12 +18,10 @@ #pragma once -#include -#include -#include - #include +#include + namespace gtsam { template diff --git a/gtsam/inference/ISAM-inl.h b/gtsam/inference/ISAM-inl.h index 0ab3d8d48..c77ad96fc 100644 --- a/gtsam/inference/ISAM-inl.h +++ b/gtsam/inference/ISAM-inl.h @@ -24,7 +24,7 @@ using namespace boost::assign; #include #include #include -#include +#include namespace gtsam { @@ -35,9 +35,9 @@ namespace gtsam { ISAM::ISAM() : BayesTree() {} /** Create a Bayes Tree from a Bayes Net */ - template - ISAM::ISAM(const BayesNet& bayesNet) : - BayesTree(bayesNet) {} +// template +// ISAM::ISAM(const BayesNet& bayesNet) : +// BayesTree(bayesNet) {} /* ************************************************************************* */ template @@ -53,13 +53,18 @@ namespace gtsam { factors.push_back(newFactors); // eliminate into a Bayes net - GenericSequentialSolver solver(factors); - typename BayesNet::shared_ptr bayesNet = solver.eliminate(function); - - // insert conditionals back in, straight into the topless bayesTree - typename BayesNet::const_reverse_iterator rit; - for ( rit=bayesNet->rbegin(); rit != bayesNet->rend(); ++rit ) - this->insert(*rit); +// GenericSequentialSolver solver(factors); +// typename BayesNet::shared_ptr bayesNet = solver.eliminate(function); +// +// // insert conditionals back in, straight into the topless bayesTree +// typename BayesNet::const_reverse_iterator rit; +// for ( rit=bayesNet->rbegin(); rit != bayesNet->rend(); ++rit ) +// BayesTree::insert(*this,*rit); + GenericMultifrontalSolver > solver(factors); + boost::shared_ptr > bayesTree; + bayesTree = solver.eliminate(function); + this->root_ = bayesTree->root(); + this->nodes_ = bayesTree->nodes(); // add orphans to the bottom of the new tree BOOST_FOREACH(sharedClique orphan, orphans) diff --git a/gtsam/inference/ISAM.h b/gtsam/inference/ISAM.h index e02aee5b9..ca4ec5bc0 100644 --- a/gtsam/inference/ISAM.h +++ b/gtsam/inference/ISAM.h @@ -19,23 +19,14 @@ #pragma once -#include -#include -#include -#include -//#include -//#include -#include - -#include -#include -#include #include namespace gtsam { template class ISAM: public BayesTree { + private: + typedef BayesTree Base; public: @@ -45,6 +36,9 @@ namespace gtsam { /** Create a Bayes Tree from a Bayes Net */ ISAM(const BayesNet& bayesNet); + /** Create a Bayes Tree from a Bayes Tree */ + ISAM(const Base& bayesTree) : Base(bayesTree) {} + typedef typename BayesTree::sharedClique sharedClique; typedef typename BayesTree::Cliques Cliques; diff --git a/gtsam/inference/IndexConditional.h b/gtsam/inference/IndexConditional.h index 747a805f0..a156f2bf0 100644 --- a/gtsam/inference/IndexConditional.h +++ b/gtsam/inference/IndexConditional.h @@ -73,8 +73,19 @@ namespace gtsam { assertInvariants(); } + /** Named constructor directly returning a shared pointer */ + template + static shared_ptr FromKeys(const KEYS& keys, 
size_t nrFrontals) { + shared_ptr conditional(new IndexConditional()); + conditional->keys_.assign(keys.begin(), keys.end()); + conditional->nrFrontals_ = nrFrontals; + return conditional; + } + /** Convert to a factor */ - IndexFactor::shared_ptr toFactor() const { return IndexFactor::shared_ptr(new IndexFactor(*this)); } + IndexFactor::shared_ptr toFactor() const { + return IndexFactor::shared_ptr(new IndexFactor(*this)); + } /** Permute the variables when only separator variables need to be permuted. * Returns true if any reordered variables appeared in the separator and diff --git a/gtsam/inference/JunctionTree-inl.h b/gtsam/inference/JunctionTree-inl.h index aece84643..a2a613f59 100644 --- a/gtsam/inference/JunctionTree-inl.h +++ b/gtsam/inference/JunctionTree-inl.h @@ -116,8 +116,8 @@ namespace gtsam { if(bayesClique) { // create a new clique in the junction tree - FastList frontals = bayesClique->ordering(); - sharedClique clique(new Clique(frontals.begin(), frontals.end(), bayesClique->separator_.begin(), bayesClique->separator_.end())); + sharedClique clique(new Clique((*bayesClique)->beginFrontals(), (*bayesClique)->endFrontals(), + (*bayesClique)->beginParents(), (*bayesClique)->endParents())); // count the factors for this cluster to pre-allocate space { @@ -173,7 +173,7 @@ namespace gtsam { // come from and go to, create and eliminate the new joint factor. tic(2, "CombineAndEliminate"); pair< - typename BayesNet::shared_ptr, + typename FG::FactorType::ConditionalType::shared_ptr, typename FG::sharedFactor> eliminated(function(fg, current->frontal.size())); toc(2, "CombineAndEliminate"); @@ -182,7 +182,7 @@ namespace gtsam { tic(3, "Update tree"); // create a new clique corresponding the combined factors - typename BayesTree::sharedClique new_clique(new typename BayesTree::Clique(*eliminated.first)); + typename BayesTree::sharedClique new_clique(new typename BayesTree::Clique(eliminated.first)); new_clique->children_ = children; BOOST_FOREACH(typename BayesTree::sharedClique& childRoot, children) { diff --git a/gtsam/inference/JunctionTree.h b/gtsam/inference/JunctionTree.h index 9037de969..f4e591b70 100644 --- a/gtsam/inference/JunctionTree.h +++ b/gtsam/inference/JunctionTree.h @@ -21,8 +21,6 @@ #include #include -#include -#include #include #include #include diff --git a/gtsam/inference/Permutation.h b/gtsam/inference/Permutation.h index d956cf346..46859b718 100644 --- a/gtsam/inference/Permutation.h +++ b/gtsam/inference/Permutation.h @@ -18,12 +18,12 @@ #pragma once -#include - #include #include #include +#include + namespace gtsam { class Inference; diff --git a/gtsam/inference/SymbolicFactorGraph.cpp b/gtsam/inference/SymbolicFactorGraph.cpp index 33bce0e0c..ace4dbade 100644 --- a/gtsam/inference/SymbolicFactorGraph.cpp +++ b/gtsam/inference/SymbolicFactorGraph.cpp @@ -80,7 +80,7 @@ namespace gtsam { } /* ************************************************************************* */ - pair::shared_ptr, IndexFactor::shared_ptr> // + pair // EliminateSymbolic(const FactorGraph& factors, size_t nrFrontals) { FastSet keys; @@ -91,14 +91,10 @@ namespace gtsam { if (keys.size() < 1) throw invalid_argument( "IndexFactor::CombineAndEliminate called on factors with no variables."); - pair::shared_ptr, IndexFactor::shared_ptr> result; - result.first.reset(new BayesNet ()); - FastSet::const_iterator it; - for (it = keys.begin(); result.first->size() < nrFrontals; ++it) { - std::vector newKeys(it,keys.end()); - result.first->push_back(boost::make_shared(newKeys, 1)); - } - 
result.second.reset(new IndexFactor(it, keys.end())); + pair result; + std::vector newKeys(keys.begin(),keys.end()); + result.first.reset(new IndexConditional(newKeys, nrFrontals)); + result.second.reset(new IndexFactor(newKeys.begin()+nrFrontals, newKeys.end())); return result; } diff --git a/gtsam/inference/SymbolicFactorGraph.h b/gtsam/inference/SymbolicFactorGraph.h index f48aa3ad4..cf0b35c24 100644 --- a/gtsam/inference/SymbolicFactorGraph.h +++ b/gtsam/inference/SymbolicFactorGraph.h @@ -18,7 +18,6 @@ #pragma once -#include #include #include @@ -74,7 +73,7 @@ namespace gtsam { * Combine and eliminate can also be called separately, but for this and * derived classes calling them separately generally does extra work. */ - std::pair::shared_ptr, IndexFactor::shared_ptr> + std::pair EliminateSymbolic(const FactorGraph&, size_t nrFrontals = 1); /* Template function implementation */ diff --git a/gtsam/inference/SymbolicMultifrontalSolver.h b/gtsam/inference/SymbolicMultifrontalSolver.h index 8c8c8abc8..2233cdd03 100644 --- a/gtsam/inference/SymbolicMultifrontalSolver.h +++ b/gtsam/inference/SymbolicMultifrontalSolver.h @@ -20,7 +20,6 @@ #include #include -#include namespace gtsam { diff --git a/gtsam/inference/VariableIndex.cpp b/gtsam/inference/VariableIndex.cpp index 5641f72d2..75194f2b9 100644 --- a/gtsam/inference/VariableIndex.cpp +++ b/gtsam/inference/VariableIndex.cpp @@ -16,6 +16,7 @@ * @created Oct 22, 2010 */ +#include #include namespace gtsam { diff --git a/gtsam/inference/VariableIndex.h b/gtsam/inference/VariableIndex.h index 083073c12..51501a077 100644 --- a/gtsam/inference/VariableIndex.h +++ b/gtsam/inference/VariableIndex.h @@ -18,13 +18,10 @@ #pragma once -#include #include #include #include -#include -#include #include namespace gtsam { diff --git a/gtsam/inference/VariableSlots.h b/gtsam/inference/VariableSlots.h index bddbff61b..abe753156 100644 --- a/gtsam/inference/VariableSlots.h +++ b/gtsam/inference/VariableSlots.h @@ -27,9 +27,8 @@ #include #include -#include -#include #include +#include namespace gtsam { diff --git a/gtsam/inference/inference.h b/gtsam/inference/inference.h index 00b381201..2be90a074 100644 --- a/gtsam/inference/inference.h +++ b/gtsam/inference/inference.h @@ -18,15 +18,12 @@ #pragma once -#include #include -#include #include #include #include -#include #include namespace gtsam { diff --git a/gtsam/inference/tests/testBayesTree.cpp b/gtsam/inference/tests/testBayesTree.cpp index 727d5bfcd..b1151f8d9 100644 --- a/gtsam/inference/tests/testBayesTree.cpp +++ b/gtsam/inference/tests/testBayesTree.cpp @@ -18,6 +18,8 @@ */ #include // for operator += +#include +#include using namespace boost::assign; #include @@ -66,16 +68,20 @@ IndexConditional::shared_ptr T(new IndexConditional(_T_, _E_, _L_)), X(new IndexConditional(_X_, _E_)); +// Cliques +IndexConditional::shared_ptr + ELB(IndexConditional::FromKeys(cref_list_of<3>(_E_)(_L_)(_B_), 3)); + // Bayes Tree for Asia example SymbolicBayesTree createAsiaSymbolicBayesTree() { SymbolicBayesTree bayesTree; // Ordering asiaOrdering; asiaOrdering += _X_, _T_, _S_, _E_, _L_, _B_; - bayesTree.insert(B); - bayesTree.insert(L); - bayesTree.insert(E); - bayesTree.insert(S); - bayesTree.insert(T); - bayesTree.insert(X); + SymbolicBayesTree::insert(bayesTree, B); + SymbolicBayesTree::insert(bayesTree, L); + SymbolicBayesTree::insert(bayesTree, E); + SymbolicBayesTree::insert(bayesTree, S); + SymbolicBayesTree::insert(bayesTree, T); + SymbolicBayesTree::insert(bayesTree, X); return bayesTree; } @@ -89,12 +95,8 @@ 
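For reference, a small sketch of how the new FromKeys named constructor builds a conditional with several frontal variables, in the same boost::assign style the tests use (the keys and frontal count here are illustrative only):

#include <vector>
#include <boost/assign/std/vector.hpp>
#include <gtsam/inference/IndexConditional.h>
#include <gtsam/inference/IndexFactor.h>

using namespace gtsam;
using namespace boost::assign;

void fromKeysExample() {
  // keys 3,4,5 with the first two treated as frontal variables
  std::vector<Index> keys;
  keys += 3, 4, 5;
  IndexConditional::shared_ptr conditional =
      IndexConditional::FromKeys(keys, 2);  // 2 frontals, 1 parent

  // converting back to a factor simply forgets the frontal/parent split
  IndexFactor::shared_ptr factor = conditional->toFactor();
}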
TEST( BayesTree, constructor ) LONGS_EQUAL(4,bayesTree.size()); // Check root - BayesNet expected_root; - expected_root.push_back(E); - expected_root.push_back(L); - expected_root.push_back(B); - boost::shared_ptr > actual_root = bayesTree.root(); - CHECK(assert_equal(expected_root,*actual_root)); + boost::shared_ptr actual_root = bayesTree.root()->conditional(); + CHECK(assert_equal(*ELB,*actual_root)); // Create from symbolic Bayes chain in which we want to discover cliques BayesNet ASIA; @@ -154,18 +156,18 @@ TEST( BayesTree, removePath ) F(new IndexConditional(_F_, _E_)); SymbolicBayesTree bayesTree; // Ordering ord; ord += _A_,_B_,_C_,_D_,_E_,_F_; - bayesTree.insert(A); - bayesTree.insert(B); - bayesTree.insert(C); - bayesTree.insert(D); - bayesTree.insert(E); - bayesTree.insert(F); + SymbolicBayesTree::insert(bayesTree, A); + SymbolicBayesTree::insert(bayesTree, B); + SymbolicBayesTree::insert(bayesTree, C); + SymbolicBayesTree::insert(bayesTree, D); + SymbolicBayesTree::insert(bayesTree, E); + SymbolicBayesTree::insert(bayesTree, F); // remove C, expected outcome: factor graph with ABC, // Bayes Tree now contains two orphan trees: D|C and E|B,F|E SymbolicFactorGraph expected; expected.push_factor(_B_,_A_); - expected.push_factor(_A_); +// expected.push_factor(_A_); expected.push_factor(_C_,_A_); SymbolicBayesTree::Cliques expectedOrphans; expectedOrphans += bayesTree[_D_], bayesTree[_E_]; @@ -205,8 +207,8 @@ TEST( BayesTree, removePath2 ) // Check expected outcome SymbolicFactorGraph expected; expected.push_factor(_E_,_L_,_B_); - expected.push_factor(_L_,_B_); - expected.push_factor(_B_); +// expected.push_factor(_L_,_B_); +// expected.push_factor(_B_); CHECK(assert_equal(expected, factors)); SymbolicBayesTree::Cliques expectedOrphans; expectedOrphans += bayesTree[_S_], bayesTree[_T_], bayesTree[_X_]; @@ -227,8 +229,8 @@ TEST( BayesTree, removePath3 ) // Check expected outcome SymbolicFactorGraph expected; expected.push_factor(_E_,_L_,_B_); - expected.push_factor(_L_,_B_); - expected.push_factor(_B_); +// expected.push_factor(_L_,_B_); +// expected.push_factor(_B_); expected.push_factor(_S_,_L_,_B_); CHECK(assert_equal(expected, factors)); SymbolicBayesTree::Cliques expectedOrphans; @@ -254,8 +256,8 @@ TEST( BayesTree, removeTop ) // Check expected outcome SymbolicFactorGraph expected; expected.push_factor(_E_,_L_,_B_); - expected.push_factor(_L_,_B_); - expected.push_factor(_B_); +// expected.push_factor(_L_,_B_); +// expected.push_factor(_B_); expected.push_factor(_S_,_L_,_B_); CHECK(assert_equal(expected, factors)); SymbolicBayesTree::Cliques expectedOrphans; @@ -295,8 +297,8 @@ TEST( BayesTree, removeTop2 ) // Check expected outcome SymbolicFactorGraph expected; expected.push_factor(_E_,_L_,_B_); - expected.push_factor(_L_,_B_); - expected.push_factor(_B_); +// expected.push_factor(_L_,_B_); +// expected.push_factor(_B_); expected.push_factor(_S_,_L_,_B_); CHECK(assert_equal(expected, factors)); SymbolicBayesTree::Cliques expectedOrphans; @@ -318,10 +320,10 @@ TEST( BayesTree, removeTop3 ) // Ordering newOrdering; // newOrdering += _x3_, _x2_, _x1_, _l2_, _l1_, _x4_, _l5_; SymbolicBayesTree bayesTree; - bayesTree.insert(X); - bayesTree.insert(A); - bayesTree.insert(B); - bayesTree.insert(C); + SymbolicBayesTree::insert(bayesTree, X); + SymbolicBayesTree::insert(bayesTree, A); + SymbolicBayesTree::insert(bayesTree, B); + SymbolicBayesTree::insert(bayesTree, C); // remove all list keys; diff --git a/gtsam/inference/tests/testSymbolicFactor.cpp 
b/gtsam/inference/tests/testSymbolicFactor.cpp index 1be53aee0..23ccc24e4 100644 --- a/gtsam/inference/tests/testSymbolicFactor.cpp +++ b/gtsam/inference/tests/testSymbolicFactor.cpp @@ -71,28 +71,38 @@ TEST(SymbolicFactor, EliminateSymbolic) { factors.push_factor(1,2,5); factors.push_factor(0,3); - IndexFactor expected_factor(4,5,6); - BayesNet expected_bn; - vector parents; + IndexFactor expectedFactor(4,5,6); + std::vector keys; keys += 0,1,2,3,4,5,6; + IndexConditional::shared_ptr expectedConditional(new IndexConditional(keys, 4)); - parents.clear(); parents += 1,2,3,4,5,6; - expected_bn.push_back(IndexConditional::shared_ptr(new IndexConditional(0, parents))); + IndexFactor::shared_ptr actualFactor; + IndexConditional::shared_ptr actualConditional; + boost::tie(actualConditional, actualFactor) = EliminateSymbolic(factors, 4); - parents.clear(); parents += 2,3,4,5,6; - expected_bn.push_back(IndexConditional::shared_ptr(new IndexConditional(1, parents))); + CHECK(assert_equal(*expectedConditional, *actualConditional)); + CHECK(assert_equal(expectedFactor, *actualFactor)); - parents.clear(); parents += 3,4,5,6; - expected_bn.push_back(IndexConditional::shared_ptr(new IndexConditional(2, parents))); - - parents.clear(); parents += 4,5,6; - expected_bn.push_back(IndexConditional::shared_ptr(new IndexConditional(3, parents))); - - BayesNet::shared_ptr actual_bn; - IndexFactor::shared_ptr actual_factor; - boost::tie(actual_bn, actual_factor) = EliminateSymbolic(factors, 4); - - CHECK(assert_equal(expected_bn, *actual_bn)); - CHECK(assert_equal(expected_factor, *actual_factor)); +// BayesNet expected_bn; +// vector parents; +// +// parents.clear(); parents += 1,2,3,4,5,6; +// expected_bn.push_back(IndexConditional::shared_ptr(new IndexConditional(0, parents))); +// +// parents.clear(); parents += 2,3,4,5,6; +// expected_bn.push_back(IndexConditional::shared_ptr(new IndexConditional(1, parents))); +// +// parents.clear(); parents += 3,4,5,6; +// expected_bn.push_back(IndexConditional::shared_ptr(new IndexConditional(2, parents))); +// +// parents.clear(); parents += 4,5,6; +// expected_bn.push_back(IndexConditional::shared_ptr(new IndexConditional(3, parents))); +// +// BayesNet::shared_ptr actual_bn; +// IndexFactor::shared_ptr actual_factor; +// boost::tie(actual_bn, actual_factor) = EliminateSymbolic(factors, 4); +// +// CHECK(assert_equal(expected_bn, *actual_bn)); +// CHECK(assert_equal(expected_factor, *actual_factor)); } /* ************************************************************************* */ diff --git a/gtsam/linear/Errors.h b/gtsam/linear/Errors.h index 76b756b00..e7098d8bf 100644 --- a/gtsam/linear/Errors.h +++ b/gtsam/linear/Errors.h @@ -19,10 +19,6 @@ #pragma once -#include - -#include -#include #include namespace gtsam { diff --git a/gtsam/linear/GaussianBayesNet.cpp b/gtsam/linear/GaussianBayesNet.cpp index b1403dffc..9d261a003 100644 --- a/gtsam/linear/GaussianBayesNet.cpp +++ b/gtsam/linear/GaussianBayesNet.cpp @@ -73,28 +73,27 @@ boost::shared_ptr allocateVectorValues(const GaussianBayesNet& bn) vector dimensions(bn.size()); Index var = 0; BOOST_FOREACH(const boost::shared_ptr conditional, bn) { - dimensions[var++] = conditional->get_R().rows(); + dimensions[var++] = conditional->dim(); } return boost::shared_ptr(new VectorValues(dimensions)); } /* ************************************************************************* */ -VectorValues optimize(const GaussianBayesNet& bn) -{ +VectorValues optimize(const GaussianBayesNet& bn) { return *optimize_(bn); } /* 
************************************************************************* */ boost::shared_ptr optimize_(const GaussianBayesNet& bn) { - boost::shared_ptr result(allocateVectorValues(bn)); + // get the RHS as a VectorValues to initialize system + boost::shared_ptr result(new VectorValues(rhs(bn))); /** solve each node in turn in topological sort order (parents first)*/ - BOOST_REVERSE_FOREACH(GaussianConditional::shared_ptr cg, bn) { - Vector x = cg->solve(*result); // Solve for that variable - (*result)[cg->key()] = x; // store result in partial solution - } - return result; + BOOST_REVERSE_FOREACH(GaussianConditional::shared_ptr cg, bn) + cg->solveInPlace(*result); // solve and store solution in same step + + return result; } /* ************************************************************************* */ @@ -112,13 +111,7 @@ void backSubstituteInPlace(const GaussianBayesNet& bn, VectorValues& y) { BOOST_REVERSE_FOREACH(const boost::shared_ptr cg, bn) { // i^th part of R*x=y, x=inv(R)*y // (Rii*xi + R_i*x(i+1:))./si = yi <-> xi = inv(Rii)*(yi.*si - R_i*x(i+1:)) - Index i = cg->key(); - Vector zi = emul(y[i],cg->get_sigmas()); - GaussianConditional::const_iterator it; - for (it = cg->beginParents(); it!= cg->endParents(); it++) { - multiplyAdd(-1.0,cg->get_S(it),x[*it],zi); - } - x[i] = gtsam::backSubstituteUpper(cg->get_R(), zi); + cg->solveInPlace(x); } } @@ -135,21 +128,12 @@ VectorValues backSubstituteTranspose(const GaussianBayesNet& bn, // we loop from first-eliminated to last-eliminated // i^th part of L*gy=gx is done block-column by block-column of L - BOOST_FOREACH(const boost::shared_ptr cg, bn) { - Index j = cg->key(); - gy[j] = gtsam::backSubstituteUpper(gy[j],Matrix(cg->get_R())); - GaussianConditional::const_iterator it; - for (it = cg->beginParents(); it!= cg->endParents(); it++) { - const Index i = *it; - transposeMultiplyAdd(-1.0,cg->get_S(it),gy[j],gy[i]); - } - } + BOOST_FOREACH(const boost::shared_ptr cg, bn) + cg->solveTransposeInPlace(gy); // Scale gy - BOOST_FOREACH(GaussianConditional::shared_ptr cg, bn) { - Index j = cg->key(); - gy[j] = emul(gy[j],cg->get_sigmas()); - } + BOOST_FOREACH(GaussianConditional::shared_ptr cg, bn) + cg->scaleFrontalsBySigma(gy); return gy; } @@ -161,8 +145,11 @@ pair matrix(const GaussianBayesNet& bn) { // and at the same time create a mapping from keys to indices size_t N=0; map mapping; BOOST_FOREACH(GaussianConditional::shared_ptr cg,bn) { - mapping.insert(make_pair(cg->key(),N)); - N += cg->dim(); + GaussianConditional::const_iterator it = cg->beginFrontals(); + for (; it != cg->endFrontals(); ++it) { + mapping.insert(make_pair(*it,N)); + N += cg->dim(it); + } } // create matrix and copy in values @@ -172,7 +159,7 @@ pair matrix(const GaussianBayesNet& bn) { FOREACH_PAIR(key,I,mapping) { // find corresponding conditional boost::shared_ptr cg = bn[key]; - + // get sigmas Vector sigmas = cg->get_sigmas(); @@ -207,15 +194,8 @@ pair matrix(const GaussianBayesNet& bn) { /* ************************************************************************* */ VectorValues rhs(const GaussianBayesNet& bn) { boost::shared_ptr result(allocateVectorValues(bn)); - BOOST_FOREACH(boost::shared_ptr cg,bn) { - Index key = cg->key(); - // get sigmas - const Vector& sigmas = cg->get_sigmas(); - - // get RHS and copy to d - GaussianConditional::const_d_type d = cg->get_d(); - (*result)[key] = ediv_(d,sigmas); // TODO ediv_? 
I think not - } + BOOST_FOREACH(boost::shared_ptr cg,bn) + cg->rhs(*result); return *result; } diff --git a/gtsam/linear/GaussianBayesNet.h b/gtsam/linear/GaussianBayesNet.h index 6ceb9b360..e769695ca 100644 --- a/gtsam/linear/GaussianBayesNet.h +++ b/gtsam/linear/GaussianBayesNet.h @@ -20,10 +20,6 @@ #pragma once -#include - -#include - #include #include #include @@ -71,11 +67,12 @@ namespace gtsam { /* * Backsubstitute * (R*x)./sigmas = y by solving x=inv(R)*(y.*sigmas) + * @param y is the RHS of the system */ VectorValues backSubstitute(const GaussianBayesNet& bn, const VectorValues& y); /* - * Backsubstitute in place, y is replaced with solution + * Backsubstitute in place, y starts as RHS and is replaced with solution */ void backSubstituteInPlace(const GaussianBayesNet& bn, VectorValues& y); @@ -89,6 +86,8 @@ namespace gtsam { /** * Return (dense) upper-triangular matrix representation + * NOTE: if this is the result of elimination with LDL, the matrix will + * not necessarily be upper triangular due to column permutations */ std::pair matrix(const GaussianBayesNet&); diff --git a/gtsam/linear/GaussianConditional.cpp b/gtsam/linear/GaussianConditional.cpp index 1c1fe39ec..25693e6af 100644 --- a/gtsam/linear/GaussianConditional.cpp +++ b/gtsam/linear/GaussianConditional.cpp @@ -39,7 +39,7 @@ GaussianConditional::GaussianConditional(Index key,const Vector& d, const Matrix assert(R.rows() <= R.cols()); size_t dims[] = { R.cols(), 1 }; rsd_.copyStructureFrom(rsd_type(matrix_, dims, dims+2, d.size())); - rsd_(0) = R; //.triangularView(); + rsd_(0) = R.triangularView(); get_d_() = d; } @@ -93,9 +93,13 @@ GaussianConditional::GaussianConditional(Index key, const Vector& d, const Matri /* ************************************************************************* */ void GaussianConditional::print(const string &s) const { - cout << s << ": density on " << key() << endl; + cout << s << ": density on "; + for(const_iterator it = beginFrontals(); it != endFrontals(); ++it) { + cout << (boost::format("[%1%]")%(*it)).str() << " "; + } + cout << endl; gtsam::print(Matrix(get_R()),"R"); - for(const_iterator it = beginParents() ; it != endParents() ; it++ ) { + for(const_iterator it = beginParents() ; it != endParents() ; ++it ) { gtsam::print(Matrix(get_S(it)), (boost::format("A[%1%]")%(*it)).str()); } gtsam::print(Vector(get_d()),"d"); @@ -135,35 +139,99 @@ bool GaussianConditional::equals(const GaussianConditional &c, double tol) const return true; } +/* ************************************************************************* */ +GaussianConditional::rsd_type::constBlock GaussianConditional::get_R() const { + return rsd_.range(0, nrFrontals()); +} + /* ************************************************************************* */ JacobianFactor::shared_ptr GaussianConditional::toFactor() const { return JacobianFactor::shared_ptr(new JacobianFactor(*this)); } /* ************************************************************************* */ -Vector GaussianConditional::solve(const VectorValues& x) const { - static const bool debug = false; - if(debug) print("Solving conditional "); - Vector rhs(get_d()); - for (const_iterator parent = beginParents(); parent != endParents(); ++parent) { - rhs += -get_S(parent) * x[*parent]; - } - if(debug) gtsam::print(Matrix(get_R()), "Calling backSubstituteUpper on "); - if(debug) gtsam::print(rhs, "rhs: "); - if(debug) { - Vector soln = backSubstituteUpper(get_R(), rhs, false); - gtsam::print(soln, "back-substitution solution: "); - return soln; - } else - return 
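The optimize path above now follows a two-step convention: first load the VectorValues with the right-hand side of the whole system, then let each conditional overwrite its own frontal block in place, parents first. A minimal sketch of that calling pattern, assuming a GaussianBayesNet obtained from elimination:

#include <boost/foreach.hpp>
#include <gtsam/linear/GaussianBayesNet.h>

using namespace gtsam;

VectorValues solveSketch(const GaussianBayesNet& gbn) {
  // step 1: allocate and fill the RHS for every frontal variable
  VectorValues x = rhs(gbn);

  // step 2: back-substitute from the last-eliminated conditional down;
  // each solveInPlace replaces the RHS of its frontals with the solution
  BOOST_REVERSE_FOREACH(GaussianConditional::shared_ptr cg, gbn)
    cg->solveInPlace(x);

  return x;
}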
backSubstituteUpper(get_R(), rhs, false); +void GaussianConditional::rhs(VectorValues& x) const { + Vector d = rhs(); + x.range(beginFrontals(), endFrontals(), d); } /* ************************************************************************* */ -Vector GaussianConditional::solve(const Permuted& x) const { - Vector rhs(get_d()); - for (const_iterator parent = beginParents(); parent != endParents(); ++parent) - rhs += -get_S(parent) * x[*parent]; - return backSubstituteUpper(get_R(), rhs, false); +void GaussianConditional::solveInPlace(VectorValues& x) const { + static const bool debug = false; + if(debug) print("Solving conditional in place"); +// Vector rhs(get_d()); + Vector rhs = x.range(beginFrontals(), endFrontals()); + for (const_iterator parent = beginParents(); parent != endParents(); ++parent) { + rhs += -get_S(parent) * x[*parent]; + } + Vector soln = permutation_.transpose()*backSubstituteUpper(get_R(), rhs, false); + if(debug) { + gtsam::print(Matrix(get_R()), "Calling backSubstituteUpper on "); + gtsam::print(rhs, "rhs: "); + gtsam::print(soln, "full back-substitution solution: "); + } + x.range(beginFrontals(), endFrontals(), soln); +} + +/* ************************************************************************* */ +void GaussianConditional::solveInPlace(Permuted& x) const { + static const bool debug = false; + if(debug) print("Solving conditional in place (permuted)"); +// Vector rhs(get_d()); + // Extract RHS from values - inlined from VectorValues + size_t s = 0; + for (const_iterator it=beginFrontals(); it!=endFrontals(); ++it) + s += x[*it].size(); + Vector rhs(s); size_t start = 0; + for (const_iterator it=beginFrontals(); it!=endFrontals(); ++it) { + SubVector v = x[*it]; + const size_t d = v.size(); + rhs.segment(start, d) = v; + start += d; + } + + // apply parents to rhs + for (const_iterator parent = beginParents(); parent != endParents(); ++parent) { + rhs += -get_S(parent) * x[*parent]; + } + + // solve system + Vector soln = permutation_.transpose()*backSubstituteUpper(get_R(), rhs, false); + + // apply solution: inlined manually due to permutation + size_t solnStart = 0; + for (const_iterator frontal = beginFrontals(); frontal != endFrontals(); ++frontal) { + const size_t d = dim(frontal); + x[*frontal] = soln.segment(solnStart, d); + solnStart += d; + } +} + +/* ************************************************************************* */ +VectorValues GaussianConditional::solve(const VectorValues& x) const { + VectorValues result = x; + solveInPlace(result); + return result; +} + +/* ************************************************************************* */ +void GaussianConditional::solveTransposeInPlace(VectorValues& gy) const { + Vector frontalVec = gy.range(beginFrontals(), endFrontals()); + // TODO: verify permutation + frontalVec = permutation_ * gtsam::backSubstituteUpper(frontalVec,Matrix(get_R())); + GaussianConditional::const_iterator it; + for (it = beginParents(); it!= endParents(); it++) { + const Index i = *it; + transposeMultiplyAdd(-1.0,get_S(it),frontalVec,gy[i]); + } + gy.range(beginFrontals(), endFrontals(), frontalVec); +} + +/* ************************************************************************* */ +void GaussianConditional::scaleFrontalsBySigma(VectorValues& gy) const { + Vector frontalVec = gy.range(beginFrontals(), endFrontals()); + frontalVec = emul(frontalVec, get_sigmas()); + gy.range(beginFrontals(), endFrontals(), frontalVec); } } diff --git a/gtsam/linear/GaussianConditional.h b/gtsam/linear/GaussianConditional.h index 
9b5e56287..b356402ea 100644 --- a/gtsam/linear/GaussianConditional.h +++ b/gtsam/linear/GaussianConditional.h @@ -19,20 +19,22 @@ #pragma once -#include -#include #include -#include #include #include #include #include -#include #include +// Forward declaration to friend unit tests +class eliminate2GaussianFactorTest; +class constructorGaussianConditionalTest; +class eliminationGaussianFactorGraphTest; + namespace gtsam { +// Forward declarations class GaussianFactor; /** @@ -55,16 +57,28 @@ public: typedef rsd_type::Column d_type; typedef rsd_type::constColumn const_d_type; + typedef Eigen::LDLT::TranspositionType TranspositionType; + protected: /** Store the conditional as one big upper-triangular wide matrix, arranged - * as [ R S1 S2 ... d ]. Access these blocks using a VerticalBlockView. */ + * as [ R S1 S2 ... d ]. Access these blocks using a VerticalBlockView. + * + * WARNING!!! When using with LDL, R is the permuted upper triangular matrix. + * Its columns/rows do not correspond to the correct components of the variables. + * Use R*permutation_ to get back the correct non-permuted order, + * for example when converting to the Jacobian + * */ AbMatrix matrix_; rsd_type rsd_; /** vector of standard deviations */ Vector sigmas_; + /** Store the permutation matrix, used by LDL' in the pivoting process + * This is used to get back the correct ordering of x after solving by backSubstitition */ + TranspositionType permutation_; + public: /** default constructor needed for serialization */ @@ -102,7 +116,9 @@ public: * for multiple frontal variables. */ template - GaussianConditional(ITERATOR firstKey, ITERATOR lastKey, size_t nrFrontals, const VerticalBlockView& matrices, const Vector& sigmas); + GaussianConditional(ITERATOR firstKey, ITERATOR lastKey, size_t nrFrontals, + const VerticalBlockView& matrices, const Vector& sigmas, + const TranspositionType& permutation = TranspositionType()); /** print */ void print(const std::string& = "GaussianConditional") const; @@ -113,14 +129,37 @@ public: /** dimension of multivariate variable */ size_t dim() const { return rsd_.rows(); } + /** Compute the information matrix */ + Matrix computeInformation() const { + Matrix R = get_R(); + R = R*permutation_; + return R.transpose()*R; + } + /** return stuff contained in GaussianConditional */ - const_d_type get_d() const { return rsd_.column(1+nrParents(), 0); } - rsd_type::constBlock get_R() const { return rsd_(0); } + rsd_type::constBlock get_R() const; + + /** access the d vector */ + const_d_type get_d() const { return rsd_.column(nrFrontals()+nrParents(), 0); } + + /** + * get a RHS as a single vector + * FIXME: shouldn't this be weighted by sigmas? + */ + Vector rhs() const { return get_d(); } + + /** get the dimension of a variable */ + size_t dim(const_iterator variable) const { return rsd_(variable - this->begin()).cols(); } + rsd_type::constBlock get_S(const_iterator variable) const { return rsd_(variable - this->begin()); } const Vector& get_sigmas() const {return sigmas_;} - const AbMatrix& matrix() const { return matrix_; } - const rsd_type& rsd() const { return rsd_; } /// DEBUGGING ONLY +protected: + + const AbMatrix& matrix() const { return matrix_; } + const rsd_type& rsd() const { return rsd_; } + +public: /** * Copy to a Factor (this creates a JacobianFactor and returns it as its * base class GaussianFactor. @@ -128,25 +167,57 @@ public: boost::shared_ptr toFactor() const; /** - * solve a conditional Gaussian - * @param x values structure in which the parents values (y,z,...) 
are known - * @return solution x = R \ (d - Sy - Tz - ...) + * Adds the RHS to a given VectorValues for use in solve() functions. + * @param x is the values to be updated, assumed allocated */ - Vector solve(const VectorValues& x) const; + void rhs(VectorValues& x) const; /** - * solve a conditional Gaussian - * @param x values structure in which the parents values (y,z,...) are known - * @return solution x = R \ (d - Sy - Tz - ...) + * solves a conditional Gaussian and stores the result in x + * This function works for multiple frontal variables. + * NOTE: assumes that the RHS for the frontals is stored in x, and + * then replaces the RHS with the partial result for this conditional, + * assuming that parents have been solved already. + * + * @param x values structure with solved parents, and the RHS for this conditional + * @return solution x = R \ (d - Sy - Tz - ...) for each frontal variable */ - Vector solve(const Permuted& x) const; + void solveInPlace(VectorValues& x) const; + + /** + * solves a conditional Gaussian and stores the result in x + * Identical to solveInPlace() above, with a permuted x + */ + void solveInPlace(Permuted& x) const; + + /** + * Solves a conditional Gaussian and returns a new VectorValues + * This function works for multiple frontal variables, but should + * only be used for testing as it copies the input vector values + * + * Assumes, as in solveInPlace, that the RHS has been stored in x + * for all frontal variables + */ + VectorValues solve(const VectorValues& x) const; + + // functions for transpose backsubstitution + + /** + * Performs backsubstition in place on values + */ + void solveTransposeInPlace(VectorValues& gy) const; + void scaleFrontalsBySigma(VectorValues& gy) const; protected: rsd_type::Column get_d_() { return rsd_.column(1+nrParents(), 0); } rsd_type::Block get_R_() { return rsd_(0); } rsd_type::Block get_S_(iterator variable) { return rsd_(variable - this->begin()); } + // Friends friend class JacobianFactor; + friend class ::eliminate2GaussianFactorTest; + friend class ::constructorGaussianConditionalTest; + friend class ::eliminationGaussianFactorGraphTest; private: /** Serialization function */ @@ -164,9 +235,9 @@ private: template GaussianConditional::GaussianConditional(ITERATOR firstKey, ITERATOR lastKey, size_t nrFrontals, const VerticalBlockView& matrices, - const Vector& sigmas) : + const Vector& sigmas, const GaussianConditional::TranspositionType& permutation) : IndexConditional(std::vector(firstKey, lastKey), nrFrontals), rsd_( - matrix_), sigmas_(sigmas) { + matrix_), sigmas_(sigmas), permutation_(permutation) { rsd_.assignNoalias(matrices); } diff --git a/gtsam/linear/GaussianFactorGraph.cpp b/gtsam/linear/GaussianFactorGraph.cpp index c260e012e..d7761b761 100644 --- a/gtsam/linear/GaussianFactorGraph.cpp +++ b/gtsam/linear/GaussianFactorGraph.cpp @@ -277,7 +277,7 @@ namespace gtsam { CombineJacobians(factors, VariableSlots(factors)); toc(1, "Combine"); tic(2, "eliminate"); - GaussianBayesNet::shared_ptr gbn(jointFactor->eliminate(nrFrontals)); + GaussianConditional::shared_ptr gbn = jointFactor->eliminate(nrFrontals); toc(2, "eliminate"); return make_pair(gbn, jointFactor); } @@ -287,7 +287,7 @@ namespace gtsam { FastMap findScatterAndDims (const FactorGraph& factors) { - static const bool debug = false; + const bool debug = ISDEBUG("findScatterAndDims"); // The "scatter" is a map from global variable indices to slot indices in the // union of involved variables. 
We also include the dimensionality of the @@ -352,7 +352,7 @@ namespace gtsam { // Extract conditionals and fill in details of the remaining factor tic(5, "split"); - GaussianBayesNet::shared_ptr conditionals = + GaussianConditional::shared_ptr conditionals = combinedFactor->splitEliminatedFactor(nrFrontals, keys); if (debug) { conditionals->print("Extracted conditionals: "); @@ -413,12 +413,12 @@ namespace gtsam { toc(1, "convert to Jacobian"); tic(2, "Jacobian EliminateGaussian"); - GaussianBayesNet::shared_ptr bn; + GaussianConditional::shared_ptr conditionals; GaussianFactor::shared_ptr factor; - boost::tie(bn, factor) = EliminateJacobians(jacobians, nrFrontals); + boost::tie(conditionals, factor) = EliminateJacobians(jacobians, nrFrontals); toc(2, "Jacobian EliminateGaussian"); - return make_pair(bn, factor); + return make_pair(conditionals, factor); } // EliminateQR /* ************************************************************************* */ @@ -501,4 +501,146 @@ namespace gtsam { } // EliminatePreferCholesky + + /* ************************************************************************* */ + GaussianFactorGraph::EliminationResult EliminateLDL( + const FactorGraph& factors, size_t nrFrontals) { + const bool debug = ISDEBUG("EliminateLDL"); + + // Find the scatter and variable dimensions + tic(1, "find scatter"); + Scatter scatter(findScatterAndDims(factors)); + toc(1, "find scatter"); + + // Pull out keys and dimensions + tic(2, "keys"); + vector keys(scatter.size()); + vector dimensions(scatter.size() + 1); + BOOST_FOREACH(const Scatter::value_type& var_slot, scatter) { + keys[var_slot.second.slot] = var_slot.first; + dimensions[var_slot.second.slot] = var_slot.second.dimension; + } + // This is for the r.h.s. vector + dimensions.back() = 1; + toc(2, "keys"); + + // Form Ab' * Ab + tic(3, "combine"); + + if (debug) { + // print out everything before combine + factors.print("Factors to be combined into hessian"); + cout << "Dimensions (" << dimensions.size() << "): "; + BOOST_FOREACH(size_t d, dimensions) cout << d << " "; + cout << "\nScatter:" << endl; + BOOST_FOREACH(const Scatter::value_type& p, scatter) + cout << " Index: " << p.first << ", " << p.second.toString() << endl; + } + + + HessianFactor::shared_ptr // + combinedFactor(new HessianFactor(factors, dimensions, scatter)); + toc(3, "combine"); + + // Do LDL, note that after this, the lower triangle still contains + // some untouched non-zeros that should be zero. We zero them while + // extracting submatrices next. + tic(4, "partial LDL"); + Eigen::LDLT::TranspositionType permutation = combinedFactor->partialLDL(nrFrontals); + toc(4, "partial LDL"); + + // Extract conditionals and fill in details of the remaining factor + tic(5, "split"); + GaussianConditional::shared_ptr conditionals = + combinedFactor->splitEliminatedFactor(nrFrontals, keys, permutation); + if (debug) { + conditionals->print("Extracted conditionals: "); + combinedFactor->print("Eliminated factor (L piece): "); + } + toc(5, "split"); + + combinedFactor->assertInvariants(); + return make_pair(conditionals, combinedFactor); + } + + /* ************************************************************************* */ + GaussianFactorGraph::EliminationResult EliminatePreferLDL( + const FactorGraph& factors, size_t nrFrontals) { + + typedef JacobianFactor J; + typedef HessianFactor H; + + // If any JacobianFactors have constrained noise models, we have to convert + // all factors to JacobianFactors. Otherwise, we can convert all factors + // to HessianFactors. 
This is because QR can handle constrained noise + // models but LDL cannot. + + // Decide whether to use QR or LDL + // Check if any JacobianFactors have constrained noise models. + bool useQR = false; + BOOST_FOREACH(const GaussianFactor::shared_ptr& factor, factors) { + J::shared_ptr jacobian(boost::dynamic_pointer_cast(factor)); + if (jacobian && jacobian->get_model()->isConstrained()) { + useQR = true; + break; + } + } + + // Convert all factors to the appropriate type + // and call the type-specific EliminateGaussian. + if (useQR) return EliminateQR(factors, nrFrontals); + + GaussianFactorGraph::EliminationResult ret; + try { + tic(2, "EliminateLDL"); + ret = EliminateLDL(factors, nrFrontals); + toc(2, "EliminateLDL"); + } catch (const exception& e) { + cout << "Exception in EliminateLDL: " << e.what() << endl; + SETDEBUG("EliminateLDL", true); + SETDEBUG("updateATA", true); + SETDEBUG("JacobianFactor::eliminate", true); + SETDEBUG("JacobianFactor::Combine", true); + SETDEBUG("ldlPartial", true); + SETDEBUG("findScatterAndDims", true); + factors.print("Combining factors: "); + EliminateLDL(factors, nrFrontals); + throw; + } + + const bool checkLDL = ISDEBUG("EliminateGaussian Check LDL"); + if (checkLDL) { + GaussianFactorGraph::EliminationResult expected; + FactorGraph jacobians = convertToJacobians(factors); + try { + // Compare with QR + expected = EliminateJacobians(jacobians, nrFrontals); + } catch (...) { + cout << "Exception in QR" << endl; + throw; + } + + H actual_factor(*ret.second); + H expected_factor(*expected.second); + if (!assert_equal(*expected.first, *ret.first, 100.0) || !assert_equal( + expected_factor, actual_factor, 1.0)) { + cout << "LDL and QR do not agree" << endl; + + SETDEBUG("EliminateLDL", true); + SETDEBUG("updateATA", true); + SETDEBUG("JacobianFactor::eliminate", true); + SETDEBUG("JacobianFactor::Combine", true); + jacobians.print("Jacobian Factors: "); + EliminateJacobians(jacobians, nrFrontals); + EliminateLDL(factors, nrFrontals); + factors.print("Combining factors: "); + + throw runtime_error("LDL and QR do not agree"); + } + } + + return ret; + + } // EliminatePreferLDL + } // namespace gtsam diff --git a/gtsam/linear/GaussianFactorGraph.h b/gtsam/linear/GaussianFactorGraph.h index 9ab83def1..1dcd8bc94 100644 --- a/gtsam/linear/GaussianFactorGraph.h +++ b/gtsam/linear/GaussianFactorGraph.h @@ -21,11 +21,9 @@ #pragma once -#include #include #include -#include #include #include #include @@ -192,8 +190,8 @@ namespace gtsam { GaussianFactorGraph::EliminationResult EliminateJacobians(const FactorGraph< JacobianFactor>& factors, size_t nrFrontals = 1); - GaussianFactorGraph::EliminationResult EliminateHessians(const FactorGraph< - HessianFactor>& factors, size_t nrFrontals = 1); +// GaussianFactorGraph::EliminationResult EliminateHessians(const FactorGraph< +// HessianFactor>& factors, size_t nrFrontals = 1); GaussianFactorGraph::EliminationResult EliminateQR(const FactorGraph< GaussianFactor>& factors, size_t nrFrontals = 1); @@ -204,4 +202,10 @@ namespace gtsam { GaussianFactorGraph::EliminationResult EliminateCholesky(const FactorGraph< GaussianFactor>& factors, size_t nrFrontals = 1); + GaussianFactorGraph::EliminationResult EliminatePreferLDL(const FactorGraph< + GaussianFactor>& factors, size_t nrFrontals = 1); + + GaussianFactorGraph::EliminationResult EliminateLDL(const FactorGraph< + GaussianFactor>& factors, size_t nrFrontals = 1); + } // namespace gtsam diff --git a/gtsam/linear/GaussianISAM.cpp b/gtsam/linear/GaussianISAM.cpp index 
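Since EliminatePreferLDL has the same EliminationResult signature as EliminateQR, it can be handed to any of the elimination-driven structures; a usage sketch assuming an existing GaussianFactorGraph (variable names are illustrative):

#include <gtsam/linear/GaussianFactorGraph.h>
#include <gtsam/linear/GaussianJunctionTree.h>

using namespace gtsam;

VectorValues optimizeWithLDL(const GaussianFactorGraph& gfg) {
  // the junction tree accepts any function matching its Eliminate typedef;
  // here we pass the new LDL-preferring routine instead of EliminateQR
  GaussianJunctionTree jt(gfg);
  return jt.optimize(&EliminatePreferLDL);
}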
a7911f2cb..c20592441 100644 --- a/gtsam/linear/GaussianISAM.cpp +++ b/gtsam/linear/GaussianISAM.cpp @@ -37,10 +37,9 @@ BayesNet::shared_ptr GaussianISAM::marginalBayesNet(Index j } /* ************************************************************************* */ -std::pair GaussianISAM::marginal(Index j) const { +Matrix GaussianISAM::marginalCovariance(Index j) const { GaussianConditional::shared_ptr conditional = marginalBayesNet(j)->front(); - Matrix R = conditional->get_R(); - return make_pair(conditional->get_d(), (R.transpose() * R).inverse()); + return conditional->computeInformation().inverse(); } /* ************************************************************************* */ @@ -52,20 +51,30 @@ std::pair GaussianISAM::marginal(Index j) const { /* ************************************************************************* */ void optimize(const GaussianISAM::sharedClique& clique, VectorValues& result) { // parents are assumed to already be solved and available in result - GaussianISAM::Clique::const_reverse_iterator it; - for (it = clique->rbegin(); it!=clique->rend(); it++) { - GaussianConditional::shared_ptr cg = *it; - Vector x = cg->solve(result); // Solve for that variable - result[cg->key()] = x; // store result in partial solution - } - BOOST_FOREACH(const GaussianISAM::sharedClique& child, clique->children_) { + // RHS for current conditional should already be in place in result + clique->conditional()->solveInPlace(result); + + BOOST_FOREACH(const GaussianISAM::sharedClique& child, clique->children_) optimize(child, result); - } +} + +/* ************************************************************************* */ +void GaussianISAM::treeRHS(const GaussianISAM::sharedClique& clique, VectorValues& result) { + clique->conditional()->rhs(result); + BOOST_FOREACH(const GaussianISAM::sharedClique& child, clique->children_) + treeRHS(child, result); +} + +/* ************************************************************************* */ +VectorValues GaussianISAM::rhs(const GaussianISAM& bayesTree) { + VectorValues result(bayesTree.dims_); // allocate + treeRHS(bayesTree.root(), result); // recursively fill + return result; } /* ************************************************************************* */ VectorValues optimize(const GaussianISAM& bayesTree) { - VectorValues result(bayesTree.dims_); + VectorValues result = GaussianISAM::rhs(bayesTree); // starting from the root, call optimize on each conditional optimize(bayesTree.root(), result); return result; @@ -77,4 +86,4 @@ BayesNet GaussianISAM::shortcut(sharedClique clique, shared } /* ************************************************************************* */ -} /// namespace gtsam +} // \namespace gtsam diff --git a/gtsam/linear/GaussianISAM.h b/gtsam/linear/GaussianISAM.h index ddb4255bf..22923dd4e 100644 --- a/gtsam/linear/GaussianISAM.h +++ b/gtsam/linear/GaussianISAM.h @@ -20,8 +20,8 @@ #pragma once #include -#include #include +#include namespace gtsam { @@ -36,7 +36,10 @@ public: GaussianISAM() : Super() {} /** Create a Bayes Tree from a Bayes Net */ - GaussianISAM(const GaussianBayesNet& bayesNet) : Super(bayesNet) {} +// GaussianISAM(const GaussianBayesNet& bayesNet) : Super(bayesNet) {} + GaussianISAM(const BayesTree& bayesTree) : Super(bayesTree) { + GaussianJunctionTree::countDims(bayesTree, dims_); + } /** Override update_internal to also keep track of variable dimensions. 
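With LDL in play, the marginal covariance is now recovered from the conditional's information matrix rather than from R alone, because R may carry a column permutation from pivoting. A one-function sketch of the relationship, assuming a conditional produced by any of the eliminate routines:

#include <gtsam/linear/GaussianConditional.h>

using namespace gtsam;

Matrix marginalCovarianceOf(const GaussianConditional::shared_ptr& conditional) {
  // computeInformation() forms (R*P)^T * (R*P), undoing the LDL column
  // permutation, so its inverse is the covariance in the original variable order
  return conditional->computeInformation().inverse();
}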
*/ template @@ -73,19 +76,28 @@ public: /** return marginal on any variable as a factor, Bayes net, or mean/cov */ GaussianFactor::shared_ptr marginalFactor(Index j) const; BayesNet::shared_ptr marginalBayesNet(Index key) const; - std::pair marginal(Index key) const; + Matrix marginalCovariance(Index key) const; /** return joint between two variables, as a Bayes net */ BayesNet::shared_ptr jointBayesNet(Index key1, Index key2) const; /** return the conditional P(S|Root) on the separator given the root */ static BayesNet shortcut(sharedClique clique, sharedClique root); -}; - // recursively optimize this conditional and all subtrees - void optimize(const GaussianISAM::sharedClique& clique, VectorValues& result); + /** load a VectorValues with the RHS of the system for backsubstitution */ + static VectorValues rhs(const GaussianISAM& bayesTree); - // optimize the BayesTree, starting from the root - VectorValues optimize(const GaussianISAM& bayesTree); +protected: -}/// namespace gtsam + /** recursively load RHS for system */ + static void treeRHS(const GaussianISAM::sharedClique& clique, VectorValues& result); + +}; // \class GaussianISAM + +// recursively optimize this conditional and all subtrees +void optimize(const GaussianISAM::sharedClique& clique, VectorValues& result); + +// optimize the BayesTree, starting from the root +VectorValues optimize(const GaussianISAM& bayesTree); + +} // \namespace gtsam diff --git a/gtsam/linear/GaussianJunctionTree.cpp b/gtsam/linear/GaussianJunctionTree.cpp index ab23470a6..78dcaf8e5 100644 --- a/gtsam/linear/GaussianJunctionTree.cpp +++ b/gtsam/linear/GaussianJunctionTree.cpp @@ -36,11 +36,15 @@ namespace gtsam { /* ************************************************************************* */ void GaussianJunctionTree::btreeBackSubstitute(const boost::shared_ptr& current, VectorValues& config) const { // solve the bayes net in the current node - GaussianBayesNet::const_reverse_iterator it = current->rbegin(); - for (; it!=current->rend(); ++it) { - Vector x = (*it)->solve(config); // Solve for that variable - config[(*it)->key()] = x; // store result in partial solution - } + current->conditional()->solveInPlace(config); + + // GaussianBayesNet::const_reverse_iterator it = current->rbegin(); +// for (; it!=current->rend(); ++it) { +// (*it)->solveInPlace(config); // solve and store result +// +//// Vector x = (*it)->solve(config); // Solve for that variable +//// config[(*it)->key()] = x; // store result in partial solution +// } // solve the bayes nets in the child nodes BOOST_FOREACH(const BayesTree::sharedClique& child, current->children()) { @@ -48,16 +52,11 @@ namespace gtsam { } } - /* ************************************************************************* */ - void countDims(const boost::shared_ptr::Clique>& clique, vector& dims) { - BOOST_FOREACH(const boost::shared_ptr& cond, *clique) { - // There should be no two conditionals on the same variable - assert(dims[cond->key()] == 0); - dims[cond->key()] = cond->dim(); - } - BOOST_FOREACH(const boost::shared_ptr::Clique>& child, clique->children()) { - countDims(child, dims); - } + /* ************************************************************************* */ + void GaussianJunctionTree::btreeRHS(const boost::shared_ptr& current, VectorValues& config) const { + current->conditional()->rhs(config); + BOOST_FOREACH(const BayesTree::sharedClique& child, current->children()) + btreeRHS(child, config); } /* ************************************************************************* */ @@ -67,11 +66,12 
@@ namespace gtsam { boost::shared_ptr rootClique(this->eliminate(function)); toc(1, "GJT eliminate"); - // Allocate solution vector + // Allocate solution vector and copy RHS tic(2, "allocate VectorValues"); - vector dims(rootClique->back()->key() + 1, 0); + vector dims(rootClique->conditional()->back()+1, 0); countDims(rootClique, dims); VectorValues result(dims); + btreeRHS(rootClique, result); toc(2, "allocate VectorValues"); // back-substitution diff --git a/gtsam/linear/GaussianJunctionTree.h b/gtsam/linear/GaussianJunctionTree.h index 09e996754..f7edc6ae7 100644 --- a/gtsam/linear/GaussianJunctionTree.h +++ b/gtsam/linear/GaussianJunctionTree.h @@ -19,12 +19,12 @@ #pragma once +#include #include +#include #include #include -#include - namespace gtsam { /* ************************************************************************* */ @@ -43,6 +43,9 @@ namespace gtsam { // back-substitute in topological sort order (parents first) void btreeBackSubstitute(const boost::shared_ptr& current, VectorValues& config) const; + // find the RHS for the system in order to perform backsubstitution + void btreeRHS(const boost::shared_ptr& current, VectorValues& config) const; + public : /** Default constructor */ @@ -52,11 +55,33 @@ namespace gtsam { GaussianJunctionTree(const GaussianFactorGraph& fg) : Base(fg) {} /** Construct from a factor graph and a pre-computed variable index. */ - GaussianJunctionTree(const GaussianFactorGraph& fg, const VariableIndex& variableIndex) : Base(fg, variableIndex) {} + GaussianJunctionTree(const GaussianFactorGraph& fg, const VariableIndex& variableIndex) + : Base(fg, variableIndex) {} // optimize the linear graph VectorValues optimize(Eliminate function) const; + // convenient function to return dimensions of all variables in the BayesTree + template + static void countDims(const BayesTree& bayesTree, DIM_CONTAINER& dims) { + dims = DIM_CONTAINER(bayesTree.root()->conditional()->back()+1, 0); + countDims(bayesTree.root(), dims); + } + + private: + template + static void countDims(const boost::shared_ptr& clique, DIM_CONTAINER& dims) { + GaussianConditional::const_iterator it = clique->conditional()->beginFrontals(); + for (; it != clique->conditional()->endFrontals(); ++it) { + assert(dims.at(*it) == 0); + dims.at(*it) = clique->conditional()->dim(it); + } + + BOOST_FOREACH(const boost::shared_ptr& child, clique->children()) { + countDims(child, dims); + } + } + }; // GaussianJunctionTree } // namespace gtsam diff --git a/gtsam/linear/GaussianMultifrontalSolver.cpp b/gtsam/linear/GaussianMultifrontalSolver.cpp index 45b51c97f..e3bc5046d 100644 --- a/gtsam/linear/GaussianMultifrontalSolver.cpp +++ b/gtsam/linear/GaussianMultifrontalSolver.cpp @@ -23,18 +23,19 @@ namespace gtsam { /* ************************************************************************* */ -GaussianMultifrontalSolver::GaussianMultifrontalSolver(const FactorGraph& factorGraph) : - Base(factorGraph) {} +GaussianMultifrontalSolver::GaussianMultifrontalSolver(const FactorGraph& factorGraph, bool useQR) : + Base(factorGraph), useQR_(useQR) {} /* ************************************************************************* */ -GaussianMultifrontalSolver::GaussianMultifrontalSolver(const FactorGraph::shared_ptr& factorGraph, const VariableIndex::shared_ptr& variableIndex) : - Base(factorGraph, variableIndex) {} +GaussianMultifrontalSolver::GaussianMultifrontalSolver(const FactorGraph::shared_ptr& factorGraph, + const VariableIndex::shared_ptr& variableIndex, bool useQR) : + Base(factorGraph, variableIndex), 
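The static countDims helper above gives a convenient way to size a VectorValues for an entire Bayes tree before loading the RHS; a sketch, assuming the BayesTree<GaussianConditional> type that GaussianISAM is built on:

#include <vector>
#include <gtsam/inference/BayesTree.h>
#include <gtsam/linear/GaussianJunctionTree.h>

using namespace gtsam;

VectorValues allocateForTree(const BayesTree<GaussianConditional>& bayesTree) {
  // one dimension entry per variable index, filled by walking the cliques
  std::vector<size_t> dims;
  GaussianJunctionTree::countDims(bayesTree, dims);
  return VectorValues(dims);  // allocated and ready for rhs()/solveInPlace()
}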
useQR_(useQR) {} /* ************************************************************************* */ GaussianMultifrontalSolver::shared_ptr GaussianMultifrontalSolver::Create(const FactorGraph::shared_ptr& factorGraph, - const VariableIndex::shared_ptr& variableIndex) { - return shared_ptr(new GaussianMultifrontalSolver(factorGraph, variableIndex)); + const VariableIndex::shared_ptr& variableIndex, bool useQR) { + return shared_ptr(new GaussianMultifrontalSolver(factorGraph, variableIndex, useQR)); } /* ************************************************************************* */ @@ -44,34 +45,52 @@ void GaussianMultifrontalSolver::replaceFactors(const FactorGraph::shared_ptr GaussianMultifrontalSolver::eliminate() const { - return Base::eliminate(&EliminateQR); + if (useQR_) + return Base::eliminate(&EliminateQR); + else + return Base::eliminate(&EliminatePreferLDL); } /* ************************************************************************* */ VectorValues::shared_ptr GaussianMultifrontalSolver::optimize() const { tic(2,"optimize"); - VectorValues::shared_ptr values(new VectorValues(junctionTree_->optimize(&EliminateQR))); + VectorValues::shared_ptr values; + if (useQR_) + values = VectorValues::shared_ptr(new VectorValues(junctionTree_->optimize(&EliminateQR))); + else + values= VectorValues::shared_ptr(new VectorValues(junctionTree_->optimize(&EliminatePreferLDL))); toc(2,"optimize"); return values; } /* ************************************************************************* */ GaussianFactorGraph::shared_ptr GaussianMultifrontalSolver::jointFactorGraph(const std::vector& js) const { - return GaussianFactorGraph::shared_ptr(new GaussianFactorGraph(*Base::jointFactorGraph(js,&EliminateQR))); + if (useQR_) + return GaussianFactorGraph::shared_ptr(new GaussianFactorGraph(*Base::jointFactorGraph(js,&EliminateQR))); + else + return GaussianFactorGraph::shared_ptr(new GaussianFactorGraph(*Base::jointFactorGraph(js,&EliminatePreferLDL))); } /* ************************************************************************* */ GaussianFactor::shared_ptr GaussianMultifrontalSolver::marginalFactor(Index j) const { - return Base::marginalFactor(j,&EliminateQR); + if (useQR_) + return Base::marginalFactor(j,&EliminateQR); + else + return Base::marginalFactor(j,&EliminatePreferLDL); } /* ************************************************************************* */ -std::pair GaussianMultifrontalSolver::marginalCovariance(Index j) const { +Matrix GaussianMultifrontalSolver::marginalCovariance(Index j) const { FactorGraph fg; - fg.push_back(Base::marginalFactor(j,&EliminateQR)); - GaussianConditional::shared_ptr conditional = EliminateQR(fg,1).first->front(); - Matrix R = conditional->get_R(); - return make_pair(conditional->get_d(), (R.transpose() * R).inverse()); + GaussianConditional::shared_ptr conditional; + if (useQR_) { + fg.push_back(Base::marginalFactor(j,&EliminateQR)); + conditional = EliminateQR(fg,1).first; + } else { + fg.push_back(Base::marginalFactor(j,&EliminatePreferLDL)); + conditional = EliminatePreferLDL(fg,1).first; + } + return conditional->computeInformation().inverse(); } } diff --git a/gtsam/linear/GaussianMultifrontalSolver.h b/gtsam/linear/GaussianMultifrontalSolver.h index 8519aafa6..0f0e43c61 100644 --- a/gtsam/linear/GaussianMultifrontalSolver.h +++ b/gtsam/linear/GaussianMultifrontalSolver.h @@ -20,12 +20,9 @@ #include #include -#include -#include #include #include -#include namespace gtsam { @@ -50,26 +47,31 @@ protected: typedef GenericMultifrontalSolver Base; typedef 
boost::shared_ptr shared_ptr; + /** flag to determine whether to use LDL or QR */ + bool useQR_; + public: /** * Construct the solver for a factor graph. This builds the elimination * tree, which already does some of the work of elimination. */ - GaussianMultifrontalSolver(const FactorGraph& factorGraph); + GaussianMultifrontalSolver(const FactorGraph& factorGraph, bool useQR = false); /** * Construct the solver with a shared pointer to a factor graph and to a * VariableIndex. The solver will store these pointers, so this constructor * is the fastest. */ - GaussianMultifrontalSolver(const FactorGraph::shared_ptr& factorGraph, const VariableIndex::shared_ptr& variableIndex); + GaussianMultifrontalSolver(const FactorGraph::shared_ptr& factorGraph, + const VariableIndex::shared_ptr& variableIndex, bool useQR = false); /** * Named constructor to return a shared_ptr. This builds the elimination * tree, which already does some of the symbolic work of elimination. */ - static shared_ptr Create(const FactorGraph::shared_ptr& factorGraph, const VariableIndex::shared_ptr& variableIndex); + static shared_ptr Create(const FactorGraph::shared_ptr& factorGraph, + const VariableIndex::shared_ptr& variableIndex, bool useQR = false); /** * Return a new solver that solves the given factor graph, which must have @@ -115,7 +117,10 @@ public: * returns a GaussianConditional, this function back-substitutes the R factor * to obtain the mean, then computes \Sigma = (R^T * R)^-1. */ - std::pair marginalCovariance(Index j) const; + Matrix marginalCovariance(Index j) const; + + /** @return true if using QR */ + inline bool usingQR() const { return useQR_; } }; diff --git a/gtsam/linear/GaussianSequentialSolver.cpp b/gtsam/linear/GaussianSequentialSolver.cpp index f2610ae80..73f1950a5 100644 --- a/gtsam/linear/GaussianSequentialSolver.cpp +++ b/gtsam/linear/GaussianSequentialSolver.cpp @@ -24,18 +24,18 @@ namespace gtsam { /* ************************************************************************* */ -GaussianSequentialSolver::GaussianSequentialSolver(const FactorGraph& factorGraph) : - Base(factorGraph) {} +GaussianSequentialSolver::GaussianSequentialSolver(const FactorGraph& factorGraph, bool useQR) : + Base(factorGraph), useQR_(useQR) {} /* ************************************************************************* */ GaussianSequentialSolver::GaussianSequentialSolver(const FactorGraph::shared_ptr& factorGraph, - const VariableIndex::shared_ptr& variableIndex) : - Base(factorGraph, variableIndex) {} + const VariableIndex::shared_ptr& variableIndex, bool useQR) : + Base(factorGraph, variableIndex), useQR_(useQR) {} /* ************************************************************************* */ GaussianSequentialSolver::shared_ptr GaussianSequentialSolver::Create( - const FactorGraph::shared_ptr& factorGraph, const VariableIndex::shared_ptr& variableIndex) { - return shared_ptr(new GaussianSequentialSolver(factorGraph, variableIndex)); + const FactorGraph::shared_ptr& factorGraph, const VariableIndex::shared_ptr& variableIndex, bool useQR) { + return shared_ptr(new GaussianSequentialSolver(factorGraph, variableIndex, useQR)); } /* ************************************************************************* */ @@ -45,7 +45,10 @@ void GaussianSequentialSolver::replaceFactors(const FactorGraph: /* ************************************************************************* */ GaussianBayesNet::shared_ptr GaussianSequentialSolver::eliminate() const { - return Base::eliminate(&EliminateQR); + if (useQR_) + return 
Base::eliminate(&EliminateQR); + else + return Base::eliminate(&EliminatePreferLDL); } /* ************************************************************************* */ @@ -78,23 +81,35 @@ VectorValues::shared_ptr GaussianSequentialSolver::optimize() const { /* ************************************************************************* */ GaussianFactor::shared_ptr GaussianSequentialSolver::marginalFactor(Index j) const { - return Base::marginalFactor(j,&EliminateQR); + if (useQR_) + return Base::marginalFactor(j,&EliminateQR); + else + return Base::marginalFactor(j,&EliminatePreferLDL); } /* ************************************************************************* */ -std::pair GaussianSequentialSolver::marginalCovariance(Index j) const { +Matrix GaussianSequentialSolver::marginalCovariance(Index j) const { FactorGraph fg; - fg.push_back(Base::marginalFactor(j, &EliminateQR)); - GaussianConditional::shared_ptr conditional = EliminateQR(fg, 1).first->front(); - Matrix R = conditional->get_R(); - return make_pair(conditional->get_d(), (R.transpose() * R).inverse()); + GaussianConditional::shared_ptr conditional; + if (useQR_) { + fg.push_back(Base::marginalFactor(j, &EliminateQR)); + conditional = EliminateQR(fg, 1).first; + } else { + fg.push_back(Base::marginalFactor(j, &EliminatePreferLDL)); + conditional = EliminatePreferLDL(fg, 1).first; + } + return conditional->computeInformation().inverse(); } /* ************************************************************************* */ GaussianFactorGraph::shared_ptr GaussianSequentialSolver::jointFactorGraph(const std::vector& js) const { - return GaussianFactorGraph::shared_ptr(new GaussianFactorGraph( - *Base::jointFactorGraph(js, &EliminateQR))); + if (useQR_) + return GaussianFactorGraph::shared_ptr(new GaussianFactorGraph( + *Base::jointFactorGraph(js, &EliminateQR))); + else + return GaussianFactorGraph::shared_ptr(new GaussianFactorGraph( + *Base::jointFactorGraph(js, &EliminatePreferLDL))); } } diff --git a/gtsam/linear/GaussianSequentialSolver.h b/gtsam/linear/GaussianSequentialSolver.h index 25890071c..9c625d9c3 100644 --- a/gtsam/linear/GaussianSequentialSolver.h +++ b/gtsam/linear/GaussianSequentialSolver.h @@ -19,13 +19,10 @@ #pragma once #include -#include #include #include -#include #include -#include namespace gtsam { @@ -57,26 +54,31 @@ protected: typedef GenericSequentialSolver Base; typedef boost::shared_ptr shared_ptr; + /** flag to determine whether to use LDL or QR */ + bool useQR_; + public: /** * Construct the solver for a factor graph. This builds the elimination * tree, which already does some of the work of elimination. */ - GaussianSequentialSolver(const FactorGraph& factorGraph); + GaussianSequentialSolver(const FactorGraph& factorGraph, bool useQR = false); /** * Construct the solver with a shared pointer to a factor graph and to a * VariableIndex. The solver will store these pointers, so this constructor * is the fastest. */ - GaussianSequentialSolver(const FactorGraph::shared_ptr& factorGraph, const VariableIndex::shared_ptr& variableIndex); + GaussianSequentialSolver(const FactorGraph::shared_ptr& factorGraph, + const VariableIndex::shared_ptr& variableIndex, bool useQR = false); /** * Named constructor to return a shared_ptr. This builds the elimination * tree, which already does some of the symbolic work of elimination. 
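Both the sequential and multifrontal solvers now carry a useQR flag that picks QR or the LDL path at every call site, with LDL as the default; a usage sketch with a hypothetical graph variable:

#include <gtsam/linear/GaussianSequentialSolver.h>

using namespace gtsam;

void solveBothWays(const FactorGraph<GaussianFactor>& graph) {
  // default (useQR = false): EliminatePreferLDL, which only falls back to QR
  // when a factor has a constrained noise model
  GaussianSequentialSolver ldlSolver(graph);
  VectorValues::shared_ptr xLdl = ldlSolver.optimize();

  // force QR everywhere, e.g. when chasing numerical issues
  GaussianSequentialSolver qrSolver(graph, true /*useQR*/);
  VectorValues::shared_ptr xQr = qrSolver.optimize();

  // marginalCovariance now returns just the covariance matrix
  Matrix cov0 = qrSolver.marginalCovariance(0);
}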
*/ - static shared_ptr Create(const FactorGraph::shared_ptr& factorGraph, const VariableIndex::shared_ptr& variableIndex); + static shared_ptr Create(const FactorGraph::shared_ptr& factorGraph, + const VariableIndex::shared_ptr& variableIndex, bool useQR = false); /** * Return a new solver that solves the given factor graph, which must have @@ -115,7 +117,7 @@ public: * returns a GaussianConditional, this function back-substitutes the R factor * to obtain the mean, then computes \Sigma = (R^T * R)^-1. */ - std::pair marginalCovariance(Index j) const; + Matrix marginalCovariance(Index j) const; /** * Compute the marginal joint over a set of variables, by integrating out diff --git a/gtsam/linear/HessianFactor.cpp b/gtsam/linear/HessianFactor.cpp index 4a4dc886e..c6ce340e6 100644 --- a/gtsam/linear/HessianFactor.cpp +++ b/gtsam/linear/HessianFactor.cpp @@ -11,11 +11,19 @@ /** * @file HessianFactor.cpp - * @brief * @author Richard Roberts * @created Dec 8, 2010 */ +#include + +#include +#include +#include +#include +#include +#include + #include #include #include @@ -27,21 +35,19 @@ #include #include -#include -#include -#include -#include -#include -#include - -#include - using namespace std; using namespace boost::lambda; namespace gtsam { +/* ************************************************************************* */ +string SlotEntry::toString() const { + ostringstream oss; + oss << "SlotEntry: slot=" << slot << ", dim=" << dimension; + return oss.str(); +} + /* ************************************************************************* */ void HessianFactor::assertInvariants() const { #ifndef NDEBUG @@ -137,7 +143,7 @@ HessianFactor::HessianFactor(const FactorGraph& factors, const vector& dimensions, const Scatter& scatter) : info_(matrix_) { - const bool debug = ISDEBUG("EliminateCholesky"); + const bool debug = ISDEBUG("EliminateCholesky") || ISDEBUG("EliminateLDL"); // Form Ab' * Ab tic(1, "allocate"); info_.resize(dimensions.begin(), dimensions.end(), false); @@ -146,6 +152,7 @@ HessianFactor::HessianFactor(const FactorGraph& factors, matrix_.noalias() = Matrix::Zero(matrix_.rows(),matrix_.cols()); toc(2, "zero"); tic(3, "update"); + if (debug) cout << "Combining " << factors.size() << " factors" << endl; BOOST_FOREACH(const GaussianFactor::shared_ptr& factor, factors) { shared_ptr hessian(boost::dynamic_pointer_cast(factor)); @@ -230,7 +237,7 @@ void HessianFactor::updateATA(const HessianFactor& update, const Scatter& scatte if(debug) { this->print("Updating this: "); - update.print("with: "); + update.print("with (Hessian): "); } // Apply updates to the upper triangle @@ -288,25 +295,26 @@ void HessianFactor::updateATA(const JacobianFactor& update, const Scatter& scatt if(debug) { this->print("Updating this: "); - update.print("with: "); + update.print("with (Jacobian): "); } Eigen::Block updateA(update.matrix_.block( update.Ab_.rowStart(),update.Ab_.offset(0), update.Ab_.full().rows(), update.Ab_.full().cols())); + if (debug) cout << "updateA: \n" << updateA << endl; - Eigen::MatrixXd updateInform; + Matrix updateInform; if(boost::dynamic_pointer_cast(update.model_)) { updateInform.noalias() = updateA.transpose() * updateA; } else { noiseModel::Diagonal::shared_ptr diagonal(boost::dynamic_pointer_cast(update.model_)); if(diagonal) { - typeof(Eigen::Map(&update.model_->invsigmas()(0),0).asDiagonal()) R( - Eigen::Map(&update.model_->invsigmas()(0),update.model_->dim()).asDiagonal()); - updateInform.noalias() = updateA.transpose() * R * R * updateA; + Vector invsigmas2 = 
update.model_->invsigmas().cwiseProduct(update.model_->invsigmas()); + updateInform.noalias() = updateA.transpose() * invsigmas2.asDiagonal() * updateA; } else throw invalid_argument("In HessianFactor::updateATA, JacobianFactor noise model is neither Unit nor Diagonal"); } - toc(2, "form A^T*A"); + if (debug) cout << "updateInform: \n" << updateInform << endl; + toc(2, "form A^T*A"); // Apply updates to the upper triangle tic(3, "update"); @@ -337,74 +345,6 @@ void HessianFactor::updateATA(const JacobianFactor& update, const Scatter& scatt } } toc(3, "update"); - - // Eigen::Map information(&matrix_(0,0), matrix_.rows(), matrix_.cols()); - // Eigen::Map updateA(&update.matrix_(0,0), update.matrix_.rows(), update.matrix_.cols()); - // - // // Apply updates to the upper triangle - // tic(2, "update"); - // assert(this->info_.nBlocks() - 1 == scatter.size()); - // for(size_t j2=0; j2info_.nBlocks()-1 : slots[j2]; - // for(size_t j1=0; j1<=j2; ++j1) { - // size_t slot1 = (j1 == update.size()) ? this->info_.nBlocks()-1 : slots[j1]; - // typedef typeof(updateA.block(0,0,0,0)) ABlock; - // ABlock A1(updateA.block(update.Ab_.rowStart(),update.Ab_.offset(j1), update.Ab_(j1).rows(),update.Ab_(j1).cols())); - // ABlock A2(updateA.block(update.Ab_.rowStart(),update.Ab_.offset(j2), update.Ab_(j2).rows(),update.Ab_(j2).cols())); - // if(slot2 > slot1) { - // if(debug) - // cout << "Updating (" << slot1 << "," << slot2 << ") from (" << j1 << "," << j2 << ")" << endl; - // if(boost::dynamic_pointer_cast(update.model_)) { - // information.block(info_.offset(slot1), info_.offset(slot2), info_(slot1,slot2).rows(), info_(slot1,slot2).cols()) += - // A1.transpose() * A2; - // } else { - // noiseModel::Diagonal::shared_ptr diagonal(boost::dynamic_pointer_cast(update.model_)); - // if(diagonal) { - // typeof(Eigen::Map(&update.model_->invsigmas()(0),0).asDiagonal()) R( - // Eigen::Map(&update.model_->invsigmas()(0),update.model_->dim()).asDiagonal()); - // information.block(info_.offset(slot1), info_.offset(slot2), info_(slot1,slot2).rows(), info_(slot1,slot2).cols()).noalias() += - // A1.transpose() * R * R * A2; - // } else - // throw invalid_argument("In HessianFactor::updateATA, JacobianFactor noise model is neither Unit nor Diagonal"); - // } - // } else if(slot1 > slot2) { - // if(debug) - // cout << "Updating (" << slot2 << "," << slot1 << ") from (" << j1 << "," << j2 << ")" << endl; - // if(boost::dynamic_pointer_cast(update.model_)) { - // information.block(info_.offset(slot2), info_.offset(slot1), info_(slot2,slot1).rows(), info_(slot2,slot1).cols()).noalias() += - // A2.transpose() * A1; - // } else { - // noiseModel::Diagonal::shared_ptr diagonal(boost::dynamic_pointer_cast(update.model_)); - // if(diagonal) { - // typeof(Eigen::Map(&update.model_->invsigmas()(0),0).asDiagonal()) R( - // Eigen::Map(&update.model_->invsigmas()(0),update.model_->dim()).asDiagonal()); - // information.block(info_.offset(slot1), info_.offset(slot2), info_(slot1,slot2).rows(), info_(slot1,slot2).cols()).noalias() += - // A2.transpose() * R * R * A1; - // } else - // throw invalid_argument("In HessianFactor::updateATA, JacobianFactor noise model is neither Unit nor Diagonal"); - // } - // } else { - // if(debug) - // cout << "Updating (" << slot1 << "," << slot2 << ") from (" << j1 << "," << j2 << ")" << endl; - // if(boost::dynamic_pointer_cast(update.model_)) { - // information.block(info_.offset(slot2), info_.offset(slot1), info_(slot2,slot1).rows(), info_(slot2,slot1).cols()).triangularView() += - // A1.transpose() * 
A1; - // } else { - // noiseModel::Diagonal::shared_ptr diagonal(boost::dynamic_pointer_cast(update.model_)); - // if(diagonal) { - // typeof(Eigen::Map(&update.model_->invsigmas()(0),0).asDiagonal()) R( - // Eigen::Map(&update.model_->invsigmas()(0),update.model_->dim()).asDiagonal()); - // information.block(info_.offset(slot2), info_.offset(slot1), info_(slot2,slot1).rows(), info_(slot2,slot1).cols()).triangularView() += - // A1.transpose() * R * R * A1; - // } else - // throw invalid_argument("In HessianFactor::updateATA, JacobianFactor noise model is neither Unit nor Diagonal"); - // } - // } - // if(debug) cout << "Updating block " << slot1 << "," << slot2 << " from block " << j1 << "," << j2 << "\n"; - // if(debug) this->print(); - // } - // } - // toc(2, "update"); } /* ************************************************************************* */ @@ -413,49 +353,43 @@ void HessianFactor::partialCholesky(size_t nrFrontals) { } /* ************************************************************************* */ -GaussianBayesNet::shared_ptr -HessianFactor::splitEliminatedFactor(size_t nrFrontals, const vector& keys) { +Eigen::LDLT::TranspositionType HessianFactor::partialLDL(size_t nrFrontals) { + return ldlPartial(matrix_, info_.offset(nrFrontals)); +} - static const bool debug = false; +/* ************************************************************************* */ +GaussianConditional::shared_ptr +HessianFactor::splitEliminatedFactor(size_t nrFrontals, const vector& keys, const Eigen::LDLT::TranspositionType& permutation) { - // Extract conditionals - tic(1, "extract conditionals"); - GaussianBayesNet::shared_ptr conditionals(new GaussianBayesNet()); - typedef VerticalBlockView BlockAb; - BlockAb Ab(matrix_, info_); - for(size_t j=0; j BlockAb; + BlockAb Ab(matrix_, info_); - tic(2, "construct cond"); - Vector sigmas = Vector::Ones(varDim); - conditionals->push_back(boost::make_shared(keys.begin()+j, keys.end(), 1, Ab, sigmas)); - toc(2, "construct cond"); - if(debug) conditionals->back()->print("Extracted conditional: "); - Ab.rowStart() += varDim; - Ab.firstBlock() += 1; - if(debug) cout << "rowStart = " << Ab.rowStart() << ", rowEnd = " << Ab.rowEnd() << endl; - } - toc(1, "extract conditionals"); + size_t varDim = info_.offset(nrFrontals); + Ab.rowEnd() = Ab.rowStart() + varDim; - // Take lower-right block of Ab_ to get the new factor - tic(2, "remaining factor"); - info_.blockStart() = nrFrontals; - // Assign the keys - keys_.assign(keys.begin() + nrFrontals, keys.end()); - toc(2, "remaining factor"); + // Create one big conditionals with many frontal variables. + // Because of the pivoting permutation when using LDL, treating each variable separately doesn't make sense. 
+ tic(2, "construct cond"); + Vector sigmas = Vector::Ones(varDim); + conditionals = boost::make_shared(keys.begin(), keys.end(), nrFrontals, Ab, sigmas, permutation); + toc(2, "construct cond"); + if(debug) conditionals->print("Extracted conditional: "); - return conditionals; + toc(1, "extract conditionals"); + + // Take lower-right block of Ab_ to get the new factor + tic(2, "remaining factor"); + info_.blockStart() = nrFrontals; + // Assign the keys + keys_.assign(keys.begin() + nrFrontals, keys.end()); + toc(2, "remaining factor"); + + return conditionals; } } // gtsam diff --git a/gtsam/linear/HessianFactor.h b/gtsam/linear/HessianFactor.h index e60dbcd51..4c4e2e034 100644 --- a/gtsam/linear/HessianFactor.h +++ b/gtsam/linear/HessianFactor.h @@ -18,7 +18,6 @@ #pragma once -#include #include #include #include @@ -26,6 +25,7 @@ // Forward declarations for friend unit tests class ConversionConstructorHessianFactorTest; class Constructor1HessianFactorTest; +class combineHessianFactorTest; namespace gtsam { @@ -40,7 +40,9 @@ namespace gtsam { struct SlotEntry { size_t slot; size_t dimension; - SlotEntry(size_t _slot, size_t _dimension) : slot(_slot), dimension(_dimension) {} + SlotEntry(size_t _slot, size_t _dimension) + : slot(_slot), dimension(_dimension) {} + std::string toString() const; }; typedef FastMap Scatter; @@ -131,6 +133,7 @@ namespace gtsam { // Friend unit test classes friend class ::ConversionConstructorHessianFactorTest; friend class ::Constructor1HessianFactorTest; + friend class ::combineHessianFactorTest; // Friend JacobianFactor for conversion friend class JacobianFactor; @@ -144,9 +147,14 @@ namespace gtsam { */ void partialCholesky(size_t nrFrontals); + /** + * Do LDL. + */ + Eigen::LDLT::TranspositionType partialLDL(size_t nrFrontals); + /** split partially eliminated factor */ - boost::shared_ptr > splitEliminatedFactor( - size_t nrFrontals, const std::vector& keys); + boost::shared_ptr splitEliminatedFactor( + size_t nrFrontals, const std::vector& keys, const Eigen::LDLT::TranspositionType& permutation = Eigen::LDLT::TranspositionType()); /** assert invariants */ void assertInvariants() const; diff --git a/gtsam/linear/IterativeOptimizationParameters.h b/gtsam/linear/IterativeOptimizationParameters.h index 973ba1836..bc16c222d 100644 --- a/gtsam/linear/IterativeOptimizationParameters.h +++ b/gtsam/linear/IterativeOptimizationParameters.h @@ -21,7 +21,8 @@ public: typedef boost::shared_ptr shared_ptr; typedef enum { - SILENT, ERROR, + SILENT = 0, + ERROR, } verbosityLevel; public: @@ -37,7 +38,7 @@ public: public: IterativeOptimizationParameters() : maxIterations_(100), reset_(101), epsilon_(1e-5), epsilon_abs_(1e-5), - verbosity_(ERROR), nReduce_(0), skeleton_spec_(), est_cond_(false) { + verbosity_(SILENT), nReduce_(0), skeleton_spec_(), est_cond_(false) { } IterativeOptimizationParameters( diff --git a/gtsam/linear/IterativeSolver.h b/gtsam/linear/IterativeSolver.h index d35d455da..c7e7edeab 100644 --- a/gtsam/linear/IterativeSolver.h +++ b/gtsam/linear/IterativeSolver.h @@ -1,17 +1,23 @@ -/* - * IterativeSolver.h - * - * Created on: Oct 24, 2010 - * Author: Yong-Dian Jian - * - * Base Class for all iterative solvers of linear systems +/* ---------------------------------------------------------------------------- + + * GTSAM Copyright 2010, Georgia Tech Research Corporation, + * Atlanta, Georgia 30332-0415 + * All Rights Reserved + * Authors: Frank Dellaert, et al. 
(see THANKS for the full author list) + + * See LICENSE for the license information + + * -------------------------------------------------------------------------- */ + +/** + * @file IterativeSolver.h + * @date Oct 24, 2010 + * @author Yong-Dian Jian + * @brief Base Class for all iterative solvers of linear systems */ #pragma once -#include -#include - #include namespace gtsam { diff --git a/gtsam/linear/JacobianFactor.cpp b/gtsam/linear/JacobianFactor.cpp index 19f4b8455..ed468bf0d 100644 --- a/gtsam/linear/JacobianFactor.cpp +++ b/gtsam/linear/JacobianFactor.cpp @@ -153,6 +153,8 @@ namespace gtsam { /* ************************************************************************* */ JacobianFactor::JacobianFactor(const GaussianConditional& cg) : GaussianFactor(cg), model_(noiseModel::Diagonal::Sigmas(cg.get_sigmas(), true)), Ab_(matrix_) { Ab_.assignNoalias(cg.rsd_); + // TODO: permutation for all frontal blocks + Ab_.range(0,cg.nrFrontals()) = Ab_.range(0,cg.nrFrontals()) * cg.permutation_; // todo SL: make firstNonzeroCols triangular? firstNonzeroBlocks_.resize(cg.get_d().size(), 0); // set sigmas from precisions assertInvariants(); @@ -197,7 +199,6 @@ namespace gtsam { } else { for(const_iterator key=begin(); key!=end(); ++key) cout << boost::format("A[%1%]=\n")%*key << getA(key) << endl; -// gtsam::print(getA(key), (boost::format("A[%1%]=\n")%*key).str()); cout << "b=" << getb() << endl; model_->print("model"); } @@ -360,11 +361,11 @@ namespace gtsam { /* ************************************************************************* */ GaussianConditional::shared_ptr JacobianFactor::eliminateFirst() { - return this->eliminate(1)->front(); + return this->eliminate(1); } /* ************************************************************************* */ - GaussianBayesNet::shared_ptr JacobianFactor::eliminate(size_t nrFrontals) { + GaussianConditional::shared_ptr JacobianFactor::eliminate(size_t nrFrontals) { assert(Ab_.rowStart() == 0 && Ab_.rowEnd() == (size_t) matrix_.rows() && Ab_.firstBlock() == 0); assert(size() >= nrFrontals); @@ -433,18 +434,15 @@ namespace gtsam { // Extract conditionals tic(3, "cond Rd"); - GaussianBayesNet::shared_ptr conditionals(new GaussianBayesNet()); - for(size_t j=0; j sigmas = noiseModel->sigmas().segment(Ab_.rowStart(), Ab_.rowEnd()-Ab_.rowStart()); - conditionals->push_back(boost::make_shared(begin()+j, end(), 1, Ab_, sigmas)); - if(debug) conditionals->back()->print("Extracted conditional: "); - Ab_.rowStart() += varDim; - Ab_.firstBlock() += 1; - } + GaussianConditional::shared_ptr conditionals(new GaussianConditional()); + + // Restrict the matrix to be in the first nrFrontals variables + Ab_.rowEnd() = Ab_.rowStart() + frontalDim; + const Eigen::VectorBlock sigmas = noiseModel->sigmas().segment(Ab_.rowStart(), Ab_.rowEnd()-Ab_.rowStart()); + conditionals = boost::make_shared(begin(), end(), nrFrontals, Ab_, sigmas); + if(debug) conditionals->print("Extracted conditional: "); + Ab_.rowStart() += frontalDim; + Ab_.firstBlock() += nrFrontals; toc(3, "cond Rd"); if(debug) conditionals->print("Extracted conditionals: "); diff --git a/gtsam/linear/JacobianFactor.h b/gtsam/linear/JacobianFactor.h index 4e60dc1ef..18d42dac6 100644 --- a/gtsam/linear/JacobianFactor.h +++ b/gtsam/linear/JacobianFactor.h @@ -18,15 +18,12 @@ #pragma once #include -#include -#include #include #include #include #include #include -#include #include // Forward declarations of friend unit tests @@ -72,8 +69,6 @@ namespace gtsam { /** Construct Null factor */ JacobianFactor(const Vector& 
b_in); - // FIXME: make these constructors use other matrix types - column major and blocks - /** Construct unary factor */ JacobianFactor(Index i1, const Matrix& A1, const Vector& b, const SharedDiagonal& model); @@ -200,7 +195,8 @@ namespace gtsam { boost::shared_ptr eliminateFirst(); - boost::shared_ptr > eliminate(size_t nrFrontals = 1); + /** return a multi-frontal conditional. It's actually a chordal Bayesnet */ + boost::shared_ptr eliminate(size_t nrFrontals = 1); // Friend HessianFactor to facilitate convertion constructors friend class HessianFactor; diff --git a/gtsam/linear/Makefile.am b/gtsam/linear/Makefile.am index 1363d148b..448fc04f5 100644 --- a/gtsam/linear/Makefile.am +++ b/gtsam/linear/Makefile.am @@ -16,7 +16,7 @@ sources += NoiseModel.cpp Errors.cpp Sampler.cpp check_PROGRAMS += tests/testNoiseModel tests/testErrors tests/testSampler # Vector Configurations -headers += VectorValues.h +sources += VectorValues.cpp check_PROGRAMS += tests/testVectorValues # Solvers diff --git a/gtsam/linear/NoiseModel.h b/gtsam/linear/NoiseModel.h index 002b73c95..4f2fa4105 100644 --- a/gtsam/linear/NoiseModel.h +++ b/gtsam/linear/NoiseModel.h @@ -19,10 +19,8 @@ #pragma once -#include #include #include -#include #include namespace gtsam { @@ -103,7 +101,6 @@ namespace gtsam { protected: - // TODO: store as boost upper-triangular or whatever is passed from above /* Matrix square root of information matrix (R) */ boost::optional sqrt_information_; @@ -182,9 +179,6 @@ namespace gtsam { * @return in-place QR factorization [R d]. Below-diagonal is undefined !!!!! */ virtual SharedDiagonal QR(Matrix& Ab) const; - // FIXME: these previously had firstZeroRows - what did this do? -// virtual SharedDiagonal QRColumnWise(Matrix& Ab, std::vector& firstZeroRows) const; -// virtual SharedDiagonal QR(Matrix& Ab, boost::optional&> firstZeroRows = boost::none) const; /** * Cholesky factorization @@ -212,7 +206,6 @@ namespace gtsam { }; // Gaussian - /** * A diagonal noise model implements a diagonal covariance matrix, with the * elements of the diagonal specified in a Vector. 
This class has no public diff --git a/gtsam/linear/SharedGaussian.h b/gtsam/linear/SharedGaussian.h index 7a23e3c51..7d86ab6d2 100644 --- a/gtsam/linear/SharedGaussian.h +++ b/gtsam/linear/SharedGaussian.h @@ -9,7 +9,7 @@ * -------------------------------------------------------------------------- */ -/* +/** * SharedGaussian.h * @brief Class that wraps a shared noise model with diagonal covariance * @Author: Frank Dellaert diff --git a/gtsam/linear/SubgraphPreconditioner.h b/gtsam/linear/SubgraphPreconditioner.h index 5d161f808..608c765c4 100644 --- a/gtsam/linear/SubgraphPreconditioner.h +++ b/gtsam/linear/SubgraphPreconditioner.h @@ -19,7 +19,7 @@ #include #include -#include +#include // FIXME shouldn't have nonlinear things in linear namespace gtsam { diff --git a/gtsam/linear/SubgraphSolver.h b/gtsam/linear/SubgraphSolver.h index bca16453c..40bfb47c0 100644 --- a/gtsam/linear/SubgraphSolver.h +++ b/gtsam/linear/SubgraphSolver.h @@ -12,12 +12,8 @@ #pragma once -#include -#include - #include #include -#include namespace gtsam { @@ -52,29 +48,33 @@ namespace gtsam { /* preconditioner */ shared_preconditioner pc_; + /* flag for direct solver - either QR or LDL */ + bool useQR_; + public: - SubgraphSolver(const GRAPH& G, const VALUES& theta0, const Parameters ¶meters = Parameters()): - IterativeSolver(parameters){ initialize(G,theta0); } + SubgraphSolver(const GRAPH& G, const VALUES& theta0, const Parameters ¶meters = Parameters(), bool useQR = false): + IterativeSolver(parameters), useQR_(useQR) { initialize(G,theta0); } SubgraphSolver(const LINEAR& GFG) { std::cout << "[SubgraphSolver] Unexpected usage.." << std::endl; throw std::runtime_error("SubgraphSolver: gaussian factor graph initialization not supported"); } - SubgraphSolver(const shared_linear& GFG, const boost::shared_ptr& structure) { + SubgraphSolver(const shared_linear& GFG, const boost::shared_ptr& structure, bool useQR = false) { std::cout << "[SubgraphSolver] Unexpected usage.." << std::endl; throw std::runtime_error("SubgraphSolver: gaussian factor graph and variable index initialization not supported"); } SubgraphSolver(const SubgraphSolver& solver) : - IterativeSolver(solver), ordering_(solver.ordering_), pairs_(solver.pairs_), pc_(solver.pc_){} + IterativeSolver(solver), ordering_(solver.ordering_), pairs_(solver.pairs_), pc_(solver.pc_), useQR_(solver.useQR_) {} SubgraphSolver(shared_ordering ordering, mapPairIndex pairs, shared_preconditioner pc, - sharedParameters parameters = boost::make_shared()) : - IterativeSolver(parameters), ordering_(ordering), pairs_(pairs), pc_(pc) {} + sharedParameters parameters = boost::make_shared(), + bool useQR = true) : + IterativeSolver(parameters), ordering_(ordering), pairs_(pairs), pc_(pc), useQR_(useQR) {} void replaceFactors(const typename LINEAR::shared_ptr &graph); VectorValues::shared_ptr optimize() const ; diff --git a/gtsam/linear/VectorValues.cpp b/gtsam/linear/VectorValues.cpp new file mode 100644 index 000000000..32bb88298 --- /dev/null +++ b/gtsam/linear/VectorValues.cpp @@ -0,0 +1,132 @@ +/* ---------------------------------------------------------------------------- + + * GTSAM Copyright 2010, Georgia Tech Research Corporation, + * Atlanta, Georgia 30332-0415 + * All Rights Reserved + * Authors: Frank Dellaert, et al. 
(see THANKS for the full author list) + + * See LICENSE for the license information + + * -------------------------------------------------------------------------- */ + +/** + * @file VectorValues.cpp + * @brief Implementations for VectorValues + * @author Richard Roberts + * @author Alex Cunningham + */ + +#include + +using namespace std; +using namespace gtsam; + +/* ************************************************************************* */ +VectorValues::VectorValues(Index nVars, size_t varDim) : varStarts_(nVars+1) { + varStarts_[0] = 0; + size_t varStart = 0; + for(Index j=1; j<=nVars; ++j) + varStarts_[j] = (varStart += varDim); + values_.resize(varStarts_.back()); +} + +/* ************************************************************************* */ +VectorValues::VectorValues(const std::vector& dimensions, const Vector& values) : + values_(values), varStarts_(dimensions.size()+1) { + varStarts_[0] = 0; + size_t varStart = 0; + Index var = 0; + BOOST_FOREACH(size_t dim, dimensions) { + varStarts_[++var] = (varStart += dim); + } + assert(varStarts_.back() == (size_t) values.size()); +} + +/* ************************************************************************* */ +VectorValues::VectorValues(const std::vector& dimensions, const double* values) : + varStarts_(dimensions.size()+1) { + varStarts_[0] = 0; + size_t varStart = 0; + Index var = 0; + BOOST_FOREACH(size_t dim, dimensions) { + varStarts_[++var] = (varStart += dim); + } + values_ = Vector_(varStart, values); +} + +/* ************************************************************************* */ +VectorValues VectorValues::SameStructure(const VectorValues& otherValues) { + VectorValues ret; + ret.varStarts_ = otherValues.varStarts_; + ret.values_.resize(ret.varStarts_.back(), false); + return ret; +} + +/* ************************************************************************* */ +VectorValues::mapped_type VectorValues::operator[](Index variable) { + checkVariable(variable); + const size_t start = varStarts_[variable], n = varStarts_[variable+1] - start; + return values_.segment(start, n); +} + +/* ************************************************************************* */ +VectorValues::const_mapped_type VectorValues::operator[](Index variable) const { + checkVariable(variable); + const size_t start = varStarts_[variable], n = varStarts_[variable+1] - start; + return values_.segment(start, n); +} + +/* ************************************************************************* */ +Index VectorValues::push_back_preallocated(const Vector& vector) { + Index var = varStarts_.size()-1; + varStarts_.push_back(varStarts_.back()+vector.size()); + this->operator[](var) = vector; // This will assert that values_ has enough allocated space. 
+ return var; +} + +/* ************************************************************************* */ +void VectorValues::print(const std::string& str) const { + std::cout << str << ": " << varStarts_.size()-1 << " elements\n"; + for(Index var=0; varvalues_ += c.values_.head(varStarts_.back()); +} + +/* ************************************************************************* */ +VectorValues VectorValues::zero(const VectorValues& x) { + VectorValues cloned(x); + cloned.makeZero(); + return cloned; +} + +/* ************************************************************************* */ +size_t VectorValues::dim(Index variable) const { + checkVariable(variable); + const size_t start = varStarts_[variable], n = varStarts_[variable+1] - start; + return n; +} + +/* ************************************************************************* */ + + diff --git a/gtsam/linear/VectorValues.h b/gtsam/linear/VectorValues.h index 48474f2f6..6a391d607 100644 --- a/gtsam/linear/VectorValues.h +++ b/gtsam/linear/VectorValues.h @@ -24,8 +24,6 @@ #include #include -#include - namespace gtsam { class VectorValues : public Testable { @@ -75,6 +73,9 @@ public: mapped_type operator[](Index variable); const_mapped_type operator[](Index variable) const; + /** dimension of a particular element */ + size_t dim(Index variable) const; + /** Number of elements */ Index size() const { return varStarts_.size()-1; } @@ -104,50 +105,36 @@ public: /** Reserve space for a total number of variables and dimensionality */ void reserve(Index nVars, size_t totalDims) { values_.resize(totalDims); varStarts_.reserve(nVars+1); } + /** access a range of indices (of no particular order) as a single vector */ + template + Vector range(const ITERATOR& idx_begin, const ITERATOR& idx_end) const; + + /** set a range of indices as a single vector split across the range */ + template + void range(const ITERATOR& idx_begin, const ITERATOR& idx_end, const Vector& v); + /** * Append a variable using the next variable ID, and return that ID. Space * must have been allocated ahead of time using reserve(...). */ - Index push_back_preallocated(const Vector& vector) { - Index var = varStarts_.size()-1; - varStarts_.push_back(varStarts_.back()+vector.size()); - this->operator[](var) = vector; // This will assert that values_ has enough allocated space. 
- return var; - } + Index push_back_preallocated(const Vector& vector); /** Set all elements to zero */ void makeZero() { values_.setZero(); } /** print required by Testable for unit testing */ - void print(const std::string& str = "VectorValues: ") const { - std::cout << str << ": " << varStarts_.size()-1 << " elements\n"; - for(Index var=0; varvalues_ += c.values_.head(varStarts_.back()); - } + VectorValues operator+(const VectorValues& c) const; + void operator+=(const VectorValues& c); /** * Iterator (handles both iterator and const_iterator depending on whether @@ -177,11 +164,8 @@ public: value_type operator*() { return config_[curVariable_]; } }; - static VectorValues zero(const VectorValues& x) { - VectorValues cloned(x); - cloned.makeZero(); - return cloned; - } + /** Copy structure of x, but set all values to zero */ + static VectorValues zero(const VectorValues& x); protected: void checkVariable(Index variable) const { assert(variable < varStarts_.size()-1); } @@ -243,53 +227,34 @@ inline VectorValues::VectorValues(const CONTAINER& dimensions) : varStarts_(dime values_.resize(varStarts_.back()); } -inline VectorValues::VectorValues(Index nVars, size_t varDim) : varStarts_(nVars+1) { - varStarts_[0] = 0; - size_t varStart = 0; - for(Index j=1; j<=nVars; ++j) - varStarts_[j] = (varStart += varDim); - values_.resize(varStarts_.back()); -} +template +inline Vector VectorValues::range(const ITERATOR& idx_begin, const ITERATOR& idx_end) const { + // find the size of the vector to build + size_t s = 0; + for (ITERATOR it=idx_begin; it!=idx_end; ++it) + s += dim(*it); -inline VectorValues::VectorValues(const std::vector& dimensions, const Vector& values) : - values_(values), varStarts_(dimensions.size()+1) { - varStarts_[0] = 0; - size_t varStart = 0; - Index var = 0; - BOOST_FOREACH(size_t dim, dimensions) { - varStarts_[++var] = (varStart += dim); - } - assert(varStarts_.back() == (size_t) values.size()); -} - -inline VectorValues::VectorValues(const std::vector& dimensions, const double* values) : - varStarts_(dimensions.size()+1) { - varStarts_[0] = 0; - size_t varStart = 0; - Index var = 0; - BOOST_FOREACH(size_t dim, dimensions) { - varStarts_[++var] = (varStart += dim); + // assign vector + Vector result(s); + size_t start = 0; + for (ITERATOR it=idx_begin; it!=idx_end; ++it) { + ConstSubVector v = (*this)[*it]; + const size_t d = v.size(); + result.segment(start, d) = v; + start += d; } - values_ = Vector_(varStart, values); + return result; } -inline VectorValues VectorValues::SameStructure(const VectorValues& otherValues) { - VectorValues ret; - ret.varStarts_ = otherValues.varStarts_; - ret.values_.resize(ret.varStarts_.back(), false); - return ret; -} - -inline VectorValues::mapped_type VectorValues::operator[](Index variable) { - checkVariable(variable); - const size_t start = varStarts_[variable], n = varStarts_[variable+1] - start; - return values_.segment(start, n); -} - -inline VectorValues::const_mapped_type VectorValues::operator[](Index variable) const { - checkVariable(variable); - const size_t start = varStarts_[variable], n = varStarts_[variable+1] - start; - return values_.segment(start, n); +template +inline void VectorValues::range(const ITERATOR& idx_begin, const ITERATOR& idx_end, const Vector& v) { + size_t start = 0; + for (ITERATOR it=idx_begin; it!=idx_end; ++it) { + checkVariable(*it); + const size_t d = dim(*it); + (*this)[*it] = v.segment(start, d); + start += d; + } } struct DimSpec: public std::vector { @@ -306,8 +271,6 @@ struct DimSpec: public 
std::vector { (*this)[i] = V[i].size() ; } } -} ; +}; - - -} +} // \namespace gtsam diff --git a/gtsam/linear/iterative-inl.h b/gtsam/linear/iterative-inl.h index d3e92e1e7..ba4eec0b0 100644 --- a/gtsam/linear/iterative-inl.h +++ b/gtsam/linear/iterative-inl.h @@ -94,7 +94,7 @@ namespace gtsam { // check for convergence double new_gamma = dot(g, g); - if (parameters_.verbosity()) + if (parameters_.verbosity() != IterativeOptimizationParameters::SILENT) cout << "iteration " << k << ": alpha = " << alpha << ", dotg = " << new_gamma << endl; if (new_gamma < threshold) return true; @@ -128,14 +128,14 @@ namespace gtsam { bool steepest = false) { CGState state(Ab, x, parameters, steepest); - if (parameters.verbosity()) + if (parameters.verbosity() != IterativeOptimizationParameters::SILENT) cout << "CG: epsilon = " << parameters.epsilon() << ", maxIterations = " << parameters.maxIterations() << ", ||g0||^2 = " << state.gamma << ", threshold = " << state.threshold << endl; if ( state.gamma < state.threshold ) { - if (parameters.verbosity()) + if (parameters.verbosity() != IterativeOptimizationParameters::SILENT) cout << "||g0||^2 < threshold, exiting immediately !" << endl; return x; diff --git a/gtsam/linear/tests/testGaussianConditional.cpp b/gtsam/linear/tests/testGaussianConditional.cpp index 6e4028b24..c71676548 100644 --- a/gtsam/linear/tests/testGaussianConditional.cpp +++ b/gtsam/linear/tests/testGaussianConditional.cpp @@ -20,16 +20,19 @@ #include #include +#include #include #include #include #include +#include using namespace gtsam; using namespace std; using namespace boost::assign; +static const double tol = 1e-5; static const Index _x_=0, _x1_=1, _l1_=2; /* ************************************************************************* */ @@ -112,17 +115,16 @@ TEST( GaussianConditional, equals ) actual(_x_,d, R, _x1_, A1, _l1_, A2, tau); EXPECT( expected.equals(actual) ); - } /* ************************************************************************* */ TEST( GaussianConditional, solve ) { //expected solution - Vector expected(2); - expected(0) = 20-3-11 ; expected(1) = 40-7-15; + Vector expectedX(2); + expectedX(0) = 20-3-11 ; expectedX(1) = 40-7-15; - // create a conditional gaussion node + // create a conditional Gaussian node Matrix R = Matrix_(2,2, 1., 0., 0., 1.); @@ -137,7 +139,8 @@ TEST( GaussianConditional, solve ) Vector tau = ones(2); - GaussianConditional cg(_x_,d, R, _x1_, A1, _l1_, A2, tau); + // RHS is different than the one in the solution vector + GaussianConditional cg(_x_,ones(2), R, _x1_, A1, _l1_, A2, tau); Vector sx1(2); sx1(0) = 1.0; sx1(1) = 1.0; @@ -146,13 +149,145 @@ TEST( GaussianConditional, solve ) sl1(0) = 1.0; sl1(1) = 1.0; VectorValues solution(vector(3, 2)); - solution[_x1_] = sx1; + solution[_x_] = d; // RHS + solution[_x1_] = sx1; // parents solution[_l1_] = sl1; - Vector result = cg.solve(solution); + // NOTE: the solve functions assume the RHS is passed as the initialization of + // the solution. 
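Aside on the NOTE above: solveInPlace expects the right-hand side d to already sit in the frontal slot of the VectorValues and then overwrites that slot with x = R^{-1}(d - S * x_parents). The same back-substitution written directly in Eigen (made-up 2x2 numbers, not the GTSAM API):

#include <iostream>
#include <Eigen/Dense>

int main() {
  Eigen::Matrix2d R;                    // upper-triangular frontal block
  R << 1.0, 0.5,
       0.0, 2.0;
  Eigen::Matrix2d S;                    // block of the parent variable
  S << 1.0, 0.0,
       0.0, 1.0;
  Eigen::Vector2d d(3.0, 4.0);          // RHS, loaded into the frontal slot
  Eigen::Vector2d xParent(1.0, 1.0);    // parent values, assumed known

  // Overwrite the frontal slot with the solution, which is exactly the
  // convention the solve/solveInPlace tests rely on.
  Eigen::Vector2d x =
      R.triangularView<Eigen::Upper>().solve(d - S * xParent);
  std::cout << "x = " << x.transpose() << "\n";   // expected: (1.25, 1.5)
  return 0;
}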
+ VectorValues expected(vector(3, 2)); + expected[_x_] = expectedX; + expected[_x1_] = sx1; + expected[_l1_] = sl1; - EXPECT(assert_equal(expected , result, 0.0001)); + VectorValues copy_result = cg.solve(solution); + cg.solveInPlace(solution); + EXPECT(assert_equal(expected, copy_result, 0.0001)); + EXPECT(assert_equal(expected, solution, 0.0001)); +} + +/* ************************************************************************* */ +TEST( GaussianConditional, solve_simple ) +{ + // no pivoting from LDL, so R matrix is not permuted + // RHS is deliberately not the same as below + Matrix full_matrix = Matrix_(4, 7, + 1.0, 0.0, 2.0, 0.0, 3.0, 0.0, 0.0, + 0.0, 1.0, 0.0, 2.0, 0.0, 3.0, 0.0, + 0.0, 0.0, 3.0, 0.0, 4.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 3.0, 0.0, 4.0, 0.0); + + // solve system as a non-multifrontal version first + // 2 variables, frontal has dim=4 + vector dims; dims += 4, 2, 1; + GaussianConditional::rsd_type matrices(full_matrix, dims.begin(), dims.end()); + Vector sigmas = ones(4); + vector cgdims; cgdims += _x_, _x1_; + GaussianConditional cg(cgdims.begin(), cgdims.end(), 1, matrices, sigmas); + + // partial solution + Vector sx1 = Vector_(2, 9.0, 10.0); + + // elimination order; _x_, _x1_ + vector vdim; vdim += 4, 2; + VectorValues actual(vdim); + actual[_x_] = Vector_(4, 0.1, 0.2, 0.3, 0.4); // d + actual[_x1_] = sx1; // parent + + VectorValues expected(vdim); + expected[_x1_] = sx1; + expected[_x_] = Vector_(4, -3.1,-3.4,-11.9,-13.2); + + // verify indices/size + EXPECT_LONGS_EQUAL(2, cg.size()); + EXPECT_LONGS_EQUAL(4, cg.dim()); + + // solve and verify + cg.solveInPlace(actual); + EXPECT(assert_equal(expected, actual, tol)); +} + +/* ************************************************************************* */ +TEST( GaussianConditional, solve_multifrontal ) +{ + // create full system, 3 variables, 2 frontals, all 2 dim + // no pivoting from LDL, so R matrix is not permuted + Matrix full_matrix = Matrix_(4, 7, + 1.0, 0.0, 2.0, 0.0, 3.0, 0.0, 0.5, + 0.0, 1.0, 0.0, 2.0, 0.0, 3.0, 0.6, + 0.0, 0.0, 3.0, 0.0, 4.0, 0.0, 0.7, + 0.0, 0.0, 0.0, 3.0, 0.0, 4.0, 0.8); + + // 3 variables, all dim=2 + vector dims; dims += 2, 2, 2, 1; + GaussianConditional::rsd_type matrices(full_matrix, dims.begin(), dims.end()); + Vector sigmas = ones(4); + vector cgdims; cgdims += _x_, _x1_, _l1_; + GaussianConditional cg(cgdims.begin(), cgdims.end(), 2, matrices, sigmas); + + EXPECT(assert_equal(Vector_(4, 0.5, 0.6, 0.7, 0.8), cg.get_d())); + + // partial solution + Vector sl1 = Vector_(2, 9.0, 10.0); + + // elimination order; _x_, _x1_, _l1_ + VectorValues actual(vector(3, 2)); + actual[_x_] = Vector_(2, 0.1, 0.2); // rhs + actual[_x1_] = Vector_(2, 0.3, 0.4); // rhs + actual[_l1_] = sl1; // parent + + VectorValues expected(vector(3, 2)); + expected[_x_] = Vector_(2, -3.1,-3.4); + expected[_x1_] = Vector_(2, -11.9,-13.2); + expected[_l1_] = sl1; + + // verify indices/size + EXPECT_LONGS_EQUAL(3, cg.size()); + EXPECT_LONGS_EQUAL(4, cg.dim()); + + // solve and verify + cg.solveInPlace(actual); + EXPECT(assert_equal(expected, actual, tol)); + +} + +/* ************************************************************************* */ +TEST( GaussianConditional, solveTranspose ) { + static const Index _y_=1; + /** create small Chordal Bayes Net x <- y + * x y d + * 1 1 9 + * 1 5 + */ + Matrix R11 = Matrix_(1, 1, 1.0), S12 = Matrix_(1, 1, 1.0); + Matrix R22 = Matrix_(1, 1, 1.0); + Vector d1(1), d2(1); + d1(0) = 9; + d2(0) = 5; + Vector tau(1); + tau(0) = 1.0; + + // define nodes and specify in reverse topological 
sort (i.e. parents last) + GaussianConditional::shared_ptr Px_y(new GaussianConditional(_x_, d1, R11, _y_, S12, tau)); + GaussianConditional::shared_ptr Py(new GaussianConditional(_y_, d2, R22, tau)); + GaussianBayesNet cbn; + cbn.push_back(Px_y); + cbn.push_back(Py); + + // x=R'*y, y=inv(R')*x + // 2 = 1 2 + // 5 1 1 3 + + VectorValues y(vector(2,1)), x(vector(2,1)); + x[_x_] = Vector_(1,2.); + x[_y_] = Vector_(1,5.); + y[_x_] = Vector_(1,2.); + y[_y_] = Vector_(1,3.); + + // test functional version + VectorValues actual = backSubstituteTranspose(cbn,x); + CHECK(assert_equal(y,actual)); } /* ************************************************************************* */ diff --git a/gtsam/linear/tests/testGaussianFactorGraph.cpp b/gtsam/linear/tests/testGaussianFactorGraph.cpp index 46f7431ca..87cfc5c33 100644 --- a/gtsam/linear/tests/testGaussianFactorGraph.cpp +++ b/gtsam/linear/tests/testGaussianFactorGraph.cpp @@ -180,16 +180,16 @@ TEST_UNSAFE(GaussianFactor, CombineAndEliminate) Vector sigmas = gtsam::concatVectors(3, &s1, &s0, &s2); JacobianFactor expectedFactor(0, A0, 1, A1, b, noiseModel::Diagonal::Sigmas(sigmas, true)); - GaussianBayesNet expectedBN(*expectedFactor.eliminate(1)); + GaussianConditional::shared_ptr expectedBN = expectedFactor.eliminate(1); - GaussianBayesNet::shared_ptr actualBN; + GaussianConditional::shared_ptr actualBN; GaussianFactor::shared_ptr actualFactor; boost::tie(actualBN, actualFactor) = // EliminateQR(*gfg.dynamicCastFactors > (), 1); JacobianFactor::shared_ptr actualJacobian = boost::dynamic_pointer_cast< JacobianFactor>(actualFactor); - EXPECT(assert_equal(expectedBN, *actualBN)); + EXPECT(assert_equal(*expectedBN, *actualBN)); EXPECT(assert_equal(expectedFactor, *actualJacobian)); } @@ -434,10 +434,17 @@ TEST(GaussianFactor, eliminateFrontals) GaussianConditional::shared_ptr cond3(new GaussianConditional(7, d3, R3, cterms3, ones(2))); // Create expected Bayes net fragment from three conditionals above - GaussianBayesNet expectedFragment; - expectedFragment.push_back(cond1); - expectedFragment.push_back(cond2); - expectedFragment.push_back(cond3); +// GaussianBayesNet expectedFragment; +// expectedFragment.push_back(cond1); +// expectedFragment.push_back(cond2); +// expectedFragment.push_back(cond3); + Index ikeys[] = {3,5,7,9,11}; + std::vector keys(ikeys, ikeys + sizeof(ikeys)/sizeof(Index)); + size_t dims[] = { 2,2,2,2,2,1 }; + size_t height = 11; + VerticalBlockView Rblock(R, dims, dims+6, height); + GaussianConditional::shared_ptr expectedFragment( new GaussianConditional(keys.begin(), keys.end(), 3, + Rblock, ones(6)) ); // Get expected matrices for remaining factor Matrix Ae1 = sub(R, 6,10, 6,8); @@ -445,9 +452,9 @@ TEST(GaussianFactor, eliminateFrontals) Vector be = R.col(10).segment(6, 4); // Eliminate (3 frontal variables, 6 scalar columns) using QR !!!! 
- GaussianBayesNet actualFragment_QR = *actualFactor_QR.eliminate(3); + GaussianConditional::shared_ptr actualFragment_QR = actualFactor_QR.eliminate(3); - EXPECT(assert_equal(expectedFragment, actualFragment_QR, 0.001)); + EXPECT(assert_equal(*expectedFragment, *actualFragment_QR, 0.001)); EXPECT(assert_equal(size_t(2), actualFactor_QR.keys().size())); EXPECT(assert_equal(Index(9), actualFactor_QR.keys()[0])); EXPECT(assert_equal(Index(11), actualFactor_QR.keys()[1])); diff --git a/gtsam/linear/tests/testGaussianJunctionTree.cpp b/gtsam/linear/tests/testGaussianJunctionTree.cpp index b92595a03..de38f5b6d 100644 --- a/gtsam/linear/tests/testGaussianJunctionTree.cpp +++ b/gtsam/linear/tests/testGaussianJunctionTree.cpp @@ -1,6 +1,6 @@ /* ---------------------------------------------------------------------------- - * GTSAM Copyright 2010, Georgia Tech Research Corporation, + * GTSAM Copyright 2010, Georgia Tech Research Corporation, * Atlanta, Georgia 30332-0415 * All Rights Reserved * Authors: Frank Dellaert, et al. (see THANKS for the full author list) @@ -35,20 +35,20 @@ static const Index x2=0, x1=1, x3=2, x4=3; GaussianFactorGraph createChain() { - typedef GaussianFactorGraph::sharedFactor Factor; - SharedDiagonal model(Vector_(1, 0.5)); - Factor factor1(new JacobianFactor(x2, Matrix_(1,1,1.), x1, Matrix_(1,1,1.), Vector_(1,1.), model)); - Factor factor2(new JacobianFactor(x2, Matrix_(1,1,1.), x3, Matrix_(1,1,1.), Vector_(1,1.), model)); - Factor factor3(new JacobianFactor(x3, Matrix_(1,1,1.), x4, Matrix_(1,1,1.), Vector_(1,1.), model)); - Factor factor4(new JacobianFactor(x4, Matrix_(1,1,1.), Vector_(1,1.), model)); + typedef GaussianFactorGraph::sharedFactor Factor; + SharedDiagonal model(Vector_(1, 0.5)); + Factor factor1(new JacobianFactor(x2, Matrix_(1,1,1.), x1, Matrix_(1,1,1.), Vector_(1,1.), model)); + Factor factor2(new JacobianFactor(x2, Matrix_(1,1,1.), x3, Matrix_(1,1,1.), Vector_(1,1.), model)); + Factor factor3(new JacobianFactor(x3, Matrix_(1,1,1.), x4, Matrix_(1,1,1.), Vector_(1,1.), model)); + Factor factor4(new JacobianFactor(x4, Matrix_(1,1,1.), Vector_(1,1.), model)); - GaussianFactorGraph fg; - fg.push_back(factor1); - fg.push_back(factor2); - fg.push_back(factor3); - fg.push_back(factor4); + GaussianFactorGraph fg; + fg.push_back(factor1); + fg.push_back(factor2); + fg.push_back(factor3); + fg.push_back(factor4); - return fg; + return fg; } /* ************************************************************************* */ @@ -65,43 +65,55 @@ GaussianFactorGraph createChain() { * * 1 0 0 1 */ -TEST( GaussianJunctionTree, eliminate ) +TEST_UNSAFE( GaussianJunctionTree, eliminate ) { - GaussianFactorGraph fg = createChain(); - GaussianJunctionTree junctionTree(fg); - BayesTree::sharedClique rootClique = junctionTree.eliminate(&EliminateQR); + GaussianFactorGraph fg = createChain(); + GaussianJunctionTree junctionTree(fg); + BayesTree::sharedClique rootClique = junctionTree.eliminate(&EliminateQR); - typedef BayesTree::sharedConditional sharedConditional; - Matrix two = Matrix_(1,1,2.); - Matrix one = Matrix_(1,1,1.); - BayesTree bayesTree_expected; - bayesTree_expected.insert(sharedConditional(new GaussianConditional(x4, Vector_(1,2.), two, Vector_(1,1.)))); - bayesTree_expected.insert(sharedConditional(new GaussianConditional(x3, Vector_(1,2.), two, x4, two, Vector_(1,1.)))); - bayesTree_expected.insert(sharedConditional(new GaussianConditional(x1, Vector_(1,0.), one*(-1), x3, one, Vector_(1,1.)))); - bayesTree_expected.insert(sharedConditional(new 
GaussianConditional(x2, Vector_(1,2.), two, x1, one, x3, one, Vector_(1,1.)))); - CHECK(assert_equal(*bayesTree_expected.root(), *rootClique)); - CHECK(assert_equal(*(bayesTree_expected.root()->children().front()), *(rootClique->children().front()))); + typedef BayesTree::sharedConditional sharedConditional; + Matrix two = Matrix_(1,1,2.); + Matrix one = Matrix_(1,1,1.); + BayesTree bayesTree_expected; + Index keys_root[] = {x3,x4}; + Matrix rsd_root = Matrix_(2,3, 2., 2., 2., 0., 2., 2.); + size_t dim_root[] = {1, 1, 1}; + sharedConditional root_expected(new GaussianConditional(keys_root, keys_root+2, 2, + VerticalBlockView(rsd_root, dim_root, dim_root+3, 2), ones(2))); + BayesTree::sharedClique rootClique_expected(new BayesTree::Clique(root_expected)); + + Index keys_child[] = {x2,x1,x3}; + Matrix rsd_child = Matrix_(2,4, 2., 1., 1., 2., 0., -1., 1., 0.); + size_t dim_child[] = {1, 1, 1, 1}; + sharedConditional child_expected(new GaussianConditional(keys_child, keys_child+3, 2, + VerticalBlockView(rsd_child, dim_child, dim_child+4, 2), ones(2))); + BayesTree::sharedClique childClique_expected(new BayesTree::Clique(child_expected)); + + bayesTree_expected.insert(rootClique_expected); + bayesTree_expected.insert(childClique_expected); + +// bayesTree_expected.insert(sharedConditional(new GaussianConditional(x4, Vector_(1,2.), two, Vector_(1,1.)))); +// bayesTree_expected.insert(sharedConditional(new GaussianConditional(x3, Vector_(1,2.), two, x4, two, Vector_(1,1.)))); +// bayesTree_expected.insert(sharedConditional(new GaussianConditional(x1, Vector_(1,0.), one*(-1), x3, one, Vector_(1,1.)))); +// bayesTree_expected.insert(sharedConditional(new GaussianConditional(x2, Vector_(1,2.), two, x1, one, x3, one, Vector_(1,1.)))); + CHECK(assert_equal(*bayesTree_expected.root(), *rootClique)); + EXPECT(assert_equal(*(bayesTree_expected.root()->children().front()), *(rootClique->children().front()))); } /* ************************************************************************* */ -TEST( GaussianJunctionTree, optimizeMultiFrontal ) +TEST_UNSAFE( GaussianJunctionTree, optimizeMultiFrontal ) { - GaussianFactorGraph fg = createChain(); - GaussianJunctionTree tree(fg); - - VectorValues actual = tree.optimize(&EliminateQR); - VectorValues expected(vector(4,1)); - expected[x1] = Vector_(1, 0.); - expected[x2] = Vector_(1, 1.); - expected[x3] = Vector_(1, 0.); - expected[x4] = Vector_(1, 1.); - CHECK(assert_equal(expected,actual)); -} - -/* ************************************************************************* */ -TEST(GaussianJunctionTree, complexExample) { + GaussianFactorGraph fg = createChain(); + GaussianJunctionTree tree(fg); + VectorValues actual = tree.optimize(&EliminateQR); + VectorValues expected(vector(4,1)); + expected[x1] = Vector_(1, 0.); + expected[x2] = Vector_(1, 1.); + expected[x3] = Vector_(1, 0.); + expected[x4] = Vector_(1, 1.); + EXPECT(assert_equal(expected,actual)); } /* ************************************************************************* */ diff --git a/gtsam/linear/tests/testHessianFactor.cpp b/gtsam/linear/tests/testHessianFactor.cpp index 7eb18c0d8..59ac5f905 100644 --- a/gtsam/linear/tests/testHessianFactor.cpp +++ b/gtsam/linear/tests/testHessianFactor.cpp @@ -16,19 +16,25 @@ * @created Dec 15, 2010 */ +#include +#include + +#include +#include + #include #include #include #include -#include -#include - #include #include -using namespace gtsam; using namespace std; +using namespace boost::assign; +using namespace gtsam; + +const double tol = 1e-5; /* 
************************************************************************* */ TEST(HessianFactor, ConversionConstructor) { @@ -158,7 +164,7 @@ TEST(HessianFactor, Constructor2) } /* ************************************************************************* */ -TEST(HessianFactor, CombineAndEliminate) +TEST_UNSAFE(HessianFactor, CombineAndEliminate) { Matrix A01 = Matrix_(3,3, 1.0, 0.0, 0.0, @@ -200,17 +206,17 @@ TEST(HessianFactor, CombineAndEliminate) JacobianFactor expectedFactor(0, A0, 1, A1, b, noiseModel::Diagonal::Sigmas(sigmas, true)); // perform elimination - GaussianBayesNet expectedBN(*expectedFactor.eliminate(1)); + GaussianConditional::shared_ptr expectedBN = expectedFactor.eliminate(1); // create expected Hessian after elimination HessianFactor expectedCholeskyFactor(expectedFactor); - GaussianFactorGraph::EliminationResult actualCholesky = EliminateCholesky( + GaussianFactorGraph::EliminationResult actualCholesky = EliminateLDL( *gfg.convertCastFactors > (), 1); HessianFactor::shared_ptr actualFactor = boost::dynamic_pointer_cast< HessianFactor>(actualCholesky.second); - EXPECT(assert_equal(expectedBN, *actualCholesky.first, 1e-6)); + EXPECT(assert_equal(*expectedBN, *actualCholesky.first, 1e-6)); EXPECT(assert_equal(expectedCholeskyFactor, *actualFactor, 1e-6)); } @@ -256,7 +262,7 @@ TEST(HessianFactor, eliminate2 ) FactorGraph combinedLFG_Chol; combinedLFG_Chol.push_back(combinedLF_Chol); - GaussianFactorGraph::EliminationResult actual_Chol = EliminateCholesky( + GaussianFactorGraph::EliminationResult actual_Chol = EliminateLDL( combinedLFG_Chol, 1); HessianFactor::shared_ptr actualFactor = boost::dynamic_pointer_cast< HessianFactor>(actual_Chol.second); @@ -273,7 +279,7 @@ TEST(HessianFactor, eliminate2 ) )/oldSigma; Vector d = Vector_(2,0.2,-0.14)/oldSigma; GaussianConditional expectedCG(0,d,R11,1,S12,ones(2)); - EXPECT(assert_equal(expectedCG,*actual_Chol.first->front(),1e-4)); + EXPECT(assert_equal(expectedCG,*actual_Chol.first,1e-4)); // the expected linear factor double sigma = 0.2236; @@ -355,20 +361,72 @@ TEST(HessianFactor, eliminateUnsorted) { // unsortedGraph.push_back(factor1); unsortedGraph.push_back(unsorted_factor2); - GaussianBayesNet::shared_ptr expected_bn; + GaussianConditional::shared_ptr expected_bn; GaussianFactor::shared_ptr expected_factor; boost::tie(expected_bn, expected_factor) = EliminatePreferCholesky(sortedGraph, 1); - GaussianBayesNet::shared_ptr actual_bn; + GaussianConditional::shared_ptr actual_bn; GaussianFactor::shared_ptr actual_factor; boost::tie(actual_bn, actual_factor) = EliminatePreferCholesky(unsortedGraph, 1); EXPECT(assert_equal(*expected_bn, *actual_bn, 1e-10)); EXPECT(assert_equal(*expected_factor, *actual_factor, 1e-10)); + + // Test LDL + boost::tie(expected_bn, expected_factor) = + EliminatePreferLDL(sortedGraph, 1); + + boost::tie(actual_bn, actual_factor) = + EliminatePreferLDL(unsortedGraph, 1); + + EXPECT(assert_equal(*expected_bn, *actual_bn, 1e-10)); + EXPECT(assert_equal(*expected_factor, *actual_factor, 1e-10)); } +/* ************************************************************************* */ +TEST(HessianFactor, combine) { + + // update the information matrix with a single jacobian factor + Matrix A0 = Matrix_(2, 2, + 11.1803399, 0.0, + 0.0, 11.1803399); + Matrix A1 = Matrix_(2, 2, + -2.23606798, 0.0, + 0.0, -2.23606798); + Matrix A2 = Matrix_(2, 2, + -8.94427191, 0.0, + 0.0, -8.94427191); + Vector b = Vector_(2, 2.23606798,-1.56524758); + SharedDiagonal model = noiseModel::Diagonal::Sigmas(ones(2)); + 
GaussianFactor::shared_ptr f(new JacobianFactor(0, A0, 1, A1, 2, A2, b, model)); + FactorGraph factors; + factors.push_back(f); + + vector dimensions; + dimensions += 2, 2, 2, 1; + + Scatter scatter; + scatter += make_pair(0, SlotEntry(0, 2)); + scatter += make_pair(1, SlotEntry(1, 2)); + scatter += make_pair(2, SlotEntry(2, 2)); + + // Form Ab' * Ab + HessianFactor actual(factors, dimensions, scatter); + + Matrix expected = Matrix_(7, 7, + 125.0000, 0.0, -25.0000, 0.0, -100.0000, 0.0, 25.0000, + 0.0, 125.0000, 0.0, -25.0000, 0.0, -100.0000, -17.5000, + -25.0000, 0.0, 5.0000, 0.0, 20.0000, 0.0, -5.0000, + 0.0, -25.0000, 0.0, 5.0000, 0.0, 20.0000, 3.5000, + -100.0000, 0.0, 20.0000, 0.0, 80.0000, 0.0, -20.0000, + 0.0, -100.0000, 0.0, 20.0000, 0.0, 80.0000, 14.0000, + 25.0000, -17.5000, -5.0000, 3.5000, -20.0000, 14.0000, 7.4500); + EXPECT(assert_equal(Matrix(expected.triangularView()), + Matrix(actual.matrix_.triangularView()), tol)); + +} /* ************************************************************************* */ int main() { TestResult tr; return TestRegistry::runAllTests(tr);} /* ************************************************************************* */ diff --git a/gtsam/linear/tests/testNoiseModel.cpp b/gtsam/linear/tests/testNoiseModel.cpp index ab3571803..68386e519 100644 --- a/gtsam/linear/tests/testNoiseModel.cpp +++ b/gtsam/linear/tests/testNoiseModel.cpp @@ -230,13 +230,13 @@ TEST(NoiseModel, Cholesky) { SharedDiagonal expected = noiseModel::Unit::Create(4); Matrix Ab = exampleQR::Ab; // otherwise overwritten ! - SharedDiagonal actual = exampleQR::diagonal->Cholesky(Ab, 4); // ASSERTION FAILURE: access out of bounds + SharedDiagonal actual = exampleQR::diagonal->Cholesky(Ab, 4); EXPECT(assert_equal(*expected,*actual)); // Ab was modified in place !!! 
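Aside on the test above: the in-place factorization of the whitened augmented system [A b] leaves [R d] in the upper triangle of Ab, which is why only the triangular view is compared. A stand-alone Eigen sketch of the same idea, including the Sigma = (R^T R)^{-1} marginal covariance used elsewhere in this patch (made-up numbers, not the GTSAM API):

#include <iostream>
#include <Eigen/Dense>

int main() {
  // Whitened augmented system [A | b]: 4 measurement rows, 3 unknowns.
  Eigen::MatrixXd Ab(4, 4);
  Ab << 1, 2, 0, 1,
        0, 1, 1, 2,
        1, 0, 1, 0,
        2, 1, 0, 1;

  // Householder QR; the meaningful result is the upper triangle, [R d].
  Eigen::HouseholderQR<Eigen::MatrixXd> qr(Ab);
  Eigen::MatrixXd Rd = qr.matrixQR().triangularView<Eigen::Upper>();
  std::cout << "[R d]:\n" << Rd << "\n";

  // Marginal covariance of the eliminated block: Sigma = (R^T R)^{-1}.
  Eigen::MatrixXd R = Rd.topLeftCorner(3, 3);
  std::cout << "Sigma:\n" << (R.transpose() * R).inverse() << "\n";
  return 0;
}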
Matrix actualRd = Ab.block(0, 0, actual->dim(), Ab.cols()).triangularView(); // ublas::project(ublas::triangular_adaptor(Ab), // ublas::range(0,actual->dim()), ublas::range(0, Ab.cols())); - EXPECT(linear_dependent(exampleQR::Rd,actualRd,1e-4)); // FIXME: enable test + EXPECT(linear_dependent(exampleQR::Rd,actualRd,1e-4)); } /* ************************************************************************* */ diff --git a/gtsam/linear/tests/testVectorValues.cpp b/gtsam/linear/tests/testVectorValues.cpp index 2b7e66540..0c7da0ca4 100644 --- a/gtsam/linear/tests/testVectorValues.cpp +++ b/gtsam/linear/tests/testVectorValues.cpp @@ -16,15 +16,16 @@ * @created Sep 16, 2010 */ -#include +#include #include #include #include -using namespace gtsam; using namespace std; +using namespace boost::assign; +using namespace gtsam; /* ************************************************************************* */ TEST(VectorValues, constructor) { @@ -52,6 +53,9 @@ TEST(VectorValues, standard) { EXPECT_LONGS_EQUAL(3, combined.size()); EXPECT_LONGS_EQUAL(9, combined.dim()); EXPECT_LONGS_EQUAL(9, combined.dimCapacity()); + EXPECT_LONGS_EQUAL(3, combined.dim(0)); + EXPECT_LONGS_EQUAL(2, combined.dim(1)); + EXPECT_LONGS_EQUAL(4, combined.dim(2)); combined[0] = v1; combined[1] = v2; combined[2] = v3; @@ -179,6 +183,53 @@ TEST(VectorValues, makeZero ) { EXPECT(assert_equal(zero(14), values.vector())); } +/* ************************************************************************* */ +TEST(VectorValues, range ) { + VectorValues v(7,2); + v.makeZero(); + v[1] = Vector_(2, 1.0, 2.0); + v[2] = Vector_(2, 3.0, 4.0); + v[3] = Vector_(2, 5.0, 6.0); + + vector idx1, idx2; + idx1 += 0, 1, 2, 3, 4, 5, 6; // ordered + idx2 += 1, 0, 2; // unordered + + // test access + + Vector actRange1 = v.range(idx1.begin(), idx1.begin() + 2); + EXPECT(assert_equal(Vector_(4, 0.0, 0.0, 1.0, 2.0), actRange1)); + + Vector actRange2 = v.range(idx1.begin()+1, idx1.begin()+2); + EXPECT(assert_equal(Vector_(2, 1.0, 2.0), actRange2)); + + Vector actRange3 = v.range(idx2.begin(), idx2.end()); + EXPECT(assert_equal(Vector_(6, 1.0, 2.0, 0.0, 0.0, 3.0, 4.0), actRange3)); + + // test setting values + VectorValues act1 = v, act2 = v, act3 = v; + + Vector a = Vector_(2, 0.1, 0.2); + VectorValues exp1 = act1; exp1[0] = a; + act1.range(idx1.begin(), idx1.begin()+1, a); + EXPECT(assert_equal(exp1, act1)); + + Vector bc = Vector_(4, 0.1, 0.2, 0.3, 0.4); + VectorValues exp2 = act2; + exp2[2] = Vector_(2, 0.1, 0.2); + exp2[3] = Vector_(2, 0.3, 0.4); + act2.range(idx1.begin()+2, idx1.begin()+4, bc); + EXPECT(assert_equal(exp2, act2)); + + Vector def = Vector_(6, 0.1, 0.2, 0.3, 0.4, 0.5, 0.6); + VectorValues exp3 = act3; + exp3[1] = Vector_(2, 0.1, 0.2); + exp3[0] = Vector_(2, 0.3, 0.4); + exp3[2] = Vector_(2, 0.5, 0.6); + act3.range(idx2.begin(), idx2.end(), def); + EXPECT(assert_equal(exp3, act3)); +} + /* ************************************************************************* */ int main() { TestResult tr; return TestRegistry::runAllTests(tr); diff --git a/gtsam/nonlinear/Key.h b/gtsam/nonlinear/Key.h index ab54d471f..d7f501c93 100644 --- a/gtsam/nonlinear/Key.h +++ b/gtsam/nonlinear/Key.h @@ -19,7 +19,6 @@ #pragma once -#include #include #include #include diff --git a/gtsam/nonlinear/LieValues-inl.h b/gtsam/nonlinear/LieValues-inl.h index f2410b70a..cd67fa073 100644 --- a/gtsam/nonlinear/LieValues-inl.h +++ b/gtsam/nonlinear/LieValues-inl.h @@ -16,12 +16,13 @@ #pragma once -#include -#include #include #include #include +#include +#include + #include #include 
#include @@ -29,12 +30,7 @@ #include -#define INSTANTIATE_LIE_VALUES(J) \ - /*INSTANTIATE_LIE(T);*/ \ - /*template LieValues expmap(const LieValues&, const VectorValues&);*/ \ - /*template LieValues expmap(const LieValues&, const Vector&);*/ \ - /*template VectorValues logmap(const LieValues&, const LieValues&);*/ \ - template class LieValues; +#define INSTANTIATE_LIE_VALUES(J) template class LieValues; using namespace std; diff --git a/gtsam/nonlinear/LieValues.h b/gtsam/nonlinear/LieValues.h index 7c317a641..0cd65f273 100644 --- a/gtsam/nonlinear/LieValues.h +++ b/gtsam/nonlinear/LieValues.h @@ -24,7 +24,6 @@ #pragma once -#include #include #include diff --git a/gtsam/nonlinear/NonlinearConstraint.h b/gtsam/nonlinear/NonlinearConstraint.h index 3e581f733..d7b8586a6 100644 --- a/gtsam/nonlinear/NonlinearConstraint.h +++ b/gtsam/nonlinear/NonlinearConstraint.h @@ -18,9 +18,7 @@ #pragma once -#include #include -#include #include namespace gtsam { @@ -32,8 +30,6 @@ namespace gtsam { * nonzero constraint functions will still be active - inequality * constraints should be sure to force to actual zero) * - * NOTE: inequality constraints removed for now - * * Nonlinear constraints evaluate their error as a part of a quadratic * error function: ||h(x)-z||^2 + mu * ||c(x)|| where mu is a gain * on the constraint function that should be made high enough to be diff --git a/gtsam/nonlinear/NonlinearEquality.h b/gtsam/nonlinear/NonlinearEquality.h index eb63b7138..2fa05e2a4 100644 --- a/gtsam/nonlinear/NonlinearEquality.h +++ b/gtsam/nonlinear/NonlinearEquality.h @@ -20,7 +20,6 @@ #include #include -#include #include namespace gtsam { @@ -134,7 +133,6 @@ namespace gtsam { const T& xj = x[this->key_]; Matrix A; Vector b = evaluateError(xj, A); - // TODO pass unwhitened + noise model to Gaussian factor SharedDiagonal model = noiseModel::Constrained::All(b.size()); return GaussianFactor::shared_ptr(new JacobianFactor(ordering[this->key_], A, b, model)); } diff --git a/gtsam/nonlinear/NonlinearFactor.h b/gtsam/nonlinear/NonlinearFactor.h index 4d26bcc63..b3fe82c8e 100644 --- a/gtsam/nonlinear/NonlinearFactor.h +++ b/gtsam/nonlinear/NonlinearFactor.h @@ -23,15 +23,11 @@ #include #include -#include #include #include #include -#include -#include #include -#include #include #include diff --git a/gtsam/nonlinear/NonlinearFactorGraph-inl.h b/gtsam/nonlinear/NonlinearFactorGraph-inl.h index 638c2da8b..236cc7ec9 100644 --- a/gtsam/nonlinear/NonlinearFactorGraph-inl.h +++ b/gtsam/nonlinear/NonlinearFactorGraph-inl.h @@ -21,7 +21,6 @@ #include #include -#include #include #include #include diff --git a/gtsam/nonlinear/NonlinearFactorGraph.h b/gtsam/nonlinear/NonlinearFactorGraph.h index 8179ee6cf..68b5103b7 100644 --- a/gtsam/nonlinear/NonlinearFactorGraph.h +++ b/gtsam/nonlinear/NonlinearFactorGraph.h @@ -24,7 +24,6 @@ #include #include #include -#include namespace gtsam { diff --git a/gtsam/nonlinear/NonlinearISAM-inl.h b/gtsam/nonlinear/NonlinearISAM-inl.h index 9cdff6fb8..29d79ce8b 100644 --- a/gtsam/nonlinear/NonlinearISAM-inl.h +++ b/gtsam/nonlinear/NonlinearISAM-inl.h @@ -9,7 +9,7 @@ * -------------------------------------------------------------------------- */ -/* +/** * NonlinearISAM-inl.h * * Created on: Jan 19, 2010 @@ -18,12 +18,16 @@ #pragma once +#include + +#include + #include #include #include #include -#include +using namespace std; using namespace gtsam; /* ************************************************************************* */ @@ -98,7 +102,16 @@ Values NonlinearISAM::estimate() const { /* 
************************************************************************* */ template Matrix NonlinearISAM::marginalCovariance(const Symbol& key) const { - Matrix covariance; Vector mean; - boost::tie(mean, covariance) = isam_.marginal(ordering_[key]); - return covariance; + return isam_.marginalCovariance(ordering_[key]); +} + +/* ************************************************************************* */ +template +void NonlinearISAM::print(const std::string& s) const { + cout << "ISAM - " << s << ":" << endl; + cout << " ReorderInterval: " << reorderInterval_ << " Current Count: " << reorderCounter_ << endl; + isam_.print("GaussianISAM"); + linPoint_.print("Linearization Point"); + ordering_.print("System Ordering"); + factors_.print("Nonlinear Graph"); } diff --git a/gtsam/nonlinear/NonlinearISAM.h b/gtsam/nonlinear/NonlinearISAM.h index 3178cba16..76462c5f9 100644 --- a/gtsam/nonlinear/NonlinearISAM.h +++ b/gtsam/nonlinear/NonlinearISAM.h @@ -19,7 +19,6 @@ #pragma once #include -#include #include #include @@ -97,6 +96,9 @@ public: /** get counters */ int reorderInterval() const { return reorderInterval_; } int reorderCounter() const { return reorderCounter_; } + + /** prints out all contents of the system */ + void print(const std::string& s="") const; }; } // \namespace gtsam diff --git a/gtsam/nonlinear/NonlinearOptimization-inl.h b/gtsam/nonlinear/NonlinearOptimization-inl.h index 269cefba2..5f6afaecd 100644 --- a/gtsam/nonlinear/NonlinearOptimization-inl.h +++ b/gtsam/nonlinear/NonlinearOptimization-inl.h @@ -19,8 +19,6 @@ #pragma once -#include - #include #include #include @@ -35,7 +33,8 @@ namespace gtsam { * The Elimination solver */ template - T optimizeSequential(const G& graph, const T& initialEstimate, const NonlinearOptimizationParameters& parameters) { + T optimizeSequential(const G& graph, const T& initialEstimate, + const NonlinearOptimizationParameters& parameters, bool useLM) { // Use a variable ordering from COLAMD Ordering::shared_ptr ordering = graph.orderingCOLAMD(initialEstimate); @@ -43,18 +42,22 @@ namespace gtsam { // initial optimization state is the same in both cases tested typedef NonlinearOptimizer Optimizer; Optimizer optimizer(boost::make_shared(graph), - boost::make_shared(initialEstimate), ordering, boost::make_shared(parameters)); + boost::make_shared(initialEstimate), ordering, + boost::make_shared(parameters)); - // Levenberg-Marquardt - Optimizer result = optimizer.levenbergMarquardt(); - return *result.values(); + // choose nonlinear optimization method + if (useLM) + return *optimizer.levenbergMarquardt().values(); + else + return *optimizer.gaussNewton().values(); } /** * The multifrontal solver */ template - T optimizeMultiFrontal(const G& graph, const T& initialEstimate, const NonlinearOptimizationParameters& parameters) { + T optimizeMultiFrontal(const G& graph, const T& initialEstimate, + const NonlinearOptimizationParameters& parameters, bool useLM) { // Use a variable ordering from COLAMD Ordering::shared_ptr ordering = graph.orderingCOLAMD(initialEstimate); @@ -62,24 +65,30 @@ namespace gtsam { // initial optimization state is the same in both cases tested typedef NonlinearOptimizer Optimizer; Optimizer optimizer(boost::make_shared(graph), - boost::make_shared(initialEstimate), ordering, boost::make_shared(parameters)); + boost::make_shared(initialEstimate), ordering, + boost::make_shared(parameters)); - // Levenberg-Marquardt - Optimizer result = optimizer.levenbergMarquardt(); - return *result.values(); + // choose nonlinear 
optimization method + if (useLM) + return *optimizer.levenbergMarquardt().values(); + else + return *optimizer.gaussNewton().values(); } /** - * The sparse preconditioned conjucate gradient solver + * The sparse preconditioned conjugate gradient solver */ template - T optimizeSPCG(const G& graph, const T& initialEstimate, const NonlinearOptimizationParameters& parameters = NonlinearOptimizationParameters()) { + T optimizeSPCG(const G& graph, const T& initialEstimate, + const NonlinearOptimizationParameters& parameters = NonlinearOptimizationParameters(), + bool useLM = true) { // initial optimization state is the same in both cases tested typedef SubgraphSolver Solver; typedef boost::shared_ptr shared_Solver; typedef NonlinearOptimizer SPCGOptimizer; - shared_Solver solver = boost::make_shared(graph, initialEstimate, IterativeOptimizationParameters()); + shared_Solver solver = boost::make_shared( + graph, initialEstimate, IterativeOptimizationParameters()); SPCGOptimizer optimizer( boost::make_shared(graph), boost::make_shared(initialEstimate), @@ -87,9 +96,11 @@ namespace gtsam { solver, boost::make_shared(parameters)); - // Levenberg-Marquardt - SPCGOptimizer result = optimizer.levenbergMarquardt(); - return *result.values(); + // choose nonlinear optimization method + if (useLM) + return *optimizer.levenbergMarquardt().values(); + else + return *optimizer.gaussNewton().values(); } /** @@ -97,13 +108,18 @@ namespace gtsam { */ template T optimize(const G& graph, const T& initialEstimate, const NonlinearOptimizationParameters& parameters, - const enum LinearSolver& solver) { + const LinearSolver& solver, + const NonlinearOptimizationMethod& nonlinear_method) { switch (solver) { case SEQUENTIAL: - return optimizeSequential(graph, initialEstimate, parameters); + return optimizeSequential(graph, initialEstimate, parameters, + nonlinear_method == LM); case MULTIFRONTAL: - return optimizeMultiFrontal(graph, initialEstimate, parameters); + return optimizeMultiFrontal(graph, initialEstimate, parameters, + nonlinear_method == LM); case SPCG: +// return optimizeSPCG(graph, initialEstimate, parameters, +// nonlinear_method == LM); throw runtime_error("optimize: SPCG not supported yet due to the specific pose constraint"); } throw runtime_error("optimize: undefined solver"); diff --git a/gtsam/nonlinear/NonlinearOptimization.h b/gtsam/nonlinear/NonlinearOptimization.h index 0209819a6..448a66338 100644 --- a/gtsam/nonlinear/NonlinearOptimization.h +++ b/gtsam/nonlinear/NonlinearOptimization.h @@ -26,19 +26,19 @@ namespace gtsam { /** * all the nonlinear optimization methods */ - enum NonlinearOptimizationMethod { + typedef enum { LM, // Levenberg Marquardt GN // Gaussian-Newton - }; + } NonlinearOptimizationMethod; /** * all the linear solver types */ - enum LinearSolver{ + typedef enum { SEQUENTIAL, // Sequential elimination MULTIFRONTAL, // Multi-frontal elimination SPCG, // Subgraph Preconditioned Conjugate Gradient - }; + } LinearSolver; /** * optimization that returns the values @@ -46,7 +46,8 @@ namespace gtsam { template T optimize(const G& graph, const T& initialEstimate, const NonlinearOptimizationParameters& parameters = NonlinearOptimizationParameters(), - const enum LinearSolver& solver = MULTIFRONTAL); + const LinearSolver& solver = MULTIFRONTAL, + const NonlinearOptimizationMethod& nonlinear_method = LM); } diff --git a/gtsam/nonlinear/NonlinearOptimizationParameters.h b/gtsam/nonlinear/NonlinearOptimizationParameters.h index 9e3c19bfc..602f2f79f 100644 --- 
a/gtsam/nonlinear/NonlinearOptimizationParameters.h +++ b/gtsam/nonlinear/NonlinearOptimizationParameters.h @@ -1,9 +1,19 @@ -/* - * NonlinearOptimizationParameters.h - * - * Created on: Oct 14, 2010 - * Author: nikai - * Description: +/* ---------------------------------------------------------------------------- + + * GTSAM Copyright 2010, Georgia Tech Research Corporation, + * Atlanta, Georgia 30332-0415 + * All Rights Reserved + * Authors: Frank Dellaert, et al. (see THANKS for the full author list) + + * See LICENSE for the license information + + * -------------------------------------------------------------------------- */ + +/** + * @file NonlinearOptimizationParameters.h + * @date Oct 14, 2010 + * @author Kai Ni + * @brief structure to store parameters for nonlinear optimization */ #pragma once @@ -13,94 +23,105 @@ namespace gtsam { - // a container for all related parameters - struct NonlinearOptimizationParameters { +// a container for all related parameters +struct NonlinearOptimizationParameters { - typedef boost::shared_ptr shared_ptr ; - typedef NonlinearOptimizationParameters This; - typedef boost::shared_ptr sharedThis ; + typedef boost::shared_ptr shared_ptr ; + typedef NonlinearOptimizationParameters This; + typedef boost::shared_ptr sharedThis ; + typedef enum { + SILENT, + ERROR, + LAMBDA, + TRYLAMBDA, + VALUES, + DELTA, + TRYCONFIG, + TRYDELTA, + LINEAR, + DAMPED + } verbosityLevel; - typedef enum { - SILENT, - ERROR, - LAMBDA, - TRYLAMBDA, - VALUES, - DELTA, - TRYCONFIG, - TRYDELTA, - LINEAR, - DAMPED - } verbosityLevel; + typedef enum { + FAST, + BOUNDED, + CAUTIOUS + } LambdaMode; - typedef enum { - FAST, - BOUNDED, - CAUTIOUS - } LambdaMode; + // thresholds + double absDecrease_; /* threshold for the absolute decrease per iteration */ + double relDecrease_; /* threshold for the relative decrease per iteration */ + double sumError_; /* threshold for the sum of error */ + size_t maxIterations_ ; + double lambda_ ; + double lambdaFactor_ ; - double absDecrease_; /* threshold for the absolute decrease per iteration */ - double relDecrease_; /* threshold for the relative decrease per iteration */ - double sumError_; /* threshold for the sum of error */ - size_t maxIterations_ ; - double lambda_ ; - double lambdaFactor_ ; - verbosityLevel verbosity_; - LambdaMode lambdaMode_; + // flags and mode switches + verbosityLevel verbosity_; + LambdaMode lambdaMode_; + bool useQR_; /// if true, solve whole system with QR, otherwise use LDL when possible - NonlinearOptimizationParameters(): absDecrease_(1e-6), relDecrease_(1e-6), sumError_(0.0), - maxIterations_(100), lambda_(1e-5), lambdaFactor_(10.0), verbosity_(SILENT), lambdaMode_(BOUNDED){} + NonlinearOptimizationParameters(): absDecrease_(1e-6), relDecrease_(1e-6), sumError_(0.0), + maxIterations_(100), lambda_(1e-5), lambdaFactor_(10.0), verbosity_(SILENT), + lambdaMode_(BOUNDED), useQR_(true) {} - NonlinearOptimizationParameters(double absDecrease, double relDecrease, double sumError, - int iIters = 100, double lambda = 1e-5, double lambdaFactor = 10, verbosityLevel v = SILENT, LambdaMode lambdaMode = BOUNDED) - :absDecrease_(absDecrease), relDecrease_(relDecrease), sumError_(sumError), - maxIterations_(iIters), lambda_(lambda), lambdaFactor_(lambdaFactor), verbosity_(v), lambdaMode_(lambdaMode){} + NonlinearOptimizationParameters(double absDecrease, double relDecrease, double sumError, + int iIters = 100, double lambda = 1e-5, double lambdaFactor = 10, + verbosityLevel v = SILENT, LambdaMode lambdaMode = BOUNDED, bool 
useQR = true) + :absDecrease_(absDecrease), relDecrease_(relDecrease), sumError_(sumError), + maxIterations_(iIters), lambda_(lambda), lambdaFactor_(lambdaFactor), verbosity_(v), + lambdaMode_(lambdaMode), useQR_(useQR) {} - NonlinearOptimizationParameters(const NonlinearOptimizationParameters ¶meters): - absDecrease_(parameters.absDecrease_), - relDecrease_(parameters.relDecrease_), - sumError_(parameters.sumError_), - maxIterations_(parameters.maxIterations_), - lambda_(parameters.lambda_), - lambdaFactor_(parameters.lambdaFactor_), - verbosity_(parameters.verbosity_), lambdaMode_(parameters.lambdaMode_){} + NonlinearOptimizationParameters(const NonlinearOptimizationParameters ¶meters): + absDecrease_(parameters.absDecrease_), + relDecrease_(parameters.relDecrease_), + sumError_(parameters.sumError_), + maxIterations_(parameters.maxIterations_), + lambda_(parameters.lambda_), + lambdaFactor_(parameters.lambdaFactor_), + verbosity_(parameters.verbosity_), + lambdaMode_(parameters.lambdaMode_), + useQR_(parameters.useQR_) {} + /* a copy of old instance except some parameters */ + sharedThis newLambda_(double lambda) const { + sharedThis ptr (boost::make_shared(*this)) ; + ptr->lambda_ = lambda ; + return ptr ; + } - /* a copy of old instance except some parameters */ - sharedThis newLambda_(double lambda) const { - sharedThis ptr (boost::make_shared(*this)) ; - ptr->lambda_ = lambda ; - return ptr ; - } + /* new instance with default parameters except some partially assigned parameters */ + static sharedThis newVerbosity(verbosityLevel verbosity) { + sharedThis ptr (boost::make_shared()) ; + ptr->verbosity_ = verbosity ; + return ptr ; + } + static sharedThis newMaxIterations(int maxIterations) { + sharedThis ptr (boost::make_shared()) ; + ptr->maxIterations_ = maxIterations ; + return ptr ; + } - // static - /* new instance with default parameters except some partially assigned parameters */ - static sharedThis newVerbosity(verbosityLevel verbosity) { - sharedThis ptr (boost::make_shared()) ; - ptr->verbosity_ = verbosity ; - return ptr ; - } + static sharedThis newLambda(double lambda) { + sharedThis ptr (boost::make_shared()); + ptr->lambda_ = lambda ; + return ptr ; + } - static sharedThis newMaxIterations(int maxIterations) { - sharedThis ptr (boost::make_shared()) ; - ptr->maxIterations_ = maxIterations ; - return ptr ; - } + static sharedThis newDrecreaseThresholds(double absDecrease, double relDecrease) { + sharedThis ptr (boost::make_shared()); + ptr->absDecrease_ = absDecrease; + ptr->relDecrease_ = relDecrease; + return ptr ; + } - static sharedThis newLambda(double lambda) { - sharedThis ptr (boost::make_shared()); - ptr->lambda_ = lambda ; - return ptr ; - } - - static sharedThis newDrecreaseThresholds(double absDecrease, double relDecrease) { - sharedThis ptr (boost::make_shared()); - ptr->absDecrease_ = absDecrease; - ptr->relDecrease_ = relDecrease; - return ptr ; - } - }; + static sharedThis newFactorization(bool useQR) { + sharedThis ptr (boost::make_shared()); + ptr->useQR_= useQR; + return ptr ; + } +}; } diff --git a/gtsam/nonlinear/NonlinearOptimizer-inl.h b/gtsam/nonlinear/NonlinearOptimizer-inl.h index 7ce56ab21..5409efd43 100644 --- a/gtsam/nonlinear/NonlinearOptimizer-inl.h +++ b/gtsam/nonlinear/NonlinearOptimizer-inl.h @@ -168,7 +168,8 @@ namespace gtsam { // solve // FIXME: remove spcg specific code if (spcg_solver_) spcg_solver_->replaceFactors(damped); - shared_solver solver = (spcg_solver_) ? 
spcg_solver_ : shared_solver(new S(damped, structure_)); + shared_solver solver = (spcg_solver_) ? spcg_solver_ : shared_solver( + new S(damped, structure_, parameters_->useQR_)); VectorValues delta = *solver->optimize(); if (verbosity >= Parameters::TRYDELTA) delta.print("delta"); diff --git a/gtsam/nonlinear/NonlinearOptimizer.cpp b/gtsam/nonlinear/NonlinearOptimizer.cpp index f56570351..31c3e5fa4 100644 --- a/gtsam/nonlinear/NonlinearOptimizer.cpp +++ b/gtsam/nonlinear/NonlinearOptimizer.cpp @@ -11,7 +11,7 @@ /** * NonlinearOptimizer.cpp - * @brief: Dummy c++ file to have *something* in lin-nonlinear . + * @brief: Convergence functions not dependent on graph types * @Author: Frank Dellaert * Created on: Jul 17, 2010 */ diff --git a/gtsam/nonlinear/NonlinearOptimizer.h b/gtsam/nonlinear/NonlinearOptimizer.h index b8a367a48..aec462889 100644 --- a/gtsam/nonlinear/NonlinearOptimizer.h +++ b/gtsam/nonlinear/NonlinearOptimizer.h @@ -16,13 +16,8 @@ * Created on: Sep 7, 2009 */ -#ifndef NONLINEAROPTIMIZER_H_ -#define NONLINEAROPTIMIZER_H_ +#pragma once -#include -#include -#include -#include #include #include #include @@ -60,8 +55,6 @@ public: * one optimizes the linearized system using various methods. * * To use the optimizer in code, include in your cpp file - * - * */ template class NonlinearOptimizer { @@ -216,13 +209,13 @@ public: * NOTE: this will actually solve a linear system */ shared_solver createSolver() const { - return shared_solver(new GS(linearize(), structure_)); + return shared_solver(new GS(linearize(), structure_, parameters_->useQR_)); } /** * Return mean and covariance on a single variable */ - std::pair marginalCovariance(Symbol j) const { + Matrix marginalCovariance(Symbol j) const { return createSolver()->marginalCovariance((*ordering_)[j]); } @@ -367,5 +360,3 @@ bool check_convergence ( double currentError, double newError); } // gtsam - -#endif /* NONLINEAROPTIMIZER_H_ */ diff --git a/gtsam/nonlinear/Ordering.h b/gtsam/nonlinear/Ordering.h index 3e2ac28b6..1be51df3e 100644 --- a/gtsam/nonlinear/Ordering.h +++ b/gtsam/nonlinear/Ordering.h @@ -18,13 +18,12 @@ #pragma once -#include +#include #include #include #include #include -#include #include #include diff --git a/gtsam/slam/BearingFactor.h b/gtsam/slam/BearingFactor.h index d87659cfe..dbdc4111d 100644 --- a/gtsam/slam/BearingFactor.h +++ b/gtsam/slam/BearingFactor.h @@ -16,9 +16,7 @@ #pragma once -#include #include -#include #include namespace gtsam { diff --git a/gtsam/slam/BetweenFactor.h b/gtsam/slam/BetweenFactor.h index c4db0cc48..b76ab4b80 100644 --- a/gtsam/slam/BetweenFactor.h +++ b/gtsam/slam/BetweenFactor.h @@ -20,7 +20,6 @@ #include #include #include -#include namespace gtsam { diff --git a/gtsam/slam/GeneralSFMFactor.h b/gtsam/slam/GeneralSFMFactor.h index aa1d79e45..faae64d93 100644 --- a/gtsam/slam/GeneralSFMFactor.h +++ b/gtsam/slam/GeneralSFMFactor.h @@ -8,7 +8,6 @@ #pragma once -#include #include diff --git a/gtsam/slam/Makefile.am b/gtsam/slam/Makefile.am index 9f9f1270d..b58686d70 100644 --- a/gtsam/slam/Makefile.am +++ b/gtsam/slam/Makefile.am @@ -32,7 +32,7 @@ headers += BetweenConstraint.h BoundingConstraint.h # 2D Pose SLAM sources += pose2SLAM.cpp dataset.cpp -check_PROGRAMS += tests/testPose2SLAM tests/testPose2SLAMwSPCG +check_PROGRAMS += tests/testPose2SLAM # 2D SLAM using Bearing and Range headers += BearingFactor.h RangeFactor.h BearingRangeFactor.h diff --git a/gtsam/slam/StereoFactor.h b/gtsam/slam/StereoFactor.h index d7c120a7e..5b0c1cce6 100644 --- a/gtsam/slam/StereoFactor.h +++ 
b/gtsam/slam/StereoFactor.h @@ -18,7 +18,6 @@ #pragma once -#include #include namespace gtsam { diff --git a/gtsam/slam/dataset.h b/gtsam/slam/dataset.h index 023d8c443..7bf355893 100644 --- a/gtsam/slam/dataset.h +++ b/gtsam/slam/dataset.h @@ -21,7 +21,6 @@ #include -#include #include #include diff --git a/gtsam/slam/saveGraph.cpp b/gtsam/slam/saveGraph.cpp deleted file mode 100644 index ac390b1e5..000000000 --- a/gtsam/slam/saveGraph.cpp +++ /dev/null @@ -1,67 +0,0 @@ -/* ---------------------------------------------------------------------------- - - * GTSAM Copyright 2010, Georgia Tech Research Corporation, - * Atlanta, Georgia 30332-0415 - * All Rights Reserved - * Authors: Frank Dellaert, et al. (see THANKS for the full author list) - - * See LICENSE for the license information - - * -------------------------------------------------------------------------- */ - -/* - * saveGraph.cpp - * Author: Richard Roberts - */ - -#include -#include -#include -#include -#include -#include -#include -#include - -using namespace std; - -namespace gtsam { - - INSTANTIATE_LIE_VALUES(Symbol, Point2) - - /* ************************************************************************* */ - void saveGraph(const SymbolicFactorGraph& fg, const SymbolicValues& config, const std::string& s) { - - Symbol key; - Point2 pt; - float scale = 100; - - string dotfile = s + ".dot"; - ofstream of(dotfile.c_str()); - of << "graph G{" << endl; - of << "bgcolor=\"transparent\";" << endl; - - BOOST_FOREACH(boost::tie(key, pt), config){ - of << (string)key << "[label=\"" << (string)key << "\"][pos=\"" << pt.x()*scale << "," << pt.y()*scale << "\"];" << endl; - } - - int index = 0; - BOOST_FOREACH(const SymbolicFactorGraph::sharedFactor& factor, fg) { - index++; - Point2 center; - BOOST_FOREACH(const Symbol& key, factor->keys()) - center = center + config[key]; - center = Point2(center.x() / factor->keys().size(), center.y() / factor->keys().size()); - of << "f" << index << "[pos=\"" << center.x()*scale << "," << center.y()*scale << "\"][shape=\"point\"];" << endl; - BOOST_FOREACH(const Symbol& key, factor->keys()) - of << "f" << index << "--" << (string)key << endl; - } - of<<"}"; - of.close(); - - string cmd = boost::str(boost::format("neato -s -n -Tpdf %s -o %s.pdf") % dotfile % s); - system(cmd.c_str()); - } - - /* ************************************************************************* */ -} diff --git a/gtsam/slam/saveGraph.h b/gtsam/slam/saveGraph.h deleted file mode 100644 index f690edbf9..000000000 --- a/gtsam/slam/saveGraph.h +++ /dev/null @@ -1,36 +0,0 @@ -/* ---------------------------------------------------------------------------- - - * GTSAM Copyright 2010, Georgia Tech Research Corporation, - * Atlanta, Georgia 30332-0415 - * All Rights Reserved - * Authors: Frank Dellaert, et al. 
(see THANKS for the full author list) - - * See LICENSE for the license information - - * -------------------------------------------------------------------------- */ - -/* - * h - * Author: Richard Roberts - */ - -#pragma once - -#include -#include -#include -#include -#include -#include -#include -#include - -namespace gtsam { - - class Point2; - typedef LieValues SymbolicValues; - - // save graph to the graphviz format - void saveGraph(const SymbolicFactorGraph& fg, const SymbolicValues& config, const std::string& s); - -} // namespace gtsam diff --git a/gtsam/slam/simulated2D.h b/gtsam/slam/simulated2D.h index 17c2d21a7..07cc69f5b 100644 --- a/gtsam/slam/simulated2D.h +++ b/gtsam/slam/simulated2D.h @@ -35,7 +35,6 @@ namespace gtsam { typedef LieValues PoseValues; typedef LieValues PointValues; -// typedef TupleValues2 Values; class Values: public TupleValues2 { public: typedef TupleValues2 Base; diff --git a/gtsam/slam/smallExample.h b/gtsam/slam/smallExample.h index 3cf4a7719..1889ee1b5 100644 --- a/gtsam/slam/smallExample.h +++ b/gtsam/slam/smallExample.h @@ -21,10 +21,7 @@ #pragma once -#include #include -#include -#include #include #include diff --git a/gtsam/slam/tests/testPose2SLAM.cpp b/gtsam/slam/tests/testPose2SLAM.cpp index 29d0a2b0f..471292b53 100644 --- a/gtsam/slam/tests/testPose2SLAM.cpp +++ b/gtsam/slam/tests/testPose2SLAM.cpp @@ -164,7 +164,7 @@ TEST(Pose2Graph, optimizeThreePoses) { /* ************************************************************************* */ // test optimization with 6 poses arranged in a hexagon and a loop closure -TEST(Pose2Graph, optimizeCircle) { +TEST_UNSAFE(Pose2Graph, optimizeCircle) { // Create a hexagon of poses Pose2Values hexagon = pose2SLAM::circle(6,1.0); diff --git a/gtsam/slam/tests/testProjectionFactor.cpp b/gtsam/slam/tests/testProjectionFactor.cpp index 4efd252a0..417b138bc 100644 --- a/gtsam/slam/tests/testProjectionFactor.cpp +++ b/gtsam/slam/tests/testProjectionFactor.cpp @@ -62,8 +62,8 @@ TEST( ProjectionFactor, error ) // Check linearize Ordering ordering; ordering += "x1","l1"; - Matrix Ax1 = Matrix_(2, 6, 0., -369.504, 0., -61.584, 0., 0., 369.504, 0., 0., 0., -61.584, 0.); - Matrix Al1 = Matrix_(2, 3, 61.584, 0., 0., 0., 61.584, 0.); + Matrix Ax1 = Matrix_(2, 6, 0., -554.256, 0., -92.376, 0., 0., 554.256, 0., 0., 0., -92.376, 0.); + Matrix Al1 = Matrix_(2, 3, 92.376, 0., 0., 0., 92.376, 0.); Vector b = Vector_(2,3.,0.); SharedDiagonal probModel1 = noiseModel::Unit::Create(2); JacobianFactor expected(ordering["x1"], Ax1, ordering["l1"], Al1, b, probModel1); diff --git a/gtsam/slam/tests/testSimulated2D.cpp b/gtsam/slam/tests/testSimulated2D.cpp index 0e26af1d3..553c6a22c 100644 --- a/gtsam/slam/tests/testSimulated2D.cpp +++ b/gtsam/slam/tests/testSimulated2D.cpp @@ -21,7 +21,6 @@ #include #include -//#include using namespace gtsam; using namespace std; diff --git a/gtsam/slam/tests/testSimulated2DOriented.cpp b/gtsam/slam/tests/testSimulated2DOriented.cpp index ac41e77e5..2404259ba 100644 --- a/gtsam/slam/tests/testSimulated2DOriented.cpp +++ b/gtsam/slam/tests/testSimulated2DOriented.cpp @@ -21,7 +21,6 @@ #include #include -//#include #include #include diff --git a/gtsam/slam/visualSLAM.h b/gtsam/slam/visualSLAM.h index 34c590761..3a58395a8 100644 --- a/gtsam/slam/visualSLAM.h +++ b/gtsam/slam/visualSLAM.h @@ -18,8 +18,6 @@ #pragma once -#include -#include #include #include #include diff --git a/tests/Makefile.am b/tests/Makefile.am index 0ba01a619..8898d8648 100644 --- a/tests/Makefile.am +++ b/tests/Makefile.am @@ -20,6 
+20,7 @@ check_PROGRAMS += testTupleValues check_PROGRAMS += testNonlinearISAM check_PROGRAMS += testBoundingConstraint check_PROGRAMS += testNonlinearEqualityConstraint +check_PROGRAMS += testPose2SLAMwSPCG # only if serialization is available if ENABLE_SERIALIZATION diff --git a/tests/testBoundingConstraint.cpp b/tests/testBoundingConstraint.cpp index 711f103ed..74caa32c1 100644 --- a/tests/testBoundingConstraint.cpp +++ b/tests/testBoundingConstraint.cpp @@ -246,7 +246,7 @@ TEST( testBoundingConstraint, MaxDistance_simple_optimization) { expected.insert(x1, pt1); expected.insert(x2, pt2_goal); - // FAILS: segfaults on optimization + // FAILS: VectorValues assertion failure // Optimizer::shared_values actual = Optimizer::optimizeLM(graph, initial_state); // EXPECT(assert_equal(expected, *actual, tol)); } diff --git a/tests/testGaussianBayesNet.cpp b/tests/testGaussianBayesNet.cpp index 2fbdcf381..6976b6b3b 100644 --- a/tests/testGaussianBayesNet.cpp +++ b/tests/testGaussianBayesNet.cpp @@ -57,8 +57,8 @@ TEST( GaussianBayesNet, constructor ) // check small example which uses constructor GaussianBayesNet cbn = createSmallGaussianBayesNet(); - CHECK( x.equals(*cbn[_x_]) ); - CHECK( y.equals(*cbn[_y_]) ); + EXPECT( x.equals(*cbn[_x_]) ); + EXPECT( y.equals(*cbn[_y_]) ); } /* ************************************************************************* */ @@ -76,8 +76,8 @@ TEST( GaussianBayesNet, matrix ) ); Vector d1 = Vector_(2, 9.0, 5.0); - EQUALITY(R,R1); - CHECK(d==d1); + EXPECT(assert_equal(R,R1)); + EXPECT(assert_equal(d,d1)); } /* ************************************************************************* */ @@ -90,7 +90,7 @@ TEST( GaussianBayesNet, optimize ) expected[_x_] = Vector_(1,4.); expected[_y_] = Vector_(1,5.); - CHECK(assert_equal(expected,actual)); + EXPECT(assert_equal(expected,actual)); } /* ************************************************************************* */ @@ -116,7 +116,7 @@ TEST( GaussianBayesNet, optimize2 ) expected[_y_] = Vector_(1,2.); expected[_z_] = Vector_(1,3.); - CHECK(assert_equal(expected,actual)); + EXPECT(assert_equal(expected,actual)); } /* ************************************************************************* */ @@ -125,6 +125,7 @@ TEST( GaussianBayesNet, backSubstitute ) // y=R*x, x=inv(R)*y // 2 = 1 1 -1 // 3 1 3 + // NOTE: we are supplying a new RHS here GaussianBayesNet cbn = createSmallGaussianBayesNet(); VectorValues y(vector(2,1)), x(vector(2,1)); @@ -135,11 +136,11 @@ TEST( GaussianBayesNet, backSubstitute ) // test functional version VectorValues actual = backSubstitute(cbn,y); - CHECK(assert_equal(x,actual)); + EXPECT(assert_equal(x,actual)); // test imperative version backSubstituteInPlace(cbn,y); - CHECK(assert_equal(x,y)); + EXPECT(assert_equal(x,y)); } /* ************************************************************************* */ @@ -152,7 +153,7 @@ TEST( GaussianBayesNet, rhs ) VectorValues expected = gtsam::optimize(cbn); VectorValues d = rhs(cbn); VectorValues actual = backSubstitute(cbn, d); - CHECK(assert_equal(expected, actual)); + EXPECT(assert_equal(expected, actual)); } /* ************************************************************************* */ @@ -176,7 +177,7 @@ TEST( GaussianBayesNet, rhs_with_sigmas ) VectorValues expected = gtsam::optimize(cbn); VectorValues d = rhs(cbn); VectorValues actual = backSubstitute(cbn, d); - CHECK(assert_equal(expected, actual)); + EXPECT(assert_equal(expected, actual)); } /* ************************************************************************* */ @@ -195,7 +196,7 @@ TEST( 
GaussianBayesNet, backSubstituteTranspose ) // test functional version VectorValues actual = backSubstituteTranspose(cbn,x); - CHECK(assert_equal(y,actual)); + EXPECT(assert_equal(y,actual)); } /* ************************************************************************* */ diff --git a/tests/testGaussianFactorGraph.cpp b/tests/testGaussianFactorGraph.cpp index 11df6dac1..b91a5fa5d 100644 --- a/tests/testGaussianFactorGraph.cpp +++ b/tests/testGaussianFactorGraph.cpp @@ -35,7 +35,6 @@ using namespace boost::assign; #include #include #include -//#include #include using namespace gtsam; @@ -49,7 +48,7 @@ TEST( GaussianFactorGraph, equals ) { Ordering ordering; ordering += "x1","x2","l1"; GaussianFactorGraph fg = createGaussianFactorGraph(ordering); GaussianFactorGraph fg2 = createGaussianFactorGraph(ordering); - CHECK(fg.equals(fg2)); + EXPECT(fg.equals(fg2)); } /* ************************************************************************* */ @@ -77,10 +76,10 @@ TEST( GaussianFactorGraph, error ) { // expected.insert("x1"); // expected.insert("l1"); // -// CHECK(separator.size()==expected.size()); +// EXPECT(separator.size()==expected.size()); // set::iterator it1 = separator.begin(), it2 = expected.begin(); // for(; it1!=separator.end(); it1++, it2++) -// CHECK(*it1 == *it2); +// EXPECT(*it1 == *it2); //} ///* ************************************************************************* */ @@ -137,7 +136,7 @@ TEST( GaussianFactorGraph, error ) { // //GaussianFactor expected("l1", Al1, "x1", Ax1, "x2", Ax2, b); // // // check if the two factors are the same -// CHECK(assert_equal(expected,*actual)); +// EXPECT(assert_equal(expected,*actual)); //} // ///* ************************************************************************* */ @@ -188,7 +187,7 @@ TEST( GaussianFactorGraph, error ) { // GaussianFactor expected(meas, b, ones(4)); // // // check if the two factors are the same -// CHECK(assert_equal(expected,*actual)); +// EXPECT(assert_equal(expected,*actual)); //} ///* ************************************************************************* */ @@ -203,7 +202,7 @@ TEST( GaussianFactorGraph, error ) { // Vector d = Vector_(2, -0.133333, -0.0222222), sigma = ones(2); // GaussianConditional expected(ordering["x1"],15*d,R11,ordering["l1"],S12,ordering["x2"],S13,sigma); // -// CHECK(assert_equal(expected,*actual,tol)); +// EXPECT(assert_equal(expected,*actual,tol)); //} // ///* ************************************************************************* */ @@ -219,7 +218,7 @@ TEST( GaussianFactorGraph, error ) { // Vector d = Vector_(2, 0.2, -0.14)/sig, sigma = ones(2); // GaussianConditional expected(ordering["x2"],d,R11,ordering["l1"],S12,ordering["x1"],S13,sigma); // -// CHECK(assert_equal(expected,*actual,tol)); +// EXPECT(assert_equal(expected,*actual,tol)); //} // ///* ************************************************************************* */ @@ -235,7 +234,7 @@ TEST( GaussianFactorGraph, error ) { // Vector d = Vector_(2, -0.1, 0.25)/sig, sigma = ones(2); // GaussianConditional expected(ordering["l1"],d,R11,ordering["x1"],S12,ordering["x2"],S13,sigma); // -// CHECK(assert_equal(expected,*actual,tol)); +// EXPECT(assert_equal(expected,*actual,tol)); //} ///* ************************************************************************* */ @@ -249,7 +248,7 @@ TEST( GaussianFactorGraph, error ) { // Vector d = Vector_(2, -0.133333, -0.0222222), sigma = ones(2); // GaussianConditional expected("x1",15*d,R11,"l1",S12,"x2",S13,sigma); // -// CHECK(assert_equal(expected,*actual,tol)); +// 
EXPECT(assert_equal(expected,*actual,tol)); //} // ///* ************************************************************************* */ @@ -264,7 +263,7 @@ TEST( GaussianFactorGraph, error ) { // Vector d = Vector_(2, 0.2, -0.14)/sig, sigma = ones(2); // GaussianConditional expected("x2",d,R11,"l1",S12,"x1",S13,sigma); // -// CHECK(assert_equal(expected,*actual,tol)); +// EXPECT(assert_equal(expected,*actual,tol)); //} // ///* ************************************************************************* */ @@ -279,7 +278,7 @@ TEST( GaussianFactorGraph, error ) { // Vector d = Vector_(2, -0.1, 0.25)/sig, sigma = ones(2); // GaussianConditional expected("l1",d,R11,"x1",S12,"x2",S13,sigma); // -// CHECK(assert_equal(expected,*actual,tol)); +// EXPECT(assert_equal(expected,*actual,tol)); //} /* ************************************************************************* */ @@ -305,10 +304,10 @@ TEST( GaussianFactorGraph, eliminateAll ) // Check one ordering GaussianFactorGraph fg1 = createGaussianFactorGraph(ordering); GaussianBayesNet actual = *GaussianSequentialSolver(fg1).eliminate(); - CHECK(assert_equal(expected,actual,tol)); + EXPECT(assert_equal(expected,actual,tol)); GaussianBayesNet actualET = *GaussianSequentialSolver(fg1).eliminate(); - CHECK(assert_equal(expected,actualET,tol)); + EXPECT(assert_equal(expected,actualET,tol)); } ///* ************************************************************************* */ @@ -333,7 +332,7 @@ TEST( GaussianFactorGraph, eliminateAll ) // Ordering ordering; // ordering += "x2","l1","x1"; // GaussianBayesNet actual = fg1.eliminate(ordering, false); -// CHECK(assert_equal(expected,actual,tol)); +// EXPECT(assert_equal(expected,actual,tol)); //} ///* ************************************************************************* */ @@ -349,7 +348,7 @@ TEST( GaussianFactorGraph, eliminateAll ) // expected.push_back(GaussianFactor::shared_ptr(new JacobianFactor(ordering["l1"],A,b,sigma))); // expected.push_back(GaussianFactor::shared_ptr(new JacobianFactor(ordering["x1"],A,b,sigma))); // expected.push_back(GaussianFactor::shared_ptr(new JacobianFactor(ordering["x2"],A,b,sigma))); -// CHECK(assert_equal(expected,actual)); +// EXPECT(assert_equal(expected,actual)); //} /* ************************************************************************* */ @@ -369,7 +368,7 @@ TEST( GaussianFactorGraph, copying ) GaussianFactorGraph expected = createGaussianFactorGraph(ordering); // and check that original is still the same graph - CHECK(assert_equal(expected,actual)); + EXPECT(assert_equal(expected,actual)); } ///* ************************************************************************* */ @@ -398,7 +397,7 @@ TEST( GaussianFactorGraph, copying ) // Vector b1 = Vector_(8,-1., -1., 2., -1., 0., 1., -1., 1.5); // // EQUALITY(A,A1); -// CHECK(b==b1); +// EXPECT(b==b1); //} ///* ************************************************************************* */ @@ -408,8 +407,8 @@ TEST( GaussianFactorGraph, copying ) // GaussianFactorGraph fg = createGaussianFactorGraph(); // // pair mn = fg.sizeOfA(); -// CHECK(8 == mn.first); -// CHECK(6 == mn.second); +// EXPECT(8 == mn.first); +// EXPECT(6 == mn.second); //} ///* ************************************************************************* */ @@ -444,12 +443,12 @@ TEST( GaussianFactorGraph, CONSTRUCTOR_GaussianBayesNet ) // True GaussianFactorGraph GaussianFactorGraph fg2(CBN); GaussianBayesNet CBN2 = *GaussianSequentialSolver(fg2).eliminate(); - CHECK(assert_equal(CBN,CBN2)); + EXPECT(assert_equal(CBN,CBN2)); -// // Base FactorGraph only + 
// Base FactorGraph only // FactorGraph fg3(CBN); // GaussianBayesNet CBN3 = gtsam::eliminate(fg3,ord); -// CHECK(assert_equal(CBN,CBN3)); +// EXPECT(assert_equal(CBN,CBN3)); } /* ************************************************************************* */ @@ -460,7 +459,7 @@ TEST( GaussianFactorGraph, getOrdering) Permutation perm(*Inference::PermutationCOLAMD(VariableIndex(symbolic))); Ordering actual = original; actual.permuteWithInverse((*perm.inverse())); Ordering expected; expected += "l1","x2","x1"; - CHECK(assert_equal(expected,actual)); + EXPECT(assert_equal(expected,actual)); } // SL-FIX TEST( GaussianFactorGraph, getOrdering2) @@ -470,7 +469,7 @@ TEST( GaussianFactorGraph, getOrdering) // GaussianFactorGraph fg = createGaussianFactorGraph(); // set interested; interested += "l1","x1"; // Ordering actual = fg.getOrdering(interested); -// CHECK(assert_equal(expected,actual)); +// EXPECT(assert_equal(expected,actual)); //} /* ************************************************************************* */ @@ -488,7 +487,7 @@ TEST( GaussianFactorGraph, optimize ) // verify VectorValues expected = createCorrectDelta(ord); - CHECK(assert_equal(expected,actual)); + EXPECT(assert_equal(expected,actual)); } ///* ************************************************************************* */ @@ -506,7 +505,7 @@ TEST( GaussianFactorGraph, optimize ) // // verify // VectorValues expected = createCorrectDelta(); // -// CHECK(assert_equal(expected,actual)); +// EXPECT(assert_equal(expected,actual)); //} /* ************************************************************************* */ @@ -528,7 +527,7 @@ TEST( GaussianFactorGraph, combine) // combine them fg1.combine(fg2); - CHECK(size1+size2 == fg1.size()); + EXPECT(size1+size2 == fg1.size()); } /* ************************************************************************* */ @@ -550,7 +549,7 @@ TEST( GaussianFactorGraph, combine2) // combine them GaussianFactorGraph fg3 = GaussianFactorGraph::combine2(fg1, fg2); - CHECK(size1+size2 == fg3.size()); + EXPECT(size1+size2 == fg3.size()); } /* ************************************************************************* */ @@ -571,13 +570,13 @@ void print(vector v) { // list x1_factors = fg.factors("x1"); // size_t x1_indices[] = { 0, 1, 2 }; // list x1_expected(x1_indices, x1_indices + 3); -// CHECK(x1_factors==x1_expected); +// EXPECT(x1_factors==x1_expected); // // // ask for all factor indices connected to x2 // list x2_factors = fg.factors("x2"); // size_t x2_indices[] = { 1, 3 }; // list x2_expected(x2_indices, x2_indices + 2); -// CHECK(x2_factors==x2_expected); +// EXPECT(x2_factors==x2_expected); //} ///* ************************************************************************* */ @@ -595,11 +594,11 @@ void print(vector v) { // vector factors = fg.findAndRemoveFactors("x1"); // // // Check the factors -// CHECK(f0==factors[0]); -// CHECK(f1==factors[1]); -// CHECK(f2==factors[2]); +// EXPECT(f0==factors[0]); +// EXPECT(f1==factors[1]); +// EXPECT(f2==factors[2]); // -// // CHECK if the factors are deleted from the factor graph +// // EXPECT if the factors are deleted from the factor graph // LONGS_EQUAL(1,fg.nrFactors()); //} @@ -619,7 +618,7 @@ TEST(GaussianFactorGraph, createSmoother) // Dimensions expected; // insert(expected)("l1", 2)("x1", 2)("x2", 2); // Dimensions actual = fg.dimensions(); -// CHECK(expected==actual); +// EXPECT(expected==actual); //} ///* ************************************************************************* */ @@ -628,17 +627,17 @@ TEST(GaussianFactorGraph, createSmoother) 
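// ============================================================================
// Illustrative note (not part of the patch): most hunks in this test file only
// swap CHECK for EXPECT. Assuming the usual CppUnitLite-style semantics in this
// code base -- CHECK returns from the test body on the first failure, while
// EXPECT records the failure and keeps executing -- the conversion lets a
// single run report every failing assertion. Hypothetical example:
//
//   TEST(Example, expect_keeps_going) {
//     EXPECT(1 + 1 == 3);              // recorded as a failure, test continues
//     EXPECT_LONGS_EQUAL(4, 2 + 2);    // still evaluated and reported
//   }
// ============================================================================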
// GaussianFactorGraph fg = createGaussianFactorGraph(); // Ordering expected; // expected += "l1","x1","x2"; -// CHECK(assert_equal(expected,fg.keys())); +// EXPECT(assert_equal(expected,fg.keys())); //} ///* ************************************************************************* */ // SL-NEEDED? TEST( GaussianFactorGraph, involves ) //{ // GaussianFactorGraph fg = createGaussianFactorGraph(); -// CHECK(fg.involves("l1")); -// CHECK(fg.involves("x1")); -// CHECK(fg.involves("x2")); -// CHECK(!fg.involves("x3")); +// EXPECT(fg.involves("l1")); +// EXPECT(fg.involves("x1")); +// EXPECT(fg.involves("x2")); +// EXPECT(!fg.involves("x3")); //} /* ************************************************************************* */ @@ -667,11 +666,11 @@ double error(const VectorValues& x) { // // Check the gradient at delta=0 // VectorValues zero = createZeroDelta(); // VectorValues actual = fg.gradient(zero); -// CHECK(assert_equal(expected,actual)); +// EXPECT(assert_equal(expected,actual)); // // // Check it numerically for good measure // Vector numerical_g = numericalGradient(error,zero,0.001); -// CHECK(assert_equal(Vector_(6,5.0,-12.5,30.0,5.0,-25.0,17.5),numerical_g)); +// EXPECT(assert_equal(Vector_(6,5.0,-12.5,30.0,5.0,-25.0,17.5),numerical_g)); // // // Check the gradient at the solution (should be zero) // Ordering ord; @@ -679,7 +678,7 @@ double error(const VectorValues& x) { // GaussianFactorGraph fg2 = createGaussianFactorGraph(); // VectorValues solution = fg2.optimize(ord); // destructive // VectorValues actual2 = fg.gradient(solution); -// CHECK(assert_equal(zero,actual2)); +// EXPECT(assert_equal(zero,actual2)); //} /* ************************************************************************* */ @@ -696,7 +695,7 @@ TEST( GaussianFactorGraph, multiplication ) expected += Vector_(2, 2.0,-1.0); expected += Vector_(2, 0.0, 1.0); expected += Vector_(2,-1.0, 1.5); - CHECK(assert_equal(expected,actual)); + EXPECT(assert_equal(expected,actual)); } ///* ************************************************************************* */ @@ -716,7 +715,7 @@ TEST( GaussianFactorGraph, multiplication ) // expected[ord["l1"]] = Vector_(2, -37.5,-50.0); // expected[ord["x1"]] = Vector_(2,-150.0, 25.0); // expected[ord["x2"]] = Vector_(2, 187.5, 25.0); -// CHECK(assert_equal(expected,actual)); +// EXPECT(assert_equal(expected,actual)); //} ///* ************************************************************************* */ @@ -731,7 +730,7 @@ TEST( GaussianFactorGraph, multiplication ) // expected.push_back(Vector_(2, 2.0,-1.0)); // expected.push_back(Vector_(2, 0.0, 1.0)); // expected.push_back(Vector_(2,-1.0, 1.5)); -// CHECK(assert_equal(expected,actual)); +// EXPECT(assert_equal(expected,actual)); //} /* ************************************************************************* */ @@ -753,7 +752,7 @@ TEST( GaussianFactorGraph, elimination ) GaussianBayesNet bayesNet = *GaussianSequentialSolver(fg).eliminate(); // Check sigma - DOUBLES_EQUAL(1.0,bayesNet[ord["x2"]]->get_sigmas()(0),1e-5); + EXPECT_DOUBLES_EQUAL(1.0,bayesNet[ord["x2"]]->get_sigmas()(0),1e-5); // Check matrix Matrix R;Vector d; @@ -764,7 +763,7 @@ TEST( GaussianFactorGraph, elimination ) Matrix expected2 = Matrix_(2,2, 0.707107, -0.353553, 0.0, -0.612372); - CHECK(equal_with_abs_tol(expected, R, 1e-6) || equal_with_abs_tol(expected2, R, 1e-6)); + EXPECT(equal_with_abs_tol(expected, R, 1e-6) || equal_with_abs_tol(expected2, R, 1e-6)); } /* ************************************************************************* */ @@ -780,7 +779,7 @@ TEST( 
GaussianFactorGraph, constrained_simple ) // verify VectorValues expected = createSimpleConstraintValues(); - CHECK(assert_equal(expected, actual)); + EXPECT(assert_equal(expected, actual)); } /* ************************************************************************* */ @@ -794,7 +793,7 @@ TEST( GaussianFactorGraph, constrained_single ) // verify VectorValues expected = createSingleConstraintValues(); - CHECK(assert_equal(expected, actual)); + EXPECT(assert_equal(expected, actual)); } ///* ************************************************************************* */ @@ -810,7 +809,7 @@ TEST( GaussianFactorGraph, constrained_single ) // // // verify // VectorValues expected = createSingleConstraintValues(); -// CHECK(assert_equal(expected, actual)); +// EXPECT(assert_equal(expected, actual)); //} /* ************************************************************************* */ @@ -824,7 +823,7 @@ TEST( GaussianFactorGraph, constrained_multi1 ) // verify VectorValues expected = createMultiConstraintValues(); - CHECK(assert_equal(expected, actual)); + EXPECT(assert_equal(expected, actual)); } ///* ************************************************************************* */ @@ -840,7 +839,7 @@ TEST( GaussianFactorGraph, constrained_multi1 ) // // // verify // VectorValues expected = createMultiConstraintValues(); -// CHECK(assert_equal(expected, actual)); +// EXPECT(assert_equal(expected, actual)); //} /* ************************************************************************* */ @@ -860,10 +859,10 @@ SharedDiagonal model = sharedSigma(2,1); // g.add("x3", I, "x4", I, b, model); // // map tree = g.findMinimumSpanningTree(); -// CHECK(tree["x1"].compare("x1")==0); -// CHECK(tree["x2"].compare("x1")==0); -// CHECK(tree["x3"].compare("x1")==0); -// CHECK(tree["x4"].compare("x1")==0); +// EXPECT(tree["x1"].compare("x1")==0); +// EXPECT(tree["x2"].compare("x1")==0); +// EXPECT(tree["x3"].compare("x1")==0); +// EXPECT(tree["x4"].compare("x1")==0); //} ///* ************************************************************************* */ @@ -923,7 +922,7 @@ TEST(GaussianFactorGraph, replace) expected.push_back(f3); // actual.checkGraphConsistency(); - CHECK(assert_equal(expected, actual)); + EXPECT(assert_equal(expected, actual)); } ///* ************************************************************************* */ @@ -952,8 +951,8 @@ TEST(GaussianFactorGraph, replace) // // expected noise model // SharedDiagonal expModel = noiseModel::Unit::Create(4); // -// CHECK(assert_equal(expAb, Ab)); -// CHECK(assert_equal(*expModel, *noise)); +// EXPECT(assert_equal(expAb, Ab)); +// EXPECT(assert_equal(*expModel, *noise)); //} /* ************************************************************************* */ @@ -984,12 +983,12 @@ TEST(GaussianFactorGraph, replace) // "x3", Matrix_(1, 1, 1.), Vector_(1, 1.))); // bn_expected.push_back(conditional1); // bn_expected.push_back(conditional2); -// CHECK(assert_equal(bn_expected, bn)); +// EXPECT(assert_equal(bn_expected, bn)); // // GaussianFactorGraph::sharedFactor factor_expected(new JacobianFactor("x3", Matrix_(1, 1, 2.), Vector_(1, 2.), SharedDiagonal(Vector_(1, 1.)))); // GaussianFactorGraph fg_expected; // fg_expected.push_back(factor_expected); -// CHECK(assert_equal(fg_expected, fg)); +// EXPECT(assert_equal(fg_expected, fg)); //} /* ************************************************************************* */ diff --git a/tests/testGaussianISAM.cpp b/tests/testGaussianISAM.cpp index a0c0a6736..1cb2dbb00 100644 --- a/tests/testGaussianISAM.cpp +++ 
b/tests/testGaussianISAM.cpp @@ -45,7 +45,7 @@ double sigmax1 = 0.786153, sigmax2 = 1.0/1.47292, sigmax3 = 0.671512, sigmax4 = const double tol = 1e-4; /* ************************************************************************* */ -TEST( ISAM, iSAM_smoother ) +TEST_UNSAFE( ISAM, iSAM_smoother ) { Ordering ordering; for (int t = 1; t <= 7; t++) ordering += Symbol('x', t); @@ -61,8 +61,9 @@ TEST( ISAM, iSAM_smoother ) actual.update(factorGraph); } + BayesTree::shared_ptr bayesTree = GaussianMultifrontalSolver(smoother).eliminate(); // Create expected Bayes Tree by solving smoother with "natural" ordering - GaussianISAM expected(*GaussianSequentialSolver(smoother).eliminate()); + GaussianISAM expected(*bayesTree); // Check whether BayesTree is correct EXPECT(assert_equal(expected, actual)); @@ -108,28 +109,27 @@ C4 x3 : x4 C5 x2 : x3 C6 x1 : x2 **************************************************************************** */ -TEST( BayesTree, linear_smoother_shortcuts ) +TEST_UNSAFE( BayesTree, linear_smoother_shortcuts ) { // Create smoother with 7 nodes Ordering ordering; GaussianFactorGraph smoother; boost::tie(smoother, ordering) = createSmoother(7); - // eliminate using the "natural" ordering - GaussianBayesNet chordalBayesNet = *GaussianSequentialSolver(smoother).eliminate(); + BayesTree bayesTree = *GaussianMultifrontalSolver(smoother).eliminate(); // Create the Bayes tree - GaussianISAM bayesTree(chordalBayesNet); - LONGS_EQUAL(6,bayesTree.size()); + GaussianISAM isamTree(bayesTree); + LONGS_EQUAL(6,isamTree.size()); // Check the conditional P(Root|Root) GaussianBayesNet empty; - GaussianISAM::sharedClique R = bayesTree.root(); + GaussianISAM::sharedClique R = isamTree.root(); GaussianBayesNet actual1 = GaussianISAM::shortcut(R,R); EXPECT(assert_equal(empty,actual1,tol)); // Check the conditional P(C2|Root) - GaussianISAM::sharedClique C2 = bayesTree[ordering["x5"]]; + GaussianISAM::sharedClique C2 = isamTree[ordering["x5"]]; GaussianBayesNet actual2 = GaussianISAM::shortcut(C2,R); EXPECT(assert_equal(empty,actual2,tol)); @@ -138,7 +138,7 @@ TEST( BayesTree, linear_smoother_shortcuts ) Matrix A56 = Matrix_(2,2,-0.382022,0.,0.,-0.382022); GaussianBayesNet expected3; push_front(expected3,ordering["x5"], zero(2), eye(2)/sigma3, ordering["x6"], A56/sigma3, ones(2)); - GaussianISAM::sharedClique C3 = bayesTree[ordering["x4"]]; + GaussianISAM::sharedClique C3 = isamTree[ordering["x4"]]; GaussianBayesNet actual3 = GaussianISAM::shortcut(C3,R); EXPECT(assert_equal(expected3,actual3,tol)); @@ -147,7 +147,7 @@ TEST( BayesTree, linear_smoother_shortcuts ) Matrix A46 = Matrix_(2,2,-0.146067,0.,0.,-0.146067); GaussianBayesNet expected4; push_front(expected4, ordering["x4"], zero(2), eye(2)/sigma4, ordering["x6"], A46/sigma4, ones(2)); - GaussianISAM::sharedClique C4 = bayesTree[ordering["x3"]]; + GaussianISAM::sharedClique C4 = isamTree[ordering["x3"]]; GaussianBayesNet actual4 = GaussianISAM::shortcut(C4,R); EXPECT(assert_equal(expected4,actual4,tol)); } @@ -171,7 +171,7 @@ TEST( BayesTree, linear_smoother_shortcuts ) C4 x7 : x6 ************************************************************************* */ -TEST( BayesTree, balanced_smoother_marginals ) +TEST_UNSAFE( BayesTree, balanced_smoother_marginals ) { // Create smoother with 7 nodes Ordering ordering; @@ -179,7 +179,7 @@ TEST( BayesTree, balanced_smoother_marginals ) GaussianFactorGraph smoother = createSmoother(7, ordering).first; // Create the Bayes tree - GaussianBayesNet chordalBayesNet = *GaussianSequentialSolver(smoother).eliminate(); 
+ BayesTree chordalBayesNet = *GaussianMultifrontalSolver(smoother).eliminate(); VectorValues expectedSolution(7, 2); expectedSolution.makeZero(); @@ -196,11 +196,9 @@ TEST( BayesTree, balanced_smoother_marginals ) GaussianBayesNet expected1 = simpleGaussian(ordering["x1"], zero(2), sigmax1); GaussianBayesNet actual1 = *bayesTree.marginalBayesNet(ordering["x1"]); Matrix expectedCovarianceX1 = eye(2,2) * (sigmax1 * sigmax1); - Vector expectedMeanX1 = zero(2); - Matrix actualCovarianceX1; Vector actualMeanX1; - boost::tie(actualMeanX1, actualCovarianceX1) = bayesTree.marginal(ordering["x1"]); + Matrix actualCovarianceX1; + actualCovarianceX1 = bayesTree.marginalCovariance(ordering["x1"]); EXPECT(assert_equal(expectedCovarianceX1, actualCovarianceX1, tol)); - EXPECT(assert_equal(expectedMeanX1, actualMeanX1, tol)); EXPECT(assert_equal(expected1,actual1,tol)); // Check marginal on x2 @@ -208,49 +206,41 @@ TEST( BayesTree, balanced_smoother_marginals ) GaussianBayesNet expected2 = simpleGaussian(ordering["x2"], zero(2), sigx2); GaussianBayesNet actual2 = *bayesTree.marginalBayesNet(ordering["x2"]); Matrix expectedCovarianceX2 = eye(2,2) * (sigx2 * sigx2); - Vector expectedMeanX2 = zero(2); - Matrix actualCovarianceX2; Vector actualMeanX2; - boost::tie(actualMeanX2, actualCovarianceX2) = bayesTree.marginal(ordering["x2"]); + Matrix actualCovarianceX2; + actualCovarianceX2 = bayesTree.marginalCovariance(ordering["x2"]); EXPECT(assert_equal(expectedCovarianceX2, actualCovarianceX2, tol)); - EXPECT(assert_equal(expectedMeanX2, actualMeanX2, tol)); EXPECT(assert_equal(expected2,actual2,tol)); // Check marginal on x3 GaussianBayesNet expected3 = simpleGaussian(ordering["x3"], zero(2), sigmax3); GaussianBayesNet actual3 = *bayesTree.marginalBayesNet(ordering["x3"]); Matrix expectedCovarianceX3 = eye(2,2) * (sigmax3 * sigmax3); - Vector expectedMeanX3 = zero(2); - Matrix actualCovarianceX3; Vector actualMeanX3; - boost::tie(actualMeanX3, actualCovarianceX3) = bayesTree.marginal(ordering["x3"]); + Matrix actualCovarianceX3; + actualCovarianceX3 = bayesTree.marginalCovariance(ordering["x3"]); EXPECT(assert_equal(expectedCovarianceX3, actualCovarianceX3, tol)); - EXPECT(assert_equal(expectedMeanX3, actualMeanX3, tol)); EXPECT(assert_equal(expected3,actual3,tol)); // Check marginal on x4 GaussianBayesNet expected4 = simpleGaussian(ordering["x4"], zero(2), sigmax4); GaussianBayesNet actual4 = *bayesTree.marginalBayesNet(ordering["x4"]); Matrix expectedCovarianceX4 = eye(2,2) * (sigmax4 * sigmax4); - Vector expectedMeanX4 = zero(2); - Matrix actualCovarianceX4; Vector actualMeanX4; - boost::tie(actualMeanX4, actualCovarianceX4) = bayesTree.marginal(ordering["x4"]); + Matrix actualCovarianceX4; + actualCovarianceX4 = bayesTree.marginalCovariance(ordering["x4"]); EXPECT(assert_equal(expectedCovarianceX4, actualCovarianceX4, tol)); - EXPECT(assert_equal(expectedMeanX4, actualMeanX4, tol)); EXPECT(assert_equal(expected4,actual4,tol)); // Check marginal on x7 (should be equal to x1) GaussianBayesNet expected7 = simpleGaussian(ordering["x7"], zero(2), sigmax7); GaussianBayesNet actual7 = *bayesTree.marginalBayesNet(ordering["x7"]); Matrix expectedCovarianceX7 = eye(2,2) * (sigmax7 * sigmax7); - Vector expectedMeanX7 = zero(2); - Matrix actualCovarianceX7; Vector actualMeanX7; - boost::tie(actualMeanX7, actualCovarianceX7) = bayesTree.marginal(ordering["x7"]); + Matrix actualCovarianceX7; + actualCovarianceX7 = bayesTree.marginalCovariance(ordering["x7"]); EXPECT(assert_equal(expectedCovarianceX7, 
actualCovarianceX7, tol)); - EXPECT(assert_equal(expectedMeanX7, actualMeanX7, tol)); EXPECT(assert_equal(expected7,actual7,tol)); } /* ************************************************************************* */ -TEST( BayesTree, balanced_smoother_shortcuts ) +TEST_UNSAFE( BayesTree, balanced_smoother_shortcuts ) { // Create smoother with 7 nodes Ordering ordering; @@ -258,26 +248,31 @@ TEST( BayesTree, balanced_smoother_shortcuts ) GaussianFactorGraph smoother = createSmoother(7, ordering).first; // Create the Bayes tree - GaussianBayesNet chordalBayesNet = *GaussianSequentialSolver(smoother).eliminate(); - GaussianISAM bayesTree(chordalBayesNet); + BayesTree bayesTree = *GaussianMultifrontalSolver(smoother).eliminate(); + GaussianISAM isamTree(bayesTree); // Check the conditional P(Root|Root) GaussianBayesNet empty; - GaussianISAM::sharedClique R = bayesTree.root(); + GaussianISAM::sharedClique R = isamTree.root(); GaussianBayesNet actual1 = GaussianISAM::shortcut(R,R); EXPECT(assert_equal(empty,actual1,tol)); // Check the conditional P(C2|Root) - GaussianISAM::sharedClique C2 = bayesTree[ordering["x3"]]; + GaussianISAM::sharedClique C2 = isamTree[ordering["x3"]]; GaussianBayesNet actual2 = GaussianISAM::shortcut(C2,R); EXPECT(assert_equal(empty,actual2,tol)); // Check the conditional P(C3|Root), which should be equal to P(x2|x4) - GaussianConditional::shared_ptr p_x2_x4 = chordalBayesNet[ordering["x2"]]; - GaussianBayesNet expected3; expected3.push_back(p_x2_x4); - GaussianISAM::sharedClique C3 = bayesTree[ordering["x1"]]; - GaussianBayesNet actual3 = GaussianISAM::shortcut(C3,R); - EXPECT(assert_equal(expected3,actual3,tol)); + /** TODO: Note for multifrontal conditional: + * p_x2_x4 is now an element conditional of the multifrontal conditional bayesTree[ordering["x2"]]->conditional() + * We don't know yet how to take it out. 
+ */
+//  GaussianConditional::shared_ptr p_x2_x4 = bayesTree[ordering["x2"]]->conditional();
+//  p_x2_x4->print("Conditional p_x2_x4: ");
+//  GaussianBayesNet expected3(p_x2_x4);
+//  GaussianISAM::sharedClique C3 = isamTree[ordering["x1"]];
+//  GaussianBayesNet actual3 = GaussianISAM::shortcut(C3,R);
+//  EXPECT(assert_equal(expected3,actual3,tol));
 }
 ///* ************************************************************************* */
@@ -310,7 +305,7 @@ TEST( BayesTree, balanced_smoother_shortcuts )
 //}
 /* ************************************************************************* */
-TEST( BayesTree, balanced_smoother_joint )
+TEST_UNSAFE( BayesTree, balanced_smoother_joint )
 {
   // Create smoother with 7 nodes
   Ordering ordering;
@@ -322,7 +317,7 @@ TEST( BayesTree, balanced_smoother_joint )
   // x3 x2 : x4
   // x1 : x2
   // x7 : x6
-  GaussianBayesNet chordalBayesNet = *GaussianSequentialSolver(smoother).eliminate();
+  BayesTree chordalBayesNet = *GaussianMultifrontalSolver(smoother).eliminate();
   GaussianISAM bayesTree(chordalBayesNet);
   // Conditional density elements reused by both tests
@@ -372,7 +367,7 @@ TEST( BayesTree, balanced_smoother_joint )
 }
 /* ************************************************************************* */
-TEST(BayesTree, simpleMarginal)
+TEST_UNSAFE(BayesTree, simpleMarginal)
 {
   GaussianFactorGraph gfg;
@@ -382,8 +377,8 @@ TEST(BayesTree, simpleMarginal)
   gfg.add(0, -eye(2), 1, eye(2), ones(2), sharedSigma(2, 1.0));
   gfg.add(1, -eye(2), 2, A12, ones(2), sharedSigma(2, 1.0));
-  Matrix expected(GaussianSequentialSolver(gfg).marginalCovariance(2).second);
-  Matrix actual(GaussianMultifrontalSolver(gfg).marginalCovariance(2).second);
+  Matrix expected(GaussianSequentialSolver(gfg).marginalCovariance(2));
+  Matrix actual(GaussianMultifrontalSolver(gfg).marginalCovariance(2));
   EXPECT(assert_equal(expected, actual));
 }
diff --git a/tests/testGaussianJunctionTree.cpp b/tests/testGaussianJunctionTree.cpp
index 460d888be..12bb07ff7 100644
--- a/tests/testGaussianJunctionTree.cpp
+++ b/tests/testGaussianJunctionTree.cpp
@@ -1,6 +1,6 @@
 /* ----------------------------------------------------------------------------
- * GTSAM Copyright 2010, Georgia Tech Research Corporation, 
+ * GTSAM Copyright 2010, Georgia Tech Research Corporation,
  * Atlanta, Georgia 30332-0415
  * All Rights Reserved
  * Authors: Frank Dellaert, et al. (see THANKS for the full author list)
@@ -63,21 +63,21 @@ TEST( GaussianJunctionTree, constructor2 )
   vector sep2; sep2 += ordering["x4"];
   vector sep3; sep3 += ordering["x2"];
   vector sep4; sep4 += ordering["x6"];
-  CHECK(assert_equal(frontal1, actual.root()->frontal));
-  CHECK(assert_equal(sep1, actual.root()->separator));
+  EXPECT(assert_equal(frontal1, actual.root()->frontal));
+  EXPECT(assert_equal(sep1, actual.root()->separator));
   LONGS_EQUAL(5, actual.root()->size());
   list::const_iterator child0it = actual.root()->children().begin();
   list::const_iterator child1it = child0it; ++child1it;
   GaussianJunctionTree::sharedClique child0 = *child0it;
   GaussianJunctionTree::sharedClique child1 = *child1it;
-  CHECK(assert_equal(frontal2, child0->frontal));
-  CHECK(assert_equal(sep2, child0->separator));
+  EXPECT(assert_equal(frontal2, child0->frontal));
+  EXPECT(assert_equal(sep2, child0->separator));
   LONGS_EQUAL(4, child0->size());
-  CHECK(assert_equal(frontal3, child0->children().front()->frontal));
-  CHECK(assert_equal(sep3, child0->children().front()->separator));
+  EXPECT(assert_equal(frontal3, child0->children().front()->frontal));
+  EXPECT(assert_equal(sep3, child0->children().front()->separator));
   LONGS_EQUAL(2, child0->children().front()->size());
-  CHECK(assert_equal(frontal4, child1->frontal));
-  CHECK(assert_equal(sep4, child1->separator));
+  EXPECT(assert_equal(frontal4, child1->frontal));
+  EXPECT(assert_equal(sep4, child1->separator));
   LONGS_EQUAL(2, child1->size());
 }
@@ -98,7 +98,7 @@ TEST( GaussianJunctionTree, optimizeMultiFrontal )
   Vector v = Vector_(2, 0., 0.);
   for (int i=1; i<=7; i++)
     expected[ordering[Symbol('x',i)]] = v;
-  CHECK(assert_equal(expected,actual));
+  EXPECT(assert_equal(expected,actual));
 }
 /* ************************************************************************* */
@@ -116,7 +116,7 @@ TEST( GaussianJunctionTree, optimizeMultiFrontal2)
   // verify
   VectorValues expected = createCorrectDelta(ordering); // expected solution
-  CHECK(assert_equal(expected,actual));
+  EXPECT(assert_equal(expected,actual));
 }
 /* ************************************************************************* */
@@ -181,7 +181,7 @@ TEST(GaussianJunctionTree, slamlike) {
   VectorValues delta = optimize(gbn);
   planarSLAM::Values expected = init.expmap(delta, ordering);
-  CHECK(assert_equal(expected, actual));
+  EXPECT(assert_equal(expected, actual));
 }
diff --git a/tests/testInference.cpp b/tests/testInference.cpp
index 76eec9b0d..4f4428af1 100644
--- a/tests/testInference.cpp
+++ b/tests/testInference.cpp
@@ -43,8 +43,11 @@ TEST(GaussianFactorGraph, createSmoother)
   // eliminate
   vector x3var; x3var.push_back(ordering["x3"]);
   vector x1var; x1var.push_back(ordering["x1"]);
-  GaussianBayesNet p_x3 = *GaussianSequentialSolver(*GaussianSequentialSolver(fg2).jointFactorGraph(x3var)).eliminate();
-  GaussianBayesNet p_x1 = *GaussianSequentialSolver(*GaussianSequentialSolver(fg2).jointFactorGraph(x1var)).eliminate();
+  // NOTE: fails when using LDL, works with QR
+  GaussianBayesNet p_x3 = *GaussianSequentialSolver(
+      *GaussianSequentialSolver(fg2).jointFactorGraph(x3var), true).eliminate();
+  GaussianBayesNet p_x1 = *GaussianSequentialSolver(
+      *GaussianSequentialSolver(fg2).jointFactorGraph(x1var), true).eliminate();
   CHECK(assert_equal(*p_x1.back(),*p_x3.front())); // should be the same because of symmetry
 }
@@ -55,7 +58,10 @@ TEST( Inference, marginals )
   // create and marginalize a small Bayes net on "x"
   GaussianBayesNet cbn = createSmallGaussianBayesNet();
   vector xvar; xvar.push_back(0);
-  GaussianBayesNet actual = *GaussianSequentialSolver(*GaussianSequentialSolver(GaussianFactorGraph(cbn)).jointFactorGraph(xvar)).eliminate();
+  // NOTE: fails when using LDL, works with QR
+  GaussianBayesNet actual = *GaussianSequentialSolver(
+      *GaussianSequentialSolver(
+          GaussianFactorGraph(cbn), true).jointFactorGraph(xvar), true).eliminate();
   // expected is just scalar Gaussian on x
   GaussianBayesNet expected = scalarGaussian(0, 4, sqrt(2));
@@ -86,7 +92,8 @@ TEST( Inference, marginals2)
   Ordering ordering(*fg.orderingCOLAMD(init));
   FactorGraph::shared_ptr gfg(fg.linearize(init, ordering));
-  GaussianMultifrontalSolver solver(*gfg);
+  // NOTE: fails when using LDL, works with QR
+  GaussianMultifrontalSolver solver(*gfg, true);
   solver.marginalFactor(ordering[PointKey(0)]);
 }
diff --git a/tests/testNonlinearISAM.cpp b/tests/testNonlinearISAM.cpp
index e8e0341c7..b37631d5f 100644
--- a/tests/testNonlinearISAM.cpp
+++ b/tests/testNonlinearISAM.cpp
@@ -29,6 +29,7 @@ TEST(testNonlinearISAM, markov_chain ) {
   Pose2 cur_pose; // start at origin
   Graph start_factors;
   start_factors.addPoseConstraint(key, cur_pose);
+  Values init;
   Values expected;
   init.insert(key, cur_pose);
@@ -59,7 +60,6 @@ TEST(testNonlinearISAM, markov_chain ) {
       EXPECT(assert_equal(expected, isam.getOrdering()));
     }
-
     cur_pose = cur_pose.compose(z);
     new_init.insert(key2, cur_pose.expmap(sampler.sample()));
     expected.insert(key2, cur_pose);
@@ -68,7 +68,7 @@ TEST(testNonlinearISAM, markov_chain ) {
   // verify values - all but the last one should be very close
   Values actual = isam.estimate();
-  for (size_t i=0; i<21; ++i) {
+  for (size_t i=0; i
 #include 
-//#include 
+#include 
 using namespace gtsam;
@@ -231,6 +231,24 @@ TEST( NonlinearOptimizer, SimpleGNOptimizer_noshared )
   DOUBLES_EQUAL(0,fg.error(*actual),tol);
 }
+/* ************************************************************************* */
+TEST( NonlinearOptimizer, optimization_method )
+{
+  example::Graph fg = example::createReallyNonlinearFactorGraph();
+
+  Point2 x0(3,3);
+  example::Values c0;
+  c0.insert(simulated2D::PoseKey(1), x0);
+
+  example::Values actualMFQR = optimize(
+      fg, c0, *NonlinearOptimizationParameters().newFactorization(true), MULTIFRONTAL, LM);
+  DOUBLES_EQUAL(0,fg.error(actualMFQR),tol);
+
+  example::Values actualMFLDL = optimize(
+      fg, c0, *NonlinearOptimizationParameters().newFactorization(false), MULTIFRONTAL, LM);
+  DOUBLES_EQUAL(0,fg.error(actualMFLDL),tol);
+}
+
 /* ************************************************************************* */
 TEST( NonlinearOptimizer, Factorization )
 {
diff --git a/gtsam/slam/tests/testPose2SLAMwSPCG.cpp b/tests/testPose2SLAMwSPCG.cpp
similarity index 100%
rename from gtsam/slam/tests/testPose2SLAMwSPCG.cpp
rename to tests/testPose2SLAMwSPCG.cpp
diff --git a/tests/testSerialization.cpp b/tests/testSerialization.cpp
index 827f8bc57..2a620fad9 100644
--- a/tests/testSerialization.cpp
+++ b/tests/testSerialization.cpp
@@ -19,7 +19,6 @@
 // Serialization testing code.
 /* ************************************************************************* */
-#include 
 #include 
 #include 
@@ -41,26 +40,44 @@
 // whether to print the serialized text to stdout
 const bool verbose = false;
+template
+std::string serialize(const T& input) {
+  std::ostringstream out_archive_stream;
+  boost::archive::text_oarchive out_archive(out_archive_stream);
+  out_archive << input;
+  return out_archive_stream.str();
+}
+
+template
+void deserialize(const std::string& serialized, T& output) {
+  std::istringstream in_archive_stream(serialized);
+  boost::archive::text_iarchive in_archive(in_archive_stream);
+  in_archive >> output;
+}
+
 // Templated round-trip serialization
 template
 void roundtrip(const T& input, T& output) {
   // Serialize
-  std::ostringstream out_archive_stream;
-  {
-    boost::archive::text_oarchive out_archive(out_archive_stream);
-    out_archive << input;
-  }
-
-  // Convert to string
-  std::string serialized = out_archive_stream.str();
+//  std::ostringstream out_archive_stream;
+//  {
+//    boost::archive::text_oarchive out_archive(out_archive_stream);
+//    out_archive << input;
+//  }
+//
+//  // Convert to string
+//  std::string serialized = out_archive_stream.str();
+  std::string serialized = serialize(input);
   if (verbose) std::cout << serialized << std::endl << std::endl;
-  // De-serialize
-  {
-    std::istringstream in_archive_stream(serialized);
-    boost::archive::text_iarchive in_archive(in_archive_stream);
-    in_archive >> output;
-  }
+  deserialize(serialized, output);
+
+//  // De-serialize
+//  {
+//    std::istringstream in_archive_stream(serialized);
+//    boost::archive::text_iarchive in_archive(in_archive_stream);
+//    in_archive >> output;
+//  }
 }
@@ -144,20 +161,19 @@ bool equalsDereferencedXML(const T& input = T()) {
 #define GTSAM_MAGIC_KEY
-#include 
-#include 
-#include 
-#include 
 #include 
 #include 
-#include 
-#include 
-#include 
-
+#include 
+#include 
+#include 
+#include 
+#include 
+#include 
 #include 
 #include 
 #include 
-#include 
+
+#include 
 #include 
@@ -309,12 +325,6 @@ TEST (Serialization, SharedDiagonal_noiseModels) {
 // Linear components
 /* ************************************************************************* */
-#include 
-#include 
-#include 
-#include 
-#include 
-
 /* ************************************************************************* */
 TEST (Serialization, linear_factors) {
   vector dims;
@@ -390,7 +400,6 @@ TEST (Serialization, symbolic_graph) {
 TEST (Serialization, symbolic_bn) {
   Ordering o; o += "x2","l1","x1";
-  // create expected Chordal bayes Net
   IndexConditional::shared_ptr x2(new IndexConditional(o["x2"], o["l1"], o["x1"]));
   IndexConditional::shared_ptr l1(new IndexConditional(o["l1"], o["x1"]));
   IndexConditional::shared_ptr x1(new IndexConditional(o["x1"]));
@@ -404,9 +413,6 @@ TEST (Serialization, symbolic_bn) {
   EXPECT(equalsXML(sbn));
 }
-#include 
-#include 
-
 /* ************************************************************************* */
 TEST (Serialization, symbolic_bayes_tree ) {
   typedef BayesTree SymbolicBayesTree;
@@ -421,31 +427,28 @@ TEST (Serialization, symbolic_bayes_tree ) {
   // Bayes Tree for Asia example
   SymbolicBayesTree bayesTree;
-  bayesTree.insert(B);
-  bayesTree.insert(L);
-  bayesTree.insert(E);
-  bayesTree.insert(S);
-  bayesTree.insert(T);
-  bayesTree.insert(X);
+  SymbolicBayesTree::insert(bayesTree, B);
+  SymbolicBayesTree::insert(bayesTree, L);
+  SymbolicBayesTree::insert(bayesTree, E);
+  SymbolicBayesTree::insert(bayesTree, S);
+  SymbolicBayesTree::insert(bayesTree, T);
+  SymbolicBayesTree::insert(bayesTree, X);
   EXPECT(equalsObj(bayesTree));
   EXPECT(equalsXML(bayesTree));
 }
-#include 
-#include 
-
 /* ************************************************************************* */
 TEST (Serialization, gaussianISAM) {
   using namespace example;
-  Ordering ordering; 
+  Ordering ordering;
   GaussianFactorGraph smoother;
   boost::tie(smoother, ordering) = createSmoother(7);
-  GaussianBayesNet chordalBayesNet = *GaussianSequentialSolver(smoother).eliminate();
-  GaussianISAM bayesTree(chordalBayesNet);
+  BayesTree bayesTree = *GaussianMultifrontalSolver(smoother).eliminate();
+  GaussianISAM isam(bayesTree);
-  EXPECT(equalsObj(bayesTree));
-  EXPECT(equalsXML(bayesTree));
+  EXPECT(equalsObj(isam));
+  EXPECT(equalsXML(isam));
 }
 /* ************************************************************************* */
diff --git a/tests/timeMultifrontalOnDataset.cpp b/tests/timeMultifrontalOnDataset.cpp
index 423b48a24..a2c446409 100644
--- a/tests/timeMultifrontalOnDataset.cpp
+++ b/tests/timeMultifrontalOnDataset.cpp
@@ -55,7 +55,8 @@ int main(int argc, char *argv[]) {
   toc_(2, "linearize");
   tictoc_print_();
-  for(size_t trial = 0; trial < 10; ++trial) {
+  const size_t nrTrials = 10;
+  for(size_t trial = 0; trial < nrTrials; ++trial) {
     tic_(3, "solve");
     tic(1, "construct solver");