diff --git a/gtsam/linear/GaussianBayesNet.cpp b/gtsam/linear/GaussianBayesNet.cpp index e05fbe45c..13a3d4e0e 100644 --- a/gtsam/linear/GaussianBayesNet.cpp +++ b/gtsam/linear/GaussianBayesNet.cpp @@ -83,33 +83,14 @@ boost::shared_ptr allocateVectorValues(const GaussianBayesNet& bn) /* ************************************************************************* */ VectorValues optimize(const GaussianBayesNet& bn) { - return *optimize_(bn); -} - -/* ************************************************************************* */ -boost::shared_ptr optimize_(const GaussianBayesNet& bn) -{ - // get the RHS as a VectorValues to initialize system - boost::shared_ptr result(new VectorValues(rhs(bn))); - - /** solve each node in turn in topological sort order (parents first)*/ - BOOST_REVERSE_FOREACH(GaussianConditional::shared_ptr cg, bn) - cg->solveInPlace(*result); // solve and store solution in same step - - return result; -} - -/* ************************************************************************* */ -VectorValues backSubstitute(const GaussianBayesNet& bn, const VectorValues& y) { - VectorValues x(y); - backSubstituteInPlace(bn,x); + VectorValues x = *allocateVectorValues(bn); + optimizeInPlace(bn, x); return x; } /* ************************************************************************* */ // (R*x)./sigmas = y by solving x=inv(R)*(y.*sigmas) -void backSubstituteInPlace(const GaussianBayesNet& bn, VectorValues& y) { - VectorValues& x = y; +void optimizeInPlace(const GaussianBayesNet& bn, VectorValues& x) { /** solve each node in turn in topological sort order (parents first)*/ BOOST_REVERSE_FOREACH(const boost::shared_ptr cg, bn) { // i^th part of R*x=y, x=inv(R)*y @@ -194,15 +175,6 @@ pair matrix(const GaussianBayesNet& bn) { return make_pair(R,d); } -/* ************************************************************************* */ -VectorValues rhs(const GaussianBayesNet& bn) { - boost::shared_ptr result(allocateVectorValues(bn)); - BOOST_FOREACH(boost::shared_ptr cg,bn) - cg->rhs(*result); - - return *result; -} - /* ************************************************************************* */ double determinant(const GaussianBayesNet& bayesNet) { double logDet = 0.0; diff --git a/gtsam/linear/GaussianBayesNet.h b/gtsam/linear/GaussianBayesNet.h index 455536a4f..dda579741 100644 --- a/gtsam/linear/GaussianBayesNet.h +++ b/gtsam/linear/GaussianBayesNet.h @@ -55,26 +55,19 @@ namespace gtsam { boost::shared_ptr allocateVectorValues(const GaussianBayesNet& bn); /** - * optimize, i.e. return x = inv(R)*d + * Solve the GaussianBayesNet, i.e. return \f$ x = R^{-1}*d \f$, computed by + * back-substitution. */ - VectorValues optimize(const GaussianBayesNet&); + VectorValues optimize(const GaussianBayesNet& bn); - /** - * shared pointer version - */ - boost::shared_ptr optimize_(const GaussianBayesNet& bn); - - /** - * Backsubstitute - * (R*x)./sigmas = y by solving x=inv(R)*(y.*sigmas) - * @param y is the RHS of the system - */ - VectorValues backSubstitute(const GaussianBayesNet& bn, const VectorValues& y); - - /** - * Backsubstitute in place, y starts as RHS and is replaced with solution - */ - void backSubstituteInPlace(const GaussianBayesNet& bn, VectorValues& y); + /** + * Solve the GaussianBayesNet, i.e. compute \f$ x = R^{-1}*d \f$ by + * back-substitution, writing the solution \f$ x \f$ into a pre-allocated + * VectorValues. You can use allocateVectorValues(const GaussianBayesNet&) + * to allocate it. See also optimize(const GaussianBayesNet&), which does not + * require pre-allocation.
+ */ + void optimizeInPlace(const GaussianBayesNet& bn, VectorValues& x); /** * Transpose Backsubstitute @@ -91,12 +84,6 @@ namespace gtsam { */ std::pair matrix(const GaussianBayesNet&); - /** - * Return RHS d as a VectorValues - * Such that backSubstitute(bn,d) = optimize(bn) - */ - VectorValues rhs(const GaussianBayesNet&); - /** * Computes the determinant of a GassianBayesNet * A GaussianBayesNet is an upper triangular matrix and for an upper triangular matrix diff --git a/gtsam/linear/GaussianBayesTree-inl.h b/gtsam/linear/GaussianBayesTree-inl.h new file mode 100644 index 000000000..1b936b638 --- /dev/null +++ b/gtsam/linear/GaussianBayesTree-inl.h @@ -0,0 +1,55 @@ +/* ---------------------------------------------------------------------------- + + * GTSAM Copyright 2010, Georgia Tech Research Corporation, + * Atlanta, Georgia 30332-0415 + * All Rights Reserved + * Authors: Frank Dellaert, et al. (see THANKS for the full author list) + + * See LICENSE for the license information + + * -------------------------------------------------------------------------- */ + +/** + * @file GaussianBayesTree.cpp + * @brief Gaussian Bayes Tree, the result of eliminating a GaussianJunctionTree + * @brief GaussianBayesTree + * @author Frank Dellaert + * @author Richard Roberts + */ + +#pragma once + +#include + +#include // Only to help Eclipse + +namespace gtsam { + +/* ************************************************************************* */ +namespace internal { +template +inline static void optimizeInPlace(const boost::shared_ptr& clique, VectorValues& result) { + // parents are assumed to already be solved and available in result + clique->conditional()->solveInPlace(result); + + // starting from the root, call optimize on each conditional + BOOST_FOREACH(const boost::shared_ptr& child, clique->children_) + optimizeInPlace(child, result); +} +} + +/* ************************************************************************* */ +template +VectorValues optimize(const BayesTree& bayesTree) { + VectorValues result = *allocateVectorValues(bayesTree); + internal::optimizeInPlace(bayesTree.root(), result); + return result; +} + +/* ************************************************************************* */ +template +void optimizeInPlace(const BayesTree& bayesTree, VectorValues& result) { + internal::optimizeInPlace(bayesTree.root(), result); +} + +} diff --git a/gtsam/linear/GaussianBayesTree.h b/gtsam/linear/GaussianBayesTree.h new file mode 100644 index 000000000..227cc94f9 --- /dev/null +++ b/gtsam/linear/GaussianBayesTree.h @@ -0,0 +1,40 @@ +/* ---------------------------------------------------------------------------- + + * GTSAM Copyright 2010, Georgia Tech Research Corporation, + * Atlanta, Georgia 30332-0415 + * All Rights Reserved + * Authors: Frank Dellaert, et al. 
(see THANKS for the full author list) + + * See LICENSE for the license information + + * -------------------------------------------------------------------------- */ + +/** + * @file GaussianBayesTree.h + * @brief Gaussian Bayes Tree, the result of eliminating a GaussianJunctionTree + * @brief GaussianBayesTree + * @author Frank Dellaert + * @author Richard Roberts + */ + +#pragma once + +#include +#include + +namespace gtsam { + +typedef BayesTree GaussianBayesTree; + +/// optimize the BayesTree, starting from the root +template +VectorValues optimize(const BayesTree& bayesTree); + +/// recursively optimize this conditional and all subtrees +template +void optimizeInPlace(const BayesTree& clique, VectorValues& result); + +} + +#include + diff --git a/gtsam/linear/GaussianConditional.cpp b/gtsam/linear/GaussianConditional.cpp index 67eb9af34..233dc4b34 100644 --- a/gtsam/linear/GaussianConditional.cpp +++ b/gtsam/linear/GaussianConditional.cpp @@ -31,19 +31,19 @@ namespace gtsam { // Helper function used only in this file - extracts vectors with variable indices // in the first and last iterators, and concatenates them in that order into the // output. -template -static Vector extractVectorValuesSlices(const VectorValues& values, ITERATOR first, ITERATOR last) { +template +static Vector extractVectorValuesSlices(const VALUES& values, ITERATOR first, ITERATOR last) { // Find total dimensionality int dim = 0; for(ITERATOR j = first; j != last; ++j) - dim += values.dim(*j); + dim += values[*j].rows(); // Copy vectors Vector ret(dim); int varStart = 0; for(ITERATOR j = first; j != last; ++j) { - ret.segment(varStart, values.dim(*j)) = values[*j]; - varStart += values.dim(*j); + ret.segment(varStart, values[*j].rows()) = values[*j]; + varStart += values[*j].rows(); } return ret; } @@ -52,13 +52,13 @@ static Vector extractVectorValuesSlices(const VectorValues& values, ITERATOR fir // Helper function used only in this file - writes to the variables in values // with indices iterated over by first and last, interpreting vector as the // concatenated vectors to write. 
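The templated helpers above replace the VectorValues-only versions so that one gather/scatter implementation serves both a plain VectorValues and the permuted view used elsewhere in the solver. As an illustration only, here is a stand-alone sketch of that pattern (not GTSAM code: the std::map container and the names extractSlices/writeSlices are hypothetical stand-ins for VectorValues/Permuted and the helpers above):

// Stand-alone sketch of the gather/scatter pattern implemented by
// extractVectorValuesSlices / writeVectorValuesSlices.  Any VALUES type that is
// indexable by variable index and whose entries report their dimension via
// rows() can be used the same way.
#include <cassert>
#include <cstddef>
#include <map>
#include <Eigen/Dense>

typedef std::map<std::size_t, Eigen::VectorXd> Values;  // stand-in for VectorValues

// Gather: concatenate the blocks for the variable indices in [first, last).
template<class VALUES, class ITERATOR>
Eigen::VectorXd extractSlices(const VALUES& values, ITERATOR first, ITERATOR last) {
  int dim = 0;
  for (ITERATOR j = first; j != last; ++j) dim += values.at(*j).rows();
  Eigen::VectorXd concatenated(dim);
  int varStart = 0;
  for (ITERATOR j = first; j != last; ++j) {
    concatenated.segment(varStart, values.at(*j).rows()) = values.at(*j);
    varStart += values.at(*j).rows();
  }
  return concatenated;
}

// Scatter: write a concatenated vector back into per-variable blocks, which are
// assumed to be pre-allocated with the correct dimensions.
template<class VALUES, class ITERATOR>
void writeSlices(const Eigen::VectorXd& concatenated, VALUES& values, ITERATOR first, ITERATOR last) {
  int varStart = 0;
  for (ITERATOR j = first; j != last; ++j) {
    values[*j] = concatenated.segment(varStart, values[*j].rows());
    varStart += values[*j].rows();
  }
  assert(varStart == concatenated.rows());
}

int main() {
  Values vals;
  vals[0] = Eigen::Vector2d(1.0, 2.0);
  vals[1] = Eigen::Vector3d(3.0, 4.0, 5.0);
  const std::size_t idx[] = {0, 1};
  Eigen::VectorXd stacked = extractSlices(vals, idx, idx + 2);      // [1 2 3 4 5]
  writeSlices(Eigen::VectorXd(2.0 * stacked), vals, idx, idx + 2);  // scatter back
  return 0;
}

Because the body only relies on indexing and rows(), the same template instantiates for both container types, which is what lets the doSolveInPlace helper below share one implementation between the plain and permuted overloads of solveInPlace.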
-template -static void writeVectorValuesSlices(const VECTOR& vector, VectorValues& values, ITERATOR first, ITERATOR last) { +template +static void writeVectorValuesSlices(const VECTOR& vector, VALUES& values, ITERATOR first, ITERATOR last) { // Copy vectors int varStart = 0; for(ITERATOR j = first; j != last; ++j) { - values[*j] = vector.segment(varStart, values.dim(*j)); - varStart += values.dim(*j); + values[*j] = vector.segment(varStart, values[*j].rows()); + varStart += values[*j].rows(); } assert(varStart == vector.rows()); } @@ -221,78 +221,34 @@ JacobianFactor::shared_ptr GaussianConditional::toFactor() const { } /* ************************************************************************* */ -void GaussianConditional::rhs(VectorValues& x) const { - writeVectorValuesSlices(get_d(), x, beginFrontals(), endFrontals()); -} +template +inline static void doSolveInPlace(const GaussianConditional& conditional, VALUES& x) { -/* ************************************************************************* */ -void GaussianConditional::rhs(Permuted& x) const { - // Copy the rhs into x, accounting for the permutation - Vector d = get_d(); - size_t rhsPosition = 0; // We walk through the rhs by variable - for(const_iterator j = beginFrontals(); j != endFrontals(); ++j) { - // Get the segment of the rhs for this variable - x[*j] = d.segment(rhsPosition, this->dim(j)); - // Increment the position - rhsPosition += this->dim(j); + // Helper function to solve-in-place on a VectorValues or Permuted, + // called by GaussianConditional::solveInPlace(VectorValues&) and by + // GaussianConditional::solveInPlace(Permuted&). + + static const bool debug = false; + if(debug) conditional.print("Solving conditional in place"); + Vector xS = extractVectorValuesSlices(x, conditional.beginParents(), conditional.endParents()); + xS = conditional.get_d() - conditional.get_S() * xS; + Vector soln = conditional.permutation().transpose() * + conditional.get_R().triangularView().solve(xS); + if(debug) { + gtsam::print(Matrix(conditional.get_R()), "Calling backSubstituteUpper on "); + gtsam::print(soln, "full back-substitution solution: "); } + writeVectorValuesSlices(soln, x, conditional.beginFrontals(), conditional.endFrontals()); } /* ************************************************************************* */ void GaussianConditional::solveInPlace(VectorValues& x) const { - static const bool debug = false; - if(debug) print("Solving conditional in place"); - Vector rhs = extractVectorValuesSlices(x, beginFrontals(), endFrontals()); - for (const_iterator parent = beginParents(); parent != endParents(); ++parent) { - rhs += -get_S(parent) * x[*parent]; - } - Vector soln = permutation_.transpose() * get_R().triangularView().solve(rhs); - if(debug) { - gtsam::print(Matrix(get_R()), "Calling backSubstituteUpper on "); - gtsam::print(rhs, "rhs: "); - gtsam::print(soln, "full back-substitution solution: "); - } - writeVectorValuesSlices(soln, x, beginFrontals(), endFrontals()); + doSolveInPlace(*this, x); // Call helper version above } /* ************************************************************************* */ void GaussianConditional::solveInPlace(Permuted& x) const { - static const bool debug = false; - if(debug) print("Solving conditional in place (permuted)"); - // Extract RHS from values - inlined from VectorValues - size_t s = 0; - for (const_iterator it=beginFrontals(); it!=endFrontals(); ++it) - s += x[*it].size(); - Vector rhs(s); size_t start = 0; - for (const_iterator it=beginFrontals(); it!=endFrontals(); ++it) 
{ - SubVector v = x[*it]; - const size_t d = v.size(); - rhs.segment(start, d) = v; - start += d; - } - - // apply parents to rhs - for (const_iterator parent = beginParents(); parent != endParents(); ++parent) { - rhs += -get_S(parent) * x[*parent]; - } - - // solve system - backsubstitution - Vector soln = permutation_.transpose() * get_R().triangularView().solve(rhs); - - // apply solution: inlined manually due to permutation - size_t solnStart = 0; - for (const_iterator frontal = beginFrontals(); frontal != endFrontals(); ++frontal) { - const size_t d = this->dim(frontal); - x[*frontal] = soln.segment(solnStart, d); - solnStart += d; - } -} - -/* ************************************************************************* */ -VectorValues GaussianConditional::solve(const VectorValues& x) const { - VectorValues result = x; - solveInPlace(result); - return result; + doSolveInPlace(*this, x); // Call helper version above } /* ************************************************************************* */ diff --git a/gtsam/linear/GaussianConditional.h b/gtsam/linear/GaussianConditional.h index 7eb69b15b..248ac7a9e 100644 --- a/gtsam/linear/GaussianConditional.h +++ b/gtsam/linear/GaussianConditional.h @@ -197,46 +197,39 @@ public: */ boost::shared_ptr toFactor() const; - /** - * Adds the RHS to a given VectorValues for use in solve() functions. - * @param x is the values to be updated, assumed allocated - */ - void rhs(VectorValues& x) const; - /** - * Adds the RHS to a given VectorValues for use in solve() functions. - * @param x is the values to be updated, assumed allocated - */ - void rhs(Permuted& x) const; - - /** - * solves a conditional Gaussian and stores the result in x + * Solves a conditional Gaussian and writes the solution into the entries of + * \c x for each frontal variable of the conditional. The parents are + * assumed to have already been solved, and their values are read from \c x. * This function works for multiple frontal variables. - * NOTE: assumes that the RHS for the frontals is stored in x, and - * then replaces the RHS with the partial result for this conditional, - * assuming that parents have been solved already. * - * @param x values structure with solved parents, and the RHS for this conditional - * @return solution \f$ x = R \ (d - Sy - Tz - ...) \f$ for each frontal variable + * Given the Gaussian conditional with log likelihood \f$ |R x_f - (d - S x_s)|^2 \f$, + * where \f$ f \f$ are the frontal variables and \f$ s \f$ are the separator + * variables of this conditional, this solve function computes + * \f$ x_f = R^{-1} (d - S x_s) \f$ using back-substitution. + * + * @param x VectorValues structure with solved parents \f$ x_s \f$, and into which the + * solution \f$ x_f \f$ will be written. */ void solveInPlace(VectorValues& x) const; /** - * solves a conditional Gaussian and stores the result in x - * Identical to solveInPlace() above, with a permuted x + * Solves a conditional Gaussian and writes the solution into the entries of + * \c x for each frontal variable of the conditional (version for permuted + * VectorValues). The parents are assumed to have already been solved, + * and their values are read from \c x. This function works for multiple + * frontal variables. + * + * Given the Gaussian conditional with log likelihood \f$ |R x_f - (d - S x_s)|^2 \f$, + * where \f$ f \f$ are the frontal variables and \f$ s \f$ are the separator + * variables of this conditional, this solve function computes + * \f$ x_f = R^{-1} (d - S x_s) \f$ using back-substitution.
+ * + * @param x VectorValues structure with solved parents \f$ x_s \f$, and into which the + * solution \f$ x_f \f$ will be written. */ void solveInPlace(Permuted& x) const; - /** - * Solves a conditional Gaussian and returns a new VectorValues - * This function works for multiple frontal variables, but should - * only be used for testing as it copies the input vector values - * - * Assumes, as in solveInPlace, that the RHS has been stored in x - * for all frontal variables - */ - VectorValues solve(const VectorValues& x) const; - // functions for transpose backsubstitution /** diff --git a/gtsam/linear/GaussianDensity.cpp b/gtsam/linear/GaussianDensity.cpp index 72e3bb325..5300d594f 100644 --- a/gtsam/linear/GaussianDensity.cpp +++ b/gtsam/linear/GaussianDensity.cpp @@ -42,7 +42,6 @@ namespace gtsam { Index k = firstFrontalKey(); // a VectorValues that only has a value for k: cannot be printed if k<>0 x.insert(k, Vector(sigmas_.size())); - rhs(x); solveInPlace(x); return x[k]; } diff --git a/gtsam/linear/GaussianISAM.cpp b/gtsam/linear/GaussianISAM.cpp index a9aa2479c..4402d8b05 100644 --- a/gtsam/linear/GaussianISAM.cpp +++ b/gtsam/linear/GaussianISAM.cpp @@ -17,6 +17,7 @@ #include #include +#include #include @@ -50,50 +51,14 @@ Matrix GaussianISAM::marginalCovariance(Index j) const { return Super::jointBayesNet(key1, key2, &EliminateQR); } -/* ************************************************************************* */ -void optimize(const BayesTree::sharedClique& clique, VectorValues& result) { - // parents are assumed to already be solved and available in result - // RHS for current conditional should already be in place in result - clique->conditional()->solveInPlace(result); - - BOOST_FOREACH(const BayesTree::sharedClique& child, clique->children_) - optimize(child, result); -} - -/* ************************************************************************* */ -void treeRHS(const BayesTree::sharedClique& clique, VectorValues& result) { - clique->conditional()->rhs(result); - BOOST_FOREACH(const BayesTree::sharedClique& child, clique->children_) - treeRHS(child, result); -} - -/* ************************************************************************* */ -VectorValues rhs(const BayesTree& bayesTree, boost::optional dims) { - VectorValues result; - if(dims) - result = VectorValues(*dims); - else - result = *allocateVectorValues(bayesTree); // allocate - treeRHS(bayesTree.root(), result); // recursively fill - return result; -} - /* ************************************************************************* */ VectorValues optimize(const GaussianISAM& isam) { - VectorValues result = rhs(isam, isam.dims_); - // starting from the root, call optimize on each conditional - optimize(isam.root(), result); + VectorValues result(isam.dims_); + // Call optimize for BayesTree + optimizeInPlace((const BayesTree&)isam, result); return result; } -/* ************************************************************************* */ -VectorValues optimize(const BayesTree& bayesTree) { - VectorValues result = rhs(bayesTree); - // starting from the root, call optimize on each conditional - optimize(bayesTree.root(), result); - return result; -} - /* ************************************************************************* */ BayesNet GaussianISAM::shortcut(sharedClique clique, sharedClique root) { return clique->shortcut(root,&EliminateQR); diff --git a/gtsam/linear/GaussianISAM.h b/gtsam/linear/GaussianISAM.h index a06c50657..af96d5317 100644 --- a/gtsam/linear/GaussianISAM.h +++ 
b/gtsam/linear/GaussianISAM.h @@ -90,19 +90,7 @@ public: }; // \class GaussianISAM -/** load a VectorValues with the RHS of the system for backsubstitution */ -VectorValues rhs(const BayesTree& bayesTree, boost::optional dims = boost::none); - -/** recursively load RHS for system */ -void treeRHS(const BayesTree::sharedClique& clique, VectorValues& result); - -// recursively optimize this conditional and all subtrees -void optimize(const BayesTree::sharedClique& clique, VectorValues& result); - // optimize the BayesTree, starting from the root VectorValues optimize(const GaussianISAM& isam); -// optimize the BayesTree, starting from the root -VectorValues optimize(const BayesTree& bayesTree); - } // \namespace gtsam diff --git a/gtsam/linear/GaussianJunctionTree.cpp b/gtsam/linear/GaussianJunctionTree.cpp index e2e1c2951..2c2a44dbb 100644 --- a/gtsam/linear/GaussianJunctionTree.cpp +++ b/gtsam/linear/GaussianJunctionTree.cpp @@ -20,6 +20,7 @@ #include #include #include +#include #include @@ -33,32 +34,6 @@ namespace gtsam { using namespace std; - /* ************************************************************************* */ - void GaussianJunctionTree::btreeBackSubstitute(const BTClique::shared_ptr& current, VectorValues& config) const { - // solve the bayes net in the current node - current->conditional()->solveInPlace(config); - - // GaussianBayesNet::const_reverse_iterator it = current->rbegin(); -// for (; it!=current->rend(); ++it) { -// (*it)->solveInPlace(config); // solve and store result -// -//// Vector x = (*it)->solve(config); // Solve for that variable -//// config[(*it)->key()] = x; // store result in partial solution -// } - - // solve the bayes nets in the child nodes - BOOST_FOREACH(const BTClique::shared_ptr& child, current->children()) { - btreeBackSubstitute(child, config); - } - } - - /* ************************************************************************* */ - void GaussianJunctionTree::btreeRHS(const BTClique::shared_ptr& current, VectorValues& config) const { - current->conditional()->rhs(config); - BOOST_FOREACH(const BTClique::shared_ptr& child, current->children()) - btreeRHS(child, config); - } - /* ************************************************************************* */ VectorValues GaussianJunctionTree::optimize(Eliminate function) const { tic(1, "GJT eliminate"); @@ -71,12 +46,11 @@ namespace gtsam { vector dims(rootClique->conditional()->back()+1, 0); countDims(rootClique, dims); VectorValues result(dims); - btreeRHS(rootClique, result); toc(2, "allocate VectorValues"); // back-substitution tic(3, "back-substitute"); - btreeBackSubstitute(rootClique, result); + internal::optimizeInPlace(rootClique, result); toc(3, "back-substitute"); return result; } diff --git a/gtsam/linear/GaussianJunctionTree.h b/gtsam/linear/GaussianJunctionTree.h index be98669f1..31a0a1680 100644 --- a/gtsam/linear/GaussianJunctionTree.h +++ b/gtsam/linear/GaussianJunctionTree.h @@ -45,13 +45,6 @@ namespace gtsam { typedef Base::sharedClique sharedClique; typedef GaussianFactorGraph::Eliminate Eliminate; - protected: - // back-substitute in topological sort order (parents first) - void btreeBackSubstitute(const BTClique::shared_ptr& current, VectorValues& config) const; - - // find the RHS for the system in order to perform backsubstitution - void btreeRHS(const BTClique::shared_ptr& current, VectorValues& config) const; - public : /** Default constructor */ diff --git a/gtsam/linear/GaussianSequentialSolver.cpp b/gtsam/linear/GaussianSequentialSolver.cpp index 
bfff98e0f..42bce03ce 100644 --- a/gtsam/linear/GaussianSequentialSolver.cpp +++ b/gtsam/linear/GaussianSequentialSolver.cpp @@ -78,7 +78,8 @@ VectorValues::shared_ptr GaussianSequentialSolver::optimize() const { tic(2,"optimize"); // Back-substitute - VectorValues::shared_ptr solution(gtsam::optimize_(*bayesNet)); + VectorValues::shared_ptr solution( + new VectorValues(gtsam::optimize(*bayesNet))); toc(2,"optimize"); if(debug) solution->print("GaussianSequentialSolver, solution "); diff --git a/gtsam/linear/tests/testGaussianConditional.cpp b/gtsam/linear/tests/testGaussianConditional.cpp index 9bac18da0..b19a1fda5 100644 --- a/gtsam/linear/tests/testGaussianConditional.cpp +++ b/gtsam/linear/tests/testGaussianConditional.cpp @@ -144,48 +144,6 @@ TEST( GaussianConditional, equals ) EXPECT( expected.equals(actual) ); } -/* ************************************************************************* */ -TEST( GaussianConditional, rhs_permuted ) -{ - // Test filling the rhs when the VectorValues is permuted - - // Create a VectorValues - VectorValues unpermuted(5, 2); - unpermuted[0] << 1, 2; - unpermuted[1] << 3, 4; - unpermuted[2] << 5, 6; - unpermuted[3] << 7, 8; - unpermuted[4] << 9, 10; - - // Create a permutation - Permutation permutation(5); - permutation[0] = 4; - permutation[1] = 3; - permutation[2] = 2; - permutation[3] = 1; - permutation[4] = 0; - - // Permuted VectorValues - Permuted permuted(permutation, unpermuted); - - // Expected VectorValues - VectorValues expected(5, 2); - expected[0] << 1, 2; - expected[1] << 3, 4; - expected[2] << 5, 6; - expected[3] << 7, 8; - expected[4] << 11, 12; - - // GaussianConditional - Vector d(2); d << 11, 12; - GaussianConditional conditional(0, d, Matrix::Identity(2,2), Vector::Ones(2)); - - // Fill rhs, conditional is on index 0, which should fill slot 4 of the values - conditional.rhs(permuted); - - EXPECT(assert_equal(expected, unpermuted)); -} - /* ************************************************************************* */ TEST( GaussianConditional, solve ) { @@ -208,8 +166,7 @@ TEST( GaussianConditional, solve ) Vector tau = ones(2); - // RHS is different than the one in the solution vector - GaussianConditional cg(_x_,ones(2), R, _x1_, A1, _l1_, A2, tau); + GaussianConditional cg(_x_, d, R, _x1_, A1, _l1_, A2, tau); Vector sx1(2); sx1(0) = 1.0; sx1(1) = 1.0; @@ -218,21 +175,16 @@ TEST( GaussianConditional, solve ) sl1(0) = 1.0; sl1(1) = 1.0; VectorValues solution(vector(3, 2)); - solution[_x_] = d; // RHS + solution[_x_] = d; solution[_x1_] = sx1; // parents solution[_l1_] = sl1; - // NOTE: the solve functions assume the RHS is passed as the initialization of - // the solution. 
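For reference, the step these solve tests exercise is an ordinary triangular solve, x_f = R^{-1} (d - S x_s). A minimal Eigen-only sketch of that back-substitution, with made-up 2x2 blocks rather than the test fixtures:

// Minimal Eigen-only sketch of the back-substitution performed by
// GaussianConditional::solveInPlace: solve R * xf = d - S * xs for the frontal
// block xf, given already-solved parent values xs.  All numbers below are
// illustrative only.
#include <iostream>
#include <Eigen/Dense>

int main() {
  Eigen::Matrix2d R;            // upper-triangular frontal block of the conditional
  R << 1.0, 2.0,
       0.0, 3.0;
  Eigen::Matrix2d S;            // separator (parent) block
  S << 3.0, 0.0,
       0.0, 4.0;
  Eigen::Vector2d d(0.1, 0.2);  // right-hand side stored in the conditional
  Eigen::Vector2d xs(1.0, 1.0); // parent values, assumed already solved

  // Back-substitution: xf = R^{-1} (d - S * xs)
  Eigen::Vector2d xf = R.triangularView<Eigen::Upper>().solve(d - S * xs);
  std::cout << "xf = " << xf.transpose() << std::endl;
  return 0;
}

Consistent with this, the updated tests only seed the parent entries of the VectorValues; the frontal slots no longer need to hold the RHS before calling solveInPlace.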
VectorValues expected(vector(3, 2)); expected[_x_] = expectedX; expected[_x1_] = sx1; expected[_l1_] = sl1; - - VectorValues copy_result = cg.solve(solution); cg.solveInPlace(solution); - EXPECT(assert_equal(expected, copy_result, 0.0001)); EXPECT(assert_equal(expected, solution, 0.0001)); } @@ -240,12 +192,11 @@ TEST( GaussianConditional, solve ) TEST( GaussianConditional, solve_simple ) { // no pivoting from LDL, so R matrix is not permuted - // RHS is deliberately not the same as below Matrix full_matrix = Matrix_(4, 7, - 1.0, 0.0, 2.0, 0.0, 3.0, 0.0, 0.0, - 0.0, 1.0, 0.0, 2.0, 0.0, 3.0, 0.0, - 0.0, 0.0, 3.0, 0.0, 4.0, 0.0, 0.0, - 0.0, 0.0, 0.0, 3.0, 0.0, 4.0, 0.0); + 1.0, 0.0, 2.0, 0.0, 3.0, 0.0, 0.1, + 0.0, 1.0, 0.0, 2.0, 0.0, 3.0, 0.2, + 0.0, 0.0, 3.0, 0.0, 4.0, 0.0, 0.3, + 0.0, 0.0, 0.0, 3.0, 0.0, 4.0, 0.4); // solve system as a non-multifrontal version first // 2 variables, frontal has dim=4 @@ -261,7 +212,6 @@ TEST( GaussianConditional, solve_simple ) // elimination order; _x_, _x1_ vector vdim; vdim += 4, 2; VectorValues actual(vdim); - actual[_x_] = Vector_(4, 0.1, 0.2, 0.3, 0.4); // d actual[_x1_] = sx1; // parent VectorValues expected(vdim); @@ -283,10 +233,10 @@ TEST( GaussianConditional, solve_multifrontal ) // create full system, 3 variables, 2 frontals, all 2 dim // no pivoting from LDL, so R matrix is not permuted Matrix full_matrix = Matrix_(4, 7, - 1.0, 0.0, 2.0, 0.0, 3.0, 0.0, 0.5, - 0.0, 1.0, 0.0, 2.0, 0.0, 3.0, 0.6, - 0.0, 0.0, 3.0, 0.0, 4.0, 0.0, 0.7, - 0.0, 0.0, 0.0, 3.0, 0.0, 4.0, 0.8); + 1.0, 0.0, 2.0, 0.0, 3.0, 0.0, 0.1, + 0.0, 1.0, 0.0, 2.0, 0.0, 3.0, 0.2, + 0.0, 0.0, 3.0, 0.0, 4.0, 0.0, 0.3, + 0.0, 0.0, 0.0, 3.0, 0.0, 4.0, 0.4); // 3 variables, all dim=2 vector dims; dims += 2, 2, 2, 1; @@ -295,15 +245,13 @@ TEST( GaussianConditional, solve_multifrontal ) vector cgdims; cgdims += _x_, _x1_, _l1_; GaussianConditional cg(cgdims.begin(), cgdims.end(), 2, matrices, sigmas); - EXPECT(assert_equal(Vector_(4, 0.5, 0.6, 0.7, 0.8), cg.get_d())); + EXPECT(assert_equal(Vector_(4, 0.1, 0.2, 0.3, 0.4), cg.get_d())); // partial solution Vector sl1 = Vector_(2, 9.0, 10.0); // elimination order; _x_, _x1_, _l1_ VectorValues actual(vector(3, 2)); - actual[_x_] = Vector_(2, 0.1, 0.2); // rhs - actual[_x1_] = Vector_(2, 0.3, 0.4); // rhs actual[_l1_] = sl1; // parent VectorValues expected(vector(3, 2)); @@ -327,10 +275,10 @@ TEST( GaussianConditional, solve_multifrontal_permuted ) // create full system, 3 variables, 2 frontals, all 2 dim // no pivoting from LDL, so R matrix is not permuted Matrix full_matrix = Matrix_(4, 7, - 1.0, 0.0, 2.0, 0.0, 3.0, 0.0, 0.5, - 0.0, 1.0, 0.0, 2.0, 0.0, 3.0, 0.6, - 0.0, 0.0, 3.0, 0.0, 4.0, 0.0, 0.7, - 0.0, 0.0, 0.0, 3.0, 0.0, 4.0, 0.8); + 1.0, 0.0, 2.0, 0.0, 3.0, 0.0, 0.1, + 0.0, 1.0, 0.0, 2.0, 0.0, 3.0, 0.2, + 0.0, 0.0, 3.0, 0.0, 4.0, 0.0, 0.3, + 0.0, 0.0, 0.0, 3.0, 0.0, 4.0, 0.4); // 3 variables, all dim=2 vector dims; dims += 2, 2, 2, 1; @@ -339,7 +287,7 @@ TEST( GaussianConditional, solve_multifrontal_permuted ) vector cgdims; cgdims += _x_, _x1_, _l1_; GaussianConditional cg(cgdims.begin(), cgdims.end(), 2, matrices, sigmas); - EXPECT(assert_equal(Vector_(4, 0.5, 0.6, 0.7, 0.8), cg.get_d())); + EXPECT(assert_equal(Vector_(4, 0.1, 0.2, 0.3, 0.4), cg.get_d())); // partial solution Vector sl1 = Vector_(2, 9.0, 10.0); diff --git a/gtsam/nonlinear/GaussianISAM2-inl.h b/gtsam/nonlinear/GaussianISAM2-inl.h index 3d645377d..822eb9674 100644 --- a/gtsam/nonlinear/GaussianISAM2-inl.h +++ b/gtsam/nonlinear/GaussianISAM2-inl.h @@ -25,8 +25,9 @@ 
namespace gtsam { using namespace std; /* ************************************************************************* */ + namespace internal { template - void optimize2(const boost::shared_ptr& clique, double threshold, + void optimizeWildfire(const boost::shared_ptr& clique, double threshold, vector& changed, const vector& replaced, Permuted& delta, int& count) { // if none of the variables in this clique (frontal and separator!) changed // significantly, then by the running intersection property, none of the @@ -63,7 +64,6 @@ namespace gtsam { } // Back-substitute - (*clique)->rhs(delta); (*clique)->solveInPlace(delta); count += (*clique)->nrFrontals(); @@ -97,42 +97,19 @@ namespace gtsam { // Recurse to children BOOST_FOREACH(const typename CLIQUE::shared_ptr& child, clique->children_) { - optimize2(child, threshold, changed, replaced, delta, count); + optimizeWildfire(child, threshold, changed, replaced, delta, count); } } } - - /* ************************************************************************* */ - // fast full version without threshold - template - void optimize2(const boost::shared_ptr& clique, VectorValues& delta) { - - // parents are assumed to already be solved and available in result - (*clique)->rhs(delta); - (*clique)->solveInPlace(delta); - - // Solve chilren recursively - BOOST_FOREACH(const typename CLIQUE::shared_ptr& child, clique->children_) { - optimize2(child, delta); - } } - ///* ************************************************************************* */ - //boost::shared_ptr optimize2(const GaussianISAM2::sharedClique& root) { - // boost::shared_ptr delta(new VectorValues()); - // set changed; - // // starting from the root, call optimize on each conditional - // optimize2(root, delta); - // return delta; - //} - /* ************************************************************************* */ template - int optimize2(const boost::shared_ptr& root, double threshold, const vector& keys, Permuted& delta) { + int optimizeWildfire(const boost::shared_ptr& root, double threshold, const vector& keys, Permuted& delta) { vector changed(keys.size(), false); int count = 0; // starting from the root, call optimize on each conditional - optimize2(root, threshold, changed, keys, delta, count); + internal::optimizeWildfire(root, threshold, changed, keys, delta, count); return count; } diff --git a/gtsam/nonlinear/GaussianISAM2.h b/gtsam/nonlinear/GaussianISAM2.h index d1a4bef9f..7b2aa625d 100644 --- a/gtsam/nonlinear/GaussianISAM2.h +++ b/gtsam/nonlinear/GaussianISAM2.h @@ -63,10 +63,6 @@ public: }; -/** optimize the BayesTree, starting from the root */ -template -void optimize2(const boost::shared_ptr& root, VectorValues& delta); - /// optimize the BayesTree, starting from the root; "replaced" needs to contain /// all variables that are contained in the top of the Bayes tree that has been /// redone; "delta" is the current solution, an offset from the linearization @@ -76,7 +72,7 @@ void optimize2(const boost::shared_ptr& root, VectorValues& delta); /// variables are contained in the subtree. 
/// returns the number of variables that were solved for template -int optimize2(const boost::shared_ptr& root, +int optimizeWildfire(const boost::shared_ptr& root, double threshold, const std::vector& replaced, Permuted& delta); /// calculate the number of non-zero entries for the tree starting at clique (use root for complete matrix) diff --git a/gtsam/nonlinear/ISAM2-impl-inl.h b/gtsam/nonlinear/ISAM2-impl-inl.h index 4a3dfe86e..08b73df21 100644 --- a/gtsam/nonlinear/ISAM2-impl-inl.h +++ b/gtsam/nonlinear/ISAM2-impl-inl.h @@ -15,6 +15,8 @@ * @author Michael Kaess, Richard Roberts */ +#include + namespace gtsam { using namespace std; @@ -374,7 +376,7 @@ size_t ISAM2::Impl::UpdateDelta(const boost::shared_ptr::Impl::UpdateDelta(const boost::shared_ptr& ISAM2::getDelta() const { template VectorValues optimize(const ISAM2& isam) { VectorValues delta = *allocateVectorValues(isam); - optimize2(isam.root(), delta); + internal::optimizeInPlace(isam.root(), delta); return delta; } diff --git a/tests/testGaussianBayesNet.cpp b/tests/testGaussianBayesNet.cpp index f0cec7530..8444dacc3 100644 --- a/tests/testGaussianBayesNet.cpp +++ b/tests/testGaussianBayesNet.cpp @@ -119,64 +119,25 @@ TEST( GaussianBayesNet, optimize2 ) } /* ************************************************************************* */ -TEST( GaussianBayesNet, backSubstitute ) +TEST( GaussianBayesNet, optimize3 ) { // y=R*x, x=inv(R)*y - // 2 = 1 1 -1 - // 3 1 3 + // 9 = 1 1 4 + // 5 1 5 // NOTE: we are supplying a new RHS here GaussianBayesNet cbn = createSmallGaussianBayesNet(); - VectorValues y(vector(2,1)), x(vector(2,1)); - y[_x_] = Vector_(1,2.); - y[_y_] = Vector_(1,3.); - x[_x_] = Vector_(1,-1.); - x[_y_] = Vector_(1, 3.); + VectorValues expected(vector(2,1)), x(vector(2,1)); + expected[_x_] = Vector_(1, 4.); + expected[_y_] = Vector_(1, 5.); // test functional version - VectorValues actual = backSubstitute(cbn,y); - EXPECT(assert_equal(x,actual)); + VectorValues actual = optimize(cbn); + EXPECT(assert_equal(expected,actual)); // test imperative version - backSubstituteInPlace(cbn,y); - EXPECT(assert_equal(x,y)); -} - -/* ************************************************************************* */ -TEST( GaussianBayesNet, rhs ) -{ - // y=R*x, x=inv(R)*y - // 2 = 1 1 -1 - // 3 1 3 - GaussianBayesNet cbn = createSmallGaussianBayesNet(); - VectorValues expected = gtsam::optimize(cbn); - VectorValues d = rhs(cbn); - VectorValues actual = backSubstitute(cbn, d); - EXPECT(assert_equal(expected, actual)); -} - -/* ************************************************************************* */ -TEST( GaussianBayesNet, rhs_with_sigmas ) -{ - Matrix R11 = Matrix_(1, 1, 1.0), S12 = Matrix_(1, 1, 1.0); - Matrix R22 = Matrix_(1, 1, 1.0); - Vector d1(1), d2(1); - d1(0) = 9; - d2(0) = 5; - Vector tau(1); - tau(0) = 0.25; - - // define nodes and specify in reverse topological sort (i.e. 
parents last) - GaussianConditional::shared_ptr Px_y(new GaussianConditional(_x_, d1, R11, - _y_, S12, tau)), Py(new GaussianConditional(_y_, d2, R22, tau)); - GaussianBayesNet cbn; - cbn.push_back(Px_y); - cbn.push_back(Py); - - VectorValues expected = gtsam::optimize(cbn); - VectorValues d = rhs(cbn); - VectorValues actual = backSubstitute(cbn, d); - EXPECT(assert_equal(expected, actual)); + optimizeInPlace(cbn,x); + EXPECT(assert_equal(expected,x)); } /* ************************************************************************* */ diff --git a/tests/testGaussianISAM2.cpp b/tests/testGaussianISAM2.cpp index 82ddb96cd..21f2e62e9 100644 --- a/tests/testGaussianISAM2.cpp +++ b/tests/testGaussianISAM2.cpp @@ -16,6 +16,7 @@ using namespace boost::assign; #include #include #include +#include #include #include #include @@ -167,15 +168,13 @@ TEST(ISAM2, optimize2) { // Expected vector VectorValues expected(1, 3); - conditional->rhs(expected); conditional->solveInPlace(expected); // Clique GaussianISAM2<>::sharedClique clique( GaussianISAM2<>::Clique::Create(make_pair(conditional,GaussianFactor::shared_ptr()))); VectorValues actual(theta.dims(ordering)); - conditional->rhs(actual); - optimize2(clique, actual); + internal::optimizeInPlace(clique, actual); // expected.print("expected: "); // actual.print("actual: ");
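Taken together, the patch funnels the linear solves through two entry points per container: a functional optimize() and an in-place optimizeInPlace(). A usage sketch, assuming only the declarations shown in the headers above; how bn and bayesTree are obtained (e.g. by eliminating a GaussianFactorGraph) is outside this patch and not shown:

// Sketch of calling the consolidated entry points introduced by this patch.
// Signatures follow the headers above; the surrounding setup is assumed.
#include <gtsam/linear/GaussianBayesNet.h>
#include <gtsam/linear/GaussianBayesTree.h>

using namespace gtsam;

void solveExamples(const GaussianBayesNet& bn, const GaussianBayesTree& bayesTree) {
  // Functional form: allocates and returns the solution x = R^{-1} d.
  VectorValues x1 = optimize(bn);

  // In-place form: the caller pre-allocates, e.g. with allocateVectorValues().
  VectorValues x2 = *allocateVectorValues(bn);
  optimizeInPlace(bn, x2);

  // The same pair of entry points exists for Bayes trees (GaussianISAM and
  // ISAM2 forward to internal::optimizeInPlace on the root clique).
  VectorValues x3 = optimize(bayesTree);
  optimizeInPlace(bayesTree, x3);
}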