Merged into trunk from branch combineandeliminate

release/4.3a0
Richard Roberts 2010-12-02 22:46:43 +00:00
parent f661baacbb
commit d9935519f9
7 changed files with 388 additions and 36 deletions


@@ -44,11 +44,11 @@ EliminationTree<FACTOR>::eliminate_(Conditionals& conditionals) const {
factors.push_back(child->eliminate_(conditionals)); }
// Combine all factors (from this node and from subtrees) into a joint factor
sharedFactor jointFactor(FACTOR::Combine(factors, VariableSlots(factors)));
assert(jointFactor->front() == this->key_);
conditionals[this->key_] = jointFactor->eliminateFirst();
pair<typename BayesNet::shared_ptr, typename FACTOR::shared_ptr> eliminated(
FACTOR::CombineAndEliminate(factors, 1));
conditionals[this->key_] = eliminated.first->front();
return jointFactor;
return eliminated.second;
}
/* ************************************************************************* */
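Note: the effect of this hunk, summarized as a sketch rather than commit code — the node no longer builds the joint factor and then eliminates its first variable in two steps; it asks the factor type to do both at once and only keeps the resulting conditional and remaining factor:
// Old per-node sequence (sketch):
//   sharedFactor joint = FACTOR::Combine(factors, VariableSlots(factors));
//   conditionals[key_] = joint->eliminateFirst();   // joint becomes the remaining factor
//   return joint;
// New per-node sequence (sketch):
//   eliminated = FACTOR::CombineAndEliminate(factors, 1);
//   conditionals[key_] = eliminated.first->front(); // single conditional on this node's key
//   return eliminated.second;                       // remaining factor handed to the parent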


@@ -18,6 +18,9 @@
#include <gtsam/inference/FactorBase-inl.h>
#include <gtsam/inference/IndexFactor.h>
#include <gtsam/inference/VariableSlots.h>
using namespace std;
namespace gtsam {
@@ -25,6 +28,14 @@ template class FactorBase<Index>;
IndexFactor::IndexFactor(const IndexConditional& c) : Base(static_cast<const Base>(c)) {}
pair<typename BayesNet<IndexConditional>::shared_ptr, IndexFactor::shared_ptr> IndexFactor::CombineAndEliminate(
const FactorGraph<This>& factors, size_t nrFrontals) {
pair<typename BayesNet<Conditional>::shared_ptr, shared_ptr> result;
result.second = Combine(factors, VariableSlots(factors));
result.first = result.second->eliminate(nrFrontals);
return result;
}
IndexFactor::shared_ptr IndexFactor::Combine(
const FactorGraph<This>& factors, const FastMap<Index, std::vector<Index> >& variableSlots) {
return Base::Combine<This>(factors, variableSlots); }
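For the symbolic IndexFactor, CombineAndEliminate simply chains the existing Combine and eliminate calls, so the result matches the previous two-step path. A minimal usage fragment (keys and graph contents are made up for illustration, and the usual gtsam/inference headers are assumed):
// Sketch: symbolically eliminate one frontal variable from a two-factor graph.
FactorGraph<IndexFactor> graph;
std::set<Index> keys01; keys01.insert(0); keys01.insert(1);
std::set<Index> keys02; keys02.insert(0); keys02.insert(2);
graph.push_back(IndexFactor::shared_ptr(new IndexFactor(keys01)));
graph.push_back(IndexFactor::shared_ptr(new IndexFactor(keys02)));
pair<BayesNet<IndexConditional>::shared_ptr, IndexFactor::shared_ptr> result =
    IndexFactor::CombineAndEliminate(graph, 1);
// result.first holds the conditional P(0 | 1 2); result.second is the induced factor on {1, 2}.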


@@ -62,6 +62,12 @@ public:
/** Construct n-way factor */
IndexFactor(std::set<Index> js) : Base(js) {}
/**
* Combine and eliminate several factors.
*/
static std::pair<typename BayesNet<Conditional>::shared_ptr, shared_ptr> CombineAndEliminate(
const FactorGraph<This>& factors, size_t nrFrontals=1);
/** Create a combined joint factor (new style for EliminationTree). */
static shared_ptr
Combine(const FactorGraph<This>& factors, const FastMap<Index, std::vector<Index> >& variableSlots);
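Both EliminationTree (above) and JunctionTree (below) now call FACTOR::CombineAndEliminate directly, so any factor class used with those trees is expected to expose this pair of static named constructors plus a Conditional typedef. A hypothetical MyFactor sketching that shape (the class itself is illustrative only):
class MyFactor : public gtsam::FactorBase<gtsam::Index> {
public:
  typedef boost::shared_ptr<MyFactor> shared_ptr;
  typedef gtsam::IndexConditional Conditional;  // conditional type produced by elimination
  // Combine several factors and eliminate the first nrFrontals variables in one step.
  static std::pair<gtsam::BayesNet<Conditional>::shared_ptr, shared_ptr> CombineAndEliminate(
      const gtsam::FactorGraph<MyFactor>& factors, size_t nrFrontals = 1);
  // Combine only, for callers that still eliminate separately.
  static shared_ptr Combine(const gtsam::FactorGraph<MyFactor>& factors,
      const gtsam::FastMap<gtsam::Index, std::vector<gtsam::Index> >& variableSlots);
};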


@@ -163,41 +163,26 @@ namespace gtsam {
// eliminate the combined factors
// warning: fg is being eliminated in-place and will contain marginal afterwards
tic("JT 2.1 VariableSlots");
VariableSlots variableSlots(fg);
toc("JT 2.1 VariableSlots");
#ifndef NDEBUG
// Debug check that the keys found in the factors match the frontal and
// separator keys of the clique.
list<Index> allKeys;
allKeys.insert(allKeys.end(), current->frontal.begin(), current->frontal.end());
allKeys.insert(allKeys.end(), current->separator.begin(), current->separator.end());
vector<Index> varslotsKeys(variableSlots.size());
std::transform(variableSlots.begin(), variableSlots.end(), varslotsKeys.begin(),
boost::lambda::bind(&VariableSlots::iterator::value_type::first, boost::lambda::_1));
assert(std::equal(allKeys.begin(), allKeys.end(), varslotsKeys.begin()));
#endif
// Now that we know which factors and variables, and where variables
// come from and go to, create and eliminate the new joint factor.
tic("JT 2.2 Combine");
typename FG::sharedFactor jointFactor = FG::Factor::Combine(fg, variableSlots);
toc("JT 2.2 Combine");
tic("JT 2.3 Eliminate");
typename BayesNet<typename FG::Factor::Conditional>::shared_ptr fragment = jointFactor->eliminate(current->frontal.size());
toc("JT 2.3 Eliminate");
assert(std::equal(jointFactor->begin(), jointFactor->end(), current->separator.begin()));
tic("JT 2.2 CombineAndEliminate");
pair<typename BayesNet<typename FG::Factor::Conditional>::shared_ptr, typename FG::sharedFactor> eliminated(
FG::Factor::CombineAndEliminate(fg, current->frontal.size()));
toc("JT 2.2 CombineAndEliminate");
assert(std::equal(eliminated.second->begin(), eliminated.second->end(), current->separator.begin()));
tic("JT 2.4 Update tree");
// create a new clique corresponding the combined factors
typename BayesTree::sharedClique new_clique(new typename BayesTree::Clique(*fragment));
typename BayesTree::sharedClique new_clique(new typename BayesTree::Clique(*eliminated.first));
new_clique->children_ = children;
BOOST_FOREACH(typename BayesTree::sharedClique& childRoot, children)
childRoot->parent_ = new_clique;
toc("JT 2.4 Update tree");
return make_pair(new_clique, jointFactor);
return make_pair(new_clique, eliminated.second);
}
/* ************************************************************************* */
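The surviving assert encodes the per-clique contract the new call is trusted to satisfy; spelled out a little further as an illustrative check (not commit code):
// eliminated.first  : one conditional per variable in current->frontal, in order
// eliminated.second : a factor on exactly current->separator, in the same order
assert(eliminated.first->size() == current->frontal.size());
assert(std::equal(eliminated.second->begin(), eliminated.second->end(),
    current->separator.begin()));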


@@ -16,6 +16,14 @@
* @author Christian Potthast
*/
#include <gtsam/base/timing.h>
#include <gtsam/base/Matrix.h>
#include <gtsam/base/FastMap.h>
#include <gtsam/base/cholesky.h>
#include <gtsam/linear/GaussianConditional.h>
#include <gtsam/linear/GaussianFactor.h>
#include <gtsam/linear/GaussianFactorGraph.h>
#include <boost/foreach.hpp>
#include <boost/format.hpp>
#include <boost/make_shared.hpp>
@@ -28,12 +36,9 @@
#include <boost/numeric/ublas/io.hpp>
#include <boost/numeric/ublas/matrix_proxy.hpp>
#include <boost/numeric/ublas/vector_proxy.hpp>
#include <boost/numeric/ublas/blas.hpp>
#include <gtsam/base/timing.h>
#include <gtsam/base/Matrix.h>
#include <gtsam/linear/GaussianConditional.h>
#include <gtsam/linear/GaussianFactor.h>
#include <gtsam/linear/GaussianFactorGraph.h>
#include <sstream>
using namespace std;
@@ -313,6 +318,291 @@ GaussianFactor::sparse(const Dimensions& columnIndices) const {
return boost::tuple<list<int>, list<int>, list<double> >(I,J,S);
}
/* ************************************************************************* */
GaussianFactor GaussianFactor::whiten() const {
GaussianFactor result(*this);
result.model_->WhitenInPlace(result.matrix_);
result.model_ = noiseModel::Unit::Create(result.model_->dim());
return result;
}
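whiten() scales each row of the stored [A|b] by the inverse of its standard deviation, so the returned copy behaves as if its noise were the identity. A small illustrative use (values made up; the constructor and helpers are those already used in the GaussianFactor tests):
// A unary factor on a 2-dimensional variable with sigma = 0.5 on both rows.
GaussianFactor f(0, eye(2), Vector_(2, 1.0, 2.0),
    noiseModel::Diagonal::Sigmas(Vector_(2, 0.5, 0.5)));
GaussianFactor fw = f.whiten();
// fw stores A/0.5 and b/0.5 with a unit noise model, so products of its blocks
// accumulate directly into the normal equations assembled below.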
/* ************************************************************************* */
struct SlotEntry {
size_t slot;
size_t dimension;
SlotEntry(size_t _slot, size_t _dimension) : slot(_slot), dimension(_dimension) {}
};
typedef FastMap<Index, SlotEntry> Scatter;
/* ************************************************************************* */
static FastMap<Index, SlotEntry> findScatterAndDims(const FactorGraph<GaussianFactor>& factors) {
static const bool debug = false;
// The "scatter" is a map from global variable indices to slot indices in the
// union of involved variables. We also include the dimensionality of the
// variable.
Scatter scatter;
// First do the set union.
BOOST_FOREACH(const GaussianFactor::shared_ptr& factor, factors) {
for(GaussianFactor::const_iterator variable = factor->begin(); variable != factor->end(); ++variable) {
scatter.insert(make_pair(*variable, SlotEntry(0, factor->getDim(variable))));
}
}
// Next fill in the slot indices (we can only get these after doing the set
// union).
size_t slot = 0;
BOOST_FOREACH(Scatter::value_type& var_slot, scatter) {
var_slot.second.slot = (slot ++);
if(debug)
cout << "scatter[" << var_slot.first << "] = (slot " << var_slot.second.slot << ", dim " << var_slot.second.dimension << ")" << endl;
}
return scatter;
}
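As a concrete trace, the graph in the CombineAndEliminate unit test added at the end of this commit involves keys 0 and 1, each 3-dimensional; the scatter it produces, and the varStarts derived from it below, would be (worked by hand, not commit code):
// factors cover keys {1}, {0,1}, {1}, all blocks 3-dimensional
// scatter (ordered by key):
//   scatter[0] = (slot 0, dim 3)
//   scatter[1] = (slot 1, dim 3)
// varStarts in formAbTAb: [0, 3, 6, 7]   (the last entry is the r.h.s. column)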
/* ************************************************************************* */
static MatrixColMajor formAbTAb(const FactorGraph<GaussianFactor>& factors, const Scatter& scatter) {
static const bool debug = false;
tic("CombineAndEliminate: 3.1 varStarts");
// Determine scalar indices of each variable
vector<size_t> varStarts;
varStarts.reserve(scatter.size() + 2);
varStarts.push_back(0);
BOOST_FOREACH(const Scatter::value_type& var_slot, scatter) {
varStarts.push_back(varStarts.back() + var_slot.second.dimension);
}
// This is for the r.h.s. vector
varStarts.push_back(varStarts.back() + 1);
toc("CombineAndEliminate: 3.1 varStarts");
// Allocate and zero matrix for Ab' * Ab
MatrixColMajor ATA(ublas::zero_matrix<double>(varStarts.back(), varStarts.back()));
tic("CombineAndEliminate: 3.2 updates");
// Do blockwise low-rank updates to Ab' * Ab for each factor. Here, we
// only update the upper triangle because this is all that Cholesky uses.
BOOST_FOREACH(const GaussianFactor::shared_ptr& factor, factors) {
// Whiten the factor first so it has a unit diagonal noise model
GaussianFactor whitenedFactor(factor->whiten());
if(debug) whitenedFactor.print("whitened factor: ");
for(GaussianFactor::const_iterator var2 = whitenedFactor.begin(); var2 != whitenedFactor.end(); ++var2) {
assert(scatter.find(*var2) != scatter.end());
size_t vj = scatter.find(*var2)->second.slot;
for(GaussianFactor::const_iterator var1 = whitenedFactor.begin(); var1 <= var2; ++var1) {
assert(scatter.find(*var1) != scatter.end());
size_t vi = scatter.find(*var1)->second.slot;
if(debug) cout << "Updating block " << vi << ", " << vj << endl;
if(debug) cout << "Updating (" << varStarts[vi] << ":" << varStarts[vi+1] << ", " <<
varStarts[vj] << ":" << varStarts[vj+1] << ") from A" << *var1 << "' * A" << *var2 << endl;
ublas::project(ATA,
ublas::range(varStarts[vi], varStarts[vi+1]), ublas::range(varStarts[vj], varStarts[vj+1])) +=
ublas::prod(ublas::trans(whitenedFactor.getA(var1)), whitenedFactor.getA(var2));
}
}
// Update r.h.s. vector
size_t vj = scatter.size();
for(GaussianFactor::const_iterator var1 = whitenedFactor.begin(); var1 < whitenedFactor.end(); ++var1) {
assert(scatter.find(*var1) != scatter.end());
size_t vi = scatter.find(*var1)->second.slot;
if(debug) cout << "Updating block " << vi << ", " << vj << endl;
if(debug) cout << "Updating (" << varStarts[vi] << ":" << varStarts[vi+1] << ", " <<
varStarts[vj] << ":" << varStarts[vj+1] << ") from A" << *var1 << "' * b" << endl;
ublas::matrix_column<MatrixColMajor> col(ATA, varStarts[vj]);
ublas::subrange(col, varStarts[vi], varStarts[vi+1]) +=
ublas::prod(ublas::trans(whitenedFactor.getA(var1)), whitenedFactor.getb());
}
size_t vi = scatter.size();
if(debug) cout << "Updating block " << vi << ", " << vj << endl;
if(debug) cout << "Updating (" << varStarts[vi] << ":" << varStarts[vi+1] << ", " <<
varStarts[vj] << ":" << varStarts[vj+1] << ") from b" << "' * b" << endl;
ATA(varStarts[vi], varStarts[vj]) += ublas::inner_prod(whitenedFactor.getb(), whitenedFactor.getb());
}
toc("CombineAndEliminate: 3.2 updates");
return ATA;
}
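The blockwise updates above amount to forming the upper triangle of the Gram matrix of the whitened, row-stacked [A|b] of all factors. A dense reference sketch, assuming a matrix Abw that already holds that stacked whitened system (Abw is an assumption of this illustration, not something the commit provides):
MatrixColMajor denseAbTAb(const MatrixColMajor& Abw) {
  // Upper triangle (including diagonal) of Abw' * Abw, matching what formAbTAb scatters blockwise.
  MatrixColMajor ATA(ublas::zero_matrix<double>(Abw.size2(), Abw.size2()));
  for(size_t j = 0; j < Abw.size2(); ++j)
    for(size_t i = 0; i <= j; ++i)
      for(size_t r = 0; r < Abw.size1(); ++r)
        ATA(i, j) += Abw(r, i) * Abw(r, j);
  return ATA;
}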
GaussianBayesNet::shared_ptr GaussianFactor::splitEliminatedFactor(size_t nrFrontals, const vector<Index>& keys) {
static const bool debug = false;
const size_t maxrank = Ab_.size1();
// Check for rank-deficiency that would prevent back-substitution
if(maxrank < Ab_.range(0, nrFrontals).size2()) {
stringstream ss;
ss << "Problem is rank-deficient, discovered while eliminating frontal variables";
for(size_t i=0; i<nrFrontals; ++i)
ss << " " << keys[i];
throw invalid_argument(ss.str());
}
if(debug) gtsam::print(Matrix(Ab_.range(0, Ab_.nBlocks())), "remaining Ab: ");
// Extract conditionals
tic("CombineAndEliminate: 5.1 cond Rd");
GaussianBayesNet::shared_ptr conditionals(new GaussianBayesNet());
for(size_t j=0; j<nrFrontals; ++j) {
// Temporarily restrict the matrix view to the conditional blocks of the
// eliminated Ab_ matrix to create the GaussianConditional from it.
size_t varDim = Ab_(0).size2();
Ab_.rowEnd() = Ab_.rowStart() + varDim;
// Zero the entries below the diagonal (this relies on the matrix being
// column-major).
{
ABlock remainingMatrix(Ab_.range(0, Ab_.nBlocks()));
if(remainingMatrix.size1() > 1)
for(size_t j = 0; j < remainingMatrix.size1() - 1; ++j)
memset(&remainingMatrix(j+1, j), 0, sizeof(remainingMatrix(0,0)) * (remainingMatrix.size1() - j - 1));
}
const ublas::scalar_vector<double> sigmas(varDim, 1.0);
conditionals->push_back(boost::make_shared<Conditional>(keys.begin()+j, keys.end(), 1, Ab_, sigmas));
if(debug) conditionals->back()->print("Extracted conditional: ");
Ab_.rowStart() += varDim;
Ab_.firstBlock() += 1;
if(debug) cout << "rowStart = " << Ab_.rowStart() << ", rowEnd = " << Ab_.rowEnd() << endl;
}
toc("CombineAndEliminate: 5.1 cond Rd");
// Take lower-right block of Ab_ to get the new factor
tic("CombineAndEliminate: 5.2 remaining factor");
Ab_.rowEnd() = maxrank;
// Assign the keys
keys_.assign(keys.begin() + nrFrontals, keys.end());
// Zero the entries below the diagonal (this relies on the matrix being
// column-major).
{
ABlock remainingMatrix(Ab_.range(0, Ab_.nBlocks()));
if(remainingMatrix.size1() > 1)
for(size_t j = 0; j < remainingMatrix.size1() - 1; ++j)
memset(&remainingMatrix(j+1, j), 0, sizeof(remainingMatrix(0,0)) * (remainingMatrix.size1() - j - 1));
}
// Make a unit diagonal noise model
model_ = noiseModel::Unit::Create(Ab_.size1());
if(debug) this->print("Eliminated factor: ");
toc("CombineAndEliminate: 5.2 remaining factor");
// todo SL: deal with "dead" pivot columns!!!
tic("CombineAndEliminate: 5.3 rowstarts");
size_t varpos = 0;
firstNonzeroBlocks_.resize(numberOfRows());
for(size_t row=0; row<numberOfRows(); ++row) {
if(debug) cout << "row " << row << " varpos " << varpos << " Ab_.offset(varpos)=" << Ab_.offset(varpos) << " Ab_.offset(varpos+1)=" << Ab_.offset(varpos+1) << endl;
while(varpos < keys_.size() && Ab_.offset(varpos+1) <= row)
++ varpos;
firstNonzeroBlocks_[row] = varpos;
if(debug) cout << "firstNonzeroVars_[" << row << "] = " << firstNonzeroBlocks_[row] << endl;
}
toc("CombineAndEliminate: 5.3 rowstarts");
return conditionals;
}
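After choleskyCareful, the upper triangle of ATA holds the square-root factor R of the joint system; splitEliminatedFactor peels off one block row per frontal variable as a GaussianConditional and keeps the lower-right block, with a unit noise model, as the separator factor. The block layout for two frontal variables x0, x1 and one separator variable x2 (dimensions illustrative):
//        x0    x1    x2    rhs
//     [ R00   R01   R02    d0 ]   -> conditional P(x0 | x1, x2)
//     [  0    R11   R12    d1 ]   -> conditional P(x1 | x2)
//     [  0     0    R22    d2 ]   -> remaining factor on x2 (unit sigmas)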
/* ************************************************************************* */
pair<GaussianBayesNet::shared_ptr, GaussianFactor::shared_ptr> GaussianFactor::CombineAndEliminate(
const FactorGraph<GaussianFactor>& factors, size_t nrFrontals, SolveMethod solveMethod) {
static const bool debug = false;
SolveMethod correctedSolveMethod = solveMethod;
// Check for constrained noise models
if(correctedSolveMethod != SOLVE_QR) {
BOOST_FOREACH(const shared_ptr& factor, factors) {
if(factor->model_->isConstrained()) {
correctedSolveMethod = SOLVE_QR;
break;
}
}
}
if(correctedSolveMethod == SOLVE_QR) {
shared_ptr jointFactor(Combine(factors, VariableSlots(factors)));
GaussianBayesNet::shared_ptr gbn(jointFactor->eliminate(nrFrontals, SOLVE_QR));
return make_pair(gbn, jointFactor);
} else if(correctedSolveMethod == SOLVE_CHOLESKY) {
// Find the scatter and variable dimensions
tic("CombineAndEliminate: 1 find scatter");
Scatter scatter(findScatterAndDims(factors));
toc("CombineAndEliminate: 1 find scatter");
// Pull out keys and dimensions
tic("CombineAndEliminate: 2 keys");
vector<Index> keys(scatter.size());
vector<size_t> dimensions(scatter.size() + 1);
BOOST_FOREACH(const Scatter::value_type& var_slot, scatter) {
keys[var_slot.second.slot] = var_slot.first;
dimensions[var_slot.second.slot] = var_slot.second.dimension;
}
// This is for the r.h.s. vector
dimensions.back() = 1;
toc("CombineAndEliminate: 2 keys");
// Form Ab' * Ab
tic("CombineAndEliminate: 3 Ab'*Ab");
MatrixColMajor ATA(formAbTAb(factors, scatter));
if(debug) gtsam::print(ATA, "Ab' * Ab: ");
toc("CombineAndEliminate: 3 Ab'*Ab");
// Do Cholesky; note that after this, the lower triangle still contains
// some untouched non-zeros that should be zero. We zero them while
// extracting submatrices next.
tic("CombineAndEliminate: 4 Cholesky careful");
size_t maxrank = choleskyCareful(ATA);
if(maxrank > ATA.size2() - 1)
maxrank = ATA.size2() - 1;
if(debug) {
gtsam::print(ATA, "chol(Ab' * Ab): ");
cout << "maxrank = " << maxrank << endl;
}
toc("CombineAndEliminate: 4 Cholesky careful");
// Create the remaining factor and swap in the matrix and block structure.
// We declare a reference Ab to the block matrix in the remaining factor to
// refer to below.
GaussianFactor::shared_ptr remainingFactor(new GaussianFactor());
BlockAb& Ab(remainingFactor->Ab_);
{
BlockAb newAb(ATA, dimensions.begin(), dimensions.end());
newAb.rowEnd() = maxrank;
newAb.swap(Ab);
}
// Extract conditionals and fill in details of the remaining factor
tic("CombineAndEliminate: 5 Split");
GaussianBayesNet::shared_ptr conditionals(remainingFactor->splitEliminatedFactor(nrFrontals, keys));
if(debug) {
conditionals->print("Extracted conditionals: ");
remainingFactor->print("Eliminated factor: ");
}
toc("CombineAndEliminate: 5 Split");
return make_pair(conditionals, remainingFactor);
} else {
assert(false);
return make_pair(GaussianBayesNet::shared_ptr(), shared_ptr());
}
}
/* ************************************************************************* */
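A minimal call pattern for the new dense-Cholesky path, mirroring the unit test added at the end of this commit (graph contents are placeholders):
GaussianFactorGraph gfg;
// ... add factors, e.g. gfg.add(0, A0, 1, A1, b, noiseModel::Diagonal::Sigmas(s)); ...
pair<GaussianBayesNet::shared_ptr, GaussianFactor::shared_ptr> eliminated =
    GaussianFactor::CombineAndEliminate(gfg, 1, GaussianFactor::SOLVE_CHOLESKY);
// eliminated.first : conditional(s) on the first nrFrontals variables of the combined ordering
// eliminated.second: marginal factor on the remaining (separator) variables
// If any factor carries a constrained noise model, the call silently falls back to SOLVE_QR.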
GaussianConditional::shared_ptr GaussianFactor::eliminateFirst(SolveMethod solveMethod) {


@@ -77,7 +77,7 @@ namespace gtsam {
protected:
SharedDiagonal model_; // Gaussian noise model with diagonal covariance matrix
std::vector<size_t> firstNonzeroBlocks_;
AbMatrix matrix_; // the full matrix correponding to the factor
AbMatrix matrix_; // the full matrix corresponding to the factor
BlockAb Ab_; // the block view of the full matrix
public:
@@ -163,6 +163,13 @@ namespace gtsam {
*/
void permuteWithInverse(const Permutation& inversePermutation);
/**
* Whiten the matrix and r.h.s. so that the noise model is unit diagonal.
* This throws an exception if the noise model cannot whiten, e.g. if it is
* constrained.
*/
GaussianFactor whiten() const;
/**
* Named constructor for combining a set of factors with pre-computed set of variables.
*/
@@ -171,15 +178,19 @@ namespace gtsam {
/**
* Combine and eliminate several factors.
*/
// static std::pair<GaussianBayesNet::shared_ptr, shared_ptr> CombineAndEliminate(
// const FactorGraph<GaussianFactor>& factors, const VariableSlots& variableSlots,
// size_t nrFrontals=1, SolveMethod solveMethod = SOLVE_QR);
static std::pair<GaussianBayesNet::shared_ptr, shared_ptr> CombineAndEliminate(
const FactorGraph<GaussianFactor>& factors, size_t nrFrontals=1, SolveMethod solveMethod = SOLVE_QR);
protected:
/** Internal debug check to make sure variables are sorted */
void assertInvariants() const;
/** Internal helper function to extract conditionals from a factor that was
* just numerically eliminated.
*/
GaussianBayesNet::shared_ptr splitEliminatedFactor(size_t nrFrontals, const std::vector<Index>& keys);
public:
/** get a copy of sigmas */


@@ -184,6 +184,55 @@ TEST(GaussianFactor, Combine2)
EXPECT(assert_equal(expected, actual));
}
/* ************************************************************************* */
TEST(GaussianFactor, CombineAndEliminate)
{
Matrix A01 = Matrix_(3,3,
1.0, 0.0, 0.0,
0.0, 1.0, 0.0,
0.0, 0.0, 1.0);
Vector b0 = Vector_(3, 1.5, 1.5, 1.5);
Vector s0 = Vector_(3, 1.6, 1.6, 1.6);
Matrix A10 = Matrix_(3,3,
2.0, 0.0, 0.0,
0.0, 2.0, 0.0,
0.0, 0.0, 2.0);
Matrix A11 = Matrix_(3,3,
-2.0, 0.0, 0.0,
0.0, -2.0, 0.0,
0.0, 0.0, -2.0);
Vector b1 = Vector_(3, 2.5, 2.5, 2.5);
Vector s1 = Vector_(3, 2.6, 2.6, 2.6);
Matrix A21 = Matrix_(3,3,
3.0, 0.0, 0.0,
0.0, 3.0, 0.0,
0.0, 0.0, 3.0);
Vector b2 = Vector_(3, 3.5, 3.5, 3.5);
Vector s2 = Vector_(3, 3.6, 3.6, 3.6);
GaussianFactorGraph gfg;
gfg.add(1, A01, b0, noiseModel::Diagonal::Sigmas(s0, true));
gfg.add(0, A10, 1, A11, b1, noiseModel::Diagonal::Sigmas(s1, true));
gfg.add(1, A21, b2, noiseModel::Diagonal::Sigmas(s2, true));
Matrix zero3x3 = zeros(3,3);
Matrix A0 = gtsam::stack(3, &A10, &zero3x3, &zero3x3);
Matrix A1 = gtsam::stack(3, &A11, &A01, &A21);
Vector b = gtsam::concatVectors(3, &b1, &b0, &b2);
Vector sigmas = gtsam::concatVectors(3, &s1, &s0, &s2);
GaussianFactor expectedFactor(0, A0, 1, A1, b, noiseModel::Diagonal::Sigmas(sigmas, true));
GaussianBayesNet expectedBN(*expectedFactor.eliminate(1, GaussianFactor::SOLVE_QR));
pair<GaussianBayesNet::shared_ptr, GaussianFactor::shared_ptr> actual(
GaussianFactor::CombineAndEliminate(gfg, 1, GaussianFactor::SOLVE_CHOLESKY));
EXPECT(assert_equal(expectedBN, *actual.first));
EXPECT(assert_equal(expectedFactor, *actual.second));
}
///* ************************************************************************* */
//TEST( GaussianFactor, operators )
//{