GaussianBayesNet::backSubstituteInPlace
parent ac870bce4c
commit 3b6c4917a9
@@ -48,8 +48,11 @@ namespace gtsam {
 
   /* ************************************************************************* */
   // In-place version that overwrites e
+  // TODO: version that takes scratch space for x
   void BayesNetPreconditioner::multiplyInPlace(const VectorConfig& y, Errors& e) const {
-    Ab_.multiplyInPlace(x(y),e); // TODO Avoid temporary x ?
+    VectorConfig x = y;
+    backSubstituteInPlace(Rd_,x);
+    Ab_.multiplyInPlace(x,e);
   }
 
   /* ************************************************************************* */
@@ -78,9 +78,16 @@ boost::shared_ptr<VectorConfig> optimize_(const GaussianBayesNet& bn)
 }
 
 /* ************************************************************************* */
 // (R*x)./sigmas = y by solving x=inv(R)*(y.*sigmas)
 VectorConfig backSubstitute(const GaussianBayesNet& bn, const VectorConfig& y) {
-  VectorConfig x;
+  VectorConfig x = y;
+  backSubstituteInPlace(bn,x);
+  return x;
+}
+
+/* ************************************************************************* */
+// (R*x)./sigmas = y by solving x=inv(R)*(y.*sigmas)
+void backSubstituteInPlace(const GaussianBayesNet& bn, VectorConfig& y) {
+  VectorConfig& x = y;
   /** solve each node in turn in topological sort order (parents first)*/
   BOOST_REVERSE_FOREACH(GaussianConditional::shared_ptr cg, bn) {
     // i^th part of R*x=y, x=inv(R)*y
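The new functional/in-place split above follows a simple idiom: the copying version is a thin wrapper that copies its argument and delegates to the in-place overload. A minimal standalone sketch of that idiom (stand-in types and a placeholder solver, not GTSAM code):

```cpp
#include <cstddef>
#include <vector>

// Stand-in for an in-place solve: overwrites v with the "solution".
// The actual computation is a placeholder; only the calling pattern matters here.
void solveInPlace(std::vector<double>& v) {
  for (std::size_t i = 0; i < v.size(); ++i) v[i] *= 0.5;  // placeholder computation
}

// Stand-in for the functional version: copy once, then reuse the in-place code.
std::vector<double> solve(const std::vector<double>& v) {
  std::vector<double> x = v;
  solveInPlace(x);
  return x;
}
```

This keeps a single implementation of the solve while still offering a const-friendly interface.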
@@ -91,12 +98,12 @@ VectorConfig backSubstitute(const GaussianBayesNet& bn, const VectorConfig& y) {
     for (it = cg->parentsBegin(); it!= cg->parentsEnd(); it++) {
       const Symbol& j = it->first;
       const Matrix& Rij = it->second;
-      zi -= Rij * x[j];
+      Vector& xj = x.getReference(j);
+      axpy(-1.0, Rij*xj, zi); // TODO: use BLAS level 2
     }
-    Vector xi = gtsam::backSubstituteUpper(cg->get_R(), zi);
-    x.insert(i,xi); // store result in partial solution
+    Vector& xi = x.getReference(i);
+    xi = gtsam::backSubstituteUpper(cg->get_R(), zi);
   }
-  return x;
 }
 
 /* ************************************************************************* */
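For reference, the loop above is block back-substitution on R*x = y in reverse elimination order: for each conditional, subtract the contributions of the already-solved parents (the axpy step), then solve the triangular diagonal block. A self-contained scalar version of the same recurrence, ignoring the sigma scaling (plain C++, not GTSAM types; the 2x2 R in main() is only an illustration chosen to match the numbers in the unit test further down):

```cpp
#include <cassert>
#include <cstddef>
#include <vector>

// Solve R*x = y for upper-triangular R (n x n, row-major), overwriting y with x,
// mirroring backSubstituteInPlace: visit entries in reverse order, subtract the
// contribution of already-solved entries, then divide by the diagonal.
void backSubstituteUpperInPlace(const std::vector<std::vector<double>>& R,
                                std::vector<double>& y) {
  const std::size_t n = y.size();
  for (std::size_t k = n; k-- > 0;) {
    double zi = y[k];
    for (std::size_t j = k + 1; j < n; ++j)
      zi -= R[k][j] * y[j];      // axpy(-1, R(k,j)*x[j], zi)
    assert(R[k][k] != 0.0);
    y[k] = zi / R[k][k];         // store the solution in place
  }
}

int main() {
  std::vector<std::vector<double>> R = {{1.0, 1.0}, {0.0, 1.0}};
  std::vector<double> y = {4.0, 2.0};
  backSubstituteUpperInPlace(R, y);  // (4, 2) -> (2, 2)
  assert(y[0] == 2.0 && y[1] == 2.0);
  return 0;
}
```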
@@ -56,6 +56,11 @@ namespace gtsam {
    */
   VectorConfig backSubstitute(const GaussianBayesNet& bn, const VectorConfig& y);
 
+  /*
+   * Backsubstitute in place, y is replaced with solution
+   */
+  void backSubstituteInPlace(const GaussianBayesNet& bn, VectorConfig& y);
+
   /*
    * Transpose Backsubstitute
   * gy=inv(L)*gx by solving L*gy=gx.
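With both declarations in the header, a caller can pick the copying or the in-place form. A hypothetical call site, where the include path and the surrounding function are assumptions (only the two signatures above come from this change):

```cpp
#include <gtsam/linear/GaussianBayesNet.h>  // assumed include path

namespace example {

gtsam::VectorConfig solveBoth(const gtsam::GaussianBayesNet& bn,
                              const gtsam::VectorConfig& rhs) {
  // Functional form: rhs is left untouched, a new config is returned.
  gtsam::VectorConfig x = gtsam::backSubstitute(bn, rhs);

  // In-place form: avoids the extra copy when the argument may be overwritten.
  gtsam::VectorConfig y = rhs;
  gtsam::backSubstituteInPlace(bn, y);  // y now holds the same solution as x

  return x;
}

}  // namespace example
```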
@@ -95,14 +95,12 @@ list<Symbol> GaussianConditional::parents() const {
 /* ************************************************************************* */
 Vector GaussianConditional::solve(const VectorConfig& x) const {
   Vector rhs = d_;
-  for (Parents::const_iterator it = parents_.begin(); it
-      != parents_.end(); it++) {
+  for (Parents::const_iterator it = parents_.begin(); it!= parents_.end(); it++) {
     const Symbol& j = it->first;
     const Matrix& Aj = it->second;
-    rhs -= Aj * x[j];
+    axpy(-1, Aj * x[j], rhs); // TODO use BLAS level 2
   }
-  Vector result = backSubstituteUpper(R_, rhs, false);
-  return result;
+  return backSubstituteUpper(R_, rhs, false);
 }
 
 /* ************************************************************************* */
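The rewrite above leans on axpy, i.e. y <- a*x + y, so axpy(-1, Aj * x[j], rhs) has the same effect as rhs -= Aj * x[j] but is phrased so it can later be mapped onto a BLAS call. A scalar stand-alone illustration of that semantics (not the actual GTSAM signature):

```cpp
#include <cassert>
#include <cstddef>
#include <vector>

// y <- a*x + y, element-wise stand-in for the axpy used above.
void axpy(double a, const std::vector<double>& x, std::vector<double>& y) {
  for (std::size_t i = 0; i < y.size(); ++i) y[i] += a * x[i];
}

int main() {
  std::vector<double> rhs = {9.0, 5.0};
  std::vector<double> contribution = {5.0, 0.0};  // plays the role of Aj * x[j]
  axpy(-1.0, contribution, rhs);                  // same effect as rhs -= contribution
  assert(rhs[0] == 4.0 && rhs[1] == 5.0);
  return 0;
}
```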
@@ -19,7 +19,10 @@ namespace gtsam {
   /* ************************************************************************* */
   // x = xbar + inv(R1)*y
   VectorConfig SubgraphPreconditioner::x(const VectorConfig& y) const {
-    return *xbar_ + gtsam::backSubstitute(*Rc1_, y);
+    VectorConfig x = y;
+    backSubstituteInPlace(*Rc1_,x);
+    x += *xbar_;
+    return x;
   }
 
   /* ************************************************************************* */
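As the comment says, x(y) computes x = xbar + inv(R1)*y; the new body does it by copying y, back-substituting in place against Rc1_, then adding xbar, and the two hunks below apply the same substitution before forming the A2 contribution. A tiny standalone numeric walk-through with stand-in values (R1 = [1 1; 0 1], xbar = (1, 1) and A2 = [1 2] are made up for illustration, not taken from GTSAM):

```cpp
#include <cassert>
#include <cstddef>
#include <vector>

// Stand-in for back-substitution against R1 = [1 1; 0 1] (unit upper-triangular).
void backSubstituteR1InPlace(std::vector<double>& v) {
  // reverse order: v[1] already is the last variable's solution, then update v[0]
  v[0] -= v[1];
}

int main() {
  const std::vector<double> xbar = {1.0, 1.0};
  std::vector<double> x = {4.0, 2.0};            // starts as a copy of y

  backSubstituteR1InPlace(x);                    // x = inv(R1)*y        = (2, 2)
  for (std::size_t i = 0; i < x.size(); ++i)
    x[i] += xbar[i];                             // x = xbar + inv(R1)*y = (3, 3)

  double e2 = 1.0 * x[0] + 2.0 * x[1];           // A2*x, the "A2 contribution" below
  assert(x[0] == 3.0 && x[1] == 3.0 && e2 == 9.0);
  return 0;
}
```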
@@ -63,7 +66,8 @@ namespace gtsam {
     }
 
     // Add A2 contribution
-    VectorConfig x = gtsam::backSubstitute(*Rc1_, y); // x=inv(R1)*y
+    VectorConfig x = y; // TODO avoid ?
+    gtsam::backSubstituteInPlace(*Rc1_, x); // x=inv(R1)*y
     Errors e2 = *Ab2_ * x; // A2*x
     e.splice(e.end(), e2);
 
@@ -84,7 +88,8 @@ namespace gtsam {
     }
 
     // Add A2 contribution
-    VectorConfig x = gtsam::backSubstitute(*Rc1_, y); // x=inv(R1)*y
+    VectorConfig x = y; // TODO avoid ?
+    gtsam::backSubstituteInPlace(*Rc1_, x); // x=inv(R1)*y
     Ab2_->multiplyInPlace(x,ei); // use iterator version
   }
 
@@ -79,14 +79,32 @@ TEST( GaussianBayesNet, optimize )
   VectorConfig actual = optimize(cbn);
 
   VectorConfig expected;
-  Vector x(1), y(1);
-  x(0) = 4; y(0) = 5;
-  expected.insert("x",x);
-  expected.insert("y",y);
+  expected.insert("x",Vector_(1,4.));
+  expected.insert("y",Vector_(1,5.));
 
   CHECK(assert_equal(expected,actual));
 }
 
 /* ************************************************************************* */
+TEST( GaussianBayesNet, backSubstitute )
+{
+  GaussianBayesNet cbn = createSmallGaussianBayesNet();
+
+  VectorConfig y, expected;
+  y.insert("x",Vector_(1,4.));
+  y.insert("y",Vector_(1,2.));
+  expected.insert("x",Vector_(1,2.));
+  expected.insert("y",Vector_(1,2.));
+
+  // test functional version
+  VectorConfig actual = backSubstitute(cbn,y);
+  CHECK(assert_equal(expected,actual));
+
+  // test imperative version
+  backSubstituteInPlace(cbn,y);
+  CHECK(assert_equal(expected,y));
+}
+
+/* ************************************************************************* */
 #ifdef HAVE_BOOST_SERIALIZATION
 TEST( GaussianBayesNet, serialize )
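Sanity check on the new test: the expected values in both tests are consistent with a two-variable net whose whitened system is R = [1 1; 0 1] with d = (9, 5), which is an assumption about createSmallGaussianBayesNet rather than something visible in this diff. Under that assumption, optimize gives (x, y) = (4, 5) and back-substituting the rhs (4, 2) gives (2, 2), exactly the values checked above:

```cpp
#include <cassert>
#include <vector>

// Back-substitution for the assumed R = [1 1; 0 1] with unit sigmas:
// solve y first, then x, the same order backSubstituteInPlace uses.
std::vector<double> solve(std::vector<double> rhs) {
  // rhs[1] already equals y because the diagonal entry is 1
  rhs[0] -= rhs[1];  // x = rhs_x - 1*y
  return rhs;
}

int main() {
  std::vector<double> opt  = solve({9.0, 5.0});  // optimize test: assumed d = (9, 5) -> (4, 5)
  std::vector<double> bsub = solve({4.0, 2.0});  // backSubstitute test: (4, 2) -> (2, 2)
  assert(opt[0] == 4.0 && opt[1] == 5.0);
  assert(bsub[0] == 2.0 && bsub[1] == 2.0);
  return 0;
}
```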