Removed shared_ptr from Bayes nets and factor graphs

release/4.3a0
Frank Dellaert 2021-12-30 12:12:54 -05:00
parent 2f7ebb4d0c
commit c659336cf8
9 changed files with 106 additions and 118 deletions
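In short, call sites move from boost::shared_ptr syntax to plain value syntax. A minimal before/after sketch of an affected caller (the function name exampleCaller is hypothetical; the calls mirror the sites changed below):

```cpp
#include <gtsam/linear/GaussianBayesNet.h>
#include <gtsam/linear/GaussianFactorGraph.h>
#include <gtsam/linear/VectorValues.h>

void exampleCaller(const gtsam::GaussianFactorGraph& Ab1) {
  // Before: elimination handed back a shared_ptr that was then dereferenced:
  //   gtsam::GaussianBayesNet::shared_ptr Rc1 = Ab1.eliminateSequential();
  //   gtsam::VectorValues xbar = Rc1->optimize();

  // After this commit: plain values, no shared_ptr indirection.
  gtsam::GaussianBayesNet Rc1 = Ab1.eliminateSequential();
  gtsam::VectorValues xbar = Rc1.optimize();  // xbar = R1 \ c1
  (void)xbar;  // unused in this sketch
}
```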

View File

@@ -446,30 +446,29 @@ SubgraphBuilder::Weights SubgraphBuilder::weights(
}
/*****************************************************************************/
GaussianFactorGraph::shared_ptr buildFactorSubgraph(
const GaussianFactorGraph &gfg, const Subgraph &subgraph,
const bool clone) {
auto subgraphFactors = boost::make_shared<GaussianFactorGraph>();
subgraphFactors->reserve(subgraph.size());
GaussianFactorGraph buildFactorSubgraph(const GaussianFactorGraph &gfg,
const Subgraph &subgraph,
const bool clone) {
GaussianFactorGraph subgraphFactors;
subgraphFactors.reserve(subgraph.size());
for (const auto &e : subgraph) {
const auto factor = gfg[e.index];
subgraphFactors->push_back(clone ? factor->clone() : factor);
subgraphFactors.push_back(clone ? factor->clone() : factor);
}
return subgraphFactors;
}
/**************************************************************************************************/
std::pair<GaussianFactorGraph::shared_ptr, GaussianFactorGraph::shared_ptr> //
splitFactorGraph(const GaussianFactorGraph &factorGraph,
const Subgraph &subgraph) {
std::pair<GaussianFactorGraph, GaussianFactorGraph> splitFactorGraph(
const GaussianFactorGraph &factorGraph, const Subgraph &subgraph) {
// Get the subgraph by calling cheaper method
auto subgraphFactors = buildFactorSubgraph(factorGraph, subgraph, false);
// Now, copy all factors then set subGraph factors to zero
auto remaining = boost::make_shared<GaussianFactorGraph>(factorGraph);
GaussianFactorGraph remaining = factorGraph;
for (const auto &e : subgraph) {
remaining->remove(e.index);
remaining.remove(e.index);
}
return std::make_pair(subgraphFactors, remaining);

View File

@@ -172,12 +172,13 @@ class GTSAM_EXPORT SubgraphBuilder {
};
/** Select the factors in a factor graph according to the subgraph. */
boost::shared_ptr<GaussianFactorGraph> buildFactorSubgraph(
const GaussianFactorGraph &gfg, const Subgraph &subgraph, const bool clone);
GaussianFactorGraph buildFactorSubgraph(const GaussianFactorGraph &gfg,
const Subgraph &subgraph,
const bool clone);
/** Split the graph into a subgraph and the remaining edges.
* Note that the remaining factorgraph has null factors. */
std::pair<boost::shared_ptr<GaussianFactorGraph>, boost::shared_ptr<GaussianFactorGraph> >
splitFactorGraph(const GaussianFactorGraph &factorGraph, const Subgraph &subgraph);
std::pair<GaussianFactorGraph, GaussianFactorGraph> splitFactorGraph(
const GaussianFactorGraph &factorGraph, const Subgraph &subgraph);
} // namespace gtsam
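A usage sketch of the two declarations above with their new value-returning signatures (this mirrors testSubgraphSolver.cpp further down; the default-constructed SubgraphBuilder and the helper name splitExample are assumptions):

```cpp
#include <tuple>

#include <gtsam/linear/GaussianFactorGraph.h>
#include <gtsam/linear/SubgraphBuilder.h>

void splitExample(const gtsam::GaussianFactorGraph& Ab) {
  gtsam::SubgraphBuilder builder;       // default parameters assumed
  const auto subgraph = builder(Ab);    // select subgraph edges (e.g. a spanning tree)

  gtsam::GaussianFactorGraph Ab1, Ab2;  // plain values now, not shared_ptrs
  std::tie(Ab1, Ab2) = gtsam::splitFactorGraph(Ab, subgraph);
  // Ab2 keeps null factors in the slots where the subgraph factors were removed.
}
```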

View File

@@ -77,16 +77,16 @@ static void setSubvector(const Vector &src, const KeyInfo &keyInfo,
/* ************************************************************************* */
// Convert any non-Jacobian factors to Jacobians (e.g. Hessian -> Jacobian with
// Cholesky)
static GaussianFactorGraph::shared_ptr convertToJacobianFactors(
static GaussianFactorGraph convertToJacobianFactors(
const GaussianFactorGraph &gfg) {
auto result = boost::make_shared<GaussianFactorGraph>();
GaussianFactorGraph result;
for (const auto &factor : gfg)
if (factor) {
auto jf = boost::dynamic_pointer_cast<JacobianFactor>(factor);
if (!jf) {
jf = boost::make_shared<JacobianFactor>(*factor);
}
result->push_back(jf);
result.push_back(jf);
}
return result;
}
@@ -96,42 +96,42 @@ SubgraphPreconditioner::SubgraphPreconditioner(const SubgraphPreconditionerParam
parameters_(p) {}
/* ************************************************************************* */
SubgraphPreconditioner::SubgraphPreconditioner(const sharedFG& Ab2,
const sharedBayesNet& Rc1, const sharedValues& xbar, const SubgraphPreconditionerParameters &p) :
Ab2_(convertToJacobianFactors(*Ab2)), Rc1_(Rc1), xbar_(xbar),
b2bar_(new Errors(-Ab2_->gaussianErrors(*xbar))), parameters_(p) {
SubgraphPreconditioner::SubgraphPreconditioner(const GaussianFactorGraph& Ab2,
const GaussianBayesNet& Rc1, const VectorValues& xbar, const SubgraphPreconditionerParameters &p) :
Ab2_(convertToJacobianFactors(Ab2)), Rc1_(Rc1), xbar_(xbar),
b2bar_(-Ab2_.gaussianErrors(xbar)), parameters_(p) {
}
/* ************************************************************************* */
// x = xbar + inv(R1)*y
VectorValues SubgraphPreconditioner::x(const VectorValues& y) const {
return *xbar_ + Rc1_->backSubstitute(y);
return xbar_ + Rc1_.backSubstitute(y);
}
/* ************************************************************************* */
double SubgraphPreconditioner::error(const VectorValues& y) const {
Errors e(y);
VectorValues x = this->x(y);
Errors e2 = Ab2()->gaussianErrors(x);
Errors e2 = Ab2_.gaussianErrors(x);
return 0.5 * (dot(e, e) + dot(e2,e2));
}
/* ************************************************************************* */
// gradient is y + inv(R1')*A2'*(A2*inv(R1)*y-b2bar),
VectorValues SubgraphPreconditioner::gradient(const VectorValues &y) const {
VectorValues x = Rc1()->backSubstitute(y); /* inv(R1)*y */
Errors e = (*Ab2() * x - *b2bar()); /* (A2*inv(R1)*y-b2bar) */
VectorValues x = Rc1_.backSubstitute(y); /* inv(R1)*y */
Errors e = Ab2_ * x - b2bar_; /* (A2*inv(R1)*y-b2bar) */
VectorValues v = VectorValues::Zero(x);
Ab2()->transposeMultiplyAdd(1.0, e, v); /* A2'*(A2*inv(R1)*y-b2bar) */
return y + Rc1()->backSubstituteTranspose(v);
Ab2_.transposeMultiplyAdd(1.0, e, v); /* A2'*(A2*inv(R1)*y-b2bar) */
return y + Rc1_.backSubstituteTranspose(v);
}
/* ************************************************************************* */
// Apply operator A, A*y = [I;A2*inv(R1)]*y = [y; A2*inv(R1)*y]
Errors SubgraphPreconditioner::operator*(const VectorValues& y) const {
Errors SubgraphPreconditioner::operator*(const VectorValues &y) const {
Errors e(y);
VectorValues x = Rc1()->backSubstitute(y); /* x=inv(R1)*y */
Errors e2 = *Ab2() * x; /* A2*x */
VectorValues x = Rc1_.backSubstitute(y); /* x=inv(R1)*y */
Errors e2 = Ab2_ * x; /* A2*x */
e.splice(e.end(), e2);
return e;
}
@@ -147,8 +147,8 @@ void SubgraphPreconditioner::multiplyInPlace(const VectorValues& y, Errors& e) c
}
// Add A2 contribution
VectorValues x = Rc1()->backSubstitute(y); // x=inv(R1)*y
Ab2()->multiplyInPlace(x, ei); // use iterator version
VectorValues x = Rc1_.backSubstitute(y); // x=inv(R1)*y
Ab2_.multiplyInPlace(x, ei); // use iterator version
}
/* ************************************************************************* */
@@ -190,14 +190,14 @@ void SubgraphPreconditioner::transposeMultiplyAdd2 (double alpha,
while (it != end) e2.push_back(*(it++));
VectorValues x = VectorValues::Zero(y); // x = 0
Ab2_->transposeMultiplyAdd(1.0,e2,x); // x += A2'*e2
y += alpha * Rc1_->backSubstituteTranspose(x); // y += alpha*inv(R1')*x
Ab2_.transposeMultiplyAdd(1.0,e2,x); // x += A2'*e2
y += alpha * Rc1_.backSubstituteTranspose(x); // y += alpha*inv(R1')*x
}
/* ************************************************************************* */
void SubgraphPreconditioner::print(const std::string& s) const {
cout << s << endl;
Ab2_->print();
Ab2_.print();
}
/*****************************************************************************/
@@ -205,7 +205,7 @@ void SubgraphPreconditioner::solve(const Vector &y, Vector &x) const {
assert(x.size() == y.size());
/* back substitute */
for (const auto &cg : boost::adaptors::reverse(*Rc1_)) {
for (const auto &cg : boost::adaptors::reverse(Rc1_)) {
/* collect a subvector of x that consists of the parents of cg (S) */
const KeyVector parentKeys(cg->beginParents(), cg->endParents());
const KeyVector frontalKeys(cg->beginFrontals(), cg->endFrontals());
@@ -228,7 +228,7 @@ void SubgraphPreconditioner::transposeSolve(const Vector &y, Vector &x) const {
std::copy(y.data(), y.data() + y.rows(), x.data());
/* in place back substitute */
for (const auto &cg : *Rc1_) {
for (const auto &cg : Rc1_) {
const KeyVector frontalKeys(cg->beginFrontals(), cg->endFrontals());
const Vector rhsFrontal = getSubvector(x, keyInfo_, frontalKeys);
const Vector solFrontal =
@@ -261,10 +261,10 @@ void SubgraphPreconditioner::build(const GaussianFactorGraph &gfg, const KeyInfo
keyInfo_ = keyInfo;
/* build factor subgraph */
GaussianFactorGraph::shared_ptr gfg_subgraph = buildFactorSubgraph(gfg, subgraph, true);
auto gfg_subgraph = buildFactorSubgraph(gfg, subgraph, true);
/* factorize and cache BayesNet */
Rc1_ = gfg_subgraph->eliminateSequential();
Rc1_ = gfg_subgraph.eliminateSequential();
}
/*****************************************************************************/
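For reference, the relations implemented above (and stated in the inline comments) written out, with $R_1$ the upper-triangular system obtained by eliminating the subgraph, $A_2$ the remaining factors, $\bar{x} = R_1^{-1} c_1$, and, following the constructor, $\bar{b}_2 = b_2 - A_2 \bar{x}$:

$$x(y) = \bar{x} + R_1^{-1} y, \qquad E(y) = \tfrac{1}{2}\left(\lVert y \rVert^2 + \lVert A_2\, x(y) - b_2 \rVert^2\right),$$

$$\nabla E(y) = y + R_1^{-\top} A_2^{\top}\left(A_2 R_1^{-1} y - \bar{b}_2\right), \qquad \bar{A}\, y = \begin{bmatrix} y \\ A_2 R_1^{-1} y \end{bmatrix}.$$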

View File

@@ -19,6 +19,8 @@
#include <gtsam/linear/SubgraphBuilder.h>
#include <gtsam/linear/Errors.h>
#include <gtsam/linear/GaussianBayesNet.h>
#include <gtsam/linear/GaussianFactorGraph.h>
#include <gtsam/linear/IterativeSolver.h>
#include <gtsam/linear/Preconditioner.h>
#include <gtsam/linear/VectorValues.h>
@@ -53,16 +55,12 @@ namespace gtsam {
public:
typedef boost::shared_ptr<SubgraphPreconditioner> shared_ptr;
typedef boost::shared_ptr<const GaussianBayesNet> sharedBayesNet;
typedef boost::shared_ptr<const GaussianFactorGraph> sharedFG;
typedef boost::shared_ptr<const VectorValues> sharedValues;
typedef boost::shared_ptr<const Errors> sharedErrors;
private:
sharedFG Ab2_;
sharedBayesNet Rc1_;
sharedValues xbar_; ///< A1 \ b1
sharedErrors b2bar_; ///< A2*xbar - b2
GaussianFactorGraph Ab2_;
GaussianBayesNet Rc1_;
VectorValues xbar_; ///< A1 \ b1
Errors b2bar_; ///< A2*xbar - b2
KeyInfo keyInfo_;
SubgraphPreconditionerParameters parameters_;
@@ -77,7 +75,7 @@ namespace gtsam {
* @param Rc1: the Bayes Net R1*x=c1
* @param xbar: the solution to R1*x=c1
*/
SubgraphPreconditioner(const sharedFG& Ab2, const sharedBayesNet& Rc1, const sharedValues& xbar,
SubgraphPreconditioner(const GaussianFactorGraph& Ab2, const GaussianBayesNet& Rc1, const VectorValues& xbar,
const SubgraphPreconditionerParameters &p = SubgraphPreconditionerParameters());
~SubgraphPreconditioner() override {}
@@ -86,13 +84,13 @@ namespace gtsam {
void print(const std::string& s = "SubgraphPreconditioner") const;
/** Access Ab2 */
const sharedFG& Ab2() const { return Ab2_; }
const GaussianFactorGraph& Ab2() const { return Ab2_; }
/** Access Rc1 */
const sharedBayesNet& Rc1() const { return Rc1_; }
const GaussianBayesNet& Rc1() const { return Rc1_; }
/** Access b2bar */
const sharedErrors b2bar() const { return b2bar_; }
const Errors b2bar() const { return b2bar_; }
/**
* Add zero-mean i.i.d. Gaussian prior terms to each variable
@@ -104,8 +102,7 @@ namespace gtsam {
/* A zero VectorValues with the structure of xbar */
VectorValues zero() const {
assert(xbar_);
return VectorValues::Zero(*xbar_);
return VectorValues::Zero(xbar_);
}
/**
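Putting the new value-based members together, a construction sketch (the helper name makePreconditioner is hypothetical; it mirrors what SubgraphSolver does in the next file, which additionally passes EliminateQR to the elimination):

```cpp
#include <gtsam/inference/Ordering.h>
#include <gtsam/linear/GaussianBayesNet.h>
#include <gtsam/linear/GaussianFactorGraph.h>
#include <gtsam/linear/SubgraphPreconditioner.h>
#include <gtsam/linear/VectorValues.h>

gtsam::SubgraphPreconditioner makePreconditioner(
    const gtsam::GaussianFactorGraph& Ab1,  // subgraph part A1*x - b1
    const gtsam::GaussianFactorGraph& Ab2,  // remaining factors A2*x - b2
    const gtsam::Ordering& ordering) {
  gtsam::GaussianBayesNet Rc1 = Ab1.eliminateSequential(ordering);  // R1*x = c1
  gtsam::VectorValues xbar = Rc1.optimize();                        // xbar = R1 \ c1
  return gtsam::SubgraphPreconditioner(Ab2, Rc1, xbar);             // Ab2, Rc1, xbar copied in
}
```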

View File

@@ -34,24 +34,24 @@ namespace gtsam {
SubgraphSolver::SubgraphSolver(const GaussianFactorGraph &Ab,
const Parameters &parameters, const Ordering& ordering) :
parameters_(parameters) {
GaussianFactorGraph::shared_ptr Ab1,Ab2;
GaussianFactorGraph Ab1, Ab2;
std::tie(Ab1, Ab2) = splitGraph(Ab);
if (parameters_.verbosity())
cout << "Split A into (A1) " << Ab1->size() << " and (A2) " << Ab2->size()
cout << "Split A into (A1) " << Ab1.size() << " and (A2) " << Ab2.size()
<< " factors" << endl;
auto Rc1 = Ab1->eliminateSequential(ordering, EliminateQR);
auto xbar = boost::make_shared<VectorValues>(Rc1->optimize());
auto Rc1 = Ab1.eliminateSequential(ordering, EliminateQR);
auto xbar = Rc1.optimize();
pc_ = boost::make_shared<SubgraphPreconditioner>(Ab2, Rc1, xbar);
}
/**************************************************************************************************/
// Taking eliminated tree [R1|c] and constraint graph [A2|b2]
SubgraphSolver::SubgraphSolver(const GaussianBayesNet::shared_ptr &Rc1,
const GaussianFactorGraph::shared_ptr &Ab2,
SubgraphSolver::SubgraphSolver(const GaussianBayesNet &Rc1,
const GaussianFactorGraph &Ab2,
const Parameters &parameters)
: parameters_(parameters) {
auto xbar = boost::make_shared<VectorValues>(Rc1->optimize());
auto xbar = Rc1.optimize();
pc_ = boost::make_shared<SubgraphPreconditioner>(Ab2, Rc1, xbar);
}
@@ -59,7 +59,7 @@ SubgraphSolver::SubgraphSolver(const GaussianBayesNet::shared_ptr &Rc1,
// Taking subgraphs [A1|b1] and [A2|b2]
// delegate up
SubgraphSolver::SubgraphSolver(const GaussianFactorGraph &Ab1,
const GaussianFactorGraph::shared_ptr &Ab2,
const GaussianFactorGraph &Ab2,
const Parameters &parameters,
const Ordering &ordering)
: SubgraphSolver(Ab1.eliminateSequential(ordering, EliminateQR), Ab2,
@@ -78,7 +78,7 @@ VectorValues SubgraphSolver::optimize(const GaussianFactorGraph &gfg,
return VectorValues();
}
/**************************************************************************************************/
pair<GaussianFactorGraph::shared_ptr, GaussianFactorGraph::shared_ptr> //
pair<GaussianFactorGraph, GaussianFactorGraph> //
SubgraphSolver::splitGraph(const GaussianFactorGraph &factorGraph) {
/* identify the subgraph structure */

View File

@@ -99,15 +99,13 @@ class GTSAM_EXPORT SubgraphSolver : public IterativeSolver {
* eliminate Ab1. We take Ab1 as a const reference, as it will be transformed
* into Rc1; Ab2 is now also taken by const reference and copied, as we need to keep it around.
*/
SubgraphSolver(const GaussianFactorGraph &Ab1,
const boost::shared_ptr<GaussianFactorGraph> &Ab2,
SubgraphSolver(const GaussianFactorGraph &Ab1, const GaussianFactorGraph &Ab2,
const Parameters &parameters, const Ordering &ordering);
/**
* The same as above, but we assume A1 was solved by caller.
* Both Rc1 and Ab2 are taken by const reference and copied, as we keep both around.
*/
SubgraphSolver(const boost::shared_ptr<GaussianBayesNet> &Rc1,
const boost::shared_ptr<GaussianFactorGraph> &Ab2,
SubgraphSolver(const GaussianBayesNet &Rc1, const GaussianFactorGraph &Ab2,
const Parameters &parameters);
/// Destructor
@@ -131,9 +129,8 @@ class GTSAM_EXPORT SubgraphSolver : public IterativeSolver {
/// @{
/// Split graph using Kruskal algorithm, treating binary factors as edges.
std::pair < boost::shared_ptr<GaussianFactorGraph>,
boost::shared_ptr<GaussianFactorGraph> > splitGraph(
const GaussianFactorGraph &gfg);
std::pair<GaussianFactorGraph, GaussianFactorGraph> splitGraph(
const GaussianFactorGraph &gfg);
/// @}
};
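A usage sketch of the two-graph constructor above (it mirrors the constructor2 test further down; the default-constructed parameters and the helper name solveWithSplit are assumptions):

```cpp
#include <gtsam/inference/Ordering.h>
#include <gtsam/linear/GaussianFactorGraph.h>
#include <gtsam/linear/SubgraphSolver.h>
#include <gtsam/linear/VectorValues.h>

gtsam::VectorValues solveWithSplit(const gtsam::GaussianFactorGraph& Ab1,
                                   const gtsam::GaussianFactorGraph& Ab2,
                                   const gtsam::Ordering& ordering) {
  gtsam::SubgraphSolverParameters parameters;  // defaults assumed
  // Ab1 is eliminated into the preconditioner; Ab2 supplies the remaining constraints.
  gtsam::SubgraphSolver solver(Ab1, Ab2, parameters, ordering);
  return solver.optimize();
}
```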

View File

@@ -679,26 +679,25 @@ inline Ordering planarOrdering(size_t N) {
}
/* ************************************************************************* */
inline std::pair<GaussianFactorGraph::shared_ptr, GaussianFactorGraph::shared_ptr > splitOffPlanarTree(size_t N,
const GaussianFactorGraph& original) {
auto T = boost::make_shared<GaussianFactorGraph>(), C= boost::make_shared<GaussianFactorGraph>();
inline std::pair<GaussianFactorGraph, GaussianFactorGraph> splitOffPlanarTree(
size_t N, const GaussianFactorGraph& original) {
GaussianFactorGraph T, C;
// Add the x11 constraint to the tree
T->push_back(original[0]);
T.push_back(original[0]);
// Add all horizontal constraints to the tree
size_t i = 1;
for (size_t x = 1; x < N; x++)
for (size_t y = 1; y <= N; y++, i++)
T->push_back(original[i]);
for (size_t y = 1; y <= N; y++, i++) T.push_back(original[i]);
// Add first vertical column of constraints to T, others to C
for (size_t x = 1; x <= N; x++)
for (size_t y = 1; y < N; y++, i++)
if (x == 1)
T->push_back(original[i]);
T.push_back(original[i]);
else
C->push_back(original[i]);
C.push_back(original[i]);
return std::make_pair(T, C);
}

View File

@@ -77,8 +77,8 @@ TEST(SubgraphPreconditioner, planarGraph) {
DOUBLES_EQUAL(0, error(A, xtrue), 1e-9); // check zero error for xtrue
// Check that xtrue is optimal
GaussianBayesNet::shared_ptr R1 = A.eliminateSequential();
VectorValues actual = R1->optimize();
GaussianBayesNet R1 = A.eliminateSequential();
VectorValues actual = R1.optimize();
EXPECT(assert_equal(xtrue, actual));
}
@@ -90,14 +90,14 @@ TEST(SubgraphPreconditioner, splitOffPlanarTree) {
boost::tie(A, xtrue) = planarGraph(3);
// Get the spanning tree and constraints, and check their sizes
GaussianFactorGraph::shared_ptr T, C;
GaussianFactorGraph T, C;
boost::tie(T, C) = splitOffPlanarTree(3, A);
LONGS_EQUAL(9, T->size());
LONGS_EQUAL(4, C->size());
LONGS_EQUAL(9, T.size());
LONGS_EQUAL(4, C.size());
// Check that the tree can be solved to give the ground xtrue
GaussianBayesNet::shared_ptr R1 = T->eliminateSequential();
VectorValues xbar = R1->optimize();
GaussianBayesNet R1 = T.eliminateSequential();
VectorValues xbar = R1.optimize();
EXPECT(assert_equal(xtrue, xbar));
}
@@ -110,31 +110,29 @@ TEST(SubgraphPreconditioner, system) {
boost::tie(Ab, xtrue) = planarGraph(N); // A*x-b
// Get the spanning tree and remaining graph
GaussianFactorGraph::shared_ptr Ab1, Ab2; // A1*x-b1 and A2*x-b2
GaussianFactorGraph Ab1, Ab2; // A1*x-b1 and A2*x-b2
boost::tie(Ab1, Ab2) = splitOffPlanarTree(N, Ab);
// Eliminate the spanning tree to build a prior
const Ordering ord = planarOrdering(N);
auto Rc1 = Ab1->eliminateSequential(ord); // R1*x-c1
VectorValues xbar = Rc1->optimize(); // xbar = inv(R1)*c1
auto Rc1 = Ab1.eliminateSequential(ord); // R1*x-c1
VectorValues xbar = Rc1.optimize(); // xbar = inv(R1)*c1
// Create Subgraph-preconditioned system
VectorValues::shared_ptr xbarShared(
new VectorValues(xbar)); // TODO: horrible
const SubgraphPreconditioner system(Ab2, Rc1, xbarShared);
const SubgraphPreconditioner system(Ab2, Rc1, xbar);
// Get corresponding matrices for tests. Add dummy factors to Ab2 to make
// sure it works with the ordering.
Ordering ordering = Rc1->ordering(); // not ord in general!
Ab2->add(key(1, 1), Z_2x2, Z_2x1);
Ab2->add(key(1, 2), Z_2x2, Z_2x1);
Ab2->add(key(1, 3), Z_2x2, Z_2x1);
Ordering ordering = Rc1.ordering(); // not ord in general!
Ab2.add(key(1, 1), Z_2x2, Z_2x1);
Ab2.add(key(1, 2), Z_2x2, Z_2x1);
Ab2.add(key(1, 3), Z_2x2, Z_2x1);
Matrix A, A1, A2;
Vector b, b1, b2;
std::tie(A, b) = Ab.jacobian(ordering);
std::tie(A1, b1) = Ab1->jacobian(ordering);
std::tie(A2, b2) = Ab2->jacobian(ordering);
Matrix R1 = Rc1->matrix(ordering).first;
std::tie(A1, b1) = Ab1.jacobian(ordering);
std::tie(A2, b2) = Ab2.jacobian(ordering);
Matrix R1 = Rc1.matrix(ordering).first;
Matrix Abar(13 * 2, 9 * 2);
Abar.topRows(9 * 2) = Matrix::Identity(9 * 2, 9 * 2);
Abar.bottomRows(8) = A2.topRows(8) * R1.inverse();
@@ -151,7 +149,7 @@ TEST(SubgraphPreconditioner, system) {
y1[key(3, 3)] = Vector2(1.0, -1.0);
// Check backSubstituteTranspose works with R1
VectorValues actual = Rc1->backSubstituteTranspose(y1);
VectorValues actual = Rc1.backSubstituteTranspose(y1);
Vector expected = R1.transpose().inverse() * vec(y1);
EXPECT(assert_equal(expected, vec(actual)));
@@ -230,7 +228,7 @@ TEST(SubgraphSolver, Solves) {
system.build(Ab, keyInfo, lambda);
// Create a perturbed (non-zero) RHS
const auto xbar = system.Rc1()->optimize(); // merely for use in zero below
const auto xbar = system.Rc1().optimize(); // merely for use in zero below
auto values_y = VectorValues::Zero(xbar);
auto it = values_y.begin();
it->second.setConstant(100);
@@ -238,13 +236,13 @@ TEST(SubgraphSolver, Solves) {
it->second.setConstant(-100);
// Solve the VectorValues way
auto values_x = system.Rc1()->backSubstitute(values_y);
auto values_x = system.Rc1().backSubstitute(values_y);
// Solve the matrix way, this really just checks BN::backSubstitute
// This only works with Rc1 ordering, not with keyInfo !
// TODO(frank): why does this not work with an arbitrary ordering?
const auto ord = system.Rc1()->ordering();
const Matrix R1 = system.Rc1()->matrix(ord).first;
const auto ord = system.Rc1().ordering();
const Matrix R1 = system.Rc1().matrix(ord).first;
auto ord_y = values_y.vector(ord);
auto vector_x = R1.inverse() * ord_y;
EXPECT(assert_equal(vector_x, values_x.vector(ord)));
@@ -261,7 +259,7 @@ TEST(SubgraphSolver, Solves) {
// Test that transposeSolve does implement x = R^{-T} y
// We do this by asserting it gives same answer as backSubstituteTranspose
auto values_x2 = system.Rc1()->backSubstituteTranspose(values_y);
auto values_x2 = system.Rc1().backSubstituteTranspose(values_y);
Vector solveT_x = Vector::Zero(N);
system.transposeSolve(vector_y, solveT_x);
EXPECT(assert_equal(values_x2.vector(ordering), solveT_x));
@@ -277,18 +275,15 @@ TEST(SubgraphPreconditioner, conjugateGradients) {
boost::tie(Ab, xtrue) = planarGraph(N); // A*x-b
// Get the spanning tree
GaussianFactorGraph::shared_ptr Ab1, Ab2; // A1*x-b1 and A2*x-b2
GaussianFactorGraph Ab1, Ab2; // A1*x-b1 and A2*x-b2
boost::tie(Ab1, Ab2) = splitOffPlanarTree(N, Ab);
// Eliminate the spanning tree to build a prior
SubgraphPreconditioner::sharedBayesNet Rc1 =
Ab1->eliminateSequential(); // R1*x-c1
VectorValues xbar = Rc1->optimize(); // xbar = inv(R1)*c1
GaussianBayesNet Rc1 = Ab1.eliminateSequential(); // R1*x-c1
VectorValues xbar = Rc1.optimize(); // xbar = inv(R1)*c1
// Create Subgraph-preconditioned system
VectorValues::shared_ptr xbarShared(
new VectorValues(xbar)); // TODO: horrible
SubgraphPreconditioner system(Ab2, Rc1, xbarShared);
SubgraphPreconditioner system(Ab2, Rc1, xbar);
// Create zero config y0 and perturbed config y1
VectorValues y0 = VectorValues::Zero(xbar);

View File

@@ -68,10 +68,10 @@ TEST( SubgraphSolver, splitFactorGraph )
auto subgraph = builder(Ab);
EXPECT_LONGS_EQUAL(9, subgraph.size());
GaussianFactorGraph::shared_ptr Ab1, Ab2;
GaussianFactorGraph Ab1, Ab2;
std::tie(Ab1, Ab2) = splitFactorGraph(Ab, subgraph);
EXPECT_LONGS_EQUAL(9, Ab1->size());
EXPECT_LONGS_EQUAL(13, Ab2->size());
EXPECT_LONGS_EQUAL(9, Ab1.size());
EXPECT_LONGS_EQUAL(13, Ab2.size());
}
/* ************************************************************************* */
@@ -99,12 +99,12 @@ TEST( SubgraphSolver, constructor2 )
std::tie(Ab, xtrue) = example::planarGraph(N); // A*x-b
// Get the spanning tree
GaussianFactorGraph::shared_ptr Ab1, Ab2; // A1*x-b1 and A2*x-b2
GaussianFactorGraph Ab1, Ab2; // A1*x-b1 and A2*x-b2
std::tie(Ab1, Ab2) = example::splitOffPlanarTree(N, Ab);
// The second constructor takes two factor graphs, so the caller can specify
// the preconditioner (Ab1) and the constraints that are left out (Ab2)
SubgraphSolver solver(*Ab1, Ab2, kParameters, kOrdering);
SubgraphSolver solver(Ab1, Ab2, kParameters, kOrdering);
VectorValues optimized = solver.optimize();
DOUBLES_EQUAL(0.0, error(Ab, optimized), 1e-5);
}
@@ -119,11 +119,11 @@ TEST( SubgraphSolver, constructor3 )
std::tie(Ab, xtrue) = example::planarGraph(N); // A*x-b
// Get the spanning tree and corresponding kOrdering
GaussianFactorGraph::shared_ptr Ab1, Ab2; // A1*x-b1 and A2*x-b2
GaussianFactorGraph Ab1, Ab2; // A1*x-b1 and A2*x-b2
std::tie(Ab1, Ab2) = example::splitOffPlanarTree(N, Ab);
// The caller solves |A1*x-b1|^2 == |R1*x-c1|^2, where R1 is square UT
auto Rc1 = Ab1->eliminateSequential();
auto Rc1 = Ab1.eliminateSequential();
// The third constructor allows the caller to pass an already solved preconditioner Rc1_
// as a Bayes net, in addition to the "loop closing constraints" Ab2, as before