Made SPCG unit tests compile again; needed several fixes in iterative.h

release/4.3a0
Frank Dellaert 2012-09-03 00:06:13 +00:00
parent a058ecb692
commit ab7594e8f0
7 changed files with 227 additions and 203 deletions

View File

@@ -48,6 +48,7 @@ namespace gtsam {
public:
SubgraphPreconditioner();
/**
* Constructor
* @param Ab1: the Graph A1*x=b1
@@ -55,7 +56,8 @@ namespace gtsam {
* @param Rc1: the Bayes Net R1*x=c1
* @param xbar: the solution to R1*x=c1
*/
SubgraphPreconditioner(const sharedFG& Ab1, const sharedFG& Ab2, const sharedBayesNet& Rc1, const sharedValues& xbar);
SubgraphPreconditioner(const sharedFG& Ab1, const sharedFG& Ab2,
const sharedBayesNet& Rc1, const sharedValues& xbar);
/** Access Ab1 */
const sharedFG& Ab1() const { return Ab1_; }
@@ -80,12 +82,12 @@ namespace gtsam {
/* A zero VectorValues with the structure of xbar */
VectorValues zero() const {
VectorValues V(VectorValues::Zero(*xbar_)) ;
VectorValues V(VectorValues::Zero(*xbar_));
return V ;
}
/**
* Add constraint part of the error only, used in both calls above
* Add constraint part of the error only
* y += alpha*inv(R1')*A2'*e2
* Takes a range indicating e2 !!!!
*/
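
For context, the rewrapped constructor is exercised near the end of this commit, in testSubgraphPreconditioner. A condensed sketch of that usage (N, Ab and the planar-graph helpers come from tests/smallExample in this same commit):

    // Split the planar graph A*x=b into a spanning tree Ab1 and the remaining constraints Ab2
    JacobianFactorGraph Ab1_, Ab2_;
    boost::tie(Ab1_, Ab2_) = splitOffPlanarTree(N, Ab);
    SubgraphPreconditioner::sharedFG Ab1(new JacobianFactorGraph(Ab1_));
    SubgraphPreconditioner::sharedFG Ab2(new JacobianFactorGraph(Ab2_));

    // Eliminate the spanning tree to get R1*x=c1 and its solution xbar
    SubgraphPreconditioner::sharedBayesNet Rc1 = GaussianSequentialSolver(Ab1_).eliminate();
    VectorValues::shared_ptr xbar(new VectorValues(optimize(*Rc1)));

    // The constructor wrapped above: Ab1, Ab2, Rc1, xbar
    SubgraphPreconditioner system(Ab1, Ab2, Rc1, xbar);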

View File

@@ -122,7 +122,7 @@ namespace gtsam {
// conjugate gradient method.
// S: linear system, V: step vector, E: errors
template<class S, class V, class E>
V conjugateGradients(const S& Ab, V x, const ConjugateGradientParameters &parameters, bool steepest = false) {
V conjugateGradients(const S& Ab, V x, const ConjugateGradientParameters &parameters, bool steepest) {
CGState<S, V, E> state(Ab, x, parameters, steepest);
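
The change above is the standard C++ fix behind "several fixes in iterative.h": a default argument may not be repeated in a redeclaration, so once the declaration in iterative.h says bool steepest = false, the template definition here must drop the default. A minimal sketch of the pattern, using the names from this diff:

    // iterative.h: declare the default once
    template<class S, class V, class E>
    V conjugateGradients(const S& Ab, V x,
        const ConjugateGradientParameters& parameters, bool steepest = false);

    // iterative-inl.h: the definition must not repeat "= false"
    template<class S, class V, class E>
    V conjugateGradients(const S& Ab, V x,
        const ConjugateGradientParameters& parameters, bool steepest) {
      CGState<S, V, E> state(Ab, x, parameters, steepest);
      // ... iterate ...
    }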

View File

@@ -22,7 +22,6 @@
#include <gtsam/linear/JacobianFactorGraph.h>
#include <gtsam/linear/IterativeSolver.h>
#include <iostream>
using namespace std;
@@ -30,7 +29,7 @@ using namespace std;
namespace gtsam {
/* ************************************************************************* */
void System::print (const string& s) const {
void System::print(const string& s) const {
cout << s << endl;
gtsam::print(A_, "A");
gtsam::print(b_, "b");
@@ -38,34 +37,42 @@ namespace gtsam {
/* ************************************************************************* */
Vector steepestDescent(const System& Ab, const Vector& x, const ConjugateGradientParameters & parameters) {
return conjugateGradients<System, Vector, Vector> (Ab, x, parameters, true);
Vector steepestDescent(const System& Ab, const Vector& x,
const ConjugateGradientParameters & parameters) {
return conjugateGradients<System, Vector, Vector>(Ab, x, parameters, true);
}
Vector conjugateGradientDescent(const System& Ab, const Vector& x, const ConjugateGradientParameters & parameters) {
return conjugateGradients<System, Vector, Vector> (Ab, x, parameters);
Vector conjugateGradientDescent(const System& Ab, const Vector& x,
const ConjugateGradientParameters & parameters) {
return conjugateGradients<System, Vector, Vector>(Ab, x, parameters);
}
/* ************************************************************************* */
Vector steepestDescent(const Matrix& A, const Vector& b, const Vector& x, const ConjugateGradientParameters & parameters) {
Vector steepestDescent(const Matrix& A, const Vector& b, const Vector& x,
const ConjugateGradientParameters & parameters) {
System Ab(A, b);
return conjugateGradients<System, Vector, Vector> (Ab, x, parameters, true);
return conjugateGradients<System, Vector, Vector>(Ab, x, parameters, true);
}
Vector conjugateGradientDescent(const Matrix& A, const Vector& b, const Vector& x, const ConjugateGradientParameters & parameters) {
Vector conjugateGradientDescent(const Matrix& A, const Vector& b,
const Vector& x, const ConjugateGradientParameters & parameters) {
System Ab(A, b);
return conjugateGradients<System, Vector, Vector> (Ab, x, parameters);
return conjugateGradients<System, Vector, Vector>(Ab, x, parameters);
}
/* ************************************************************************* */
VectorValues steepestDescent(const FactorGraph<JacobianFactor>& fg, const VectorValues& x, const ConjugateGradientParameters & parameters) {
return conjugateGradients<FactorGraph<JacobianFactor>, VectorValues, Errors> (fg, x, parameters, true);
VectorValues steepestDescent(const FactorGraph<JacobianFactor>& fg,
const VectorValues& x, const ConjugateGradientParameters & parameters) {
return conjugateGradients<FactorGraph<JacobianFactor>, VectorValues, Errors>(
fg, x, parameters, true);
}
VectorValues conjugateGradientDescent(const FactorGraph<JacobianFactor>& fg, const VectorValues& x, const ConjugateGradientParameters & parameters) {
return conjugateGradients<FactorGraph<JacobianFactor>, VectorValues, Errors> (fg, x, parameters);
VectorValues conjugateGradientDescent(const FactorGraph<JacobianFactor>& fg,
const VectorValues& x, const ConjugateGradientParameters & parameters) {
return conjugateGradients<FactorGraph<JacobianFactor>, VectorValues, Errors>(
fg, x, parameters);
}
/* ************************************************************************* */
/* ************************************************************************* */
} // namespace gtsam
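
As a usage sketch, the dense-matrix overloads above can be called on a tiny least-squares problem |A*x-b|^2 like this. The Matrix_ constructor helper is an assumption based on the GTSAM API of this era (Vector_ appears in the tests of this commit); the ConjugateGradientParameters defaults are left untouched:

    // minimize |A*x - b|^2 for a small 2x2 system, starting from x0 = 0
    Matrix A = Matrix_(2, 2,
        4.0, 1.0,
        1.0, 3.0);
    Vector b = Vector_(2, 1.0, 2.0);
    Vector x0 = Vector_(2, 0.0, 0.0);
    ConjugateGradientParameters parameters;   // default stopping criteria
    Vector xstar = conjugateGradientDescent(A, b, x0, parameters);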

View File

@@ -31,12 +31,11 @@ namespace gtsam {
* "Vector" class E needs dot(v,v)
* @param Ab, the "system" that needs to be solved, examples below
* @param x is the initial estimate
* @param epsilon determines the convergence criterion: norm(g)<epsilon*norm(g0)
* @param maxIterations, if 0 will be set to |x|
* @param steepest flag, if true does steepest descent, not CG
* */
template<class S, class V, class E>
V conjugateGradients(const S& Ab, V x, bool verbose, double epsilon, size_t maxIterations, bool steepest = false);
V conjugateGradients(const S& Ab, V x,
const ConjugateGradientParameters &parameters, bool steepest = false);
/**
* Helper class encapsulating the combined system |Ax-b_|^2
@@ -127,21 +126,19 @@ namespace gtsam {
const Vector& x,
const ConjugateGradientParameters & parameters);
class GaussianFactorGraph;
/**
* Method of steepest gradients, Gaussian Factor Graph version
* */
*/
VectorValues steepestDescent(
const GaussianFactorGraph& fg,
const FactorGraph<JacobianFactor>& fg,
const VectorValues& x,
const ConjugateGradientParameters & parameters);
/**
* Method of conjugate gradients (CG), Gaussian Factor Graph version
* */
*/
VectorValues conjugateGradientDescent(
const GaussianFactorGraph& fg,
const FactorGraph<JacobianFactor>& fg,
const VectorValues& x,
const ConjugateGradientParameters & parameters);
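
The factor-graph overloads now take a FactorGraph<JacobianFactor> instead of a GaussianFactorGraph, which is what lets the JacobianFactorGraph-based tests in this commit call them directly. A minimal call sketch, mirroring the test at the end of this commit (planarGraph is the helper from tests/smallExample):

    JacobianFactorGraph Ab;              // passed as a FactorGraph<JacobianFactor>
    VectorValues xtrue;
    boost::tie(Ab, xtrue) = planarGraph(3);
    VectorValues x0 = VectorValues::Zero(xtrue);   // initial estimate with the same structure
    ConjugateGradientParameters parameters;
    VectorValues xstar = conjugateGradientDescent(Ab, x0, parameters);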

View File

@@ -421,7 +421,7 @@ namespace example {
}
/* ************************************************************************* */
boost::tuple<GaussianFactorGraph, VectorValues> planarGraph(size_t N) {
boost::tuple<JacobianFactorGraph, VectorValues> planarGraph(size_t N) {
// create empty graph
NonlinearFactorGraph nlfg;
@@ -458,7 +458,13 @@ namespace example {
xtrue[ordering[key(x, y)]] = Point2(x,y).vector();
// linearize around zero
return boost::make_tuple(*nlfg.linearize(zeros, ordering), xtrue);
boost::shared_ptr<GaussianFactorGraph> gfg = nlfg.linearize(zeros, ordering);
JacobianFactorGraph jfg;
BOOST_FOREACH(GaussianFactorGraph::sharedFactor factor, *gfg)
jfg.push_back(boost::dynamic_pointer_cast<JacobianFactor>(factor));
return boost::make_tuple(jfg, xtrue);
}
/* ************************************************************************* */
@@ -476,21 +482,21 @@ namespace example {
JacobianFactorGraph T, C;
// Add the x11 constraint to the tree
T.push_back(original[0]);
T.push_back(boost::dynamic_pointer_cast<JacobianFactor>(original[0]));
// Add all horizontal constraints to the tree
size_t i = 1;
for (size_t x = 1; x < N; x++)
for (size_t y = 1; y <= N; y++, i++)
T.push_back(original[i]);
T.push_back(boost::dynamic_pointer_cast<JacobianFactor>(original[i]));
// Add first vertical column of constraints to T, others to C
for (size_t x = 1; x <= N; x++)
for (size_t y = 1; y < N; y++, i++)
if (x == 1)
T.push_back(original[i]);
T.push_back(boost::dynamic_pointer_cast<JacobianFactor>(original[i]));
else
C.push_back(original[i]);
C.push_back(boost::dynamic_pointer_cast<JacobianFactor>(original[i]));
return make_pair(T, C);
}
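
The same dynamic_pointer_cast pattern now appears in both planarGraph and splitOffPlanarTree above. As a sketch, it could be factored into a small helper (convertToJacobianGraph is a hypothetical name; like the code above, it assumes the linearized graph contains only JacobianFactors, and it relies on the includes already present in this file):

    // Downcast each shared GaussianFactor to a JacobianFactor and collect the results.
    JacobianFactorGraph convertToJacobianGraph(const GaussianFactorGraph& gfg) {
      JacobianFactorGraph jfg;
      BOOST_FOREACH(const GaussianFactorGraph::sharedFactor& factor, gfg) {
        JacobianFactor::shared_ptr jf = boost::dynamic_pointer_cast<JacobianFactor>(factor);
        // dynamic_pointer_cast yields a null pointer for anything that is not a JacobianFactor
        if (jf) jfg.push_back(jf);
      }
      return jfg;
    }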

View File

@@ -130,7 +130,7 @@ namespace gtsam {
* -x11-x21-x31
* with x11 clamped at (1,1), and others related by 2D odometry.
*/
boost::tuple<GaussianFactorGraph, VectorValues> planarGraph(size_t N);
boost::tuple<JacobianFactorGraph, VectorValues> planarGraph(size_t N);
/*
* Create canonical ordering for planar graph that also works for tree

View File

@@ -17,6 +17,7 @@
#include <tests/smallExample.h>
#include <gtsam/nonlinear/Ordering.h>
#include <gtsam/nonlinear/Symbol.h>
#include <gtsam/linear/iterative.h>
#include <gtsam/linear/JacobianFactorGraph.h>
#include <gtsam/linear/GaussianSequentialSolver.h>
@@ -35,30 +36,41 @@ using namespace gtsam;
using namespace example;
// define keys
Key i3003 = 3003, i2003 = 2003, i1003 = 1003;
Key i3002 = 3002, i2002 = 2002, i1002 = 1002;
Key i3001 = 3001, i2001 = 2001, i1001 = 1001;
// Create key for simulated planar graph
Symbol key(int x, int y) {
return symbol_shorthand::X(1000*x+y);
}
// TODO fix Ordering::equals, because the ordering *is* correct !
/* ************************************************************************* */
//TEST( SubgraphPreconditioner, planarOrdering )
//{
// // Check canonical ordering
// Ordering expected, ordering = planarOrdering(3);
// expected += i3003, i2003, i1003, i3002, i2002, i1002, i3001, i2001, i1001;
// CHECK(assert_equal(expected,ordering));
//}
TEST( SubgraphPreconditioner, planarOrdering ) {
// Check canonical ordering
Ordering expected, ordering = planarOrdering(3);
expected +=
key(3, 3), key(2, 3), key(1, 3),
key(3, 2), key(2, 2), key(1, 2),
key(3, 1), key(2, 1), key(1, 1);
CHECK(assert_equal(expected,ordering));
}
/* ************************************************************************* */
/** unnormalized error */
double error(const JacobianFactorGraph& fg, const VectorValues& x) {
double total_error = 0.;
BOOST_FOREACH(const JacobianFactor::shared_ptr& factor, fg)
total_error += factor->error(x);
return total_error;
}
/* ************************************************************************* */
TEST( SubgraphPreconditioner, planarGraph )
{
// Check planar graph construction
GaussianFactorGraph A;
JacobianFactorGraph A;
VectorValues xtrue;
boost::tie(A, xtrue) = planarGraph(3);
LONGS_EQUAL(13,A.size());
LONGS_EQUAL(9,xtrue.size());
DOUBLES_EQUAL(0,A.error(xtrue),1e-9); // check zero error for xtrue
DOUBLES_EQUAL(0,error(A,xtrue),1e-9); // check zero error for xtrue
// Check that xtrue is optimal
GaussianBayesNet::shared_ptr R1 = GaussianSequentialSolver(A).eliminate();
@@ -67,143 +79,143 @@ TEST( SubgraphPreconditioner, planarGraph )
}
/* ************************************************************************* */
//TEST( SubgraphPreconditioner, splitOffPlanarTree )
//{
// // Build a planar graph
// GaussianFactorGraph A;
// VectorValues xtrue;
// boost::tie(A, xtrue) = planarGraph(3);
//
// // Get the spanning tree and constraints, and check their sizes
// JacobianFactorGraph T, C;
// // TODO big mess: GFG and JFG mess !!!
// boost::tie(T, C) = splitOffPlanarTree(3, A);
// LONGS_EQUAL(9,T.size());
// LONGS_EQUAL(4,C.size());
//
// // Check that the tree can be solved to give the ground xtrue
// GaussianBayesNet::shared_ptr R1 = GaussianSequentialSolver(T).eliminate();
// VectorValues xbar = optimize(*R1);
// CHECK(assert_equal(xtrue,xbar));
//}
TEST( SubgraphPreconditioner, splitOffPlanarTree )
{
// Build a planar graph
JacobianFactorGraph A;
VectorValues xtrue;
boost::tie(A, xtrue) = planarGraph(3);
// Get the spanning tree and constraints, and check their sizes
JacobianFactorGraph T, C;
boost::tie(T, C) = splitOffPlanarTree(3, A);
LONGS_EQUAL(9,T.size());
LONGS_EQUAL(4,C.size());
// Check that the tree can be solved to give the ground xtrue
GaussianBayesNet::shared_ptr R1 = GaussianSequentialSolver(T).eliminate();
VectorValues xbar = optimize(*R1);
CHECK(assert_equal(xtrue,xbar));
}
/* ************************************************************************* */
//TEST( SubgraphPreconditioner, system )
//{
// // Build a planar graph
// JacobianFactorGraph Ab;
// VectorValues xtrue;
// size_t N = 3;
// boost::tie(Ab, xtrue) = planarGraph(N); // A*x-b
//
// // Get the spanning tree and corresponding ordering
// GaussianFactorGraph Ab1_, Ab2_; // A1*x-b1 and A2*x-b2
// boost::tie(Ab1_, Ab2_) = splitOffPlanarTree(N, Ab);
// SubgraphPreconditioner::sharedFG Ab1(new GaussianFactorGraph(Ab1_));
// SubgraphPreconditioner::sharedFG Ab2(new GaussianFactorGraph(Ab2_));
//
// // Eliminate the spanning tree to build a prior
// SubgraphPreconditioner::sharedBayesNet Rc1 = GaussianSequentialSolver(Ab1_).eliminate(); // R1*x-c1
// VectorValues xbar = optimize(*Rc1); // xbar = inv(R1)*c1
//
// // Create Subgraph-preconditioned system
// VectorValues::shared_ptr xbarShared(new VectorValues(xbar)); // TODO: horrible
// SubgraphPreconditioner system(Ab1, Ab2, Rc1, xbarShared);
//
// // Create zero config
// VectorValues zeros = VectorValues::Zero(xbar);
//
// // Set up y0 as all zeros
// VectorValues y0 = zeros;
//
// // y1 = perturbed y0
// VectorValues y1 = zeros;
// y1[i2003] = Vector_(2, 1.0, -1.0);
//
// // Check corresponding x values
// VectorValues expected_x1 = xtrue, x1 = system.x(y1);
// expected_x1[i2003] = Vector_(2, 2.01, 2.99);
// expected_x1[i3003] = Vector_(2, 3.01, 2.99);
// CHECK(assert_equal(xtrue, system.x(y0)));
// CHECK(assert_equal(expected_x1,system.x(y1)));
//
// // Check errors
//// DOUBLES_EQUAL(0,error(Ab,xtrue),1e-9); // TODO !
//// DOUBLES_EQUAL(3,error(Ab,x1),1e-9); // TODO !
// DOUBLES_EQUAL(0,error(system,y0),1e-9);
// DOUBLES_EQUAL(3,error(system,y1),1e-9);
//
// // Test gradient in x
// VectorValues expected_gx0 = zeros;
// VectorValues expected_gx1 = zeros;
// CHECK(assert_equal(expected_gx0,gradient(Ab,xtrue)));
// expected_gx1[i1003] = Vector_(2, -100., 100.);
// expected_gx1[i2002] = Vector_(2, -100., 100.);
// expected_gx1[i2003] = Vector_(2, 200., -200.);
// expected_gx1[i3002] = Vector_(2, -100., 100.);
// expected_gx1[i3003] = Vector_(2, 100., -100.);
// CHECK(assert_equal(expected_gx1,gradient(Ab,x1)));
//
// // Test gradient in y
// VectorValues expected_gy0 = zeros;
// VectorValues expected_gy1 = zeros;
// expected_gy1[i1003] = Vector_(2, 2., -2.);
// expected_gy1[i2002] = Vector_(2, -2., 2.);
// expected_gy1[i2003] = Vector_(2, 3., -3.);
// expected_gy1[i3002] = Vector_(2, -1., 1.);
// expected_gy1[i3003] = Vector_(2, 1., -1.);
// CHECK(assert_equal(expected_gy0,gradient(system,y0)));
// CHECK(assert_equal(expected_gy1,gradient(system,y1)));
//
// // Check it numerically for good measure
// // TODO use boost::bind(&SubgraphPreconditioner::error,&system,_1)
// // Vector numerical_g1 = numericalGradient<VectorValues> (error, y1, 0.001);
// // Vector expected_g1 = Vector_(18, 0., 0., 0., 0., 2., -2., 0., 0., -2., 2.,
// // 3., -3., 0., 0., -1., 1., 1., -1.);
// // CHECK(assert_equal(expected_g1,numerical_g1));
//}
TEST( SubgraphPreconditioner, system )
{
// Build a planar graph
JacobianFactorGraph Ab;
VectorValues xtrue;
size_t N = 3;
boost::tie(Ab, xtrue) = planarGraph(N); // A*x-b
// Get the spanning tree and corresponding ordering
JacobianFactorGraph Ab1_, Ab2_; // A1*x-b1 and A2*x-b2
boost::tie(Ab1_, Ab2_) = splitOffPlanarTree(N, Ab);
SubgraphPreconditioner::sharedFG Ab1(new JacobianFactorGraph(Ab1_));
SubgraphPreconditioner::sharedFG Ab2(new JacobianFactorGraph(Ab2_));
// Eliminate the spanning tree to build a prior
SubgraphPreconditioner::sharedBayesNet Rc1 = GaussianSequentialSolver(Ab1_).eliminate(); // R1*x-c1
VectorValues xbar = optimize(*Rc1); // xbar = inv(R1)*c1
// Create Subgraph-preconditioned system
VectorValues::shared_ptr xbarShared(new VectorValues(xbar)); // TODO: horrible
SubgraphPreconditioner system(Ab1, Ab2, Rc1, xbarShared);
// Create zero config
VectorValues zeros = VectorValues::Zero(xbar);
// Set up y0 as all zeros
VectorValues y0 = zeros;
// y1 = perturbed y0
VectorValues y1 = zeros;
y1[1] = Vector_(2, 1.0, -1.0);
// Check corresponding x values
VectorValues expected_x1 = xtrue, x1 = system.x(y1);
expected_x1[1] = Vector_(2, 2.01, 2.99);
expected_x1[0] = Vector_(2, 3.01, 2.99);
CHECK(assert_equal(xtrue, system.x(y0)));
CHECK(assert_equal(expected_x1,system.x(y1)));
// Check errors
DOUBLES_EQUAL(0,error(Ab,xtrue),1e-9);
DOUBLES_EQUAL(3,error(Ab,x1),1e-9);
DOUBLES_EQUAL(0,error(system,y0),1e-9);
DOUBLES_EQUAL(3,error(system,y1),1e-9);
// Test gradient in x
VectorValues expected_gx0 = zeros;
VectorValues expected_gx1 = zeros;
CHECK(assert_equal(expected_gx0,gradient(Ab,xtrue)));
expected_gx1[2] = Vector_(2, -100., 100.);
expected_gx1[4] = Vector_(2, -100., 100.);
expected_gx1[1] = Vector_(2, 200., -200.);
expected_gx1[3] = Vector_(2, -100., 100.);
expected_gx1[0] = Vector_(2, 100., -100.);
CHECK(assert_equal(expected_gx1,gradient(Ab,x1)));
// Test gradient in y
VectorValues expected_gy0 = zeros;
VectorValues expected_gy1 = zeros;
expected_gy1[2] = Vector_(2, 2., -2.);
expected_gy1[4] = Vector_(2, -2., 2.);
expected_gy1[1] = Vector_(2, 3., -3.);
expected_gy1[3] = Vector_(2, -1., 1.);
expected_gy1[0] = Vector_(2, 1., -1.);
CHECK(assert_equal(expected_gy0,gradient(system,y0)));
CHECK(assert_equal(expected_gy1,gradient(system,y1)));
// Check it numerically for good measure
// TODO use boost::bind(&SubgraphPreconditioner::error,&system,_1)
// Vector numerical_g1 = numericalGradient<VectorValues> (error, y1, 0.001);
// Vector expected_g1 = Vector_(18, 0., 0., 0., 0., 2., -2., 0., 0., -2., 2.,
// 3., -3., 0., 0., -1., 1., 1., -1.);
// CHECK(assert_equal(expected_g1,numerical_g1));
}
/* ************************************************************************* */
//TEST( SubgraphPreconditioner, conjugateGradients )
//{
// // Build a planar graph
// GaussianFactorGraph Ab;
// VectorValues xtrue;
// size_t N = 3;
// boost::tie(Ab, xtrue) = planarGraph(N); // A*x-b
//
// // Get the spanning tree and corresponding ordering
// GaussianFactorGraph Ab1_, Ab2_; // A1*x-b1 and A2*x-b2
// boost::tie(Ab1_, Ab2_) = splitOffPlanarTree(N, Ab);
// SubgraphPreconditioner::sharedFG Ab1(new GaussianFactorGraph(Ab1_));
// SubgraphPreconditioner::sharedFG Ab2(new GaussianFactorGraph(Ab2_));
//
// // Eliminate the spanning tree to build a prior
// Ordering ordering = planarOrdering(N);
// SubgraphPreconditioner::sharedBayesNet Rc1 = GaussianSequentialSolver(Ab1_).eliminate(); // R1*x-c1
// VectorValues xbar = optimize(*Rc1); // xbar = inv(R1)*c1
//
// // Create Subgraph-preconditioned system
// VectorValues::shared_ptr xbarShared(new VectorValues(xbar)); // TODO: horrible
// SubgraphPreconditioner system(Ab1, Ab2, Rc1, xbarShared);
//
// // Create zero config y0 and perturbed config y1
// VectorValues y0 = VectorValues::Zero(xbar);
//
// VectorValues y1 = y0;
// y1[i2003] = Vector_(2, 1.0, -1.0);
// VectorValues x1 = system.x(y1);
//
// // Solve for the remaining constraints using PCG
// ConjugateGradientParameters parameters;
//// VectorValues actual = gtsam::conjugateGradients<SubgraphPreconditioner,
//// VectorValues, Errors>(system, y1, verbose, epsilon, epsilon, maxIterations);
//// CHECK(assert_equal(y0,actual));
//
// // Compare with non preconditioned version:
// VectorValues actual2 = conjugateGradientDescent(Ab, x1, parameters);
// CHECK(assert_equal(xtrue,actual2,1e-4));
//}
TEST( SubgraphPreconditioner, conjugateGradients )
{
// Build a planar graph
JacobianFactorGraph Ab;
VectorValues xtrue;
size_t N = 3;
boost::tie(Ab, xtrue) = planarGraph(N); // A*x-b
// Get the spanning tree and corresponding ordering
JacobianFactorGraph Ab1_, Ab2_; // A1*x-b1 and A2*x-b2
boost::tie(Ab1_, Ab2_) = splitOffPlanarTree(N, Ab);
SubgraphPreconditioner::sharedFG Ab1(new JacobianFactorGraph(Ab1_));
SubgraphPreconditioner::sharedFG Ab2(new JacobianFactorGraph(Ab2_));
// Eliminate the spanning tree to build a prior
Ordering ordering = planarOrdering(N);
SubgraphPreconditioner::sharedBayesNet Rc1 = GaussianSequentialSolver(Ab1_).eliminate(); // R1*x-c1
VectorValues xbar = optimize(*Rc1); // xbar = inv(R1)*c1
// Create Subgraph-preconditioned system
VectorValues::shared_ptr xbarShared(new VectorValues(xbar)); // TODO: horrible
SubgraphPreconditioner system(Ab1, Ab2, Rc1, xbarShared);
// Create zero config y0 and perturbed config y1
VectorValues y0 = VectorValues::Zero(xbar);
VectorValues y1 = y0;
y1[1] = Vector_(2, 1.0, -1.0);
VectorValues x1 = system.x(y1);
// Solve for the remaining constraints using PCG
ConjugateGradientParameters parameters;
VectorValues actual = conjugateGradients<SubgraphPreconditioner,
VectorValues, Errors>(system, y1, parameters);
CHECK(assert_equal(y0,actual));
// Compare with non preconditioned version:
VectorValues actual2 = conjugateGradientDescent(Ab, x1, parameters);
CHECK(assert_equal(xtrue,actual2,1e-4));
}
/* ************************************************************************* */
int main() { TestResult tr; return TestRegistry::runAllTests(tr); }