Pose2SLAMOptimizer prototype for use in MATLAB
parent 7bc4ee65da
commit 807cffbd61
@@ -20,7 +20,7 @@
 <folderInfo id="cdt.managedbuild.toolchain.gnu.macosx.base.1359703544.2031210194" name="/" resourcePath="">
 <toolChain id="cdt.managedbuild.toolchain.gnu.macosx.base.677243255" name="cdt.managedbuild.toolchain.gnu.macosx.base" superClass="cdt.managedbuild.toolchain.gnu.macosx.base">
 <targetPlatform archList="all" binaryParser="org.eclipse.cdt.core.MachO;org.eclipse.cdt.core.ELF" id="cdt.managedbuild.target.gnu.platform.macosx.base.752782918" name="Debug Platform" osList="macosx" superClass="cdt.managedbuild.target.gnu.platform.macosx.base"/>
-<builder arguments="" command="make" id="cdt.managedbuild.target.gnu.builder.macosx.base.319933862" keepEnvironmentInBuildfile="false" managedBuildOn="false" name="Gnu Make Builder" parallelBuildOn="true" parallelizationNumber="2" superClass="cdt.managedbuild.target.gnu.builder.macosx.base"/>
+<builder arguments="" command="make" id="cdt.managedbuild.target.gnu.builder.macosx.base.319933862" keepEnvironmentInBuildfile="false" managedBuildOn="false" name="Gnu Make Builder" parallelBuildOn="false" parallelizationNumber="2" superClass="cdt.managedbuild.target.gnu.builder.macosx.base"/>
 <tool id="cdt.managedbuild.tool.macosx.c.linker.macosx.base.457360678" name="MacOS X C Linker" superClass="cdt.managedbuild.tool.macosx.c.linker.macosx.base"/>
 <tool id="cdt.managedbuild.tool.macosx.cpp.linker.macosx.base.1011140787" name="MacOS X C++ Linker" superClass="cdt.managedbuild.tool.macosx.cpp.linker.macosx.base">
 <inputType id="cdt.managedbuild.tool.macosx.cpp.linker.input.1032375444" superClass="cdt.managedbuild.tool.macosx.cpp.linker.input">
.project
@@ -23,7 +23,7 @@
 </dictionary>
 <dictionary>
 <key>org.eclipse.cdt.make.core.buildArguments</key>
-<value>-j2</value>
+<value></value>
 </dictionary>
 <dictionary>
 <key>org.eclipse.cdt.make.core.buildCommand</key>
@@ -196,6 +196,12 @@ Errors GaussianFactorGraph::rhs() const {
 return e;
 }

+/* ************************************************************************* */
+Vector GaussianFactorGraph::rhsVector() const {
+Vector v;
+return v;
+}
+
 /* ************************************************************************* */
 pair<Matrix,Vector> GaussianFactorGraph::matrix(const Ordering& ordering) const {

@@ -164,6 +164,11 @@ namespace gtsam {
 */
 Errors rhs() const;

+/**
+* Return RHS (b./sigmas) as Vector
+*/
+Vector rhsVector() const;
+
 /**
 * Return (dense) matrix associated with factor graph
 * @param ordering of variables needed for matrix column order

@@ -245,8 +250,8 @@ namespace gtsam {
 /**
 * linearize the non-linear graph around the current config
 */
-GaussianFactorGraph linearize(const NonlinearGraph& g, const Config& config) const {
-return g.linearize(config);
+boost::shared_ptr<GaussianFactorGraph> linearize(const NonlinearGraph& g, const Config& config) const {
+return g.linearize_(config);
 }
 };
 }
@@ -187,8 +187,8 @@ headers += BetweenFactor.h PriorFactor.h
 headers += LieConfig.h LieConfig-inl.h TupleConfig.h TupleConfig-inl.h

 # 2D Pose SLAM
-headers += dataset.h
-sources += pose2SLAM.cpp dataset.cpp
+headers +=
+sources += pose2SLAM.cpp pose2SLAMOptimizer.cpp dataset.cpp
 check_PROGRAMS += testPose2Factor testPose2Config testPose2SLAM testPose2Prior
 testPose2Prior_SOURCES = testPose2Prior.cpp
 testPose2Prior_LDADD = libgtsam.la
@@ -262,17 +262,19 @@ namespace gtsam {
 // Write back result in Ab, imperative as we are
 // TODO: test that is correct if a column was skipped !!!!
 size_t i = 0; // start with first row
+bool mixed = false;
 BOOST_FOREACH(const Triple& t, Rd) {
 const size_t& j = t.get<0>();
 const Vector& rd = t.get<1>();
 precisions(i) = t.get<2>();
+if (precisions(i)==inf) mixed = true;
 for (size_t j2=0; j2<j; ++j2) Ab(i,j2) = 0.0; // fill in zeros below diagonal anway
 for (size_t j2=j; j2<n+1; ++j2) // copy the j-the row TODO memcpy
 Ab(i,j2) = rd(j2);
 i+=1;
 }

-return Constrained::MixedPrecisions(precisions);
+return mixed ? Constrained::MixedPrecisions(precisions) : Diagonal::Precisions(precisions);
 }

 /* ************************************************************************* */
@@ -58,8 +58,8 @@ namespace gtsam {
 /* ************************************************************************* */
 template<class G, class C, class L, class S, class W>
 VectorConfig NonlinearOptimizer<G, C, L, S, W>::linearizeAndOptimizeForDelta() const {
-L linearized = solver_->linearize(*graph_, *config_);
-return solver_->optimize(linearized);
+boost::shared_ptr<L> linearized = solver_->linearize(*graph_, *config_);
+return solver_->optimize(*linearized);
 }

 /* ************************************************************************* */

@@ -170,12 +170,12 @@ namespace gtsam {
 cout << "lambda = " << lambda_ << endl;

 // linearize all factors once
-L linear = graph_->linearize(*config_);
+boost::shared_ptr<L> linear = solver_->linearize(*graph_, *config_);
 if (verbosity >= LINEAR)
-linear.print("linear");
+linear->print("linear");

 // try lambda steps with successively larger lambda until we achieve descent
-return try_lambda(linear, verbosity, lambdaFactor);
+return try_lambda(*linear, verbosity, lambdaFactor);
 }

 /* ************************************************************************* */
@@ -0,0 +1,51 @@
+/*
+* Pose2SLAMOptimizer.cpp
+*
+* Created on: Jan 22, 2010
+* Author: dellaert
+*/
+
+#include "Pose2SLAMOptimizer.h"
+#include "pose2SLAM.h"
+#include "dataset.h"
+#include "SubgraphPreconditioner-inl.h"
+
+using namespace std;
+namespace gtsam {
+
+using namespace pose2SLAM;
+
+/* ************************************************************************* */
+Pose2SLAMOptimizer::Pose2SLAMOptimizer(const string& dataset_name,
+const string& path) {
+
+static int maxID = 0;
+static bool addNoise = false;
+
+string filename;
+boost::optional<SharedDiagonal> noiseModel;
+boost::tie(filename, noiseModel) = dataset(dataset_name);
+
+// read graph and initial estimate
+boost::tie(graph_, theta_) = load2D(filename, maxID, noiseModel, addNoise);
+graph_->addPrior(theta_->begin()->first, theta_->begin()->second,
+noiseModel::Unit::Create(3));
+
+// initialize non-linear solver
+solver_.initialize(*graph_, *theta_);
+}
+
+/* ************************************************************************* */
+void Pose2SLAMOptimizer::update(const Vector& x) {
+VectorConfig X; // TODO
+update(X);
+}
+
+/* ************************************************************************* */
+void Pose2SLAMOptimizer::updatePreconditioned(const Vector& y) {
+Vector x;
+update(x);
+}
+
+/* ************************************************************************* */
+}
@@ -0,0 +1,109 @@
+/*
+* Pose2SLAMOptimizer.h
+*
+* Created on: Jan 22, 2010
+* Author: dellaert
+*/
+
+#ifndef POSE2SLAMOPTIMIZER_H_
+#define POSE2SLAMOPTIMIZER_H_
+
+#include "pose2SLAM.h"
+#include "Ordering.h"
+#include "SubgraphPreconditioner.h"
+
+namespace gtsam {
+
+/**
+* Optimizer class for use in MATLAB
+* Keeps a Pose2Config estimate
+* Returns all relevant matrices so MATLAB can optimize :-)
+*/
+class Pose2SLAMOptimizer {
+private:
+
+/** Non-linear factor graph */
+boost::shared_ptr<Pose2Graph> graph_;
+
+/** Current non-linear estimate */
+boost::shared_ptr<Pose2Config> theta_;
+
+/** Non-linear solver */
+typedef SubgraphPCG<Pose2Graph, Pose2Config> SPCG_Solver;
+SPCG_Solver solver_;
+
+/** Linear Solver */
+boost::shared_ptr<SubgraphPreconditioner> system_;
+
+public:
+/**
+* Create optimizer: finds spanning tree and ordering
+*/
+Pose2SLAMOptimizer(const std::string& dataset = "",
+const std::string& path = "");
+
+/**
+* Virtual destructor
+*/
+virtual ~Pose2SLAMOptimizer() {
+}
+
+/**
+* return graph pointer
+*/
+boost::shared_ptr<const Pose2Graph> graph() const {
+return graph_;
+}
+/**
+* linearize around current theta
+*/
+boost::shared_ptr<const Pose2Config> theta() const {
+return theta_;
+}
+/**
+* linearize around current theta
+*/
+void linearize() {
+system_ = solver_.linearize(*graph_, *theta_);
+}
+
+/**
+* update estimate with pure delta config x
+*/
+void update(const VectorConfig& x) {
+// TODO instead of assigning can we create a new one and replace the shared ptr ?
+*theta_ = expmap(*theta_, x);
+linearize();
+}
+
+/**
+* linearize around current theta: done at construction and at update
+*/
+void optimize() {
+VectorConfig X = solver_.optimize(*system_);
+update(X);
+}
+
+/**
+* Return matrices associated with optimization problem
+* around current non-linear estimate theta
+* Returns [IJS] sparse representation
+*/
+Matrix Ab1() const { return system_->Ab1(*solver_.ordering()); }
+Matrix Ab2() const { return system_->Ab2(*solver_.ordering()); }
+
+/**
+* update estimate with pure delta config x
+*/
+void update(const Vector& x);
+
+/**
+* update estimate with pre-conditioned delta config y
+*/
+void updatePreconditioned(const Vector& y);
+
+};
+
+}
+
+#endif /* POSE2SLAMOPTIMIZER_H_ */
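A minimal usage sketch (not part of the commit) of the class declared above, using only members from Pose2SLAMOptimizer.h; the dataset name "3" is borrowed from the commented-out call in testPose2SLAM.cpp further down, and the loop count is arbitrary:

#include "Pose2SLAMOptimizer.h"
using namespace gtsam;

int main() {
  Pose2SLAMOptimizer optimizer("3");  // load dataset, add prior on first pose, build spanning tree
  optimizer.linearize();              // build the SubgraphPreconditioner held in system_
  Matrix Ab1 = optimizer.Ab1();       // spanning-tree part, documented above as [IJS] sparse form
  Matrix Ab2 = optimizer.Ab2();       // remaining (loop-closing) part
  for (int i = 0; i < 5; ++i)
    optimizer.optimize();             // PCG solve, then expmap the estimate and re-linearize
  boost::shared_ptr<const Pose2Config> estimate = optimizer.theta();
  return 0;
}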
@@ -21,8 +21,13 @@ namespace gtsam {

 /* ************************************************************************* */
 template<class G, class T>
-SubgraphPCG<G, T>::SubgraphPCG(const G& g, const T& theta0) :
-maxIterations_(100), verbose_(false), epsilon_(1e-4), epsilon_abs_(1e-5) {
+SubgraphPCG<G, T>::SubgraphPCG(const G& g, const T& theta0) {
+initialize(g,theta0);
+}
+
+/* ************************************************************************* */
+template<class G, class T>
+void SubgraphPCG<G, T>::initialize(const G& g, const T& theta0) {

 // generate spanning tree
 PredecessorMap<Key> tree = g.template findMinimumSpanningTree<Key, Constraint>();

@@ -42,17 +47,22 @@
 // compose the approximate solution
 Key root = keys.back();
 theta_bar_ = composePoses<G, Constraint, Pose, T> (T_, tree, theta0[root]);

 }

 /* ************************************************************************* */
 template<class G, class T>
-SubgraphPreconditioner SubgraphPCG<G, T>::linearize(const G& g, const T& theta_bar) const {
-GaussianFactorGraph Ab1 = T_.linearize(theta_bar);
+boost::shared_ptr<SubgraphPreconditioner> SubgraphPCG<G, T>::linearize(const G& g, const T& theta_bar) const {
+SubgraphPreconditioner::sharedFG Ab1 = T_.linearize_(theta_bar);
 SubgraphPreconditioner::sharedFG Ab2 = C_.linearize_(theta_bar);
-SubgraphPreconditioner::sharedBayesNet Rc1 = Ab1.eliminate_(*ordering_);
+#ifdef TIMING
+SubgraphPreconditioner::sharedBayesNet Rc1;
+SubgraphPreconditioner::sharedConfig xbar;
+#else
+GaussianFactorGraph sacrificialAb1 = T_.linearize(theta_bar); // duplicate !!!!!
+SubgraphPreconditioner::sharedBayesNet Rc1 = sacrificialAb1.eliminate_(*ordering_);
 SubgraphPreconditioner::sharedConfig xbar = gtsam::optimize_(*Rc1);
-return SubgraphPreconditioner(Rc1, Ab2, xbar);
+#endif
+return boost::shared_ptr<SubgraphPreconditioner>(new SubgraphPreconditioner(Ab1, Ab2, Rc1, xbar));
 }

 /* ************************************************************************* */
@@ -12,8 +12,8 @@ using namespace std;
 namespace gtsam {

 /* ************************************************************************* */
-SubgraphPreconditioner::SubgraphPreconditioner(sharedBayesNet& Rc1, sharedFG& Ab2, sharedConfig& xbar) :
-Rc1_(Rc1), Ab2_(Ab2), xbar_(xbar), b2bar_(Ab2_->errors_(*xbar)) {
+SubgraphPreconditioner::SubgraphPreconditioner(sharedFG& Ab1, sharedFG& Ab2, sharedBayesNet& Rc1, sharedConfig& xbar) :
+Ab1_(Ab1), Ab2_(Ab2), Rc1_(Rc1), xbar_(xbar), b2bar_(Ab2_->errors_(*xbar)) {
 }

 /* ************************************************************************* */
@@ -29,8 +29,8 @@ namespace gtsam {
 typedef boost::shared_ptr<const Errors> sharedErrors;

 private:
+sharedFG Ab1_, Ab2_;
 sharedBayesNet Rc1_;
-sharedFG Ab2_;
 sharedConfig xbar_;
 sharedErrors b2bar_; /** b2 - A2*xbar */

@@ -42,7 +42,10 @@ namespace gtsam {
 * @param Ab2: the Graph A2*x=b2
 * @param xbar: the solution to R1*x=c1
 */
-SubgraphPreconditioner(sharedBayesNet& Rc1, sharedFG& Ab2, sharedConfig& xbar);
+SubgraphPreconditioner(sharedFG& Ab1, sharedFG& Ab2, sharedBayesNet& Rc1, sharedConfig& xbar);
+
+Matrix Ab1(const Ordering& ordering) const { return Ab1_->sparse(ordering); }
+Matrix Ab2(const Ordering& ordering) const { return Ab2_->sparse(ordering); }

 /* x = xbar + inv(R1)*y */
 VectorConfig x(const VectorConfig& y) const;

@@ -77,9 +80,10 @@ namespace gtsam {
 typedef typename G::Constraint Constraint;
 typedef typename G::Pose Pose;

-const size_t maxIterations_;
-const bool verbose_;
-const double epsilon_, epsilon_abs_;
+// TODO not hardcode
+static const size_t maxIterations_=100;
+static const bool verbose_=false;
+static const double epsilon_=1e-4, epsilon_abs_=1e-5;

 /* the ordering derived from the spanning tree */
 boost::shared_ptr<Ordering> ordering_;

@@ -90,8 +94,12 @@ namespace gtsam {
 G T_, C_;

 public:
+SubgraphPCG() {}
+
 SubgraphPCG(const G& g, const T& theta0);

+void initialize(const G& g, const T& theta0);
+
 boost::shared_ptr<Ordering> ordering() const { return ordering_; }

 boost::shared_ptr<T> theta_bar() const { return theta_bar_; }

@@ -99,7 +107,7 @@ namespace gtsam {
 /**
 * linearize the non-linear graph around the current config and build the subgraph preconditioner systme
 */
-SubgraphPreconditioner linearize(const G& g, const T& theta_bar) const;
+boost::shared_ptr<SubgraphPreconditioner> linearize(const G& g, const T& theta_bar) const;

 /**
 * solve for the optimal displacement in the tangent space, and then solve
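Not spelled out in the commit, but the reshuffled members above follow the usual subgraph-preconditioning algebra, consistent with the comments "x = xbar + inv(R1)*y" and "b2 - A2*xbar": split the linearized graph into a spanning-tree part A1*x = b1 and the remaining factors A2*x = b2, eliminate the tree to get R1*xbar = c1, and substitute x = xbar + R1^{-1}*y, so that (up to a constant)

  min_x ||A1*x - b1||^2 + ||A2*x - b2||^2   becomes   min_y ||y||^2 + ||A2*R1^{-1}*y - b2bar||^2,  with b2bar = b2 - A2*xbar.

The conjugate-gradient iterations then run on the y-system, which is why the class keeps Ab1_, Ab2_, Rc1_, xbar_ and the cached b2bar_.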
@@ -1,3 +1,10 @@
+class Pose2SLAMOptimizer {
+Pose2SLAMOptimizer(string dataset_name);
+Matrix Ab1() const;
+Matrix Ab2() const;
+void update(Vector x) const;
+};
+
 class SharedGaussian {
 SharedGaussian(Matrix covariance);
 SharedGaussian(Vector sigmas);
@@ -17,6 +17,7 @@ using namespace boost::assign;
 #include "FactorGraph-inl.h"
 #include "Ordering.h"
 #include "pose2SLAM.h"
+#include "Pose2SLAMOptimizer.h"

 using namespace std;
 using namespace gtsam;

@@ -143,15 +144,15 @@ TEST(Pose2Graph, optimizeCircle) {
 initial->insert(4, expmap(hexagon[4],Vector_(3, 0.1,-0.1, 0.1)));
 initial->insert(5, expmap(hexagon[5],Vector_(3,-0.1, 0.1,-0.1)));

-// Choose an ordering and optimize
+// Choose an ordering
 shared_ptr<Ordering> ordering(new Ordering);
 *ordering += "x0","x1","x2","x3","x4","x5";
-typedef NonlinearOptimizer<Pose2Graph, Pose2Config> Optimizer;
-Optimizer::shared_solver solver(new Optimizer::solver(ordering));
-Optimizer optimizer0(fg, initial, solver);
-Optimizer::verbosityLevel verbosity = Optimizer::SILENT;
-// Optimizer::verbosityLevel verbosity = Optimizer::ERROR;
-Optimizer optimizer = optimizer0.levenbergMarquardt(1e-15, 1e-15, verbosity);
+
+// optimize
+pose2SLAM::Optimizer::shared_solver solver(new pose2SLAM::Optimizer::solver(ordering));
+pose2SLAM::Optimizer optimizer0(fg, initial, solver);
+pose2SLAM::Optimizer::verbosityLevel verbosity = pose2SLAM::Optimizer::SILENT;
+pose2SLAM::Optimizer optimizer = optimizer0.levenbergMarquardt(1e-15, 1e-15, verbosity);

 Pose2Config actual = *optimizer.config();

@@ -160,10 +161,29 @@ TEST(Pose2Graph, optimizeCircle) {

 // Check loop closure
 CHECK(assert_equal(delta,between(actual[5],actual[0])));

+// Try PCG class
+// Pose2SLAMOptimizer myOptimizer("3");
+
+// Matrix Ab1 = myOptimizer.Ab1();
+// CHECK(assert_equal(Matrix_(1,1,1.0),Ab1));
+//
+// Matrix Ab2 = myOptimizer.Ab2();
+// CHECK(assert_equal(Matrix_(1,1,1.0),Ab2));
+
+// Here, call matlab to
+// A=[A1;A2], b=[b1;b2]
+// R=qr(A1)
+// call pcg on A,b, with preconditioner R -> get x
+
+// Vector x;
+// myOptimizer.update(x);
+
+// Check with ground truth
+// CHECK(assert_equal(hexagon, *myOptimizer.theta()));
 }

 /* ************************************************************************* */
 // test optimization with 6 poses arranged in a hexagon and a loop closure
 TEST(Pose2Graph, findMinimumSpanningTree) {
 Pose2Graph G, T, C;
 G.addConstraint(1, 2, Pose2(0.,0.,0.), I3);

@@ -178,7 +198,6 @@ TEST(Pose2Graph, findMinimumSpanningTree) {
 }

 /* ************************************************************************* */
-// test optimization with 6 poses arranged in a hexagon and a loop closure
 TEST(Pose2Graph, split) {
 Pose2Graph G, T, C;
 G.addConstraint(1, 2, Pose2(0.,0.,0.), I3);