move iterative.* back to stable. add a template-style nonlinear iterative solver

parent 03a6317a0c
commit 6fd2ac7f86
@@ -19,7 +19,9 @@ namespace gtsam {
  * parameters for the conjugate gradient method
  */
 
-struct ConjugateGradientParameters : public IterativeOptimizationParameters {
+class ConjugateGradientParameters : public IterativeOptimizationParameters {
 
+public:
+
 typedef IterativeOptimizationParameters Base;
 typedef boost::shared_ptr<ConjugateGradientParameters> shared_ptr;
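Note on the `struct` to `class` switch above (and the matching one in `SubgraphSolverParameters` below): in C++ the two keywords differ only in default member access, which is why an explicit `public:` now accompanies each switch. A minimal standalone illustration (not GTSAM code):

```cpp
struct P1 { int maxIterations; };           // struct: members public by default
class  P2 { int maxIterations; };           // class: members private by default
class  P3 { public: int maxIterations; };   // class + explicit public:, as in the diff

int main() {
  P1 a; a.maxIterations = 100;     // compiles: public by default
  // P2 b; b.maxIterations = 100;  // would NOT compile: private by default
  P3 c; c.maxIterations = 100;     // compiles again thanks to public:
  return 0;
}
```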
@@ -13,14 +13,13 @@
 #include <gtsam/linear/GaussianConditional.h>
 #include <gtsam/linear/GaussianBayesNet.h>
 #include <gtsam/linear/GaussianFactorGraph.h>
+#include <gtsam/linear/iterative-inl.h>
 #include <gtsam/linear/JacobianFactorGraph.h>
 #include <gtsam/linear/SubgraphSolver.h>
 #include <gtsam/linear/VectorValues.h>
 #include <gtsam/inference/graph-inl.h>
 #include <gtsam/inference/EliminationTree.h>
 
-#include <gtsam_unstable/linear/iterative-inl.h>
-
 #include <boost/foreach.hpp>
 #include <boost/shared_ptr.hpp>
 
@@ -19,7 +19,8 @@
 
 namespace gtsam {
 
-struct SubgraphSolverParameters : public ConjugateGradientParameters {
+class SubgraphSolverParameters : public ConjugateGradientParameters {
+public:
 typedef ConjugateGradientParameters Base;
 SubgraphSolverParameters() : Base() {}
 };
@@ -262,6 +262,11 @@ namespace gtsam {
     return gtsam::dot(this->values_, V.values_);
   }
 
+  /** Vector L2 norm */
+  inline double norm() const {
+    return this->vector().norm();
+  }
+
   /**
    * + operator does element-wise addition. Both VectorValues must have the
    * same structure (checked when NDEBUG is not defined).
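The new `norm()` above is a convenience wrapper: flatten the `VectorValues` into one Eigen vector, then take its L2 norm. A hedged usage sketch; the `insert`/`Vector_` construction is assumed from the GTSAM 2.x-era API, so treat the setup as illustrative:

```cpp
#include <cassert>
#include <cmath>
#include <gtsam/linear/VectorValues.h>

int main() {
  // Assumed GTSAM 2.x-era API: VectorValues::insert(Index, Vector) and the
  // Vector_ helper from gtsam/base/Vector.h.
  gtsam::VectorValues g;
  g.insert(0, gtsam::Vector_(2, 3.0, 4.0));  // one variable holding (3, 4)

  // The new method is shorthand for flatten-then-L2-norm:
  assert(std::fabs(g.norm() - g.vector().norm()) < 1e-12);
  assert(std::fabs(g.norm() - 5.0) < 1e-12);  // ||(3,4)||_2 = 5
  return 0;
}
```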
@@ -18,7 +18,7 @@
 
 #pragma once
 
-#include <gtsam_unstable/linear/iterative.h>
+#include <gtsam/linear/iterative.h>
 #include <gtsam/linear/ConjugateGradientSolver.h>
 #include <boost/shared_ptr.hpp>
 
@@ -16,7 +16,7 @@
  * @date Dec 28, 2009
  */
 
-#include <gtsam_unstable/linear/iterative-inl.h>
+#include <gtsam/linear/iterative-inl.h>
 #include <gtsam/base/Vector.h>
 #include <gtsam/base/Matrix.h>
 #include <gtsam/linear/JacobianFactorGraph.h>
@@ -149,5 +149,5 @@ namespace gtsam {
 
 } // namespace gtsam
 
-#include <gtsam_unstable/linear/iterative-inl.h>
+#include <gtsam/linear/iterative-inl.h>
 
@@ -5,15 +5,13 @@
  * @date Jun 11, 2012
  */
 
-#include <cmath>
-
 #include <gtsam/nonlinear/GradientDescentOptimizer.h>
 #include <gtsam/nonlinear/Ordering.h>
 #include <gtsam/nonlinear/Values.h>
 #include <gtsam/linear/JacobianFactorGraph.h>
 #include <gtsam/linear/VectorValues.h>
 
-#include <gtsam_unstable/linear/iterative.h>
+#include <cmath>
 
 using namespace std;
@@ -30,7 +28,7 @@ void gradientInPlace(const NonlinearFactorGraph &nfg, const Values &values, cons
   const FactorGraph<JacobianFactor>::shared_ptr jfg = linear->dynamicCastFactors<FactorGraph<JacobianFactor> >();
 
   // compute the gradient direction
-  gtsam::gradientAtZero(*jfg, g);
+  gradientAtZero(*jfg, g);
 }
@@ -112,9 +110,27 @@ void GradientDescentOptimizer::iterate() {
     std::cout << "minStep = " << minStep << ", maxStep = " << maxStep << ", newStep = " << newStep << ", newError = " << newError << std::endl;
   }
 }
 
 // Increment the iteration counter
 ++state_.iterations;
 }
 
+double GradientDescentOptimizer2::System::error(const State &state) const {
+  return graph_.error(state);
+}
+
+GradientDescentOptimizer2::System::Gradient GradientDescentOptimizer2::System::gradient(const State &state) const {
+  Gradient result = state.zeroVectors(ordering_);
+  gradientInPlace(graph_, state, ordering_, result);
+  return result;
+}
+
+GradientDescentOptimizer2::System::State GradientDescentOptimizer2::System::advance(const State &current, const double alpha, const Gradient &g) const {
+  Gradient step = g;
+  step.vector() *= alpha;
+  return current.retract(step, ordering_);
+}
+
+Values GradientDescentOptimizer2::optimize() {
+  return gradientDescent<System, Values>(System(graph_, *ordering_), initialEstimate_, params_);
+}
+
 } /* namespace gtsam */
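For orientation: `System::advance` scales the gradient by `alpha` and retracts, and the `lineSearch` template added in the header below brackets `alpha` inside `[-1/||g||, 0]`, so the accepted step always moves against the gradient. As a sketch, the update rule being implemented is:

```latex
x_{k+1} = \operatorname{retract}\!\big(x_k,\ \alpha\,g_k\big),
\qquad
\alpha \;\approx\; \operatorname*{arg\,min}_{a \,\in\, [-1/\lVert g_k\rVert,\ 0]}
  f\big(\operatorname{retract}(x_k,\ a\,g_k)\big)
```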
@@ -11,7 +11,7 @@
 
 namespace gtsam {
 
-class GradientDescentOptimizer;
+/* an implementation of the gradient-descent method using the NLO interface */
 
 class GradientDescentParams : public NonlinearOptimizerParams {
 
@@ -70,5 +70,151 @@ protected:
 };
 
 
+#include <gtsam/linear/IterativeSolver.h>
+
+/* Yet another implementation of the gradient-descent method using the iterative.h interface */
+class GradientDescentParams2 : public NonlinearOptimizerParams {
+public:
+  GradientDescentParams2() {}
+};
+
+class GradientDescentOptimizer2 {
+
+  class System {
+
+  public:
+
+    typedef Values State;
+    typedef VectorValues Gradient;
+
+  protected:
+
+    NonlinearFactorGraph graph_;
+    Ordering ordering_;
+
+  public:
+
+    System(const NonlinearFactorGraph &graph, const Ordering &ordering) : graph_(graph), ordering_(ordering) {}
+    double error(const State &state) const;
+    Gradient gradient(const State &state) const;
+    State advance(const State &current, const double alpha, const Gradient &g) const;
+  };
+
+public:
+
+  typedef GradientDescentParams2 Parameters;
+  typedef boost::shared_ptr<GradientDescentOptimizer2> shared_ptr;
+
+protected:
+
+  NonlinearFactorGraph graph_;
+  Values initialEstimate_;
+  Parameters params_;
+  Ordering::shared_ptr ordering_;
+  VectorValues::shared_ptr gradient_;
+
+public:
+
+  GradientDescentOptimizer2(const NonlinearFactorGraph& graph, const Values& initialValues, const Parameters& params = Parameters())
+    : graph_(graph), initialEstimate_(initialValues), params_(params),
+      ordering_(initialValues.orderingArbitrary()),
+      gradient_(new VectorValues(initialValues.zeroVectors(*ordering_))) {}
+
+  virtual ~GradientDescentOptimizer2() {}
+
+  virtual Values optimize();
+};
+
+template <class S, class V, class W>
+double lineSearch(const S &system, const V currentValues, const W &gradient) {
+
+  /* normalize it such that it becomes a unit vector */
+  const double g = gradient.norm();
+
+  // perform the golden-section search algorithm to decide the optimal step size;
+  // for details, refer to http://en.wikipedia.org/wiki/Golden_section_search
+  const double phi = 0.5*(1.0+std::sqrt(5.0)), resphi = 2.0 - phi, tau = 1e-5;
+  double minStep = -1.0/g, maxStep = 0,
+         newStep = minStep + (maxStep-minStep) / (phi+1.0);
+
+  V newValues = system.advance(currentValues, newStep, gradient);
+  double newError = system.error(newValues);
+
+  while (true) {
+    const bool flag = (maxStep - newStep > newStep - minStep);
+    const double testStep = flag ?
+      newStep + resphi * (maxStep - newStep) : newStep - resphi * (newStep - minStep);
+
+    if ( (maxStep - minStep) < tau * (std::fabs(testStep) + std::fabs(newStep)) ) {
+      return 0.5*(minStep+maxStep);
+    }
+
+    const V testValues = system.advance(currentValues, testStep, gradient);
+    const double testError = system.error(testValues);
+
+    // update the working range
+    if ( testError >= newError ) {
+      if ( flag ) maxStep = testStep;
+      else minStep = testStep;
+    }
+    else {
+      if ( flag ) {
+        minStep = newStep;
+        newStep = testStep;
+        newError = testError;
+      }
+      else {
+        maxStep = newStep;
+        newStep = testStep;
+        newError = testError;
+      }
+    }
+  }
+
+  return 0.0; // unreachable: the loop above always returns
+}
+
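The bracketing loop above is a textbook golden-section search over the step size. A self-contained sketch of the same logic on a plain 1-D function (no GTSAM types; the constants `phi`, `resphi`, and `tau` match the template above):

```cpp
#include <cmath>
#include <cstdio>

// Golden-section search for the minimizer of f on [lo, hi].
double goldenSection(double (*f)(double), double lo, double hi) {
  const double phi = 0.5 * (1.0 + std::sqrt(5.0)), resphi = 2.0 - phi, tau = 1e-5;
  double mid = lo + (hi - lo) / (phi + 1.0);
  double fmid = f(mid);
  while (true) {
    const bool rightIsWider = (hi - mid > mid - lo);
    const double test = rightIsWider ? mid + resphi * (hi - mid)
                                     : mid - resphi * (mid - lo);
    if (hi - lo < tau * (std::fabs(test) + std::fabs(mid)))
      return 0.5 * (lo + hi);                   // interval is small enough
    const double ftest = f(test);
    if (ftest >= fmid) {                        // shrink away from the test point
      if (rightIsWider) hi = test; else lo = test;
    } else {                                    // test point is the new best
      if (rightIsWider) lo = mid; else hi = mid;
      mid = test; fmid = ftest;
    }
  }
}

static double parabola(double x) { return (x - 2.0) * (x - 2.0) + 1.0; }

int main() {
  // Prints a value very close to 2.0, the minimizer of the parabola.
  std::printf("argmin ~ %f\n", goldenSection(parabola, 0.0, 5.0));
  return 0;
}
```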
+template <class S, class V>
+V gradientDescent(const S &system, const V &initial, const NonlinearOptimizerParams &params) {
+
+  V currentValues = initial, prevValues;
+  double currentError = system.error(currentValues), prevError;
+  Index iteration = 0;
+
+  // check if we're already close enough
+  if (currentError <= params.errorTol) {
+    if (params.verbosity >= NonlinearOptimizerParams::ERROR)
+      std::cout << "Exiting, as error = " << currentError << " < " << params.errorTol << std::endl;
+    return currentValues;
+  }
+
+  // Maybe show output
+  if (params.verbosity >= NonlinearOptimizerParams::ERROR) std::cout << "Initial error: " << currentError << std::endl;
+
+  // Iterative loop
+  do {
+    // Do next iteration
+    const typename S::Gradient gradient = system.gradient(currentValues);
+    const double alpha = lineSearch(system, currentValues, gradient);
+
+    prevValues = currentValues;
+    prevError = currentError;
+
+    currentValues = system.advance(prevValues, alpha, gradient);
+    currentError = system.error(currentValues);
+
+    // Maybe show output
+    if (params.verbosity >= NonlinearOptimizerParams::ERROR) std::cout << "currentError: " << currentError << std::endl;
+  } while ( ++iteration < params.maxIterations &&
+            !checkConvergence(params.relativeErrorTol, params.absoluteErrorTol,
+                              params.errorTol, prevError, currentError, params.verbosity));
+
+  // Printing if verbose
+  if (params.verbosity >= NonlinearOptimizerParams::ERROR && iteration >= params.maxIterations)
+    std::cout << "Terminating because reached maximum iterations" << std::endl;
+
+  return currentValues;
+}
+
 }
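To make the `System` concept concrete without GTSAM types: any type providing `State`/`Gradient` typedefs plus `error`, `gradient`, and `advance` can drive these templates. Below is a hedged, self-contained mirror of that interface on a 1-D quadratic; the driver uses a fixed step and a plain error tolerance in place of `lineSearch` and `checkConvergence`, so it is a sketch of the pattern, not GTSAM code:

```cpp
#include <cmath>
#include <cstdio>

// f(x) = (x - 5)^2 exposed through the same concept the templates expect:
// State/Gradient typedefs plus error(), gradient(), advance().
struct QuadraticSystem {
  typedef double State;
  typedef double Gradient;

  double error(const State &x) const { return (x - 5.0) * (x - 5.0); }
  Gradient gradient(const State &x) const { return 2.0 * (x - 5.0); }
  State advance(const State &x, double alpha, const Gradient &g) const {
    return x + alpha * g;  // vector-space "retract": just move along g
  }
};

// Simplified driver standing in for gradientDescent(): a fixed alpha = -0.25
// replaces lineSearch, and a plain tolerance replaces checkConvergence.
template <class S, class V>
V miniGradientDescent(const S &system, V current, int maxIterations) {
  for (int i = 0; i < maxIterations && system.error(current) > 1e-12; ++i)
    current = system.advance(current, -0.25, system.gradient(current));
  return current;
}

int main() {
  QuadraticSystem sys;
  double solution = miniGradientDescent(sys, 0.0, 100);
  std::printf("x* ~ %f (expected 5.0)\n", solution);  // converges to 5
  return 0;
}
```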
@@ -4,7 +4,6 @@ set (gtsam_unstable_subdirs
 base
 discrete
 dynamics
-linear
 nonlinear
 )
 
@@ -5,19 +5,21 @@
  * @date Jun 11, 2012
  */
 
-#include <gtsam/nonlinear/GradientDescentOptimizer.h>
 #include <gtsam/slam/pose2SLAM.h>
+#include <gtsam/nonlinear/GradientDescentOptimizer.h>
 
 #include <CppUnitLite/TestHarness.h>
 
-#include <boost/shared_ptr.hpp>
 #include <boost/make_shared.hpp>
+#include <boost/shared_ptr.hpp>
+#include <boost/tuple/tuple.hpp>
 
 using namespace std;
 using namespace gtsam;
 
-/* ************************************************************************* */
-TEST(optimize, GradientDescentOptimizer) {
+boost::tuple<pose2SLAM::Graph, Values> generateProblem() {
 
   // 1. Create graph container and add factors to it
   pose2SLAM::Graph graph;
@@ -45,9 +47,21 @@ TEST(optimize, GradientDescentOptimizer) {
   Pose2 x3(4.1, 0.1, M_PI_2); initialEstimate.insertPose(3, x3);
   Pose2 x4(4.0, 2.0, M_PI ); initialEstimate.insertPose(4, x4);
   Pose2 x5(2.1, 2.1,-M_PI_2); initialEstimate.insertPose(5, x5);
 
+  return boost::tie(graph, initialEstimate);
+}
+
+/* ************************************************************************* */
+TEST(optimize, GradientDescentOptimizer) {
+
+  pose2SLAM::Graph graph;
+  pose2SLAM::Values initialEstimate;
+
+  boost::tie(graph, initialEstimate) = generateProblem();
   // cout << "initial error = " << graph.error(initialEstimate) << endl ;
 
-  // 4. Single Step Optimization using Levenberg-Marquardt
+  // Optimize using gradient descent
   GradientDescentParams param;
   param.maxIterations = 500; /* requires a larger number of iterations to converge */
   param.verbosity = NonlinearOptimizerParams::SILENT;
@@ -58,8 +72,32 @@ TEST(optimize, GradientDescentOptimizer) {
 
   /* the optimality of the solution is not comparable to the */
   DOUBLES_EQUAL(0.0, graph.error(result), 1e-2);
 
+  CHECK(1);
 }
 
+/* ************************************************************************* */
+TEST(optimize, GradientDescentOptimizer2) {
+
+  pose2SLAM::Graph graph;
+  pose2SLAM::Values initialEstimate;
+
+  boost::tie(graph, initialEstimate) = generateProblem();
+  // cout << "initial error = " << graph.error(initialEstimate) << endl ;
+
+  // Optimize using gradient descent
+  GradientDescentParams2 param;
+  param.maxIterations = 500; /* requires a larger number of iterations to converge */
+  param.verbosity = NonlinearOptimizerParams::ERROR;
+
+  GradientDescentOptimizer2 optimizer(graph, initialEstimate, param);
+  Values result = optimizer.optimize();
+  // cout << "solver final error = " << graph.error(result) << endl;
+
+  /* the optimality of the solution is not comparable to the */
+  DOUBLES_EQUAL(0.0, graph.error(result), 1e-2);
+}
+
 /* ************************************************************************* */
 int main() { TestResult tr; return TestRegistry::runAllTests(tr); }
@@ -15,9 +15,9 @@
  * @author Frank Dellaert
  **/
 
-#include <gtsam_unstable/linear/iterative.h>
 #include <tests/smallExample.h>
 #include <gtsam/nonlinear/Ordering.h>
+#include <gtsam/linear/iterative.h>
 #include <gtsam/linear/JacobianFactorGraph.h>
 #include <gtsam/linear/GaussianSequentialSolver.h>
 #include <gtsam/linear/SubgraphPreconditioner.h>