wrap iterative solver, subgraph solver, etc.
fix the matlab spcg example

branch: release/4.3a0
parent 82fd3982c3
commit e9eb96a408

gtsam.h | 41
@@ -1045,6 +1045,46 @@ class GaussianSequentialSolver {
   Matrix marginalCovariance(size_t j) const;
 };
 
+#include <gtsam/linear/IterativeSolver.h>
+virtual class IterativeOptimizationParameters {
+  string getKernel() const;
+  string getVerbosity() const;
+  void setKernel(string s);
+  void setVerbosity(string s);
+};
+
+//virtual class IterativeSolver {
+//  IterativeSolver();
+//  gtsam::VectorValues optimize ();
+//};
+
+#include <gtsam/linear/ConjugateGradientSolver.h>
+virtual class ConjugateGradientParameters : gtsam::IterativeOptimizationParameters {
+  ConjugateGradientParameters();
+  size_t getMinIterations() const;
+  size_t getMaxIterations() const;
+  size_t getReset() const;
+  double getEpsilon_rel() const;
+  double getEpsilon_abs() const;
+
+  void setMinIterations(size_t value);
+  void setMaxIterations(size_t value);
+  void setReset(size_t value);
+  void setEpsilon_rel(double value);
+  void setEpsilon_abs(double value);
+};
+
+#include <gtsam/linear/SubgraphSolver.h>
+virtual class SubgraphSolverParameters : gtsam::ConjugateGradientParameters {
+  SubgraphSolverParameters();
+  void print(string s) const;
+};
+
+class SubgraphSolver {
+  SubgraphSolver(const gtsam::GaussianFactorGraph &A, const gtsam::SubgraphSolverParameters &parameters);
+  gtsam::VectorValues optimize() const;
+};
+
 #include <gtsam/linear/KalmanFilter.h>
 class KalmanFilter {
   KalmanFilter(size_t n);
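With these declarations the iterative-solver classes become visible to the MATLAB wrapper. A minimal usage sketch, assuming the generated toolbox exposes them under the gtsam namespace and that gfg is a gtsam.GaussianFactorGraph built elsewhere (the variable name is illustrative only):

% Hedged sketch: solve a Gaussian factor graph with the newly wrapped SubgraphSolver.
subgraphParams = gtsam.SubgraphSolverParameters;
subgraphParams.setMaxIterations(100);   % plain conjugate-gradient settings
subgraphParams.setEpsilon_rel(1e-3);
solver = gtsam.SubgraphSolver(gfg, subgraphParams);
delta = solver.optimize();              % returns a gtsam.VectorValues with the solution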

@@ -1288,6 +1328,7 @@ virtual class SuccessiveLinearizationParams : gtsam::NonlinearOptimizerParams {
 
   void setLinearSolverType(string solver);
   void setOrdering(const gtsam::Ordering& ordering);
+  void setIterativeParams(const gtsam::SubgraphSolverParameters &params);
 
   bool isMultifrontal() const;
   bool isSequential() const;
@@ -20,9 +20,7 @@ namespace gtsam {
  */
 
 class ConjugateGradientParameters : public IterativeOptimizationParameters {
 
 public:
 
   typedef IterativeOptimizationParameters Base;
   typedef boost::shared_ptr<ConjugateGradientParameters> shared_ptr;
 
@@ -49,7 +47,21 @@ public:
   inline double epsilon_rel() const { return epsilon_rel_; }
   inline double epsilon_abs() const { return epsilon_abs_; }
 
-  void print() const {
+  inline size_t getMinIterations() const { return minIterations_; }
+  inline size_t getMaxIterations() const { return maxIterations_; }
+  inline size_t getReset() const { return reset_; }
+  inline double getEpsilon() const { return epsilon_rel_; }
+  inline double getEpsilon_rel() const { return epsilon_rel_; }
+  inline double getEpsilon_abs() const { return epsilon_abs_; }
+
+  inline void setMinIterations(size_t value) { minIterations_ = value; }
+  inline void setMaxIterations(size_t value) { maxIterations_ = value; }
+  inline void setReset(size_t value) { reset_ = value; }
+  inline void setEpsilon(double value) { epsilon_rel_ = value; }
+  inline void setEpsilon_rel(double value) { epsilon_rel_ = value; }
+  inline void setEpsilon_abs(double value) { epsilon_abs_ = value; }
+
+  virtual void print(const std::string &s="") const {
     Base::print();
     std::cout << "ConjugateGradientParameters: "
               << "minIter = " << minIterations_
@@ -61,18 +73,4 @@ public:
   }
 };
 
-//class ConjugateGradientSolver : public IterativeSolver {
-//
-//public:
-//
-//  typedef ConjugateGradientParameters Parameters;
-//
-//  Parameters parameters_;
-//
-//  ConjugateGradientSolver(const ConjugateGradientParameters &parameters) : parameters_(parameters) {}
-//  virtual VectorValues::shared_ptr optimize () = 0;
-//  virtual const IterativeOptimizationParameters& _params() const = 0;
-//};
-
-
 }
@@ -0,0 +1,50 @@
+/**
+ * @file IterativeSolver.cpp
+ * @brief
+ * @date Sep 3, 2012
+ * @author Yong-Dian Jian
+ */
+
+#include <gtsam/linear/IterativeSolver.h>
+#include <boost/algorithm/string.hpp>
+#include <string>
+
+namespace gtsam {
+
+  std::string IterativeOptimizationParameters::getKernel() const { return kernelTranslator(kernel_); }
+  std::string IterativeOptimizationParameters::getVerbosity() const { return verbosityTranslator(verbosity_); }
+  void IterativeOptimizationParameters::setKernel(const std::string &src) { kernel_ = kernelTranslator(src); }
+  void IterativeOptimizationParameters::setVerbosity(const std::string &src) { verbosity_ = verbosityTranslator(src); }
+
+  IterativeOptimizationParameters::Kernel IterativeOptimizationParameters::kernelTranslator(const std::string &src) {
+    std::string s = src;  boost::algorithm::to_upper(s);
+    if (s == "CG") return IterativeOptimizationParameters::CG;
+    /* default is CG */
+    else return IterativeOptimizationParameters::CG;
+  }
+
+  IterativeOptimizationParameters::Verbosity IterativeOptimizationParameters::verbosityTranslator(const std::string &src) {
+    std::string s = src;  boost::algorithm::to_upper(s);
+    if (s == "SILENT") return IterativeOptimizationParameters::SILENT;
+    else if (s == "COMPLEXITY") return IterativeOptimizationParameters::COMPLEXITY;
+    else if (s == "ERROR") return IterativeOptimizationParameters::ERROR;
+    /* default is SILENT */
+    else return IterativeOptimizationParameters::SILENT;
+  }
+
+  std::string IterativeOptimizationParameters::kernelTranslator(IterativeOptimizationParameters::Kernel k) {
+    if ( k == CG ) return "CG";
+    else return "UNKNOWN";
+  }
+
+  std::string IterativeOptimizationParameters::verbosityTranslator(IterativeOptimizationParameters::Verbosity verbosity) {
+    if (verbosity == SILENT) return "SILENT";
+    else if (verbosity == COMPLEXITY) return "COMPLEXITY";
+    else if (verbosity == ERROR) return "ERROR";
+    else return "UNKNOWN";
+  }
+
+}
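These string translators back the getKernel/getVerbosity/setKernel/setVerbosity accessors exported through gtsam.h above, so kernel and verbosity can be chosen by name from MATLAB. A minimal sketch, assuming the generated toolbox exposes the inherited accessors on SubgraphSolverParameters:

% Hedged sketch: string-based configuration via the new translators.
subgraphParams = gtsam.SubgraphSolverParameters;
subgraphParams.setVerbosity('ERROR');   % translated to the Verbosity enum
subgraphParams.setKernel('CG');         % only 'CG' is recognized; anything else falls back to CG
disp(subgraphParams.getVerbosity());    % prints 'ERROR'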
@@ -13,6 +13,7 @@
 #include <gtsam/linear/GaussianFactorGraph.h>
 #include <gtsam/linear/VectorValues.h>
+#include <string>
 
 namespace gtsam {
 

@@ -41,22 +42,32 @@ namespace gtsam {
     inline Kernel kernel() const { return kernel_; }
     inline Verbosity verbosity() const { return verbosity_; }
 
+    /* matlab interface */
+    std::string getKernel() const;
+    std::string getVerbosity() const;
+    void setKernel(const std::string &s);
+    void setVerbosity(const std::string &s);
+
     void print() const {
       const std::string kernelStr[1] = {"cg"};
       std::cout << "IterativeOptimizationParameters: "
                 << "kernel = " << kernelStr[kernel_]
                 << ", verbosity = " << verbosity_ << std::endl;
     }
 
+    static Kernel kernelTranslator(const std::string &s);
+    static Verbosity verbosityTranslator(const std::string &s);
+    static std::string kernelTranslator(Kernel k);
+    static std::string verbosityTranslator(Verbosity v);
   };
 
   class IterativeSolver {
   public:
     typedef boost::shared_ptr<IterativeSolver> shared_ptr;
-    IterativeSolver(){}
+    IterativeSolver() {}
     virtual ~IterativeSolver() {}
 
-    /* update interface to the nonlinear optimizer */
-    virtual void replaceFactors(const GaussianFactorGraph::shared_ptr &factorGraph, const double lambda) {}
+    /* interface to the nonlinear optimizer */
     virtual VectorValues optimize () = 0;
   };
 
@@ -25,9 +25,9 @@ class SubgraphSolverParameters : public ConjugateGradientParameters {
 public:
   typedef ConjugateGradientParameters Base;
   SubgraphSolverParameters() : Base() {}
+  virtual void print(const std::string &s="") const { Base::print(s); }
 };
 
 /**
  * This class implements the SPCG solver presented in Dellaert et al in IROS'10.
  *
@@ -15,6 +15,10 @@
 namespace gtsam {
 
+void SuccessiveLinearizationParams::setIterativeParams(const SubgraphSolverParameters &params) {
+  iterativeParams = boost::make_shared<SubgraphSolverParameters>(params);
+}
+
 void SuccessiveLinearizationParams::print(const std::string& str) const {
   NonlinearOptimizerParams::print(str);
   switch ( linearSolverType ) {
@@ -19,7 +19,7 @@
 #pragma once
 
 #include <gtsam/nonlinear/NonlinearOptimizer.h>
-#include <gtsam/linear/IterativeSolver.h>
+#include <gtsam/linear/SubgraphSolver.h>
 
 namespace gtsam {
 

@@ -74,6 +74,7 @@ public:
   std::string getLinearSolverType() const { return linearSolverTranslator(linearSolverType); }
 
   void setLinearSolverType(const std::string& solver) { linearSolverType = linearSolverTranslator(solver); }
+  void setIterativeParams(const SubgraphSolverParameters &params);
   void setOrdering(const Ordering& ordering) { this->ordering = ordering; }
 
 private:
@@ -51,7 +51,14 @@ initialEstimate.insert(4, Pose2(4.0, 2.0, pi ));
 initialEstimate.insert(5, Pose2(2.1, 2.1,-pi/2));
 initialEstimate.print(sprintf('\nInitial estimate:\n'));
 
-%% Optimize using Levenberg-Marquardt optimization with an ordering from colamd
-optimizer = DoglegOptimizer(graph, initialEstimate);
-result = optimizer.optimizeSafely();
-result.print(sprintf('\nFinal result:\n'));
+%% Optimize using Levenberg-Marquardt optimization with SubgraphSolver
+params = gtsam.LevenbergMarquardtParams;
+subgraphParams = gtsam.SubgraphSolverParameters;
+params.setLinearSolverType('CONJUGATE_GRADIENT');
+params.setIterativeParams(subgraphParams);
+optimizer = gtsam.LevenbergMarquardtOptimizer(graph, initialEstimate);
+result = optimizer.optimize();
+result.print(sprintf('\nFinal result:\n'));
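Note that, as written, the params object configured above is never handed to the optimizer, so the conjugate-gradient/subgraph settings would not take effect. A hedged variant of the last three lines, assuming the wrapper also exposes the three-argument LevenbergMarquardtOptimizer constructor that accepts a LevenbergMarquardtParams:

% Hedged sketch (assumes the three-argument constructor is wrapped):
optimizer = gtsam.LevenbergMarquardtOptimizer(graph, initialEstimate, params);
result = optimizer.optimize();
result.print(sprintf('\nFinal result:\n'));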