Updating nonlinear factors, the nonlinear factor graph, the nonlinear optimizers, and LinearContainerFactor to work with the unordered linear classes. Does not compile - needs more work.

release/4.3a0
Richard Roberts 2013-08-01 21:57:33 +00:00
parent 5e3b4bf477
commit c868056c22
17 changed files with 253 additions and 414 deletions
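
At a high level, the change removes the separate Index/OrderingOrdered layer: graphs linearize directly against Values keys, fill-reducing orderings are computed on demand with Ordering::COLAMD, and retract/localCoordinates no longer take an ordering. A rough sketch of one Gauss-Newton-style step written against the new API (the helper function below and the use of default elimination functions are illustrative assumptions, not code from this commit):

#include <gtsam/nonlinear/NonlinearFactorGraph.h>
#include <gtsam/nonlinear/Values.h>
#include <gtsam/inference/Ordering.h>
#include <gtsam/linear/GaussianFactorGraph.h>
#include <gtsam/linear/GaussianBayesNet.h>
#include <gtsam/linear/VectorValues.h>

using namespace gtsam;

// One Gauss-Newton-style step with the unordered API: everything is keyed
// directly by Key, so no Index remapping is needed anywhere.
Values gaussNewtonStep(const NonlinearFactorGraph& graph, const Values& values) {
  // Linearize at the current estimate (the ordering argument is gone)
  GaussianFactorGraph::shared_ptr linear = graph.linearize(values);

  // Compute a fill-reducing ordering on demand
  Ordering ordering = Ordering::COLAMD(graph);

  // Eliminate into a Bayes net and back-substitute for the update
  VectorValues delta = linear->eliminateSequential(ordering)->optimize();

  // Retract no longer needs an ordering either
  return values.retract(delta);
}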

gtsam/nonlinear/DoglegOptimizer.cpp View File

@ -17,10 +17,11 @@
*/
#include <gtsam/nonlinear/DoglegOptimizer.h>
#include <gtsam/inference/EliminationTreeOrdered.h>
#include <gtsam/linear/GaussianJunctionTreeOrdered.h>
#include <gtsam/nonlinear/DoglegOptimizerImpl.h>
#include <gtsam/linear/GaussianBayesTree.h>
#include <gtsam/linear/GaussianBayesNet.h>
#include <gtsam/linear/GaussianFactorGraph.h>
#include <gtsam/linear/VectorValues.h>
#include <boost/algorithm/string.hpp>
@ -53,8 +54,7 @@ std::string DoglegParams::verbosityDLTranslator(VerbosityDL verbosityDL) const {
void DoglegOptimizer::iterate(void) {
// Linearize graph
const OrderingOrdered& ordering = *params_.ordering;
GaussianFactorGraphOrdered::shared_ptr linear = graph_.linearize(state_.values, ordering);
GaussianFactorGraph::shared_ptr linear = graph_.linearize(state_.values);
// Pull out parameters we'll use
const bool dlVerbose = (params_.verbosityDL > DoglegParams::SILENT);
@ -63,13 +63,12 @@ void DoglegOptimizer::iterate(void) {
DoglegOptimizerImpl::IterationResult result;
if ( params_.isMultifrontal() ) {
GaussianBayesTreeOrdered bt;
bt.insert(GaussianJunctionTreeOrdered(*linear).eliminate(params_.getEliminationFunction()));
result = DoglegOptimizerImpl::Iterate(state_.Delta, DoglegOptimizerImpl::ONE_STEP_PER_ITERATION, bt, graph_, state_.values, ordering, state_.error, dlVerbose);
GaussianBayesTree bt = *linear->eliminateMultifrontal(*params_.ordering, params_.getEliminationFunction());
result = DoglegOptimizerImpl::Iterate(state_.Delta, DoglegOptimizerImpl::ONE_STEP_PER_ITERATION, bt, graph_, state_.values, state_.error, dlVerbose);
}
else if ( params_.isSequential() ) {
GaussianBayesNetOrdered::shared_ptr bn = EliminationTreeOrdered<GaussianFactorOrdered>::Create(*linear)->eliminate(params_.getEliminationFunction());
result = DoglegOptimizerImpl::Iterate(state_.Delta, DoglegOptimizerImpl::ONE_STEP_PER_ITERATION, *bn, graph_, state_.values, ordering, state_.error, dlVerbose);
GaussianBayesNet bn = *linear->eliminateSequential(*params_.ordering, params_.getEliminationFunction());
result = DoglegOptimizerImpl::Iterate(state_.Delta, DoglegOptimizerImpl::ONE_STEP_PER_ITERATION, bn, graph_, state_.values, state_.error, dlVerbose);
}
else if ( params_.isCG() ) {
throw runtime_error("todo: ");
@ -82,10 +81,17 @@ void DoglegOptimizer::iterate(void) {
if(params_.verbosity >= NonlinearOptimizerParams::DELTA) result.dx_d.print("delta");
// Create new state with new values and new error
state_.values = state_.values.retract(result.dx_d, ordering);
state_.values = state_.values.retract(result.dx_d);
state_.error = result.f_error;
state_.Delta = result.Delta;
++state_.iterations;
}
/* ************************************************************************* */
DoglegParams DoglegOptimizer::ensureHasOrdering(DoglegParams params, const NonlinearFactorGraph& graph) const {
if(!params.ordering)
params.ordering = Ordering::COLAMD(graph);
return params;
}
} /* namespace gtsam */

gtsam/nonlinear/DoglegOptimizer.h View File

@ -158,11 +158,7 @@ protected:
virtual const NonlinearOptimizerState& _state() const { return state_; }
/** Internal function for computing a COLAMD ordering if no ordering is specified */
DoglegParams ensureHasOrdering(DoglegParams params, const NonlinearFactorGraph& graph, const Values& values) const {
if(!params.ordering)
params.ordering = *graph.orderingCOLAMD(values);
return params;
}
DoglegParams ensureHasOrdering(DoglegParams params, const NonlinearFactorGraph& graph) const;
};
}

gtsam/nonlinear/DoglegOptimizerImpl.cpp View File

@ -22,8 +22,8 @@ using namespace std;
namespace gtsam {
/* ************************************************************************* */
VectorValuesOrdered DoglegOptimizerImpl::ComputeDoglegPoint(
double Delta, const VectorValuesOrdered& dx_u, const VectorValuesOrdered& dx_n, const bool verbose) {
VectorValues DoglegOptimizerImpl::ComputeDoglegPoint(
double Delta, const VectorValues& dx_u, const VectorValues& dx_n, const bool verbose) {
// Get magnitude of each update and find out which segment Delta falls in
assert(Delta >= 0.0);
@ -33,7 +33,7 @@ VectorValuesOrdered DoglegOptimizerImpl::ComputeDoglegPoint(
if(verbose) cout << "Steepest descent magnitude " << std::sqrt(x_u_norm_sq) << ", Newton's method magnitude " << std::sqrt(x_n_norm_sq) << endl;
if(DeltaSq < x_u_norm_sq) {
// Trust region is smaller than steepest descent update
VectorValuesOrdered x_d = std::sqrt(DeltaSq / x_u_norm_sq) * dx_u;
VectorValues x_d = std::sqrt(DeltaSq / x_u_norm_sq) * dx_u;
if(verbose) cout << "In steepest descent region with fraction " << std::sqrt(DeltaSq / x_u_norm_sq) << " of steepest descent magnitude" << endl;
return x_d;
} else if(DeltaSq < x_n_norm_sq) {
@ -48,7 +48,7 @@ VectorValuesOrdered DoglegOptimizerImpl::ComputeDoglegPoint(
}
/* ************************************************************************* */
VectorValuesOrdered DoglegOptimizerImpl::ComputeBlend(double Delta, const VectorValuesOrdered& x_u, const VectorValuesOrdered& x_n, const bool verbose) {
VectorValues DoglegOptimizerImpl::ComputeBlend(double Delta, const VectorValues& x_u, const VectorValues& x_n, const bool verbose) {
// See doc/trustregion.lyx or doc/trustregion.pdf
@ -78,7 +78,7 @@ VectorValuesOrdered DoglegOptimizerImpl::ComputeBlend(double Delta, const Vector
// Compute blended point
if(verbose) cout << "In blend region with fraction " << tau << " of Newton's method point" << endl;
VectorValuesOrdered blend = (1. - tau) * x_u; axpy(tau, x_n, blend);
VectorValues blend = (1. - tau) * x_u; axpy(tau, x_n, blend);
return blend;
}

gtsam/nonlinear/DoglegOptimizerImpl.h View File

@ -18,10 +18,8 @@
#include <iomanip>
#include <gtsam/linear/GaussianBayesNetOrdered.h>
#include <gtsam/linear/GaussianISAMOrdered.h> // To get optimize(BayesTree<GaussianConditional>)
//#include <gtsam/nonlinear/NonlinearFactorGraph.h>
#include <gtsam/nonlinear/OrderingOrdered.h>
#include <gtsam/linear/VectorValues.h>
#include <gtsam/inference/Ordering.h>
namespace gtsam {
@ -42,7 +40,7 @@ struct GTSAM_EXPORT DoglegOptimizerImpl {
struct GTSAM_EXPORT IterationResult {
double Delta;
VectorValuesOrdered dx_d;
VectorValues dx_d;
double f_error;
};
@ -103,7 +101,7 @@ struct GTSAM_EXPORT DoglegOptimizerImpl {
template<class M, class F, class VALUES>
static IterationResult Iterate(
double Delta, TrustRegionAdaptationMode mode, const M& Rd,
const F& f, const VALUES& x0, const OrderingOrdered& ordering, const double f_error, const bool verbose=false);
const F& f, const VALUES& x0, const double f_error, const bool verbose=false);
/**
* Compute the dogleg point given a trust region radius \f$ \Delta \f$. The
@ -127,7 +125,7 @@ struct GTSAM_EXPORT DoglegOptimizerImpl {
* @param dx_n The Gauss-Newton point
* @return The dogleg point \f$ \delta x_d \f$
*/
static VectorValuesOrdered ComputeDoglegPoint(double Delta, const VectorValuesOrdered& dx_u, const VectorValuesOrdered& dx_n, const bool verbose=false);
static VectorValues ComputeDoglegPoint(double Delta, const VectorValues& dx_u, const VectorValues& dx_n, const bool verbose=false);
/** Compute the point on the line between the steepest descent point and the
* Newton's method point intersecting the trust region boundary.
@ -138,7 +136,7 @@ struct GTSAM_EXPORT DoglegOptimizerImpl {
* @param x_u Steepest descent minimizer
* @param x_n Newton's method minimizer
*/
static VectorValuesOrdered ComputeBlend(double Delta, const VectorValuesOrdered& x_u, const VectorValuesOrdered& x_n, const bool verbose=false);
static VectorValues ComputeBlend(double Delta, const VectorValues& x_u, const VectorValues& x_n, const bool verbose=false);
};
@ -146,25 +144,18 @@ struct GTSAM_EXPORT DoglegOptimizerImpl {
template<class M, class F, class VALUES>
typename DoglegOptimizerImpl::IterationResult DoglegOptimizerImpl::Iterate(
double Delta, TrustRegionAdaptationMode mode, const M& Rd,
const F& f, const VALUES& x0, const OrderingOrdered& ordering, const double f_error, const bool verbose) {
const F& f, const VALUES& x0, const double f_error, const bool verbose)
{
// Compute steepest descent and Newton's method points
gttic(optimizeGradientSearch);
gttic(allocateVectorValues);
VectorValuesOrdered dx_u = *allocateVectorValues(Rd);
gttoc(allocateVectorValues);
gttic(optimizeGradientSearchInPlace);
optimizeGradientSearchInPlace(Rd, dx_u);
gttoc(optimizeGradientSearchInPlace);
VectorValues dx_u = Rd.optimizeGradientSearch();
gttoc(optimizeGradientSearch);
gttic(optimizeInPlace);
VectorValuesOrdered dx_n(VectorValuesOrdered::SameStructure(dx_u));
optimizeInPlace(Rd, dx_n);
gttoc(optimizeInPlace);
gttic(jfg_error);
const GaussianFactorGraphOrdered jfg(Rd);
const double M_error = jfg.error(VectorValuesOrdered::Zero(dx_u));
gttoc(jfg_error);
gttic(optimize);
VectorValues dx_n = Rd.optimize();
gttoc(optimize);
gttic(M_error);
const double M_error = Rd.error(VectorValues::Zero(dx_u));
gttoc(M_error);
// Result to return
IterationResult result;
@ -181,7 +172,7 @@ typename DoglegOptimizerImpl::IterationResult DoglegOptimizerImpl::Iterate(
gttic(retract);
// Compute expmapped solution
const VALUES x_d(x0.retract(result.dx_d, ordering));
const VALUES x_d(x0.retract(result.dx_d));
gttoc(retract);
gttic(decrease_in_f);
@ -189,10 +180,10 @@ typename DoglegOptimizerImpl::IterationResult DoglegOptimizerImpl::Iterate(
result.f_error = f.error(x_d);
gttoc(decrease_in_f);
gttic(decrease_in_M);
gttic(new_M_error);
// Compute decrease in M
const double new_M_error = jfg.error(result.dx_d);
gttoc(decrease_in_M);
const double new_M_error = Rd.error(result.dx_d);
gttoc(new_M_error);
if(verbose) std::cout << std::setprecision(15) << "f error: " << f_error << " -> " << result.f_error << std::endl;
if(verbose) std::cout << std::setprecision(15) << "M error: " << M_error << " -> " << new_M_error << std::endl;

gtsam/nonlinear/GaussNewtonOptimizer.cpp View File

@ -17,6 +17,8 @@
*/
#include <gtsam/nonlinear/GaussNewtonOptimizer.h>
#include <gtsam/linear/GaussianFactorGraph.h>
#include <gtsam/linear/VectorValues.h>
using namespace std;
@ -28,18 +30,27 @@ void GaussNewtonOptimizer::iterate() {
const NonlinearOptimizerState& current = state_;
// Linearize graph
GaussianFactorGraphOrdered::shared_ptr linear = graph_.linearize(current.values, *params_.ordering);
GaussianFactorGraph::shared_ptr linear = graph_.linearize(current.values);
// Solve Factor Graph
const VectorValuesOrdered delta = solveGaussianFactorGraph(*linear, params_);
const VectorValues delta = solveGaussianFactorGraph(*linear, params_);
// Maybe show output
if(params_.verbosity >= NonlinearOptimizerParams::DELTA) delta.print("delta");
// Create new state with new values and new error
state_.values = current.values.retract(delta, *params_.ordering);
state_.values = current.values.retract(delta);
state_.error = graph_.error(state_.values);
++ state_.iterations;
}
/* ************************************************************************* */
GaussNewtonParams GaussNewtonOptimizer::ensureHasOrdering(
GaussNewtonParams params, const NonlinearFactorGraph& graph) const
{
if(!params.ordering)
params.ordering = Ordering::COLAMD(graph);
return params;
}
} /* namespace gtsam */

gtsam/nonlinear/GaussNewtonOptimizer.h View File

@ -70,7 +70,7 @@ public:
* @param graph The nonlinear factor graph to optimize
* @param initialValues The initial variable assignments
*/
GaussNewtonOptimizer(const NonlinearFactorGraph& graph, const Values& initialValues, const OrderingOrdered& ordering) :
GaussNewtonOptimizer(const NonlinearFactorGraph& graph, const Values& initialValues, const Ordering& ordering) :
NonlinearOptimizer(graph), state_(graph, initialValues) {
params_.ordering = ordering; }
@ -110,11 +110,7 @@ protected:
virtual const NonlinearOptimizerState& _state() const { return state_; }
/** Internal function for computing a COLAMD ordering if no ordering is specified */
GaussNewtonParams ensureHasOrdering(GaussNewtonParams params, const NonlinearFactorGraph& graph, const Values& values) const {
if(!params.ordering)
params.ordering = *graph.orderingCOLAMD(values);
return params;
}
GaussNewtonParams ensureHasOrdering(GaussNewtonParams params, const NonlinearFactorGraph& graph) const;
};

gtsam/nonlinear/LevenbergMarquardtOptimizer.cpp View File

@ -19,6 +19,8 @@
#include <cmath>
#include <gtsam/linear/linearExceptions.h>
#include <gtsam/linear/GaussianFactorGraph.h>
#include <gtsam/linear/VectorValues.h>
#include <gtsam/nonlinear/LevenbergMarquardtOptimizer.h>
#include <gtsam/nonlinear/SuccessiveLinearizationOptimizer.h>
@ -74,7 +76,7 @@ void LevenbergMarquardtOptimizer::iterate() {
gttic(LM_iterate);
// Linearize graph
GaussianFactorGraphOrdered::shared_ptr linear = graph_.linearize(state_.values, *params_.ordering);
GaussianFactorGraph::shared_ptr linear = graph_.linearize(state_.values);
// Pull out parameters we'll use
const NonlinearOptimizerParams::Verbosity nloVerbosity = params_.verbosity;
@ -88,7 +90,7 @@ void LevenbergMarquardtOptimizer::iterate() {
// Add prior-factors
// TODO: replace this dampening with a backsubstitution approach
gttic(damp);
GaussianFactorGraphOrdered dampedSystem(*linear);
GaussianFactorGraph dampedSystem = *linear;
{
double sigma = 1.0 / std::sqrt(state_.lambda);
dampedSystem.reserve(dampedSystem.size() + dimensions_.size());
@ -97,9 +99,8 @@ void LevenbergMarquardtOptimizer::iterate() {
size_t dim = (dimensions_)[j];
Matrix A = eye(dim);
Vector b = zero(dim);
SharedDiagonal model = noiseModel::Isotropic::Sigma(dim,sigma);
GaussianFactorOrdered::shared_ptr prior(new JacobianFactorOrdered(j, A, b, model));
dampedSystem.push_back(prior);
SharedDiagonal model = noiseModel::Isotropic::Sigma(dim, sigma);
dampedSystem += boost::make_shared<JacobianFactor>(j, A, b, model);
}
}
gttoc(damp);
@ -108,14 +109,14 @@ void LevenbergMarquardtOptimizer::iterate() {
// Try solving
try {
// Solve Damped Gaussian Factor Graph
const VectorValuesOrdered delta = solveGaussianFactorGraph(dampedSystem, params_);
const VectorValues delta = solveGaussianFactorGraph(dampedSystem, params_);
if (lmVerbosity >= LevenbergMarquardtParams::TRYLAMBDA) cout << "linear delta norm = " << delta.norm() << endl;
if (lmVerbosity >= LevenbergMarquardtParams::TRYDELTA) delta.print("delta");
// update values
gttic(retract);
Values newValues = state_.values.retract(delta, *params_.ordering);
Values newValues = state_.values.retract(delta);
gttoc(retract);
// create new optimization state with more adventurous lambda
@ -168,4 +169,13 @@ void LevenbergMarquardtOptimizer::iterate() {
++state_.iterations;
}
/* ************************************************************************* */
LevenbergMarquardtParams LevenbergMarquardtOptimizer::ensureHasOrdering(
LevenbergMarquardtParams params, const NonlinearFactorGraph& graph) const
{
if(!params.ordering)
params.ordering = Ordering::COLAMD(graph);
return params;
}
} /* namespace gtsam */
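
For reference, the damping block above can be pulled out into a standalone helper under the new API; a sketch assuming the per-variable dimensions and lambda are supplied by the caller (the helper name is made up):

#include <gtsam/linear/GaussianFactorGraph.h>
#include <gtsam/linear/JacobianFactor.h>
#include <gtsam/linear/NoiseModel.h>
#include <boost/make_shared.hpp>
#include <cmath>
#include <vector>

using namespace gtsam;

// Append an isotropic prior of width 1/sqrt(lambda) on every variable,
// assuming variable j has dimension dims[j].
GaussianFactorGraph damp(const GaussianFactorGraph& linear,
                         const std::vector<size_t>& dims, double lambda) {
  GaussianFactorGraph damped = linear;            // copies the factor pointers
  damped.reserve(damped.size() + dims.size());
  const double sigma = 1.0 / std::sqrt(lambda);
  for (size_t j = 0; j < dims.size(); ++j) {
    const size_t dim = dims[j];
    SharedDiagonal model = noiseModel::Isotropic::Sigma(dim, sigma);
    damped += boost::make_shared<JacobianFactor>(j, eye(dim), zero(dim), model);
  }
  return damped;
}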

gtsam/nonlinear/LevenbergMarquardtOptimizer.h View File

@ -172,11 +172,7 @@ protected:
virtual const NonlinearOptimizerState& _state() const { return state_; }
/** Internal function for computing a COLAMD ordering if no ordering is specified */
LevenbergMarquardtParams ensureHasOrdering(LevenbergMarquardtParams params, const NonlinearFactorGraph& graph, const Values& values) const {
if(!params.ordering)
params.ordering = *graph.orderingCOLAMD(values);
return params;
}
LevenbergMarquardtParams ensureHasOrdering(LevenbergMarquardtParams params, const NonlinearFactorGraph& graph) const;
};
}

gtsam/nonlinear/LinearContainerFactor.cpp View File

@ -6,20 +6,13 @@
*/
#include <gtsam/nonlinear/LinearContainerFactor.h>
#include <gtsam/linear/HessianFactor.h>
#include <gtsam/linear/VectorValues.h>
#include <boost/foreach.hpp>
namespace gtsam {
/* ************************************************************************* */
void LinearContainerFactor::rekeyFactor(const OrderingOrdered& ordering) {
BOOST_FOREACH(Index& idx, factor_->keys()) {
Key fullKey = ordering.key(idx);
idx = fullKey;
keys_.push_back(fullKey);
}
}
/* ************************************************************************* */
void LinearContainerFactor::initializeLinearizationPoint(const Values& linearizationPoint) {
if (!linearizationPoint.empty()) {
@ -33,7 +26,7 @@ void LinearContainerFactor::initializeLinearizationPoint(const Values& lineariza
}
/* ************************************************************************* */
LinearContainerFactor::LinearContainerFactor(const GaussianFactorOrdered::shared_ptr& factor,
LinearContainerFactor::LinearContainerFactor(const GaussianFactor::shared_ptr& factor,
const boost::optional<Values>& linearizationPoint)
: factor_(factor), linearizationPoint_(linearizationPoint) {
// Extract keys stashed in linear factor
@ -43,28 +36,22 @@ LinearContainerFactor::LinearContainerFactor(const GaussianFactorOrdered::shared
/* ************************************************************************* */
LinearContainerFactor::LinearContainerFactor(
const JacobianFactorOrdered& factor, const OrderingOrdered& ordering,
const Values& linearizationPoint)
const JacobianFactor& factor, const Values& linearizationPoint)
: factor_(factor.clone()) {
rekeyFactor(ordering);
initializeLinearizationPoint(linearizationPoint);
}
/* ************************************************************************* */
LinearContainerFactor::LinearContainerFactor(
const HessianFactorOrdered& factor, const OrderingOrdered& ordering,
const Values& linearizationPoint)
const HessianFactor& factor, const Values& linearizationPoint)
: factor_(factor.clone()) {
rekeyFactor(ordering);
initializeLinearizationPoint(linearizationPoint);
}
/* ************************************************************************* */
LinearContainerFactor::LinearContainerFactor(
const GaussianFactorOrdered::shared_ptr& factor, const OrderingOrdered& ordering,
const Values& linearizationPoint)
const GaussianFactor::shared_ptr& factor, const Values& linearizationPoint)
: factor_(factor->clone()) {
rekeyFactor(ordering);
initializeLinearizationPoint(linearizationPoint);
}
@ -112,19 +99,11 @@ double LinearContainerFactor::error(const Values& c) const {
csub.insert(key, c.at(key));
// create dummy ordering for evaluation
OrderingOrdered ordering = *csub.orderingArbitrary();
VectorValuesOrdered delta = linearizationPoint_->localCoordinates(csub, ordering);
// Change keys on stored factor
BOOST_FOREACH(gtsam::Index& index, factor_->keys())
index = ordering[index];
VectorValues delta = linearizationPoint_->localCoordinates(csub);
// compute error
double error = factor_->error(delta);
// change keys back
factor_->keys() = keys();
return error;
}
@ -137,103 +116,79 @@ size_t LinearContainerFactor::dim() const {
}
/* ************************************************************************* */
GaussianFactorOrdered::shared_ptr LinearContainerFactor::order(const OrderingOrdered& ordering) const {
// clone factor
boost::shared_ptr<GaussianFactorOrdered> result = factor_->clone();
// rekey
BOOST_FOREACH(Index& key, result->keys())
key = ordering[key];
return result;
}
/* ************************************************************************* */
GaussianFactorOrdered::shared_ptr LinearContainerFactor::linearize(
const Values& c, const OrderingOrdered& ordering) const {
GaussianFactor::shared_ptr LinearContainerFactor::linearize(const Values& c) const {
if (!hasLinearizationPoint())
return order(ordering);
return factor_;
// Extract subset of values
Values subsetC;
BOOST_FOREACH(const gtsam::Key& key, this->keys())
subsetC.insert(key, c.at(key));
// Create a temp ordering for this factor
OrderingOrdered localOrdering = *subsetC.orderingArbitrary();
// Determine delta between linearization points using new ordering
VectorValuesOrdered delta = linearizationPoint_->localCoordinates(subsetC, localOrdering);
VectorValues delta = linearizationPoint_->localCoordinates(subsetC);
// clone and reorder linear factor to final ordering
GaussianFactorOrdered::shared_ptr linFactor = order(localOrdering);
GaussianFactor::shared_ptr linFactor = factor_->clone();
if (isJacobian()) {
JacobianFactorOrdered::shared_ptr jacFactor = boost::dynamic_pointer_cast<JacobianFactorOrdered>(linFactor);
JacobianFactor::shared_ptr jacFactor = boost::dynamic_pointer_cast<JacobianFactor>(linFactor);
jacFactor->getb() = -jacFactor->unweighted_error(delta);
} else {
HessianFactorOrdered::shared_ptr hesFactor = boost::dynamic_pointer_cast<HessianFactorOrdered>(linFactor);
HessianFactor::shared_ptr hesFactor = boost::dynamic_pointer_cast<HessianFactor>(linFactor);
size_t dim = hesFactor->linearTerm().size();
Eigen::Block<HessianFactorOrdered::Block> Gview = hesFactor->info().block(0, 0, dim, dim);
Eigen::Block<HessianFactor::Block> Gview = hesFactor->info().block(0, 0, dim, dim);
Vector deltaVector = delta.asVector();
Vector G_delta = Gview.selfadjointView<Eigen::Upper>() * deltaVector;
hesFactor->constantTerm() += deltaVector.dot(G_delta) - 2.0 * deltaVector.dot(hesFactor->linearTerm());
hesFactor->linearTerm() -= G_delta;
}
// reset ordering
BOOST_FOREACH(Index& idx, linFactor->keys())
idx = ordering[localOrdering.key(idx)];
return linFactor;
}
/* ************************************************************************* */
bool LinearContainerFactor::isJacobian() const {
return boost::dynamic_pointer_cast<JacobianFactorOrdered>(factor_);
return boost::dynamic_pointer_cast<JacobianFactor>(factor_);
}
/* ************************************************************************* */
bool LinearContainerFactor::isHessian() const {
return boost::dynamic_pointer_cast<HessianFactorOrdered>(factor_);
return boost::dynamic_pointer_cast<HessianFactor>(factor_);
}
/* ************************************************************************* */
JacobianFactorOrdered::shared_ptr LinearContainerFactor::toJacobian() const {
return boost::dynamic_pointer_cast<JacobianFactorOrdered>(factor_);
JacobianFactor::shared_ptr LinearContainerFactor::toJacobian() const {
return boost::dynamic_pointer_cast<JacobianFactor>(factor_);
}
/* ************************************************************************* */
HessianFactorOrdered::shared_ptr LinearContainerFactor::toHessian() const {
return boost::dynamic_pointer_cast<HessianFactorOrdered>(factor_);
HessianFactor::shared_ptr LinearContainerFactor::toHessian() const {
return boost::dynamic_pointer_cast<HessianFactor>(factor_);
}
/* ************************************************************************* */
GaussianFactorOrdered::shared_ptr LinearContainerFactor::negate(const OrderingOrdered& ordering) const {
GaussianFactorOrdered::shared_ptr result = factor_->negate();
BOOST_FOREACH(Key& key, result->keys())
key = ordering[key];
return result;
GaussianFactor::shared_ptr LinearContainerFactor::negateToGaussian() const {
GaussianFactor::shared_ptr result = factor_->negate();
return result;
}
/* ************************************************************************* */
NonlinearFactor::shared_ptr LinearContainerFactor::negate() const {
GaussianFactorOrdered::shared_ptr antifactor = factor_->negate(); // already has keys in place
return NonlinearFactor::shared_ptr(new LinearContainerFactor(antifactor,linearizationPoint_));
NonlinearFactor::shared_ptr LinearContainerFactor::negateToNonlinear() const {
GaussianFactor::shared_ptr antifactor = factor_->negate(); // already has keys in place
return boost::make_shared<LinearContainerFactor>(antifactor, linearizationPoint_);
}
/* ************************************************************************* */
NonlinearFactorGraph LinearContainerFactor::convertLinearGraph(
const GaussianFactorGraphOrdered& linear_graph, const OrderingOrdered& ordering,
const Values& linearizationPoint) {
const GaussianFactorGraph& linear_graph, const Values& linearizationPoint)
{
NonlinearFactorGraph result;
BOOST_FOREACH(const GaussianFactorOrdered::shared_ptr& f, linear_graph)
BOOST_FOREACH(const GaussianFactor::shared_ptr& f, linear_graph)
if (f)
result.push_back(NonlinearFactorGraph::sharedFactor(
new LinearContainerFactor(f, ordering, linearizationPoint)));
new LinearContainerFactor(f, linearizationPoint)));
return result;
}
/* ************************************************************************* */
} // \namespace gtsam
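
Since linear factors are now keyed directly by Key, wrapping them for use alongside nonlinear factors needs no rekeying step. A small usage sketch (the key, matrices, and example function are made up for illustration):

#include <gtsam/nonlinear/LinearContainerFactor.h>
#include <gtsam/nonlinear/NonlinearFactorGraph.h>
#include <gtsam/linear/GaussianFactorGraph.h>
#include <gtsam/linear/JacobianFactor.h>
#include <gtsam/linear/NoiseModel.h>
#include <gtsam/base/Matrix.h>
#include <boost/make_shared.hpp>

using namespace gtsam;

void wrapLinearFactors() {
  // A 2-dimensional Jacobian factor on the variable with Key 1
  JacobianFactor jf(1, eye(2), zero(2), noiseModel::Unit::Create(2));

  // Wrap it so it can live in a NonlinearFactorGraph; no ordering needed
  NonlinearFactorGraph graph;
  graph.push_back(boost::make_shared<LinearContainerFactor>(jf));

  // An entire linear graph can be converted in one call
  GaussianFactorGraph linearGraph;
  linearGraph += boost::make_shared<JacobianFactor>(jf);
  NonlinearFactorGraph wrapped = LinearContainerFactor::convertLinearGraph(linearGraph);
}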

gtsam/nonlinear/LinearContainerFactor.h View File

@ -9,11 +9,13 @@
#pragma once
#include <gtsam/linear/HessianFactorOrdered.h>
#include <gtsam/nonlinear/NonlinearFactorGraph.h>
namespace gtsam {
// Forward declarations
class HessianFactor;
/**
* Dummy version of a generic linear factor to be injected into a nonlinear factor graph
*
@ -23,37 +25,32 @@ namespace gtsam {
class GTSAM_EXPORT LinearContainerFactor : public NonlinearFactor {
protected:
GaussianFactorOrdered::shared_ptr factor_;
GaussianFactor::shared_ptr factor_;
boost::optional<Values> linearizationPoint_;
/** Default constructor - necessary for serialization */
LinearContainerFactor() {}
/** direct copy constructor */
LinearContainerFactor(const GaussianFactorOrdered::shared_ptr& factor,
const boost::optional<Values>& linearizationPoint);
LinearContainerFactor(const GaussianFactor::shared_ptr& factor, const boost::optional<Values>& linearizationPoint);
public:
/** Primary constructor: store a linear factor and decode the ordering */
LinearContainerFactor(const JacobianFactorOrdered& factor, const OrderingOrdered& ordering,
const Values& linearizationPoint = Values());
LinearContainerFactor(const JacobianFactor& factor, const Values& linearizationPoint = Values());
/** Primary constructor: store a linear factor and decode the ordering */
LinearContainerFactor(const HessianFactorOrdered& factor, const OrderingOrdered& ordering,
const Values& linearizationPoint = Values());
LinearContainerFactor(const HessianFactor& factor, const Values& linearizationPoint = Values());
/** Constructor from shared_ptr */
LinearContainerFactor(const GaussianFactorOrdered::shared_ptr& factor, const OrderingOrdered& ordering,
const Values& linearizationPoint = Values());
LinearContainerFactor(const GaussianFactor::shared_ptr& factor, const Values& linearizationPoint = Values());
/** Constructor from re-keyed factor: all indices assumed replaced with Key */
LinearContainerFactor(const GaussianFactorOrdered::shared_ptr& factor,
const Values& linearizationPoint = Values());
LinearContainerFactor(const GaussianFactor::shared_ptr& factor, const Values& linearizationPoint = Values());
// Access
const GaussianFactorOrdered::shared_ptr& factor() const { return factor_; }
const GaussianFactor::shared_ptr& factor() const { return factor_; }
// Testable
@ -81,9 +78,6 @@ public:
/** Extract the linearization point used in recalculating error */
const boost::optional<Values>& linearizationPoint() const { return linearizationPoint_; }
/** Apply the ordering to a graph - same as linearize(), but without needing a linearization point */
GaussianFactorOrdered::shared_ptr order(const OrderingOrdered& ordering) const;
/**
* Linearize to a GaussianFactor, with method depending on the presence of a linearizationPoint
* - With no linearization point, returns a reordered, but numerically identical,
@ -101,18 +95,18 @@ public:
* TODO: better approximation of relinearization
* TODO: switchable modes for approximation technique
*/
GaussianFactorOrdered::shared_ptr linearize(const Values& c, const OrderingOrdered& ordering) const;
GaussianFactor::shared_ptr linearize(const Values& c) const;
/**
* Creates an anti-factor directly and performs rekeying due to ordering
*/
GaussianFactorOrdered::shared_ptr negate(const OrderingOrdered& ordering) const;
GaussianFactor::shared_ptr negateToGaussian() const;
/**
* Creates the equivalent anti-factor as another LinearContainerFactor,
* so it remains independent of ordering.
*/
NonlinearFactor::shared_ptr negate() const;
NonlinearFactor::shared_ptr negateToNonlinear() const;
/**
* Creates a shared_ptr clone of the factor - needs to be specialized to allow
@ -135,20 +129,19 @@ public:
bool isHessian() const;
/** Casts to JacobianFactor */
JacobianFactorOrdered::shared_ptr toJacobian() const;
JacobianFactor::shared_ptr toJacobian() const;
/** Casts to HessianFactor */
HessianFactorOrdered::shared_ptr toHessian() const;
HessianFactor::shared_ptr toHessian() const;
/**
* Utility function for converting linear graphs to nonlinear graphs
* consisting of LinearContainerFactors.
*/
static NonlinearFactorGraph convertLinearGraph(const GaussianFactorGraphOrdered& linear_graph,
const OrderingOrdered& ordering, const Values& linearizationPoint = Values());
static NonlinearFactorGraph convertLinearGraph(const GaussianFactorGraph& linear_graph,
const Values& linearizationPoint = Values());
protected:
void rekeyFactor(const OrderingOrdered& ordering);
void initializeLinearizationPoint(const Values& linearizationPoint);
private:

gtsam/nonlinear/NonlinearFactor.h View File

@ -20,19 +20,14 @@
#pragma once
#include <list>
#include <limits>
#include <boost/serialization/base_object.hpp>
#include <boost/function.hpp>
#include <gtsam/inference/FactorOrdered-inl.h>
#include <gtsam/inference/IndexFactorOrdered.h>
#include <gtsam/linear/NoiseModel.h>
#include <gtsam/linear/JacobianFactorOrdered.h>
#include <boost/assign/list_of.hpp>
#include <gtsam/nonlinear/Values.h>
#include <gtsam/nonlinear/OrderingOrdered.h>
#include <gtsam/linear/NoiseModel.h>
#include <gtsam/linear/JacobianFactor.h>
#include <gtsam/inference/Factor.h>
/**
* Macro to add a standard clone function to a derived factor
@ -54,51 +49,39 @@ namespace gtsam {
* which are objects in non-linear manifolds (Lie groups).
* \nosubgrouping
*/
class NonlinearFactor: public FactorOrdered<Key> {
class NonlinearFactor: public Factor {
protected:
// Some handy typedefs
typedef FactorOrdered<Key> Base;
typedef Factor Base;
typedef NonlinearFactor This;
public:
typedef boost::shared_ptr<NonlinearFactor> shared_ptr;
typedef boost::shared_ptr<This> shared_ptr;
/// @name Standard Constructors
/// @{
/** Default constructor for I/O only */
NonlinearFactor() {
}
NonlinearFactor() {}
/**
* Constructor from a vector of the keys involved in this factor
* Constructor from a collection of the keys involved in this factor
*/
NonlinearFactor(const std::vector<size_t>& keys) :
template<typename CONTAINER>
NonlinearFactor(const CONTAINER& keys) :
Base(keys) {}
/**
* Constructor from iterators over the keys involved in this factor
*/
template<class ITERATOR>
NonlinearFactor(ITERATOR beginKeys, ITERATOR endKeys) :
Base(beginKeys, endKeys) {}
NonlinearFactor(Key key) : Base(key) {} ///< Convenience constructor for 1 key
NonlinearFactor(Key key1, Key key2) : Base(key1, key2) {} ///< Convenience constructor for 2 keys
NonlinearFactor(Key key1, Key key2, Key key3) : Base(key1, key2, key3) {} ///< Convenience constructor for 3 keys
NonlinearFactor(Key key1, Key key2, Key key3, Key key4) : Base(key1, key2, key3, key4) {} ///< Convenience constructor for 4 keys
NonlinearFactor(Key key1, Key key2, Key key3, Key key4, Key key5) : Base(key1, key2, key3, key4, key5) {} ///< Convenience constructor for 5 keys
NonlinearFactor(Key key1, Key key2, Key key3, Key key4, Key key5, Key key6) : Base(key1, key2, key3, key4, key5, key6) {} ///< Convenience constructor for 6 keys
/// @}
/// @name Testable
/// @{
/** print */
virtual void print(const std::string& s = "", const KeyFormatter& keyFormatter = DefaultKeyFormatter) const {
virtual void print(const std::string& s = "",
const KeyFormatter& keyFormatter = DefaultKeyFormatter) const
{
std::cout << s << " keys = { ";
BOOST_FOREACH(Key key, this->keys()) { std::cout << keyFormatter(key) << " "; }
std::cout << "}" << std::endl;
@ -140,19 +123,19 @@ public:
virtual bool active(const Values& c) const { return true; }
/** linearize to a GaussianFactor */
virtual boost::shared_ptr<GaussianFactorOrdered>
linearize(const Values& c, const OrderingOrdered& ordering) const = 0;
virtual boost::shared_ptr<GaussianFactor>
linearize(const Values& c) const = 0;
/**
* Create a symbolic factor using the given ordering to determine the
* variable indices.
*/
virtual IndexFactorOrdered::shared_ptr symbolic(const OrderingOrdered& ordering) const {
std::vector<Index> indices(this->size());
for(size_t j=0; j<this->size(); ++j)
indices[j] = ordering[this->keys()[j]];
return IndexFactorOrdered::shared_ptr(new IndexFactorOrdered(indices));
}
//virtual IndexFactorOrdered::shared_ptr symbolic(const OrderingOrdered& ordering) const {
// std::vector<Index> indices(this->size());
// for(size_t j=0; j<this->size(); ++j)
// indices[j] = ordering[this->keys()[j]];
// return IndexFactorOrdered::shared_ptr(new IndexFactorOrdered(indices));
//}
/**
* Creates a shared_ptr clone of the factor - needs to be specialized to allow
@ -218,11 +201,10 @@ protected:
public:
typedef boost::shared_ptr<NoiseModelFactor > shared_ptr;
typedef boost::shared_ptr<This> shared_ptr;
/** Default constructor for I/O only */
NoiseModelFactor() {
}
NoiseModelFactor() {}
/** Destructor */
virtual ~NoiseModelFactor() {}
@ -230,17 +212,9 @@ public:
/**
* Constructor
*/
template<class ITERATOR>
NoiseModelFactor(const SharedNoiseModel& noiseModel, ITERATOR beginKeys, ITERATOR endKeys)
: Base(beginKeys, endKeys), noiseModel_(noiseModel) {
}
NoiseModelFactor(const SharedNoiseModel& noiseModel, Key key) : Base(key), noiseModel_(noiseModel) {} ///< Convenience constructor for 1 key
NoiseModelFactor(const SharedNoiseModel& noiseModel, Key key1, Key key2) : Base(key1, key2), noiseModel_(noiseModel) {} ///< Convenience constructor for 2 keys
NoiseModelFactor(const SharedNoiseModel& noiseModel, Key key1, Key key2, Key key3) : Base(key1, key2, key3), noiseModel_(noiseModel) {} ///< Convenience constructor for 3 keys
NoiseModelFactor(const SharedNoiseModel& noiseModel, Key key1, Key key2, Key key3, Key key4) : Base(key1, key2, key3, key4), noiseModel_(noiseModel) {} ///< Convenience constructor for 4 keys
NoiseModelFactor(const SharedNoiseModel& noiseModel, Key key1, Key key2, Key key3, Key key4, Key key5) : Base(key1, key2, key3, key4, key5), noiseModel_(noiseModel) {} ///< Convenience constructor for 5 keys
NoiseModelFactor(const SharedNoiseModel& noiseModel, Key key1, Key key2, Key key3, Key key4, Key key5, Key key6) : Base(key1, key2, key3, key4, key5, key6), noiseModel_(noiseModel) {} ///< Convenience constructor for 6 keys
template<typename CONTAINER>
NoiseModelFactor(const SharedNoiseModel& noiseModel, const CONTAINER& keys) :
Base(keys), noiseModel_(noiseModel) {}
protected:
@ -252,7 +226,9 @@ protected:
public:
/** Print */
virtual void print(const std::string& s = "", const KeyFormatter& keyFormatter = DefaultKeyFormatter) const {
virtual void print(const std::string& s = "",
const KeyFormatter& keyFormatter = DefaultKeyFormatter) const
{
Base::print(s, keyFormatter);
this->noiseModel_->print(" noise model: ");
}
@ -314,10 +290,10 @@ public:
* \f$ Ax-b \approx h(x+\delta x)-z = h(x) + A \delta x - z \f$
* Hence \f$ b = z - h(x) = - \mathtt{error\_vector}(x) \f$
*/
boost::shared_ptr<GaussianFactorOrdered> linearize(const Values& x, const OrderingOrdered& ordering) const {
boost::shared_ptr<GaussianFactor> linearize(const Values& x) const {
// Only linearize if the factor is active
if (!this->active(x))
return boost::shared_ptr<JacobianFactorOrdered>();
return boost::shared_ptr<JacobianFactor>();
// Create the set of terms - Jacobians for each index
Vector b;
@ -332,7 +308,7 @@ public:
std::vector<std::pair<Index, Matrix> > terms(this->size());
// Fill in terms
for(size_t j=0; j<this->size(); ++j) {
terms[j].first = ordering[this->keys()[j]];
terms[j].first = this->keys()[j];
terms[j].second.swap(A[j]);
}
@ -340,11 +316,9 @@ public:
noiseModel::Constrained::shared_ptr constrained =
boost::dynamic_pointer_cast<noiseModel::Constrained>(this->noiseModel_);
if(constrained)
return GaussianFactorOrdered::shared_ptr(
new JacobianFactorOrdered(terms, b, constrained->unit()));
return GaussianFactor::shared_ptr(new JacobianFactor(terms, b, constrained->unit()));
else
return GaussianFactorOrdered::shared_ptr(
new JacobianFactorOrdered(terms, b, noiseModel::Unit::Create(b.size())));
return GaussianFactor::shared_ptr(new JacobianFactor(terms, b));
}
private:
@ -392,7 +366,7 @@ public:
* @param key1 by which to look up X value in Values
*/
NoiseModelFactor1(const SharedNoiseModel& noiseModel, Key key1) :
Base(noiseModel, key1) {}
Base(noiseModel, boost::assign::cref_list_of<1>(key1)) {}
/** Calls the 1-key specific version of evaluateError, which is pure virtual
* so must be implemented in the derived class.
@ -461,7 +435,7 @@ public:
* @param j2 key of the second variable
*/
NoiseModelFactor2(const SharedNoiseModel& noiseModel, Key j1, Key j2) :
Base(noiseModel, j1, j2) {}
Base(noiseModel, boost::assign::cref_list_of<2>(j1)(j2)) {}
virtual ~NoiseModelFactor2() {}
@ -538,7 +512,7 @@ public:
* @param j3 key of the third variable
*/
NoiseModelFactor3(const SharedNoiseModel& noiseModel, Key j1, Key j2, Key j3) :
Base(noiseModel, j1, j2, j3) {}
Base(noiseModel, boost::assign::cref_list_of<3>(j1)(j2)(j3)) {}
virtual ~NoiseModelFactor3() {}
@ -617,7 +591,7 @@ public:
* @param j4 key of the fourth variable
*/
NoiseModelFactor4(const SharedNoiseModel& noiseModel, Key j1, Key j2, Key j3, Key j4) :
Base(noiseModel, j1, j2, j3, j4) {}
Base(noiseModel, boost::assign::cref_list_of<4>(j1)(j2)(j3)(j4)) {}
virtual ~NoiseModelFactor4() {}
@ -700,7 +674,7 @@ public:
* @param j5 key of the fifth variable
*/
NoiseModelFactor5(const SharedNoiseModel& noiseModel, Key j1, Key j2, Key j3, Key j4, Key j5) :
Base(noiseModel, j1, j2, j3, j4, j5) {}
Base(noiseModel, boost::assign::cref_list_of<5>(j1)(j2)(j3)(j4)(j5)) {}
virtual ~NoiseModelFactor5() {}
@ -787,7 +761,7 @@ public:
* @param j6 key of the fifth variable
*/
NoiseModelFactor6(const SharedNoiseModel& noiseModel, Key j1, Key j2, Key j3, Key j4, Key j5, Key j6) :
Base(noiseModel, j1, j2, j3, j4, j5, j6) {}
Base(noiseModel, boost::assign::cref_list_of<6>(j1)(j2)(j3)(j4)(j5)(j6)) {}
virtual ~NoiseModelFactor6() {}
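
Derived factor types see the same simplification: keys are passed straight through and linearize takes only Values. A toy unary factor written against the updated base classes (Point2Prior is invented here for illustration and is not part of this commit):

#include <gtsam/nonlinear/NonlinearFactor.h>
#include <gtsam/geometry/Point2.h>

using namespace gtsam;

// A prior on a Point2: error = p - measured, so the Jacobian is the 2x2 identity.
class Point2Prior : public NoiseModelFactor1<Point2> {
  Point2 measured_;
public:
  Point2Prior(Key key, const Point2& measured, const SharedNoiseModel& model)
      : NoiseModelFactor1<Point2>(model, key), measured_(measured) {}

  virtual Vector evaluateError(const Point2& p,
      boost::optional<Matrix&> H = boost::none) const {
    if (H) *H = eye(2);                    // d(error)/dp
    return measured_.localCoordinates(p);  // h(x) - z = p - measured
  }
};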

gtsam/nonlinear/NonlinearFactorGraph.cpp View File

@ -20,10 +20,12 @@
#include <cmath>
#include <limits>
#include <boost/foreach.hpp>
#include <gtsam/inference/FactorGraphOrdered.h>
#include <gtsam/inference/inferenceOrdered.h>
#include <gtsam/inference/FactorGraph-inst.h>
#include <gtsam/geometry/Pose2.h>
#include <gtsam/geometry/Pose3.h>
#include <gtsam/linear/GaussianFactorGraph.h>
#include <gtsam/nonlinear/Values.h>
#include <gtsam/inference/Ordering.h>
#include <gtsam/nonlinear/NonlinearFactorGraph.h>
using namespace std;
@ -207,107 +209,61 @@ FastSet<Key> NonlinearFactorGraph::keys() const {
}
/* ************************************************************************* */
OrderingOrdered::shared_ptr NonlinearFactorGraph::orderingCOLAMD(
const Values& config) const
Ordering NonlinearFactorGraph::orderingCOLAMD() const
{
gttic(NonlinearFactorGraph_orderingCOLAMD);
// Create symbolic graph and initial (iterator) ordering
SymbolicFactorGraphOrdered::shared_ptr symbolic;
OrderingOrdered::shared_ptr ordering;
boost::tie(symbolic, ordering) = this->symbolic(config);
// Compute the VariableIndex (column-wise index)
VariableIndexOrdered variableIndex(*symbolic, ordering->size());
if (config.size() != variableIndex.size()) throw std::runtime_error(
"orderingCOLAMD: some variables in the graph are not constrained!");
// Compute a fill-reducing ordering with COLAMD
Permutation::shared_ptr colamdPerm(inference::PermutationCOLAMD(
variableIndex));
// Permute the Ordering with the COLAMD ordering
ordering->permuteInPlace(*colamdPerm);
return ordering;
return Ordering::COLAMD(*this);
}
/* ************************************************************************* */
OrderingOrdered::shared_ptr NonlinearFactorGraph::orderingCOLAMDConstrained(const Values& config,
const std::map<Key, int>& constraints) const
Ordering NonlinearFactorGraph::orderingCOLAMDConstrained(const FastMap<Key, int>& constraints) const
{
gttic(NonlinearFactorGraph_orderingCOLAMDConstrained);
// Create symbolic graph and initial (iterator) ordering
SymbolicFactorGraphOrdered::shared_ptr symbolic;
OrderingOrdered::shared_ptr ordering;
boost::tie(symbolic, ordering) = this->symbolic(config);
// Compute the VariableIndex (column-wise index)
VariableIndexOrdered variableIndex(*symbolic, ordering->size());
if (config.size() != variableIndex.size()) throw std::runtime_error(
"orderingCOLAMD: some variables in the graph are not constrained!");
// Create a constraint list with correct indices
typedef std::map<Key, int>::value_type constraint_type;
std::map<Index, int> index_constraints;
BOOST_FOREACH(const constraint_type& p, constraints)
index_constraints[ordering->at(p.first)] = p.second;
// Compute a constrained fill-reducing ordering with COLAMD
Permutation::shared_ptr colamdPerm(inference::PermutationCOLAMDGrouped(
variableIndex, index_constraints));
// Permute the Ordering with the COLAMD ordering
ordering->permuteInPlace(*colamdPerm);
return ordering;
return Ordering::COLAMDConstrained(*this, constraints);
}
/* ************************************************************************* */
SymbolicFactorGraphOrdered::shared_ptr NonlinearFactorGraph::symbolic(const OrderingOrdered& ordering) const {
gttic(NonlinearFactorGraph_symbolic_from_Ordering);
// Generate the symbolic factor graph
SymbolicFactorGraphOrdered::shared_ptr symbolicfg(new SymbolicFactorGraphOrdered);
symbolicfg->reserve(this->size());
BOOST_FOREACH(const sharedFactor& factor, this->factors_) {
if(factor)
symbolicfg->push_back(factor->symbolic(ordering));
else
symbolicfg->push_back(SymbolicFactorGraphOrdered::sharedFactor());
}
return symbolicfg;
}
//SymbolicFactorGraphOrdered::shared_ptr NonlinearFactorGraph::symbolic(const OrderingOrdered& ordering) const {
// gttic(NonlinearFactorGraph_symbolic_from_Ordering);
//
// // Generate the symbolic factor graph
// SymbolicFactorGraphOrdered::shared_ptr symbolicfg(new SymbolicFactorGraphOrdered);
// symbolicfg->reserve(this->size());
//
// BOOST_FOREACH(const sharedFactor& factor, this->factors_) {
// if(factor)
// symbolicfg->push_back(factor->symbolic(ordering));
// else
// symbolicfg->push_back(SymbolicFactorGraphOrdered::sharedFactor());
// }
//
// return symbolicfg;
//}
/* ************************************************************************* */
pair<SymbolicFactorGraphOrdered::shared_ptr, OrderingOrdered::shared_ptr> NonlinearFactorGraph::symbolic(
const Values& config) const
{
gttic(NonlinearFactorGraph_symbolic_from_Values);
// Generate an initial key ordering in iterator order
OrderingOrdered::shared_ptr ordering(config.orderingArbitrary());
return make_pair(symbolic(*ordering), ordering);
}
//pair<SymbolicFactorGraphOrdered::shared_ptr, OrderingOrdered::shared_ptr> NonlinearFactorGraph::symbolic(
// const Values& config) const
//{
// gttic(NonlinearFactorGraph_symbolic_from_Values);
//
// // Generate an initial key ordering in iterator order
// OrderingOrdered::shared_ptr ordering(config.orderingArbitrary());
// return make_pair(symbolic(*ordering), ordering);
//}
/* ************************************************************************* */
GaussianFactorGraphOrdered::shared_ptr NonlinearFactorGraph::linearize(
const Values& config, const OrderingOrdered& ordering) const
GaussianFactorGraph::shared_ptr NonlinearFactorGraph::linearize(const Values& linearizationPoint) const
{
gttic(NonlinearFactorGraph_linearize);
// create an empty linear FG
GaussianFactorGraphOrdered::shared_ptr linearFG(new GaussianFactorGraphOrdered);
GaussianFactorGraph::shared_ptr linearFG = boost::make_shared<GaussianFactorGraph>();
linearFG->reserve(this->size());
// linearize all factors
BOOST_FOREACH(const sharedFactor& factor, this->factors_) {
if(factor) {
GaussianFactorOrdered::shared_ptr lf = factor->linearize(config, ordering);
if (lf) linearFG->push_back(lf);
(*linearFG) += factor->linearize(linearizationPoint);
} else
linearFG->push_back(GaussianFactorOrdered::shared_ptr());
(*linearFG) += GaussianFactor::shared_ptr();
}
return linearFG;

gtsam/nonlinear/NonlinearFactorGraph.h View File

@ -22,12 +22,16 @@
#pragma once
#include <gtsam/geometry/Point2.h>
#include <gtsam/inference/SymbolicFactorGraphOrdered.h>
#include <gtsam/linear/GaussianFactorGraphOrdered.h>
#include <gtsam/nonlinear/NonlinearFactor.h>
#include <gtsam/inference/FactorGraph.h>
namespace gtsam {
// Forward declarations
class Values;
class Ordering;
class GaussianFactorGraph;
/**
* Formatting options when saving in GraphViz format using
* NonlinearFactorGraph::saveGraph.
@ -58,13 +62,28 @@ namespace gtsam {
* tangent vector space at the linearization point. Because the tangent space is a true
* vector space, the config type will be an VectorValues in that linearized factor graph.
*/
class NonlinearFactorGraph: public FactorGraphOrdered<NonlinearFactor> {
class NonlinearFactorGraph: public FactorGraph<NonlinearFactor> {
public:
typedef FactorGraphOrdered<NonlinearFactor> Base;
typedef boost::shared_ptr<NonlinearFactorGraph> shared_ptr;
typedef boost::shared_ptr<NonlinearFactor> sharedFactor;
typedef FactorGraph<NonlinearFactor> Base;
typedef NonlinearFactorGraph This;
typedef boost::shared_ptr<This> shared_ptr;
/** Default constructor */
NonlinearFactorGraph() {}
/** Construct from iterator over factors */
template<typename ITERATOR>
NonlinearFactorGraph(ITERATOR firstFactor, ITERATOR lastFactor) : Base(firstFactor, lastFactor) {}
/** Construct from container of factors (shared_ptr or plain objects) */
template<class CONTAINER>
explicit NonlinearFactorGraph(const CONTAINER& factors) : Base(factors) {}
/** Implicit copy/downcast constructor to override explicit template container constructor */
template<class DERIVEDFACTOR>
NonlinearFactorGraph(const FactorGraph<DERIVEDFACTOR>& graph) : Base(graph) {}
/** print just calls base class */
GTSAM_EXPORT void print(const std::string& str = "NonlinearFactorGraph: ", const KeyFormatter& keyFormatter = DefaultKeyFormatter) const;
@ -83,20 +102,10 @@ namespace gtsam {
/** Unnormalized probability. O(n) */
GTSAM_EXPORT double probPrime(const Values& c) const;
/// Add a factor by value - copies the factor object
void add(const NonlinearFactor& factor) {
this->push_back(factor.clone());
}
/// Add a factor by pointer - stores pointer without copying factor object
void add(const sharedFactor& factor) {
this->push_back(factor);
}
/**
* Create a symbolic factor graph using an existing ordering
*/
GTSAM_EXPORT SymbolicFactorGraphOrdered::shared_ptr symbolic(const OrderingOrdered& ordering) const;
//GTSAM_EXPORT SymbolicFactorGraph::shared_ptr symbolic() const;
/**
* Create a symbolic factor graph and initial variable ordering that can
@ -104,13 +113,13 @@ namespace gtsam {
* The graph and ordering should be permuted after such a fill-reducing
* ordering is found.
*/
GTSAM_EXPORT std::pair<SymbolicFactorGraphOrdered::shared_ptr, OrderingOrdered::shared_ptr>
symbolic(const Values& config) const;
//GTSAM_EXPORT std::pair<SymbolicFactorGraph::shared_ptr, Ordering::shared_ptr>
// symbolic(const Values& config) const;
/**
* Compute a fill-reducing ordering using COLAMD.
*/
GTSAM_EXPORT OrderingOrdered::shared_ptr orderingCOLAMD(const Values& config) const;
GTSAM_EXPORT Ordering orderingCOLAMD() const;
/**
* Compute a fill-reducing ordering with constraints using CCOLAMD
@ -120,14 +129,12 @@ namespace gtsam {
* indices need to appear in the constraints, unconstrained is assumed for all
* other variables
*/
GTSAM_EXPORT OrderingOrdered::shared_ptr orderingCOLAMDConstrained(const Values& config,
const std::map<Key, int>& constraints) const;
GTSAM_EXPORT Ordering orderingCOLAMDConstrained(const FastMap<Key, int>& constraints) const;
/**
* linearize a nonlinear factor graph
*/
GTSAM_EXPORT boost::shared_ptr<GaussianFactorGraphOrdered >
linearize(const Values& config, const OrderingOrdered& ordering) const;
GTSAM_EXPORT boost::shared_ptr<GaussianFactorGraph> linearize(const Values& linearizationPoint) const;
/**
* Clone() performs a deep-copy of the graph, including all of the factors

gtsam/nonlinear/SuccessiveLinearizationOptimizer.cpp View File

@ -6,10 +6,10 @@
*/
#include <gtsam/nonlinear/SuccessiveLinearizationOptimizer.h>
#include <gtsam/inference/EliminationTreeOrdered.h>
#include <gtsam/linear/GaussianJunctionTreeOrdered.h>
#include <gtsam/linear/GaussianFactorGraph.h>
#include <gtsam/linear/GaussianEliminationTree.h>
#include <gtsam/linear/SubgraphSolver.h>
#include <gtsam/linear/VectorValuesOrdered.h>
#include <gtsam/linear/VectorValues.h>
#include <boost/shared_ptr.hpp>
#include <stdexcept>
@ -53,15 +53,14 @@ void SuccessiveLinearizationParams::print(const std::string& str) const {
std::cout.flush();
}
VectorValuesOrdered solveGaussianFactorGraph(const GaussianFactorGraphOrdered &gfg, const SuccessiveLinearizationParams &params) {
VectorValues solveGaussianFactorGraph(const GaussianFactorGraph &gfg, const SuccessiveLinearizationParams &params)
{
gttic(solveGaussianFactorGraph);
VectorValuesOrdered delta;
VectorValues delta;
if (params.isMultifrontal()) {
delta = GaussianJunctionTreeOrdered(gfg).optimize(params.getEliminationFunction());
delta = gfg.optimize(*params.ordering, params.getEliminationFunction());
} else if(params.isSequential()) {
const boost::shared_ptr<GaussianBayesNetOrdered> gbn =
EliminationTreeOrdered<GaussianFactorOrdered>::Create(gfg)->eliminate(params.getEliminationFunction());
delta = gtsam::optimize(*gbn);
delta = gfg.eliminateSequential(*params.ordering, params.getEliminationFunction())->optimize();
}
else if ( params.isCG() ) {
if ( !params.iterativeParams ) throw std::runtime_error("solveGaussianFactorGraph: cg parameter has to be assigned ...");
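
With the unordered classes, the multifrontal and sequential branches above each reduce to a single call; a minimal sketch, assuming the default elimination functions are acceptable (the wrapper names are invented for illustration):

#include <gtsam/linear/GaussianFactorGraph.h>
#include <gtsam/linear/GaussianBayesNet.h>
#include <gtsam/linear/VectorValues.h>
#include <gtsam/inference/Ordering.h>

using namespace gtsam;

VectorValues solveMultifrontal(const GaussianFactorGraph& gfg, const Ordering& ordering) {
  return gfg.optimize(ordering);                        // eliminate into a Bayes tree and back-substitute
}

VectorValues solveSequential(const GaussianFactorGraph& gfg, const Ordering& ordering) {
  return gfg.eliminateSequential(ordering)->optimize(); // eliminate into a Bayes net, then solve it
}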

gtsam/nonlinear/SuccessiveLinearizationOptimizer.h View File

@ -20,6 +20,7 @@
#include <gtsam/nonlinear/NonlinearOptimizer.h>
#include <gtsam/linear/SubgraphSolver.h>
#include <gtsam/inference/Ordering.h>
namespace gtsam {
@ -36,7 +37,7 @@ public:
};
LinearSolverType linearSolverType; ///< The type of linear solver to use in the nonlinear optimizer
boost::optional<OrderingOrdered> ordering; ///< The variable elimination ordering, or empty to use COLAMD (default: empty)
boost::optional<Ordering> ordering; ///< The variable elimination ordering, or empty to use COLAMD (default: empty)
IterativeOptimizationParameters::shared_ptr iterativeParams; ///< The container for iterativeOptimization parameters. used in CG Solvers.
SuccessiveLinearizationParams() : linearSolverType(MULTIFRONTAL_CHOLESKY) {}
@ -75,7 +76,7 @@ public:
void setLinearSolverType(const std::string& solver) { linearSolverType = linearSolverTranslator(solver); }
void setIterativeParams(const SubgraphSolverParameters &params);
void setOrdering(const OrderingOrdered& ordering) { this->ordering = ordering; }
void setOrdering(const Ordering& ordering) { this->ordering = ordering; }
private:
std::string linearSolverTranslator(LinearSolverType linearSolverType) const {
@ -101,6 +102,6 @@ private:
};
/* a wrapper for solving a GaussianFactorGraph according to the parameters */
GTSAM_EXPORT VectorValuesOrdered solveGaussianFactorGraph(const GaussianFactorGraphOrdered &gfg, const SuccessiveLinearizationParams &params) ;
GTSAM_EXPORT VectorValues solveGaussianFactorGraph(const GaussianFactorGraph &gfg, const SuccessiveLinearizationParams &params);
} /* namespace gtsam */

gtsam/nonlinear/Values.cpp View File

@ -23,6 +23,7 @@
*/
#include <gtsam/nonlinear/Values.h>
#include <gtsam/linear/VectorValues.h>
#include <list>
@ -76,12 +77,8 @@ namespace gtsam {
}
/* ************************************************************************* */
VectorValuesOrdered Values::zeroVectors(const OrderingOrdered& ordering) const {
return VectorValuesOrdered::Zero(this->dims(ordering));
}
/* ************************************************************************* */
Values Values::retract(const VectorValuesOrdered& delta, const OrderingOrdered& ordering) const {
Values Values::retract(const VectorValues& delta) const
{
Values result;
for(const_iterator key_value = begin(); key_value != end(); ++key_value) {
@ -95,32 +92,20 @@ namespace gtsam {
}
/* ************************************************************************* */
VectorValuesOrdered Values::localCoordinates(const Values& cp, const OrderingOrdered& ordering) const {
VectorValuesOrdered result(this->dims(ordering));
VectorValues Values::localCoordinates(const Values& cp) const {
if(this->size() != cp.size())
throw DynamicValuesMismatched();
VectorValues result;
for(const_iterator it1=this->begin(), it2=cp.begin(); it1!=this->end(); ++it1, ++it2) {
if(it1->key != it2->key)
throw DynamicValuesMismatched(); // If keys do not match
// Will throw a dynamic_cast exception if types do not match
// NOTE: this is separate from localCoordinates(cp, ordering, result) due to at() vs. insert
result.at(ordering[it1->key]) = it1->value.localCoordinates_(it2->value);
result.insert(it1->key, it1->value.localCoordinates_(it2->value));
}
return result;
}
/* ************************************************************************* */
void Values::localCoordinates(const Values& cp, const OrderingOrdered& ordering, VectorValuesOrdered& result) const {
if(this->size() != cp.size())
throw DynamicValuesMismatched();
for(const_iterator it1=this->begin(), it2=cp.begin(); it1!=this->end(); ++it1, ++it2) {
if(it1->key != it2->key)
throw DynamicValuesMismatched(); // If keys do not match
// Will throw a dynamic_cast exception if types do not match
result.insert(ordering[it1->key], it1->value.localCoordinates_(it2->value));
}
}
/* ************************************************************************* */
const Value& Values::at(Key j) const {
// Find the item
@ -192,16 +177,6 @@ namespace gtsam {
return *this;
}
/* ************************************************************************* */
vector<size_t> Values::dims(const OrderingOrdered& ordering) const {
assert(ordering.size() == this->size()); // reads off of end of array if difference in size
vector<size_t> result(values_.size());
BOOST_FOREACH(const ConstKeyValuePair& key_value, *this) {
result[ordering[key_value.key]] = key_value.value.dim();
}
return result;
}
/* ************************************************************************* */
size_t Values::dim() const {
size_t result = 0;
@ -211,15 +186,6 @@ namespace gtsam {
return result;
}
/* ************************************************************************* */
OrderingOrdered::shared_ptr Values::orderingArbitrary(Index firstVar) const {
OrderingOrdered::shared_ptr ordering(new OrderingOrdered);
for(const_iterator key_value = begin(); key_value != end(); ++key_value) {
ordering->insert(key_value->key, firstVar++);
}
return ordering;
}
/* ************************************************************************* */
const char* ValuesKeyAlreadyExists::what() const throw() {
if(message_.empty())
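
retract and localCoordinates now map directly between Values and Key-indexed VectorValues, with no ordering in between. A small round-trip sketch (the key and perturbation are made up):

#include <gtsam/nonlinear/Values.h>
#include <gtsam/linear/VectorValues.h>
#include <gtsam/geometry/Pose2.h>

using namespace gtsam;

void retractRoundTrip() {
  Values values;
  values.insert(1, Pose2(1.0, 2.0, 0.3));

  // A tangent-space update keyed by the same Key as the Values entry
  Vector d = zero(3);
  d(0) = 0.1;                    // small translation along local x
  VectorValues delta;
  delta.insert(1, d);

  Values updated = values.retract(delta);

  // localCoordinates recovers the update (up to manifold round-off)
  VectorValues recovered = values.localCoordinates(updated);
}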

gtsam/nonlinear/Values.h View File

@ -45,13 +45,12 @@
#include <gtsam/base/Value.h>
#include <gtsam/base/FastMap.h>
#include <gtsam/linear/VectorValuesOrdered.h>
#include <gtsam/inference/Key.h>
#include <gtsam/nonlinear/OrderingOrdered.h>
namespace gtsam {
// Forward declarations / utilities
class VectorValues;
class ValueCloneAllocator;
class ValueAutomaticCasting;
template<typename T> static bool _truePredicate(const T&) { return true; }
@ -206,9 +205,6 @@ namespace gtsam {
/** whether the config is empty */
bool empty() const { return values_.empty(); }
/** Get a zero VectorValues of the correct structure */
VectorValuesOrdered zeroVectors(const OrderingOrdered& ordering) const;
const_iterator begin() const { return boost::make_transform_iterator(values_.begin(), &make_const_deref_pair); }
const_iterator end() const { return boost::make_transform_iterator(values_.end(), &make_const_deref_pair); }
iterator begin() { return boost::make_transform_iterator(values_.begin(), &make_deref_pair); }
@ -222,13 +218,10 @@ namespace gtsam {
/// @{
/** Add a delta config to current config and returns a new config */
Values retract(const VectorValuesOrdered& delta, const OrderingOrdered& ordering) const;
Values retract(const VectorValues& delta) const;
/** Get a delta config about a linearization point c0 (*this) */
VectorValuesOrdered localCoordinates(const Values& cp, const OrderingOrdered& ordering) const;
/** Get a delta config about a linearization point c0 (*this) - assumes uninitialized delta */
void localCoordinates(const Values& cp, const OrderingOrdered& ordering, VectorValuesOrdered& delta) const;
VectorValues localCoordinates(const Values& cp) const;
///@}
@ -262,20 +255,9 @@ namespace gtsam {
/** Remove all variables from the config */
void clear() { values_.clear(); }
/** Create an array of variable dimensions using the given ordering (\f$ O(n) \f$) */
std::vector<size_t> dims(const OrderingOrdered& ordering) const;
/** Compute the total dimensionality of all values (\f$ O(n) \f$) */
size_t dim() const;
/**
* Generate a default ordering, simply in key sort order. To instead
* create a fill-reducing ordering, use
* NonlinearFactorGraph::orderingCOLAMD(). Alternatively, you may permute
* this ordering yourself (as orderingCOLAMD() does internally).
*/
OrderingOrdered::shared_ptr orderingArbitrary(Index firstVar = 0) const;
/**
* Return a filtered view of this Values class, without copying any data.
* When iterating over the filtered view, only the key-value pairs