Merged in feature/LinearSolverCleanup (pull request #148)

This has:
- Separate Scatter class
- Change from updateATA to updateHessian as a virtual method
- Fixed-size BinaryJacobianFactor that overloads updateHessian
- SFM timing script that takes BAL files
commit b68f763fe7
@@ -99,7 +99,7 @@ namespace gtsam

   // Do dense elimination step
   std::pair<boost::shared_ptr<ConditionalType>, boost::shared_ptr<FactorType> > eliminationResult =
-    eliminationFunction(gatheredFactors, Ordering(node->keys));
+    eliminationFunction(gatheredFactors, node->orderedFrontalKeys);

   // Store conditional in BayesTree clique, and in the case of ISAM2Clique also store the remaining factor
   myData.bayesTreeNode->setEliminationResult(eliminationResult);
@@ -123,7 +123,7 @@ namespace gtsam
     const std::string& s, const KeyFormatter& keyFormatter) const
 {
   std::cout << s;
-  BOOST_FOREACH(Key j, keys)
+  BOOST_FOREACH(Key j, orderedFrontalKeys)
     std::cout << j << " ";
   std::cout << "problemSize = " << problemSize_ << std::endl;
 }
@@ -11,7 +11,7 @@

 #include <gtsam/base/Testable.h>
 #include <gtsam/base/FastVector.h>
-#include <gtsam/inference/Key.h>
+#include <gtsam/inference/Ordering.h>

 namespace gtsam
 {
@@ -37,11 +37,11 @@ namespace gtsam
   typedef typename FactorGraphType::Eliminate Eliminate; ///< Typedef for an eliminate subroutine

   struct Cluster {
-    typedef FastVector<Key> Keys;
+    typedef Ordering Keys;
     typedef FastVector<sharedFactor> Factors;
     typedef FastVector<boost::shared_ptr<Cluster> > Children;

-    Keys keys; ///< Frontal keys of this node
+    Keys orderedFrontalKeys; ///< Frontal keys of this node
     Factors factors; ///< Factors associated with this node
     Children children; ///< sub-trees
     int problemSize_;
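Aside (not part of the patch): the Keys typedef now resolves to Ordering instead of FastVector<Key>, so frontal keys carry an explicit order. A minimal sketch of why the surrounding cluster code keeps compiling, assuming Ordering keeps the vector-like interface of Key used elsewhere in gtsam:

    #include <gtsam/inference/Ordering.h>

    int main() {
      // Sketch only: push_back, insert and size are the operations the cluster
      // and junction-tree code below rely on.
      gtsam::Ordering frontals;
      frontals.push_back(2);                     // as in JunctionTree construction
      frontals.insert(frontals.begin(), 1);      // as in the child-merge code
      return static_cast<int>(frontals.size());  // frontal-variable count
    }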
@@ -49,7 +49,7 @@ namespace gtsam {
   // structure with its own JT node, and create a child pointer in its parent.
   ConstructorTraversalData<BAYESTREE,GRAPH> myData = ConstructorTraversalData<BAYESTREE,GRAPH>(&parentData);
   myData.myJTNode = boost::make_shared<typename JunctionTree<BAYESTREE,GRAPH>::Node>();
-  myData.myJTNode->keys.push_back(node->key);
+  myData.myJTNode->orderedFrontalKeys.push_back(node->key);
   myData.myJTNode->factors.insert(myData.myJTNode->factors.begin(), node->factors.begin(), node->factors.end());
   parentData.myJTNode->children.push_back(myData.myJTNode);
   return myData;
@@ -101,13 +101,20 @@ namespace gtsam {
       const typename JunctionTree<BAYESTREE, GRAPH>::Node& childToMerge =
           *myData.myJTNode->children[child - nrMergedChildren];
       // Merge keys, factors, and children.
-      myData.myJTNode->keys.insert(myData.myJTNode->keys.begin(), childToMerge.keys.begin(), childToMerge.keys.end());
-      myData.myJTNode->factors.insert(myData.myJTNode->factors.end(), childToMerge.factors.begin(), childToMerge.factors.end());
-      myData.myJTNode->children.insert(myData.myJTNode->children.end(), childToMerge.children.begin(), childToMerge.children.end());
+      myData.myJTNode->orderedFrontalKeys.insert(
+          myData.myJTNode->orderedFrontalKeys.begin(),
+          childToMerge.orderedFrontalKeys.begin(),
+          childToMerge.orderedFrontalKeys.end());
+      myData.myJTNode->factors.insert(myData.myJTNode->factors.end(),
+          childToMerge.factors.begin(),
+          childToMerge.factors.end());
+      myData.myJTNode->children.insert(myData.myJTNode->children.end(),
+          childToMerge.children.begin(),
+          childToMerge.children.end());
       // Increment problem size
       combinedProblemSize = std::max(combinedProblemSize, childToMerge.problemSize_);
       // Increment number of frontal variables
-      myNrFrontals += childToMerge.keys.size();
+      myNrFrontals += childToMerge.orderedFrontalKeys.size();
       // Remove child from list.
       myData.myJTNode->children.erase(myData.myJTNode->children.begin() + (child - nrMergedChildren));
       // Increment number of merged children
@@ -18,15 +18,15 @@

 #pragma once

-#include <algorithm>
-#include <vector>
-#include <boost/assign/list_inserter.hpp>
-
-#include <gtsam/base/FastSet.h>
 #include <gtsam/inference/Key.h>
 #include <gtsam/inference/VariableIndex.h>
 #include <gtsam/inference/MetisIndex.h>
 #include <gtsam/inference/FactorGraph.h>
+#include <gtsam/base/FastSet.h>
+
+#include <boost/assign/list_inserter.hpp>
+
+#include <algorithm>
+#include <vector>

 namespace gtsam {

@@ -104,9 +104,9 @@ namespace gtsam {
   /// VariableIndex, it is faster to use COLAMD(const VariableIndex&). This function constrains
   /// the variables in \c constrainLast to the end of the ordering, and orders all other variables
   /// before in a fill-reducing ordering. If \c forceOrder is true, the variables in \c
-  /// constrainLast will be ordered in the same order specified in the vector<Key> \c
-  /// constrainLast. If \c forceOrder is false, the variables in \c constrainFirst will be
-  /// ordered after all the others, but will be rearranged by CCOLAMD to reduce fill-in as well.
+  /// constrainFirst will be ordered in the same order specified in the vector<Key> \c
+  /// constrainFirst. If \c forceOrder is false, the variables in \c constrainFirst will be
+  /// ordered before all the others, but will be rearranged by CCOLAMD to reduce fill-in as well.
   template<class FACTOR>
   static Ordering colamdConstrainedFirst(const FactorGraph<FACTOR>& graph,
       const std::vector<Key>& constrainFirst, bool forceOrder = false) {
@@ -117,7 +117,7 @@ namespace gtsam {
   /// orders all other variables after in a fill-reducing ordering. If \c forceOrder is true, the
   /// variables in \c constrainFirst will be ordered in the same order specified in the
   /// vector<Key> \c constrainFirst. If \c forceOrder is false, the variables in \c
-  /// constrainFirst will be ordered after all the others, but will be rearranged by CCOLAMD to
+  /// constrainFirst will be ordered before all the others, but will be rearranged by CCOLAMD to
   /// reduce fill-in as well.
   static GTSAM_EXPORT Ordering colamdConstrainedFirst(const VariableIndex& variableIndex,
       const std::vector<Key>& constrainFirst, bool forceOrder = false);
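To make the corrected documentation concrete, a hedged usage sketch of the constrained COLAMD ordering; `graph`, `l1` and `l2` are illustrative names, not from the patch:

    // Sketch only: place the two constrained keys first. With forceOrder=false,
    // CCOLAMD may still rearrange them among themselves to reduce fill-in;
    // with forceOrder=true they keep exactly the order given in constrainFirst.
    std::vector<gtsam::Key> constrainFirst;
    constrainFirst.push_back(l1);
    constrainFirst.push_back(l2);
    gtsam::Ordering ordering =
        gtsam::Ordering::colamdConstrainedFirst(graph, constrainFirst, false);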
@@ -0,0 +1,91 @@
+/* ----------------------------------------------------------------------------
+
+ * GTSAM Copyright 2010, Georgia Tech Research Corporation,
+ * Atlanta, Georgia 30332-0415
+ * All Rights Reserved
+ * Authors: Frank Dellaert, et al. (see THANKS for the full author list)
+
+ * See LICENSE for the license information
+
+ * -------------------------------------------------------------------------- */
+
+/**
+ * @file BinaryJacobianFactor.h
+ *
+ * @brief A binary JacobianFactor specialization that uses fixed matrix math for speed
+ *
+ * @date June 2015
+ * @author Frank Dellaert
+ */
+
+#pragma once
+
+#include <gtsam/linear/JacobianFactor.h>
+#include <gtsam/base/SymmetricBlockMatrix.h>
+#include <gtsam/base/timing.h>
+
+namespace gtsam {
+
+/**
+ * A binary JacobianFactor specialization that uses fixed matrix math for speed
+ */
+template<int M, int N1, int N2>
+struct BinaryJacobianFactor: JacobianFactor {
+
+  /// Constructor
+  BinaryJacobianFactor(Key key1, const Eigen::Matrix<double, M, N1>& A1,
+      Key key2, const Eigen::Matrix<double, M, N2>& A2,
+      const Eigen::Matrix<double, M, 1>& b, //
+      const SharedDiagonal& model = SharedDiagonal()) :
+      JacobianFactor(key1, A1, key2, A2, b, model) {
+  }
+
+  inline Key key1() const {
+    return keys_[0];
+  }
+  inline Key key2() const {
+    return keys_[1];
+  }
+
+  // Fixed-size matrix update
+  void updateHessian(const FastVector<Key>& infoKeys,
+      SymmetricBlockMatrix* info) const {
+    gttic(updateHessian_BinaryJacobianFactor);
+    // Whiten the factor if it has a noise model
+    const SharedDiagonal& model = get_model();
+    if (model && !model->isUnit()) {
+      if (model->isConstrained())
+        throw std::invalid_argument(
+            "BinaryJacobianFactor::updateHessian: cannot update information with "
+            "constrained noise model");
+      BinaryJacobianFactor whitenedFactor(key1(), model->Whiten(getA(begin())),
+          key2(), model->Whiten(getA(end())), model->whiten(getb()));
+      whitenedFactor.updateHessian(infoKeys, info);
+    } else {
+      // First build an array of slots
+      DenseIndex slot1 = Slot(infoKeys, key1());
+      DenseIndex slot2 = Slot(infoKeys, key2());
+      DenseIndex slotB = info->nBlocks() - 1;
+
+      const Matrix& Ab = Ab_.matrix();
+      Eigen::Block<const Matrix, M, N1> A1(Ab, 0, 0);
+      Eigen::Block<const Matrix, M, N2> A2(Ab, 0, N1);
+      Eigen::Block<const Matrix, M, 1> b(Ab, 0, N1 + N2);
+
+      // We perform I += A'*A to the upper triangle
+      (*info)(slot1, slot1).selfadjointView().rankUpdate(A1.transpose());
+      (*info)(slot1, slot2).knownOffDiagonal() += A1.transpose() * A2;
+      (*info)(slot1, slotB).knownOffDiagonal() += A1.transpose() * b;
+      (*info)(slot2, slot2).selfadjointView().rankUpdate(A2.transpose());
+      (*info)(slot2, slotB).knownOffDiagonal() += A2.transpose() * b;
+      (*info)(slotB, slotB)(0, 0) += b.transpose() * b;
+    }
+  }
+};
+
+template<int M, int N1, int N2>
+struct traits<BinaryJacobianFactor<M, N1, N2> > : Testable<
+    BinaryJacobianFactor<M, N1, N2> > {
+};
+
+} //namespace gtsam
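A hedged usage sketch of the new fixed-size factor. The 2x3 dimensions, keys 1 and 2, and the include path (assumed to be gtsam/linear, as the file's own includes suggest) are illustrative only:

    #include <gtsam/linear/BinaryJacobianFactor.h>
    #include <gtsam/linear/NoiseModel.h>

    int main() {
      // Sketch only: a measurement of dimension 2 on two 3-dimensional variables.
      Eigen::Matrix<double, 2, 3> A1 = Eigen::Matrix<double, 2, 3>::Random();
      Eigen::Matrix<double, 2, 3> A2 = Eigen::Matrix<double, 2, 3>::Random();
      Eigen::Matrix<double, 2, 1> b = Eigen::Matrix<double, 2, 1>::Zero();
      gtsam::SharedDiagonal model = gtsam::noiseModel::Isotropic::Sigma(2, 0.5);
      gtsam::BinaryJacobianFactor<2, 3, 3> factor(1, A1, 2, A2, b, model);
      // During elimination its updateHessian overload adds A'A into the combined
      // information matrix using fixed-size Eigen blocks instead of dynamic ones.
      return 0;
    }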
@@ -28,6 +28,8 @@ namespace gtsam {

   // Forward declarations
   class VectorValues;
+  class Scatter;
+  class SymmetricBlockMatrix;

   /**
    * An abstract virtual base class for JacobianFactor and HessianFactor. A GaussianFactor has a
@@ -119,6 +121,14 @@ namespace gtsam {
     */
    virtual GaussianFactor::shared_ptr negate() const = 0;

+    /** Update an information matrix by adding the information corresponding to this factor
+     * (used internally during elimination).
+     * @param scatter A mapping from variable index to slot index in this HessianFactor
+     * @param info The information matrix to be updated
+     */
+    virtual void updateHessian(const FastVector<Key>& keys,
+        SymmetricBlockMatrix* info) const = 0;
+
    /// y += alpha * A'*A*x
    virtual void multiplyHessianAdd(double alpha, const VectorValues& x, VectorValues& y) const = 0;

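The point of making updateHessian a pure virtual on GaussianFactor is that elimination code can accumulate the combined information matrix without a dynamic_cast per factor type. A sketch of the calling pattern, mirroring the HessianFactor merge constructor further down in this diff ('factors', 'keys_' and 'info_' are the combined factor's graph, keys and SymmetricBlockMatrix):

    // Sketch only: virtual dispatch picks the JacobianFactor, HessianFactor or
    // BinaryJacobianFactor implementation of updateHessian for each factor.
    BOOST_FOREACH(const gtsam::GaussianFactor::shared_ptr& factor, factors)
      if (factor)
        factor->updateHessian(keys_, &info_);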
@@ -131,6 +141,12 @@ namespace gtsam {
    /// Gradient wrt a key at any values
    virtual Vector gradient(Key key, const VectorValues& x) const = 0;

+    // Determine position of a given key
+    template <typename CONTAINER>
+    static DenseIndex Slot(const CONTAINER& keys, Key key) {
+      return std::find(keys.begin(), keys.end(), key) - keys.begin();
+    }
+
  private:
    /** Serialization function */
    friend class boost::serialization::access;
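The new Slot helper is just a linear search; a hedged illustration of its contract (key values are arbitrary):

    #include <gtsam/linear/GaussianFactor.h>

    int main() {
      gtsam::FastVector<gtsam::Key> keys;
      keys.push_back(5);
      keys.push_back(7);
      // Slot returns the position of the key, or keys.size() when the key is
      // absent (std::find returning end()).
      gtsam::DenseIndex found = gtsam::GaussianFactor::Slot(keys, 7);    // == 1
      gtsam::DenseIndex missing = gtsam::GaussianFactor::Slot(keys, 9);  // == 2
      return static_cast<int>(found + missing);
    }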
@@ -140,7 +156,7 @@ namespace gtsam {
    }

  }; // GaussianFactor

  /// traits
  template<>
  struct traits<GaussianFactor> : public Testable<GaussianFactor> {
@@ -15,17 +15,17 @@
 * @date Dec 8, 2010
 */

-#include <gtsam/base/debug.h>
-#include <gtsam/base/timing.h>
-#include <gtsam/base/Matrix.h>
-#include <gtsam/base/FastMap.h>
-#include <gtsam/base/cholesky.h>
 #include <gtsam/linear/linearExceptions.h>
 #include <gtsam/linear/GaussianConditional.h>
 #include <gtsam/linear/GaussianFactor.h>
 #include <gtsam/linear/HessianFactor.h>
 #include <gtsam/linear/JacobianFactor.h>
 #include <gtsam/linear/GaussianFactorGraph.h>
+#include <gtsam/base/debug.h>
+#include <gtsam/base/timing.h>
+#include <gtsam/base/Matrix.h>
+#include <gtsam/base/FastMap.h>
+#include <gtsam/base/cholesky.h>

 #include <boost/foreach.hpp>
 #include <boost/format.hpp>
@@ -49,232 +49,185 @@

 using namespace std;
 using namespace boost::assign;
-namespace br { using namespace boost::range; using namespace boost::adaptors; }
+namespace br {
+using namespace boost::range;
+using namespace boost::adaptors;
+}

 namespace gtsam {

-/* ************************************************************************* */
-string SlotEntry::toString() const {
-  ostringstream oss;
-  oss << "SlotEntry: slot=" << slot << ", dim=" << dimension;
-  return oss.str();
-}
-
-/* ************************************************************************* */
-Scatter::Scatter(const GaussianFactorGraph& gfg,
-    boost::optional<const Ordering&> ordering) {
-  gttic(Scatter_Constructor);
-  static const size_t none = std::numeric_limits<size_t>::max();
-
-  // First do the set union.
-  BOOST_FOREACH(const GaussianFactor::shared_ptr& factor, gfg) {
-    if (factor) {
-      for (GaussianFactor::const_iterator variable = factor->begin();
-          variable != factor->end(); ++variable) {
-        // TODO: Fix this hack to cope with zero-row Jacobians that come from BayesTreeOrphanWrappers
-        const JacobianFactor* asJacobian =
-            dynamic_cast<const JacobianFactor*>(factor.get());
-        if (!asJacobian || asJacobian->cols() > 1)
-          this->insert(
-              make_pair(*variable, SlotEntry(none, factor->getDim(variable))));
-      }
-    }
-  }
-
-  // If we have an ordering, pre-fill the ordered variables first
-  size_t slot = 0;
-  if (ordering) {
-    BOOST_FOREACH(Key key, *ordering) {
-      const_iterator entry = find(key);
-      if (entry == end())
-        throw std::invalid_argument(
-            "The ordering provided to the HessianFactor Scatter constructor\n"
-            "contained extra variables that did not appear in the factors to combine.");
-      at(key).slot = (slot++);
-    }
-  }
-
-  // Next fill in the slot indices (we can only get these after doing the set
-  // union.
-  BOOST_FOREACH(value_type& var_slot, *this) {
-    if (var_slot.second.slot == none)
-      var_slot.second.slot = (slot++);
-  }
-}
-
 /* ************************************************************************* */
 HessianFactor::HessianFactor() :
-    info_(cref_list_of<1>(1))
-{
+    info_(cref_list_of<1>(1)) {
   linearTerm().setZero();
   constantTerm() = 0.0;
 }

 /* ************************************************************************* */
 HessianFactor::HessianFactor(Key j, const Matrix& G, const Vector& g, double f) :
-    GaussianFactor(cref_list_of<1>(j)), info_(cref_list_of<2>(G.cols())(1))
-{
-  if(G.rows() != G.cols() || G.rows() != g.size()) throw invalid_argument(
+    GaussianFactor(cref_list_of<1>(j)), info_(cref_list_of<2>(G.cols())(1)) {
+  if (G.rows() != G.cols() || G.rows() != g.size())
+    throw invalid_argument(
       "Attempting to construct HessianFactor with inconsistent matrix and/or vector dimensions");
-  info_(0,0) = G;
-  info_(0,1) = g;
-  info_(1,1)(0,0) = f;
+  info_(0, 0) = G;
+  info_(0, 1) = g;
+  info_(1, 1)(0, 0) = f;
 }

 /* ************************************************************************* */
 // error is 0.5*(x-mu)'*inv(Sigma)*(x-mu) = 0.5*(x'*G*x - 2*x'*G*mu + mu'*G*mu)
 // where G = inv(Sigma), g = G*mu, f = mu'*G*mu = mu'*g
 HessianFactor::HessianFactor(Key j, const Vector& mu, const Matrix& Sigma) :
-    GaussianFactor(cref_list_of<1>(j)),
-    info_(cref_list_of<2> (Sigma.cols()) (1) )
-{
-  if (Sigma.rows() != Sigma.cols() || Sigma.rows() != mu.size()) throw invalid_argument(
-      "Attempting to construct HessianFactor with inconsistent matrix and/or vector dimensions");
-  info_(0,0) = Sigma.inverse(); // G
-  info_(0,1) = info_(0,0).selfadjointView() * mu; // g
-  info_(1,1)(0,0) = mu.dot(info_(0,1).knownOffDiagonal().col(0)); // f
+    GaussianFactor(cref_list_of<1>(j)), info_(cref_list_of<2>(Sigma.cols())(1)) {
+  if (Sigma.rows() != Sigma.cols() || Sigma.rows() != mu.size())
+    throw invalid_argument(
+        "Attempting to construct HessianFactor with inconsistent matrix and/or vector dimensions");
+  info_(0, 0) = Sigma.inverse(); // G
+  info_(0, 1) = info_(0, 0).selfadjointView() * mu; // g
+  info_(1, 1)(0, 0) = mu.dot(info_(0, 1).knownOffDiagonal().col(0)); // f
 }

 /* ************************************************************************* */
-HessianFactor::HessianFactor(Key j1, Key j2,
-    const Matrix& G11, const Matrix& G12, const Vector& g1,
-    const Matrix& G22, const Vector& g2, double f) :
-    GaussianFactor(cref_list_of<2>(j1)(j2)),
-    info_(cref_list_of<3> (G11.cols()) (G22.cols()) (1) )
-{
-  info_(0,0) = G11;
-  info_(0,1) = G12;
-  info_(0,2) = g1;
-  info_(1,1) = G22;
-  info_(1,2) = g2;
-  info_(2,2)(0,0) = f;
+HessianFactor::HessianFactor(Key j1, Key j2, const Matrix& G11,
+    const Matrix& G12, const Vector& g1, const Matrix& G22, const Vector& g2,
+    double f) :
+    GaussianFactor(cref_list_of<2>(j1)(j2)), info_(
+        cref_list_of<3>(G11.cols())(G22.cols())(1)) {
+  info_(0, 0) = G11;
+  info_(0, 1) = G12;
+  info_(0, 2) = g1;
+  info_(1, 1) = G22;
+  info_(1, 2) = g2;
+  info_(2, 2)(0, 0) = f;
 }

 /* ************************************************************************* */
-HessianFactor::HessianFactor(Key j1, Key j2, Key j3,
-    const Matrix& G11, const Matrix& G12, const Matrix& G13, const Vector& g1,
-    const Matrix& G22, const Matrix& G23, const Vector& g2,
-    const Matrix& G33, const Vector& g3, double f) :
-    GaussianFactor(cref_list_of<3>(j1)(j2)(j3)),
-    info_(cref_list_of<4> (G11.cols()) (G22.cols()) (G33.cols()) (1) )
-{
-  if(G11.rows() != G11.cols() || G11.rows() != G12.rows() || G11.rows() != G13.rows() || G11.rows() != g1.size() ||
-      G22.cols() != G12.cols() || G33.cols() != G13.cols() || G22.cols() != g2.size() || G33.cols() != g3.size())
-    throw invalid_argument("Inconsistent matrix and/or vector dimensions in HessianFactor constructor");
-  info_(0,0) = G11;
-  info_(0,1) = G12;
-  info_(0,2) = G13;
-  info_(0,3) = g1;
-  info_(1,1) = G22;
-  info_(1,2) = G23;
-  info_(1,3) = g2;
-  info_(2,2) = G33;
-  info_(2,3) = g3;
-  info_(3,3)(0,0) = f;
+HessianFactor::HessianFactor(Key j1, Key j2, Key j3, const Matrix& G11,
+    const Matrix& G12, const Matrix& G13, const Vector& g1, const Matrix& G22,
+    const Matrix& G23, const Vector& g2, const Matrix& G33, const Vector& g3,
+    double f) :
+    GaussianFactor(cref_list_of<3>(j1)(j2)(j3)), info_(
+        cref_list_of<4>(G11.cols())(G22.cols())(G33.cols())(1)) {
+  if (G11.rows() != G11.cols() || G11.rows() != G12.rows()
+      || G11.rows() != G13.rows() || G11.rows() != g1.size()
+      || G22.cols() != G12.cols() || G33.cols() != G13.cols()
+      || G22.cols() != g2.size() || G33.cols() != g3.size())
+    throw invalid_argument(
+        "Inconsistent matrix and/or vector dimensions in HessianFactor constructor");
+  info_(0, 0) = G11;
+  info_(0, 1) = G12;
+  info_(0, 2) = G13;
+  info_(0, 3) = g1;
+  info_(1, 1) = G22;
+  info_(1, 2) = G23;
+  info_(1, 3) = g2;
+  info_(2, 2) = G33;
+  info_(2, 3) = g3;
+  info_(3, 3)(0, 0) = f;
 }

 /* ************************************************************************* */
 namespace {
-DenseIndex _getSizeHF(const Vector& m) { return m.size(); }
+DenseIndex _getSizeHF(const Vector& m) {
+  return m.size();
+}
 }

 /* ************************************************************************* */
-HessianFactor::HessianFactor(const std::vector<Key>& js, const std::vector<Matrix>& Gs,
-    const std::vector<Vector>& gs, double f) :
-    GaussianFactor(js), info_(gs | br::transformed(&_getSizeHF), true)
-{
+HessianFactor::HessianFactor(const std::vector<Key>& js,
+    const std::vector<Matrix>& Gs, const std::vector<Vector>& gs, double f) :
+    GaussianFactor(js), info_(gs | br::transformed(&_getSizeHF), true) {
   // Get the number of variables
   size_t variable_count = js.size();

   // Verify the provided number of entries in the vectors are consistent
-  if(gs.size() != variable_count || Gs.size() != (variable_count*(variable_count+1))/2)
-    throw invalid_argument("Inconsistent number of entries between js, Gs, and gs in HessianFactor constructor.\nThe number of keys provided \
+  if (gs.size() != variable_count
+      || Gs.size() != (variable_count * (variable_count + 1)) / 2)
+    throw invalid_argument(
+        "Inconsistent number of entries between js, Gs, and gs in HessianFactor constructor.\nThe number of keys provided \
 in js must match the number of linear vector pieces in gs. The number of upper-diagonal blocks in Gs must be n*(n+1)/2");

   // Verify the dimensions of each provided matrix are consistent
   // Note: equations for calculating the indices derived from the "sum of an arithmetic sequence" formula
-  for(size_t i = 0; i < variable_count; ++i){
+  for (size_t i = 0; i < variable_count; ++i) {
     DenseIndex block_size = gs[i].size();
     // Check rows
-    for(size_t j = 0; j < variable_count-i; ++j){
-      size_t index = i*(2*variable_count - i + 1)/2 + j;
-      if(Gs[index].rows() != block_size){
-        throw invalid_argument("Inconsistent matrix and/or vector dimensions in HessianFactor constructor");
+    for (size_t j = 0; j < variable_count - i; ++j) {
+      size_t index = i * (2 * variable_count - i + 1) / 2 + j;
+      if (Gs[index].rows() != block_size) {
+        throw invalid_argument(
+            "Inconsistent matrix and/or vector dimensions in HessianFactor constructor");
       }
     }
     // Check cols
-    for(size_t j = 0; j <= i; ++j){
-      size_t index = j*(2*variable_count - j + 1)/2 + (i-j);
-      if(Gs[index].cols() != block_size){
-        throw invalid_argument("Inconsistent matrix and/or vector dimensions in HessianFactor constructor");
+    for (size_t j = 0; j <= i; ++j) {
+      size_t index = j * (2 * variable_count - j + 1) / 2 + (i - j);
+      if (Gs[index].cols() != block_size) {
+        throw invalid_argument(
+            "Inconsistent matrix and/or vector dimensions in HessianFactor constructor");
       }
     }
   }

   // Fill in the blocks
   size_t index = 0;
-  for(size_t i = 0; i < variable_count; ++i){
-    for(size_t j = i; j < variable_count; ++j){
+  for (size_t i = 0; i < variable_count; ++i) {
+    for (size_t j = i; j < variable_count; ++j) {
       info_(i, j) = Gs[index++];
     }
     info_(i, variable_count) = gs[i];
   }
-  info_(variable_count, variable_count)(0,0) = f;
+  info_(variable_count, variable_count)(0, 0) = f;
 }

 /* ************************************************************************* */
 namespace {
-void _FromJacobianHelper(const JacobianFactor& jf, SymmetricBlockMatrix& info)
-{
+void _FromJacobianHelper(const JacobianFactor& jf, SymmetricBlockMatrix& info) {
   gttic(HessianFactor_fromJacobian);
   const SharedDiagonal& jfModel = jf.get_model();
-  if(jfModel)
-  {
-    if(jf.get_model()->isConstrained())
-      throw invalid_argument("Cannot construct HessianFactor from JacobianFactor with constrained noise model");
-    info.full().triangularView() = jf.matrixObject().full().transpose() *
-      (jfModel->invsigmas().array() * jfModel->invsigmas().array()).matrix().asDiagonal() *
-      jf.matrixObject().full();
+  if (jfModel) {
+    if (jf.get_model()->isConstrained())
+      throw invalid_argument(
+          "Cannot construct HessianFactor from JacobianFactor with constrained noise model");
+    info.full().triangularView() =
+        jf.matrixObject().full().transpose()
+            * (jfModel->invsigmas().array() * jfModel->invsigmas().array()).matrix().asDiagonal()
+            * jf.matrixObject().full();
   } else {
-    info.full().triangularView() = jf.matrixObject().full().transpose() * jf.matrixObject().full();
+    info.full().triangularView() = jf.matrixObject().full().transpose()
+        * jf.matrixObject().full();
   }
 }
 }

 /* ************************************************************************* */
 HessianFactor::HessianFactor(const JacobianFactor& jf) :
-    GaussianFactor(jf), info_(SymmetricBlockMatrix::LikeActiveViewOf(jf.matrixObject()))
-{
+    GaussianFactor(jf), info_(
+        SymmetricBlockMatrix::LikeActiveViewOf(jf.matrixObject())) {
   _FromJacobianHelper(jf, info_);
 }

 /* ************************************************************************* */
 HessianFactor::HessianFactor(const GaussianFactor& gf) :
-    GaussianFactor(gf)
-{
+    GaussianFactor(gf) {
   // Copy the matrix data depending on what type of factor we're copying from
-  if(const JacobianFactor* jf = dynamic_cast<const JacobianFactor*>(&gf))
-  {
+  if (const JacobianFactor* jf = dynamic_cast<const JacobianFactor*>(&gf)) {
     info_ = SymmetricBlockMatrix::LikeActiveViewOf(jf->matrixObject());
     _FromJacobianHelper(*jf, info_);
-  }
-  else if(const HessianFactor* hf = dynamic_cast<const HessianFactor*>(&gf))
-  {
+  } else if (const HessianFactor* hf = dynamic_cast<const HessianFactor*>(&gf)) {
     info_ = hf->info_;
-  }
-  else
-  {
-    throw std::invalid_argument("In HessianFactor(const GaussianFactor& gf), gf is neither a JacobianFactor nor a HessianFactor");
+  } else {
+    throw std::invalid_argument(
+        "In HessianFactor(const GaussianFactor& gf), gf is neither a JacobianFactor nor a HessianFactor");
   }
 }

 /* ************************************************************************* */
 HessianFactor::HessianFactor(const GaussianFactorGraph& factors,
-    boost::optional<const Scatter&> scatter)
-{
+    boost::optional<const Scatter&> scatter) {
   gttic(HessianFactor_MergeConstructor);
   boost::optional<Scatter> computedScatter;
-  if(!scatter) {
+  if (!scatter) {
     computedScatter = Scatter(factors);
     scatter = computedScatter;
   }
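Aside (not part of the patch): the reformatted single-key constructors both fill the same augmented information blocks, using the convention stated in the file's own comment, error(x) = 0.5*(x-mu)'*inv(Sigma)*(x-mu) with G = inv(Sigma), g = G*mu, f = mu'*G*mu. A hedged sketch showing the two equivalent ways to build such a factor (key 1 and the 3x3 dimensions are illustrative):

    #include <gtsam/linear/HessianFactor.h>

    int main() {
      gtsam::Matrix Sigma = 2.0 * gtsam::Matrix::Identity(3, 3);
      gtsam::Vector mu = gtsam::Vector::Zero(3);
      gtsam::HessianFactor fromMoments(1, mu, Sigma);  // computes G, g, f internally
      gtsam::Matrix G = Sigma.inverse();
      gtsam::Vector g = G * mu;
      double f = mu.dot(g);
      gtsam::HessianFactor fromCanonical(1, G, g, f);  // same Gaussian, canonical form
      return 0;
    }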
@@ -296,64 +249,57 @@ HessianFactor::HessianFactor(const GaussianFactorGraph& factors,
   // Form A' * A
   gttic(update);
   BOOST_FOREACH(const GaussianFactor::shared_ptr& factor, factors)
-  {
-    if(factor) {
-      if(const HessianFactor* hessian = dynamic_cast<const HessianFactor*>(factor.get()))
-        updateATA(*hessian, *scatter);
-      else if(const JacobianFactor* jacobian = dynamic_cast<const JacobianFactor*>(factor.get()))
-        updateATA(*jacobian, *scatter);
-      else
-        throw invalid_argument("GaussianFactor is neither Hessian nor Jacobian");
-    }
-  }
+    if (factor)
+      factor->updateHessian(keys_, &info_);
   gttoc(update);
 }

 /* ************************************************************************* */
-void HessianFactor::print(const std::string& s, const KeyFormatter& formatter) const {
+void HessianFactor::print(const std::string& s,
+    const KeyFormatter& formatter) const {
   cout << s << "\n";
   cout << " keys: ";
-  for(const_iterator key=this->begin(); key!=this->end(); ++key)
-    cout << formatter(*key) << "(" << this->getDim(key) << ") ";
+  for (const_iterator key = begin(); key != end(); ++key)
+    cout << formatter(*key) << "(" << getDim(key) << ") ";
   cout << "\n";
-  gtsam::print(Matrix(info_.full().selfadjointView()), "Augmented information matrix: ");
+  gtsam::print(Matrix(info_.full().selfadjointView()),
+      "Augmented information matrix: ");
 }

 /* ************************************************************************* */
 bool HessianFactor::equals(const GaussianFactor& lf, double tol) const {
-  if(!dynamic_cast<const HessianFactor*>(&lf))
+  if (!dynamic_cast<const HessianFactor*>(&lf))
     return false;
   else {
-    if(!Factor::equals(lf, tol))
+    if (!Factor::equals(lf, tol))
       return false;
-    Matrix thisMatrix = this->info_.full().selfadjointView();
-    thisMatrix(thisMatrix.rows()-1, thisMatrix.cols()-1) = 0.0;
-    Matrix rhsMatrix = static_cast<const HessianFactor&>(lf).info_.full().selfadjointView();
-    rhsMatrix(rhsMatrix.rows()-1, rhsMatrix.cols()-1) = 0.0;
+    Matrix thisMatrix = info_.full().selfadjointView();
+    thisMatrix(thisMatrix.rows() - 1, thisMatrix.cols() - 1) = 0.0;
+    Matrix rhsMatrix =
+        static_cast<const HessianFactor&>(lf).info_.full().selfadjointView();
+    rhsMatrix(rhsMatrix.rows() - 1, rhsMatrix.cols() - 1) = 0.0;
     return equal_with_abs_tol(thisMatrix, rhsMatrix, tol);
   }
 }

 /* ************************************************************************* */
-Matrix HessianFactor::augmentedInformation() const
-{
+Matrix HessianFactor::augmentedInformation() const {
   return info_.full().selfadjointView();
 }

 /* ************************************************************************* */
-Matrix HessianFactor::information() const
-{
-  return info_.range(0, this->size(), 0, this->size()).selfadjointView();
+Matrix HessianFactor::information() const {
+  return info_.range(0, size(), 0, size()).selfadjointView();
 }

 /* ************************************************************************* */
 VectorValues HessianFactor::hessianDiagonal() const {
   VectorValues d;
   // Loop over all variables
-  for (DenseIndex j = 0; j < (DenseIndex)size(); ++j) {
+  for (DenseIndex j = 0; j < (DenseIndex) size(); ++j) {
     // Get the diagonal block, and insert its diagonal
     Matrix B = info_(j, j).selfadjointView();
-    d.insert(keys_[j],B.diagonal());
+    d.insert(keys_[j], B.diagonal());
   }
   return d;
 }
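With the dynamic_cast dispatch gone, combining a graph into a single HessianFactor is just the merge constructor plus a Scatter for the slot layout, as EliminateCholesky does further down. A hedged, minimal sketch (the one-factor graph and key 1 are illustrative; Scatter is assumed to come in via <gtsam/linear/HessianFactor.h> after this change):

    #include <gtsam/linear/GaussianFactorGraph.h>
    #include <gtsam/linear/HessianFactor.h>

    int main() {
      gtsam::GaussianFactorGraph gfg;
      gfg.add(1, gtsam::Matrix::Identity(2, 2), gtsam::Vector::Zero(2),
              gtsam::noiseModel::Unit::Create(2));
      gtsam::Ordering keys;
      keys.push_back(1);
      gtsam::Scatter scatter(gfg, keys);            // keys to eliminate get the first slots
      gtsam::HessianFactor combined(gfg, scatter);  // accumulates A'A via updateHessian
      return 0;
    }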
@@ -366,26 +312,24 @@ void HessianFactor::hessianDiagonal(double* d) const {
 }

 /* ************************************************************************* */
-map<Key,Matrix> HessianFactor::hessianBlockDiagonal() const {
-  map<Key,Matrix> blocks;
+map<Key, Matrix> HessianFactor::hessianBlockDiagonal() const {
+  map<Key, Matrix> blocks;
   // Loop over all variables
-  for (DenseIndex j = 0; j < (DenseIndex)size(); ++j) {
+  for (DenseIndex j = 0; j < (DenseIndex) size(); ++j) {
     // Get the diagonal block, and insert it
     Matrix B = info_(j, j).selfadjointView();
-    blocks.insert(make_pair(keys_[j],B));
+    blocks.insert(make_pair(keys_[j], B));
   }
   return blocks;
 }

 /* ************************************************************************* */
-Matrix HessianFactor::augmentedJacobian() const
-{
+Matrix HessianFactor::augmentedJacobian() const {
   return JacobianFactor(*this).augmentedJacobian();
 }

 /* ************************************************************************* */
-std::pair<Matrix, Vector> HessianFactor::jacobian() const
-{
+std::pair<Matrix, Vector> HessianFactor::jacobian() const {
   return JacobianFactor(*this).jacobian();
 }

@@ -396,103 +340,34 @@ double HessianFactor::error(const VectorValues& c) const {
   double xtg = 0, xGx = 0;
   // extract the relevant subset of the VectorValues
   // NOTE may not be as efficient
-  const Vector x = c.vector(this->keys());
+  const Vector x = c.vector(keys());
   xtg = x.dot(linearTerm());
-  xGx = x.transpose() * info_.range(0, this->size(), 0, this->size()).selfadjointView() * x;
+  xGx = x.transpose() * info_.range(0, size(), 0, size()).selfadjointView() * x;
   return 0.5 * (f - 2.0 * xtg + xGx);
 }

 /* ************************************************************************* */
-void HessianFactor::updateATA(const HessianFactor& update, const Scatter& scatter)
-{
-  gttic(updateATA);
-  // This function updates 'combined' with the information in 'update'. 'scatter' maps variables in
-  // the update factor to slots in the combined factor.
-
-  // First build an array of slots
-  gttic(slots);
-  FastVector<DenseIndex> slots(update.size());
-  DenseIndex slot = 0;
-  BOOST_FOREACH(Key j, update) {
-    slots[slot] = scatter.at(j).slot;
-    ++ slot;
-  }
-  gttoc(slots);
-
+void HessianFactor::updateHessian(const FastVector<Key>& infoKeys,
+    SymmetricBlockMatrix* info) const {
+  gttic(updateHessian_HessianFactor);
   // Apply updates to the upper triangle
-  gttic(update);
-  size_t nrInfoBlocks = this->info_.nBlocks();
-  for(DenseIndex j2=0; j2<update.info_.nBlocks(); ++j2) {
-    DenseIndex slot2 = (j2 == (DenseIndex)update.size()) ? nrInfoBlocks-1 : slots[j2];
-    for(DenseIndex j1=0; j1<=j2; ++j1) {
-      DenseIndex slot1 = (j1 == (DenseIndex)update.size()) ? nrInfoBlocks-1 : slots[j1];
-      info_(slot1, slot2) += update.info_(j1, j2);
+  DenseIndex n = size(), N = info->nBlocks() - 1;
+  vector<DenseIndex> slots(n + 1);
+  for (DenseIndex j = 0; j <= n; ++j) {
+    const DenseIndex J = (j == n) ? N : Slot(infoKeys, keys_[j]);
+    slots[j] = J;
+    for (DenseIndex i = 0; i <= j; ++i) {
+      const DenseIndex I = slots[i]; // because i<=j, slots[i] is valid.
+      (*info)(I, J) += info_(i, j);
     }
   }
-  gttoc(update);
-}
-
-/* ************************************************************************* */
-void HessianFactor::updateATA(const JacobianFactor& update,
-    const Scatter& scatter) {
-
-  // This function updates 'combined' with the information in 'update'.
-  // 'scatter' maps variables in the update factor to slots in the combined
-  // factor.
-
-  gttic(updateATA);
-
-  if (update.rows() > 0) {
-    gttic(whiten);
-    // Whiten the factor if it has a noise model
-    boost::optional<JacobianFactor> _whitenedFactor;
-    const JacobianFactor* whitenedFactor = &update;
-    if (update.get_model()) {
-      if (update.get_model()->isConstrained())
-        throw invalid_argument(
-            "Cannot update HessianFactor from JacobianFactor with constrained noise model");
-      _whitenedFactor = update.whiten();
-      whitenedFactor = &(*_whitenedFactor);
-    }
-    gttoc(whiten);
-
-    // A is the whitened Jacobian matrix A, and we are going to perform I += A'*A below
-    const VerticalBlockMatrix& A = whitenedFactor->matrixObject();
-
-    // N is number of variables in HessianFactor, n in JacobianFactor
-    DenseIndex N = this->info_.nBlocks() - 1, n = A.nBlocks() - 1;
-
-    // First build an array of slots
-    gttic(slots);
-    FastVector<DenseIndex> slots(n + 1);
-    DenseIndex slot = 0;
-    BOOST_FOREACH(Key j, update)
-      slots[slot++] = scatter.at(j).slot;
-    slots[n] = N;
-    gttoc(slots);
-
-    // Apply updates to the upper triangle
-    gttic(update);
-    // Loop over blocks of A, including RHS with j==n
-    for (DenseIndex j = 0; j <= n; ++j) {
-      DenseIndex J = slots[j]; // get block in Hessian
-      // Fill off-diagonal blocks with Ai'*Aj
-      for (DenseIndex i = 0; i < j; ++i) {
-        DenseIndex I = slots[i]; // get block in Hessian
-        info_(I, J).knownOffDiagonal() += A(i).transpose() * A(j);
-      }
-      // Fill diagonal block with Aj'*Aj
-      info_(J, J).selfadjointView().rankUpdate(A(j).transpose());
-    }
-    gttoc(update);
-  }
 }

 /* ************************************************************************* */
-GaussianFactor::shared_ptr HessianFactor::negate() const
-{
+GaussianFactor::shared_ptr HessianFactor::negate() const {
   shared_ptr result = boost::make_shared<This>(*this);
-  result->info_.full().triangularView() = -result->info_.full().triangularView().nestedExpression(); // Negate the information matrix of the result
+  result->info_.full().triangularView() =
+      -result->info_.full().triangularView().nestedExpression(); // Negate the information matrix of the result
   return result;
 }

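A gtsam-free illustration of the slot bookkeeping in the new HessianFactor::updateHessian: each block (i, j) of this factor, including the trailing RHS block at index n, is scattered into block (I, J) of the combined matrix, where I and J come from a linear search over infoKeys. Sketch only, using plain std types with made-up key values:

    #include <algorithm>
    #include <cstddef>
    #include <vector>

    int main() {
      std::vector<unsigned long> infoKeys = {3, 5, 8};  // keys of the combined factor
      std::vector<unsigned long> keys = {8, 3};         // keys of this factor
      const std::size_t n = keys.size(), N = infoKeys.size();  // RHS blocks sit at n / N
      std::vector<std::size_t> slots(n + 1);
      for (std::size_t j = 0; j <= n; ++j) {
        const std::size_t J = (j == n) ? N
            : std::find(infoKeys.begin(), infoKeys.end(), keys[j]) - infoKeys.begin();
        slots[j] = J;
        for (std::size_t i = 0; i <= j; ++i) {
          const std::size_t I = slots[i];  // valid because i <= j
          // here the real code does (*info)(I, J) += info_(i, j);
          (void)I; (void)J;
        }
      }
      return 0;
    }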
@@ -509,7 +384,7 @@ void HessianFactor::multiplyHessianAdd(double alpha, const VectorValues& x,
   // Accessing the VectorValues one by one is expensive
   // So we will loop over columns to access x only once per column
   // And fill the above temporary y values, to be added into yvalues after
-  for (DenseIndex j = 0; j < (DenseIndex)size(); ++j) {
+  for (DenseIndex j = 0; j < (DenseIndex) size(); ++j) {
     // xj is the input vector
     Vector xj = x.at(keys_[j]);
     DenseIndex i = 0;
@@ -518,13 +393,13 @@ void HessianFactor::multiplyHessianAdd(double alpha, const VectorValues& x,
     // blocks on the diagonal are only half
     y[i] += info_(j, j).selfadjointView() * xj;
     // for below diagonal, we take transpose block from upper triangular part
-    for (i = j + 1; i < (DenseIndex)size(); ++i)
+    for (i = j + 1; i < (DenseIndex) size(); ++i)
       y[i] += info_(i, j).knownOffDiagonal() * xj;
   }

   // copy to yvalues
-  for(DenseIndex i = 0; i < (DenseIndex)size(); ++i) {
+  for (DenseIndex i = 0; i < (DenseIndex) size(); ++i) {
     bool didNotExist;
     VectorValues::iterator it;
     boost::tie(it, didNotExist) = yvalues.tryInsert(keys_[i], Vector());
     if (didNotExist)
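For reference (not in the patch): because only the upper triangle is stored, multiplyHessianAdd assembles G*x from the diagonal block, the stored upper blocks, and their transposes for the blocks below the diagonal. A small Eigen-only sketch of the same idea for a 2-variable factor:

    #include <Eigen/Dense>

    int main() {
      // Sketch only: y = alpha * G * x with G given by its upper-triangular blocks.
      Eigen::Matrix2d G00 = Eigen::Matrix2d::Identity();
      Eigen::Matrix2d G11 = Eigen::Matrix2d::Identity();
      Eigen::Matrix2d G01; G01 << 0, 1, 0, 0;            // stored upper off-diagonal block
      Eigen::Vector2d x0(1, 2), x1(3, 4);
      const double alpha = 1.0;
      Eigen::Vector2d y0 = alpha * (G00 * x0 + G01 * x1);              // diagonal + upper block
      Eigen::Vector2d y1 = alpha * (G01.transpose() * x0 + G11 * x1);  // transpose of stored block
      return static_cast<int>(y0.sum() + y1.sum());
    }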
@@ -539,7 +414,7 @@ VectorValues HessianFactor::gradientAtZero() const {
   VectorValues g;
   size_t n = size();
   for (size_t j = 0; j < n; ++j)
-    g.insert(keys_[j], -info_(j,n).knownOffDiagonal());
+    g.insert(keys_[j], -info_(j, n).knownOffDiagonal());
   return g;
 }

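Aside (not part of the patch): with the error convention used earlier in this file, error(x) = 0.5*x'*G*x - x'*g + 0.5*f, the gradient is G*x - g, so at x = 0 it is simply -g. gradientAtZero above reads off exactly that negated linear-term block, -info_(j, n), for each variable j.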
@@ -562,8 +437,7 @@ Vector HessianFactor::gradient(Key key, const VectorValues& x) const {
     if (i > j) {
       Matrix Gji = info(j, i);
       Gij = Gji.transpose();
-    }
-    else {
+    } else {
       Gij = info(i, j);
     }
     // Accumulate Gij*xj to gradf
@@ -575,30 +449,34 @@ Vector HessianFactor::gradient(Key key, const VectorValues& x) const {
 }

 /* ************************************************************************* */
-std::pair<boost::shared_ptr<GaussianConditional>, boost::shared_ptr<HessianFactor> >
-EliminateCholesky(const GaussianFactorGraph& factors, const Ordering& keys)
-{
+std::pair<boost::shared_ptr<GaussianConditional>,
+    boost::shared_ptr<HessianFactor> > EliminateCholesky(
+    const GaussianFactorGraph& factors, const Ordering& keys) {
   gttic(EliminateCholesky);

   // Build joint factor
   HessianFactor::shared_ptr jointFactor;
   try {
-    jointFactor = boost::make_shared<HessianFactor>(factors, Scatter(factors, keys));
-  } catch(std::invalid_argument&) {
+    jointFactor = boost::make_shared<HessianFactor>(factors,
+        Scatter(factors, keys));
+  } catch (std::invalid_argument&) {
     throw InvalidDenseElimination(
         "EliminateCholesky was called with a request to eliminate variables that are not\n"
         "involved in the provided factors.");
   }

   // Do dense elimination
   GaussianConditional::shared_ptr conditional;
   try {
     size_t numberOfKeysToEliminate = keys.size();
-    VerticalBlockMatrix Ab = jointFactor->info_.choleskyPartial(numberOfKeysToEliminate);
-    conditional = boost::make_shared<GaussianConditional>(jointFactor->keys(), numberOfKeysToEliminate, Ab);
+    VerticalBlockMatrix Ab = jointFactor->info_.choleskyPartial(
+        numberOfKeysToEliminate);
+    conditional = boost::make_shared<GaussianConditional>(jointFactor->keys(),
+        numberOfKeysToEliminate, Ab);
     // Erase the eliminated keys in the remaining factor
-    jointFactor->keys_.erase(jointFactor->begin(), jointFactor->begin() + numberOfKeysToEliminate);
-  } catch(CholeskyFailed&) {
+    jointFactor->keys_.erase(jointFactor->begin(),
+        jointFactor->begin() + numberOfKeysToEliminate);
+  } catch (CholeskyFailed&) {
     throw IndeterminantLinearSystemException(keys.front());
   }

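A hedged usage sketch of the dense elimination entry point (the two-key factor graph and keys 1 and 2 are illustrative; the pipeline is build joint HessianFactor with a Scatter, run choleskyPartial, split into conditional and remaining factor, exactly as the function above does):

    #include <gtsam/linear/GaussianConditional.h>
    #include <gtsam/linear/GaussianFactorGraph.h>
    #include <gtsam/linear/HessianFactor.h>

    int main() {
      gtsam::GaussianFactorGraph gfg;
      gfg.add(1, gtsam::Matrix::Identity(2, 2), 2, -gtsam::Matrix::Identity(2, 2),
              gtsam::Vector::Zero(2), gtsam::noiseModel::Unit::Create(2));
      gtsam::Ordering frontal;
      frontal.push_back(1);
      std::pair<gtsam::GaussianConditional::shared_ptr,
                gtsam::HessianFactor::shared_ptr> result =
          gtsam::EliminateCholesky(gfg, frontal);
      // result.first  : conditional P(x1 | x2) from the partial Cholesky factor
      // result.second : remaining HessianFactor on the separator key x2
      return 0;
    }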
@@ -607,9 +485,9 @@ EliminateCholesky(const GaussianFactorGraph& factors, const Ordering& keys)
 }

 /* ************************************************************************* */
-std::pair<boost::shared_ptr<GaussianConditional>, boost::shared_ptr<GaussianFactor> >
-EliminatePreferCholesky(const GaussianFactorGraph& factors, const Ordering& keys)
-{
+std::pair<boost::shared_ptr<GaussianConditional>,
+    boost::shared_ptr<GaussianFactor> > EliminatePreferCholesky(
+    const GaussianFactorGraph& factors, const Ordering& keys) {
   gttic(EliminatePreferCholesky);

   // If any JacobianFactors have constrained noise models, we have to convert
@@ -18,10 +18,10 @@

 #pragma once

+#include <gtsam/linear/GaussianFactor.h>
+#include <gtsam/linear/Scatter.h>
 #include <gtsam/base/SymmetricBlockMatrix.h>
 #include <gtsam/base/FastVector.h>
-#include <gtsam/base/FastMap.h>
-#include <gtsam/linear/GaussianFactor.h>

 #include <boost/make_shared.hpp>

@@ -41,30 +41,6 @@ namespace gtsam {
  GTSAM_EXPORT std::pair<boost::shared_ptr<GaussianConditional>, boost::shared_ptr<HessianFactor> >
    EliminateCholesky(const GaussianFactorGraph& factors, const Ordering& keys);

-  /**
-   * One SlotEntry stores the slot index for a variable, as well its dimension.
-   */
-  struct GTSAM_EXPORT SlotEntry {
-    size_t slot, dimension;
-    SlotEntry(size_t _slot, size_t _dimension)
-    : slot(_slot), dimension(_dimension) {}
-    std::string toString() const;
-  };
-
-  /**
-   * Scatter is an intermediate data structure used when building a HessianFactor
-   * incrementally, to get the keys in the right order. The "scatter" is a map from
-   * global variable indices to slot indices in the union of involved variables.
-   * We also include the dimensionality of the variable.
-   */
-  class Scatter: public FastMap<Key, SlotEntry> {
-  public:
-    Scatter() {
-    }
-    Scatter(const GaussianFactorGraph& gfg,
-        boost::optional<const Ordering&> ordering = boost::none);
-  };
-
  /**
   * @brief A Gaussian factor using the canonical parameters (information form)
   *
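The SlotEntry and Scatter declarations removed here are what the separate Scatter header (included above as <gtsam/linear/Scatter.h>) would carry. A hedged sketch of that header, reconstructed from the declarations removed in this hunk rather than copied from the actual new file:

    // Sketch only: contents inferred from the removed declarations.
    #pragma once
    #include <gtsam/base/FastMap.h>
    #include <gtsam/inference/Key.h>
    #include <gtsam/inference/Ordering.h>
    #include <boost/optional.hpp>
    #include <string>

    namespace gtsam {
    class GaussianFactorGraph;

    /// One SlotEntry stores the slot index for a variable, as well as its dimension.
    struct GTSAM_EXPORT SlotEntry {
      size_t slot, dimension;
      SlotEntry(size_t _slot, size_t _dimension)
          : slot(_slot), dimension(_dimension) {}
      std::string toString() const;
    };

    /// Map from global variable indices to slot indices in the union of involved variables.
    class Scatter : public FastMap<Key, SlotEntry> {
     public:
      Scatter() {}
      Scatter(const GaussianFactorGraph& gfg,
              boost::optional<const Ordering&> ordering = boost::none);
    };
    }  // namespace gtsam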
@ -363,19 +339,12 @@ namespace gtsam {
|
||||||
/** Return the full augmented Hessian matrix of this factor as a SymmetricBlockMatrix object. */
|
/** Return the full augmented Hessian matrix of this factor as a SymmetricBlockMatrix object. */
|
||||||
const SymmetricBlockMatrix& matrixObject() const { return info_; }
|
const SymmetricBlockMatrix& matrixObject() const { return info_; }
|
||||||
|
|
||||||
/** Update the factor by adding the information from the JacobianFactor
|
/** Update an information matrix by adding the information corresponding to this factor
|
||||||
* (used internally during elimination).
|
* (used internally during elimination).
|
||||||
* @param update The JacobianFactor containing the new information to add
|
|
||||||
* @param scatter A mapping from variable index to slot index in this HessianFactor
|
* @param scatter A mapping from variable index to slot index in this HessianFactor
|
||||||
|
* @param info The information matrix to be updated
|
||||||
*/
|
*/
|
||||||
void updateATA(const JacobianFactor& update, const Scatter& scatter);
|
void updateHessian(const FastVector<Key>& keys, SymmetricBlockMatrix* info) const;
|
||||||
|
|
||||||
/** Update the factor by adding the information from the HessianFactor
|
|
||||||
* (used internally during elimination).
|
|
||||||
* @param update The HessianFactor containing the new information to add
|
|
||||||
* @param scatter A mapping from variable index to slot index in this HessianFactor
|
|
||||||
*/
|
|
||||||
void updateATA(const HessianFactor& update, const Scatter& scatter);
|
|
||||||
|
|
||||||
/** y += alpha * A'*A*x */
|
/** y += alpha * A'*A*x */
|
||||||
void multiplyHessianAdd(double alpha, const VectorValues& x, VectorValues& y) const;
|
void multiplyHessianAdd(double alpha, const VectorValues& x, VectorValues& y) const;
|
||||||
|
|
|
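The header change above is the heart of this pull request: the two HessianFactor::updateATA overloads (one per factor type) are replaced by a single updateHessian through which each factor writes its own contribution into a caller-owned SymmetricBlockMatrix. A minimal sketch of the resulting "push" pattern follows, assuming updateHessian is declared virtual on the GaussianFactor base class (as the overrides elsewhere in this diff suggest); the pre-allocated info matrix, the slot-ordered key vector, and the helper name accumulateInformation are all assumptions of the sketch, not GTSAM API.

#include <gtsam/linear/GaussianFactorGraph.h>
#include <gtsam/base/SymmetricBlockMatrix.h>
#include <boost/foreach.hpp>

using namespace gtsam;

// Hypothetical helper: every factor pushes its A'A (or Hessian) blocks into the
// shared augmented Hessian via the new virtual method, regardless of its type.
void accumulateInformation(const GaussianFactorGraph& factors,
                           const FastVector<Key>& slotOrderedKeys,
                           SymmetricBlockMatrix* info) {
  BOOST_FOREACH(const GaussianFactor::shared_ptr& factor, factors) {
    if (factor)
      factor->updateHessian(slotOrderedKeys, info);  // virtual dispatch per factor type
  }
}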
@ -20,7 +20,7 @@
|
||||||
#include <gtsam/linear/linearExceptions.h>
|
#include <gtsam/linear/linearExceptions.h>
|
||||||
#include <gtsam/linear/GaussianConditional.h>
|
#include <gtsam/linear/GaussianConditional.h>
|
||||||
#include <gtsam/linear/JacobianFactor.h>
|
#include <gtsam/linear/JacobianFactor.h>
|
||||||
#include <gtsam/linear/HessianFactor.h>
|
#include <gtsam/linear/Scatter.h>
|
||||||
#include <gtsam/linear/GaussianFactorGraph.h>
|
#include <gtsam/linear/GaussianFactorGraph.h>
|
||||||
#include <gtsam/linear/VectorValues.h>
|
#include <gtsam/linear/VectorValues.h>
|
||||||
#include <gtsam/inference/VariableSlots.h>
|
#include <gtsam/inference/VariableSlots.h>
|
||||||
|
@ -497,6 +497,43 @@ map<Key, Matrix> JacobianFactor::hessianBlockDiagonal() const {
|
||||||
return blocks;
|
return blocks;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/* ************************************************************************* */
|
||||||
|
void JacobianFactor::updateHessian(const FastVector<Key>& infoKeys,
|
||||||
|
SymmetricBlockMatrix* info) const {
|
||||||
|
gttic(updateHessian_JacobianFactor);
|
||||||
|
|
||||||
|
if (rows() == 0) return;
|
||||||
|
|
||||||
|
// Whiten the factor if it has a noise model
|
||||||
|
const SharedDiagonal& model = get_model();
|
||||||
|
if (model && !model->isUnit()) {
|
||||||
|
if (model->isConstrained())
|
||||||
|
throw invalid_argument(
|
||||||
|
"JacobianFactor::updateHessian: cannot update information with "
|
||||||
|
"constrained noise model");
|
||||||
|
JacobianFactor whitenedFactor = whiten();
|
||||||
|
whitenedFactor.updateHessian(infoKeys, info);
|
||||||
|
} else {
|
||||||
|
// Ab_ is the augmented Jacobian matrix A, and we perform I += A'*A below
|
||||||
|
DenseIndex n = Ab_.nBlocks() - 1, N = info->nBlocks() - 1;
|
||||||
|
|
||||||
|
// Apply updates to the upper triangle
|
||||||
|
// Loop over blocks of A, including RHS with j==n
|
||||||
|
vector<DenseIndex> slots(n+1);
|
||||||
|
for (DenseIndex j = 0; j <= n; ++j) {
|
||||||
|
const DenseIndex J = (j == n) ? N : Slot(infoKeys, keys_[j]);
|
||||||
|
slots[j] = J;
|
||||||
|
// Fill off-diagonal blocks with Ai'*Aj
|
||||||
|
for (DenseIndex i = 0; i < j; ++i) {
|
||||||
|
const DenseIndex I = slots[i]; // because i<j, slots[i] is valid.
|
||||||
|
(*info)(I, J).knownOffDiagonal() += Ab_(i).transpose() * Ab_(j);
|
||||||
|
}
|
||||||
|
// Fill diagonal block with Aj'*Aj
|
||||||
|
(*info)(J, J).selfadjointView().rankUpdate(Ab_(j).transpose());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
/* ************************************************************************* */
|
/* ************************************************************************* */
|
||||||
Vector JacobianFactor::operator*(const VectorValues& x) const {
|
Vector JacobianFactor::operator*(const VectorValues& x) const {
|
||||||
Vector Ax = zero(Ab_.rows());
|
Vector Ax = zero(Ab_.rows());
|
||||||
|
|
|
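The loop above relies on Slot(infoKeys, keys_[j]) to translate the factor's local block index j into a block index in the destination matrix, with the factor's augmented RHS column always mapping to the last destination block. A small worked sketch of that mapping (the keys are hypothetical; list_of is from boost::assign, as used in the new unit test further down):

#include <gtsam/linear/GaussianFactor.h>
#include <boost/assign/list_of.hpp>
#include <cassert>

using boost::assign::list_of;
using namespace gtsam;

void slotExample() {
  // Destination matrix built over three variables, in this slot order:
  FastVector<Key> infoKeys = list_of(1)(2)(4);
  // A factor over keys {2, 4} therefore writes into destination blocks 1 and 2:
  assert(GaussianFactor::Slot(infoKeys, 2) == 1);
  assert(GaussianFactor::Slot(infoKeys, 4) == 2);
  // Its RHS column always goes into the last block, info->nBlocks() - 1.
}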
@ -273,6 +273,13 @@ namespace gtsam {
|
||||||
/** Get a view of the A matrix */
|
/** Get a view of the A matrix */
|
||||||
ABlock getA() { return Ab_.range(0, size()); }
|
ABlock getA() { return Ab_.range(0, size()); }
|
||||||
|
|
||||||
|
/** Update an information matrix by adding the information corresponding to this factor
|
||||||
|
* (used internally during elimination).
|
||||||
|
* @param scatter A mapping from variable index to slot index in this HessianFactor
|
||||||
|
* @param info The information matrix to be updated
|
||||||
|
*/
|
||||||
|
void updateHessian(const FastVector<Key>& keys, SymmetricBlockMatrix* info) const;
|
||||||
|
|
||||||
/** Return A*x */
|
/** Return A*x */
|
||||||
Vector operator*(const VectorValues& x) const;
|
Vector operator*(const VectorValues& x) const;
|
||||||
|
|
||||||
|
|
|
@ -376,8 +376,11 @@ double Constrained::distance(const Vector& v) const {
|
||||||
|
|
||||||
/* ************************************************************************* */
|
/* ************************************************************************* */
|
||||||
Matrix Constrained::Whiten(const Matrix& H) const {
|
Matrix Constrained::Whiten(const Matrix& H) const {
|
||||||
// selective scaling
|
Matrix A = H;
|
||||||
return vector_scale(invsigmas(), H, true);
|
for (DenseIndex i=0; i<(DenseIndex)dim_; ++i)
|
||||||
|
if (!constrained(i)) // if constrained, leave row of A as is
|
||||||
|
A.row(i) *= invsigmas_(i);
|
||||||
|
return A;
|
||||||
}
|
}
|
||||||
|
|
||||||
/* ************************************************************************* */
|
/* ************************************************************************* */
|
||||||
|
|
|
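The rewrite above makes the selective scaling explicit: rows whose sigma is zero (hard constraints) pass through untouched, while free rows are scaled by 1/sigma, instead of delegating to vector_scale. A small illustration of the behaviour, with made-up sigmas:

#include <gtsam/linear/NoiseModel.h>

using namespace gtsam;

void selectiveScalingExample() {
  Vector3 sigmas(0.0, 2.0, 0.0);  // rows 0 and 2 are constrained, row 1 is free
  noiseModel::Constrained::shared_ptr model =
      noiseModel::Constrained::MixedSigmas(sigmas);
  Matrix H = Matrix::Ones(3, 2);
  Matrix W = model->Whiten(H);    // row 1 scaled by 1/2.0; rows 0 and 2 left as-is
}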
@ -62,10 +62,11 @@ namespace gtsam {
|
||||||
Base(size_t dim = 1):dim_(dim) {}
|
Base(size_t dim = 1):dim_(dim) {}
|
||||||
virtual ~Base() {}
|
virtual ~Base() {}
|
||||||
|
|
||||||
/// true if a constrained noise mode, saves slow/clumsy dynamic casting
|
/// true if a constrained noise model, saves slow/clumsy dynamic casting
|
||||||
virtual bool isConstrained() const {
|
virtual bool isConstrained() const { return false; } // default false
|
||||||
return false; // default false
|
|
||||||
}
|
/// true if a unit noise model, saves slow/clumsy dynamic casting
|
||||||
|
virtual bool isUnit() const { return false; } // default false
|
||||||
|
|
||||||
/// Dimensionality
|
/// Dimensionality
|
||||||
inline size_t dim() const { return dim_;}
|
inline size_t dim() const { return dim_;}
|
||||||
|
@ -80,6 +81,9 @@ namespace gtsam {
|
||||||
/// Whiten an error vector.
|
/// Whiten an error vector.
|
||||||
virtual Vector whiten(const Vector& v) const = 0;
|
virtual Vector whiten(const Vector& v) const = 0;
|
||||||
|
|
||||||
|
/// Whiten a matrix.
|
||||||
|
virtual Matrix Whiten(const Matrix& H) const = 0;
|
||||||
|
|
||||||
/// Unwhiten an error vector.
|
/// Unwhiten an error vector.
|
||||||
virtual Vector unwhiten(const Vector& v) const = 0;
|
virtual Vector unwhiten(const Vector& v) const = 0;
|
||||||
|
|
||||||
|
@ -390,9 +394,7 @@ namespace gtsam {
|
||||||
virtual ~Constrained() {}
|
virtual ~Constrained() {}
|
||||||
|
|
||||||
/// true if a constrained noise mode, saves slow/clumsy dynamic casting
|
/// true if a constrained noise mode, saves slow/clumsy dynamic casting
|
||||||
virtual bool isConstrained() const {
|
virtual bool isConstrained() const { return true; }
|
||||||
return true;
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Return true if a particular dimension is free or constrained
|
/// Return true if a particular dimension is free or constrained
|
||||||
bool constrained(size_t i) const;
|
bool constrained(size_t i) const;
|
||||||
|
@ -590,6 +592,9 @@ namespace gtsam {
|
||||||
return shared_ptr(new Unit(dim));
|
return shared_ptr(new Unit(dim));
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// true if a unit noise model, saves slow/clumsy dynamic casting
|
||||||
|
virtual bool isUnit() const { return true; }
|
||||||
|
|
||||||
virtual void print(const std::string& name) const;
|
virtual void print(const std::string& name) const;
|
||||||
virtual double Mahalanobis(const Vector& v) const {return v.dot(v); }
|
virtual double Mahalanobis(const Vector& v) const {return v.dot(v); }
|
||||||
virtual Vector whiten(const Vector& v) const { return v; }
|
virtual Vector whiten(const Vector& v) const { return v; }
|
||||||
|
@ -854,6 +859,8 @@ namespace gtsam {
|
||||||
// TODO: functions below are dummy but necessary for the noiseModel::Base
|
// TODO: functions below are dummy but necessary for the noiseModel::Base
|
||||||
inline virtual Vector whiten(const Vector& v) const
|
inline virtual Vector whiten(const Vector& v) const
|
||||||
{ Vector r = v; this->WhitenSystem(r); return r; }
|
{ Vector r = v; this->WhitenSystem(r); return r; }
|
||||||
|
inline virtual Matrix Whiten(const Matrix& A) const
|
||||||
|
{ Vector b; Matrix B=A; this->WhitenSystem(B,b); return B; }
|
||||||
inline virtual Vector unwhiten(const Vector& /*v*/) const
|
inline virtual Vector unwhiten(const Vector& /*v*/) const
|
||||||
{ throw std::invalid_argument("unwhiten is not currently supported for robust noise models."); }
|
{ throw std::invalid_argument("unwhiten is not currently supported for robust noise models."); }
|
||||||
inline virtual double distance(const Vector& v) const
|
inline virtual double distance(const Vector& v) const
|
||||||
|
|
|
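Together, the new isConstrained()/isUnit() virtuals and the now pure-virtual Whiten(Matrix) let callers such as JacobianFactor::updateHessian test and whiten through the Base interface without dynamic casts. A minimal sketch of that calling pattern (the helper name whitenInPlace is made up; it mirrors the checks shown in updateHessian above):

#include <gtsam/linear/NoiseModel.h>
#include <stdexcept>

using namespace gtsam;

void whitenInPlace(const SharedDiagonal& model, Matrix& A, Vector& b) {
  if (!model || model->isUnit())
    return;  // unit (or missing) noise model: nothing to do
  if (model->isConstrained())
    throw std::invalid_argument("cannot whiten a constrained system here");
  A = model->Whiten(A);   // scale rows by 1/sigma
  b = model->whiten(b);
}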
@ -0,0 +1,81 @@
|
||||||
|
/* ----------------------------------------------------------------------------
|
||||||
|
|
||||||
|
* GTSAM Copyright 2010, Georgia Tech Research Corporation,
|
||||||
|
* Atlanta, Georgia 30332-0415
|
||||||
|
* All Rights Reserved
|
||||||
|
* Authors: Frank Dellaert, et al. (see THANKS for the full author list)
|
||||||
|
|
||||||
|
* See LICENSE for the license information
|
||||||
|
|
||||||
|
* -------------------------------------------------------------------------- */
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @file Scatter.cpp
|
||||||
|
* @author Richard Roberts
|
||||||
|
* @author Frank Dellaert
|
||||||
|
* @date June 2015
|
||||||
|
*/
|
||||||
|
|
||||||
|
#include <gtsam/linear/GaussianFactorGraph.h>
|
||||||
|
#include <gtsam/linear/Scatter.h>
|
||||||
|
#include <gtsam/inference/Ordering.h>
|
||||||
|
|
||||||
|
#include <boost/foreach.hpp>
|
||||||
|
|
||||||
|
using namespace std;
|
||||||
|
|
||||||
|
namespace gtsam {
|
||||||
|
|
||||||
|
/* ************************************************************************* */
|
||||||
|
string SlotEntry::toString() const {
|
||||||
|
ostringstream oss;
|
||||||
|
oss << "SlotEntry: slot=" << slot << ", dim=" << dimension;
|
||||||
|
return oss.str();
|
||||||
|
}
|
||||||
|
|
||||||
|
/* ************************************************************************* */
|
||||||
|
Scatter::Scatter(const GaussianFactorGraph& gfg,
|
||||||
|
boost::optional<const Ordering&> ordering) {
|
||||||
|
gttic(Scatter_Constructor);
|
||||||
|
static const DenseIndex none = std::numeric_limits<size_t>::max();
|
||||||
|
|
||||||
|
// First do the set union.
|
||||||
|
BOOST_FOREACH (const GaussianFactor::shared_ptr& factor, gfg) {
|
||||||
|
if (factor) {
|
||||||
|
for (GaussianFactor::const_iterator variable = factor->begin();
|
||||||
|
variable != factor->end(); ++variable) {
|
||||||
|
// TODO: Fix this hack to cope with zero-row Jacobians that come from
|
||||||
|
// BayesTreeOrphanWrappers
|
||||||
|
const JacobianFactor* asJacobian =
|
||||||
|
dynamic_cast<const JacobianFactor*>(factor.get());
|
||||||
|
if (!asJacobian || asJacobian->cols() > 1)
|
||||||
|
insert(
|
||||||
|
make_pair(*variable, SlotEntry(none, factor->getDim(variable))));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// If we have an ordering, pre-fill the ordered variables first
|
||||||
|
size_t slot = 0;
|
||||||
|
if (ordering) {
|
||||||
|
BOOST_FOREACH (Key key, *ordering) {
|
||||||
|
const_iterator entry = find(key);
|
||||||
|
if (entry == end())
|
||||||
|
throw std::invalid_argument(
|
||||||
|
"The ordering provided to the HessianFactor Scatter constructor\n"
|
||||||
|
"contained extra variables that did not appear in the factors to "
|
||||||
|
"combine.");
|
||||||
|
at(key).slot = (slot++);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Next fill in the slot indices (we can only get these after doing the set
|
||||||
|
// union.
|
||||||
|
BOOST_FOREACH (value_type& var_slot, *this) {
|
||||||
|
if (var_slot.second.slot == none) var_slot.second.slot = (slot++);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/* ************************************************************************* */
|
||||||
|
|
||||||
|
} // gtsam
|
|
@ -0,0 +1,61 @@
|
||||||
|
/* ----------------------------------------------------------------------------
|
||||||
|
|
||||||
|
* GTSAM Copyright 2010, Georgia Tech Research Corporation,
|
||||||
|
* Atlanta, Georgia 30332-0415
|
||||||
|
* All Rights Reserved
|
||||||
|
* Authors: Frank Dellaert, et al. (see THANKS for the full author list)
|
||||||
|
|
||||||
|
* See LICENSE for the license information
|
||||||
|
|
||||||
|
* -------------------------------------------------------------------------- */
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @file Scatter.h
|
||||||
|
* @brief Maps global variable indices to slot indices
|
||||||
|
* @author Richard Roberts
|
||||||
|
* @author Frank Dellaert
|
||||||
|
* @date June 2015
|
||||||
|
*/
|
||||||
|
|
||||||
|
#pragma once
|
||||||
|
|
||||||
|
#include <gtsam/inference/Key.h>
|
||||||
|
#include <gtsam/base/FastMap.h>
|
||||||
|
#include <gtsam/dllexport.h>
|
||||||
|
|
||||||
|
#include <boost/optional.hpp>
|
||||||
|
|
||||||
|
namespace gtsam {
|
||||||
|
|
||||||
|
class GaussianFactorGraph;
|
||||||
|
class Ordering;
|
||||||
|
|
||||||
|
/// One SlotEntry stores the slot index for a variable, as well its dim.
|
||||||
|
struct GTSAM_EXPORT SlotEntry {
|
||||||
|
DenseIndex slot;
|
||||||
|
size_t dimension;
|
||||||
|
SlotEntry(DenseIndex _slot, size_t _dimension)
|
||||||
|
: slot(_slot), dimension(_dimension) {}
|
||||||
|
std::string toString() const;
|
||||||
|
};
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Scatter is an intermediate data structure used when building a HessianFactor
|
||||||
|
* incrementally, to get the keys in the right order. The "scatter" is a map
|
||||||
|
* from global variable indices to slot indices in the union of involved
|
||||||
|
* variables. We also include the dimensionality of the variable.
|
||||||
|
*/
|
||||||
|
class Scatter : public FastMap<Key, SlotEntry> {
|
||||||
|
public:
|
||||||
|
/// Constructor
|
||||||
|
Scatter(const GaussianFactorGraph& gfg,
|
||||||
|
boost::optional<const Ordering&> ordering = boost::none);
|
||||||
|
|
||||||
|
/// Get the slot corresponding to the given key
|
||||||
|
DenseIndex slot(Key key) const { return at(key).slot; }
|
||||||
|
|
||||||
|
/// Get the dimension corresponding to the given key
|
||||||
|
DenseIndex dim(Key key) const { return at(key).dimension; }
|
||||||
|
};
|
||||||
|
|
||||||
|
} // \ namespace gtsam
|
|
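Taken together, Scatter.h and Scatter.cpp replace the SlotEntry/Scatter definitions removed from HessianFactor.h above. A short usage sketch follows; the two-variable graph mirrors the new testScatter.cpp further down, and the ordering shown is hypothetical:

#include <gtsam/linear/Scatter.h>
#include <gtsam/linear/GaussianFactorGraph.h>
#include <gtsam/inference/Ordering.h>

using namespace gtsam;

void scatterExample(const GaussianFactorGraph& gfg) {
  // Assume gfg only involves the keys 0 and 1 (as in testScatter.cpp below).
  Scatter unordered(gfg);          // slots assigned in increasing key order
  // unordered.slot(0) == 0, unordered.slot(1) == 1

  Ordering ordering;
  ordering.push_back(1);
  ordering.push_back(0);
  Scatter ordered(gfg, ordering);  // keys named in the ordering get the first slots
  // ordered.slot(1) == 0, ordered.slot(0) == 1; dim(key) gives each variable's dimension
}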
@ -10,7 +10,7 @@
|
||||||
* -------------------------------------------------------------------------- */
|
* -------------------------------------------------------------------------- */
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* @file testCholeskyFactor.cpp
|
* @file testHessianFactor.cpp
|
||||||
* @author Richard Roberts
|
* @author Richard Roberts
|
||||||
* @date Dec 15, 2010
|
* @date Dec 15, 2010
|
||||||
*/
|
*/
|
||||||
|
@ -38,6 +38,16 @@ using namespace gtsam;
|
||||||
|
|
||||||
const double tol = 1e-5;
|
const double tol = 1e-5;
|
||||||
|
|
||||||
|
/* ************************************************************************* */
|
||||||
|
TEST(HessianFactor, Slot)
|
||||||
|
{
|
||||||
|
FastVector<Key> keys = list_of(2)(4)(1);
|
||||||
|
EXPECT_LONGS_EQUAL(0, GaussianFactor::Slot(keys,2));
|
||||||
|
EXPECT_LONGS_EQUAL(1, GaussianFactor::Slot(keys,4));
|
||||||
|
EXPECT_LONGS_EQUAL(2, GaussianFactor::Slot(keys,1));
|
||||||
|
EXPECT_LONGS_EQUAL(3, GaussianFactor::Slot(keys,5)); // does not exist
|
||||||
|
}
|
||||||
|
|
||||||
/* ************************************************************************* */
|
/* ************************************************************************* */
|
||||||
TEST(HessianFactor, emptyConstructor)
|
TEST(HessianFactor, emptyConstructor)
|
||||||
{
|
{
|
||||||
|
@ -302,15 +312,17 @@ TEST(HessianFactor, CombineAndEliminate)
|
||||||
gfg.add(0, A10, 1, A11, b1, noiseModel::Diagonal::Sigmas(s1, true));
|
gfg.add(0, A10, 1, A11, b1, noiseModel::Diagonal::Sigmas(s1, true));
|
||||||
gfg.add(1, A21, b2, noiseModel::Diagonal::Sigmas(s2, true));
|
gfg.add(1, A21, b2, noiseModel::Diagonal::Sigmas(s2, true));
|
||||||
|
|
||||||
Matrix zero3x3 = zeros(3,3);
|
Matrix93 A0; A0 << A10, Z_3x3, Z_3x3;
|
||||||
Matrix A0 = gtsam::stack(3, &A10, &zero3x3, &zero3x3);
|
Matrix93 A1; A1 << A11, A01, A21;
|
||||||
Matrix A1 = gtsam::stack(3, &A11, &A01, &A21);
|
|
||||||
Vector9 b; b << b1, b0, b2;
|
Vector9 b; b << b1, b0, b2;
|
||||||
Vector9 sigmas; sigmas << s1, s0, s2;
|
Vector9 sigmas; sigmas << s1, s0, s2;
|
||||||
|
|
||||||
// create a full, uneliminated version of the factor
|
// create a full, uneliminated version of the factor
|
||||||
JacobianFactor expectedFactor(0, A0, 1, A1, b, noiseModel::Diagonal::Sigmas(sigmas, true));
|
JacobianFactor expectedFactor(0, A0, 1, A1, b, noiseModel::Diagonal::Sigmas(sigmas, true));
|
||||||
|
|
||||||
|
// Make sure combining works
|
||||||
|
EXPECT(assert_equal(HessianFactor(expectedFactor), HessianFactor(gfg), 1e-6));
|
||||||
|
|
||||||
// perform elimination on jacobian
|
// perform elimination on jacobian
|
||||||
GaussianConditional::shared_ptr expectedConditional;
|
GaussianConditional::shared_ptr expectedConditional;
|
||||||
JacobianFactor::shared_ptr expectedRemainingFactor;
|
JacobianFactor::shared_ptr expectedRemainingFactor;
|
||||||
|
|
|
@ -0,0 +1,63 @@
|
||||||
|
/* ----------------------------------------------------------------------------
|
||||||
|
|
||||||
|
* GTSAM Copyright 2010, Georgia Tech Research Corporation,
|
||||||
|
* Atlanta, Georgia 30332-0415
|
||||||
|
* All Rights Reserved
|
||||||
|
* Authors: Frank Dellaert, et al. (see THANKS for the full author list)
|
||||||
|
|
||||||
|
* See LICENSE for the license information
|
||||||
|
|
||||||
|
* -------------------------------------------------------------------------- */
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @file testScatter.cpp
|
||||||
|
* @author Frank Dellaert
|
||||||
|
* @date June, 2015
|
||||||
|
*/
|
||||||
|
|
||||||
|
#include <gtsam/linear/Scatter.h>
|
||||||
|
#include <gtsam/linear/GaussianFactorGraph.h>
|
||||||
|
#include <CppUnitLite/TestHarness.h>
|
||||||
|
|
||||||
|
using namespace std;
|
||||||
|
using namespace gtsam;
|
||||||
|
|
||||||
|
/* ************************************************************************* */
|
||||||
|
TEST(HessianFactor, CombineAndEliminate) {
|
||||||
|
static const size_t m = 3, n = 3;
|
||||||
|
Matrix A01 =
|
||||||
|
(Matrix(m, n) << 1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0).finished();
|
||||||
|
Vector3 b0(1.5, 1.5, 1.5);
|
||||||
|
Vector3 s0(1.6, 1.6, 1.6);
|
||||||
|
|
||||||
|
Matrix A10 =
|
||||||
|
(Matrix(m, n) << 2.0, 0.0, 0.0, 0.0, 2.0, 0.0, 0.0, 0.0, 2.0).finished();
|
||||||
|
Matrix A11 = (Matrix(m, n) << -2.0, 0.0, 0.0, 0.0, -2.0, 0.0, 0.0, 0.0, -2.0)
|
||||||
|
.finished();
|
||||||
|
Vector3 b1(2.5, 2.5, 2.5);
|
||||||
|
Vector3 s1(2.6, 2.6, 2.6);
|
||||||
|
|
||||||
|
Matrix A21 =
|
||||||
|
(Matrix(m, n) << 3.0, 0.0, 0.0, 0.0, 3.0, 0.0, 0.0, 0.0, 3.0).finished();
|
||||||
|
Vector3 b2(3.5, 3.5, 3.5);
|
||||||
|
Vector3 s2(3.6, 3.6, 3.6);
|
||||||
|
|
||||||
|
GaussianFactorGraph gfg;
|
||||||
|
gfg.add(1, A01, b0, noiseModel::Diagonal::Sigmas(s0, true));
|
||||||
|
gfg.add(0, A10, 1, A11, b1, noiseModel::Diagonal::Sigmas(s1, true));
|
||||||
|
gfg.add(1, A21, b2, noiseModel::Diagonal::Sigmas(s2, true));
|
||||||
|
|
||||||
|
Scatter scatter(gfg);
|
||||||
|
EXPECT_LONGS_EQUAL(2, scatter.size());
|
||||||
|
EXPECT_LONGS_EQUAL(0, scatter.at(0).slot);
|
||||||
|
EXPECT_LONGS_EQUAL(1, scatter.at(1).slot);
|
||||||
|
EXPECT_LONGS_EQUAL(n, scatter.at(0).dimension);
|
||||||
|
EXPECT_LONGS_EQUAL(n, scatter.at(1).dimension);
|
||||||
|
}
|
||||||
|
|
||||||
|
/* ************************************************************************* */
|
||||||
|
int main() {
|
||||||
|
TestResult tr;
|
||||||
|
return TestRegistry::runAllTests(tr);
|
||||||
|
}
|
||||||
|
/* ************************************************************************* */
|
|
@ -54,65 +54,45 @@ public:
|
||||||
double minModelFidelity; ///< Lower bound for the modelFidelity to accept the result of an LM iteration
|
double minModelFidelity; ///< Lower bound for the modelFidelity to accept the result of an LM iteration
|
||||||
std::string logFile; ///< an optional CSV log file, with [iteration, time, error, labda]
|
std::string logFile; ///< an optional CSV log file, with [iteration, time, error, labda]
|
||||||
bool diagonalDamping; ///< if true, use diagonal of Hessian
|
bool diagonalDamping; ///< if true, use diagonal of Hessian
|
||||||
bool reuse_diagonal_; ///< an additional option in Ceres for diagonalDamping (related to efficiency)
|
bool reuse_diagonal_; ///< an additional option in Ceres for diagonalDamping (TODO: should be in state?)
|
||||||
bool useFixedLambdaFactor_; ///< if true applies constant increase (or decrease) to lambda according to lambdaFactor
|
bool useFixedLambdaFactor_; ///< if true applies constant increase (or decrease) to lambda according to lambdaFactor
|
||||||
double min_diagonal_; ///< when using diagonal damping saturates the minimum diagonal entries (default: 1e-6)
|
double min_diagonal_; ///< when using diagonal damping saturates the minimum diagonal entries (default: 1e-6)
|
||||||
double max_diagonal_; ///< when using diagonal damping saturates the maximum diagonal entries (default: 1e32)
|
double max_diagonal_; ///< when using diagonal damping saturates the maximum diagonal entries (default: 1e32)
|
||||||
|
|
||||||
LevenbergMarquardtParams() :
|
LevenbergMarquardtParams()
|
||||||
lambdaInitial(1e-5), lambdaFactor(10.0), lambdaUpperBound(1e5), lambdaLowerBound(
|
: lambdaInitial(1e-5),
|
||||||
0.0), verbosityLM(SILENT), minModelFidelity(1e-3),
|
lambdaFactor(10.0),
|
||||||
diagonalDamping(false), reuse_diagonal_(false), useFixedLambdaFactor_(true),
|
lambdaUpperBound(1e5),
|
||||||
min_diagonal_(1e-6), max_diagonal_(1e32) {
|
lambdaLowerBound(0.0),
|
||||||
}
|
verbosityLM(SILENT),
|
||||||
virtual ~LevenbergMarquardtParams() {
|
minModelFidelity(1e-3),
|
||||||
}
|
diagonalDamping(false),
|
||||||
|
reuse_diagonal_(false),
|
||||||
|
useFixedLambdaFactor_(true),
|
||||||
|
min_diagonal_(1e-6),
|
||||||
|
max_diagonal_(1e32) {}
|
||||||
|
|
||||||
|
virtual ~LevenbergMarquardtParams() {}
|
||||||
virtual void print(const std::string& str = "") const;
|
virtual void print(const std::string& str = "") const;
|
||||||
|
inline double getlambdaInitial() const { return lambdaInitial; }
|
||||||
inline double getlambdaInitial() const {
|
inline double getlambdaFactor() const { return lambdaFactor; }
|
||||||
return lambdaInitial;
|
inline double getlambdaUpperBound() const { return lambdaUpperBound; }
|
||||||
}
|
inline double getlambdaLowerBound() const { return lambdaLowerBound; }
|
||||||
inline double getlambdaFactor() const {
|
|
||||||
return lambdaFactor;
|
|
||||||
}
|
|
||||||
inline double getlambdaUpperBound() const {
|
|
||||||
return lambdaUpperBound;
|
|
||||||
}
|
|
||||||
inline double getlambdaLowerBound() const {
|
|
||||||
return lambdaLowerBound;
|
|
||||||
}
|
|
||||||
inline std::string getVerbosityLM() const {
|
inline std::string getVerbosityLM() const {
|
||||||
return verbosityLMTranslator(verbosityLM);
|
return verbosityLMTranslator(verbosityLM);
|
||||||
}
|
}
|
||||||
inline std::string getLogFile() const {
|
inline std::string getLogFile() const { return logFile; }
|
||||||
return logFile;
|
inline bool getDiagonalDamping() const { return diagonalDamping; }
|
||||||
}
|
|
||||||
inline bool getDiagonalDamping() const {
|
|
||||||
return diagonalDamping;
|
|
||||||
}
|
|
||||||
|
|
||||||
inline void setlambdaInitial(double value) {
|
inline void setlambdaInitial(double value) { lambdaInitial = value; }
|
||||||
lambdaInitial = value;
|
inline void setlambdaFactor(double value) { lambdaFactor = value; }
|
||||||
}
|
inline void setlambdaUpperBound(double value) { lambdaUpperBound = value; }
|
||||||
inline void setlambdaFactor(double value) {
|
inline void setlambdaLowerBound(double value) { lambdaLowerBound = value; }
|
||||||
lambdaFactor = value;
|
inline void setVerbosityLM(const std::string& s) {
|
||||||
}
|
|
||||||
inline void setlambdaUpperBound(double value) {
|
|
||||||
lambdaUpperBound = value;
|
|
||||||
}
|
|
||||||
inline void setlambdaLowerBound(double value) {
|
|
||||||
lambdaLowerBound = value;
|
|
||||||
}
|
|
||||||
inline void setVerbosityLM(const std::string &s) {
|
|
||||||
verbosityLM = verbosityLMTranslator(s);
|
verbosityLM = verbosityLMTranslator(s);
|
||||||
}
|
}
|
||||||
inline void setLogFile(const std::string &s) {
|
inline void setLogFile(const std::string& s) { logFile = s; }
|
||||||
logFile = s;
|
inline void setDiagonalDamping(bool flag) { diagonalDamping = flag; }
|
||||||
}
|
|
||||||
inline void setDiagonalDamping(bool flag) {
|
|
||||||
diagonalDamping = flag;
|
|
||||||
}
|
|
||||||
inline void setUseFixedLambdaFactor(bool flag) {
|
inline void setUseFixedLambdaFactor(bool flag) {
|
||||||
useFixedLambdaFactor_ = flag;
|
useFixedLambdaFactor_ = flag;
|
||||||
}
|
}
|
||||||
|
|
|
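The parameter block itself is unchanged here; this hunk only reformats it and collapses the getters/setters onto single lines. For reference, a typical configuration through those setters looks like the following sketch (the values are illustrative only, and graph and initial are assumed to exist):

#include <gtsam/nonlinear/LevenbergMarquardtOptimizer.h>

using namespace gtsam;

Values optimizeWithDamping(const NonlinearFactorGraph& graph, const Values& initial) {
  LevenbergMarquardtParams params;
  params.setlambdaInitial(1e-3);      // start with more damping than the 1e-5 default
  params.setlambdaUpperBound(1e7);
  params.setDiagonalDamping(true);    // use the diagonal of the Hessian, as documented above
  params.setVerbosityLM("SUMMARY");
  return LevenbergMarquardtOptimizer(graph, initial, params).optimize();
}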
@ -68,6 +68,7 @@ void NonlinearOptimizer::defaultOptimize() {
|
||||||
// Do next iteration
|
// Do next iteration
|
||||||
currentError = this->error();
|
currentError = this->error();
|
||||||
this->iterate();
|
this->iterate();
|
||||||
|
tictoc_finishedIteration();
|
||||||
|
|
||||||
// Maybe show output
|
// Maybe show output
|
||||||
if(params.verbosity >= NonlinearOptimizerParams::VALUES) this->values().print("newValues");
|
if(params.verbosity >= NonlinearOptimizerParams::VALUES) this->values().print("newValues");
|
||||||
|
|
|
@ -25,13 +25,16 @@
|
||||||
#include <gtsam/geometry/Point3.h>
|
#include <gtsam/geometry/Point3.h>
|
||||||
#include <gtsam/geometry/Pose3.h>
|
#include <gtsam/geometry/Pose3.h>
|
||||||
#include <gtsam/nonlinear/NonlinearFactor.h>
|
#include <gtsam/nonlinear/NonlinearFactor.h>
|
||||||
|
#include <gtsam/linear/BinaryJacobianFactor.h>
|
||||||
#include <gtsam/linear/NoiseModel.h>
|
#include <gtsam/linear/NoiseModel.h>
|
||||||
#include <gtsam/base/concepts.h>
|
#include <gtsam/base/concepts.h>
|
||||||
#include <gtsam/base/Manifold.h>
|
#include <gtsam/base/Manifold.h>
|
||||||
#include <gtsam/base/Matrix.h>
|
#include <gtsam/base/Matrix.h>
|
||||||
|
#include <gtsam/base/SymmetricBlockMatrix.h>
|
||||||
#include <gtsam/base/types.h>
|
#include <gtsam/base/types.h>
|
||||||
#include <gtsam/base/Testable.h>
|
#include <gtsam/base/Testable.h>
|
||||||
#include <gtsam/base/Vector.h>
|
#include <gtsam/base/Vector.h>
|
||||||
|
#include <gtsam/base/timing.h>
|
||||||
|
|
||||||
#include <boost/none.hpp>
|
#include <boost/none.hpp>
|
||||||
#include <boost/optional/optional.hpp>
|
#include <boost/optional/optional.hpp>
|
||||||
|
@ -48,209 +51,249 @@ class access;
|
||||||
|
|
||||||
namespace gtsam {
|
namespace gtsam {
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Non-linear factor for a constraint derived from a 2D measurement.
|
* Non-linear factor for a constraint derived from a 2D measurement.
|
||||||
* The calibration is unknown here compared to GenericProjectionFactor
|
* The calibration is unknown here compared to GenericProjectionFactor
|
||||||
* @addtogroup SLAM
|
* @addtogroup SLAM
|
||||||
*/
|
*/
|
||||||
template <class CAMERA, class LANDMARK>
|
template<class CAMERA, class LANDMARK>
|
||||||
class GeneralSFMFactor: public NoiseModelFactor2<CAMERA, LANDMARK> {
|
class GeneralSFMFactor: public NoiseModelFactor2<CAMERA, LANDMARK> {
|
||||||
|
|
||||||
GTSAM_CONCEPT_MANIFOLD_TYPE(CAMERA)
|
GTSAM_CONCEPT_MANIFOLD_TYPE(CAMERA);
|
||||||
GTSAM_CONCEPT_MANIFOLD_TYPE(LANDMARK)
|
GTSAM_CONCEPT_MANIFOLD_TYPE(LANDMARK);
|
||||||
|
|
||||||
static const int DimC = FixedDimension<CAMERA>::value;
|
static const int DimC = FixedDimension<CAMERA>::value;
|
||||||
static const int DimL = FixedDimension<LANDMARK>::value;
|
static const int DimL = FixedDimension<LANDMARK>::value;
|
||||||
|
typedef Eigen::Matrix<double, 2, DimC> JacobianC;
|
||||||
|
typedef Eigen::Matrix<double, 2, DimL> JacobianL;
|
||||||
|
|
||||||
protected:
|
protected:
|
||||||
|
|
||||||
Point2 measured_; ///< the 2D measurement
|
Point2 measured_; ///< the 2D measurement
|
||||||
|
|
||||||
public:
|
public:
|
||||||
|
|
||||||
typedef GeneralSFMFactor<CAMERA, LANDMARK> This; ///< typedef for this object
|
typedef GeneralSFMFactor<CAMERA, LANDMARK> This;///< typedef for this object
|
||||||
typedef NoiseModelFactor2<CAMERA, LANDMARK> Base; ///< typedef for the base class
|
typedef NoiseModelFactor2<CAMERA, LANDMARK> Base;///< typedef for the base class
|
||||||
|
|
||||||
// shorthand for a smart pointer to a factor
|
// shorthand for a smart pointer to a factor
|
||||||
typedef boost::shared_ptr<This> shared_ptr;
|
typedef boost::shared_ptr<This> shared_ptr;
|
||||||
|
|
||||||
/**
|
|
||||||
* Constructor
|
|
||||||
* @param measured is the 2 dimensional location of point in image (the measurement)
|
|
||||||
* @param model is the standard deviation of the measurements
|
|
||||||
* @param cameraKey is the index of the camera
|
|
||||||
* @param landmarkKey is the index of the landmark
|
|
||||||
*/
|
|
||||||
GeneralSFMFactor(const Point2& measured, const SharedNoiseModel& model, Key cameraKey, Key landmarkKey) :
|
|
||||||
Base(model, cameraKey, landmarkKey), measured_(measured) {}
|
|
||||||
|
|
||||||
GeneralSFMFactor():measured_(0.0,0.0) {} ///< default constructor
|
|
||||||
GeneralSFMFactor(const Point2 & p):measured_(p) {} ///< constructor that takes a Point2
|
|
||||||
GeneralSFMFactor(double x, double y):measured_(x,y) {} ///< constructor that takes doubles x,y to make a Point2
|
|
||||||
|
|
||||||
virtual ~GeneralSFMFactor() {} ///< destructor
|
|
||||||
|
|
||||||
/// @return a deep copy of this factor
|
|
||||||
virtual gtsam::NonlinearFactor::shared_ptr clone() const {
|
|
||||||
return boost::static_pointer_cast<gtsam::NonlinearFactor>(
|
|
||||||
gtsam::NonlinearFactor::shared_ptr(new This(*this))); }
|
|
||||||
|
|
||||||
/**
|
|
||||||
* print
|
|
||||||
* @param s optional string naming the factor
|
|
||||||
* @param keyFormatter optional formatter for printing out Symbols
|
|
||||||
*/
|
|
||||||
void print(const std::string& s = "SFMFactor", const KeyFormatter& keyFormatter = DefaultKeyFormatter) const {
|
|
||||||
Base::print(s, keyFormatter);
|
|
||||||
measured_.print(s + ".z");
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* equals
|
|
||||||
*/
|
|
||||||
bool equals(const NonlinearFactor &p, double tol = 1e-9) const {
|
|
||||||
const This* e = dynamic_cast<const This*>(&p);
|
|
||||||
return e && Base::equals(p, tol) && this->measured_.equals(e->measured_, tol) ;
|
|
||||||
}
|
|
||||||
|
|
||||||
/** h(x)-z */
|
|
||||||
Vector evaluateError(const CAMERA& camera, const LANDMARK& point,
|
|
||||||
boost::optional<Matrix&> H1=boost::none, boost::optional<Matrix&> H2=boost::none) const {
|
|
||||||
|
|
||||||
try {
|
|
||||||
Point2 reprojError(camera.project2(point,H1,H2) - measured_);
|
|
||||||
return reprojError.vector();
|
|
||||||
}
|
|
||||||
catch( CheiralityException& e) {
|
|
||||||
if (H1) *H1 = zeros(2, DimC);
|
|
||||||
if (H2) *H2 = zeros(2, DimL);
|
|
||||||
std::cout << e.what() << ": Landmark "<< DefaultKeyFormatter(this->key2())
|
|
||||||
<< " behind Camera " << DefaultKeyFormatter(this->key1()) << std::endl;
|
|
||||||
return zero(2);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/** return the measured */
|
|
||||||
inline const Point2 measured() const {
|
|
||||||
return measured_;
|
|
||||||
}
|
|
||||||
|
|
||||||
private:
|
|
||||||
/** Serialization function */
|
|
||||||
friend class boost::serialization::access;
|
|
||||||
template<class Archive>
|
|
||||||
void serialize(Archive & ar, const unsigned int /*version*/) {
|
|
||||||
ar & boost::serialization::make_nvp("NoiseModelFactor2",
|
|
||||||
boost::serialization::base_object<Base>(*this));
|
|
||||||
ar & BOOST_SERIALIZATION_NVP(measured_);
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
template<class CAMERA, class LANDMARK>
|
|
||||||
struct traits<GeneralSFMFactor<CAMERA, LANDMARK> > : Testable<
|
|
||||||
GeneralSFMFactor<CAMERA, LANDMARK> > {
|
|
||||||
};
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Non-linear factor for a constraint derived from a 2D measurement.
|
* Constructor
|
||||||
* Compared to GeneralSFMFactor, it is a ternary-factor because the calibration is isolated from camera..
|
* @param measured is the 2 dimensional location of point in image (the measurement)
|
||||||
|
* @param model is the standard deviation of the measurements
|
||||||
|
* @param cameraKey is the index of the camera
|
||||||
|
* @param landmarkKey is the index of the landmark
|
||||||
*/
|
*/
|
||||||
template <class CALIBRATION>
|
GeneralSFMFactor(const Point2& measured, const SharedNoiseModel& model, Key cameraKey, Key landmarkKey) :
|
||||||
class GeneralSFMFactor2: public NoiseModelFactor3<Pose3, Point3, CALIBRATION> {
|
Base(model, cameraKey, landmarkKey), measured_(measured) {}
|
||||||
|
|
||||||
GTSAM_CONCEPT_MANIFOLD_TYPE(CALIBRATION)
|
GeneralSFMFactor():measured_(0.0,0.0) {} ///< default constructor
|
||||||
static const int DimK = FixedDimension<CALIBRATION>::value;
|
GeneralSFMFactor(const Point2 & p):measured_(p) {} ///< constructor that takes a Point2
|
||||||
|
GeneralSFMFactor(double x, double y):measured_(x,y) {} ///< constructor that takes doubles x,y to make a Point2
|
||||||
|
|
||||||
protected:
|
virtual ~GeneralSFMFactor() {} ///< destructor
|
||||||
|
|
||||||
Point2 measured_; ///< the 2D measurement
|
/// @return a deep copy of this factor
|
||||||
|
virtual gtsam::NonlinearFactor::shared_ptr clone() const {
|
||||||
|
return boost::static_pointer_cast<gtsam::NonlinearFactor>(
|
||||||
|
gtsam::NonlinearFactor::shared_ptr(new This(*this)));}
|
||||||
|
|
||||||
public:
|
/**
|
||||||
|
* print
|
||||||
|
* @param s optional string naming the factor
|
||||||
|
* @param keyFormatter optional formatter for printing out Symbols
|
||||||
|
*/
|
||||||
|
void print(const std::string& s = "SFMFactor", const KeyFormatter& keyFormatter = DefaultKeyFormatter) const {
|
||||||
|
Base::print(s, keyFormatter);
|
||||||
|
measured_.print(s + ".z");
|
||||||
|
}
|
||||||
|
|
||||||
typedef GeneralSFMFactor2<CALIBRATION> This;
|
/**
|
||||||
typedef PinholeCamera<CALIBRATION> Camera; ///< typedef for camera type
|
* equals
|
||||||
typedef NoiseModelFactor3<Pose3, Point3, CALIBRATION> Base; ///< typedef for the base class
|
*/
|
||||||
|
bool equals(const NonlinearFactor &p, double tol = 1e-9) const {
|
||||||
|
const This* e = dynamic_cast<const This*>(&p);
|
||||||
|
return e && Base::equals(p, tol) && this->measured_.equals(e->measured_, tol);
|
||||||
|
}
|
||||||
|
|
||||||
// shorthand for a smart pointer to a factor
|
/** h(x)-z */
|
||||||
typedef boost::shared_ptr<This> shared_ptr;
|
Vector evaluateError(const CAMERA& camera, const LANDMARK& point,
|
||||||
|
boost::optional<Matrix&> H1=boost::none, boost::optional<Matrix&> H2=boost::none) const {
|
||||||
/**
|
try {
|
||||||
* Constructor
|
Point2 reprojError(camera.project2(point,H1,H2) - measured_);
|
||||||
* @param measured is the 2 dimensional location of point in image (the measurement)
|
return reprojError.vector();
|
||||||
* @param model is the standard deviation of the measurements
|
|
||||||
* @param poseKey is the index of the camera
|
|
||||||
* @param landmarkKey is the index of the landmark
|
|
||||||
* @param calibKey is the index of the calibration
|
|
||||||
*/
|
|
||||||
GeneralSFMFactor2(const Point2& measured, const SharedNoiseModel& model, Key poseKey, Key landmarkKey, Key calibKey) :
|
|
||||||
Base(model, poseKey, landmarkKey, calibKey), measured_(measured) {}
|
|
||||||
GeneralSFMFactor2():measured_(0.0,0.0) {} ///< default constructor
|
|
||||||
|
|
||||||
virtual ~GeneralSFMFactor2() {} ///< destructor
|
|
||||||
|
|
||||||
/// @return a deep copy of this factor
|
|
||||||
virtual gtsam::NonlinearFactor::shared_ptr clone() const {
|
|
||||||
return boost::static_pointer_cast<gtsam::NonlinearFactor>(
|
|
||||||
gtsam::NonlinearFactor::shared_ptr(new This(*this))); }
|
|
||||||
|
|
||||||
/**
|
|
||||||
* print
|
|
||||||
* @param s optional string naming the factor
|
|
||||||
* @param keyFormatter optional formatter useful for printing Symbols
|
|
||||||
*/
|
|
||||||
void print(const std::string& s = "SFMFactor2", const KeyFormatter& keyFormatter = DefaultKeyFormatter) const {
|
|
||||||
Base::print(s, keyFormatter);
|
|
||||||
measured_.print(s + ".z");
|
|
||||||
}
|
}
|
||||||
|
catch( CheiralityException& e) {
|
||||||
/**
|
if (H1) *H1 = JacobianC::Zero();
|
||||||
* equals
|
if (H2) *H2 = JacobianL::Zero();
|
||||||
*/
|
// TODO warn if verbose output asked for
|
||||||
bool equals(const NonlinearFactor &p, double tol = 1e-9) const {
|
|
||||||
const This* e = dynamic_cast<const This*>(&p);
|
|
||||||
return e && Base::equals(p, tol) && this->measured_.equals(e->measured_, tol) ;
|
|
||||||
}
|
|
||||||
|
|
||||||
/** h(x)-z */
|
|
||||||
Vector evaluateError(const Pose3& pose3, const Point3& point, const CALIBRATION &calib,
|
|
||||||
boost::optional<Matrix&> H1=boost::none,
|
|
||||||
boost::optional<Matrix&> H2=boost::none,
|
|
||||||
boost::optional<Matrix&> H3=boost::none) const
|
|
||||||
{
|
|
||||||
try {
|
|
||||||
Camera camera(pose3,calib);
|
|
||||||
Point2 reprojError(camera.project(point, H1, H2, H3) - measured_);
|
|
||||||
return reprojError.vector();
|
|
||||||
}
|
|
||||||
catch( CheiralityException& e) {
|
|
||||||
if (H1) *H1 = zeros(2, 6);
|
|
||||||
if (H2) *H2 = zeros(2, 3);
|
|
||||||
if (H3) *H3 = zeros(2, DimK);
|
|
||||||
std::cout << e.what() << ": Landmark "<< DefaultKeyFormatter(this->key2())
|
|
||||||
<< " behind Camera " << DefaultKeyFormatter(this->key1()) << std::endl;
|
|
||||||
}
|
|
||||||
return zero(2);
|
return zero(2);
|
||||||
}
|
}
|
||||||
|
}
|
||||||
|
|
||||||
/** return the measured */
|
/// Linearize using fixed-size matrices
|
||||||
inline const Point2 measured() const {
|
boost::shared_ptr<GaussianFactor> linearize(const Values& values) const {
|
||||||
return measured_;
|
// Only linearize if the factor is active
|
||||||
|
if (!this->active(values)) return boost::shared_ptr<JacobianFactor>();
|
||||||
|
|
||||||
|
const Key key1 = this->key1(), key2 = this->key2();
|
||||||
|
JacobianC H1;
|
||||||
|
JacobianL H2;
|
||||||
|
Vector2 b;
|
||||||
|
try {
|
||||||
|
const CAMERA& camera = values.at<CAMERA>(key1);
|
||||||
|
const LANDMARK& point = values.at<LANDMARK>(key2);
|
||||||
|
Point2 reprojError(camera.project2(point, H1, H2) - measured());
|
||||||
|
b = -reprojError.vector();
|
||||||
|
} catch (CheiralityException& e) {
|
||||||
|
H1.setZero();
|
||||||
|
H2.setZero();
|
||||||
|
b.setZero();
|
||||||
|
// TODO warn if verbose output asked for
|
||||||
}
|
}
|
||||||
|
|
||||||
private:
|
// Whiten the system if needed
|
||||||
/** Serialization function */
|
const SharedNoiseModel& noiseModel = this->get_noiseModel();
|
||||||
friend class boost::serialization::access;
|
if (noiseModel && !noiseModel->isUnit()) {
|
||||||
template<class Archive>
|
// TODO: implement WhitenSystem for fixed size matrices and include
|
||||||
void serialize(Archive & ar, const unsigned int /*version*/) {
|
// above
|
||||||
ar & boost::serialization::make_nvp("NoiseModelFactor3",
|
H1 = noiseModel->Whiten(H1);
|
||||||
boost::serialization::base_object<Base>(*this));
|
H2 = noiseModel->Whiten(H2);
|
||||||
ar & BOOST_SERIALIZATION_NVP(measured_);
|
b = noiseModel->Whiten(b);
|
||||||
}
|
}
|
||||||
};
|
|
||||||
|
|
||||||
template<class CALIBRATION>
|
// Create new (unit) noiseModel, preserving constraints if applicable
|
||||||
struct traits<GeneralSFMFactor2<CALIBRATION> > : Testable<
|
SharedDiagonal model;
|
||||||
GeneralSFMFactor2<CALIBRATION> > {
|
if (noiseModel && noiseModel->isConstrained()) {
|
||||||
};
|
model = boost::static_pointer_cast<noiseModel::Constrained>(noiseModel)->unit();
|
||||||
|
}
|
||||||
|
|
||||||
|
return boost::make_shared<BinaryJacobianFactor<2, DimC, DimL> >(key1, H1, key2, H2, b, model);
|
||||||
|
}
|
||||||
|
|
||||||
|
/** return the measured */
|
||||||
|
inline const Point2 measured() const {
|
||||||
|
return measured_;
|
||||||
|
}
|
||||||
|
|
||||||
|
private:
|
||||||
|
/** Serialization function */
|
||||||
|
friend class boost::serialization::access;
|
||||||
|
template<class Archive>
|
||||||
|
void serialize(Archive & ar, const unsigned int /*version*/) {
|
||||||
|
ar & boost::serialization::make_nvp("NoiseModelFactor2",
|
||||||
|
boost::serialization::base_object<Base>(*this));
|
||||||
|
ar & BOOST_SERIALIZATION_NVP(measured_);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
template<class CAMERA, class LANDMARK>
|
||||||
|
struct traits<GeneralSFMFactor<CAMERA, LANDMARK> > : Testable<
|
||||||
|
GeneralSFMFactor<CAMERA, LANDMARK> > {
|
||||||
|
};
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Non-linear factor for a constraint derived from a 2D measurement.
|
||||||
|
* Compared to GeneralSFMFactor, it is a ternary-factor because the calibration is isolated from camera..
|
||||||
|
*/
|
||||||
|
template<class CALIBRATION>
|
||||||
|
class GeneralSFMFactor2: public NoiseModelFactor3<Pose3, Point3, CALIBRATION> {
|
||||||
|
|
||||||
|
GTSAM_CONCEPT_MANIFOLD_TYPE(CALIBRATION);
|
||||||
|
static const int DimK = FixedDimension<CALIBRATION>::value;
|
||||||
|
|
||||||
|
protected:
|
||||||
|
|
||||||
|
Point2 measured_; ///< the 2D measurement
|
||||||
|
|
||||||
|
public:
|
||||||
|
|
||||||
|
typedef GeneralSFMFactor2<CALIBRATION> This;
|
||||||
|
typedef PinholeCamera<CALIBRATION> Camera;///< typedef for camera type
|
||||||
|
typedef NoiseModelFactor3<Pose3, Point3, CALIBRATION> Base;///< typedef for the base class
|
||||||
|
|
||||||
|
// shorthand for a smart pointer to a factor
|
||||||
|
typedef boost::shared_ptr<This> shared_ptr;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Constructor
|
||||||
|
* @param measured is the 2 dimensional location of point in image (the measurement)
|
||||||
|
* @param model is the standard deviation of the measurements
|
||||||
|
* @param poseKey is the index of the camera
|
||||||
|
* @param landmarkKey is the index of the landmark
|
||||||
|
* @param calibKey is the index of the calibration
|
||||||
|
*/
|
||||||
|
GeneralSFMFactor2(const Point2& measured, const SharedNoiseModel& model, Key poseKey, Key landmarkKey, Key calibKey) :
|
||||||
|
Base(model, poseKey, landmarkKey, calibKey), measured_(measured) {}
|
||||||
|
GeneralSFMFactor2():measured_(0.0,0.0) {} ///< default constructor
|
||||||
|
|
||||||
|
virtual ~GeneralSFMFactor2() {} ///< destructor
|
||||||
|
|
||||||
|
/// @return a deep copy of this factor
|
||||||
|
virtual gtsam::NonlinearFactor::shared_ptr clone() const {
|
||||||
|
return boost::static_pointer_cast<gtsam::NonlinearFactor>(
|
||||||
|
gtsam::NonlinearFactor::shared_ptr(new This(*this)));}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* print
|
||||||
|
* @param s optional string naming the factor
|
||||||
|
* @param keyFormatter optional formatter useful for printing Symbols
|
||||||
|
*/
|
||||||
|
void print(const std::string& s = "SFMFactor2", const KeyFormatter& keyFormatter = DefaultKeyFormatter) const {
|
||||||
|
Base::print(s, keyFormatter);
|
||||||
|
measured_.print(s + ".z");
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* equals
|
||||||
|
*/
|
||||||
|
bool equals(const NonlinearFactor &p, double tol = 1e-9) const {
|
||||||
|
const This* e = dynamic_cast<const This*>(&p);
|
||||||
|
return e && Base::equals(p, tol) && this->measured_.equals(e->measured_, tol);
|
||||||
|
}
|
||||||
|
|
||||||
|
/** h(x)-z */
|
||||||
|
Vector evaluateError(const Pose3& pose3, const Point3& point, const CALIBRATION &calib,
|
||||||
|
boost::optional<Matrix&> H1=boost::none,
|
||||||
|
boost::optional<Matrix&> H2=boost::none,
|
||||||
|
boost::optional<Matrix&> H3=boost::none) const
|
||||||
|
{
|
||||||
|
try {
|
||||||
|
Camera camera(pose3,calib);
|
||||||
|
Point2 reprojError(camera.project(point, H1, H2, H3) - measured_);
|
||||||
|
return reprojError.vector();
|
||||||
|
}
|
||||||
|
catch( CheiralityException& e) {
|
||||||
|
if (H1) *H1 = zeros(2, 6);
|
||||||
|
if (H2) *H2 = zeros(2, 3);
|
||||||
|
if (H3) *H3 = zeros(2, DimK);
|
||||||
|
std::cout << e.what() << ": Landmark "<< DefaultKeyFormatter(this->key2())
|
||||||
|
<< " behind Camera " << DefaultKeyFormatter(this->key1()) << std::endl;
|
||||||
|
}
|
||||||
|
return zero(2);
|
||||||
|
}
|
||||||
|
|
||||||
|
/** return the measured */
|
||||||
|
inline const Point2 measured() const {
|
||||||
|
return measured_;
|
||||||
|
}
|
||||||
|
|
||||||
|
private:
|
||||||
|
/** Serialization function */
|
||||||
|
friend class boost::serialization::access;
|
||||||
|
template<class Archive>
|
||||||
|
void serialize(Archive & ar, const unsigned int /*version*/) {
|
||||||
|
ar & boost::serialization::make_nvp("NoiseModelFactor3",
|
||||||
|
boost::serialization::base_object<Base>(*this));
|
||||||
|
ar & BOOST_SERIALIZATION_NVP(measured_);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
template<class CALIBRATION>
|
||||||
|
struct traits<GeneralSFMFactor2<CALIBRATION> > : Testable<
|
||||||
|
GeneralSFMFactor2<CALIBRATION> > {
|
||||||
|
};
|
||||||
|
|
||||||
} //namespace
|
} //namespace
|
||||||
|
|
|
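The substantive change in this file is the new linearize override above: instead of the generic NoiseModelFactor2 linearization with dynamically sized matrices, the factor fills fixed-size 2xDimC and 2xDimL Jacobians and returns a BinaryJacobianFactor<2, DimC, DimL>. A minimal call-site sketch, with made-up camera, point, and measurement values (Projection matches the typedef used in testGeneralSFMFactor.cpp below):

#include <gtsam/slam/GeneralSFMFactor.h>
#include <gtsam/geometry/PinholeCamera.h>
#include <gtsam/geometry/Cal3_S2.h>
#include <gtsam/inference/Symbol.h>

using namespace gtsam;
using symbol_shorthand::X;
using symbol_shorthand::L;

typedef GeneralSFMFactor<PinholeCamera<Cal3_S2>, Point3> Projection;

GaussianFactor::shared_ptr linearizeOne() {
  Projection factor(Point2(320.0, 240.0), noiseModel::Unit::Create(2), X(1), L(1));

  Values values;
  values.insert(X(1), PinholeCamera<Cal3_S2>(Pose3()));
  values.insert(L(1), Point3(0.0, 0.0, 5.0));

  // Under the hood this is now a BinaryJacobianFactor<2, 11, 3> built from
  // fixed-size blocks (11 = dim of PinholeCamera<Cal3_S2>, 3 = dim of Point3).
  return factor.linearize(values);
}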
@ -115,6 +115,11 @@ public:
|
||||||
return D;
|
return D;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
virtual void updateHessian(const FastVector<Key>& keys,
|
||||||
|
SymmetricBlockMatrix* info) const {
|
||||||
|
throw std::runtime_error(
|
||||||
|
"RegularImplicitSchurFactor::updateHessian non implemented");
|
||||||
|
}
|
||||||
virtual Matrix augmentedJacobian() const {
|
virtual Matrix augmentedJacobian() const {
|
||||||
throw std::runtime_error(
|
throw std::runtime_error(
|
||||||
"RegularImplicitSchurFactor::augmentedJacobian non implemented");
|
"RegularImplicitSchurFactor::augmentedJacobian non implemented");
|
||||||
|
|
|
@ -19,18 +19,20 @@
|
||||||
#include <gtsam/slam/GeneralSFMFactor.h>
|
#include <gtsam/slam/GeneralSFMFactor.h>
|
||||||
#include <gtsam/slam/RangeFactor.h>
|
#include <gtsam/slam/RangeFactor.h>
|
||||||
#include <gtsam/slam/PriorFactor.h>
|
#include <gtsam/slam/PriorFactor.h>
|
||||||
|
#include <gtsam/geometry/Cal3_S2.h>
|
||||||
|
#include <gtsam/geometry/Rot2.h>
|
||||||
|
#include <gtsam/geometry/PinholeCamera.h>
|
||||||
#include <gtsam/nonlinear/NonlinearEquality.h>
|
#include <gtsam/nonlinear/NonlinearEquality.h>
|
||||||
#include <gtsam/inference/Symbol.h>
|
|
||||||
#include <gtsam/nonlinear/NonlinearFactorGraph.h>
|
#include <gtsam/nonlinear/NonlinearFactorGraph.h>
|
||||||
#include <gtsam/nonlinear/LevenbergMarquardtOptimizer.h>
|
#include <gtsam/nonlinear/LevenbergMarquardtOptimizer.h>
|
||||||
#include <gtsam/linear/VectorValues.h>
|
#include <gtsam/linear/VectorValues.h>
|
||||||
#include <gtsam/geometry/Cal3_S2.h>
|
#include <gtsam/inference/Symbol.h>
|
||||||
#include <gtsam/geometry/PinholeCamera.h>
|
|
||||||
#include <gtsam/base/Testable.h>
|
#include <gtsam/base/Testable.h>
|
||||||
|
|
||||||
|
#include <boost/assign/std/vector.hpp>
|
||||||
#include <boost/shared_ptr.hpp>
|
#include <boost/shared_ptr.hpp>
|
||||||
#include <CppUnitLite/TestHarness.h>
|
#include <CppUnitLite/TestHarness.h>
|
||||||
using namespace boost;
|
using namespace boost::assign;
|
||||||
|
|
||||||
#include <iostream>
|
#include <iostream>
|
||||||
#include <vector>
|
#include <vector>
|
||||||
|
@ -49,7 +51,8 @@ typedef NonlinearEquality<Point3> Point3Constraint;
|
||||||
|
|
||||||
class Graph: public NonlinearFactorGraph {
|
class Graph: public NonlinearFactorGraph {
|
||||||
public:
|
public:
|
||||||
void addMeasurement(int i, int j, const Point2& z, const SharedNoiseModel& model) {
|
void addMeasurement(int i, int j, const Point2& z,
|
||||||
|
const SharedNoiseModel& model) {
|
||||||
push_back(boost::make_shared<Projection>(z, model, X(i), L(j)));
|
push_back(boost::make_shared<Projection>(z, model, X(i), L(j)));
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -65,98 +68,99 @@ public:
|
||||||
|
|
||||||
};
|
};
|
||||||
|
|
||||||
static double getGaussian()
|
static double getGaussian() {
|
||||||
{
|
double S, V1, V2;
|
||||||
double S,V1,V2;
|
// Use Box-Muller method to create gauss noise from uniform noise
|
||||||
// Use Box-Muller method to create gauss noise from uniform noise
|
do {
|
||||||
do
|
double U1 = rand() / (double) (RAND_MAX);
|
||||||
{
|
double U2 = rand() / (double) (RAND_MAX);
|
||||||
double U1 = rand() / (double)(RAND_MAX);
|
V1 = 2 * U1 - 1; /* V1=[-1,1] */
|
||||||
double U2 = rand() / (double)(RAND_MAX);
|
V2 = 2 * U2 - 1; /* V2=[-1,1] */
|
||||||
V1 = 2 * U1 - 1; /* V1=[-1,1] */
|
S = V1 * V1 + V2 * V2;
|
||||||
V2 = 2 * U2 - 1; /* V2=[-1,1] */
|
} while (S >= 1);
|
||||||
S = V1 * V1 + V2 * V2;
|
return sqrt(-2.f * (double) log(S) / S) * V1;
|
||||||
} while(S>=1);
|
|
||||||
return sqrt(-2.0f * (double)log(S) / S) * V1;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
static const SharedNoiseModel sigma1(noiseModel::Unit::Create(2));
|
static const SharedNoiseModel sigma1(noiseModel::Unit::Create(2));
|
||||||
|
static const double baseline = 5.;
|
||||||
|
|
||||||
/* ************************************************************************* */
|
/* ************************************************************************* */
|
||||||
TEST( GeneralSFMFactor, equals )
|
static vector<Point3> genPoint3() {
|
||||||
{
|
const double z = 5;
|
||||||
// Create two identical factors and make sure they're equal
|
vector<Point3> landmarks;
|
||||||
Point2 z(323.,240.);
|
landmarks.push_back(Point3(-1., -1., z));
|
||||||
const Symbol cameraFrameNumber('x',1), landmarkNumber('l',1);
|
landmarks.push_back(Point3(-1., 1., z));
|
||||||
const SharedNoiseModel sigma(noiseModel::Unit::Create(1));
|
landmarks.push_back(Point3(1., 1., z));
|
||||||
boost::shared_ptr<Projection>
|
landmarks.push_back(Point3(1., -1., z));
|
||||||
factor1(new Projection(z, sigma, cameraFrameNumber, landmarkNumber));
|
landmarks.push_back(Point3(-1.5, -1.5, 1.5 * z));
|
||||||
|
landmarks.push_back(Point3(-1.5, 1.5, 1.5 * z));
|
||||||
|
landmarks.push_back(Point3(1.5, 1.5, 1.5 * z));
|
||||||
|
landmarks.push_back(Point3(1.5, -1.5, 1.5 * z));
|
||||||
|
landmarks.push_back(Point3(-2., -2., 2 * z));
|
||||||
|
landmarks.push_back(Point3(-2., 2., 2 * z));
|
||||||
|
landmarks.push_back(Point3(2., 2., 2 * z));
|
||||||
|
landmarks.push_back(Point3(2., -2., 2 * z));
|
||||||
|
return landmarks;
|
||||||
|
}
|
||||||
|
|
||||||
boost::shared_ptr<Projection>
|
static vector<GeneralCamera> genCameraDefaultCalibration() {
|
||||||
factor2(new Projection(z, sigma, cameraFrameNumber, landmarkNumber));
|
vector<GeneralCamera> X;
|
||||||
|
X.push_back(GeneralCamera(Pose3(eye(3), Point3(-baseline / 2., 0., 0.))));
|
||||||
|
X.push_back(GeneralCamera(Pose3(eye(3), Point3(baseline / 2., 0., 0.))));
|
||||||
|
return X;
|
||||||
|
}
|
||||||
|
|
||||||
|
static vector<GeneralCamera> genCameraVariableCalibration() {
|
||||||
|
const Cal3_S2 K(640, 480, 0.1, 320, 240);
|
||||||
|
vector<GeneralCamera> X;
|
||||||
|
X.push_back(GeneralCamera(Pose3(eye(3), Point3(-baseline / 2., 0., 0.)), K));
|
||||||
|
X.push_back(GeneralCamera(Pose3(eye(3), Point3(baseline / 2., 0., 0.)), K));
|
||||||
|
return X;
|
||||||
|
}
|
||||||
|
|
||||||
|
static boost::shared_ptr<Ordering> getOrdering(
|
||||||
|
const vector<GeneralCamera>& cameras, const vector<Point3>& landmarks) {
|
||||||
|
boost::shared_ptr<Ordering> ordering(new Ordering);
|
||||||
|
for (size_t i = 0; i < landmarks.size(); ++i)
|
||||||
|
ordering->push_back(L(i));
|
||||||
|
for (size_t i = 0; i < cameras.size(); ++i)
|
||||||
|
ordering->push_back(X(i));
|
||||||
|
return ordering;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* ************************************************************************* */
|
||||||
|
TEST( GeneralSFMFactor, equals ) {
|
||||||
|
// Create two identical factors and make sure they're equal
|
||||||
|
Point2 z(323., 240.);
|
||||||
|
const Symbol cameraFrameNumber('x', 1), landmarkNumber('l', 1);
|
||||||
|
const SharedNoiseModel sigma(noiseModel::Unit::Create(1));
|
||||||
|
boost::shared_ptr<Projection> factor1(
|
||||||
|
new Projection(z, sigma, cameraFrameNumber, landmarkNumber));
|
||||||
|
|
||||||
|
boost::shared_ptr<Projection> factor2(
|
||||||
|
new Projection(z, sigma, cameraFrameNumber, landmarkNumber));
|
||||||
|
|
||||||
EXPECT(assert_equal(*factor1, *factor2));
|
EXPECT(assert_equal(*factor1, *factor2));
|
||||||
}
|
}
|
||||||
|
|
||||||
/* ************************************************************************* */
|
/* ************************************************************************* */
|
||||||
TEST( GeneralSFMFactor, error ) {
|
TEST( GeneralSFMFactor, error ) {
|
||||||
Point2 z(3.,0.);
|
Point2 z(3., 0.);
|
||||||
const SharedNoiseModel sigma(noiseModel::Unit::Create(1));
|
const SharedNoiseModel sigma(noiseModel::Unit::Create(2));
|
||||||
boost::shared_ptr<Projection> factor(new Projection(z, sigma, X(1), L(1)));
|
Projection factor(z, sigma, X(1), L(1));
|
||||||
// For the following configuration, the factor predicts 320,240
|
// For the following configuration, the factor predicts 320,240
|
||||||
Values values;
|
Values values;
|
||||||
Rot3 R;
|
Rot3 R;
|
||||||
Point3 t1(0,0,-6);
|
Point3 t1(0, 0, -6);
|
||||||
Pose3 x1(R,t1);
|
Pose3 x1(R, t1);
|
||||||
values.insert(X(1), GeneralCamera(x1));
|
values.insert(X(1), GeneralCamera(x1));
|
||||||
Point3 l1; values.insert(L(1), l1);
|
Point3 l1;
|
||||||
EXPECT(assert_equal(((Vector) Vector2(-3.0, 0.0)), factor->unwhitenedError(values)));
|
values.insert(L(1), l1);
|
||||||
|
EXPECT(
|
||||||
|
assert_equal(((Vector ) Vector2(-3., 0.)),
|
||||||
|
factor.unwhitenedError(values)));
|
||||||
}
|
}
|
||||||
|
|
||||||
static const double baseline = 5.0 ;
|
|
||||||
|
|
||||||
/* ************************************************************************* */
|
|
||||||
static vector<Point3> genPoint3() {
|
|
||||||
const double z = 5;
|
|
||||||
vector<Point3> landmarks ;
|
|
||||||
landmarks.push_back(Point3 (-1.0,-1.0, z));
|
|
||||||
landmarks.push_back(Point3 (-1.0, 1.0, z));
|
|
||||||
landmarks.push_back(Point3 ( 1.0, 1.0, z));
|
|
||||||
landmarks.push_back(Point3 ( 1.0,-1.0, z));
|
|
||||||
landmarks.push_back(Point3 (-1.5,-1.5, 1.5*z));
|
|
||||||
landmarks.push_back(Point3 (-1.5, 1.5, 1.5*z));
|
|
||||||
landmarks.push_back(Point3 ( 1.5, 1.5, 1.5*z));
|
|
||||||
landmarks.push_back(Point3 ( 1.5,-1.5, 1.5*z));
|
|
||||||
landmarks.push_back(Point3 (-2.0,-2.0, 2*z));
|
|
||||||
landmarks.push_back(Point3 (-2.0, 2.0, 2*z));
|
|
||||||
landmarks.push_back(Point3 ( 2.0, 2.0, 2*z));
|
|
||||||
landmarks.push_back(Point3 ( 2.0,-2.0, 2*z));
|
|
||||||
return landmarks ;
|
|
||||||
}
|
|
||||||
|
|
||||||
static vector<GeneralCamera> genCameraDefaultCalibration() {
|
|
||||||
vector<GeneralCamera> X ;
|
|
||||||
X.push_back(GeneralCamera(Pose3(eye(3),Point3(-baseline/2.0, 0.0, 0.0))));
|
|
||||||
X.push_back(GeneralCamera(Pose3(eye(3),Point3( baseline/2.0, 0.0, 0.0))));
|
|
||||||
return X ;
|
|
||||||
}
|
|
||||||
|
|
||||||
static vector<GeneralCamera> genCameraVariableCalibration() {
|
|
||||||
const Cal3_S2 K(640,480,0.01,320,240);
|
|
||||||
vector<GeneralCamera> X ;
|
|
||||||
X.push_back(GeneralCamera(Pose3(eye(3),Point3(-baseline/2.0, 0.0, 0.0)), K));
|
|
||||||
X.push_back(GeneralCamera(Pose3(eye(3),Point3( baseline/2.0, 0.0, 0.0)), K));
|
|
||||||
return X ;
|
|
||||||
}
|
|
||||||
|
|
||||||
static boost::shared_ptr<Ordering> getOrdering(const vector<GeneralCamera>& cameras, const vector<Point3>& landmarks) {
|
|
||||||
boost::shared_ptr<Ordering> ordering(new Ordering);
|
|
||||||
for ( size_t i = 0 ; i < landmarks.size() ; ++i ) ordering->push_back(L(i)) ;
|
|
||||||
for ( size_t i = 0 ; i < cameras.size() ; ++i ) ordering->push_back(X(i)) ;
|
|
||||||
return ordering ;
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
/* ************************************************************************* */
|
/* ************************************************************************* */
|
||||||
TEST( GeneralSFMFactor, optimize_defaultK ) {
|
TEST( GeneralSFMFactor, optimize_defaultK ) {
|
||||||
|
|
||||||
|
@@ -165,32 +169,32 @@ TEST( GeneralSFMFactor, optimize_defaultK ) {
|
||||||
|
|
||||||
// add measurement with noise
|
// add measurement with noise
|
||||||
Graph graph;
|
Graph graph;
|
||||||
for ( size_t j = 0 ; j < cameras.size() ; ++j) {
|
for (size_t j = 0; j < cameras.size(); ++j) {
|
||||||
for ( size_t i = 0 ; i < landmarks.size() ; ++i) {
|
for (size_t i = 0; i < landmarks.size(); ++i) {
|
||||||
Point2 pt = cameras[j].project(landmarks[i]) ;
|
Point2 pt = cameras[j].project(landmarks[i]);
|
||||||
graph.addMeasurement(j, i, pt, sigma1);
|
graph.addMeasurement(j, i, pt, sigma1);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
const size_t nMeasurements = cameras.size()*landmarks.size() ;
|
const size_t nMeasurements = cameras.size() * landmarks.size();
|
||||||
|
|
||||||
// add initial
|
// add initial
|
||||||
const double noise = baseline*0.1;
|
const double noise = baseline * 0.1;
|
||||||
Values values;
|
Values values;
|
||||||
for ( size_t i = 0 ; i < cameras.size() ; ++i )
|
for (size_t i = 0; i < cameras.size(); ++i)
|
||||||
values.insert(X(i), cameras[i]) ;
|
values.insert(X(i), cameras[i]);
|
||||||
|
|
||||||
for ( size_t i = 0 ; i < landmarks.size() ; ++i ) {
|
for (size_t i = 0; i < landmarks.size(); ++i) {
|
||||||
Point3 pt(landmarks[i].x()+noise*getGaussian(),
|
Point3 pt(landmarks[i].x() + noise * getGaussian(),
|
||||||
landmarks[i].y()+noise*getGaussian(),
|
landmarks[i].y() + noise * getGaussian(),
|
||||||
landmarks[i].z()+noise*getGaussian());
|
landmarks[i].z() + noise * getGaussian());
|
||||||
values.insert(L(i), pt) ;
|
values.insert(L(i), pt);
|
||||||
}
|
}
|
||||||
|
|
||||||
graph.addCameraConstraint(0, cameras[0]);
|
graph.addCameraConstraint(0, cameras[0]);
|
||||||
|
|
||||||
// Create an ordering of the variables
|
// Create an ordering of the variables
|
||||||
Ordering ordering = *getOrdering(cameras,landmarks);
|
Ordering ordering = *getOrdering(cameras, landmarks);
|
||||||
LevenbergMarquardtOptimizer optimizer(graph, values, ordering);
|
LevenbergMarquardtOptimizer optimizer(graph, values, ordering);
|
||||||
Values final = optimizer.optimize();
|
Values final = optimizer.optimize();
|
||||||
EXPECT(optimizer.error() < 0.5 * 1e-5 * nMeasurements);
|
EXPECT(optimizer.error() < 0.5 * 1e-5 * nMeasurements);
|
||||||
|
@@ -202,38 +206,37 @@ TEST( GeneralSFMFactor, optimize_varK_SingleMeasurementError ) {
|
||||||
vector<GeneralCamera> cameras = genCameraVariableCalibration();
|
vector<GeneralCamera> cameras = genCameraVariableCalibration();
|
||||||
// add measurement with noise
|
// add measurement with noise
|
||||||
Graph graph;
|
Graph graph;
|
||||||
for ( size_t j = 0 ; j < cameras.size() ; ++j) {
|
for (size_t j = 0; j < cameras.size(); ++j) {
|
||||||
for ( size_t i = 0 ; i < landmarks.size() ; ++i) {
|
for (size_t i = 0; i < landmarks.size(); ++i) {
|
||||||
Point2 pt = cameras[j].project(landmarks[i]) ;
|
Point2 pt = cameras[j].project(landmarks[i]);
|
||||||
graph.addMeasurement(j, i, pt, sigma1);
|
graph.addMeasurement(j, i, pt, sigma1);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
const size_t nMeasurements = cameras.size()*landmarks.size() ;
|
const size_t nMeasurements = cameras.size() * landmarks.size();
|
||||||
|
|
||||||
// add initial
|
// add initial
|
||||||
const double noise = baseline*0.1;
|
const double noise = baseline * 0.1;
|
||||||
Values values;
|
Values values;
|
||||||
for ( size_t i = 0 ; i < cameras.size() ; ++i )
|
for (size_t i = 0; i < cameras.size(); ++i)
|
||||||
values.insert(X(i), cameras[i]) ;
|
values.insert(X(i), cameras[i]);
|
||||||
|
|
||||||
// add noise only to the first landmark
|
// add noise only to the first landmark
|
||||||
for ( size_t i = 0 ; i < landmarks.size() ; ++i ) {
|
for (size_t i = 0; i < landmarks.size(); ++i) {
|
||||||
if ( i == 0 ) {
|
if (i == 0) {
|
||||||
Point3 pt(landmarks[i].x()+noise*getGaussian(),
|
Point3 pt(landmarks[i].x() + noise * getGaussian(),
|
||||||
landmarks[i].y()+noise*getGaussian(),
|
landmarks[i].y() + noise * getGaussian(),
|
||||||
landmarks[i].z()+noise*getGaussian());
|
landmarks[i].z() + noise * getGaussian());
|
||||||
values.insert(L(i), pt) ;
|
values.insert(L(i), pt);
|
||||||
}
|
} else {
|
||||||
else {
|
values.insert(L(i), landmarks[i]);
|
||||||
values.insert(L(i), landmarks[i]) ;
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
graph.addCameraConstraint(0, cameras[0]);
|
graph.addCameraConstraint(0, cameras[0]);
|
||||||
const double reproj_error = 1e-5;
|
const double reproj_error = 1e-5;
|
||||||
|
|
||||||
Ordering ordering = *getOrdering(cameras,landmarks);
|
Ordering ordering = *getOrdering(cameras, landmarks);
|
||||||
LevenbergMarquardtOptimizer optimizer(graph, values, ordering);
|
LevenbergMarquardtOptimizer optimizer(graph, values, ordering);
|
||||||
Values final = optimizer.optimize();
|
Values final = optimizer.optimize();
|
||||||
EXPECT(optimizer.error() < 0.5 * reproj_error * nMeasurements);
|
EXPECT(optimizer.error() < 0.5 * reproj_error * nMeasurements);
|
||||||
|
@@ -246,35 +249,34 @@ TEST( GeneralSFMFactor, optimize_varK_FixCameras ) {
|
||||||
vector<GeneralCamera> cameras = genCameraVariableCalibration();
|
vector<GeneralCamera> cameras = genCameraVariableCalibration();
|
||||||
|
|
||||||
// add measurement with noise
|
// add measurement with noise
|
||||||
const double noise = baseline*0.1;
|
const double noise = baseline * 0.1;
|
||||||
Graph graph;
|
Graph graph;
|
||||||
for ( size_t j = 0 ; j < cameras.size() ; ++j) {
|
for (size_t i = 0; i < cameras.size(); ++i) {
|
||||||
for ( size_t i = 0 ; i < landmarks.size() ; ++i) {
|
for (size_t j = 0; j < landmarks.size(); ++j) {
|
||||||
Point2 pt = cameras[j].project(landmarks[i]) ;
|
Point2 z = cameras[i].project(landmarks[j]);
|
||||||
graph.addMeasurement(j, i, pt, sigma1);
|
graph.addMeasurement(i, j, z, sigma1);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
const size_t nMeasurements = landmarks.size()*cameras.size();
|
const size_t nMeasurements = landmarks.size() * cameras.size();
|
||||||
|
|
||||||
Values values;
|
Values values;
|
||||||
for ( size_t i = 0 ; i < cameras.size() ; ++i )
|
for (size_t i = 0; i < cameras.size(); ++i)
|
||||||
values.insert(X(i), cameras[i]) ;
|
values.insert(X(i), cameras[i]);
|
||||||
|
|
||||||
for ( size_t i = 0 ; i < landmarks.size() ; ++i ) {
|
for (size_t j = 0; j < landmarks.size(); ++j) {
|
||||||
Point3 pt(landmarks[i].x()+noise*getGaussian(),
|
Point3 pt(landmarks[j].x() + noise * getGaussian(),
|
||||||
landmarks[i].y()+noise*getGaussian(),
|
landmarks[j].y() + noise * getGaussian(),
|
||||||
landmarks[i].z()+noise*getGaussian());
|
landmarks[j].z() + noise * getGaussian());
|
||||||
//Point3 pt(landmarks[i].x(), landmarks[i].y(), landmarks[i].z());
|
values.insert(L(j), pt);
|
||||||
values.insert(L(i), pt) ;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
for ( size_t i = 0 ; i < cameras.size() ; ++i )
|
for (size_t i = 0; i < cameras.size(); ++i)
|
||||||
graph.addCameraConstraint(i, cameras[i]);
|
graph.addCameraConstraint(i, cameras[i]);
|
||||||
|
|
||||||
const double reproj_error = 1e-5 ;
|
const double reproj_error = 1e-5;
|
||||||
|
|
||||||
Ordering ordering = *getOrdering(cameras,landmarks);
|
Ordering ordering = *getOrdering(cameras, landmarks);
|
||||||
LevenbergMarquardtOptimizer optimizer(graph, values, ordering);
|
LevenbergMarquardtOptimizer optimizer(graph, values, ordering);
|
||||||
Values final = optimizer.optimize();
|
Values final = optimizer.optimize();
|
||||||
EXPECT(optimizer.error() < 0.5 * reproj_error * nMeasurements);
|
EXPECT(optimizer.error() < 0.5 * reproj_error * nMeasurements);
|
||||||
|
@@ -288,50 +290,45 @@ TEST( GeneralSFMFactor, optimize_varK_FixLandmarks ) {
|
||||||
|
|
||||||
// add measurement with noise
|
// add measurement with noise
|
||||||
Graph graph;
|
Graph graph;
|
||||||
for ( size_t j = 0 ; j < cameras.size() ; ++j) {
|
for (size_t j = 0; j < cameras.size(); ++j) {
|
||||||
for ( size_t i = 0 ; i < landmarks.size() ; ++i) {
|
for (size_t i = 0; i < landmarks.size(); ++i) {
|
||||||
Point2 pt = cameras[j].project(landmarks[i]) ;
|
Point2 pt = cameras[j].project(landmarks[i]);
|
||||||
graph.addMeasurement(j, i, pt, sigma1);
|
graph.addMeasurement(j, i, pt, sigma1);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
const size_t nMeasurements = landmarks.size()*cameras.size();
|
const size_t nMeasurements = landmarks.size() * cameras.size();
|
||||||
|
|
||||||
Values values;
|
Values values;
|
||||||
for ( size_t i = 0 ; i < cameras.size() ; ++i ) {
|
for (size_t i = 0; i < cameras.size(); ++i) {
|
||||||
const double
|
const double rot_noise = 1e-5, trans_noise = 1e-3, focal_noise = 1,
|
||||||
rot_noise = 1e-5,
|
skew_noise = 1e-5;
|
||||||
trans_noise = 1e-3,
|
if (i == 0) {
|
||||||
focal_noise = 1,
|
values.insert(X(i), cameras[i]);
|
||||||
skew_noise = 1e-5;
|
} else {
|
||||||
if ( i == 0 ) {
|
|
||||||
values.insert(X(i), cameras[i]) ;
|
|
||||||
}
|
|
||||||
else {
|
|
||||||
|
|
||||||
Vector delta = (Vector(11) <<
|
Vector delta = (Vector(11) << rot_noise, rot_noise, rot_noise, // rotation
|
||||||
rot_noise, rot_noise, rot_noise, // rotation
|
trans_noise, trans_noise, trans_noise, // translation
|
||||||
trans_noise, trans_noise, trans_noise, // translation
|
focal_noise, focal_noise, // f_x, f_y
|
||||||
focal_noise, focal_noise, // f_x, f_y
|
skew_noise, // s
|
||||||
skew_noise, // s
|
trans_noise, trans_noise // ux, uy
|
||||||
trans_noise, trans_noise // ux, uy
|
|
||||||
).finished();
|
).finished();
|
||||||
values.insert(X(i), cameras[i].retract(delta)) ;
|
values.insert(X(i), cameras[i].retract(delta));
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
for ( size_t i = 0 ; i < landmarks.size() ; ++i ) {
|
for (size_t i = 0; i < landmarks.size(); ++i) {
|
||||||
values.insert(L(i), landmarks[i]) ;
|
values.insert(L(i), landmarks[i]);
|
||||||
}
|
}
|
||||||
|
|
||||||
// fix X0 and all landmarks, allow only the cameras[1] to move
|
// fix X0 and all landmarks, allow only the cameras[1] to move
|
||||||
graph.addCameraConstraint(0, cameras[0]);
|
graph.addCameraConstraint(0, cameras[0]);
|
||||||
for ( size_t i = 0 ; i < landmarks.size() ; ++i )
|
for (size_t i = 0; i < landmarks.size(); ++i)
|
||||||
graph.addPoint3Constraint(i, landmarks[i]);
|
graph.addPoint3Constraint(i, landmarks[i]);
|
||||||
|
|
||||||
const double reproj_error = 1e-5 ;
|
const double reproj_error = 1e-5;
|
||||||
|
|
||||||
Ordering ordering = *getOrdering(cameras,landmarks);
|
Ordering ordering = *getOrdering(cameras, landmarks);
|
||||||
LevenbergMarquardtOptimizer optimizer(graph, values, ordering);
|
LevenbergMarquardtOptimizer optimizer(graph, values, ordering);
|
||||||
Values final = optimizer.optimize();
|
Values final = optimizer.optimize();
|
||||||
EXPECT(optimizer.error() < 0.5 * reproj_error * nMeasurements);
|
EXPECT(optimizer.error() < 0.5 * reproj_error * nMeasurements);
|
||||||
|
@@ -344,38 +341,40 @@ TEST( GeneralSFMFactor, optimize_varK_BA ) {
|
||||||
|
|
||||||
// add measurement with noise
|
// add measurement with noise
|
||||||
Graph graph;
|
Graph graph;
|
||||||
for ( size_t j = 0 ; j < cameras.size() ; ++j) {
|
for (size_t j = 0; j < cameras.size(); ++j) {
|
||||||
for ( size_t i = 0 ; i < landmarks.size() ; ++i) {
|
for (size_t i = 0; i < landmarks.size(); ++i) {
|
||||||
Point2 pt = cameras[j].project(landmarks[i]) ;
|
Point2 pt = cameras[j].project(landmarks[i]);
|
||||||
graph.addMeasurement(j, i, pt, sigma1);
|
graph.addMeasurement(j, i, pt, sigma1);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
const size_t nMeasurements = cameras.size()*landmarks.size() ;
|
const size_t nMeasurements = cameras.size() * landmarks.size();
|
||||||
|
|
||||||
// add initial
|
// add initial
|
||||||
const double noise = baseline*0.1;
|
const double noise = baseline * 0.1;
|
||||||
Values values;
|
Values values;
|
||||||
for ( size_t i = 0 ; i < cameras.size() ; ++i )
|
for (size_t i = 0; i < cameras.size(); ++i)
|
||||||
values.insert(X(i), cameras[i]) ;
|
values.insert(X(i), cameras[i]);
|
||||||
|
|
||||||
// add noise only to the first landmark
|
// add noise only to the first landmark
|
||||||
for ( size_t i = 0 ; i < landmarks.size() ; ++i ) {
|
for (size_t i = 0; i < landmarks.size(); ++i) {
|
||||||
Point3 pt(landmarks[i].x()+noise*getGaussian(),
|
Point3 pt(landmarks[i].x() + noise * getGaussian(),
|
||||||
landmarks[i].y()+noise*getGaussian(),
|
landmarks[i].y() + noise * getGaussian(),
|
||||||
landmarks[i].z()+noise*getGaussian());
|
landmarks[i].z() + noise * getGaussian());
|
||||||
values.insert(L(i), pt) ;
|
values.insert(L(i), pt);
|
||||||
}
|
}
|
||||||
|
|
||||||
// Constrain position of system with the first camera constrained to the origin
|
// Constrain position of system with the first camera constrained to the origin
|
||||||
graph.addCameraConstraint(0, cameras[0]);
|
graph.addCameraConstraint(0, cameras[0]);
|
||||||
|
|
||||||
// Constrain the scale of the problem with a soft range factor of 1m between the cameras
|
// Constrain the scale of the problem with a soft range factor of 1m between the cameras
|
||||||
graph.push_back(RangeFactor<GeneralCamera,GeneralCamera>(X(0), X(1), 2.0, noiseModel::Isotropic::Sigma(1, 10.0)));
|
graph.push_back(
|
||||||
|
RangeFactor<GeneralCamera, GeneralCamera>(X(0), X(1), 2.,
|
||||||
|
noiseModel::Isotropic::Sigma(1, 10.)));
|
||||||
|
|
||||||
const double reproj_error = 1e-5 ;
|
const double reproj_error = 1e-5;
|
||||||
|
|
||||||
Ordering ordering = *getOrdering(cameras,landmarks);
|
Ordering ordering = *getOrdering(cameras, landmarks);
|
||||||
LevenbergMarquardtOptimizer optimizer(graph, values, ordering);
|
LevenbergMarquardtOptimizer optimizer(graph, values, ordering);
|
||||||
Values final = optimizer.optimize();
|
Values final = optimizer.optimize();
|
||||||
EXPECT(optimizer.error() < 0.5 * reproj_error * nMeasurements);
|
EXPECT(optimizer.error() < 0.5 * reproj_error * nMeasurements);
|
||||||
|
@@ -386,17 +385,21 @@ TEST(GeneralSFMFactor, GeneralCameraPoseRange) {
|
||||||
// Tests range factor between a GeneralCamera and a Pose3
|
// Tests range factor between a GeneralCamera and a Pose3
|
||||||
Graph graph;
|
Graph graph;
|
||||||
graph.addCameraConstraint(0, GeneralCamera());
|
graph.addCameraConstraint(0, GeneralCamera());
|
||||||
graph.push_back(RangeFactor<GeneralCamera, Pose3>(X(0), X(1), 2.0, noiseModel::Isotropic::Sigma(1, 1.0)));
|
graph.push_back(
|
||||||
graph.push_back(PriorFactor<Pose3>(X(1), Pose3(Rot3(), Point3(1.0, 0.0, 0.0)), noiseModel::Isotropic::Sigma(6, 1.0)));
|
RangeFactor<GeneralCamera, Pose3>(X(0), X(1), 2.,
|
||||||
|
noiseModel::Isotropic::Sigma(1, 1.)));
|
||||||
|
graph.push_back(
|
||||||
|
PriorFactor<Pose3>(X(1), Pose3(Rot3(), Point3(1., 0., 0.)),
|
||||||
|
noiseModel::Isotropic::Sigma(6, 1.)));
|
||||||
|
|
||||||
Values init;
|
Values init;
|
||||||
init.insert(X(0), GeneralCamera());
|
init.insert(X(0), GeneralCamera());
|
||||||
init.insert(X(1), Pose3(Rot3(), Point3(1.0,1.0,1.0)));
|
init.insert(X(1), Pose3(Rot3(), Point3(1., 1., 1.)));
|
||||||
|
|
||||||
// The optimal value between the 2m range factor and 1m prior is 1.5m
|
// The optimal value between the 2m range factor and 1m prior is 1.5m
|
||||||
Values expected;
|
Values expected;
|
||||||
expected.insert(X(0), GeneralCamera());
|
expected.insert(X(0), GeneralCamera());
|
||||||
expected.insert(X(1), Pose3(Rot3(), Point3(1.5,0.0,0.0)));
|
expected.insert(X(1), Pose3(Rot3(), Point3(1.5, 0., 0.)));
|
||||||
|
|
||||||
LevenbergMarquardtParams params;
|
LevenbergMarquardtParams params;
|
||||||
params.absoluteErrorTol = 1e-9;
|
params.absoluteErrorTol = 1e-9;
|
||||||
|
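The 1.5 m quoted in the comment above follows from a one-dimensional least-squares trade-off: X(0) is pinned at the origin by the camera constraint, so the translation x of X(1) only balances the 2 m range factor against the 1 m prior, both with unit sigma. A sketch of the argument (assuming the rotation stays at identity):

\[ \min_x\; (x-2)^2 + (x-1)^2 \;\Rightarrow\; 2(x-2) + 2(x-1) = 0 \;\Rightarrow\; x = 1.5 . \]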
@@ -410,16 +413,23 @@ TEST(GeneralSFMFactor, GeneralCameraPoseRange) {
|
||||||
TEST(GeneralSFMFactor, CalibratedCameraPoseRange) {
|
TEST(GeneralSFMFactor, CalibratedCameraPoseRange) {
|
||||||
// Tests range factor between a CalibratedCamera and a Pose3
|
// Tests range factor between a CalibratedCamera and a Pose3
|
||||||
NonlinearFactorGraph graph;
|
NonlinearFactorGraph graph;
|
||||||
graph.push_back(PriorFactor<CalibratedCamera>(X(0), CalibratedCamera(), noiseModel::Isotropic::Sigma(6, 1.0)));
|
graph.push_back(
|
||||||
graph.push_back(RangeFactor<CalibratedCamera, Pose3>(X(0), X(1), 2.0, noiseModel::Isotropic::Sigma(1, 1.0)));
|
PriorFactor<CalibratedCamera>(X(0), CalibratedCamera(),
|
||||||
graph.push_back(PriorFactor<Pose3>(X(1), Pose3(Rot3(), Point3(1.0, 0.0, 0.0)), noiseModel::Isotropic::Sigma(6, 1.0)));
|
noiseModel::Isotropic::Sigma(6, 1.)));
|
||||||
|
graph.push_back(
|
||||||
|
RangeFactor<CalibratedCamera, Pose3>(X(0), X(1), 2.,
|
||||||
|
noiseModel::Isotropic::Sigma(1, 1.)));
|
||||||
|
graph.push_back(
|
||||||
|
PriorFactor<Pose3>(X(1), Pose3(Rot3(), Point3(1., 0., 0.)),
|
||||||
|
noiseModel::Isotropic::Sigma(6, 1.)));
|
||||||
|
|
||||||
Values init;
|
Values init;
|
||||||
init.insert(X(0), CalibratedCamera());
|
init.insert(X(0), CalibratedCamera());
|
||||||
init.insert(X(1), Pose3(Rot3(), Point3(1.0,1.0,1.0)));
|
init.insert(X(1), Pose3(Rot3(), Point3(1., 1., 1.)));
|
||||||
|
|
||||||
Values expected;
|
Values expected;
|
||||||
expected.insert(X(0), CalibratedCamera(Pose3(Rot3(), Point3(-0.333333333333, 0, 0))));
|
expected.insert(X(0),
|
||||||
|
CalibratedCamera(Pose3(Rot3(), Point3(-0.333333333333, 0, 0))));
|
||||||
expected.insert(X(1), Pose3(Rot3(), Point3(1.333333333333, 0, 0)));
|
expected.insert(X(1), Pose3(Rot3(), Point3(1.333333333333, 0, 0)));
|
||||||
|
|
||||||
LevenbergMarquardtParams params;
|
LevenbergMarquardtParams params;
|
||||||
|
@@ -431,5 +441,95 @@ TEST(GeneralSFMFactor, CalibratedCameraPoseRange) {
|
||||||
}
|
}
|
||||||
|
|
||||||
/* ************************************************************************* */
|
/* ************************************************************************* */
|
||||||
int main() { TestResult tr; return TestRegistry::runAllTests(tr); }
|
// Frank created these tests after switching to a custom LinearizedFactor
|
||||||
|
TEST(GeneralSFMFactor, BinaryJacobianFactor) {
|
||||||
|
Point2 measurement(3., -1.);
|
||||||
|
|
||||||
|
// Create Values
|
||||||
|
Values values;
|
||||||
|
Rot3 R;
|
||||||
|
Point3 t1(0, 0, -6);
|
||||||
|
Pose3 x1(R, t1);
|
||||||
|
values.insert(X(1), GeneralCamera(x1));
|
||||||
|
Point3 l1;
|
||||||
|
values.insert(L(1), l1);
|
||||||
|
|
||||||
|
vector<SharedNoiseModel> models;
|
||||||
|
{
|
||||||
|
// Create various noise-models to test all cases
|
||||||
|
using namespace noiseModel;
|
||||||
|
Rot2 R = Rot2::fromAngle(0.3);
|
||||||
|
Matrix2 cov = R.matrix() * R.matrix().transpose();
|
||||||
|
models += SharedNoiseModel(), Unit::Create(2), //
|
||||||
|
Isotropic::Sigma(2, 0.5), Constrained::All(2), Gaussian::Covariance(cov);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Now loop over all these noise models
|
||||||
|
BOOST_FOREACH(SharedNoiseModel model, models) {
|
||||||
|
Projection factor(measurement, model, X(1), L(1));
|
||||||
|
|
||||||
|
// Test linearize
|
||||||
|
GaussianFactor::shared_ptr expected = //
|
||||||
|
factor.NoiseModelFactor::linearize(values);
|
||||||
|
GaussianFactor::shared_ptr actual = factor.linearize(values);
|
||||||
|
EXPECT(assert_equal(*expected, *actual, 1e-9));
|
||||||
|
|
||||||
|
// Test methods that rely on updateHessian
|
||||||
|
if (model && !model->isConstrained()) {
|
||||||
|
// Construct HessianFactor from single JacobianFactor
|
||||||
|
HessianFactor expectedHessian(*expected), actualHessian(*actual);
|
||||||
|
EXPECT(assert_equal(expectedHessian, actualHessian, 1e-9));
|
||||||
|
|
||||||
|
// Convert back
|
||||||
|
JacobianFactor actualJacobian(actualHessian);
|
||||||
|
// Note we do not expect the actualJacobian to match *expected
|
||||||
|
// Just that they have the same information on the variable.
|
||||||
|
EXPECT(
|
||||||
|
assert_equal(expected->augmentedInformation(),
|
||||||
|
actualJacobian.augmentedInformation(), 1e-9));
|
||||||
|
|
||||||
|
// Construct from GaussianFactorGraph
|
||||||
|
GaussianFactorGraph gfg1;
|
||||||
|
gfg1 += expected;
|
||||||
|
GaussianFactorGraph gfg2;
|
||||||
|
gfg2 += actual;
|
||||||
|
HessianFactor hessian1(gfg1), hessian2(gfg2);
|
||||||
|
EXPECT(assert_equal(hessian1, hessian2, 1e-9));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
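The note above ("same information on the variable", not the same Jacobian) reflects that a whitened Jacobian is only determined up to an orthogonal transform, so a Hessian-to-Jacobian conversion may return a different [A b] than the one it started from. What must be preserved, and what augmentedInformation() compares, is the augmented information matrix. A sketch of the identity, with A and b the whitened Jacobian blocks:

\[ \Lambda_{\mathrm{aug}} = [A\;\,b]^{\top}[A\;\,b], \qquad (Q\,[A\;\,b])^{\top}(Q\,[A\;\,b]) = \Lambda_{\mathrm{aug}} \ \text{ for any orthogonal } Q . \]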
|
|
||||||
|
/* ************************************************************************* */
|
||||||
|
// Do a thorough test of BinaryJacobianFactor
|
||||||
|
TEST( GeneralSFMFactor, BinaryJacobianFactor2 ) {
|
||||||
|
|
||||||
|
vector<Point3> landmarks = genPoint3();
|
||||||
|
vector<GeneralCamera> cameras = genCameraVariableCalibration();
|
||||||
|
|
||||||
|
Values values;
|
||||||
|
for (size_t i = 0; i < cameras.size(); ++i)
|
||||||
|
values.insert(X(i), cameras[i]);
|
||||||
|
for (size_t j = 0; j < landmarks.size(); ++j)
|
||||||
|
values.insert(L(j), landmarks[j]);
|
||||||
|
|
||||||
|
for (size_t i = 0; i < cameras.size(); ++i) {
|
||||||
|
for (size_t j = 0; j < landmarks.size(); ++j) {
|
||||||
|
Point2 z = cameras[i].project(landmarks[j]);
|
||||||
|
Projection::shared_ptr nonlinear = //
|
||||||
|
boost::make_shared<Projection>(z, sigma1, X(i), L(j));
|
||||||
|
GaussianFactor::shared_ptr factor = nonlinear->linearize(values);
|
||||||
|
HessianFactor hessian(*factor);
|
||||||
|
JacobianFactor jacobian(hessian);
|
||||||
|
EXPECT(
|
||||||
|
assert_equal(factor->augmentedInformation(),
|
||||||
|
jacobian.augmentedInformation(), 1e-9));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/* ************************************************************************* */
|
||||||
|
int main() {
|
||||||
|
TestResult tr;
|
||||||
|
return TestRegistry::runAllTests(tr);
|
||||||
|
}
|
||||||
/* ************************************************************************* */
|
/* ************************************************************************* */
|
||||||
|
|
|
@@ -49,7 +49,8 @@ typedef NonlinearEquality<Point3> Point3Constraint;
|
||||||
/* ************************************************************************* */
|
/* ************************************************************************* */
|
||||||
class Graph: public NonlinearFactorGraph {
|
class Graph: public NonlinearFactorGraph {
|
||||||
public:
|
public:
|
||||||
void addMeasurement(const int& i, const int& j, const Point2& z, const SharedNoiseModel& model) {
|
void addMeasurement(const int& i, const int& j, const Point2& z,
|
||||||
|
const SharedNoiseModel& model) {
|
||||||
push_back(boost::make_shared<Projection>(z, model, X(i), L(j)));
|
push_back(boost::make_shared<Projection>(z, model, X(i), L(j)));
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@@ -65,97 +66,101 @@ public:
|
||||||
|
|
||||||
};
|
};
|
||||||
|
|
||||||
static double getGaussian()
|
static double getGaussian() {
|
||||||
{
|
double S, V1, V2;
|
||||||
double S,V1,V2;
|
// Use Box-Muller method to create gauss noise from uniform noise
|
||||||
// Use Box-Muller method to create gauss noise from uniform noise
|
do {
|
||||||
do
|
double U1 = rand() / (double) (RAND_MAX);
|
||||||
{
|
double U2 = rand() / (double) (RAND_MAX);
|
||||||
double U1 = rand() / (double)(RAND_MAX);
|
V1 = 2 * U1 - 1; /* V1=[-1,1] */
|
||||||
double U2 = rand() / (double)(RAND_MAX);
|
V2 = 2 * U2 - 1; /* V2=[-1,1] */
|
||||||
V1 = 2 * U1 - 1; /* V1=[-1,1] */
|
S = V1 * V1 + V2 * V2;
|
||||||
V2 = 2 * U2 - 1; /* V2=[-1,1] */
|
} while (S >= 1);
|
||||||
S = V1 * V1 + V2 * V2;
|
return sqrt(-2.f * (double) log(S) / S) * V1;
|
||||||
} while(S>=1);
|
|
||||||
return sqrt(-2.0f * (double)log(S) / S) * V1;
|
|
||||||
}
|
}
|
||||||
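For reference, a self-contained sketch of the same polar (Marsaglia) variant of Box-Muller that getGaussian() uses above, written against C++11 <random> so the uniform draws are seedable; the function and variable names here are illustrative and not part of the test file:

#include <cmath>
#include <random>

// Draw one standard-normal sample with the polar method: pick (v1, v2)
// uniformly inside the unit disc, then scale by sqrt(-2 ln(s) / s).
static double samplePolarGaussian(std::mt19937& rng) {
  std::uniform_real_distribution<double> uniform(-1.0, 1.0);
  double v1, v2, s;
  do {
    v1 = uniform(rng);      // V1 in [-1, 1]
    v2 = uniform(rng);      // V2 in [-1, 1]
    s = v1 * v1 + v2 * v2;  // reject samples on or outside the unit circle
  } while (s >= 1.0 || s == 0.0);
  return std::sqrt(-2.0 * std::log(s) / s) * v1;
}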
|
|
||||||
static const SharedNoiseModel sigma1(noiseModel::Unit::Create(2));
|
static const SharedNoiseModel sigma1(noiseModel::Unit::Create(2));
|
||||||
|
|
||||||
/* ************************************************************************* */
|
/* ************************************************************************* */
|
||||||
TEST( GeneralSFMFactor_Cal3Bundler, equals )
|
TEST( GeneralSFMFactor_Cal3Bundler, equals ) {
|
||||||
{
|
|
||||||
// Create two identical factors and make sure they're equal
|
// Create two identical factors and make sure they're equal
|
||||||
Point2 z(323.,240.);
|
Point2 z(323., 240.);
|
||||||
const Symbol cameraFrameNumber('x',1), landmarkNumber('l',1);
|
const Symbol cameraFrameNumber('x', 1), landmarkNumber('l', 1);
|
||||||
const SharedNoiseModel sigma(noiseModel::Unit::Create(1));
|
const SharedNoiseModel sigma(noiseModel::Unit::Create(1));
|
||||||
boost::shared_ptr<Projection>
|
boost::shared_ptr<Projection> factor1(
|
||||||
factor1(new Projection(z, sigma, cameraFrameNumber, landmarkNumber));
|
new Projection(z, sigma, cameraFrameNumber, landmarkNumber));
|
||||||
|
|
||||||
boost::shared_ptr<Projection>
|
boost::shared_ptr<Projection> factor2(
|
||||||
factor2(new Projection(z, sigma, cameraFrameNumber, landmarkNumber));
|
new Projection(z, sigma, cameraFrameNumber, landmarkNumber));
|
||||||
|
|
||||||
EXPECT(assert_equal(*factor1, *factor2));
|
EXPECT(assert_equal(*factor1, *factor2));
|
||||||
}
|
}
|
||||||
|
|
||||||
/* ************************************************************************* */
|
/* ************************************************************************* */
|
||||||
TEST( GeneralSFMFactor_Cal3Bundler, error ) {
|
TEST( GeneralSFMFactor_Cal3Bundler, error ) {
|
||||||
Point2 z(3.,0.);
|
Point2 z(3., 0.);
|
||||||
const SharedNoiseModel sigma(noiseModel::Unit::Create(1));
|
const SharedNoiseModel sigma(noiseModel::Unit::Create(1));
|
||||||
boost::shared_ptr<Projection>
|
boost::shared_ptr<Projection> factor(new Projection(z, sigma, X(1), L(1)));
|
||||||
factor(new Projection(z, sigma, X(1), L(1)));
|
|
||||||
// For the following configuration, the factor predicts 320,240
|
// For the following configuration, the factor predicts 320,240
|
||||||
Values values;
|
Values values;
|
||||||
Rot3 R;
|
Rot3 R;
|
||||||
Point3 t1(0,0,-6);
|
Point3 t1(0, 0, -6);
|
||||||
Pose3 x1(R,t1);
|
Pose3 x1(R, t1);
|
||||||
values.insert(X(1), GeneralCamera(x1));
|
values.insert(X(1), GeneralCamera(x1));
|
||||||
Point3 l1; values.insert(L(1), l1);
|
Point3 l1;
|
||||||
EXPECT(assert_equal(Vector2(-3.0, 0.0), factor->unwhitenedError(values)));
|
values.insert(L(1), l1);
|
||||||
|
EXPECT(assert_equal(Vector2(-3., 0.), factor->unwhitenedError(values)));
|
||||||
}
|
}
|
||||||
|
|
||||||
|
static const double baseline = 5.;
|
||||||
static const double baseline = 5.0 ;
|
|
||||||
|
|
||||||
/* ************************************************************************* */
|
/* ************************************************************************* */
|
||||||
static vector<Point3> genPoint3() {
|
static vector<Point3> genPoint3() {
|
||||||
const double z = 5;
|
const double z = 5;
|
||||||
vector<Point3> landmarks ;
|
vector<Point3> landmarks;
|
||||||
landmarks.push_back(Point3 (-1.0,-1.0, z));
|
landmarks.push_back(Point3(-1., -1., z));
|
||||||
landmarks.push_back(Point3 (-1.0, 1.0, z));
|
landmarks.push_back(Point3(-1., 1., z));
|
||||||
landmarks.push_back(Point3 ( 1.0, 1.0, z));
|
landmarks.push_back(Point3(1., 1., z));
|
||||||
landmarks.push_back(Point3 ( 1.0,-1.0, z));
|
landmarks.push_back(Point3(1., -1., z));
|
||||||
landmarks.push_back(Point3 (-1.5,-1.5, 1.5*z));
|
landmarks.push_back(Point3(-1.5, -1.5, 1.5 * z));
|
||||||
landmarks.push_back(Point3 (-1.5, 1.5, 1.5*z));
|
landmarks.push_back(Point3(-1.5, 1.5, 1.5 * z));
|
||||||
landmarks.push_back(Point3 ( 1.5, 1.5, 1.5*z));
|
landmarks.push_back(Point3(1.5, 1.5, 1.5 * z));
|
||||||
landmarks.push_back(Point3 ( 1.5,-1.5, 1.5*z));
|
landmarks.push_back(Point3(1.5, -1.5, 1.5 * z));
|
||||||
landmarks.push_back(Point3 (-2.0,-2.0, 2*z));
|
landmarks.push_back(Point3(-2., -2., 2 * z));
|
||||||
landmarks.push_back(Point3 (-2.0, 2.0, 2*z));
|
landmarks.push_back(Point3(-2., 2., 2 * z));
|
||||||
landmarks.push_back(Point3 ( 2.0, 2.0, 2*z));
|
landmarks.push_back(Point3(2., 2., 2 * z));
|
||||||
landmarks.push_back(Point3 ( 2.0,-2.0, 2*z));
|
landmarks.push_back(Point3(2., -2., 2 * z));
|
||||||
return landmarks ;
|
return landmarks;
|
||||||
}
|
}
|
||||||
|
|
||||||
static vector<GeneralCamera> genCameraDefaultCalibration() {
|
static vector<GeneralCamera> genCameraDefaultCalibration() {
|
||||||
vector<GeneralCamera> cameras ;
|
vector<GeneralCamera> cameras;
|
||||||
cameras.push_back(GeneralCamera(Pose3(eye(3),Point3(-baseline/2.0, 0.0, 0.0))));
|
cameras.push_back(
|
||||||
cameras.push_back(GeneralCamera(Pose3(eye(3),Point3( baseline/2.0, 0.0, 0.0))));
|
GeneralCamera(Pose3(Rot3(), Point3(-baseline / 2., 0., 0.))));
|
||||||
return cameras ;
|
cameras.push_back(
|
||||||
|
GeneralCamera(Pose3(Rot3(), Point3(baseline / 2., 0., 0.))));
|
||||||
|
return cameras;
|
||||||
}
|
}
|
||||||
|
|
||||||
static vector<GeneralCamera> genCameraVariableCalibration() {
|
static vector<GeneralCamera> genCameraVariableCalibration() {
|
||||||
const Cal3Bundler K(500,1e-3,1e-3);
|
const Cal3Bundler K(500, 1e-3, 1e-3);
|
||||||
vector<GeneralCamera> cameras ;
|
vector<GeneralCamera> cameras;
|
||||||
cameras.push_back(GeneralCamera(Pose3(eye(3),Point3(-baseline/2.0, 0.0, 0.0)), K));
|
cameras.push_back(
|
||||||
cameras.push_back(GeneralCamera(Pose3(eye(3),Point3( baseline/2.0, 0.0, 0.0)), K));
|
GeneralCamera(Pose3(Rot3(), Point3(-baseline / 2., 0., 0.)), K));
|
||||||
return cameras ;
|
cameras.push_back(
|
||||||
|
GeneralCamera(Pose3(Rot3(), Point3(baseline / 2., 0., 0.)), K));
|
||||||
|
return cameras;
|
||||||
}
|
}
|
||||||
|
|
||||||
static boost::shared_ptr<Ordering> getOrdering(const std::vector<GeneralCamera>& cameras, const std::vector<Point3>& landmarks) {
|
static boost::shared_ptr<Ordering> getOrdering(
|
||||||
|
const std::vector<GeneralCamera>& cameras,
|
||||||
|
const std::vector<Point3>& landmarks) {
|
||||||
boost::shared_ptr<Ordering> ordering(new Ordering);
|
boost::shared_ptr<Ordering> ordering(new Ordering);
|
||||||
for ( size_t i = 0 ; i < landmarks.size() ; ++i ) ordering->push_back(L(i)) ;
|
for (size_t i = 0; i < landmarks.size(); ++i)
|
||||||
for ( size_t i = 0 ; i < cameras.size() ; ++i ) ordering->push_back(X(i)) ;
|
ordering->push_back(L(i));
|
||||||
return ordering ;
|
for (size_t i = 0; i < cameras.size(); ++i)
|
||||||
|
ordering->push_back(X(i));
|
||||||
|
return ordering;
|
||||||
}
|
}
|
||||||
|
|
||||||
/* ************************************************************************* */
|
/* ************************************************************************* */
|
||||||
|
@@ -166,32 +171,32 @@ TEST( GeneralSFMFactor_Cal3Bundler, optimize_defaultK ) {
|
||||||
|
|
||||||
// add measurement with noise
|
// add measurement with noise
|
||||||
Graph graph;
|
Graph graph;
|
||||||
for ( size_t j = 0 ; j < cameras.size() ; ++j) {
|
for (size_t j = 0; j < cameras.size(); ++j) {
|
||||||
for ( size_t i = 0 ; i < landmarks.size() ; ++i) {
|
for (size_t i = 0; i < landmarks.size(); ++i) {
|
||||||
Point2 pt = cameras[j].project(landmarks[i]) ;
|
Point2 pt = cameras[j].project(landmarks[i]);
|
||||||
graph.addMeasurement(j, i, pt, sigma1);
|
graph.addMeasurement(j, i, pt, sigma1);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
const size_t nMeasurements = cameras.size()*landmarks.size() ;
|
const size_t nMeasurements = cameras.size() * landmarks.size();
|
||||||
|
|
||||||
// add initial
|
// add initial
|
||||||
const double noise = baseline*0.1;
|
const double noise = baseline * 0.1;
|
||||||
Values values;
|
Values values;
|
||||||
for ( size_t i = 0 ; i < cameras.size() ; ++i )
|
for (size_t i = 0; i < cameras.size(); ++i)
|
||||||
values.insert(X(i), cameras[i]) ;
|
values.insert(X(i), cameras[i]);
|
||||||
|
|
||||||
for ( size_t i = 0 ; i < landmarks.size() ; ++i ) {
|
for (size_t i = 0; i < landmarks.size(); ++i) {
|
||||||
Point3 pt(landmarks[i].x()+noise*getGaussian(),
|
Point3 pt(landmarks[i].x() + noise * getGaussian(),
|
||||||
landmarks[i].y()+noise*getGaussian(),
|
landmarks[i].y() + noise * getGaussian(),
|
||||||
landmarks[i].z()+noise*getGaussian());
|
landmarks[i].z() + noise * getGaussian());
|
||||||
values.insert(L(i), pt) ;
|
values.insert(L(i), pt);
|
||||||
}
|
}
|
||||||
|
|
||||||
graph.addCameraConstraint(0, cameras[0]);
|
graph.addCameraConstraint(0, cameras[0]);
|
||||||
|
|
||||||
// Create an ordering of the variables
|
// Create an ordering of the variables
|
||||||
Ordering ordering = *getOrdering(cameras,landmarks);
|
Ordering ordering = *getOrdering(cameras, landmarks);
|
||||||
LevenbergMarquardtOptimizer optimizer(graph, values, ordering);
|
LevenbergMarquardtOptimizer optimizer(graph, values, ordering);
|
||||||
Values final = optimizer.optimize();
|
Values final = optimizer.optimize();
|
||||||
EXPECT(optimizer.error() < 0.5 * 1e-5 * nMeasurements);
|
EXPECT(optimizer.error() < 0.5 * 1e-5 * nMeasurements);
|
||||||
|
@@ -203,38 +208,37 @@ TEST( GeneralSFMFactor_Cal3Bundler, optimize_varK_SingleMeasurementError ) {
|
||||||
vector<GeneralCamera> cameras = genCameraVariableCalibration();
|
vector<GeneralCamera> cameras = genCameraVariableCalibration();
|
||||||
// add measurement with noise
|
// add measurement with noise
|
||||||
Graph graph;
|
Graph graph;
|
||||||
for ( size_t j = 0 ; j < cameras.size() ; ++j) {
|
for (size_t j = 0; j < cameras.size(); ++j) {
|
||||||
for ( size_t i = 0 ; i < landmarks.size() ; ++i) {
|
for (size_t i = 0; i < landmarks.size(); ++i) {
|
||||||
Point2 pt = cameras[j].project(landmarks[i]) ;
|
Point2 pt = cameras[j].project(landmarks[i]);
|
||||||
graph.addMeasurement(j, i, pt, sigma1);
|
graph.addMeasurement(j, i, pt, sigma1);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
const size_t nMeasurements = cameras.size()*landmarks.size() ;
|
const size_t nMeasurements = cameras.size() * landmarks.size();
|
||||||
|
|
||||||
// add initial
|
// add initial
|
||||||
const double noise = baseline*0.1;
|
const double noise = baseline * 0.1;
|
||||||
Values values;
|
Values values;
|
||||||
for ( size_t i = 0 ; i < cameras.size() ; ++i )
|
for (size_t i = 0; i < cameras.size(); ++i)
|
||||||
values.insert(X(i), cameras[i]) ;
|
values.insert(X(i), cameras[i]);
|
||||||
|
|
||||||
// add noise only to the first landmark
|
// add noise only to the first landmark
|
||||||
for ( size_t i = 0 ; i < landmarks.size() ; ++i ) {
|
for (size_t i = 0; i < landmarks.size(); ++i) {
|
||||||
if ( i == 0 ) {
|
if (i == 0) {
|
||||||
Point3 pt(landmarks[i].x()+noise*getGaussian(),
|
Point3 pt(landmarks[i].x() + noise * getGaussian(),
|
||||||
landmarks[i].y()+noise*getGaussian(),
|
landmarks[i].y() + noise * getGaussian(),
|
||||||
landmarks[i].z()+noise*getGaussian());
|
landmarks[i].z() + noise * getGaussian());
|
||||||
values.insert(L(i), pt) ;
|
values.insert(L(i), pt);
|
||||||
}
|
} else {
|
||||||
else {
|
values.insert(L(i), landmarks[i]);
|
||||||
values.insert(L(i), landmarks[i]) ;
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
graph.addCameraConstraint(0, cameras[0]);
|
graph.addCameraConstraint(0, cameras[0]);
|
||||||
const double reproj_error = 1e-5;
|
const double reproj_error = 1e-5;
|
||||||
|
|
||||||
Ordering ordering = *getOrdering(cameras,landmarks);
|
Ordering ordering = *getOrdering(cameras, landmarks);
|
||||||
LevenbergMarquardtOptimizer optimizer(graph, values, ordering);
|
LevenbergMarquardtOptimizer optimizer(graph, values, ordering);
|
||||||
Values final = optimizer.optimize();
|
Values final = optimizer.optimize();
|
||||||
EXPECT(optimizer.error() < 0.5 * reproj_error * nMeasurements);
|
EXPECT(optimizer.error() < 0.5 * reproj_error * nMeasurements);
|
||||||
|
@@ -247,35 +251,35 @@ TEST( GeneralSFMFactor_Cal3Bundler, optimize_varK_FixCameras ) {
|
||||||
vector<GeneralCamera> cameras = genCameraVariableCalibration();
|
vector<GeneralCamera> cameras = genCameraVariableCalibration();
|
||||||
|
|
||||||
// add measurement with noise
|
// add measurement with noise
|
||||||
const double noise = baseline*0.1;
|
const double noise = baseline * 0.1;
|
||||||
Graph graph;
|
Graph graph;
|
||||||
for ( size_t j = 0 ; j < cameras.size() ; ++j) {
|
for (size_t j = 0; j < cameras.size(); ++j) {
|
||||||
for ( size_t i = 0 ; i < landmarks.size() ; ++i) {
|
for (size_t i = 0; i < landmarks.size(); ++i) {
|
||||||
Point2 pt = cameras[j].project(landmarks[i]) ;
|
Point2 pt = cameras[j].project(landmarks[i]);
|
||||||
graph.addMeasurement(j, i, pt, sigma1);
|
graph.addMeasurement(j, i, pt, sigma1);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
const size_t nMeasurements = landmarks.size()*cameras.size();
|
const size_t nMeasurements = landmarks.size() * cameras.size();
|
||||||
|
|
||||||
Values values;
|
Values values;
|
||||||
for ( size_t i = 0 ; i < cameras.size() ; ++i )
|
for (size_t i = 0; i < cameras.size(); ++i)
|
||||||
values.insert(X(i), cameras[i]) ;
|
values.insert(X(i), cameras[i]);
|
||||||
|
|
||||||
for ( size_t i = 0 ; i < landmarks.size() ; ++i ) {
|
for (size_t i = 0; i < landmarks.size(); ++i) {
|
||||||
Point3 pt(landmarks[i].x()+noise*getGaussian(),
|
Point3 pt(landmarks[i].x() + noise * getGaussian(),
|
||||||
landmarks[i].y()+noise*getGaussian(),
|
landmarks[i].y() + noise * getGaussian(),
|
||||||
landmarks[i].z()+noise*getGaussian());
|
landmarks[i].z() + noise * getGaussian());
|
||||||
//Point3 pt(landmarks[i].x(), landmarks[i].y(), landmarks[i].z());
|
//Point3 pt(landmarks[i].x(), landmarks[i].y(), landmarks[i].z());
|
||||||
values.insert(L(i), pt) ;
|
values.insert(L(i), pt);
|
||||||
}
|
}
|
||||||
|
|
||||||
for ( size_t i = 0 ; i < cameras.size() ; ++i )
|
for (size_t i = 0; i < cameras.size(); ++i)
|
||||||
graph.addCameraConstraint(i, cameras[i]);
|
graph.addCameraConstraint(i, cameras[i]);
|
||||||
|
|
||||||
const double reproj_error = 1e-5 ;
|
const double reproj_error = 1e-5;
|
||||||
|
|
||||||
Ordering ordering = *getOrdering(cameras,landmarks);
|
Ordering ordering = *getOrdering(cameras, landmarks);
|
||||||
LevenbergMarquardtOptimizer optimizer(graph, values, ordering);
|
LevenbergMarquardtOptimizer optimizer(graph, values, ordering);
|
||||||
Values final = optimizer.optimize();
|
Values final = optimizer.optimize();
|
||||||
EXPECT(optimizer.error() < 0.5 * reproj_error * nMeasurements);
|
EXPECT(optimizer.error() < 0.5 * reproj_error * nMeasurements);
|
||||||
|
@@ -289,46 +293,43 @@ TEST( GeneralSFMFactor_Cal3Bundler, optimize_varK_FixLandmarks ) {
|
||||||
|
|
||||||
// add measurement with noise
|
// add measurement with noise
|
||||||
Graph graph;
|
Graph graph;
|
||||||
for ( size_t j = 0 ; j < cameras.size() ; ++j) {
|
for (size_t j = 0; j < cameras.size(); ++j) {
|
||||||
for ( size_t i = 0 ; i < landmarks.size() ; ++i) {
|
for (size_t i = 0; i < landmarks.size(); ++i) {
|
||||||
Point2 pt = cameras[j].project(landmarks[i]) ;
|
Point2 pt = cameras[j].project(landmarks[i]);
|
||||||
graph.addMeasurement(j, i, pt, sigma1);
|
graph.addMeasurement(j, i, pt, sigma1);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
const size_t nMeasurements = landmarks.size()*cameras.size();
|
const size_t nMeasurements = landmarks.size() * cameras.size();
|
||||||
|
|
||||||
Values values;
|
Values values;
|
||||||
for ( size_t i = 0 ; i < cameras.size() ; ++i ) {
|
for (size_t i = 0; i < cameras.size(); ++i) {
|
||||||
const double
|
const double rot_noise = 1e-5, trans_noise = 1e-3, focal_noise = 1,
|
||||||
rot_noise = 1e-5, trans_noise = 1e-3,
|
distort_noise = 1e-3;
|
||||||
focal_noise = 1, distort_noise = 1e-3;
|
if (i == 0) {
|
||||||
if ( i == 0 ) {
|
values.insert(X(i), cameras[i]);
|
||||||
values.insert(X(i), cameras[i]) ;
|
} else {
|
||||||
}
|
|
||||||
else {
|
|
||||||
|
|
||||||
Vector delta = (Vector(9) <<
|
Vector delta = (Vector(9) << rot_noise, rot_noise, rot_noise, // rotation
|
||||||
rot_noise, rot_noise, rot_noise, // rotation
|
trans_noise, trans_noise, trans_noise, // translation
|
||||||
trans_noise, trans_noise, trans_noise, // translation
|
focal_noise, distort_noise, distort_noise // f, k1, k2
|
||||||
focal_noise, distort_noise, distort_noise // f, k1, k2
|
|
||||||
).finished();
|
).finished();
|
||||||
values.insert(X(i), cameras[i].retract(delta)) ;
|
values.insert(X(i), cameras[i].retract(delta));
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
for ( size_t i = 0 ; i < landmarks.size() ; ++i ) {
|
for (size_t i = 0; i < landmarks.size(); ++i) {
|
||||||
values.insert(L(i), landmarks[i]) ;
|
values.insert(L(i), landmarks[i]);
|
||||||
}
|
}
|
||||||
|
|
||||||
// fix X0 and all landmarks, allow only the cameras[1] to move
|
// fix X0 and all landmarks, allow only the cameras[1] to move
|
||||||
graph.addCameraConstraint(0, cameras[0]);
|
graph.addCameraConstraint(0, cameras[0]);
|
||||||
for ( size_t i = 0 ; i < landmarks.size() ; ++i )
|
for (size_t i = 0; i < landmarks.size(); ++i)
|
||||||
graph.addPoint3Constraint(i, landmarks[i]);
|
graph.addPoint3Constraint(i, landmarks[i]);
|
||||||
|
|
||||||
const double reproj_error = 1e-5 ;
|
const double reproj_error = 1e-5;
|
||||||
|
|
||||||
Ordering ordering = *getOrdering(cameras,landmarks);
|
Ordering ordering = *getOrdering(cameras, landmarks);
|
||||||
LevenbergMarquardtOptimizer optimizer(graph, values, ordering);
|
LevenbergMarquardtOptimizer optimizer(graph, values, ordering);
|
||||||
Values final = optimizer.optimize();
|
Values final = optimizer.optimize();
|
||||||
EXPECT(optimizer.error() < 0.5 * reproj_error * nMeasurements);
|
EXPECT(optimizer.error() < 0.5 * reproj_error * nMeasurements);
|
||||||
|
@@ -341,43 +342,48 @@ TEST( GeneralSFMFactor_Cal3Bundler, optimize_varK_BA ) {
|
||||||
|
|
||||||
// add measurement with noise
|
// add measurement with noise
|
||||||
Graph graph;
|
Graph graph;
|
||||||
for ( size_t j = 0 ; j < cameras.size() ; ++j) {
|
for (size_t j = 0; j < cameras.size(); ++j) {
|
||||||
for ( size_t i = 0 ; i < landmarks.size() ; ++i) {
|
for (size_t i = 0; i < landmarks.size(); ++i) {
|
||||||
Point2 pt = cameras[j].project(landmarks[i]) ;
|
Point2 pt = cameras[j].project(landmarks[i]);
|
||||||
graph.addMeasurement(j, i, pt, sigma1);
|
graph.addMeasurement(j, i, pt, sigma1);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
const size_t nMeasurements = cameras.size()*landmarks.size() ;
|
const size_t nMeasurements = cameras.size() * landmarks.size();
|
||||||
|
|
||||||
// add initial
|
// add initial
|
||||||
const double noise = baseline*0.1;
|
const double noise = baseline * 0.1;
|
||||||
Values values;
|
Values values;
|
||||||
for ( size_t i = 0 ; i < cameras.size() ; ++i )
|
for (size_t i = 0; i < cameras.size(); ++i)
|
||||||
values.insert(X(i), cameras[i]) ;
|
values.insert(X(i), cameras[i]);
|
||||||
|
|
||||||
// add noise only to the first landmark
|
// add noise only to the first landmark
|
||||||
for ( size_t i = 0 ; i < landmarks.size() ; ++i ) {
|
for (size_t i = 0; i < landmarks.size(); ++i) {
|
||||||
Point3 pt(landmarks[i].x()+noise*getGaussian(),
|
Point3 pt(landmarks[i].x() + noise * getGaussian(),
|
||||||
landmarks[i].y()+noise*getGaussian(),
|
landmarks[i].y() + noise * getGaussian(),
|
||||||
landmarks[i].z()+noise*getGaussian());
|
landmarks[i].z() + noise * getGaussian());
|
||||||
values.insert(L(i), pt) ;
|
values.insert(L(i), pt);
|
||||||
}
|
}
|
||||||
|
|
||||||
// Constrain position of system with the first camera constrained to the origin
|
// Constrain position of system with the first camera constrained to the origin
|
||||||
graph.addCameraConstraint(0, cameras[0]);
|
graph.addCameraConstraint(0, cameras[0]);
|
||||||
|
|
||||||
// Constrain the scale of the problem with a soft range factor of 1m between the cameras
|
// Constrain the scale of the problem with a soft range factor of 1m between the cameras
|
||||||
graph.push_back(RangeFactor<GeneralCamera,GeneralCamera>(X(0), X(1), 2.0, noiseModel::Isotropic::Sigma(1, 10.0)));
|
graph.push_back(
|
||||||
|
RangeFactor<GeneralCamera, GeneralCamera>(X(0), X(1), 2.,
|
||||||
|
noiseModel::Isotropic::Sigma(1, 10.)));
|
||||||
|
|
||||||
const double reproj_error = 1e-5 ;
|
const double reproj_error = 1e-5;
|
||||||
|
|
||||||
Ordering ordering = *getOrdering(cameras,landmarks);
|
Ordering ordering = *getOrdering(cameras, landmarks);
|
||||||
LevenbergMarquardtOptimizer optimizer(graph, values, ordering);
|
LevenbergMarquardtOptimizer optimizer(graph, values, ordering);
|
||||||
Values final = optimizer.optimize();
|
Values final = optimizer.optimize();
|
||||||
EXPECT(optimizer.error() < 0.5 * reproj_error * nMeasurements);
|
EXPECT(optimizer.error() < 0.5 * reproj_error * nMeasurements);
|
||||||
}
|
}
|
||||||
|
|
||||||
/* ************************************************************************* */
|
/* ************************************************************************* */
|
||||||
int main() { TestResult tr; return TestRegistry::runAllTests(tr); }
|
int main() {
|
||||||
|
TestResult tr;
|
||||||
|
return TestRegistry::runAllTests(tr);
|
||||||
|
}
|
||||||
/* ************************************************************************* */
|
/* ************************************************************************* */
|
||||||
|
|
|
@@ -46,10 +46,10 @@ TEST( JunctionTree, constructor )
|
||||||
frontal1 = list_of(2)(3),
|
frontal1 = list_of(2)(3),
|
||||||
frontal2 = list_of(0)(1),
|
frontal2 = list_of(0)(1),
|
||||||
sep1, sep2 = list_of(2);
|
sep1, sep2 = list_of(2);
|
||||||
EXPECT(assert_container_equality(frontal1, actual.roots().front()->keys));
|
EXPECT(assert_container_equality(frontal1, actual.roots().front()->orderedFrontalKeys));
|
||||||
//EXPECT(assert_equal(sep1, actual.roots().front()->separator));
|
//EXPECT(assert_equal(sep1, actual.roots().front()->separator));
|
||||||
LONGS_EQUAL(1, (long)actual.roots().front()->factors.size());
|
LONGS_EQUAL(1, (long)actual.roots().front()->factors.size());
|
||||||
EXPECT(assert_container_equality(frontal2, actual.roots().front()->children.front()->keys));
|
EXPECT(assert_container_equality(frontal2, actual.roots().front()->children.front()->orderedFrontalKeys));
|
||||||
//EXPECT(assert_equal(sep2, actual.roots().front()->children.front()->separator));
|
//EXPECT(assert_equal(sep2, actual.roots().front()->children.front()->separator));
|
||||||
LONGS_EQUAL(2, (long)actual.roots().front()->children.front()->factors.size());
|
LONGS_EQUAL(2, (long)actual.roots().front()->children.front()->factors.size());
|
||||||
EXPECT(assert_equal(*simpleChain[2], *actual.roots().front()->factors[0]));
|
EXPECT(assert_equal(*simpleChain[2], *actual.roots().front()->factors[0]));
|
||||||
|
|
|
@@ -0,0 +1,89 @@
|
||||||
|
/* ----------------------------------------------------------------------------
|
||||||
|
|
||||||
|
* GTSAM Copyright 2010, Georgia Tech Research Corporation,
|
||||||
|
* Atlanta, Georgia 30332-0415
|
||||||
|
* All Rights Reserved
|
||||||
|
* Authors: Frank Dellaert, et al. (see THANKS for the full author list)
|
||||||
|
|
||||||
|
* See LICENSE for the license information
|
||||||
|
|
||||||
|
* -------------------------------------------------------------------------- */
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @file timeSFMBAL.cpp
|
||||||
|
* @brief time structure from motion with BAL file
|
||||||
|
* @author Frank Dellaert
|
||||||
|
* @date June 6, 2015
|
||||||
|
*/
|
||||||
|
|
||||||
|
#include <gtsam/slam/dataset.h>
|
||||||
|
#include <gtsam/slam/GeneralSFMFactor.h>
|
||||||
|
#include <gtsam/geometry/Cal3Bundler.h>
|
||||||
|
#include <gtsam/geometry/PinholeCamera.h>
|
||||||
|
#include <gtsam/geometry/Point3.h>
|
||||||
|
#include <gtsam/nonlinear/LevenbergMarquardtOptimizer.h>
|
||||||
|
#include <gtsam/nonlinear/NonlinearFactorGraph.h>
|
||||||
|
#include <gtsam/nonlinear/Values.h>
|
||||||
|
#include <gtsam/linear/NoiseModel.h>
|
||||||
|
#include <gtsam/inference/FactorGraph.h>
|
||||||
|
#include <gtsam/inference/Symbol.h>
|
||||||
|
#include <gtsam/base/timing.h>
|
||||||
|
|
||||||
|
#include <boost/foreach.hpp>
|
||||||
|
#include <stddef.h>
|
||||||
|
#include <stdexcept>
|
||||||
|
#include <string>
|
||||||
|
|
||||||
|
using namespace std;
|
||||||
|
using namespace gtsam;
|
||||||
|
|
||||||
|
//#define TERNARY
|
||||||
|
|
||||||
|
int main(int argc, char* argv[]) {
|
||||||
|
typedef GeneralSFMFactor<PinholeCamera<Cal3Bundler>, Point3> sfmFactor;
|
||||||
|
using symbol_shorthand::P;
|
||||||
|
|
||||||
|
// Load BAL file (default is tiny)
|
||||||
|
string defaultFilename = findExampleDataFile("dubrovnik-3-7-pre");
|
||||||
|
SfM_data db;
|
||||||
|
bool success = readBAL(argc > 1 ? argv[1] : defaultFilename, db);
|
||||||
|
if (!success) throw runtime_error("Could not access file!");
|
||||||
|
|
||||||
|
// Build graph
|
||||||
|
SharedNoiseModel unit2 = noiseModel::Unit::Create(2);
|
||||||
|
NonlinearFactorGraph graph;
|
||||||
|
for (size_t j = 0; j < db.number_tracks(); j++) {
|
||||||
|
BOOST_FOREACH (const SfM_Measurement& m, db.tracks[j].measurements)
|
||||||
|
graph.push_back(sfmFactor(m.second, unit2, m.first, P(j)));
|
||||||
|
}
|
||||||
|
|
||||||
|
Values initial = initialCamerasAndPointsEstimate(db);
|
||||||
|
|
||||||
|
// Create Schur-complement ordering
|
||||||
|
#ifdef CCOLAMD
|
||||||
|
vector<Key> pointKeys;
|
||||||
|
for (size_t j = 0; j < db.number_tracks(); j++) pointKeys.push_back(P(j));
|
||||||
|
Ordering ordering = Ordering::colamdConstrainedFirst(graph, pointKeys, true);
|
||||||
|
#else
|
||||||
|
Ordering ordering;
|
||||||
|
for (size_t j = 0; j < db.number_tracks(); j++) ordering.push_back(P(j));
|
||||||
|
for (size_t i = 0; i < db.number_cameras(); i++) ordering.push_back(i);
|
||||||
|
#endif
|
||||||
|
|
||||||
|
// Optimize
|
||||||
|
// Set parameters to be similar to ceres
|
||||||
|
LevenbergMarquardtParams params;
|
||||||
|
params.setOrdering(ordering);
|
||||||
|
params.setVerbosity("ERROR");
|
||||||
|
params.setVerbosityLM("TRYLAMBDA");
|
||||||
|
params.setDiagonalDamping(true);
|
||||||
|
params.setlambdaInitial(1e-4);
|
||||||
|
params.setlambdaFactor(2.0);
|
||||||
|
LevenbergMarquardtOptimizer lm(graph, initial, params);
|
||||||
|
Values actual = lm.optimize();
|
||||||
|
|
||||||
|
tictoc_finishedIteration_();
|
||||||
|
tictoc_print_();
|
||||||
|
|
||||||
|
return 0;
|
||||||
|
}
|