Almost done. Still need to figure out how to remove the remaining 3 occurrences.

release/4.3a0
yao 2016-05-21 12:44:46 -04:00
parent f7ec58cde0
commit a0661b3813
47 changed files with 270 additions and 290 deletions
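
The mechanical rewrite applied across these 47 files replaces the BOOST_FOREACH macro with a C++11 range-based for loop. A minimal sketch of the pattern, with illustrative names only (not code from the diff):

#include <vector>
#include <boost/foreach.hpp>
#include <boost/shared_ptr.hpp>

// Before: the Boost macro form used throughout GTSAM.
size_t countBefore(const std::vector<boost::shared_ptr<int> >& factors) {
  size_t n = 0;
  BOOST_FOREACH(const boost::shared_ptr<int>& factor, factors)
    if (factor) ++n;
  return n;
}

// After: the equivalent C++11 range-based for loop; the loop body is unchanged.
size_t countAfter(const std::vector<boost::shared_ptr<int> >& factors) {
  size_t n = 0;
  for (const boost::shared_ptr<int>& factor : factors)
    if (factor) ++n;
  return n;
}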

View File

@ -919,7 +919,7 @@ TEST(Matrix, weighted_elimination )
// unpack and verify
i = 0;
for(boost::tie(r, di, sigma): solution){
BOOST_FOREACH(boost::tie(r, di, sigma), solution) {
EXPECT(assert_equal(r, expectedR.row(i))); // verify r
DOUBLES_EQUAL(d(i), di, 1e-8); // verify d
DOUBLES_EQUAL(newSigmas(i), sigma, 1e-5); // verify sigma
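
This hunk reverts an earlier, invalid conversion: boost::tie(r, di, sigma) is an expression, not a declaration, so for(boost::tie(r, di, sigma): solution) does not compile, and the loop stays on BOOST_FOREACH for now, presumably one of the three remaining occurrences the commit message refers to. A hedged sketch of a macro-free alternative, assuming solution holds boost::tuple<Vector, double, double> elements:

// Assumption: 'solution' is a container of boost::tuple<Vector, double, double>.
// Bind the whole tuple in the range-for, then unpack it in the body with boost::tie.
for (const boost::tuple<Vector, double, double>& entry : solution) {
  Vector r; double di, sigma;
  boost::tie(r, di, sigma) = entry;
  // ... use r, di and sigma exactly as the test body above does ...
}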

View File

@ -23,7 +23,6 @@
#include <gtsam/inference/FactorGraph.h>
#include <boost/foreach.hpp>
#include <boost/bind.hpp>
#include <stdio.h>
@ -66,7 +65,7 @@ namespace gtsam {
template<class FACTOR>
size_t FactorGraph<FACTOR>::nrFactors() const {
size_t size_ = 0;
BOOST_FOREACH(const sharedFactor& factor, factors_)
for(const sharedFactor& factor: factors_)
if (factor) size_++;
return size_;
}
@ -75,7 +74,7 @@ namespace gtsam {
template<class FACTOR>
KeySet FactorGraph<FACTOR>::keys() const {
KeySet keys;
BOOST_FOREACH(const sharedFactor& factor, this->factors_) {
for(const sharedFactor& factor: this->factors_) {
if(factor)
keys.insert(factor->begin(), factor->end());
}
@ -87,7 +86,7 @@ namespace gtsam {
KeyVector FactorGraph<FACTOR>::keyVector() const {
KeyVector keys;
keys.reserve(2 * size()); // guess at size
BOOST_FOREACH (const sharedFactor& factor, factors_)
for (const sharedFactor& factor: factors_)
if (factor)
keys.insert(keys.end(), factor->begin(), factor->end());
std::sort(keys.begin(), keys.end());

View File

@ -193,7 +193,7 @@ boost::shared_ptr<Values> composePoses(const G& graph, const PredecessorMap<KEY>
// attach the relative poses to the edges
PoseEdge edge12, edge21;
bool found1, found2;
BOOST_FOREACH(typename G::sharedFactor nl_factor, graph) {
for(typename G::sharedFactor nl_factor: graph) {
if (nl_factor->keys().size() > 2)
throw std::invalid_argument("composePoses: only support factors with at most two keys");
@ -243,7 +243,7 @@ PredecessorMap<KEY> findMinimumSpanningTree(const G& fg) {
// convert edge to string pairs
PredecessorMap<KEY> tree;
typename SDGraph<KEY>::vertex_iterator itVertex = boost::vertices(g).first;
BOOST_FOREACH(const typename SDGraph<KEY>::Vertex& vi, p_map){
for(const typename SDGraph<KEY>::Vertex& vi: p_map){
KEY key = boost::get(boost::vertex_name, g, *itVertex);
KEY parent = boost::get(boost::vertex_name, g, vi);
tree.insert(key, parent);
@ -258,7 +258,7 @@ void split(const G& g, const PredecessorMap<KEY>& tree, G& Ab1, G& Ab2) {
typedef typename G::sharedFactor F ;
BOOST_FOREACH(const F& factor, g)
for(const F& factor: g)
{
if (factor->keys().size() > 2)
throw(std::invalid_argument("split: only support factors with at most two keys"));

View File

@ -17,7 +17,6 @@
* @author Christian Potthast
*/
#include <boost/foreach.hpp>
#include <boost/range/adaptor/map.hpp>
#include <gtsam/linear/Errors.h>
#include <gtsam/linear/VectorValues.h>
@ -31,7 +30,7 @@ Errors::Errors(){}
/* ************************************************************************* */
Errors::Errors(const VectorValues& V) {
BOOST_FOREACH(const Vector& e, V | boost::adaptors::map_values) {
for(const Vector& e: V | boost::adaptors::map_values) {
push_back(e);
}
}
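
The map_values adaptor already yields a plain range, so conversions like this one are drop-in. A small self-contained sketch of the same pattern with illustrative types:

#include <map>
#include <boost/range/adaptor/map.hpp>
#include <Eigen/Core>

// Sum the norms of the mapped vectors, skipping the keys, just as the constructor above skips them.
double sumNorms(const std::map<int, Eigen::VectorXd>& values) {
  double total = 0.0;
  for (const Eigen::VectorXd& v : values | boost::adaptors::map_values)
    total += v.norm();
  return total;
}
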
@ -39,7 +38,7 @@ Errors::Errors(const VectorValues& V) {
/* ************************************************************************* */
void Errors::print(const std::string& s) const {
cout << s << endl;
BOOST_FOREACH(const Vector& v, *this)
for(const Vector& v: *this)
gtsam::print(v);
}
@ -66,7 +65,7 @@ Errors Errors::operator+(const Errors& b) const {
#endif
Errors result;
Errors::const_iterator it = b.begin();
BOOST_FOREACH(const Vector& ai, *this)
for(const Vector& ai: *this)
result.push_back(ai + *(it++));
return result;
}
@ -81,7 +80,7 @@ Errors Errors::operator-(const Errors& b) const {
#endif
Errors result;
Errors::const_iterator it = b.begin();
BOOST_FOREACH(const Vector& ai, *this)
for(const Vector& ai: *this)
result.push_back(ai - *(it++));
return result;
}
@ -89,7 +88,7 @@ Errors Errors::operator-(const Errors& b) const {
/* ************************************************************************* */
Errors Errors::operator-() const {
Errors result;
BOOST_FOREACH(const Vector& ai, *this)
for(const Vector& ai: *this)
result.push_back(-ai);
return result;
}
@ -105,7 +104,7 @@ double dot(const Errors& a, const Errors& b) {
#endif
double result = 0.0;
Errors::const_iterator it = b.begin();
BOOST_FOREACH(const Vector& ai, a)
for(const Vector& ai: a)
result += gtsam::dot(ai, *(it++));
return result;
}
@ -114,7 +113,7 @@ double dot(const Errors& a, const Errors& b) {
template<>
void axpy<Errors,Errors>(double alpha, const Errors& x, Errors& y) {
Errors::const_iterator it = x.begin();
BOOST_FOREACH(Vector& yi, y)
for(Vector& yi: y)
axpy(alpha,*(it++),yi);
}
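
The converted loop keeps the hand-rolled second iterator over x while range-iterating y. A hypothetical zip-style alternative over plain vectors, shown only as an illustration and not what this commit does, would use boost::combine (which the VectorValues.cpp changes further down already include):

#include <vector>
#include <boost/range/combine.hpp>
#include <boost/tuple/tuple.hpp>

// Hypothetical zip-style axpy sketch: y[i] += alpha * x[i], assuming x and y have equal size.
void axpySketch(double alpha, const std::vector<double>& x, std::vector<double>& y) {
  for (auto pair : boost::combine(x, y))  // each element is a boost tuple of references
    boost::get<1>(pair) += alpha * boost::get<0>(pair);
}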

View File

@ -25,7 +25,7 @@
using namespace std;
using namespace gtsam;
#define FOREACH_PAIR( KEY, VAL, COL) BOOST_FOREACH (boost::tie(KEY,VAL):COL)
#define FOREACH_PAIR( KEY, VAL, COL) BOOST_FOREACH (boost::tie(KEY,VAL),COL)
#define REVERSE_FOREACH_PAIR( KEY, VAL, COL) BOOST_REVERSE_FOREACH (boost::tie(KEY,VAL),COL)
namespace gtsam {
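
FOREACH_PAIR is another tie-based construct with no direct C++11 range-for equivalent. Where its call sites walk a key/value container, the usual macro-free replacement is a loop over the pairs themselves; a minimal sketch under that assumption, with an illustrative container type:

#include <map>

// Illustrative stand-in for the key/value container behind a FOREACH_PAIR call site.
typedef std::map<size_t, double> PairMap;

double sumValues(const PairMap& m) {
  double total = 0.0;
  for (const PairMap::value_type& keyValue : m) {  // value_type is std::pair<const size_t, double>
    // keyValue.first is the key, keyValue.second is the value
    total += keyValue.second;
  }
  return total;
}
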
@ -107,7 +107,7 @@ namespace gtsam {
// we loop from first-eliminated to last-eliminated
// i^th part of L*gy=gx is done block-column by block-column of L
BOOST_FOREACH(const sharedConditional& cg, *this)
for(const sharedConditional& cg: *this)
cg->solveTransposeInPlace(gy);
return gy;
@ -146,15 +146,15 @@ namespace gtsam {
KeySet keys = factorGraph.keys();
// add frontal keys in order
Ordering ordering;
BOOST_FOREACH (const sharedConditional& cg, *this)
for (const sharedConditional& cg: *this)
if (cg) {
BOOST_FOREACH (Key key, cg->frontals()) {
for (Key key: cg->frontals()) {
ordering.push_back(key);
keys.erase(key);
}
}
// add remaining keys in case Bayes net is incomplete
BOOST_FOREACH (Key key, keys)
for (Key key: keys)
ordering.push_back(key);
// return matrix and RHS
return factorGraph.jacobian(ordering);
@ -182,7 +182,7 @@ namespace gtsam {
double GaussianBayesNet::logDeterminant() const
{
double logDet = 0.0;
BOOST_FOREACH(const sharedConditional& cg, *this) {
for(const sharedConditional& cg: *this) {
if(cg->get_model()) {
Vector diag = cg->get_R().diagonal();
cg->get_model()->whitenInPlace(diag);

View File

@ -19,8 +19,6 @@
#pragma once
#include <boost/foreach.hpp>
#include <gtsam/linear/GaussianBayesTree.h> // Only to help Eclipse
#include <stdarg.h>
@ -35,7 +33,7 @@ void optimizeInPlace(const typename BAYESTREE::sharedClique& clique, VectorValue
clique->conditional()->solveInPlace(result);
// starting from the root, call optimize on each conditional
BOOST_FOREACH(const typename BAYESTREE::sharedClique& child, clique->children_)
for(const typename BAYESTREE::sharedClique& child: clique->children_)
optimizeInPlace<BAYESTREE>(child, result);
}
@ -48,7 +46,7 @@ double logDeterminant(const typename BAYESTREE::sharedClique& clique) {
result += clique->conditional()->get_R().diagonal().unaryExpr(std::ptr_fun<double,double>(log)).sum();
// sum of children
BOOST_FOREACH(const typename BAYESTREE::sharedClique& child, clique->children_)
for(const typename BAYESTREE::sharedClique& child: clique->children_)
result += logDeterminant<BAYESTREE>(child);
return result;

View File

@ -30,8 +30,6 @@
#include <gtsam/base/timing.h>
#include <gtsam/base/cholesky.h>
#include <boost/foreach.hpp>
using namespace std;
using namespace gtsam;
@ -50,7 +48,7 @@ namespace gtsam {
/* ************************************************************************* */
GaussianFactorGraph::Keys GaussianFactorGraph::keys() const {
KeySet keys;
BOOST_FOREACH(const sharedFactor& factor, *this)
for(const sharedFactor& factor: *this)
if (factor)
keys.insert(factor->begin(), factor->end());
return keys;
@ -59,7 +57,7 @@ namespace gtsam {
/* ************************************************************************* */
std::map<Key, size_t> GaussianFactorGraph::getKeyDimMap() const {
map<Key, size_t> spec;
BOOST_FOREACH ( const GaussianFactor::shared_ptr &gf, *this ) {
for ( const GaussianFactor::shared_ptr &gf: *this ) {
for ( GaussianFactor::const_iterator it = gf->begin() ; it != gf->end() ; it++ ) {
map<Key,size_t>::iterator it2 = spec.find(*it);
if ( it2 == spec.end() ) {
@ -80,7 +78,7 @@ namespace gtsam {
/* ************************************************************************* */
GaussianFactorGraph GaussianFactorGraph::clone() const {
GaussianFactorGraph result;
BOOST_FOREACH(const sharedFactor& f, *this) {
for(const sharedFactor& f: *this) {
if (f)
result.push_back(f->clone());
else
@ -92,7 +90,7 @@ namespace gtsam {
/* ************************************************************************* */
GaussianFactorGraph GaussianFactorGraph::negate() const {
GaussianFactorGraph result;
BOOST_FOREACH(const sharedFactor& f, *this) {
for(const sharedFactor& f: *this) {
if (f)
result.push_back(f->negate());
else
@ -106,7 +104,7 @@ namespace gtsam {
// First find dimensions of each variable
typedef std::map<Key, size_t> KeySizeMap;
KeySizeMap dims;
BOOST_FOREACH(const sharedFactor& factor, *this) {
for(const sharedFactor& factor: *this) {
if (!static_cast<bool>(factor)) continue;
for (GaussianFactor::const_iterator key = factor->begin();
@ -118,7 +116,7 @@ namespace gtsam {
// Compute first scalar column of each variable
size_t currentColIndex = 0;
KeySizeMap columnIndices = dims;
BOOST_FOREACH(const KeySizeMap::value_type& col, dims) {
for(const KeySizeMap::value_type& col: dims) {
columnIndices[col.first] = currentColIndex;
currentColIndex += dims[col.first];
}
@ -127,7 +125,7 @@ namespace gtsam {
typedef boost::tuple<size_t, size_t, double> triplet;
vector<triplet> entries;
size_t row = 0;
BOOST_FOREACH(const sharedFactor& factor, *this) {
for(const sharedFactor& factor: *this) {
if (!static_cast<bool>(factor)) continue;
// Convert to JacobianFactor if necessary
@ -229,7 +227,7 @@ namespace gtsam {
/* ************************************************************************* */
VectorValues GaussianFactorGraph::hessianDiagonal() const {
VectorValues d;
BOOST_FOREACH(const sharedFactor& factor, *this) {
for(const sharedFactor& factor: *this) {
if(factor){
VectorValues di = factor->hessianDiagonal();
d.addInPlace_(di);
@ -241,7 +239,7 @@ namespace gtsam {
/* ************************************************************************* */
map<Key,Matrix> GaussianFactorGraph::hessianBlockDiagonal() const {
map<Key,Matrix> blocks;
BOOST_FOREACH(const sharedFactor& factor, *this) {
for(const sharedFactor& factor: *this) {
if (!factor) continue;
map<Key,Matrix> BD = factor->hessianBlockDiagonal();
map<Key,Matrix>::const_iterator it = BD.begin();
@ -279,7 +277,7 @@ namespace gtsam {
VectorValues GaussianFactorGraph::gradient(const VectorValues& x0) const
{
VectorValues g = VectorValues::Zero(x0);
BOOST_FOREACH(const sharedFactor& Ai_G, *this) {
for(const sharedFactor& Ai_G: *this) {
JacobianFactor::shared_ptr Ai = convertToJacobianFactorPtr(Ai_G);
Vector e = Ai->error_vector(x0);
Ai->transposeMultiplyAdd(1.0, e, g);
@ -291,7 +289,7 @@ namespace gtsam {
VectorValues GaussianFactorGraph::gradientAtZero() const {
// Zero-out the gradient
VectorValues g;
BOOST_FOREACH(const sharedFactor& factor, *this) {
for(const sharedFactor& factor: *this) {
if (!factor) continue;
VectorValues gi = factor->gradientAtZero();
g.addInPlace_(gi);
@ -331,7 +329,7 @@ namespace gtsam {
/* ************************************************************************* */
Errors GaussianFactorGraph::operator*(const VectorValues& x) const {
Errors e;
BOOST_FOREACH(const GaussianFactor::shared_ptr& Ai_G, *this) {
for(const GaussianFactor::shared_ptr& Ai_G: *this) {
JacobianFactor::shared_ptr Ai = convertToJacobianFactorPtr(Ai_G);
e.push_back((*Ai) * x);
}
@ -341,7 +339,7 @@ namespace gtsam {
/* ************************************************************************* */
void GaussianFactorGraph::multiplyHessianAdd(double alpha,
const VectorValues& x, VectorValues& y) const {
BOOST_FOREACH(const GaussianFactor::shared_ptr& f, *this)
for(const GaussianFactor::shared_ptr& f: *this)
f->multiplyHessianAdd(alpha, x, y);
}
@ -353,7 +351,7 @@ namespace gtsam {
/* ************************************************************************* */
void GaussianFactorGraph::multiplyInPlace(const VectorValues& x, const Errors::iterator& e) const {
Errors::iterator ei = e;
BOOST_FOREACH(const GaussianFactor::shared_ptr& Ai_G, *this) {
for(const GaussianFactor::shared_ptr& Ai_G: *this) {
JacobianFactor::shared_ptr Ai = convertToJacobianFactorPtr(Ai_G);
*ei = (*Ai)*x;
ei++;
@ -363,7 +361,7 @@ namespace gtsam {
/* ************************************************************************* */
bool hasConstraints(const GaussianFactorGraph& factors) {
typedef JacobianFactor J;
BOOST_FOREACH(const GaussianFactor::shared_ptr& factor, factors) {
for(const GaussianFactor::shared_ptr& factor: factors) {
J::shared_ptr jacobian(boost::dynamic_pointer_cast<J>(factor));
if (jacobian && jacobian->get_model() && jacobian->get_model()->isConstrained()) {
return true;
@ -378,7 +376,7 @@ namespace gtsam {
VectorValues& x) const {
// For each factor add the gradient contribution
Errors::const_iterator ei = e.begin();
BOOST_FOREACH(const sharedFactor& Ai_G, *this) {
for(const sharedFactor& Ai_G: *this) {
JacobianFactor::shared_ptr Ai = convertToJacobianFactorPtr(Ai_G);
Ai->transposeMultiplyAdd(alpha, *(ei++), x);
}
@ -387,7 +385,7 @@ namespace gtsam {
///* ************************************************************************* */
//void residual(const GaussianFactorGraph& fg, const VectorValues &x, VectorValues &r) {
// Key i = 0 ;
// BOOST_FOREACH(const GaussianFactor::shared_ptr& Ai_G, fg) {
// for(const GaussianFactor::shared_ptr& Ai_G: fg) {
// JacobianFactor::shared_ptr Ai = convertToJacobianFactorPtr(Ai_G);
// r[i] = Ai->getb();
// i++;
@ -401,7 +399,7 @@ namespace gtsam {
//void multiply(const GaussianFactorGraph& fg, const VectorValues &x, VectorValues &r) {
// r.setZero();
// Key i = 0;
// BOOST_FOREACH(const GaussianFactor::shared_ptr& Ai_G, fg) {
// for(const GaussianFactor::shared_ptr& Ai_G: fg) {
// JacobianFactor::shared_ptr Ai = convertToJacobianFactorPtr(Ai_G);
// Vector &y = r[i];
// for(JacobianFactor::const_iterator j = Ai->begin(); j != Ai->end(); ++j) {
@ -416,7 +414,7 @@ namespace gtsam {
{
VectorValues x;
Errors::const_iterator ei = e.begin();
BOOST_FOREACH(const sharedFactor& Ai_G, *this) {
for(const sharedFactor& Ai_G: *this) {
JacobianFactor::shared_ptr Ai = convertToJacobianFactorPtr(Ai_G);
for(JacobianFactor::const_iterator j = Ai->begin(); j != Ai->end(); ++j) {
// Create the value as a zero vector if it does not exist.
@ -434,7 +432,7 @@ namespace gtsam {
Errors GaussianFactorGraph::gaussianErrors(const VectorValues& x) const
{
Errors e;
BOOST_FOREACH(const sharedFactor& Ai_G, *this) {
for(const sharedFactor& Ai_G: *this) {
JacobianFactor::shared_ptr Ai = convertToJacobianFactorPtr(Ai_G);
e.push_back(Ai->error_vector(x));
}

View File

@ -144,7 +144,7 @@ namespace gtsam {
/** unnormalized error */
double error(const VectorValues& x) const {
double total_error = 0.;
BOOST_FOREACH(const sharedFactor& factor, *this){
for(const sharedFactor& factor: *this){
if(factor)
total_error += factor->error(x);
}

View File

@ -29,7 +29,6 @@
#include <gtsam/base/ThreadsafeException.h>
#include <gtsam/base/timing.h>
#include <boost/foreach.hpp>
#include <boost/format.hpp>
#include <boost/make_shared.hpp>
#include <boost/tuple/tuple.hpp>
@ -241,7 +240,7 @@ HessianFactor::HessianFactor(const GaussianFactorGraph& factors,
keys_.resize(n);
FastVector<DenseIndex> dims(n + 1);
DenseIndex slot = 0;
BOOST_FOREACH(const SlotEntry& slotentry, *scatter) {
for(const SlotEntry& slotentry: *scatter) {
keys_[slot] = slotentry.key;
dims[slot] = slotentry.dimension;
++slot;
@ -253,7 +252,7 @@ HessianFactor::HessianFactor(const GaussianFactorGraph& factors,
// Form A' * A
gttic(update);
BOOST_FOREACH(const GaussianFactor::shared_ptr& factor, factors)
for(const GaussianFactor::shared_ptr& factor: factors)
if (factor)
factor->updateHessian(keys_, &info_);
gttoc(update);

View File

@ -20,7 +20,6 @@
#include <gtsam/linear/GaussianFactorGraph.h>
#include <gtsam/linear/VectorValues.h>
#include <boost/algorithm/string.hpp>
#include <boost/foreach.hpp>
#include <iostream>
using namespace std;
@ -127,7 +126,7 @@ void KeyInfo::initialize(const GaussianFactorGraph &fg) {
/****************************************************************************/
vector<size_t> KeyInfo::colSpec() const {
std::vector<size_t> result(size(), 0);
BOOST_FOREACH ( const KeyInfo::value_type &item, *this ) {
for ( const KeyInfo::value_type &item: *this ) {
result[item.second.index()] = item.second.dim();
}
return result;
@ -136,7 +135,7 @@ vector<size_t> KeyInfo::colSpec() const {
/****************************************************************************/
VectorValues KeyInfo::x0() const {
VectorValues result;
BOOST_FOREACH ( const KeyInfo::value_type &item, *this ) {
for ( const KeyInfo::value_type &item: *this ) {
result.insert(item.first, Vector::Zero(item.second.dim()));
}
return result;

View File

@ -32,7 +32,6 @@
#include <gtsam/base/cholesky.h>
#include <boost/assign/list_of.hpp>
#include <boost/foreach.hpp>
#include <boost/format.hpp>
#include <boost/make_shared.hpp>
#include <boost/array.hpp>
@ -185,12 +184,12 @@ boost::tuple<FastVector<DenseIndex>, DenseIndex, DenseIndex> _countDims(
"Unable to determine dimensionality for all variables");
}
BOOST_FOREACH(const JacobianFactor::shared_ptr& factor, factors) {
for(const JacobianFactor::shared_ptr& factor: factors) {
m += factor->rows();
}
#ifdef GTSAM_EXTRA_CONSISTENCY_CHECKS
BOOST_FOREACH(DenseIndex d, varDims) {
for(DenseIndex d: varDims) {
assert(d != numeric_limits<DenseIndex>::max());
}
#endif
@ -204,7 +203,7 @@ FastVector<JacobianFactor::shared_ptr> _convertOrCastToJacobians(
gttic(Convert_to_Jacobians);
FastVector<JacobianFactor::shared_ptr> jacobians;
jacobians.reserve(factors.size());
BOOST_FOREACH(const GaussianFactor::shared_ptr& factor, factors) {
for(const GaussianFactor::shared_ptr& factor: factors) {
if (factor) {
if (JacobianFactor::shared_ptr jf = boost::dynamic_pointer_cast<
JacobianFactor>(factor))
@ -262,7 +261,7 @@ JacobianFactor::JacobianFactor(const GaussianFactorGraph& graph,
"The ordering provided to the JacobianFactor combine constructor\n"
"contained extra variables that did not appear in the factors to combine.");
// Add the remaining slots
BOOST_FOREACH(VariableSlots::const_iterator item, unorderedSlots) {
for(VariableSlots::const_iterator item: unorderedSlots) {
orderedSlots.push_back(item);
}
} else {
@ -292,7 +291,7 @@ JacobianFactor::JacobianFactor(const GaussianFactorGraph& graph,
// Loop over slots in combined factor and copy blocks from source factors
gttic(copy_blocks);
size_t combinedSlot = 0;
BOOST_FOREACH(VariableSlots::const_iterator varslot, orderedSlots) {
for(VariableSlots::const_iterator varslot: orderedSlots) {
JacobianFactor::ABlock destSlot(this->getA(this->begin() + combinedSlot));
// Loop over source jacobians
DenseIndex nextRow = 0;

View File

@ -19,7 +19,6 @@
#include <gtsam/linear/NoiseModel.h>
#include <gtsam/base/timing.h>
#include <boost/foreach.hpp>
#include <boost/format.hpp>
#include <boost/make_shared.hpp>
#include <boost/random/linear_congruential.hpp>
@ -211,7 +210,7 @@ SharedDiagonal Gaussian::QR(Matrix& Ab) const {
}
void Gaussian::WhitenSystem(vector<Matrix>& A, Vector& b) const {
BOOST_FOREACH(Matrix& Aj, A) { WhitenInPlace(Aj); }
for(Matrix& Aj: A) { WhitenInPlace(Aj); }
whitenInPlace(b);
}
@ -542,7 +541,7 @@ SharedDiagonal Constrained::QR(Matrix& Ab) const {
size_t i = 0; // start with first row
bool mixed = false;
Ab.setZero(); // make sure we don't look below
BOOST_FOREACH (const Triple& t, Rd) {
for (const Triple& t: Rd) {
const size_t& j = t.get<0>();
const Matrix& rd = t.get<1>();
precisions(i) = t.get<2>();
@ -647,14 +646,14 @@ void Base::reweight(Vector& error) const {
void Base::reweight(vector<Matrix> &A, Vector &error) const {
if ( reweight_ == Block ) {
const double w = sqrtWeight(error.norm());
BOOST_FOREACH(Matrix& Aj, A) {
for(Matrix& Aj: A) {
Aj *= w;
}
error *= w;
}
else {
const Vector W = sqrtWeight(error);
BOOST_FOREACH(Matrix& Aj, A) {
for(Matrix& Aj: A) {
vector_scale_inplace(W,Aj);
}
error = W.cwiseProduct(error);

View File

@ -22,7 +22,6 @@
#include <gtsam/linear/Preconditioner.h>
#include <gtsam/linear/VectorValues.h>
#include <boost/foreach.hpp>
#include <boost/algorithm/string.hpp>
#include <algorithm>
@ -150,7 +149,7 @@ VectorValues buildVectorValues(const Vector &v, const Ordering &ordering,
/**********************************************************************************/
VectorValues buildVectorValues(const Vector &v, const KeyInfo &keyInfo) {
VectorValues result;
BOOST_FOREACH ( const KeyInfo::value_type &item, keyInfo ) {
for ( const KeyInfo::value_type &item: keyInfo ) {
result.insert(item.first,
v.segment(item.second.colstart(), item.second.dim()));
}

View File

@ -145,7 +145,7 @@ void BlockJacobiPreconditioner::build(
/* getting the block diagonals over the factors */
std::map<Key, Matrix> hessianMap =gfg.hessianBlockDiagonal();
BOOST_FOREACH ( const Matrix hessian, hessianMap | boost::adaptors::map_values)
for ( const Matrix hessian: hessianMap | boost::adaptors::map_values)
blocks.push_back(hessian);
/* if necessary, allocating the memory for cacheing the factorization results */
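
Both the old and the new form of this loop bind each diagonal block by value (const Matrix), so every block is copied out of the adaptor before being pushed back. Binding by const reference avoids the copy; a one-line sketch of that alternative, offered as a suggestion rather than part of this commit:

for (const Matrix& hessian : hessianMap | boost::adaptors::map_values)
  blocks.push_back(hessian);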

View File

@ -20,7 +20,6 @@
#include <gtsam/linear/HessianFactor.h>
#include <gtsam/linear/RegularJacobianFactor.h>
#include <boost/foreach.hpp>
#include <vector>
namespace gtsam {
@ -114,7 +113,7 @@ public:
double* yvalues) const {
// Create a vector of temporary y_ values, corresponding to rows i
y_.resize(size());
BOOST_FOREACH(VectorD & yi, y_)
for(VectorD & yi: y_)
yi.setZero();
// Accessing the VectorValues one by one is expensive
@ -147,7 +146,7 @@ public:
// Create a vector of temporary y_ values, corresponding to rows i
y_.resize(size());
BOOST_FOREACH(VectorD & yi, y_)
for(VectorD & yi: y_)
yi.setZero();
// Accessing the VectorValues one by one is expensive

View File

@ -20,7 +20,6 @@
#include <gtsam/linear/Scatter.h>
#include <gtsam/inference/Ordering.h>
#include <boost/foreach.hpp>
#include <algorithm>
using namespace std;
@ -41,13 +40,13 @@ Scatter::Scatter(const GaussianFactorGraph& gfg,
// If we have an ordering, pre-fill the ordered variables first
if (ordering) {
BOOST_FOREACH (Key key, *ordering) {
for (Key key: *ordering) {
push_back(SlotEntry(key, 0));
}
}
// Now, find dimensions of variables and/or extend
BOOST_FOREACH (const GaussianFactor::shared_ptr& factor, gfg) {
for (const GaussianFactor::shared_ptr& factor: gfg) {
if (!factor) continue;
// TODO: Fix this hack to cope with zero-row Jacobians that come from BayesTreeOrphanWrappers

View File

@ -59,7 +59,7 @@ namespace gtsam {
/* ************************************************************************* */
static GaussianFactorGraph::shared_ptr convertToJacobianFactors(const GaussianFactorGraph &gfg) {
GaussianFactorGraph::shared_ptr result(new GaussianFactorGraph());
BOOST_FOREACH(const GaussianFactor::shared_ptr &gf, gfg) {
for(const GaussianFactor::shared_ptr &gf: gfg) {
JacobianFactor::shared_ptr jf = boost::dynamic_pointer_cast<JacobianFactor>(gf);
if( !jf ) {
jf = boost::make_shared<JacobianFactor>(*gf); // Convert any non-Jacobian factors to Jacobians (e.g. Hessian -> Jacobian with Cholesky)
@ -122,7 +122,7 @@ vector<size_t> uniqueSampler(const vector<double> &weight, const size_t n) {
/* sampling and cache results */
vector<size_t> samples = iidSampler(localWeights, n-count);
BOOST_FOREACH ( const size_t &id, samples ) {
for ( const size_t &id: samples ) {
if ( touched[id] == false ) {
touched[id] = true ;
result.push_back(id);
@ -136,7 +136,7 @@ vector<size_t> uniqueSampler(const vector<double> &weight, const size_t n) {
/****************************************************************************/
Subgraph::Subgraph(const std::vector<size_t> &indices) {
edges_.reserve(indices.size());
BOOST_FOREACH ( const size_t &idx, indices ) {
for ( const size_t &idx: indices ) {
edges_.push_back(SubgraphEdge(idx, 1.0));
}
}
@ -144,7 +144,7 @@ Subgraph::Subgraph(const std::vector<size_t> &indices) {
/****************************************************************************/
std::vector<size_t> Subgraph::edgeIndices() const {
std::vector<size_t> eid; eid.reserve(size());
BOOST_FOREACH ( const SubgraphEdge &edge, edges_ ) {
for ( const SubgraphEdge &edge: edges_ ) {
eid.push_back(edge.index_);
}
return eid;
@ -180,7 +180,7 @@ std::ostream &operator<<(std::ostream &os, const SubgraphEdge &edge) {
/****************************************************************************/
std::ostream &operator<<(std::ostream &os, const Subgraph &subgraph) {
os << "Subgraph" << endl;
BOOST_FOREACH ( const SubgraphEdge &e, subgraph.edges() ) {
for ( const SubgraphEdge &e: subgraph.edges() ) {
os << e << ", " ;
}
return os;
@ -286,7 +286,7 @@ std::vector<size_t> SubgraphBuilder::buildTree(const GaussianFactorGraph &gfg, c
std::vector<size_t> SubgraphBuilder::unary(const GaussianFactorGraph &gfg) const {
std::vector<size_t> result ;
size_t idx = 0;
BOOST_FOREACH ( const GaussianFactor::shared_ptr &gf, gfg ) {
for ( const GaussianFactor::shared_ptr &gf: gfg ) {
if ( gf->size() == 1 ) {
result.push_back(idx);
}
@ -299,7 +299,7 @@ std::vector<size_t> SubgraphBuilder::unary(const GaussianFactorGraph &gfg) const
std::vector<size_t> SubgraphBuilder::natural_chain(const GaussianFactorGraph &gfg) const {
std::vector<size_t> result ;
size_t idx = 0;
BOOST_FOREACH ( const GaussianFactor::shared_ptr &gf, gfg ) {
for ( const GaussianFactor::shared_ptr &gf: gfg ) {
if ( gf->size() == 2 ) {
const Key k0 = gf->keys()[0], k1 = gf->keys()[1];
if ( (k1-k0) == 1 || (k0-k1) == 1 )
@ -332,9 +332,9 @@ std::vector<size_t> SubgraphBuilder::bfs(const GaussianFactorGraph &gfg) const {
/* traversal */
while ( !q.empty() ) {
const size_t head = q.front(); q.pop();
BOOST_FOREACH ( const size_t id, variableIndex[head] ) {
for ( const size_t id: variableIndex[head] ) {
const GaussianFactor &gf = *gfg[id];
BOOST_FOREACH ( const size_t key, gf.keys() ) {
for ( const size_t key: gf.keys() ) {
if ( flags.count(key) == 0 ) {
q.push(key);
flags.insert(key);
@ -360,7 +360,7 @@ std::vector<size_t> SubgraphBuilder::kruskal(const GaussianFactorGraph &gfg, con
DSFVector D(n) ;
size_t count = 0 ; double sum = 0.0 ;
BOOST_FOREACH (const size_t id, idx) {
for (const size_t id: idx) {
const GaussianFactor &gf = *gfg[id];
if ( gf.keys().size() != 2 ) continue;
const size_t u = ordering.find(gf.keys()[0])->second,
@ -399,7 +399,7 @@ Subgraph::shared_ptr SubgraphBuilder::operator() (const GaussianFactorGraph &gfg
}
/* down weight the tree edges to zero */
BOOST_FOREACH ( const size_t id, tree ) {
for ( const size_t id: tree ) {
w[id] = 0.0;
}
@ -419,7 +419,7 @@ SubgraphBuilder::Weights SubgraphBuilder::weights(const GaussianFactorGraph &gfg
const size_t m = gfg.size() ;
Weights weight; weight.reserve(m);
BOOST_FOREACH(const GaussianFactor::shared_ptr &gf, gfg ) {
for(const GaussianFactor::shared_ptr &gf: gfg ) {
switch ( parameters_.skeletonWeight_ ) {
case SubgraphBuilderParameters::EQUAL:
weight.push_back(1.0);
@ -589,7 +589,7 @@ void SubgraphPreconditioner::transposeSolve(const Vector& y, Vector& x) const
std::copy(y.data(), y.data() + y.rows(), x.data());
/* in place back substitute */
BOOST_FOREACH(const boost::shared_ptr<GaussianConditional> & cg, *Rc1_) {
for(const boost::shared_ptr<GaussianConditional> & cg: *Rc1_) {
const Vector rhsFrontal = getSubvector(x, keyInfo_, FastVector<Key>(cg->beginFrontals(), cg->endFrontals()));
// const Vector solFrontal = cg->get_R().triangularView<Eigen::Upper>().transpose().solve(rhsFrontal);
const Vector solFrontal = cg->get_R().transpose().triangularView<Eigen::Lower>().solve(rhsFrontal);
@ -634,7 +634,7 @@ Vector getSubvector(const Vector &src, const KeyInfo &keyInfo, const FastVector<
/* figure out dimension by traversing the keys */
size_t d = 0;
BOOST_FOREACH ( const Key &key, keys ) {
for ( const Key &key: keys ) {
const KeyInfoEntry &entry = keyInfo.find(key)->second;
cache.push_back(make_pair(entry.colstart(), entry.dim()));
d += entry.dim();
@ -643,7 +643,7 @@ Vector getSubvector(const Vector &src, const KeyInfo &keyInfo, const FastVector<
/* use the cache to fill the result */
Vector result = Vector::Zero(d, 1);
size_t idx = 0;
BOOST_FOREACH ( const Cache::value_type &p, cache ) {
for ( const Cache::value_type &p: cache ) {
result.segment(idx, p.second) = src.segment(p.first, p.second) ;
idx += p.second;
}
@ -655,7 +655,7 @@ Vector getSubvector(const Vector &src, const KeyInfo &keyInfo, const FastVector<
void setSubvector(const Vector &src, const KeyInfo &keyInfo, const FastVector<Key> &keys, Vector &dst) {
/* use the cache */
size_t idx = 0;
BOOST_FOREACH ( const Key &key, keys ) {
for ( const Key &key: keys ) {
const KeyInfoEntry &entry = keyInfo.find(key)->second;
dst.segment(entry.colstart(), entry.dim()) = src.segment(idx, entry.dim()) ;
idx += entry.dim();
@ -668,7 +668,7 @@ buildFactorSubgraph(const GaussianFactorGraph &gfg, const Subgraph &subgraph, co
GaussianFactorGraph::shared_ptr result(new GaussianFactorGraph());
result->reserve(subgraph.size());
BOOST_FOREACH ( const SubgraphEdge &e, subgraph ) {
for ( const SubgraphEdge &e: subgraph ) {
const size_t idx = e.index();
if ( clone ) result->push_back(gfg[idx]->clone());
else result->push_back(gfg[idx]);

View File

@ -131,7 +131,7 @@ SubgraphSolver::splitGraph(const GaussianFactorGraph &jfg) {
GaussianFactorGraph::shared_ptr Ac(new GaussianFactorGraph());
size_t t = 0;
BOOST_FOREACH ( const GaussianFactor::shared_ptr &gf, jfg ) {
for ( const GaussianFactor::shared_ptr &gf: jfg ) {
if (gf->keys().size() > 2) {
throw runtime_error(

View File

@ -18,7 +18,6 @@
#include <gtsam/linear/VectorValues.h>
#include <boost/foreach.hpp>
#include <boost/bind.hpp>
#include <boost/range/combine.hpp>
#include <boost/range/numeric.hpp>
@ -48,7 +47,7 @@ namespace gtsam {
VectorValues::VectorValues(const Vector& x, const Dims& dims) {
typedef pair<Key, size_t> Pair;
size_t j = 0;
BOOST_FOREACH(const Pair& v, dims) {
for(const Pair& v: dims) {
Key key;
size_t n;
boost::tie(key, n) = v;
@ -61,7 +60,7 @@ namespace gtsam {
VectorValues VectorValues::Zero(const VectorValues& other)
{
VectorValues result;
BOOST_FOREACH(const KeyValuePair& v, other)
for(const KeyValuePair& v: other)
result.values_.insert(make_pair(v.first, Vector::Zero(v.second.size())));
return result;
}
@ -82,7 +81,7 @@ namespace gtsam {
void VectorValues::update(const VectorValues& values)
{
iterator hint = begin();
BOOST_FOREACH(const KeyValuePair& key_value, values)
for(const KeyValuePair& key_value: values)
{
// Use this trick to find the value using a hint, since we are inserting from another sorted map
size_t oldSize = values_.size();
@ -108,14 +107,14 @@ namespace gtsam {
/* ************************************************************************* */
void VectorValues::setZero()
{
BOOST_FOREACH(Vector& v, values_ | map_values)
for(Vector& v: values_ | map_values)
v.setZero();
}
/* ************************************************************************* */
void VectorValues::print(const string& str, const KeyFormatter& formatter) const {
cout << str << ": " << size() << " elements\n";
BOOST_FOREACH(const value_type& key_value, *this)
for(const value_type& key_value: *this)
cout << " " << formatter(key_value.first) << ": " << key_value.second.transpose() << "\n";
cout.flush();
}
@ -125,7 +124,7 @@ namespace gtsam {
if(this->size() != x.size())
return false;
typedef boost::tuple<value_type, value_type> ValuePair;
BOOST_FOREACH(const ValuePair& values, boost::combine(*this, x)) {
for(const ValuePair& values: boost::combine(*this, x)) {
if(values.get<0>().first != values.get<1>().first ||
!equal_with_abs_tol(values.get<0>().second, values.get<1>().second, tol))
return false;
@ -138,13 +137,13 @@ namespace gtsam {
{
// Count dimensions
DenseIndex totalDim = 0;
BOOST_FOREACH(const Vector& v, *this | map_values)
for(const Vector& v: *this | map_values)
totalDim += v.size();
// Copy vectors
Vector result(totalDim);
DenseIndex pos = 0;
BOOST_FOREACH(const Vector& v, *this | map_values) {
for(const Vector& v: *this | map_values) {
result.segment(pos, v.size()) = v;
pos += v.size();
}
@ -166,7 +165,7 @@ namespace gtsam {
// Copy vectors
Vector result(totalDim);
DenseIndex pos = 0;
BOOST_FOREACH(const Vector *v, items) {
for(const Vector *v: items) {
result.segment(pos, v->size()) = *v;
pos += v->size();
}
@ -179,11 +178,11 @@ namespace gtsam {
{
// Count dimensions
DenseIndex totalDim = 0;
BOOST_FOREACH(size_t dim, keys | map_values)
for(size_t dim: keys | map_values)
totalDim += dim;
Vector result(totalDim);
size_t j = 0;
BOOST_FOREACH(const Dims::value_type& it, keys) {
for(const Dims::value_type& it: keys) {
result.segment(j,it.second) = at(it.first);
j += it.second;
}
@ -221,7 +220,7 @@ namespace gtsam {
double result = 0.0;
typedef boost::tuple<value_type, value_type> ValuePair;
using boost::adaptors::map_values;
BOOST_FOREACH(const ValuePair& values, boost::combine(*this, v)) {
for(const ValuePair& values: boost::combine(*this, v)) {
assert_throw(values.get<0>().first == values.get<1>().first,
invalid_argument("VectorValues::dot called with a VectorValues of different structure"));
assert_throw(values.get<0>().second.size() == values.get<1>().second.size(),
@ -240,7 +239,7 @@ namespace gtsam {
double VectorValues::squaredNorm() const {
double sumSquares = 0.0;
using boost::adaptors::map_values;
BOOST_FOREACH(const Vector& v, *this | map_values)
for(const Vector& v: *this | map_values)
sumSquares += v.squaredNorm();
return sumSquares;
}
@ -329,7 +328,7 @@ namespace gtsam {
VectorValues operator*(const double a, const VectorValues &v)
{
VectorValues result;
BOOST_FOREACH(const VectorValues::KeyValuePair& key_v, v)
for(const VectorValues::KeyValuePair& key_v: v)
result.values_.insert(result.values_.end(), make_pair(key_v.first, a * key_v.second));
return result;
}
@ -343,7 +342,7 @@ namespace gtsam {
/* ************************************************************************* */
VectorValues& VectorValues::operator*=(double alpha)
{
BOOST_FOREACH(Vector& v, *this | map_values)
for(Vector& v: *this | map_values)
v *= alpha;
return *this;
}

View File

@ -55,7 +55,7 @@ namespace gtsam
OptimizeData myData;
myData.parentData = parentData;
// Take any ancestor results we'll need
BOOST_FOREACH(Key parent, clique->conditional_->parents())
for(Key parent: clique->conditional_->parents())
myData.cliqueResults.insert(std::make_pair(parent, myData.parentData->cliqueResults.at(parent)));
// Solve and store in our results
//collectedResult.insert(clique->conditional()->solve(collectedResult/*myData.ancestorResults*/));
@ -68,7 +68,7 @@ namespace gtsam
DenseIndex dim = 0;
FastVector<VectorValues::const_iterator> parentPointers;
parentPointers.reserve(clique->conditional()->nrParents());
BOOST_FOREACH(Key parent, clique->conditional()->parents()) {
for(Key parent: clique->conditional()->parents()) {
parentPointers.push_back(myData.cliqueResults.at(parent));
dim += parentPointers.back()->second.size();
}
@ -76,7 +76,7 @@ namespace gtsam
// Fill parent vector
xS.resize(dim);
DenseIndex vectorPos = 0;
BOOST_FOREACH(const VectorValues::const_iterator& parentPointer, parentPointers) {
for(const VectorValues::const_iterator& parentPointer: parentPointers) {
const Vector& parentVector = parentPointer->second;
xS.block(vectorPos,0,parentVector.size(),1) = parentVector.block(0,0,parentVector.size(),1);
vectorPos += parentVector.size();
@ -108,7 +108,7 @@ namespace gtsam
// OptimizeData myData;
// myData.parentData = parentData;
// // Take any ancestor results we'll need
// BOOST_FOREACH(Key parent, clique->conditional_->parents())
// for(Key parent: clique->conditional_->parents())
// myData.ancestorResults.insert(parent, myData.parentData->ancestorResults[parent]);
// // Solve and store in our results
// myData.results.insert(clique->conditional()->solve(myData.ancestorResults));

View File

@ -22,7 +22,6 @@
#include <CppUnitLite/TestHarness.h>
#include <boost/foreach.hpp>
#include <boost/assign/std/vector.hpp>
#include <iostream>
@ -58,32 +57,32 @@ TEST(NoiseModel, constructors)
m.push_back(Isotropic::Precision(3, prc,false));
// test kSigmas
BOOST_FOREACH(Gaussian::shared_ptr mi, m)
for(Gaussian::shared_ptr mi: m)
EXPECT(assert_equal(kSigmas,mi->sigmas()));
// test whiten
BOOST_FOREACH(Gaussian::shared_ptr mi, m)
for(Gaussian::shared_ptr mi: m)
EXPECT(assert_equal(whitened,mi->whiten(unwhitened)));
// test unwhiten
BOOST_FOREACH(Gaussian::shared_ptr mi, m)
for(Gaussian::shared_ptr mi: m)
EXPECT(assert_equal(unwhitened,mi->unwhiten(whitened)));
// test Mahalanobis distance
double distance = 5*5+10*10+15*15;
BOOST_FOREACH(Gaussian::shared_ptr mi, m)
for(Gaussian::shared_ptr mi: m)
DOUBLES_EQUAL(distance,mi->Mahalanobis(unwhitened),1e-9);
// test R matrix
BOOST_FOREACH(Gaussian::shared_ptr mi, m)
for(Gaussian::shared_ptr mi: m)
EXPECT(assert_equal(R,mi->R()));
// test covariance
BOOST_FOREACH(Gaussian::shared_ptr mi, m)
for(Gaussian::shared_ptr mi: m)
EXPECT(assert_equal(kCovariance,mi->covariance()));
// test covariance
BOOST_FOREACH(Gaussian::shared_ptr mi, m)
for(Gaussian::shared_ptr mi: m)
EXPECT(assert_equal(kCovariance.inverse(),mi->information()));
// test Whiten operator
@ -92,7 +91,7 @@ TEST(NoiseModel, constructors)
0.0, 1.0, 0.0, 1.0,
1.0, 0.0, 0.0, 1.0).finished());
Matrix expected = kInverseSigma * H;
BOOST_FOREACH(Gaussian::shared_ptr mi, m)
for(Gaussian::shared_ptr mi: m)
EXPECT(assert_equal(expected,mi->Whiten(H)));
// can only test inplace version once :-)
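
These test loops bind Gaussian::shared_ptr mi by value, as the macro versions did, which bumps the shared_ptr reference count on every iteration. That is harmless in a unit test; binding by const reference is the cheaper form if it ever matters, e.g. (a suggestion, not part of this commit):

for (const Gaussian::shared_ptr& mi : m)
  EXPECT(assert_equal(kSigmas, mi->sigmas()));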

View File

@ -93,7 +93,7 @@ void ISAM2::Impl::RemoveVariables(const KeySet& unusedKeys, const FastVector<ISA
KeySet& fixedVariables)
{
variableIndex.removeUnusedVariables(unusedKeys.begin(), unusedKeys.end());
BOOST_FOREACH(Key key, unusedKeys) {
for(Key key: unusedKeys) {
delta.erase(key);
deltaNewton.erase(key);
RgProd.erase(key);
@ -112,7 +112,7 @@ KeySet ISAM2::Impl::CheckRelinearizationFull(const VectorValues& delta,
if(const double* threshold = boost::get<double>(&relinearizeThreshold))
{
BOOST_FOREACH(const VectorValues::KeyValuePair& key_delta, delta) {
for(const VectorValues::KeyValuePair& key_delta: delta) {
double maxDelta = key_delta.second.lpNorm<Eigen::Infinity>();
if(maxDelta >= *threshold)
relinKeys.insert(key_delta.first);
@ -120,7 +120,7 @@ KeySet ISAM2::Impl::CheckRelinearizationFull(const VectorValues& delta,
}
else if(const FastMap<char,Vector>* thresholds = boost::get<FastMap<char,Vector> >(&relinearizeThreshold))
{
BOOST_FOREACH(const VectorValues::KeyValuePair& key_delta, delta) {
for(const VectorValues::KeyValuePair& key_delta: delta) {
const Vector& threshold = thresholds->find(Symbol(key_delta.first).chr())->second;
if(threshold.rows() != key_delta.second.rows())
throw std::invalid_argument("Relinearization threshold vector dimensionality for '" + std::string(1, Symbol(key_delta.first).chr()) + "' passed into iSAM2 parameters does not match actual variable dimensionality.");
@ -138,7 +138,7 @@ void CheckRelinearizationRecursiveDouble(KeySet& relinKeys, double threshold,
{
// Check the current clique for relinearization
bool relinearize = false;
BOOST_FOREACH(Key var, *clique->conditional()) {
for(Key var: *clique->conditional()) {
double maxDelta = delta[var].lpNorm<Eigen::Infinity>();
if(maxDelta >= threshold) {
relinKeys.insert(var);
@ -148,7 +148,7 @@ void CheckRelinearizationRecursiveDouble(KeySet& relinKeys, double threshold,
// If this node was relinearized, also check its children
if(relinearize) {
BOOST_FOREACH(const ISAM2Clique::shared_ptr& child, clique->children) {
for(const ISAM2Clique::shared_ptr& child: clique->children) {
CheckRelinearizationRecursiveDouble(relinKeys, threshold, delta, child);
}
}
@ -161,7 +161,7 @@ void CheckRelinearizationRecursiveMap(KeySet& relinKeys, const FastMap<char,Vect
{
// Check the current clique for relinearization
bool relinearize = false;
BOOST_FOREACH(Key var, *clique->conditional()) {
for(Key var: *clique->conditional()) {
// Find the threshold for this variable type
const Vector& threshold = thresholds.find(Symbol(var).chr())->second;
@ -180,7 +180,7 @@ void CheckRelinearizationRecursiveMap(KeySet& relinKeys, const FastMap<char,Vect
// If this node was relinearized, also check its children
if(relinearize) {
BOOST_FOREACH(const ISAM2Clique::shared_ptr& child, clique->children) {
for(const ISAM2Clique::shared_ptr& child: clique->children) {
CheckRelinearizationRecursiveMap(relinKeys, thresholds, delta, child);
}
}
@ -192,7 +192,7 @@ KeySet ISAM2::Impl::CheckRelinearizationPartial(const FastVector<ISAM2::sharedCl
const ISAM2Params::RelinearizationThreshold& relinearizeThreshold)
{
KeySet relinKeys;
BOOST_FOREACH(const ISAM2::sharedClique& root, roots) {
for(const ISAM2::sharedClique& root: roots) {
if(relinearizeThreshold.type() == typeid(double))
CheckRelinearizationRecursiveDouble(relinKeys, boost::get<double>(relinearizeThreshold), delta, root);
else if(relinearizeThreshold.type() == typeid(FastMap<char,Vector>))
@ -207,7 +207,7 @@ void ISAM2::Impl::FindAll(ISAM2Clique::shared_ptr clique, KeySet& keys, const Ke
static const bool debug = false;
// does the separator contain any of the variables?
bool found = false;
BOOST_FOREACH(Key key, clique->conditional()->parents()) {
for(Key key: clique->conditional()->parents()) {
if (markedMask.exists(key)) {
found = true;
break;
@ -219,7 +219,7 @@ void ISAM2::Impl::FindAll(ISAM2Clique::shared_ptr clique, KeySet& keys, const Ke
if(debug) clique->print("Key(s) marked in clique ");
if(debug) cout << "so marking key " << clique->conditional()->front() << endl;
}
BOOST_FOREACH(const ISAM2Clique::shared_ptr& child, clique->children) {
for(const ISAM2Clique::shared_ptr& child: clique->children) {
FindAll(child, keys, markedMask);
}
}
@ -267,7 +267,7 @@ inline static void optimizeInPlace(const boost::shared_ptr<ISAM2Clique>& clique,
result.update(clique->conditional()->solve(result));
// starting from the root, call optimize on each conditional
BOOST_FOREACH(const boost::shared_ptr<ISAM2Clique>& child, clique->children)
for(const boost::shared_ptr<ISAM2Clique>& child: clique->children)
optimizeInPlace(child, result);
}
}
@ -280,14 +280,14 @@ size_t ISAM2::Impl::UpdateGaussNewtonDelta(const FastVector<ISAM2::sharedClique>
if (wildfireThreshold <= 0.0) {
// Threshold is zero or less, so do a full recalculation
BOOST_FOREACH(const ISAM2::sharedClique& root, roots)
for(const ISAM2::sharedClique& root: roots)
internal::optimizeInPlace(root, delta);
lastBacksubVariableCount = delta.size();
} else {
// Optimize with wildfire
lastBacksubVariableCount = 0;
BOOST_FOREACH(const ISAM2::sharedClique& root, roots)
for(const ISAM2::sharedClique& root: roots)
lastBacksubVariableCount += optimizeWildfireNonRecursive(
root, wildfireThreshold, replacedKeys, delta); // modifies delta
@ -309,7 +309,7 @@ void updateRgProd(const boost::shared_ptr<ISAM2Clique>& clique, const KeySet& re
// update deltas and recurse to children, but if not, we do not need to
// recurse further because of the running separator property.
bool anyReplaced = false;
BOOST_FOREACH(Key j, *clique->conditional()) {
for(Key j: *clique->conditional()) {
if(replacedKeys.exists(j)) {
anyReplaced = true;
break;
@ -327,7 +327,7 @@ void updateRgProd(const boost::shared_ptr<ISAM2Clique>& clique, const KeySet& re
// Write into RgProd vector
DenseIndex vectorPosition = 0;
BOOST_FOREACH(Key frontal, clique->conditional()->frontals()) {
for(Key frontal: clique->conditional()->frontals()) {
Vector& RgProdValue = RgProd[frontal];
RgProdValue = RSgProd.segment(vectorPosition, RgProdValue.size());
vectorPosition += RgProdValue.size();
@ -339,7 +339,7 @@ void updateRgProd(const boost::shared_ptr<ISAM2Clique>& clique, const KeySet& re
varsUpdated += clique->conditional()->nrFrontals();
// Recurse to children
BOOST_FOREACH(const ISAM2Clique::shared_ptr& child, clique->children) {
for(const ISAM2Clique::shared_ptr& child: clique->children) {
updateRgProd(child, replacedKeys, grad, RgProd, varsUpdated); }
}
}
@ -351,7 +351,7 @@ size_t ISAM2::Impl::UpdateRgProd(const ISAM2::Roots& roots, const KeySet& replac
// Update variables
size_t varsUpdated = 0;
BOOST_FOREACH(const ISAM2::sharedClique& root, roots) {
for(const ISAM2::sharedClique& root: roots) {
internal::updateRgProd(root, replacedKeys, gradAtZero, RgProd, varsUpdated);
}

View File

@ -45,7 +45,7 @@ void optimizeWildfire(const boost::shared_ptr<CLIQUE>& clique, double threshold,
// Are any clique variables part of the tree that has been redone?
bool cliqueReplaced = replaced.exists((*clique)->frontals().front());
#ifdef GTSAM_EXTRA_CONSISTENCY_CHECKS
BOOST_FOREACH(Key frontal, clique->conditional()->frontals()) {
for(Key frontal: clique->conditional()->frontals()) {
assert(cliqueReplaced == replaced.exists(frontal));
}
#endif
@ -53,7 +53,7 @@ void optimizeWildfire(const boost::shared_ptr<CLIQUE>& clique, double threshold,
// If not redone, then has one of the separator variables changed significantly?
bool recalculate = cliqueReplaced;
if(!recalculate) {
BOOST_FOREACH(Key parent, clique->conditional()->parents()) {
for(Key parent: clique->conditional()->parents()) {
if(changed.exists(parent)) {
recalculate = true;
break;
@ -94,7 +94,7 @@ void optimizeWildfire(const boost::shared_ptr<CLIQUE>& clique, double threshold,
// If the values were above the threshold or this clique was replaced
if(valuesChanged) {
// Set changed flag for each frontal variable and leave the new values
BOOST_FOREACH(Key frontal, clique->conditional()->frontals()) {
for(Key frontal: clique->conditional()->frontals()) {
changed.insert(frontal);
}
} else {
@ -105,7 +105,7 @@ void optimizeWildfire(const boost::shared_ptr<CLIQUE>& clique, double threshold,
}
// Recurse to children
BOOST_FOREACH(const typename CLIQUE::shared_ptr& child, clique->children) {
for(const typename CLIQUE::shared_ptr& child: clique->children) {
optimizeWildfire(child, threshold, changed, replaced, delta, count);
}
}
@ -122,7 +122,7 @@ bool optimizeWildfireNode(const boost::shared_ptr<CLIQUE>& clique, double thresh
// Are any clique variables part of the tree that has been redone?
bool cliqueReplaced = replaced.exists(clique->conditional()->frontals().front());
#ifdef GTSAM_EXTRA_CONSISTENCY_CHECKS
BOOST_FOREACH(Key frontal, clique->conditional()->frontals()) {
for(Key frontal: clique->conditional()->frontals()) {
assert(cliqueReplaced == replaced.exists(frontal));
}
#endif
@ -130,7 +130,7 @@ bool optimizeWildfireNode(const boost::shared_ptr<CLIQUE>& clique, double thresh
// If not redone, then has one of the separator variables changed significantly?
bool recalculate = cliqueReplaced;
if(!recalculate) {
BOOST_FOREACH(Key parent, clique->conditional()->parents()) {
for(Key parent: clique->conditional()->parents()) {
if(changed.exists(parent)) {
recalculate = true;
break;
@ -156,9 +156,9 @@ bool optimizeWildfireNode(const boost::shared_ptr<CLIQUE>& clique, double thresh
boost::shared_ptr<CLIQUE> parent = clique->parent_.lock();
if(clique->solnPointers_.empty() && (clique->isRoot() || !parent->solnPointers_.empty()))
{
BOOST_FOREACH(Key key, clique->conditional()->frontals())
for(Key key: clique->conditional()->frontals())
clique->solnPointers_.insert(std::make_pair(key, delta.find(key)));
BOOST_FOREACH(Key key, clique->conditional()->parents())
for(Key key: clique->conditional()->parents())
clique->solnPointers_.insert(std::make_pair(key, parent->solnPointers_.at(key)));
}
@ -174,7 +174,7 @@ bool optimizeWildfireNode(const boost::shared_ptr<CLIQUE>& clique, double thresh
DenseIndex dim = 0;
FastVector<VectorValues::const_iterator> parentPointers;
parentPointers.reserve(clique->conditional()->nrParents());
BOOST_FOREACH(Key parent, clique->conditional()->parents()) {
for(Key parent: clique->conditional()->parents()) {
parentPointers.push_back(clique->solnPointers_.at(parent));
dim += parentPointers.back()->second.size();
}
@ -182,7 +182,7 @@ bool optimizeWildfireNode(const boost::shared_ptr<CLIQUE>& clique, double thresh
// Fill parent vector
xS.resize(dim);
DenseIndex vectorPos = 0;
BOOST_FOREACH(const VectorValues::const_iterator& parentPointer, parentPointers) {
for(const VectorValues::const_iterator& parentPointer: parentPointers) {
const Vector& parentVector = parentPointer->second;
xS.block(vectorPos,0,parentVector.size(),1) = parentVector.block(0,0,parentVector.size(),1);
vectorPos += parentVector.size();
@ -227,7 +227,7 @@ bool optimizeWildfireNode(const boost::shared_ptr<CLIQUE>& clique, double thresh
// If the values were above the threshold or this clique was replaced
if(valuesChanged) {
// Set changed flag for each frontal variable and leave the new values
BOOST_FOREACH(Key frontal, clique->conditional()->frontals()) {
for(Key frontal: clique->conditional()->frontals()) {
changed.insert(frontal);
}
} else {
@ -270,7 +270,7 @@ size_t optimizeWildfireNonRecursive(const boost::shared_ptr<CLIQUE>& root, doubl
travStack.pop();
bool recalculate = internal::optimizeWildfireNode(currentNode, threshold, changed, keys, delta, count);
if (recalculate) {
BOOST_FOREACH(const typename CLIQUE::shared_ptr& child, currentNode->children) {
for(const typename CLIQUE::shared_ptr& child: currentNode->children) {
travStack.push(child);
}
}
@ -287,7 +287,7 @@ void nnz_internal(const boost::shared_ptr<CLIQUE>& clique, int& result) {
int dimSep = (int)clique->conditional()->get_S().cols();
result += ((dimR+1)*dimR)/2 + dimSep*dimR;
// traverse the children
BOOST_FOREACH(const typename CLIQUE::shared_ptr& child, clique->children) {
for(const typename CLIQUE::shared_ptr& child: clique->children) {
nnz_internal(child, result);
}
}

View File

@ -174,17 +174,17 @@ bool ISAM2::equals(const ISAM2& other, double tol) const {
KeySet ISAM2::getAffectedFactors(const KeyList& keys) const {
static const bool debug = false;
if(debug) cout << "Getting affected factors for ";
if(debug) { BOOST_FOREACH(const Key key, keys) { cout << key << " "; } }
if(debug) { for(const Key key: keys) { cout << key << " "; } }
if(debug) cout << endl;
NonlinearFactorGraph allAffected;
KeySet indices;
BOOST_FOREACH(const Key key, keys) {
for(const Key key: keys) {
const VariableIndex::Factors& factors(variableIndex_[key]);
indices.insert(factors.begin(), factors.end());
}
if(debug) cout << "Affected factors are: ";
if(debug) { BOOST_FOREACH(const size_t index, indices) { cout << index << " "; } }
if(debug) { for(const size_t index: indices) { cout << index << " "; } }
if(debug) cout << endl;
return indices;
}
@ -210,10 +210,10 @@ ISAM2::relinearizeAffectedFactors(const FastList<Key>& affectedKeys, const KeySe
gttic(check_candidates_and_linearize);
GaussianFactorGraph::shared_ptr linearized = boost::make_shared<GaussianFactorGraph>();
BOOST_FOREACH(Key idx, candidates) {
for(Key idx: candidates) {
bool inside = true;
bool useCachedLinear = params_.cacheLinearizedFactors;
BOOST_FOREACH(Key key, nonlinearFactors_[idx]->keys()) {
for(Key key: nonlinearFactors_[idx]->keys()) {
if(affectedKeysSet.find(key) == affectedKeysSet.end()) {
inside = false;
break;
@ -251,7 +251,7 @@ ISAM2::relinearizeAffectedFactors(const FastList<Key>& affectedKeys, const KeySe
GaussianFactorGraph ISAM2::getCachedBoundaryFactors(Cliques& orphans) {
GaussianFactorGraph cachedBoundary;
BOOST_FOREACH(sharedClique orphan, orphans) {
for(sharedClique orphan: orphans) {
// retrieve the cached factor and add to boundary
cachedBoundary.push_back(orphan->cachedFactor());
}
@ -292,10 +292,10 @@ boost::shared_ptr<KeySet > ISAM2::recalculate(const KeySet& markedKeys, const Ke
if(debug) {
cout << "markedKeys: ";
BOOST_FOREACH(const Key key, markedKeys) { cout << key << " "; }
for(const Key key: markedKeys) { cout << key << " "; }
cout << endl;
cout << "observedKeys: ";
BOOST_FOREACH(const Key key, observedKeys) { cout << key << " "; }
for(const Key key: observedKeys) { cout << key << " "; }
cout << endl;
}
@ -322,7 +322,7 @@ boost::shared_ptr<KeySet > ISAM2::recalculate(const KeySet& markedKeys, const Ke
// ordering provides all keys in conditionals, there cannot be others because path to root included
gttic(affectedKeys);
FastList<Key> affectedKeys;
BOOST_FOREACH(const ConditionalType::shared_ptr& conditional, affectedBayesNet)
for(const ConditionalType::shared_ptr& conditional: affectedBayesNet)
affectedKeys.insert(affectedKeys.end(), conditional->beginFrontals(), conditional->endFrontals());
gttoc(affectedKeys);
@ -349,7 +349,7 @@ boost::shared_ptr<KeySet > ISAM2::recalculate(const KeySet& markedKeys, const Ke
{
// Only if some variables are unconstrained
FastMap<Key, int> constraintGroups;
BOOST_FOREACH(Key var, observedKeys)
for(Key var: observedKeys)
constraintGroups[var] = 1;
order = Ordering::ColamdConstrained(variableIndex_, constraintGroups);
}
@ -386,7 +386,7 @@ boost::shared_ptr<KeySet > ISAM2::recalculate(const KeySet& markedKeys, const Ke
// Reeliminated keys for detailed results
if(params_.enableDetailedResults) {
BOOST_FOREACH(Key key, theta_.keys()) {
for(Key key: theta_.keys()) {
result.detail->variableStatus[key].isReeliminated = true;
}
}
@ -406,11 +406,11 @@ boost::shared_ptr<KeySet > ISAM2::recalculate(const KeySet& markedKeys, const Ke
if(debug) factors.print("Relinearized factors: ");
gttoc(relinearizeAffected);
if(debug) { cout << "Affected keys: "; BOOST_FOREACH(const Key key, affectedKeys) { cout << key << " "; } cout << endl; }
if(debug) { cout << "Affected keys: "; for(const Key key: affectedKeys) { cout << key << " "; } cout << endl; }
// Reeliminated keys for detailed results
if(params_.enableDetailedResults) {
BOOST_FOREACH(Key key, affectedAndNewKeys) {
for(Key key: affectedAndNewKeys) {
result.detail->variableStatus[key].isReeliminated = true;
}
}
@ -437,7 +437,7 @@ boost::shared_ptr<KeySet > ISAM2::recalculate(const KeySet& markedKeys, const Ke
gttic(orphans);
// Add the orphaned subtrees
BOOST_FOREACH(const sharedClique& orphan, orphans)
for(const sharedClique& orphan: orphans)
factors += boost::make_shared<BayesTreeOrphanWrapper<Clique> >(orphan);
gttoc(orphans);
@ -465,7 +465,7 @@ boost::shared_ptr<KeySet > ISAM2::recalculate(const KeySet& markedKeys, const Ke
} else {
constraintGroups = FastMap<Key,int>();
const int group = observedKeys.size() < affectedFactorsVarIndex.size() ? 1 : 0;
BOOST_FOREACH (Key var, observedKeys)
for (Key var: observedKeys)
constraintGroups.insert(make_pair(var, group));
}
@ -500,8 +500,8 @@ boost::shared_ptr<KeySet > ISAM2::recalculate(const KeySet& markedKeys, const Ke
// Root clique variables for detailed results
if(params_.enableDetailedResults) {
BOOST_FOREACH(const sharedNode& root, this->roots())
BOOST_FOREACH(Key var, *root->conditional())
for(const sharedNode& root: this->roots())
for(Key var: *root->conditional())
result.detail->variableStatus[var].inRootClique = true;
}
@ -554,7 +554,7 @@ ISAM2Result ISAM2::update(
// Remove the removed factors
NonlinearFactorGraph removeFactors; removeFactors.reserve(removeFactorIndices.size());
BOOST_FOREACH(size_t index, removeFactorIndices) {
for(size_t index: removeFactorIndices) {
removeFactors.push_back(nonlinearFactors_[index]);
nonlinearFactors_.remove(index);
if(params_.cacheLinearizedFactors)
@ -571,7 +571,7 @@ ISAM2Result ISAM2::update(
// Get keys from removed factors and new factors, and compute unused keys,
// i.e., keys that are empty now and do not appear in the new factors.
KeySet removedAndEmpty;
BOOST_FOREACH(Key key, removeFactors.keys()) {
for(Key key: removeFactors.keys()) {
if(variableIndex_[key].empty())
removedAndEmpty.insert(removedAndEmpty.end(), key);
}
@ -580,7 +580,7 @@ ISAM2Result ISAM2::update(
newFactorSymbKeys.begin(), newFactorSymbKeys.end(), std::inserter(unusedKeys, unusedKeys.end()));
// Get indices for unused keys
BOOST_FOREACH(Key key, unusedKeys) {
for(Key key: unusedKeys) {
unusedIndices.insert(unusedIndices.end(), key);
}
}
@ -591,7 +591,7 @@ ISAM2Result ISAM2::update(
Impl::AddVariables(newTheta, theta_, delta_, deltaNewton_, RgProd_);
// New keys for detailed results
if(params_.enableDetailedResults) {
BOOST_FOREACH(Key key, newTheta.keys()) { result.detail->variableStatus[key].isNew = true; } }
for(Key key: newTheta.keys()) { result.detail->variableStatus[key].isNew = true; } }
gttoc(add_new_variables);
gttic(evaluate_error_before);
@ -609,14 +609,14 @@ ISAM2Result ISAM2::update(
}
// Also mark any provided extra re-eliminate keys
if(extraReelimKeys) {
BOOST_FOREACH(Key key, *extraReelimKeys) {
for(Key key: *extraReelimKeys) {
markedKeys.insert(key);
}
}
// Observed keys for detailed results
if(params_.enableDetailedResults) {
BOOST_FOREACH(Key key, markedKeys) {
for(Key key: markedKeys) {
result.detail->variableStatus[key].isObserved = true;
}
}
@ -624,7 +624,7 @@ ISAM2Result ISAM2::update(
// is a vector of size_t, so the constructor unintentionally resolves to
// vector(size_t count, Key value) instead of the iterator constructor.
FastVector<Key> observedKeys; observedKeys.reserve(markedKeys.size());
BOOST_FOREACH(Key index, markedKeys) {
for(Key index: markedKeys) {
if(unusedIndices.find(index) == unusedIndices.end()) // Only add if not unused
observedKeys.push_back(index); // Make a copy of these, as we'll soon add to them
}
@ -642,24 +642,24 @@ ISAM2Result ISAM2::update(
if(disableReordering) relinKeys = Impl::CheckRelinearizationFull(delta_, 0.0); // This is used for debugging
// Remove from relinKeys any keys whose linearization points are fixed
BOOST_FOREACH(Key key, fixedVariables_) {
for(Key key: fixedVariables_) {
relinKeys.erase(key);
}
if(noRelinKeys) {
BOOST_FOREACH(Key key, *noRelinKeys) {
for(Key key: *noRelinKeys) {
relinKeys.erase(key);
}
}
// Above relin threshold keys for detailed results
if(params_.enableDetailedResults) {
BOOST_FOREACH(Key key, relinKeys) {
for(Key key: relinKeys) {
result.detail->variableStatus[key].isAboveRelinThreshold = true;
result.detail->variableStatus[key].isRelinearized = true; } }
// Add the variables being relinearized to the marked keys
KeySet markedRelinMask;
BOOST_FOREACH(const Key key, relinKeys)
for(const Key key: relinKeys)
markedRelinMask.insert(key);
markedKeys.insert(relinKeys.begin(), relinKeys.end());
gttoc(gather_relinearize_keys);
@ -667,16 +667,16 @@ ISAM2Result ISAM2::update(
gttic(fluid_find_all);
// 5. Mark all cliques that involve marked variables \Theta_{J} and all their ancestors.
if (!relinKeys.empty()) {
BOOST_FOREACH(const sharedClique& root, roots_)
for(const sharedClique& root: roots_)
// add other cliques that have the marked ones in the separator
Impl::FindAll(root, markedKeys, markedRelinMask);
// Relin involved keys for detailed results
if(params_.enableDetailedResults) {
KeySet involvedRelinKeys;
BOOST_FOREACH(const sharedClique& root, roots_)
for(const sharedClique& root: roots_)
Impl::FindAll(root, involvedRelinKeys, markedRelinMask);
BOOST_FOREACH(Key key, involvedRelinKeys) {
for(Key key: involvedRelinKeys) {
if(!result.detail->variableStatus[key].isAboveRelinThreshold) {
result.detail->variableStatus[key].isRelinearizeInvolved = true;
result.detail->variableStatus[key].isRelinearized = true; } }
@ -771,7 +771,7 @@ void ISAM2::marginalizeLeaves(const FastList<Key>& leafKeysList,
KeySet leafKeysRemoved;
// Remove each variable and its subtrees
BOOST_FOREACH(Key j, leafKeys) {
for(Key j: leafKeys) {
if(!leafKeysRemoved.exists(j)) { // If the index was not already removed by removing another subtree
// Traverse up the tree to find the root of the marginalized subtree
@ -789,7 +789,7 @@ void ISAM2::marginalizeLeaves(const FastList<Key>& leafKeysList,
// See if we should remove the whole clique
bool marginalizeEntireClique = true;
BOOST_FOREACH(Key frontal, clique->conditional()->frontals()) {
for(Key frontal: clique->conditional()->frontals()) {
if(!leafKeys.exists(frontal)) {
marginalizeEntireClique = false;
break; } }
@ -806,10 +806,10 @@ void ISAM2::marginalizeLeaves(const FastList<Key>& leafKeysList,
// Now remove this clique and its subtree - all of its marginal
// information has been stored in marginalFactors.
const Cliques removedCliques = this->removeSubtree(clique); // Remove the subtree and throw away the cliques
BOOST_FOREACH(const sharedClique& removedClique, removedCliques) {
for(const sharedClique& removedClique: removedCliques) {
marginalFactors.erase(removedClique->conditional()->front());
leafKeysRemoved.insert(removedClique->conditional()->beginFrontals(), removedClique->conditional()->endFrontals());
BOOST_FOREACH(Key frontal, removedClique->conditional()->frontals())
for(Key frontal: removedClique->conditional()->frontals())
{
// Add to factors to remove
const VariableIndex::Factors& involvedFactors = variableIndex_[frontal];
@ -832,9 +832,9 @@ void ISAM2::marginalizeLeaves(const FastList<Key>& leafKeysList,
GaussianFactorGraph graph;
KeySet factorsInSubtreeRoot;
Cliques subtreesToRemove;
BOOST_FOREACH(const sharedClique& child, clique->children) {
for(const sharedClique& child: clique->children) {
// Remove subtree if child depends on any marginalized keys
BOOST_FOREACH(Key parent, child->conditional()->parents()) {
for(Key parent: child->conditional()->parents()) {
if(leafKeys.exists(parent)) {
subtreesToRemove.push_back(child);
graph.push_back(child->cachedFactor()); // Add child marginal
@ -843,13 +843,13 @@ void ISAM2::marginalizeLeaves(const FastList<Key>& leafKeysList,
}
}
Cliques childrenRemoved;
BOOST_FOREACH(const sharedClique& childToRemove, subtreesToRemove) {
for(const sharedClique& childToRemove: subtreesToRemove) {
const Cliques removedCliques = this->removeSubtree(childToRemove); // Remove the subtree and throw away the cliques
childrenRemoved.insert(childrenRemoved.end(), removedCliques.begin(), removedCliques.end());
BOOST_FOREACH(const sharedClique& removedClique, removedCliques) {
for(const sharedClique& removedClique: removedCliques) {
marginalFactors.erase(removedClique->conditional()->front());
leafKeysRemoved.insert(removedClique->conditional()->beginFrontals(), removedClique->conditional()->endFrontals());
BOOST_FOREACH(Key frontal, removedClique->conditional()->frontals())
for(Key frontal: removedClique->conditional()->frontals())
{
// Add to factors to remove
const VariableIndex::Factors& involvedFactors = variableIndex_[frontal];
@ -867,16 +867,16 @@ void ISAM2::marginalizeLeaves(const FastList<Key>& leafKeysList,
// but do not involve frontal variables of any of its children.
// TODO: reuse cached linear factors
KeySet factorsFromMarginalizedInClique_step1;
BOOST_FOREACH(Key frontal, clique->conditional()->frontals()) {
for(Key frontal: clique->conditional()->frontals()) {
if(leafKeys.exists(frontal))
factorsFromMarginalizedInClique_step1.insert(variableIndex_[frontal].begin(), variableIndex_[frontal].end()); }
// Remove any factors in subtrees that we're removing at this step
BOOST_FOREACH(const sharedClique& removedChild, childrenRemoved) {
BOOST_FOREACH(Key indexInClique, removedChild->conditional()->frontals()) {
BOOST_FOREACH(Key factorInvolving, variableIndex_[indexInClique]) {
for(const sharedClique& removedChild: childrenRemoved) {
for(Key indexInClique: removedChild->conditional()->frontals()) {
for(Key factorInvolving: variableIndex_[indexInClique]) {
factorsFromMarginalizedInClique_step1.erase(factorInvolving); } } }
// Create factor graph from factor indices
BOOST_FOREACH(size_t i, factorsFromMarginalizedInClique_step1) {
for(size_t i: factorsFromMarginalizedInClique_step1) {
graph.push_back(nonlinearFactors_[i]->linearize(theta_)); }
// Reeliminate the linear graph to get the marginal and discard the conditional
@ -908,7 +908,7 @@ void ISAM2::marginalizeLeaves(const FastList<Key>& leafKeysList,
clique->conditional()->nrFrontals() -= nToRemove;
// Add to factors to remove factors involved in frontals of current clique
BOOST_FOREACH(Key frontal, cliqueFrontalsToEliminate)
for(Key frontal: cliqueFrontalsToEliminate)
{
const VariableIndex::Factors& involvedFactors = variableIndex_[frontal];
factorIndicesToRemove.insert(involvedFactors.begin(), involvedFactors.end());
@ -925,8 +925,8 @@ void ISAM2::marginalizeLeaves(const FastList<Key>& leafKeysList,
// Gather factors to add - the new marginal factors
GaussianFactorGraph factorsToAdd;
typedef pair<Key, vector<GaussianFactor::shared_ptr> > Key_Factors;
BOOST_FOREACH(const Key_Factors& key_factors, marginalFactors) {
BOOST_FOREACH(const GaussianFactor::shared_ptr& factor, key_factors.second) {
for(const Key_Factors& key_factors: marginalFactors) {
for(const GaussianFactor::shared_ptr& factor: key_factors.second) {
if(factor) {
factorsToAdd.push_back(factor);
if(marginalFactorsIndices)
@ -935,7 +935,7 @@ void ISAM2::marginalizeLeaves(const FastList<Key>& leafKeysList,
factor));
if(params_.cacheLinearizedFactors)
linearFactors_.push_back(factor);
BOOST_FOREACH(Key factorKey, *factor) {
for(Key factorKey: *factor) {
fixedVariables_.insert(factorKey); }
}
}
@ -944,7 +944,7 @@ void ISAM2::marginalizeLeaves(const FastList<Key>& leafKeysList,
// Remove the factors to remove that have been summarized in the newly-added marginal factors
NonlinearFactorGraph removedFactors;
BOOST_FOREACH(size_t i, factorIndicesToRemove) {
for(size_t i: factorIndicesToRemove) {
removedFactors.push_back(nonlinearFactors_[i]);
nonlinearFactors_.remove(i);
if(params_.cacheLinearizedFactors)
@ -1065,7 +1065,7 @@ static void gradientAtZeroTreeAdder(const boost::shared_ptr<ISAM2Clique>& root,
// Recursively add contributions from children
typedef boost::shared_ptr<ISAM2Clique> sharedClique;
BOOST_FOREACH(const sharedClique& child, root->children) {
for(const sharedClique& child: root->children) {
gradientAtZeroTreeAdder(child, g);
}
}
@ -1077,7 +1077,7 @@ VectorValues ISAM2::gradientAtZero() const
VectorValues g;
// Sum up contributions for each clique
BOOST_FOREACH(const ISAM2::sharedClique& root, this->roots())
for(const ISAM2::sharedClique& root: this->roots())
gradientAtZeroTreeAdder(root, g);
return g;
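
Note on the pattern above: most hunks in this commit are the same mechanical rewrite, BOOST_FOREACH(decl, range) becoming for(decl : range), with the element binding kept as-is so that expensive-to-copy elements (shared pointers, cliques, factors) are still taken by const reference. A minimal stand-alone sketch of the rewrite, using a plain std::vector of boost::shared_ptr rather than the actual GTSAM types (the helper name is hypothetical):

#include <boost/shared_ptr.hpp>
#include <cstddef>
#include <vector>

typedef boost::shared_ptr<int> sharedFactor;  // hypothetical stand-in for a factor pointer

std::size_t nrNonNull(const std::vector<sharedFactor>& factors) {
  std::size_t n = 0;
  // before this commit: BOOST_FOREACH(const sharedFactor& f, factors)
  for (const sharedFactor& f : factors) {  // C++11 range-based for, same binding
    if (f) ++n;                            // count only non-null entries
  }
  return n;
}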


@ -200,7 +200,7 @@ struct GTSAM_EXPORT ISAM2Params {
else
{
std::cout << "relinearizeThreshold: " << "{mapped}" << "\n";
BOOST_FOREACH(const ISAM2ThresholdMapValue& value, boost::get<ISAM2ThresholdMap>(relinearizeThreshold)) {
for(const ISAM2ThresholdMapValue& value: boost::get<ISAM2ThresholdMap>(relinearizeThreshold)) {
std::cout << " '" << value.first << "' -> [" << value.second.transpose() << " ]\n";
}
}


@ -157,7 +157,7 @@ GaussianFactorGraph::shared_ptr LevenbergMarquardtOptimizer::buildDampedSystem(
// Only retrieve diagonal vector when reuse_diagonal = false
if (params_.diagonalDamping && state_.reuseDiagonal == false) {
state_.hessianDiagonal = linear.hessianDiagonal();
BOOST_FOREACH(Vector& v, state_.hessianDiagonal | map_values) {
for(Vector& v: state_.hessianDiagonal | map_values) {
for (int aa = 0; aa < v.size(); aa++) {
v(aa) = std::min(std::max(v(aa), params_.minDiagonal),
params_.maxDiagonal);
@ -172,7 +172,7 @@ GaussianFactorGraph::shared_ptr LevenbergMarquardtOptimizer::buildDampedSystem(
GaussianFactorGraph &damped = (*dampedPtr);
damped.reserve(damped.size() + state_.values.size());
if (params_.diagonalDamping) {
BOOST_FOREACH(const VectorValues::KeyValuePair& key_vector, state_.hessianDiagonal) {
for(const VectorValues::KeyValuePair& key_vector: state_.hessianDiagonal) {
// Fill in the diagonal of A with diag(hessian)
try {
Matrix A = Eigen::DiagonalMatrix<double, Eigen::Dynamic>(
@ -192,7 +192,7 @@ GaussianFactorGraph::shared_ptr LevenbergMarquardtOptimizer::buildDampedSystem(
// initialize noise model cache to a reasonable default size
NoiseCacheVector noises(6);
BOOST_FOREACH(const Values::KeyValuePair& key_value, state_.values) {
for(const Values::KeyValuePair& key_value: state_.values) {
size_t dim = key_value.value.dim();
if (dim > noises.size())
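
The hessianDiagonal loops above also show that the rewrite works unchanged with Boost.Range adaptors: the result of piping a map-like container through boost::adaptors::map_values exposes begin()/end(), which is all a range-based for needs. A small self-contained sketch with a plain std::map and made-up clamp bounds, not the actual VectorValues or LM parameters used above:

#include <algorithm>
#include <boost/range/adaptor/map.hpp>
#include <iostream>
#include <map>

int main() {
  std::map<int, double> hessianDiagonal;   // stand-in for the key -> diagonal map
  hessianDiagonal[0] = 1.0;
  hessianDiagonal[1] = 9.0;

  const double minDiagonal = 2.0, maxDiagonal = 5.0;  // assumed bounds, for illustration only
  using boost::adaptors::map_values;

  // double& (not const) lets the loop clamp the mapped values in place,
  // much as the damping code above clamps each Vector coefficient.
  for (double& v : hessianDiagonal | map_values)
    v = std::min(std::max(v, minDiagonal), maxDiagonal);

  for (const double& v : hessianDiagonal | map_values)
    std::cout << v << "\n";  // prints 2 then 5
}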


@ -11,7 +11,6 @@
#include <gtsam/linear/VectorValues.h>
#include <gtsam/linear/GaussianFactorGraph.h>
#include <boost/foreach.hpp>
namespace gtsam {
@ -19,7 +18,7 @@ namespace gtsam {
void LinearContainerFactor::initializeLinearizationPoint(const Values& linearizationPoint) {
if (!linearizationPoint.empty()) {
linearizationPoint_ = Values();
BOOST_FOREACH(const gtsam::Key& key, this->keys()) {
for(const gtsam::Key& key: this->keys()) {
linearizationPoint_->insert(key, linearizationPoint.at(key));
}
} else {
@ -82,7 +81,7 @@ double LinearContainerFactor::error(const Values& c) const {
// Extract subset of values for comparison
Values csub;
BOOST_FOREACH(const gtsam::Key& key, keys())
for(const gtsam::Key& key: keys())
csub.insert(key, c.at(key));
// create dummy ordering for evaluation
@ -111,7 +110,7 @@ GaussianFactor::shared_ptr LinearContainerFactor::linearize(const Values& c) con
// Extract subset of values
Values subsetC;
BOOST_FOREACH(const gtsam::Key& key, this->keys())
for(const gtsam::Key& key: this->keys())
subsetC.insert(key, c.at(key));
// Determine delta between linearization points using new ordering
@ -170,7 +169,7 @@ NonlinearFactorGraph LinearContainerFactor::convertLinearGraph(
const GaussianFactorGraph& linear_graph, const Values& linearizationPoint)
{
NonlinearFactorGraph result;
BOOST_FOREACH(const GaussianFactor::shared_ptr& f, linear_graph)
for(const GaussianFactor::shared_ptr& f: linear_graph)
if (f)
result.push_back(NonlinearFactorGraph::sharedFactor(
new LinearContainerFactor(f, linearizationPoint)));


@ -127,7 +127,7 @@ JointMarginal Marginals::jointMarginalInformation(const std::vector<Key>& variab
// Get dimensions from factor graph
std::vector<size_t> dims;
dims.reserve(variablesSorted.size());
BOOST_FOREACH(Key key, variablesSorted) {
for(Key key: variablesSorted) {
dims.push_back(values_.at(key).dim());
}
@ -144,7 +144,7 @@ VectorValues Marginals::optimize() const {
void JointMarginal::print(const std::string& s, const KeyFormatter& formatter) const {
cout << s << "Joint marginal on keys ";
bool first = true;
BOOST_FOREACH(Key key, keys_) {
for(Key key: keys_) {
if(!first)
cout << ", ";
else


@ -26,7 +26,7 @@ namespace gtsam {
void NonlinearFactor::print(const std::string& s,
const KeyFormatter& keyFormatter) const {
std::cout << s << " keys = { ";
BOOST_FOREACH(Key key, keys()) {
for(Key key: keys()) {
std::cout << keyFormatter(key) << " ";
}
std::cout << "}" << std::endl;


@ -31,7 +31,6 @@
# include <tbb/parallel_for.h>
#endif
#include <boost/foreach.hpp>
#include <cmath>
#include <limits>
@ -133,7 +132,7 @@ void NonlinearFactorGraph::saveGraph(std::ostream &stm, const Values& values,
// Find bounds
double minX = numeric_limits<double>::infinity(), maxX = -numeric_limits<double>::infinity();
double minY = numeric_limits<double>::infinity(), maxY = -numeric_limits<double>::infinity();
BOOST_FOREACH(Key key, keys) {
for(Key key: keys) {
if(values.exists(key)) {
boost::optional<Point2> xy = getXY(values.at(key), formatting);
if(xy) {
@ -150,7 +149,7 @@ void NonlinearFactorGraph::saveGraph(std::ostream &stm, const Values& values,
}
// Create nodes for each variable in the graph
BOOST_FOREACH(Key key, keys){
for(Key key: keys){
// Label the node with the label from the KeyFormatter
stm << " var" << key << "[label=\"" << keyFormatter(key) << "\"";
if(values.exists(key)) {
@ -165,7 +164,7 @@ void NonlinearFactorGraph::saveGraph(std::ostream &stm, const Values& values,
if (formatting.mergeSimilarFactors) {
// Remove duplicate factors
std::set<vector<Key> > structure;
BOOST_FOREACH(const sharedFactor& factor, *this){
for(const sharedFactor& factor: *this){
if(factor) {
vector<Key> factorKeys = factor->keys();
std::sort(factorKeys.begin(), factorKeys.end());
@ -175,7 +174,7 @@ void NonlinearFactorGraph::saveGraph(std::ostream &stm, const Values& values,
// Create factors and variable connections
size_t i = 0;
BOOST_FOREACH(const vector<Key>& factorKeys, structure){
for(const vector<Key>& factorKeys: structure){
// Make each factor a dot
stm << " factor" << i << "[label=\"\", shape=point";
{
@ -187,7 +186,7 @@ void NonlinearFactorGraph::saveGraph(std::ostream &stm, const Values& values,
stm << "];\n";
// Make factor-variable connections
BOOST_FOREACH(Key key, factorKeys) {
for(Key key: factorKeys) {
stm << " var" << key << "--" << "factor" << i << ";\n";
}
@ -214,7 +213,7 @@ void NonlinearFactorGraph::saveGraph(std::ostream &stm, const Values& values,
// Make factor-variable connections
if(formatting.connectKeysToFactor && factor) {
BOOST_FOREACH(Key key, *factor) {
for(Key key: *factor) {
stm << " var" << key << "--" << "factor" << i << ";\n";
}
}
@ -224,7 +223,7 @@ void NonlinearFactorGraph::saveGraph(std::ostream &stm, const Values& values,
if(factor) {
Key k;
bool firstTime = true;
BOOST_FOREACH(Key key, *this->at(i)) {
for(Key key: *this->at(i)) {
if(firstTime) {
k = key;
firstTime = false;
@ -246,7 +245,7 @@ double NonlinearFactorGraph::error(const Values& values) const {
gttic(NonlinearFactorGraph_error);
double total_error = 0.;
// iterate over all the factors_ to accumulate the log probabilities
BOOST_FOREACH(const sharedFactor& factor, this->factors_) {
for(const sharedFactor& factor: this->factors_) {
if(factor)
total_error += factor->error(values);
}
@ -272,7 +271,7 @@ SymbolicFactorGraph::shared_ptr NonlinearFactorGraph::symbolic() const
SymbolicFactorGraph::shared_ptr symbolic = boost::make_shared<SymbolicFactorGraph>();
symbolic->reserve(this->size());
BOOST_FOREACH(const sharedFactor& factor, *this) {
for(const sharedFactor& factor: *this) {
if(factor)
*symbolic += SymbolicFactor(*factor);
else
@ -330,7 +329,7 @@ GaussianFactorGraph::shared_ptr NonlinearFactorGraph::linearize(const Values& li
linearFG->reserve(this->size());
// linearize all factors
BOOST_FOREACH(const sharedFactor& factor, this->factors_) {
for(const sharedFactor& factor: this->factors_) {
if(factor) {
(*linearFG) += factor->linearize(linearizationPoint);
} else
@ -345,7 +344,7 @@ GaussianFactorGraph::shared_ptr NonlinearFactorGraph::linearize(const Values& li
/* ************************************************************************* */
NonlinearFactorGraph NonlinearFactorGraph::clone() const {
NonlinearFactorGraph result;
BOOST_FOREACH(const sharedFactor& f, *this) {
for(const sharedFactor& f: *this) {
if (f)
result.push_back(f->clone());
else
@ -357,7 +356,7 @@ NonlinearFactorGraph NonlinearFactorGraph::clone() const {
/* ************************************************************************* */
NonlinearFactorGraph NonlinearFactorGraph::rekey(const std::map<Key,Key>& rekey_mapping) const {
NonlinearFactorGraph result;
BOOST_FOREACH(const sharedFactor& f, *this) {
for(const sharedFactor& f: *this) {
if (f)
result.push_back(f->rekey(rekey_mapping));
else


@ -26,8 +26,6 @@
#include <utility>
#include <boost/foreach.hpp>
#include <gtsam/base/DerivedValue.h>
#include <gtsam/nonlinear/Values.h> // Only so Eclipse finds class definition
@ -220,7 +218,7 @@ namespace gtsam {
/** Constructor from a Filtered view copies out all values */
template<class ValueType>
Values::Values(const Values::Filtered<ValueType>& view) {
BOOST_FOREACH(const typename Filtered<ValueType>::KeyValuePair& key_value, view) {
for(const typename Filtered<ValueType>::KeyValuePair& key_value: view) {
Key key = key_value.key;
insert(key, static_cast<const ValueType&>(key_value.value));
}
@ -229,7 +227,7 @@ namespace gtsam {
/* ************************************************************************* */
template<class ValueType>
Values::Values(const Values::ConstFiltered<ValueType>& view) {
BOOST_FOREACH(const typename ConstFiltered<ValueType>::KeyValuePair& key_value, view) {
for(const typename ConstFiltered<ValueType>::KeyValuePair& key_value: view) {
Key key = key_value.key;
insert(key, static_cast<const ValueType&>(key_value.value));
}


@ -25,7 +25,6 @@
#include <gtsam/nonlinear/Values.h>
#include <gtsam/linear/VectorValues.h>
#include <boost/foreach.hpp>
#ifdef __GNUC__
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wunused-variable"
@ -194,7 +193,7 @@ namespace gtsam {
/* ************************************************************************* */
size_t Values::dim() const {
size_t result = 0;
BOOST_FOREACH(const ConstKeyValuePair& key_value, *this) {
for(const ConstKeyValuePair& key_value: *this) {
result += key_value.value.dim();
}
return result;
@ -203,7 +202,7 @@ namespace gtsam {
/* ************************************************************************* */
VectorValues Values::zeroVectors() const {
VectorValues result;
BOOST_FOREACH(const ConstKeyValuePair& key_value, *this)
for(const ConstKeyValuePair& key_value: *this)
result.insert(key_value.key, Vector::Zero(key_value.value.dim()));
return result;
}
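
The same rewrite applies when iterating gtsam::Values itself: it is not a standard container, but it exposes begin()/end() and yields key/value pairs, so client code converts the same way. A hedged stand-alone sketch, assuming the Values and Pose2 API as it is used elsewhere in this commit (insert, ConstKeyValuePair with key and value members, Value::dim()):

#include <gtsam/geometry/Pose2.h>
#include <gtsam/nonlinear/Values.h>
#include <cstddef>
#include <iostream>

int main() {
  gtsam::Values values;
  values.insert(0, gtsam::Pose2(1.0, 2.0, 0.3));
  values.insert(1, gtsam::Pose2(4.0, 5.0, 0.6));

  const gtsam::Values& view = values;   // iterate a const view, as Values::dim() above does
  std::size_t dim = 0;
  for (const gtsam::Values::ConstKeyValuePair& key_value : view) {
    std::cout << "key " << key_value.key << "\n";
    dim += key_value.value.dim();       // 3 per Pose2
  }
  std::cout << "total dim = " << dim << "\n";  // 6 for the two poses above
}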


@ -49,7 +49,7 @@ JacobianFactor linearizeNumerically(const NoiseModelFactor& factor,
// Loop over all variables
const double one_over_2delta = 1.0 / (2.0 * delta);
VectorValues dX = values.zeroVectors();
BOOST_FOREACH(Key key, factor) {
for(Key key: factor) {
// Compute central differences using the values struct.
const size_t cols = dX.dim(key);
Matrix J = Matrix::Zero(rows, cols);


@ -366,7 +366,7 @@ TEST(Values, filter) {
int i = 0;
Values::Filtered<Value> filtered = values.filter(boost::bind(std::greater_equal<Key>(), _1, 2));
EXPECT_LONGS_EQUAL(2, (long)filtered.size());
BOOST_FOREACH(const Values::Filtered<>::KeyValuePair& key_value, filtered) {
for(const Values::Filtered<>::KeyValuePair& key_value: filtered) {
if(i == 0) {
LONGS_EQUAL(2, (long)key_value.key);
try {key_value.value.cast<Pose2>();} catch (const std::bad_cast& e) { FAIL("can't cast Value to Pose2");}
@ -401,7 +401,7 @@ TEST(Values, filter) {
i = 0;
Values::ConstFiltered<Pose3> pose_filtered = values.filter<Pose3>();
EXPECT_LONGS_EQUAL(2, (long)pose_filtered.size());
BOOST_FOREACH(const Values::ConstFiltered<Pose3>::KeyValuePair& key_value, pose_filtered) {
for(const Values::ConstFiltered<Pose3>::KeyValuePair& key_value: pose_filtered) {
if(i == 0) {
EXPECT_LONGS_EQUAL(1, (long)key_value.key);
EXPECT(assert_equal(pose1, key_value.value));
@ -437,7 +437,7 @@ TEST(Values, Symbol_filter) {
values.insert(Symbol('y', 3), pose3);
int i = 0;
BOOST_FOREACH(const Values::Filtered<Value>::KeyValuePair& key_value, values.filter(Symbol::ChrTest('y'))) {
for(const Values::Filtered<Value>::KeyValuePair& key_value: values.filter(Symbol::ChrTest('y'))) {
if(i == 0) {
LONGS_EQUAL(Symbol('y', 1), (long)key_value.key);
EXPECT(assert_equal(pose1, key_value.value.cast<Pose3>()));


@ -43,7 +43,7 @@ GaussianFactorGraph buildLinearOrientationGraph(const NonlinearFactorGraph& g) {
GaussianFactorGraph linearGraph;
noiseModel::Unit::shared_ptr model = noiseModel::Unit::Create(9);
BOOST_FOREACH(const boost::shared_ptr<NonlinearFactor>& factor, g) {
for(const boost::shared_ptr<NonlinearFactor>& factor: g) {
Matrix3 Rij;
boost::shared_ptr<BetweenFactor<Pose3> > pose3Between =
@ -75,7 +75,7 @@ Values normalizeRelaxedRotations(const VectorValues& relaxedRot3) {
ppm(0,0) = 1; ppm(1,1) = 1; ppm(2,2) = -1;
Values validRot3;
BOOST_FOREACH(const VectorValues::value_type& it, relaxedRot3) {
for(const VectorValues::value_type& it: relaxedRot3) {
Key key = it.first;
if (key != keyAnchor) {
const Vector& rotVector = it.second;
@ -108,7 +108,7 @@ NonlinearFactorGraph buildPose3graph(const NonlinearFactorGraph& graph) {
gttic(InitializePose3_buildPose3graph);
NonlinearFactorGraph pose3Graph;
BOOST_FOREACH(const boost::shared_ptr<NonlinearFactor>& factor, graph) {
for(const boost::shared_ptr<NonlinearFactor>& factor: graph) {
// recast to a between on Pose3
boost::shared_ptr<BetweenFactor<Pose3> > pose3Between =
@ -150,7 +150,7 @@ Values computeOrientationsGradient(const NonlinearFactorGraph& pose3Graph, const
// this works on the inverse rotations, according to Tron&Vidal,2011
Values inverseRot;
inverseRot.insert(keyAnchor, Rot3());
BOOST_FOREACH(const Values::ConstKeyValuePair& key_value, givenGuess) {
for(const Values::ConstKeyValuePair& key_value: givenGuess) {
Key key = key_value.key;
const Pose3& pose = givenGuess.at<Pose3>(key);
inverseRot.insert(key, pose.rotation().inverse());
@ -165,7 +165,7 @@ Values computeOrientationsGradient(const NonlinearFactorGraph& pose3Graph, const
// calculate max node degree & allocate gradient
size_t maxNodeDeg = 0;
VectorValues grad;
BOOST_FOREACH(const Values::ConstKeyValuePair& key_value, inverseRot) {
for(const Values::ConstKeyValuePair& key_value: inverseRot) {
Key key = key_value.key;
grad.insert(key,Vector3::Zero());
size_t currNodeDeg = (adjEdgesMap.at(key)).size();
@ -191,12 +191,12 @@ Values computeOrientationsGradient(const NonlinearFactorGraph& pose3Graph, const
//std::cout << "it " << it <<" b " << b <<" f0 " << f0 <<" a " << a
// <<" rho " << rho <<" stepsize " << stepsize << " maxNodeDeg "<< maxNodeDeg << std::endl;
maxGrad = 0;
BOOST_FOREACH(const Values::ConstKeyValuePair& key_value, inverseRot) {
for(const Values::ConstKeyValuePair& key_value: inverseRot) {
Key key = key_value.key;
//std::cout << "---------------------------key " << DefaultKeyFormatter(key) << std::endl;
Vector gradKey = Vector3::Zero();
// collect the gradient for each edge incident on key
BOOST_FOREACH(const size_t& factorId, adjEdgesMap.at(key)){
for(const size_t& factorId: adjEdgesMap.at(key)){
Rot3 Rij = factorId2RotMap.at(factorId);
Rot3 Ri = inverseRot.at<Rot3>(key);
if( key == (pose3Graph.at(factorId))->keys()[0] ){
@ -236,7 +236,7 @@ Values computeOrientationsGradient(const NonlinearFactorGraph& pose3Graph, const
// Return correct rotations
const Rot3& Rref = inverseRot.at<Rot3>(keyAnchor); // This will be set to the identity as so far we included no prior
Values estimateRot;
BOOST_FOREACH(const Values::ConstKeyValuePair& key_value, inverseRot) {
for(const Values::ConstKeyValuePair& key_value: inverseRot) {
Key key = key_value.key;
if (key != keyAnchor) {
const Rot3& R = inverseRot.at<Rot3>(key);
@ -252,7 +252,7 @@ Values computeOrientationsGradient(const NonlinearFactorGraph& pose3Graph, const
/* ************************************************************************* */
void createSymbolicGraph(KeyVectorMap& adjEdgesMap, KeyRotMap& factorId2RotMap, const NonlinearFactorGraph& pose3Graph){
size_t factorId = 0;
BOOST_FOREACH(const boost::shared_ptr<NonlinearFactor>& factor, pose3Graph) {
for(const boost::shared_ptr<NonlinearFactor>& factor: pose3Graph) {
boost::shared_ptr<BetweenFactor<Pose3> > pose3Between =
boost::dynamic_pointer_cast<BetweenFactor<Pose3> >(factor);
if (pose3Between){
@ -321,7 +321,7 @@ Values computePoses(NonlinearFactorGraph& pose3graph, Values& initialRot) {
// put into Values structure
Values initialPose;
BOOST_FOREACH(const Values::ConstKeyValuePair& key_value, initialRot){
for(const Values::ConstKeyValuePair& key_value: initialRot){
Key key = key_value.key;
const Rot3& rot = initialRot.at<Rot3>(key);
Pose3 initializedPose = Pose3(rot, Point3(0, 0, 0));
@ -346,7 +346,7 @@ Values computePoses(NonlinearFactorGraph& pose3graph, Values& initialRot) {
// put into Values structure
Values estimate;
BOOST_FOREACH(const Values::ConstKeyValuePair& key_value, GNresult) {
for(const Values::ConstKeyValuePair& key_value: GNresult) {
Key key = key_value.key;
if (key != keyAnchor) {
const Pose3& pose = GNresult.at<Pose3>(key);
@ -391,7 +391,7 @@ Values initialize(const NonlinearFactorGraph& graph, const Values& givenGuess, b
// Compute the full poses (1 GN iteration on full poses)
return computePoses(pose3Graph, orientations);
// BOOST_FOREACH(const Values::ConstKeyValuePair& key_value, orientations) {
// for(const Values::ConstKeyValuePair& key_value: orientations) {
// Key key = key_value.key;
// if (key != keyAnchor) {
// const Point3& pos = givenGuess.at<Pose3>(key).translation();


@ -44,7 +44,7 @@ public:
Vector zeroVector = Vector::Zero(0);
std::vector<KeyMatrix> QF;
QF.reserve(keys.size());
BOOST_FOREACH(const Key& key, keys)
for(const Key& key: keys)
QF.push_back(KeyMatrix(key, zeroMatrix));
JacobianFactor::fillTerms(QF, zeroVector, model);
}


@ -45,7 +45,7 @@ public:
Vector zeroVector = Vector::Zero(0);
std::vector<KeyMatrix> QF;
QF.reserve(keys.size());
BOOST_FOREACH(const Key& key, keys)
for(const Key& key: keys)
QF.push_back(KeyMatrix(key, zeroMatrix));
JacobianFactor::fillTerms(QF, zeroVector, model);
}
@ -67,7 +67,7 @@ public:
size_t m2 = ZDim * numKeys - 3; // TODO: is this not just Enull.rows()?
// PLAIN NULL SPACE TRICK
// Matrix Q = Enull * Enull.transpose();
// BOOST_FOREACH(const KeyMatrixZD& it, Fblocks)
// for(const KeyMatrixZD& it: Fblocks)
// QF.push_back(KeyMatrix(it.first, Q.block(0, 2 * j++, m2, 2) * it.second));
// JacobianFactor factor(QF, Q * b);
std::vector<KeyMatrix> QF;


@ -161,7 +161,7 @@ public:
/// Collect all cameras: important that in key order
virtual Cameras cameras(const Values& values) const {
Cameras cameras;
BOOST_FOREACH(const Key& k, this->keys_)
for(const Key& k: this->keys_)
cameras.push_back(values.at<CAMERA>(k));
return cameras;
}


@ -290,9 +290,9 @@ public:
if (params_.degeneracyMode == ZERO_ON_DEGENERACY && !result_) {
// failed: return"empty" Hessian
BOOST_FOREACH(Matrix& m, Gs)
for(Matrix& m: Gs)
m = Matrix::Zero(Base::Dim, Base::Dim);
BOOST_FOREACH(Vector& v, gs)
for(Vector& v: gs)
v = Vector::Zero(Base::Dim);
return boost::make_shared<RegularHessianFactor<Base::Dim> >(this->keys_,
Gs, gs, 0.0);


@ -123,7 +123,7 @@ public:
*/
typename Base::Cameras cameras(const Values& values) const {
typename Base::Cameras cameras;
BOOST_FOREACH(const Key& k, this->keys_) {
for(const Key& k: this->keys_) {
Pose3 pose = values.at<Pose3>(k);
if (Base::body_P_sensor_)
pose = pose.compose(*(Base::body_P_sensor_));


@ -37,7 +37,6 @@
#include <boost/assign/list_inserter.hpp>
#include <boost/filesystem/operations.hpp>
#include <boost/filesystem/path.hpp>
#include <boost/foreach.hpp>
#include <cmath>
#include <fstream>
@ -68,8 +67,8 @@ string findExampleDataFile(const string& name) {
namesToSearch.push_back(name + ".out");
// Find first name that exists
BOOST_FOREACH(const fs::path& root, rootsToSearch) {
BOOST_FOREACH(const fs::path& name, namesToSearch) {
for(const fs::path& root: rootsToSearch) {
for(const fs::path& name: namesToSearch) {
if (fs::is_regular_file(root / name))
return (root / name).string();
}
@ -366,7 +365,7 @@ void save2D(const NonlinearFactorGraph& graph, const Values& config,
// save poses
BOOST_FOREACH(const Values::ConstKeyValuePair& key_value, config) {
for(const Values::ConstKeyValuePair& key_value: config) {
const Pose2& pose = key_value.value.cast<Pose2>();
stream << "VERTEX2 " << key_value.key << " " << pose.x() << " " << pose.y()
<< " " << pose.theta() << endl;
@ -375,7 +374,7 @@ void save2D(const NonlinearFactorGraph& graph, const Values& config,
// save edges
Matrix R = model->R();
Matrix RR = trans(R) * R; //prod(trans(R),R);
BOOST_FOREACH(boost::shared_ptr<NonlinearFactor> factor_, graph) {
for(boost::shared_ptr<NonlinearFactor> factor_: graph) {
boost::shared_ptr<BetweenFactor<Pose2> > factor =
boost::dynamic_pointer_cast<BetweenFactor<Pose2> >(factor_);
if (!factor)
@ -413,13 +412,13 @@ void writeG2o(const NonlinearFactorGraph& graph, const Values& estimate,
// save 2D & 3D poses
Values::ConstFiltered<Pose2> viewPose2 = estimate.filter<Pose2>();
BOOST_FOREACH(const Values::ConstFiltered<Pose2>::KeyValuePair& key_value, viewPose2) {
for(const Values::ConstFiltered<Pose2>::KeyValuePair& key_value: viewPose2) {
stream << "VERTEX_SE2 " << key_value.key << " " << key_value.value.x() << " "
<< key_value.value.y() << " " << key_value.value.theta() << endl;
}
Values::ConstFiltered<Pose3> viewPose3 = estimate.filter<Pose3>();
BOOST_FOREACH(const Values::ConstFiltered<Pose3>::KeyValuePair& key_value, viewPose3) {
for(const Values::ConstFiltered<Pose3>::KeyValuePair& key_value: viewPose3) {
Point3 p = key_value.value.translation();
Rot3 R = key_value.value.rotation();
stream << "VERTEX_SE3:QUAT " << key_value.key << " " << p.x() << " " << p.y() << " " << p.z()
@ -428,7 +427,7 @@ void writeG2o(const NonlinearFactorGraph& graph, const Values& estimate,
}
// save edges (2D or 3D)
BOOST_FOREACH(boost::shared_ptr<NonlinearFactor> factor_, graph) {
for(boost::shared_ptr<NonlinearFactor> factor_: graph) {
boost::shared_ptr<BetweenFactor<Pose2> > factor =
boost::dynamic_pointer_cast<BetweenFactor<Pose2> >(factor_);
if (factor){
@ -898,7 +897,7 @@ bool writeBALfromValues(const string& filename, const SfM_data &data,
Values initialCamerasEstimate(const SfM_data& db) {
Values initial;
size_t i = 0; // NO POINTS: j = 0;
BOOST_FOREACH(const SfM_Camera& camera, db.cameras)
for(const SfM_Camera& camera: db.cameras)
initial.insert(i++, camera);
return initial;
}
@ -906,9 +905,9 @@ Values initialCamerasEstimate(const SfM_data& db) {
Values initialCamerasAndPointsEstimate(const SfM_data& db) {
Values initial;
size_t i = 0, j = 0;
BOOST_FOREACH(const SfM_Camera& camera, db.cameras)
for(const SfM_Camera& camera: db.cameras)
initial.insert((i++), camera);
BOOST_FOREACH(const SfM_Track& track, db.tracks)
for(const SfM_Track& track: db.tracks)
initial.insert(P(j++), track.p);
return initial;
}


@ -82,7 +82,7 @@ key2doubleMap computeThetasToRoot(const key2doubleMap& deltaThetaMap,
thetaToRootMap.insert(pair<Key, double>(keyAnchor, 0.0));
// for all nodes in the tree
BOOST_FOREACH(const key2doubleMap::value_type& it, deltaThetaMap) {
for(const key2doubleMap::value_type& it: deltaThetaMap) {
// compute the orientation wrt root
Key nodeKey = it.first;
double nodeTheta = computeThetaToRoot(nodeKey, tree, deltaThetaMap,
@ -101,7 +101,7 @@ void getSymbolicGraph(
// Get keys for which you want the orientation
size_t id = 0;
// Loop over the factors
BOOST_FOREACH(const boost::shared_ptr<NonlinearFactor>& factor, g) {
for(const boost::shared_ptr<NonlinearFactor>& factor: g) {
if (factor->keys().size() == 2) {
Key key1 = factor->keys()[0];
Key key2 = factor->keys()[1];
@ -167,14 +167,14 @@ GaussianFactorGraph buildLinearOrientationGraph(
noiseModel::Diagonal::shared_ptr model_deltaTheta;
// put original measurements in the spanning tree
BOOST_FOREACH(const size_t& factorId, spanningTreeIds) {
for(const size_t& factorId: spanningTreeIds) {
const FastVector<Key>& keys = g[factorId]->keys();
Key key1 = keys[0], key2 = keys[1];
getDeltaThetaAndNoise(g[factorId], deltaTheta, model_deltaTheta);
lagoGraph.add(key1, -I, key2, I, deltaTheta, model_deltaTheta);
}
// put regularized measurements in the chordsIds
BOOST_FOREACH(const size_t& factorId, chordsIds) {
for(const size_t& factorId: chordsIds) {
const FastVector<Key>& keys = g[factorId]->keys();
Key key1 = keys[0], key2 = keys[1];
getDeltaThetaAndNoise(g[factorId], deltaTheta, model_deltaTheta);
@ -199,7 +199,7 @@ static NonlinearFactorGraph buildPose2graph(const NonlinearFactorGraph& graph) {
gttic(lago_buildPose2graph);
NonlinearFactorGraph pose2Graph;
BOOST_FOREACH(const boost::shared_ptr<NonlinearFactor>& factor, graph) {
for(const boost::shared_ptr<NonlinearFactor>& factor: graph) {
// recast to a between on Pose2
boost::shared_ptr<BetweenFactor<Pose2> > pose2Between =
@ -226,7 +226,7 @@ static PredecessorMap<Key> findOdometricPath(
Key minKey = keyAnchor; // this initialization does not matter
bool minUnassigned = true;
BOOST_FOREACH(const boost::shared_ptr<NonlinearFactor>& factor, pose2Graph) {
for(const boost::shared_ptr<NonlinearFactor>& factor: pose2Graph) {
Key key1 = std::min(factor->keys()[0], factor->keys()[1]);
Key key2 = std::max(factor->keys()[0], factor->keys()[1]);
@ -299,7 +299,7 @@ Values computePoses(const NonlinearFactorGraph& pose2graph,
GaussianFactorGraph linearPose2graph;
// We include the linear version of each between factor
BOOST_FOREACH(const boost::shared_ptr<NonlinearFactor>& factor, pose2graph) {
for(const boost::shared_ptr<NonlinearFactor>& factor: pose2graph) {
boost::shared_ptr<BetweenFactor<Pose2> > pose2Between =
boost::dynamic_pointer_cast<BetweenFactor<Pose2> >(factor);
@ -346,7 +346,7 @@ Values computePoses(const NonlinearFactorGraph& pose2graph,
// put into Values structure
Values initialGuessLago;
BOOST_FOREACH(const VectorValues::value_type& it, posesLago) {
for(const VectorValues::value_type& it: posesLago) {
Key key = it.first;
if (key != keyAnchor) {
const Vector& poseVector = it.second;
@ -383,7 +383,7 @@ Values initialize(const NonlinearFactorGraph& graph,
VectorValues orientations = initializeOrientations(graph);
// for all nodes in the tree
BOOST_FOREACH(const VectorValues::value_type& it, orientations) {
for(const VectorValues::value_type& it: orientations) {
Key key = it.first;
if (key != keyAnchor) {
const Pose2& pose = initialGuess.at<Pose2>(key);


@ -465,7 +465,7 @@ TEST(GeneralSFMFactor, BinaryJacobianFactor) {
}
// Now loop over all these noise models
BOOST_FOREACH(SharedNoiseModel model, models) {
for(SharedNoiseModel model: models) {
Projection factor(measurement, model, X(1), L(1));
// Test linearize


@ -285,7 +285,7 @@ TEST( Lago, largeGraphNoisy_orientations ) {
Values::shared_ptr expected;
boost::tie(gmatlab, expected) = readG2o(matlabFile);
BOOST_FOREACH(const Values::KeyValuePair& key_val, *expected){
for(const Values::KeyValuePair& key_val: *expected){
Key k = key_val.key;
EXPECT(assert_equal(expected->at<Pose2>(k), actual.at<Pose2>(k), 1e-5));
}
@ -311,7 +311,7 @@ TEST( Lago, largeGraphNoisy ) {
Values::shared_ptr expected;
boost::tie(gmatlab, expected) = readG2o(matlabFile);
BOOST_FOREACH(const Values::KeyValuePair& key_val, *expected){
for(const Values::KeyValuePair& key_val: *expected){
Key k = key_val.key;
EXPECT(assert_equal(expected->at<Pose2>(k), actual.at<Pose2>(k), 1e-2));
}


@ -20,6 +20,8 @@
#include <gtsam/symbolic/SymbolicConditional.h>
#include <gtsam/inference/FactorGraph-inst.h>
#include <boost/foreach.hpp>
#include <fstream>
namespace gtsam {
@ -43,7 +45,7 @@ namespace gtsam {
SymbolicConditional::Frontals frontals = conditional->frontals();
Key me = frontals.front();
SymbolicConditional::Parents parents = conditional->parents();
BOOST_FOREACH(Key p, parents)
for(Key p: parents)
of << p << "->" << me << std::endl;
}


@ -24,7 +24,6 @@
#include <gtsam/base/timing.h>
#include <boost/shared_ptr.hpp>
#include <boost/foreach.hpp>
#include <boost/make_shared.hpp>
#include <utility>
@ -43,12 +42,12 @@ namespace gtsam
// Gather all keys
KeySet allKeys;
BOOST_FOREACH(const boost::shared_ptr<FACTOR>& factor, factors) {
for(const boost::shared_ptr<FACTOR>& factor: factors) {
allKeys.insert(factor->begin(), factor->end());
}
// Check keys
BOOST_FOREACH(Key key, keys) {
for(Key key: keys) {
if(allKeys.find(key) == allKeys.end())
throw std::runtime_error("Requested to eliminate a key that is not in the factors");
}


@ -213,7 +213,7 @@ void getAllCliques(const SymbolicBayesTree::sharedClique& subtree, SymbolicBayes
if (subtree) {
cliques.push_back(subtree);
// Recursive call over all child cliques
BOOST_FOREACH(SymbolicBayesTree::sharedClique& childClique, subtree->children) {
for(SymbolicBayesTree::sharedClique& childClique: subtree->children) {
getAllCliques(childClique,cliques);
}
}
@ -241,7 +241,7 @@ TEST( BayesTree, shortcutCheck )
SymbolicBayesTree::Cliques allCliques;
getAllCliques(rootClique,allCliques);
BOOST_FOREACH(SymbolicBayesTree::sharedClique& clique, allCliques) {
for(SymbolicBayesTree::sharedClique& clique: allCliques) {
//clique->print("Clique#");
SymbolicBayesNet bn = clique->shortcut(rootClique);
//bn.print("Shortcut:\n");
@ -250,13 +250,13 @@ TEST( BayesTree, shortcutCheck )
// Check if all the cached shortcuts are cleared
rootClique->deleteCachedShortcuts();
BOOST_FOREACH(SymbolicBayesTree::sharedClique& clique, allCliques) {
for(SymbolicBayesTree::sharedClique& clique: allCliques) {
bool notCleared = clique->cachedSeparatorMarginal().is_initialized();
CHECK( notCleared == false);
}
EXPECT_LONGS_EQUAL(0, (long)rootClique->numCachedSeparatorMarginals());
// BOOST_FOREACH(SymbolicBayesTree::sharedClique& clique, allCliques) {
// for(SymbolicBayesTree::sharedClique& clique: allCliques) {
// clique->print("Clique#");
// if(clique->cachedShortcut()){
// bn = clique->cachedShortcut().get();