Replaced BOOST_FOREACH with range-based for loops in the gtsam_unstable folder.
parent ce2cd71112
commit 3b7c57aedf
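The conversion is mechanical: every BOOST_FOREACH(declaration, range) becomes a C++11 range-based for loop over the same range, and the now-unneeded <boost/foreach.hpp> includes are dropped. A minimal before/after sketch of the pattern (standalone example with illustrative names, not code from this commit):

    #include <iostream>
    #include <list>

    int main() {
      std::list<int> keys = {1, 2, 3};
      // Before: BOOST_FOREACH(const int& key, keys) std::cout << key << " ";
      // After: equivalent C++11 range-based for over the same container
      for (const int& key : keys)
        std::cout << key << " ";
      std::cout << std::endl;
      return 0;
    }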
@@ -19,7 +19,6 @@
 #pragma once

 #include <gtsam_unstable/base/BTree.h>
-#include <boost/foreach.hpp>
 #include <iostream>
 #include <list>
 #include <set>
@@ -58,14 +57,14 @@ public:
 // constructor with a list of unconnected keys
 DSF(const std::list<KEY>& keys) :
 Tree() {
-BOOST_FOREACH(const KEY& key, keys)
+for(const KEY& key: keys)
 *this = this->add(key, key);
 }

 // constructor with a set of unconnected keys
 DSF(const std::set<KEY>& keys) :
 Tree() {
-BOOST_FOREACH(const KEY& key, keys)
+for(const KEY& key: keys)
 *this = this->add(key, key);
 }

@@ -109,7 +108,7 @@ public:
 // create a new singleton with a list of fully connected keys
 Self makeList(const std::list<KEY>& keys) const {
 Self t = *this;
-BOOST_FOREACH(const KEY& key, keys)
+for(const KEY& key: keys)
 t = t.makePair(key, keys.front());
 return t;
 }
@@ -117,7 +116,7 @@ public:
 // return a dsf in which all find_set operations will be O(1) due to path compression.
 DSF flatten() const {
 DSF t = *this;
-BOOST_FOREACH(const KeyLabel& pair, (Tree)t)
+for(const KeyLabel& pair: (Tree)t)
 t.findSet_(pair.first);
 return t;
 }
@@ -125,7 +124,7 @@ public:
 // maps f over all keys, must be invertible
 DSF map(boost::function<KEY(const KEY&)> func) const {
 DSF t;
-BOOST_FOREACH(const KeyLabel& pair, (Tree)*this)
+for(const KeyLabel& pair: (Tree)*this)
 t = t.add(func(pair.first), func(pair.second));
 return t;
 }
@@ -133,7 +132,7 @@ public:
 // return the number of sets
 size_t numSets() const {
 size_t num = 0;
-BOOST_FOREACH(const KeyLabel& pair, (Tree)*this)
+for(const KeyLabel& pair: (Tree)*this)
 if (pair.first == pair.second)
 num++;
 return num;
@@ -147,7 +146,7 @@ public:
 // return all sets, i.e. a partition of all elements
 std::map<KEY, Set> sets() const {
 std::map<KEY, Set> sets;
-BOOST_FOREACH(const KeyLabel& pair, (Tree)*this)
+for(const KeyLabel& pair: (Tree)*this)
 sets[findSet(pair.second)].insert(pair.first);
 return sets;
 }
@@ -155,7 +154,7 @@ public:
 // return a partition of the given elements {keys}
 std::map<KEY, Set> partition(const std::list<KEY>& keys) const {
 std::map<KEY, Set> partitions;
-BOOST_FOREACH(const KEY& key, keys)
+for(const KEY& key: keys)
 partitions[findSet(key)].insert(key);
 return partitions;
 }
@@ -163,7 +162,7 @@ public:
 // get the nodes in the tree with the given label
 Set set(const KEY& label) const {
 Set set;
-BOOST_FOREACH(const KeyLabel& pair, (Tree)*this) {
+for(const KeyLabel& pair: (Tree)*this) {
 if (pair.second == label || findSet(pair.second) == label)
 set.insert(pair.first);
 }
@@ -183,7 +182,7 @@ public:
 // print the object
 void print(const std::string& name = "DSF") const {
 std::cout << name << std::endl;
-BOOST_FOREACH(const KeyLabel& pair, (Tree)*this)
+for(const KeyLabel& pair: (Tree)*this)
 std::cout << (std::string) pair.first << " " << (std::string) pair.second
 << std::endl;
 }
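The DSF loops above iterate (Tree)*this, the underlying BTree of (key, label) pairs; the replacement works because range-based for, like BOOST_FOREACH, only requires the expression to provide begin()/end() iterators. A standalone sketch of the same pattern using std::map as a stand-in for the BTree range (illustrative, not GTSAM code):

    #include <iostream>
    #include <map>
    #include <utility>

    int main() {
      // Stand-in for the (Tree)*this range of (key, label) pairs.
      std::map<int, int> tree = {{1, 1}, {2, 1}, {3, 3}};
      // Count representatives, mirroring the numSets() loop above.
      size_t num = 0;
      for (const std::pair<const int, int>& pair : tree)
        if (pair.first == pair.second)
          num++;
      std::cout << "number of sets: " << num << std::endl;
      return 0;
    }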
@@ -17,7 +17,6 @@
 */

 #include <boost/shared_ptr.hpp>
-#include <boost/foreach.hpp>
 #include <boost/assign/std/list.hpp> // for +=
 using namespace boost::assign;

@@ -148,7 +147,7 @@ TEST( BTree, iterating )

 // acid iterator test: BOOST_FOREACH
 int sum = 0;
-BOOST_FOREACH(const KeyInt& p, tree)
+for(const KeyInt& p: tree)
 sum += p.second;
 LONGS_EQUAL(15,sum)

@@ -295,7 +295,7 @@ TEST(DSF, mergePairwiseMatches) {

 // Merge matches
 DSF<Measurement> dsf(measurements);
-BOOST_FOREACH(const Match& m, matches)
+for(const Match& m: matches)
 dsf.makeUnionInPlace(m.first,m.second);

 // Check that sets are merged correctly
@@ -18,7 +18,6 @@

 #include <gtsam_unstable/base/DSFMap.h>

-#include <boost/foreach.hpp>
 #include <boost/assign/std/list.hpp>
 #include <boost/assign/std/set.hpp>
 using namespace boost::assign;
@@ -72,7 +71,7 @@ TEST(DSFMap, mergePairwiseMatches) {

 // Merge matches
 DSFMap<size_t> dsf;
-BOOST_FOREACH(const Match& m, matches)
+for(const Match& m: matches)
 dsf.merge(m.first,m.second);

 // Each point is now associated with a set, represented by one of its members
@@ -102,7 +101,7 @@ TEST(DSFMap, mergePairwiseMatches2) {

 // Merge matches
 DSFMap<Measurement> dsf;
-BOOST_FOREACH(const Match& m, matches)
+for(const Match& m: matches)
 dsf.merge(m.first,m.second);

 // Check that sets are merged correctly
@@ -122,7 +121,7 @@ TEST(DSFMap, sets){

 // Merge matches
 DSFMap<size_t> dsf;
-BOOST_FOREACH(const Match& m, matches)
+for(const Match& m: matches)
 dsf.merge(m.first,m.second);

 map<size_t, set<size_t> > sets = dsf.sets();
@@ -130,9 +129,9 @@ TEST(DSFMap, sets){
 s1 += 1,2,3;
 s2 += 4,5,6;

-/*BOOST_FOREACH(key_pair st, sets){
+/*for(key_pair st: sets){
 cout << "Set " << st.first << " :{";
-BOOST_FOREACH(const size_t s, st.second)
+for(const size_t s: st.second)
 cout << s << ", ";
 cout << "}" << endl;
 }*/
@@ -15,7 +15,7 @@ namespace gtsam {
 /* ************************************************************************* */
 AllDiff::AllDiff(const DiscreteKeys& dkeys) :
 Constraint(dkeys.indices()) {
-BOOST_FOREACH(const DiscreteKey& dkey, dkeys)
+for(const DiscreteKey& dkey: dkeys)
 cardinalities_.insert(dkey);
 }

@@ -23,7 +23,7 @@ namespace gtsam {
 void AllDiff::print(const std::string& s,
 const KeyFormatter& formatter) const {
 std::cout << s << "AllDiff on ";
-BOOST_FOREACH (Key dkey, keys_)
+for (Key dkey: keys_)
 std::cout << formatter(dkey) << " ";
 std::cout << std::endl;
 }
@@ -31,7 +31,7 @@ namespace gtsam {
 /* ************************************************************************* */
 double AllDiff::operator()(const Values& values) const {
 std::set < size_t > taken; // record values taken by keys
-BOOST_FOREACH(Key dkey, keys_) {
+for(Key dkey: keys_) {
 size_t value = values.at(dkey); // get the value for that key
 if (taken.count(value)) return 0.0;// check if value alreday taken
 taken.insert(value);// if not, record it as taken and keep checking
@@ -70,7 +70,7 @@ namespace gtsam {
 // Check all other domains for singletons and erase corresponding values
 // This is the same as arc-consistency on the equivalent binary constraints
 bool changed = false;
-BOOST_FOREACH(Key k, keys_)
+for(Key k: keys_)
 if (k != j) {
 const Domain& Dk = domains[k];
 if (Dk.isSingleton()) { // check if singleton
@@ -88,7 +88,7 @@ namespace gtsam {
 Constraint::shared_ptr AllDiff::partiallyApply(const Values& values) const {
 DiscreteKeys newKeys;
 // loop over keys and add them only if they do not appear in values
-BOOST_FOREACH(Key k, keys_)
+for(Key k: keys_)
 if (values.find(k) == values.end()) {
 newKeys.push_back(DiscreteKey(k,cardinalities_.at(k)));
 }
@@ -99,7 +99,7 @@ namespace gtsam {
 Constraint::shared_ptr AllDiff::partiallyApply(
 const std::vector<Domain>& domains) const {
 DiscreteFactor::Values known;
-BOOST_FOREACH(Key k, keys_) {
+for(Key k: keys_) {
 const Domain& Dk = domains[k];
 if (Dk.isSingleton())
 known[k] = Dk.firstValue();
@@ -8,7 +8,6 @@
 #include <gtsam_unstable/discrete/Domain.h>
 #include <gtsam_unstable/discrete/CSP.h>
 #include <gtsam/base/Testable.h>
-#include <boost/foreach.hpp>

 using namespace std;

@@ -45,7 +44,7 @@ namespace gtsam {
 changed[v] = false;
 // loop over all factors/constraints for variable v
 const VariableIndex::Factors& factors = index[v];
-BOOST_FOREACH(size_t f,factors) {
+for(size_t f: factors) {
 // if not already a singleton
 if (!domains[v].isSingleton()) {
 // get the constraint and call its ensureArcConsistency method
@@ -85,7 +84,7 @@ namespace gtsam {
 // TODO: create a new ordering as we go, to ensure a connected graph
 // KeyOrdering ordering;
 // vector<Index> dkeys;
-BOOST_FOREACH(const DiscreteFactor::shared_ptr& f, factors_) {
+for(const DiscreteFactor::shared_ptr& f: factors_) {
 Constraint::shared_ptr constraint = boost::dynamic_pointer_cast<Constraint>(f);
 if (!constraint) throw runtime_error("CSP:runArcConsistency: non-constraint factor");
 Constraint::shared_ptr reduced = constraint->partiallyApply(domains);
@@ -54,7 +54,7 @@ namespace gtsam {
 // /** return product of all factors as a single factor */
 // DecisionTreeFactor product() const {
 // DecisionTreeFactor result;
-// BOOST_FOREACH(const sharedFactor& factor, *this)
+// for(const sharedFactor& factor: *this)
 // if (factor) result = (*factor) * result;
 // return result;
 // }
@@ -19,9 +19,9 @@ namespace gtsam {
 const KeyFormatter& formatter) const {
 // cout << s << ": Domain on " << formatter(keys_[0]) << " (j=" <<
 // formatter(keys_[0]) << ") with values";
-// BOOST_FOREACH (size_t v,values_) cout << " " << v;
+// for (size_t v: values_) cout << " " << v;
 // cout << endl;
-BOOST_FOREACH (size_t v,values_) cout << v;
+for (size_t v: values_) cout << v;
 }

 /* ************************************************************************* */
@@ -50,7 +50,7 @@ namespace gtsam {
 bool Domain::ensureArcConsistency(size_t j, vector<Domain>& domains) const {
 if (j != keys_[0]) throw invalid_argument("Domain check on wrong domain");
 Domain& D = domains[j];
-BOOST_FOREACH(size_t value, values_)
+for(size_t value: values_)
 if (!D.contains(value)) throw runtime_error("Unsatisfiable");
 D = *this;
 return true;
@@ -60,9 +60,9 @@ namespace gtsam {
 bool Domain::checkAllDiff(const vector<Key> keys, vector<Domain>& domains) {
 Key j = keys_[0];
 // for all values in this domain
-BOOST_FOREACH(size_t value, values_) {
+for(size_t value: values_) {
 // for all connected domains
-BOOST_FOREACH(Key k, keys)
+for(Key k: keys)
 // if any domain contains the value we cannot make this domain singleton
 if (k!=j && domains[k].contains(value))
 goto found;
@@ -11,7 +11,6 @@
 #include <gtsam/base/timing.h>

 #include <boost/tokenizer.hpp>
-#include <boost/foreach.hpp>

 #include <fstream>
 #include <iomanip>
@@ -163,7 +162,7 @@ namespace gtsam {

 if (!mutexBound) {
 DiscreteKeys dkeys;
-BOOST_FOREACH(const Student& s, students_)
+for(const Student& s: students_)
 dkeys.push_back(s.key_);
 addAllDiff(dkeys);
 } else {
@@ -182,30 +181,30 @@ namespace gtsam {
 void Scheduler::print(const string& s) const {

 cout << s << " Faculty:" << endl;
-BOOST_FOREACH(const string& name, facultyName_)
+for(const string& name: facultyName_)
 cout << name << '\n';
 cout << endl;

 cout << s << " Slots:\n";
 size_t i = 0;
-BOOST_FOREACH(const string& name, slotName_)
+for(const string& name: slotName_)
 cout << i++ << " " << name << endl;
 cout << endl;

 cout << "Availability:\n" << available_ << '\n';

 cout << s << " Area constraints:\n";
-BOOST_FOREACH(const FacultyInArea::value_type& it, facultyInArea_)
+for(const FacultyInArea::value_type& it: facultyInArea_)
 {
 cout << setw(12) << it.first << ": ";
-BOOST_FOREACH(double v, it.second)
+for(double v: it.second)
 cout << v << " ";
 cout << '\n';
 }
 cout << endl;

 cout << s << " Students:\n";
-BOOST_FOREACH (const Student& student, students_)
+for (const Student& student: students_)
 student.print();
 cout << endl;

@@ -15,7 +15,6 @@
 #include <boost/assign/std/vector.hpp>
 #include <boost/assign/std/map.hpp>
 #include <boost/optional.hpp>
-#include <boost/foreach.hpp>
 #include <boost/format.hpp>

 #include <algorithm>
@@ -303,7 +302,7 @@ void accomodateStudent() {
 vector<size_t> slots;
 slots += 16, 17, 11, 2, 0, 5, 9, 14;
 vector<double> slotsAvailable(scheduler.nrTimeSlots(), 1.0);
-BOOST_FOREACH(size_t s, slots)
+for(size_t s: slots)
 slotsAvailable[s] = 0;
 scheduler.setSlotsAvailable(slotsAvailable);

@@ -45,7 +45,7 @@ class LoopyBelief {
 void print(const std::string& s = "") const {
 cout << s << ":" << endl;
 star->print("Star graph: ");
-BOOST_FOREACH(Key key, correctedBeliefIndices | boost::adaptors::map_keys) {
+for(Key key: correctedBeliefIndices | boost::adaptors::map_keys) {
 cout << "Belief factor index for " << key << ": "
 << correctedBeliefIndices.at(key) << endl;
 }
@@ -70,7 +70,7 @@ public:
 /// print
 void print(const std::string& s = "") const {
 cout << s << ":" << endl;
-BOOST_FOREACH(Key key, starGraphs_ | boost::adaptors::map_keys) {
+for(Key key: starGraphs_ | boost::adaptors::map_keys) {
 starGraphs_.at(key).print((boost::format("Node %d:") % key).str());
 }
 }
@@ -83,7 +83,7 @@ public:
 DiscreteFactorGraph::shared_ptr beliefs(new DiscreteFactorGraph());
 std::map<Key, std::map<Key, DiscreteFactor::shared_ptr> > allMessages;
 // Eliminate each star graph
-BOOST_FOREACH(Key key, starGraphs_ | boost::adaptors::map_keys) {
+for(Key key: starGraphs_ | boost::adaptors::map_keys) {
 // cout << "***** Node " << key << "*****" << endl;
 // initialize belief to the unary factor from the original graph
 DecisionTreeFactor::shared_ptr beliefAtKey;
@@ -92,9 +92,9 @@ public:
 std::map<Key, DiscreteFactor::shared_ptr> messages;

 // eliminate each neighbor in this star graph one by one
-BOOST_FOREACH(Key neighbor, starGraphs_.at(key).correctedBeliefIndices | boost::adaptors::map_keys) {
+for(Key neighbor: starGraphs_.at(key).correctedBeliefIndices | boost::adaptors::map_keys) {
 DiscreteFactorGraph subGraph;
-BOOST_FOREACH(size_t factor, starGraphs_.at(key).varIndex_[neighbor]) {
+for(size_t factor: starGraphs_.at(key).varIndex_[neighbor]) {
 subGraph.push_back(starGraphs_.at(key).star->at(factor));
 }
 if (debug) subGraph.print("------- Subgraph:");
@@ -141,9 +141,9 @@ public:

 // Update corrected beliefs
 VariableIndex beliefFactors(*beliefs);
-BOOST_FOREACH(Key key, starGraphs_ | boost::adaptors::map_keys) {
+for(Key key: starGraphs_ | boost::adaptors::map_keys) {
 std::map<Key, DiscreteFactor::shared_ptr> messages = allMessages[key];
-BOOST_FOREACH(Key neighbor, starGraphs_.at(key).correctedBeliefIndices | boost::adaptors::map_keys) {
+for(Key neighbor: starGraphs_.at(key).correctedBeliefIndices | boost::adaptors::map_keys) {
 DecisionTreeFactor correctedBelief = (*boost::dynamic_pointer_cast<
 DecisionTreeFactor>(beliefs->at(beliefFactors[key].front())))
 / (*boost::dynamic_pointer_cast<DecisionTreeFactor>(
@@ -169,13 +169,13 @@ private:
 const std::map<Key, DiscreteKey>& allDiscreteKeys) const {
 StarGraphs starGraphs;
 VariableIndex varIndex(graph); ///< access to all factors of each node
-BOOST_FOREACH(Key key, varIndex | boost::adaptors::map_keys) {
+for(Key key: varIndex | boost::adaptors::map_keys) {
 // initialize to multiply with other unary factors later
 DecisionTreeFactor::shared_ptr prodOfUnaries;

 // collect all factors involving this key in the original graph
 DiscreteFactorGraph::shared_ptr star(new DiscreteFactorGraph());
-BOOST_FOREACH(size_t factorIdx, varIndex[key]) {
+for(size_t factorIdx: varIndex[key]) {
 star->push_back(graph.at(factorIdx));

 // accumulate unary factors
@@ -196,7 +196,7 @@ private:
 KeySet neighbors = star->keys();
 neighbors.erase(key);
 CorrectedBeliefIndices correctedBeliefIndices;
-BOOST_FOREACH(Key neighbor, neighbors) {
+for(Key neighbor: neighbors) {
 // TODO: default table for keys with more than 2 values?
 string initialBelief;
 for (size_t v = 0; v < allDiscreteKeys.at(neighbor).second - 1; ++v) {
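Several of the loops above iterate a Boost.Range adaptor (for example starGraphs_ | boost::adaptors::map_keys) rather than a plain container; range-based for works here too, since the adapted range still exposes begin()/end(). A standalone sketch with illustrative names (not code from this commit):

    #include <boost/range/adaptor/map.hpp>
    #include <iostream>
    #include <map>

    int main() {
      std::map<int, double> beliefs = {{1, 0.5}, {2, 0.25}};
      // Iterate only the keys, exactly as the BOOST_FOREACH versions did.
      for (int key : beliefs | boost::adaptors::map_keys)
        std::cout << key << " ";
      std::cout << std::endl;
      return 0;
    }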
@@ -309,19 +309,19 @@ int main(int argc, char** argv) {
 // And to demonstrate the fixed-lag aspect, print the keys contained in each smoother after 3.0 seconds
 cout << "After 15.0 seconds, each version contains to the following keys:" << endl;
 cout << " Concurrent Filter Keys: " << endl;
-BOOST_FOREACH(const Values::ConstKeyValuePair& key_value, concurrentFilter.getLinearizationPoint()) {
+for(const Values::ConstKeyValuePair& key_value: concurrentFilter.getLinearizationPoint()) {
 cout << setprecision(5) << " Key: " << key_value.key << endl;
 }
 cout << " Concurrent Smoother Keys: " << endl;
-BOOST_FOREACH(const Values::ConstKeyValuePair& key_value, concurrentSmoother.getLinearizationPoint()) {
+for(const Values::ConstKeyValuePair& key_value: concurrentSmoother.getLinearizationPoint()) {
 cout << setprecision(5) << " Key: " << key_value.key << endl;
 }
 cout << " Fixed-Lag Smoother Keys: " << endl;
-BOOST_FOREACH(const FixedLagSmoother::KeyTimestampMap::value_type& key_timestamp, fixedlagSmoother.timestamps()) {
+for(const FixedLagSmoother::KeyTimestampMap::value_type& key_timestamp: fixedlagSmoother.timestamps()) {
 cout << setprecision(5) << " Key: " << key_timestamp.first << endl;
 }
 cout << " Batch Smoother Keys: " << endl;
-BOOST_FOREACH(const FixedLagSmoother::KeyTimestampMap::value_type& key_timestamp, batchSmoother.timestamps()) {
+for(const FixedLagSmoother::KeyTimestampMap::value_type& key_timestamp: batchSmoother.timestamps()) {
 cout << setprecision(5) << " Key: " << key_timestamp.first << endl;
 }

@@ -133,11 +133,11 @@ int main(int argc, char** argv) {
 // And to demonstrate the fixed-lag aspect, print the keys contained in each smoother after 3.0 seconds
 cout << "After 3.0 seconds, " << endl;
 cout << " Batch Smoother Keys: " << endl;
-BOOST_FOREACH(const FixedLagSmoother::KeyTimestampMap::value_type& key_timestamp, smootherBatch.timestamps()) {
+for(const FixedLagSmoother::KeyTimestampMap::value_type& key_timestamp: smootherBatch.timestamps()) {
 cout << setprecision(5) << " Key: " << key_timestamp.first << " Time: " << key_timestamp.second << endl;
 }
 cout << " iSAM2 Smoother Keys: " << endl;
-BOOST_FOREACH(const FixedLagSmoother::KeyTimestampMap::value_type& key_timestamp, smootherISAM2.timestamps()) {
+for(const FixedLagSmoother::KeyTimestampMap::value_type& key_timestamp: smootherISAM2.timestamps()) {
 cout << setprecision(5) << " Key: " << key_timestamp.first << " Time: " << key_timestamp.second << endl;
 }

@@ -43,7 +43,6 @@
 #include <gtsam_unstable/slam/SmartRangeFactor.h>

 // Standard headers, added last, so we know headers above work on their own
-#include <boost/foreach.hpp>
 #include <fstream>
 #include <iostream>

@@ -152,7 +151,7 @@ int main(int argc, char** argv) {
 typedef boost::shared_ptr<SmartRangeFactor> SmartPtr;
 map<size_t, SmartPtr> smartFactors;
 if (smart) {
-BOOST_FOREACH(size_t jj,ids) {
+for(size_t jj: ids) {
 smartFactors[jj] = SmartPtr(new SmartRangeFactor(sigmaR));
 newFactors.push_back(smartFactors[jj]);
 }
@@ -166,7 +165,7 @@ int main(int argc, char** argv) {

 // Loop over odometry
 gttic_(iSAM);
-BOOST_FOREACH(const TimedOdometry& timedOdometry, odometry) {
+for(const TimedOdometry& timedOdometry: odometry) {
 //--------------------------------- odometry loop -----------------------------------------
 double t;
 Pose2 odometry;
@@ -219,25 +218,25 @@ int main(int argc, char** argv) {
 if (hasLandmarks) {
 // update landmark estimates
 landmarkEstimates = Values();
-BOOST_FOREACH(size_t jj,ids)
+for(size_t jj: ids)
 landmarkEstimates.insert(symbol('L', jj), result.at(symbol('L', jj)));
 }
 newFactors = NonlinearFactorGraph();
 initial = Values();
 if (smart && !hasLandmarks) {
 cout << "initialize from smart landmarks" << endl;
-BOOST_FOREACH(size_t jj,ids) {
+for(size_t jj: ids) {
 Point2 landmark = smartFactors[jj]->triangulate(result);
 initial.insert(symbol('L', jj), landmark);
 landmarkEstimates.insert(symbol('L', jj), landmark);
 }
 }
 countK = 0;
-BOOST_FOREACH(const Values::ConstFiltered<Point2>::KeyValuePair& it, result.filter<Point2>())
+for(const Values::ConstFiltered<Point2>::KeyValuePair& it: result.filter<Point2>())
 os2 << it.key << "\t" << it.value.x() << "\t" << it.value.y() << "\t1"
 << endl;
 if (smart) {
-BOOST_FOREACH(size_t jj,ids) {
+for(size_t jj: ids) {
 Point2 landmark = smartFactors[jj]->triangulate(result);
 os3 << jj << "\t" << landmark.x() << "\t" << landmark.y() << "\t1"
 << endl;
@@ -247,7 +246,7 @@ int main(int argc, char** argv) {
 i += 1;
 if (i>end) break;
 //--------------------------------- odometry loop -----------------------------------------
-} // BOOST_FOREACH
+} // end for
 gttoc_(iSAM);

 // Print timings
@@ -257,7 +256,7 @@ int main(int argc, char** argv) {
 Values result = isam.calculateEstimate();
 ofstream os(
 "/Users/dellaert/borg/gtsam/gtsam_unstable/examples/rangeResult.txt");
-BOOST_FOREACH(const Values::ConstFiltered<Pose2>::KeyValuePair& it, result.filter<Pose2>())
+for(const Values::ConstFiltered<Pose2>::KeyValuePair& it: result.filter<Pose2>())
 os << it.key << "\t" << it.value.x() << "\t" << it.value.y() << "\t"
 << it.value.theta() << endl;
 exit(0);
@@ -42,7 +42,6 @@
 #include <gtsam/sam/RangeFactor.h>

 // Standard headers, added last, so we know headers above work on their own
-#include <boost/foreach.hpp>
 #include <fstream>
 #include <iostream>

@@ -143,7 +142,7 @@ int main(int argc, char** argv) {

 // Loop over odometry
 gttic_(iSAM);
-BOOST_FOREACH(const TimedOdometry& timedOdometry, odometry) {
+for(const TimedOdometry& timedOdometry: odometry) {
 //--------------------------------- odometry loop -----------------------------------------
 double t;
 Pose2 odometry;
@@ -194,7 +193,7 @@ int main(int argc, char** argv) {
 }
 i += 1;
 //--------------------------------- odometry loop -----------------------------------------
-} // BOOST_FOREACH
+} // end for
 gttoc_(iSAM);

 // Print timings
@@ -204,12 +203,12 @@ int main(int argc, char** argv) {
 Values result = isam.calculateEstimate();
 ofstream os2(
 "/Users/dellaert/borg/gtsam/gtsam_unstable/examples/rangeResultLM.txt");
-BOOST_FOREACH(const Values::ConstFiltered<Point2>::KeyValuePair& it, result.filter<Point2>())
+for(const Values::ConstFiltered<Point2>::KeyValuePair& it: result.filter<Point2>())
 os2 << it.key << "\t" << it.value.x() << "\t" << it.value.y() << "\t1"
 << endl;
 ofstream os(
 "/Users/dellaert/borg/gtsam/gtsam_unstable/examples/rangeResult.txt");
-BOOST_FOREACH(const Values::ConstFiltered<Pose2>::KeyValuePair& it, result.filter<Pose2>())
+for(const Values::ConstFiltered<Pose2>::KeyValuePair& it: result.filter<Pose2>())
 os << it.key << "\t" << it.value.x() << "\t" << it.value.y() << "\t"
 << it.value.theta() << endl;
 exit(0);
@@ -4,7 +4,6 @@
 */

 #include <iostream>
-#include <boost/foreach.hpp>
 #include <boost/random/linear_congruential.hpp>
 #include <boost/random/uniform_real.hpp>
 #include <boost/random/normal_distribution.hpp>
@@ -55,7 +54,7 @@ bool SimPolygon2D::equals(const SimPolygon2D& p, double tol) const {
 /* ************************************************************************* */
 void SimPolygon2D::print(const string& s) const {
 cout << "SimPolygon " << s << ": " << endl;
-BOOST_FOREACH(const Point2& p, landmarks_)
+for(const Point2& p: landmarks_)
 p.print(" ");
 }

@@ -73,7 +72,7 @@ bool SimPolygon2D::contains(const Point2& c) const {
 vector<SimWall2D> edges = walls();
 bool initialized = false;
 bool lastSide = false;
-BOOST_FOREACH(const SimWall2D& ab, edges) {
+for(const SimWall2D& ab: edges) {
 // compute cross product of ab and ac
 Point2 dab = ab.b() - ab.a();
 Point2 dac = c - ab.a();
@@ -97,10 +96,10 @@ bool SimPolygon2D::contains(const Point2& c) const {

 /* ************************************************************************* */
 bool SimPolygon2D::overlaps(const SimPolygon2D& p) const {
-BOOST_FOREACH(const Point2& a, landmarks_)
+for(const Point2& a: landmarks_)
 if (p.contains(a))
 return true;
-BOOST_FOREACH(const Point2& a, p.landmarks_)
+for(const Point2& a: p.landmarks_)
 if (contains(a))
 return true;
 return false;
@@ -108,7 +107,7 @@ bool SimPolygon2D::overlaps(const SimPolygon2D& p) const {

 /* ***************************************************************** */
 bool SimPolygon2D::anyContains(const Point2& p, const vector<SimPolygon2D>& obstacles) {
-BOOST_FOREACH(const SimPolygon2D& poly, obstacles)
+for(const SimPolygon2D& poly: obstacles)
 if (poly.contains(p))
 return true;
 return false;
@@ -116,7 +115,7 @@ bool SimPolygon2D::anyContains(const Point2& p, const vector<SimPolygon2D>& obst

 /* ************************************************************************* */
 bool SimPolygon2D::anyOverlaps(const SimPolygon2D& p, const vector<SimPolygon2D>& obstacles) {
-BOOST_FOREACH(const SimPolygon2D& poly, obstacles)
+for(const SimPolygon2D& poly: obstacles)
 if (poly.overlaps(p))
 return true;
 return false;
@@ -134,7 +133,7 @@ SimPolygon2D SimPolygon2D::randomTriangle(
 lms.push_back(Point2(-d2,-d2));
 lms.push_back(Point2( d2,-d2));

-BOOST_FOREACH(const SimPolygon2D& poly, existing_polys)
+for(const SimPolygon2D& poly: existing_polys)
 lms.insert(lms.begin(), poly.vertices().begin(), poly.vertices().end());

 for (size_t i=0; i<max_it; ++i) {
@@ -188,7 +187,7 @@ SimPolygon2D SimPolygon2D::randomRectangle(
 lms.push_back(Point2(-d2, d2));
 lms.push_back(Point2(-d2,-d2));
 lms.push_back(Point2( d2,-d2));
-BOOST_FOREACH(const SimPolygon2D& poly, existing_polys)
+for(const SimPolygon2D& poly: existing_polys)
 lms.insert(lms.begin(), poly.vertices().begin(), poly.vertices().end());

 const Point2 lower_corner(-side_len,-side_len);
@@ -320,7 +319,7 @@ bool SimPolygon2D::insideBox(double s, const Point2& p) {
 /* ***************************************************************** */
 bool SimPolygon2D::nearExisting(const std::vector<Point2>& S,
 const Point2& p, double threshold) {
-BOOST_FOREACH(const Point2& Sp, S)
+for(const Point2& Sp: S)
 if (Sp.distance(p) < threshold)
 return true;
 return false;
@@ -3,7 +3,6 @@
 * @author Alex Cunningham
 */

-#include <boost/foreach.hpp>

 #include <gtsam/geometry/Pose2.h>
 #include <gtsam_unstable/geometry/SimWall2D.h>
@@ -135,7 +134,7 @@ std::pair<Pose2, bool> moveWithBounce(const Pose2& cur_pose, double step_size,
 SimWall2D traj(test_pose.t(), cur_pose.t());
 bool collision = false; Point2 intersection(1e+10, 1e+10);
 SimWall2D closest_wall;
-BOOST_FOREACH(const SimWall2D& wall, walls) {
+for(const SimWall2D& wall: walls) {
 Point2 cur_intersection;
 if (wall.intersects(traj,cur_intersection)) {
 collision = true;
@@ -30,7 +30,7 @@ QPSolver::QPSolver(const QP& qp) : qp_(qp) {
 VectorValues QPSolver::solveWithCurrentWorkingSet(
 const LinearInequalityFactorGraph& workingSet) const {
 GaussianFactorGraph workingGraph = baseGraph_;
-BOOST_FOREACH(const LinearInequality::shared_ptr& factor, workingSet) {
+for(const LinearInequality::shared_ptr& factor: workingSet) {
 if (factor->active())
 workingGraph.push_back(factor);
 }
@@ -53,7 +53,7 @@ JacobianFactor::shared_ptr QPSolver::createDualFactor(Key key,
 if (Aterms.size() > 0) {
 Vector b = Vector::Zero(delta.at(key).size());
 if (costVariableIndex_.find(key) != costVariableIndex_.end()) {
-BOOST_FOREACH(size_t factorIx, costVariableIndex_[key]) {
+for(size_t factorIx: costVariableIndex_[key]) {
 GaussianFactor::shared_ptr factor = qp_.cost.at(factorIx);
 b += factor->gradient(key, delta);
 }
@@ -69,7 +69,7 @@ JacobianFactor::shared_ptr QPSolver::createDualFactor(Key key,
 GaussianFactorGraph::shared_ptr QPSolver::buildDualGraph(
 const LinearInequalityFactorGraph& workingSet, const VectorValues& delta) const {
 GaussianFactorGraph::shared_ptr dualGraph(new GaussianFactorGraph());
-BOOST_FOREACH(Key key, constrainedKeys_) {
+for(Key key: constrainedKeys_) {
 // Each constrained key becomes a factor in the dual graph
 JacobianFactor::shared_ptr dualFactor = createDualFactor(key, workingSet, delta);
 if (!dualFactor->empty())
@@ -219,7 +219,7 @@ LinearInequalityFactorGraph QPSolver::identifyActiveConstraints(
 const LinearInequalityFactorGraph& inequalities,
 const VectorValues& initialValues) const {
 LinearInequalityFactorGraph workingSet;
-BOOST_FOREACH(const LinearInequality::shared_ptr& factor, inequalities){
+for(const LinearInequality::shared_ptr& factor: inequalities){
 LinearInequality::shared_ptr workingFactor(new LinearInequality(*factor));
 double error = workingFactor->error(initialValues);
 if (fabs(error)>1e-7){
@@ -68,7 +68,7 @@ public:
 const VariableIndex& variableIndex) const {
 std::vector<std::pair<Key, Matrix> > Aterms;
 if (variableIndex.find(key) != variableIndex.end()) {
-BOOST_FOREACH(size_t factorIx, variableIndex[key]){
+for(size_t factorIx: variableIndex[key]){
 typename FACTOR::shared_ptr factor = graph.at(factorIx);
 if (!factor->active()) continue;
 Matrix Ai = factor->getA(factor->find(key)).transpose();
@@ -8,7 +8,6 @@
 #include <gtsam_unstable/slam/DummyFactor.h>

 #include <boost/assign/list_of.hpp>
-#include <boost/foreach.hpp>

 using namespace boost::assign;

@@ -29,7 +28,7 @@ DummyFactor::DummyFactor(const Key& key1, size_t dim1, const Key& key2, size_t d
 /* ************************************************************************* */
 void DummyFactor::print(const std::string& s, const KeyFormatter& keyFormatter) const {
 std::cout << s << " DummyFactor dim = " << rowDim_ << ", keys = { ";
-BOOST_FOREACH(Key key, this->keys()) { std::cout << keyFormatter(key) << " "; }
+for(Key key: this->keys()) { std::cout << keyFormatter(key) << " "; }
 std::cout << "}" << std::endl;
 }

@@ -14,7 +14,6 @@
 #include <gtsam/inference/Key.h>
 #include <gtsam/geometry/Pose2.h>
 #include <gtsam/base/timing.h>
-#include <boost/foreach.hpp>
 #include <map>

 namespace gtsam {
@@ -100,7 +99,7 @@ public:
 boost::optional<Circle2> best_circle;

 // loop over all circles
-BOOST_FOREACH(const Circle2& it, circles) {
+for(const Circle2& it: circles) {
 // distance between circle centers.
 double d = circle1.center.dist(it.center);
 if (d < 1e-9)
@@ -123,7 +122,7 @@ public:
 // pick winner based on other measurements
 double error1 = 0, error2 = 0;
 Point2 p1 = intersections.front(), p2 = intersections.back();
-BOOST_FOREACH(const Circle2& it, circles) {
+for(const Circle2& it: circles) {
 error1 += it.center.dist(p1);
 error2 += it.center.dist(p2);
 }
@@ -254,7 +254,7 @@ public:
 }

 Point3 pw_sum(0,0,0);
-BOOST_FOREACH(const Point3& pw, reprojections) {
+for(const Point3& pw: reprojections) {
 pw_sum = pw_sum + pw;
 }
 // average reprojected landmark
@@ -339,9 +339,9 @@ public:

 if (params_.degeneracyMode == ZERO_ON_DEGENERACY && !result_) {
 // failed: return"empty" Hessian
-BOOST_FOREACH(Matrix& m, Gs)
+for(Matrix& m: Gs)
 m = Matrix::Zero(Base::Dim, Base::Dim);
-BOOST_FOREACH(Vector& v, gs)
+for(Vector& v: gs)
 v = Vector::Zero(Base::Dim);
 return boost::make_shared<RegularHessianFactor<Base::Dim> >(this->keys_,
 Gs, gs, 0.0);
@@ -122,7 +122,7 @@ public:
 void print(const std::string& s = "", const KeyFormatter& keyFormatter =
 DefaultKeyFormatter) const {
 std::cout << s << "SmartStereoProjectionPoseFactor, z = \n ";
-BOOST_FOREACH(const boost::shared_ptr<Cal3_S2Stereo>& K, K_all_)
+for(const boost::shared_ptr<Cal3_S2Stereo>& K: K_all_)
 K->print("calibration = ");
 Base::print("", keyFormatter);
 }
@@ -160,7 +160,7 @@ public:
 Base::Cameras cameras(const Values& values) const {
 Base::Cameras cameras;
 size_t i=0;
-BOOST_FOREACH(const Key& k, this->keys_) {
+for(const Key& k: this->keys_) {
 const Pose3& pose = values.at<Pose3>(k);
 StereoCamera camera(pose, K_all_[i++]);
 cameras.push_back(camera);
@@ -23,7 +23,6 @@
 #include <boost/random.hpp>
 #include <boost/timer.hpp>
 #include <boost/format.hpp>
-#include <boost/foreach.hpp>
 #include <boost/assign/std/vector.hpp>

 #include <iostream>
@@ -46,7 +45,7 @@ int main(int argc, char* argv[]) {
 // loop over number of images
 vector<size_t> ms;
 ms += 10, 20, 30, 40, 50, 100, 200, 300, 400, 500, 1000;
-BOOST_FOREACH(size_t m,ms) {
+for(size_t m: ms) {
 // We use volatile here to make these appear to the optimizing compiler as
 // if their values are only known at run-time.
 volatile size_t n = 500; // number of points per image
@@ -74,7 +73,7 @@ int main(int argc, char* argv[]) {
 // DSFBase version
 timer tim;
 DSFBase dsf(N); // Allow for N keys
-BOOST_FOREACH(const Match& m, matches)
+for(const Match& m: matches)
 dsf.merge(m.first, m.second);
 os << tim.elapsed() << ",";
 cout << format("DSFBase: %1% s") % tim.elapsed() << endl;
@@ -84,7 +83,7 @@ int main(int argc, char* argv[]) {
 // DSFMap version
 timer tim;
 DSFMap<size_t> dsf;
-BOOST_FOREACH(const Match& m, matches)
+for(const Match& m: matches)
 dsf.merge(m.first, m.second);
 os << tim.elapsed() << endl;
 cout << format("DSFMap: %1% s") % tim.elapsed() << endl;
@@ -96,7 +95,7 @@ int main(int argc, char* argv[]) {
 DSF<size_t> dsf;
 for (size_t j = 0; j < N; j++)
 dsf = dsf.makeSet(j);
-BOOST_FOREACH(const Match& m, matches)
+for(const Match& m: matches)
 dsf = dsf.makeUnion(m.first, m.second);
 os << tim.elapsed() << endl;
 cout << format("DSF functional: %1% s") % tim.elapsed() << endl;
@@ -108,7 +107,7 @@ int main(int argc, char* argv[]) {
 DSF<size_t> dsf;
 for (size_t j = 0; j < N; j++)
 dsf.makeSetInPlace(j);
-BOOST_FOREACH(const Match& m, matches)
+for(const Match& m: matches)
 dsf.makeUnionInPlace(m.first, m.second);
 os << tim.elapsed() << ",";
 cout << format("DSF in-place: %1% s") % tim.elapsed() << endl;