Replaced BOOST_FOREACH with for in gtsam_unstable folder.
parent 3b7c57aedf
commit f7ec58cde0
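This commit mechanically converts Boost.Foreach macro loops into C++11 range-based for loops. The two forms iterate identically over the containers used here; the macro separates the loop variable from the range with a comma, while the language construct uses a colon. A minimal standalone sketch of the pattern (not GTSAM code; the KeyInt alias is needed on the macro side because BOOST_FOREACH is a preprocessor macro and a bare std::pair type argument would contain a comma):

    #include <boost/foreach.hpp>
    #include <iostream>
    #include <map>

    typedef std::map<int, int>::value_type KeyInt; // comma-free alias for the macro

    int main() {
      std::map<int, int> tree;
      tree[1] = 10;
      tree[2] = 20;

      int sum1 = 0;
      BOOST_FOREACH(const KeyInt& p, tree) // old style: macro, needs Boost
        sum1 += p.second;

      int sum2 = 0;
      for (const KeyInt& p : tree)         // new style: C++11, no Boost dependency
        sum2 += p.second;

      std::cout << sum1 << " == " << sum2 << std::endl; // 30 == 30
      return 0;
    }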
@@ -395,7 +395,7 @@ namespace gtsam {
  }; // const_iterator

-  // to make BTree work with BOOST_FOREACH
+  // to make BTree work with range-based for
  // We do *not* want a non-const iterator
  typedef const_iterator iterator;

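A note on the typedef kept above: a range-based for needs only begin() and end(), but BOOST_FOREACH resolves the iterator type through Boost.Range traits, which consult the container's iterator/const_iterator typedefs, so aliasing iterator to const_iterator keeps BTree read-only under either iteration style. A standalone sketch of the same idea (an assumed toy class, not the actual BTree):

    #include <initializer_list>
    #include <iostream>
    #include <vector>

    class ReadOnlyInts {
      std::vector<int> data_;
    public:
      ReadOnlyInts(std::initializer_list<int> xs) : data_(xs) {}
      typedef std::vector<int>::const_iterator const_iterator;
      typedef const_iterator iterator; // we do *not* want a non-const iterator
      const_iterator begin() const { return data_.begin(); }
      const_iterator end() const { return data_.end(); }
    };

    int main() {
      ReadOnlyInts c{1, 2, 3};
      for (int x : c)          // needs only begin()/end()
        std::cout << x << " ";
      return 0;
    }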
@@ -145,7 +145,7 @@ TEST( BTree, iterating )
  CHECK(*(++it) == p5)
  CHECK((++it)==tree.end())

-  // acid iterator test: BOOST_FOREACH
+  // acid iterator test
  int sum = 0;
  for(const KeyInt& p: tree)
    sum += p.second;

@@ -295,7 +295,7 @@ TEST(DSF, mergePairwiseMatches) {

  // Merge matches
  DSF<Measurement> dsf(measurements);
-  for(const Match& m, matches)
+  for(const Match& m: matches)
    dsf.makeUnionInPlace(m.first,m.second);

  // Check that sets are merged correctly
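Note the removed line in this hunk: for(const Match& m, matches) is exactly what a purely textual substitution of BOOST_FOREACH( with for( produces, since the macro separates the loop variable and the range with a comma. That form is not valid C++ (the range-based loop header requires a colon), so this hunk appears to fix up an earlier incomplete conversion rather than remove a remaining macro.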
@@ -64,7 +64,7 @@ FixedLagSmoother::Result BatchFixedLagSmoother::update(
  // Add the new variables to theta
  theta_.insert(newTheta);
  // Add new variables to the end of the ordering
-  BOOST_FOREACH(const Values::ConstKeyValuePair& key_value, newTheta) {
+  for(const Values::ConstKeyValuePair& key_value: newTheta) {
    ordering_.push_back(key_value.key);
  }
  // Augment Delta

@@ -87,7 +87,7 @@ FixedLagSmoother::Result BatchFixedLagSmoother::update(
      current_timestamp - smootherLag_);
  if (debug) {
    std::cout << "Marginalizable Keys: ";
-    BOOST_FOREACH(Key key, marginalizableKeys) {
+    for(Key key: marginalizableKeys) {
      std::cout << DefaultKeyFormatter(key) << " ";
    }
    std::cout << std::endl;

@@ -123,7 +123,7 @@ FixedLagSmoother::Result BatchFixedLagSmoother::update(
/* ************************************************************************* */
void BatchFixedLagSmoother::insertFactors(
    const NonlinearFactorGraph& newFactors) {
-  BOOST_FOREACH(const NonlinearFactor::shared_ptr& factor, newFactors) {
+  for(const NonlinearFactor::shared_ptr& factor: newFactors) {
    Key index;
    // Insert the factor into an existing hole in the factor graph, if possible
    if (availableSlots_.size() > 0) {

@@ -135,7 +135,7 @@ void BatchFixedLagSmoother::insertFactors(
      factors_.push_back(factor);
    }
    // Update the FactorIndex
-    BOOST_FOREACH(Key key, *factor) {
+    for(Key key: *factor) {
      factorIndex_[key].insert(index);
    }
  }

@@ -144,10 +144,10 @@ void BatchFixedLagSmoother::insertFactors(
/* ************************************************************************* */
void BatchFixedLagSmoother::removeFactors(
    const std::set<size_t>& deleteFactors) {
-  BOOST_FOREACH(size_t slot, deleteFactors) {
+  for(size_t slot: deleteFactors) {
    if (factors_.at(slot)) {
      // Remove references to this factor from the FactorIndex
-      BOOST_FOREACH(Key key, *(factors_.at(slot))) {
+      for(Key key: *(factors_.at(slot))) {
        factorIndex_[key].erase(slot);
      }
      // Remove the factor from the factor graph

@@ -165,7 +165,7 @@ void BatchFixedLagSmoother::removeFactors(
/* ************************************************************************* */
void BatchFixedLagSmoother::eraseKeys(const std::set<Key>& keys) {

-  BOOST_FOREACH(Key key, keys) {
+  for(Key key: keys) {
    // Erase the key from the values
    theta_.erase(key);

@@ -181,7 +181,7 @@ void BatchFixedLagSmoother::eraseKeys(const std::set<Key>& keys) {
  eraseKeyTimestampMap(keys);

  // Remove marginalized keys from the ordering and delta
-  BOOST_FOREACH(Key key, keys) {
+  for(Key key: keys) {
    ordering_.erase(std::find(ordering_.begin(), ordering_.end(), key));
    delta_.erase(key);
  }

@@ -198,7 +198,7 @@ void BatchFixedLagSmoother::reorder(const std::set<Key>& marginalizeKeys) {

  if (debug) {
    std::cout << "Marginalizable Keys: ";
-    BOOST_FOREACH(Key key, marginalizeKeys) {
+    for(Key key: marginalizeKeys) {
      std::cout << DefaultKeyFormatter(key) << " ";
    }
    std::cout << std::endl;

@@ -288,7 +288,7 @@ FixedLagSmoother::Result BatchFixedLagSmoother::optimize() {
  {
    // for each of the variables, add a prior at the current solution
    double sigma = 1.0 / std::sqrt(lambda);
-    BOOST_FOREACH(const VectorValues::KeyValuePair& key_value, delta_) {
+    for(const VectorValues::KeyValuePair& key_value: delta_) {
      size_t dim = key_value.second.size();
      Matrix A = Matrix::Identity(dim, dim);
      Vector b = key_value.second;

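The @@ -288 hunk above is part of the Levenberg-Marquardt damping step: for every variable in the current update vector delta_, a unit-Jacobian prior anchored at the current iterate is appended, with strength sigma = 1/sqrt(lambda), so larger lambda yields stiffer damping. A hedged sketch of that step as a free function (assumes GTSAM's JacobianFactor, noiseModel, and VectorValues APIs as used in the surrounding code; the function name damp is ours):

    #include <cmath>
    #include <boost/make_shared.hpp>
    #include <gtsam/linear/GaussianFactorGraph.h>
    #include <gtsam/linear/JacobianFactor.h>
    #include <gtsam/linear/NoiseModel.h>
    #include <gtsam/linear/VectorValues.h>

    using namespace gtsam;

    // Append one damping prior per variable: A = I, b = current delta,
    // isotropic sigma = 1/sqrt(lambda), so larger lambda means stiffer priors.
    GaussianFactorGraph damp(const GaussianFactorGraph& graph,
                             const VectorValues& delta, double lambda) {
      GaussianFactorGraph damped = graph; // copy, then append priors
      double sigma = 1.0 / std::sqrt(lambda);
      for (const VectorValues::KeyValuePair& key_value : delta) {
        size_t dim = key_value.second.size();
        Matrix A = Matrix::Identity(dim, dim);
        Vector b = key_value.second;
        SharedDiagonal model = noiseModel::Isotropic::Sigma(dim, sigma);
        damped.push_back(boost::make_shared<JacobianFactor>(key_value.first, A, b, model));
      }
      return damped;
    }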
@@ -332,7 +332,7 @@ FixedLagSmoother::Result BatchFixedLagSmoother::optimize() {
  // Put the linearization points and deltas back for specific variables
  if (enforceConsistency_ && (linearKeys_.size() > 0)) {
    theta_.update(linearKeys_);
-    BOOST_FOREACH(const Values::ConstKeyValuePair& key_value, linearKeys_) {
+    for(const Values::ConstKeyValuePair& key_value: linearKeys_) {
      delta_.at(key_value.key) = newDelta.at(key_value.key);
    }
  }

@@ -397,7 +397,7 @@ void BatchFixedLagSmoother::marginalize(const std::set<Key>& marginalizeKeys) {

  if (debug) {
    std::cout << "BatchFixedLagSmoother::marginalize Marginalize Keys: ";
-    BOOST_FOREACH(Key key, marginalizeKeys) {
+    for(Key key: marginalizeKeys) {
      std::cout << DefaultKeyFormatter(key) << " ";
    }
    std::cout << std::endl;

@@ -406,14 +406,14 @@ void BatchFixedLagSmoother::marginalize(const std::set<Key>& marginalizeKeys) {
  // Identify all of the factors involving any marginalized variable. These must be removed.
  std::set<size_t> removedFactorSlots;
  VariableIndex variableIndex(factors_);
-  BOOST_FOREACH(Key key, marginalizeKeys) {
+  for(Key key: marginalizeKeys) {
    const FastVector<size_t>& slots = variableIndex[key];
    removedFactorSlots.insert(slots.begin(), slots.end());
  }

  if (debug) {
    std::cout << "BatchFixedLagSmoother::marginalize Removed Factor Slots: ";
-    BOOST_FOREACH(size_t slot, removedFactorSlots) {
+    for(size_t slot: removedFactorSlots) {
      std::cout << slot << " ";
    }
    std::cout << std::endl;

@@ -421,7 +421,7 @@ void BatchFixedLagSmoother::marginalize(const std::set<Key>& marginalizeKeys) {

  // Add the removed factors to a factor graph
  NonlinearFactorGraph removedFactors;
-  BOOST_FOREACH(size_t slot, removedFactorSlots) {
+  for(size_t slot: removedFactorSlots) {
    if (factors_.at(slot)) {
      removedFactors.push_back(factors_.at(slot));
    }

@@ -456,7 +456,7 @@ void BatchFixedLagSmoother::marginalize(const std::set<Key>& marginalizeKeys) {
void BatchFixedLagSmoother::PrintKeySet(const std::set<Key>& keys,
    const std::string& label) {
  std::cout << label;
-  BOOST_FOREACH(gtsam::Key key, keys) {
+  for(gtsam::Key key: keys) {
    std::cout << " " << gtsam::DefaultKeyFormatter(key);
  }
  std::cout << std::endl;

@@ -466,7 +466,7 @@ void BatchFixedLagSmoother::PrintKeySet(const std::set<Key>& keys,
void BatchFixedLagSmoother::PrintKeySet(const gtsam::KeySet& keys,
    const std::string& label) {
  std::cout << label;
-  BOOST_FOREACH(gtsam::Key key, keys) {
+  for(gtsam::Key key: keys) {
    std::cout << " " << gtsam::DefaultKeyFormatter(key);
  }
  std::cout << std::endl;

@@ -477,7 +477,7 @@ void BatchFixedLagSmoother::PrintSymbolicFactor(
    const NonlinearFactor::shared_ptr& factor) {
  std::cout << "f(";
  if (factor) {
-    BOOST_FOREACH(Key key, factor->keys()) {
+    for(Key key: factor->keys()) {
      std::cout << " " << gtsam::DefaultKeyFormatter(key);
    }
  } else {

@@ -490,7 +490,7 @@ void BatchFixedLagSmoother::PrintSymbolicFactor(
void BatchFixedLagSmoother::PrintSymbolicFactor(
    const GaussianFactor::shared_ptr& factor) {
  std::cout << "f(";
-  BOOST_FOREACH(Key key, factor->keys()) {
+  for(Key key: factor->keys()) {
    std::cout << " " << gtsam::DefaultKeyFormatter(key);
  }
  std::cout << " )" << std::endl;

@@ -500,7 +500,7 @@ void BatchFixedLagSmoother::PrintSymbolicFactor(
void BatchFixedLagSmoother::PrintSymbolicGraph(
    const NonlinearFactorGraph& graph, const std::string& label) {
  std::cout << label << std::endl;
-  BOOST_FOREACH(const NonlinearFactor::shared_ptr& factor, graph) {
+  for(const NonlinearFactor::shared_ptr& factor: graph) {
    PrintSymbolicFactor(factor);
  }
}

@@ -509,7 +509,7 @@ void BatchFixedLagSmoother::PrintSymbolicGraph(
void BatchFixedLagSmoother::PrintSymbolicGraph(const GaussianFactorGraph& graph,
    const std::string& label) {
  std::cout << label << std::endl;
-  BOOST_FOREACH(const GaussianFactor::shared_ptr& factor, graph) {
+  for(const GaussianFactor::shared_ptr& factor: graph) {
    PrintSymbolicFactor(factor);
  }
}

@@ -568,7 +568,7 @@ NonlinearFactorGraph BatchFixedLagSmoother::calculateMarginalFactors(
  // Wrap in nonlinear container factors
  NonlinearFactorGraph marginalFactors;
  marginalFactors.reserve(marginalLinearFactors.size());
-  BOOST_FOREACH(const GaussianFactor::shared_ptr& gaussianFactor, marginalLinearFactors) {
+  for(const GaussianFactor::shared_ptr& gaussianFactor: marginalLinearFactors) {
    marginalFactors += boost::make_shared<LinearContainerFactor>(
        gaussianFactor, theta);
    if (debug) {

@@ -33,7 +33,7 @@ void ConcurrentBatchFilter::PrintNonlinearFactor(const NonlinearFactor::shared_p
  } else {
    std::cout << "f( ";
  }
-  BOOST_FOREACH(Key key, *factor) {
+  for(Key key: *factor) {
    std::cout << keyFormatter(key) << " ";
  }
  std::cout << ")" << std::endl;

@@ -46,7 +46,7 @@ void ConcurrentBatchFilter::PrintNonlinearFactor(const NonlinearFactor::shared_p
void ConcurrentBatchFilter::PrintNonlinearFactorGraph(const NonlinearFactorGraph& factors,
    const std::string& indent, const std::string& title, const KeyFormatter& keyFormatter) {
  std::cout << indent << title << std::endl;
-  BOOST_FOREACH(const NonlinearFactor::shared_ptr& factor, factors) {
+  for(const NonlinearFactor::shared_ptr& factor: factors) {
    PrintNonlinearFactor(factor, indent + " ", keyFormatter);
  }
}

@@ -55,7 +55,7 @@ void ConcurrentBatchFilter::PrintNonlinearFactorGraph(const NonlinearFactorGraph
void ConcurrentBatchFilter::PrintNonlinearFactorGraph(const NonlinearFactorGraph& factors, const std::vector<size_t>& slots,
    const std::string& indent, const std::string& title, const KeyFormatter& keyFormatter) {
  std::cout << indent << title << std::endl;
-  BOOST_FOREACH(size_t slot, slots) {
+  for(size_t slot: slots) {
    PrintNonlinearFactor(factors.at(slot), indent + " ", keyFormatter);
  }
}

@@ -74,7 +74,7 @@ void ConcurrentBatchFilter::PrintLinearFactor(const GaussianFactor::shared_ptr&
  } else {
    std::cout << "g( ";
  }
-  BOOST_FOREACH(Key key, *factor) {
+  for(Key key: *factor) {
    std::cout << keyFormatter(key) << " ";
  }
  std::cout << ")" << std::endl;

@@ -87,7 +87,7 @@ void ConcurrentBatchFilter::PrintLinearFactor(const GaussianFactor::shared_ptr&
void ConcurrentBatchFilter::PrintLinearFactorGraph(const GaussianFactorGraph& factors,
    const std::string& indent, const std::string& title, const KeyFormatter& keyFormatter) {
  std::cout << indent << title << std::endl;
-  BOOST_FOREACH(const GaussianFactor::shared_ptr& factor, factors) {
+  for(const GaussianFactor::shared_ptr& factor: factors) {
    PrintLinearFactor(factor, indent + " ", keyFormatter);
  }
}

@@ -139,7 +139,7 @@ ConcurrentBatchFilter::Result ConcurrentBatchFilter::update(const NonlinearFacto
  // Add the new variables to theta
  theta_.insert(newTheta);
  // Add new variables to the end of the ordering
-  BOOST_FOREACH(const Values::ConstKeyValuePair& key_value, newTheta) {
+  for(const Values::ConstKeyValuePair& key_value: newTheta) {
    ordering_.push_back(key_value.key);
  }
  // Augment Delta

@@ -222,7 +222,7 @@ void ConcurrentBatchFilter::synchronize(const NonlinearFactorGraph& smootherSumm

  // Find the set of new separator keys
  KeySet newSeparatorKeys;
-  BOOST_FOREACH(const Values::ConstKeyValuePair& key_value, separatorValues_) {
+  for(const Values::ConstKeyValuePair& key_value: separatorValues_) {
    newSeparatorKeys.insert(key_value.key);
  }

@@ -236,7 +236,7 @@ void ConcurrentBatchFilter::synchronize(const NonlinearFactorGraph& smootherSumm
  graph.push_back(smootherShortcut_);
  Values values;
  values.insert(smootherSummarizationValues);
-  BOOST_FOREACH(const Values::ConstKeyValuePair& key_value, separatorValues_) {
+  for(const Values::ConstKeyValuePair& key_value: separatorValues_) {
    if(!values.exists(key_value.key)) {
      values.insert(key_value.key, key_value.value);
    }

@@ -321,7 +321,7 @@ std::vector<size_t> ConcurrentBatchFilter::insertFactors(const NonlinearFactorGr
  slots.reserve(factors.size());

  // Insert the factor into an existing hole in the factor graph, if possible
-  BOOST_FOREACH(const NonlinearFactor::shared_ptr& factor, factors) {
+  for(const NonlinearFactor::shared_ptr& factor: factors) {
    size_t slot;
    if(availableSlots_.size() > 0) {
      slot = availableSlots_.front();

@@ -345,7 +345,7 @@ void ConcurrentBatchFilter::removeFactors(const std::vector<size_t>& slots) {
  gttic(remove_factors);

  // For each factor slot to delete...
-  BOOST_FOREACH(size_t slot, slots) {
+  for(size_t slot: slots) {

    // Remove the factor from the graph
    factors_.remove(slot);

@@ -431,7 +431,7 @@ void ConcurrentBatchFilter::optimize(const NonlinearFactorGraph& factors, Values
  double sigma = 1.0 / std::sqrt(lambda);

  // for each of the variables, add a prior at the current solution
-  BOOST_FOREACH(const VectorValues::KeyValuePair& key_value, delta) {
+  for(const VectorValues::KeyValuePair& key_value: delta) {
    size_t dim = key_value.second.size();
    Matrix A = Matrix::Identity(dim,dim);
    Vector b = key_value.second;

@@ -471,7 +471,7 @@ void ConcurrentBatchFilter::optimize(const NonlinearFactorGraph& factors, Values
  // Put the linearization points and deltas back for specific variables
  if(linearValues.size() > 0) {
    theta.update(linearValues);
-    BOOST_FOREACH(const Values::ConstKeyValuePair& key_value, linearValues) {
+    for(const Values::ConstKeyValuePair& key_value: linearValues) {
      delta.at(key_value.key) = newDelta.at(key_value.key);
    }
  }

@@ -535,7 +535,7 @@ void ConcurrentBatchFilter::moveSeparator(const FastList<Key>& keysToMove) {
  // Identify all of the new factors to be sent to the smoother (any factor involving keysToMove)
  std::vector<size_t> removedFactorSlots;
  VariableIndex variableIndex(factors_);
-  BOOST_FOREACH(Key key, keysToMove) {
+  for(Key key: keysToMove) {
    const FastVector<size_t>& slots = variableIndex[key];
    removedFactorSlots.insert(removedFactorSlots.end(), slots.begin(), slots.end());
  }

@@ -544,14 +544,14 @@ void ConcurrentBatchFilter::moveSeparator(const FastList<Key>& keysToMove) {
  std::sort(removedFactorSlots.begin(), removedFactorSlots.end());
  removedFactorSlots.erase(std::unique(removedFactorSlots.begin(), removedFactorSlots.end()), removedFactorSlots.end());
  // Remove any linear/marginal factor that made it into the set
-  BOOST_FOREACH(size_t index, separatorSummarizationSlots_) {
+  for(size_t index: separatorSummarizationSlots_) {
    removedFactorSlots.erase(std::remove(removedFactorSlots.begin(), removedFactorSlots.end(), index), removedFactorSlots.end());
  }

  // TODO: Make this compact
  if(debug) {
    std::cout << "ConcurrentBatchFilter::moveSeparator Removed Factor Slots: ";
-    BOOST_FOREACH(size_t slot, removedFactorSlots) {
+    for(size_t slot: removedFactorSlots) {
      std::cout << slot << " ";
    }
    std::cout << std::endl;

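Two STL patterns appear in this hunk: std::sort followed by std::unique plus erase deduplicates the slot list, and std::remove followed by erase deletes every occurrence of a value (the erase-remove idiom). A standalone sketch of the latter (plain STL, not GTSAM code):

    #include <algorithm>
    #include <cassert>
    #include <vector>

    // std::remove shifts the kept elements to the front and returns the new
    // logical end; erase then trims the leftover tail in one call.
    void removeAll(std::vector<size_t>& slots, size_t index) {
      slots.erase(std::remove(slots.begin(), slots.end(), index), slots.end());
    }

    int main() {
      std::vector<size_t> slots = {3, 7, 3, 9};
      removeAll(slots, 3);
      assert(slots.size() == 2 && slots[0] == 7 && slots[1] == 9);
      return 0;
    }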
@@ -559,7 +559,7 @@ void ConcurrentBatchFilter::moveSeparator(const FastList<Key>& keysToMove) {

  // Add these factors to a factor graph
  NonlinearFactorGraph removedFactors;
-  BOOST_FOREACH(size_t slot, removedFactorSlots) {
+  for(size_t slot: removedFactorSlots) {
    if(factors_.at(slot)) {
      removedFactors.push_back(factors_.at(slot));
    }

@@ -574,10 +574,10 @@ void ConcurrentBatchFilter::moveSeparator(const FastList<Key>& keysToMove) {

  // Calculate the set of new separator keys: AffectedKeys + PreviousSeparatorKeys - KeysToMove
  KeySet newSeparatorKeys = removedFactors.keys();
-  BOOST_FOREACH(const Values::ConstKeyValuePair& key_value, separatorValues_) {
+  for(const Values::ConstKeyValuePair& key_value: separatorValues_) {
    newSeparatorKeys.insert(key_value.key);
  }
-  BOOST_FOREACH(Key key, keysToMove) {
+  for(Key key: keysToMove) {
    newSeparatorKeys.erase(key);
  }

@@ -585,7 +585,7 @@ void ConcurrentBatchFilter::moveSeparator(const FastList<Key>& keysToMove) {

  // Calculate the set of shortcut keys: NewSeparatorKeys + OldSeparatorKeys
  KeySet shortcutKeys = newSeparatorKeys;
-  BOOST_FOREACH(Key key, smootherSummarization_.keys()) {
+  for(Key key: smootherSummarization_.keys()) {
    shortcutKeys.insert(key);
  }

@@ -632,7 +632,7 @@ void ConcurrentBatchFilter::moveSeparator(const FastList<Key>& keysToMove) {

  // Update the separatorValues object (should only contain the new separator keys)
  separatorValues_.clear();
-  BOOST_FOREACH(Key key, separatorSummarization.keys()) {
+  for(Key key: separatorSummarization.keys()) {
    separatorValues_.insert(key, theta_.at(key));
  }

@@ -641,12 +641,12 @@ void ConcurrentBatchFilter::moveSeparator(const FastList<Key>& keysToMove) {
  removeFactors(removedFactorSlots);

  // Add the linearization point of the moved variables to the smoother cache
-  BOOST_FOREACH(Key key, keysToMove) {
+  for(Key key: keysToMove) {
    smootherValues_.insert(key, theta_.at(key));
  }

  // Remove marginalized keys from values (and separator)
-  BOOST_FOREACH(Key key, keysToMove) {
+  for(Key key: keysToMove) {
    theta_.erase(key);
    ordering_.erase(std::find(ordering_.begin(), ordering_.end(), key));
    delta_.erase(key);

@@ -244,7 +244,7 @@ private:
template<class Container>
void ConcurrentBatchFilter::PrintKeys(const Container& keys, const std::string& indent, const std::string& title, const KeyFormatter& keyFormatter) {
  std::cout << indent << title;
-  BOOST_FOREACH(Key key, keys) {
+  for(Key key: keys) {
    std::cout << " " << keyFormatter(key);
  }
  std::cout << std::endl;

@@ -28,7 +28,7 @@ namespace gtsam {
void ConcurrentBatchSmoother::print(const std::string& s, const KeyFormatter& keyFormatter) const {
  std::cout << s;
  std::cout << " Factors:" << std::endl;
-  BOOST_FOREACH(const NonlinearFactor::shared_ptr& factor, factors_) {
+  for(const NonlinearFactor::shared_ptr& factor: factors_) {
    PrintNonlinearFactor(factor, " ", keyFormatter);
  }
  theta_.print("Values:\n");

@@ -61,7 +61,7 @@ ConcurrentBatchSmoother::Result ConcurrentBatchSmoother::update(const NonlinearF
  theta_.insert(newTheta);

  // Add new variables to the end of the ordering
-  BOOST_FOREACH(const Values::ConstKeyValuePair& key_value, newTheta) {
+  for(const Values::ConstKeyValuePair& key_value: newTheta) {
    ordering_.push_back(key_value.key);
  }

@@ -135,7 +135,7 @@ void ConcurrentBatchSmoother::synchronize(const NonlinearFactorGraph& smootherFa
  removeFactors(filterSummarizationSlots_);

  // Insert new linpoints into the values, augment the ordering, and store new dims to augment delta
-  BOOST_FOREACH(const Values::ConstKeyValuePair& key_value, smootherValues) {
+  for(const Values::ConstKeyValuePair& key_value: smootherValues) {
    std::pair<Values::iterator, bool> iter_inserted = theta_.tryInsert(key_value.key, key_value.value);
    if(iter_inserted.second) {
      // If the insert succeeded

@@ -146,7 +146,7 @@ void ConcurrentBatchSmoother::synchronize(const NonlinearFactorGraph& smootherFa
      iter_inserted.first->value = key_value.value;
    }
  }
-  BOOST_FOREACH(const Values::ConstKeyValuePair& key_value, separatorValues) {
+  for(const Values::ConstKeyValuePair& key_value: separatorValues) {
    std::pair<Values::iterator, bool> iter_inserted = theta_.tryInsert(key_value.key, key_value.value);
    if(iter_inserted.second) {
      // If the insert succeeded

@@ -188,7 +188,7 @@ std::vector<size_t> ConcurrentBatchSmoother::insertFactors(const NonlinearFactor
  slots.reserve(factors.size());

  // Insert the factor into an existing hole in the factor graph, if possible
-  BOOST_FOREACH(const NonlinearFactor::shared_ptr& factor, factors) {
+  for(const NonlinearFactor::shared_ptr& factor: factors) {
    size_t slot;
    if(availableSlots_.size() > 0) {
      slot = availableSlots_.front();

@@ -212,7 +212,7 @@ void ConcurrentBatchSmoother::removeFactors(const std::vector<size_t>& slots) {
  gttic(remove_factors);

  // For each factor slot to delete...
-  BOOST_FOREACH(size_t slot, slots) {
+  for(size_t slot: slots) {

    // Remove the factor from the graph
    factors_.remove(slot);

@@ -277,7 +277,7 @@ ConcurrentBatchSmoother::Result ConcurrentBatchSmoother::optimize() {
  dampedFactorGraph.reserve(linearFactorGraph.size() + delta_.size());
  {
    // for each of the variables, add a prior at the current solution
-    BOOST_FOREACH(const VectorValues::KeyValuePair& key_value, delta_) {
+    for(const VectorValues::KeyValuePair& key_value: delta_) {
      size_t dim = key_value.second.size();
      Matrix A = Matrix::Identity(dim,dim);
      Vector b = key_value.second;

@@ -322,7 +322,7 @@ ConcurrentBatchSmoother::Result ConcurrentBatchSmoother::optimize() {
  // Put the linearization points and deltas back for specific variables
  if(separatorValues_.size() > 0) {
    theta_.update(separatorValues_);
-    BOOST_FOREACH(const Values::ConstKeyValuePair& key_value, separatorValues_) {
+    for(const Values::ConstKeyValuePair& key_value: separatorValues_) {
      delta_.at(key_value.key) = newDelta.at(key_value.key);
    }
  }

@@ -366,13 +366,13 @@ void ConcurrentBatchSmoother::updateSmootherSummarization() {

  // Create a nonlinear factor graph without the filter summarization factors
  NonlinearFactorGraph graph(factors_);
-  BOOST_FOREACH(size_t slot, filterSummarizationSlots_) {
+  for(size_t slot: filterSummarizationSlots_) {
    graph.remove(slot);
  }

  // Get the set of separator keys
  gtsam::KeySet separatorKeys;
-  BOOST_FOREACH(const Values::ConstKeyValuePair& key_value, separatorValues_) {
+  for(const Values::ConstKeyValuePair& key_value: separatorValues_) {
    separatorKeys.insert(key_value.key);
  }

@@ -389,7 +389,7 @@ void ConcurrentBatchSmoother::PrintNonlinearFactor(const NonlinearFactor::shared
  } else {
    std::cout << "f( ";
  }
-  BOOST_FOREACH(Key key, *factor) {
+  for(Key key: *factor) {
    std::cout << keyFormatter(key) << " ";
  }
  std::cout << ")" << std::endl;

@@ -403,7 +403,7 @@ void ConcurrentBatchSmoother::PrintLinearFactor(const GaussianFactor::shared_ptr
  std::cout << indent;
  if(factor) {
    std::cout << "g( ";
-    BOOST_FOREACH(Key key, *factor) {
+    for(Key key: *factor) {
      std::cout << keyFormatter(key) << " ";
    }
    std::cout << ")" << std::endl;

@@ -77,7 +77,7 @@ NonlinearFactorGraph calculateMarginalFactors(const NonlinearFactorGraph& graph,
  // Wrap in nonlinear container factors
  NonlinearFactorGraph marginalFactors;
  marginalFactors.reserve(marginalLinearFactors.size());
-  BOOST_FOREACH(const GaussianFactor::shared_ptr& gaussianFactor, marginalLinearFactors) {
+  for(const GaussianFactor::shared_ptr& gaussianFactor: marginalLinearFactors) {
    marginalFactors += boost::make_shared<LinearContainerFactor>(gaussianFactor, theta);
  }

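Both calculateMarginalFactors variants above finish the same way: elimination yields linear (Gaussian) factors, which cannot be stored directly in a NonlinearFactorGraph, so each is wrapped together with its linearization point. A hedged sketch of just that wrapping step (assumes the GTSAM types and the LinearContainerFactor constructor exactly as used above; the helper name wrapMarginals is ours):

    #include <boost/make_shared.hpp>
    #include <gtsam/linear/GaussianFactorGraph.h>
    #include <gtsam/nonlinear/LinearContainerFactor.h>
    #include <gtsam/nonlinear/NonlinearFactorGraph.h>
    #include <gtsam/nonlinear/Values.h>

    using namespace gtsam;

    // Re-wrap eliminated Gaussian marginals as nonlinear container factors,
    // anchored at the linearization point theta they were produced from.
    NonlinearFactorGraph wrapMarginals(const GaussianFactorGraph& marginals,
                                       const Values& theta) {
      NonlinearFactorGraph out;
      out.reserve(marginals.size());
      for (const GaussianFactor::shared_ptr& gaussianFactor : marginals)
        out.push_back(boost::make_shared<LinearContainerFactor>(gaussianFactor, theta));
      return out;
    }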
@@ -69,17 +69,17 @@ ConcurrentIncrementalFilter::Result ConcurrentIncrementalFilter::update(const No
  int group = 1;
  // Set all existing variables to Group1
  if(isam2_.getLinearizationPoint().size() > 0) {
-    BOOST_FOREACH(const Values::ConstKeyValuePair& key_value, isam2_.getLinearizationPoint()) {
+    for(const Values::ConstKeyValuePair& key_value: isam2_.getLinearizationPoint()) {
      orderingConstraints->operator[](key_value.key) = group;
    }
    ++group;
  }
  // Assign new variables to the root
-  BOOST_FOREACH(const Values::ConstKeyValuePair& key_value, newTheta) {
+  for(const Values::ConstKeyValuePair& key_value: newTheta) {
    orderingConstraints->operator[](key_value.key) = group;
  }
  // Set marginalizable variables to Group0
-  BOOST_FOREACH(Key key, *keysToMove){
+  for(Key key: *keysToMove){
    orderingConstraints->operator[](key) = 0;
  }
}

@@ -92,7 +92,7 @@ ConcurrentIncrementalFilter::Result ConcurrentIncrementalFilter::update(const No
  boost::optional<FastList<Key> > additionalKeys = boost::none;
  if(keysToMove && keysToMove->size() > 0) {
    std::set<Key> markedKeys;
-    BOOST_FOREACH(Key key, *keysToMove) {
+    for(Key key: *keysToMove) {
      if(isam2_.getLinearizationPoint().exists(key)) {
        ISAM2Clique::shared_ptr clique = isam2_[key];
        GaussianConditional::const_iterator key_iter = clique->conditional()->begin();

@@ -100,7 +100,7 @@ ConcurrentIncrementalFilter::Result ConcurrentIncrementalFilter::update(const No
        markedKeys.insert(*key_iter);
        ++key_iter;
      }
-      BOOST_FOREACH(const ISAM2Clique::shared_ptr& child, clique->children) {
+      for(const ISAM2Clique::shared_ptr& child: clique->children) {
        RecursiveMarkAffectedKeys(key, child, markedKeys);
      }
    }

@@ -120,7 +120,7 @@ ConcurrentIncrementalFilter::Result ConcurrentIncrementalFilter::update(const No
    FactorIndices removedFactorSlots = FindAdjacentFactors(isam2_, *keysToMove, currentSmootherSummarizationSlots_);
    // Cache these factors for later transmission to the smoother
    NonlinearFactorGraph removedFactors;
-    BOOST_FOREACH(size_t slot, removedFactorSlots) {
+    for(size_t slot: removedFactorSlots) {
      const NonlinearFactor::shared_ptr& factor = isam2_.getFactorsUnsafe().at(slot);
      if(factor) {
        smootherFactors_.push_back(factor);

@@ -128,7 +128,7 @@ ConcurrentIncrementalFilter::Result ConcurrentIncrementalFilter::update(const No
      }
    }
    // Cache removed values for later transmission to the smoother
-    BOOST_FOREACH(Key key, *keysToMove) {
+    for(Key key: *keysToMove) {
      smootherValues_.insert(key, isam2_.getLinearizationPoint().at(key));
    }
    gttoc(cache_smoother_factors);

@@ -138,7 +138,7 @@ ConcurrentIncrementalFilter::Result ConcurrentIncrementalFilter::update(const No
    FactorIndices deletedFactorsIndices;
    isam2_.marginalizeLeaves(*keysToMove, marginalFactorsIndices, deletedFactorsIndices);
    currentSmootherSummarizationSlots_.insert(currentSmootherSummarizationSlots_.end(), marginalFactorsIndices.begin(), marginalFactorsIndices.end());
-    BOOST_FOREACH(size_t index, deletedFactorsIndices) {
+    for(size_t index: deletedFactorsIndices) {
      currentSmootherSummarizationSlots_.erase(std::remove(currentSmootherSummarizationSlots_.begin(), currentSmootherSummarizationSlots_.end(), index), currentSmootherSummarizationSlots_.end());
    }
    gttoc(marginalize);

@@ -193,7 +193,7 @@ void ConcurrentIncrementalFilter::synchronize(const NonlinearFactorGraph& smooth
  graph.push_back(smootherShortcut_);
  Values values;
  values.insert(smootherSummarizationValues);
-  BOOST_FOREACH(Key key, newSeparatorKeys) {
+  for(Key key: newSeparatorKeys) {
    if(!values.exists(key)) {
      values.insert(key, isam2_.getLinearizationPoint().at(key));
    }

@@ -201,7 +201,7 @@ void ConcurrentIncrementalFilter::synchronize(const NonlinearFactorGraph& smooth

  // Force iSAM2 not to relinearize anything during this iteration
  FastList<Key> noRelinKeys;
-  BOOST_FOREACH(const Values::ConstKeyValuePair& key_value, isam2_.getLinearizationPoint()) {
+  for(const Values::ConstKeyValuePair& key_value: isam2_.getLinearizationPoint()) {
    noRelinKeys.push_back(key_value.key);
  }

@@ -232,7 +232,7 @@ void ConcurrentIncrementalFilter::getSummarizedFactors(NonlinearFactorGraph& fil
  filterSummarization = calculateFilterSummarization();

  // Copy the current separator values into the output
-  BOOST_FOREACH(Key key, isam2_.getFixedVariables()) {
+  for(Key key: isam2_.getFixedVariables()) {
    filterSummarizationValues.insert(key, isam2_.getLinearizationPoint().at(key));
  }

@@ -271,12 +271,12 @@ void ConcurrentIncrementalFilter::RecursiveMarkAffectedKeys(const Key& key, cons
  if(std::find(clique->conditional()->beginParents(), clique->conditional()->endParents(), key) != clique->conditional()->endParents()) {

    // Mark the frontal keys of the current clique
-    BOOST_FOREACH(Key idx, clique->conditional()->frontals()) {
+    for(Key idx: clique->conditional()->frontals()) {
      additionalKeys.insert(idx);
    }

    // Recursively mark all of the children
-    BOOST_FOREACH(const ISAM2Clique::shared_ptr& child, clique->children) {
+    for(const ISAM2Clique::shared_ptr& child: clique->children) {
      RecursiveMarkAffectedKeys(key, child, additionalKeys);
    }
  }

@@ -290,7 +290,7 @@ FactorIndices ConcurrentIncrementalFilter::FindAdjacentFactors(const ISAM2& isam
  // Identify any new factors to be sent to the smoother (i.e. any factor involving keysToMove)
  FactorIndices removedFactorSlots;
  const VariableIndex& variableIndex = isam2.getVariableIndex();
-  BOOST_FOREACH(Key key, keys) {
+  for(Key key: keys) {
    const FastVector<size_t>& slots = variableIndex[key];
    removedFactorSlots.insert(removedFactorSlots.end(), slots.begin(), slots.end());
  }

@@ -300,7 +300,7 @@ FactorIndices ConcurrentIncrementalFilter::FindAdjacentFactors(const ISAM2& isam
  removedFactorSlots.erase(std::unique(removedFactorSlots.begin(), removedFactorSlots.end()), removedFactorSlots.end());

  // Remove any linear/marginal factor that made it into the set
-  BOOST_FOREACH(size_t index, factorsToIgnore) {
+  for(size_t index: factorsToIgnore) {
    removedFactorSlots.erase(std::remove(removedFactorSlots.begin(), removedFactorSlots.end(), index), removedFactorSlots.end());
  }

@@ -313,13 +313,13 @@ void ConcurrentIncrementalFilter::updateShortcut(const NonlinearFactorGraph& rem

  // Calculate the set of shortcut keys: NewSeparatorKeys + OldSeparatorKeys
  KeySet shortcutKeys;
-  BOOST_FOREACH(size_t slot, currentSmootherSummarizationSlots_) {
+  for(size_t slot: currentSmootherSummarizationSlots_) {
    const NonlinearFactor::shared_ptr& factor = isam2_.getFactorsUnsafe().at(slot);
    if(factor) {
      shortcutKeys.insert(factor->begin(), factor->end());
    }
  }
-  BOOST_FOREACH(Key key, previousSmootherSummarization_.keys()) {
+  for(Key key: previousSmootherSummarization_.keys()) {
    shortcutKeys.insert(key);
  }

@@ -347,15 +347,15 @@ NonlinearFactorGraph ConcurrentIncrementalFilter::calculateFilterSummarization()

  // Find all cliques that contain any separator variables
  std::set<ISAM2Clique::shared_ptr> separatorCliques;
-  BOOST_FOREACH(Key key, separatorKeys) {
+  for(Key key: separatorKeys) {
    ISAM2Clique::shared_ptr clique = isam2_[key];
    separatorCliques.insert( clique );
  }

  // Create the set of clique keys LC:
  std::vector<Key> cliqueKeys;
-  BOOST_FOREACH(const ISAM2Clique::shared_ptr& clique, separatorCliques) {
-    BOOST_FOREACH(Key key, clique->conditional()->frontals()) {
+  for(const ISAM2Clique::shared_ptr& clique: separatorCliques) {
+    for(Key key: clique->conditional()->frontals()) {
      cliqueKeys.push_back(key);
    }
  }

@@ -363,8 +363,8 @@ NonlinearFactorGraph ConcurrentIncrementalFilter::calculateFilterSummarization()

  // Gather all factors that involve only clique keys
  std::set<size_t> cliqueFactorSlots;
-  BOOST_FOREACH(Key key, cliqueKeys) {
-    BOOST_FOREACH(size_t slot, isam2_.getVariableIndex()[key]) {
+  for(Key key: cliqueKeys) {
+    for(size_t slot: isam2_.getVariableIndex()[key]) {
      const NonlinearFactor::shared_ptr& factor = isam2_.getFactorsUnsafe().at(slot);
      if(factor) {
        std::set<Key> factorKeys(factor->begin(), factor->end());

@@ -376,29 +376,29 @@ NonlinearFactorGraph ConcurrentIncrementalFilter::calculateFilterSummarization()
  }

  // Remove any factor included in the current smoother summarization
-  BOOST_FOREACH(size_t slot, currentSmootherSummarizationSlots_) {
+  for(size_t slot: currentSmootherSummarizationSlots_) {
    cliqueFactorSlots.erase(slot);
  }

  // Create a factor graph from the identified factors
  NonlinearFactorGraph graph;
-  BOOST_FOREACH(size_t slot, cliqueFactorSlots) {
+  for(size_t slot: cliqueFactorSlots) {
    graph.push_back(isam2_.getFactorsUnsafe().at(slot));
  }

  // Find the set of children of the separator cliques
  std::set<ISAM2Clique::shared_ptr> childCliques;
  // Add all of the children
-  BOOST_FOREACH(const ISAM2Clique::shared_ptr& clique, separatorCliques) {
+  for(const ISAM2Clique::shared_ptr& clique: separatorCliques) {
    childCliques.insert(clique->children.begin(), clique->children.end());
  }
  // Remove any separator cliques that were added because they were children of other separator cliques
-  BOOST_FOREACH(const ISAM2Clique::shared_ptr& clique, separatorCliques) {
+  for(const ISAM2Clique::shared_ptr& clique: separatorCliques) {
    childCliques.erase(clique);
  }

  // Augment the factor graph with cached factors from the children
-  BOOST_FOREACH(const ISAM2Clique::shared_ptr& clique, childCliques) {
+  for(const ISAM2Clique::shared_ptr& clique: childCliques) {
    LinearContainerFactor::shared_ptr factor(new LinearContainerFactor(clique->cachedFactor(), isam2_.getLinearizationPoint()));
    graph.push_back( factor );
  }

@@ -66,7 +66,7 @@ ConcurrentIncrementalSmoother::Result ConcurrentIncrementalSmoother::update(cons
  // Also, mark the separator keys as fixed linearization points
  FastMap<Key,int> constrainedKeys;
  FastList<Key> noRelinKeys;
-  BOOST_FOREACH(const Values::ConstKeyValuePair& key_value, separatorValues_) {
+  for(const Values::ConstKeyValuePair& key_value: separatorValues_) {
    constrainedKeys[key_value.key] = 1;
    noRelinKeys.push_back(key_value.key);
  }

@@ -82,12 +82,12 @@ ConcurrentIncrementalSmoother::Result ConcurrentIncrementalSmoother::update(cons
  Values values(newTheta);
  // Unfortunately, we must be careful here, as some of the smoother values
  // and/or separator values may have been added previously
-  BOOST_FOREACH(const Values::ConstKeyValuePair& key_value, smootherValues_) {
+  for(const Values::ConstKeyValuePair& key_value: smootherValues_) {
    if(!isam2_.getLinearizationPoint().exists(key_value.key)) {
      values.insert(key_value.key, smootherValues_.at(key_value.key));
    }
  }
-  BOOST_FOREACH(const Values::ConstKeyValuePair& key_value, separatorValues_) {
+  for(const Values::ConstKeyValuePair& key_value: separatorValues_) {
    if(!isam2_.getLinearizationPoint().exists(key_value.key)) {
      values.insert(key_value.key, separatorValues_.at(key_value.key));
    }

@@ -188,15 +188,15 @@ void ConcurrentIncrementalSmoother::updateSmootherSummarization() {

  // Find all cliques that contain any separator variables
  std::set<ISAM2Clique::shared_ptr> separatorCliques;
-  BOOST_FOREACH(Key key, separatorValues_.keys()) {
+  for(Key key: separatorValues_.keys()) {
    ISAM2Clique::shared_ptr clique = isam2_[key];
    separatorCliques.insert( clique );
  }

  // Create the set of clique keys LC:
  std::vector<Key> cliqueKeys;
-  BOOST_FOREACH(const ISAM2Clique::shared_ptr& clique, separatorCliques) {
-    BOOST_FOREACH(Key key, clique->conditional()->frontals()) {
+  for(const ISAM2Clique::shared_ptr& clique: separatorCliques) {
+    for(Key key: clique->conditional()->frontals()) {
      cliqueKeys.push_back(key);
    }
  }

@@ -204,8 +204,8 @@ void ConcurrentIncrementalSmoother::updateSmootherSummarization() {

  // Gather all factors that involve only clique keys
  std::set<size_t> cliqueFactorSlots;
-  BOOST_FOREACH(Key key, cliqueKeys) {
-    BOOST_FOREACH(size_t slot, isam2_.getVariableIndex()[key]) {
+  for(Key key: cliqueKeys) {
+    for(size_t slot: isam2_.getVariableIndex()[key]) {
      const NonlinearFactor::shared_ptr& factor = isam2_.getFactorsUnsafe().at(slot);
      if(factor) {
        std::set<Key> factorKeys(factor->begin(), factor->end());

@@ -217,36 +217,36 @@ void ConcurrentIncrementalSmoother::updateSmootherSummarization() {
  }

  // Remove any factor included in the filter summarization
-  BOOST_FOREACH(size_t slot, filterSummarizationSlots_) {
+  for(size_t slot: filterSummarizationSlots_) {
    cliqueFactorSlots.erase(slot);
  }

  // Create a factor graph from the identified factors
  NonlinearFactorGraph graph;
-  BOOST_FOREACH(size_t slot, cliqueFactorSlots) {
+  for(size_t slot: cliqueFactorSlots) {
    graph.push_back(isam2_.getFactorsUnsafe().at(slot));
  }

  // Find the set of children of the separator cliques
  std::set<ISAM2Clique::shared_ptr> childCliques;
  // Add all of the children
-  BOOST_FOREACH(const ISAM2Clique::shared_ptr& clique, separatorCliques) {
+  for(const ISAM2Clique::shared_ptr& clique: separatorCliques) {
    childCliques.insert(clique->children.begin(), clique->children.end());
  }
  // Remove any separator cliques that were added because they were children of other separator cliques
-  BOOST_FOREACH(const ISAM2Clique::shared_ptr& clique, separatorCliques) {
+  for(const ISAM2Clique::shared_ptr& clique: separatorCliques) {
    childCliques.erase(clique);
  }

  // Augment the factor graph with cached factors from the children
-  BOOST_FOREACH(const ISAM2Clique::shared_ptr& clique, childCliques) {
+  for(const ISAM2Clique::shared_ptr& clique: childCliques) {
    LinearContainerFactor::shared_ptr factor(new LinearContainerFactor(clique->cachedFactor(), isam2_.getLinearizationPoint()));
    graph.push_back( factor );
  }

  // Get the set of separator keys
  KeySet separatorKeys;
-  BOOST_FOREACH(const Values::ConstKeyValuePair& key_value, separatorValues_) {
+  for(const Values::ConstKeyValuePair& key_value: separatorValues_) {
    separatorKeys.insert(key_value.key);
  }

@@ -36,7 +36,7 @@ bool FixedLagSmoother::equals(const FixedLagSmoother& rhs, double tol) const {
/* ************************************************************************* */
void FixedLagSmoother::updateKeyTimestampMap(const KeyTimestampMap& timestamps) {
  // Loop through each key and add/update it in the map
-  BOOST_FOREACH(const KeyTimestampMap::value_type& key_timestamp, timestamps) {
+  for(const KeyTimestampMap::value_type& key_timestamp: timestamps) {
    // Check to see if this key already exists in the database
    KeyTimestampMap::iterator keyIter = keyTimestampMap_.find(key_timestamp.first);

@@ -65,7 +65,7 @@ void FixedLagSmoother::updateKeyTimestampMap(const KeyTimestampMap& timestamps)

/* ************************************************************************* */
void FixedLagSmoother::eraseKeyTimestampMap(const std::set<Key>& keys) {
-  BOOST_FOREACH(Key key, keys) {
+  for(Key key: keys) {
    // Erase the key from the Timestamp->Key map
    double timestamp = keyTimestampMap_.at(key);

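The two functions above maintain a pair of mutually inverse maps, key to timestamp and timestamp to keys, so erasing a key must also remove exactly the matching entry from the multi-valued reverse map. A standalone sketch of that bookkeeping (plain STL; the type names mirror GTSAM's but this is not the library's code):

    #include <map>

    typedef size_t Key;
    typedef std::map<Key, double> KeyTimestampMap;      // key -> timestamp
    typedef std::multimap<double, Key> TimestampKeyMap; // timestamp -> keys

    void eraseKey(Key key, KeyTimestampMap& byKey, TimestampKeyMap& byTime) {
      // Look up the key's timestamp, then scan only the entries sharing it.
      double timestamp = byKey.at(key);
      TimestampKeyMap::iterator it = byTime.lower_bound(timestamp);
      while (it != byTime.upper_bound(timestamp)) {
        if (it->second == key) { byTime.erase(it); break; }
        ++it;
      }
      byKey.erase(key);
    }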
@@ -34,12 +34,12 @@ void recursiveMarkAffectedKeys(const Key& key,
    != clique->conditional()->endParents()) {

  // Mark the frontal keys of the current clique
-  BOOST_FOREACH(Key i, clique->conditional()->frontals()) {
+  for(Key i: clique->conditional()->frontals()) {
    additionalKeys.insert(i);
  }

  // Recursively mark all of the children
-  BOOST_FOREACH(const ISAM2Clique::shared_ptr& child, clique->children) {
+  for(const ISAM2Clique::shared_ptr& child: clique->children) {
    recursiveMarkAffectedKeys(key, child, additionalKeys);
  }
}

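recursiveMarkAffectedKeys walks the Bayes tree: when a clique's conditional lists the marginalized key among its parents, all of that clique's frontal keys are marked and the recursion descends into its children; otherwise the subtree is skipped. A minimal stand-in showing the shape of the traversal (an assumed toy clique type, not GTSAM's):

    #include <algorithm>
    #include <memory>
    #include <set>
    #include <vector>

    struct Clique {
      std::vector<size_t> frontals, parents;
      std::vector<std::shared_ptr<Clique> > children;
    };

    void markAffected(size_t key, const std::shared_ptr<Clique>& clique,
                      std::set<size_t>& marked) {
      // Stop if this subtree's root does not condition on `key`.
      if (std::find(clique->parents.begin(), clique->parents.end(), key) ==
          clique->parents.end())
        return;
      for (size_t f : clique->frontals) marked.insert(f);
      for (const std::shared_ptr<Clique>& child : clique->children)
        markAffected(key, child, marked);
    }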
@@ -93,7 +93,7 @@ FixedLagSmoother::Result IncrementalFixedLagSmoother::update(

  if (debug) {
    std::cout << "Marginalizable Keys: ";
-    BOOST_FOREACH(Key key, marginalizableKeys) {
+    for(Key key: marginalizableKeys) {
      std::cout << DefaultKeyFormatter(key) << " ";
    }
    std::cout << std::endl;

@@ -116,9 +116,9 @@ FixedLagSmoother::Result IncrementalFixedLagSmoother::update(

  // Mark additional keys between the marginalized keys and the leaves
  std::set<Key> additionalKeys;
-  BOOST_FOREACH(Key key, marginalizableKeys) {
+  for(Key key: marginalizableKeys) {
    ISAM2Clique::shared_ptr clique = isam_[key];
-    BOOST_FOREACH(const ISAM2Clique::shared_ptr& child, clique->children) {
+    for(const ISAM2Clique::shared_ptr& child: clique->children) {
      recursiveMarkAffectedKeys(key, child, additionalKeys);
    }
  }

@@ -180,11 +180,11 @@ void IncrementalFixedLagSmoother::createOrderingConstraints(
  constrainedKeys = FastMap<Key, int>();
  // Generate ordering constraints so that the marginalizable variables will be eliminated first
  // Set all variables to Group1
-  BOOST_FOREACH(const TimestampKeyMap::value_type& timestamp_key, timestampKeyMap_) {
+  for(const TimestampKeyMap::value_type& timestamp_key: timestampKeyMap_) {
    constrainedKeys->operator[](timestamp_key.second) = 1;
  }
  // Set marginalizable variables to Group0
-  BOOST_FOREACH(Key key, marginalizableKeys) {
+  for(Key key: marginalizableKeys) {
    constrainedKeys->operator[](key) = 0;
  }
}

@@ -194,7 +194,7 @@ void IncrementalFixedLagSmoother::createOrderingConstraints(
void IncrementalFixedLagSmoother::PrintKeySet(const std::set<Key>& keys,
    const std::string& label) {
  std::cout << label;
-  BOOST_FOREACH(Key key, keys) {
+  for(Key key: keys) {
    std::cout << " " << DefaultKeyFormatter(key);
  }
  std::cout << std::endl;

@@ -204,7 +204,7 @@ void IncrementalFixedLagSmoother::PrintKeySet(const std::set<Key>& keys,
void IncrementalFixedLagSmoother::PrintSymbolicFactor(
    const GaussianFactor::shared_ptr& factor) {
  std::cout << "f(";
-  BOOST_FOREACH(Key key, factor->keys()) {
+  for(Key key: factor->keys()) {
    std::cout << " " << DefaultKeyFormatter(key);
  }
  std::cout << " )" << std::endl;

@@ -214,7 +214,7 @@ void IncrementalFixedLagSmoother::PrintSymbolicFactor(
void IncrementalFixedLagSmoother::PrintSymbolicGraph(
    const GaussianFactorGraph& graph, const std::string& label) {
  std::cout << label << std::endl;
-  BOOST_FOREACH(const GaussianFactor::shared_ptr& factor, graph) {
+  for(const GaussianFactor::shared_ptr& factor: graph) {
    PrintSymbolicFactor(factor);
  }
}

@@ -224,7 +224,7 @@ void IncrementalFixedLagSmoother::PrintSymbolicTree(const ISAM2& isam,
    const std::string& label) {
  std::cout << label << std::endl;
  if (!isam.roots().empty()) {
-    BOOST_FOREACH(const ISAM2::sharedClique& root, isam.roots()) {
+    for(const ISAM2::sharedClique& root: isam.roots()) {
      PrintSymbolicTreeHelper(root);
    }
  } else

@@ -237,18 +237,18 @@ void IncrementalFixedLagSmoother::PrintSymbolicTreeHelper(

  // Print the current clique
  std::cout << indent << "P( ";
-  BOOST_FOREACH(Key key, clique->conditional()->frontals()) {
+  for(Key key: clique->conditional()->frontals()) {
    std::cout << DefaultKeyFormatter(key) << " ";
  }
  if (clique->conditional()->nrParents() > 0)
    std::cout << "| ";
-  BOOST_FOREACH(Key key, clique->conditional()->parents()) {
+  for(Key key: clique->conditional()->parents()) {
    std::cout << DefaultKeyFormatter(key) << " ";
  }
  std::cout << ")" << std::endl;

  // Recursively print all of the children
-  BOOST_FOREACH(const ISAM2Clique::shared_ptr& child, clique->children) {
+  for(const ISAM2Clique::shared_ptr& child: clique->children) {
    PrintSymbolicTreeHelper(child, indent + " ");
  }
}

@@ -27,7 +27,7 @@ LinearizedGaussianFactor::LinearizedGaussianFactor(
  : NonlinearFactor(gaussian->keys())
{
  // Extract the keys and linearization points
-  BOOST_FOREACH(const Key& key, gaussian->keys()) {
+  for(const Key& key: gaussian->keys()) {
    // extract linearization point
    assert(lin_points.exists(key));
    this->lin_points_.insert(key, lin_points.at(key));

@@ -65,7 +65,7 @@ void LinearizedJacobianFactor::print(const std::string& s, const KeyFormatter& k
  std::cout << s << std::endl;

  std::cout << "Nonlinear Keys: ";
-  BOOST_FOREACH(const Key& key, this->keys())
+  for(const Key& key: this->keys())
    std::cout << keyFormatter(key) << " ";
  std::cout << std::endl;

@@ -105,7 +105,7 @@ LinearizedJacobianFactor::linearize(const Values& c) const {

  // Create the 'terms' data structure for the Jacobian constructor
  std::vector<std::pair<Key, Matrix> > terms;
-  BOOST_FOREACH(Key key, keys()) {
+  for(Key key: keys()) {
    terms.push_back(std::make_pair(key, this->A(key)));
  }

@@ -120,7 +120,7 @@ Vector LinearizedJacobianFactor::error_vector(const Values& c) const {

  Vector errorVector = -b();

-  BOOST_FOREACH(Key key, this->keys()) {
+  for(Key key: this->keys()) {
    const Value& newPt = c.at(key);
    const Value& linPt = lin_points_.at(key);
    Vector d = linPt.localCoordinates_(newPt);

@@ -162,7 +162,7 @@ void LinearizedHessianFactor::print(const std::string& s, const KeyFormatter& ke
  std::cout << s << std::endl;

  std::cout << "Nonlinear Keys: ";
-  BOOST_FOREACH(const Key& key, this->keys())
+  for(const Key& key: this->keys())
    std::cout << keyFormatter(key) << " ";
  std::cout << std::endl;

@@ -64,18 +64,18 @@ NonlinearFactorGraph CalculateMarginals(const NonlinearFactorGraph& factorGraph,

  std::set<Key> KeysToKeep;
-  BOOST_FOREACH(const Values::ConstKeyValuePair& key_value, linPoint) { // we cycle over all the keys of factorGraph
+  for(const Values::ConstKeyValuePair& key_value: linPoint) { // we cycle over all the keys of factorGraph
    KeysToKeep.insert(key_value.key);
  } // so far we are keeping all keys, but we want to delete the ones that we are going to marginalize
-  BOOST_FOREACH(Key key, keysToMarginalize) {
+  for(Key key: keysToMarginalize) {
    KeysToKeep.erase(key);
  } // we removed the keys that we have to marginalize

  Ordering ordering;
-  BOOST_FOREACH(Key key, keysToMarginalize) {
+  for(Key key: keysToMarginalize) {
    ordering.push_back(key);
  } // the keys that we marginalize should be at the beginning in the ordering
-  BOOST_FOREACH(Key key, KeysToKeep) {
+  for(Key key: KeysToKeep) {
    ordering.push_back(key);
  }

@@ -84,7 +84,7 @@ NonlinearFactorGraph CalculateMarginals(const NonlinearFactorGraph& factorGraph,
  GaussianFactorGraph marginal = *linearGraph.eliminatePartialMultifrontal(vector<Key>(keysToMarginalize.begin(), keysToMarginalize.end()), EliminateCholesky).second;

  NonlinearFactorGraph LinearContainerForGaussianMarginals;
-  BOOST_FOREACH(const GaussianFactor::shared_ptr& factor, marginal) {
+  for(const GaussianFactor::shared_ptr& factor: marginal) {
    LinearContainerForGaussianMarginals.push_back(LinearContainerFactor(factor, linPoint));
  }
  return LinearContainerForGaussianMarginals;

@@ -439,7 +439,7 @@ TEST( ConcurrentBatchFilter, update_and_marginalize )
  // ==========================================================
  expectedGraph.push_back(BetweenFactor<Pose3>(3, 4, poseOdometry, noiseOdometery));

-  BOOST_FOREACH(const GaussianFactor::shared_ptr& factor, result) {
+  for(const GaussianFactor::shared_ptr& factor: result) {
    expectedGraph.push_back(LinearContainerFactor(factor, partialValues));
  }

@@ -451,7 +451,7 @@ TEST( ConcurrentBatchFilter, update_and_marginalize )
  Values expectedValues = optimalValues;

  // Check
-  BOOST_FOREACH(Key key, keysToMove) {
+  for(Key key: keysToMove) {
    expectedValues.erase(key);
  }

@@ -1014,7 +1014,7 @@ TEST( ConcurrentBatchFilter, CalculateMarginals_1 )
  GaussianFactorGraph result = *linearFactorGraph.eliminatePartialMultifrontal(linearIndices, EliminateCholesky).second;

  NonlinearFactorGraph expectedMarginals;
-  BOOST_FOREACH(const GaussianFactor::shared_ptr& factor, result) {
+  for(const GaussianFactor::shared_ptr& factor: result) {
    expectedMarginals.push_back(LinearContainerFactor(factor, newValues));

  }

@@ -1069,7 +1069,7 @@ TEST( ConcurrentBatchFilter, CalculateMarginals_2 )
  GaussianFactorGraph result = *linearFactorGraph.eliminatePartialMultifrontal(linearIndices, EliminateCholesky).second;

  NonlinearFactorGraph expectedMarginals;
-  BOOST_FOREACH(const GaussianFactor::shared_ptr& factor, result) {
+  for(const GaussianFactor::shared_ptr& factor: result) {
    expectedMarginals.push_back(LinearContainerFactor(factor, newValues));

  }

@@ -560,14 +560,14 @@ TEST( ConcurrentBatchSmoother, synchronize_3 )
  GaussianFactorGraph::shared_ptr linearFactors = allFactors.linearize(allValues);

  KeySet eliminateKeys = linearFactors->keys();
-  BOOST_FOREACH(const Values::ConstKeyValuePair& key_value, filterSeparatorValues) {
+  for(const Values::ConstKeyValuePair& key_value: filterSeparatorValues) {
    eliminateKeys.erase(key_value.key);
  }
  std::vector<Key> variables(eliminateKeys.begin(), eliminateKeys.end());
  GaussianFactorGraph result = *linearFactors->eliminatePartialMultifrontal(variables, EliminateCholesky).second;

  expectedSmootherSummarization.resize(0);
-  BOOST_FOREACH(const GaussianFactor::shared_ptr& factor, result) {
+  for(const GaussianFactor::shared_ptr& factor: result) {
    expectedSmootherSummarization.push_back(LinearContainerFactor(factor, allValues));
  }

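The expected answer above is built by partial elimination: every non-separator variable is eliminated from the linear graph, and the .second half of the returned pair is the marginal factor graph left over on the separator. A hedged one-line wrapper capturing that call (GTSAM API as invoked in these tests; the wrapper name is ours):

    #include <vector>
    #include <gtsam/linear/GaussianFactorGraph.h>

    using namespace gtsam;

    // Eliminate `toEliminate` and keep only the remaining marginal factors
    // on the keys that were not eliminated.
    GaussianFactorGraph marginalOnRemaining(const GaussianFactorGraph& graph,
                                            const std::vector<Key>& toEliminate) {
      return *graph.eliminatePartialMultifrontal(toEliminate, EliminateCholesky).second;
    }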
|
|
@ -62,7 +62,7 @@ Values BatchOptimize(const NonlinearFactorGraph& graph, const Values& theta, int
|
|||
|
||||
// it is the same as the input graph, but we removed the empty factors that may be present in the input graph
|
||||
NonlinearFactorGraph graphForISAM2;
|
||||
BOOST_FOREACH(NonlinearFactor::shared_ptr factor, graph){
|
||||
for(NonlinearFactor::shared_ptr factor: graph){
|
||||
if(factor)
|
||||
graphForISAM2.push_back(factor);
|
||||
}
|
||||
|
@ -80,18 +80,18 @@ NonlinearFactorGraph CalculateMarginals(const NonlinearFactorGraph& factorGraph,
|
|||
|
||||
|
||||
std::set<Key> KeysToKeep;
|
||||
BOOST_FOREACH(const Values::ConstKeyValuePair& key_value, linPoint) { // we cycle over all the keys of factorGraph
|
||||
for(const Values::ConstKeyValuePair& key_value: linPoint) { // we cycle over all the keys of factorGraph
|
||||
KeysToKeep.insert(key_value.key);
|
||||
} // so far we are keeping all keys, but we want to delete the ones that we are going to marginalize
|
||||
BOOST_FOREACH(Key key, keysToMarginalize) {
|
||||
for(Key key: keysToMarginalize) {
|
||||
KeysToKeep.erase(key);
|
||||
} // we removed the keys that we have to marginalize
|
||||
|
||||
Ordering ordering;
|
||||
BOOST_FOREACH(Key key, keysToMarginalize) {
|
||||
for(Key key: keysToMarginalize) {
|
||||
ordering.push_back(key);
|
||||
} // the keys that we marginalize should be at the beginning in the ordering
|
||||
BOOST_FOREACH(Key key, KeysToKeep) {
|
||||
for(Key key: KeysToKeep) {
|
||||
ordering.push_back(key);
|
||||
}
|
||||
|
||||
|
@ -101,7 +101,7 @@ NonlinearFactorGraph CalculateMarginals(const NonlinearFactorGraph& factorGraph,
|
|||
GaussianFactorGraph marginal = *linearGraph.eliminatePartialMultifrontal(vector<Key>(keysToMarginalize.begin(), keysToMarginalize.end()), EliminateCholesky).second;
|
||||
|
||||
NonlinearFactorGraph LinearContainerForGaussianMarginals;
|
||||
BOOST_FOREACH(const GaussianFactor::shared_ptr& factor, marginal) {
|
||||
for(const GaussianFactor::shared_ptr& factor: marginal) {
|
||||
LinearContainerForGaussianMarginals.push_back(LinearContainerFactor(factor, linPoint));
|
||||
}
|
||||
|
||||
|
@ -417,7 +417,7 @@ TEST( ConcurrentIncrementalFilter, update_and_marginalize_1 )
|
|||
Values expectedValues = optimalValues;
|
||||
|
||||
// Check
|
||||
BOOST_FOREACH(Key key, keysToMove) {
|
||||
for(Key key: keysToMove) {
|
||||
expectedValues.erase(key);
|
||||
}
|
||||
|
||||
|
@ -443,7 +443,7 @@ TEST( ConcurrentIncrementalFilter, update_and_marginalize_1 )
|
|||
// ==========================================================
|
||||
expectedGraph.push_back(BetweenFactor<Pose3>(3, 4, poseOdometry, noiseOdometery));
|
||||
|
||||
BOOST_FOREACH(const GaussianFactor::shared_ptr& factor, marginal) {
|
||||
for(const GaussianFactor::shared_ptr& factor: marginal) {
|
||||
// the linearization point for the linear container is optional, but it is not used in the filter,
|
||||
// therefore if we add it here it will not pass the test
|
||||
// expectedGraph.push_back(LinearContainerFactor(factor, ordering, partialValues));
|
||||
|
@ -501,7 +501,7 @@ TEST( ConcurrentIncrementalFilter, update_and_marginalize_2 )
|
|||
Values expectedValues = optimalValues;
|
||||
|
||||
// Check
|
||||
BOOST_FOREACH(Key key, keysToMove) {
|
||||
for(Key key: keysToMove) {
|
||||
expectedValues.erase(key);
|
||||
}
|
||||
|
||||
|
@@ -527,7 +527,7 @@ TEST( ConcurrentIncrementalFilter, update_and_marginalize_2 )

// ==========================================================
expectedGraph.push_back(BetweenFactor<Pose3>(3, 4, poseOdometry, noiseOdometery));

BOOST_FOREACH(const GaussianFactor::shared_ptr& factor, marginal) {
for(const GaussianFactor::shared_ptr& factor: marginal) {
// the linearization point for the linear container is optional, but the filter does not use one,
// so adding it here would make the test fail
// expectedGraph.push_back(LinearContainerFactor(factor, ordering, partialValues));

@@ -543,7 +543,7 @@ TEST( ConcurrentIncrementalFilter, update_and_marginalize_2 )

Values optimalValues2 = BatchOptimize(newFactors, optimalValues);
Values expectedValues2 = optimalValues2;
// Check
BOOST_FOREACH(Key key, keysToMove) {
for(Key key: keysToMove) {
expectedValues2.erase(key);
}
Values actualValues2 = filter.calculateEstimate();

@@ -1116,7 +1116,7 @@ TEST( ConcurrentIncrementalFilter, CalculateMarginals_1 )

GaussianFactorGraph marginal = *linearGraph.eliminatePartialMultifrontal(vector<Key>(linearIndices.begin(), linearIndices.end()), EliminateCholesky).second;

NonlinearFactorGraph expectedMarginals;
BOOST_FOREACH(const GaussianFactor::shared_ptr& factor, marginal) {
for(const GaussianFactor::shared_ptr& factor: marginal) {
expectedMarginals.push_back(LinearContainerFactor(factor, newValues));
}

@@ -1167,7 +1167,7 @@ TEST( ConcurrentIncrementalFilter, CalculateMarginals_2 )

GaussianFactorGraph marginal = *linearGraph.eliminatePartialMultifrontal(vector<Key>(linearIndices.begin(), linearIndices.end()), EliminateCholesky).second;

NonlinearFactorGraph expectedMarginals;
BOOST_FOREACH(const GaussianFactor::shared_ptr& factor, marginal) {
for(const GaussianFactor::shared_ptr& factor: marginal) {
expectedMarginals.push_back(LinearContainerFactor(factor, newValues));
}

@@ -512,7 +512,7 @@ TEST( ConcurrentIncrementalSmootherDL, synchronize_2 )

// Values expectedLinearizationPoint = BatchOptimize(allFactors, allValues, 1);
Values expectedLinearizationPoint = filterSeparatorValues;
Values actualLinearizationPoint;
BOOST_FOREACH(const Values::ConstKeyValuePair& key_value, filterSeparatorValues) {
for(const Values::ConstKeyValuePair& key_value: filterSeparatorValues) {
actualLinearizationPoint.insert(key_value.key, smoother.getLinearizationPoint().at(key_value.key));
}
CHECK(assert_equal(expectedLinearizationPoint, actualLinearizationPoint, 1e-6));

@@ -580,14 +580,14 @@ TEST( ConcurrentIncrementalSmootherDL, synchronize_3 )

// GaussianBayesNet::shared_ptr GBNsptr = GSS.eliminate();

KeySet allkeys = LinFactorGraph->keys();
BOOST_FOREACH(const Values::ConstKeyValuePair& key_value, filterSeparatorValues) {
for(const Values::ConstKeyValuePair& key_value: filterSeparatorValues) {
allkeys.erase(key_value.key);
}
std::vector<Key> variables(allkeys.begin(), allkeys.end());
std::pair<GaussianBayesNet::shared_ptr, GaussianFactorGraph::shared_ptr> result = LinFactorGraph->eliminatePartialSequential(variables, EliminateCholesky);

expectedSmootherSummarization.resize(0);
BOOST_FOREACH(const GaussianFactor::shared_ptr& factor, *result.second) {
for(const GaussianFactor::shared_ptr& factor: *result.second) {
expectedSmootherSummarization.push_back(LinearContainerFactor(factor, allValues));
}

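The block above is the whole smoother-summarization recipe: every variable outside the filter separator is eliminated sequentially, and the factors left over are, by construction, the smoother's marginal on the separator. Condensed into one place, with the same GTSAM calls and the test's own names (so this relies on the test fixtures above):

// Condensed from the test above; LinFactorGraph, filterSeparatorValues and
// allValues come from the test fixture.
KeySet allkeys = LinFactorGraph->keys();
for (const Values::ConstKeyValuePair& key_value : filterSeparatorValues)
  allkeys.erase(key_value.key);                      // keep only non-separator keys
std::vector<Key> variables(allkeys.begin(), allkeys.end());
// Eliminate the non-separator variables; result.second is the marginal on the separator.
std::pair<GaussianBayesNet::shared_ptr, GaussianFactorGraph::shared_ptr> result =
    LinFactorGraph->eliminatePartialSequential(variables, EliminateCholesky);
NonlinearFactorGraph summarization;
for (const GaussianFactor::shared_ptr& factor : *result.second)
  summarization.push_back(LinearContainerFactor(factor, allValues));
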
@@ -513,7 +513,7 @@ TEST( ConcurrentIncrementalSmootherGN, synchronize_2 )

// Values expectedLinearizationPoint = BatchOptimize(allFactors, allValues, 1);
Values expectedLinearizationPoint = filterSeparatorValues;
Values actualLinearizationPoint;
BOOST_FOREACH(const Values::ConstKeyValuePair& key_value, filterSeparatorValues) {
for(const Values::ConstKeyValuePair& key_value: filterSeparatorValues) {
actualLinearizationPoint.insert(key_value.key, smoother.getLinearizationPoint().at(key_value.key));
}
CHECK(assert_equal(expectedLinearizationPoint, actualLinearizationPoint, 1e-6));

@@ -582,13 +582,13 @@ TEST( ConcurrentIncrementalSmootherGN, synchronize_3 )

// GaussianBayesNet::shared_ptr GBNsptr = GSS.eliminate();

KeySet allkeys = LinFactorGraph->keys();
BOOST_FOREACH(const Values::ConstKeyValuePair& key_value, filterSeparatorValues)
for(const Values::ConstKeyValuePair& key_value: filterSeparatorValues)
allkeys.erase(key_value.key);
std::vector<Key> variables(allkeys.begin(), allkeys.end());
std::pair<GaussianBayesNet::shared_ptr, GaussianFactorGraph::shared_ptr> result = LinFactorGraph->eliminatePartialSequential(variables, EliminateCholesky);

expectedSmootherSummarization.resize(0);
BOOST_FOREACH(const GaussianFactor::shared_ptr& factor, *result.second) {
for(const GaussianFactor::shared_ptr& factor: *result.second) {
expectedSmootherSummarization.push_back(LinearContainerFactor(factor, allValues));
}

@@ -13,7 +13,6 @@

#include <stdexcept>
#include <iostream>
#include <boost/tuple/tuple.hpp>
#include <boost/foreach.hpp>
#include <boost/shared_array.hpp>
#include <boost/timer.hpp>

@@ -193,7 +192,7 @@ namespace gtsam { namespace partition {

std::cout << "Number of nodes: " << adjacencyMap.size() << std::endl;
int index1, index2;

BOOST_FOREACH(const typename GenericGraph::value_type& factor, graph){
for(const typename GenericGraph::value_type& factor: graph){
index1 = dictionary[factor->key1.index];
index2 = dictionary[factor->key2.index];
std::cout << "index1: " << index1 << std::endl;

@@ -222,7 +221,7 @@ namespace gtsam { namespace partition {

sharedInts& adjncy = *ptr_adjncy;
sharedInts& adjwgt = *ptr_adjwgt;
int ind_xadj = 0, ind_adjncy = 0;
BOOST_FOREACH(const NeighborsInfo& info, adjacencyMap) {
for(const NeighborsInfo& info: adjacencyMap) {
*(xadj.get() + ind_xadj) = ind_adjncy;
std::copy(info.first .begin(), info.first .end(), adjncy.get() + ind_adjncy);
std::copy(info.second.begin(), info.second.end(), adjwgt.get() + ind_adjncy);

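For context, xadj/adjncy above are METIS's CSR (compressed sparse row) layout: xadj[i] is the offset where node i's neighbor list starts inside adjncy, with a final sentinel entry. A small self-contained sketch of the same construction; the toCSR helper is hypothetical and uses std containers instead of the shared arrays above:

#include <vector>

// Build METIS-style CSR arrays from an adjacency list.
void toCSR(const std::vector<std::vector<int> >& adj,
           std::vector<int>& xadj, std::vector<int>& adjncy) {
  xadj.clear(); adjncy.clear();
  for (const std::vector<int>& neighbors : adj) {
    xadj.push_back(static_cast<int>(adjncy.size()));   // where this node's list starts
    adjncy.insert(adjncy.end(), neighbors.begin(), neighbors.end());
  }
  xadj.push_back(static_cast<int>(adjncy.size()));     // sentinel: end of last list
}
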
@@ -334,7 +333,7 @@ namespace gtsam { namespace partition {

<< " " << result.B.size() << std::endl;
int edgeCut = 0;

BOOST_FOREACH(const typename GenericGraph::value_type& factor, graph){
for(const typename GenericGraph::value_type& factor: graph){
int key1 = factor->key1.index;
int key2 = factor->key2.index;
// print keys and their subgraph assignment

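The loop above computes the edge cut: the number of factors whose two keys landed in different submaps. A minimal standalone sketch, assuming a partition lookup in the role of the dictionary used above (names are illustrative):

#include <utility>
#include <vector>

// Hypothetical standalone version: partition[k] holds each key's submap id.
int computeEdgeCut(const std::vector<std::pair<int,int> >& edges,
                   const std::vector<int>& partition) {
  int edgeCut = 0;
  for (const std::pair<int,int>& e : edges)
    if (partition[e.first] != partition[e.second])
      ++edgeCut;                      // endpoints in different submaps: cut edge
  return edgeCut;
}
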
@@ -372,19 +371,19 @@ namespace gtsam { namespace partition {

// debug functions
void printIsland(const std::vector<size_t>& island) {
std::cout << "island: ";
BOOST_FOREACH(const size_t key, island)
for(const size_t key: island)
std::cout << key << " ";
std::cout << std::endl;
}

void printIslands(const std::list<std::vector<size_t> >& islands) {
BOOST_FOREACH(const std::vector<std::size_t>& island, islands)
for(const std::vector<std::size_t>& island: islands)
printIsland(island);
}

void printNumCamerasLandmarks(const std::vector<size_t>& keys, const std::vector<Symbol>& int2symbol) {
int numCamera = 0, numLandmark = 0;
BOOST_FOREACH(const size_t key, keys)
for(const size_t key: keys)
if (int2symbol[key].chr() == 'x')
numCamera++;
else

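The chr() == 'x' test relies on gtsam::Symbol, which packs a one-character tag and an index into a single integer Key; this code tags cameras 'x' and landmarks 'l'. A short usage sketch (the helper name is illustrative):

#include <gtsam/inference/Symbol.h>

bool isCameraKey(gtsam::Key key) {
  // Symbol round-trips between integer Keys and (char, index) pairs.
  return gtsam::Symbol(key).chr() == 'x';   // cameras are tagged 'x' here
}
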
@@ -403,16 +402,16 @@ namespace gtsam { namespace partition {

std::vector<size_t>& C = partitionResult.C;
std::vector<int>& dictionary = workspace.dictionary;
std::fill(dictionary.begin(), dictionary.end(), -1);
BOOST_FOREACH(const size_t a, A)
for(const size_t a: A)
dictionary[a] = 1;
BOOST_FOREACH(const size_t b, B)
for(const size_t b: B)
dictionary[b] = 2;
if (!C.empty())
throw std::runtime_error("addLandmarkToPartitionResult: C is not empty");

// set up landmarks
size_t i,j;
BOOST_FOREACH(const typename GenericGraph::value_type& factor, graph) {
for(const typename GenericGraph::value_type& factor: graph) {
i = factor->key1.index;
j = factor->key2.index;
if (dictionary[j] == 0) // if the landmark is already in the separator, continue

@@ -427,7 +426,7 @@ namespace gtsam { namespace partition {

// std::cout << "dictionary[67980]" << dictionary[j] << std::endl;
}

BOOST_FOREACH(const size_t j, landmarkKeys) {
for(const size_t j: landmarkKeys) {
switch(dictionary[j]) {
case 0: C.push_back(j); break;
case 1: A.push_back(j); break;

@@ -456,7 +455,7 @@ namespace gtsam { namespace partition {

// find out all the landmark keys, which are to be eliminated
cameraKeys.reserve(keys.size());
landmarkKeys.reserve(keys.size());
BOOST_FOREACH(const size_t key, keys) {
for(const size_t key: keys) {
if((*int2symbol)[key].chr() == 'x')
cameraKeys.push_back(key);
else

@@ -518,11 +517,11 @@ namespace gtsam { namespace partition {

// printNumCamerasLandmarks(result->C, *int2symbol);
// std::cout << "no. of island: " << islands.size() << "; ";
// std::cout << "island size: ";
// BOOST_FOREACH(const Island& island, islands)
// for(const Island& island: islands)
// std::cout << island.size() << " ";
// std::cout << std::endl;

// BOOST_FOREACH(const Island& island, islands) {
// for(const Island& island: islands) {
// printNumCamerasLandmarks(island, int2symbol);
// }
#endif

@@ -550,12 +549,12 @@ namespace gtsam { namespace partition {

// generate the node map
std::vector<int>& partitionTable = workspace.partitionTable;
std::fill(partitionTable.begin(), partitionTable.end(), -1);
BOOST_FOREACH(const size_t key, result->C)
for(const size_t key: result->C)
partitionTable[key] = 0;
int idx = 0;
BOOST_FOREACH(const Island& island, islands) {
for(const Island& island: islands) {
idx++;
BOOST_FOREACH(const size_t key, island) {
for(const size_t key: island) {
partitionTable[key] = idx;
}
}

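The node map above follows a simple convention: separator keys get partition id 0, the keys of the i-th island get id i (numbered from 1), and -1 marks unused slots. A standalone sketch of the same table construction; makePartitionTable is a hypothetical name:

#include <list>
#include <vector>

// Hypothetical standalone version of the table built above.
std::vector<int> makePartitionTable(size_t nrKeys,
                                    const std::vector<size_t>& separator,
                                    const std::list<std::vector<size_t> >& islands) {
  std::vector<int> table(nrKeys, -1);                  // -1: key not used
  for (size_t key : separator) table[key] = 0;         // 0: separator
  int idx = 0;
  for (const std::vector<size_t>& island : islands) {
    ++idx;                                             // islands are numbered from 1
    for (size_t key : island) table[key] = idx;
  }
  return table;
}
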
@@ -6,7 +6,6 @@

* Description: generic graph types used in partitioning
*/
#include <iostream>
#include <boost/foreach.hpp>
#include <boost/tuple/tuple.hpp>
#include <boost/make_shared.hpp>
#include <boost/lexical_cast.hpp>

@@ -98,7 +97,7 @@ namespace gtsam { namespace partition {

}

// erase unused factors
BOOST_FOREACH(const FactorList::iterator& it, toErase)
for(const FactorList::iterator& it: toErase)
factors.erase(it);

if (!succeed) break;

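The collect-then-erase idiom above is safe precisely because factors is a std::list: erasing one list element invalidates only iterators to that element, so the iterators saved in toErase stay usable. A minimal sketch of the idiom with a hypothetical predicate:

#include <list>
#include <vector>

void eraseNegatives(std::list<int>& factors) {
  // Collect iterators to the elements to drop...
  std::vector<std::list<int>::iterator> toErase;
  for (std::list<int>::iterator it = factors.begin(); it != factors.end(); ++it)
    if (*it < 0) toErase.push_back(it);
  // ...then erase them; list erase leaves the other saved iterators valid.
  for (const std::list<int>::iterator& it : toErase)
    factors.erase(it);
}
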
@@ -107,7 +106,7 @@ namespace gtsam { namespace partition {

list<vector<size_t> > islands;
map<size_t, vector<size_t> > arrays = dsf.arrays();
size_t key; vector<size_t> array;
BOOST_FOREACH(boost::tie(key, array), arrays)
for(const auto& key_array: arrays) {
boost::tie(key, array) = key_array;
islands.push_back(array);
}
return islands;
}

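Note that BOOST_FOREACH(boost::tie(key, array), arrays) has no one-line range-for equivalent, which is why the loop above gains a body: a range-for yields the map's value_type (a std::pair), and boost::tie can unpack it because Boost 2-tuples assign from std::pair. A minimal standalone sketch of the translation (the function name is illustrative):

#include <map>
#include <vector>
#include <boost/tuple/tuple.hpp>

void iterateArrays(const std::map<size_t, std::vector<size_t> >& arrays) {
  size_t key; std::vector<size_t> array;
  for (const auto& key_array : arrays) {
    boost::tie(key, array) = key_array;  // Boost 2-tuples assign from std::pair
    // ... use key and array exactly as the BOOST_FOREACH body did ...
  }
}
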
@@ -116,14 +115,14 @@ namespace gtsam { namespace partition {

/* ************************************************************************* */
void print(const GenericGraph2D& graph, const std::string name) {
cout << name << endl;
BOOST_FOREACH(const sharedGenericFactor2D& factor_, graph)
for(const sharedGenericFactor2D& factor_: graph)
cout << factor_->key1.index << " " << factor_->key2.index << endl;
}

/* ************************************************************************* */
void print(const GenericGraph3D& graph, const std::string name) {
cout << name << endl;
BOOST_FOREACH(const sharedGenericFactor3D& factor_, graph)
for(const sharedGenericFactor3D& factor_: graph)
cout << factor_->key1.index << " " << factor_->key2.index << " (" <<
factor_->key1.type << ", " << factor_->key2.type <<")" << endl;
}

@@ -174,7 +173,7 @@ namespace gtsam { namespace partition {

}

// erase unused factors
BOOST_FOREACH(const FactorList::iterator& it, toErase)
for(const FactorList::iterator& it: toErase)
factors.erase(it);

if (!succeed) break;

@@ -204,7 +203,7 @@ namespace gtsam { namespace partition {

// compute the constraint number per camera
std::fill(nrConstraints.begin(), nrConstraints.end(), 0);
BOOST_FOREACH(const sharedGenericFactor3D& factor_, graph) {
for(const sharedGenericFactor3D& factor_: graph) {
const int& key1 = factor_->key1.index;
const int& key2 = factor_->key2.index;
if (workspace.dictionary[key1] != -1 && workspace.dictionary[key2] != -1 &&

@@ -258,7 +257,7 @@ namespace gtsam { namespace partition {

// check the constraint number of every variable
// find the camera and landmark keys
BOOST_FOREACH(const sharedGenericFactor3D& factor_, graph) {
for(const sharedGenericFactor3D& factor_: graph) {
//assert(factor_->key2.type == NODE_LANDMARK_3D); // only VisualSLAM should come here, not StereoSLAM
if (workspace.dictionary[factor_->key1.index] != -1) {
if (factor_->key1.type == NODE_POSE_3D)

@@ -287,7 +286,7 @@ namespace gtsam { namespace partition {

// add singular variables directly as islands
if (!singularCameras.empty()) {
if (verbose) cout << "singular cameras:";
BOOST_FOREACH(const size_t i, singularCameras) {
for(const size_t i: singularCameras) {
islands.push_back(vector<size_t>(1, i)); // <---------------------------
if (verbose) cout << i << " ";
}

@@ -295,7 +294,7 @@ namespace gtsam { namespace partition {

}
if (!singularLandmarks.empty()) {
if (verbose) cout << "singular landmarks:";
BOOST_FOREACH(const size_t i, singularLandmarks) {
for(const size_t i: singularLandmarks) {
islands.push_back(vector<size_t>(1, i)); // <---------------------------
if (verbose) cout << i << " ";
}

@@ -306,10 +305,10 @@ namespace gtsam { namespace partition {

// regenerating islands
map<size_t, vector<size_t> > labelIslands = dsf.arrays();
size_t label; vector<size_t> island;
BOOST_FOREACH(boost::tie(label, island), labelIslands) {
for(const auto& label_island: labelIslands) {
boost::tie(label, island) = label_island;
vector<size_t> filteredIsland; // remove singular cameras from array
filteredIsland.reserve(island.size());
BOOST_FOREACH(const size_t key, island) {
for(const size_t key: island) {
if ((isCamera[key] && singularCameras.find(key) == singularCameras.end()) || // not singular
(isLandmark[key] && singularLandmarks.find(key) == singularLandmarks.end()) || // not singular
(!isCamera[key] && !isLandmark[key])) { // the key is not involved in any factor, so its type is undetermined

@@ -321,7 +320,7 @@ namespace gtsam { namespace partition {

// sanity check
size_t nrKeys = 0;
BOOST_FOREACH(const vector<size_t>& island, islands)
for(const vector<size_t>& island: islands)
nrKeys += island.size();
if (nrKeys != keys.size()) {
cout << nrKeys << " vs " << keys.size() << endl;

@@ -363,7 +362,7 @@ namespace gtsam { namespace partition {

// for odometry xi-xj where i<j, we always store cameraToCamera[i] = j; entries stay -1 where there is no odometry
vector<int> cameraToCamera(dictionary.size(), -1);
size_t key_i, key_j;
BOOST_FOREACH(const sharedGenericFactor3D& factor_, graph) {
for(const sharedGenericFactor3D& factor_: graph) {
if (factor_->key1.type == NODE_POSE_3D) {
if (factor_->key2.type == NODE_LANDMARK_3D) {// projection factor
cameraToLandmarks[factor_->key1.index].push_back(factor_->key2.index);

@@ -382,7 +381,7 @@ namespace gtsam { namespace partition {

}

// sort the landmark keys for the later getNrCommonLandmarks call
BOOST_FOREACH(vector<LandmarkKey> &landmarks, cameraToLandmarks){
for(vector<LandmarkKey> &landmarks: cameraToLandmarks){
if (!landmarks.empty())
std::sort(landmarks.begin(), landmarks.end());
}

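Keeping each camera's landmark list sorted means that counting the landmarks two cameras share (the getNrCommonLandmarks call the comment refers to) can be a linear sorted-range intersection rather than a quadratic scan. A sketch under that assumption; the helper name echoes the comment but is illustrative:

#include <algorithm>
#include <iterator>
#include <vector>

// Assumes both lists are sorted, as arranged above.
size_t nrCommonLandmarks(const std::vector<size_t>& a, const std::vector<size_t>& b) {
  std::vector<size_t> common;
  std::set_intersection(a.begin(), a.end(), b.begin(), b.end(),
                        std::back_inserter(common));
  return common.size();
}
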
@@ -417,7 +416,7 @@ namespace gtsam { namespace partition {

const vector<int>& dictionary = workspace.dictionary;
vector<bool> isValidCamera(workspace.dictionary.size(), false);
vector<bool> isValidLandmark(workspace.dictionary.size(), false);
BOOST_FOREACH(const sharedGenericFactor3D& factor_, graph) {
for(const sharedGenericFactor3D& factor_: graph) {
assert(factor_->key1.type == NODE_POSE_3D);
//assert(factor_->key2.type == NODE_LANDMARK_3D);
const size_t& key1 = factor_->key1.index;

@@ -463,7 +462,7 @@ namespace gtsam { namespace partition {

}

// debug info
BOOST_FOREACH(const size_t key, frontals) {
for(const size_t key: frontals) {
if (isValidCamera[key] && nrConstraints[key] < minNrConstraintsPerCamera)
cout << "singular camera:" << key << " with " << nrConstraints[key] << " constraints" << endl;
}

@@ -70,11 +70,11 @@ namespace gtsam { namespace partition {

boost::shared_ptr<SubNLG> NestedDissection<NLG, SubNLG, GenericGraph>::makeSubNLG(
const NLG& fg, const vector<size_t>& frontals, const vector<size_t>& sep, const boost::shared_ptr<SubNLG>& parent) const {
OrderedSymbols frontalKeys;
BOOST_FOREACH(const size_t index, frontals)
for(const size_t index: frontals)
frontalKeys.push_back(int2symbol_[index]);

UnorderedSymbols sepKeys;
BOOST_FOREACH(const size_t index, sep)
for(const size_t index: sep)
sepKeys.insert(int2symbol_[index]);

return boost::make_shared<SubNLG>(fg, frontalKeys, sepKeys, parent);

@@ -129,7 +129,7 @@ namespace gtsam { namespace partition {

// make three lists of variables A, B, and C
int partition;
childFrontals.resize(numSubmaps);
BOOST_FOREACH(const size_t key, keys){
for(const size_t key: keys){
partition = partitionTable[key];
switch (partition) {
case -1: break; // the separator of the separator variables

@@ -144,13 +144,13 @@ namespace gtsam { namespace partition {

childSeps.reserve(numSubmaps);
frontalFactors.resize(numSubmaps);
frontalUnaryFactors.resize(numSubmaps);
BOOST_FOREACH(typename GenericGraph::value_type factor, fg)
for(typename GenericGraph::value_type factor: fg)
processFactor(factor, partitionTable, frontalFactors, sepFactors, childSeps_, weeklinks);
BOOST_FOREACH(const set<size_t>& childSep, childSeps_)
for(const set<size_t>& childSep: childSeps_)
childSeps.push_back(vector<size_t>(childSep.begin(), childSep.end()));

// add each unary factor to the current cluster or pass it to one of the child clusters
BOOST_FOREACH(const sharedGenericUnaryFactor& unaryFactor_, unaryFactors) {
for(const sharedGenericUnaryFactor& unaryFactor_: unaryFactors) {
partition = partitionTable[unaryFactor_->key.index];
if (!partition) sepFactors.push_back(fg_[unaryFactor_->index]);
else frontalUnaryFactors[partition-1].push_back(unaryFactor_);

@@ -164,7 +164,7 @@ namespace gtsam { namespace partition {

NLG sepFactors;
typename GenericGraph::const_iterator it = gfg.begin(), itLast = gfg.end();
while(it!=itLast) sepFactors.push_back(fg_[(*it++)->index]);
BOOST_FOREACH(const sharedGenericUnaryFactor& unaryFactor_, unaryFactors)
for(const sharedGenericUnaryFactor& unaryFactor_: unaryFactors)
sepFactors.push_back(fg_[unaryFactor_->index]);
return sepFactors;
}

@@ -10,7 +10,6 @@

#include <boost/assign/std/set.hpp> // for operator +=
#include <boost/assign/std/vector.hpp> // for operator +=
using namespace boost::assign;
#include <boost/foreach.hpp>
#include <boost/make_shared.hpp>
#include <CppUnitLite/TestHarness.h>

@@ -89,10 +88,10 @@ TEST ( Partition, edgePartitionByMetis )

vector<size_t> A_expected; A_expected += 0, 1; // frontal
vector<size_t> B_expected; B_expected += 2, 3; // frontal
vector<size_t> C_expected; // separator
// BOOST_FOREACH(const size_t a, actual->A)
// for(const size_t a: actual->A)
// cout << a << " ";
// cout << endl;
// BOOST_FOREACH(const size_t b, actual->B)
// for(const size_t b: actual->B)
// cout << b << " ";
// cout << endl;
