Deprecated boost iterators in Values
parent 9bca36fd2c
commit ae63321d1b
@@ -43,9 +43,9 @@ int main(const int argc, const char* argv[]) {
   auto priorModel = noiseModel::Diagonal::Variances(
       (Vector(6) << 1e-6, 1e-6, 1e-6, 1e-4, 1e-4, 1e-4).finished());
   Key firstKey = 0;
-  for (const auto key_value : *initial) {
+  for (const auto key : initial->keys()) {
     std::cout << "Adding prior to g2o file " << std::endl;
-    firstKey = key_value.key;
+    firstKey = key;
     graph->addPrior(firstKey, Pose3(), priorModel);
     break;
   }
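The pattern above recurs throughout this commit: loops that only need the key stop dereferencing the deprecated boost transform iterators (key_value.key) and iterate Values::keys() instead. A minimal before/after sketch, assuming GTSAM 4.2's Values API; the "initial" container is a hypothetical stand-in:

#include <iostream>
#include <gtsam/nonlinear/Values.h>

void printKeys(const gtsam::Values& initial) {
  // Old (deprecated): iterate key/value pairs just to read the key.
  //   for (const auto key_value : initial) { gtsam::Key k = key_value.key; ... }
  // New: ask Values for its (ordered) key vector directly.
  for (const gtsam::Key key : initial.keys()) {
    std::cout << "key: " << key << std::endl;
  }
}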
@@ -74,10 +74,8 @@ int main(const int argc, const char* argv[]) {

   // Calculate and print marginal covariances for all variables
   Marginals marginals(*graph, result);
-  for (const auto key_value : result) {
-    auto p = dynamic_cast<const GenericValue<Pose3>*>(&key_value.value);
-    if (!p) continue;
-    std::cout << marginals.marginalCovariance(key_value.key) << endl;
+  for (const auto& key_pose : result.extract<Pose3>()) {
+    std::cout << marginals.marginalCovariance(key_pose.first) << endl;
   }
   return 0;
 }
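Where the old loop filtered by dynamic_cast on each GenericValue, the new code uses Values::extract<T>(), which returns a std::map<Key, T> holding only the matching variables. A hedged sketch of that usage (the "result" Values and its Pose3 contents are assumptions):

#include <iostream>
#include <gtsam/geometry/Pose3.h>
#include <gtsam/nonlinear/Values.h>

void printPoses(const gtsam::Values& result) {
  // extract<Pose3>() copies out only the Pose3 variables, keyed as in result.
  for (const auto& key_pose : result.extract<gtsam::Pose3>()) {
    std::cout << gtsam::DefaultKeyFormatter(key_pose.first) << ":\n"
              << key_pose.second.matrix() << std::endl;
  }
}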
@@ -55,14 +55,14 @@ int main(const int argc, const char *argv[]) {
   std::cout << "Rewriting input to file: " << inputFileRewritten << std::endl;
   // Additional: rewrite input with simplified keys 0,1,...
   Values simpleInitial;
-  for(const auto key_value: *initial) {
+  for (const auto k : initial->keys()) {
     Key key;
-    if(add)
-      key = key_value.key + firstKey;
+    if (add)
+      key = k + firstKey;
     else
-      key = key_value.key - firstKey;
+      key = k - firstKey;

-    simpleInitial.insert(key, initial->at(key_value.key));
+    simpleInitial.insert(key, initial->at(k));
   }
   NonlinearFactorGraph simpleGraph;
   for(const boost::shared_ptr<NonlinearFactor>& factor: *graph) {
@@ -42,9 +42,9 @@ int main(const int argc, const char* argv[]) {
   auto priorModel = noiseModel::Diagonal::Variances(
       (Vector(6) << 1e-6, 1e-6, 1e-6, 1e-4, 1e-4, 1e-4).finished());
   Key firstKey = 0;
-  for (const auto key_value : *initial) {
+  for (const auto key : initial->keys()) {
     std::cout << "Adding prior to g2o file " << std::endl;
-    firstKey = key_value.key;
+    firstKey = key;
     graph->addPrior(firstKey, Pose3(), priorModel);
     break;
   }

@@ -42,9 +42,9 @@ int main(const int argc, const char* argv[]) {
   auto priorModel = noiseModel::Diagonal::Variances(
       (Vector(6) << 1e-6, 1e-6, 1e-6, 1e-4, 1e-4, 1e-4).finished());
   Key firstKey = 0;
-  for (const auto key_value : *initial) {
+  for (const auto key : initial->keys()) {
     std::cout << "Adding prior to g2o file " << std::endl;
-    firstKey = key_value.key;
+    firstKey = key;
     graph->addPrior(firstKey, Pose3(), priorModel);
     break;
   }

@@ -42,9 +42,9 @@ int main(const int argc, const char* argv[]) {
   auto priorModel = noiseModel::Diagonal::Variances(
       (Vector(6) << 1e-6, 1e-6, 1e-6, 1e-4, 1e-4, 1e-4).finished());
   Key firstKey = 0;
-  for (const auto key_value : *initial) {
+  for (const auto key : initial->keys()) {
     std::cout << "Adding prior to g2o file " << std::endl;
-    firstKey = key_value.key;
+    firstKey = key;
     graph->addPrior(firstKey, Pose3(), priorModel);
     break;
   }
@@ -559,12 +559,12 @@ void runPerturb()

   // Perturb values
   VectorValues noise;
-  for(const Values::KeyValuePair key_val: initial)
+  for(const auto& key_dim: initial.dims())
   {
-    Vector noisev(key_val.value.dim());
+    Vector noisev(key_dim.second);
     for(Vector::Index i = 0; i < noisev.size(); ++i)
       noisev(i) = normal(rng);
-    noise.insert(key_val.key, noisev);
+    noise.insert(key_dim.first, noisev);
   }
   Values perturbed = initial.retract(noise);
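Values::dims(), added by this commit, returns a std::map<Key, size_t> of tangent-space dimensions, which is all the perturbation loop needs. A minimal sketch of building a random VectorValues that way; the rng/normal generators mirror the benchmark above and are assumptions:

#include <random>
#include <gtsam/linear/VectorValues.h>
#include <gtsam/nonlinear/Values.h>

gtsam::VectorValues randomDelta(const gtsam::Values& initial) {
  std::mt19937_64 rng(42);
  std::normal_distribution<double> normal(0.0, 1.0);
  gtsam::VectorValues noise;
  // dims() gives each variable's tangent dimension without touching the value.
  for (const auto& key_dim : initial.dims()) {
    gtsam::Vector noisev(key_dim.second);
    for (int i = 0; i < noisev.size(); ++i) noisev(i) = normal(rng);
    noise.insert(key_dim.first, noisev);
  }
  return noise;
}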
@@ -102,7 +102,7 @@ class GTSAM_EXPORT HybridValues {

   /// Check whether a variable with key \c j exists in values.
   bool existsNonlinear(Key j) {
-    return (nonlinear_.find(j) != nonlinear_.end());
+    return nonlinear_.exists(j);
   };

   /// Check whether a variable with key \c j exists.
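Values::exists has two flavors: the untyped bool exists(Key) used above, and a typed exists<T>(Key) returning boost::optional<const T&> (its declaration is visible in a later hunk of this diff). A sketch of both, with the Pose3 choice as an assumption:

#include <gtsam/geometry/Pose3.h>
#include <gtsam/nonlinear/Values.h>

bool hasPose(const gtsam::Values& values, gtsam::Key j) {
  if (!values.exists(j)) return false;  // is the key present at all?
  // Typed check: present *and* holds a Pose3.
  return static_cast<bool>(values.exists<gtsam::Pose3>(j));
}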
@@ -129,10 +129,7 @@ TEST(HybridBayesNet, evaluateHybrid) {
 TEST(HybridBayesNet, Choose) {
   Switching s(4);

-  Ordering ordering;
-  for (auto&& kvp : s.linearizationPoint) {
-    ordering += kvp.key;
-  }
+  const Ordering ordering(s.linearizationPoint.keys());

   HybridBayesNet::shared_ptr hybridBayesNet;
   HybridGaussianFactorGraph::shared_ptr remainingFactorGraph;

@@ -163,10 +160,7 @@ TEST(HybridBayesNet, Choose) {
 TEST(HybridBayesNet, OptimizeAssignment) {
   Switching s(4);

-  Ordering ordering;
-  for (auto&& kvp : s.linearizationPoint) {
-    ordering += kvp.key;
-  }
+  const Ordering ordering(s.linearizationPoint.keys());

   HybridBayesNet::shared_ptr hybridBayesNet;
   HybridGaussianFactorGraph::shared_ptr remainingFactorGraph;
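Ordering is constructible from any container of keys, so the manual "ordering += key" accumulation collapses to a one-liner, exactly as the two test hunks above do. A sketch, assuming the GTSAM 4.x Ordering(keys) constructor:

#include <gtsam/inference/Ordering.h>
#include <gtsam/nonlinear/Values.h>

gtsam::Ordering naturalOrdering(const gtsam::Values& linearizationPoint) {
  // keys() is already sorted by Key, so this yields the "natural" ordering.
  return gtsam::Ordering(linearizationPoint.keys());
}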
@@ -436,36 +436,6 @@ struct GTSAM_EXPORT UpdateImpl {
     }
   }

-  /**
-   * Apply expmap to the given values, but only for indices appearing in
-   * \c mask. Values are expmapped in-place.
-   * \param mask Mask on linear indices, only \c true entries are expmapped
-   */
-  static void ExpmapMasked(const VectorValues& delta, const KeySet& mask,
-                           Values* theta) {
-    gttic(ExpmapMasked);
-    assert(theta->size() == delta.size());
-    Values::iterator key_value;
-    VectorValues::const_iterator key_delta;
-#ifdef GTSAM_USE_TBB
-    for (key_value = theta->begin(); key_value != theta->end(); ++key_value) {
-      key_delta = delta.find(key_value->key);
-#else
-    for (key_value = theta->begin(), key_delta = delta.begin();
-         key_value != theta->end(); ++key_value, ++key_delta) {
-      assert(key_value->key == key_delta->first);
-#endif
-      Key var = key_value->key;
-      assert(static_cast<size_t>(delta[var].size()) == key_value->value.dim());
-      assert(delta[var].allFinite());
-      if (mask.exists(var)) {
-        Value* retracted = key_value->value.retract_(delta[var]);
-        key_value->value = *retracted;
-        retracted->deallocate_();
-      }
-    }
-  }
-
   // Linearize new factors
   void linearizeNewFactors(const NonlinearFactorGraph& newFactors,
                            const Values& theta, size_t numNonlinearFactors,
@@ -454,7 +454,7 @@ ISAM2Result ISAM2::update(const NonlinearFactorGraph& newFactors,
       update.findFluid(roots_, relinKeys, &result.markedKeys, result.details());
       // 6. Update linearization point for marked variables:
       // \Theta_{J}:=\Theta_{J}+\Delta_{J}.
-      UpdateImpl::ExpmapMasked(delta_, relinKeys, &theta_);
+      theta_.retractMasked(delta_, relinKeys);
     }
     result.variablesRelinearized = result.markedKeys.size();
   }
@@ -285,8 +285,8 @@ static Scatter scatterFromValues(const Values& values) {
   scatter.reserve(values.size());

   // use "natural" ordering with keys taken from the initial values
-  for (const auto key_value : values) {
-    scatter.add(key_value.key, key_value.value.dim());
+  for (const auto& key_dim : values.dims()) {
+    scatter.add(key_dim.first, key_dim.second);
   }

   return scatter;
@@ -24,12 +24,10 @@

 #pragma once

-#include <boost/bind/bind.hpp>
-
-#include <gtsam/nonlinear/Values.h> // Only so Eclipse finds class definition
+#include <gtsam/nonlinear/Values.h>
+
+#include <utility>

 namespace gtsam {

@@ -95,13 +93,13 @@ namespace gtsam {
 std::map<Key, ValueType>
 Values::extract(const std::function<bool(Key)>& filterFcn) const {
   std::map<Key, ValueType> result;
-  for (const auto& key_value : *this) {
+  for (const auto& key_value : values_) {
     // Check if key matches
-    if (filterFcn(key_value.key)) {
+    if (filterFcn(key_value.first)) {
       // Check if type matches (typically does as symbols matched with types)
       if (auto t =
-          dynamic_cast<const GenericValue<ValueType>*>(&key_value.value))
-        result[key_value.key] = t->value();
+          dynamic_cast<const GenericValue<ValueType>*>(key_value.second))
+        result[key_value.first] = t->value();
     }
   }
   return result;

@@ -109,6 +107,8 @@ namespace gtsam {

 /* ************************************************************************* */
+#ifdef GTSAM_ALLOW_DEPRECATED_SINCE_V42
+#include <boost/bind/bind.hpp>

 template<class ValueType>
 class Values::Filtered {
 public:
@@ -25,8 +25,6 @@
 #include <gtsam/nonlinear/Values.h>
 #include <gtsam/linear/VectorValues.h>

-#include <boost/iterator/transform_iterator.hpp>
-
 #include <list>
 #include <memory>
 #include <sstream>
@@ -52,15 +50,15 @@ namespace gtsam {

 /* ************************************************************************* */
 Values::Values(const Values& other, const VectorValues& delta) {
-  for (const_iterator key_value = other.begin(); key_value != other.end(); ++key_value) {
-    VectorValues::const_iterator it = delta.find(key_value->key);
-    Key key = key_value->key; // Non-const duplicate to deal with non-const insert argument
+  for (const auto& key_value : other.values_) {
+    VectorValues::const_iterator it = delta.find(key_value.first);
+    Key key = key_value.first; // Non-const duplicate to deal with non-const insert argument
     if (it != delta.end()) {
       const Vector& v = it->second;
-      Value* retractedValue(key_value->value.retract_(v)); // Retract
+      Value* retractedValue(key_value.second->retract_(v)); // Retract
       values_.insert(key, retractedValue); // Add retracted result directly to result values
     } else {
-      values_.insert(key, key_value->value.clone_()); // Add original version to result values
+      values_.insert(key, key_value.second->clone_()); // Add original version to result values
     }
   }
 }
@@ -69,9 +67,9 @@ namespace gtsam {
 void Values::print(const string& str, const KeyFormatter& keyFormatter) const {
   cout << str << (str.empty() ? "" : "\n");
   cout << "Values with " << size() << " values:\n";
-  for(const_iterator key_value = begin(); key_value != end(); ++key_value) {
-    cout << "Value " << keyFormatter(key_value->key) << ": ";
-    key_value->value.print("");
+  for (const auto& key_value : values_) {
+    cout << "Value " << keyFormatter(key_value.first) << ": ";
+    key_value.second->print("");
     cout << "\n";
   }
 }
@@ -80,12 +78,12 @@ namespace gtsam {
 bool Values::equals(const Values& other, double tol) const {
   if (this->size() != other.size())
     return false;
-  for (const_iterator it1 = this->begin(), it2 = other.begin();
-       it1 != this->end(); ++it1, ++it2) {
-    const Value& value1 = it1->value;
-    const Value& value2 = it2->value;
-    if (typeid(value1) != typeid(value2) || it1->key != it2->key
-        || !value1.equals_(value2, tol)) {
+  for (auto it1 = values_.begin(), it2 = other.values_.begin();
+       it1 != values_.end(); ++it1, ++it2) {
+    const Value* value1 = it1->second;
+    const Value* value2 = it2->second;
+    if (typeid(*value1) != typeid(*value2) || it1->first != it2->first
+        || !value1->equals_(*value2, tol)) {
       return false;
     }
   }
@@ -102,17 +100,44 @@ namespace gtsam {
   return Values(*this, delta);
 }

+/* ************************************************************************* */
+void Values::retractMasked(const VectorValues& delta, const KeySet& mask) {
+  gttic(retractMasked);
+  assert(theta->size() == delta.size());
+  auto key_value = values_.begin();
+  VectorValues::const_iterator key_delta;
+#ifdef GTSAM_USE_TBB
+  for (; key_value != values_.end(); ++key_value) {
+    key_delta = delta.find(key_value->first);
+#else
+  for (key_delta = delta.begin(); key_value != values_.end();
+       ++key_value, ++key_delta) {
+    assert(key_value->first == key_delta->first);
+#endif
+    Key var = key_value->first;
+    assert(static_cast<size_t>(delta[var].size()) == key_value->second->dim());
+    assert(delta[var].allFinite());
+    if (mask.exists(var)) {
+      Value* retracted = key_value->second->retract_(delta[var]);
+      // TODO(dellaert): can we use std::move here?
+      *(key_value->second) = *retracted;
+      retracted->deallocate_();
+    }
+  }
+}
+
 /* ************************************************************************* */
 VectorValues Values::localCoordinates(const Values& cp) const {
   if(this->size() != cp.size())
     throw DynamicValuesMismatched();
   VectorValues result;
-  for(const_iterator it1=this->begin(), it2=cp.begin(); it1!=this->end(); ++it1, ++it2) {
-    if(it1->key != it2->key)
+  for (auto it1 = values_.begin(), it2 = cp.values_.begin();
+       it1 != values_.end(); ++it1, ++it2) {
+    if(it1->first != it2->first)
       throw DynamicValuesMismatched(); // If keys do not match
     // Will throw a dynamic_cast exception if types do not match
     // NOTE: this is separate from localCoordinates(cp, ordering, result) due to at() vs. insert
-    result.insert(it1->key, it1->value.localCoordinates_(it2->value));
+    result.insert(it1->first, it1->second->localCoordinates_(*it2->second));
   }
   return result;
 }
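retractMasked replaces the old UpdateImpl::ExpmapMasked helper removed earlier in this diff: it retracts delta in place, but only for keys present in mask, leaving every other variable's estimate untouched. A hedged usage sketch; the keys and dimensions here are invented for illustration, and the brace-initialized KeySet mirrors the commit's own unit test:

#include <gtsam/geometry/Pose2.h>
#include <gtsam/linear/VectorValues.h>
#include <gtsam/nonlinear/Values.h>

void retractMaskedExample() {
  gtsam::Values theta;
  theta.insert(0, gtsam::Pose2(0, 0, 0));
  theta.insert(1, gtsam::Pose2(1, 0, 0));

  gtsam::VectorValues delta;
  delta.insert(0, gtsam::Vector3(0.1, 0.0, 0.0));
  delta.insert(1, gtsam::Vector3(0.2, 0.0, 0.0));

  // Only key 1 is retracted; key 0 keeps its current estimate.
  theta.retractMasked(delta, gtsam::KeySet{1});
}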
@@ -130,24 +155,26 @@ namespace gtsam {

 /* ************************************************************************* */
 void Values::insert(Key j, const Value& val) {
-  std::pair<iterator,bool> insertResult = tryInsert(j, val);
+  auto insertResult = values_.insert(j, val.clone_());
   if(!insertResult.second)
     throw ValuesKeyAlreadyExists(j);
 }

 /* ************************************************************************* */
-void Values::insert(const Values& values) {
-  for(const_iterator key_value = values.begin(); key_value != values.end(); ++key_value) {
-    Key key = key_value->key; // Non-const duplicate to deal with non-const insert argument
-    insert(key, key_value->value);
+void Values::insert(const Values& other) {
+  for (auto key_value = other.values_.begin();
+       key_value != other.values_.end(); ++key_value) {
+    insert(key_value->first, *(key_value->second));
   }
 }

 /* ************************************************************************* */
+#ifdef GTSAM_ALLOW_DEPRECATED_SINCE_V42
 std::pair<Values::iterator, bool> Values::tryInsert(Key j, const Value& value) {
   std::pair<KeyValueMap::iterator, bool> result = values_.insert(j, value.clone_());
   return std::make_pair(boost::make_transform_iterator(result.first, &make_deref_pair), result.second);
 }
+#endif

 /* ************************************************************************* */
 void Values::update(Key j, const Value& val) {

@@ -165,9 +192,10 @@ namespace gtsam {
 }

 /* ************************************************************************* */
-void Values::update(const Values& values) {
-  for(const_iterator key_value = values.begin(); key_value != values.end(); ++key_value) {
-    this->update(key_value->key, key_value->value);
+void Values::update(const Values& other) {
+  for (auto key_value = other.values_.begin();
+       key_value != other.values_.end(); ++key_value) {
+    this->update(key_value->first, *(key_value->second));
   }
 }

@@ -183,10 +211,10 @@ namespace gtsam {
 }

 /* ************************************************************************ */
-void Values::insert_or_assign(const Values& values) {
-  for (const_iterator key_value = values.begin(); key_value != values.end();
-       ++key_value) {
-    this->insert_or_assign(key_value->key, key_value->value);
+void Values::insert_or_assign(const Values& other) {
+  for (auto key_value = other.values_.begin();
+       key_value != other.values_.end(); ++key_value) {
+    this->insert_or_assign(key_value->first, *(key_value->second));
   }
 }

@@ -202,8 +230,16 @@ namespace gtsam {
 KeyVector Values::keys() const {
   KeyVector result;
   result.reserve(size());
-  for(const_iterator key_value = begin(); key_value != end(); ++key_value)
-    result.push_back(key_value->key);
+  for(const auto& key_value: values_)
+    result.push_back(key_value.first);
   return result;
 }

+/* ************************************************************************* */
+KeySet Values::keySet() const {
+  KeySet result;
+  for(const auto& key_value: values_)
+    result.insert(key_value.first);
+  return result;
+}
+
@@ -217,8 +253,17 @@ namespace gtsam {
 /* ************************************************************************* */
 size_t Values::dim() const {
   size_t result = 0;
-  for(const auto key_value: *this) {
-    result += key_value.value.dim();
+  for (const auto key_value : values_) {
+    result += key_value->second->dim();
   }
   return result;
 }

+/* ************************************************************************* */
+std::map<Key,size_t> Values::dims() const {
+  std::map<Key,size_t> result;
+  for (const auto key_value : values_) {
+    result.emplace(key_value->first, key_value->second->dim());
+  }
+  return result;
+}
+
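The new dims() complements the existing dim(): summing the per-key entries of dims() should reproduce dim(). A small sanity-check sketch under that assumption:

#include <numeric>
#include <gtsam/nonlinear/Values.h>

size_t totalDim(const gtsam::Values& values) {
  const auto dims = values.dims();  // std::map<Key, size_t>
  // Expected to equal values.dim().
  return std::accumulate(
      dims.begin(), dims.end(), size_t(0),
      [](size_t sum, const std::pair<const gtsam::Key, size_t>& kv) {
        return sum + kv.second;
      });
}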
@@ -226,8 +271,8 @@ namespace gtsam {
 /* ************************************************************************* */
 VectorValues Values::zeroVectors() const {
   VectorValues result;
-  for(const auto key_value: *this)
-    result.insert(key_value.key, Vector::Zero(key_value.value.dim()));
+  for (const auto key_value : values_)
+    result.insert(key_value->first, Vector::Zero(key_value->second->dim()));
   return result;
 }

@@ -28,10 +28,10 @@
 #include <gtsam/base/GenericValue.h>
 #include <gtsam/base/VectorSpace.h>
 #include <gtsam/inference/Key.h>
-#include <boost/iterator/transform_iterator.hpp>
 #include <boost/ptr_container/serialize_ptr_map.hpp>
 #include <boost/shared_ptr.hpp>
+#ifdef GTSAM_ALLOW_DEPRECATED_SINCE_V42
+#include <boost/iterator/transform_iterator.hpp>
+#include <boost/iterator/filter_iterator.hpp>
+#endif

@@ -81,10 +81,6 @@ namespace gtsam {
     // The member to store the values, see just above
     KeyValueMap values_;

-    // Types obtained by iterating
-    typedef KeyValueMap::const_iterator::value_type ConstKeyValuePtrPair;
-    typedef KeyValueMap::iterator::value_type KeyValuePtrPair;
-
   public:

     /// A shared_ptr to this class

@@ -110,22 +106,6 @@ namespace gtsam {
       ConstKeyValuePair(const KeyValuePair& kv) : key(kv.key), value(kv.value) {}
     };

-    /// Mutable forward iterator, with value type KeyValuePair
-    typedef boost::transform_iterator<
-        std::function<KeyValuePair(const KeyValuePtrPair&)>, KeyValueMap::iterator> iterator;
-
-    /// Const forward iterator, with value type ConstKeyValuePair
-    typedef boost::transform_iterator<
-        std::function<ConstKeyValuePair(const ConstKeyValuePtrPair&)>, KeyValueMap::const_iterator> const_iterator;
-
-    /// Mutable reverse iterator, with value type KeyValuePair
-    typedef boost::transform_iterator<
-        std::function<KeyValuePair(const KeyValuePtrPair&)>, KeyValueMap::reverse_iterator> reverse_iterator;
-
-    /// Const reverse iterator, with value type ConstKeyValuePair
-    typedef boost::transform_iterator<
-        std::function<ConstKeyValuePair(const ConstKeyValuePtrPair&)>, KeyValueMap::const_reverse_iterator> const_reverse_iterator;
-
     typedef KeyValuePair value_type;

     /** Default constructor creates an empty Values class */
@@ -191,47 +171,24 @@ namespace gtsam {
     template<typename ValueType>
     boost::optional<const ValueType&> exists(Key j) const;

-    /** Find an element by key, returning an iterator, or end() if the key was
-     * not found. */
-    iterator find(Key j) { return boost::make_transform_iterator(values_.find(j), &make_deref_pair); }
-
-    /** Find an element by key, returning an iterator, or end() if the key was
-     * not found. */
-    const_iterator find(Key j) const { return boost::make_transform_iterator(values_.find(j), &make_const_deref_pair); }
-
-    /** Find the element greater than or equal to the specified key. */
-    iterator lower_bound(Key j) { return boost::make_transform_iterator(values_.lower_bound(j), &make_deref_pair); }
-
-    /** Find the element greater than or equal to the specified key. */
-    const_iterator lower_bound(Key j) const { return boost::make_transform_iterator(values_.lower_bound(j), &make_const_deref_pair); }
-
-    /** Find the lowest-ordered element greater than the specified key. */
-    iterator upper_bound(Key j) { return boost::make_transform_iterator(values_.upper_bound(j), &make_deref_pair); }
-
-    /** Find the lowest-ordered element greater than the specified key. */
-    const_iterator upper_bound(Key j) const { return boost::make_transform_iterator(values_.upper_bound(j), &make_const_deref_pair); }
-
     /** The number of variables in this config */
     size_t size() const { return values_.size(); }

     /** whether the config is empty */
     bool empty() const { return values_.empty(); }

-    const_iterator begin() const { return boost::make_transform_iterator(values_.begin(), &make_const_deref_pair); }
-    const_iterator end() const { return boost::make_transform_iterator(values_.end(), &make_const_deref_pair); }
-    iterator begin() { return boost::make_transform_iterator(values_.begin(), &make_deref_pair); }
-    iterator end() { return boost::make_transform_iterator(values_.end(), &make_deref_pair); }
-    const_reverse_iterator rbegin() const { return boost::make_transform_iterator(values_.rbegin(), &make_const_deref_pair); }
-    const_reverse_iterator rend() const { return boost::make_transform_iterator(values_.rend(), &make_const_deref_pair); }
-    reverse_iterator rbegin() { return boost::make_transform_iterator(values_.rbegin(), &make_deref_pair); }
-    reverse_iterator rend() { return boost::make_transform_iterator(values_.rend(), &make_deref_pair); }
-
     /// @name Manifold Operations
     /// @{

     /** Add a delta config to current config and returns a new config */
     Values retract(const VectorValues& delta) const;

+    /**
+     * Retract, but only for Keys appearing in \c mask. In-place.
+     * \param mask Mask on Keys where to apply retract.
+     */
+    void retractMasked(const VectorValues& delta, const KeySet& mask);
+
     /** Get a delta config about a linearization point c0 (*this) */
     VectorValues localCoordinates(const Values& cp) const;

@@ -252,12 +209,6 @@ namespace gtsam {
     /// version for double
     void insertDouble(Key j, double c) { insert<double>(j,c); }

-    /** insert that mimics the STL map insert - if the value already exists, the map is not modified
-     * and an iterator to the existing value is returned, along with 'false'. If the value did not
-     * exist, it is inserted and an iterator pointing to the new element, along with 'true', is
-     * returned. */
-    std::pair<iterator, bool> tryInsert(Key j, const Value& value);
-
     /** single element change of existing element */
     void update(Key j, const Value& val);

@@ -288,11 +239,16 @@ namespace gtsam {
     void erase(Key j);

     /**
-     * Returns a set of keys in the config
+     * Returns a vector of keys in the config.
      * Note: by construction, the list is ordered
      */
     KeyVector keys() const;

+    /**
+     * Returns a set of keys in the config.
+     */
+    KeySet keySet() const;
+
     /** Replace all keys and variables */
     Values& operator=(const Values& rhs);

@@ -305,6 +261,9 @@ namespace gtsam {
     /** Compute the total dimensionality of all values (\f$ O(n) \f$) */
     size_t dim() const;

+    /** Return all dimensions in a map (\f$ O(n log n) \f$) */
+    std::map<Key,size_t> dims() const;
+
     /** Return a VectorValues of zero vectors for each variable in this Values */
     VectorValues zeroVectors() const;

@@ -312,8 +271,8 @@ namespace gtsam {
     template<class ValueType>
     size_t count() const {
       size_t i = 0;
-      for (const auto key_value : *this) {
-        if (dynamic_cast<const GenericValue<ValueType>*>(&key_value.value))
+      for (const auto key_value : values_) {
+        if (dynamic_cast<const GenericValue<ValueType>*>(key_value.second))
           ++i;
       }
       return i;
@@ -343,6 +302,67 @@ namespace gtsam {
     extract(const std::function<bool(Key)>& filterFcn = &_truePredicate<Key>) const;

+#ifdef GTSAM_ALLOW_DEPRECATED_SINCE_V42
+    // Types obtained by iterating
+    typedef KeyValueMap::const_iterator::value_type ConstKeyValuePtrPair;
+    typedef KeyValueMap::iterator::value_type KeyValuePtrPair;
+
+    /** insert that mimics the STL map insert - if the value already exists, the map is not modified
+     * and an iterator to the existing value is returned, along with 'false'. If the value did not
+     * exist, it is inserted and an iterator pointing to the new element, along with 'true', is
+     * returned. */
+    std::pair<iterator, bool> tryInsert(Key j, const Value& value);
+
+    /// Mutable forward iterator, with value type KeyValuePair
+    typedef boost::transform_iterator<
+        std::function<KeyValuePair(const KeyValuePtrPair&)>, KeyValueMap::iterator> iterator;
+
+    /// Const forward iterator, with value type ConstKeyValuePair
+    typedef boost::transform_iterator<
+        std::function<ConstKeyValuePair(const ConstKeyValuePtrPair&)>, KeyValueMap::const_iterator> const_iterator;
+
+    /// Mutable reverse iterator, with value type KeyValuePair
+    typedef boost::transform_iterator<
+        std::function<KeyValuePair(const KeyValuePtrPair&)>, KeyValueMap::reverse_iterator> reverse_iterator;
+
+    /// Const reverse iterator, with value type ConstKeyValuePair
+    typedef boost::transform_iterator<
+        std::function<ConstKeyValuePair(const ConstKeyValuePtrPair&)>, KeyValueMap::const_reverse_iterator> const_reverse_iterator;
+
+    static ConstKeyValuePair make_const_deref_pair(const KeyValueMap::const_iterator::value_type& key_value) {
+      return ConstKeyValuePair(key_value.first, *key_value.second); }
+
+    static KeyValuePair make_deref_pair(const KeyValueMap::iterator::value_type& key_value) {
+      return KeyValuePair(key_value.first, *key_value.second); }
+
+    const_iterator begin() const { return boost::make_transform_iterator(values_.begin(), &make_const_deref_pair); }
+    const_iterator end() const { return boost::make_transform_iterator(values_.end(), &make_const_deref_pair); }
+    iterator begin() { return boost::make_transform_iterator(values_.begin(), &make_deref_pair); }
+    iterator end() { return boost::make_transform_iterator(values_.end(), &make_deref_pair); }
+    const_reverse_iterator rbegin() const { return boost::make_transform_iterator(values_.rbegin(), &make_const_deref_pair); }
+    const_reverse_iterator rend() const { return boost::make_transform_iterator(values_.rend(), &make_const_deref_pair); }
+    reverse_iterator rbegin() { return boost::make_transform_iterator(values_.rbegin(), &make_deref_pair); }
+    reverse_iterator rend() { return boost::make_transform_iterator(values_.rend(), &make_deref_pair); }
+
+    /** Find an element by key, returning an iterator, or end() if the key was
+     * not found. */
+    iterator find(Key j) { return boost::make_transform_iterator(values_.find(j), &make_deref_pair); }
+
+    /** Find an element by key, returning an iterator, or end() if the key was
+     * not found. */
+    const_iterator find(Key j) const { return boost::make_transform_iterator(values_.find(j), &make_const_deref_pair); }
+
+    /** Find the element greater than or equal to the specified key. */
+    iterator lower_bound(Key j) { return boost::make_transform_iterator(values_.lower_bound(j), &make_deref_pair); }
+
+    /** Find the element greater than or equal to the specified key. */
+    const_iterator lower_bound(Key j) const { return boost::make_transform_iterator(values_.lower_bound(j), &make_const_deref_pair); }
+
+    /** Find the lowest-ordered element greater than the specified key. */
+    iterator upper_bound(Key j) { return boost::make_transform_iterator(values_.upper_bound(j), &make_deref_pair); }
+
+    /** Find the lowest-ordered element greater than the specified key. */
+    const_iterator upper_bound(Key j) const { return boost::make_transform_iterator(values_.upper_bound(j), &make_const_deref_pair); }
+
     /** A filtered view of a Values, returned from Values::filter. */
     template <class ValueType = Value>
     class Filtered;

@@ -395,12 +415,6 @@ namespace gtsam {
       ar & BOOST_SERIALIZATION_NVP(values_);
     }

-    static ConstKeyValuePair make_const_deref_pair(const KeyValueMap::const_iterator::value_type& key_value) {
-      return ConstKeyValuePair(key_value.first, *key_value.second); }
-
-    static KeyValuePair make_deref_pair(const KeyValueMap::iterator::value_type& key_value) {
-      return KeyValuePair(key_value.first, *key_value.second); }
-
   };

   /* ************************************************************************* */
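Everything inside the guard added above (the transform iterators, begin()/end(), find, the bounds, and tryInsert) now only compiles when GTSAM is configured with the deprecated-API option. The modern replacement for the common tryInsert idiom is an exists() check; a hedged sketch using only the stable API:

#include <gtsam/nonlinear/Values.h>

// Pre-4.2 idiom (needs GTSAM_ALLOW_DEPRECATED_SINCE_V42):
//   auto iter_inserted = theta.tryInsert(key, value);
//   if (!iter_inserted.second) iter_inserted.first->value = value;
// Replacement:
void upsert(gtsam::Values& theta, gtsam::Key key, const gtsam::Value& value) {
  if (theta.exists(key))
    theta.update(key, value);
  else
    theta.insert(key, value);
}

Note that insert_or_assign(key, value), visible earlier in this diff, wraps the same logic in a single call.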
@@ -128,9 +128,10 @@ struct LevenbergMarquardtState : public NonlinearOptimizerState {
     noiseModelCache.resize(0);
     // for each of the variables, add a prior
     damped.reserve(damped.size() + values.size());
-    for (const auto key_value : values) {
-      const Key key = key_value.key;
-      const size_t dim = key_value.value.dim();
+    std::map<Key,size_t> dims = values.dims();
+    for (const auto& key_dim : dims) {
+      const Key& key = key_dim.first;
+      const size_t& dim = key_dim.second;
       const CachedModel* item = getCachedModel(dim);
       damped += boost::make_shared<JacobianFactor>(key, item->A, item->b, item->model);
     }
@@ -350,8 +350,8 @@ TEST(TestLinearContainerFactor, Rekey) {

   // For extra fun lets try linearizing this LCF
   gtsam::Values linearization_pt_rekeyed;
-  for (auto key_val : linearization_pt) {
-    linearization_pt_rekeyed.insert(key_map.at(key_val.key), key_val.value);
+  for (auto key : linearization_pt.keys()) {
+    linearization_pt_rekeyed.insert(key_map.at(key), linearization_pt.at(key));
   }

   // Check independent values since we don't want to unnecessarily sort
@@ -195,6 +195,7 @@ TEST(Values, basic_functions)
   values.insert(6, M1);
   values.insert(8, M2);

+#ifdef GTSAM_ALLOW_DEPRECATED_SINCE_V42
   // find
   EXPECT_LONGS_EQUAL(4, values.find(4)->key);
   EXPECT_LONGS_EQUAL(4, values_c.find(4)->key);

@@ -210,7 +211,7 @@ TEST(Values, basic_functions)
   EXPECT_LONGS_EQUAL(6, values_c.upper_bound(4)->key);
   EXPECT_LONGS_EQUAL(4, values.upper_bound(3)->key);
   EXPECT_LONGS_EQUAL(4, values_c.upper_bound(3)->key);
-
+#endif
 }

 /* ************************************************************************* */

@@ -220,8 +221,8 @@ TEST(Values, retract_full)
   config0.insert(key1, Vector3(1.0, 2.0, 3.0));
   config0.insert(key2, Vector3(5.0, 6.0, 7.0));

-  VectorValues delta {{key1, Vector3(1.0, 1.1, 1.2)},
-                      {key2, Vector3(1.3, 1.4, 1.5)}};
+  const VectorValues delta{{key1, Vector3(1.0, 1.1, 1.2)},
+                           {key2, Vector3(1.3, 1.4, 1.5)}};

   Values expected;
   expected.insert(key1, Vector3(2.0, 3.1, 4.2));

@@ -238,7 +239,7 @@ TEST(Values, retract_partial)
   config0.insert(key1, Vector3(1.0, 2.0, 3.0));
   config0.insert(key2, Vector3(5.0, 6.0, 7.0));

-  VectorValues delta {{key2, Vector3(1.3, 1.4, 1.5)}};
+  const VectorValues delta{{key2, Vector3(1.3, 1.4, 1.5)}};

   Values expected;
   expected.insert(key1, Vector3(1.0, 2.0, 3.0));

@@ -248,6 +249,24 @@ TEST(Values, retract_partial)
   CHECK(assert_equal(expected, Values(config0, delta)));
 }

+/* ************************************************************************* */
+TEST(Values, retract_masked)
+{
+  Values config0;
+  config0.insert(key1, Vector3(1.0, 2.0, 3.0));
+  config0.insert(key2, Vector3(5.0, 6.0, 7.0));
+
+  const VectorValues delta{{key1, Vector3(1.0, 1.1, 1.2)},
+                           {key2, Vector3(1.3, 1.4, 1.5)}};
+
+  Values expected;
+  expected.insert(key1, Vector3(1.0, 2.0, 3.0));
+  expected.insert(key2, Vector3(6.3, 7.4, 8.5));
+
+  config0.retractMasked(delta, {key2});
+  CHECK(assert_equal(expected, config0));
+}
+
 /* ************************************************************************* */
 TEST(Values, equals)
 {
@@ -62,10 +62,10 @@ static Values computePoses(const Values& initialRot,

   // Upgrade rotations to full poses
   Values initialPose;
-  for (const auto key_value : initialRot) {
-    Key key = key_value.key;
-    const auto& rot = initialRot.at<typename Pose::Rotation>(key);
-    Pose initializedPose = Pose(rot, origin);
+  for (const auto& key_rot : initialRot.extract<typename Pose::Rotation>()) {
+    const Key& key = key_rot.first;
+    const auto& rot = key_rot.second;
+    const Pose initializedPose(rot, origin);
     initialPose.insert(key, initializedPose);
   }

@@ -82,14 +82,14 @@ static Values computePoses(const Values& initialRot,
     params.setVerbosity("TERMINATION");
   }
   GaussNewtonOptimizer optimizer(*posegraph, initialPose, params);
-  Values GNresult = optimizer.optimize();
+  const Values GNresult = optimizer.optimize();

   // put into Values structure
   Values estimate;
-  for (const auto key_value : GNresult) {
-    Key key = key_value.key;
+  for (const auto& key_pose : GNresult.extract<Pose>()) {
+    const Key& key = key_pose.first;
     if (key != kAnchorKey) {
-      const Pose& pose = GNresult.at<Pose>(key);
+      const Pose& pose = key_pose.second;
       estimate.insert(key, pose);
     }
   }
@@ -122,12 +122,12 @@ Values InitializePose3::computeOrientationsGradient(
   gttic(InitializePose3_computeOrientationsGradient);

   // this works on the inverse rotations, according to Tron&Vidal,2011
-  Values inverseRot;
-  inverseRot.insert(initialize::kAnchorKey, Rot3());
-  for(const auto key_value: givenGuess) {
-    Key key = key_value.key;
-    const Pose3& pose = givenGuess.at<Pose3>(key);
-    inverseRot.insert(key, pose.rotation().inverse());
+  std::map<Key,Rot3> inverseRot;
+  inverseRot.emplace(initialize::kAnchorKey, Rot3());
+  for(const auto& key_pose: givenGuess.extract<Pose3>()) {
+    const Key& key = key_pose.first;
+    const Pose3& pose = key_pose.second;
+    inverseRot.emplace(key, pose.rotation().inverse());
   }

   // Create the map of edges incident on each node

@@ -138,10 +138,8 @@ Values InitializePose3::computeOrientationsGradient(

   // calculate max node degree & allocate gradient
   size_t maxNodeDeg = 0;
-  VectorValues grad;
-  for(const auto key_value: inverseRot) {
-    Key key = key_value.key;
-    grad.insert(key,Z_3x1);
+  for (const auto& key_R : inverseRot) {
+    const Key& key = key_R.first;
     size_t currNodeDeg = (adjEdgesMap.at(key)).size();
     if(currNodeDeg > maxNodeDeg)
       maxNodeDeg = currNodeDeg;

@@ -162,28 +160,29 @@ Values InitializePose3::computeOrientationsGradient(
     //////////////////////////////////////////////////////////////////////////
     // compute the gradient at each node
     maxGrad = 0;
-    for (const auto key_value : inverseRot) {
-      Key key = key_value.key;
+    VectorValues grad;
+    for (const auto& key_R : inverseRot) {
+      const Key& key = key_R.first;
+      const Rot3& Ri = key_R.second;
       Vector gradKey = Z_3x1;
       // collect the gradient for each edge incident on key
       for (const size_t& factorId : adjEdgesMap.at(key)) {
-        Rot3 Rij = factorId2RotMap.at(factorId);
-        Rot3 Ri = inverseRot.at<Rot3>(key);
+        const Rot3& Rij = factorId2RotMap.at(factorId);
         auto factor = pose3Graph.at(factorId);
         const auto& keys = factor->keys();
         if (key == keys[0]) {
           Key key1 = keys[1];
-          Rot3 Rj = inverseRot.at<Rot3>(key1);
+          const Rot3& Rj = inverseRot.at(key1);
           gradKey = gradKey + gradientTron(Ri, Rij * Rj, a, b);
         } else if (key == keys[1]) {
           Key key0 = keys[0];
-          Rot3 Rj = inverseRot.at<Rot3>(key0);
+          const Rot3& Rj = inverseRot.at(key0);
           gradKey = gradKey + gradientTron(Ri, Rij.between(Rj), a, b);
         } else {
           cout << "Error in gradient computation" << endl;
         }
       } // end of i-th gradient computation
-      grad.at(key) = stepsize * gradKey;
+      grad.insert(key, stepsize * gradKey);

       double normGradKey = (gradKey).norm();
       if(normGradKey>maxGrad)

@@ -192,8 +191,12 @@ Values InitializePose3::computeOrientationsGradient(

     //////////////////////////////////////////////////////////////////////////
     // update estimates
-    inverseRot = inverseRot.retract(grad);
+    for (auto& key_R : inverseRot) {
+      const Key& key = key_R.first;
+      Rot3& Ri = key_R.second;
+      Ri = Ri.retract(grad.at(key));
+    }

     //////////////////////////////////////////////////////////////////////////
     // check stopping condition
     if (it>20 && maxGrad < 5e-3)

@@ -201,12 +204,13 @@ Values InitializePose3::computeOrientationsGradient(
   } // enf of gradient iterations

   // Return correct rotations
-  const Rot3& Rref = inverseRot.at<Rot3>(initialize::kAnchorKey); // This will be set to the identity as so far we included no prior
+  const Rot3& Rref = inverseRot.at(initialize::kAnchorKey); // This will be set to the identity as so far we included no prior
   Values estimateRot;
-  for(const auto key_value: inverseRot) {
-    Key key = key_value.key;
+  for (const auto key_R : inverseRot) {
+    const Key& key = key_R.first;
+    const Rot3& Ri = key_R.second;
     if (key != initialize::kAnchorKey) {
-      const Rot3& R = inverseRot.at<Rot3>(key);
+      const Rot3& R = inverseRot.at(key);
       if(setRefFrame)
         estimateRot.insert(key, Rref.compose(R.inverse()));
       else
@@ -586,9 +586,9 @@ void save2D(const NonlinearFactorGraph &graph, const Values &config,
   fstream stream(filename.c_str(), fstream::out);

   // save poses
-  for (const auto key_value : config) {
-    const Pose2 &pose = key_value.value.cast<Pose2>();
-    stream << "VERTEX2 " << key_value.key << " " << pose.x() << " " << pose.y()
+  for (const auto &key_pose : config.extract<Pose2>()) {
+    const Pose2 &pose = key_pose.second;
+    stream << "VERTEX2 " << key_pose.first << " " << pose.x() << " " << pose.y()
            << " " << pose.theta() << endl;
   }

@@ -635,45 +635,33 @@ void writeG2o(const NonlinearFactorGraph &graph, const Values &estimate,
   auto index = [](gtsam::Key key) { return Symbol(key).index(); };

   // save 2D poses
-  for (const auto key_value : estimate) {
-    auto p = dynamic_cast<const GenericValue<Pose2> *>(&key_value.value);
-    if (!p)
-      continue;
-    const Pose2 &pose = p->value();
-    stream << "VERTEX_SE2 " << index(key_value.key) << " " << pose.x() << " "
+  for (const auto &pair : estimate.extract<Pose2>()) {
+    const Pose2 &pose = pair.second;
+    stream << "VERTEX_SE2 " << index(pair.first) << " " << pose.x() << " "
            << pose.y() << " " << pose.theta() << endl;
   }

   // save 3D poses
-  for (const auto key_value : estimate) {
-    auto p = dynamic_cast<const GenericValue<Pose3> *>(&key_value.value);
-    if (!p)
-      continue;
-    const Pose3 &pose = p->value();
+  for (const auto &pair : estimate.extract<Pose3>()) {
+    const Pose3 &pose = pair.second;
     const Point3 t = pose.translation();
     const auto q = pose.rotation().toQuaternion();
-    stream << "VERTEX_SE3:QUAT " << index(key_value.key) << " " << t.x() << " "
+    stream << "VERTEX_SE3:QUAT " << index(pair.first) << " " << t.x() << " "
            << t.y() << " " << t.z() << " " << q.x() << " " << q.y() << " "
            << q.z() << " " << q.w() << endl;
   }

   // save 2D landmarks
-  for (const auto key_value : estimate) {
-    auto p = dynamic_cast<const GenericValue<Point2> *>(&key_value.value);
-    if (!p)
-      continue;
-    const Point2 &point = p->value();
-    stream << "VERTEX_XY " << index(key_value.key) << " " << point.x() << " "
+  for (const auto &pair : estimate.extract<Point2>()) {
+    const Point2 &point = pair.second;
+    stream << "VERTEX_XY " << index(pair.first) << " " << point.x() << " "
            << point.y() << endl;
   }

   // save 3D landmarks
-  for (const auto key_value : estimate) {
-    auto p = dynamic_cast<const GenericValue<Point3> *>(&key_value.value);
-    if (!p)
-      continue;
-    const Point3 &point = p->value();
-    stream << "VERTEX_TRACKXYZ " << index(key_value.key) << " " << point.x()
+  for (const auto &pair : estimate.extract<Point3>()) {
+    const Point3 &point = pair.second;
+    stream << "VERTEX_TRACKXYZ " << index(pair.first) << " " << point.x()
            << " " << point.y() << " " << point.z() << endl;
   }

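The four export loops above share one shape: one extract<T>() pass per vertex type, with no per-element dynamic_cast. A condensed sketch of the same pattern for a hypothetical exporter (function name and format strings are illustrative, not part of the GTSAM API):

#include <ostream>
#include <gtsam/geometry/Point2.h>
#include <gtsam/geometry/Pose2.h>
#include <gtsam/nonlinear/Values.h>

void exportVertices(const gtsam::Values& estimate, std::ostream& os) {
  // One typed pass per vertex type replaces one dynamic_cast per element.
  for (const auto& p : estimate.extract<gtsam::Pose2>())
    os << "VERTEX_SE2 " << p.first << " " << p.second.x() << " "
       << p.second.y() << " " << p.second.theta() << "\n";
  for (const auto& p : estimate.extract<gtsam::Point2>())
    os << "VERTEX_XY " << p.first << " " << p.second.x() << " "
       << p.second.y() << "\n";
}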
@@ -283,9 +283,10 @@ TEST( Lago, largeGraphNoisy_orientations ) {
   Values::shared_ptr expected;
   boost::tie(gmatlab, expected) = readG2o(matlabFile);

-  for(const auto key_val: *expected){
-    Key k = key_val.key;
-    EXPECT(assert_equal(expected->at<Pose2>(k), actual.at<Pose2>(k), 1e-5));
+  for(const auto key_pose: expected->extract<Pose2>()){
+    const Key& k = key_pose.first;
+    const Pose2& pose = key_pose.second;
+    EXPECT(assert_equal(pose, actual.at<Pose2>(k), 1e-5));
   }
 }

@@ -309,9 +310,10 @@ TEST( Lago, largeGraphNoisy ) {
   Values::shared_ptr expected;
   boost::tie(gmatlab, expected) = readG2o(matlabFile);

-  for(const auto key_val: *expected){
-    Key k = key_val.key;
-    EXPECT(assert_equal(expected->at<Pose2>(k), actual.at<Pose2>(k), 1e-2));
+  for(const auto key_pose: expected->extract<Pose2>()){
+    const Key& k = key_pose.first;
+    const Pose2& pose = key_pose.second;
+    EXPECT(assert_equal(pose, actual.at<Pose2>(k), 1e-2));
   }
 }
@@ -308,12 +308,12 @@ int main(int argc, char** argv) {
   // And to demonstrate the fixed-lag aspect, print the keys contained in each smoother after 3.0 seconds
   cout << "After 15.0 seconds, each version contains to the following keys:" << endl;
   cout << "  Concurrent Filter Keys: " << endl;
-  for(const auto key_value: concurrentFilter.getLinearizationPoint()) {
-    cout << setprecision(5) << "    Key: " << key_value.key << endl;
+  for(const auto key: concurrentFilter.getLinearizationPoint().keys()) {
+    cout << setprecision(5) << "    Key: " << key << endl;
   }
   cout << "  Concurrent Smoother Keys: " << endl;
-  for(const auto key_value: concurrentSmoother.getLinearizationPoint()) {
-    cout << setprecision(5) << "    Key: " << key_value.key << endl;
+  for(const auto key: concurrentSmoother.getLinearizationPoint().keys()) {
+    cout << setprecision(5) << "    Key: " << key << endl;
   }
   cout << "  Fixed-Lag Smoother Keys: " << endl;
   for(const auto& key_timestamp: fixedlagSmoother.timestamps()) {
@@ -59,8 +59,8 @@ FixedLagSmoother::Result BatchFixedLagSmoother::update(
   // Add the new variables to theta
   theta_.insert(newTheta);
   // Add new variables to the end of the ordering
-  for (const auto key_value : newTheta) {
-    ordering_.push_back(key_value.key);
+  for (const auto key : newTheta.keys()) {
+    ordering_.push_back(key);
   }
   // Augment Delta
   delta_.insert(newTheta.zeroVectors());

@@ -161,8 +161,8 @@ void BatchFixedLagSmoother::eraseKeys(const KeyVector& keys) {
     factorIndex_.erase(key);

     // Erase the key from the set of linearized keys
-    if (linearKeys_.exists(key)) {
-      linearKeys_.erase(key);
+    if (linearValues_.exists(key)) {
+      linearValues_.erase(key);
     }
   }

@@ -186,8 +186,8 @@ FixedLagSmoother::Result BatchFixedLagSmoother::optimize() {

   // Create output result structure
   Result result;
-  result.nonlinearVariables = theta_.size() - linearKeys_.size();
-  result.linearVariables = linearKeys_.size();
+  result.nonlinearVariables = theta_.size() - linearValues_.size();
+  result.linearVariables = linearValues_.size();

   // Set optimization parameters
   double lambda = parameters_.lambdaInitial;

@@ -265,10 +265,10 @@ FixedLagSmoother::Result BatchFixedLagSmoother::optimize() {
         // Reset the deltas to zeros
         delta_.setZero();
         // Put the linearization points and deltas back for specific variables
-        if (enforceConsistency_ && (linearKeys_.size() > 0)) {
-          theta_.update(linearKeys_);
-          for(const auto key_value: linearKeys_) {
-            delta_.at(key_value.key) = newDelta.at(key_value.key);
+        if (enforceConsistency_ && (linearValues_.size() > 0)) {
+          theta_.update(linearValues_);
+          for(const auto key: linearValues_.keys()) {
+            delta_.at(key) = newDelta.at(key);
           }
         }
         // Decrease lambda for next time
@@ -136,8 +136,8 @@ protected:
   /** The current linearization point **/
   Values theta_;

-  /** The set of keys involved in current linear factors. These keys should not be relinearized. **/
-  Values linearKeys_;
+  /** The set of values involved in current linear factors. **/
+  Values linearValues_;

   /** The current ordering */
   Ordering ordering_;

@@ -139,8 +139,8 @@ ConcurrentBatchFilter::Result ConcurrentBatchFilter::update(const NonlinearFacto
   // Add the new variables to theta
   theta_.insert(newTheta);
   // Add new variables to the end of the ordering
-  for(const auto key_value: newTheta) {
-    ordering_.push_back(key_value.key);
+  for (const auto key : newTheta.keys()) {
+    ordering_.push_back(key);
   }
   // Augment Delta
   delta_.insert(newTheta.zeroVectors());
@@ -221,10 +221,7 @@ void ConcurrentBatchFilter::synchronize(const NonlinearFactorGraph& smootherSumm
   if(debug) { PrintNonlinearFactorGraph(smootherSummarization_, "ConcurrentBatchFilter::synchronize ", "Updated Smoother Summarization:", DefaultKeyFormatter); }

   // Find the set of new separator keys
-  KeySet newSeparatorKeys;
-  for(const auto key_value: separatorValues_) {
-    newSeparatorKeys.insert(key_value.key);
-  }
+  const KeySet newSeparatorKeys = separatorValues_.keySet();

   if(debug) { PrintKeys(newSeparatorKeys, "ConcurrentBatchFilter::synchronize ", "Current Separator Keys:"); }

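Values::keySet(), also added by this commit, returns the keys as a KeySet, removing this recurring copy loop. A one-line sketch of the replacement:

#include <gtsam/nonlinear/Values.h>

gtsam::KeySet separatorKeySet(const gtsam::Values& separatorValues) {
  // One call replaces: KeySet s; for (key : values.keys()) s.insert(key);
  return separatorValues.keySet();
}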
@@ -236,9 +233,9 @@ void ConcurrentBatchFilter::synchronize(const NonlinearFactorGraph& smootherSumm
     graph.push_back(smootherShortcut_);
     Values values;
     values.insert(smootherSummarizationValues);
-    for(const auto key_value: separatorValues_) {
-      if(!values.exists(key_value.key)) {
-        values.insert(key_value.key, key_value.value);
+    for(const auto key: newSeparatorKeys) {
+      if(!values.exists(key)) {
+        values.insert(key, separatorValues_.at(key));
       }
     }
     // Calculate the summarized factor on just the new separator keys

@@ -471,8 +468,8 @@ void ConcurrentBatchFilter::optimize(const NonlinearFactorGraph& factors, Values
         // Put the linearization points and deltas back for specific variables
         if(linearValues.size() > 0) {
          theta.update(linearValues);
-          for(const auto key_value: linearValues) {
-            delta.at(key_value.key) = newDelta.at(key_value.key);
+          for(const auto key: linearValues.keys()) {
+            delta.at(key) = newDelta.at(key);
           }
         }

@@ -574,9 +571,7 @@ void ConcurrentBatchFilter::moveSeparator(const FastList<Key>& keysToMove) {

   // Calculate the set of new separator keys: AffectedKeys + PreviousSeparatorKeys - KeysToMove
   KeySet newSeparatorKeys = removedFactors.keys();
-  for(const auto key_value: separatorValues_) {
-    newSeparatorKeys.insert(key_value.key);
-  }
+  newSeparatorKeys.merge(separatorValues_.keySet());
   for(Key key: keysToMove) {
     newSeparatorKeys.erase(key);
   }
@@ -61,8 +61,8 @@ ConcurrentBatchSmoother::Result ConcurrentBatchSmoother::update(const NonlinearF
   theta_.insert(newTheta);

   // Add new variables to the end of the ordering
-  for(const auto key_value: newTheta) {
-    ordering_.push_back(key_value.key);
+  for(const auto key: newTheta.keys()) {
+    ordering_.push_back(key);
   }

   // Augment Delta

@@ -135,26 +135,30 @@ void ConcurrentBatchSmoother::synchronize(const NonlinearFactorGraph& smootherFa
   removeFactors(filterSummarizationSlots_);

   // Insert new linpoints into the values, augment the ordering, and store new dims to augment delta
-  for(const auto key_value: smootherValues) {
-    std::pair<Values::iterator, bool> iter_inserted = theta_.tryInsert(key_value.key, key_value.value);
-    if(iter_inserted.second) {
-      // If the insert succeeded
-      ordering_.push_back(key_value.key);
-      delta_.insert(key_value.key, Vector::Zero(key_value.value.dim()));
+  for(const auto key: smootherValues.keys()) {
+    if(!theta_.exists(key)) {
+      // If this a new key for theta_, also add to ordering and delta.
+      const auto& value = smootherValues.at(key);
+      delta_.insert(key, Vector::Zero(value.dim()));
+      theta_.insert(key, value);
+      ordering_.push_back(key);
     } else {
-      // If the element already existed in theta_
-      iter_inserted.first->value = key_value.value;
+      // If the key already existed in theta_, just update.
+      const auto& value = smootherValues.at(key);
+      theta_.update(key, value);
     }
   }
-  for(const auto key_value: separatorValues) {
-    std::pair<Values::iterator, bool> iter_inserted = theta_.tryInsert(key_value.key, key_value.value);
-    if(iter_inserted.second) {
-      // If the insert succeeded
-      ordering_.push_back(key_value.key);
-      delta_.insert(key_value.key, Vector::Zero(key_value.value.dim()));
+  for(const auto key: separatorValues.keys()) {
+    if(!theta_.exists(key)) {
+      // If this a new key for theta_, also add to ordering and delta.
+      const auto& value = separatorValues.at(key);
+      delta_.insert(key, Vector::Zero(value.dim()));
+      theta_.insert(key, value);
+      ordering_.push_back(key);
     } else {
-      // If the element already existed in theta_
-      iter_inserted.first->value = key_value.value;
+      // If the key already existed in theta_, just update.
+      const auto& value = separatorValues.at(key);
+      theta_.update(key, value);
     }
   }

@@ -322,8 +326,8 @@ ConcurrentBatchSmoother::Result ConcurrentBatchSmoother::optimize() {
         // Put the linearization points and deltas back for specific variables
         if(separatorValues_.size() > 0) {
           theta_.update(separatorValues_);
-          for(const auto key_value: separatorValues_) {
-            delta_.at(key_value.key) = newDelta.at(key_value.key);
+          for(const auto key: separatorValues_.keys()) {
+            delta_.at(key) = newDelta.at(key);
           }
         }

@@ -371,10 +375,7 @@ void ConcurrentBatchSmoother::updateSmootherSummarization() {
   }

   // Get the set of separator keys
-  gtsam::KeySet separatorKeys;
-  for(const auto key_value: separatorValues_) {
-    separatorKeys.insert(key_value.key);
-  }
+  const KeySet separatorKeys = separatorValues_.keySet();

   // Calculate the marginal factors on the separator
   smootherSummarization_ = internal::calculateMarginalFactors(graph, theta_, separatorKeys, parameters_.getEliminationFunction());
@@ -69,14 +69,14 @@ ConcurrentIncrementalFilter::Result ConcurrentIncrementalFilter::update(const No
   int group = 1;
   // Set all existing variables to Group1
   if(isam2_.getLinearizationPoint().size() > 0) {
-    for(const auto key_value: isam2_.getLinearizationPoint()) {
-      orderingConstraints->operator[](key_value.key) = group;
+    for(const auto key: isam2_.getLinearizationPoint().keys()) {
+      orderingConstraints->operator[](key) = group;
     }
     ++group;
   }
   // Assign new variables to the root
-  for(const auto key_value: newTheta) {
-    orderingConstraints->operator[](key_value.key) = group;
+  for(const auto key: newTheta.keys()) {
+    orderingConstraints->operator[](key) = group;
   }
   // Set marginalizable variables to Group0
   for(Key key: *keysToMove){

@@ -201,8 +201,8 @@ void ConcurrentIncrementalFilter::synchronize(const NonlinearFactorGraph& smooth

   // Force iSAM2 not to relinearize anything during this iteration
   FastList<Key> noRelinKeys;
-  for(const auto key_value: isam2_.getLinearizationPoint()) {
-    noRelinKeys.push_back(key_value.key);
+  for(const auto key: isam2_.getLinearizationPoint().keys()) {
+    noRelinKeys.push_back(key);
   }

   // Calculate the summarized factor on just the new separator keys

@@ -66,9 +66,9 @@ ConcurrentIncrementalSmoother::Result ConcurrentIncrementalSmoother::update(cons
   // Also, mark the separator keys as fixed linearization points
   FastMap<Key,int> constrainedKeys;
   FastList<Key> noRelinKeys;
-  for(const auto key_value: separatorValues_) {
-    constrainedKeys[key_value.key] = 1;
-    noRelinKeys.push_back(key_value.key);
+  for (const auto key : separatorValues_.keys()) {
+    constrainedKeys[key] = 1;
+    noRelinKeys.push_back(key);
   }

   // Use iSAM2 to perform an update

@@ -82,14 +82,14 @@ ConcurrentIncrementalSmoother::Result ConcurrentIncrementalSmoother::update(cons
   Values values(newTheta);
   // Unfortunately, we must be careful here, as some of the smoother values
   // and/or separator values may have been added previously
-  for(const auto key_value: smootherValues_) {
-    if(!isam2_.getLinearizationPoint().exists(key_value.key)) {
-      values.insert(key_value.key, smootherValues_.at(key_value.key));
+  for(const auto key: smootherValues_.keys()) {
+    if(!isam2_.getLinearizationPoint().exists(key)) {
+      values.insert(key, smootherValues_.at(key));
     }
   }
-  for(const auto key_value: separatorValues_) {
-    if(!isam2_.getLinearizationPoint().exists(key_value.key)) {
-      values.insert(key_value.key, separatorValues_.at(key_value.key));
+  for(const auto key: separatorValues_.keys()) {
+    if(!isam2_.getLinearizationPoint().exists(key)) {
+      values.insert(key, separatorValues_.at(key));
     }
   }

@@ -245,10 +245,7 @@ void ConcurrentIncrementalSmoother::updateSmootherSummarization() {
   }

   // Get the set of separator keys
-  KeySet separatorKeys;
-  for(const auto key_value: separatorValues_) {
-    separatorKeys.insert(key_value.key);
-  }
+  const KeySet separatorKeys = separatorValues_.keySet();

   // Calculate the marginal factors on the separator
   smootherSummarization_ = internal::calculateMarginalFactors(graph, isam2_.getLinearizationPoint(), separatorKeys,
@@ -64,8 +64,8 @@ NonlinearFactorGraph CalculateMarginals(const NonlinearFactorGraph& factorGraph,


   std::set<Key> KeysToKeep;
-  for(const auto key_value: linPoint) { // we cycle over all the keys of factorGraph
-    KeysToKeep.insert(key_value.key);
+  for(const auto key: linPoint.keys()) { // we cycle over all the keys of factorGraph
+    KeysToKeep.insert(key);
   } // so far we are keeping all keys, but we want to delete the ones that we are going to marginalize
   for(Key key: keysToMarginalize) {
     KeysToKeep.erase(key);

@@ -560,8 +560,8 @@ TEST( ConcurrentBatchSmoother, synchronize_3 )
   GaussianFactorGraph::shared_ptr linearFactors = allFactors.linearize(allValues);

   KeySet eliminateKeys = linearFactors->keys();
-  for(const auto key_value: filterSeparatorValues) {
-    eliminateKeys.erase(key_value.key);
+  for(const auto key: filterSeparatorValues.keys()) {
+    eliminateKeys.erase(key);
   }
   KeyVector variables(eliminateKeys.begin(), eliminateKeys.end());
   GaussianFactorGraph result = *linearFactors->eliminatePartialMultifrontal(variables, EliminateCholesky).second;

@@ -80,8 +80,8 @@ NonlinearFactorGraph CalculateMarginals(const NonlinearFactorGraph& factorGraph,


   std::set<Key> KeysToKeep;
-  for(const auto key_value: linPoint) { // we cycle over all the keys of factorGraph
-    KeysToKeep.insert(key_value.key);
+  for(const auto key: linPoint.keys()) { // we cycle over all the keys of factorGraph
+    KeysToKeep.insert(key);
   } // so far we are keeping all keys, but we want to delete the ones that we are going to marginalize
   for(Key key: keysToMarginalize) {
     KeysToKeep.erase(key);

@@ -512,8 +512,8 @@ TEST( ConcurrentIncrementalSmootherDL, synchronize_2 )
   // Values expectedLinearizationPoint = BatchOptimize(allFactors, allValues, 1);
   Values expectedLinearizationPoint = filterSeparatorValues;
   Values actualLinearizationPoint;
-  for(const auto key_value: filterSeparatorValues) {
-    actualLinearizationPoint.insert(key_value.key, smoother.getLinearizationPoint().at(key_value.key));
+  for(const auto key: filterSeparatorValues.keys()) {
+    actualLinearizationPoint.insert(key, smoother.getLinearizationPoint().at(key));
   }
   CHECK(assert_equal(expectedLinearizationPoint, actualLinearizationPoint, 1e-6));
 }

@@ -580,8 +580,8 @@ TEST( ConcurrentIncrementalSmootherDL, synchronize_3 )
   // GaussianBayesNet::shared_ptr GBNsptr = GSS.eliminate();

   KeySet allkeys = LinFactorGraph->keys();
-  for(const auto key_value: filterSeparatorValues) {
-    allkeys.erase(key_value.key);
+  for(const auto key: filterSeparatorValues.keys()) {
+    allkeys.erase(key);
   }
   KeyVector variables(allkeys.begin(), allkeys.end());
   std::pair<GaussianBayesNet::shared_ptr, GaussianFactorGraph::shared_ptr> result = LinFactorGraph->eliminatePartialSequential(variables, EliminateCholesky);

@@ -513,8 +513,8 @@ TEST( ConcurrentIncrementalSmootherGN, synchronize_2 )
   // Values expectedLinearizationPoint = BatchOptimize(allFactors, allValues, 1);
   Values expectedLinearizationPoint = filterSeparatorValues;
   Values actualLinearizationPoint;
-  for(const auto key_value: filterSeparatorValues) {
-    actualLinearizationPoint.insert(key_value.key, smoother.getLinearizationPoint().at(key_value.key));
+  for(const auto key: filterSeparatorValues.keys()) {
+    actualLinearizationPoint.insert(key, smoother.getLinearizationPoint().at(key));
  }
   CHECK(assert_equal(expectedLinearizationPoint, actualLinearizationPoint, 1e-6));
 }

@@ -582,8 +582,7 @@ TEST( ConcurrentIncrementalSmootherGN, synchronize_3 )
   // GaussianBayesNet::shared_ptr GBNsptr = GSS.eliminate();

   KeySet allkeys = LinFactorGraph->keys();
-  for(const auto key_value: filterSeparatorValues)
-    allkeys.erase(key_value.key);
+  for (const auto key : filterSeparatorValues.keys()) allkeys.erase(key);
   KeyVector variables(allkeys.begin(), allkeys.end());
   std::pair<GaussianBayesNet::shared_ptr, GaussianFactorGraph::shared_ptr> result = LinFactorGraph->eliminatePartialSequential(variables, EliminateCholesky);
