From 4b40e6e34632ba199602d4b4c11fc372c2b3aabd Mon Sep 17 00:00:00 2001
From: kartik arcot
Date: Thu, 19 Jan 2023 11:22:52 -0800
Subject: [PATCH] map_values eliminated

---
 gtsam/linear/Errors.cpp                        |  3 +-
 gtsam/linear/Preconditioner.cpp                |  4 +--
 gtsam/linear/VectorValues.cpp                  | 30 +++++++++----------
 gtsam/nonlinear/Expression-inl.h               |  9 ++++--
 .../nonlinear/LevenbergMarquardtOptimizer.cpp  |  6 ++--
 tests/testGaussianFactorGraphB.cpp             |  6 +---
 tests/testGaussianISAM.cpp                     |  5 +---
 tests/testGaussianISAM2.cpp                    | 16 +++++-----
 tests/testNonlinearOptimizer.cpp               |  6 ++--
 9 files changed, 40 insertions(+), 45 deletions(-)

diff --git a/gtsam/linear/Errors.cpp b/gtsam/linear/Errors.cpp
index 92f7bb4b8..d36b647ce 100644
--- a/gtsam/linear/Errors.cpp
+++ b/gtsam/linear/Errors.cpp
@@ -17,7 +17,6 @@
  * @author Christian Potthast
  */
 
-#include
 #include
 #include
 
@@ -28,7 +27,7 @@ namespace gtsam {
 /* ************************************************************************* */
 Errors createErrors(const VectorValues& V) {
   Errors result;
-  for (const Vector& e : V | boost::adaptors::map_values) {
+  for (const auto& [key, e] : V) {
     result.push_back(e);
   }
   return result;
diff --git a/gtsam/linear/Preconditioner.cpp b/gtsam/linear/Preconditioner.cpp
index cb7e36503..84837760c 100644
--- a/gtsam/linear/Preconditioner.cpp
+++ b/gtsam/linear/Preconditioner.cpp
@@ -14,7 +14,6 @@
 #include
 #include
 #include
-#include
 #include
 #include
 
@@ -145,8 +144,9 @@ void BlockJacobiPreconditioner::build(
   /* getting the block diagonals over the factors */
   std::map hessianMap =gfg.hessianBlockDiagonal();
 
-  for (const Matrix& hessian: hessianMap | boost::adaptors::map_values)
+  for (const auto& [key, hessian]: hessianMap) {
     blocks.push_back(hessian);
+  }
 
   /* if necessary, allocating the memory for cacheing the factorization results */
   if ( nnz > bufferSize_ ) {
diff --git a/gtsam/linear/VectorValues.cpp b/gtsam/linear/VectorValues.cpp
index 781e1ff00..2e9dd24d0 100644
--- a/gtsam/linear/VectorValues.cpp
+++ b/gtsam/linear/VectorValues.cpp
@@ -28,7 +28,6 @@ using namespace std;
 namespace gtsam {
 
   using boost::adaptors::transformed;
-  using boost::adaptors::map_values;
   using boost::accumulate;
 
   /* ************************************************************************ */
@@ -129,8 +128,9 @@
   void VectorValues::setZero() {
-    for(Vector& v: values_ | map_values)
-      v.setZero();
+    for(auto& [key, value] : *this) {
+      value.setZero();
+    }
   }
 
   /* ************************************************************************ */
@@ -178,14 +178,15 @@
   Vector VectorValues::vector() const {
     // Count dimensions
     DenseIndex totalDim = 0;
-    for (const Vector& v : *this | map_values) totalDim += v.size();
+    for (const auto& [key, value] : *this)
+      totalDim += value.size();
 
     // Copy vectors
     Vector result(totalDim);
     DenseIndex pos = 0;
-    for (const Vector& v : *this | map_values) {
-      result.segment(pos, v.size()) = v;
-      pos += v.size();
+    for (const auto& [key, value] : *this) {
+      result.segment(pos, value.size()) = value;
+      pos += value.size();
     }
 
     return result;
@@ -196,7 +197,7 @@
   {
     // Count dimensions
     DenseIndex totalDim = 0;
-    for(size_t dim: keys | map_values)
+    for (const auto& [key, dim] : keys)
       totalDim += dim;
     Vector result(totalDim);
     size_t j = 0;
@@ -236,8 +237,6 @@
   {
     if(this->size() != v.size()) throw invalid_argument("VectorValues::dot called with a VectorValues of different structure");
     double result = 0.0;
-    typedef std::tuple ValuePair;
-    using boost::adaptors::map_values;
    auto this_it = this->begin();
    auto v_it = v.begin();
    for(; this_it != this->end(); ++this_it, ++v_it) {
@@ -258,9 +257,9 @@
   /* ************************************************************************ */
   double VectorValues::squaredNorm() const {
     double sumSquares = 0.0;
-    using boost::adaptors::map_values;
-    for(const Vector& v: *this | map_values)
-      sumSquares += v.squaredNorm();
+    for(const auto& [key, value]: *this) {
+      sumSquares += value.squaredNorm();
+    }
     return sumSquares;
   }
@@ -374,8 +373,9 @@
   /* ************************************************************************ */
   VectorValues& VectorValues::operator*=(double alpha) {
-    for(Vector& v: *this | map_values)
-      v *= alpha;
+    for (auto& [key, value]: *this) {
+      value *= alpha;
+    }
     return *this;
   }
diff --git a/gtsam/nonlinear/Expression-inl.h b/gtsam/nonlinear/Expression-inl.h
index c4899c66b..a18052556 100644
--- a/gtsam/nonlinear/Expression-inl.h
+++ b/gtsam/nonlinear/Expression-inl.h
@@ -229,8 +229,13 @@ typename Expression::KeysAndDims Expression::keysAndDims() const {
   dims(map);
   size_t n = map.size();
   KeysAndDims pair = std::make_pair(KeyVector(n), FastVector(n));
-  boost::copy(map | boost::adaptors::map_keys, pair.first.begin());
-  boost::copy(map | boost::adaptors::map_values, pair.second.begin());
+  // Copy map into pair of vectors
+  auto key_it = pair.first.begin();
+  auto dim_it = pair.second.begin();
+  for (const auto& [key, value] : map) {
+    *key_it++ = key;
+    *dim_it++ = value;
+  }
   return pair;
 }
diff --git a/gtsam/nonlinear/LevenbergMarquardtOptimizer.cpp b/gtsam/nonlinear/LevenbergMarquardtOptimizer.cpp
index 5cbed2a1f..3c0624986 100644
--- a/gtsam/nonlinear/LevenbergMarquardtOptimizer.cpp
+++ b/gtsam/nonlinear/LevenbergMarquardtOptimizer.cpp
@@ -29,7 +29,6 @@
 #include
 #include
-#include
 #include
 #include
 
@@ -41,7 +40,6 @@ using namespace std;
 
 namespace gtsam {
 
-using boost::adaptors::map_values;
 typedef internal::LevenbergMarquardtState State;
 
 /* ************************************************************************* */
@@ -281,8 +279,8 @@ GaussianFactorGraph::shared_ptr LevenbergMarquardtOptimizer::iterate() {
   VectorValues sqrtHessianDiagonal;
   if (params_.diagonalDamping) {
     sqrtHessianDiagonal = linear->hessianDiagonal();
-    for (Vector& v : sqrtHessianDiagonal | map_values) {
-      v = v.cwiseMax(params_.minDiagonal).cwiseMin(params_.maxDiagonal).cwiseSqrt();
+    for (auto& [key, value] : sqrtHessianDiagonal) {
+      value = value.cwiseMax(params_.minDiagonal).cwiseMin(params_.maxDiagonal).cwiseSqrt();
     }
   }
diff --git a/tests/testGaussianFactorGraphB.cpp b/tests/testGaussianFactorGraphB.cpp
index 19a3567e8..0d8cad53d 100644
--- a/tests/testGaussianFactorGraphB.cpp
+++ b/tests/testGaussianFactorGraphB.cpp
@@ -26,10 +26,6 @@
 #include
 
-#include
-#include
-namespace br { using namespace boost::range; using namespace boost::adaptors; }
-
 #include
 #include
 
@@ -450,7 +446,7 @@ TEST( GaussianFactorGraph, conditional_sigma_failure) {
   GaussianBayesTree actBT = *lfg.eliminateMultifrontal();
 
   // Check that all sigmas in an unconstrained bayes tree are set to one
-  for(const GaussianBayesTree::sharedClique& clique: actBT.nodes() | br::map_values) {
+  for (const auto& [key, clique]: actBT.nodes()) {
     GaussianConditional::shared_ptr conditional = clique->conditional();
     //size_t dim = conditional->rows();
     //EXPECT(assert_equal(gtsam::Vector::Ones(dim), conditional->get_model()->sigmas(), tol));
diff --git a/tests/testGaussianISAM.cpp b/tests/testGaussianISAM.cpp
index 63cf654e5..053dedb93 100644
--- a/tests/testGaussianISAM.cpp
+++ b/tests/testGaussianISAM.cpp
@@ -22,9 +22,6 @@
 #include
 #include
 
-#include
-namespace br { using namespace boost::adaptors; using namespace boost::range; }
-
 using namespace std;
 using namespace gtsam;
 using namespace example;
 
@@ -53,7 +50,7 @@ TEST( ISAM, iSAM_smoother )
   GaussianBayesTree expected = *smoother.eliminateMultifrontal(ordering);
 
   // Verify sigmas in the bayes tree
-  for(const GaussianBayesTree::sharedClique& clique: expected.nodes() | br::map_values) {
+  for (const auto& [key, clique] : expected.nodes()) {
     GaussianConditional::shared_ptr conditional = clique->conditional();
     EXPECT(!conditional->get_model());
   }
diff --git a/tests/testGaussianISAM2.cpp b/tests/testGaussianISAM2.cpp
index 8c4238e2f..8edb29d4d 100644
--- a/tests/testGaussianISAM2.cpp
+++ b/tests/testGaussianISAM2.cpp
@@ -24,8 +24,6 @@
 #include
 
-#include
-namespace br { using namespace boost::adaptors; using namespace boost::range; }
 
 using namespace std;
 using namespace gtsam;
 
@@ -249,7 +247,7 @@ bool isam_check(const NonlinearFactorGraph& fullgraph, const Values& fullinit, c
   // Check gradient at each node
   bool nodeGradientsOk = true;
   typedef ISAM2::sharedClique sharedClique;
-  for(const sharedClique& clique: isam.nodes() | br::map_values) {
+  for (const auto& [key, clique] : isam.nodes()) {
     // Compute expected gradient
     GaussianFactorGraph jfg;
     jfg += clique->conditional();
@@ -453,7 +451,7 @@ TEST(ISAM2, swapFactors)
 
   // Check gradient at each node
   typedef ISAM2::sharedClique sharedClique;
-  for(const sharedClique& clique: isam.nodes() | br::map_values) {
+  for (const auto& [key, clique]: isam.nodes()) {
     // Compute expected gradient
     GaussianFactorGraph jfg;
     jfg += clique->conditional();
@@ -578,7 +576,7 @@ TEST(ISAM2, constrained_ordering)
 
   // Check gradient at each node
   typedef ISAM2::sharedClique sharedClique;
-  for(const sharedClique& clique: isam.nodes() | br::map_values) {
+  for (const auto& [key, clique]: isam.nodes()) {
     // Compute expected gradient
     GaussianFactorGraph jfg;
     jfg += clique->conditional();
@@ -620,9 +618,11 @@ namespace {
   bool checkMarginalizeLeaves(ISAM2& isam, const FastList& leafKeys) {
     Matrix expectedAugmentedHessian, expected3AugmentedHessian;
     KeyVector toKeep;
-    for(Key j: isam.getDelta() | br::map_keys)
-      if(find(leafKeys.begin(), leafKeys.end(), j) == leafKeys.end())
-        toKeep.push_back(j);
+    for (const auto& [key, clique]: isam.getDelta()) {
+      if(find(leafKeys.begin(), leafKeys.end(), key) == leafKeys.end()) {
+        toKeep.push_back(key);
+      }
+    }
 
     // Calculate expected marginal from iSAM2 tree
     expectedAugmentedHessian = GaussianFactorGraph(isam).marginal(toKeep, EliminateQR)->augmentedHessian();
diff --git a/tests/testNonlinearOptimizer.cpp b/tests/testNonlinearOptimizer.cpp
index c15de4f5d..b73ac7376 100644
--- a/tests/testNonlinearOptimizer.cpp
+++ b/tests/testNonlinearOptimizer.cpp
@@ -31,8 +31,6 @@
 #include
 
-#include
-using boost::adaptors::map_values;
 
 #include
 #include
 
@@ -302,7 +300,9 @@ TEST_UNSAFE(NonlinearOptimizer, MoreOptimization) {
   GaussianFactorGraph::shared_ptr linear = optimizer.linearize();
   VectorValues d = linear->hessianDiagonal();
   VectorValues sqrtHessianDiagonal = d;
-  for (Vector& v : sqrtHessianDiagonal | map_values) v = v.cwiseSqrt();
+  for (auto& [key, value] : sqrtHessianDiagonal) {
+    value = value.cwiseSqrt();
+  }
   GaussianFactorGraph damped = optimizer.buildDampedSystem(*linear, sqrtHessianDiagonal);
   VectorValues expectedDiagonal = d + params.lambdaInitial * d;
   EXPECT(assert_equal(expectedDiagonal, damped.hessianDiagonal()));
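
For reference, the pattern adopted throughout this patch replaces Boost's `map_values`/`map_keys` range adaptors with C++17 structured bindings over the key-value container itself. The following is a minimal standalone sketch, not part of the patch: it uses a plain `std::map` with made-up keys and values rather than `gtsam::VectorValues`, purely to illustrate the idiom.

```cpp
#include <iostream>
#include <map>
#include <string>

int main() {
  // Stand-in for a key -> value container such as gtsam::VectorValues.
  std::map<std::string, double> values{{"x1", 1.0}, {"x2", 2.0}};

  // Old style (what the patch removes):
  //   for (const double& v : values | boost::adaptors::map_values) sum += v;
  // New style: structured bindings yield key and value together;
  // the key is simply ignored when only the value is needed.
  double sum = 0.0;
  for (const auto& [key, value] : values) {
    sum += value;
  }

  std::cout << "sum = " << sum << "\n";  // prints: sum = 3
  return 0;
}
```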